diff --git a/.bazelrc b/.bazelrc index 801b7193d4..03aa1d718e 100644 --- a/.bazelrc +++ b/.bazelrc @@ -28,6 +28,7 @@ build:linux --cxxopt="-fdiagnostics-color=always" build:windows --cxxopt="/GS-" --cxxopt="/std:c++17" --cxxopt="/permissive-" build:windows --cxxopt="/wd4244" --cxxopt="/wd4267" --cxxopt="/wd4819" +build:windows --cxxopt="/utf-8" build:windows --features=windows_export_all_symbols build:python --define=target_lang=python diff --git a/.github/scripts/generate-tensorrt-test-matrix.py b/.github/scripts/generate-tensorrt-test-matrix.py index ab24336ee2..004e6bdb68 100644 --- a/.github/scripts/generate-tensorrt-test-matrix.py +++ b/.github/scripts/generate-tensorrt-test-matrix.py @@ -52,6 +52,10 @@ "urls": "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.11.0/zip/TensorRT-10.11.0.33.Windows.win10.cuda-12.9.zip", "strip_prefix": "TensorRT-10.11.0.33", }, + "10.12.0": { + "urls": "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.12.0/zip/TensorRT-10.12.0.36.Windows.win10.cuda-12.9.zip", + "strip_prefix": "TensorRT-10.12.0.36", + }, }, "linux": { "10.3.0": { @@ -78,6 +82,10 @@ "urls": "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.11.0/tars/TensorRT-10.11.0.33.Linux.x86_64-gnu.cuda-12.9.tar.gz", "strip_prefix": "TensorRT-10.11.0.33", }, + "10.12.0": { + "urls": "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.12.0/tars/TensorRT-10.12.0.36.Linux.x86_64-gnu.cuda-12.9.tar.gz", + "strip_prefix": "TensorRT-10.12.0.36", + }, }, } diff --git a/.github/workflows/build-test-linux-aarch64-jetpack.yml b/.github/workflows/build-test-linux-aarch64-jetpack.yml index 744a802bfa..e4e4fbfc30 100644 --- a/.github/workflows/build-test-linux-aarch64-jetpack.yml +++ b/.github/workflows/build-test-linux-aarch64-jetpack.yml @@ -1,16 +1,17 @@ name: Build and test Linux aarch64 wheels for Jetpack on: - pull_request: - push: - branches: - - main - - nightly - - release/* - 
tags: - # NOTE: Binary build pipelines should only get triggered on release candidate builds - # Release candidate tags look like: v1.11.0-rc1 - - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + # TODO: Uncomment this when we have a stable release + # pull_request: + # push: + # branches: + # - main + # - nightly + # - release/* + # tags: + # # NOTE: Binary build pipelines should only get triggered on release candidate builds + # # Release candidate tags look like: v1.11.0-rc1 + # - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ workflow_dispatch: jobs: diff --git a/.github/workflows/build-test-tensorrt-windows.yml b/.github/workflows/build-test-tensorrt-windows.yml index 427b689656..1d7bf606e5 100644 --- a/.github/workflows/build-test-tensorrt-windows.yml +++ b/.github/workflows/build-test-tensorrt-windows.yml @@ -254,9 +254,9 @@ jobs: pushd . cd tests/py cd dynamo - python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_runtime_test_results.xml --ignore runtime/test_002_cudagraphs_py.py --ignore runtime/test_002_cudagraphs_cpp.py runtime/ + ../../../packaging/vc_env_helper.bat python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_runtime_test_results.xml --ignore runtime/test_002_cudagraphs_py.py --ignore runtime/test_002_cudagraphs_cpp.py runtime/ python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_partitioning_test_results.xml partitioning/ - python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_lowering_test_results.xml lowering/ + ../../../packaging/vc_env_helper.bat python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_lowering_test_results.xml lowering/ popd tests-py-dynamo-cudagraphs: diff --git a/.github/workflows/build-test-windows.yml b/.github/workflows/build-test-windows.yml index 40171540fc..ad8ae83846 100644 --- a/.github/workflows/build-test-windows.yml +++ b/.github/workflows/build-test-windows.yml @@ -230,7 +230,7 @@ jobs: 
cd dynamo python -m pytest -ra -n 10 --junitxml=${RUNNER_TEST_RESULTS_DIR}/torch_compile_be_test_results.xml backend/ python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/torch_complete_be_e2e_test_results.xml --ir torch_compile models/test_models.py - python -m pytest -ra --junitxml=${RUNNER_TEST_RESULTS_DIR}/torch_compile_dyn_models_export.xml --ir torch_compile models/test_dyn_models.py + ../../../packaging/vc_env_helper.bat python -m pytest -ra --junitxml=${RUNNER_TEST_RESULTS_DIR}/torch_compile_dyn_models_export.xml --ir torch_compile models/test_dyn_models.py popd tests-py-dynamo-core: @@ -258,9 +258,9 @@ jobs: pushd . cd tests/py cd dynamo - python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_runtime_test_results.xml --ignore runtime/test_002_cudagraphs_py.py --ignore runtime/test_002_cudagraphs_cpp.py runtime/ + ../../../packaging/vc_env_helper.bat python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_runtime_test_results.xml --ignore runtime/test_002_cudagraphs_py.py --ignore runtime/test_002_cudagraphs_cpp.py runtime/ python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_partitioning_test_results.xml partitioning/ - python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_lowering_test_results.xml lowering/ + ../../../packaging/vc_env_helper.bat python -m pytest -ra -n 4 --junitxml=${RUNNER_TEST_RESULTS_DIR}/tests_py_dynamo_core_lowering_test_results.xml lowering/ popd tests-py-dynamo-cudagraphs: diff --git a/.github/workflows/docgen.yml b/.github/workflows/docgen.yml index fc6afa8d0d..a943efe302 100644 --- a/.github/workflows/docgen.yml +++ b/.github/workflows/docgen.yml @@ -14,12 +14,12 @@ jobs: if: ${{ ! 
contains(github.actor, 'pytorchbot') }} environment: pytorchbot-env container: - image: docker.io/pytorch/manylinux2_28-builder:cuda12.8 + image: docker.io/pytorch/manylinux2_28-builder:cuda12.9 options: --gpus all env: - CUDA_HOME: /usr/local/cuda-12.8 - VERSION_SUFFIX: cu128 - CU_VERSION: cu128 + CUDA_HOME: /usr/local/cuda-12.9 + VERSION_SUFFIX: cu129 + CU_VERSION: cu129 CHANNEL: nightly CI_BUILD: 1 steps: @@ -35,14 +35,14 @@ jobs: - name: Install base deps run: | python3 -m pip install pip --upgrade - python3 -m pip install pyyaml numpy torch --pre --extra-index-url https://download.pytorch.org/whl/nightly/cu128 + python3 -m pip install pyyaml numpy torch --pre --extra-index-url https://download.pytorch.org/whl/nightly/cu129 ./packaging/pre_build_script.sh - name: Get HEAD SHA id: vars run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT - name: Build Python Package run: | - python3 -m pip install --pre . --extra-index-url https://download.pytorch.org/whl/nightly/cu128 + python3 -m pip install --pre . 
--extra-index-url https://download.pytorch.org/whl/nightly/cu129 - name: Generate New Docs run: | cd docsrc diff --git a/MODULE.bazel b/MODULE.bazel index 59196b85be..1b66e2c900 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -1,6 +1,6 @@ module( name = "torch_tensorrt", - version = "2.8.0a0", + version = "2.9.0a0", repo_name = "org_pytorch_tensorrt", ) @@ -103,18 +103,18 @@ http_archive( http_archive( name = "tensorrt", build_file = "@//third_party/tensorrt/archive:BUILD", - strip_prefix = "TensorRT-10.11.0.33", + strip_prefix = "TensorRT-10.12.0.36", urls = [ - "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.11.0/tars/TensorRT-10.11.0.33.Linux.x86_64-gnu.cuda-12.9.tar.gz", + "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.12.0/tars/TensorRT-10.12.0.36.Linux.x86_64-gnu.cuda-12.9.tar.gz", ], ) http_archive( name = "tensorrt_sbsa", build_file = "@//third_party/tensorrt/archive:BUILD", - strip_prefix = "TensorRT-10.11.0.33", + strip_prefix = "TensorRT-10.12.0.36", urls = [ - "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.11.0/tars/TensorRT-10.11.0.33.Linux.aarch64-gnu.cuda-12.9.tar.gz", + "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.12.0/tars/TensorRT-10.12.0.36.Linux.aarch64-gnu.cuda-12.9.tar.gz", ], ) @@ -130,9 +130,9 @@ http_archive( http_archive( name = "tensorrt_win", build_file = "@//third_party/tensorrt/archive:BUILD", - strip_prefix = "TensorRT-10.11.0.33", + strip_prefix = "TensorRT-10.12.0.36", urls = [ - "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.11.0/zip/TensorRT-10.11.0.33.Windows.win10.cuda-12.9.zip", + "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.12.0/zip/TensorRT-10.12.0.36.Windows.win10.cuda-12.9.zip", ], ) diff --git a/cpp/include/torch_tensorrt/macros.h b/cpp/include/torch_tensorrt/macros.h index bdc25f6cd8..020b94c114 100644 --- 
a/cpp/include/torch_tensorrt/macros.h +++ b/cpp/include/torch_tensorrt/macros.h @@ -24,7 +24,7 @@ #define STR(x) XSTR(x) #define TORCH_TENSORRT_MAJOR_VERSION 2 -#define TORCH_TENSORRT_MINOR_VERSION 6 +#define TORCH_TENSORRT_MINOR_VERSION 9 #define TORCH_TENSORRT_PATCH_VERSION 0 #define TORCH_TENSORRT_VERSION \ STR(TORCH_TENSORRT_MAJOR_VERSION) \ diff --git a/dev_dep_versions.yml b/dev_dep_versions.yml index 492035a76f..c9a738feb6 100644 --- a/dev_dep_versions.yml +++ b/dev_dep_versions.yml @@ -1,2 +1,2 @@ __cuda_version__: "12.8" -__tensorrt_version__: "10.11.0" +__tensorrt_version__: "10.12.0" diff --git a/docker/Dockerfile b/docker/Dockerfile index 23786435c6..b218211e38 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -2,9 +2,9 @@ # Base image starts with CUDA #TODO: cuda version -ARG BASE_IMG=nvidia/cuda:12.8.0-devel-ubuntu22.04 +ARG BASE_IMG=nvidia/cuda:12.9.0-devel-ubuntu22.04 FROM ${BASE_IMG} as base -ENV BASE_IMG=nvidia/cuda:12.8.0-devel-ubuntu22.04 +ENV BASE_IMG=nvidia/cuda:12.9.0-devel-ubuntu22.04 ARG TENSORRT_VERSION ENV TENSORRT_VERSION=${TENSORRT_VERSION} diff --git a/docker/README.md b/docker/README.md index 2e4141e7c7..c24660dce3 100644 --- a/docker/README.md +++ b/docker/README.md @@ -15,14 +15,14 @@ ### Instructions -- The example below uses TensorRT 10.11.0.33 +- The example below uses TensorRT 10.12.0.36 - See dependencies for a list of current default dependencies. > From root of Torch-TensorRT repo Build: ``` -DOCKER_BUILDKIT=1 docker build --build-arg TENSORRT_VERSION=10.11.0 -f docker/Dockerfile -t torch_tensorrt:latest . +DOCKER_BUILDKIT=1 docker build --build-arg TENSORRT_VERSION=10.12.0 -f docker/Dockerfile -t torch_tensorrt:latest . ``` Run: diff --git a/docker/dist-build.sh b/docker/dist-build.sh index faabedade2..7790c05f82 100755 --- a/docker/dist-build.sh +++ b/docker/dist-build.sh @@ -4,7 +4,7 @@ set -x TOP_DIR=$(cd $(dirname $0); pwd)/.. -BUILD_CMD="python -m pip wheel . 
--extra-index-url https://download.pytorch.org/whl/nightly/cu128 -w dist" +BUILD_CMD="python -m pip wheel . --extra-index-url https://download.pytorch.org/whl/nightly/cu129 -w dist" # TensorRT restricts our pip version cd ${TOP_DIR} \ diff --git a/docs/_cpp_api/classtorch__tensorrt_1_1DataType.html b/docs/_cpp_api/classtorch__tensorrt_1_1DataType.html index 73e2a7346e..5b796731d1 100644 --- a/docs/_cpp_api/classtorch__tensorrt_1_1DataType.html +++ b/docs/_cpp_api/classtorch__tensorrt_1_1DataType.html @@ -10,7 +10,7 @@ - Class DataType — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Class DataType — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
- v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
@@ -324,7 +324,7 @@

Getting Started

User Guide

@@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/classtorch__tensorrt_1_1Device_1_1DeviceType.html b/docs/_cpp_api/classtorch__tensorrt_1_1Device_1_1DeviceType.html index 8d514d4223..03315b06a3 100644 --- a/docs/_cpp_api/classtorch__tensorrt_1_1Device_1_1DeviceType.html +++ b/docs/_cpp_api/classtorch__tensorrt_1_1Device_1_1DeviceType.html @@ -10,7 +10,7 @@ - Class Device::DeviceType — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Class Device::DeviceType — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/classtorch__tensorrt_1_1TensorFormat.html b/docs/_cpp_api/classtorch__tensorrt_1_1TensorFormat.html index 0730638929..c4339870ee 100644 --- a/docs/_cpp_api/classtorch__tensorrt_1_1TensorFormat.html +++ b/docs/_cpp_api/classtorch__tensorrt_1_1TensorFormat.html @@ -10,7 +10,7 @@ - Class TensorFormat — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Class TensorFormat — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/classtorch__tensorrt_1_1ptq_1_1Int8CacheCalibrator.html b/docs/_cpp_api/classtorch__tensorrt_1_1ptq_1_1Int8CacheCalibrator.html index 401f7e04ef..5281cc2f4e 100644 --- a/docs/_cpp_api/classtorch__tensorrt_1_1ptq_1_1Int8CacheCalibrator.html +++ b/docs/_cpp_api/classtorch__tensorrt_1_1ptq_1_1Int8CacheCalibrator.html @@ -10,7 +10,7 @@ - Template Class Int8CacheCalibrator — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Template Class Int8CacheCalibrator — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/classtorch__tensorrt_1_1ptq_1_1Int8Calibrator.html b/docs/_cpp_api/classtorch__tensorrt_1_1ptq_1_1Int8Calibrator.html index 1260e6ff4a..e63ba1e774 100644 --- a/docs/_cpp_api/classtorch__tensorrt_1_1ptq_1_1Int8Calibrator.html +++ b/docs/_cpp_api/classtorch__tensorrt_1_1ptq_1_1Int8Calibrator.html @@ -10,7 +10,7 @@ - Template Class Int8Calibrator — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Template Class Int8Calibrator — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1a18d295a837ac71add5578860b55e5502.html b/docs/_cpp_api/define_macros_8h_1a18d295a837ac71add5578860b55e5502.html index 5079f76199..eebb2ccac4 100644 --- a/docs/_cpp_api/define_macros_8h_1a18d295a837ac71add5578860b55e5502.html +++ b/docs/_cpp_api/define_macros_8h_1a18d295a837ac71add5578860b55e5502.html @@ -10,7 +10,7 @@ - Define STR — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define STR — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1a282fd3c0b1c3a215148ae372070e1268.html b/docs/_cpp_api/define_macros_8h_1a282fd3c0b1c3a215148ae372070e1268.html index bc9abb1e30..1d55ee9217 100644 --- a/docs/_cpp_api/define_macros_8h_1a282fd3c0b1c3a215148ae372070e1268.html +++ b/docs/_cpp_api/define_macros_8h_1a282fd3c0b1c3a215148ae372070e1268.html @@ -10,7 +10,7 @@ - Define TORCH_TENSORRT_PATCH_VERSION — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define TORCH_TENSORRT_PATCH_VERSION — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1a31398a6d4d27e28817afb0f0139e909e.html b/docs/_cpp_api/define_macros_8h_1a31398a6d4d27e28817afb0f0139e909e.html index 3d5bc3cd48..0d7b6ed73b 100644 --- a/docs/_cpp_api/define_macros_8h_1a31398a6d4d27e28817afb0f0139e909e.html +++ b/docs/_cpp_api/define_macros_8h_1a31398a6d4d27e28817afb0f0139e909e.html @@ -10,7 +10,7 @@ - Define TORCH_TENSORRT_MAJOR_VERSION — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define TORCH_TENSORRT_MAJOR_VERSION — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1a35703561b26b1a9d2738ad7d58b27827.html b/docs/_cpp_api/define_macros_8h_1a35703561b26b1a9d2738ad7d58b27827.html index a42178a931..59ef3e3961 100644 --- a/docs/_cpp_api/define_macros_8h_1a35703561b26b1a9d2738ad7d58b27827.html +++ b/docs/_cpp_api/define_macros_8h_1a35703561b26b1a9d2738ad7d58b27827.html @@ -10,7 +10,7 @@ - Define TORCH_TENSORRT_MINOR_VERSION — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define TORCH_TENSORRT_MINOR_VERSION — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1a46612a64c219548c5ef03013eb2144ec.html b/docs/_cpp_api/define_macros_8h_1a46612a64c219548c5ef03013eb2144ec.html index d91f999bf9..0940c8e5d1 100644 --- a/docs/_cpp_api/define_macros_8h_1a46612a64c219548c5ef03013eb2144ec.html +++ b/docs/_cpp_api/define_macros_8h_1a46612a64c219548c5ef03013eb2144ec.html @@ -10,7 +10,7 @@ - Define TORCH_TENSORRT_PTQ_DEPRECATION — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define TORCH_TENSORRT_PTQ_DEPRECATION — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1abd1465eb38256d3f22cc1426b23d516b.html b/docs/_cpp_api/define_macros_8h_1abd1465eb38256d3f22cc1426b23d516b.html index 7093b26d1a..acafa1782d 100644 --- a/docs/_cpp_api/define_macros_8h_1abd1465eb38256d3f22cc1426b23d516b.html +++ b/docs/_cpp_api/define_macros_8h_1abd1465eb38256d3f22cc1426b23d516b.html @@ -10,7 +10,7 @@ - Define TORCHTRT_API — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define TORCHTRT_API — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1abe87b341f562fd1cf40b7672e4d759da.html b/docs/_cpp_api/define_macros_8h_1abe87b341f562fd1cf40b7672e4d759da.html index 7320711417..82f591f5f7 100644 --- a/docs/_cpp_api/define_macros_8h_1abe87b341f562fd1cf40b7672e4d759da.html +++ b/docs/_cpp_api/define_macros_8h_1abe87b341f562fd1cf40b7672e4d759da.html @@ -10,7 +10,7 @@ - Define XSTR — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define XSTR — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1ad19939408f7be171a74a89928b36eb59.html b/docs/_cpp_api/define_macros_8h_1ad19939408f7be171a74a89928b36eb59.html index c0a9255226..0bbb0c258e 100644 --- a/docs/_cpp_api/define_macros_8h_1ad19939408f7be171a74a89928b36eb59.html +++ b/docs/_cpp_api/define_macros_8h_1ad19939408f7be171a74a89928b36eb59.html @@ -10,7 +10,7 @@ - Define TORCHTRT_HIDDEN — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define TORCHTRT_HIDDEN — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/define_macros_8h_1adad592a7b1b7eed529cdf6acd584c883.html b/docs/_cpp_api/define_macros_8h_1adad592a7b1b7eed529cdf6acd584c883.html index 734a543be7..135899a5fd 100644 --- a/docs/_cpp_api/define_macros_8h_1adad592a7b1b7eed529cdf6acd584c883.html +++ b/docs/_cpp_api/define_macros_8h_1adad592a7b1b7eed529cdf6acd584c883.html @@ -10,7 +10,7 @@ - Define TORCH_TENSORRT_VERSION — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Define TORCH_TENSORRT_VERSION — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/dir_cpp.html b/docs/_cpp_api/dir_cpp.html index fa05e9e775..be4f7a0773 100644 --- a/docs/_cpp_api/dir_cpp.html +++ b/docs/_cpp_api/dir_cpp.html @@ -10,7 +10,7 @@ - Directory cpp — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Directory cpp — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/dir_cpp_include.html b/docs/_cpp_api/dir_cpp_include.html index 4b5c8007c0..ac3c694a51 100644 --- a/docs/_cpp_api/dir_cpp_include.html +++ b/docs/_cpp_api/dir_cpp_include.html @@ -10,7 +10,7 @@ - Directory include — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Directory include — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/dir_cpp_include_torch_tensorrt.html b/docs/_cpp_api/dir_cpp_include_torch_tensorrt.html index 1e17ed9ed6..e238a6634d 100644 --- a/docs/_cpp_api/dir_cpp_include_torch_tensorrt.html +++ b/docs/_cpp_api/dir_cpp_include_torch_tensorrt.html @@ -10,7 +10,7 @@ - Directory torch_tensorrt — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Directory torch_tensorrt — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/enum_logging_8h_1a130f65408ad8cbaee060f05e8db69558.html b/docs/_cpp_api/enum_logging_8h_1a130f65408ad8cbaee060f05e8db69558.html index 555eea042b..4a5a6d73b0 100644 --- a/docs/_cpp_api/enum_logging_8h_1a130f65408ad8cbaee060f05e8db69558.html +++ b/docs/_cpp_api/enum_logging_8h_1a130f65408ad8cbaee060f05e8db69558.html @@ -10,7 +10,7 @@ - Enum Level — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Enum Level — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/enum_torch__tensorrt_8h_1a3fbe5d72e4fc624dbd038853079620eb.html b/docs/_cpp_api/enum_torch__tensorrt_8h_1a3fbe5d72e4fc624dbd038853079620eb.html index bac86514a0..4858490cde 100644 --- a/docs/_cpp_api/enum_torch__tensorrt_8h_1a3fbe5d72e4fc624dbd038853079620eb.html +++ b/docs/_cpp_api/enum_torch__tensorrt_8h_1a3fbe5d72e4fc624dbd038853079620eb.html @@ -10,7 +10,7 @@ - Enum EngineCapability — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Enum EngineCapability — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/file_cpp_include_torch_tensorrt_logging.h.html b/docs/_cpp_api/file_cpp_include_torch_tensorrt_logging.h.html index 05291f65b0..b3ce5a7305 100644 --- a/docs/_cpp_api/file_cpp_include_torch_tensorrt_logging.h.html +++ b/docs/_cpp_api/file_cpp_include_torch_tensorrt_logging.h.html @@ -10,7 +10,7 @@ - File logging.h — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + File logging.h — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/file_cpp_include_torch_tensorrt_macros.h.html b/docs/_cpp_api/file_cpp_include_torch_tensorrt_macros.h.html index f1c9897ba1..a033874257 100644 --- a/docs/_cpp_api/file_cpp_include_torch_tensorrt_macros.h.html +++ b/docs/_cpp_api/file_cpp_include_torch_tensorrt_macros.h.html @@ -10,7 +10,7 @@ - File macros.h — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + File macros.h — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/file_cpp_include_torch_tensorrt_ptq.h.html b/docs/_cpp_api/file_cpp_include_torch_tensorrt_ptq.h.html index 41c42b05ff..a7bb744834 100644 --- a/docs/_cpp_api/file_cpp_include_torch_tensorrt_ptq.h.html +++ b/docs/_cpp_api/file_cpp_include_torch_tensorrt_ptq.h.html @@ -10,7 +10,7 @@ - File ptq.h — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + File ptq.h — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/file_cpp_include_torch_tensorrt_torch_tensorrt.h.html b/docs/_cpp_api/file_cpp_include_torch_tensorrt_torch_tensorrt.h.html index b0bda43bb9..f5f827ed48 100644 --- a/docs/_cpp_api/file_cpp_include_torch_tensorrt_torch_tensorrt.h.html +++ b/docs/_cpp_api/file_cpp_include_torch_tensorrt_torch_tensorrt.h.html @@ -10,7 +10,7 @@ - File torch_tensorrt.h — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + File torch_tensorrt.h — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_logging_8h_1a0593f776f469c20469e2f729fc7861a3.html b/docs/_cpp_api/function_logging_8h_1a0593f776f469c20469e2f729fc7861a3.html index 8b7e6800b0..5040bd5fa4 100644 --- a/docs/_cpp_api/function_logging_8h_1a0593f776f469c20469e2f729fc7861a3.html +++ b/docs/_cpp_api/function_logging_8h_1a0593f776f469c20469e2f729fc7861a3.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::logging::get_logging_prefix — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::logging::get_logging_prefix — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_logging_8h_1a0c012cb374addd90eb1f42eaec570650.html b/docs/_cpp_api/function_logging_8h_1a0c012cb374addd90eb1f42eaec570650.html index 91c69b4359..1b26ba728d 100644 --- a/docs/_cpp_api/function_logging_8h_1a0c012cb374addd90eb1f42eaec570650.html +++ b/docs/_cpp_api/function_logging_8h_1a0c012cb374addd90eb1f42eaec570650.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::logging::get_reportable_log_level — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::logging::get_reportable_log_level — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_logging_8h_1a56e110feaaba2c3fd44bd201fd21a76a.html b/docs/_cpp_api/function_logging_8h_1a56e110feaaba2c3fd44bd201fd21a76a.html index cb0bf6fd6c..6abcbf672f 100644 --- a/docs/_cpp_api/function_logging_8h_1a56e110feaaba2c3fd44bd201fd21a76a.html +++ b/docs/_cpp_api/function_logging_8h_1a56e110feaaba2c3fd44bd201fd21a76a.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::logging::get_is_colored_output_on — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::logging::get_is_colored_output_on — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_logging_8h_1a7cb50492421ea9de4e3db895819df6f2.html b/docs/_cpp_api/function_logging_8h_1a7cb50492421ea9de4e3db895819df6f2.html index a72f173be7..37286fc036 100644 --- a/docs/_cpp_api/function_logging_8h_1a7cb50492421ea9de4e3db895819df6f2.html +++ b/docs/_cpp_api/function_logging_8h_1a7cb50492421ea9de4e3db895819df6f2.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::logging::set_reportable_log_level — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::logging::set_reportable_log_level — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_logging_8h_1ac46ac0901cb97e3ae6e93b45f24e90b8.html b/docs/_cpp_api/function_logging_8h_1ac46ac0901cb97e3ae6e93b45f24e90b8.html index d5c577f9f2..2920517062 100644 --- a/docs/_cpp_api/function_logging_8h_1ac46ac0901cb97e3ae6e93b45f24e90b8.html +++ b/docs/_cpp_api/function_logging_8h_1ac46ac0901cb97e3ae6e93b45f24e90b8.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::logging::log — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::logging::log — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_logging_8h_1ad2efd47b6c3689e58ccc595680579ae5.html b/docs/_cpp_api/function_logging_8h_1ad2efd47b6c3689e58ccc595680579ae5.html index aea0cf0b97..c993306f32 100644 --- a/docs/_cpp_api/function_logging_8h_1ad2efd47b6c3689e58ccc595680579ae5.html +++ b/docs/_cpp_api/function_logging_8h_1ad2efd47b6c3689e58ccc595680579ae5.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::logging::set_is_colored_output_on — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::logging::set_is_colored_output_on — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_logging_8h_1af8f3443813315af7901903d25dd495cc.html b/docs/_cpp_api/function_logging_8h_1af8f3443813315af7901903d25dd495cc.html index e359d78daf..6c6c69c67c 100644 --- a/docs/_cpp_api/function_logging_8h_1af8f3443813315af7901903d25dd495cc.html +++ b/docs/_cpp_api/function_logging_8h_1af8f3443813315af7901903d25dd495cc.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::logging::set_logging_prefix — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::logging::set_logging_prefix — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_ptq_8h_1ab79e3404965db0eec712f7268f29138a.html b/docs/_cpp_api/function_ptq_8h_1ab79e3404965db0eec712f7268f29138a.html index 0f90ffca58..bd3265dd70 100644 --- a/docs/_cpp_api/function_ptq_8h_1ab79e3404965db0eec712f7268f29138a.html +++ b/docs/_cpp_api/function_ptq_8h_1ab79e3404965db0eec712f7268f29138a.html @@ -10,7 +10,7 @@ - Template Function torch_tensorrt::ptq::make_int8_calibrator — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Template Function torch_tensorrt::ptq::make_int8_calibrator — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_ptq_8h_1af43f19b7b8f732447847cef1e8bd02ac.html b/docs/_cpp_api/function_ptq_8h_1af43f19b7b8f732447847cef1e8bd02ac.html index 48d70f9b5c..856e852857 100644 --- a/docs/_cpp_api/function_ptq_8h_1af43f19b7b8f732447847cef1e8bd02ac.html +++ b/docs/_cpp_api/function_ptq_8h_1af43f19b7b8f732447847cef1e8bd02ac.html @@ -10,7 +10,7 @@ - Template Function torch_tensorrt::ptq::make_int8_cache_calibrator — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Template Function torch_tensorrt::ptq::make_int8_cache_calibrator — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_torch__tensorrt_8h_1a5b405fd3bf3c8fc2e2a54cbbab979797.html b/docs/_cpp_api/function_torch__tensorrt_8h_1a5b405fd3bf3c8fc2e2a54cbbab979797.html index 4fec65214a..7dd0d70e95 100644 --- a/docs/_cpp_api/function_torch__tensorrt_8h_1a5b405fd3bf3c8fc2e2a54cbbab979797.html +++ b/docs/_cpp_api/function_torch__tensorrt_8h_1a5b405fd3bf3c8fc2e2a54cbbab979797.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::torchscript::check_method_operator_support — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::torchscript::check_method_operator_support — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_torch__tensorrt_8h_1a6e19490a08fb1553c9dd347a5ae79db9.html b/docs/_cpp_api/function_torch__tensorrt_8h_1a6e19490a08fb1553c9dd347a5ae79db9.html index 38b2bbfdf5..5c885628ee 100644 --- a/docs/_cpp_api/function_torch__tensorrt_8h_1a6e19490a08fb1553c9dd347a5ae79db9.html +++ b/docs/_cpp_api/function_torch__tensorrt_8h_1a6e19490a08fb1553c9dd347a5ae79db9.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::torchscript::compile — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::torchscript::compile — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_torch__tensorrt_8h_1a81f9783517335dda877d8cfcf38987c9.html b/docs/_cpp_api/function_torch__tensorrt_8h_1a81f9783517335dda877d8cfcf38987c9.html index aafafc1275..22f10c6f6c 100644 --- a/docs/_cpp_api/function_torch__tensorrt_8h_1a81f9783517335dda877d8cfcf38987c9.html +++ b/docs/_cpp_api/function_torch__tensorrt_8h_1a81f9783517335dda877d8cfcf38987c9.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::torchscript::embed_engine_in_new_module — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::torchscript::embed_engine_in_new_module — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_torch__tensorrt_8h_1ac4ab8313ae72c2c899ea31548b528528.html b/docs/_cpp_api/function_torch__tensorrt_8h_1ac4ab8313ae72c2c899ea31548b528528.html index 3da78143e0..bf4bab510c 100644 --- a/docs/_cpp_api/function_torch__tensorrt_8h_1ac4ab8313ae72c2c899ea31548b528528.html +++ b/docs/_cpp_api/function_torch__tensorrt_8h_1ac4ab8313ae72c2c899ea31548b528528.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::get_build_info — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::get_build_info — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_torch__tensorrt_8h_1ad1acd06eaeaffbbcf6e7ebf426891384.html b/docs/_cpp_api/function_torch__tensorrt_8h_1ad1acd06eaeaffbbcf6e7ebf426891384.html index ca229242c8..ad5492cff9 100644 --- a/docs/_cpp_api/function_torch__tensorrt_8h_1ad1acd06eaeaffbbcf6e7ebf426891384.html +++ b/docs/_cpp_api/function_torch__tensorrt_8h_1ad1acd06eaeaffbbcf6e7ebf426891384.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::set_device — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::set_device — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_torch__tensorrt_8h_1ad6a4ee8ca6c8f6e5519eb1128ec7f4a1.html b/docs/_cpp_api/function_torch__tensorrt_8h_1ad6a4ee8ca6c8f6e5519eb1128ec7f4a1.html index 5b5577cdf7..3d15cc3dce 100644 --- a/docs/_cpp_api/function_torch__tensorrt_8h_1ad6a4ee8ca6c8f6e5519eb1128ec7f4a1.html +++ b/docs/_cpp_api/function_torch__tensorrt_8h_1ad6a4ee8ca6c8f6e5519eb1128ec7f4a1.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::dump_build_info — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::dump_build_info — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/function_torch__tensorrt_8h_1ae8d56472106eeef37fbe51ff7f40c9b2.html b/docs/_cpp_api/function_torch__tensorrt_8h_1ae8d56472106eeef37fbe51ff7f40c9b2.html index 4065ebf9b4..30b5da50f3 100644 --- a/docs/_cpp_api/function_torch__tensorrt_8h_1ae8d56472106eeef37fbe51ff7f40c9b2.html +++ b/docs/_cpp_api/function_torch__tensorrt_8h_1ae8d56472106eeef37fbe51ff7f40c9b2.html @@ -10,7 +10,7 @@ - Function torch_tensorrt::torchscript::convert_method_to_trt_engine — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Function torch_tensorrt::torchscript::convert_method_to_trt_engine — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/namespace_torch_tensorrt.html b/docs/_cpp_api/namespace_torch_tensorrt.html index 909c9c18b4..a29c31e2d6 100644 --- a/docs/_cpp_api/namespace_torch_tensorrt.html +++ b/docs/_cpp_api/namespace_torch_tensorrt.html @@ -10,7 +10,7 @@ - Namespace torch_tensorrt — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Namespace torch_tensorrt — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/namespace_torch_tensorrt__logging.html b/docs/_cpp_api/namespace_torch_tensorrt__logging.html index 67c56201b5..d95f350a44 100644 --- a/docs/_cpp_api/namespace_torch_tensorrt__logging.html +++ b/docs/_cpp_api/namespace_torch_tensorrt__logging.html @@ -10,7 +10,7 @@ - Namespace torch_tensorrt::logging — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Namespace torch_tensorrt::logging — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/namespace_torch_tensorrt__ptq.html b/docs/_cpp_api/namespace_torch_tensorrt__ptq.html index e09b764ffe..3aa7f69eca 100644 --- a/docs/_cpp_api/namespace_torch_tensorrt__ptq.html +++ b/docs/_cpp_api/namespace_torch_tensorrt__ptq.html @@ -10,7 +10,7 @@ - Namespace torch_tensorrt::ptq — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Namespace torch_tensorrt::ptq — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/namespace_torch_tensorrt__torchscript.html b/docs/_cpp_api/namespace_torch_tensorrt__torchscript.html index 8f366ed4fa..5a9321b338 100644 --- a/docs/_cpp_api/namespace_torch_tensorrt__torchscript.html +++ b/docs/_cpp_api/namespace_torch_tensorrt__torchscript.html @@ -10,7 +10,7 @@ - Namespace torch_tensorrt::torchscript — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Namespace torch_tensorrt::torchscript — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_logging.h.html b/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_logging.h.html index 73e98c40d1..5dda4bbf58 100644 --- a/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_logging.h.html +++ b/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_logging.h.html @@ -10,7 +10,7 @@ - Program Listing for File logging.h — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Program Listing for File logging.h — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_macros.h.html b/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_macros.h.html index 1b46092535..df22283e9f 100644 --- a/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_macros.h.html +++ b/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_macros.h.html @@ -10,7 +10,7 @@ - Program Listing for File macros.h — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Program Listing for File macros.h — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -518,7 +517,7 @@ #define STR(x) XSTR(x) #define TORCH_TENSORRT_MAJOR_VERSION 2 -#define TORCH_TENSORRT_MINOR_VERSION 6 +#define TORCH_TENSORRT_MINOR_VERSION 9 #define TORCH_TENSORRT_PATCH_VERSION 0 #define TORCH_TENSORRT_VERSION \ STR(TORCH_TENSORRT_MAJOR_VERSION) \ diff --git a/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_ptq.h.html b/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_ptq.h.html index 8c97b48227..d7dd1cc928 100644 --- a/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_ptq.h.html +++ b/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_ptq.h.html @@ -10,7 +10,7 @@ - Program Listing for File ptq.h — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Program Listing for File ptq.h — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_torch_tensorrt.h.html b/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_torch_tensorrt.h.html index 6d2d285c74..b0ec916224 100644 --- a/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_torch_tensorrt.h.html +++ b/docs/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_torch_tensorrt.h.html @@ -10,7 +10,7 @@ - Program Listing for File torch_tensorrt.h — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Program Listing for File torch_tensorrt.h — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/structtorch__tensorrt_1_1Device.html b/docs/_cpp_api/structtorch__tensorrt_1_1Device.html index cf862f8ffc..ec69aeb2fb 100644 --- a/docs/_cpp_api/structtorch__tensorrt_1_1Device.html +++ b/docs/_cpp_api/structtorch__tensorrt_1_1Device.html @@ -10,7 +10,7 @@ - Struct Device — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Struct Device — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/structtorch__tensorrt_1_1GraphInputs.html b/docs/_cpp_api/structtorch__tensorrt_1_1GraphInputs.html index b3d77efa05..b46000f911 100644 --- a/docs/_cpp_api/structtorch__tensorrt_1_1GraphInputs.html +++ b/docs/_cpp_api/structtorch__tensorrt_1_1GraphInputs.html @@ -10,7 +10,7 @@ - Struct GraphInputs — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Struct GraphInputs — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/structtorch__tensorrt_1_1Input.html b/docs/_cpp_api/structtorch__tensorrt_1_1Input.html index 9247cd0394..10fb4c7aa1 100644 --- a/docs/_cpp_api/structtorch__tensorrt_1_1Input.html +++ b/docs/_cpp_api/structtorch__tensorrt_1_1Input.html @@ -10,7 +10,7 @@ - Struct Input — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Struct Input — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/structtorch__tensorrt_1_1torchscript_1_1CompileSpec.html b/docs/_cpp_api/structtorch__tensorrt_1_1torchscript_1_1CompileSpec.html index bebf72a4ae..4ee92ee792 100644 --- a/docs/_cpp_api/structtorch__tensorrt_1_1torchscript_1_1CompileSpec.html +++ b/docs/_cpp_api/structtorch__tensorrt_1_1torchscript_1_1CompileSpec.html @@ -10,7 +10,7 @@ - Struct CompileSpec — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Struct CompileSpec — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/torch_tensort_cpp.html b/docs/_cpp_api/torch_tensort_cpp.html index 1d14175a0f..7cd25ba30e 100644 --- a/docs/_cpp_api/torch_tensort_cpp.html +++ b/docs/_cpp_api/torch_tensort_cpp.html @@ -10,7 +10,7 @@ - Torch-TensorRT C++ API — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Torch-TensorRT C++ API — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_cpp_api/unabridged_orphan.html b/docs/_cpp_api/unabridged_orphan.html index 60c9e00db2..8ef1b37a5c 100644 --- a/docs/_cpp_api/unabridged_orphan.html +++ b/docs/_cpp_api/unabridged_orphan.html @@ -10,7 +10,7 @@ - Full API — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Full API — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_downloads/012651bbc3509a32c4bc4b89d616e4c3/hierarchical_partitioner_example.ipynb b/docs/_downloads/012651bbc3509a32c4bc4b89d616e4c3/hierarchical_partitioner_example.ipynb new file mode 100644 index 0000000000..fb8446f6c5 --- /dev/null +++ b/docs/_downloads/012651bbc3509a32c4bc4b89d616e4c3/hierarchical_partitioner_example.ipynb @@ -0,0 +1,43 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n\n# Hierarchical Partitioner Example\n\nBasic example on how to use the hierarchical adjacency partitioner function and manually compile the partitioned model.\nNot yet available in the compile API.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from typing import Any, Callable\n\nimport torch\nimport torch.nn as nn\nimport torch_tensorrt\nfrom torch_tensorrt._enums import dtype\nfrom torch_tensorrt.dynamo import partitioning\nfrom torch_tensorrt.dynamo._compiler import convert_module\nfrom torch_tensorrt.dynamo.conversion._ConverterRegistry import (\n DYNAMO_CONVERTERS as CONVERTERS,\n)\nfrom torch_tensorrt.dynamo.lowering import (\n get_decompositions,\n pre_export_lowering,\n)\nfrom torch_tensorrt.dynamo.partitioning._hierarchical_partitioner import (\n hierarchical_adjacency_partition,\n)\nfrom torch_tensorrt.dynamo.utils import (\n get_output_metadata,\n)\nfrom torchvision import models\n\n\nclass InductorModule(torch.nn.Module): # type: ignore[misc]\n \"\"\"Wrapper module for inductor compiled function.\"\"\"\n\n def __init__(self, func: Callable[..., Any]) -> None:\n super().__init__()\n self.func = func\n\n def forward(self, *args: Any, **kwargs: Any) -> Any:\n return self.func(*args, **kwargs)\n\n\nclass SimpleModel(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 64, kernel_size=3, padding=1)\n self.conv2 = nn.Conv2d(64, 128, kernel_size=3, padding=1)\n self.bn1 = 
nn.BatchNorm2d(64)\n self.bn2 = nn.BatchNorm2d(128)\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.bn1(x)\n x = torch.relu(x)\n x = self.conv2(x)\n x = self.bn2(x)\n x = torch.relu(x)\n return x\n\n\ndef main():\n # Create model\n model = SimpleModel().cuda()\n # model = models.efficientnet_b0(pretrained=True).cuda()\n model = model.eval()\n\n # Create example input\n example_input = torch.randn(1, 3, 224, 224).cuda()\n\n exported_program = torch.export.export(model, (example_input,))\n exported_program = pre_export_lowering(exported_program)\n exported_program = exported_program.run_decompositions(get_decompositions())\n\n gm = exported_program.module()\n\n print(\"Original Model Structure:\\n\", gm)\n\n original_output = model(example_input)\n\n # 1. Partition the model into blocks that can be executed by different backends\n partitioned_model, op_support = hierarchical_adjacency_partition(\n gm,\n min_block_size=1,\n backend_priority=[\"inductor\", \"tensorrt\"],\n backend_support_map={\n \"inductor\": {\n \"torch.ops.aten.convolution.default\",\n },\n \"tensorrt\": CONVERTERS.keys(),\n },\n torch_executed_ops={\n \"torch.ops.aten._native_batch_norm_legit_no_training.default\"\n },\n require_full_compilation=False,\n skip_fusion=True,\n )\n\n print(\"1. Partitioned Model Structure:\\n\", partitioned_model)\n\n # 2. 
Compile each submodule with the corresponding backend\n submodule_node_dict = {}\n for node in partitioned_model.graph.nodes:\n if \"_run_on_acc\" not in node.name:\n continue\n submodule_node_dict[node.name] = node\n\n # Store compiled replicas of Torch subgraphs\n compiled_modules = {}\n\n for name, _ in partitioned_model.named_children():\n submodule = getattr(partitioned_model, name)\n if not isinstance(submodule, torch.fx.graph_module.GraphModule):\n continue\n\n if \"_run_on_acc\" not in name:\n submodule.to(\"cuda\")\n continue\n\n if name not in submodule_node_dict:\n raise ValueError(\n f\"node_name: {name} does not exist in the submodule node dictionary\"\n )\n\n # set the submodule metadata back to the parent module_node\n metadata_list = get_output_metadata(submodule)\n assert len(metadata_list) > 0\n metadata_keys = [\"val\", \"tensor_meta\"]\n for key in metadata_keys:\n if key not in submodule_node_dict[name].meta:\n meta_val_list = [\n metadata[key] for metadata in metadata_list if key in metadata\n ]\n submodule_node_dict[name].meta[key] = meta_val_list\n break\n\n # Get the submodule inputs for min, opt, max shapes of the graph inputs\n submodule_inputs = partitioning.construct_submodule_inputs(submodule)\n assert submodule_inputs is not None\n\n # compile submodule with pytorch inductor backend\n if \"_run_on_acc_inductor\" in name:\n sub_inputs = []\n for input in submodule_inputs:\n sub_input = input.torch_tensor.to(\n dtype.to(input.dtype, t=torch.dtype)\n ).cuda()\n sub_inputs.append(sub_input)\n\n compiled_func = torch._inductor.compile(\n submodule,\n sub_inputs,\n )\n # Wrap the compiled function to be a torch.nn.Module\n compiled_submodule = InductorModule(compiled_func)\n\n # compile submodule with tensorrt backend\n elif \"_run_on_acc_tensorrt\" in name:\n compiled_submodule = convert_module(\n submodule,\n submodule_inputs,\n name=name,\n )\n else:\n raise ValueError(f\"Unknown backend for submodule: {name}\")\n\n 
compiled_modules[name] = compiled_submodule\n\n # Replace all FX Modules with compiled Modules\n for name, compiled_module in compiled_modules.items():\n setattr(partitioned_model, name, compiled_module)\n\n print(\"2. Compiled Model Structure:\\n\", partitioned_model)\n\n with torch.no_grad():\n partitioned_output = partitioned_model(example_input)\n print(\n \"3. Verify that Partitioned output == Original output:\",\n torch.allclose(partitioned_output, original_output, 1e-2, 1e-2),\n )\n\n\nif __name__ == \"__main__\":\n main()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.13" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/docs/_downloads/06a1dddfb8c2b5515b697700d863a453/engine_caching_bert_example.ipynb b/docs/_downloads/06a1dddfb8c2b5515b697700d863a453/engine_caching_bert_example.ipynb index ff65e05477..8a56ac763f 100644 --- a/docs/_downloads/06a1dddfb8c2b5515b697700d863a453/engine_caching_bert_example.ipynb +++ b/docs/_downloads/06a1dddfb8c2b5515b697700d863a453/engine_caching_bert_example.ipynb @@ -15,7 +15,7 @@ }, "outputs": [], "source": [ - "import numpy as np\nimport torch\nimport torch_tensorrt\nfrom engine_caching_example import remove_timing_cache\nfrom transformers import BertModel\n\nnp.random.seed(0)\ntorch.manual_seed(0)\n\nmodel = BertModel.from_pretrained(\"bert-base-uncased\", return_dict=False).cuda().eval()\ninputs = [\n torch.randint(0, 2, (1, 14), dtype=torch.int32).to(\"cuda\"),\n torch.randint(0, 2, (1, 14), dtype=torch.int32).to(\"cuda\"),\n]\n\n\ndef compile_bert(iterations=3):\n times = []\n start = torch.cuda.Event(enable_timing=True)\n end = 
torch.cuda.Event(enable_timing=True)\n\n # The 1st iteration is to measure the compilation time without engine caching\n # The 2nd and 3rd iterations are to measure the compilation time with engine caching.\n # Since the 2nd iteration needs to compile and save the engine, it will be slower than the 1st iteration.\n # The 3rd iteration should be faster than the 1st iteration because it loads the cached engine.\n for i in range(iterations):\n # remove timing cache and reset dynamo for engine caching messurement\n remove_timing_cache()\n torch._dynamo.reset()\n\n if i == 0:\n cache_built_engines = False\n reuse_cached_engines = False\n else:\n cache_built_engines = True\n reuse_cached_engines = True\n\n start.record()\n compilation_kwargs = {\n \"use_python_runtime\": False,\n \"enabled_precisions\": {torch.float},\n \"truncate_double\": True,\n \"debug\": False,\n \"min_block_size\": 1,\n \"immutable_weights\": False,\n \"cache_built_engines\": cache_built_engines,\n \"reuse_cached_engines\": reuse_cached_engines,\n \"engine_cache_dir\": \"/tmp/torch_trt_bert_engine_cache\",\n \"engine_cache_size\": 1 << 30, # 1GB\n }\n optimized_model = torch.compile(\n model,\n backend=\"torch_tensorrt\",\n options=compilation_kwargs,\n )\n optimized_model(*inputs)\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\n print(\"-----compile bert-----> compilation time:\\n\", times, \"milliseconds\")\n\n\nif __name__ == \"__main__\":\n compile_bert()" + "import numpy as np\nimport torch\nimport torch_tensorrt\nfrom engine_caching_example import remove_timing_cache\nfrom transformers import BertModel\n\nnp.random.seed(0)\ntorch.manual_seed(0)\n\nmodel = BertModel.from_pretrained(\"bert-base-uncased\", return_dict=False).cuda().eval()\ninputs = [\n torch.randint(0, 2, (1, 14), dtype=torch.int32).to(\"cuda\"),\n torch.randint(0, 2, (1, 14), dtype=torch.int32).to(\"cuda\"),\n]\n\n\ndef compile_bert(iterations=3):\n times = []\n start = 
torch.cuda.Event(enable_timing=True)\n end = torch.cuda.Event(enable_timing=True)\n\n # The 1st iteration is to measure the compilation time without engine caching\n # The 2nd and 3rd iterations are to measure the compilation time with engine caching.\n # Since the 2nd iteration needs to compile and save the engine, it will be slower than the 1st iteration.\n # The 3rd iteration should be faster than the 1st iteration because it loads the cached engine.\n for i in range(iterations):\n # remove timing cache and reset dynamo for engine caching messurement\n remove_timing_cache()\n torch._dynamo.reset()\n\n if i == 0:\n cache_built_engines = False\n reuse_cached_engines = False\n else:\n cache_built_engines = True\n reuse_cached_engines = True\n\n start.record()\n compilation_kwargs = {\n \"use_python_runtime\": False,\n \"enabled_precisions\": {torch.float},\n \"truncate_double\": True,\n \"min_block_size\": 1,\n \"immutable_weights\": False,\n \"cache_built_engines\": cache_built_engines,\n \"reuse_cached_engines\": reuse_cached_engines,\n \"engine_cache_dir\": \"/tmp/torch_trt_bert_engine_cache\",\n \"engine_cache_size\": 1 << 30, # 1GB\n }\n optimized_model = torch.compile(\n model,\n backend=\"torch_tensorrt\",\n options=compilation_kwargs,\n )\n optimized_model(*inputs)\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\n print(\"-----compile bert-----> compilation time:\\n\", times, \"milliseconds\")\n\n\nif __name__ == \"__main__\":\n compile_bert()" ] } ], @@ -35,7 +35,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/0c66936e5fb86b43de3fa45f5f060b9d/torch_export_flux_dev.ipynb b/docs/_downloads/0c66936e5fb86b43de3fa45f5f060b9d/torch_export_flux_dev.ipynb index fe84782f38..2f62f38159 100644 --- a/docs/_downloads/0c66936e5fb86b43de3fa45f5f060b9d/torch_export_flux_dev.ipynb +++ 
b/docs/_downloads/0c66936e5fb86b43de3fa45f5f060b9d/torch_export_flux_dev.ipynb @@ -7,6 +7,17 @@ "\n\n# Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend\n\nThis example illustrates the state of the art model [FLUX.1-dev](https://huggingface.co/black-forest-labs/FLUX.1-dev) optimized using\nTorch-TensorRT.\n\n**FLUX.1 [dev]** is a 12 billion parameter rectified flow transformer capable of generating images from text descriptions. It is an open-weight, guidance-distilled model for non-commercial applications.\n\nTo run this demo, you need to have access to Flux model (request for access if you do not have it already on the [FLUX.1-dev](https://huggingface.co/black-forest-labs/FLUX.1-dev) page) and install the following dependencies\n\n```python\npip install sentencepiece==\"0.2.0\" transformers==\"4.48.2\" accelerate==\"1.3.0\" diffusers==\"0.32.2\" protobuf==\"5.29.3\"\n```\nThere are different components of the ``FLUX.1-dev`` pipeline such as ``transformer``, ``vae``, ``text_encoder``, ``tokenizer`` and ``scheduler``. 
In this example,\nwe demonstrate optimizing the ``transformer`` component of the model (which typically consumes >95% of the e2e diffusion latency)\n" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import register_sdpa # Register SDPA as a standalone operator" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -76,7 +87,7 @@ }, "outputs": [], "source": [ - "trt_gm = torch_tensorrt.dynamo.compile(\n ep,\n inputs=dummy_inputs,\n enabled_precisions={torch.float32},\n truncate_double=True,\n min_block_size=1,\n use_fp32_acc=True,\n use_explicit_typing=True,\n)" + "trt_gm = torch_tensorrt.dynamo.compile(\n ep,\n inputs=dummy_inputs,\n enabled_precisions={torch.float32},\n truncate_double=True,\n min_block_size=1,\n use_fp32_acc=True,\n use_explicit_typing=True,\n immutable_weights=False,\n offload_module_to_cpu=True,\n)" ] }, { @@ -94,7 +105,7 @@ }, "outputs": [], "source": [ - "del ep\nbackbone.to(\"cpu\")\npipe.to(DEVICE)\ntorch.cuda.empty_cache()\npipe.transformer = trt_gm\npipe.transformer.config = config" + "pipe.transformer = None\npipe.to(DEVICE)\npipe.transformer = trt_gm\ndel ep\ntorch.cuda.empty_cache()\npipe.transformer.config = config\ntrt_gm.device = torch.device(\"cuda\")" ] }, { @@ -139,7 +150,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/0daf1d0af656cac7b808856b71e6616f/torch_compile_resnet_example.ipynb b/docs/_downloads/0daf1d0af656cac7b808856b71e6616f/torch_compile_resnet_example.ipynb index 257f7f277d..ab12bb4a15 100644 --- a/docs/_downloads/0daf1d0af656cac7b808856b71e6616f/torch_compile_resnet_example.ipynb +++ b/docs/_downloads/0daf1d0af656cac7b808856b71e6616f/torch_compile_resnet_example.ipynb @@ -51,7 +51,7 @@ }, "outputs": [], "source": [ - "# Enabled precision for TensorRT optimization\nenabled_precisions = 
{torch.half}\n\n# Whether to print verbose logs\ndebug = True\n\n# Workspace size for TensorRT\nworkspace_size = 20 << 30\n\n# Maximum number of TRT Engines\n# (Lower value allows more graph segmentation)\nmin_block_size = 7\n\n# Operations to Run in Torch, regardless of converter support\ntorch_executed_ops = {}" + "# Enabled precision for TensorRT optimization\nenabled_precisions = {torch.half}\n\n\n# Workspace size for TensorRT\nworkspace_size = 20 << 30\n\n# Maximum number of TRT Engines\n# (Lower value allows more graph segmentation)\nmin_block_size = 7\n\n# Operations to Run in Torch, regardless of converter support\ntorch_executed_ops = {}" ] }, { @@ -69,7 +69,7 @@ }, "outputs": [], "source": [ - "# Build and compile the model with torch.compile, using Torch-TensorRT backend\noptimized_model = torch_tensorrt.compile(\n model,\n ir=\"torch_compile\",\n inputs=inputs,\n enabled_precisions=enabled_precisions,\n debug=debug,\n workspace_size=workspace_size,\n min_block_size=min_block_size,\n torch_executed_ops=torch_executed_ops,\n)" + "# Build and compile the model with torch.compile, using Torch-TensorRT backend\noptimized_model = torch_tensorrt.compile(\n model,\n ir=\"torch_compile\",\n inputs=inputs,\n enabled_precisions=enabled_precisions,\n workspace_size=workspace_size,\n min_block_size=min_block_size,\n torch_executed_ops=torch_executed_ops,\n)" ] }, { @@ -123,7 +123,7 @@ }, "outputs": [], "source": [ - "# The following code illustrates the workflow using ir=torch_compile (which uses torch.compile under the hood)\ninputs_bs8 = torch.randn((8, 3, 224, 224)).half().to(\"cuda\")\n# This indicates dimension 0 of inputs_bs8 is dynamic whose range of values is [2, 16]\ntorch._dynamo.mark_dynamic(inputs_bs8, 0, min=2, max=16)\noptimized_model = torch_tensorrt.compile(\n model,\n ir=\"torch_compile\",\n inputs=inputs_bs8,\n enabled_precisions=enabled_precisions,\n debug=debug,\n workspace_size=workspace_size,\n min_block_size=min_block_size,\n 
torch_executed_ops=torch_executed_ops,\n)\noutputs_bs8 = optimized_model(inputs_bs8)\n\n# No recompilation happens for batch size = 12\ninputs_bs12 = torch.randn((12, 3, 224, 224)).half().to(\"cuda\")\noutputs_bs12 = optimized_model(inputs_bs12)\n\n# The following code illustrates the workflow using ir=dynamo (which uses torch.export APIs under the hood)\n# dynamic shapes for any inputs are specified using torch_tensorrt.Input API\ncompile_spec = {\n \"inputs\": [\n torch_tensorrt.Input(\n min_shape=(1, 3, 224, 224),\n opt_shape=(8, 3, 224, 224),\n max_shape=(16, 3, 224, 224),\n dtype=torch.half,\n )\n ],\n \"enabled_precisions\": enabled_precisions,\n \"ir\": \"dynamo\",\n}\ntrt_model = torch_tensorrt.compile(model, **compile_spec)\n\n# No recompilation happens for batch size = 12\ninputs_bs12 = torch.randn((12, 3, 224, 224)).half().to(\"cuda\")\noutputs_bs12 = trt_model(inputs_bs12)" + "# The following code illustrates the workflow using ir=torch_compile (which uses torch.compile under the hood)\ninputs_bs8 = torch.randn((8, 3, 224, 224)).half().to(\"cuda\")\n# This indicates dimension 0 of inputs_bs8 is dynamic whose range of values is [2, 16]\ntorch._dynamo.mark_dynamic(inputs_bs8, 0, min=2, max=16)\noptimized_model = torch_tensorrt.compile(\n model,\n ir=\"torch_compile\",\n inputs=inputs_bs8,\n enabled_precisions=enabled_precisions,\n workspace_size=workspace_size,\n min_block_size=min_block_size,\n torch_executed_ops=torch_executed_ops,\n)\noutputs_bs8 = optimized_model(inputs_bs8)\n\n# No recompilation happens for batch size = 12\ninputs_bs12 = torch.randn((12, 3, 224, 224)).half().to(\"cuda\")\noutputs_bs12 = optimized_model(inputs_bs12)\n\n# The following code illustrates the workflow using ir=dynamo (which uses torch.export APIs under the hood)\n# dynamic shapes for any inputs are specified using torch_tensorrt.Input API\ncompile_spec = {\n \"inputs\": [\n torch_tensorrt.Input(\n min_shape=(1, 3, 224, 224),\n opt_shape=(8, 3, 224, 224),\n max_shape=(16, 
3, 224, 224),\n dtype=torch.half,\n )\n ],\n \"enabled_precisions\": enabled_precisions,\n \"ir\": \"dynamo\",\n}\ntrt_model = torch_tensorrt.compile(model, **compile_spec)\n\n# No recompilation happens for batch size = 12\ninputs_bs12 = torch.randn((12, 3, 224, 224)).half().to(\"cuda\")\noutputs_bs12 = trt_model(inputs_bs12)" ] } ], @@ -143,7 +143,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/0e30a6276601af7e5fc4d5166e2e3d37/torch_compile_advanced_usage.py b/docs/_downloads/0e30a6276601af7e5fc4d5166e2e3d37/torch_compile_advanced_usage.py index af7d4b212d..71d0d77005 100644 --- a/docs/_downloads/0e30a6276601af7e5fc4d5166e2e3d37/torch_compile_advanced_usage.py +++ b/docs/_downloads/0e30a6276601af7e5fc4d5166e2e3d37/torch_compile_advanced_usage.py @@ -73,7 +73,6 @@ def forward(self, x: torch.Tensor, y: torch.Tensor): # py/torch_tensorrt/dynamo/_settings.py backend_kwargs = { "enabled_precisions": {torch.half}, - "debug": True, "min_block_size": 2, "torch_executed_ops": {"torch.ops.aten.sub.Tensor"}, "optimization_level": 4, diff --git a/docs/_downloads/11bd814a14cab34bab72bf8a16425e4a/torch_export_flux_dev.py b/docs/_downloads/11bd814a14cab34bab72bf8a16425e4a/torch_export_flux_dev.py index 3891fcbb9a..4a6d36a960 100644 --- a/docs/_downloads/11bd814a14cab34bab72bf8a16425e4a/torch_export_flux_dev.py +++ b/docs/_downloads/11bd814a14cab34bab72bf8a16425e4a/torch_export_flux_dev.py @@ -19,6 +19,8 @@ we demonstrate optimizing the ``transformer`` component of the model (which typically consumes >95% of the e2e diffusion latency) """ +import register_sdpa # Register SDPA as a standalone operator + # %% # Import the following libraries # ----------------------------- @@ -112,6 +114,8 @@ min_block_size=1, use_fp32_acc=True, use_explicit_typing=True, + immutable_weights=False, + offload_module_to_cpu=True, ) # %% @@ -119,14 +123,13 @@ # 
--------------------------- # Release the GPU memory occupied by the exported program and the pipe.transformer # Set the transformer in the Flux pipeline to the Torch-TRT compiled model - -del ep -backbone.to("cpu") +pipe.transformer = None pipe.to(DEVICE) -torch.cuda.empty_cache() pipe.transformer = trt_gm +del ep +torch.cuda.empty_cache() pipe.transformer.config = config - +trt_gm.device = torch.device("cuda") # %% # Image generation using prompt # --------------------------- diff --git a/docs/_downloads/1c759c0181fe2845e5579cc82e5b7a7a/engine_caching_example.py b/docs/_downloads/1c759c0181fe2845e5579cc82e5b7a7a/engine_caching_example.py index fb4c341077..34fa56f9a1 100644 --- a/docs/_downloads/1c759c0181fe2845e5579cc82e5b7a7a/engine_caching_example.py +++ b/docs/_downloads/1c759c0181fe2845e5579cc82e5b7a7a/engine_caching_example.py @@ -39,7 +39,6 @@ model = models.resnet18(pretrained=True).eval().to("cuda") enabled_precisions = {torch.float} -debug = False min_block_size = 1 use_python_runtime = False @@ -95,7 +94,6 @@ def torch_compile(iterations=3): options={ "use_python_runtime": True, "enabled_precisions": enabled_precisions, - "debug": debug, "min_block_size": min_block_size, "immutable_weights": False, "cache_built_engines": cache_built_engines, @@ -155,7 +153,6 @@ def dynamo_compile(iterations=3): tuple(inputs), use_python_runtime=use_python_runtime, enabled_precisions=enabled_precisions, - debug=debug, min_block_size=min_block_size, immutable_weights=False, cache_built_engines=cache_built_engines, @@ -266,7 +263,6 @@ def torch_compile_my_cache(iterations=3): options={ "use_python_runtime": True, "enabled_precisions": enabled_precisions, - "debug": debug, "min_block_size": min_block_size, "immutable_weights": False, "cache_built_engines": cache_built_engines, diff --git a/docs/_downloads/2c4fd8e65aa979aa6a0402a43ff9b15e/cross_runtime_compilation_for_windows.py b/docs/_downloads/2c4fd8e65aa979aa6a0402a43ff9b15e/cross_runtime_compilation_for_windows.py index 
184470ffa0..433df12d29 100644 --- a/docs/_downloads/2c4fd8e65aa979aa6a0402a43ff9b15e/cross_runtime_compilation_for_windows.py +++ b/docs/_downloads/2c4fd8e65aa979aa6a0402a43ff9b15e/cross_runtime_compilation_for_windows.py @@ -71,7 +71,6 @@ "cross runtime compiled model for windows can only be compiled in Linux system" ) compile_spec = { - "debug": True, "min_block_size": 1, } torchtrt.cross_compile_for_windows( diff --git a/docs/_downloads/2ce302f7f4f71543c3d2bd7f93375eec/torch_compile_gpt2.ipynb b/docs/_downloads/2ce302f7f4f71543c3d2bd7f93375eec/torch_compile_gpt2.ipynb index 1f9d245954..c04411386d 100644 --- a/docs/_downloads/2ce302f7f4f71543c3d2bd7f93375eec/torch_compile_gpt2.ipynb +++ b/docs/_downloads/2ce302f7f4f71543c3d2bd7f93375eec/torch_compile_gpt2.ipynb @@ -186,7 +186,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/2fbbf7380f818b1cbce2b90bbcaf2904/mutable_torchtrt_module_example.py b/docs/_downloads/2fbbf7380f818b1cbce2b90bbcaf2904/mutable_torchtrt_module_example.py index a6c8a5384e..f422a6e629 100644 --- a/docs/_downloads/2fbbf7380f818b1cbce2b90bbcaf2904/mutable_torchtrt_module_example.py +++ b/docs/_downloads/2fbbf7380f818b1cbce2b90bbcaf2904/mutable_torchtrt_module_example.py @@ -22,6 +22,7 @@ import torch import torch_tensorrt as torch_trt import torchvision.models as models +from diffusers import DiffusionPipeline np.random.seed(5) torch.manual_seed(5) @@ -31,7 +32,7 @@ # Initialize the Mutable Torch TensorRT Module with settings. # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ settings = { - "use_python": False, + "use_python_runtime": False, "enabled_precisions": {torch.float32}, "immutable_weights": False, } @@ -40,7 +41,6 @@ mutable_module = torch_trt.MutableTorchTensorRTModule(model, **settings) # You can use the mutable module just like the original pytorch module. The compilation happens while you first call the mutable module. 
mutable_module(*inputs) - # %% # Make modifications to the mutable module. # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -73,13 +73,11 @@ # Stable Diffusion with Huggingface # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -from diffusers import DiffusionPipeline with torch.no_grad(): settings = { "use_python_runtime": True, "enabled_precisions": {torch.float16}, - "debug": True, "immutable_weights": False, } @@ -106,7 +104,7 @@ "text_embeds": {0: BATCH}, "time_ids": {0: BATCH}, }, - "return_dict": False, + "return_dict": None, } pipe.unet.set_expected_dynamic_shape_range( args_dynamic_shapes, kwargs_dynamic_shapes @@ -181,7 +179,7 @@ def forward(self, a, b, c={}): }, # a's shape does not change so we give it an empty dict } # Export the model first with custom dynamic shape constraints -model = torch_trt.MutableTorchTensorRTModule(model, debug=True, min_block_size=1) +model = torch_trt.MutableTorchTensorRTModule(model, min_block_size=1) model.set_expected_dynamic_shape_range(args_dynamic_shapes, kwarg_dynamic_shapes) # Compile model(*inputs, **kwargs) @@ -212,7 +210,6 @@ def forward(self, a, b, c={}): model, use_python_runtime=True, enabled_precisions={torch.float}, - debug=True, min_block_size=1, immutable_weights=False, cache_built_engines=True, diff --git a/docs/_downloads/3454ee6d4b68e83cdf0c757f0059986b/engine_caching_example.ipynb b/docs/_downloads/3454ee6d4b68e83cdf0c757f0059986b/engine_caching_example.ipynb index 28f03e3915..38de1cb9a1 100644 --- a/docs/_downloads/3454ee6d4b68e83cdf0c757f0059986b/engine_caching_example.ipynb +++ b/docs/_downloads/3454ee6d4b68e83cdf0c757f0059986b/engine_caching_example.ipynb @@ -15,7 +15,7 @@ }, "outputs": [], "source": [ - "import os\nfrom typing import Dict, Optional\n\nimport numpy as np\nimport torch\nimport torch_tensorrt as torch_trt\nimport torchvision.models as models\nfrom torch_tensorrt.dynamo._defaults import TIMING_CACHE_PATH\nfrom torch_tensorrt.dynamo._engine_cache import 
BaseEngineCache\n\nnp.random.seed(0)\ntorch.manual_seed(0)\n\nmodel = models.resnet18(pretrained=True).eval().to(\"cuda\")\nenabled_precisions = {torch.float}\ndebug = False\nmin_block_size = 1\nuse_python_runtime = False\n\n\ndef remove_timing_cache(path=TIMING_CACHE_PATH):\n if os.path.exists(path):\n os.remove(path)" + "import os\nfrom typing import Dict, Optional\n\nimport numpy as np\nimport torch\nimport torch_tensorrt as torch_trt\nimport torchvision.models as models\nfrom torch_tensorrt.dynamo._defaults import TIMING_CACHE_PATH\nfrom torch_tensorrt.dynamo._engine_cache import BaseEngineCache\n\nnp.random.seed(0)\ntorch.manual_seed(0)\n\nmodel = models.resnet18(pretrained=True).eval().to(\"cuda\")\nenabled_precisions = {torch.float}\nmin_block_size = 1\nuse_python_runtime = False\n\n\ndef remove_timing_cache(path=TIMING_CACHE_PATH):\n if os.path.exists(path):\n os.remove(path)" ] }, { @@ -33,7 +33,7 @@ }, "outputs": [], "source": [ - "def torch_compile(iterations=3):\n times = []\n start = torch.cuda.Event(enable_timing=True)\n end = torch.cuda.Event(enable_timing=True)\n\n # The 1st iteration is to measure the compilation time without engine caching\n # The 2nd and 3rd iterations are to measure the compilation time with engine caching.\n # Since the 2nd iteration needs to compile and save the engine, it will be slower than the 1st iteration.\n # The 3rd iteration should be faster than the 1st iteration because it loads the cached engine.\n for i in range(iterations):\n inputs = [torch.rand((100, 3, 224, 224)).to(\"cuda\")]\n # remove timing cache and reset dynamo just for engine caching messurement\n remove_timing_cache()\n torch._dynamo.reset()\n\n if i == 0:\n cache_built_engines = False\n reuse_cached_engines = False\n else:\n cache_built_engines = True\n reuse_cached_engines = True\n\n start.record()\n compiled_model = torch.compile(\n model,\n backend=\"tensorrt\",\n options={\n \"use_python_runtime\": True,\n \"enabled_precisions\": 
enabled_precisions,\n \"debug\": debug,\n \"min_block_size\": min_block_size,\n \"immutable_weights\": False,\n \"cache_built_engines\": cache_built_engines,\n \"reuse_cached_engines\": reuse_cached_engines,\n },\n )\n compiled_model(*inputs) # trigger the compilation\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\n print(\"----------------torch_compile----------------\")\n print(\"disable engine caching, used:\", times[0], \"ms\")\n print(\"enable engine caching to cache engines, used:\", times[1], \"ms\")\n print(\"enable engine caching to reuse engines, used:\", times[2], \"ms\")\n\n\ntorch_compile()" + "def torch_compile(iterations=3):\n times = []\n start = torch.cuda.Event(enable_timing=True)\n end = torch.cuda.Event(enable_timing=True)\n\n # The 1st iteration is to measure the compilation time without engine caching\n # The 2nd and 3rd iterations are to measure the compilation time with engine caching.\n # Since the 2nd iteration needs to compile and save the engine, it will be slower than the 1st iteration.\n # The 3rd iteration should be faster than the 1st iteration because it loads the cached engine.\n for i in range(iterations):\n inputs = [torch.rand((100, 3, 224, 224)).to(\"cuda\")]\n # remove timing cache and reset dynamo just for engine caching messurement\n remove_timing_cache()\n torch._dynamo.reset()\n\n if i == 0:\n cache_built_engines = False\n reuse_cached_engines = False\n else:\n cache_built_engines = True\n reuse_cached_engines = True\n\n start.record()\n compiled_model = torch.compile(\n model,\n backend=\"tensorrt\",\n options={\n \"use_python_runtime\": True,\n \"enabled_precisions\": enabled_precisions,\n \"min_block_size\": min_block_size,\n \"immutable_weights\": False,\n \"cache_built_engines\": cache_built_engines,\n \"reuse_cached_engines\": reuse_cached_engines,\n },\n )\n compiled_model(*inputs) # trigger the compilation\n end.record()\n torch.cuda.synchronize()\n 
times.append(start.elapsed_time(end))\n\n print(\"----------------torch_compile----------------\")\n print(\"disable engine caching, used:\", times[0], \"ms\")\n print(\"enable engine caching to cache engines, used:\", times[1], \"ms\")\n print(\"enable engine caching to reuse engines, used:\", times[2], \"ms\")\n\n\ntorch_compile()" ] }, { @@ -51,7 +51,7 @@ }, "outputs": [], "source": [ - "def dynamo_compile(iterations=3):\n times = []\n start = torch.cuda.Event(enable_timing=True)\n end = torch.cuda.Event(enable_timing=True)\n\n example_inputs = (torch.randn((100, 3, 224, 224)).to(\"cuda\"),)\n # Mark the dim0 of inputs as dynamic\n batch = torch.export.Dim(\"batch\", min=1, max=200)\n exp_program = torch.export.export(\n model, args=example_inputs, dynamic_shapes={\"x\": {0: batch}}\n )\n\n # The 1st iteration is to measure the compilation time without engine caching\n # The 2nd and 3rd iterations are to measure the compilation time with engine caching.\n # Since the 2nd iteration needs to compile and save the engine, it will be slower than the 1st iteration.\n # The 3rd iteration should be faster than the 1st iteration because it loads the cached engine.\n for i in range(iterations):\n inputs = [torch.rand((100 + i, 3, 224, 224)).to(\"cuda\")]\n remove_timing_cache() # remove timing cache just for engine caching messurement\n if i == 0:\n cache_built_engines = False\n reuse_cached_engines = False\n else:\n cache_built_engines = True\n reuse_cached_engines = True\n\n start.record()\n trt_gm = torch_trt.dynamo.compile(\n exp_program,\n tuple(inputs),\n use_python_runtime=use_python_runtime,\n enabled_precisions=enabled_precisions,\n debug=debug,\n min_block_size=min_block_size,\n immutable_weights=False,\n cache_built_engines=cache_built_engines,\n reuse_cached_engines=reuse_cached_engines,\n engine_cache_size=1 << 30, # 1GB\n )\n # output = trt_gm(*inputs)\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\n 
print(\"----------------dynamo_compile----------------\")\n print(\"disable engine caching, used:\", times[0], \"ms\")\n print(\"enable engine caching to cache engines, used:\", times[1], \"ms\")\n print(\"enable engine caching to reuse engines, used:\", times[2], \"ms\")\n\n\ndynamo_compile()" + "def dynamo_compile(iterations=3):\n times = []\n start = torch.cuda.Event(enable_timing=True)\n end = torch.cuda.Event(enable_timing=True)\n\n example_inputs = (torch.randn((100, 3, 224, 224)).to(\"cuda\"),)\n # Mark the dim0 of inputs as dynamic\n batch = torch.export.Dim(\"batch\", min=1, max=200)\n exp_program = torch.export.export(\n model, args=example_inputs, dynamic_shapes={\"x\": {0: batch}}\n )\n\n # The 1st iteration is to measure the compilation time without engine caching\n # The 2nd and 3rd iterations are to measure the compilation time with engine caching.\n # Since the 2nd iteration needs to compile and save the engine, it will be slower than the 1st iteration.\n # The 3rd iteration should be faster than the 1st iteration because it loads the cached engine.\n for i in range(iterations):\n inputs = [torch.rand((100 + i, 3, 224, 224)).to(\"cuda\")]\n remove_timing_cache() # remove timing cache just for engine caching messurement\n if i == 0:\n cache_built_engines = False\n reuse_cached_engines = False\n else:\n cache_built_engines = True\n reuse_cached_engines = True\n\n start.record()\n trt_gm = torch_trt.dynamo.compile(\n exp_program,\n tuple(inputs),\n use_python_runtime=use_python_runtime,\n enabled_precisions=enabled_precisions,\n min_block_size=min_block_size,\n immutable_weights=False,\n cache_built_engines=cache_built_engines,\n reuse_cached_engines=reuse_cached_engines,\n engine_cache_size=1 << 30, # 1GB\n )\n # output = trt_gm(*inputs)\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\n print(\"----------------dynamo_compile----------------\")\n print(\"disable engine caching, used:\", times[0], \"ms\")\n 
print(\"enable engine caching to cache engines, used:\", times[1], \"ms\")\n print(\"enable engine caching to reuse engines, used:\", times[2], \"ms\")\n\n\ndynamo_compile()" ] }, { @@ -69,7 +69,7 @@ }, "outputs": [], "source": [ - "class RAMEngineCache(BaseEngineCache):\n def __init__(\n self,\n ) -> None:\n \"\"\"\n Constructs a user held engine cache in memory.\n \"\"\"\n self.engine_cache: Dict[str, bytes] = {}\n\n def save(\n self,\n hash: str,\n blob: bytes,\n ):\n \"\"\"\n Insert the engine blob to the cache.\n\n Args:\n hash (str): The hash key to associate with the engine blob.\n blob (bytes): The engine blob to be saved.\n\n Returns:\n None\n \"\"\"\n self.engine_cache[hash] = blob\n\n def load(self, hash: str) -> Optional[bytes]:\n \"\"\"\n Load the engine blob from the cache.\n\n Args:\n hash (str): The hash key of the engine to load.\n\n Returns:\n Optional[bytes]: The engine blob if found, None otherwise.\n \"\"\"\n if hash in self.engine_cache:\n return self.engine_cache[hash]\n else:\n return None\n\n\ndef torch_compile_my_cache(iterations=3):\n times = []\n engine_cache = RAMEngineCache()\n start = torch.cuda.Event(enable_timing=True)\n end = torch.cuda.Event(enable_timing=True)\n\n # The 1st iteration is to measure the compilation time without engine caching\n # The 2nd and 3rd iterations are to measure the compilation time with engine caching.\n # Since the 2nd iteration needs to compile and save the engine, it will be slower than the 1st iteration.\n # The 3rd iteration should be faster than the 1st iteration because it loads the cached engine.\n for i in range(iterations):\n inputs = [torch.rand((100, 3, 224, 224)).to(\"cuda\")]\n # remove timing cache and reset dynamo just for engine caching messurement\n remove_timing_cache()\n torch._dynamo.reset()\n\n if i == 0:\n cache_built_engines = False\n reuse_cached_engines = False\n else:\n cache_built_engines = True\n reuse_cached_engines = True\n\n start.record()\n compiled_model = 
torch.compile(\n model,\n backend=\"tensorrt\",\n options={\n \"use_python_runtime\": True,\n \"enabled_precisions\": enabled_precisions,\n \"debug\": debug,\n \"min_block_size\": min_block_size,\n \"immutable_weights\": False,\n \"cache_built_engines\": cache_built_engines,\n \"reuse_cached_engines\": reuse_cached_engines,\n \"custom_engine_cache\": engine_cache,\n },\n )\n compiled_model(*inputs) # trigger the compilation\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\n print(\"----------------torch_compile----------------\")\n print(\"disable engine caching, used:\", times[0], \"ms\")\n print(\"enable engine caching to cache engines, used:\", times[1], \"ms\")\n print(\"enable engine caching to reuse engines, used:\", times[2], \"ms\")\n\n\ntorch_compile_my_cache()" + "class RAMEngineCache(BaseEngineCache):\n def __init__(\n self,\n ) -> None:\n \"\"\"\n Constructs a user held engine cache in memory.\n \"\"\"\n self.engine_cache: Dict[str, bytes] = {}\n\n def save(\n self,\n hash: str,\n blob: bytes,\n ):\n \"\"\"\n Insert the engine blob to the cache.\n\n Args:\n hash (str): The hash key to associate with the engine blob.\n blob (bytes): The engine blob to be saved.\n\n Returns:\n None\n \"\"\"\n self.engine_cache[hash] = blob\n\n def load(self, hash: str) -> Optional[bytes]:\n \"\"\"\n Load the engine blob from the cache.\n\n Args:\n hash (str): The hash key of the engine to load.\n\n Returns:\n Optional[bytes]: The engine blob if found, None otherwise.\n \"\"\"\n if hash in self.engine_cache:\n return self.engine_cache[hash]\n else:\n return None\n\n\ndef torch_compile_my_cache(iterations=3):\n times = []\n engine_cache = RAMEngineCache()\n start = torch.cuda.Event(enable_timing=True)\n end = torch.cuda.Event(enable_timing=True)\n\n # The 1st iteration is to measure the compilation time without engine caching\n # The 2nd and 3rd iterations are to measure the compilation time with engine caching.\n # Since the 2nd iteration 
needs to compile and save the engine, it will be slower than the 1st iteration.\n # The 3rd iteration should be faster than the 1st iteration because it loads the cached engine.\n for i in range(iterations):\n inputs = [torch.rand((100, 3, 224, 224)).to(\"cuda\")]\n # remove timing cache and reset dynamo just for engine caching messurement\n remove_timing_cache()\n torch._dynamo.reset()\n\n if i == 0:\n cache_built_engines = False\n reuse_cached_engines = False\n else:\n cache_built_engines = True\n reuse_cached_engines = True\n\n start.record()\n compiled_model = torch.compile(\n model,\n backend=\"tensorrt\",\n options={\n \"use_python_runtime\": True,\n \"enabled_precisions\": enabled_precisions,\n \"min_block_size\": min_block_size,\n \"immutable_weights\": False,\n \"cache_built_engines\": cache_built_engines,\n \"reuse_cached_engines\": reuse_cached_engines,\n \"custom_engine_cache\": engine_cache,\n },\n )\n compiled_model(*inputs) # trigger the compilation\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\n print(\"----------------torch_compile----------------\")\n print(\"disable engine caching, used:\", times[0], \"ms\")\n print(\"enable engine caching to cache engines, used:\", times[1], \"ms\")\n print(\"enable engine caching to reuse engines, used:\", times[2], \"ms\")\n\n\ntorch_compile_my_cache()" ] } ], @@ -89,7 +89,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/3e4586a9107efae8f87a361bd207b6e0/weight_streaming_example.ipynb b/docs/_downloads/3e4586a9107efae8f87a361bd207b6e0/weight_streaming_example.ipynb index 7c6b79b6f6..192555d68e 100644 --- a/docs/_downloads/3e4586a9107efae8f87a361bd207b6e0/weight_streaming_example.ipynb +++ b/docs/_downloads/3e4586a9107efae8f87a361bd207b6e0/weight_streaming_example.ipynb @@ -22,7 +22,7 @@ }, "outputs": [], "source": [ - "import copy\nimport 
timeit\n\nimport numpy as np\nimport torch\nimport torch_tensorrt\nfrom transformers import AutoModelForCausalLM\nfrom utils import export_llm\n\n\ndef time_generate(model, inputs, output_seq_length, iterations=10):\n \"\"\"\n Measure the time for generating a sentence over certain number of iterations\n \"\"\"\n # We only support single input (B x seq_len) for LLMs now\n input_seq = inputs[0]\n with torch.no_grad():\n timings = []\n for _ in range(iterations):\n start_time = timeit.default_timer()\n inputs_copy = copy.copy(input_seq)\n # Greedy decoding of the model. This generates up to max_tokens.\n while inputs_copy.shape[1] <= output_seq_length:\n outputs = model(inputs_copy)\n logits = outputs.logits\n next_token_logits = logits[:, -1, :]\n next_tokens = torch.argmax(next_token_logits, dim=-1)\n inputs_copy = torch.cat([inputs_copy, next_tokens[:, None]], dim=-1)\n torch.cuda.synchronize()\n end_time = timeit.default_timer()\n timings.append(end_time - start_time)\n\n times = np.array(timings)\n time_mean_ms = np.mean(times) * 1000\n\n return time_mean_ms\n\n\n# Load the LLaMA-2 model\nDEVICE = torch.device(\"cuda:0\")\nllama_path = \"meta-llama/Llama-2-7b-chat-hf\"\nwith torch.no_grad():\n model = AutoModelForCausalLM.from_pretrained(\n llama_path, use_cache=False, attn_implementation=\"eager\"\n ).eval()\n\n# Set input and output sequence lengths\nisl = 128\nosl = 256\n\n# Create random input tensors\ninput_tensors = [torch.randint(0, 5, (1, isl), dtype=torch.int64).cuda()]\n# Convert the model to half precision (FP16)\nmodel = model.half()\n# Exports the LLM model into an ExportedProgram with dynamic shapes.\nllama2_ep = export_llm(model, input_tensors[0], max_seq_len=osl)" + "import copy\nimport timeit\n\nimport numpy as np\nimport torch\nimport torch_tensorrt\nfrom transformers import AutoModelForCausalLM\n\n\ndef export_llm(model, inputs, min_seq_len=1, max_seq_len=16):\n \"\"\"\n Exports the LLM model into an ExportedProgram with dynamic shapes.\n In 
the case of guard failures due to some PyTorch kernel implements, we also\n try to re-export the graph by expressing them as runtime assert nodes\n \"\"\"\n with torch.no_grad():\n # max=1024 has contraint violation error. https://github.com/pytorch/pytorch/issues/125604\n seq_len = torch.export.Dim(\"seq_len\", min=min_seq_len, max=max_seq_len)\n position_ids = torch.arange(inputs.shape[1]).unsqueeze(0).to(inputs.device)\n try:\n print(\"Trying to export the model using torch.export.export()..\")\n # strict=False only enables aotautograd tracing and excludes dynamo.\n ep = torch.export.export(\n model,\n args=(inputs,),\n kwargs={\"position_ids\": position_ids},\n dynamic_shapes=({1: seq_len}, {1: seq_len}),\n strict=False,\n )\n except:\n print(\n \"Trying torch.export._trace._export to trace the graph since torch.export.export() failed\"\n )\n # This API is used to express the constraint violation guards as asserts in the graph.\n ep = torch.export._trace._export(\n model,\n args=(inputs,),\n kwargs={\"position_ids\": position_ids},\n dynamic_shapes=({1: seq_len}, {1: seq_len}),\n strict=False,\n allow_complex_guards_as_runtime_asserts=True,\n )\n\n return ep\n\n\ndef time_generate(model, inputs, output_seq_length, iterations=10):\n \"\"\"\n Measure the time for generating a sentence over certain number of iterations\n \"\"\"\n # We only support single input (B x seq_len) for LLMs now\n input_seq = inputs[0]\n with torch.no_grad():\n timings = []\n for _ in range(iterations):\n start_time = timeit.default_timer()\n inputs_copy = copy.copy(input_seq)\n # Greedy decoding of the model. 
This generates up to max_tokens.\n while inputs_copy.shape[1] <= output_seq_length:\n outputs = model(inputs_copy)\n logits = outputs.logits\n next_token_logits = logits[:, -1, :]\n next_tokens = torch.argmax(next_token_logits, dim=-1)\n inputs_copy = torch.cat([inputs_copy, next_tokens[:, None]], dim=-1)\n torch.cuda.synchronize()\n end_time = timeit.default_timer()\n timings.append(end_time - start_time)\n\n times = np.array(timings)\n time_mean_ms = np.mean(times) * 1000\n\n return time_mean_ms\n\n\n# Load the LLaMA-2 model\nDEVICE = torch.device(\"cuda:0\")\nllama_path = \"meta-llama/Llama-2-7b-chat-hf\"\nwith torch.no_grad():\n model = AutoModelForCausalLM.from_pretrained(\n llama_path, use_cache=False, attn_implementation=\"eager\"\n ).eval()\n\n# Set input and output sequence lengths\nisl = 128\nosl = 256\n\n# Create random input tensors\ninput_tensors = [torch.randint(0, 5, (1, isl), dtype=torch.int64).cuda()]\n# Convert the model to half precision (FP16)\nmodel = model.half()\n# Exports the LLM model into an ExportedProgram with dynamic shapes.\nllama2_ep = export_llm(model, input_tensors[0], max_seq_len=osl)" ] }, { @@ -96,7 +96,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/50cdc517d124443e61b8e11d4bdb29f0/torch_export_cudagraphs.ipynb b/docs/_downloads/50cdc517d124443e61b8e11d4bdb29f0/torch_export_cudagraphs.ipynb index d57c9ccac7..c1d3608800 100644 --- a/docs/_downloads/50cdc517d124443e61b8e11d4bdb29f0/torch_export_cudagraphs.ipynb +++ b/docs/_downloads/50cdc517d124443e61b8e11d4bdb29f0/torch_export_cudagraphs.ipynb @@ -136,7 +136,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/5d228e6c421cd8885936a0d2b838a74a/llama2_flashinfer_rmsnorm.ipynb 
b/docs/_downloads/5d228e6c421cd8885936a0d2b838a74a/llama2_flashinfer_rmsnorm.ipynb index 4ca9492336..7d83b1a8c4 100644 --- a/docs/_downloads/5d228e6c421cd8885936a0d2b838a74a/llama2_flashinfer_rmsnorm.ipynb +++ b/docs/_downloads/5d228e6c421cd8885936a0d2b838a74a/llama2_flashinfer_rmsnorm.ipynb @@ -15,7 +15,7 @@ }, "outputs": [], "source": [ - "from typing import Callable, Optional, Sequence, Union\n\nimport flashinfer\nimport torch\nimport torch_tensorrt\nfrom torch.fx.passes.shape_prop import TensorMetadata\nfrom torch_tensorrt.dynamo.lowering.passes._aten_lowering_pass import (\n _aten_lowering_pass,\n)\nfrom torch_tensorrt.dynamo.lowering.passes.pass_utils import (\n clean_up_graph_after_modifications,\n)\nfrom transformers import LlamaConfig, LlamaForCausalLM\n\n\n@torch.library.custom_op(\"flashinfer::rmsnorm\", mutates_args=()) # type: ignore[misc]\ndef flashinfer_rmsnorm(\n input: torch.Tensor, weight: torch.Tensor, eps: float = 1e-6\n) -> torch.Tensor:\n return flashinfer.norm.rmsnorm(input, weight)\n\n\n@torch.library.register_fake(\"flashinfer::rmsnorm\")\ndef _(input: torch.Tensor, weight: torch.Tensor, b: float = 1e-6) -> torch.Tensor:\n return input\n\n\ntorch_tensorrt.dynamo.conversion.plugins.custom_op(\n \"flashinfer::rmsnorm\", supports_dynamic_shapes=True\n)\n\n\n@_aten_lowering_pass\ndef replace_rmsnorm(\n gm: torch.fx.GraphModule, sample_inputs: Sequence[torch.Tensor]\n) -> torch.fx.GraphModule:\n for node in gm.graph.nodes:\n if (\n node.target == torch.ops.aten._to_copy.default\n and node.kwargs.get(\"dtype\") is torch.float32\n and len(node.users) == 2\n ):\n if (\n list(node.users)[0].target == torch.ops.aten.pow.Tensor_Scalar\n and list(node.users)[1].target == torch.ops.aten.mul.Tensor\n ):\n pow_node = list(node.users)[0]\n if (\n len(pow_node.users) == 1\n and list(pow_node.users)[0].target == torch.ops.aten.mean.dim\n ):\n mean_node = list(pow_node.users)[0]\n if (\n len(mean_node.users) == 1\n and list(mean_node.users)[0].target == 
torch.ops.aten.add.Tensor\n ):\n add_node = list(mean_node.users)[0]\n if (\n len(add_node.users) == 1\n and list(add_node.users)[0].target\n == torch.ops.aten.sqrt.default\n ):\n sqrt_node = list(add_node.users)[0]\n if (\n len(sqrt_node.users) == 1\n and list(sqrt_node.users)[0].target\n == torch.ops.aten.div.Tensor\n ):\n div_node = list(sqrt_node.users)[0]\n if list(div_node.users)[0] == list(node.users)[1]:\n mul_node = list(div_node.users)[0]\n copy_node = list(mul_node.users)[0]\n weight_mul_node = list(copy_node.users)[0]\n\n weight = weight_mul_node.args[0]\n\n original_meta = weight_mul_node.meta.get(\n \"tensor_meta\", {}\n )\n memory_format = original_meta.memory_format\n\n with gm.graph.inserting_after(weight_mul_node):\n b = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.sym_size.int,\n args=(node.args[0], 0),\n )\n b.meta[\"tensor_meta\"] = TensorMetadata(\n shape=torch.Size([1]),\n dtype=torch.int64,\n requires_grad=False,\n stride=None,\n memory_format=memory_format,\n is_quantized=False,\n qparams={},\n )\n s = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.sym_size.int,\n args=(node.args[0], 1),\n )\n s.meta.update(b.meta)\n\n d = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.sym_size.int,\n args=(node.args[0], 2),\n )\n d.meta.update(b.meta)\n\n with gm.graph.inserting_after(b):\n new_first_dim = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.mul.Scalar,\n args=(b, s),\n )\n new_first_dim.meta.update(b.meta)\n\n with gm.graph.inserting_after(new_first_dim):\n # with gm.graph.inserting_after(weight_mul_node):\n reshape_node = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.reshape.default,\n args=(node.args[0], [new_first_dim, d]),\n )\n b_val = original_meta.shape[0]\n s_val = original_meta.shape[1]\n d_val = original_meta.shape[2]\n\n reshape_node.meta[\"tensor_meta\"] = (\n TensorMetadata(\n shape=torch.Size(\n [b_val * s_val, 
d_val]\n ),\n dtype=original_meta.dtype,\n requires_grad=True,\n stride=None,\n memory_format=memory_format,\n is_quantized=False,\n qparams={},\n )\n )\n\n with gm.graph.inserting_after(reshape_node):\n flashinfer_rmsnorm_node = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.flashinfer.rmsnorm.default,\n args=(\n reshape_node,\n weight,\n add_node.args[1],\n ),\n )\n flashinfer_rmsnorm_node.meta.update(\n reshape_node.meta\n )\n\n with gm.graph.inserting_after(\n flashinfer_rmsnorm_node\n ):\n reshapback_node = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.reshape.default,\n args=(\n flashinfer_rmsnorm_node,\n [b, s, d],\n ),\n )\n\n weight_mul_node.replace_all_uses_with(\n reshapback_node\n )\n reshapback_node.meta.update(weight_mul_node.meta)\n\n modified_graph = True\n\n gm.graph.erase_node(weight_mul_node)\n gm.graph.erase_node(copy_node)\n gm.graph.erase_node(mul_node)\n gm.graph.erase_node(div_node)\n gm.graph.erase_node(sqrt_node)\n gm.graph.erase_node(add_node)\n gm.graph.erase_node(mean_node)\n gm.graph.erase_node(pow_node)\n gm.graph.erase_node(node)\n\n if modified_graph:\n gm = clean_up_graph_after_modifications(gm)\n\n return gm\n\n\n# 1. Create a custom config with 1 layer\nconfig = LlamaConfig(\n vocab_size=32000,\n hidden_size=4096, # LLaMA2-7B dimensions\n intermediate_size=11008, # FFN hidden_dim = 4 * 4096 * 0.7 (SwiGLU scaling)\n num_hidden_layers=1, # Only 1 decoder layer\n num_attention_heads=32,\n max_position_embeddings=4096,\n use_cache=False, # Disable KV caching for export\n)\n\n# 2. Initialize model (random weights)\nwith torch.no_grad():\n model = LlamaForCausalLM(config).eval().half()\n\n# 3. 
Export with static shapes\ninput_ids = torch.randint(0, 32000, (1, 64)) # Static [batch=1, seq=64]\nexported = torch.export.export(\n model,\n (input_ids,),\n dynamic_shapes=None, # Fully static\n)\n\n# Test forward pass\ninput_ids = torch.randint(0, 32000, (1, 64))\noutput = model(input_ids)\nprint(output)\n\n# Export validation\n\nDEVICE = torch.device(\"cuda:0\")\n\nwith torch_tensorrt.logging.errors():\n trt_model = torch_tensorrt.dynamo.compile(\n exported,\n inputs=[input_ids],\n enabled_precisions={torch.float32, torch.float16},\n truncate_double=True,\n device=DEVICE,\n disable_tf32=True,\n use_explicit_typing=False,\n use_fp32_acc=True,\n # debug=True,\n )\n\ninput_ids = input_ids.to(DEVICE)\n\nres = trt_model.forward(input_ids)\nprint(res)" + "from typing import Callable, Optional, Sequence, Union\n\nimport flashinfer\nimport torch\nimport torch_tensorrt\nfrom torch.fx.passes.shape_prop import TensorMetadata\nfrom torch_tensorrt.dynamo.lowering.passes._aten_lowering_pass import (\n _aten_lowering_pass,\n)\nfrom torch_tensorrt.dynamo.lowering.passes.pass_utils import (\n clean_up_graph_after_modifications,\n)\nfrom transformers import LlamaConfig, LlamaForCausalLM\n\n\n@torch.library.custom_op(\"flashinfer::rmsnorm\", mutates_args=()) # type: ignore[misc]\ndef flashinfer_rmsnorm(\n input: torch.Tensor, weight: torch.Tensor, eps: float = 1e-6\n) -> torch.Tensor:\n return flashinfer.norm.rmsnorm(input, weight)\n\n\n@torch.library.register_fake(\"flashinfer::rmsnorm\")\ndef _(input: torch.Tensor, weight: torch.Tensor, b: float = 1e-6) -> torch.Tensor:\n return input\n\n\ntorch_tensorrt.dynamo.conversion.plugins.custom_op(\n \"flashinfer::rmsnorm\", supports_dynamic_shapes=True\n)\n\n\n@_aten_lowering_pass\ndef replace_rmsnorm(\n gm: torch.fx.GraphModule, sample_inputs: Sequence[torch.Tensor]\n) -> torch.fx.GraphModule:\n for node in gm.graph.nodes:\n if (\n node.target == torch.ops.aten._to_copy.default\n and node.kwargs.get(\"dtype\") is torch.float32\n and 
len(node.users) == 2\n ):\n if (\n list(node.users)[0].target == torch.ops.aten.pow.Tensor_Scalar\n and list(node.users)[1].target == torch.ops.aten.mul.Tensor\n ):\n pow_node = list(node.users)[0]\n if (\n len(pow_node.users) == 1\n and list(pow_node.users)[0].target == torch.ops.aten.mean.dim\n ):\n mean_node = list(pow_node.users)[0]\n if (\n len(mean_node.users) == 1\n and list(mean_node.users)[0].target == torch.ops.aten.add.Tensor\n ):\n add_node = list(mean_node.users)[0]\n if (\n len(add_node.users) == 1\n and list(add_node.users)[0].target\n == torch.ops.aten.sqrt.default\n ):\n sqrt_node = list(add_node.users)[0]\n if (\n len(sqrt_node.users) == 1\n and list(sqrt_node.users)[0].target\n == torch.ops.aten.div.Tensor\n ):\n div_node = list(sqrt_node.users)[0]\n if list(div_node.users)[0] == list(node.users)[1]:\n mul_node = list(div_node.users)[0]\n copy_node = list(mul_node.users)[0]\n weight_mul_node = list(copy_node.users)[0]\n\n weight = weight_mul_node.args[0]\n\n original_meta = weight_mul_node.meta.get(\n \"tensor_meta\", {}\n )\n memory_format = original_meta.memory_format\n\n with gm.graph.inserting_after(weight_mul_node):\n b = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.sym_size.int,\n args=(node.args[0], 0),\n )\n b.meta[\"tensor_meta\"] = TensorMetadata(\n shape=torch.Size([1]),\n dtype=torch.int64,\n requires_grad=False,\n stride=None,\n memory_format=memory_format,\n is_quantized=False,\n qparams={},\n )\n s = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.sym_size.int,\n args=(node.args[0], 1),\n )\n s.meta.update(b.meta)\n\n d = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.sym_size.int,\n args=(node.args[0], 2),\n )\n d.meta.update(b.meta)\n\n with gm.graph.inserting_after(b):\n new_first_dim = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.mul.Scalar,\n args=(b, s),\n )\n new_first_dim.meta.update(b.meta)\n\n with 
gm.graph.inserting_after(new_first_dim):\n # with gm.graph.inserting_after(weight_mul_node):\n reshape_node = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.reshape.default,\n args=(node.args[0], [new_first_dim, d]),\n )\n b_val = original_meta.shape[0]\n s_val = original_meta.shape[1]\n d_val = original_meta.shape[2]\n\n reshape_node.meta[\"tensor_meta\"] = (\n TensorMetadata(\n shape=torch.Size(\n [b_val * s_val, d_val]\n ),\n dtype=original_meta.dtype,\n requires_grad=True,\n stride=None,\n memory_format=memory_format,\n is_quantized=False,\n qparams={},\n )\n )\n\n with gm.graph.inserting_after(reshape_node):\n flashinfer_rmsnorm_node = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.flashinfer.rmsnorm.default,\n args=(\n reshape_node,\n weight,\n add_node.args[1],\n ),\n )\n flashinfer_rmsnorm_node.meta.update(\n reshape_node.meta\n )\n\n with gm.graph.inserting_after(\n flashinfer_rmsnorm_node\n ):\n reshapback_node = gm.graph.create_node(\n op=\"call_function\",\n target=torch.ops.aten.reshape.default,\n args=(\n flashinfer_rmsnorm_node,\n [b, s, d],\n ),\n )\n\n weight_mul_node.replace_all_uses_with(\n reshapback_node\n )\n reshapback_node.meta.update(weight_mul_node.meta)\n\n modified_graph = True\n\n gm.graph.erase_node(weight_mul_node)\n gm.graph.erase_node(copy_node)\n gm.graph.erase_node(mul_node)\n gm.graph.erase_node(div_node)\n gm.graph.erase_node(sqrt_node)\n gm.graph.erase_node(add_node)\n gm.graph.erase_node(mean_node)\n gm.graph.erase_node(pow_node)\n gm.graph.erase_node(node)\n\n if modified_graph:\n gm = clean_up_graph_after_modifications(gm)\n\n return gm\n\n\n# 1. 
Create a custom config with 1 layer\nconfig = LlamaConfig(\n vocab_size=32000,\n hidden_size=4096, # LLaMA2-7B dimensions\n intermediate_size=11008, # FFN hidden_dim = 4 * 4096 * 0.7 (SwiGLU scaling)\n num_hidden_layers=1, # Only 1 decoder layer\n num_attention_heads=32,\n max_position_embeddings=4096,\n use_cache=False, # Disable KV caching for export\n)\n\n# 2. Initialize model (random weights)\nwith torch.no_grad():\n model = LlamaForCausalLM(config).eval().half()\n\n# 3. Export with static shapes\ninput_ids = torch.randint(0, 32000, (1, 64)) # Static [batch=1, seq=64]\nexported = torch.export.export(\n model,\n (input_ids,),\n dynamic_shapes=None, # Fully static\n)\n\n# Test forward pass\ninput_ids = torch.randint(0, 32000, (1, 64))\noutput = model(input_ids)\nprint(output)\n\n# Export validation\n\nDEVICE = torch.device(\"cuda:0\")\n\nwith torch_tensorrt.logging.errors():\n trt_model = torch_tensorrt.dynamo.compile(\n exported,\n inputs=[input_ids],\n enabled_precisions={torch.float32, torch.float16},\n truncate_double=True,\n device=DEVICE,\n disable_tf32=True,\n use_explicit_typing=False,\n use_fp32_acc=True,\n )\n\ninput_ids = input_ids.to(DEVICE)\n\nres = trt_model.forward(input_ids)\nprint(res)" ] } ], @@ -35,7 +35,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/669d3d90aba7fad1bec8bbd852aa9cbc/cross_runtime_compilation_for_windows.ipynb b/docs/_downloads/669d3d90aba7fad1bec8bbd852aa9cbc/cross_runtime_compilation_for_windows.ipynb index 56c729a8d5..611024d266 100644 --- a/docs/_downloads/669d3d90aba7fad1bec8bbd852aa9cbc/cross_runtime_compilation_for_windows.ipynb +++ b/docs/_downloads/669d3d90aba7fad1bec8bbd852aa9cbc/cross_runtime_compilation_for_windows.ipynb @@ -40,7 +40,7 @@ }, "outputs": [], "source": [ - "if args.load:\n # load the saved model in Windows\n if platform.system() != \"Windows\" or platform.machine() != 
\"AMD64\":\n raise ValueError(\n \"cross runtime compiled model for windows can only be loaded in Windows system\"\n )\n loaded_model = torchtrt.load_cross_compiled_exported_program(args.path).module()\n print(f\"model has been successfully loaded from ${args.path}\")\n # inference\n trt_output = loaded_model(input)\n print(f\"inference result: {trt_output}\")\nelse:\n if platform.system() != \"Linux\" or platform.architecture()[0] != \"64bit\":\n raise ValueError(\n \"cross runtime compiled model for windows can only be compiled in Linux system\"\n )\n compile_spec = {\n \"debug\": True,\n \"min_block_size\": 1,\n }\n torchtrt.cross_compile_for_windows(\n model, file_path=args.path, inputs=inputs, **compile_spec\n )\n print(\n f\"model has been successfully cross compiled and saved in Linux to {args.path}\"\n )" + "if args.load:\n # load the saved model in Windows\n if platform.system() != \"Windows\" or platform.machine() != \"AMD64\":\n raise ValueError(\n \"cross runtime compiled model for windows can only be loaded in Windows system\"\n )\n loaded_model = torchtrt.load_cross_compiled_exported_program(args.path).module()\n print(f\"model has been successfully loaded from ${args.path}\")\n # inference\n trt_output = loaded_model(input)\n print(f\"inference result: {trt_output}\")\nelse:\n if platform.system() != \"Linux\" or platform.architecture()[0] != \"64bit\":\n raise ValueError(\n \"cross runtime compiled model for windows can only be compiled in Linux system\"\n )\n compile_spec = {\n \"min_block_size\": 1,\n }\n torchtrt.cross_compile_for_windows(\n model, file_path=args.path, inputs=inputs, **compile_spec\n )\n print(\n f\"model has been successfully cross compiled and saved in Linux to {args.path}\"\n )" ] } ], @@ -60,7 +60,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git 
a/docs/_downloads/68b8589f80a47518afd92bbad3fda19d/mutable_torchtrt_module_example.ipynb b/docs/_downloads/68b8589f80a47518afd92bbad3fda19d/mutable_torchtrt_module_example.ipynb index b271bc5e42..3c437d77a8 100644 --- a/docs/_downloads/68b8589f80a47518afd92bbad3fda19d/mutable_torchtrt_module_example.ipynb +++ b/docs/_downloads/68b8589f80a47518afd92bbad3fda19d/mutable_torchtrt_module_example.ipynb @@ -15,7 +15,7 @@ }, "outputs": [], "source": [ - "import numpy as np\nimport torch\nimport torch_tensorrt as torch_trt\nimport torchvision.models as models\n\nnp.random.seed(5)\ntorch.manual_seed(5)\ninputs = [torch.rand((1, 3, 224, 224)).to(\"cuda\")]" + "import numpy as np\nimport torch\nimport torch_tensorrt as torch_trt\nimport torchvision.models as models\nfrom diffusers import DiffusionPipeline\n\nnp.random.seed(5)\ntorch.manual_seed(5)\ninputs = [torch.rand((1, 3, 224, 224)).to(\"cuda\")]" ] }, { @@ -33,7 +33,7 @@ }, "outputs": [], "source": [ - "settings = {\n \"use_python\": False,\n \"enabled_precisions\": {torch.float32},\n \"immutable_weights\": False,\n}\n\nmodel = models.resnet18(pretrained=True).eval().to(\"cuda\")\nmutable_module = torch_trt.MutableTorchTensorRTModule(model, **settings)\n# You can use the mutable module just like the original pytorch module. The compilation happens while you first call the mutable module.\nmutable_module(*inputs)" + "settings = {\n \"use_python_runtime\": False,\n \"enabled_precisions\": {torch.float32},\n \"immutable_weights\": False,\n}\n\nmodel = models.resnet18(pretrained=True).eval().to(\"cuda\")\nmutable_module = torch_trt.MutableTorchTensorRTModule(model, **settings)\n# You can use the mutable module just like the original pytorch module. 
The compilation happens while you first call the mutable module.\nmutable_module(*inputs)" ] }, { @@ -94,7 +94,7 @@ }, "outputs": [], "source": [ - "from diffusers import DiffusionPipeline\n\nwith torch.no_grad():\n settings = {\n \"use_python_runtime\": True,\n \"enabled_precisions\": {torch.float16},\n \"debug\": True,\n \"immutable_weights\": False,\n }\n\n model_id = \"stabilityai/stable-diffusion-xl-base-1.0\"\n device = \"cuda:0\"\n\n prompt = \"cinematic photo elsa, police uniform , . 35mm photograph, film, bokeh, professional, 4k, highly detailed\"\n negative = \"drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly, nude\"\n\n pipe = DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16)\n pipe.to(device)\n\n # The only extra line you need\n pipe.unet = torch_trt.MutableTorchTensorRTModule(pipe.unet, **settings)\n BATCH = torch.export.Dim(\"BATCH\", min=2, max=24)\n _HEIGHT = torch.export.Dim(\"_HEIGHT\", min=16, max=32)\n _WIDTH = torch.export.Dim(\"_WIDTH\", min=16, max=32)\n HEIGHT = 4 * _HEIGHT\n WIDTH = 4 * _WIDTH\n args_dynamic_shapes = ({0: BATCH, 2: HEIGHT, 3: WIDTH}, {})\n kwargs_dynamic_shapes = {\n \"encoder_hidden_states\": {0: BATCH},\n \"added_cond_kwargs\": {\n \"text_embeds\": {0: BATCH},\n \"time_ids\": {0: BATCH},\n },\n \"return_dict\": False,\n }\n pipe.unet.set_expected_dynamic_shape_range(\n args_dynamic_shapes, kwargs_dynamic_shapes\n )\n image = pipe(\n prompt,\n negative_prompt=negative,\n num_inference_steps=30,\n height=1024,\n width=768,\n num_images_per_prompt=2,\n ).images[0]\n image.save(\"./without_LoRA_mutable.jpg\")\n\n # Standard Huggingface LoRA loading procedure\n pipe.load_lora_weights(\n \"stablediffusionapi/load_lora_embeddings\",\n weight_name=\"all-disney-princess-xl-lo.safetensors\",\n adapter_name=\"lora1\",\n )\n pipe.set_adapters([\"lora1\"], adapter_weights=[1])\n pipe.fuse_lora()\n pipe.unload_lora_weights()\n\n # Refit triggered\n image = pipe(\n 
prompt,\n negative_prompt=negative,\n num_inference_steps=30,\n height=1024,\n width=1024,\n num_images_per_prompt=1,\n ).images[0]\n image.save(\"./with_LoRA_mutable.jpg\")" + "with torch.no_grad():\n settings = {\n \"use_python_runtime\": True,\n \"enabled_precisions\": {torch.float16},\n \"immutable_weights\": False,\n }\n\n model_id = \"stabilityai/stable-diffusion-xl-base-1.0\"\n device = \"cuda:0\"\n\n prompt = \"cinematic photo elsa, police uniform , . 35mm photograph, film, bokeh, professional, 4k, highly detailed\"\n negative = \"drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly, nude\"\n\n pipe = DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16)\n pipe.to(device)\n\n # The only extra line you need\n pipe.unet = torch_trt.MutableTorchTensorRTModule(pipe.unet, **settings)\n BATCH = torch.export.Dim(\"BATCH\", min=2, max=24)\n _HEIGHT = torch.export.Dim(\"_HEIGHT\", min=16, max=32)\n _WIDTH = torch.export.Dim(\"_WIDTH\", min=16, max=32)\n HEIGHT = 4 * _HEIGHT\n WIDTH = 4 * _WIDTH\n args_dynamic_shapes = ({0: BATCH, 2: HEIGHT, 3: WIDTH}, {})\n kwargs_dynamic_shapes = {\n \"encoder_hidden_states\": {0: BATCH},\n \"added_cond_kwargs\": {\n \"text_embeds\": {0: BATCH},\n \"time_ids\": {0: BATCH},\n },\n \"return_dict\": None,\n }\n pipe.unet.set_expected_dynamic_shape_range(\n args_dynamic_shapes, kwargs_dynamic_shapes\n )\n image = pipe(\n prompt,\n negative_prompt=negative,\n num_inference_steps=30,\n height=1024,\n width=768,\n num_images_per_prompt=2,\n ).images[0]\n image.save(\"./without_LoRA_mutable.jpg\")\n\n # Standard Huggingface LoRA loading procedure\n pipe.load_lora_weights(\n \"stablediffusionapi/load_lora_embeddings\",\n weight_name=\"all-disney-princess-xl-lo.safetensors\",\n adapter_name=\"lora1\",\n )\n pipe.set_adapters([\"lora1\"], adapter_weights=[1])\n pipe.fuse_lora()\n pipe.unload_lora_weights()\n\n # Refit triggered\n image = pipe(\n prompt,\n negative_prompt=negative,\n 
num_inference_steps=30,\n height=1024,\n width=1024,\n num_images_per_prompt=1,\n ).images[0]\n image.save(\"./with_LoRA_mutable.jpg\")" ] }, { @@ -112,7 +112,7 @@ }, "outputs": [], "source": [ - "class Model(torch.nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, a, b, c={}):\n x = torch.matmul(a, b)\n x = torch.matmul(c[\"a\"], c[\"b\"].T)\n print(c[\"b\"][0])\n x = 2 * c[\"b\"]\n return x\n\n\ndevice = \"cuda:0\"\nmodel = Model().eval().to(device)\ninputs = (torch.rand(10, 3).to(device), torch.rand(3, 30).to(device))\nkwargs = {\n \"c\": {\"a\": torch.rand(10, 30).to(device), \"b\": torch.rand(10, 30).to(device)},\n}\ndim_0 = torch.export.Dim(\"dim\", min=1, max=50)\ndim_1 = torch.export.Dim(\"dim\", min=1, max=50)\ndim_2 = torch.export.Dim(\"dim2\", min=1, max=50)\nargs_dynamic_shapes = ({1: dim_1}, {0: dim_0})\nkwarg_dynamic_shapes = {\n \"c\": {\n \"a\": {},\n \"b\": {0: dim_2},\n }, # a's shape does not change so we give it an empty dict\n}\n# Export the model first with custom dynamic shape constraints\nmodel = torch_trt.MutableTorchTensorRTModule(model, debug=True, min_block_size=1)\nmodel.set_expected_dynamic_shape_range(args_dynamic_shapes, kwarg_dynamic_shapes)\n# Compile\nmodel(*inputs, **kwargs)\n# Change input shape\ninputs_2 = (torch.rand(10, 5).to(device), torch.rand(10, 30).to(device))\nkwargs_2 = {\n \"c\": {\"a\": torch.rand(10, 30).to(device), \"b\": torch.rand(5, 30).to(device)},\n}\n# Run without recompiling\nmodel(*inputs_2, **kwargs_2)" + "class Model(torch.nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, a, b, c={}):\n x = torch.matmul(a, b)\n x = torch.matmul(c[\"a\"], c[\"b\"].T)\n print(c[\"b\"][0])\n x = 2 * c[\"b\"]\n return x\n\n\ndevice = \"cuda:0\"\nmodel = Model().eval().to(device)\ninputs = (torch.rand(10, 3).to(device), torch.rand(3, 30).to(device))\nkwargs = {\n \"c\": {\"a\": torch.rand(10, 30).to(device), \"b\": torch.rand(10, 30).to(device)},\n}\ndim_0 = 
torch.export.Dim(\"dim\", min=1, max=50)\ndim_1 = torch.export.Dim(\"dim\", min=1, max=50)\ndim_2 = torch.export.Dim(\"dim2\", min=1, max=50)\nargs_dynamic_shapes = ({1: dim_1}, {0: dim_0})\nkwarg_dynamic_shapes = {\n \"c\": {\n \"a\": {},\n \"b\": {0: dim_2},\n }, # a's shape does not change so we give it an empty dict\n}\n# Export the model first with custom dynamic shape constraints\nmodel = torch_trt.MutableTorchTensorRTModule(model, min_block_size=1)\nmodel.set_expected_dynamic_shape_range(args_dynamic_shapes, kwarg_dynamic_shapes)\n# Compile\nmodel(*inputs, **kwargs)\n# Change input shape\ninputs_2 = (torch.rand(10, 5).to(device), torch.rand(10, 30).to(device))\nkwargs_2 = {\n \"c\": {\"a\": torch.rand(10, 30).to(device), \"b\": torch.rand(5, 30).to(device)},\n}\n# Run without recompiling\nmodel(*inputs_2, **kwargs_2)" ] }, { @@ -130,7 +130,7 @@ }, "outputs": [], "source": [ - "import os\n\nfrom torch_tensorrt.dynamo._defaults import TIMING_CACHE_PATH\n\nmodel = models.resnet18(pretrained=True).eval().to(\"cuda\")\n\ntimes = []\nstart = torch.cuda.Event(enable_timing=True)\nend = torch.cuda.Event(enable_timing=True)\n\nexample_inputs = (torch.randn((100, 3, 224, 224)).to(\"cuda\"),)\nmodel = torch_trt.MutableTorchTensorRTModule(\n model,\n use_python_runtime=True,\n enabled_precisions={torch.float},\n debug=True,\n min_block_size=1,\n immutable_weights=False,\n cache_built_engines=True,\n reuse_cached_engines=True,\n engine_cache_size=1 << 30, # 1GB\n)\n\n\ndef remove_timing_cache(path=TIMING_CACHE_PATH):\n if os.path.exists(path):\n os.remove(path)\n\n\nremove_timing_cache()\n\nfor i in range(4):\n inputs = [torch.rand((100 + i, 3, 224, 224)).to(\"cuda\")]\n\n start.record()\n model(*inputs) # Recompile\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\nprint(\"----------------dynamo_compile----------------\")\nprint(\"Without engine caching, used:\", times[0], \"ms\")\nprint(\"With engine caching used:\", times[1], 
\"ms\")\nprint(\"With engine caching used:\", times[2], \"ms\")\nprint(\"With engine caching used:\", times[3], \"ms\")" + "import os\n\nfrom torch_tensorrt.dynamo._defaults import TIMING_CACHE_PATH\n\nmodel = models.resnet18(pretrained=True).eval().to(\"cuda\")\n\ntimes = []\nstart = torch.cuda.Event(enable_timing=True)\nend = torch.cuda.Event(enable_timing=True)\n\nexample_inputs = (torch.randn((100, 3, 224, 224)).to(\"cuda\"),)\nmodel = torch_trt.MutableTorchTensorRTModule(\n model,\n use_python_runtime=True,\n enabled_precisions={torch.float},\n min_block_size=1,\n immutable_weights=False,\n cache_built_engines=True,\n reuse_cached_engines=True,\n engine_cache_size=1 << 30, # 1GB\n)\n\n\ndef remove_timing_cache(path=TIMING_CACHE_PATH):\n if os.path.exists(path):\n os.remove(path)\n\n\nremove_timing_cache()\n\nfor i in range(4):\n inputs = [torch.rand((100 + i, 3, 224, 224)).to(\"cuda\")]\n\n start.record()\n model(*inputs) # Recompile\n end.record()\n torch.cuda.synchronize()\n times.append(start.elapsed_time(end))\n\nprint(\"----------------dynamo_compile----------------\")\nprint(\"Without engine caching, used:\", times[0], \"ms\")\nprint(\"With engine caching used:\", times[1], \"ms\")\nprint(\"With engine caching used:\", times[2], \"ms\")\nprint(\"With engine caching used:\", times[3], \"ms\")" ] } ], @@ -150,7 +150,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/6a6052d9668b2cb8332d349d328e21c1/_rendered_examples_jupyter.zip b/docs/_downloads/6a6052d9668b2cb8332d349d328e21c1/_rendered_examples_jupyter.zip index b281f05bfb..05a211fe85 100644 Binary files a/docs/_downloads/6a6052d9668b2cb8332d349d328e21c1/_rendered_examples_jupyter.zip and b/docs/_downloads/6a6052d9668b2cb8332d349d328e21c1/_rendered_examples_jupyter.zip differ diff --git a/docs/_downloads/6c6c0a48adee16bd8076df5fef7d0491/aot_plugin.py 
b/docs/_downloads/6c6c0a48adee16bd8076df5fef7d0491/aot_plugin.py new file mode 100644 index 0000000000..86dccfddfc --- /dev/null +++ b/docs/_downloads/6c6c0a48adee16bd8076df5fef7d0491/aot_plugin.py @@ -0,0 +1,174 @@ +""" +.. _aot_plugin: +Automatically Generate a TensorRT AOT Plugin +=================================================================== +We are going to demonstrate how to automatically generate a plugin for a custom kernel using Torch-TensorRT using +the new Python based plugin system in TensorRT 10.7. + +Torch-TensorRT supports falling back to PyTorch implementations of operations in the case that Torch-TensorRT +does not know how to compile them in TensorRT. However, this comes at the cost of a graph break and will reduce the performance of the model. +The easiest way to fix lack of support for ops is by adding a decomposition (see: +`Writing lowering passes for the Dynamo frontend `_) - which defines the operator +in terms of PyTorch ops that are supported in Torch-TensorRT or a converter (see: +`Writing converters for the Dynamo frontend `_) - which defines the operator in terms of TensorRT operators. + +In some cases there isn't a great way to do either of these, perhaps because the operator is a custom kernel that is not part of standard PyTorch or +TensorRT cannot support it natively. + +For these cases, it is possible to use a TensorRT plugin to replace the operator **inside** the TensorRT engine, thereby avoiding +the performance and resource overhead from a graph break. + +Previously this involved a complex process in not only building a performant kernel but setting it up to run in TensorRT (see: `Using Custom Kernels within TensorRT Engines with Torch-TensorRT `_). +As of TensorRT 10.7, there is a new Python native plugin system which greatly streamlines this process. This +plugin system also allows Torch-TensorRT to automatically generate the necessary conversion code to convert the +operation in PyTorch to TensorRT. 
+ +In addition, Torch-TensorRT provides automatic generation of TensorRT plugin feature (see: `Automatically Generate a Plugin for a Custom Kernel `_). +However, the above methods generates a JIT plugin that might not satisfy user's performance requirements. +To support that, Torch-TensorRT provides auto generation of TensorRT AOT Plugin which raps a function to define an Ahead-of-Time (AOT) implementation for a plugin already registered. +This provides a performance boost comparing to JIT plugin. +""" + +import argparse +from typing import Tuple, Union + +import tensorrt as trt +import tensorrt.plugin as trtp +import torch +import torch_tensorrt +import triton +import triton.language as tl + +trt_logger = trt.Logger(trt.Logger.VERBOSE) + + +@triton.jit +def add_one_kernel(x_ptr, n_elements, y_ptr, BLOCK_SIZE: tl.constexpr): + pid = tl.program_id(0) + block_start = pid * BLOCK_SIZE + offsets = block_start + tl.arange(0, BLOCK_SIZE) + mask = offsets < n_elements + x = tl.load(x_ptr + offsets, mask=mask) + output = x + 1 + tl.store(y_ptr + offsets, output, mask=mask) + + +@torch.library.custom_op("my::add_one", mutates_args=()) # type: ignore[misc] +def add_one(X: torch.Tensor) -> torch.Tensor: + # Ensure the tensors are on the GPU + assert X.is_cuda + + # Create output tensor + Y = torch.empty_like(X) + + # Define block size + BLOCK_SIZE = 256 + + # Grid of programs + grid = lambda meta: (triton.cdiv(X.numel(), meta["BLOCK_SIZE"]),) + + # Launch the kernel + add_one_kernel[grid](X, X.numel(), Y, BLOCK_SIZE=BLOCK_SIZE) + + return Y + + +@torch.library.register_fake("my::add_one") +def _(X: torch.Tensor) -> torch.Tensor: + return X + + +@trtp.register("my::add_one") +def add_plugin_desc(X: trtp.TensorDesc) -> Tuple[trtp.TensorDesc]: + return X.like() + + +@trtp.aot_impl("my::add_one") +def add_plugin_aot_impl( + X: trtp.TensorDesc, outputs: Tuple[trtp.TensorDesc], tactic: int +) -> Tuple[ + Union[str, bytes], Union[str, bytes], trtp.KernelLaunchParams, trtp.SymExprs 
+]: + type_str = "fp32" if X.dtype == trt.float32 else "fp16" + + block_size = 256 + src = triton.compiler.ASTSource( + fn=add_one_kernel, + signature={ + "x_ptr": f"*{type_str}", + "n_elements": "i32", + "y_ptr": f"*{type_str}", + "BLOCK_SIZE": "constexpr", + }, + constants={ + "BLOCK_SIZE": block_size, + }, + ) + + compiled_kernel = triton.compile(src) + + N = X.shape_expr.numel() + launch_params = trtp.KernelLaunchParams() + + # grid dims + launch_params.grid_x = trtp.cdiv(N, block_size) + # block dims + launch_params.block_x = compiled_kernel.metadata.num_warps * 32 + # shared memory + launch_params.shared_mem = compiled_kernel.metadata.shared + + extra_args = trtp.SymIntExprs(1) + extra_args[0] = trtp.SymInt32(N) + + return ( + compiled_kernel.metadata.name, + compiled_kernel.asm["ptx"], + launch_params, + extra_args, + ) + + +torch_tensorrt.dynamo.conversion.plugins.generate_plugin_converter( + "my::add_one", + supports_dynamic_shapes=False, + requires_output_allocator=False, + use_aot_if_available=True, +) + + +class MyModel(torch.nn.Module): + def __init__(self): + super().__init__() + + def forward(self, X: torch.Tensor) -> torch.Tensor: + res = torch.ops.my.add_one.default(X) + + return res + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "--aot", action="store_true", help="Try to use AOT compilation", default=False + ) + args = parser.parse_args() + + my_model = MyModel().to("cuda") + m = torch.full((64, 64), 2, device="cuda", dtype=torch.float) + + assert my_model(X=m)[0][0] == 3.0 + + with torch_tensorrt.logging.debug(): + trt_inputs = [m] + model_trt = torch_tensorrt.compile( + my_model, + inputs=trt_inputs, + min_block_size=1, + ) + print("Model compiled successfully!") + print("Running inference with compiled model...") + for i in range(10): + res = model_trt(m) + assert torch.allclose(res, my_model(m)), "Results do not match!" 
+ + print("Inference successful!") diff --git a/docs/_downloads/798cda8f83bd9f5e2cc93f329a04332c/_rendered_examples_python.zip b/docs/_downloads/798cda8f83bd9f5e2cc93f329a04332c/_rendered_examples_python.zip index 412ec5814a..6def9285e4 100644 Binary files a/docs/_downloads/798cda8f83bd9f5e2cc93f329a04332c/_rendered_examples_python.zip and b/docs/_downloads/798cda8f83bd9f5e2cc93f329a04332c/_rendered_examples_python.zip differ diff --git a/docs/_downloads/79b11f38b95116a32190bdd045626574/custom_kernel_plugins.ipynb b/docs/_downloads/79b11f38b95116a32190bdd045626574/custom_kernel_plugins.ipynb index 2878f38613..18a145e6ac 100644 --- a/docs/_downloads/79b11f38b95116a32190bdd045626574/custom_kernel_plugins.ipynb +++ b/docs/_downloads/79b11f38b95116a32190bdd045626574/custom_kernel_plugins.ipynb @@ -154,7 +154,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "```none\nGraphModule(\n (_run_on_gpu_0): GraphModule()\n (_run_on_acc_1): GraphModule(\n (conv): Module()\n )\n)\n\n++++++++++++++ Dry-Run Results for Graph +++++++++++++++++\n\nThe graph consists of 2 Total Operators, of which 1 operators are supported, 50.0% coverage\n\nThe following ops are currently unsupported or excluded from conversion, and are listed with their op-count in the graph:\n torch.ops.torchtrt_ex.triton_circular_pad.default: 1\n\nThe following nodes are currently set to run in Torch:\nNode: torch.ops.torchtrt_ex.triton_circular_pad.default, with layer location: __/triton_circular_pad\nNote: Some of the above nodes may be supported, but were not included in a TRT graph by the partitioner\n\nCompiled with: CompilationSettings(enabled_precisions={}, debug=False, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), 
require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=True, hardware_compatible=False)\n\n Graph Structure:\n\n Inputs: List[Tensor: (1, 1, 3, 3)@float32]\n ...\n TRT Engine #1 - Submodule name: _run_on_acc_1\n Engine Inputs: List[Tensor: (1, 1, 5, 5)@float32]\n Number of Operators in Engine: 1\n Engine Outputs: Tensor: (1, 5, 3, 3)@float32\n ...\n Outputs: List[Tensor: (1, 5, 3, 3)@float32]\n\n --------- Aggregate Stats ---------\n\n Average Number of Operators per TRT Engine: 1.0\n Most Operators in a TRT Engine: 1\n\n ********** Recommendations **********\n\n - For minimal graph segmentation, select min_block_size=1 which would generate 1 TRT engine(s)\n - The current level of graph segmentation is equivalent to selecting min_block_size=1 which generates 1 TRT engine(s)\n```\nWe see that there is going to be 2 subgraphs, one that will run through PyTorch for our custom op and one through TensorRT for the convolution. 
This graph break is going to be a significant portion of the latency of this model.\n\n" + "```none\nGraphModule(\n (_run_on_gpu_0): GraphModule()\n (_run_on_acc_1): GraphModule(\n (conv): Module()\n )\n)\n\n++++++++++++++ Dry-Run Results for Graph +++++++++++++++++\n\nThe graph consists of 2 Total Operators, of which 1 operators are supported, 50.0% coverage\n\nThe following ops are currently unsupported or excluded from conversion, and are listed with their op-count in the graph:\n torch.ops.torchtrt_ex.triton_circular_pad.default: 1\n\nThe following nodes are currently set to run in Torch:\nNode: torch.ops.torchtrt_ex.triton_circular_pad.default, with layer location: __/triton_circular_pad\nNote: Some of the above nodes may be supported, but were not included in a TRT graph by the partitioner\n\nCompiled with: CompilationSettings(enabled_precisions={}, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=True, hardware_compatible=False)\n\n Graph Structure:\n\n Inputs: List[Tensor: (1, 1, 3, 3)@float32]\n ...\n TRT Engine #1 - Submodule name: _run_on_acc_1\n Engine Inputs: List[Tensor: (1, 1, 5, 5)@float32]\n Number of Operators in Engine: 1\n Engine Outputs: Tensor: (1, 5, 3, 3)@float32\n ...\n Outputs: List[Tensor: (1, 5, 3, 3)@float32]\n\n --------- Aggregate Stats ---------\n\n Average Number of Operators per TRT Engine: 1.0\n Most Operators in a TRT Engine: 1\n\n ********** Recommendations **********\n\n - For minimal graph segmentation, select min_block_size=1 
which would generate 1 TRT engine(s)\n - The current level of graph segmentation is equivalent to selecting min_block_size=1 which generates 1 TRT engine(s)\n```\nWe see that there is going to be 2 subgraphs, one that will run through PyTorch for our custom op and one through TensorRT for the convolution. This graph break is going to be a significant portion of the latency of this model.\n\n" ] }, { @@ -215,7 +215,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "```none\nGraphModule(\n (_run_on_acc_0): TorchTensorRTModule()\n)\n\n+++++++++++++++ Dry-Run Results for Graph ++++++++++++++++\n\nThe graph consists of 2 Total Operators, of which 2 operators are supported, 100.0% coverage\n\nCompiled with: CompilationSettings(enabled_precisions={}, debug=True, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=False, hardware_compatible=False)\n\n Graph Structure:\n\n Inputs: List[Tensor: (1, 1, 3, 3)@float32]\n ...\n TRT Engine #1 - Submodule name: _run_on_acc_0\n Engine Inputs: List[Tensor: (1, 1, 3, 3)@float32]\n Number of Operators in Engine: 2\n Engine Outputs: Tensor: (1, 5, 3, 3)@float32\n ...\n Outputs: List[Tensor: (1, 5, 3, 3)@float32]\n\n ---------- Aggregate Stats -------------\n\n Average Number of Operators per TRT Engine: 2.0\n Most Operators in a TRT Engine: 2\n\n ********** Recommendations **********\n\n - For minimal graph segmentation, select min_block_size=2 which would generate 1 TRT engine(s)\n - The current level of graph segmentation is 
equivalent to selecting min_block_size=2 which generates 1 TRT engine(s)\n```\n" + "```none\nGraphModule(\n (_run_on_acc_0): TorchTensorRTModule()\n)\n\n+++++++++++++++ Dry-Run Results for Graph ++++++++++++++++\n\nThe graph consists of 2 Total Operators, of which 2 operators are supported, 100.0% coverage\n\nCompiled with: CompilationSettings(enabled_precisions={}, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=False, hardware_compatible=False)\n\n Graph Structure:\n\n Inputs: List[Tensor: (1, 1, 3, 3)@float32]\n ...\n TRT Engine #1 - Submodule name: _run_on_acc_0\n Engine Inputs: List[Tensor: (1, 1, 3, 3)@float32]\n Number of Operators in Engine: 2\n Engine Outputs: Tensor: (1, 5, 3, 3)@float32\n ...\n Outputs: List[Tensor: (1, 5, 3, 3)@float32]\n\n ---------- Aggregate Stats -------------\n\n Average Number of Operators per TRT Engine: 2.0\n Most Operators in a TRT Engine: 2\n\n ********** Recommendations **********\n\n - For minimal graph segmentation, select min_block_size=2 which would generate 1 TRT engine(s)\n - The current level of graph segmentation is equivalent to selecting min_block_size=2 which generates 1 TRT engine(s)\n```\n" ] }, { @@ -285,7 +285,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/7e3a125a2d4ba8274a41b46f5e0723fa/refit_engine_example.py 
b/docs/_downloads/7e3a125a2d4ba8274a41b46f5e0723fa/refit_engine_example.py index 66a1a70964..2637b4fd7e 100644 --- a/docs/_downloads/7e3a125a2d4ba8274a41b46f5e0723fa/refit_engine_example.py +++ b/docs/_downloads/7e3a125a2d4ba8274a41b46f5e0723fa/refit_engine_example.py @@ -56,7 +56,6 @@ model = models.resnet18(pretrained=False).eval().to("cuda") exp_program = torch.export.export(model, tuple(inputs)) enabled_precisions = {torch.float} -debug = False workspace_size = 20 << 30 min_block_size = 0 use_python_runtime = False @@ -66,7 +65,6 @@ tuple(inputs), use_python_runtime=use_python_runtime, enabled_precisions=enabled_precisions, - debug=debug, min_block_size=min_block_size, torch_executed_ops=torch_executed_ops, immutable_weights=False, @@ -101,6 +99,7 @@ ) # Check the output +model2.to("cuda") expected_outputs, refitted_outputs = exp_program2.module()(*inputs), new_trt_gm(*inputs) for expected_output, refitted_output in zip(expected_outputs, refitted_outputs): assert torch.allclose( diff --git a/docs/_downloads/81aa3becef967e19fae2b5a1a07d0355/pre_allocated_output_example.ipynb b/docs/_downloads/81aa3becef967e19fae2b5a1a07d0355/pre_allocated_output_example.ipynb index f7e04b995d..96838019c5 100644 --- a/docs/_downloads/81aa3becef967e19fae2b5a1a07d0355/pre_allocated_output_example.ipynb +++ b/docs/_downloads/81aa3becef967e19fae2b5a1a07d0355/pre_allocated_output_example.ipynb @@ -96,7 +96,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/b26ba3d33b5fc57e738fb2f26cabe4e8/weight_streaming_example.py b/docs/_downloads/b26ba3d33b5fc57e738fb2f26cabe4e8/weight_streaming_example.py index e1076a9e75..601292ba95 100644 --- a/docs/_downloads/b26ba3d33b5fc57e738fb2f26cabe4e8/weight_streaming_example.py +++ b/docs/_downloads/b26ba3d33b5fc57e738fb2f26cabe4e8/weight_streaming_example.py @@ -32,7 +32,43 @@ import torch import torch_tensorrt from 
transformers import AutoModelForCausalLM -from utils import export_llm + + +def export_llm(model, inputs, min_seq_len=1, max_seq_len=16): + """ + Exports the LLM model into an ExportedProgram with dynamic shapes. + In the case of guard failures due to some PyTorch kernel implements, we also + try to re-export the graph by expressing them as runtime assert nodes + """ + with torch.no_grad(): + # max=1024 has contraint violation error. https://github.com/pytorch/pytorch/issues/125604 + seq_len = torch.export.Dim("seq_len", min=min_seq_len, max=max_seq_len) + position_ids = torch.arange(inputs.shape[1]).unsqueeze(0).to(inputs.device) + try: + print("Trying to export the model using torch.export.export()..") + # strict=False only enables aotautograd tracing and excludes dynamo. + ep = torch.export.export( + model, + args=(inputs,), + kwargs={"position_ids": position_ids}, + dynamic_shapes=({1: seq_len}, {1: seq_len}), + strict=False, + ) + except: + print( + "Trying torch.export._trace._export to trace the graph since torch.export.export() failed" + ) + # This API is used to express the constraint violation guards as asserts in the graph. 
+ ep = torch.export._trace._export( + model, + args=(inputs,), + kwargs={"position_ids": position_ids}, + dynamic_shapes=({1: seq_len}, {1: seq_len}), + strict=False, + allow_complex_guards_as_runtime_asserts=True, + ) + + return ep def time_generate(model, inputs, output_seq_length, iterations=10): diff --git a/docs/_downloads/b35883282793ac3413933fdb22d00d81/torch_compile_advanced_usage.ipynb b/docs/_downloads/b35883282793ac3413933fdb22d00d81/torch_compile_advanced_usage.ipynb index e48725f606..9ca4b1135f 100644 --- a/docs/_downloads/b35883282793ac3413933fdb22d00d81/torch_compile_advanced_usage.ipynb +++ b/docs/_downloads/b35883282793ac3413933fdb22d00d81/torch_compile_advanced_usage.ipynb @@ -91,7 +91,7 @@ }, "outputs": [], "source": [ - "# If we want to customize certain options in the backend,\n# but still use the torch.compile call directly, we can provide\n# custom options to the backend via the \"options\" keyword\n# which takes in a dictionary mapping options to values.\n#\n# For accepted backend options, see the CompilationSettings dataclass:\n# py/torch_tensorrt/dynamo/_settings.py\nbackend_kwargs = {\n \"enabled_precisions\": {torch.half},\n \"debug\": True,\n \"min_block_size\": 2,\n \"torch_executed_ops\": {\"torch.ops.aten.sub.Tensor\"},\n \"optimization_level\": 4,\n \"use_python_runtime\": False,\n}\n\n# Run the model on an input to cause compilation, as so:\noptimized_model_custom = torch.compile(\n model_half,\n backend=\"torch_tensorrt\",\n options=backend_kwargs,\n dynamic=False,\n)\noptimized_model_custom(*sample_inputs_half)" + "# If we want to customize certain options in the backend,\n# but still use the torch.compile call directly, we can provide\n# custom options to the backend via the \"options\" keyword\n# which takes in a dictionary mapping options to values.\n#\n# For accepted backend options, see the CompilationSettings dataclass:\n# py/torch_tensorrt/dynamo/_settings.py\nbackend_kwargs = {\n \"enabled_precisions\": {torch.half},\n 
\"min_block_size\": 2,\n \"torch_executed_ops\": {\"torch.ops.aten.sub.Tensor\"},\n \"optimization_level\": 4,\n \"use_python_runtime\": False,\n}\n\n# Run the model on an input to cause compilation, as so:\noptimized_model_custom = torch.compile(\n model_half,\n backend=\"torch_tensorrt\",\n options=backend_kwargs,\n dynamic=False,\n)\noptimized_model_custom(*sample_inputs_half)" ] }, { @@ -136,7 +136,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/b776287bc876f7ce24942b82a66beb05/torch_compile_stable_diffusion.ipynb b/docs/_downloads/b776287bc876f7ce24942b82a66beb05/torch_compile_stable_diffusion.ipynb index fa67bcc80d..aec64827c5 100644 --- a/docs/_downloads/b776287bc876f7ce24942b82a66beb05/torch_compile_stable_diffusion.ipynb +++ b/docs/_downloads/b776287bc876f7ce24942b82a66beb05/torch_compile_stable_diffusion.ipynb @@ -60,7 +60,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/c0341280f3b022df00c4241c42d9ee8b/custom_kernel_plugins.py b/docs/_downloads/c0341280f3b022df00c4241c42d9ee8b/custom_kernel_plugins.py index 398c0a1ebe..dccb0ff0cf 100644 --- a/docs/_downloads/c0341280f3b022df00c4241c42d9ee8b/custom_kernel_plugins.py +++ b/docs/_downloads/c0341280f3b022df00c4241c42d9ee8b/custom_kernel_plugins.py @@ -276,7 +276,7 @@ def forward(self, x: torch.Tensor) -> torch.Tensor: # Node: torch.ops.torchtrt_ex.triton_circular_pad.default, with layer location: __/triton_circular_pad # Note: Some of the above nodes may be supported, but were not included in a TRT graph by the partitioner # -# Compiled with: CompilationSettings(enabled_precisions={}, debug=False, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, 
optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=True, hardware_compatible=False) +# Compiled with: CompilationSettings(enabled_precisions={}, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=True, hardware_compatible=False) # # Graph Structure: # @@ -581,7 +581,7 @@ def circular_padding_converter( # # The graph consists of 2 Total Operators, of which 2 operators are supported, 100.0% coverage # -# Compiled with: CompilationSettings(enabled_precisions={}, debug=True, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=False, hardware_compatible=False) +# Compiled with: 
CompilationSettings(enabled_precisions={}, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=False, hardware_compatible=False) # # Graph Structure: # diff --git a/docs/_downloads/c091c7b61c14cfc9df236d4e6a3e20e2/auto_generate_converters.ipynb b/docs/_downloads/c091c7b61c14cfc9df236d4e6a3e20e2/auto_generate_converters.ipynb index 7bc3049a71..643c6c392c 100644 --- a/docs/_downloads/c091c7b61c14cfc9df236d4e6a3e20e2/auto_generate_converters.ipynb +++ b/docs/_downloads/c091c7b61c14cfc9df236d4e6a3e20e2/auto_generate_converters.ipynb @@ -119,7 +119,7 @@ }, "outputs": [], "source": [ - "class MyModel(torch.nn.Module): # type: ignore[misc]\n def __init__(self):\n super().__init__()\n\n def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:\n z = torch.add(x, y)\n res = torch.ops.torchtrt_ex.elementwise_mul.default(x, z, a=1)\n\n return res\n\n\nmy_model = MyModel().to(\"cuda\")\nm = torch.full((64, 64), 2, device=\"cuda\", dtype=torch.float)\nn = torch.full((64, 64), 3, device=\"cuda\", dtype=torch.float)\n\nwith torch_tensorrt.logging.errors():\n model_trt = torch_tensorrt.compile(\n my_model, inputs=[m, n], debug=True, min_block_size=1\n )\n for i in range(300):\n res = model_trt(m, n)\n assert torch.allclose(res, my_model(m, n))\n\nprint(\"Ran with custom plugin!\")" + "class MyModel(torch.nn.Module): # type: ignore[misc]\n def __init__(self):\n super().__init__()\n\n def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:\n z = torch.add(x, 
y)\n res = torch.ops.torchtrt_ex.elementwise_mul.default(x, z, a=1)\n\n return res\n\n\nmy_model = MyModel().to(\"cuda\")\nm = torch.full((64, 64), 2, device=\"cuda\", dtype=torch.float)\nn = torch.full((64, 64), 3, device=\"cuda\", dtype=torch.float)\n\nwith torch_tensorrt.logging.errors():\n model_trt = torch_tensorrt.compile(my_model, inputs=[m, n], min_block_size=1)\n for i in range(300):\n res = model_trt(m, n)\n assert torch.allclose(res, my_model(m, n))\n\nprint(\"Ran with custom plugin!\")" ] } ], @@ -139,7 +139,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/c86d57c05e15d5d3edf6229b6a3566c8/auto_generate_converters.py b/docs/_downloads/c86d57c05e15d5d3edf6229b6a3566c8/auto_generate_converters.py index bd3ce24ee6..af9cffb8ff 100644 --- a/docs/_downloads/c86d57c05e15d5d3edf6229b6a3566c8/auto_generate_converters.py +++ b/docs/_downloads/c86d57c05e15d5d3edf6229b6a3566c8/auto_generate_converters.py @@ -174,9 +174,7 @@ def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: n = torch.full((64, 64), 3, device="cuda", dtype=torch.float) with torch_tensorrt.logging.errors(): - model_trt = torch_tensorrt.compile( - my_model, inputs=[m, n], debug=True, min_block_size=1 - ) + model_trt = torch_tensorrt.compile(my_model, inputs=[m, n], min_block_size=1) for i in range(300): res = model_trt(m, n) assert torch.allclose(res, my_model(m, n)) diff --git a/docs/_downloads/ce102e287ddb5744f0a1364e8c0c7f68/torch_compile_transformers_example.ipynb b/docs/_downloads/ce102e287ddb5744f0a1364e8c0c7f68/torch_compile_transformers_example.ipynb index ce19826b8d..747780179e 100644 --- a/docs/_downloads/ce102e287ddb5744f0a1364e8c0c7f68/torch_compile_transformers_example.ipynb +++ b/docs/_downloads/ce102e287ddb5744f0a1364e8c0c7f68/torch_compile_transformers_example.ipynb @@ -51,7 +51,7 @@ }, "outputs": [], "source": [ - "# Enabled precision for 
TensorRT optimization\nenabled_precisions = {torch.float}\n\n# Whether to print verbose logs\ndebug = True\n\n# Workspace size for TensorRT\nworkspace_size = 20 << 30\n\n# Maximum number of TRT Engines\n# (Lower value allows more graph segmentation)\nmin_block_size = 7\n\n# Operations to Run in Torch, regardless of converter support\ntorch_executed_ops = {}" + "# Enabled precision for TensorRT optimization\nenabled_precisions = {torch.float}\n\n# Workspace size for TensorRT\nworkspace_size = 20 << 30\n\n# Maximum number of TRT Engines\n# (Lower value allows more graph segmentation)\nmin_block_size = 7\n\n# Operations to Run in Torch, regardless of converter support\ntorch_executed_ops = {}" ] }, { @@ -69,7 +69,7 @@ }, "outputs": [], "source": [ - "# Define backend compilation keyword arguments\ncompilation_kwargs = {\n \"enabled_precisions\": enabled_precisions,\n \"debug\": debug,\n \"workspace_size\": workspace_size,\n \"min_block_size\": min_block_size,\n \"torch_executed_ops\": torch_executed_ops,\n}\n\n# Build and compile the model with torch.compile, using Torch-TensorRT backend\noptimized_model = torch.compile(\n model,\n backend=\"torch_tensorrt\",\n dynamic=False,\n options=compilation_kwargs,\n)\noptimized_model(*inputs)" + "# Define backend compilation keyword arguments\ncompilation_kwargs = {\n \"enabled_precisions\": enabled_precisions,\n \"workspace_size\": workspace_size,\n \"min_block_size\": min_block_size,\n \"torch_executed_ops\": torch_executed_ops,\n}\n\n# Build and compile the model with torch.compile, using Torch-TensorRT backend\noptimized_model = torch.compile(\n model,\n backend=\"torch_tensorrt\",\n dynamic=False,\n options=compilation_kwargs,\n)\noptimized_model(*inputs)" ] }, { @@ -150,7 +150,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/ceee048e511b8a73d9f1ce18da61ae14/torch_export_sam2.ipynb 
b/docs/_downloads/ceee048e511b8a73d9f1ce18da61ae14/torch_export_sam2.ipynb index bc5883b6a2..a673cbd56c 100644 --- a/docs/_downloads/ceee048e511b8a73d9f1ce18da61ae14/torch_export_sam2.ipynb +++ b/docs/_downloads/ceee048e511b8a73d9f1ce18da61ae14/torch_export_sam2.ipynb @@ -243,7 +243,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/cfe89df17e42f6acff939282b7e174b5/aot_plugin.ipynb b/docs/_downloads/cfe89df17e42f6acff939282b7e174b5/aot_plugin.ipynb new file mode 100644 index 0000000000..091f6e0e50 --- /dev/null +++ b/docs/_downloads/cfe89df17e42f6acff939282b7e174b5/aot_plugin.ipynb @@ -0,0 +1,43 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\nAutomatically Generate a TensorRT AOT Plugin\n===================================================================\nWe are going to demonstrate how to automatically generate a plugin for a custom kernel using Torch-TensorRT using\nthe new Python based plugin system in TensorRT 10.7.\n\nTorch-TensorRT supports falling back to PyTorch implementations of operations in the case that Torch-TensorRT\ndoes not know how to compile them in TensorRT. 
However, this comes at the cost of a graph break and will reduce the performance of the model.\nThe easiest way to fix lack of support for ops is by adding a decomposition (see:\n[Writing lowering passes for the Dynamo frontend](https://pytorch.org/TensorRT/contributors/writing_dynamo_aten_lowering_passes.html)) - which defines the operator\nin terms of PyTorch ops that are supported in Torch-TensorRT or a converter (see:\n[Writing converters for the Dynamo frontend](https://pytorch.org/TensorRT/contributors/dynamo_converters.html)) - which defines the operator in terms of TensorRT operators.\n\nIn some cases there isn't a great way to do either of these, perhaps because the operator is a custom kernel that is not part of standard PyTorch or\nTensorRT cannot support it natively.\n\nFor these cases, it is possible to use a TensorRT plugin to replace the operator **inside** the TensorRT engine, thereby avoiding\nthe performance and resource overhead from a graph break.\n\nPreviously this involved a complex process in not only building a performant kernel but setting it up to run in TensorRT (see: [Using Custom Kernels within TensorRT Engines with Torch-TensorRT](https://pytorch.org/TensorRT/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.html)).\nAs of TensorRT 10.7, there is a new Python native plugin system which greatly streamlines this process. 
This\nplugin system also allows Torch-TensorRT to automatically generate the necessary conversion code to convert the\noperation in PyTorch to TensorRT.\n\nIn addition, Torch-TensorRT provides automatic generation of TensorRT plugin feature (see: [Automatically Generate a Plugin for a Custom Kernel](https://docs.pytorch.org/TensorRT/tutorials/_rendered_examples/dynamo/auto_generate_plugins.html)).\nHowever, the above methods generates a JIT plugin that might not satisfy user's performance requirements.\nTo support that, Torch-TensorRT provides auto generation of TensorRT AOT Plugin which raps a function to define an Ahead-of-Time (AOT) implementation for a plugin already registered.\nThis provides a performance boost comparing to JIT plugin.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import argparse\nfrom typing import Tuple, Union\n\nimport tensorrt as trt\nimport tensorrt.plugin as trtp\nimport torch\nimport torch_tensorrt\nimport triton\nimport triton.language as tl\n\ntrt_logger = trt.Logger(trt.Logger.VERBOSE)\n\n\n@triton.jit\ndef add_one_kernel(x_ptr, n_elements, y_ptr, BLOCK_SIZE: tl.constexpr):\n pid = tl.program_id(0)\n block_start = pid * BLOCK_SIZE\n offsets = block_start + tl.arange(0, BLOCK_SIZE)\n mask = offsets < n_elements\n x = tl.load(x_ptr + offsets, mask=mask)\n output = x + 1\n tl.store(y_ptr + offsets, output, mask=mask)\n\n\n@torch.library.custom_op(\"my::add_one\", mutates_args=()) # type: ignore[misc]\ndef add_one(X: torch.Tensor) -> torch.Tensor:\n # Ensure the tensors are on the GPU\n assert X.is_cuda\n\n # Create output tensor\n Y = torch.empty_like(X)\n\n # Define block size\n BLOCK_SIZE = 256\n\n # Grid of programs\n grid = lambda meta: (triton.cdiv(X.numel(), meta[\"BLOCK_SIZE\"]),)\n\n # Launch the kernel\n add_one_kernel[grid](X, X.numel(), Y, BLOCK_SIZE=BLOCK_SIZE)\n\n return Y\n\n\n@torch.library.register_fake(\"my::add_one\")\ndef 
_(X: torch.Tensor) -> torch.Tensor:\n return X\n\n\n@trtp.register(\"my::add_one\")\ndef add_plugin_desc(X: trtp.TensorDesc) -> Tuple[trtp.TensorDesc]:\n return X.like()\n\n\n@trtp.aot_impl(\"my::add_one\")\ndef add_plugin_aot_impl(\n X: trtp.TensorDesc, outputs: Tuple[trtp.TensorDesc], tactic: int\n) -> Tuple[\n Union[str, bytes], Union[str, bytes], trtp.KernelLaunchParams, trtp.SymExprs\n]:\n type_str = \"fp32\" if X.dtype == trt.float32 else \"fp16\"\n\n block_size = 256\n src = triton.compiler.ASTSource(\n fn=add_one_kernel,\n signature={\n \"x_ptr\": f\"*{type_str}\",\n \"n_elements\": \"i32\",\n \"y_ptr\": f\"*{type_str}\",\n \"BLOCK_SIZE\": \"constexpr\",\n },\n constants={\n \"BLOCK_SIZE\": block_size,\n },\n )\n\n compiled_kernel = triton.compile(src)\n\n N = X.shape_expr.numel()\n launch_params = trtp.KernelLaunchParams()\n\n # grid dims\n launch_params.grid_x = trtp.cdiv(N, block_size)\n # block dims\n launch_params.block_x = compiled_kernel.metadata.num_warps * 32\n # shared memory\n launch_params.shared_mem = compiled_kernel.metadata.shared\n\n extra_args = trtp.SymIntExprs(1)\n extra_args[0] = trtp.SymInt32(N)\n\n return (\n compiled_kernel.metadata.name,\n compiled_kernel.asm[\"ptx\"],\n launch_params,\n extra_args,\n )\n\n\ntorch_tensorrt.dynamo.conversion.plugins.generate_plugin_converter(\n \"my::add_one\",\n supports_dynamic_shapes=False,\n requires_output_allocator=False,\n use_aot_if_available=True,\n)\n\n\nclass MyModel(torch.nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, X: torch.Tensor) -> torch.Tensor:\n res = torch.ops.my.add_one.default(X)\n\n return res\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"--aot\", action=\"store_true\", help=\"Try to use AOT compilation\", default=False\n )\n args = parser.parse_args()\n\n my_model = MyModel().to(\"cuda\")\n m = torch.full((64, 64), 2, device=\"cuda\", dtype=torch.float)\n\n assert my_model(X=m)[0][0] == 3.0\n\n 
with torch_tensorrt.logging.debug():\n trt_inputs = [m]\n model_trt = torch_tensorrt.compile(\n my_model,\n inputs=trt_inputs,\n min_block_size=1,\n )\n print(\"Model compiled successfully!\")\n print(\"Running inference with compiled model...\")\n for i in range(10):\n res = model_trt(m)\n assert torch.allclose(res, my_model(m)), \"Results do not match!\"\n\n print(\"Inference successful!\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.13" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/docs/_downloads/d0b2df4f38205e5c78371907285feeea/llama2_flashinfer_rmsnorm.py b/docs/_downloads/d0b2df4f38205e5c78371907285feeea/llama2_flashinfer_rmsnorm.py index 9d57794c6a..7542a9a1b7 100644 --- a/docs/_downloads/d0b2df4f38205e5c78371907285feeea/llama2_flashinfer_rmsnorm.py +++ b/docs/_downloads/d0b2df4f38205e5c78371907285feeea/llama2_flashinfer_rmsnorm.py @@ -249,7 +249,6 @@ def replace_rmsnorm( disable_tf32=True, use_explicit_typing=False, use_fp32_acc=True, - # debug=True, ) input_ids = input_ids.to(DEVICE) diff --git a/docs/_downloads/d28c19d0b239befcec511ab7597f4192/auto_generate_plugins.ipynb b/docs/_downloads/d28c19d0b239befcec511ab7597f4192/auto_generate_plugins.ipynb index 71ca9381af..21c30d862a 100644 --- a/docs/_downloads/d28c19d0b239befcec511ab7597f4192/auto_generate_plugins.ipynb +++ b/docs/_downloads/d28c19d0b239befcec511ab7597f4192/auto_generate_plugins.ipynb @@ -76,7 +76,7 @@ }, "outputs": [], "source": [ - "class MyModel(torch.nn.Module): # type: ignore[misc]\n def __init__(self):\n super().__init__()\n\n def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:\n z = torch.add(x, 
y)\n res = torch.ops.torchtrt_ex.elementwise_scale_mul.default(x, z, b=0.5)\n\n return res\n\n\nmy_model = MyModel().to(\"cuda\")\nm = torch.randint(0, 5, (64, 64), device=\"cuda\", dtype=torch.float)\nn = torch.randint(0, 5, (64, 64), device=\"cuda\", dtype=torch.float)\n\nwith torch_tensorrt.logging.errors():\n model_trt = torch_tensorrt.compile(\n my_model, inputs=[m, n], debug=True, min_block_size=1\n )\n for i in range(300):\n res = model_trt(m, n)\n assert torch.allclose(res, my_model(m, n))\n\nprint(\"Ran with custom plugin!\")" + "class MyModel(torch.nn.Module): # type: ignore[misc]\n def __init__(self):\n super().__init__()\n\n def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:\n z = torch.add(x, y)\n res = torch.ops.torchtrt_ex.elementwise_scale_mul.default(x, z, b=0.5)\n\n return res\n\n\nmy_model = MyModel().to(\"cuda\")\nm = torch.randint(0, 5, (64, 64), device=\"cuda\", dtype=torch.float)\nn = torch.randint(0, 5, (64, 64), device=\"cuda\", dtype=torch.float)\n\nwith torch_tensorrt.logging.errors():\n model_trt = torch_tensorrt.compile(my_model, inputs=[m, n], min_block_size=1)\n for i in range(300):\n res = model_trt(m, n)\n assert torch.allclose(res, my_model(m, n))\n\nprint(\"Ran with custom plugin!\")" ] } ], @@ -96,7 +96,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/d3458223a5f37d5d4d5d3ad5b7e1c312/hierarchical_partitioner_example.py b/docs/_downloads/d3458223a5f37d5d4d5d3ad5b7e1c312/hierarchical_partitioner_example.py new file mode 100644 index 0000000000..73975e2453 --- /dev/null +++ b/docs/_downloads/d3458223a5f37d5d4d5d3ad5b7e1c312/hierarchical_partitioner_example.py @@ -0,0 +1,187 @@ +""" + +.. 
_hierarchical_partitioner_example: + +Hierarchical Partitioner Example +================================ + +Basic example on how to use the hierarchical adjacency partitioner function and manually compile the partitioned model. +Not yet available in the compile API. +""" + +from typing import Any, Callable + +import torch +import torch.nn as nn +import torch_tensorrt +from torch_tensorrt._enums import dtype +from torch_tensorrt.dynamo import partitioning +from torch_tensorrt.dynamo._compiler import convert_module +from torch_tensorrt.dynamo.conversion._ConverterRegistry import ( + DYNAMO_CONVERTERS as CONVERTERS, +) +from torch_tensorrt.dynamo.lowering import ( + get_decompositions, + pre_export_lowering, +) +from torch_tensorrt.dynamo.partitioning._hierarchical_partitioner import ( + hierarchical_adjacency_partition, +) +from torch_tensorrt.dynamo.utils import ( + get_output_metadata, +) +from torchvision import models + + +class InductorModule(torch.nn.Module): # type: ignore[misc] + """Wrapper module for inductor compiled function.""" + + def __init__(self, func: Callable[..., Any]) -> None: + super().__init__() + self.func = func + + def forward(self, *args: Any, **kwargs: Any) -> Any: + return self.func(*args, **kwargs) + + +class SimpleModel(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = nn.Conv2d(3, 64, kernel_size=3, padding=1) + self.conv2 = nn.Conv2d(64, 128, kernel_size=3, padding=1) + self.bn1 = nn.BatchNorm2d(64) + self.bn2 = nn.BatchNorm2d(128) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = torch.relu(x) + x = self.conv2(x) + x = self.bn2(x) + x = torch.relu(x) + return x + + +def main(): + # Create model + model = SimpleModel().cuda() + # model = models.efficientnet_b0(pretrained=True).cuda() + model = model.eval() + + # Create example input + example_input = torch.randn(1, 3, 224, 224).cuda() + + exported_program = torch.export.export(model, (example_input,)) + exported_program = 
pre_export_lowering(exported_program) + exported_program = exported_program.run_decompositions(get_decompositions()) + + gm = exported_program.module() + + print("Original Model Structure:\n", gm) + + original_output = model(example_input) + + # 1. Partition the model into blocks that can be executed by different backends + partitioned_model, op_support = hierarchical_adjacency_partition( + gm, + min_block_size=1, + backend_priority=["inductor", "tensorrt"], + backend_support_map={ + "inductor": { + "torch.ops.aten.convolution.default", + }, + "tensorrt": CONVERTERS.keys(), + }, + torch_executed_ops={ + "torch.ops.aten._native_batch_norm_legit_no_training.default" + }, + require_full_compilation=False, + skip_fusion=True, + ) + + print("1. Partitioned Model Structure:\n", partitioned_model) + + # 2. Compile each submodule with the corresponding backend + submodule_node_dict = {} + for node in partitioned_model.graph.nodes: + if "_run_on_acc" not in node.name: + continue + submodule_node_dict[node.name] = node + + # Store compiled replicas of Torch subgraphs + compiled_modules = {} + + for name, _ in partitioned_model.named_children(): + submodule = getattr(partitioned_model, name) + if not isinstance(submodule, torch.fx.graph_module.GraphModule): + continue + + if "_run_on_acc" not in name: + submodule.to("cuda") + continue + + if name not in submodule_node_dict: + raise ValueError( + f"node_name: {name} does not exist in the submodule node dictionary" + ) + + # set the submodule metadata back to the parent module_node + metadata_list = get_output_metadata(submodule) + assert len(metadata_list) > 0 + metadata_keys = ["val", "tensor_meta"] + for key in metadata_keys: + if key not in submodule_node_dict[name].meta: + meta_val_list = [ + metadata[key] for metadata in metadata_list if key in metadata + ] + submodule_node_dict[name].meta[key] = meta_val_list + break + + # Get the submodule inputs for min, opt, max shapes of the graph inputs + submodule_inputs = 
partitioning.construct_submodule_inputs(submodule) + assert submodule_inputs is not None + + # compile submodule with pytorch inductor backend + if "_run_on_acc_inductor" in name: + sub_inputs = [] + for input in submodule_inputs: + sub_input = input.torch_tensor.to( + dtype.to(input.dtype, t=torch.dtype) + ).cuda() + sub_inputs.append(sub_input) + + compiled_func = torch._inductor.compile( + submodule, + sub_inputs, + ) + # Wrap the compiled function to be a torch.nn.Module + compiled_submodule = InductorModule(compiled_func) + + # compile submodule with tensorrt backend + elif "_run_on_acc_tensorrt" in name: + compiled_submodule = convert_module( + submodule, + submodule_inputs, + name=name, + ) + else: + raise ValueError(f"Unknown backend for submodule: {name}") + + compiled_modules[name] = compiled_submodule + + # Replace all FX Modules with compiled Modules + for name, compiled_module in compiled_modules.items(): + setattr(partitioned_model, name, compiled_module) + + print("2. Compiled Model Structure:\n", partitioned_model) + + with torch.no_grad(): + partitioned_output = partitioned_model(example_input) + print( + "3. 
Verify that Partitioned output == Original output:", + torch.allclose(partitioned_output, original_output, 1e-2, 1e-2), + ) + + +if __name__ == "__main__": + main() diff --git a/docs/_downloads/d606a9660cce1388933de8448182f4ee/vgg16_ptq.ipynb b/docs/_downloads/d606a9660cce1388933de8448182f4ee/vgg16_ptq.ipynb index 0eb5544686..f031053673 100644 --- a/docs/_downloads/d606a9660cce1388933de8448182f4ee/vgg16_ptq.ipynb +++ b/docs/_downloads/d606a9660cce1388933de8448182f4ee/vgg16_ptq.ipynb @@ -112,7 +112,7 @@ }, "outputs": [], "source": [ - "# Load the testing dataset\ntesting_dataset = datasets.CIFAR10(\n root=\"./data\",\n train=False,\n download=True,\n transform=transforms.Compose(\n [\n transforms.ToTensor(),\n transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),\n ]\n ),\n)\n\ntesting_dataloader = torch.utils.data.DataLoader(\n testing_dataset,\n batch_size=args.batch_size,\n shuffle=False,\n num_workers=2,\n drop_last=True,\n) # set drop_last=True to drop the last incomplete batch for static shape `torchtrt.dynamo.compile()`\n\nwith torch.no_grad():\n with export_torch_mode():\n # Compile the model with Torch-TensorRT Dynamo backend\n input_tensor = images.cuda()\n\n exp_program = torch.export.export(model, (input_tensor,), strict=False)\n if args.quantize_type == \"int8\":\n enabled_precisions = {torch.int8}\n elif args.quantize_type == \"fp8\":\n enabled_precisions = {torch.float8_e4m3fn}\n trt_model = torchtrt.dynamo.compile(\n exp_program,\n inputs=[input_tensor],\n enabled_precisions=enabled_precisions,\n min_block_size=1,\n debug=False,\n )\n # You can also use torch compile path to compile the model with Torch-TensorRT:\n # trt_model = torch.compile(model, backend=\"tensorrt\")\n\n # Inference compiled Torch-TensorRT model over the testing dataset\n total = 0\n correct = 0\n loss = 0.0\n class_probs = []\n class_preds = []\n for data, labels in testing_dataloader:\n data, labels = data.cuda(), labels.cuda(non_blocking=True)\n out = 
trt_model(data)\n loss += crit(out, labels)\n preds = torch.max(out, 1)[1]\n class_probs.append([F.softmax(i, dim=0) for i in out])\n class_preds.append(preds)\n total += labels.size(0)\n correct += (preds == labels).sum().item()\n\n test_probs = torch.cat([torch.stack(batch) for batch in class_probs])\n test_preds = torch.cat(class_preds)\n test_loss = loss / total\n test_acc = correct / total\n print(\"Test Loss: {:.5f} Test Acc: {:.2f}%\".format(test_loss, 100 * test_acc))" + "# Load the testing dataset\ntesting_dataset = datasets.CIFAR10(\n root=\"./data\",\n train=False,\n download=True,\n transform=transforms.Compose(\n [\n transforms.ToTensor(),\n transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),\n ]\n ),\n)\n\ntesting_dataloader = torch.utils.data.DataLoader(\n testing_dataset,\n batch_size=args.batch_size,\n shuffle=False,\n num_workers=2,\n drop_last=True,\n) # set drop_last=True to drop the last incomplete batch for static shape `torchtrt.dynamo.compile()`\n\nwith torch.no_grad():\n with export_torch_mode():\n # Compile the model with Torch-TensorRT Dynamo backend\n input_tensor = images.cuda()\n\n exp_program = torch.export.export(model, (input_tensor,), strict=False)\n if args.quantize_type == \"int8\":\n enabled_precisions = {torch.int8}\n elif args.quantize_type == \"fp8\":\n enabled_precisions = {torch.float8_e4m3fn}\n trt_model = torchtrt.dynamo.compile(\n exp_program,\n inputs=[input_tensor],\n enabled_precisions=enabled_precisions,\n min_block_size=1,\n )\n # You can also use torch compile path to compile the model with Torch-TensorRT:\n # trt_model = torch.compile(model, backend=\"tensorrt\")\n\n # Inference compiled Torch-TensorRT model over the testing dataset\n total = 0\n correct = 0\n loss = 0.0\n class_probs = []\n class_preds = []\n for data, labels in testing_dataloader:\n data, labels = data.cuda(), labels.cuda(non_blocking=True)\n out = trt_model(data)\n loss += crit(out, labels)\n preds = torch.max(out, 1)[1]\n 
class_probs.append([F.softmax(i, dim=0) for i in out])\n class_preds.append(preds)\n total += labels.size(0)\n correct += (preds == labels).sum().item()\n\n test_probs = torch.cat([torch.stack(batch) for batch in class_probs])\n test_preds = torch.cat(class_preds)\n test_loss = loss / total\n test_acc = correct / total\n print(\"Test Loss: {:.5f} Test Acc: {:.2f}%\".format(test_loss, 100 * test_acc))" ] } ], @@ -132,7 +132,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/d6acb461de023f3484b5a07f2bbaed33/converter_overloading.ipynb b/docs/_downloads/d6acb461de023f3484b5a07f2bbaed33/converter_overloading.ipynb index 2fcb22f8cf..eed09c8e51 100644 --- a/docs/_downloads/d6acb461de023f3484b5a07f2bbaed33/converter_overloading.ipynb +++ b/docs/_downloads/d6acb461de023f3484b5a07f2bbaed33/converter_overloading.ipynb @@ -204,7 +204,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/d6e1bb6ec5f884994554d9d12e37a0f6/torch_compile_resnet_example.py b/docs/_downloads/d6e1bb6ec5f884994554d9d12e37a0f6/torch_compile_resnet_example.py index fb75986099..6a85de6fbc 100644 --- a/docs/_downloads/d6e1bb6ec5f884994554d9d12e37a0f6/torch_compile_resnet_example.py +++ b/docs/_downloads/d6e1bb6ec5f884994554d9d12e37a0f6/torch_compile_resnet_example.py @@ -28,8 +28,6 @@ # Enabled precision for TensorRT optimization enabled_precisions = {torch.half} -# Whether to print verbose logs -debug = True # Workspace size for TensorRT workspace_size = 20 << 30 @@ -51,7 +49,6 @@ ir="torch_compile", inputs=inputs, enabled_precisions=enabled_precisions, - debug=debug, workspace_size=workspace_size, min_block_size=min_block_size, torch_executed_ops=torch_executed_ops, @@ -88,7 +85,6 @@ ir="torch_compile", inputs=inputs_bs8, 
enabled_precisions=enabled_precisions, - debug=debug, workspace_size=workspace_size, min_block_size=min_block_size, torch_executed_ops=torch_executed_ops, diff --git a/docs/_downloads/d9a9caffd95dc397ffb9ea9d37a89f06/refit_engine_example.ipynb b/docs/_downloads/d9a9caffd95dc397ffb9ea9d37a89f06/refit_engine_example.ipynb index a41753e140..06e56ef6d6 100644 --- a/docs/_downloads/d9a9caffd95dc397ffb9ea9d37a89f06/refit_engine_example.ipynb +++ b/docs/_downloads/d9a9caffd95dc397ffb9ea9d37a89f06/refit_engine_example.ipynb @@ -47,7 +47,7 @@ }, "outputs": [], "source": [ - "model = models.resnet18(pretrained=False).eval().to(\"cuda\")\nexp_program = torch.export.export(model, tuple(inputs))\nenabled_precisions = {torch.float}\ndebug = False\nworkspace_size = 20 << 30\nmin_block_size = 0\nuse_python_runtime = False\ntorch_executed_ops = {}\ntrt_gm = torch_trt.dynamo.compile(\n exp_program,\n tuple(inputs),\n use_python_runtime=use_python_runtime,\n enabled_precisions=enabled_precisions,\n debug=debug,\n min_block_size=min_block_size,\n torch_executed_ops=torch_executed_ops,\n immutable_weights=False,\n reuse_cached_engines=False,\n) # Output is a torch.fx.GraphModule\n\n# Save the graph module as an exported program\ntorch_trt.save(trt_gm, \"./compiled.ep\", inputs=inputs)" + "model = models.resnet18(pretrained=False).eval().to(\"cuda\")\nexp_program = torch.export.export(model, tuple(inputs))\nenabled_precisions = {torch.float}\nworkspace_size = 20 << 30\nmin_block_size = 0\nuse_python_runtime = False\ntorch_executed_ops = {}\ntrt_gm = torch_trt.dynamo.compile(\n exp_program,\n tuple(inputs),\n use_python_runtime=use_python_runtime,\n enabled_precisions=enabled_precisions,\n min_block_size=min_block_size,\n torch_executed_ops=torch_executed_ops,\n immutable_weights=False,\n reuse_cached_engines=False,\n) # Output is a torch.fx.GraphModule\n\n# Save the graph module as an exported program\ntorch_trt.save(trt_gm, \"./compiled.ep\", inputs=inputs)" ] }, { @@ -65,7 +65,7 @@ }, 
"outputs": [], "source": [ - "# Create and compile the updated model\nmodel2 = models.resnet18(pretrained=True).eval().to(\"cuda\")\nexp_program2 = torch.export.export(model2, tuple(inputs))\n\n\ncompiled_trt_ep = torch_trt.load(\"./compiled.ep\")\n\n# This returns a new module with updated weights\nnew_trt_gm = refit_module_weights(\n compiled_module=compiled_trt_ep,\n new_weight_module=exp_program2,\n arg_inputs=inputs,\n)\n\n# Check the output\nexpected_outputs, refitted_outputs = exp_program2.module()(*inputs), new_trt_gm(*inputs)\nfor expected_output, refitted_output in zip(expected_outputs, refitted_outputs):\n assert torch.allclose(\n expected_output, refitted_output, 1e-2, 1e-2\n ), \"Refit Result is not correct. Refit failed\"\n\nprint(\"Refit successfully!\")" + "# Create and compile the updated model\nmodel2 = models.resnet18(pretrained=True).eval().to(\"cuda\")\nexp_program2 = torch.export.export(model2, tuple(inputs))\n\n\ncompiled_trt_ep = torch_trt.load(\"./compiled.ep\")\n\n# This returns a new module with updated weights\nnew_trt_gm = refit_module_weights(\n compiled_module=compiled_trt_ep,\n new_weight_module=exp_program2,\n arg_inputs=inputs,\n)\n\n# Check the output\nmodel2.to(\"cuda\")\nexpected_outputs, refitted_outputs = exp_program2.module()(*inputs), new_trt_gm(*inputs)\nfor expected_output, refitted_output in zip(expected_outputs, refitted_outputs):\n assert torch.allclose(\n expected_output, refitted_output, 1e-2, 1e-2\n ), \"Refit Result is not correct. 
Refit failed\"\n\nprint(\"Refit successfully!\")" ] }, { @@ -92,7 +92,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.12" + "version": "3.11.13" } }, "nbformat": 4, diff --git a/docs/_downloads/dfa60e8f9850fd7761f3e7da81304d32/torch_compile_transformers_example.py b/docs/_downloads/dfa60e8f9850fd7761f3e7da81304d32/torch_compile_transformers_example.py index 17cf46e8a3..7737e95682 100644 --- a/docs/_downloads/dfa60e8f9850fd7761f3e7da81304d32/torch_compile_transformers_example.py +++ b/docs/_downloads/dfa60e8f9850fd7761f3e7da81304d32/torch_compile_transformers_example.py @@ -32,9 +32,6 @@ # Enabled precision for TensorRT optimization enabled_precisions = {torch.float} -# Whether to print verbose logs -debug = True - # Workspace size for TensorRT workspace_size = 20 << 30 @@ -52,7 +49,6 @@ # Define backend compilation keyword arguments compilation_kwargs = { "enabled_precisions": enabled_precisions, - "debug": debug, "workspace_size": workspace_size, "min_block_size": min_block_size, "torch_executed_ops": torch_executed_ops, diff --git a/docs/_downloads/ea588ed7ecee251ffdf5927a36eccc98/auto_generate_plugins.py b/docs/_downloads/ea588ed7ecee251ffdf5927a36eccc98/auto_generate_plugins.py index 6bfce60758..68a8635454 100644 --- a/docs/_downloads/ea588ed7ecee251ffdf5927a36eccc98/auto_generate_plugins.py +++ b/docs/_downloads/ea588ed7ecee251ffdf5927a36eccc98/auto_generate_plugins.py @@ -144,9 +144,7 @@ def forward(self, x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: n = torch.randint(0, 5, (64, 64), device="cuda", dtype=torch.float) with torch_tensorrt.logging.errors(): - model_trt = torch_tensorrt.compile( - my_model, inputs=[m, n], debug=True, min_block_size=1 - ) + model_trt = torch_tensorrt.compile(my_model, inputs=[m, n], min_block_size=1) for i in range(300): res = model_trt(m, n) assert torch.allclose(res, my_model(m, n)) diff --git a/docs/_downloads/ef6d47fc0355ddff78547f419a7ddbf6/vgg16_ptq.py 
b/docs/_downloads/ef6d47fc0355ddff78547f419a7ddbf6/vgg16_ptq.py index 7fa943040e..c72cf9281d 100644 --- a/docs/_downloads/ef6d47fc0355ddff78547f419a7ddbf6/vgg16_ptq.py +++ b/docs/_downloads/ef6d47fc0355ddff78547f419a7ddbf6/vgg16_ptq.py @@ -244,7 +244,6 @@ def calibrate_loop(model): inputs=[input_tensor], enabled_precisions=enabled_precisions, min_block_size=1, - debug=False, ) # You can also use torch compile path to compile the model with Torch-TensorRT: # trt_model = torch.compile(model, backend="tensorrt") diff --git a/docs/_downloads/fdd0cb7713d049345adec03926d28414/engine_caching_bert_example.py b/docs/_downloads/fdd0cb7713d049345adec03926d28414/engine_caching_bert_example.py index 1148d4f792..66f5a69ac0 100644 --- a/docs/_downloads/fdd0cb7713d049345adec03926d28414/engine_caching_bert_example.py +++ b/docs/_downloads/fdd0cb7713d049345adec03926d28414/engine_caching_bert_example.py @@ -50,7 +50,6 @@ def compile_bert(iterations=3): "use_python_runtime": False, "enabled_precisions": {torch.float}, "truncate_double": True, - "debug": False, "min_block_size": 1, "immutable_weights": False, "cache_built_engines": cache_built_engines, diff --git a/docs/_images/sphx_glr_aot_plugin_thumb.png b/docs/_images/sphx_glr_aot_plugin_thumb.png new file mode 100644 index 0000000000..8a5fed589d Binary files /dev/null and b/docs/_images/sphx_glr_aot_plugin_thumb.png differ diff --git a/docs/_images/sphx_glr_hierarchical_partitioner_example_thumb.png b/docs/_images/sphx_glr_hierarchical_partitioner_example_thumb.png new file mode 100644 index 0000000000..8a5fed589d Binary files /dev/null and b/docs/_images/sphx_glr_hierarchical_partitioner_example_thumb.png differ diff --git a/docs/_modules/index.html b/docs/_modules/index.html index 80b6a1b088..077f624b14 100644 --- a/docs/_modules/index.html +++ b/docs/_modules/index.html @@ -9,7 +9,7 @@ - Overview: module code — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Overview: module code — Torch-TensorRT v2.9.0.dev0+92a6908 
documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/_Device.html b/docs/_modules/torch_tensorrt/_Device.html index 825cbd4d12..c3b8e2c1fb 100644 --- a/docs/_modules/torch_tensorrt/_Device.html +++ b/docs/_modules/torch_tensorrt/_Device.html @@ -9,7 +9,7 @@ - torch_tensorrt._Device — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt._Device — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/_Input.html b/docs/_modules/torch_tensorrt/_Input.html index 1674a6cb80..7636333850 100644 --- a/docs/_modules/torch_tensorrt/_Input.html +++ b/docs/_modules/torch_tensorrt/_Input.html @@ -9,7 +9,7 @@ - torch_tensorrt._Input — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt._Input — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/_compile.html b/docs/_modules/torch_tensorrt/_compile.html index a15822a81e..fe1e840a18 100644 --- a/docs/_modules/torch_tensorrt/_compile.html +++ b/docs/_modules/torch_tensorrt/_compile.html @@ -9,7 +9,7 @@ - torch_tensorrt._compile — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt._compile — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -493,12 +492,12 @@

    Source code for torch_tensorrt._compile

     import logging
     import platform
     from enum import Enum
    -from typing import Any, Callable, List, Optional, Sequence, Set
    +from typing import Any, Callable, List, Optional, Sequence, Set, Union
     
     import torch
     import torch.fx
     from torch_tensorrt._enums import dtype
    -from torch_tensorrt._features import ENABLED_FEATURES
    +from torch_tensorrt._features import ENABLED_FEATURES, needs_cross_compile
     from torch_tensorrt._Input import Input
     from torch_tensorrt.dynamo import _defaults
     from torch_tensorrt.dynamo.runtime._CudaGraphsTorchTensorRTModule import (
    @@ -659,7 +658,7 @@ 

    Source code for torch_tensorrt._compile

         inputs: Optional[Sequence[Input | torch.Tensor | InputTensorSpec]] = None,
         arg_inputs: Optional[Sequence[Sequence[Any]]] = None,
         kwarg_inputs: Optional[dict[Any, Any]] = None,
    -    enabled_precisions: Optional[Set[torch.dtype | dtype]] = None,
    +    enabled_precisions: Optional[Set[Union[torch.dtype, dtype]]] = None,
         **kwargs: Any,
     ) -> (
         torch.nn.Module | torch.jit.ScriptModule | torch.fx.GraphModule | Callable[..., Any]
    @@ -702,7 +701,7 @@ 

    Source code for torch_tensorrt._compile

         """
     
         input_list = inputs if inputs is not None else []
    -    enabled_precisions_set: Set[dtype | torch.dtype] = (
    +    enabled_precisions_set: Set[Union[torch.dtype, dtype]] = (
             enabled_precisions
             if enabled_precisions is not None
             else _defaults.ENABLED_PRECISIONS
    @@ -790,13 +789,14 @@ 

    Source code for torch_tensorrt._compile

             raise RuntimeError("Module is an unknown format or the ir requested is unknown")
    -
    [docs]def cross_compile_for_windows( +@needs_cross_compile +def cross_compile_for_windows( module: torch.nn.Module, file_path: str, inputs: Optional[Sequence[Input | torch.Tensor]] = None, arg_inputs: Optional[Sequence[Sequence[Any]]] = None, kwarg_inputs: Optional[dict[Any, Any]] = None, - enabled_precisions: Optional[Set[torch.dtype | dtype]] = None, + enabled_precisions: Optional[Set[Union[torch.dtype, dtype]]] = None, **kwargs: Any, ) -> None: """Compile a PyTorch module using TensorRT in Linux for Inference in Windows @@ -886,7 +886,7 @@

    Source code for torch_tensorrt._compile

         )
     
         dynamo_save_cross_compiled_exported_program(trt_gm, file_path)
    -    logger.debug("successfully compiled and saved the module for windows")
    + logger.debug("successfully compiled and saved the module for windows") def torch_compile(module: torch.nn.Module, **kwargs: Any) -> Any: @@ -912,7 +912,7 @@

    Source code for torch_tensorrt._compile

         arg_inputs: Optional[Sequence[Sequence[Any]]] = None,
         kwarg_inputs: Optional[dict[Any, Any]] = None,
         ir: str = "default",
    -    enabled_precisions: Optional[Set[torch.dtype | dtype]] = None,
    +    enabled_precisions: Optional[Set[Union[torch.dtype, dtype]]] = None,
         **kwargs: Any,
     ) -> bytes:
         """Convert a TorchScript module method to a serialized TensorRT engine
    @@ -1074,6 +1074,7 @@ 

    Source code for torch_tensorrt._compile

         kwarg_inputs: Optional[dict[str, Any]] = None,
         retrace: bool = False,
         pickle_protocol: int = 2,
    +    **kwargs: Any,
     ) -> None:
         """
         Save the model to disk in the specified output format.
    @@ -1083,7 +1084,7 @@ 

    Source code for torch_tensorrt._compile

             inputs (torch.Tensor): Torch input tensors
             arg_inputs (Tuple[Any, ...]): Same as inputs. Alias for better understanding with kwarg_inputs.
             kwarg_inputs (dict[Any, ...]): Optional, kwarg inputs to the module forward function.
    -        output_format (str): Format to save the model. Options include exported_program | torchscript.
    +        output_format (str): Format to save the model. Options include exported_program | torchscript | aot_inductor.
             retrace (bool): When the module type is a fx.GraphModule, this option re-exports the graph using torch.export.export(strict=False) to save it.
                     This flag is experimental for now.
             pickle_protocol (int): The pickle protocol to use to save the model. Default is 2. Increase this to 4 or higher for large models
    @@ -1091,7 +1092,7 @@ 

    Source code for torch_tensorrt._compile

         if isinstance(module, CudaGraphsTorchTensorRTModule):
             module = module.compiled_module
         module_type = _parse_module_type(module)
    -    accepted_formats = {"exported_program", "torchscript"}
    +    accepted_formats = {"exported_program", "torchscript", "aot_inductor"}
         if arg_inputs is not None and not all(
             isinstance(input, torch.Tensor) for input in arg_inputs
         ):
    @@ -1114,6 +1115,10 @@ 

    Source code for torch_tensorrt._compile

             raise ValueError(
                 f"Provided output_format {output_format} is not supported. Supported options are exported_program | torchscript"
             )
    +    if output_format == "aot_inductor" and platform.system() != "Linux":
    +        raise ValueError(
    +            f"The AOT Inductor format is only supported on Linux, {platform.system()} is not a supported platform for this format"
    +        )
         if not file_path:
             raise ValueError("File path cannot be empty. Please provide a valid file path")
     
    @@ -1122,9 +1127,9 @@ 

    Source code for torch_tensorrt._compile

                 "Input model is of type nn.Module. Saving nn.Module directly is not supported. Supported model types torch.jit.ScriptModule | torch.fx.GraphModule | torch.export.ExportedProgram."
             )
         elif module_type == _ModuleType.ts:
    -        if output_format == "exported_program":
    +        if not all([output_format == f for f in ["exported_program", "aot_inductor"]]):
                 raise ValueError(
    -                "Provided model is a torch.jit.ScriptModule but the output_format specified is exported_program. Please verify the output_format"
    +                "Provided model is a torch.jit.ScriptModule but the output_format specified is not torchscript. Other output formats are not supported"
                 )
             else:
                 if arg_inputs is not None:
    @@ -1142,7 +1147,22 @@ 

    Source code for torch_tensorrt._compile

                     logger.warning(
                         "Provided model is a torch.export.ExportedProgram, inputs or arg_inputs is not necessary during save, it uses the inputs or arg_inputs provided during export and compile"
                     )
    -            torch.export.save(module, file_path)
    +            if output_format == "exported_program":
    +                torch.export.save(module, file_path, pickle_protocol=pickle_protocol)
    +            elif output_format == "aot_inductor":
    +                inductor_configs = {}
    +                if "inductor_configs" in kwargs:
    +                    inductor_configs = kwargs["inductor_configs"]
    +
    +                torch._inductor.aoti_compile_and_package(
    +                    exp_program,
    +                    inductor_configs=inductor_configs,
    +                    package_path=file_path,
    +                )
    +            else:
    +                raise RuntimeError(
    +                    "Attempted to serialize an exported program with an unsupported format. Exported programs support exported_program and aot_inductor"
    +                )
         elif module_type == _ModuleType.fx:
             # The module type is torch.fx.GraphModule
             if output_format == "torchscript":
    @@ -1159,9 +1179,24 @@ 

    Source code for torch_tensorrt._compile

                             "Provided model is a torch.fx.GraphModule and retrace is False, inputs or arg_inputs is not necessary during save."
                         )
                     exp_program = export(module)
    -                torch.export.save(
    -                    exp_program, file_path, pickle_protocol=pickle_protocol
    -                )
    +                if output_format == "exported_program":
    +                    torch.export.save(
    +                        exp_program, file_path, pickle_protocol=pickle_protocol
    +                    )
    +                elif output_format == "aot_inductor":
    +                    inductor_configs = {}
    +                    if "inductor_configs" in kwargs:
    +                        inductor_configs = kwargs["inductor_configs"]
    +
    +                    torch._inductor.aoti_compile_and_package(
    +                        exp_program,
    +                        inductor_configs=inductor_configs,
    +                        package_path=file_path,
    +                    )
    +                else:
    +                    raise RuntimeError(
    +                        "Attempted to serialize an exported program with an unsupported format. Exported programs support exported_program and aot_inductor"
    +                    )
                 else:
                     if arg_inputs is None:
                         raise ValueError(
    @@ -1173,9 +1208,25 @@ 

    Source code for torch_tensorrt._compile

                         kwargs=kwarg_inputs,
                         strict=False,
                     )
    -                torch.export.save(
    -                    exp_program, file_path, pickle_protocol=pickle_protocol
    -                )
    + + if output_format == "exported_program": + torch.export.save( + exp_program, file_path, pickle_protocol=pickle_protocol + ) + elif output_format == "aot_inductor": + inductor_configs = {} + if "inductor_configs" in kwargs: + inductor_configs = kwargs["inductor_configs"] + + torch._inductor.aoti_compile_and_package( + exp_program, + inductor_configs=inductor_configs, + package_path=file_path, + ) + else: + raise RuntimeError( + "Attempted to serialize an exported program with an unsupported format. Exported programs support exported_program and aot_inductor" + )
    diff --git a/docs/_modules/torch_tensorrt/_enums.html b/docs/_modules/torch_tensorrt/_enums.html index 6fc65ba01b..709a2fe055 100644 --- a/docs/_modules/torch_tensorrt/_enums.html +++ b/docs/_modules/torch_tensorrt/_enums.html @@ -9,7 +9,7 @@ - torch_tensorrt._enums — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt._enums — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -566,6 +565,12 @@

    Source code for torch_tensorrt._enums

         f8 = auto()
         """8 bit floating-point number, equivalent to ``dtype.fp8`` and ``dtype.float8``
     
    +    :meta hide-value:
    +    """
    +
    +    f4 = auto()
    +    """4 bit floating-point number, equivalent to ``dtype.fp4`` and ``dtype.float4``
    +
         :meta hide-value:
         """
     
    @@ -580,6 +585,9 @@ 

    Source code for torch_tensorrt._enums

         float8 = f8
         fp8 = f8
     
    +    float4 = f4
    +    fp4 = f4
    +
         half = f16
         fp16 = f16
         float16 = f16
    @@ -651,6 +659,8 @@ 

    Source code for torch_tensorrt._enums

                     return dtype.i32
                 elif t == torch.float8_e4m3fn:
                     return dtype.f8
    +            elif t == torch.float4_e2m1fn_x2:
    +                return dtype.f4
                 elif t == torch.half:
                     return dtype.f16
                 elif t == torch.float:
    @@ -677,6 +687,8 @@ 

    Source code for torch_tensorrt._enums

                     return dtype.i8
                 elif t == trt.DataType.FP8:
                     return dtype.f8
    +            elif t == trt.DataType.FP4:
    +                return dtype.fp4
                 elif t == trt.DataType.INT32:
                     return dtype.i32
                 elif t == trt.DataType.INT64:
    @@ -846,6 +858,8 @@ 

    Source code for torch_tensorrt._enums

                     return torch.long
                 elif self == dtype.f8:
                     return torch.float8_e4m3fn
    +            elif self == dtype.f4:
    +                return torch.float4_e2m1fn_x2
                 elif self == dtype.f16:
                     return torch.half
                 elif self == dtype.f32:
    @@ -883,6 +897,8 @@ 

    Source code for torch_tensorrt._enums

                     return trt.DataType.BOOL
                 elif self == dtype.bf16:
                     return trt.DataType.BF16
    +            elif self == dtype.f4:
    +                return trt.DataType.FP4
                 elif use_default:
                     return trt.DataType.FLOAT
                 else:
    @@ -899,6 +915,8 @@ 

    Source code for torch_tensorrt._enums

                     return np.int64
                 elif self == dtype.f16:
                     return np.float16
    +            elif self == dtype.f4:
    +                return np.float4_e2m1fn_x2
                 elif self == dtype.f32:
                     return np.float32
                 elif self == dtype.f64:
    diff --git a/docs/_modules/torch_tensorrt/dynamo/_compiler.html b/docs/_modules/torch_tensorrt/dynamo/_compiler.html
    index 5a74548a0d..6523ccfcb4 100644
    --- a/docs/_modules/torch_tensorrt/dynamo/_compiler.html
    +++ b/docs/_modules/torch_tensorrt/dynamo/_compiler.html
    @@ -9,7 +9,7 @@
       
       
       
    -  torch_tensorrt.dynamo._compiler — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation
    +  torch_tensorrt.dynamo._compiler — Torch-TensorRT v2.9.0.dev0+92a6908 documentation
       
     
       
    @@ -290,7 +290,7 @@
                   
                   
                     
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -491,6 +490,7 @@

    Source code for torch_tensorrt.dynamo._compiler

    < import collections.abc import logging +import os import platform import warnings from typing import Any, Collection, List, Optional, Sequence, Set, Tuple, Union @@ -500,6 +500,7 @@

    Source code for torch_tensorrt.dynamo._compiler

    < from torch.fx.node import Target from torch_tensorrt._Device import Device from torch_tensorrt._enums import EngineCapability, dtype +from torch_tensorrt._features import needs_cross_compile from torch_tensorrt._Input import Input from torch_tensorrt.dynamo import _defaults, partitioning from torch_tensorrt.dynamo._DryRunTracker import ( @@ -520,6 +521,8 @@

    Source code for torch_tensorrt.dynamo._compiler

    < from torch_tensorrt.dynamo.conversion._ConverterRegistry import ( DYNAMO_CONVERTERS as CONVERTERS, ) +from torch_tensorrt.dynamo.debug._DebuggerConfig import DebuggerConfig +from torch_tensorrt.dynamo.debug._supports_debugger import fn_supports_debugger from torch_tensorrt.dynamo.lowering import ( get_decompositions, post_lowering, @@ -531,7 +534,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < get_output_metadata, parse_graph_io, prepare_inputs, - set_log_level, to_torch_device, to_torch_tensorrt_device, ) @@ -539,7 +541,8 @@

    Source code for torch_tensorrt.dynamo._compiler

    < logger = logging.getLogger(__name__) -
    [docs]def cross_compile_for_windows( +@needs_cross_compile +def cross_compile_for_windows( exported_program: ExportedProgram, inputs: Optional[Sequence[Sequence[Any]]] = None, *, @@ -553,7 +556,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < Set[Union[torch.dtype, dtype]], Tuple[Union[torch.dtype, dtype]] ] = _defaults.ENABLED_PRECISIONS, engine_capability: EngineCapability = _defaults.ENGINE_CAPABILITY, - debug: bool = _defaults.DEBUG, num_avg_timing_iters: int = _defaults.NUM_AVG_TIMING_ITERS, workspace_size: int = _defaults.WORKSPACE_SIZE, dla_sram_size: int = _defaults.DLA_SRAM_SIZE, @@ -627,7 +629,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < assume_dynamic_shape_support (bool): Setting this to true enables the converters work for both dynamic and static shapes. Default: False sparse_weights (bool): Enable sparsity for convolution and fully connected layers. enabled_precision (Set(Union(torch.dtype, torch_tensorrt.dtype))): The set of datatypes that TensorRT can use when selecting kernels - debug (bool): Enable debuggable engine capability (torch_tensorrt.EngineCapability): Restrict kernel selection to safe gpu kernels or safe dla kernels num_avg_timing_iters (int): Number of averaging timing iterations used to select kernels workspace_size (int): Maximum size of workspace given to TensorRT @@ -674,8 +675,12 @@

    Source code for torch_tensorrt.dynamo._compiler

    < f"Cross compile for windows is only supported on x86-64 Linux architecture, current platform: {platform.system()=}, {platform.architecture()[0]=}" ) - if debug: - set_log_level(logger.parent, logging.DEBUG) + if kwargs.get("debug", False): + warnings.warn( + "`debug` is deprecated. Please use `with torch_tensorrt.dynamo.Debugger(...)` to wrap your compilation call to enable debugging functionality.", + DeprecationWarning, + stacklevel=2, + ) if "truncate_long_and_double" in kwargs.keys(): if truncate_double is not _defaults.TRUNCATE_DOUBLE: @@ -741,10 +746,11 @@

    Source code for torch_tensorrt.dynamo._compiler

    < if use_explicit_typing: if len(enabled_precisions) != 1 or not any( - x in enabled_precisions for x in {torch.float32, dtype.f32} + x in enabled_precisions + for x in {torch.float32, dtype.f32, torch.float4_e2m1fn_x2, dtype.f4} ): raise AssertionError( - f"When use_explicit_typing is enabled, only torch.float32 is allowed in the enabled_precisions but found {enabled_precisions}" + f"use_explicit_typing was set to True, however found that enabled_precisions was also specified (saw: {enabled_precisions}, expected: dtype.f32, dtype.f4). enabled_precisions should not be used when use_explicit_typing=True" ) if use_fp32_acc: @@ -786,7 +792,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < "enabled_precisions": ( enabled_precisions if enabled_precisions else _defaults.ENABLED_PRECISIONS ), - "debug": debug, "device": device, "assume_dynamic_shape_support": assume_dynamic_shape_support, "workspace_size": workspace_size, @@ -871,7 +876,7 @@

    Source code for torch_tensorrt.dynamo._compiler

    < trt_kwarg_inputs, settings, ) - return trt_gm
    + return trt_gm
    [docs]def compile( @@ -888,7 +893,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < Set[Union[torch.dtype, dtype]], Tuple[Union[torch.dtype, dtype]] ] = _defaults.ENABLED_PRECISIONS, engine_capability: EngineCapability = _defaults.ENGINE_CAPABILITY, - debug: bool = _defaults.DEBUG, num_avg_timing_iters: int = _defaults.NUM_AVG_TIMING_ITERS, workspace_size: int = _defaults.WORKSPACE_SIZE, dla_sram_size: int = _defaults.DLA_SRAM_SIZE, @@ -964,7 +968,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < assume_dynamic_shape_support (bool): Setting this to true enables the converters work for both dynamic and static shapes. Default: False sparse_weights (bool): Enable sparsity for convolution and fully connected layers. enabled_precision (Set(Union(torch.dtype, torch_tensorrt.dtype))): The set of datatypes that TensorRT can use when selecting kernels - debug (bool): Enable debuggable engine capability (torch_tensorrt.EngineCapability): Restrict kernel selection to safe gpu kernels or safe dla kernels num_avg_timing_iters (int): Number of averaging timing iterations used to select kernels workspace_size (int): Maximum size of workspace given to TensorRT @@ -1007,8 +1010,13 @@

    Source code for torch_tensorrt.dynamo._compiler

    < torch.fx.GraphModule: Compiled FX Module, when run it will execute via TensorRT """ - if debug: - set_log_level(logger.parent, logging.DEBUG) + if kwargs.get("debug", False): + warnings.warn( + "`debug` is deprecated. Please use `with torch_tensorrt.dynamo.Debugger(...)` to wrap your compilation call to enable debugging functionality", + DeprecationWarning, + stacklevel=2, + ) + if "truncate_long_and_double" in kwargs.keys(): if truncate_double is not _defaults.TRUNCATE_DOUBLE: raise ValueError( @@ -1072,10 +1080,11 @@

    Source code for torch_tensorrt.dynamo._compiler

    < if use_explicit_typing: if len(enabled_precisions) != 1 or not any( - x in enabled_precisions for x in {torch.float32, dtype.f32} + x in enabled_precisions + for x in {torch.float32, dtype.f32, torch.float4_e2m1fn_x2, dtype.f4} ): raise AssertionError( - f"When use_explicit_typing is enabled, only torch.float32 is allowed in the enabled_precisions but found {enabled_precisions}" + f"use_explicit_typing was set to True, however found that enabled_precisions was also specified (saw: {enabled_precisions}, expected: dtype.f32, dtype.f4). enabled_precisions should not be used when use_explicit_typing=True" ) if use_fp32_acc: @@ -1130,7 +1139,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < "enabled_precisions": ( enabled_precisions if enabled_precisions else _defaults.ENABLED_PRECISIONS ), - "debug": debug, "device": device, "assume_dynamic_shape_support": assume_dynamic_shape_support, "workspace_size": workspace_size, @@ -1180,6 +1188,7 @@

    Source code for torch_tensorrt.dynamo._compiler

    < ) gm = exported_program.module() + # Move the weights in the state_dict to CPU logger.debug("Input graph: " + str(gm.graph)) # Apply lowering on the graph module @@ -1204,12 +1213,15 @@

    Source code for torch_tensorrt.dynamo._compiler

    < return trt_gm
    +@fn_supports_debugger def compile_module( gm: torch.fx.GraphModule, sample_arg_inputs: Sequence[Input], sample_kwarg_inputs: Optional[dict[Any, Any]] = None, settings: CompilationSettings = CompilationSettings(), engine_cache: Optional[BaseEngineCache] = None, + *, + _debugger_config: Optional[DebuggerConfig] = None, ) -> torch.fx.GraphModule: """Compile a traced FX module @@ -1233,7 +1245,7 @@

    Source code for torch_tensorrt.dynamo._compiler

    < # Check the number of supported operations in the graph num_supported_ops, total_ops = partitioning.get_graph_converter_support( - gm, settings.debug, settings.torch_executed_ops + gm, settings.torch_executed_ops ) dryrun_tracker.total_ops_in_graph = total_ops @@ -1277,6 +1289,28 @@

    Source code for torch_tensorrt.dynamo._compiler

    < "Some nodes do not have metadata (shape and dtype information). This could lead to problems sometimes if the graph has PyTorch and TensorRT segments." ) + # Store the original input spec for later use + original_in_spec = getattr(gm, "_in_spec", None) + original_out_spec = getattr(gm, "_out_spec", None) + + # Function to preserve and restore module specs + def preserve_module_specs( + in_spec: Any, out_spec: Any, target_module: torch.fx.GraphModule + ) -> None: + """ + Applies input and output specs to the target module. + + Args: + in_spec: The input spec to apply + out_spec: The output spec to apply + target_module: The module to apply specs to + """ + # Apply specs to target module + if in_spec is not None: + target_module._in_spec = in_spec + if out_spec is not None: + target_module._out_spec = out_spec + # Partition module into components that can be TRT-accelerated fast_partitioner_failed = False # If specified, try using the fast partitioner and fall back to the global one on failure @@ -1285,7 +1319,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < logger.info("Partitioning the graph via the fast partitioner") partitioned_module, supported_ops = partitioning.fast_partition( gm, - verbose=settings.debug, min_block_size=settings.min_block_size, torch_executed_ops=settings.torch_executed_ops, require_full_compilation=settings.require_full_compilation, @@ -1306,7 +1339,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < logger.info("Partitioning the graph via the global partitioner") partitioned_module, supported_ops = partitioning.global_partition( gm, - verbose=settings.debug, min_block_size=settings.min_block_size, torch_executed_ops=settings.torch_executed_ops, require_full_compilation=settings.require_full_compilation, @@ -1324,6 +1356,7 @@

    Source code for torch_tensorrt.dynamo._compiler

    < continue submodule_node_dict[node.name] = node + preserve_module_specs(original_in_spec, original_out_spec, partitioned_module) # Store TRT replicas of Torch subgraphs trt_modules = {} # Iterate over all components that can be accelerated @@ -1401,7 +1434,7 @@

    Source code for torch_tensorrt.dynamo._compiler

    < parse_graph_io(submodule, subgraph_data) dryrun_tracker.tensorrt_graph_count += 1 dryrun_tracker.per_subgraph_data.append(subgraph_data) - + torch.cuda.empty_cache() # Create TRT engines from submodule if not settings.dryrun: trt_module = convert_module( @@ -1414,6 +1447,41 @@

    Source code for torch_tensorrt.dynamo._compiler

    < trt_modules[name] = trt_module + if _debugger_config: + + if _debugger_config.save_engine_profile: + if settings.use_python_runtime: + if _debugger_config.profile_format != "cudagraph": + raise ValueError( + "Profiling with TREX can only be enabled when using the C++ runtime. Python runtime profiling only support cudagraph visualization." + ) + else: + trt_module.enable_profiling() + else: + if _debugger_config.profile_format == "cudagraph": + raise ValueError( + "Profiling with Cudagraph can only be enabled when using the Python runtime. C++ runtime profiling only support TREX/Perfetto visualization." + ) + else: + path = os.path.join( + _debugger_config.logging_dir, + "engine_visualization_profile", + ) + os.makedirs(path, exist_ok=True) + trt_module.enable_profiling( + profiling_results_dir=path, + profile_format=_debugger_config.profile_format, + ) + + if _debugger_config.save_layer_info: + with open( + os.path.join( + _debugger_config.logging_dir, "engine_layer_info.json" + ), + "w", + ) as f: + f.write(trt_module.get_layer_info()) + # Parse the graph I/O and store it in dryrun tracker parse_graph_io(gm, dryrun_tracker) @@ -1438,10 +1506,9 @@

    Source code for torch_tensorrt.dynamo._compiler

    < *, arg_inputs: Optional[Sequence[Sequence[Any]]] = None, kwarg_inputs: Optional[dict[Any, Any]] = None, - enabled_precisions: ( - Set[torch.dtype | dtype] | Tuple[torch.dtype | dtype] - ) = _defaults.ENABLED_PRECISIONS, - debug: bool = _defaults.DEBUG, + enabled_precisions: Union[ + Set[Union[torch.dtype, dtype]], Tuple[Union[torch.dtype, dtype]] + ] = _defaults.ENABLED_PRECISIONS, assume_dynamic_shape_support: bool = _defaults.ASSUME_DYNAMIC_SHAPE_SUPPORT, workspace_size: int = _defaults.WORKSPACE_SIZE, min_block_size: int = _defaults.MIN_BLOCK_SIZE, @@ -1503,7 +1570,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < torch.randn((1, 3, 224, 244)) # Use an example tensor and let torch_tensorrt infer settings ] enabled_precisions (Optional[Set[torch.dtype | _enums.dtype]]): The set of datatypes that TensorRT can use - debug (bool): Whether to print out verbose debugging information workspace_size (int): Workspace TRT is allowed to use for the module (0 is default) min_block_size (int): Minimum number of operators per TRT-Engine Block torch_executed_ops (Set[str]): Set of operations to run in Torch, regardless of converter coverage @@ -1543,8 +1609,12 @@

    Source code for torch_tensorrt.dynamo._compiler

    < Returns: bytes: Serialized TensorRT engine, can either be saved to a file or deserialized via TensorRT APIs """ - if debug: - set_log_level(logger.parent, logging.DEBUG) + if kwargs.get("debug", False): + warnings.warn( + "`debug` is deprecated. Please use `with torch_tensorrt.dynamo.Debugger(...)` to wrap your compilation call to enable debugging functionality.", + DeprecationWarning, + stacklevel=2, + ) if "truncate_long_and_double" in kwargs.keys(): if truncate_double is not _defaults.TRUNCATE_DOUBLE: @@ -1628,7 +1698,6 @@

    Source code for torch_tensorrt.dynamo._compiler

    < compilation_options = { "assume_dynamic_shape_support": assume_dynamic_shape_support, "enabled_precisions": enabled_precisions, - "debug": debug, "workspace_size": workspace_size, "min_block_size": min_block_size, "torch_executed_ops": torch_executed_ops, @@ -1712,7 +1781,8 @@

    Source code for torch_tensorrt.dynamo._compiler

    < return serialized_engine
    -
    [docs]def save_cross_compiled_exported_program( +@needs_cross_compile +def save_cross_compiled_exported_program( gm: torch.fx.GraphModule, file_path: str, ) -> None: @@ -1730,7 +1800,7 @@

    Source code for torch_tensorrt.dynamo._compiler

    < exp_program = export(gm, cross_compile_module=True) torch.export.save(exp_program, file_path) - logger.debug(f"successfully saved the module for windows at {file_path}")
    + logger.debug(f"successfully saved the module for windows at {file_path}")
    [docs]def load_cross_compiled_exported_program(file_path: str = "") -> Any: diff --git a/docs/_modules/torch_tensorrt/dynamo/_exporter.html b/docs/_modules/torch_tensorrt/dynamo/_exporter.html index 738150945c..d9ed0f4997 100644 --- a/docs/_modules/torch_tensorrt/dynamo/_exporter.html +++ b/docs/_modules/torch_tensorrt/dynamo/_exporter.html @@ -9,7 +9,7 @@ - torch_tensorrt.dynamo._exporter — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.dynamo._exporter — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/dynamo/_settings.html b/docs/_modules/torch_tensorrt/dynamo/_settings.html index f1478a4457..9ea9f0838b 100644 --- a/docs/_modules/torch_tensorrt/dynamo/_settings.html +++ b/docs/_modules/torch_tensorrt/dynamo/_settings.html @@ -9,7 +9,7 @@ - torch_tensorrt.dynamo._settings — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.dynamo._settings — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -488,7 +487,7 @@

    Source code for torch_tensorrt.dynamo._settings

     from dataclasses import dataclass, field
    -from typing import Collection, Optional, Set, Tuple, Union
    +from typing import Any, Collection, Optional, Set, Tuple, Union
     
     from torch.fx.node import Target
     from torch_tensorrt._Device import Device
    @@ -496,7 +495,6 @@ 

    Source code for torch_tensorrt.dynamo._settings

    < from torch_tensorrt.dynamo._defaults import ( ASSUME_DYNAMIC_SHAPE_SUPPORT, CACHE_BUILT_ENGINES, - DEBUG, DISABLE_TF32, DLA_GLOBAL_DRAM_SIZE, DLA_LOCAL_DRAM_SIZE, @@ -590,7 +588,6 @@

    Source code for torch_tensorrt.dynamo._settings

    < """ enabled_precisions: Set[dtype] = field(default_factory=lambda: ENABLED_PRECISIONS) - debug: bool = DEBUG workspace_size: int = WORKSPACE_SIZE min_block_size: int = MIN_BLOCK_SIZE torch_executed_ops: Collection[Target] = field(default_factory=set) @@ -630,7 +627,22 @@

    Source code for torch_tensorrt.dynamo._settings

    < tiling_optimization_level: str = TILING_OPTIMIZATION_LEVEL l2_limit_for_tiling: int = L2_LIMIT_FOR_TILING use_distributed_mode_trace: bool = USE_DISTRIBUTED_MODE_TRACE - offload_module_to_cpu: bool = OFFLOAD_MODULE_TO_CPU
    + offload_module_to_cpu: bool = OFFLOAD_MODULE_TO_CPU + + def __getstate__(self) -> dict[str, Any]: + from torch_tensorrt.dynamo.conversion._ConverterRegistry import ( + ConverterRegistry, + ) + + state = self.__dict__.copy() + state["torch_executed_ops"] = { + op if isinstance(op, str) else ConverterRegistry.qualified_name_or_str(op) + for op in state["torch_executed_ops"] + } + return state + + def __setstate__(self, state: dict[str, Any]) -> None: + self.__dict__.update(state)
    _SETTINGS_TO_BE_ENGINE_INVARIANT = ( diff --git a/docs/_modules/torch_tensorrt/dynamo/_tracer.html b/docs/_modules/torch_tensorrt/dynamo/_tracer.html index 3a06cd06ba..46a5d3ac24 100644 --- a/docs/_modules/torch_tensorrt/dynamo/_tracer.html +++ b/docs/_modules/torch_tensorrt/dynamo/_tracer.html @@ -9,7 +9,7 @@ - torch_tensorrt.dynamo._tracer — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.dynamo._tracer — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -496,8 +495,8 @@

    Source code for torch_tensorrt.dynamo._tracer

    import torch from torch.export import Dim, export from torch_tensorrt._Input import Input -from torch_tensorrt.dynamo._defaults import DEBUG, default_device -from torch_tensorrt.dynamo.utils import get_torch_inputs, set_log_level, to_torch_device +from torch_tensorrt.dynamo._defaults import default_device +from torch_tensorrt.dynamo.utils import get_torch_inputs, to_torch_device logger = logging.getLogger(__name__) @@ -559,10 +558,6 @@

    Source code for torch_tensorrt.dynamo._tracer

    if kwarg_inputs is None: kwarg_inputs = {} - debug = kwargs.get("debug", DEBUG) - if debug: - set_log_level(logger.parent, logging.DEBUG) - device = to_torch_device(kwargs.get("device", default_device())) torch_arg_inputs = get_torch_inputs(arg_inputs, device) torch_kwarg_inputs = get_torch_inputs(kwarg_inputs, device) @@ -574,6 +569,7 @@

    Source code for torch_tensorrt.dynamo._tracer

    tuple(torch_arg_inputs), kwargs=torch_kwarg_inputs, dynamic_shapes=dynamic_shapes, + strict=kwargs.get("strict", False), ) return exp_program
    diff --git a/docs/_modules/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.html b/docs/_modules/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.html index 12830cd4ee..fc2dbe8120 100644 --- a/docs/_modules/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.html +++ b/docs/_modules/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.html @@ -9,7 +9,7 @@ - torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -489,21 +488,23 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule

     import inspect
     import logging
    +import warnings
     from copy import deepcopy
     from enum import Enum, auto
    -from typing import Any, Collection, Dict, Iterator, List, Optional, Set, Union
    +from typing import Any, Dict, Iterator, Optional, Set, Union
     
     import numpy as np
     import torch
    -from torch.fx.node import Target
    +import torch_tensorrt
    +from torch.export._trace import _export
     from torch_tensorrt._Device import Device
    -from torch_tensorrt._enums import EngineCapability, dtype
    +from torch_tensorrt._enums import dtype
     from torch_tensorrt.dynamo import _defaults
     from torch_tensorrt.dynamo._compiler import compile as dynamo_compile
     from torch_tensorrt.dynamo._refit import refit_module_weights
    -from torch_tensorrt.dynamo._settings import CompilationSettings
     from torch_tensorrt.dynamo.utils import (
         check_output_equal,
    +    deallocate_module,
         to_torch_device,
         to_torch_tensorrt_device,
     )
    @@ -552,35 +553,12 @@ 

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModulepytorch_model: torch.nn.Module, *, device: Optional[Union[Device, torch.device, str]] = _defaults.DEVICE, - disable_tf32: bool = _defaults.DISABLE_TF32, - assume_dynamic_shape_support: bool = _defaults.ASSUME_DYNAMIC_SHAPE_SUPPORT, - sparse_weights: bool = _defaults.SPARSE_WEIGHTS, - enabled_precisions: Set[ - Union[torch.dtype, dtype] - ] = _defaults.ENABLED_PRECISIONS, - engine_capability: EngineCapability = _defaults.ENGINE_CAPABILITY, - immutable_weights: bool = False, - debug: bool = _defaults.DEBUG, - num_avg_timing_iters: int = _defaults.NUM_AVG_TIMING_ITERS, - workspace_size: int = _defaults.WORKSPACE_SIZE, - dla_sram_size: int = _defaults.DLA_SRAM_SIZE, - dla_local_dram_size: int = _defaults.DLA_LOCAL_DRAM_SIZE, - dla_global_dram_size: int = _defaults.DLA_GLOBAL_DRAM_SIZE, - truncate_double: bool = _defaults.TRUNCATE_DOUBLE, - require_full_compilation: bool = _defaults.REQUIRE_FULL_COMPILATION, - min_block_size: int = _defaults.MIN_BLOCK_SIZE, - torch_executed_ops: Optional[Collection[Target]] = None, - torch_executed_modules: Optional[List[str]] = None, - pass_through_build_failures: bool = _defaults.PASS_THROUGH_BUILD_FAILURES, - max_aux_streams: Optional[int] = _defaults.MAX_AUX_STREAMS, - version_compatible: bool = _defaults.VERSION_COMPATIBLE, - optimization_level: Optional[int] = _defaults.OPTIMIZATION_LEVEL, use_python_runtime: bool = _defaults.USE_PYTHON_RUNTIME, - use_fast_partitioner: bool = _defaults.USE_FAST_PARTITIONER, - enable_experimental_decompositions: bool = _defaults.ENABLE_EXPERIMENTAL_DECOMPOSITIONS, - dryrun: bool = _defaults.DRYRUN, - hardware_compatible: bool = _defaults.HARDWARE_COMPATIBLE, - timing_cache_path: str = _defaults.TIMING_CACHE_PATH, + immutable_weights: bool = False, + strict: bool = True, + allow_complex_guards_as_runtime_asserts: bool = False, + weight_streaming_budget: Optional[int] = None, + enabled_precisions: 
Optional[Set[Union[torch.dtype, dtype]]] = None, **kwargs: Any, ) -> None: """ @@ -598,7 +576,6 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule sparse_weights (bool): Enable sparsity for convolution and fully connected layers. enabled_precision (Set(Union(torch.dtype, torch_tensorrt.dtype))): The set of datatypes that TensorRT can use when selecting kernels immutable_weights (bool): Build non-refittable engines. This is useful for some layers that are not refittable. - debug (bool): Enable debuggable engine capability (torch_tensorrt.EngineCapability): Restrict kernel selection to safe gpu kernels or safe dla kernels num_avg_timing_iters (int): Number of averaging timing iterations used to select kernels workspace_size (int): Maximum size of workspace given to TensorRT @@ -622,6 +599,7 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule hardware_compatible (bool): Build the TensorRT engines compatible with GPU architectures other than that of the GPU on which the engine was built (currently works for NVIDIA Ampere and newer) timing_cache_path (str): Path to the timing cache if it exists (or) where it will be saved after compilation lazy_engine_init (bool): Defer setting up engines until the compilation of all engines is complete. Can allow larger models with multiple graph breaks to compile but can lead to oversubscription of GPU memory at runtime. + enabled_precisions (Set(Union(torch.dtype, torch_tensorrt.dtype))): The set of datatypes that TensorRT can use when selecting kernels **kwargs: Any, Returns: MutableTorchTensorRTModule @@ -643,53 +621,38 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModuleself.exp_program: Any = None self.arg_inputs: tuple[Any, ...] = tuple() self.kwarg_inputs: dict[str, Any] = {} - device = to_torch_tensorrt_device(device) - enabled_precisions = {dtype._from(p) for p in enabled_precisions} + self.additional_settings = kwargs + self.strict = strict + self.allow_complex_guards_as_runtime_asserts = ( + allow_complex_guards_as_runtime_asserts + ) + self.use_python_runtime = use_python_runtime + self.trt_device = to_torch_tensorrt_device(device) assert ( not immutable_weights - ), "`immutable_weights` has to be False for a MutableTorchTensorRTModule." - compilation_options = { - "enabled_precisions": ( - enabled_precisions - if enabled_precisions - else _defaults.ENABLED_PRECISIONS - ), - "debug": debug, - "device": device, - "assume_dynamic_shape_support": assume_dynamic_shape_support, - "workspace_size": workspace_size, - "min_block_size": min_block_size, - "torch_executed_ops": ( - torch_executed_ops if torch_executed_ops is not None else set() - ), - "pass_through_build_failures": pass_through_build_failures, - "max_aux_streams": max_aux_streams, - "version_compatible": version_compatible, - "optimization_level": optimization_level, - "use_python_runtime": use_python_runtime, - "truncate_double": truncate_double, - "use_fast_partitioner": use_fast_partitioner, - "num_avg_timing_iters": num_avg_timing_iters, - "enable_experimental_decompositions": enable_experimental_decompositions, - "require_full_compilation": require_full_compilation, - "disable_tf32": disable_tf32, - "sparse_weights": sparse_weights, - "immutable_weights": immutable_weights, - "engine_capability": engine_capability, - "dla_sram_size": dla_sram_size, - "dla_local_dram_size": dla_local_dram_size, - "dla_global_dram_size": dla_global_dram_size, - "dryrun": dryrun, - "hardware_compatible": hardware_compatible, - "timing_cache_path": timing_cache_path, - } + ), "`immutable_weights has to be 
False for a MutableTorchTensorRTModule" + self.arg_dynamic_shapes: Optional[tuple[Any]] = None self.kwarg_dynamic_shapes: Optional[dict[Any, Any]] = None - - self.settings = CompilationSettings(**compilation_options) + self.serializable_dynamic_shapes_dims: dict[str, tuple[str, int, int]] = {} self.run_info: Optional[tuple[Any, ...]] = None self.state_dict_metadata: dict[str, torch.Size] = {} self._store_state_dict_metadata() + self.enable_weight_streaming = ( + kwargs["enable_weight_streaming"] + if "enable_weight_streaming" in kwargs + else False + ) + self.weight_streaming_ctx = None + self.weight_streaming_budget = weight_streaming_budget + if self.enable_weight_streaming: + if weight_streaming_budget is None: + logger.warning( + "Weight stremaing budget is not set. Using auto weight streaming budget" + ) + self.enabled_precisions = enabled_precisions + if self.enabled_precisions is None: + self.enabled_precisions = _defaults.ENABLED_PRECISIONS cls = self.__class__ self.__class__ = type( @@ -782,10 +745,9 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule# to determine whether refit/recompilation is needed. If the output is the same, no further process needed. if self.run_info: args, kwargs, result = self.run_info - self.original_model.to(to_torch_device(self.settings.device)) + self.original_model.to(to_torch_device(self.trt_device)) new_result = self.original_model(*args, **kwargs) - self.original_model.cpu() - torch.cuda.empty_cache() + deallocate_module(self.original_model, delete_module=False) if check_output_equal(result, new_result): self.refit_state.set_state(RefitFlag.LIVE) return @@ -814,17 +776,17 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule MutableTorchTensorRTModule automatically catches weight value updates and call this function to refit the module. If it fails to catch the changes, please call this function manually to update the TRT graph module. """ - self.original_model.to(to_torch_device(self.settings.device)) + if self.exp_program is None: - self.exp_program = torch.export.export( - self.original_model, self.arg_inputs, kwargs=self.kwarg_inputs - ) + self.original_model.to(to_torch_device(self.trt_device)) + self.exp_program = self.get_exported_program() else: self.exp_program._state_dict = ( MutableTorchTensorRTModule._transform_state_dict( self.original_model.state_dict() ) ) + self.exp_program.module().to(to_torch_device(self.trt_device)) self.gm = refit_module_weights( self.gm, self.exp_program, @@ -834,8 +796,46 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModulein_place=True, ) - self.original_model.cpu() - torch.cuda.empty_cache()

    + deallocate_module(self.original_model, delete_module=False)
    + + def get_exported_program(self) -> torch.export.ExportedProgram: + + def export_fn() -> torch.export.ExportedProgram: + if self.allow_complex_guards_as_runtime_asserts: + return _export( + self.original_model, + self.arg_inputs, + kwargs=self.kwarg_inputs, + dynamic_shapes=self._get_total_dynamic_shapes(), + strict=self.strict, + allow_complex_guards_as_runtime_asserts=self.allow_complex_guards_as_runtime_asserts, + ) + else: + return torch.export.export( + self.original_model, + self.arg_inputs, + kwargs=self.kwarg_inputs, + dynamic_shapes=self._get_total_dynamic_shapes(), + strict=self.strict, + ) + + # Check if any quantization precision is enabled + if self.enabled_precisions and any( + precision in self.enabled_precisions + for precision in (torch.float8_e4m3fn, torch.int8, torch.float4_e2m1fn_x2) + ): + try: + from modelopt.torch.quantization.utils import export_torch_mode + + assert torch.ops.tensorrt.quantize_op.default + except Exception as e: + logger.warning( + "Unable to import quantization op. Please install modelopt library (https://github.com/NVIDIA/TensorRT-Model-Optimizer?tab=readme-ov-file#installation) to add support for compiling quantized models" + ) + with export_torch_mode(): + return export_fn() + else: + return export_fn()
    [docs] def compile(self) -> None: """ @@ -845,25 +845,37 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule If it fails to catch the changes, please call this function manually to recompile the TRT graph module. """ # Export the module - self.original_model.to(to_torch_device(self.settings.device)) - self.exp_program = torch.export.export( - self.original_model, - self.arg_inputs, - kwargs=self.kwarg_inputs, - dynamic_shapes=self._get_total_dynamic_shapes(), - ) + self.original_model.to(to_torch_device(self.trt_device)) + self.exp_program = self.get_exported_program() self.gm = dynamo_compile( self.exp_program, arg_inputs=self.arg_inputs, kwarg_inputs=self.kwarg_inputs, - **self.settings.__dict__, + immutable_weights=False, + use_python_runtime=self.use_python_runtime, + enabled_precisions=self.enabled_precisions, + **self.additional_settings, + ) + deallocate_module(self.original_model, delete_module=False) + if self.enable_weight_streaming: + self.set_weight_streaming_ctx(self.weight_streaming_budget)

    + +
    [docs] def set_weight_streaming_ctx(self, requested_budget: Optional[int] = None) -> None: + """ + Set the weight streaming budget. If budget is not set, then automatic weight streaming budget + is used. + """ + self.weight_streaming_ctx = torch_tensorrt.runtime.weight_streaming(self.gm) + requested_budget = ( + requested_budget + if requested_budget is not None + else self.weight_streaming_ctx.get_automatic_weight_streaming_budget() ) - self.original_model.cpu() - torch.cuda.empty_cache()
    + self.weight_streaming_ctx.device_budget = requested_budget
    def _validate_inputs(self, *args: Any, **kwargs: Any) -> None: - if not self.arg_inputs: + if not self.arg_inputs and not self.kwarg_inputs: logger.info("First time compilation initiated. This may take some time.") self.refit_state.set_state(RefitFlag.NEEDS_RECOMPILE) self._store_inputs(args, kwargs) @@ -953,6 +965,12 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModule) def forward(self, *args: Any, **kwargs: Any) -> Any: + warnings.warn( + "Direct calls to {self.__class__}.forward() are currently broken by due to https://github.com/pytorch/pytorch/issues/157183. Either call {self.__class__}(...) directly or use {self.__class__}._forward as a work around" + ) + return self._forward(*args, **kwargs) + + def _forward(self, *args: Any, **kwargs: Any) -> Any: # Step 1: Check whether the input shape has changed kwargs = MutableTorchTensorRTModule._process_kwarg_inputs(kwargs) self._validate_inputs(*args, **kwargs) @@ -980,14 +998,24 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModuleself._store_state_dict_metadata() self.refit_state.set_state(RefitFlag.LIVE) + weight_streaming_ctx = ( + self.weight_streaming_ctx if self.enable_weight_streaming else None + ) result = self.gm(*args, **kwargs) # Storing inputs and outputs for verification when the state is unknown self.run_info = (args, kwargs, result) return result - def to(self, device: str) -> None: - logger.warning("Original PyTorch model is moved. CPU offload may failed.") - self.original_model.to(device) + def to(self, *args: Any, **kwargs: Any) -> None: + logger.warning( + "Trying to move the original PyTorch model. This will cause CPU offloading failing and increase GPU memory usage." + + "If this is absolute necessary, please call module.pytorch_model.to(...) \n" + + "The model is still on the original device." + ) + + @property + def device(self) -> torch.device: + return to_torch_device(self.trt_device) def __deepcopy__(self, memo: Any) -> Any: cls = self.__class__ @@ -1002,7 +1030,9 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModulereturn result def __call__(self, *args: Any, **kwargs: Any) -> Any: - return self.forward(*args, **kwargs) + # Due to https://github.com/pytorch/pytorch/issues/157183, we cannot use forward call, use _forward as a workaround. + # This is a temporary fix. + return self._forward(*args, **kwargs) def __getattr__(self, name: str) -> Any: if name in self.__dict__: @@ -1113,18 +1143,58 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModulereturn True + def serialize_dynamic_shapes(self) -> None: + dims = self.serializable_dynamic_shapes_dims + + def resursivly_serialize_dynamic_shape(obj: Any) -> None: + if isinstance(obj, dict): + for axis, v in obj.items(): + if isinstance(v, torch.export.dynamic_shapes._Dim): + name = str(v).split("'")[1].split(".")[-1] + # We use string of the hash to be the unique identifier of Dim object + dims.setdefault(str(hash(v)), (name, v.min, v.max)) + obj[axis] = str(hash(v)) + else: + resursivly_serialize_dynamic_shape(v) + if isinstance(obj, (tuple, list)): + for v in obj: + resursivly_serialize_dynamic_shape(v) + + resursivly_serialize_dynamic_shape(self.arg_dynamic_shapes) + resursivly_serialize_dynamic_shape(self.kwarg_dynamic_shapes) + + def deserialize_dynamic_shapes(self) -> None: + dims = self.serializable_dynamic_shapes_dims + + def resursivly_deserialize_dynamic_shape(obj: Any) -> None: + if isinstance(obj, dict): + for axis, v in obj.items(): + if isinstance(v, str): + obj[axis] = torch.export.Dim( + dims[v][0], min=dims[v][1], max=dims[v][2] + ) + else: + resursivly_deserialize_dynamic_shape(v) + if isinstance(obj, (tuple, list)): + for v in obj: + resursivly_deserialize_dynamic_shape(v) + + resursivly_deserialize_dynamic_shape(self.arg_dynamic_shapes) + resursivly_deserialize_dynamic_shape(self.kwarg_dynamic_shapes) + @staticmethod def save(module: Any, path: str) -> None: # Cast the object back to MutableTorchTensorRTModule to save assert ( - not module.settings.use_python_runtime + not module.use_python_runtime ), "Python runtime does not support serialization. Save failed." 
module.init_finished = False module.__class__ = MutableTorchTensorRTModule exp_program = module.exp_program module.pytorch_model = None module.exp_program = None - torch.save(module, path) + module.serialize_dynamic_shapes() + torch.save(module, path, pickle_protocol=4) # Restore deleted attributes module.exp_program = exp_program module.pytorch_model = _make_refit_change_trigger( @@ -1147,19 +1217,26 @@

    Source code for torch_tensorrt.dynamo.runtime._MutableTorchTensorRTModulemodule.pytorch_model = _make_refit_change_trigger( module.original_model, module.refit_state ) - module.original_model.to(to_torch_device(module.settings.device)) + module.original_model.to(to_torch_device(module.device)) module.exp_program = torch.export.export( module.original_model, module.arg_inputs, kwargs=module.kwarg_inputs ) - module.original_model.to("cpu") + deallocate_module(module.original_model, delete_module=False) cls = module.__class__ module.__class__ = type( module.original_model.__class__.__name__, (cls, module.original_model.__class__), {}, ) + module.deserialize_dynamic_shapes() module.init_finished = True - return module

    + return module + + def _reset_stateful_cache(obj: Any) -> None: + """ + Does nothing. Support Huggingface CPU offload hooks. Override the huggingface cache reset function because we don't want the TRT module to be handled by HuggingFace. + """ + return
    def recursively_remove_trigger(obj: Any) -> Any: diff --git a/docs/_modules/torch_tensorrt/dynamo/runtime/_PythonTorchTensorRTModule.html b/docs/_modules/torch_tensorrt/dynamo/runtime/_PythonTorchTensorRTModule.html index ccba540284..74ab57ca78 100644 --- a/docs/_modules/torch_tensorrt/dynamo/runtime/_PythonTorchTensorRTModule.html +++ b/docs/_modules/torch_tensorrt/dynamo/runtime/_PythonTorchTensorRTModule.html @@ -9,7 +9,7 @@ - torch_tensorrt.dynamo.runtime._PythonTorchTensorRTModule — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.dynamo.runtime._PythonTorchTensorRTModule — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -501,6 +500,8 @@

    Source code for torch_tensorrt.dynamo.runtime._PythonTorchTensorRTModule

    from torch_tensorrt._Device import Device from torch_tensorrt._enums import Platform, dtype from torch_tensorrt.dynamo._settings import CompilationSettings +from torch_tensorrt.dynamo.debug._DebuggerConfig import DebuggerConfig +from torch_tensorrt.dynamo.debug._supports_debugger import cls_supports_debugger from torch_tensorrt.dynamo.utils import DYNAMIC_DIM from torch_tensorrt.logging import TRT_LOGGER from torch_tensorrt.runtime._utils import ( @@ -600,7 +601,8 @@

    Source code for torch_tensorrt.dynamo.runtime._PythonTorchTensorRTModule

    ) -
    [docs]class PythonTorchTensorRTModule(Module): # type: ignore[misc] +
    [docs]@cls_supports_debugger +class PythonTorchTensorRTModule(Module): # type: ignore[misc] """PythonTorchTensorRTModule is a PyTorch module which encompasses an arbitrary TensorRT Engine. This module is backed by the Torch-TensorRT runtime and is only compatible with @@ -617,6 +619,7 @@

    Source code for torch_tensorrt.dynamo.runtime._PythonTorchTensorRTModule

    settings: CompilationSettings = CompilationSettings(), weight_name_map: Optional[dict[Any, Any]] = None, requires_output_allocator: bool = False, + _debugger_config: Optional[DebuggerConfig] = None, ): """Takes a name, target device, serialized TensorRT engine, and binding names / order and constructs a PyTorch ``torch.nn.Module`` around it. Uses TensorRT Python APIs to run the engine @@ -646,6 +649,7 @@

    Source code for torch_tensorrt.dynamo.runtime._PythonTorchTensorRTModule

    """ self.context: Any + self._debugger_config: Optional[DebuggerConfig] = _debugger_config super(PythonTorchTensorRTModule, self).__init__() self._register_state_dict_hook(PythonTorchTensorRTModule._on_state_dict) @@ -682,7 +686,11 @@

    Source code for torch_tensorrt.dynamo.runtime._PythonTorchTensorRTModule

    self.target_device_properties = torch.cuda.get_device_properties( self.target_device_id ) - self.profiling_enabled = settings.debug if settings.debug is not None else False + self.profiling_enabled = ( + _debugger_config.save_engine_profile + if _debugger_config is not None + else False + ) self.settings = settings self.engine = None self.weight_name_map = weight_name_map @@ -1232,7 +1240,14 @@

    Source code for torch_tensorrt.dynamo.runtime._PythonTorchTensorRTModule

    # Representation of input shapes to a given model # Shapes are concatenated as so: # x: (3, 4), y: (4, 5) --> Key: (3,4)(4,5) - new_shape_key = "".join(str(tuple(t.shape)).replace(" ", "") for t in inputs) + tensor_inputs = [] + for t in inputs: + if not isinstance(t, torch.Tensor): + return True + tensor_inputs.append(t) + new_shape_key = "".join( + str(tuple(t.shape)).replace(" ", "") for t in tensor_inputs + ) # If the new shape key differs from the existing one, # invalidate the old shape key and remove the CUDAGraph diff --git a/docs/_modules/torch_tensorrt/dynamo/runtime/_TorchTensorRTModule.html b/docs/_modules/torch_tensorrt/dynamo/runtime/_TorchTensorRTModule.html index 2d464c1e4d..33b2642ff8 100644 --- a/docs/_modules/torch_tensorrt/dynamo/runtime/_TorchTensorRTModule.html +++ b/docs/_modules/torch_tensorrt/dynamo/runtime/_TorchTensorRTModule.html @@ -9,7 +9,7 @@ - torch_tensorrt.dynamo.runtime._TorchTensorRTModule — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.dynamo.runtime._TorchTensorRTModule — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -823,7 +822,11 @@

    Source code for torch_tensorrt.dynamo.runtime._TorchTensorRTModule

    return tuple(outputs) - def enable_profiling(self, profiling_results_dir: Optional[str] = None) -> None: + def enable_profiling( + self, + profiling_results_dir: Optional[str] = None, + profile_format: str = "perfetto", + ) -> None: """Enable the profiler to collect latency information about the execution of the engine Traces can be visualized using https://ui.perfetto.dev/ or compatible alternatives @@ -836,7 +839,9 @@

    Source code for torch_tensorrt.dynamo.runtime._TorchTensorRTModule

    if profiling_results_dir is not None: self.engine.profile_path_prefix = profiling_results_dir + assert profile_format in ["trex", "perfetto"] self.engine.enable_profiling() + self.engine.set_profile_format(profile_format) def disable_profiling(self) -> None: """Disable the profiler""" diff --git a/docs/_modules/torch_tensorrt/fx/fx2trt.html b/docs/_modules/torch_tensorrt/fx/fx2trt.html index 33852bb751..10c70ec75a 100644 --- a/docs/_modules/torch_tensorrt/fx/fx2trt.html +++ b/docs/_modules/torch_tensorrt/fx/fx2trt.html @@ -9,7 +9,7 @@ - torch_tensorrt.fx.fx2trt — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.fx.fx2trt — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/fx/input_tensor_spec.html b/docs/_modules/torch_tensorrt/fx/input_tensor_spec.html index 344e6d8114..ad00adbcfb 100644 --- a/docs/_modules/torch_tensorrt/fx/input_tensor_spec.html +++ b/docs/_modules/torch_tensorrt/fx/input_tensor_spec.html @@ -9,7 +9,7 @@ - torch_tensorrt.fx.input_tensor_spec — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.fx.input_tensor_spec — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/fx/lower.html b/docs/_modules/torch_tensorrt/fx/lower.html index 8b3d02fdf3..96746055e6 100644 --- a/docs/_modules/torch_tensorrt/fx/lower.html +++ b/docs/_modules/torch_tensorrt/fx/lower.html @@ -9,7 +9,7 @@ - torch_tensorrt.fx.lower — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.fx.lower — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/fx/trt_module.html b/docs/_modules/torch_tensorrt/fx/trt_module.html index 1dae0d3b7d..62301e002c 100644 --- a/docs/_modules/torch_tensorrt/fx/trt_module.html +++ b/docs/_modules/torch_tensorrt/fx/trt_module.html @@ -9,7 +9,7 @@ - torch_tensorrt.fx.trt_module — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.fx.trt_module — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/logging.html b/docs/_modules/torch_tensorrt/logging.html index dba7615ea1..8c165b7236 100644 --- a/docs/_modules/torch_tensorrt/logging.html +++ b/docs/_modules/torch_tensorrt/logging.html @@ -9,7 +9,7 @@ - torch_tensorrt.logging — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.logging — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/runtime/_cudagraphs.html b/docs/_modules/torch_tensorrt/runtime/_cudagraphs.html index 1f96b5c517..c7c7c8e52c 100644 --- a/docs/_modules/torch_tensorrt/runtime/_cudagraphs.html +++ b/docs/_modules/torch_tensorrt/runtime/_cudagraphs.html @@ -9,7 +9,7 @@ - torch_tensorrt.runtime._cudagraphs — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.runtime._cudagraphs — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -558,48 +557,16 @@

    Source code for torch_tensorrt.runtime._cudagraphs

    self.old_mode = _PY_RT_CUDAGRAPHS self.compiled_module = compiled_module self.cudagraphs_module: Optional[CudaGraphsTorchTensorRTModule] = None + self.old_module = None - def __enter__(self) -> torch.nn.Module: - global _PY_RT_CUDAGRAPHS - - num_torch_module = 0 - num_trt_module = 0 - for name, module in self.compiled_module.named_children(): - # need to disable cudagraphs if any model requires output allocator - if ( - hasattr(module, "requires_output_allocator") - and module.requires_output_allocator - ): - raise RuntimeError( - "The model contains submodules that require a dynamic output allocator at runtime, which is incompatible with CUDA Graphs. Please disable CUDA Graphs." - ) - if "_run_on_acc" in name: - num_trt_module += 1 - elif "_run_on_gpu" in name: - num_torch_module += 1 - - if num_torch_module > 0: - # Set whole cudagraphs mode and returns wrapped module - _PY_RT_CUDAGRAPHS = CudaGraphsMode.WHOLE_GRAPH_CUDAGRAPHS - # Set new mode for C++ - if torch_tensorrt.ENABLED_FEATURES.torch_tensorrt_runtime: - torch.ops.tensorrt.set_cudagraphs_mode(_PY_RT_CUDAGRAPHS) + def __enter__(self) -> Union[torch.nn.Module, torch.fx.GraphModule]: - logger.debug( - "Found pytorch subgraphs in module, wrapping module in CudaGraphsTorchTensorRTModule" - ) - self.cudagraphs_module = CudaGraphsTorchTensorRTModule(self.compiled_module) - return self.cudagraphs_module - else: - if num_trt_module > 0: - logger.debug("No graph breaks detected, using runtime cudagraphs mode") - else: - logger.debug( - "Please consider dynamo if there is graph breaks. 
Using runtime cudagraphs mode" - ) - # Enable cudagraphs for TRT submodule - set_cudagraphs_mode(True) + if isinstance(self.compiled_module, torch_tensorrt.MutableTorchTensorRTModule): + self.old_module = self.compiled_module.gm + self.compiled_module.gm = get_cuda_graph_module(self.compiled_module.gm) return self.compiled_module + else: + return get_cuda_graph_module(self.compiled_module) def __exit__(self, *args: Any) -> None: # Set cudagraphs back to old mode @@ -607,6 +574,52 @@

    Source code for torch_tensorrt.runtime._cudagraphs

    # __del__ is not entirely predictable, so we reset cudagraph here if self.cudagraphs_module: self.cudagraphs_module._reset_captured_graph() + if self.old_module: # MutableTorchTRTModule + self.compiled_module.gm = self.old_module + + +def get_cuda_graph_module( + compiled_module: torch.fx.GraphModule, +) -> Union[torch.nn.Module, torch.fx.GraphModule]: + global _PY_RT_CUDAGRAPHS + + num_torch_module = 0 + num_trt_module = 0 + for name, module in compiled_module.named_children(): + # need to disable cudagraphs if any model requires output allocator + if ( + hasattr(module, "requires_output_allocator") + and module.requires_output_allocator + ): + raise RuntimeError( + "The model contains submodules that require a dynamic output allocator at runtime, which is incompatible with CUDA Graphs. Please disable CUDA Graphs." + ) + if "_run_on_acc" in name: + num_trt_module += 1 + elif "_run_on_gpu" in name: + num_torch_module += 1 + + if num_torch_module > 0: + # Set whole cudagraphs mode and returns wrapped module + _PY_RT_CUDAGRAPHS = CudaGraphsMode.WHOLE_GRAPH_CUDAGRAPHS + # Set new mode for C++ + if torch_tensorrt.ENABLED_FEATURES.torch_tensorrt_runtime: + torch.ops.tensorrt.set_cudagraphs_mode(_PY_RT_CUDAGRAPHS) + + logger.debug( + "Found pytorch subgraphs in module, wrapping module in CudaGraphsTorchTensorRTModule" + ) + return CudaGraphsTorchTensorRTModule(compiled_module) + else: + if num_trt_module > 0: + logger.debug("No graph breaks detected, using runtime cudagraphs mode") + else: + logger.debug( + "Please consider dynamo if there is graph breaks. Using runtime cudagraphs mode" + ) + # Enable cudagraphs for TRT submodule + set_cudagraphs_mode(True) + return compiled_module
    [docs]def enable_cudagraphs( diff --git a/docs/_modules/torch_tensorrt/runtime/_multi_device_safe_mode.html b/docs/_modules/torch_tensorrt/runtime/_multi_device_safe_mode.html index 78d79b9739..2bc5fd9873 100644 --- a/docs/_modules/torch_tensorrt/runtime/_multi_device_safe_mode.html +++ b/docs/_modules/torch_tensorrt/runtime/_multi_device_safe_mode.html @@ -9,7 +9,7 @@ - torch_tensorrt.runtime._multi_device_safe_mode — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.runtime._multi_device_safe_mode — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/runtime/_output_allocator.html b/docs/_modules/torch_tensorrt/runtime/_output_allocator.html index 139a5e0f99..0ea8ab600b 100644 --- a/docs/_modules/torch_tensorrt/runtime/_output_allocator.html +++ b/docs/_modules/torch_tensorrt/runtime/_output_allocator.html @@ -9,7 +9,7 @@ - torch_tensorrt.runtime._output_allocator — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.runtime._output_allocator — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/runtime/_pre_allocated_outputs.html b/docs/_modules/torch_tensorrt/runtime/_pre_allocated_outputs.html index 29df77921e..0bbc9d5f69 100644 --- a/docs/_modules/torch_tensorrt/runtime/_pre_allocated_outputs.html +++ b/docs/_modules/torch_tensorrt/runtime/_pre_allocated_outputs.html @@ -9,7 +9,7 @@ - torch_tensorrt.runtime._pre_allocated_outputs — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.runtime._pre_allocated_outputs — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/runtime/_weight_streaming.html b/docs/_modules/torch_tensorrt/runtime/_weight_streaming.html index ecfc6139bd..822b2fe9ed 100644 --- a/docs/_modules/torch_tensorrt/runtime/_weight_streaming.html +++ b/docs/_modules/torch_tensorrt/runtime/_weight_streaming.html @@ -9,7 +9,7 @@ - torch_tensorrt.runtime._weight_streaming — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.runtime._weight_streaming — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_modules/torch_tensorrt/ts/_compile_spec.html b/docs/_modules/torch_tensorrt/ts/_compile_spec.html index 36d1bd5763..1eda21d32a 100644 --- a/docs/_modules/torch_tensorrt/ts/_compile_spec.html +++ b/docs/_modules/torch_tensorrt/ts/_compile_spec.html @@ -9,7 +9,7 @@ - torch_tensorrt.ts._compile_spec — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.ts._compile_spec — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -490,8 +489,9 @@

    Source code for torch_tensorrt.ts._compile_spec

    < from __future__ import annotations from copy import deepcopy -from typing import Any, Dict, List, Optional, Set +from typing import Any, Dict, List, Optional, Set, Union +import tensorrt as trt import torch import torch_tensorrt._C.ts as _ts_C from torch_tensorrt import _C @@ -502,8 +502,6 @@

    Source code for torch_tensorrt.ts._compile_spec

    < from torch_tensorrt.ts._Input import TorchScriptInput from torch_tensorrt.ts.logging import Level, log -import tensorrt as trt - def _internal_input_to_torch_class_input(i: _C.Input) -> torch.classes.tensorrt._Input: clone = torch.classes.tensorrt._Input() @@ -799,7 +797,7 @@

    Source code for torch_tensorrt.ts._compile_spec

    < device: Optional[torch.device | Device] = None, disable_tf32: bool = False, sparse_weights: bool = False, - enabled_precisions: Optional[Set[torch.dtype | dtype]] = None, + enabled_precisions: Optional[Set[Union[torch.dtype, dtype]]] = None, refit: bool = False, debug: bool = False, capability: EngineCapability = EngineCapability.STANDARD, diff --git a/docs/_modules/torch_tensorrt/ts/_compiler.html b/docs/_modules/torch_tensorrt/ts/_compiler.html index ac653830ec..6e4eaa9a3d 100644 --- a/docs/_modules/torch_tensorrt/ts/_compiler.html +++ b/docs/_modules/torch_tensorrt/ts/_compiler.html @@ -9,7 +9,7 @@ - torch_tensorrt.ts._compiler — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torch_tensorrt.ts._compiler — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -490,7 +489,7 @@

    Source code for torch_tensorrt.ts._compiler

     from __future__ import annotations
     
     import warnings
    -from typing import Any, List, Optional, Sequence, Set, Tuple
    +from typing import Any, List, Optional, Sequence, Set, Tuple, Union
     
     import torch
     import torch_tensorrt._C.ts as _C
    @@ -507,7 +506,7 @@ 

    Source code for torch_tensorrt.ts._compiler

         device: Device = Device._current_device(),
         disable_tf32: bool = False,
         sparse_weights: bool = False,
    -    enabled_precisions: Optional[Set[torch.dtype | dtype]] = None,
    +    enabled_precisions: Optional[Set[Union[torch.dtype, dtype]]] = None,
         refit: bool = False,
         debug: bool = False,
         capability: EngineCapability = EngineCapability.STANDARD,
    @@ -661,7 +660,7 @@ 

    Source code for torch_tensorrt.ts._compiler

         device: Device = Device._current_device(),
         disable_tf32: bool = False,
         sparse_weights: bool = False,
    -    enabled_precisions: Optional[Set[torch.dtype | dtype]] = None,
    +    enabled_precisions: Optional[Set[Union[torch.dtype, dtype]]] = None,
         refit: bool = False,
         debug: bool = False,
         capability: EngineCapability = EngineCapability.STANDARD,
    diff --git a/docs/_modules/torch_tensorrt/ts/ptq.html b/docs/_modules/torch_tensorrt/ts/ptq.html
    index f5b2e78d2c..765bf42bde 100644
    --- a/docs/_modules/torch_tensorrt/ts/ptq.html
    +++ b/docs/_modules/torch_tensorrt/ts/ptq.html
    @@ -9,7 +9,7 @@
       
       
       
    -  torch_tensorrt.ts.ptq — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation
    +  torch_tensorrt.ts.ptq — Torch-TensorRT v2.9.0.dev0+92a6908 documentation
       
     
       
    @@ -290,7 +290,7 @@
                   
                   
                     
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/_sources/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_macros.h.rst.txt b/docs/_sources/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_macros.h.rst.txt index e9d90659f9..2763a898e7 100644 --- a/docs/_sources/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_macros.h.rst.txt +++ b/docs/_sources/_cpp_api/program_listing_file_cpp_include_torch_tensorrt_macros.h.rst.txt @@ -36,7 +36,7 @@ Program Listing for File macros.h #define STR(x) XSTR(x) #define TORCH_TENSORRT_MAJOR_VERSION 2 - #define TORCH_TENSORRT_MINOR_VERSION 6 + #define TORCH_TENSORRT_MINOR_VERSION 9 #define TORCH_TENSORRT_PATCH_VERSION 0 #define TORCH_TENSORRT_VERSION \ STR(TORCH_TENSORRT_MAJOR_VERSION) \ diff --git a/docs/_sources/contributors/partitioning.rst.txt b/docs/_sources/contributors/partitioning.rst.txt index 8c83ddcadc..77880cef6a 100644 --- a/docs/_sources/contributors/partitioning.rst.txt +++ b/docs/_sources/contributors/partitioning.rst.txt @@ -239,3 +239,16 @@ In this example we will collect the arithmetic ops in a TensorRT segment and the In some cases this approach may create adjacent segments in the partition which have the same target. As a clean-up step we can consolidate these adjacent segments to further reduce the number of segments in the final partition. The merge segments step identifies a list of segments that are adjacent in the graph, have the same target, and are not marked as `do_not_merge`. The nodes from these segments will be combined into a single new segment that will replace the merged segments in the partition. The `do_not_merge` marking is used to prevent merging of segments created for conditional nodes and loops that are handled as special cases in graph stitching and should not be merged with adjacent segments of the same type. 
+ + +Hierarchical Partitioner for Dynamo +=================================== + +The Hierarchical Partitioner is an extension to the standard TensorRT partitioner that allows for more sophisticated partitioning strategies by considering backend priority and operator support. This is particularly useful when you want to distribute different parts of your model across multiple backends based on their capabilities and priorities. + +We currently support hierarchical adjacency partitioner, which extends the standard adjacency partitioner with the following capabilities: + +1. **Backend priority ordering**: Assign operators to backends based on a priority order, ensuring that operators are assigned to the highest-priority backend that supports them. +2. **Multi-backend support**: Distribute model execution across multiple backends based on operator support. + +Please refer to `hierarchical_partitioner_example `_ for more details. diff --git a/docs/_sources/getting_started/jetpack.rst.txt b/docs/_sources/getting_started/jetpack.rst.txt index ddbf89dc63..edfe1ae52e 100644 --- a/docs/_sources/getting_started/jetpack.rst.txt +++ b/docs/_sources/getting_started/jetpack.rst.txt @@ -1,119 +1,122 @@ -.. _Torch_TensorRT_in_JetPack_6.1 +.. _Torch_TensorRT_in_JetPack: -Overview -################## - -JetPack 6.1 ---------------------- -Nvida JetPack 6.1 is the latest production release ofJetPack 6. 
-With this release it incorporates: -CUDA 12.6 -TensorRT 10.3 -cuDNN 9.3 -DLFW 24.09 +Torch-TensorRT in JetPack +############################# -You can find more details for the JetPack 6.1: +Overview +******** - * https://docs.nvidia.com/jetson/jetpack/release-notes/index.html - * https://docs.nvidia.com/deeplearning/frameworks/install-pytorch-jetson-platform/index.html +JetPack 6.2 +=========== +NVIDIA JetPack 6.2 is the latest production release for Jetson platforms, featuring: +- CUDA 12.6 +- TensorRT 10.3 +- cuDNN 9.3 +For detailed information about JetPack 6.2, refer to: +* `JetPack 6.2 Release Notes `_ +* `PyTorch for Jetson Platform `_ Prerequisites -~~~~~~~~~~~~~~ +************* +System Preparation +================== +1. **Flash your Jetson device** -Ensure your jetson developer kit has been flashed with the latest JetPack 6.1. You can find more details on how to flash Jetson board via sdk-manager: + with JetPack 6.2 using SDK Manager: + - `SDK Manager Guide `_ - * https://developer.nvidia.com/sdk-manager +2. **Verify JetPack installation**: + .. code-block:: sh -check the current jetpack version using + apt show nvidia-jetpack -.. code-block:: sh +3. **Install development components**: + .. code-block:: sh - apt show nvidia-jetpack + sudo apt-get update + sudo apt-get install nvidia-jetpack -Ensure you have installed JetPack Dev components. This step is required if you need to build on jetson board. +4. **Confirm CUDA 12.6 installation**: -You can only install the dev components that you require: ex, tensorrt-dev would be the meta-package for all TRT development or install everthing. + .. code-block:: sh -.. code-block:: sh - # install all the nvidia-jetpack dev components - sudo apt-get update - sudo apt-get install nvidia-jetpack + nvcc --version + # If missing or incorrect version: + sudo apt-get install cuda-toolkit-12-6 -Ensure you have cuda 12.6 installed(this should be installed automatically from nvidia-jetpack) +5. 
**Validate cuSPARSELt library**: -.. code-block:: sh + .. code-block:: sh - # check the cuda version - nvcc --version - # if not installed or the version is not 12.6, install via the below cmd: - sudo apt-get update - sudo apt-get install cuda-toolkit-12-6 + # Check library presence + ls /usr/local/cuda/lib64/libcusparseLt.so -Ensure libcusparseLt.so exists at /usr/local/cuda/lib64/: + # Install if missing + wget https://developer.download.nvidia.com/compute/cusparselt/redist/libcusparse_lt/linux-sbsa/libcusparse_lt-linux-sbsa-0.5.2.1-archive.tar.xz + tar xf libcusparse_lt-linux-sbsa-0.5.2.1-archive.tar.xz + sudo cp -a libcusparse_lt-linux-sbsa-0.5.2.1-archive/include/* /usr/local/cuda/include/ + sudo cp -a libcusparse_lt-linux-sbsa-0.5.2.1-archive/lib/* /usr/local/cuda/lib64/ -.. code-block:: sh +Building Torch-TensorRT +*********************** - # if not exist, download and copy to the directory - wget https://developer.download.nvidia.com/compute/cusparselt/redist/libcusparse_lt/linux-sbsa/libcusparse_lt-linux-sbsa-0.5.2.1-archive.tar.xz - tar xf libcusparse_lt-linux-sbsa-0.5.2.1-archive.tar.xz - sudo cp -a libcusparse_lt-linux-sbsa-0.5.2.1-archive/include/* /usr/local/cuda/include/ - sudo cp -a libcusparse_lt-linux-sbsa-0.5.2.1-archive/lib/* /usr/local/cuda/lib64/ +Build Environment Setup +======================= +1. **Install Build Dependencies**: + .. code-block:: sh -Build torch_tensorrt -~~~~~~~~~~~~~~ + wget https://github.com/bazelbuild/bazelisk/releases/download/v1.26.0/bazelisk-linux-arm64 + sudo mv bazelisk-linux-arm64 /usr/bin/bazel + sudo chmod +x /usr/bin/bazel + .. code-block:: sh -Install bazel + apt-get install ninja-build vim libopenblas-dev git -.. code-block:: sh +2. **Install Python dependencies**: - wget -v https://github.com/bazelbuild/bazelisk/releases/download/v1.20.0/bazelisk-linux-arm64 - sudo mv bazelisk-linux-arm64 /usr/bin/bazel - chmod +x /usr/bin/bazel + .. 
code-block:: sh -Install pip and required python packages: - * https://pip.pypa.io/en/stable/installation/ + wget https://bootstrap.pypa.io/get-pip.py + python get-pip.py + python -m pip install pyyaml -.. code-block:: sh +3. **Install PyTorch**: - # install pip - wget https://bootstrap.pypa.io/get-pip.py - python get-pip.py + .. code-block:: sh -.. code-block:: sh - - # install pytorch from nvidia jetson distribution: https://developer.download.nvidia.com/compute/redist/jp/v61/pytorch - python -m pip install torch https://developer.download.nvidia.com/compute/redist/jp/v61/pytorch/torch-2.5.0a0+872d972e41.nv24.08.17622132-cp310-cp310-linux_aarch64.whl - -.. code-block:: sh + # Can only install the torch and torchvision wheel from the JPL repo which is built specifically for JetPack 6.2 + python -m pip install torch==2.7.0 torchvision==0.22.0 --index-url=https://pypi.jetson-ai-lab.dev/jp6/cu126/ - # install required python packages - python -m pip install -r toolchains/jp_workspaces/requirements.txt - # if you want to run the test cases, then install the test required python packages - python -m pip install -r toolchains/jp_workspaces/test_requirements.txt +Building the Wheel +================== +.. code-block:: sh + python setup.py bdist_wheel -Build and Install torch_tensorrt wheel file - +Installation +============ -Since torch_tensorrt version has dependencies on torch version. torch version supported by JetPack6.1 is from DLFW 24.08/24.09(torch 2.5.0). +.. code-block:: sh + # you will be able to find the wheel in the dist directory, has platform name linux_tegra_aarch64 + cd dist + python -m pip install torch_tensorrt-2.8.0.dev0+d8318d8fc-cp310-cp310-linux_tegra_aarch64.whl -Please make sure to build torch_tensorrt wheel file from source release/2.5 branch -(TODO: lanl to update the branch name once release/ngc branch is available) +Post-Installation Verification +============================== -.. 
code-block:: sh +Verify installation by importing in Python: +.. code-block:: python - cuda_version=$(nvcc --version | grep Cuda | grep release | cut -d ',' -f 2 | sed -e 's/ release //g') - export TORCH_INSTALL_PATH=$(python -c "import torch, os; print(os.path.dirname(torch.__file__))") - export SITE_PACKAGE_PATH=${TORCH_INSTALL_PATH::-6} - export CUDA_HOME=/usr/local/cuda-${cuda_version}/ - # replace the MODULE.bazel with the jetpack one - cat toolchains/jp_workspaces/MODULE.bazel.tmpl | envsubst > MODULE.bazel - # build and install torch_tensorrt wheel file - python setup.py install --user + # verify whether the torch-tensorrt can be imported + import torch + import torch_tensorrt + print(torch_tensorrt.__version__) + # verify whether the examples can be run + python examples/dynamo/torch_compile_resnet_example.py diff --git a/docs/_sources/index.rst.txt b/docs/_sources/index.rst.txt index 67fbdc56f5..4d28d77640 100644 --- a/docs/_sources/index.rst.txt +++ b/docs/_sources/index.rst.txt @@ -140,11 +140,10 @@ Model Zoo * :ref:`torch_compile_resnet` * :ref:`torch_compile_transformer` * :ref:`torch_compile_stable_diffusion` +* :ref:`compile_hf_models` * :ref:`torch_compile_gpt2` * :ref:`torch_export_gpt2` -* :ref:`torch_export_llama2` * :ref:`torch_export_sam2` -* :ref:`torch_export_flux_dev` * :ref:`notebooks` .. 
toctree:: @@ -155,11 +154,10 @@ Model Zoo tutorials/_rendered_examples/dynamo/torch_compile_resnet_example tutorials/_rendered_examples/dynamo/torch_compile_transformers_example tutorials/_rendered_examples/dynamo/torch_compile_stable_diffusion + tutorials/compile_hf_models tutorials/_rendered_examples/distributed_inference/data_parallel_gpt2 tutorials/_rendered_examples/distributed_inference/data_parallel_stable_diffusion tutorials/_rendered_examples/dynamo/torch_compile_gpt2 - tutorials/_rendered_examples/dynamo/torch_export_gpt2 - tutorials/_rendered_examples/dynamo/torch_export_llama2 tutorials/_rendered_examples/dynamo/torch_export_sam2 tutorials/_rendered_examples/dynamo/torch_export_flux_dev tutorials/notebooks diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/aot_plugin.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/aot_plugin.rst.txt new file mode 100644 index 0000000000..522a1fc707 --- /dev/null +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/aot_plugin.rst.txt @@ -0,0 +1,227 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "tutorials/_rendered_examples/dynamo/aot_plugin.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_tutorials__rendered_examples_dynamo_aot_plugin.py: + + +.. _aot_plugin: +Automatically Generate a TensorRT AOT Plugin +=================================================================== +We are going to demonstrate how to automatically generate a plugin for a custom kernel using Torch-TensorRT using +the new Python based plugin system in TensorRT 10.7. + +Torch-TensorRT supports falling back to PyTorch implementations of operations in the case that Torch-TensorRT +does not know how to compile them in TensorRT. 
However, this comes at the cost of a graph break and will reduce the performance of the model. +The easiest way to fix lack of support for ops is by adding a decomposition (see: +`Writing lowering passes for the Dynamo frontend `_) - which defines the operator +in terms of PyTorch ops that are supported in Torch-TensorRT or a converter (see: +`Writing converters for the Dynamo frontend `_) - which defines the operator in terms of TensorRT operators. + +In some cases there isn't a great way to do either of these, perhaps because the operator is a custom kernel that is not part of standard PyTorch or +TensorRT cannot support it natively. + +For these cases, it is possible to use a TensorRT plugin to replace the operator **inside** the TensorRT engine, thereby avoiding +the performance and resource overhead from a graph break. + +Previously this involved a complex process in not only building a performant kernel but setting it up to run in TensorRT (see: `Using Custom Kernels within TensorRT Engines with Torch-TensorRT `_). +As of TensorRT 10.7, there is a new Python native plugin system which greatly streamlines this process. This +plugin system also allows Torch-TensorRT to automatically generate the necessary conversion code to convert the +operation in PyTorch to TensorRT. + +In addition, Torch-TensorRT provides automatic generation of TensorRT plugin feature (see: `Automatically Generate a Plugin for a Custom Kernel `_). +However, the above method generates a JIT plugin that might not satisfy user's performance requirements. +To support that, Torch-TensorRT provides auto generation of TensorRT AOT Plugin which wraps a function to define an Ahead-of-Time (AOT) implementation for a plugin already registered. +This provides a performance boost compared to the JIT plugin. + +.. GENERATED FROM PYTHON SOURCE LINES 31-175 + +.. 
code-block:: python + + + import argparse + from typing import Tuple, Union + + import tensorrt as trt + import tensorrt.plugin as trtp + import torch + import torch_tensorrt + import triton + import triton.language as tl + + trt_logger = trt.Logger(trt.Logger.VERBOSE) + + + @triton.jit + def add_one_kernel(x_ptr, n_elements, y_ptr, BLOCK_SIZE: tl.constexpr): + pid = tl.program_id(0) + block_start = pid * BLOCK_SIZE + offsets = block_start + tl.arange(0, BLOCK_SIZE) + mask = offsets < n_elements + x = tl.load(x_ptr + offsets, mask=mask) + output = x + 1 + tl.store(y_ptr + offsets, output, mask=mask) + + + @torch.library.custom_op("my::add_one", mutates_args=()) # type: ignore[misc] + def add_one(X: torch.Tensor) -> torch.Tensor: + # Ensure the tensors are on the GPU + assert X.is_cuda + + # Create output tensor + Y = torch.empty_like(X) + + # Define block size + BLOCK_SIZE = 256 + + # Grid of programs + grid = lambda meta: (triton.cdiv(X.numel(), meta["BLOCK_SIZE"]),) + + # Launch the kernel + add_one_kernel[grid](X, X.numel(), Y, BLOCK_SIZE=BLOCK_SIZE) + + return Y + + + @torch.library.register_fake("my::add_one") + def _(X: torch.Tensor) -> torch.Tensor: + return X + + + @trtp.register("my::add_one") + def add_plugin_desc(X: trtp.TensorDesc) -> Tuple[trtp.TensorDesc]: + return X.like() + + + @trtp.aot_impl("my::add_one") + def add_plugin_aot_impl( + X: trtp.TensorDesc, outputs: Tuple[trtp.TensorDesc], tactic: int + ) -> Tuple[ + Union[str, bytes], Union[str, bytes], trtp.KernelLaunchParams, trtp.SymExprs + ]: + type_str = "fp32" if X.dtype == trt.float32 else "fp16" + + block_size = 256 + src = triton.compiler.ASTSource( + fn=add_one_kernel, + signature={ + "x_ptr": f"*{type_str}", + "n_elements": "i32", + "y_ptr": f"*{type_str}", + "BLOCK_SIZE": "constexpr", + }, + constants={ + "BLOCK_SIZE": block_size, + }, + ) + + compiled_kernel = triton.compile(src) + + N = X.shape_expr.numel() + launch_params = trtp.KernelLaunchParams() + + # grid dims + 
launch_params.grid_x = trtp.cdiv(N, block_size) + # block dims + launch_params.block_x = compiled_kernel.metadata.num_warps * 32 + # shared memory + launch_params.shared_mem = compiled_kernel.metadata.shared + + extra_args = trtp.SymIntExprs(1) + extra_args[0] = trtp.SymInt32(N) + + return ( + compiled_kernel.metadata.name, + compiled_kernel.asm["ptx"], + launch_params, + extra_args, + ) + + + torch_tensorrt.dynamo.conversion.plugins.generate_plugin_converter( + "my::add_one", + supports_dynamic_shapes=False, + requires_output_allocator=False, + use_aot_if_available=True, + ) + + + class MyModel(torch.nn.Module): + def __init__(self): + super().__init__() + + def forward(self, X: torch.Tensor) -> torch.Tensor: + res = torch.ops.my.add_one.default(X) + + return res + + + if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "--aot", action="store_true", help="Try to use AOT compilation", default=False + ) + args = parser.parse_args() + + my_model = MyModel().to("cuda") + m = torch.full((64, 64), 2, device="cuda", dtype=torch.float) + + assert my_model(X=m)[0][0] == 3.0 + + with torch_tensorrt.logging.debug(): + trt_inputs = [m] + model_trt = torch_tensorrt.compile( + my_model, + inputs=trt_inputs, + min_block_size=1, + ) + print("Model compiled successfully!") + print("Running inference with compiled model...") + for i in range(10): + res = model_trt(m) + assert torch.allclose(res, my_model(m)), "Results do not match!" + + print("Inference successful!") + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** ( 0 minutes 0.000 seconds) + + +.. _sphx_glr_download_tutorials__rendered_examples_dynamo_aot_plugin.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + + + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: aot_plugin.py ` + + .. 
container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: aot_plugin.ipynb ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/auto_generate_converters.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/auto_generate_converters.rst.txt index 2767c6856c..22754cb3f7 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/auto_generate_converters.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/auto_generate_converters.rst.txt @@ -213,7 +213,7 @@ Now we can use our custom operator in a model and compile it with Torch-TensorRT We can see that the custom operator is used as one of the operations in the forward pass of the model. The process of compiling the model at this point is identical to standard Torch-TensorRT usage. -.. GENERATED FROM PYTHON SOURCE LINES 161-185 +.. GENERATED FROM PYTHON SOURCE LINES 161-183 .. 
code-block:: python @@ -233,9 +233,7 @@ The process of compiling the model at this point is identical to standard Torch- n = torch.full((64, 64), 3, device="cuda", dtype=torch.float) with torch_tensorrt.logging.errors(): - model_trt = torch_tensorrt.compile( - my_model, inputs=[m, n], debug=True, min_block_size=1 - ) + model_trt = torch_tensorrt.compile(my_model, inputs=[m, n], min_block_size=1) for i in range(300): res = model_trt(m, n) assert torch.allclose(res, my_model(m, n)) diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/auto_generate_plugins.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/auto_generate_plugins.rst.txt index 4ecc5f949a..eb7133c18a 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/auto_generate_plugins.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/auto_generate_plugins.rst.txt @@ -169,7 +169,7 @@ Now we can use our custom operator in a model and compile it with Torch-TensorRT We can see that the custom operator is used as one of the operations in the forward pass of the model. The process of compiling the model at this point is identical to standard Torch-TensorRT usage. -.. GENERATED FROM PYTHON SOURCE LINES 131-155 +.. GENERATED FROM PYTHON SOURCE LINES 131-153 .. 
code-block:: python @@ -189,9 +189,7 @@ The process of compiling the model at this point is identical to standard Torch- n = torch.randint(0, 5, (64, 64), device="cuda", dtype=torch.float) with torch_tensorrt.logging.errors(): - model_trt = torch_tensorrt.compile( - my_model, inputs=[m, n], debug=True, min_block_size=1 - ) + model_trt = torch_tensorrt.compile(my_model, inputs=[m, n], min_block_size=1) for i in range(300): res = model_trt(m, n) assert torch.allclose(res, my_model(m, n)) diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/cross_runtime_compilation_for_windows.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/cross_runtime_compilation_for_windows.rst.txt index dfc8544c0c..3f566227e0 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/cross_runtime_compilation_for_windows.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/cross_runtime_compilation_for_windows.rst.txt @@ -80,7 +80,7 @@ According to the argument, it is either cross compile and save resnet model for or load the saved resnet model in Windows ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 57-83 +.. GENERATED FROM PYTHON SOURCE LINES 57-82 .. 
code-block:: python @@ -101,7 +101,6 @@ or load the saved resnet model in Windows "cross runtime compiled model for windows can only be compiled in Linux system" ) compile_spec = { - "debug": True, "min_block_size": 1, } torchtrt.cross_compile_for_windows( diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.rst.txt index 09549cce33..f931372fe3 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.rst.txt @@ -340,7 +340,7 @@ If we try to compile this model with Torch-TensorRT, we can see that (as of Torc Node: torch.ops.torchtrt_ex.triton_circular_pad.default, with layer location: __/triton_circular_pad Note: Some of the above nodes may be supported, but were not included in a TRT graph by the partitioner - Compiled with: CompilationSettings(enabled_precisions={}, debug=False, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=True, hardware_compatible=False) + Compiled with: CompilationSettings(enabled_precisions={}, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, 
gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=True, hardware_compatible=False) Graph Structure: @@ -666,7 +666,7 @@ Finally, we are now able to fully compile our model The graph consists of 2 Total Operators, of which 2 operators are supported, 100.0% coverage - Compiled with: CompilationSettings(enabled_precisions={}, debug=True, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=False, hardware_compatible=False) + Compiled with: CompilationSettings(enabled_precisions={}, workspace_size=0, min_block_size=1, torch_executed_ops=set(), pass_through_build_failures=False, max_aux_streams=None, version_compatible=False, optimization_level=None, use_python_runtime=False, truncate_double=False, use_fast_partitioner=True, enable_experimental_decompositions=False, device=Device(type=DeviceType.GPU, gpu_id=0), require_full_compilation=False, disable_tf32=False, sparse_weights=False, refit=False, engine_capability=, num_avg_timing_iters=1, dla_sram_size=1048576, dla_local_dram_size=1073741824, dla_global_dram_size=536870912, dryrun=False, hardware_compatible=False) Graph Structure: diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/engine_caching_bert_example.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/engine_caching_bert_example.rst.txt index 
cc84e2b968..552204a47d 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/engine_caching_bert_example.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/engine_caching_bert_example.rst.txt @@ -25,7 +25,7 @@ Engine Caching (BERT) Small caching example on BERT. -.. GENERATED FROM PYTHON SOURCE LINES 10-76 +.. GENERATED FROM PYTHON SOURCE LINES 10-75 .. code-block:: python @@ -72,7 +72,6 @@ Small caching example on BERT. "use_python_runtime": False, "enabled_precisions": {torch.float}, "truncate_double": True, - "debug": False, "min_block_size": 1, "immutable_weights": False, "cache_built_engines": cache_built_engines, diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/engine_caching_example.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/engine_caching_example.rst.txt index a21b53f623..1b70b7430a 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/engine_caching_example.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/engine_caching_example.rst.txt @@ -41,7 +41,7 @@ The example uses a pre-trained ResNet18 model and shows the differences between compilation without caching, with caching enabled, and when reusing cached engines. -.. GENERATED FROM PYTHON SOURCE LINES 26-52 +.. GENERATED FROM PYTHON SOURCE LINES 26-51 .. code-block:: python @@ -61,7 +61,6 @@ and when reusing cached engines. model = models.resnet18(pretrained=True).eval().to("cuda") enabled_precisions = {torch.float} - debug = False min_block_size = 1 use_python_runtime = False @@ -72,7 +71,7 @@ and when reusing cached engines. -.. GENERATED FROM PYTHON SOURCE LINES 53-67 +.. GENERATED FROM PYTHON SOURCE LINES 52-66 Engine Caching for JIT Compilation ---------------------------------- @@ -89,7 +88,7 @@ pull the built engine and **refit** the weights which can reduce compilation tim As such, in order to insert a new engine into the cache (i.e. 
``cache_built_engines=True``), the engine must be refittable (``immutable_weights=False``). See :ref:`refit_engine_example` for more details. -.. GENERATED FROM PYTHON SOURCE LINES 67-118 +.. GENERATED FROM PYTHON SOURCE LINES 66-116 .. code-block:: python @@ -124,7 +123,6 @@ the engine must be refittable (``immutable_weights=False``). See :ref:`refit_eng options={ "use_python_runtime": True, "enabled_precisions": enabled_precisions, - "debug": debug, "min_block_size": min_block_size, "immutable_weights": False, "cache_built_engines": cache_built_engines, @@ -145,7 +143,7 @@ the engine must be refittable (``immutable_weights=False``). See :ref:`refit_eng torch_compile() -.. GENERATED FROM PYTHON SOURCE LINES 119-124 +.. GENERATED FROM PYTHON SOURCE LINES 117-122 Engine Caching for AOT Compilation ---------------------------------- @@ -153,7 +151,7 @@ Similarly to the JIT workflow, AOT workflows can benefit from engine caching. As the same architecture or common subgraphs get recompiled, the cache will pull previously built engines and refit the weights. -.. GENERATED FROM PYTHON SOURCE LINES 124-178 +.. GENERATED FROM PYTHON SOURCE LINES 122-175 .. code-block:: python @@ -191,7 +189,6 @@ previously built engines and refit the weights. tuple(inputs), use_python_runtime=use_python_runtime, enabled_precisions=enabled_precisions, - debug=debug, min_block_size=min_block_size, immutable_weights=False, cache_built_engines=cache_built_engines, @@ -212,7 +209,7 @@ previously built engines and refit the weights. dynamo_compile() -.. GENERATED FROM PYTHON SOURCE LINES 179-195 +.. GENERATED FROM PYTHON SOURCE LINES 176-192 Custom Engine Cache ---------------------- @@ -231,7 +228,7 @@ The blob contains a serialized engine, calling spec data, and weight map informa Below is an example of a custom engine cache implementation that implents a ``RAMEngineCache``. -.. GENERATED FROM PYTHON SOURCE LINES 195-289 +.. GENERATED FROM PYTHON SOURCE LINES 192-285 .. 
code-block:: python @@ -309,7 +306,6 @@ Below is an example of a custom engine cache implementation that implents a ``RA options={ "use_python_runtime": True, "enabled_precisions": enabled_precisions, - "debug": debug, "min_block_size": min_block_size, "immutable_weights": False, "cache_built_engines": cache_built_engines, diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/hierarchical_partitioner_example.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/hierarchical_partitioner_example.rst.txt new file mode 100644 index 0000000000..1f43b1b627 --- /dev/null +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/hierarchical_partitioner_example.rst.txt @@ -0,0 +1,239 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "tutorials/_rendered_examples/dynamo/hierarchical_partitioner_example.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_tutorials__rendered_examples_dynamo_hierarchical_partitioner_example.py: + + +.. _hierarchical_partitioner_example: + +Hierarchical Partitioner Example +================================ + +Basic example on how to use the hierarchical adjacency partitioner function and manually compile the partitioned model. +Not yet available in the compile API. + +.. GENERATED FROM PYTHON SOURCE LINES 11-188 + +.. 
code-block:: python + + + from typing import Any, Callable + + import torch + import torch.nn as nn + import torch_tensorrt + from torch_tensorrt._enums import dtype + from torch_tensorrt.dynamo import partitioning + from torch_tensorrt.dynamo._compiler import convert_module + from torch_tensorrt.dynamo.conversion._ConverterRegistry import ( + DYNAMO_CONVERTERS as CONVERTERS, + ) + from torch_tensorrt.dynamo.lowering import ( + get_decompositions, + pre_export_lowering, + ) + from torch_tensorrt.dynamo.partitioning._hierarchical_partitioner import ( + hierarchical_adjacency_partition, + ) + from torch_tensorrt.dynamo.utils import ( + get_output_metadata, + ) + from torchvision import models + + + class InductorModule(torch.nn.Module): # type: ignore[misc] + """Wrapper module for inductor compiled function.""" + + def __init__(self, func: Callable[..., Any]) -> None: + super().__init__() + self.func = func + + def forward(self, *args: Any, **kwargs: Any) -> Any: + return self.func(*args, **kwargs) + + + class SimpleModel(nn.Module): + def __init__(self): + super().__init__() + self.conv1 = nn.Conv2d(3, 64, kernel_size=3, padding=1) + self.conv2 = nn.Conv2d(64, 128, kernel_size=3, padding=1) + self.bn1 = nn.BatchNorm2d(64) + self.bn2 = nn.BatchNorm2d(128) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = torch.relu(x) + x = self.conv2(x) + x = self.bn2(x) + x = torch.relu(x) + return x + + + def main(): + # Create model + model = SimpleModel().cuda() + # model = models.efficientnet_b0(pretrained=True).cuda() + model = model.eval() + + # Create example input + example_input = torch.randn(1, 3, 224, 224).cuda() + + exported_program = torch.export.export(model, (example_input,)) + exported_program = pre_export_lowering(exported_program) + exported_program = exported_program.run_decompositions(get_decompositions()) + + gm = exported_program.module() + + print("Original Model Structure:\n", gm) + + original_output = model(example_input) + + # 1. 
Partition the model into blocks that can be executed by different backends + partitioned_model, op_support = hierarchical_adjacency_partition( + gm, + min_block_size=1, + backend_priority=["inductor", "tensorrt"], + backend_support_map={ + "inductor": { + "torch.ops.aten.convolution.default", + }, + "tensorrt": CONVERTERS.keys(), + }, + torch_executed_ops={ + "torch.ops.aten._native_batch_norm_legit_no_training.default" + }, + require_full_compilation=False, + skip_fusion=True, + ) + + print("1. Partitioned Model Structure:\n", partitioned_model) + + # 2. Compile each submodule with the corresponding backend + submodule_node_dict = {} + for node in partitioned_model.graph.nodes: + if "_run_on_acc" not in node.name: + continue + submodule_node_dict[node.name] = node + + # Store compiled replicas of Torch subgraphs + compiled_modules = {} + + for name, _ in partitioned_model.named_children(): + submodule = getattr(partitioned_model, name) + if not isinstance(submodule, torch.fx.graph_module.GraphModule): + continue + + if "_run_on_acc" not in name: + submodule.to("cuda") + continue + + if name not in submodule_node_dict: + raise ValueError( + f"node_name: {name} does not exist in the submodule node dictionary" + ) + + # set the submodule metadata back to the parent module_node + metadata_list = get_output_metadata(submodule) + assert len(metadata_list) > 0 + metadata_keys = ["val", "tensor_meta"] + for key in metadata_keys: + if key not in submodule_node_dict[name].meta: + meta_val_list = [ + metadata[key] for metadata in metadata_list if key in metadata + ] + submodule_node_dict[name].meta[key] = meta_val_list + break + + # Get the submodule inputs for min, opt, max shapes of the graph inputs + submodule_inputs = partitioning.construct_submodule_inputs(submodule) + assert submodule_inputs is not None + + # compile submodule with pytorch inductor backend + if "_run_on_acc_inductor" in name: + sub_inputs = [] + for input in submodule_inputs: + sub_input = 
input.torch_tensor.to( + dtype.to(input.dtype, t=torch.dtype) + ).cuda() + sub_inputs.append(sub_input) + + compiled_func = torch._inductor.compile( + submodule, + sub_inputs, + ) + # Wrap the compiled function to be a torch.nn.Module + compiled_submodule = InductorModule(compiled_func) + + # compile submodule with tensorrt backend + elif "_run_on_acc_tensorrt" in name: + compiled_submodule = convert_module( + submodule, + submodule_inputs, + name=name, + ) + else: + raise ValueError(f"Unknown backend for submodule: {name}") + + compiled_modules[name] = compiled_submodule + + # Replace all FX Modules with compiled Modules + for name, compiled_module in compiled_modules.items(): + setattr(partitioned_model, name, compiled_module) + + print("2. Compiled Model Structure:\n", partitioned_model) + + with torch.no_grad(): + partitioned_output = partitioned_model(example_input) + print( + "3. Verify that Partitioned output == Original output:", + torch.allclose(partitioned_output, original_output, 1e-2, 1e-2), + ) + + + if __name__ == "__main__": + main() + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** ( 0 minutes 0.000 seconds) + + +.. _sphx_glr_download_tutorials__rendered_examples_dynamo_hierarchical_partitioner_example.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + + + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: hierarchical_partitioner_example.py ` + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: hierarchical_partitioner_example.ipynb ` + + +.. only:: html + + .. 
rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/index.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/index.rst.txt index 8ee9bea380..fb2709eaa1 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/index.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/index.rst.txt @@ -138,35 +138,35 @@ Model Zoo .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_torch_export_cudagraphs_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_engine_caching_bert_example_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_torch_export_cudagraphs.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_engine_caching_bert_example.py` .. raw:: html -
    Torch Export with Cudagraphs
    +
    Engine Caching (BERT)
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_engine_caching_bert_example_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_torch_export_cudagraphs_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_engine_caching_bert_example.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_torch_export_cudagraphs.py` .. raw:: html -
    Engine Caching (BERT)
    +
    Torch Export with Cudagraphs
    @@ -223,120 +223,120 @@ Model Zoo .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_torch_export_gpt2_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_auto_generate_converters_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_torch_export_gpt2.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_auto_generate_converters.py` .. raw:: html -
    Compiling GPT2 using the dynamo backend
    +
    Automatically Generate a Converter for a Custom Kernel
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_torch_export_llama2_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_auto_generate_plugins_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_torch_export_llama2.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_auto_generate_plugins.py` .. raw:: html -
    Compiling Llama2 using the dynamo backend
    +
    Automatically Generate a Plugin for a Custom Kernel
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_auto_generate_converters_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_converter_overloading_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_auto_generate_converters.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_converter_overloading.py` .. raw:: html -
    Automatically Generate a Converter for a Custom Kernel
    +
    Overloading Torch-TensorRT Converters with Custom Converters
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_auto_generate_plugins_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_aot_plugin_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_auto_generate_plugins.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_aot_plugin.py` .. raw:: html -
    Automatically Generate a Plugin for a Custom Kernel
    +
    Torch-TensorRT supports falling back to PyTorch implementations of operations in the case that Torch-TensorRT
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_converter_overloading_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_hierarchical_partitioner_example_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_converter_overloading.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_hierarchical_partitioner_example.py` .. raw:: html -
    Overloading Torch-TensorRT Converters with Custom Converters
    +
    Hierarchical Partitioner Example
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_weight_streaming_example_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_mutable_torchtrt_module_example_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_weight_streaming_example.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_mutable_torchtrt_module_example.py` .. raw:: html -
    Weight Streaming
    +
    Mutable Torch TensorRT Module
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_mutable_torchtrt_module_example_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_weight_streaming_example_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_mutable_torchtrt_module_example.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_weight_streaming_example.py` .. raw:: html -
    Mutable Torch TensorRT Module
    +
    Weight Streaming
    @@ -439,18 +439,18 @@ Model Zoo /tutorials/_rendered_examples/dynamo/torch_compile_transformers_example /tutorials/_rendered_examples/dynamo/torch_compile_gpt2 /tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage - /tutorials/_rendered_examples/dynamo/torch_export_cudagraphs /tutorials/_rendered_examples/dynamo/engine_caching_bert_example + /tutorials/_rendered_examples/dynamo/torch_export_cudagraphs /tutorials/_rendered_examples/dynamo/pre_allocated_output_example /tutorials/_rendered_examples/dynamo/torch_compile_resnet_example /tutorials/_rendered_examples/dynamo/torch_export_flux_dev - /tutorials/_rendered_examples/dynamo/torch_export_gpt2 - /tutorials/_rendered_examples/dynamo/torch_export_llama2 /tutorials/_rendered_examples/dynamo/auto_generate_converters /tutorials/_rendered_examples/dynamo/auto_generate_plugins /tutorials/_rendered_examples/dynamo/converter_overloading - /tutorials/_rendered_examples/dynamo/weight_streaming_example + /tutorials/_rendered_examples/dynamo/aot_plugin + /tutorials/_rendered_examples/dynamo/hierarchical_partitioner_example /tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example + /tutorials/_rendered_examples/dynamo/weight_streaming_example /tutorials/_rendered_examples/dynamo/torch_export_sam2 /tutorials/_rendered_examples/dynamo/vgg16_ptq /tutorials/_rendered_examples/dynamo/engine_caching_example diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/llama2_flashinfer_rmsnorm.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/llama2_flashinfer_rmsnorm.rst.txt index c7312ea5fd..740ed8f468 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/llama2_flashinfer_rmsnorm.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/llama2_flashinfer_rmsnorm.rst.txt @@ -33,7 +33,7 @@ Key features: This example illustrates advanced extensibility in Torch-TensorRT through automatic plugin generation and operator lowering customization. -.. 
GENERATED FROM PYTHON SOURCE LINES 17-259 +.. GENERATED FROM PYTHON SOURCE LINES 17-258 .. code-block:: python @@ -272,7 +272,6 @@ This example illustrates advanced extensibility in Torch-TensorRT through automa disable_tf32=True, use_explicit_typing=False, use_fp32_acc=True, - # debug=True, ) input_ids = input_ids.to(DEVICE) diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example.rst.txt index e3c00f2c64..52765a7bef 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example.rst.txt @@ -35,7 +35,7 @@ In this tutorial, we are going to walk through 3. Integration with Huggingface pipeline in LoRA use case 4. Usage of dynamic shape with Mutable Torch TensorRT Module -.. GENERATED FROM PYTHON SOURCE LINES 21-30 +.. GENERATED FROM PYTHON SOURCE LINES 21-31 .. code-block:: python @@ -43,23 +43,24 @@ In this tutorial, we are going to walk through import torch import torch_tensorrt as torch_trt import torchvision.models as models + from diffusers import DiffusionPipeline np.random.seed(5) torch.manual_seed(5) inputs = [torch.rand((1, 3, 224, 224)).to("cuda")] -.. GENERATED FROM PYTHON SOURCE LINES 31-33 +.. GENERATED FROM PYTHON SOURCE LINES 32-34 Initialize the Mutable Torch TensorRT Module with settings. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 33-44 +.. GENERATED FROM PYTHON SOURCE LINES 34-44 .. code-block:: python settings = { - "use_python": False, + "use_python_runtime": False, "enabled_precisions": {torch.float32}, "immutable_weights": False, } @@ -69,7 +70,6 @@ Initialize the Mutable Torch TensorRT Module with settings. # You can use the mutable module just like the original pytorch module. The compilation happens while you first call the mutable module. 
mutable_module(*inputs) - .. GENERATED FROM PYTHON SOURCE LINES 45-47 Make modifications to the mutable module. @@ -118,18 +118,16 @@ Saving Mutable Torch TensorRT Module Stable Diffusion with Huggingface ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 75-146 +.. GENERATED FROM PYTHON SOURCE LINES 75-144 .. code-block:: python - from diffusers import DiffusionPipeline with torch.no_grad(): settings = { "use_python_runtime": True, "enabled_precisions": {torch.float16}, - "debug": True, "immutable_weights": False, } @@ -156,7 +154,7 @@ Stable Diffusion with Huggingface "text_embeds": {0: BATCH}, "time_ids": {0: BATCH}, }, - "return_dict": False, + "return_dict": None, } pipe.unet.set_expected_dynamic_shape_range( args_dynamic_shapes, kwargs_dynamic_shapes @@ -194,7 +192,7 @@ Stable Diffusion with Huggingface -.. GENERATED FROM PYTHON SOURCE LINES 147-153 +.. GENERATED FROM PYTHON SOURCE LINES 145-151 Use Mutable Torch TensorRT module with dynamic shape ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -203,7 +201,7 @@ and should not omit any entries (except None in the kwarg_inputs). If there is a If the dynamic shape is not required for an input, an empty dictionary should be given as the shape hint for that input. Note that you should exclude keyword arguments with value None as those will be filtered out. -.. GENERATED FROM PYTHON SOURCE LINES 153-196 +.. GENERATED FROM PYTHON SOURCE LINES 151-194 .. 
code-block:: python @@ -238,7 +236,7 @@ Note that you should exclude keyword arguments with value None as those will be }, # a's shape does not change so we give it an empty dict } # Export the model first with custom dynamic shape constraints - model = torch_trt.MutableTorchTensorRTModule(model, debug=True, min_block_size=1) + model = torch_trt.MutableTorchTensorRTModule(model, min_block_size=1) model.set_expected_dynamic_shape_range(args_dynamic_shapes, kwarg_dynamic_shapes) # Compile model(*inputs, **kwargs) @@ -251,13 +249,13 @@ Note that you should exclude keyword arguments with value None as those will be model(*inputs_2, **kwargs_2) -.. GENERATED FROM PYTHON SOURCE LINES 197-200 +.. GENERATED FROM PYTHON SOURCE LINES 195-198 Use Mutable Torch TensorRT module with persistent cache ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Leveraging engine caching, we are able to shortcut the engine compilation and save much time. -.. GENERATED FROM PYTHON SOURCE LINES 200-245 +.. GENERATED FROM PYTHON SOURCE LINES 198-242 .. code-block:: python @@ -276,7 +274,6 @@ Leveraging engine caching, we are able to shortcut the engine compilation and sa model, use_python_runtime=True, enabled_precisions={torch.float}, - debug=True, min_block_size=1, immutable_weights=False, cache_built_engines=True, diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/refit_engine_example.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/refit_engine_example.rst.txt index c0acbf4cb8..ffc06f716d 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/refit_engine_example.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/refit_engine_example.rst.txt @@ -80,7 +80,7 @@ these setttings will not be able to be refit. In this case we are going to compile a ResNet18 model with randomly initialized weights and save it. -.. GENERATED FROM PYTHON SOURCE LINES 55-80 +.. GENERATED FROM PYTHON SOURCE LINES 55-78 .. 
code-block:: python @@ -88,7 +88,6 @@ In this case we are going to compile a ResNet18 model with randomly initialized model = models.resnet18(pretrained=False).eval().to("cuda") exp_program = torch.export.export(model, tuple(inputs)) enabled_precisions = {torch.float} - debug = False workspace_size = 20 << 30 min_block_size = 0 use_python_runtime = False @@ -98,7 +97,6 @@ In this case we are going to compile a ResNet18 model with randomly initialized tuple(inputs), use_python_runtime=use_python_runtime, enabled_precisions=enabled_precisions, - debug=debug, min_block_size=min_block_size, torch_executed_ops=torch_executed_ops, immutable_weights=False, @@ -110,7 +108,7 @@ In this case we are going to compile a ResNet18 model with randomly initialized -.. GENERATED FROM PYTHON SOURCE LINES 81-88 +.. GENERATED FROM PYTHON SOURCE LINES 79-86 Refit the Program with Pretrained Weights ------------------------------------------ @@ -120,7 +118,7 @@ refit the model with the pretrained weights. This is done by setting up another with the target weights and exporting it as an ExportedProgram. Then the ``refit_module_weights`` function is used to update the weights of the compiled module with the new weights. -.. GENERATED FROM PYTHON SOURCE LINES 88-112 +.. GENERATED FROM PYTHON SOURCE LINES 86-111 .. code-block:: python @@ -140,6 +138,7 @@ function is used to update the weights of the compiled module with the new weigh ) # Check the output + model2.to("cuda") expected_outputs, refitted_outputs = exp_program2.module()(*inputs), new_trt_gm(*inputs) for expected_output, refitted_output in zip(expected_outputs, refitted_outputs): assert torch.allclose( @@ -149,7 +148,7 @@ function is used to update the weights of the compiled module with the new weigh print("Refit successfully!") -.. GENERATED FROM PYTHON SOURCE LINES 113-141 +.. 
GENERATED FROM PYTHON SOURCE LINES 112-140 Advanced Usage ----------------------------- diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage.rst.txt index 1bcf8a50c5..132b701731 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage.rst.txt @@ -109,7 +109,7 @@ Compilation with `torch.compile` Using Custom Settings model_half = Model().eval().cuda() -.. GENERATED FROM PYTHON SOURCE LINES 66-92 +.. GENERATED FROM PYTHON SOURCE LINES 66-91 .. code-block:: python @@ -123,7 +123,6 @@ Compilation with `torch.compile` Using Custom Settings # py/torch_tensorrt/dynamo/_settings.py backend_kwargs = { "enabled_precisions": {torch.half}, - "debug": True, "min_block_size": 2, "torch_executed_ops": {"torch.ops.aten.sub.Tensor"}, "optimization_level": 4, @@ -140,12 +139,12 @@ Compilation with `torch.compile` Using Custom Settings optimized_model_custom(*sample_inputs_half) -.. GENERATED FROM PYTHON SOURCE LINES 93-95 +.. GENERATED FROM PYTHON SOURCE LINES 92-94 Cleanup ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 95-99 +.. GENERATED FROM PYTHON SOURCE LINES 94-98 .. code-block:: python @@ -154,7 +153,7 @@ Cleanup torch._dynamo.reset() -.. GENERATED FROM PYTHON SOURCE LINES 100-109 +.. 
GENERATED FROM PYTHON SOURCE LINES 99-108 Cuda Driver Error Note ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_resnet_example.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_resnet_example.rst.txt index d57a490ff7..c83d0c9452 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_resnet_example.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_resnet_example.rst.txt @@ -55,7 +55,7 @@ Imports and Model Definition Optional Input Arguments to `torch_tensorrt.compile` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 27-44 +.. GENERATED FROM PYTHON SOURCE LINES 27-42 .. code-block:: python @@ -63,8 +63,6 @@ Optional Input Arguments to `torch_tensorrt.compile` # Enabled precision for TensorRT optimization enabled_precisions = {torch.half} - # Whether to print verbose logs - debug = True # Workspace size for TensorRT workspace_size = 20 << 30 @@ -77,12 +75,12 @@ Optional Input Arguments to `torch_tensorrt.compile` torch_executed_ops = {} -.. GENERATED FROM PYTHON SOURCE LINES 45-47 +.. GENERATED FROM PYTHON SOURCE LINES 43-45 Compilation with `torch_tensorrt.compile` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 47-60 +.. GENERATED FROM PYTHON SOURCE LINES 45-57 .. code-block:: python @@ -93,24 +91,23 @@ Compilation with `torch_tensorrt.compile` ir="torch_compile", inputs=inputs, enabled_precisions=enabled_precisions, - debug=debug, workspace_size=workspace_size, min_block_size=min_block_size, torch_executed_ops=torch_executed_ops, ) -.. GENERATED FROM PYTHON SOURCE LINES 61-63 +.. GENERATED FROM PYTHON SOURCE LINES 58-60 Equivalently, we could have run the above via the torch.compile frontend, as so: `optimized_model = torch.compile(model, backend="torch_tensorrt", options={"enabled_precisions": enabled_precisions, ...}); optimized_model(*inputs)` -.. 
GENERATED FROM PYTHON SOURCE LINES 65-67 +.. GENERATED FROM PYTHON SOURCE LINES 62-64 Inference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 67-72 +.. GENERATED FROM PYTHON SOURCE LINES 64-69 .. code-block:: python @@ -120,7 +117,7 @@ Inference new_outputs = optimized_model(*new_inputs) -.. GENERATED FROM PYTHON SOURCE LINES 73-78 +.. GENERATED FROM PYTHON SOURCE LINES 70-75 .. code-block:: python @@ -130,12 +127,12 @@ Inference new_batch_size_outputs = optimized_model(*new_batch_size_inputs) -.. GENERATED FROM PYTHON SOURCE LINES 79-81 +.. GENERATED FROM PYTHON SOURCE LINES 76-78 Avoid recompilation by specifying dynamic shapes before Torch-TRT compilation ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 81-121 +.. GENERATED FROM PYTHON SOURCE LINES 78-117 .. code-block:: python @@ -149,7 +146,6 @@ Avoid recompilation by specifying dynamic shapes before Torch-TRT compilation ir="torch_compile", inputs=inputs_bs8, enabled_precisions=enabled_precisions, - debug=debug, workspace_size=workspace_size, min_block_size=min_block_size, torch_executed_ops=torch_executed_ops, diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_transformers_example.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_transformers_example.rst.txt index f7f6a67020..21a5902ea7 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_transformers_example.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/torch_compile_transformers_example.rst.txt @@ -59,7 +59,7 @@ Imports and Model Definition Optional Input Arguments to `torch_tensorrt.compile` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 31-48 +.. GENERATED FROM PYTHON SOURCE LINES 31-45 .. 
code-block:: python @@ -67,9 +67,6 @@ Optional Input Arguments to `torch_tensorrt.compile` # Enabled precision for TensorRT optimization enabled_precisions = {torch.float} - # Whether to print verbose logs - debug = True - # Workspace size for TensorRT workspace_size = 20 << 30 @@ -81,12 +78,12 @@ Optional Input Arguments to `torch_tensorrt.compile` torch_executed_ops = {} -.. GENERATED FROM PYTHON SOURCE LINES 49-51 +.. GENERATED FROM PYTHON SOURCE LINES 46-48 Compilation with `torch.compile` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 51-70 +.. GENERATED FROM PYTHON SOURCE LINES 48-66 .. code-block:: python @@ -94,7 +91,6 @@ Compilation with `torch.compile` # Define backend compilation keyword arguments compilation_kwargs = { "enabled_precisions": enabled_precisions, - "debug": debug, "workspace_size": workspace_size, "min_block_size": min_block_size, "torch_executed_ops": torch_executed_ops, @@ -110,17 +106,17 @@ Compilation with `torch.compile` optimized_model(*inputs) -.. GENERATED FROM PYTHON SOURCE LINES 71-73 +.. GENERATED FROM PYTHON SOURCE LINES 67-69 Equivalently, we could have run the above via the convenience frontend, as so: `torch_tensorrt.compile(model, ir="torch_compile", inputs=inputs, **compilation_kwargs)` -.. GENERATED FROM PYTHON SOURCE LINES 75-77 +.. GENERATED FROM PYTHON SOURCE LINES 71-73 Inference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 77-85 +.. GENERATED FROM PYTHON SOURCE LINES 73-81 .. code-block:: python @@ -133,7 +129,7 @@ Inference new_outputs = optimized_model(*new_inputs) -.. GENERATED FROM PYTHON SOURCE LINES 86-94 +.. GENERATED FROM PYTHON SOURCE LINES 82-90 .. code-block:: python @@ -146,12 +142,12 @@ Inference new_outputs = optimized_model(*new_inputs) -.. GENERATED FROM PYTHON SOURCE LINES 95-97 +.. GENERATED FROM PYTHON SOURCE LINES 91-93 Cleanup ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. GENERATED FROM PYTHON SOURCE LINES 97-101 +.. GENERATED FROM PYTHON SOURCE LINES 93-97 .. 
code-block:: python @@ -160,7 +156,7 @@ Cleanup torch._dynamo.reset() -.. GENERATED FROM PYTHON SOURCE LINES 102-111 +.. GENERATED FROM PYTHON SOURCE LINES 98-107 Cuda Driver Error Note ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/_sources/tutorials/_rendered_examples/dynamo/torch_export_flux_dev.rst.txt b/docs/_sources/tutorials/_rendered_examples/dynamo/torch_export_flux_dev.rst.txt index 1b21d941c9..9b6bb7e3e0 100644 --- a/docs/_sources/tutorials/_rendered_examples/dynamo/torch_export_flux_dev.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/dynamo/torch_export_flux_dev.rst.txt @@ -37,12 +37,20 @@ To run this demo, you need to have access to Flux model (request for access if y There are different components of the ``FLUX.1-dev`` pipeline such as ``transformer``, ``vae``, ``text_encoder``, ``tokenizer`` and ``scheduler``. In this example, we demonstrate optimizing the ``transformer`` component of the model (which typically consumes >95% of the e2e diffusion latency) -.. GENERATED FROM PYTHON SOURCE LINES 23-25 +.. GENERATED FROM PYTHON SOURCE LINES 21-24 + +.. code-block:: python + + + import register_sdpa # Register SDPA as a standalone operator + + +.. GENERATED FROM PYTHON SOURCE LINES 25-27 Import the following libraries ----------------------------- -.. GENERATED FROM PYTHON SOURCE LINES 25-30 +.. GENERATED FROM PYTHON SOURCE LINES 27-32 .. code-block:: python @@ -52,7 +60,7 @@ Import the following libraries from torch.export._trace import _export -.. GENERATED FROM PYTHON SOURCE LINES 31-36 +.. GENERATED FROM PYTHON SOURCE LINES 33-38 Define the FLUX-1.dev model ----------------------------- @@ -60,7 +68,7 @@ Load the ``FLUX-1.dev`` pretrained pipeline using ``FluxPipeline`` class. ``FluxPipeline`` includes different components such as ``transformer``, ``vae``, ``text_encoder``, ``tokenizer`` and ``scheduler`` necessary to generate an image. We load the weights in ``FP16`` precision using ``torch_dtype`` argument -.. 
GENERATED FROM PYTHON SOURCE LINES 36-46 +.. GENERATED FROM PYTHON SOURCE LINES 38-48 .. code-block:: python @@ -75,14 +83,14 @@ to generate an image. We load the weights in ``FP16`` precision using ``torch_dt backbone = pipe.transformer.to(DEVICE) -.. GENERATED FROM PYTHON SOURCE LINES 47-51 +.. GENERATED FROM PYTHON SOURCE LINES 49-53 Export the backbone using torch.export -------------------------------------------------- Define the dummy inputs and their respective dynamic shapes. We export the transformer backbone with dynamic shapes with a ``batch_size=2`` due to `0/1 specialization `_ -.. GENERATED FROM PYTHON SOURCE LINES 51-96 +.. GENERATED FROM PYTHON SOURCE LINES 53-98 .. code-block:: python @@ -132,7 +140,7 @@ due to `0/1 specialization `_ and only float32 precision is allowed in enabled_precisions option -.. GENERATED FROM PYTHON SOURCE LINES 93-108 +.. GENERATED FROM PYTHON SOURCE LINES 129-144 .. code-block:: python @@ -137,7 +173,7 @@ the engine with weight streaming feature. use_explicit_typing=True option create _ = time_generate(trt_model, input_tensors, osl, 3) -.. GENERATED FROM PYTHON SOURCE LINES 109-115 +.. GENERATED FROM PYTHON SOURCE LINES 145-151 Running with automatic budget size ---------------------------------- @@ -146,7 +182,7 @@ Once you specify the enable_weight_streaming compile option, automatic budget si This automatic size may not always provide the optimal solution because the automatically determined budget lacks insight into the user's specific memory constraints and usage patterns -.. GENERATED FROM PYTHON SOURCE LINES 115-128 +.. GENERATED FROM PYTHON SOURCE LINES 151-164 .. code-block:: python @@ -164,7 +200,7 @@ budget lacks insight into the user's specific memory constraints and usage patte ) -.. GENERATED FROM PYTHON SOURCE LINES 129-137 +.. 
GENERATED FROM PYTHON SOURCE LINES 165-173 Running with weight streaming context manager ---------------------------------- @@ -175,7 +211,7 @@ The permissible range for the budget size is from 0 to ctx.total_device_budget. equal to ctx.total_device_budget will disable weight streaming. If multiple trt engines are created, budgets are distributed proportionally -.. GENERATED FROM PYTHON SOURCE LINES 137-175 +.. GENERATED FROM PYTHON SOURCE LINES 173-211 .. code-block:: python diff --git a/docs/_sources/tutorials/_rendered_examples/index.rst.txt b/docs/_sources/tutorials/_rendered_examples/index.rst.txt index bcdac2769d..c9b43f6a58 100644 --- a/docs/_sources/tutorials/_rendered_examples/index.rst.txt +++ b/docs/_sources/tutorials/_rendered_examples/index.rst.txt @@ -150,35 +150,35 @@ Model Zoo .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_torch_export_cudagraphs_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_engine_caching_bert_example_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_torch_export_cudagraphs.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_engine_caching_bert_example.py` .. raw:: html -
    Torch Export with Cudagraphs
    +
    Engine Caching (BERT)
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_engine_caching_bert_example_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_torch_export_cudagraphs_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_engine_caching_bert_example.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_torch_export_cudagraphs.py` .. raw:: html -
    Engine Caching (BERT)
    +
    Torch Export with Cudagraphs
    @@ -235,120 +235,120 @@ Model Zoo .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_torch_export_gpt2_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_auto_generate_converters_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_torch_export_gpt2.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_auto_generate_converters.py` .. raw:: html -
    Compiling GPT2 using the dynamo backend
    +
    Automatically Generate a Converter for a Custom Kernel
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_torch_export_llama2_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_auto_generate_plugins_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_torch_export_llama2.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_auto_generate_plugins.py` .. raw:: html -
    Compiling Llama2 using the dynamo backend
    +
    Automatically Generate a Plugin for a Custom Kernel
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_auto_generate_converters_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_converter_overloading_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_auto_generate_converters.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_converter_overloading.py` .. raw:: html -
    Automatically Generate a Converter for a Custom Kernel
    +
    Overloading Torch-TensorRT Converters with Custom Converters
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_auto_generate_plugins_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_aot_plugin_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_auto_generate_plugins.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_aot_plugin.py` .. raw:: html -
    Automatically Generate a Plugin for a Custom Kernel
    +
    Torch-TensorRT supports falling back to PyTorch implementations of operations in the case that Torch-TensorRT
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_converter_overloading_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_hierarchical_partitioner_example_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_converter_overloading.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_hierarchical_partitioner_example.py` .. raw:: html -
    Overloading Torch-TensorRT Converters with Custom Converters
    +
    Hierarchical Partitioner Example
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_weight_streaming_example_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_mutable_torchtrt_module_example_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_weight_streaming_example.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_mutable_torchtrt_module_example.py` .. raw:: html -
    Weight Streaming
    +
    Mutable Torch TensorRT Module
    .. raw:: html -
    +
    .. only:: html - .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_mutable_torchtrt_module_example_thumb.png + .. image:: /tutorials/_rendered_examples/dynamo/images/thumb/sphx_glr_weight_streaming_example_thumb.png :alt: - :ref:`sphx_glr_tutorials__rendered_examples_dynamo_mutable_torchtrt_module_example.py` + :ref:`sphx_glr_tutorials__rendered_examples_dynamo_weight_streaming_example.py` .. raw:: html -
    Mutable Torch TensorRT Module
    +
    Weight Streaming
    diff --git a/docs/_sources/tutorials/compile_hf_models.rst.txt b/docs/_sources/tutorials/compile_hf_models.rst.txt new file mode 100644 index 0000000000..f6da87b145 --- /dev/null +++ b/docs/_sources/tutorials/compile_hf_models.rst.txt @@ -0,0 +1,218 @@ +.. _compile_hf_models: + +Compiling LLM models from Huggingface +====================================== + +This tutorial walks you through how to compile LLM models from Huggingface using Torch-TensorRT. We also introduce KV caching in Torch-TensorRT which can greatly improve the performance of LLM inference. +The code is available in the `tools/llm `_ directory. We use the ``run_llm.py`` script to compile the model, generate outputs, and measure the performance. + +.. note:: + This is an **experimental release** and APIs may change in future versions. + +.. note:: + The compilation scripts and tutorials for Llama-2-7b-chat-hf and gpt2 models have been consolidated into the unified ``run_llm.py`` script located in the `tools/llm `_ directory. + +Overview of tools/llm Directory +------------------------------- + +The ``tools/llm`` directory provides the following tools to compile LLM models from Huggingface: + +* **run_llm.py**: Main entry point for model compilation, generating outputs, and benchmarking +* **Static Cache Utilities**: ``static_cache_v1.py`` and ``static_cache_v2.py`` for KV cache optimization +* **SDPA Attention**: ``sdpa_converter.py`` and ``register_sdpa.py`` for registering scaled dot-product attention converter and lowering pass. +* **Testing Components**: Model-specific test files for validation +* **Utility Functions**: ``utils.py`` and ``cache_utils.py`` for common operations + +Supported Models +---------------- +We have officially verified support for the following LLM families: + +.. list-table:: + :widths: 20 40 20 20 + :header-rows: 1 + + * - Model Series + - HuggingFace Model Card + - Precision + - KV Cache Support ? 
+ * - GPT-2 + - gpt2 + - FP16, FP32 + - Yes + * - LLaMA 2 + - meta-llama/Llama-2-7b-chat-hf + - FP16, FP32 + - Yes + * - LLaMA 3.1 + - meta-llama/Llama-3.1-8B-Instruct + - FP16, FP32 + - Yes + * - LLaMA 3.2 + - | meta-llama/Llama-3.2-1B-Instruct + | meta-llama/Llama-3.2-3B-Instruct + - FP16, FP32 + - Yes + * - Qwen 2.5 + - | Qwen/Qwen2.5-0.5B-Instruct + | Qwen/Qwen2.5-1.5B-Instruct + | Qwen/Qwen2.5-3B-Instruct + | Qwen/Qwen2.5-7B-Instruct + - FP16, FP32 + - Yes + +Getting Started with run_llm.py +------------------------------- + +The main entry point is ``run_llm.py``, which provides a complete workflow for model compilation and benchmarking. + +Basic Usage +^^^^^^^^^^^ + +.. code-block:: bash + + python tools/llm/run_llm.py \ + --model meta-llama/Llama-3.2-1B-Instruct \ + --prompt "What is parallel programming?" \ + --precision FP16 \ + --num_tokens 128 \ + --cache static_v2 \ + --benchmark + +Key Arguments +^^^^^^^^^^^^^ + +* ``--model``: Name or path of the HuggingFace LLM +* ``--tokenizer``: (Optional) Tokenizer name; defaults to model name +* ``--prompt``: Input prompt for text generation +* ``--precision``: Precision mode (``FP16``, ``FP32``) +* ``--num_tokens``: Number of output tokens to generate +* ``--cache``: KV cache type (``static_v1``, ``static_v2``, or empty for no KV caching) +* ``--benchmark``: Enable benchmarking mode for performance comparison +* ``--enable_pytorch_run``: Also run and compare PyTorch baseline + + +Other Usage Examples +^^^^^^^^^^^^^^^^^^^^ +.. code-block:: bash + + # Compare different models performance + python tools/llm/run_llm.py --model gpt2 --benchmark --enable_pytorch_run + python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --benchmark --enable_pytorch_run + + # Generate the outputs (disable benchmarking) by specifying the number of tokens to generate. Default = 128 + python tools/llm/run_llm.py --model gpt2 --prompt "What is parallel programming?" 
--num_tokens 128 + python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --prompt "What is parallel programming?" --num_tokens 128 + + # Test different caching approaches + python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --cache static_v1 + python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --cache static_v2 + + # Compare FP16 vs FP32 performance + python tools/llm/run_llm.py --model Qwen/Qwen2.5-1.5B-Instruct --precision FP16 --benchmark + python tools/llm/run_llm.py --model Qwen/Qwen2.5-1.5B-Instruct --precision FP32 --benchmark + + +KV Caching in Torch-TensorRT +--------------------------------- + +We provide two versions of static KV caching: `static_cache_v1 `_ and `static_cache_v2 `_. +In both implementations, we add static KV cache tensors as model inputs/outputs without storing them as external memory. +The length of KV cache = input sequence length + output sequence length (specified by ``--num_tokens``). The number of heads and head dimension are determined by the model config. + +Static Cache v1 +^^^^^^^^^^^^^^^^ + +The ``static_cache_v1.py`` implements KV cache in the model graph as follows: + +.. 
code-block:: python + + class StaticCacheV1Model(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal=True): + # Concatenate new key/value pairs with existing cache + new_key_cache = torch.cat((key_cache[:, :, :start_idx, :], k, key_cache[:, :, end_idx:, :]), dim=2) + new_value_cache = torch.cat((value_cache[:, :, :start_idx, :], v, value_cache[:, :, end_idx:, :]), dim=2) + + # Compute attention using the updated cache + attn_output = torch._C._nn.scaled_dot_product_attention( + q, + new_key_cache[:, :, :end_idx, :], + new_value_cache[:, :, :end_idx, :], + dropout_p=0.0, + is_causal=is_causal + ) + + return attn_output, new_key_cache, new_value_cache + +In the above code, we concatenate the new key/value pairs with the existing cache and update it. To compute the attention, we use the updated cache and gather the corresponding keys/values from the cache up until and including the current token index. +The above code is actually implemented as a FX graph transformation pass. We register it as a Torch-TensorRT lowering pass using the decorator ``@_aten_lowering_pass`` when we import the ``static_cache_v1.py`` module. + +.. note:: + The ``start_idx`` and ``end_idx`` are the start and end indices of the current token in the cache. For prefill phase, ``start_idx`` is 0 and ``end_idx`` is the input sequence length. + For decode phase, ``start_idx`` begins at the input sequence length and ``end_idx`` equals ``start_idx + 1``. The ``start_idx`` is incremented by 1 until the end of the sequence or we reach the maximum number of tokens to generate. + + +Static Cache v2 +^^^^^^^^^^^^^^^^ + +The ``static_cache_v2.py`` is similar to ``static_cache_v1.py`` but it uses less number of slice operations. It implements KV cache in the model graph as follows: + +.. 
code-block:: python + + class StaticCacheV2Model(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal=True): + concat_keys = torch.cat((key_cache[:, :, :start_idx, :], k), dim=2) + concat_values = torch.cat((value_cache[:, :, :start_idx, :], v), dim=2) + new_key_cache = torch.cat((concat_keys, key_cache[:, :, end_idx:, :]), dim=2) + new_value_cache = torch.cat((concat_values, value_cache[:, :, end_idx:, :]), dim=2) + attn_output = torch._C._nn.scaled_dot_product_attention( + q, concat_keys, concat_values, dropout_p=0.0, is_causal=is_causal + ) + + return attn_output, new_key_cache, new_value_cache + +In the above code, we concatenate the existing key/value cache with current key/value of the token. We use this to directly compute the attention and update the key/value cache inserting the current key/value. +The above code is actually implemented as a FX graph transformation pass. We register it as a Torch-TensorRT lowering pass using the decorator ``@_aten_lowering_pass`` when we import the ``static_cache_v1.py`` module. +The definitons of ``start_idx`` and ``end_idx`` are the same as ``static_cache_v1.py``. + +After the model is compiled with static KV cache, the input signature of the model is changed. The new input signature is ``(input_ids, position_ids, key_cache_0, value_cache_0, ..., start_idx, end_idx)``. +The number of key/value cache tensors is equal to the number of attention heads in the model. We can use the ``generate_with_static_cache`` function to generate the outputs. + +Generating Outputs +------------------- +We use custom `generate `_ function to generate the outputs. This function performs standard autoregressive decoding without KV caching. +There is also a `generate_with_static_cache `_ function that performs autoregressive decoding with KV caching. 
+ +The ``generate_with_static_cache`` function takes care of preparing the inputs to the model compiled with static KV cache. +The model inputs are ``input_ids``, ``position_ids``, ``key_cache_0``, ``value_cache_0``, ...., ``start_idx``, ``end_idx``. +We initialize the key/value cache tensors with zeros and for every token generated, the new key/value cache tensors are the outputs of the model. + +SDPA Converter (sdpa_converter.py) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Converts scaled dot-product attention operation using TRT Python API. +* Supports causal and standard self-attention. + +SDPA Registration (register_sdpa.py) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* This is a Torch-TensorRT lowering pass that replaces variants of SDPA with ``torch.nn.functional.scaled_dot_product_attention``. +* Registers the SDPA converter which is used for converting ``torch.nn.functional.scaled_dot_product_attention`` operation. + + +Limitations and Known Issues +---------------------------- + +* Sliding window attention (used in Gemma3 and Qwen 3 models) is not yet supported +* Some model architectures (e.g. Phi-4) have issues with exporting the torch model. + +Requirements +^^^^^^^^^^^^ + +* Torch-TensorRT 2.8.0 or later +* Transformers v4.52.3 \ No newline at end of file diff --git a/docs/_sources/user_guide/runtime.rst.txt b/docs/_sources/user_guide/runtime.rst.txt index fc73aef8ac..8d2151a615 100644 --- a/docs/_sources/user_guide/runtime.rst.txt +++ b/docs/_sources/user_guide/runtime.rst.txt @@ -24,7 +24,7 @@ programs just as you would otherwise via PyTorch API. .. 
note:: If you are linking ``libtorchtrt_runtime.so``, likely using the following flags will help ``-Wl,--no-as-needed -ltorchtrt -Wl,--as-needed`` as there's no direct symbol dependency to anything in the Torch-TensorRT runtime for most Torch-TensorRT runtime applications -An example of how to use ``libtorchtrt_runtime.so`` can be found here: https://github.com/pytorch/TensorRT/tree/master/examples/torchtrt_runtime_example +An example of how to use ``libtorchtrt_runtime.so`` can be found here: https://github.com/pytorch/TensorRT/tree/master/examples/torchtrt_aoti_example Plugin Library --------------- @@ -87,8 +87,8 @@ Cudagraphs can accelerate certain models by reducing kernel overheads, as docume with torch_tensorrt.runtime.enable_cudagraphs(trt_module): ... -In the current implementation, use of a new input shape (for instance in dynamic shape -cases), will cause the cudagraph to be re-recorded. Cudagraph recording is generally +In the current implementation, use of a new input shape (for instance in dynamic shape +cases), will cause the cudagraph to be re-recorded. Cudagraph recording is generally not latency intensive, and future improvements include caching cudagraphs for multiple input shapes. Dynamic Output Allocation Mode @@ -101,11 +101,11 @@ Without dynamic output allocation, the output buffer is allocated based on the i There are two scenarios in which dynamic output allocation is enabled: -1. The model has been identified at compile time to require dynamic output allocation for at least one TensorRT subgraph. -These models will engage the runtime mode automatically (with logging) and are incompatible with other runtime modes +1. The model has been identified at compile time to require dynamic output allocation for at least one TensorRT subgraph. +These models will engage the runtime mode automatically (with logging) and are incompatible with other runtime modes such as CUDA Graphs. 
-Converters can declare that subgraphs that they produce will require the output allocator using `requires_output_allocator=True` +Converters can declare that subgraphs that they produce will require the output allocator using `requires_output_allocator=True` there by forcing any model which utilizes the converter to automatically use the output allocator runtime mode. e.g., .. code-block:: python @@ -131,3 +131,127 @@ there by forcing any model which utilizes the converter to automatically use the # Enables Dynamic Output Allocation Mode, then resets the mode to its prior setting with torch_tensorrt.runtime.enable_output_allocator(trt_module): ... + +Deploying Torch-TensorRT Programs without Python +-------------------------------------------------------- + +AOT-Inductor +~~~~~~~~~~~~~~~~ + +AOTInductor is a specialized version of TorchInductor, designed to process exported PyTorch models, optimize them, and produce shared +libraries as well as other relevant artifacts. These compiled artifacts are specifically crafted for deployment in non-Python environments, +which are frequently employed for inference deployments on the server side. + +Torch-TensorRT is able to accelerate subgraphs within AOTInductor exports in the same way it does in Python. + +.. code-block:: py + + dynamo_model = torch_tensorrt.compile(model, ir="dynamo", arg_inputs=[...]) + torch_tensorrt.save( + dynamo_model, + file_path=os.path.join(os.getcwd(), "model.pt2"), + output_format="aot_inductor", + retrace=True, + arg_inputs=[...], + ) + +This artifact then can be loaded in a C++ application to be executed with out a Python dependency. + +.. 
code-block:: c++ + + #include + #include + + #include "torch/torch.h" + #include "torch/csrc/inductor/aoti_package/model_package_loader.h" + + int main(int argc, const char* argv[]) { + // Check for correct number of command-line arguments + std::string trt_aoti_module_path = "model.pt2"; + + if (argc == 2) { + trt_aoti_module_path = argv[1]; + } + + std::cout << trt_aoti_module_path << std::endl; + + // Get the path to the TRT AOTI model package from the command line + c10::InferenceMode mode; + + torch::inductor::AOTIModelPackageLoader loader(trt_aoti_module_path); + // Assume running on CUDA + std::vector inputs = {torch::randn({8, 10}, at::kCUDA)}; + std::vector outputs = loader.run(inputs); + std::cout << "Result from the first inference:"<< std::endl; + std::cout << outputs << std::endl; + + // The second inference uses a different batch size and it works because we + // specified that dimension as dynamic when compiling model.pt2. + std::cout << "Result from the second inference:"<< std::endl; + // Assume running on CUDA + std::cout << loader.run({torch::randn({1, 10}, at::kCUDA)}) << std::endl; + + return 0; + } + +Note: Similar to Python, at runtime, no Torch-TensorRT APIs are used to operate the model. Therefore typically additional +flags are needed to make sure that ``libtorchtrt_runtime.so`` gets optimized out (see above). + +See: ``//examples/torchtrt_aoti_example`` for a full end to end demo of this workflow + + +TorchScript +~~~~~~~~~~~~~~ + +TorchScript is a legacy compiler stack for PyTorch that includes a Python-less interpreter for TorchScript programs. +It has historically been used by Torch-TensorRT to execute models without Python. Even after the transition to TorchDynamo, +the TorchScript interpreter can continue to be used to run PyTorch models with TensorRT engines outside of Python. + +.. 
code-block:: py + + dynamo_model = torch_tensorrt.compile(model, ir="dynamo", arg_inputs=[...]) + ts_model = torch.jit.trace(dynamo_model, inputs=[...]) + torch.jit.save(ts_model, os.path.join(os.getcwd(), "model.ts"),) + +This artifact then can be loaded in a C++ application to be executed with out a Python dependency. + +.. code-block:: c++ + + #include + #include + #include + #include + #include + #include "torch/script.h" + + int main(int argc, const char* argv[]) { + if (argc < 2) { + std::cerr << "usage: samplertapp \n"; + return -1; + } + + std::string trt_ts_module_path = argv[1]; + + torch::jit::Module trt_ts_mod; + try { + // Deserialize the ScriptModule from a file using torch::jit::load(). + trt_ts_mod = torch::jit::load(trt_ts_module_path); + } catch (const c10::Error& e) { + std::cerr << "error loading the model from : " << trt_ts_module_path << std::endl; + return -1; + } + + std::cout << "Running TRT engine" << std::endl; + std::vector trt_inputs_ivalues; + trt_inputs_ivalues.push_back(at::randint(-5, 5, {1, 3, 5, 5}, {at::kCUDA}).to(torch::kFloat32)); + torch::jit::IValue trt_results_ivalues = trt_ts_mod.forward(trt_inputs_ivalues); + std::cout << "==================TRT outputs================" << std::endl; + std::cout << trt_results_ivalues << std::endl; + std::cout << "=============================================" << std::endl; + std::cout << "TRT engine execution completed. " << std::endl; + } + +Note: Similar to Python, at runtime, no Torch-TensorRT APIs are used to operate the model. Therefore typically additional +flags are needed to make sure that ``libtorchtrt_runtime.so`` gets optimized out (see above). 
+ +See: ``//examples/torchtrt_runtime_example`` for a full end to end demo of this workflow diff --git a/docs/_sources/user_guide/saving_models.rst.txt b/docs/_sources/user_guide/saving_models.rst.txt index dc4b5da222..bef9b4dec3 100644 --- a/docs/_sources/user_guide/saving_models.rst.txt +++ b/docs/_sources/user_guide/saving_models.rst.txt @@ -14,12 +14,13 @@ Saving models compiled with Torch-TensorRT can be done using `torch_tensorrt.sav Dynamo IR ------------- -The output type of `ir=dynamo` compilation of Torch-TensorRT is `torch.fx.GraphModule` object by default. -We can save this object in either `TorchScript` (`torch.jit.ScriptModule`) or `ExportedProgram` (`torch.export.ExportedProgram`) formats by +The output type of `ir=dynamo` compilation of Torch-TensorRT is `torch.fx.GraphModule` object by default. +We can save this object in either `TorchScript` (`torch.jit.ScriptModule`), `ExportedProgram` (`torch.export.ExportedProgram`) or `PT2` formats by specifying the `output_format` flag. Here are the options `output_format` will accept * `exported_program` : This is the default. We perform transformations on the graphmodule first and use `torch.export.save` to save the module. * `torchscript` : We trace the graphmodule via `torch.jit.trace` and save it via `torch.jit.save`. 
+* `PT2 Format` : This is a next generation runtime for PyTorch models, allowing them to run in Python and in C++ a) ExportedProgram ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -52,8 +53,8 @@ b) Torchscript model = MyModel().eval().cuda() inputs = [torch.randn((1, 3, 224, 224)).cuda()] # trt_gm is a torch.fx.GraphModule object - trt_gm = torch_tensorrt.compile(model, ir="dynamo", inputs=inputs) - torch_tensorrt.save(trt_gm, "trt.ts", output_format="torchscript", inputs=inputs) + trt_gm = torch_tensorrt.compile(model, ir="dynamo", arg_inputs=inputs) + torch_tensorrt.save(trt_gm, "trt.ts", output_format="torchscript", arg_inputs=inputs) # Later, you can load it and run inference model = torch.jit.load("trt.ts").cuda() @@ -73,7 +74,7 @@ For `ir=ts`, this behavior stays the same in 2.X versions as well. model = MyModel().eval().cuda() inputs = [torch.randn((1, 3, 224, 224)).cuda()] - trt_ts = torch_tensorrt.compile(model, ir="ts", inputs=inputs) # Output is a ScriptModule object + trt_ts = torch_tensorrt.compile(model, ir="ts", arg_inputs=inputs) # Output is a ScriptModule object torch.jit.save(trt_ts, "trt_model.ts") # Later, you can load it and run inference @@ -98,3 +99,26 @@ Here's an example usage inputs = [torch.randn((1, 3, 224, 224)).cuda()] model = torch_tensorrt.load().module() model(*inputs) + +b) PT2 Format +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +PT2 is a new format that allows models to be run outside of Python in the future. It utilizes `AOTInductor `_ +to generate kernels for components that will not be run in TensorRT. + +Here's an example on how to save and load Torch-TensorRT Module using AOTInductor in Python + +.. 
code-block:: python + + import torch + import torch_tensorrt + + model = MyModel().eval().cuda() + inputs = [torch.randn((1, 3, 224, 224)).cuda()] + # trt_ep is a torch.fx.GraphModule object + trt_gm = torch_tensorrt.compile(model, ir="dynamo", inputs=inputs) + torch_tensorrt.save(trt_gm, "trt.pt2", arg_inputs=inputs, output_format="aot_inductor", retrace=True) + + # Later, you can load it and run inference + model = torch._inductor.aoti_load_package("trt.pt2") + model(*inputs) diff --git a/docs/_static/documentation_options.js b/docs/_static/documentation_options.js index 786e44a6f1..a2fde8fe2e 100644 --- a/docs/_static/documentation_options.js +++ b/docs/_static/documentation_options.js @@ -1,6 +1,6 @@ var DOCUMENTATION_OPTIONS = { URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), - VERSION: 'v2.8.0.dev0+ee32da0', + VERSION: 'v2.9.0.dev0+92a6908', LANGUAGE: 'en', COLLAPSE_INDEX: false, BUILDER: 'html', diff --git a/docs/_static/js/theme.js b/docs/_static/js/theme.js index ceee8f062c..1f071b8486 100644 --- a/docs/_static/js/theme.js +++ b/docs/_static/js/theme.js @@ -945,10 +945,19 @@ if (downloadNote.length >= 1) { var tutorialUrlArray = $("#tutorial-type").text().split('/'); tutorialUrlArray[0] = tutorialUrlArray[0] + "_source" - var githubLink = "https://github.com/pytorch/tutorials/blob/master/" + tutorialUrlArray.join("/") + ".py", - notebookLink = $(".reference.download")[1].href, - notebookDownloadPath = notebookLink.split('_downloads')[1], - colabLink = "https://colab.research.google.com/github/pytorch/tutorials/blob/gh-pages/_downloads" + notebookDownloadPath; + var githubLink = "https://github.com/pytorch/tutorials/blob/master/" + tutorialUrlArray.join("/") + ".py"; + var notebookLink = ""; + // some versions of sphinx gallery have different orders of the download + // links so we need to check if the link ends with .ipynb to find the + // correct one + for (var i = 0; i < $(".reference.download").length; i++) { + 
notebookLink = $(".reference.download")[i].href; + if (notebookLink.endsWith(".ipynb")) { + break; + } + } + var notebookDownloadPath = notebookLink.split('_downloads')[1]; + var colabLink = "https://colab.research.google.com/github/pytorch/tutorials/blob/gh-pages/_downloads" + notebookDownloadPath; $("#google-colab-link").wrap(""); $("#download-notebook-link").wrap(""); diff --git a/docs/cli/torchtrtc.html b/docs/cli/torchtrtc.html index 3878948003..add26a1144 100644 --- a/docs/cli/torchtrtc.html +++ b/docs/cli/torchtrtc.html @@ -10,7 +10,7 @@ - torchtrtc — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + torchtrtc — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/conversion.html b/docs/contributors/conversion.html index 8f9e39e5d6..7afb5077ef 100644 --- a/docs/contributors/conversion.html +++ b/docs/contributors/conversion.html @@ -10,7 +10,7 @@ - Conversion Phase — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Conversion Phase — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/dynamo_converters.html b/docs/contributors/dynamo_converters.html index 077dccd7d2..20a4a73aa9 100644 --- a/docs/contributors/dynamo_converters.html +++ b/docs/contributors/dynamo_converters.html @@ -10,7 +10,7 @@ - Writing Dynamo Converters — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Writing Dynamo Converters — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/lowering.html b/docs/contributors/lowering.html index db68f640af..b4332cfca6 100644 --- a/docs/contributors/lowering.html +++ b/docs/contributors/lowering.html @@ -10,7 +10,7 @@ - Lowering Phase — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Lowering Phase — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/partitioning.html b/docs/contributors/partitioning.html index dcc0bee79e..ac6a13f0c3 100644 --- a/docs/contributors/partitioning.html +++ b/docs/contributors/partitioning.html @@ -10,7 +10,7 @@ - Partitioning Phase — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Partitioning Phase — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -711,6 +710,16 @@

    Dependency Aware Partitioning +

    Hierarchical Partitioner for Dynamo

    +

    The Hierarchical Partitioner is an extension to the standard TensorRT partitioner that allows for more sophisticated partitioning strategies by considering backend priority and operator support. This is particularly useful when you want to distribute different parts of your model across multiple backends based on their capabilities and priorities.

    +

    We currently support hierarchical adjacency partitioner, which extends the standard adjacency partitioner with the following capabilities:

    +
      +
    1. Backend priority ordering: Assign operators to backends based on a priority order, ensuring that operators are assigned to the highest-priority backend that supports them.

    2. +
    3. Multi-backend support: Distribute model execution across multiple backends based on operator support.

    4. +
    +

    Please refer to hierarchical_partitioner_example for more details.

    @@ -765,6 +774,7 @@

    Dependency Aware PartitioningAutomatic Fallback
  • Dependency Aware Partitioning
  • +
  • Hierarchical Partitioner for Dynamo
  • diff --git a/docs/contributors/phases.html b/docs/contributors/phases.html index cbc7111d3d..3056cd57d5 100644 --- a/docs/contributors/phases.html +++ b/docs/contributors/phases.html @@ -10,7 +10,7 @@ - Compiler Phases — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Compiler Phases — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/runtime.html b/docs/contributors/runtime.html index ab1b911002..c34bfbbb5c 100644 --- a/docs/contributors/runtime.html +++ b/docs/contributors/runtime.html @@ -10,7 +10,7 @@ - Runtime Phase — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Runtime Phase — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/system_overview.html b/docs/contributors/system_overview.html index 43a147b2ab..42a4303b6f 100644 --- a/docs/contributors/system_overview.html +++ b/docs/contributors/system_overview.html @@ -10,7 +10,7 @@ - System Overview — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + System Overview — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/ts_converters.html b/docs/contributors/ts_converters.html index 3b632fe43d..feacf295fc 100644 --- a/docs/contributors/ts_converters.html +++ b/docs/contributors/ts_converters.html @@ -10,7 +10,7 @@ - Writing TorchScript Converters — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Writing TorchScript Converters — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/useful_links.html b/docs/contributors/useful_links.html index fb82b34dc3..a2d8af54e9 100644 --- a/docs/contributors/useful_links.html +++ b/docs/contributors/useful_links.html @@ -10,7 +10,7 @@ - Useful Links for Torch-TensorRT Development — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Useful Links for Torch-TensorRT Development — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/contributors/writing_dynamo_aten_lowering_passes.html b/docs/contributors/writing_dynamo_aten_lowering_passes.html index a5d0fb5b24..7b713c0eb8 100644 --- a/docs/contributors/writing_dynamo_aten_lowering_passes.html +++ b/docs/contributors/writing_dynamo_aten_lowering_passes.html @@ -10,7 +10,7 @@ - Writing Dynamo ATen Lowering Passes — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Writing Dynamo ATen Lowering Passes — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/dynamo/dynamo_export.html b/docs/dynamo/dynamo_export.html index 00bf97e87f..7b14272b68 100644 --- a/docs/dynamo/dynamo_export.html +++ b/docs/dynamo/dynamo_export.html @@ -10,7 +10,7 @@ - Compiling Exported Programs with Torch-TensorRT — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Compiling Exported Programs with Torch-TensorRT — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/dynamo/torch_compile.html b/docs/dynamo/torch_compile.html index 346d514736..8ba8f59eb6 100644 --- a/docs/dynamo/torch_compile.html +++ b/docs/dynamo/torch_compile.html @@ -10,7 +10,7 @@ - TensorRT Backend for torch.compile — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + TensorRT Backend for torch.compile — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -512,7 +511,7 @@

    Key Features

    -class torch_tensorrt.dynamo.CompilationSettings(enabled_precisions: ~typing.Set[~torch_tensorrt._enums.dtype] = <factory>, debug: bool = False, workspace_size: int = 0, min_block_size: int = 5, torch_executed_ops: ~typing.Collection[~typing.Union[~typing.Callable[[...], ~typing.Any], str]] = <factory>, pass_through_build_failures: bool = False, max_aux_streams: ~typing.Optional[int] = None, version_compatible: bool = False, optimization_level: ~typing.Optional[int] = None, use_python_runtime: ~typing.Optional[bool] = False, truncate_double: bool = False, use_fast_partitioner: bool = True, enable_experimental_decompositions: bool = False, device: ~torch_tensorrt._Device.Device = <factory>, require_full_compilation: bool = False, disable_tf32: bool = False, assume_dynamic_shape_support: bool = False, sparse_weights: bool = False, engine_capability: ~torch_tensorrt._enums.EngineCapability = <factory>, num_avg_timing_iters: int = 1, dla_sram_size: int = 1048576, dla_local_dram_size: int = 1073741824, dla_global_dram_size: int = 536870912, dryrun: ~typing.Union[bool, str] = False, hardware_compatible: bool = False, timing_cache_path: str = '/tmp/torch_tensorrt_engine_cache/timing_cache.bin', lazy_engine_init: bool = False, cache_built_engines: bool = False, reuse_cached_engines: bool = False, use_explicit_typing: bool = False, use_fp32_acc: bool = False, refit_identical_engine_weights: bool = False, strip_engine_weights: bool = False, immutable_weights: bool = True, enable_weight_streaming: bool = False, enable_cross_compile_for_windows: bool = False, tiling_optimization_level: str = 'none', l2_limit_for_tiling: int = -1, use_distributed_mode_trace: bool = False, offload_module_to_cpu: bool = False)[source]
    +class torch_tensorrt.dynamo.CompilationSettings(enabled_precisions: ~typing.Set[~torch_tensorrt._enums.dtype] = <factory>, workspace_size: int = 0, min_block_size: int = 5, torch_executed_ops: ~typing.Collection[~typing.Union[~typing.Callable[[...], ~typing.Any], str]] = <factory>, pass_through_build_failures: bool = False, max_aux_streams: ~typing.Optional[int] = None, version_compatible: bool = False, optimization_level: ~typing.Optional[int] = None, use_python_runtime: ~typing.Optional[bool] = False, truncate_double: bool = False, use_fast_partitioner: bool = True, enable_experimental_decompositions: bool = False, device: ~torch_tensorrt._Device.Device = <factory>, require_full_compilation: bool = False, disable_tf32: bool = False, assume_dynamic_shape_support: bool = False, sparse_weights: bool = False, engine_capability: ~torch_tensorrt._enums.EngineCapability = <factory>, num_avg_timing_iters: int = 1, dla_sram_size: int = 1048576, dla_local_dram_size: int = 1073741824, dla_global_dram_size: int = 536870912, dryrun: ~typing.Union[bool, str] = False, hardware_compatible: bool = False, timing_cache_path: str = '/tmp/torch_tensorrt_engine_cache/timing_cache.bin', lazy_engine_init: bool = False, cache_built_engines: bool = False, reuse_cached_engines: bool = False, use_explicit_typing: bool = False, use_fp32_acc: bool = False, refit_identical_engine_weights: bool = False, strip_engine_weights: bool = False, immutable_weights: bool = True, enable_weight_streaming: bool = False, enable_cross_compile_for_windows: bool = False, tiling_optimization_level: str = 'none', l2_limit_for_tiling: int = -1, use_distributed_mode_trace: bool = False, offload_module_to_cpu: bool = False)[source]

    Compilation settings for Torch-TensorRT Dynamo Paths

    Parameters
    diff --git a/docs/fx/getting_started_with_fx_path.html b/docs/fx/getting_started_with_fx_path.html index 8ed5eb46b4..111d7cdf21 100644 --- a/docs/fx/getting_started_with_fx_path.html +++ b/docs/fx/getting_started_with_fx_path.html @@ -10,7 +10,7 @@ - Torch-TensorRT (FX Frontend) User Guide — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Torch-TensorRT (FX Frontend) User Guide — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/genindex.html b/docs/genindex.html index 461c82c6e2..c4bb49d297 100644 --- a/docs/genindex.html +++ b/docs/genindex.html @@ -9,7 +9,7 @@ - Index — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Index — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -290,7 +290,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -321,7 +321,7 @@

    Getting Started

    User Guide

    @@ -373,9 +373,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -671,6 +670,8 @@

    F

  • f16 (torch_tensorrt.dtype attribute)
  • f32 (torch_tensorrt.dtype attribute) +
  • +
  • f4 (torch_tensorrt.dtype attribute)
  • f64 (torch_tensorrt.dtype attribute)
  • @@ -853,13 +854,15 @@

    S

  • set_cudagraphs_mode() (in module torch_tensorrt.runtime)
  • - - +
    • set_extra_state() (torch_tensorrt.runtime.TorchTensorRTModule method)
    • set_multi_device_safe_mode() (in module torch_tensorrt.runtime) +
    • +
    • set_weight_streaming_ctx() (torch_tensorrt.MutableTorchTensorRTModule method)
    • STANDARD (torch_tensorrt.EngineCapability attribute)
    • diff --git a/docs/getting_started/installation.html b/docs/getting_started/installation.html index f58586d290..002c8460e2 100644 --- a/docs/getting_started/installation.html +++ b/docs/getting_started/installation.html @@ -10,7 +10,7 @@ - Installation — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Installation — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -39,7 +39,7 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      +
      +
      + + + + + +
      +
      +
      + + + + + + + + + + + +
      +
      +
      + + + + + + + + + + + + + + + + +
      + +
        + +
      • + + + Docs + + > +
      • + + +
      • Automatically Generate a TensorRT AOT Plugin
      • + + +
      • + + + + + +
      • + +
      + + +
      +
      + +
      + Shortcuts +
      +
      + +
      +
      + + + + + + +
      + +
      +
      + + +
      +

      Automatically Generate a TensorRT AOT Plugin

      +

      We are going to demonstrate how to automatically generate a plugin for a custom kernel using Torch-TensorRT using +the new Python based plugin system in TensorRT 10.7.

      +

      Torch-TensorRT supports falling back to PyTorch implementations of operations in the case that Torch-TensorRT +does not know how to compile them in TensorRT. However, this comes at the cost of a graph break and will reduce the performance of the model. +The easiest way to fix lack of support for ops is by adding a decomposition (see: +Writing lowering passes for the Dynamo frontend) - which defines the operator +in terms of PyTorch ops that are supported in Torch-TensorRT or a converter (see: +Writing converters for the Dynamo frontend) - which defines the operator in terms of TensorRT operators.

      +

      In some cases there isn’t a great way to do either of these, perhaps because the operator is a custom kernel that is not part of standard PyTorch or +TensorRT cannot support it natively.

      +

      For these cases, it is possible to use a TensorRT plugin to replace the operator inside the TensorRT engine, thereby avoiding +the performance and resource overhead from a graph break.

      +

      Previously this involved a complex process in not only building a performant kernel but setting it up to run in TensorRT (see: Using Custom Kernels within TensorRT Engines with Torch-TensorRT). +As of TensorRT 10.7, there is a new Python native plugin system which greatly streamlines this process. This +plugin system also allows Torch-TensorRT to automatically generate the necessary conversion code to convert the +operation in PyTorch to TensorRT.

      +

      In addition, Torch-TensorRT provides automatic generation of TensorRT plugin feature (see: Automatically Generate a Plugin for a Custom Kernel). +However, the above methods generates a JIT plugin that might not satisfy user’s performance requirements. +To support that, Torch-TensorRT provides auto generation of TensorRT AOT Plugin which raps a function to define an Ahead-of-Time (AOT) implementation for a plugin already registered. +This provides a performance boost comparing to JIT plugin.

      +
      import argparse
      +from typing import Tuple, Union
      +
      +import tensorrt as trt
      +import tensorrt.plugin as trtp
      +import torch
      +import torch_tensorrt
      +import triton
      +import triton.language as tl
      +
      +trt_logger = trt.Logger(trt.Logger.VERBOSE)
      +
      +
      +@triton.jit
      +def add_one_kernel(x_ptr, n_elements, y_ptr, BLOCK_SIZE: tl.constexpr):
      +    pid = tl.program_id(0)
      +    block_start = pid * BLOCK_SIZE
      +    offsets = block_start + tl.arange(0, BLOCK_SIZE)
      +    mask = offsets < n_elements
      +    x = tl.load(x_ptr + offsets, mask=mask)
      +    output = x + 1
      +    tl.store(y_ptr + offsets, output, mask=mask)
      +
      +
      +@torch.library.custom_op("my::add_one", mutates_args=())  # type: ignore[misc]
      +def add_one(X: torch.Tensor) -> torch.Tensor:
      +    # Ensure the tensors are on the GPU
      +    assert X.is_cuda
      +
      +    # Create output tensor
      +    Y = torch.empty_like(X)
      +
      +    # Define block size
      +    BLOCK_SIZE = 256
      +
      +    # Grid of programs
      +    grid = lambda meta: (triton.cdiv(X.numel(), meta["BLOCK_SIZE"]),)
      +
      +    # Launch the kernel
      +    add_one_kernel[grid](X, X.numel(), Y, BLOCK_SIZE=BLOCK_SIZE)
      +
      +    return Y
      +
      +
      +@torch.library.register_fake("my::add_one")
      +def _(X: torch.Tensor) -> torch.Tensor:
      +    return X
      +
      +
      +@trtp.register("my::add_one")
      +def add_plugin_desc(X: trtp.TensorDesc) -> Tuple[trtp.TensorDesc]:
      +    return X.like()
      +
      +
      +@trtp.aot_impl("my::add_one")
      +def add_plugin_aot_impl(
      +    X: trtp.TensorDesc, outputs: Tuple[trtp.TensorDesc], tactic: int
      +) -> Tuple[
      +    Union[str, bytes], Union[str, bytes], trtp.KernelLaunchParams, trtp.SymExprs
      +]:
      +    type_str = "fp32" if X.dtype == trt.float32 else "fp16"
      +
      +    block_size = 256
      +    src = triton.compiler.ASTSource(
      +        fn=add_one_kernel,
      +        signature={
      +            "x_ptr": f"*{type_str}",
      +            "n_elements": "i32",
      +            "y_ptr": f"*{type_str}",
      +            "BLOCK_SIZE": "constexpr",
      +        },
      +        constants={
      +            "BLOCK_SIZE": block_size,
      +        },
      +    )
      +
      +    compiled_kernel = triton.compile(src)
      +
      +    N = X.shape_expr.numel()
      +    launch_params = trtp.KernelLaunchParams()
      +
      +    # grid dims
      +    launch_params.grid_x = trtp.cdiv(N, block_size)
      +    # block dims
      +    launch_params.block_x = compiled_kernel.metadata.num_warps * 32
      +    # shared memory
      +    launch_params.shared_mem = compiled_kernel.metadata.shared
      +
      +    extra_args = trtp.SymIntExprs(1)
      +    extra_args[0] = trtp.SymInt32(N)
      +
      +    return (
      +        compiled_kernel.metadata.name,
      +        compiled_kernel.asm["ptx"],
      +        launch_params,
      +        extra_args,
      +    )
      +
      +
      +torch_tensorrt.dynamo.conversion.plugins.generate_plugin_converter(
      +    "my::add_one",
      +    supports_dynamic_shapes=False,
      +    requires_output_allocator=False,
      +    use_aot_if_available=True,
      +)
      +
      +
      +class MyModel(torch.nn.Module):
      +    def __init__(self):
      +        super().__init__()
      +
      +    def forward(self, X: torch.Tensor) -> torch.Tensor:
      +        res = torch.ops.my.add_one.default(X)
      +
      +        return res
      +
      +
      +if __name__ == "__main__":
      +    parser = argparse.ArgumentParser()
      +    parser.add_argument(
      +        "--aot", action="store_true", help="Try to use AOT compilation", default=False
      +    )
      +    args = parser.parse_args()
      +
      +    my_model = MyModel().to("cuda")
      +    m = torch.full((64, 64), 2, device="cuda", dtype=torch.float)
      +
      +    assert my_model(X=m)[0][0] == 3.0
      +
      +    with torch_tensorrt.logging.debug():
      +        trt_inputs = [m]
      +        model_trt = torch_tensorrt.compile(
      +            my_model,
      +            inputs=trt_inputs,
      +            min_block_size=1,
      +        )
      +        print("Model compiled successfully!")
      +        print("Running inference with compiled model...")
      +        for i in range(10):
      +            res = model_trt(m)
      +            assert torch.allclose(res, my_model(m)), "Results do not match!"
      +
      +    print("Inference successful!")
      +
      +
      +

      Total running time of the script: ( 0 minutes 0.000 seconds)

      + +

      Gallery generated by Sphinx-Gallery

      +
      + + +
      + +
      +
      + + + + +
      + + + +
      +

      + © Copyright 2024, NVIDIA Corporation. + +

      +
      + +
      + Built with Sphinx using a theme provided by Read the Docs. +
      + + +
      + +
      +
      + + +
      +
      + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      +
      +
      +
      +

      Docs

      +

      Access comprehensive developer documentation for PyTorch

      + View Docs +
      + +
      +

      Tutorials

      +

      Get in-depth tutorials for beginners and advanced developers

      + View Tutorials +
      + +
      +

      Resources

      +

      Find development resources and get your questions answered

      + View Resources +
      +
      +
      +
      + + + + + + + + + +
      +
      +
      +
      + + +
      +
      +
      + + +
      + + + + + + + + \ No newline at end of file diff --git a/docs/tutorials/_rendered_examples/dynamo/auto_generate_converters.html b/docs/tutorials/_rendered_examples/dynamo/auto_generate_converters.html index f4ce8ec302..bfba9c54c3 100644 --- a/docs/tutorials/_rendered_examples/dynamo/auto_generate_converters.html +++ b/docs/tutorials/_rendered_examples/dynamo/auto_generate_converters.html @@ -10,7 +10,7 @@ - Automatically Generate a Converter for a Custom Kernel — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Automatically Generate a Converter for a Custom Kernel — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
      - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
      @@ -324,7 +324,7 @@

      Getting Started

      User Guide

      @@ -376,9 +376,8 @@
    • Compiling ResNet with dynamic shapes using the torch.compile backend
    • Compiling BERT using the torch.compile backend
    • Compiling Stable Diffusion model using the torch.compile backend
    • +
    • Compiling LLM models from Huggingface
    • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
    • -
    • Compiling GPT2 using the dynamo backend
    • -
    • Compiling Llama2 using the dynamo backend
    • Compiling SAM2 using the dynamo backend
    • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    • Legacy notebooks
    • @@ -650,9 +649,7 @@

      Using our converter with a modeln = torch.full((64, 64), 3, device="cuda", dtype=torch.float) with torch_tensorrt.logging.errors(): - model_trt = torch_tensorrt.compile( - my_model, inputs=[m, n], debug=True, min_block_size=1 - ) + model_trt = torch_tensorrt.compile(my_model, inputs=[m, n], min_block_size=1) for i in range(300): res = model_trt(m, n) assert torch.allclose(res, my_model(m, n)) diff --git a/docs/tutorials/_rendered_examples/dynamo/auto_generate_plugins.html b/docs/tutorials/_rendered_examples/dynamo/auto_generate_plugins.html index fce1f76c2d..fbe4f5d2e7 100644 --- a/docs/tutorials/_rendered_examples/dynamo/auto_generate_plugins.html +++ b/docs/tutorials/_rendered_examples/dynamo/auto_generate_plugins.html @@ -10,7 +10,7 @@ - Automatically Generate a Plugin for a Custom Kernel — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Automatically Generate a Plugin for a Custom Kernel — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
      - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
      @@ -324,7 +324,7 @@

      Getting Started

      User Guide

      @@ -376,9 +376,8 @@
    • Compiling ResNet with dynamic shapes using the torch.compile backend
    • Compiling BERT using the torch.compile backend
    • Compiling Stable Diffusion model using the torch.compile backend
    • +
    • Compiling LLM models from Huggingface
    • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
    • -
    • Compiling GPT2 using the dynamo backend
    • -
    • Compiling Llama2 using the dynamo backend
    • Compiling SAM2 using the dynamo backend
    • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    • Legacy notebooks
    • @@ -625,9 +624,7 @@

      Using our converter with a modeln = torch.randint(0, 5, (64, 64), device="cuda", dtype=torch.float) with torch_tensorrt.logging.errors(): - model_trt = torch_tensorrt.compile( - my_model, inputs=[m, n], debug=True, min_block_size=1 - ) + model_trt = torch_tensorrt.compile(my_model, inputs=[m, n], min_block_size=1) for i in range(300): res = model_trt(m, n) assert torch.allclose(res, my_model(m, n)) diff --git a/docs/tutorials/_rendered_examples/dynamo/converter_overloading.html b/docs/tutorials/_rendered_examples/dynamo/converter_overloading.html index e10c87c756..a2ffe65e97 100644 --- a/docs/tutorials/_rendered_examples/dynamo/converter_overloading.html +++ b/docs/tutorials/_rendered_examples/dynamo/converter_overloading.html @@ -10,7 +10,7 @@ - Overloading Torch-TensorRT Converters with Custom Converters — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Overloading Torch-TensorRT Converters with Custom Converters — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
      - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
      @@ -324,7 +324,7 @@

      Getting Started

      User Guide

      @@ -376,9 +376,8 @@
    • Compiling ResNet with dynamic shapes using the torch.compile backend
    • Compiling BERT using the torch.compile backend
    • Compiling Stable Diffusion model using the torch.compile backend
    • +
    • Compiling LLM models from Huggingface
    • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
    • -
    • Compiling GPT2 using the dynamo backend
    • -
    • Compiling Llama2 using the dynamo backend
    • Compiling SAM2 using the dynamo backend
    • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    • Legacy notebooks
    • diff --git a/docs/tutorials/_rendered_examples/dynamo/cross_runtime_compilation_for_windows.html b/docs/tutorials/_rendered_examples/dynamo/cross_runtime_compilation_for_windows.html index 2486bb7671..f215bf18d7 100644 --- a/docs/tutorials/_rendered_examples/dynamo/cross_runtime_compilation_for_windows.html +++ b/docs/tutorials/_rendered_examples/dynamo/cross_runtime_compilation_for_windows.html @@ -10,7 +10,7 @@ - Cross runtime compilation for windows example — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Cross runtime compilation for windows example — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
      - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
      @@ -322,7 +322,7 @@

      Getting Started

      User Guide

      @@ -374,9 +374,8 @@
    • Compiling ResNet with dynamic shapes using the torch.compile backend
    • Compiling BERT using the torch.compile backend
    • Compiling Stable Diffusion model using the torch.compile backend
    • +
    • Compiling LLM models from Huggingface
    • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
    • -
    • Compiling GPT2 using the dynamo backend
    • -
    • Compiling Llama2 using the dynamo backend
    • Compiling SAM2 using the dynamo backend
    • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    • Legacy notebooks
    • @@ -554,7 +553,6 @@

      Imports and Model Definition"cross runtime compiled model for windows can only be compiled in Linux system" ) compile_spec = { - "debug": True, "min_block_size": 1, } torchtrt.cross_compile_for_windows( diff --git a/docs/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.html b/docs/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.html index 61ed8c850e..c026fa9ce3 100644 --- a/docs/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.html +++ b/docs/tutorials/_rendered_examples/dynamo/custom_kernel_plugins.html @@ -10,7 +10,7 @@ - Using Custom Kernels within TensorRT Engines with Torch-TensorRT — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Using Custom Kernels within TensorRT Engines with Torch-TensorRT — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
      - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
      @@ -324,7 +324,7 @@

      Getting Started

      User Guide

      @@ -376,9 +376,8 @@
    • Compiling ResNet with dynamic shapes using the torch.compile backend
    • Compiling BERT using the torch.compile backend
    • Compiling Stable Diffusion model using the torch.compile backend
    • +
    • Compiling LLM models from Huggingface
    • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
    • -
    • Compiling GPT2 using the dynamo backend
    • -
    • Compiling Llama2 using the dynamo backend
    • Compiling SAM2 using the dynamo backend
    • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    • Legacy notebooks
    • @@ -737,7 +736,7 @@

      Using the Custom Operator in a Model - Engine Caching (BERT) — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Engine Caching (BERT) — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
      - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
      @@ -324,7 +324,7 @@

      Getting Started

      User Guide

      @@ -376,9 +376,8 @@
    • Compiling ResNet with dynamic shapes using the torch.compile backend
    • Compiling BERT using the torch.compile backend
    • Compiling Stable Diffusion model using the torch.compile backend
    • +
    • Compiling LLM models from Huggingface
    • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
    • -
    • Compiling GPT2 using the dynamo backend
    • -
    • Compiling Llama2 using the dynamo backend
    • Compiling SAM2 using the dynamo backend
    • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    • Legacy notebooks
    • @@ -541,7 +540,6 @@ "use_python_runtime": False, "enabled_precisions": {torch.float}, "truncate_double": True, - "debug": False, "min_block_size": 1, "immutable_weights": False, "cache_built_engines": cache_built_engines, diff --git a/docs/tutorials/_rendered_examples/dynamo/engine_caching_example.html b/docs/tutorials/_rendered_examples/dynamo/engine_caching_example.html index ca6328e8b3..bb334a6b6c 100644 --- a/docs/tutorials/_rendered_examples/dynamo/engine_caching_example.html +++ b/docs/tutorials/_rendered_examples/dynamo/engine_caching_example.html @@ -10,7 +10,7 @@ - Engine Caching — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Engine Caching — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
      - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
      @@ -324,7 +324,7 @@

      Getting Started

      User Guide

      @@ -376,9 +376,8 @@
    • Compiling ResNet with dynamic shapes using the torch.compile backend
    • Compiling BERT using the torch.compile backend
    • Compiling Stable Diffusion model using the torch.compile backend
    • +
    • Compiling LLM models from Huggingface
    • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
    • -
    • Compiling GPT2 using the dynamo backend
    • -
    • Compiling Llama2 using the dynamo backend
    • Compiling SAM2 using the dynamo backend
    • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    • Legacy notebooks
    • @@ -531,7 +530,6 @@ model = models.resnet18(pretrained=True).eval().to("cuda") enabled_precisions = {torch.float} -debug = False min_block_size = 1 use_python_runtime = False @@ -583,7 +581,6 @@

      Engine Caching for JIT Compilationoptions={ "use_python_runtime": True, "enabled_precisions": enabled_precisions, - "debug": debug, "min_block_size": min_block_size, "immutable_weights": False, "cache_built_engines": cache_built_engines, @@ -642,7 +639,6 @@

      Engine Caching for AOT Compilationtuple(inputs), use_python_runtime=use_python_runtime, enabled_precisions=enabled_precisions, - debug=debug, min_block_size=min_block_size, immutable_weights=False, cache_built_engines=cache_built_engines, @@ -752,7 +748,6 @@

      Custom Engine Cacheoptions={ "use_python_runtime": True, "enabled_precisions": enabled_precisions, - "debug": debug, "min_block_size": min_block_size, "immutable_weights": False, "cache_built_engines": cache_built_engines, diff --git a/docs/tutorials/_rendered_examples/dynamo/hierarchical_partitioner_example.html b/docs/tutorials/_rendered_examples/dynamo/hierarchical_partitioner_example.html new file mode 100644 index 0000000000..867487eb65 --- /dev/null +++ b/docs/tutorials/_rendered_examples/dynamo/hierarchical_partitioner_example.html @@ -0,0 +1,1031 @@ + + + + + + + + + + + + + Hierarchical Partitioner Example — Torch-TensorRT v2.9.0.dev0+92a6908 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      +
      +
      + + + + + +
      +
      +
      + + + + + + + + + + + +
      +
      +
      + + + + + + + + + + + + + + + + +
      + +
        + +
      • + + + Docs + + > +
      • + + +
      • Hierarchical Partitioner Example
      • + + +
      • + + + + + +
      • + +
      + + +
      +
      + +
      + Shortcuts +
      +
      + +
      +
      + + + + + + +
      + +
      +
      + + +
      +

      Hierarchical Partitioner Example

      +

      Basic example on how to use the hierarchical adjacency partitioner function and manually compile the partitioned model. +Not yet available in the compile API.

      +
      from typing import Any, Callable
      +
      +import torch
      +import torch.nn as nn
      +import torch_tensorrt
      +from torch_tensorrt._enums import dtype
      +from torch_tensorrt.dynamo import partitioning
      +from torch_tensorrt.dynamo._compiler import convert_module
      +from torch_tensorrt.dynamo.conversion._ConverterRegistry import (
      +    DYNAMO_CONVERTERS as CONVERTERS,
      +)
      +from torch_tensorrt.dynamo.lowering import (
      +    get_decompositions,
      +    pre_export_lowering,
      +)
      +from torch_tensorrt.dynamo.partitioning._hierarchical_partitioner import (
      +    hierarchical_adjacency_partition,
      +)
      +from torch_tensorrt.dynamo.utils import (
      +    get_output_metadata,
      +)
      +from torchvision import models
      +
      +
      +class InductorModule(torch.nn.Module):  # type: ignore[misc]
      +    """Wrapper module for inductor compiled function."""
      +
      +    def __init__(self, func: Callable[..., Any]) -> None:
      +        super().__init__()
      +        self.func = func
      +
      +    def forward(self, *args: Any, **kwargs: Any) -> Any:
      +        return self.func(*args, **kwargs)
      +
      +
      +class SimpleModel(nn.Module):
      +    def __init__(self):
      +        super().__init__()
      +        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, padding=1)
      +        self.conv2 = nn.Conv2d(64, 128, kernel_size=3, padding=1)
      +        self.bn1 = nn.BatchNorm2d(64)
      +        self.bn2 = nn.BatchNorm2d(128)
      +
      +    def forward(self, x):
      +        x = self.conv1(x)
      +        x = self.bn1(x)
      +        x = torch.relu(x)
      +        x = self.conv2(x)
      +        x = self.bn2(x)
      +        x = torch.relu(x)
      +        return x
      +
      +
      +def main():
      +    # Create model
      +    model = SimpleModel().cuda()
      +    # model = models.efficientnet_b0(pretrained=True).cuda()
      +    model = model.eval()
      +
      +    # Create example input
      +    example_input = torch.randn(1, 3, 224, 224).cuda()
      +
      +    exported_program = torch.export.export(model, (example_input,))
      +    exported_program = pre_export_lowering(exported_program)
      +    exported_program = exported_program.run_decompositions(get_decompositions())
      +
      +    gm = exported_program.module()
      +
      +    print("Original Model Structure:\n", gm)
      +
      +    original_output = model(example_input)
      +
      +    # 1. Partition the model into blocks that can be executed by different backends
      +    partitioned_model, op_support = hierarchical_adjacency_partition(
      +        gm,
      +        min_block_size=1,
      +        backend_priority=["inductor", "tensorrt"],
      +        backend_support_map={
      +            "inductor": {
      +                "torch.ops.aten.convolution.default",
      +            },
      +            "tensorrt": CONVERTERS.keys(),
      +        },
      +        torch_executed_ops={
      +            "torch.ops.aten._native_batch_norm_legit_no_training.default"
      +        },
      +        require_full_compilation=False,
      +        skip_fusion=True,
      +    )
      +
      +    print("1. Partitioned Model Structure:\n", partitioned_model)
      +
      +    # 2. Compile each submodule with the corresponding backend
      +    submodule_node_dict = {}
      +    for node in partitioned_model.graph.nodes:
      +        if "_run_on_acc" not in node.name:
      +            continue
      +        submodule_node_dict[node.name] = node
      +
      +    # Store compiled replicas of Torch subgraphs
      +    compiled_modules = {}
      +
      +    for name, _ in partitioned_model.named_children():
      +        submodule = getattr(partitioned_model, name)
      +        if not isinstance(submodule, torch.fx.graph_module.GraphModule):
      +            continue
      +
      +        if "_run_on_acc" not in name:
      +            submodule.to("cuda")
      +            continue
      +
      +        if name not in submodule_node_dict:
      +            raise ValueError(
      +                f"node_name: {name} does not exist in the submodule node dictionary"
      +            )
      +
      +        # set the submodule metadata back to the parent module_node
      +        metadata_list = get_output_metadata(submodule)
      +        assert len(metadata_list) > 0
      +        metadata_keys = ["val", "tensor_meta"]
      +        for key in metadata_keys:
      +            if key not in submodule_node_dict[name].meta:
      +                meta_val_list = [
      +                    metadata[key] for metadata in metadata_list if key in metadata
      +                ]
      +                submodule_node_dict[name].meta[key] = meta_val_list
      +                break
      +
      +        # Get the submodule inputs for min, opt, max shapes of the graph inputs
      +        submodule_inputs = partitioning.construct_submodule_inputs(submodule)
      +        assert submodule_inputs is not None
      +
      +        # compile submodule with pytorch inductor backend
      +        if "_run_on_acc_inductor" in name:
      +            sub_inputs = []
      +            for input in submodule_inputs:
      +                sub_input = input.torch_tensor.to(
      +                    dtype.to(input.dtype, t=torch.dtype)
      +                ).cuda()
      +                sub_inputs.append(sub_input)
      +
      +            compiled_func = torch._inductor.compile(
      +                submodule,
      +                sub_inputs,
      +            )
      +            # Wrap the compiled function to be a torch.nn.Module
      +            compiled_submodule = InductorModule(compiled_func)
      +
      +        # compile submodule with tensorrt backend
      +        elif "_run_on_acc_tensorrt" in name:
      +            compiled_submodule = convert_module(
      +                submodule,
      +                submodule_inputs,
      +                name=name,
      +            )
      +        else:
      +            raise ValueError(f"Unknown backend for submodule: {name}")
      +
      +        compiled_modules[name] = compiled_submodule
      +
      +    # Replace all FX Modules with compiled Modules
      +    for name, compiled_module in compiled_modules.items():
      +        setattr(partitioned_model, name, compiled_module)
      +
      +    print("2. Compiled Model Structure:\n", partitioned_model)
      +
      +    with torch.no_grad():
      +        partitioned_output = partitioned_model(example_input)
      +        print(
      +            "3. Verify that Partitioned output == Original output:",
      +            torch.allclose(partitioned_output, original_output, 1e-2, 1e-2),
      +        )
      +
      +
      +if __name__ == "__main__":
      +    main()
      +
      +
      +

      Total running time of the script: ( 0 minutes 0.000 seconds)

      + +

      Gallery generated by Sphinx-Gallery

      +
      + + +
      + +
      +
      + + + + +
      + + + +
      +

      + © Copyright 2024, NVIDIA Corporation. + +

      +
      + +
      + Built with Sphinx using a theme provided by Read the Docs. +
      + + +
      + +
      +
      + + +
      +
      + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      +
      +
      +
      +

      Docs

      +

      Access comprehensive developer documentation for PyTorch

      + View Docs +
      + +
      +

      Tutorials

      +

      Get in-depth tutorials for beginners and advanced developers

      + View Tutorials +
      + +
      +

      Resources

      +

      Find development resources and get your questions answered

      + View Resources +
      +
      +
      +
      + + + + + + + + + +
      +
      +
      +
      + + +
      +
      +
      + + +
      + + + + + + + + \ No newline at end of file diff --git a/docs/tutorials/_rendered_examples/dynamo/index.html b/docs/tutorials/_rendered_examples/dynamo/index.html index 79988aef63..5528f6ab63 100644 --- a/docs/tutorials/_rendered_examples/dynamo/index.html +++ b/docs/tutorials/_rendered_examples/dynamo/index.html @@ -10,7 +10,7 @@ - Dependencies — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Dependencies — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
      - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
      @@ -322,7 +322,7 @@

      Getting Started

      User Guide

      @@ -374,9 +374,8 @@
    • Compiling ResNet with dynamic shapes using the torch.compile backend
    • Compiling BERT using the torch.compile backend
    • Compiling Stable Diffusion model using the torch.compile backend
    • +
    • Compiling LLM models from Huggingface
    • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
    • -
    • Compiling GPT2 using the dynamo backend
    • -
    • Compiling Llama2 using the dynamo backend
    • Compiling SAM2 using the dynamo backend
    • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    • Legacy notebooks
    • @@ -527,12 +526,12 @@

      Model Zoo

      Torch Compile Advanced Usage

      Torch Compile Advanced Usage
      -

    -

    Torch Export with Cudagraphs

    -
    Torch Export with Cudagraphs

    Engine Caching (BERT)

    Engine Caching (BERT)
    +
    +

    Torch Export with Cudagraphs

    +
    Torch Export with Cudagraphs

    Pre-allocated output buffer

    Pre-allocated output buffer
    @@ -542,12 +541,6 @@

    Model Zoo

    Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend

    Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
    -

    -

    Compiling GPT2 using the dynamo backend

    -
    Compiling GPT2 using the dynamo backend
    -
    -

    Compiling Llama2 using the dynamo backend

    -
    Compiling Llama2 using the dynamo backend

    Automatically Generate a Converter for a Custom Kernel

    Automatically Generate a Converter for a Custom Kernel
    @@ -557,12 +550,18 @@

    Model Zoo

    Overloading Torch-TensorRT Converters with Custom Converters

    Overloading Torch-TensorRT Converters with Custom Converters
    -

    -

    Weight Streaming

    -
    Weight Streaming
    +
    +

    sphx_glr_tutorials__rendered_examples_dynamo_aot_plugin.py

    +
    Torch-TensorRT supports falling back to PyTorch implementations of operations in the case that Torch-TensorRT
    +
    +

    Hierarchical Partitioner Example

    +
    Hierarchical Partitioner Example

    Mutable Torch TensorRT Module

    Mutable Torch TensorRT Module
    +
    +

    Weight Streaming

    +
    Weight Streaming

    Compiling SAM2 using the dynamo backend

    Compiling SAM2 using the dynamo backend
    diff --git a/docs/tutorials/_rendered_examples/dynamo/llama2_flashinfer_rmsnorm.html b/docs/tutorials/_rendered_examples/dynamo/llama2_flashinfer_rmsnorm.html index a4650a9eea..0eda121ab1 100644 --- a/docs/tutorials/_rendered_examples/dynamo/llama2_flashinfer_rmsnorm.html +++ b/docs/tutorials/_rendered_examples/dynamo/llama2_flashinfer_rmsnorm.html @@ -10,7 +10,7 @@ - Automatically generate a TensorRT Plugin for RMSNorm module and apply it in Llama2 — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Automatically generate a TensorRT Plugin for RMSNorm module and apply it in Llama2 — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -291,7 +291,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -322,7 +322,7 @@

    Getting Started

    User Guide

    @@ -374,9 +374,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -738,7 +737,6 @@

    Automatically generate a TensorRT Plugin for RMSNorm module and apply it in disable_tf32=True, use_explicit_typing=False, use_fp32_acc=True, - # debug=True, ) input_ids = input_ids.to(DEVICE) diff --git a/docs/tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example.html b/docs/tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example.html index 417b9c4b86..1caad6e9e2 100644 --- a/docs/tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example.html +++ b/docs/tutorials/_rendered_examples/dynamo/mutable_torchtrt_module_example.html @@ -10,7 +10,7 @@ - Mutable Torch TensorRT Module — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Mutable Torch TensorRT Module — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -515,6 +514,7 @@ import torch import torch_tensorrt as torch_trt import torchvision.models as models +from diffusers import DiffusionPipeline np.random.seed(5) torch.manual_seed(5) @@ -524,7 +524,7 @@

    Initialize the Mutable Torch TensorRT Module with settings.

    settings = {
    -    "use_python": False,
    +    "use_python_runtime": False,
         "enabled_precisions": {torch.float32},
         "immutable_weights": False,
     }
    @@ -565,13 +565,10 @@ 

    Saving Mutable Torch TensorRT Module

    Stable Diffusion with Huggingface

    -
    from diffusers import DiffusionPipeline
    -
    -with torch.no_grad():
    +
    with torch.no_grad():
         settings = {
             "use_python_runtime": True,
             "enabled_precisions": {torch.float16},
    -        "debug": True,
             "immutable_weights": False,
         }
     
    @@ -598,7 +595,7 @@ 

    Stable Diffusion with Huggingface"text_embeds": {0: BATCH}, "time_ids": {0: BATCH}, }, - "return_dict": False, + "return_dict": None, } pipe.unet.set_expected_dynamic_shape_range( args_dynamic_shapes, kwargs_dynamic_shapes @@ -671,7 +668,7 @@

    Use Mutable Torch TensorRT module with dynamic shape}, # a's shape does not change so we give it an empty dict } # Export the model first with custom dynamic shape constraints -model = torch_trt.MutableTorchTensorRTModule(model, debug=True, min_block_size=1) +model = torch_trt.MutableTorchTensorRTModule(model, min_block_size=1) model.set_expected_dynamic_shape_range(args_dynamic_shapes, kwarg_dynamic_shapes) # Compile model(*inputs, **kwargs) @@ -703,7 +700,6 @@

    Use Mutable Torch TensorRT module with persistent cachemodel, use_python_runtime=True, enabled_precisions={torch.float}, - debug=True, min_block_size=1, immutable_weights=False, cache_built_engines=True, diff --git a/docs/tutorials/_rendered_examples/dynamo/pre_allocated_output_example.html b/docs/tutorials/_rendered_examples/dynamo/pre_allocated_output_example.html index 7880d4f997..cba027e9d8 100644 --- a/docs/tutorials/_rendered_examples/dynamo/pre_allocated_output_example.html +++ b/docs/tutorials/_rendered_examples/dynamo/pre_allocated_output_example.html @@ -10,7 +10,7 @@ - Pre-allocated output buffer — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Pre-allocated output buffer — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/tutorials/_rendered_examples/dynamo/refit_engine_example.html b/docs/tutorials/_rendered_examples/dynamo/refit_engine_example.html index 0e9913af33..0dec8a3f6b 100644 --- a/docs/tutorials/_rendered_examples/dynamo/refit_engine_example.html +++ b/docs/tutorials/_rendered_examples/dynamo/refit_engine_example.html @@ -10,7 +10,7 @@ - Refitting Torch-TensorRT Programs with New Weights — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Refitting Torch-TensorRT Programs with New Weights — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -543,7 +542,6 @@

    Make a refittable Compilation Program
    model = models.resnet18(pretrained=False).eval().to("cuda")
     exp_program = torch.export.export(model, tuple(inputs))
     enabled_precisions = {torch.float}
    -debug = False
     workspace_size = 20 << 30
     min_block_size = 0
     use_python_runtime = False
    @@ -553,7 +551,6 @@ 

    Make a refittable Compilation Programtuple(inputs), use_python_runtime=use_python_runtime, enabled_precisions=enabled_precisions, - debug=debug, min_block_size=min_block_size, torch_executed_ops=torch_executed_ops, immutable_weights=False, @@ -586,6 +583,7 @@

    Refit the Program with Pretrained Weights) # Check the output +model2.to("cuda") expected_outputs, refitted_outputs = exp_program2.module()(*inputs), new_trt_gm(*inputs) for expected_output, refitted_output in zip(expected_outputs, refitted_outputs): assert torch.allclose( diff --git a/docs/tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage.html b/docs/tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage.html index 87517d080f..26827ff2dd 100644 --- a/docs/tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage.html +++ b/docs/tutorials/_rendered_examples/dynamo/torch_compile_advanced_usage.html @@ -10,7 +10,7 @@ - Torch Compile Advanced Usage — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Torch Compile Advanced Usage — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -558,7 +557,6 @@

    Compilation with torch.compile Using Custom Settings# py/torch_tensorrt/dynamo/_settings.py backend_kwargs = { "enabled_precisions": {torch.half}, - "debug": True, "min_block_size": 2, "torch_executed_ops": {"torch.ops.aten.sub.Tensor"}, "optimization_level": 4, diff --git a/docs/tutorials/_rendered_examples/dynamo/torch_compile_gpt2.html b/docs/tutorials/_rendered_examples/dynamo/torch_compile_gpt2.html index 0895049c8d..29515fc8c1 100644 --- a/docs/tutorials/_rendered_examples/dynamo/torch_compile_gpt2.html +++ b/docs/tutorials/_rendered_examples/dynamo/torch_compile_gpt2.html @@ -10,7 +10,7 @@ - Compiling GPT2 using the Torch-TensorRT torch.compile frontend — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Compiling GPT2 using the Torch-TensorRT torch.compile frontend — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -39,8 +39,8 @@ - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    +
    + + + + + +
    +
    +
    + + + + + + + + + + + +
    +
    +
    + + + + + + + + + + + + + + + + +
    + +
      + +
    • + + + Docs + + > +
    • + + +
    • Compiling LLM models from Huggingface
    • + + +
    • + + + + + +
    • + +
    + + +
    +
    + +
    + Shortcuts +
    +
    + +
    +
    + + + + + + +
    + +
    +
    + +
    +

    Compiling LLM models from Huggingface

    +

    This tutorial walks you through how to compile LLM models from Huggingface using Torch-TensorRT. We also introduce KV caching in Torch-TensorRT which can greatly improve the performance of LLM inference. +The code is available in the tools/llm directory. We use the run_llm.py script to compile the model, generate outputs, and measure the performance.

    +
    +

    Note

    +

    This is an experimental release and APIs may change in future versions.

    +
    +
    +

    Note

    +

    The compilation scripts and tutorials for Llama-2-7b-chat-hf and gpt2 models have been consolidated into the unified run_llm.py script located in the tools/llm directory.

    +
    +
    +

    Overview of tools/llm Directory

    +

    The tools/llm directory provides the following tools to compile LLM models from Huggingface:

    +
      +
    • run_llm.py: Main entry point for model compilation, generating outputs, and benchmarking

    • +
    • Static Cache Utilities: static_cache_v1.py and static_cache_v2.py for KV cache optimization

    • +
    • SDPA Attention: sdpa_converter.py and register_sdpa.py for registering scaled dot-product attention converter and lowering pass.

    • +
    • Testing Components: Model-specific test files for validation

    • +
    • Utility Functions: utils.py and cache_utils.py for common operations

    • +
    +
    +
    +

    Supported Models

    +

    We have officially verified support for the following LLM families:

    + ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Model Series

    HuggingFace Model Card

    Precision

    KV Cache Support ?

    GPT-2

    gpt2

    FP16, FP32

    Yes

    LLaMA 2

    meta-llama/Llama-2-7b-chat-hf

    FP16, FP32

    Yes

    LLaMA 3.1

    meta-llama/Llama-3.1-8B-Instruct

    FP16, FP32

    Yes

    LLaMA 3.2

    +
    meta-llama/Llama-3.2-1B-Instruct
    +
    meta-llama/Llama-3.2-3B-Instruct
    +
    +

    FP16, FP32

    Yes

    Qwen 2.5

    +
    Qwen/Qwen2.5-0.5B-Instruct
    +
    Qwen/Qwen2.5-1.5B-Instruct
    +
    Qwen/Qwen2.5-3B-Instruct
    +
    Qwen/Qwen2.5-7B-Instruct
    +
    +

    FP16, FP32

    Yes

    +
    +
    +

    Getting Started with run_llm.py

    +

    The main entry point is run_llm.py, which provides a complete workflow for model compilation and benchmarking.

    +
    +

    Basic Usage

    +
    python tools/llm/run_llm.py \
    +  --model meta-llama/Llama-3.2-1B-Instruct \
    +  --prompt "What is parallel programming?" \
    +  --precision FP16 \
    +  --num_tokens 128 \
    +  --cache static_v2 \
    +  --benchmark
    +
    +
    +
    +
    +

    Key Arguments

    +
      +
    • --model: Name or path of the HuggingFace LLM

    • +
    • --tokenizer: (Optional) Tokenizer name; defaults to model name

    • +
    • --prompt: Input prompt for text generation

    • +
    • --precision: Precision mode (FP16, FP32)

    • +
    • --num_tokens: Number of output tokens to generate

    • +
    • --cache: KV cache type (static_v1, static_v2, or empty for no KV caching)

    • +
    • --benchmark: Enable benchmarking mode for performance comparison

    • +
    • --enable_pytorch_run: Also run and compare PyTorch baseline

    • +
    +
    +
    +

    Other Usage Examples

    +
    # Compare different models performance
    +python tools/llm/run_llm.py --model gpt2 --benchmark --enable_pytorch_run
    +python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --benchmark --enable_pytorch_run
    +
    +# Generate the outputs (disable benchmarking) by specifying the number of tokens to generate. Default = 128
    +python tools/llm/run_llm.py --model gpt2 --prompt "What is parallel programming?" --num_tokens 128
    +python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --prompt "What is parallel programming?" --num_tokens 128
    +
    +# Test different caching approaches
    +python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --cache static_v1
    +python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --cache static_v2
    +
    +# Compare FP16 vs FP32 performance
    +python tools/llm/run_llm.py --model Qwen/Qwen2.5-1.5B-Instruct --precision FP16 --benchmark
    +python tools/llm/run_llm.py --model Qwen/Qwen2.5-1.5B-Instruct --precision FP32 --benchmark
    +
    +
    +
    +
    +
    +

    KV Caching in Torch-TensorRT

    +

    We provide two versions of static KV caching: static_cache_v1 and static_cache_v2. +In both implementations, we add static KV cache tensors as model inputs/outputs without storing them as external memory. +The length of KV cache = input sequence length + output sequence length (specified by --num_tokens). The number of heads and head dimension are determined by the model config.

    +
    +

    Static Cache v1

    +

    The static_cache_v1.py implements KV cache in the model graph as follows:

    +
    class StaticCacheV1Model(nn.Module):
    +    def __init__(self):
    +        super().__init__()
    +
    +    def forward(self, q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal=True):
    +        # Concatenate new key/value pairs with existing cache
    +        new_key_cache = torch.cat((key_cache[:, :, :start_idx, :], k, key_cache[:, :, end_idx:, :]), dim=2)
    +        new_value_cache = torch.cat((value_cache[:, :, :start_idx, :], v, value_cache[:, :, end_idx:, :]), dim=2)
    +
    +        # Compute attention using the updated cache
    +        attn_output = torch._C._nn.scaled_dot_product_attention(
    +            q,
    +            new_key_cache[:, :, :end_idx, :],
    +            new_value_cache[:, :, :end_idx, :],
    +            dropout_p=0.0,
    +            is_causal=is_causal
    +        )
    +
    +        return attn_output, new_key_cache, new_value_cache
    +
    +
    +

    In the above code, we concatenate the new key/value pairs with the existing cache and update it. To compute the attention, we use the updated cache and gather the corresponding keys/values from the cache up until and including the current token index. The above code is actually implemented as an FX graph transformation pass. We register it as a Torch-TensorRT lowering pass using the decorator @_aten_lowering_pass when we import the static_cache_v1.py module.

    +
    +

    Note

    +

    The start_idx and end_idx are the start and end indices of the current token in the cache. For prefill phase, start_idx is 0 and end_idx is the input sequence length. +For decode phase, start_idx begins at the input sequence length and end_idx equals start_idx + 1. The start_idx is incremented by 1 until the end of the sequence or we reach the maximum number of tokens to generate.

    +
    +
    +
    +

    Static Cache v2

    +

    The static_cache_v2.py is similar to static_cache_v1.py but it uses fewer slice operations. It implements KV cache in the model graph as follows:

    +
    class StaticCacheV2Model(nn.Module):
    +    def __init__(self):
    +        super().__init__()
    +
    +    def forward(self, q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal=True):
    +        concat_keys = torch.cat((key_cache[:, :, :start_idx, :], k), dim=2)
    +        concat_values = torch.cat((value_cache[:, :, :start_idx, :], v), dim=2)
    +        new_key_cache = torch.cat((concat_keys, key_cache[:, :, end_idx:, :]), dim=2)
    +        new_value_cache = torch.cat((concat_values, value_cache[:, :, end_idx:, :]), dim=2)
    +        attn_output = torch._C._nn.scaled_dot_product_attention(
    +              q, concat_keys, concat_values, dropout_p=0.0, is_causal=is_causal
    +        )
    +
    +        return attn_output, new_key_cache, new_value_cache
    +
    +
    +

    In the above code, we concatenate the existing key/value cache with the current key/value of the token. We use this to directly compute the attention and update the key/value cache by inserting the current key/value. The above code is actually implemented as an FX graph transformation pass. We register it as a Torch-TensorRT lowering pass using the decorator @_aten_lowering_pass when we import the static_cache_v2.py module. The definitions of start_idx and end_idx are the same as in static_cache_v1.py.

    +

    After the model is compiled with static KV cache, the input signature of the model is changed. The new input signature is (input_ids, position_ids, key_cache_0, value_cache_0, ..., start_idx, end_idx). +The number of key/value cache tensors is equal to the number of attention heads in the model. We can use the generate_with_static_cache function to generate the outputs.

    +
    +
    +
    +

    Generating Outputs

    +

    We use a custom generate function to generate the outputs. This function performs standard autoregressive decoding without KV caching. There is also a generate_with_static_cache function that performs autoregressive decoding with KV caching.

    +

    The generate_with_static_cache function takes care of preparing the inputs to the model compiled with static KV cache. +The model inputs are input_ids, position_ids, key_cache_0, value_cache_0, …., start_idx, end_idx. +We initialize the key/value cache tensors with zeros and for every token generated, the new key/value cache tensors are the outputs of the model.

    +
    +

    SDPA Converter (sdpa_converter.py)

    +
      +
    • Converts scaled dot-product attention operation using TRT Python API.

    • +
    • Supports causal and standard self-attention.

    • +
    +
    +
    +

    SDPA Registration (register_sdpa.py)

    +
      +
    • This is a Torch-TensorRT lowering pass that replaces variants of SDPA with torch.nn.functional.scaled_dot_product_attention.

    • +
    • Registers the SDPA converter which is used for converting torch.nn.functional.scaled_dot_product_attention operation.

    • +
    +
    +
    +
    +

    Limitations and Known Issues

    +
      +
    • Sliding window attention (used in Gemma3 and Qwen 3 models) is not yet supported

    • +
    • Some model architectures (e.g. Phi-4) have issues with exporting the torch model.

    • +
    +
    +

    Requirements

    +
      +
    • Torch-TensorRT 2.8.0 or later

    • +
    • Transformers v4.52.3

    • +
    +
    +
    +
    + + +
    + +
    + + +
    +
    + + +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    +
    +
    +

    Docs

    +

    Access comprehensive developer documentation for PyTorch

    + View Docs +
    + +
    +

    Tutorials

    +

    Get in-depth tutorials for beginners and advanced developers

    + View Tutorials +
    + +
    +

    Resources

    +

    Find development resources and get your questions answered

    + View Resources +
    +
    +
    +
    + + + + + + + + + +
    +
    +
    +
    + + +
    +
    +
    + + +
    + + + + + + + + \ No newline at end of file diff --git a/docs/tutorials/notebooks.html b/docs/tutorials/notebooks.html index c37a891978..c6f6f6e8e9 100644 --- a/docs/tutorials/notebooks.html +++ b/docs/tutorials/notebooks.html @@ -10,7 +10,7 @@ - Legacy notebooks — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Legacy notebooks — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/tutorials/serving_torch_tensorrt_with_triton.html b/docs/tutorials/serving_torch_tensorrt_with_triton.html index 11e6a5a890..6ddbe2d7f5 100644 --- a/docs/tutorials/serving_torch_tensorrt_with_triton.html +++ b/docs/tutorials/serving_torch_tensorrt_with_triton.html @@ -10,7 +10,7 @@ - Serving a Torch-TensorRT model with Triton — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Serving a Torch-TensorRT model with Triton — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/user_guide/dynamic_shapes.html b/docs/user_guide/dynamic_shapes.html index a3864c3b26..aa194b278e 100644 --- a/docs/user_guide/dynamic_shapes.html +++ b/docs/user_guide/dynamic_shapes.html @@ -10,7 +10,7 @@ - Dynamic shapes with Torch-TensorRT — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Dynamic shapes with Torch-TensorRT — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/user_guide/mixed_precision.html b/docs/user_guide/mixed_precision.html index 0410976908..a69166c136 100644 --- a/docs/user_guide/mixed_precision.html +++ b/docs/user_guide/mixed_precision.html @@ -10,7 +10,7 @@ - Compile Mixed Precision models with Torch-TensorRT — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Compile Mixed Precision models with Torch-TensorRT — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/user_guide/runtime.html b/docs/user_guide/runtime.html index 70b6a61eac..7cb9e413bd 100644 --- a/docs/user_guide/runtime.html +++ b/docs/user_guide/runtime.html @@ -10,7 +10,7 @@ - Deploying Torch-TensorRT Programs — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Deploying Torch-TensorRT Programs — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -512,7 +511,7 @@

    Runtime LibraryNote

    If you are linking libtorchtrt_runtime.so, likely using the following flags will help -Wl,--no-as-needed -ltorchtrt -Wl,--as-needed as there’s no direct symbol dependency to anything in the Torch-TensorRT runtime for most Torch-TensorRT runtime applications

    -

    An example of how to use libtorchtrt_runtime.so can be found here: https://github.com/pytorch/TensorRT/tree/master/examples/torchtrt_runtime_example

    +

    An example of how to use libtorchtrt_runtime.so can be found here: https://github.com/pytorch/TensorRT/tree/master/examples/torchtrt_aoti_example

    Plugin Library

    @@ -605,6 +604,116 @@

    Dynamic Output Allocation Mode +

    Deploying Torch-TensorRT Programs without Python

    +
    +

    AOT-Inductor

    +

    AOTInductor is a specialized version of TorchInductor, designed to process exported PyTorch models, optimize them, and produce shared +libraries as well as other relevant artifacts. These compiled artifacts are specifically crafted for deployment in non-Python environments, +which are frequently employed for inference deployments on the server side.

    +

    Torch-TensorRT is able to accelerate subgraphs within AOTInductor exports in the same way it does in Python.

    +
    dynamo_model = torch_tensorrt.compile(model, ir="dynamo", arg_inputs=[...])
    +torch_tensorrt.save(
    +    dynamo_model,
    +    file_path=os.path.join(os.getcwd(), "model.pt2"),
    +    output_format="aot_inductor",
    +    retrace=True,
    +    arg_inputs=[...],
    +)
    +
    +
    +

    This artifact can then be loaded in a C++ application to be executed without a Python dependency.

    +
    #include <iostream>
    +#include <vector>
    +
    +#include "torch/torch.h"
    +#include "torch/csrc/inductor/aoti_package/model_package_loader.h"
    +
    +int main(int argc, const char* argv[]) {
    +// Check for correct number of command-line arguments
    +std::string trt_aoti_module_path = "model.pt2";
    +
    +if (argc == 2) {
    +    trt_aoti_module_path = argv[1];
    +}
    +
    +    std::cout << trt_aoti_module_path << std::endl;
    +
    +    // Get the path to the TRT AOTI model package from the command line
    +    c10::InferenceMode mode;
    +
    +    torch::inductor::AOTIModelPackageLoader loader(trt_aoti_module_path);
    +    // Assume running on CUDA
    +    std::vector<torch::Tensor> inputs = {torch::randn({8, 10}, at::kCUDA)};
    +    std::vector<torch::Tensor> outputs = loader.run(inputs);
    +    std::cout << "Result from the first inference:"<< std::endl;
    +    std::cout << outputs << std::endl;
    +
    +    // The second inference uses a different batch size and it works because we
    +    // specified that dimension as dynamic when compiling model.pt2.
    +    std::cout << "Result from the second inference:"<< std::endl;
    +    // Assume running on CUDA
    +    std::cout << loader.run({torch::randn({1, 10}, at::kCUDA)}) << std::endl;
    +
    +    return 0;
    +}
    +
    +
    +

    Note: Similar to Python, at runtime, no Torch-TensorRT APIs are used to operate the model. Therefore typically additional +flags are needed to make sure that libtorchtrt_runtime.so gets optimized out (see above).

    +

    See: //examples/torchtrt_aoti_example for a full end to end demo of this workflow

    +
    +
    +

    TorchScript

    +

    TorchScript is a legacy compiler stack for PyTorch that includes a Python-less interpreter for TorchScript programs. +It has historically been used by Torch-TensorRT to execute models without Python. Even after the transition to TorchDynamo, +the TorchScript interpreter can continue to be used to run PyTorch models with TensorRT engines outside of Python.

    +
    dynamo_model = torch_tensorrt.compile(model, ir="dynamo", arg_inputs=[...])
    +ts_model = torch.jit.trace(dynamo_model, inputs=[...])
    +torch.jit.save(ts_model, os.path.join(os.getcwd(), "model.ts"),)
    +
    +
    +

    This artifact can then be loaded in a C++ application to be executed without a Python dependency.

    +
    #include <fstream>
    +#include <iostream>
    +#include <memory>
    +#include <sstream>
    +#include <vector>
    +#include "torch/script.h"
    +
    +int main(int argc, const char* argv[]) {
    +    if (argc < 2) {
    +        std::cerr << "usage: samplertapp <path-to-pre-built-trt-ts module>\n";
    +        return -1;
    +    }
    +
    +    std::string trt_ts_module_path = argv[1];
    +
    +    torch::jit::Module trt_ts_mod;
    +    try {
    +        // Deserialize the ScriptModule from a file using torch::jit::load().
    +        trt_ts_mod = torch::jit::load(trt_ts_module_path);
    +    } catch (const c10::Error& e) {
    +        std::cerr << "error loading the model from : " << trt_ts_module_path << std::endl;
    +        return -1;
    +    }
    +
    +    std::cout << "Running TRT engine" << std::endl;
    +    std::vector<torch::jit::IValue> trt_inputs_ivalues;
    +    trt_inputs_ivalues.push_back(at::randint(-5, 5, {1, 3, 5, 5}, {at::kCUDA}).to(torch::kFloat32));
    +    torch::jit::IValue trt_results_ivalues = trt_ts_mod.forward(trt_inputs_ivalues);
    +    std::cout << "==================TRT outputs================" << std::endl;
    +    std::cout << trt_results_ivalues << std::endl;
    +    std::cout << "=============================================" << std::endl;
    +    std::cout << "TRT engine execution completed. " << std::endl;
    +}
    +
    +
    +

    Note: Similar to Python, at runtime, no Torch-TensorRT APIs are used to operate the model. Therefore typically additional +flags are needed to make sure that libtorchtrt_runtime.so gets optimized out (see above).

    +

    See: //examples/torchtrt_runtime_example for a full end to end demo of this workflow

    +
    +

    @@ -657,6 +766,11 @@

    Dynamic Output Allocation ModeMulti Device Safe Mode
  • Cudagraphs Mode
  • Dynamic Output Allocation Mode
  • +
  • Deploying Torch-TensorRT Programs without Python +
  • diff --git a/docs/user_guide/saving_models.html b/docs/user_guide/saving_models.html index 54f414bbc2..5ef7273621 100644 --- a/docs/user_guide/saving_models.html +++ b/docs/user_guide/saving_models.html @@ -10,7 +10,7 @@ - Saving models compiled with Torch-TensorRT — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Saving models compiled with Torch-TensorRT — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • @@ -497,11 +496,12 @@

    Dynamo IR

    The output type of ir=dynamo compilation of Torch-TensorRT is torch.fx.GraphModule object by default. -We can save this object in either TorchScript (torch.jit.ScriptModule) or ExportedProgram (torch.export.ExportedProgram) formats by +We can save this object in either TorchScript (torch.jit.ScriptModule), ExportedProgram (torch.export.ExportedProgram) or PT2 formats by specifying the output_format flag. Here are the options output_format will accept

    • exported_program : This is the default. We perform transformations on the graphmodule first and use torch.export.save to save the module.

    • torchscript : We trace the graphmodule via torch.jit.trace and save it via torch.jit.save.

    • +
    • PT2 Format : This is a next generation runtime for PyTorch models, allowing them to run in Python and in C++

    a) ExportedProgram

    @@ -529,8 +529,8 @@

    b) Torchscriptmodel = MyModel().eval().cuda() inputs = [torch.randn((1, 3, 224, 224)).cuda()] # trt_gm is a torch.fx.GraphModule object -trt_gm = torch_tensorrt.compile(model, ir="dynamo", inputs=inputs) -torch_tensorrt.save(trt_gm, "trt.ts", output_format="torchscript", inputs=inputs) +trt_gm = torch_tensorrt.compile(model, ir="dynamo", arg_inputs=inputs) +torch_tensorrt.save(trt_gm, "trt.ts", output_format="torchscript", arg_inputs=inputs) # Later, you can load it and run inference model = torch.jit.load("trt.ts").cuda() @@ -548,7 +548,7 @@

    Torchscript IRmodel = MyModel().eval().cuda() inputs = [torch.randn((1, 3, 224, 224)).cuda()] -trt_ts = torch_tensorrt.compile(model, ir="ts", inputs=inputs) # Output is a ScriptModule object +trt_ts = torch_tensorrt.compile(model, ir="ts", arg_inputs=inputs) # Output is a ScriptModule object torch.jit.save(trt_ts, "trt_model.ts") # Later, you can load it and run inference @@ -571,6 +571,26 @@

    Loading the modelsmodel(*inputs)

    +
    +

    b) PT2 Format

    +

    PT2 is a new format that allows models to be run outside of Python in the future. It utilizes AOTInductor +to generate kernels for components that will not be run in TensorRT.

    +

    Here’s an example on how to save and load Torch-TensorRT Module using AOTInductor in Python

    +
    import torch
    +import torch_tensorrt
    +
    +model = MyModel().eval().cuda()
    +inputs = [torch.randn((1, 3, 224, 224)).cuda()]
    +# trt_ep is a torch.fx.GraphModule object
    +trt_gm = torch_tensorrt.compile(model, ir="dynamo", inputs=inputs)
    +torch_tensorrt.save(trt_gm, "trt.pt2", arg_inputs=inputs, output_format="aot_inductor", retrace=True)
    +
    +# Later, you can load it and run inference
    +model = torch._inductor.aoti_load_package("trt.pt2")
    +model(*inputs)
    +
    +
    +
    @@ -624,7 +644,10 @@

    Loading the modelsTorchscript IR -
  • Loading the models
  • +
  • Loading the models +
  • diff --git a/docs/user_guide/torch_tensorrt_explained.html b/docs/user_guide/torch_tensorrt_explained.html index c702bbae19..234a8cb9ff 100644 --- a/docs/user_guide/torch_tensorrt_explained.html +++ b/docs/user_guide/torch_tensorrt_explained.html @@ -10,7 +10,7 @@ - Torch-TensorRT Explained — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + Torch-TensorRT Explained — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docs/user_guide/using_dla.html b/docs/user_guide/using_dla.html index f0ffe332cc..724f7c05c1 100644 --- a/docs/user_guide/using_dla.html +++ b/docs/user_guide/using_dla.html @@ -10,7 +10,7 @@ - DLA — Torch-TensorRT v2.8.0.dev0+ee32da0 documentation + DLA — Torch-TensorRT v2.9.0.dev0+92a6908 documentation @@ -293,7 +293,7 @@
    - v2.8.0.dev0+ee32da0 + v2.9.0.dev0+92a6908
    @@ -324,7 +324,7 @@

    Getting Started

    User Guide

    @@ -376,9 +376,8 @@
  • Compiling ResNet with dynamic shapes using the torch.compile backend
  • Compiling BERT using the torch.compile backend
  • Compiling Stable Diffusion model using the torch.compile backend
  • +
  • Compiling LLM models from Huggingface
  • Compiling GPT2 using the Torch-TensorRT torch.compile frontend
  • -
  • Compiling GPT2 using the dynamo backend
  • -
  • Compiling Llama2 using the dynamo backend
  • Compiling SAM2 using the dynamo backend
  • Compiling FLUX.1-dev model using the Torch-TensorRT dynamo backend
  • Legacy notebooks
  • diff --git a/docsrc/index.rst b/docsrc/index.rst index 67fbdc56f5..4d28d77640 100644 --- a/docsrc/index.rst +++ b/docsrc/index.rst @@ -140,11 +140,10 @@ Model Zoo * :ref:`torch_compile_resnet` * :ref:`torch_compile_transformer` * :ref:`torch_compile_stable_diffusion` +* :ref:`compile_hf_models` * :ref:`torch_compile_gpt2` * :ref:`torch_export_gpt2` -* :ref:`torch_export_llama2` * :ref:`torch_export_sam2` -* :ref:`torch_export_flux_dev` * :ref:`notebooks` .. toctree:: @@ -155,11 +154,10 @@ Model Zoo tutorials/_rendered_examples/dynamo/torch_compile_resnet_example tutorials/_rendered_examples/dynamo/torch_compile_transformers_example tutorials/_rendered_examples/dynamo/torch_compile_stable_diffusion + tutorials/compile_hf_models tutorials/_rendered_examples/distributed_inference/data_parallel_gpt2 tutorials/_rendered_examples/distributed_inference/data_parallel_stable_diffusion tutorials/_rendered_examples/dynamo/torch_compile_gpt2 - tutorials/_rendered_examples/dynamo/torch_export_gpt2 - tutorials/_rendered_examples/dynamo/torch_export_llama2 tutorials/_rendered_examples/dynamo/torch_export_sam2 tutorials/_rendered_examples/dynamo/torch_export_flux_dev tutorials/notebooks diff --git a/docsrc/tutorials/compile_hf_models.rst b/docsrc/tutorials/compile_hf_models.rst new file mode 100644 index 0000000000..f6da87b145 --- /dev/null +++ b/docsrc/tutorials/compile_hf_models.rst @@ -0,0 +1,218 @@ +.. _compile_hf_models: + +Compiling LLM models from Huggingface +====================================== + +This tutorial walks you through how to compile LLM models from Huggingface using Torch-TensorRT. We also introduce KV caching in Torch-TensorRT which can greatly improve the performance of LLM inference. +The code is available in the `tools/llm `_ directory. We use the ``run_llm.py`` script to compile the model, generate outputs, and measure the performance. + +.. note:: + This is an **experimental release** and APIs may change in future versions. + +.. 
note:: + The compilation scripts and tutorials for Llama-2-7b-chat-hf and gpt2 models have been consolidated into the unified ``run_llm.py`` script located in the `tools/llm `_ directory. + +Overview of tools/llm Directory +------------------------------- + +The ``tools/llm`` directory provides the following tools to compile LLM models from Huggingface: + +* **run_llm.py**: Main entry point for model compilation, generating outputs, and benchmarking +* **Static Cache Utilities**: ``static_cache_v1.py`` and ``static_cache_v2.py`` for KV cache optimization +* **SDPA Attention**: ``sdpa_converter.py`` and ``register_sdpa.py`` for registering scaled dot-product attention converter and lowering pass. +* **Testing Components**: Model-specific test files for validation +* **Utility Functions**: ``utils.py`` and ``cache_utils.py`` for common operations + +Supported Models +---------------- +We have officially verified support for the following LLM families: + +.. list-table:: + :widths: 20 40 20 20 + :header-rows: 1 + + * - Model Series + - HuggingFace Model Card + - Precision + - KV Cache Support ? + * - GPT-2 + - gpt2 + - FP16, FP32 + - Yes + * - LLaMA 2 + - meta-llama/Llama-2-7b-chat-hf + - FP16, FP32 + - Yes + * - LLaMA 3.1 + - meta-llama/Llama-3.1-8B-Instruct + - FP16, FP32 + - Yes + * - LLaMA 3.2 + - | meta-llama/Llama-3.2-1B-Instruct + | meta-llama/Llama-3.2-3B-Instruct + - FP16, FP32 + - Yes + * - Qwen 2.5 + - | Qwen/Qwen2.5-0.5B-Instruct + | Qwen/Qwen2.5-1.5B-Instruct + | Qwen/Qwen2.5-3B-Instruct + | Qwen/Qwen2.5-7B-Instruct + - FP16, FP32 + - Yes + +Getting Started with run_llm.py +------------------------------- + +The main entry point is ``run_llm.py``, which provides a complete workflow for model compilation and benchmarking. + +Basic Usage +^^^^^^^^^^^ + +.. code-block:: bash + + python tools/llm/run_llm.py \ + --model meta-llama/Llama-3.2-1B-Instruct \ + --prompt "What is parallel programming?" 
\ + --precision FP16 \ + --num_tokens 128 \ + --cache static_v2 \ + --benchmark + +Key Arguments +^^^^^^^^^^^^^ + +* ``--model``: Name or path of the HuggingFace LLM +* ``--tokenizer``: (Optional) Tokenizer name; defaults to model name +* ``--prompt``: Input prompt for text generation +* ``--precision``: Precision mode (``FP16``, ``FP32``) +* ``--num_tokens``: Number of output tokens to generate +* ``--cache``: KV cache type (``static_v1``, ``static_v2``, or empty for no KV caching) +* ``--benchmark``: Enable benchmarking mode for performance comparison +* ``--enable_pytorch_run``: Also run and compare PyTorch baseline + + +Other Usage Examples +^^^^^^^^^^^^^^^^^^^^ +.. code-block:: bash + + # Compare different models performance + python tools/llm/run_llm.py --model gpt2 --benchmark --enable_pytorch_run + python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --benchmark --enable_pytorch_run + + # Generate the outputs (disable benchmarking) by specifying the number of tokens to generate. Default = 128 + python tools/llm/run_llm.py --model gpt2 --prompt "What is parallel programming?" --num_tokens 128 + python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --prompt "What is parallel programming?" --num_tokens 128 + + # Test different caching approaches + python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --cache static_v1 + python tools/llm/run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --cache static_v2 + + # Compare FP16 vs FP32 performance + python tools/llm/run_llm.py --model Qwen/Qwen2.5-1.5B-Instruct --precision FP16 --benchmark + python tools/llm/run_llm.py --model Qwen/Qwen2.5-1.5B-Instruct --precision FP32 --benchmark + + +KV Caching in Torch-TensorRT +--------------------------------- + +We provide two versions of static KV caching: `static_cache_v1 `_ and `static_cache_v2 `_. +In both implementations, we add static KV cache tensors as model inputs/outputs without storing them as external memory. 
+The length of KV cache = input sequence length + output sequence length (specified by ``--num_tokens``). The number of heads and head dimension are determined by the model config. + +Static Cache v1 +^^^^^^^^^^^^^^^^ + +The ``static_cache_v1.py`` implements KV cache in the model graph as follows: + +.. code-block:: python + + class StaticCacheV1Model(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal=True): + # Concatenate new key/value pairs with existing cache + new_key_cache = torch.cat((key_cache[:, :, :start_idx, :], k, key_cache[:, :, end_idx:, :]), dim=2) + new_value_cache = torch.cat((value_cache[:, :, :start_idx, :], v, value_cache[:, :, end_idx:, :]), dim=2) + + # Compute attention using the updated cache + attn_output = torch._C._nn.scaled_dot_product_attention( + q, + new_key_cache[:, :, :end_idx, :], + new_value_cache[:, :, :end_idx, :], + dropout_p=0.0, + is_causal=is_causal + ) + + return attn_output, new_key_cache, new_value_cache + +In the above code, we concatenate the new key/value pairs with the existing cache and update it. To compute the attention, we use the updated cache and gather the corresponding keys/values from the cache up until and including the current token index. +The above code is actually implemented as a FX graph transformation pass. We register it as a Torch-TensorRT lowering pass using the decorator ``@_aten_lowering_pass`` when we import the ``static_cache_v1.py`` module. + +.. note:: + The ``start_idx`` and ``end_idx`` are the start and end indices of the current token in the cache. For prefill phase, ``start_idx`` is 0 and ``end_idx`` is the input sequence length. + For decode phase, ``start_idx`` begins at the input sequence length and ``end_idx`` equals ``start_idx + 1``. The ``start_idx`` is incremented by 1 until the end of the sequence or we reach the maximum number of tokens to generate. 
+ + +Static Cache v2 +^^^^^^^^^^^^^^^^ + +The ``static_cache_v2.py`` is similar to ``static_cache_v1.py`` but it uses fewer slice operations. It implements KV cache in the model graph as follows: + +.. code-block:: python + + class StaticCacheV2Model(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal=True): + concat_keys = torch.cat((key_cache[:, :, :start_idx, :], k), dim=2) + concat_values = torch.cat((value_cache[:, :, :start_idx, :], v), dim=2) + new_key_cache = torch.cat((concat_keys, key_cache[:, :, end_idx:, :]), dim=2) + new_value_cache = torch.cat((concat_values, value_cache[:, :, end_idx:, :]), dim=2) + attn_output = torch._C._nn.scaled_dot_product_attention( + q, concat_keys, concat_values, dropout_p=0.0, is_causal=is_causal + ) + + return attn_output, new_key_cache, new_value_cache + +In the above code, we concatenate the existing key/value cache with the current key/value of the token. We use this to directly compute the attention and update the key/value cache inserting the current key/value. +The above code is actually implemented as an FX graph transformation pass. We register it as a Torch-TensorRT lowering pass using the decorator ``@_aten_lowering_pass`` when we import the ``static_cache_v2.py`` module. +The definitions of ``start_idx`` and ``end_idx`` are the same as ``static_cache_v1.py``. + +After the model is compiled with static KV cache, the input signature of the model is changed. The new input signature is ``(input_ids, position_ids, key_cache_0, value_cache_0, ..., start_idx, end_idx)``. +The number of key/value cache tensors is equal to the number of attention heads in the model. We can use the ``generate_with_static_cache`` function to generate the outputs. + +Generating Outputs +------------------- +We use a custom `generate `_ function to generate the outputs. This function performs standard autoregressive decoding without KV caching. 
+There is also a `generate_with_static_cache `_ function that performs autoregressive decoding with KV caching. + +The ``generate_with_static_cache`` function takes care of preparing the inputs to the model compiled with static KV cache. +The model inputs are ``input_ids``, ``position_ids``, ``key_cache_0``, ``value_cache_0``, ...., ``start_idx``, ``end_idx``. +We initialize the key/value cache tensors with zeros and for every token generated, the new key/value cache tensors are the outputs of the model. + +SDPA Converter (sdpa_converter.py) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Converts scaled dot-product attention operation using TRT Python API. +* Supports causal and standard self-attention. + +SDPA Registration (register_sdpa.py) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* This is a Torch-TensorRT lowering pass that replaces variants of SDPA with ``torch.nn.functional.scaled_dot_product_attention``. +* Registers the SDPA converter which is used for converting ``torch.nn.functional.scaled_dot_product_attention`` operation. + + +Limitations and Known Issues +---------------------------- + +* Sliding window attention (used in Gemma3 and Qwen 3 models) is not yet supported +* Some model architectures (e.g. Phi-4) have issues with exporting the torch model. 
+ +Requirements +^^^^^^^^^^^^ + +* Torch-TensorRT 2.8.0 or later +* Transformers v4.52.3 \ No newline at end of file diff --git a/examples/apps/README.md b/examples/apps/README.md index ac63500d29..b6b77e17f1 100644 --- a/examples/apps/README.md +++ b/examples/apps/README.md @@ -23,6 +23,11 @@ python flux_demo.py ### Using Different Precision Modes +- FP4 mode: +```bash +python flux_demo.py --dtype fp4 +``` + - FP8 mode: ```bash python flux_demo.py --dtype fp8 diff --git a/examples/apps/flux_demo.py b/examples/apps/flux_demo.py index 4e8aaf3a4e..5220f38ec6 100644 --- a/examples/apps/flux_demo.py +++ b/examples/apps/flux_demo.py @@ -12,10 +12,6 @@ from diffusers import FluxPipeline from diffusers.models.transformers.transformer_flux import FluxTransformer2DModel -# Register SDPA as a standalone operator. Converter and lowering pass are defined in register_sdpa.py -sys.path.append(os.path.join(os.path.dirname(__file__), "../dynamo")) -from register_sdpa import * - DEVICE = "cuda:0" @@ -24,8 +20,25 @@ def compile_model( ) -> tuple[ FluxPipeline, FluxTransformer2DModel, torch_tensorrt.MutableTorchTensorRTModule ]: + use_explicit_typing = False + if args.use_sdpa: + # currently use sdpa is not working correctly with flux model, so we don't use it + # Register SDPA as a standalone operator. 
Converter and lowering pass are defined in register_sdpa.py + sys.path.append( + os.path.join(os.path.dirname(__file__), "../../tools/llm/torchtrt_ext") + ) + import register_sdpa + + if args.dtype == "fp4": + use_explicit_typing = True + enabled_precisions = {torch.float4_e2m1fn_x2} + ptq_config = mtq.NVFP4_DEFAULT_CFG + if args.fp4_mha: + from modelopt.core.torch.quantization.config import NVFP4_FP8_MHA_CONFIG + + ptq_config = NVFP4_FP8_MHA_CONFIG - if args.dtype == "fp8": + elif args.dtype == "fp8": enabled_precisions = {torch.float8_e4m3fn, torch.float16} ptq_config = mtq.FP8_DEFAULT_CFG @@ -109,24 +122,31 @@ def forward_loop(mod): "min_block_size": 1, "use_python_runtime": True, "immutable_weights": False, - "offload_module_to_cpu": True, + "offload_module_to_cpu": args.low_vram_mode, + "use_explicit_typing": use_explicit_typing, } if args.low_vram_mode: pipe.remove_all_hooks() pipe.enable_sequential_cpu_offload() remove_hook_from_module(pipe.transformer, recurse=True) pipe.transformer.to(DEVICE) + trt_gm = torch_tensorrt.MutableTorchTensorRTModule(backbone, **settings) if dynamic_shapes: trt_gm.set_expected_dynamic_shape_range((), dynamic_shapes) pipe.transformer = trt_gm - + seed = 42 image = pipe( - "Test", + [ + "enchanted winter forest, soft diffuse light on a snow-filled day, serene nature scene, the forest is illuminated by the snow" + ], output_type="pil", - num_inference_steps=2, + num_inference_steps=30, num_images_per_prompt=batch_size, + generator=torch.Generator("cuda").manual_seed(seed), ).images + print(f"generated {len(image)} images") + image[0].save("/tmp/forest.png") torch.cuda.empty_cache() @@ -242,12 +262,22 @@ def main(args): parser = argparse.ArgumentParser( description="Run Flux quantization with different dtypes" ) - + parser.add_argument( + "--use_sdpa", + action="store_true", + help="Use sdpa", + default=False, + ) parser.add_argument( "--dtype", - choices=["fp8", "int8", "fp16"], + choices=["fp4", "fp8", "int8", "fp16"], 
default="fp16", - help="Select the data type to use (fp8 or int8 or fp16)", + help="Select the data type to use (fp4 or fp8 or int8 or fp16)", + ) + parser.add_argument( + "--fp4_mha", + action="store_true", + help="Use NVFP4_FP8_MHA_CONFIG config instead of NVFP4_DEFAULT_CFG", ) parser.add_argument( "--low_vram_mode", diff --git a/examples/distributed_inference/rotary_embedding.py b/examples/distributed_inference/rotary_embedding.py new file mode 100644 index 0000000000..1153ea2180 --- /dev/null +++ b/examples/distributed_inference/rotary_embedding.py @@ -0,0 +1,117 @@ +""" +.. _rotary_embedding: + +Rotary Embedding Implementation for Tensor Parallel Attention +============================================================ + +This module provides an implementation of rotary positional embeddings (RoPE) for transformer models +with support for tensor parallel distributed inference. Rotary embeddings are used to encode positional +information in transformer attention mechanisms. +""" + +import time + +import tensorrt as trt +import torch +import torch.distributed as dist +import torch.nn as nn +import torch_tensorrt +from tensor_parallel_initialize_dist import initialize_distributed_env +from torch.distributed._tensor import Shard +from torch.distributed.tensor.parallel import ( + ColwiseParallel, + RowwiseParallel, + parallelize_module, +) + +""" +This example covers the rotary embedding and rotary attention case for tensor parallel +""" + + +def precompute_freqs_cis( + dim: int, end: int, theta: float = 10000.0, n_parallel=1 +) -> torch.Tensor: + """Precompute the frequency tensor for complex exponentials (cis) with given dimensions. + This function calculates a frequency tensor with complex exponentials using the given dimension 'dim' + and the end index 'end'. The 'theta' parameter scales the frequencies. + The returned tensor contains complex values in complex64 data type. + Args: + dim (int): Dimension of the frequency tensor. 
+ end (int): End index for precomputing frequencies. + theta (float, optional): Scaling factor for frequency computation. Defaults to 10000.0. + n_parallel (int, optional): Number of GPUs for parallel computation. Defaults to 1. + Returns: + torch.Tensor: Precomputed frequency tensor with complex exponentials. + """ + freqs = 1.0 / (theta ** (torch.arange(0, dim // n_parallel, 2).float() / dim)) + t = torch.arange(end, device=freqs.device) + freqs = torch.outer(t, freqs).float() + return torch.polar(torch.ones_like(freqs), freqs) + + +def rotary_embedding(xq, xk, dim, freqs_cis=None): + """This calculates the rotary embedding for the query and key tensors. + Args: + xq (torch.Tensor): Query tensor. + xk (torch.Tensor): Key tensor. + dim (int): Dimension of the query and key tensors. + freqs_cis (torch.Tensor, optional): Precomputed frequency tensor. Defaults to None. + Returns: + tuple: Tuple containing the rotated query and key tensors. + """ + freqs_cis = freqs_cis[None, :, None, :] + xq_ = torch.view_as_complex(xq.float().reshape(*xq.shape[:-1], -1, 2)) + xk_ = torch.view_as_complex(xk.float().reshape(*xk.shape[:-1], -1, 2)) + + xq_out = torch.view_as_real(xq_ * freqs_cis).flatten(3) + xk_out = torch.view_as_real(xk_ * freqs_cis).flatten(3) + return (xq_out.type_as(xq), xk_out.type_as(xk)) + + +########Tensor Parallel######## +def parallel_rotary_block(rotary_block, tp_mesh): + """Parallel rotary block for tensor parallel + Args: + rotary_block: Rotary block to parallelize + tp_mesh: Tensor parallel mesh + """ + if tp_mesh.size() <= 1: + return + + plan = { + "wq": ColwiseParallel(), + "wk": ColwiseParallel(), + "wo": RowwiseParallel(output_layouts=Shard(0)), + } + rotary_block.n_parallel = 1 # this is for single GPU, to do remove this hardcode + + parallelize_module(rotary_block, tp_mesh, plan) + + +class RotaryAttention(nn.Module): + def __init__(self, dim: int, seq_len: int): + super().__init__() + self.dim = dim + self.wq = nn.Linear(dim, dim) + self.wk = 
nn.Linear(dim, dim) + self.wo = nn.Linear(dim, dim) + self.seq_len = seq_len + self.n_parallel = 1 + self.register_buffer("freqs_cis", self._precompute_freqs_cis(), persistent=True) + self.init_weights() + + def _precompute_freqs_cis(self) -> torch.Tensor: + theta = 10000.0 + return precompute_freqs_cis(self.dim, self.seq_len, theta, self.n_parallel) + + def init_weights(self): + with torch.device(self.freqs_cis.device): + self.freqs_cis = self.freqs_cis + + def forward(self, x): + q = self.wq(x) + k = self.wk(x) + freqs_cis = self._precompute_freqs_cis().to(q.device) + q, k = rotary_embedding(q, k, self.dim, freqs_cis=freqs_cis) + return self.wo(q) diff --git a/examples/distributed_inference/tensor_parallel_initialize_dist.py b/examples/distributed_inference/tensor_parallel_initialize_dist.py index 21e4cbc282..98d3ca18e9 100644 --- a/examples/distributed_inference/tensor_parallel_initialize_dist.py +++ b/examples/distributed_inference/tensor_parallel_initialize_dist.py @@ -1,3 +1,11 @@ +""" +.. _tensor_parallel_initialize_dist: +Tensor Parallel Initialize Distributed Environment +================================================== + +This module provides functions to initialize and clean up the distributed environment for tensor parallel distributed inference. 
+""" + import logging import os from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -65,3 +73,9 @@ def initialize_distributed_env(logger_file_name, rank=0, world_size=1, port=2950 torch.cuda.set_device(device_id) return device_mesh, world_size, rank, logger + + +def cleanup_distributed_env(): + """Clean up distributed process group to prevent resource leaks.""" + if dist.is_initialized(): + dist.destroy_process_group() diff --git a/examples/distributed_inference/tensor_parallel_rotary_embedding.py b/examples/distributed_inference/tensor_parallel_rotary_embedding.py new file mode 100644 index 0000000000..da3f3fd8fd --- /dev/null +++ b/examples/distributed_inference/tensor_parallel_rotary_embedding.py @@ -0,0 +1,59 @@ +""" +.. _tensor_parallel_rotary_embedding: +Tensor Parallel Rotary Embedding Example +======================================= + +This example demonstrates how to use Torch-TensorRT with tensor parallel distributed inference +for models that use rotary positional embeddings (RoPE). It lowers the complex +operations in attention models with rotary embeddings across multiple GPUs. 
+ +""" + +import logging +import os +import time + +import torch +import torch_tensorrt +from rotary_embedding import RotaryAttention, parallel_rotary_block +from tensor_parallel_initialize_dist import ( + cleanup_distributed_env, + initialize_distributed_env, +) + +device_mesh, _world_size, _rank, logger = initialize_distributed_env( + "./tensor_parallel_rotary_embedding" +) + + +""" +This example covers the rotary embedding in Llama3 model and is derived from https://lightning.ai/lightning-ai/studios/tensor-parallelism-supercharging-large-model-training-with-pytorch-lightning +Command to run with single GPU: mpirun -n 1 --allow-run-as-root python tensor_parallel_rotary_embedding.py +""" + +BATCH = 2 +SEQ_LEN = 128 +HEADS = 4 +DIM = 128 + +with torch.no_grad(): + model = RotaryAttention(DIM, SEQ_LEN) + parallel_rotary_block(model, device_mesh) + device = torch.device("cuda", device_mesh.get_rank()) + model.to(device) + x = torch.randn(BATCH, SEQ_LEN, HEADS, DIM).to(device) + + python_result = model(x) + + logger.info("Torch-tensorrt compilation for rotary embedding") + + model = torch.compile(model, backend="torch_tensorrt") + + torch.manual_seed(0) + start = time.time() + output = model(x) + end = time.time() + logger.info(f"Compilation time is {end-start}") + assert (python_result - output).std() < 0.01, "Compilation result is not correct." + + cleanup_distributed_env() diff --git a/examples/distributed_inference/tensor_parallel_simple_example.py b/examples/distributed_inference/tensor_parallel_simple_example.py index d2e3c590c6..c5688c6e5b 100755 --- a/examples/distributed_inference/tensor_parallel_simple_example.py +++ b/examples/distributed_inference/tensor_parallel_simple_example.py @@ -1,3 +1,24 @@ +""" +.. _tensor_parallel_simple_example: + +Torch Parallel Distributed example for simple model +========================================= + +Below example shows how to use Torch-TensorRT backend for distributed inference with tensor parallelism. 
+ +This example demonstrates: + - Setting up distributed environment for tensor parallelism + - Model sharding across multiple GPUs + - Compilation with Torch-TensorRT + - Distributed inference execution + +Usage +----- +.. code-block:: bash + + mpirun -n 2 --allow-run-as-root python tensor_parallel_simple_example.py +""" + import time import tensorrt as trt @@ -5,7 +26,10 @@ import torch.distributed as dist import torch.nn as nn import torch_tensorrt -from tensor_parallel_initialize_dist import initialize_distributed_env +from tensor_parallel_initialize_dist import ( + cleanup_distributed_env, + initialize_distributed_env, +) from torch.distributed._tensor import Shard from torch.distributed.tensor.parallel import ( ColwiseParallel, @@ -18,7 +42,7 @@ ) """ -This example copies some code from https://github.com/pytorch/examples/blob/main/distributed/tensor_parallelism/tensor_parallel_example.py +This example takes some code from https://github.com/pytorch/examples/blob/main/distributed/tensor_parallelism/tensor_parallel_example.py """ @@ -79,23 +103,15 @@ def forward(self, x): dynamic=None, ) -try: - for i in range(10): - # For TP, input needs to be same across all TP ranks. - # Setting the random seed is to mimic the behavior of dataloader. - torch.manual_seed(i) - inp = torch.rand(20, 10, device="cuda") - start = time.time() - output = tp_model(inp) - end = time.time() - if i == 0: - logger.info(f"Compilation time is {end-start}") - assert ( - python_result - output - ).std() < 0.01, "Compilation result is not correct." - elif _rank == 0: - logger.info(f"Inference time is {end-start}") -finally: - # This cleans up the distributed process group - if dist.is_initialized(): - dist.destroy_process_group() +# For TP, input needs to be same across all TP ranks. +# Setting the random seed is to mimic the behavior of dataloader. 
+torch.manual_seed(0) +inp = torch.rand(20, 10, device="cuda") +start = time.time() +output = tp_model(inp) +end = time.time() +logger.info(f"Compilation time is {end - start}") +assert (python_result - output).std() < 0.01, "Result is not correct." + +# This cleans up the distributed process group +cleanup_distributed_env() diff --git a/examples/dynamo/requirements.txt b/examples/dynamo/requirements.txt index 3d0f94bf28..780d2c6c48 100644 --- a/examples/dynamo/requirements.txt +++ b/examples/dynamo/requirements.txt @@ -1,7 +1,7 @@ cupy==13.1.0 triton==2.3.0 diffusers==0.30.3 -transformers==4.50.0 +transformers==4.53.1 matplotlib pandas huggingface_hub diff --git a/examples/dynamo/torch_export_gpt2.py b/examples/dynamo/torch_export_gpt2.py deleted file mode 100644 index 4d34c58de4..0000000000 --- a/examples/dynamo/torch_export_gpt2.py +++ /dev/null @@ -1,98 +0,0 @@ -""" -.. _torch_export_gpt2: - -Compiling GPT2 using the dynamo backend -========================================================== - -This script illustrates Torch-TensorRT workflow with dynamo backend on popular GPT2 model. -""" - -# %% -# Imports and Model Definition -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -import torch -import torch_tensorrt -from transformers import AutoModelForCausalLM, AutoTokenizer -from utils import export_llm, generate - -# %% - -# Define the parameters and initialize the model -MAX_TOKENS = 32 -DEVICE = torch.device("cuda:0") - -# Define the GPT2 model from hugging face -# kv_cache is not supported in Torch-TRT currently. -# CPU is used here so that GPU memory is reserved for TRT compilation. 
-with torch.no_grad(): - tokenizer = AutoTokenizer.from_pretrained("gpt2") - model = ( - AutoModelForCausalLM.from_pretrained( - "gpt2", - pad_token_id=tokenizer.eos_token_id, - use_cache=False, - attn_implementation="eager", - ) - .eval() - .half() - ) - -# %% -# Tokenize a sample input prompt and get pytorch model outputs -prompt = "I enjoy walking with my cute dog" -model_inputs = tokenizer(prompt, return_tensors="pt") -input_ids = model_inputs["input_ids"] - -# Auto-regressive generation loop for greedy decoding using PyTorch model -# We use a custom generate function which is very similar to the huggingface one. -pyt_gen_tokens = generate(model, input_ids, MAX_TOKENS, tokenizer.eos_token_id) - - -# %% -# Compilation with `Torch-TensorRT` using dynamo backend and generate TensorRT outputs -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -# Export the GPT2 model into an ExportedProgram which is input of TRT compilation -# To compile the model in FP16, we do the following -# 1) Cast the model to FP16 via model.half() -# 2) Enable use_explicit_typing=True. Certain layers are explicitly casted to FP32 within the pytorch model and this flag respects this behavior during TRT compilation -# 3) Enable use_fp32_acc=True. This ensures all the matmuls are accumulated in FP32 precision (similar to PyTorch) -gpt2_ep = export_llm(model, input_ids, max_seq_len=1024) -trt_model = torch_tensorrt.dynamo.compile( - gpt2_ep, - inputs=[input_ids], - enabled_precisions={torch.float32}, - truncate_double=True, - device=DEVICE, - disable_tf32=True, - use_explicit_typing=True, - use_fp32_acc=True, -) - -# Auto-regressive generation loop for greedy decoding using TensorRT model -# We use a custom generate function which is very similar to the huggingface one. 
-# Move inputs to GPU -input_ids = input_ids.to(DEVICE) -trt_gen_tokens = generate(trt_model, input_ids, MAX_TOKENS, tokenizer.eos_token_id) - -# %% -# Decode the output sentences of PyTorch and TensorRT -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -print("=============================") -print( - "Pytorch model generated text: ", - tokenizer.decode(pyt_gen_tokens[0], skip_special_tokens=True), -) -print("=============================") -print( - "TensorRT model generated text: ", - tokenizer.decode(trt_gen_tokens[0], skip_special_tokens=True), -) - -# Prompt : What is parallel programming ? - -# ============================= -# Pytorch model generated text: The parallel programming paradigm is a set of programming languages that are designed to be used in parallel. The main difference between parallel programming and parallel programming is that - -# ============================= -# TensorRT model generated text: The parallel programming paradigm is a set of programming languages that are designed to be used in parallel. The main difference between parallel programming and parallel programming is that diff --git a/examples/dynamo/torch_export_llama2.py b/examples/dynamo/torch_export_llama2.py deleted file mode 100644 index 2f3e3cba43..0000000000 --- a/examples/dynamo/torch_export_llama2.py +++ /dev/null @@ -1,102 +0,0 @@ -""" -.. _torch_export_llama2: - -Compiling Llama2 using the dynamo backend -========================================================== - -This script illustrates Torch-TensorRT workflow with dynamo backend on popular Llama2 model. -""" - -# %% -# Imports and Model Definition -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -import torch -import torch_tensorrt -from transformers import AutoModelForCausalLM, AutoTokenizer -from utils import export_llm, generate - -# %% -# Define the parameters and initialize the model -MAX_TOKENS = 32 -DEVICE = torch.device("cuda:0") - -# Define the Llama2 model from hugging face -# kv_cache is not supported in Torch-TRT currently. 
-# CPU is used here so that GPU memory is reserved for TRT compilation. -llama_path = "meta-llama/Llama-2-7b-chat-hf" -with torch.no_grad(): - model = ( - AutoModelForCausalLM.from_pretrained( - llama_path, use_cache=False, attn_implementation="eager" - ) - .eval() - .half() - ) - -tokenizer = AutoTokenizer.from_pretrained(llama_path) - -# %% -# Tokenize a sample input prompt and get pytorch model outputs -prompt = "What is dynamic programming?" -model_inputs = tokenizer(prompt, return_tensors="pt") -input_ids = model_inputs.input_ids - -# Auto-regressive generation loop for greedy decoding using PyTorch model -# We use a custom generate function which is very similar to the huggingface one. -pyt_gen_tokens = generate(model, input_ids, MAX_TOKENS, tokenizer.eos_token_id) - -# %% -# Compilation with `Torch-TensorRT` using dynamo backend and generate TensorRT outputs -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -# Export the llama2 model into an ExportedProgram which is input of TRT compilation -# To compile the model in FP16, we do the following -# 1) Cast the model to FP16 via model.half() -# 2) Enable use_explicit_typing=True. Certain layers are explicitly casted to FP32 within the pytorch model and this flag respects this behavior during TRT compilation -# 3) Enable use_fp32_acc=True. This ensures all the matmuls are accumulated in FP32 precision (similar to PyTorch) -llama2_ep = export_llm(model, input_ids, max_seq_len=64) -trt_model = torch_tensorrt.dynamo.compile( - llama2_ep, - inputs=[input_ids], - enabled_precisions={torch.float32}, - truncate_double=True, - device=DEVICE, - disable_tf32=True, - use_explicit_typing=True, - use_fp32_acc=True, -) - -# Auto-regressive generation loop for greedy decoding using TensorRT model -# We use a custom generate function which is very similar to the huggingface one. 
-# Move inputs to GPU -input_ids = input_ids.to(DEVICE) -trt_gen_tokens = generate(trt_model, input_ids, MAX_TOKENS, tokenizer.eos_token_id) - -# %% -# Decode the output sentences of PyTorch and TensorRT -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -print("=============================") -print( - "Pytorch model generated text: ", - tokenizer.batch_decode( - pyt_gen_tokens, skip_special_tokens=True, clean_up_tokenization_spaces=False - )[0], -) -print("=============================") -print( - "TensorRT model generated text: ", - tokenizer.batch_decode( - trt_gen_tokens, - skip_special_tokens=True, - clean_up_tokenization_spaces=False, - )[0], -) - - -# Prompt : What is dynamic programming? - -# ============================= -# Pytorch model generated text: Dynamic programming is an algorithmic technique used to solve complex problems by breaking them down into smaller subproblems, solving each subproblem only once, and - -# ============================= -# TensorRT model generated text: Dynamic programming is an algorithmic technique used to solve complex problems by breaking them down into smaller subproblems, solving each subproblem only once, and diff --git a/examples/dynamo/utils.py b/examples/dynamo/utils.py deleted file mode 100644 index 25ad99c12d..0000000000 --- a/examples/dynamo/utils.py +++ /dev/null @@ -1,63 +0,0 @@ -import torch -from transformers import StoppingCriteriaList -from transformers.generation.stopping_criteria import ( - EosTokenCriteria, - MaxLengthCriteria, -) - - -def export_llm(model, inputs, min_seq_len=1, max_seq_len=16): - """ - Exports the LLM model into an ExportedProgram with dynamic shapes. - In the case of guard failures due to some PyTorch kernel implements, we also - try to re-export the graph by expressing them as runtime assert nodes - """ - with torch.no_grad(): - # max=1024 has contraint violation error. 
https://github.com/pytorch/pytorch/issues/125604 - seq_len = torch.export.Dim("seq_len", min=min_seq_len, max=max_seq_len) - try: - print("Trying to export the model using torch.export.export()..") - # strict=False only enables aotautograd tracing and excludes dynamo. - ep = torch.export.export( - model, (inputs,), dynamic_shapes=({1: seq_len},), strict=False - ) - except: - print( - "Trying torch.export._trace._export to trace the graph since torch.export.export() failed" - ) - # This API is used to express the constraint violation guards as asserts in the graph. - ep = torch.export._trace._export( - model, - (inputs,), - dynamic_shapes=({1: seq_len},), - strict=False, - allow_complex_guards_as_runtime_asserts=True, - ) - - return ep - - -def generate(model, input_seq, max_tokens, eos_token_id): - """ - Greedy decoding of the model. This generates up to max_tokens. - """ - # Max length of output seq = current input_seq length + max_tokens allowed to generate - max_output_seq_length = input_seq.shape[1] + max_tokens - stopping_criteria = StoppingCriteriaList( - [ - MaxLengthCriteria(max_length=max_output_seq_length), - EosTokenCriteria(eos_token_id=eos_token_id), - ] - ) - - while True: - outputs = model(input_seq) - logits = outputs.logits - next_token_logits = logits[:, -1, :] - next_tokens = torch.argmax(next_token_logits, dim=-1) - input_seq = torch.cat([input_seq, next_tokens[:, None]], dim=-1) - # TODO: Handle batch in this check - if stopping_criteria(input_seq, logits).item(): - break - - return input_seq diff --git a/examples/dynamo/weight_streaming_example.py b/examples/dynamo/weight_streaming_example.py index e1076a9e75..601292ba95 100644 --- a/examples/dynamo/weight_streaming_example.py +++ b/examples/dynamo/weight_streaming_example.py @@ -32,7 +32,43 @@ import torch import torch_tensorrt from transformers import AutoModelForCausalLM -from utils import export_llm + + +def export_llm(model, inputs, min_seq_len=1, max_seq_len=16): + """ + Exports the LLM 
model into an ExportedProgram with dynamic shapes. + In the case of guard failures due to some PyTorch kernel implementations, we also + try to re-export the graph by expressing them as runtime assert nodes + """ + with torch.no_grad(): + # max=1024 has constraint violation error. https://github.com/pytorch/pytorch/issues/125604 + seq_len = torch.export.Dim("seq_len", min=min_seq_len, max=max_seq_len) + position_ids = torch.arange(inputs.shape[1]).unsqueeze(0).to(inputs.device) + try: + print("Trying to export the model using torch.export.export()..") + # strict=False only enables aotautograd tracing and excludes dynamo. + ep = torch.export.export( + model, + args=(inputs,), + kwargs={"position_ids": position_ids}, + dynamic_shapes=({1: seq_len}, {1: seq_len}), + strict=False, + ) + except: + print( + "Trying torch.export._trace._export to trace the graph since torch.export.export() failed" + ) + # This API is used to express the constraint violation guards as asserts in the graph. + ep = torch.export._trace._export( + model, + args=(inputs,), + kwargs={"position_ids": position_ids}, + dynamic_shapes=({1: seq_len}, {1: seq_len}), + strict=False, + allow_complex_guards_as_runtime_asserts=True, + ) + + return ep def time_generate(model, inputs, output_seq_length, iterations=10): diff --git a/notebooks/EfficientNet-example.ipynb b/notebooks/EfficientNet-example.ipynb index bbbfe6f94e..c02d9a0150 100644 --- a/notebooks/EfficientNet-example.ipynb +++ b/notebooks/EfficientNet-example.ipynb @@ -253,7 +253,7 @@ "!mkdir -p ./data\n", "!wget -O ./data/img0.JPG \"https://d17fnq9dkz9hgj.cloudfront.net/breed-uploads/2018/08/siberian-husky-detail.jpg?bust=1535566590&width=630\"\n", "!wget -O ./data/img1.JPG \"https://www.hakaimagazine.com/wp-content/uploads/header-gulf-birds.jpg\"\n", - "!wget -O ./data/img2.JPG 
\"https://www.artis.nl/media/filer_public_thumbnails/filer_public/00/f1/00f1b6db-fbed-4fef-9ab0-84e944ff11f8/chimpansee_amber_r_1920x1080.jpg__1920x1080_q85_subject_location-923%2C365_subsampling-2.jpg\"\n", + "!wget -O ./data/img2.JPG \"https://live.staticflickr.com/3235/2873249326_a697e741f5_o.jpg\"\n", "!wget -O ./data/img3.JPG \"https://www.familyhandyman.com/wp-content/uploads/2018/09/How-to-Avoid-Snakes-Slithering-Up-Your-Toilet-shutterstock_780480850.jpg\"\n", "\n", "!wget -O ./data/imagenet_class_index.json \"https://s3.amazonaws.com/deep-learning-models/image-models/imagenet_class_index.json\"" @@ -278,7 +278,7 @@ "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADnCAYAAAC9roUQAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9WbBnyX3fiX0y86z//e639qV3dLMbDRAEARIkRYmklhmt1ihmFKMJTThke+wXP9mPEw6Hww92+M12hB22R7akkWStExxRJEVxARcQaKDRDfRa3V171d3v/e//s2T6ITPPOf9bVY0GOQWhwpXdt+7/nv9Z8uTyze9vTWGM4Wl5Wp6Wp+Vp+dEU+R+6Ak/L0/K0PC3//1Segu7T8rQ8LU/Lj7A8Bd2n5Wl5Wp6WH2F5CrpPy9PytDwtP8LyFHSflqflaXlafoQleBw3/Ue/9x0znU6rv4UQCCEAkFKitUZrvXSNP0cIgTGm+mkeF1KAEEgpMRrKUlPqkqTVIQwiDCWBCkjTDkmSEAQBSoWEYYCUCikMKgAhQWDA3VcJgTEarZvPMygpCAT2Oimqei4V5/0hDCDw/9Rfu0MGKAFjTH3w9LkNTxL7HONubD8adyNjDKUBo0FrqnaSUoAAXRqKLKfIC/IiRwBhHCMAnRcs5nNmeUZh6j449VYPFlOf49ug2Ra/+OK5H3iLp+VpeVoeE+gqpQjD8FO/V0oBNWD44j83QdcfN8Ie11ojkCgpLWgqRRCGSBkQhiFJkhCGIUHgwFYKpIRQCZQEhFkCciGgLEEpgUA4AAPpQFSIh4BtXWFbXwesmAe+cmAqHkA2+37V2QhRYfgPLELYZ1ZtYwzGgEBgtAFtMGVJvsjQWmO0QUpJkedkRc5ndRX05/l2afaPf7aUTwWmp+Vp+azlsYDup03CJnj9oIl/ekKbJjNzQIMQSCEJVIAKII5iwjAkDEOUUkhpma1yPxZAqY7be4HxnwGJsUy4OmKx1Ne8+c0Db9AAVrP054N/2XfydfAg9pCbPKJtxKkTtLb11kWBLgqKPMeUJQLIFwvHkA2l0RjLmx91+8brCLdeiAcWR98/j1yQnpan5Wl5oDwW0P20Sdhkr6dBVwgPdcKJ/tKCoxAWUIwBx4CRFsQCFSKEREmIVEAoA0IlCAOJklZNIISwrLUC3BpAhDAIrIrBCfC+NhaAHRiaZcz8k7SK1RRUaNsAPGEa4CdOPeYhDz3Vvl4qACiKEp3nFHlOWRTVe2oPuLqstRuNJ1QA2riv1h6cxUP7tMl8n5an5Wn5bOWxgK4vTd3tkm62obc9PWntdwKtNUEgkFK5
c+x9jNEVwGhjCJRCBQqlJFJYlYOUElkBgkc6K48bLCOUjvVakdw44Flm6FJ4FcPD9K3u82doB/tOy+ca4eHVU92a9TZF9+r801KBAXOKpxpjKMuCvCgoygLdWKRKYyjQjbZgSePh1TRN2C0pl/qvWYenDPdpeVr+ZOWxgK4xhqIoKMuyPihqDndab2sNXsqCMPXkVkqhwggQ6LJAYDClrq4VWHYWqMCxYolU6gH1htcDG6RjuYBTIUhnnHuYoG1YZpFV3R3trXSqn6E9HqZaeOBh1QevODBL3zXroc2D97UqB7fYBJJSl+hCY7S2cCuWkX9pEWi0efUMrR8A26c63KflafnTlccCumVZMyRPqgBwzPa054LWusGcLJAIIQnChDhJLGBmgrIsKzZq0AilKt2mMJZ9KSkt8Ph7G1PBmKwYLU7VULNh4Rixv59GoI3n1/Zuyj74tHR/Slv7qGIqnvlgEY1/qR5gmlzWgDZWzeLPEXYZWmpHjcEIkFIRBIbCmIaXwiNq6bBYGAgCVfVRWcqqr5qeJHU1nzLdp+Vp+WHLYwHdxWIBOF2sqaFDCFlN3NPA64sxhrIoUUGEkMp5Hyi0NpiyqCz+xgiEssxWSIlwlrESgzAajEDoEuN0ul6fKyQIKZDSuoE5/F0yFgkck2TZJctIgRKmuqYygvGgqO/fv3ovGp5iS6d+CnA5FYIxouEe5u+9/IyyLCnLsmKlUghQyrJVbdnuw57kVRxCCGuAjEKEEBRFgWjohP2zTgPv04RJT8vT8sOVx6ZeiKLIivXGoB3TUlKhpHS6x3JZjdBwIVNOReANPF7fWKoQg0CXGik1KlDEcUwUxagwQgQBQgWgrOgvpEQ23MOktF4LUnl9rUGaUzyzYeAyBoy2jLcyvDklr1pyMDA/kOl6MP/hQcrqg/1lWludtpcMvH+uV8809eRe4vDqntN+z9UTnJQQhqHToXupQWGUNyQu65lPq4ielqflafls5bGAbhRFS8Yy7X578R9qZtYE1eYEDgJF064lhEAFgWXJUiOkIE5ikjgliiJUEKBU4AxqoJREKeu1UFnnhXb+twYlbDjeaRctI7AiurFgp42pDG/GWAC2J9ZsUzZUKPZ2Xj3QKJ8JmJbB23sOSAFa+LpodKkxUiOUtAuUECglEcLqxr0Bs6mPrQC/wej9F8JgdeHKLkz+FaQSKFSjQvVbnmbxT8vT8rR8tvJYQNcGJcgadB3b8seFEOR5vsSa/Plaa5QSKClRgSJwYByFIRJDKVywglTEcUIURQRBQBA0giACVbNaalbmAx6EcxUwS0aiWo9ag26DqRuJ0BYWtXR63IYjgGygZRM4PQh73fLpsmykqmvhrzVeL+EMZ0ZryjynMAYZBBCDEEHD7cu/q6wWNitFmCXgP81YpfP6qNUW1rXOGPHQ8wU2sEQ59c7T8rQ8LZ+tPFamCzwAuqd9dE8baPznIAwIg9Alh3B6YAEicOJ9YKPPLOhKhLQgIYXVu0oHztLpcYEldygjKtOZwyLjnBKcZ0IjxLaKghOBra92umHHkmt9rXDGtoe7wv0gtmuMdyV70DMBgNJ6b+iiJMsyZOBUAY1Fq3pWoy0rj4NHuHxVUX1B4BizB3B1KjS6qd819X2egu7T8rR85vKYQDekMi8ZMFpjjBX5eYSPblMdgRAWAKSf7KCNtsCKQEjp1Af1jw+i8EazKmeBaN7fA6WpwNZo736Fd9oFvI+xVS04TLZGOp+3oVoswEgwso4Qk04x/DA9r3nggzPCmeUYuKp+rlperaCLkrIo0E5fW6gA6dQMVSCEflAPK4UEYVU8QRCggtrzA+EkC6UqHXgQyMpwV7ebb8slv4qnoPu0PC0/RHksoBtHEeBZrsHoWqfrj4vAgbArVWCAQyslFcIYO8FNiTEFAu9aZnXDgRSVblY6OV9gMMKgjXUdQxvH3Grg0Mb+o52hrNTaBRLU76CNdvWuagiA0B59SusFIaTThVqPCA1IZAOk
muz2FAw7TwbtdMdGi4pB+hM8yy4LTVEUFEVOWdaqmSLPkLnCSwNC1MY2nEeCD5+2KhdVLVSV1IHXe1u3OF8H7aWB0yuF8OEkT275V//wf2eM86ypu965F/pX88edSsr2buNL0fB+qVf3uj2b96R+xnJwjaju3xwfVlKT1rf6tLFXehWZldgwBlElDLTvpCqfmtql0nv+2Mwl7r5esjMVFam8Zqxnu+9/gzEaUdld7J01ujlAcDMQV21KYT9rR3asK6YLVzcCIzTCWCnW3te9R3VP41Rc2jWixN/c9o12rVi3MEJiTOmO2xlpRF0n13sYNBLbHrbW1h6kAWGac8O2rhESYyRClBhj29DgRV4XuVnVQ/Cf/p3/+qGT5LGAbuJ8ay07q9ULS4YdKSqvBjgteRuEsFFltuttg3tx3npDqSVH/lpdge0fYY1flauYqCdJxSwN6BJKTWXsA88U68FVP6NpnLJ1FI49SmmQynWQsN/Vda4nV5M22gXFLky61JZZat8CNSu3oFtSZBlFntl7uJcrnWeCcEwVfD2s2G8XOoN0BjlfL9kMlFgykNW9UOWm8Kf4geje4Ul2WvCg0vxbOVCAxjjxYeDVUWt5FKeyojYNk6fFmcotDztxbZEIoavvhZCNZ9aTVxjriaOofcYrNZnwgGG7RjpwCyo9vKzBoaq6Vb9VeqyqBu4NRQ3Owgi0A+LaQ0dU49YIgTASY40dNcD793DjrhT2LiXYUH43tyyWC7TwwGqqt3ww66xw429Zgq0bXNg29co+IbF5/URlF6kXVgviDk5cF9t5oo0lTc0et5PC3lmK0rWJxIg6JsAsV+pTy2MB3W63S57n5HlOWdj0i17FUIGuEciGSN0cBAKQQiKEwghTuW0Zn1XMhQYbrTENg10TeCuvCCFcJrGmTtKL4TYNovUKsOJ66TJyQb3ye5WFcanE/IARnqE45i28igOv5qhZtmX1xgGxrDrTOMAty5KyMOR5WXke+Gu1saCrdYE0hjAIUFFs2aqLZ1YqsO2itGVJUjgvDjvoJMK6xwnf3DXAVkuLWWb2xtfRHa8Gr3FTw0snD9M//9gXywbLairWXAUa7WAcJFVzuwasaiGl2bD1YgmnLK5LwAUgbVrRZrWEd5Wsp71np4ADYYOsALEGfCEcIAvHUKkB11ajZnwCC3bCWKCyAKrdiRpjPBDTACZwPNDWo6q5wAhFrhUloJAYkxPI0r2DcZzQLWrGVNBaW4obg3EJSJut4445hlx1UtVYjVwhACj7t/BAX/e0BWVd9YVBV6CunSwAVpK1c90tKW4u2WcptwD5VnDk0MjlPj1VHhPTjQjDgKKIyLKMLMssk3NynE3NWFvG64Z2lRfN7FUGLQW6rNZiB2xW3EGXlLhwYyMr3axn1mUFQNaVodZ76hqsyxKtS4qisOqQJaZSG5K8R8Npg9XDXLHqHLcSqSAMVeVZUEXBVYBmo+3yLCcvCmx+Cft8T4wF1rgYRSFRFBKGyvoMa0VelMznGZgcbUqyPKPISqQ0pGlEmqbEkSKJrc4WA1pYw5qBJeCpNNPGDu7KWOiYggdfW0fLAPSnjbAf01KxOFPBYzUMmxoe4Wi+qEiBZ3MOeAVgvMTlEbcx9YVjVh6kXVvLCrAt4zVu8lcATy2s+qtk9Z2ygCgsYEo31qxYbNll9STPmo2VGCXSpUgF2WSOph4DVXy7O15DoHDc1aAFFCbiqFzh5nSLg6zL7jTGFAU5AlMuWA3nXGjtcnXliE4wsXcx7l2N58KeI5YYo7DqAsfw/bJSLXQWJP24q20LdiGqWsx4UKR+J/cU359+znq+YIyVUHUlJThpRjiOa04rgOp11p7XWDhOjaHT5TGldrRszmfz0lpTihJRmorF2Up7EaJemyrvBecGhnDidelyORhpI9WE10tp68olLAgbbUNlfd6HCvikRKgm0NVqD12WaBft5vV8otG8tVuXbHymOl+4OjYXbGtoUwihUNp6VjQnp6Go
7uPbI4wC4iSs2sWraLznRxha7wIV2MEwGs5YLEqKogShybIZu7t7nJyMwQiOxxOuX7/BpfMXWMwzLl2+wHNXL3D2zCpBI+ihNJqd+4d8+P4dvvyVl4mTOheyF/s8u7dGvZpkaA1lc5I+IUVXv2u9pTe2AhYfHCJLBzKW+Qg3oUQ1MY3wZKvOkue1N7qah14NIByANyU752mDtDpad9ROXuNrCLhIw4oIWFblmbWXnvwdpFsofJ6SWs1Ro4K3HVdDd0mXWi9GtilMtUBMzAq/f/gy741XuDvWxHlGK4CFDhlnObNFgNY9ZouEvtjkz1055mcv3iKQmdO3esit2asFXlkx44ole91Is/41Bavu5QP9RQW4xq2D9lojrBrRNq+ujgl8NkGvTvBd4KNPhfssMFUOWE2lvffqleb68Clj7zGldqzHlAlsQnOtDYg6GMKeV4OvFPU6EoYhcRwThJ6+W/DJ85yicEpsd420o9WqCEpDWXWXLU3Vgym9jsf63lq/V+MGAdbvVEpnFLN8wetbS+fNQD1Mlt41UAKfdKcZKWajxaTLaaCqEa51rTfyi5O/Vko3eSux3tQBEMoGluzsnXByMmZl0KHXgsU8Yz6f0Y1Dzj93hcODY44P9nnpykU+/xMvcP/eHv/gH/1T1rY3+erPfpVz5y9wcjzk1s0bZJM5rXbCiy89Zxl0xdap3rduz6qX3QIHjbRGT1BpqqNqsdL3rJ9AiIaE5VieVzfJapLb+wkcSPr8IMKBpPHfO6G/mpS1gcYDr3Hn2TvIKrWo191CXR/pJngN9A0EtYf829TgX51i1Ql2Ia1DxJtwBqZSq1TAZSSjPOa7e2f46Djkjd0xk/GCJArIhCA0kAaGeWHQiykLJAeZ4v/+/bN8/6DLf/7y+3TisZ1bPhuJse/qua/9UVTSrH9v4xccp/4Q9rxKTYAGoXxzO5yulCwOzD1DXpY83IhoqCBtSzeoV9WgwgO8F2ywGGZtT8K90sPTHMDjYrqOwmvnfqSCkNCAyTIKk6NCa0EPfLivqFUJBlAqIAqtdwJCYpCUxgYDqDyn1Noa0qq0jxY8i0JDXtjMWl48lk48WjLaGevV4BmDlAShrCLaPJMwxqAL48JpS4qGasH7xtqQZQuI0nkE2Ny9jcnoFodaTeHusyTGOuAVGp8D2BoZLGsKjaEUhsPhjLff/oDeoMvWap9erDBlwTxfsNLrIGWPyWzG0eEeV7e3iOOQYjZjc32F2eiYj/Z32bt7g/XtCwwnCy5sDPilX/oFtrfWSdsxsXI8wy35VVBHk7YZO3l9Ap8nULuA548l1AY146L/PGd1beBZbKXXq0iCceKqcOBsZ6HnahWhdH3oOdpS1mIhlwJ4rL3XMVNTqyashrH2PrDMUDvw9X3m+WytEnAPcXDfUCG4PrZivHCU18Gem79erEYKSg3DMbx/Y4gINfPb/1+ufbckUy+RRc+QGU0ic/7j1WPmh9cZiBb/OvkiE5Fi4pDCwNfvdjmevMDf++I11tJjaNTTqj5qacpUTNLXt8Dzbss2/dvVemM3SKt+acJp1RJOv+u9EyRWzeQXqAqYG1faOsrq+xq863pYbbVTQzT0vA8rjyefrtd5gt21IRCATaQSRaFLx1i7LQnP7FyxAQ4QeLYsBMoISgGBFBXza65KGuNAU6GKgrKs+b43btE43z9HuCg25dmtA0rcL59CUbv0iODYraFWobgkOkJ4sdLU53kApmmPdfrhpWlRsyfRGADSlCAk2gju7w75/W99j6PDQ37xa1+k305QlGSFIVABMlAMj47o9Hq0woD/7p/9c175/E/wE194jVs3bnC4v4PWmps3PuTihT3+yt/6W2wMuuzcvclKP2VzY2D5RQNQpdeBY2VNYdyLOA8eIVw++SetGCgrJl9xUCt+VkzIZ7Rziymmag9bvJHFTvZKMBf2O0cRHYt1i3XFjH1j0niSHeP2o+e19diAhlZV+OtqEVk0dfDOYLakq62e
hmNjnvNSM1rcNaaOANo/nHP91oSVwQoXzp3jmeef4923ND91519z7f63eO/wCkkwZVv1efvWTcTKJuNrb/ILv7zFv5hdRecCI9uYcs47xzH/xz/+HP/rL79NNx0tkaGa02q3GFAvCP4thDtTeAnFzTfpvRS84cx/V8No86hpqJIqG0t1RZPv++tqplsx7sYyVi1yolZcPao8noQ3+AlpmUNQiduhraDb6UG55AfSMwL/XtL73tYTWguX40A47zgv8hj7XekHnlDIQKJLoDHYfcPadvNigj2u3L1rxXj9HpZVgDGyamIvClbv2LimoSWp71Uxl0YbPaBpd9N86Vo7wbQRHAynfOt7H/DJjZv84s99kfV+i0hBnhVMpxNKbcjmGZ1Oizs3b/D7v/3bnAxPSDptPvroI373d7+OknB8uM/6+iYr6ytonfF7v/3bfPVnfprtzQ0SF9RSDXFHxn27aGz+Ya3rd5JN7HiCijHe79agwOlSDbXRzLsUiRqIHOOt12/j9LDGAZ7wNM0toJ5zagfc7rz6CO4RtrgETLKh0rBepjVvNR7o7QVOsG4a+qgZKjW7tezOM0k32YRXLQh3Zn2uERKzKNjfOebgOzd5KYoY//G3WRQFN3/5ddaSFf7nf+ev8uZ3f59/8eY+OyczevM9vnd4yMnN6zy72uZCdp/B4GVOTIo+mdmsd6Xg+gH8P751mf/qK+8TBXNniPXP1g1ANEugXAGiEbhQpWrB8N4UjqRX/r/VClPdX1RtUUkp1bf1zK+nca3YwX/y48DZbTBNBVuJ+QGbrD8W0HXOFLUVVRiUNBjp120Lhj42woJuk/WBf0GJndQSD7q2UaWsG0DjBqqyng6lEbXNqoGAsmFkEMJU10tjlkS8JdR09be90uCqov4tGoO45idmqZv98333amqf5SXNUcW+vJZLsD+a8403P+SjT67z/DPnWO0mhFJa391CM1/Mabd6tBPFdDFiOhnz9ltvs3NwTPpHbzBdTFhkMxbzGbOTE9rdDiJq89233iNE8Mxzz9BJ0woErDuZ03cDRrt+E3aBs4uRbUNZL/Gnh8GPdTGidDhqgcsYkEJXi7Ifvd66rZqGiqaCQDieKrT1k3XHMKYC8iVx1S/W7t96TIjGteDVU7Umt/JAdYu8/abE38/d3wm6Vh/afGGo7+a8AnTtBuUNQH5xkQjk927ypd/+Q9LhDpO9EbNWi/sXz5D31wnTguNbnxDv3uFXLvf51fc6DDnhb/9sm2k246eSl/mN6AqtMqMoSuRKyImJ0SdzQPKt4xV+9f2L/OWXriHcAqcr10PvpeAr7t/OL15Uf+NJkSNz/rhfOKlQyC5GfmadjlhtNBJeKl6SgYTEbqTogy7AWfCrFscjljl93+Xy2NQLHhwrNinrQQNOJ+o+e1CtW9MJFkIg61zo1deefTUh0hok7MFHuTCJpc81y6jDYRsVbF7UNEc2G9QfrthQ4zvn1F7driF7WtBqMIuG9V+4FbzQgsks52g44Y+/831OxjO2N1Z45tJ5umkLhHUzWywWSKlIWhGLxYw7O/f5nW98k/aZS3z+lZ8kL0qEFkQq5MzlZyi1JGn3EFGKiju0kgAZd20eh1Ov79vF92Xtp1tnjrPs7skLA66ZlLAM04GUz0rndbh2saknlMKzTx/Z5YDO+bU2xXtvaRG4nByNRc2ySVE9C6gNYUI29Mj22caYxrBzxxxhsAPx1CJvGsPZi8MVAfbA5sDKq7pcBwsjkGXJ5T/4Ptsne7Bxhdalgna6xuWXXuOTL/0FVJoSBQEj+X9g5/230eGXua6njI7us722zdvlBRYzRdgTbAUFkyBilmcsAg3YnB6/eus8l9I9XrmwTxDIpXrZKCHPQhts0+iqvRoiAl4hVi9wtd+txQvXh8avMP56L1M0Jq9YcrqryVjD39qCq4+OkwhRWJc3HLh/ikfPY1Mv+NVHGid+Czd5XZ09g/XFu8AADdHb0NBVL7GLJj+sVxlO
nfiwmlWwv3SvT71WnPpw6hq/jtaAu4zc9qmGUtc313Lpyup2JZJZrvnk1j7f/u67HJ8cMhoPuXzpHC8+e4mNlT6htIEh3i1usLJClhd8cO0Ov/eH3ybTCWvblzg43McYw9aZs5w7v4ESgg+3z7C7u8/WhUvMZgXjecbb71znmQvbRPJUOz74KktCKFCFeD9pTFf4SeoHmABh6lgk6ZTXfuFpSkl1Kk/n+Wp8OKkbi84rYMnmIAwI6SZcrTaop7FlVxUvM1Tqs1IYAvd0r/sFGu3eEI4barBKleAlKj+vlvrUuWdVeOQWg9mU+RvfRj93gWQ0JV69xIIS8d3f4dv5jHxtjTSV3H3vGm/d7TINtyiSi9zVBXfDlF4kWC3HtOcjZkjCIqMddBGRZIGGMGVawj/9+DmeOTOjLcdWVPfAaFvtVK/Z961A2Ph280DtPROMa78G1zFeRmjey/2uhq9vH7/4+UPm1DUCKKsHCGGjQEtZQC5PXfNgeTzeCw1tlhFOrSCw+qyGka05WCReLyUqNlhBwAN46lvDuOd9+pQ3zbEmHnZmwz/TU4T6L2pezHLnPKpUz9AVC9I+isn4Hz+AoNAlo1HGZLrgxt0Drt24w5079zC6YD49YTDosrU2YKXXIpSG6WxGFEUUeUlmBLv39/nw2idc++QOWrWYTA6YjI4YnxwTt1IGgzY/9YWfYNDr0mm1uHV7l1e/+Brff+cTbly7SVYUVnVQBdrXDWCc5FAKnyNC1AwX56r3aabaH/NS81azNDZq4BUoUYOv734tHLM1tRjvPVNqX17HobzXQwWyPsTAVDNFiHocV6EVriuUqUd99cHU53ojYLUkOsJTTREBOMnEM3PrfVKDybKLk6Fstzhab9P75BbrnZiwyGCtw/Gzqwj5Eff/6Nc4GilOxori7F+mGyhanVWKg2PkYoGJEwoCBhR0VMFdo5kYQdJpoVHElCxkwN3FgH954zX+9nPfRFBU+mbjfYxrOYB69tcMFVFSq3sqKu/ucZoMlJUeV4hGG7nr6kXSyTHemGeWPRWsd4IfM/YZRSHQ4Qzd0USmS6vdeeSYezx+un5VrqJEqvo2xIQGUxUNVmAa14jlJqvIYSUa1Pf7VJ7lG7iuxtKnB5cAls6qr3z40x5Y2Kp+t4MhcDcxaEqsG1imYTpZcP/+AW++9SG/9wdvECcps3zC6mqf6fiYo4N9VgY9Ll/cJk0iyjyjUIpFWTAezjEaFmXBZDyi3+3wk59/hWuf3ORwb4fNrVXSJGBldZXXXnqRFy5eQCrJlcsX6Q/WeOnZi+zdO2HaP+Y/+jM/SSSN1YX7Nm+8jxfcjF8MRQ28Uj6ZEWkAtQvXsutQgKhE/+YU9+DmdZA+eKDEs19T6YcFzbFvyYQfZabycrGrsOPU1JKU9aJQDXWbtYPUaoamt0EFnS422/9XGZL8ABXUoCaM8+l3XjmNnB/oAmTA7H/2N/jk//QPCRYTWvs3uV0MuPM//Sucyec8e38PjOCa7PKv9vcZXfk8CQWTOGRqIlZGhygzZ8qMTCnuD55HLOZkSQfVCgnDmBRBnEhuzSO+f/AiL6++jbeo13PKq2Ga6pO6XeqoSerrPFAvhejWc9z3iahUMqK61p5lgbypVfQ2UqDhsiaWfHVV3kKUM8rBAYvkR8x0qSrvxCVTH2mu0t541XTT8sdxh/ylAirLrz1klu/5KexziTU/9ITaCdwhND53QiO2pbLmN6gNVO/hv/ecwTEhDCWCRV4ymWUcHg25v3PAt998l2984w12d/cIQ8WZs+dotyQhObt3btiQXZMxHh3R7SREgUTnBTPmHI9Hjj2XbK6scHZthXbaIlCKL/3EC5yMv0KUxBwcnRDGKWv9FmkEhYbFdM6gm7LWTXj9c8/xyz/zKqu9FG940afIgVkSE5rO46L6/qHCw4958Yu6Mk4ag8oDwfu72n61s0oKbIY5pxi149Ht6OHu6JmTdrkPcPeugdughSDAg7RAGEHw
kPaT1SxoQIxb6Cr965KtwTNE/7fzzqjcm6yU6ff+owJo8D65puKVwiL4lUuM/+v/Ce/t7tE/uEt85x4agQpj4gsvcGH/+6xzxEfpIb+ZlwyFQAeKIrP67gPZIj35mHnnImb3DgUxuruFWECqIJYFl1qCQay4MbnC+fYO3ehexcz9u9kgEKdSEALMabVD1aNUYIhx0Xqmbrvm0laxOt8g9UK5TOiEbSxv0KieUYkQS3UQZQxHiozDh9TRlscHuk1XMD9BK4x0XStANEQv3wbNMbj0mmJZ1KgNa59OtcQjPlfPaOCIqJYue0/pY+epRR3LTHwYoeVI3h1HA9N5wfBkxp07uwyHM/aHE27fvcf+zn2OD4945+23yXRBb6VPf7XF8z/xKu12l+OjA1hMyRcLxsdDtjbWODo+5PBgj7NnN1mEAePplNwY9g+PGQy6rPa7tMOAQCkEgqQTsdpdozCag3s7/F//L3+fF154jv/i7/x1FouM+TRjY3ud8bhkdaXNhbM9axMwyy5tvlRBAK59jAEjhU3Us2Ruf8KKcOkPqwVbIKQLzmmM1yo/c20VroIWtMTlOqBmoHiJ0wKsdPexRi9HFkQd61RLFrJiXsKcHqX1SugBt9YanBKxhKH2irH/CpfCxYOF8AuNqVmxv2flu2ks4QhWumQrA3a5itAF2mik0GTnz9L91rcxa8/zRXGZ320PWOiQMAkx0xHz4Q5Z3EHGWwxFh0gawiQkHn9Mv9fi4uKIC+khz/Qm/MSZKaKn2Ly8x/gk5O6dmGzu2b1szG+32FAbvqyxWrt3sovmkq2oBh37S1iJrfKld0RpyUh/6lmIJg+ulybwWQfr1rbTJUAe9x859B6TeqGCxeplT0vfPjKkpr4NDwJ37ek8B7rBLE8/71N47EOLz5lgH91wdG5UQhiB0II6yto4v3HDdFGwyAuOT0YMTybki4yD/UO+8Udv8P1334coREtY294GIbhz/Rrjk2Nm8wX9wSorcczGmTNcuHIVESiEKZieHHB8cEC+mJPEIbdufEKrlXDvzm3OnjvDdDwhLwobDo0kmkQUhUZEXudo/QaNsS5Ozz17kT//Sz/L+x9+RF7YpEOXL55DCMn33/6Qr3zlBUJlV3Lv/1m1aUPdo0Q9SP3C8kM2949d8TpV5UJ5EbWaATwhcAoCu8pWoqxnh9I4DukmZWnqMV/fx+14ggUEm/VL1J4K1YB3umX/Z9OuIU5JHNVArXvMg4cXyZZhFwcgPgVjo1SLhXWLMqZ217JSm8H6nmogAKPRBibtGKEUwcH3eGF9wOv9lJuLiMOpQIspetBlPb3D1vAGl/qKi9036cfHtM9KyuhZtrbHdDbfI+1mtLua1dUOa501dJbz0su/wPvv9bj1SUlRONe5SsdLDZiNNJC1K55rj6V/ar9b0+jDiuw2BnPlqNBovVpOMY1VFddOzdasJeDiU+wcj43pLlXiITP0f8g523y/psX40072lmGvq8RY3WSel8zmGcfHY0bjKTfv7HJ4PCUvS3YPDsjyHK1hb+eQ6XjC7s59xuMjRif7CFMwPLzHbDokSrtcvvIs09EJW2cv0O90MeWM7bNbHA9HhJHg6tULbJzd5PatW9y5eYPdO3fZuXubOApZWVtlMZuiywXHRwccHuwTpy1OToZkWUa/v8Z8VvDGd9/hJz//OXrthEhaAUo7AEmTiF/+pa/xtZ//CnEcoNqSJI3RSJ45d4Zupxkw6iP3Ti2QDeOQMU7U43Rk+ZOKwC6KXtQqIRf1SvOdLBN1LdOQxKqJa/ySJSviXE1s47ODnbZviDqgAlONR+8dapYmtjvHLCsBvN62UoEZ6smwJA77qgsrmjsQP82SLeB6Ebt0l9ssw/b60o0Ug5Gwf2GVC5M5O5liOJkShAU/u/VvOXv133EmvcH2mRntRKAKQVHkbJ+FOBUU+R1UaEgSkJlBlFCOh5Sxoiwk6H/Jq69e5eqV13jrO13292yD2kVR1jkbGhBb
LVHG92tD8qhaq+6TKljK81qvujHLGCIaY6EiZs72wSlS6IljKQy/ceM3+Nv8708POOAxgq5fKSs1gKlXG/8ila/tQ4GyQfkb1nLf6V4V01z5/Llg0FqQFSWT6ZwsK8izgvFoyiLLOTwckmtNWRSURcFsvmBv74jj4zH37+8xm8/4+MYtToYnpElAu9MlTlrIOKCVtmh12syzGWknoVumRC1odSIO9+7RXd2k21/h5HCHD9/+Fq1On9loH6MEvcEq1979Pv1+l97WOqtrKwhKpuMR4+GQxWyC1AXDoyFxFJKmKQd799lJYsaTKUGUsHPvHkeHR2xszTh78TL39/a59vF1nr1ykUE7AYnNqgbYPwyt2G6triT0WpHd+VeCMorS67xOtX1dlg04NWOozzkNUk9GcXkuKrA0VQCEDXZojM8lAG6YdGocxuuAG4IwIJyKob7eu9svnyUaF9bMuXnctrlu+Nkuj3xj/L4WHpRNdYY/x6VkqdQKdf3rz00gEdQ7Q/iautmFETC+8grXb2/xvwl/jlj8Jv/LL/43nDu/Q3cA2QwWC0ErhTgxXP9EMJkaytLQTSRJakhbIUWRkSjItWFv/4AkbaGyGeOTd+it3OMXf+m/4M3vTPjoPY02sjLO+7b0QRA+oaWXNHxmsqZm3AeSuBMeii8yEFWAWR1OfGrHkMYvMJwmtR+cvMcnh2/yqPKYma4tVf5ZTB1NI5anabVmNVcQt7r5UFzv36yNZ/gGb1DQQpAtcj75+DZvvPku337zHW7d2aEUEbnOGE6OMGXJxsYGV5+9SpykxHFEmc05ORpy9+5dDJrxaEi33+fln/wcaadNFARkWcF0MuVgb5f5+JiynKNNyXw+IoliiiBABCFnLz/DbDZFGrjw7Isc7N0nm08J45jB2irT6YxOq0XSSrn87BUClyvhcH+fxXQGBkIVMi8N0+EYISAKI452DzjcP+J4OGR/b4/h4TGLRUHcajPodxiOpuzuHSLMCv12anNWONFYCeGs4M7pXViHPmWkFXsbYqwXw2pxtKlPs5OtNjLgjfJPpCHNpgWVlU2hjkh06oHKfVdUJNczRB/x5flV7XPrPQtOQbWonNLqCD4HnhVxEO4vF1EjaWbdasiMxoFeddTULNwDrcbu29e8EMdshX+vBvj6laPmsdU1QKX/pfrWMsLjtZ/iHxytcSX6B/yNv/b/IewVTKaCIFCEWtPtAYUlQFsphErSGmjMrESMYToJWFuTzIdz0tBACMfjnNWeJBKa+cl9iuTXefWVLTqtS7z9XUlZBhWj9Ab4umZ1G1YkTVARNKt+W2a9tlE9WJcUayPktI0Yh7XKZmmhO10aKif391ayxtfOf/kh59ry2FI7Pqx+otFYgkerAioANjWzAn/PhvOHMeQlHI1m/PbvfoPf/q3f5xvf+CZFPmE2GwGaMGqztnWGsxevcOn5q1y+cp5OJ6bX6dFKUspswY1bt7nw3AXGozHHxyekrZTVtVU63TZCSO7euU9RZuzfu818OCZsJahQsZjNCMIQLRVnzp2nFIK1M2eQQlCanPPPP8N0PCIJJLc++ZjpyRFJkrJ9/iIXLpyjKAqOjw6598nHTCcTwjhhsL6OigJmkymT8ZgkSRgeHfPB997i0nPPkSYp42DE+PiI4fEJK/0+02wOUrB/eECZd1lZXbW7Rsg6YQ3Q2JHi1IpHk7k+qGKoT6p7qAKJJxFxgSp3hAdLb1PwASJeTPWGmQYrrLIfuDaTbmLKJYlXVOQCaESf2XO8sc3u5uDZricnNZQ0mWgzMste7rxuaBiYTelyF9QuYzXYmuWOXQJ/U4ezN76uzhGW9fphktPjH3/QYm/4r/nSa/+U6cJwMYJIGcbzElMKyhm0kpjjYUaSGIQWrG8p9ndKxkeQLWYY3SZqhZT3croJtFTG/o5ic73HYjbnqPwe/Y0Tzq3u0fmZV/nmNyIWWYRo5KCoMrk1GthK+35BopEAyPcPVd8Zp0s3CPKT
jHn7Pt32ZYJJsrzaQUPqE1W/6NKGwwdRTJEXrAabbKxsPHLoPTZD2rJcVH/8THPUTwB3XSN4C78qC6BA8HtvXOM7737C7s5dovV1Xv/5r2G04WBvjygMSbst4jhga2ud3qDHfDpBUVIsFixaqc12FsDR3V1GowkgCRSgc4TOmU7mfHLtfSbjGUJqolaICgXT8TF7d+6Sdrt0eivMRiN0EHLu/DlanZhWFDCfzpgNT/jg7e9z/+Z12p0OvQsDXnz5ZaSUlGXG8OSYg/t3mUwmbJ6/xOrZLVbPbjIdjjg5OiKfZ7S7XW5+9BGmtEaYoiiYTSZMxxOm0xlJegapAjpJytFwhApjwiCkncYI1Uy8blgscsIwRDrjW7Xx4ZKbjBOqmgPUneM8qNxcFcuT+Akq0m1dA6oymiG8V80pUdL7A7pp7idcpQKoJDGxdKzWG7rgHxoJbIRmyYXJo7FnYd6LxhiMtATDG5eFqPvJs/FKzykagF3xuYrq1ZZ2/z6mkYuWWtbxi4KtC1XbWKNgwMc3zvLxje8xePWfMzMDvvfRLqXSlBIWE0Eaw1pfMFca2ob5BERo2Ns3dDci9nZzZjPD0f6YbkfRTSU3P5BcPV8SFwJuGFZW+4hxiNpuU2xrusU7fPG1i3zjjQ2b0EoImjF6zaxjlarBlWaiIeH+Ni5KsI4ZECSLNXJzzFH4LudWf4riuKTUpW+E6n5+oTZaM/rwHjwTs739Mvf/4T/lxmHCTz136ZFj7zFFpC2r+6V4MGrM9/0PNHz54gaTZ79Fafj9b3/I1//4PTbPrnCpfZ47d+9SaMNiMefspYsopYhixZXL50liRaQUYRAwm89RUjAajQA43D9k5849tDYcHB7SWxtw5swZkjhmni1ASLLFgm6vy/HhIfPZlOO9HYpsQpHHzOZzDg8PWNvaZjGfsbExgLIgyzMW8xmT8QhjDEVZsFjMGQx6rK6scGt0y+qV8wX5fIIQ0Or22d7aYDQ84ca1a4yOTpjNDTKIuHfnNt3BgDBNIMDurEFBURRgDEoquv0Bk9mcTjuwkWYycCuzwWjBNNOU2ZT1ICBxuzPrajI6vWRDovAZMl0XWEOdsamjETZS7VMMtT+2RQu/k7QBr8OlZqRLPkSVPC7wG0VCTR6rHXUd82/IFoAD2dM6RqfKsAa2ppbXA7VrWPeQ5eP+dqbWDFCrAHzUpzEPssCl7Tj9O1GDkbsTni36u2vHqoWAhV7je9+5wdtv/GP+3i9vcfFcCnKX63cisjInm0K/A6U0TBYFaRvmBjolLEaGtJ2ztqE4zkvSNcP0umZ+oAiF4d2vS157uUURlZj3FSocUB6EpLsRer1DmA+40hV8dFy7htWJbryax6rBTpFa+1YN1wWBbDYxoZIURtDNn0GYe5wkH7B97nOc3JuhC91I2WAIhaJFwPxoyMnqHuurZ/jg5lv8t//i23RiSTG+x998xNj7keh0gQZb+GGuqT96IDCiRAu4vTfl99+8QW89JMsy7t67zvDwmNmsYP3MFlGouHBum3YnIYlCYrfVzXA4ZLGYcXg4YTweMZvNGJ+MGA1PSNMWk+EQpSSLlRWm0yndXodLly4gjKFczFEYrr33DvPxkMJIOv1VuivrdAd9jDB02ylKQGk0Os9Jo5goimi1WsznU2aTMbPJiDDcotNp026nhEnM7M6YyckQrUGGIecunCUMBJ98+AnFzi4mbTEcDgnzgqsvXaIscgKl6XZSssWU4+EJaM1gdUAripFSUpRAVuvohuM57374CfNsxhdefp6L22sVGNSsSNTd1fjGHqvdxZf66AkEXa9awGetarojembvZTbPOj3DdAxQG1EFObj/HY412rHZWg09hWl84zWMS/65QjeCcSyke1nam8k8AakzPYMP9PFbWFXPqLwVBPV2OfU13nhXvb7LZ1BrJLxyQ5CP1zCjd9GjAya3f4V9OWHzwg7IfSSwkIaP9wR39gSdLkQSjDJ0JQz6gsVtuzB0
OpDsKC693OGdb07odiQb2wauZaSbLdgUyOMAM40odtrIiYBixnPRCocy4IgFmMIterLOWyE8xbOt41rPHbHSol8mfS9JAUEgKfMCIUK6xTmmh7e5n32flc3z6GGEngm6cUwrz5hPxyxKwb3ZITfMNxGHL7CYR1z63AptCiZh/six95h0uj+YvS6d0ViSHnblshpKMJoU/Kt//XvkxQgxTxDM0GXO/bt36HbXuHj5Aqu9Fiu9NoaC6WTGycmEsiyYTEZobQgCRRRFTCYTsmzObDqiyDKy2ZRFFHF8dIRSijPBFkkS0e12uHWwz9HOHcZHB+iiYHX7EoP1beJ2CyMEg0GXIBDoIufk6JCTo0P2dnaRUpKVBbrI0WXBaHjCwcE+WhdcuHCOja1Ndj++znw8ZjgcoYKItJ1y5sI5ikJjypLjw2OkiukMurQ6HdbWVomVIgyl3R2iLNFSssgyjArQoqTdajHPcoyxjPXXf/N3+fv/r39EkqT8lb/yF/mv/sd/o9oO5oE2N/WP75fTZ4pHHH8SSuXi5dibBR4nqlYuQV7qF7W3TBXBJBCi3mC1NtLwgEO+1yfWygmvylhmk7VZy9S5sKHBeGkwabNk4PLn+QxXHnC9a5OH+ipbWXWZrvyGfS2orvedbyqyL4A7731MPtshTltM71/i/tERu7de4tkv/BFJb46KDWECsxHkC4HWkjDQDLVgnIGaSEwmaAlJT2uiWUG00uHoRNKZ5SgBo0lCMusSLFpMRprOokAVfVpiQj6+y1dbF/lOa5N74j7G2JwKNmmPN29WtNS1uleV1Hr2OsDCIFDMs4I//J3f5LlnXmHj/HkSeR49yTicntBrC85vpxzcm3EnF5gggRBYbXHz6wfk999g6+xZfv6nf5rhcMR4Pnzk2HuMwRFNmuqOn7Kw1Xuk8UAk2oPFiTla8Gu/9QZ3jg7ZPrfKycmQ+WzK0c4h2Tzn6hcuY7IpQgcIYjK3r9rB4THD4QlCGKI4IAoDgkDSaiUUWYfhyTHZbIZUAqEkk/EEpRS6KFjf3CRMQorFnLsfvc98kXPmyoucuXiFVn9AksaUZUZ/0CUvFhyfzNnb2eHk8IgyzxCAkorWyjpr29skrTZlntvVNZRcfvZZPnrrTcYn+0xPjtGFIYpSOu0W2SJjdHLMbD5HRoL1zW0uXrzAYKVLEkVEQcTw5AQB9PoDlAwIgpAoVESBoJO2OTgeUZaG1dU10rTD1vYan3/1RZQXL13D23nu95GzQSEVcLgfjRVFtYvsqUTXJ6zUjgoNdYIzfpkKlZxetuGDWUUjVaAsaqbsTeX4e7v7GG/0sfcy0sFABaaVd24FxJ69CtPQs9LkJ2JJteCBeclnvTK01fX2XNaI0t7bBU34/BoWXQ3eTcwDuxfWNYr50T7FYsy5jTMcHoz53GrKvfcvMooPMduf0N8Y0Y5h0DOMDgR6ZuhtGkYngnkJWWDQuWY2TRgVkrFRaBlyWG5zGYWe54gy5qSMSFRAoEPm0xItp4h2iAk6tCYHfDHXvN/vc41j2z7GB5fURq7KYImHXt0Y875dXSJ6ITnzzFX+5a/+E7pBDxUHhJcFul3y0y9cQKtz5MUVEHb/tkSmTPduwKTF9cmMNDmhi8SEC175/OuPHHuPL+GNaK7fNWN64NzmwIeqcx9yU8Bw+/4Jv/MH32JlYxUlJHs7OxRlTtqJ+U//9v+I9a11DvcPuXvvDovFDIPdXWE+nzOfLwhCiZAwm06JwpAwDAkju/ttURTEcYTWBTrPONjd4eTwkOloxPHREXc+fpf5ImPz7BXWz1/kyovP0+51yeczyiKj2+2gAml3cihLTk5OKIsCIwRpu03aSml3O4xHYzrtlCSJicKQrQtn6a6vM75+h707t9m9f5+1tR6ttE2SpsTtFptntxgOZ0RJQqvTY3V1hSgIiIKQPC84Ojomjlus9nruvRRxIGlFgtZ6n6woWP/5L/K1L75Ep53S66TgggKkEdXOHH7/
LV8aRMzl0BXV5NZaPDwM/gkoAsvuKmND5WJkWB6CDsyo2V4l8os64YmhVjtUm6wKU4N29eBaZVEH6HgnNLBb0Zg6AU0FHDT6pQbEitD5ulY1NA13Kc+fPQNsMGt3j2pfOK9kcAxZUzNgz8qDXNEVBdNuj+noY9TgKhfEgvKtc8y/s6Dz5T3K/hyZZER7XaKNKcWiRMuA3sqc27cDdFszywv6Zo18PyCchhRG8Vsf9fmLHQimBiVK5hJEoRGEULbQBMQpLOYFmRa8bDTZoMMtM0aLEuGCuKuWcuHejTfDA7QRdXpNXH9cufQsvS88S/Fexnj3iNGtMZe/nHLxxVe5fftD1sVVhNFEQUJX9Hjl8udJwy3eeu/3uHf7HZ7/ouH82k85FcfDy2PX6VYDYont1n9XzeOZQ3Xe8l0MkOeG/+6//30KYUgSyc3r161P7Lmz/JmvfZkz66tkRU6Z5dy6XXL77j3arZTFPLebVmJQUqKkZF4ULOZzgiCg2+2SJAmjo2MQEqkEh3u76CJndHzIR6VmMhpT5CWbl57n/HMvMFhZp5W26CQxJg5I04QoDjg8OiRbLFgsFrTabQDyxdRtrx7T7fXJs4zpbIpSEq01SRzR6vaR0S7ZdMLBzg7D89tEkf1+sLpC2m6hohPanS5hGNPvD5DGECjF5tYWRVHSbnWwG4EqgkARhIF9H2FIw4AkVKx0IsD6NWsE0ouVBu8iig9drXZCaDIqrxkzFnCfRCMaOC4kmywVl/S/Dvus2SvVpGx6MTRZhHefk43x7NmoJyFNSiFq2b/WOjq/2Sr9aU1rq0f6Y82YQH+tPU83VBBWdDbabxHuYwkdBLmON9U9rJFNe/cqYceHRlQbeApj2OiEHLnv7sqAT4Z3eCFJWA8LinGf6NsxZVujVYacr7MnNdHVXTobc+azE7KsoBjnhMLALCAaZyyyPQ5251y/p7g/XfD3vnCOdDbH6JBQKVAKNS9RwRbZcEaU5EzLCe+OIi6FqxTtgnssGn3jSmX4tOzXRwcuyQ8NTxGB5PXLL/PNb/4WLTmjvxLwc3/pNfYne7YdgV5nnVh30UXBjVv3WcwyzicJzz3/K+RMqSL/HlEeO+guDc1Gewio9j/7LGoFMLx/7TZvfv9d+hsDJtMx+/v32dza5M//2a9xdmOAKQ1FltFOY86fP8d0OmM6GRO0I46OhwRhSBhEHO0fMJtPkUrSbrcRrZD5bE62mBN1O8StFmEYMzk5YTGdMp1O2b54kSDucvXFF1g7s4U0hu2tNVrtGLA+sJPJmEAqpJCsbW3S6kwpsozJWFHmGXEco5Tk5PiY3koHoSwrVmHA1eeeY/fGXbr9LqOTA8ajEe1Oi8UiJwgj4iSl2xtwfDRiNpsTqYReu4XA0GtppFSkaUqaJBhjiKUkErIyQNp8CcsM1nrciGqTSTQEos7BIIVGG0HpBqTdH825GDnWpTH1LsFPUBHCOxtZ8V/gtvV2sKkbulPwxKFBK70k7hcqvGqiAdr+Dh6A/aVN4PSRUU7UXwpGER6c7YOa4b84sdkAQrvvjT6lwquB1IOqzz5mwVs3GKCu9b9uMSgbagW7p5hBkHH+TIvknQPm7cv8q2zMKOrzph7yUhQi22ukJkfMY0opOUoS5uUB4laffKckzBJWe4asoyENyIsFB1rTjRLObrSZ3g94dzTl//a9Q/4X59sE0lCaCKVDEIpimgMKoQ0r7ZyMjPd3Si72WxTrIXvYcDK/oDQjCJd7xYNwDbz2tUue37zE4K/9AidHH7Hy+grd1RVu3H+PLXWVlfQSg6TDrRu3ONg/4GjvA1Qw5vLrnyeIu9zf+4j54og46j9y7P0IvBeag4CKMYjGtz+oGEAXgn/2z36NolwQRnDn7l22tjb5a3/5L3LhzCYBJXNdMFvMyLKMMAhYW1uj1+0AhsGgy71797h39z4CQakNMlC0uz067Q6hChkeD9nu9VgZDAiUYtrtkHTarG1s0Bv0KbKSwdoK
YRzRShOEhPl8gS5LxpMJRZ6jlMIYQxiGmFYKaUKchghtyPIMFVpVxtHhEWEYEgQBoZRM51OiNCHtdYnimCLPmIxHTKYTiqLgzJltVgZ9nr0aUxYl89mctcEAhWE2m9FqpyRJYndaloI4aGwQI2hM+WUu0BQlm196a7mPpPKBkMr5NRqBjXrSoqmNeIKKqH585qkmI/CBDl7311Q71Hpf97dotGvVfo1IpVofYc/XlmV7Pa5FPsuOJNYNb2nPPuE5mY3PrBZMV5ZVIk4mMdYP2D663hzK6nIFhqKWXhxLtuTbApCVdPzG4tqqGYxlyuqs4vn1Fh8ND7gsFboVM1InvJ9OiFsKOQ8pyoKWLogikFFAwD7j/IQkyUnKgvZUgJozac3Jdcp0done3QFfXBU801vnuwcL/t93FvzdcwoZzhGBscnkS43JIie+p2zICOYnHIaSraOU0WDBXOraWUr4kdtYLR3rt29vrBrIYHdBcYvm+sULdD5nKJOM8aSkyCcMkpdYHE64V+xz5/q7LGZ3WT1/lu7G84BEm4L11Qvsn7zBevjFR468HwHoNunsg7JocxV6AILd6BIYbly/y+99/Q944QsvYzBsnTnPL/2Zn+PSmS1iockWOdPpjEWes8gW7O7u0un22NpcIwwkwpQMuglRoDg6GVOeGPorKwRhTJK2OP/MM4ymUwIJeTanvbrCmXNnGI9GTifbYjZdoE1JGkdgSqIoJFsUDIcT5vMZUiqGw2OyLMNv2y6kQChlF4s4IowjkiQhSSPSNCUMQ6aTKWmrTdrt0FlZZX1zCyFhkc0py5IoitjcWGdjtUen06csDaaEosiJ04SEFO237xHQCiOUoMG6HtbyP6DLxPI1wqkiwE5M1QCRJ1GtK52sXo0/IR3ggAVIWY0/z2TBmpxEA/VE9T21SsGV6lbu+mWNmlMnVABgXJt7UGjMCB9Z5Zh2M0esfbZNpV6Z+ip8MfgkNvZAlY6eGopMrUGp6ldijJcDLORqX19KxumQtStnWXn/kAujOW/tTojPrWEGE2R7HzkrCDJYzBTdJKLQGSQQRZJpviDuhcymOaPckLQgY47cC5ClJMcwEBm/eDbinUnEv8ki/noaks2HmHaMCCKINEUhSUtBKTWLecTiZA95ts+5do8b8by5pFT9vSRHGNPob9vWgTDksSCPJmStI4piiMkMJ9N9ImWYHxTkw4/Zv3uLMJ1w9uWXUXHLefkYhEv1mCQtsmL8yLH3I/PTbW5xDlT6Q6ih9uGEyVAaza/+239P0m8RJglp2mF7a4tLZzZIAmsAW+QFUipaSYtOe0GcJEwnUyaTmJVeh27aIlYB+YWzxOkhUkpa7S537t1FCMnW9hlWVvq045jRaMTh8RGz+Yw4juj1OkymE5QsKfOSvZ17BJFkb3cXYxRFqcmzBUIYptMpWmuUUsRJUol7URwhtCGKIlbXVuh0u4RhgDCadprS6XV5/tVX2Dp/kc2tLfJ8ThgF9HpdVgd9zp/ZoN9p0Uo7SBmiS5jPZggEYRyCMeT5gnarSxSHf4IeqhWGtcbL9Z0x1VY9nv1aA9yndtyPddHCbb3uhe+G2quybjuPBsv4oHLzmk0QrU4tHVQqBjuRpTtglr6DimXhoc8xzEqv7heAhjrD65UrpkmDUmNVBpVfqicwFjQdP22I2P75pjq3FKC8D29lb5H2nYWNdNNGYyjBRfEVZOw9e5Hu8YTzwzGHWvM7f/Qx0Zbk4i8k9LZ26ZQzilyymHdIkylZNgIlCVuSnTKjNZCEGg6mAVL1SNKEbJAymc5YjGEDzedXYV8kfC9IeTmbkk+OkCsB2ayDNF1MFpGEKavRhF+/dpPudJ8rwSXa5/qcyIzaT6/RdtQSxOkw9txo5vEOs+SIEAlKYjJJUZ4QZwMm+zvs7Nxk++I67a3n7R10bWrUWoCQxOEVjsbvPXLsPV7Q9YOIOqPPw3x4zfK718Up/g+Pp3zru+/xwsufYzyfsCpKnn/2
Iv22ZZyLPGeeZ9YGLOwuuUJKoigijmPKUlMUBePxkHwxY321T6fdYTyZc/H8RfK8ICsWrK30ee7KZaaTKXuHB+zs7mKMIc8zDvf3OTo8soxPBshQkqRt4ih1DHtBq9Wi1W6hNURRRBgGSCWI477dN0trWu2Efr9Lp9tHGI0pC7JFxmBlwOb2Nuvrm3S6HaYzq67o9bpcPH+OjdUV0jhGSoUUISYQhIFCa40UEEcxGEMSBFXi7AeSYTcXvdrr/cEvl3QP/sNDpBBz+oInp9iE4ZIqSbbR7jPVQukZbt1M9j0LMSOghd01Vla5ce23Eh8K5q+tGGW1lDXavgG4D5NG6mtqycX3nXbzqmKzwuOMqwOWfdWCtA+rMA6IauzOxjuozhqBCNDCqhe0sQY4UbkPuucKw6ijCV46z+YHn/DFUcGdC5f47ltv0jbnSf/sJtng+4TRFNkeIkxC3ArReorUBj0NGE+7IBShVkR3P0dvLmlFBbqniTuGe9Oc+6OQpFXwT3aP+KutiC+0VhCLFiYEMsEi10gj6CcDrpzf5p/c/4SfuX2XFV0iLvaw2ces3lxX6iS/wEjXZF6vbVsomg6Yx3MWxRwl4GB8HYDhnYjJzodceuUnCNPUqVtcDmRTmUIRRqNkhOFHHBzx0PKQefsDL3Hs6ne//ib9jW2yfI4pM1YGLZLAIE1JkZfMswXzfMZ8kVNqKIoSKWWl4wQoS402Je00ptXqUgJ3ij1Goyk+FPKFZ67QayWsdTtsb66x0m9zeHzMbDZHCUlv0CfPS0ajMcPjIwKprIfDyQlBFBElA+KkRa/bIwhChDB0OilBIJESRKmJk5A0jVEywJQFpdCUheDs2W16/QGrq2sESjJftJhNp2ysrrO1ukIripBKolSA3/I8CEKOj09QgSIOQlpxQhpEjVDez9YvS/71P/B043IyOEgSPrXjk1X81uu1Jtflv/MqAuHDTJuuVwACmUjyckYUdNwRW3wyFBCVMwFeMez9aB3Y1UERDe+BpqqiQsRmqm7RqE+9bbwvwlhjmveCsIuJM5zhI9xM9Qzj9NlWJ6X4aOff8uyZv1hn8AOnXLARbM1t4Euh2d9ucealc6y8c53Pj94hf/kV9u/eonjvEsMzKTI8Ieq8DatHdtcMGaDLkjgtGO1KVg5+ip5WKJMynw+ZjYasDgSd1JB2F0zKjLuHmjyN+PtHBVG4wusRpGIVnURI2hTTCUXY5VeeeZ37ccR7szs8fzIk2QkItxMb8FJ5EtTKhno5831mdcZxtMJY3iYvpmQLSVkcEAcDklaf7S8+i5GhdZ304E1R3drepUSYgH763CPH3uMHXWMQsiG5NL76wXNVMJ4s+PXf+Dqr5zc4OTxk0B+wsb5GuxWCKcnLHKGEdb8yJcPRiNmioJXGZIsF85lhYTRlFBKFEWWpKXWJVCFRGGJ0ye7ePs9cOU+33aKdxkRxRJZlPPfMFWbzOVlWcnh4zCTPmcwWTCYzbt28xfDkkPliisEazqIoIopCgjAgjkN6nQ6tVgJogkDSbSUARFGIznOKPKcsAsqi5MzmBoOVVVqtFtpoOu2ErNWi2+7QTltEUUStv/MbQhrSJEaqgCSM6Max3Z69nrkPbeWlHTnc70pIWtLlekd9lgIlvI7ydGTPk1QqsDElCKtoqF/QQVK1A2xtAUcYpGgx0iNCOlVkWxX6W9HSGtrs/w/ZTty5d9Ws1H72br4gEabecW9Z54tTpvv7+pwDXo1QiyLCMz27E6VzExSUwqDcYhCqAeP8Ojvjt9juvFrpKev7N5Qi/tWE5OBLz7C9t8OFj3dI73yTb599mShbcHgzwRQxw/sv8OqXW0zkNVqJIs9CZqMUWUj0ccYEzXqqbb4CmTLeGzLpQX8DVjpzopU5O0ddht0V/lWhiUSLl+MVGA9J9XXM0W0Ua6gw4z85+zz/5304Sqaom/uc65+D1Iec1Gy03jvOvlWgFOv9dQarz3Nz9gllPkGXiuPR+0RB
iJaa1a1zmNway+pwRYPVfyugBKPcOCgJ5Y94N2BfrBHA7ZWG+YzzsnmS4drHt/ngww+4YMbs3P4YVMyLLz7DdDoh73Qdu9AYrZES0nZK2o2YzeacHB+TZwvarYQszzA6ZJEVaJGRtmM63S5ZfhulJL1u2wJmEJHlBVIpeu027U4HjKTXG3A8HHE0HDOf3ieOUwZrG2idMToZksYJaZpY41igaKcx6yt9kjhGSkGShKRRaKeoEOTZnMlkQhgEhGFEGEZ0Oh2iKAKsnjoJYlppilQBQigb59/IFaocmw+kopMmFnBdEy4ZRj5jWbLIN66v3ebd5KfeSeKHfcaPT3Esh3qc+tQvNcurWwC8eC8RIqYMdhtGNFPdo+lnaxrXLakTHOP0Envl+oVXw3l26lUgNSO2GOmB3NTuucJUAE2DRTvZxJ20vLAaFxJhF9CIQbzJzujbtOI+aXDBRR02WLOgYbyz4yBrxez+yk+z+S9/m+5wSjD8mN3jdxAbV9htbxP31jBHa7SLAePxEVJrWkmfPC8g1gi9YJRltClpY+gmKaPxlN2hILkc0Vqdce7iEb05nIxjflOlrMxmXJ5eBwFRp4tUHfTwBu0s42+uX+T/ebRDxgHqnUPOvL5WE4lGvwghacUJm2uXWOufY14U7Ozf5K75XXKhOT7aQZYFffMCgUiJ4gCdObUMfjES+J2BbZ8W1e4en2ZefsxM14svf5JiNz78+u9/l/WNVUxZIAUMD/d4+9vf4erFMxRFDthUh14007oky6bkeY5SArRiNpuRxjGT+ZzJbEY3iijyAmOg1WqhVEAcxYRhSKvVptAlSikQhsC5RfVVSBgGtFptJuMZO3u7xCqhLAOMHtLudFhZWSGOYzbW1ul2UnrtDmkYowKBlBCFoTVKGMPC7fBgvQ4MaWpdvgROOV9qpBJIaVMz+ggm4YwmSkmrOjGGVhQSBfX+BL7F/zTcs5lf90FkXb5zLbA+OcUIkEZjhPJHgEZggZfMPIAJn+zHYFDE0lDqnEBG1NBrHKh6tknFZB22OgJqMG4bd6orG1nKTG38qnLcNrwZ7L20+6CdJqLufV3pKR1LrhZhu6hoqJ7h6ymlYGvlp9m5/8+4N/wmV1ZaQB8Pz/56G8Ulq/fRwKwT0/ra68S/8zZbKuRKd4XZ4oBv7Ay5sfk8pQxJojaqGFGYgtlwn07SYjEzlFHIwgimZc5ELxhITS8J6ZuIg3sltw6he1Gyed6Q9k7YH3X4d0XEfyY3aBW7LDII4wgMBPMFV/uavzQ4y68mCTevf0Lr9ojehTZenSCFpN9ZYWv9Mq14ldHJEfeuv8fh/nXGbUOxOmQ6HmMWIy7Gv0hkVjGLEpPpJUNm5dvr2twY7+yusCoGP64eLI8FdH2HQEMP5b/wIk/TciZOX20n/Wiy4Nr12yTtmNHwgNl4jKJkNp1QasN8kdmQy8ppH3ycuBSG6XSMLksCJdHETOcLZvOMTl8gpKQo58RJzGDQZ3VlQBKnSGX1o8a9hFBugkroyA5RVPLcs1fZ3Nrk2kcfc3B4wAGC6WTK1nZIK0npd9us9nqkcUyoAlQgkdWWAdgUjwiSJLW+gca4wI0A3ECOwsi+k6jZq/1lUEoRBAFKCKIgIFY1UDyoy20cMMsTt9FbDzmO06k3DTx1OKU5baR7AkuVjaARCCHcJmnVqBXVxkd4XbpA0DUtJpwQskkFsE2QBOeKVRvQ7AjV9eYvpuqS6pgFMrf5jPBQYVVJwoF1re5oihreEaypz63TnttrTX2Jy+eLAS000kAan6Eddphm9zkcfpOV/i+4d7a5NqQxVUBJc6E1GA4ubbL2E5eJvn8HNZyx1Q74j0PNB/vf5V7rC5wEKTIMKMZ2Z5WDoyGdNARato4yYaI1C5GT5YaugI0opCvb3Lme8+Fhxsb5FTb6XfYPSn47KvmVvE8cHVMWNmQ41yXR9Igvxau8u3/IW9029z7ap7vdIoxDVnobbG1eQRFTTEfsHl7j
/r3r3L99g/v3r3P2F89Q5oLF9Jjz0VeJWLHv5xdLr4+nrFUMzbngdeqioY56SHnM6oU6o1G1Vj5krj5wyBkRPvzkLiKCuBUjVQ+dzRgeH3L//n2Oj064cvYMSRgxLzKUUjYwQWvyLKMoCvJsgZSK0XhCEMVkRYk2EEcJYRShRxMGgx6DXoetzXWSOCGOwqpGdmsWgxbOIV0FgGR10CeNI6QpyS6d5cXnnmU0GtHtdmm3W2yurdJOYgK3INQLjG54DQibmEaFCAFFWVIWpT1XCMIoZJFl1oIscHRFooQgVIIgECQqJKioz8MA9+FtfPo0U51hTh0X9QR2RwyS0vi38fvEPnkAXOWZrTfScWoZgXZ+sZWngL2idisTIOQaU32LjtjEJrTRFWD6LdorN1Dq+H7PYZtqBSOEc7/zTNcBr6mSOFZ3qe9TOSpVPed9af1f1a4Y/h2q53uWXFYLjgGUkZztfon3D/4tu9xGJu/Qj16p4PX06Kji45z70eGrl1kfTYk+3iUnZO3cWV7ev8Pmvbd4L1plp71CHsUUWUYnSRnOZoj5IavdHkJEZKrFSEIuc0pZUCLpCMGVtM9BHnD7u3O6mwYVx9xYTbkxvc5Fs0YcSERpCJgyzwISdZ+/sdrlrT/+I4ZrbU52cr76Cz+L1Irp4S5H+zuMxyfcvv0BRTGhtbbClbOvkbdOmIz3OBt8nph1C6ZGV31oP+jqd9WsWC8RO4Z8zoX/gGHAPh+9wSW+/oylNJCbgN6gjTZTpEjob64hAzDakC8WLOYLQqmqQIT5bOZcw8YcHx+T5zkyiCm0YTKbs8gyAhVQliWTyRxdGlpxxLmtTVb6PdIkJA4CKkd4xzS1LjHGUBjH0AWgIy6cO8N0MWcxyMmyjDRNicOQMFBEYVCHcILVOxsL5FmWMRyNaKctOmstABZZRpZlgFU7RJFlukbbd8PgANcmuYkDVU+ExnM+c1L4qoiHTChfmoDrjpjay1Nj0MaqgZ489UJzMXR6U6EAUfkfN3dvqNkw7uUDSjOnICMwAQ1TJD6/LlVLCWrY8uoC2Ty78kjwWd48KNtTXEIcp5LwwE614NUZyewx43ivWXqqcMuoVz74BceyM3tGr/0c7P4Ow8WIYPgd0hVBHD6P1gojNbEImBXeYm8wosQFUVMqycFPv8j5eUF5f0hxdEiv1AStlDTIuHv0IZ8EK+wIKGVMJ47IS8P+cAQSummfPEnJdcDcFGy2FHkQkBrBRhrSayfslzmZANNK+Wh9QHTvPhGGVnvDBitNj9nTkvODbf5Xr7/G//Y7f8xbb39Mf3CWTrHLweiAo5N9Sjlh5exF2oNnQCq++40PufXWPV7cbJE+t+VUehq/JX0lQRiXZ++USGjX8Notjx+1eqGqC/UkrWPUH35eQwmBEJCX8MmtO9bVShqiKOTkeIEUgnTQQ4YBaZpa30qn71RBQOn0sXme2x10Q+3SO2ryLGdlYJOTh1FCmsSsDrqs9rt0WilJGFbGKAEYpWyDygCDITAQSE2hFZGU5GWJlFbEL+OYOI4JlHLvqiudK0BZGkpdUuQF09mMyXhCp9VCCK8aMZalF7kF5jwDBFEcE6iAUAUkQUAUhkRKVbtz+FI7fNf/PlrEqd3la6L8qLNFbUAxvj+F82hwnfWEAS6AD6m1bSGo86xWsEbtZ+t3qfQ+uPa8WCVMyzFdMaBKz2hq0bt5j8rqT3Nh9CKKRghFzVp9XUz9PLdo4xmyC112HNs9xbOwajP4hhxS0x/TON8b5rRjb0oozvRf4sP9b5PKEQejP+LS6m0OdZfRaMSZwZ+tckVo9zyvI0ZIiijg7tdeZvPffZvpzgHTbELZTog6q7wclmyWu9xWMZ+UEceyg1QpSRwxzTLm5YxsOCWKu6TtDrtFwFRAr9smKXIGSciZKGGsZ8x1RDEcc6s8QRQlg6xEqS0mWUEETMd7XFnZ4O9eucB/sz/h3/37P+DqVkZnXbBy6Sxhq4tQlpkWuWJ/
X/PeH73DV//LX8QYiU30fsqoWhGoWrSspRD8ytlgQg8vP4IsY58m2NaldnyxP8NxQWk07XbK/n5Bls8IAsU8y8gWU/YPD8mKkjBSDtwkKggRQhKGIb1ej9F4wnA0RmtYzEta7ZjxZEzabtHrpKx0u6yvDuh2WrSikKDJZvBzq56MCgidq1YRKLKyJAgs+/SbPlpXrprV2ltY4NLGsMgzirJwrm/rRIGiKAxxGBAFirIsKbRmMptSlppet0caxaRhRCsObPy5ofIzbRbx4KGlFq9E5VMvKXQ1/V20ac3BTHUPUfmY+qxTFcQv7cj4ZBQjdGOTHJajJRscVDRA1n/2kNySKxzrIV3Ts/eo+hsLwrKeqF615rnm6RSmtbAOlWduJcE0dwY2S2qKJR4rfB3txPfX1FhtquTkTa2s1yGXjgVv9V7lndtvMgpKEq25M/qIiTb01M+gdSOdj/ALSW1EBEOeRuz8uZ9k67e+SXAvQ81zCCdkSYe1WBMXBSvjO9xTHT5RA4bBClEQEsYJC10wN3P2DuYM1tYoVI/ZBDY3zpKhaYuSQGmCw7ski5xcdDD6hOH0Hl2RUWrDraMZfdNGdFpcvHCRvxTc49dPRtwdtvjpL14hVPWGYiaXvPm77/DNf/+b/PW/+SW6Z89ilWaGOuih6fvs3rfpElhRlrpXEOUjx97j25jSD9Tq6KMB93QpDNzbHzIZj1DKGpyUkkgkcaQ4PDxiNrN5FpI4Qhtc6kZJHLfQzMjLkqIsieOYPLeKb6Ui4jikk8Rs9PvW3zeJaMURkZL1/lWwPAtZBlAlBEpCGEiiQFKEmiy3inxtNJR+vlh9l5TS5aK1RrA0TcEIoihCGMuWwzBESokUVrfYarU4ODwkTUKSKCBxgFuJvqda19axXrQ+tcVPY6R4UGdndc+nxJPGCu6lbmsI/EE9+uNX7O4Qp1rQGOotG5w0UA1mN7n8giQEgU4oxD4lJdIEjTFjVz//jIqlNvRB9fTVLqOZqdtZVPhV8dVa1eN1qLVx2p8jqolf73mmTQOyhX9N7bwQasgurYIAYwRB2KWTdCmCE2bZgqwwmEJx/sxz1aJc77XmXqoxX4yBIgm590tfYv1b79F55yP0eIYJW4SRZNCN6UjDYDRmkO9yx2TstzYYkxIFMYFIUDJjPj8mNyVqfZu9wxHdOGShc8TshI2VPicr6yTX3yYbZeSyB5MFSpX0oi6f3Nqh09vgfrnCYHCBP9e+yx+MFrz7xh1e+cnzCGEoF3B4/y733/n3fPUnz3H55WeQQVD5Jxs/TvAqBfueTRuHHUum8tO2w0PyKbPvRxiR9mnF18/1W65h72jIdDLi8HCP2WxKFMXIQBGImP5gwN7uHifDIZ1W6hLMCIyGvCgYTycUuiRttQiChNlsAQjW1zYYdFPOba6zvrpCO45Io5BEBY3B7Q0mDdGdUxjsvlKACOyGl1EQsCgLcl1SFJaxetC1obqCIAhsFrAgxBtpyqKsWLI3BEohaacpan2NOAyJgoBAWsD1Aqs+VZfl5mwc/EEEtDrVTZ4GgpqHnUadREcASIF59KL+Y1yWCYF9P+lYphX1l9+/MdH8AixtwvsZM9rG5k72Hgt2CNWM2Xuh2O3arfjq9X92oZRYVyPhdjGwi55PhlOzLQ+eGuHyK9i3abBf48G59mbwGceMMwxJz4gx+C3bG4I0a4NN7kxPGC00aSAITAslk0p/78+tF2TjFrH62WUYsPflV5gNWrTf+j7s7aEHPVpCEkYxq/GCVqbYFjPu736fvWiN/fWrnLQGhGmb6WKKETA/3ke1BsyFIM0mtKKARV5QiJJ06woDUo6LKVlp2JQGJSNWVi7wO29e42uf/ypvThYk7U2+crbkg/KE2x/dY3WzTxiUnItjfvav/Vk62+dcv/jUpX6JqxRpVKuhlyLcWlNL86Lhu/0fUL3wWYugFo2nk5zd/T1yPSdOErQuSZKQQMXs7w3pdLusrq3R7fVRUiFVQLnI
mC0WTKZTptMZWkOWl2isv+7WxiYXtrfodVNW+h3i2PrdRkFQxdoj/JD77KKyr3OoBIG02e/LwKCNtjkftKZ0GcCMDvDO5tYhXdot0p3BLstzDIZYhQggcVuph1KhGghgJ+9yedi6+si19rQo7RM9CwsK2qkQqgneWBSlZ1pI5zz/6QPsx7cY15B1Swo8a/cKBJy9QNdJaYQzRekxO9l9tIJpMKaVtZ0/rDNQGVMlBvNAKitjm9cRN1QYuMQ17jrvAyuhBsQQdGkQ2i+9pn4Pp/et4cGngLS8t1YIG5qeyTQA1+K8W37CkFYIRSFoRQnk7QbfrhegemgI625X6fidKkrA6KVnyda7rP/OH6HHhxCAHCToJKQVaCKlSOWUDTNkPHuPgyPBvk456W4y7W0yB4zOkQWIMIRAosM2i8mY0WzMMAnZFpvcXYw4mE/ZbA1AatbXC77+wUdcef513ji4zX6ouTxQjDoSEUIQRWi5T2frgm13U9aLic/LW40V/066mjO1jbX2JLHjRuKztD2s/NiAri9GwGiW0UojkjRmNp0QBAFBoFgsNCvra4RhSH/QRwpJnhcUZUFeZIxGQ46ODlnkC7KF3Za83+mwvbnJxuoandQq7ZMoIg5CkiiyARRAJTafRqqH4YmoP9QLvdXZhUISSgCJDtwW5tSM1xv9pBDkZclsoZHKgrQQirLMKbRGCo1SgV1UpKwfe0ptU4upP7g0875WopFj9qZiz95IJh4AXLuy1xuGV+5Pjx5fP8bFIVVDP7rk0VAZxAx1msMSoSVGGgQx8dwwVffJWpDJAbGOK9az1EPuo92Lq3TfNHMCuO+NrnZtMK4OWngvBMNoPuWDyTu83v9itQ2Pk81q9oWd+gq/U7F2MFtWTLtePPwC6jpZaNvv+YJesEZ/4xU+vPU9MLn1IadxbmXgoxYDmzlqfb2wvquLjU12f/nnWf3kY4rDGZ28QK1twOiYMtckrRSlDbGAaHFMe3GXyehjjj9MGK1eIvvcC8wmGhVIwnaf2fEh2XRGQE7c7/HBvfv0Ox2OwphxeUxsFBEZ/Y5kf+c6F1cGXD/aZVYGXMpD0tBg5mMWK30qt7DK5mGNlM33rX1CGseqpqh9qv2k+TQnoscWHGHrbqpcnT/4CntWqWG8yCnLHCEF48mYTqdD4TKHtdotJpMJRVEym00pF4KTyQnD8Zij4UmVy1YqydkzZ7h8/jxrK6vOlUsQBSFRENKKbJ6C6ukNsJWV2ICz6jbKkvKzoYIQ9ZFKWBV2/7GqBVStNRaAFJKsKClcGLPVsym0MeS6RAjre9xUqZ5uywpIH6FYfSTbpZ4oBihNrZvVRtj3FjhxySzdSNBM8l0nZHnSis/a5fMaVG3b7N9T+lMjtOtTRT+5RKIHHBT3OFZ7bHN+qf9NpRP3R+11ppqUwgErLj+CPSZPTWzvtNQKUm5NdojDN3m59Xpj3HoFAmhRIis9pMsJTIGNI9OVHruGEA/JurqfUBEb3c+jxQKCDxBSE2ivrzTLE0b4xOrOX8Kd0zC34Vlv1u2x88qr7Beajffe59xUEwcR8zxjXpbIqISkjVg7Q6QixOo6SWeDdaORg3XmWcj7b+8yH0+Q+ZQgCNFCkeUFQTumnxraE82RCimCiKK1Dse7JEJyfABb/VX2J0dcO5ZcDkLaHCPSlo3/EF4K8cBZey5Ug8PUenJRtbpXKVQhLz8Q8B4L6Pr9tWypGdWj6uLFMIlhmhnu7+7brdILQyCU7VclGHQ3yOZzWmnXXmcyZtMZo+GQRVHQSlq0khZJEpEmKSsrK/RaLZIkRkmFknaXhiSJCMMH6VkFJq7aQljtXgVGS4D7qNL013gAIusV0EAgJUoqhKi3zhNyOStSICU+HcsPU8ypxzfZbT22PNI4m7q7qBIajWfB1VyqxFotaoatfmjf4B+DIgzLmxj6yK86p5fFDBeGXTcVPuWoJCBVq6xpxaG8x0ItiMoIv8G7ZTz1hKzHTx2w4D0J
/Cef/wq89GHcVjm2li8PXuStg++wlZxnVa5VvNqgLRiYGg7sPZaXxOZTvXubPVdWT/SQoohoqx7zbESiYgfWdapIf55ftKie7DXVTknl9dQGSiEwYcjO515A3log7x5wYkbMB13iV65QIiiVokg6lEGAQKGMzXFw96MDfvPf/PfsT+Ern3+ZV197EVGUzMZD2q0YdXaDZHKLNA/ZOSkpckWwcoZsOmYgjzgY5Wy14WRR8O6dfc4NWvTf3SG6NKDox9RudfrUG7oEOU6aaG5Iatfl2s+Zavl7dPkRqhf81H30BC0x7B6Mmc1mzGYT9vZ26Q76jMcnmNIwGU/QZUm/2+PC2bN00pRcQjCZkqoAKRVxnNBppbRaLZI4Jols1JdSEoEgjkOSMLTDYkncXi51mr6a3Eq/CJ56q6XrTr9frRN44CoBNpJOu3TaphbHBFa9ECjVYBafzmofXupVevk60XDnt9ND412OnFLRuOtE/Z7aMTOfjuUUCX5yituu2xq5apCrzZT1nmGw3INN3gqCVA5o58cMwx1WuVBBrgcpOwRquKu2Rq9AvXTg3wAsoytpQ/pEOsCF9CLX0o/41s4f86UzX2ZVDhBl7YDmQdP+p+u+q4xqVB1mn2nBVhq7gY+HbO9u0UvOsT96m6DTx4a/1r1tXd9kVetK3K6WEfdWDZWJW7IpA8lur0d70eODbEhurnGht0Zt9JPVuM8Xmg+/+zE7Nz/hmQspg9GY+fQW2xdf4d6ORImIla0BZhAxX7uKwLA2z7lzaxfmISYdUOYT2nFOVgb0ggKF4cbxjM1Wh607GelCk2/E9XZ/3vuk0bLWt9IDaz0q/Lvafm7KpQ8vP9J8ugYerutwSFYYzTw3gEYbq6c1ooUBxsMhQgVIpXj++ed5+cUXiclZhCGagFIXBEFEmraIw8DmJlCKUFr9qUQQRyFpbLeyeVip8PEh7dVQEoDxanZ3+ulrHrj/8pfNpUe4NvEg7xXxNqw5qM73uGs+pTM/O/hZ1lUaKqAvjXcv8pjb9CStNVkG4zartC9uAK2bb/RkFLtPX22OqkV9g1TWYOWWWnC7JtRjQ1aAJoxACE1fnmevfJ88mBEVSXWq8EZKBMJopDGUjtE2s4vVvyv6ZAEXq/oB6+ESy5Bz6Rk+Lm7xb6/9c3754p9jPbzopBDP0erINWvw8f3nXdOsDcGfWzNXx62NAGk1ySvtS9w7eJs47ldG4EYrOhuFb6fGW7gh4QG81oeKSmIQccSsyJkXx6y9uFmPMue6J4RidDDmwze+x2g4ZjgakXRafOWVC7zwhS+BDIjTfTorEbIfU4gcYRQG+N7wQ945fptndZeN+Av0eyuYIiMwEp0XdKOMSGlGxrCbhawfaeTomPjiOnlUNNYWX3+shxTLOveHye+eDT+qPBbQlc1+aawGpavI6aTXftAVpWIyW1DqDK1LwjCiLDS6LMkWM1qDFV753Ku8/uqrpJGkLDRBlNDtBTZqK4yQUtp9yYzdnA5pXbHiMCKNApsgp1qp3PPN6Tp/WrHX18akJcmiFiF/AAZ5oBZCoEvjNAq1VVu5XYX97R51S//dQ59xuuZO/+oIbGV99e9T6xmX1x7pKlu6742bF7UW5MkCXKDquKW2FSCMxMiSRTInnsbgPEuqxncBDxY4LBjaCLKAXnmGo/IOm+KqBelKS2uZaxPGPch7Mm2EcbpYVy9nsNSOoToPMoSBy+kV7mbXGc4qTaoDVutlgfcbrYSVhsENHD9tgrRpfAM+WsZgaMUbBCIgibssR2j5AUx1ByFs4FDtlOwbuill1dfooM39/B6L7ojO1nO2/iiQUOaSwzvHfPid72NMjhAlnXbBKz/1OhsXLlp3S6PZ2Fpx47GskMZg+NatN7h9cMQ02uWCKXhGfpVO0ELoguECUhWSyoJuPmEsOtyaSM6mHbL3T0gvdGDgJU7ntrck9brPPgq0YvinQsUfUR4P062sgI1KNDqnOqeyegIGsqwgSgI6
nTZZltlMW2XBaDhCCsmrL7/Mz37lp+mmKabMEFIRKEEQRnYbG5cGUbosUUoYwtAm+I6CsAH2D1mdGmPEToS61R5YtXySEGMeaORHEOUHn0eTPbtPnt24JDnWy+Ez3vBRK+updzZmWcsn3UTxeSJK4wXdUw/1IH1qGTD1eHyyStNy2mQ1QkAu0HLCtJORjvrVZLa+1bIhZoKnbQZDLPvI4h5ZNCMuWw6MTQWWXioSDpCNEFZKEPWzq5YXNqdFDdX1mOwGHaQpGHTOkGHdDEu03/uCZcNnw0BWJfLxe6f5d7J38AuwVbnY65QIuLRyiXaQklf3rCeLcIZBD9I2MsgZ3cyycck7PACU+YBP3rxG2JnyzEsX8DIeCCaHC9795ofc//gD2n2FiFK2zm9x5aUvEbT7VsVlCuodnDUQOJC0Y/nLF77IpPw6M53x3ug6ImjzQvvnYC4JA8HdwxPSpM1KBMFiyiJMuV0knI8Djj+ekKwq0ostSrFojJnGWG+qpIRzE6sa/dMn7WN19nmAhXmGZB5eJaUErVZEEAZsbW0SRiGj8Qmj0Yif/Mkv8/Nf+Sr9dogUhVUZSFm5kymlXICB3YI8UpJWEtNNUpIgsBnyH2XhF5ahavGnI20/NPYIL+ay5ErmAfd0VT5NtfCZ6me8KuPRdTaNdqi4iQfl5hWNtfOH0zH/eBRjGvFbwkWOCW8AMqSLVUw+Z9I9rnxXgXqyLemipL2fDFgNXmRc3nWeEe4czzgdqAj3HJ+G1E7ZRlCvmyACy34tA/a/DVob+uEmW4OLjIoxu8WQ37/1azVr9RIJNTAY7ILquar1A16GX29Us/t/1X17rvMF1meZ/d7lg7Z1r7XX9fiQ1SRyHNc3HH5h0fmAm394g5XtkrMvbiCjFJDoMuXOxyP++Nd+n+//0RugDHcPx2xfWuGZ1z+HSrtoXaK1lQyNMdbrx7iFw9jdY7TRvLj+PH/jpb9At9XnxfMX+bnXv8zWMyGH8YK5CBj0u4znY3YWESpMaImSkJy7OgGhuHVzzL3vHmMWbrv3pfzHkmp7p+qzP+7UVp/iR/lYQFebumMr4G04Z/vvq4xVxlYkDKz+rN/v0uv3abUTjCk5c2aLr3z5y6RRZEmGtqKFClSlu0VCqCRpGNJJInrtFq04QXl6uyztLP2cJj268eMFSB/G+zCAMTQFr4c85yHFVyeQAiWsKsEnOBdY8PVRTKL6+fQV4fSrPerJD7tLqQ3NbAS+70pqQ5pwbaWgCoUO1KNJ9o9zET6s0/N+v7mjqc2L7fk6UucsekPXYRoV+Qy1pgJjb/gsi5LDk0Ni02KhRh5vTz3Xiq1+3Al/UiNhjr1nxW/dGKihURjD1eQ5ZvmUUkk+2r9DJ+g1RF+nkmisjB5S/X5ptoO1e2+3YXmlevNQancCXsiYaCRol/7+7n7GJvf0Ok8tmjKUb1dvkHYj2fS4/5191i4H9C8MEFKgS8VimHB0/YR7H3zMtVuHMMr46MNDvvJzr3H1tVctGTCFW5DKagGwgR8lNkGNY+nG1mm7fYb/6Lk/z9ee+3kCGZNxzFH6q+StAtNus7G+gihnHC2gRCKKAiFK9kRIEodc2xlz7Y19inEPYayfsqiMzKcXE6hdA+HB7Znq8nhcxtwKbYTxMqmrXnPlrbqhqrgU0G2nxKFg2u9x5swWmJJ+u0MShRitKR1T8PlzBaCkIFKKJIwIZK1/q+7dEAsehg/m1PGmb/GDk+ZBZmdO3fhR19bf19onJXB6Ztd2xum1GoSybqPPWJpMdsnaLKrx0TjB/vIiISyJxNJ3lBSValM7OdF7MgThE4m6DvCcIciJ+AKBXiwQcYBBkU5XmQ4OyVfnBCctFvEJhDHRLPLModIbzGVClsQkChbmkJg2VNZ9avWBZSEVaFd7mDn/Wns7U6lGhfdqQVcpH1eDFQZ5l810CxMfcanzqmc3eD9T03hWLegbzJJxzRZTxXLLqg7+3yIIuLXa4vlM8G5inEdG
bTirs5U1zZIPzgJdJuy9eUzQH9I527PBQPOURHSYjncwMuDbb1/j5PCI1bWU/+Rv/Qyr5886wqOpQ+DdhDZ2i3ghXBi/yynRnPD9eBUhSrQpOBnvMrsz5ePFv+HZzV9Cxqu045TZcMiNwwnbqx0SBKbdZbQY0+62ef/+MVrusXV5g/7GCCMLC+017XVuerVZ1uLvjzgiTTv94KfBRe083aiMFLQjRWACttZWCZVkY3WFQbdLGkeAqaKzVGDTLkopSYOANFQ0/QV/GIz605bGuvInuFYQKMWiyDFaV9FddsKYyqPBM+A/aTGP+Aw0JowtPjm5wJGfB1QdTfHRf3rCgNepTJpZ1TzwyjBgXh5wJI5pxWuEWcq0NaS1ojFlyVycsAhAlQEUYEqDNBELc0wQSEazBWkS0+2UjEe1SsGllEE0dMIWO4TbLNLWw3od2LoIt6I3DWk+2PS55ApTccJakBEHHSrLphNTvPa2AkaopChtakbruXtzM3LtnucNvoWSXC/GnDEt7njPDf8CeN2uqOrt8d8XrVP2v7egEDtsPbNBGq9TTCLMfMTR+B6zWYYoDygK+OrrHZ778k/SP3fG5ZI2VsQ3Gl0FMQv3v3K2Gqfi8BILEk3JIh9x//AG9/ZvUCwKonSV9U7J+bNTFuYMRwcRYRDST9vc3t9jTUVkxdjmeYgjVjZX2VkIzN6C1bUOWh4vgWtF4KuX9eqjRyPCY4tIq0TSylDgmBaV40rFGn0oqjAQSNChopOkhKuKtZUVQqWQwg0EA0GgKjEtVIo4stS/qY99GKjz0CPu+Ckm/MB5ohrL1d+nB9YPC7zeTUxKp4T3SW/wCgX7IN+ej3rAA3poc6rLK9Zsmn/WL+KJg2NEpTaUfpZjma80csljAyxDryfrk1Zc4If2IiP1IJCKhE1WdMQ422NUzNBzyUTdJ4p6iCBAuIQupcgwam7Hm14Qa0GZTInLgPb165QrX2Uqm5PUYFxyG0yt2rJKAS+223nhg49tCp6arfocCQkRs/GCNhp0iRB2HPm8DXau+RBXgRSm6mPfi5p6YcfXi+ohtp0cm59EMZvHC5J2wDyy7+Hj5Rq1s3c3loFKJFo9w2K/4ODw93jta1eJg1Vi2szVmDv3dphMDugOBPH6CqmEi89s0z9zDq39zrvOZ9e490PgfcklgsPxfYo5rK72CIMOmoLRfJeP777Hwc4tWnFC2OnT7nbIygNePffzBNEKLTGkFaaMFy0m04gglkR6wuUrHYxYMJvMKHWCISZUc3K1oOo04/W8finzo8rX8UfMdCsMqKz7rmqNYIQq8YsP63CTQErchoyCKFAEwu4FVhY5Stn0h2WpUUjiMKQVBQ0XnPoZp2ryqUTsYVuSV5c1UFj7ufmwl/1TED0pQLhsXUVhiCOrKPWqMGvc8rbdR9DqRsUMTgWAd9muNX2+6IZOykOxT08nhXT+pO7GhqXrlwz3NIfck1bquC04JbEISNUKCQMKM2G62OeovM5I3iaN2sTZglmckhUZulhgAFVqUtOhJbcJ1DoHScT524fcvrBK0bR2u4fo6phXIfjvHNvEZgPzqgBpas7qBcmV6AxMM4SsRerSGGaHU1przoPCuZD5a7TRVRL8pqrPqySoQNgzEf9bcLMreOHI8O56Ha1YSzoGlQtMOUMnLUTwEq32NjduT7h//2Ne/uLryDxiPl+wP7zJOLvBqNhn++o2gWxx953rvPjSgP5gC1Mx1iZRq5UkHuxOshN+45u/TjGf02612Ny8yGhyQrE4pNdps3Z+hU6SEhjBdvdZDvItijJDGZvuNY4mRPGE1Z7EbBmkShAix9Cm3fN1KPDZ2SzZ87ggq/rYJUhgU4N+Oqw+vuAIJxY9cNg0gKx5qvskrMCAlAIT2O1TvO+hwCeFEgTC0HJb4tTuJn+K6j7gImaq+tonPuT+/wNgjcAao6QQVl/tt+dRtmsehq/LzPa0KGct1AVWR+5FILN0xfLVTZZf7SrrRI+K
HZta54tYBvAnsVi/2EZbuPm9LF8IhBSEpkM/7NINL3CQv8tU7GKyMTOdIVH0wy3irEccrdAONvB5vPII7gQ5G3eH7J3tUkpT8UJwbW38bhE10Pp+qPSjDi3nkxFRJ0aYECWcGk8oIqlYOMIhgbRUHI+OaK2lCOFS3RgoZwVSaWQUVukcrWSuXTYzr5ekKUBTu0NpCqXYjUs2piU7bZsVrZwZVBTYUP1DxSJVTORzZHmXe7fvcff2DQ7vXeO1s69S5CP2Du4zntznuHuDMjKUuye0OgFnt7fZeuUqwbzAiIDZZJe0vQLY3WHrzGmaTOfcOrjFH3znmxTzKSIAM52S3fmQza0Oa9trdNqGRCg2k0u04jMIEXImWgdT4HXxRng/a+PUFHY+2F0x/Oio55gR2g2SSnyvc+k6H5TmQv6w8njUC6butqZj+dIAh0oX4g0a7pAVW5VCSInUoIvc+eb54WyIw7D2TKAG80e5L/2gvcOa34vGv9XHBov8zOURjPR0FWWlVijshGuAvh0YzVs6EGwea+h7K+MMXmVQn3OqGtbGbaxhxbIWUd3XM56mrtc3icayqWVVzJPFdgWW7UtRJ5TxKq5KTPaN5CFIhKzGn2PCmHmSckFeRRrJPJzRkimx2gZdc0SBoEwidtcMmzeGjDYCZlFEqWxIQ6vsYEzBRE4aKhrnjmXqvva2CqFnzMsdtnpXYJqBkcxu3OSb795Edu/yhS99GXUy5dgErJ1/hlCF4H1atWZxtI/qSwhsStFSlGhj4+Ok63tLeCS6hPFYIJMCsBsCBJFEiJL9nuK5G2OO4w6LQEJomJ9MufnxCavFgM3PbbHW7pCNbvPJO7/LIk8oipJ33/02YSLorIZsnz9DMJxzv/0JM1HS75yBsM3xvROU2eX+0ZhPbtxBhS1QAd02XFl5ntW1MyAk/+4bv8bt/V3iWHDhQpd+L0HEXeb5HmmroCMlZ5ILdONnkDKs5gNIpIzcOK+pg6n0xo1R7ZC2ERzt0lc2sEa4seIHj1d2/6gNaX74+IyUfjtpj8GVuNqI6PBgYzMlgZHK6qa0ptQlCFWl3pMCwjC0jeFmjHH3abIVX2yDfza4fBh0NJng0l1+wC1P38s0bqYb15duogceAVx9jTCUlIBs6Oke/uxKi+t+KazvaTNL2jIAN/Y5o6E3rhKSi8qXVQsP0A3xFgfSDYPpE1WMn0reEk/FfIXwQQP16T6RjBACsRCcDV4lFCkCQavUHOV7vHPyh7x+9jXSWRtRWA+U+WjOeHzCzr1jLuwIuLxKZAStRZcX7i0oyoL9swnXL2ZInwBJKiIVIGRIqCLQmpIA3b3EvLxDNkzodNYZDifcPrzHaGh49xsfciHtcHx4wMlMc/7Zl+mt9kjbHWRZYnRJZ2UbrQv0eEEYdpmNx5SLGUk6sBuglrntax2ACGH/mBvHc+7d+oSJDNjeaPHScz1MT3J7q8XZayfce2aNeQDBSsKFc5pg1uInrj7HeDxDr7T40k9/kY/2NN+/dh+hPuDcSy9gTACiZKN7lTVzhVwX3Avu0SkizPouxYmmbRIuXOyxWEC+KEkixTi7xezgBkGQsL4ZsXbuHFpltNKUWX5CKId0gxCAy2uvkagNjFF4N0AN1jaEchKGC/H2hMsYlgJf8MAKdeRhQxKkguJapAS0KRGfsg3vY3IZkzXIUv8WpyxRDfbuRCp7HGNckg9BaTwsgJISvw+ZfQ4PzHdh6hxPP/zOuA1gevDWPxzLdeefvqZakJa+EEgl7Q4TFauyQFC6DH0Nu9ZDyunlQCwdau5q6zeY1LisV03FiWncyhgL2jifZfcy9quGI/8Tqmfwslgl1zim7wUzH6Hn837ZawRCS7aDFwlJa2FEhdy5f4/f/eg36eqQ58NtomALspxwMuXO967xvQ9vcX7jDBuR4PzWBpOD64Sbl1g5e4a42+PMah8pbEL+2XTKYj4nK0r2D49YZDPybEF32/DBH75P3F5je/2Y9z+6
aQfGaotgQ/Lrb7zD+c0EE0R8eP07iFuavDSIICYKIgKpkCokX9h37iQJSoI2u7QUHB4ckrQSjqcLJhmgAj748C56NmE+nnHltcuIu5rv/cYHXH3l8whOaH/YYqgLttcjnt04S7x2kTu3b5KVhmvvv4P4eIdy0GdyWPLcz71CoXxgg7BkCrglrnGiF6SF4nCxw1ZynpXuBt1Jn3xyTDpYYY9jlA4xYoERmnayQV4MEZSEekGntwY6w4iIxWxEELSxhrcSYaS1x3ibhYVfmwazgkDn52vquWOo3fqoZD4/GeuZ493y7G7SD5v1y+WxgG7pLMKVH7E7LvE2UpYjrhrqB9su1phj3GeE1a1Vr++CBh6gnQ1DnRcnxNJxL/Q1rmlWwB9pWP+bSa0fUDkswfLDnUSaxyoDSCWyNNQq7p20MTb4o9RWgV0a6qi1h9/XHnBDYknmb15gKnVDPSxqr5Fm+kL/vTb1ZiXGUWFvWa9Z7hNavHSEV2+JalIuh6c3+9VgipJQdRtNa9Blyctbr6GEZLp7m/uvnCWSh7TLFlF/jWfPrrL+hRcoJyUr7VXykwn7wynH4Q7rapv8aI+ja2+TmznkY4zO0GiMLohbMZqS/dkuF9QWZy+vIFXJdHKbF35izSa6V9t89c98vvJEqBNL2sQ2Bh9FphGl5s73dvj3v/ldtF6Q5wvOnGnzyleeo301pdNt0zYdhLJS0vYzCSe7exzuHHPz+JB5ueBgcsJgcsLNoyHD4wOunF1nfFASvZ7yh7/zWwznU/TOPrN7x/zsXc1bzx6g46vot24iX71AKcrKkH4cHDEpb4Nc59rJtxnIDVQYIBAMOucw7XMAKB0xyY+ZiZJ5MUcXJ7RUi4HapC/OEqV9NIaJPmFXXGOuj2nRsWxW1Dkqql4XZmnW+3SN9k9nYPRqUqHd9Y1ADwe6RtReG9bXWVUL9qPKYzKkuU43orL4e/9CCTQ3BPQg5MHRszmljVMzSIS0Gzj6BkAE1D6BsNSe1H6HSw8RHuBMrWY2zfDFRhHNWz6i8U4fbiwcj2K3/iSz9J1w64UiSBLyLCfXBk1JBAghyUttrdPUi4Be6lTReHfTyDFhv/NhpcapHJZc6xyw+rFU+ls5v1LtWEk11oyP1jPVNkO1HvRJKroCW7uVji+ikjasYalxhTZ2TzjtnePrb6WUPLf2Mt+490e8d3+MCk+42nmJZ1t/lSgc0EoVs9mMw51b7B3fJb15h+wbb3D4j/81K9M5rV/+HMHPfRklByBdULCwEuMnBzcZFjm99iqoEG00rV6v8fSiohMuULgx5y0ESwkChQkEBRHrAQxeOsfnv/oSUbeDkT5ZjXaSjdUyt1dh65lNjNFMjkbcu7ZHKzbsz4/ZHWdcv3mLlWTKQa65M9xn9XKf8y+dQ5wckZozfDJ8h4Puq0TxgDtvf4/La32Kc20MBVES8/H0m7SSAfN8gZ4eIlqbJEkH8OMQMIJEbpCEW2gzYRwcslMWbKgeq+3P4xSYSKCvOggBx5MbtLtnQQuMCJDO89xLM0uOz1C5TC4BqxsPtV9ulV7I1a9phLYs2JMWIU6jQF0ej3rBq0lEwwdSeGi14oW2SwWeefrIZhdJ54MVq8FtjQsWPGbzBUVhSOIY9/Y0Hfjd4xq9RuMbe25z599Gqlr3tVj+7lOLWf4llkG1OqtxUOAGuF+chO3OVhQzQ5JnGcLgwFZjtDVweOnBTh9nqDRezeLVB6JWy/oloDJsOgbr2Z3PROUNcZ7BVnX2zLZWI9Sh2wJjZPX9k1YqZ7oGqxWNrG6+LPs2N81d7l/jMhAYwf3Dt5lQslEEPNv/82x1fpbZFO7e22Ex2cGUJ7TaAc+8dJb0W98h+/7HhEqwuLrK+JkLrIcJOGZqb69BKD4Yf8D56Dz4nWor5XstDtcumH4BdVpMZxvwK+3KUNFLI87/3Z9B9AfOBdZKV9olvTF4QHHjy3X+xsYq7UEXjGI+FCTfu8FifsS9Mudzn79A
9/wG7Y0W0eyQ45U7jPM2ShbMP/yIxeCYyWSHfHcDc66NMZLFPCORXeKgzcH8iJW4hymmzt+YemA1WKUULXoiJU9y2qqD9tF1RmDcrhm94CylnKHlCGUG1Yj2rNff1Af+uGlkB0G1/To1iBiwagU3VExzjHhdhEUx4/a5+7TY+McDuo1seFpWGoJK9q0nqXD/O32JGyzSWdNtjlfX+S7Zq9aGyWyK1jN6vR6RB14HJB5wK9j0DdQA1ib9936sTXboDZLC+BQW9bUs/e3uUbF1UTHBB4tZ+uR1q+DngyAQAiUDFhRkRU6oBOQapRRa2S3YDRJVqW6WI1/MAx3tl+G63UtT77nl1Rv1fepG8vXzuyw3J4HRXr1gjxdPIOhCrWiymrhaOhLG+2Q3IXd5NfUbVQoBUkakrT4XWq9x9fJfIJ8EjI/3uXbjDZAz0k7CynqXMNpCO4Pc/L/8q8ivvUY2H6NffIb1+P/H3n912ZJkd57Yz4SLo0PduDrzptZZKIEqiG4ALYe9ZrpJ9gzFcJGfgB+ED3zgWnzqN8p54OJaI9aQw+menkbPNNAoAIVCVXVVZWalulqFjiNcmhkfzMz9xM2bNxOozixgIXZVZNw4x4+7H3Ozv+393yrHp9OudS0QfiEfrB7ykr7c7eguAK7/v384+SCnLCtiPKlbe/bSWSSSjccLZseWm69MUCrxYVHWg7UJAN5/Ps6B6IISFKYlGTzi+ObrnH7yIdPRRS5cuc/1dxSTawIlTjg4fo9cXqN97TIP3V2mx5rh3gmcHDO4kPGj2/d545vXw1pxDMQOrZ0z1UOkPfa0aL8lEkEtPgdf+EeyxRVEVSAGkr5wUOTfFRvDl0mbQ4wIQLi+NsRagkX8u1sD6/lm/SYb/VG9lRiOcXEe9efrUejp8tVEL3jWOoJ/AFyHsH4iGaIW2YckdYE2HbHoH4pFYEM/NKUTxqMBw3yIc462bUFKZJJAyJgS9EVDngTTThvt3u8faNzVu7ELefIyatEuGGpru5hfpCI88B601kHerQFefx+OWGvIAc7akHbpuevWCeq6JZGWNNGo1pEoSBKNdBYDnssTEvGMh9uFugSNP16j02Y7T+QT4+EEznqzO2banwnh8GElvvas+4vUIv6rIzFEygNsKKEZraqoCIhQQDymA65lTwipSFTOxvgCKtugMQmP72r2H/4CYeaMx5Ldq5vIZMd/wLlQ7N36IuYK7Osv0j+IEJ+NXxfxsToERla4ZBztRJyVWFqUjC12HA9WB7x/8gve3nyToUwRNtJKjpyE6d4BlbYcXJ7SBoqkT43oocZblVGLdnTdb4XDGDCnmnLvEVvbWxwdf8wbf1cxHQ4xew03f/op17/3JoneBTmh3PoZw//0XZ470litGL9yiR0ZbDThtfA0SUjTTVaLOfcPT3nh4qhzIvvLxkW5voYcKslBj8K5OjjuARVo5BbSiuD7EmtarqLvm9KPIaxvVmchuFPewn31jLBYfxGQ4bt9zaDbxYCtmaq9Vr/25YJZFNNhxRmAik4BWNQ1+0eP2drYoFgJBnlGWRTkA1+UWBiBVBqJCE0lRbdGurW0hr4dzETlGq9BRx7PrGk1nRYr6AA4KBf0tR5E/IuOH4lc5xra9vAmejMdfCcG5x2QtbUcL0rmJyfsPbjH5sYmg0yzuTFjMppA2yCNZTSdUrqafJDHOfkZ+O1MxcDbGmJpPxcKk/d35tY+5EJcuA2abhiB8D5d2N56Kdi/bhIZur4Cn+iBGEHVgBYOG9qda6lRKkMnOVhFWTTYCu4eHXFw+BHF8iG1fMzLN14lTS/5jc1ZD2BdDrejT4ZY09+iKRGyxvpnaTmsTlmaU+rMN9OpW8etu++xuTNia3wlbA6Cw3LJv3jv3/D99EdcmFxlezhlO58wTYdcMXC8aXjtwhvc3D8Cp+mDBbvSOOH+IvDCejWGSMHZbIhyln/9Zx+TpR/xze/scLT3gOmjLa6+/i5abWEdJHpMpqYc
qV8wunSNzc03KMtjRhkYd6mbT4Mk4dS2nD4sOJk79MWeHIvPqXOc0ysZoTRRUGp8kSgPH2sRBAKcCoqE7ONQBD3I+vEXEDYBgfDV0hxdmKAfqB5co9IS+odAuEe6MZOdM/Np8tV0jpC9g6r7LX3kWlykQfkikoKyVzWQAozxg6QkLCvLJ7ceUbagZUtdFNy9f5cXX3gRrRzPX7vExmziK1sKOq3Fx/32Wm+HTtDVV45j5U2QOMT9I3HhHmNM8Pp992HTvWnRc5wx5Kp7gY53xQNg1LiN9VRKYyzWWKajIW2xQDqDFpb50TG2seztLymWS0ajnF0Dk2HKMA9tv5+ysUZ+TwjRa6M2OM7CTk4A1hiVEMV2HC7EqAVwPorkjEf/rOX210WU9DUuorJva0ueZBwfVcyXNY8fnpDYislsRj4cUpuWYnnI6WLJ/OiQw5sfonTF27/1TaZbEyr9kGGuSdJRiMGO2pFfmnSbbKBtRGyHHiiDDnDDyIbn9tOHf0aqM1a2ASwf3/kB5fA2hufZ4DLzpuTj+UP+2w9+HyGhcSUny9scloo7iaatW1In+J3rb6IuPKSRNRkbHWjFeHFBLLm6Rnt1Wj4gBZU+ZrHc586dIZ/evM23/uGSxaJkQz+HuPEaic7DNxJYJ9jZeZuH1R8h9H2Oyj0SdYIrBIP8uwiuY4FhOqJsGh7ffkgqJYkchIuvLVDCuIgIdYGQFBGKZV9drFfziEH8Ym1dI/xm2HGI3bayhhMuWsC+RVLv1+nfi8jSURrOAd7B/6SP6En5amovRM4xamAChPSTTIZ7j+9L5GeGCnxtXOccjXNMhiOuP/8yQsJouMFousFoc5PpeIR2LdPxiDSA7ZnMskBTsDY8ncS+d+HPblxFvxd0r0dAPRPv3F/nM3uaW9McWUsMdP0V12vXRoAj8f2dLI5Lk+u89cLVMH79sX2wmejObOJG8qRHS9AVxwaHcBYdPtWGY2PkQuRv43n6hUfgdMPENEGDsGc/89dNfvqTQ0rjWB0esb27wWi8ReIKbn/6KQ8f3GLv5IhRrtnZTBmOMgbDIQmOh5884MPbj7m0O+Ef/kd/l+n2Rf6Hf/WnHO/8D3z7hX9E13bdResgJv6GgaRnaLrNGbqH4ESfA+ic4Pr0Gu8/+nOq4gBmL5Nun9Aowbw64L+7+c/55OZNjk2NHTtGeUKeJlSLkqp16HbA65PrfPPKu6STKctmycBmPK5uszt4bi1Bx3UWUKd0dF0fLEJB4W6zsgdkq7fZ2h3y679+wPYLB0zs62Sj1/BQ4tbmvmE4uMzV9GVqfdvXjJZXadqGpfgIaU8YilepZMOkyEjFilxJtBhE3THcXAhU73RcP/ujI15iQayXgF//XG+debpAeO6242dDwZrOmebWnoPjjObYcfjheUbLfI0KInxvf53Ph9avNA043q9/kY5j6AGjd1T1oNsbEA4QynFlZ8DV7RvI6PwSAhuASArXa9Dr16M7IeujEgn8s6+eHd+zx/diPwdfxBN/RWD87IFPP4EV/fd3rANZTJRYdxKuA3g3Sp9zX2v3EjcTu/bveEvB+9vfpn/XV+jvHWbrVkIHxk+++ddE/vt//q8REna3N/j4ww/Z3M545cY2L7y8xVvffJcky0izHKTqN1BjkKf/lvuf3OH3fuvv8jsHW4iDQ9xzl7g3+duM1RDhbBgvB052PdViKxnfUn3dUee68fVWYLSKPLTcmL3MxeMd6voAKxx6nGFPTximY6Yjx/a2Ybud8YE7ZNUahDH8jtlgtnODq9feJlUDnBC0oqG1lkTkFKuK+/Yml0bPA/39dk60OLecQ2eOMnmEq+awGDP/oGWxeMzGlZrNwesMs9dxLpj0rp/HXiSZ+gZDNWEwfEBrG1q9waKscHZOVf8Mudgko2RzmFAYSF22VvTn7JxyiFBPJCpqERhDC6V15OjScD0oh57HAWhFB8AIE7dE4rbjOkeb62PYu3uQ0fyDDsHimz1QuydG
Yl2+8m7ArhvAIOKJv8OLvXUQEigEYWBUSKqQsdIcQoCK5u6T4P45ItaR/dlHPv3Ap1jwTz3d+lf9vJty64FZ8WP97tTXPwAnJGcq73/m3OLMhvb5cnacuoQLnLdCBL5B5pq2203igM7O9WDsKYc1QP5rJr/zt6+wMd2gbS3D6YgLF7cZDMdBr3K0XfSGW+Nc4bW//xvcePUyv/1oi63b/w43Mvwd84gf/+3f4yjPOmrBK08u9HgMyQnrS9HF+gpx/Nbpqu5FJPD27Dt8fPpz2qSkXu2TjnYRCIZuyBuX32CqL/Bt4I/v/JjfHl3hsr6Au+LrQjhnPX9vNcb4XKxrmy/y/33v/8d3Xxmwk+54rHBRS+0rjSVDQZMeYNuWeT0nr29w9ZXn+dEPf0KbLhkkb9B3Yu19/dFSjApB07yCLDYYbjxAySW1rcCW1HVJ+uCYZpgwL2qyNCWVI6LfxK+53pzv5Yk4aREj8+OMDg61zqnmOpqwowqFT2RwYRPsd4z4feIm0tmTTyyzcFyHZ5FucE847T4rX1mPtHUTtRuPuCshsS58mafgUhekEswHIawfRCwSX5bO0xN/Ue0qAo/43J943BmKRKx9BXrCQIY9V65p7PG+vuje3NqP30jCJBE9Ly2Ff5Qq/PtzzxgefPz8+vX77xa12nD1MF96hixy8f05pASlRajs5u/WV9zrOWBrQ6ryXzP55ve+x4tvvsar33iTay88TzocBf49aH7WdT+safRCKoZXrnPr1vvYRCAu3UDu73H1D/8L/AYYtUXwMGQ6MIrBYHGhW1w/nwIgiADG1tnOEfvc6AVOiyMeFT9HpDO0GiOkZJANabXjyBzy/OR1/nfP/wdcyS8iLr2MsBJrfc1ei8FWK1bzFcL59lBv7r7Ff/2j/44H5V7AuF6zA0jyjDYrmFcL5os90gouJy8ymUxJ8oLdjW0E2Vp6eNyI48Yt8GEFEoGhrrZYHb2MZEKWDBlpGKqrDGa/hnPvcnTs0DYlySb+cyJ8HojEZD+H1ygzRG8dhDnqDwlAGDa7LrZ5HQxDWx/bRTJAn4OJfzIOXIi3ElHBob+v/rrhmk7jvgBWxVeRyvnzU19mRQjvHRYBVLQItLPrgUyvgUkfxeBrLwjb0wmePok73GcB94vgdz2G90l52hj0lb4+b3yedrbAfK7Pg7+gPLUV0FOuuU4v2DPHPf1bdv7psBmaEAu9zukCGGdxVvbnd3iKAR9d0dEN8TwhY+03rv51dKedy7l8/fKVaLqfBbGgJREWuehB4qmNHp+wWcUaiP1Vz/fvkjlY05LjDv1LydNPcCbI+0tKH6p09l7j+Z7Ulp88bt2Kkc9Uwc/lXM7lSfnqkiNiEsgafsZiv/0ajWFV8djAaa05knwx4fXPfJ6++nXJ+rU/uwFIscbW/rJ8p+i/t1s/6fodBI5qPSrtyff9G2unjVq/EGvBtuGYM5xueDkE05+5DxGy79yTFdPO5VzO5VnyFTnS1heuV2ul7sGob3LRUyQOh4qfCz+RQo9JamfrXP770B7D3X7pE0XA/3zaoed9+sP/Isr5+r08k2gIoBfDXdbDys4e19MHLjog1ugdKdecYWuXkFJ8rhXioKvTG62XczmXc/ly8pXQC0r3ZHQsArzuCV6v4uOdMWdVwsg1xk72ztrPw7i/0rLOlX6lF3mmiDObk7UuVJOKn3+yYtn6qdc8+OHg9XO54Oh5VvbNuZzLuZyVr0TTTbSgacF7/6K2G4OLozYruv/6v+nBOa7sNW0qar6/rHb7l23n8xeR9Spln3GqheiBLwvGX8hhr4V+PXGRtXPEl9buy/WRGOuUiGccAg0Uvoe1fYGQWJYz8tTWCdS5qnsu5/Kl5asBXaBdD4x1IbijC2PyL/eNUvxBHpTjZ54aZ0DMBHlWoZdnSYfnX1o5+7zrfMnrr1GmvfPwS176MweKz7wl8NnmpoPdz7kvFzj16CQLYVBrNG53sxKwZj3zZo2iCLxCaHSC
Q5xzuudyLn8B+cqSIxyhpK90EHKde6B8suQEdLF+OIRcqzL0hHncaaR/aad50LC/FNf6SwIunwXcX176M64XC/qi2xKAWtvIonL6tIy2GOrnbBzvUBjG2LPHBurBb5Sf3xPqXM7lXHr5ShtTQjBD13rI9DnVHno9rK4TCE/BjjVc8A603uHWXecvcZ9/EUahD5l6wnR/yr/OJjI/eSLWRuAvLzEj50zxmXUH3pljBU/ucPHzT0+ccWfu0bmQFIEvPYntNd7z6NxzOZe/mHw1mm5wsDghuuLAMdMqLmYRzFglXJeC2KUvPMkRxs+5z4LeL3eTaxf4Jc/0RMBCJ3btoPieEGtNd//SIog9vZ6MPPgiCY+HWG81lqhzEKpyii7lui+pvfb59Thkhw8RPJdzOZcvJV+Rpvuk88y/HtNlY+lyz82ereJ+xux/6lr257b287PTPu+ePkeP/nokABTE79drqH+prsVP43ujd+xzrt9/uNeUu7efFhPchZf1em/UcH0qpguJruegey7n8mXlK9N047qVInKy67XWXc/hRvXPcZafhE7ziqGvn9HoInj8ewna/XynFYR7e+IocebfT4P09UI+9Bp7R6t8gazvQB1FsDY+XSrwk+VzvljWMftJKuFZG5OQXk3vwszc54ecncu5nMtn5aspYi7WmVvX0QIOuhYpvqj20xd3X6vS1wiQLnxARAfdOsid5YM/T9aDKZ5yxSeP/sKjxOf8+8vK2ZpJT781D9hdAcB1vfQsp9ttPH+xe1jvXuH/XiuPtx51Ef9YB2rR88HnvO65nMuXl6+k4M25nMu5nMu5PF2+stKO53Iu53Iu5/JZOQfdczmXczmXr1HOQfdczuVczuVrlHPQPZdzOZdz+RrlHHTP5VzO5Vy+RjkH3XM5l3M5l69RzkH3XM7lXM7la5Rz0D2XczmXc/ka5Rx0z+VczuVcvkY5B91zOZdzOZevUc5B91zO5VzO5WuUc9A9l3M5l3P5GuUcdM/lXM7lXL5GOQfdczmXczmXr1HOQfdczuVczuVrlHPQPZdzOZdz+RrlHHTP5VzO5Vy+RjkH3XM5l3M5l69RzkH3XM7lXM7la5SvphvwuZzLXxP53j993onQWbNvNe/C3yCERAiNkAopE6TSSKmQUhI/hxPgRPf52HdQ4Ls5O+ebs1prwK03rG8x1oSGoHD2PqK4rim0EKJ7z5/Trh1nwtEOsIBFCIcL/3bO4uJvZwCLdQbnDNYZrG2x1tJ1j31CpOz1s76vojvTlFQIEPJsz0U/hv6+nYiNUCVSKoRIkCpBCh3+Fv6+pUMgEEL1YxG7hn/mHrordRcUIkEJhRIapVT4kV0zV9/UViJd31zVIXBOhmcpu9dwDofpxrT7wNoc6Z+UH+//9//px89s1XoOuufyN1rWF6/vQg3gPMhICUIhpfY/QgXwlWcAUAiBCEZjBMezwOWhRilwTiCcDddVvpu1iMc8TULv6whe/oIIHE5IPLgK6K4fgSGChAUc1nqAdULgnMCtgb9wICJIW3+v3fd4yjj13yl+3/61s3/3n3W4fkyEw//pcNbipMVaEcZV4qw90747dgb3fzztXvrXhBBhg5HQAX485lmdq8Vn/imwoQu3e8rT+Wx78fXbfJacg+65/I0Wrwl6NUrE30IipEJIiQiAK1AgZABXD4ROiLVPBm3OubVz+YXunENYh3MCi8AhAsD53zwJIo6gsYUziTUtbU3zcziEWNdAQcoItgLnFEI4rDNIGcHW4vDgK5zAWgHS4ZzyWrj47O34c7snNPAoPfg4J8JfLmwO/ddxLt6vQCI6g8LiwNpwfwDxu7puLF383p+DaD0Ih2PEU14HpFy7of6uu/f9PTtwfiML20/YMNxTAFt0R/C5YP5ZOQfdc/kbLc6JoDlKEBKEQCqF0AopBAIVQC/QCVJ6bVGKbmEKBFJIb7RKCWs4GkFMCkFUQk1AIX8+i7WKJxElAoQHOn+ts6/BGdjoNEDR
/bhgCksia+A1XOcE1pnuvDgXoE0FU7o/31k64yzw9u+ta5Ci02JFeM8Jr3f3tx0+Z50HfDxVYq03Lj4ja9pt3MyevgGEg598RfTj2B3i+vGLO4BbpzLi6+HYuAH6F3paxzkXTIWzgP4sOQfdc/kbLp7DE8G0lUohlQIkkgC2qGCmSpwAIQXICNYB4CIcdP/xq1UQTNw12kEgsdZzrMIqpFw3j8+axJ3mHV6TUq7xigQAC9ytiCZ8/DHhINkfK/zrnpiIYOIQwiGERQiw2DUkXecuewCOWv3TqAR/zu6j/lc8lejxzuEQAbT853o+/CxZ6rrrr18bPgd8w3We5Mh7zl309xXnAF7r7nn6Xnv1Tzk+YdfdB8J1z8s98ZlnyTnonsvfaOm1NIEUCon2P1GzDVosQoEUiKDhrlMKMvKpcfERAFlER02PPjJwhNZajDAEn5YHnzXHVP9bBoeS9JyyEDhhPX4G0HLrAOlsZyxbS+B0o7asiHRpYD09CAqFwyGF1/YUbaAhguPrKWRoD35ngc/fU489nfEeNwMRv5wAZKA8/JHW2qDpeoiL9I0M7z8JvE/eB3hH2BmttnvG/WsRQv13F2u7Qgepa5S840nnWa/+rn3n8PfT+OYn5Rx0z+VvtPTUgfeoKxU4XBHBVnROMyd6MI3efE8riACKnuvtzo3oAEbCmknrENYiROMB0AQOIkQY+PsS3f1FB5OM/K1Q4fwOIQxCCKywwQwWHcBJaaOu3d2XlNIDi1NgZadlWysRTrNuT0eHn3fLhcgG8XRu14X31s32iGc9Dq3di5BBd+xBFwjAK3HGQYwQEY6nKcBnrk1/js++/8TrnYUS7ztqt2LtVw+0Ub91a695IHbd0xY85cY+R54Juv/sn/0fHU5QNzWHRyc83tvn8OQAmSnefOUlXnn+RXamMxKRYJqW4+NjPrp5kx+99wEffnqH09MVVW1ojMEYg3PO81nhe6owqFpKlBLEqSSF6LidaNZ5FV5inMPYFtsaWmNx1mKt869jwQmsCwPVmTIiEPl2ba/yO52M5pxUfod28aiw0wrJaDjmjbffYXM243f/1u9wslwxGI1wrSFJEhyWi9s7rMoKYVv2DvZ5vLfPwwcPuHvvFj9/7z3qpsZJhUsUVmusdFiH995a110ba3CtxbUNrrWY6IQBhHDIoF0pnOcJowfbRaZJhE3YYdf4KOfWjGAhgmnnX7ei58sc3doKH7Ke6wsHez4wTsY4DV1gusJ5ow0pLMJ5RtGPf8+HxWvUxw+//Gz9CkQJ5TVcmfRRCjLwu7Jn8SDME3rwi3+r8Bkhgwc+aHGdWb9mbgv8czbWYITnPhGe33Q2jpv/UJz7xHCqCMTrX6AzbeN/1k3/qOFKnJN9aFp4zkgH1iGl8wAdzGMnEoQ1eHrCrV3P+md/xvL/LNA5BGJtDXavddqtxFmBC1R2vM9O43W2m7Qe8P33lC5uRPBZx16vSXvt2XVzOf70QxaODbSFPfMdxBOfiPztk9puf8z6+Hwu1bwmzwRd2zRYoK5rnLMorciHI7Z2Zly5fIVrl6+wu7HJIBmAtSwWp8wmYyajAaPRgA8+vM3ewQmybmmEpDUmEOZ+1zTWIhBY61BWoAJvpaU3O4QUaCE8IEsddkeLtRKrLU3bYlqDMRblLFiBcXF39YNuCI4E4TBOdkMogkkR/ycR6EST5TnOOS7s7rJ76QqTyZSLu5f4tW98k90Ll9BJGiaeRCUK27SkSrJcLUAkOOe4mAy4cuUaybe/x97eI97/+Bd8/NGH3Hl4j+PlglYKWucoqpK2aSCYe35BgwgmoZDSb1bxgXibMihPQVMQApwE6eLmC8Ew7DUtiGtWil4D88qVC/ya/5wMbpfOESL8PflNCO8MCvpYbz7HEfQaUTgc7/HvuT//RGRUL7BfZoZ+1RIiFaQM4BtMeSElLoypEN5x5kG413yFEKhOO5YIFSIe8JTAeu6RJIBa3MysjxZwLm6YNji6ekCJ
oHsmJjhKoCMiYDwJfiKATwRY/3HZAZbHW08y9Nq+DeZ279iLPGs0n8Wa4tLfSgDAbh7FvSMCp+g2Ded6TdxZrwidjT8O38VahPTRHvF/kVL5zHXXtkbn+k0iOv/OjsvaxvUZXnrtu66B7lnA7oc/KOFnOOYvI88E3aap/fwwLWmSMp1OGU9nXLlygcs7F9nZ3GZ7c4tBkiOA8XhInqWkaUKa5AyzAe9/cosHjw4oVhVVC9IIjBDekeD8pLFCYCxIBEoKnHIo4dk1pNfutBQoJbFIrPPxfUpAjdeYrfVarwwT24YZYMJgWOd3Shs0WRdGUAp/bvCA9MZrb/DKa2/y1ltvcuXKdUCSZSlSSk6OF8znS5RSWCFJtEZJWJQVy6LAGEPTtgzyjLaFolwyGA751jvf5s3X3uH45IjVasndvfv8+IOfc+fRA1YGvxE5000oIfCL3hkPaiKaUHEqh41ChEUR3+qnxmde93PTf18VJzgOI8NE6xyyazu4gC5cSdA5X/xY2mBNhrFbu7sO8LsL07/qovYjPgskvwIRUiFETHgIP0oFjc/FVdVbXER6QSNj4L30wfcEWqLnXxVeg3OILknBgRVek3RJ0D5dAAAbrLL1a/amcBRrw2e6mfykLhe/nOieT4QlF56UjMq4URjOOtL8B7zd2WvHIV4YYI3vfcbI9lyqE91e04F/fN8F7jmCr9eQCBfvv0cXPta9tCZPqN+sAW746d+NDs8n7j5aZt0f8dqsjWB/cOeUE593zOfLM0G3bhowBoQiy1KywYA8z7ly8RIXtnaYjCYMByOyPAcgSRQ6BpELiUo0+SAnG9zk/r3HnM5XNE2LMYIWaK0LXJHFODA4DBKHxMmw6I1DSEmMJ/GTHkQAYWX9Di2lwlgBxp9POLBCIK3XIiQecGWwHuKGq5QiTTTbOxd45fXX+fVvf48Xb7yEsS31qmCxWFLWtacshEInmqZpaa1jOMxBSOrVisWqoG0alNIcHhrqumKUZ4zGY4xpqZqGtmmZDkf82suv89zFK3x69zYf3v6Uj2/fYr5YYJ1F4RDOUxvW2WDWh0kWzFPnglYkfKxl3I19HGQw/AW9iRfmgwISBBLvMInLyToXjo86sr8HETQbiQxODeGzhYzD4D/gteHghT4ztaPS7egcF2f4Svclp+hXK167lYFi8MkProtbiqAbtHr8ZujBNkEp3WU7SSmRQgfN1COaCBxvTIQAT7Eh/SbnrEJa3W+4zoebybDZhYft5+sTWlsXqvQUwF2PAHDxOQV1t/PASxA2gLuLmq5GiLbTKnFxTkWP/zoBFS2XZ0mklVgDUtFxvX5sBIEVJM6KzlHp+u/Tb9DdzPrca3oNwltUbu1n7a7ObPiiswzjN+pDwp4Mz+vvoXs0uLCJdd/1C+SZoCuThKatEcKhkyFpNmA8nDAdj5kMR+RZjtYJJNprQmLAANhyxnNW1vl4RiVJtOLeg8ecni6oqhbRCpwxWAvOmC5+xVnPGxknUFhaBIjWD4tzHXcWF3Kc8P3uL4JTALCmWzcRsaz1gCOlRumEV197hTdefY1XXn6Nzc1tTOvY39/HOctquaJsWtqmoWxbnHXkeQ5CYGxLohKyLKNpGg4ODmhbC8ETPBwOqMqKVVUzHqZ+ApiWg5NjkFDVNZuDKb/+2tvszGb8uw8/ZLGYU62KwLFaBBJFzzHLsLPKOPHCZI3TRYSJFhU0F036aO9HE1HQR2QKTwxE2O6nYh/uJIN/SCA8T4zzXnfCZ8WaZm27KdlxcBaLdGEhhzCbpygnvxJZd55JqcPzi1q4DRpgb32gJEJH7VYF4PXg21ETncnL2m9HTE6ImpILAG9lAIjIrQfg7sDC9qCxnoLrTXcPit016DfoXvxzkwRT2K5HNEQdTeBD4wKohkkknFoDojDHwt+q26KfvF53h2v/Fp2jMT76LokCTzVY2QMtgTaLccRn58pnM+ZYP6vzXLQTsrOo14H3LOcdX49ha+tW5ZOA
H0aqM+ICt+/imP17AN08G4AxNE2FcBYlJVJJtNIkSqOVRmrVP2UNMtNkJmc8mXBhp6E0LWVd05gGpEMnipOTFXVZIxqBag2NgxbnzZag8Tpr/eQLg2GdV2KVlN0El1IgZRI0Be8X0DqhMR5CTFl6OsH5BTOZjEnSjHw4YmNzi52dC7zz1jtcuXQFrRKsaWnrksWqwDpLsVpxfHJKURQgJEprTpDexxJMy0SnoBRlUVC3LVZ4beX05AScYzydshoN2Jz5zUpJwcliSWMcWkiUFHzjpTd45dpz/PgX7/HeRx8xPz3FOoFKUoTw1oYUgs2NDUbjMa51HBzsYdqm01ycEJ7ADkRT1CS6yd0tVr/gvAMupH2ubdHrepN1BOcmnearkH5hyrDAo2YWNBMrHLZzZESNqLO/iBRDz/H+akXGyAWhEeiwtck18AnaoRAIpVC653CVUkjt6zF4p3DgsIHOIO/y+EOWk5PeshMOlEJYhbLaO1PjPQXFwkcVWG8N9mjwhKzvYE+Ot4Puu/i/LI7oD/AoETnVcJyVhInUfzacvqNCOnogJlWsgf6a2R6v4bpAOT/WNty26JSBSF34yAoimOEtuajwrzktngK48Vzxnw4rLALvQ7LWsh51cpaHPavFRt67My3Xzv1URkx85h/PlC8GXWsDcFmEc7RNQ9M24BwqLlHbhoVrgzdUkmUZo9GQ6XTIzs4mi2pB3daAQ0nF/HSBKgVVHaIVjPX0gLXeqRQnnAWn/OmtdRipUM6RJnFa+51IKcVkusHrb77DYDjk3v173L//gOPjI2rT+uIWMuXC7hXefvstbly/zng0wQlFVTe00lJXJYvFklVR0LQNprWoJCUVgsVyyfLwBOEsTdMwHI0ZDEeMR6BlhrWWtm4oqppVuaBtDYN8QGsNdVWyKgpm4wEb0wmz8Yg0zbDGoKWgNZZBNuIbr73F5mTCx3fvcXCwz/J0DlbznW9/lzdfe51XX3qF5emcP/rBn/DnP/kh+4/ux/SmjvvtTTSCFemi3ypmOGKlCNy3Q7h1HmttrRK8uiFVVQgXloVFCoFVUasW/fXBa0fW83VRe45aVfy3C2q4/HJz9CsVH3Fw1jnWj0Sgs6RAKolUGi0DHSG153CV/yHE00ZLINItkY9dd2hZG3lfiZMSJ1Woy+B8uQchOnz1lIZ/DjFutYtNpYMCuqPFEwCC3zCliJUEwmbnQuJE0Cj9Pz0wGRcZ+iguHLMO7uvafK8dx7nUfzpuRj6ao5ubItBX4uzYd3e+ttmdmZTxik9qoN3bImwQXuMVCKxTnaYbQ9LOni/QCevxuPE5Ruuh2yVkt87WL71G5H2hPBN0tZLoJCW1FqQ3D41pWK2WlFVJ09RkTY1yfhd2zquj1rZY4XBakGQJg3HOdGPMxmpCWRdYa5DKoeYKUVbIqkU1hrY1tE4gjIn4jcNhDBhrUNahlMOhUdIhhV/WSiqef+Flfvfv/D1uPP8CL734Mk5IPr35KZ988gm3bt3i9PSUi5cu8tzVK8xmW+AsVdVwMl8yHg48MC4WnJ6eeDMSicGDLm3LqihYrlaYtkYJwWw2wxrjTTXnvJPPtiwXJzSmQSWp52Stoa4qlBAshUMISaIUCkuiE4xpEECSZmTZgCu/domXbhzz4a1PePjgPv/Tf/SP+eY3v8X/+Pv/kv/+X/xzUqV569XXcBL+9N8WHJ8ce062mxzBgIv7Ic5rV1EjcYHCkYTA+rWp4td2t7DBe7itk6hOQ/YHqDgJbdBqRbgD60/kuuIpcXraQI0E4MV92Tn6lYrovPfijAbYabcBcLXynK+SIZog8L+RD/bA4rX+uACjw5PuL688yECz+JRijZUWLdbHpF/8UiqEExjXIKUP1xM2FmLx3+AzWu5TBtaftt8gPXkCAuXvUpieSolmeNRhhQiREIqem++dbpHv7ewrcXYMo4bbbxH+twz3LiOKRTAO23sE5fh9rOsPixE1/YMUfm4TlAkh/PwO1dSM
Vf5ZIrHO+nnYkcb95uG608Zd7wnfQ+SLfwl5JuiatgEcWifoRKO1AgeLxYKj40Omg5wkUeSZP401BmsdVdNQVCVVU2OwSCVIs4TRKGcyG1LbBidBJYq0SChXNeWypmoaVNt6JxshHMZ6AGgdGPzkFMbSmhaJRmcpV65d5/d+9/f4wR99n//qP/8v+O6vf4/f+72/z9bmBa7/zvMUp3Nu37lFY1tWy5L7Dx5SVRVCCoaDIavVkoePHlPVNcvlEq00o9EYJTWr5YrjowNWy4KqqhFYhJSUZclw4E1NZ7yWvFzOAwcuEdbSNg1GW7R21HWFEI7hYIBMEpI0B+eoakNdNlA35GnCcrFgNhjyrVffYPDNb/P2m+8wynL+4d//nzCdbvCf/T//r/zs5/+Ob37z21y9fp1FsaStWkzksegnTAybE4H8dYC0DiUc0nqN07qwuXWcYdQS6DOsjHfcSSU7Q1CI/rcRhIIurv98uJveY91rxXGxuL8C1Zw7DUtEzjlWu/KedKECjRAANmqoMhbE6cKtIh8aT+zWIjXiRkYPN8JH5hDAu4M46zr/xHqChHMKX6rAdcCyvvhd1Fx50vT2Ys84k/oNr9MyXU8xyGjyi/VrBR7YdR6FJyJqIq0hei3WyVDPQHaUwdl7FGsOvv5cT+qREV+j3SS7eRUdfAFzifRHsO7C/LRWYmXr57kIGW6it876qz8ZPuYplf450F33aTzDU9mfp8gzQbcsl0ihUFqTpSlaalrTslgueHygmAxzsjxFqxFCOExjqduWsqlZFYU3yYsVVevjfHUqGIw0Y5OBsKSZpCxSllnNSlWsViVVVVELAcLStgYrOFNuzhqLFQbTQo3g8u4l3nnn1/jn/+V/ycc3b2IELE9PqKqSb3zzNxmNhmglmS9WnMznOOfjY4uiYDwekSQJ5aogSVPmqxU4y3g6RjjFqio5PjpmVRSsqpK6qdFSkCUaZ2E4GmKsoSqXqMDlKqEoqsI/LOlL9+lkRqpHKCE42NtjOhkznc4YDgdMNrdYFSV1XbJcLkh0gqVhkCa8/NwLSOc4Pjzk+OAAV7dsTmfcuvkJf/r9P+C1N9+muP4cn978FFvbzkrqIS9OJotzootXdqJjz7ridWcCxJ2fzDZweK3wHLAKsaRCghJ9REMM17PWYS2YtUwmv759ZK8IGo1y1sOF/dXzC37txGI3a04w4WkHueYkk1L0gNvxoH1CROdY6eAh/hWiQQg1baO5LixCSSQ6xK47hDQhquCsCS2kQMTx6gDMdWDbpeTSIUN3D+tOuI4gWKcvwnchJiV4WPMKRvg3rHOgcm0DDTAXx6OLKfbhRzZQNAhC7Hd85n1MiwjWU9y0ukiRiLRrQOi/s3coW2d759yZ2sLxWA+altYDr+2dYNHhGzPe+vHsr9MpBx3A9wpEV03uLzGFnwm6bVsjhCJVEq0VidTgHIuqYu/4kO3ZhM3ZlFGeISU0bUvVtFRVzbJccbxcsFgtWa2WVE2BoUaljmykcDIlzRRZ1pLolERotJQshUDKGh9UJmitz0+Pk9LhaI0vQTccjXj1ldf4H3//97l56zZaa9I0YV6U/NmP/hydDrly+SpSCA4OD1lUleesTMvly5fIs4zlqsC1BqxjNBzCcIh1MJ8vKKuKsqlpbEumFa5RGFPTtgoJZDohzQeslguaqsGaluXylLquqduGLMsReIpmc3ObNNFMplPKYsnpfIGQgrppSNKMJE0ZujHz5YpisWKYJXz0yScM8gGnpyecnB7z8MFDdJIy29yiaVv2Hj/muctXOT4+5mB/32fk9fNtfZ6GidJzWVb0YNxppP18hO69YDo6C07ipHc3OekCiPjPGwvGOv+84kR1EidCiEVnWjpi4tVfBREyaqkqaGjeiYVSSCV9lIJUawDrtVLWUocjRSHXwLDf+gLMOAe0/etCRBODLlrBhsKPMnBroo92kEEzttZ22uO6FuZcSG7oU8DOKoxROiuErvYCQdt0
IgCsCDEza3PCV2EzYQ4RzO4AbEJATCw5k5EXL0DQVAO4xncEfuMRUVNlDQD9C32EQTyVxwDbjWtIn/gc9ItgbG3rHfHCQShc1KnQ3Wzsnb9CRA0+jrXoNoW+6LzqgPizoP/58kzQHQ7H1E2JcQ1gUUrQGj/URVmzKguaqsY0DUJrjLW0bUtVNyyLkkWxZLFasiyXlHVBa3zol04sI5ViEkuSJGiRIm2cvCALBa7CigZazzeZ1mewmbD4tZDcuPESD+/e4f7DR4xGQwaDAWk+JB+PGQ7HHBztszGdopMkZPs42qbl4qVdBlnG8ckpUipWqyIAuUQrSbVaYoJ23rQNi+WCYllQl4VPiFA+c83ETgBCMF/OadqWum0oqoKmaWiNRemU1WrF48ePeO7qVXSScGF2mWK5ZH5yikoShCoYDgdkgwEIgdKKpih57/332JxOOFnMOTk+YTbb5vKVGwynm7R1Rao0GxsztrcusJrPMcbQti0xCiR6iXveNnJ1IYWYPsvMBBMT4YL59USMpwMrbTA9Y0AbHjQi1ruoEXlKw4mzK9+t/cfHGH/pefqVia+xICICdOa2lLJLljij0YbfiBjF0HOF62Z2TzmuhyL11/ChTE/eDCHbMXDnPdIEc33N2deFOflrwLp5G4AkfLwrywh9eYSnjQXrjkTCOUOctVDr2NSZ+3ROsD5krr+HOCL+JxYGQthubHw8cnAMCkOMHu+/9Nr3Wbun/t/+S/U0Q/97vSKZdS2YkAAiVXTBR5XiDHe8Tk/0e0Yfc92nLbu1Z3H2vp4lzwTdzZ2LrFYLynKFs6arPi+VAhMKIAv/BZVQaGERFsrKg+6qqFiVBauioKwLqraksRVCQZoq0AlKWYSxOCOJRL/Uvjq/KgVl1dC0kgaBNCaEfvjwr8sXL/LeT3/KcDRiMp4wnW2wsbnNbGOLbDhAq4SDk2Ok9BEK1jmmkwm2qTk8qtnd3aUsKk5O5ozGQ/Ik4cHjR9i2Jc8SThYnzE+PmJ8cY61FKUmepUwGOdPplMlkAgi00kwnUxIpqcoS03it15qG1eIIXEOmFYdHh2SLJaPRiOlkjJ5MKMoV1WJJW5dYIToqxRjLdGOTqq4oViUvv/ku+XBMsVzynLW0tmUxn6OU5LXX4PTkiJOjA5+uighZY8HB5qK56CUmqNqoq4i1ilPht4UObJ0QkeJDiC7YKMThOqzwXndpJVZYVFfMWgZnxtkQNlyfwfarFgfdmMnOlJd9aOIa0PoECAUhtlcIryhIF8MmwxSOdRecRTgbNsEuAR0ROkaI+BRE7OwQtLI1jVmEHc112Qp9gsnZNb5OafjrxKinM/VrA2D2GpzrzitC1mZ0lsbaJTHJJiQzh0QGr+0THYmyL/jjgVt1/KkALAYnfEEdJyxS2Fgd02uuwlMbAuOv94TiGiExhqlJPzG71/oxiaFnLjzPXhGw1mGMjyyJyVc+Hto+QTOE59QVrV/f4MK22m2oT8ynLwG8zwbdzW2yfMh8foxtSuq69g6stRhFJRUSiQrmlrGOqq5ZlRVlWVFUFauypGwqGttgXOMzyaRAJBLbStLcYRuJ9b4g0iwlTTPSMkEvC4pVhRTQSBEy0xTD4YhitUJnORdGU7Y2dtjc2mQ8mrCxueU9w6bh+OSIo4N9yqZhY2OT1rTMFw0vvfwStjWcHB+T5xlt3XLrwUOMM1RFSWsaHj18QFVV5FkKSFKlGA6HzKabpFnGcDjCWOfpgSRlPJqQ5kPSw5TlculBR/gQlcVy7heXsWxMZgyy55jONsjynHpYc3Sw7xNKnGO5XKEQKOkXZToYUa0KsnTAlUuXaEyLRLLaLJCmZTabcef2JzRVwWq1wFmBco421LaInJzqtCY/WXwcqQ8hw/V+b4dAOjAyTOxgQgXsjVOSYP/F5YtWAqzEiVDDodOY4wT1dyLDBP+rEKfbAVXUaELRGqX6/lyds0mGVF8VIxeCZhe4bRe15qjtC08B9Pn86xpYAHMh
Q7ZjsCDWbqezS9Y1u/h73SlGr+VGzfesNiiCU6j/iIg1c3kSKELkQCxyZPvUZH9YsHVCzQoXNp7exNdAgpTerWVCau1k6zUuXv8W040Ztjxm78H7PNr7mLZZgTABxG2/sYc2Q5+ZIWeMp7Pabx9K5+e9EVFR6CMljPWbnhQ+KsQ5up5srD2HM1aLcF8KTP+9aLqD4YgkyZFSsZgfsVzMqesaBCSJB1xPaDuMdTTGUDU1q6qkKEsPuquCsiqp2gaLASnRIkE4iW0tzim0VCRakqUWJRStNWR5TVqmZFnKqV6wWhboOhTH0ZprV66QDsdMZptMN7aZbW4zm45J04zVcsnx4QGHR4esqhKtNUmasVguGI1GvPrKq1RlydHJnNF4Ql2X3L13l6qsWC5OWCwXgCBLE1579TWv2UtJW9VUdcNgMGA8GjLIUmSWY52jrSrKssS5CULC9pYhz3Na13J4cERT1z6srGlYrOY83t+jqitG4ynj8RQ722SxOMU5R53U3Lt/n6YsGY2GZFnO4dEBzrRcv3KdV996l83NTbaFz+QpyyW/+Vt/m5PTE8qyQChBU9dxyoQpRKfR9ht2jJnttYIIuK47to+nXCcpnHMo0a8BJwRKOlxouxLDkvwCj6nGAcSD5fxXofaCp16C00ioM4DbAW0AGCFUaN8TQ8WiJrTOZfaLLzaC7J5DZyr7UDRc0HiNDQDtEMJ0m9N6plRfS2BtcTsBToYuEOsA2vO2HbAGDRUXoiRitmCgOc4ARnhmPrxE9tcmcJxhTJyUAW4VTmqyyQ6bF17i0vW3mYw3cM5QVXPu33+A2VvxxvQFThvBDx+WvPut/y3fHUmO9+9wcnqX46MHHBzeYVUco7UmSxWniwOcLVDUIRLEdRZFlLNUS9jIY+q8C3TlGUolxCFbn+DinAi0Q6RJ4rwUMUgyJLjETViGTegs5bH+3L9Ingm6QijSTDNoh7RNzWq1pLXGd0RVGicF1liM9eUbm7qhKCuWhTeJV6sVq1VB2Ta0ziKU88HlaJxTePrR76xSOl8oJ82wWLImI01TBlmGTjRJqlgtS5qqIdEpw8GAuqzZ2trlwuWrDAYDXFNy8PgRH3/6MYvFAgsMBkMaYxAqwRnLZDxl7+DQUwV5xtHRIXfu3GE+n9OUBUprdi9f5de+8S6//p1vcXH3EkonrIoSqSQnp6fsP9rjwcMHlEUFzjDIUlyq2d7dZmtri52dbfIsZzSaoBJN29QcHR3xyc3bvPfzn3Pzk0+4e+8+q6LislCMRiNmsymtsxwfH5OmKRsbMx7cX/L48UOqoiLNUtq25fatT9k72OO73/0tppvblEWBMS03rj7HxUuXebz3iFVR4GIKdODLIrHbBRYFs25dY4ogGWqcEWnBMxGW0coVjhYRwDXwoPRplM4F0HZe45EhIqKnMP5qEAweFHuTXYaqXuvNJ4WSPnvsDOD6+NbYrLI/X1z8gS5wPa1zJvjfiVBLRPnaut3YOQhRDjFZdx103Rq4BqW2pwj8mXueV6yNsOitmY42itpv51WKYL4+PgGckV1hdyEkVkik0yAl2egCL735d1HTF9javsRiVbFvatLhgEpWPN77iBsHDyg/+iHPvfQGHxT7/Jv33+PKcy/z+vY3eO3695ivFmgBEwWzfMh4NORg9YBP7vyQP/iD/wem2fvMs/tsVpoh0i8IH6cbt8JeXfDJKf6ZqxCTH5SANc5dSoVzMXY5WIHBmRaHlycu/2Vn87ML3tQNKqT5KqXROiNLDQR1va4biqqirGp0IlhVJauiZFWsWCxXLJcrqjLEkEpASRKVomSKMwLvhJKBP7LoVAbN2p87yxJqk6IzQZoJsjxhtShxRiKFYJBnCJVRLBe01YpyOefuvbvcuX2bbDBgNtugbWqkVrRNzdbV6wghSBJNnuU8fvyI23dvc3J6QqoUl64/x7d+/dd59xvvooVk/+CAg0c/ZzKbUbc+1nY8GjEajnj33XdpW8P85BiB
IM0zRtMZwjmqsqRclcyP7oKQjMcj8iTlrddf45033+DmrVt8//t/xIP797l37w6mabh0+TKz6ZSirDg+PEAqzXg8YTDIOdh7zN7eI29NVD4cr6wKXnvjXQRQFAWDNGFzYwslE7AlpvUzIQbir8+P6N+KCy7GIIhuYfogfBsPjtVI1r0wFl9Xw7nubSckSriu/qlwPlJBOSILh8WhEJ4H/isQMuacT+LwyluIRZaBMpAKoXw3YCX71utqDZQR8oxJv97qHNdXchH0IBEda/0il56nFIDU4CytsPQ1jCNAhgieTutd0379mcO3CiDqTAcOHe/ZacsBUDuuODwL98RmLIAQb+uE6L6zRIDSzK7/BtvPf5uf3Dmg3r8HP/8EoxOazR1GtBSH+4g7DxAPPuHRnZvwoz/hsUnZm+7y6NYjHl3YJZOWV196GSuG7M42GM22sDpjkmf8xjuvMj894Qd/9n9BYNa+Y//8wujjqwi5s9QshFA113HmDgHW4JCo4OHw3HJ4Hi5aHTHjdV3pWOOb/5I6w7NDxpqKtoWmaX3Kqk7I8yHGtFgL8+WKg+MTEiHJ8iGrsmRerFiuVixXS8qypm4NTnp+MskSMp2Ck7RtKHAtfK63lL7iV5Yl3omhJYnV5Gh0BmkiSRKNThLa0tdwqKqCk8U+QijyLKWuKh4+ekjrLKmU1E1D2zZMsjGXrlxlMp2SaE2uE/YfP+LTTz7l4Ggfnaa89dY7/PZv/TYbsyknR8ecnpywPJ1zOl/w4P4d9vb3aI0hzzIuXbrCCy+/zGA4omlaRqMxD+4/IMky3vv5T3l4/y7SwXiUk6QpO7uXuXjxCtONLZJEc/XiLv/x//yf8ic/+DP+9M/+lFt37rBYrdi5cJHRcEgiJccnxygUR8cHbGztsFgs2Xv4gKbxE+/WrU84OjogzwfkWU6eDSjrAmMsZePTtH1crohMQgDIaF76ReesN8e6DJ5AOfQmrOdnRUiqAOmLb1vA+ZRgHJ5aCAArRHSgcdYcFiCtXyAiFsD5FYs1BqeDpisjnRAJ7L71uhJ6jVLoefL4JT34hTbnbh38IC7qKBGchfNlMj2I+ap4wnXL21+nC9uLxkr/bHqQf7qIYJl0+nF4JrHm8VlLJ2rAodyqC8XEiQkOrh8XJ3BCk134NebjV/j5v/sE2xQ0xmCLgmw6Ynlwh8vbG+w9PiJbNYxwiMZweLzPYuMKs2REdu8jkg+/T6Uk//IPHPLiS+jLr7F57QVevrTJW5evsds6vvfd/4S7d3/Iw8d/HsZR9GMex6H7zmsBZU51CoUHz/gZ3/TAj7Xnd92Z+g8uOPMC8Lo+okMIX3Uv9sv7y8iz2/UIQV0XFEWBMwYpfHYaztE0Nafz0tdfsDAeNhhnKYqCYrmiLEqaxuDwKbJZphlkGanMaFtDQw3C180V2j9sqRRahVhICZnyRL1OPY2E9JzuwtRUVUlbOU6PD2mb1odc5UOM9ee0jUEOBcPRgI3NHXYvXGA0GDKdTPjo4494+Hif+eIUhOStt97iN7/3XebHB5weHVJXFffu3WN//zEPH9zn/r07VE0FUpHohDt3b/HjH/+A2cYm27tXyJKUk5NT7tz+iMFgyMl8wcnRAcMsZXt7C2PgYG+f8XjEtRsvIvRlpLN89zvfQSrF97//R+wdHFLWNVubW0ynEzY3ZwjhKKshi+WcCxcuMD89pTg+RGtJPhhQrhZUqxVHzrG5sYXINHVbE0OSbADSuCXHDEYbOFbRLdjIgYX5bEVPCYT3hXCeh8R7gSPHZjpqARw2hJ7ZUOouZBV2fGCoVRU+K9znwcXXJzZG5YRat53JGrU66Pqiee3Wv+072doOCP26tvTFYs6uSM8b+/nrXKh253rg6ArOBIkdQgw+46/TZc841dbpANddsrd819M0HKxtFsbZtePi/Z+lL6IIIVCh7KQPK1Sw8RJH+iL3P30PmRtm17ZRNmX7wjVyAaflPjsqZevqDU73L3Lww5Za
1lS/9hxbb/4t2u//EelPf4IWFnv1VYaDCXZxyuatH/FnD37GnRuv8bPbd5gMcnYvXKKdvoh79KMQMfJZjbf7Gm6NphGmewo95WIjlwAOjLE4aehiteN5xdqG2gG3P5Gl23s+45f4Mn6KZ4KuCl7ttimxxgZOBxA+f7msGk5kQaISqtrTDvPFgsViQVmVWNeilCTRgiQUNldC07Yx2DsUIBfBUy+9NtEBcOr5NKl87YXWWMqypanB2JYszZifntJaC7YlUZqy8BW8NjY20EoynWxw/eo1BolmtVpxdHjEwcEBq8UpTV3z4gsv8r3vfJdsMMI5wSeffMQHP/8ZNz/5iNPTQ5qmxlgHUlK3BoSPyR2PxkidcnTyPlVToaVitVpy795tH9YmFU1TcHp6zM1bt1A6YToZc+XWTZ574SVeefkV6tbw/PXn2T845IP33uPhw4cURcly5TXisjY465hONjk63OPSxV2K1RJnLJn2GXRlVWOdZTwcsLNx0U8Ga7rl1mXoRxPU2i5Z0tu9PWXg51iHKl1JSSscWkQawh9piWUy6TQyaUMuk4gQ4wgWMVKcSdJcYyx/tWKdDcDb4FxKjGf1zRA97wd0ROg6LDobQbgvJdOXPnxCAjh7x5Uh9kKDoKlFJ1Fw4ogQ/RFNW+tcR7ue4Xg76ihoxUKTaIlKxuTDGXma01pHlg9o24ZyOaeol6yWB9AsfVEx50HehfEwzoSaHf6S0WLq7lckVJNrPDaG8UbN997cZkO1KNfwcSIp6pLvbU+5Oh2wNbtK08748fVtBuMJ+tJlPjwx1P/kH3Pj1RtkVrL/whu0THnw+BbDj3/GJXtK2j5i/+OfcqC3ubt8iZ2jW2gkKnxn283gNfAVcWuPG8damF7Hxfb0gAs8iw9DNSAS70yNj1v4Apb+GoZYgS5+Nl5pHWjdl1B/n63pOtFVxm+q0vNNQoWCNYo0H+BEwrJxNGYBpuF0fspitaBtmy6NUieSPMnRMun5FhlCU5ToSiUK580ri/OxkkohtMMKSdqmJJkhySpU4qMRcj3s0jPnqwJjY7C14fj4iAsXd9mYznw9iLrBtY6j40MOjg9ZLJdcuXSZV195lbqq+PjDX/D44UN+/vMf8+knH3J4fEJZ1ZhQ9UwKv3HkSYrRGmssqzJ2i2hIs9zXYDAtTd1QVRWtMV09CusgTVPuPXzIg0cPMcZycXeXyXTGc9eusff4EZ/ePObmrU+5WKzY3NymtY40SzGmZTLbZH/vMWmaohJNXZeB5vHhNmmagVAkWnvLOGyMHYZ2tJ3olKK4mXtzKr6wHtELsUeajeDaT+mwSGPLGQlxA+28yN1EwjnfPNERcV78ldB0Ab/B2PUuDE++34NcFxoVm0MK8MAcONwgT2o8Ds8dd+2qcF09BL++18P18dlh4Xkp58GlWVv0OOOL8quUyeQiG9vX2dp5nuefe4VksIkVOZM8IxsMuHm8YMMpNkYDVm2JpKWYP+b9j37Ixx9+n6PHH4NrOt6403c7bS9y9v4/ZniRB6s52WzBt68OeDddsf/4mMW8ZSYfcWk0QFU1B6e3KVcFD2zL269/lyYbMzcZm66iWsx5/u/9h4xmY2w5Z7E0LMdbzLKrvHucIE6PuLCzy9I2HA8XHD24z7oOv7YDBa2Wz763PvrRoRkzzdbmuwiT0gkTxj58WhAcbaHpV7jOOuD66dFbK3yJOf1sTte0vrKSTkEUNG2L1n7rTbMhg6GPXxXOUVRz6uWc+ekpVVXiMCiVkqbKx90mGUppbN0inUQJhVUWhA0xvn5PtVYiWpCx7IbQKJ9HQaIcSlVYW3IyPyXbGqOVBttgUFR1zcZsxoNHj3E6RSlN09S+l5ox7B8ccO/hfaq6ZjqeMptMON5/xO2P3me1WrC/v8fd+/eZLxbMi4JlWdJahxKKJJFUbUuharSQpEqjtHcOqZDVJqSiKkusbWlbS9U2vn5EiJet29ZrErc+pioWPP/cDba2LrB16TKz
2ZTRcEDTNDzce0zdNIwnU0xo6lmWK4aTKYeH+yRac3x85NsopT4zLk09fzydzTBNS1EWoZ/Z+tSLc1J05q6fxyYovaEeQHSe4PqeakTQdf0CxCO5CJqriNxXyLH3DTd7LcNCVz/AOddHUvwqZR1UIh0SRETKwHlnme88ElN++5HtC+bIwJH3Gld0rBHbU8UQLQJFgQfcGFHQ35ZfD770ZqhzIbQff6EZzTa5cPFF3njtewym16mMoG4lRzj2Dyum1sCw5aPDO1xBYqYDPljVtI3j25Mh6eA6mzc2+cdv/D3u3/kRf/iH/y+WR5/6Fu1rzijRbbaeY7ZCs0qGFLZix84oVjn/+uiApmgwqxZrHJflBR7fe8AbL7zK6S/uc/H6G3xy8zb5eMy/SwTtdAO1m7Enjnh06z3Uwz0Gt+7w6uFjqFtuWcdOvoFOEiai4Z3xgD/TiqryPRLXjP3u4YknZvqZ4kPrj9r178davR1742JssPW1eEOMsQdT1Z9EhAiRaJV0e1OXp/lMeSboVlVFmiZonaB0hjErrGl9Pnqakg9HSKdp6pqqXFKUNctVQdu2SAVaQZII8jQjSTKcdbStDaUIvZEqpUMpiOaashIT9S8Xdg4niTGDntj3izdmxWjpPcy+qLdjczbDobAWGmuZTWc8uHePe48eUlYVOIuzDU1d8vHHv2C5OGW1WnF4cMjJfM6irKhby3A0RUpJG1oAZdLXgjDWUQtDagVaq67AS1NXvvCLcdSNwSIYDidkWUpV1xSrFauyClzSA9qm4fDogI3jQ6azbZwxpIm3IJarAodlMprQhtCVjdmEYnuHPNEsFifIwBGORyPGoxFOWja2N3AS9OkpWghQfmMwofMFws8tY/1m4EP+fLEaEQtJBR42hs3Q8cKRyOq5Wk8vRDKh5xJ9FMS6Fucz2GwsHAIoPrswvn45ew+us/JFyNCCaMTGLtNRzqbMhnA66bp/x7GLCRLORi03jLfozxNBf13C3ufjcK1D6QG7l1/j2ovfpdHbpOMRR2LAxwclQktu1w0PDwu+g+Tj1PJRlbBTW7ZmI77fFLgm4RUkHznDyWLFwGlWK0u7/Ta/+Q8u8LM/+L9x+9M/BVjbVMSZHylgZVeoVMOqxSiBKHMGUoOsQBpWyxPUKOf9o8fsbl/mka1pmxPM8W1+tnhInmVclUPKIuHw3gPmCj48OWBjMOVeecp4+zL3KNiYDrg03UIbRyYV1RlbRHSa55o2EX6tzdcwrl0EQhePzpkZ23/ah/v5xFCHdI5YQc5H+MheDbZeWXRx03VRtXi2PBN0T+cLtjY3fB+xNMe0jQ/BAhKtyJIURNK1xCjrmrppcDi0ksgEVCbJUk0ivSZqjK/644QPBJdaIpVAWGhNz5m5sPM45xvX9R5Vn36cpz5xYjIes1j6kC8jIEk0uxcvcbIoSLKMNEm5/+Aej/f2KKuS+ckJSSKZjAYsFicsjo94/Pgxy+WSZVVRNYaqaZBSMR6OWa4W5HnKcDTGNDVNVdE2DVmaEWsoKQ2thVQIamupGk9JaOm7TTjrsK0hH6SY1lC1BrsqcPuPKZua+XLJpcstUivMylCsTtFJwnK1whrfIihLBhSrOa+99DLL+QltXYKDLE2ZTmZkeU46HZDmAybbiq3dS2zPplzY3SXPU6+ZhoLdzgnauqEsC5arBav5gqIssSaYVwiqqqZYzKmLAmdaqrqlbsLzC5SJcc5PoKjZOnwb8aD1Rh4jNnS0IhzvokPtVw+6zq3dx5nFG9nMPjPtSQ66j8oQ3YLu8/K9mW6DdktXiW3dCRTDznra5qxRHP6jh0y3X0VfeYfZcJf3TysuTyfcv7eHShSvTnb4b+/f5ZHMeaNquLk746MkYxN4ezbiF7KkUAPeUTlbecLtumErH/LOxgbvHT3iqIIXJ1d58Vv/CY8f3WZVPKTveddnFkZ9MrWG1JQYkXB/r8AVFYnS5KnANBWjkSRNHIO0BY5wDx6TO0iF4teO
j0lbhxWKj53DJSnOWN6++BxKaI7rkl0yWiUY0WCXh8yTkqI44gzCfu7UEZEHiSRJz+G6oAAI57ElhpI9+VzxCqHn0unSkgUORKw+EiMdBDGn+MxceoY8E3QPDg/Js5R8kKG1RicpTVvTtA25s74Bn/A7tzGGuvH2rEohyQRZpkhTjdTCv982XlN0nrgWylcYU0rS1i6YdwaCk8367R/nTFfMpWkMbes4KRdcvyy5cvUKt++0CBwyl+TDEVcvX+OiaXh8ckKeKO4/fszx4pTFfIFparQeMMpzlss5VVVS1TVVa2itpXUW6QwbwwGXd3doqiE37z8iSxLSJEEYyzDzJS2bpiZXGicktTGoRFOWBW6+xBYlubBcunqD3YHkp59+wmoxJ9VJ6CkGddlwIk8RQLk8BZnQNA3GNLSmZTSa0rQtGY4sEVx57gWyNOHToiDPcpx1qCTl8uUrSK3YmE55/bXXeXx6Sl3WXL96neevXuW569eYbGyQpElnShlraJqaxXzO4vSYsq7wUSkNTe35wuOTfY4P9lkcHzOfLzg+PWE+96GAFgfKopPQIBSfYVhVNVVV46xDZwnZcMRgMEQnCUmWkecD72Q0hmKx/MIJ+tVL4LiddybFWrJd99szNIJftL6WRb+YO8ANx7mgLTlrPY9rBc4anGt7IIj1GSLFAzgkNnSvi5zqxsbzpFe/zU+OLavH0Lg9duoln57ucWBL3smn/Ddyn08GCc+1msHFEfc3Er6TbfOaMfy5qUjHm7wlFLNW8INqwU6W85JM+fj4gNVgwKU09RX1Nq/yzd/4X/CHv//PwFc78aa8CAFs4QvnrkYkUzSKQZqwMiVNs8QW+CJVRtEkI+oS6ragXZXkSArTkMuEE1sxno4oT/a5MpqQZtOQeJLy1qWXEHrIMM+pmhPGiePxR39EbVZ+3Lp6vkFXXdso1+skeIkbXEzLFh0v79tVrUdwsAa+wWuxTve4SDX4d6XwVEcs0dkF6TwB4E+TZ4Lu4/1DRsOcbbXpnTNSonSCaSrawJNK4XwxHNNinEUliiRJSDJJNvAZZUooGmswrcHZBiENqBqpHFL7wfHe8FDTMzxgKUKaKoD1ZnDTtjRtS1nUNK0hSTWz6QzTVBgcSaJ83reByWjC4cmc45NjTk8XlFWoe2tSDg/38d0jKhrT0NjW0yhpTrl0XBtpdtp9PjpekWtI0gHCwcXLF9ja2kFpSbk4RThBUVbMF6eMpjOcEBzIB9TNPtu55bp7SLtSaGPItcIIzWCQgrUIawMAWubzU7LhFCmEpwKkpK5KBoMRzoaccSlom5qyKsiyAVordJrwygs3eHR4zGg0ZbixQWlv0wwd2WjMdDZjd/cSly5dZDgadXGKOGiahmK5pKl9soWxkqIoqOsS4aBqSk6PD3n08D4nR8ccHR+zf3jE6XyBk44klQwHGq0ItIqlqQ1lWaHSlK3tXba2LzCbbTHd2GI23SQfjDAOqrJmfnr8hRP0qxbRaSgQM77CXn9GQ+qcacRA+/7z3UFd8kPP30YrkE4LcmuHnjWBfdxsC1iSZMT04mtMdl7jzx5ZBm3Dw9UpddEwyCwfbV3i18eXmeD4ODP87vgKF5c1/5pjZqXjWlOzf3FMax1brmR3mfJ4J+PycMZsWdOsSpqLQ9g/ZOwyPnQVVzem2Be+wdX33+bRgx95eq/bWHotPitXJBNB5SSz2QZDDdoZpIVlNUdqTao0k8kUY2uyNKO1LU1r2BhuUBUnJK7l7WuvMtJTaid5sNxjZ7zFph6xbA35KKFta/bvv09T7AXe9uxzsz1J28Pl2nOje83x2VCuaMmctS9c/6Ez532SknDOIZ3fNK2gr6j3y0YvHB4dsTkdkGeawSBH4UIXVI2UYE2DEAprGpxtEdIhkxAelmtvFqcpOEXjGs9xSYtUFiEtSaJQCdjWEPOZRazvKnxlIi0CJIeVIBGMRyNeeeEVrly5ysP7D5gMhxyfNszGY5CKZVmyKhtO
F3PqpvElF433zurEJ1/UZYGUAmMaEq1JtWUwnuDQTIZTlmlKbTQ2y9gaZ2xd2EVJyebWJrPpBm1rUBs7WGc5PDpgMBozGPuCN2ZVYa3gZHHMvTKjaGG2fRmlfZUmhKBcLShWS6QQtHVNWTUkA+cBtzXUtkZr3yyzKJeo0YSDY194SArJaDQBZ9i9cJEPPvmU0cYmR8dznHIoJJWtqdsaG3jdJPVFhPLU8+TWWto2ZTzMKYoyRFtYLlwQtK0v5VmVJcXONhubM44PDjg4OmHy6BFHJ8dYZ8gHCcNBila+doE1lrbxpS5nW5fY2txlOtlgsrnJ5uYu49EUrRK/cdYVZVV84QT9OsQFa8p2zq7oZwhAaftWRV0yQlc8m14DcnFBWpzpq4Z5wAbvnAwUjoshcz0dYW2DdZbhcBc7e4lby5z94weIuuWu8ckFs+mIX8wmPN/CQMG/nULapujS8KOdAdkANkTLZA737IqttOWqFIjJlH8130ccL/gHYsSjzQl//Ogh/+FkTDWvmUwVHJ2yMJZrz32DR/d/ihQhbXzN0e2QpCJFJwOSfMjJ8QHXNmYkEtqqZnsyxmBZNRVtWzLSGuFaaCsmgwnDdMAkn5HpBI1mkE6oEbwy22KQ5KzKkvF0gpENDx494Hh+N6z7oFniOuoq/tcDbYwsiRps5yILEvBFxGfmt0+LwNchDu45gT9/aEu/FhhGB/Au+JP8kySmUbvPAPvT5Zmgu1wVzOcL8kxhzIhUJ0ggTVK01sTUOmt9MRutJYYErX0SRZpmaJXSNDbO2zAgBqEEKvEZPwbTV1qSPR8Tc0tcqDiktWY6m7E5S1FOslwuyQYDmqpkN9tFSk0+HFDUNYvihKKuaaqSVbGkLgqk8uFuBsuqMiRKIqRmkA9BpWxtXUAISbFakeQ5k8mMi2nGcDAIGXkp08mE6cYGTd2QpylIwWA4xhjjnY5JToLkUlWwOD0h0T6/27Yty9UCrCXNcg6VxFiLkp7frOoGvVr51NNEQ20xxrKqCjbSKcVqichzpPUxyGmSMRrknK6WnK5KkuGUixdG5MOcyxd2OVktEYnCNA2P9vZZlQ15mjFMUpyzjIc5Kkko6wopJEoIBoOcfJiipKRtKsxgQD0akcQNdDhEpZp8MqRta4bDlMkgJdHesDOuxbaG4WDMZHaR0XiD4WjKdGOTyXiDPBv4521amqYJ/OavVqy1PobY+tRdazxV5gPlY6aa7GKSAV+HIbRA6hooniFj1+JoOxs1arQQuW7nTKzZ5YHags52UdvvcPNYoZ3jcbHkxuYWq3rJjUXNYuRQ21PeyHe5xQmNm/NmusUVoXiYVgxsy+XG8ng74z9fPMIcn/C/rDe5NVjx0+pTdt2YloQ/WH7AAsGSAX9iC+S9JdfShO1BwvDaa+TjC5TL/WBtSpxMSJIJs+3rbD73Lqt8h/tH98jkA+bzEzbSlFRpEuuQ1jLVGTodIIRAM2ZjmmAx5NkALRVFUSDyATbPGLQCYQ1aSbZ3d1hUh3z00R/y6PEHSFd1lkdXUhLotdSAF937YYzF2XF/qtkfOCIb07GDBUiIaoh/AnTdoSM4I9ceebB/XNwQni3PBN1VWVEWBWWRkCSOVickKkWnAxKl/E5oDKatAUuifbpkkjiSVIXsNbCtw/m8ApTEm6aJQiXeK+gcvhNw8Bh7jsyHLlmM70QsfAeGPBcMsymT4RQpNHsHh7QORnnGaDikaCyLVQHOURQFi/mcYrXywyJ9JlZVN6RaIWVC2xiU0oxURqIUs9kmzdRzqZPxJNy/QUof9zocT5BCkSiDbWqMNTjTMspzpPYt6Te3NrBHrnO0aSVp6gqZSKSFum18Wci2QTqfZo1SNKb1GXU6xdU+hdlaX35yY3MD5xwXLlwi1YphnnF8csTe0RFWaA6Pj1Eq4cqlHWaTERfGU1oE+w/2ufnxHbROmU1nCK0Y5BlpmviaEFmOUpKLu1ts7WySpRohHEkicS5HrpZMGPsC
OgIaY3ASmrZhmGk2RhmJ8ok00UTOB2MGo03GwymD0ZjReEQ+zDpKxLSWpq6CJ/9XKzGV1lrni+RbQ2taEAIFXjuyEqTAhDxnF+bjk11s15ebsz0FEaM/uiAQYmypwLo2ALOEZJv71ZSHH3wCy4oDZ5nOtvnJyvBSOsNcH3I0TXmrsqyGKw52EgbUlO2SPx9t8a+Ob5FUBf8ze4Xk+Caz1V2GxQmaS6Ta8O7hTXaTTZpRQVLc5/lswnLqOCnv89bOS/zCrXit0owLw2tv/X2OVkeUbcNkPGNz6zqzzecx2ZiVU+Rty4G1fLz/iCs6Z9AaXFOTpgkJAmcEonZonTPIhzjAKMGqbhGiZTzZQanMF93PNJN8hpRw9/7Pee/Df03bzpHCdsDXK5GRABdBE/X1iCNYxmPWfJzhEdhey42BDyJugvLs+YV/7vFqPkplPS07Hr8G6JFd+GWjF6qypqorpJKkaQo4GlMjTLLGTxmMaXx5xiwDoVGJ8aUfhaY1vr2Ow4AwCGWRGnSqSbTyDwc8GY3zRaJxIEGGCshSCLI0AydplUbLFJ0kVEWFaQ2NE7SmZnPzEvXhnLZtWayWNHVJ3fjOvEraULxEUJUVapBDIkjyNBQoT8jSFOkco2xAlRiqasV4OMYZw6oquX79OZQUmLZkmOchIUGA9Vp6Y3zpx+FwRGsd+/uPmY5GlFXD6ckhaZoitPY8cll7vtY0frOR0re8Ae+gy3Lm82Nms02cFBRlyXgwBuf7k+X5gPsfvs/JsmA6mWIFVG3D0cmCPMsxtmY+X/Do4MBzqHXLPeVrVwwHQx8XPBlz8cIOL77wHDeev8polAIhSxBHWZUgLVqHWOt8QD4cMqgKdCMZ5prBMCVNlC8YY30Kt0wSBnlClidkWUqSSHQi0KlAKuFLAooE19ZfOEG/anHOYJ3sMtNM2/p056AyWbGWAiwlxgmc8nGevnW7PAu6cf2Hf/Sp1PEI0fO7ApwwGAG2HbFXDCjLmubhXcTGjM3nX+bK+AJHqmFrussH8pjV4Jh89jw1jvmDe2wlI142GT86/gWDg/cZlC1OnHIhEXzz+BHtfA56n4KWzeUJk+SA5fAeW3XDhfEmq7LikqmZqANkM+feYIe0KXj31e/xwstvc1JU5Crn+LQkyzMqUzFvG36+/4D70lE/f5Wbn/yM+cmcSZIyKhUbWY4UiuFQo6TCWIvUKUpqknTMIBuRJBmL6oTDxT3cvOFAag4OPuJw/ybONR2QGtHrldL5xKc4lpFndutgS/w7mPw8yfG6jmY4w9h2XHCMNY/A6ykV0Z2zp5XifXi8j863XzJ6oQhZVcPxlM2tXZyDoljRtLV3tgBt29Aag9IJOT7rCOWrh7XG0dS+wWRrTeAzJYlMUSpDCB0ynRzKV9sNVd0lWkqksjjpi6QYfFNKJWTo1QZZlvrCOEmKkYKD04L9o2OqqkRIRZoM0LrCWkNrBFIp7/zTCSrLcc759GMlfEHxJKdpG0g0zhrSJGF5espwMuXFF6+TZRmplly8eNWb5EnCalWSao0JzRabpube/YfkWcLbb73Dp59+TNOU7Oxe5OjoENs2pGnKMBvQtpXvLqFK6tYXh7etwTl/HiEly9WSra1tkizzhL2E8WTMzfu3ufP4sS+AE7jqhZQo50OT0jShrArqqqIxPuYW68BYr8EZw2iQs3Nhm42tLYyFVVF5AE0SGmtobes7YLQGKWToCp0gtfL5FDH0Jkw+KVRghxxVtUIoidASNFjluXmhNG0w4du2BSZfOEm/aomcrud1W19Q37W0ik6TVUqFaOMGF8LInJNIpZD0gfPrjpe1K/CZtShEF+ts7YC7i5x7J4fU1iIHI+rJhMt1w8fukMtzRbtdYGaSrXJO4k750LbsHX9CaVOW8iLfomG6d4idlwh7SJmnpMWK3DrUwLCoC8YWUluzfzQnFRqhLfPqhJnIeLxasaUzHspjruxcZGUMd5Yn5ElO62rEQFMIy0IIjqxh
kRsuXB6x+uQXFM0pj8oSnMZqSd2UbI6GmOWCtG4Yz2YkQqB1xmg4xmK5df9H3Ln159TVCVLYro6BEC0IT99Ep6UV1ndCJpIAUa2MESB4uqfjA9aoh7DBCSnPACudU3QtBMXJ0MbKf7bndOMBfZHT6EBbf9T/XurpFmVNbb1JvbO9i1KaxWrJ0dGeT3UtCxpfQ5AkzVHa0pgGY2ts6ydVXbbUja+3a4RnaaVKUFbRtgrb+qIjONW1dvF1BRVu7Ysa48jSIanIyZKcPMs5WZSMJxPfh6woOTw+xVqDTlPKw0NWZYmUkrqq/WCGKItsMEAL39VBJSnDRGNag00Fo8nUZ1JJh7SOKy+8wHA0pq4qNqcTNre2yPMBB/v7bI6GaJXhbMvR8RGTydjH81q4d+8e88WCy7uXuXf/Nndv3WQ0HpMNRwiLT9cl5+T0kEQnPvEJnzadCkFRtORpRmssVVEyGY4ZD3wTzJ89usfD/X1c4MGKcsXpqQbbhlTsY5ahvKZUSUiPhiz3PeSWqyUbzSaD0YCDgwPyPGM0SNjamKCEoKprX1yobamahrqpcc5z6oPBkDRLoJFkSqFixVInu9A+Zy2tsWFN+AiUqq0QhcbiMx0b02KM482LF7/URP2qxIVWOs5aMAZEiwkLWTlPSUmhcM6n46K9FSuV6njYGKcc9V2fWLF+jfUrei4wXtc6wckyY+/ePlpANpmRDjKSyy8yMg133Zxr117g29MZBwefUhZjLsiCkgP00TGDylDXh+hMsVNWVA4GWtI4Q641ozTHOoPWmnGSs2oqdJKSKM3hfBla7KxYNQ3ZZINEaxYDzYlxXMJgEsfCNMwlGAeFMzTKcFUOaG4dc3vvLm21ZDqc4IBVVaGzDONAW4dMPRUlU4EQhk/v/ZTHDz5gtXyAFLF4voqjgk+ZEfRJDKarBtaDpM/yk/iEGxdjiQOqemBei0xYj7PuVN9Q0S28JxGkImeazBhnAwrTcuXSq9zfu83e/A4mnNFbLr1TrWeQ1qMbni3PBN1FUbJYVVghGQwnjEZjhuMpQjqODvYpFgvffFGl6MTHjdIo2qqlrVuapqUuW19VzFpqGlQikVJ7LUgInIG2AtPg61gCCEVrHaKVSC1J1BASQ7MyNNKSDxKqxpBoDbbEGUPbNJwuF6xWS1bFCtM2OOdo21D/NwnFdKxjkOcM85y2LskSjTWGuq19EohS7OxeYDAY+ZYsSlJWFdeuX+fCzg5ZlpElOcIJssGAg4NDnGnY2blAlmU4JNNNyCczHj24y6effMJ0tsFwOGZVLjFNgzEW6Vps8Pj7FjAW43w7n7qu0Gnizdm6wgCVMRR1jTU1n967x2KxIEk1w8EQqVOcaXHWsFgsfWIKvoLtqlj55EQVdnqp0Er5YkSpDzmbTidIKamaFmOCqWQ8UCsl0Er5akzWF6FXQvm4bZWitfQhY8JB43w8tqkQOkElmqZpEGWBbVYYLMa6wJuGfPZfsURnl8/Oa7BC4WsphMXofN83YS3WSRSe83b4sSQUX5FnCF3oNSIvPX8Yr+t55LrN2DssObUtzYXLTGzL6eUp2/UJD56fYnFM8po/Pr7Hezd/wI5IWSWXeXMg2VwZ6rpGY7FtSoLCKUjyAauq9JSgktRNS5ZlnBZLJJJEKsqiRCnVW1XGMj8+xA0yDu8UpNc1+acfcvnaZabTMQMpybTipFhx8+iQD+7d4Rfv/RRz2iIayWySY+qKjfGQcT6icYbxYEI6GqOzjNPFPo8+ep9i9QjhWmL7nI5D7Uasj16KoOrhNXKlrqcGugQO153HiwyarFesuufREbzrD8Vrz5qEidtgZ7HF1cEVnn/9Hd54/Xc4Oj3g8eIWP7n1Az6++1NOy4NOo+2covGkImrfz5Zngu58VXJ0uuB0vqRuDRtJgk4TWrND27YslwsaYximmiRJsBbfVdcKTGNpKktVttStoTKWFkitLwxtXEiVtGBqsI0jEb7ldayFaYx33OAMwmXo
VDDKhzR1y2q1Ylka6rKkKFfUVYltK5QIZfGU8AukbVBKoVWCMS1pmqCE8rGuWvlAfoFvN5NoX7TdeV5TZimmMbz44otsbG1TlBV37n7IN955lwu7F6hrHyonpWBzcwvhHPfv3+Gn773Pq2++yyuvvs5kNOYnP/ohSZow1VOcc5TFCmlbsDXpbIpWKaerJfvHx6hEgZG+lGbQUtu2RauEqmlItUZrz6nXVYPWLalQNNZyfHSEcY6iLJBChpC9HJRGJRotlW8tZGqqpmL/4JDZ1oyDowNGo4zRMEVq5UHItRRVTVN7+qhpDXVdUZQFprVIodFK+xrHChpT+8I+raWuCqxYYX07QqzPPMBhcaFFiu/QkHzhBP3qxa0Br8HYlnVt1Hf7je3NY1Frh1COvvuALwvoNTR/1nUHzll6weKsL1RkGseDe0fszyVKD5hoxfZ4k/unK25sbCMPlqysQ5t9lvUxN8qaTddQuvvIIueySjiSoJxGpWOEqEiVxUpH4yrqqvI0j1bYVjBMt2nqitpVJFJTt2at5oMjTxQ1LRQtd+7dpEkU9/ZSODyksS2HbUMiFZ88vsfh3Vu4Zc2mTLkw3WCSjEiHGwykZpiP0VlKPpxhbMuDR58i6oLNrUts716lahoUEhRY4euKWFtj2pK2KTFthe+8YcBV0dj3eCF82F3Horvo4IqFyOOYi7Wfnmro2QEXUFOS6ZwLg8uIA8ntn93k8jev8daN32R38yKb04s8797g19/4BzxaPOL9T3/A+7f/nI/vvEfZLmht4yvmddX6fsnohbqoOTg84cHDRxwcHzKdThjkAwb5iPFog3xwTNOcIpSvCIZztG1DXfmspqa21LWlrBtK09I6ME7RBs3LJ7Q5XOsHwIeR+c69QiusVCidAQJnJbPpBoM0w+ahNdDymKosaZraa1yh/KKwhtFghFkWSATJYEhVlmjlHT5t24b26RYtNUprMIaTkxNPaEiJThKEabl06Qqb2xcQUvHxRz/j+3/4+6TS8fLLbzCaboJQDEZDtE65d/NDfvbez/jhj37EzoVdpqMhOxd2eefdb3Dz5i0c8OjBPaqmwlQVTbFAKcWNq9fJByOsccyrEpPGZygYDoZYYxDWkaUZdbViMp3S1hXz5QJnW4yRWActPlRPAFVoA9/kBqkV1vpymWmWU2cpxjlq51CDpMs6XKzmJFozHGRgvYP0dHFCUzUYA2VZYeqaVEmsUygl6Xks77dtQ21apMDYxreyb1t0mvrCSVL6UpwOpMq+cIJ+1RK1lah5SmsRnn3GOV9KULqQChycXzGP34cYhdrCUnC28lh3he7HuZBEZFqvWbcaSJlujamtQg8SjjfHHLSPONqCW0f3ECePaQfX+GY25DiZUtYlY61ogUwmZFpRA7V2LE4esFo+pioXISoixARbr/VVgyuMtl7EOENdl0jjNXgppc+YaxxSaoZas1gteHTrE+zjB5SupHEt1gmUM9jlAn16zJVsgBKKncEO1iZolTMajhEqIR1t8J03vsUgGVG3lnyQYxsfIeKkAiUREjQWrR2pcBR1yXFxymJ5StMWnM4PeHzwKXtHd6lXh1TLfdp2ieo0XNNFFMTfHbhy1lkWEyRcUPakVQjn2y5d27jBc8Mb3Lz9MZvjbd546y2Ko0c8aAzJYILQiiRJ2ZC7/Nbr/xG/9eY/QiSGRXXKg8N7fHjzp7z/6Y+4vfcBjW2+cM49E3SNMewfnnLnzgMePv+QjekEubUNQqCThNF44s1HfLxj3ZSUxcJ751tfb6BuLXXjqBtHKzRWaGqjsICWAikcykEC1MrgWsgGOY0T1I3DVA3DwZCN8ZgsSfG1GBwnp3NM23A6P6GuKwZZTl1WtG3NxtYWq7LmdLFE+UZdaCnRynfEjRWIdJJ7Z6ATVGVJTPUbj6c+lCrNuHDhgq+0lmZs716mLBv+6//Pf8XLL/yMd7/5G6SDEcPpBj/5yZ/zb//Nv+TT27e4cOEKo+EYKTV1
XTGbbXH5cs3B3gFbm5t88Iv3KIslxhmcMRydnDCZTNnc2qHF+SgGa7tiG1ZJVqulDxuzluVySZImDBnTNBXQ+GprztLWNVpKquAc8t020q7MZGtbhNOUTcmgHbO3t49ra/YO9tiabTIc+BAyrSQCy3CQ+LZJTUvbNGB9bKlvxOjDqqXUKGsx+JKfWgxASqROEVKjtSZPc5IkBSFoTUgF1+kXTtCvS2y4fyEM0nlvtbMOlF+4Ukofy2ui1tX/dB1/xboqtS4hyse23lEXMtUeH5xy76gmHRuy0QajxSn1bsYVI5ksTtiZ75O3LbI6QTYlz+U5hUopG8NJWzNKRqSDhKODT3n44D6YonMCeYeQ70bs6U3LYnkXNrYRF19DHD5AncyR1iEDONu2BY+HjIWkWp1Srw7JJUwFaCHQQlA3JSbJGEpJmo6pjCHPErJ0xKpyGAfXxtt8eOeUwcyxNdL85IM9SHOsUpDk6NZ3N9nZ2QRlGSrFUE64tHWRcsNSCMlO63jVVZi2AVlxfHiHj+78GQePfs7+g/cwzRLpGtZqhXklIHC1MfpARK1WxDaamlm2yTde+Q6z0YTbH3/E7Z/f5O5Hd/lb3/ldbrz6NheuXuP4YMHR/h5H+4eUVUuW5Yw2Zpwcz3n97dfZ3b7M1eeu8J0b38X8XcNPP/w+P/z5H3/hXPuCerqwWpbsHR1yeLDH0cGG79qbZUjhyPMBo9GIqiho6pKiWFE1jfd6O99+J02gbjzfoWUKTmNaH3LRCoFSXoMQ0pBlGQIoS8N0Y4dpmmIdOGsZD6cMswTbNuwvT6mqmuViSVP56ISDwwMkgmw4JhuMOF0W2LYlSVLa1pJoTVX65pKTLCdNNYPhkHJVIGzLZOTTUwe5D+DXOuXq1SukaYbSCp3mzKYzXnn1NX784z/hj3/wfX7+/s/59e/9Np9+OuXmxx9w9/4dRqMxb771Dju7lxgMPS8s8pxd6esd7O3fJ8sHLFZLQDEcjTB1Rd00zJdLGmupKk8PJGlKolIa68iyzNd1sJ7GP53P0crX9S3qFVJI2ralbRuckGipGY5HHryt9RzsYMBgOPTFeZIMLRSmaNirDjjaP2I2nTKZTJBCMBrmTIYZB0JgjfHWAI5U+roYWkITsn98PRdB04ZOq0L5CS5SnFM+JMs4rAqlEY0I1Q3UM6ff1yF9U0dPMZhQ9Ec6QGpMLHsZ1VfjSRMj14HXIpyh7xC7HsYUYtltjYn1lY3BGEu1bNmabJENhhwoR3JhSrEqeby34LXdGb852OVEnpIDVita48iURqaKFQmn7RHHDz+hqU49RxrTkMN1hZCxWmEAHcPpg/fQwyluNCNflaQYVONrX+NCjWMnwBk0Di196VJCVYih0iyQDMZDFmVJayxapxzNVxwfHDJfWra3rnH/4GdMLl1D33PgWu6ZAat8QG4kRVVxdfMC1UDTti07dc12lvPGzg4fFcfUCLZ1zq4QPDICp1NyPSDfnPHbW2+wNbb89NM/5s9/9q94cOsHUO37+45NIdYSI2ImoPfLCYSTfOPF7/JPfut/zQtX3+Kjj/8cvUy5e3KLvfyAy1eu+3htB5PtDYqy4O7jfZzOub69g8pSRqOUk72H2KYkzRXL4yMSpbkoLvIf/+b/5gvn3LNBF9/2pSx9Ue6yWFGsFviGd4JUK/J8QNPUFIsVxrYonTEcpbR1Q1U1SOVbg/iOv779jgutXxIhkcJ3pBhkGh00qPFogtYpxvpuvsMsIUlSkkRTNA1t1VJXFUfHxxjThmpcLWk+ZJRmHojbhiRNAYFQjqpY0TQNmUyRSUKiE4T0/KXvvrtJW3uHX2tati/uMN3cRAiNCt7eCxcv863v/CZCOO7d+ZRiueDg0X0uX8/I0owrl5/j5Vff5PW3vsFwPKMxrQ9Ta1qKoiTLfGGRuq7Z3PQdg4U1mKZiVayYr1Y0rY/0SHRCNhyymM9JdIZA0Lbe4Tefn1IWHph1lnRJKon2baWTNCMP
ccQ68eF1zhpM7bBpghSKVGgSBOPhBGMNxeqEx+UeZVmRDXOWZcF8pUgTyWw8oaxq2saSJ0lo26TAGNpW0iTeyVbVLUjjnWhCIYzCSGhrSx0aNboQCyulQn8JT+9XLb5rgK++5guZ+3Y7Thpf01lKn6WG6boE2xDpENvSO+dwyiKM7RtWduIw1lMKEXQ9JeRYtZ5/LVyLujBFi5L69JSRPaVaDdid7XBpnFBbQ1m3nNa1t7pGA6rVXQ4efYSjCREAffREvKfoJIp8iNfzDM3NH5Nce5tyOqNZzhkgUK1ByRB41bYg/MbqWosOsbKtbamdoG4N7XyBQ3Lv/mOWx0sOVi0m3WBn5wqPi4Kj4yVj56gTzVJn5KqmPjlkcuESR5szjnNN0tYM5oKpVvxh1fCT40N205Q3BwPmbcFcJ9xtW2RhectmzHTKp5VhmebcuPG7vHjtN/jg9h/yB3/wf6d4/AugxuEI4f1n0hT8vuPIRMI3rn+b3fwag2TKpa3nKKb7fHTwU65dvsbLr79OpjXHd++QDAY0xZy2XFG0JbfbhrffeZOXXn+JpqpIB0Py8dAnWOHAGZrlFxdxenYLdmeRjr6qvgPTGspi5bs6CHw8rfAFr7MsZzjMcE5SlQUHh4dQN+jE987Sia+eZB0IKcjSEIdiBUJL0iRlOBxT1r4urlA+xOhkWZDIyvfdshaDoLU+hKkoln4St43f7a2jqkusM/jSlxLX1LRtg9KC8XDUZ0I5H+ubZZmPqJC+bGGWJiglqYsShGa1WDKabjDZ3OKVV99gc3OT+3dvUsxPGU/GPHhwj+2dXd5889d4/sWX2Ni6wNHxMfPTI7SS3Ll9i9V8zs7uDjrNcFKRqJTxYIhwlqbVzBdzqrZGKt8ZQmtNXVaAYLGcszscBN7Ra15ta1AabFX6hWIsQqYkIW05y3MPJkiyPFSJ0z45omoboKUoVkipSbKMwXAT57zD82h/D2NbRnnGcJBxMJ/7TJy2ZZCkpFqTpQn1MGeQ+mLuOOeLxTsLSctwMEYbQ9uCcDVtImgbvznQep5Z/uqzgIk1UG1s3NuleXqNTwrRh75F3xngGm+yOuv5cmstUpquSHt3fsAZ3w4ohtM524KFPEkQaUKrBROnaecVr+VD3CAhV5K5qZiJBGUsI51jtaJKJUfzO5w8/kWgL2NV4r7wuHMCG2oHICVK56TpiOFwxniyzWiyxfbFF0gnu7x3+xZ7dz9hcbKPrlvfs8y2IFpa1yKVpDqpGCSZ75otJUdHxxwdntAsS45OFth0THr5OrMLF3CTjFdvvMqDxjBXKcY2YBytzimFoEkkZbMiH6SkCB65hsfSMpyM+M44597eY/7UbTAbTbmg/QanheBoWbM/liwEHB8seXmU8dxkzBsv/x1ms6v8y3/xf+bw3p8SwsR9LeRYCYwYwQDP797gxuW3KU4XrNI9xKLg05//lEcP7nH98gvoVJGOx6yWFcuyYTCe8U/+V/+UZDjGlDU61RSrJXnq12PbWDYuXaStSoQ1DIdf7Kd4Juha5zDOhnKDIVXS+LTFpnEolQZeT5LmGVk2YpBPfPPI5YJluaRqfFk3ZS1Kx1YG3izN0hQagbGSPEsZZAOkhCxVtLamaSvKcoFzimGWg3OY1tJaGE9nrFYrjk+OkVIxGOaMphNfIyGRuKWvtC+lwAR6Yzgak2Spd5iE0CkRErarsvCRFziuXL7KIB8ilWI0mjAYTfym0zSk6YBrz7/A1vYWTVnQtg3Xb7zk6++OZyTZgLJYkSeS8cVd5vM5F3cvkj3/PErC5t0d7t9/SNtWmDYnSfzGIqUGa0iHGauyIBmMmRcr8iQNITTe9FsuloyGE8pVASF2MEl8LHOe5iTZAIAmRGVopT3/qjzQC6EZZClCKVrTslrOGThDkma0GBrbcHx6Qu0Mtx8X5JkiTVPP58nEx+omCZPBkNEgZZSnpEkSzFNJ66zfUAcVeT70zr/GoVWFlj4tHGu7WNdftfhi
4lHbteG1oCcJr9F2FrdfzTjnfJv21juphPXJHp/RcqNPx7Y+HM36TsEy+A5QUFBTDzLacoUzhrd2nmNDCxbWF8TfX56C9ZtnqRNe+uZvs3t4nf0Lz/kEjqakqku0ytBSB3/LgPFo08/dwZDxaIPRcAYixTjffqu1lseLFbtX3mClt7EnJxQH96nmezSrQ473HlKc7FEuTilKH5ZZLpYIJWgThcuHKDUg2blAmWnajZxyOmAwm/DDB7+gHG8y3b5Cnoxoa8f+cIaYjnllmHPr8WNOhinHGxd4cZgx2XvAe/WcH5KycfUadrXi/aNj3pjlvJONec9ULMaKadugmgpnU+7sL1lISWots8Er/O3f+9/zz/+b/wPF4U8RznYxemdC9ZxjY7jJ1uwiW9NtX+VPNjR1y5WLL/Pbv/sPgIzl6ZLT+YLTk1PSJKM2cOn55xhORmBa9vcfkE42UNkALXwAwerkkL3bn6Ck4uIb33jmnHsm6LpgDlZFyXyxpKwK2naE1pKqKlCqDi1eHHk+ZDiZMB7McMb68KxEI2SLki1WWpAmFFfRSKUY5AKZDzGNIdUDihBbqHSOaVrybIATAtP67KWqNpiQRTYZj5kPh2xubaAQ1I3xMbBSYWvnIyoC2EqpPYdsHEIlSNEyyDOcgzzNqYoCKzUoxcZ0xo0XbjDb2mZ+csp8foKQkjTLUSqhbStoHanOyGfD4EhSOKzfEJoGrRRKZrSN72qlE01RFswmE15+8SVu37xFtZojXMl8EQHI0y/GGNq6oixX1EVBqjUitOgGqMrKa/HKg/Qgz9GJZDqeMhyM0CrFOkPTVFR1HXZ9g20qGmdphEYlCUKnDPIBeZ57p5mEtm6xQuCcYl74MMFl1SBEQWtayqZikPg+cYMsYzIcMEwzEp36RAmlcMLXmlDah5OlWUae5qQqQeuUTGvvvDStT/P+FYuzFickUlqsFb4sqPX5/Bbr45+dxWJ9hI7UkQH2IXBOIpzvDSel6jKe+pBQX67R2hZnLV3jTufIpE+RHmRDpM5IBynHpmAoMzZVwqJeIcPzr1qDvHKF5156l8lLma99IaBsG9o2ZG8pHWKtnQ+RdC0E62m/KDDWz7NlueK0LVhaweP5HCtT7rUWpQYsKoE2irIy3L91D0OLHCS4LCXdvUGSJrQ0ZBcuMdm8wpSEe3c/QG3lyCtDXrz+Cvc/ucn9rKTadrx6+TmerzL+s49/RJte4OboGt98920OHt7nQ3PEfpmxeWGLF23FB6Zkv3X8p9vbZIM57+n/P3N/9qNbmqX3Yb932OM3xnjmnLNyqKypm91NsqluiqMIWgJpG4ZFwJBgG4btG/8NvjDgW8mAL3xj+Mo2BJq2BZswCZlSswc2m11d3V1TjmceYv6mPb2jL94dkVmkVElIpqp34WRWRpw8GV/E/tZe71rP83sk2zLya8WMV+sNJx4+Kme0WtHESBYjh1KyNo7p/vv80m/8T/ndf/i/Jnbn6TT9MzKugJY5R4u7SB+TVh7B5ek556eXfPTtX+POOx/Q7RqGwXN4dMi9+/eJUrFZXfLsp3/C/uE+IcDlyQnZpqXa26NZb3j68Aseff4pClgu9/iNr7nnvqbopr90Xc96taVpOuZzg/eSvhsI3qVlTpSoqiDPC7I8x/uA0hKl04t1zmCjIYokrhe6IJJjhhatBZmuCUCeZ3R9i9YBqQt2Y+4YCGxnaPqWPMvwzrPbbMi1YrlYsl5vsdESfCCTCaTjW4/xDiVyfEiLoGuCVFVOKOuaoe8w1jEYS1UXLPb2+ejDb7J/cMR6vRnno1nKfIuRoohEqZEijkdFhS6KJEERMuV2Cp26Zmtp2h3GWoqiHE8Jgft37/Gbv/mb/MkPvk+3u8KanoCgnC1YNy/xNnV/fdcm0TqzBBJSKXlDZ4rNtsE5Q64Vk6qkrmrq6YzJbJ/FYp9JkVNkis12w4uXT2j6Hh+v0ywAH4nqOkYmmVuUkJR5SWM6ZpMJjTdcNMl5
6L0lzwqcs6w3G6RMtthMa4qsIFdZQvXpjExpcpkMMJlOXZVQSeuaKZVsxkIRSHlxv+grxJRfl04/Mcmr5PW4IGUWEyMifDlbiDeG1FEhEMQNEOlG4n+tbCDcWIxvEprHeavOJBvnMduG918/ZF6UGDNgYmC13qZoLCFBSFrnGXJJ1JEBCDHj2WDIVUGWwzDGMVk0RkDbGwoBRmq6waJFxpBLnAs0UvCw61mZlr4z7LqeRQGxNURlePbqc/rtCfLeATpCzCPTBw9QB3eZzo+4XeV0IXBuOlw955c/+IDHTx9z2nb8/ukjvvfht7ltO340nLKzO/Tt2/yV6j3+1G35xjzjf3h7j0fLnP/j5Rd8a3nEPV3zvir5v5w+Yp0rfiQ8f/n2LZqzZzwzntej51eOjvjnfcN5VnBXKHxrWCpJpQXng8MEwWt3/xyP3/sbPPvj/wSiQ8WbqQICwVu33uPNw/foW4OQLcH2/OCf/yGT5V0G6zG94c6bb6CkRIRAu16xvnhGWdesTp7zkz/6fQav+dav/DpOZ1ydXyCV4Oj+HQ7u3WY6m2HN8LX33NeoF9JX3Q+Gy9Wa1XrDcrGgKgucg34Y8HaDzkvmZY2SacMdgyM4l9JxnaHpNtjgEEKQZSUiFuS5JIScGBMEwxiLDREfItEp7GDGIpMjhSD4QHCB89UF1lsmucY6S9sOaJ2jnUOpjNl8ihksbbsbt7dpjqxVRj/0OO+ZziZkUuOEoOkH8rygnkz5zne+x3e++12EkFiXDBi77TahLJWk70NaQpUFWunUnSqJHd8c1hqctVg74IxDS4VFsdlsAJhOasqq4qOPvsmDBw/4g9//Zzx58gUX5+eIvKCeLxgGS1ZUrNcrJvUkucFkEt6HURKmSPL92WxGWaQZblVOktNuMmG5dwuCZbo8pCgrHj95iLEGN2qThUxuNWsGRvs/8+mEYB06JJTf4XwP5x3rtmG9W9OZngzJpKwZrMVaR2cs0CJJOu1MSKQUZKRE4mR0GQXtIi04rt8EgXSK+oVf19Zzxg5XinHkcE1vHRdSPoyLKTt2twoV5Rg79dU56rVMKUHn4brghnHmel14QarIYlox21uSF5plNaHIS7p2h1ASdEEgkbmsc1xuNvzR9oS/cvsdWi/ICoENsBksOktfQw84JXC6ZN0PnDnLNK/Y2g43dFxEz8N2zXRSMs0kPTtCt+LThz8mRsuh71EVVNUhLgZcXXLv/husY+DqcEm89YB/5/XvsF8W/Hj9gmebFW/pA0JWcbl9gleez7an/O17H/IHr9b882HDD8+f8L988E2+ePqIL4aBh7bnw+U+/6MYuLI9Jg7o+Zy/sDflPBiEshwXkf/w7h1+oFcgSjKh+SVy/rBpMDLjN/aWnGHJUNQYQm9QquYbH/27PPvi94ibhwlyPp44pMj45uu/xrvv/hp1UVMUJSfPr7j71kf85t/5e9i+ZbaYM1nMMd2AG0zStNue3fqcV5dn/PjRI4Seoj//DBE8V6tzXj57ysX5KafnZwSga1v+6n//3/+5t9zXFN3kcQ7WcXm54uzsgr3FHkLIm/DFvuuRxlNNbcoC8+mIbU1KdnA20ncDvR3QWckw7IhTTZbNkWICsaZpHX1v6I2BCMakLtrFgLfblJ0WHFpKmt2WvmvYiMjQdSitqMuSg+UiJeACZ1fnaKESYCd4iizHYhn6QJ5lKCkY7IB1ntl8yt5yn7fffoePvvURQgi6kWub5wVlVdH3A5v1mr7vyfKcqqoRpEj1KBVm6MmzHO8sfZ9SNayxCQwjVdr0I9jtdjhrmM1nHB0e8df+5t/it3/rP+f3L38ba11SKcicrt2htQYhcd5R1zVd34yRL4zyO5n0zUoxn80o6xnT6ZTFfJnmv0qhdc7+wW36oefs/JRuGMb5sUCLJIuxpifLNO1gCc4ke7Mel6QIZIhJWhYcQQqsMWluOy7pXEzKBeMHhjF0UQTx5ULqK1HijAUs/S0S/wx0uteR
mtfSsRACUiXcigwJ3CSu/3e9qCJ1u4GEHxUifU/HM+0Y5Z1AyWLM2vqZ8PpxyJgry2Ask6zk1mIPN3h2bUuzXhODBSkQo307hki4XPHPPvmE7x7c4f4IjgmywEjBxiUEqsoCUilsb1B5yXvFnM8uLnkqDKpW+GHgncM9fvDspyjvKFROZs+ZHCv60xWb6Fi89jar1QVWtMSDBfmDN7ldZHRFYFbl2N2aHz07SH8CmAAAd+JJREFUpfEtt32kxjHbnPGG6VFSco8r3LNP2DNbyhyOswV10zDPC2bzCc/6nsHCm/UBf7B9xZ/6DX94ZvmfLe7yn66e8Qf9JZ97y39wcJu7W0cnDFK3fG9vRphKoiwIESZ95NGw5UNdEaYlMXpu8xYP3vm3ePr9ZwTsjYLjYHaPb7z55yirKcN2DdFiTcdiuWT/9jG7i3NC8EgixShNNTbQdD2fffIFbQ/3XnuXjz/7jP/H//P/zrSqmBQlR8fHXDUNT05eESOJW/I1188fLwBEgfeB1WbLyekFe8s5WguKPEfLjDyrCDLdVsE6vLAYM4xRPgqtSkLQrNcrsixy5/YbTOvDZKkMAqXSvFYC3jr6oU/SmiDGhAHDMPQwFv/NdoU1ye67nM9YzNKSq+87RIzsNltsP6R49BBHNYJE6YIQ0w3pYkQjEwOgM8xfW/Arv/bncUPPersdQTkKJJyenBBjZOj6xEZwjihTKqoeN//d0DOpJigBza6hKNNoJNcZMs/HLDnJbDpJxZQE+bl9fMi7H3zAH/3x99lsG4IUaKkp8gJbFHjvk6vMjkdH70Cm8YP3kd12S5FltF1PVswQIsMMA5nzydYcE+Tm4PA2u2aLMeZGNO+ReGuRKkvfp+CxLhC8Q/hkIb6z2E96UmfxIdANPc5b3MhQkJDoTeF6/p+SnpNm8logGrjGlMSx2/uS7fSLn+kCXxobxq43hkgUgShTh8o1jyFGog/j99CPsejwpS73WrQlxmUO3BRa8S8VXQFRGozdYr2hVCU6c4S4w2cZIPGjIsS5JE8rmh1XL57yf/ij3+E/+OZ32KsqpAgcT0vmNnDi4E454YeXL7k732PjAp9tz3n/1h6H60t+ZK64tZzhup7XpoLtasXJ80sKNeOWWPCJeUooMvJc8tYbr/NZf0m92OeWypk4QbVdcRR2+HjG9uIl1g/kUrHVBXrbsq8dSsGyNJirc15zPXFVcHu+oju/YhYD282U9fKI12bHDEFwYQbeKyoeUKECeF0yzTw7Ipet4crM+ftmxYOq539eFsz6BpcPFHrC7WmJKx2DDRQxgyhZyBnf+u7f4dlP/7/E3XMQkMuKv/rd/w6392+zevGY1ctnnL56Sds5fuNv/3fpmx3ODGRa0e62ZFlBCIGmbVgc3eN7v3mHq8tTPvnTH7C/t5eAXWj++t/4W7z37W/y/T/8Xf53//F/xLZpOJjvf+399jUz3ch1cqY1hqvdist18unvLfcotEbGElFoiiJDiIh3NnEQgiPXGVVek6kJWrZoWeGtAJ/eilvbQdOQpPJq1OxGdrsN1gWk0nhvsUOPsQZrBqyxgEgs3gi77Y6iKlBSpYLtLDrLKPKCTdNQFCVVWRJ9wCpDpjXRR4QW2MFweHjIX/j1X6eqCtZDT2cMq9UKZw3VZMJg0n/XuwBCJlSiTfI05x26yBERbD+QZxkhRnRMAHU1UQxtm0IZpSS4ElFJQkwPlM1my7179/jgw2/xL/7wD3HWYLzFB4+xbpRXStqmQStFb02CrYRIb3ukKDDW0nYtZdkwmAmqBcoaQWQIDsGEsqqoywo7DKx3u6RqEAV1Pcc6i7OW2WSGqhVt29F2TZona8Uyr9jIDV5KZFHRWoUdmnScJiL89Uae6xqbZDo3R7v4L9XWkS+b1vr/GiXx3+z1JbwkjnmoMc1mI4ggk703zReShAF1HRF7/Te+LLbXg4pxpvulM4GvfhNSNAwIYamzNCrz0nG5XXG1WSUYCSNcSKYl
39APZJlicvKCTXT873cd33rwOrPlhCpmvLO3h29b1r7htpL8aHvJfl5wO3f88eULqqzm3WzKf/HiCZdWcp8lm/OHtLtLmrjldrmkmM1Awv1iwh1VMtiKurccnL7EbRrk6oSVtXQ2cnF2jrWWsxh5gqJ1DlEXzGcTquUBSI2uMrQ3lC6y2myZeo+4ytHTPfz+fR7rkv1SUE8mHBd7RCc4jgO5FuwJmNu0OM+VoYiax9stFRW/01zxoPT8tWLJPsnYlPWSz5steVZyfOcNDh58l/OfvIDouTO/wzv3PqS7uOT0i8+JAcpqzu3XbgGedn2F61uK2eLGsaq0ZDKd0PYW03cIAYv9fd4QitPTcy4vL7k6P+FP/8WO1cUFv/qrv8r5xSV1Pf/ae+5rdboxgsoEuoIgB4xraNoVeS5RkwVapUQHLXXSrXmPNSm5oSgK5vMph90evWnoe4cZWh49vmQ2XVBVNQRBEEmPJ9CI6JnNSpq2Z7fbsW0ahn64UQVIEZhVJdNJjdYa7wNaKczQ0bUtUmcIlRxRSuqkJ5YCpbLR4aUgBLbbLT5EvvnRd3nzzbfY7bYMQ4pYL0bC12a1TjNmBE3bUhQFnRlGBGV6D2qpEpPABXZ+Sz2d0Q8bzNilO9JSbDqZ0A9DYhqwpTcL6knN7eMjfuMv/xU+/fwhZycviSIpFNTIW/XR4ZwjRj2mESTYh5KaEBONSUiJtZbd6gI5c0wnU0zf4GyPHqVZy9ksLficZ7PbYEMCxpfVFOcdu77j6GAfhUxmjb6lKicoITmoZ7jgaXY7fEjx84NNgJvruT/x+og+2oJu9FLXu/5r6F38V+vwL/D6mdDCMZInypBGAzKOgPpkergWgSa6RIqTH1k+RL4McInjqu1macZNdb75b6Va75GxpVm/ZPbaOxiVMWSaIH3aYQRPby1BwLwqWPeGiWuZnJyxaXsedVseKsk7Rw/4+08+5sPFHX40bPi12R3++eYlXZHzb9X7/M7qBX3I+LVsyvPdJZu+Rw2SiY9chMg8WJZDyz2TeMKTdkUMCs5fst5tadqB4XLN1fklputwgyU4d/PzF5CMRiKitOKLuqaYVEyO9ji4dUx9fBtTlLiyoAgZmQ10nWflAnmtEVWFnx7yXFdE5akrzX45S1zu6Phmbjnwgb3c02EQ9KwHxy5UXHnJJlreKScsi4LMSYzJ+ODbf5P/4tPfobANHzz4FvvzfUpRkL0jiS5w5xsfklclq5OXrM5ekal0wpQqeQ7CCEDqmh1tmxCp+0d3yMspgw04FzDe4Vdr1lcbtMrJdcGsnHztPfc144XkEKsmGXt7BbN5jiojKIvzPcYVaJWhQo63Bsu1eaIbj0Sassg52F9gfM9ut6YzHboIdHaFCz1FXpJlNRKdOA3GY4xNDF6X3ryDGYjOIMhS+KRIy462bRBC0HYuxan3hqJOL9o6S17k+JC+QVKEGymTjyl7LM8L7ty7S17kZENydtlhoOk6dm2TbMbGoPKMKBLA3Q89eVlQFRVN15EVBTFGyrJks14laVs3IJTEeEtd5ARd4YXAjg+OrMwR0TOpa/Iso64LlJJ0Q09VlTcPrxgDxhgEAhNsuhmcw1rDYjaj71r8WNyN7snyjK7bEZxFa00Int1uQ4gCh0QXNblJqRTGGYzpiEJQlTUKwWA9h3t7WG9p24au79KDSijmxQShM9qhpTXDWGcjOI8z5qa4xOun0XVZvQmuEqR0kH81gfUXfY2nfb7ahV9TxxBJk3tj6xXjIySGMaFZfuWRMiaexURWE1/p5K/Vu9c5EmI8BShpaLsz/uj5U2IfYDAIZ5AipswxrYgj1nOSabTzNM2OQx/Yrbd8YzmjWZ0j8hxz0NGYHRe7Hu13CKVQnWe/ucAGwYQt5dkZod1xEDOy7RrXNCx8pIoDxcU5692Kl7uex7uei/NLnA2gMzKdE4sZoZgjvYOhJ7qB6Cw4T7AOvMeHwM6s2a42XL085WL6lItbt5gc7lEd
7LPcX1IdRNadpwdql1EPA0XveWUMrQiIMiNO5rygYCcCVamZ1VOySeRpsBwXnqN8jq4iT33Dk9hCvMU8wl5ZQyV4cP8jisU9Dror/vxHf4n95QH91RrTrAjeY01DPUuL6rMXz1kuDyjKCogpzbieEiLoomCvqrDWUk2mzJf7HB7f5uzkhKIs2FxeMu8HIoL5ZDqO1X7+9TU24IjKYTLLWS4n7O9XzOY5070Jy+mSTFRYF7DO0LY7cp1jnKHr25FfoJN9V2eUZcbgM9AeHyRKFUAGQWNd6hbbbiAGgbFpoZblBUpKNJG+2RKDxw4JTN6E9AM2fYe1BqJHSMn+cslqvUFoTbIxeyKCvu+QOiPLMpq2TV2xVCitqKqMy9P050xn05Q04TyuHDWaPvEQqrJKQJcsp+97oo9kWZbMAV/RAxtnUUJTCoVQmkwXGNOjM5WSLkZ84upqRSTy4sVLNps1MfrxtaTNvkCk8EohGYaeTEmMGUh0pYyqLFLn5S3ODgRXoCdTBtMlna81CKkoyxlIibcmWYczTWt6iA7MQJbl6KygN4Ztr5nP5my2O5q2QYy6z4gAKZOjzruUdOEc2kdKKbHGpcWljCNR/8vU1VRlvpRTAamT/LOwSLshVl0/JCKCsdsR/isdqryZTF/HGMRIAsTLMK7awlceOGkJzQ234cvX/hVgWfqLbYjDlqK6jRpPIMYP9EPSWUspKbWmswZcZF6U7JqWSmuyVcCc93xzcUBzesU38pxl1fKoX3GrmhDzS+r1GbmQ5Lpm7+ULyjBQyAy2DWJ7TtsbnvWO89NzNl1DRCNjhlvcIo6Bk+QlNhjwkYJAYS1d3+H6Ft+3CGsRxqYiHBKO1YfAbrejaRr084KDW8eEN16D1rDOS9S0IndTdFRs7YaNd4hMMo2RLPRc9FegAjMl2JvucS5XXMYBUZWU01tcVp42cyynilw5nvmcU3YcysjeYsnh3Y+Yv/wTal0y7HYYZ9BVydXJKS8/+YTz8imri3M+/tEPefPtd9CZwtYVRVETVUYxnZFP55h+YCJH16UxOG8pZzXPPv2U3W7Dg9cecDc4Xr14/i9ZwP/Lr691pOWZoKo0s1nBYlEzn03Y29vj1v5ttKhpW0fbbRm65qYAughFUcAIEVZSMK2nqFzgosXYiO1LrMnoho6u29APlr7rxuN8QAtNVBIfHFJEnHeIEFB5BiLStGnuqEWK2BBCUE2mQCJkZVnGru2SftI7umGglCn1NsFJEuD85PQVL16dkhUZfhjod5ubMMm7d+9zcnIyHssdziZdcqE12TgjndYlg0lvrqqukDrHR6jrOiUuoCnKgrLIaZqWZrPhzp3brLdb5pMK4y1XqzXej8oH62+iuJES4QMmetquZVKWo4IhWUnryZRMK3IlURK8M3TtDiWgNxbjDZnKkSojz3MCkTwvMNaQS0VvBwafHlZCauqRL0xw1EVB1zap046BQim8FJghAU6yEAnWjhZrhxZizLNKxTmMbqCbYnNd3EJAhZCQnvEX3+1ey8OuLyGSCiGS5GPxWkomEq5SkjS7wkOU4GVAhnT/pTjvUSY2BiaKfwnULv7L/kF4duvPKebHXG1TF6miJxcKYmAYY7OUzsilpGl2gKSUgvXliolWKHdF026pypxLoaiGhmlRsY6aYbfBS8GpyOg3a3ywbJH4TUOzWRN9QMsSNzkiW9zChIhRGVEo8iiYSU1DZJpPqINnFyyNNoSiREznKDPgh544dAgzQNekZIJEn0GopA8+efkCIzT95BDfbjiwA4dR0jm4BEKhmIoJMy3wJsVV7Wc5uYyU1nJpdigcRT8gGstD94TVYsrx0W1mMvKQliu/Y5bvUaiSe7ffh8snnD99Tl+23HrjbV578Ca33zSEIGjXl6y3O4wxXLx8weHREYuDQ6JI6o96OkOOvgA5xjV572g3Oy7PT3n+8hW//3v/jF/67nep8oy9/T2qSf2199zXd7pCkWWKosypqoqqqphM
J8yXS+psiRng4vKUi76lbbZYF9BlfaNhjVGRKUlZ1WAcg9XstoZ2Z7lWQgbv2G03tG2LEIn0VZSJiTAYgXGG+XSSYs6VJoqAM+mhoETaOBdFQakVInjKoiCOeV2DtYmQFSPOGHrTk2uFkoKu3fLw8y947cFrtJsNfd/StR3nr16kuWXfUSiByAsyrTGmp8wKqrrCW8etO7eQeYHUDqU0aVwsyLMEtS7zDOscXdtgreXxF59TFAXLvX2Ojo959uwFi4ND/uj7P2AYhmQJDv7aLU4mJM5brLVJxhYCZa7JpMA7h3OGLKtRmSaQYOe73QYlE5siAkKL9EDsWvquRWs9Bmc6XHC0/QBSkZX1KCkTyUmoJFmmcaZPfAxniSqidEZNBO+wMgWtyHH+jFRELYmjvToyFt5RDCxGQpoMARV+8QU3XeJni3+aEoyM6y/HINdgnDQtScwFMY6wgwIhwjXdNUnhRv2ulGMhH0fCScWbLinkzUzUd+dsnv0LZntvQX5Iu7pk0zd4a9JcXghM34/cjOReXG12uChQRcHp6iLlELaO9dBBDDg9cGosjenIgBDTUjYGDzJHyZy9wzeZTBdsnefUWDJnUc7Rx4COAqVhK8AhCTpjv5ohfOC4qBhMz7ltU/ir9ah+h2u3hNkC+g6MIbqBxMM0EAKr02f4POf1X/9rxOmEK9ez2rUoLVjKCaUXBOcJhWJelhQ6o1YZrRkospxaVSyynLZp6ezANBr2dcG5z9jmkdvzJTsBz/st9/eP+eav/3vcnRxjjada7LE4upvqwmCZ7R1Q7x9izcCLz75IQQpljcoLut7ijE/gJiFoN5u0QJeSrtnxe7/1T/nk84fk1+PFIqWo5MXXW9u/VqcrJOgsDZnrcsKkmlHkFXleUlczqiotdNpmx2azxjg3JkBotEpee6kyovVIscfQb5ExkGeKfjC0bcd61+C9Z1JV47IrSwU+zxFEMlHfaB1F9Ox2O6qiZFrXZFrSdi3OpQTRvm8wzoHMsC5ph63pUyFylvV6xXw6JdOa7WbLJ598zBtvvsUbD+5xdX4OUpBVE548foRSr7h1fEQ9m6OyDCUEdVEwqUpcnrqkzcUlbdumlIZCs9v2OGex1mCM4eT0hLZpqOoZd2/d4pvf+jYqy7i4uGCxv8/t+/eZLmajRVSMHaJEjbbT9WY1al49zg40waNEpNAZmY/4IHBRUciEr8uzjMEYsizDjHrpbAzctN4h7WjNLSuGwZBrT9e15EU9LiSBkJZ2WZaN2umYDBDO4cOAk4G9SYVQELOGaBU+QougFTIxU8diG8UYHBgC0nuUdSggEx75ZyCCPYwQ7+t5dARkEDfKi2Stjz/zKy19x5GDlGOBvbFY3BTx8aUnJkPkJiE8NYBf9rzXDNyhO6UfrqiOv0l5eBdzek5wK7phoB+6pMwRCYrZDgYfYFJP8J1BOM9cZygvqWOGkhJF6pSnYVx0C8F8PmOxOGJ2cMhsOme3ueJye4XZbhHOYqLHy5gYHXlB7Hv8qFkXWvMiOKSATCveufcu1dkFl82Gbbsh5jlCF0hrEdOQJJr9Dt+swQwwjh62L57z8X/2n/L+v/f3KL/3l1g3K+anL5jZFqcEh2VNXRXpvyNT0auqklKAiiKR+KRgXpVMtKZoNjzeXJLtLTjWE07yLWjJsijZy3KGpqdteo51PqqNItp7pMgwZUk1W3Cx2vLq+Rl7t+4xOczJspzgGZ2I0LUNfdehsmSKWi6XrNZXvPPgHT763rdZLic8/OQn/MHvfZ9/9z/8X/zce+5rdbpSKfK8ZDZdMp8dUpZzFBnBj5HpUlKWNZPZkuzqnM6sv7yhY8B7k2DkVtIZR55N6HuJCwO73cBqtaPrOqb1hKoosNYQgqNrW9zQk0k4OtynquvEdLA9l0XG1TqBONZbgxsGZrMapTK6LjE+wziLve4qopREH+i7pKl1Y6DmdDqlKAve+eA9bt++xe/91u+wWMy59au/wssXZzx5
/Ij16scJGl7X7C9maCnZbDdsdzvW293NmzCENKgz1tK2HSDZ3z/iw4++zbvvvMtsPuHy8pKuHzg6Pub1t9/i84dPx9lwQVlYjIHemDTqswYzpHm1VBpvDaWQeMDJiPUJA9n1Bq3qG/hMGLflWikGN+BdTxAKGyLBDVjvKKspeVljvEV5T981KKWZTnP6wYwc1SyZTERAeFDOUeLQOlKpyHJeoWY5MXic0mxd4KwzbIzHXU9Ax9EP0SOdI9eKqQhUIiLFn4Fud9TepuFr+lAgEarEOJdO47w40ukSeQwR05w6Xo8axnPbOKe+5rdeF2quC21M3xOAEK95vIDIkXnNcu8ue/u3OD5+i+w7f5GPHz6kef6Q7cUrht0OHR1E0FVOpnOmswlKjXDuLC2Ogw8UeZEkjki8zKCcoosS4xxRV5xs1lxcnjInPRgXeU1mHefDgJca4Q1NVuOyKVFqDuoJSx8QRUa0htPNFf+8XRN9pNaaZVVyvttCPU2mHW/ph4aQLcmmc8TQ4PuW2O6g7zDrK374D/5P7LUrth99B79/i3J3ycI2fNMbPiAjj5GZkEzLfMz9U3Qmacb36mm6f0LkaugI0bHcaWx2ga8rjmYL1utLNjGyrCbsH92mKOvxh+mI0WFNj2kb1leXPHv2hElRcffNNyjmC7KyZsQUEkLEDgNKKeazObtd5NatW3zn/Q94/xsfcHTnFsEPGJfm3F93/fyiK1K3M5/tM6v3qbIFuayIXtK3A7loyfJ0Y+U66T67rsOHdPSN1tJ1A32frJNFXrFeb+mHgc0Ik/HOcbh/iPPpGE4M5Llif76gridM6gnWGnZNy9XqktV6jRkGvB3SbFUK8kmVjszO453F+siuaSiqJCsjpOIblcC5wHa7IQpBXmYQA6Y3vHj+illd8Prbr/Py+Uu2mx2Hh/u89sZb7HY7Hn3+OZ9//imff/oJXdPSDR39qNnVKnFsi7xgvtjj3t37/PL33ubtt95lulyw3W04vzzh6fM0s37/mx/y5lvvsGtaXr54zg/++A/ZbjbptWuNcgFUoGk7UsKuRYeY5rs2JfWqPMf6NFIIWcA4jw2CfAQE5bnG2HQki0LQ7bYJmiJV6qpVTwiBPKvohoS+tM4m0ubNJt4zqUsYQDmJcIJKV5AFqjxyNMmZVgVCKfoAl51Dbhu68xXBjYVMjkUlCDSCaZ1xNMmYlYoq+8XH9YivjDlS0MB1EUzz6evpiBCMsTeM2YPXS7NR93BduMd+V0iRbM/xK7Pr652dICWCaEVW7HFw9DaHywfcPnqTw+PXqeoFQkjOm4FvvHfEy7sfoC8v6XeXyN05CxkZInTOsigL9iYTmrbBDB17izlS1Vw2LauupQuafHFI5xz95Rmr3YaeNWG3TfJPAl3XEKxJjOtqwkxXqL5jtWsQZU2IA5cSLoNHGVjUE27fus9J09Gajo3wVLpkqipM9FgB1jlEXhFFilAvwxJ7dQ55CX1HHDpCb7n6//y/0f1AfP8DGgVN4zi5vOSfnj6nEpa3lxPenezz9mSPw+mcKs/J82To6Y2hi4FKapY6RzjP5dkL9m/f4265YHb8Hndyz96sJAZNUVXpZOIjzg5sLy65OD+laxrmswXOWZrNNgUbqByBxNlA2zS0bUtWlGlf4QPRON77xtscHE64OnlB8I7tdkvX9l97z30txLwsK+q8IlcFcoR7WBPYbVrcAEU1JI+9CJTlhLqeMwwdQ5+eSE3X42PK8No1aSPe7LYUOSxmNaFKcy1vPZkUZDpjudijKCcgFOumo+8aLq8uGNotwTmc6RN0RUsUgukYm951CTDTD5ambZFZjpaSXEmSt0ETMGyaBq0VB/sHlFXFp198xnQ+4b333uX4zh2Eznjy6DGrzYbLyyuOj27xm3/5N/hrf/2vYQZz80NYr1eEEKjrCVVZpYj1skZIyW6zot1c8ulP/5Rt1zCfL9k7PKQsSuaLfbbbHdtdw7OnTxjahqFrmEySWQFS
HtquaVFCEaLFuQE9UqR0rlNXTQoCNdaQ5zkWweDSzehDTFk6yNEclnKxhNQpXh1JrhOLtShK1pv1iMLUTCYT+lGPrJSkyAosaWvrrKPIJHvVhINFzcHeDJUrtoNFbTqMlJw2DWbbE4W8MQzICLkW7M9r7hzM2J9XzKtffNGNo9Y0jg+IFPNy/cl0v99E/YrrzjU9lsRIEBQ3brP0K2Euryvs9Z9FopnpgnpyxK3b73N8+02ODl5nsbhDpkqQmtb1rHrL1jjOTWDn4QpJUy1w1QK5d0ydC3pnaaPg3AwMlxewsYgoKaTCBQe7lr5p6POcev2IPlNkmcSXOXqzRtmEkowxclTMOGlPaETASGiw6CrNfLUquVNVnJy8xGYTjJJc9pad31DVRyymC67WF+xUhSoVi7xgfXlJmMwIc0GtoNtd0buIOJ4ihhbhLaLd4FZnhO0W81v/iDoa+g+/QZjVeOvwEXoh+X6z4Y8vr3i3mPI37r/NW8sDMiXIshypBBWS6CLOWiKKd+6+xf7eMVIpsr6nLBKVMMiUfI1ID0LvHKZrIUTu3r3H4tdr+m6g2ey4ODlngaacpdNx13UolXZE1hoiUM6mZC5nu2s4OXnB6uqK7WbLdDL92nvu55sjRIrfVmoElQcIFox3OBEYWoPe7ajqGoSiyAsmkzkhgjEOKTOELPA+0I6SMCEEe3s1xjqMFWhSqkMugEwzmUyZLvYZjKFttgzDwDB0mKEjIjA+jMaHBD0/3JszuMjF1SWRpN/dNg3WeYbBILyjWi5QQmBcYgDvmpbUmEjOTk8YhoF/8o9OqaqSW0fHVHXNa2+8xsXZOc+ePOHx48/44vOfkCvN/v4hxWRGXtcs9vcSX8JYttsVl2cv2Ww2rJpdSvxVkmoyZ7ZYoquK6WzGbpMkNM45nr94MbpYqqSqUJrBJH5xN/QQ40g5a9PScNQZhRCJwaOFBCFwMdL0A2Ud0tEJiXMWqTRhhG8LqchVzmANUSmMM+RZOm5lIWEe22aDyhK0qC6rBC7yKfHhen4ZbMANnugdUmmyXJMXJQ5FXniK3FJoPR6p3bUglRgCZaFZzCccHS442p8w/9fY9P6bvvwYBS/Ho7+I119vEoFxLfeSiSyW7rFUbceGGCG/VECIcakob5reCCqn3rvNg/sf8ebr3+HW0WvMJse4cUk3OE/vBf3g2A7pPdJaQ9saNtaz3ayQpsHuNmxPX/DZ2QvWzhCmewRRsz9YqizDFAX2YkMsC8gyZosZEz9wP5/wcttwcnmBO31B322xk5oQI6VULG7f58GsZtc0CC+5WF+xjRJTVASleWUD2cExVabJvefl6oJOTRjMjmmRM5OSre9wOmfXe8q6xgFWCZzKKBeJ/yGEJus32HY1ckkgliVxt8X84PvIvSnh3Tchg7gz0EaikylAM8KL7YZlUXE4maBEZJJppBBkkymz6SH1ZEmuc6LURO+p51My2dLtdhTTPYQUeGvwvSEEkEVF5jx7RcFivmB1vuLZw+cEdYEsJ8QswxPYrDe4LuUtOm84ffac09NTQvRcvHrBYDu22w2vv/Y6Rf7fcJF2LfROTiggpq4p+kCMjuiSnMs7k+RLmaKqymTVFZ4QNTmBIAza9dSFoChKBjdwcjIQXMR0O0QIuOAoyxI7CpJ9SECbbhgwpocIxhhi8EzqijLTHOwtCFGw2lyRZRnbzZamaVO0TIy0fY8oMvq+I8/z1A3aFDUTYsT0ifOwWl1SlhOePX3GW2++yXqzIS8Ljm/dAiG4urrk2aPHXDZn/PSHP2DTtvTWMS1ymj6Ry3KtybUmCMlyb4/b91+jnk7JiwopBdv1hiIrmM5mmL5jvbpivV7xySc/QQhPNiaymsHQdQ06y8ZNuSeEMDrQxnmhSmm+vTHMypQW0fU926ZlOqkg+MRpwFJVNb11aF0glAcvEutC6sQmHjfrZVFhrGfoO7wP5FmOkgLvHUonhmxwCbjjjMcOFtMb2t6CUlhr8M6l
Xz4xHGQUBDEGlUePUhmTumA2mzCbzqjKX3wEu/d+VBmkdFiiIIb0dd9IySJf0eamWW5yYcnx9DfGst80xKnLjUKwPHqbX/vlv83br3+H/eUhSiiGGGmtozUOGTy7rkskNh/oz0+4vLyk323Ynjzj8ZPPePToc1btBlFUTJdHZIt9JuUMc3bOUNZcTvYp9/bYC56DIbJa79hJhYk9u+6KEwnGgSJyOJ9ApjmPoFSG3V3w6atHN1Sz/fmC1/ffZLXaYnzg/PKMba4RWYGMOcc6514948q2dKFlG2p0PWWe5azPz7HzGb4oWGYZl+enuGpCttijaEnMa31AlBkBi5rOcF2LXG/wuyviH/0xs7szmNXs3IAwkcIFbhclby0PKXWGIyKkpDOWuiiZlhP29x8QZGJv/+iTR+wGKEvFhweSXPYMmxUH1QwzDHRNi2kaQkx42KuTM4qiYLKYIbOcSOIiZ1mWCII20Qb7zZoyz0AKzs5OePjF55i+pcgV1ht2uxW/87tfsNlt+F/9b/7jn3vPfc14IYUp+piQi4Nx6YYcRfzODeRCIFSCI1dFTa4VWaYZfKCuKqbTOd4Hzs9fslld0vU9XSfY7Sxd2+DMQK7SWKGua6p6junaceHU0LQNzqbAxUwrFtMJi1n6te0G2r7He8d6kxZyg00ZZChJdJaoBUM/pI5dp4RiJQTOB3qbWAyZylhdXtDsNrx6dYIQgtXqkqurS+qqYjCGejahmFR4Af3pKafPn7HbJiBNJEW2v/Hmmyz2D+nbln7osSFQVo7FfEHwISkIrKOqJZdXK7z3zGYzLk5fouoKH2AwiQTmY8D5lE0Xgr+h/cvrznHsqLquQ2cFZZaCK6USTIoKR0r6oG9xSJYHh/RDGlG46BnMkOzbWifympDkRY4xBmc6jOko6xo7GLq+o6onlHlBbwayPKPpLKdXKxyO6TBjCJ5N17PeNbRdR/Qe9xWuAUSi92OEShqLDF8PZPo3fiX1giAwZqL5cfQqIkGOdt5ActONc5IEwUljsRDT/FaI+BVVgiBEwd7iPn/91/999pav4XvDq5entMZw5Qx972gur3h0+ghnPGHoyAI8evQ5u92a7WrNbn2B810K8cxzosrooscOBil7jqopZ0OPN+fYl0+4LDWnkyka0M5xMJ0jZ/s8W18Q+57Q7Xg5qfBCIZVmOSk4yo+5bDeIakHQmot2w8l2TUQzm0y57SZcra5oCkfwPS+rpHI5OrxF3K3ZIvB5iSnnVHdKOr8jaEVb1MzefJNd12Kip5gtME2LVzVqOie2V2jTIGQO+Qyf5bA9Z/KnX6B/6UN2rUX2PaW3HE7m9MNAVk4odI7Kcsoioy5m2GzGT086ZsuMddOzigV6mnPR7Hjy6pxhOKNUBXt304K7uVyxubokqwrOXrzi0Y8/5o1332VxfIvlkWa3atH1hOl8SjWbEBsoyxrft2RFjlQalGS2mLILPe998A6PH3/B97//Odu2TXFVX3N9PWUMQfCRoTM0sqPPDd539P2OUmfILGPoNJBALlrlKK2ZzjJmywMm1Qw7WKwZaLYt7eBYXTXstmk+i3d4qZnWU6p6SoTkYtltcMbinMM4i4iBxWzO0cE+eZax2ja03YBWkq4faLse55J1OCISLESAVqlYKZ3Rdcn2WuQZw2BxxjKta4SITCY1fd/z6NFDqrJkf7lkKKs0v93tEELQdx2DtRwc3wIpx9e0TfzdELAhEkNgs93gfWQ2n2ONx7nA/sEh0QcWyyX9YNlst1xdXDCtay5ivIlCsi7ZfrXQbAfD0I9Q5Bs9Uxod5FlBDOnYb4aBoqrxpIejG2eUdoz7dt5zfn5BWZT0QBYtJljariPLC4KPzOqKxSTSSsGu65KmN8vRumDXXtEPHVU9RUuFs56dc6x3G04uzpnOZ6g8p3WBi23L0CepXgzX0O/EKhh6w3bdsKo1dmjT7PMXfF0rEoQIXxbTEWROCITxIXeT/EsEef3GSrFTSaWQ
fk8aqyiKouKt13+dH3/2Ci9PGIwlBMluN/DctOwGw3FVUnQDJ+2GoduhI2TGsjk9YxAeigxsRGpFVU+QecVcl7S7K9p+zZOsSsUzK8m7LZc7i9vN8FmBJ/DUNSgliVqzt9zD7Da8Oj3HVTOoZ3jRsZrN0UXOcjLjuCjpPz5hm2koNNtuS1dIytcfcEcWvHzxGV4pXJFzsVtzdHyE7Fs2QrCbVEynh+QvPqPLc/rplOkkp+m2hHqOqRdUk4behNRVmy025AQnodAInZGVJd3zNWH2CLUouFdUZDJjL6+5N99HjypnIRQqnyLmd3ly3rC+OEe/PGcnYbe9wp5fMdvbZ6qukP0Z9++/TVZWZFkxjjUku8tLXjx9wmp1hVCKejZFzRXOOFar3Uj4M3S7HdYOlJMa13dcvTjn9OQlzS6ZTBaLKe+8+w6nZxs+++ILXrx88bX33NfMdNMsd+gGNpsd+IjOBC4MaWQgPSGCcwOxC4ToqSdLtJJMZzPmiyWZKjDKwAVsW8dqtWU1pjJUZUHbevKsZFpPGQZH27WYIXWvxhmscxACk7pCKM2uGzCbdhxuy4QTZJRrIXDeo7VGjRZfP+owzWCSPReB0hlFofEhgUWmoxxts15z8vIVt27fYvqd73Ln3j2ePXnCYrFgvVrRdElPvNtdjcGQOWVdo6Tgar1mGDratkmzZLFjubdM+Ehj0oxpOmG73fInf/on6KJkfXWFFFCUJQQw1mD6PsXGC5PSdmPaoaeOSo52VIkLiYplXQTpEH2PEJFNP6D2kpNPKU0/WJSSDH2L1FNQGpVJVIAQHM4atJR01jLJEww+D9B1DXlZU1cT8rykaxvapkFJxTA4hFRcXG7pux1OPSefTggqZxcjJqQZ7jVtLEbwMdK2Ay9fnhJMS11qnP+zADFnxFGm/y9FRITEphWjRhSuiy6JxQDjOCE99JJK4Uu6goie/cUbCLFkvdviFZyd7hi0Q/YuHVmj4/H6jNemUzIfWZ+fsR06nGnJM4nsOrJdSyglg4+Y7Yos27JXv85yMiE2O/phjTeWs6ogn83JsxkHRcndyR7nlxe8OD/F5zlxUnORSeZ37jEXgtIrFgIeX75gM+zQ5YzOW0JZcHDrDhPjaNeXrHSgzwoGr2Bac+uN92l9y8q22GnFK+e5d+9NXG9oMuh15P73fpn1qxesVca2mnDw3oecb3b4LKeWiuHFKaGaofcPsGZAtD1xcIiiIitz9G6DuOoopzl36pKj2QQZJME5dJ6n1OysoguC0+dPeBkEVgT81Zb7944pbI+ZOIS3aNdSZCUHt29TzqYolbIclYKXj5/w8PPPmdUT5vtLqsmErEj7p6YbaNuWAc92dYmIgXJWs2rXPH3yiEdPHtJvV9w63GPoG5bLBb/6q3+ef/uv/S1+/JMff+0t93OLrhIgfMT2lq7pE4kHSRSWqB1eStCCKBOYxfqUGVXVU4oiJxPAaGY4PV+x2bR0bU+MDiVikikFz35VkWlB03aYoWcYOnzwDM5hrCVTEpXltIPnan2JUoIYA5lKb9x8jFQ31iQNpBAoKZJm1TryqsJZS/AuLeF0MjpYn8YWZVVh+4Gf/vgnHBwe0H7RsL93wPHd22gluXX7mPV6RZGXBOs4PT3FeYuSiuACZVUghMRZx9V6jXWWSk7YtTuE0OwfHFJVJWfn50ghef70CcYMdM0OrRR1UbBtGoxxWDsgIihZpmKQ3vLjrClZmqXMUYI05nEGpRQmGpSWWOvZNluKPB837xlNt6MqCvqmISsmxGgpigIfND4Eorc4A71INkdJIMsL+m5HNsbyGK1w1qRstrSKT/PJ3tGbgdWmQ1Q1VAUh0wkBcx3ACMTg6TvP6dCzWa3QIuKN+69TJv//fsUo8OFLMKMaORGpURdfdrmCkSEhv5Lvlu75EBLIPHEVJLma8tmnH1MUGSfrFdNqzsXTZ7RKsCwn3PKel5srPj1/iSYyF45dt8Y7S2cDwhlUJsgRFAkl
BxFO12cJQKNyDmcL4mbDi90W7wN9VdE2O7qmY3F8m3vLJVpo5GD5PHZcekcuMxolkEPHfm/R65YwHdhNSh7HGlRNVhcczx7gT8/YaUvQkRWRTV1xtLzF/OQpOyUJWcHLznL3eIF5+hC32GfrJff356y2W4aspFxOqWJPW9bEoqT0jn6wlPMD3Ok5MiuJ6x2Z6ZlnBdPFEmMb9kLNUhfsZQWVLiE4Jrlif3aArQ758VVPiDlFofA2MJgGd3WW3jPescgVh9WE470lyzu3yasCb1NWXbdrePzoMS9Pz7j97TuUdZnUUFlGPZtST0pOT0+ZHu5RVAWZT2ahZug4257R5YaT7Ql7ywkXl1ecnl/yj//RP+H99z/iN//G3/za++1r2QshBKxxmN7gakuGBumAIXVeOlkeg4t4O+B3ES8EVT+gZU/TWh49fc75xRUEmxCM/YAKHoKlyFLS7On5Obu2p84V1lo2TYsZBqpcMZ3MEULhTIeSKRE4IkdfesIXRgKe61mno8hLRBRkmUqAlpiKgNQR4mgVjol5mhbNnrbfEc89WmWcnb7ik88+5p233+b2vTvs7+8lC2VZsphOOb+8IMsr3BitXZYFVVnRDxbv0gx2Nl2y2N8bk4Ydbdvw6IuH9H3PZnNJ8BElBbPZHLdZMww9g0nb69ToekS8ttgm+ZXUCinTa1FSEwWYsfDmKkeISNfsEGKKwJKXE0RM4BysIUaV/OTRUxQlfd8jlEQqhYiRTOnRoBIJHjbbFUVRIiLorMRbm5gEMbK/f4vzywt8dEhZJNRkjDfsAmIaLYzmLvwYdNmanmgN0f3iO92v2jNiTCxdMZoWECSt7nXRvU6HGIE2cnQBfsnkDcQoUEpzdbnj6vKcWVVwdbHiAkVUirbrGOQJB8s9Kq2wbY/1hsvgyWYTdNej6Fg7x5ApVJaRKcVBVtH1HbtmBzpDlDWXMZBNZ0z2D9GDpXCONnqulhWtGVBas8AzbdbU56/os4J8NiEMAy9mU4oHd8jzmjvFjKePfsKmaZH7NU4rThQUb7zFLSInj36YwD77klWjWD54l8nqBc9Xz0FF4pBxd3/Bc7PlqvO89uBNjuSas8qyqyPfO3zAH+5abFZxkN3i1ekKW1eIqyuCKsEarA5shsT8mLqMfGs5vF2jIizyjMPJHnVRYPUeL1xF5yz9y+fcmmi0ccxCw+UXT1npgvZsxa/+pT9HNSnYu32LvK6IIeKNwQ+GoevZbJK8VGqNsT3tbktUkigCMhPsri4opgWTvUXaney2XK2vOOtOsRPHu9/7kOZ5h7UFb37wHos//BGf/vQn/Mov//mvvee+Ng3YWUfX9fR2IIQKIQQ+DigZUsHVPkF/Qw4u2VKNsfRth7eBi8sdp6cnyODouoam6xiGARESrOZoUbMbBs6uVlRFwdV2x2a3pW9bcqUpixlByLRVd+bmOFhVNbu2JRNg7UAIAR+SiwrhgZgSU8scP9LGtJYoKRDBkxUZsRjhOc0uYR+lTGkMdcnv/8HvMwwDMQZ2uy13795hUpfkmeJSK27fvo3ziRsxmUw4Pz9nvlgQtztu3TomuJQMIfOC05evsIOjaRvOT19xtbri+NYtXr56RVWWibcQA9b0iakgzU3hY3RzCXzS2JJoZhAIzibB9qjg9yGMDitDs9uO4PIWoVJKRogS47bU2TIVdGvJs5zgzeioCxhr0VITgsUFz9C79Dnn8QLyrEh/jhkQUnDn7gNenT4jKIVTEhsTB1ZeL89ubF7XR/hACOlH9GfBkJYouOKmI0+aXW7mt4mlkBZjkpT8G4Mg6jTbjTFZiNM8N/2z0hmTySEidrxcXzJf7HE2WHxVcG82Z9vuODOGTCgeLA9ZrS85a1bEABmeaAd0CATjCd5hfeR04sknEypVM8kq5uWEz/stjUrg/7KumTmHOTshe/4Mt7ePIHLW9VzO58j795mLjOl2w0nXYPZmWB9AOJwYKN/5gImU+NUF53QYUeOUJpuV
3P/oe1gfObt4ijWwqZa8vXebplvThR3Pd44/9+Zd+qefM4QJd4YFLzY/4UDe5pu+xA1rqtIzm5R86/YD/qkUmCjRB0ushzLTdNuGXmmutOQwrzB9xzQWvH3rAC0EtdZYkfHT5xectytWmw0TBRsTeHNvynpl2M33+Lxx3Dm+D1lOoTNUWQIC23WYdocfOrJM8O1vv8+9u/tMJwXb1RUIxaTvEVrS9zukBDv0dK2iqEpEJhhiRzmZ8Je/99d57eg+/7f/63/CJ59/ylvffI+/9Xf+Lv/g//z3uTjffu0d97XqBecS39Z7T8ATYhpEqywBbcKIsFM6omSOtQEp0py363pOTs8I1hC8Y7PdYQZD3zXIGNF1RW8DJ5dXiBjpvaPpWjRQZCVVXROkYuh7mu0GKVMm2HQ6JcRAdBanJX3XEaUCpYnXEHIh8COEXdxYMAVt26O0RmUJaO6co2laFssli+mcU3PB+mo9RqtLXr08YX9vn08//ZT7D+5h+oHL1RWTesJ6k3CTOsuZzxdkWUGRDeR5zvQgLZeev3zBF4++oNA5WZlztbrCe8vQtRR5jhCSpuswY4csYsRYB0SU1KM0SSCQ5LoYX1MkjgQqZw15NUGr7Ca5ViuFc56maShKR1mmmbUbi4NpW/JqkngVhUwPMwSOa93quGAaIS+CpGIxzjDYgbKoKIua3vYUeUVWTejdl1KEGBLcJhljxwFJiCMQPC12BBH8zxK4fhGXiNeWXUbhfHpUfBWCE+HGGHHzscAY2S5uut1r5YIURRqjKYkLgU00GDMw2I5MTZgv9hguXmFsx7OuZbqYs4gWhp61s5jpFGKk8I7KGFo70JcFA6BjQOcanZfs9wO7wYB12E3D86IgP9qnzGt01xNPTrg6PqQXDmEDRlmYlZRxSXQ5oVAEY1hrmRya+YS7y31Mv6GrCyIDq6bnsq44Xk6oThw70VBGz0vT8s1f/ogffPq7FFnFYT9l2KwxjWezesiDtufxT77PJ/oL3nhwxJvLBXffPOZ+tqIqPYF9xHRHaFukzhFSkJcKNXiUUvi85rL3fFsGcpVSjn98avjJ5ZYqm3J7b0ahJKevHvMvLs44P3uJiYILPedbh0s2r54T7+2R5xkiePpuS99sMF2HUoHDW3OKyiOCTtmGbYMdegKe1fk5UTiCG1hdJBVPFJ55PeN//Hf/J6gQ+e3f+i1enZyjQs9v/6N/yC//hb/I3/3v/V1i//XL4a/V6Ubr8dEhSE9UKSNSS3KdSEc+WkyATEQyrShkigv31tHsekzXUWWKRyeX2GGg7zusGRLMIltyud7gjCEbb9BMapTW6CId6Zw1dG2LtZayyAgiEbT6MYTSDQM+gNQZduggejJdJjtrBOfdKHwPRB/Y9QNFFihLR1HkkCm6vsUOFc5bpBRMqpKISEaHq0t+95/9LnuLBfVkQlFV3Lpzh+gDg7HoXLE+v2QyqanqGT4GtFbE6Bis4dnTp6yvrjB2wMfkiVdC0TQNWZGhtGboWoahI3g/dlapU/celFAjOSXFnouY5roEB1FiXI+UGlmmEQFCJjxm8HgRsVZQ5nXC6w2GPC8Y+pYgxgIbAaFSQR712ClFLc28CZFBqhHeEum2W4J1zKYLiqygZ0DmBT4YfIzpaP4V66uIY5JEHLWsSqUqp9SfCZ5u4iuMRocIEIlS4GNEfqWQwjV+/FqlIFI2nIjEkBBiUURCEAhyAoKL1YqjxTGf7S5YzOdMm46XVUZjBm5XU1ah4VxD07XMlWZGxjoOhKwgyoAfwA6OfFLBYImVRCHZ7la048+9znJmjeVJqfGTij54BjcwzWF2sCB6T29yQmgwec7ZZI64/xrVbMHrPvDF4x8R7QR9KBHG89IX5PuHvF6XPHz1Cb6YISkQfcvtd76B7VuenH2CfO0IXMFSZExNyaOfvmLaT/j40x/zzEHWbsmKnNuHsDs55SQMtG+94M7f/Mv88u0HnMYpJrvDD0+vcDJD9C8QUkHc
EUXStp+3licnjzG756y4w8Xk21DNqYOhX13w4xevWDcXXIUkB93LZry7KMi3z9H5gr3lfYoqJ3qD7df0zZrdasvq8gprevp+R55NKKf7qEzjzMB2s2K3WScnqDMIIeg2GyZ1xWvHr3H14ozpYsbTp8+Z1ROMkfzw408Z2pY//xd/nftvvve199zXUMbAxxSKJzXoLKKziMoVOtPJ4REcPkQ8gagEhZqgZJq37rZbtJZcrdYpDz447DCk5VM9oRkMbT+Mce2AkukoHANKKEzXIISk7TtUjHgkWiiapsO6nhBTmCUqw4WA7TqKXJNneQJjeMdgIiqOlveR5JU6m0CuFQFN23ZkKrETppVDacXVZUqBuDw/xTpP9IFXL15yeHTMxeqSTKUZnzGG2WJJUZQ0bUtdlrR9R28tednRDwkn6dxooc2rUYQdGQZHJTKMSQT+4B1CJTfNDY9WJphJGOPpE0JS4d2AC27kAhtCD5nWKJ0hIGEZnRuVDx5vHHboRklTwO02KJ2zsZZJPcHZBJ6OBCZVhR8Lp3eGijIZMnxPiBE3GBq25EVOUVbkOktAoZE1IEZ6w01Ru3n4S8gEAjV+5hcvGYvXfxECruli/st5Ltdwm/G6kYVxbQkey/HNxyPTag8RSvrecHRUIs8t62HNfr5Abnc4bzjrd1TlhKx3ED3b7Q4zLSnnM/RqRWjW9ErSLOYorcmlYJ+M1hkutSO6DjHSxA73l9ztDReuw2uFG1o2GexmBWU15Y3ZAf1nj3moe5wFMShEFrhQitvvfhcZLKcvH9LOK0K+wPWKLkYO9JzdtELZFadnHed7c37puOb51XPeCoqjYZ9g5/zoB39Ee3HJ9GrLng+8f+917r79Ot/59jd5571vUmYZT18+43K9ZvfDT7lz3DN7/U3+SGmelaB8YKsLvpXD+8PAfLlEIyh9RPgVm82Ol6GnLC2LqzNif87vvrwgZjM2aoaY1uTCsF8vOAaWSnG0mDGbzxJd0LSYvqXZrnn5/CWXF5dstit224ZvvPdt7i72KCcTmvUq7YtGaaaPpIDbGJjPlpy/eokJlsdPHvL0xSmPnzxhNp/w6uSKZtdyenbOO+++z1/5H/y9n3vP/dyim9JeI3kORSHRWTJC6Eynbo4wJu5aovBkKgcp0AqccyOUPGDdQJlLzoaefhgSujDLUiaZGdLGXIAMaR7mAdsmlqi3FiUEUSYXWW8tQ9ehFSA1Pkq0lAxDCo8TKvEDhHCEIIhapMJ8fSwnJoLU2LVnOkuMWiWZ1DXDMJAXGVmuUUrgjMF6R7NZs2saNu0jnjx8xNtvv4m1juXeAlEofAycn59STSc8ff6SoWnYbpsxyTiiZPK+S+9RWVqIDd2AMQNt02CdIYxpEQmkHRDCI1U+btFT6GeWZ4BA64JoByIRBTg7JGtucJTVJC0IhUAIOdLefEqxdcl8ap2hLMdg8JAWQ845one4LCVS6CxPD0RrCSFwHahonGHMzUXnBXVZIU2D8+6GCREZO16ZuAsgk6NLytTtjrbaX/w1fr03oLFrvYi4+ayAf/VLHUdXxGuM5ZefssMAbsdivuTp9opDoXlelGzrguPLlvXmgs3BEqcURyZiL9eczwusUoRgub1/iLM9LQ4f0sJR6AxfV8yzKaFb40NgiJGt6flUphPUvekt4tWWx3GLETIZZGzHRb/i+I3XObh8SS88UVva1StO8zmiG3h9b48Fml2zJS+gtJ711pDPCj7Y3+NHT35APJyyPy+Zdpa7jcI8OuO3fvCY33jjPX765Dk8fsF0VvOdD9/nOx99yL0H93jw+pu4MGAHw/F0ylxpptOa3W5H/vIRR0Hwb0+mFHdvod64y6HO2HffZTopURJmy32sMfzgsyf8R//4n/Lq+7/Ld+/t8UQIWuvRdgvBUnUbDu7e5WBYsYyO+7f2eO2tu5R1jjcddremb7Zsr654+vgxF6stzjvms31u3XvA/q3b5HlOJlUKKsjy9DCVEhcs
ZVGyd3SEyhXPHz3F+siDO3eZVjVd33C4t+Dh40f8yacP+dHDR/xvv+aO+/mdrhIUlaaqNXklyUpBlivyXCPl9cxUEEVAKUlZTiizGmKG7Q1CBLq+py4L3OCwzhNIYBznPV3fJkuqENhxDqijoGt2aWMfE/4vQVoUfd/hXSqgQpZpRigl1ru0vNEqSc2swQY/ArlTVE7wAT9u1F0I9H2KXK4qyaQq8T75sZVM8eYuRnKlcIXAtA6Vac5PX/Hi1QnWOiaTREzbP9hnOpviTE/bDbw8PWW9WrPdbFit1ym6PTiyPGdS1bgY0DHgbZqbdl2T1A1DMkUkPqsYodnXc3N/o1SIowPqmpHgggMlCXZUTQSPEZJyOk/JBiSRv9KjLtnZNPsOybAQvGcwKS4+jp21cx4pQ1pOEhnMQF7XxOBQUiUNtR1SF9jsmE2mFHrLMOIyo0hdbJRjlxuul1ICVPo4SoL82pXCfztXhBuhLj9TP29K8M1vvRk5XH9cjB/78iNRQmt2ZOWM5uqMeTEhW61po2FWaHIhaNZX9HWFDILFpKZYbxgmGik0m8Ew2T/gwFr6Zo2aZGxE5JU3qOg5nu8x2w08dw1dDg6P9YYT1/L60SHLK7jwDVJGou053XlOxZb5rOSOVTx6+Bg/q6B2ZHmFXMPe0YKjgwesTs55ata4+YIsC3S7c/L+iloPLN2MRyZwZ/kaP/jj30LHwPe/eEj/+IT9THLnYIYZGv74T/+Izz//mIPDA8p6zofvf8jQdITgePUy6Z+LIk8nWJ1TrS9ZrTcMVclqMuHb3/4Ow2A5/elzurbjP/vsjLUoMYfHPBo8f3yxI+YTOmcIe3coVM03j464011wTw1859vvc3zvDlkmaNcXDJs1Xbtjt9lyeXXJydkanZfcv3/A3vEtqukMIQT1YsGxziiuruiGhEDN8Ji+5+T5U7Is5/zsjK7dobXg8HBBFEtevnrBbDZl1+y4vFp/7e3283W6uaSa5tTTkrLOycuMolDoLHWKMYVhoaIiL0pmswV1vsA7Rd9dobOUEFzkY1R42yOQlHlO07bEEFBKj359h5Kark+awxBjGgfkWYqUcRbvA9EZsnJKFDp1XyKkWS6ghST6iJWpSJGluaULMXFEgx9tv5pMZwy9QWtFVVXYvgelybTEOEupNbEsGZqWiKRtW7RK6RFD1/P5p59ycHjIF59+wmK5wHrHrmnBJ08+MdLstig1cgtCxIWEVuyNJTqHdx5je5z1o2tOj46oJEeKyJENIG6OsMEHqqJKSzMfEMGjYureg3eE6Mbu3CKigujx3iG0Tn+OTrHeXgmcGZCkJGEhUhpF9AElJZmSoyQqGUycSSYKHx1KqcQsDj7lyVnL8WyPPlh6M9wUoXR3pHnntQQLKRNn4M/EcCEV0f96xrjkjrrGPn71413bQTYh5IqFzliVkgfHt7ncrjlTksM332J6ekGbwaUUeAUH8wpxseZSBNalopGRsqy4O6nomh0rPxBRRJnSbxfTCbNW0PVbqEEFwXZ7xceuo95f8vpQsXr+jMuZwlUVspgwFAq1nPNgMmNje1zoaLorvhiuCGbOkRp4YzHjydnH1H7CsbjDrfw+h7c+4uL8jI8//xPcYAg6Z7Zu2Tw/4aIxzKuM470ZgogxhskkhcP2fc/J2Tkff/xTPvrwI8pM0bUdD157i9V6Tdf1vDw7gx/+kNfuPWAtJfsHC/5f/+AzPvn4c2LwqPkejyYPyPbuU80qTi9PmR8fUlc1u+0VbxQVD7LAW6JhllvePN7n8NY+1aRiaFYM2w3eWQSKoqzZ29vj5GzNbDLnwetvMF0uCSGMXBHBZDpLevkQUUXBanXJbr3iix/9kNlyj6PjQ/7hP/6HnLx8SV1PkJmmNx0XFxf03YCUX88T+blF9xpgU9UlZZnfQGNkluAsGWkMRoQyr6mrGXUxg6Botj1KK7QKQJKdtW1HnmWImIpq8AEfHdbYNA9TiWXQdSl8Ms/zlGTr
UieriGRlSZZnaQkkJabrsTbFgruYjrelSiYBQfJbp6DH6+NgyltTqkQpnULn6ioFO9pURLSUZEqNx73UcTrnaLs+JVE4C8B6tcI7S7Pd0TQ72qFHqZy8yGA8jmd5ftOJD2YgV2lR6LwnOIMzJn0vIuRKIqXGhcRNEKMZIsuu8+bSjNFYk/zgJMWAkiB9BC1HGFGgbzuqqiIiGIyhEKmIOpv+PKUcwXlEpmnbljiqHrz3adFZliigGHkM3trEhPB2nNimzb33HjsMiEwyr6YQAyak0Q7XOyggyOv5qCReM2t/pqf8xVwKlb5MkQA1/1VfkfhKcb3+OQgRU0y7uI5nT3N+mQm6Ls3cg/OcNismekIhoPEtISjeXs4QQ8+J244RTzn3jw9p1iukbYlFTOD5yZTJZMre1WU65RFou5bPy0ixP+c1V9O9OuGkMrhJjcMzDGtme8cs3W06t8bEnkIU2O2GT9odoiz54N5rxBfn/HiyJuqBKlN8++ADKgtHnaUWlt3lFf/k0/+cW7Mpb8+WZNsVt7ICffqK9SfPIHrqScmkzFJ6NdC2HW3XobSirEqqoqRrO370oz9hUs94cPcuXd/x8SefgpCsNlvqqmS9W1MWBT/4wWMe3H+NO3cecOvOAz5e7zh7vkL6K4IeCJM5ORmHwvPuvTu8VTgWOuCuXpLRU9c5Plr6dsPu9JRms0kPfaURQjOpJ+RZxmw6Y3l8mDTnzhG9R2cFkIBG/dAitKCa1PzB736MjJ6PHz7k3v17TGczyqymLAru3r3N4Ht+5/d/j832RYq2+prra9QLEqUUZVFQV9eFNx1DgxAgFTKmrKcir1EyI88LrIG8KMgzjVYCa1xK6JVpA9/bAe8NzdDgnSfTGd5ZvIfdLsWqq3HL3Q3dlzE2ErRKMTRCKKKxWNOPQ+9EzE/LjWQHljHNQVWW4UIk0xo/bqGVVGnGKQV937NtGkKArMggCswwoIRAS4HIFMYatmaVknNVkmsZ0yEidLtNStlVKgHcTXoPKqWxzuDsACFHCpnCIkPA+5CkRiGMUd9JbSFEcp5JCUorrE1LtbzIUEqPOtwUDy60TjHYIQA+2YdDShNOkUGaLMuw1iPFQFaWaF0gYzoVWCw+eISMtF1DmRcIJNYahIjUeY6UKi0cZZoNK6cwQw9SYZ0lk0n+NfQdLhoQEq2zZP31Pn2/BcgYR7dgHJMX+JdbxF/o9eUY4WdVCv/qb0zjBPmVJ8qX04bxoegHisUdtust9a3bLM9OeN43HO0dMTt9jpAdL4Li4NYxy+c9QTq6oef5vGB+5zY8fUowgbXreGIMqiw5ONpn9uKEqzBgKo2NnsG11IsFM/0am+aC1u7ICkUmBGfbNXGqOJjdZX818LK95ESuEVlNpmFW5sRpzfKyJQuGqTrk4bMn1Is9fundv8If/sk/52z3GKkjKvPsL6Z8WJWYiytC0/Ar33wzPcSDo+sGwphg4nygKAqkD6w3W+bTKVVZ0jQte8t9Xp1f8tnD52S5SjySuOHu3XvMJkmx8Cu/+itMJ3MuL9KyemUkQde4sqSe76M6i85L3qjh28cTjmiwqxOenL9EesPBwZJmtWEXPK8ePeLy/Bwpkz5/6DvOLi9omwZBGKljHbFIzZHOMoa+pes72maD95aoJMe3jvmdf/rb2GD54slDptM9/uKv/RrzacFqfcGf/ugxzjnmiwVl/vWM6J9bdL0N9K0nOMhUSZ6VZDppcxFqPNJGBBotylFgnyDOVVWg8xylM5q2o217vA/oInUW3nliACEUXdelhU2MZEqlo6eUWGOQSqXZY/BoVeBC+lwIITEfYkRLnR5QgNI53lkkqaONkQRrDindNelYU8R2P/ysttSO+liExBmDGC2wQgT6viPEiJaOTGXI6EfzQtKeJnOoSPrh4IlSJeWAc8m+S0jpxVKixw7Rx5BO32OhFaQFmlIZXhqkFBRFgfMpwFNpddMcxgBZVmCHDhciIUpKlRF1ivFJ
sq2QXIPj6w12oKySrI2gUDrHOUemFIPpcTJpsHOVXn/QmlznoDKkiHTGoGQyT8jx5+SsRSLItUa7hN6MjA/8UV+cjhiBXEnKKqOoClSm+NIk/Iu7bjCMQMqF/69WVYy4m690uuJnul8xvidk9DjbE7xh5bZMreEqF7zAciQUbddwlklWXcatusadvuRsotl2ay5cw2uHE4ZXV/gqI2YSryN9FiiWc0rjGOjQGSgluNp1XJQTpgdv8qDZ0O7WPGte4uoCoUr2RM69W29x/mxNHRrmecE0q7hoe6rbh7xXfY+TJw952Vt29hHy9CFueZc7s0NU7Hnz1hF36gVit+FQwvPtmmbdEO2AGSzTec1sOsUMA+thQOuMxXKOtZbe9Bhj0z4iBC4vz3n4+ITlYkkIjsurFQcHhzTNjjffuM/Zqxc8f/KUzabDWcPt+3fYXVnKyS2y/TsEoSmNY1rlvHNnSdm9wnVnPP/sJzx9+pKjg32CcTTrHW3b8uTpCx4+fMjQdcynE+o6Y71N1vxr+uH6as1QWg6O9vDe0rYJtiWFZLl/wKeffczl2QnLxYyirviTH/6QVjS8ePWE33v8GSenZ1jjkVGyN5//a/URP7foOhdZrRpOz3bcOhpYLCeQj0F7UiBlkqyImIqGsZZM9kiVJFF5XiC1HnODkrwmRI8SGmccMQSsSWm3IXjyPEerNKt11qKz7Ib2k+cFQiRylkbhnMG6lPQrhUiLoRDReeIJOBcI3qelTgg3eMpMpaJvnU+x1kIyDI6IINPpc8OIm+yto9Aa53oiqTtUIiJVytDyI1/2WkGQSYhR4YkoAdFbuJYd4RE66YW9S6/dOQ/BJ6K986lDlGluK2KK3cmKAukdUiqUypLkTSli9GilyPMKZ3u0TrwJISWaSJFpcp2lr6uoiMEmKZlW6KgxIRAIOAeZ0oQ8/cySdu/Lh4BUafQhlaaskuogAsZZirLGWUc/DFR6yqyo2IYB50eDRgxjGINAasWsKthb1BwfzJhP6j8TRTdd4ksBwlcTJL/6O/4l2pgUEilFen2SGziUEJKIodA9dV2glaQvFLUIRNfR54IsFmhhkTga4akP9pBDS64k06pi1bXUD26zNA6l07hitVvT5pp6UfKmzSkM/LR5RZ9noPdQpuH4+C2uguClfcRUtuyXr7Es99nulXxj/8/x4icGHWoeX/Rsz/4EROCdvdtMqz368yeUynBU7fPmcp/bkwnnK8glLJwlBkuZZ4SDfZ50Qyq0umdSlUgiRZ5TVQVFVbJardk1LXLkNA9D+v1nFxt2bUuIDi01i8Uei/mCe3fuoqRmuTxk6AcmkzlCeH766BGvzIxtHzg+uA9ljpotKNxA2F7hbcPJky/4/ItHHN26w/sffZOj+/cILpH9msFgQiRqjdeK1gaMgzt3HnD3jbfRRUXbDCiVpV3N0LHdrGm2O8qyTJlzJqmynDOcPX6FIrBanfHbv/0CKQWr1RprHXt7+6mxU/9NzREx4K3jcr3m/GLNclGRF9MkfZJyjCXRN7pL7zxD6BAyof2KomA6nWPdKyKRpm0g0xAMzps0UghpTqWVujEzRACV4sevwd06y0AkqLZWAtN3KKkgpNBGFx0IhUDivcWG5IK7JnMlD70iBo91gXYw5EokRi1gvRuXP2J806XZqL2OMr9WDhAptCZEATo5ta6zsGSIKElKH7hmrYqkWSZGvItkWY4dHWDee0Jw6BE3p1VCRCKTPjptyyp0PnI8hUArTV7UGJcwlXlVo0RKXFbCE6NBq4yyKMjKEkEkz3PwqYNTMsFdsqzAxICQFucdWmucD2RZypRDJYWBUjrlm+U5OUkKFmJEdB0iBvIiR2pN06ypqoqpzrl0Lc4nxnKIAYGgzDJm05J7+1Nev3PIrK7+jKzSrrvdL5XFN1/WzRRE/Ex3e910CJHYC1Kkh50cfwnAmitCnJDbwBPZc5ynwMbP6JlPat7yOb3zPOlWSUpWTzmWFV80J2xM
xxWeB/M97raCZ8MJ3l8Q9JRezJjfeZN846ivzkCeUwnJ7eotvrAN5et3eevCITaXdMOMP33yCPv4Y8pC8cuLN3n16gxLC26DyjVdt+ad+Wu0fp9b+zWLYs6dukb7jj0RWRLw/QbvDeC5c7RPVRQ0XceV6XDOkRfJWXn1dENWGvI8Q0pFcI711Yr79++xt3/IT3//j6gnJVrn7C8O+eXv/jn2Dve5d/c22805n33+CU8ePWFveYQxhkHmqL1jMl3Sd1v2FvsYE8kycASarmG1XjGbzPjWt7/N/bffJC8Kzl+95Oz0lLPzC9abLYPtaU3HpKxZTJd84/0P2Ds6piin6EqSZTD0A6bv2a7WSdEz9MntqTOmsxnvv/8Bn3/2KR+89z6D6dluNxhnefL0Ga9OTxlMj1QCpfOvvd9+vk5XC/KJRGeBwbXsmh3VJEdn1RjgN4rco0hSLGdTlyYlWpeURcGkniKESlbi4HDWYcftehobBKQaRxUx3AQn+uBHIE2SR0kBxvYIIekai3cOleubzXjKCJSjWcNDSIs6LTXBJckXMdx0gzEmi7NSqXPzLhVvKcCHgOk75GhEwIT0WBHgQqQoUhaVtBJrUoFUUuEIyckv5JgQnGYeOstxw5AA3jEyWEuWZaPSlaQYUBqlc4ZuS5ZVeJXhbBqvECNy7KZDDGnkIYtkXvABXacFlkhBcEzrCdO6SqmuQlBqRTWd0Pc9UQhkpohkhBjIrxeMBGIAawbKLJ04hICqrJOtVyuEkOyVZXKeIcbZukhEs5hUDGWRI0WPjwHvk7wtEskETHPNoipZlhXzquIrmZC/4CsCPj0gUQQRf7bgXkvJrgusEDd/F2N3e93lJjAO5BomdYmiZMGKZV3juoEqOGI+MBP7+PaKvO4IuWe+OCTbQaUMth7Iq5rFbMZQaubNBpllyLpClfs87DqmiylvzT6kuXxIXt/i4cUVG/8SoRTv791mFiUvdiuC21LkkipWhGi4vdhn155RFlPuThe8tTjmMKtAHWF9jxy2nLWv6NZXlIOjms4pMs1q1/D05StAsZgtuPfgHmIeMKEDb4hB8O779zk5aYjeM52U7JqW4CMXlys++/wpk2mK9bp36z6//hf+AvWkZrGcUxYZz9crNlcrPvrwA/b292najsbC9tJzOfTMippZWbIyLYyju643NJ3ljTfe4O6D+0wXc7ZXV5y+fMXnX3zBi5OXPH/xfEwAr3nj/uv86i99yGtvv8Fifw+hMmL0SBEY+o7L05PEYSkKXr14wfmrl7z73vsc3Trmpz/+U/b2FpyeveLoYI/XHtzh4eNH3Lt7TFmXvDo5paoKsuzrre0/t+iWtWaxLFnuFRS1AOmTY8MDOoFXZNQQRqF7SJ2qt4ESQV3W9HlOURRpgSJVsgIPPdGncDgpBFme451DIFP+11hMMp0TQ6Aoy5TVBVhrcdak5ZtPR/MQ3AjC8QQxdpBjeOD1sizPs5TYKtP23FpHEAEpdNpaxxQxo7NxZCJVKjpFTmeSmoKY8H0hQCAkg4UaZ8wx/TtyTGyIITn0Mq3IlURkKTHYj51fpnOs7RBComUihw0j0EeqtMC0No5vbjXKtBy5LhGCm7mqtRakwtsOGzxVVXF4sI9Wmt56ohTUVcG0moxGCUHUCTqPt4hYpNw5PFiXwiizjLqeUOQZs8kEIVWKSiI5EQ8WkvOry/Tz8Ml4oVWSB6oIudR01oy3RHoNwSVtb9u17NqW9Lz8s9HpXl9fltd/9TMwjtWuO93rAvuV7vfm84wxR0OHzyreWt4mKxYMVeS9IVDP9jlfBaZVyQdkTKpDnqwDV1XkYPIG02FNNdnni/M1RkdeX97loFmgsoKfXO1oaVl1W/zsgNeL1znf9Xhr0DKQy0TNunV0TLsRDHKCF45M5+xVEw4XM6amxseBWgmO65yuvcQ1L+n7EwbX4oNBe836XHD54pK6yNl2PYNxZFqw3jbcvnuPxdGMHz3/NH1vhKTUCxbv
fpPL00u0sxQonj07oewih3uHNLsd33j7bf76X/2rRAHD0PHjHzxiu91SFpqqKnny5Amff/GQbbNFKk23eI2+gzN+RF0V7B/eZoage/kZzauXLOspb3zjHaZ7S/CB1dk5r16+ZLVe07Qdw+CY1DV3jo75lV/+Fd7/5occ3DpGZRnGWEL0DMPA1fkZF69eIbI0Hlws9iBGPv7kR7x4+YQYI5eXl/jgado1d2/d4vTVS7KiZDGb4N0eu2Z7k7H3866fX3QnGfNFxf5+zWJRMlnUzJdz8qwkRD96ztUobUpHTx9D0p+KgTwvUUpQVyXepyOcNT1iPHozCs2DT52iDQ4lJCFI1JhHlRX5qEpI3a93No0LpLrRtYYEDEjLr7HjlqTcqzQSUGkuE9MIwfs0JgiRFEFEmglnKhIElGWBdam411pRFyV9NwABSVoeplxLCTLgXVpaKTU+5WJA6VQoM60TP0HFtO0d1RrXb1BB2upbH8lkIDAyFkaThBg3/llRMBhD8ClPTpA8/0qno5ztG4iS/fmS5WyaukiVZsF5XjI4R55luAhVWWE9mE6jpcdHgVIZKkKMgTrPKaRiVk/J8py6mqQZWdshlaTIIov5nNVmg9LgxpOF8x4RAvNiQu/s2O2nNdUweLbrljNA+MB6XaG/nrf038KVVqDcUG++VCZ81QMRR/DPTWG97nIZf0bjCep6DIEA7xrC4MimB+gwUB8sOVbv0a0atpVFiIx99QZuNeBjgxOaMDngQXnEedOSlRYbGja+4+7BbYbLhuVkxlQ5ikwwqwqOj24x7Tpys8EJS12VHJUz3pvfZr5eYmuJjw37Vc296Zx+u0NvOk7OzzB2xaPTLYNpidECCWoVSTsQshzXBaxMqR/ep72D9ZG2Gzi4e4R6oXHR4vHsYk8rPP18j+7qCl0KJgf7yNbw+v3X+JXvfYflfMZiOeFf/IvvY63n/r373L99j6zQ/PEP/4jTiwuyPCfLc66uztmpJWp2hxgN6/U5928ds4yO6BusaXjtzbsc3TokyySbq0suzk5Yr1fsmi3OefYO9nhwa5/vfec7fOvbH7A4PCAvS6xp6XYrmt2O9dWKdtPQdz1RK7rLK7qh5eT0BT/44z/EWsdiNqMoUxbh/6+9M2uOLLuu83eGO2Ym5qGAGru62JxFihrcomzRCj9Zln6A/5F/g5/87DdHOPxgPzlEilIExebU3dVzVQGowoycbuYdz/HDPplA0XKXI2RRDBO7AggkMvPmrcTJffdZe+21Li/O2d/dZWtji5cnx/R6PbIkYjrtcM2bV9yXrvoktfR7YiS4MshZGayyvrmDxTKdjUX5Xwsm5hAcU2476qagnFs0DmMUaRKLpkCQYHRO1Kw65/AIZWqBJSoEW/Qo0RBoOlzXSbLzHmNCdWGEaqaNDnY3DhNJIu86J5CF61BaB4qWou5arIe2dcQaWkAhDa04i8WGp21D8rM0bcNgMGA4mdLUHbENjhRBDKbrvMj7eXkfjDFhuy7iLlZrfCfKSUBIzoY2cH01Ajl0zmGI5CKmDHgxyBP4RS9Hm6UhKUlf+5Y06YvJphNzzCiWY1hrSNAYZYWN0IFNLMZJk8sHrNcpaMuKLO3RaqH4aeXI0kQgk6aj0o0srDyjLksKoOoSVgYrzGYzYpPgvMIBk2KC9zK63QXIQpqGMO0KfNUwvJqKbcpvG2VsOcShXmucLZLozfNdqK8pdZ2I1c3nIQI41jZoN2VlZZ0stuSbm9xZ2SYrhpS6JY8zsqwirgrOaUjSlNVswOasZrUumPo5eZqxmq2yt/eYnWLM2M1JEs8g7/Owv8ValHI2n9Cpmrabk2pFpuZsVjNmTcXV6CUviyEH9Zy2ndH5moWNfOc6fEi0Ik3ZiVaH7zC5wVWaOsBwzjvR/7CGeVmRlxGJ7dE0Q0DWdpL2iNM+ebZG3IGLXvLN/X3+/E/eZTS8xLWiaftgb5fWOc5OXrHxzjskaUxZzdHWsrG5ycXFGcPh
EMwF2ua0ZcGMmiaLSDcH5KZkcP8O+w/vEqcRzbzg6vSY8eiCeTmjrhu0MWRxwjtPnvDkyWPWN9awEVTzCaOrc06PX3Fy9Iqr4YQ4TumvrTErCsaTEc+efc7R8Uvu7NwnspamFf/Bl8fHzIspx6dnPLi3z9GrIw4PnmGtYaWXYe0/djjCaOJYyv4sy8l6Kf1+n1iltK1jWo3xsmMXupSSpItRtHXLbD6mnnuiyLKxvsrzg1cYa3FNF+hbYe7fe5q2wmgV3A40SZpSl7Kt977BIaaMItWowXe0jVC+nBdcdHG/mB92YbirIzJWlLuUoKjGaNrOo42l6jqhlDUdeQ/xxXIuGDVa6q7DOkcaR3R1jVZgrVjBixKBRitH04prrtEabxUuiH0bpem0DxNmfknCr9s2VLPyyXWuk+kzFM41eAXWClshsglaS7LUyLG6dsE4Fp1a19ToNMUGNoc1hrp1RLFFa0MWyeuqzqFNRGQcUZrSOEeSiBaGthbamiy22MjS7+VEUUJVNxSzmdj3DFZJe330cMR0pumaTnBnbcTg0miuyik9F+G7ls45lBPsvuyE00knWsALyOifNdSNr0V1GwY3lvoLyqP1gqVwLXBOqHZ1oJld03VD4vXgTIfxBZ0riElJaNm+u8dWvcGwKvCRYef+gHvTkuPZhNI3ZEnC3fsDHpYVV+UEExnSJGLbJtxrUobzgun8kmp+wvnoMy6amrKuKOu5MG26Bu87+Sw5t/z8eN/JFw7nXRhyWSRaF1y/OzrX4r0myTRr2S5Xh1ehqBIyeRLH0mfoYgbZKkV9hVMKpQ3bK+tEugfjKXZ4xd2vPOJrb79NFhsKq3n4+G3KYsb52QlJpOj3LLNyzAcfP+P87IRxMWc4vGA2K6RBPrmki3PaqEd9OmcaK9pql621lK29XdJ+ynw2pRgOubo4oawKmq6h6WT6dH11jZ2tbfr9PuCZT4ZMR2NOX53w4uCQ86shnVOsbiTE3jOdz5lMx1R1SRwlPLh7j93dbV4dH/LJp5+SZzmj0YQPPv0cZRS9fs7ockY9m9Pf3JDBqDfEl48BW0Mcx2Rpn36+Shr3sZElsRlpmjGrJnjXgpIGlAIa5fBaHFarsmAyqdBB9tE5R2wtVVejUVIhyoQASknD3DtpFJkAP/jOSVXpveChWovTr3N41wb8MxYJRy1eSs45EVVBruRWK5S2UokqqSq10tR1TRQnVE1F6zrqxqMRCCKKLEp7tNdhoSWUpsThaVonladWeDq6FpQRR2AF0ApNJYpilHfY8OaIyLpocGnvUdrI1lx74Sz70ORzIsTTOidJ1Bhwgk93bRfWfof2IkuZpDldI5VJL83RRnYJNjThFuNUbdvgHdisT9c2xDahtg3O1SgFadajmDR4bdFRQt04kjQi7yXMZlPK8RVdt0IUR/R6+RKqqeqKPO9TNiL63ilNUc6JvaL1ikAzll2Bc7jOQwe+/W3opInkJIuCYZFwF9CPRpgKxosT8M1qWCu0udlI+/UKWf6mXddwdfmc6egV0XHGaHefXjYQXD6ynEfn0LRkTUPsHfWs5PlJgQpKea2rOC9nPG9LdKhIF3zwa9bFDQL3Qk7TBWF1HISE67z8jBcjThf43M47KT7Cz0ppNla2+YPHf8qvmp/z9JP3SSLhr0zGV3TtnN3dLdFakZILi2ctMtTjOb2u5Xvf+RZb68LZ1UYkWd//+Xt0najtKWMomoof/4//RjGd0euJBkKvnxPFhqpq8C3Q1AwGm6ylOXE3Z5WGjbUd0kGPrusYnZxzevyS8+EF46KgrBuUNlil2N3cJImFuz+fjpgMp5ydnPHy+JSjkzMaB1GSsKKkH7WxucnHH/1KpFcjQ5ZnTEdDiskIY0Arx2Q65+SsABx3t9dZXelTVyXFZPKPHwOO45hBf421wRZZuo7VyVKC0RgR2F5YDi8qBO3Ba+FlytW0I04sKsjedW2LNsgAgnfQiTW31QqNxxuLCQs+SRM80pho
XIf3i+27x6BofYdrW/KsB85jopi6rsB7jInC4rFoFDZMQvkwURTZiKZtiOOYoijAWJqmEXcJJW7CSjkI+FYSx6RJIuOOTlTCrBHYoguwggkcYBfEcmIb0XW1COh0AnOoG+paXoPrkPdGy1CJNjY0AeVDLUyLFodBG03bSrXhnaP1XiQvQ9Vsw3hx50F3cuGzSni/xkRY42l9J41CGxG1YtnjvQjfdK2jP1jHtTUOQ9W22Kom72X0+wPG4xHFeEici8lflvVo2442XPw2+1uMjKHzsNltcHJ5HMwn/VKhTFTevIwF/19wGv+p4zpJAsrJ1FygLiwgh4UYjoz93vRMu8Zzg1vl8rg3RXAAvGvoaPG+5PBgCF7haQMmHKpplisDi8JoWYviTSGi8IthE+Xdjdk5SZyLR0n16pe3JJFKwnVhJH6hcexxognsHS64R3slqnh7mw/55pPvUQ89Tz/5gLIsMWE832hNWzX4RmFURKIydrL73FUZb72zx+b6gOnogqauiOOIrprz4MFDjFF8/slHTGZTTl6ecXh4iOtga2uHza1tPv70Y9q2pq5L5vOKsnIYm9Ab9FmLPHcHfXbXB/RW+pgoZnhxysHRIaenp4ymYy6GQy4nFc7B1uYmm2srKOWYTyaMzk8ZXU05Pb/k1fkVoyLs3uKIWTlnNp/R7+fcubNHFMWcXZzywYfv82/+7E8Zji8ZDSdM5/Pl+z0aTthe7ZFkEaurq3gMo0nxxjX3pUm3319jkK+TRn0inaKcpZp16E7kGEU5yi0rABAdAI8kBROJDm+kNFFkAke0xTvoXIMKilmdD8R+tRj/FYcIg0y4NVpBIwsmtpa2a1A2wjeN6Ce0jWydteCwnZIttG+F0oST55ooRoXq0bcOEycYa5caB3UXYA8UsUbMLus2uCko4iSGphH/MAdoFdyGLdZI0nPe09Q1cRRUu9D44KeFMkAXZBcJFWCHVhoTGpJSPUsn3QQTSHGQkPPSyuNa4UE73+G1R9dV2CmILgMKsjgBrdBWB1lGJ8MXKGZlQRzn5P0eqpBk37WtNBqx2Dii7Twm0kznczrvWe33WFvfZFZMieOIqqqYtp04K8cJV+Mr2s6xtrEtibhtmPVWcPMpVVXR+QZthb6mtfxNAvj9zxwOMMsNAeFCK1k3vO/qmoOL7kT3+UZlexNekN+9nnABSbIufDb8Aha6duZYSPYuGDcEOqWIHYXCBqnGvb95Ebu+LUlZdk2EpqgPO0Tv5Xg+DML7oMUsZdG1eNGiX5BGPe7vPGRjZYu8nzPo9ZhOp0RxxPpanzyJiazm7upDIlJ2V3a5s7rH/XsPWRn0KIop/cE6vik5ODhEK8W+MZxeXnB6dsL7Hz1lNB4zWFnhwcNHjEZXfPbZpxTTCcV0QhxH9JKUNAJjFbtZxN5Kxlt399je2yXOMspixtnJKQevjrkaTTg9v+T86oqqbdkYrAaJSIerSy7O5gxHlxRFw9VoTlmLSa02hjQVzvhHnzzl8OgZX33nq7z7L77PD//6r2nmU05Pjnlwb5+2Ufzq6VPybIrzJZNxydWwZHdrl3I2BeVI88EbV9yXJt1enJHoBI0Bp+kamE9LmrKlceVS6Frsp1XQ/ZDRTxWq3SRJ6GqwofnlnA8CNh5thUZlTUxoM6GUIjZGeKtBj7U2hhLCFbpbajFoJQlKnH7lQ2ysoasaacQ52U6rgIEa4+m6ljiOqZ0MBNRlidWapvVyEeCafxkZg7VWxIyNoW4txnUYBY0LmFj48DlY0tWU0lgbYw10WBbanF1o6ikf8q8TmTvCdJNC0bU1cZyirRWHCBBGR9cSpTldW4sgetuBkmZb41o88n8T+pwVBofREJkweOLwKqXxwjTQXYuxCUlsqZsapw2urmh8Sd4bUJcFYIjilLrpmNcdvSyi1xc1KWuNTOMEXMigOLs4oa5rVlbWQWuKekbVVFT1nCyL6aUxg15MnFjiWBGb3wb2wo3qFJma1Fovf7OA
Hpb9tJv83ZBZX5d7fP2YchRJmN6zHB0PT1w+n8Uxg7CODzCHCzDAopG3qFL94vleLhKL5q3zPmCzAat1NzDbRaW7+Ny6kHKX+K6EVpbcrtPNHc+efcLZ+Uu890JPxNM2DaXyjK6uWF3fZie7x/3NfVZ7fcrZlHkxomkczyYT7m5v8vDBI07Pj/nRj3/I8fEJKMXdew+xp8cUxZjnLz6nrWrG46lcEryXSpqG9bUN8nyVzSTna2895v69fXr9jPm84ODgBR9+9JSLyyuuJjMuhhPGxQzvwazJxc9G0vQejcZcDUdMipJ5DU4Z4igmz3PKpiLLMtZWB/zq/RF/83d/y3g04ltf/wbWKD5/9pTLqwseP/oGf/5nP+Do5RE//dnPODo+5+BsyMMH97izd48XB8+ZzqZvXHFfvuq1Wja8XNgit7pDUdO6OS0tWotpoTOahWqqV+BD5WYjjfKefi/BKpg1wdYlyOJ559GRiKYopUWMJYqIogirFU3bSgNJaYEijHBjnZPk5vGBCbGQP9SIIhdoK9sgGyW0TUkY8RJxcxTKWpqqDD5XksC1MSRRJJoOcUTqxLNMLg6GqgJrLHQ1RktCW0g5isIXWC0MB9c0GKPROhYLHe/pAl9Ca710Ilbhm1xEJAFH1iyrd+8Fg9Z40ahwnWRt57DBvbdrG7qmpq4qYmvpEJ8533lMJHBJ50XFLY4i2q4l6mQK0PmOuq6xUUrTiOLbysoaRTGl6xqyLKGYFzgP/V5OEsmGN0kTkansQoXVtRTFBKVlx72zsYOJY9QVaNWysdZna2PA2iBjZdAjjd88vfNPHSokN9mBSAV7He4aYwjbcXm0bOFVEMMPR/o12MEvITe1qJyDBsV1hX/tNrE8HwIMH37rcOils7J67UELo0/8EkggmLdJQ8wHzjjXSRcXtEFCAna+XXLaCQXUar7Nn3znX7OV7fL046d8/tmnGG1ES7ltqZuGOEopq5Y7acp2PiDPMi5HY2bljCjSFJOC7/3+d0njlI8+/CWvTk84ODohyzLSLOfgxXPqtpbJrrqirTviJCIzhqKY4TtPnKWkSc6g1+crD+6zv79D1suoq5qjFy/4+c/f47PPnzOeTnHKMC9rvPMM+n1p9rlGsFblmM9LppMZRdnQeYOymnyQMVhdoaprjg4PGI4u+fZ3fp+f/OQnPP34U4yOeHD/DmeXV5yenfLy+Jx7dx/y6NFb/MH3vsfG4SG/ev8pzw6P2drZZe/eQ46ODt645r7cgr2TPxpOy7SSd7jG4bsG50uUdejECsXEaXRApbQ3QXdVBZGWjjSPiWKLn8qQhCxCh7Gx0KKsDWtJNBQWHfimbYNlDYF25cO+XERkrE3DovFYLZ1kayNQGmu8jMcaS1OFChxH66QZpZxQ1jonydr7hXiNeH3NAi9VYDuhvc2riiiOqZpaOvORcJQ1wvVFGfJ+nzSN6KqKKLagojBg0DAeyfbHWmmHOyV0Mp0YOVcPaZKitKb14ibReU+SZsuxWuNkqqyuG0xbB/5yK+4RTmbdVXCFUFoU17KAYTvvqBsZFe6ihKqqhEcsZRjWaqr5lDxZJ0+zUClBHGd0XSMCOTaRBk/bysj1wvPOaGbFmKZrWFlZw5iIjZUNnG9wbs7+9iZ399bZ2Vgjz1NZG78VIbXogqd7fRuWezelbzw2VL/avcZieL3CVUssGHXtPedv8NNvGmCqACkEBHz5yniCRkVI2ItDI2pzhKTt6ZZwAt4H2tc1DcwjuL0k3EUS7kLV625aiLI12OS773yHB7tPWF/Z4P0P3xMtaGtwTgZilI2JkpzdjS16ecpoUuBwlPOKYtKwv7fFF599TNc5hqMh58MxxkY8evyY8/NTLi/PqWpZu23XCs+/6bBGE0cRq6vrbG/tstpf4+H9R+ztbxPnCY1rOT064ue/+AU/++X7XA4nJElCnKQkSUIvH7A66JGnMW3bcH5+TprElGXFvKqpGsk1VhviNBNjhabj9PiU
8WTMvXv3efTgEXU5Z/fODlGWgY5QOmU8mfDpF884OHpJ3su5s7fPn777x7z3s/d49uKQr37lCRsbO29cbeqmw+lt3MbvWtz/TuxvVqmi96sDdCOYn9JacHWtgvCQMEqMMTcm0USDRAdNBr2Yt1gkXgFfhTnA65jvcsAiJFutlLB3lnjvIjELt90vat7g2OyXdDAXLgkLKEHU9ZbKd+5msnWBRhYgCU8Yc455uPaE33/0Lusbezx78Zy/+fH/pJjNhJ3UNOxsb7O/ucUPvv8uW1trXJ6LG8OzF0fcu7dPMRkymxWANLSfHxwyKuasr69zfnFBVc3QwLwSt5UkjVDKMplMWB/k5FnOvbv32N/d5/Gjt9ne2SZOE6Io4vTsFU8/+oRfffAxzw5f0nSOjbU1bBRTty3rK2vcvbPNIE9o5lORIUV4+R6N05Y46ZH0BqxsbuPbjq6aU5YzVlbXuBxeUFUFTV2TxDKJmec9Ef8aDZnPZ2gj3ojjqyu++c1vc3J2znA45PGjh9y7e4//8B//05d2iH8bQLXbuI1/xgiVopdKVqnQ1VdC7VMEZwkVJtd+DR5YYrnev/YY72/ct4ARfPg5wAWLu/XiNNQCOfA4f2NIeon9Xp/zoqJdUMEWxZMk0vYaPljoNbuF8egCKQl9CPz1Py9MhGre8dmnz0iyK07PzzA64fHDRzRVyeGr57x9/z5/9HtfY6VnuLo4ppfn9FPDkwd3GY2HlEVBnudMJhNeHR9RzOaksexo0ZooFnhpdnRMVc3pXE05n+FRtJlmZ+cOd/f2efTgIVs765hEds4Hn33BR598wrOXp4yKOWvrGzRNi1KaOE4w2pKnKYNexvbaKk2ecHp2xnQ2p21lFxdZi4li8tVNotU9dlYztnuWD95/j/d++VP6/R6vXh2RZRl37uxwePiFyBhoTV13WBuz0uvhfcvq6gpHRwckWU5dNRTTkquLyRtX3G3SvY3f8bgJLQSHX7UY2nhdvOSao3v9XH8jed483iJ5LgHa66NcP/LXGnA3H+ZUGL1ZVLksku7iIiF2VotzuMlgcAHjdWHQx3svNEPvWRLN/JJ7JM8P7BiDJVIxcZzw7vf/jCTrc356Ti9K+fjDX9JL4F/+4beJjeLi7JzxeMru7i6+azm/OKGYzQHNxcUl89mcPF1hY2uf8XTM+eUpZSnuMVpH3Lv3gCg+Y1qMGY8roihme3uH7e1ddu/codfPRWmw8Ry/fMXTDz/i2aszxpMpNk5I0pyNOKOua5I4IYlj8jQhiZPw3hqUsgIfhsa+jSLywRrRygZtvsbMxpxcHfLq5IjPP/uMfr+HUoavfe1rHB29oJjNAM/Z+aX0P0KTsq4bsiRlf3+X6fkE7RVvPXjM47fefuOKu026t/E7Hh6vrhOl9CSC2SQ+TD9K8r1Zaf46/itEKx8qXI9wu7jO51w3a5UX6qBePEbuXibwRW5dYsCLQ7jF6wRnZu/R0rUOZLEFhutDldstK+ww+ClJV0lSl2QtTVF5AY3qLOW05fnFc9of/XfefvCA/mCfwdYeV1enfP8Pv4nRHZcXF0DM5uYOvXzAxcU5p8fHaBPz5O23UCpia2uPtc1tzocj/v4Xv2BYtHz3yX0iq+j1Y375yw9YW1+hbmasrGRoJYJKcZJgbcTl1ZA4jhlPhnzx/IAXr045PjlnPq9Y39pCR47Ow9raBoN+P4hLyd9uOpszmxUUVRkonxrnwcY5yeo6Ls7pbMppp2kPTzm7vODu/j2+/uQrNDg+/Ph9ZrOCKEqIo5w0qUXzREnjPkli6kYYF3XZsb/zgG9//Vs8ePvxG1fcbdK9jd/pWNCzFupQi+EEUNcJ8UZcsw38jd9dsyAW2/TXmmEs7nvtlf/B8/n1wnhRwV4n7kWzWXSgl3gsi+r2mpq2qGC9WyRicAuBKJY9uBvnovFOklbbdvzwxz/i+Ref85f/9t+jjWXvzhaumfPq5RU7G5tUjWNzZ5vDF4d89PEnbGys
85W3HnJ4cszx6QVnwwv6x+v0+iv80fe+y8PRlMvLMz7+6H1OTw6lKdeIf2CaRiRJjjHiJPPFs4OgUdJyNRxydnnJq9NLroYTOudxl0O2iIL7uCZNU+LIgmsZT4ZMJxOmxZQu6J5YE6E7j0l7tCpm1irmswqnNDurO6z0N6gnl/TyNWbNlDzpcXZ8wfbOBtpoNjbWgq6IoyxLYfvYGO8sezv3+Yt/95fcffyIla2tN66526R7G7/T4f63KQaJJZTwf0iO1xzZ68dI00tKWx+aZT5gwK/btocXeI25cOPQi3PzN/pwofK+TuqK1gfNZq7hh4XAkPeLaTRJtp0XOuKS+abcEttdHGDRTIxsJBodCuZlyWw6pq1nuNYzuppirSdLM54dfMrBq1M2N+6Q5yvcf3CP44sz/vanP6WsWrz/FGtj1jY2ubO7w9HxMW3rGE8mOOeJo5hWBX2XNEE7UEbMKoe+wHtF0zbMq5KzyzGXl2M6D1neI8v6xFFCGickSULTtHRtQ9tWTCZTZtOCtm1J05w4CZOtNsYTMe8Ul5MZl6MKpw1rawkQ8Rd/9Vc8fPyQn/zd3/J73/g2q/0+n37xOd5Z5vMZ06Kgbmra1lNXjiw1fP2bT/hXP/gBX/3WN1jd3Cbv/SOHI27jNv5/j2VFqv6hyvbLFH8X2Kq75vZ6FWCCm5VwtzyWfBce+QJD+DKlNY+wKVQYMRPdBJZF+ALLBYJcaoAU3HVzzDkXoAcVIIbXK/TXboRBnzbYUEUmYqU/oJiOmIxG7Oy+xeHnJ6yvxrw4PODTzz7HeU3Xat5++zEnp6/4u7//GU3r2NvbZDabU5aOl6+OGQ4vSdKE2bwkTyI21noUzjErHSZJsZEm8cLxn5Ul1iR4r7A2pi0rxpOCeVUSJyk2TtjeusPezg6rg76IKZWzYHQwp5jNmc9L8J5+LyLPB2Kiqy2tF8rbxWTEcdmS9td50Xk28hVmTcF//S//ma7suHf/Lo8ePeDl6TEnJ4doDeV8DkrhOoVWMV9/5+t8/90/5q3Hj+gPBsSJeEK+KW4pY7dxG7dxG7/BeLO3xG3cxm3cxm38P4vbpHsbt3Ebt/EbjNukexu3cRu38RuM26R7G7dxG7fxG4zbpHsbt3Ebt/EbjNukexu3cRu38RuM/wVHDXP2+PziqwAAAABJRU5ErkJggg==\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAV0AAADnCAYAAAC9roUQAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9WbBnyX3fiX0y86z//e639qV3dLMbDRAEARIkRYmklhmt1ihmFKMJTThke+wXP9mPEw6Hww92+M12hB22R7akkWStExxRJEVxARcQaKDRDfRa3V171d3v/e//s2T6ITPPOf9bVY0GOQWhwpXdt+7/nv9Z8uTyze9vTWGM4Wl5Wp6Wp+Vp+dEU+R+6Ak/L0/K0PC3//1Segu7T8rQ8LU/Lj7A8Bd2n5Wl5Wp6WH2F5CrpPy9PytDwtP8LyFHSflqflaXlafoQleBw3/Ue/9x0znU6rv4UQCCEAkFKitUZrvXSNP0cIgTGm+mkeF1KAEEgpMRrKUlPqkqTVIQwiDCWBCkjTDkmSEAQBSoWEYYCUCikMKgAhQWDA3VcJgTEarZvPMygpCAT2Oimqei4V5/0hDCDw/9Rfu0MGKAFjTH3w9LkNTxL7HONubD8adyNjDKUBo0FrqnaSUoAAXRqKLKfIC/IiRwBhHCMAnRcs5nNmeUZh6j449VYPFlOf49ug2Ra/+OK5H3iLp+VpeVoeE+gqpQjD8FO/V0oBNWD44j83QdcfN8Ie11ojkCgpLWgqRRCGSBkQhiFJkhCGIUHgwFYKpIRQCZQEhFkCciGgLEEpgUA4AAPpQFSIh4BtXWFbXwesmAe+cmAqHkA2+37V2QhRYfgPLELYZ1ZtYwzGgEBgtAFtMGVJvsjQWmO0QUpJkedkRc5ndRX05/l2afaPf7aUTwWmp+Vp+azlsYDup03CJnj9oIl/ekKbJjNzQIMQSCEJVIAKII5iwjAkDEOUUkhpma1yPxZAqY7be4HxnwGJsUy4OmKx1Ne8+c0Db9AAVrP054N/2XfydfAg9pCbPKJtxKkTtLb11kWBLgqKPMeUJQLIFwvHkA2l0RjLmx91+8brCLdeiAcWR98/j1yQnpan5Wl5oDwW0P20Sdhkr6dBVwgPdcKJ/tKCoxAWUIwBx4CRFsQCFSKEREmIVEAoA0IlCAOJklZNIISwrLUC3BpAhDAIrIrBCfC+NhaAHRiaZcz8k7SK1RRUaNsAPGEa4CdOPeYhDz3Vvl4qACiKEp3nFHlOWRTVe2oPuLqstRuNJ1QA2riv1h6cxUP7tMl8n5an5Wn5bOWxgK4vTd3tkm62obc9PWntdwKtNUEgkFK5c+x9jNEVwGhjCJRCBQqlJFJYlYOUElkBgkc6K48bLCOUjvVakdw44Flm6FJ4FcPD9K3u82doB/tOy+ca4eHVU92a9TZF9+r801KBAXOKpxpjKMuCvCgoygLdWKRKYyjQjbZgSePh1TRN2C0pl/qvWYenDPdpeVr+ZOWxgK4xhqIoKMuyPihqDndab2sNXsqCMPXkVkqhwggQ6LJAYDClrq4VWHYWqMCxYolU6gH1htcDG6RjuYBTIUhnnHuYoG1YZpFV3R3trXSqn6E9HqZaeOBh1QevODBL3zXroc2D97UqB7fYBJJSl+hCY7S2cCuWkX9pEWi0efUMrR8A26c63KflafnTlccCumVZMyRPqgBwzPa054LWusGcLJAIIQnChDhJLGBmgrIsKzZq0AilKt2mMJZ9KSkt8Ph7G1PBmKwYLU7VULNh4Rixv59GoI3n1/Zuyj74tHR/Slv7qGIqnvlgEY1/qR5gmlzWgDZWzeLPEXYZWmpHjcEIkFIRBIbCmIaXwiNq6bBYGAgCVfVRWcqqr5qeJHU1nzLdp+Vp+WHLYwHdxWIBOF2sqaFDCFlN3NPA64sxhrIoUUGEkMp5Hyi0NpiyqCz+xgiEssxWSIlwlrESgzAajEDoEuN0ul6fKyQIKZDSuoE5/F0yFgkck2TZJctIgRK
muqYygvGgqO/fv3ovGp5iS6d+CnA5FYIxouEe5u+9/IyyLCnLsmKlUghQyrJVbdnuw57kVRxCCGuAjEKEEBRFgWjohP2zTgPv04RJT8vT8sOVx6ZeiKLIivXGoB3TUlKhpHS6x3JZjdBwIVNOReANPF7fWKoQg0CXGik1KlDEcUwUxagwQgQBQgWgrOgvpEQ23MOktF4LUnl9rUGaUzyzYeAyBoy2jLcyvDklr1pyMDA/kOl6MP/hQcrqg/1lWludtpcMvH+uV8809eRe4vDqntN+z9UTnJQQhqHToXupQWGUNyQu65lPq4ielqflafls5bGAbhRFS8Yy7X578R9qZtYE1eYEDgJF064lhEAFgWXJUiOkIE5ikjgliiJUEKBU4AxqoJREKeu1UFnnhXb+twYlbDjeaRctI7AiurFgp42pDG/GWAC2J9ZsUzZUKPZ2Xj3QKJ8JmJbB23sOSAFa+LpodKkxUiOUtAuUECglEcLqxr0Bs6mPrQC/wej9F8JgdeHKLkz+FaQSKFSjQvVbnmbxT8vT8rR8tvJYQNcGJcgadB3b8seFEOR5vsSa/Plaa5QSKClRgSJwYByFIRJDKVywglTEcUIURQRBQBA0giACVbNaalbmAx6EcxUwS0aiWo9ag26DqRuJ0BYWtXR63IYjgGygZRM4PQh73fLpsmykqmvhrzVeL+EMZ0ZryjynMAYZBBCDEEHD7cu/q6wWNitFmCXgP81YpfP6qNUW1rXOGPHQ8wU2sEQ59c7T8rQ8LZ+tPFamCzwAuqd9dE8baPznIAwIg9Alh3B6YAEicOJ9YKPPLOhKhLQgIYXVu0oHztLpcYEldygjKtOZwyLjnBKcZ0IjxLaKghOBra92umHHkmt9rXDGtoe7wv0gtmuMdyV70DMBgNJ6b+iiJMsyZOBUAY1Fq3pWoy0rj4NHuHxVUX1B4BizB3B1KjS6qd819X2egu7T8rR85vKYQDekMi8ZMFpjjBX5eYSPblMdgRAWAKSf7KCNtsCKQEjp1Af1jw+i8EazKmeBaN7fA6WpwNZo736Fd9oFvI+xVS04TLZGOp+3oVoswEgwso4Qk04x/DA9r3nggzPCmeUYuKp+rlperaCLkrIo0E5fW6gA6dQMVSCEflAPK4UEYVU8QRCggtrzA+EkC6UqHXgQyMpwV7ebb8slv4qnoPu0PC0/RHksoBtHEeBZrsHoWqfrj4vAgbArVWCAQyslFcIYO8FNiTEFAu9aZnXDgRSVblY6OV9gMMKgjXUdQxvH3Grg0Mb+o52hrNTaBRLU76CNdvWuagiA0B59SusFIaTThVqPCA1IZAOkmuz2FAw7TwbtdMdGi4pB+hM8yy4LTVEUFEVOWdaqmSLPkLnCSwNC1MY2nEeCD5+2KhdVLVSV1IHXe1u3OF8H7aWB0yuF8OEkT275V//wf2eM86ypu965F/pX88edSsr2buNL0fB+qVf3uj2b96R+xnJwjaju3xwfVlKT1rf6tLFXehWZldgwBlElDLTvpCqfmtql0nv+2Mwl7r5esjMVFam8Zqxnu+9/gzEaUdld7J01ujlAcDMQV21KYT9rR3asK6YLVzcCIzTCWCnW3te9R3VP41Rc2jWixN/c9o12rVi3MEJiTOmO2xlpRF0n13sYNBLbHrbW1h6kAWGac8O2rhESYyRClBhj29DgRV4XuVnVQ/Cf/p3/+qGT5LGAbuJ8ay07q9ULS4YdKSqvBjgteRuEsFFltuttg3tx3npDqSVH/lpdge0fYY1flauYqCdJxSwN6BJKTWXsA88U68FVP6NpnLJ1FI49SmmQynWQsN/Vda4nV5M22gXFLky61JZZat8CNSu3oFtSZBlFntl7uJcrnWeCcEwVfD2s2G8XOoN0BjlfL9kMlFgykNW9UOWm8Kf4geje4Ul2WvCg0vxbOVCAxjjxYeDVUWt5FKeyojYNk6fFmcotDztxbZEIoavvhZCNZ9aTVxjriaOofcYrNZnwgGG7Rjp
wCyo9vKzBoaq6Vb9VeqyqBu4NRQ3Owgi0A+LaQ0dU49YIgTASY40dNcD793DjrhT2LiXYUH43tyyWC7TwwGqqt3ww66xw429Zgq0bXNg29co+IbF5/URlF6kXVgviDk5cF9t5oo0lTc0et5PC3lmK0rWJxIg6JsAsV+pTy2MB3W63S57n5HlOWdj0i17FUIGuEciGSN0cBAKQQiKEwghTuW0Zn1XMhQYbrTENg10TeCuvCCFcJrGmTtKL4TYNovUKsOJ66TJyQb3ye5WFcanE/IARnqE45i28igOv5qhZtmX1xgGxrDrTOMAty5KyMOR5WXke+Gu1saCrdYE0hjAIUFFs2aqLZ1YqsO2itGVJUjgvDjvoJMK6xwnf3DXAVkuLWWb2xtfRHa8Gr3FTw0snD9M//9gXywbLairWXAUa7WAcJFVzuwasaiGl2bD1YgmnLK5LwAUgbVrRZrWEd5Wsp71np4ADYYOsALEGfCEcIAvHUKkB11ajZnwCC3bCWKCyAKrdiRpjPBDTACZwPNDWo6q5wAhFrhUloJAYkxPI0r2DcZzQLWrGVNBaW4obg3EJSJut4445hlx1UtVYjVwhACj7t/BAX/e0BWVd9YVBV6CunSwAVpK1c90tKW4u2WcptwD5VnDk0MjlPj1VHhPTjQjDgKKIyLKMLMssk3NynE3NWFvG64Z2lRfN7FUGLQW6rNZiB2xW3EGXlLhwYyMr3axn1mUFQNaVodZ76hqsyxKtS4qisOqQJaZSG5K8R8Npg9XDXLHqHLcSqSAMVeVZUEXBVYBmo+3yLCcvCmx+Cft8T4wF1rgYRSFRFBKGyvoMa0VelMznGZgcbUqyPKPISqQ0pGlEmqbEkSKJrc4WA1pYw5qBJeCpNNPGDu7KWOiYggdfW0fLAPSnjbAf01KxOFPBYzUMmxoe4Wi+qEiBZ3MOeAVgvMTlEbcx9YVjVh6kXVvLCrAt4zVu8lcATy2s+qtk9Z2ygCgsYEo31qxYbNll9STPmo2VGCXSpUgF2WSOph4DVXy7O15DoHDc1aAFFCbiqFzh5nSLg6zL7jTGFAU5AlMuWA3nXGjtcnXliE4wsXcx7l2N58KeI5YYo7DqAsfw/bJSLXQWJP24q20LdiGqWsx4UKR+J/cU359+znq+YIyVUHUlJThpRjiOa04rgOp11p7XWDhOjaHT5TGldrRszmfz0lpTihJRmorF2Up7EaJemyrvBecGhnDidelyORhpI9WE10tp68olLAgbbUNlfd6HCvikRKgm0NVqD12WaBft5vV8otG8tVuXbHymOl+4OjYXbGtoUwihUNp6VjQnp6Go7uPbI4wC4iSs2sWraLznRxha7wIV2MEwGs5YLEqKogShybIZu7t7nJyMwQiOxxOuX7/BpfMXWMwzLl2+wHNXL3D2zCpBI+ihNJqd+4d8+P4dvvyVl4mTOheyF/s8u7dGvZpkaA1lc5I+IUVXv2u9pTe2AhYfHCJLBzKW+Qg3oUQ1MY3wZKvOkue1N7qah14NIByANyU752mDtDpad9ROXuNrCLhIw4oIWFblmbWXnvwdpFsofJ6SWs1Ro4K3HVdDd0mXWi9GtilMtUBMzAq/f/gy741XuDvWxHlGK4CFDhlnObNFgNY9ZouEvtjkz1055mcv3iKQmdO3esit2asFXlkx44ole91Is/41Bavu5QP9RQW4xq2D9lojrBrRNq+ujgl8NkGvTvBd4KNPhfssMFUOWE2lvffqleb68Clj7zGldqzHlAlsQnOtDYg6GMKeV4OvFPU6EoYhcRwThJ6+W/DJ85yicEpsd420o9WqCEpDWXWXLU3Vgym9jsf63lq/V+MGAdbvVEpnFLN8wetbS+fNQD1Mlt41UAKfdKcZKWajxaTLaaCqEa51rTfyi5O/Vko3eSux3tQBEMoGluzsnXByMmZl0KHXgsU8Yz6f0Y1Dzj93hcODY44P9nnpykU+/xMvcP/eHv/gH/1T1rY3+erPfpVz5y9
wcjzk1s0bZJM5rXbCiy89Zxl0xdap3rduz6qX3QIHjbRGT1BpqqNqsdL3rJ9AiIaE5VieVzfJapLb+wkcSPr8IMKBpPHfO6G/mpS1gcYDr3Hn2TvIKrWo191CXR/pJngN9A0EtYf829TgX51i1Ql2Ia1DxJtwBqZSq1TAZSSjPOa7e2f46Djkjd0xk/GCJArIhCA0kAaGeWHQiykLJAeZ4v/+/bN8/6DLf/7y+3TisZ1bPhuJse/qua/9UVTSrH9v4xccp/4Q9rxKTYAGoXxzO5yulCwOzD1DXpY83IhoqCBtSzeoV9WgwgO8F2ywGGZtT8K90sPTHMDjYrqOwmvnfqSCkNCAyTIKk6NCa0EPfLivqFUJBlAqIAqtdwJCYpCUxgYDqDyn1Noa0qq0jxY8i0JDXtjMWl48lk48WjLaGevV4BmDlAShrCLaPJMwxqAL48JpS4qGasH7xtqQZQuI0nkE2Ny9jcnoFodaTeHusyTGOuAVGp8D2BoZLGsKjaEUhsPhjLff/oDeoMvWap9erDBlwTxfsNLrIGWPyWzG0eEeV7e3iOOQYjZjc32F2eiYj/Z32bt7g/XtCwwnCy5sDPilX/oFtrfWSdsxsXI8wy35VVBHk7YZO3l9Ap8nULuA548l1AY146L/PGd1beBZbKXXq0iCceKqcOBsZ6HnahWhdH3oOdpS1mIhlwJ4rL3XMVNTqyashrH2PrDMUDvw9X3m+WytEnAPcXDfUCG4PrZivHCU18Gem79erEYKSg3DMbx/Y4gINfPb/1+ufbckUy+RRc+QGU0ic/7j1WPmh9cZiBb/OvkiE5Fi4pDCwNfvdjmevMDf++I11tJjaNTTqj5qacpUTNLXt8Dzbss2/dvVemM3SKt+acJp1RJOv+u9EyRWzeQXqAqYG1faOsrq+xq863pYbbVTQzT0vA8rjyefrtd5gt21IRCATaQSRaFLx1i7LQnP7FyxAQ4QeLYsBMoISgGBFBXza65KGuNAU6GKgrKs+b43btE43z9HuCg25dmtA0rcL59CUbv0iODYraFWobgkOkJ4sdLU53kApmmPdfrhpWlRsyfRGADSlCAk2gju7w75/W99j6PDQ37xa1+k305QlGSFIVABMlAMj47o9Hq0woD/7p/9c175/E/wE194jVs3bnC4v4PWmps3PuTihT3+yt/6W2wMuuzcvclKP2VzY2D5RQNQpdeBY2VNYdyLOA8eIVw++SetGCgrJl9xUCt+VkzIZ7Rziymmag9bvJHFTvZKMBf2O0cRHYt1i3XFjH1j0niSHeP2o+e19diAhlZV+OtqEVk0dfDOYLakq62ehmNjnvNSM1rcNaaOANo/nHP91oSVwQoXzp3jmeef4923ND91519z7f63eO/wCkkwZVv1efvWTcTKJuNrb/ILv7zFv5hdRecCI9uYcs47xzH/xz/+HP/rL79NNx0tkaGa02q3GFAvCP4thDtTeAnFzTfpvRS84cx/V8No86hpqJIqG0t1RZPv++tqplsx7sYyVi1yolZcPao8noQ3+AlpmUNQiduhraDb6UG55AfSMwL/XtL73tYTWguX40A47zgv8hj7XekHnlDIQKJLoDHYfcPadvNigj2u3L1rxXj9HpZVgDGyamIvClbv2LimoSWp71Uxl0YbPaBpd9N86Vo7wbQRHAynfOt7H/DJjZv84s99kfV+i0hBnhVMpxNKbcjmGZ1Oizs3b/D7v/3bnAxPSDptPvroI373d7+OknB8uM/6+iYr6ytonfF7v/3bfPVnfprtzQ0SF9RSDXFHxn27aGz+Ya3rd5JN7HiCijHe79agwOlSDbXRzLsUiRqIHOOt12/j9LDGAZ7wNM0toJ5zagfc7rz6CO4RtrgETLKh0rBepjVvNR7o7QVOsG4a+qgZKjW7tezOM0k32YRXLQh3Zn2uERKzKNjfOebgOzd5KYoY//G3WRQFN3/5ddaSFf7nf+ev8uZ3f59/8eY+OyczevM9vnd4yMn
N6zy72uZCdp/B4GVOTIo+mdmsd6Xg+gH8P751mf/qK+8TBXNniPXP1g1ANEugXAGiEbhQpWrB8N4UjqRX/r/VClPdX1RtUUkp1bf1zK+nca3YwX/y48DZbTBNBVuJ+QGbrD8W0HXOFLUVVRiUNBjp120Lhj42woJuk/WBf0GJndQSD7q2UaWsG0DjBqqyng6lEbXNqoGAsmFkEMJU10tjlkS8JdR09be90uCqov4tGoO45idmqZv98333amqf5SXNUcW+vJZLsD+a8403P+SjT67z/DPnWO0mhFJa391CM1/Mabd6tBPFdDFiOhnz9ltvs3NwTPpHbzBdTFhkMxbzGbOTE9rdDiJq89233iNE8Mxzz9BJ0woErDuZ03cDRrt+E3aBs4uRbUNZL/Gnh8GPdTGidDhqgcsYkEJXi7Ifvd66rZqGiqaCQDieKrT1k3XHMKYC8iVx1S/W7t96TIjGteDVU7Umt/JAdYu8/abE38/d3wm6Vh/afGGo7+a8AnTtBuUNQH5xkQjk927ypd/+Q9LhDpO9EbNWi/sXz5D31wnTguNbnxDv3uFXLvf51fc6DDnhb/9sm2k246eSl/mN6AqtMqMoSuRKyImJ0SdzQPKt4xV+9f2L/OWXriHcAqcr10PvpeAr7t/OL15Uf+NJkSNz/rhfOKlQyC5GfmadjlhtNBJeKl6SgYTEbqTogy7AWfCrFscjljl93+Xy2NQLHhwrNinrQQNOJ+o+e1CtW9MJFkIg61zo1deefTUh0hok7MFHuTCJpc81y6jDYRsVbF7UNEc2G9QfrthQ4zvn1F7driF7WtBqMIuG9V+4FbzQgsks52g44Y+/831OxjO2N1Z45tJ5umkLhHUzWywWSKlIWhGLxYw7O/f5nW98k/aZS3z+lZ8kL0qEFkQq5MzlZyi1JGn3EFGKiju0kgAZd20eh1Ov79vF92Xtp1tnjrPs7skLA66ZlLAM04GUz0rndbh2saknlMKzTx/Z5YDO+bU2xXtvaRG4nByNRc2ySVE9C6gNYUI29Mj22caYxrBzxxxhsAPx1CJvGsPZi8MVAfbA5sDKq7pcBwsjkGXJ5T/4Ptsne7Bxhdalgna6xuWXXuOTL/0FVJoSBQEj+X9g5/230eGXua6njI7us722zdvlBRYzRdgTbAUFkyBilmcsAg3YnB6/eus8l9I9XrmwTxDIpXrZKCHPQhts0+iqvRoiAl4hVi9wtd+txQvXh8avMP56L1M0Jq9YcrqryVjD39qCq4+OkwhRWJc3HLh/ikfPY1Mv+NVHGid+Czd5XZ09g/XFu8AADdHb0NBVL7GLJj+sVxlOnfiwmlWwv3SvT71WnPpw6hq/jtaAu4zc9qmGUtc313Lpyup2JZJZrvnk1j7f/u67HJ8cMhoPuXzpHC8+e4mNlT6htIEh3i1usLJClhd8cO0Ov/eH3ybTCWvblzg43McYw9aZs5w7v4ESgg+3z7C7u8/WhUvMZgXjecbb71znmQvbRPJUOz74KktCKFCFeD9pTFf4SeoHmABh6lgk6ZTXfuFpSkl1Kk/n+Wp8OKkbi84rYMnmIAwI6SZcrTaop7FlVxUvM1Tqs1IYAvd0r/sFGu3eEI4barBKleAlKj+vlvrUuWdVeOQWg9mU+RvfRj93gWQ0JV69xIIS8d3f4dv5jHxtjTSV3H3vGm/d7TINtyiSi9zVBXfDlF4kWC3HtOcjZkjCIqMddBGRZIGGMGVawj/9+DmeOTOjLcdWVPfAaFvtVK/Z961A2Ph280DtPROMa78G1zFeRmjey/2uhq9vH7/4+UPm1DUCKKsHCGGjQEtZQC5PXfNgeTzeCw1tlhFOrSCw+qyGka05WCReLyUqNlhBwAN46lvDuOd9+pQ3zbEmHnZmwz/TU4T6L2pezHLnPKpUz9AVC9I+isn4Hz+AoNAlo1HGZLrgxt0Drt24w5079zC6YD49YTDosrU2YKXXIpSG6WxGFEUUeUlmBLv39/nw2idc++Q
OWrWYTA6YjI4YnxwTt1IGgzY/9YWfYNDr0mm1uHV7l1e/+Brff+cTbly7SVYUVnVQBdrXDWCc5FAKnyNC1AwX56r3aabaH/NS81azNDZq4BUoUYOv734tHLM1tRjvPVNqX17HobzXQwWyPsTAVDNFiHocV6EVriuUqUd99cHU53ojYLUkOsJTTREBOMnEM3PrfVKDybKLk6Fstzhab9P75BbrnZiwyGCtw/Gzqwj5Eff/6Nc4GilOxori7F+mGyhanVWKg2PkYoGJEwoCBhR0VMFdo5kYQdJpoVHElCxkwN3FgH954zX+9nPfRFBU+mbjfYxrOYB69tcMFVFSq3sqKu/ucZoMlJUeV4hGG7nr6kXSyTHemGeWPRWsd4IfM/YZRSHQ4Qzd0USmS6vdeeSYezx+un5VrqJEqvo2xIQGUxUNVmAa14jlJqvIYSUa1Pf7VJ7lG7iuxtKnB5cAls6qr3z40x5Y2Kp+t4MhcDcxaEqsG1imYTpZcP/+AW++9SG/9wdvECcps3zC6mqf6fiYo4N9VgY9Ll/cJk0iyjyjUIpFWTAezjEaFmXBZDyi3+3wk59/hWuf3ORwb4fNrVXSJGBldZXXXnqRFy5eQCrJlcsX6Q/WeOnZi+zdO2HaP+Y/+jM/SSSN1YX7Nm+8jxfcjF8MRQ28Uj6ZEWkAtQvXsutQgKhE/+YU9+DmdZA+eKDEs19T6YcFzbFvyYQfZabycrGrsOPU1JKU9aJQDXWbtYPUaoamt0EFnS422/9XGZL8ABXUoCaM8+l3XjmNnB/oAmTA7H/2N/jk//QPCRYTWvs3uV0MuPM//Sucyec8e38PjOCa7PKv9vcZXfk8CQWTOGRqIlZGhygzZ8qMTCnuD55HLOZkSQfVCgnDmBRBnEhuzSO+f/AiL6++jbeo13PKq2Ga6pO6XeqoSerrPFAvhejWc9z3iahUMqK61p5lgbypVfQ2UqDhsiaWfHVV3kKUM8rBAYvkR8x0qSrvxCVTH2mu0t541XTT8sdxh/ylAirLrz1klu/5KexziTU/9ITaCdwhND53QiO2pbLmN6gNVO/hv/ecwTEhDCWCRV4ymWUcHg25v3PAt998l2984w12d/cIQ8WZs+dotyQhObt3btiQXZMxHh3R7SREgUTnBTPmHI9Hjj2XbK6scHZthXbaIlCKL/3EC5yMv0KUxBwcnRDGKWv9FmkEhYbFdM6gm7LWTXj9c8/xyz/zKqu9FG940afIgVkSE5rO46L6/qHCw4958Yu6Mk4ag8oDwfu72n61s0oKbIY5pxi149Ht6OHu6JmTdrkPcPeugdughSDAg7RAGEHwkPaT1SxoQIxb6Cr965KtwTNE/7fzzqjcm6yU6ff+owJo8D65puKVwiL4lUuM/+v/Ce/t7tE/uEt85x4agQpj4gsvcGH/+6xzxEfpIb+ZlwyFQAeKIrP67gPZIj35mHnnImb3DgUxuruFWECqIJYFl1qCQay4MbnC+fYO3ehexcz9u9kgEKdSEALMabVD1aNUYIhx0Xqmbrvm0laxOt8g9UK5TOiEbSxv0KieUYkQS3UQZQxHiozDh9TRlscHuk1XMD9BK4x0XStANEQv3wbNMbj0mmJZ1KgNa59OtcQjPlfPaOCIqJYue0/pY+epRR3LTHwYoeVI3h1HA9N5wfBkxp07uwyHM/aHE27fvcf+zn2OD4945+23yXRBb6VPf7XF8z/xKu12l+OjA1hMyRcLxsdDtjbWODo+5PBgj7NnN1mEAePplNwY9g+PGQy6rPa7tMOAQCkEgqQTsdpdozCag3s7/F//L3+fF154jv/i7/x1FouM+TRjY3ud8bhkdaXNhbM9axMwyy5tvlRBAK59jAEjhU3Us2Ruf8KKcOkPqwVbIKQLzmmM1yo/c20VroIWtMTlOqBmoHiJ0wKsdPexRi9HFkQd61RLFrJiXsKcHqX1SugBt9YanBKxhKH2irH/CpfCxYOF8AuNqVmxv2flu2ks4QhWumQ
rA3a5itAF2mik0GTnz9L91rcxa8/zRXGZ320PWOiQMAkx0xHz4Q5Z3EHGWwxFh0gawiQkHn9Mv9fi4uKIC+khz/Qm/MSZKaKn2Ly8x/gk5O6dmGzu2b1szG+32FAbvqyxWrt3sovmkq2oBh37S1iJrfKld0RpyUh/6lmIJg+ulybwWQfr1rbTJUAe9x859B6TeqGCxeplT0vfPjKkpr4NDwJ37ek8B7rBLE8/71N47EOLz5lgH91wdG5UQhiB0II6yto4v3HDdFGwyAuOT0YMTybki4yD/UO+8Udv8P1334coREtY294GIbhz/Rrjk2Nm8wX9wSorcczGmTNcuHIVESiEKZieHHB8cEC+mJPEIbdufEKrlXDvzm3OnjvDdDwhLwobDo0kmkQUhUZEXudo/QaNsS5Ozz17kT//Sz/L+x9+RF7YpEOXL55DCMn33/6Qr3zlBUJlV3Lv/1m1aUPdo0Q9SP3C8kM2949d8TpV5UJ5EbWaATwhcAoCu8pWoqxnh9I4DukmZWnqMV/fx+14ggUEm/VL1J4K1YB3umX/Z9OuIU5JHNVArXvMg4cXyZZhFwcgPgVjo1SLhXWLMqZ217JSm8H6nmogAKPRBibtGKEUwcH3eGF9wOv9lJuLiMOpQIspetBlPb3D1vAGl/qKi9036cfHtM9KyuhZtrbHdDbfI+1mtLua1dUOa501dJbz0su/wPvv9bj1SUlRONe5SsdLDZiNNJC1K55rj6V/ar9b0+jDiuw2BnPlqNBovVpOMY1VFddOzdasJeDiU+wcj43pLlXiITP0f8g523y/psX40072lmGvq8RY3WSel8zmGcfHY0bjKTfv7HJ4PCUvS3YPDsjyHK1hb+eQ6XjC7s59xuMjRif7CFMwPLzHbDokSrtcvvIs09EJW2cv0O90MeWM7bNbHA9HhJHg6tULbJzd5PatW9y5eYPdO3fZuXubOApZWVtlMZuiywXHRwccHuwTpy1OToZkWUa/v8Z8VvDGd9/hJz//OXrthEhaAUo7AEmTiF/+pa/xtZ//CnEcoNqSJI3RSJ45d4Zupxkw6iP3Ti2QDeOQMU7U43Rk+ZOKwC6KXtQqIRf1SvOdLBN1LdOQxKqJa/ySJSviXE1s47ODnbZviDqgAlONR+8dapYmtjvHLCsBvN62UoEZ6smwJA77qgsrmjsQP82SLeB6Ebt0l9ssw/b60o0Ug5Gwf2GVC5M5O5liOJkShAU/u/VvOXv133EmvcH2mRntRKAKQVHkbJ+FOBUU+R1UaEgSkJlBlFCOh5Sxoiwk6H/Jq69e5eqV13jrO13292yD2kVR1jkbGhBbLVHG92tD8qhaq+6TKljK81qvujHLGCIaY6EiZs72wSlS6IljKQy/ceM3+Nv8708POOAxgq5fKSs1gKlXG/8ila/tQ4GyQfkb1nLf6V4V01z5/Llg0FqQFSWT6ZwsK8izgvFoyiLLOTwckmtNWRSURcFsvmBv74jj4zH37+8xm8/4+MYtToYnpElAu9MlTlrIOKCVtmh12syzGWknoVumRC1odSIO9+7RXd2k21/h5HCHD9/+Fq1On9loH6MEvcEq1979Pv1+l97WOqtrKwhKpuMR4+GQxWyC1AXDoyFxFJKmKQd799lJYsaTKUGUsHPvHkeHR2xszTh78TL39/a59vF1nr1ykUE7AYnNqgbYPwyt2G6triT0WpHd+VeCMorS67xOtX1dlg04NWOozzkNUk9GcXkuKrA0VQCEDXZojM8lAG6YdGocxuuAG4IwIJyKob7eu9svnyUaF9bMuXnctrlu+Nkuj3xj/L4WHpRNdYY/x6VkqdQKdf3rz00gEdQ7Q/iautmFETC+8grXb2/xvwl/jlj8Jv/LL/43nDu/Q3cA2QwWC0ErhTgxXP9EMJkaytLQTSRJakhbIUWRkSjItWFv/4AkbaGyGeOTd+it3OMXf+m/4M3vTPjoPY02sjLO+7b0QRA+oaWXNHxmsqZm3AeSuBMeii8
yEFWAWR1OfGrHkMYvMJwmtR+cvMcnh2/yqPKYma4tVf5ZTB1NI5anabVmNVcQt7r5UFzv36yNZ/gGb1DQQpAtcj75+DZvvPku337zHW7d2aEUEbnOGE6OMGXJxsYGV5+9SpykxHFEmc05ORpy9+5dDJrxaEi33+fln/wcaadNFARkWcF0MuVgb5f5+JiynKNNyXw+IoliiiBABCFnLz/DbDZFGrjw7Isc7N0nm08J45jB2irT6YxOq0XSSrn87BUClyvhcH+fxXQGBkIVMi8N0+EYISAKI452DzjcP+J4OGR/b4/h4TGLRUHcajPodxiOpuzuHSLMCv12anNWONFYCeGs4M7pXViHPmWkFXsbYqwXw2pxtKlPs5OtNjLgjfJPpCHNpgWVlU2hjkh06oHKfVdUJNczRB/x5flV7XPrPQtOQbWonNLqCD4HnhVxEO4vF1EjaWbdasiMxoFeddTULNwDrcbu29e8EMdshX+vBvj6laPmsdU1QKX/pfrWMsLjtZ/iHxytcSX6B/yNv/b/IewVTKaCIFCEWtPtAYUlQFsphErSGmjMrESMYToJWFuTzIdz0tBACMfjnNWeJBKa+cl9iuTXefWVLTqtS7z9XUlZBhWj9Ab4umZ1G1YkTVARNKt+W2a9tlE9WJcUayPktI0Yh7XKZmmhO10aKif391ayxtfOf/kh59ry2FI7Pqx+otFYgkerAioANjWzAn/PhvOHMeQlHI1m/PbvfoPf/q3f5xvf+CZFPmE2GwGaMGqztnWGsxevcOn5q1y+cp5OJ6bX6dFKUspswY1bt7nw3AXGozHHxyekrZTVtVU63TZCSO7euU9RZuzfu818OCZsJahQsZjNCMIQLRVnzp2nFIK1M2eQQlCanPPPP8N0PCIJJLc++ZjpyRFJkrJ9/iIXLpyjKAqOjw6598nHTCcTwjhhsL6OigJmkymT8ZgkSRgeHfPB997i0nPPkSYp42DE+PiI4fEJK/0+02wOUrB/eECZd1lZXbW7Rsg6YQ3Q2JHi1IpHk7k+qGKoT6p7qAKJJxFxgSp3hAdLb1PwASJeTPWGmQYrrLIfuDaTbmLKJYlXVOQCaESf2XO8sc3u5uDZricnNZQ0mWgzMste7rxuaBiYTelyF9QuYzXYmuWOXQJ/U4ezN76uzhGW9fphktPjH3/QYm/4r/nSa/+U6cJwMYJIGcbzElMKyhm0kpjjYUaSGIQWrG8p9ndKxkeQLWYY3SZqhZT3croJtFTG/o5ic73HYjbnqPwe/Y0Tzq3u0fmZV/nmNyIWWYRo5KCoMrk1GthK+35BopEAyPcPVd8Zp0s3CPKTjHn7Pt32ZYJJsrzaQUPqE1W/6NKGwwdRTJEXrAabbKxsPHLoPTZD2rJcVH/8THPUTwB3XSN4C78qC6BA8HtvXOM7737C7s5dovV1Xv/5r2G04WBvjygMSbst4jhga2ud3qDHfDpBUVIsFixaqc12FsDR3V1GowkgCRSgc4TOmU7mfHLtfSbjGUJqolaICgXT8TF7d+6Sdrt0eivMRiN0EHLu/DlanZhWFDCfzpgNT/jg7e9z/+Z12p0OvQsDXnz5ZaSUlGXG8OSYg/t3mUwmbJ6/xOrZLVbPbjIdjjg5OiKfZ7S7XW5+9BGmtEaYoiiYTSZMxxOm0xlJegapAjpJytFwhApjwiCkncYI1Uy8blgscsIwRDrjW7Xx4ZKbjBOqmgPUneM8qNxcFcuT+Akq0m1dA6oymiG8V80pUdL7A7pp7idcpQKoJDGxdKzWG7rgHxoJbIRmyYXJo7FnYd6LxhiMtATDG5eFqPvJs/FKzykagF3xuYrq1ZZ2/z6mkYuWWtbxi4KtC1XbWKNgwMc3zvLxje8xePWfMzMDvvfRLqXSlBIWE0Eaw1pfMFca2ob5BERo2Ns3dDci9nZzZjPD0f6YbkfRTSU3P5BcPV8SFwJuGFZW+4hxiNpuU2xrusU7fPG1i3zjjQ2b0EoImjF6zaxjlarBlWa
iIeH+Ni5KsI4ZECSLNXJzzFH4LudWf4riuKTUpW+E6n5+oTZaM/rwHjwTs739Mvf/4T/lxmHCTz136ZFj7zFFpC2r+6V4MGrM9/0PNHz54gaTZ79Fafj9b3/I1//4PTbPrnCpfZ47d+9SaMNiMefspYsopYhixZXL50liRaQUYRAwm89RUjAajQA43D9k5849tDYcHB7SWxtw5swZkjhmni1ASLLFgm6vy/HhIfPZlOO9HYpsQpHHzOZzDg8PWNvaZjGfsbExgLIgyzMW8xmT8QhjDEVZsFjMGQx6rK6scGt0y+qV8wX5fIIQ0Or22d7aYDQ84ca1a4yOTpjNDTKIuHfnNt3BgDBNIMDurEFBURRgDEoquv0Bk9mcTjuwkWYycCuzwWjBNNOU2ZT1ICBxuzPrajI6vWRDovAZMl0XWEOdsamjETZS7VMMtT+2RQu/k7QBr8OlZqRLPkSVPC7wG0VCTR6rHXUd82/IFoAD2dM6RqfKsAa2ppbXA7VrWPeQ5eP+dqbWDFCrAHzUpzEPssCl7Tj9O1GDkbsTni36u2vHqoWAhV7je9+5wdtv/GP+3i9vcfFcCnKX63cisjInm0K/A6U0TBYFaRvmBjolLEaGtJ2ztqE4zkvSNcP0umZ+oAiF4d2vS157uUURlZj3FSocUB6EpLsRer1DmA+40hV8dFy7htWJbryax6rBTpFa+1YN1wWBbDYxoZIURtDNn0GYe5wkH7B97nOc3JuhC91I2WAIhaJFwPxoyMnqHuurZ/jg5lv8t//i23RiSTG+x998xNj7keh0gQZb+GGuqT96IDCiRAu4vTfl99+8QW89JMsy7t67zvDwmNmsYP3MFlGouHBum3YnIYlCYrfVzXA4ZLGYcXg4YTweMZvNGJ+MGA1PSNMWk+EQpSSLlRWm0yndXodLly4gjKFczFEYrr33DvPxkMJIOv1VuivrdAd9jDB02ylKQGk0Os9Jo5goimi1WsznU2aTMbPJiDDcotNp026nhEnM7M6YyckQrUGGIecunCUMBJ98+AnFzi4mbTEcDgnzgqsvXaIscgKl6XZSssWU4+EJaM1gdUAripFSUpRAVuvohuM57374CfNsxhdefp6L22sVGNSsSNTd1fjGHqvdxZf66AkEXa9awGetarojembvZTbPOj3DdAxQG1EFObj/HY412rHZWg09hWl84zWMS/65QjeCcSyke1nam8k8AakzPYMP9PFbWFXPqLwVBPV2OfU13nhXvb7LZ1BrJLxyQ5CP1zCjd9GjAya3f4V9OWHzwg7IfSSwkIaP9wR39gSdLkQSjDJ0JQz6gsVtuzB0OpDsKC693OGdb07odiQb2wauZaSbLdgUyOMAM40odtrIiYBixnPRCocy4IgFmMIterLOWyE8xbOt41rPHbHSol8mfS9JAUEgKfMCIUK6xTmmh7e5n32flc3z6GGEngm6cUwrz5hPxyxKwb3ZITfMNxGHL7CYR1z63AptCiZh/six95h0uj+YvS6d0ViSHnblshpKMJoU/Kt//XvkxQgxTxDM0GXO/bt36HbXuHj5Aqu9Fiu9NoaC6WTGycmEsiyYTEZobQgCRRRFTCYTsmzObDqiyDKy2ZRFFHF8dIRSijPBFkkS0e12uHWwz9HOHcZHB+iiYHX7EoP1beJ2CyMEg0GXIBDoIufk6JCTo0P2dnaRUpKVBbrI0WXBaHjCwcE+WhdcuHCOja1Ndj++znw8ZjgcoYKItJ1y5sI5ikJjypLjw2OkiukMurQ6HdbWVomVIgyl3R2iLNFSssgyjArQoqTdajHPcoyxjPXXf/N3+fv/r39EkqT8lb/yF/mv/sd/o9oO5oE2N/WP75fTZ4pHHH8SSuXi5dibBR4nqlYuQV7qF7W3TBXBJBCi3mC1NtLwgEO+1yfWygmvylhmk7VZy9S5sKHBeGkwabNk4PLn+QxXHnC9a5OH+ipbWXWZrvyGfS2orvedbyqyL4A7731MPtshTltM71/
i/tERu7de4tkv/BFJb46KDWECsxHkC4HWkjDQDLVgnIGaSEwmaAlJT2uiWUG00uHoRNKZ5SgBo0lCMusSLFpMRprOokAVfVpiQj6+y1dbF/lOa5N74j7G2JwKNmmPN29WtNS1uleV1Hr2OsDCIFDMs4I//J3f5LlnXmHj/HkSeR49yTicntBrC85vpxzcm3EnF5gggRBYbXHz6wfk999g6+xZfv6nf5rhcMR4Pnzk2HuMwRFNmuqOn7Kw1Xuk8UAk2oPFiTla8Gu/9QZ3jg7ZPrfKycmQ+WzK0c4h2Tzn6hcuY7IpQgcIYjK3r9rB4THD4QlCGKI4IAoDgkDSaiUUWYfhyTHZbIZUAqEkk/EEpRS6KFjf3CRMQorFnLsfvc98kXPmyoucuXiFVn9AksaUZUZ/0CUvFhyfzNnb2eHk8IgyzxCAkorWyjpr29skrTZlntvVNZRcfvZZPnrrTcYn+0xPjtGFIYpSOu0W2SJjdHLMbD5HRoL1zW0uXrzAYKVLEkVEQcTw5AQB9PoDlAwIgpAoVESBoJO2OTgeUZaG1dU10rTD1vYan3/1RZQXL13D23nu95GzQSEVcLgfjRVFtYvsqUTXJ6zUjgoNdYIzfpkKlZxetuGDWUUjVaAsaqbsTeX4e7v7GG/0sfcy0sFABaaVd24FxJ69CtPQs9LkJ2JJteCBeclnvTK01fX2XNaI0t7bBU34/BoWXQ3eTcwDuxfWNYr50T7FYsy5jTMcHoz53GrKvfcvMooPMduf0N8Y0Y5h0DOMDgR6ZuhtGkYngnkJWWDQuWY2TRgVkrFRaBlyWG5zGYWe54gy5qSMSFRAoEPm0xItp4h2iAk6tCYHfDHXvN/vc41j2z7GB5fURq7KYImHXt0Y875dXSJ6ITnzzFX+5a/+E7pBDxUHhJcFul3y0y9cQKtz5MUVEHb/tkSmTPduwKTF9cmMNDmhi8SEC175/OuPHHuPL+GNaK7fNWN64NzmwIeqcx9yU8Bw+/4Jv/MH32JlYxUlJHs7OxRlTtqJ+U//9v+I9a11DvcPuXvvDovFDIPdXWE+nzOfLwhCiZAwm06JwpAwDAkju/ttURTEcYTWBTrPONjd4eTwkOloxPHREXc+fpf5ImPz7BXWz1/kyovP0+51yeczyiKj2+2gAml3cihLTk5OKIsCIwRpu03aSml3O4xHYzrtlCSJicKQrQtn6a6vM75+h707t9m9f5+1tR6ttE2SpsTtFptntxgOZ0RJQqvTY3V1hSgIiIKQPC84Ojomjlus9nruvRRxIGlFgtZ6n6woWP/5L/K1L75Ep53S66TgggKkEdXOHH7/LV8aRMzl0BXV5NZaPDwM/gkoAsvuKmND5WJkWB6CDsyo2V4l8os64YmhVjtUm6wKU4N29eBaZVEH6HgnNLBb0Zg6AU0FHDT6pQbEitD5ulY1NA13Kc+fPQNsMGt3j2pfOK9kcAxZUzNgz8qDXNEVBdNuj+noY9TgKhfEgvKtc8y/s6Dz5T3K/hyZZER7XaKNKcWiRMuA3sqc27cDdFszywv6Zo18PyCchhRG8Vsf9fmLHQimBiVK5hJEoRGEULbQBMQpLOYFmRa8bDTZoMMtM0aLEuGCuKuWcuHejTfDA7QRdXpNXH9cufQsvS88S/Fexnj3iNGtMZe/nHLxxVe5fftD1sVVhNFEQUJX9Hjl8udJwy3eeu/3uHf7HZ7/ouH82k85FcfDy2PX6VYDYont1n9XzeOZQ3Xe8l0MkOeG/+6//30KYUgSyc3r161P7Lmz/JmvfZkz66tkRU6Z5dy6XXL77j3arZTFPLebVmJQUqKkZF4ULOZzgiCg2+2SJAmjo2MQEqkEh3u76CJndHzIR6VmMhpT5CWbl57n/HMvMFhZp5W26CQxJg5I04QoDjg8OiRbLFgsFrTabQDyxdRtrx7T7fXJs4zpbIpSEq01SRzR6vaR0S7ZdMLBzg7D89tEkf1+sLpC2m6hohPanS5hGNPvD5DGECj
F5tYWRVHSbnWwG4EqgkARhIF9H2FIw4AkVKx0IsD6NWsE0ouVBu8iig9drXZCaDIqrxkzFnCfRCMaOC4kmywVl/S/Dvus2SvVpGx6MTRZhHefk43x7NmoJyFNSiFq2b/WOjq/2Sr9aU1rq0f6Y82YQH+tPU83VBBWdDbabxHuYwkdBLmON9U9rJFNe/cqYceHRlQbeApj2OiEHLnv7sqAT4Z3eCFJWA8LinGf6NsxZVujVYacr7MnNdHVXTobc+azE7KsoBjnhMLALCAaZyyyPQ5251y/p7g/XfD3vnCOdDbH6JBQKVAKNS9RwRbZcEaU5EzLCe+OIi6FqxTtgnssGn3jSmX4tOzXRwcuyQ8NTxGB5PXLL/PNb/4WLTmjvxLwc3/pNfYne7YdgV5nnVh30UXBjVv3WcwyzicJzz3/K+RMqSL/HlEeO+guDc1Gewio9j/7LGoFMLx/7TZvfv9d+hsDJtMx+/v32dza5M//2a9xdmOAKQ1FltFOY86fP8d0OmM6GRO0I46OhwRhSBhEHO0fMJtPkUrSbrcRrZD5bE62mBN1O8StFmEYMzk5YTGdMp1O2b54kSDucvXFF1g7s4U0hu2tNVrtGLA+sJPJmEAqpJCsbW3S6kwpsozJWFHmGXEco5Tk5PiY3koHoSwrVmHA1eeeY/fGXbr9LqOTA8ajEe1Oi8UiJwgj4iSl2xtwfDRiNpsTqYReu4XA0GtppFSkaUqaJBhjiKUkErIyQNp8CcsM1nrciGqTSTQEos7BIIVGG0HpBqTdH825GDnWpTH1LsFPUBHCOxtZ8V/gtvV2sKkbulPwxKFBK70k7hcqvGqiAdr+Dh6A/aVN4PSRUU7UXwpGER6c7YOa4b84sdkAQrvvjT6lwquB1IOqzz5mwVs3GKCu9b9uMSgbagW7p5hBkHH+TIvknQPm7cv8q2zMKOrzph7yUhQi22ukJkfMY0opOUoS5uUB4laffKckzBJWe4asoyENyIsFB1rTjRLObrSZ3g94dzTl//a9Q/4X59sE0lCaCKVDEIpimgMKoQ0r7ZyMjPd3Si72WxTrIXvYcDK/oDQjCJd7xYNwDbz2tUue37zE4K/9AidHH7Hy+grd1RVu3H+PLXWVlfQSg6TDrRu3ONg/4GjvA1Qw5vLrnyeIu9zf+4j54og46j9y7P0IvBeag4CKMYjGtz+oGEAXgn/2z36NolwQRnDn7l22tjb5a3/5L3LhzCYBJXNdMFvMyLKMMAhYW1uj1+0AhsGgy71797h39z4CQakNMlC0uz067Q6hChkeD9nu9VgZDAiUYtrtkHTarG1s0Bv0KbKSwdoKYRzRShOEhPl8gS5LxpMJRZ6jlMIYQxiGmFYKaUKchghtyPIMFVpVxtHhEWEYEgQBoZRM51OiNCHtdYnimCLPmIxHTKYTiqLgzJltVgZ9nr0aUxYl89mctcEAhWE2m9FqpyRJYndaloI4aGwQI2hM+WUu0BQlm196a7mPpPKBkMr5NRqBjXrSoqmNeIKKqH585qkmI/CBDl7311Q71Hpf97dotGvVfo1IpVofYc/XlmV7Pa5FPsuOJNYNb2nPPuE5mY3PrBZMV5ZVIk4mMdYP2D663hzK6nIFhqKWXhxLtuTbApCVdPzG4tqqGYxlyuqs4vn1Fh8ND7gsFboVM1InvJ9OiFsKOQ8pyoKWLogikFFAwD7j/IQkyUnKgvZUgJozac3Jdcp0done3QFfXBU801vnuwcL/t93FvzdcwoZzhGBscnkS43JIie+p2zICOYnHIaSraOU0WDBXOraWUr4kdtYLR3rt29vrBrIYHdBcYvm+sULdD5nKJOM8aSkyCcMkpdYHE64V+xz5/q7LGZ3WT1/lu7G84BEm4L11Qvsn7zBevjFR468HwHoNunsg7JocxV6AILd6BIYbly/y+99/Q944QsvYzBsnTnPL/2Zn+PSmS1iockWOdPpjEWes8gW7O7u0un22NpcIwwkwpQMuglRoDg6GVO
eGPorKwRhTJK2OP/MM4ymUwIJeTanvbrCmXNnGI9GTifbYjZdoE1JGkdgSqIoJFsUDIcT5vMZUiqGw2OyLMNv2y6kQChlF4s4IowjkiQhSSPSNCUMQ6aTKWmrTdrt0FlZZX1zCyFhkc0py5IoitjcWGdjtUen06csDaaEosiJ04SEFO237xHQCiOUoMG6HtbyP6DLxPI1wqkiwE5M1QCRJ1GtK52sXo0/IR3ggAVIWY0/z2TBmpxEA/VE9T21SsGV6lbu+mWNmlMnVABgXJt7UGjMCB9Z5Zh2M0esfbZNpV6Z+ip8MfgkNvZAlY6eGopMrUGp6ldijJcDLORqX19KxumQtStnWXn/kAujOW/tTojPrWEGE2R7HzkrCDJYzBTdJKLQGSQQRZJpviDuhcymOaPckLQgY47cC5ClJMcwEBm/eDbinUnEv8ki/noaks2HmHaMCCKINEUhSUtBKTWLecTiZA95ts+5do8b8by5pFT9vSRHGNPob9vWgTDksSCPJmStI4piiMkMJ9N9ImWYHxTkw4/Zv3uLMJ1w9uWXUXHLefkYhEv1mCQtsmL8yLH3I/PTbW5xDlT6Q6ih9uGEyVAaza/+239P0m8RJglp2mF7a4tLZzZIAmsAW+QFUipaSYtOe0GcJEwnUyaTmJVeh27aIlYB+YWzxOkhUkpa7S537t1FCMnW9hlWVvq045jRaMTh8RGz+Yw4juj1OkymE5QsKfOSvZ17BJFkb3cXYxRFqcmzBUIYptMpWmuUUsRJUol7URwhtCGKIlbXVuh0u4RhgDCadprS6XV5/tVX2Dp/kc2tLfJ8ThgF9HpdVgd9zp/ZoN9p0Uo7SBmiS5jPZggEYRyCMeT5gnarSxSHf4IeqhWGtcbL9Z0x1VY9nv1aA9yndtyPddHCbb3uhe+G2quybjuPBsv4oHLzmk0QrU4tHVQqBjuRpTtglr6DimXhoc8xzEqv7heAhjrD65UrpkmDUmNVBpVfqicwFjQdP22I2P75pjq3FKC8D29lb5H2nYWNdNNGYyjBRfEVZOw9e5Hu8YTzwzGHWvM7f/Qx0Zbk4i8k9LZ26ZQzilyymHdIkylZNgIlCVuSnTKjNZCEGg6mAVL1SNKEbJAymc5YjGEDzedXYV8kfC9IeTmbkk+OkCsB2ayDNF1MFpGEKavRhF+/dpPudJ8rwSXa5/qcyIzaT6/RdtQSxOkw9txo5vEOs+SIEAlKYjJJUZ4QZwMm+zvs7Nxk++I67a3n7R10bWrUWoCQxOEVjsbvPXLsPV7Q9YOIOqPPw3x4zfK718Up/g+Pp3zru+/xwsufYzyfsCpKnn/2Iv22ZZyLPGeeZ9YGLOwuuUJKoigijmPKUlMUBePxkHwxY321T6fdYTyZc/H8RfK8ICsWrK30ee7KZaaTKXuHB+zs7mKMIc8zDvf3OTo8soxPBshQkqRt4ih1DHtBq9Wi1W6hNURRRBgGSCWI477dN0trWu2Efr9Lp9tHGI0pC7JFxmBlwOb2Nuvrm3S6HaYzq67o9bpcPH+OjdUV0jhGSoUUISYQhIFCa40UEEcxGEMSBFXi7AeSYTcXvdrr/cEvl3QP/sNDpBBz+oInp9iE4ZIqSbbR7jPVQukZbt1M9j0LMSOghd01Vla5ce23Eh8K5q+tGGW1lDXavgG4D5NG6mtqycX3nXbzqmKzwuOMqwOWfdWCtA+rMA6IauzOxjuozhqBCNDCqhe0sQY4UbkPuucKw6ijCV46z+YHn/DFUcGdC5f47ltv0jbnSf/sJtng+4TRFNkeIkxC3ArReorUBj0NGE+7IBShVkR3P0dvLmlFBbqniTuGe9Oc+6OQpFXwT3aP+KutiC+0VhCLFiYEMsEi10gj6CcDrpzf5p/c/4SfuX2XFV0iLvaw2ces3lxX6iS/wEjXZF6vbVsomg6Yx3MWxRwl4GB8HYDhnYjJzodceuUnCNPUqVtcDmRTmUIRRqNkhOFHHBzx0PKQefsDL3Hs6ne//ib
9jW2yfI4pM1YGLZLAIE1JkZfMswXzfMZ8kVNqKIoSKWWl4wQoS402Je00ptXqUgJ3ij1Goyk+FPKFZ67QayWsdTtsb66x0m9zeHzMbDZHCUlv0CfPS0ajMcPjIwKprIfDyQlBFBElA+KkRa/bIwhChDB0OilBIJESRKmJk5A0jVEywJQFpdCUheDs2W16/QGrq2sESjJftJhNp2ysrrO1ukIripBKolSA3/I8CEKOj09QgSIOQlpxQhpEjVDez9YvS/71P/B043IyOEgSPrXjk1X81uu1Jtflv/MqAuHDTJuuVwACmUjyckYUdNwRW3wyFBCVMwFeMez9aB3Y1UERDe+BpqqiQsRmqm7RqE+9bbwvwlhjmveCsIuJM5zhI9xM9Qzj9NlWJ6X4aOff8uyZv1hn8AOnXLARbM1t4Euh2d9ucealc6y8c53Pj94hf/kV9u/eonjvEsMzKTI8Ieq8DatHdtcMGaDLkjgtGO1KVg5+ip5WKJMynw+ZjYasDgSd1JB2F0zKjLuHmjyN+PtHBVG4wusRpGIVnURI2hTTCUXY5VeeeZ37ccR7szs8fzIk2QkItxMb8FJ5EtTKhno5831mdcZxtMJY3iYvpmQLSVkcEAcDklaf7S8+i5GhdZ304E1R3drepUSYgH763CPH3uMHXWMQsiG5NL76wXNVMJ4s+PXf+Dqr5zc4OTxk0B+wsb5GuxWCKcnLHKGEdb8yJcPRiNmioJXGZIsF85lhYTRlFBKFEWWpKXWJVCFRGGJ0ye7ePs9cOU+33aKdxkRxRJZlPPfMFWbzOVlWcnh4zCTPmcwWTCYzbt28xfDkkPliisEazqIoIopCgjAgjkN6nQ6tVgJogkDSbSUARFGIznOKPKcsAsqi5MzmBoOVVVqtFtpoOu2ErNWi2+7QTltEUUStv/MbQhrSJEaqgCSM6Max3Z69nrkPbeWlHTnc70pIWtLlekd9lgIlvI7ydGTPk1QqsDElCKtoqF/QQVK1A2xtAUcYpGgx0iNCOlVkWxX6W9HSGtrs/w/ZTty5d9Ws1H72br4gEabecW9Z54tTpvv7+pwDXo1QiyLCMz27E6VzExSUwqDcYhCqAeP8Ojvjt9juvFrpKev7N5Qi/tWE5OBLz7C9t8OFj3dI73yTb599mShbcHgzwRQxw/sv8OqXW0zkNVqJIs9CZqMUWUj0ccYEzXqqbb4CmTLeGzLpQX8DVjpzopU5O0ddht0V/lWhiUSLl+MVGA9J9XXM0W0Ua6gw4z85+zz/5304Sqaom/uc65+D1Iec1Gy03jvOvlWgFOv9dQarz3Nz9gllPkGXiuPR+0RBiJaa1a1zmNway+pwRYPVfyugBKPcOCgJ5Y94N2BfrBHA7ZWG+YzzsnmS4drHt/ngww+4YMbs3P4YVMyLLz7DdDoh73Qdu9AYrZES0nZK2o2YzeacHB+TZwvarYQszzA6ZJEVaJGRtmM63S5ZfhulJL1u2wJmEJHlBVIpeu027U4HjKTXG3A8HHE0HDOf3ieOUwZrG2idMToZksYJaZpY41igaKcx6yt9kjhGSkGShKRRaKeoEOTZnMlkQhgEhGFEGEZ0Oh2iKAKsnjoJYlppilQBQigb59/IFaocmw+kopMmFnBdEy4ZRj5jWbLIN66v3ebd5KfeSeKHfcaPT3Esh3qc+tQvNcurWwC8eC8RIqYMdhtGNFPdo+lnaxrXLakTHOP0Envl+oVXw3l26lUgNSO2GOmB3NTuucJUAE2DRTvZxJ20vLAaFxJhF9CIQbzJzujbtOI+aXDBRR02WLOgYbyz4yBrxez+yk+z+S9/m+5wSjD8mN3jdxAbV9htbxP31jBHa7SLAePxEVJrWkmfPC8g1gi9YJRltClpY+gmKaPxlN2hILkc0Vqdce7iEb05nIxjflOlrMxmXJ5eBwFRp4tUHfTwBu0s42+uX+T/ebRDxgHqnUPOvL5WE4lGvwghacUJm2uXWOufY14U7Ozf5K75XXKhOT7
aQZYFffMCgUiJ4gCdObUMfjES+J2BbZ8W1e4en2ZefsxM14svf5JiNz78+u9/l/WNVUxZIAUMD/d4+9vf4erFMxRFDthUh14007oky6bkeY5SArRiNpuRxjGT+ZzJbEY3iijyAmOg1WqhVEAcxYRhSKvVptAlSikQhsC5RfVVSBgGtFptJuMZO3u7xCqhLAOMHtLudFhZWSGOYzbW1ul2UnrtDmkYowKBlBCFoTVKGMPC7fBgvQ4MaWpdvgROOV9qpBJIaVMz+ggm4YwmSkmrOjGGVhQSBfX+BL7F/zTcs5lf90FkXb5zLbA+OcUIkEZjhPJHgEZggZfMPIAJn+zHYFDE0lDqnEBG1NBrHKh6tknFZB22OgJqMG4bd6orG1nKTG38qnLcNrwZ7L20+6CdJqLufV3pKR1LrhZhu6hoqJ7h6ymlYGvlp9m5/8+4N/wmV1ZaQB8Pz/56G8Ulq/fRwKwT0/ra68S/8zZbKuRKd4XZ4oBv7Ay5sfk8pQxJojaqGFGYgtlwn07SYjEzlFHIwgimZc5ELxhITS8J6ZuIg3sltw6he1Gyed6Q9k7YH3X4d0XEfyY3aBW7LDII4wgMBPMFV/uavzQ4y68mCTevf0Lr9ojehTZenSCFpN9ZYWv9Mq14ldHJEfeuv8fh/nXGbUOxOmQ6HmMWIy7Gv0hkVjGLEpPpJUNm5dvr2twY7+yusCoGP64eLI8FdH2HQEMP5b/wIk/TciZOX20n/Wiy4Nr12yTtmNHwgNl4jKJkNp1QasN8kdmQy8ppH3ycuBSG6XSMLksCJdHETOcLZvOMTl8gpKQo58RJzGDQZ3VlQBKnSGX1o8a9hFBugkroyA5RVPLcs1fZ3Nrk2kcfc3B4wAGC6WTK1nZIK0npd9us9nqkcUyoAlQgkdWWAdgUjwiSJLW+gca4wI0A3ECOwsi+k6jZq/1lUEoRBAFKCKIgIFY1UDyoy20cMMsTt9FbDzmO06k3DTx1OKU5baR7AkuVjaARCCHcJmnVqBXVxkd4XbpA0DUtJpwQskkFsE2QBOeKVRvQ7AjV9eYvpuqS6pgFMrf5jPBQYVVJwoF1re5oihreEaypz63TnttrTX2Jy+eLAS000kAan6Eddphm9zkcfpOV/i+4d7a5NqQxVUBJc6E1GA4ubbL2E5eJvn8HNZyx1Q74j0PNB/vf5V7rC5wEKTIMKMZ2Z5WDoyGdNARato4yYaI1C5GT5YaugI0opCvb3Lme8+Fhxsb5FTb6XfYPSn47KvmVvE8cHVMWNmQ41yXR9Igvxau8u3/IW9029z7ap7vdIoxDVnobbG1eQRFTTEfsHl7j/r3r3L99g/v3r3P2F89Q5oLF9Jjz0VeJWLHv5xdLr4+nrFUMzbngdeqioY56SHnM6oU6o1G1Vj5krj5wyBkRPvzkLiKCuBUjVQ+dzRgeH3L//n2Oj064cvYMSRgxLzKUUjYwQWvyLKMoCvJsgZSK0XhCEMVkRYk2EEcJYRShRxMGgx6DXoetzXWSOCGOwqpGdmsWgxbOIV0FgGR10CeNI6QpyS6d5cXnnmU0GtHtdmm3W2yurdJOYgK3INQLjG54DQibmEaFCAFFWVIWpT1XCMIoZJFl1oIscHRFooQgVIIgECQqJKioz8MA9+FtfPo0U51hTh0X9QR2RwyS0vi38fvEPnkAXOWZrTfScWoZgXZ+sZWngL2idisTIOQaU32LjtjEJrTRFWD6LdorN1Dq+H7PYZtqBSOEc7/zTNcBr6mSOFZ3qe9TOSpVPed9af1f1a4Y/h2q53uWXFYLjgGUkZztfon3D/4tu9xGJu/Qj16p4PX06Kji45z70eGrl1kfTYk+3iUnZO3cWV7ev8Pmvbd4L1plp71CHsUUWUYnSRnOZoj5IavdHkJEZKrFSEIuc0pZUCLpCMGVtM9BHnD7u3O6mwYVx9xYTbkxvc5Fs0YcSERpCJgyzwISdZ+/sdrlrT/+I4ZrbU52cr76Cz+L1Irp4S5H+zu
Mxyfcvv0BRTGhtbbClbOvkbdOmIz3OBt8nph1C6ZGV31oP+jqd9WsWC8RO4Z8zoX/gGHAPh+9wSW+/oylNJCbgN6gjTZTpEjob64hAzDakC8WLOYLQqmqQIT5bOZcw8YcHx+T5zkyiCm0YTKbs8gyAhVQliWTyRxdGlpxxLmtTVb6PdIkJA4CKkd4xzS1LjHGUBjH0AWgIy6cO8N0MWcxyMmyjDRNicOQMFBEYVCHcILVOxsL5FmWMRyNaKctOmstABZZRpZlgFU7RJFlukbbd8PgANcmuYkDVU+ExnM+c1L4qoiHTChfmoDrjpjay1Nj0MaqgZ489UJzMXR6U6EAUfkfN3dvqNkw7uUDSjOnICMwAQ1TJD6/LlVLCWrY8uoC2Ty78kjwWd48KNtTXEIcp5LwwE614NUZyewx43ivWXqqcMuoVz74BceyM3tGr/0c7P4Ow8WIYPgd0hVBHD6P1gojNbEImBXeYm8wosQFUVMqycFPv8j5eUF5f0hxdEiv1AStlDTIuHv0IZ8EK+wIKGVMJ47IS8P+cAQSummfPEnJdcDcFGy2FHkQkBrBRhrSayfslzmZANNK+Wh9QHTvPhGGVnvDBitNj9nTkvODbf5Xr7/G//Y7f8xbb39Mf3CWTrHLweiAo5N9Sjlh5exF2oNnQCq++40PufXWPV7cbJE+t+VUehq/JX0lQRiXZ++USGjX8Notjx+1eqGqC/UkrWPUH35eQwmBEJCX8MmtO9bVShqiKOTkeIEUgnTQQ4YBaZpa30qn71RBQOn0sXme2x10Q+3SO2ryLGdlYJOTh1FCmsSsDrqs9rt0WilJGFbGKAEYpWyDygCDITAQSE2hFZGU5GWJlFbEL+OYOI4JlHLvqiudK0BZGkpdUuQF09mMyXhCp9VCCK8aMZalF7kF5jwDBFEcE6iAUAUkQUAUhkRKVbtz+FI7fNf/PlrEqd3la6L8qLNFbUAxvj+F82hwnfWEAS6AD6m1bSGo86xWsEbtZ+t3qfQ+uPa8WCVMyzFdMaBKz2hq0bt5j8rqT3Nh9CKKRghFzVp9XUz9PLdo4xmyC112HNs9xbOwajP4hhxS0x/TON8b5rRjb0oozvRf4sP9b5PKEQejP+LS6m0OdZfRaMSZwZ+tckVo9zyvI0ZIiijg7tdeZvPffZvpzgHTbELZTog6q7wclmyWu9xWMZ+UEceyg1QpSRwxzTLm5YxsOCWKu6TtDrtFwFRAr9smKXIGSciZKGGsZ8x1RDEcc6s8QRQlg6xEqS0mWUEETMd7XFnZ4O9eucB/sz/h3/37P+DqVkZnXbBy6Sxhq4tQlpkWuWJ/X/PeH73DV//LX8QYiU30fsqoWhGoWrSspRD8ytlgQg8vP4IsY58m2NaldnyxP8NxQWk07XbK/n5Bls8IAsU8y8gWU/YPD8mKkjBSDtwkKggRQhKGIb1ej9F4wnA0RmtYzEta7ZjxZEzabtHrpKx0u6yvDuh2WrSikKDJZvBzq56MCgidq1YRKLKyJAgs+/SbPlpXrprV2ltY4NLGsMgzirJwrm/rRIGiKAxxGBAFirIsKbRmMptSlppet0caxaRhRCsObPy5ofIzbRbx4KGlFq9E5VMvKXQ1/V20ac3BTHUPUfmY+qxTFcQv7cj4ZBQjdGOTHJajJRscVDRA1n/2kNySKxzrIV3Ts/eo+hsLwrKeqF615rnm6RSmtbAOlWduJcE0dwY2S2qKJR4rfB3txPfX1FhtquTkTa2s1yGXjgVv9V7lndtvMgpKEq25M/qIiTb01M+gdSOdj/ALSW1EBEOeRuz8uZ9k67e+SXAvQ81zCCdkSYe1WBMXBSvjO9xTHT5RA4bBClEQEsYJC10wN3P2DuYM1tYoVI/ZBDY3zpKhaYuSQGmCw7ski5xcdDD6hOH0Hl2RUWrDraMZfdNGdFpcvHCRvxTc49dPRtwdtvjpL14hVPWGYiaXvPm77/DNf/+b/PW/+SW6Z89ilWaGOuih6fvs3rf
pElhRlrpXEOUjx97j25jSD9Tq6KMB93QpDNzbHzIZj1DKGpyUkkgkcaQ4PDxiNrN5FpI4Qhtc6kZJHLfQzMjLkqIsieOYPLeKb6Ui4jikk8Rs9PvW3zeJaMURkZL1/lWwPAtZBlAlBEpCGEiiQFKEmiy3inxtNJR+vlh9l5TS5aK1RrA0TcEIoihCGMuWwzBESokUVrfYarU4ODwkTUKSKCBxgFuJvqda19axXrQ+tcVPY6R4UGdndc+nxJPGCu6lbmsI/EE9+uNX7O4Qp1rQGOotG5w0UA1mN7n8giQEgU4oxD4lJdIEjTFjVz//jIqlNvRB9fTVLqOZqdtZVPhV8dVa1eN1qLVx2p8jqolf73mmTQOyhX9N7bwQasgurYIAYwRB2KWTdCmCE2bZgqwwmEJx/sxz1aJc77XmXqoxX4yBIgm590tfYv1b79F55yP0eIYJW4SRZNCN6UjDYDRmkO9yx2TstzYYkxIFMYFIUDJjPj8mNyVqfZu9wxHdOGShc8TshI2VPicr6yTX3yYbZeSyB5MFSpX0oi6f3Nqh09vgfrnCYHCBP9e+yx+MFrz7xh1e+cnzCGEoF3B4/y733/n3fPUnz3H55WeQQVD5Jxs/TvAqBfueTRuHHUum8tO2w0PyKbPvRxiR9mnF18/1W65h72jIdDLi8HCP2WxKFMXIQBGImP5gwN7uHifDIZ1W6hLMCIyGvCgYTycUuiRttQiChNlsAQjW1zYYdFPOba6zvrpCO45Io5BEBY3B7Q0mDdGdUxjsvlKACOyGl1EQsCgLcl1SFJaxetC1obqCIAhsFrAgxBtpyqKsWLI3BEohaacpan2NOAyJgoBAWsD1Aqs+VZfl5mwc/EEEtDrVTZ4GgpqHnUadREcASIF59KL+Y1yWCYF9P+lYphX1l9+/MdH8AixtwvsZM9rG5k72Hgt2CNWM2Xuh2O3arfjq9X92oZRYVyPhdjGwi55PhlOzLQ+eGuHyK9i3abBf48G59mbwGceMMwxJz4gx+C3bG4I0a4NN7kxPGC00aSAITAslk0p/78+tF2TjFrH62WUYsPflV5gNWrTf+j7s7aEHPVpCEkYxq/GCVqbYFjPu736fvWiN/fWrnLQGhGmb6WKKETA/3ke1BsyFIM0mtKKARV5QiJJ06woDUo6LKVlp2JQGJSNWVi7wO29e42uf/ypvThYk7U2+crbkg/KE2x/dY3WzTxiUnItjfvav/Vk62+dcv/jUpX6JqxRpVKuhlyLcWlNL86Lhu/0fUL3wWYugFo2nk5zd/T1yPSdOErQuSZKQQMXs7w3pdLusrq3R7fVRUiFVQLnImC0WTKZTptMZWkOWl2isv+7WxiYXtrfodVNW+h3i2PrdRkFQxdoj/JD77KKyr3OoBIG02e/LwKCNtjkftKZ0GcCMDvDO5tYhXdot0p3BLstzDIZYhQggcVuph1KhGghgJ+9yedi6+si19rQo7RM9CwsK2qkQqgneWBSlZ1pI5zz/6QPsx7cY15B1Swo8a/cKBJy9QNdJaYQzRekxO9l9tIJpMKaVtZ0/rDNQGVMlBvNAKitjm9cRN1QYuMQ17jrvAyuhBsQQdGkQ2i+9pn4Pp/et4cGngLS8t1YIG5qeyTQA1+K8W37CkFYIRSFoRQnk7QbfrhegemgI625X6fidKkrA6KVnyda7rP/OH6HHhxCAHCToJKQVaCKlSOWUDTNkPHuPgyPBvk456W4y7W0yB4zOkQWIMIRAosM2i8mY0WzMMAnZFpvcXYw4mE/ZbA1AatbXC77+wUdcef513ji4zX6ouTxQjDoSEUIQRWi5T2frgm13U9aLic/LW40V/066mjO1jbX2JLHjRuKztD2s/NiAri9GwGiW0UojkjRmNp0QBAFBoFgsNCvra4RhSH/QRwpJnhcUZUFeZIxGQ46ODlnkC7KF3Za83+mwvbnJxuoandQq7ZMoIg5CkiiyARRAJTafRqqH4YmoP9QLvdXZhUISSgCJDtw
W5tSM1xv9pBDkZclsoZHKgrQQirLMKbRGCo1SgV1UpKwfe0ptU4upP7g0875WopFj9qZiz95IJh4AXLuy1xuGV+5Pjx5fP8bFIVVDP7rk0VAZxAx1msMSoSVGGgQx8dwwVffJWpDJAbGOK9az1EPuo92Lq3TfNHMCuO+NrnZtMK4OWngvBMNoPuWDyTu83v9itQ2Pk81q9oWd+gq/U7F2MFtWTLtePPwC6jpZaNvv+YJesEZ/4xU+vPU9MLn1IadxbmXgoxYDmzlqfb2wvquLjU12f/nnWf3kY4rDGZ28QK1twOiYMtckrRSlDbGAaHFMe3GXyehjjj9MGK1eIvvcC8wmGhVIwnaf2fEh2XRGQE7c7/HBvfv0Ox2OwphxeUxsFBEZ/Y5kf+c6F1cGXD/aZVYGXMpD0tBg5mMWK30qt7DK5mGNlM33rX1CGseqpqh9qv2k+TQnoscWHGHrbqpcnT/4CntWqWG8yCnLHCEF48mYTqdD4TKHtdotJpMJRVEym00pF4KTyQnD8Zij4UmVy1YqydkzZ7h8/jxrK6vOlUsQBSFRENKKbJ6C6ukNsJWV2ICz6jbKkvKzoYIQ9ZFKWBV2/7GqBVStNRaAFJKsKClcGLPVsym0MeS6RAjre9xUqZ5uywpIH6FYfSTbpZ4oBihNrZvVRtj3FjhxySzdSNBM8l0nZHnSis/a5fMaVG3b7N9T+lMjtOtTRT+5RKIHHBT3OFZ7bHN+qf9NpRP3R+11ppqUwgErLj+CPSZPTWzvtNQKUm5NdojDN3m59Xpj3HoFAmhRIis9pMsJTIGNI9OVHruGEA/JurqfUBEb3c+jxQKCDxBSE2ivrzTLE0b4xOrOX8Kd0zC34Vlv1u2x88qr7Beajffe59xUEwcR8zxjXpbIqISkjVg7Q6QixOo6SWeDdaORg3XmWcj7b+8yH0+Q+ZQgCNFCkeUFQTumnxraE82RCimCiKK1Dse7JEJyfABb/VX2J0dcO5ZcDkLaHCPSlo3/EF4K8cBZey5Ug8PUenJRtbpXKVQhLz8Q8B4L6Pr9tWypGdWj6uLFMIlhmhnu7+7brdILQyCU7VclGHQ3yOZzWmnXXmcyZtMZo+GQRVHQSlq0khZJEpEmKSsrK/RaLZIkRkmFknaXhiSJCMMH6VkFJq7aQljtXgVGS4D7qNL013gAIusV0EAgJUoqhKi3zhNyOStSICU+HcsPU8ypxzfZbT22PNI4m7q7qBIajWfB1VyqxFotaoatfmjf4B+DIgzLmxj6yK86p5fFDBeGXTcVPuWoJCBVq6xpxaG8x0ItiMoIv8G7ZTz1hKzHTx2w4D0J/Cef/wq89GHcVjm2li8PXuStg++wlZxnVa5VvNqgLRiYGg7sPZaXxOZTvXubPVdWT/SQoohoqx7zbESiYgfWdapIf55ftKie7DXVTknl9dQGSiEwYcjO515A3log7x5wYkbMB13iV65QIiiVokg6lEGAQKGMzXFw96MDfvPf/PfsT+Ern3+ZV197EVGUzMZD2q0YdXaDZHKLNA/ZOSkpckWwcoZsOmYgjzgY5Wy14WRR8O6dfc4NWvTf3SG6NKDox9RudfrUG7oEOU6aaG5Iatfl2s+Zavl7dPkRqhf81H30BC0x7B6Mmc1mzGYT9vZ26Q76jMcnmNIwGU/QZUm/2+PC2bN00pRcQjCZkqoAKRVxnNBppbRaLZI4Jols1JdSEoEgjkOSMLTDYkncXi51mr6a3Eq/CJ56q6XrTr9frRN44CoBNpJOu3TaphbHBFa9ECjVYBafzmofXupVevk60XDnt9ND412OnFLRuOtE/Z7aMTOfjuUUCX5yituu2xq5apCrzZT1nmGw3INN3gqCVA5o58cMwx1WuVBBrgcpOwRquKu2Rq9AvXTg3wAsoytpQ/pEOsCF9CLX0o/41s4f86UzX2ZVDhBl7YDmQdP+p+u+q4xqVB1mn2nBVhq7gY+HbO9u0UvOsT96m6DTx4a/1r1tXd9kVet
K3K6WEfdWDZWJW7IpA8lur0d70eODbEhurnGht0Zt9JPVuM8Xmg+/+zE7Nz/hmQspg9GY+fQW2xdf4d6ORImIla0BZhAxX7uKwLA2z7lzaxfmISYdUOYT2nFOVgb0ggKF4cbxjM1Wh607GelCk2/E9XZ/3vuk0bLWt9IDaz0q/Lvafm7KpQ8vP9J8ugYerutwSFYYzTw3gEYbq6c1ooUBxsMhQgVIpXj++ed5+cUXiclZhCGagFIXBEFEmraIw8DmJlCKUFr9qUQQRyFpbLeyeVip8PEh7dVQEoDxanZ3+ulrHrj/8pfNpUe4NvEg7xXxNqw5qM73uGs+pTM/O/hZ1lUaKqAvjXcv8pjb9CStNVkG4zartC9uAK2bb/RkFLtPX22OqkV9g1TWYOWWWnC7JtRjQ1aAJoxACE1fnmevfJ88mBEVSXWq8EZKBMJopDGUjtE2s4vVvyv6ZAEXq/oB6+ESy5Bz6Rk+Lm7xb6/9c3754p9jPbzopBDP0erINWvw8f3nXdOsDcGfWzNXx62NAGk1ySvtS9w7eJs47ldG4EYrOhuFb6fGW7gh4QG81oeKSmIQccSsyJkXx6y9uFmPMue6J4RidDDmwze+x2g4ZjgakXRafOWVC7zwhS+BDIjTfTorEbIfU4gcYRQG+N7wQ945fptndZeN+Av0eyuYIiMwEp0XdKOMSGlGxrCbhawfaeTomPjiOnlUNNYWX3+shxTLOveHye+eDT+qPBbQlc1+aawGpavI6aTXftAVpWIyW1DqDK1LwjCiLDS6LMkWM1qDFV753Ku8/uqrpJGkLDRBlNDtBTZqK4yQUtp9yYzdnA5pXbHiMCKNApsgp1qp3PPN6Tp/WrHX18akJcmiFiF/AAZ5oBZCoEvjNAq1VVu5XYX97R51S//dQ59xuuZO/+oIbGV99e9T6xmX1x7pKlu6742bF7UW5MkCXKDquKW2FSCMxMiSRTInnsbgPEuqxncBDxY4LBjaCLKAXnmGo/IOm+KqBelKS2uZaxPGPch7Mm2EcbpYVy9nsNSOoToPMoSBy+kV7mbXGc4qTaoDVutlgfcbrYSVhsENHD9tgrRpfAM+WsZgaMUbBCIgibssR2j5AUx1ByFs4FDtlOwbuill1dfooM39/B6L7ojO1nO2/iiQUOaSwzvHfPid72NMjhAlnXbBKz/1OhsXLlp3S6PZ2Fpx47GskMZg+NatN7h9cMQ02uWCKXhGfpVO0ELoguECUhWSyoJuPmEsOtyaSM6mHbL3T0gvdGDgJU7ntrck9brPPgq0YvinQsUfUR4P062sgI1KNDqnOqeyegIGsqwgSgI6nTZZltlMW2XBaDhCCsmrL7/Mz37lp+mmKabMEFIRKEEQRnYbG5cGUbosUUoYwtAm+I6CsAH2D1mdGmPEToS61R5YtXySEGMeaORHEOUHn0eTPbtPnt24JDnWy+Ez3vBRK+updzZmWcsn3UTxeSJK4wXdUw/1IH1qGTD1eHyyStNy2mQ1QkAu0HLCtJORjvrVZLa+1bIhZoKnbQZDLPvI4h5ZNCMuWw6MTQWWXioSDpCNEFZKEPWzq5YXNqdFDdX1mOwGHaQpGHTOkGHdDEu03/uCZcNnw0BWJfLxe6f5d7J38AuwVbnY65QIuLRyiXaQklf3rCeLcIZBD9I2MsgZ3cyycck7PACU+YBP3rxG2JnyzEsX8DIeCCaHC9795ofc//gD2n2FiFK2zm9x5aUvEbT7VsVlCuodnDUQOJC0Y/nLF77IpPw6M53x3ug6ImjzQvvnYC4JA8HdwxPSpM1KBMFiyiJMuV0knI8Djj+ekKwq0ostSrFojJnGWG+qpIRzE6sa/dMn7WN19nmAhXmGZB5eJaUErVZEEAZsbW0SRiGj8Qmj0Yif/Mkv8/Nf+Sr9dogUhVUZSFm5kymlXICB3YI8UpJWEtNNUpIgsBnyH2XhF5ahavGnI20/NPYIL+ay5ErmAfd0VT5NtfCZ6me8KuPRdTaNdqi
4iQfl5hWNtfOH0zH/eBRjGvFbwkWOCW8AMqSLVUw+Z9I9rnxXgXqyLemipL2fDFgNXmRc3nWeEe4czzgdqAj3HJ+G1E7ZRlCvmyACy34tA/a/DVob+uEmW4OLjIoxu8WQ37/1azVr9RIJNTAY7ILquar1A16GX29Us/t/1X17rvMF1meZ/d7lg7Z1r7XX9fiQ1SRyHNc3HH5h0fmAm394g5XtkrMvbiCjFJDoMuXOxyP++Nd+n+//0RugDHcPx2xfWuGZ1z+HSrtoXaK1lQyNMdbrx7iFw9jdY7TRvLj+PH/jpb9At9XnxfMX+bnXv8zWMyGH8YK5CBj0u4znY3YWESpMaImSkJy7OgGhuHVzzL3vHmMWbrv3pfzHkmp7p+qzP+7UVp/iR/lYQFebumMr4G04Z/vvq4xVxlYkDKz+rN/v0uv3abUTjCk5c2aLr3z5y6RRZEmGtqKFClSlu0VCqCRpGNJJInrtFq04QXl6uyztLP2cJj268eMFSB/G+zCAMTQFr4c85yHFVyeQAiWsKsEnOBdY8PVRTKL6+fQV4fSrPerJD7tLqQ3NbAS+70pqQ5pwbaWgCoUO1KNJ9o9zET6s0/N+v7mjqc2L7fk6UucsekPXYRoV+Qy1pgJjb/gsi5LDk0Ni02KhRh5vTz3Xiq1+3Al/UiNhjr1nxW/dGKihURjD1eQ5ZvmUUkk+2r9DJ+g1RF+nkmisjB5S/X5ptoO1e2+3YXmlevNQancCXsiYaCRol/7+7n7GJvf0Ok8tmjKUb1dvkHYj2fS4/5191i4H9C8MEFKgS8VimHB0/YR7H3zMtVuHMMr46MNDvvJzr3H1tVctGTCFW5DKagGwgR8lNkGNY+nG1mm7fYb/6Lk/z9ee+3kCGZNxzFH6q+StAtNus7G+gihnHC2gRCKKAiFK9kRIEodc2xlz7Y19inEPYayfsqiMzKcXE6hdA+HB7Znq8nhcxtwKbYTxMqmrXnPlrbqhqrgU0G2nxKFg2u9x5swWmJJ+u0MShRitKR1T8PlzBaCkIFKKJIwIZK1/q+7dEAsehg/m1PGmb/GDk+ZBZmdO3fhR19bf19onJXB6Ztd2xum1GoSybqPPWJpMdsnaLKrx0TjB/vIiISyJxNJ3lBSValM7OdF7MgThE4m6DvCcIciJ+AKBXiwQcYBBkU5XmQ4OyVfnBCctFvEJhDHRLPLModIbzGVClsQkChbmkJg2VNZ9avWBZSEVaFd7mDn/Wns7U6lGhfdqQVcpH1eDFQZ5l810CxMfcanzqmc3eD9T03hWLegbzJJxzRZTxXLLqg7+3yIIuLXa4vlM8G5inEdGbTirs5U1zZIPzgJdJuy9eUzQH9I527PBQPOURHSYjncwMuDbb1/j5PCI1bWU/+Rv/Qyr5886wqOpQ+DdhDZ2i3ghXBi/yynRnPD9eBUhSrQpOBnvMrsz5ePFv+HZzV9Cxqu045TZcMiNwwnbqx0SBKbdZbQY0+62ef/+MVrusXV5g/7GCCMLC+017XVuerVZ1uLvjzgiTTv94KfBRe083aiMFLQjRWACttZWCZVkY3WFQbdLGkeAqaKzVGDTLkopSYOANFQ0/QV/GIz605bGuvInuFYQKMWiyDFaV9FddsKYyqPBM+A/aTGP+Aw0JowtPjm5wJGfB1QdTfHRf3rCgNepTJpZ1TzwyjBgXh5wJI5pxWuEWcq0NaS1ojFlyVycsAhAlQEUYEqDNBELc0wQSEazBWkS0+2UjEe1SsGllEE0dMIWO4TbLNLWw3od2LoIt6I3DWk+2PS55ApTccJakBEHHSrLphNTvPa2AkaopChtakbruXtzM3LtnucNvoWSXC/GnDEt7njPDf8CeN2uqOrt8d8XrVP2v7egEDtsPbNBGq9TTCLMfMTR+B6zWYYoDygK+OrrHZ778k/SP3fG5ZI2VsQ3Gl0FMQv3v3K2Gqfi8BILEk3JIh9x//AG9/ZvUCwKonSV9U7J+bNTFuYMRwcRYRDST9v
c3t9jTUVkxdjmeYgjVjZX2VkIzN6C1bUOWh4vgWtF4KuX9eqjRyPCY4tIq0TSylDgmBaV40rFGn0oqjAQSNChopOkhKuKtZUVQqWQwg0EA0GgKjEtVIo4stS/qY99GKjz0CPu+Ckm/MB5ohrL1d+nB9YPC7zeTUxKp4T3SW/wCgX7IN+ej3rAA3poc6rLK9Zsmn/WL+KJg2NEpTaUfpZjma80csljAyxDryfrk1Zc4If2IiP1IJCKhE1WdMQ422NUzNBzyUTdJ4p6iCBAuIQupcgwam7Hm14Qa0GZTInLgPb165QrX2Uqm5PUYFxyG0yt2rJKAS+223nhg49tCp6arfocCQkRs/GCNhp0iRB2HPm8DXau+RBXgRSm6mPfi5p6YcfXi+ohtp0cm59EMZvHC5J2wDyy7+Hj5Rq1s3c3loFKJFo9w2K/4ODw93jta1eJg1Vi2szVmDv3dphMDugOBPH6CqmEi89s0z9zDq39zrvOZ9e490PgfcklgsPxfYo5rK72CIMOmoLRfJeP777Hwc4tWnFC2OnT7nbIygNePffzBNEKLTGkFaaMFy0m04gglkR6wuUrHYxYMJvMKHWCISZUc3K1oOo04/W8finzo8rX8UfMdCsMqKz7rmqNYIQq8YsP63CTQErchoyCKFAEwu4FVhY5Stn0h2WpUUjiMKQVBQ0XnPoZp2ryqUTsYVuSV5c1UFj7ufmwl/1TED0pQLhsXUVhiCOrKPWqMGvc8rbdR9DqRsUMTgWAd9muNX2+6IZOykOxT08nhXT+pO7GhqXrlwz3NIfck1bquC04JbEISNUKCQMKM2G62OeovM5I3iaN2sTZglmckhUZulhgAFVqUtOhJbcJ1DoHScT524fcvrBK0bR2u4fo6phXIfjvHNvEZgPzqgBpas7qBcmV6AxMM4SsRerSGGaHU1przoPCuZD5a7TRVRL8pqrPqySoQNgzEf9bcLMreOHI8O56Ha1YSzoGlQtMOUMnLUTwEq32NjduT7h//2Ne/uLryDxiPl+wP7zJOLvBqNhn++o2gWxx953rvPjSgP5gC1Mx1iZRq5UkHuxOshN+45u/TjGf02612Ny8yGhyQrE4pNdps3Z+hU6SEhjBdvdZDvItijJDGZvuNY4mRPGE1Z7EbBmkShAix9Cm3fN1KPDZ2SzZ87ggq/rYJUhgU4N+Oqw+vuAIJxY9cNg0gKx5qvskrMCAlAIT2O1TvO+hwCeFEgTC0HJb4tTuJn+K6j7gImaq+tonPuT+/wNgjcAao6QQVl/tt+dRtmsehq/LzPa0KGct1AVWR+5FILN0xfLVTZZf7SrrRI+KHZta54tYBvAnsVi/2EZbuPm9LF8IhBSEpkM/7NINL3CQv8tU7GKyMTOdIVH0wy3irEccrdAONvB5vPII7gQ5G3eH7J3tUkpT8UJwbW38bhE10Pp+qPSjDi3nkxFRJ0aYECWcGk8oIqlYOMIhgbRUHI+OaK2lCOFS3RgoZwVSaWQUVukcrWSuXTYzr5ekKUBTu0NpCqXYjUs2piU7bZsVrZwZVBTYUP1DxSJVTORzZHmXe7fvcff2DQ7vXeO1s69S5CP2Du4zntznuHuDMjKUuye0OgFnt7fZeuUqwbzAiIDZZJe0vQLY3WHrzGmaTOfcOrjFH3znmxTzKSIAM52S3fmQza0Oa9trdNqGRCg2k0u04jMIEXImWgdT4HXxRng/a+PUFHY+2F0x/Oio55gR2g2SSnyvc+k6H5TmQv6w8njUC6butqZj+dIAh0oX4g0a7pAVW5VCSInUoIvc+eb54WyIw7D2TKAG80e5L/2gvcOa34vGv9XHBov8zOURjPR0FWWlVijshGuAvh0YzVs6EGwea+h7K+MMXmVQn3OqGtbGbaxhxbIWUd3XM56mrtc3icayqWVVzJPFdgWW7UtRJ5TxKq5KTPaN5CFIhKzGn2PCmHmSckFeRRrJPJzRkimx2gZdc0SBoEwidtcMmzeGjDYCZlFEqWx
IQ6vsYEzBRE4aKhrnjmXqvva2CqFnzMsdtnpXYJqBkcxu3OSb795Edu/yhS99GXUy5dgErJ1/hlCF4H1atWZxtI/qSwhsStFSlGhj4+Ok63tLeCS6hPFYIJMCsBsCBJFEiJL9nuK5G2OO4w6LQEJomJ9MufnxCavFgM3PbbHW7pCNbvPJO7/LIk8oipJ33/02YSLorIZsnz9DMJxzv/0JM1HS75yBsM3xvROU2eX+0ZhPbtxBhS1QAd02XFl5ntW1MyAk/+4bv8bt/V3iWHDhQpd+L0HEXeb5HmmroCMlZ5ILdONnkDKs5gNIpIzcOK+pg6n0xo1R7ZC2ERzt0lc2sEa4seIHj1d2/6gNaX74+IyUfjtpj8GVuNqI6PBgYzMlgZHK6qa0ptQlCFWl3pMCwjC0jeFmjHH3abIVX2yDfza4fBh0NJng0l1+wC1P38s0bqYb15duogceAVx9jTCUlIBs6Oke/uxKi+t+KazvaTNL2jIAN/Y5o6E3rhKSi8qXVQsP0A3xFgfSDYPpE1WMn0reEk/FfIXwQQP16T6RjBACsRCcDV4lFCkCQavUHOV7vHPyh7x+9jXSWRtRWA+U+WjOeHzCzr1jLuwIuLxKZAStRZcX7i0oyoL9swnXL2ZInwBJKiIVIGRIqCLQmpIA3b3EvLxDNkzodNYZDifcPrzHaGh49xsfciHtcHx4wMlMc/7Zl+mt9kjbHWRZYnRJZ2UbrQv0eEEYdpmNx5SLGUk6sBuglrntax2ACGH/mBvHc+7d+oSJDNjeaPHScz1MT3J7q8XZayfce2aNeQDBSsKFc5pg1uInrj7HeDxDr7T40k9/kY/2NN+/dh+hPuDcSy9gTACiZKN7lTVzhVwX3Avu0SkizPouxYmmbRIuXOyxWEC+KEkixTi7xezgBkGQsL4ZsXbuHFpltNKUWX5CKId0gxCAy2uvkagNjFF4N0AN1jaEchKGC/H2hMsYlgJf8MAKdeRhQxKkguJapAS0KRGfsg3vY3IZkzXIUv8WpyxRDfbuRCp7HGNckg9BaTwsgJISvw+ZfQ4PzHdh6hxPP/zOuA1gevDWPxzLdeefvqZakJa+EEgl7Q4TFauyQFC6DH0Nu9ZDyunlQCwdau5q6zeY1LisV03FiWncyhgL2jifZfcy9quGI/8Tqmfwslgl1zim7wUzH6Hn837ZawRCS7aDFwlJa2FEhdy5f4/f/eg36eqQ58NtomALspxwMuXO967xvQ9vcX7jDBuR4PzWBpOD64Sbl1g5e4a42+PMah8pbEL+2XTKYj4nK0r2D49YZDPybEF32/DBH75P3F5je/2Y9z+6aQfGaotgQ/Lrb7zD+c0EE0R8eP07iFuavDSIICYKIgKpkCokX9h37iQJSoI2u7QUHB4ckrQSjqcLJhmgAj748C56NmE+nnHltcuIu5rv/cYHXH3l8whOaH/YYqgLttcjnt04S7x2kTu3b5KVhmvvv4P4eIdy0GdyWPLcz71CoXxgg7BkCrglrnGiF6SF4nCxw1ZynpXuBt1Jn3xyTDpYYY9jlA4xYoERmnayQV4MEZSEekGntwY6w4iIxWxEELSxhrcSYaS1x3ibhYVfmwazgkDn52vquWOo3fqoZD4/GeuZ493y7G7SD5v1y+WxgG7pLMKVH7E7LvE2UpYjrhrqB9su1phj3GeE1a1Vr++CBh6gnQ1DnRcnxNJxL/Q1rmlWwB9pWP+bSa0fUDkswfLDnUSaxyoDSCWyNNQq7p20MTb4o9RWgV0a6qi1h9/XHnBDYknmb15gKnVDPSxqr5Fm+kL/vTb1ZiXGUWFvWa9Z7hNavHSEV2+JalIuh6c3+9VgipJQdRtNa9Blyctbr6GEZLp7m/uvnCWSh7TLFlF/jWfPrrL+hRcoJyUr7VXykwn7wynH4Q7rapv8aI+ja2+TmznkY4zO0GiMLohbMZqS/dkuF9QWZy+vIFXJdHKbF35izSa6V9t89c98vvJEqBNL2sQ2Bh9FphG
l5s73dvj3v/ldtF6Q5wvOnGnzyleeo301pdNt0zYdhLJS0vYzCSe7exzuHHPz+JB5ueBgcsJgcsLNoyHD4wOunF1nfFASvZ7yh7/zWwznU/TOPrN7x/zsXc1bzx6g46vot24iX71AKcrKkH4cHDEpb4Nc59rJtxnIDVQYIBAMOucw7XMAKB0xyY+ZiZJ5MUcXJ7RUi4HapC/OEqV9NIaJPmFXXGOuj2nRsWxW1Dkqql4XZmnW+3SN9k9nYPRqUqHd9Y1ADwe6RtReG9bXWVUL9qPKYzKkuU43orL4e/9CCTQ3BPQg5MHRszmljVMzSIS0Gzj6BkAE1D6BsNSe1H6HSw8RHuBMrWY2zfDFRhHNWz6i8U4fbiwcj2K3/iSz9J1w64UiSBLyLCfXBk1JBAghyUttrdPUi4Be6lTReHfTyDFhv/NhpcapHJZc6xyw+rFU+ls5v1LtWEk11oyP1jPVNkO1HvRJKroCW7uVji+ikjasYalxhTZ2TzjtnePrb6WUPLf2Mt+490e8d3+MCk+42nmJZ1t/lSgc0EoVs9mMw51b7B3fJb15h+wbb3D4j/81K9M5rV/+HMHPfRklByBdULCwEuMnBzcZFjm99iqoEG00rV6v8fSiohMuULgx5y0ESwkChQkEBRHrAQxeOsfnv/oSUbeDkT5ZjXaSjdUyt1dh65lNjNFMjkbcu7ZHKzbsz4/ZHWdcv3mLlWTKQa65M9xn9XKf8y+dQ5wckZozfDJ8h4Puq0TxgDtvf4/La32Kc20MBVES8/H0m7SSAfN8gZ4eIlqbJEkH8OMQMIJEbpCEW2gzYRwcslMWbKgeq+3P4xSYSKCvOggBx5MbtLtnQQuMCJDO89xLM0uOz1C5TC4BqxsPtV9ulV7I1a9phLYs2JMWIU6jQF0ej3rBq0lEwwdSeGi14oW2SwWeefrIZhdJ54MVq8FtjQsWPGbzBUVhSOIY9/Y0Hfjd4xq9RuMbe25z599Gqlr3tVj+7lOLWf4llkG1OqtxUOAGuF+chO3OVhQzQ5JnGcLgwFZjtDVweOnBTh9nqDRezeLVB6JWy/oloDJsOgbr2Z3PROUNcZ7BVnX2zLZWI9Sh2wJjZPX9k1YqZ7oGqxWNrG6+LPs2N81d7l/jMhAYwf3Dt5lQslEEPNv/82x1fpbZFO7e22Ex2cGUJ7TaAc+8dJb0W98h+/7HhEqwuLrK+JkLrIcJOGZqb69BKD4Yf8D56Dz4nWor5XstDtcumH4BdVpMZxvwK+3KUNFLI87/3Z9B9AfOBdZKV9olvTF4QHHjy3X+xsYq7UEXjGI+FCTfu8FifsS9Mudzn79A9/wG7Y0W0eyQ45U7jPM2ShbMP/yIxeCYyWSHfHcDc66NMZLFPCORXeKgzcH8iJW4hymmzt+YemA1WKUULXoiJU9y2qqD9tF1RmDcrhm94CylnKHlCGUG1Yj2rNff1Af+uGlkB0G1/To1iBiwagU3VExzjHhdhEUx4/a5+7TY+McDuo1seFpWGoJK9q0nqXD/O32JGyzSWdNtjlfX+S7Zq9aGyWyK1jN6vR6RB14HJB5wK9j0DdQA1ib9936sTXboDZLC+BQW9bUs/e3uUbF1UTHBB4tZ+uR1q+DngyAQAiUDFhRkRU6oBOQapRRa2S3YDRJVqW6WI1/MAx3tl+G63UtT77nl1Rv1fepG8vXzuyw3J4HRXr1gjxdPIOhCrWiymrhaOhLG+2Q3IXd5NfUbVQoBUkakrT4XWq9x9fJfIJ8EjI/3uXbjDZAz0k7CynqXMNpCO4Pc/L/8q8ivvUY2H6NffIb1+P/H3n912ZJkd57Yz4SLo0PduDrzptZZKIEqiG4ALYe9ZrpJ9gzFcJGfgB+ED3zgWnzqN8p54OJaI9aQw+menkbPNNAoAIVCVXVVZWalulqFjiNcmhkfzMz9xM2bNxOozixgIXZVZNw4x4+7H3Ozv+393yrHp9OudS0QfiEfrB7ykr7c7eguAK7/v384+SCnLCt
iPKlbe/bSWSSSjccLZseWm69MUCrxYVHWg7UJAN5/Ps6B6IISFKYlGTzi+ObrnH7yIdPRRS5cuc/1dxSTawIlTjg4fo9cXqN97TIP3V2mx5rh3gmcHDO4kPGj2/d545vXw1pxDMQOrZ0z1UOkPfa0aL8lEkEtPgdf+EeyxRVEVSAGkr5wUOTfFRvDl0mbQ4wIQLi+NsRagkX8u1sD6/lm/SYb/VG9lRiOcXEe9efrUejp8tVEL3jWOoJ/AFyHsH4iGaIW2YckdYE2HbHoH4pFYEM/NKUTxqMBw3yIc462bUFKZJJAyJgS9EVDngTTThvt3u8faNzVu7ELefIyatEuGGpru5hfpCI88B601kHerQFefx+OWGvIAc7akHbpuevWCeq6JZGWNNGo1pEoSBKNdBYDnssTEvGMh9uFugSNP16j02Y7T+QT4+EEznqzO2banwnh8GElvvas+4vUIv6rIzFEygNsKKEZraqoCIhQQDymA65lTwipSFTOxvgCKtugMQmP72r2H/4CYeaMx5Ldq5vIZMd/wLlQ7N36IuYK7Osv0j+IEJ+NXxfxsToERla4ZBztRJyVWFqUjC12HA9WB7x/8gve3nyToUwRNtJKjpyE6d4BlbYcXJ7SBoqkT43oocZblVGLdnTdb4XDGDCnmnLvEVvbWxwdf8wbf1cxHQ4xew03f/op17/3JoneBTmh3PoZw//0XZ470litGL9yiR0ZbDThtfA0SUjTTVaLOfcPT3nh4qhzIvvLxkW5voYcKslBj8K5OjjuARVo5BbSiuD7EmtarqLvm9KPIaxvVmchuFPewn31jLBYfxGQ4bt9zaDbxYCtmaq9Vr/25YJZFNNhxRmAik4BWNQ1+0eP2drYoFgJBnlGWRTkA1+UWBiBVBqJCE0lRbdGurW0hr4dzETlGq9BRx7PrGk1nRYr6AA4KBf0tR5E/IuOH4lc5xra9vAmejMdfCcG5x2QtbUcL0rmJyfsPbjH5sYmg0yzuTFjMppA2yCNZTSdUrqafJDHOfkZ+O1MxcDbGmJpPxcKk/d35tY+5EJcuA2abhiB8D5d2N56Kdi/bhIZur4Cn+iBGEHVgBYOG9qda6lRKkMnOVhFWTTYCu4eHXFw+BHF8iG1fMzLN14lTS/5jc1ZD2BdDrejT4ZY09+iKRGyxvpnaTmsTlmaU+rMN9OpW8etu++xuTNia3wlbA6Cw3LJv3jv3/D99EdcmFxlezhlO58wTYdcMXC8aXjtwhvc3D8Cp+mDBbvSOOH+IvDCejWGSMHZbIhyln/9Zx+TpR/xze/scLT3gOmjLa6+/i5abWEdJHpMpqYcqV8wunSNzc03KMtjRhkYd6mbT4Mk4dS2nD4sOJk79MWeHIvPqXOc0ysZoTRRUGp8kSgPH2sRBAKcCoqE7ONQBD3I+vEXEDYBgfDV0hxdmKAfqB5co9IS+odAuEe6MZOdM/Np8tV0jpC9g6r7LX3kWlykQfkikoKyVzWQAozxg6QkLCvLJ7ceUbagZUtdFNy9f5cXX3gRrRzPX7vExmziK1sKOq3Fx/32Wm+HTtDVV45j5U2QOMT9I3HhHmNM8Pp992HTvWnRc5wx5Kp7gY53xQNg1LiN9VRKYyzWWKajIW2xQDqDFpb50TG2seztLymWS0ajnF0Dk2HKMA9tv5+ysUZ+TwjRa6M2OM7CTk4A1hiVEMV2HC7EqAVwPorkjEf/rOX210WU9DUuorJva0ueZBwfVcyXNY8fnpDYislsRj4cUpuWYnnI6WLJ/OiQw5sfonTF27/1TaZbEyr9kGGuSdJRiMGO2pFfmnSbbKBtRGyHHiiDDnDDyIbn9tOHf0aqM1a2ASwf3/kB5fA2hufZ4DLzpuTj+UP+2w9+HyGhcSUny9scloo7iaatW1In+J3rb6IuPKSRNRkbHWjFeHFBLLm6Rnt1Wj4gBZU+ZrHc586dIZ/evM23/uGSxaJkQz+HuPEaic7DNxJYJ9jZeZuH1R8h9H2Oyj0
SdYIrBIP8uwiuY4FhOqJsGh7ffkgqJYkchIuvLVDCuIgIdYGQFBGKZV9drFfziEH8Ym1dI/xm2HGI3bayhhMuWsC+RVLv1+nfi8jSURrOAd7B/6SP6En5amovRM4xamAChPSTTIZ7j+9L5GeGCnxtXOccjXNMhiOuP/8yQsJouMFousFoc5PpeIR2LdPxiDSA7ZnMskBTsDY8ncS+d+HPblxFvxd0r0dAPRPv3F/nM3uaW9McWUsMdP0V12vXRoAj8f2dLI5Lk+u89cLVMH79sX2wmejObOJG8qRHS9AVxwaHcBYdPtWGY2PkQuRv43n6hUfgdMPENEGDsGc/89dNfvqTQ0rjWB0esb27wWi8ReIKbn/6KQ8f3GLv5IhRrtnZTBmOMgbDIQmOh5884MPbj7m0O+Ef/kd/l+n2Rf6Hf/WnHO/8D3z7hX9E13bdResgJv6GgaRnaLrNGbqH4ESfA+ic4Pr0Gu8/+nOq4gBmL5Nun9Aowbw64L+7+c/55OZNjk2NHTtGeUKeJlSLkqp16HbA65PrfPPKu6STKctmycBmPK5uszt4bi1Bx3UWUKd0dF0fLEJB4W6zsgdkq7fZ2h3y679+wPYLB0zs62Sj1/BQ4tbmvmE4uMzV9GVqfdvXjJZXadqGpfgIaU8YilepZMOkyEjFilxJtBhE3THcXAhU73RcP/ujI15iQayXgF//XG+debpAeO6242dDwZrOmebWnoPjjObYcfjheUbLfI0KInxvf53Ph9avNA043q9/kY5j6AGjd1T1oNsbEA4QynFlZ8DV7RvI6PwSAhuASArXa9Dr16M7IeujEgn8s6+eHd+zx/diPwdfxBN/RWD87IFPP4EV/fd3rANZTJRYdxKuA3g3Sp9zX2v3EjcTu/bveEvB+9vfpn/XV+jvHWbrVkIHxk+++ddE/vt//q8REna3N/j4ww/Z3M545cY2L7y8xVvffJcky0izHKTqN1BjkKf/lvuf3OH3fuvv8jsHW4iDQ9xzl7g3+duM1RDhbBgvB052PdViKxnfUn3dUee68fVWYLSKPLTcmL3MxeMd6voAKxx6nGFPTximY6Yjx/a2Ybud8YE7ZNUahDH8jtlgtnODq9feJlUDnBC0oqG1lkTkFKuK+/Yml0bPA/39dk60OLecQ2eOMnmEq+awGDP/oGWxeMzGlZrNwesMs9dxLpj0rp/HXiSZ+gZDNWEwfEBrG1q9waKscHZOVf8Mudgko2RzmFAYSF22VvTn7JxyiFBPJCpqERhDC6V15OjScD0oh57HAWhFB8AIE7dE4rbjOkeb62PYu3uQ0fyDDsHimz1QuydGYl2+8m7ArhvAIOKJv8OLvXUQEigEYWBUSKqQsdIcQoCK5u6T4P45ItaR/dlHPv3Ap1jwTz3d+lf9vJty64FZ8WP97tTXPwAnJGcq73/m3OLMhvb5cnacuoQLnLdCBL5B5pq2203igM7O9WDsKYc1QP5rJr/zt6+wMd2gbS3D6YgLF7cZDMdBr3K0XfSGW+Nc4bW//xvcePUyv/1oi63b/w43Mvwd84gf/+3f4yjPOmrBK08u9HgMyQnrS9HF+gpx/Nbpqu5FJPD27Dt8fPpz2qSkXu2TjnYRCIZuyBuX32CqL/Bt4I/v/JjfHl3hsr6Au+LrQjhnPX9vNcb4XKxrmy/y/33v/8d3Xxmwk+54rHBRS+0rjSVDQZMeYNuWeT0nr29w9ZXn+dEPf0KbLhkkb9B3Yu19/dFSjApB07yCLDYYbjxAySW1rcCW1HVJ+uCYZpgwL2qyNCWVI6LfxK+53pzv5Yk4aREj8+OMDg61zqnmOpqwowqFT2RwYRPsd4z4feIm0tmTTyyzcFyHZ5FucE847T4rX1mPtHUTtRuPuCshsS58mafgUhekEswHIawfRCwSX5bO0xN/Ue0qAo/43J943BmKRKx9BXrCQIY9V65p7PG+vuje3NqP30jCJBE9Ly2Ff5Qq/PtzzxgefPz8+vX77xa12nD
1MF96hixy8f05pASlRajs5u/WV9zrOWBrQ6ryXzP55ve+x4tvvsar33iTay88TzocBf49aH7WdT+safRCKoZXrnPr1vvYRCAu3UDu73H1D/8L/AYYtUXwMGQ6MIrBYHGhW1w/nwIgiADG1tnOEfvc6AVOiyMeFT9HpDO0GiOkZJANabXjyBzy/OR1/nfP/wdcyS8iLr2MsBJrfc1ei8FWK1bzFcL59lBv7r7Ff/2j/44H5V7AuF6zA0jyjDYrmFcL5os90gouJy8ymUxJ8oLdjW0E2Vp6eNyI48Yt8GEFEoGhrrZYHb2MZEKWDBlpGKqrDGa/hnPvcnTs0DYlySb+cyJ8HojEZD+H1ygzRG8dhDnqDwlAGDa7LrZ5HQxDWx/bRTJAn4OJfzIOXIi3ElHBob+v/rrhmk7jvgBWxVeRyvnzU19mRQjvHRYBVLQItLPrgUyvgUkfxeBrLwjb0wmePok73GcB94vgdz2G90l52hj0lb4+b3yedrbAfK7Pg7+gPLUV0FOuuU4v2DPHPf1bdv7psBmaEAu9zukCGGdxVvbnd3iKAR9d0dEN8TwhY+03rv51dKedy7l8/fKVaLqfBbGgJREWuehB4qmNHp+wWcUaiP1Vz/fvkjlY05LjDv1LydNPcCbI+0tKH6p09l7j+Z7Ulp88bt2Kkc9Uwc/lXM7lSfnqkiNiEsgafsZiv/0ajWFV8djAaa05knwx4fXPfJ6++nXJ+rU/uwFIscbW/rJ8p+i/t1s/6fodBI5qPSrtyff9G2unjVq/EGvBtuGYM5xueDkE05+5DxGy79yTFdPO5VzO5VnyFTnS1heuV2ul7sGob3LRUyQOh4qfCz+RQo9JamfrXP770B7D3X7pE0XA/3zaoed9+sP/Isr5+r08k2gIoBfDXdbDys4e19MHLjog1ugdKdecYWuXkFJ8rhXioKvTG62XczmXc/ly8pXQC0r3ZHQsArzuCV6v4uOdMWdVwsg1xk72ztrPw7i/0rLOlX6lF3mmiDObk7UuVJOKn3+yYtn6qdc8+OHg9XO54Oh5VvbNuZzLuZyVr0TTTbSgacF7/6K2G4OLozYruv/6v+nBOa7sNW0qar6/rHb7l23n8xeR9Spln3GqheiBLwvGX8hhr4V+PXGRtXPEl9buy/WRGOuUiGccAg0Uvoe1fYGQWJYz8tTWCdS5qnsu5/Kl5asBXaBdD4x1IbijC2PyL/eNUvxBHpTjZ54aZ0DMBHlWoZdnSYfnX1o5+7zrfMnrr1GmvfPwS176MweKz7wl8NnmpoPdz7kvFzj16CQLYVBrNG53sxKwZj3zZo2iCLxCaHSCQ5xzuudyLn8B+cqSIxyhpK90EHKde6B8suQEdLF+OIRcqzL0hHncaaR/aad50LC/FNf6SwIunwXcX176M64XC/qi2xKAWtvIonL6tIy2GOrnbBzvUBjG2LPHBurBb5Sf3xPqXM7lXHr5ShtTQjBD13rI9DnVHno9rK4TCE/BjjVc8A603uHWXecvcZ9/EUahD5l6wnR/yr/OJjI/eSLWRuAvLzEj50zxmXUH3pljBU/ucPHzT0+ccWfu0bmQFIEvPYntNd7z6NxzOZe/mHw1mm5wsDghuuLAMdMqLmYRzFglXJeC2KUvPMkRxs+5z4LeL3eTaxf4Jc/0RMBCJ3btoPieEGtNd//SIog9vZ6MPPgiCY+HWG81lqhzEKpyii7lui+pvfb59Thkhw8RPJdzOZcvJV+Rpvuk88y/HtNlY+lyz82ereJ+xux/6lr257b287PTPu+ePkeP/nokABTE79drqH+prsVP43ujd+xzrt9/uNeUu7efFhPchZf1em/UcH0qpguJruegey7n8mXlK9N047qVInKy67XWXc/hRvXPcZafhE7ziqGvn9HoInj8ewna/XynFYR7e+IocebfT4P09UI+9Bp7R6t8gazvQB1FsDY+XSrwk+VzvljWMftJKuFZG5OQXk3vwszc54ecncu5nMtn5as
pYi7WmVvX0QIOuhYpvqj20xd3X6vS1wiQLnxARAfdOsid5YM/T9aDKZ5yxSeP/sKjxOf8+8vK2ZpJT781D9hdAcB1vfQsp9ttPH+xe1jvXuH/XiuPtx51Ef9YB2rR88HnvO65nMuXl6+k4M25nMu5nMu5PF2+stKO53Iu53Iu5/JZOQfdczmXczmXr1HOQfdczuVczuVrlHPQPZdzOZdz+RrlHHTP5VzO5Vy+RjkH3XM5l3M5l69RzkH3XM7lXM7la5Rz0D2XczmXc/ka5Rx0z+VczuVcvkY5B91zOZdzOZevUc5B91zO5VzO5WuUc9A9l3M5l3P5GuUcdM/lXM7lXL5GOQfdczmXczmXr1HOQfdczuVczuVrlHPQPZdzOZdz+RrlHHTP5VzO5Vy+RjkH3XM5l3M5l69RzkH3XM7lXM7la5SvphvwuZzLXxP53j993onQWbNvNe/C3yCERAiNkAopE6TSSKmQUhI/hxPgRPf52HdQ4Ls5O+ebs1prwK03rG8x1oSGoHD2PqK4rim0EKJ7z5/Trh1nwtEOsIBFCIcL/3bO4uJvZwCLdQbnDNYZrG2x1tJ1j31CpOz1s76vojvTlFQIEPJsz0U/hv6+nYiNUCVSKoRIkCpBCh3+Fv6+pUMgEEL1YxG7hn/mHrordRcUIkEJhRIapVT4kV0zV9/UViJd31zVIXBOhmcpu9dwDofpxrT7wNoc6Z+UH+//9//px89s1XoOuufyN1rWF6/vQg3gPMhICUIhpfY/QgXwlWcAUAiBCEZjBMezwOWhRilwTiCcDddVvpu1iMc8TULv6whe/oIIHE5IPLgK6K4fgSGChAUc1nqAdULgnMCtgb9wICJIW3+v3fd4yjj13yl+3/61s3/3n3W4fkyEw//pcNbipMVaEcZV4qw90747dgb3fzztXvrXhBBhg5HQAX485lmdq8Vn/imwoQu3e8rT+Wx78fXbfJacg+65/I0Wrwl6NUrE30IipEJIiQiAK1AgZABXD4ROiLVPBm3OubVz+YXunENYh3MCi8AhAsD53zwJIo6gsYUziTUtbU3zcziEWNdAQcoItgLnFEI4rDNIGcHW4vDgK5zAWgHS4ZzyWrj47O34c7snNPAoPfg4J8JfLmwO/ddxLt6vQCI6g8LiwNpwfwDxu7puLF383p+DaD0Ih2PEU14HpFy7of6uu/f9PTtwfiML20/YMNxTAFt0R/C5YP5ZOQfdc/kbLc6JoDlKEBKEQCqF0AopBAIVQC/QCVJ6bVGKbmEKBFJIb7RKCWs4GkFMCkFUQk1AIX8+i7WKJxElAoQHOn+ts6/BGdjoNEDR/bhgCksia+A1XOcE1pnuvDgXoE0FU7o/31k64yzw9u+ta5Ci02JFeM8Jr3f3tx0+Z50HfDxVYq03Lj4ja9pt3MyevgGEg598RfTj2B3i+vGLO4BbpzLi6+HYuAH6F3paxzkXTIWzgP4sOQfdc/kbLp7DE8G0lUohlQIkkgC2qGCmSpwAIQXICNYB4CIcdP/xq1UQTNw12kEgsdZzrMIqpFw3j8+axJ3mHV6TUq7xigQAC9ytiCZ8/DHhINkfK/zrnpiIYOIQwiGERQiw2DUkXecuewCOWv3TqAR/zu6j/lc8lejxzuEQAbT853o+/CxZ6rrrr18bPgd8w3We5Mh7zl309xXnAF7r7nn6Xnv1Tzk+YdfdB8J1z8s98ZlnyTnonsvfaOm1NIEUCon2P1GzDVosQoEUiKDhrlMKMvKpcfERAFlER02PPjJwhNZajDAEn5YHnzXHVP9bBoeS9JyyEDhhPX4G0HLrAOlsZyxbS+B0o7asiHRpYD09CAqFwyGF1/YUbaAhguPrKWRoD35ngc/fU489nfEeNwMRv5wAZKA8/JHW2qDpeoiL9I0M7z8JvE/eB3hH2BmttnvG/WsRQv13F2u7Qgepa5S840nnWa/+rn3n8PfT+OYn5Rx0z+VvtPTUgfeoKxU4XBHBVnROMyd6MI3efE8
riACKnuvtzo3oAEbCmknrENYiROMB0AQOIkQY+PsS3f1FB5OM/K1Q4fwOIQxCCKywwQwWHcBJaaOu3d2XlNIDi1NgZadlWysRTrNuT0eHn3fLhcgG8XRu14X31s32iGc9Dq3di5BBd+xBFwjAK3HGQYwQEY6nKcBnrk1/js++/8TrnYUS7ztqt2LtVw+0Ub91a695IHbd0xY85cY+R54Juv/sn/0fHU5QNzWHRyc83tvn8OQAmSnefOUlXnn+RXamMxKRYJqW4+NjPrp5kx+99wEffnqH09MVVW1ojMEYg3PO81nhe6owqFpKlBLEqSSF6LidaNZ5FV5inMPYFtsaWmNx1mKt869jwQmsCwPVmTIiEPl2ba/yO52M5pxUfod28aiw0wrJaDjmjbffYXM243f/1u9wslwxGI1wrSFJEhyWi9s7rMoKYVv2DvZ5vLfPwwcPuHvvFj9/7z3qpsZJhUsUVmusdFiH995a110ba3CtxbUNrrWY6IQBhHDIoF0pnOcJowfbRaZJhE3YYdf4KOfWjGAhgmnnX7ei58sc3doKH7Ke6wsHez4wTsY4DV1gusJ5ow0pLMJ5RtGPf8+HxWvUxw+//Gz9CkQJ5TVcmfRRCjLwu7Jn8SDME3rwi3+r8Bkhgwc+aHGdWb9mbgv8czbWYITnPhGe33Q2jpv/UJz7xHCqCMTrX6AzbeN/1k3/qOFKnJN9aFp4zkgH1iGl8wAdzGMnEoQ1eHrCrV3P+md/xvL/LNA5BGJtDXavddqtxFmBC1R2vM9O43W2m7Qe8P33lC5uRPBZx16vSXvt2XVzOf70QxaODbSFPfMdxBOfiPztk9puf8z6+Hwu1bwmzwRd2zRYoK5rnLMorciHI7Z2Zly5fIVrl6+wu7HJIBmAtSwWp8wmYyajAaPRgA8+vM3ewQmybmmEpDUmEOZ+1zTWIhBY61BWoAJvpaU3O4QUaCE8IEsddkeLtRKrLU3bYlqDMRblLFiBcXF39YNuCI4E4TBOdkMogkkR/ycR6EST5TnOOS7s7rJ76QqTyZSLu5f4tW98k90Ll9BJGiaeRCUK27SkSrJcLUAkOOe4mAy4cuUaybe/x97eI97/+Bd8/NGH3Hl4j+PlglYKWucoqpK2aSCYe35BgwgmoZDSb1bxgXibMihPQVMQApwE6eLmC8Ew7DUtiGtWil4D88qVC/ya/5wMbpfOESL8PflNCO8MCvpYbz7HEfQaUTgc7/HvuT//RGRUL7BfZoZ+1RIiFaQM4BtMeSElLoypEN5x5kG413yFEKhOO5YIFSIe8JTAeu6RJIBa3MysjxZwLm6YNji6ekCJoHsmJjhKoCMiYDwJfiKATwRY/3HZAZbHW08y9Nq+DeZ279iLPGs0n8Wa4tLfSgDAbh7FvSMCp+g2Ded6TdxZrwidjT8O38VahPTRHvF/kVL5zHXXtkbn+k0iOv/OjsvaxvUZXnrtu66B7lnA7oc/KOFnOOYvI88E3aap/fwwLWmSMp1OGU9nXLlygcs7F9nZ3GZ7c4tBkiOA8XhInqWkaUKa5AyzAe9/cosHjw4oVhVVC9IIjBDekeD8pLFCYCxIBEoKnHIo4dk1pNfutBQoJbFIrPPxfUpAjdeYrfVarwwT24YZYMJgWOd3Shs0WRdGUAp/bvCA9MZrb/DKa2/y1ltvcuXKdUCSZSlSSk6OF8znS5RSWCFJtEZJWJQVy6LAGEPTtgzyjLaFolwyGA751jvf5s3X3uH45IjVasndvfv8+IOfc+fRA1YGvxE5000oIfCL3hkPaiKaUHEqh41ChEUR3+qnxmde93PTf18VJzgOI8NE6xyyazu4gC5cSdA5X/xY2mBNhrFbu7sO8LsL07/qovYjPgskvwIRUiFETHgIP0oFjc/FVdVbXER6QSNj4L30wfcEWqLnXxVeg3OILknBgRVek3RJ0D5dAAAbrLL1a/amcBRrw2e6mfykLhe/nOieT4QlF56UjMq4URjOOtL8B7zd2Wv
HIV4YYI3vfcbI9lyqE91e04F/fN8F7jmCr9eQCBfvv0cXPta9tCZPqN+sAW746d+NDs8n7j5aZt0f8dqsjWB/cOeUE593zOfLM0G3bhowBoQiy1KywYA8z7ly8RIXtnaYjCYMByOyPAcgSRQ6BpELiUo0+SAnG9zk/r3HnM5XNE2LMYIWaK0LXJHFODA4DBKHxMmw6I1DSEmMJ/GTHkQAYWX9Di2lwlgBxp9POLBCIK3XIiQecGWwHuKGq5QiTTTbOxd45fXX+fVvf48Xb7yEsS31qmCxWFLWtacshEInmqZpaa1jOMxBSOrVisWqoG0alNIcHhrqumKUZ4zGY4xpqZqGtmmZDkf82suv89zFK3x69zYf3v6Uj2/fYr5YYJ1F4RDOUxvW2WDWh0kWzFPnglYkfKxl3I19HGQw/AW9iRfmgwISBBLvMInLyToXjo86sr8HETQbiQxODeGzhYzD4D/gteHghT4ztaPS7egcF2f4Svclp+hXK167lYFi8MkProtbiqAbtHr8ZujBNkEp3WU7SSmRQgfN1COaCBxvTIQAT7Eh/SbnrEJa3W+4zoebybDZhYft5+sTWlsXqvQUwF2PAHDxOQV1t/PASxA2gLuLmq5GiLbTKnFxTkWP/zoBFS2XZ0mklVgDUtFxvX5sBIEVJM6KzlHp+u/Tb9DdzPrca3oNwltUbu1n7a7ObPiiswzjN+pDwp4Mz+vvoXs0uLCJdd/1C+SZoCuThKatEcKhkyFpNmA8nDAdj5kMR+RZjtYJJNprQmLAANhyxnNW1vl4RiVJtOLeg8ecni6oqhbRCpwxWAvOmC5+xVnPGxknUFhaBIjWD4tzHXcWF3Kc8P3uL4JTALCmWzcRsaz1gCOlRumEV197hTdefY1XXn6Nzc1tTOvY39/HOctquaJsWtqmoWxbnHXkeQ5CYGxLohKyLKNpGg4ODmhbC8ETPBwOqMqKVVUzHqZ+ApiWg5NjkFDVNZuDKb/+2tvszGb8uw8/ZLGYU62KwLFaBBJFzzHLsLPKOPHCZI3TRYSJFhU0F036aO9HE1HQR2QKTwxE2O6nYh/uJIN/SCA8T4zzXnfCZ8WaZm27KdlxcBaLdGEhhzCbpygnvxJZd55JqcPzi1q4DRpgb32gJEJH7VYF4PXg21ETncnL2m9HTE6ImpILAG9lAIjIrQfg7sDC9qCxnoLrTXcPit016DfoXvxzkwRT2K5HNEQdTeBD4wKohkkknFoDojDHwt+q26KfvF53h2v/Fp2jMT76LokCTzVY2QMtgTaLccRn58pnM+ZYP6vzXLQTsrOo14H3LOcdX49ha+tW5ZOAH0aqM+ICt+/imP17AN08G4AxNE2FcBYlJVJJtNIkSqOVRmrVP2UNMtNkJmc8mXBhp6E0LWVd05gGpEMnipOTFXVZIxqBag2NgxbnzZag8Tpr/eQLg2GdV2KVlN0El1IgZRI0Be8X0DqhMR5CTFl6OsH5BTOZjEnSjHw4YmNzi52dC7zz1jtcuXQFrRKsaWnrksWqwDpLsVpxfHJKURQgJEprTpDexxJMy0SnoBRlUVC3LVZ4beX05AScYzydshoN2Jz5zUpJwcliSWMcWkiUFHzjpTd45dpz/PgX7/HeRx8xPz3FOoFKUoTw1oYUgs2NDUbjMa51HBzsYdqm01ycEJ7ADkRT1CS6yd0tVr/gvAMupH2ubdHrepN1BOcmnearkH5hyrDAo2YWNBMrHLZzZESNqLO/iBRDz/H+akXGyAWhEeiwtck18AnaoRAIpVC653CVUkjt6zF4p3DgsIHOIO/y+EOWk5PeshMOlEJYhbLaO1PjPQXFwkcVWG8N9mjwhKzvYE+Ot4Puu/i/LI7oD/AoETnVcJyVhInUfzacvqNCOnogJlWsgf6a2R6v4bpAOT/WNty26JSBSF34yAoimOEtuajwrzktngK48Vzxnw4rLALvQ7LWsh51cpaHPavFRt67My3Xzv1URkx85h/PlC8GXWsDcFm
Ec7RNQ9M24BwqLlHbhoVrgzdUkmUZo9GQ6XTIzs4mi2pB3daAQ0nF/HSBKgVVHaIVjPX0gLXeqRQnnAWn/OmtdRipUM6RJnFa+51IKcVkusHrb77DYDjk3v173L//gOPjI2rT+uIWMuXC7hXefvstbly/zng0wQlFVTe00lJXJYvFklVR0LQNprWoJCUVgsVyyfLwBOEsTdMwHI0ZDEeMR6BlhrWWtm4oqppVuaBtDYN8QGsNdVWyKgpm4wEb0wmz8Yg0zbDGoKWgNZZBNuIbr73F5mTCx3fvcXCwz/J0DlbznW9/lzdfe51XX3qF5emcP/rBn/DnP/kh+4/ux/SmjvvtTTSCFemi3ypmOGKlCNy3Q7h1HmttrRK8uiFVVQgXloVFCoFVUasW/fXBa0fW83VRe45aVfy3C2q4/HJz9CsVH3Fw1jnWj0Sgs6RAKolUGi0DHSG153CV/yHE00ZLINItkY9dd2hZG3lfiZMSJ1Woy+B8uQchOnz1lIZ/DjFutYtNpYMCuqPFEwCC3zCliJUEwmbnQuJE0Cj9Pz0wGRcZ+iguHLMO7uvafK8dx7nUfzpuRj6ao5ubItBX4uzYd3e+ttmdmZTxik9qoN3bImwQXuMVCKxTnaYbQ9LOni/QCevxuPE5Ruuh2yVkt87WL71G5H2hPBN0tZLoJCW1FqQ3D41pWK2WlFVJ09RkTY1yfhd2zquj1rZY4XBakGQJg3HOdGPMxmpCWRdYa5DKoeYKUVbIqkU1hrY1tE4gjIn4jcNhDBhrUNahlMOhUdIhhV/WSiqef+Flfvfv/D1uPP8CL734Mk5IPr35KZ988gm3bt3i9PSUi5cu8tzVK8xmW+AsVdVwMl8yHg48MC4WnJ6eeDMSicGDLm3LqihYrlaYtkYJwWw2wxrjTTXnvJPPtiwXJzSmQSWp52Stoa4qlBAshUMISaIUCkuiE4xpEECSZmTZgCu/domXbhzz4a1PePjgPv/Tf/SP+eY3v8X/+Pv/kv/+X/xzUqV569XXcBL+9N8WHJ8ce062mxzBgIv7Ic5rV1EjcYHCkYTA+rWp4td2t7DBe7itk6hOQ/YHqDgJbdBqRbgD60/kuuIpcXraQI0E4MV92Tn6lYrovPfijAbYabcBcLXynK+SIZog8L+RD/bA4rX+uACjw5PuL688yECz+JRijZUWLdbHpF/8UiqEExjXIKUP1xM2FmLx3+AzWu5TBtaftt8gPXkCAuXvUpieSolmeNRhhQiREIqem++dbpHv7ewrcXYMo4bbbxH+twz3LiOKRTAO23sE5fh9rOsPixE1/YMUfm4TlAkh/PwO1dSMVf5ZIrHO+nnYkcb95uG608Zd7wnfQ+SLfwl5JuiatgEcWifoRKO1AgeLxYKj40Omg5wkUeSZP401BmsdVdNQVCVVU2OwSCVIs4TRKGcyG1LbBidBJYq0SChXNeWypmoaVNt6JxshHMZ6AGgdGPzkFMbSmhaJRmcpV65d5/d+9/f4wR99n//qP/8v+O6vf4/f+72/z9bmBa7/zvMUp3Nu37lFY1tWy5L7Dx5SVRVCCoaDIavVkoePHlPVNcvlEq00o9EYJTWr5YrjowNWy4KqqhFYhJSUZclw4E1NZ7yWvFzOAwcuEdbSNg1GW7R21HWFEI7hYIBMEpI0B+eoakNdNlA35GnCcrFgNhjyrVffYPDNb/P2m+8wynL+4d//nzCdbvCf/T//r/zs5/+Ob37z21y9fp1FsaStWkzksegnTAybE4H8dYC0DiUc0nqN07qwuXWcYdQS6DOsjHfcSSU7Q1CI/rcRhIIurv98uJveY91rxXGxuL8C1Zw7DUtEzjlWu/KedKECjRAANmqoMhbE6cKtIh8aT+zWIjXiRkYPN8JH5hDAu4M46zr/xHqChHMKX6rAdcCyvvhd1Fx50vT2Ys84k/oNr9MyXU8xyGjyi/VrBR7YdR6FJyJqIq0hei3WyVDPQHaUwdl7FGsOvv5cT+qREV+j3SS
7eRUdfAFzifRHsO7C/LRWYmXr57kIGW6it876qz8ZPuYplf450F33aTzDU9mfp8gzQbcsl0ihUFqTpSlaalrTslgueHygmAxzsjxFqxFCOExjqduWsqlZFYU3yYsVVevjfHUqGIw0Y5OBsKSZpCxSllnNSlWsViVVVVELAcLStgYrOFNuzhqLFQbTQo3g8u4l3nnn1/jn/+V/ycc3b2IELE9PqKqSb3zzNxmNhmglmS9WnMznOOfjY4uiYDwekSQJ5aogSVPmqxU4y3g6RjjFqio5PjpmVRSsqpK6qdFSkCUaZ2E4GmKsoSqXqMDlKqEoqsI/LOlL9+lkRqpHKCE42NtjOhkznc4YDgdMNrdYFSV1XbJcLkh0gqVhkCa8/NwLSOc4Pjzk+OAAV7dsTmfcuvkJf/r9P+C1N9+muP4cn978FFvbzkrqIS9OJotzootXdqJjz7ridWcCxJ2fzDZweK3wHLAKsaRCghJ9REMM17PWYS2YtUwmv759ZK8IGo1y1sOF/dXzC37txGI3a04w4WkHueYkk1L0gNvxoH1CROdY6eAh/hWiQQg1baO5LixCSSQ6xK47hDQhquCsCS2kQMTx6gDMdWDbpeTSIUN3D+tOuI4gWKcvwnchJiV4WPMKRvg3rHOgcm0DDTAXx6OLKfbhRzZQNAhC7Hd85n1MiwjWU9y0ukiRiLRrQOi/s3coW2d759yZ2sLxWA+altYDr+2dYNHhGzPe+vHsr9MpBx3A9wpEV03uLzGFnwm6bVsjhCJVEq0VidTgHIuqYu/4kO3ZhM3ZlFGeISU0bUvVtFRVzbJccbxcsFgtWa2WVE2BoUaljmykcDIlzRRZ1pLolERotJQshUDKGh9UJmitz0+Pk9LhaI0vQTccjXj1ldf4H3//97l56zZaa9I0YV6U/NmP/hydDrly+SpSCA4OD1lUleesTMvly5fIs4zlqsC1BqxjNBzCcIh1MJ8vKKuKsqlpbEumFa5RGFPTtgoJZDohzQeslguaqsGaluXylLquqduGLMsReIpmc3ObNNFMplPKYsnpfIGQgrppSNKMJE0ZujHz5YpisWKYJXz0yScM8gGnpyecnB7z8MFDdJIy29yiaVv2Hj/muctXOT4+5mB/32fk9fNtfZ6GidJzWVb0YNxppP18hO69YDo6C07ipHc3OekCiPjPGwvGOv+84kR1EidCiEVnWjpi4tVfBREyaqkqaGjeiYVSSCV9lIJUawDrtVLWUocjRSHXwLDf+gLMOAe0/etCRBODLlrBhsKPMnBroo92kEEzttZ22uO6FuZcSG7oU8DOKoxROiuErvYCQdt0IgCsCDEza3PCV2EzYQ4RzO4AbEJATCw5k5EXL0DQVAO4xncEfuMRUVNlDQD9C32EQTyVxwDbjWtIn/gc9ItgbG3rHfHCQShc1KnQ3Wzsnb9CRA0+jrXoNoW+6LzqgPizoP/58kzQHQ7H1E2JcQ1gUUrQGj/URVmzKguaqsY0DUJrjLW0bUtVNyyLkkWxZLFasiyXlHVBa3zol04sI5ViEkuSJGiRIm2cvCALBa7CigZazzeZ1mewmbD4tZDcuPESD+/e4f7DR4xGQwaDAWk+JB+PGQ7HHBztszGdopMkZPs42qbl4qVdBlnG8ckpUipWqyIAuUQrSbVaYoJ23rQNi+WCYllQl4VPiFA+c83ETgBCMF/OadqWum0oqoKmaWiNRemU1WrF48ePeO7qVXSScGF2mWK5ZH5yikoShCoYDgdkgwEIgdKKpih57/332JxOOFnMOTk+YTbb5vKVGwynm7R1Rao0GxsztrcusJrPMcbQti0xCiR6iXveNnJ1IYWYPsvMBBMT4YL59USMpwMrbTA9Y0AbHjQi1ruoEXlKw4mzK9+t/cfHGH/pefqVia+xICICdOa2lLJLljij0YbfiBjF0HOF62Z2TzmuhyL11/ChTE/eDCHbMXDnPdIEc33N2deFOflrwLp5G4AkfLwrywh9eYSnjQX
rjkTCOUOctVDr2NSZ+3ROsD5krr+HOCL+JxYGQthubHw8cnAMCkOMHu+/9Nr3Wbun/t/+S/U0Q/97vSKZdS2YkAAiVXTBR5XiDHe8Tk/0e0Yfc92nLbu1Z3H2vp4lzwTdzZ2LrFYLynKFs6arPi+VAhMKIAv/BZVQaGERFsrKg+6qqFiVBauioKwLqraksRVCQZoq0AlKWYSxOCOJRL/Uvjq/KgVl1dC0kgaBNCaEfvjwr8sXL/LeT3/KcDRiMp4wnW2wsbnNbGOLbDhAq4SDk2Ok9BEK1jmmkwm2qTk8qtnd3aUsKk5O5ozGQ/Ik4cHjR9i2Jc8SThYnzE+PmJ8cY61FKUmepUwGOdPplMlkAgi00kwnUxIpqcoS03it15qG1eIIXEOmFYdHh2SLJaPRiOlkjJ5MKMoV1WJJW5dYIToqxRjLdGOTqq4oViUvv/ku+XBMsVzynLW0tmUxn6OU5LXX4PTkiJOjA5+uighZY8HB5qK56CUmqNqoq4i1ilPht4UObJ0QkeJDiC7YKMThOqzwXndpJVZYVFfMWgZnxtkQNlyfwfarFgfdmMnOlJd9aOIa0PoECAUhtlcIryhIF8MmwxSOdRecRTgbNsEuAR0ROkaI+BRE7OwQtLI1jVmEHc112Qp9gsnZNb5OafjrxKinM/VrA2D2GpzrzitC1mZ0lsbaJTHJJiQzh0QGr+0THYmyL/jjgVt1/KkALAYnfEEdJyxS2Fgd02uuwlMbAuOv94TiGiExhqlJPzG71/oxiaFnLjzPXhGw1mGMjyyJyVc+Hto+QTOE59QVrV/f4MK22m2oT8ynLwG8zwbdzW2yfMh8foxtSuq69g6stRhFJRUSiQrmlrGOqq5ZlRVlWVFUFauypGwqGttgXOMzyaRAJBLbStLcYRuJ9b4g0iwlTTPSMkEvC4pVhRTQSBEy0xTD4YhitUJnORdGU7Y2dtjc2mQ8mrCxueU9w6bh+OSIo4N9yqZhY2OT1rTMFw0vvfwStjWcHB+T5xlt3XLrwUOMM1RFSWsaHj18QFVV5FkKSFKlGA6HzKabpFnGcDjCWOfpgSRlPJqQ5kPSw5TlculBR/gQlcVy7heXsWxMZgyy55jONsjynHpYc3Sw7xNKnGO5XKEQKOkXZToYUa0KsnTAlUuXaEyLRLLaLJCmZTabcef2JzRVwWq1wFmBco421LaInJzqtCY/WXwcqQ8hw/V+b4dAOjAyTOxgQgXsjVOSYP/F5YtWAqzEiVDDodOY4wT1dyLDBP+rEKfbAVXUaELRGqX6/lyds0mGVF8VIxeCZhe4bRe15qjtC08B9Pn86xpYAHMhQ7ZjsCDWbqezS9Y1u/h73SlGr+VGzfesNiiCU6j/iIg1c3kSKELkQCxyZPvUZH9YsHVCzQoXNp7exNdAgpTerWVCau1k6zUuXv8W040Ztjxm78H7PNr7mLZZgTABxG2/sYc2Q5+ZIWeMp7Pabx9K5+e9EVFR6CMljPWbnhQ+KsQ5up5srD2HM1aLcF8KTP+9aLqD4YgkyZFSsZgfsVzMqesaBCSJB1xPaDuMdTTGUDU1q6qkKEsPuquCsiqp2gaLASnRIkE4iW0tzim0VCRakqUWJRStNWR5TVqmZFnKqV6wWhboOhTH0ZprV66QDsdMZptMN7aZbW4zm45J04zVcsnx4QGHR4esqhKtNUmasVguGI1GvPrKq1RlydHJnNF4Ql2X3L13l6qsWC5OWCwXgCBLE1579TWv2UtJW9VUdcNgMGA8GjLIUmSWY52jrSrKssS5CULC9pYhz3Na13J4cERT1z6srGlYrOY83t+jqitG4ynj8RQ722SxOMU5R53U3Lt/n6YsGY2GZFnO4dEBzrRcv3KdV996l83NTbaFz+QpyyW/+Vt/m5PTE8qyQChBU9dxyoQpRKfR9ht2jJnttYIIuK47to+nXCcpnHMo0a8BJwRKOlxouxLDkvwCj6nGAcSD5fxXofaCp16C00ioM4DbAW0AGCF
UaN8TQ8WiJrTOZfaLLzaC7J5DZyr7UDRc0HiNDQDtEMJ0m9N6plRfS2BtcTsBToYuEOsA2vO2HbAGDRUXoiRitmCgOc4ARnhmPrxE9tcmcJxhTJyUAW4VTmqyyQ6bF17i0vW3mYw3cM5QVXPu33+A2VvxxvQFThvBDx+WvPut/y3fHUmO9+9wcnqX46MHHBzeYVUco7UmSxWniwOcLVDUIRLEdRZFlLNUS9jIY+q8C3TlGUolxCFbn+DinAi0Q6RJ4rwUMUgyJLjETViGTegs5bH+3L9Ingm6QijSTDNoh7RNzWq1pLXGd0RVGicF1liM9eUbm7qhKCuWhTeJV6sVq1VB2Ta0ziKU88HlaJxTePrR76xSOl8oJ82wWLImI01TBlmGTjRJqlgtS5qqIdEpw8GAuqzZ2trlwuWrDAYDXFNy8PgRH3/6MYvFAgsMBkMaYxAqwRnLZDxl7+DQUwV5xtHRIXfu3GE+n9OUBUprdi9f5de+8S6//p1vcXH3EkonrIoSqSQnp6fsP9rjwcMHlEUFzjDIUlyq2d7dZmtri52dbfIsZzSaoBJN29QcHR3xyc3bvPfzn3Pzk0+4e+8+q6LislCMRiNmsymtsxwfH5OmKRsbMx7cX/L48UOqoiLNUtq25fatT9k72OO73/0tppvblEWBMS03rj7HxUuXebz3iFVR4GIKdODLIrHbBRYFs25dY4ogGWqcEWnBMxGW0coVjhYRwDXwoPRplM4F0HZe45EhIqKnMP5qEAweFHuTXYaqXuvNJ4WSPnvsDOD6+NbYrLI/X1z8gS5wPa1zJvjfiVBLRPnaut3YOQhRDjFZdx103Rq4BqW2pwj8mXueV6yNsOitmY42itpv51WKYL4+PgGckV1hdyEkVkik0yAl2egCL735d1HTF9javsRiVbFvatLhgEpWPN77iBsHDyg/+iHPvfQGHxT7/Jv33+PKcy/z+vY3eO3695ivFmgBEwWzfMh4NORg9YBP7vyQP/iD/wem2fvMs/tsVpoh0i8IH6cbt8JeXfDJKf6ZqxCTH5SANc5dSoVzMXY5WIHBmRaHlycu/2Vn87ML3tQNKqT5KqXROiNLDQR1va4biqqirGp0IlhVJauiZFWsWCxXLJcrqjLEkEpASRKVomSKMwLvhJKBP7LoVAbN2p87yxJqk6IzQZoJsjxhtShxRiKFYJBnCJVRLBe01YpyOefuvbvcuX2bbDBgNtugbWqkVrRNzdbV6wghSBJNnuU8fvyI23dvc3J6QqoUl64/x7d+/dd59xvvooVk/+CAg0c/ZzKbUbc+1nY8GjEajnj33XdpW8P85BiBIM0zRtMZwjmqsqRclcyP7oKQjMcj8iTlrddf45033+DmrVt8//t/xIP797l37w6mabh0+TKz6ZSirDg+PEAqzXg8YTDIOdh7zN7eI29NVD4cr6wKXnvjXQRQFAWDNGFzYwslE7AlpvUzIQbir8+P6N+KCy7GIIhuYfogfBsPjtVI1r0wFl9Xw7nubSckSriu/qlwPlJBOSILh8WhEJ4H/isQMuacT+LwyluIRZaBMpAKoXw3YCX71utqDZQR8oxJv97qHNdXchH0IBEda/0il56nFIDU4CytsPQ1jCNAhgieTutd0379mcO3CiDqTAcOHe/ZacsBUDuuODwL98RmLIAQb+uE6L6zRIDSzK7/BtvPf5uf3Dmg3r8HP/8EoxOazR1GtBSH+4g7DxAPPuHRnZvwoz/hsUnZm+7y6NYjHl3YJZOWV196GSuG7M42GM22sDpjkmf8xjuvMj894Qd/9n9BYNa+Y//8wujjqwi5s9QshFA113HmDgHW4JCo4OHw3HJ4Hi5aHTHjdV3pWOOb/5I6w7NDxpqKtoWmaX3Kqk7I8yHGtFgL8+WKg+MTEiHJ8iGrsmRerFiuVixXS8qypm4NTnp+MskSMp2Ck7RtKHAtfK63lL7iV5Yl3omhJYnV5Gh0BmkiSRKNThLa0tdwqKqCk8U+Qij
yLKWuKh4+ekjrLKmU1E1D2zZMsjGXrlxlMp2SaE2uE/YfP+LTTz7l4Ggfnaa89dY7/PZv/TYbsyknR8ecnpywPJ1zOl/w4P4d9vb3aI0hzzIuXbrCCy+/zGA4omlaRqMxD+4/IMky3vv5T3l4/y7SwXiUk6QpO7uXuXjxCtONLZJEc/XiLv/x//yf8ic/+DP+9M/+lFt37rBYrdi5cJHRcEgiJccnxygUR8cHbGztsFgs2Xv4gKbxE+/WrU84OjogzwfkWU6eDSjrAmMsZePTtH1crohMQgDIaF76ReesN8e6DJ5AOfQmrOdnRUiqAOmLb1vA+ZRgHJ5aCAArRHSgcdYcFiCtXyAiFsD5FYs1BqeDpisjnRAJ7L71uhJ6jVLoefL4JT34hTbnbh38IC7qKBGchfNlMj2I+ap4wnXL21+nC9uLxkr/bHqQf7qIYJl0+nF4JrHm8VlLJ2rAodyqC8XEiQkOrh8XJ3BCk134NebjV/j5v/sE2xQ0xmCLgmw6Ynlwh8vbG+w9PiJbNYxwiMZweLzPYuMKs2REdu8jkg+/T6Uk//IPHPLiS+jLr7F57QVevrTJW5evsds6vvfd/4S7d3/Iw8d/HsZR9GMex6H7zmsBZU51CoUHz/gZ3/TAj7Xnd92Z+g8uOPMC8Lo+okMIX3Uv9sv7y8iz2/UIQV0XFEWBMwYpfHYaztE0Nafz0tdfsDAeNhhnKYqCYrmiLEqaxuDwKbJZphlkGanMaFtDQw3C180V2j9sqRRahVhICZnyRL1OPY2E9JzuwtRUVUlbOU6PD2mb1odc5UOM9ee0jUEOBcPRgI3NHXYvXGA0GDKdTPjo4494+Hif+eIUhOStt97iN7/3XebHB5weHVJXFffu3WN//zEPH9zn/r07VE0FUpHohDt3b/HjH/+A2cYm27tXyJKUk5NT7tz+iMFgyMl8wcnRAcMsZXt7C2PgYG+f8XjEtRsvIvRlpLN89zvfQSrF97//R+wdHFLWNVubW0ynEzY3ZwjhKKshi+WcCxcuMD89pTg+RGtJPhhQrhZUqxVHzrG5sYXINHVbE0OSbADSuCXHDEYbOFbRLdjIgYX5bEVPCYT3hXCeh8R7gSPHZjpqARw2hJ7ZUOouZBV2fGCoVRU+K9znwcXXJzZG5YRat53JGrU66Pqiee3Wv+072doOCP26tvTFYs6uSM8b+/nrXKh253rg6ArOBIkdQgw+46/TZc841dbpANddsrd819M0HKxtFsbZtePi/Z+lL6IIIVCh7KQPK1Sw8RJH+iL3P30PmRtm17ZRNmX7wjVyAaflPjsqZevqDU73L3Lww5Za1lS/9hxbb/4t2u//EelPf4IWFnv1VYaDCXZxyuatH/FnD37GnRuv8bPbd5gMcnYvXKKdvoh79KMQMfJZjbf7Gm6NphGmewo95WIjlwAOjLE4aehiteN5xdqG2gG3P5Gl23s+45f4Mn6KZ4KuCl7ttimxxgZOBxA+f7msGk5kQaISqtrTDvPFgsViQVmVWNeilCTRgiQUNldC07Yx2DsUIBfBUy+9NtEBcOr5NKl87YXWWMqypanB2JYszZifntJaC7YlUZqy8BW8NjY20EoynWxw/eo1BolmtVpxdHjEwcEBq8UpTV3z4gsv8r3vfJdsMMI5wSeffMQHP/8ZNz/5iNPTQ5qmxlgHUlK3BoSPyR2PxkidcnTyPlVToaVitVpy795tH9YmFU1TcHp6zM1bt1A6YToZc+XWTZ574SVeefkV6tbw/PXn2T845IP33uPhw4cURcly5TXisjY465hONjk63OPSxV2K1RJnLJn2GXRlVWOdZTwcsLNx0U8Ga7rl1mXoRxPU2i5Z0tu9PWXg51iHKl1JSSscWkQawh9piWUy6TQyaUMuk4gQ4wgWMVKcSdJcYyx/tWKdDcDb4FxKjGf1zRA97wd0ROg6LDobQbgvJdOXPnxCAjh7x5Uh9kKDoKlFJ1Fw4ogQ/RFNW+tcR7ue4Xg76ih
oxUKTaIlKxuTDGXma01pHlg9o24ZyOaeol6yWB9AsfVEx50HehfEwzoSaHf6S0WLq7lckVJNrPDaG8UbN997cZkO1KNfwcSIp6pLvbU+5Oh2wNbtK08748fVtBuMJ+tJlPjwx1P/kH3Pj1RtkVrL/whu0THnw+BbDj3/GJXtK2j5i/+OfcqC3ubt8iZ2jW2gkKnxn283gNfAVcWuPG8damF7Hxfb0gAs8iw9DNSAS70yNj1v4Apb+GoZYgS5+Nl5pHWjdl1B/n63pOtFVxm+q0vNNQoWCNYo0H+BEwrJxNGYBpuF0fspitaBtmy6NUieSPMnRMun5FhlCU5ToSiUK580ri/OxkkohtMMKSdqmJJkhySpU4qMRcj3s0jPnqwJjY7C14fj4iAsXd9mYznw9iLrBtY6j40MOjg9ZLJdcuXSZV195lbqq+PjDX/D44UN+/vMf8+knH3J4fEJZ1ZhQ9UwKv3HkSYrRGmssqzJ2i2hIs9zXYDAtTd1QVRWtMV09CusgTVPuPXzIg0cPMcZycXeXyXTGc9eusff4EZ/ePObmrU+5WKzY3NymtY40SzGmZTLbZH/vMWmaohJNXZeB5vHhNmmagVAkWnvLOGyMHYZ2tJ3olKK4mXtzKr6wHtELsUeajeDaT+mwSGPLGQlxA+28yN1EwjnfPNERcV78ldB0Ab/B2PUuDE++34NcFxoVm0MK8MAcONwgT2o8Ds8dd+2qcF09BL++18P18dlh4Xkp58GlWVv0OOOL8quUyeQiG9vX2dp5nuefe4VksIkVOZM8IxsMuHm8YMMpNkYDVm2JpKWYP+b9j37Ixx9+n6PHH4NrOt6403c7bS9y9v4/ZniRB6s52WzBt68OeDddsf/4mMW8ZSYfcWk0QFU1B6e3KVcFD2zL269/lyYbMzcZm66iWsx5/u/9h4xmY2w5Z7E0LMdbzLKrvHucIE6PuLCzy9I2HA8XHD24z7oOv7YDBa2Wz763PvrRoRkzzdbmuwiT0gkTxj58WhAcbaHpV7jOOuD66dFbK3yJOf1sTte0vrKSTkEUNG2L1n7rTbMhg6GPXxXOUVRz6uWc+ekpVVXiMCiVkqbKx90mGUppbN0inUQJhVUWhA0xvn5PtVYiWpCx7IbQKJ9HQaIcSlVYW3IyPyXbGqOVBttgUFR1zcZsxoNHj3E6RSlN09S+l5ox7B8ccO/hfaq6ZjqeMptMON5/xO2P3me1WrC/v8fd+/eZLxbMi4JlWdJahxKKJJFUbUuharSQpEqjtHcOqZDVJqSiKkusbWlbS9U2vn5EiJet29ZrErc+pioWPP/cDba2LrB16TKz2ZTRcEDTNDzce0zdNIwnU0xo6lmWK4aTKYeH+yRac3x85NsopT4zLk09fzydzTBNS1EWoZ/Z+tSLc1J05q6fxyYovaEeQHSe4PqeakTQdf0CxCO5CJqriNxXyLH3DTd7LcNCVz/AOddHUvwqZR1UIh0SRETKwHlnme88ElN++5HtC+bIwJH3Gld0rBHbU8UQLQJFgQfcGFHQ35ZfD770ZqhzIbQff6EZzTa5cPFF3njtewym16mMoG4lRzj2Dyum1sCw5aPDO1xBYqYDPljVtI3j25Mh6eA6mzc2+cdv/D3u3/kRf/iH/y+WR5/6Fu1rzijRbbaeY7ZCs0qGFLZix84oVjn/+uiApmgwqxZrHJflBR7fe8AbL7zK6S/uc/H6G3xy8zb5eMy/SwTtdAO1m7Enjnh06z3Uwz0Gt+7w6uFjqFtuWcdOvoFOEiai4Z3xgD/TiqryPRLXjP3u4YknZvqZ4kPrj9r178davR1742JssPW1eEOMsQdT1Z9EhAiRaJV0e1OXp/lMeSboVlVFmiZonaB0hjErrGl9Pnqakg9HSKdp6pqqXFKUNctVQdu2SAVaQZII8jQjSTKcdbStDaUIvZEqpUMpiOaashIT9S8Xdg4niTGDntj3izdmxWjpPcy+qLdjczbDobAWGmuZTWc8uHePe48
eUlYVOIuzDU1d8vHHv2C5OGW1WnF4cMjJfM6irKhby3A0RUpJG1oAZdLXgjDWUQtDagVaq67AS1NXvvCLcdSNwSIYDidkWUpV1xSrFauyClzSA9qm4fDogI3jQ6azbZwxpIm3IJarAodlMprQhtCVjdmEYnuHPNEsFifIwBGORyPGoxFOWja2N3AS9OkpWghQfmMwofMFws8tY/1m4EP+fLEaEQtJBR42hs3Q8cKRyOq5Wk8vRDKh5xJ9FMS6Fucz2GwsHAIoPrswvn45ew+us/JFyNCCaMTGLtNRzqbMhnA66bp/x7GLCRLORi03jLfozxNBf13C3ufjcK1D6QG7l1/j2ovfpdHbpOMRR2LAxwclQktu1w0PDwu+g+Tj1PJRlbBTW7ZmI77fFLgm4RUkHznDyWLFwGlWK0u7/Ta/+Q8u8LM/+L9x+9M/BVjbVMSZHylgZVeoVMOqxSiBKHMGUoOsQBpWyxPUKOf9o8fsbl/mka1pmxPM8W1+tnhInmVclUPKIuHw3gPmCj48OWBjMOVeecp4+zL3KNiYDrg03UIbRyYV1RlbRHSa55o2EX6tzdcwrl0EQhePzpkZ23/ah/v5xFCHdI5YQc5H+MheDbZeWXRx03VRtXi2PBN0T+cLtjY3fB+xNMe0jQ/BAhKtyJIURNK1xCjrmrppcDi0ksgEVCbJUk0ivSZqjK/644QPBJdaIpVAWGhNz5m5sPM45xvX9R5Vn36cpz5xYjIes1j6kC8jIEk0uxcvcbIoSLKMNEm5/+Aej/f2KKuS+ckJSSKZjAYsFicsjo94/Pgxy+WSZVVRNYaqaZBSMR6OWa4W5HnKcDTGNDVNVdE2DVmaEWsoKQ2thVQIamupGk9JaOm7TTjrsK0hH6SY1lC1BrsqcPuPKZua+XLJpcstUivMylCsTtFJwnK1whrfIihLBhSrOa+99DLL+QltXYKDLE2ZTmZkeU46HZDmAybbiq3dS2zPplzY3SXPU6+ZhoLdzgnauqEsC5arBav5gqIssSaYVwiqqqZYzKmLAmdaqrqlbsLzC5SJcc5PoKjZOnwb8aD1Rh4jNnS0IhzvokPtVw+6zq3dx5nFG9nMPjPtSQ66j8oQ3YLu8/K9mW6DdktXiW3dCRTDznra5qxRHP6jh0y3X0VfeYfZcJf3TysuTyfcv7eHShSvTnb4b+/f5ZHMeaNquLk746MkYxN4ezbiF7KkUAPeUTlbecLtumErH/LOxgbvHT3iqIIXJ1d58Vv/CY8f3WZVPKTveddnFkZ9MrWG1JQYkXB/r8AVFYnS5KnANBWjkSRNHIO0BY5wDx6TO0iF4teOj0lbhxWKj53DJSnOWN6++BxKaI7rkl0yWiUY0WCXh8yTkqI44gzCfu7UEZEHiSRJz+G6oAAI57ElhpI9+VzxCqHn0unSkgUORKw+EiMdBDGn+MxceoY8E3QPDg/Js5R8kKG1RicpTVvTtA25s74Bn/A7tzGGuvH2rEohyQRZpkhTjdTCv982XlN0nrgWylcYU0rS1i6YdwaCk8367R/nTFfMpWkMbes4KRdcvyy5cvUKt++0CBwyl+TDEVcvX+OiaXh8ckKeKO4/fszx4pTFfIFparQeMMpzlss5VVVS1TVVa2itpXUW6QwbwwGXd3doqiE37z8iSxLSJEEYyzDzJS2bpiZXGicktTGoRFOWBW6+xBYlubBcunqD3YHkp59+wmoxJ9VJ6CkGddlwIk8RQLk8BZnQNA3GNLSmZTSa0rQtGY4sEVx57gWyNOHToiDPcpx1qCTl8uUrSK3YmE55/bXXeXx6Sl3WXL96neevXuW569eYbGyQpElnShlraJqaxXzO4vSYsq7wUSkNTe35wuOTfY4P9lkcHzOfLzg+PWE+96GAFgfKopPQIBSfYVhVNVVV46xDZwnZcMRgMEQnCUmWkecD72Q0hmKx/MIJ+tVL4LiddybFWrJd99szNIJftL6WRb+YO8ANx7mgLTlrPY9rBc4anGt
7IIj1GSLFAzgkNnSvi5zqxsbzpFe/zU+OLavH0Lg9duoln57ucWBL3smn/Ddyn08GCc+1msHFEfc3Er6TbfOaMfy5qUjHm7wlFLNW8INqwU6W85JM+fj4gNVgwKU09RX1Nq/yzd/4X/CHv//PwFc78aa8CAFs4QvnrkYkUzSKQZqwMiVNs8QW+CJVRtEkI+oS6ragXZXkSArTkMuEE1sxno4oT/a5MpqQZtOQeJLy1qWXEHrIMM+pmhPGiePxR39EbVZ+3Lp6vkFXXdso1+skeIkbXEzLFh0v79tVrUdwsAa+wWuxTve4SDX4d6XwVEcs0dkF6TwB4E+TZ4Lu4/1DRsOcbbXpnTNSonSCaSrawJNK4XwxHNNinEUliiRJSDJJNvAZZUooGmswrcHZBiENqBqpHFL7wfHe8FDTMzxgKUKaKoD1ZnDTtjRtS1nUNK0hSTWz6QzTVBgcSaJ83reByWjC4cmc45NjTk8XlFWoe2tSDg/38d0jKhrT0NjW0yhpTrl0XBtpdtp9PjpekWtI0gHCwcXLF9ja2kFpSbk4RThBUVbMF6eMpjOcEBzIB9TNPtu55bp7SLtSaGPItcIIzWCQgrUIawMAWubzU7LhFCmEpwKkpK5KBoMRzoaccSlom5qyKsiyAVordJrwygs3eHR4zGg0ZbixQWlv0wwd2WjMdDZjd/cSly5dZDgadXGKOGiahmK5pKl9soWxkqIoqOsS4aBqSk6PD3n08D4nR8ccHR+zf3jE6XyBk44klQwHGq0ItIqlqQ1lWaHSlK3tXba2LzCbbTHd2GI23SQfjDAOqrJmfnr8hRP0qxbRaSgQM77CXn9GQ+qcacRA+/7z3UFd8kPP30YrkE4LcmuHnjWBfdxsC1iSZMT04mtMdl7jzx5ZBm3Dw9UpddEwyCwfbV3i18eXmeD4ODP87vgKF5c1/5pjZqXjWlOzf3FMax1brmR3mfJ4J+PycMZsWdOsSpqLQ9g/ZOwyPnQVVzem2Be+wdX33+bRgx95eq/bWHotPitXJBNB5SSz2QZDDdoZpIVlNUdqTao0k8kUY2uyNKO1LU1r2BhuUBUnJK7l7WuvMtJTaid5sNxjZ7zFph6xbA35KKFta/bvv09T7AXe9uxzsz1J28Pl2nOje83x2VCuaMmctS9c/6Ez532SknDOIZ3fNK2gr6j3y0YvHB4dsTkdkGeawSBH4UIXVI2UYE2DEAprGpxtEdIhkxAelmtvFqcpOEXjGs9xSYtUFiEtSaJQCdjWEPOZRazvKnxlIi0CJIeVIBGMRyNeeeEVrly5ysP7D5gMhxyfNszGY5CKZVmyKhtOF3PqpvElF433zurEJ1/UZYGUAmMaEq1JtWUwnuDQTIZTlmlKbTQ2y9gaZ2xd2EVJyebWJrPpBm1rUBs7WGc5PDpgMBozGPuCN2ZVYa3gZHHMvTKjaGG2fRmlfZUmhKBcLShWS6QQtHVNWTUkA+cBtzXUtkZr3yyzKJeo0YSDY194SArJaDQBZ9i9cJEPPvmU0cYmR8dznHIoJJWtqdsaG3jdJPVFhPLU8+TWWto2ZTzMKYoyRFtYLlwQtK0v5VmVJcXONhubM44PDjg4OmHy6BFHJ8dYZ8gHCcNBila+doE1lrbxpS5nW5fY2txlOtlgsrnJ5uYu49EUrRK/cdYVZVV84QT9OsQFa8p2zq7oZwhAaftWRV0yQlc8m14DcnFBWpzpq4Z5wAbvnAwUjoshcz0dYW2DdZbhcBc7e4lby5z94weIuuWu8ckFs+mIX8wmPN/CQMG/nULapujS8KOdAdkANkTLZA737IqttOWqFIjJlH8130ccL/gHYsSjzQl//Ogh/+FkTDWvmUwVHJ2yMJZrz32DR/d/ihQhbXzN0e2QpCJFJwOSfMjJ8QHXNmYkEtqqZnsyxmBZNRVtWzLSGuFaaCsmgwnDdMAkn5HpBI1mkE6oEbwy22KQ5KzKkvF0gpENDx494Hh+N6z7oFniOuoq/tcDbYw
siRps5yILEvBFxGfmt0+LwNchDu45gT9/aEu/FhhGB/Au+JP8kySmUbvPAPvT5Zmgu1wVzOcL8kxhzIhUJ0ggTVK01sTUOmt9MRutJYYErX0SRZpmaJXSNDbO2zAgBqEEKvEZPwbTV1qSPR8Tc0tcqDiktWY6m7E5S1FOslwuyQYDmqpkN9tFSk0+HFDUNYvihKKuaaqSVbGkLgqk8uFuBsuqMiRKIqRmkA9BpWxtXUAISbFakeQ5k8mMi2nGcDAIGXkp08mE6cYGTd2QpylIwWA4xhjjnY5JToLkUlWwOD0h0T6/27Yty9UCrCXNcg6VxFiLkp7frOoGvVr51NNEQ20xxrKqCjbSKcVqichzpPUxyGmSMRrknK6WnK5KkuGUixdG5MOcyxd2OVktEYnCNA2P9vZZlQ15mjFMUpyzjIc5Kkko6wopJEoIBoOcfJiipKRtKsxgQD0akcQNdDhEpZp8MqRta4bDlMkgJdHesDOuxbaG4WDMZHaR0XiD4WjKdGOTyXiDPBv4521amqYJ/OavVqy1PobY+tRdazxV5gPlY6aa7GKSAV+HIbRA6hooniFj1+JoOxs1arQQuW7nTKzZ5YHags52UdvvcPNYoZ3jcbHkxuYWq3rJjUXNYuRQ21PeyHe5xQmNm/NmusUVoXiYVgxsy+XG8ng74z9fPMIcn/C/rDe5NVjx0+pTdt2YloQ/WH7AAsGSAX9iC+S9JdfShO1BwvDaa+TjC5TL/WBtSpxMSJIJs+3rbD73Lqt8h/tH98jkA+bzEzbSlFRpEuuQ1jLVGTodIIRAM2ZjmmAx5NkALRVFUSDyATbPGLQCYQ1aSbZ3d1hUh3z00R/y6PEHSFd1lkdXUhLotdSAF937YYzF2XF/qtkfOCIb07GDBUiIaoh/AnTdoSM4I9ceebB/XNwQni3PBN1VWVEWBWWRkCSOVickKkWnAxKl/E5oDKatAUuifbpkkjiSVIXsNbCtw/m8ApTEm6aJQiXeK+gcvhNw8Bh7jsyHLlmM70QsfAeGPBcMsymT4RQpNHsHh7QORnnGaDikaCyLVQHOURQFi/mcYrXywyJ9JlZVN6RaIWVC2xiU0oxURqIUs9kmzdRzqZPxJNy/QUof9zocT5BCkSiDbWqMNTjTMspzpPYt6Te3NrBHrnO0aSVp6gqZSKSFum18Wci2QTqfZo1SNKb1GXU6xdU+hdlaX35yY3MD5xwXLlwi1YphnnF8csTe0RFWaA6Pj1Eq4cqlHWaTERfGU1oE+w/2ufnxHbROmU1nCK0Y5BlpmviaEFmOUpKLu1ts7WySpRohHEkicS5HrpZMGPsCOgIaY3ASmrZhmGk2RhmJ8ok00UTOB2MGo03GwymD0ZjReEQ+zDpKxLSWpq6CJ/9XKzGV1lrni+RbQ2taEAIFXjuyEqTAhDxnF+bjk11s15ebsz0FEaM/uiAQYmypwLo2ALOEZJv71ZSHH3wCy4oDZ5nOtvnJyvBSOsNcH3I0TXmrsqyGKw52EgbUlO2SPx9t8a+Ob5FUBf8ze4Xk+Caz1V2GxQmaS6Ta8O7hTXaTTZpRQVLc5/lswnLqOCnv89bOS/zCrXit0owLw2tv/X2OVkeUbcNkPGNz6zqzzecx2ZiVU+Rty4G1fLz/iCs6Z9AaXFOTpgkJAmcEonZonTPIhzjAKMGqbhGiZTzZQanMF93PNJN8hpRw9/7Pee/Df03bzpHCdsDXK5GRABdBE/X1iCNYxmPWfJzhEdhey42BDyJugvLs+YV/7vFqPkplPS07Hr8G6JFd+GWjF6qypqorpJKkaQo4GlMjTLLGTxmMaXx5xiwDoVGJ8aUfhaY1vr2Ow4AwCGWRGnSqSbTyDwc8GY3zRaJxIEGGCshSCLI0AydplUbLFJ0kVEWFaQ2NE7SmZnPzEvXhnLZtWayWNHVJ3fjOvEraULxEUJUVapBDIkjyNBQoT8jSFOkco2xAlRiqasV4OMYZw6oquX79OZQUmLZkmOchIUG
A9Vp6Y3zpx+FwRGsd+/uPmY5GlFXD6ckhaZoitPY8cll7vtY0frOR0re8Ae+gy3Lm82Nms02cFBRlyXgwBuf7k+X5gPsfvs/JsmA6mWIFVG3D0cmCPMsxtmY+X/Do4MBzqHXLPeVrVwwHQx8XPBlz8cIOL77wHDeev8polAIhSxBHWZUgLVqHWOt8QD4cMqgKdCMZ5prBMCVNlC8YY30Kt0wSBnlClidkWUqSSHQi0KlAKuFLAooE19ZfOEG/anHOYJ3sMtNM2/p056AyWbGWAiwlxgmc8nGevnW7PAu6cf2Hf/Sp1PEI0fO7ApwwGAG2HbFXDCjLmubhXcTGjM3nX+bK+AJHqmFrussH8pjV4Jh89jw1jvmDe2wlI142GT86/gWDg/cZlC1OnHIhEXzz+BHtfA56n4KWzeUJk+SA5fAeW3XDhfEmq7LikqmZqANkM+feYIe0KXj31e/xwstvc1JU5Crn+LQkyzMqUzFvG36+/4D70lE/f5Wbn/yM+cmcSZIyKhUbWY4UiuFQo6TCWIvUKUpqknTMIBuRJBmL6oTDxT3cvOFAag4OPuJw/ybONR2QGtHrldL5xKc4lpFndutgS/w7mPw8yfG6jmY4w9h2XHCMNY/A6ykV0Z2zp5XifXi8j863XzJ6oQhZVcPxlM2tXZyDoljRtLV3tgBt29Aag9IJOT7rCOWrh7XG0dS+wWRrTeAzJYlMUSpDCB0ynRzKV9sNVd0lWkqksjjpi6QYfFNKJWTo1QZZlvrCOEmKkYKD04L9o2OqqkRIRZoM0LrCWkNrBFIp7/zTCSrLcc759GMlfEHxJKdpG0g0zhrSJGF5espwMuXFF6+TZRmplly8eNWb5EnCalWSao0JzRabpube/YfkWcLbb73Dp59+TNOU7Oxe5OjoENs2pGnKMBvQtpXvLqFK6tYXh7etwTl/HiEly9WSra1tkizzhL2E8WTMzfu3ufP4sS+AE7jqhZQo50OT0jShrArqqqIxPuYW68BYr8EZw2iQs3Nhm42tLYyFVVF5AE0SGmtobes7YLQGKWToCp0gtfL5FDH0Jkw+KVRghxxVtUIoidASNFjluXmhNG0w4du2BSZfOEm/aomcrud1W19Q37W0ik6TVUqFaOMGF8LInJNIpZD0gfPrjpe1K/CZtShEF+ts7YC7i5x7J4fU1iIHI+rJhMt1w8fukMtzRbtdYGaSrXJO4k750LbsHX9CaVOW8iLfomG6d4idlwh7SJmnpMWK3DrUwLCoC8YWUluzfzQnFRqhLfPqhJnIeLxasaUzHspjruxcZGUMd5Yn5ElO62rEQFMIy0IIjqxhkRsuXB6x+uQXFM0pj8oSnMZqSd2UbI6GmOWCtG4Yz2YkQqB1xmg4xmK5df9H3Ln159TVCVLYro6BEC0IT99Ep6UV1ndCJpIAUa2MESB4uqfjA9aoh7DBCSnPACudU3QtBMXJ0MbKf7bndOMBfZHT6EBbf9T/XurpFmVNbb1JvbO9i1KaxWrJ0dGeT3UtCxpfQ5AkzVHa0pgGY2ts6ydVXbbUja+3a4RnaaVKUFbRtgrb+qIjONW1dvF1BRVu7Ysa48jSIanIyZKcPMs5WZSMJxPfh6woOTw+xVqDTlPKw0NWZYmUkrqq/WCGKItsMEAL39VBJSnDRGNag00Fo8nUZ1JJh7SOKy+8wHA0pq4qNqcTNre2yPMBB/v7bI6GaJXhbMvR8RGTydjH81q4d+8e88WCy7uXuXf/Nndv3WQ0HpMNRwiLT9cl5+T0kEQnPvEJnzadCkFRtORpRmssVVEyGY4ZD3wTzJ89usfD/X1c4MGKcsXpqQbbhlTsY5ahvKZUSUiPhiz3PeSWqyUbzSaD0YCDgwPyPGM0SNjamKCEoKprX1yobamahrqpcc5z6oPBkDRLoJFkSqFixVInu9A+Zy2tsWFN+AiUqq0QhcbiMx0b02KM482LF7/URP2qxIVWOs5aMAZEiwkLWTlPSUmhcM6n46K9FSu
V6njYGKcc9V2fWLF+jfUrei4wXtc6wckyY+/ePlpANpmRDjKSyy8yMg133Zxr117g29MZBwefUhZjLsiCkgP00TGDylDXh+hMsVNWVA4GWtI4Q641ozTHOoPWmnGSs2oqdJKSKM3hfBla7KxYNQ3ZZINEaxYDzYlxXMJgEsfCNMwlGAeFMzTKcFUOaG4dc3vvLm21ZDqc4IBVVaGzDONAW4dMPRUlU4EQhk/v/ZTHDz5gtXyAFLF4voqjgk+ZEfRJDKarBtaDpM/yk/iEGxdjiQOqemBei0xYj7PuVN9Q0S28JxGkImeazBhnAwrTcuXSq9zfu83e/A4mnNFbLr1TrWeQ1qMbni3PBN1FUbJYVVghGQwnjEZjhuMpQjqODvYpFgvffFGl6MTHjdIo2qqlrVuapqUuW19VzFpqGlQikVJ7LUgInIG2AtPg61gCCEVrHaKVSC1J1BASQ7MyNNKSDxKqxpBoDbbEGUPbNJwuF6xWS1bFCtM2OOdo21D/NwnFdKxjkOcM85y2LskSjTWGuq19EohS7OxeYDAY+ZYsSlJWFdeuX+fCzg5ZlpElOcIJssGAg4NDnGnY2blAlmU4JNNNyCczHj24y6effMJ0tsFwOGZVLjFNgzEW6Vps8Pj7FjAW43w7n7qu0Gnizdm6wgCVMRR1jTU1n967x2KxIEk1w8EQqVOcaXHWsFgsfWIKvoLtqlj55EQVdnqp0Er5YkSpDzmbTidIKamaFmOCqWQ8UCsl0Er5akzWF6FXQvm4bZWitfQhY8JB43w8tqkQOkElmqZpEGWBbVYYLMa6wJuGfPZfsURnl8/Oa7BC4WsphMXofN83YS3WSRSe83b4sSQUX5FnCF3oNSIvPX8Yr+t55LrN2DssObUtzYXLTGzL6eUp2/UJD56fYnFM8po/Pr7Hezd/wI5IWSWXeXMg2VwZ6rpGY7FtSoLCKUjyAauq9JSgktRNS5ZlnBZLJJJEKsqiRCnVW1XGMj8+xA0yDu8UpNc1+acfcvnaZabTMQMpybTipFhx8+iQD+7d4Rfv/RRz2iIayWySY+qKjfGQcT6icYbxYEI6GqOzjNPFPo8+ep9i9QjhWmL7nI5D7Uasj16KoOrhNXKlrqcGugQO153HiwyarFesuufREbzrD8Vrz5qEidtgZ7HF1cEVnn/9Hd54/Xc4Oj3g8eIWP7n1Az6++1NOy4NOo+2covGkImrfz5Zngu58VXJ0uuB0vqRuDRtJgk4TWrND27YslwsaYximmiRJsBbfVdcKTGNpKktVttStoTKWFkitLwxtXEiVtGBqsI0jEb7ldayFaYx33OAMwmXoVDDKhzR1y2q1Ylka6rKkKFfUVYltK5QIZfGU8AukbVBKoVWCMS1pmqCE8rGuWvlAfoFvN5NoX7TdeV5TZimmMbz44otsbG1TlBV37n7IN955lwu7F6hrHyonpWBzcwvhHPfv3+Gn773Pq2++yyuvvs5kNOYnP/ohSZow1VOcc5TFCmlbsDXpbIpWKaerJfvHx6hEgZG+lGbQUtu2RauEqmlItUZrz6nXVYPWLalQNNZyfHSEcY6iLJBChpC9HJRGJRotlW8tZGqqpmL/4JDZ1oyDowNGo4zRMEVq5UHItRRVTVN7+qhpDXVdUZQFprVIodFK+xrHChpT+8I+raWuCqxYYX07QqzPPMBhcaFFiu/QkHzhBP3qxa0Br8HYlnVt1Hf7je3NY1Frh1COvvuALwvoNTR/1nUHzll6weKsL1RkGseDe0fszyVKD5hoxfZ4k/unK25sbCMPlqysQ5t9lvUxN8qaTddQuvvIIueySjiSoJxGpWOEqEiVxUpH4yrqqvI0j1bYVjBMt2nqitpVJFJTt2at5oMjTxQ1LRQtd+7dpEkU9/ZSODyksS2HbUMiFZ88vsfh3Vu4Zc2mTLkw3WCSjEiHGwykZpiP0VlKPpxhbMuDR58i6oLNrUts716lahoUEhRY4euKWFtj2pK2KTFthe+8YcB
V0dj3eCF82F3Horvo4IqFyOOYi7Wfnmro2QEXUFOS6ZwLg8uIA8ntn93k8jev8daN32R38yKb04s8797g19/4BzxaPOL9T3/A+7f/nI/vvEfZLmht4yvmddX6fsnohbqoOTg84cHDRxwcHzKdThjkAwb5iPFog3xwTNOcIpSvCIZztG1DXfmspqa21LWlrBtK09I6ME7RBs3LJ7Q5XOsHwIeR+c69QiusVCidAQJnJbPpBoM0w+ahNdDymKosaZraa1yh/KKwhtFghFkWSATJYEhVlmjlHT5t24b26RYtNUprMIaTkxNPaEiJThKEabl06Qqb2xcQUvHxRz/j+3/4+6TS8fLLbzCaboJQDEZDtE65d/NDfvbez/jhj37EzoVdpqMhOxd2eefdb3Dz5i0c8OjBPaqmwlQVTbFAKcWNq9fJByOsccyrEpPGZygYDoZYYxDWkaUZdbViMp3S1hXz5QJnW4yRWActPlRPAFVoA9/kBqkV1vpymWmWU2cpxjlq51CDpMs6XKzmJFozHGRgvYP0dHFCUzUYA2VZYeqaVEmsUygl6Xks77dtQ21apMDYxreyb1t0mvrCSVL6UpwOpMq+cIJ+1RK1lah5SmsRnn3GOV9KULqQChycXzGP34cYhdrCUnC28lh3he7HuZBEZFqvWbcaSJlujamtQg8SjjfHHLSPONqCW0f3ECePaQfX+GY25DiZUtYlY61ogUwmZFpRA7V2LE4esFo+pioXISoixARbr/VVgyuMtl7EOENdl0jjNXgppc+YaxxSaoZas1gteHTrE+zjB5SupHEt1gmUM9jlAn16zJVsgBKKncEO1iZolTMajhEqIR1t8J03vsUgGVG3lnyQYxsfIeKkAiUREjQWrR2pcBR1yXFxymJ5StMWnM4PeHzwKXtHd6lXh1TLfdp2ieo0XNNFFMTfHbhy1lkWEyRcUPakVQjn2y5d27jBc8Mb3Lz9MZvjbd546y2Ko0c8aAzJYILQiiRJ2ZC7/Nbr/xG/9eY/QiSGRXXKg8N7fHjzp7z/6Y+4vfcBjW2+cM49E3SNMewfnnLnzgMePv+QjekEubUNQqCThNF44s1HfLxj3ZSUxcJ751tfb6BuLXXjqBtHKzRWaGqjsICWAikcykEC1MrgWsgGOY0T1I3DVA3DwZCN8ZgsSfG1GBwnp3NM23A6P6GuKwZZTl1WtG3NxtYWq7LmdLFE+UZdaCnRynfEjRWIdJJ7Z6ATVGVJTPUbj6c+lCrNuHDhgq+0lmZs716mLBv+6//Pf8XLL/yMd7/5G6SDEcPpBj/5yZ/zb//Nv+TT27e4cOEKo+EYKTV1XTGbbXH5cs3B3gFbm5t88Iv3KIslxhmcMRydnDCZTNnc2qHF+SgGa7tiG1ZJVqulDxuzluVySZImDBnTNBXQ+GprztLWNVpKquAc8t020q7MZGtbhNOUTcmgHbO3t49ra/YO9tiabTIc+BAyrSQCy3CQ+LZJTUvbNGB9bKlvxOjDqqXUKGsx+JKfWgxASqROEVKjtSZPc5IkBSFoTUgF1+kXTtCvS2y4fyEM0nlvtbMOlF+4Ukofy2ui1tX/dB1/xboqtS4hyse23lEXMtUeH5xy76gmHRuy0QajxSn1bsYVI5ksTtiZ75O3LbI6QTYlz+U5hUopG8NJWzNKRqSDhKODT3n44D6YonMCeYeQ70bs6U3LYnkXNrYRF19DHD5AncyR1iEDONu2BY+HjIWkWp1Srw7JJUwFaCHQQlA3JSbJGEpJmo6pjCHPErJ0xKpyGAfXxtt8eOeUwcyxNdL85IM9SHOsUpDk6NZ3N9nZ2QRlGSrFUE64tHWRcsNSCMlO63jVVZi2AVlxfHiHj+78GQePfs7+g/cwzRLpGtZqhXklIHC1MfpARK1WxDaamlm2yTde+Q6z0YTbH3/E7Z/f5O5Hd/lb3/ldbrz6NheuXuP4YMHR/h5H+4eUVUuW5Yw2Zpwcz3n97dfZ3b7M1eeu8J0b38X8XcN
PP/w+P/z5H3/hXPuCerqwWpbsHR1yeLDH0cGG79qbZUjhyPMBo9GIqiho6pKiWFE1jfd6O99+J02gbjzfoWUKTmNaH3LRCoFSXoMQ0pBlGQIoS8N0Y4dpmmIdOGsZD6cMswTbNuwvT6mqmuViSVP56ISDwwMkgmw4JhuMOF0W2LYlSVLa1pJoTVX65pKTLCdNNYPhkHJVIGzLZOTTUwe5D+DXOuXq1SukaYbSCp3mzKYzXnn1NX784z/hj3/wfX7+/s/59e/9Np9+OuXmxx9w9/4dRqMxb771Dju7lxgMPS8s8pxd6esd7O3fJ8sHLFZLQDEcjTB1Rd00zJdLGmupKk8PJGlKolIa68iyzNd1sJ7GP53P0crX9S3qFVJI2ralbRuckGipGY5HHryt9RzsYMBgOPTFeZIMLRSmaNirDjjaP2I2nTKZTJBCMBrmTIYZB0JgjfHWAI5U+roYWkITsn98PRdB04ZOq0L5CS5SnFM+JMs4rAqlEY0I1Q3UM6ff1yF9U0dPMZhQ9Ec6QGpMLHsZ1VfjSRMj14HXIpyh7xC7HsYUYtltjYn1lY3BGEu1bNmabJENhhwoR3JhSrEqeby34LXdGb852OVEnpIDVita48iURqaKFQmn7RHHDz+hqU49RxrTkMN1hZCxWmEAHcPpg/fQwyluNCNflaQYVONrX+NCjWMnwBk0Di196VJCVYih0iyQDMZDFmVJayxapxzNVxwfHDJfWra3rnH/4GdMLl1D33PgWu6ZAat8QG4kRVVxdfMC1UDTti07dc12lvPGzg4fFcfUCLZ1zq4QPDICp1NyPSDfnPHbW2+wNbb89NM/5s9/9q94cOsHUO37+45NIdYSI2ImoPfLCYSTfOPF7/JPfut/zQtX3+Kjj/8cvUy5e3KLvfyAy1eu+3htB5PtDYqy4O7jfZzOub69g8pSRqOUk72H2KYkzRXL4yMSpbkoLvIf/+b/5gvn3LNBF9/2pSx9Ue6yWFGsFviGd4JUK/J8QNPUFIsVxrYonTEcpbR1Q1U1SOVbg/iOv779jgutXxIhkcJ3pBhkGh00qPFogtYpxvpuvsMsIUlSkkRTNA1t1VJXFUfHxxjThmpcLWk+ZJRmHojbhiRNAYFQjqpY0TQNmUyRSUKiE4T0/KXvvrtJW3uHX2tati/uMN3cRAiNCt7eCxcv863v/CZCOO7d+ZRiueDg0X0uX8/I0owrl5/j5Vff5PW3vsFwPKMxrQ9Ta1qKoiTLfGGRuq7Z3PQdg4U1mKZiVayYr1Y0rY/0SHRCNhyymM9JdIZA0Lbe4Tefn1IWHph1lnRJKon2baWTNCMPccQ68eF1zhpM7bBpghSKVGgSBOPhBGMNxeqEx+UeZVmRDXOWZcF8pUgTyWw8oaxq2saSJ0lo26TAGNpW0iTeyVbVLUjjnWhCIYzCSGhrSx0aNboQCyulQn8JT+9XLb5rgK++5guZ+3Y7Thpf01lKn6WG6boE2xDpENvSO+dwyiKM7RtWduIw1lMKEXQ9JeRYtZ5/LVyLujBFi5L69JSRPaVaDdid7XBpnFBbQ1m3nNa1t7pGA6rVXQ4efYSjCREAffREvKfoJIp8iNfzDM3NH5Nce5tyOqNZzhkgUK1ByRB41bYg/MbqWosOsbKtbamdoG4N7XyBQ3Lv/mOWx0sOVi0m3WBn5wqPi4Kj4yVj56gTzVJn5KqmPjlkcuESR5szjnNN0tYM5oKpVvxh1fCT40N205Q3BwPmbcFcJ9xtW2RhectmzHTKp5VhmebcuPG7vHjtN/jg9h/yB3/wf6d4/AugxuEI4f1n0hT8vuPIRMI3rn+b3fwag2TKpa3nKKb7fHTwU65dvsbLr79OpjXHd++QDAY0xZy2XFG0JbfbhrffeZOXXn+JpqpIB0Py8dAnWOHAGZrlFxdxenYLdmeRjr6qvgPTGspi5bs6CHw8rfAFr7MsZzjMcE5SlQUHh4dQN+jE987Sia+eZB0IKcjSEIdiBUJL0iR
lOBxT1r4urlA+xOhkWZDIyvfdshaDoLU+hKkoln4St43f7a2jqkusM/jSlxLX1LRtg9KC8XDUZ0I5H+ubZZmPqJC+bGGWJiglqYsShGa1WDKabjDZ3OKVV99gc3OT+3dvUsxPGU/GPHhwj+2dXd5889d4/sWX2Ni6wNHxMfPTI7SS3Ll9i9V8zs7uDjrNcFKRqJTxYIhwlqbVzBdzqrZGKt8ZQmtNXVaAYLGcszscBN7Ra15ta1AabFX6hWIsQqYkIW05y3MPJkiyPFSJ0z45omoboKUoVkipSbKMwXAT57zD82h/D2NbRnnGcJBxMJ/7TJy2ZZCkpFqTpQn1MGeQ+mLuOOeLxTsLSctwMEYbQ9uCcDVtImgbvznQep5Z/uqzgIk1UG1s3NuleXqNTwrRh75F3xngGm+yOuv5cmstUpquSHt3fsAZ3w4ohtM524KFPEkQaUKrBROnaecVr+VD3CAhV5K5qZiJBGUsI51jtaJKJUfzO5w8/kWgL2NV4r7wuHMCG2oHICVK56TpiOFwxniyzWiyxfbFF0gnu7x3+xZ7dz9hcbKPrlvfs8y2IFpa1yKVpDqpGCSZ75otJUdHxxwdntAsS45OFth0THr5OrMLF3CTjFdvvMqDxjBXKcY2YBytzimFoEkkZbMiH6SkCB65hsfSMpyM+M44597eY/7UbTAbTbmg/QanheBoWbM/liwEHB8seXmU8dxkzBsv/x1ms6v8y3/xf+bw3p8SwsR9LeRYCYwYwQDP797gxuW3KU4XrNI9xKLg05//lEcP7nH98gvoVJGOx6yWFcuyYTCe8U/+V/+UZDjGlDU61RSrJXnq12PbWDYuXaStSoQ1DIdf7Kd4Juha5zDOhnKDIVXS+LTFpnEolQZeT5LmGVk2YpBPfPPI5YJluaRqfFk3ZS1Kx1YG3izN0hQagbGSPEsZZAOkhCxVtLamaSvKcoFzimGWg3OY1tJaGE9nrFYrjk+OkVIxGOaMphNfIyGRuKWvtC+lwAR6Yzgak2Spd5iE0CkRErarsvCRFziuXL7KIB8ilWI0mjAYTfym0zSk6YBrz7/A1vYWTVnQtg3Xb7zk6++OZyTZgLJYkSeS8cVd5vM5F3cvkj3/PErC5t0d7t9/SNtWmDYnSfzGIqUGa0iHGauyIBmMmRcr8iQNITTe9FsuloyGE8pVASF2MEl8LHOe5iTZAIAmRGVopT3/qjzQC6EZZClCKVrTslrOGThDkma0GBrbcHx6Qu0Mtx8X5JkiTVPP58nEx+omCZPBkNEgZZSnpEkSzFNJ66zfUAcVeT70zr/GoVWFlj4tHGu7WNdftfhi4lHbteG1oCcJr9F2FrdfzTjnfJv21juphPXJHp/RcqNPx7Y+HM36TsEy+A5QUFBTDzLacoUzhrd2nmNDCxbWF8TfX56C9ZtnqRNe+uZvs3t4nf0Lz/kEjqakqku0ytBSB3/LgPFo08/dwZDxaIPRcAYixTjffqu1lseLFbtX3mClt7EnJxQH96nmezSrQ473HlKc7FEuTilKH5ZZLpYIJWgThcuHKDUg2blAmWnajZxyOmAwm/DDB7+gHG8y3b5Cnoxoa8f+cIaYjnllmHPr8WNOhinHGxd4cZgx2XvAe/WcH5KycfUadrXi/aNj3pjlvJONec9ULMaKadugmgpnU+7sL1lISWots8Er/O3f+9/zz/+b/wPF4U8RznYxemdC9ZxjY7jJ1uwiW9NtX+VPNjR1y5WLL/Pbv/sPgIzl6ZLT+YLTk1PSJKM2cOn55xhORmBa9vcfkE42UNkALXwAwerkkL3bn6Ck4uIb33jmnHsm6LpgDlZFyXyxpKwK2naE1pKqKlCqDi1eHHk+ZDiZMB7McMb68KxEI2SLki1WWpAmFFfRSKUY5AKZDzGNIdUDihBbqHSOaVrybIATAtP67KWqNpiQRTYZj5kPh2xubaAQ1I3xMbBSYWvnIyoC2EqpPYdsHEIlSNEyyDOcgzzNqYoCKzUoxcZ
0xo0XbjDb2mZ+csp8foKQkjTLUSqhbStoHanOyGfD4EhSOKzfEJoGrRRKZrSN72qlE01RFswmE15+8SVu37xFtZojXMl8EQHI0y/GGNq6oixX1EVBqjUitOgGqMrKa/HKg/Qgz9GJZDqeMhyM0CrFOkPTVFR1HXZ9g20qGmdphEYlCUKnDPIBeZ57p5mEtm6xQuCcYl74MMFl1SBEQWtayqZikPg+cYMsYzIcMEwzEp36RAmlcMLXmlDah5OlWUae5qQqQeuUTGvvvDStT/P+FYuzFickUlqsFb4sqPX5/Bbr45+dxWJ9hI7UkQH2IXBOIpzvDSel6jKe+pBQX67R2hZnLV3jTufIpE+RHmRDpM5IBynHpmAoMzZVwqJeIcPzr1qDvHKF5156l8lLma99IaBsG9o2ZG8pHWKtnQ+RdC0E62m/KDDWz7NlueK0LVhaweP5HCtT7rUWpQYsKoE2irIy3L91D0OLHCS4LCXdvUGSJrQ0ZBcuMdm8wpSEe3c/QG3lyCtDXrz+Cvc/ucn9rKTadrx6+TmerzL+s49/RJte4OboGt98920OHt7nQ3PEfpmxeWGLF23FB6Zkv3X8p9vbZIM57+n/P3N/9qNbmqX3Yb932OM3xnjmnLNyqKypm91NsqluiqMIWgJpG4ZFwJBgG4btG/8NvjDgW8mAL3xj+Mo2BJq2BZswCZlSswc2m11d3V1TjmceYv6mPb2jL94dkVmkVElIpqp34WRWRpw8GV/E/tZe71rP83sk2zLya8WMV+sNJx4+Kme0WtHESBYjh1KyNo7p/vv80m/8T/ndf/i/Jnbn6TT9MzKugJY5R4u7SB+TVh7B5ek556eXfPTtX+POOx/Q7RqGwXN4dMi9+/eJUrFZXfLsp3/C/uE+IcDlyQnZpqXa26NZb3j68Aseff4pClgu9/iNr7nnvqbopr90Xc96taVpOuZzg/eSvhsI3qVlTpSoqiDPC7I8x/uA0hKl04t1zmCjIYokrhe6IJJjhhatBZmuCUCeZ3R9i9YBqQt2Y+4YCGxnaPqWPMvwzrPbbMi1YrlYsl5vsdESfCCTCaTjW4/xDiVyfEiLoGuCVFVOKOuaoe8w1jEYS1UXLPb2+ejDb7J/cMR6vRnno1nKfIuRoohEqZEijkdFhS6KJEERMuV2Cp26Zmtp2h3GWoqiHE8Jgft37/Gbv/mb/MkPvk+3u8KanoCgnC1YNy/xNnV/fdcm0TqzBBJSKXlDZ4rNtsE5Q64Vk6qkrmrq6YzJbJ/FYp9JkVNkis12w4uXT2j6Hh+v0ywAH4nqOkYmmVuUkJR5SWM6ZpMJjTdcNMl56L0lzwqcs6w3G6RMtthMa4qsIFdZQvXpjExpcpkMMJlOXZVQSeuaKZVsxkIRSHlxv+grxJRfl04/Mcmr5PW4IGUWEyMifDlbiDeG1FEhEMQNEOlG4n+tbCDcWIxvEprHeavOJBvnMduG918/ZF6UGDNgYmC13qZoLCFBSFrnGXJJ1JEBCDHj2WDIVUGWwzDGMVk0RkDbGwoBRmq6waJFxpBLnAs0UvCw61mZlr4z7LqeRQGxNURlePbqc/rtCfLeATpCzCPTBw9QB3eZzo+4XeV0IXBuOlw955c/+IDHTx9z2nb8/ukjvvfht7ltO340nLKzO/Tt2/yV6j3+1G35xjzjf3h7j0fLnP/j5Rd8a3nEPV3zvir5v5w+Yp0rfiQ8f/n2LZqzZzwzntej51eOjvjnfcN5VnBXKHxrWCpJpQXng8MEwWt3/xyP3/sbPPvj/wSiQ8WbqQICwVu33uPNw/foW4OQLcH2/OCf/yGT5V0G6zG94c6bb6CkRIRAu16xvnhGWdesTp7zkz/6fQav+dav/DpOZ1ydXyCV4Oj+HQ7u3WY6m2HN8LX33NeoF9JX3Q+Gy9Wa1XrDcrGgKgucg34Y8HaDzkvmZY2SacMdgyM4l9JxnaHpNtjgEEKQZSUiFuS5JIScGBMEwxiLDREfItEp7GDGIpM
jhSD4QHCB89UF1lsmucY6S9sOaJ2jnUOpjNl8ihksbbsbt7dpjqxVRj/0OO+ZziZkUuOEoOkH8rygnkz5zne+x3e++12EkFiXDBi77TahLJWk70NaQpUFWunUnSqJHd8c1hqctVg74IxDS4VFsdlsAJhOasqq4qOPvsmDBw/4g9//Zzx58gUX5+eIvKCeLxgGS1ZUrNcrJvUkucFkEt6HURKmSPL92WxGWaQZblVOktNuMmG5dwuCZbo8pCgrHj95iLEGN2qThUxuNWsGRvs/8+mEYB06JJTf4XwP5x3rtmG9W9OZngzJpKwZrMVaR2cs0CJJOu1MSKQUZKRE4mR0GQXtIi04rt8EgXSK+oVf19Zzxg5XinHkcE1vHRdSPoyLKTt2twoV5Rg79dU56rVMKUHn4brghnHmel14QarIYlox21uSF5plNaHIS7p2h1ASdEEgkbmsc1xuNvzR9oS/cvsdWi/ICoENsBksOktfQw84JXC6ZN0PnDnLNK/Y2g43dFxEz8N2zXRSMs0kPTtCt+LThz8mRsuh71EVVNUhLgZcXXLv/husY+DqcEm89YB/5/XvsF8W/Hj9gmebFW/pA0JWcbl9gleez7an/O17H/IHr9b882HDD8+f8L988E2+ePqIL4aBh7bnw+U+/6MYuLI9Jg7o+Zy/sDflPBiEshwXkf/w7h1+oFcgSjKh+SVy/rBpMDLjN/aWnGHJUNQYQm9QquYbH/27PPvi94ibhwlyPp44pMj45uu/xrvv/hp1UVMUJSfPr7j71kf85t/5e9i+ZbaYM1nMMd2AG0zStNue3fqcV5dn/PjRI4Seoj//DBE8V6tzXj57ysX5KafnZwSga1v+6n//3/+5t9zXFN3kcQ7WcXm54uzsgr3FHkLIm/DFvuuRxlNNbcoC8+mIbU1KdnA20ncDvR3QWckw7IhTTZbNkWICsaZpHX1v6I2BCMakLtrFgLfblJ0WHFpKmt2WvmvYiMjQdSitqMuSg+UiJeACZ1fnaKESYCd4iizHYhn6QJ5lKCkY7IB1ntl8yt5yn7fffoePvvURQgi6kWub5wVlVdH3A5v1mr7vyfKcqqoRpEj1KBVm6MmzHO8sfZ9SNayxCQwjVdr0I9jtdjhrmM1nHB0e8df+5t/it3/rP+f3L38ba11SKcicrt2htQYhcd5R1zVd34yRL4zyO5n0zUoxn80o6xnT6ZTFfJnmv0qhdc7+wW36oefs/JRuGMb5sUCLJIuxpifLNO1gCc4ke7Mel6QIZIhJWhYcQQqsMWluOy7pXEzKBeMHhjF0UQTx5ULqK1HijAUs/S0S/wx0uteRmtfSsRACUiXcigwJ3CSu/3e9qCJ1u4GEHxUifU/HM+0Y5Z1AyWLM2vqZ8PpxyJgry2Ask6zk1mIPN3h2bUuzXhODBSkQo307hki4XPHPPvmE7x7c4f4IjgmywEjBxiUEqsoCUilsb1B5yXvFnM8uLnkqDKpW+GHgncM9fvDspyjvKFROZs+ZHCv60xWb6Fi89jar1QVWtMSDBfmDN7ldZHRFYFbl2N2aHz07SH8CmAAAd+JJREFUpfEtt32kxjHbnPGG6VFSco8r3LNP2DNbyhyOswV10zDPC2bzCc/6nsHCm/UBf7B9xZ/6DX94ZvmfLe7yn66e8Qf9JZ97y39wcJu7W0cnDFK3fG9vRphKoiwIESZ95NGw5UNdEaYlMXpu8xYP3vm3ePr9ZwTsjYLjYHaPb7z55yirKcN2DdFiTcdiuWT/9jG7i3NC8EgixShNNTbQdD2fffIFbQ/3XnuXjz/7jP/H//P/zrSqmBQlR8fHXDUNT05eESOJW/I1188fLwBEgfeB1WbLyekFe8s5WguKPEfLjDyrCDLdVsE6vLAYM4xRPgqtSkLQrNcrsixy5/YbTOvDZKkMAqXSvFYC3jr6oU/SmiDGhAHDMPQwFv/NdoU1ye67nM9YzNKSq+87RIzsNltsP6R49BBHNYJE6YIQ0w3pYkQ
jEwOgM8xfW/Arv/bncUPPersdQTkKJJyenBBjZOj6xEZwjihTKqoeN//d0DOpJigBza6hKNNoJNcZMs/HLDnJbDpJxZQE+bl9fMi7H3zAH/3x99lsG4IUaKkp8gJbFHjvk6vMjkdH70Cm8YP3kd12S5FltF1PVswQIsMMA5nzydYcE+Tm4PA2u2aLMeZGNO+ReGuRKkvfp+CxLhC8Q/hkIb6z2E96UmfxIdANPc5b3MhQkJDoTeF6/p+SnpNm8logGrjGlMSx2/uS7fSLn+kCXxobxq43hkgUgShTh8o1jyFGog/j99CPsejwpS73WrQlxmUO3BRa8S8VXQFRGozdYr2hVCU6c4S4w2cZIPGjIsS5JE8rmh1XL57yf/ij3+E/+OZ32KsqpAgcT0vmNnDi4E454YeXL7k732PjAp9tz3n/1h6H60t+ZK64tZzhup7XpoLtasXJ80sKNeOWWPCJeUooMvJc8tYbr/NZf0m92OeWypk4QbVdcRR2+HjG9uIl1g/kUrHVBXrbsq8dSsGyNJirc15zPXFVcHu+oju/YhYD282U9fKI12bHDEFwYQbeKyoeUKECeF0yzTw7Ipet4crM+ftmxYOq539eFsz6BpcPFHrC7WmJKx2DDRQxgyhZyBnf+u7f4dlP/7/E3XMQkMuKv/rd/w6392+zevGY1ctnnL56Sds5fuNv/3fpmx3ODGRa0e62ZFlBCIGmbVgc3eN7v3mHq8tTPvnTH7C/t5eAXWj++t/4W7z37W/y/T/8Xf53//F/xLZpOJjvf+399jUz3ch1cqY1hqvdist18unvLfcotEbGElFoiiJDiIh3NnEQgiPXGVVek6kJWrZoWeGtAJ/eilvbQdOQpPJq1OxGdrsN1gWk0nhvsUOPsQZrBqyxgEgs3gi77Y6iKlBSpYLtLDrLKPKCTdNQFCVVWRJ9wCpDpjXRR4QW2MFweHjIX/j1X6eqCtZDT2cMq9UKZw3VZMJg0n/XuwBCJlSiTfI05x26yBERbD+QZxkhRnRMAHU1UQxtm0IZpSS4ElFJQkwPlM1my7179/jgw2/xL/7wD3HWYLzFB4+xbpRXStqmQStFb02CrYRIb3ukKDDW0nYtZdkwmAmqBcoaQWQIDsGEsqqoywo7DKx3u6RqEAV1Pcc6i7OW2WSGqhVt29F2TZona8Uyr9jIDV5KZFHRWoUdmnScJiL89Uae6xqbZDo3R7v4L9XWkS+b1vr/GiXx3+z1JbwkjnmoMc1mI4ggk703zReShAF1HRF7/Te+LLbXg4pxpvulM4GvfhNSNAwIYamzNCrz0nG5XXG1WSUYCSNcSKYl39APZJlicvKCTXT873cd33rwOrPlhCpmvLO3h29b1r7htpL8aHvJfl5wO3f88eULqqzm3WzKf/HiCZdWcp8lm/OHtLtLmrjldrmkmM1Awv1iwh1VMtiKurccnL7EbRrk6oSVtXQ2cnF2jrWWsxh5gqJ1DlEXzGcTquUBSI2uMrQ3lC6y2myZeo+4ytHTPfz+fR7rkv1SUE8mHBd7RCc4jgO5FuwJmNu0OM+VoYiax9stFRW/01zxoPT8tWLJPsnYlPWSz5steVZyfOcNDh58l/OfvIDouTO/wzv3PqS7uOT0i8+JAcpqzu3XbgGedn2F61uK2eLGsaq0ZDKd0PYW03cIAYv9fd4QitPTcy4vL7k6P+FP/8WO1cUFv/qrv8r5xSV1Pf/ae+5rdboxgsoEuoIgB4xraNoVeS5RkwVapUQHLXXSrXmPNSm5oSgK5vMph90evWnoe4cZWh49vmQ2XVBVNQRBEEmPJ9CI6JnNSpq2Z7fbsW0ahn64UQVIEZhVJdNJjdYa7wNaKczQ0bUtUmcIlRxRSuqkJ5YCpbLR4aUgBLbbLT5EvvnRd3nzzbfY7bYMQ4pYL0bC12a1TjNmBE3bUhQFnRlGBGV6D2qpEpPABXZ+Sz2d0Q8bzNilO9JSbDqZ0A9DYhqwpTcL6knN7eMjfuMv/xU
+/fwhZycviSIpFNTIW/XR4ZwjRj2mESTYh5KaEBONSUiJtZbd6gI5c0wnU0zf4GyPHqVZy9ksLficZ7PbYEMCxpfVFOcdu77j6GAfhUxmjb6lKicoITmoZ7jgaXY7fEjx84NNgJvruT/x+og+2oJu9FLXu/5r6F38V+vwL/D6mdDCMZInypBGAzKOgPpkergWgSa6RIqTH1k+RL4McInjqu1macZNdb75b6Va75GxpVm/ZPbaOxiVMWSaIH3aYQRPby1BwLwqWPeGiWuZnJyxaXsedVseKsk7Rw/4+08+5sPFHX40bPi12R3++eYlXZHzb9X7/M7qBX3I+LVsyvPdJZu+Rw2SiY9chMg8WJZDyz2TeMKTdkUMCs5fst5tadqB4XLN1fklputwgyU4d/PzF5CMRiKitOKLuqaYVEyO9ji4dUx9fBtTlLiyoAgZmQ10nWflAnmtEVWFnx7yXFdE5akrzX45S1zu6Phmbjnwgb3c02EQ9KwHxy5UXHnJJlreKScsi4LMSYzJ+ODbf5P/4tPfobANHzz4FvvzfUpRkL0jiS5w5xsfklclq5OXrM5ekal0wpQqeQ7CCEDqmh1tmxCp+0d3yMspgw04FzDe4Vdr1lcbtMrJdcGsnHztPfc144XkEKsmGXt7BbN5jiojKIvzPcYVaJWhQo63Bsu1eaIbj0Sassg52F9gfM9ut6YzHboIdHaFCz1FXpJlNRKdOA3GY4xNDF6X3ryDGYjOIMhS+KRIy462bRBC0HYuxan3hqJOL9o6S17k+JC+QVKEGymTjyl7LM8L7ty7S17kZENydtlhoOk6dm2TbMbGoPKMKBLA3Q89eVlQFRVN15EVBTFGyrJks14laVs3IJTEeEtd5ARd4YXAjg+OrMwR0TOpa/Iso64LlJJ0Q09VlTcPrxgDxhgEAhNsuhmcw1rDYjaj71r8WNyN7snyjK7bEZxFa00Int1uQ4gCh0QXNblJqRTGGYzpiEJQlTUKwWA9h3t7WG9p24au79KDSijmxQShM9qhpTXDWGcjOI8z5qa4xOun0XVZvQmuEqR0kH81gfUXfY2nfb7ahV9TxxBJk3tj6xXjIySGMaFZfuWRMiaexURWE1/p5K/Vu9c5EmI8BShpaLsz/uj5U2IfYDAIZ5AipswxrYgj1nOSabTzNM2OQx/Yrbd8YzmjWZ0j8hxz0NGYHRe7Hu13CKVQnWe/ucAGwYQt5dkZod1xEDOy7RrXNCx8pIoDxcU5692Kl7uex7uei/NLnA2gMzKdE4sZoZgjvYOhJ7qB6Cw4T7AOvMeHwM6s2a42XL085WL6lItbt5gc7lEd7LPcX1IdRNadpwdql1EPA0XveWUMrQiIMiNO5rygYCcCVamZ1VOySeRpsBwXnqN8jq4iT33Dk9hCvMU8wl5ZQyV4cP8jisU9Dror/vxHf4n95QH91RrTrAjeY01DPUuL6rMXz1kuDyjKCogpzbieEiLoomCvqrDWUk2mzJf7HB7f5uzkhKIs2FxeMu8HIoL5ZDqO1X7+9TU24IjKYTLLWS4n7O9XzOY5070Jy+mSTFRYF7DO0LY7cp1jnKHr25FfoJN9V2eUZcbgM9AeHyRKFUAGQWNd6hbbbiAGgbFpoZblBUpKNJG+2RKDxw4JTN6E9AM2fYe1BqJHSMn+cslqvUFoTbIxeyKCvu+QOiPLMpq2TV2xVCitqKqMy9P050xn05Q04TyuHDWaPvEQqrJKQJcsp+97oo9kWZbMAV/RAxtnUUJTCoVQmkwXGNOjM5WSLkZ84upqRSTy4sVLNps1MfrxtaTNvkCk8EohGYaeTEmMGUh0pYyqLFLn5S3ODgRXoCdTBtMlna81CKkoyxlIibcmWYczTWt6iA7MQJbl6KygN4Ztr5nP5my2O5q2QYy6z4gAKZOjzruUdOEc2kdKKbHGpcWljCNR/8vU1VRlvpRTAamT/LOwSLshVl0/JCKCsdsR/isdqryZTF/HGMRIAsTLMK7awlc
eOGkJzQ234cvX/hVgWfqLbYjDlqK6jRpPIMYP9EPSWUspKbWmswZcZF6U7JqWSmuyVcCc93xzcUBzesU38pxl1fKoX3GrmhDzS+r1GbmQ5Lpm7+ULyjBQyAy2DWJ7TtsbnvWO89NzNl1DRCNjhlvcIo6Bk+QlNhjwkYJAYS1d3+H6Ft+3CGsRxqYiHBKO1YfAbrejaRr084KDW8eEN16D1rDOS9S0IndTdFRs7YaNd4hMMo2RLPRc9FegAjMl2JvucS5XXMYBUZWU01tcVp42cyynilw5nvmcU3YcysjeYsnh3Y+Yv/wTal0y7HYYZ9BVydXJKS8/+YTz8imri3M+/tEPefPtd9CZwtYVRVETVUYxnZFP55h+YCJH16UxOG8pZzXPPv2U3W7Dg9cecDc4Xr14/i9ZwP/Lr691pOWZoKo0s1nBYlEzn03Y29vj1v5ttKhpW0fbbRm65qYAughFUcAIEVZSMK2nqFzgosXYiO1LrMnoho6u29APlr7rxuN8QAtNVBIfHFJEnHeIEFB5BiLStGnuqEWK2BBCUE2mQCJkZVnGru2SftI7umGglCn1NsFJEuD85PQVL16dkhUZfhjod5ubMMm7d+9zcnIyHssdziZdcqE12TgjndYlg0lvrqqukDrHR6jrOiUuoCnKgrLIaZqWZrPhzp3brLdb5pMK4y1XqzXej8oH62+iuJES4QMmetquZVKWo4IhWUnryZRMK3IlURK8M3TtDiWgNxbjDZnKkSojz3MCkTwvMNaQS0VvBwafHlZCauqRL0xw1EVB1zap046BQim8FJghAU6yEAnWjhZrhxZizLNKxTmMbqCbYnNd3EJAhZCQnvEX3+1ey8OuLyGSCiGS5GPxWkomEq5SkjS7wkOU4GVAhnT/pTjvUSY2BiaKfwnULv7L/kF4duvPKebHXG1TF6miJxcKYmAYY7OUzsilpGl2gKSUgvXliolWKHdF026pypxLoaiGhmlRsY6aYbfBS8GpyOg3a3ywbJH4TUOzWRN9QMsSNzkiW9zChIhRGVEo8iiYSU1DZJpPqINnFyyNNoSiREznKDPgh544dAgzQNekZIJEn0GopA8+efkCIzT95BDfbjiwA4dR0jm4BEKhmIoJMy3wJsVV7Wc5uYyU1nJpdigcRT8gGstD94TVYsrx0W1mMvKQliu/Y5bvUaiSe7ffh8snnD99Tl+23HrjbV578Ca33zSEIGjXl6y3O4wxXLx8weHREYuDQ6JI6o96OkOOvgA5xjV572g3Oy7PT3n+8hW//3v/jF/67nep8oy9/T2qSf2199zXd7pCkWWKosypqoqqqphMJ8yXS+psiRng4vKUi76lbbZYF9BlfaNhjVGRKUlZ1WAcg9XstoZ2Z7lWQgbv2G03tG2LEIn0VZSJiTAYgXGG+XSSYs6VJoqAM+mhoETaOBdFQakVInjKoiCOeV2DtYmQFSPOGHrTk2uFkoKu3fLw8y947cFrtJsNfd/StR3nr16kuWXfUSiByAsyrTGmp8wKqrrCW8etO7eQeYHUDqU0aVwsyLMEtS7zDOscXdtgreXxF59TFAXLvX2Ojo959uwFi4ND/uj7P2AYhmQJDv7aLU4mJM5brLVJxhYCZa7JpMA7h3OGLKtRmSaQYOe73QYlE5siAkKL9EDsWvquRWs9Bmc6XHC0/QBSkZX1KCkTyUmoJFmmcaZPfAxniSqidEZNBO+wMgWtyHH+jFRELYmjvToyFt5RDCxGQpoMARV+8QU3XeJni3+aEoyM6y/HINdgnDQtScwFMY6wgwIhwjXdNUnhRv2ulGMhH0fCScWbLinkzUzUd+dsnv0LZntvQX5Iu7pk0zd4a9JcXghM34/cjOReXG12uChQRcHp6iLlELaO9dBBDDg9cGosjenIgBDTUjYGDzJHyZy9wzeZTBdsnefUWDJnUc7Rx4COAqVhK8AhCTpjv5ohfOC4qBhMz7ltU/ir9ah+h2u3hNkC+g6MIbqBxMM0EAK
r02f4POf1X/9rxOmEK9ez2rUoLVjKCaUXBOcJhWJelhQ6o1YZrRkospxaVSyynLZp6ezANBr2dcG5z9jmkdvzJTsBz/st9/eP+eav/3vcnRxjjada7LE4upvqwmCZ7R1Q7x9izcCLz75IQQpljcoLut7ijE/gJiFoN5u0QJeSrtnxe7/1T/nk84fk1+PFIqWo5MXXW9u/VqcrJOgsDZnrcsKkmlHkFXleUlczqiotdNpmx2azxjg3JkBotEpee6kyovVIscfQb5ExkGeKfjC0bcd61+C9Z1JV47IrSwU+zxFEMlHfaB1F9Ox2O6qiZFrXZFrSdi3OpQTRvm8wzoHMsC5ph63pUyFylvV6xXw6JdOa7WbLJ598zBtvvsUbD+5xdX4OUpBVE548foRSr7h1fEQ9m6OyDCUEdVEwqUpcnrqkzcUlbdumlIZCs9v2OGex1mCM4eT0hLZpqOoZd2/d4pvf+jYqy7i4uGCxv8/t+/eZLmajRVSMHaJEjbbT9WY1al49zg40waNEpNAZmY/4IHBRUciEr8uzjMEYsizDjHrpbAzctN4h7WjNLSuGwZBrT9e15EU9LiSBkJZ2WZaN2umYDBDO4cOAk4G9SYVQELOGaBU+QougFTIxU8diG8UYHBgC0nuUdSggEx75ZyCCPYwQ7+t5dARkEDfKi2Stjz/zKy19x5GDlGOBvbFY3BTx8aUnJkPkJiE8NYBf9rzXDNyhO6UfrqiOv0l5eBdzek5wK7phoB+6pMwRCYrZDgYfYFJP8J1BOM9cZygvqWOGkhJF6pSnYVx0C8F8PmOxOGJ2cMhsOme3ueJye4XZbhHOYqLHy5gYHXlB7Hv8qFkXWvMiOKSATCveufcu1dkFl82Gbbsh5jlCF0hrEdOQJJr9Dt+swQwwjh62L57z8X/2n/L+v/f3KL/3l1g3K+anL5jZFqcEh2VNXRXpvyNT0auqklKAiiKR+KRgXpVMtKZoNjzeXJLtLTjWE07yLWjJsijZy3KGpqdteo51PqqNItp7pMgwZUk1W3Cx2vLq+Rl7t+4xOczJspzgGZ2I0LUNfdehsmSKWi6XrNZXvPPgHT763rdZLic8/OQn/MHvfZ9/9z/8X/zce+5rdbpSKfK8ZDZdMp8dUpZzFBnBj5HpUlKWNZPZkuzqnM6sv7yhY8B7k2DkVtIZR55N6HuJCwO73cBqtaPrOqb1hKoosNYQgqNrW9zQk0k4OtynquvEdLA9l0XG1TqBONZbgxsGZrMapTK6LjE+wziLve4qopREH+i7pKl1Y6DmdDqlKAve+eA9bt++xe/91u+wWMy59au/wssXZzx5/Ij16scJGl7X7C9maCnZbDdsdzvW293NmzCENKgz1tK2HSDZ3z/iw4++zbvvvMtsPuHy8pKuHzg6Pub1t9/i84dPx9lwQVlYjIHemDTqswYzpHm1VBpvDaWQeMDJiPUJA9n1Bq3qG/hMGLflWikGN+BdTxAKGyLBDVjvKKspeVljvEV5T981KKWZTnP6wYwc1SyZTERAeFDOUeLQOlKpyHJeoWY5MXic0mxd4KwzbIzHXU9Ax9EP0SOdI9eKqQhUIiLFn4Fud9TepuFr+lAgEarEOJdO47w40ukSeQwR05w6Xo8axnPbOKe+5rdeF2quC21M3xOAEK95vIDIkXnNcu8ue/u3OD5+i+w7f5GPHz6kef6Q7cUrht0OHR1E0FVOpnOmswlKjXDuLC2Ogw8UeZEkjki8zKCcoosS4xxRV5xs1lxcnjInPRgXeU1mHefDgJca4Q1NVuOyKVFqDuoJSx8QRUa0htPNFf+8XRN9pNaaZVVyvttCPU2mHW/ph4aQLcmmc8TQ4PuW2O6g7zDrK374D/5P7LUrth99B79/i3J3ycI2fNMbPiAjj5GZkEzLfMz9U3Qmacb36mm6f0LkaugI0bHcaWx2ga8rjmYL1utLNjGyrCbsH92mKOvxh+mI0WFNj2kb1leXPHv2hElRcffNNyjmC7K
yZsQUEkLEDgNKKeazObtd5NatW3zn/Q94/xsfcHTnFsEPGJfm3F93/fyiK1K3M5/tM6v3qbIFuayIXtK3A7loyfJ0Y+U66T67rsOHdPSN1tJ1A32frJNFXrFeb+mHgc0Ik/HOcbh/iPPpGE4M5Llif76gridM6gnWGnZNy9XqktV6jRkGvB3SbFUK8kmVjszO453F+siuaSiqJCsjpOIblcC5wHa7IQpBXmYQA6Y3vHj+illd8Prbr/Py+Uu2mx2Hh/u89sZb7HY7Hn3+OZ9//imff/oJXdPSDR39qNnVKnFsi7xgvtjj3t37/PL33ubtt95lulyw3W04vzzh6fM0s37/mx/y5lvvsGtaXr54zg/++A/ZbjbptWuNcgFUoGk7UsKuRYeY5rs2JfWqPMf6NFIIWcA4jw2CfAQE5bnG2HQki0LQ7bYJmiJV6qpVTwiBPKvohoS+tM4m0ubNJt4zqUsYQDmJcIJKV5AFqjxyNMmZVgVCKfoAl51Dbhu68xXBjYVMjkUlCDSCaZ1xNMmYlYoq+8XH9YivjDlS0MB1EUzz6evpiBCMsTeM2YPXS7NR93BduMd+V0iRbM/xK7Pr652dICWCaEVW7HFw9DaHywfcPnqTw+PXqeoFQkjOm4FvvHfEy7sfoC8v6XeXyN05CxkZInTOsigL9iYTmrbBDB17izlS1Vw2LauupQuafHFI5xz95Rmr3YaeNWG3TfJPAl3XEKxJjOtqwkxXqL5jtWsQZU2IA5cSLoNHGVjUE27fus9J09Gajo3wVLpkqipM9FgB1jlEXhFFilAvwxJ7dQ55CX1HHDpCb7n6//y/0f1AfP8DGgVN4zi5vOSfnj6nEpa3lxPenezz9mSPw+mcKs/J82To6Y2hi4FKapY6RzjP5dkL9m/f4265YHb8Hndyz96sJAZNUVXpZOIjzg5sLy65OD+laxrmswXOWZrNNgUbqByBxNlA2zS0bUtWlGlf4QPRON77xtscHE64OnlB8I7tdkvX9l97z30txLwsK+q8IlcFcoR7WBPYbVrcAEU1JI+9CJTlhLqeMwwdQ5+eSE3X42PK8No1aSPe7LYUOSxmNaFKcy1vPZkUZDpjudijKCcgFOumo+8aLq8uGNotwTmc6RN0RUsUgukYm951CTDTD5ambZFZjpaSXEmSt0ETMGyaBq0VB/sHlFXFp198xnQ+4b333uX4zh2Eznjy6DGrzYbLyyuOj27xm3/5N/hrf/2vYQZz80NYr1eEEKjrCVVZpYj1skZIyW6zot1c8ulP/5Rt1zCfL9k7PKQsSuaLfbbbHdtdw7OnTxjahqFrmEySWQFSHtquaVFCEaLFuQE9UqR0rlNXTQoCNdaQ5zkWweDSzehDTFk6yNEclnKxhNQpXh1JrhOLtShK1pv1iMLUTCYT+lGPrJSkyAosaWvrrKPIJHvVhINFzcHeDJUrtoNFbTqMlJw2DWbbE4W8MQzICLkW7M9r7hzM2J9XzKtffNGNo9Y0jg+IFPNy/cl0v99E/YrrzjU9lsRIEBQ3brP0K2Euryvs9Z9FopnpgnpyxK3b73N8+02ODl5nsbhDpkqQmtb1rHrL1jjOTWDn4QpJUy1w1QK5d0ydC3pnaaPg3AwMlxewsYgoKaTCBQe7lr5p6POcev2IPlNkmcSXOXqzRtmEkowxclTMOGlPaETASGiw6CrNfLUquVNVnJy8xGYTjJJc9pad31DVRyymC67WF+xUhSoVi7xgfXlJmMwIc0GtoNtd0buIOJ4ihhbhLaLd4FZnhO0W81v/iDoa+g+/QZjVeOvwEXoh+X6z4Y8vr3i3mPI37r/NW8sDMiXIshypBBWS6CLOWiKKd+6+xf7eMVIpsr6nLBKVMMiUfI1ID0LvHKZrIUTu3r3H4tdr+m6g2ey4ODlngaacpdNx13UolXZE1hoiUM6mZC5nu2s4OXnB6uqK7WbLdDL92nvu55sjRIrfVmoElQcIFox3OBEYWoPe7ajqGoSiyAs
mkzkhgjEOKTOELPA+0I6SMCEEe3s1xjqMFWhSqkMugEwzmUyZLvYZjKFttgzDwDB0mKEjIjA+jMaHBD0/3JszuMjF1SWRpN/dNg3WeYbBILyjWi5QQmBcYgDvmpbUmEjOTk8YhoF/8o9OqaqSW0fHVHXNa2+8xsXZOc+ePOHx48/44vOfkCvN/v4hxWRGXtcs9vcSX8JYttsVl2cv2Ww2rJpdSvxVkmoyZ7ZYoquK6WzGbpMkNM45nr94MbpYqqSqUJrBJH5xN/QQ40g5a9PScNQZhRCJwaOFBCFwMdL0A2Ud0tEJiXMWqTRhhG8LqchVzmANUSmMM+RZOm5lIWEe22aDyhK0qC6rBC7yKfHhen4ZbMANnugdUmmyXJMXJQ5FXniK3FJoPR6p3bUglRgCZaFZzCccHS442p8w/9fY9P6bvvwYBS/Ho7+I119vEoFxLfeSiSyW7rFUbceGGCG/VECIcakob5reCCqn3rvNg/sf8ebr3+HW0WvMJse4cUk3OE/vBf3g2A7pPdJaQ9saNtaz3ayQpsHuNmxPX/DZ2QvWzhCmewRRsz9YqizDFAX2YkMsC8gyZosZEz9wP5/wcttwcnmBO31B322xk5oQI6VULG7f58GsZtc0CC+5WF+xjRJTVASleWUD2cExVabJvefl6oJOTRjMjmmRM5OSre9wOmfXe8q6xgFWCZzKKBeJ/yGEJus32HY1ckkgliVxt8X84PvIvSnh3Tchg7gz0EaikylAM8KL7YZlUXE4maBEZJJppBBkkymz6SH1ZEmuc6LURO+p51My2dLtdhTTPYQUeGvwvSEEkEVF5jx7RcFivmB1vuLZw+cEdYEsJ8QswxPYrDe4LuUtOm84ffac09NTQvRcvHrBYDu22w2vv/Y6Rf7fcJF2LfROTiggpq4p+kCMjuiSnMs7k+RLmaKqymTVFZ4QNTmBIAza9dSFoChKBjdwcjIQXMR0O0QIuOAoyxI7CpJ9SECbbhgwpocIxhhi8EzqijLTHOwtCFGw2lyRZRnbzZamaVO0TIy0fY8oMvq+I8/z1A3aFDUTYsT0ifOwWl1SlhOePX3GW2++yXqzIS8Ljm/dAiG4urrk2aPHXDZn/PSHP2DTtvTWMS1ymj6Ry3KtybUmCMlyb4/b91+jnk7JiwopBdv1hiIrmM5mmL5jvbpivV7xySc/QQhPNiaymsHQdQ06y8ZNuSeEMDrQxnmhSmm+vTHMypQW0fU926ZlOqkg+MRpwFJVNb11aF0glAcvEutC6sQmHjfrZVFhrGfoO7wP5FmOkgLvHUonhmxwCbjjjMcOFtMb2t6CUlhr8M6lXz4xHGQUBDEGlUePUhmTumA2mzCbzqjKX3wEu/d+VBmkdFiiIIb0dd9IySJf0eamWW5yYcnx9DfGst80xKnLjUKwPHqbX/vlv83br3+H/eUhSiiGGGmtozUOGTy7rkskNh/oz0+4vLyk323Ynjzj8ZPPePToc1btBlFUTJdHZIt9JuUMc3bOUNZcTvYp9/bYC56DIbJa79hJhYk9u+6KEwnGgSJyOJ9ApjmPoFSG3V3w6atHN1Sz/fmC1/ffZLXaYnzg/PKMba4RWYGMOcc6514948q2dKFlG2p0PWWe5azPz7HzGb4oWGYZl+enuGpCttijaEnMa31AlBkBi5rOcF2LXG/wuyviH/0xs7szmNXs3IAwkcIFbhclby0PKXWGIyKkpDOWuiiZlhP29x8QZGJv/+iTR+wGKEvFhweSXPYMmxUH1QwzDHRNi2kaQkx42KuTM4qiYLKYIbOcSOIiZ1mWCII20Qb7zZoyz0AKzs5OePjF55i+pcgV1ht2uxW/87tfsNlt+F/9b/7jn3vPfc14IYUp+piQi4Nx6YYcRfzODeRCIFSCI1dFTa4VWaYZfKCuKqbTOd4Hzs9fslld0vU9XSfY7Sxd2+DMQK7SWKGua6p6junaceHU0LQNzqbAxUwrFtMJi1n6te0G2r7He8d6kxZyg00
ZZChJdJaoBUM/pI5dp4RiJQTOB3qbWAyZylhdXtDsNrx6dYIQgtXqkqurS+qqYjCGejahmFR4Af3pKafPn7HbJiBNJEW2v/Hmmyz2D+nbln7osSFQVo7FfEHwISkIrKOqJZdXK7z3zGYzLk5fouoKH2AwiQTmY8D5lE0Xgr+h/cvrznHsqLquQ2cFZZaCK6USTIoKR0r6oG9xSJYHh/RDGlG46BnMkOzbWifympDkRY4xBmc6jOko6xo7GLq+o6onlHlBbwayPKPpLKdXKxyO6TBjCJ5N17PeNbRdR/Qe9xWuAUSi92OEShqLDF8PZPo3fiX1giAwZqL5cfQqIkGOdt5ActONc5IEwUljsRDT/FaI+BVVgiBEwd7iPn/91/999pav4XvDq5entMZw5Qx972gur3h0+ghnPGHoyAI8evQ5u92a7WrNbn2B810K8cxzosrooscOBil7jqopZ0OPN+fYl0+4LDWnkyka0M5xMJ0jZ/s8W18Q+57Q7Xg5qfBCIZVmOSk4yo+5bDeIakHQmot2w8l2TUQzm0y57SZcra5oCkfwPS+rpHI5OrxF3K3ZIvB5iSnnVHdKOr8jaEVb1MzefJNd12Kip5gtME2LVzVqOie2V2jTIGQO+Qyf5bA9Z/KnX6B/6UN2rUX2PaW3HE7m9MNAVk4odI7Kcsoioy5m2GzGT086ZsuMddOzigV6mnPR7Hjy6pxhOKNUBXt304K7uVyxubokqwrOXrzi0Y8/5o1332VxfIvlkWa3atH1hOl8SjWbEBsoyxrft2RFjlQalGS2mLILPe998A6PH3/B97//Odu2TXFVX3N9PWUMQfCRoTM0sqPPDd539P2OUmfILGPoNJBALlrlKK2ZzjJmywMm1Qw7WKwZaLYt7eBYXTXstmk+i3d4qZnWU6p6SoTkYtltcMbinMM4i4iBxWzO0cE+eZax2ja03YBWkq4faLse55J1OCISLESAVqlYKZ3Rdcn2WuQZw2BxxjKta4SITCY1fd/z6NFDqrJkf7lkKKs0v93tEELQdx2DtRwc3wIpx9e0TfzdELAhEkNgs93gfWQ2n2ONx7nA/sEh0QcWyyX9YNlst1xdXDCtay5ivIlCsi7ZfrXQbAfD0I9Q5Bs9Uxod5FlBDOnYb4aBoqrxpIejG2eUdoz7dt5zfn5BWZT0QBYtJljariPLC4KPzOqKxSTSSsGu65KmN8vRumDXXtEPHVU9RUuFs56dc6x3G04uzpnOZ6g8p3WBi23L0CepXgzX0O/EKhh6w3bdsKo1dmjT7PMXfF0rEoQIXxbTEWROCITxIXeT/EsEef3GSrFTSaWQfk8aqyiKouKt13+dH3/2Ci9PGIwlBMluN/DctOwGw3FVUnQDJ+2GoduhI2TGsjk9YxAeigxsRGpFVU+QecVcl7S7K9p+zZOsSsUzK8m7LZc7i9vN8FmBJ/DUNSgliVqzt9zD7Da8Oj3HVTOoZ3jRsZrN0UXOcjLjuCjpPz5hm2koNNtuS1dIytcfcEcWvHzxGV4pXJFzsVtzdHyE7Fs2QrCbVEynh+QvPqPLc/rplOkkp+m2hHqOqRdUk4behNRVmy025AQnodAInZGVJd3zNWH2CLUouFdUZDJjL6+5N99HjypnIRQqnyLmd3ly3rC+OEe/PGcnYbe9wp5fMdvbZ6qukP0Z9++/TVZWZFkxjjUku8tLXjx9wmp1hVCKejZFzRXOOFar3Uj4M3S7HdYOlJMa13dcvTjn9OQlzS6ZTBaLKe+8+w6nZxs+++ILXrx88bX33NfMdNMsd+gGNpsd+IjOBC4MaWQgPSGCcwOxC4ToqSdLtJJMZzPmiyWZKjDKwAVsW8dqtWU1pjJUZUHbevKsZFpPGQZH27WYIXWvxhmscxACk7pCKM2uGzCbdhxuy4QTZJRrIXDeo7VGjRZfP+owzWCSPReB0hlFofEhgUWmoxxts15z8vIVt27fYvqd73Ln3j2ePXnCYrFgvVrRdElPvNtdjcG
QOWVdo6Tgar1mGDratkmzZLFjubdM+Ehj0oxpOmG73fInf/on6KJkfXWFFFCUJQQw1mD6PsXGC5PSdmPaoaeOSo52VIkLiYplXQTpEH2PEJFNP6D2kpNPKU0/WJSSDH2L1FNQGpVJVIAQHM4atJR01jLJEww+D9B1DXlZU1cT8rykaxvapkFJxTA4hFRcXG7pux1OPSefTggqZxcjJqQZ7jVtLEbwMdK2Ay9fnhJMS11qnP+zADFnxFGm/y9FRITEphWjRhSuiy6JxQDjOCE99JJK4Uu6goie/cUbCLFkvdviFZyd7hi0Q/YuHVmj4/H6jNemUzIfWZ+fsR06nGnJM4nsOrJdSyglg4+Y7Yos27JXv85yMiE2O/phjTeWs6ogn83JsxkHRcndyR7nlxe8OD/F5zlxUnORSeZ37jEXgtIrFgIeX75gM+zQ5YzOW0JZcHDrDhPjaNeXrHSgzwoGr2Bac+uN92l9y8q22GnFK+e5d+9NXG9oMuh15P73fpn1qxesVca2mnDw3oecb3b4LKeWiuHFKaGaofcPsGZAtD1xcIiiIitz9G6DuOoopzl36pKj2QQZJME5dJ6n1OysoguC0+dPeBkEVgT81Zb7944pbI+ZOIS3aNdSZCUHt29TzqYolbIclYKXj5/w8PPPmdUT5vtLqsmErEj7p6YbaNuWAc92dYmIgXJWs2rXPH3yiEdPHtJvV9w63GPoG5bLBb/6q3+ef/uv/S1+/JMff+0t93OLrhIgfMT2lq7pE4kHSRSWqB1eStCCKBOYxfqUGVXVU4oiJxPAaGY4PV+x2bR0bU+MDiVikikFz35VkWlB03aYoWcYOnzwDM5hrCVTEpXltIPnan2JUoIYA5lKb9x8jFQ31iQNpBAoKZJm1TryqsJZS/AuLeF0MjpYn8YWZVVh+4Gf/vgnHBwe0H7RsL93wPHd22gluXX7mPV6RZGXBOs4PT3FeYuSiuACZVUghMRZx9V6jXWWSk7YtTuE0OwfHFJVJWfn50ghef70CcYMdM0OrRR1UbBtGoxxWDsgIihZpmKQ3vLjrClZmqXMUYI05nEGpRQmGpSWWOvZNluKPB837xlNt6MqCvqmISsmxGgpigIfND4Eorc4A71INkdJIMsL+m5HNsbyGK1w1qRstrSKT/PJ3tGbgdWmQ1Q1VAUh0wkBcx3ACMTg6TvP6dCzWa3QIuKN+69TJv//fsUo8OFLMKMaORGpURdfdrmCkSEhv5Lvlu75EBLIPHEVJLma8tmnH1MUGSfrFdNqzsXTZ7RKsCwn3PKel5srPj1/iSYyF45dt8Y7S2cDwhlUJsgRFAklBxFO12cJQKNyDmcL4mbDi90W7wN9VdE2O7qmY3F8m3vLJVpo5GD5PHZcekcuMxolkEPHfm/R65YwHdhNSh7HGlRNVhcczx7gT8/YaUvQkRWRTV1xtLzF/OQpOyUJWcHLznL3eIF5+hC32GfrJff356y2W4aspFxOqWJPW9bEoqT0jn6wlPMD3Ok5MiuJ6x2Z6ZlnBdPFEmMb9kLNUhfsZQWVLiE4Jrlif3aArQ758VVPiDlFofA2MJgGd3WW3jPescgVh9WE470lyzu3yasCb1NWXbdrePzoMS9Pz7j97TuUdZnUUFlGPZtST0pOT0+ZHu5RVAWZT2ahZug4257R5YaT7Ql7ywkXl1ecnl/yj//RP+H99z/iN//G3/za++1r2QshBKxxmN7gakuGBumAIXVeOlkeg4t4O+B3ES8EVT+gZU/TWh49fc75xRUEmxCM/YAKHoKlyFLS7On5Obu2p84V1lo2TYsZBqpcMZ3MEULhTIeSKRE4IkdfesIXRgKe61mno8hLRBRkmUqAlpiKgNQR4mgVjol5mhbNnrbfEc89WmWcnb7ik88+5p233+b2vTvs7+8lC2VZsphOOb+8IMsr3BitXZYFVVnRDxbv0gx2Nl2y2N8bk4Ydbdvw6IuH9H3PZnNJ8BElBbPZHLdZMww9g0nb69T
oekS8ttgm+ZXUCinTa1FSEwWYsfDmKkeISNfsEGKKwJKXE0RM4BysIUaV/OTRUxQlfd8jlEQqhYiRTOnRoBIJHjbbFUVRIiLorMRbm5gEMbK/f4vzywt8dEhZJNRkjDfsAmIaLYzmLvwYdNmanmgN0f3iO92v2jNiTCxdMZoWECSt7nXRvU6HGIE2cnQBfsnkDcQoUEpzdbnj6vKcWVVwdbHiAkVUirbrGOQJB8s9Kq2wbY/1hsvgyWYTdNej6Fg7x5ApVJaRKcVBVtH1HbtmBzpDlDWXMZBNZ0z2D9GDpXCONnqulhWtGVBas8AzbdbU56/os4J8NiEMAy9mU4oHd8jzmjvFjKePfsKmaZH7NU4rThQUb7zFLSInj36YwD77klWjWD54l8nqBc9Xz0FF4pBxd3/Bc7PlqvO89uBNjuSas8qyqyPfO3zAH+5abFZxkN3i1ekKW1eIqyuCKsEarA5shsT8mLqMfGs5vF2jIizyjMPJHnVRYPUeL1xF5yz9y+fcmmi0ccxCw+UXT1npgvZsxa/+pT9HNSnYu32LvK6IIeKNwQ+GoevZbJK8VGqNsT3tbktUkigCMhPsri4opgWTvUXaney2XK2vOOtOsRPHu9/7kOZ5h7UFb37wHos//BGf/vQn/Mov//mvvee+Ng3YWUfX9fR2IIQKIQQ+DigZUsHVPkF/Qw4u2VKNsfRth7eBi8sdp6cnyODouoam6xiGARESrOZoUbMbBs6uVlRFwdV2x2a3pW9bcqUpixlByLRVd+bmOFhVNbu2JRNg7UAIAR+SiwrhgZgSU8scP9LGtJYoKRDBkxUZsRjhOc0uYR+lTGkMdcnv/8HvMwwDMQZ2uy13795hUpfkmeJSK27fvo3ziRsxmUw4Pz9nvlgQtztu3TomuJQMIfOC05evsIOjaRvOT19xtbri+NYtXr56RVWWibcQA9b0iakgzU3hY3RzCXzS2JJoZhAIzibB9qjg9yGMDitDs9uO4PIWoVJKRogS47bU2TIVdGvJs5zgzeioCxhr0VITgsUFz9C79Dnn8QLyrEh/jhkQUnDn7gNenT4jKIVTEhsTB1ZeL89ubF7XR/hACOlH9GfBkJYouOKmI0+aXW7mt4mlkBZjkpT8G4Mg6jTbjTFZiNM8N/2z0hmTySEidrxcXzJf7HE2WHxVcG82Z9vuODOGTCgeLA9ZrS85a1bEABmeaAd0CATjCd5hfeR04sknEypVM8kq5uWEz/stjUrg/7KumTmHOTshe/4Mt7ePIHLW9VzO58j795mLjOl2w0nXYPZmWB9AOJwYKN/5gImU+NUF53QYUeOUJpuV3P/oe1gfObt4ijWwqZa8vXebplvThR3Pd44/9+Zd+qefM4QJd4YFLzY/4UDe5pu+xA1rqtIzm5R86/YD/qkUmCjRB0ushzLTdNuGXmmutOQwrzB9xzQWvH3rAC0EtdZYkfHT5xectytWmw0TBRsTeHNvynpl2M33+Lxx3Dm+D1lOoTNUWQIC23WYdocfOrJM8O1vv8+9u/tMJwXb1RUIxaTvEVrS9zukBDv0dK2iqEpEJhhiRzmZ8Je/99d57eg+/7f/63/CJ59/ylvffI+/9Xf+Lv/g//z3uTjffu0d97XqBecS39Z7T8ATYhpEqywBbcKIsFM6omSOtQEp0py363pOTs8I1hC8Y7PdYQZD3zXIGNF1RW8DJ5dXiBjpvaPpWjRQZCVVXROkYuh7mu0GKVMm2HQ6JcRAdBanJX3XEaUCpYnXEHIh8COEXdxYMAVt26O0RmUJaO6co2laFssli+mcU3PB+mo9RqtLXr08YX9vn08//ZT7D+5h+oHL1RWTesJ6k3CTOsuZzxdkWUGRDeR5zvQgLZeev3zBF4++oNA5WZlztbrCe8vQtRR5jhCSpuswY4csYsRYB0SU1KM0SSCQ5LoYX1MkjgQqZw15NUGr7Ca5ViuFc56maShKR1mmmbUbi4NpW/JqkngVhUwPMwSOa93quGA
aIS+CpGIxzjDYgbKoKIua3vYUeUVWTejdl1KEGBLcJhljxwFJiCMQPC12BBH8zxK4fhGXiNeWXUbhfHpUfBWCE+HGGHHzscAY2S5uut1r5YIURRqjKYkLgU00GDMw2I5MTZgv9hguXmFsx7OuZbqYs4gWhp61s5jpFGKk8I7KGFo70JcFA6BjQOcanZfs9wO7wYB12E3D86IgP9qnzGt01xNPTrg6PqQXDmEDRlmYlZRxSXQ5oVAEY1hrmRya+YS7y31Mv6GrCyIDq6bnsq44Xk6oThw70VBGz0vT8s1f/ogffPq7FFnFYT9l2KwxjWezesiDtufxT77PJ/oL3nhwxJvLBXffPOZ+tqIqPYF9xHRHaFukzhFSkJcKNXiUUvi85rL3fFsGcpVSjn98avjJ5ZYqm3J7b0ahJKevHvMvLs44P3uJiYILPedbh0s2r54T7+2R5xkiePpuS99sMF2HUoHDW3OKyiOCTtmGbYMdegKe1fk5UTiCG1hdJBVPFJ55PeN//Hf/J6gQ+e3f+i1enZyjQs9v/6N/yC//hb/I3/3v/V1i//XL4a/V6Ubr8dEhSE9UKSNSS3KdSEc+WkyATEQyrShkigv31tHsekzXUWWKRyeX2GGg7zusGRLMIltyud7gjCEbb9BMapTW6CId6Zw1dG2LtZayyAgiEbT6MYTSDQM+gNQZduggejJdJjtrBOfdKHwPRB/Y9QNFFihLR1HkkCm6vsUOFc5bpBRMqpKISEaHq0t+95/9LnuLBfVkQlFV3Lpzh+gDg7HoXLE+v2QyqanqGT4GtFbE6Bis4dnTp6yvrjB2wMfkiVdC0TQNWZGhtGboWoahI3g/dlapU/celFAjOSXFnouY5roEB1FiXI+UGlmmEQFCJjxm8HgRsVZQ5nXC6w2GPC8Y+pYgxgIbAaFSQR712ClFLc28CZFBqhHeEum2W4J1zKYLiqygZ0DmBT4YfIzpaP4V66uIY5JEHLWsSqUqp9SfCZ5u4iuMRocIEIlS4GNEfqWQwjV+/FqlIFI2nIjEkBBiUURCEAhyAoKL1YqjxTGf7S5YzOdMm46XVUZjBm5XU1ah4VxD07XMlWZGxjoOhKwgyoAfwA6OfFLBYImVRCHZ7la048+9znJmjeVJqfGTij54BjcwzWF2sCB6T29yQmgwec7ZZI64/xrVbMHrPvDF4x8R7QR9KBHG89IX5PuHvF6XPHz1Cb6YISkQfcvtd76B7VuenH2CfO0IXMFSZExNyaOfvmLaT/j40x/zzEHWbsmKnNuHsDs55SQMtG+94M7f/Mv88u0HnMYpJrvDD0+vcDJD9C8QUkHcEUXStp+3licnjzG756y4w8Xk21DNqYOhX13w4xevWDcXXIUkB93LZry7KMi3z9H5gr3lfYoqJ3qD7df0zZrdasvq8gprevp+R55NKKf7qEzjzMB2s2K3WScnqDMIIeg2GyZ1xWvHr3H14ozpYsbTp8+Z1ROMkfzw408Z2pY//xd/nftvvve199zXUMbAxxSKJzXoLKKziMoVOtPJ4REcPkQ8gagEhZqgZJq37rZbtJZcrdYpDz447DCk5VM9oRkMbT+Mce2AkukoHANKKEzXIISk7TtUjHgkWiiapsO6nhBTmCUqw4WA7TqKXJNneQJjeMdgIiqOlveR5JU6m0CuFQFN23ZkKrETppVDacXVZUqBuDw/xTpP9IFXL15yeHTMxeqSTKUZnzGG2WJJUZQ0bUtdlrR9R28tednRDwkn6dxooc2rUYQdGQZHJTKMSQT+4B1CJTfNDY9WJphJGOPpE0JS4d2AC27kAhtCD5nWKJ0hIGEZnRuVDx5vHHboRklTwO02KJ2zsZZJPcHZBJ6OBCZVhR8Lp3eGijIZMnxPiBE3GBq25EVOUVbkOktAoZE1IEZ6w01Ru3n4S8gEAjV+5hcvGYvXfxECruli/st5Ltdwm/G6kYVxbQkey/HNxyPTag8RSvrecHRUIs8t62HNfr5Abnc4bzj
rd1TlhKx3ED3b7Q4zLSnnM/RqRWjW9ErSLOYorcmlYJ+M1hkutSO6DjHSxA73l9ztDReuw2uFG1o2GexmBWU15Y3ZAf1nj3moe5wFMShEFrhQitvvfhcZLKcvH9LOK0K+wPWKLkYO9JzdtELZFadnHed7c37puOb51XPeCoqjYZ9g5/zoB39Ee3HJ9GrLng+8f+917r79Ot/59jd5571vUmYZT18+43K9ZvfDT7lz3DN7/U3+SGmelaB8YKsLvpXD+8PAfLlEIyh9RPgVm82Ol6GnLC2LqzNif87vvrwgZjM2aoaY1uTCsF8vOAaWSnG0mDGbzxJd0LSYvqXZrnn5/CWXF5dstit224ZvvPdt7i72KCcTmvUq7YtGaaaPpIDbGJjPlpy/eokJlsdPHvL0xSmPnzxhNp/w6uSKZtdyenbOO+++z1/5H/y9n3vP/dyim9JeI3kORSHRWTJC6Eynbo4wJu5aovBkKgcp0AqccyOUPGDdQJlLzoaefhgSujDLUiaZGdLGXIAMaR7mAdsmlqi3FiUEUSYXWW8tQ9ehFSA1Pkq0lAxDCo8TKvEDhHCEIIhapMJ8fSwnJoLU2LVnOkuMWiWZ1DXDMJAXGVmuUUrgjMF6R7NZs2saNu0jnjx8xNtvv4m1juXeAlEofAycn59STSc8ff6SoWnYbpsxyTiiZPK+S+9RWVqIDd2AMQNt02CdIYxpEQmkHRDCI1U+btFT6GeWZ4BA64JoByIRBTg7JGtucJTVJC0IhUAIOdLefEqxdcl8ap2hLMdg8JAWQ845one4LCVS6CxPD0RrCSFwHahonGHMzUXnBXVZIU2D8+6GCREZO16ZuAsgk6NLytTtjrbaX/w1fr03oLFrvYi4+ayAf/VLHUdXxGuM5ZefssMAbsdivuTp9opDoXlelGzrguPLlvXmgs3BEqcURyZiL9eczwusUoRgub1/iLM9LQ4f0sJR6AxfV8yzKaFb40NgiJGt6flUphPUvekt4tWWx3GLETIZZGzHRb/i+I3XObh8SS88UVva1StO8zmiG3h9b48Fml2zJS+gtJ711pDPCj7Y3+NHT35APJyyPy+Zdpa7jcI8OuO3fvCY33jjPX765Dk8fsF0VvOdD9/nOx99yL0H93jw+pu4MGAHw/F0ylxpptOa3W5H/vIRR0Hwb0+mFHdvod64y6HO2HffZTopURJmy32sMfzgsyf8R//4n/Lq+7/Ld+/t8UQIWuvRdgvBUnUbDu7e5WBYsYyO+7f2eO2tu5R1jjcddremb7Zsr654+vgxF6stzjvms31u3XvA/q3b5HlOJlUKKsjy9DCVEhcsZVGyd3SEyhXPHz3F+siDO3eZVjVd33C4t+Dh40f8yacP+dHDR/xvv+aO+/mdrhIUlaaqNXklyUpBlivyXCPl9cxUEEVAKUlZTiizGmKG7Q1CBLq+py4L3OCwzhNIYBznPV3fJkuqENhxDqijoGt2aWMfE/4vQVoUfd/hXSqgQpZpRigl1ru0vNEqSc2swQY/ArlTVE7wAT9u1F0I9H2KXK4qyaQq8T75sZVM8eYuRnKlcIXAtA6Vac5PX/Hi1QnWOiaTREzbP9hnOpviTE/bDbw8PWW9WrPdbFit1ym6PTiyPGdS1bgY0DHgbZqbdl2T1A1DMkUkPqsYodnXc3N/o1SIowPqmpHgggMlCXZUTQSPEZJyOk/JBiSRv9KjLtnZNPsOybAQvGcwKS4+jp21cx4pQ1pOEhnMQF7XxOBQUiUNtR1SF9jsmE2mFHrLMOIyo0hdbJRjlxuul1ICVPo4SoL82pXCfztXhBuhLj9TP29K8M1vvRk5XH9cjB/78iNRQmt2ZOWM5uqMeTEhW61po2FWaHIhaNZX9HWFDILFpKZYbxgmGik0m8Ew2T/gwFr6Zo2aZGxE5JU3qOg5nu8x2w08dw1dDg6P9YYT1/L60SHLK7jwDVJGou053XlOxZb5rOSOVTx6+Bg/q6B2ZHmFXMPe0YKjgwe
sTs55ata4+YIsC3S7c/L+iloPLN2MRyZwZ/kaP/jj30LHwPe/eEj/+IT9THLnYIYZGv74T/+Izz//mIPDA8p6zofvf8jQdITgePUy6Z+LIk8nWJ1TrS9ZrTcMVclqMuHb3/4Ow2A5/elzurbjP/vsjLUoMYfHPBo8f3yxI+YTOmcIe3coVM03j464011wTw1859vvc3zvDlkmaNcXDJs1Xbtjt9lyeXXJydkanZfcv3/A3vEtqukMIQT1YsGxziiuruiGhEDN8Ji+5+T5U7Is5/zsjK7dobXg8HBBFEtevnrBbDZl1+y4vFp/7e3283W6uaSa5tTTkrLOycuMolDoLHWKMYVhoaIiL0pmswV1vsA7Rd9dobOUEFzkY1R42yOQlHlO07bEEFBKj359h5Kark+awxBjGgfkWYqUcRbvA9EZsnJKFDp1XyKkWS6ghST6iJWpSJGluaULMXFEgx9tv5pMZwy9QWtFVVXYvgelybTEOEupNbEsGZqWiKRtW7RK6RFD1/P5p59ycHjIF59+wmK5wHrHrmnBJ08+MdLstig1cgtCxIWEVuyNJTqHdx5je5z1o2tOj46oJEeKyJENIG6OsMEHqqJKSzMfEMGjYureg3eE6Mbu3CKigujx3iG0Tn+OTrHeXgmcGZCkJGEhUhpF9AElJZmSoyQqGUycSSYKHx1KqcQsDj7lyVnL8WyPPlh6M9wUoXR3pHnntQQLKRNn4M/EcCEV0f96xrjkjrrGPn71413bQTYh5IqFzliVkgfHt7ncrjlTksM332J6ekGbwaUUeAUH8wpxseZSBNalopGRsqy4O6nomh0rPxBRRJnSbxfTCbNW0PVbqEEFwXZ7xceuo95f8vpQsXr+jMuZwlUVspgwFAq1nPNgMmNje1zoaLorvhiuCGbOkRp4YzHjydnH1H7CsbjDrfw+h7c+4uL8jI8//xPcYAg6Z7Zu2Tw/4aIxzKuM470ZgogxhskkhcP2fc/J2Tkff/xTPvrwI8pM0bUdD157i9V6Tdf1vDw7gx/+kNfuPWAtJfsHC/5f/+AzPvn4c2LwqPkejyYPyPbuU80qTi9PmR8fUlc1u+0VbxQVD7LAW6JhllvePN7n8NY+1aRiaFYM2w3eWQSKoqzZ29vj5GzNbDLnwetvMF0uCSGMXBHBZDpLevkQUUXBanXJbr3iix/9kNlyj6PjQ/7hP/6HnLx8SV1PkJmmNx0XFxf03YCUX88T+blF9xpgU9UlZZnfQGNkluAsGWkMRoQyr6mrGXUxg6Botj1KK7QKQJKdtW1HnmWImIpq8AEfHdbYNA9TiWXQdSl8Ms/zlGTrUieriGRlSZZnaQkkJabrsTbFgruYjrelSiYBQfJbp6DH6+NgyltTqkQpnULn6ioFO9pURLSUZEqNx73UcTrnaLs+JVE4C8B6tcI7S7Pd0TQ72qFHqZy8yGA8jmd5ftOJD2YgV2lR6LwnOIMzJn0vIuRKIqXGhcRNEKMZIsuu8+bSjNFYk/zgJMWAkiB9BC1HGFGgbzuqqiIiGIyhEKmIOpv+PKUcwXlEpmnbljiqHrz3adFZliigGHkM3trEhPB2nNimzb33HjsMiEwyr6YQAyak0Q7XOyggyOv5qCReM2t/pqf8xVwKlb5MkQA1/1VfkfhKcb3+OQgRU0y7uI5nT3N+mQm6Ls3cg/OcNismekIhoPEtISjeXs4QQ8+J244RTzn3jw9p1iukbYlFTOD5yZTJZMre1WU65RFou5bPy0ixP+c1V9O9OuGkMrhJjcMzDGtme8cs3W06t8bEnkIU2O2GT9odoiz54N5rxBfn/HiyJuqBKlN8++ADKgtHnaUWlt3lFf/k0/+cW7Mpb8+WZNsVt7ICffqK9SfPIHrqScmkzFJ6NdC2HW3XobSirEqqoqRrO370oz9hUs94cPcuXd/x8SefgpCsNlvqqmS9W1MWBT/4wWMe3H+NO3cecOvOAz5e7zh7vkL6K4IeCJM5ORmHwvPuvTu
8VTgWOuCuXpLRU9c5Plr6dsPu9JRms0kPfaURQjOpJ+RZxmw6Y3l8mDTnzhG9R2cFkIBG/dAitKCa1PzB736MjJ6PHz7k3v17TGczyqymLAru3r3N4Ht+5/d/j832RYq2+prra9QLEqUUZVFQV9eFNx1DgxAgFTKmrKcir1EyI88LrIG8KMgzjVYCa1xK6JVpA9/bAe8NzdDgnSfTGd5ZvIfdLsWqq3HL3Q3dlzE2ErRKMTRCKKKxWNOPQ+9EzE/LjWQHljHNQVWW4UIk0xo/bqGVVGnGKQV937NtGkKArMggCswwoIRAS4HIFMYatmaVknNVkmsZ0yEidLtNStlVKgHcTXoPKqWxzuDsACFHCpnCIkPA+5CkRiGMUd9JbSFEcp5JCUorrE1LtbzIUEqPOtwUDy60TjHYIQA+2YdDShNOkUGaLMuw1iPFQFaWaF0gYzoVWCw+eISMtF1DmRcIJNYahIjUeY6UKi0cZZoNK6cwQw9SYZ0lk0n+NfQdLhoQEq2zZP31Pn2/BcgYR7dgHJMX+JdbxF/o9eUY4WdVCv/qb0zjBPmVJ8qX04bxoegHisUdtust9a3bLM9OeN43HO0dMTt9jpAdL4Li4NYxy+c9QTq6oef5vGB+5zY8fUowgbXreGIMqiw5ONpn9uKEqzBgKo2NnsG11IsFM/0am+aC1u7ICkUmBGfbNXGqOJjdZX818LK95ESuEVlNpmFW5sRpzfKyJQuGqTrk4bMn1Is9fundv8If/sk/52z3GKkjKvPsL6Z8WJWYiytC0/Ar33wzPcSDo+sGwphg4nygKAqkD6w3W+bTKVVZ0jQte8t9Xp1f8tnD52S5SjySuOHu3XvMJkmx8Cu/+itMJ3MuL9KyemUkQde4sqSe76M6i85L3qjh28cTjmiwqxOenL9EesPBwZJmtWEXPK8ePeLy/Bwpkz5/6DvOLi9omwZBGKljHbFIzZHOMoa+pes72maD95aoJMe3jvmdf/rb2GD54slDptM9/uKv/RrzacFqfcGf/ugxzjnmiwVl/vWM6J9bdL0N9K0nOMhUSZ6VZDppcxFqPNJGBBotylFgnyDOVVWg8xylM5q2o217vA/oInUW3nliACEUXdelhU2MZEqlo6eUWGOQSqXZY/BoVeBC+lwIITEfYkRLnR5QgNI53lkkqaONkQRrDindNelYU8R2P/ysttSO+liExBmDGC2wQgT6viPEiJaOTGXI6EfzQtKeJnOoSPrh4IlSJeWAc8m+S0jpxVKixw7Rx5BO32OhFaQFmlIZXhqkFBRFgfMpwFNpddMcxgBZVmCHDhciIUpKlRF1ivFJsq2QXIPj6w12oKySrI2gUDrHOUemFIPpcTJpsHOVXn/QmlznoDKkiHTGoGQyT8jx5+SsRSLItUa7hN6MjA/8UV+cjhiBXEnKKqOoClSm+NIk/Iu7bjCMQMqF/69WVYy4m690uuJnul8xvidk9DjbE7xh5bZMreEqF7zAciQUbddwlklWXcatusadvuRsotl2ay5cw2uHE4ZXV/gqI2YSryN9FiiWc0rjGOjQGSgluNp1XJQTpgdv8qDZ0O7WPGte4uoCoUr2RM69W29x/mxNHRrmecE0q7hoe6rbh7xXfY+TJw952Vt29hHy9CFueZc7s0NU7Hnz1hF36gVit+FQwvPtmmbdEO2AGSzTec1sOsUMA+thQOuMxXKOtZbe9Bhj0z4iBC4vz3n4+ITlYkkIjsurFQcHhzTNjjffuM/Zqxc8f/KUzabDWcPt+3fYXVnKyS2y/TsEoSmNY1rlvHNnSdm9wnVnPP/sJzx9+pKjg32CcTTrHW3b8uTpCx4+fMjQdcynE+o6Y71N1vxr+uH6as1QWg6O9vDe0rYJtiWFZLl/wKeffczl2QnLxYyirviTH/6QVjS8ePWE33v8GSenZ1jjkVGyN5//a/URP7foOhdZrRpOz3bcOhpYLCeQj0F7UiBlkqyImIqGsZZM9kiVJFF5XiC1HnO
DkrwmRI8SGmccMQSsSWm3IXjyPEerNKt11qKz7Ib2k+cFQiRylkbhnMG6lPQrhUiLoRDReeIJOBcI3qelTgg3eMpMpaJvnU+x1kIyDI6IINPpc8OIm+yto9Aa53oiqTtUIiJVytDyI1/2WkGQSYhR4YkoAdFbuJYd4RE66YW9S6/dOQ/BJ6K986lDlGluK2KK3cmKAukdUiqUypLkTSli9GilyPMKZ3u0TrwJISWaSJFpcp2lr6uoiMEmKZlW6KgxIRAIOAeZ0oQ8/cySdu/Lh4BUafQhlaaskuogAsZZirLGWUc/DFR6yqyo2IYB50eDRgxjGINAasWsKthb1BwfzJhP6j8TRTdd4ksBwlcTJL/6O/4l2pgUEilFen2SGziUEJKIodA9dV2glaQvFLUIRNfR54IsFmhhkTga4akP9pBDS64k06pi1bXUD26zNA6l07hitVvT5pp6UfKmzSkM/LR5RZ9noPdQpuH4+C2uguClfcRUtuyXr7Es99nulXxj/8/x4icGHWoeX/Rsz/4EROCdvdtMqz368yeUynBU7fPmcp/bkwnnK8glLJwlBkuZZ4SDfZ50Qyq0umdSlUgiRZ5TVQVFVbJardk1LXLkNA9D+v1nFxt2bUuIDi01i8Uei/mCe3fuoqRmuTxk6AcmkzlCeH766BGvzIxtHzg+uA9ljpotKNxA2F7hbcPJky/4/ItHHN26w/sffZOj+/cILpH9msFgQiRqjdeK1gaMgzt3HnD3jbfRRUXbDCiVpV3N0LHdrGm2O8qyTJlzJqmynDOcPX6FIrBanfHbv/0CKQWr1RprHXt7+6mxU/9NzREx4K3jcr3m/GLNclGRF9MkfZJyjCXRN7pL7zxD6BAyof2KomA6nWPdKyKRpm0g0xAMzps0UghpTqWVujEzRACV4sevwd06y0AkqLZWAtN3KKkgpNBGFx0IhUDivcWG5IK7JnMlD70iBo91gXYw5EokRi1gvRuXP2J806XZqL2OMr9WDhAptCZEATo5ta6zsGSIKElKH7hmrYqkWSZGvItkWY4dHWDee0Jw6BE3p1VCRCKTPjptyyp0PnI8hUArTV7UGJcwlXlVo0RKXFbCE6NBq4yyKMjKEkEkz3PwqYNTMsFdsqzAxICQFucdWmucD2RZypRDJYWBUjrlm+U5OUkKFmJEdB0iBvIiR2pN06ypqoqpzrl0Lc4nxnKIAYGgzDJm05J7+1Nev3PIrK7+jKzSrrvdL5XFN1/WzRRE/Ex3e910CJHYC1Kkh50cfwnAmitCnJDbwBPZc5ynwMbP6JlPat7yOb3zPOlWSUpWTzmWFV80J2xMxxWeB/M97raCZ8MJ3l8Q9JRezJjfeZN846ivzkCeUwnJ7eotvrAN5et3eevCITaXdMOMP33yCPv4Y8pC8cuLN3n16gxLC26DyjVdt+ad+Wu0fp9b+zWLYs6dukb7jj0RWRLw/QbvDeC5c7RPVRQ0XceV6XDOkRfJWXn1dENWGvI8Q0pFcI711Yr79++xt3/IT3//j6gnJVrn7C8O+eXv/jn2Dve5d/c22805n33+CU8ePWFveYQxhkHmqL1jMl3Sd1v2FvsYE8kycASarmG1XjGbzPjWt7/N/bffJC8Kzl+95Oz0lLPzC9abLYPtaU3HpKxZTJd84/0P2Ds6piin6EqSZTD0A6bv2a7WSdEz9MntqTOmsxnvv/8Bn3/2KR+89z6D6dluNxhnefL0Ga9OTxlMj1QCpfOvvd9+vk5XC/KJRGeBwbXsmh3VJEdn1RjgN4rco0hSLGdTlyYlWpeURcGkniKESlbi4HDWYcftehobBKQaRxUx3AQn+uBHIE2SR0kBxvYIIekai3cOleubzXjKCJSjWcNDSIs6LTXBJckXMdx0gzEmi7NSqXPzLhVvKcCHgOk75GhEwIT0WBHgQqQoUhaVtBJrUoFUUuEIyckv5JgQnGYeOstxw5AA3jEyWEuWZaPSlaQYUBqlc4ZuS5ZVeJXhbBq
vECNy7KZDDGnkIYtkXvABXacFlkhBcEzrCdO6SqmuQlBqRTWd0Pc9UQhkpohkhBjIrxeMBGIAawbKLJ04hICqrJOtVyuEkOyVZXKeIcbZukhEs5hUDGWRI0WPjwHvk7wtEskETHPNoipZlhXzquIrmZC/4CsCPj0gUQQRf7bgXkvJrgusEDd/F2N3e93lJjAO5BomdYmiZMGKZV3juoEqOGI+MBP7+PaKvO4IuWe+OCTbQaUMth7Iq5rFbMZQaubNBpllyLpClfs87DqmiylvzT6kuXxIXt/i4cUVG/8SoRTv791mFiUvdiuC21LkkipWhGi4vdhn155RFlPuThe8tTjmMKtAHWF9jxy2nLWv6NZXlIOjms4pMs1q1/D05StAsZgtuPfgHmIeMKEDb4hB8O779zk5aYjeM52U7JqW4CMXlys++/wpk2mK9bp36z6//hf+AvWkZrGcUxYZz9crNlcrPvrwA/b292najsbC9tJzOfTMippZWbIyLYyju643NJ3ljTfe4O6D+0wXc7ZXV5y+fMXnX3zBi5OXPH/xfEwAr3nj/uv86i99yGtvv8Fifw+hMmL0SBEY+o7L05PEYSkKXr14wfmrl7z73vsc3Trmpz/+U/b2FpyeveLoYI/XHtzh4eNH3Lt7TFmXvDo5paoKsuzrre0/t+iWtWaxLFnuFRS1AOmTY8MDOoFXZNQQRqF7SJ2qt4ESQV3W9HlOURRpgSJVsgIPPdGncDgpBFme451DIFP+11hMMp0TQ6Aoy5TVBVhrcdak5ZtPR/MQ3AjC8QQxdpBjeOD1sizPs5TYKtP23FpHEAEpdNpaxxQxo7NxZCJVKjpFTmeSmoKY8H0hQCAkg4UaZ8wx/TtyTGyIITn0Mq3IlURkKTHYj51fpnOs7RBComUihw0j0EeqtMC0No5vbjXKtBy5LhGCm7mqtRakwtsOGzxVVXF4sI9Wmt56ohTUVcG0moxGCUHUCTqPt4hYpNw5PFiXwiizjLqeUOQZs8kEIVWKSiI5EQ8WkvOry/Tz8Ml4oVWSB6oIudR01oy3RHoNwSVtb9u17NqW9Lz8s9HpXl9fltd/9TMwjtWuO93rAvuV7vfm84wxR0OHzyreWt4mKxYMVeS9IVDP9jlfBaZVyQdkTKpDnqwDV1XkYPIG02FNNdnni/M1RkdeX97loFmgsoKfXO1oaVl1W/zsgNeL1znf9Xhr0DKQy0TNunV0TLsRDHKCF45M5+xVEw4XM6amxseBWgmO65yuvcQ1L+n7EwbX4oNBe836XHD54pK6yNl2PYNxZFqw3jbcvnuPxdGMHz3/NH1vhKTUCxbvfpPL00u0sxQonj07oewih3uHNLsd33j7bf76X/2rRAHD0PHjHzxiu91SFpqqKnny5Amff/GQbbNFKk23eI2+gzN+RF0V7B/eZoage/kZzauXLOspb3zjHaZ7S/CB1dk5r16+ZLVe07Qdw+CY1DV3jo75lV/+Fd7/5occ3DpGZRnGWEL0DMPA1fkZF69eIbI0Hlws9iBGPv7kR7x4+YQYI5eXl/jgado1d2/d4vTVS7KiZDGb4N0eu2Z7k7H3866fX3QnGfNFxf5+zWJRMlnUzJdz8qwkRD96ztUobUpHTx9D0p+KgTwvUUpQVyXepyOcNT1iPHozCs2DT52iDQ4lJCFI1JhHlRX5qEpI3a93No0LpLrRtYYEDEjLr7HjlqTcqzQSUGkuE9MIwfs0JgiRFEFEmglnKhIElGWBdam411pRFyV9NwABSVoeplxLCTLgXVpaKTU+5WJA6VQoM60TP0HFtO0d1RrXb1BB2upbH8lkIDAyFkaThBg3/llRMBhD8ClPTpA8/0qno5ztG4iS/fmS5WyaukiVZsF5XjI4R55luAhVWWE9mE6jpcdHgVIZKkKMgTrPKaRiVk/J8py6mqQZWdshlaTIIov5nNVmg9LgxpOF8x4RAvNiQu/s2O2nNdUweLbrljNA+MB6XaG/nrf038KVVqD
cUG++VCZ81QMRR/DPTWG97nIZf0bjCep6DIEA7xrC4MimB+gwUB8sOVbv0a0atpVFiIx99QZuNeBjgxOaMDngQXnEedOSlRYbGja+4+7BbYbLhuVkxlQ5ikwwqwqOj24x7Tpys8EJS12VHJUz3pvfZr5eYmuJjw37Vc296Zx+u0NvOk7OzzB2xaPTLYNpidECCWoVSTsQshzXBaxMqR/ep72D9ZG2Gzi4e4R6oXHR4vHsYk8rPP18j+7qCl0KJgf7yNbw+v3X+JXvfYflfMZiOeFf/IvvY63n/r373L99j6zQ/PEP/4jTiwuyPCfLc66uztmpJWp2hxgN6/U5928ds4yO6BusaXjtzbsc3TokyySbq0suzk5Yr1fsmi3OefYO9nhwa5/vfec7fOvbH7A4PCAvS6xp6XYrmt2O9dWKdtPQdz1RK7rLK7qh5eT0BT/44z/EWsdiNqMoUxbh/6+9M2uOLLuu83eGO2Ym5qGAGru62JxFihrcomzRCj9Zln6A/5F/g5/87DdHOPxgPzlEilIExebU3dVzVQGowoycbuYdz/HDPplA0XKXI2RRDBO7AggkMvPmrcTJffdZe+21Li/O2d/dZWtji5cnx/R6PbIkYjrtcM2bV9yXrvoktfR7YiS4MshZGayyvrmDxTKdjUX5Xwsm5hAcU2476qagnFs0DmMUaRKLpkCQYHRO1Kw65/AIZWqBJSoEW/Qo0RBoOlzXSbLzHmNCdWGEaqaNDnY3DhNJIu86J5CF61BaB4qWou5arIe2dcQaWkAhDa04i8WGp21D8rM0bcNgMGA4mdLUHbENjhRBDKbrvMj7eXkfjDFhuy7iLlZrfCfKSUBIzoY2cH01Ajl0zmGI5CKmDHgxyBP4RS9Hm6UhKUlf+5Y06YvJphNzzCiWY1hrSNAYZYWN0IFNLMZJk8sHrNcpaMuKLO3RaqH4aeXI0kQgk6aj0o0srDyjLksKoOoSVgYrzGYzYpPgvMIBk2KC9zK63QXIQpqGMO0KfNUwvJqKbcpvG2VsOcShXmucLZLozfNdqK8pdZ2I1c3nIQI41jZoN2VlZZ0stuSbm9xZ2SYrhpS6JY8zsqwirgrOaUjSlNVswOasZrUumPo5eZqxmq2yt/eYnWLM2M1JEs8g7/Owv8ValHI2n9Cpmrabk2pFpuZsVjNmTcXV6CUviyEH9Zy2ndH5moWNfOc6fEi0Ik3ZiVaH7zC5wVWaOsBwzjvR/7CGeVmRlxGJ7dE0Q0DWdpL2iNM+ebZG3IGLXvLN/X3+/E/eZTS8xLWiaftgb5fWOc5OXrHxzjskaUxZzdHWsrG5ycXFGcPhEMwF2ua0ZcGMmiaLSDcH5KZkcP8O+w/vEqcRzbzg6vSY8eiCeTmjrhu0MWRxwjtPnvDkyWPWN9awEVTzCaOrc06PX3Fy9Iqr4YQ4TumvrTErCsaTEc+efc7R8Uvu7NwnspamFf/Bl8fHzIspx6dnPLi3z9GrIw4PnmGtYaWXYe0/djjCaOJYyv4sy8l6Kf1+n1iltK1jWo3xsmMXupSSpItRtHXLbD6mnnuiyLKxvsrzg1cYa3FNF+hbYe7fe5q2wmgV3A40SZpSl7Kt977BIaaMItWowXe0jVC+nBdcdHG/mB92YbirIzJWlLuUoKjGaNrOo42l6jqhlDUdeQ/xxXIuGDVa6q7DOkcaR3R1jVZgrVjBixKBRitH04prrtEabxUuiH0bpem0DxNmfknCr9s2VLPyyXWuk+kzFM41eAXWClshsglaS7LUyLG6dsE4Fp1a19ToNMUGNoc1hrp1RLFFa0MWyeuqzqFNRGQcUZrSOEeSiBaGthbamiy22MjS7+VEUUJVNxSzmdj3DFZJe330cMR0pumaTnBnbcTg0miuyik9F+G7ls45lBPsvuyE00knWsALyOifNdSNr0V1GwY3lvoLyqP1gqVwLXBOqHZ1oJld03VD4vXgTIfxBZ0riElJaNm+u8dWvcGwKvCRYef+gHvTkuP
ZhNI3ZEnC3fsDHpYVV+UEExnSJGLbJtxrUobzgun8kmp+wvnoMy6amrKuKOu5MG26Bu87+Sw5t/z8eN/JFw7nXRhyWSRaF1y/OzrX4r0myTRr2S5Xh1ehqBIyeRLH0mfoYgbZKkV9hVMKpQ3bK+tEugfjKXZ4xd2vPOJrb79NFhsKq3n4+G3KYsb52QlJpOj3LLNyzAcfP+P87IRxMWc4vGA2K6RBPrmki3PaqEd9OmcaK9pql621lK29XdJ+ynw2pRgOubo4oawKmq6h6WT6dH11jZ2tbfr9PuCZT4ZMR2NOX53w4uCQ86shnVOsbiTE3jOdz5lMx1R1SRwlPLh7j93dbV4dH/LJp5+SZzmj0YQPPv0cZRS9fs7ockY9m9Pf3JDBqDfEl48BW0Mcx2Rpn36+Shr3sZElsRlpmjGrJnjXgpIGlAIa5fBaHFarsmAyqdBB9tE5R2wtVVejUVIhyoQASknD3DtpFJkAP/jOSVXpveChWovTr3N41wb8MxYJRy1eSs45EVVBruRWK5S2UokqqSq10tR1TRQnVE1F6zrqxqMRCCKKLEp7tNdhoSWUpsThaVonladWeDq6FpQRR2AF0ApNJYpilHfY8OaIyLpocGnvUdrI1lx74Sz70ORzIsTTOidJ1Bhwgk93bRfWfof2IkuZpDldI5VJL83RRnYJNjThFuNUbdvgHdisT9c2xDahtg3O1SgFadajmDR4bdFRQt04kjQi7yXMZlPK8RVdt0IUR/R6+RKqqeqKPO9TNiL63ilNUc6JvaL1ikAzll2Bc7jOQwe+/W3opInkJIuCYZFwF9CPRpgKxosT8M1qWCu0udlI+/UKWf6mXddwdfmc6egV0XHGaHefXjYQXD6ynEfn0LRkTUPsHfWs5PlJgQpKea2rOC9nPG9LdKhIF3zwa9bFDQL3Qk7TBWF1HISE67z8jBcjThf43M47KT7Cz0ppNla2+YPHf8qvmp/z9JP3SSLhr0zGV3TtnN3dLdFakZILi2ctMtTjOb2u5Xvf+RZb68LZ1UYkWd//+Xt0najtKWMomoof/4//RjGd0euJBkKvnxPFhqpq8C3Q1AwGm6ylOXE3Z5WGjbUd0kGPrusYnZxzevyS8+EF46KgrBuUNlil2N3cJImFuz+fjpgMp5ydnPHy+JSjkzMaB1GSsKKkH7WxucnHH/1KpFcjQ5ZnTEdDiskIY0Arx2Q65+SsABx3t9dZXelTVyXFZPKPHwOO45hBf421wRZZuo7VyVKC0RgR2F5YDi8qBO3Ba+FlytW0I04sKsjedW2LNsgAgnfQiTW31QqNxxuLCQs+SRM80phoXIf3i+27x6BofYdrW/KsB85jopi6rsB7jInC4rFoFDZMQvkwURTZiKZtiOOYoijAWJqmEXcJJW7CSjkI+FYSx6RJIuOOTlTCrBHYoguwggkcYBfEcmIb0XW1COh0AnOoG+paXoPrkPdGy1CJNjY0AeVDLUyLFodBG03bSrXhnaP1XiQvQ9Vsw3hx50F3cuGzSni/xkRY42l9J41CGxG1YtnjvQjfdK2jP1jHtTUOQ9W22Kom72X0+wPG4xHFeEici8lflvVo2442XPw2+1uMjKHzsNltcHJ5HMwn/VKhTFTevIwF/19wGv+p4zpJAsrJ1FygLiwgh4UYjoz93vRMu8Zzg1vl8rg3RXAAvGvoaPG+5PBgCF7haQMmHKpplisDi8JoWYviTSGi8IthE+Xdjdk5SZyLR0n16pe3JJFKwnVhJH6hcexxognsHS64R3slqnh7mw/55pPvUQ89Tz/5gLIsMWE832hNWzX4RmFURKIydrL73FUZb72zx+b6gOnogqauiOOIrprz4MFDjFF8/slHTGZTTl6ecXh4iOtga2uHza1tPv70Y9q2pq5L5vOKsnIYm9Ab9FmLPHcHfXbXB/RW+pgoZnhxysHRIaenp4ymYy6GQy4nFc7B1uYmm2srKOWYTyaMzk8ZXU05Pb/k1fkVoyLs3uK
IWTlnNp/R7+fcubNHFMWcXZzywYfv82/+7E8Zji8ZDSdM5/Pl+z0aTthe7ZFkEaurq3gMo0nxxjX3pUm3319jkK+TRn0inaKcpZp16E7kGEU5yi0rABAdAI8kBROJDm+kNFFkAke0xTvoXIMKilmdD8R+tRj/FYcIg0y4NVpBIwsmtpa2a1A2wjeN6Ce0jWydteCwnZIttG+F0oST55ooRoXq0bcOEycYa5caB3UXYA8UsUbMLus2uCko4iSGphH/MAdoFdyGLdZI0nPe09Q1cRRUu9D44KeFMkAXZBcJFWCHVhoTGpJSPUsn3QQTSHGQkPPSyuNa4UE73+G1R9dV2CmILgMKsjgBrdBWB1lGJ8MXKGZlQRzn5P0eqpBk37WtNBqx2Dii7Twm0kznczrvWe33WFvfZFZMieOIqqqYtp04K8cJV+Mr2s6xtrEtibhtmPVWcPMpVVXR+QZthb6mtfxNAvj9zxwOMMsNAeFCK1k3vO/qmoOL7kT3+UZlexNekN+9nnABSbIufDb8Aha6duZYSPYuGDcEOqWIHYXCBqnGvb95Ebu+LUlZdk2EpqgPO0Tv5Xg+DML7oMUsZdG1eNGiX5BGPe7vPGRjZYu8nzPo9ZhOp0RxxPpanzyJiazm7upDIlJ2V3a5s7rH/XsPWRn0KIop/cE6vik5ODhEK8W+MZxeXnB6dsL7Hz1lNB4zWFnhwcNHjEZXfPbZpxTTCcV0QhxH9JKUNAJjFbtZxN5Kxlt399je2yXOMspixtnJKQevjrkaTTg9v+T86oqqbdkYrAaJSIerSy7O5gxHlxRFw9VoTlmLSa02hjQVzvhHnzzl8OgZX33nq7z7L77PD//6r2nmU05Pjnlwb5+2Ufzq6VPybIrzJZNxydWwZHdrl3I2BeVI88EbV9yXJt1enJHoBI0Bp+kamE9LmrKlceVS6Frsp1XQ/ZDRTxWq3SRJ6GqwofnlnA8CNh5thUZlTUxoM6GUIjZGeKtBj7U2hhLCFbpbajFoJQlKnH7lQ2ysoasaacQ52U6rgIEa4+m6ljiOqZ0MBNRlidWapvVyEeCafxkZg7VWxIyNoW4txnUYBY0LmFj48DlY0tWU0lgbYw10WBbanF1o6ikf8q8TmTvCdJNC0bU1cZyirRWHCBBGR9cSpTldW4sgetuBkmZb41o88n8T+pwVBofREJkweOLwKqXxwjTQXYuxCUlsqZsapw2urmh8Sd4bUJcFYIjilLrpmNcdvSyi1xc1KWuNTOMEXMigOLs4oa5rVlbWQWuKekbVVFT1nCyL6aUxg15MnFjiWBGb3wb2wo3qFJma1Fovf7OAHpb9tJv83ZBZX5d7fP2YchRJmN6zHB0PT1w+n8Uxg7CODzCHCzDAopG3qFL94vleLhKL5q3zPmCzAat1NzDbRaW7+Ny6kHKX+K6EVpbcrtPNHc+efcLZ+Uu890JPxNM2DaXyjK6uWF3fZie7x/3NfVZ7fcrZlHkxomkczyYT7m5v8vDBI07Pj/nRj3/I8fEJKMXdew+xp8cUxZjnLz6nrWrG46lcEryXSpqG9bUN8nyVzSTna2895v69fXr9jPm84ODgBR9+9JSLyyuuJjMuhhPGxQzvwazJxc9G0vQejcZcDUdMipJ5DU4Z4igmz3PKpiLLMtZWB/zq/RF/83d/y3g04ltf/wbWKD5/9pTLqwseP/oGf/5nP+Do5RE//dnPODo+5+BsyMMH97izd48XB8+ZzqZvXHFfvuq1Wja8XNgit7pDUdO6OS0tWotpoTOahWqqV+BD5WYjjfKefi/BKpg1wdYlyOJ559GRiKYopUWMJYqIogirFU3bSgNJaYEijHBjnZPk5vGBCbGQP9SIIhdoK9sgGyW0TUkY8RJxcxTKWpqqDD5XksC1MSRRJJoOcUTqxLNMLg6GqgJrLHQ1RktCW0g5isIXWC0MB9c0GKPROhYLHe/pAl9Ca710Ilbhm1xEJAFH1iyrd+8Fg9Z40ahwnWRt57D
BvbdrG7qmpq4qYmvpEJ8533lMJHBJ50XFLY4i2q4l6mQK0PmOuq6xUUrTiOLbysoaRTGl6xqyLKGYFzgP/V5OEsmGN0kTkansQoXVtRTFBKVlx72zsYOJY9QVaNWysdZna2PA2iBjZdAjjd88vfNPHSokN9mBSAV7He4aYwjbcXm0bOFVEMMPR/o12MEvITe1qJyDBsV1hX/tNrE8HwIMH37rcOils7J67UELo0/8EkggmLdJQ8wHzjjXSRcXtEFCAna+XXLaCQXUar7Nn3znX7OV7fL046d8/tmnGG1ES7ltqZuGOEopq5Y7acp2PiDPMi5HY2bljCjSFJOC7/3+d0njlI8+/CWvTk84ODohyzLSLOfgxXPqtpbJrrqirTviJCIzhqKY4TtPnKWkSc6g1+crD+6zv79D1suoq5qjFy/4+c/f47PPnzOeTnHKMC9rvPMM+n1p9rlGsFblmM9LppMZRdnQeYOymnyQMVhdoaprjg4PGI4u+fZ3fp+f/OQnPP34U4yOeHD/DmeXV5yenfLy+Jx7dx/y6NFb/MH3vsfG4SG/ev8pzw6P2drZZe/eQ46ODt645r7cgr2TPxpOy7SSd7jG4bsG50uUdejECsXEaXRApbQ3QXdVBZGWjjSPiWKLn8qQhCxCh7Gx0KKsDWtJNBQWHfimbYNlDYF25cO+XERkrE3DovFYLZ1kayNQGmu8jMcaS1OFChxH66QZpZxQ1jonydr7hXiNeH3NAi9VYDuhvc2riiiOqZpaOvORcJQ1wvVFGfJ+nzSN6KqKKLagojBg0DAeyfbHWmmHOyV0Mp0YOVcPaZKitKb14ibReU+SZsuxWuNkqqyuG0xbB/5yK+4RTmbdVXCFUFoU17KAYTvvqBsZFe6ihKqqhEcsZRjWaqr5lDxZJ0+zUClBHGd0XSMCOTaRBk/bysj1wvPOaGbFmKZrWFlZw5iIjZUNnG9wbs7+9iZ399bZ2Vgjz1NZG78VIbXogqd7fRuWezelbzw2VL/avcZieL3CVUssGHXtPedv8NNvGmCqACkEBHz5yniCRkVI2ItDI2pzhKTt6ZZwAt4H2tc1DcwjuL0k3EUS7kLV625aiLI12OS773yHB7tPWF/Z4P0P3xMtaGtwTgZilI2JkpzdjS16ecpoUuBwlPOKYtKwv7fFF599TNc5hqMh58MxxkY8evyY8/NTLi/PqWpZu23XCs+/6bBGE0cRq6vrbG/tstpf4+H9R+ztbxPnCY1rOT064ue/+AU/++X7XA4nJElCnKQkSUIvH7A66JGnMW3bcH5+TprElGXFvKqpGsk1VhviNBNjhabj9PiU8WTMvXv3efTgEXU5Z/fODlGWgY5QOmU8mfDpF884OHpJ3su5s7fPn777x7z3s/d49uKQr37lCRsbO29cbeqmw+lt3MbvWtz/TuxvVqmi96sDdCOYn9JacHWtgvCQMEqMMTcm0USDRAdNBr2Yt1gkXgFfhTnA65jvcsAiJFutlLB3lnjvIjELt90vat7g2OyXdDAXLgkLKEHU9ZbKd+5msnWBRhYgCU8Yc455uPaE33/0Lusbezx78Zy/+fH/pJjNhJ3UNOxsb7O/ucUPvv8uW1trXJ6LG8OzF0fcu7dPMRkymxWANLSfHxwyKuasr69zfnFBVc3QwLwSt5UkjVDKMplMWB/k5FnOvbv32N/d5/Gjt9ne2SZOE6Io4vTsFU8/+oRfffAxzw5f0nSOjbU1bBRTty3rK2vcvbPNIE9o5lORIUV4+R6N05Y46ZH0BqxsbuPbjq6aU5YzVlbXuBxeUFUFTV2TxDKJmec9Ef8aDZnPZ2gj3ojjqyu++c1vc3J2znA45PGjh9y7e4//8B//05d2iH8bQLXbuI1/xgiVopdKVqnQ1VdC7VMEZwkVJtd+DR5YYrnev/YY72/ct4ARfPg5wAWLu/XiNNQCOfA4f2NIeon9Xp/zoqJdUMEWxZMk0vYaPljoNbuF8egCKQl9CPz1Py9MhGr
e8dmnz0iyK07PzzA64fHDRzRVyeGr57x9/z5/9HtfY6VnuLo4ppfn9FPDkwd3GY2HlEVBnudMJhNeHR9RzOaksexo0ZooFnhpdnRMVc3pXE05n+FRtJlmZ+cOd/f2efTgIVs765hEds4Hn33BR598wrOXp4yKOWvrGzRNi1KaOE4w2pKnKYNexvbaKk2ecHp2xnQ2p21lFxdZi4li8tVNotU9dlYztnuWD95/j/d++VP6/R6vXh2RZRl37uxwePiFyBhoTV13WBuz0uvhfcvq6gpHRwckWU5dNRTTkquLyRtX3G3SvY3f8bgJLQSHX7UY2nhdvOSao3v9XH8jed483iJ5LgHa66NcP/LXGnA3H+ZUGL1ZVLksku7iIiF2VotzuMlgcAHjdWHQx3svNEPvWRLN/JJ7JM8P7BiDJVIxcZzw7vf/jCTrc356Ti9K+fjDX9JL4F/+4beJjeLi7JzxeMru7i6+azm/OKGYzQHNxcUl89mcPF1hY2uf8XTM+eUpZSnuMVpH3Lv3gCg+Y1qMGY8roihme3uH7e1ddu/codfPRWmw8Ry/fMXTDz/i2aszxpMpNk5I0pyNOKOua5I4IYlj8jQhiZPw3hqUsgIfhsa+jSLywRrRygZtvsbMxpxcHfLq5IjPP/uMfr+HUoavfe1rHB29oJjNAM/Z+aX0P0KTsq4bsiRlf3+X6fkE7RVvPXjM47fefuOKu026t/E7Hh6vrhOl9CSC2SQ+TD9K8r1Zaf46/itEKx8qXI9wu7jO51w3a5UX6qBePEbuXibwRW5dYsCLQ7jF6wRnZu/R0rUOZLEFhutDldstK+ww+ClJV0lSl2QtTVF5AY3qLOW05fnFc9of/XfefvCA/mCfwdYeV1enfP8Pv4nRHZcXF0DM5uYOvXzAxcU5p8fHaBPz5O23UCpia2uPtc1tzocj/v4Xv2BYtHz3yX0iq+j1Y375yw9YW1+hbmasrGRoJYJKcZJgbcTl1ZA4jhlPhnzx/IAXr045PjlnPq9Y39pCR47Ow9raBoN+P4hLyd9uOpszmxUUVRkonxrnwcY5yeo6Ls7pbMppp2kPTzm7vODu/j2+/uQrNDg+/Ph9ZrOCKEqIo5w0qUXzREnjPkli6kYYF3XZsb/zgG9//Vs8ePvxG1fcbdK9jd/pWNCzFupQi+EEUNcJ8UZcsw38jd9dsyAW2/TXmmEs7nvtlf/B8/n1wnhRwV4n7kWzWXSgl3gsi+r2mpq2qGC9WyRicAuBKJY9uBvnovFOklbbdvzwxz/i+Ref85f/9t+jjWXvzhaumfPq5RU7G5tUjWNzZ5vDF4d89PEnbGys85W3HnJ4cszx6QVnwwv6x+v0+iv80fe+y8PRlMvLMz7+6H1OTw6lKdeIf2CaRiRJjjHiJPPFs4OgUdJyNRxydnnJq9NLroYTOudxl0O2iIL7uCZNU+LIgmsZT4ZMJxOmxZQu6J5YE6E7j0l7tCpm1irmswqnNDurO6z0N6gnl/TyNWbNlDzpcXZ8wfbOBtpoNjbWgq6IoyxLYfvYGO8sezv3+Yt/95fcffyIla2tN66526R7G7/T4f63KQaJJZTwf0iO1xzZ68dI00tKWx+aZT5gwK/btocXeI25cOPQi3PzN/pwofK+TuqK1gfNZq7hh4XAkPeLaTRJtp0XOuKS+abcEttdHGDRTIxsJBodCuZlyWw6pq1nuNYzuppirSdLM54dfMrBq1M2N+6Q5yvcf3CP44sz/vanP6WsWrz/FGtj1jY2ubO7w9HxMW3rGE8mOOeJo5hWBX2XNEE7UEbMKoe+wHtF0zbMq5KzyzGXl2M6D1neI8v6xFFCGickSULTtHRtQ9tWTCZTZtOCtm1J05w4CZOtNsYTMe8Ul5MZl6MKpw1rawkQ8Rd/9Vc8fPyQn/zd3/J73/g2q/0+n37xOd5Z5vMZ06Kgbmra1lNXjiw1fP2bT/hXP/gBX/3WN1jd3Cbv/SOHI27jNv5/j2VFqv6hyvbLFH8
X2Kq75vZ6FWCCm5VwtzyWfBce+QJD+DKlNY+wKVQYMRPdBJZF+ALLBYJcaoAU3HVzzDkXoAcVIIbXK/TXboRBnzbYUEUmYqU/oJiOmIxG7Oy+xeHnJ6yvxrw4PODTzz7HeU3Xat5++zEnp6/4u7//GU3r2NvbZDabU5aOl6+OGQ4vSdKE2bwkTyI21noUzjErHSZJsZEm8cLxn5Ul1iR4r7A2pi0rxpOCeVUSJyk2TtjeusPezg6rg76IKZWzYHQwp5jNmc9L8J5+LyLPB2Kiqy2tF8rbxWTEcdmS9td50Xk28hVmTcF//S//ma7suHf/Lo8ePeDl6TEnJ4doDeV8DkrhOoVWMV9/5+t8/90/5q3Hj+gPBsSJeEK+KW4pY7dxG7dxG7/BeLO3xG3cxm3cxm38P4vbpHsbt3Ebt/EbjNukexu3cRu38RuM26R7G7dxG7fxG4zbpHsbt3Ebt/EbjNukexu3cRu38RuM/wVHDXP2+PziqwAAAABJRU5ErkJggg==", "text/plain": [ "
    " ] @@ -415,7 +415,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAAD3CAYAAAC+eIeLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9d7hmSX7fh31+deKb7n1v7tzTk2dnMbuzARuIBUCQAJhBEAJpEhYp0iKVaNN+bEvy88gSLdEy/diyaVmkZVEWk0AIAEkEEiSIxMViF8Bi0+zOTk7d0/Hm8OYTqvxHVZ1z3tvdM7ML9Ow0eH+7d/re9z2hTp2qb31/scQYw4mcyImcyIm8O6K+1Q04kRM5kRP510lOQPdETuRETuRdlBPQPZETOZETeRflBHRP5ERO5ETeRTkB3RM5kRM5kXdRTkD3RE7kRE7kXZR/bUFXRIyIPHyX74Yi8uC73abG/f8tEfnst+r+J3IiJ3Lv5HcF6IrIZRGZOLD0P//NN3s9Y0zXGPP672QbT+RETuREAMJvdQN+B+WPGmN+6VvdiBM5kRM5kbeS3xVM924iIg+LyK+KyKGI7IjIj9/luO8Qkasi8t3u78r0ICJ/V0T+loj8C8egPycip0Tkb4jIvoi8KCJPN671hIh8WkQOROQ5Eflj76CdKyLysyJyJCK/BTx07PtPisgX3HN8QUQ+2fjukoh8RkQGIvJLIvI3ReR//KY67ERO5ETuufyuBl3gvwB+AVgCzgH/7+MHiMgfAH4M+CFjzKfvcp0/CfwnwCowA34D+LL7+x8B/w93rQj4p+6e68D/EvhREXnsbdr5N4EpcBr4C+7Ht28Z+DngvwZW3L1+TkRW3CH/EPgt991fBf7Nt7nXiZzIiXwL5XcT6P60Y5f+5y8COXAROGOMmRpjjjunfhj4/wJ/0BjzW29x7Z8yxnzJGDMFfgqYGmP+vjGmBH4c8Ez340AX+OvGmMwY8yvAPwP+9N0uLCIB8EPAf2qMGRljvg78vcYhfxh4xRjzD4wxhTHmx4AXgT8qIheAj7pzM/d8P/v2XXUiJ3Ii3yr53QS6f9wY02/8/G3gPwQE+C2n6v+FY+f8r4GfcED3VrLZ+H1yh7+77vczwFVjjG58fwU4+xbXXsPa1q8eO8fLmWN/N695Btgzxowb313lRE7kRN6z8rsJdG8TY8wtY8xfNMacAf4d4G8dCxP7YeCPi8hf+R265Q3gvIg0+/UCcP0tztkGCuD8sXOa17x47Bx/zZvAsoi0G9+d50RO5ETes/K7GnRF5IdF5Jz7cx8wQJOF3gB+H/BXROTf+x245eeBMfAfikjkHHN/FPif7naCM1H8E+CvikhbRN4H/LnGIf8ceFRE/oyIhCLyp4D3Af/MGHMF+KI7NxaRT7j7nciJnMh7VH43ge4/PRan+1NYe+fnRWSItXX+lePxt8aYN7HA+x+LyL/922mAMSbDgt4fBHaAvwX8WWPMi29z6l/GmihuAX8X+DuNa+4CfwT43wK7WJPJHzHG7LhDfgT4hPvur2FtzLPfznOcyImcyL0TOSli/rtLXFjci8aY/+xb3ZYTOZETuV1+NzHdfy1FRD4qIg+JiHLhbz8A/PS3uFknciInchc5Ad13SVz0xPAOPz/y27z0KeDTwBAby/vvGWO+8ttt74mcyIncGzkxL5zIiXwLRESeA/6Dt0jIudt5fxe4Zoz5T0TkU8B/b4x5u+Sbb1pExACPGGNevVf3aNzrR4A/Z4z5vnt9r2+l/G6qvXAiJ3LfiDHmyd+Ba/wacM8A916KiDwAvAFExpgCwBjzo8CPfivb9W7IPQHdH/u1r5jxuI7XFxFEBAClFFprtNZz5/hjRARjTPXT/FyUgAhKKYyGstSUuiRtd4nCGENJGIS0Wl3SNCUMQ4IgIopClApQYghCEAWCAXfdQARjN
Fo372cIlBAK9jwlVTvnxGkKYrBpGMx/b9xHBigBY0z94fFjG1qHvY9xF7a/GnchYwylAaNBa6p+UkpAQJeGIssp8oK8yBEgShIE0HnBbDplkmcUjRyOY091u5j6GN8Hzb74nsfPvu0lTuRETuQe2XSDICCKourHgl9AEAQW5Brfh2F42zFhGM79VOcq21wP2IFShIH9PowikiQhbaWkadq4foBSglIQhULkgDRU9vcoEIIARCAIhEDZnzAQAkUFzM2FY07ELgRGCVoEDdWPcbhaA+28WJD1P/ZS71SkgdnNhQoDRhvQBlOW5LOM6WRKNpmSzzJmsxlZkfNOzUrVdd19lFJz/eDf54l8Y+LKkf5+EfmrIvITIvL3XdGi50TkI43jnhaRL7vvfhxIG999t4hca/z9IRH5ijv2J0Xkx0XkrzW+/4si8qqI7LkCS2feYXP/kIi8LrZo1P/NJ/+4tlfFlUTkAbHFokL396dF5L8QWyRqICK/ICKr7vDPuH8PnG/jE3KsjrS71r8rIq+41P6/KW7wiUggIv+Va9MbIvKXm/d+L8s9AV2l7n7ZtwSwOxyrlKon+m1MEMt8xYJvFEUkcTIH6GEoqACCAAJlwUqJ/T0I7O9KQKn6JxCDaoAhzGNm8xtz7O+KwMpxnL3zX8b4x6qBd65vzG2nVs8vx/ip1gZdanRRoIuCIs8xZYkYQz6bMR2PmWUZhdYYy5vfVoQ7Lzr+7+MgfCLflPwxbAJNHxtP/t8AiEiMjUT5B8Ay8JPYOh23iTv2p7Bx3svYIk4/2Pj+e4D/C7Z402lsKvldk3aOyQ8CHwE+hI2OOZ5O/1byZ4A/jy0AFQP/O/f5d7p/+65+9W/c5fw/go23f8q1/fvd538RGw//QdeuP/4NtOlbKvcEdN9qEjYZ1nG2JSIOYwREISpEKeUYsrIAYADtkUoIgxgRRaAgDkIiFRIFQhQqokAIFYQiBA4IRUApwS7W1owgQOCOCcQzTgtqyqn3nqx+825H23Zxaro0LySG+n+3n3ebKeJY/zbNMXluwTbPc8qiqMBRG0NhDIUu0Wb+PkLNWMPGjxKx/YWg7vBO3+nieSJvK581xvxzl534D4APuM8/DkTA3zDG5MaYfwR84S7X+DjWXPhfu2P/Cbb6nJcfAf4HY8yXjTEz4P8AfMLZVt9O/q/GmD2XSPQ3eIsCTneQv2OMedkYMwF+AguS34j8dWPMgbv3v2qc/yeB/5cx5poxZh/469/gdb9lck+peNN2O2ebbajDxyet/U7QWluWqgJ3jL2OMboCa20MYRAQhAFBoFCiCBwzVhUgeKgUMBbUtTYox3qNsfZdCzzza5Bnwfa+d2DZvANbaPVM88cawTFVU13I36KpzlfHHzcHGG6DaGMMZVmQFwVFWaAdjbY2YENhjR5UNFzm7bRBEKAarSwp595fsw0nDPd3VG41fh8DqVOTzwDXzfzLP178yMudjr167Psv+z+MMUMR2cUWTrr8Nu07XozpnZol4PZn697twG/w/DPH2nXfFHq6J6BrjKEoCsqyrD+sQKYGEw+83m5rjEGoJ3cQBARRDAi6LBAMptTVuYJVb8MgRMSBbRDcZt4wxqC1xqAcywXLp41zPjXAr3leo43VY1RmDrE2W94eeI0xc0fdkS2b5i+uf/yHx8AYPNmfv643OYgIhIpSl+hCY7R2NuZ55J9bBBp9Xt1D69vA1psUTuRdkZvAWRGRBpheAF57h8eebxw7VzhJRDrYGsxvVYzJy3ngucb9b7jfR0Cz2NKpd3AtL7/dWNWb2BrZXu6bQk/3ZPaUZc2QHMGsAAxqBtz8ux4nnuUpwiglSdtEcUIQRAjKmhnEer4kDBzQWHVdKct0m9ZO7VheaYxTq71+31SPLQCDRsRYk4JYYCsMlAil48N38j+9Q5cUGlNxzXmxDar+11DbK5uxAW2EooSiBGMEOfb6tNZoDEZAKeuQVIGqHHp3ipjwN/GmjzAMiKLQag4Nc
G1GklStPmG691p+A1uB7n8ltoDSnwC+/S2OLYG/LLYw0g8cO/bHgD8vIh8UkQT4L4HPG2Muv4N2/O9FZElEzgN/BVvfA+AZ4DtF5IKILGJNFu9UtrHq6ze7AexPYAtVnRWRPvAffZPXedflnoDubGbrrXi1umKLDefL3cQYQ1kUgCAqcOAREYYRKggRsZ+JClCBZbailI1sEEWJoTTaAq0u0U2QVf7HRjMEysKWC1JAiVQ/4MK8NBSlsf8amQPNygnWeMbmz9xz+fNuwym5w099kjEGbWw7ytKGiGltw8VslzqbrdaUZVndV3lzQSPq40538QuUiFgHZGx/gjBA1LxJ6PiPf18nCTb3RlwBpT8B/FvAHvCnsBXp3urY/wVwAPzPsQX0Z+77XwL+j8A/xrLEh4D/2Ttsys8AX8KC7M8B/z93zV/EAvDX3Pf/7Bt4tjHwfwY+5yITPv5Oz3Xyt7E7tHwN+Aq2Gl+BXXje03JPMtL+3i9/3ni1VBuDNj7EKyBQytkeyzmV1qu11hQAKohod3qkrRYiQp7nZLMZZVmgS40RTRAGtFtdkiQljGIXfhYShA48lbKOMh+P64E2EAIxBALKNEig7ZIqprbUoEvLlu1iYaMelHO41XLnSIAmMCHytnG6x8UYy4ybIOv7zdtUxTm7ylJXJh3fr2VZkuc5RVFQFMVtcc9VO52W0G63ieIIESiKgtk0oyiK+hkaz3QcdL/z4Y0T2vseExH5PPDfGmP+zre6LfdaROQPYp/14tse/C2We2LTjeN4zlmm3b9e/QcLCGVZ1oDobLp+codhQNOvJSIEYWhZndKIEpI0IU1axHFMEIYEgVOLBYJAEQSCyxewoCnahoV5wKWOVPBiBKuiG3Eqvakcb8YIumEF8bilGuFh9nI1U64v/E4Wt3nwtmFd9hm0+LZou+gojQQ2ssOyWoWItY17803THit48K/DwPwXYrC28MAuLP4RVCAENGJwTf2Uc06+d/BkJ3LvRUS+C3gJW1b0R7BhVj//LW3UPRIRaQG/F8t2N4D/DBsy956XewK6YWhDvSrQdTZb/7lnrk3W5I/XWrskBUUQ2tAlYwxxFKEwlI7ZiQpIkpQ4jl08bjMJIqjjbqlZmXIsVVyogJlzEtXOqxp0G0zdKERbWNTKubsagQCqgZZN4PQgfDxMy8u8k6puhT+3mT9hjMFoTZnnFMagwhASEAnrhUX5Z1XVwma1CDMH/McZax0PXbdFKRtJcqfjBZsh2ExaOZFvuTyGtXV2gNeBf8MYc/OtThBbv+Ff3Ok7Y8w3GmnwbooA/yeseWOCNXv8p9/SFr1DuadMF7gNdOdUbm530PjfwygkCiO8NVJE0AIS2uMCl8lmQVchyoKEEms6qGJMnS0XmAuHMo2YYJ+cYBpI2kyx9c8gEtr2amcbdiy5tteK5YV3CYV7O7ZrjA8luz0yAYDSRm/ooiTLMlQY1LZot2hV92r0ZWVHv0vIVxWjG4aOMXsAD46lRjedaKa+zgnovifEGPPfAf/dN3jOr/GNh3F9y8XZhD/6rW7HNyP3CHQjHHRYINEaY6zKz11idJvmCEQsAKjaZqmNtsCKIEo580H9Y+NuHTD4cAaocKuKlRXwirttWyPXohFMa1V0a1pwmEyJqes2VIsFGAVGScVSlQ2ivaPF1tz2iwsNczHEvslV+1yzvFlBFyVlUaBdhEgR2AgFn2Jt214vaP7ZlSgQa+IJw9A6ylzkB+I0i8DWpxARwlC5RYdGv/m+nE/jOAHdEzmRdy73BHSTOAY8yzUYXdt0/ecSOhB24kmYD+kKVIAYYye4KTGmQNCOoVnbcKikss0qp+cLBiMGbWzoGNo45lYDhzb2P9qA0UKptUskqJ9BG+3aXbUQANEefUobBSHK2UIFpazjS6EaINVkt8dg2KIr2tmOjZaKQfoDPMsuC+0cYjllWZtmijxD5QFeGxBphOC5iAS/ObE1uQTVQtUMS7PmCQioWaz22sDxl
UJ8Osn9Kz/zD/9LY1wIYf3q7VJSvbuGz1MQF/DX+NItWt7gYv+Ruj+b16S+x3xyjVTXb44Pq6kpG1ttjt1DeROZ1dgwphE+aJ8p8KTHX9l4rRI0UhMDr9mZiorgE29sZLt//wZjNFL5XeyVNfqYBif2Oq7vSrG/a0d2NNbUJhi0EYxoxFgt1l7XPUcjhNSauLTrRIW/uH032vVi3cOIwib32RaC7Uep3rO32GkUtj9sq60/SANimnPD9q4RhTEKkRJjbB8avMprqpBPf9af/rN/9Y6T5J6AbpqmlTNHl7V5Yc6xo6SKaoDjmrdBxGaVVdGtFWhaYGlGPNjzTX0dbQeTspm+DnDrSVIxS2OjE0pN5ewDzxTrwVXfo+mcsm0Uxx6VMqjAvSCx39VtridXkzbaBcXUNRNclALurp6VW9AtKbKMIs/sNdzDlcYmoohjquDbYdV+u9AZl8pL1S4fizzf+cIco/VONX+IH4juGe7nSDEPKs2/AwcK0BgnPg28+lRAmiBnpemYPK7O+KsJduJaUYjo6nub5u7vWU9eMTYJJ8DnZEptJhMPGPbVKAduYWWHVzU4VE235rfKjlW1wD2h1OAsRtAOiOtaHVKNWyOCGIWxzo4a4P1zuHFXir1KCYip55bFckGLB1ZTPeXt0azixt+8Blt3uNg+9cY+UdgYJKn8IvXCakHcwYl7xXaeaGNJU/ON20lhr6ykdH2iMFLnBJj5Rr2l3BPQ7fV65FX+vy2/6E0MFegaQTVU6uYgEEC5JAgjlgFqA8bZIQOXGmy0xjQcdk3graIiRObiTes7uJjX0jnLtFXXS20ZLtQrvzdZGJfl4QeMeIbimLd4EwfezFGzbMvqjQNiVb1M4wC3LEvKwpDnZRV54M/VxoKu1gXKGKIwJIgTy1ZdPnMQhLZfAm1ZkhIXxWEHnUJseJz47q4BtlpazDyzN76N7vNq8Bo3Nbx2cif783teLBssq6lYcxVo9INxkFTN7RqwqoWUZsfWiyUc87jOAReAsmVFm81yY0OknvaenQIOhA2qAsQa8EUcIItjqNSAa5tRMz7Bgp0YC1QWQLU7UGOTb2qAqxUwxyyN+BECCEYCch1QAgEKY3JCVbpnMI4TukXNmHpL7orYNAbjHJA2e8d95hhy9ZKqzvKFnLypL7B/iwf6+k1bUNbVuzDoCtS10wXAarJ2rrslxc0le6/ALUC+Fxw5NGr+nR6Te8R0Y6IopChisiwjyzLL5Jwep7Uj9OZ4R7vGS7N6lUErQZfVWuyAzao76JLSx0MbVdlmPbMuKwCyoQy13VPXYF2WaF1SFIU1h8wxldqR5CMajjus7hSK5e9vU5MhioIqssA/V23HFhtTm+XkRYGtL2Hv74mxYJ2LcRwRxxFRZJMX0AF5UTKdZmBytCnJ8owiK1HK0GrFtFotkjggTazNFgNarGPNwBzwVJZpYwd35Sx0TMGDr22jZQD6rUbYe1QqFmcqeKyGYdPCI47mS0UKPJtzwCuA8RqXR9zG1BfHrDxIu75WFWBbxmvc5K8AnlpZ9Wep6rvAAqJYwFRurFm12LLL6k6eNRurMSpshqIxoJrM0dRjoMpvd5/XECiOuxq0QGFi9ssl3hxvsJv12BonmKIgRzDljOVoyvn2Fg8u7dMNR/Yqxj2r8VzYc8QSYwKsucAxfL+sVAudBUk/7mrfgl2Iqh4zHhSpn8ndxb9PP2c9XzDGaqi60hKcNiOO45rjBqB6nbXHNRaOY2PouNwT0LXky9c5cNlSUiKlqVicbbRXIeq1qYpecGFgiFOvS1fLwSibjSbeLqVtKJdYEDZa0NpUdR8q4FMKCZpAV5s9dFmiy6IGYUzVHqjZOajG71THi2tjc8G2jrYAkYBA28iK5uQ0FNV1fH9EcUiSRlW/NNOlq8SPQBGEdjAMjibMZiVFUYJosmzC1tY2h4dDMMLBcMTly1e4eO48s2nGxQfO88iD5zlzepkwMNUzlEazeWuPV166zsc+8
SRJGjWe0TM/U4GubpAMraFsTtL7RHT1b2239M5WwOKDQ2TlQMYyH3ETSqqJacSTLakgxFtvdDUPvRlAHIA3NTsXaYOyNlr3qZ28xrcQwGVL1k5cMd5/4Fi01CNXuYXCmHl7ozRQwfuOq6E7Z0utFyPbFaZaIEZmic/tPcmLwyVuDDVJntEOYaYjhlnOZBai9QKTWcqirPP7Lx3wHReuEqrM2Vs95Nbs1QKvqphxxZKrPPZG+2sKVl3LvilxbfQv0c8vu2Dpipbq6jPBmdv8rK9egWkssL4ujKrOr6z33rzSXB/eYuzdE9CtHFeACW3Bcq0NSJ0MYY+rwVdJvY5EriB5GHn6bsHHZlc5I7Y7R9nRak0EpaGsXpeVudTc0tt4bOytjXs1bhBg406Vck4xyxe8vbV00QzUw2TuWcNA8EV36jAt+xMEyiZ7SFCNcK1ru5FfnPy5SrnJW6n1pk6ACGxiyeb2IYeHQ5b6XRbaMJtmTKcTeknEuUcusbd7wMHuDk9cusAHv+0xbt3c5kd/7CdZObXOJ7/jk5w9d57DgyOuvnmFbDSl3Ul5/IlHLIOu2DrV89b9Wb1lt8DdB3mXd5SmOapWK/2b9RMIaWhYjuV5c5OqJrm9nuBA0i2aRhxIGv+9U/qrSVk7aDzwGnecvYKqSot62y3U7VFugtdA30BQ+5F/mhr8q0OsOcEupLoezzQ1HlOZVSrgMopBnvDV7dO8dhDxpa0ho+GMNA7JRIgMtELDtDDo2ZgZit0s4L9/7gzP7fb4N598iW4ytHPLJ9Ub+6ye+9qfgEqb9c9t/ILjzB9ij6vMBGiQwHe3w+nKyOLA3DPkec3DjYiGCdL2dIN6VR0qHuC9YoPFMOt7EvdI8zvjNOXeMF1H4bULPwrCiMiAyTIKkxNE1oMeuopg1iHlXjIQBCFxZKMTEIVBURqbDBDkOaXW1pFWlX204FkUGvLCVtby6rFy6tGc087YqAbPGJQijFSV0eaZhDEGXRhXwKekaJgWfGxsEAQVsCoXEaAEyzj8ZHSLQ22mcNeZU2Md8IotumPjeu1uFGIgMoZSDHtHE5599mUW+j02lhdZSAJMWTDNZywtdFFqgdFkwv7eNg+e2iBJIorJhPXVJSaDA17b2WL7xhVWT53naDTj/Fqf7/3e7+bUxiqtTkISOJ7hlvwqqaNJ24ydvL4OxX1oXcDzxxJqh5px2X+es7o+8Cy2sutVJME4dVUcONtZ6LlaRSjdO/QcrRkvjqi5BB7r73XM1NSmCWthrKMPLDPUDnz9O/N8tjYJuJs4uG+YENw7tmq8OMrrYM/NX69Wo4RSw9EQXrpyhESa6bV/xKtfLcmCJ8jih8iMJlU5f3T5gOneZfrS5mfTDzOSFiaJKAx89kaPg9Fj/KUPv8pK6wAa7bSmj1qbMhWT9O0t8Lzbsk3/dLXd2A3S6r004bTqCWff9dEJCqnqs7hlpXpP/kzbRlV9X4N33Q5rrXZmiIad905yb+rpepsnIAqCUIAIESGOI1eOsQ5bEs/snNgEBwg9WxYhMEIpdpsdz/yaq5LGONAMCIqCsqz5vndu0Tje32eu+I2qVTXcP76EonblEcGxW0NtQnFFdES8Wmnq4zwA0/THOvvw3LSo2ZM0BoAyJYhCG+HW1hGf++LX2d/b43s+9WEWOykBJVlhCIMQFQYc7e/TXVigHYX803/8T3j/B7+Nb/vQB7h65Qp7O5torXnzyitcOL/ND/ypP8Vav8fmjTdZWmyxvta3/KIBqL74j8bqmmLcg7gIHhHrNb/vxEBZMfmKg1r1s2JCjrHiFlPMsWLu3sliJ3ulmIv9zlFEx2LdYl0xY9+ZNO5kx7j91fPaemxAw6oq/rxaRZamDd45zOZstdXdcGzMc15qRos7x9QZQDt7Uy5fHbHUX+L82bM89OgjvPA1zbdf/1levfVFXty7RBqOORUs8uzVN5GldYavPsN3f98GPzV5EJ0LRnUw5ZTnDxL+q996H//xx
56l1xrMkaGa02q3GFAvCP4pxB0pXkNx8035KAXvOPPf1TDa/NQ0TEmVj6U6o8n3/Xk1060Yd2MZqxY5qQ1Xd5N7U08XPyEtcwgrdTuyDXQ7Pfg9yJRnBP65lI+9rSe0FlfjQFx0nFd5jP2u9ANPAlSo0CXQGOy+Y22/eTXBfh64a9eG8fo5LKsAY1TVxV4VrJ6xcU7DSlJfq2IujT66zdLupvncuXaCaSPsHo354tdf5o0rb/I93/lhVhfbxAHkWcF4PKLUhmya0e22uf7mFT736U9zeHRI2u3w2muv8ZnPfJZAwcHeDqur6yytLqF1xq99+tN88vd8nFPra6QuqaUa4o6M+37R2PrDWtfPpJrYcR+JMT7u1hCAs6UaaqeZDymSGogc463Xb+PssMYBnnia5hZQzzm1A253XP0J7hZWxI5r1TBp2CjTmrf6jaQ8QDjLZMPRR81QqdmtZXeeSbrJJt60IO7I+lgjCjMr2Nk8YPcrb/JEHDP8rS8zKwre/L6nWUmX+A/+7B/nma9+jp96ZofNwwkL022+vrfH4ZuXeXi5w/nsFv3+kxyaFvpwAkGALoXLu/A/fPEB/v1PvEQcTp0j1t9bNwDRzIFyBYhGcKlK1YLhoykcSa/if6sVprq+VH1RaSnVt/XMr6dxbdjB/+bHgfPbYJoGthLzNsUb7wnoumCK2osqhkAZjPLrtgVDnxthQbfJ+sA/oMJOaoUHXdupStUdoHEDNbCRDqWR2mfVQEDVcDL4bXoQUMbMqXhzqOnab99Kg6tK/a80BnHNT8zca/b3969XU8csz1mOKvblrVzCzmDK5595hdfeuMyjD51luZcSKWVjdwvNdDal016gkwaMZwPGoyHPfu1ZNncPaP3mlxjPRsyyCbPphMnhIZ1eF4k7fPVrLxIhPPTIQ3RbrQoEbDiZs3cDRrv3JnaBs4uR7UNVL/HHh8F7WoyUDkdduU4DSnS1KPvR673bQdNR0TQQiOOpom2crPsMYyogn1NX/WLt/luPCWmcC948VVtyqwhUt8jbb0r89dz1naJr7aHNB4b6ai4qQNdhUN4B5BcXhaC+/iYf/fRv0DraZLQ9YNJuc+vCafLFVaJWwcHVN0i2rvP9Dyzycy92OeKQH/mODuNswrenT/KL8SXaZUZRlKiliEOToA+ngOKLB0v83EsX+GNPvIq4BU5XoYc+SsE33D+dX7yo/saTIkfm/Od+4aRCIbsY+Zl1PGO10Ul4rXhOBxIFpo79tdhhw8xMBcwOsczx687LPTMveHCs2KSqBw04m6j73YNq3ZtOsRBB1bXQq689+2pCpHVI2A/vFsIkc7/XLKNOh200sHlS0x3Z7FD/ccWGGt+5oPbqcg3d04JWg1k0vP/iVvBCC6NJzv7RiN/6ynMcDiecWlvioYvn6LXaIDbMbDaboVRA2o6ZzSZc37zFr37+C3ROX+SD7/8IeVEiWoiDiNMPPESpFWlnAYlbBEmXdhqikp6t43Ds8X2/+HdZx+nWleMsu7v/0oBrJiWWYTqQ8lXpvA3XLjb1hArw7NNndjmgc3GtTfXee1oEV5OjsahZNinVvYDaESaqYUe29zbGNIad+8wRBjsQjy3ypjGcvTpcEWAPbA6svKnLvWAxgipLHvj15zh1uA1rl2hfLOi0VnjgiQ/wxkf/IEGrRRyGDNT/nc2XnkVHH+OyHjPYv8WplVM8W55nNgmIFoSNsGAUxkzyjFmoAVvT4+eunuNia5v3n98hDNVcu2yWkGehDbZpdNVfDRUBbxCrF7g67tbihXuHxq8w/nyvUzQmr8wF3dVkrBFvbcHVZ8cpRAob8oYD97eI6Lln5gW/+ijj1G9xk9e12TNYLz4EBmio3oaGrXqOXTT5Yb3KcOzAO7Wsgv25a73luXLsl2Pn+HW0Btx55LZ3NZS6vrhWc2dWlytRTHLNG1d3+PJXX+DgcI/B8IgHLp7l8Ycvsra0SKRsYogPi+svLZHlBS+/ep1f+40vk
+mUlVMX2d3bwRjDxukznD23RiDCK6dOs7W1w8b5i0wmBcNpxrPPX+ah86eI1bF+vP1R5pRQoErxvt+Yri9Hb/wAExBT5yIpZ7z2C09TS6pLebrIV+PTSd1YdFEBcz4HMSDKTbjabFBPY8uuKl5mqMxnpRhCd3dv+wUa/d5QjhtmsMqU4DUqP6/m3qkLz6rwyC0GkzHTL30Z/ch50sGYZPkiM0rkq7/Kl/MJ+coKrZbixouv8rUbPcbRBkV6gRu64EbUYiEWlsshnemACYqoyOiEPSRWzNAQtRiX8JOvP8JDpyd01NCq6h4Yba8de2v2eSsQNr7fPFD7yATj+q/BdYzXEZrXcv9Ww9f3j1/8/Efm2DkClNUNRGwWaKkKyNWxc26XexO90LBmGXFmBcHasxpOtuZgUXi7lFRssIKA2/DU94Zx93vrKW+aY03udGQjPtNThPoval7M/Mu5m1T30BUL0j6LyfgfP4Cg0CWDQcZoPOPKjV1evXKd69dvYnTBdHxIv99jY6XP0kKbSBnGkwlxHFPkJZkRtm7t8Mqrb/DqG9fRQZvRaJfRYJ/h4QFJu0W/3+HbP/Rt9Bd6dNttrl7b4qkPf4Dnnn+DK6++SVYU1nRQJdrXHWCc5lCKrxEhNcPFheq9lav2PS41bzVzY6MGXiGQGnz969fimK2p1XgfmVLH8joO5aMeKpD1KQammiki9TiuUivcqwhMPeqrX0x9rHcCVkuiIzzVFBHAaSaemdvokxpM5kOcDGWnzf5qh4U3rrLaTYiKDFa6HDy8jKjXuPWbP8/+IOBwGFCc+WP0woB2d5li9wA1m2GSlIKQPgXdoOCG0YyMkHbbaAISSmYq5Masz09f+QA/8sgXEIrK3mx8jHGtB1DP/pqhIiW1uaei8u4ax8lAWdlxRRp95M6rF0mnx3hnnpmPVLDRCX7M2HsUhaCjCbqriU2PdufuhdvuTZyuX5WrLJGqvQ01ocFUpcEKTOMcme+yihxWqkF9vbfkWb6D62bM/Xb7EsDcUfWZd77bbQtb9d7tYAjdRQyaEhsGlmkYj2bcurXLM197hV/79S+RpC0m+Yjl5UXGwwP2d3dY6i/wwIVTtNKYMs8ogoBZWTA8mmI0zMqC0XDAYq/LRz74fl594032tjdZ31imlYYsLS/zgSce57EL51GB4tIDF1jsr/DEwxfYvnnIePGAP/J7P0KsjLWF+z5vPI9X3IxfDKUGXqXuz4w0gDqEaz50KEQq1b85xT24eRukTx4o8ezXVPZhoTn2LZnwo8xUUS52FXacmlqTslEUQcPcZv0gtZmhGW1QQafLzfb/qxxJfoAKNaiJcTH9LiqnUfMDXYAKmfx7P8Qb/89/SDgb0d55k2tFn+v/7g9wOp/y8K1tMMKrqsfP7OwwuPRBUgpGScTYxCwN9gjMlDETsiDgVv9RZDYlS7sE7YgoSmghJKni6jTmud3HeXL5WbxHvZ5T3gzTNJ/U/VJnTVKf54F6LkW3nuP+nUhlkpHqXHuUBfKmVdH7SIFGyJrMxeoGeRspJ5T9XWbpu8x0qRrv1CVTf9Jcpb3zqhmm5T/HfeRPFag8v/YjM3/Nt2Cfc6z5jgfUQeAOofG1Exq5LZU3v0FtoHoO/73nDI4JYSgRZnnJaJKxt3/Erc1dvvzMC3z+819ia2ubKAo4feYsnbYiImfr+hWbsmsyhoN9et2UOFTovGDClIPhwLHnkvWlJc6sLNFptQmDgI9+22McDj9BnCbs7h8SJS1WFtu0Yig0zMZT+r0WK72Up9/3CN/3e55ieaGFd7zoY+TAzKkJzeBxqb6/o/LwHhe/qAfGaWNQRSD4eFf7Xu2sUoKtMOcMo3Y8uh093BU9c9Ku9gHu2jVwG7QIIR6kBTFCeIf+U9UsaECMW+gq++ucr8EzRP+3i86owpuslqn9GK4AGnxMrql4pVgEv3SR4V/9d3hxa5vF3Rsk12+iEYIoITn/GOd3nmOVfV5r7fFLecmRCDoMK
DJr795VbVqHrzPtXsBsXacgQfc2kBm0AkhUwcW20E8Crowuca6zSS++WTFz/2w2CcSZFETAHDc7VG+UCgwxLlvP1H3XXNoqVuc7pF4o5wmd2M7yDo3qHpUKMdcGKRPYD8jYu0Mbrdw70G2GgvkJWmGke7UC0lC9fB80x+DcY8q8qlE71t6aasldfq/u0cARqZYue03lc+epVR3LTHwaoeVIPhxHA+NpwdHhhOvXtzg6mrBzNOLajZvsbN7iYG+f5599lkwXLCwtsrjc5tFve4pOp8fB/i7MxuSzGcODIzbWVtg/2GNvd5szZ9aZRSHD8ZjcGHb2Duj3eywv9uhEIWEQIAhpN2a5t0JhNLs3N/lv/z9/n8cee4Q/92f/BLNZxnScsXZqleGwZHmpw/kzC9YnYOZD2rxUSQCuf4wBo8QW6plzt99nIq78YbVgC6Jcck5jvFb1mWuvcJW0oBWu1gE1A8VrnBZglbuOdXo5siB1rlOtWaiKeYk5PkrrldADbm01OKZiiaGOirH/FVfCxYOF+IXG1KzYX7OK3TSWcIRLPbKlPls8iOgCbTRKNNm5M/S++GXMyqN8WB7gM50+Mx0RpRFmPGB6tEmWdFHJBkfSJVaGKI1Ihq+zuNDmwmyf8609HloY8W2nx8hCwPoD2wwPI25cT8imnt2rxvx2iw2148s6q7V7JrtozvmKatCx/4jV2KpYekeU5pz0x+6FNHlwvTSBrzpY97adLiHqYPGuQ+8emRcqWKwe9rj27TNDaurbiCBw5x6vc6AbzPL4/d6Cx95RfM0Ee+tGoHOjEWIE0UKdZW1c3LhhPCuY5QUHhwOODkfks4zdnT0+/5tf4rkXXoI4QitYOXUKRLh++VWGhwdMpjMW+8ssJQlrp09z/tKDdit5UzA+3OVgd5d8NiVNIq5eeYN2O+Xm9WucOXua8XBEXhQ2HRpFPIopCo3E3uZo4waNsSFOjzx8gT/wvd/BS6+8Rl7YokMPXDiLiOK5Z1/hE594jCiwK7mP/6z6tGHuCaQepH5h+Qa7+z0n3qYauFRepDYzgCcEzkBgV9lKlfXsUBnHId2kLE095uvruB1PsIBgq35JHalQDXhnW/Z/Nv0ackzjqAZq/cY8eHiVbB52cQDiSzA2pFosbFiUMXW4ltXaDDb2VAMhGI02MOokSBAQ7n6dx1b7PL3Y4s1ZzN5Y0DJG93ustq6zcXSFi4sBF3rPsJgc0DmjKOOH2Tg1pLv+Iq1eRqenWV7ustJdQWc5Tzz53bz04gJX3ygpChc6V9l4qQGzUQayDsVz/TH3nzru1jTeYUV2G4O5ClRo9F6tp5jGqorrp2Zv1hpw8RZ+jnvGdOcacYcZ+js5Z5vP1/QYv9XB3jPsbZUYa5vM85LJNOPgYMhgOObN61vsHYzJy5Kt3V2yPEdr2N7cYzwcsbV5i+Fwn8HhDmIKjvZuMhkfEbd6PHDpYcaDQzbOnGex28OUE06d2eDgaEAUCw8+eJ61M+tcu3qV629eYev6DTZvXCOJI5ZWlplNxuhyxsH+Lnu7OyStNoeHR2RZxuLiCtNJwZe++jwf+eD7WOikxMoqUNoBSCuN+b7v/RSf+q5PkCQhQUeRthI0iofOnqbXbSaM+sy9YwtkwzlkjFP1OJ5Zfr8isMuil9ok5LJeaT6TZaKuZxqaWDVxjV+yVEWcq4ltfHWw4/4NqRMqMNV49NGhZm5iu2PMvBHA220rE5ihngxz6rBvuljV3IH4cZZsAder2KU73VYZtueXbqQYjIKd88ucH03ZzAKORmPCqOA7Nv4lZx78ZU63rnDq9IROKgSFUBQ5p85A0hKK/DpBZEhTUJlBSiiHR5RJQFko0D/NU089yIOXPsDXvtJjZ9t2qF0UVV2zoQGx1RJl/HttaB5Vb9XvpEqW8rzWm27MPIZIYyxUxMz5PjhGCj1xLMXwi1d+kR/hrx8fcMA9BF2/UlZmAFOvNv5BqljbOwJlg/I3vOX+pXtTTHPl8
8eCQWshK0pG4ylZVpBnBcPBmFmWs7d3RK41ZVFQFgWT6Yzt7X0ODobcurXNZDrh9StXOTw6pJWGdLo9krSNSkLarTbtbodpNqHVTemVLeI2tLsxe9s36S2v01tc4nBvk1ee/SLt7iKTwQ4mEBb6y7z6wnMsLvZY2FhleWUJoWQ8HDA8OmI2GaF0wdH+EUkc0Wq12N2+xWaaMByNCeOUzZs32d/bZ21jwpkLD3Bre4dXX7/Mw5cu0O+koLBV1QD7h6GdRHbHCAUL7dju/KsgMAGlt3kd6/ta5h04NWOojzkOUveHuDoXFViaKgHCJjs0xuccADdcOjUO423ADUUYEGdiqM/34fbzR0njxJo5Nz+3fa4bcbbzI98Yv6+FB2VTHeGPcSVZKrNC3f769yaQCPXOEL6lbnZhBIaX3s/laxv859F3ksgv8b/58N/j7LlNen3IJjCbCe0WJKnh8hvCaGwoS0MvVaQtQ6sdURQZaQC5Nmzv7JK22gTZhOHh8yws3eR7vvfP8cxXRrz2okYbVTnnfV/6JAhf0NJrGr4yWdMy7hNJ3AF3xBcVSpVgVqcTH9sxpPEPGI6T2pcPX+SNvWe4m9xjpmulqj+LqbNpZH6aVmtWcwVxq5tPxfXxzdp4hm/wDgUtQjbLeeP1a3zpmRf48jPPc/X6JqXE5DrjaLSPKUvW1tZ48OEHSdIWSRJTZlMO94+4ceMGBs1wcERvcZEnP/I+Wt0OcRiSZQXj0Zjd7S2mwwPKcoo2JdPpgDROKMIQCSPOPPAQk8kYZeD8w4+zu32LbDomShL6K8uMxxO67TZpu8UDD18idLUS9nZ2mI0nYCAKIqalYXw0RATiKGZ/a5e9nX0Ojo7Y2d7maO+A2awgaXfoL3Y5GozZ2t5DzBKLnZatWeFU40DEecFd0LvYgL7AKKv2NtRYr4bV6mjTnmYnW+1kwDvl70tHmi0LqiqfQp2R6MwDVfiuVCTXM0Sf8eX5VR1z6yMLjkG1VEFpdQafA8+KOIj7y2XUKJpVtxo6o3GgV31qahbugVZj9+1rnohjtuKfqwG+fuWoeWx1DlDZf6m+tYzwYOXb+dH9FS7FP8oP/eD/SLRQMBoLYRgQaU1vASgsAdpoQRQo2n2NmZTIEMajkJUVxfRoSisyEMHBMGd5QRGLZnp4iyL9BZ56/wbd9kWe/aqiLMOKUXoHfN2yug8rkiZUBM2a3+ZZr+1UD9YlxcoANe4gw6g22cwtdMelYXJyf2+kK3zq3MfucKyVe1ba8U7tk0ZnCXc3BVQAbGpmBf6ajeAPY8hL2B9M+PRnPs+nf+VzfP7zX6DIR0wmA0ATxR1WNk5z5sIlLj76IA9cOke3m7DQXaCdtiizGVeuXuP8I+cZDoYcHBzSardYXlmm2+sgorhx/RZFmbFz8xrToyFROyWIAmaTCWEUoVXA6bPnKEVYOX0aJUJpcs49+hDj4YA0VFx943XGh/ukaYtT5y5w/vxZiqLgYH+Pm2+8zng0IkpS+qurBHHIZDRmNBySpilH+we8/PWvcfGRR2ilLYbhgOHBPkcHhywtLjLOpqCEnb1dyrzH0vKy3TVC1QVrgMaOFMdWPJrM9XYTQ31Q/YYqkLgfEReoakd4sPQ+BZ8g4tVU75hpsMKq+oHrM+UmpprTeKUiF0Aj+8we451tdjcHz3Y9OamhpMlEm5lZ9nQXdUPDwWxKV7ugDhmrwdbMv9g58Dd1Onvj6+oYsazXD5OcBX785TbbRz/LRz/wk4xnhgsxxIFhOC0xpVBOoJ0mHBxlpKlBtLC6EbCzWTLch2w2wegOcTuivJnTS6EdZOxsBqyvLjCbTNkvv87i2iFnl7fp/p6n+MLnY2ZZjDRqUFSV3BodbLV9vyDRKADk3w/VuzPOlm4Q8sOMaecWvc4DhKN0frWDhtYn1XvRpU2HD+OEIi9YDtdZW1q769C7Z460eb2o/vUdzVE/Adx5jeQt/KosQIHwa196la+88AZbmzeIV1d5+
rs+hdGG3e1t4iii1WuTJCEbG6ss9BeYjkcElBSzGbN2y1Y7C2H/xhaDwQhQhAGgc0TnjEdT3nj1JUbDCaI0cTsiiITx8IDt6zdo9Xp0F5aYDAboMOLsubO0uwntOGQ6njA5OuTlZ5/j1puX6XS7LJzv8/iTT6KUoiwzjg4P2L11g9FoxPq5iyyf2WD5zDrjowGH+/vk04xOr8ebr72GKa0TpigKJqMR4+GI8XhC2jqNCkK6aYv9owFBlBCFEZ1WggTNwuuG2SwniiKUc75VGx/Ohck4pao5QN0xLoLKzVWZn8T3kSi3dQ0EldMM8VE1x1RJHw/oprmfcJUJoNLEZO6z2m7okn9oFLARzVwIk0djz8J8FI0xGGUJhncui9TvybPxys4pDcCu+FxF9WpPu38e06hFS63r+EXBtoWqb6xTMOT1K2d4/crX6T/1T5iYPl9/bYsy0JQKZiOhlcDKojANNHQM0xFIZNjeMfTWYra3ciYTw/7OkF43oNdSvPmy4sFzJUkhcMWwtLyIDCOCUx2KU5pe8Twf/sAFPv+lNVvQSoRmjl6z6lhlanDSLDQk7m/jsgTrnAEhna2QmwP2oxc4u/ztFAclpS59J1TX8wu10ZrBKzfhoYRTp57k1j/8Sa7spXz7IxfvOvbuUUbavLlfye1ZY/7dv63jy4sbTJ79FqXhc19+hc/+1ousn1niYucc12/coNCG2WzKmYsXCIKAOAm49MA50iQgDgKiMGQynRIoYTAYALC3s8fm9Ztobdjd22Nhpc/p06dJk4RpNgNRZLMZvYUeB3t7TCdjDrY3KbIRRZ4wmU7Z29tlZeMUs+mEtbU+lAVZnjGbThgNBxhjKMqC2WxKv7/A8tISVwdXrV05n5FPR4hAu7fIqY01BkeHXHn1VQb7h0ymBhXG3Lx+jV6/T9RKIcTurEFBURRgDIEK6C32GU2mdDuhzTRToVuZDUYL40xTZmNWw5DU7c6sq8no7JINjcJXyHSvwDrqjC0djdhMtbdw1L5nRYvfSdqAt+FSM9K5GKJKHxf8RpFQk8dqR13H/Bu6BeBA9riN0ZkyrIOtaeX1QO061t1k/nN/OVNbBqhNAD7r05jbWeDcdpz+majByF0Jzxb91bVj1SIw0yt8/StXePZLP85f+r4NLpxtgdri8vWYrMzJxrDYhVIZRrOCVgemBrolzAaGVidnZS3gIC9prRjGlzXT3YBIDC98VvGBJ9sUcYl5KSCI+pS7Ea2tGL3aJcr7XOoJrx3UoWF1oRtv5rFmsGOk1j5VI3RBUM0uJgoUhRF6+UOIuclh+jKnzr6Pw5sTdKEbJRsMkQS0CZnuH3G4vM3q8mlefvNr/E8/9WW6iaIY3uSH7zL23hWbLtBgC9/IOfWvHgiMlGiBa9tjPvfMFRZWI7Is48bNyxztHTCZFKye3iCOAs6fPUWnm5LGEYnb6ubo6IjZbMLe3ojhcMBkMmF4OGBwdEir1WZ0dEQQKGZLS4zHY3oLXS5ePI8YQzmbEmB49cXnmQ6PKIyiu7hMb2mVXn8RI4Zep0UgUBqNznNacUIcx7TbbabTMZPRkMloQBRt0O126HRaRGnC5PqQ0eERWoOKIs6eP0MUCm+88gbF5ham1ebo6IgoL3jwiYuURU4YaHrdFtlszMHRIWhNf7lPO05QSlGUQFbb6I6GU1545Q2m2YQPPfkoF06tVGBQsyKpX1fjG/tZHS4+947uQ9D1pgV81apmOKJn9l5n86zTM0zHALWRKsnB/d/hWKMfm73VsFOYxjfewjgXnyu6kYxjId3r0t5N5glIXekZfKKP38KqukcVrSDU2+XU53jnXfX4rp5BbZHwxg0hH65gBi+gB7uMrn0/O2rE+vlNUDsoYKYMr28L17eFbg9iBSYw9BT0F4XZNbswdLuQbgZcfLLL818Y0esq1k4ZeDWjtd6GdUEdhJhxTLHZQY0EigmPxEvsqZB9ZmAKt+ipum6FeIpne8f1nvvEaot+mfRvS
QmEoaLMC0QiesVZxnvXuJU9x9L6OfRRjJ4IvSShnWdMx0NmpXBzsscV8wVk7zFm05iL71uiQ8Eoyu869u6RTfft2evcEY0l6U5nzpuhhMGo4Gd+9tfIiwEyTREm6DLn1o3r9HorXHjgPMsLbZYWOhgKxqMJh4cjyrJgNBqgtSEMA+I4ZjQakWVTJuMBRZaRTcbM4piD/X2CIOB0uEGaxvR6Xa7u7rC/eZ3h/i66KFg+dZH+6imSThsjQr/fIwwFXeQc7u9xuL/H9uYWSimyskAXObosGBwdsru7g9YF58+fZW1jna3XLzMdDjk6GhCEMa1Oi9Pnz1IUGlOWHOwdoIKEbr9Hu9tlZWWZJAiIImV3hyhLtFLMsgwThGgp6bTbTLMcYyxj/YVf+gx//+/+GGna4gd+4A/x7//bP1RtB3Nbn5v6x7+X40fKXT6/H6QK8XLszQKPU1WrkCCv9UsdLVNlMAki9QartZOG2wLyvT2xNk54U8Y8m6zdWqauhQ0NxkuDSZs5B5c/zle48oDrQ5s81FfVyqrTdBU37FtBdb5/+aYi+wJcf/F18skmSavN+NZFbu3vs3X1CR7+0G+SLkwJEkOUwmQA+UzQWhGFmiMtDDMIRgqTCW1RLGhNPCmIl7rsHyq6k5xAYDBKSSc9wlmb0UDTnRUExSJtGZEPb/DJ9gW+0l7nptzCGFtTwRbt8e7Nipa6XvemktrOXidYGISAaVbwG7/6Szzy0PtZO3eOVJ1DjzL2xocsdIRzp1rs3pxwPRdMmEIELLd587O75Le+xMaZM3zXxz/O0dGA4fTormPvHiZHNGmq+/yYh63eI43bMtFuF6fmaOHnf+VLXN/f49TZZQ4Pj5hOxuxv7pFNcx780AOYbIzoECEhc/uq7e4dcHR0iIghTkLiKCQMFe12SpF1OTo8IJtMUIEggWI0HBEEAbooWF1fJ0ojitmUG6+9xHSWc/rS45y+cIn2Yp+0lVCWGYv9Hnkx4+BwyvbmJod7+5R5hgCBCmgvrbJy6hRpu0OZ53Z1jRQPPPwwr33tGYaHO4wPD9CFIY5bdDttslnG4PCAyXSKioXV9VNcuHCe/lKPNI6Jw5ijw0MEWFjsE6iQMIyIo4A4FLqtDrsHA8rSsLy8QqvVZePUCh986nECr166jrfz3O8jZ5NCKuBwPxqrimqX2VOprveZ1IEKDXOCc36ZCpWcXbYRg1llI1WgLDVT9q5y/LXddYx3+thrGeVgoALTKjq3AmLPXsU07Kw0+YnMmRY8MM/FrFeOtrrdnssaKe21XdKEr69h0dXgw8Q8sHtlXRMw3d+hmA05u3aavd0h71tucfOlCwySPcypN1hcG9BJoL9gGOwKemJYWDcMDoVpCVlo0LlmMk4ZFIqhCdAqYq88xQME6GmOlAmHZUwahIQ6Yjou0WqMdCJM2KU92uXDuealxUVe5cD2j/HJJbWTq3JY4qFXN8a871dXiF4Upx96kJ/+uZ+gFy4QJCHRA4LulHz8sfPo4Cx5cQnE7t+Wqhbj7SswanN5NKGVHtJDYaIZ7//g03cde/eu4I001++aMd12bHPgQ/Vy73BRwHDt1iG/+utfZGltmUAU25ubFGVOq5vwp3/k32B1Y5W9nT1u3LzObDbBYHdXmE6nTKczwkghCibjMXEUEUURUWx3vy2KgiSJ0bpA5xm7W5sc7u0xHgw42N/n+usvMJ1lrJ+5xOq5C1x6/FE6Cz3y6YSyyOj1ugShsjs5lCWHh4eURYERodXp0Gq36PS6DAdDup0WaZoQRxEb58/QW11lePk629evsXXrFisrC7RbHdJWi6TTZv3MBkdHE+I0pd1dYHl5iTgMicOIPC/Y3z8gSdosLyy45wpIQkU7Ftqri2RFwep3fZhPffgJup0WC90WuKQAZaTamcPvv+WlQcRcDV2pJrfWcuc0+PtABMvuKmdDFWJkmB+CDsyo2V6l8ktd8MRQmx2qTVbF1KBd3bg2WdQJOj4IDexWN
KYuQFMBB433UgNiReh8W6sWmka4lOfPngE2mLW7RrUvnDcyOIasqRmwZ+VhHtCTgnFvgfHgdYL+g5yXGeXXzjL9yozux7YpF6eoNCPe7hGvjSlmJVqFLCxNuXYtRHc0k7xg0ayQ74RE44jCBPzKa4v8oS6EY0MgJVMFUmiECMo2mpCkBbNpQaaFJ40m63e5aoZoKRGXxF31lEv3bjwZHqCN1OU1ce/j0sWHWfjQwxQvZgy39hlcHfLAx1pcePwprl17hVV5EDGaOEzpyQLvf+CDtKINvvbir3Hz2vM8+mHDuZVvdyaOO8s9t+lWA2KO7dZ/V93jmUN13PxVDJDnhn/6zz9HIYY0Vbx5+bKNiT17ht/7qY9xenWZrMgps5yr10qu3bhJp91iNs3tppUYAqUIlGJaFMymU8IwpNfrkaYpg/0DEIUKhL3tLXSRMzjY47VSMxoMKfKS9YuPcu6Rx+gvrdJutemmCSYJabVS4iRkb3+PbDZjNpvR7nQAyGdjt716Qm9hkTzLGE/GBIFCa02axLR7i6h4i2w8Yndzk6Nzp4hj+31/eYlWp00QH9Lp9oiihMXFPsoYwiBgfWODoijptLvYjUADwjAgjEL7PGJoRSFpFLDUjQEb16wRlFcrDT5EFJ+6Wu2E0GRU3jJmLODej040cFxINVkqruh/nfZZs1eqSdmMYmiyCB8+pxrj2bNRT0KalEJq3b+2Orq42ar8aU1rq1v6z5o5gf5ce5xumCCs6my03yLc5xI6CHIv3lTXsE427cOrxI4PjVQbeIoxrHUj9t13N1TIG0fXeSxNWY0KiuEi8ZcTyo5GBxlqusq20sQPbtFdmzKdHJJlBcUwJxIDk5B4mDHLttndmnL5ZsCt8Yy/9KGztCZTjI6IggCCgGBaEoQbZEcT4jRnXI54YRBzMVqm6BTcZNZ4N04qx6dlvz47cE5/aESKCIqnH3iSL3zhV2irCYtLId/5hz/Azmjb9iOw0F0l0T10UXDl6i1mk4xzacojj34/OWOqzL+7yD0H3bmh2egPgWr/s3diVgDDS69e45nnXmBxrc9oPGRn5xbrG+v8gd/3Kc6s9TGlocgyOq2Ec+fOMh5PGI+GhJ2Y/YMjwigiCmP2d3aZTMeoQNHpdJB2xHQyJZtNiXtdknabKEoYHR4yG48Zj8ecunCBMOnx4OOPsXJ6A2UMpzZWaHcSwMbAjkZDQhWgRLGysU67O6bIMkbDgDLPSJKEIFAcHhywsNRFAsuKgyjkwUceYevKDXqLPQaHuwwHAzrdNrNZThjFJGmL3kKfg/0Bk8mUOEhZ6LQRDAttjVIBrVaLVppijCFRilhU5YC09RLmGayNuJFqk0k0hFLXYFCi0UYo3YC0+6O5ECPHujSm3iX4PhIRH2xk1X/BbevtYFM3bKfgiUODVnpN3C9UeNNEA7T9FTwA+1ObwOkzo5yqP5eMIh6c7Y2a6b84tdkAot33Rh8z4dVA6kHVVx+z4K0bDFDX9l+3GJQNs4LdU8wgZJw73SZ9fpdp5wF+JhsyiBd5Rh/xRByhOiu0TI5ME0ql2E9TpuUucnWRfLMkylKWFwxZV0MrJC9m7GpNL045s9ZhfCvkhcGYv/31Pf7yuQ6hMpQmJtARSEAxzoEA0YalTk5GxkubJRcW2xSrEdvYdDK/oDQzCOffigfhGnjtY5c8un6R/g9+N4f7r7H09BK95SWu3HqRjeBBlloX6addrl65yu7OLvvbLxOEQx54+oOESY9b268xne2TxIt3HXvvQvRCcxBQMQZpfPt2YgBdCP/4H/88RTkjiuH6jRtsbKzzg3/sD3H+9DohJVNdMJlNyLKMKAxZWVlhodcFDP1+j5s3b3Lzxi0EodQGFQZ0egt0O12iIOLo4IhTCwss9fuEQcC41yXtdlhZW2Ohv0iRlfRXloiSmHYrRRRMpzN0WTIcjSjynCAIMMYQRRGm3YJWStKKEG3I8owgsqaM/b19oigiDEMipRhPx8Stl
NZCjzhJKPKM0XDAaDyiKApOnz7FUn+Rhx9MKIuS6WTKSr9PgGEymdDutEjT1O60rIQkbGwQIzSm/DwXaKqSzS+9t9xnUvlEyMDFNRrBZj1paVoj7iOR6sdXnmoyAp/o4G1/TbNDbfd1f0ujX6v+a2Qq1fYIe7y2LNvbcS3yWXaksGF4c3v2iedkNj+zWjCdzJtEnE5ibBywvXW9OZS15QqGotZeHEu25NsCkNV0/Mbi2poZjGXKwZmAR1fbvHa0ywMqQLcTBsEhL7VGJO0ANY0oyoK2LohjUHFIyA7D/JA0zUnLgs5YIJgyak/JdYvx5CILN/p8eFl4aGGVr+7O+AfXZ/z5swEqmiKhscXkS43JYqe+t1hTMUwP2YsUG/stBv0ZU6XrYCnxI7exWjrWb5/eWDOQwe6C4hbN1Qvn6b7PUKYZw1FJkY/op08w2xtxs9jh+uUXmE1usHzuDL21RwGFNgWry+fZOfwSq9GH7zry3gXQbdLZ23XR5ip0GwS70SUYrly+wa999td57ENPYjBsnD7H9/7e7+Ti6Q0S0WSznPF4wizPmWUztra26PYW2FhfIQoVYkr6vZQ4DNg/HFIeGhaXlgijhLTV5txDDzEYjwkV5NmUzvISp8+eZjgYOJtsm8l4hjYlrSQGUxLHEdms4OhoxHQ6QamAo6MDsizDb9suSpAgsItFEhMlMWmakrZiWq0WURQxHo1ptTu0el26S8usrm8gCmbZlLIsieOY9bVV1pYX6HYXKUuDKaEocpJWSkoL7bfvEWhHMYHQYF136vm3eWUyf444UwTYiRk0QOR+NOsqp6tX40+UAxywAKmq8eeZLFiXkzRQT6rvqU0KTqpLufPnLWrOnFABgHF97kGhMSN8ZpVj2s0asfbetpR65eqr8MXgi9jYD6py9NRQZGoLStW+EmO8HmAhV/v2UjJsHbFy6QxLL+1xfjDla1sjkrMrmP4I1dlBTQrCDGaTgF4aU+gMUohjxTifkSxETMY5g9yQtiFjitoOUaUix9CXjO85E/P8KOZfZDF/ohWRTY8wnQQJY4g1RaFolUKpNLNpzOxwG3VmkbOdBa4k0+aSUr3vOT3CmMb7tn0diiFPhDwekbX3KYojTGY4HO8QB4bpbkF+9Do7N64StUacefJJgqTtonwM4ko9pmmbrBjedey9a3G6zS3Ogcp+CDXU3pkwGUqj+bl/+a9IF9tEaUqr1eXUxgYXT6+RhtYBNssLlApop226nRlJmjIejRmNEpYWuvRabZIgJD9/hqS1h1KKdqfH9Zs3EFFsnDrN0tIinSRhMBiwd7DPZDohSWIWFrqMxiMCVVLmJdubNwljxfbWFsYEFKUmz2aIGMbjMVprgiAgSdNK3YuTGNGGOI5ZXlmi2+sRRSFiNJ1Wi+5Cj0efej8b5y6wvrFBnk+J4pCFhR7L/UXOnV5jsdum3eqiVIQuYTqZIAhREoEx5PmMTrtHnETfxBuqDYa1xcu9O2OqrXo8+7UOuLd8ce9p0eK2XvfKd8PsVXm3XUSDZXxQhXlNRki7W2sHlYnBTmTlPjBz30HFsvDQ5xhmZVf3C0DDnOHtyhXTpEGpsSaDKi7VExgLmo6fNlRsf39THVsKBD6Gt/K3KPvMYjPdtNEYSnBZfAUZ2w9foHcw4tzRkD2t+dXffJ14Q3Hhu1MWNrbolhOKXDGbdmmlY7JsAIEiais2y4x2XxFp2B2HqGCBtJWS9VuMxhNmQ1hD88Fl2JGUr4ctnszG5KN91FJINumiTA+TxaRRi+V4xC+8+ia98Q6Xwot0zi5yqDLqOL1G31FrEMfT2HOjmSabTNJ9IhQECpMpivKQJOsz2tlkc/NNTl1YpbPxqL2Crl2NWguIIokusT988a5j796Crh9E1BV97hTDa+afvRZn+N87GPPFr77IY0++j+F0xLKUPPrwBRY7lnHO8pxpnlkfsNhdckUp4jgmSRLKUlMUBcPhEflswuryIt1Ol+FoyoVzF8jzg
qyYsbK0yCOXHmA8GrO9t8vm1hbGGPI8Y29nh/29fcv4VIiKFGmrQxK3HMOe0W63aXfaaA1xHBNFISoQkmTR7pulNe1OyuJij25vETEaUxZks4z+Up/1U6dYXV2n2+synlhzxcJCjwvnzrK2vEQrSVAqQEmECYUoDNBaowSSOAFjSMOwKpx9WzHs5qJXR73f/uWc7cH/cgctxBw/4f4RWzBcURXJNtr9TrVQeoZbd5N9zkImhLSxu8aqqjau/VbhU8H8uRWjrJayRt83APdO2kh9Tq25+Hen3byq2Kx4nHFtwLKvWpH2aRXGAVGN3dlwk6C7QighWqx5QRvrgJMqfNDdVwyDriZ84hzrL7/BhwcF189f5Ktfe4aOOUfr962T9Z8jiseozhFiUpJ2hNZjlDbocchw3AMJiHRAfON9LEwV7bhAL2iSruHmOOfWICJtF/zE1j5/vB3zofYSMmtjIiATZrlGGWEx7XPp3Cl+4tYb/J5rN1jSJXJhAVt9zNrNdWVO8guMcl3m7dq2h+Jxn2kyZVZMCQR2h5cBOLoeM9p8hYvv/zaiVsuZW1wNZFO5QhGjCVSM4V1Ojrij3GHevu0pjl195rPPsLh2iiyfYsqMpX6bNDQoU1LkJdNsxjSfMJ3llBqKokQpVdk4AcpSo01Jp5XQbvcogevFNoPBGJ8K+dhDl1hop6z0upxaX2FpscPewQGTyZRAFAv9RfK8ZDAYcnSwT6gCG+FweEgYx8RpnyRts9BbIAwjRAzdboswVCgFUmqSNKLVSghUiCkLStGUhXDmzCkWFvssL68QBorprM1kPGZteZWN5SXacYwKFEEQ4rc8D8OIg4NDgjAgCSPaSUorjBupvO/svczF17/t4cbVZHCQJL604/0lfuv12pLr6t95E4H4NNNm6BWAoFJFXk6Iw677xIovhgJSBRPgDcM+jtaBXZ0U0YgeaJoqKkRsluqWRnvqbeO9iLHONB8FYRcT5zjDZ7iZ6h7G2bOtTSrgtc1/ycOn/1BdwQ+cccFmsDW3gS9Fs3OqzeknzrL0/GU+OHie/Mn3s3PjKsWLFzk63UJFh8TdZ2F53+6aoUJ0WZK0CgZbiqXdb2dBBwSmxXR6xGRwxHJf6LYMrd6MUZlxY0+Tt2L+/n5BHC3xdAwtWUanMYoOxXhEEfX4/oee5lYS8+LkOo8eHpFuhkSnUpvwUkUS1MaGejnz78zajJN4iaG6Rl6MyWaKstglCfuk7UVOffhhjIps6KQHb4rq0vYqJWJCFluP3HXs3XvQNQZRDc2l8dXbz1VhOJrxC7/4WZbPrXG4t0d/sc/a6gqddgSmJC9zJBAbfmVKjgYDJrOCdishm82YTgwzoynjiDiKKUtNqUtUEBFHEUaXbG3v8NClc/Q6bTqthDiJybKMRx66xGQ6JctK9vYOGOU5o8mM0WjC1TevcnS4x3Q2xmAdZ3EcE8cRYRSSJBEL3S7tdgpowlDRa6cAxHGEznOKPKcsQsqi5PT6Gv2lZdrtNtpoup2UrN2m1+nSabWJ45jafuc3hDS00gQVhKRRTC9J7Pbs9cy9Yy/P7cjh/q2UpDlbrg/UZy5Rwtsoj2f23E9SgY0pQayhoX5AB0nVDrC1BxwxKGkz0AMiulVmW5X6W9HSGtrs/++wnbgL76pZqf3dh/mCQky94968zRdnTPfX9TUHvBmhVkXEMz27E6ULExRKMQRuMYiCPsP8MpvDr3Gq+1Rlp6yv3zCK+EcTxe5HH+LU9ibnX9+kdf0LfPnMk8TZjL03U0yRcHTrMZ76WJuRepV2GpBnEZNBC1Uo9EHGCM1qS9t6BarFcPuI0QIsrsFSd0q8NGVzv8dRb4mfKTSxtHkyWYLhES19GbN/jYAVgijjT555lL+1A/vpmODNHc4unoWWTzmp2Wi9d5x9qjAIWF1cpb/8KG9O3qDMR+gy4GDwEnEYoZVmeeMsJrfOsjpd0WDt3wFQggncOCiJ1Lu8G7AX6wRwe6Vh3uG8b
B5kePX1a7z8ysucN0M2r70OQcLjjz/EeDwi7/Ycu9AYrVEKWp0WrV7MZDLl8OCAPJvRaadkeYbREbOsQEtGq5PQ7fXI8msEgWKh17GAGcZkeYEKAhY6HTrdLhjFwkKfg6MB+0dDpuNbJEmL/soaWmcMDo9oJSmtVmqdY2FAp5WwurRImiQoJaRpRCuO7BQVIc+mjEYjojAkimKiKKbb7RLHMWDt1GmY0G61UEGISGDz/Bu1QgPH5kMV0G2lFnBdF845Rt6hzHnkG+fXYfNu8lPvJPGN3uO9I47lUI9TX/qlZnl1D4BX7xUiCWW41XCimeoazThb0zhvzpzgGKfX2KvQL7wZzrNTbwKpGbHFSA/kpg7PFVMBNA0W7XQTd9D8wmpcSoRdQGP6yTqbgy/TThZphedd1mGDNQsN550dB1k7Yev7P876T3+a3tGY8Oh1tg6eR9YusdU5RbKwgtlfoVP0GQ73UVrTThfJ8wISjegZgyyjQ0kHQy9tMRiO2ToS0gdi2ssTzl7YZ2EKh8OEXwpaLE0mPDC+DAJxt4cKuuijK3SyjB9evcDf2d8kY5fg+T1OP71SE4nGexFRtJOU9ZWLrCyeZVoUbO68yQ3zGXLRHOxvosqCRfMYobSIkxCdObMMfjES/M7A9p0W1e4eb+VevsdM16sv34zYjQ8/+7mvsrq2jCkLlMDR3jbPfvkrPHjhNEWRA7bUoVfNtC7JsjF5nhMEAjpgMpnQShJG0ymjyYReHFPkBcZAu90mCEKSOCGKItrtDoUuCYIAxBC6sKjFICKKQtrtDqPhhM3tLZIgpSxDjD6i0+2ytLREkiSsrazS67ZY6HRpRQlBKCgFcRRZp4QxzNwODzbqwNBq2ZAvwRnnS40KBKVsaUafwSTOaRIEyppOjKEdR8RhvT+B7/HfDvds1te9HVnnr1wrrPePGAFlNEYC/wnQSCzwmpkHMPHFfgyGgEQZSp0Tqpgaeo0DVc82qZisw1ZHQA3GbeNOdWajSpmpnV9VjdtGNIO9lna/aGeJqN++ruyUjiVXi7BdVDRU9/DtVErYWPo4m7f+MTePvsClpTawiIdnf77N4lLV82hg0k1of+ppkl99lo0g4lJviclsl89vHnFl/VFKFZHGHYJiQGEKJkc7dNM2s4mhjCNmRhiXOSM9o680C2nEoonZvVlydQ96FxTr5wythUN2Bl1+uYj5M2qNdrHFLIMoicFAOJ3x4KLmD/fP8HNpypuX36B9bcDC+Q7enKBEsdhdYmP1AdrJMoPDfW5efpG9ncsMO4Zi+YjxcIiZDbiQfA+xWcbMSkym5xyZVWyv63NjfLB7gDUx+HF1u9wT0PUvBBp2KP+FV3manjM5frad9IPRjFcvXyPtJAyOdpkMhwSUTMYjSm2YzjKbclkF7YPPE1diGI+H6LIkDBSahPF0xmSa0V0URCmKckqSJvT7iywv9UmTFiqw9lHjHkICN0EVdFWXOC555OEHWd9Y59XXXmd3b5ddhPFozMapiHbaYrHXYXlhgVaSEAUhQahQ1ZYB2BKPCGnasrGBxrjEjRDcQI6j2D6T1OzV/mMIgoAwDAlEiMOQJKiB4nZbbuMDMz9xG2/rDp/jbOpNB0+dTmmOO+nuQ6mqETQSIcRtklaNWqk2PsLb0gWhZ9qMOCRinQpgmyAJLhSrdqDZEarrzV9M9UqqzyyQuc1nxEOFNSWJA+va3NFUNXwgWNOeW5c9t+ea+hRXzxcDWjTKQCs5TSfqMs5usXf0BZYWv9s9s621oYypEkqaC63BsHtxnZVve4D4uesERxM2OiF/NNK8vPNVbrY/xGHYQkUhxdDurLK7f0S3FQFt20aVMtKameRkuaEnsBZH9FSH65dzXtnLWDu3xNpij53dkk/HJd+fL5LEB5SFTRnOdUk83uejyTIv7OzxtV6Hm6/t0DvVJkoilhbW2Fi/REBCMR6wtfcqt25e5ta1K9y6dZkz33OaMhdm4wPOxZ8kZsk+n18sv
T2esjYxNOeCt6lLwxx1B7nH5oW6olG1Vt5hrt72kXMivPLGDSSGpJ2gggV0NuHoYI9bt25xsH/IpTOnSaOYaZERBIFNTNCaPMsoioI8m6FUwGA4IowTsqJEG0jilCiO0YMR/f4C/YUuG+urpElKEkdVi+zWLAYtLiA9CAHFcn+RVhKjTEl28QyPP/Iwg8GAXq9Hp9NmfWWZTpoQugWhXmB0I2pAbGGaIEIEirKkLEp7rAhRHDHLMutBFhxdUQQiRIEQhkIaRIQV9bkT4N65j48fZqojzLHPpZ7A7hODojT+afw+sfcfAFd1ZuuNdJxZRtAuLraKFLBn1GFlAqJWGOurdGUdW9BGV4Dpt2ivwkCp8/s9h22aFYyIC7/zTNcBr6mKOFZXqa9TBSpVb87H0vq/ql0x/DNU9/csuawWHAMERnGm91Fe2v2XbHENlT7PYvz+Cl6Pj44qP86FH+099QCrgzHx61vkRKycPcOTO9dZv/k1XoyX2ewskccJRZbRTVscTSbIdI/l3gIiMVnQZqAgVzmlKihRdEW41FpkNw+59tUpvXVDkCRcWW5xZXyZC2aFJFRIaQgZM81C0uAWP7Tc42u/9ZscrXQ43Mz55Hd/B0oHjPe22N/ZZDg85Nq1lymKEe2VJS6d+QB5+5DRcJsz4QdJWLVganT1Du0vuvq36lZslIgdQ77mwrcwDdjXoze4wtfvUEoDuQlZ6HfQZoySlMX1FVQIRhvy2YzZdEakgioRYTqZuNCwIQcHB+R5jgoTCm0YTabMsowwCCnLktFoii4N7STm7MY6S4sLtNKIJAypAuEd09S6xBhDYRxDF0DHnD97mvFsyqyfk2UZrVaLJIqIwoA4CusUTrB2Z2OBPMsyjgYDOq023ZU2ALMsI8sywJod4tgyXaPts2FwgGuL3CRhUE+Exn3ecVH4SuQOE8pLE3DdJ6aO8tQYtLFmoPvPvNBcDJ3dVAJAqvjj5u4NNRvGPXxIaaYUZIQmpOGKxNfXpeopoYYtby5QzaOriARf5c2Dsj3EFcRxJgkP7FQLXl2RzH5mHO81c3cVt4x644NfcCw7s0csdB6BrV/laDYgPPoKrSUhiR5F6wCjNImETArvsTcYKXFJ1JSBYvfjj3NuWlDeOqLY32Oh1ITtFq0w48b+K7wRLrEpUKqEbhKTl4adowEo6LUWydMWuQ6ZmoL1dkAehrSMsNaKWOik7JQ5mYBpt3httU988xYxhnZnzSYrjQ/Y1opz/VP8R09/gL/2ld/ia8++zmL/DN1ii93BLvuHO5RqxNKZC3T6D4EK+OrnX+Hq127y+Hqb1iMbzqSn8VvSVxqEcXX2jqmEdg2vw/J4t80LVVuoJ2mdo37n4xpGCEQgL+GNq9dtqJUyxHHE4cEMJUKrv4CKQlqtlo2tdPbOIAwpnT02z3O7g26kXXlHTZ7lLPVtcfIoTmmlCcv9HsuLPbrtFmkUVc4oAUwQ2A5VIQZDaCBUmkIHxEqRlyVKWRW/TBKSJCEMAvesurK5ApSlodQlRV4wnkwYDUd0221EvGnEWJZe5BaY8wwQ4iQhDEKiICQNQ+IoIg6CancOL3XAd/3fu6s4dbh8TZTvdrTUDhTj36e4iAb3su4zwAXwKbW2L4S6zmoFa9Rxtn6XSh+Da49LgpRxOaQnfaryjKZWvZvXqLz+NBdGr6JoRAJq1urbYur7uUUbz5Bd6rLj2O4unoVVm8E39JCa/pjG8d4xpx17CyTg9OITvLLzZVpqwO7gN7m4fI093WMwGHC6//uqWhHa3c/biBFFEYfc+NSTrP/ylxlv7jLORpSdlLi7zJNRyXq5xbUg4Y0y5kB1UUGLNIkZZxnTckJ2NCZOerQ6XbaKkLHAQq9DWuT004jTccpQT5jqmOJoyNXyEClK+llJEGwwygpiYDzc5tLSGn/+0nn+3s6IX/5Xv86DGxndVWHp4hmidg8JLDMt8oCdHc2Lv/k8n/wL34MxClvo/ZhTtSJQtWpZa
yH4lbPBhO4s70KVsbdSbGupA1/sz9GwoDSaTqfFzk5Blk8Iw4BplpHNxuzs7ZEVJVEcOHBTBGGEiCKKIhYWFhgMRxwNhmgNs2lJu5MwHA1pddosdFss9XqsLvfpddu044iwyWbwc6uejAEQuVCtIgzIypIwtOzTb/poQ7lqVmsvYYFLG8MszyjKwoW+rRKHAUVhSKKQOAwoy5JCa0aTMWWpWegt0IoTWlFMOwlt/rmhijNtitz+0VyPV6rysYcUXU1/l21aczBTXUOqGFNfdaqC+LkdGe8PMaIbm+Qwny3Z4KDSAFn/u4fktlriQB/RMwv2GtX7xoKwqieqN615rnm8hGmtrEMVmVtpMM2dgc2cmWKOx4pvo534/pwaq01VnLxplfU25NKx4I2Fp3j+2jMMwpJUa64PXmOkDQvB70HrRjkf8QtJ7UQEQ96K2fz9H2HjV75AeDMjmOYQjcjSLiuJJikKlobXuRl0eSPocxQuEYcRUZIy0wVTM2V7d0p/ZYUiWGAygvW1M2RoOlISBppw7wbpLCeXLkYfcjS+SU8ySm24uj9h0XSQbpsL5y/wh8Ob/MLhgBtHbT7+4UtEQb2hmMkVz3zmeb7wr36JP/HDH6V35gzWaGaokx6asc/ueZshgRVlqd8KUt517N27jSn9QK0+vTvgHpfCwM2dI0bDAUFgHU5BoFAokjhgb2+fycTWWUiTGG1wpRsVSdJGMyEvS4qyJEkS8twavoMgJkkiumnC2uKijfdNY9pJTByoev8qmJ+FzANoIEKgIAoVcagoIk2WW0O+NhpKP1+svUsp5WrRWidYq9UCI8RxjBjLlqMoQimFEmtbbLfb7O7t0Uoj0jgkdYBbqb7Hete2sV603rLHj2Ok3G6zs7bnY+pJYwX3Wrd1BL7dG33vid0d4lgPGkO9ZYPTBqrB7CaXX5BECHVKITuUlCgTNsaMXf38PSqW2rAH1dNXu4pmpu5nqfCr4qu1qcfbUGvntD9Gqolf73mmTQOyxT+mdlEINWSX1kCAMUIY9eimPYrwkEk2IysMpgg4d/qRalGu91pzD9WYL8ZAkUbc/N6PsvrFF+k+/xp6OMFEbaJY0e8ldJWhPxjSz7e4bjJ22msMaRGHCaGkBCpjOj0gNyXB6im29wb0koiZzpHJIWtLixwurZJefpZskJGrBRjNCIKShbjHG1c36S6scatcot8/z+/v3ODXBzNe+NJ13v+Rc4gYyhns3brBref/FZ/8yFkeePIhVBhW8cnGjxO8ScE+Z9PHYceSqeK07fBQvMXsexcz0t5KfPvce8s1bO8fMR4N2NvbZjIZE8cJKgwIJWGx32d7a5vDoyO67ZYrMCMYDXlRMByPKHRJq90mDFMmkxkgrK6s0e+1OLu+yuryEp0kphVHpEHYGNzeYdJQ3TmGwe6rAJDQbngZhyGzsiDXJUVhGasHXZuqK4RhaKuAhRHeSVMWZcWSvSNQiaLTahGsrpBEEXEYEioLuF5h1cfaMt+djQ/fjoBWh7rJ00BQc6fDqIvoCIASzN0X9fewzBMC+3zKsUyr6s8/f2Oi+QVY2YL3EyZ0jK2d7CMW7BCqGbOPQrHbtVv11dv/7EKpsKFG4nYxsIueL4ZTsy0Pnhpx9RXs0zTYr/HgXEcz+IpjxjmGlGfEGPyW7Q1FmpX+OtfHhwxmmlYohKZNoNLKfu+PrRdk4xax+t5lFLL9sfcz6bfpfO052N5G9xdoiyKKE5aTGe0s4JRMuLX1HNvxCjurD3LY7hO1OoxnY4zA9GCHoN1nKkIrG9GOQ2Z5QSElrY1L9GlxUIzJSsO6MgQqZmnpPL/6zKt86oOf5JnRjLSzzifOlLxcHnLttZssry8ShSVnk4Tv+MHfR/fUWfdefOlSv8RVhjSq1dBrEW6tqbV5acRufwvNC+9UhFo1Ho9ytna2yfWUJE3RuiRNI8IgYWf7iG6vx/LKCr2FRQIVoIKQcpYxmc0YjceMxxO0hiwv0dh43Y21d
c6f2mCh12JpsUuS2LjbOAyrXHvED7l3rir7NkeBECpb/b4MDdpoW/NBa0pXAczoEB9sbgPSld0i3TnssjzHYEiCCAFSt5V6pAKCBgLYyTsvd1pX77rWHlelfaFnsaCgnQmhmuCNRVF5poVywfNvPcDeu2JcR9Y9KXjW7g0IOH+BrovSiHNF6SGb2S10AONwSDvruHhY56AypioM5oFUVc42byNumDBwhWvceT4GVkENiBHo0iDaL72mfg5n963hwZeAtLy3NggbmpHJNADX4rxbfqKIdgRFIbTjFPJOg2/XC1A9NMSG21U2fmeKEhg88TDZao/VX/1N9HAPQlD9FJ1GtENNHAS01Jg1c8Rw8iK7+8KObnHYW2e8sM4UMDpHFSBRBKFCRx1moyGDyZCjNOKUrHNjNmB3Oma93QelWV0t+OzLr3Hp0af50u41diLNA/2AQVchEYRxjFY7dDfO2343Zb2Y+Lq81Vjxz6SrOVP7WOtIEjtuFL5K253kPQO6XozAYJLRbsWkrYTJeEQYhoRhwGymWVpdIYoiFvuLKFHkeUFRFuRFxmBwxP7+HrN8Rjaz25IvdrucWl9nbXmFbssa7dM4Jgkj0ji2CRRApTYfR6o74YnUv9QLvbXZRaKIFIBCh24Lc2rG651+SoS8LJnMNCqwIC0SUJY5hdYo0QRBaBcVperbHjPb1Grq20uz7mulGjlmbyr27J1kchvg2pW93jC8Cn+6+/h6D4tDqoZ9dC6ioXKIGeoyhyWiFUYZhIRkahgHt8jakKk+iU4q1jP3htyvdi+u0n3TrAngvje62rXBuDZo8VEIhsF0zMuj53l68cPVNjxON6vZF3bqB/idirWD2bJi2vXi4RdQ95JF2/eez1gIV1hcez+vXP06mNzGkNM4tnLwUauBzRq1vl3Y2NXZ2jpb3/ddLL/xOsXehG5eEKysweCAMtek7RaBNiQC8eyAzuwGo8HrHLySMli+SPa+x5iMNEGoiDqLTA72yMYTQnKSxQVevnmLxW6X/ShhWB6QmICYjMWuYmfzMheW+lze32JShlzMI1qRwUyHzJYWqcLCKp+HdVI2n7eOCWl8VnVFHVPtJ81bBRHds+QI23ZT1ep8+zPsUaWG4SynLHNECcPRkG63S+Eqh7U7bUajEUVRMpmMKWfC4eiQo+GQ/aPDqpatChRnTp/mgXPnWFladqFcQhxGxGFEO7Z1Cqq7N8BWVWoDzqvbkDnjZ8MEIfUnlbIqdv+xqgeC2mosgBJFVpQULo3Z2tkCtDHkukTExh43TarH+7IC0rsYVu/KdqknigFKU9tmtRH73IJTl8zchYRmke+6IMv9Jr5ql69rUPVt8/0es58a0e6dBiymF0l1n93iJgfBNqc4N/f+TWUT95/a80w1KcUBK64+gv1MHZvYPmipHba4OtokiZ7hyfbTjXHrDQigpURVdkhXE5gCm0emKzt2DSEeknV1PQli1nofRMsMwpcRpQm1t1ea+QkjvrC6i5dwxzTcbXjWm/UW2Hz/U+wUmrUXX+LsWJOEMdM8Y1qWqLiEtIOsnCYOYmR5lbS7xqrRqP4q0yzipWe3mA5HqHxMGEZoCcjygrCTsNgydEaa/SCiCGOK9iocbJGK4mAXNhaX2Rnt8+qB4oEwosMB0mrb/A/xWogHzjpyoRocpraTS9Xr3qRQpby8LeDdE9D1+2tZqRnV3dri1TCFYZwZbm3t2K3SC0MogX2vgdDvrZFNp7RbPXueyZiMJwyOjpgVBe20TTttk6YxrbTF0tISC+02aZoQqIBA2V0a0jQmim6nZxWYuGaLWOteBUZzgHs3acZr3AaR9QpoIFSKQAWI1FvniZqvihQqhS/H8o2IOXb7Jrutx5ZHGudTdydVSqPxLLiaS5Vaq6Vm2ME3HBv8HhAxzG9i6DO/6ppeFjNcGnbdVfiSo4qQVrDMig7YUzeZBTPiMsZv8G4ZTz0h6/FTJyz4SAL/m69/BV77MG6rHNvKJ/uP8
7Xdr7CRnmNZrVS82qAtGJgaDuw15pfE5l19eJs9VlV39JASENMJFphmA9IgcWBdl4r0x/lFi+rO3lLtjFTeTm2gFMFEEZvvewx1dYa6scuhGTDt90jef4kSoQwCirRLGYYIAYGxNQ5uvLbLL/2Lf87OGD7xwSd56gOPI0XJZHhEp50QnFkjHV2llUdsHpYUeUC4dJpsPKSv9tkd5Gx04HBW8ML1Hc722yy+sEl8sU+xmFCH1eljT+gK5DhtorkhqV2X6zhnquXv7vIumhf81L37BC0xbO0OmUwmTCYjtre36PUXGQ4PMaVhNByhy5LF3gLnz5yh22qRKwhHY1pBiFIBSZLSbbdot9ukSUIa26yvIFAIQpJEpFFkh8Wcuj0vdZm+mtwqvwgee6q5844/X20TuO0sAZtJp105bVOrY4I1L4RB0GAWb81q7yz1Kj1/njTC+e300PiQI2dUNO48qZ9TO2bmy7EcI8H3j7jtuq2Tqwa52k1Z7xkG82+wyVtBaKk+nfyAo2iTZc5XkOtByg6BGu6qrdErUC8d+DcAy+hK21C+kA5wvnWBV1uv8cXN3+Kjpz/GsuojZR2A5kHT/k/X765yqlG9MHtPC7bK2A18PGT7cIuF9Cw7g2cJu4vY9Nf6bdvQN1W1ulK3q2XEPVXDZOKWbMpQsbWwQGe2wMvZEbl5lfMLK9ROP1WN+3ymeeWrr7P55hs8dL5FfzBkOr7KqQvv5+amIpCYpY0+ph8zXXkQwbAyzbl+dQumEabVp8xHdJKcrAxZCAsCDFcOJqy3u2xcz2jNNPlaUm/356NPGj1rYys9sNajwj+rfc9NvfTO8q7W0zVwZ1uHQ7LCaKa5ATTaWDutkTYGGB4dIUGICgIeffRRnnz8cRJyZlGEJqTUBWEY02q1SaLQ1iYIAiJl7acKIYkjWondyuZOUuHjHfqrYSQA483s7vDj59x2/fkvm0uPuD7xIO8N8TatOayO97hr3uJlvnPws6yrNFRAXxofXuQxtxlJWluyDMZtVmkf3ABaN5/o/hC7T1/tjqpVfYMKrMPKLbXgdk2ox4aqAE2MIKJZVOfYLl8iDyfERVodKt5JiSBGo4yhdIy2WV2s/reiTxZwsaYfsBEuiYo42zrN68VV/uWr/4Tvu/D7WY0uOC3Ec7Q6c806fPz786Fp1ofgj62Zq+PWRkBZS/JS5yI3d58lSRYrJ3CjF52PwvdT4ynckPAAXttDpdIYJImZFDnT4oCVx9frUeZC90QCBrtDXvnS1xkcDTkaDEi7bT7x/vM89qGPggpJWjt0l2LUYkIhOWICDPD1o1d4/uBZHtY91pIPsbiwhCkyQqPQeUEvzogDzcAYtrKI1X2NGhyQXFglj4vG2uLbj42QYt7mfif93bPhu8k9AV3VfC+N1aB0DTle9NoPuqIMGE1mlDpD65IoiikLjS5LstmEdn+J97/vKZ5+6ilasaIsNGGc0lsIbdZWFKOUsvuSGbs5HcqGYiVRTCsObYGcaqVy9zfH2/xWYs+vnUlzmkWtQr4NBnmgFhF0aZxFofZqB25XYX+5u13Sf3fHexxvubO/OgJbeV/989R2xvm1R7nGlu574+ZFbQW5vwAXqF7cXN8KiFEYVTJLpyTjBFxkSdX5LuHBAocFQ5tBFrJQnma/vM66PGhBurLSWubahHEP8p5MGzHOFuva5RyW2jFUF0GGGHigdYkb2WWOJpUl1QGrjbLAx41WykrD4QaOnzZB2jS+AZ8tYzC0kzVCCUmTHvMZWn4AU11BxCYO1UHJvqObWlZ9jg473MpvMusN6G48YttPAArKXLF3/YBXvvIcxuSIlHQ7Be//9qdZO3/BhlsazdrGkhuPZYU0BsMXr36Ja7v7jOMtzpuCh9Qn6YZtRBcczaAVRLRUQS8fMZQuV0eKM60u2UuHtM53oe81The2N6f1ut99FmjF8I+lit9F7g3TrbyAjUY0Xk51TOX1BAxkWUGchnS7HbIss5W2yoLB0
QAliqeefJLv+MTH6bVamDJDVEAYCGEU221sXBlE5apEBWKIIlvgOw6jBtjfYXVqjBE7Eepeu23V8kVCjLmtk+9ClG+/H0327H7z7MYVybFRDu/wgndbWY89szHzVj7lJoqvE1Ear+geu6kH6WPLgKnH4/0lTc9pk9WIQC5oNWLczWgNFqvJbGOrVUPNBE/bDIZELaKKm2TxhKRsOzA2FVh6rUgcIBsRqyVIfe+q58XWtKihuh6TvbCLMgX97mkybJhhifZ7XzDv+Gw4yKpCPn7vNP9M9gp+AbYmF3teICEXly7SCVvk1TXrySLOMehB2mYGOaebmXcu+YAHgDLv88YzrxJ1xzz0xHm8jgfCaG/GC194hVuvv0xnMUDiFhvnNrj0xEcJO4vWxGUK6h2cNRA6kLRj+WPnP8yo/CwTnfHi4DISdnis850wVUShcGPvkFbaYSmGcDZmFrW4VqScS0IOXh+RLge0LrQpZdYYM42x3jRJiQsTqzr9rSftPQ32uY2FeYZk7tykIBDa7ZgwCtnYWCeKIwbDQwaDAR/5yMf4rk98ksVOhJLCmgyUqsLJgiBwCQZ2C/I4ULTThF7aIg1DWyH/bh5+sQxVy2+PtH3D2CNezWUulMwD7vGmvJVp4R21z3hTxt3bbBr9UHETD8rNMxpr5zdmY35viDGN/C1xmWPiHUCG1mwZk08Z9Q6q2FWgnmxztihlr6dClsPHGZY3XGSEO8YzTgcq4u7jy5DaKdtI6nUTRLDs1zJg/69Ba8NitM5G/wKDYshWccTnrv58zVq9RkINDAa7oHquauOA5+HXO9Xs/l/1uz3b/RCrk8x+7+pB27bX1ut6fKhqEjmO6zsOv7DovM+bv3GFpVMlZx5fQ8UtQKHLFtdfH/BbP/85nvvNL0FguLE35NTFJR56+n0ErR5al2htNUNjjI36MW7hMHb3GG00j68+yg898QfptRd5/NwFvvPpj7HxUMReMmMqIf3FHsPpkM1ZTBCltKUkIueGTkECrr455OZXDzAzt937XP1jRbW9U/W7/9yZrd4ijvKegK429YutgLcRnO2/rypWGduQKLT2s8XFHguLi7Q7KcaUnD69wSc+9jFacWxJhraqRRAGle0WBVGgaEUR3TRmodOmnaQEnt7OaztzP8dJj278eAXSp/HeCWAMTcXrDve5g/jmhEoIxJoSfIFzwYKvz2KS6uetV4Tjj3a3O9/pKqU2NKsR+HdXUjvSxPVVAFUqdBjcnWS/l0V8Wqfn/X5zR1O7FzvTVZTOmS0cuRemCWJfodZUYOwdn2VRsne4R2LazIKBx9tj97Vqqx934g9qFMyx16z4rRsDNTSKMTyYPsIkH1MGitd2rtMNFxqqrzNJNFZGD6l+vzT7grV7brdheWV681BqdwKeqYR4IHRKf313PWOLe3qbp5amDuX71Tuk3Ug2C9z6yg4rD4Qsnu8jStBlwOwoZf/yITdffp1Xr+7BIOO1V/b4xHd+gAc/8JQlA6ZwC1JZLQA28aPEFqhxLN3YNp3qnOaPPPIH+NQj30WoEjIO2G/9HHm7wHQ6rK0uIeWE/RmUKKQoECnZlog0iXh1c8irX9qhGC4gxsYpS+VkPr6YQB0aCLdvz1TLvQkZcyu0EeN1Ute85spbvYaq4Uqg12mRRMJ4cYHTpzfAlCx2uqRxhNGa0jEFXz9XgEAJcRCQRjGhqu1v1bUbasGd8MEc+7wZW3z7pLmd2ZljF77bufX3tfUpEJyd2fWdcXatBqGs++gdSpPJznmbpRofjQPsP14lhDmVWPkXpaQybWqnJ/pIhjC6L1HXAZ5zBDkVXxD0bIYkIYaA1niZcX+PfHlKeNhmlhxClBBPYs8cKrvBVKVkaUIawMzskdCByrtPbT6wLKQC7WoPMxdfay9nKtOo+KgWdFXycTlcop/3WG9tYJJ9Lnaf8uwGH2dqGveqFX2DmXOuWTFVLreq2uD/W4QhV5fbPJoJL6TGRWTUjrO6WlnTL
Xn7LNBlyvYzB4SLR3TPLNhkoGmLVLqMh5sYFfLlZ1/lcG+f5ZUWf/JP/R6Wz51xhEdTp8C7CW3sFvEiLo3f1ZRoTvjFZBmREm0KDodbTK6PeX32L3h4/XtRyTKdpMXk6IgreyNOLXdJEUynx2A2pNPr8NKtA7TaZuOBNRbXBhhVWGivaa8L06vdshZ/3+WMNO3sg28FF3XwdKMxSujEAaEJ2VhZJgoUa8tL9Hs9WkkMmCo7Kwht2UWlFK0wpBUFNOMFvxGM+u1KY135Js4VwiBgVuQYravsLjthTBXR4BnwNyvmLr8DjQljxRcnFxz5uc3U0VQf/W/3GfA6k0mzqpoHXhWFTMtd9uWAdrJClLUYt49oL2lMWTKVQ2YhBGUIBZjSoEzMzBwQhorBZEYrTeh1S4aD2qTgSsogDZuwxQ5xm0XadtioA9sWcSt605Hmk00fSS8xlkNWwowk7FJ5Np2a4q23FTBCpUVpUzNaz92bm5Frdz/v8C0CxeViyGnT5rqP3PAPgLftStVuj/9etG6x8/UZhWyy8dAarWSVYhRjpgP2hzeZTDKk3KUo4JNPd3nkYx9h8expV0vaWBXfaHSVxCzu/4Hz1TgTh9dYUGhKZvmAW3tXuLlzhWJWELeWWe2WnDszZmZOs78bE4URi60O13a2WQlismJo6zwkMUvry2zOBLM9Y3mli1YHc+BaEfjqYb356O6IcM8y0iqVtHIUOKZFFbhSsUafiioGQgU6CuimLaLlgJWlJaIgQIkbCAbCMKjUtCgISGJL/Zv22DuBOnf8xH1+jAnfdpxUY7n6+/jA+kaB14eJKeWM8L7oDd6gYG/k+/NuN7jNDm2OvfKKNZvmn/WDeOLgGFGpDaWf5Vjmq4yai9gAy9DryXq/iUv80F5lpB4EKiBlnSUdM8y2GRQT9FQxCm4RxwtIGCKuoEspGSaY2vGmZyRaKNMxSRnSuXyZcumTjFVzkhqMK26DqU1b1ijg1XY7L3zysS3BU7NVXyMhJWYynNFBgy4RsePI122wc82nuApKTPWO/VvU1As7vl1UN7H95Nj8KE5YP5iRdkKmsX0Ony/XaJ29urEMVKHQwUPMdgp2936ND3zqQZJwmYQO02DI9ZubjEa79PpCsrpES8GFh06xePosWvudd13MrnHPh+BjyRXC3vAWxRSWlxeIwi6agsF0i9dvvMju5lXaSUrUXaTT65KVuzx19rsI4yXackQ7ajGctRmNY8JEEesRD1zqYmTGZDSh1CmGhCiYkgczqpdmvJ3XL2V+VPk2vstMt8KAyrvvmtZIRqgKv/i0DjcJlMJtyCjEYUAodi+wssgJAlv+sCw1AYokimjHYSMEp77HsZa8JRG705bk1WkNFNZ+bt7pYX8bRE8JiKvWVRSGJLaGUm8Ks84t79u9C61uNMzgTAD4kO3a0udFN2xSHop9eTolysWTugsb5s6fc9zTHHL3m9R5W3BMYxFoBUuk9CnMiPFsh/3yMgN1jVbcIclmTJIWWZGhixkGCEpNy3Rpq1OEwSq7acy5a3tcO79M0fR2u5vo6jNvQvDfObaJrQbmTQHK1JzVK5JL8WkYZ4iqVerSGCZ7Y9orLoLChZD5c7TRVRH8pqnPmySoQNgzEf+v8GZPeGzf8MJqna1YazqGIBdMOUGnbSR8gnbnFFeujbh163We/PDTqDxmOp2xc/Qmw+wKg2KHUw+eIlRtbjx/mcef6LPY38BUjLVJ1GojiQe7w+yQX/zCL1BMp3TabdbXLzAYHVLM9ljodlg5t0Q3bREa4VTvYXbzDYoyIzC23GsSj4iTEcsLCrNhUEGKSI6hQ2fBt6HAV2ezZM/jgqraY5cgwZYGfWtYvXfJEU4tuu1j0wCy5qHuN7EKA0oJJrTbp/jYQ8EXhRJCMbTdljh1uMlvo7m3hYiZqr32jne4/u8A1gjWGaVErL3ab88T2FdzJ3ydZ7bHVTnroS6wNnKvApm5M+bPbrL8aldZp3pU7NjUN
l9kHsDvR7FxsY2+cPN7Xr8QRAmR6bIY9ehF59nNX2AsW5hsyERnKAIWow2SbIEkXqITruHreOUxXA9z1m4csX2mR6lMxQvB9bXxu0XUQOvfQ2UfdWg5HQ2IuwliIgJxZjwJiFXAzBEOBbTKgIPBPu2VFiKu1I2BclKgAo2Ko6qco9XMtatm5u2SNBVo6nAoTREEbCUla+OSzY6tilZODEEc2lT9vYBZK2CkHiHLe9y8dpMb166wd/NVPnDmKYp8wPbuLYajWxz0rlDGhnLrkHY35MypU2y8/0HCaYGRkMloi1ZnCbC7w9aV0zSZzrm6e5Vf/8oXKKZjJAQzHpNdf4X1jS4rp1bodgypBKynF2knpxGJOB2vginwtngjPs7aODOFnQ92Vww/Ouo5ZkS7QVKp73UtXReD0lzI7yT3xrxg6tfWDCyfG+BQ2UK8Q8N9ZNXWIECUQmnQRe5i8/xwNiRRVEcmUIP53cKX3m7vsOb30vhv9WuDRb5juQsjPd5EVZkVCjvhGqBvB0bzkg4Em5817L2VcwZvMqiPOdYM6+M21rFiWYtU1/WMp2nr9V2isWxq3hRzf7FdwbJ9JXVBGW/iqtRk30kegiRiOXkfI4ZM0xbn1YMoo5hGE9qqRRKcAl1zREEo05itFcP6lSMGayGTOKYMbEpDu+xiTMFIjRomGheOZep37X0VoidMy002Fi7BOAOjmFx5ky+88Caqd4MPffRjBIdjDkzIyrmHiIIIfEyr1sz2dwgWFYS2pGgpJdrY/Djl3r0lPApdwnAoqLQA7IYAYawQKdlZCHjkypCDpMssVBAZpodj3nz9kOWiz/r7NljpdMkG13jj+c8wy1OKouSFF75MlArd5YhT504THk251XmDiZQsdk9D1OHg5iGB2eLW/pA3rlwniNoQhPQ6cGnpUZZXToMofvnzP8+1nS2SRDh/vsfiQookPab5Nq12QVcpTqfn6SUPoVRUzQdQKBW7cV5TB1PZjRuj2iFtIznala9sYI24seIHjzd2v9uOND98fEVKv520x+BKXW1kdHiwsZWSwKjA2qa0ptQlSFCV3lMCURTZznAzxrjrNNmKF9vh7wwu7wQdTSY4d5W3ueTxa5nGxXTj/NJN9NAjgGuvEUNJCaiGne7O966suO6fABt72qySNg/AjX3OaNiNq4LkUsWyavEA3VBvcSDdcJjeV2L8VPKeeCrmK+KTBurDfSEZEUFmwpnwKSJpIQjtUrOfb/P84W/w9JkP0Jp0kMJGoEwHU4bDQzZvHnB+U+CBZWIjtGc9Hrs5oygLds6kXL6QoXwBJBUQByGiIqIgBq0pCdG9i0zL62RHKd3uKkdHI67t3WRwZHjh869wvtXlYG+Xw4nm3MNPsrC8QKvTRZUlRpd0l06hdYEezoiiHpPhkHI2IW317QaoZW7ftQ5BItg54MrBlJtX32CkQk6ttXnikQXMguLaRpszrx5y86EVpiGESynnz2rCSZtve/ARhsMJeqnNRz/+YV7b1jz36i0keJmzTzyGMSFIyVrvQVbMJXJdcDO8SbeIMatbFIeajkk5f2GB2QzyWUkaBwyzq0x2rxCGKavrMStnz6KDjHarxSQ/JFJH9MIIgAdWPkAarGFMgA8D1GB9QwROw3Ap3p5wGcNc4gseWKHOPGxoglRQXKuUgDYl8hbb8N6jkDFVgyz1v3LME9Vg706lsp9jjCvyIZTGwwIESuH3IbP34bb5Lqau8fSN74zbAKbbL/2NsVx3/PFzqgVp7gtBBcruMFGxKgsEpavQ1/Br3UGOLwcy91FzV1u/waTGVb1qGk5M41LGWNDGxSy7h7FfNQL571M7g9fFKr3GMX2vmPkMPV/3y54jiFacCh8nolUrI0HE9Vs3+cxrv0RPRzwanSIONyDLiUZjrn/9Vb7+ylXOrZ1mLRbObawx2r1MtH6RpTOnSf7/7P1ptG5Jmt+F/Z6IPbzzGe98b87zVHNVD+pBQhI0SCBGA8YgY7D1B
dtr2YYvXhgvjK1lY7QYbMRiYYQNAlksMRs0oG6hbnV1d01dXZVZWTndIe985vMOe4oIf4iIvfe5efNmVpeyqrL7PFUn7znvu4fYsSP+8Tz/Z4jpjAubayjxBflXyyVlUVA1hp29fcpqRV2VTM87vv/rb5KPtzi/fcCb71z3A2NzRHJG8Ve+/jqXzw5wScZbV7+J3LDUxiFJTpZkJEqjdEpd+meeDAZoBdbdY6Rhb3ePwWjAwbJkUQE64ftv3cKuFhTzFU9+5gnkluU7f/X7PPXKZxEOGb814sg2nN/OeObMRfKtx7j5/nUq43j7zdeRd+9i1tdY7Bme/flXaHRMbBCvTAE35G0Obcmw0eyVdzk3uMzG9AzTxRr14oDh+gb3OUDbFCclTizjwRnq5gjBkNqSyWwLbIWTjHJ1TJKM8Y43gzjl/THRZ+Hh15fBbCEwxPm6bu44urA+WpsvTsZu5sSwPL+b9MNm/Un5REDXBI9wG0ccPldEHyknM6569IPvF+/MceF3xHNr7eOHpIEPqJ09R100J+TE59Ho653Tb0D8pOf97xe1/gDlcAKWHx4k0v+sdYC0JkuPVgnPZJ3zyR/GegLbOLqstYdf138QhsQJm79/gmvphm5YdFEj/fKF8Xvrus1KXFCFo2e903I/pRKtIyK9Je2kPJme3n+vDtcYUj3tda3DGsPL5z6DFsXy3vvceeUimdpjbEZka1s8c3GT7c8/j1kYNsab1IcLdo6WHKR32dbnqffvs//271C7Auo5zlZYLM425KMci2FndY8r+hwXn9hAacNy8T7Pv7rlC93r8/zMH/xsG4nQFZb0hW0cMYvMIsZy8zt3+eW/9ttYW1LXJRcujHnlp59l/NSQyXTM2E0Q7a2k808POLx3n727B1w/2KMwJbuLQ9YXh1zfP+LoYJcnL24z3zVknxvy63/jr3NULLF3d1jdPuAP3LJ8+5ldbP4U9tvXUa9dwYhpHekHyT4L8z6obd4+/Abr6gw6TRCE9ckl3PgSANpmLOoDVmIomgLbHDLSI9b1WdbkItlwDYtjYQ+5J29T2ANGTLw2K12Nivatizsx62O5Rv9ncDBGmlRsOL+X6BFA10kXteFjnXW7YH+YfEKOtPDSnbQe/xhfqID+hoARhCI4Rm1OWxdoBoUov4Fj7AAkoYsJhBP9SRd3eOImEgHOdTSz66cv9kT6l/yQznvw497C8WHabTzInfhOwnqhSQYD6qqmtg6LIQNEFLWx3jtNtwjYEy9Ves/uejUm/HcxrdQFyuFEaF0A1jiWTLxUiCu1QStpx5qL2Xqu3Wao40E/TWJbsPVb6USR1trwjqXeGdb5PeFsDI7vvlVK8ezWy/zG7a/yvTtzdHrIU5MXeWb0J8jSdUZDzWq1Yu/uDe4f3GJ4/SbVb3ydvb/wX7KxLBj90ZdIfv4raLUOKiQFi7cY39u9zlFTMxtvgk6xzjKazXp3b1p1IiQK9+a8h2ClQNC4RGjI2E5g/cVLfPZnXiSbTnAqFquxwbLxLPN4E849fRbnLIv9Y26/fZ9R7tgpDrg3r7h6/QYbgyW7teXm0Q6bT6xx+cVLyOE+Q3eB945eZ3f6Glm+zs3f+Q5PbK3RXBrjaMgGOe8uf4vRYJ2iLrHLPWR0lsFgAsRxCDhhoM4wSM9h3YJ5ssdd03BGz9gcf5ZAYKKANT1BBA4W1xhPL4IVnCSoEHkerZkTgc/QhkyeANYwHrq43La8UGhf3wntteCotIg8iAKdfDL0QqRJpBcDKRFavXlh/VJB1DxjZnPIpIvJiu3g9s4FDx6roqRpHIM8Jzw9/QD+cLveW6P3jT+2v/Nvr1Rt+FpOfvdIcSf/kZOg2h7V+1AIAzwuTuJf5yjLWaGoqwpxBLC1OOsdHNF68NMnOCpdpFkifSAdLRuXgNaxGTTYqN3FSlTRERc12LbNUbPtaIQudVtwTrXff9qkDabrabXSq+oW5WRsc9/dFf7rQgUCJ9zZ+x0WGM40C
c+s/V2cm/wBVku4dfsu5eIuzhwyGic8/eJFhl/7JtV33yXVQvnUJvOnr7CdDiBopv7yFkTz/fn3uZxdhrhTbUu+d+ZwF4IZF9DAYgbfQFxpN440s2HG5f/pzyJr6yEE1ltXNhS9cURACeMrvPwzZzYZr0/BaYojYfCda5TFPrdNzUufvcL08hnGZ0Zkqz0ONm4yr8do1VC89Q7l+gGLxV3qe2dwl8Y4pyiLioGakidjdot9NvIZrlmGeGO6gdXTKpWMmMmQelAz1hNszK5zggu7ZsySixi1wqpjtFtvR3TUeuNFY+JPmEZ+ELTbr9OBiANPK4Sh4vpjJHIRHsVc2OfuUbnxnwzo9qrhWdUyBK3t201SCf8PfEkYLCp4032N1/DyQ7FXax2L1RJrV8xmM7IIvAFIIuC2sBk7qAesffU/xrH2tcPokBQXS1h053Li73CNVluXVhP8oLgTv0VuFeJ8EBIRtEooaaiamlQL1BatNVb7LdgdCt1SNyczX9wHXnRchrt+N67bcyvSG911uk6K7Yu7LPcngbORXvCfN59C0IWOaPJMXGcdiYsx2X3IPbmaxo0qRUCpjOFojSujz/DUE79EvUiYH+zw9rWvg1oxnAzY2J6SZuewwSFX/NN/AvVzn6Eq5tgXnmY7H+DTaXu7FoifyLvLOzydXGhXdBcA1//fv5zBcEBRlMR4Utd798pZFIr1e3PWDixXn52iderDoqwHaxMAvDs/joHoghJWpiEd3uXg6gscvfsWs/E5zly8xZVXNdPLgpZDdg/eYKAu0zx/gTvufWYHCaP7h3B4wPBMzreu3+LFz10Jc8UxlG0ae8wsGaHsgadFuyWRCGrxPfjCP4pNLiLlChkqusJBkX/XrI+eIav3MBKAsD83pJdgEf9u50A/36xbZKM/qrMSwzEujqPueh0KPVw+megFz1pH8A+A6xDrB5IhapFdSFIbaNMSi/6lWAQb9kPTScpkPGQ0GOGco2kaUAqVphAypoSuaMiDYNpqo+333QuNq3rbdyFPXkUt2gVDrbeK+Ukq4YV3oNUHedcDvK4djlhryAHO2pB26bnrxglV1ZAqS5Ym6MaRakjTBOUsBjyXJwp5xMttQ12Cxh/v0WqzrSfygf5wgrPe7I6Z9idCOHxYia89636QWsQ/ORJDpDzAhhKa0aqKioCEAuIxHbCXPSFKk+oB65Mz6Hyd2qTcez9h5873EXPMZKI4e2kDlW77E5wLxd6tL2Kuwb7wFN2LCPHZ+HkRX6tDMKrEpZNoJ+KswtKgVdxix3F7ucv3Dr/PKxsvMVIZYiOt5BiQMru/S5lYdi/MaAJF0qVGdFDjrcqoRTva3W/FYQyYo4Ti/l02tzbZP3iHF/+QZjYaYe7XXP3Oe1z5ykukyVlQU4rN7zL6x17jsf0Em2gmz55nWwUbTbwWnqUpWbbBcn7Mrb0jnjw3bp3I/rZxUvbnkEOnA0jG4VotHHeACtRqE2Ul+L6kp+Vqun1Tuj6E/mJ1EoJb5S20q2OEpf8hoMKz/YhBt40B65mqnVbfe7hgFsV0WDkBUNEpAPOqYmf/Hpvr66yWwnCQU6xWDIa+KLEYQekEhYRNJaWdI+1c6qFvCzNRucZr0JHHMz2tptVihRaAg3JBV+tB4l+0/EjkOnto28GbdGY6+J0YnHdAVtZyMC84Pjzk/u2bbKxvMMwTNtbXmI6n0NQoYxnPZhSuYjAcxDH5AfhtTcXA2xpiaT8XCpN3LXO9k1yIC7dB0w09EL6nDdvrl4L9tElk6LoKfNIBMUJZQyIOG7Y7T1SC1jlJOgCrKVY1toT39/fZ3Xub1eIOlbrHM088R5ad9wubsx7A2hxuR5cM0dPfoikRssa6d2nZK49YmCOq3G+mUzWOa++/wcb2mM3JxbA4CHvFgr/yxt/kq9m3ODO9xNZoxtZgyiwbcdHAwYbh+TMvcnVnH1xCFyzYlsYJ7YvAC/1qDJGCs/kI7Sy/8vV3yLO3+dwXt9m/f
5vZ3U0uvfAaid7EOkiTCbmesa+/z/j8ZTY2XqQoDhjnYNz5djwN05Qj23B0Z8XhsSM515Fj8T21jnM6JSOUJgpKjS8S5eGjF0Eg4HRQJFQXhyJ0IOv7XyAsAoL4ammONkzQd1QHrlFpCfuHQGgjbZ+p1pn5MPlkdo5QnYOq/Vf5yLU4SYPyRSQFVadqoASM8Z2kFSxKy7vX7lI0kKiGarXi/Vvv89STT5Fox+OXz7O+NvWVLYVWa/Fxv53W26ITtPWVY195EyR2cfdKXGhjjAnut7sLm+5Mi47jjCFX7Qe0vCseAKPGbaynUmpjscYyG49oVnOUMyRiOd4/wNaW+zsLVosF4/GAswamo4zRIGz7/ZCFNfJ7ItJpozY4zsJKTgDWGJUQxbYcLsSoBXA+iuSER/+k5fZpEa18jYuo7NvKMkhzDvZLjhcV9+4cktqS6doag9GIyjSsFnsczRcc7++xd/UtdFLyys98jtnmlDK5w2iQkGbjEIMdtSM/NWkX2UDbSNwOPVAGLeCGng3v7Tt3vk6W5CxtDVjeufE1itF1DI+zzgWO64J3ju/w3735y4iC2hUcLq6zV2hupAlN1ZA54eevvIQ+c4daVeSst6AV48WFWHK1R3u1Wj6ghDI5YL7Y4caNEe9dvc7n/+iC+bxgPXkMeeJ50mQQnkiwTtjefoU75a8jyS32i/uk+hC3EoaDLyNcwQKjbExR19y7fodMKVI1DDfvTVBCv0iEukBISoRi1VUX69Q8YhC/9OY14hfDlkNsl5UeTrhoAfstkjq/TvddRJaW0nAO8A7+B31ED8onU3shco5RAxMQ5QeZCm2P3yvUB7oKfG1c5xy1c0xHY648/gyiYDxaZzxbZ7yxwWwyJnENs8mYLIDticyyQFPQ655W4r534c+2X6VbC9rPI6CeiHfu7vOBNc31NEd6iYGuu2O/dm0EOFK/v5PFcX56hZefvBT6rzu2CzaT9somLiQPerSEtjg2OMRZknBWE46NkQuRv43X6SYegdMNA9MEDcKePOfTJt/59h6FcSz39tk6u854sknqVlx/7z3u3L7G/cN9xoOE7Y2M0ThnOBqR4rjz7m3eun6P82en/NE/9oeYbZ3jb/z13+Jg+2/whSd/iXbbdRetg5j4GzqSjqFpF2doX4KTLgfQOeHK7DLfu/tNytUurD1DtnVIrYXjcpe/evUv8+7VqxyYCjtxjAcpgyylnBeUjSNphrwwvcLnLr5GNp2xqBcMbc698jpnh4/1EnRcawG1Ske764NFNKzcdZZ2l3z5CptnR3zpS7tsPbnL1L5APn4eDyWuN/YNo+EFLmXPUCXXfc1odYm6qVnI2yh7yEieo1Q101VOJksGWpHIMOqOoXEhUL3Vcf3oj454hQXpl4Dvn9dZZ54uEM/dtvxsKFjTOtNc7z04TmiOLYcf3me0zHtUEOG5/X0+HFo/0TTg2F7/IS3H0AFG56jqQLczIBwg2nFxe8ilrSdQ0fklgg1ApMR1GnT/frQXpN8rkcA/+enJ/j15fCf2Q/BFHvgrAuMHD3z4Bax0z+/oA1lMlOg7CfsA3vbSh7Sr15a4mNje77FJwfvbNdN/6yv0dw6zvpXQgvGDX35K5L//y7+CKDi7tc47b73FxlbOs09s8eQzm7z8uddI85wsH4DS3QJqDOrob3Hr3Rv84s/8IX5+dxPZ3cM9dp6b059jokeIs6G/HDjV7qkWt5LxW6r3HXWu7V9vBUaryEPLE2vPcO5gm6raxYojmeTYo0NG2YTZ2LG1Zdhq1njT7bFsDGIMP2/WWdt+gkuXXyHTQ5wIjdQ01pLKgNWy5Ja9yvnx40DX3taJFseWcyS5o0jv4spjmE84frNhPr/H+sWKjeELjPIXcC6Y9K4bx14Uuf4MIz1lOLpNY2uaZJ15UeLsMWX1XdR8g5yCjVHKykDm8l7Rn5NjyiGhnkhU1CIwhi2U+sjRpuF6UA57HgeglRaAEROXROKy41pHm+ti2
Ns2qGj+QYtg8csOqN0DPdGXT3w3YNd2YBB54O/wYWcdhAQKIXSMDkkVKlaaQwR0NHcfBPcPEekj+6OPfPiBD7HgH3q5/qN+WKNcPzArntatTl39A3CiOFF5/wPXlhML2ofLyX5qEy5w3goR/AaZPW23HcQBnZ3rwNhTDj1A/pTJz//cRdZn6zSNZTQbc+bcFsPRJOhVjqaN3nA9zhWe/8M/xRPPXeBn726yef13cGPDHzR3+e2f+0X2B3lLLXjlyYU9HkNyQn8qulhfIfZfn65qP0QBr6x9kXeOXqdJC6rlDtn4LIIwciNevPAis+QMXwB+48Zv87Pji1xIzuAu+roQzlnP39sEY3wu1uWNp/hv3vhv+fKzQ7azbY8VLmqpXaWxdCTU2S62aTiujhlUT3Dp2cf51je+TZMtGKYv0u3E2vn6o6UYFYK6fha1Wme0fhutFlS2BFtQVQXZ7QPqUcrxqiLPMjI1JvpN/JzrzPlOHoiTlhiZH0d0cKi1TjXX0oQtVSg+kcGFRbBbMeLzxEWktScfmGbhuBbPIt3gHnDafVA+sT3S+iZq2x9xVUJhXXiYh+BSG6QSzAcR6zsRi8KXpfP0xA+qXUXgkQ/9icedoEik9wh0hIEKa67qaeyxXR/VNtf78QtJGCTS8dJK/KvU4fcPvWJ48fH8/v27Z4tabbh7GC8dQxa5+O4aSoFOJFR28631Ffc6DtjakKr8KZPPfeUrPPXS8zz3mZe4/OTjZKNx4N+D5mdd+0NPoxelGV28wrVr38Omgpx/ArVzn0u/9p/hF8CoLYKHIdOCUQwGixPd4rrxFABBAhhbZ1tH7GPjJzla7XN39TqSrZHoCaIUw3xEkzj2zR6PT1/gf/L438nFwTnk/DOIVVjra/ZaDLZcsjxeIs5vD/XS2Zf5r771V7ld3A8Y12l2AOkgp8lXHJdzjuf3yUq4kD7FdDojHaw4u76FkPfSw+NCHBduwYcVKARDVW6y3H8GxZQ8HTFOYKQvMVz7LM69xv6BI7EZaT7150k4H4jEZDeGe5QZ0lkHYYz6QwIQhsWujW3ug2HY1se2kQzQ5WDi34wDF+KtJCo4dO3q7hvu6RLcR8CqfBKpnK8f+TIrIt47LAFUEgm0s+uALOmBSRfF4GsviO3oBE+fxBXug4D7UfDbj+F9UB7WB12lrw/rn4ddLTCf/XHwA8pDtwJ6yD379II9cdzDn7L1T4fF0IRY6D6nC2CcxVnVXd/hKQZ8dEVLN8TrhIy1n7r0aXSnncqp/OjlE9F0PwhiQUsiTHLpQOKhGz0+YLNKD8R+0vP922QOelpyXKF/KHn4BU4EeX9M6UKVTrY1Xu9BbfnB4/pWjHqkCn4qp3IqD8onlxwRk0B6+BmL/XZzNIZVxWMDp9VzJPliwv1zPkxf/VFJ/94fXACU9NjaH5bvlO65Xf+i/RYEjqoflfbg9/6L3mWj1i/SC7YNx5zgdMPHIZj+RDskZN+5ByumncqpnMqj5BNypPUnrldrVdKBUbfJRUeROBw6nhd+IoUek9RO1rn826E9htZ+7AtFwP9w2qHjfbrDfxDlvN+WRxINAfRiuEs/rOzkcR194KIDokfvKNVzhvVuoZR8qBXioK3TG62XUzmVU/l48onQCzrpyOhYBLjvCe5X8fHOmJMqYeQa4072ztoPw7ifaOlzpZ/oTR4pcmJxstaFalLx/AcrlvUv3fPgh4P713LB0fOo7JtTOZVTOSmfiKabJkLdgPf+RW03BhdHbVba//q/6cA5zuyeNhU13x9Wu/3dbufzg0i/StkHnGoheuDjgvFHcti90K8HbtK7Rvyo1y7XRWL0KRHPOAQaKDyHtV2BkFiWM/LU1gn6VNU9lVP52PLJgC7Q9ANjXQjuaMOY/MfdRin+IA/K8ZyHxhkQM0EeVejlUdLi+cdWzj7sPh/z/j3KtHMefsxbf+BA+cBXgs82Ny3sfki7XODUo5MshEH1aNy2sQqwpp9506MoAq8QN
jrBIaec7qmcyg8gn1hyhCOU9FUOQq5zB5QPlpyANtYPh6helaEHzONWI/1dO82Dhv2xuNYfEnD5IOD+8NJdsV8s6KOaJYDuLWRROX1YRlsM9XM29ncoDGPsyWMD9eAXyg/fE+pUTuVUOvlEN6aEYIb29pDpcqo99HpY7RMID8GOHi54B1rncGvv87to5w/CKHQhUw+Y7g/57WQi84MXotcDv3uJGTknis/0HXgnjhUeXOHi+Q9PnHEn2uhcSIrAl57EdhrvaXTuqZzKDyafjKYbHCxOpC0OHDOt4mSWYMZqcW0KYpu+8CBHGM9zHwS9H66RvRv8kFd6IGChFds7KH4n0tt093ctQtzT68HIg4+S8HqI9VZjiToHoSqntCnXXUnt3vn9OGSHDxE8lVM5lY8ln5Cm+6DzzH8e02Vj6XLPzZ6s4n7C7H/oXPbXtvbDs9M+rE0fokf/aCQAFMTn6zTU39WuxQ/je6N37EPu353cacrt1w+LCW7Dyzq9N2q4PhXThUTXU9A9lVP5uPKJabpx3iqJnGy/1rrrONyo/jlO8pPQal4x9PUDGl0Ej78tQbsf7rSC0LYHjpITvz8M0vuFfOg09pZW+Qjpr0AtRdDrnzYV+MHyOR8tfcx+kEp41MIkyqvpbZiZ+/CQs1M5lVP5oHwyRcylz9y6lhZw0G6R4otqP3xyd7UqfY0A5cIJEh10fZA7yQd/mPSDKR5yxweP/sij5EN+/7hysmbSw5vmAbstANjXS09yuu3C84O1ob97hf+7Vx6vH3UR/+gDtXR88Cmveyqn8vHlEyl4cyqnciqncioPl0+stOOpnMqpnMqpfFBOQfdUTuVUTuVHKKegeyqnciqn8iOUU9A9lVM5lVP5Ecop6J7KqZzKqfwI5RR0T+VUPsUiIv+tiPxTP+52/KAiIldF5A//uNvx45BPfDfgUzmVU/nkxDn3Sz/uNpzKDyanmu6pnMqpnMqPUE5B91RO5SdcRORfEJH/9IHP/nUR+TdE5FdE5J8Jn/1JEflVEflXRWRfRN4TkV/qnbMmIv+eiNwWkZsi8n8SkUfW5BSRZ0Tkb4jIoYjsiMhf6H3nRORPichbInIgIv8PCYVERORpEfnrIrIbzvuPRGT9Q+7xYmjrPxb+/mMi8q1wzb8lIq/9rjvvJ1BOQfdUTuUnX/4T4O8WkSlAAMp/BPjzDzn2K8CbwDbwfwX+vQiEwJ8DGuAZ4HPAHwX+mY+4978M/BVgA7gM/JsPfP/HgC8Br4U2/Z3hcwH+L8BF4EXgCvAvPXhxEfk88JeBf8459x+LyOeA/xfwvwC2gH8H+C9FJP+Idn5q5Pck6MYV/xHffyqdD6fy+1Occ9eAbwB/f/joDwFL59xXH3L4Nefcv+ucM8B/AFwAzonIOeDvBv7XzrmFc+4e8GeAf/Qjbl8DjwMXnXOFc+7BefWnnXMHzrnrwC8Dnw1tfts591edc6Vz7j7wrwG/8MC5Pwf8l8A/6Zz7r8Nn/3Pg33HO/YZzzjjn/gOgBH7qI9r5qZHfk6D7UeKc+6XwMk/lVD4t8ueBfyz8/o/zcC0X4E78xTm3DL9O8MCZAreD2X6A1yLPfsR9/3m81vqbIvJdEfmnP+x+wDLcCxE5JyL/SaAxjoD/EK999+VPAX/LOfcrvc8eB/43sY2hnVfwGvPvCfl9CbqnciqfQvmLwC+KyGW8xvthoPthcgOvMW4759bDz8w59/KjTnLO3XHO/bPOuYt4k///KSLPfIz7/Z/xdeledc7NgH+CD9bB+1PAYyLyZx5o57/Sa+O6c27knPuPP+Zz/sTLpx50ReSKiPwlEbkfSPt/q/fdhzkUHnQ+/JqI/Jmwsr4rIj8TPr8hIvf6VISI/DkR+bMi8ldF5Dg4GR7vff+vh/OOROTrIvJzve/+JRH5/4rI/zuc+10R+WL47n8kIvPeTykivxK+y8OzXBeRu+H+w951f087Hk4Fgon+K8C/D7znnHvjBzz/Np6b/b+Ly
ExEVHB2PWjynxAR+YcD0APs44H04+z/PAXmwKGIXAL+dw855hj4u4CfF5E/HT77d4E/JSJfES9jEfl7Ip/9e0E+1aAbHAr/NXANeAK4hHc6wKMdCg/KV4Bv44n7Px+u8SW8w+GfAP4tEZn0jv8f4x0M28C3gP+o991v4XmtzXCtvygig973f2+4/jqez/q3AJxzf8E5N3HOTfCm1LtAXN3/NPBcuO4z4Tn/xdAHv+cdD6fSyp8H/jA/uJYb5Z8EMuB1PID+p3jO91HyJeA3RGSOH6//K+fcux/jXv9H4PPAIfDfAH/pYQc55w6APwL8koj8y865rwH/LH5e7ANvA3/yY9zv0yPOuU/tD/DTwH0geeDzPwm83ft7hF+hz4e/fwX4Z3rHvtU79tVw7LneZ7vAZ8Pvfw74T3rfTfBbnl35kDbuA58Jv/9LwF/rffcSsHrgeIVfSP7t8LcAC+DpB577vfD7vw38yw9c403gF37c7+f05/Tn9OeDP5/2jLQreG9t85DvTjgUgpI7echxAHd7v6/COQ9+1j/3Ru/acxHZw2unN0Tkfwv8z8LfDphx0oHwoONhICJJ7xn+Fbxp9r8Mf5/BLxpf7ynqQrfn+ePAPyUi/1zvuhm/hxwPp3Iqv5fkU00v4MHvMRH5US8eV+IvgXbYBG4F/vafx8crbjjn1vHm1cfa0EZE/lG8h/ofcs7V4eMdPOi/7DrHwprzNAT8PnA8nMonK8FHMH/Iz5/9cbft96J82kH3N4HbwJ8OhPtARH72R3Dfv1tE/oCIZHhu96vOuRt4DbUhUB4i8i/iNd2PlMDN/pvAn3DeaQKA8xuX/bvAnxGRs+HYSyISg9B/zzseTuWTFefcn3LBn/DAz5/6cbft96J8qukF55wRkT8O/BvAdbw5/+fxgeSfpPx54P+A51a/gXe2gc+s+e+A7+N52D9Dj4r4CPn78Fk/v9qjEf6m8wVN/gW84+yrIrIN3MRzuX/ZOfc1EYmOh2fxWvGvAv/DD/OAv1/kK//A4y72t7S7l7rwN4goRBJEaZRKUTpBKY1SivY9OQEn7fnOhfPxuzl7Lg+sNeD6G9Y3GGvw6yqcbEcU124KLSLtd/6a/SACE46OwQUWEYcLvztncfFfZwCLdQbnDNYZrG2w1tLuHvuAKNXpZ/H5YtuiiICok3su+j707XbiQu8qlNKIpCidoiQJf4tvt3IIgoju+iLuGv6BNrR3am8okqJFoyVBax1+VLuZq9/UVqFct7mqQ3BOhXep2s9wDodp+7Q9oTdGujfl+/sv/pnffqRle7ox5Q8oIvLngPedc//7H3dbTuWHly///Y+5Psj5X50HGaUQ0WiVoFSCEo3opAXcFiQRJBiN7XyKwOU6Z3UEP3G2/duYJgCj67Xh5F7TfTCOO0R7MPDX6BaLsKOzdMCL+Gtb6wG2D7wt6FqDtbX/28TrnGzHBwN/+u2N//pdox8E4vAY8T84EZRKQCUBcBNE+guZbfu3388ReB+GWfEjUQrwgJuolCRJ0Fq1i4bvw4eBrsKFxbMjAHrvrV3M+vc+ueggDovlL/5rjwbdT7Wmeyqn8sOKBzw/myX+KwpRGlEKCYAraBAVwFUBghPpnRm0Oed61/Jg4JxDrMM5wSI4xGuh1v/LgyDiCBpbuJL0tLQeADkcIn0NFJSKYCs4pxFxWGdQSnBOgsZrfNudYK2AcjinvRYuH2yOv7Z7CPCGhrSAL+Ev18dYr9262F5B+caD83CGtaF9APFZXduXLj73h+iHHQiHY+QhnwNK9RrUtbr93rfZgfOg74jaq39HH3x8aY/4eF4bL6egeyq/r8U5r5oJCkSBCEprJNEoEQQdQC9oYUrhRHBK2okpCEqU15+UCtqtv34EMSVCVEJNQCF/PYu1mgcRJQKEBzp/r5OfwQnYaDVAaX9cUOMUkTUQr+06wTrTXhfnArTpYEp31ztJZ5wE3u67vnbrgc+JB17nvPJo6R0TrABnnQd8PFViL
aiHeZl62m1czD485P6Dn0eKo6MgYtf1qQKH62vU8fNwbFwA/QcdreOc8y+1pZo+pFk9OQXdH1Ccc3/yx92GU/nbKZ7DE+WBVWmN0hpQKALYooOZq3ACogRUBOsAcBEO2v/42SoEkzaa684DvLWeYxWrUaoHKi0H2rUvAr7HfNXjFQkAFrhbCQjX3t+Eg1R3rPjPFR0QCg4Rh4hFBCy2h6R97rID4KjVP4h98bOgrNL+Ey8lHd45HBJAy5/X8eEnyVLX3r9/b/gQ8A33eZAj7zh36doVxwBe6+54+k579W85vmHXtgNx7ftyD5zzKDkF3VP5fS2dliYo0SgS/xM126DFIhqUIEHD7VMKKvK5cfIFYjOCbR99lHfZYK3FiCH4tDz49DjC7l8VHErKc8oiOLEePwNouT5AOtsay9YSON2oLWusZx6wON9qAScah0OJ1/Y0TaAhguPrpCob+i2C30ng823qsKc13uNiIPHhBAKP6kIfWWuDpushLtI3Knz/IPA+2A7wjrATWm37jrvPIoT6Z5feqtBCas+X6HjQedapv71nDn9/HB/ZKeieyu9r6agD71HXOjp2Ith2zhwnHZhGx4ynFSSAoud622sjLcAo6Jm0DrEWkdoDoAkcRIgw8O2Stn0qauGRvw11x72GahARrNhgBksLcErZqGu37VJKeWBxGqxqtWxrFeIS+vZ0dPh5F1KIbJCHc7sufNc32yOedTjUa4uooDt2oAsE4FXeoRcda+J4mAJ84t501/jg9w983loosd1Ru5XePx3QRv3W9ekIce13/sk+hoob5JGg+2f/7P/N4YSqrtjbP+Te/R32DndRuealZ5/m2cefYnu2Rioppm44ODjg7atX+dYbb/LWezc4OlpSVobaGIwxOOcwvc7ToVMTpdBaiENJibTcTjTrvAqvMM5hbINtDI2xOGux1vnPseAE60JHtaaMBCLf9tYqv9KpaM4p7VdoF48KK60oxqMJL77yKhtra/zCH/h5DhdLhuMxrjGkaYrDcm5rm2VRIrbh/u4O9+7vcOf2bd6/eY3X33iDqq5wSuNSjU0SrHJYB85anHXtvbEG11hcU+Mai4lOGEDEoYJ2pXGeJxRvKIqLTJOERdhhe3yUcz0jWCSYdv5zKx1f5mjnVjjJeq4vHOz5wDgY4zB0gekK1402pFjEeUbR93/Hh8V7VAd3Pv5o/QREi/Yarkp9hIJKggmvvGZLB6MSuN8IfvFvHc4R5cE4esdbs75nbgv+PRtrMOK5T8Tzm87GfvMnxbFPDKeKQNx/gNa0jf/pm/5Rw1U4p7rQtPCeUQ6sQynnATqYx05SxBo8PeF697P+3Z+w/D8IdA5BenOw/azVbhXOCi5Q2bGdrcbrbDtoPeD751QuLkTwIL/cgTlBe3btWI4/XZeFYwNtYU88gzxwRuRvH9R2u2P6/fOhVHNPHgm6tq6xQFVVOGfRiWYwGrO5vcbFCxe5fOEiZ9c3GKZDsJb5/Ii16YTpeMh4POTNt65zf/cQVTXUomiMCYS5XzWNtQiCtQ5tBR14q0R5s0OUkIh4QFZJWB0t1ipsYqmbBtMYjLFoZ8EKxsXV1Xe6ITgSxGGcartQgkkR/6cQkjQhHwxwznHm7FnOnr/IdDrj3NnzfPYzn+PsmfMkaRYGnkKnGls3ZFqxWM5BUpxznEuHXLx4mfQLX+H+/bt8753v887bb3Hjzk0OFnMaJTTOsSoLmrqGYO75CQ0STEJRyi9W8YV4mzIoT0FTEAGnQLm4+EIwDDtNC+KcVdJpYF65coFf8+ep4HZpHSHi2+QXIbwzKOhjnfkce9BrROFwvMe/4/78G1FRvcB+nBH6SUuIVFAqgG8w5UUpXOhTEe848yB8MpRJt9qxQnSIeMBTAv3cI0UAtbiYWR8t4EPIfL+5+E4CoETQPRETHCXQEREwHgQ/CeATAdafrlrA8njrSYZO27fB3O4ce5Fnjeaz9BSXrikBANtxFNeOCJzSLhpdW
JZ3LLpeeFh8LOeDmhHloz3i/yKl8oH79pZG57pFogvV6/dLb+H6AC/de9Ye6J4E7K77gxJ+gmP+OPJI0K3ryo8P05ClGbPZjMlsjYsXz3Bh+xzbG1tsbWwyTAcIMJmMGOQZWZaSpQNG+ZDvvXuN23d3WS1LygaUEYyIdyQ4P2isCMaCQtBKcNqhxbNrKK/dJUrQWmFRWOdw1qIFKrzGbK3XelUY2DaMABM6wzq/UtqgybrQg0r8tcED0ovPv8izz7/Eyy+/xMWLVwBFnmcopTg8mHN8vEBrjRVFmiRoBfOiZLFaYYyhbhqGg5ymgVWxYDga8flXv8BLz7/KweE+y+WC9+/f4rfffJ0bd2+zNPiFyJl2QIngJ70zHtQkmlBxKIeFQsKkiF91Q+MDn/ux6Z9XxwGOw6gw0FqHbG8FF2jDlYTW+eL70gZrMvRdr3Ut4Lc3pvvURe1HPggkPwYRpRGJCQ/hR+ug8bk4qzqLi0gvJKgYeK988D2Bluj4V43X4BzSJik4sOI1SZcG7dMFALDBKuvfszOFo1gb40XjSH5Ql4sPJ+37ibDkwptSURk3GsNJR5o/wdudnXbsqROvsXd87yN6tuNSnbRrTQv+8XsXuOcIvl5DIty8e442fKz9qCcPqN/0ADf8dN9Gh+cDrY+WWftHvDe9HuwObp1y8mHHfLg8EnSrugZjQDR5npEPhwwGAy6eO8+ZzW2m4ymj4Zh84CsXpqkmiUHkotBpwmA4IB9e5dbNexwdL6nrBmOEBmisC1yRxTgwOAwKh8KpMOmN8wHPIZ7ED3qQAMLa+hVaKY2xAsZfTxxYEZT1WoTCA64K1kNccLXWZGnC1vYZnn3hBb70ha/w1BNPY2xDtVwxny8oqspTFqJJ0oS6bmisYzQagCiq5ZL5ckVT12idsLdnqKqS8SBnPJlgTENZ1zR1w2w05rPPvMBj5y7y3vvXeev6e7xz/RrH8znWWTQOcZ7asM4Gsz4MsmCeOhe0IvGxlnE19nGQwfAXOhMvjAcNpAgK7zCJ08k6F46POrJvgwTNRqGCU0N8tpBxGPwJXhsOXugTQzsq3Y7WcXGCr3Qfc4h+suK1WxUoBo0ojWvjliLoBq0evxh6sE3ROmmznZRSKEmCZuoRTQLH6wFDA55iQ/lFzlmNskm34DofbqbCYhdeth+vD2htbajSQwC3HwHg4nsK6m7rgVcgNoC7i5pugkjTapW4OKaix79PQEXL5VESaSV6QCot1+v7RgisIHFUtI5K1z1Pt0C3I+tD7+k1CG9R9at79Vp1YsGX1jKMT9SFhD0Ynte1oX01uLCItc/6EfJI0FVpSt1UiDiSdESWD5mMpswmE6ajMYN8QJKkkCZeE5IhQ2DTGc9ZWefjGbUiTTQ3b9/j6GhOWTZIIzhjsBacMW38irOeNzJO0FgaBKTx3eJcy53FiRwHfLf6S3AKANa08yYilrWuzYjRScpzzz/Li889z7PPPM/Gxhamcezs7OCcZblYUtQNTV1TNA3OOgaDAYhgbEOqU/I8p65rdnd3aRoLwRM8Gg0pi5JlWTEZZX4AmIbdwwNQUFYVG8MZX3r+FbbX1vidt95iPj+mXK4Cx2oRFJqOY1ZhZVVx4IXBGoeLhIEWFTQXTfpo70cTUegiMsUTAxG2u6HYhTup4B8SxPPEOO91J5wrPc3atkOy5eAsFuXCRA5hNg9RTn4s0neeKZWE9xe1cBs0wM76QCskidqtDsDrwbelJlqTl96/jpicEDUlFwDeqgAQkVsPwN2Che1Ao5+C6013D4rtPegW6E78e1MEU9j2Ixqijib40LgAqmEQidM9IApjLPyt2yX6wfu1Lez9Lq2jMb76NokCTzVY1QEtgTaLccQnx4r7QNQE/as6z0U7Ua1F3Qfek5x3/DyGrfWtygcBP/RUa8QFbt/FPvvbALqDfAjGUNcl4ixaKZRWJDoh1QmJTlCJ7t5yAipPyM2AyXTKme2awjQUVUVtalCOJNUcH
i6pigqpBd0YagcNzpstQeN11vrBFzrDOq/EaqXaAa6UoFQaNAXvF0iSlNp4CDFF4ekE5yfMdDohzXIGozHrG5tsb5/h1Zdf5eL5iyQ6xZqGpiqYL1dYZ1ktlxwcHrFarUAUOkk4RHkfSzAt0yQDrSlWK6qmwYrXVo4OD8E5JrMZy/GQjTW/WGklHM4X1MaRiEIr4TNPv8izlx/jt7//Bm+8/TbHR0dYJ+g0Q8RbG0qEjfV1xpMJrnHs7t7HNHWruTgRT2AHoilqEu3gbiern3DeAecCp9st0X29ybqufmTUfDXKT0wVJnjUzIJmYsVhW0dG1Iha+4tIMXQc749XVIxckAQhCUub6oFP0A5FEK3RScfhaq1Ria/HoEPKsNcuoTXI2zz+kOXklLfsxIHWiNVom3hnamxTUCx8VIH11mCHBg9IfwV7sL8dtM/i/7I4oj/Ao0TkVMNxVhEGUnduuHxLhbT0QEyq6IF+z2yP93BtoJzvaxuaLa0yEKkLH1lBBDO8JRcV/p7T4iGAG68Vf3VYsQjeh2StpR91cpKHPanFRt67NS17134oIyYf+OWR8tGga20ALos4R1PX1E0NzqHjFLVNmLg2eEMVeZ4zHo+YzUZsb28wL+dUTQU4tNIcH83RhVBWIVrBWE8PWOudSnHAWXDaX95ah1Ea7RxZGoe1X4m01kxn67zw0qsMRyNu3rrJrVu3OTjYpzKNz7NWGWfOXuSVV17miStXmIynONGUVU2jLFVZMJ8vWK5W1E2NaSw6zchEmC8WLPYOEWep65rReMJwNGYyhkTlWGtpqppVWbEs5jSNYTgY0lhDVRYsVyvWJkPWZ1PWJmOyLMcaQ6KExliG+ZjPPP8yG9Mp77x/k93dHRZHx2ATvviFL/PS8y/w3NPPsjg65te/9pt889vfYOfurZje1HK/nYlGsCJd9FvFDEesksB9O8T1eazeXCV4dUOqqogL08KiRLA6atXS3R+8dmQ9Xxe156hVxd9dUMPVxxujn6j4iIOTzrGuJwKdpQSlFUonJCrQEaF2gGj/Q4injZZApFsiH9t3aFkbeV+FUwqnNFoTLLkICLStEOXfQ4xbbWNTaaGA9mh5AEDwC6YS6THAjmCqtBql/9UDk3GRoY/iwjF9cO9r8512HMdSd3ZcjHw0Rzs2JdBXcrLv25b3FrsTgzLe8UENtP1awgLhNV5BsE63mm4MSTt5vUAn9ONx43uM1kO7Sqh2nvVv3SPyPlIeCbqJViRpRmYtKG8eGlOzXC4oyoK6rsjrCu38KuycV0etbbDicImQ5inDyYDZ+oT15ZSiWmGtQWmHPtZIUaLKBl0bmsbQOEGMifiNw2EMGGvQ1qG1w5GglUOJn9ZaaR5/8hl+4Q/+HTzx+JM8/dQzOFG8d/U93n33Xa5du8bR0RHnzp/jsUsXWVvbBGcpy5rD4wWT0dAD43zO0dFhKJqhMHjQpWlYrlYslktMU6FFWFtbwxrjTTXnvJPPNizmh9SmRqeZ52StoSpLtAgLcYgoUq3RWNIkxZgaAdIsJ8+HXPzseZ5+4oC3rr3Lndu3+Pt+6Y/zuc99nv/hl/8a//1f+ctkOuHl557HKfitv7Xi4PDAc7Lt4AgGXFwPcV67ihqJCxSOIgTW94aKn9vtxAbv4bZOoVsN2R+g4yC0QauV0ALrL+QkaNHt8LSBGgnAi/u4Y/QTFWm993JCA2y12wC4ifacr1YhmiDwv5EP9sDitf44AaPDk/YvrzyoQLP4lOIEqyyJ9Pukm/xKacQJxtUo5cP1xHr47AD3AS33IR3rL9stkJ48AUH7VorpqJRohkcdViREQmg6br5zukW+t7Wv5GQfRg23WyL8vyq0XUUUi2AclvcIyvF5rOsOixE13YsUP7YJyoSIH9+hmpqx2r9LFNZZPw5b0rhbPFx72bjqPeB7iHzxDyGPBF3T1IAjSVKSNCFJNDiYz+fsH+wxGw5IU80g95exxmCto6xrVmVBWVcYLEoLW
Z4yHg+Yro2obI1ToFNNtkoplhXFoqKsa3TTeCcbIRzGegBoHBj84BRjaUyDIiHJMy5evsIv/sIv8rVf/yr/xV/6z/jyl77CL/7iH2Zz4wxXfv5xVkfHXL9xjdo2LBcFt27foSxLRAmj4YjlcsGdu/coq4rFYkGiE8bjCVolLBdLDvZ3WS5WlGWFYBGlKIqC0dCbms54LXmxOA4cuEKspalrTGJJEkdVlYg4RsMhKk1JswE4R1kZqqKGqmaQpSzmc9aGIz7/3IsMP/cFXnnpVcb5gD/6h/8uZrN1/qP/8M/x3dd/h8997gtcunKF+WpBUzaYyGPRDZgYNieB/HWAsg4tDmW9xmldWNxazjBqCXQZVsY77pRWrSEo0v1rhFDQxXXnh9Z0HutOK46Txf0EVHNuNSyJnLN0VcQUiA40QgDYqKGqWBCnDbeKfGi8sOtFasSFjA5uxEfmEMC7hTjrWv9EP0HCOY0vVeBaYOlPfhc1Vx40vb3YE86kbsFrtUzXUQwqmvzSv1fggV3rUXggoibSGtJpsU6FegaqpQxOtlF6Dr7uWg/qkRFfo92k2nEVHXwBc9sqYMG6C+PTWoVVjR/nEjLcpLPOurs/GD7mKZXuPdDe92E8w0PZn4fII0G3KBYo0egkIc8yEpXQmIb5Ys69Xc10NCAfZCR6jIjD1JaqaSjqiuVq5U3y1ZKy8XG+SSYMxwkTk4NYslxRrDIWecVSlyyXBWVZUomAWJrGYIWgMbkQvmaxYjANVAgXzp7n1Vc/y1/+z/9z3rl6FSOwODqkLAs+87mfZjwekWjF8XzJ4fExzvn42NVqxWQyJk1TiuWKNMs4Xi7BWSazCeI0y7LgYP+A5WrFsiyo6opECXma4CyMxiOMNZTFAh24XC2aVbnyL0tpBEeSrpElY7QIu/fvM5tOmM3WGI2GTDc2Wa4KqqpgsZiTJimWmmGW8sxjT6Kc42Bvj4PdXVzVsDFb49rVd/mtr/4qz7/0Cqsrj/He1fewlW2tpA7y4mCyOCdtvLKTlj0j6kwnAsSdH8w2cHiNeA5Yh1hSUaCli2iI4XrWOqwF08tk8vPbR/ZK0Gi0sx4u7I+fX/BzJxa76TnBxNMOquckU0o6wG150C4honWstPAQ/wrRIISattFcF4tohSIJsesOUSZEFZw0oUUJEvurBTDXgm2bkkuLDG0b+k64liDo0xfhWYhJCR7WvIIRfoc+B6p6C2iAudgfbUyxDz+ygaJBCLHf8Z13MS0SrKe4aLWRIhFpe0Don9k7lK2znXPuRG3heKwHTUvjgdd2TrDo8I0Zb11/dvdplYMW4DsFoq0m97sYwo8E3aapENFkWpEkmlQl4BzzsuT+wR5ba1M21maMBzlKQd00lHVDWVYsiiUHiznz5YLlckFZrzBU6MyRjzVOZWS5Js8b0iQjlYREKRYiKFXhg8qExvr89DgoHY7G+BJ0o/GY5559nv/hl3+Zq9eukyQJWZZyvCr4+re+SZKNuHjhEkqE3b095mXpOSvTcOHCeQZ5zmK5wjUGrGM8GsFohHVwfDynKEuKuqK2DXmicbXGmIqm0SggT1KywZDlYk5d1ljTsFgcUVUVVVOT5wMET9FsbGyRpQnT2YxiteDoeI4ooapr0iwnzTJGbsLxYslqvmSUp7z97rsMB0OOjg45PDrgzu07JGnG2sYmddNw/949HrtwiYODA3Z3dnxGXjfe+uM0DJSOy7LSgXGrkXbjEdrvgunoLDiFU97d5JQLIOLPNxaMdf59xYHqFE5CiEVrWjpi4tVPgoiKWqoOGpp3YqE1SisfpaB0D2C9VkovdThSFKoHht3SF2DGOfymIq4FqWBi0EYr2FD4UQVuTbpoBxU0Y2ttqz32tTDnQnJDr44vDwOE1gqhrb1A0DadBICVEDPTGxO+CpsJY4hgdgdgE4GYWHIiIy/egKCpBnCN3wh+4ZGoqdIDQP9BF2EQL+UxwLb9GtInPgT9Ihhb23hHvDgIhYtaF
bodjZ3zVyRq8LGvpV0UuqLzugXiD4L+h8sjQXc0mlDVBcbVgEVroTG+q1dFxbJYUZcVpq6RJMFYS9M0lFXNYlUwXy2YLxcsigVFtaIxPvQrSS1jnWFSS5qmJJKhbBy8oFYaXImVGhrPN5nGZ7CZMPkTUTzxxNPcef8Gt+7cZTweMRwOyQYjBpMJo9GE3f0d1mczkjQN2T6Opm44d/4swzzn4PAIpTTL5SoAuSLRinK5wATtvG5q5os5q8WKqlj5hAjtM9dM3AlAhOPFMXXTUDU1q3JFXdc0xqKTjOVyyb17d3ns0iWSNOXM2gVWiwXHh0foNEX0itFoSD4cggg60dSrgje+9wYbsymH82MODw5ZW9viwsUnGM02aKqSTCesr6+xtXmG5fExxhiapiFGgUQvccfbRq4upBDTZZmZYGIiLphfD8R4OrDKBtMzBrThQSNivYsakac0nJyc+a73Hx9j/LHH6ScmvsaCRARozW2lVJsscUKjDf8iMYqh4wr7ZnZHOfZDkbp7+FCmBxtDyHYM3HmHNMFc7zn72jAnfw/om7cBSMLpbVlG6MojPKwv6DsSCdcMcdai+9jUmvu0TrAuZK5rQ+wR/xMLAyG27Rsfjxwcg2KI0ePdQ/eep9em7nf/UB3N0P3br0hmXQMmJIAoHV3wUaU4wR336Yluzehirru0Zdd7Fyfb9Sh5JOhubJ9juZxTFEucNW31eaU1mFAAWfwDatEkYhELRelBd7kqWRYrlqsVRbWibApqWyIaskxDkqK1RYzFGUUk+lXiq8jrQijKmrpR1AjKmBD64cO/Lpw7xxvf+Q6j8ZjpZMpsbZ31jS3W1jfJR0MSnbJ7eIBSPkLBOsdsOsXWFXv7FWfPnqVYlRweHjOejBikKbfv3cU2DYM85XB+yPHRPseHB1hr0VoxyDOmwwGz2YzpdAoIiU6YTWekSlEWBab2Wq81Ncv5PriaPNHs7e+RzxeMx2Nm0wnJdMqqWFLOFzRVgRVpqRRjLLP1DcqqZLUseOal1xiMJqwWCx6zlsY2zI+P0Vrx/PNwdLjP4f6uT1dFQtZYcLC5aC56iQmqNuoq0qs4Ff610IKtE4kUHyJtsFGIw3VY8V53ZRVWLLotZq2CM+NkCBuuy2D7cYuDts9Ua8qrLjSxB7Q+AUJDiO0V8YqCcjFsMgzhWHch7BLhF8E2AR3vvIoxrz4W2Pa27bE9jVnCiubabIUuweTkHO9TGv4+MerpRP3aAJidBufa60rI2ozO0li7JCbZhGTmkMjgtX2iI1F1BX88cOuWPxXAYnDiC+o4sSixsTqm11zFUxuC8fd7QHGNkBjD1JQfmO1nXZ/E0DMX3menCFjrMMZHlsTkKx8PbR+gGcJ7aovW9xe4sKy2C+oD4+ljAO+jQXdji3ww4vj4AFsXVFXlHVi9GEWtNAqFDuaWsY6yqlgWJUVRsipLlkVBUZfUtsa42meSKUFShW0U2cBha4X1viCyPCPLcrIiJVmsWC1LlECtJGSmaUajMavlkiQfcGY8Y3N9m43NDSbjKesbm94zbGoODvfZ392hqGvW1zdoTMPxvObpZ57GNobDgwMGg5ymarh2+w7GGcpVQWNq7t65TVmWDPIMUGRaMxqNWJttkOU5o9EYY52nB9KMyXhKNhiR7WUsFgsPOuJDVOaLYz+5jGV9usYwf4zZ2jr5YEA1qtjf3fEJJc6xWCzRCFr5SZkNx5TLFXk25OL589SmQaFYbqxQpmFtbY0b19+lLlcsl3OcFbRzNKG2ReTkdKs1+cHi40h9CBmu83s7BOXAqDCwgwkVsDcOSYL9F6cviRawCiehhkOrMccB6luiwgD/SYjTbYEqajShaI3W3f5crbNJhVRfHSMXgmYXuG0Xteao7YunALp8/r4GFsBcVMh2DBZErzmtXdLX7OK/facYnZYbNd+T2qAEp1B3isSauTwIFCFyIBY5sl1qsj8s2DqhZoULC09n4idAilLerWVCau1083nOX
fk8s/U1bHHA/dvf4+79d2jqJYgJIG67hT1sM/SBEXLCeDqp/XahdH7cG4mKQhcpYaxf9JT4qBDnaPdko/ceTlgt4j4WmP5t0XSHozFpOkApzfx4n8X8mKqqQCBNPeB6QtthrKM2hrKuWJYFq6LwoLtcUZQFZVNjMaAUiaSIU9jG4pwmUZo0UeSZRYumsYZ8UJEVGXmecZTMWS5WJFUojpMkXL54kWw0Ybq2wWx9i7WNLdZmE7IsZ7lYcLC3y97+HsuyIEkS0ixnvpgzHo957tnnKIuC/cNjxpMpVVXw/s33KYuSxfyQ+WIOCHmW8vxzz3vNXimasqKsaobDIZPxiGGeofIB1jmasqQoCpybIgq2Ng2DwYDGNezt7lNXlQ8rq2vmy2Pu7dynrErGkxmTyQy7tsF8foRzjiqtuHnrFnVRMB6PyPMBe/u7ONNw5eIVnnv5NTY2NtgSn8lTFAt++md+jsOjQ4pihWihrqo4ZMIQotVouwU7xsx2WkEEXNce28VT9kkK5xxaujngRNDK4cK2KzEsyU/wmGocQDxYzj8JtRc89RKcRmGfrgi4LdAGgBHRYfueGCoWNaE+l9lNvrgfWfseWlPZh6LhgsZrbABoh4hpF6d+plRXS6A3uZ2AU2EXiD6AdrxtC6xBQ8WFKImYLRhojhOAEd6ZDy9R3b0JHGfoE6dUgFuNUwn5dJuNM09z/sorTCfrOGcoy2Nu3bqNub/kxdmTHNXCN+4UvPb5f4IvjxUHOzc4PHqfg/3b7O7dYLk6IEkS8kxzNN/F2RWaKkSCuNaiiHKSagkLeUydd4GuPEGphDhk6xNcnJNAO0SaJI5LiUGSIcElLsIqLEInKY/+e/8oeSToimiyPGHYjGjqiuVyQWON3xFVJzglWGMx1pdvrKuaVVGyWHmTeLlcslyuKJqaxllEOx9cToJzGk8/+pVVKecL5WQ5Fkte52RZxjDPSdKENNMsFwV1WZMmGaPhkKqo2Nw8y5kLlxgOh7i6YPfeXd557x3m8zkWGA5H1MYgOsUZy3Qy4/7unqcKBjn7+3vcuHGD4+Nj6mKFThLOXrjEZz/zGl/64uc5d/Y8OklZrgqUVhweHbFz9z6379ymWJXgDMM8w2UJW2e32NzcZHt7i0E+YDyeotOEpq7Y39/n3avXeeP117n67ru8f/MWy1XJBdGMx2PW1mY0znJwcECWZayvr3H71oJ79+5QrkqyPKNpGq5fe4/7u/f58pd/htnGFsVqhTENT1x6jHPnL3Dv/l2WqxUupkAHviwSu21gUTDr+hpTBMlQ44xIC56IsIxWrjgaJIBr4EHp0iidC6DtvMajQkRER2H8ZBAMHhQ7k12Fql79zSdFK589dgJwfXyrN61173px8ge6wHW0zongfyehloj2tXXbvnMQohxism4fdF0PXINS21EE/sodzyu9HpbOmmlpo6j9tl6lCOb9/gngjGoLu4sorCiUS0Ap8vEZnn7pD6FnT7K5dZ75smTHVGSjIaUquXf/bZ7YvU3x9jd47OkXeXO1w9/83htcfOwZXtj6DM9f+QrHyzmJwFTD2mDEZDxid3mbd298g1/91f8Ppr7/gXf3waw0Q6RfEB+nG5fCTl3wySn+nesQkx+UgB7nrpTGuRi7HKzA4EyL3csDt/+4o/nRBW+qGh3SfLVOSJKcPDMQ1PWqqlmVJUVZkaTCsixYrgqWqyXzxZLFYklZhBhSBWhFqjO0ynBG8E4oFfgjS5KpoFn7a+d5SmUyklzIciEfpCznBc4olAjDQY7onNViTlMuKRbHvH/zfW5cv04+HLK2tk5TV6hE09QVm5euICKkacIgH3Dv3l2uv3+dw6NDMq05f+UxPv+lL/HaZ14jEcXO7i67d19nurZG1fhY28l4zHg05rXXXqNpDMeHBwhCNsgZz9YQ5yiLgmJZcLz/PohiMhkzSDNefuF5Xn3pRa5eu8ZXv/rr3L51i5s3b2DqmvMXLrA2m7EqSg72dlE6Y
TKZMhwO2L1/j/v373provTheEW54vkXX0OA1WrFMEvZWN9EqxRsgWn8SIiB+P3xEf1bccLFGARpJ6YPwrfx4FiNpO+Fsfi6Gs61XztRaHFt/VNxPlJBOyILh8WhEc8D/wSEjDnnkzi88hZikVWgDJRGtPZ1OlS39brugTKiTpj0/a3OcV0lF6EDiehY6ya58jylACoBZ2nE0tUwjgAZInharben/forh6cKIOpMCw4t79lqywFQW644vAv3wGIsQIi3dSLtMysEdMLalZ9i6/Ev8O0bu1Q7N+H1dzFJSr2xzZiG1d4OcuM2cvtd7t64Ct/6Te6ZjPuzs9y9dpe7Z86SK8tzTz+DlRFn19YZr21ik5zpIOenXn2O46NDvvb1fx/B9J6xe3+h9/FVhNxJahZCqJprOXOHgDU4FDp4ODy3HN6Hi1ZHzHjtKx09vvl3qTM8OmSsLmkaqOvGp6wmKYPBCGMarIXjxZLdg0NSUeSDEcui4Hi1ZLFcslguKIqKqjE45fnJNE/JkwycomlCgWvxud5K+YpfeZ56J0aiSG3CgIQkhyxVpGlCkqY0ha/hUJYrDuc7iGgGeUZVlty5e4fGWTKlqOqapqmZ5hPOX7zEdDYjTRIGScrOvbu89+577O7vkGQZL7/8Kj/7Mz/L+tqMw/0Djg4PWRwdc3Q85/atG9zfuU9jDIM85/z5izz5zDMMR2PqumE8nnD71m3SPOeN17/DnVvvoxxMxgPSLGP77AXOnbvIbH2TNE24dO4s/+Df/w/wm1/7Or/19d/i2o0bzJdLts+cYzwakSrFweEBGs3+wS7rm9vM5wvu37lNXfuBd+3au+zv7zIYDBnkAwb5kKJaYYylqH2ato/LlcgkBICM5qWfdM56c6zN4AmUQ2fCen5WQlIFKF982wLOpwTj8NRCAFiR6EDjpDksoKyfIBIL4PyYxRqDS4KmqyKdEAnsbut1LUmPUuh48viQHvzCNueuD34QJ3WUCM7ifJlMD2K+Kp64dnr7+7Rhe9FY6d5NB/IPFwmWSasfh3cSax6ftHSiBhzKrbpQTJyY4OC6fnGCk4T8zGc5njzL67/zLrZeURuDXa3IZ2MWuze4sLXO/Xv75MuaMQ6pDXsHO8zXL7KWjslvvk361lcpteKv/apDnXua5MLzbFx+kmfOb/DyhcucbRxf+fI/xPvvf4M7974Z+lG6Po/90D5zL6DM6Vah8OAZz/GbHvi+9vyuO1H/wQVnXgBe10V0iPiqe3G/vN+NPHq7HhGqasVqtcIZgxKfnYZz1HXF0XHh6y9YmIxqjLOsVitWiyXFqqCuDQ6fIpvnCcM8J1M5TWOoqUB83VxJ/MtWWpPoEAupINeeqE8yTyOhPKc7NxVlWdCUjqODPZq68SFXgxHG+mva2qBGwmg8ZH1jm7NnzjAejphNp7z9ztvcubfD8fwIRPHyyy/z01/5MscHuxzt71GVJTdv3mRn5x53bt/i1s0blHUJSpMmKTfev8Zv//bXWFvfYOvsRfI04/DwiBvX32Y4HHF4POdwf5dRnrG1tYkxsHt/h8lkzOUnnkKSCyhn+fIXv4jSmq9+9de5v7tHUVVsbmwym03Z2FhDxFGUI+aLY86cOcPx0RGrgz2SRDEYDimWc8rlkn3n2FjfRPKEqqmIIUk2AGlckmMGow0cq7QTNnJgYTxb6SiB8L2I8zwk3gscOTbTUgvgsCH0zIZSdyGrsOUDQ62qcK64D4OLH53YGJUTat22JmvU6qDdF81rt/5rv5OtbYHQz2tLVyzm5Iz0vLEfv86FaneuA4624EyQuEOIwWf8tbrsCadanw5w7S07y7efpuGgt1gYZ3vHxfafpC+iiAg6lJ30YYUa1p9mPznHrffeQA0Ma5e30DZj68xlBgJHxQ7bOmPz0hMc7Zxj9xsNlaooP/sYmy/9AZqv/jrZd75NIhZ76TlGwyl2fsTGtW/x9dvf5cYTz/Pd6zeYDgecPXOeZvYU7u63QsTIB
zXe9jFcj6YR076FjnKxkUsAB8ZYnDK0sdrxutJbUFvg9heytGvPB/wSH8dP8UjQ1cGr3dQF1tjA6QDi85eLsuZQrUh1Sll52uF4Pmc+n1OUBdY1aK1IEyENhc21JDRNDPYOBcgleOqV1yZaAM48n6a0r73QGEtRNNQVGNuQZznHR0c01oJtSHVCsfIVvNbX10m0YjZd58qlywzThOVyyf7ePru7uyznR9RVxVNPPsVXvvhl8uEY54R3332bN1//LlfffZujoz3qusJYB0pRNQbEx+ROxhNUkrF/+D3KuiRRmuVywc2b131Ym9LU9YqjowOuXruGTlJm0wkXr13lsSef5tlnnqVqDI9feZyd3T3efOMN7ty5w2pVsFh6jbioDM46ZtMN9vfuc/7cWVbLBc5Y8sRn0BVlhXWWyWjI9vo5Pxisaadbm6EfTVBr22RJb/d2lIEfYy2qtCUlrTgSiTSEP9ISy2TSamTKhlwmiRDjCBYxSk4kafYYyx+vWGcD8NY4lxHjWf1miJ73A1oitA+LzkYQ7krJdKUPH5AAzt5xZYh7oUHQ1KKTKDhxJER/RNPWOtfSric43pY6ClqxJKSJQqcTBqM1BtmAxjrywZCmqSkWx6yqBcvFLtQLX1TMeZB3oT+MM6Fmh79ltJja9kpKOb3MPWOYrFd85aUt1nWDdjXvpIpVVfCVrRmXZkM21y5RN2v89pUthpMpyfkLvHVoqP7eP84Tzz1BbhU7T75Iw4zb964xeue7nLdHZM1ddt75DrvJFu8vnmZ7/xoJCh2e2bYjuAe+Epf2uHD0wvRaLrajB1zgWXwYqgFJvTM1vm7xBSz9PQyxAl08N96pD7TuY6i/j9Z0nbSV8euy8HyT6FCwRpMNhjhJWdSO2szB1BwdHzFfzmmauk2jTFLFIB2QqLTjW1QITdHSlkoU580ri/OxklojicOKImsy0tyQ5iU69dEIg2TUpmceL1cYG4OtDQcH+5w5d5b12ZqvB1HVuMaxf7DH7sEe88WCi+cv8Nyzz1GVJe+89X3u3bnD66//Nu+9+xZ7B4cUZYUJVc+U+IVjkGaYJMEay7KIu0XUZPnA12AwDXVVU5YljTFtPQrrIMsybt65w+27dzDGcu7sWaazNR67fJn79+7y3tUDrl57j3OrJRsbWzTWkeUZxjRM1zbYuX+PLMvQaUJVFYHm8eE2WZaDaNIk8ZZxWBhbDG1pO2mVoriYe3MqftCP6IW4R5qN4NoN6TBJ45YzCuIC2nqR24GEc37zREfEefmJ0HQBv8DY/i4MD37fgVwbGhU3hxTwwBw43CAPajwOzx2321Xh2noIfn73w/Xx2WHhfWnnwaXuTXqc8UX5dcZ0eo71rStsbj/O4489SzrcwMqA6SAnHw65ejBn3WnWx0OWTYGiYXV8j++9/Q3eeeur7N97B1zd8satvttqe5Gz9/8xo3PcXh6Tr835wqUhr2VLdu4dMD9uWFN3OT8eosuK3aPrFMsVt23DKy98mTqfcGxyNlxJOT/m8b/j72G8NsEWx8wXhsVkk7X8Eq8dpMjRPme2z7KwNQejOfu3b9HX4XsrUNBq+eB3/d6PDs2YadYb7xIGpRMT+j6cLQRHW9j0K9ynD7h+eHTWCh9jTD+a0zWNr6yUZCAr6qYhSfzSm+UjhiMfvyrOsSqPqRbHHB8dUZYFDoPWGVmmfdxtmqN1gq0alFNo0VhtQWyI8fVrqrUKaUDFshuSoH0eBal2aF1ibcHh8RH55oREJ2BrDJqyqlhfW+P23Xu4JEPrhLqu/F5qxrCzu8vNO7coq4rZZMbadMrBzl2uv/09lss5Ozv3ef/WLY7nc45XKxZFQWMdWjRpqiibhpWuSESR6QSdeOeQDlltojRlUWBtQ9NYyqb29SNCvGzVNF6TuPYO5WrO4489webmGTbPX2BtbcZ4NKSua+7cv0dV10ymM0zY1LMoloymM/b2dkiThIODfb+NUuYz47LM88eztTVM3bAqV
mE/s/7Qi2NSWnPXj2MTlN5QDyA6T3DdnmpE0HXdBMQjuQTNVSL3FXLs/YabnZZhoa0f4JzrIil+nNIHlUiHBJFIGTjvLPM7j8SU365nu4I5KnDkncYVHWvE7aliiBaBosADbowo6Jrl54MvvRnqXEji+18SxmsbnDn3FC8+/xWGsyuURqgaxT6Onb2SmTUwanh77wYXUZjZkDeXFU3t+MJ0RDa8wsYTG/zxF/8Obt34Fr/2a3+Bxf57fov2njNK2sXWc8xWEpbpiJUt2bZrrJYDfmV/l3pVY5YN1jguqDPcu3mbF598jqPv3+LclRd59+p1BpMJv5MKzWwdfTbnvuxz99ob6Dv3GV67wXN796BquGYd24N1kjRlKjWvToZ8PdGUpd8jsWfsty9PHhjpJ4oP9V+1676PtXpb9sbF2GDra/GGGGMPprq7iIQIkWiVtGtTm6f5SHkk6JZlSZalJEmKTnKMWWJN4/PRs4zBaIxyCXVVURYLVkXFYrmiaRqUhkRDmgqDLCdNc5x1NI0NpQi9kaqUQ2uI5pq2ChP1LxdWDqeIMYOe2PeTN2bFJMp7mH1Rb8fG2hoOjbVQW8vabI3bN29y8+4dirIEZ3G2pq4K3nnn+yzmRyyXS/Z29zg8PmZelFSNZTSeoZSiCVsA5crXgjDWUYkhs0KS6LbAS12VvvCLcVS1wSKMRlPyPKOsKlbLJcuiDFzSbZq6Zm9/l/WDPWZrWzhjyFJvQSyWKxyW6XhKE0JX1temrLa2GaQJ8/khKnCEk/GYyXiMU5b1rXWcguToiEQEtF8YTNj5AvFjy1i/GPiQP1+sRmIhqcDDxrAZWl44ElkdV+vphUgmdFyij4Loa3E+g83GwiGA5oMT40cvJ9vgWitfQoYWRCM27jId5WTKbAinU679PfZdTJBwNmq5ob+lu04E/b6Etc/H4VqHToacvfA8l5/6MnWyRTYZsy9D3tktkERxvaq5s7fiiyjeySxvlynblWVzbcxX6xWuTnkWxdvOcDhfMnQJy6Wl2XqFn/4jZ/jur/4HXH/vtwB6i4qc+FECS7tEZwksG4wWpBgwVAmoEpRhuThEjwd8b/8eZ7cucNdWNPUh5uA6353fYZDnXFIjilXK3s3bHGt463CX9eGMm8URk60L3GTF+mzI+dkmiXHkSlOesEWk1Tx72kT4pzdeQ7+2EQhtPDonRmx3tg/384mhDuUcsYKcj/BRnRpsvbLo4qLromrxaHkk6B4dz9ncWPf7iGUDTFP7ECwgTTR5moGk7ZYYRVVR1TUOR6IVKgWdK/IsIVVeEzXGV/1x4gPBVaJQWhALjek4MxdWHuf8xnWdR9WnHw8ynzgxnUyYL3zIlxFI04Sz585zOF+R5jlZmnHr9k3u3b9PURYcHx6SporpeMh8fsj8YJ979+6xWCxYlCVlbSjrGqU0k9GExXLOYJAxGk8wdUVdljR1TZ7lxBpKOoHGQiZCZS1l7SmJRPndJpx12MYwGGaYxlA2Brtc4XbuUdQVx4sF5y80qERjlobV8ogkTVksl1jjtwjK0yGr5THPP/0Mi+NDmqoAB3mWMZuukQ8GZLMh2WDIdEuzefY8W2szzpw9y2CQec00FOx2TmiqmqJYsVjOWR7PWRUF1gTzCqEsK1bzY6rVCmcayqqhqsP7C5SJcc4PoKjZOvw24kHrjTxG3NDRSjjeRYfajx90neu148TkjWxml5n2IAfdRWVIO6G7vHxvptug3dJWYus7gWLYWUfbnDSKw3+SEbOt50guvsra6CzfOyq5MJty6+Z9dKp5brrNf3frfe6qAS+WNVfPrvF2mrMBvLI25vuqYKWHvKoHbA5Srlc1m4MRr66v88b+XfZLeGp6iac+/w9x7+51lqs7dHvedZmFUZ/MrCEzBUZSbt1f4VYlqU4YZIKpS8ZjRZY6hlkD7ONu32PgIBPNZw8OyBqHFc07zuHSDGcsr5x7DC0JB1XBWXIaLYypsYs9jtOC1WqfEwj7o
UNHIg8SSZKOw3VBARDnsSWGkj34XvEKoefSadOSBQcSq4/ESAch5hSfGEuPkEeC7u7eHoM8YzDMSZKEJM2om4q6qRk46zfgE79yG2Ooam/P6gzSXMhzTZYlqET8903tNUXniWvRvsKY1oqmcsG8MxCcbNYv/zhn2mIudW1oGsdhMefKBcXFSxe5fqNBcKiBYjAac+nCZc6ZmnuHhwxSza179ziYHzE/nmPqiiQZMh4MWCyOKcuCsqooG0NjLY2zKGdYHw25cHabuhxx9dZd8jQlS1PEWEa5L2lZ1xUDneBEURmDThOKYoU7XmBXBQOxnL/0BGeHiu+89y7L+TFZkoY9xaAqag7VEQIUiyNQKXVdY0xNYxrG4xl105DjyFPh4mNPkmcp761WDPIBzjp0mnHhwkVUolmfzXjh+Re4d3REVVRcuXSFxy9d4rErl5mur5NmaWtKGWuo64r58THzowOKqsRHpdTUlecLDw53ONjdYX5wwPHxnIOjQ46PfSigxYG2JGnYIBSfYViWFWVZ4awjyVPy0ZjhcESSpqR5zmAw9E5GY1jNFx85QD95CRy3886kWEu23f32BI3gJ62vZdFN5hZww3EuaEvOWs/jWsFZg3NNBwSxPkOkeACHwobd6yKnur7+ONmlL/DtA8vyHtTuPtvVgveO7rNrC14dzPj/qR3eHaY81iQMz425tZ7yxXyL543hm6Ykm2zwsmjWGuFr5ZztfMDTKuOdg12WwyHns8xX1Nu4xOd+6h/m1375z4KvduJNeQkBbOGBB65C0hkJmmGWsjQFdb3ArvBFqoymTsdUBVTNimZZMECxMjUDlXJoSyazMcXhDhfHU7J8FhJPMl4+/zSSjBgNBpT1IZPUce/tX6cyS99vbT3foKv2Fsp+nQQvcYGLadnS8vJ+u6p+BAc98A1eiz7d4yLV4L9V4qmOWKKzDdJ5AMAfJo8E3Xs7e4xHA7b0hnfOKIVOUkxd0gSeVInzxXBMg3EWnWrSNCXNFfnQZ5Rp0dTWYBqDszWiDOgKpR0q8Z3jveGhpmd4wUpCmiqA9WZw3TTUTUOxqqgbQ5olrM3WMHWJwZGm2ud9G5iOp+wdHnNweMDR0ZyiDHVvTcbe3g5+94iS2tTUtvE0SjagWDgujxO2mx3ePlgySCDNhoiDcxfOsLm5jU4UxfwIccKqKDmeHzGereFE2FW3qeodtgaWK+4OzVKTGMMg0RhJGA4zsBaxNgCg5fj4iHw0Q4l4KkApqrJgOBzjbMgZV0JTVxTlijwfkiSaJEt59sknuLt3wHg8Y7S+TmGvU48c+XjCbG2Ns2fPc/78OUbjcRuniIO6rlktFtSVT7YwVrFaraiqAnFQ1gVHB3vcvXOLw/0D9g8O2Nnb5+h4jlOONFOMhgmJJtAqlroyFEWJzjI2t86yuXWGtbVNZuubrM02GAzHGAdlUXF8dPCRA/STFmk1FIgZX2GtP6Ehtc40YqB9d357UJv80PG30Qqk1YJc79CTJrCPm20AS5qOmZ17nun283z9rmXY1NxZHlGtaoa55e3N83xpcoEpjndywy9MLnJuUfErHLBWOC7XFTvnJjTWsekKzi4y7m3nXBitsbaoqJcF9bkR7OwxcTlvuZJL6zPsk5/h0vde4e7tb3l6r11YOi0+L5akU6F0irW1dUYJJM6gLCzKY1SSkOmE6XSGsRV5ltPYhroxrI/WKVeHpK7hlcvPMU5mVE5xe3Gf7ckmG8mYRWMYjFOapmLn1veoV/cDb3vyvdmOpO3gsvfeaD9zfDCUK1oyJ+0L15104roPUhLOOZTzi6YVuop6P2z0wt7+PhuzIYM8YTgcoHFhF9QEpcCaGhGNNTXONohyqDSEhw0SbxZnGThN7WrPcSmL0hZRljTV6BRsY4j5zBLru4qvTJRIgOQwExTCZDzm2Sef5eLFS9y5dZvpaMTBUc3aZAJKsygKlkXN0fyYqq59yUXjvbNJ6pMvqmKFUoIxNWmSkCWW4WSKI
2E6mrHIMiqTYPOczUnO5pmzaKXY2NxgbbZO0xj0+jbWWfb2dxmOJwwnvuCNWZZYKxzOD7hZ5KwaWNu6gE58lSZEKJZzVssFSoSmqijKmnToPOA2hspWJInfLHNVLNDjKbsHvvCQEsV4PAVnOHvmHG+++x7j9Q32D45x2qFRlLaiaips4HXTzBcRGmSeJ7fW0jQZk9GA1aoI0RaWM2eEpvGlPMuiYLW9xfrGGge7u+zuHzK9e5f9wwOsMwyGKaNhRqJ97QJrLE3tS12ubZ5nc+Mss+k6040NNjbOMhnPSHTqF86qpChXHzlAfxTigjVlW2dX9DMEoLTdVkVtMkJbPJtOA3JxQlqc6aqGecAG75wMFI6LIXMdHWFtjXWW0egsdu1pri0G7BzcRqqG941PLlibjfn+2pTHGxhq+FszyJqMpDB8a3tIPoR1aZgew027ZDNruKQEmc7468c7yMGcPyJj7m5M+Y27d/h7phPK44rpTMP+EXNjufzYZ7h76zsoCWnjPUe3Q5FJRpIOSQcjDg92uby+RqqgKSu2phMMlmVd0jQF4yRBXANNyXQ4ZZQNmQ7WyJOUhIRhNqVCeHZtk2E6YFkUTGZTjKq5ffc2B8fvh3kfNEtcS13F/3qgjZElUYNtXWRBAr5IfGd++bQIvg5xcM8J/vphW/peYBgtwLvgT/JvkphG7T4A7A+XR4LuYrni+HjOINcYMyZLUhSQpRlJkhBT66z1xWySRGFISRKfRJFlOYnOqGsbx23oEINoQac+48dgukpLquNjYm6JCxWHkiRhtrbGxlqGdorFYkE+HFKXBWfzsyiVMBgNWVUV89Uhq6qiLguWqwXVaoXSPtzNYFmWhlQrRCUMByPQGZubZxBRrJZL0sGA6XSNc1nOaDgMGXkZs+mU2fo6dVUzyDJQwnA0wRjjnY7pgBTF+XLF/OiQNPH53bZpWCznYC1ZPmBPK4y1aOX5zbKqSZZLn3qaJlBZjLEsyxXr2YzVcoEMBijrY5CzNGc8HHC0XHC0LEhHM86dGTMYDbhw5iyHywWSakxdc/f+DsuiZpDljNIM5yyT0QCdphRViRKFFmE4HDAYZWilaOoSMxxSjcekcQEdjdBZwmA6omkqRqOM6TAjTbxhZ1yDbQyj4YTp2jnGk3VG4xmz9Q2mk3UG+dC/b9NQ13XgN3+8Yq31McTWp+5a46kyHygfM9VUG5MM+DoMYQukdgPFE2RsL462tVGjRguR63bOxJpdHqgtJPlZ9NarXD3QJM5xb7XgiY1NltWCJ+YV87FDb814cXCWaxxSu2Neyja5KJo7WcnQNlyoLfe2cv7S/C7m4JB/pNrg2nDJd8r3OOsmNKT86uJN5ggLhvymXaFuLricpWwNU0aXn2cwOUOx2AnWpsKplDSdsrZ1hY3HXmM52ObW/k1ydZvj40PWs4xMJ6TWoaxlluQk2RARIWHC+izFYhjkQxKlWa1WyGCIHeQMG0GsIdGKrbPbzMs93n7717h7702UK1vLoy0pCXRaasCL9vvQx3Ky3x9q9geOyMZ07GABEqIa4p8A7e7QEZxRvVce7B8XF4RHyyNBd1mUFKsVxSolTR1NkpLqjCQbkmrtV0JjME0FWNLEp0umqSPNdMheA9s4nM8rQCu8aZpqdOq9gs7hdwIOHmPPkfnQJYvxOxGL34FhMBBG+YzpaIaShPu7ezQOxoOc8WjEqrbMlytwjtVqxfz4mNVy6btF+UyssqrJEo1SKU1t0DphrHNSrVlb26CeeS51OpmG9huU8nGvo8kUJZpUG2xdYazBmYbxYIBK/Jb0G5vr2H3XOtoSrairEpUqlIWqqX1ZyKZGOZ9mjdbUpvEZdUmGq3wKs7W+/OT6xjrOOc6cOU+WaEaDnIPDfe7v72MlYe/gAK1TLp7fZm065sxkRoOwc3uHq+/cIEky1mZrSKIZDnKyLPU1IfIBWivOnd1kc3uDPEsQcaSpwrkBarlgysQX0BGojcEpqJuaUZ6wP
s5JtU+kiSbyYDhhON5gMpoxHE8YT8YMRnlLiZjGUldl8OT/eCWm0lrrfJF8a2hMAyJo8NqRVaAEE/KcXRiPD+5i259uznYURIz+aINAiLGlgnVNAGYF6Ra3yhl33nwXFiW7zjJb2+LbS8PT2Rrmyoj9WcbLpWU5WrK7nTKkomgWfHO8yV8/uEZarvgT9iLpwVXWlu8zWh2ScJ4sMby2d5Wz6Qb1eEW6usXj+ZTFzHFY3OLl7af5vlvyfJkwWRmef/kPs7/cp2hqppM1NjavsLbxOCafsHSaQdOway3v7NzlYjJg2BhcXZFlKSmCM4JUjiQZMByMcIDRwrJqEGmYTLfROvdF9/OE6WANpeD9W6/zxlu/QtMco8S2wNcpkZEAl6CJ+nrEESzjMT0fZ3gFttNyY+CDxEVQnby++Pce7+ajVPpp2fH4HqBHduGHjV4oi4qyKlFakWUZ4KhNhZi0x08ZjKl9ecY8B0nQqfGlHyWhMX57HYcBMYi2qASSLCFNtH854MlonC8SjQMFKlRAViLkWQ5O0eiERGUkaUq5KjGNoXZCYyo2Ns5T7R3TNA3z5YK6KqhqvzOvVjYULxHKokQPB5AK6SALBcpT8ixDOcc4H1KmhrJcMhlNcMawLAuuXHkMrQTTFIwGg5CQIGC9ll4bX/pxNBrTWMfOzj1m4zFFWXN0uEeWZUiSeB65qDxfa2q/2Cjlt7wB76DLBxwfH7C2toFTwqoomAwn4Pz+ZIPBkFtvfY/DxYrZdIYVKJua/cM5g3yAsRXHx3Pu7u56DrVquKl97YrRcOTjgqcTzp3Z5qknH+OJxy8xHmdAyBLEUZQFKEuShFjrwZDBaMSwXJHUitEgYTjKyFLtC8ZYn8Kt0pThICUfpOR5RpoqklRIMkFp8SUBJcU11UcO0E9anDNYp9rMNNM0Pt05qExWeinASmGc4LSP8/Rbt6uToBvnf/ilS6WOR0jH7wo4MRgB24y5vxpSFBX1nfeR9TU2Hn+Gi5Mz7OuazdlZ3lQHLIcHDNYep8JxfPsmm+mYZ0zOtw6+z3D3ewyLBidHnEmFzx3cpTk+hmSHFQ0bi0Om6S6L0U02q5ozkw2WRcl5UzHVu6j6mJvDbbJ6xWvPfYUnn3mFw1XJQA84OCrIBzmlKTlual7fuc0t5agev8TVd7/L8eEx0zRjXGjW8wFKNKNRglYaYy0qydAqIc0mDPMxaZozLw/Zm9/EHdfsqoTd3bfZ27mKc3ULpEY6vVI5n/gU+zLyzK4PtsS/g8nPgxyva2mGE4xtywXHWPMIvJ5SkfaaHa0U2+HxPjrffsjohVXIqhpNZmxsnsU5WK2W1E3lnS1A09Q0xqCTlAE+6wjtq4c1xlFXfoPJxprAZypSlaF1jkgSMp0c2lfbDVXdFYlSKG1xyhdJMfhNKbWosFcb5HnmC+OkGUYJu0crdvYPKMsCUZosHZIkJdYaGiMorb3zL0nR+QDnnE8/1uILiqcD6qaGNMFZQ5amLI6OGE1nPPXUFfI8J0sU585d8iZ5mrJcFmRJggmbLdZ1xc1bdxjkKa+8/CrvvfcOdV2wffYc+/t72KYmyzJG+ZCmKf3uErqganxxeNsYnPPXEaVYLBdsbm6R5rkn7BVMphOu3rrOjXv3fAGcwFXPlUI7H5qUZSlFuaIqS2rjY26xDoz1GpwxjIcDts9ssb65ibGwXJUeQNOU2hoa2/gdMBqDEhV2hU5Rifb5FDH0Jgw+JTqwQ46yXCJaIYmCBKz23LzohCaY8E3TANOPHKSftERO1/O6jS+o7xoaTavJaq1DtHGNC2FkzimU1ii6wPm+46V3Bz4wF0XaWGdrh7w/H3DzcI/KWtRwTDWdcqGqecftceFY02ytMGuKzeKY1B3xlm24f/Auhc1YqHN8nprZ/T3scYHYPYpBRrZaMrAOPTTMqxUTC5mt2Nk/JpMESSzH5SFrknNvuWQzybmjDri4fY6lMdxYHDJIBzSuQoYJK7HMRdi3hvnAc
ObCmOW732dVH3G3KMAl2ERR1QUb4xFmMSeraiZra6QiJEnOeDTBYrl261vcuPZNqvIQJbatYyDSgHj6JjotrVi/EzKRBIhqZYwAwdM9LR/Qox7CAidKnQBWWqdoLwTFqbCNlT+343TjAV2R0+hA67/qvy31dFdFRWW9Sb29dRatE+bLBfv7932qa7Gi9jUESbMBOrHUpsbYCtv4QVUVDVXt6+0a8Syt0inaappGYxtfdASn261dfF1Bjes9qDGOPBuRyYA8HTDIBxzOCybTqd+HbFWwd3CEtYYkyyj29lgWBUopqrLynRmiLPLhkET8rg46zRilCaYx2EwYT2c+k0o5lHVcfPJJRuMJVVmyMZuysbnJYDBkd2eHjfGIROc427B/sM90OvHxvBZu3rzJ8XzOhbMXuHnrOu9fu8p4MiEfjRGLT9dlwOHRHmmS+sQnfNp0JsJq1TDIchpjKVcF09GEydBvgvnduze5s7ODCzzYqlhydJSAbUIq9gGLUF5T6TSkR0M+8HvILZYL1usNhuMhu7u7DAY542HK5voULUJZVb64UNNQ1jVVXeGc59SHwxFZnkKtyLVGx4qlTrWhfc5aGmPDnPARKGVTIqsEi890rE2DMY6Xzp37WAP1kxIXttJx1oIxIA0mTGTtPCWlROOcT8cl8Vas0rrlYWOcctR3fWJF/x79O3ouMN7XOuFwkXP/5g6JQD5dIxvmpBeeYmxq3nfHXL78JF+YrbG7+x7FasIZtaJgl2T/gGFpqKo9klyzXZSUDoaJonaGQZIwzgZYZ0iShEk6YFmXJGlGqhP2jhdhi50ly7omn66TJgnzYcKhcZzHYFLH3NQcKzAOVs5Qa8MlNaS+dsD1++/TlAtmoykOWJYlSZ5jHCTWoTJPRalMEDG8d/M73Lv9JsvFbZTE4vk69go+ZUbokhhMWw2sA0mf5afwCTcuxhIHVPXA3ItM6MdZt6pvqOgWvlMImQyYpWtM8iEr03Dx/HPcun+d+8c3MOGK3nLpnGodg9SPbni0PBJ056uC+bLEimI4mjIeTxhNZohy7O/usJrP/eaLOiNJfdwotaYpG5qqoa4bqqLxVcWspaJGpwqlEq8FieAMNCWYGl/HEkA0jXVIo1CJItUjSA310lAry2CYUtaGNEnAFjhjaOqao8Wc5XLBcrXENDXOOZom1P9NQzEd6xgOBowGA5qqIE8TrDFUTeWTQLRm++wZhsOx35JFK4qy5PKVK5zZ3ibPc/J0gDghHw7Z3d3DmZrt7TPkeY5DMduAwXSNu7ff571332W2ts5oNGFZLDB1jTEW5Rps8Pj7LWAsxvntfKqqJMlSb85WJQYojWFVVVhT8d7Nm8znc9IsYTQcoZIMZxqcNcznC5+Ygq9gu1wtfXKiDiu90iRa+2JEmQ85m82mKKUo6wZjgqlkPFBrLSRa+2pM1heh16J93LbOSBLlQ8bEQe18PLYpkSRFpwl1XSPFClsvMViMdYE3DfnsP2aJzi6fnVdjReNrKYTJ6Py+b2It1ik0nvN2+L4kFF9RJwhd6DQiLx1/GO/reeSqybm/V3BkG+ozF5jahqMLM7aqQ24/PsPimA4qfuPgJm9c/RrbkrFML/DSULGxNFRVRYLFNhkpGqchHQxZloWnBLWiqhvyPOdotUChSJWmWBVorTuryliOD/Zww5y9GyuyKwmD997iwuULzGYThkqRJ5rD1ZKr+3u8efMG33/jO5ijBqkVa9MBpipZn4yYDMbUzjAZTsnGE5I852i+w923v8dqeRdxDXH7nJZDbXusi16KoOrhNXKlrqMG2gQO117HiwqarFes2vfRErz9l+K154SUqVtne77JpeFFHn/hVV584efZP9rl3vwa3772Nd55/zscFbutRts6ReNFJWrfj5ZHgu7xsmD/aM7R8YKqMaynKUmW0phtmqZhsZhTG8MoS0jTFGvxu+pawdSWurSURUPVGEpjaYDM+sLQxoVUSQumAls7UvFbXsdam
MZ4xw3OIC4nyYTxYERdNSyXSxaFoSoKVsWSqiywTYmWUBZPi58gTY3WmkSnGNOQZSlatI91TbQP5Bf8djNp4ou2O89rqjzD1IannnqK9c0tVkXJjfff4jOvvsaZs2eoKh8qp5SwsbGJOMetWzf4zhvf47mXXuPZ515gOp7w7W99gzRLmSUznHMUqyXKNmArsrUZic44Wi7YOThApxqM8qU0g5baNA2JTinrmixJSBLPqVdlTZI0ZKKpreVgfx/jHKtihRIVQvYGoBN0mpAo7bcWMhVlXbKzu8fa5hq7+7uMxznjUYZKtAch17AqK+rK00d1Y6iqklWxwjQWJQmJTnyNYw21qXxhn8ZSlSusLLF+O0KszzzAYXFhixS/Q0P6kQP0kxfXA16DsQ19bdTv9hu3N49FrR2iHd3uA74soNfQ/FX7DpyT9ILFWV+oyNSO2zf32TlW6GTINNFsTTa4dbTkifUt1O6CpXUkZodFdcATRcWGqyncLdRqwAWdsq9AuwSdTRApybTFKkftSqqy9DRPorGNMMq2qKuSypWkKqFqTK/mg2OQaioaWDXcuHmVOtXcvJ/B3h61bdhralKleffeTfbev4ZbVGyojDOzdabpmGy0zlAljAYTkjxjMFrD2Ibbd99DqhUbm+fZOnuJsq7RKNBgxdcVsbbCNAVNXWCaEr/zhgFXRmPf44X4sLuWRXfRwRULkcc+l95PRzV07IALqKnIkwFnhheQXcX1717lwucu8/ITP83ZjXNszM7xuHuRL734R7g7v8v33vsa37v+Td658QZFM6exta+Y11br+yGjF6pVxe7eIbfv3GX3YI/ZbMpwMGQ4GDMZrzMYHlDXR4j2FcFwjqapqUqf1VRXlqqyFFVNYRoaB8ZpmqB5+YQ2h2t8B/gwMr9zryQaqzQ6yQHBWcXabJ1hlmMHYWugxQFlUVDXlde4QvlFsYbxcIxZrFAI6XBEWRQk2jt8mqYJ26dbEpWgkwSM4fDw0BMaSpGkKWIazp+/yMbWGURp3nn7u3z1136ZTDmeeeZFxrMNEM1wPCJJMm5efYvvvvFdvvGtb7F95iyz8YjtM2d59bXPcPXqNRxw9/ZNyrrElCX1ao7WmicuXWEwHGON47gsMFl8h8JoOMIag1hHnuVU5ZLpbEZTlRwv5jjbYIzCOmjwoXoClGEb+HpgUInGWl8uM8sHVHmGcY7KOfQwbbMO58tj0iRhNMzBegfp0fyQuqwxBoqixFQVmVZYp9Fa0fFY3m/bhNq0KMHY2m9l3zQkWeYLJynlS3E6UDr/yAH6SUvUVqLmqaxFPPuMc76UoHIhFTg4v2Ievw8xCrWFlXCy8lh7h/bHuZBEZBqvWTcJkDHbnFBZTTJMOdiYsNvcZX8Tru3fRA7v0Qwv87l8xEE6o6gKJommAXKVkieaCqgSx/zwNsvFPcpiHqIiQkyw9VpfObzIePMpjDNUVYEyXoNXSvmMudqhVMIoSZgv59y99i723m0KV1C7BusE7Qx2MSc5OuBiPkSLZnu4jbUpiR4wHk0QnZKN1/nii59nmI6pGstgOMDWPkLEKQ1aIQoSLEniyMSxqgoOVkfMF0fUzYqj413u7b7H/f33qZZ7lIsdmmaBbjVc00YUxH9bcOWksywmSLig7CmrEee3Xbq8/gSPjZ7g6vV32Jhs8eLLL7Pav8vt2pAOp0iiSdOMdXWWn3nhj/EzL/0Skhrm5RG3927y1tXv8L33vsX1+29S2/ojx9wjQdcYw87eETdu3ObO43dYn01Rm1sgQpKmjCdTbz7i4x2ruqBYzb13vvH1BqrGUtWOqnY0kmAloTIaCyRKUOLQDlKg0gbXQD4cUDuhqh2mrBkNR6xPJuRphq/F4Dg8OsY0NUfHh1RVyTAfUBUlTVOxvrnJsqg4mi/QfqMuEqVItN8RN1YgStKBdwY6oSwKYqrfZDLzoVRZzpkzZ3yltSxn6+wFiqLmv/qv/wueefK7vPa5nyIbjhnN1vn2t
7/J3/qbf433rl/jzJmLjEcTlEqoqpK1tU0uXKjYvb/L5sYGb37/DYrVAuMMzhj2Dw+ZTmdsbG7T4HwUg7VtsQ2rFcvlwoeNWctisSDNUkZMqOsSqH21NWdpqopEKcrgHPK7bWRtmcnGNohLKOqCYTPh/v0dXFNxf/c+m2sbjIY+hCzRCsEyGqZ+26S6oalrsD621G/E6MOqlUrQ1mLwJT8TGYJSqCRDVEKSJAyyAWmagQiNCangSfaRA/RHJTa0X8SgnPdWO+tA+4mrlPKxvCZqXd1Pu+Ov9FWpvoQoH9t4R13IVLu3e8TN/YpsYsjH64znR1Rncy4axXR+yPbxDoOmQZWHqLrgscGAlc4oasNhUzFOx2TDlP3d97hz+xaYVesE8g4hvxuxpzct88X7sL6FnHse2buNPjxGWYcK4GybBjweMhFFuTyiWu4xUDATSERIRKjqApPmjJQiyyaUxjDIU/JszLJ0GAeXJ1u8deOI4Zpjc5zw7TfvQzbAag3pgKTxu5tsb2+Atoy0ZqSmnN88R7FuWYliu3E850pMU4MqOdi7wds3vs7u3dfZuf0Gpl6gXE2vVphXAgJXG6MPJGq1ErfRTFjLN/jMs19kbTzl+jtvc/31q7z/9vv8gS/+Ak889wpnLl3mYHfO/s599nf2KMqGPB8wXl/j8OCYF155gbNbF7j02EW++MSXMX/I8J23vso3Xv+NjxxrH1FPF5aLgvv7e+zt3md/d93v2pvnKHEMBkPG4zHlakVdFaxWS8q69l5v57ffyVKoas93JCoDl2AaH3LRiKC11yBEGfI8R4CiMMzWt5llGdaBs5bJaMYoT7FNzc7iiLKsWMwX1KWPTtjd20Uh5KMJ+XDM0WKFbRrSNKNpLGmSUBZ+c8lpPiDLEoajEcVyhdiG6dinpw4HPoA/STIuXbpIluXoRJNkA9Zmazz73PP89m//Jr/xta/y+vde50tf+Vnee2/G1Xfe5P1bNxiPJ7z08qtsnz3PcOR5YRkMOKt8vYP7O7fIB0PmywWgGY3HmKqkqmuOFwtqaylLTw+kWUaqM2rryPPc13WwnsY/Oj4m0b6u76paokTRNA1NU+NEkaiE0WTswdtaz8EOhwxHI1+cJ81JRGNWNffLXfZ39lmbzZhOpygRxqMB01HOrgjWGG8N4MiUr4uRKKhD9o+v5yLUTdhpVbQf4JLhnPYhWcZhdSiNaCRUN9CPHH4/Cuk2dfQUgwlFf5QDVIKJZS+j+mo8aWJUH3gt4gzdDrH9MKYQy24rTKyvbAzGWMpFw+Z0k3w4Ylc70jMzVsuCe/fnPH92jZ8enuVQHTEAbKJpjCPXCSrTLEk5avY5uPMudXnkOdKYhhzuK6JitcIAOoaj22+QjGa48RqDZUGGQde+9jUu1Dh2As6Q4EiUL11KqAox0glzFMPJiHlR0BhLkmTsHy852N3jeGHZ2rzMrd3vMj1/meSmA9dw0wxZDoYMjGJVllzaOEM5TGiahu2qYisf8OL2Nm+vDqgQtpIBZ0W4awSXZAySIYONNX5280U2J5bvvPcbfPO7f53b174G5Y5vd9wUopcYETMBvV9OEKf4zFNf5u/9mX+UJy+9zNvvfJNkkfH+4TXuD3a5cPGKj9d2MN1aZ1WseP/eDi4ZcGVrG51njMcZh/fvYOuCbKBZHOyT6oRzco5/8Kf/8Y8cc48GXfy2L0Xhi3IXqyWr/z9z//Wr25ql92G/N8z4xRV3PvlUnVSpM9lkd5NiMEGLIGXDNgkYEmzDcLjw3+ALw74VDfjCN4YNGLYh0LQtWIRIyZSaHdhsdnV1d6UTdw4rf2mmN/rinWufU02pNiGZrp6FvU+dtTb2Wd9a8xtzvGM8z+9pd6TAO0GuFWVZYa2h27X44FC6oJ7kOGMZBotUKRokJf6m+J04Rr9kQiJFSqSoCo0eO6jpZIbWOT6kNN+6yMiynCzTdNbiBocZBq5WK7x3I43LkZc1k7xIhdhZsjwHBEJFhq7FW
kshc2SWkekMIdP8MqXv7uFMWvg57zi4cch8bw8hNGrc9h7duMXP/cKfQ4jI08f36ZodFyfPuHWvoMgLbt96jXe+9gHvffgt6ukC612SqVlH1/UURQKLGGPY20uJwSJ4vB1ou5Zt22JdUnpkOqOoa3bbLZkuEAicSwu/7XZD36XCrIvspUkl0ylWOssLylFHrLMkr4vB400k5BlSKHKhyRBM6xk+eLp2zWl/Rt8PFHVJ03dsW0WeSRbTGf1gcDZQZtkY26TAe5yT2Cwt2QbjQPq0RBMK4RVegjMBMwY1xlELK6VC/2tsev9NXyk1INHXEsg8xe1E6RPTWcrkUsO/TAkOo9LhOpY+xkhUAeHDl4GVL6+ID2mkcF1000go0ro0f+2iQx3N0aLHbDZMwoahrTheHHJzmmGCpzeOjTHp1DWpGNonXJx8RsSOCoAv1RPXX9P1kuh6HpL6PI998Edkdz+iny+wzZYKgXIeJUfhlXMg0oM1uoAetbIuOEwUGOdx2x0RydNnpzSrhovW4fMlh4e3Oe06rlYN0xgxmabRBaUymPUls6ObXO0tWJWazBmqrWCuFb89WP54dclxnvNBVbF1HVud8cQ5ZBf4MBQsdM79wdPkJW+88eu8dfdX+PjRb/Nbv/V/ojv9BDBEIqO8/ydsCum5EylExrfu/TzH5V2qbM7N/dfo5ud8dvF97t66yzvvvUehNasnj8mqCtttcX1L53oeOctH3/iAt997GzsM5FVNOa2TwYoI0WObV0OcfnoEewzIyJdU/QjeefquTakOgqSnFQl4XRQldV0Qo2ToOy4uL8FYdJays3SW6EkhgpCCIh91KEEgtCTPcup6Sm8SF1eoJDFaNx2ZHFLuVgh4BC4kCVPXNekmdjY97UNkMD0hehL6UhKtwTmL0oJpPfnSCRWT1rcoiqSokAlbWOQZSklM14PQtLuGyXzJbG+fd7/2Pnt7ezx78oBuu2E6m/L8+VMODo/54INv8/pbb7PcP+JqtWK7uUIryeNHD2m3Ww6PD9F5QZSKTOVMqxoRA9ZptrstgzNIlZIhtNaYfgAEu2bLcV2Nc8fUeTnnURrC0Kc3ig8ImZONtuWiLFMxQVKUIyVOJ3PE4Czg6LoWKTVZUVDVe8SYFp5X52f44JiUBXVVcLHdJieOc1RZTq41RZ5h6pIqTzB3Ykyw+Bggc9TVFO09zoGIBpcJnE0PB1yaM8ufvQuYawZquA7ufWnzTB2fFOJL6dv17gyINh1ZY0jz8hACUvqXkPaXfz8QfYoDupbTxeAgQJlliDzDacEsatx24OtlTawySiXZ+oGFyFA+MNElQSuGXHK1fcz69JNxfHlNJf4SPB6jIIzsAKRE6ZI8n1DXC6azAyazfQ5uvEk+O+ZHjx5y9uQLdutztHEpsyw4EA4XHVJJhvVAlRUpNVtKrq5WXF2usU3P1XpHyKfkt+6xODoizgq+9sbXeG49W5XjgwUfcbqkFwKbSXrbUlY5OYKTaDmVgXo24RemJU/PTvn9uGQxmXOk0wNOC8FVYzifSnYCVhcN70wKXptNef+dv8RicYf/5B//fS6f/j6jTDyxkK9JYFwrGOD14zd449ZHdJsdbX6G2HXcjwnRAQAAhVdJREFU/+H3OXn+lHu33kTninw6pW0Gmt5STRf8rf/uv0NWT/G9Qeearm0o8/R+dDawvHkDN/SI4KnrV+8pfmrRDTHiYxhxg6NV0ifborURpfJxrifJy4KimFCVsxQe2exo+obBJqybCgGlr6MM0rG0yHOwAh8kZZFTFRVSQpErXDBYN9D3O2JU1EUJMeJdwAWYzhe0bctqvUJKRVWXTOazxEjIJLFJpH0pBX4cb9STKVmRp4XJKJ0So2F76LukvCBy+9YdqrJGKsVkMqOazNJDx1ryvOLu62+yf7CP7Tucs9x74+3E350uyIqKvmspM8n0xjHb7ZYbxzcoXn8dJWHvySHPnr3AuQHvSrIsPVik1BA8eV3Q9h1ZN
WXbtZRZPkpo0tGv2TVM6hl928GoHcyypGUu85KsqACwoypDK53mryoVeiE0VZEjlMJ5R9tsqaInywscHhssq80aEz2PTjvKQpHneZrnySxpdbOMWVUzqXImZU6eZePxVOJiSA/UaqAs67T8sxGtBrRMtnBCeKl1/VlfCSZ+3e2G8WNjnyRSR/vyxJ3ezcQYU0y7S0sqEZLZ41/pcq93OsElOVpIScFy3B2goMNgqgLXt0Tv+fDwNZZasAsJiH/ebCCkh2evM97+zq9yfHmP86PXkoHD9gymR6sCLfW4b6mYTvbSvVvVTCdLJvUCRI6PKX7LhcDpruX49vu0+oCwXtNdPGPYnmHbS1ZnL+jWZ/S7DV2fZJn9rkEogcsUsaxRqiI7PKIvNG5Z0s8rqsWM7z7/hH66x/zgNmU2wZnIeb1AzKe8W5c8PD1lXeeslke8VRfMzp7zI7Plu+Qs79wltC0/vlrx/qLkG8WUH/mB3VQxdxZlB2LIeXzesJOSPAQW1bv8xd/4n/Mf/0f/a7rL7yNieKnR+wmpXows6z32FzfYnx8kyp+0WOO4feMdfvXX/ypQ0GwaNtsdm/WGPCswHm6+/hr1bALecX7+nHy2RBUVWiQBQbu+5OzRFyipuPH+t37qPfdTi24cj4ND17PdNfRDh3MTtJYMQ4dSZox4iZRlTT2bMa0WRB+SPCvTCOlQ0hFkAOlHuIpGKkVVCmRZ460n1xXdqC1UusRbR1lURCHwLrmXBuPxo4tsNp2yrWv29pcoBMb6pIGVimBiUlSMxVZKnWbIPiJUhhSOqiyIEcq8ZOg6gtSgFMv5gjfefIPF/gHb9Ybtdo2QkrwoUSrDuQFcJNcF5aIeF0mKSEgPBGvRSqFkgbMp1Upnmq7vWMxmvPPW2zx68JCh3SJiz3Z3XYDS+MV7jzMDfd9iuo5ca8QY0Q0w9EPq4lUq0lVZojPJfDqnriZolROix9qBwZjxqe8JdsDGgBUalWUInVOVFWVZpqWZBGccQQhiVGy7JBNsBosQHc47ejtQZSknrioKZnVFnRdkOk9GCaWIIrEmlE5ysrwoKPOSXGVonVNonZaX3iWb98/4iiEQhUTKQAgiYUFD8vMHQtI/x0AgJIWO1NcT4CSBixIRUzaclOql4+lLSWjCNYbgiCHwMrgzRgqZLNJVUSN1QV7lrHxHLQv2VMbOtMjx5z84j7x9m9fe/iazt4vEvhDQO4tzo3tL6VFrHZNEMjoYT0/nXYcP6T5r+paN62iC4HS7Jcicpy6gVMVuEGiv6AfPs4dP8ThklRGLnPz4DbI8w2Epjm4y27vNnIynTz5G7ZfI2zVv3XuXZ1884FnRMxxEvnbrNV4fCv7Pn38Plx/xYHKX73zzIy5ePONTf8V5X7B3tM9bYeBj33PuIn/34ICi2vIjLdmWkV8uZrxYbzjx8FE5o9WKJkayGDmUkrVxTPff4+d+7X/E7/yj/yWxO0+n6Z+QcQW0zDla3Eb6mLTyCC5Pzzk/veSjb/4yt955n27XMAyew6ND7ty9S5SKzeqSJz/+Y/YP9wkBLk9OyDYt1d4ezXrD4/tf8ODzT1HAcrnHr73inntF0U2/dV3PerWlaTrmc4P3kr4bCN6lZU6UqKogzwuyPMf7gNISpdOLdc5goyGKJK4XuiCSY4YWrQWZrglAnmd0fYvWAakLdmPuGAhsZ2j6ljzL8M6z22zItWK5WLJeb7HREnwgkwmk41uP8Q4lcnxIi6BrglRVTijrmqHvMNYxGEtVFyz29vnogw/ZPzhivd6M89EsZb7FSFFEotRIEcejokIXRZKgCJlyO4VOXbO1NO0OYy1FUY6nhMDd23f49V//df74e9+l211hTU9AUM4WrJvneJu6v75rk2idWQIJqZS8oTPFZtvgnCHXiklVUlc19XTGZLbPYrHPpMgpMsVmu+HZ80c0fY+P12kWgI9EdR0jk8wtSkjKvKQxHbPJhMYbLprkPPTekmcFzlnWmw1SJ
ltspjVFVpCrLKH6dEamNLlMBphMp65KqKR1zZRKNmOhCKS8uJ/1FWLKr0unn5jkVfJ6XJAyi4kREb6cLcSXhtRRIRDESyDSS4n/tbKB8NJi/DKheZy36kyycR6zbXjv9UPmRYkxAyYGVuttisYSEoSkdZ4hl0QdGYAQM54MhlwVZDkMYxyTRWMEtL2hEGCkphssWmQMucS5QCMF97uelWnpO8Ou61kUEFtDVIYnLz6n354g7xygI8Q8Mr13D3Vwm+n8iJtVThcC56bD1XN+/v33efj4Iadtx++dPuA7H3yTm7bjB8MpO7tD37zJX66+zp+4LV+bZ/z3bu7xYJnzf7j8gm8sj7ija95TJf/X0wesc8UPhOc3bt6gOXvCE+N5PXp+8eiIf9E3nGcFt4XCt4alklRacD44TBC8dvsXePj1v8aTP/oPIDpUfDlVQCB468bXefPw6/StQciWYHu+9y/+gMnyNoP1mN5w6803UFIiQqBdr1hfPKGsa1YnT/nRH/4eg9d84xd/Faczrs4vkEpwdPcWB3duMp3NsGZ45T33CvVC+qr7wXC5WrNab1guFlRlgXPQDwPebtB5ybysUTJtuGNwBOdSOq4zNN0GGxxCCLKsRMSCPJeEkBNjgmAYY7Eh4kMkOoUdzFhkcqQQBB8ILnC+usB6yyTXWGdp2wGtc7RzKJUxm08xg6Vtd+P2Ns2Rtcrohx7nPdPZhExqnBA0/UCeF9STKd/61nf41re/jRAS65IBY7fdJpSlkvR9SEuoskArnbpTJbHjm8Nag7MWaweccWipsCg2mw0A00lNWVV89NGH3Lt3j9//vX/Oo0dfcHF+jsgL6vmCYbBkRcV6vWJST5IbTCbhfRglYYok35/NZpRFmuFW5SQ57SYTlns3IFimy0OKsuLho/sYa3CjNlnI5FazZmC0/zOfTgjWoUNC+R3O93DesW4b1rs1nenJkEzKmsFarHV0xgItkqTTzoRESkFGSiRORpdR0C7SguP6TRBIp6if+XVtPWfscKUYRw7X9NZxIeXDuJiyY3erUFGOsVNfnaNey5QSdB6uC24YZ67XhRekiiymFbO9JXmhWVYTiryka3cIJUEXBBKZyzrH5WbDH25P+Ms336H1gqwQ2ACbwaKz9DX0gFMCp0vW/cCZs0zziq3tcEPHRfTcb9dMJyXTTNKzI3QrPr3/Q2K0HPoeVUFVHeJiwNUld+6+wToGrg6XxBv3+G+8/i32y4Ifrp/xZLPiLX1AyCout4/wyvPZ9pS/eecDfv/Fmn8xbPj++SP+Z/c+5IvHD/hiGLhvez5Y7vPfj4Er22PigJ7P+XN7U86DQSjLcRH5927f4nt6BaIkE5qfI+cPmgYjM35tb8kZlgxFjSH0BqVqvvbRv82TL36XuLmfIOfjiUOKjA9f/2XeffeXqYuaoig5eXrF7bc+4tf/9t/D9i2zxZzJYo7pBtxgkqbd9uzW57y4POOHDx4g9BT9+WeI4LlanfP8yWMuzk85PT8jAF3b8m/9t//uT73lXlF0k8c5WMfl5Yqzswv2FnsIIV+GL/ZdjzSeampTFphPR2xrUrKDs5G+G+jtgM5KhmFHnGqybI4UE4g1Tevoe0NvDEQwJnXRLga83abstODQUtLstvRdw0ZEhq5DaUVdlhwsFykBFzi7OkcLlQA7wVNkORbL0AfyLENJwWAHrPPM5lP2lvu8/fY7fPSNjxBC0I1c2zwvKKuKvh/YrNf0fU+W51RVjSBFqkepMENPnuV4Z+n7lKphjU1gGKnSph/BbrfDWcNsPuPo8Ii/8tf/Br/1m/8Zv3f5W1jrkkpB5nTtDq01CInzjrqu6fpmjHxhlN/JpG9WivlsRlnPmE6nLObLNP9VCq1z9g9u0g89Z+endMMwzo8FWiRZjDU9WaZpB0twJtmb9bgkRSBDTNKy4AhSYI1Jc9txSediUi4YPzCMoYsiiC8XUl+JEmcsYOkfkfhnoNO9jtS8lo6FEJAq4VZkS
OAmcf2/60UVqdsNJPyoEOl7Op5pxyjvBEoWY9bWT4TXj0PGXFkGY5lkJTcWe7jBs2tbmvWaGCxIgRjt2zFEwuWKf/7JJ3z74BZ3R3BMkAVGCjYuIVBVFpBKYXuDyku+Xsz57OKSx8KgaoUfBt453ON7T36M8o5C5WT2nMmxoj9dsYmOxWtvs1pdYEVLPFiQ33uTm0VGVwRmVY7drfnBk1Ma33LTR2ocs80Zb5geJSV3uMI9+YQ9s6XM4ThbUDcN87xgNp/wpO8ZLLxZH/D72xf8id/wB2eW//HiNv/h6gm/31/yubf8uwc3ub11dMIgdct39maEqSTKghBh0kceDFs+0BVhWhKj5yZvce+dv8jj7z4hYF8qOA5md/jam79AWU0ZtmuIFms6Fssl+zeP2V2cE4JHEilGaaqxgabr+eyTL2h7uPPau3z82Wf8P/9f/w+mVcWkKDk6PuaqaXh08oIYSdySV1w/fbwAEAXeB1abLSenF+wt52gtKPIcLTPyrCLIdFsF6/DCYswwRvkotCoJQbNer8iyyK2bbzCtD5OlMgiUSvNaCXjr6Ic+SWuCGBMGDMPQw1j8N9sV1iS773I+YzFLS66+7xAxsttssf2Q4tFDHNUIEqULQkw3pIsRjUwMgM4wf23BL/7yr+CGnvV2O4JyFEg4PTkhxsjQ9YmN4BxRplRUPW7+u6FnUk1QAppdQ1Gm0UiuM2Sej1lyktl0koopCfJz8/iQd99/nz/8o++y2TYEKdBSU+QFtijw3idXmR2Pjt6BTOMH7yO77ZYiy2i7nqyYIUSGGQYy55OtOSbIzcHhTXbNFmPMS9G8R+KtRaosfZ+Cx7pA8A7hk4X41mI/6UmdxYdAN/Q4b3EjQ0FCojeF6/l/SnpOmslrgWjgGlMSx27vS7bTz36mC3xpbBi73hgiUQSiTB0q1zyGGIk+jN9DP8aiw5e63GvRlhiXOfCy0Io/VXQFRGkwdov1hlKV6MwR4g6fZYDEj4oQ55I8rWh2XD17zP/+D3+bf/fDb7FXVUgROJ6WzG3gxMGtcsL3L59ze77HxgU+257z3o09DteX/MBccWM5w3U9r00F29WKk6eXFGrGDbHgE/OYUGTkueStN17ns/6SerHPDZUzcYJqu+Io7PDxjO3Fc6wfyKViqwv0tmVfO5SCZWkwV+e85nriquDmfEV3fsUsBrabKevlEa/NjhmC4MIMfL2ouEeFCuB1yTTz7IhctoYrM+cfmBX3qp7/SVkw6xtcPlDoCTenJa50DDZQxAyiZCFnfOPbf5snP/7/EHdPQUAuK/6tb/83ubl/k9Wzh6yeP+H0xXPazvFrf/PfoW92ODOQaUW725JlBSEEmrZhcXSH7/z6La4uT/nkT77H/t5eAnah+at/7W/w9W9+yHf/4Hf43/79f59t03Aw33/l/faKmW7kOjnTGsPVbsXlOvn095Z7FFojY4koNEWRIUTEO5s4CMGR64wqr8nUBC1btKzwVoBPb8Wt7aBpSFJ5NWp2I7vdBusCUmm8t9ihx1iDNQPWWEAkFm+E3XZHURUoqVLBdhadZRR5waZpKIqSqiyJPmCVIdOa6CNCC+xgODw85M/96q9SVQXroaczhtVqhbOGajJhMOm/610AIRMq0SZ5mvMOXeSICLYfyLOMECM6JoC6miiGtk2hjFISXImoJCGmB8pms+XOnTu8/8E3+Jd/8Ac4azDe4oPHWDfKKyVt06CVorcmwVZCpLc9UhQYa2m7lrJsGMwE1QJljSAyBIdgQllV1GWFHQbWu11SNYiCup5jncVZy2wyQ9WKtu1ouybNk7VimVds5AYvJbKoaK3CDk06ThMR/nojz3WNTTKdl0e7+Kdq68iXTWv9f42S+G/2+hJeEsc81JhmsxFEkMnem+YLScKAuo6Ivf4HXxbb60HFONP90pnAV78JKRoGhLDUWRqVeem43K642qwSjIQRLiTTkm/oB7JMMTl5xiY6/ne7jm/ce53ZckIVM
97Z28O3LWvfcFNJfrC9ZD8vuJk7/ujyGVVW82425T9/9ohLK7nLks35fdrdJU3ccrNcUsxmIOFuMeGWKhlsRd1bDk6f4zYNcnXCylo6G7k4O8day1mMPELROoeoC+azCdXyAKRGVxnaG0oXWW22TL1HXOXo6R5+/y4Pdcl+KagnE46LPaITHMeBXAv2BMxtWpznylBEzcPtloqK326uuFd6/kqxZJ9kbMp6yefNljwrOb71Bgf3vs35j55B9Nya3+KdOx/QXVxy+sXnxABlNefmazcAT7u+wvUtxWzx0rGqtGQyndD2FtN3CAGL/X3eEIrT03MuLy+5Oj/hT/7ljtXFBb/0S7/E+cUldT1/5T33Sp1ujKAyga4gyAHjGpp2RZ5L1GSBVinRQUuddGveY01KbiiKgvl8ymG3R28a+t5hhpYHDy+ZTRdUVQ1BEETS4wk0Inpms5Km7dntdmybhqEfXqoCpAjMqpLppEZrjfcBrRRm6OjaFqkzhEqOKCV10hNLgVLZ6PBSEALb7RYfIh9+9G3efPMtdrstw5Ai1ouR8LVZrdOMGUHTthRFQWeGEUGZ3oNaqsQkcIGd31JPZ/TDBjN26Y60FJtOJvTDkJgGbOnNgnpSc/P4iF/7jb/Mp5/f5+zkOVEkhYIaeas+OpxzxKjHNIIE+1BSE2KiMQkpsdayW10gZ47pZIrpG5zt0aM0azmbpQWf82x2G2xIwPiymuK8Y9d3HB3so5DJrNG3VOUEJSQH9QwXPM1uhw8pfn6wCXBzPfcnXh/RR1vQS73U9a7/GnoX/9U6/DO8fiK0cIzkiTKk0YCMI6A+mR6uRaCJLpHi5EeWD5EvA1ziuGp7uTTjZXV++d9Ktd4jY0uzfs7stXcwKmPINEH6tMMInt5agoB5VbDuDRPXMjk5Y9P2POi23FeSd47u8Q8efcwHi1v8YNjwy7Nb/IvNc7oi5y/W+/z26hl9yPjlbMrT3SWbvkcNkomPXITIPFiWQ8sdk3jCk3ZFDArOn7PebWnageFyzdX5JabrcIMlOPfy5y8gGY1ERGnFF3VNMamYHO1xcOOY+vgmpihxZUERMjIb6DrPygXyWiOqCj895KmuiMpTV5r9cpa43NHxYW458IG93NNhEPSsB8cuVFx5ySZa3iknLIuCzEmMyXj/m3+d//zT36awDe/f+wb7831KUZC9I4kucOtrH5BXJauT56zOXpCpdMKUKnkOwghA6podbZsQqftHt8jLKYMNOBcw3uFXa9ZXG7TKyXXBrJy88p57xXghOcSqScbeXsFsnqPKCMrifI9xBVplqJDjrcFybZ7oxiORpixyDvYXGN+z263pTIcuAp1d4UJPkZdkWY1EJ06D8RhjE4PXpTfvYAaiMwiyFD4p0rKjbRuEELSdS3HqvaGo04u2zpIXOT6kb5AU4aWUyceUPZbnBbfu3CYvcrIhObvsMNB0Hbu2STZjY1B5RhQJ4O6HnrwsqIqKpuvIioIYI2VZslmvkrStGxBKYrylLnKCrvBCYMcHR1bmiOiZ1DV5llHXBUpJuqGnqsqXD68YA8YYBAITbLoZnMNaw2I2o+9a/Fjcje7J8oyu2xGcRWtNCJ7dbkOIAodEFzW5SakUxhmM6YhCUJU1CsFgPYd7e1hvaduGru/Sg0oo5sUEoTPaoaU1w1hnIziPM+ZlcYnXT6PrsvoyuEqQ0kH+1QTWn/U1nvb5ahd+TR1DJE3uS1uvGB8hMYwJzfIrj5Qx8Swmspr4Sid/rd69zpEQ4ylASUPbnfGHTx8T+wCDQTiDFDFljmlFHLGek0yjnadpdhz6wG695WvLGc3qHJHnmIOOxuy42PVov0Moheo8+80FNggmbCnPzgjtjoOYkW3XuKZh4SNVHCguzlnvVjzf9Tzc9VycX+JsAJ2R6ZxYzAjFHOkdDD3RDURnwXmCdeA9PgR2Zs12teHq+SkX08dc3LjB5HCP6mCf5f6S6iCy7jw9ULuMehgoes8LY2hFQ
JQZcTLnGQU7EahKzayekk0ij4PluPAc5XN0FXnsGx7FFuIN5hH2yhoqwb27H1Es7nDQXfErH/0F9pcH9FdrTLMieI81DfUsLarPnj1luTygKCsgpjTjekqIoIuCvarCWks1mTJf7nN4fJOzkxOKsmBzecm8H4gI5pPpOFb76dcrbMARlcNklrNcTtjfr5jNc6Z7E5bTJZmosC5gnaFtd+Q6xzhD17cjv0An+67OKMuMwWegPT5IlCqADILGutQttt1ADAJj00ItywuUlGgifbMlBo8dEpi8CekHbPoOaw1Ej5CS/eWS1XqD0JpkY/ZEBH3fIXVGlmU0bZu6YqlQWlFVGZen6e+ZzqYpacJ5XDlqNH3iIVRllYAuWU7f90QfybIsmQO+ogc2zqKEphQKoTSZLjCmR2cqJV2M+MTV1YpI5Nmz52w2a2L042tJm32BSOGVQjIMPZmSGDOQ6EoZVVmkzstbnB0IrkBPpgymSzpfaxBSUZYzkBJvTbIOZ5rW9BAdmIEsy9FZQW8M214zn83ZbHc0bYMYdZ8RAVImR513KenCObSPlFJijUuLSxlHov6XqaupynwppwJSJ/lnYZH2klh1/ZCICMZuR/ivdKjy5WT6OsYgRhIgXoZx1Ra+8sBJS2hechu+fO1fAZal32xDHLYU1U3UeAIxfqAfks5aSkmpNZ014CLzomTXtFRak60C5rznw8UBzekVX8tzllXLg37FjWpCzC+p12fkQpLrmr3nzyjDQCEz2DaI7Tltb3jSO85Pz9l0DRGNjBlucYM4Bk6Sl9hgwEcKAoW1dH2H61t83yKsRRibinBIOFYfArvdjqZp0E8LDm4cE954DVrDOi9R04rcTdFRsbUbNt4hMsk0RrLQc9FfgQrMlGBvuse5XHEZB0RVUk5vcFl52syxnCpy5Xjic07ZcSgje4slh7c/Yv78j6l1ybDbYZxBVyVXJ6c8/+QTzsvHrC7O+fgH3+fNt99BZwpbVxRFTVQZxXRGPp1j+oGJHF2XxuC8pZzVPPn0U3a7Dfdeu8ft4Hjx7OmfsoD/F1+vdKTlmaCqNLNZwWJRM59N2Nvb48b+TbSoaVtH220ZuuZlAXQRiqKAESKspGBaT1G5wEWLsRHbl1iT0Q0dXbehHyx9143H+YAWmqgkPjikiDjvECGg8gxEpGnT3FGLFLEhhKCaTIFEyMqyjF3bJf2kd3TDQClT6m2CkyTA+cnpC569OCUrMvww0O82L8Mkb9++y8nJyXgsdzibdMmF1mTjjHRalwwmvbmqukLqHB+hruuUuICmKAvKIqdpWprNhlu3brLebplPKoy3XK3WeD8qH6x/GcWNlAgfMNHTdi2TshwVDMlKWk+mZFqRK4mS4J2ha3coAb2xGG/IVI5UGXmeE4jkeYGxhlwqejsw+PSwElJTj3xhgqMuCrq2SZ12DBRK4aXADAlwkoVIsHa0WDu0EGOeVSrOYXQDvSw218UtBFQICekZf/bd7rU87PoSIqkQIkk+Fq+lZCLhKiVJsys8RAleBmRI91+K8x5lYmNgovhToHbxX/QvwrNbf04xP+Zqm7pIFT25UBADwxibpXRGLiVNswMkpRSsL1dMtEK5K5p2S1XmXApFNTRMi4p11Ay7DV4KTkVGv1njg2WLxG8ams2a6ANalrjJEdniBiZEjMqIQpFHwUxqGiLTfEIdPLtgabQhFCViOkeZAT/0xKFDmAG6JiUTJPoMQiV98MnzZxih6SeH+HbDgR04jJLOwSUQCsVUTJhpgTcprmo/y8llpLSWS7ND4Sj6AdFY7rtHrBZTjo9uMpOR+7Rc+R2zfI9Cldy5+R5cPuL88VP6suXGG2/z2r03ufmmIQRBu75kvd1hjOHi+TMOj45YHBwSRVJ/1NMZcvQFyDGuyXtHu9lxeX7K0+cv+L3f/ef83Le/TZVn7O3vUU3qV95zr+50hSLLFEWZU1UVVVUxmU6YL5fU2RIzwMXlKRd9S9tssS6gy/qlh
jVGRaYkZVWDcQxWs9sa2p3lWgkZvGO33dC2LUIk0ldRJibCYATGGebTSYo5V5ooAs6kh4ISaeNcFAWlVojgKYuCOOZ1DdYmQlaMOGPoTU+uFUoKunbL/c+/4LV7r9FuNvR9S9d2nL94luaWfUehBCIvyLTGmJ4yK6jqCm8dN27dQOYFUjuU0qRxsSDPEtS6zDOsc3Rtg7WWh198TlEULPf2OTo+5smTZywODvnD736PYRiSJTj4a7c4mZA4b7HWJhlbCJS5JpMC7xzOGbKsRmWaQIKd73YblExsiggILdIDsWvpuxat9Ric6XDB0fYDSEVW1qOkTCQnoZJkmcaZPvExnCWqiNIZNRG8w8oUtCLH+TNSEbUkjvbqyFh4RzGwGAlpMgRU+NkX3HSJnyz+aUowMq6/HINcg3HStCQxF8Q4wg4KhAjXdNckhRv1u1KOhXwcCScVb7qkkC9nor47Z/PkXzLbewvyQ9rVJZu+wVuT5vJCYPp+5GYk9+Jqs8NFgSoKTlcXKYewdayHDmLA6YFTY2lMRwaEmJayMXiQOUrm7B2+yWS6YOs8p8aSOYtyjj4GdBQoDVsBDknQGfvVDOEDx0XFYHrObZvCX61H9TtcuyXMFtB3YAzRDSQepoEQWJ0+wec5r//qXyFOJ1y5ntWuRWnBUk4ovSA4TygU87Kk0Bm1ymjNQJHl1KpikeW0TUtnB6bRsK8Lzn3GNo/cnC/ZCXjab7m7f8yHv/q3uD05xhpPtdhjcXQ71YXBMts7oN4/xJqBZ599kYIUyhqVF3S9xRmfwE1C0G42aYEuJV2z43d/85/xyef3ya/Hi0VKUcmLV1vbX6nTFRJ0lobMdTlhUs0o8oo8L6mrGVWVFjpts2OzWWOcGxMgNFolr71UGdF6pNhj6LfIGMgzRT8Y2rZjvWvw3jOpqnHZlaUCn+cIIpmoX2odRfTsdjuqomRa12Ra0nYtzqUE0b5vMM6BzLAuaYet6VMhcpb1esV8OiXTmu1myyeffMwbb77FG/fucHV+DlKQVRMePXyAUi+4cXxEPZujsgwlBHVRMKlKXJ66pM3FJW3bppSGQrPb9jhnsdZgjOHk9IS2aajqGbdv3ODDb3wTlWVcXFyw2N/n5t27TBez0SIqxg5Rokbb6XqzGjWvHmcHmuBRIlLojMxHfBC4qChkwtflWcZgDFmWYUa9dDYGblrvkHa05pYVw2DItafrWvKiHheSQEhLuyzLRu10TAYI5/BhwMnA3qRCKIhZQ7QKH6FF0AqZmKljsY1iDA4MAek9yjoUkAmP/DMQwR5GiPf1PDoCMoiXyotkrY8/8SstfceRg5RjgX1psXhZxMeXnpgMkZcJ4akB/LLnvWbgDt0p/XBFdfwh5eFtzOk5wa3ohoF+6JIyRyQoZjsYfIBJPcF3BuE8c52hvKSOGUpKFKlTnoZx0S0E8/mMxeKI2cEhs+mc3eaKy+0VZrtFOIuJHi9jYnTkBbHv8aNmXWjNs+CQAjKteOfOu1RnF1w2G7bthpjnCF0grUVMQ5Jo9jt8swYzwDh62D57ysf/6X/Ie3/r71F+5y+wblbMT58xsy1OCQ7Lmroq0n9HpqJXVSWlABVFIvFJwbwqmWhN0Wx4uLkk21twrCec5FvQkmVRspflDE1P2/Qc63xUG0W090iRYcqSarbgYrXlxdMz9m7cYXKYk2U5wTM6EaFrG/quQ2XJFLVcLlmtr3jn3jt89J1vslxOuP/Jj/j93/0u//a/9z/9qffcK3W6UinyvGQ2XTKfHVKWcxQZwY+R6VJSljWT2ZLs6pzOrL+8oWPAe5Ng5FbSGUeeTeh7iQsDu93AarWj6zqm9YSqKLDWEIKja1vc0JNJODrcp6rrxHSwPZdFxtU6gTjWW4MbBmazGqUyui4xPsM4i73uKqKURB/ou6SpdWOg5nQ6pSgL3nn/69y8eYPf/c3fZrGYc+OXfpHnz8549PAB69UPEzS8rtlfzNBSstlu2O52rLe7l
2/CENKgzlhL23aAZH//iA8++ibvvvMus/mEy8tLun7g6PiY199+i8/vPx5nwwVlYTEGemPSqM8azJDm1VJpvDWUQuIBJyPWJwxk1xu0ql/CZ8K4LddKMbgB73qCUNgQCW7AekdZTcnLGuMtynv6rkEpzXSa0w9m5KhmyWQiAsKDco4Sh9aRSkWW8wo1y4nB45Rm6wJnnWFjPO56AjqOfoge6Ry5VkxFoBIRKf4MdLuj9jYNX9OHAolQJca5dBrnxZFOl8hjiJjm1PF61DCe28Y59TW/9bpQc11oY/qeAIR4zeMFRI7Ma5Z7t9nbv8Hx8Vtk3/rzfHz/Ps3T+2wvXjDsdujoIIKucjKdM51NUGqEc2dpcRx8oMiLJHFE4mUG5RRdlBjniLriZLPm4vKUOenBuMhrMus4Hwa81AhvaLIal02JUnNQT1j6gCgyojWcbq74F+2a6CO11iyrkvPdFuppMu14Sz80hGxJNp0jhgbft8R2B32HWV/x/X/4f2SvXbH96Fv4/RuUu0sWtuFDb3ifjDxGZkIyLfMx90/RmaQZ36un6f4JkauhI0THcqex2QW+rjiaLVivL9nEyLKasH90k6Ksxx+mI0aHNT2mbVhfXfLkySMmRcXtN9+gmC/IypoRU0gIETsMKKWYz+bsdpEbN27wrffe572vvc/RrRsEP2BcmnO/6vrpRVekbmc+22dW71NlC3JZEb2kbwdy0ZLl6cbKddJ9dl2HD+noG62l6wb6Plkni7xivd7SDwObESbjneNw/xDn0zGcGMhzxf58QV1PmNQTrDXsmpar1SWr9RozDHg7pNmqFOSTKh2Zncc7i/WRXdNQVElWRkjFNyqBc4HtdkMUgrzMIAZMb3j29AWzuuD1t1/n+dPnbDc7Dg/3ee2Nt9jtdjz4/HM+//xTPv/0E7qmpRs6+lGzq1Xi2BZ5wXyxx53bd/n577zN22+9y3S5YLvbcH55wuOnaWb93ocf8OZb77BrWp4/e8r3/ugP2G426bVrjXIBVKBpO1LCrkWHmOa7NiX1qjzH+jRSCFnAOI8NgnwEBOW5xth0JItC0O22CZoiVeqqVU8IgTyr6IaEvrTOJtLmy028Z1KXMIByEuEEla4gC1R55GiSM60KhFL0AS47h9w2dOcrghsLmRyLShBoBNM642iSMSsVVfazj+sRXxlzpKCB6yKY5tPX0xEhGGNvGLMHr5dmo+7hunCP/a6QItme41dm19c7O0FKBNGKrNjj4OhtDpf3uHn0JofHr1PVC4SQnDcDX/v6Ec9vv4++vKTfXSJ35yxkZIjQOcuiLNibTGjaBjN07C3mSFVz2bSsupYuaPLFIZ1z9JdnrHYbetaE3TbJPwl0XUOwJjGuqwkzXaH6jtWuQZQ1IQ5cSrgMHmVgUU+4eeMuJ01Hazo2wlPpkqmqMNFjBVjnEHlFFClCvQxL7NU55CX0HXHoCL3l6j/+j9D9QHzvfRoFTeM4ubzkn50+pRKWt5cT3p3s8/Zkj8PpnCrPyfNk6OmNoYuBSmqWOkc4z+XZM/Zv3uF2uWB2/HVu5Z69WUkMmqKq0snER5wd2F5ccnF+Stc0zGcLnLM0m20KNlA5AomzgbZpaNuWrCjTvsIHonF8/Wtvc3A44erkGcE7ttstXdu/8p57JcS8LCvqvCJXBXKEe1gT2G1a3ABFNSSPvQiU5YS6njMMHUOfnkhN1+NjyvDaNWkj3uy2FDksZjWhSnMtbz2ZFGQ6Y7nYoygnIBTrpqPvGi6vLhjaLcE5nOkTdEVLFILpGJvedQkw0w+Wpm2RWY6WklxJkrdBEzBsmgatFQf7B5RVxadffMZ0PuHrX3+X41u3EDrj0YOHrDYbLi+vOD66wa//xq/xV/7qX8EM5uUPYb1eEUKgridUZZUi1ssaISW7zYp2c8mnP/4Ttl3DfL5k7/CQsiiZL/bZbndsdw1PHj9iaBuGrmEySWYFSHlou6ZFCUWIFucG9EiR0rlOXTUpCNRYQ
57nWASDSzejDzFl6SBHc1jKxRJSp3h1JLlOLNaiKFlv1iMKUzOZTOhHPbJSkiIrsKStrbOOIpPsVRMOFjUHezNUrtgOFrXpMFJy2jSYbU8U8qVhQEbItWB/XnPrYMb+vGJe/eyLbhy1pnF8QKSYl+tPpvv9ZdSvuO5c02NJjARB8dJtln4lzOV1hb3+u0g0M11QT464cfM9jm++ydHB6ywWt8hUCVLTup5Vb9kax7kJ7DxcIWmqBa5aIPeOqXNB7yxtFJybgeHyAjYWESWFVLjgYNfSNw19nlOvH9BniiyT+DJHb9Yom1CSMUaOihkn7QmNCBgJDRZdpZmvViW3qoqTk+fYbIJRksvesvMbqvqIxXTB1fqCnapQpWKRF6wvLwmTGWEuqBV0uyt6FxHHU8TQIrxFtBvc6oyw3WJ+8x9TR0P/wdcIsxpvHT5CLyTfbTb80eUV7xZT/trdt3lreUCmBFmWI5WgQhJdxFlLRPHO7bfY3ztGKkXW95RFohIGmZKvEelB6J3DdC2EyO3bd1j8ak3fDTSbHRcn5yzQlLN0Ou66DqXSjshaQwTK2ZTM5Wx3DScnz1hdXbHdbJlOpq+85366OUKk+G2lRlB5gGDBeIcTgaE16N2Oqq5BKIq8YDKZEyIY45AyQ8gC7wPtKAkTQrC3V2Osw1iBJqU65ALINJPJlOlin8EY2mbLMAwMQ4cZOiIC48NofEjQ88O9OYOLXFxdEkn63W3TYJ1nGAzCO6rlAiUExiUG8K5pSY2J5Oz0hGEY+Kf/+JSqKrlxdExV17z2xmtcnJ3z5NEjHj78jC8+/xG50uzvH1JMZuR1zWJ/L/EljGW7XXF59pzNZsOq2aXEXyWpJnNmiyW6qpjOZuw2SULjnOPps2eji6VKqgqlGUziF3dDDzGOlLM2LQ1HnVEIkRg8WkgQAhcjTT9Q1iEdnZA4Z5FKE0b4tpCKXOUM1hCVwjhDnqXjVhYS5rFtNqgsQYvqskrgIp8SH67nl8EG3OCJ3iGVJss1eVHiUOSFp8gthdbjkdpdC1KJIVAWmsV8wtHhgqP9CfN/jU3vv+nLj1Hwcjz6i3j99SYRGNdyL5nIYukeS9V2bIgR8ksFhBiXivJl0xtB5dR7N7l39yPefP1b3Dh6jdnkGDcu6Qbn6b2gHxzbIb1HWmtoW8PGerabFdI02N2G7ekzPjt7xtoZwnSPIGr2B0uVZZiiwF5siGUBWcZsMWPiB+7mE55vG04uL3Cnz+i7LXZSE2KklIrFzbvcm9XsmgbhJRfrK7ZRYoqKoDQvbCA7OKbKNLn3PF9d0KkJg9kxLXJmUrL1HU7n7HpPWdc4wCqBUxnlIvE/hNBk/QbbrkYuCcSyJO62mO99F7k3Jbz7JmQQdwbaSHQyBWhGeLbdsCwqDicTlIhMMo0UgmwyZTY9pJ4syXVOlJroPfV8SiZbut2OYrqHkAJvDb43hACyqMicZ68oWMwXrM5XPLn/lKAukOWEmGV4Apv1BtelvEXnDadPnnJ6ekqInosXzxhsx3a74fXXXqfI/2su0q6F3skJBcTUNUUfiNERXZJzeWeSfClTVFWZrLrCE6ImJxCEQbueuhAURcngBk5OBoKLmG6HCAEXHGVZYkdBsg8JaNMNA8b0EMEYQwyeSV1RZpqDvQUhClabK7IsY7vZ0jRtipaJkbbvEUVG33fkeZ66QZuiZkKMmD5xHlarS8pywpPHT3jrzTdZbzbkZcHxjRsgBFdXlzx58JDL5owff/97bNqW3jqmRU7TJ3JZrjW51gQhWe7tcfPua9TTKXlRIaVgu95QZAXT2QzTd6xXV6zXKz755EcI4cnGRFYzGLquQWfZuCn3hBBGB9o4L1Qpzbc3hlmZ0iK6vmfbtEwnFQSfOA1Yqqqmtw6tC4Ty4EViXUid2MTjZr0sKoz1DH2H94E8y1FS4L1D6cSQDS4Bd5zx2MFiekPbW1AKaw3eufTLJ4aDjIIgxqDy6FEqY1IXzGYTZtMZV
fmzj2D33o8qg5QOSxTEkL7ul1KyyFe0uWmWm1xYcjz9jbHsLxvi1OVGIVgevc0v//zf5O3Xv8X+8hAlFEOMtNbRGocMnl3XJRKbD/TnJ1xeXtLvNmxPnvDw0Wc8ePA5q3aDKCqmyyOyxT6TcoY5O2coay4n+5R7e+wFz8EQWa137KTCxJ5dd8WJBONAETmcTyDTnEdQKsPuLvj0xYOXVLP9+YLX999ktdpifOD88oxtrhFZgYw5xzrnTj3jyrZ0oWUbanQ9ZZ7lrM/PsfMZvihYZhmX56e4akK22KNoScxrfUCUGQGLms5wXYtcb/C7K+If/hGz2zOY1ezcgDCRwgVuFiVvLQ8pdYYjIqSkM5a6KJmWE/b37xFkYm//4JMH7AYoS8UHB5Jc9gybFQfVDDMMdE2LaRpCTHjYq5MziqJgspghs5xI4iJnWZYIgjbRBvvNmjLPQArOzk64/8XnmL6lyBXWG3a7Fb/9O1+w2W34X/yv/v5PvedeMV5IYYo+JuTiYFy6IUcRv3MDuRAIleDIVVGTa0WWaQYfqKuK6XSO94Hz8+dsVpd0fU/XCXY7S9c2ODOQqzRWqOuaqp5junZcODU0bYOzKXAx04rFdMJiln5tu4G27/Hesd6khdxgUwYZShKdJWrB0A+pY9cpoVgJgfOB3iYWQ6YyVpcXNLsNL16cIIRgtbrk6uqSuqoYjKGeTSgmFV5Af3rK6dMn7LYJSBNJke1vvPkmi/1D+ralH3psCJSVYzFfEHxICgLrqGrJ5dUK7z2z2YyL0+eousIHGEwigfkYcD5l04XgX9L+5XXnOHZUXdehs4IyS8GVUgkmRYUjJX3Qtzgky4ND+iGNKFz0DGZI9m2tE3lNSPIixxiDMx3GdJR1jR0MXd9R1RPKvKA3A1me0XSW06sVDsd0mDEEz6brWe8a2q4jeo/7CtcAItH7MUIljUWGVwOZ/o1fSb0gCIyZaH4cvYpIkKOdN5DcdOOcJEFw0lgsxDS/FSJ+RZUgCFGwt7jLX/3Vv8ve8jV8b3jx/JTWGK6coe8dzeUVD04f4IwnDB1ZgAcPPme3W7NdrdmtL3C+SyGeeU5UGV302MEgZc9RNeVs6PHmHPv8EZel5nQyRQPaOQ6mc+RsnyfrC2LfE7odzycVXiik0iwnBUf5MZftBlEtCFpz0W442a6JaGaTKTfdhKvVFU3hCL7neZVULkeHN4i7NVsEPi8x5ZzqVknndwStaIua2ZtvsutaTPQUswWmafGqRk3nxPYKbRqEzCGf4bMctudM/uQL9M99wK61yL6n9JbDyZx+GMjKCYXOUVlOWWTUxQybzfjxScdsmbFuelaxQE9zLpodj16cMwxnlKpg73ZacDeXKzZXl2RVwdmzFzz44ce88e67LI5vsDzS7FYtup4wnU+pZhNiA2VZ4/uWrMiRSoOSzBZTdqHn6++/w8OHX/Dd737Otm1TXNUrrldTxhAEHxk6QyM7+tzgfUff7yh1hswyhk4DCeSiVY7SmuksY7Y8YFLNsIPFmoFm29IOjtVVw26b5rN4h5eaaT2lqqdESC6W3QZnLM45jLOIGFjM5hwd7JNnGattQ9sNaCXp+oG263EuWYcjIsFCBGiVipXSGV2XbK9FnjEMFmcs07pGiMhkUtP3PQ8e3KcqS/aXS4aySvPb3Q4hBH3XMVjLwfENkHJ8TdvE3w0BGyIxBDbbDd5HZvM51nicC+wfHBJ9YLFc0g+WzXbL1cUF07rmIsaXUUjWJduvFprtYBj6EYr8Us+URgd5VhBDOvabYaCoajzp4ejGGaUd476d95yfX1AWJT2QRYsJlrbryPKC4COzumIxibRSsOu6pOnNcrQu2LVX9ENHVU/RUuGsZ+cc692Gk4tzpvMZKs9pXeBi2zL0SaoXwzX0O7EKht6wXTesao0d2jT7/Blf14oEIcKXxXQEmRMCYXzIvUz+JYK8fmOl2KmkUkh/Jo1VFEVR8dbrv8oPP3uBlycMxhKCZ
LcbeGpadoPhuCopuoGTdsPQ7dARMmPZnJ4xCA9FBjYitaKqJ8i8Yq5L2t0Vbb/mUVal4pmV5N2Wy53F7Wb4rMATeOwalJJErdlb7mF2G16cnuOqGdQzvOhYzeboImc5mXFclPQfn7DNNBSabbelKyTl6/e4JQueP/sMrxSuyLnYrTk6PkL2LRsh2E0qptND8mef0eU5/XTKdJLTdFtCPcfUC6pJQ29C6qrNFhtygpNQaITOyMqS7umaMHuAWhTcKSoymbGX19yZ76NHlbMQCpVPEfPbPDpvWF+co5+fs5Ow215hz6+Y7e0zVVfI/oy7d98mKyuyrBjHGpLd5SXPHj9itbpCKEU9m6LmCmccq9VuJPwZut0OawfKSY3rO66enXN68pxml0wmi8WUd959h9OzDZ998QXPnj975T33iplumuUO3cBmswMf0ZnAhSGNDKQnRHBuIHaBED31ZIlWkulsxnyxJFMFRhm4gG3rWK22rMZUhqosaFtPnpVM6ynD4Gi7FjOk7tU4g3UOQmBSVwil2XUDZtOOw22ZcIKMci0Eznu01qjR4utHHaYZTLLnIlA6oyg0PiSwyHSUo23Wa06ev+DGzRtMv/Vtbt25w5NHj1gsFqxXK5ou6Yl3u6sxGDKnrGuUFFyt1wxDR9s2aZYsdiz3lgkfaUyaMU0nbLdb/vhP/hhdlKyvrpACirKEAMYaTN+n2HhhUtpuTDv01FHJ0Y4qcSFRsayLIB2i7xEisukH1F5y8iml6QeLUpKhb5F6CkqjMokKEILDWYOWks5aJnmCwecBuq4hL2vqakKel3RtQ9s0KKkYBoeQiovLLX23w6mn5NMJQeXsYsSENMO9po3FCD5G2nbg+fNTgmmpS43zfxYg5ow4yvT/pYiIkNi0YtSIwnXRJbEYYBwnpIdeUil8SVcQ0bO/eAMhlqx3W7yCs9Mdg3bI3qUja3Q8XJ/x2nRK5iPr8zO2Q4czLXkmkV1HtmsJpWTwEbNdkWVb9urXWU4mxGZHP6zxxnJWFeSzOXk246AouT3Z4/zygmfnp/g8J05qLjLJ/NYd5kJQesVCwMPLZ2yGHbqc0XlLKAsObtxiYhzt+pKVDvRZweAVTGtuvPEerW9Z2RY7rXjhPHfuvInrDU0GvY7c/c7Ps37xjLXK2FYTDr7+AeebHT7LqaVieHZKqGbo/QOsGRBtTxwcoqjIyhy92yCuOsppzq265Gg2QQZJcA6d5yk1O6voguD06SOeB4EVAX+15e6dYwrbYyYO4S3atRRZycHNm5SzKUqlLEel4PnDR9z//HNm9YT5/pJqMiEr0v6p6QbatmXAs11dImKgnNWs2jWPHz3gwaP79NsVNw73GPqG5XLBL/3Sr/CX/srf4Ic/+uErb7mfWnSVAOEjtrd0TZ9IPEiisETt8FKCFkSZwCzWp8yoqp5SFDmZAEYzw+n5is2mpWt7YnQoEZNMKXj2q4pMC5q2www9w9Dhg2dwDmMtmZKoLKcdPFfrS5QSxBjIVHrj5mOkurEmaSCFQEmRNKvWkVcVzlqCd2kJp5PRwfo0tiirCtsP/PiHP+Lg8ID2i4b9vQOOb99EK8mNm8es1yuKvCRYx+npKc5blFQEFyirAiEkzjqu1muss1Rywq7dIYRm/+CQqio5Oz9HCsnTx48wZqBrdmilqIuCbdNgjMPaARFByTIVg/SWH2dNydIsZY4SpDGPMyilMNGgtMRaz7bZUuT5uHnPaLodVVHQNw1ZMSFGS1EU+KDxIRC9xRnoRbI5SgJZXtB3O7IxlsdohbMmZbOlVXyaT/aO3gysNh2iqqEqCJlOCJjrAEYgBk/feU6Hns1qhRYRb9x/lTL5//MrRoEPX4IZ1ciJSI26+LLLFYwMCfmVfLd0z4eQQOaJqyDJ1ZTPPv2Yosg4Wa+YVnMuHj+hVYJlOeGG9zzfXPHp+XM0kblw7Lo13lk6GxDOoDJBjqBIKDmIcLo+SwAalXM4WxA3G57ttngf6KuKt
tnRNR2L45vcWS7RQiMHy+ex49I7cpnRKIEcOvZ7i163hOnAblLyMNagarK64Hh2D396xk5bgo6siGzqiqPlDeYnj9kpScgKnneW28cLzOP7uMU+Wy+5uz9ntd0yZCXlckoVe9qyJhYlpXf0g6WcH+BOz5FZSVzvyEzPPCuYLpYY27AXapa6YC8rqHQJwTHJFfuzA2x1yA+vekLMKQqFt4HBNLirs/Se8Y5FrjisJhzvLVneukleFXibsuq6XcPDBw95fnrGzW/eoqzLpIbKMurZlHpScnp6yvRwj6IqyHwyCzVDx9n2jC43nGxP2FtOuLi84vT8kn/yj/8p7733Eb/+1/76K++3V7IXQghY4zC9wdWWDA3SAUPqvHSyPAYX8XbA7yJeCKp+QMueprU8ePyU84srCDYhGPsBFTwES5GlpNnT83N2bU+dK6y1bJoWMwxUuWI6mSOEwpkOJVMicESOvvSEL4wEPNezTkeRl4goyDKVAC0xFQGpI8TRKhwT8zQtmj1tvyOee7TKODt9wSeffcw7b7/NzTu32N/fSxbKsmQxnXJ+eUGWV7gxWrssC6qyoh8s3qUZ7Gy6ZLG/NyYNO9q24cEX9+n7ns3mkuAjSgpmszlus2YYegaTttep0fWIeG2xTfIrqRVSpteipCYKMGPhzVWOEJGu2SHEFIElLyeImMA5WEOMKvnJo6coSvq+RyiJVAoRI5nSo0ElEjxstiuKokRE0FmJtzYxCWJkf/8G55cX+OiQskioyRhfsguIabQwmrvwY9Bla3qiNUT3s+90v2rPiDGxdMVoWkCQtLrXRfc6HWIE2sjRBfglkzcQo0ApzdXljqvLc2ZVwdXFigsUUSnarmOQJxws96i0wrY91hsugyebTdBdj6Jj7RxDplBZRqYUB1lF13fsmh3oDFHWXMZANp0x2T9ED5bCOdrouVpWtGZAac0Cz7RZU5+/oM8K8tmEMAw8m00p7t0iz2tuFTMeP/gRm6ZF7tc4rThRULzxFjeInDz4fgL77EtWjWJ5710mq2c8XT0FFYlDxu39BU/NlqvO89q9NzmSa84qy66OfOfwHn+wa7FZxUF2gxenK2xdIa6uCKoEa7A6sBkS82PqMvKt5fBmjYqwyDMOJ3vURYHVezxzFZ2z9M+fcmOi0cYxCw2XXzxmpQvasxW/9Bd+gWpSsHfzBnldEUPEG4MfDEPXs9kkeanUGmN72t2WqCRRBGQm2F1dUEwLJnuLtDvZbblaX3HWnWInjne/8wHN0w5rC958/+ss/uAHfPrjH/GLP/8rr7znXpkG7Kyj63p6OxBChRACHweUDKngap+gvyEHl2ypxlj6tsPbwMXljtPTE2RwdF1D03UMw4AICVZztKjZDQNnVyuqouBqu2Oz29K3LbnSlMWMIGTaqjvz8jhYVTW7tiUTYO1ACAEfkosK4YGYElPLHD/SxrSWKCkQwZMVGbEY4TnNLmEfpUxpDHXJ7/3+7zEMAzEGdrstt2/fYlKX5JniUitu3ryJ84kbMZlMOD8/Z75YELc7btw4JriUDCHzgtPnL7CDo2kbzk9fcLW64vjGDZ6/eEFVlom3EAPW9ImpIM3Lwsfo5hL4pLEl0cwgEJxNgu1Rwe9DGB1Whma3HcHlLUKllIwQJcZtqbNlKujWkmc5wZvRURcw1qKlJgSLC56hd+lzzuMF5FmR/h4zIKTg1u17vDh9QlAKpyQ2Jg6svF6evbR5XR/hAyGkH9GfBUNaouCKlx150uzycn6bWAppMSZJyb8xCKJOs90Yk4U4zXPTvyudMZkcImLH8/Ul88UeZ4PFVwV3ZnO27Y4zY8iE4t7ykNX6krNmRQyQ4Yl2QIdAMJ7gHdZHTieefDKhUjWTrGJeTvi839KoBP4v65qZc5izE7KnT3B7+wgiZ13P5XyOvHuXuciYbjecdA1mb4b1AYTDiYHynfeZSIlfXXBOhxE1TmmyWcndj76D9ZGzi8dYA5tqydt7N2m6NV3Y8
XTn+IU3b9M//pwhTLg1LHi2+REH8iYf+hI3rKlKz2xS8o2b9/hnUmCiRB8ssR7KTNNtG3qludKSw7zC9B3TWPD2jQO0ENRaY0XGj59ecN6uWG02TBRsTODNvSnrlWE33+PzxnHr+C5kOYXOUGUJCGzXYdodfujIMsE3v/ked27vM50UbFdXIBSTvkdoSd/vkBLs0NO1iqIqEZlgiB3lZMJvfOev8trRXf7v/7f/gE8+/5S3Pvw6f+Nv/x3+4f/lH3Bxvn3lHfdK9YJziW/rvSfgCTENolWWgDZhRNgpHVEyx9qAFGnO23U9J6dnBGsI3rHZ7jCDoe8aZIzouqK3gZPLK0SM9N7RdC0aKLKSqq4JUjH0Pc12g5QpE2w6nRJiIDqL05K+64hSgdLEawi5EPgRwi5eWjAFbdujtEZlCWjunKNpWhbLJYvpnFNzwfpqPUarS148P2F/b59PP/2Uu/fuYPqBy9UVk3rCepNwkzrLmc8XZFlBkQ3kec70IC2Xnj5/xhcPvqDQOVmZc7W6wnvL0LUUeY4QkqbrMGOHLGLEWAdElNSjNEkgkOS6GF9TJI4EKmcNeTVBq+xlcq1WCuc8TdNQlI6yTDNrNxYH07bk1STxKgqZHmYIHNe61XHBNEJeBEnFYpxhsANlUVEWNb3tKfKKrJrQuy+lCDEkuE0yxo4DkhBHIHha7Agi+J8kcP0sLhGvLbuMwvn0qPgqBCfCS2PEy48Fxsh28bLbvVYuSFGkMZqSuBDYRIMxA4PtyNSE+WKP4eIFxnY86VqmizmLaGHoWTuLmU4hRgrvqIyhtQN9WTAAOgZ0rtF5yX4/sBsMWIfdNDwtCvKjfcq8Rnc98eSEq+NDeuEQNmCUhVlJGZdElxMKRTCGtZbJoZlPuL3cx/QburogMrBqei7riuPlhOrEsRMNZfQ8Ny0f/vxHfO/T36HIKg77KcNmjWk8m9V97rU9D3/0XT7RX/DGvSPeXC64/eYxd7MVVekJ7COmO0LbInWOkIK8VKjBo5TC5zWXveebMpCrlHL8w1PDjy63VNmUm3szCiU5ffGQf3lxxvnZc0wUXOg53zhcsnnxlHhnjzzPEMHTd1v6ZoPpOpQKHN6YU1QeEXTKNmwb7NAT8KzOz4nCEdzA6iKpeKLwzOsZ/4O/8z9Ehchv/eZv8uLkHBV6fusf/yN+/s/9ef7Of+vvEPtXL4dfqdON1uOjQ5CeqFJGpJbkOpGOfLSYAJmIZFpRyBQX7q2j2fWYrqPKFA9OLrHDQN93WDMkmEW25HK9wRlDNt6gmdQordFFOtI5a+jaFmstZZERRCJo9WMIpRsGfACpM+zQQfRkukx21gjOu1H4Hog+sOsHiixQlo6iyCFTdH2LHSqct0gpmFQlEZGMDleX/M4//x32FgvqyYSiqrhx6xbRBwZj0blifX7JZFJT1TN8DGitiNExWMOTx49ZX11h7ICPyROvhKJpGrIiQ2nN0LUMQ0fwfuysUqfuPSihRnJKij0XMc11CQ6ixLgeKTWyTCMChEx4zODxImKtoMzrhNcbDHleMPQtQYwFNgJCpYI86rFTilqaeRMig1QjvCXSbbcE65hNFxRZQc+AzAt8MPgY09H8K9ZXEcckiThqWZVKVU6pPxM83cRXGI0OESASpcDHiPxKIYVr/Pi1SkGkbDgRiSEhxKKIhCAQ5AQEF6sVR4tjPttdsJjPmTYdz6uMxgzcrKasQsO5hqZrmSvNjIx1HAhZQZQBP4AdHPmkgsESK4lCst2taMefe53lzBrLo1LjJxV98AxuYJrD7GBB9J7e5ITQYPKcs8kccfc1qtmC133gi4c/INoJ+lAijOe5L8j3D3m9Lrn/4hN8MUNSIPqWm+98Ddu3PDr7BPnaEbiCpciYmpIHP37BtJ/w8ac/5ImDrN2SFTk3D2F3cspJGGjfesatv/4b/PzNe5zGKSa7xfdPr3AyQ/TPEFJB3BFF0raft5ZHJw8xu6esuMXF5JtQzamDo
V9d8MNnL1g3F1yFJAfdy2a8uyjIt0/R+YK95V2KKid6g+3X9M2a3WrL6vIKa3r6fkeeTSin+6hM48zAdrNit1knJ6gzCCHoNhsmdcVrx69x9eyM6WLG48dPmdUTjJF8/+NPGdqWX/nzv8rdN7/+ynvuFZQx8DGF4kkNOovoLKJyhc50cngEhw8RTyAqQaEmKJnmrbvtFq0lV6t1yoMPDjsMaflUT2gGQ9sPY1w7oGQ6CseAEgrTNQghafsOFSMeiRaKpumwrifEFGaJynAhYLuOItfkWZ7AGN4xmIiKo+V9JHmlziaQa0VA07YdmUrshGnlUFpxdZlSIC7PT7HOE33gxbPnHB4dc7G6JFNpxmeMYbZYUhQlTdtSlyVt39FbS1529EPCSTo3WmjzahRhR4bBUYkMYxKBP3iHUMlN85JHKxPMJIzx9AkhqfBuwAU3coENoYdMa5TOEJCwjM6NygePNw47dKOkKeB2G5TO2VjLpJ7gbAJPRwKTqsKPhdM7Q0WZDBm+J8SIGwwNW/Iipygrcp0loNDIGhAjveFlUXv58JeQCQRq/MzPXjIWr38TAq7pYv7LeS7XcJvxeikL49oSPJbjlx+PTKs9RCjpe8PRUYk8t6yHNfv5Arnd4bzhrN9RlROy3kH0bLc7zLSknM/QqxWhWdMrSbOYo7Qml4J9MlpnuNSO6DrESBM73F9yuzdcuA6vFW5o2WSwmxWU1ZQ3Zgf0nz3kvu5xFsSgEFngQiluvvttZLCcPr9PO68I+QLXK7oYOdBzdtMKZVecnnWc7835ueOap1dPeSsojoZ9gp3zg+/9Ie3FJdOrLXs+8N6d17n99ut865sf8s7XP6TMMh4/f8Lles3u+59y67hn9vqb/KHSPClB+cBWF3wjh/eGgflyiUZQ+ojwKzabHc9DT1laFldnxP6c33l+QcxmbNQMMa3JhWG/XnAMLJXiaDFjNp8luqBpMX1Ls13z/OlzLi8u2WxX7LYNX/v6N7m92KOcTGjWq7QvGqWZPpICbmNgPlty/uI5JlgePrrP42enPHz0iNl8wouTK5pdy+nZOe+8+x5/+b/z937qPfdTi25Ke43kORSFRGfJCKEznbo5wpi4a4nCk6kcpEArcM6NUPKAdQNlLjkbevphSOjCLEuZZGZIG3MBMqR5mAdsm1ii3lqUEESZXGS9tQxdh1aA1Pgo0VIyDCk8TqjEDxDCEYIgapEK8/WxnJgIUmPXnuksMWqVZFLXDMNAXmRkuUYpgTMG6x3NZs2uadi0D3h0/wFvv/0m1jqWewtEofAxcH5+SjWd8Pjpc4amYbttxiTjiJLJ+y69R2VpITZ0A8YMtE2DdYYwpkUkkHZACI9U+bhFT6GfWZ4BAq0Loh2IRBTg7JCsucFRVpO0IBQCIeRIe/MpxdYl86l1hrIcg8FDWgw554je4bKUSKGzPD0QrSWEwHWgonGGMTcXnRfUZYU0Dc67l0yIyNjxysRdAJkcXVKmbne01f7sr/HrfQkau9aLiJefFfCvfqnj6Ip4jbH88lN2GMDtWMyXPN5ecSg0T4uSbV1wfNmy3lywOVjilOLIROzlmvN5gVWKECw39w9xtqfF4UNaOAqd4euKeTYldGt8CAwxsjU9n8p0grozvUG82vIwbjFCJoOM7bjoVxy/8ToHl8/phSdqS7t6wWk+R3QDr+/tsUCza7bkBZTWs94a8lnB+/t7/ODR94iHU/bnJdPOcrtRmAdn/Ob3HvJrb3ydHz96Cg+fMZ3VfOuD9/jWRx9w594d7r3+Ji4M2MFwPJ0yV5rptGa325E/f8BREPylyZTi9g3UG7c51Bn77ttMJyVKwmy5jzWG7332iH//n/wzXnz3d/j2nT0eCUFrPdpuIViqbsPB7dscDCuW0XH3xh6vvXWbss7xpsPu1vTNlu3VFY8fPuRitcV5x3y2z40799i/cZM8z8mkSkEFWZ4eplLigqUsSvaOjlC54umDx1gfuXfrNtOqpusbD
vcW3H/4gD/+9D4/uP+A/80r7rif3ukqQVFpqlqTV5KsFGS5Is81Ul7PTAVRBJSSlOWEMqshZtjeIESg63vqssANDus8gQTGcd7T9W2ypAqBHeeAOgq6Zpc29jHh/xKkRdH3Hd6lAipkmWaEUmK9S8sbrZLUzBps8COQO0XlBB/w40bdhUDfp8jlqpJMqhLvkx9byRRv7mIkVwpXCEzrUJnm/PQFz16cYK1jMknEtP2DfaazKc70tN3A89NT1qs1282G1XqdotuDI8tzJlWNiwEdA96muWnXNUndMCRTROKzihGafT039y+VCnF0QF0zElxwoCTBjqqJ4DFCUk7nKdmAJPJXetQlO5tm3yEZFoL3DCbFxcexs3bOI2VIy0kigxnI65oYHEqqpKG2Q+oCmx2zyZRCbxlGXGYUqYuNcuxyw/VSSoBKH0dJkK9cKfz/54rwUqjLT9TPlyX45R99OXK4/rgYP/blR6KE1uzIyhnN1RnzYkK2WtNGw6zQ5ELQrK/o6woZBItJTbHeMEw0Umg2g2Gyf8CBtfTNGjXJ2IjIC29Q0XM832O2G3jqGrocHB7rDSeu5fWjQ5ZXcOEbpIxE23O685yKLfNZyS2reHD/IX5WQe3I8gq5hr2jBUcH91idnPPYrHHzBVkW6Hbn5P0VtR5YuhkPTODW8jW+90e/iY6B735xn/7hCfuZ5NbBDDM0/NGf/CGff/4xB4cHlPWcD977gKHpCMHx4nnSPxdFnk6wOqdaX7JabxiqktVkwje/+S2GwXL646d0bcd/+tkZa1FiDo95MHj+6GJHzCd0zhD2blGomg+PjrjVXXBHDXzrm+9xfOcWWSZo1xcMmzVdu2O32XJ5dcnJ2Rqdl9y9e8De8Q2q6QwhBPViwbHOKK6u6IaEQM3wmL7n5Oljsizn/OyMrt2hteDwcEEUS56/eMZsNmXX7Li8Wr/ydvvpOt1cUk1z6mlJWefkZUZRKHSWOsWYwrBQUZEXJbPZgjpf4J2i767QWUoILvIxKrztEUjKPKdpW2IIKKVHv75DSU3XJ81hiDGNA/IsRco4i/eB6AxZOSUKnbovEdIsF9BCEn3EylSkyNLc0oWYOKLBj7ZfTaYzht6gtaKqKmzfg9JkWmKcpdSaWJYMTUtE0rYtWqX0iKHr+fzTTzk4POSLTz9hsVxgvWPXtOCTJ58YaXZblBq5BSHiQkIr9sYSncM7j7E9zvrRNadHR1SSI0XkyAYQL4+wwQeqokpLMx8QwaNi6t6Dd4Toxu7cIqKC6PHeIbROf49Osd5eCZwZkKQkYSFSGkX0ASUlmZKjJCoZTJxJJgofHUqpxCwOPuXJWcvxbI8+WHozvCxC6e5I885rCRZSJs7An4nhQiqi/9WMcckddY19/OrHu7aDbELIFQudsSol945vcrldc6Ykh2++xfT0gjaDSynwCg7mFeJizaUIrEtFIyNlWXF7UtE1O1Z+IKKIMqXfLqYTZq2g67dQgwqC7faKj11Hvb/k9aFi9fQJlzOFqypkMWEoFGo5595kxsb2uNDRdFd8MVwRzJwjNfDGYsajs4+p/YRjcYsb+V0Ob3zExfkZH3/+x7jBEHTObN2yeXrCRWOYVxnHezMEEWMMk0kKh+37npOzcz7++Md89MFHlJmiazvuvfYWq/Warut5fnYG3/8+r925x1pK9g8W/L//4Wd88vHnxOBR8z0eTO6R7d2lmlWcXp4yPz6krmp22yveKCruZYG3RMMst7x5vM/hjX2qScXQrBi2G7yzCBRFWbO3t8fJ2ZrZZM69199gulwSQhi5IoLJdJb08iGiioLV6pLdesUXP/g+s+UeR8eH/KN/8o84ef6cup4gM01vOi4uLui7ASlfzRP5qUX3GmBT1SVlmb+ExsgswVky0hiMCGVeU1cz6mIGQdFse5RWaBWAJDtr2448yxAxFdXgAz46rLFpHqYSy6DrUvhknucpydalTlYRycqSLM/SEkhKTNdjbYoFdzEdb
0uVTAKC5LdOQY/Xx8GUt6ZUiVI6hc7VVQp2tKmIaCnJlBqPe6njdM7Rdn1KonAWgPVqhXeWZrujaXa0Q49SOXmRwXgcz/L8ZSc+mIFcpUWh857gDM6Y9L2IkCuJlBoXEjdBjGaILLvOm0szRmNN8oOTFANKgvQRtBxhRIG+7aiqiohgMIZCpCLqbPr7lHIE5xGZpm1b4qh68N6nRWdZooBi5DF4axMTwttxYps299577DAgMsm8mkIMmJBGO1zvoIAgr+ejknjNrP2JnvJncylU+jJFAtT8l31F4ivF9frnIERMMe3iOp49zfllJui6NHMPznParJjoCYWAxreEoHh7OUMMPSduO0Y85dw9PqRZr5C2JRYxgecnUyaTKXtXl+mUR6DtWj4vI8X+nNdcTffihJPK4CY1Ds8wrJntHbN0N+ncGhN7ClFgtxs+aXeIsuT9O68Rn53zw8maqAeqTPHNg/epLBx1llpYdpdX/NNP/zNuzKa8PVuSbVfcyAr06QvWnzyB6KknJZMyS+nVQNt2tF2H0oqyKqmKkq7t+MEP/phJPePe7dt0fcfHn3wKQrLabKmrkvVuTVkUfO97D7l39zVu3brHjVv3+Hi94+zpCumvCHogTObkZBwKz7t3bvFW4VjogLt6TkZPXef4aOnbDbvTU5rNJj30lUYIzaSekGcZs+mM5fFh0pw7R/QenRVAAhr1Q4vQgmpS8/u/8zEyej6+f587d+8wnc0os5qyKLh9+yaD7/nt3/tdNttnKdrqFdcr1AsSpRRlUVBX14U3HUODECAVMqaspyKvUTIjzwusgbwoyDONVgJrXErolWkD39sB7w3N0OCdJ9MZ3lm8h90uxaqrccvdDd2XMTYStEoxNEIoorFY049D70TMT8uNZAeWMc1BVZbhQiTTGj9uoZVUacYpBX3fs20aQoCsyCAKzDCghEBLgcgUxhq2ZpWSc1WSaxnTISJ0u01K2VUqAdxNeg8qpbHO4OwAIUcKmcIiQ8D7kKRGIYxR30ltIURynkkJSiusTUu1vMhQSo863BQPLrROMdghAD7Zh0NKE06RQZosy7DWI8VAVpZoXSBjOhVYLD54hIy0XUOZFwgk1hqEiNR5jpQqLRxlmg0rpzBDD1JhnSWTSf419B0uGhASrbNk/fU+fb8FyBhHt2Ackxf40y3iz/T6cozwkyqFf/UPpnGC/MoT5ctpw/hQ9APF4hbb9Zb6xk2WZyc87RuO9o6YnT5FyI5nQXFw45jl054gHd3Q83ReML91Ex4/JpjA2nU8MgZVlhwc7TN7dsJVGDCVxkbP4FrqxYKZfo1Nc0Frd2SFIhOCs+2aOFUczG6zvxp43l5yIteIrCbTMCtz4rRmedmSBcNUHXL/ySPqxR4/9+5f5g/++F9wtnuI1BGVefYXUz6oSszFFaFp+MUP30wP8eDouoEwJpg4HyiKAukD682W+XRKVZY0Tcvecp8X55d8dv8pWa4SjyRuuH37DrNJUiz84i/9ItPJnMuLtKxeGUnQNa4sqef7qM6i85I3avjm8YQjGuzqhEfnz5HecHCwpFlt2AXPiwcPuDw/R8qkzx/6jrPLC9qmQRBG6lhHLFJzpLOMoW/p+o622eC9JSrJ8Y1jfvuf/RY2WL54dJ/pdI8//8u/zHxasFpf8Cc/eIhzjvliQZm/mhH9U4uut4G+9QQHmSrJs5JMJ20uQo1H2ohAo0U5CuwTxLmqCnSeo3RG03a0bY/3AV2kzsI7TwwghKLrurSwiZFMqXT0lBJrDFKpNHsMHq0KXEifCyEk5kOMaKnTAwpQOsc7iyR1tDGSYM0hpbsmHWuK2O6Hn9SW2lEfi5A4YxCjBVaIQN93hBjR0pGpDBn9aF5I2tNkDhVJPxw8UaqkHHAu2XcJKb1YSvTYIfoY0ul7LLSCtEBTKsNLg5SCoihwPgV4Kq1eNocxQJYV2KHDhUiIklJlRJ1ifJJsKyTX4Ph6gx0oqyRrIyiUznHOkSnFY
HqcTBrsXKXXH7Qm1zmoDCkinTEomcwTcvw5OWuRCHKt0S6hNyPjA3/UF6cjRiBXkrLKKKoClSm+NAn/7K6XGEYg5cL/l6sqRtzNVzpd8RPdrxjfEzJ6nO0J3rByW6bWcJULnmE5Eoq2azjLJKsu40Zd406fczbRbLs1F67htcMJw4srfJURM4nXkT4LFMs5pXEMdOgMlBJc7TouygnTgze512xod2ueNM9xdYFQJXsi586Ntzh/sqYODfO8YJpVXLQ91c1Dvl59h5NH93neW3b2AfL0Pm55m1uzQ1TsefPGEbfqBWK34VDC0+2aZt0Q7YAZLNN5zWw6xQwD62FA64zFco61lt70GGPTPiIELi/Puf/whOViSQiOy6sVBweHNM2ON9+4y9mLZzx99JjNpsNZw827t9hdWcrJDbL9WwShKY1jWuW8c2tJ2b3AdWc8/exHPH78nKODfYJxNOsdbdvy6PEz7t+/z9B1zKcT6jpjvU3W/Gv64fpqzVBaDo728N7Stgm2JYVkuX/Ap599zOXZCcvFjKKu+OPvf59WNDx78YjfffgZJ6dnWOORUbI3n/9r9RE/teg6F1mtGk7Pdtw4GlgsJ5CPQXtSIGWSrIiYioaxlkz2SJUkUXleILUec4OSvCZEjxIaZxwxBKxJabchePI8R6s0q3XWorPsJe0nzwuESOQsjcI5g3Up6VcKkRZDIaLzxBNwLhC8T0udEF7iKTOVir51PsVaC8kwOCKCTKfPDSNusreOQmuc64mk7lCJiFQpQ8uPfNlrBUEmIUaFJ6IERG/hWnaER+ikF/YuvXbnPASfiPbOpw5RprmtiCl2JysKpHdIqVAqS5I3pYjRo5Uizyuc7dE68SaElGgiRabJdZa+rqIiBpukZFqho8aEQCDgHGRKE/L0M0vavS8fAlKl0YdUmrJKqoMIGGcpyhpnHf0wUOkps6JiGwacHw0aMYxhDAKpFbOqYG9Rc3wwYz6p/0wU3XSJLwUIX02Q/Oqf+FO0MSkkUor0+iQv4VBCSCKGQvfUdYFWkr5Q1CIQXUefC7JYoIVF4miEpz7YQw4tuZJMq4pV11Lfu8nSOJRO44rVbk2ba+pFyZs2pzDw4+YFfZ6B3kOZhuPjt7gKguf2AVPZsl++xrLcZ7tX8rX9X+DZjww61Dy86Nme/TGIwDt7N5lWe/TnjyiV4aja583lPjcnE85XkEtYOEsMljLPCAf7POqGVGh1z6QqkUSKPKeqCoqqZLVas2ta5MhpHob0588uNuzalhAdWmoWiz0W8wV3bt1GSc1yecjQD0wmc4Tw/PjBA16YGds+cHxwF8ocNVtQuIGwvcLbhpNHX/D5Fw84unGL9z76kKO7dwgukf2awWBCJGqN14rWBoyDW7fucfuNt9FFRdsMKJWlXc3Qsd2sabY7yrJMmXMmqbKcM5w9fIEisFqd8Vu/9QwpBavVGmsde3v7qbFT/3XNETHgreNyveb8Ys1yUZEX0yR9knKMJdEvdZfeeYbQIWRC+xVFwXQ6x7oX/H/bO68mybLrOn/HXJu2slyXaTM9gzHwhqAAUCLEYOhBFPkD9I/0G/SkZ70pQqEH6UlBUiAiQAzcDMb09LSp7vKV9ua15xw9nJtZ1SA1rQiKAEKo3VHdVZWZN7Mzz913n7XXXsvhyJYZBBpsRWMqDylYj1NppdbDDA5AefvxlXC3DgIQXlRbK0FV5CipwHrTxsY1IBQCiTE1tfVTcCtlLj9Dr3DWUDeWZVkRKuE1aoHaNG3zR7QnncdG65WV+Yo5gCPSGusEaD+ptfLCktahJN59YKW1KjxnGecwjSMIQup2AswYg7UNupWb08pLRCI9P9p3yxJ02Op4CoFWmjBKqRovUxkmKUp4x2UlDM5VaBUQRxFBHCNwhGEIxldwSnpxlyCIqJxFyJrGNGitaYwlCLynHMozDJTS3t8sDAnxVDDrHCLPEc4SRiFSa7JsSpIkdHXIVbOkMV5j2TqLQBAHAb1uz
MGoy/29LXpp8nvSSltVu9fM4vXLWqMg4pXqdlV0COG1F6TwFzvZfgmgrsZY1yGsLc9kwU7oDRsfUdDvpDw0IUVjeJZPPJUs7bIjEx5np8yqnDGGu/0N9peCo/IUYy6xukshevT33iCcNaTjc5AXJEJyJ3nI4zojvr/Pw8sGMbsiL3v88tkT6qcfE0eK7wze4OTknJolNDNUqMnzKW/177E0I3ZHKYOoz16aok3OhnAMsZhihjEVYNjbHpFEEVmeM65ymqYhjPxk5fj5jCCuCMMAKRW2aZiOJxweHrAx2uKjH79P2onROmQ02OI73/wjNrZGHOzfYT674NFnn/DsyTM2httUVUUpQ9TGDoGOKfI5G4MRVeUIAmiwZHnGZDqh1+nxta9/ncM33yCMIi5Ojjk/O+P84pLpbE5ZFyyrnE6cMugOefvd99jY3iGKu+hEEgRQFiVVUTCfTD2jpyz8tKcO6PZ6vPvue3z26FPee+ddyqpgPp9RNTXPnh9xcnZGWRVIJVA6fO16+2KerhaEHYkOLGWzZJEtSDohOkhaA7+W5O6Ep2I1ta/SpETrmDiK6KRdhFB+lNg2NHVD3XbXPWxgkaqFKpxdGycaa1pBGk+PkgKqukAISZ7VmKZBhXrdGfcegbId1jBgfaNOS41tPOULZ9fVoHN+xFkpX7mZxidvKcBYS1XkyHYQgcr6y4qAxjqiyHtRyVpSVz5BKqlosH6SX8jWIdhjHjoIacrSC3g7R1nXBEHQMl3xjAGlUTqkzOcEQYJRAU3t4RWcQ7bVtHXWQx4y8sMLxqJT38AS3giObtqhmybe1VUIYq1Iuh2KosAJgQwUjgDrLOGqwYjFWairkjjwOw4hIIlTP9arFUJINuLYT54hWmxdeEUz51kMcRQiRYFxFmM8vc3hCAR0Q80giRnGCf0k4YYn5O84HGD8BRKFFe7VhLuikq0SrBDrf0Vb3a6qXC+MA6GGThqjiBkwYZimNHlJYhtcWNITI8xyTJjm2NDQH2wRLCBRFXVaEiYpg16PMtb0sxkyCJBpgopHfJ7ndAddHva+THb1OWG6y+eXY2bmGKEU727coeckLxcTbDMnCiWJS7Cu4s5gxGJ5Thx12e8OeDjYYStIQG1TmwJZzjlfnpBPx8RlQ9LtEwWaySLj+fEJoBj0BhzcPUD0LZXNwVQ4K/jSu4ecnmY4Y+h2YhbZEmscl1cTHn32nE7X23od7B7yJ9//PmknZTDsE0cBL6YTZuMJX/3ye2yMRmTLnKyG+ZXhqizoRSm9OGZSLaGF7vKiIstrHjx4wP7dQ7qDPvPxmLPjEz57/JiXp8e8ePmidQBPeXB4nz/+9pe59+YDBqMNhApwziCFpSxyrs5OvQ5LFHHy8iUXJ8d86Z132d7d4aMPf8nGxoCz8xO2Nze4d3ePz58+4WB/hziNOTk9I0kiguD1o+1fmHTjVDMYxgw3IqJUgDR+YsMA2guvSKfBtkR36ytVU1tiBGmcUoQhURT5BopUfhS4LHDGm8NJIQjCENM0CKT3/2qTSaBDnLVEcey9uoC6rmnqyjffjN+aW9u0QjgGK9oKsjUPXDXLwjDwjq3Sd8/rusEKixTad62dt5jRQQuZSOWTThSSV55NgfPyfdaCxfoBC9VizM4/RraODc76Cb1AK0IlEYF3DDZt5RfokLrOEUKipVcOK1tBH6l8A7OuXXtyq5am1RDqGCFY46p1XYNUmDqntoYkSdjaHKGVpqgNTgrSJKKbdNpBCYHTXnQeUyNc5H3nMFA33owyCEjTDlEY0Ot0EFJ5qyT8JOLmQHIxvvKfh/GDF1p5eqByEEpNXlftkvD/B9t4bu8yX7JYLvHXy9+PSncV1+n1H94CLay2qnRXCfZG9bu+ndbmqMwxQcLD4R2CaECZON4pLWlvxMXE0k1i3iOgk2zxbGoZJ47NzgO65ZSkM+LxxZRKO+4P99nMBqgg4tfjBUuWTPI5prfJ/eg+F4sCU1doaQmlV83a3
d5hOROUsoMRDYEO2Ug6bA16dKsU40pSJdhJQ/LlFU12TFGcUjZLjK3QRjO9EFy9vCKNQuZ5QVk1BFownWfc2T9gsN3jgxef+vdGSGI9YPClr3B1doVuaiIUR0enxLlja2OLbLHg7Tff5N/8+Z/jBJRlzoc/e8J8PieONEkS8+zZMz57/DnzbI5UmnxwjyKHcz4gTSJGW3foIciPH5GdHDNMuzx4+y26G0Mwlsn5BSfHx0ymU7JlTlk2dNKUve0dvvud7/LuV77M5u4OKgioqhrrDGVZMr445/LkBBF4eHAw2ADn+PiTD3h5/AznHFdXVxhryJZT9nd3OTs5JohiBr0Optlgkc3XHntfFF+cdDsB/UHCaJQyGMR0Bin9YZ8wiLHOtDPnqqU2+a2ncdbzT0VJGMYoJUiTGGP8Fq6uCkS79aYlmlvjK8XaNighsVaiWj+qIApbVoKvfk1Te7hAqjWv1XrBAN/8aituife98pCA8riM8xCCMR4msA5vQYTHhAPlsALiOKJufHJPtSKNYoq8BCwS3zz0vpYSpMU0vmmlVHuVcxalfaIMtPb6Ccr5bm/L1lidoALf1a+NI5AWS6ux0A5JiLbjH0QRZVVhjfeTE/iZf6X9Vq4uMnCSUX/IsNf1VaTyWHAYxpRNQxgENA6SOKE2UOUaLQ3GCZQKUA6cs6RhSCQVvbRLEIakScdjZMscqSRR4Bj0+0xmM5SGpt1ZNMYgrKUfdSiauq32fZuqLA3z6ZJzQBjLdJqgX6+39FsI3wJlrXpzzUy4OQPhWuGfdWJdVbm0n1G7g1rBEAgwTYYtG4LuJtqWpJtDdtQ75JOMeVIjRMBIPaCZlBiX0QiN7WxyN97mIlsSxDW1zZiZnP3NO5RXGcNOj65qiAJBL4nY2d6lm+eE1YxG1KRJzHbc453+HfrTIXUqMS5jlKQcdPsU8wV6lnN6cU5VT3hyNqesljhXA17UyuF7IAQhTW6ppXf9MMb3HWrjWOYlm/vbqJeaxtUYDAtXsBSGor9BPh6jY0Fnc4RcVtw/vMd3v/UNhv0eg2GHn/zkp9S14fDgkMM7BwSR5ue/ep+zy0uCMCQIQ8bjCxZqiOrt4VzFdHrB4e4OQ9fgTEZdZdx7Y5/t3S2CQDIbX3F5fsp0OmGRzWkaw8bmBnd3R3zrG9/ga19/j8HWJmEcU1dL8sWEbLFgOp6wnGUUeYHTivxqTF4uOT17yc9+/vfUdcOg1yOKvRfh1eUF+7u7bI22eHl6QqfTIYkCFguDrV+/4r5w1UexptvxRoL9Xkq/N2BjcweNZrGceeV/6TExi8cx/c+Wqs4oco3EopQgjkKvKdBKMFrr1ayMtTg8ZWqFJQo8tugQXkOgNlhjfLJzDqXa6kJ5qplUsrW7sajAJ3JjrIcsrEFI2VK0BJVp0A6axhJKaACBb2iFSehteJqmTX6auqnp9XpM5gvqyhDq1pGiFYMxxnl5P+ffB6VUu1334i5aSpzxyklAm5wVTcv1lXjIwViLIvAXMaHAeYM8D7/I9Wizb0j6pC9dQxx1vcmm9eaYQeiPobUiQqKE9mwEAzrSKOubXK7Feq2ApihJ4g6N9BQ/KSxJHHnIpDaUsvYLK02oioIMKE1Ev9dnuVwSqgjrBBaYZ3Oc86PbpoUsfNMQFibDlTWT8cLbpvy+UcbWQxzilcbZKonefL0r9TUhrhOxuPk4vACO1jXSLuj3N0hCTbq5yZ3+Nkk2oZANaZiQJCVhmXFBTRTHDJIem8uKQZWxcDlpnDBIBuztPWQnmzGzOVHk6KVd7ne3GAYx5/kcIyoakxNLQSJyNssly7pkPH3Jy2zC8yqnaZYYV7GykTfW4NpE66UpjdfqcAaVKmwpqVoYzjrr9T+0Ii9K0iIg0h3qegL4tR3FHcK4S5oMCQ3Y4CVf2d/nz77/PaaTK2zjNW3v7e3SWMv56TGjt98mikOKMkdqzWhzk8vLcyaTCahLpE5piowlFXUSEG/2SFVB7+4d9
u8fEMYBdZ4xPjthNr0kL5ZUVY1UiiSMePutt3jrrYdsjIboAMp8znR8wdnJMacvjhlP5oRhTHc4ZJllzOZTnjx5zIuTl9zZuUugNXXj/QdfnpyQZwtOzs65d7jPi+MXHD1/gtaKfidB63/qcISShKEv+5MkJenEdLtdQhHTNJZFOcP5HbunSwmfdFGCpmpY5jOq3BEEmtHGgKfPj1FaY2vT0rfauX/nqJsSJUXrdiCJ4piq8Nt652os3pTRSzVKcIam9pQv6zwuurrdmx+adrjLECjtlbuER1GVkjTGIZWmNMZTympD2sH7YlnbGjVqKmPQ1hKHAaaqkAK09lbwXolAIoWlbrxrrpISpwW2FftWQmKkayfM3JqEXzVNW836M9da46fPEFhb4wRo7dkKgY6Q0idLiT+WaVaMY69Ta+sKGcfols2hlaJqLEGokVKRBP55hbFIFRAoSxDH1NYSRV4LQ2oNTUUSanSg6XZSgiCirGqy5dLb9/QGxJ0ucjJlsZSY2njcWSpvcKkk42JBxwY402CsRViP3RfGczoxXgt4BRn9TkPc+FpVt+3gxlp/QTikXLEUrgXOaatd2dLMrum6beJ1YJVBuQxjM0JiIhq2D/bYqkZMygwXKHbu9jhcFJws5xSuJokiDu72uF+UjIs5KlDEUcC2jjisYyZ5xiK/osxPuZh+xmVdUVQlRZV7po2pcc74c8na9fnjnPFfWKyz7ZDLKtHa1vXbYGyDc5IokQyTXcZH47ao8mTyKAx9n8GE9JIBWTXGCoGQiu3+BoHswGyBnow5+NID3n3zTZJQkWnJ/YdvUmRLLs5PiQJBt6NZFjM+/OQJF+enzLKcyeSS5TLzDfL5FSZMaYIO1VnOIhQ05S5bw5itvV3ibky+XJBNJowvTynKjNrU1MZPn24MhuxsbdPtdgFHPp+wmM44Oz7l2fMjLsYTjBUMRhGhcyzynPliRlkVhEHEvYNDdne3OT454tNHj0iTlOl0zoePHiOUoNNNmV4tqZY53c2RH4x6TXzxGLBWhGFIEnfppgPisIsONJFOiOOEZTnH2QaEb0AJoBYWJ73DallkzOclspV9tNYSak1pKiTCV4h+QgAhfMPcWd8oUi384Iz1VaVzHg+V0jv9WouzTYt/hl7CUXovJWutF1XBX8m1FAipfSUqfFUphaSqKoIwoqxLGmuoaofEQxBBoBHSIZ1sF1pEoQosjrqxvvKUAofBNCCUdwQWAI2nqQRBiHAW3b45XmTda3BJ5xBS+a25dJ6z7Nomn/VCPI21PokqBdbj06Yx7do3SOdlKaM4xdS+MunEKVL5XYJum3CrcaqmqXEWdNLFNDWhjqh0jbUVQkCcdMjmNU5qZBBR1ZYoDkg7EcvlgmI2xpg+QRjQ6aRrqKasStK0S1F70XcjJFmREzpB4wQtzdjvCqzFGgcGXPP70EnzkpOsCoZVwl1BPxLPVFDOOwHfrIalQKqbjbTfrJD9Z2pMzfjqKYvpMcFJwnR3n07S87h8oLkILqBuSOqa0FmqZcHT0wzRKuU1tuSiWPK0KZBtRbrig1+zLm4QuFdymrYVVsdCm3Ct89/jvBGnbfnc1llffLTfCyEZ9bf5zsM/4Vf1z/no0w+IAs9fmc/GmCZnd3fLa634kguNYxgoqllOxzR8+xtfZWvDc3al8pKsH/z8fYzxantCKbK65Ef/47+RLZZ0Ol4DodNNCUJFWda4Bqgrer1NhnFKaHIG1IyGO8S9DsYYpqcXnJ285GJyySzLKKoaIRVaCHY3N4lCz93PF1PmkwXnp+e8PDnjxek5tYUgiugL348abW7yyce/8tKrgSJJExbTCdl8ilIghWW+yDk9zwDLwfYGg36XqizI5vN/+hhwGIb0ukOGvS2SeAMto7UEo1JeYHtlObyqEKQDJz0v019NDWGkEa3snWkapMIPIDgLxltzaymQOJzSqHbBR3GEwzcmamtwbrV9dygEjTPYpiFNOmAdKgipqhKcQ
6mgXTwaiUC3k1CunSgKdEDd1IRhSJZloDR1XXt3CeHdhIWw0OJbURgSR5Efd7ReJUwrD1uYFlZQLQfYtmI5oQ4wpvICOsbDHOKGupaTYA3+vZF+qEQq3TYB/UntmRYNFoVUkqbx1YazlsY5L3nZVs26HS82DqTxFz4tPO9XqQCtHI0zvlGoA4LGW/Y454VvTGPp9jawTYVFUTYNuqxIOwndbo/ZbEo2mxCm3uQvSTo0jaFpL36b3S2mSmEcbJoRp1cnrfmkWyuUeZU358eC/y84jf/ccZ0kAWH91FxLXVhBDisxHD/2e9Mz7RrPbd0q18e9KYID4GyNocG5gqPnE3ACR9Niwm01zXploBEo6dei96bwovCrYRPh7I3ZOZ84V/fy1atb/+QTqU+4th2JX2kcO6zXBHYW27pHO+FV8fY27/OVt75NNXF89OmHFEWBasfzlZQ0ZY2rBUoERCJhJ7nLgUh44+09Njd6LKaX1FVJGAaYMufevfsoJXj86cfMlwtOX55zdHSENbC1tcPm1jafPPqEpqmoqoI8LylKi9IRnV6XYeA46HXZ3ejR6XdRQcjk8oznL444OztjuphxOZlwNS+xFrY2N9kc9hHCks/nTC/OmI4XnF1ccXwxZpq1u7cwYFnkLPMl3W7KnTt7BEHI+eUZH/76A/78T/+EyeyK6WTOIs/X7/d0Mmd70CFKAgaDAQ7FdJ69ds19YdLtdof00g3ioEsgY4TVlEuDNF6O0StH2XUFAF4HwOGTggq8Dm8gJEGgWo5og7NgbI1oFbOMa4n9YjX+6x0iFH7CrZYCar9gQq1pTI3QAa6uvX5CU/uts/Q4rBF+C+0aT2nC+seqIES01aNrLCqMUFqvNQ4q08IeCEKJN7usmtZNQRBGIdS19w+zgBSt27BGK5/0rHPUVUUYtKpdSFzrp4VQgGllF2krQIMUEtU2JH317DvpqjWB9A4S/nVJ4bCN50FbZ3DSIauy3Sl4XQYEJGEEUiC1bGUZrR++QLAsMsIwJe12EJlP9qZpfKMRjQ4DGuNQgWSR5xjnGHQ7DDc2WWYLwjCgLEsWjfHOymHEeDamMZbhaNsn4qZm2elj8wVlWWJcjdSevial/0xa8Pt3HBZQ6w0B7YXWZ932fRfXHFyk8brPNyrbm/CC/92rCRfwSda254ZbwULXzhwryd4V44aWTunFjtrCBl+NO3fzInb9s0/KftdE2xR17Q7ROX881w7Cu1aL2ZdF1+JFq35BHHS4u3OfUX+LtJvS63RYLBYEYcDGsEsahQRacjC4T0DMbn+XO4M97h7ep9/rkGULur0NXF3w/PkRUgj2leLs6pKz81M++PgjprMZvX6fe/cfMJ2O+eyzR2SLOdliThgGdKKYOAClBbtJwF4/4Y2DPbb3dgmThCJbcn56xvPjE8bTOWcXV1yMx5RNw6g3aCUiLbYquDzPmUyvyLKa8TSnqLxJrVSKOPac8Y8//YijF0945+13+N6/+AF/89d/TZ0vODs94d7hPk0t+NVHH5EmC6wrmM8KxpOC3a1diuUChCVOe69dcV+YdDthQiQjJAqsxNSQLwrqoqG2xVro2ttPi1b3w49+irbajaIIU4Fum1/WulbAxiG1p1FpFdK2mRBCECrleautHmulFAW0V2iz1mKQwico7/TrT2KlFaasfSPO+u20aDFQpRzGNIRhSGX9QEBVFGgpqRvnLwJc8y8DpdBaezFjpagajbIGJaC2LSbWnnwW1nQ1ISRah2gFBs1Km9O0TT3h2vxrvcwd7XSTQGCaijCMkVp7hwjwjA7TEMQppqm8IHpjQPhmW20bHP7/5ulz2jM4lIRAtYMnFidiaueZBtI0KB0RhZqqrrBSYauS2hWknR5VkQGKIIypakNeGTpJQKfr1aS0Vn4ap8WFFILzy1OqqqLf3wApyaolZV1SVjlJEtKJQ3qdkDDShKEgVL8P7IUb1Sl+alJKuf7NCnpY99Nu8nfbzPqq3OOrx/RH8
QnTOdaj4+0D149ndcxWWMe1MIdtYYBVI29VpbrV452/SKyat9a5FpttsVp7A7NdVbqr89a2KXeN7/qQQpPqDUxuefLkU84vXuKc8/REHE1dUwjHdDxmsLHNTnLI3c19Bp0uxXJBnk2pa8uT+ZyD7U3u33vA2cUJf/ujv+Hk5BSE4ODwPvrshCyb8fTZY5qyYjZb+EuCc76SpmZjOCJNB2xGKe++8ZC7h/t0ugl5nvH8+TN+/fFHXF6NGc+XXE7mzLIlzoEa+oufDnzTezqdMZ5MmWcFeQVWKMIgJE1TirokSRKGgx6/+mDK//rx3zGbTvnqe19GK8HjJx9xNb7k4YMv82d/+kNevHzBT3/2M16cXPD8fML9e4fc2Tvk2fOnLJaL1664L171UqwbXrbdIjfSIKhobE5Dg5TetNAqyUo11QlwbeWmA4lwjm4nQgtY1q2tSyuL56xDBl40RQjpxViCgCAI0FJQN41vIAnpoQjlubHW+uTmcC0TYiV/KPGKXCC13wbpIKKpC9oRLy9ujkBoTV0Wrc+VT+BSKaIg8JoOYUBsvWeZvzgoyhK00mAqlPQJbSXl6BW+QEvPcLB1jVISKUNvoeMcpuVLSCnXTsSi/ctfRHwCDrRaV+/OeQxa4rxGhTU+a1uLbt17TVNj6oqqLAm1xuB95pxxqMDDJcZ5FbcwCGhMQ2D8FKB1hqqq0EFMXXvFt35/SJYtMKYmSSKyPMM66HZSosBveKM48jKVpq2wTEOWzRHS77h3RjuoMESMQYqG0bDL1qjHsJfQ73WIw9dP7/xzh2iTm9+B+Ar2Ouw1xtBux/29/RZetGL47ZF+A3Zwa8hNrCrnVoPiusK/dptYvx5aGL79rcUi187K4pU7rYw+cWsggda8zTfEXMsZ5zrpYlttkDYBW9esOe20BdQg3eb73/jXbCW7fPTJRzz+7BFKKq+l3DRUdU0YxBRlw504ZjvtkSYJV9MZy2JJEEiyeca3v/VN4jDm41//kuOzU56/OCVJEuIk5fmzp1RN5Se7qpKmMoRRQKIUWbbEGUeYxMRRSq/T5Uv37rK/v0PSSajKihfPnvHzn7/PZ4+fMlsssEKRFxXOOnrdrm/22dpjrcKS5wWL+ZKsqDFOIbQk7SX0Bn3KquLF0XMm0yu+9o1v8ZOf/ISPPnmEkgH37t7h/GrM2fkZL08uODy4z4MHb/Cdb3+b0dERv/rgI54cnbC1s8ve4X1evHj+2jX3xRbsxn9oWOmnlZzF1hZnaqwrENoiI+0pJlYiW1RKOtXqropWpMUQpyFBqHELPyThF6FF6dDTorRu15LXUFh14OumaS1raGlXrt2XexEZreN20Ti09J1krQMQEq2cH49VmrpsK3AsjfXNKGE9Zc1Yn6ydW4nXeK+vZctL9bCdp73lZUkQhpR15TvzgecoSzzXF6FIu13iOMCUJUGoQQTtgEHNbOq3P1r7drgVnk4mI+Vfq4M4ihFS0jjvJmGcI4qT9Vitsn6qrKpqVFO1/OXGu0dYP+suWlcIIb3iWtJi2NZZqtqPCpsgoixLzyP2ZRhaS8p8QRptkMZJWylBGCYYU3uBHB35Bk/T+JHrleedkiyzGbWp6feHKBUw6o+wrsbanP3tTQ72NtgZDUnT2K+N34vwteiKp3v9M6z3bkLeuG9b/Ur7Covh1QpXrLFgxLX3nLvBT79pgClaSKFFwNfPjKPVqGgT9urQeLU52qTtMGs4Aeda2tc1DczhcXufcFdJ2LRVr71pIcpWb5Nvvv0N7u2+xUZ/xAe/ft9rQWuFtX4gRuiQIErZHW3RSWOm8wyLpchLsnnN/t4Wn3/2CcZYJtMJF5MZSgc8ePiQi4szrq4uKCu/dhvTeJ5/bdBKEgYBg8EG21u7DLpD7t99wN7+NmEaUduGsxcv+PkvfsHPfvkBV5M5URQRRjFRFNFJewx6HdI4pGlqLi4uiKOQoijJy4qy9rlGS0UYJ95YoTacnZwxm884PLzLg3sPq
Iqc3Ts7BEkCMkDImNl8zqPPn/D8xUvSTsqdvX3+5Ht/zPs/e58nz45450tvMRrtvHa1iZsOp7dxG39ocfcbobtZpXq9X9lCNx7zE1J6XF2KVnjIM0qUUjcm0bwGiWw1GeRq3mKVeD346pkDvIr5rgcs2mQrhfDsnTXeu0rMntvuVjVv69js1nQw214SVlCCV9dbK9/Zm8nWtjSyFpJwtGPOIfeHb/GtB99jY7THk2dP+V8/+p9ky6VnJ9U1O9vb7G9u8cMffI+trSFXF96N4cmzFxwe7pPNJyyXGeAb2k+fHzHNcjY2Nri4vKQsl0ggL73bShQHCKGZz+ds9FLSJOXw4JD93X0ePniT7Z1twjgiCALOzo/56ONP+dWHn/Dk6CW1sYyGQ3QQUjUNG/0hB3e26aURdb7wMqR4Xr5DYqUmjDpEnR79zW1cYzBlTlEs6Q+GXE0uKcuMuqqIQj+JmaYdL/41nZDnS6Ty3oiz8ZivfOVrnJ5fMJlMePjgPocHh/yH//ifvrBD/PsAqt3GbfwOo60Una9khWi7+sJT+wSts4RoJ9d+Ax5YY7nOvXIf527ctoIRXPt9CxesbparlyFWyIHDuhtD0mvs9/o1ryraFRVsVTz5RNpcwwcrvWa7Mh5dISVtHwJ3/cd5JkKZGz579IQoGXN2cY6SEQ/vP6AuC46On/Lm3bt89+vv0u8oxpcndNKUbqx4694B09mEIstI05T5fM7xyQuyZU4c+h0tUhKEHl5avjihLHOMrSjyJQ5Bk0h2du5wsLfPg3v32drZQEV+5/z8s8/5+NNPefLyjGmWM9wYUdcNQkjCMEJJTRrH9DoJ28MBdRpxdn7OYpnTNH4XF2iNCkLSwSbBYI+dQcJ2R/PhB+/z/i9/Srfb4fj4BUmScOfODkdHn3sZAympKoPWIf1OB+caBoM+L148J0pSqrImWxSML+evXXG3Sfc2/sDjJrTQOvyK1dDGq+Il1xzd68e6G8nz5vFWyXMN0F4f5fqev9GAu3k3K9rRm1WVyyrpri4S3s5q9RpuMhhsi/HadtDHOedphs6xJpq5NffIP75lxyg0gQgJw4jv/eBPiZIuF2cXdIKYT379SzoR/Ms/+hqhElyeXzCbLdjd3cWZhovLU7JlDkguL6/Ilzlp3Ge0tc9sMePi6oyi8O4xUgYcHt4jCM9ZZDNms5IgCNne3mF7e5fdO3fodFOvNFg7Tl4e89GvP+bJ8Tmz+QIdRkRxyihMqKqKKIyIwpA0jojCqH1vFUJoDx+2jX0dBKS9IUF/RJMOWeqQ0/ERx6cvePzZZ3S7HYRQvPvuu7x48YxsuQQc5xdXvv/RNimrqiaJYvb3d1lczJFO8Ma9hzx8483XrrjbpHsbf+DhcOI6UfqeRGs2iWunH33yvVlp/ib+64lWrq1wHZ7bxXU+57pZK5ynDsrVffzN6wS+yq1rDHh1CLt6ntaZ2Tmk71q3ZLEVhuvaKtesK+x28NMnXeGTuk/Wvinqn0AijKZYNDy9fErzt/+dN+/do9vbp7e1x3h8xg/+6Csoabi6vARCNjd36KQ9Li8vODs5QaqQt958AyECtrb2GG5uczGZ8ve/+AWTrOGbb90l0IJON+SXv/yQ4Uafql7S7ydI4QWVwihC64Cr8YQwDJnNJ3z+9DnPjs84Ob0gz0s2traQgcU4GA5H9LrdVlzKf3aLZc5ymZGVRUv5lFgHOkyJBhvYMMXomDMjaY7OOL+65GD/kPfe+hI1ll9/8gHLZUYQRIRBShxVXvNE+MZ9FIVUtWdcVIVhf+ceX3vvq9x78+FrV9xt0r2NP+hY0bNW6lCr4QQQ1wnxRlyzDdyN312zIFbb9FeaYaxue+WZ/9HX85uF8aqCvU7cq2az14Fe47GsqttratqqgnV2lYjBrgSiWPfgbrwWibM+aTWN4W9+9Lc8/fwxf/lv/z1SafbubGHrnOOXY3ZGm5S1ZXNnm6NnR3z8yaeMRht86Y37HJ2ecHJ2y
fnkku7JBp1un+9++5vcny64ujrnk48/4Oz0yDflau8fGMcBUZSilHeS+fzJ81ajpGE8mXB+dcXx2RXjyRxjHfZqwhZB6z4uieOYMNBgG2bzCYv5nEW2wLS6J1oFSONQcYdGhCwbQb4ssUKyM9ih3x1Rza/opEOW9YI06nB+csn2zgipJKPRsNUVsRRF4dk+OsRZzd7OXf7i3/0lBw8f0N/aeu2au026t/EHHfYfTDH4WEMJ/4fkeM2Rvb6Pb3r50ta1zTLXYsCv2ra3T/AKc+HGoVevzd3ow7WV93VSFzSu1WzmGn5YCQw5t5pG88nWOE9HXDPfhF1ju6sDrJqJgQ68RoeAvChYLmY01RLbOKbjBVo7kjjhyfNHPD8+Y3N0hzTtc/feISeX5/zdT39KUTY49witQ4ajTe7s7vDi5ISmsczmc6x1hEFII1p9lzhCWhDKm1VOXIZzgrqpycuC86sZV1czjIMk7ZAkXcIgIg4joiiirhtMU9M0JfP5guUio2ka4jgljNrJVh3iCMiN4Gq+5GpaYqViOIyAgL/4q7/i/sP7/OTHf8fXv/w1Bt0ujz5/jLOaPF+yyDKquqJpHFVpSWLFe195i3/1wx/yzle/zGBzm7TzTxyOuI3b+P891hWp+Mcq2y9S/F1hq/aa2+tECxPcrITN+lj+b88jX2EIX6S05vBsCtGOmHndBNZF+ArLBVq51BZSsNfNMWttCz2IFmJ4tUJ/5Yd20KdpbagCFdDv9sgWU+bTKTu7b3D0+JSNQcizo+c8+uwx1klMI3nzzYecnh3z47//GXVj2dvbZLnMKQrLy+MTJpMrojhimRekUcBo2CGzlmVhUVGMDiSR8xz/ZVGgVYRzAq1DmqJkNs/Iy4IwitFhxPbWHfZ2dhj0ul5MqVi2Rgc52TInzwtwjm4nIE173kRXahrnKW+X8yknRUPc3eCZcYzSPss647/+l/+MKQyHdw948OAeL89OOD09Qkoo8hyEwBqBFCHvvf0eP/jeH/PGwwd0ez3CyHtCvi5uKWO3cRu3cRu/xXi9t8Rt3MZt3MZt/D+L26R7G7dxG7fxW4zbpHsbt3Ebt/FbjNukexu3cRu38VuM26R7G7dxG7fxW4zbpHsbt3Ebt/FbjP8NsZG563zAPk8AAAAASUVORK5CYII=\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAV0AAAD3CAYAAAC+eIeLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9d7hmSX7fh31+deKb7n1v7tzTk2dnMbuzARuIBUCQAJhBEAJpEhYp0iKVaNN+bEvy88gSLdEy/diyaVmkZVEWk0AIAEkEEiSIxMViF8Bi0+zOTk7d0/Hm8OYTqvxHVZ1z3tvdM7ML9Ow0eH+7d/re9z2hTp2qb31/scQYw4mcyImcyIm8O6K+1Q04kRM5kRP510lOQPdETuRETuRdlBPQPZETOZETeRflBHRP5ERO5ETeRTkB3RM5kRM5kXdRTkD3RE7kRE7kXZR/bUFXRIyIPHyX74Yi8uC73abG/f8tEfnst+r+J3IiJ3Lv5HcF6IrIZRGZOLD0P//NN3s9Y0zXGPP672QbT+RETuREAMJvdQN+B+WPGmN+6VvdiBM5kRM5kbeS3xVM924iIg+LyK+KyKGI7IjIj9/luO8Qkasi8t3u78r0ICJ/V0T+loj8C8egPycip0Tkb4jIvoi8KCJPN671hIh8WkQOROQ5Eflj76CdKyLysyJyJCK/BTx07PtPisgX3HN8QUQ+2fjukoh8RkQGIvJLIvI3ReR//KY67ERO5ETuufyuBl3gvwB+AVgCzgH/7+MHiMgfAH4M+CFjzKfvcp0/CfwnwCowA34D+LL7+x8B/w93rQj4p+6e68D/EvhREXnsbdr5N4EpcBr4C+7Ht28Z+DngvwZW3L1+TkRW3CH/EPgt991fBf7Nt7nXiZzIiXwL5XcT6P60Y5f+5y8COXAROGOMmRpjjjunfhj4/wJ/0BjzW29x7Z8yxnzJGDMFfgqYGmP+vjGmBH4c8Ez340AX+OvGmMwY8yvAPwP+9N0uLCIB8EPAf2qMGRljvg78vcYhfxh4xRjzD4wxhTHmx4AXgT8qIheAj7pzM/d8P/v2XXUiJ3Ii3yr53QS6f9wY02/8/G3gPwQE+C2n6v+FY+f8r4GfcED3VrLZ+H1yh7+77vczwFVjjG58fwU4+xbXXsPa1q8eO8fLmWN/N695Btgzxowb313lRE7kRN6z8rsJdG8TY8wtY8xfNMacAf4d4G8dCxP7YeCPi8hf+R265Q3gvIg0+/UCcP0tztkGCuD8sXOa17x47Bx/zZvAsoi0G9+d50RO5ETes/K7GnRF5IdF5Jz7cx8wQJOF3gB+H/BXROTf+x245eeBMfAfikjkHHN/FPif7naCM1H8E+CvikhbRN4H/LnGIf8ceFRE/oyIhCLyp4D3Af/MGHMF+KI7NxaRT7j7nciJnMh7VH43ge4/PRan+1NYe+fnRWSItXX+lePxt8aYN7HA+x+LyL/922mAMSbDgt4fBHaAvwX8WWPMi29z6l/GmihuAX8X+DuNa+4CfwT43wK7WJPJHzHG7LhDfgT4hPvur2FtzLPfznOcyImcyL0TOSli/rtLXFjci8aY/+xb3ZYTOZETuV1+NzHdfy1FRD4qIg+JiHLhbz8A/PS3uFknciInchc5Ad13SVz0xPAOPz/y27z0KeDTwBAby/vvGWO+8ttt74mcyIncGzkxL5zIiXwLRESeA/6Dt0jIudt5fxe4Zoz5T0TkU8B/b4x5u+Sbb1pExACPGGNevVf3aNzrR4A/Z4z5vnt9r2+l/G6qvXAiJ3LfiDHmyd+Ba/wacM8A916KiDwAvAFExpgCwBjzo8CPfivb9W7IPQHdH/u1r5jxuI7XFxFEBAClFFprtNZz5/hjRARjTPXT/FyUgAhKKYyGstSUuiRtd4nCGENJGIS0Wl3SNCUMQ4IgIopClApQYghCEAWCAXfdQARjNFo372cIlBAK9jwlVTvnxGkKYrBpGMx/b9xHBigBY0z94fFjG1qHvY9xF7a
/GnchYwylAaNBa6p+UkpAQJeGIssp8oK8yBEgShIE0HnBbDplkmcUjRyOY091u5j6GN8Hzb74nsfPvu0lTuRETuQe2XSDICCKourHgl9AEAQW5Brfh2F42zFhGM79VOcq21wP2IFShIH9PowikiQhbaWkadq4foBSglIQhULkgDRU9vcoEIIARCAIhEDZnzAQAkUFzM2FY07ELgRGCVoEDdWPcbhaA+28WJD1P/ZS71SkgdnNhQoDRhvQBlOW5LOM6WRKNpmSzzJmsxlZkfNOzUrVdd19lFJz/eDf54l8Y+LKkf5+EfmrIvITIvL3XdGi50TkI43jnhaRL7vvfhxIG999t4hca/z9IRH5ijv2J0Xkx0XkrzW+/4si8qqI7LkCS2feYXP/kIi8LrZo1P/NJ/+4tlfFlUTkAbHFokL396dF5L8QWyRqICK/ICKr7vDPuH8PnG/jE3KsjrS71r8rIq+41P6/KW7wiUggIv+Va9MbIvKXm/d+L8s9AV2l7n7ZtwSwOxyrlKon+m1MEMt8xYJvFEUkcTIH6GEoqACCAAJlwUqJ/T0I7O9KQKn6JxCDaoAhzGNm8xtz7O+KwMpxnL3zX8b4x6qBd65vzG2nVs8vx/ip1gZdanRRoIuCIs8xZYkYQz6bMR2PmWUZhdYYy5vfVoQ7Lzr+7+MgfCLflPwxbAJNHxtP/t8AiEiMjUT5B8Ay8JPYOh23iTv2p7Bx3svYIk4/2Pj+e4D/C7Z402lsKvldk3aOyQ8CHwE+hI2OOZ5O/1byZ4A/jy0AFQP/O/f5d7p/+65+9W/c5fw/go23f8q1/fvd538RGw//QdeuP/4NtOlbKvcEdN9qEjYZ1nG2JSIOYwREISpEKeUYsrIAYADtkUoIgxgRRaAgDkIiFRIFQhQqokAIFYQiBA4IRUApwS7W1owgQOCOCcQzTgtqyqn3nqx+825H23Zxaro0LySG+n+3n3ebKeJY/zbNMXluwTbPc8qiqMBRG0NhDIUu0Wb+PkLNWMPGjxKx/YWg7vBO3+nieSJvK581xvxzl534D4APuM8/DkTA3zDG5MaYfwR84S7X+DjWXPhfu2P/Cbb6nJcfAf4HY8yXjTEz4P8AfMLZVt9O/q/GmD2XSPQ3eIsCTneQv2OMedkYMwF+AguS34j8dWPMgbv3v2qc/yeB/5cx5poxZh/469/gdb9lck+peNN2O2ebbajDxyet/U7QWluWqgJ3jL2OMboCa20MYRAQhAFBoFCiCBwzVhUgeKgUMBbUtTYox3qNsfZdCzzza5Bnwfa+d2DZvANbaPVM88cawTFVU13I36KpzlfHHzcHGG6DaGMMZVmQFwVFWaAdjbY2YENhjR5UNFzm7bRBEKAarSwp595fsw0nDPd3VG41fh8DqVOTzwDXzfzLP178yMudjr167Psv+z+MMUMR2cUWTrr8Nu07XozpnZol4PZn697twG/w/DPH2nXfFHq6J6BrjKEoCsqyrD+sQKYGEw+83m5rjEGoJ3cQBARRDAi6LBAMptTVuYJVb8MgRMSBbRDcZt4wxqC1xqAcywXLp41zPjXAr3leo43VY1RmDrE2W94eeI0xc0fdkS2b5i+uf/yHx8AYPNmfv643OYgIhIpSl+hCY7R2NuZ55J9bBBp9Xt1D69vA1psUTuRdkZvAWRGRBpheAF57h8eebxw7VzhJRDrYGsxvVYzJy3ngucb9b7jfR0Cz2NKpd3AtL7/dWNWb2BrZXu6bQk/3ZPaUZc2QHMGsAAxqBtz8ux4nnuUpwiglSdtEcUIQRAjKmhnEer4kDBzQWHVdKct0m9ZO7VheaYxTq71+31SPLQCDRsRYk4JYYCsMlAil48N38j+9Q5cUGlNxzXmxDar+11DbK5uxAW2EooSiBGMEOfb6tNZoDEZAKeuQVIGqHHp3ipjwN/GmjzAMiKLQag4NcG1GklStPmG691p+A1uB7n8ltoDSnwC+/S2OLYG/LLYw0g8cO/bHgD8vIh8
UkQT4L4HPG2Muv4N2/O9FZElEzgN/BVvfA+AZ4DtF5IKILGJNFu9UtrHq6ze7AexPYAtVnRWRPvAffZPXedflnoDubGbrrXi1umKLDefL3cQYQ1kUgCAqcOAREYYRKggRsZ+JClCBZbailI1sEEWJoTTaAq0u0U2QVf7HRjMEysKWC1JAiVQ/4MK8NBSlsf8amQPNygnWeMbmz9xz+fNuwym5w099kjEGbWw7ytKGiGltw8VslzqbrdaUZVndV3lzQSPq40538QuUiFgHZGx/gjBA1LxJ6PiPf18nCTb3RlwBpT8B/FvAHvCnsBXp3urY/wVwAPzPsQX0Z+77XwL+j8A/xrLEh4D/2Ttsys8AX8KC7M8B/z93zV/EAvDX3Pf/7Bt4tjHwfwY+5yITPv5Oz3Xyt7E7tHwN+Aq2Gl+BXXje03JPMtL+3i9/3ni1VBuDNj7EKyBQytkeyzmV1qu11hQAKohod3qkrRYiQp7nZLMZZVmgS40RTRAGtFtdkiQljGIXfhYShA48lbKOMh+P64E2EAIxBALKNEig7ZIqprbUoEvLlu1iYaMelHO41XLnSIAmMCHytnG6x8UYy4ybIOv7zdtUxTm7ylJXJh3fr2VZkuc5RVFQFMVtcc9VO52W0G63ieIIESiKgtk0oyiK+hkaz3QcdL/z4Y0T2vseExH5PPDfGmP+zre6LfdaROQPYp/14tse/C2We2LTjeN4zlmm3b9e/QcLCGVZ1oDobLp+codhQNOvJSIEYWhZndKIEpI0IU1axHFMEIYEgVOLBYJAEQSCyxewoCnahoV5wKWOVPBiBKuiG3Eqvakcb8YIumEF8bilGuFh9nI1U64v/E4Wt3nwtmFd9hm0+LZou+gojQQ2ssOyWoWItY17803THit48K/DwPwXYrC28MAuLP4RVCAENGJwTf2Uc06+d/BkJ3LvRUS+C3gJW1b0R7BhVj//LW3UPRIRaQG/F8t2N4D/DBsy956XewK6YWhDvSrQdTZb/7lnrk3W5I/XWrskBUUQ2tAlYwxxFKEwlI7ZiQpIkpQ4jl08bjMJIqjjbqlZmXIsVVyogJlzEtXOqxp0G0zdKERbWNTKubsagQCqgZZN4PQgfDxMy8u8k6puhT+3mT9hjMFoTZnnFMagwhASEAnrhUX5Z1XVwma1CDMH/McZax0PXbdFKRtJcqfjBZsh2ExaOZFvuTyGtXV2gNeBf8MYc/OtThBbv+Ff3Ok7Y8w3GmnwbooA/yeseWOCNXv8p9/SFr1DuadMF7gNdOdUbm530PjfwygkCiO8NVJE0AIS2uMCl8lmQVchyoKEEms6qGJMnS0XmAuHMo2YYJ+cYBpI2kyx9c8gEtr2amcbdiy5tteK5YV3CYV7O7ZrjA8luz0yAYDSRm/ooiTLMlQY1LZot2hV92r0ZWVHv0vIVxWjG4aOMXsAD46lRjedaKa+zgnovifEGPPfAf/dN3jOr/GNh3F9y8XZhD/6rW7HNyP3CHQjHHRYINEaY6zKz11idJvmCEQsAKjaZqmNtsCKIEo580H9Y+NuHTD4cAaocKuKlRXwirttWyPXohFMa1V0a1pwmEyJqes2VIsFGAVGScVSlQ2ivaPF1tz2iwsNczHEvslV+1yzvFlBFyVlUaBdhEgR2AgFn2Jt214vaP7ZlSgQa+IJw9A6ylzkB+I0i8DWpxARwlC5RYdGv/m+nE/jOAHdEzmRdy73BHSTOAY8yzUYXdt0/ecSOhB24kmYD+kKVIAYYye4KTGmQNCOoVnbcKikss0qp+cLBiMGbWzoGNo45lYDhzb2P9qA0UKptUskqJ9BG+3aXbUQANEefUobBSHK2UIFpazjS6EaINVkt8dg2KIr2tmOjZaKQfoDPMsuC+0cYjllWZtmijxD5QFeGxBphOC5iAS/ObE1uQTVQtUMS7PmCQioWaz22sDxlUJ8Osn9Kz/zD/9LY1wIYf3q7VJSvbuGz1MQF/DX+NItWt7gYv+Ruj+b16S
+x3xyjVTXb44Pq6kpG1ttjt1DeROZ1dgwphE+aJ8p8KTHX9l4rRI0UhMDr9mZiorgE29sZLt//wZjNFL5XeyVNfqYBif2Oq7vSrG/a0d2NNbUJhi0EYxoxFgt1l7XPUcjhNSauLTrRIW/uH032vVi3cOIwib32RaC7Uep3rO32GkUtj9sq60/SANimnPD9q4RhTEKkRJjbB8avMprqpBPf9af/rN/9Y6T5J6AbpqmlTNHl7V5Yc6xo6SKaoDjmrdBxGaVVdGtFWhaYGlGPNjzTX0dbQeTspm+DnDrSVIxS2OjE0pN5ewDzxTrwVXfo+mcsm0Uxx6VMqjAvSCx39VtridXkzbaBcXUNRNclALurp6VW9AtKbKMIs/sNdzDlcYmoohjquDbYdV+u9AZl8pL1S4fizzf+cIco/VONX+IH4juGe7nSDEPKs2/AwcK0BgnPg28+lRAmiBnpemYPK7O+KsJduJaUYjo6nub5u7vWU9eMTYJJ8DnZEptJhMPGPbVKAduYWWHVzU4VE235rfKjlW1wD2h1OAsRtAOiOtaHVKNWyOCGIWxzo4a4P1zuHFXir1KCYip55bFckGLB1ZTPeXt0azixt+8Blt3uNg+9cY+UdgYJKn8IvXCakHcwYl7xXaeaGNJU/ON20lhr6ykdH2iMFLnBJj5Rr2l3BPQ7fV65FX+vy2/6E0MFegaQTVU6uYgEEC5JAgjlgFqA8bZIQOXGmy0xjQcdk3graIiRObiTes7uJjX0jnLtFXXS20ZLtQrvzdZGJfl4QeMeIbimLd4EwfezFGzbMvqjQNiVb1M4wC3LEvKwpDnZRV54M/VxoKu1gXKGKIwJIgTy1ZdPnMQhLZfAm1ZkhIXxWEHnUJseJz47q4BtlpazDyzN76N7vNq8Bo3Nbx2cif783teLBssq6lYcxVo9INxkFTN7RqwqoWUZsfWiyUc87jOAReAsmVFm81yY0OknvaenQIOhA2qAsQa8EUcIItjqNSAa5tRMz7Bgp0YC1QWQLU7UGOTb2qAqxUwxyyN+BECCEYCch1QAgEKY3JCVbpnMI4TukXNmHpL7orYNAbjHJA2e8d95hhy9ZKqzvKFnLypL7B/iwf6+k1bUNbVuzDoCtS10wXAarJ2rrslxc0le6/ALUC+Fxw5NGr+nR6Te8R0Y6IopChisiwjyzLL5Jwep7Uj9OZ4R7vGS7N6lUErQZfVWuyAzao76JLSx0MbVdlmPbMuKwCyoQy13VPXYF2WaF1SFIU1h8wxldqR5CMajjus7hSK5e9vU5MhioIqssA/V23HFhtTm+XkRYGtL2Hv74mxYJ2LcRwRxxFRZJMX0AF5UTKdZmBytCnJ8owiK1HK0GrFtFotkjggTazNFgNarGPNwBzwVJZpYwd35Sx0TMGDr22jZQD6rUbYe1QqFmcqeKyGYdPCI47mS0UKPJtzwCuA8RqXR9zG1BfHrDxIu75WFWBbxmvc5K8AnlpZ9Wep6rvAAqJYwFRurFm12LLL6k6eNRurMSpshqIxoJrM0dRjoMpvd5/XECiOuxq0QGFi9ssl3hxvsJv12BonmKIgRzDljOVoyvn2Fg8u7dMNR/Yqxj2r8VzYc8QSYwKsucAxfL+sVAudBUk/7mrfgl2Iqh4zHhSpn8ndxb9PP2c9XzDGaqi60hKcNiOO45rjBqB6nbXHNRaOY2PouNwT0LXky9c5cNlSUiKlqVicbbRXIeq1qYpecGFgiFOvS1fLwSibjSbeLqVtKJdYEDZa0NpUdR8q4FMKCZpAV5s9dFmiy6IGYUzVHqjZOajG71THi2tjc8G2jrYAkYBA28iK5uQ0FNV1fH9EcUiSRlW/NNOlq8SPQBGEdjAMjibMZiVFUYJosmzC1tY2h4dDMMLBcMTly1e4eO48s2nGxQfO88iD5zlzepkwMNUzlEazeWuPV166zsc+8SRJGjWe0TM/U4GubpAMraFsTtL7RHT1b2239M5WwOKDQ2TlQMYyH3ETSqq
JacSTLakgxFtvdDUPvRlAHIA3NTsXaYOyNlr3qZ28xrcQwGVL1k5cMd5/4Fi01CNXuYXCmHl7ozRQwfuOq6E7Z0utFyPbFaZaIEZmic/tPcmLwyVuDDVJntEOYaYjhlnOZBai9QKTWcqirPP7Lx3wHReuEqrM2Vs95Nbs1QKvqphxxZKrPPZG+2sKVl3LvilxbfQv0c8vu2Dpipbq6jPBmdv8rK9egWkssL4ujKrOr6z33rzSXB/eYuzdE9CtHFeACW3Bcq0NSJ0MYY+rwVdJvY5EriB5GHn6bsHHZlc5I7Y7R9nRak0EpaGsXpeVudTc0tt4bOytjXs1bhBg406Vck4xyxe8vbV00QzUw2TuWcNA8EV36jAt+xMEyiZ7SFCNcK1ru5FfnPy5SrnJW6n1pk6ACGxiyeb2IYeHQ5b6XRbaMJtmTKcTeknEuUcusbd7wMHuDk9cusAHv+0xbt3c5kd/7CdZObXOJ7/jk5w9d57DgyOuvnmFbDSl3Ul5/IlHLIOu2DrV89b9Wb1lt8DdB3mXd5SmOapWK/2b9RMIaWhYjuV5c5OqJrm9nuBA0i2aRhxIGv+9U/qrSVk7aDzwGnecvYKqSot62y3U7VFugtdA30BQ+5F/mhr8q0OsOcEupLoezzQ1HlOZVSrgMopBnvDV7dO8dhDxpa0ho+GMNA7JRIgMtELDtDDo2ZgZit0s4L9/7gzP7fb4N598iW4ytHPLJ9Ub+6ye+9qfgEqb9c9t/ILjzB9ij6vMBGiQwHe3w+nKyOLA3DPkec3DjYiGCdL2dIN6VR0qHuC9YoPFMOt7EvdI8zvjNOXeMF1H4bULPwrCiMiAyTIKkxNE1oMeuopg1iHlXjIQBCFxZKMTEIVBURqbDBDkOaXW1pFWlX204FkUGvLCVtby6rFy6tGc087YqAbPGJQijFSV0eaZhDEGXRhXwKekaJgWfGxsEAQVsCoXEaAEyzj8ZHSLQ22mcNeZU2Md8IotumPjeu1uFGIgMoZSDHtHE5599mUW+j02lhdZSAJMWTDNZywtdFFqgdFkwv7eNg+e2iBJIorJhPXVJSaDA17b2WL7xhVWT53naDTj/Fqf7/3e7+bUxiqtTkISOJ7hlvwqqaNJ24ydvL4OxX1oXcDzxxJqh5px2X+es7o+8Cy2sutVJME4dVUcONtZ6LlaRSjdO/QcrRkvjqi5BB7r73XM1NSmCWthrKMPLDPUDnz9O/N8tjYJuJs4uG+YENw7tmq8OMrrYM/NX69Wo4RSw9EQXrpyhESa6bV/xKtfLcmCJ8jih8iMJlU5f3T5gOneZfrS5mfTDzOSFiaJKAx89kaPg9Fj/KUPv8pK6wAa7bSmj1qbMhWT9O0t8Lzbsk3/dLXd2A3S6r004bTqCWff9dEJCqnqs7hlpXpP/kzbRlV9X4N33Q5rrXZmiIad905yb+rpepsnIAqCUIAIESGOI1eOsQ5bEs/snNgEBwg9WxYhMEIpdpsdz/yaq5LGONAMCIqCsqz5vndu0Tje32eu+I2qVTXcP76EonblEcGxW0NtQnFFdES8Wmnq4zwA0/THOvvw3LSo2ZM0BoAyJYhCG+HW1hGf++LX2d/b43s+9WEWOykBJVlhCIMQFQYc7e/TXVigHYX803/8T3j/B7+Nb/vQB7h65Qp7O5torXnzyitcOL/ND/ypP8Vav8fmjTdZWmyxvta3/KIBqL74j8bqmmLcg7gIHhHrNb/vxEBZMfmKg1r1s2JCjrHiFlPMsWLu3sliJ3ulmIv9zlFEx2LdYl0xY9+ZNO5kx7j91fPaemxAw6oq/rxaRZamDd45zOZstdXdcGzMc15qRos7x9QZQDt7Uy5fHbHUX+L82bM89OgjvPA1zbdf/1levfVFXty7RBqOORUs8uzVN5GldYavPsN3f98GPzV5EJ0LRnUw5ZTnDxL+q996H//xx56l1xrMkaGa02q3GFAvCP4pxB0pXkNx8035KAXvOPPf1TDa/NQ0TEmVj6U
6o8n3/Xk1060Yd2MZqxY5qQ1Xd5N7U08XPyEtcwgrdTuyDXQ7Pfg9yJRnBP65lI+9rSe0FlfjQFx0nFd5jP2u9ANPAlSo0CXQGOy+Y22/eTXBfh64a9eG8fo5LKsAY1TVxV4VrJ6xcU7DSlJfq2IujT66zdLupvncuXaCaSPsHo354tdf5o0rb/I93/lhVhfbxAHkWcF4PKLUhmya0e22uf7mFT736U9zeHRI2u3w2muv8ZnPfJZAwcHeDqur6yytLqF1xq99+tN88vd8nFPra6QuqaUa4o6M+37R2PrDWtfPpJrYcR+JMT7u1hCAs6UaaqeZDymSGogc463Xb+PssMYBnnia5hZQzzm1A253XP0J7hZWxI5r1TBp2CjTmrf6jaQ8QDjLZMPRR81QqdmtZXeeSbrJJt60IO7I+lgjCjMr2Nk8YPcrb/JEHDP8rS8zKwre/L6nWUmX+A/+7B/nma9+jp96ZofNwwkL022+vrfH4ZuXeXi5w/nsFv3+kxyaFvpwAkGALoXLu/A/fPEB/v1PvEQcTp0j1t9bNwDRzIFyBYhGcKlK1YLhoykcSa/if6sVprq+VH1RaSnVt/XMr6dxbdjB/+bHgfPbYJoGthLzNsUb7wnoumCK2osqhkAZjPLrtgVDnxthQbfJ+sA/oMJOaoUHXdupStUdoHEDNbCRDqWR2mfVQEDVcDL4bXoQUMbMqXhzqOnab99Kg6tK/a80BnHNT8zca/b3969XU8csz1mOKvblrVzCzmDK5595hdfeuMyjD51luZcSKWVjdwvNdDal016gkwaMZwPGoyHPfu1ZNncPaP3mlxjPRsyyCbPphMnhIZ1eF4k7fPVrLxIhPPTIQ3RbrQoEbDiZs3cDRrv3JnaBs4uR7UNVL/HHh8F7WoyUDkdduU4DSnS1KPvR673bQdNR0TQQiOOpom2crPsMYyogn1NX/WLt/luPCWmcC948VVtyqwhUt8jbb0r89dz1naJr7aHNB4b6ai4qQNdhUN4B5BcXhaC+/iYf/fRv0DraZLQ9YNJuc+vCafLFVaJWwcHVN0i2rvP9Dyzycy92OeKQH/mODuNswrenT/KL8SXaZUZRlKiliEOToA+ngOKLB0v83EsX+GNPvIq4BU5XoYc+SsE33D+dX7yo/saTIkfm/Od+4aRCIbsY+Zl1PGO10Ul4rXhOBxIFpo79tdhhw8xMBcwOsczx687LPTMveHCs2KSqBw04m6j73YNq3ZtOsRBB1bXQq689+2pCpHVI2A/vFsIkc7/XLKNOh200sHlS0x3Z7FD/ccWGGt+5oPbqcg3d04JWg1k0vP/iVvBCC6NJzv7RiN/6ynMcDiecWlvioYvn6LXaIDbMbDaboVRA2o6ZzSZc37zFr37+C3ROX+SD7/8IeVEiWoiDiNMPPESpFWlnAYlbBEmXdhqikp6t43Ds8X2/+HdZx+nWleMsu7v/0oBrJiWWYTqQ8lXpvA3XLjb1hArw7NNndjmgc3GtTfXee1oEV5OjsahZNinVvYDaESaqYUe29zbGNIad+8wRBjsQjy3ypjGcvTpcEWAPbA6svKnLvWAxgipLHvj15zh1uA1rl2hfLOi0VnjgiQ/wxkf/IEGrRRyGDNT/nc2XnkVHH+OyHjPYv8WplVM8W55nNgmIFoSNsGAUxkzyjFmoAVvT4+eunuNia5v3n98hDNVcu2yWkGehDbZpdNVfDRUBbxCrF7g67tbihXuHxq8w/nyvUzQmr8wF3dVkrBFvbcHVZ8cpRAob8oYD97eI6Lln5gW/+ijj1G9xk9e12TNYLz4EBmio3oaGrXqOXTT5Yb3KcOzAO7Wsgv25a73luXLsl2Pn+HW0Btx55LZ3NZS6vrhWc2dWlytRTHLNG1d3+PJXX+DgcI/B8IgHLp7l8Ycvsra0SKRsYogPi+svLZHlBS+/ep1f+40vk+mUlVMX2d3bwRjDxukznD23RiDCK6dOs7W1w8b5i0wmBcNpxrPPX+ah86e
I1bF+vP1R5pRQoErxvt+Yri9Hb/wAExBT5yIpZ7z2C09TS6pLebrIV+PTSd1YdFEBcz4HMSDKTbjabFBPY8uuKl5mqMxnpRhCd3dv+wUa/d5QjhtmsMqU4DUqP6/m3qkLz6rwyC0GkzHTL30Z/ch50sGYZPkiM0rkq7/Kl/MJ+coKrZbixouv8rUbPcbRBkV6gRu64EbUYiEWlsshnemACYqoyOiEPSRWzNAQtRiX8JOvP8JDpyd01NCq6h4Yba8de2v2eSsQNr7fPFD7yATj+q/BdYzXEZrXcv9Ww9f3j1/8/Efm2DkClNUNRGwWaKkKyNWxc26XexO90LBmGXFmBcHasxpOtuZgUXi7lFRssIKA2/DU94Zx93vrKW+aY03udGQjPtNThPoval7M/Mu5m1T30BUL0j6LyfgfP4Cg0CWDQcZoPOPKjV1evXKd69dvYnTBdHxIv99jY6XP0kKbSBnGkwlxHFPkJZkRtm7t8Mqrb/DqG9fRQZvRaJfRYJ/h4QFJu0W/3+HbP/Rt9Bd6dNttrl7b4qkPf4Dnnn+DK6++SVYU1nRQJdrXHWCc5lCKrxEhNcPFheq9lav2PS41bzVzY6MGXiGQGnz969fimK2p1XgfmVLH8joO5aMeKpD1KQammiki9TiuUivcqwhMPeqrX0x9rHcCVkuiIzzVFBHAaSaemdvokxpM5kOcDGWnzf5qh4U3rrLaTYiKDFa6HDy8jKjXuPWbP8/+IOBwGFCc+WP0woB2d5li9wA1m2GSlIKQPgXdoOCG0YyMkHbbaAISSmYq5Masz09f+QA/8sgXEIrK3mx8jHGtB1DP/pqhIiW1uaei8u4ax8lAWdlxRRp95M6rF0mnx3hnnpmPVLDRCX7M2HsUhaCjCbqriU2PdufuhdvuTZyuX5WrLJGqvQ01ocFUpcEKTOMcme+yihxWqkF9vbfkWb6D62bM/Xb7EsDcUfWZd77bbQtb9d7tYAjdRQyaEhsGlmkYj2bcurXLM197hV/79S+RpC0m+Yjl5UXGwwP2d3dY6i/wwIVTtNKYMs8ogoBZWTA8mmI0zMqC0XDAYq/LRz74fl594032tjdZ31imlYYsLS/zgSce57EL51GB4tIDF1jsr/DEwxfYvnnIePGAP/J7P0KsjLWF+z5vPI9X3IxfDKUGXqXuz4w0gDqEaz50KEQq1b85xT24eRukTx4o8ezXVPZhoTn2LZnwo8xUUS52FXacmlqTslEUQcPcZv0gtZmhGW1QQafLzfb/qxxJfoAKNaiJcTH9LiqnUfMDXYAKmfx7P8Qb/89/SDgb0d55k2tFn+v/7g9wOp/y8K1tMMKrqsfP7OwwuPRBUgpGScTYxCwN9gjMlDETsiDgVv9RZDYlS7sE7YgoSmghJKni6jTmud3HeXL5WbxHvZ5T3gzTNJ/U/VJnTVKf54F6LkW3nuP+nUhlkpHqXHuUBfKmVdH7SIFGyJrMxeoGeRspJ5T9XWbpu8x0qRrv1CVTf9Jcpb3zqhmm5T/HfeRPFag8v/YjM3/Nt2Cfc6z5jgfUQeAOofG1Exq5LZU3v0FtoHoO/73nDI4JYSgRZnnJaJKxt3/Erc1dvvzMC3z+819ia2ubKAo4feYsnbYiImfr+hWbsmsyhoN9et2UOFTovGDClIPhwLHnkvWlJc6sLNFptQmDgI9+22McDj9BnCbs7h8SJS1WFtu0Yig0zMZT+r0WK72Up9/3CN/3e55ieaGFd7zoY+TAzKkJzeBxqb6/o/LwHhe/qAfGaWNQRSD4eFf7Xu2sUoKtMOcMo3Y8uh093BU9c9Ku9gHu2jVwG7QIIR6kBTFCeIf+U9UsaECMW+gq++ucr8EzRP+3i86owpuslqn9GK4AGnxMrql4pVgEv3SR4V/9d3hxa5vF3Rsk12+iEYIoITn/GOd3nmOVfV5r7fFLecmRCDoMKDJr795VbVqHrzPtXsBsXacgQfc2kBm0AkhUwcW20E8Crowuca6zSS++WTF
z/2w2CcSZFETAHDc7VG+UCgwxLlvP1H3XXNoqVuc7pF4o5wmd2M7yDo3qHpUKMdcGKRPYD8jYu0Mbrdw70G2GgvkJWmGke7UC0lC9fB80x+DcY8q8qlE71t6aasldfq/u0cARqZYue03lc+epVR3LTHwaoeVIPhxHA+NpwdHhhOvXtzg6mrBzNOLajZvsbN7iYG+f5599lkwXLCwtsrjc5tFve4pOp8fB/i7MxuSzGcODIzbWVtg/2GNvd5szZ9aZRSHD8ZjcGHb2Duj3eywv9uhEIWEQIAhpN2a5t0JhNLs3N/lv/z9/n8cee4Q/92f/BLNZxnScsXZqleGwZHmpw/kzC9YnYOZD2rxUSQCuf4wBo8QW6plzt99nIq78YbVgC6Jcck5jvFb1mWuvcJW0oBWu1gE1A8VrnBZglbuOdXo5siB1rlOtWaiKeYk5PkrrldADbm01OKZiiaGOirH/FVfCxYOF+IXG1KzYX7OK3TSWcIRLPbKlPls8iOgCbTRKNNm5M/S++GXMyqN8WB7gM50+Mx0RpRFmPGB6tEmWdFHJBkfSJVaGKI1Ihq+zuNDmwmyf8609HloY8W2nx8hCwPoD2wwPI25cT8imnt2rxvx2iw2148s6q7V7JrtozvmKatCx/4jV2KpYekeU5pz0x+6FNHlwvTSBrzpY97adLiHqYPGuQ+8emRcqWKwe9rj27TNDaurbiCBw5x6vc6AbzPL4/d6Cx95RfM0Ee+tGoHOjEWIE0UKdZW1c3LhhPCuY5QUHhwOODkfks4zdnT0+/5tf4rkXXoI4QitYOXUKRLh++VWGhwdMpjMW+8ssJQlrp09z/tKDdit5UzA+3OVgd5d8NiVNIq5eeYN2O+Xm9WucOXua8XBEXhQ2HRpFPIopCo3E3uZo4waNsSFOjzx8gT/wvd/BS6+8Rl7YokMPXDiLiOK5Z1/hE594jCiwK7mP/6z6tGHuCaQepH5h+Qa7+z0n3qYauFRepDYzgCcEzkBgV9lKlfXsUBnHId2kLE095uvruB1PsIBgq35JHalQDXhnW/Z/Nv0ackzjqAZq/cY8eHiVbB52cQDiSzA2pFosbFiUMXW4ltXaDDb2VAMhGI02MOokSBAQ7n6dx1b7PL3Y4s1ZzN5Y0DJG93ustq6zcXSFi4sBF3rPsJgc0DmjKOOH2Tg1pLv+Iq1eRqenWV7ustJdQWc5Tzz53bz04gJX3ygpChc6V9l4qQGzUQayDsVz/TH3nzru1jTeYUV2G4O5ClRo9F6tp5jGqorrp2Zv1hpw8RZ+jnvGdOcacYcZ+js5Z5vP1/QYv9XB3jPsbZUYa5vM85LJNOPgYMhgOObN61vsHYzJy5Kt3V2yPEdr2N7cYzwcsbV5i+Fwn8HhDmIKjvZuMhkfEbd6PHDpYcaDQzbOnGex28OUE06d2eDgaEAUCw8+eJ61M+tcu3qV629eYev6DTZvXCOJI5ZWlplNxuhyxsH+Lnu7OyStNoeHR2RZxuLiCtNJwZe++jwf+eD7WOikxMoqUNoBSCuN+b7v/RSf+q5PkCQhQUeRthI0iofOnqbXbSaM+sy9YwtkwzlkjFP1OJ5Zfr8isMuil9ok5LJeaT6TZaKuZxqaWDVxjV+yVEWcq4ltfHWw4/4NqRMqMNV49NGhZm5iu2PMvBHA220rE5ihngxz6rBvuljV3IH4cZZsAder2KU73VYZtueXbqQYjIKd88ucH03ZzAKORmPCqOA7Nv4lZx78ZU63rnDq9IROKgSFUBQ5p85A0hKK/DpBZEhTUJlBSiiHR5RJQFko0D/NU089yIOXPsDXvtJjZ9t2qF0UVV2zoQGx1RJl/HttaB5Vb9XvpEqW8rzWm27MPIZIYyxUxMz5PjhGCj1xLMXwi1d+kR/hrx8fcMA9BF2/UlZmAFOvNv5BqljbOwJlg/I3vOX+pXtTTHPl88eCQWshK0pG4ylZVpBnBcPBmFmWs7d3RK41ZVFQFgWT6Yzt7X0ODobcurX
NZDrh9StXOTw6pJWGdLo9krSNSkLarTbtbodpNqHVTemVLeI2tLsxe9s36S2v01tc4nBvk1ee/SLt7iKTwQ4mEBb6y7z6wnMsLvZY2FhleWUJoWQ8HDA8OmI2GaF0wdH+EUkc0Wq12N2+xWaaMByNCeOUzZs32d/bZ21jwpkLD3Bre4dXX7/Mw5cu0O+koLBV1QD7h6GdRHbHCAUL7dju/KsgMAGlt3kd6/ta5h04NWOojzkOUveHuDoXFViaKgHCJjs0xuccADdcOjUO423ADUUYEGdiqM/34fbzR0njxJo5Nz+3fa4bcbbzI98Yv6+FB2VTHeGPcSVZKrNC3f769yaQCPXOEL6lbnZhBIaX3s/laxv859F3ksgv8b/58N/j7LlNen3IJjCbCe0WJKnh8hvCaGwoS0MvVaQtQ6sdURQZaQC5Nmzv7JK22gTZhOHh8yws3eR7vvfP8cxXRrz2okYbVTnnfV/6JAhf0NJrGr4yWdMy7hNJ3AF3xBcVSpVgVqcTH9sxpPEPGI6T2pcPX+SNvWe4m9xjpmulqj+LqbNpZH6aVmtWcwVxq5tPxfXxzdp4hm/wDgUtQjbLeeP1a3zpmRf48jPPc/X6JqXE5DrjaLSPKUvW1tZ48OEHSdIWSRJTZlMO94+4ceMGBs1wcERvcZEnP/I+Wt0OcRiSZQXj0Zjd7S2mwwPKcoo2JdPpgDROKMIQCSPOPPAQk8kYZeD8w4+zu32LbDomShL6K8uMxxO67TZpu8UDD18idLUS9nZ2mI0nYCAKIqalYXw0RATiKGZ/a5e9nX0Ojo7Y2d7maO+A2awgaXfoL3Y5GozZ2t5DzBKLnZatWeFU40DEecFd0LvYgL7AKKv2NtRYr4bV6mjTnmYnW+1kwDvl70tHmi0LqiqfQp2R6MwDVfiuVCTXM0Sf8eX5VR1z6yMLjkG1VEFpdQafA8+KOIj7y2XUKJpVtxo6o3GgV31qahbugVZj9+1rnohjtuKfqwG+fuWoeWx1DlDZf6m+tYzwYOXb+dH9FS7FP8oP/eD/SLRQMBoLYRgQaU1vASgsAdpoQRQo2n2NmZTIEMajkJUVxfRoSisyEMHBMGd5QRGLZnp4iyL9BZ56/wbd9kWe/aqiLMOKUXoHfN2yug8rkiZUBM2a3+ZZr+1UD9YlxcoANe4gw6g22cwtdMelYXJyf2+kK3zq3MfucKyVe1ba8U7tk0ZnCXc3BVQAbGpmBf6ajeAPY8hL2B9M+PRnPs+nf+VzfP7zX6DIR0wmA0ATxR1WNk5z5sIlLj76IA9cOke3m7DQXaCdtiizGVeuXuP8I+cZDoYcHBzSardYXlmm2+sgorhx/RZFmbFz8xrToyFROyWIAmaTCWEUoVXA6bPnKEVYOX0aJUJpcs49+hDj4YA0VFx943XGh/ukaYtT5y5w/vxZiqLgYH+Pm2+8zng0IkpS+qurBHHIZDRmNBySpilH+we8/PWvcfGRR2ilLYbhgOHBPkcHhywtLjLOpqCEnb1dyrzH0vKy3TVC1QVrgMaOFMdWPJrM9XYTQ31Q/YYqkLgfEReoakd4sPQ+BZ8g4tVU75hpsMKq+oHrM+UmpprTeKUiF0Aj+8we451tdjcHz3Y9OamhpMlEm5lZ9nQXdUPDwWxKV7ugDhmrwdbMv9g58Dd1Onvj6+oYsazXD5OcBX785TbbRz/LRz/wk4xnhgsxxIFhOC0xpVBOoJ0mHBxlpKlBtLC6EbCzWTLch2w2wegOcTuivJnTS6EdZOxsBqyvLjCbTNkvv87i2iFnl7fp/p6n+MLnY2ZZjDRqUFSV3BodbLV9vyDRKADk3w/VuzPOlm4Q8sOMaecWvc4DhKN0frWDhtYn1XvRpU2HD+OEIi9YDtdZW1q769C7Z460eb2o/vUdzVE/Adx5jeQt/KosQIHwa196la+88AZbmzeIV1d5+rs+hdGG3e1t4iii1WuTJCEbG6ss9BeYjkcElBSzGbN2y1Y7C2H/xhaDwQh
QhAGgc0TnjEdT3nj1JUbDCaI0cTsiiITx8IDt6zdo9Xp0F5aYDAboMOLsubO0uwntOGQ6njA5OuTlZ5/j1puX6XS7LJzv8/iTT6KUoiwzjg4P2L11g9FoxPq5iyyf2WD5zDrjowGH+/vk04xOr8ebr72GKa0TpigKJqMR4+GI8XhC2jqNCkK6aYv9owFBlBCFEZ1WggTNwuuG2SwniiKUc75VGx/Ohck4pao5QN0xLoLKzVWZn8T3kSi3dQ0EldMM8VE1x1RJHw/oprmfcJUJoNLEZO6z2m7okn9oFLARzVwIk0djz8J8FI0xGGUJhncui9TvybPxys4pDcCu+FxF9WpPu38e06hFS63r+EXBtoWqb6xTMOT1K2d4/crX6T/1T5iYPl9/bYsy0JQKZiOhlcDKojANNHQM0xFIZNjeMfTWYra3ciYTw/7OkF43oNdSvPmy4sFzJUkhcMWwtLyIDCOCUx2KU5pe8Twf/sAFPv+lNVvQSoRmjl6z6lhlanDSLDQk7m/jsgTrnAEhna2QmwP2oxc4u/ztFAclpS59J1TX8wu10ZrBKzfhoYRTp57k1j/8Sa7spXz7IxfvOvbuUUbavLlfye1ZY/7dv63jy4sbTJ79FqXhc19+hc/+1ousn1niYucc12/coNCG2WzKmYsXCIKAOAm49MA50iQgDgKiMGQynRIoYTAYALC3s8fm9Ztobdjd22Nhpc/p06dJk4RpNgNRZLMZvYUeB3t7TCdjDrY3KbIRRZ4wmU7Z29tlZeMUs+mEtbU+lAVZnjGbThgNBxhjKMqC2WxKv7/A8tISVwdXrV05n5FPR4hAu7fIqY01BkeHXHn1VQb7h0ymBhXG3Lx+jV6/T9RKIcTurEFBURRgDIEK6C32GU2mdDuhzTRToVuZDUYL40xTZmNWw5DU7c6sq8no7JINjcJXyHSvwDrqjC0djdhMtbdw1L5nRYvfSdqAt+FSM9K5GKJKHxf8RpFQk8dqR13H/Bu6BeBA9riN0ZkyrIOtaeX1QO061t1k/nN/OVNbBqhNAD7r05jbWeDcdpz+majByF0Jzxb91bVj1SIw0yt8/StXePZLP85f+r4NLpxtgdri8vWYrMzJxrDYhVIZRrOCVgemBrolzAaGVidnZS3gIC9prRjGlzXT3YBIDC98VvGBJ9sUcYl5KSCI+pS7Ea2tGL3aJcr7XOoJrx3UoWF1oRtv5rFmsGOk1j5VI3RBUM0uJgoUhRF6+UOIuclh+jKnzr6Pw5sTdKEbJRsMkQS0CZnuH3G4vM3q8mlefvNr/E8/9WW6iaIY3uSH7zL23hWbLtBgC9/IOfWvHgiMlGiBa9tjPvfMFRZWI7Is48bNyxztHTCZFKye3iCOAs6fPUWnm5LGEYnb6ubo6IjZbMLe3ojhcMBkMmF4OGBwdEir1WZ0dEQQKGZLS4zHY3oLXS5ePI8YQzmbEmB49cXnmQ6PKIyiu7hMb2mVXn8RI4Zep0UgUBqNznNacUIcx7TbbabTMZPRkMloQBRt0O126HRaRGnC5PqQ0eERWoOKIs6eP0MUCm+88gbF5ham1ebo6IgoL3jwiYuURU4YaHrdFtlszMHRIWhNf7lPO05QSlGUQFbb6I6GU1545Q2m2YQPPfkoF06tVGBQsyKpX1fjG/tZHS4+947uQ9D1pgV81apmOKJn9l5n86zTM0zHALWRKsnB/d/hWKMfm73VsFOYxjfewjgXnyu6kYxjId3r0t5N5glIXekZfKKP38KqukcVrSDU2+XU53jnXfX4rp5BbZHwxg0hH65gBi+gB7uMrn0/O2rE+vlNUDsoYKYMr28L17eFbg9iBSYw9BT0F4XZNbswdLuQbgZcfLLL818Y0esq1k4ZeDWjtd6GdUEdhJhxTLHZQY0EigmPxEvsqZB9ZmAKt+ipum6FeIpne8f1nvvEaot+mfRvSQmEoaLMC0QiesVZxnvXuJU9x9L6OfRRjJ4IvSShnWdMx0NmpXBzsscV8wV
k7zFm05iL71uiQ8Eoyu869u6RTfft2evcEY0l6U5nzpuhhMGo4Gd+9tfIiwEyTREm6DLn1o3r9HorXHjgPMsLbZYWOhgKxqMJh4cjyrJgNBqgtSEMA+I4ZjQakWVTJuMBRZaRTcbM4piD/X2CIOB0uEGaxvR6Xa7u7rC/eZ3h/i66KFg+dZH+6imSThsjQr/fIwwFXeQc7u9xuL/H9uYWSimyskAXObosGBwdsru7g9YF58+fZW1jna3XLzMdDjk6GhCEMa1Oi9Pnz1IUGlOWHOwdoIKEbr9Hu9tlZWWZJAiIImV3hyhLtFLMsgwThGgp6bTbTLMcYyxj/YVf+gx//+/+GGna4gd+4A/x7//bP1RtB3Nbn5v6x7+X40fKXT6/H6QK8XLszQKPU1WrkCCv9UsdLVNlMAki9QartZOG2wLyvT2xNk54U8Y8m6zdWqauhQ0NxkuDSZs5B5c/zle48oDrQ5s81FfVyqrTdBU37FtBdb5/+aYi+wJcf/F18skmSavN+NZFbu3vs3X1CR7+0G+SLkwJEkOUwmQA+UzQWhGFmiMtDDMIRgqTCW1RLGhNPCmIl7rsHyq6k5xAYDBKSSc9wlmb0UDTnRUExSJtGZEPb/DJ9gW+0l7nptzCGFtTwRbt8e7Nipa6XvemktrOXidYGISAaVbwG7/6Szzy0PtZO3eOVJ1DjzL2xocsdIRzp1rs3pxwPRdMmEIELLd587O75Le+xMaZM3zXxz/O0dGA4fTormPvHiZHNGmq+/yYh63eI43bMtFuF6fmaOHnf+VLXN/f49TZZQ4Pj5hOxuxv7pFNcx780AOYbIzoECEhc/uq7e4dcHR0iIghTkLiKCQMFe12SpF1OTo8IJtMUIEggWI0HBEEAbooWF1fJ0ojitmUG6+9xHSWc/rS45y+cIn2Yp+0lVCWGYv9Hnkx4+BwyvbmJod7+5R5hgCBCmgvrbJy6hRpu0OZ53Z1jRQPPPwwr33tGYaHO4wPD9CFIY5bdDttslnG4PCAyXSKioXV9VNcuHCe/lKPNI6Jw5ijw0MEWFjsE6iQMIyIo4A4FLqtDrsHA8rSsLy8QqvVZePUCh986nECr166jrfz3O8jZ5NCKuBwPxqrimqX2VOprveZ1IEKDXOCc36ZCpWcXbYRg1llI1WgLDVT9q5y/LXddYx3+thrGeVgoALTKjq3AmLPXsU07Kw0+YnMmRY8MM/FrFeOtrrdnssaKe21XdKEr69h0dXgw8Q8sHtlXRMw3d+hmA05u3aavd0h71tucfOlCwySPcypN1hcG9BJoL9gGOwKemJYWDcMDoVpCVlo0LlmMk4ZFIqhCdAqYq88xQME6GmOlAmHZUwahIQ6Yjou0WqMdCJM2KU92uXDuealxUVe5cD2j/HJJbWTq3JY4qFXN8a871dXiF4Upx96kJ/+uZ+gFy4QJCHRA4LulHz8sfPo4Cx5cQnE7t+Wqhbj7SswanN5NKGVHtJDYaIZ7//g03cde/eu4I001++aMd12bHPgQ/Vy73BRwHDt1iG/+utfZGltmUAU25ubFGVOq5vwp3/k32B1Y5W9nT1u3LzObDbBYHdXmE6nTKczwkghCibjMXEUEUURUWx3vy2KgiSJ0bpA5xm7W5sc7u0xHgw42N/n+usvMJ1lrJ+5xOq5C1x6/FE6Cz3y6YSyyOj1ugShsjs5lCWHh4eURYERodXp0Gq36PS6DAdDup0WaZoQRxEb58/QW11lePk629evsXXrFisrC7RbHdJWi6TTZv3MBkdHE+I0pd1dYHl5iTgMicOIPC/Y3z8gSdosLyy45wpIQkU7Ftqri2RFwep3fZhPffgJup0WC90WuKQAZaTamcPvv+WlQcRcDV2pJrfWcuc0+PtABMvuKmdDFWJkmB+CDsyo2V6l8ktd8MRQmx2qTVbF1KBd3bg2WdQJOj4IDexWNKYuQFMBB433UgNiReh8W6sWmka4lOfPngE2mLW7RrUvnDcyOIasqRmwZ+V
hHtCTgnFvgfHgdYL+g5yXGeXXzjL9yozux7YpF6eoNCPe7hGvjSlmJVqFLCxNuXYtRHc0k7xg0ayQ74RE44jCBPzKa4v8oS6EY0MgJVMFUmiECMo2mpCkBbNpQaaFJ40m63e5aoZoKRGXxF31lEv3bjwZHqCN1OU1ce/j0sWHWfjQwxQvZgy39hlcHfLAx1pcePwprl17hVV5EDGaOEzpyQLvf+CDtKINvvbir3Hz2vM8+mHDuZVvdyaOO8s9t+lWA2KO7dZ/V93jmUN13PxVDJDnhn/6zz9HIYY0Vbx5+bKNiT17ht/7qY9xenWZrMgps5yr10qu3bhJp91iNs3tppUYAqUIlGJaFMymU8IwpNfrkaYpg/0DEIUKhL3tLXSRMzjY47VSMxoMKfKS9YuPcu6Rx+gvrdJutemmCSYJabVS4iRkb3+PbDZjNpvR7nQAyGdjt716Qm9hkTzLGE/GBIFCa02axLR7i6h4i2w8Yndzk6Nzp4hj+31/eYlWp00QH9Lp9oiihMXFPsoYwiBgfWODoijptLvYjUADwjAgjEL7PGJoRSFpFLDUjQEb16wRlFcrDT5EFJ+6Wu2E0GRU3jJmLODej040cFxINVkqruh/nfZZs1eqSdmMYmiyCB8+pxrj2bNRT0KalEJq3b+2Orq42ar8aU1rq1v6z5o5gf5ce5xumCCs6my03yLc5xI6CHIv3lTXsE427cOrxI4PjVQbeIoxrHUj9t13N1TIG0fXeSxNWY0KiuEi8ZcTyo5GBxlqusq20sQPbtFdmzKdHJJlBcUwJxIDk5B4mDHLttndmnL5ZsCt8Yy/9KGztCZTjI6IggCCgGBaEoQbZEcT4jRnXI54YRBzMVqm6BTcZNZ4N04qx6dlvz47cE5/aESKCIqnH3iSL3zhV2irCYtLId/5hz/Azmjb9iOw0F0l0T10UXDl6i1mk4xzacojj34/OWOqzL+7yD0H3bmh2egPgWr/s3diVgDDS69e45nnXmBxrc9oPGRn5xbrG+v8gd/3Kc6s9TGlocgyOq2Ec+fOMh5PGI+GhJ2Y/YMjwigiCmP2d3aZTMeoQNHpdJB2xHQyJZtNiXtdknabKEoYHR4yG48Zj8ecunCBMOnx4OOPsXJ6A2UMpzZWaHcSwMbAjkZDQhWgRLGysU67O6bIMkbDgDLPSJKEIFAcHhywsNRFAsuKgyjkwUceYevKDXqLPQaHuwwHAzrdNrNZThjFJGmL3kKfg/0Bk8mUOEhZ6LQRDAttjVIBrVaLVppijCFRilhU5YC09RLmGayNuJFqk0k0hFLXYFCi0UYo3YC0+6O5ECPHujSm3iX4PhIRH2xk1X/BbevtYFM3bKfgiUODVnpN3C9UeNNEA7T9FTwA+1ObwOkzo5yqP5eMIh6c7Y2a6b84tdkAot33Rh8z4dVA6kHVVx+z4K0bDFDX9l+3GJQNs4LdU8wgZJw73SZ9fpdp5wF+JhsyiBd5Rh/xRByhOiu0TI5ME0ql2E9TpuUucnWRfLMkylKWFwxZV0MrJC9m7GpNL045s9ZhfCvkhcGYv/31Pf7yuQ6hMpQmJtARSEAxzoEA0YalTk5GxkubJRcW2xSrEdvYdDK/oDQzCOffigfhGnjtY5c8un6R/g9+N4f7r7H09BK95SWu3HqRjeBBlloX6addrl65yu7OLvvbLxOEQx54+oOESY9b268xne2TxIt3HXvvQvRCcxBQMQZpfPt2YgBdCP/4H/88RTkjiuH6jRtsbKzzg3/sD3H+9DohJVNdMJlNyLKMKAxZWVlhodcFDP1+j5s3b3Lzxi0EodQGFQZ0egt0O12iIOLo4IhTCwss9fuEQcC41yXtdlhZW2Ohv0iRlfRXloiSmHYrRRRMpzN0WTIcjSjynCAIMMYQRRGm3YJWStKKEG3I8owgsqaM/b19oigiDEMipRhPx8StlNZCjzhJKPKM0XDAaDyiKApOnz7FUn+Rhx9MKIuS6WTKSr9PgGEymdDutEj
T1O60rIQkbGwQIzSm/DwXaKqSzS+9t9xnUvlEyMDFNRrBZj1paVoj7iOR6sdXnmoyAp/o4G1/TbNDbfd1f0ujX6v+a2Qq1fYIe7y2LNvbcS3yWXaksGF4c3v2iedkNj+zWjCdzJtEnE5ibBywvXW9OZS15QqGotZeHEu25NsCkNV0/Mbi2poZjGXKwZmAR1fbvHa0ywMqQLcTBsEhL7VGJO0ANY0oyoK2LohjUHFIyA7D/JA0zUnLgs5YIJgyak/JdYvx5CILN/p8eFl4aGGVr+7O+AfXZ/z5swEqmiKhscXkS43JYqe+t1hTMUwP2YsUG/stBv0ZU6XrYCnxI7exWjrWb5/eWDOQwe6C4hbN1Qvn6b7PUKYZw1FJkY/op08w2xtxs9jh+uUXmE1usHzuDL21RwGFNgWry+fZOfwSq9GH7zry3gXQbdLZ23XR5ip0GwS70SUYrly+wa999td57ENPYjBsnD7H9/7e7+Ti6Q0S0WSznPF4wizPmWUztra26PYW2FhfIQoVYkr6vZQ4DNg/HFIeGhaXlgijhLTV5txDDzEYjwkV5NmUzvISp8+eZjgYOJtsm8l4hjYlrSQGUxLHEdms4OhoxHQ6QamAo6MDsizDb9suSpAgsItFEhMlMWmakrZiWq0WURQxHo1ptTu0el26S8usrm8gCmbZlLIsieOY9bVV1pYX6HYXKUuDKaEocpJWSkoL7bfvEWhHMYHQYF136vm3eWUyf444UwTYiRk0QOR+NOsqp6tX40+UAxywAKmq8eeZLFiXkzRQT6rvqU0KTqpLufPnLWrOnFABgHF97kGhMSN8ZpVj2s0asfbetpR65eqr8MXgi9jYD6py9NRQZGoLStW+EmO8HmAhV/v2UjJsHbFy6QxLL+1xfjDla1sjkrMrmP4I1dlBTQrCDGaTgF4aU+gMUohjxTifkSxETMY5g9yQtiFjitoOUaUix9CXjO85E/P8KOZfZDF/ohWRTY8wnQQJY4g1RaFolUKpNLNpzOxwG3VmkbOdBa4k0+aSUr3vOT3CmMb7tn0diiFPhDwekbX3KYojTGY4HO8QB4bpbkF+9Do7N64StUacefJJgqTtonwM4ko9pmmbrBjedey9a3G6zS3Ogcp+CDXU3pkwGUqj+bl/+a9IF9tEaUqr1eXUxgYXT6+RhtYBNssLlApop226nRlJmjIejRmNEpYWuvRabZIgJD9/hqS1h1KKdqfH9Zs3EFFsnDrN0tIinSRhMBiwd7DPZDohSWIWFrqMxiMCVVLmJdubNwljxfbWFsYEFKUmz2aIGMbjMVprgiAgSdNK3YuTGNGGOI5ZXlmi2+sRRSFiNJ1Wi+5Cj0efej8b5y6wvrFBnk+J4pCFhR7L/UXOnV5jsdum3eqiVIQuYTqZIAhREoEx5PmMTrtHnETfxBuqDYa1xcu9O2OqrXo8+7UOuLd8ce9p0eK2XvfKd8PsVXm3XUSDZXxQhXlNRki7W2sHlYnBTmTlPjBz30HFsvDQ5xhmZVf3C0DDnOHtyhXTpEGpsSaDKi7VExgLmo6fNlRsf39THVsKBD6Gt/K3KPvMYjPdtNEYSnBZfAUZ2w9foHcw4tzRkD2t+dXffJ14Q3Hhu1MWNrbolhOKXDGbdmmlY7JsAIEiais2y4x2XxFp2B2HqGCBtJWS9VuMxhNmQ1hD88Fl2JGUr4ctnszG5KN91FJINumiTA+TxaRRi+V4xC+8+ia98Q6Xwot0zi5yqDLqOL1G31FrEMfT2HOjmSabTNJ9IhQECpMpivKQJOsz2tlkc/NNTl1YpbPxqL2Crl2NWguIIokusT988a5j796Crh9E1BV97hTDa+afvRZn+N87GPPFr77IY0++j+F0xLKUPPrwBRY7lnHO8pxpnlkfsNhdckUp4jgmSRLKUlMUBcPhEflswuryIt1Ol+FoyoVzF8jzgqyYsbK0yCOXHmA8GrO9t8vm1hbGGPI8Y29nh/29fcv4VIiKFGmrQxK3HMO
e0W63aXfaaA1xHBNFISoQkmTR7pulNe1OyuJij25vETEaUxZks4z+Up/1U6dYXV2n2+synlhzxcJCjwvnzrK2vEQrSVAqQEmECYUoDNBaowSSOAFjSMOwKpx9WzHs5qJXR73f/uWc7cH/cgctxBw/4f4RWzBcURXJNtr9TrVQeoZbd5N9zkImhLSxu8aqqjau/VbhU8H8uRWjrJayRt83APdO2kh9Tq25+Hen3byq2Kx4nHFtwLKvWpH2aRXGAVGN3dlwk6C7QighWqx5QRvrgJMqfNDdVwyDriZ84hzrL7/BhwcF189f5Ktfe4aOOUfr962T9Z8jiseozhFiUpJ2hNZjlDbocchw3AMJiHRAfON9LEwV7bhAL2iSruHmOOfWICJtF/zE1j5/vB3zofYSMmtjIiATZrlGGWEx7XPp3Cl+4tYb/J5rN1jSJXJhAVt9zNrNdWVO8guMcl3m7dq2h+Jxn2kyZVZMCQR2h5cBOLoeM9p8hYvv/zaiVsuZW1wNZFO5QhGjCVSM4V1Ojrij3GHevu0pjl195rPPsLh2iiyfYsqMpX6bNDQoU1LkJdNsxjSfMJ3llBqKokQpVdk4AcpSo01Jp5XQbvcogevFNoPBGJ8K+dhDl1hop6z0upxaX2FpscPewQGTyZRAFAv9RfK8ZDAYcnSwT6gCG+FweEgYx8RpnyRts9BbIAwjRAzdboswVCgFUmqSNKLVSghUiCkLStGUhXDmzCkWFvssL68QBorprM1kPGZteZWN5SXacYwKFEEQ4rc8D8OIg4NDgjAgCSPaSUorjBupvO/svczF17/t4cbVZHCQJL604/0lfuv12pLr6t95E4H4NNNm6BWAoFJFXk6Iw677xIovhgJSBRPgDcM+jtaBXZ0U0YgeaJoqKkRsluqWRnvqbeO9iLHONB8FYRcT5zjDZ7iZ6h7G2bOtTSrgtc1/ycOn/1BdwQ+cccFmsDW3gS9Fs3OqzeknzrL0/GU+OHie/Mn3s3PjKsWLFzk63UJFh8TdZ2F53+6aoUJ0WZK0CgZbiqXdb2dBBwSmxXR6xGRwxHJf6LYMrd6MUZlxY0+Tt2L+/n5BHC3xdAwtWUanMYoOxXhEEfX4/oee5lYS8+LkOo8eHpFuhkSnUpvwUkUS1MaGejnz78zajJN4iaG6Rl6MyWaKstglCfuk7UVOffhhjIps6KQHb4rq0vYqJWJCFluP3HXs3XvQNQZRDc2l8dXbz1VhOJrxC7/4WZbPrXG4t0d/sc/a6gqddgSmJC9zJBAbfmVKjgYDJrOCdishm82YTgwzoynjiDiKKUtNqUtUEBFHEUaXbG3v8NClc/Q6bTqthDiJybKMRx66xGQ6JctK9vYOGOU5o8mM0WjC1TevcnS4x3Q2xmAdZ3EcE8cRYRSSJBEL3S7tdgpowlDRa6cAxHGEznOKPKcsQsqi5PT6Gv2lZdrtNtpoup2UrN2m1+nSabWJ45jafuc3hDS00gQVhKRRTC9J7Pbs9cy9Yy/P7cjh/q2UpDlbrg/UZy5Rwtsoj2f23E9SgY0pQayhoX5AB0nVDrC1BxwxKGkz0AMiulVmW5X6W9HSGtrs/++wnbgL76pZqf3dh/mCQky94968zRdnTPfX9TUHvBmhVkXEMz27E6ULExRKMQRuMYiCPsP8MpvDr3Gq+1Rlp6yv3zCK+EcTxe5HH+LU9ibnX9+kdf0LfPnMk8TZjL03U0yRcHTrMZ76WJuRepV2GpBnEZNBC1Uo9EHGCM1qS9t6BarFcPuI0QIsrsFSd0q8NGVzv8dRb4mfKTSxtHkyWYLhES19GbN/jYAVgijjT555lL+1A/vpmODNHc4unoWWTzmp2Wi9d5x9qjAIWF1cpb/8KG9O3qDMR+gy4GDwEnEYoZVmeeMsJrfOsjpd0WDt3wFQggncOCiJ1Lu8G7AX6wRwe6Vh3uG8bB5kePX1a7z8ysucN0M2r70OQcLjjz/EeDwi7/Ycu9AYrVEKWp0WrV7MZDL
l8OCAPJvRaadkeYbREbOsQEtGq5PQ7fXI8msEgWKh17GAGcZkeYEKAhY6HTrdLhjFwkKfg6MB+0dDpuNbJEmL/soaWmcMDo9oJSmtVmqdY2FAp5WwurRImiQoJaRpRCuO7BQVIc+mjEYjojAkimKiKKbb7RLHMWDt1GmY0G61UEGISGDz/Bu1QgPH5kMV0G2lFnBdF845Rt6hzHnkG+fXYfNu8lPvJPGN3uO9I47lUI9TX/qlZnl1D4BX7xUiCWW41XCimeoazThb0zhvzpzgGKfX2KvQL7wZzrNTbwKpGbHFSA/kpg7PFVMBNA0W7XQTd9D8wmpcSoRdQGP6yTqbgy/TThZphedd1mGDNQsN550dB1k7Yev7P876T3+a3tGY8Oh1tg6eR9YusdU5RbKwgtlfoVP0GQ73UVrTThfJ8wISjegZgyyjQ0kHQy9tMRiO2ToS0gdi2ssTzl7YZ2EKh8OEXwpaLE0mPDC+DAJxt4cKuuijK3SyjB9evcDf2d8kY5fg+T1OP71SE4nGexFRtJOU9ZWLrCyeZVoUbO68yQ3zGXLRHOxvosqCRfMYobSIkxCdObMMfjES/M7A9p0W1e4eb+VevsdM16sv34zYjQ8/+7mvsrq2jCkLlMDR3jbPfvkrPHjhNEWRA7bUoVfNtC7JsjF5nhMEAjpgMpnQShJG0ymjyYReHFPkBcZAu90mCEKSOCGKItrtDoUuCYIAxBC6sKjFICKKQtrtDqPhhM3tLZIgpSxDjD6i0+2ytLREkiSsrazS67ZY6HRpRQlBKCgFcRRZp4QxzNwODzbqwNBq2ZAvwRnnS40KBKVsaUafwSTOaRIEyppOjKEdR8RhvT+B7/HfDvds1te9HVnnr1wrrPePGAFlNEYC/wnQSCzwmpkHMPHFfgyGgEQZSp0Tqpgaeo0DVc82qZisw1ZHQA3GbeNOdWajSpmpnV9VjdtGNIO9lna/aGeJqN++ruyUjiVXi7BdVDRU9/DtVErYWPo4m7f+MTePvsClpTawiIdnf77N4lLV82hg0k1of+ppkl99lo0g4lJviclsl89vHnFl/VFKFZHGHYJiQGEKJkc7dNM2s4mhjCNmRhiXOSM9o680C2nEoonZvVlydQ96FxTr5wythUN2Bl1+uYj5M2qNdrHFLIMoicFAOJ3x4KLmD/fP8HNpypuX36B9bcDC+Q7enKBEsdhdYmP1AdrJMoPDfW5efpG9ncsMO4Zi+YjxcIiZDbiQfA+xWcbMSkym5xyZVWyv63NjfLB7gDUx+HF1u9wT0PUvBBp2KP+FV3manjM5frad9IPRjFcvXyPtJAyOdpkMhwSUTMYjSm2YzjKbclkF7YPPE1diGI+H6LIkDBSahPF0xmSa0V0URCmKckqSJvT7iywv9UmTFiqw9lHjHkICN0EVdFWXOC555OEHWd9Y59XXXmd3b5ddhPFozMapiHbaYrHXYXlhgVaSEAUhQahQ1ZYB2BKPCGnasrGBxrjEjRDcQI6j2D6T1OzV/mMIgoAwDAlEiMOQJKiB4nZbbuMDMz9xG2/rDp/jbOpNB0+dTmmOO+nuQ6mqETQSIcRtklaNWqk2PsLb0gWhZ9qMOCRinQpgmyAJLhSrdqDZEarrzV9M9UqqzyyQuc1nxEOFNSWJA+va3NFUNXwgWNOeW5c9t+ea+hRXzxcDWjTKQCs5TSfqMs5usXf0BZYWv9s9s621oYypEkqaC63BsHtxnZVve4D4uesERxM2OiF/NNK8vPNVbrY/xGHYQkUhxdDurLK7f0S3FQFt20aVMtKameRkuaEnsBZH9FSH65dzXtnLWDu3xNpij53dkk/HJd+fL5LEB5SFTRnOdUk83uejyTIv7OzxtV6Hm6/t0DvVJkoilhbW2Fi/REBCMR6wtfcqt25e5ta1K9y6dZkz33OaMhdm4wPOxZ8kZsk+n18svT2esjYxNOeCt6lLwxx1B7nH5oW6olG1Vt5hrt72kXMivPLGDSSGpJ2gggV
0NuHoYI9bt25xsH/IpTOnSaOYaZERBIFNTNCaPMsoioI8m6FUwGA4IowTsqJEG0jilCiO0YMR/f4C/YUuG+urpElKEkdVi+zWLAYtLiA9CAHFcn+RVhKjTEl28QyPP/Iwg8GAXq9Hp9NmfWWZTpoQugWhXmB0I2pAbGGaIEIEirKkLEp7rAhRHDHLMutBFhxdUQQiRIEQhkIaRIQV9bkT4N65j48fZqojzLHPpZ7A7hODojT+afw+sfcfAFd1ZuuNdJxZRtAuLraKFLBn1GFlAqJWGOurdGUdW9BGV4Dpt2ivwkCp8/s9h22aFYyIC7/zTNcBr6mKOFZXqa9TBSpVb87H0vq/ql0x/DNU9/csuawWHAMERnGm91Fe2v2XbHENlT7PYvz+Cl6Pj44qP86FH+099QCrgzHx61vkRKycPcOTO9dZv/k1XoyX2ewskccJRZbRTVscTSbIdI/l3gIiMVnQZqAgVzmlKihRdEW41FpkNw+59tUpvXVDkCRcWW5xZXyZC2aFJFRIaQgZM81C0uAWP7Tc42u/9ZscrXQ43Mz55Hd/B0oHjPe22N/ZZDg85Nq1lymKEe2VJS6d+QB5+5DRcJsz4QdJWLVganT1Du0vuvq36lZslIgdQ77mwrcwDdjXoze4wtfvUEoDuQlZ6HfQZoySlMX1FVQIRhvy2YzZdEakgioRYTqZuNCwIQcHB+R5jgoTCm0YTabMsowwCCnLktFoii4N7STm7MY6S4sLtNKIJAypAuEd09S6xBhDYRxDF0DHnD97mvFsyqyfk2UZrVaLJIqIwoA4CusUTrB2Z2OBPMsyjgYDOq023ZU2ALMsI8sywJod4tgyXaPts2FwgGuL3CRhUE+Exn3ecVH4SuQOE8pLE3DdJ6aO8tQYtLFmoPvPvNBcDJ3dVAJAqvjj5u4NNRvGPXxIaaYUZIQmpOGKxNfXpeopoYYtby5QzaOriARf5c2Dsj3EFcRxJgkP7FQLXl2RzH5mHO81c3cVt4x644NfcCw7s0csdB6BrV/laDYgPPoKrSUhiR5F6wCjNImETArvsTcYKXFJ1JSBYvfjj3NuWlDeOqLY32Oh1ITtFq0w48b+K7wRLrEpUKqEbhKTl4adowEo6LUWydMWuQ6ZmoL1dkAehrSMsNaKWOik7JQ5mYBpt3httU988xYxhnZnzSYrjQ/Y1opz/VP8R09/gL/2ld/ia8++zmL/DN1ii93BLvuHO5RqxNKZC3T6D4EK+OrnX+Hq127y+Hqb1iMbzqSn8VvSVxqEcXX2jqmEdg2vw/J4t80LVVuoJ2mdo37n4xpGCEQgL+GNq9dtqJUyxHHE4cEMJUKrv4CKQlqtlo2tdPbOIAwpnT02z3O7g26kXXlHTZ7lLPVtcfIoTmmlCcv9HsuLPbrtFmkUVc4oAUwQ2A5VIQZDaCBUmkIHxEqRlyVKWRW/TBKSJCEMAvesurK5ApSlodQlRV4wnkwYDUd0221EvGnEWJZe5BaY8wwQ4iQhDEKiICQNQ+IoIg6CancOL3XAd/3fu6s4dbh8TZTvdrTUDhTj36e4iAb3su4zwAXwKbW2L4S6zmoFa9Rxtn6XSh+Da49LgpRxOaQnfaryjKZWvZvXqLz+NBdGr6JoRAJq1urbYur7uUUbz5Bd6rLj2O4unoVVm8E39JCa/pjG8d4xpx17CyTg9OITvLLzZVpqwO7gN7m4fI093WMwGHC6//uqWhHa3c/biBFFEYfc+NSTrP/ylxlv7jLORpSdlLi7zJNRyXq5xbUg4Y0y5kB1UUGLNIkZZxnTckJ2NCZOerQ6XbaKkLHAQq9DWuT004jTccpQT5jqmOJoyNXyEClK+llJEGwwygpiYDzc5tLSGn/+0nn+3s6IX/5Xv86DGxndVWHp4hmidg8JLDMt8oCdHc2Lv/k8n/wL34MxClvo/ZhTtSJQtWpZayH4lbPBhO4s70KVsbdSbGupA1/sz9GwoDSaTqfFzk5Blk8Iw4BplpHNxuz
s7ZEVJVEcOHBTBGGEiCKKIhYWFhgMRxwNhmgNs2lJu5MwHA1pddosdFss9XqsLvfpddu044iwyWbwc6uejAEQuVCtIgzIypIwtOzTb/poQ7lqVmsvYYFLG8MszyjKwoW+rRKHAUVhSKKQOAwoy5JCa0aTMWWpWegt0IoTWlFMOwlt/rmhijNtitz+0VyPV6rysYcUXU1/l21aczBTXUOqGFNfdaqC+LkdGe8PMaIbm+Qwny3Z4KDSAFn/u4fktlriQB/RMwv2GtX7xoKwqieqN615rnm8hGmtrEMVmVtpMM2dgc2cmWKOx4pvo534/pwaq01VnLxplfU25NKx4I2Fp3j+2jMMwpJUa64PXmOkDQvB70HrRjkf8QtJ7UQEQ96K2fz9H2HjV75AeDMjmOYQjcjSLiuJJikKlobXuRl0eSPocxQuEYcRUZIy0wVTM2V7d0p/ZYUiWGAygvW1M2RoOlISBppw7wbpLCeXLkYfcjS+SU8ySm24uj9h0XSQbpsL5y/wh8Ob/MLhgBtHbT7+4UtEQb2hmMkVz3zmeb7wr36JP/HDH6V35gzWaGaokx6asc/ueZshgRVlqd8KUt517N27jSn9QK0+vTvgHpfCwM2dI0bDAUFgHU5BoFAokjhgb2+fycTWWUiTGG1wpRsVSdJGMyEvS4qyJEkS8twavoMgJkkiumnC2uKijfdNY9pJTByoev8qmJ+FzANoIEKgIAoVcagoIk2WW0O+NhpKP1+svUsp5WrRWidYq9UCI8RxjBjLlqMoQimFEmtbbLfb7O7t0Uoj0jgkdYBbqb7Hete2sV603rLHj2Ok3G6zs7bnY+pJYwX3Wrd1BL7dG33vid0d4lgPGkO9ZYPTBqrB7CaXX5BECHVKITuUlCgTNsaMXf38PSqW2rAH1dNXu4pmpu5nqfCr4qu1qcfbUGvntD9Gqolf73mmTQOyxT+mdlEINWSX1kCAMUIY9eimPYrwkEk2IysMpgg4d/qRalGu91pzD9WYL8ZAkUbc/N6PsvrFF+k+/xp6OMFEbaJY0e8ldJWhPxjSz7e4bjJ22msMaRGHCaGkBCpjOj0gNyXB6im29wb0koiZzpHJIWtLixwurZJefpZskJGrBRjNCIKShbjHG1c36S6scatcot8/z+/v3ODXBzNe+NJ13v+Rc4gYyhns3brBref/FZ/8yFkeePIhVBhW8cnGjxO8ScE+Z9PHYceSqeK07fBQvMXsexcz0t5KfPvce8s1bO8fMR4N2NvbZjIZE8cJKgwIJWGx32d7a5vDoyO67ZYrMCMYDXlRMByPKHRJq90mDFMmkxkgrK6s0e+1OLu+yuryEp0kphVHpEHYGNzeYdJQ3TmGwe6rAJDQbngZhyGzsiDXJUVhGasHXZuqK4RhaKuAhRHeSVMWZcWSvSNQiaLTahGsrpBEEXEYEioLuF5h1cfaMt+djQ/fjoBWh7rJ00BQc6fDqIvoCIASzN0X9fewzBMC+3zKsUyr6s8/f2Oi+QVY2YL3EyZ0jK2d7CMW7BCqGbOPQrHbtVv11dv/7EKpsKFG4nYxsIueL4ZTsy0Pnhpx9RXs0zTYr/HgXEcz+IpjxjmGlGfEGPyW7Q1FmpX+OtfHhwxmmlYohKZNoNLKfu+PrRdk4xax+t5lFLL9sfcz6bfpfO052N5G9xdoiyKKE5aTGe0s4JRMuLX1HNvxCjurD3LY7hO1OoxnY4zA9GCHoN1nKkIrG9GOQ2Z5QSElrY1L9GlxUIzJSsO6MgQqZmnpPL/6zKt86oOf5JnRjLSzzifOlLxcHnLttZssry8ShSVnk4Tv+MHfR/fUWfdefOlSv8RVhjSq1dBrEW6tqbV5acRufwvNC+9UhFo1Ho9ytna2yfWUJE3RuiRNI8IgYWf7iG6vx/LKCr2FRQIVoIKQcpYxmc0YjceMxxO0hiwv0dh43Y21dc6f2mCh12JpsUuS2LjbOAyrXHvED7l3rir7NkeBECpb/b4MDdpoW/NBa0p
XAczoEB9sbgPSld0i3TnssjzHYEiCCAFSt5V6pAKCBgLYyTsvd1pX77rWHlelfaFnsaCgnQmhmuCNRVF5poVywfNvPcDeu2JcR9Y9KXjW7g0IOH+BrovSiHNF6SGb2S10AONwSDvruHhY56AypioM5oFUVc42byNumDBwhWvceT4GVkENiBHo0iDaL72mfg5n963hwZeAtLy3NggbmpHJNADX4rxbfqKIdgRFIbTjFPJOg2/XC1A9NMSG21U2fmeKEhg88TDZao/VX/1N9HAPQlD9FJ1GtENNHAS01Jg1c8Rw8iK7+8KObnHYW2e8sM4UMDpHFSBRBKFCRx1moyGDyZCjNOKUrHNjNmB3Oma93QelWV0t+OzLr3Hp0af50u41diLNA/2AQVchEYRxjFY7dDfO2343Zb2Y+Lq81Vjxz6SrOVP7WOtIEjtuFL5K253kPQO6XozAYJLRbsWkrYTJeEQYhoRhwGymWVpdIYoiFvuLKFHkeUFRFuRFxmBwxP7+HrN8Rjaz25IvdrucWl9nbXmFbssa7dM4Jgkj0ji2CRRApTYfR6o74YnUv9QLvbXZRaKIFIBCh24Lc2rG651+SoS8LJnMNCqwIC0SUJY5hdYo0QRBaBcVperbHjPb1Grq20uz7mulGjlmbyr27J1kchvg2pW93jC8Cn+6+/h6D4tDqoZ9dC6ioXKIGeoyhyWiFUYZhIRkahgHt8jakKk+iU4q1jP3htyvdi+u0n3TrAngvje62rXBuDZo8VEIhsF0zMuj53l68cPVNjxON6vZF3bqB/idirWD2bJi2vXi4RdQ95JF2/eez1gIV1hcez+vXP06mNzGkNM4tnLwUauBzRq1vl3Y2NXZ2jpb3/ddLL/xOsXehG5eEKysweCAMtek7RaBNiQC8eyAzuwGo8HrHLySMli+SPa+x5iMNEGoiDqLTA72yMYTQnKSxQVevnmLxW6X/ShhWB6QmICYjMWuYmfzMheW+lze32JShlzMI1qRwUyHzJYWqcLCKp+HdVI2n7eOCWl8VnVFHVPtJ81bBRHds+QI23ZT1ep8+zPsUaWG4SynLHNECcPRkG63S+Eqh7U7bUajEUVRMpmMKWfC4eiQo+GQ/aPDqpatChRnTp/mgXPnWFladqFcQhxGxGFEO7Z1Cqq7N8BWVWoDzqvbkDnjZ8MEIfUnlbIqdv+xqgeC2mosgBJFVpQULo3Z2tkCtDHkukTExh43TarH+7IC0rsYVu/KdqknigFKU9tmtRH73IJTl8zchYRmke+6IMv9Jr5ql69rUPVt8/0es58a0e6dBiymF0l1n93iJgfBNqc4N/f+TWUT95/a80w1KcUBK64+gv1MHZvYPmipHba4OtokiZ7hyfbTjXHrDQigpURVdkhXE5gCm0emKzt2DSEeknV1PQli1nofRMsMwpcRpQm1t1ea+QkjvrC6i5dwxzTcbXjWm/UW2Hz/U+wUmrUXX+LsWJOEMdM8Y1qWqLiEtIOsnCYOYmR5lbS7xqrRqP4q0yzipWe3mA5HqHxMGEZoCcjygrCTsNgydEaa/SCiCGOK9iocbJGK4mAXNhaX2Rnt8+qB4oEwosMB0mrb/A/xWogHzjpyoRocpraTS9Xr3qRQpby8LeDdE9D1+2tZqRnV3dri1TCFYZwZbm3t2K3SC0MogX2vgdDvrZFNp7RbPXueyZiMJwyOjpgVBe20TTttk6YxrbTF0tISC+02aZoQqIBA2V0a0jQmim6nZxWYuGaLWOteBUZzgHs3acZr3AaR9QpoIFSKQAWI1FvniZqvihQqhS/H8o2IOXb7Jrutx5ZHGudTdydVSqPxLLiaS5Vaq6Vm2ME3HBv8HhAxzG9i6DO/6ppeFjNcGnbdVfiSo4qQVrDMig7YUzeZBTPiMsZv8G4ZTz0h6/FTJyz4SAL/m69/BV77MG6rHNvKJ/uP87Xdr7CRnmNZrVS82qAtGJgaDuw15pfE5l19eJs9VlV39JASENMJFphmA9I
gcWBdl4r0x/lFi+rO3lLtjFTeTm2gFMFEEZvvewx1dYa6scuhGTDt90jef4kSoQwCirRLGYYIAYGxNQ5uvLbLL/2Lf87OGD7xwSd56gOPI0XJZHhEp50QnFkjHV2llUdsHpYUeUC4dJpsPKSv9tkd5Gx04HBW8ML1Hc722yy+sEl8sU+xmFCH1eljT+gK5DhtorkhqV2X6zhnquXv7vIumhf81L37BC0xbO0OmUwmTCYjtre36PUXGQ4PMaVhNByhy5LF3gLnz5yh22qRKwhHY1pBiFIBSZLSbbdot9ukSUIa26yvIFAIQpJEpFFkh8Wcuj0vdZm+mtwqvwgee6q5844/X20TuO0sAZtJp105bVOrY4I1L4RB0GAWb81q7yz1Kj1/njTC+e300PiQI2dUNO48qZ9TO2bmy7EcI8H3j7jtuq2Tqwa52k1Z7xkG82+wyVtBaKk+nfyAo2iTZc5XkOtByg6BGu6qrdErUC8d+DcAy+hK21C+kA5wvnWBV1uv8cXN3+Kjpz/GsuojZR2A5kHT/k/X765yqlG9MHtPC7bK2A18PGT7cIuF9Cw7g2cJu4vY9Nf6bdvQN1W1ulK3q2XEPVXDZOKWbMpQsbWwQGe2wMvZEbl5lfMLK9ROP1WN+3ymeeWrr7P55hs8dL5FfzBkOr7KqQvv5+amIpCYpY0+ph8zXXkQwbAyzbl+dQumEabVp8xHdJKcrAxZCAsCDFcOJqy3u2xcz2jNNPlaUm/356NPGj1rYys9sNajwj+rfc9NvfTO8q7W0zVwZ1uHQ7LCaKa5ATTaWDutkTYGGB4dIUGICgIeffRRnnz8cRJyZlGEJqTUBWEY02q1SaLQ1iYIAiJl7acKIYkjWondyuZOUuHjHfqrYSQA483s7vDj59x2/fkvm0uPuD7xIO8N8TatOayO97hr3uJlvnPws6yrNFRAXxofXuQxtxlJWluyDMZtVmkf3ABaN5/o/hC7T1/tjqpVfYMKrMPKLbXgdk2ox4aqAE2MIKJZVOfYLl8iDyfERVodKt5JiSBGo4yhdIy2WV2s/reiTxZwsaYfsBEuiYo42zrN68VV/uWr/4Tvu/D7WY0uOC3Ec7Q6c806fPz786Fp1ofgj62Zq+PWRkBZS/JS5yI3d58lSRYrJ3CjF52PwvdT4ynckPAAXttDpdIYJImZFDnT4oCVx9frUeZC90QCBrtDXvnS1xkcDTkaDEi7bT7x/vM89qGPggpJWjt0l2LUYkIhOWICDPD1o1d4/uBZHtY91pIPsbiwhCkyQqPQeUEvzogDzcAYtrKI1X2NGhyQXFglj4vG2uLbj42QYt7mfif93bPhu8k9AV3VfC+N1aB0DTle9NoPuqIMGE1mlDpD65IoiikLjS5LstmEdn+J97/vKZ5+6ilasaIsNGGc0lsIbdZWFKOUsvuSGbs5HcqGYiVRTCsObYGcaqVy9zfH2/xWYs+vnUlzmkWtQr4NBnmgFhF0aZxFofZqB25XYX+5u13Sf3fHexxvubO/OgJbeV/989R2xvm1R7nGlu574+ZFbQW5vwAXqF7cXN8KiFEYVTJLpyTjBFxkSdX5LuHBAocFQ5tBFrJQnma/vM66PGhBurLSWubahHEP8p5MGzHOFuva5RyW2jFUF0GGGHigdYkb2WWOJpUl1QGrjbLAx41WykrD4QaOnzZB2jS+AZ8tYzC0kzVCCUmTHvMZWn4AU11BxCYO1UHJvqObWlZ9jg473MpvMusN6G48YttPAArKXLF3/YBXvvIcxuSIlHQ7Be//9qdZO3/BhlsazdrGkhuPZYU0BsMXr36Ja7v7jOMtzpuCh9Qn6YZtRBcczaAVRLRUQS8fMZQuV0eKM60u2UuHtM53oe81The2N6f1ut99FmjF8I+lit9F7g3TrbyAjUY0Xk51TOX1BAxkWUGchnS7HbIss5W2yoLB0QAliqeefJLv+MTH6bVamDJDVEAYCGEU221sXBlE5apEBWKIIlvgOw6jBtj
fYXVqjBE7Eepeu23V8kVCjLmtk+9ClG+/H0327H7z7MYVybFRDu/wgndbWY89szHzVj7lJoqvE1Ear+geu6kH6WPLgKnH4/0lTc9pk9WIQC5oNWLczWgNFqvJbGOrVUPNBE/bDIZELaKKm2TxhKRsOzA2FVh6rUgcIBsRqyVIfe+q58XWtKihuh6TvbCLMgX97mkybJhhifZ7XzDv+Gw4yKpCPn7vNP9M9gp+AbYmF3teICEXly7SCVvk1TXrySLOMehB2mYGOaebmXcu+YAHgDLv88YzrxJ1xzz0xHm8jgfCaG/GC194hVuvv0xnMUDiFhvnNrj0xEcJO4vWxGUK6h2cNRA6kLRj+WPnP8yo/CwTnfHi4DISdnis850wVUShcGPvkFbaYSmGcDZmFrW4VqScS0IOXh+RLge0LrQpZdYYM42x3jRJiQsTqzr9rSftPQ32uY2FeYZk7tykIBDa7ZgwCtnYWCeKIwbDQwaDAR/5yMf4rk98ksVOhJLCmgyUqsLJgiBwCQZ2C/I4ULTThF7aIg1DWyH/bh5+sQxVy2+PtH3D2CNezWUulMwD7vGmvJVp4R21z3hTxt3bbBr9UHETD8rNMxpr5zdmY35viDGN/C1xmWPiHUCG1mwZk08Z9Q6q2FWgnmxztihlr6dClsPHGZY3XGSEO8YzTgcq4u7jy5DaKdtI6nUTRLDs1zJg/69Ba8NitM5G/wKDYshWccTnrv58zVq9RkINDAa7oHquauOA5+HXO9Xs/l/1uz3b/RCrk8x+7+pB27bX1ut6fKhqEjmO6zsOv7DovM+bv3GFpVMlZx5fQ8UtQKHLFtdfH/BbP/85nvvNL0FguLE35NTFJR56+n0ErR5al2htNUNjjI36MW7hMHb3GG00j68+yg898QfptRd5/NwFvvPpj7HxUMReMmMqIf3FHsPpkM1ZTBCltKUkIueGTkECrr455OZXDzAzt937XP1jRbW9U/W7/9yZrd4ijvKegK429YutgLcRnO2/rypWGduQKLT2s8XFHguLi7Q7KcaUnD69wSc+9jFacWxJhraqRRAGle0WBVGgaEUR3TRmodOmnaQEnt7OaztzP8dJj278eAXSp/HeCWAMTcXrDve5g/jmhEoIxJoSfIFzwYKvz2KS6uetV4Tjj3a3O9/pKqU2NKsR+HdXUjvSxPVVAFUqdBjcnWS/l0V8Wqfn/X5zR1O7FzvTVZTOmS0cuRemCWJfodZUYOwdn2VRsne4R2LazIKBx9tj97Vqqx934g9qFMyx16z4rRsDNTSKMTyYPsIkH1MGitd2rtMNFxqqrzNJNFZGD6l+vzT7grV7brdheWV681BqdwKeqYR4IHRKf313PWOLe3qbp5amDuX71Tuk3Ug2C9z6yg4rD4Qsnu8jStBlwOwoZf/yITdffp1Xr+7BIOO1V/b4xHd+gAc/8JQlA6ZwC1JZLQA28aPEFqhxLN3YNp3qnOaPPPIH+NQj30WoEjIO2G/9HHm7wHQ6rK0uIeWE/RmUKKQoECnZlog0iXh1c8irX9qhGC4gxsYpS+VkPr6YQB0aCLdvz1TLvQkZcyu0EeN1Ute85spbvYaq4Uqg12mRRMJ4cYHTpzfAlCx2uqRxhNGa0jEFXz9XgEAJcRCQRjGhqu1v1bUbasGd8MEc+7wZW3z7pLmd2ZljF77bufX3tfUpEJyd2fWdcXatBqGs++gdSpPJznmbpRofjQPsP14lhDmVWPkXpaQybWqnJ/pIhjC6L1HXAZ5zBDkVXxD0bIYkIYaA1niZcX+PfHlKeNhmlhxClBBPYs8cKrvBVKVkaUIawMzskdCByrtPbT6wLKQC7WoPMxdfay9nKtOo+KgWdFXycTlcop/3WG9tYJJ9Lnaf8uwGH2dqGveqFX2DmXOuWTFVLreq2uD/W4QhV5fbPJoJL6TGRWTUjrO6WlnTLXn7LNBlyvYzB4SLR3TPLNhkoGmLVLqMh5sYFfLlZ1/lcG+f5ZUWf/JP/R6
Wz51xhEdTp8C7CW3sFvEiLo3f1ZRoTvjFZBmREm0KDodbTK6PeX32L3h4/XtRyTKdpMXk6IgreyNOLXdJEUynx2A2pNPr8NKtA7TaZuOBNRbXBhhVWGivaa8L06vdshZ/3+WMNO3sg28FF3XwdKMxSujEAaEJ2VhZJgoUa8tL9Hs9WkkMmCo7Kwht2UWlFK0wpBUFNOMFvxGM+u1KY135Js4VwiBgVuQYravsLjthTBXR4BnwNyvmLr8DjQljxRcnFxz5uc3U0VQf/W/3GfA6k0mzqpoHXhWFTMtd9uWAdrJClLUYt49oL2lMWTKVQ2YhBGUIBZjSoEzMzBwQhorBZEYrTeh1S4aD2qTgSsogDZuwxQ5xm0XadtioA9sWcSt605Hmk00fSS8xlkNWwowk7FJ5Np2a4q23FTBCpUVpUzNaz92bm5Frdz/v8C0CxeViyGnT5rqP3PAPgLftStVuj/9etG6x8/UZhWyy8dAarWSVYhRjpgP2hzeZTDKk3KUo4JNPd3nkYx9h8expV0vaWBXfaHSVxCzu/4Hz1TgTh9dYUGhKZvmAW3tXuLlzhWJWELeWWe2WnDszZmZOs78bE4URi60O13a2WQlismJo6zwkMUvry2zOBLM9Y3mli1YHc+BaEfjqYb356O6IcM8y0iqVtHIUOKZFFbhSsUafiioGQgU6CuimLaLlgJWlJaIgQIkbCAbCMKjUtCgISGJL/Zv22DuBOnf8xH1+jAnfdpxUY7n6+/jA+kaB14eJKeWM8L7oDd6gYG/k+/NuN7jNDm2OvfKKNZvmn/WDeOLgGFGpDaWf5Vjmq4yai9gAy9DryXq/iUv80F5lpB4EKiBlnSUdM8y2GRQT9FQxCm4RxwtIGCKuoEspGSaY2vGmZyRaKNMxSRnSuXyZcumTjFVzkhqMK26DqU1b1ijg1XY7L3zysS3BU7NVXyMhJWYynNFBgy4RsePI122wc82nuApKTPWO/VvU1As7vl1UN7H95Nj8KE5YP5iRdkKmsX0Ony/XaJ29urEMVKHQwUPMdgp2936ND3zqQZJwmYQO02DI9ZubjEa79PpCsrpES8GFh06xePosWvudd13MrnHPh+BjyRXC3vAWxRSWlxeIwi6agsF0i9dvvMju5lXaSUrUXaTT65KVuzx19rsI4yXackQ7ajGctRmNY8JEEesRD1zqYmTGZDSh1CmGhCiYkgczqpdmvJ3XL2V+VPk2vstMt8KAyrvvmtZIRqgKv/i0DjcJlMJtyCjEYUAodi+wssgJAlv+sCw1AYokimjHYSMEp77HsZa8JRG705bk1WkNFNZ+bt7pYX8bRE8JiKvWVRSGJLaGUm8Ks84t79u9C61uNMzgTAD4kO3a0udFN2xSHop9eTolysWTugsb5s6fc9zTHHL3m9R5W3BMYxFoBUuk9CnMiPFsh/3yMgN1jVbcIclmTJIWWZGhixkGCEpNy3Rpq1OEwSq7acy5a3tcO79M0fR2u5vo6jNvQvDfObaJrQbmTQHK1JzVK5JL8WkYZ4iqVerSGCZ7Y9orLoLChZD5c7TRVRH8pqnPmySoQNgzEf+v8GZPeGzf8MJqna1YazqGIBdMOUGnbSR8gnbnFFeujbh163We/PDTqDxmOp2xc/Qmw+wKg2KHUw+eIlRtbjx/mcef6LPY38BUjLVJ1GojiQe7w+yQX/zCL1BMp3TabdbXLzAYHVLM9ljodlg5t0Q3bREa4VTvYXbzDYoyIzC23GsSj4iTEcsLCrNhUEGKSI6hQ2fBt6HAV2ezZM/jgqraY5cgwZYGfWtYvXfJEU4tuu1j0wCy5qHuN7EKA0oJJrTbp/jYQ8EXhRJCMbTdljh1uMlvo7m3hYiZqr32jne4/u8A1gjWGaVErL3ab88T2FdzJ3ydZ7bHVTnroS6wNnKvApm5M+bPbrL8aldZp3pU7NjUNl9kHsDvR7FxsY2+cPN7Xr8QRAmR6bIY9ehF59nNX2AsW5hsyERnKAIWow2
SbIEkXqITruHreOUxXA9z1m4csX2mR6lMxQvB9bXxu0XUQOvfQ2UfdWg5HQ2IuwliIgJxZjwJiFXAzBEOBbTKgIPBPu2VFiKu1I2BclKgAo2Ko6qco9XMtatm5u2SNBVo6nAoTREEbCUla+OSzY6tilZODEEc2lT9vYBZK2CkHiHLe9y8dpMb166wd/NVPnDmKYp8wPbuLYajWxz0rlDGhnLrkHY35MypU2y8/0HCaYGRkMloi1ZnCbC7w9aV0zSZzrm6e5Vf/8oXKKZjJAQzHpNdf4X1jS4rp1bodgypBKynF2knpxGJOB2vginwtngjPs7aODOFnQ92Vww/Ouo5ZkS7QVKp73UtXReD0lzI7yT3xrxg6tfWDCyfG+BQ2UK8Q8N9ZNXWIECUQmnQRe5i8/xwNiRRVEcmUIP53cKX3m7vsOb30vhv9WuDRb5juQsjPd5EVZkVCjvhGqBvB0bzkg4Em5817L2VcwZvMqiPOdYM6+M21rFiWYtU1/WMp2nr9V2isWxq3hRzf7FdwbJ9JXVBGW/iqtRk30kegiRiOXkfI4ZM0xbn1YMoo5hGE9qqRRKcAl1zREEo05itFcP6lSMGayGTOKYMbEpDu+xiTMFIjRomGheOZep37X0VoidMy002Fi7BOAOjmFx5ky+88Caqd4MPffRjBIdjDkzIyrmHiIIIfEyr1sz2dwgWFYS2pGgpJdrY/Djl3r0lPApdwnAoqLQA7IYAYawQKdlZCHjkypCDpMssVBAZpodj3nz9kOWiz/r7NljpdMkG13jj+c8wy1OKouSFF75MlArd5YhT504THk251XmDiZQsdk9D1OHg5iGB2eLW/pA3rlwniNoQhPQ6cGnpUZZXToMofvnzP8+1nS2SRDh/vsfiQookPab5Nq12QVcpTqfn6SUPoVRUzQdQKBW7cV5TB1PZjRuj2iFtIznala9sYI24seIHjzd2v9uOND98fEVKv520x+BKXW1kdHiwsZWSwKjA2qa0ptQlSFCV3lMCURTZznAzxrjrNNmKF9vh7wwu7wQdTSY4d5W3ueTxa5nGxXTj/NJN9NAjgGuvEUNJCaiGne7O966suO6fABt72qySNg/AjX3OaNiNq4LkUsWyavEA3VBvcSDdcJjeV2L8VPKeeCrmK+KTBurDfSEZEUFmwpnwKSJpIQjtUrOfb/P84W/w9JkP0Jp0kMJGoEwHU4bDQzZvHnB+U+CBZWIjtGc9Hrs5oygLds6kXL6QoXwBJBUQByGiIqIgBq0pCdG9i0zL62RHKd3uKkdHI67t3WRwZHjh869wvtXlYG+Xw4nm3MNPsrC8QKvTRZUlRpd0l06hdYEezoiiHpPhkHI2IW317QaoZW7ftQ5BItg54MrBlJtX32CkQk6ttXnikQXMguLaRpszrx5y86EVpiGESynnz2rCSZtve/ARhsMJeqnNRz/+YV7b1jz36i0keJmzTzyGMSFIyVrvQVbMJXJdcDO8SbeIMatbFIeajkk5f2GB2QzyWUkaBwyzq0x2rxCGKavrMStnz6KDjHarxSQ/JFJH9MIIgAdWPkAarGFMgA8D1GB9QwROw3Ap3p5wGcNc4gseWKHOPGxoglRQXKuUgDYl8hbb8N6jkDFVgyz1v3LME9Vg706lsp9jjCvyIZTGwwIESuH3IbP34bb5Lqau8fSN74zbAKbbL/2NsVx3/PFzqgVp7gtBBcruMFGxKgsEpavQ1/Br3UGOLwcy91FzV1u/waTGVb1qGk5M41LGWNDGxSy7h7FfNQL571M7g9fFKr3GMX2vmPkMPV/3y54jiFacCh8nolUrI0HE9Vs3+cxrv0RPRzwanSIONyDLiUZjrn/9Vb7+ylXOrZ1mLRbObawx2r1MtH6RpTOnSf7/7P1ptG5Jmt+F/Z6IPbzzGe98b87zVHNVD+pBQhI0SCBGA8YgY7D1Bdtr2YYvXhgvjK1lY7QYbMRiYYQNAlksMRs0oG6hbnV1d01dXZVZWTndIe9
85vMOe4oIf4iIvfe5efNmVpeyqrL7PFUn7znvu4fYsSP+8Tz/Z4jpjAubayjxBflXyyVlUVA1hp29fcpqRV2VTM87vv/rb5KPtzi/fcCb71z3A2NzRHJG8Ve+/jqXzw5wScZbV7+J3LDUxiFJTpZkJEqjdEpd+meeDAZoBdbdY6Rhb3ePwWjAwbJkUQE64ftv3cKuFhTzFU9+5gnkluU7f/X7PPXKZxEOGb814sg2nN/OeObMRfKtx7j5/nUq43j7zdeRd+9i1tdY7Bme/flXaHRMbBCvTAE35G0Obcmw0eyVdzk3uMzG9AzTxRr14oDh+gb3OUDbFCclTizjwRnq5gjBkNqSyWwLbIWTjHJ1TJKM8Y43gzjl/THRZ+Hh15fBbCEwxPm6bu44urA+WpsvTsZu5sSwPL+b9MNm/Un5REDXBI9wG0ccPldEHyknM6569IPvF+/MceF3xHNr7eOHpIEPqJ09R100J+TE59Ho653Tb0D8pOf97xe1/gDlcAKWHx4k0v+sdYC0JkuPVgnPZJ3zyR/GegLbOLqstYdf138QhsQJm79/gmvphm5YdFEj/fKF8Xvrus1KXFCFo2e903I/pRKtIyK9Je2kPJme3n+vDtcYUj3tda3DGsPL5z6DFsXy3vvceeUimdpjbEZka1s8c3GT7c8/j1kYNsab1IcLdo6WHKR32dbnqffvs//271C7Auo5zlZYLM425KMci2FndY8r+hwXn9hAacNy8T7Pv7rlC93r8/zMH/xsG4nQFZb0hW0cMYvMIsZy8zt3+eW/9ttYW1LXJRcujHnlp59l/NSQyXTM2E0Q7a2k808POLx3n727B1w/2KMwJbuLQ9YXh1zfP+LoYJcnL24z3zVknxvy63/jr3NULLF3d1jdPuAP3LJ8+5ldbP4U9tvXUa9dwYhpHekHyT4L8z6obd4+/Abr6gw6TRCE9ckl3PgSANpmLOoDVmIomgLbHDLSI9b1WdbkItlwDYtjYQ+5J29T2ANGTLw2K12Nivatizsx62O5Rv9ncDBGmlRsOL+X6BFA10kXteFjnXW7YH+YfEKOtPDSnbQe/xhfqID+hoARhCI4Rm1OWxdoBoUov4Fj7AAkoYsJhBP9SRd3eOImEgHOdTSz66cv9kT6l/yQznvw497C8WHabTzInfhOwnqhSQYD6qqmtg6LIQNEFLWx3jtNtwjYEy9Ves/uejUm/HcxrdQFyuFEaF0A1jiWTLxUiCu1QStpx5qL2Xqu3Wao40E/TWJbsPVb6USR1trwjqXeGdb5PeFsDI7vvlVK8ezWy/zG7a/yvTtzdHrIU5MXeWb0J8jSdUZDzWq1Yu/uDe4f3GJ4/SbVb3ydvb/wX7KxLBj90ZdIfv4raLUOKiQFi7cY39u9zlFTMxtvgk6xzjKazXp3b1p1IiQK9+a8h2ClQNC4RGjI2E5g/cVLfPZnXiSbTnAqFquxwbLxLPN4E849fRbnLIv9Y26/fZ9R7tgpDrg3r7h6/QYbgyW7teXm0Q6bT6xx+cVLyOE+Q3eB945eZ3f6Glm+zs3f+Q5PbK3RXBrjaMgGOe8uf4vRYJ2iLrHLPWR0lsFgAsRxCDhhoM4wSM9h3YJ5ssdd03BGz9gcf5ZAYKKANT1BBA4W1xhPL4IVnCSoEHkerZkTgc/QhkyeANYwHrq43La8UGhf3wntteCotIg8iAKdfDL0QqRJpBcDKRFavXlh/VJB1DxjZnPIpIvJiu3g9s4FDx6roqRpHIM8Jzw9/QD+cLveW6P3jT+2v/Nvr1Rt+FpOfvdIcSf/kZOg2h7V+1AIAzwuTuJf5yjLWaGoqwpxBLC1OOsdHNF68NMnOCpdpFkifSAdLRuXgNaxGTTYqN3FSlTRERc12LbNUbPtaIQudVtwTrXff9qkDabrabXSq+oW5WRsc9/dFf7rQgUCJ9zZ+x0WGM40Cc+s/V2cm/wBVku4dfsu5eIuzhwyGic8/eJFhl/7JtV33yXVQvnUJvOnr7C
dDiBopv7yFkTz/fn3uZxdhrhTbUu+d+ZwF4IZF9DAYgbfQFxpN440s2HG5f/pzyJr6yEE1ltXNhS9cURACeMrvPwzZzYZr0/BaYojYfCda5TFPrdNzUufvcL08hnGZ0Zkqz0ONm4yr8do1VC89Q7l+gGLxV3qe2dwl8Y4pyiLioGakidjdot9NvIZrlmGeGO6gdXTKpWMmMmQelAz1hNszK5zggu7ZsySixi1wqpjtFtvR3TUeuNFY+JPmEZ+ELTbr9OBiANPK4Sh4vpjJHIRHsVc2OfuUbnxnwzo9qrhWdUyBK3t201SCf8PfEkYLCp4032N1/DyQ7FXax2L1RJrV8xmM7IIvAFIIuC2sBk7qAesffU/xrH2tcPokBQXS1h053Li73CNVluXVhP8oLgTv0VuFeJ8EBIRtEooaaiamlQL1BatNVb7LdgdCt1SNyczX9wHXnRchrt+N67bcyvSG911uk6K7Yu7LPcngbORXvCfN59C0IWOaPJMXGcdiYsx2X3IPbmaxo0qRUCpjOFojSujz/DUE79EvUiYH+zw9rWvg1oxnAzY2J6SZuewwSFX/NN/AvVzn6Eq5tgXnmY7H+DTaXu7FoifyLvLOzydXGhXdBcA1//fv5zBcEBRlMR4Utd798pZFIr1e3PWDixXn52iderDoqwHaxMAvDs/joHoghJWpiEd3uXg6gscvfsWs/E5zly8xZVXNdPLgpZDdg/eYKAu0zx/gTvufWYHCaP7h3B4wPBMzreu3+LFz10Jc8UxlG0ae8wsGaHsgadFuyWRCGrxPfjCP4pNLiLlChkqusJBkX/XrI+eIav3MBKAsD83pJdgEf9u50A/36xbZKM/qrMSwzEujqPueh0KPVw+megFz1pH8A+A6xDrB5IhapFdSFIbaNMSi/6lWAQb9kPTScpkPGQ0GOGco2kaUAqVphAypoSuaMiDYNpqo+333QuNq3rbdyFPXkUt2gVDrbeK+Ukq4YV3oNUHedcDvK4djlhryAHO2pB26bnrxglV1ZAqS5Ym6MaRakjTBOUsBjyXJwp5xMttQ12Cxh/v0WqzrSfygf5wgrPe7I6Z9idCOHxYia89636QWsQ/ORJDpDzAhhKa0aqKioCEAuIxHbCXPSFKk+oB65Mz6Hyd2qTcez9h5873EXPMZKI4e2kDlW77E5wLxd6tL2Kuwb7wFN2LCPHZ+HkRX6tDMKrEpZNoJ+KswtKgVdxix3F7ucv3Dr/PKxsvMVIZYiOt5BiQMru/S5lYdi/MaAJF0qVGdFDjrcqoRTva3W/FYQyYo4Ti/l02tzbZP3iHF/+QZjYaYe7XXP3Oe1z5ykukyVlQU4rN7zL6x17jsf0Em2gmz55nWwUbTbwWnqUpWbbBcn7Mrb0jnjw3bp3I/rZxUvbnkEOnA0jG4VotHHeACtRqE2Ul+L6kp+Vqun1Tuj6E/mJ1EoJb5S20q2OEpf8hoMKz/YhBt40B65mqnVbfe7hgFsV0WDkBUNEpAPOqYmf/Hpvr66yWwnCQU6xWDIa+KLEYQekEhYRNJaWdI+1c6qFvCzNRucZr0JHHMz2tptVihRaAg3JBV+tB4l+0/EjkOnto28GbdGY6+J0YnHdAVtZyMC84Pjzk/u2bbKxvMMwTNtbXmI6n0NQoYxnPZhSuYjAcxDH5AfhtTcXA2xpiaT8XCpN3LXO9k1yIC7dB0w09EL6nDdvrl4L9tElk6LoKfNIBMUJZQyIOG7Y7T1SC1jlJOgCrKVY1toT39/fZ3Xub1eIOlbrHM088R5ad9wubsx7A2hxuR5cM0dPfoikRssa6d2nZK49YmCOq3G+mUzWOa++/wcb2mM3JxbA4CHvFgr/yxt/kq9m3ODO9xNZoxtZgyiwbcdHAwYbh+TMvcnVnH1xCFyzYlsYJ7YvAC/1qDJGCs/kI7Sy/8vV3yLO3+dwXt9m/f5vZ3U0uvfAaid7EOkiTCbmesa+/z/j8ZTY2XqQoDhjnYNz5djwN05Qj23B
0Z8XhsSM515Fj8T21jnM6JSOUJgpKjS8S5eGjF0Eg4HRQJFQXhyJ0IOv7XyAsAoL4ammONkzQd1QHrlFpCfuHQGgjbZ+p1pn5MPlkdo5QnYOq/Vf5yLU4SYPyRSQFVadqoASM8Z2kFSxKy7vX7lI0kKiGarXi/Vvv89STT5Fox+OXz7O+NvWVLYVWa/Fxv53W26ITtPWVY195EyR2cfdKXGhjjAnut7sLm+5Mi47jjCFX7Qe0vCseAKPGbaynUmpjscYyG49oVnOUMyRiOd4/wNaW+zsLVosF4/GAswamo4zRIGz7/ZCFNfJ7ItJpozY4zsJKTgDWGJUQxbYcLsSoBXA+iuSER/+k5fZpEa18jYuo7NvKMkhzDvZLjhcV9+4cktqS6doag9GIyjSsFnsczRcc7++xd/UtdFLyys98jtnmlDK5w2iQkGbjEIMdtSM/NWkX2UDbSNwOPVAGLeCGng3v7Tt3vk6W5CxtDVjeufE1itF1DI+zzgWO64J3ju/w3735y4iC2hUcLq6zV2hupAlN1ZA54eevvIQ+c4daVeSst6AV48WFWHK1R3u1Wj6ghDI5YL7Y4caNEe9dvc7n/+iC+bxgPXkMeeJ50mQQnkiwTtjefoU75a8jyS32i/uk+hC3EoaDLyNcwQKjbExR19y7fodMKVI1DDfvTVBCv0iEukBISoRi1VUX69Q8YhC/9OY14hfDlkNsl5UeTrhoAfstkjq/TvddRJaW0nAO8A7+B31ED8onU3shco5RAxMQ5QeZCm2P3yvUB7oKfG1c5xy1c0xHY648/gyiYDxaZzxbZ7yxwWwyJnENs8mYLIDticyyQFPQ655W4r534c+2X6VbC9rPI6CeiHfu7vOBNc31NEd6iYGuu2O/dm0EOFK/v5PFcX56hZefvBT6rzu2CzaT9somLiQPerSEtjg2OMRZknBWE46NkQuRv43X6SYegdMNA9MEDcKePOfTJt/59h6FcSz39tk6u854sknqVlx/7z3u3L7G/cN9xoOE7Y2M0ThnOBqR4rjz7m3eun6P82en/NE/9oeYbZ3jb/z13+Jg+2/whSd/iXbbdRetg5j4GzqSjqFpF2doX4KTLgfQOeHK7DLfu/tNytUurD1DtnVIrYXjcpe/evUv8+7VqxyYCjtxjAcpgyylnBeUjSNphrwwvcLnLr5GNp2xqBcMbc698jpnh4/1EnRcawG1Ske764NFNKzcdZZ2l3z5CptnR3zpS7tsPbnL1L5APn4eDyWuN/YNo+EFLmXPUCXXfc1odYm6qVnI2yh7yEieo1Q101VOJksGWpHIMOqOoXEhUL3Vcf3oj454hQXpl4Dvn9dZZ54uEM/dtvxsKFjTOtNc7z04TmiOLYcf3me0zHtUEOG5/X0+HFo/0TTg2F7/IS3H0AFG56jqQLczIBwg2nFxe8ilrSdQ0fklgg1ApMR1GnT/frQXpN8rkcA/+enJ/j15fCf2Q/BFHvgrAuMHD3z4Bax0z+/oA1lMlOg7CfsA3vbSh7Sr15a4mNje77FJwfvbNdN/6yv0dw6zvpXQgvGDX35K5L//y7+CKDi7tc47b73FxlbOs09s8eQzm7z8uddI85wsH4DS3QJqDOrob3Hr3Rv84s/8IX5+dxPZ3cM9dp6b059jokeIs6G/HDjV7qkWt5LxW6r3HXWu7V9vBUaryEPLE2vPcO5gm6raxYojmeTYo0NG2YTZ2LG1Zdhq1njT7bFsDGIMP2/WWdt+gkuXXyHTQ5wIjdQ01pLKgNWy5Ja9yvnx40DX3taJFseWcyS5o0jv4spjmE84frNhPr/H+sWKjeELjPIXcC6Y9K4bx14Uuf4MIz1lOLpNY2uaZJ15UeLsMWX1XdR8g5yCjVHKykDm8l7Rn5NjyiGhnkhU1CIwhi2U+sjRpuF6UA57HgeglRaAEROXROKy41pHm+ti2Ns2qGj+QYtg8csOqN0DPdGXT3w3YNd2YBB54O/wYWcdhAQKIXSMDkkVKla
aQwR0NHcfBPcPEekj+6OPfPiBD7HgH3q5/qN+WKNcPzArntatTl39A3CiOFF5/wPXlhML2ofLyX5qEy5w3goR/AaZPW23HcQBnZ3rwNhTDj1A/pTJz//cRdZn6zSNZTQbc+bcFsPRJOhVjqaN3nA9zhWe/8M/xRPPXeBn726yef13cGPDHzR3+e2f+0X2B3lLLXjlyYU9HkNyQn8qulhfIfZfn65qP0QBr6x9kXeOXqdJC6rlDtn4LIIwciNevPAis+QMXwB+48Zv87Pji1xIzuAu+roQzlnP39sEY3wu1uWNp/hv3vhv+fKzQ7azbY8VLmqpXaWxdCTU2S62aTiujhlUT3Dp2cf51je+TZMtGKYv0u3E2vn6o6UYFYK6fha1Wme0fhutFlS2BFtQVQXZ7QPqUcrxqiLPMjI1JvpN/JzrzPlOHoiTlhiZH0d0cKi1TjXX0oQtVSg+kcGFRbBbMeLzxEWktScfmGbhuBbPIt3gHnDafVA+sT3S+iZq2x9xVUJhXXiYh+BSG6QSzAcR6zsRi8KXpfP0xA+qXUXgkQ/9icedoEik9wh0hIEKa67qaeyxXR/VNtf78QtJGCTS8dJK/KvU4fcPvWJ48fH8/v27Z4tabbh7GC8dQxa5+O4aSoFOJFR28631Ffc6DtjakKr8KZPPfeUrPPXS8zz3mZe4/OTjZKNx4N+D5mdd+0NPoxelGV28wrVr38Omgpx/ArVzn0u/9p/hF8CoLYKHIdOCUQwGixPd4rrxFABBAhhbZ1tH7GPjJzla7XN39TqSrZHoCaIUw3xEkzj2zR6PT1/gf/L438nFwTnk/DOIVVjra/ZaDLZcsjxeIs5vD/XS2Zf5r771V7ld3A8Y12l2AOkgp8lXHJdzjuf3yUq4kD7FdDojHaw4u76FkPfSw+NCHBduwYcVKARDVW6y3H8GxZQ8HTFOYKQvMVz7LM69xv6BI7EZaT7150k4H4jEZDeGe5QZ0lkHYYz6QwIQhsWujW3ug2HY1se2kQzQ5WDi34wDF+KtJCo4dO3q7hvu6RLcR8CqfBKpnK8f+TIrIt47LAFUEgm0s+uALOmBSRfF4GsviO3oBE+fxBXug4D7UfDbj+F9UB7WB12lrw/rn4ddLTCf/XHwA8pDtwJ6yD379II9cdzDn7L1T4fF0IRY6D6nC2CcxVnVXd/hKQZ8dEVLN8TrhIy1n7r0aXSnncqp/OjlE9F0PwhiQUsiTHLpQOKhGz0+YLNKD8R+0vP922QOelpyXKF/KHn4BU4EeX9M6UKVTrY1Xu9BbfnB4/pWjHqkCn4qp3IqD8onlxwRk0B6+BmL/XZzNIZVxWMDp9VzJPliwv1zPkxf/VFJ/94fXACU9NjaH5bvlO65Xf+i/RYEjqoflfbg9/6L3mWj1i/SC7YNx5zgdMPHIZj+RDskZN+5ByumncqpnMqj5BNypPUnrldrVdKBUbfJRUeROBw6nhd+IoUek9RO1rn826E9htZ+7AtFwP9w2qHjfbrDfxDlvN+WRxINAfRiuEs/rOzkcR194KIDokfvKNVzhvVuoZR8qBXioK3TG62XUzmVU/l48onQCzrpyOhYBLjvCe5X8fHOmJMqYeQa4072ztoPw7ifaOlzpZ/oTR4pcmJxstaFalLx/AcrlvUv3fPgh4P713LB0fOo7JtTOZVTOSmfiKabJkLdgPf+RW03BhdHbVba//q/6cA5zuyeNhU13x9Wu/3dbufzg0i/StkHnGoheuDjgvFHcti90K8HbtK7Rvyo1y7XRWL0KRHPOAQaKDyHtV2BkFiWM/LU1gn6VNU9lVP52PLJgC7Q9ANjXQjuaMOY/MfdRin+IA/K8ZyHxhkQM0EeVejlUdLi+cdWzj7sPh/z/j3KtHMefsxbf+BA+cBXgs82Ny3sfki7XODUo5MshEH1aNy2sQqwpp9506MoAq8QNjrBIaec7qmcyg8gn1hyhCOU9FUOQq5zB5QPlpyANtYPh6helaEHzONWI/1
dO82Dhv2xuNYfEnD5IOD+8NJdsV8s6KOaJYDuLWRROX1YRlsM9XM29ncoDGPsyWMD9eAXyg/fE+pUTuVUOvlEN6aEYIb29pDpcqo99HpY7RMID8GOHi54B1rncGvv87to5w/CKHQhUw+Y7g/57WQi84MXotcDv3uJGTknis/0HXgnjhUeXOHi+Q9PnHEn2uhcSIrAl57EdhrvaXTuqZzKDyafjKYbHCxOpC0OHDOt4mSWYMZqcW0KYpu+8CBHGM9zHwS9H66RvRv8kFd6IGChFds7KH4n0tt093ctQtzT68HIg4+S8HqI9VZjiToHoSqntCnXXUnt3vn9OGSHDxE8lVM5lY8ln5Cm+6DzzH8e02Vj6XLPzZ6s4n7C7H/oXPbXtvbDs9M+rE0fokf/aCQAFMTn6zTU39WuxQ/je6N37EPu353cacrt1w+LCW7Dyzq9N2q4PhXThUTXU9A9lVP5uPKJabpx3iqJnGy/1rrrONyo/jlO8pPQal4x9PUDGl0Ej78tQbsf7rSC0LYHjpITvz8M0vuFfOg09pZW+Qjpr0AtRdDrnzYV+MHyOR8tfcx+kEp41MIkyqvpbZiZ+/CQs1M5lVP5oHwyRcylz9y6lhZw0G6R4otqP3xyd7UqfY0A5cIJEh10fZA7yQd/mPSDKR5yxweP/sij5EN+/7hysmbSw5vmAbstANjXS09yuu3C84O1ob97hf+7Vx6vH3UR/+gDtXR88Cmveyqn8vHlEyl4cyqnciqncioPl0+stOOpnMqpnMqpfFBOQfdUTuVUTuVHKKegeyqnciqn8iOUU9A9lVM5lVP5Ecop6J7KqZzKqfwI5RR0T+VUPsUiIv+tiPxTP+52/KAiIldF5A//uNvx45BPfDfgUzmVU/nkxDn3Sz/uNpzKDyanmu6pnMqpnMqPUE5B91RO5SdcRORfEJH/9IHP/nUR+TdE5FdE5J8Jn/1JEflVEflXRWRfRN4TkV/qnbMmIv+eiNwWkZsi8n8SkUfW5BSRZ0Tkb4jIoYjsiMhf6H3nRORPichbInIgIv8PCYVERORpEfnrIrIbzvuPRGT9Q+7xYmjrPxb+/mMi8q1wzb8lIq/9rjvvJ1BOQfdUTuUnX/4T4O8WkSlAAMp/BPjzDzn2K8CbwDbwfwX+vQiEwJ8DGuAZ4HPAHwX+mY+4978M/BVgA7gM/JsPfP/HgC8Br4U2/Z3hcwH+L8BF4EXgCvAvPXhxEfk88JeBf8459x+LyOeA/xfwvwC2gH8H+C9FJP+Idn5q5Pck6MYV/xHffyqdD6fy+1Occ9eAbwB/f/joDwFL59xXH3L4Nefcv+ucM8B/AFwAzonIOeDvBv7XzrmFc+4e8GeAf/Qjbl8DjwMXnXOFc+7BefWnnXMHzrnrwC8Dnw1tfts591edc6Vz7j7wrwG/8MC5Pwf8l8A/6Zz7r8Nn/3Pg33HO/YZzzjjn/gOgBH7qI9r5qZHfk6D7UeKc+6XwMk/lVD4t8ueBfyz8/o/zcC0X4E78xTm3DL9O8MCZAreD2X6A1yLPfsR9/3m81vqbIvJdEfmnP+x+wDLcCxE5JyL/SaAxjoD/EK999+VPAX/LOfcrvc8eB/43sY2hnVfwGvPvCfl9CbqnciqfQvmLwC+KyGW8xvthoPthcgOvMW4759bDz8w59/KjTnLO3XHO/bPOuYt4k///KSLPfIz7/Z/xdeledc7NgH+CD9bB+1PAYyLyZx5o57/Sa+O6c27knPuPP+Zz/sTLpx50ReSKiPwlEbkfSPt/q/fdhzkUHnQ+/JqI/Jmwsr4rIj8TPr8hIvf6VISI/DkR+bMi8ldF5Dg4GR7vff+vh/OOROTrIvJzve/+JRH5/4rI/zuc+10R+WL47n8kIvPeTykivxK+y8OzXBeRu+H+w951f087Hk4Fgon+K8C/D7znnHvjBzz/Np6b/b+LyExEVHB2PWjynxAR+YcD0APs44H04+z/PAXmwKGIXAL+dw855hj4u4CfF5E
/HT77d4E/JSJfES9jEfl7Ip/9e0E+1aAbHAr/NXANeAK4hHc6wKMdCg/KV4Bv44n7Px+u8SW8w+GfAP4tEZn0jv8f4x0M28C3gP+o991v4XmtzXCtvygig973f2+4/jqez/q3AJxzf8E5N3HOTfCm1LtAXN3/NPBcuO4z4Tn/xdAHv+cdD6fSyp8H/jA/uJYb5Z8EMuB1PID+p3jO91HyJeA3RGSOH6//K+fcux/jXv9H4PPAIfDfAH/pYQc55w6APwL8koj8y865rwH/LH5e7ANvA3/yY9zv0yPOuU/tD/DTwH0geeDzPwm83ft7hF+hz4e/fwX4Z3rHvtU79tVw7LneZ7vAZ8Pvfw74T3rfTfBbnl35kDbuA58Jv/9LwF/rffcSsHrgeIVfSP7t8LcAC+DpB577vfD7vw38yw9c403gF37c7+f05/Tn9OeDP5/2jLQreG9t85DvTjgUgpI7echxAHd7v6/COQ9+1j/3Ru/acxHZw2unN0Tkfwv8z8LfDphx0oHwoONhICJJ7xn+Fbxp9r8Mf5/BLxpf7ynqQrfn+ePAPyUi/1zvuhm/hxwPp3Iqv5fkU00v4MHvMRH5US8eV+IvgXbYBG4F/vafx8crbjjn1vHm1cfa0EZE/lG8h/ofcs7V4eMdPOi/7DrHwprzNAT8PnA8nMonK8FHMH/Iz5/9cbft96J82kH3N4HbwJ8OhPtARH72R3Dfv1tE/oCIZHhu96vOuRt4DbUhUB4i8i/iNd2PlMDN/pvAn3DeaQKA8xuX/bvAnxGRs+HYSyISg9B/zzseTuWTFefcn3LBn/DAz5/6cbft96J8qukF55wRkT8O/BvAdbw5/+fxgeSfpPx54P+A51a/gXe2gc+s+e+A7+N52D9Dj4r4CPn78Fk/v9qjEf6m8wVN/gW84+yrIrIN3MRzuX/ZOfc1EYmOh2fxWvGvAv/DD/OAv1/kK//A4y72t7S7l7rwN4goRBJEaZRKUTpBKY1SivY9OQEn7fnOhfPxuzl7Lg+sNeD6G9Y3GGvw6yqcbEcU124KLSLtd/6a/SACE46OwQUWEYcLvztncfFfZwCLdQbnDNYZrG2w1tLuHvuAKNXpZ/H5YtuiiICok3su+j707XbiQu8qlNKIpCidoiQJf4tvt3IIgoju+iLuGv6BNrR3am8okqJFoyVBax1+VLuZq9/UVqFct7mqQ3BOhXep2s9wDodp+7Q9oTdGujfl+/sv/pnffqRle7ox5Q8oIvLngPedc//7H3dbTuWHly///Y+5Psj5X50HGaUQ0WiVoFSCEo3opAXcFiQRJBiN7XyKwOU6Z3UEP3G2/duYJgCj67Xh5F7TfTCOO0R7MPDX6BaLsKOzdMCL+Gtb6wG2D7wt6FqDtbX/28TrnGzHBwN/+u2N//pdox8E4vAY8T84EZRKQCUBcBNE+guZbfu3388ReB+GWfEjUQrwgJuolCRJ0Fq1i4bvw4eBrsKFxbMjAHrvrV3M+vc+ueggDovlL/5rjwbdT7Wmeyqn8sOKBzw/myX+KwpRGlEKCYAraBAVwFUBghPpnRm0Oed61/Jg4JxDrMM5wSI4xGuh1v/LgyDiCBpbuJL0tLQeADkcIn0NFJSKYCs4pxFxWGdQSnBOgsZrfNudYK2AcjinvRYuH2yOv7Z7CPCGhrSAL+Ev18dYr9262F5B+caD83CGtaF9APFZXduXLj73h+iHHQiHY+QhnwNK9RrUtbr93rfZgfOg74jaq39HH3x8aY/4eF4bL6egeyq/r8U5r5oJCkSBCEprJNEoEQQdQC9oYUrhRHBK2okpCEqU15+UCtqtv34EMSVCVEJNQCF/PYu1mgcRJQKEBzp/r5OfwQnYaDVAaX9cUOMUkTUQr+06wTrTXhfnArTpYEp31ztJZ5wE3u67vnbrgc+JB17nvPJo6R0TrABnnQd8PFViLaiHeZl62m1czD485P6Dn0eKo6MgYtf1qQKH62vU8fNwbFwA/QcdreOc8y+
1pZo+pFk9OQXdH1Ccc3/yx92GU/nbKZ7DE+WBVWmN0hpQKALYooOZq3ACogRUBOsAcBEO2v/42SoEkzaa684DvLWeYxWrUaoHKi0H2rUvAr7HfNXjFQkAFrhbCQjX3t+Eg1R3rPjPFR0QCg4Rh4hFBCy2h6R97rID4KjVP4h98bOgrNL+Ey8lHd45HBJAy5/X8eEnyVLX3r9/b/gQ8A33eZAj7zh36doVxwBe6+54+k579W85vmHXtgNx7ftyD5zzKDkF3VP5fS2dliYo0SgS/xM126DFIhqUIEHD7VMKKvK5cfIFYjOCbR99lHfZYK3FiCH4tDz49DjC7l8VHErKc8oiOLEePwNouT5AOtsay9YSON2oLWusZx6wON9qAScah0OJ1/Y0TaAhguPrpCob+i2C30ng823qsKc13uNiIPHhBAKP6kIfWWuDpushLtI3Knz/IPA+2A7wjrATWm37jrvPIoT6Z5feqtBCas+X6HjQedapv71nDn9/HB/ZKeieyu9r6agD71HXOjp2Ith2zhwnHZhGx4ynFSSAoud622sjLcAo6Jm0DrEWkdoDoAkcRIgw8O2Stn0qauGRvw11x72GahARrNhgBksLcErZqGu37VJKeWBxGqxqtWxrFeIS+vZ0dPh5F1KIbJCHc7sufNc32yOedTjUa4uooDt2oAsE4FXeoRcda+J4mAJ84t501/jg9w983loosd1Ru5XePx3QRv3W9ekIce13/sk+hoob5JGg+2f/7P/N4YSqrtjbP+Te/R32DndRuealZ5/m2cefYnu2Rioppm44ODjg7atX+dYbb/LWezc4OlpSVobaGIwxOOcwvc7ToVMTpdBaiENJibTcTjTrvAqvMM5hbINtDI2xOGux1vnPseAE60JHtaaMBCLf9tYqv9KpaM4p7VdoF48KK60oxqMJL77yKhtra/zCH/h5DhdLhuMxrjGkaYrDcm5rm2VRIrbh/u4O9+7vcOf2bd6/eY3X33iDqq5wSuNSjU0SrHJYB85anHXtvbEG11hcU+Mai4lOGEDEoYJ2pXGeJxRvKIqLTJOERdhhe3yUcz0jWCSYdv5zKx1f5mjnVjjJeq4vHOz5wDgY4zB0gekK1402pFjEeUbR93/Hh8V7VAd3Pv5o/QREi/Yarkp9hIJKggmvvGZLB6MSuN8IfvFvHc4R5cE4esdbs75nbgv+PRtrMOK5T8Tzm87GfvMnxbFPDKeKQNx/gNa0jf/pm/5Rw1U4p7rQtPCeUQ6sQynnATqYx05SxBo8PeF697P+3Z+w/D8IdA5BenOw/azVbhXOCi5Q2bGdrcbrbDtoPeD751QuLkTwIL/cgTlBe3btWI4/XZeFYwNtYU88gzxwRuRvH9R2u2P6/fOhVHNPHgm6tq6xQFVVOGfRiWYwGrO5vcbFCxe5fOEiZ9c3GKZDsJb5/Ii16YTpeMh4POTNt65zf/cQVTXUomiMCYS5XzWNtQiCtQ5tBR14q0R5s0OUkIh4QFZJWB0t1ipsYqmbBtMYjLFoZ8EKxsXV1Xe6ITgSxGGcartQgkkR/6cQkjQhHwxwznHm7FnOnr/IdDrj3NnzfPYzn+PsmfMkaRYGnkKnGls3ZFqxWM5BUpxznEuHXLx4mfQLX+H+/bt8753v887bb3Hjzk0OFnMaJTTOsSoLmrqGYO75CQ0STEJRyi9W8YV4mzIoT0FTEAGnQLm4+EIwDDtNC+KcVdJpYF65coFf8+ep4HZpHSHi2+QXIbwzKOhjnfkce9BrROFwvMe/4/78G1FRvcB+nBH6SUuIVFAqgG8w5UUpXOhTEe848yB8MpRJt9qxQnSIeMBTAv3cI0UAtbiYWR8t4EPIfL+5+E4CoETQPRETHCXQEREwHgQ/CeATAdafrlrA8njrSYZO27fB3O4ce5Fnjeaz9BSXrikBANtxFNeOCJzSLhpdWJZ3LLpeeFh8LOeDmhHloz3i/yKl8oH79pZG57pFogvV6/dLb+H6AC/de9Y
e6J4E7K77gxJ+gmP+OPJI0K3ryo8P05ClGbPZjMlsjYsXz3Bh+xzbG1tsbWwyTAcIMJmMGOQZWZaSpQNG+ZDvvXuN23d3WS1LygaUEYyIdyQ4P2isCMaCQtBKcNqhxbNrKK/dJUrQWmFRWOdw1qIFKrzGbK3XelUY2DaMABM6wzq/UtqgybrQg0r8tcED0ovPv8izz7/Eyy+/xMWLVwBFnmcopTg8mHN8vEBrjRVFmiRoBfOiZLFaYYyhbhqGg5ymgVWxYDga8flXv8BLz7/KweE+y+WC9+/f4rfffJ0bd2+zNPiFyJl2QIngJ70zHtQkmlBxKIeFQsKkiF91Q+MDn/ux6Z9XxwGOw6gw0FqHbG8FF2jDlYTW+eL70gZrMvRdr3Ut4Lc3pvvURe1HPggkPwYRpRGJCQ/hR+ug8bk4qzqLi0gvJKgYeK988D2Bluj4V43X4BzSJik4sOI1SZcG7dMFALDBKuvfszOFo1gb40XjSH5Ql4sPJ+37ibDkwptSURk3GsNJR5o/wdudnXbsqROvsXd87yN6tuNSnbRrTQv+8XsXuOcIvl5DIty8e442fKz9qCcPqN/0ADf8dN9Gh+cDrY+WWftHvDe9HuwObp1y8mHHfLg8EnSrugZjQDR5npEPhwwGAy6eO8+ZzW2m4ymj4Zh84CsXpqkmiUHkotBpwmA4IB9e5dbNexwdL6nrBmOEBmisC1yRxTgwOAwKh8KpMOmN8wHPIZ7ED3qQAMLa+hVaKY2xAsZfTxxYEZT1WoTCA64K1kNccLXWZGnC1vYZnn3hBb70ha/w1BNPY2xDtVwxny8oqspTFqJJ0oS6bmisYzQagCiq5ZL5ckVT12idsLdnqKqS8SBnPJlgTENZ1zR1w2w05rPPvMBj5y7y3vvXeev6e7xz/RrH8znWWTQOcZ7asM4Gsz4MsmCeOhe0IvGxlnE19nGQwfAXOhMvjAcNpAgK7zCJ08k6F46POrJvgwTNRqGCU0N8tpBxGPwJXhsOXugTQzsq3Y7WcXGCr3Qfc4h+suK1WxUoBo0ojWvjliLoBq0evxh6sE3ROmmznZRSKEmCZuoRTQLH6wFDA55iQ/lFzlmNskm34DofbqbCYhdeth+vD2htbajSQwC3HwHg4nsK6m7rgVcgNoC7i5pugkjTapW4OKaix79PQEXL5VESaSV6QCot1+v7RgisIHFUtI5K1z1Pt0C3I+tD7+k1CG9R9at79Vp1YsGX1jKMT9SFhD0Ynte1oX01uLCItc/6EfJI0FVpSt1UiDiSdESWD5mMpswmE6ajMYN8QJKkkCZeE5IhQ2DTGc9ZWefjGbUiTTQ3b9/j6GhOWTZIIzhjsBacMW38irOeNzJO0FgaBKTx3eJcy53FiRwHfLf6S3AKANa08yYilrWuzYjRScpzzz/Li889z7PPPM/Gxhamcezs7OCcZblYUtQNTV1TNA3OOgaDAYhgbEOqU/I8p65rdnd3aRoLwRM8Gg0pi5JlWTEZZX4AmIbdwwNQUFYVG8MZX3r+FbbX1vidt95iPj+mXK4Cx2oRFJqOY1ZhZVVx4IXBGoeLhIEWFTQXTfpo70cTUegiMsUTAxG2u6HYhTup4B8SxPPEOO91J5wrPc3atkOy5eAsFuXCRA5hNg9RTn4s0neeKZWE9xe1cBs0wM76QCskidqtDsDrwbelJlqTl96/jpicEDUlFwDeqgAQkVsPwN2Che1Ao5+C6013D4rtPegW6E78e1MEU9j2Ixqijib40LgAqmEQidM9IApjLPyt2yX6wfu1Lez9Lq2jMb76NokCTzVY1QEtgTaLccQnx4r7QNQE/as6z0U7Ua1F3Qfek5x3/DyGrfWtygcBP/RUa8QFbt/FPvvbALqDfAjGUNcl4ixaKZRWJDoh1QmJTlCJ7t5yAipPyM2AyXTKme2awjQUVUVtalCOJNUcHi6pigqpBd0YagcNzpstQeN11vrBFzrDOq/EaqXaAa6UoFQaNAXvF0iSlNp
4CDFF4ekE5yfMdDohzXIGozHrG5tsb5/h1Zdf5eL5iyQ6xZqGpiqYL1dYZ1ktlxwcHrFarUAUOkk4RHkfSzAt0yQDrSlWK6qmwYrXVo4OD8E5JrMZy/GQjTW/WGklHM4X1MaRiEIr4TNPv8izlx/jt7//Bm+8/TbHR0dYJ+g0Q8RbG0qEjfV1xpMJrnHs7t7HNHWruTgRT2AHoilqEu3gbiern3DeAecCp9st0X29ybqufmTUfDXKT0wVJnjUzIJmYsVhW0dG1Iha+4tIMXQc749XVIxckAQhCUub6oFP0A5FEK3RScfhaq1Ria/HoEPKsNcuoTXI2zz+kOXklLfsxIHWiNVom3hnamxTUCx8VIH11mCHBg9IfwV7sL8dtM/i/7I4oj/Ao0TkVMNxVhEGUnduuHxLhbT0QEyq6IF+z2yP93BtoJzvaxuaLa0yEKkLH1lBBDO8JRcV/p7T4iGAG68Vf3VYsQjeh2StpR91cpKHPanFRt67NS17134oIyYf+OWR8tGga20ALos4R1PX1E0NzqHjFLVNmLg2eEMVeZ4zHo+YzUZsb28wL+dUTQU4tNIcH83RhVBWIVrBWE8PWOudSnHAWXDaX95ah1Ea7RxZGoe1X4m01kxn67zw0qsMRyNu3rrJrVu3OTjYpzKNz7NWGWfOXuSVV17miStXmIynONGUVU2jLFVZMJ8vWK5W1E2NaSw6zchEmC8WLPYOEWep65rReMJwNGYyhkTlWGtpqppVWbEs5jSNYTgY0lhDVRYsVyvWJkPWZ1PWJmOyLMcaQ6KExliG+ZjPPP8yG9Mp77x/k93dHRZHx2ATvviFL/PS8y/w3NPPsjg65te/9pt889vfYOfurZje1HK/nYlGsCJd9FvFDEesksB9O8T1eazeXCV4dUOqqogL08KiRLA6atXS3R+8dmQ9Xxe156hVxd9dUMPVxxujn6j4iIOTzrGuJwKdpQSlFUonJCrQEaF2gGj/Q4injZZApFsiH9t3aFkbeV+FUwqnNFoTLLkICLStEOXfQ4xbbWNTaaGA9mh5AEDwC6YS6THAjmCqtBql/9UDk3GRoY/iwjF9cO9r8512HMdSd3ZcjHw0Rzs2JdBXcrLv25b3FrsTgzLe8UENtP1awgLhNV5BsE63mm4MSTt5vUAn9ONx43uM1kO7Sqh2nvVv3SPyPlIeCbqJViRpRmYtKG8eGlOzXC4oyoK6rsjrCu38KuycV0etbbDicImQ5inDyYDZ+oT15ZSiWmGtQWmHPtZIUaLKBl0bmsbQOEGMifiNw2EMGGvQ1qG1w5GglUOJn9ZaaR5/8hl+4Q/+HTzx+JM8/dQzOFG8d/U93n33Xa5du8bR0RHnzp/jsUsXWVvbBGcpy5rD4wWT0dAD43zO0dFhKJqhMHjQpWlYrlYslktMU6FFWFtbwxrjTTXnvJPPNizmh9SmRqeZ52StoSpLtAgLcYgoUq3RWNIkxZgaAdIsJ8+HXPzseZ5+4oC3rr3Lndu3+Pt+6Y/zuc99nv/hl/8a//1f+ctkOuHl557HKfitv7Xi4PDAc7Lt4AgGXFwPcV67ihqJCxSOIgTW94aKn9vtxAbv4bZOoVsN2R+g4yC0QauV0ALrL+QkaNHt8LSBGgnAi/u4Y/QTFWm993JCA2y12wC4ifacr1YhmiDwv5EP9sDitf44AaPDk/YvrzyoQLP4lOIEqyyJ9Pukm/xKacQJxtUo5cP1xHr47AD3AS33IR3rL9stkJ48AUH7VorpqJRohkcdViREQmg6br5zukW+t7Wv5GQfRg23WyL8vyq0XUUUi2AclvcIyvF5rOsOixE13YsUP7YJyoSIH9+hmpqx2r9LFNZZPw5b0rhbPFx72bjqPeB7iHzxDyGPBF3T1IAjSVKSNCFJNDiYz+fsH+wxGw5IU80g95exxmCto6xrVmVBWVcYLEoLWZ4yHg+Yro2obI1ToFNNtkoplhXFoqKsa3TTeCcbIRzGegBoHBj84BRjaUy
DIiHJMy5evsIv/sIv8rVf/yr/xV/6z/jyl77CL/7iH2Zz4wxXfv5xVkfHXL9xjdo2LBcFt27foSxLRAmj4YjlcsGdu/coq4rFYkGiE8bjCVolLBdLDvZ3WS5WlGWFYBGlKIqC0dCbms54LXmxOA4cuEKspalrTGJJEkdVlYg4RsMhKk1JswE4R1kZqqKGqmaQpSzmc9aGIz7/3IsMP/cFXnnpVcb5gD/6h/8uZrN1/qP/8M/x3dd/h8997gtcunKF+WpBUzaYyGPRDZgYNieB/HWAsg4tDmW9xmldWNxazjBqCXQZVsY77pRWrSEo0v1rhFDQxXXnh9Z0HutOK46Txf0EVHNuNSyJnLN0VcQUiA40QgDYqKGqWBCnDbeKfGi8sOtFasSFjA5uxEfmEMC7hTjrWv9EP0HCOY0vVeBaYOlPfhc1Vx40vb3YE86kbsFrtUzXUQwqmvzSv1fggV3rUXggoibSGtJpsU6FegaqpQxOtlF6Dr7uWg/qkRFfo92k2nEVHXwBc9sqYMG6C+PTWoVVjR/nEjLcpLPOurs/GD7mKZXuPdDe92E8w0PZn4fII0G3KBYo0egkIc8yEpXQmIb5Ys69Xc10NCAfZCR6jIjD1JaqaSjqiuVq5U3y1ZKy8XG+SSYMxwkTk4NYslxRrDIWecVSlyyXBWVZUomAWJrGYIWgMbkQvmaxYjANVAgXzp7n1Vc/y1/+z/9z3rl6FSOwODqkLAs+87mfZjwekWjF8XzJ4fExzvn42NVqxWQyJk1TiuWKNMs4Xi7BWSazCeI0y7LgYP+A5WrFsiyo6opECXma4CyMxiOMNZTFAh24XC2aVbnyL0tpBEeSrpElY7QIu/fvM5tOmM3WGI2GTDc2Wa4KqqpgsZiTJimWmmGW8sxjT6Kc42Bvj4PdXVzVsDFb49rVd/mtr/4qz7/0Cqsrj/He1fewlW2tpA7y4mCyOCdtvLKTlj0j6kwnAsSdH8w2cHiNeA5Yh1hSUaCli2iI4XrWOqwF08tk8vPbR/ZK0Gi0sx4u7I+fX/BzJxa76TnBxNMOquckU0o6wG150C4honWstPAQ/wrRIISattFcF4tohSIJsesOUSZEFZw0oUUJEvurBTDXgm2bkkuLDG0b+k64liDo0xfhWYhJCR7WvIIRfoc+B6p6C2iAudgfbUyxDz+ygaJBCLHf8Z13MS0SrKe4aLWRIhFpe0Don9k7lK2znXPuRG3heKwHTUvjgdd2TrDo8I0Zb11/dvdplYMW4DsFoq0m97sYwo8E3aapENFkWpEkmlQl4BzzsuT+wR5ba1M21maMBzlKQd00lHVDWVYsiiUHiznz5YLlckFZrzBU6MyRjzVOZWS5Js8b0iQjlYREKRYiKFXhg8qExvr89DgoHY7G+BJ0o/GY5559nv/hl3+Zq9eukyQJWZZyvCr4+re+SZKNuHjhEkqE3b095mXpOSvTcOHCeQZ5zmK5wjUGrGM8GsFohHVwfDynKEuKuqK2DXmicbXGmIqm0SggT1KywZDlYk5d1ljTsFgcUVUVVVOT5wMET9FsbGyRpQnT2YxiteDoeI4ooapr0iwnzTJGbsLxYslqvmSUp7z97rsMB0OOjg45PDrgzu07JGnG2sYmddNw/949HrtwiYODA3Z3dnxGXjfe+uM0DJSOy7LSgXGrkXbjEdrvgunoLDiFU97d5JQLIOLPNxaMdf59xYHqFE5CiEVrWjpi4tVPgoiKWqoOGpp3YqE1SisfpaB0D2C9VkovdThSFKoHht3SF2DGOfymIq4FqWBi0EYr2FD4UQVuTbpoBxU0Y2ttqz32tTDnQnJDr44vDwOE1gqhrb1A0DadBICVEDPTGxO+CpsJY4hgdgdgE4GYWHIiIy/egKCpBnCN3wh+4ZGoqdIDQP9BF2EQL+UxwLb9GtInPgT9Ihhb23hHvDgIhYtaFbodjZ3zVyRq8LGvpV0UuqLzugXiD4L+h8sjQXc0mlDVBcbVgEVroTG+q1d
FxbJYUZcVpq6RJMFYS9M0lFXNYlUwXy2YLxcsigVFtaIxPvQrSS1jnWFSS5qmJJKhbBy8oFYaXImVGhrPN5nGZ7CZMPkTUTzxxNPcef8Gt+7cZTweMRwOyQYjBpMJo9GE3f0d1mczkjQN2T6Opm44d/4swzzn4PAIpTTL5SoAuSLRinK5wATtvG5q5os5q8WKqlj5hAjtM9dM3AlAhOPFMXXTUDU1q3JFXdc0xqKTjOVyyb17d3ns0iWSNOXM2gVWiwXHh0foNEX0itFoSD4cggg60dSrgje+9wYbsymH82MODw5ZW9viwsUnGM02aKqSTCesr6+xtXmG5fExxhiapiFGgUQvccfbRq4upBDTZZmZYGIiLphfD8R4OrDKBtMzBrThQSNivYsakac0nJyc+a73Hx9j/LHH6ScmvsaCRARozW2lVJsscUKjDf8iMYqh4wr7ZnZHOfZDkbp7+FCmBxtDyHYM3HmHNMFc7zn72jAnfw/om7cBSMLpbVlG6MojPKwv6DsSCdcMcdai+9jUmvu0TrAuZK5rQ+wR/xMLAyG27Rsfjxwcg2KI0ePdQ/eep9em7nf/UB3N0P3br0hmXQMmJIAoHV3wUaU4wR336Yluzehirru0Zdd7Fyfb9Sh5JOhubJ9juZxTFEucNW31eaU1mFAAWfwDatEkYhELRelBd7kqWRYrlqsVRbWibApqWyIaskxDkqK1RYzFGUUk+lXiq8jrQijKmrpR1AjKmBD64cO/Lpw7xxvf+Q6j8ZjpZMpsbZ31jS3W1jfJR0MSnbJ7eIBSPkLBOsdsOsXWFXv7FWfPnqVYlRweHjOejBikKbfv3cU2DYM85XB+yPHRPseHB1hr0VoxyDOmwwGz2YzpdAoIiU6YTWekSlEWBab2Wq81Ncv5PriaPNHs7e+RzxeMx2Nm0wnJdMqqWFLOFzRVgRVpqRRjLLP1DcqqZLUseOal1xiMJqwWCx6zlsY2zI+P0Vrx/PNwdLjP4f6uT1dFQtZYcLC5aC56iQmqNuoq0qs4Ff610IKtE4kUHyJtsFGIw3VY8V53ZRVWLLotZq2CM+NkCBuuy2D7cYuDts9Ua8qrLjSxB7Q+AUJDiO0V8YqCcjFsMgzhWHch7BLhF8E2AR3vvIoxrz4W2Pa27bE9jVnCiubabIUuweTkHO9TGv4+MerpRP3aAJidBufa60rI2ozO0li7JCbZhGTmkMjgtX2iI1F1BX88cOuWPxXAYnDiC+o4sSixsTqm11zFUxuC8fd7QHGNkBjD1JQfmO1nXZ/E0DMX3menCFjrMMZHlsTkKx8PbR+gGcJ7aovW9xe4sKy2C+oD4+ljAO+jQXdji3ww4vj4AFsXVFXlHVi9GEWtNAqFDuaWsY6yqlgWJUVRsipLlkVBUZfUtsa42meSKUFShW0U2cBha4X1viCyPCPLcrIiJVmsWC1LlECtJGSmaUajMavlkiQfcGY8Y3N9m43NDSbjKesbm94zbGoODvfZ392hqGvW1zdoTMPxvObpZ57GNobDgwMGg5ymarh2+w7GGcpVQWNq7t65TVmWDPIMUGRaMxqNWJttkOU5o9EYY52nB9KMyXhKNhiR7WUsFgsPOuJDVOaLYz+5jGV9usYwf4zZ2jr5YEA1qtjf3fEJJc6xWCzRCFr5SZkNx5TLFXk25OL589SmQaFYbqxQpmFtbY0b19+lLlcsl3OcFbRzNKG2ReTkdKs1+cHi40h9CBmu83s7BOXAqDCwgwkVsDcOSYL9F6cviRawCiehhkOrMccB6luiwgD/SYjTbYEqajShaI3W3f5crbNJhVRfHSMXgmYXuG0Xteao7YunALp8/r4GFsBcVMh2DBZErzmtXdLX7OK/facYnZYbNd+T2qAEp1B3isSauTwIFCFyIBY5sl1qsj8s2DqhZoULC09n4idAilLerWVCau1083nOXfk8s/U1bHHA/dvf4+79d2jqJYgJIG67hT1sM/SBEXLCeDqp/XahdH7cG4m
KQhcpYaxf9JT4qBDnaPdko/ceTlgt4j4WmP5t0XSHozFpOkApzfx4n8X8mKqqQCBNPeB6QtthrKM2hrKuWJYFq6LwoLtcUZQFZVNjMaAUiaSIU9jG4pwmUZo0UeSZRYumsYZ8UJEVGXmecZTMWS5WJFUojpMkXL54kWw0Ybq2wWx9i7WNLdZmE7IsZ7lYcLC3y97+HsuyIEkS0ixnvpgzHo957tnnKIuC/cNjxpMpVVXw/s33KYuSxfyQ+WIOCHmW8vxzz3vNXimasqKsaobDIZPxiGGeofIB1jmasqQoCpybIgq2Ng2DwYDGNezt7lNXlQ8rq2vmy2Pu7dynrErGkxmTyQy7tsF8foRzjiqtuHnrFnVRMB6PyPMBe/u7ONNw5eIVnnv5NTY2NtgSn8lTFAt++md+jsOjQ4pihWihrqo4ZMIQotVouwU7xsx2WkEEXNce28VT9kkK5xxaujngRNDK4cK2KzEsyU/wmGocQDxYzj8JtRc89RKcRmGfrgi4LdAGgBHRYfueGCoWNaE+l9lNvrgfWfseWlPZh6LhgsZrbABoh4hpF6d+plRXS6A3uZ2AU2EXiD6AdrxtC6xBQ8WFKImYLRhojhOAEd6ZDy9R3b0JHGfoE6dUgFuNUwn5dJuNM09z/sorTCfrOGcoy2Nu3bqNub/kxdmTHNXCN+4UvPb5f4IvjxUHOzc4PHqfg/3b7O7dYLk6IEkS8kxzNN/F2RWaKkSCuNaiiHKSagkLeUydd4GuPEGphDhk6xNcnJNAO0SaJI5LiUGSIcElLsIqLEInKY/+e/8oeSToimiyPGHYjGjqiuVyQWON3xFVJzglWGMx1pdvrKuaVVGyWHmTeLlcslyuKJqaxllEOx9cToJzGk8/+pVVKecL5WQ5Fkte52RZxjDPSdKENNMsFwV1WZMmGaPhkKqo2Nw8y5kLlxgOh7i6YPfeXd557x3m8zkWGA5H1MYgOsUZy3Qy4/7unqcKBjn7+3vcuHGD4+Nj6mKFThLOXrjEZz/zGl/64uc5d/Y8OklZrgqUVhweHbFz9z6379ymWJXgDMM8w2UJW2e32NzcZHt7i0E+YDyeotOEpq7Y39/n3avXeeP117n67ru8f/MWy1XJBdGMx2PW1mY0znJwcECWZayvr3H71oJ79+5QrkqyPKNpGq5fe4/7u/f58pd/htnGFsVqhTENT1x6jHPnL3Dv/l2WqxUupkAHviwSu21gUTDr+hpTBMlQ44xIC56IsIxWrjgaJIBr4EHp0iidC6DtvMajQkRER2H8ZBAMHhQ7k12Fql79zSdFK589dgJwfXyrN61173px8ge6wHW0zongfyehloj2tXXbvnMQohxism4fdF0PXINS21EE/sodzyu9HpbOmmlpo6j9tl6lCOb9/gngjGoLu4sorCiUS0Ap8vEZnn7pD6FnT7K5dZ75smTHVGSjIaUquXf/bZ7YvU3x9jd47OkXeXO1w9/83htcfOwZXtj6DM9f+QrHyzmJwFTD2mDEZDxid3mbd298g1/91f8Ppr7/gXf3waw0Q6RfEB+nG5fCTl3wySn+nesQkx+UgB7nrpTGuRi7HKzA4EyL3csDt/+4o/nRBW+qGh3SfLVOSJKcPDMQ1PWqqlmVJUVZkaTCsixYrgqWqyXzxZLFYklZhBhSBWhFqjO0ynBG8E4oFfgjS5KpoFn7a+d5SmUyklzIciEfpCznBc4olAjDQY7onNViTlMuKRbHvH/zfW5cv04+HLK2tk5TV6hE09QVm5euICKkacIgH3Dv3l2uv3+dw6NDMq05f+UxPv+lL/HaZ14jEcXO7i67d19nurZG1fhY28l4zHg05rXXXqNpDMeHBwhCNsgZz9YQ5yiLgmJZcLz/PohiMhkzSDNefuF5Xn3pRa5eu8ZXv/rr3L51i5s3b2DqmvMXLrA2m7EqSg72dlE6YTKZMhwO2L1/j/v373provTheEW54vkXX0OA1WrFMEvZWN9EqxRsgWn8SIi
B+P3xEf1bccLFGARpJ6YPwrfx4FiNpO+Fsfi6Gs61XztRaHFt/VNxPlJBOyILh8WhEc8D/wSEjDnnkzi88hZikVWgDJRGtPZ1OlS39brugTKiTpj0/a3OcV0lF6EDiehY6ya58jylACoBZ2nE0tUwjgAZInharben/forh6cKIOpMCw4t79lqywFQW644vAv3wGIsQIi3dSLtMysEdMLalZ9i6/Ev8O0bu1Q7N+H1dzFJSr2xzZiG1d4OcuM2cvtd7t64Ct/6Te6ZjPuzs9y9dpe7Z86SK8tzTz+DlRFn19YZr21ik5zpIOenXn2O46NDvvb1fx/B9J6xe3+h9/FVhNxJahZCqJprOXOHgDU4FDp4ODy3HN6Hi1ZHzHjtKx09vvl3qTM8OmSsLmkaqOvGp6wmKYPBCGMarIXjxZLdg0NSUeSDEcui4Hi1ZLFcslguKIqKqjE45fnJNE/JkwycomlCgWvxud5K+YpfeZ56J0aiSG3CgIQkhyxVpGlCkqY0ha/hUJYrDuc7iGgGeUZVlty5e4fGWTKlqOqapqmZ5hPOX7zEdDYjTRIGScrOvbu89+577O7vkGQZL7/8Kj/7Mz/L+tqMw/0Djg4PWRwdc3Q85/atG9zfuU9jDIM85/z5izz5zDMMR2PqumE8nnD71m3SPOeN17/DnVvvoxxMxgPSLGP77AXOnbvIbH2TNE24dO4s/+Df/w/wm1/7Or/19d/i2o0bzJdLts+cYzwakSrFweEBGs3+wS7rm9vM5wvu37lNXfuBd+3au+zv7zIYDBnkAwb5kKJaYYylqH2ato/LlcgkBICM5qWfdM56c6zN4AmUQ2fCen5WQlIFKF982wLOpwTj8NRCAFiR6EDjpDksoKyfIBIL4PyYxRqDS4KmqyKdEAnsbut1LUmPUuh48viQHvzCNueuD34QJ3WUCM7ifJlMD2K+Kp64dnr7+7Rhe9FY6d5NB/IPFwmWSasfh3cSax6ftHSiBhzKrbpQTJyY4OC6fnGCk4T8zGc5njzL67/zLrZeURuDXa3IZ2MWuze4sLXO/Xv75MuaMQ6pDXsHO8zXL7KWjslvvk361lcpteKv/apDnXua5MLzbFx+kmfOb/DyhcucbRxf+fI/xPvvf4M7974Z+lG6Po/90D5zL6DM6Vah8OAZz/GbHvi+9vyuO1H/wQVnXgBe10V0iPiqe3G/vN+NPHq7HhGqasVqtcIZgxKfnYZz1HXF0XHh6y9YmIxqjLOsVitWiyXFqqCuDQ6fIpvnCcM8J1M5TWOoqUB83VxJ/MtWWpPoEAupINeeqE8yTyOhPKc7NxVlWdCUjqODPZq68SFXgxHG+mva2qBGwmg8ZH1jm7NnzjAejphNp7z9ztvcubfD8fwIRPHyyy/z01/5MscHuxzt71GVJTdv3mRn5x53bt/i1s0blHUJSpMmKTfev8Zv//bXWFvfYOvsRfI04/DwiBvX32Y4HHF4POdwf5dRnrG1tYkxsHt/h8lkzOUnnkKSCyhn+fIXv4jSmq9+9de5v7tHUVVsbmwym03Z2FhDxFGUI+aLY86cOcPx0RGrgz2SRDEYDimWc8rlkn3n2FjfRPKEqqmIIUk2AGlckmMGow0cq7QTNnJgYTxb6SiB8L2I8zwk3gscOTbTUgvgsCH0zIZSdyGrsOUDQ62qcK64D4OLH53YGJUTat22JmvU6qDdF81rt/5rv5OtbYHQz2tLVyzm5Iz0vLEfv86FaneuA4624EyQuEOIwWf8tbrsCadanw5w7S07y7efpuGgt1gYZ3vHxfafpC+iiAg6lJ30YYUa1p9mPznHrffeQA0Ma5e30DZj68xlBgJHxQ7bOmPz0hMc7Zxj9xsNlaooP/sYmy/9AZqv/jrZd75NIhZ76TlGwyl2fsTGtW/x9dvf5cYTz/Pd6zeYDgecPXOeZvYU7u63QsTIBzXe9jFcj6YR076FjnKxkUsAB8ZYnDK0sdrxutJbUFvg9heytGvPB/wSH8d
P8UjQ1cGr3dQF1tjA6QDi85eLsuZQrUh1Sll52uF4Pmc+n1OUBdY1aK1IEyENhc21JDRNDPYOBcgleOqV1yZaAM48n6a0r73QGEtRNNQVGNuQZznHR0c01oJtSHVCsfIVvNbX10m0YjZd58qlywzThOVyyf7ePru7uyznR9RVxVNPPsVXvvhl8uEY54R3332bN1//LlfffZujoz3qusJYB0pRNQbEx+ROxhNUkrF/+D3KuiRRmuVywc2b131Ym9LU9YqjowOuXruGTlJm0wkXr13lsSef5tlnnqVqDI9feZyd3T3efOMN7ty5w2pVsFh6jbioDM46ZtMN9vfuc/7cWVbLBc5Y8sRn0BVlhXWWyWjI9vo5Pxisaadbm6EfTVBr22RJb/d2lIEfYy2qtCUlrTgSiTSEP9ISy2TSamTKhlwmiRDjCBYxSk4kafYYyx+vWGcD8NY4lxHjWf1miJ73A1oitA+LzkYQ7krJdKUPH5AAzt5xZYh7oUHQ1KKTKDhxJER/RNPWOtfSric43pY6ClqxJKSJQqcTBqM1BtmAxjrywZCmqSkWx6yqBcvFLtQLX1TMeZB3oT+MM6Fmh79ltJja9kpKOb3MPWOYrFd85aUt1nWDdjXvpIpVVfCVrRmXZkM21y5RN2v89pUthpMpyfkLvHVoqP7eP84Tzz1BbhU7T75Iw4zb964xeue7nLdHZM1ddt75DrvJFu8vnmZ7/xoJCh2e2bYjuAe+Epf2uHD0wvRaLrajB1zgWXwYqgFJvTM1vm7xBSz9PQyxAl08N96pD7TuY6i/j9Z0nbSV8euy8HyT6FCwRpMNhjhJWdSO2szB1BwdHzFfzmmauk2jTFLFIB2QqLTjW1QITdHSlkoU580ri/OxklojicOKImsy0tyQ5iU69dEIg2TUpmceL1cYG4OtDQcH+5w5d5b12ZqvB1HVuMaxf7DH7sEe88WCi+cv8Nyzz1GVJe+89X3u3bnD66//Nu+9+xZ7B4cUZYUJVc+U+IVjkGaYJMEay7KIu0XUZPnA12AwDXVVU5YljTFtPQrrIMsybt65w+27dzDGcu7sWaazNR67fJn79+7y3tUDrl57j3OrJRsbWzTWkeUZxjRM1zbYuX+PLMvQaUJVFYHm8eE2WZaDaNIk8ZZxWBhbDG1pO2mVoriYe3MqftCP6IW4R5qN4NoN6TBJ45YzCuIC2nqR24GEc37zREfEefmJ0HQBv8DY/i4MD37fgVwbGhU3hxTwwBw43CAPajwOzx2321Xh2noIfn73w/Xx2WHhfWnnwaXuTXqc8UX5dcZ0eo71rStsbj/O4489SzrcwMqA6SAnHw65ejBn3WnWx0OWTYGiYXV8j++9/Q3eeeur7N97B1zd8satvttqe5Gz9/8xo3PcXh6Tr835wqUhr2VLdu4dMD9uWFN3OT8eosuK3aPrFMsVt23DKy98mTqfcGxyNlxJOT/m8b/j72G8NsEWx8wXhsVkk7X8Eq8dpMjRPme2z7KwNQejOfu3b9HX4XsrUNBq+eB3/d6PDs2YadYb7xIGpRMT+j6cLQRHW9j0K9ynD7h+eHTWCh9jTD+a0zWNr6yUZCAr6qYhSfzSm+UjhiMfvyrOsSqPqRbHHB8dUZYFDoPWGVmmfdxtmqN1gq0alFNo0VhtQWyI8fVrqrUKaUDFshuSoH0eBal2aF1ibcHh8RH55oREJ2BrDJqyqlhfW+P23Xu4JEPrhLqu/F5qxrCzu8vNO7coq4rZZMbadMrBzl2uv/09lss5Ozv3ef/WLY7nc45XKxZFQWMdWjRpqiibhpWuSESR6QSdeOeQDlltojRlUWBtQ9NYyqb29SNCvGzVNF6TuPYO5WrO4489webmGTbPX2BtbcZ4NKSua+7cv0dV10ymM0zY1LMoloymM/b2dkiThIODfb+NUuYz47LM88eztTVM3bAqVmE/s/7Qi2NSWnPXj2MTlN5QDyA6T3DdnmpE0HXdBMQjuQTNVSL3FXLs/Ya
bnZZhoa0f4JzrIil+nNIHlUiHBJFIGTjvLPM7j8SU365nu4I5KnDkncYVHWvE7aliiBaBosADbowo6Jrl54MvvRnqXEji+18SxmsbnDn3FC8+/xWGsyuURqgaxT6Onb2SmTUwanh77wYXUZjZkDeXFU3t+MJ0RDa8wsYTG/zxF/8Obt34Fr/2a3+Bxf57fov2njNK2sXWc8xWEpbpiJUt2bZrrJYDfmV/l3pVY5YN1jguqDPcu3mbF598jqPv3+LclRd59+p1BpMJv5MKzWwdfTbnvuxz99ob6Dv3GV67wXN796BquGYd24N1kjRlKjWvToZ8PdGUpd8jsWfsty9PHhjpJ4oP9V+1676PtXpb9sbF2GDra/GGGGMPprq7iIQIkWiVtGtTm6f5SHkk6JZlSZalJEmKTnKMWWJN4/PRs4zBaIxyCXVVURYLVkXFYrmiaRqUhkRDmgqDLCdNc5x1NI0NpQi9kaqUQ2uI5pq2ChP1LxdWDqeIMYOe2PeTN2bFJMp7mH1Rb8fG2hoOjbVQW8vabI3bN29y8+4dirIEZ3G2pq4K3nnn+yzmRyyXS/Z29zg8PmZelFSNZTSeoZSiCVsA5crXgjDWUYkhs0KS6LbAS12VvvCLcVS1wSKMRlPyPKOsKlbLJcuiDFzSbZq6Zm9/l/WDPWZrWzhjyFJvQSyWKxyW6XhKE0JX1temrLa2GaQJ8/khKnCEk/GYyXiMU5b1rXWcguToiEQEtF8YTNj5AvFjy1i/GPiQP1+sRmIhqcDDxrAZWl44ElkdV+vphUgmdFyij4Loa3E+g83GwiGA5oMT40cvJ9vgWitfQoYWRCM27jId5WTKbAinU679PfZdTJBwNmq5ob+lu04E/b6Etc/H4VqHToacvfA8l5/6MnWyRTYZsy9D3tktkERxvaq5s7fiiyjeySxvlynblWVzbcxX6xWuTnkWxdvOcDhfMnQJy6Wl2XqFn/4jZ/jur/4HXH/vtwB6i4qc+FECS7tEZwksG4wWpBgwVAmoEpRhuThEjwd8b/8eZ7cucNdWNPUh5uA6353fYZDnXFIjilXK3s3bHGt463CX9eGMm8URk60L3GTF+mzI+dkmiXHkSlOesEWk1Tx72kT4pzdeQ7+2EQhtPDonRmx3tg/384mhDuUcsYKcj/BRnRpsvbLo4qLromrxaHkk6B4dz9ncWPf7iGUDTFP7ECwgTTR5moGk7ZYYRVVR1TUOR6IVKgWdK/IsIVVeEzXGV/1x4gPBVaJQWhALjek4MxdWHuf8xnWdR9WnHw8ynzgxnUyYL3zIlxFI04Sz585zOF+R5jlZmnHr9k3u3b9PURYcHx6SporpeMh8fsj8YJ979+6xWCxYlCVlbSjrGqU0k9GExXLOYJAxGk8wdUVdljR1TZ7lxBpKOoHGQiZCZS1l7SmJRPndJpx12MYwGGaYxlA2Brtc4XbuUdQVx4sF5y80qERjlobV8ogkTVksl1jjtwjK0yGr5THPP/0Mi+NDmqoAB3mWMZuukQ8GZLMh2WDIdEuzefY8W2szzpw9y2CQec00FOx2TmiqmqJYsVjOWR7PWRUF1gTzCqEsK1bzY6rVCmcayqqhqsP7C5SJcc4PoKjZOvw24kHrjTxG3NDRSjjeRYfajx90neu148TkjWxml5n2IAfdRWVIO6G7vHxvptug3dJWYus7gWLYWUfbnDSKw3+SEbOt50guvsra6CzfOyq5MJty6+Z9dKp5brrNf3frfe6qAS+WNVfPrvF2mrMBvLI25vuqYKWHvKoHbA5Srlc1m4MRr66v88b+XfZLeGp6iac+/w9x7+51lqs7dHvedZmFUZ/MrCEzBUZSbt1f4VYlqU4YZIKpS8ZjRZY6hlkD7ONu32PgIBPNZw8OyBqHFc07zuHSDGcsr5x7DC0JB1XBWXIaLYypsYs9jtOC1WqfEwj7oUNHIg8SSZKOw3VBARDnsSWGkj34XvEKoefSadOSBQcSq4/ESAch5hSfGEu
PkEeC7u7eHoM8YzDMSZKEJM2om4q6qRk46zfgE79yG2Ooam/P6gzSXMhzTZYlqET8903tNUXniWvRvsKY1oqmcsG8MxCcbNYv/zhn2mIudW1oGsdhMefKBcXFSxe5fqNBcKiBYjAac+nCZc6ZmnuHhwxSza179ziYHzE/nmPqiiQZMh4MWCyOKcuCsqooG0NjLY2zKGdYHw25cHabuhxx9dZd8jQlS1PEWEa5L2lZ1xUDneBEURmDThOKYoU7XmBXBQOxnL/0BGeHiu+89y7L+TFZkoY9xaAqag7VEQIUiyNQKXVdY0xNYxrG4xl105DjyFPh4mNPkmcp761WDPIBzjp0mnHhwkVUolmfzXjh+Re4d3REVVRcuXSFxy9d4rErl5mur5NmaWtKGWuo64r58THzowOKqsRHpdTUlecLDw53ONjdYX5wwPHxnIOjQ46PfSigxYG2JGnYIBSfYViWFWVZ4awjyVPy0ZjhcESSpqR5zmAw9E5GY1jNFx85QD95CRy3886kWEu23f32BI3gJ62vZdFN5hZww3EuaEvOWs/jWsFZg3NNBwSxPkOkeACHwobd6yKnur7+ONmlL/DtA8vyHtTuPtvVgveO7rNrC14dzPj/qR3eHaY81iQMz425tZ7yxXyL543hm6Ykm2zwsmjWGuFr5ZztfMDTKuOdg12WwyHns8xX1Nu4xOd+6h/m1375z4KvduJNeQkBbOGBB65C0hkJmmGWsjQFdb3ArvBFqoymTsdUBVTNimZZMECxMjUDlXJoSyazMcXhDhfHU7J8FhJPMl4+/zSSjBgNBpT1IZPUce/tX6cyS99vbT3foKv2Fsp+nQQvcYGLadnS8vJ+u6p+BAc98A1eiz7d4yLV4L9V4qmOWKKzDdJ5AMAfJo8E3Xs7e4xHA7b0hnfOKIVOUkxd0gSeVInzxXBMg3EWnWrSNCXNFfnQZ5Rp0dTWYBqDszWiDOgKpR0q8Z3jveGhpmd4wUpCmiqA9WZw3TTUTUOxqqgbQ5olrM3WMHWJwZGm2ud9G5iOp+wdHnNweMDR0ZyiDHVvTcbe3g5+94iS2tTUtvE0SjagWDgujxO2mx3ePlgySCDNhoiDcxfOsLm5jU4UxfwIccKqKDmeHzGereFE2FW3qeodtgaWK+4OzVKTGMMg0RhJGA4zsBaxNgCg5fj4iHw0Q4l4KkApqrJgOBzjbMgZV0JTVxTlijwfkiSaJEt59sknuLt3wHg8Y7S+TmGvU48c+XjCbG2Ns2fPc/78OUbjcRuniIO6rlktFtSVT7YwVrFaraiqAnFQ1gVHB3vcvXOLw/0D9g8O2Nnb5+h4jlOONFOMhgmJJtAqlroyFEWJzjI2t86yuXWGtbVNZuubrM02GAzHGAdlUXF8dPCRA/STFmk1FIgZX2GtP6Ehtc40YqB9d357UJv80PG30Qqk1YJc79CTJrCPm20AS5qOmZ17nun283z9rmXY1NxZHlGtaoa55e3N83xpcoEpjndywy9MLnJuUfErHLBWOC7XFTvnJjTWsekKzi4y7m3nXBitsbaoqJcF9bkR7OwxcTlvuZJL6zPsk5/h0vde4e7tb3l6r11YOi0+L5akU6F0irW1dUYJJM6gLCzKY1SSkOmE6XSGsRV5ltPYhroxrI/WKVeHpK7hlcvPMU5mVE5xe3Gf7ckmG8mYRWMYjFOapmLn1veoV/cDb3vyvdmOpO3gsvfeaD9zfDCUK1oyJ+0L15104roPUhLOOZTzi6YVuop6P2z0wt7+PhuzIYM8YTgcoHFhF9QEpcCaGhGNNTXONohyqDSEhw0SbxZnGThN7WrPcSmL0hZRljTV6BRsY4j5zBLru4qvTJRIgOQwExTCZDzm2Sef5eLFS9y5dZvpaMTBUc3aZAJKsygKlkXN0fyYqq59yUXjvbNJ6pMvqmKFUoIxNWmSkCWW4WSKI2E6mrHIMiqTYPOczUnO5pmzaKXY2NxgbbZO0xj0+jbWWfb2dxmOJwwnvuC
NWZZYKxzOD7hZ5KwaWNu6gE58lSZEKJZzVssFSoSmqijKmnToPOA2hspWJInfLHNVLNDjKbsHvvCQEsV4PAVnOHvmHG+++x7j9Q32D45x2qFRlLaiaips4HXTzBcRGmSeJ7fW0jQZk9GA1aoI0RaWM2eEpvGlPMuiYLW9xfrGGge7u+zuHzK9e5f9wwOsMwyGKaNhRqJ97QJrLE3tS12ubZ5nc+Mss+k6040NNjbOMhnPSHTqF86qpChXHzlAfxTigjVlW2dX9DMEoLTdVkVtMkJbPJtOA3JxQlqc6aqGecAG75wMFI6LIXMdHWFtjXWW0egsdu1pri0G7BzcRqqG941PLlibjfn+2pTHGxhq+FszyJqMpDB8a3tIPoR1aZgew027ZDNruKQEmc7468c7yMGcPyJj7m5M+Y27d/h7phPK44rpTMP+EXNjufzYZ7h76zsoCWnjPUe3Q5FJRpIOSQcjDg92uby+RqqgKSu2phMMlmVd0jQF4yRBXANNyXQ4ZZQNmQ7WyJOUhIRhNqVCeHZtk2E6YFkUTGZTjKq5ffc2B8fvh3kfNEtcS13F/3qgjZElUYNtXWRBAr5IfGd++bQIvg5xcM8J/vphW/peYBgtwLvgT/JvkphG7T4A7A+XR4LuYrni+HjOINcYMyZLUhSQpRlJkhBT66z1xWySRGFISRKfRJFlOYnOqGsbx23oEINoQac+48dgukpLquNjYm6JCxWHkiRhtrbGxlqGdorFYkE+HFKXBWfzsyiVMBgNWVUV89Uhq6qiLguWqwXVaoXSPtzNYFmWhlQrRCUMByPQGZubZxBRrJZL0sGA6XSNc1nOaDgMGXkZs+mU2fo6dVUzyDJQwnA0wRjjnY7pgBTF+XLF/OiQNPH53bZpWCznYC1ZPmBPK4y1aOX5zbKqSZZLn3qaJlBZjLEsyxXr2YzVcoEMBijrY5CzNGc8HHC0XHC0LEhHM86dGTMYDbhw5iyHywWSakxdc/f+DsuiZpDljNIM5yyT0QCdphRViRKFFmE4HDAYZWilaOoSMxxSjcekcQEdjdBZwmA6omkqRqOM6TAjTbxhZ1yDbQyj4YTp2jnGk3VG4xmz9Q2mk3UG+dC/b9NQ13XgN3+8Yq31McTWp+5a46kyHygfM9VUG5MM+DoMYQukdgPFE2RsL462tVGjRguR63bOxJpdHqgtJPlZ9NarXD3QJM5xb7XgiY1NltWCJ+YV87FDb814cXCWaxxSu2Neyja5KJo7WcnQNlyoLfe2cv7S/C7m4JB/pNrg2nDJd8r3OOsmNKT86uJN5ggLhvymXaFuLricpWwNU0aXn2cwOUOx2AnWpsKplDSdsrZ1hY3HXmM52ObW/k1ydZvj40PWs4xMJ6TWoaxlluQk2RARIWHC+izFYhjkQxKlWa1WyGCIHeQMG0GsIdGKrbPbzMs93n7717h7702UK1vLoy0pCXRaasCL9vvQx3Ky3x9q9geOyMZ07GABEqIa4p8A7e7QEZxRvVce7B8XF4RHyyNBd1mUFKsVxSolTR1NkpLqjCQbkmrtV0JjME0FWNLEp0umqSPNdMheA9s4nM8rQCu8aZpqdOq9gs7hdwIOHmPPkfnQJYvxOxGL34FhMBBG+YzpaIaShPu7ezQOxoOc8WjEqrbMlytwjtVqxfz4mNVy6btF+UyssqrJEo1SKU1t0DphrHNSrVlb26CeeS51OpmG9huU8nGvo8kUJZpUG2xdYazBmYbxYIBK/Jb0G5vr2H3XOtoSrairEpUqlIWqqX1ZyKZGOZ9mjdbUpvEZdUmGq3wKs7W+/OT6xjrOOc6cOU+WaEaDnIPDfe7v72MlYe/gAK1TLp7fZm065sxkRoOwc3uHq+/cIEky1mZrSKIZDnKyLPU1IfIBWivOnd1kc3uDPEsQcaSpwrkBarlgysQX0BGojcEpqJuaUZ6wPs5JtU+kiSbyYDhhON5gMpoxHE8YT8YMRnlLiZjGUldl8OT/eCWm0lrrfJF
8a2hMAyJo8NqRVaAEE/KcXRiPD+5i259uznYURIz+aINAiLGlgnVNAGYF6Ra3yhl33nwXFiW7zjJb2+LbS8PT2Rrmyoj9WcbLpWU5WrK7nTKkomgWfHO8yV8/uEZarvgT9iLpwVXWlu8zWh2ScJ4sMby2d5Wz6Qb1eEW6usXj+ZTFzHFY3OLl7af5vlvyfJkwWRmef/kPs7/cp2hqppM1NjavsLbxOCafsHSaQdOway3v7NzlYjJg2BhcXZFlKSmCM4JUjiQZMByMcIDRwrJqEGmYTLfROvdF9/OE6WANpeD9W6/zxlu/QtMco8S2wNcpkZEAl6CJ+nrEESzjMT0fZ3gFttNyY+CDxEVQnby++Pce7+ajVPpp2fH4HqBHduGHjV4oi4qyKlFakWUZ4KhNhZi0x08ZjKl9ecY8B0nQqfGlHyWhMX57HYcBMYi2qASSLCFNtH854MlonC8SjQMFKlRAViLkWQ5O0eiERGUkaUq5KjGNoXZCYyo2Ns5T7R3TNA3z5YK6KqhqvzOvVjYULxHKokQPB5AK6SALBcpT8ixDOcc4H1KmhrJcMhlNcMawLAuuXHkMrQTTFIwGg5CQIGC9ll4bX/pxNBrTWMfOzj1m4zFFWXN0uEeWZUiSeB65qDxfa2q/2Cjlt7wB76DLBxwfH7C2toFTwqoomAwn4Pz+ZIPBkFtvfY/DxYrZdIYVKJua/cM5g3yAsRXHx3Pu7u56DrVquKl97YrRcOTjgqcTzp3Z5qknH+OJxy8xHmdAyBLEUZQFKEuShFjrwZDBaMSwXJHUitEgYTjKyFLtC8ZYn8Kt0pThICUfpOR5RpoqklRIMkFp8SUBJcU11UcO0E9anDNYp9rMNNM0Pt05qExWeinASmGc4LSP8/Rbt6uToBvnf/ilS6WOR0jH7wo4MRgB24y5vxpSFBX1nfeR9TU2Hn+Gi5Mz7OuazdlZ3lQHLIcHDNYep8JxfPsmm+mYZ0zOtw6+z3D3ewyLBidHnEmFzx3cpTk+hmSHFQ0bi0Om6S6L0U02q5ozkw2WRcl5UzHVu6j6mJvDbbJ6xWvPfYUnn3mFw1XJQA84OCrIBzmlKTlual7fuc0t5agev8TVd7/L8eEx0zRjXGjW8wFKNKNRglYaYy0qydAqIc0mDPMxaZozLw/Zm9/EHdfsqoTd3bfZ27mKc3ULpEY6vVI5n/gU+zLyzK4PtsS/g8nPgxyva2mGE4xtywXHWPMIvJ5SkfaaHa0U2+HxPjrffsjohVXIqhpNZmxsnsU5WK2W1E3lnS1A09Q0xqCTlAE+6wjtq4c1xlFXfoPJxprAZypSlaF1jkgSMp0c2lfbDVXdFYlSKG1xyhdJMfhNKbWosFcb5HnmC+OkGUYJu0crdvYPKMsCUZosHZIkJdYaGiMorb3zL0nR+QDnnE8/1uILiqcD6qaGNMFZQ5amLI6OGE1nPPXUFfI8J0sU585d8iZ5mrJcFmRJggmbLdZ1xc1bdxjkKa+8/CrvvfcOdV2wffYc+/t72KYmyzJG+ZCmKf3uErqganxxeNsYnPPXEaVYLBdsbm6R5rkn7BVMphOu3rrOjXv3fAGcwFXPlUI7H5qUZSlFuaIqS2rjY26xDoz1GpwxjIcDts9ssb65ibGwXJUeQNOU2hoa2/gdMBqDEhV2hU5Rifb5FDH0Jgw+JTqwQ46yXCJaIYmCBKz23LzohCaY8E3TANOPHKSftERO1/O6jS+o7xoaTavJaq1DtHGNC2FkzimU1ii6wPm+46V3Bz4wF0XaWGdrh7w/H3DzcI/KWtRwTDWdcqGqecftceFY02ytMGuKzeKY1B3xlm24f/Auhc1YqHN8nprZ/T3scYHYPYpBRrZaMrAOPTTMqxUTC5mt2Nk/JpMESSzH5SFrknNvuWQzybmjDri4fY6lMdxYHDJIBzSuQoYJK7HMRdi3hvnAcObCmOW732dVH3G3KMAl2ERR1QUb4xFmMSeraiZra6QiJEnOeDTBYrl261v
cuPZNqvIQJbatYyDSgHj6JjotrVi/EzKRBIhqZYwAwdM9LR/Qox7CAidKnQBWWqdoLwTFqbCNlT+343TjAV2R0+hA67/qvy31dFdFRWW9Sb29dRatE+bLBfv7932qa7Gi9jUESbMBOrHUpsbYCtv4QVUVDVXt6+0a8Syt0inaappGYxtfdASn261dfF1Bjes9qDGOPBuRyYA8HTDIBxzOCybTqd+HbFWwd3CEtYYkyyj29lgWBUopqrLynRmiLPLhkET8rg46zRilCaYx2EwYT2c+k0o5lHVcfPJJRuMJVVmyMZuysbnJYDBkd2eHjfGIROc427B/sM90OvHxvBZu3rzJ8XzOhbMXuHnrOu9fu8p4MiEfjRGLT9dlwOHRHmmS+sQnfNp0JsJq1TDIchpjKVcF09GEydBvgvnduze5s7ODCzzYqlhydJSAbUIq9gGLUF5T6TSkR0M+8HvILZYL1usNhuMhu7u7DAY542HK5voULUJZVb64UNNQ1jVVXeGc59SHwxFZnkKtyLVGx4qlTrWhfc5aGmPDnPARKGVTIqsEi890rE2DMY6Xzp37WAP1kxIXttJx1oIxIA0mTGTtPCWlROOcT8cl8Vas0rrlYWOcctR3fWJF/x79O3ouMN7XOuFwkXP/5g6JQD5dIxvmpBeeYmxq3nfHXL78JF+YrbG7+x7FasIZtaJgl2T/gGFpqKo9klyzXZSUDoaJonaGQZIwzgZYZ0iShEk6YFmXJGlGqhP2jhdhi50ly7omn66TJgnzYcKhcZzHYFLH3NQcKzAOVs5Qa8MlNaS+dsD1++/TlAtmoykOWJYlSZ5jHCTWoTJPRalMEDG8d/M73Lv9JsvFbZTE4vk69go+ZUbokhhMWw2sA0mf5afwCTcuxhIHVPXA3ItM6MdZt6pvqOgWvlMImQyYpWtM8iEr03Dx/HPcun+d+8c3MOGK3nLpnGodg9SPbni0PBJ056uC+bLEimI4mjIeTxhNZohy7O/usJrP/eaLOiNJfdwotaYpG5qqoa4bqqLxVcWspaJGpwqlEq8FieAMNCWYGl/HEkA0jXVIo1CJItUjSA310lAry2CYUtaGNEnAFjhjaOqao8Wc5XLBcrXENDXOOZom1P9NQzEd6xgOBowGA5qqIE8TrDFUTeWTQLRm++wZhsOx35JFK4qy5PKVK5zZ3ibPc/J0gDghHw7Z3d3DmZrt7TPkeY5DMduAwXSNu7ff571332W2ts5oNGFZLDB1jTEW5Rps8Pj7LWAsxvntfKqqJMlSb85WJQYojWFVVVhT8d7Nm8znc9IsYTQcoZIMZxqcNcznC5+Ygq9gu1wtfXKiDiu90iRa+2JEmQ85m82mKKUo6wZjgqlkPFBrLSRa+2pM1heh16J93LbOSBLlQ8bEQe18PLYpkSRFpwl1XSPFClsvMViMdYE3DfnsP2aJzi6fnVdjReNrKYTJ6Py+b2It1ik0nvN2+L4kFF9RJwhd6DQiLx1/GO/reeSqybm/V3BkG+ozF5jahqMLM7aqQ24/PsPimA4qfuPgJm9c/RrbkrFML/DSULGxNFRVRYLFNhkpGqchHQxZloWnBLWiqhvyPOdotUChSJWmWBVorTuryliOD/Zww5y9GyuyKwmD997iwuULzGYThkqRJ5rD1ZKr+3u8efMG33/jO5ijBqkVa9MBpipZn4yYDMbUzjAZTsnGE5I852i+w923v8dqeRdxDXH7nJZDbXusi16KoOrhNXKlrqMG2gQO117HiwqarFes2vfRErz9l+K154SUqVtne77JpeFFHn/hVV584efZP9rl3vwa3772Nd55/zscFbutRts6ReNFJWrfj5ZHgu7xsmD/aM7R8YKqMaynKUmW0phtmqZhsZhTG8MoS0jTFGvxu+pawdSWurSURUPVGEpjaYDM+sLQxoVUSQumAls7UvFbXsdamMZ4xw3OIC4nyYTxYERdNSyXSxaFoSoKVsWSqiywTYmWUBZPi58gTY3WmkS
nGNOQZSlatI91TbQP5Bf8djNp4ou2O89rqjzD1IannnqK9c0tVkXJjfff4jOvvsaZs2eoKh8qp5SwsbGJOMetWzf4zhvf47mXXuPZ515gOp7w7W99gzRLmSUznHMUqyXKNmArsrUZic44Wi7YOThApxqM8qU0g5baNA2JTinrmixJSBLPqVdlTZI0ZKKpreVgfx/jHKtihRIVQvYGoBN0mpAo7bcWMhVlXbKzu8fa5hq7+7uMxznjUYZKtAch17AqK+rK00d1Y6iqklWxwjQWJQmJTnyNYw21qXxhn8ZSlSusLLF+O0KszzzAYXFhixS/Q0P6kQP0kxfXA16DsQ19bdTv9hu3N49FrR2iHd3uA74soNfQ/FX7DpyT9ILFWV+oyNSO2zf32TlW6GTINNFsTTa4dbTkifUt1O6CpXUkZodFdcATRcWGqyncLdRqwAWdsq9AuwSdTRApybTFKkftSqqy9DRPorGNMMq2qKuSypWkKqFqTK/mg2OQaioaWDXcuHmVOtXcvJ/B3h61bdhralKleffeTfbev4ZbVGyojDOzdabpmGy0zlAljAYTkjxjMFrD2Ibbd99DqhUbm+fZOnuJsq7RKNBgxdcVsbbCNAVNXWCaEr/zhgFXRmPf44X4sLuWRXfRwRULkcc+l95PRzV07IALqKnIkwFnhheQXcX1717lwucu8/ITP83ZjXNszM7xuHuRL734R7g7v8v33vsa37v+Td658QZFM6exta+Y11br+yGjF6pVxe7eIbfv3GX3YI/ZbMpwMGQ4GDMZrzMYHlDXR4j2FcFwjqapqUqf1VRXlqqyFFVNYRoaB8ZpmqB5+YQ2h2t8B/gwMr9zryQaqzQ6yQHBWcXabJ1hlmMHYWugxQFlUVDXlde4QvlFsYbxcIxZrFAI6XBEWRQk2jt8mqYJ26dbEpWgkwSM4fDw0BMaSpGkKWIazp+/yMbWGURp3nn7u3z1136ZTDmeeeZFxrMNEM1wPCJJMm5efYvvvvFdvvGtb7F95iyz8YjtM2d59bXPcPXqNRxw9/ZNyrrElCX1ao7WmicuXWEwHGON47gsMFl8h8JoOMIag1hHnuVU5ZLpbEZTlRwv5jjbYIzCOmjwoXoClGEb+HpgUInGWl8uM8sHVHmGcY7KOfQwbbMO58tj0iRhNMzBegfp0fyQuqwxBoqixFQVmVZYp9Fa0fFY3m/bhNq0KMHY2m9l3zQkWeYLJynlS3E6UDr/yAH6SUvUVqLmqaxFPPuMc76UoHIhFTg4v2Ievw8xCrWFlXCy8lh7h/bHuZBEZBqvWTcJkDHbnFBZTTJMOdiYsNvcZX8Tru3fRA7v0Qwv87l8xEE6o6gKJommAXKVkieaCqgSx/zwNsvFPcpiHqIiQkyw9VpfObzIePMpjDNUVYEyXoNXSvmMudqhVMIoSZgv59y99i723m0KV1C7BusE7Qx2MSc5OuBiPkSLZnu4jbUpiR4wHk0QnZKN1/nii59nmI6pGstgOMDWPkLEKQ1aIQoSLEniyMSxqgoOVkfMF0fUzYqj413u7b7H/f33qZZ7lIsdmmaBbjVc00YUxH9bcOWksywmSLig7CmrEee3Xbq8/gSPjZ7g6vV32Jhs8eLLL7Pav8vt2pAOp0iiSdOMdXWWn3nhj/EzL/0Skhrm5RG3927y1tXv8L33vsX1+29S2/ojx9wjQdcYw87eETdu3ObO43dYn01Rm1sgQpKmjCdTbz7i4x2ruqBYzb13vvH1BqrGUtWOqnY0kmAloTIaCyRKUOLQDlKg0gbXQD4cUDuhqh2mrBkNR6xPJuRphq/F4Dg8OsY0NUfHh1RVyTAfUBUlTVOxvrnJsqg4mi/QfqMuEqVItN8RN1YgStKBdwY6oSwKYqrfZDLzoVRZzpkzZ3yltSxn6+wFiqLmv/qv/wueefK7vPa5nyIbjhnN1vn2t7/J3/qbf433rl/jzJmLjEcTlEqoqpK1tU0uXKjYvb/L5sYGb37/DYrVAuM
Mzhj2Dw+ZTmdsbG7T4HwUg7VtsQ2rFcvlwoeNWctisSDNUkZMqOsSqH21NWdpqopEKcrgHPK7bWRtmcnGNohLKOqCYTPh/v0dXFNxf/c+m2sbjIY+hCzRCsEyGqZ+26S6oalrsD621G/E6MOqlUrQ1mLwJT8TGYJSqCRDVEKSJAyyAWmagQiNCangSfaRA/RHJTa0X8SgnPdWO+tA+4mrlPKxvCZqXd1Pu+Ov9FWpvoQoH9t4R13IVLu3e8TN/YpsYsjH64znR1Rncy4axXR+yPbxDoOmQZWHqLrgscGAlc4oasNhUzFOx2TDlP3d97hz+xaYVesE8g4hvxuxpzct88X7sL6FnHse2buNPjxGWYcK4GybBjweMhFFuTyiWu4xUDATSERIRKjqApPmjJQiyyaUxjDIU/JszLJ0GAeXJ1u8deOI4Zpjc5zw7TfvQzbAag3pgKTxu5tsb2+Atoy0ZqSmnN88R7FuWYliu3E850pMU4MqOdi7wds3vs7u3dfZuf0Gpl6gXE2vVphXAgJXG6MPJGq1ErfRTFjLN/jMs19kbTzl+jtvc/31q7z/9vv8gS/+Ak889wpnLl3mYHfO/s599nf2KMqGPB8wXl/j8OCYF155gbNbF7j02EW++MSXMX/I8J23vso3Xv+NjxxrH1FPF5aLgvv7e+zt3md/d93v2pvnKHEMBkPG4zHlakVdFaxWS8q69l5v57ffyVKoas93JCoDl2AaH3LRiKC11yBEGfI8R4CiMMzWt5llGdaBs5bJaMYoT7FNzc7iiLKsWMwX1KWPTtjd20Uh5KMJ+XDM0WKFbRrSNKNpLGmSUBZ+c8lpPiDLEoajEcVyhdiG6dinpw4HPoA/STIuXbpIluXoRJNkA9Zmazz73PP89m//Jr/xta/y+vde50tf+Vnee2/G1Xfe5P1bNxiPJ7z08qtsnz3PcOR5YRkMOKt8vYP7O7fIB0PmywWgGY3HmKqkqmuOFwtqaylLTw+kWUaqM2rryPPc13WwnsY/Oj4m0b6u76paokTRNA1NU+NEkaiE0WTswdtaz8EOhwxHI1+cJ81JRGNWNffLXfZ39lmbzZhOpygRxqMB01HOrgjWGG8N4MiUr4uRKKhD9o+v5yLUTdhpVbQf4JLhnPYhWcZhdSiNaCRUN9CPHH4/Cuk2dfQUgwlFf5QDVIKJZS+j+mo8aWJUH3gt4gzdDrH9MKYQy24rTKyvbAzGWMpFw+Z0k3w4Ylc70jMzVsuCe/fnPH92jZ8enuVQHTEAbKJpjCPXCSrTLEk5avY5uPMudXnkOdKYhhzuK6JitcIAOoaj22+QjGa48RqDZUGGQde+9jUu1Dh2As6Q4EiUL11KqAox0glzFMPJiHlR0BhLkmTsHy852N3jeGHZ2rzMrd3vMj1/meSmA9dw0wxZDoYMjGJVllzaOEM5TGiahu2qYisf8OL2Nm+vDqgQtpIBZ0W4awSXZAySIYONNX5280U2J5bvvPcbfPO7f53b174G5Y5vd9wUopcYETMBvV9OEKf4zFNf5u/9mX+UJy+9zNvvfJNkkfH+4TXuD3a5cPGKj9d2MN1aZ1WseP/eDi4ZcGVrG51njMcZh/fvYOuCbKBZHOyT6oRzco5/8Kf/8Y8cc48GXfy2L0Xhi3IXqyWr/z9z//Wr25ql92G/N8z4xRV3PvlUnVSpM9lkd5NiMEGLIGXDNgkYEmzDcLjw3+ALw74VDfjCN4YNGLYh0LQtWIRIyZSaHdhsdnV1d6UTdw4rf2mmN/rinWufU02pNiGZrp6FvU+dtTb2Wd9a8xtzvGM8z+9pd6TAO0GuFWVZYa2h27X44FC6oJ7kOGMZBotUKRokJf6m+J04Rr9kQiJFSqSoCo0eO6jpZIbWOT6kNN+6yMiynCzTdNbiBocZBq5WK7x3I43LkZc1k7xIhdhZsjwHBEJFhq7FWkshc2SWkekMIdP8MqXv7uFMWvg57zi4cch8bw8hNGrc9h7duMXP/cKfQ4j
I08f36ZodFyfPuHWvoMgLbt96jXe+9gHvffgt6ukC612SqVlH1/UURQKLGGPY20uJwSJ4vB1ou5Zt22JdUnpkOqOoa3bbLZkuEAicSwu/7XZD36XCrIvspUkl0ylWOssLylFHrLMkr4vB400k5BlSKHKhyRBM6xk+eLp2zWl/Rt8PFHVJ03dsW0WeSRbTGf1gcDZQZtkY26TAe5yT2Cwt2QbjQPq0RBMK4RVegjMBMwY1xlELK6VC/2tsev9NXyk1INHXEsg8xe1E6RPTWcrkUsO/TAkOo9LhOpY+xkhUAeHDl4GVL6+ID2mkcF1000go0ro0f+2iQx3N0aLHbDZMwoahrTheHHJzmmGCpzeOjTHp1DWpGNonXJx8RsSOCoAv1RPXX9P1kuh6HpL6PI998Edkdz+iny+wzZYKgXIeJUfhlXMg0oM1uoAetbIuOEwUGOdx2x0RydNnpzSrhovW4fMlh4e3Oe06rlYN0xgxmabRBaUymPUls6ObXO0tWJWazBmqrWCuFb89WP54dclxnvNBVbF1HVud8cQ5ZBf4MBQsdM79wdPkJW+88eu8dfdX+PjRb/Nbv/V/ojv9BDBEIqO8/ydsCum5EylExrfu/TzH5V2qbM7N/dfo5ud8dvF97t66yzvvvUehNasnj8mqCtttcX1L53oeOctH3/iAt997GzsM5FVNOa2TwYoI0WObV0OcfnoEewzIyJdU/QjeefquTakOgqSnFQl4XRQldV0Qo2ToOy4uL8FYdJays3SW6EkhgpCCIh91KEEgtCTPcup6Sm8SF1eoJDFaNx2ZHFLuVgh4BC4kCVPXNekmdjY97UNkMD0hehL6UhKtwTmL0oJpPfnSCRWT1rcoiqSokAlbWOQZSklM14PQtLuGyXzJbG+fd7/2Pnt7ezx78oBuu2E6m/L8+VMODo/54INv8/pbb7PcP+JqtWK7uUIryeNHD2m3Ww6PD9F5QZSKTOVMqxoRA9ZptrstgzNIlZIhtNaYfgAEu2bLcV2Nc8fUeTnnURrC0Kc3ig8ImZONtuWiLFMxQVKUIyVOJ3PE4Czg6LoWKTVZUVDVe8SYFp5X52f44JiUBXVVcLHdJieOc1RZTq41RZ5h6pIqTzB3Ykyw+Bggc9TVFO09zoGIBpcJnE0PB1yaM8ufvQuYawZquA7ufWnzTB2fFOJL6dv17gyINh1ZY0jz8hACUvqXkPaXfz8QfYoDupbTxeAgQJlliDzDacEsatx24OtlTawySiXZ+oGFyFA+MNElQSuGXHK1fcz69JNxfHlNJf4SPB6jIIzsAKRE6ZI8n1DXC6azAyazfQ5uvEk+O+ZHjx5y9uQLdutztHEpsyw4EA4XHVJJhvVAlRUpNVtKrq5WXF2usU3P1XpHyKfkt+6xODoizgq+9sbXeG49W5XjgwUfcbqkFwKbSXrbUlY5OYKTaDmVgXo24RemJU/PTvn9uGQxmXOk0wNOC8FVYzifSnYCVhcN70wKXptNef+dv8RicYf/5B//fS6f/j6jTDyxkK9JYFwrGOD14zd449ZHdJsdbX6G2HXcjwnRAQAAhVdJREFU/+H3OXn+lHu33kTninw6pW0Gmt5STRf8rf/uv0NWT/G9Qeearm0o8/R+dDawvHkDN/SI4KnrV+8pfmrRDTHiYxhxg6NV0ifborURpfJxrifJy4KimFCVsxQe2exo+obBJqybCgGlr6MM0rG0yHOwAh8kZZFTFRVSQpErXDBYN9D3O2JU1EUJMeJdwAWYzhe0bctqvUJKRVWXTOazxEjIJLFJpH0pBX4cb9STKVmRp4XJKJ0So2F76LukvCBy+9YdqrJGKsVkMqOazNJDx1ryvOLu62+yf7CP7Tucs9x74+3E350uyIqKvmspM8n0xjHb7ZYbxzcoXn8dJWHvySHPnr3AuQHvSrIsPVik1BA8eV3Q9h1ZNWXbtZRZPkpo0tGv2TVM6hl928GoHcyypGUu85KsqACwoypDK53mryoVeiE
0VZEjlMJ5R9tsqaInywscHhssq80aEz2PTjvKQpHneZrnySxpdbOMWVUzqXImZU6eZePxVOJiSA/UaqAs67T8sxGtBrRMtnBCeKl1/VlfCSZ+3e2G8WNjnyRSR/vyxJ3ezcQYU0y7S0sqEZLZ41/pcq93OsElOVpIScFy3B2goMNgqgLXt0Tv+fDwNZZasAsJiH/ebCCkh2evM97+zq9yfHmP86PXkoHD9gymR6sCLfW4b6mYTvbSvVvVTCdLJvUCRI6PKX7LhcDpruX49vu0+oCwXtNdPGPYnmHbS1ZnL+jWZ/S7DV2fZJn9rkEogcsUsaxRqiI7PKIvNG5Z0s8rqsWM7z7/hH66x/zgNmU2wZnIeb1AzKe8W5c8PD1lXeeslke8VRfMzp7zI7Plu+Qs79wltC0/vlrx/qLkG8WUH/mB3VQxdxZlB2LIeXzesJOSPAQW1bv8xd/4n/Mf/0f/a7rL7yNieKnR+wmpXows6z32FzfYnx8kyp+0WOO4feMdfvXX/ypQ0GwaNtsdm/WGPCswHm6+/hr1bALecX7+nHy2RBUVWiQBQbu+5OzRFyipuPH+t37qPfdTi24cj4ND17PdNfRDh3MTtJYMQ4dSZox4iZRlTT2bMa0WRB+SPCvTCOlQ0hFkAOlHuIpGKkVVCmRZ460n1xXdqC1UusRbR1lURCHwLrmXBuPxo4tsNp2yrWv29pcoBMb6pIGVimBiUlSMxVZKnWbIPiJUhhSOqiyIEcq8ZOg6gtSgFMv5gjfefIPF/gHb9Ybtdo2QkrwoUSrDuQFcJNcF5aIeF0mKSEgPBGvRSqFkgbMp1Upnmq7vWMxmvPPW2zx68JCh3SJiz3Z3XYDS+MV7jzMDfd9iuo5ca8QY0Q0w9EPq4lUq0lVZojPJfDqnriZolROix9qBwZjxqe8JdsDGgBUalWUInVOVFWVZpqWZBGccQQhiVGy7JBNsBosQHc47ejtQZSknrioKZnVFnRdkOk9GCaWIIrEmlE5ysrwoKPOSXGVonVNonZaX3iWb98/4iiEQhUTKQAgiYUFD8vMHQtI/x0AgJIWO1NcT4CSBixIRUzaclOql4+lLSWjCNYbgiCHwMrgzRgqZLNJVUSN1QV7lrHxHLQv2VMbOtMjx5z84j7x9m9fe/iazt4vEvhDQO4tzo3tL6VFrHZNEMjoYT0/nXYcP6T5r+paN62iC4HS7Jcicpy6gVMVuEGiv6AfPs4dP8ThklRGLnPz4DbI8w2Epjm4y27vNnIynTz5G7ZfI2zVv3XuXZ1884FnRMxxEvnbrNV4fCv7Pn38Plx/xYHKX73zzIy5ePONTf8V5X7B3tM9bYeBj33PuIn/34ICi2vIjLdmWkV8uZrxYbzjx8FE5o9WKJkayGDmUkrVxTPff4+d+7X/E7/yj/yWxO0+n6Z+QcQW0zDla3Eb6mLTyCC5Pzzk/veSjb/4yt955n27XMAyew6ND7ty9S5SKzeqSJz/+Y/YP9wkBLk9OyDYt1d4ezXrD4/tf8ODzT1HAcrnHr73inntF0U2/dV3PerWlaTrmc4P3kr4bCN6lZU6UqKogzwuyPMf7gNISpdOLdc5goyGKJK4XuiCSY4YWrQWZrglAnmd0fYvWAakLdmPuGAhsZ2j6ljzL8M6z22zItWK5WLJeb7HREnwgkwmk41uP8Q4lcnxIi6BrglRVTijrmqHvMNYxGEtVFyz29vnogw/ZPzhivd6M89EsZb7FSFFEotRIEcejokIXRZKgCJlyO4VOXbO1NO0OYy1FUY6nhMDd23f49V//df74e9+l211hTU9AUM4WrJvneJu6v75rk2idWQIJqZS8oTPFZtvgnCHXiklVUlc19XTGZLbPYrHPpMgpMsVmu+HZ80c0fY+P12kWgI9EdR0jk8wtSkjKvKQxHbPJhMYbLprkPPTekmcFzlnWmw1SJltspjVFVpCrLKH6dEamNLlMBphMp65KqKR1zZRKNmOhCKS8uJ/1FWLKr0u
nn5jkVfJ6XJAyi4kREb6cLcSXhtRRIRDESyDSS4n/tbKB8NJi/DKheZy36kyycR6zbXjv9UPmRYkxAyYGVuttisYSEoSkdZ4hl0QdGYAQM54MhlwVZDkMYxyTRWMEtL2hEGCkphssWmQMucS5QCMF97uelWnpO8Ou61kUEFtDVIYnLz6n354g7xygI8Q8Mr13D3Vwm+n8iJtVThcC56bD1XN+/v33efj4Iadtx++dPuA7H3yTm7bjB8MpO7tD37zJX66+zp+4LV+bZ/z3bu7xYJnzf7j8gm8sj7ija95TJf/X0wesc8UPhOc3bt6gOXvCE+N5PXp+8eiIf9E3nGcFt4XCt4alklRacD44TBC8dvsXePj1v8aTP/oPIDpUfDlVQCB468bXefPw6/StQciWYHu+9y/+gMnyNoP1mN5w6803UFIiQqBdr1hfPKGsa1YnT/nRH/4eg9d84xd/Faczrs4vkEpwdPcWB3duMp3NsGZ45T33CvVC+qr7wXC5WrNab1guFlRlgXPQDwPebtB5ybysUTJtuGNwBOdSOq4zNN0GGxxCCLKsRMSCPJeEkBNjgmAYY7Eh4kMkOoUdzFhkcqQQBB8ILnC+usB6yyTXWGdp2wGtc7RzKJUxm08xg6Vtd+P2Ns2Rtcrohx7nPdPZhExqnBA0/UCeF9STKd/61nf41re/jRAS65IBY7fdJpSlkvR9SEuoskArnbpTJbHjm8Nag7MWaweccWipsCg2mw0A00lNWVV89NGH3Lt3j9//vX/Oo0dfcHF+jsgL6vmCYbBkRcV6vWJST5IbTCbhfRglYYok35/NZpRFmuFW5SQ57SYTlns3IFimy0OKsuLho/sYa3CjNlnI5FazZmC0/zOfTgjWoUNC+R3O93DesW4b1rs1nenJkEzKmsFarHV0xgItkqTTzoRESkFGSiRORpdR0C7SguP6TRBIp6if+XVtPWfscKUYRw7X9NZxIeXDuJiyY3erUFGOsVNfnaNey5QSdB6uC24YZ67XhRekiiymFbO9JXmhWVYTiryka3cIJUEXBBKZyzrH5WbDH25P+Ms336H1gqwQ2ACbwaKz9DX0gFMCp0vW/cCZs0zziq3tcEPHRfTcb9dMJyXTTNKzI3QrPr3/Q2K0HPoeVUFVHeJiwNUld+6+wToGrg6XxBv3+G+8/i32y4Ifrp/xZLPiLX1AyCout4/wyvPZ9pS/eecDfv/Fmn8xbPj++SP+Z/c+5IvHD/hiGLhvez5Y7vPfj4Er22PigJ7P+XN7U86DQSjLcRH5927f4nt6BaIkE5qfI+cPmgYjM35tb8kZlgxFjSH0BqVqvvbRv82TL36XuLmfIOfjiUOKjA9f/2XeffeXqYuaoig5eXrF7bc+4tf/9t/D9i2zxZzJYo7pBtxgkqbd9uzW57y4POOHDx4g9BT9+WeI4LlanfP8yWMuzk85PT8jAF3b8m/9t//uT73lXlF0k8c5WMfl5Yqzswv2FnsIIV+GL/ZdjzSeampTFphPR2xrUrKDs5G+G+jtgM5KhmFHnGqybI4UE4g1Tevoe0NvDEQwJnXRLga83abstODQUtLstvRdw0ZEhq5DaUVdlhwsFykBFzi7OkcLlQA7wVNkORbL0AfyLENJwWAHrPPM5lP2lvu8/fY7fPSNjxBC0I1c2zwvKKuKvh/YrNf0fU+W51RVjSBFqkepMENPnuV4Z+n7lKphjU1gGKnSph/BbrfDWcNsPuPo8Ii/8tf/Br/1m/8Zv3f5W1jrkkpB5nTtDq01CInzjrqu6fpmjHxhlN/JpG9WivlsRlnPmE6nLObLNP9VCq1z9g9u0g89Z+endMMwzo8FWiRZjDU9WaZpB0twJtmb9bgkRSBDTNKy4AhSYI1Jc9txSediUi4YPzCMoYsiiC8XUl+JEmcsYOkfkfhnoNO9jtS8lo6FEJAq4VZkSOAmcf2/60UVqdsNJPyoEOl7Op5pxyjvBEoWY9bWT4TXj0PGXFkGY5lkJTc
We7jBs2tbmvWaGCxIgRjt2zFEwuWKf/7JJ3z74BZ3R3BMkAVGCjYuIVBVFpBKYXuDyku+Xsz57OKSx8KgaoUfBt453ON7T36M8o5C5WT2nMmxoj9dsYmOxWtvs1pdYEVLPFiQ33uTm0VGVwRmVY7drfnBk1Ma33LTR2ocs80Zb5geJSV3uMI9+YQ9s6XM4ThbUDcN87xgNp/wpO8ZLLxZH/D72xf8id/wB2eW//HiNv/h6gm/31/yubf8uwc3ub11dMIgdct39maEqSTKghBh0kceDFs+0BVhWhKj5yZvce+dv8jj7z4hYF8qOA5md/jam79AWU0ZtmuIFms6Fssl+zeP2V2cE4JHEilGaaqxgabr+eyTL2h7uPPau3z82Wf8P/9f/w+mVcWkKDk6PuaqaXh08oIYSdySV1w/fbwAEAXeB1abLSenF+wt52gtKPIcLTPyrCLIdFsF6/DCYswwRvkotCoJQbNer8iyyK2bbzCtD5OlMgiUSvNaCXjr6Ic+SWuCGBMGDMPQw1j8N9sV1iS773I+YzFLS66+7xAxsttssf2Q4tFDHNUIEqULQkw3pIsRjUwMgM4wf23BL/7yr+CGnvV2O4JyFEg4PTkhxsjQ9YmN4BxRplRUPW7+u6FnUk1QAppdQ1Gm0UiuM2Sej1lyktl0koopCfJz8/iQd99/nz/8o++y2TYEKdBSU+QFtijw3idXmR2Pjt6BTOMH7yO77ZYiy2i7nqyYIUSGGQYy55OtOSbIzcHhTXbNFmPMS9G8R+KtRaosfZ+Cx7pA8A7hk4X41mI/6UmdxYdAN/Q4b3EjQ0FCojeF6/l/SnpOmslrgWjgGlMSx27vS7bTz36mC3xpbBi73hgiUQSiTB0q1zyGGIk+jN9DP8aiw5e63GvRlhiXOfCy0Io/VXQFRGkwdov1hlKV6MwR4g6fZYDEj4oQ55I8rWh2XD17zP/+D3+bf/fDb7FXVUgROJ6WzG3gxMGtcsL3L59ze77HxgU+257z3o09DteX/MBccWM5w3U9r00F29WKk6eXFGrGDbHgE/OYUGTkueStN17ns/6SerHPDZUzcYJqu+Io7PDxjO3Fc6wfyKViqwv0tmVfO5SCZWkwV+e85nriquDmfEV3fsUsBrabKevlEa/NjhmC4MIMfL2ouEeFCuB1yTTz7IhctoYrM+cfmBX3qp7/SVkw6xtcPlDoCTenJa50DDZQxAyiZCFnfOPbf5snP/7/EHdPQUAuK/6tb/83ubl/k9Wzh6yeP+H0xXPazvFrf/PfoW92ODOQaUW725JlBSEEmrZhcXSH7/z6La4uT/nkT77H/t5eAnah+at/7W/w9W9+yHf/4Hf43/79f59t03Aw33/l/faKmW7kOjnTGsPVbsXlOvn095Z7FFojY4koNEWRIUTEO5s4CMGR64wqr8nUBC1btKzwVoBPb8Wt7aBpSFJ5NWp2I7vdBusCUmm8t9ihx1iDNQPWWEAkFm+E3XZHURUoqVLBdhadZRR5waZpKIqSqiyJPmCVIdOa6CNCC+xgODw85M/96q9SVQXroaczhtVqhbOGajJhMOm/610AIRMq0SZ5mvMOXeSICLYfyLOMECM6JoC6miiGtk2hjFISXImoJCGmB8pms+XOnTu8/8E3+Jd/8Ac4azDe4oPHWDfKKyVt06CVorcmwVZCpLc9UhQYa2m7lrJsGMwE1QJljSAyBIdgQllV1GWFHQbWu11SNYiCup5jncVZy2wyQ9WKtu1ouybNk7VimVds5AYvJbKoaK3CDk06ThMR/nojz3WNTTKdl0e7+Kdq68iXTWv9f42S+G/2+hJeEsc81JhmsxFEkMnem+YLScKAuo6Ivf4HXxbb60HFONP90pnAV78JKRoGhLDUWRqVeem43K642qwSjIQRLiTTkm/oB7JMMTl5xiY6/ne7jm/ce53ZckIVM97Z28O3LWvfcFNJfrC9ZD8vuJk7/ujyGVVW82425T9/9ohLK7nLks35fdr
dJU3ccrNcUsxmIOFuMeGWKhlsRd1bDk6f4zYNcnXCylo6G7k4O8day1mMPELROoeoC+azCdXyAKRGVxnaG0oXWW22TL1HXOXo6R5+/y4Pdcl+KagnE46LPaITHMeBXAv2BMxtWpznylBEzcPtloqK326uuFd6/kqxZJ9kbMp6yefNljwrOb71Bgf3vs35j55B9Nya3+KdOx/QXVxy+sXnxABlNefmazcAT7u+wvUtxWzx0rGqtGQyndD2FtN3CAGL/X3eEIrT03MuLy+5Oj/hT/7ljtXFBb/0S7/E+cUldT1/5T33Sp1ujKAyga4gyAHjGpp2RZ5L1GSBVinRQUuddGveY01KbiiKgvl8ymG3R28a+t5hhpYHDy+ZTRdUVQ1BEETS4wk0Inpms5Km7dntdmybhqEfXqoCpAjMqpLppEZrjfcBrRRm6OjaFqkzhEqOKCV10hNLgVLZ6PBSEALb7RYfIh9+9G3efPMtdrstw5Ai1ouR8LVZrdOMGUHTthRFQWeGEUGZ3oNaqsQkcIGd31JPZ/TDBjN26Y60FJtOJvTDkJgGbOnNgnpSc/P4iF/7jb/Mp5/f5+zkOVEkhYIaeas+OpxzxKjHNIIE+1BSE2KiMQkpsdayW10gZ47pZIrpG5zt0aM0azmbpQWf82x2G2xIwPiymuK8Y9d3HB3so5DJrNG3VOUEJSQH9QwXPM1uhw8pfn6wCXBzPfcnXh/RR1vQS73U9a7/GnoX/9U6/DO8fiK0cIzkiTKk0YCMI6A+mR6uRaCJLpHi5EeWD5EvA1ziuGp7uTTjZXV++d9Ktd4jY0uzfs7stXcwKmPINEH6tMMInt5agoB5VbDuDRPXMjk5Y9P2POi23FeSd47u8Q8efcwHi1v8YNjwy7Nb/IvNc7oi5y/W+/z26hl9yPjlbMrT3SWbvkcNkomPXITIPFiWQ8sdk3jCk3ZFDArOn7PebWnageFyzdX5JabrcIMlOPfy5y8gGY1ERGnFF3VNMamYHO1xcOOY+vgmpihxZUERMjIb6DrPygXyWiOqCj895KmuiMpTV5r9cpa43NHxYW458IG93NNhEPSsB8cuVFx5ySZa3iknLIuCzEmMyXj/m3+d//zT36awDe/f+wb7831KUZC9I4kucOtrH5BXJauT56zOXpCpdMKUKnkOwghA6podbZsQqftHt8jLKYMNOBcw3uFXa9ZXG7TKyXXBrJy88p57xXghOcSqScbeXsFsnqPKCMrifI9xBVplqJDjrcFybZ7oxiORpixyDvYXGN+z263pTIcuAp1d4UJPkZdkWY1EJ06D8RhjE4PXpTfvYAaiMwiyFD4p0rKjbRuEELSdS3HqvaGo04u2zpIXOT6kb5AU4aWUyceUPZbnBbfu3CYvcrIhObvsMNB0Hbu2STZjY1B5RhQJ4O6HnrwsqIqKpuvIioIYI2VZslmvkrStGxBKYrylLnKCrvBCYMcHR1bmiOiZ1DV5llHXBUpJuqGnqsqXD68YA8YYBAITbLoZnMNaw2I2o+9a/Fjcje7J8oyu2xGcRWtNCJ7dbkOIAodEFzW5SakUxhmM6YhCUJU1CsFgPYd7e1hvaduGru/Sg0oo5sUEoTPaoaU1w1hnIziPM+ZlcYnXT6PrsvoyuEqQ0kH+1QTWn/U1nvb5ahd+TR1DJE3uS1uvGB8hMYwJzfIrj5Qx8Swmspr4Sid/rd69zpEQ4ylASUPbnfGHTx8T+wCDQTiDFDFljmlFHLGek0yjnadpdhz6wG695WvLGc3qHJHnmIOOxuy42PVov0Moheo8+80FNggmbCnPzgjtjoOYkW3XuKZh4SNVHCguzlnvVjzf9Tzc9VycX+JsAJ2R6ZxYzAjFHOkdDD3RDURnwXmCdeA9PgR2Zs12teHq+SkX08dc3LjB5HCP6mCf5f6S6iCy7jw9ULuMehgoes8LY2hFQJQZcTLnGQU7EahKzayekk0ij4PluPAc5XN0FXnsGx7FFuIN5hH2yhoqwb2
7H1Es7nDQXfErH/0F9pcH9FdrTLMieI81DfUsLarPnj1luTygKCsgpjTjekqIoIuCvarCWks1mTJf7nN4fJOzkxOKsmBzecm8H4gI5pPpOFb76dcrbMARlcNklrNcTtjfr5jNc6Z7E5bTJZmosC5gnaFtd+Q6xzhD17cjv0An+67OKMuMwWegPT5IlCqADILGutQttt1ADAJj00ItywuUlGgifbMlBo8dEpi8CekHbPoOaw1Ej5CS/eWS1XqD0JpkY/ZEBH3fIXVGlmU0bZu6YqlQWlFVGZen6e+ZzqYpacJ5XDlqNH3iIVRllYAuWU7f90QfybIsmQO+ogc2zqKEphQKoTSZLjCmR2cqJV2M+MTV1YpI5Nmz52w2a2L042tJm32BSOGVQjIMPZmSGDOQ6EoZVVmkzstbnB0IrkBPpgymSzpfaxBSUZYzkBJvTbIOZ5rW9BAdmIEsy9FZQW8M214zn83ZbHc0bYMYdZ8RAVImR513KenCObSPlFJijUuLSxlHov6XqaupynwppwJSJ/lnYZH2klh1/ZCICMZuR/ivdKjy5WT6OsYgRhIgXoZx1Ra+8sBJS2hechu+fO1fAZal32xDHLYU1U3UeAIxfqAfks5aSkmpNZ014CLzomTXtFRak60C5rznw8UBzekVX8tzllXLg37FjWpCzC+p12fkQpLrmr3nzyjDQCEz2DaI7Tltb3jSO85Pz9l0DRGNjBlucYM4Bk6Sl9hgwEcKAoW1dH2H61t83yKsRRibinBIOFYfArvdjqZp0E8LDm4cE954DVrDOi9R04rcTdFRsbUbNt4hMsk0RrLQc9FfgQrMlGBvuse5XHEZB0RVUk5vcFl52syxnCpy5Xjic07ZcSgje4slh7c/Yv78j6l1ybDbYZxBVyVXJ6c8/+QTzsvHrC7O+fgH3+fNt99BZwpbVxRFTVQZxXRGPp1j+oGJHF2XxuC8pZzVPPn0U3a7Dfdeu8ft4Hjx7OmfsoD/F1+vdKTlmaCqNLNZwWJRM59N2Nvb48b+TbSoaVtH220ZuuZlAXQRiqKAESKspGBaT1G5wEWLsRHbl1iT0Q0dXbehHyx9143H+YAWmqgkPjikiDjvECGg8gxEpGnT3FGLFLEhhKCaTIFEyMqyjF3bJf2kd3TDQClT6m2CkyTA+cnpC569OCUrMvww0O82L8Mkb9++y8nJyXgsdzibdMmF1mTjjHRalwwmvbmqukLqHB+hruuUuICmKAvKIqdpWprNhlu3brLebplPKoy3XK3WeD8qH6x/GcWNlAgfMNHTdi2TshwVDMlKWk+mZFqRK4mS4J2ha3coAb2xGG/IVI5UGXmeE4jkeYGxhlwqejsw+PSwElJTj3xhgqMuCrq2SZ12DBRK4aXADAlwkoVIsHa0WDu0EGOeVSrOYXQDvSw218UtBFQICekZf/bd7rU87PoSIqkQIkk+Fq+lZCLhKiVJsys8RAleBmRI91+K8x5lYmNgovhToHbxX/QvwrNbf04xP+Zqm7pIFT25UBADwxibpXRGLiVNswMkpRSsL1dMtEK5K5p2S1XmXApFNTRMi4p11Ay7DV4KTkVGv1njg2WLxG8ams2a6ANalrjJEdniBiZEjMqIQpFHwUxqGiLTfEIdPLtgabQhFCViOkeZAT/0xKFDmAG6JiUTJPoMQiV98MnzZxih6SeH+HbDgR04jJLOwSUQCsVUTJhpgTcprmo/y8llpLSWS7ND4Sj6AdFY7rtHrBZTjo9uMpOR+7Rc+R2zfI9Cldy5+R5cPuL88VP6suXGG2/z2r03ufmmIQRBu75kvd1hjOHi+TMOj45YHBwSRVJ/1NMZcvQFyDGuyXtHu9lxeX7K0+cv+L3f/ef83Le/TZVn7O3vUU3qV95zr+50hSLLFEWZU1UVVVUxmU6YL5fU2RIzwMXlKRd9S9tssS6gy/qlhjVGRaYkZVWDcQxWs9sa2p3lWgkZvGO33dC2LUIk0ldRJibCYATGGebTSYo
5V5ooAs6kh4ISaeNcFAWlVojgKYuCOOZ1DdYmQlaMOGPoTU+uFUoKunbL/c+/4LV7r9FuNvR9S9d2nL94luaWfUehBCIvyLTGmJ4yK6jqCm8dN27dQOYFUjuU0qRxsSDPEtS6zDOsc3Rtg7WWh198TlEULPf2OTo+5smTZywODvnD736PYRiSJTj4a7c4mZA4b7HWJhlbCJS5JpMC7xzOGbKsRmWaQIKd73YblExsiggILdIDsWvpuxat9Ric6XDB0fYDSEVW1qOkTCQnoZJkmcaZPvExnCWqiNIZNRG8w8oUtCLH+TNSEbUkjvbqyFh4RzGwGAlpMgRU+NkX3HSJnyz+aUowMq6/HINcg3HStCQxF8Q4wg4KhAjXdNckhRv1u1KOhXwcCScVb7qkkC9nor47Z/PkXzLbewvyQ9rVJZu+wVuT5vJCYPp+5GYk9+Jqs8NFgSoKTlcXKYewdayHDmLA6YFTY2lMRwaEmJayMXiQOUrm7B2+yWS6YOs8p8aSOYtyjj4GdBQoDVsBDknQGfvVDOEDx0XFYHrObZvCX61H9TtcuyXMFtB3YAzRDSQepoEQWJ0+wec5r//qXyFOJ1y5ntWuRWnBUk4ovSA4TygU87Kk0Bm1ymjNQJHl1KpikeW0TUtnB6bRsK8Lzn3GNo/cnC/ZCXjab7m7f8yHv/q3uD05xhpPtdhjcXQ71YXBMts7oN4/xJqBZ599kYIUyhqVF3S9xRmfwE1C0G42aYEuJV2z43d/85/xyef3ya/Hi0VKUcmLV1vbX6nTFRJ0lobMdTlhUs0o8oo8L6mrGVWVFjpts2OzWWOcGxMgNFolr71UGdF6pNhj6LfIGMgzRT8Y2rZjvWvw3jOpqnHZlaUCn+cIIpmoX2odRfTsdjuqomRa12Ra0nYtzqUE0b5vMM6BzLAuaYet6VMhcpb1esV8OiXTmu1myyeffMwbb77FG/fucHV+DlKQVRMePXyAUi+4cXxEPZujsgwlBHVRMKlKXJ66pM3FJW3bppSGQrPb9jhnsdZgjOHk9IS2aajqGbdv3ODDb3wTlWVcXFyw2N/n5t27TBez0SIqxg5Rokbb6XqzGjWvHmcHmuBRIlLojMxHfBC4qChkwtflWcZgDFmWYUa9dDYGblrvkHa05pYVw2DItafrWvKiHheSQEhLuyzLRu10TAYI5/BhwMnA3qRCKIhZQ7QKH6FF0AqZmKljsY1iDA4MAek9yjoUkAmP/DMQwR5GiPf1PDoCMoiXyotkrY8/8SstfceRg5RjgX1psXhZxMeXnpgMkZcJ4akB/LLnvWbgDt0p/XBFdfwh5eFtzOk5wa3ohoF+6JIyRyQoZjsYfIBJPcF3BuE8c52hvKSOGUpKFKlTnoZx0S0E8/mMxeKI2cEhs+mc3eaKy+0VZrtFOIuJHi9jYnTkBbHv8aNmXWjNs+CQAjKteOfOu1RnF1w2G7bthpjnCF0grUVMQ5Jo9jt8swYzwDh62D57ysf/6X/Ie3/r71F+5y+wblbMT58xsy1OCQ7Lmroq0n9HpqJXVSWlABVFIvFJwbwqmWhN0Wx4uLkk21twrCec5FvQkmVRspflDE1P2/Qc63xUG0W090iRYcqSarbgYrXlxdMz9m7cYXKYk2U5wTM6EaFrG/quQ2XJFLVcLlmtr3jn3jt89J1vslxOuP/Jj/j93/0u//a/9z/9qffcK3W6UinyvGQ2XTKfHVKWcxQZwY+R6VJSljWT2ZLs6pzOrL+8oWPAe5Ng5FbSGUeeTeh7iQsDu93AarWj6zqm9YSqKLDWEIKja1vc0JNJODrcp6rrxHSwPZdFxtU6gTjWW4MbBmazGqUyui4xPsM4i73uKqKURB/ou6SpdWOg5nQ6pSgL3nn/69y8eYPf/c3fZrGYc+OXfpHnz8549PAB69UPEzS8rtlfzNBSstlu2O52rLe7l2/CENKgzlhL23aAZH//iA8++ibvvvMus/mEy8tLun7g6PiY199+i8/vPx5
nwwVlYTEGemPSqM8azJDm1VJpvDWUQuIBJyPWJwxk1xu0ql/CZ8K4LddKMbgB73qCUNgQCW7AekdZTcnLGuMtynv6rkEpzXSa0w9m5KhmyWQiAsKDco4Sh9aRSkWW8wo1y4nB45Rm6wJnnWFjPO56AjqOfoge6Ry5VkxFoBIRKf4MdLuj9jYNX9OHAolQJca5dBrnxZFOl8hjiJjm1PF61DCe28Y59TW/9bpQc11oY/qeAIR4zeMFRI7Ma5Z7t9nbv8Hx8Vtk3/rzfHz/Ps3T+2wvXjDsdujoIIKucjKdM51NUGqEc2dpcRx8oMiLJHFE4mUG5RRdlBjniLriZLPm4vKUOenBuMhrMus4Hwa81AhvaLIal02JUnNQT1j6gCgyojWcbq74F+2a6CO11iyrkvPdFuppMu14Sz80hGxJNp0jhgbft8R2B32HWV/x/X/4f2SvXbH96Fv4/RuUu0sWtuFDb3ifjDxGZkIyLfMx90/RmaQZ36un6f4JkauhI0THcqex2QW+rjiaLVivL9nEyLKasH90k6Ksxx+mI0aHNT2mbVhfXfLkySMmRcXtN9+gmC/IypoRU0gIETsMKKWYz+bsdpEbN27wrffe572vvc/RrRsEP2BcmnO/6vrpRVekbmc+22dW71NlC3JZEb2kbwdy0ZLl6cbKddJ9dl2HD+noG62l6wb6Plkni7xivd7SDwObESbjneNw/xDn0zGcGMhzxf58QV1PmNQTrDXsmpar1SWr9RozDHg7pNmqFOSTKh2Zncc7i/WRXdNQVElWRkjFNyqBc4HtdkMUgrzMIAZMb3j29AWzuuD1t1/n+dPnbDc7Dg/3ee2Nt9jtdjz4/HM+//xTPv/0E7qmpRs6+lGzq1Xi2BZ5wXyxx53bd/n577zN22+9y3S5YLvbcH55wuOnaWb93ocf8OZb77BrWp4/e8r3/ugP2G426bVrjXIBVKBpO1LCrkWHmOa7NiX1qjzH+jRSCFnAOI8NgnwEBOW5xth0JItC0O22CZoiVeqqVU8IgTyr6IaEvrTOJtLmy028Z1KXMIByEuEEla4gC1R55GiSM60KhFL0AS47h9w2dOcrghsLmRyLShBoBNM642iSMSsVVfazj+sRXxlzpKCB6yKY5tPX0xEhGGNvGLMHr5dmo+7hunCP/a6QItme41dm19c7O0FKBNGKrNjj4OhtDpf3uHn0JofHr1PVC4SQnDcDX/v6Ec9vv4++vKTfXSJ35yxkZIjQOcuiLNibTGjaBjN07C3mSFVz2bSsupYuaPLFIZ1z9JdnrHYbetaE3TbJPwl0XUOwJjGuqwkzXaH6jtWuQZQ1IQ5cSrgMHmVgUU+4eeMuJ01Hazo2wlPpkqmqMNFjBVjnEHlFFClCvQxL7NU55CX0HXHoCL3l6j/+j9D9QHzvfRoFTeM4ubzkn50+pRKWt5cT3p3s8/Zkj8PpnCrPyfNk6OmNoYuBSmqWOkc4z+XZM/Zv3uF2uWB2/HVu5Z69WUkMmqKq0snER5wd2F5ccnF+Stc0zGcLnLM0m20KNlA5AomzgbZpaNuWrCjTvsIHonF8/Wtvc3A44erkGcE7ttstXdu/8p57JcS8LCvqvCJXBXKEe1gT2G1a3ABFNSSPvQiU5YS6njMMHUOfnkhN1+NjyvDaNWkj3uy2FDksZjWhSnMtbz2ZFGQ6Y7nYoygnIBTrpqPvGi6vLhjaLcE5nOkTdEVLFILpGJvedQkw0w+Wpm2RWY6WklxJkrdBEzBsmgatFQf7B5RVxadffMZ0PuHrX3+X41u3EDrj0YOHrDYbLi+vOD66wa//xq/xV/7qX8EM5uUPYb1eEUKgridUZZUi1ssaISW7zYp2c8mnP/4Ttl3DfL5k7/CQsiiZL/bZbndsdw1PHj9iaBuGrmEySWYFSHlou6ZFCUWIFucG9EiR0rlOXTUpCNRYQ57nWASDSzejDzFl6SBHc1jKxRJSp3h1JLlOLNaiKFlv1iMKUzOZTOhHPbJ
SkiIrsKStrbOOIpPsVRMOFjUHezNUrtgOFrXpMFJy2jSYbU8U8qVhQEbItWB/XnPrYMb+vGJe/eyLbhy1pnF8QKSYl+tPpvv9ZdSvuO5c02NJjARB8dJtln4lzOV1hb3+u0g0M11QT464cfM9jm++ydHB6ywWt8hUCVLTup5Vb9kax7kJ7DxcIWmqBa5aIPeOqXNB7yxtFJybgeHyAjYWESWFVLjgYNfSNw19nlOvH9BniiyT+DJHb9Yom1CSMUaOihkn7QmNCBgJDRZdpZmvViW3qoqTk+fYbIJRksvesvMbqvqIxXTB1fqCnapQpWKRF6wvLwmTGWEuqBV0uyt6FxHHU8TQIrxFtBvc6oyw3WJ+8x9TR0P/wdcIsxpvHT5CLyTfbTb80eUV7xZT/trdt3lreUCmBFmWI5WgQhJdxFlLRPHO7bfY3ztGKkXW95RFohIGmZKvEelB6J3DdC2EyO3bd1j8ak3fDTSbHRcn5yzQlLN0Ou66DqXSjshaQwTK2ZTM5Wx3DScnz1hdXbHdbJlOpq+85366OUKk+G2lRlB5gGDBeIcTgaE16N2Oqq5BKIq8YDKZEyIY45AyQ8gC7wPtKAkTQrC3V2Osw1iBJqU65ALINJPJlOlin8EY2mbLMAwMQ4cZOiIC48NofEjQ88O9OYOLXFxdEkn63W3TYJ1nGAzCO6rlAiUExiUG8K5pSY2J5Oz0hGEY+Kf/+JSqKrlxdExV17z2xmtcnJ3z5NEjHj78jC8+/xG50uzvH1JMZuR1zWJ/L/EljGW7XXF59pzNZsOq2aXEXyWpJnNmiyW6qpjOZuw2SULjnOPps2eji6VKqgqlGUziF3dDDzGOlLM2LQ1HnVEIkRg8WkgQAhcjTT9Q1iEdnZA4Z5FKE0b4tpCKXOUM1hCVwjhDnqXjVhYS5rFtNqgsQYvqskrgIp8SH67nl8EG3OCJ3iGVJss1eVHiUOSFp8gthdbjkdpdC1KJIVAWmsV8wtHhgqP9CfN/jU3vv+nLj1Hwcjz6i3j99SYRGNdyL5nIYukeS9V2bIgR8ksFhBiXivJl0xtB5dR7N7l39yPefP1b3Dh6jdnkGDcu6Qbn6b2gHxzbIb1HWmtoW8PGerabFdI02N2G7ekzPjt7xtoZwnSPIGr2B0uVZZiiwF5siGUBWcZsMWPiB+7mE55vG04uL3Cnz+i7LXZSE2KklIrFzbvcm9XsmgbhJRfrK7ZRYoqKoDQvbCA7OKbKNLn3PF9d0KkJg9kxLXJmUrL1HU7n7HpPWdc4wCqBUxnlIvE/hNBk/QbbrkYuCcSyJO62mO99F7k3Jbz7JmQQdwbaSHQyBWhGeLbdsCwqDicTlIhMMo0UgmwyZTY9pJ4syXVOlJroPfV8SiZbut2OYrqHkAJvDb43hACyqMicZ68oWMwXrM5XPLn/lKAukOWEmGV4Apv1BtelvEXnDadPnnJ6ekqInosXzxhsx3a74fXXXqfI/2su0q6F3skJBcTUNUUfiNERXZJzeWeSfClTVFWZrLrCE6ImJxCEQbueuhAURcngBk5OBoKLmG6HCAEXHGVZYkdBsg8JaNMNA8b0EMEYQwyeSV1RZpqDvQUhClabK7IsY7vZ0jRtipaJkbbvEUVG33fkeZ66QZuiZkKMmD5xHlarS8pywpPHT3jrzTdZbzbkZcHxjRsgBFdXlzx58JDL5owff/97bNqW3jqmRU7TJ3JZrjW51gQhWe7tcfPua9TTKXlRIaVgu95QZAXT2QzTd6xXV6zXKz755EcI4cnGRFYzGLquQWfZuCn3hBBGB9o4L1Qpzbc3hlmZ0iK6vmfbtEwnFQSfOA1Yqqqmtw6tC4Ty4EViXUid2MTjZr0sKoz1DH2H94E8y1FS4L1D6cSQDS4Bd5zx2MFiekPbW1AKaw3eufTLJ4aDjIIgxqDy6FEqY1IXzGYTZtMZVfmzj2D33o8qg5QOSxTEkL7ul1KyyFe0uWmWm1xYcjz9jbHsLxvi1OVGIVg
evc0v//zf5O3Xv8X+8hAlFEOMtNbRGocMnl3XJRKbD/TnJ1xeXtLvNmxPnvDw0Wc8ePA5q3aDKCqmyyOyxT6TcoY5O2coay4n+5R7e+wFz8EQWa137KTCxJ5dd8WJBONAETmcTyDTnEdQKsPuLvj0xYOXVLP9+YLX999ktdpifOD88oxtrhFZgYw5xzrnTj3jyrZ0oWUbanQ9ZZ7lrM/PsfMZvihYZhmX56e4akK22KNoScxrfUCUGQGLms5wXYtcb/C7K+If/hGz2zOY1ezcgDCRwgVuFiVvLQ8pdYYjIqSkM5a6KJmWE/b37xFkYm//4JMH7AYoS8UHB5Jc9gybFQfVDDMMdE2LaRpCTHjYq5MziqJgspghs5xI4iJnWZYIgjbRBvvNmjLPQArOzk64/8XnmL6lyBXWG3a7Fb/9O1+w2W34X/yv/v5PvedeMV5IYYo+JuTiYFy6IUcRv3MDuRAIleDIVVGTa0WWaQYfqKuK6XSO94Hz8+dsVpd0fU/XCXY7S9c2ODOQqzRWqOuaqp5junZcODU0bYOzKXAx04rFdMJiln5tu4G27/Hesd6khdxgUwYZShKdJWrB0A+pY9cpoVgJgfOB3iYWQ6YyVpcXNLsNL16cIIRgtbrk6uqSuqoYjKGeTSgmFV5Af3rK6dMn7LYJSBNJke1vvPkmi/1D+ralH3psCJSVYzFfEHxICgLrqGrJ5dUK7z2z2YyL0+eousIHGEwigfkYcD5l04XgX9L+5XXnOHZUXdehs4IyS8GVUgkmRYUjJX3Qtzgky4ND+iGNKFz0DGZI9m2tE3lNSPIixxiDMx3GdJR1jR0MXd9R1RPKvKA3A1me0XSW06sVDsd0mDEEz6brWe8a2q4jeo/7CtcAItH7MUIljUWGVwOZ/o1fSb0gCIyZaH4cvYpIkKOdN5DcdOOcJEFw0lgsxDS/FSJ+RZUgCFGwt7jLX/3Vv8ve8jV8b3jx/JTWGK6coe8dzeUVD04f4IwnDB1ZgAcPPme3W7NdrdmtL3C+SyGeeU5UGV302MEgZc9RNeVs6PHmHPv8EZel5nQyRQPaOQ6mc+RsnyfrC2LfE7odzycVXiik0iwnBUf5MZftBlEtCFpz0W442a6JaGaTKTfdhKvVFU3hCL7neZVULkeHN4i7NVsEPi8x5ZzqVknndwStaIua2ZtvsutaTPQUswWmafGqRk3nxPYKbRqEzCGf4bMctudM/uQL9M99wK61yL6n9JbDyZx+GMjKCYXOUVlOWWTUxQybzfjxScdsmbFuelaxQE9zLpodj16cMwxnlKpg73ZacDeXKzZXl2RVwdmzFzz44ce88e67LI5vsDzS7FYtup4wnU+pZhNiA2VZ4/uWrMiRSoOSzBZTdqHn6++/w8OHX/Dd737Otm1TXNUrrldTxhAEHxk6QyM7+tzgfUff7yh1hswyhk4DCeSiVY7SmuksY7Y8YFLNsIPFmoFm29IOjtVVw26b5rN4h5eaaT2lqqdESC6W3QZnLM45jLOIGFjM5hwd7JNnGattQ9sNaCXp+oG263EuWYcjIsFCBGiVipXSGV2XbK9FnjEMFmcs07pGiMhkUtP3PQ8e3KcqS/aXS4aySvPb3Q4hBH3XMVjLwfENkHJ8TdvE3w0BGyIxBDbbDd5HZvM51nicC+wfHBJ9YLFc0g+WzXbL1cUF07rmIsaXUUjWJduvFprtYBj6EYr8Us+URgd5VhBDOvabYaCoajzp4ejGGaUd476d95yfX1AWJT2QRYsJlrbryPKC4COzumIxibRSsOu6pOnNcrQu2LVX9ENHVU/RUuGsZ+cc692Gk4tzpvMZKs9pXeBi2zL0SaoXwzX0O7EKht6wXTesao0d2jT7/Blf14oEIcKXxXQEmRMCYXzIvUz+JYK8fmOl2KmkUkh/Jo1VFEVR8dbrv8oPP3uBlycMxhKCZLcbeGpadoPhuCopuoGTdsPQ7dARMmPZnJ4xCA9FBjYitaKqJ8i8Yq5L2t0
Vbb/mUVal4pmV5N2Wy53F7Wb4rMATeOwalJJErdlb7mF2G16cnuOqGdQzvOhYzeboImc5mXFclPQfn7DNNBSabbelKyTl6/e4JQueP/sMrxSuyLnYrTk6PkL2LRsh2E0qptND8mef0eU5/XTKdJLTdFtCPcfUC6pJQ29C6qrNFhtygpNQaITOyMqS7umaMHuAWhTcKSoymbGX19yZ76NHlbMQCpVPEfPbPDpvWF+co5+fs5Ow215hz6+Y7e0zVVfI/oy7d98mKyuyrBjHGpLd5SXPHj9itbpCKEU9m6LmCmccq9VuJPwZut0OawfKSY3rO66enXN68pxml0wmi8WUd959h9OzDZ998QXPnj975T33iplumuUO3cBmswMf0ZnAhSGNDKQnRHBuIHaBED31ZIlWkulsxnyxJFMFRhm4gG3rWK22rMZUhqosaFtPnpVM6ynD4Gi7FjOk7tU4g3UOQmBSVwil2XUDZtOOw22ZcIKMci0Eznu01qjR4utHHaYZTLLnIlA6oyg0PiSwyHSUo23Wa06ev+DGzRtMv/Vtbt25w5NHj1gsFqxXK5ou6Yl3u6sxGDKnrGuUFFyt1wxDR9s2aZYsdiz3lgkfaUyaMU0nbLdb/vhP/hhdlKyvrpACirKEAMYaTN+n2HhhUtpuTDv01FHJ0Y4qcSFRsayLIB2i7xEisukH1F5y8iml6QeLUpKhb5F6CkqjMokKEILDWYOWks5aJnmCwecBuq4hL2vqakKel3RtQ9s0KKkYBoeQiovLLX23w6mn5NMJQeXsYsSENMO9po3FCD5G2nbg+fNTgmmpS43zfxYg5ow4yvT/pYiIkNi0YtSIwnXRJbEYYBwnpIdeUil8SVcQ0bO/eAMhlqx3W7yCs9Mdg3bI3qUja3Q8XJ/x2nRK5iPr8zO2Q4czLXkmkV1HtmsJpWTwEbNdkWVb9urXWU4mxGZHP6zxxnJWFeSzOXk246AouT3Z4/zygmfnp/g8J05qLjLJ/NYd5kJQesVCwMPLZ2yGHbqc0XlLKAsObtxiYhzt+pKVDvRZweAVTGtuvPEerW9Z2RY7rXjhPHfuvInrDU0GvY7c/c7Ps37xjLXK2FYTDr7+AeebHT7LqaVieHZKqGbo/QOsGRBtTxwcoqjIyhy92yCuOsppzq265Gg2QQZJcA6d5yk1O6voguD06SOeB4EVAX+15e6dYwrbYyYO4S3atRRZycHNm5SzKUqlLEel4PnDR9z//HNm9YT5/pJqMiEr0v6p6QbatmXAs11dImKgnNWs2jWPHz3gwaP79NsVNw73GPqG5XLBL/3Sr/CX/srf4Ic/+uErb7mfWnSVAOEjtrd0TZ9IPEiisETt8FKCFkSZwCzWp8yoqp5SFDmZAEYzw+n5is2mpWt7YnQoEZNMKXj2q4pMC5q2www9w9Dhg2dwDmMtmZKoLKcdPFfrS5QSxBjIVHrj5mOkurEmaSCFQEmRNKvWkVcVzlqCd2kJp5PRwfo0tiirCtsP/PiHP+Lg8ID2i4b9vQOOb99EK8mNm8es1yuKvCRYx+npKc5blFQEFyirAiEkzjqu1muss1Rywq7dIYRm/+CQqio5Oz9HCsnTx48wZqBrdmilqIuCbdNgjMPaARFByTIVg/SWH2dNydIsZY4SpDGPMyilMNGgtMRaz7bZUuT5uHnPaLodVVHQNw1ZMSFGS1EU+KDxIRC9xRnoRbI5SgJZXtB3O7IxlsdohbMmZbOlVXyaT/aO3gysNh2iqqEqCJlOCJjrAEYgBk/feU6Hns1qhRYRb9x/lTL5//MrRoEPX4IZ1ciJSI26+LLLFYwMCfmVfLd0z4eQQOaJqyDJ1ZTPPv2Yosg4Wa+YVnMuHj+hVYJlOeGG9zzfXPHp+XM0kblw7Lo13lk6GxDOoDJBjqBIKDmIcLo+SwAalXM4WxA3G57ttngf6KuKttnRNR2L45vcWS7RQiMHy+ex49I7cpnRKIEcOvZ7i163hOnAblLyMNagarK
64Hh2D396xk5bgo6siGzqiqPlDeYnj9kpScgKnneW28cLzOP7uMU+Wy+5uz9ntd0yZCXlckoVe9qyJhYlpXf0g6WcH+BOz5FZSVzvyEzPPCuYLpYY27AXapa6YC8rqHQJwTHJFfuzA2x1yA+vekLMKQqFt4HBNLirs/Se8Y5FrjisJhzvLVneukleFXibsuq6XcPDBw95fnrGzW/eoqzLpIbKMurZlHpScnp6yvRwj6IqyHwyCzVDx9n2jC43nGxP2FtOuLi84vT8kn/yj/8p7733Eb/+1/76K++3V7IXQghY4zC9wdWWDA3SAUPqvHSyPAYX8XbA7yJeCKp+QMueprU8ePyU84srCDYhGPsBFTwES5GlpNnT83N2bU+dK6y1bJoWMwxUuWI6mSOEwpkOJVMicESOvvSEL4wEPNezTkeRl4goyDKVAC0xFQGpI8TRKhwT8zQtmj1tvyOee7TKODt9wSeffcw7b7/NzTu32N/fSxbKsmQxnXJ+eUGWV7gxWrssC6qyoh8s3qUZ7Gy6ZLG/NyYNO9q24cEX9+n7ns3mkuAjSgpmszlus2YYegaTttep0fWIeG2xTfIrqRVSpteipCYKMGPhzVWOEJGu2SHEFIElLyeImMA5WEOMKvnJo6coSvq+RyiJVAoRI5nSo0ElEjxstiuKokRE0FmJtzYxCWJkf/8G55cX+OiQskioyRhfsguIabQwmrvwY9Bla3qiNUT3s+90v2rPiDGxdMVoWkCQtLrXRfc6HWIE2sjRBfglkzcQo0ApzdXljqvLc2ZVwdXFigsUUSnarmOQJxws96i0wrY91hsugyebTdBdj6Jj7RxDplBZRqYUB1lF13fsmh3oDFHWXMZANp0x2T9ED5bCOdrouVpWtGZAac0Cz7RZU5+/oM8K8tmEMAw8m00p7t0iz2tuFTMeP/gRm6ZF7tc4rThRULzxFjeInDz4fgL77EtWjWJ5710mq2c8XT0FFYlDxu39BU/NlqvO89q9NzmSa84qy66OfOfwHn+wa7FZxUF2gxenK2xdIa6uCKoEa7A6sBkS82PqMvKt5fBmjYqwyDMOJ3vURYHVezxzFZ2z9M+fcmOi0cYxCw2XXzxmpQvasxW/9Bd+gWpSsHfzBnldEUPEG4MfDEPXs9kkeanUGmN72t2WqCRRBGQm2F1dUEwLJnuLtDvZbblaX3HWnWInjne/8wHN0w5rC958/+ss/uAHfPrjH/GLP/8rr7znXpkG7Kyj63p6OxBChRACHweUDKngap+gvyEHl2ypxlj6tsPbwMXljtPTE2RwdF1D03UMw4AICVZztKjZDQNnVyuqouBqu2Oz29K3LbnSlMWMIGTaqjvz8jhYVTW7tiUTYO1ACAEfkosK4YGYElPLHD/SxrSWKCkQwZMVGbEY4TnNLmEfpUxpDHXJ7/3+7zEMAzEGdrstt2/fYlKX5JniUitu3ryJ84kbMZlMOD8/Z75YELc7btw4JriUDCHzgtPnL7CDo2kbzk9fcLW64vjGDZ6/eEFVlom3EAPW9ImpIM3Lwsfo5hL4pLEl0cwgEJxNgu1Rwe9DGB1Whma3HcHlLUKllIwQJcZtqbNlKujWkmc5wZvRURcw1qKlJgSLC56hd+lzzuMF5FmR/h4zIKTg1u17vDh9QlAKpyQ2Jg6svF6evbR5XR/hAyGkH9GfBUNaouCKlx150uzycn6bWAppMSZJyb8xCKJOs90Yk4U4zXPTvyudMZkcImLH8/Ul88UeZ4PFVwV3ZnO27Y4zY8iE4t7ykNX6krNmRQyQ4Yl2QIdAMJ7gHdZHTieefDKhUjWTrGJeTvi839KoBP4v65qZc5izE7KnT3B7+wgiZ13P5XyOvHuXuciYbjecdA1mb4b1AYTDiYHynfeZSIlfXXBOhxE1TmmyWcndj76D9ZGzi8dYA5tqydt7N2m6NV3Y8XTn+IU3b9M//pwhTLg1LHi2+REH8iYf+hI3rKlKz2xS8o2b9/hnUmCiRB8
ssR7KTNNtG3qludKSw7zC9B3TWPD2jQO0ENRaY0XGj59ecN6uWG02TBRsTODNvSnrlWE33+PzxnHr+C5kOYXOUGUJCGzXYdodfujIMsE3v/ked27vM50UbFdXIBSTvkdoSd/vkBLs0NO1iqIqEZlgiB3lZMJvfOev8trRXf7v/7f/gE8+/5S3Pvw6f+Nv/x3+4f/lH3Bxvn3lHfdK9YJziW/rvSfgCTENolWWgDZhRNgpHVEyx9qAFGnO23U9J6dnBGsI3rHZ7jCDoe8aZIzouqK3gZPLK0SM9N7RdC0aKLKSqq4JUjH0Pc12g5QpE2w6nRJiIDqL05K+64hSgdLEawi5EPgRwi5eWjAFbdujtEZlCWjunKNpWhbLJYvpnFNzwfpqPUarS148P2F/b59PP/2Uu/fuYPqBy9UVk3rCepNwkzrLmc8XZFlBkQ3kec70IC2Xnj5/xhcPvqDQOVmZc7W6wnvL0LUUeY4QkqbrMGOHLGLEWAdElNSjNEkgkOS6GF9TJI4EKmcNeTVBq+xlcq1WCuc8TdNQlI6yTDNrNxYH07bk1STxKgqZHmYIHNe61XHBNEJeBEnFYpxhsANlUVEWNb3tKfKKrJrQuy+lCDEkuE0yxo4DkhBHIHha7Agi+J8kcP0sLhGvLbuMwvn0qPgqBCfCS2PEy48Fxsh28bLbvVYuSFGkMZqSuBDYRIMxA4PtyNSE+WKP4eIFxnY86VqmizmLaGHoWTuLmU4hRgrvqIyhtQN9WTAAOgZ0rtF5yX4/sBsMWIfdNDwtCvKjfcq8Rnc98eSEq+NDeuEQNmCUhVlJGZdElxMKRTCGtZbJoZlPuL3cx/QburogMrBqei7riuPlhOrEsRMNZfQ8Ny0f/vxHfO/T36HIKg77KcNmjWk8m9V97rU9D3/0XT7RX/DGvSPeXC64/eYxd7MVVekJ7COmO0LbInWOkIK8VKjBo5TC5zWXveebMpCrlHL8w1PDjy63VNmUm3szCiU5ffGQf3lxxvnZc0wUXOg53zhcsnnxlHhnjzzPEMHTd1v6ZoPpOpQKHN6YU1QeEXTKNmwb7NAT8KzOz4nCEdzA6iKpeKLwzOsZ/4O/8z9Ehchv/eZv8uLkHBV6fusf/yN+/s/9ef7Of+vvEPtXL4dfqdON1uOjQ5CeqFJGpJbkOpGOfLSYAJmIZFpRyBQX7q2j2fWYrqPKFA9OLrHDQN93WDMkmEW25HK9wRlDNt6gmdQordFFOtI5a+jaFmstZZERRCJo9WMIpRsGfACpM+zQQfRkukx21gjOu1H4Hog+sOsHiixQlo6iyCFTdH2LHSqct0gpmFQlEZGMDleX/M4//x32FgvqyYSiqrhx6xbRBwZj0blifX7JZFJT1TN8DGitiNExWMOTx49ZX11h7ICPyROvhKJpGrIiQ2nN0LUMQ0fwfuysUqfuPSihRnJKij0XMc11CQ6ixLgeKTWyTCMChEx4zODxImKtoMzrhNcbDHleMPQtQYwFNgJCpYI86rFTilqaeRMig1QjvCXSbbcE65hNFxRZQc+AzAt8MPgY09H8K9ZXEcckiThqWZVKVU6pPxM83cRXGI0OESASpcDHiPxKIYVr/Pi1SkGkbDgRiSEhxKKIhCAQ5AQEF6sVR4tjPttdsJjPmTYdz6uMxgzcrKasQsO5hqZrmSvNjIx1HAhZQZQBP4AdHPmkgsESK4lCst2taMefe53lzBrLo1LjJxV98AxuYJrD7GBB9J7e5ITQYPKcs8kccfc1qtmC133gi4c/INoJ+lAijOe5L8j3D3m9Lrn/4hN8MUNSIPqWm+98Ddu3PDr7BPnaEbiCpciYmpIHP37BtJ/w8ac/5ImDrN2SFTk3D2F3cspJGGjfesatv/4b/PzNe5zGKSa7xfdPr3AyQ/TPEFJB3BFF0raft5ZHJw8xu6esuMXF5JtQzamDoV9d8MNnL1g3F1yFJAfdy2a8uyjIt0/R+YK95V2KKid6g+3X9M2a3WrL6vI
Ka3r6fkeeTSin+6hM48zAdrNit1knJ6gzCCHoNhsmdcVrx69x9eyM6WLG48dPmdUTjJF8/+NPGdqWX/nzv8rdN7/+ynvuFZQx8DGF4kkNOovoLKJyhc50cngEhw8RTyAqQaEmKJnmrbvtFq0lV6t1yoMPDjsMaflUT2gGQ9sPY1w7oGQ6CseAEgrTNQghafsOFSMeiRaKpumwrifEFGaJynAhYLuOItfkWZ7AGN4xmIiKo+V9JHmlziaQa0VA07YdmUrshGnlUFpxdZlSIC7PT7HOE33gxbPnHB4dc7G6JFNpxmeMYbZYUhQlTdtSlyVt39FbS1529EPCSTo3WmjzahRhR4bBUYkMYxKBP3iHUMlN85JHKxPMJIzx9AkhqfBuwAU3coENoYdMa5TOEJCwjM6NygePNw47dKOkKeB2G5TO2VjLpJ7gbAJPRwKTqsKPhdM7Q0WZDBm+J8SIGwwNW/Iipygrcp0loNDIGhAjveFlUXv58JeQCQRq/MzPXjIWr38TAq7pYv7LeS7XcJvxeikL49oSPJbjlx+PTKs9RCjpe8PRUYk8t6yHNfv5Arnd4bzhrN9RlROy3kH0bLc7zLSknM/QqxWhWdMrSbOYo7Qml4J9MlpnuNSO6DrESBM73F9yuzdcuA6vFW5o2WSwmxWU1ZQ3Zgf0nz3kvu5xFsSgEFngQiluvvttZLCcPr9PO68I+QLXK7oYOdBzdtMKZVecnnWc7835ueOap1dPeSsojoZ9gp3zg+/9Ie3FJdOrLXs+8N6d17n99ut865sf8s7XP6TMMh4/f8Lles3u+59y67hn9vqb/KHSPClB+cBWF3wjh/eGgflyiUZQ+ojwKzabHc9DT1laFldnxP6c33l+QcxmbNQMMa3JhWG/XnAMLJXiaDFjNp8luqBpMX1Ls13z/OlzLi8u2WxX7LYNX/v6N7m92KOcTGjWq7QvGqWZPpICbmNgPlty/uI5JlgePrrP42enPHz0iNl8wouTK5pdy+nZOe+8+x5/+b/z937qPfdTi25Ke43kORSFRGfJCKEznbo5wpi4a4nCk6kcpEArcM6NUPKAdQNlLjkbevphSOjCLEuZZGZIG3MBMqR5mAdsm1ii3lqUEESZXGS9tQxdh1aA1Pgo0VIyDCk8TqjEDxDCEYIgapEK8/WxnJgIUmPXnuksMWqVZFLXDMNAXmRkuUYpgTMG6x3NZs2uadi0D3h0/wFvv/0m1jqWewtEofAxcH5+SjWd8Pjpc4amYbttxiTjiJLJ+y69R2VpITZ0A8YMtE2DdYYwpkUkkHZACI9U+bhFT6GfWZ4BAq0Loh2IRBTg7JCsucFRVpO0IBQCIeRIe/MpxdYl86l1hrIcg8FDWgw554je4bKUSKGzPD0QrSWEwHWgonGGMTcXnRfUZYU0Dc67l0yIyNjxysRdAJkcXVKmbne01f7sr/HrfQkau9aLiJefFfCvfqnj6Ip4jbH88lN2GMDtWMyXPN5ecSg0T4uSbV1wfNmy3lywOVjilOLIROzlmvN5gVWKECw39w9xtqfF4UNaOAqd4euKeTYldGt8CAwxsjU9n8p0grozvUG82vIwbjFCJoOM7bjoVxy/8ToHl8/phSdqS7t6wWk+R3QDr+/tsUCza7bkBZTWs94a8lnB+/t7/ODR94iHU/bnJdPOcrtRmAdn/Ob3HvJrb3ydHz96Cg+fMZ3VfOuD9/jWRx9w594d7r3+Ji4M2MFwPJ0yV5rptGa325E/f8BREPylyZTi9g3UG7c51Bn77ttMJyVKwmy5jzWG7332iH//n/wzXnz3d/j2nT0eCUFrPdpuIViqbsPB7dscDCuW0XH3xh6vvXWbss7xpsPu1vTNlu3VFY8fPuRitcV5x3y2z40799i/cZM8z8mkSkEFWZ4eplLigqUsSvaOjlC54umDx1gfuXfrNtOqpusbDvcW3H/4gD/+9D4/uP+A/80r7rif3ukqQVFpqlqTV5KsFGS5Is81Ul7PTAV
RBJSSlOWEMqshZtjeIESg63vqssANDus8gQTGcd7T9W2ypAqBHeeAOgq6Zpc29jHh/xKkRdH3Hd6lAipkmWaEUmK9S8sbrZLUzBps8COQO0XlBB/w40bdhUDfp8jlqpJMqhLvkx9byRRv7mIkVwpXCEzrUJnm/PQFz16cYK1jMknEtP2DfaazKc70tN3A89NT1qs1282G1XqdotuDI8tzJlWNiwEdA96muWnXNUndMCRTROKzihGafT039y+VCnF0QF0zElxwoCTBjqqJ4DFCUk7nKdmAJPJXetQlO5tm3yEZFoL3DCbFxcexs3bOI2VIy0kigxnI65oYHEqqpKG2Q+oCmx2zyZRCbxlGXGYUqYuNcuxyw/VSSoBKH0dJkK9cKfz/54rwUqjLT9TPlyX45R99OXK4/rgYP/blR6KE1uzIyhnN1RnzYkK2WtNGw6zQ5ELQrK/o6woZBItJTbHeMEw0Umg2g2Gyf8CBtfTNGjXJ2IjIC29Q0XM832O2G3jqGrocHB7rDSeu5fWjQ5ZXcOEbpIxE23O685yKLfNZyS2reHD/IX5WQe3I8gq5hr2jBUcH91idnPPYrHHzBVkW6Hbn5P0VtR5YuhkPTODW8jW+90e/iY6B735xn/7hCfuZ5NbBDDM0/NGf/CGff/4xB4cHlPWcD977gKHpCMHx4nnSPxdFnk6wOqdaX7JabxiqktVkwje/+S2GwXL646d0bcd/+tkZa1FiDo95MHj+6GJHzCd0zhD2blGomg+PjrjVXXBHDXzrm+9xfOcWWSZo1xcMmzVdu2O32XJ5dcnJ2Rqdl9y9e8De8Q2q6QwhBPViwbHOKK6u6IaEQM3wmL7n5Oljsizn/OyMrt2hteDwcEEUS56/eMZsNmXX7Li8Wr/ydvvpOt1cUk1z6mlJWefkZUZRKHSWOsWYwrBQUZEXJbPZgjpf4J2i767QWUoILvIxKrztEUjKPKdpW2IIKKVHv75DSU3XJ81hiDGNA/IsRco4i/eB6AxZOSUKnbovEdIsF9BCEn3EylSkyNLc0oWYOKLBj7ZfTaYzht6gtaKqKmzfg9JkWmKcpdSaWJYMTUtE0rYtWqX0iKHr+fzTTzk4POSLTz9hsVxgvWPXtOCTJ58YaXZblBq5BSHiQkIr9sYSncM7j7E9zvrRNadHR1SSI0XkyAYQL4+wwQeqokpLMx8QwaNi6t6Dd4Toxu7cIqKC6PHeIbROf49Osd5eCZwZkKQkYSFSGkX0ASUlmZKjJCoZTJxJJgofHUqpxCwOPuXJWcvxbI8+WHozvCxC6e5I885rCRZSJs7An4nhQiqi/9WMcckddY19/OrHu7aDbELIFQudsSol945vcrldc6Ykh2++xfT0gjaDSynwCg7mFeJizaUIrEtFIyNlWXF7UtE1O1Z+IKKIMqXfLqYTZq2g67dQgwqC7faKj11Hvb/k9aFi9fQJlzOFqypkMWEoFGo5595kxsb2uNDRdFd8MVwRzJwjNfDGYsajs4+p/YRjcYsb+V0Ob3zExfkZH3/+x7jBEHTObN2yeXrCRWOYVxnHezMEEWMMk0kKh+37npOzcz7++Md89MFHlJmiazvuvfYWq/Warut5fnYG3/8+r925x1pK9g8W/L//4Wd88vHnxOBR8z0eTO6R7d2lmlWcXp4yPz6krmp22yveKCruZYG3RMMst7x5vM/hjX2qScXQrBi2G7yzCBRFWbO3t8fJ2ZrZZM69199gulwSQhi5IoLJdJb08iGiioLV6pLdesUXP/g+s+UeR8eH/KN/8o84ef6cup4gM01vOi4uLui7ASlfzRP5qUX3GmBT1SVlmb+ExsgswVky0hiMCGVeU1cz6mIGQdFse5RWaBWAJDtr2448yxAxFdXgAz46rLFpHqYSy6DrUvhknucpydalTlYRycqSLM/SEkhKTNdjbYoFdzEdb0uVTAKC5LdOQY/Xx8GUt6ZUiVI6hc7VVQp2tKmIaCnJlBqPe6njdM7Rdn1
KonAWgPVqhXeWZrujaXa0Q49SOXmRwXgcz/L8ZSc+mIFcpUWh857gDM6Y9L2IkCuJlBoXEjdBjGaILLvOm0szRmNN8oOTFANKgvQRtBxhRIG+7aiqiohgMIZCpCLqbPr7lHIE5xGZpm1b4qh68N6nRWdZooBi5DF4axMTwttxYps299577DAgMsm8mkIMmJBGO1zvoIAgr+ejknjNrP2JnvJncylU+jJFAtT8l31F4ivF9frnIERMMe3iOp49zfllJui6NHMPznParJjoCYWAxreEoHh7OUMMPSduO0Y85dw9PqRZr5C2JRYxgecnUyaTKXtXl+mUR6DtWj4vI8X+nNdcTffihJPK4CY1Ds8wrJntHbN0N+ncGhN7ClFgtxs+aXeIsuT9O68Rn53zw8maqAeqTPHNg/epLBx1llpYdpdX/NNP/zNuzKa8PVuSbVfcyAr06QvWnzyB6KknJZMyS+nVQNt2tF2H0oqyKqmKkq7t+MEP/phJPePe7dt0fcfHn3wKQrLabKmrkvVuTVkUfO97D7l39zVu3brHjVv3+Hi94+zpCumvCHogTObkZBwKz7t3bvFW4VjogLt6TkZPXef4aOnbDbvTU5rNJj30lUYIzaSekGcZs+mM5fFh0pw7R/QenRVAAhr1Q4vQgmpS8/u/8zEyej6+f587d+8wnc0os5qyKLh9+yaD7/nt3/tdNttnKdrqFdcr1AsSpRRlUVBX14U3HUODECAVMqaspyKvUTIjzwusgbwoyDONVgJrXErolWkD39sB7w3N0OCdJ9MZ3lm8h90uxaqrccvdDd2XMTYStEoxNEIoorFY049D70TMT8uNZAeWMc1BVZbhQiTTGj9uoZVUacYpBX3fs20aQoCsyCAKzDCghEBLgcgUxhq2ZpWSc1WSaxnTISJ0u01K2VUqAdxNeg8qpbHO4OwAIUcKmcIiQ8D7kKRGIYxR30ltIURynkkJSiusTUu1vMhQSo863BQPLrROMdghAD7Zh0NKE06RQZosy7DWI8VAVpZoXSBjOhVYLD54hIy0XUOZFwgk1hqEiNR5jpQqLRxlmg0rpzBDD1JhnSWTSf419B0uGhASrbNk/fU+fb8FyBhHt2Ackxf40y3iz/T6cozwkyqFf/UPpnGC/MoT5ctpw/hQ9APF4hbb9Zb6xk2WZyc87RuO9o6YnT5FyI5nQXFw45jl054gHd3Q83ReML91Ex4/JpjA2nU8MgZVlhwc7TN7dsJVGDCVxkbP4FrqxYKZfo1Nc0Frd2SFIhOCs+2aOFUczG6zvxp43l5yIteIrCbTMCtz4rRmedmSBcNUHXL/ySPqxR4/9+5f5g/++F9wtnuI1BGVefYXUz6oSszFFaFp+MUP30wP8eDouoEwJpg4HyiKAukD682W+XRKVZY0Tcvecp8X55d8dv8pWa4SjyRuuH37DrNJUiz84i/9ItPJnMuLtKxeGUnQNa4sqef7qM6i85I3avjm8YQjGuzqhEfnz5HecHCwpFlt2AXPiwcPuDw/R8qkzx/6jrPLC9qmQRBG6lhHLFJzpLOMoW/p+o622eC9JSrJ8Y1jfvuf/RY2WL54dJ/pdI8//8u/zHxasFpf8Cc/eIhzjvliQZm/mhH9U4uut4G+9QQHmSrJs5JMJ20uQo1H2ohAo0U5CuwTxLmqCnSeo3RG03a0bY/3AV2kzsI7TwwghKLrurSwiZFMqXT0lBJrDFKpNHsMHq0KXEifCyEk5kOMaKnTAwpQOsc7iyR1tDGSYM0hpbsmHWuK2O6Hn9SW2lEfi5A4YxCjBVaIQN93hBjR0pGpDBn9aF5I2tNkDhVJPxw8UaqkHHAu2XcJKb1YSvTYIfoY0ul7LLSCtEBTKsNLg5SCoihwPgV4Kq1eNocxQJYV2KHDhUiIklJlRJ1ifJJsKyTX4Ph6gx0oqyRrIyiUznHOkSnFYHqcTBrsXKXXH7Qm1zmoDCkinTEomcwTcvw5OWuRCHKt0S6hNyPjA3/UF6c
jRiBXkrLKKKoClSm+NAn/7K6XGEYg5cL/l6sqRtzNVzpd8RPdrxjfEzJ6nO0J3rByW6bWcJULnmE5Eoq2azjLJKsu40Zd406fczbRbLs1F67htcMJw4srfJURM4nXkT4LFMs5pXEMdOgMlBJc7TouygnTgze512xod2ueNM9xdYFQJXsi586Ntzh/sqYODfO8YJpVXLQ91c1Dvl59h5NH93neW3b2AfL0Pm55m1uzQ1TsefPGEbfqBWK34VDC0+2aZt0Q7YAZLNN5zWw6xQwD62FA64zFco61lt70GGPTPiIELi/Puf/whOViSQiOy6sVBweHNM2ON9+4y9mLZzx99JjNpsNZw827t9hdWcrJDbL9WwShKY1jWuW8c2tJ2b3AdWc8/exHPH78nKODfYJxNOsdbdvy6PEz7t+/z9B1zKcT6jpjvU3W/Gv64fpqzVBaDo728N7Stgm2JYVkuX/Ap599zOXZCcvFjKKu+OPvf59WNDx78YjfffgZJ6dnWOORUbI3n/9r9RE/teg6F1mtGk7Pdtw4GlgsJ5CPQXtSIGWSrIiYioaxlkz2SJUkUXleILUec4OSvCZEjxIaZxwxBKxJabchePI8R6s0q3XWorPsJe0nzwuESOQsjcI5g3Up6VcKkRZDIaLzxBNwLhC8T0udEF7iKTOVir51PsVaC8kwOCKCTKfPDSNusreOQmuc64mk7lCJiFQpQ8uPfNlrBUEmIUaFJ6IERG/hWnaER+ikF/YuvXbnPASfiPbOpw5RprmtiCl2JysKpHdIqVAqS5I3pYjRo5Uizyuc7dE68SaElGgiRabJdZa+rqIiBpukZFqho8aEQCDgHGRKE/L0M0vavS8fAlKl0YdUmrJKqoMIGGcpyhpnHf0wUOkps6JiGwacHw0aMYxhDAKpFbOqYG9Rc3wwYz6p/0wU3XSJLwUIX02Q/Oqf+FO0MSkkUor0+iQv4VBCSCKGQvfUdYFWkr5Q1CIQXUefC7JYoIVF4miEpz7YQw4tuZJMq4pV11Lfu8nSOJRO44rVbk2ba+pFyZs2pzDw4+YFfZ6B3kOZhuPjt7gKguf2AVPZsl++xrLcZ7tX8rX9X+DZjww61Dy86Nme/TGIwDt7N5lWe/TnjyiV4aja583lPjcnE85XkEtYOEsMljLPCAf7POqGVGh1z6QqkUSKPKeqCoqqZLVas2ta5MhpHob0588uNuzalhAdWmoWiz0W8wV3bt1GSc1yecjQD0wmc4Tw/PjBA16YGds+cHxwF8ocNVtQuIGwvcLbhpNHX/D5Fw84unGL9z76kKO7dwgukf2awWBCJGqN14rWBoyDW7fucfuNt9FFRdsMKJWlXc3Qsd2sabY7yrJMmXMmqbKcM5w9fIEisFqd8Vu/9QwpBavVGmsde3v7qbFT/3XNETHgreNyveb8Ys1yUZEX0yR9knKMJdEvdZfeeYbQIWRC+xVFwXQ6x7oX/H/bO68mybLrOn/HXJu2slyXaTM9gzHwhqAAUCLEYOhBFPkD9I/0G/SkZ70pQqEH6UlBUiAiQAzcDMb09LSp7vKV9ua15xw9nJtZ1SA1rQiKAEKo3VHdVZWZN7Mzz913n7XXXsvhyJYZBBpsRWMqDylYj1NppdbDDA5AefvxlXC3DgIQXlRbK0FV5CipwHrTxsY1IBQCiTE1tfVTcCtlLj9Dr3DWUDeWZVkRKuE1aoHaNG3zR7QnncdG65WV+Yo5gCPSGusEaD+ptfLCktahJN59YKW1KjxnGecwjSMIQup2AswYg7UNupWb08pLRCI9P9p3yxJ02Op4CoFWmjBKqRovUxkmKUp4x2UlDM5VaBUQRxFBHCNwhGEIxldwSnpxlyCIqJxFyJrGNGitaYwlCLynHMozDJTS3t8sDAnxVDDrHCLPEc4SRiFSa7JsSpIkdHXIVbOkMV5j2TqLQBAHAb1uzMGoy/29LXpp8nvSSltVu9fM4vXLWqMg4pXqdlV0COG1F6TwFzvZfgmgrsZ
Y1yGsLc9kwU7oDRsfUdDvpDw0IUVjeJZPPJUs7bIjEx5np8yqnDGGu/0N9peCo/IUYy6xukshevT33iCcNaTjc5AXJEJyJ3nI4zojvr/Pw8sGMbsiL3v88tkT6qcfE0eK7wze4OTknJolNDNUqMnzKW/177E0I3ZHKYOoz16aok3OhnAMsZhihjEVYNjbHpFEEVmeM65ymqYhjPxk5fj5jCCuCMMAKRW2aZiOJxweHrAx2uKjH79P2onROmQ02OI73/wjNrZGHOzfYT674NFnn/DsyTM2httUVUUpQ9TGDoGOKfI5G4MRVeUIAmiwZHnGZDqh1+nxta9/ncM33yCMIi5Ojjk/O+P84pLpbE5ZFyyrnE6cMugOefvd99jY3iGKu+hEEgRQFiVVUTCfTD2jpyz8tKcO6PZ6vPvue3z26FPee+ddyqpgPp9RNTXPnh9xcnZGWRVIJVA6fO16+2KerhaEHYkOLGWzZJEtSDohOkhaA7+W5O6Ep2I1ta/SpETrmDiK6KRdhFB+lNg2NHVD3XbXPWxgkaqFKpxdGycaa1pBGk+PkgKqukAISZ7VmKZBhXrdGfcegbId1jBgfaNOS41tPOULZ9fVoHN+xFkpX7mZxidvKcBYS1XkyHYQgcr6y4qAxjqiyHtRyVpSVz5BKqlosH6SX8jWIdhjHjoIacrSC3g7R1nXBEHQMl3xjAGlUTqkzOcEQYJRAU3t4RWcQ7bVtHXWQx4y8sMLxqJT38AS3giObtqhmybe1VUIYq1Iuh2KosAJgQwUjgDrLOGqwYjFWairkjjwOw4hIIlTP9arFUJINuLYT54hWmxdeEUz51kMcRQiRYFxFmM8vc3hCAR0Q80giRnGCf0k4YYn5O84HGD8BRKFFe7VhLuikq0SrBDrf0Vb3a6qXC+MA6GGThqjiBkwYZimNHlJYhtcWNITI8xyTJjm2NDQH2wRLCBRFXVaEiYpg16PMtb0sxkyCJBpgopHfJ7ndAddHva+THb1OWG6y+eXY2bmGKEU727coeckLxcTbDMnCiWJS7Cu4s5gxGJ5Thx12e8OeDjYYStIQG1TmwJZzjlfnpBPx8RlQ9LtEwWaySLj+fEJoBj0BhzcPUD0LZXNwVQ4K/jSu4ecnmY4Y+h2YhbZEmscl1cTHn32nE7X23od7B7yJ9//PmknZTDsE0cBL6YTZuMJX/3ye2yMRmTLnKyG+ZXhqizoRSm9OGZSLaGF7vKiIstrHjx4wP7dQ7qDPvPxmLPjEz57/JiXp8e8ePmidQBPeXB4nz/+9pe59+YDBqMNhApwziCFpSxyrs5OvQ5LFHHy8iUXJ8d86Z132d7d4aMPf8nGxoCz8xO2Nze4d3ePz58+4WB/hziNOTk9I0kiguD1o+1fmHTjVDMYxgw3IqJUgDR+YsMA2guvSKfBtkR36ytVU1tiBGmcUoQhURT5BopUfhS4LHDGm8NJIQjCENM0CKT3/2qTSaBDnLVEcey9uoC6rmnqyjffjN+aW9u0QjgGK9oKsjUPXDXLwjDwjq3Sd8/rusEKixTad62dt5jRQQuZSOWTThSSV55NgfPyfdaCxfoBC9VizM4/RraODc76Cb1AK0IlEYF3DDZt5RfokLrOEUKipVcOK1tBH6l8A7OuXXtyq5am1RDqGCFY46p1XYNUmDqntoYkSdjaHKGVpqgNTgrSJKKbdNpBCYHTXnQeUyNc5H3nMFA33owyCEjTDlEY0Ot0EFJ5qyT8JOLmQHIxvvKfh/GDF1p5eqByEEpNXlftkvD/B9t4bu8yX7JYLvHXy9+PSncV1+n1H94CLay2qnRXCfZG9bu+ndbmqMwxQcLD4R2CaECZON4pLWlvxMXE0k1i3iOgk2zxbGoZJ47NzgO65ZSkM+LxxZRKO+4P99nMBqgg4tfjBUuWTPI5prfJ/eg+F4sCU1doaQmlV83a3d5hOROUsoMRDYEO2Ug6bA16dKsU40pSJdhJQ/LlFU12TFGcUjZLjK3QRjO
9EFy9vCKNQuZ5QVk1BFownWfc2T9gsN3jgxef+vdGSGI9YPClr3B1doVuaiIUR0enxLlja2OLbLHg7Tff5N/8+Z/jBJRlzoc/e8J8PieONEkS8+zZMz57/DnzbI5UmnxwjyKHcz4gTSJGW3foIciPH5GdHDNMuzx4+y26G0Mwlsn5BSfHx0ymU7JlTlk2dNKUve0dvvud7/LuV77M5u4OKgioqhrrDGVZMr445/LkBBF4eHAw2ADn+PiTD3h5/AznHFdXVxhryJZT9nd3OTs5JohiBr0Optlgkc3XHntfFF+cdDsB/UHCaJQyGMR0Bin9YZ8wiLHOtDPnqqU2+a2ncdbzT0VJGMYoJUiTGGP8Fq6uCkS79aYlmlvjK8XaNighsVaiWj+qIApbVoKvfk1Te7hAqjWv1XrBAN/8aituife98pCA8riM8xCCMR4msA5vQYTHhAPlsALiOKJufHJPtSKNYoq8BCwS3zz0vpYSpMU0vmmlVHuVcxalfaIMtPb6Ccr5bm/L1lidoALf1a+NI5AWS6ux0A5JiLbjH0QRZVVhjfeTE/iZf6X9Vq4uMnCSUX/IsNf1VaTyWHAYxpRNQxgENA6SOKE2UOUaLQ3GCZQKUA6cs6RhSCQVvbRLEIakScdjZMscqSRR4Bj0+0xmM5SGpt1ZNMYgrKUfdSiauq32fZuqLA3z6ZJzQBjLdJqgX6+39FsI3wJlrXpzzUy4OQPhWuGfdWJdVbm0n1G7g1rBEAgwTYYtG4LuJtqWpJtDdtQ75JOMeVIjRMBIPaCZlBiX0QiN7WxyN97mIlsSxDW1zZiZnP3NO5RXGcNOj65qiAJBL4nY2d6lm+eE1YxG1KRJzHbc453+HfrTIXUqMS5jlKQcdPsU8wV6lnN6cU5VT3hyNqesljhXA17UyuF7IAQhTW6ppXf9MMb3HWrjWOYlm/vbqJeaxtUYDAtXsBSGor9BPh6jY0Fnc4RcVtw/vMd3v/UNhv0eg2GHn/zkp9S14fDgkMM7BwSR5ue/ep+zy0uCMCQIQ8bjCxZqiOrt4VzFdHrB4e4OQ9fgTEZdZdx7Y5/t3S2CQDIbX3F5fsp0OmGRzWkaw8bmBnd3R3zrG9/ga19/j8HWJmEcU1dL8sWEbLFgOp6wnGUUeYHTivxqTF4uOT17yc9+/vfUdcOg1yOKvRfh1eUF+7u7bI22eHl6QqfTIYkCFguDrV+/4r5w1UexptvxRoL9Xkq/N2BjcweNZrGceeV/6TExi8cx/c+Wqs4oco3EopQgjkKvKdBKMFrr1ayMtTg8ZWqFJQo8tugQXkOgNlhjfLJzDqXa6kJ5qplUsrW7sajAJ3JjrIcsrEFI2VK0BJVp0A6axhJKaACBb2iFSehteJqmTX6auqnp9XpM5gvqyhDq1pGiFYMxxnl5P+ffB6VUu1334i5aSpzxyklAm5wVTcv1lXjIwViLIvAXMaHAeYM8D7/I9Wizb0j6pC9dQxx1vcmm9eaYQeiPobUiQqKE9mwEAzrSKOubXK7Feq2ApihJ4g6N9BQ/KSxJHHnIpDaUsvYLK02oioIMKE1Ev9dnuVwSqgjrBBaYZ3Oc86PbpoUsfNMQFibDlTWT8cLbpvy+UcbWQxzilcbZKonefL0r9TUhrhOxuPk4vACO1jXSLuj3N0hCTbq5yZ3+Nkk2oZANaZiQJCVhmXFBTRTHDJIem8uKQZWxcDlpnDBIBuztPWQnmzGzOVHk6KVd7ne3GAYx5/kcIyoakxNLQSJyNssly7pkPH3Jy2zC8yqnaZYYV7GykTfW4NpE66UpjdfqcAaVKmwpqVoYzjrr9T+0Ii9K0iIg0h3qegL4tR3FHcK4S5oMCQ3Y4CVf2d/nz77/PaaTK2zjNW3v7e3SWMv56TGjt98mikOKMkdqzWhzk8vLcyaTCahLpE5piowlFXUSEG/2SFVB7+4d9u8fEMYBdZ4xPjthNr0kL5ZUVY1UiiSMePutt3jrrYdsjIboAMp8znR8wdn
JMacvjhlP5oRhTHc4ZJllzOZTnjx5zIuTl9zZuUugNXXj/QdfnpyQZwtOzs65d7jPi+MXHD1/gtaKfidB63/qcISShKEv+5MkJenEdLtdQhHTNJZFOcP5HbunSwmfdFGCpmpY5jOq3BEEmtHGgKfPj1FaY2vT0rfauX/nqJsSJUXrdiCJ4piq8Nt652os3pTRSzVKcIam9pQv6zwuurrdmx+adrjLECjtlbuER1GVkjTGIZWmNMZTympD2sH7YlnbGjVqKmPQ1hKHAaaqkAK09lbwXolAIoWlbrxrrpISpwW2FftWQmKkayfM3JqEXzVNW836M9da46fPEFhb4wRo7dkKgY6Q0idLiT+WaVaMY69Ta+sKGcfols2hlaJqLEGokVKRBP55hbFIFRAoSxDH1NYSRV4LQ2oNTUUSanSg6XZSgiCirGqy5dLb9/QGxJ0ucjJlsZSY2njcWSpvcKkk42JBxwY402CsRViP3RfGczoxXgt4BRn9TkPc+FpVt+3gxlp/QTikXLEUrgXOaatd2dLMrum6beJ1YJVBuQxjM0JiIhq2D/bYqkZMygwXKHbu9jhcFJws5xSuJokiDu72uF+UjIs5KlDEUcC2jjisYyZ5xiK/osxPuZh+xmVdUVQlRZV7po2pcc74c8na9fnjnPFfWKyz7ZDLKtHa1vXbYGyDc5IokQyTXcZH47ao8mTyKAx9n8GE9JIBWTXGCoGQiu3+BoHswGyBnow5+NID3n3zTZJQkWnJ/YdvUmRLLs5PiQJBt6NZFjM+/OQJF+enzLKcyeSS5TLzDfL5FSZMaYIO1VnOIhQ05S5bw5itvV3ibky+XJBNJowvTynKjNrU1MZPn24MhuxsbdPtdgFHPp+wmM44Oz7l2fMjLsYTjBUMRhGhcyzynPliRlkVhEHEvYNDdne3OT454tNHj0iTlOl0zoePHiOUoNNNmV4tqZY53c2RH4x6TXzxGLBWhGFIEnfppgPisIsONJFOiOOEZTnH2QaEb0AJoBYWJ73DallkzOclspV9tNYSak1pKiTCV4h+QgAhfMPcWd8oUi384Iz1VaVzHg+V0jv9WouzTYt/hl7CUXovJWutF1XBX8m1FAipfSUqfFUphaSqKoIwoqxLGmuoaofEQxBBoBHSIZ1sF1pEoQosjrqxvvKUAofBNCCUdwQWAI2nqQRBiHAW3b45XmTda3BJ5xBS+a25dJ6z7Nomn/VCPI21PokqBdbj06Yx7do3SOdlKaM4xdS+MunEKVL5XYJum3CrcaqmqXEWdNLFNDWhjqh0jbUVQkCcdMjmNU5qZBBR1ZYoDkg7EcvlgmI2xpg+QRjQ6aRrqKasStK0S1F70XcjJFmREzpB4wQtzdjvCqzFGgcGXPP70EnzkpOsCoZVwl1BPxLPVFDOOwHfrIalQKqbjbTfrJD9Z2pMzfjqKYvpMcFJwnR3n07S87h8oLkILqBuSOqa0FmqZcHT0wzRKuU1tuSiWPK0KZBtRbrig1+zLm4QuFdymrYVVsdCm3Ct89/jvBGnbfnc1llffLTfCyEZ9bf5zsM/4Vf1z/no0w+IAs9fmc/GmCZnd3fLa634kguNYxgoqllOxzR8+xtfZWvDc3al8pKsH/z8fYzxantCKbK65Ef/47+RLZZ0Ol4DodNNCUJFWda4Bqgrer1NhnFKaHIG1IyGO8S9DsYYpqcXnJ285GJyySzLKKoaIRVaCHY3N4lCz93PF1PmkwXnp+e8PDnjxek5tYUgiugL348abW7yyce/8tKrgSJJExbTCdl8ilIghWW+yDk9zwDLwfYGg36XqizI5vN/+hhwGIb0ukOGvS2SeAMto7UEo1JeYHtlObyqEKQDJz0v019NDWGkEa3snWkapMIPIDgLxltzaymQOJzSqHbBR3GEwzcmamtwbrV9dygEjTPYpiFNOmAdKgipqhKcQ6mgXTwaiUC3k1CunSgKdEDd1IRhSJZloDR1XXt3CeHdhIWw0OJbURgSR5E
fd7ReJUwrD1uYFlZQLQfYtmI5oQ4wpvICOsbDHOKGupaTYA3+vZF+qEQq3TYB/UntmRYNFoVUkqbx1YazlsY5L3nZVs26HS82DqTxFz4tPO9XqQCtHI0zvlGoA4LGW/Y454VvTGPp9jawTYVFUTYNuqxIOwndbo/ZbEo2mxCm3uQvSTo0jaFpL36b3S2mSmEcbJoRp1cnrfmkWyuUeZU358eC/y84jf/ccZ0kAWH91FxLXVhBDisxHD/2e9Mz7RrPbd0q18e9KYID4GyNocG5gqPnE3ACR9Niwm01zXploBEo6dei96bwovCrYRPh7I3ZOZ84V/fy1atb/+QTqU+4th2JX2kcO6zXBHYW27pHO+FV8fY27/OVt75NNXF89OmHFEWBasfzlZQ0ZY2rBUoERCJhJ7nLgUh44+09Njd6LKaX1FVJGAaYMufevfsoJXj86cfMlwtOX55zdHSENbC1tcPm1jafPPqEpqmoqoI8LylKi9IRnV6XYeA46HXZ3ejR6XdRQcjk8oznL444OztjuphxOZlwNS+xFrY2N9kc9hHCks/nTC/OmI4XnF1ccXwxZpq1u7cwYFnkLPMl3W7KnTt7BEHI+eUZH/76A/78T/+EyeyK6WTOIs/X7/d0Mmd70CFKAgaDAQ7FdJ69ds19YdLtdof00g3ioEsgY4TVlEuDNF6O0StH2XUFAF4HwOGTggq8Dm8gJEGgWo5og7NgbI1oFbOMa4n9YjX+6x0iFH7CrZYCar9gQq1pTI3QAa6uvX5CU/uts/Q4rBF+C+0aT2nC+seqIES01aNrLCqMUFqvNQ4q08IeCEKJN7usmtZNQRBGIdS19w+zgBSt27BGK5/0rHPUVUUYtKpdSFzrp4VQgGllF2krQIMUEtU2JH317DvpqjWB9A4S/nVJ4bCN50FbZ3DSIauy3Sl4XQYEJGEEUiC1bGUZrR++QLAsMsIwJe12EJlP9qZpfKMRjQ4DGuNQgWSR5xjnGHQ7DDc2WWYLwjCgLEsWjfHOymHEeDamMZbhaNsn4qZm2elj8wVlWWJcjdSevial/0xa8Pt3HBZQ6w0B7YXWZ932fRfXHFyk8brPNyrbm/CC/92rCRfwSda254ZbwULXzhwryd4V44aWTunFjtrCBl+NO3fzInb9s0/KftdE2xR17Q7ROX881w7Cu1aL2ZdF1+JFq35BHHS4u3OfUX+LtJvS63RYLBYEYcDGsEsahQRacjC4T0DMbn+XO4M97h7ep9/rkGULur0NXF3w/PkRUgj2leLs6pKz81M++PgjprMZvX6fe/cfMJ2O+eyzR2SLOdliThgGdKKYOAClBbtJwF4/4Y2DPbb3dgmThCJbcn56xvPjE8bTOWcXV1yMx5RNw6g3aCUiLbYquDzPmUyvyLKa8TSnqLxJrVSKOPac8Y8//YijF0945+13+N6/+AF/89d/TZ0vODs94d7hPk0t+NVHH5EmC6wrmM8KxpOC3a1diuUChCVOe69dcV+YdDthQiQjJAqsxNSQLwrqoqG2xVro2ttPi1b3w49+irbajaIIU4Fum1/WulbAxiG1p1FpFdK2mRBCECrleautHmulFAW0V2iz1mKQwico7/TrT2KlFaasfSPO+u20aDFQpRzGNIRhSGX9QEBVFGgpqRvnLwJc8y8DpdBaezFjpagajbIGJaC2LSbWnnwW1nQ1ISRah2gFBs1Km9O0TT3h2vxrvcwd7XSTQGCaijCMkVp7hwjwjA7TEMQppqm8IHpjQPhmW20bHP7/5ulz2jM4lIRAtYMnFidiaueZBtI0KB0RhZqqrrBSYauS2hWknR5VkQGKIIypakNeGTpJQKfr1aS0Vn4ap8WFFILzy1OqqqLf3wApyaolZV1SVjlJEtKJQ3qdkDDShKEgVL8P7IUb1Sl+alJKuf7NCnpY99Nu8nfbzPqq3OOrx/RH8QnTOdaj4+0D149ndcxWWMe1MIdtYYBVI29VpbrV452/SKyat9a5FpttsVp
7A7NdVbqr89a2KXeN7/qQQpPqDUxuefLkU84vXuKc8/REHE1dUwjHdDxmsLHNTnLI3c19Bp0uxXJBnk2pa8uT+ZyD7U3u33vA2cUJf/ujv+Hk5BSE4ODwPvrshCyb8fTZY5qyYjZb+EuCc76SpmZjOCJNB2xGKe++8ZC7h/t0ugl5nvH8+TN+/fFHXF6NGc+XXE7mzLIlzoEa+oufDnzTezqdMZ5MmWcFeQVWKMIgJE1TirokSRKGgx6/+mDK//rx3zGbTvnqe19GK8HjJx9xNb7k4YMv82d/+kNevHzBT3/2M16cXPD8fML9e4fc2Tvk2fOnLJaL1664L171UqwbXrbdIjfSIKhobE5Dg5TetNAqyUo11QlwbeWmA4lwjm4nQgtY1q2tSyuL56xDBl40RQjpxViCgCAI0FJQN41vIAnpoQjlubHW+uTmcC0TYiV/KPGKXCC13wbpIKKpC9oRLy9ujkBoTV0Wrc+VT+BSKaIg8JoOYUBsvWeZvzgoyhK00mAqlPQJbSXl6BW+QEvPcLB1jVISKUNvoeMcpuVLSCnXTsSi/ctfRHwCDrRaV+/OeQxa4rxGhTU+a1uLbt17TVNj6oqqLAm1xuB95pxxqMDDJcZ5FbcwCGhMQ2D8FKB1hqqq0EFMXXvFt35/SJYtMKYmSSKyPMM66HZSosBveKM48jKVpq2wTEOWzRHS77h3RjuoMESMQYqG0bDL1qjHsJfQ73WIw9dP7/xzh2iTm9+B+Ar2Ouw1xtBux/29/RZetGL47ZF+A3Zwa8hNrCrnVoPiusK/dptYvx5aGL79rcUi187K4pU7rYw+cWsggda8zTfEXMsZ5zrpYlttkDYBW9esOe20BdQg3eb73/jXbCW7fPTJRzz+7BFKKq+l3DRUdU0YxBRlw504ZjvtkSYJV9MZy2JJEEiyeca3v/VN4jDm41//kuOzU56/OCVJEuIk5fmzp1RN5Se7qpKmMoRRQKIUWbbEGUeYxMRRSq/T5Uv37rK/v0PSSajKihfPnvHzn7/PZ4+fMlsssEKRFxXOOnrdrm/22dpjrcKS5wWL+ZKsqDFOIbQk7SX0Bn3KquLF0XMm0yu+9o1v8ZOf/ISPPnmEkgH37t7h/GrM2fkZL08uODy4z4MHb/Cdb3+b0dERv/rgI54cnbC1s8ve4X1evHj+2jX3xRbsxn9oWOmnlZzF1hZnaqwrENoiI+0pJlYiW1RKOtXqropWpMUQpyFBqHELPyThF6FF6dDTorRu15LXUFh14OumaS1raGlXrt2XexEZreN20Ti09J1krQMQEq2cH49VmrpsK3AsjfXNKGE9Zc1Yn6ydW4nXeK+vZctL9bCdp73lZUkQhpR15TvzgecoSzzXF6FIu13iOMCUJUGoQQTtgEHNbOq3P1r7drgVnk4mI+Vfq4M4ihFS0jjvJmGcI4qT9Vitsn6qrKpqVFO1/OXGu0dYP+suWlcIIb3iWtJi2NZZqtqPCpsgoixLzyP2ZRhaS8p8QRptkMZJWylBGCYYU3uBHB35Bk/T+JHrleedkiyzGbWp6feHKBUw6o+wrsbanP3tTQ72NtgZDUnT2K+N34vwteiKp3v9M6z3bkLeuG9b/Ur7Covh1QpXrLFgxLX3nLvBT79pgClaSKFFwNfPjKPVqGgT9urQeLU52qTtMGs4Aeda2tc1DczhcXufcFdJ2LRVr71pIcpWb5Nvvv0N7u2+xUZ/xAe/ft9rQWuFtX4gRuiQIErZHW3RSWOm8wyLpchLsnnN/t4Wn3/2CcZYJtMJF5MZSgc8ePiQi4szrq4uKCu/dhvTeJ5/bdBKEgYBg8EG21u7DLpD7t99wN7+NmEaUduGsxcv+PkvfsHPfvkBV5M5URQRRjFRFNFJewx6HdI4pGlqLi4uiKOQoijJy4qy9rlGS0UYJ95YoTacnZwxm884PLzLg3sPqIqc3Ts7BEkCMkDImNl8zqPPn/D8xUvSTsqdvX3+5Ht/zPs/e58nz45450t
vMRrtvHa1iZsOp7dxG39ocfcbobtZpXq9X9lCNx7zE1J6XF2KVnjIM0qUUjcm0bwGiWw1GeRq3mKVeD346pkDvIr5rgcs2mQrhfDsnTXeu0rMntvuVjVv69js1nQw214SVlCCV9dbK9/Zm8nWtjSyFpJwtGPOIfeHb/GtB99jY7THk2dP+V8/+p9ky6VnJ9U1O9vb7G9u8cMffI+trSFXF96N4cmzFxwe7pPNJyyXGeAb2k+fHzHNcjY2Nri4vKQsl0ggL73bShQHCKGZz+ds9FLSJOXw4JD93X0ePniT7Z1twjgiCALOzo/56ONP+dWHn/Dk6CW1sYyGQ3QQUjUNG/0hB3e26aURdb7wMqR4Xr5DYqUmjDpEnR79zW1cYzBlTlEs6Q+GXE0uKcuMuqqIQj+JmaYdL/41nZDnS6Ty3oiz8ZivfOVrnJ5fMJlMePjgPocHh/yH//ifvrBD/PsAqt3GbfwOo60Una9khWi7+sJT+wSts4RoJ9d+Ax5YY7nOvXIf527ctoIRXPt9CxesbparlyFWyIHDuhtD0mvs9/o1ryraFRVsVTz5RNpcwwcrvWa7Mh5dISVtHwJ3/cd5JkKZGz579IQoGXN2cY6SEQ/vP6AuC46On/Lm3bt89+vv0u8oxpcndNKUbqx4694B09mEIstI05T5fM7xyQuyZU4c+h0tUhKEHl5avjihLHOMrSjyJQ5Bk0h2du5wsLfPg3v32drZQEV+5/z8s8/5+NNPefLyjGmWM9wYUdcNQkjCMEJJTRrH9DoJ28MBdRpxdn7OYpnTNH4XF2iNCkLSwSbBYI+dQcJ2R/PhB+/z/i9/Srfb4fj4BUmScOfODkdHn3sZAympKoPWIf1OB+caBoM+L148J0pSqrImWxSML+evXXG3Sfc2/sDjJrTQOvyK1dDGq+Il1xzd68e6G8nz5vFWyXMN0F4f5fqev9GAu3k3K9rRm1WVyyrpri4S3s5q9RpuMhhsi/HadtDHOedphs6xJpq5NffIP75lxyg0gQgJw4jv/eBPiZIuF2cXdIKYT379SzoR/Ms/+hqhElyeXzCbLdjd3cWZhovLU7JlDkguL6/Ilzlp3Ge0tc9sMePi6oyi8O4xUgYcHt4jCM9ZZDNms5IgCNne3mF7e5fdO3fodFOvNFg7Tl4e89GvP+bJ8Tmz+QIdRkRxyihMqKqKKIyIwpA0jojCqH1vFUJoDx+2jX0dBKS9IUF/RJMOWeqQ0/ERx6cvePzZZ3S7HYRQvPvuu7x48YxsuQQc5xdXvv/RNimrqiaJYvb3d1lczJFO8Ma9hzx8483XrrjbpHsbf+DhcOI6UfqeRGs2iWunH33yvVlp/ib+64lWrq1wHZ7bxXU+57pZK5ynDsrVffzN6wS+yq1rDHh1CLt6ntaZ2Tmk71q3ZLEVhuvaKtesK+x28NMnXeGTuk/Wvinqn0AijKZYNDy9fErzt/+dN+/do9vbp7e1x3h8xg/+6Csoabi6vARCNjd36KQ9Li8vODs5QaqQt958AyECtrb2GG5uczGZ8ve/+AWTrOGbb90l0IJON+SXv/yQ4Uafql7S7ydI4QWVwihC64Cr8YQwDJnNJ3z+9DnPjs84Ob0gz0s2traQgcU4GA5H9LrdVlzKf3aLZc5ymZGVRUv5lFgHOkyJBhvYMMXomDMjaY7OOL+65GD/kPfe+hI1ll9/8gHLZUYQRIRBShxVXvNE+MZ9FIVUtWdcVIVhf+ceX3vvq9x78+FrV9xt0r2NP+hY0bNW6lCr4QQQ1wnxRlyzDdyN312zIFbb9FeaYaxue+WZ/9HX85uF8aqCvU7cq2az14Fe47GsqttratqqgnV2lYjBrgSiWPfgbrwWibM+aTWN4W9+9Lc8/fwxf/lv/z1SafbubGHrnOOXY3ZGm5S1ZXNnm6NnR3z8yaeMRht86Y37HJ2ecHJ2yfnkku7JBp1un+9++5vcny64ujrnk48/4Oz0yDflau8fGMcBUZSilHeS+fz
J81ajpGE8mXB+dcXx2RXjyRxjHfZqwhZB6z4uieOYMNBgG2bzCYv5nEW2wLS6J1oFSONQcYdGhCwbQb4ssUKyM9ih3x1Rza/opEOW9YI06nB+csn2zgipJKPRsNUVsRRF4dk+OsRZzd7OXf7i3/0lBw8f0N/aeu2au026t/EHHfYfTDH4WEMJ/4fkeM2Rvb6Pb3r50ta1zTLXYsCv2ra3T/AKc+HGoVevzd3ow7WV93VSFzSu1WzmGn5YCQw5t5pG88nWOE9HXDPfhF1ju6sDrJqJgQ68RoeAvChYLmY01RLbOKbjBVo7kjjhyfNHPD8+Y3N0hzTtc/feISeX5/zdT39KUTY49witQ4ajTe7s7vDi5ISmsczmc6x1hEFII1p9lzhCWhDKm1VOXIZzgrqpycuC86sZV1czjIMk7ZAkXcIgIg4joiiirhtMU9M0JfP5guUio2ka4jgljNrJVh3iCMiN4Gq+5GpaYqViOIyAgL/4q7/i/sP7/OTHf8fXv/w1Bt0ujz5/jLOaPF+yyDKquqJpHFVpSWLFe195i3/1wx/yzle/zGBzm7TzTxyOuI3b+P891hWp+Mcq2y9S/F1hq/aa2+tECxPcrITN+lj+b88jX2EIX6S05vBsCtGOmHndBNZF+ArLBVq51BZSsNfNMWttCz2IFmJ4tUJ/5Yd20KdpbagCFdDv9sgWU+bTKTu7b3D0+JSNQcizo+c8+uwx1klMI3nzzYecnh3z47//GXVj2dvbZLnMKQrLy+MTJpMrojhimRekUcBo2CGzlmVhUVGMDiSR8xz/ZVGgVYRzAq1DmqJkNs/Iy4IwitFhxPbWHfZ2dhj0ul5MqVi2Rgc52TInzwtwjm4nIE173kRXahrnKW+X8yknRUPc3eCZcYzSPss647/+l/+MKQyHdw948OAeL89OOD09Qkoo8hyEwBqBFCHvvf0eP/jeH/PGwwd0ez3CyHtCvi5uKWO3cRu3cRu/xXi9t8Rt3MZt3MZt/D+L26R7G7dxG7fxW4zbpHsbt3Ebt/FbjNukexu3cRu38VuM26R7G7dxG7fxW4zbpHsbt3Ebt/FbjP8NsZG563zAPk8AAAAASUVORK5CYII=", "text/plain": [ "
    " ] diff --git a/py/requirements.txt b/py/requirements.txt index 302b7e92af..a34a458938 100644 --- a/py/requirements.txt +++ b/py/requirements.txt @@ -2,7 +2,7 @@ numpy packaging pybind11==2.6.2 --extra-index-url https://download.pytorch.org/whl/nightly/cu129 -torch>=2.8.0.dev,<2.9.0 +torch>=2.9.0.dev,<2.10.0 --extra-index-url https://pypi.ngc.nvidia.com pyyaml dllist \ No newline at end of file diff --git a/py/torch_tensorrt/dynamo/_compiler.py b/py/torch_tensorrt/dynamo/_compiler.py index 6434afe248..74cab980c4 100644 --- a/py/torch_tensorrt/dynamo/_compiler.py +++ b/py/torch_tensorrt/dynamo/_compiler.py @@ -258,10 +258,11 @@ def cross_compile_for_windows( if use_explicit_typing: if len(enabled_precisions) != 1 or not any( - x in enabled_precisions for x in {torch.float32, dtype.f32} + x in enabled_precisions + for x in {torch.float32, dtype.f32, torch.float4_e2m1fn_x2, dtype.f4} ): raise AssertionError( - f"use_explicit_typing was set to True, however found that enabled_precisions was also specified (saw: {enabled_precisions}, expected: {_defaults.ENABLED_PRECISIONS}). enabled_precisions should not be used when use_explicit_typing=True" + f"use_explicit_typing was set to True, however found that enabled_precisions was also specified (saw: {enabled_precisions}, expected: dtype.f32, dtype.f4). enabled_precisions should not be used when use_explicit_typing=True" ) if use_fp32_acc: @@ -591,10 +592,11 @@ def compile( if use_explicit_typing: if len(enabled_precisions) != 1 or not any( - x in enabled_precisions for x in {torch.float32, dtype.f32} + x in enabled_precisions + for x in {torch.float32, dtype.f32, torch.float4_e2m1fn_x2, dtype.f4} ): raise AssertionError( - f"use_explicit_typing was set to True, however found that enabled_precisions was also specified (saw: {enabled_precisions}, expected: {_defaults.ENABLED_PRECISIONS}). 
enabled_precisions should not be used when use_explicit_typing=True" + f"use_explicit_typing was set to True, however found that enabled_precisions was also specified (saw: {enabled_precisions}, expected: dtype.f32, dtype.f4). enabled_precisions should not be used when use_explicit_typing=True" ) if use_fp32_acc: @@ -799,6 +801,28 @@ def contains_metadata(gm: torch.fx.GraphModule) -> bool: "Some nodes do not have metadata (shape and dtype information). This could lead to problems sometimes if the graph has PyTorch and TensorRT segments." ) + # Store the original input spec for later use + original_in_spec = getattr(gm, "_in_spec", None) + original_out_spec = getattr(gm, "_out_spec", None) + + # Function to preserve and restore module specs + def preserve_module_specs( + in_spec: Any, out_spec: Any, target_module: torch.fx.GraphModule + ) -> None: + """ + Applies input and output specs to the target module. + + Args: + in_spec: The input spec to apply + out_spec: The output spec to apply + target_module: The module to apply specs to + """ + # Apply specs to target module + if in_spec is not None: + target_module._in_spec = in_spec + if out_spec is not None: + target_module._out_spec = out_spec + # Partition module into components that can be TRT-accelerated fast_partitioner_failed = False # If specified, try using the fast partitioner and fall back to the global one on failure @@ -844,6 +868,7 @@ def contains_metadata(gm: torch.fx.GraphModule) -> bool: continue submodule_node_dict[node.name] = node + preserve_module_specs(original_in_spec, original_out_spec, partitioned_module) # Store TRT replicas of Torch subgraphs trt_modules = {} # Iterate over all components that can be accelerated diff --git a/py/torch_tensorrt/dynamo/_refit.py b/py/torch_tensorrt/dynamo/_refit.py index 7cf19e870e..9aae901f87 100644 --- a/py/torch_tensorrt/dynamo/_refit.py +++ b/py/torch_tensorrt/dynamo/_refit.py @@ -22,6 +22,9 @@ DYNAMO_CONVERTERS as CONVERTERS, ) from 
torch_tensorrt.dynamo.conversion._TRTInterpreter import TRTInterpreter +from torch_tensorrt.dynamo.conversion.impl.normalization.ops import ( + batch_norm_constant_folding, +) from torch_tensorrt.dynamo.conversion.truncate_double import repair_double_inputs from torch_tensorrt.dynamo.lowering import ( get_decompositions, @@ -39,7 +42,6 @@ from torch_tensorrt.dynamo.utils import ( CPU_DEVICE, check_module_output, - deallocate_module, get_model_device, get_torch_inputs, to_torch_device, @@ -78,8 +80,9 @@ def construct_refit_mapping( compilation_settings=settings, ) interpreter._construct_trt_network_def() + weight_refit_map: dict[str, torch.Tensor] = interpreter.ctx.weight_refit_map - return interpreter.ctx.weight_refit_map + return weight_refit_map @needs_refit @@ -90,7 +93,20 @@ def construct_refit_mapping_from_weight_name_map( ) -> dict[Any, Any]: engine_weight_map = {} for engine_weight_name, (sd_weight_name, np_weight_type) in weight_name_map.items(): - if sd_weight_name not in state_dict: + # Add more constant folding converters here + if engine_weight_name.split(" ")[-1] in ["SCALE", "SHIFT"]: + # Batch Norm Layer + params = {} + for w in sd_weight_name: + params[w.split(".")[-1]] = state_dict[w].cuda() + # Batch norm constant folding + + scale, shift = batch_norm_constant_folding(**params, eps=1e-5) + # Set scale to scale or shift to shift + engine_weight_map[engine_weight_name] = eval( + engine_weight_name.split(" ")[-1].lower() + ) + elif sd_weight_name not in state_dict: # If weights is not in sd, we can leave it unchanged continue else: @@ -178,10 +194,12 @@ def _refit_single_trt_engine_with_gm( for layer_name in weight_list: if layer_name not in mapping: raise AssertionError(f"{layer_name} is not found in weight mapping") - # Use Numpy to create weights + # Use Tensor to create weights weight = mapping[layer_name] trt_dtype = dtype._from(weight.dtype).to(trt.DataType) - trt_wt_tensor = trt.Weights(trt_dtype, weight.ctypes.data, weight.size) + 
trt_wt_tensor = trt.Weights( + trt_dtype, weight.data_ptr(), torch.numel(weight) + ) refitter.set_named_weights(layer_name, trt_wt_tensor, trt_wt_location) refitted.add(layer_name) @@ -300,7 +318,7 @@ def refit_module_weights( # Check the number of supported operations in the graph num_supported_ops, total_ops = partitioning.get_graph_converter_support( - new_gm, settings.debug, settings.torch_executed_ops + new_gm, settings.torch_executed_ops ) if num_supported_ops == 0 or ( @@ -363,7 +381,6 @@ def refit_module_weights( # Iterate over all components that can be accelerated # Generate the corresponding TRT Module for those - new_weight_module.module().to(CPU_DEVICE) for name, new_submodule in new_partitioned_module.named_children(): # Refit each submodule # Extract engine from the submodule @@ -466,7 +483,6 @@ def refit_module_weights( settings=settings, weight_name_map=None, ) - deallocate_module(new_submodule) # clear EXCLUDE_WEIGHTS flag serialization_config = engine.create_serialization_config() @@ -489,8 +505,6 @@ def refit_module_weights( gc.collect() torch.cuda.empty_cache() - deallocate_module(new_partitioned_module) - if verify_output and arg_inputs is not None: new_gm.to(to_torch_device(settings.device)) if check_module_output( diff --git a/py/torch_tensorrt/dynamo/backend/backends.py b/py/torch_tensorrt/dynamo/backend/backends.py index dfdc9e1c69..c0d29c41f0 100644 --- a/py/torch_tensorrt/dynamo/backend/backends.py +++ b/py/torch_tensorrt/dynamo/backend/backends.py @@ -10,7 +10,6 @@ from torch._dynamo.backends.common import aot_autograd from torch._dynamo.utils import detect_fake_mode from torch._functorch.aot_autograd import aot_export_joint_simple -from torch.distributed.tensor import DTensor from torch_tensorrt.dynamo import CompilationSettings from torch_tensorrt.dynamo._compiler import compile_module from torch_tensorrt.dynamo.lowering import ( @@ -89,6 +88,11 @@ def aot_torch_tensorrt_aten_backend( logger.warning( "It is recommended to run the 
model with use_distributed_mode_trace = True since there are distributed tensors in the input which is not supported in aot_export_joint_simple" ) + + if settings.offload_module_to_cpu: + logger.warning( + "The offload_module_to_cpu option is set, but it is being ignored since the torch_compile backend does not support this feature" + ) return _pretraced_backend(gm, sample_inputs, settings, engine_cache) diff --git a/py/torch_tensorrt/dynamo/conversion/_TRTInterpreter.py b/py/torch_tensorrt/dynamo/conversion/_TRTInterpreter.py index b134b3d5f5..8d7a914836 100644 --- a/py/torch_tensorrt/dynamo/conversion/_TRTInterpreter.py +++ b/py/torch_tensorrt/dynamo/conversion/_TRTInterpreter.py @@ -890,10 +890,9 @@ def call_function(self, target: str, args: Any, kwargs: Any) -> Any: else: return converter(self.ctx, target, args, kwargs, self._cur_node_name) - def get_attr(self, target: str, args: Any, kwargs: Any) -> np.ndarray: + def get_attr(self, target: str, args: Any, kwargs: Any) -> torch.Tensor: with _disable_current_modes(), unset_fake_temporarily(): frozen_attr = self.fetch_attr(target) - if isinstance(frozen_attr, torch.nn.Parameter): constant_tensor = frozen_attr.data else: diff --git a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py index e542f1d417..fe9a01b06c 100644 --- a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py +++ b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py @@ -532,6 +532,7 @@ def aten_ops_gelu( @dynamo_tensorrt_converter(torch.ops.aten.matmul, supports_dynamic_shapes=True) +@dynamo_tensorrt_converter(torch.ops.aten.matmul.default, supports_dynamic_shapes=True) @dynamo_tensorrt_converter(torch.ops.aten.dot.default, supports_dynamic_shapes=True) @dynamo_tensorrt_converter(torch.ops.aten.mm.default, supports_dynamic_shapes=True) @dynamo_tensorrt_converter(torch.ops.aten.mv.default, supports_dynamic_shapes=True) @@ -1034,6 +1035,7 @@ def 
validate_dtype(to_copy_node: Node) -> bool: torch.bool, torch.int8, torch.float16, + torch.bfloat16, } # Validate input node has convertible kwargs @@ -1935,6 +1937,7 @@ def aten_ops_minimum( ) +@dynamo_tensorrt_converter(operator.sub, supports_dynamic_shapes=True) @dynamo_tensorrt_converter(torch.ops.aten.sub.Tensor, supports_dynamic_shapes=True) @dynamo_tensorrt_converter(torch.ops.aten.sub.Scalar, supports_dynamic_shapes=True) def aten_ops_sub( @@ -3577,3 +3580,22 @@ def aten_ops_nonzero( name, args[0], ) + + +@dynamo_tensorrt_converter(torch.ops.aten.linear.default, supports_dynamic_shapes=True) +def aten_ops_linear( + ctx: ConversionContext, + target: Target, + args: Tuple[Argument, ...], + kwargs: Dict[str, Argument], + name: str, +) -> Union[TRTTensor, Sequence[TRTTensor]]: + return impl.linear.linear( + ctx, + target, + SourceIR.ATEN, + name, + input=args[0], + weight=args[1], + bias=args_bounds_check(args, 2, None), + ) diff --git a/py/torch_tensorrt/dynamo/conversion/converter_utils.py b/py/torch_tensorrt/dynamo/conversion/converter_utils.py index 0c2b85a671..896bf37b42 100644 --- a/py/torch_tensorrt/dynamo/conversion/converter_utils.py +++ b/py/torch_tensorrt/dynamo/conversion/converter_utils.py @@ -19,10 +19,11 @@ import numpy as np import tensorrt as trt import torch -import torch_tensorrt.dynamo.conversion.impl as impl from torch.fx.experimental.proxy_tensor import unset_fake_temporarily from torch.fx.node import Argument, Target from torch.fx.passes.shape_prop import TensorMetadata + +import torch_tensorrt.dynamo.conversion.impl as impl from torch_tensorrt import _enums from torch_tensorrt.dynamo._settings import CompilationSettings from torch_tensorrt.dynamo._SourceIR import SourceIR @@ -152,9 +153,9 @@ def cast_trt_tensor( ) -> TRTTensor: """Given a TRT Tensor, convert that Tensor to the specified dtype - Adds an Identity layer to the network which performs the conversion - if the input's dtype is different from the cast type. 
Otherwise returns - input unchanged + Adds a Cast layer to the network to convert the input tensor to the specified dtype. + If the input tensor already has the desired dtype, it is returned unchanged. + Otherwise, a Cast layer is added to perform the conversion Args: ctx (ConversionContext): A ConversionContext containing the TensorRT network @@ -335,8 +336,8 @@ def to_trt_weights( ctx: ConversionContext, value: torch.Tensor, name: str, - layer_type_name: Literal["CONVOLUTION", "DECONVOLUTION", "CONSTANT"], - weight_type_name: Literal["KERNEL", "BIAS", "CONSTANT"], + layer_type_name: Literal["CONVOLUTION", "DECONVOLUTION", "CONSTANT", "SCALE"], + weight_type_name: Literal["KERNEL", "BIAS", "CONSTANT", "SCALE", "SHIFT", "POWER"], target: Optional[Union[Target, str]] = None, source_ir: Optional[SourceIR] = None, target_quantized_type: Optional[trt.DataType] = None, @@ -362,8 +363,8 @@ def to_trt_weights( ) # Weight Recording - supported_layer_types = ["CONVOLUTION", "DECONVOLUTION", "CONSTANT"] - supported_weight_types = ["KERNEL", "BIAS", "CONSTANT"] + supported_layer_types = ["CONVOLUTION", "DECONVOLUTION", "CONSTANT", "SCALE"] + supported_weight_types = ["KERNEL", "BIAS", "CONSTANT", "SCALE", "SHIFT", "POWER"] assert ( layer_type_name in supported_layer_types ), f"Encountered unsupported layer type: {layer_type_name}. Supported types are: {supported_layer_types}. Manually calling to_trt_weights with a custom layer type is not intended for general use." 
diff --git a/py/torch_tensorrt/dynamo/conversion/impl/__init__.py b/py/torch_tensorrt/dynamo/conversion/impl/__init__.py index 10af2ad892..61728392da 100644 --- a/py/torch_tensorrt/dynamo/conversion/impl/__init__.py +++ b/py/torch_tensorrt/dynamo/conversion/impl/__init__.py @@ -12,6 +12,7 @@ embedding, full, grid, + linear, matmul, nccl_ops, normalization, diff --git a/py/torch_tensorrt/dynamo/conversion/impl/elementwise/base.py b/py/torch_tensorrt/dynamo/conversion/impl/elementwise/base.py index ab9629b0db..097a81b8d1 100644 --- a/py/torch_tensorrt/dynamo/conversion/impl/elementwise/base.py +++ b/py/torch_tensorrt/dynamo/conversion/impl/elementwise/base.py @@ -1,8 +1,8 @@ +import logging import operator import warnings from typing import Any, Callable, Optional, Union -import numpy as np import tensorrt as trt import torch from torch.fx.node import Target @@ -20,6 +20,8 @@ ) from torch_tensorrt.dynamo.types import TRTElementWiseOp, TRTTensor +logger = logging.getLogger(__name__) + def get_python_op_from_trt_elementwise_op( trt_op: TRTElementWiseOp, @@ -148,7 +150,11 @@ def convert_binary_elementwise( ctx, rhs_val, trt_promoted_type, f"{name}_cast_rhs_val", target, source_ir ) - if has_dynamic_shape(lhs_val.shape) or has_dynamic_shape(rhs_val.shape): + if len(lhs_val.shape) == len(rhs_val.shape) and all( + a == b or a == 1 or b == 1 for a, b in zip(lhs_val.shape, rhs_val.shape) + ): + logger.info(f"skip broadcast for {name}") + elif has_dynamic_shape(lhs_val.shape) or has_dynamic_shape(rhs_val.shape): lhs_val, rhs_val = broadcast( ctx, lhs_val, rhs_val, f"{name}_broadcast_lhs", f"{name}_broadcast_rhs" ) diff --git a/py/torch_tensorrt/dynamo/conversion/impl/elementwise/ops.py b/py/torch_tensorrt/dynamo/conversion/impl/elementwise/ops.py index 17e5042ce7..1bfb8c7242 100644 --- a/py/torch_tensorrt/dynamo/conversion/impl/elementwise/ops.py +++ b/py/torch_tensorrt/dynamo/conversion/impl/elementwise/ops.py @@ -544,9 +544,16 @@ def pow( lhs_val: Union[TRTTensor, int, 
float], rhs_val: Union[TRTTensor, int, float], ) -> TRTTensor: + + lhs_dtype = None + rhs_dtype = None + if isinstance(lhs_val, int): + lhs_dtype = torch.int32 + if isinstance(rhs_val, int): + rhs_dtype = torch.int32 # POW operation supports only float32 and int8 inputs - lhs_val = get_trt_tensor(ctx, lhs_val, name + "_lhs_val", trt.float32) - rhs_val = get_trt_tensor(ctx, rhs_val, name + "_rhs_val", trt.float32) + lhs_val = get_trt_tensor(ctx, lhs_val, name + "_lhs_val", lhs_dtype) + rhs_val = get_trt_tensor(ctx, rhs_val, name + "_rhs_val", rhs_dtype) out = convert_binary_elementwise( ctx, target, source_ir, name, trt.ElementWiseOperation.POW, lhs_val, rhs_val ) diff --git a/py/torch_tensorrt/dynamo/conversion/impl/linear.py b/py/torch_tensorrt/dynamo/conversion/impl/linear.py new file mode 100644 index 0000000000..5e859a46d3 --- /dev/null +++ b/py/torch_tensorrt/dynamo/conversion/impl/linear.py @@ -0,0 +1,56 @@ +from typing import Optional, Union + +import numpy as np +import tensorrt as trt +import torch +from torch.fx.node import Target +from torch_tensorrt.dynamo.conversion import impl +from torch_tensorrt.dynamo.conversion._ConversionContext import ConversionContext +from torch_tensorrt.dynamo.conversion.converter_utils import SourceIR, get_trt_tensor +from torch_tensorrt.dynamo.types import TRTTensor + + +def linear( + ctx: ConversionContext, + target: Union[Target, str], + source_ir: Optional[SourceIR], + name: str, + input: TRTTensor, + weight: Union[TRTTensor, torch.Tensor, np.ndarray], + bias: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], +) -> TRTTensor: + # Process weight terms + if not isinstance(weight, (TRTTensor, torch.Tensor, np.ndarray)): + raise RuntimeError( + f"Linear layer {name} has weight of type {type(weight)}, Expect Union[TRTTensor, torch.Tensor, np.ndarray]," + ) + elif isinstance(weight, (torch.Tensor, np.ndarray)): + weight = get_trt_tensor(ctx, weight, f"{name}_weight") + + # Process bias terms + if bias is not None and not 
isinstance(bias, (TRTTensor, torch.Tensor, np.ndarray)): + raise RuntimeError( + f"Linear layer {name} has bias of type {type(bias)}, Expect Union[TRTTensor, torch.Tensor, np.ndarray]," + ) + elif isinstance(bias, (torch.Tensor, np.ndarray)): + bias = get_trt_tensor(ctx, bias, f"{name}_bias") + + # add IMatrixMultiplyLayer + out = impl.matmul.matrix_multiply( + ctx, + target, + source_ir, + f"{name}_matrix_multiply", + input, + weight, + input_matrix_op=trt.MatrixOperation.NONE, + other_matrix_op=trt.MatrixOperation.TRANSPOSE, + ) + + if bias is not None: + # add bias + out = impl.elementwise.add( + ctx, target, source_ir, f"{name}_add_bias", out, bias + ) + + return out diff --git a/py/torch_tensorrt/dynamo/conversion/impl/normalization/ops.py b/py/torch_tensorrt/dynamo/conversion/impl/normalization/ops.py index 7e5b03a87e..e9f3cf4796 100644 --- a/py/torch_tensorrt/dynamo/conversion/impl/normalization/ops.py +++ b/py/torch_tensorrt/dynamo/conversion/impl/normalization/ops.py @@ -16,11 +16,11 @@ get_trt_tensor, has_dynamic_shape, set_layer_name, + to_trt_weights, ) from torch_tensorrt.dynamo.conversion.impl.cat import cat from torch_tensorrt.dynamo.conversion.impl.elementwise.ops import ge from torch_tensorrt.dynamo.conversion.impl.shape import shape as get_shape -from torch_tensorrt.dynamo.types import TRTTensor from torch_tensorrt.dynamo.utils import DYNAMIC_DIM _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -31,106 +31,180 @@ def batch_norm( target: Target, source_ir: Optional[SourceIR], name: str, - input: TRTTensor, - weight: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], - bias: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], - running_mean: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], - running_var: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], + input: trt.ITensor, + weight: Optional[Union[trt.ITensor, torch.Tensor, np.ndarray]], + bias: Optional[Union[trt.ITensor, torch.Tensor, np.ndarray]], + running_mean: 
Optional[Union[trt.ITensor, torch.Tensor, np.ndarray]], + running_var: Optional[Union[trt.ITensor, torch.Tensor, np.ndarray]], training: bool, momentum: float, eps: float, cudnn_enabled: bool, return_mean_rstd: bool, -) -> Union[TRTTensor, Tuple[TRTTensor, torch.Tensor, torch.Tensor]]: +) -> Union[trt.ITensor, Tuple[trt.ITensor, torch.Tensor, torch.Tensor]]: if has_dynamic_shape(input.shape): assert input.shape[1] != -1, "Channel dim can't be dynamic for batch norm." # Save the original output shape for later use output_shape = input.shape + # We perform constant folding for batch norm when the weight, bias, running_mean, and running_var are all tensors. + # Batch norm operation can be fused into a single layer, which is more efficient than the original implementation. + # In this way, the batch norm layer will be fused with the Convolution layer and get a performance boost. + if any( + [ + isinstance(weight, trt.ITensor), + isinstance(bias, trt.ITensor), + isinstance(running_mean, trt.ITensor), + isinstance(running_var, trt.ITensor), + ] + ): + # We name the weight here according to the state_dict name + weight = ( + get_trt_tensor(ctx, 1.0, f"{name}_weight") + if weight is None + else get_trt_tensor(ctx, weight, f"{name}_weight") + ) + bias = ( + get_trt_tensor(ctx, 0.0, f"{name}_bias") + if bias is None + else get_trt_tensor(ctx, bias, f"{name}_bias") + ) + running_mean = ( + get_trt_tensor(ctx, 0.0, f"{name}_running_mean") + if running_mean is None + else get_trt_tensor(ctx, running_mean, f"{name}_running_mean") + ) + running_var = ( + get_trt_tensor(ctx, 1.0, f"{name}_running_var") + if running_var is None + else get_trt_tensor(ctx, running_var, f"{name}_running_var") + ) - # We name the weight here according to the state_dict name - weight = ( - get_trt_tensor(ctx, 1.0, f"{name}_weight") - if weight is None - else get_trt_tensor(ctx, weight, f"{name}_weight") - ) - bias = ( - get_trt_tensor(ctx, 0.0, f"{name}_bias") - if bias is None - else 
get_trt_tensor(ctx, bias, f"{name}_bias") - ) - running_mean = ( - get_trt_tensor(ctx, 0.0, f"{name}_running_mean") - if running_mean is None - else get_trt_tensor(ctx, running_mean, f"{name}_running_mean") - ) - running_var = ( - get_trt_tensor(ctx, 1.0, f"{name}_running_var") - if running_var is None - else get_trt_tensor(ctx, running_var, f"{name}_running_var") - ) + # eps_tensor for numerical stability + eps_tensor = get_trt_tensor(ctx, eps, f"{name}_eps") - # eps_tensor for numerical stability - eps_tensor = get_trt_tensor(ctx, eps, f"{name}_eps") + # adjusted_var = running_var + eps + adjusted_var = impl.elementwise.add( + ctx, target, source_ir, f"{name}_adjusted_var", running_var, eps_tensor + ) - # adjusted_var = running_var + eps - adjusted_var = impl.elementwise.add( - ctx, target, source_ir, f"{name}_adjusted_var", running_var, eps_tensor - ) + # sqrt_adjusted_var = sqrt(adjusted_var) + sqrt_adjusted_var = impl.unary.sqrt( + ctx, target, source_ir, f"{name}_sqrt", adjusted_var + ) - # sqrt_adjusted_var = sqrt(adjusted_var) - sqrt_adjusted_var = impl.unary.sqrt( - ctx, target, source_ir, f"{name}_sqrt", adjusted_var - ) + # scale = weight / sqrt_adjusted_var + scale = impl.elementwise.div( + ctx, target, source_ir, f"{name}_scale", weight, sqrt_adjusted_var + ) - # scale = weight / sqrt_adjusted_var - scale = impl.elementwise.div( - ctx, target, source_ir, f"{name}_scale", weight, sqrt_adjusted_var - ) + # scaled_running_mean = running_mean * scale + scaled_running_mean = impl.elementwise.mul( + ctx, target, source_ir, f"{name}_scaled_running_mean", running_mean, scale + ) - # scaled_running_mean = running_mean * scale - scaled_running_mean = impl.elementwise.mul( - ctx, target, source_ir, f"{name}_scaled_running_mean", running_mean, scale - ) + # bias_adjusted = bias - scaled_running_mean + bias_adjusted = impl.elementwise.sub( + ctx, target, source_ir, f"{name}_bias_adjusted", bias, scaled_running_mean + ) - # bias_adjusted = bias - scaled_running_mean 
- bias_adjusted = impl.elementwise.sub( - ctx, target, source_ir, f"{name}_bias_adjusted", bias, scaled_running_mean - ) + # Reshape scale and bias_adjusted to match input shape for broadcasting + expanded_shape = [1] * len(output_shape) + expanded_shape[1] = output_shape[1] # Set channel dimension - # Reshape scale and bias_adjusted to match input shape for broadcasting - expanded_shape = [1] * len(output_shape) - expanded_shape[1] = output_shape[1] # Set channel dimension + scale_reshape = impl.shuffle.reshape( + ctx, + target, + source_ir, + f"{name}_reshape_scale", + scale, + tuple(expanded_shape), + ) + bias_adjusted_reshape = impl.shuffle.reshape( + ctx, + target, + source_ir, + f"{name}_reshape_bias", + bias_adjusted, + tuple(expanded_shape), + ) - scale_reshape = impl.shuffle.reshape( - ctx, - target, - source_ir, - f"{name}_reshape_scale", - scale, - tuple(expanded_shape), - ) - bias_adjusted_reshape = impl.shuffle.reshape( - ctx, - target, - source_ir, - f"{name}_reshape_bias", - bias_adjusted, - tuple(expanded_shape), - ) + # Apply the scale and bias to the input + scaled_input = impl.elementwise.mul( + ctx, target, source_ir, f"{name}_scaled_input", input, scale_reshape + ) + output = impl.elementwise.add( + ctx, + target, + source_ir, + f"{name}_output", + scaled_input, + bias_adjusted_reshape, + ) - # Apply the scale and bias to the input - scaled_input = impl.elementwise.mul( - ctx, target, source_ir, f"{name}_scaled_input", input, scale_reshape - ) - output = impl.elementwise.add( - ctx, - target, - source_ir, - f"{name}_output", - scaled_input, - bias_adjusted_reshape, - ) + else: + if weight is None: + weight = 1.0 + + if bias is None: + bias = 0.0 + + if running_mean is None: + running_mean = 0.0 + + if running_var is None: + running_var = 1.0 + adjusted_scale, adjusted_bias = batch_norm_constant_folding( + weight, bias, running_mean, running_var, eps + ) + power = torch.ones_like(adjusted_scale) + + adjusted_scale = to_trt_weights( + ctx, + 
adjusted_scale, + name, + layer_type_name="SCALE", + weight_type_name="SCALE", + target=target, + source_ir=source_ir, + ) + adjusted_bias = to_trt_weights( + ctx, + adjusted_bias, + name, + layer_type_name="SCALE", + weight_type_name="SHIFT", + target=target, + source_ir=source_ir, + ) + + power = to_trt_weights( + ctx, + power, + name, + layer_type_name="SCALE", + weight_type_name="POWER", + target=target, + source_ir=source_ir, + ) + + output_shape = input.shape + if len(input.shape) < 4: + + new_shape = ( + (input.shape[0], input.shape[1], 1, 1) + if len(input.shape) == 2 + else (input.shape[0], input.shape[1], input.shape[2], 1) + ) + input = impl.shuffle.reshape( + ctx, target, source_ir, f"{name}_reshape_2d", input, new_shape + ) + + layer = ctx.net.add_scale_nd( + input, trt.ScaleMode.CHANNEL, adjusted_bias, adjusted_scale, power, 1 + ) + set_layer_name(layer, target, name, source_ir) + output = layer.get_output(0) # For BatchNorm1d, reshape output back to original shape if necessary if len(output_shape) < 4: @@ -150,17 +224,29 @@ def batch_norm( return output +def batch_norm_constant_folding( + weight: torch.Tensor, + bias: torch.Tensor, + running_mean: torch.Tensor, + running_var: torch.Tensor, + eps: float, +) -> Tuple[torch.Tensor, torch.Tensor]: + adjusted_scale = weight / torch.sqrt(running_var + eps) + adjusted_bias = bias - running_mean * adjusted_scale + return adjusted_scale, adjusted_bias + + def native_layer_norm( ctx: ConversionContext, target: Target, source_ir: Optional[SourceIR], name: str, - input: TRTTensor, + input: trt.ITensor, normalized_shape: List[int], - weight: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], - bias: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], + weight: Optional[Union[trt.ITensor, torch.Tensor, np.ndarray]], + bias: Optional[Union[trt.ITensor, torch.Tensor, np.ndarray]], eps: float, -) -> Tuple[TRTTensor, torch.Tensor, torch.Tensor]: +) -> Tuple[trt.ITensor, torch.Tensor, torch.Tensor]: dims = 
list(range(len(input.shape) - len(normalized_shape), len(input.shape))) axes = get_axes_for_reduce_op(dims) @@ -200,15 +286,15 @@ def native_group_norm( target: Target, source_ir: Optional[SourceIR], name: str, - input: TRTTensor, - weight: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], - bias: Optional[Union[TRTTensor, torch.Tensor, np.ndarray]], + input: trt.ITensor, + weight: Optional[Union[trt.ITensor, torch.Tensor, np.ndarray]], + bias: Optional[Union[trt.ITensor, torch.Tensor, np.ndarray]], N: int, C: int, HxW: int, group: int, eps: float, -) -> Tuple[TRTTensor, torch.Tensor, torch.Tensor]: +) -> Tuple[trt.ITensor, torch.Tensor, torch.Tensor]: rank = len(input.shape) assert rank >= 3, f"Expected at least 3 dimensions for input tensor but got {rank}" @@ -229,7 +315,7 @@ def native_group_norm( ctx, target, source_ir, f"{name}_expand_bias_zero", bias_zero, shape ) - axes = get_axes_for_reduce_op([i for i in range(1 if group == 1 else 2, rank)]) + axes = get_axes_for_reduce_op(list(range(1 if group == 1 else 2, rank))) # INormalizationLayer scales the normalized output per-group, but PyTorch scales the normalized output per-channel, # hence causing diverse result. 
Let TensorRT does no-op for scaling here, and do scaling ourselves later @@ -274,10 +360,10 @@ def softmax( target: Target, source_ir: Optional[SourceIR], name: str, - input: TRTTensor, + input: trt.ITensor, dim: int, half_to_float: bool, -) -> Union[TRTTensor, Sequence[TRTTensor]]: +) -> Union[trt.ITensor, Sequence[trt.ITensor]]: dim = get_positive_dim(dim, len(input.shape)) if half_to_float: @@ -294,9 +380,9 @@ def pdist( target: Target, source_ir: Optional[SourceIR], name: str, - input: TRTTensor, + input: trt.ITensor, p: float = 2, -) -> Union[TRTTensor, Sequence[TRTTensor]]: +) -> Union[trt.ITensor, Sequence[trt.ITensor]]: shape = input.shape # Extend input from shape [N, D] to [N, 1, D] extend_input = impl.unsqueeze.unsqueeze( @@ -390,8 +476,8 @@ def tri_upper_indices( target: Target, source_ir: Optional[SourceIR], name: str, - size_tensor: TRTTensor, -) -> TRTTensor: + size_tensor: trt.ITensor, +) -> trt.ITensor: """ Return the indices for the upper-triangle part of a square size of matrix in a N-by-2 Tensor, where the diagonal offset = 1. One loop is used to calculate the indices like below. @@ -410,7 +496,7 @@ def tri_upper_indices( target (Target): Target of calling node. source_ir (Optional[SourceIR]): SourceIR of calling converter. name (str): Name of the calling layer. - size_tensor (TRTTensor): number of rows in the 2-D square matrix. scalar tensor. + size_tensor (trt.ITensor): number of rows in the 2-D square matrix. scalar tensor. Example: if size_tensor is 4, it will return [[0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3]] @@ -560,11 +646,11 @@ def cdist_forward( target: Target, source_ir: Optional[SourceIR], name: str, - x1: TRTTensor, - x2: TRTTensor, + x1: trt.ITensor, + x2: trt.ITensor, p: float, compute_mode: Optional[int], -) -> Union[TRTTensor, Sequence[TRTTensor]]: +) -> Union[trt.ITensor, Sequence[trt.ITensor]]: """ Computes pairwise distances between sets of vectors in tensors x1 and x2 using the p-norm. 
The function treats the last dimension of x1 and x2 as feature dimensions, which must be identical for both inputs. The second-to-last dimensions can differ, reflecting diff --git a/py/torch_tensorrt/dynamo/conversion/impl/quantize.py b/py/torch_tensorrt/dynamo/conversion/impl/quantize.py index 9c1d95b585..2aeedb144e 100644 --- a/py/torch_tensorrt/dynamo/conversion/impl/quantize.py +++ b/py/torch_tensorrt/dynamo/conversion/impl/quantize.py @@ -45,7 +45,6 @@ def quantize( Adds quantize and dequantize ops (QDQ) which quantize to INT8 or FP8 based on the output_type set and dequantizes them back. """ - with unset_fake_temporarily(): if isinstance(input_tensor, (torch.Tensor, TRTTensor)): if input_tensor.dtype not in ( @@ -118,8 +117,6 @@ def quantize( if not isinstance(input_tensor, TRTTensor): input_tensor = get_trt_tensor(ctx, input_tensor, name + "_quantize_input") - quantize_layer = ctx.net.add_quantize(input_tensor, scale, dtype) - # Add Q node quantize_layer = ctx.net.add_quantize(input_tensor, scale, dtype) if axis is not None: diff --git a/py/torch_tensorrt/dynamo/conversion/impl/slice/ops.py b/py/torch_tensorrt/dynamo/conversion/impl/slice/ops.py index 990b01eb70..203bb03553 100644 --- a/py/torch_tensorrt/dynamo/conversion/impl/slice/ops.py +++ b/py/torch_tensorrt/dynamo/conversion/impl/slice/ops.py @@ -5,6 +5,7 @@ import numpy as np import tensorrt as trt from torch.fx.node import Target + from torch_tensorrt.dynamo._SourceIR import SourceIR from torch_tensorrt.dynamo.conversion import impl from torch_tensorrt.dynamo.conversion._ConversionContext import ConversionContext @@ -13,6 +14,9 @@ flatten_dims, get_positive_dim, get_trt_tensor, + has_dynamic_shape, + prepend_ones, + set_layer_name, ) from torch_tensorrt.dynamo.conversion.impl.cat import cat from torch_tensorrt.dynamo.conversion.impl.elementwise import floor_divide @@ -23,11 +27,6 @@ from torch_tensorrt.dynamo.conversion.impl.shape import shape as get_shape from 
torch_tensorrt.dynamo.conversion.impl.slice.base import slice from torch_tensorrt.dynamo.utils import DYNAMIC_DIM -from torch_tensorrt.fx.converters.converter_utils import ( - has_dynamic_shape, - prepend_ones, - set_layer_name, -) from torch_tensorrt.fx.types import Shape, TRTTensor @@ -230,7 +229,7 @@ def expand( # If the rank of the input tensor is less than the shape's rank, pad with ones if initial_tensor_rank < shape_rank: input_t = prepend_ones( - ctx.net, + ctx, input_t, name + "_expand_broadcast", shape_rank - initial_tensor_rank, diff --git a/py/torch_tensorrt/dynamo/conversion/ops_evaluators.py b/py/torch_tensorrt/dynamo/conversion/ops_evaluators.py index f320505c94..a2feb99d56 100644 --- a/py/torch_tensorrt/dynamo/conversion/ops_evaluators.py +++ b/py/torch_tensorrt/dynamo/conversion/ops_evaluators.py @@ -23,7 +23,10 @@ def getitem_validator(getitem_node: Node, settings: CompilationSettings = None) from torch_tensorrt.dynamo.conversion._ConverterRegistry import DYNAMO_CONVERTERS # Getitem nodes can only be converted if their parent node also can - return getitem_node.args[0] in DYNAMO_CONVERTERS + return ( + getitem_node.args[0] in DYNAMO_CONVERTERS + or getitem_node.args[0].op == "get_attr" + ) # TODO: Subsequent evaluators should be registered here with their own validators @@ -43,7 +46,10 @@ def generic_evaluator( _LOGGER.debug( f"Evaluating {ConverterRegistry.qualified_name_or_str(target)} on object with name: {name}" ) - return target(*args) + from torch._subclasses.fake_tensor import unset_fake_temporarily + + with unset_fake_temporarily(): + return target(*args) def rand_validator(rand_node: Node, settings: CompilationSettings = None) -> bool: diff --git a/py/torch_tensorrt/dynamo/lowering/_decomposition_groups.py b/py/torch_tensorrt/dynamo/lowering/_decomposition_groups.py index 825be75076..eca5d7fe77 100644 --- a/py/torch_tensorrt/dynamo/lowering/_decomposition_groups.py +++ b/py/torch_tensorrt/dynamo/lowering/_decomposition_groups.py @@ -171,6 
+171,8 @@ aten.upsample_bilinear2d.vec, aten.upsample_trilinear3d.vec, aten.upsample_bicubic2d.vec, + aten.linear.default, + aten.matmul.default, } diff --git a/py/torch_tensorrt/dynamo/lowering/_decompositions.py b/py/torch_tensorrt/dynamo/lowering/_decompositions.py index e8bb8eb2ef..fb7b833a5f 100644 --- a/py/torch_tensorrt/dynamo/lowering/_decompositions.py +++ b/py/torch_tensorrt/dynamo/lowering/_decompositions.py @@ -9,7 +9,6 @@ _get_decomp_for_cia, ) from torch._ops import OpOverload - from torch_tensorrt.dynamo._defaults import default_device from torch_tensorrt.dynamo.conversion.converter_utils import get_positive_dim from torch_tensorrt.dynamo.utils import to_torch_device @@ -202,34 +201,29 @@ def slice_scatter_decomposition( start = get_positive_dim(start, input_tensor.shape[dim]) if end is None: # Ensure end is int end = dim_size - end = get_positive_dim(end, input_tensor.shape[dim]) + end = ( + get_positive_dim(end, input_tensor.shape[dim]) if isinstance(end, int) else end + ) if step is None: step = 1 - src_dim = src_tensor.shape # step == 0 is not a valid torch case - # also src_dim should be equal to slice dimension - if start == 0 and end == dim_size and step == 1: return src_tensor # Ensure start, end, and step are all integers - assert isinstance(start, int), "start must be an integer" - assert isinstance(end, int), "end must be an integer" - assert isinstance(step, int), "step must be an integer" - - cat_tensors = [] - index_tensor_shape = [] - for i, src_each_dim in enumerate(list(src_dim)): - if i != dim: - index_tensor_shape.append(src_each_dim) - for index in range(start, end, step): - cat_tensors.append(index * torch.ones(index_tensor_shape, dtype=torch.int64)) - index_tensor = torch.stack(cat_tensors, dim) - index_tensor = index_tensor.to(device_input_tensor) - index_tensor_64 = index_tensor.to(torch.int64) - output_tensor = torch.scatter(input_tensor, dim, index_tensor_64, src_tensor) - return output_tensor + assert isinstance(start, 
(int, torch.SymInt)), "start must be an int or SymInt" + assert isinstance(end, (int, torch.SymInt)), "end must be an int or SymInt" + assert isinstance(step, (int, torch.SymInt)), "step must be an int or SymInt" + + indices = torch.arange( + start, end, step, device=device_input_tensor, dtype=torch.int64 + ) + index_tensor = indices.view( + [-1 if i == dim else 1 for i in range(input_tensor.dim())] + ) + index_tensor = index_tensor.expand_as(src_tensor) + return torch.scatter(input_tensor, dim, index_tensor, src_tensor) @register_torch_trt_decomposition( @@ -428,8 +422,8 @@ def instance_norm_decomposition( @register_torch_trt_decomposition( torch.ops.aten.full_like, registry=TORCH_TRT_DECOMPOSITIONS -) # type: ignore -def full_like_decomposition(*args, **kwargs) -> torch.Tensor: +) +def full_like_decomposition(*args: Any, **kwargs: Any) -> torch.Tensor: input = args[0] shape = args[0].shape fill_value = args[1] @@ -459,11 +453,13 @@ def scaled_dot_product_attention_decomposition( ) -> torch.Tensor: L, S = query.size(-2), key.size(-2) device = query.device - attn_bias = torch.zeros(L, S, dtype=query.dtype, device=device) + + if is_causal or attn_mask is not None: + attn_bias = torch.zeros((L, S), dtype=query.dtype, device=device) if is_causal: assert attn_mask is None, "attn_mask must be None when is_causal=True" - temp_mask = torch.ones(L, S, dtype=torch.bool, device=device).tril(diagonal=0) + temp_mask = torch.ones((L, S), dtype=torch.bool, device=device).tril(diagonal=0) attn_bias = attn_bias.masked_fill(temp_mask.logical_not(), float("-inf")) if attn_mask is not None: @@ -476,7 +472,7 @@ def scaled_dot_product_attention_decomposition( key = key.repeat_interleave(query.size(-3) // key.size(-3), -3) value = value.repeat_interleave(query.size(-3) // value.size(-3), -3) - attn_weight = query @ key.transpose(-2, -1) + attn_weight = torch.matmul(query, key.transpose(-2, -1)) if scale is None: scale = torch.sqrt(torch.scalar_tensor(query.size(-1), dtype=torch.int)) @@ 
-484,9 +480,12 @@ def scaled_dot_product_attention_decomposition( else: attn_weight = attn_weight * scale - attn_weight = attn_weight + attn_bias + if is_causal or attn_mask is not None: + # We only add attn_bias when we have to, otherwise this will have a negative impact on the performance even it's 0. + attn_weight = attn_weight + attn_bias + attn_weight = torch.softmax(attn_weight, dim=-1) - return attn_weight @ value + return torch.matmul(attn_weight, value) @register_torch_trt_decomposition( diff --git a/py/torch_tensorrt/dynamo/lowering/passes/__init__.py b/py/torch_tensorrt/dynamo/lowering/passes/__init__.py index 716c6505fe..c0e2803e60 100644 --- a/py/torch_tensorrt/dynamo/lowering/passes/__init__.py +++ b/py/torch_tensorrt/dynamo/lowering/passes/__init__.py @@ -1,4 +1,3 @@ from ._aten_lowering_pass import * -from ._modify_reshape_complex_nodes import modify_reshape_complex_nodes from .remove_sym_nodes import remove_sym_nodes from .repair_input_aliasing import repair_input_aliasing diff --git a/py/torch_tensorrt/dynamo/lowering/passes/_aten_lowering_pass.py b/py/torch_tensorrt/dynamo/lowering/passes/_aten_lowering_pass.py index c7fe264c5a..fff4473b47 100644 --- a/py/torch_tensorrt/dynamo/lowering/passes/_aten_lowering_pass.py +++ b/py/torch_tensorrt/dynamo/lowering/passes/_aten_lowering_pass.py @@ -6,6 +6,7 @@ from torch_tensorrt.dynamo.utils import is_tegra_platform from .accumulate_fp32_matmul import accumulate_fp32_matmul +from .complex_graph_rewrite import complex_graph_detection from .constant_folding import constant_fold from .fuse_distributed_ops import fuse_distributed_ops from .fuse_prims_broadcast import fuse_prims_broadcast @@ -26,6 +27,7 @@ remove_assert_nodes, accumulate_fp32_matmul, remove_num_users_is_0_nodes, + complex_graph_detection, ] pre_lowering_pass_list = [ diff --git a/py/torch_tensorrt/dynamo/lowering/passes/accumulate_fp32_matmul.py b/py/torch_tensorrt/dynamo/lowering/passes/accumulate_fp32_matmul.py index e569c45cfa..282693d299 
100644 --- a/py/torch_tensorrt/dynamo/lowering/passes/accumulate_fp32_matmul.py +++ b/py/torch_tensorrt/dynamo/lowering/passes/accumulate_fp32_matmul.py @@ -10,6 +10,18 @@ def split_addmm_nodes(gm: torch.fx.GraphModule) -> torch.fx.GraphModule: + """ + Splits all `torch.ops.aten.addmm.default` nodes in the FX graph into separate + `add` and `mm` nodes. This is useful for passes that want to insert additional + logic (such as FP32 accumulation) specifically around the matrix multiplication + operation, rather than the fused addmm. + + Args: + gm (torch.fx.GraphModule): The FX graph module to transform. + + Returns: + torch.fx.GraphModule: The modified FX graph module with addmm nodes split. + """ target = torch.ops.aten.addmm.default addmm_nodes = [node for node in gm.graph.nodes if node.target == target] for addmm_node in addmm_nodes: @@ -52,6 +64,7 @@ def accumulate_fp32_matmul( matmul_targets = [ torch.ops.aten.mm.default, torch.ops.aten.bmm.default, + torch.ops.aten.matmul.default, ] # Split torch.addmm nodes into add + mm and only add cast nodes around mm nodes diff --git a/py/torch_tensorrt/dynamo/lowering/passes/complex_graph_rewrite.py b/py/torch_tensorrt/dynamo/lowering/passes/complex_graph_rewrite.py new file mode 100644 index 0000000000..c3ead218aa --- /dev/null +++ b/py/torch_tensorrt/dynamo/lowering/passes/complex_graph_rewrite.py @@ -0,0 +1,361 @@ +import logging +from typing import Callable, List, Set, Tuple + +import torch +from torch._subclasses.fake_tensor import FakeTensorMode +from torch.fx import GraphModule, Node +from torch.fx.experimental.proxy_tensor import unset_fake_temporarily +from torch_tensorrt.dynamo._settings import CompilationSettings +from torch_tensorrt.dynamo.lowering.passes.pass_utils import ( + clean_up_graph_after_modifications, +) + +logger = logging.getLogger(__name__) + + +class ComplexSubGraphInfo: + def __init__( + self, + anchor_nodes: List[Node], + subgraph_nodes: List[Node], + input_nodes: List[Node], + ): + 
self.anchor_nodes = anchor_nodes + self.subgraph_nodes = subgraph_nodes + self.input_nodes = input_nodes + + def __repr__(self) -> str: + return ( + f"ComplexOpSubGraphInfo(anchor_nodes={[n.name for n in self.anchor_nodes]}, " + f"subgraph={[n.name for n in self.subgraph_nodes]}, " + f"inputs={[n.name for n in self.input_nodes]})" + ) + + +class ComplexOpDetector: + def __init__(self) -> None: + pass + + def is_complex_dtype(self, node: Node) -> bool: + # Check if node's metadata or dtype is complex + dtype = None + if "val" in node.meta: + val = node.meta["val"] + if hasattr(val, "dtype"): + dtype = val.dtype + + logger.debug(f"dtype of node: {dtype}") + return dtype in {torch.complex64, torch.complex128} + + def node_include_in_subgraph(self, node: Node) -> bool: + # Include only call_function ops on complex tensors + if node.op == "call_function" and self.is_complex_dtype(node): + logger.debug( + f"node.op is added to subgraph: {node.op}, node name: {node.name} is complex" + ) + return node.op == "call_function" and self.is_complex_dtype(node) + + def subgraph_from_anchor(self, anchor_node: Node) -> ComplexSubGraphInfo: + subgraph_nodes: Set[Node] = set() + input_nodes: Set[Node] = set() + stack = [anchor_node] + while stack: + n = stack.pop() + if n in subgraph_nodes: + continue + subgraph_nodes.add(n) + logger.debug(f"node {n.name} is added to subgraph") + for inp in n.all_input_nodes: + if self.node_include_in_subgraph(inp): + stack.append(inp) + else: + input_nodes.add(inp) + return ComplexSubGraphInfo( + [anchor_node], list(subgraph_nodes), list(input_nodes) + ) + + def find_complex_op_subgraphs( + self, gm: GraphModule, anchor_target: str + ) -> List[ComplexSubGraphInfo]: + complex_op_subgraphs: List[ComplexSubGraphInfo] = [] + for node in gm.graph.nodes: + if node.target == anchor_target: + new_sub = self.subgraph_from_anchor(node) + # if any intersecting nodes between seen and sub.subgraph_nodes they should be merged + merged = False + for existing_sub 
in complex_op_subgraphs: + if set(existing_sub.subgraph_nodes) & set(new_sub.subgraph_nodes): + logger.debug(f"merging subgraphs {existing_sub} {new_sub}") + # merge the two subgraphs + existing_sub.subgraph_nodes = list( + set(existing_sub.subgraph_nodes) + | set(new_sub.subgraph_nodes) + ) + existing_sub.input_nodes = list( + set(existing_sub.input_nodes) | set(new_sub.input_nodes) + ) + existing_sub.anchor_nodes = list( + set(existing_sub.anchor_nodes) | set(new_sub.anchor_nodes) + ) + merged = True + break + if not merged: + complex_op_subgraphs.append(new_sub) + return complex_op_subgraphs + + +class ComplexGraphRewriter: + def __init__(self, gm: GraphModule, truncate_double: bool = False) -> None: + self.gm = gm + self.truncate_double = truncate_double + + def extract_shape_dtype_device( + self, input_node: Node + ) -> Tuple[Tuple[int, ...], torch.dtype, torch.device]: + if input_node.op == "placeholder": + tensor_val = input_node.meta["val"] + + elif input_node.op == "get_attr": + tensor_val = self.get_attr_tensor(input_node.target) # type: ignore + + else: + raise ValueError(f"Unsupported node type: {input_node.op}") + + node_shape = tensor_val.size() + dtype = tensor_val.dtype + new_node_shape = node_shape + (2,) + device = tensor_val.device + + if dtype == torch.complex64: + new_node_dtype = torch.float32 + elif dtype == torch.complex128 and self.truncate_double: + new_node_dtype = torch.float32 + else: + new_node_dtype = torch.float64 + + return new_node_shape, new_node_dtype, device + + def get_attr_tensor(self, target): # type: ignore + # Check if target is param or buffer + if target in dict(self.gm.named_parameters()): + return self.gm.get_parameter(target) + elif target in dict(self.gm.named_buffers()): + return self.gm.get_buffer(target) + else: + raise ValueError( + f"Attribute {target} not found in gm parameters or buffers." 
+ ) + + def replace_input_node(self, input_node: Node) -> None: + modified = False + logger.debug(f"Replacing input node: {input_node.name}") + new_shape, new_dtype, device = self.extract_shape_dtype_device(input_node) + real_tensor = torch.empty(new_shape, dtype=new_dtype, device=device) + + if input_node.op == "placeholder": + with FakeTensorMode() as fake_mode: + fake_tensor = fake_mode.from_tensor(real_tensor) + with self.gm.graph.inserting_before(input_node): + new_node = self.gm.graph.placeholder(input_node.target + "_reshaped") + new_node.meta["val"] = fake_tensor + + elif input_node.op == "get_attr": + new_attr_name = input_node.target + "_reshaped" + with unset_fake_temporarily(): + original_tensor = self.get_attr_tensor(input_node.target) # type: ignore + stacked_tensor = torch.stack( + [original_tensor.real, original_tensor.imag], dim=-1 + ) + self.gm.register_buffer(new_attr_name, stacked_tensor) + with self.gm.graph.inserting_after(input_node): + new_node = self.gm.graph.get_attr(new_attr_name) + else: + logger.debug( + f"Unsupported node type in replacement of input node: {input_node.op}" + ) + logger.debug( + "This complex subgraph inputnode type does not need to replaced" + ) + input_node.replace_all_uses_with(new_node) + self.gm.graph.erase_node(input_node) + clean_up_graph_after_modifications(self.gm) + + def rewrite_subgraph_nodes(self, subgraphs: List[ComplexSubGraphInfo]) -> None: + modified = False + for subgraph in subgraphs: + for input_node in subgraph.input_nodes: + logger.debug(f"Input node rewrite: {input_node.name}") + if input_node.op not in ("call_function"): + self.replace_input_node(input_node) + for node in subgraph.subgraph_nodes: + logger.debug(f"Subgraph Node rewrite: {node.name}") + if node.target == torch.ops.aten.view_as_complex.default: + node.replace_all_uses_with(node.args[0]) + self.gm.graph.erase_node(node) + elif node.target == torch.ops.aten.mul.Tensor: + # this is complex mul where inputs = a+ib and output = c+id. 
+ # complex mul returns (ac - bd) + (ad + bc)i + # which is then view_as_real as (ac-bd), (ad+bc) stacked along the last dimension with last dimension size 2 + x_placeholder_or_func = ( + True if node.args[0].op != "get_attr" else False + ) + y_placeholder_or_func = ( + True if node.args[1].op != "get_attr" else False + ) + + replaced_nodes = [] + original_mul, replacement = complex_mul_replacement( + x_placeholder_or_func, y_placeholder_or_func + ) + + def match_complex_mul( # type: ignore[no-untyped-def] + match: torch.fx.subgraph_rewriter.Match, + original_graph, + pattern_graph, + ) -> bool: + for original_node in match.nodes_map.values(): + if original_node.name == node.name: + return True + return False + + nodes = torch.fx.subgraph_rewriter.replace_pattern_with_filters( + self.gm, + original_mul, + replacement, + match_filters=[match_complex_mul], + ignore_literals=True, + ) + replaced_nodes += nodes + modified = True + elif node.target == torch.ops.aten.view_as_real.default: + node.replace_all_uses_with(node.args[0]) + self.gm.graph.erase_node(node) + else: + logger.debug(f"Unsupported node target: {node.target}") + logger.debug( + "This complex subgraphnode type does not need to replaced" + ) + + if modified: + self.propagate_metadata() + self.gm.graph.lint() + self.gm.recompile() + + def propagate_metadata(self) -> None: + fake_inputs = [] + from torch._subclasses.fake_tensor import FakeTensorMode + from torch.fx.passes.fake_tensor_prop import FakeTensorProp + + for node in self.gm.graph.nodes: + if node.op == "placeholder": + if "val" in node.meta: + with FakeTensorMode(allow_non_fake_inputs=True): + fake_val = node.meta["val"] + fake_inputs.append( + fake_val.to("cuda") + if fake_val.device.type == "cuda" + else fake_val + ) + else: + fake_tensor = torch.empty( + [s if s != 0 else 1 for s in node.meta["tensor_meta"].shape], + dtype=node.meta["tensor_meta"].dtype, + device=node.meta["tensor_meta"].device, + ) + fake_inputs.append(fake_tensor) + 
FakeTensorProp( + self.gm, mode=FakeTensorMode(allow_non_fake_inputs=True) + ).propagate(*fake_inputs) + + +def extract_real_imag(input, placeholder_or_func: bool = True): # type: ignore + """Extract real and imaginary parts from a tensor. + This function handles different tensor types based on whether they are placeholder/function + tensors or get_attr tensors. For placeholder/function tensors, it uses select operations, + while for get_attr tensors, it uses indexing. + Args: + input: Input tensor to extract real and imaginary parts from + placeholder_or_func: Boolean flag indicating if the input is a placeholder/function tensor (True) + or a get_attr tensor (False). Defaults to True. + Returns: + Tuple of (real_part, imaginary_part) where both parts have the same type as the input + Note: + - When placeholder_or_func=True: Uses torch.ops.aten.select.int operations + - When placeholder_or_func=False: Uses tensor indexing [..., 0] and [..., 1] + """ + if placeholder_or_func: + # For ITensor, use select operations + real_part = torch.ops.aten.select.int(input, -1, 0) + imag_part = torch.ops.aten.select.int(input, -1, 1) + return real_part, imag_part + else: + # For get_attr, use indexing + return input[..., 0], input[..., 1] + + +def complex_mul_replacement( + x_placeholder_or_func: bool = True, y_placeholder_or_func: bool = True +) -> Tuple[ + Callable[[torch.Tensor, torch.Tensor], torch.Tensor], + Callable[[torch.Tensor, torch.Tensor], torch.Tensor], +]: + """Constructs the original and replacement functions for complex multiplication. + + The original functions correspond to native complex multiplication + via torch.mul or operator.mul on complex tensors. + + The replacement function assumes x and y are real tensors with the last + dimension size 2 representing real and imaginary parts, and performs + complex multiplication manually returning the same shaped tensor. 
+ """ + + # Original pattern: torch.mul for complex tensors + def original_mul(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: + return torch.ops.aten.mul.Tensor(x, y) + + # Replacement function: manual complex multiplication on real/imag stacked tensors + def replacement(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: + x_real, x_imag = extract_real_imag(x, x_placeholder_or_func) + y_real, y_imag = extract_real_imag(y, y_placeholder_or_func) + + real_part1 = torch.ops.aten.mul.Tensor(x_real, y_real) + real_part2 = torch.ops.aten.mul.Tensor(x_imag, y_imag) + real = torch.ops.aten.sub.Tensor(real_part1, real_part2) + + imag_part1 = torch.ops.aten.mul.Tensor(x_real, y_imag) + imag_part2 = torch.ops.aten.mul.Tensor(x_imag, y_real) + imag = torch.ops.aten.add.Tensor(imag_part1, imag_part2) + + return torch.ops.aten.cat.default( + [ + torch.ops.aten.unsqueeze.default(real, -1), + torch.ops.aten.unsqueeze.default(imag, -1), + ], + -1, + ) + + return (original_mul, replacement) + + +# This lowering pass is used to detect and rewrite complex subgraphs in the graph +def complex_graph_detection( + gm: GraphModule, settings: CompilationSettings +) -> GraphModule: + """Detect and rewrite complex subgraphs in the graph. + This lowering pass is used to detect and rewrite complex subgraphs in the graph. + This lowering pass works for complex tensor in mul which are parameter or buffers in the graph. 
+ Args: + gm: The GraphModule to process + settings: Compilation settings + Returns: + The modified GraphModule with complex subgraphs rewritten + """ + complex_op_detector = ComplexOpDetector() + complex_subgraphs = complex_op_detector.find_complex_op_subgraphs( + gm, anchor_target=torch.ops.aten.view_as_real.default + ) + for subgraph in complex_subgraphs: + logger.debug(f"Complex subgraph info: {subgraph}") + complex_graph_rewriter = ComplexGraphRewriter(gm, settings.truncate_double) + complex_graph_rewriter.rewrite_subgraph_nodes(complex_subgraphs) + return gm diff --git a/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py b/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py index 928b7284fe..5ba84b09b0 100644 --- a/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py +++ b/py/torch_tensorrt/dynamo/lowering/passes/constant_folding.py @@ -103,10 +103,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.quantization_ops: Set[torch._ops.OpOverload] = set() try: # modelopt import ensures torch.ops.tensorrt.quantize_op.default is registered - import modelopt.torch.quantization as mtq + import modelopt.torch.quantization as mtq # noqa: F401 assert torch.ops.tensorrt.quantize_op.default + assert torch.ops.tensorrt.dynamic_block_quantize_op.default self.quantization_ops.add(torch.ops.tensorrt.quantize_op.default) + self.quantization_ops.add( + torch.ops.tensorrt.dynamic_block_quantize_op.default + ) except Exception as e: pass diff --git a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py index 85a31b9736..b0e41f7aeb 100644 --- a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py +++ b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py @@ -1,5 +1,6 @@ import inspect import logging +import warnings from copy import deepcopy from enum import Enum, auto from typing import Any, Dict, Iterator, Optional, Set, Union @@ -333,7 
+334,7 @@ def export_fn() -> torch.export.ExportedProgram: # Check if any quantization precision is enabled if self.enabled_precisions and any( precision in self.enabled_precisions - for precision in (torch.float8_e4m3fn, torch.int8) + for precision in (torch.float8_e4m3fn, torch.int8, torch.float4_e2m1fn_x2) ): try: from modelopt.torch.quantization.utils import export_torch_mode @@ -476,6 +477,12 @@ def _process_kwarg_inputs(inputs: Any) -> Any: ) def forward(self, *args: Any, **kwargs: Any) -> Any: + warnings.warn( + "Direct calls to {self.__class__}.forward() are currently broken by due to https://github.com/pytorch/pytorch/issues/157183. Either call {self.__class__}(...) directly or use {self.__class__}._forward as a work around" + ) + return self._forward(*args, **kwargs) + + def _forward(self, *args: Any, **kwargs: Any) -> Any: # Step 1: Check whether the input shape has changed kwargs = MutableTorchTensorRTModule._process_kwarg_inputs(kwargs) self._validate_inputs(*args, **kwargs) @@ -535,7 +542,9 @@ def __deepcopy__(self, memo: Any) -> Any: return result def __call__(self, *args: Any, **kwargs: Any) -> Any: - return self.forward(*args, **kwargs) + # Due to https://github.com/pytorch/pytorch/issues/157183, we cannot use forward call, use _forward as a workaround. + # This is a temporary fix. 
+ return self._forward(*args, **kwargs) def __getattr__(self, name: str) -> Any: if name in self.__dict__: diff --git a/py/torch_tensorrt/dynamo/runtime/_PythonTorchTensorRTModule.py b/py/torch_tensorrt/dynamo/runtime/_PythonTorchTensorRTModule.py index fc76b20141..1d619b6ce3 100644 --- a/py/torch_tensorrt/dynamo/runtime/_PythonTorchTensorRTModule.py +++ b/py/torch_tensorrt/dynamo/runtime/_PythonTorchTensorRTModule.py @@ -752,7 +752,14 @@ def validate_input_shapes(self, inputs: Sequence[torch.Tensor]) -> bool: # Representation of input shapes to a given model # Shapes are concatenated as so: # x: (3, 4), y: (4, 5) --> Key: (3,4)(4,5) - new_shape_key = "".join(str(tuple(t.shape)).replace(" ", "") for t in inputs) + tensor_inputs = [] + for t in inputs: + if not isinstance(t, torch.Tensor): + return True + tensor_inputs.append(t) + new_shape_key = "".join( + str(tuple(t.shape)).replace(" ", "") for t in tensor_inputs + ) # If the new shape key differs from the existing one, # invalidate the old shape key and remove the CUDAGraph diff --git a/py/torch_tensorrt/fx/converters/converter_utils.py b/py/torch_tensorrt/fx/converters/converter_utils.py index 510d4ef69b..78ea125424 100644 --- a/py/torch_tensorrt/fx/converters/converter_utils.py +++ b/py/torch_tensorrt/fx/converters/converter_utils.py @@ -909,7 +909,6 @@ def type_cast( """ This function helps to cast the input type to cast_type """ - layer_i = network.add_identity(input) - layer_i.set_output_type(0, cast_type) + layer_i = network.add_cast(input, cast_type) set_layer_name(layer_i, target, f"{name}_dtype_change") return layer_i.get_output(0) diff --git a/pyproject.toml b/pyproject.toml index b45cd96d5d..d390e8b4a9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,17 +6,9 @@ requires = [ "ninja>=1.11.0", "pyyaml>=6.0", "cffi>=1.15.1", - "typing-extensions>=4.7.0", - "future>=0.18.3", - "tensorrt-cu12>=10.11.0,<10.12.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in 
platform_release)", - "tensorrt-cu12>=10.3.0,<10.4.0; platform_machine == 'aarch64' and 'tegra' in platform_release", - "torch>=2.8.0.dev,<2.9.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", + "torch>=2.9.0.dev,<2.10.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", "torch>=2.7.0,<2.8.0; platform_machine == 'aarch64' and 'tegra' in platform_release", "pybind11==2.6.2", - "numpy; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", - "numpy<2.0.0; platform_machine == 'aarch64' and 'tegra' in platform_release", - "sympy", - "dllist", ] build-backend = "setuptools.build_meta" @@ -59,13 +51,13 @@ keywords = [ "inference", ] dependencies = [ - "torch>=2.8.0.dev,<2.9.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", + "torch>=2.9.0.dev,<2.10.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", "torch>=2.7.0,<2.8.0; platform_machine == 'aarch64' and 'tegra' in platform_release", - "tensorrt>=10.11.0,<10.12.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", - "tensorrt-cu12>=10.11.0,<10.12.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", - "tensorrt-cu12-bindings>=10.11.0,<10.12.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", - "tensorrt-cu12-libs>=10.11.0,<10.12.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", + "tensorrt>=10.12.0,<10.13.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", + "tensorrt-cu12>=10.12.0,<10.13.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", + 
"tensorrt-cu12-bindings>=10.12.0,<10.13.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", + "tensorrt-cu12-libs>=10.12.0,<10.13.0; platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)", "tensorrt>=10.3.0,<10.4.0;platform_machine == 'aarch64' and 'tegra' in platform_release", "tensorrt-cu12>=10.3.0,<10.4.0; platform_machine == 'aarch64' and 'tegra' in platform_release", @@ -135,23 +127,23 @@ index-strategy = "unsafe-best-match" [tool.uv.sources] torch = [ - { index = "pytorch-nightly-cu128", marker = "platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)" }, - { index = "jetson-containers", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release" }, + { index = "pytorch-nightly-cu129", marker = "platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)" }, + # { index = "jetson-containers", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release" }, ] torchvision = [ - { index = "pytorch-nightly-cu128", marker = "platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)" }, - { index = "jetson-containers", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release" }, + { index = "pytorch-nightly-cu129", marker = "platform_machine != 'aarch64' or (platform_machine == 'aarch64' and 'tegra' not in platform_release)" }, + # { index = "jetson-containers", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release" }, ] [[tool.uv.index]] -name = "pytorch-nightly-cu128" -url = "https://download.pytorch.org/whl/nightly/cu128" +name = "pytorch-nightly-cu129" +url = "https://download.pytorch.org/whl/nightly/cu129" explicit = false -[[tool.uv.index]] -name = "jetson-containers" -url = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" -explicit = false +# [[tool.uv.index]] +# name = 
"jetson-containers" +# url = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" +# explicit = false [[tool.uv.index]] name = "nvidia" diff --git a/setup.py b/setup.py index fb96d85453..f829602f1a 100644 --- a/setup.py +++ b/setup.py @@ -569,6 +569,7 @@ def run(self): f'/DPYBIND11_BUILD_ABI=\\"{torch._C._PYBIND11_BUILD_ABI}\\"', "/GS-", "/permissive-", + "/utf-8", ] if IS_WINDOWS else [ diff --git a/tests/modules/requirements.txt b/tests/modules/requirements.txt index 1fccd40ee1..90a87e0888 100644 --- a/tests/modules/requirements.txt +++ b/tests/modules/requirements.txt @@ -1,2 +1,2 @@ timm==0.9.12 -transformers==4.48.0 +transformers==4.53.1 diff --git a/tests/py/dynamo/conversion/harness.py b/tests/py/dynamo/conversion/harness.py index 79e656ef82..93ffc8b451 100644 --- a/tests/py/dynamo/conversion/harness.py +++ b/tests/py/dynamo/conversion/harness.py @@ -412,6 +412,7 @@ def run_test( propagate_shapes=False, int32_reqd=False, immutable_weights=True, + use_explicit_typing=False, ): # TODO: lan to remove this and set use_dynamo_traccer to True by default # once all the converter test files are moved to use_dynamo_tracer @@ -422,6 +423,7 @@ def run_test( enabled_precisions={dtype._from(precision)}, truncate_double=True, immutable_weights=immutable_weights, + use_explicit_typing=use_explicit_typing, ) mod = self.generate_graph( diff --git a/tests/py/dynamo/conversion/test_casts.py b/tests/py/dynamo/conversion/test_casts.py index 88260ba771..997092d24b 100644 --- a/tests/py/dynamo/conversion/test_casts.py +++ b/tests/py/dynamo/conversion/test_casts.py @@ -64,6 +64,21 @@ def forward(self, x): precision=torch.float, ) + def test_to_copy_bfloat16(self): + class ToCopyBFloat16(nn.Module): + def forward(self, x): + y = torch.ops.aten._to_copy.default(x, dtype=torch.bfloat16) + y = y**2 + return y + + inputs = [torch.rand((1, 3, 10), dtype=torch.float32)] + self.run_test( + ToCopyBFloat16(), + inputs, + precision=torch.float, + use_explicit_typing=True, + ) + def 
test_to_copy_i64b(self): class ToCopy64Bit(nn.Module): def forward(self, x): diff --git a/tests/py/dynamo/conversion/test_linear_aten.py b/tests/py/dynamo/conversion/test_linear_aten.py new file mode 100644 index 0000000000..2426b7b42d --- /dev/null +++ b/tests/py/dynamo/conversion/test_linear_aten.py @@ -0,0 +1,54 @@ +import torch +import torch.nn as nn +from parameterized import parameterized +from torch.testing._internal.common_utils import run_tests +from torch_tensorrt import Input + +from .harness import DispatchTestCase + + +class TestLinearConverter(DispatchTestCase): + @parameterized.expand( + [ + (10, 10), + (10, 100), + (100, 10), + (100, 100), + ] + ) + def test_linear_converter(self, in_features, out_features): + class LinearModel(nn.Module): + def __init__(self, in_features, out_features): + super(LinearModel, self).__init__() + self.linear = nn.Linear(in_features, out_features) + + def forward(self, x): + return self.linear(x) + + model = LinearModel(in_features, out_features).eval().cuda() + inputs = [torch.randn(int(torch.randint(1, 20, (1,))), in_features).cuda()] + self.run_test(model, inputs, use_dynamo_tracer=True, enable_passes=True) + + def test_linear_with_dynamic_shape(self): + class LinearModel(torch.nn.Module): + def forward(self, x, weight, bias): + return torch.ops.aten.linear.default(x, weight, bias) + + input_specs = [ + Input( + dtype=torch.float32, + min_shape=(1, 10), + opt_shape=(10, 10), + max_shape=(100, 10), + ), + Input(dtype=torch.float32, shape=(20, 10)), + Input(dtype=torch.float32, shape=(20,)), + ] + + self.run_test_with_dynamic_shape( + LinearModel(), input_specs, use_dynamo_tracer=True, enable_passes=True + ) + + +if __name__ == "__main__": + run_tests() diff --git a/tests/py/dynamo/lowering/test_aten_lowering_passes.py b/tests/py/dynamo/lowering/test_aten_lowering_passes.py index 69c91db475..7eaccf9348 100644 --- a/tests/py/dynamo/lowering/test_aten_lowering_passes.py +++ 
b/tests/py/dynamo/lowering/test_aten_lowering_passes.py @@ -237,5 +237,97 @@ def forward(self, input, mat1, mat2): torch._dynamo.reset() +class TestComplexSubgraph(TestCase): + def test_complex_subgraph(self): + BATCH = 1 + SEQ_LEN = 2 + HEADS = 1 + DIM = 2 + + class RotaryAttention(torch.nn.Module): + def __init__(self): + super().__init__() + self.dim = DIM + self.wq = torch.nn.Linear(self.dim, self.dim) + self.seq_len = SEQ_LEN + + self.register_buffer( + "freqs_ex_tensor", + self._freqs_ex_tensor(), + persistent=True, + ) + + def rotary_embedding(self, x, dim, freqs_cis=None): + x_ = torch.view_as_complex(x.float().reshape(*x.shape[:-1], -1, 2)) + x_out_flatten = torch.view_as_real(x_ * freqs_cis) + return x_out_flatten.type_as(x) + + def _freqs_ex_tensor(self): + real = torch.tensor([[[[1.0000]], [[2.0000]]]], device="cuda") + imag = torch.tensor([[[[0.0000]], [[3.0000]]]], device="cuda") + + z = torch.complex(real, imag) + return z + + def forward(self, x): + q = self.wq(x) + freqs_cis = self._freqs_ex_tensor().to(q.device) + q_out = self.rotary_embedding(q, self.dim, freqs_cis=freqs_cis) + return q_out + + inputs = [torch.randn(BATCH, SEQ_LEN, HEADS, DIM).cuda()] + model = RotaryAttention() + model = model.cuda() + + expected_ops = {torch.ops.aten.mul.Tensor} + unexpected_ops = { + torch.ops.aten.view_as_complex.default, + torch.ops.aten.view_as_real.default, + } + + unexpected_ops_seen, expected_ops_unseen = lower_graph_testing( + model, + inputs, + expected_ops=expected_ops, + unexpected_ops=unexpected_ops, + min_block_size=1, + ) + + self.assertEqual( + len(unexpected_ops_seen), + 0, + f"The following unexpected ops were encountered: {unexpected_ops_seen}", + ) + + self.assertEqual( + len(expected_ops_unseen), + 0, + f"The following expected ops were not encountered: {expected_ops_unseen}", + ) + torch._dynamo.reset() + + # Validate that the results between Torch and Torch-TRT are similar + optimized_model = torch_tensorrt.compile( + model, + 
"torch_compile", + inputs, + min_block_size=1, + pass_through_build_failures=True, + ) + optimized_model_results = optimized_model(*inputs)[0].detach().cpu() + torch_model_results = model(*inputs)[0].detach().cpu() + + max_diff = float( + torch.max(torch.abs(optimized_model_results - torch_model_results)) + ) + self.assertAlmostEqual( + max_diff, + 0, + DECIMALS_OF_AGREEMENT, + msg=f"ComplexSubgraph TRT outputs don't match with the original model.", + ) + torch._dynamo.reset() + + if __name__ == "__main__": run_tests() diff --git a/tests/py/dynamo/lowering/test_decompositions.py b/tests/py/dynamo/lowering/test_decompositions.py index b63e0f3bf7..32bf7f8b98 100644 --- a/tests/py/dynamo/lowering/test_decompositions.py +++ b/tests/py/dynamo/lowering/test_decompositions.py @@ -812,6 +812,38 @@ def forward(self, x, src, dim, start, end, step): f"Slice_scatter TRT outputs don't match with the original model.", ) + def test_lowering_slice_scatter_dynamic_module(self): + class sliceScatter(torch.nn.Module): + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + + def forward(self, x, src): + y = torch.ops.aten.slice_scatter(x, src, 1, 6, None, 1) + return y + + dim1 = torch.export.Dim("dim1", min=8, max=10) + dynamic_shapes = { + "x": [torch.export.Dim.STATIC, dim1], + "src": [torch.export.Dim.STATIC, None], + } + inputs = (torch.zeros(8, 8).cuda(), torch.ones(8, 2).cuda()) + exported_program = torch.export.export( + sliceScatter(), tuple(inputs), dynamic_shapes=dynamic_shapes + ) + fx_graph = exported_program.module() + inputs = [ + torch_tensorrt.Input( + min_shape=[8, 8], opt_shape=[8, 10], max_shape=[8, 10] + ), + torch_tensorrt.Input(min_shape=[8, 2], opt_shape=[8, 2], max_shape=[8, 2]), + ] + torch._dynamo.reset() + trt_model = torch_tensorrt.dynamo.compile(exported_program, inputs) + inputs = (torch.zeros(8, 8).cuda(), torch.ones(8, 2).cuda()) + torch.testing.assert_close( + trt_model(*inputs), fx_graph(*inputs), rtol=RTOL, atol=ATOL + 
) + def test_lowering_select_scatter_dimZero_module(self): class selectScatter(torch.nn.Module): def __init__(self, *args, **kwargs) -> None: diff --git a/tests/py/dynamo/models/test_export_serde.py b/tests/py/dynamo/models/test_export_serde.py index e8ce2df8e4..4afe885930 100644 --- a/tests/py/dynamo/models/test_export_serde.py +++ b/tests/py/dynamo/models/test_export_serde.py @@ -321,11 +321,12 @@ def test_resnet18_cpu_offload(ir): exp_program = torchtrt.dynamo.trace(model, **compile_spec) trt_module = torchtrt.dynamo.compile(exp_program, **compile_spec) - assertions.assertTrue( - get_model_device(model).type == "cpu", - msg="Model should be offloaded to CPU", - ) - model.cuda() + if ir == "dynamo": + assertions.assertTrue( + get_model_device(model).type == "cpu", + msg="Model should be offloaded to CPU", + ) + model.cuda() torchtrt.save(trt_module, trt_ep_path) deser_trt_module = torchtrt.load(trt_ep_path).module() diff --git a/tests/py/dynamo/models/test_model_refit.py b/tests/py/dynamo/models/test_model_refit.py index f7b92b92b9..d1ae28fb13 100644 --- a/tests/py/dynamo/models/test_model_refit.py +++ b/tests/py/dynamo/models/test_model_refit.py @@ -89,6 +89,200 @@ def test_mapping(): torch._dynamo.reset() +@unittest.skipIf( + not torch_trt.ENABLED_FEATURES.torch_tensorrt_runtime, + "TorchScript Frontend is not available", +) +@unittest.skipIf( + not torch_trt.ENABLED_FEATURES.refit, + "Refit feature is not supported in Python 3.13 or higher", +) +@unittest.skipIf( + not importlib.util.find_spec("torchvision"), + "torchvision is not installed", +) +@pytest.mark.unit +def test_conv_refit_with_weightmap(): + class net(nn.Module): + def __init__(self): + super().__init__() + self.conv = nn.Conv2d(3, 3, 1) + + def forward(self, x): + return self.conv(x) + + model = net().eval().to("cuda") + model2 = net().eval().to("cuda") + inputs = [torch.randn((1, 3, 224, 224)).to("cuda")] + enabled_precisions = {torch.float} + min_block_size = 1 + use_python_runtime = True + + 
exp_program = torch.export.export(model, tuple(inputs)) + exp_program2 = torch.export.export(model2, tuple(inputs)) + + trt_gm = torchtrt.dynamo.compile( + exp_program, + tuple(inputs), + use_python_runtime=use_python_runtime, + enabled_precisions=enabled_precisions, + min_block_size=min_block_size, + immutable_weights=False, + ) + + new_trt_gm = refit_module_weights( + compiled_module=trt_gm, + new_weight_module=exp_program2, + arg_inputs=inputs, + use_weight_map_cache=True, + verify_output=True, + ) + + # Check the output + model2.to("cuda") + expected_outputs, refitted_outputs = exp_program2.module()(*inputs), new_trt_gm( + *inputs + ) + for expected_output, refitted_output in zip(expected_outputs, refitted_outputs): + assertions.assertTrue( + torch.allclose(expected_output, refitted_output, 1e-2, 1e-2), + "Refit Result is not correct. Refit failed", + ) + # Clean up model env + + torch._dynamo.reset() + + +@unittest.skipIf( + not torch_trt.ENABLED_FEATURES.torch_tensorrt_runtime, + "TorchScript Frontend is not available", +) +@unittest.skipIf( + not torch_trt.ENABLED_FEATURES.refit, + "Refit feature is not supported in Python 3.13 or higher", +) +@unittest.skipIf( + not importlib.util.find_spec("torchvision"), + "torchvision is not installed", +) +@pytest.mark.unit +def test_batch_norm_refit_one_engine_with_weightmap(): + class net(nn.Module): + def __init__(self): + super().__init__() + self.conv = nn.Conv2d(3, 3, 1) + self.bn = nn.BatchNorm2d(3) + + def forward(self, x): + return self.bn(self.conv(x)) + + model = net().eval().to("cuda") + model2 = net().eval().to("cuda") + inputs = [torch.randn((1, 3, 224, 224)).to("cuda")] + enabled_precisions = {torch.float} + min_block_size = 1 + use_python_runtime = True + + exp_program = torch.export.export(model, tuple(inputs)) + exp_program2 = torch.export.export(model2, tuple(inputs)) + + trt_gm = torchtrt.dynamo.compile( + exp_program, + tuple(inputs), + use_python_runtime=use_python_runtime, + 
enabled_precisions=enabled_precisions, + min_block_size=min_block_size, + immutable_weights=False, + ) + + new_trt_gm = refit_module_weights( + compiled_module=trt_gm, + new_weight_module=exp_program2, + arg_inputs=inputs, + use_weight_map_cache=True, + verify_output=True, + ) + + # Check the output + model2.to("cuda") + expected_outputs, refitted_outputs = exp_program2.module()(*inputs), new_trt_gm( + *inputs + ) + for expected_output, refitted_output in zip(expected_outputs, refitted_outputs): + assertions.assertTrue( + torch.allclose(expected_output, refitted_output, 1e-2, 1e-2), + "Refit Result is not correct. Refit failed", + ) + # Clean up model env + + torch._dynamo.reset() + + +@unittest.skipIf( + not torch_trt.ENABLED_FEATURES.torch_tensorrt_runtime, + "TorchScript Frontend is not available", +) +@unittest.skipIf( + not torch_trt.ENABLED_FEATURES.refit, + "Refit feature is not supported in Python 3.13 or higher", +) +@unittest.skipIf( + not importlib.util.find_spec("torchvision"), + "torchvision is not installed", +) +@pytest.mark.unit +def test_batch_norm_refit_one_engine_without_weightmap(): + class net(nn.Module): + def __init__(self): + super().__init__() + self.conv = nn.Conv2d(3, 3, 1) + self.bn = nn.BatchNorm2d(3) + + def forward(self, x): + return self.bn(self.conv(x)) + + model = net().eval().to("cuda") + model2 = net().eval().to("cuda") + inputs = [torch.randn((1, 3, 224, 224)).to("cuda")] + enabled_precisions = {torch.float} + min_block_size = 1 + use_python_runtime = True + + exp_program = torch.export.export(model, tuple(inputs)) + exp_program2 = torch.export.export(model2, tuple(inputs)) + + trt_gm = torchtrt.dynamo.compile( + exp_program, + tuple(inputs), + use_python_runtime=use_python_runtime, + enabled_precisions=enabled_precisions, + min_block_size=min_block_size, + immutable_weights=False, + ) + + new_trt_gm = refit_module_weights( + compiled_module=trt_gm, + new_weight_module=exp_program2, + arg_inputs=inputs, + 
use_weight_map_cache=False, + verify_output=True, + ) + + # Check the output + model2.to("cuda") + expected_outputs, refitted_outputs = exp_program2.module()(*inputs), new_trt_gm( + *inputs + ) + for expected_output, refitted_output in zip(expected_outputs, refitted_outputs): + assertions.assertTrue( + torch.allclose(expected_output, refitted_output, 1e-2, 1e-2), + "Refit Result is not correct. Refit failed", + ) + # Clean up model env + + torch._dynamo.reset() + + @unittest.skipIf( not torch_trt.ENABLED_FEATURES.torch_tensorrt_runtime, "TorchScript Frontend is not available", diff --git a/tests/py/dynamo/models/test_models.py b/tests/py/dynamo/models/test_models.py index 359044a2b2..90d3cc637b 100644 --- a/tests/py/dynamo/models/test_models.py +++ b/tests/py/dynamo/models/test_models.py @@ -79,11 +79,12 @@ def test_resnet18_cpu_offload(ir): } trt_mod = torchtrt.compile(model, **compile_spec) - assertions.assertTrue( - get_model_device(model).type == "cpu", - msg="Model should be offloaded to CPU", - ) - model.cuda() + if ir == "dynamo": + assertions.assertTrue( + get_model_device(model).type == "cpu", + msg="Model should be offloaded to CPU", + ) + model.cuda() cos_sim = cosine_similarity(model(input), trt_mod(input)) assertions.assertTrue( cos_sim > COSINE_THRESHOLD, @@ -286,11 +287,12 @@ def test_bert_base_uncased_cpu_offload(ir): "offload_module_to_cpu": True, } trt_mod = torchtrt.compile(model, **compile_spec) - assertions.assertTrue( - get_model_device(model).type == "cpu", - msg="Model should be offloaded to CPU", - ) - model.cuda() + if ir == "dynamo": + assertions.assertTrue( + get_model_device(model).type == "cpu", + msg="Model should be offloaded to CPU", + ) + model.cuda() model_outputs = model(input, input2) trt_model_outputs = trt_mod(input, input2) diff --git a/tests/py/dynamo/runtime/test_mutable_torchtrt_module.py b/tests/py/dynamo/runtime/test_mutable_torchtrt_module.py index a0af6420ed..b2caa2551b 100644 --- 
a/tests/py/dynamo/runtime/test_mutable_torchtrt_module.py +++ b/tests/py/dynamo/runtime/test_mutable_torchtrt_module.py @@ -317,9 +317,7 @@ def test_resnet18_modify_attribute(): mutable_module = torch_trt.MutableTorchTensorRTModule(model, **compile_spec) mutable_module(*inputs) - mutable_module.conv1.weight = nn.Parameter( - torch.rand_like(mutable_module.conv1.weight) - ) + mutable_module.fc.weight = nn.Parameter(torch.rand_like(mutable_module.fc.weight)) assertions.assertEqual( mutable_module.refit_state.get_state(), RefitFlag.UNKNOWN, diff --git a/tests/py/dynamo/testing_utilities.py b/tests/py/dynamo/testing_utilities.py index 7894c49967..7adf2c8a58 100644 --- a/tests/py/dynamo/testing_utilities.py +++ b/tests/py/dynamo/testing_utilities.py @@ -92,7 +92,7 @@ def compile_module_testing( ) # Store intermediate graph from partitioned module - store_intermediate_graphs.append(deepcopy(partitioned_module)) + store_intermediate_graphs.append(partitioned_module) return partitioned_module diff --git a/tests/py/requirements.txt b/tests/py/requirements.txt index b806a668db..c022378f6d 100644 --- a/tests/py/requirements.txt +++ b/tests/py/requirements.txt @@ -7,7 +7,7 @@ parameterized>=0.2.0 pytest>=8.2.1 pytest-xdist>=3.6.1 pyyaml -transformers==4.49.0 +transformers==4.53.1 nvidia-modelopt[all]; python_version >'3.9' and python_version <'3.13' --extra-index-url https://pypi.nvidia.com # flashinfer-python is not supported for python version 3.13 or higher diff --git a/toolchains/ci_workspaces/MODULE.bazel.tmpl b/toolchains/ci_workspaces/MODULE.bazel.tmpl index 4f03473c08..4f29a469da 100644 --- a/toolchains/ci_workspaces/MODULE.bazel.tmpl +++ b/toolchains/ci_workspaces/MODULE.bazel.tmpl @@ -75,18 +75,18 @@ http_archive = use_repo_rule("@bazel_tools//tools/build_defs/repo:http.bzl", "ht http_archive( name = "tensorrt", build_file = "@//third_party/tensorrt/archive:BUILD", - strip_prefix = "TensorRT-10.11.0.33", + strip_prefix = "TensorRT-10.12.0.36", urls = [ - 
"https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.11.0/tars/TensorRT-10.11.0.33.Linux.x86_64-gnu.cuda-12.9.tar.gz", + "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.12.0/tars/TensorRT-10.12.0.36.Linux.x86_64-gnu.cuda-12.9.tar.gz", ], ) http_archive( name = "tensorrt_sbsa", build_file = "@//third_party/tensorrt/archive:BUILD", - strip_prefix = "TensorRT-10.11.0.33", + strip_prefix = "TensorRT-10.12.0.36", urls = [ - "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.11.0/tars/TensorRT-10.11.0.33.Linux.aarch64-gnu.cuda-12.9.tar.gz", + "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.12.0/tars/TensorRT-10.12.0.36.Linux.aarch64-gnu.cuda-12.9.tar.gz", ], ) @@ -102,9 +102,9 @@ http_archive( http_archive( name = "tensorrt_win", build_file = "@//third_party/tensorrt/archive:BUILD", - strip_prefix = "TensorRT-10.11.0.33", + strip_prefix = "TensorRT-10.12.0.36", urls = [ - "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.11.0/zip/TensorRT-10.11.0.33.Windows.win10.cuda-12.9.zip", + "https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.12.0/zip/TensorRT-10.12.0.36.Windows.win10.cuda-12.9.zip", ], ) diff --git a/tools/llm/README.md b/tools/llm/README.md new file mode 100644 index 0000000000..a141505517 --- /dev/null +++ b/tools/llm/README.md @@ -0,0 +1,67 @@ +# Optimizing LLMs in Torch-TensorRT + +This directory provides utilities and scripts for compiling, optimizing, and benchmarking Large Language Models (LLMs) using Torch-TensorRT, with a focus on efficient inference on NVIDIA GPUs. The main entry point is `run_llm.py`, which demonstrates how to export, compile, and run LLMs with various caching strategies and precision modes. Note that this is an **experimental release** and APIs may change in future versions. + +### Key Features + +- **Model Support:** Works with popular LLMs such as Llama-3, Qwen2.5, etc. 
+- **Precision Modes:** Supports FP16, BF16, and FP32. +- **KV Cache:** Supports static and dynamic KV cache for efficient autoregressive decoding. +- **Benchmarking:** Measures and compares throughput and latency for PyTorch and TensorRT backends. +- **Custom Attention:** Registers and converts custom scaled dot-product attention (SDPA) for compatibility with TensorRT. + + +### Supported Models + +We have officially verified support for the following models: + +| Model Series | HF Model Card | Precision | KV Cache Supported ? | +|--------------|---------------|-----------|-------------------| +| GPT-2 | gpt2
    gpt2-medium | FP16, FP32 | Yes | +| LLaMA 2 | meta-llama/Llama-2-7b-chat-hf | FP16, FP32 | Yes | +| LLaMA 3.1 | meta-llama/Llama-3.1-8B-Instruct | FP16, FP32 | Yes | +| LLaMA 3.2 | meta-llama/Llama-3.2-1B-Instruct
    meta-llama/Llama-3.2-3B-Instruct | FP16, FP32 | Yes | +| Qwen 2.5 | Qwen/Qwen2.5-0.5B-Instruct
    Qwen/Qwen2.5-1.5B-Instruct
    Qwen/Qwen2.5-3B-Instruct
    Qwen/Qwen2.5-7B-Instruct | FP16, FP32 | Yes | +| Qwen 3 | Qwen/Qwen3-0.6B
    Qwen/Qwen3-1.7B
    Qwen/Qwen3-4B
    Qwen/Qwen3-8B | FP16, FP32 | Yes | + + +### Usage + +The main entry point is : `run_llm.py` + +```bash +python run_llm.py --model meta-llama/Llama-3.2-1B-Instruct --prompt "What is parallel programming?" --precision FP16 --num_tokens 128 --cache static_v2 --benchmark +``` + +#### Key Arguments + +- `--model`: Name or path of the HuggingFace LLM. +- `--tokenizer`: (Optional) Tokenizer name; defaults to model. +- `--prompt`: Input prompt for generation. +- `--precision`: Precision mode (`FP16`, `FP32`). +- `--num_tokens`: Number of output tokens to generate. +- `--cache`: KV cache type (`static_v1`, `static_v2`, or empty for no KV caching). +- `--benchmark`: Enable benchmarking mode. +- `--enable_pytorch_run`: Also run and compare PyTorch baseline. + +### Caching Strategies + +- **Static Cache v1/v2:** Adds static KV cache tensors as model inputs/outputs for efficient reuse. +- **No Cache:** Standard autoregressive decoding. + +Please read our tutorial on how static cache is implemented. + +## Extension + +This codebase can be extended to +- Add new models by specifying their HuggingFace name. +- Implement new cache strategies by adding FX graph passes. +- Customize SDPA conversion for new attention mechanisms. + +## Limitations +- We do not currently support sliding window attention (used in Gemma3 and Qwen 3 models) yet. 
+ +## Requirements + +- Torch-TensorRT 2.8.0 +- Transformers v4.52.3 \ No newline at end of file diff --git a/tools/llm/cache_utils.py b/tools/llm/cache_utils.py new file mode 100644 index 0000000000..d25e5bb40e --- /dev/null +++ b/tools/llm/cache_utils.py @@ -0,0 +1,177 @@ +from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union + +import tensorrt +import torch +import torch_tensorrt +from torch._export.utils import _detect_fake_mode_from_gm +from torch._ops import OpOverloadPacket +from torch._subclasses.fake_tensor import FakeTensor, FakeTensorMode +from torch.fx import Graph, GraphModule, Node +from torch.fx.node import Target +from torch.fx.passes.shape_prop import _extract_tensor_metadata +from torch.utils._pytree import _LEAF_SPEC + + +def get_kv_nodes(gm): + """ + Extract key and value nodes from scaled dot-product attention operations in the graph. + + This function searches through the graph for scaled_dot_product_attention operations + and extracts the key and value tensor nodes from each operation's arguments. + + Args: + gm: A torch.fx.GraphModule containing the computational graph + + Returns: + List[Tuple[Node, Node]]: A list of tuples, where each tuple contains + (key_node, value_node) from a scaled dot-product attention operation + """ + kv_nodes = [] + for node in gm.graph.nodes: + if ( + node.op == "call_function" + and node.target == torch._C._nn.scaled_dot_product_attention + ): + q_node, k_node, v_node = node.args[:3] + kv_nodes.append((k_node, v_node)) + return kv_nodes + + +def get_random_tensor_from_node(node: Node) -> torch.Tensor: + """ + Creates a random tensor based on the shape information in a node's metadata. + For symbolic dimensions, extracts the maximum value from the shape environment. 
+ + Args: + node: A torch.fx.Node object with metadata containing tensor information + + Returns: + A random tensor with shape matching the node's metadata, or None if no valid + tensor information is found + """ + if "val" not in node.meta: + raise ValueError( + f"No tensor information found in node metadata for node: {node}" + ) + + fake_tensor = node.meta["val"] + shape = [] + + # Iterate through each dimension and handle symbolic dimensions + for dim in fake_tensor.shape: + if isinstance(dim, torch.SymInt): + # Extract the maximum value from the shape environment + max_val = dim.node.hint + shape.append(max_val) + else: + shape.append(dim) + + # Create a random tensor with the determined shape + dtype = fake_tensor.dtype + device = fake_tensor.device + random_tensor = torch.rand(shape, dtype=dtype, device=device) + + return random_tensor + + +def create_random_output_tensors(nodes: List[Node]) -> List[torch.Tensor]: + """ + Creates random tensors based on the shape information in node metadata. + For symbolic dimensions, extracts the maximum value from the shape environment. + + Args: + nodes: List of torch.fx.Node objects with metadata + + Returns: + List of random tensors with shapes matching the nodes' metadata + """ + random_tensors = [] + + for node in nodes: + if isinstance(node, Node): + node_tensor = get_random_tensor_from_node(node) + elif isinstance(node, tuple): + node_tensor_list = [] + for n in node: + random_tensor = get_random_tensor_from_node(n) + node_tensor_list.append(random_tensor) + node_tensor = tuple(node_tensor_list) + + random_tensors.append(node_tensor) + + return random_tensors + + +def _add_graph_input( + gm: GraphModule, name: str, val: Optional[torch.Tensor] = None, dynamic_shape=None +) -> Node: + """Add a graph input to the given GraphModule and return the newly created node. + + NOTE: function does NOT do any graph canonicalization. This is left to the user! + + Args: + gm (GraphModule): The GraphModule to add the input to. 
+ name (str): The name of the input. + val (torch.Tensor): An example tensor to use for the input. + dynamic_shape: The dynamic shape of the input tensor [NOT SUPPORTED YET] + """ + # check that no dynamic shape is provided... + if dynamic_shape: + raise NotImplementedError("Dynamic shape not supported for adding graph inputs") + + # extract graph and input spec + graph: Graph = gm.graph + + in_spec = graph._codegen.pytree_info.in_spec + in_spec_for_args = in_spec.children_specs[0] + orig_args = graph._codegen.pytree_info.orig_args + assert in_spec_for_args.type is tuple + + # insert input node after currently last input node + node_last_input = graph.find_nodes(op="placeholder", sort=True)[-1] + with graph.inserting_after(node_last_input): + in_node = graph.placeholder(name) + in_spec_for_args.children_specs.append(_LEAF_SPEC) + orig_args.append(f"arg_{name}") + + # update pytree info recursively with __post_init__ starting at leaves + def call_post_init(spec): + for child_spec in spec.children_specs: + call_post_init(child_spec) + spec.__post_init__() + + call_post_init(in_spec) + + # set fake tensor information if all required information is available + fake_mode: Optional[FakeTensorMode] = _detect_fake_mode_from_gm(gm) + if fake_mode and val is not None and isinstance(val, torch.Tensor): + if isinstance(val, FakeTensor): + fake_tensor = val + else: + fake_tensor: FakeTensor = fake_mode.from_tensor(val, static_shapes=True) + in_node.meta["val"] = fake_tensor + in_node.meta["tensor_meta"] = _extract_tensor_metadata(fake_tensor) + + # return new node... 
+ return in_node + + +def is_op(node: Node, ops: Union[OpOverloadPacket, Iterable[OpOverloadPacket]]) -> bool: + """Check if the node is a call to one of the ops.""" + if node.op != "call_function": + return False + # check if it's a single op that's provided + if isinstance(ops, OpOverloadPacket): + ops = [ops] + + # check if it's the op itself instead of an overload + if any(node.target == op for op in ops): + return True + + return False + + +def get_all_input_output_nodes(graph: Graph) -> Tuple[List[Node], List[Node]]: + input_nodes: List[Node] = graph.find_nodes(op="placeholder") + output_nodes: List[Node] = graph.find_nodes(op="output") + return (input_nodes, output_nodes) diff --git a/tools/llm/run_llm.py b/tools/llm/run_llm.py new file mode 100644 index 0000000000..7e50b515c2 --- /dev/null +++ b/tools/llm/run_llm.py @@ -0,0 +1,357 @@ +""" +.. _run_llm: + +Running LLM inference with Torch-TensorRT +========================================================== + +This script illustrates Torch-TensorRT workflow with dynamo backend on popular LLM models. +""" + +import argparse +import copy +import os +import timeit +from contextlib import nullcontext + +# %% +# Imports and Model Definition +# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +import torch +import torch_tensorrt +from torchtrt_ext import register_sdpa +from transformers import AutoModelForCausalLM, AutoTokenizer +from utils import ( + export_llm, + generate, + generate_with_static_cache, + record_stats, + time_generate, +) + +DEVICE = torch.device("cuda:0") + + +def get_model(args): + """ + Load and configure the language model for inference. + + This function loads a pre-trained causal language model using the specified + model name and configures it with the appropriate precision and settings + for inference. 
+ + Args: + args: Parsed command line arguments containing: + - model (str): Name or path of the model to load + - precision (str): Precision to use ("FP16", "BF16", or "FP32") + + Returns: + torch.nn.Module: The loaded and configured model ready for inference, + moved to CUDA device with the specified precision + """ + with torch.no_grad(): + model = ( + AutoModelForCausalLM.from_pretrained( + args.model, + use_cache=False, + attn_implementation="sdpa", + ) + .eval() + .cuda() + ) + + if args.precision == "FP16": + model = model.to(torch.float16) + elif args.precision == "BF16": + model = model.to(torch.bfloat16) + else: + model = model.to(torch.float32) + + return model + + +def compile_torchtrt(model, input_ids, args): + """ + Compile a PyTorch model to TensorRT using torch_tensorrt.dynamo.compile. + + This function exports the given model to a TorchScript representation and then + compiles it to TensorRT for optimized inference. The compilation process includes + precision-specific optimizations and various performance tuning parameters. 
+ + Args: + model (torch.nn.Module): The PyTorch model to compile + input_ids (torch.Tensor): Input token IDs tensor used for model export + args: Parsed command line arguments containing: + - num_tokens (int): Number of tokens to generate (used for max sequence length) + - precision (str): Precision to use ("FP16", "BF16", or "FP32") + - debug (bool): Whether to enable debug logging + - min_block_size (int): Minimum block size for TensorRT compilation + + Returns: + torch_tensorrt.dynamo.TorchTensorRTModule: The compiled TensorRT model ready + for optimized inference + """ + max_seq_len = input_ids.shape[1] + args.num_tokens + ep = export_llm(model, input_ids, max_seq_len=max_seq_len) + position_ids = torch.arange(input_ids.shape[1]).unsqueeze(0).to(DEVICE) + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + ep, + inputs=[input_ids, position_ids], + enabled_precisions=enabled_precisions, + # truncate_double=True, + use_explicit_typing=use_explicit_typing, + use_fp32_acc=use_fp32_acc, + device=DEVICE, + disable_tf32=True, + use_python_runtime=True, + debug=args.debug, + offload_module_to_cpu=True, + min_block_size=args.min_block_size, + ) + + return trt_model + + +def print_outputs(backend_name, gen_tokens, tokenizer): + """ + Print the generated tokens from the model. 
+ """ + print(f"========= {backend_name} =========") + print( + f"{backend_name} model generated text: ", + tokenizer.decode(gen_tokens[0], skip_special_tokens=True), + ) + print("===================================") + + +def measure_perf(trt_model, input_signature, backend_name): + """ + Measure the performance of a TensorRT model by running it multiple times and + calculating the average time per iteration. + """ + total_time = 0 + iterations = 10 + + print("Running warmup iteration...") + # Warmup run + _ = trt_model(*input_signature) + torch.cuda.synchronize() + + print(f"Measuring performance over {iterations} iterations...") + for i in range(iterations): + start_time = timeit.default_timer() + _ = trt_model(*input_signature) + torch.cuda.synchronize() + end_time = timeit.default_timer() + iter_time = end_time - start_time + total_time += iter_time + + avg_time = total_time / iterations + print( + f"Backend: {backend_name} Average time per iteration: {avg_time*1000:.4f} milliseconds" + ) + print( + f"Backend: {backend_name} Average throughput: {1.0/avg_time:.2f} iterations/second" + ) + + +if __name__ == "__main__": + arg_parser = argparse.ArgumentParser( + description="Run inference on a model with random input values" + ) + arg_parser.add_argument( + "--model", + type=str, + default="meta-llama/Llama-3.2-1B-Instruct", + help="Name of LLM model", + ) + arg_parser.add_argument( + "--tokenizer", + type=str, + default="", + help="Name of LLM model tokenizer", + ) + arg_parser.add_argument( + "--prompt", type=str, default="What is parallel programming ?", help="Prompt" + ) + arg_parser.add_argument( + "--precision", + type=str, + default="FP16", + help="Precision to use in the model. Options: FP16, BF16, FP32", + ) + arg_parser.add_argument( + "--iterations", type=int, default=5, help="no. of iterations to run" + ) + arg_parser.add_argument( + "--min_block_size", type=int, default=1, help="no. 
of iterations to run" + ) + arg_parser.add_argument( + "--num_tokens", + type=int, + default=128, + help="no. of output tokens to be generated", + ) + arg_parser.add_argument( + "--batch_size", type=int, default=1, help="Batch size used for benchmarking" + ) + arg_parser.add_argument( + "--isl", + type=int, + default=2048, + help="Input sequence length used for benchmarking", + ) + arg_parser.add_argument( + "--enable_pytorch_run", + action="store_true", + help="Enable pytorch run (default: False)", + ) + arg_parser.add_argument( + "--cache", + type=str, + default="", + help="Type of KV cache to use. Options: static_v1, static_v2", + ) + arg_parser.add_argument( + "--cudagraph", action="store_true", help="Enable cudagraphs (default: False)" + ) + arg_parser.add_argument( + "--debug", action="store_true", help="Enable debug (default: False)" + ) + arg_parser.add_argument( + "--benchmark", action="store_true", help="Enable benchmark (default: False)" + ) + + args = arg_parser.parse_args() + with torch.inference_mode(): + model = get_model(args) + + tokenizer = AutoTokenizer.from_pretrained(args.tokenizer or args.model) + + # Prepare input for benchmarking or evaluation + if args.benchmark: + input_ids = torch.randint( + 1, 10000, (args.batch_size, args.isl), dtype=torch.int64 + ).to(model.device) + position_ids = torch.arange(input_ids.shape[1]).unsqueeze(0).to(DEVICE) + else: + model_inputs = tokenizer(args.prompt, return_tensors="pt") + input_ids = model_inputs["input_ids"].to(DEVICE) + position_ids = torch.arange(input_ids.shape[1]).unsqueeze(0).to(DEVICE) + + MAX_OUTPUT_SEQ_LENGTH = input_ids.shape[1] + args.num_tokens + # Pyt + pyt_gen_tokens = None + pyt_timings = None + pyt_stats = None + + if args.enable_pytorch_run: + pyt_gen_tokens = generate( + model, input_ids.clone(), MAX_OUTPUT_SEQ_LENGTH, tokenizer.eos_token_id + ) + if args.benchmark: + pyt_timings = time_generate( + generate, + model, + input_ids.clone(), + MAX_OUTPUT_SEQ_LENGTH, + 
tokenizer.eos_token_id, + iterations=args.iterations, + ) + pyt_stats = record_stats( + "PyTorch", + pyt_timings, + args.precision, + batch_size=args.batch_size, + compile_time_s=None, + ) + + if args.cache == "static_v1": + # This import is required to register static v1 KV cache transformations as lowering passes + import static_cache_v1 + if args.cache == "static_v2": + # This import is required to register static v2 KV cache transformations as lowering passes + import static_cache_v2 + + # Compile the model with Torch-TensorRT + trt_model = compile_torchtrt(model, input_ids, args) + + if args.cache == "static_v1" or args.cache == "static_v2": + if args.cudagraph: + # Run a decoding loop with prefill and generate phases so that the CUDAGraph is recorded for both of these phases. + # trt_input_signature = (input_ids.clone(),) + get_zeroed_kv_cache_inputs(trt_model) + torch_tensorrt.runtime.set_cudagraphs_mode(True) + + trt_gen_tokens = generate_with_static_cache( + trt_model, + input_ids.clone(), + MAX_OUTPUT_SEQ_LENGTH, + tokenizer.eos_token_id, + ) + + if args.benchmark: + trt_timings = time_generate( + generate_with_static_cache, + trt_model, + input_ids.clone(), + MAX_OUTPUT_SEQ_LENGTH, + tokenizer.eos_token_id, + iterations=args.iterations, + ) + else: + trt_gen_tokens = generate( + trt_model, + input_ids.clone(), + MAX_OUTPUT_SEQ_LENGTH, + tokenizer.eos_token_id, + ) + if args.benchmark: + trt_timings = time_generate( + generate, + trt_model, + input_ids.clone(), + MAX_OUTPUT_SEQ_LENGTH, + tokenizer.eos_token_id, + iterations=args.iterations, + ) + + if args.benchmark: + trt_stats = record_stats( + "TensorRT", + trt_timings, + args.precision, + batch_size=args.batch_size, + compile_time_s=None, + ) + + if not args.benchmark: + if args.enable_pytorch_run: + print_outputs("PyTorch", pyt_gen_tokens, tokenizer) + + print_outputs("TensorRT", trt_gen_tokens, tokenizer) + + if args.enable_pytorch_run: + print( + f"PyTorch and TensorRT outputs match: 
{torch.equal(pyt_gen_tokens, trt_gen_tokens)}" + ) + + if args.benchmark: + if args.enable_pytorch_run: + print("=========PyTorch PERFORMANCE============ \n") + print(pyt_stats) + print("===================== \n") + print("=========TensorRT PERFORMANCE============ \n") + print(trt_stats) diff --git a/tools/llm/static_cache_v1.py b/tools/llm/static_cache_v1.py new file mode 100644 index 0000000000..b60396c08b --- /dev/null +++ b/tools/llm/static_cache_v1.py @@ -0,0 +1,277 @@ +import logging +from typing import List, Tuple + +import torch +import torch.utils._pytree as pytree +from cache_utils import _add_graph_input, create_random_output_tensors, get_kv_nodes +from torch.fx import Node +from torch_tensorrt.dynamo._settings import CompilationSettings +from torch_tensorrt.dynamo.lowering.passes._aten_lowering_pass import ( + _aten_lowering_pass, +) +from torch_tensorrt.dynamo.lowering.passes.pass_utils import ( + clean_up_graph_after_modifications, +) +from torch_tensorrt.dynamo.utils import extract_var_range_info + +logger = logging.getLogger(__name__) + +SDPA_OP = torch._C._nn.scaled_dot_product_attention + + +def add_kv_as_outputs(gm, kv_cache_for_graph: List[Tuple[torch.Tensor, torch.Tensor]]): + """ + Modifies the graph to add query, key, and value tensors as outputs. + + This function identifies all scaled dot-product attention (SDPA) operations + in the graph, creates copies of their query, key, and value inputs, and adds + these copies to the graph's outputs. This allows for accessing these tensors + externally, which is useful for operations like key-value caching. + + Args: + graph: The torch.fx.Graph to modify + + Returns: + None. The graph is modified in-place. 
+ """ + output_node = next(node for node in gm.graph.nodes if node.op == "output") + + # Get the current output args (typically a tuple) + current_outputs = output_node.args[0] + + # If the current output is a tuple, extend it with our new outputs + if isinstance(current_outputs, tuple): + new_outputs = current_outputs + tuple(kv_cache_for_graph) + else: + # If there's only one output or it's not a tuple, create a new tuple + new_outputs = (current_outputs,) + tuple(kv_cache_for_graph) + + gm.graph.output(new_outputs) + gm.graph.erase_node(output_node) + + return new_outputs + + +def add_kv_cache_inputs(gm, fixed_kv: bool = True): + """ + Add key-value tensors, index parameters as inputs to the graph. + + Args: + gm: The GraphModule to modify + fixed_kv: Boolean indicating whether to use static tensors for KV cache. Default is True. + + Returns: + A tuple containing: + - List of (k_input, v_input) node pairs for each SDPA operation + - start_idx input node for slicing operations + - end_idx input node for slicing operations + """ + + def get_static_tensor(tensor: torch.Tensor): + key_shape = [] + for dim in tensor.shape: + if isinstance(dim, torch.SymInt): + min_max_opt = extract_var_range_info(dim) + key_shape.append(min_max_opt["max"]) + else: + key_shape.append(dim) + + static_tensor = torch.randn(key_shape, dtype=tensor.dtype, device=tensor.device) + return static_tensor + + keys_values = get_kv_nodes(gm) + + kv_inputs = [] + for idx, key_value in enumerate(keys_values): + k_val = key_value[0].meta["val"] + v_val = key_value[1].meta["val"] + if fixed_kv: + k_val = get_static_tensor(k_val) + v_val = get_static_tensor(v_val) + + # Add new inputs using _add_graph_input + k_input = _add_graph_input(gm, key_value[0].name + "_k_input", k_val) + v_input = _add_graph_input(gm, key_value[1].name + "_v_input", v_val) + kv_inputs.append((k_input, v_input)) + + # Add start_idx and end_idx as inputs + start_idx_input = _add_graph_input(gm, "start_idx", torch.tensor(0)) + 
end_idx_input = _add_graph_input(gm, "end_idx", torch.tensor(1)) + + # Get the max sequence length from the first key_cache node. The order of nodes is: input_ids, is_causal, key_cache1, value_cache1, key_cache2, value_cache2, .. + input_nodes = [node for node in gm.graph.nodes if node.op == "placeholder"] + input_ids_meta = input_nodes[0].meta["val"] + seq_len = input_ids_meta.shape[1] + min_max_opt = extract_var_range_info(seq_len) + max_seq_len = min_max_opt["max"] + + from torch.fx.experimental.symbolic_shapes import ShapeEnv + + shape_env = ShapeEnv() + # Create symbolic ints for start_idx and end_idx with range [0, seq_len] inclusive + start_idx_unbacked_symint = shape_env.create_unbacked_symint() + torch._check(start_idx_unbacked_symint >= 0) + torch._check(start_idx_unbacked_symint <= max_seq_len) + + end_idx_unbacked_symint = shape_env.create_unbacked_symint() + torch._check(end_idx_unbacked_symint >= 0) + torch._check(end_idx_unbacked_symint <= max_seq_len) + # Set the symbolic ints as the metadata for start_idx and end_idx inputs + start_idx_input.meta["val"] = start_idx_unbacked_symint + end_idx_input.meta["val"] = end_idx_unbacked_symint + + return kv_inputs, start_idx_input, end_idx_input + + +def insert_kv_slicing_before_sdpa( + gm, + incoming_keys_values: List[Tuple[torch.Tensor, torch.Tensor]], + start_idx_input: Node, + end_idx_input: Node, +): + """ + Insert slicing operations before each scaled_dot_product_attention operation. 
+ """ + # Find all nodes with scaled_dot_product_attention + sdpa_nodes = [] + for node in gm.graph.nodes: + if node.op == "call_function" and node.target == SDPA_OP: + sdpa_nodes.append(node) + kv_cache_for_graph = [] + for idx, sdpa_node in enumerate(sdpa_nodes): + assert ( + len(sdpa_node.args) == 6 + ), f"SDPA node should have 6 arguments but got {len(sdpa_node.args)} arguments" + q_node, k_node, v_node, attn_mask, dropout_p, is_causal = sdpa_node.args + incoming_key, incoming_value = incoming_keys_values[idx] + kv_cache_for_sdpa_node = [] + new_keys_values = [] + for key_or_value, current_key_or_value_node in zip( + [incoming_key, incoming_value], [k_node, v_node] + ): + # Create a slice node for key_cache[:,:,:start_idx,:]. The shape of key_cache is batch_size x num_heads x seq_len x head_dim + with gm.graph.inserting_before(sdpa_node): + slice_1 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(key_or_value,), + kwargs={}, + ) + slice_2 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_1, 1), + kwargs={}, + ) + slice_3 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_2, 2, None, start_idx_input), + kwargs={}, + ) + slice_4 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_3, 3), + kwargs={}, + ) + # =============================================== # + # Create a slice node for key_cache[:,:, end_idx:,:]. 
The shape of key_cache is batch_size x num_heads x seq_len x head_dim + slice_5 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(key_or_value,), + kwargs={}, + ) + slice_6 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_5, 1), + kwargs={}, + ) + slice_7 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_6, 2, end_idx_input), + kwargs={}, + ) + slice_8 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_7, 3), + kwargs={}, + ) + # =============================================== # + # Concatenate the sliced tensors to build KV cache + cat = gm.graph.create_node( + "call_function", + torch.ops.aten.cat.default, + args=([slice_4, current_key_or_value_node, slice_8], 2), + kwargs={}, + ) + # Update the metadata of the newly built KV cache node with the metadata of the input KV cache node to the graph + cat.meta.update(key_or_value.meta) + kv_cache_for_sdpa_node.append(cat) + # =============================================== # + # Get the current key and value by indexing the KV cache + slice_9 = gm.graph.create_node( + "call_function", torch.ops.aten.slice.Tensor, args=(cat,), kwargs={} + ) + slice_10 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_9, 1), + kwargs={}, + ) + slice_11 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_10, 2, None, end_idx_input), + kwargs={}, + ) + slice_12 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_11, 3), + kwargs={}, + ) + new_keys_values.append(slice_12) + + kv_cache_for_graph.extend(kv_cache_for_sdpa_node) + + sdpa_node.args = (q_node, new_keys_values[0], new_keys_values[1]) + ( + attn_mask, + dropout_p, + True, + ) + + return gm, kv_cache_for_graph + + +@_aten_lowering_pass +def insert_static_cache_v1( + gm: torch.fx.GraphModule, settings: CompilationSettings +) -> 
torch.fx.GraphModule: + """Insert KV cache ops in the graph""" + """Perform insertion of kv-caches and attention kernel.""" + # Add static key and value as inputs to the graph + kv_inputs, start_idx_input, end_idx_input = add_kv_cache_inputs(gm, fixed_kv=True) + + # Build and update the KV cache using computed KV inputs for current token and + # incoming keys and values from previous tokens (which were added as inputs) + gm, kv_cache_for_graph = insert_kv_slicing_before_sdpa( + gm, kv_inputs, start_idx_input, end_idx_input + ) + + # Call the function to add KV as outputs + logits_keys_values = add_kv_as_outputs(gm, kv_cache_for_graph) + + gm = clean_up_graph_after_modifications(gm) + + new_output_tensors = create_random_output_tensors(logits_keys_values) + + new_out_spec = pytree.tree_flatten(new_output_tensors)[1] + gm._out_spec = new_out_spec + logger.debug("After inserting KV cache into the graph: " + str(gm.graph)) + + return gm diff --git a/tools/llm/static_cache_v2.py b/tools/llm/static_cache_v2.py new file mode 100644 index 0000000000..4634b79a52 --- /dev/null +++ b/tools/llm/static_cache_v2.py @@ -0,0 +1,290 @@ +import logging +from typing import List, Tuple + +import torch +import torch.utils._pytree as pytree +from cache_utils import _add_graph_input, create_random_output_tensors, get_kv_nodes +from torch.fx import Node +from torch_tensorrt.dynamo._settings import CompilationSettings +from torch_tensorrt.dynamo.lowering.passes._aten_lowering_pass import ( + _aten_lowering_pass, +) +from torch_tensorrt.dynamo.lowering.passes.pass_utils import ( + clean_up_graph_after_modifications, +) +from torch_tensorrt.dynamo.utils import extract_var_range_info + +logger = logging.getLogger(__name__) + +SDPA_OP = torch._C._nn.scaled_dot_product_attention + + +def add_kv_as_outputs(gm, kv_cache_for_graph: List[Tuple[torch.Tensor, torch.Tensor]]): + """ + Modifies the graph to add query, key, and value tensors as outputs. 
+ + This function identifies all scaled dot-product attention (SDPA) operations + in the graph, creates copies of their query, key, and value inputs, and adds + these copies to the graph's outputs. This allows for accessing these tensors + externally, which is useful for operations like key-value caching. + + Args: + graph: The torch.fx.Graph to modify + + Returns: + None. The graph is modified in-place. + """ + output_node = next(node for node in gm.graph.nodes if node.op == "output") + + # Get the current output args (typically a tuple) + current_outputs = output_node.args[0] + + # If the current output is a tuple, extend it with our new outputs + if isinstance(current_outputs, tuple): + new_outputs = current_outputs + tuple(kv_cache_for_graph) + else: + # If there's only one output or it's not a tuple, create a new tuple + new_outputs = (current_outputs,) + tuple(kv_cache_for_graph) + + gm.graph.output(new_outputs) + gm.graph.erase_node(output_node) + + return new_outputs + + +def add_kv_cache_inputs(gm, fixed_kv: bool = True): + """ + Add key-value tensors, index parameters as inputs to the graph. + + Args: + gm: The GraphModule to modify + fixed_kv: Boolean indicating whether to use static tensors for KV cache. Default is True. 
+ + Returns: + A tuple containing: + - List of (k_input, v_input) node pairs for each SDPA operation + - start_idx input node for slicing operations + - end_idx input node for slicing operations + """ + + def get_static_tensor(tensor: torch.Tensor): + key_shape = [] + for dim in tensor.shape: + if isinstance(dim, torch.SymInt): + min_max_opt = extract_var_range_info(dim) + key_shape.append(min_max_opt["max"]) + else: + key_shape.append(dim) + + static_tensor = torch.randn(key_shape, dtype=tensor.dtype, device=tensor.device) + return static_tensor + + keys_values = get_kv_nodes(gm) + + kv_inputs = [] + for idx, key_value in enumerate(keys_values): + k_val = key_value[0].meta["val"] + v_val = key_value[1].meta["val"] + if fixed_kv: + k_val = get_static_tensor(k_val) + v_val = get_static_tensor(v_val) + + # Add new inputs using _add_graph_input + k_input = _add_graph_input(gm, key_value[0].name + "_k_input", k_val) + v_input = _add_graph_input(gm, key_value[1].name + "_v_input", v_val) + kv_inputs.append((k_input, v_input)) + + # Add start_idx and end_idx as inputs + start_idx_input = _add_graph_input(gm, "start_idx", torch.tensor(0)) + end_idx_input = _add_graph_input(gm, "end_idx", torch.tensor(1)) + + # Get the max sequence length from the first key_cache node. 
The order of input nodes is: input_ids, key_cache1, value_cache1, key_cache2, value_cache2, start_idx, end_idx + input_nodes = [node for node in gm.graph.nodes if node.op == "placeholder"] + # Get the third last input which should be the last value cache node and store the max_seq_len + input_ids_meta = input_nodes[-3].meta["val"] + seq_len = input_ids_meta.shape[2] + + if isinstance(seq_len, torch.SymInt): + min_max_opt = extract_var_range_info(seq_len) + max_seq_len = min_max_opt["max"] + else: + max_seq_len = seq_len + + from torch.fx.experimental.symbolic_shapes import ShapeEnv + + shape_env = ShapeEnv() + # Create symbolic ints for start_idx and end_idx with range [0, seq_len] inclusive + start_idx_unbacked_symint = shape_env.create_unbacked_symint() + torch._check(start_idx_unbacked_symint >= 0) + torch._check(start_idx_unbacked_symint <= max_seq_len) + + end_idx_unbacked_symint = shape_env.create_unbacked_symint() + torch._check(end_idx_unbacked_symint >= 0) + torch._check(end_idx_unbacked_symint <= max_seq_len) + # Set the symbolic ints as the metadata for start_idx and end_idx inputs + start_idx_input.meta["val"] = start_idx_unbacked_symint + end_idx_input.meta["val"] = end_idx_unbacked_symint + + return kv_inputs, start_idx_input, end_idx_input + + +def create_kv_cache_update_nodes( + gm, sdpa_node, current_kv_node, incoming_kv_node, start_idx_input, end_idx_input +): + """ + Create slicing and concatenation nodes for KV cache update. + + This function creates the necessary slicing and concatenation nodes to update the KV cache + during the generation process. It takes the SDPA node, the current KV cache node, and the + incoming KV cache node as input. + Returns: + for a particular SDPA node, a tuple containing: + - List of new current KV nodes + - List of updated incoming KV cache nodes + + """ + + # Create a slice node for key_cache[:,:,:start_idx,:]. 
The shape of key_cache is batch_size x num_heads x seq_len x head_dim + with gm.graph.inserting_before(sdpa_node): + slice_1 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(incoming_kv_node,), + kwargs={}, + ) + slice_2 = gm.graph.create_node( + "call_function", torch.ops.aten.slice.Tensor, args=(slice_1, 1), kwargs={} + ) + slice_3 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_2, 2, None, start_idx_input), + kwargs={}, + ) + slice_4 = gm.graph.create_node( + "call_function", torch.ops.aten.slice.Tensor, args=(slice_3, 3), kwargs={} + ) + # Concat key_cache[:,:,:start_idx,:] with current key (k) + concat_keys_or_values = gm.graph.create_node( + "call_function", + torch.ops.aten.cat.default, + args=([slice_4, current_kv_node], 2), + kwargs={}, + ) + + # =============================================== # + # Create nodes for key_cache[:,:, end_idx:,:]. The shape of key_cache is batch_size x num_heads x seq_len x head_dim + slice_5 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(incoming_kv_node,), + kwargs={}, + ) + slice_6 = gm.graph.create_node( + "call_function", torch.ops.aten.slice.Tensor, args=(slice_5, 1), kwargs={} + ) + slice_7 = gm.graph.create_node( + "call_function", + torch.ops.aten.slice.Tensor, + args=(slice_6, 2, end_idx_input), + kwargs={}, + ) + slice_8 = gm.graph.create_node( + "call_function", torch.ops.aten.slice.Tensor, args=(slice_7, 3), kwargs={} + ) + # =============================================== # + # Concatenate the sliced tensors to build KV cache + new_incoming_keys_or_values = gm.graph.create_node( + "call_function", + torch.ops.aten.cat.default, + args=([concat_keys_or_values, slice_8], 2), + kwargs={}, + ) + # Update the metadata of the newly built KV cache node with the metadata of the input KV cache node to the graph + new_incoming_keys_or_values.meta.update(incoming_kv_node.meta) + + return concat_keys_or_values, 
new_incoming_keys_or_values + + +def insert_kv_slicing_before_sdpa( + gm, + incoming_keys_values: List[Tuple[torch.Tensor, torch.Tensor]], + start_idx_input: Node, + end_idx_input: Node, +): + """ + Insert slicing and concatenation operations before each scaled_dot_product_attention operation as per the following KV cache update logic: + concat_keys = torch.cat((key_cache[:, :, :start_idx, :], k), dim=2) + concat_values = torch.cat((value_cache[:, :, :start_idx, :], v), dim=2) + new_key_cache = torch.cat((concat_keys, key_cache[:, :, end_idx:, :]), dim=2) + new_value_cache = torch.cat((concat_values, value_cache[:, :, end_idx:, :]), dim=2) + out = torch._C._nn.scaled_dot_product_attention(q, concat_keys, concat_values, dropout_p=0.0, is_causal=is_causal) + """ + # Find all nodes with scaled_dot_product_attention + sdpa_nodes = [] + for node in gm.graph.nodes: + if node.op == "call_function" and node.target == SDPA_OP: + sdpa_nodes.append(node) + kv_cache_for_graph = [] + for idx, sdpa_node in enumerate(sdpa_nodes): + assert ( + len(sdpa_node.args) == 6 + ), f"SDPA node should have 6 arguments but got {len(sdpa_node.args)} arguments" + q_node, k_node, v_node, attn_mask, dropout_p, is_causal = sdpa_node.args + incoming_key, incoming_value = incoming_keys_values[idx] + # For keys + new_current_key_node, new_incoming_key_cache_node = ( + create_kv_cache_update_nodes( + gm, sdpa_node, k_node, incoming_key, start_idx_input, end_idx_input + ) + ) + # For values + new_current_value_node, new_incoming_value_cache_node = ( + create_kv_cache_update_nodes( + gm, sdpa_node, v_node, incoming_value, start_idx_input, end_idx_input + ) + ) + + # Store the KV cache nodes for the current SDPA node + kv_cache_for_graph.extend( + [new_incoming_key_cache_node, new_incoming_value_cache_node] + ) + + # Update the SDPA node arguments with current key and value nodes + sdpa_node.args = (q_node, new_current_key_node, new_current_value_node) + ( + attn_mask, + dropout_p, + True, + ) + + # 
kv_cache_for_graph.extend([k_node, v_node]) + return gm, kv_cache_for_graph + + +@_aten_lowering_pass +def insert_static_cache_v2( + gm: torch.fx.GraphModule, settings: CompilationSettings +) -> torch.fx.GraphModule: + """Insert KV cache ops in the graph""" + """Perform insertion of kv-caches and attention kernel.""" + # Add static key and value as inputs to the graph + kv_inputs, start_idx_input, end_idx_input = add_kv_cache_inputs(gm, fixed_kv=True) + + # Build and update the KV cache using computed KV inputs for current token and + # incoming keys and values from previous tokens (which were added as inputs) + gm, kv_cache_for_graph = insert_kv_slicing_before_sdpa( + gm, kv_inputs, start_idx_input, end_idx_input + ) + + # Call the function to add KV as outputs + logits_keys_values = add_kv_as_outputs(gm, kv_cache_for_graph) + + gm = clean_up_graph_after_modifications(gm) + + new_output_tensors = create_random_output_tensors(logits_keys_values) + + new_out_spec = pytree.tree_flatten(new_output_tensors)[1] + gm._out_spec = new_out_spec + + logger.debug("After inserting KV cache into the graph: " + str(gm.graph)) + return gm diff --git a/tools/llm/test_llama_components.py b/tools/llm/test_llama_components.py new file mode 100644 index 0000000000..ef7e59cd72 --- /dev/null +++ b/tools/llm/test_llama_components.py @@ -0,0 +1,603 @@ +import torch + +torch.backends.cuda.matmul.allow_tf32 = False +torch.backends.cudnn.allow_tf32 = False + +import argparse +import os +import sys +from contextlib import nullcontext + +import torch.nn as nn +import torch_tensorrt +from torch.testing._internal.common_utils import TestCase, run_tests +from transformers import AutoModelForCausalLM +from transformers.models.llama.configuration_llama import LlamaConfig +from transformers.models.llama.modeling_llama import LlamaAttention, LlamaDecoderLayer + +# Register SDPA as a standalone operator. 
Converter and lowering pass are defined in register_sdpa.py +sys.path.append(os.path.join(os.path.dirname(__file__), "..")) +from register_sdpa import * + +ATOL = 1e-5 +RTOL = 1e-5 + + +# llama2_model_name = "meta-llama/Llama-2-7b-hf" +llama3_model_name = "meta-llama/Llama-3.2-1B-Instruct" +llama_model = ( + AutoModelForCausalLM.from_pretrained( + llama3_model_name, + use_cache=False, + attn_implementation="sdpa", + num_hidden_layers=1, + ) + .eval() + .cuda() +) +LLAMA_CONFIG = llama_model.config + + +def test_llama_attention(args): + + DTYPE = torch.float32 + if args.precision == "FP16": + DTYPE = torch.float16 + elif args.precision == "BF16": + DTYPE = torch.bfloat16 + + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + + # model = LlamaAttentionBlock().eval().cuda().to(DTYPE) + model = llama_model.model.layers[0].self_attn.to(DTYPE) + # llama3 + hidden_states = torch.randn((1, 6, 2048), dtype=DTYPE).cuda() + position_embeddings = ( + torch.randn((1, 6, 64), dtype=DTYPE).cuda(), + torch.randn((1, 6, 64), dtype=DTYPE).cuda(), + ) + + pyt_output = model(hidden_states, position_embeddings, None) + seq_len = torch.export.Dim("seq_len", min=2, max=2176) + dynamic_shapes = ({1: seq_len}, ({1: seq_len}, {1: seq_len}), None) + from torch.export._trace import _export + + # ep = torch.export.export(model, (hidden_states, position_embeddings, None), dynamic_shapes=dynamic_shapes, strict=False) + ep = _export( + model, + args=(hidden_states, position_embeddings, None), + dynamic_shapes=dynamic_shapes, + strict=False, + allow_complex_guards_as_runtime_asserts=True, + ) + + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = 
torch_tensorrt.dynamo.compile( + ep, + inputs=[hidden_states, position_embeddings, None], + enabled_precisions=enabled_precisions, + disable_tf32=True, + use_fp32_acc=use_fp32_acc, + use_explicit_typing=use_explicit_typing, + debug=args.debug, + ) + trt_output = trt_model(hidden_states, position_embeddings, None) + if isinstance(pyt_output, tuple): + print( + f"Diff b/w pyt and trt: {torch.mean(torch.abs(pyt_output[0] - trt_output[0]))}" + ) + assert torch.allclose(pyt_output[0], trt_output[0], atol=ATOL, rtol=RTOL) + else: + print(f"Diff b/w pyt and trt: {torch.mean(torch.abs(pyt_output - trt_output))}") + assert torch.allclose(pyt_output, trt_output, atol=ATOL, rtol=RTOL) + + +def print_diff(tensor1, tensor2, prefix=""): + """ + Print the diff between two tensors + """ + print( + f"[{prefix}] Diff between tensor1 and tensor2: {torch.mean(torch.abs(tensor1 - tensor2))}" + ) + + +def test_llama_attention_with_static_cache(args): + class LlamaAttentionBlock(nn.Module): + def __init__(self): + super().__init__() + self.config = LLAMA_CONFIG + self.attn = LlamaAttention(config=self.config, layer_idx=0) + + def forward(self, hidden_states, position_embeddings): + attn_output, attn_weights = self.attn( + hidden_states, position_embeddings, None + ) + return attn_output + + DTYPE = torch.float32 + if args.precision == "FP16": + DTYPE = torch.float16 + elif args.precision == "BF16": + DTYPE = torch.bfloat16 + + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + model = llama_model.model.layers[0].self_attn.to(DTYPE) + + # Inputs + ISL = 2048 + NUM_TOKENS = 128 + OSL = ISL + NUM_TOKENS + hidden_states = torch.randn((1, ISL, 2048), dtype=DTYPE).cuda() + position_embeddings = ( + 
torch.randn((1, ISL, 64), dtype=DTYPE).cuda(), + torch.randn((1, ISL, 64), dtype=DTYPE).cuda(), + ) + key_cache = torch.zeros(1, 32, OSL, 64).cuda().to(DTYPE) + value_cache = torch.zeros(1, 32, OSL, 64).cuda().to(DTYPE) + start_idx = 0 + end_idx = ISL + is_causal = True + + pyt_output = model(hidden_states, position_embeddings, None) + seq_len = torch.export.Dim("seq_len", min=2, max=2176) + dynamic_shapes = ({1: seq_len}, ({1: seq_len}, {1: seq_len}), None) + ep = torch.export.export( + model, (hidden_states, position_embeddings, None), dynamic_shapes=dynamic_shapes + ) + import static_cache_v2 + + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + ep, + inputs=[ + hidden_states, + position_embeddings, + None, + key_cache, + value_cache, + start_idx, + end_idx, + is_causal, + ], + enabled_precisions=enabled_precisions, + disable_tf32=True, + debug=args.debug, + # offload_module_to_cpu=True, + use_fp32_acc=use_fp32_acc, + use_explicit_typing=use_explicit_typing, + use_python_runtime=True, + ) + + # Test Prefill + trt_output, _, key_cache, value_cache = trt_model( + hidden_states, + position_embeddings, + None, + key_cache, + value_cache, + start_idx, + end_idx, + is_causal, + ) + print_diff(pyt_output[0], trt_output[0], "pyt_output[0] vs trt_output[0] [Prefill]") + + # Test Generate + for start_idx in range(2048, 2176): + end_idx = start_idx + 1 + hidden_states_curr = torch.randn((1, 1, 2048), dtype=DTYPE).cuda() + position_embeddings_curr = ( + torch.randn((1, 1, 64), dtype=DTYPE).cuda(), + torch.randn((1, 1, 64), dtype=DTYPE).cuda(), + ) + # Concatenate the current hidden_states with the previous ones + hidden_states_full = torch.cat((hidden_states, hidden_states_curr), dim=1) + position_embeddings_full = ( + torch.cat((position_embeddings[0], position_embeddings_curr[0]), dim=1), + torch.cat((position_embeddings[1], position_embeddings_curr[1]), dim=1), + ) + + is_causal = False + out_no_cache, _ 
= model(hidden_states_full, position_embeddings_full, None) + out_trt, _, key_cache, value_cache = trt_model( + hidden_states_curr, + position_embeddings_curr, + None, + key_cache, + value_cache, + start_idx, + end_idx, + is_causal, + ) + out_pyt = out_no_cache[:, -1:, :] + print_diff(out_pyt, out_trt, f"pyt_curr_output vs out_trt for idx {start_idx}") + + hidden_states = hidden_states_full + position_embeddings = position_embeddings_full + + +def test_llama_decoder(args): + + class LlamaDecoderLayerBlock(nn.Module): + def __init__(self, model): + super().__init__() + self.config = LLAMA_CONFIG + self.decoder = LlamaDecoderLayer(config=self.config, layer_idx=0) + self.model = model + + def forward(self, hidden_states, position_embeddings): + return self.model(hidden_states, position_embeddings=position_embeddings) + + DTYPE = torch.float32 + if args.precision == "FP16": + DTYPE = torch.float16 + elif args.precision == "BF16": + DTYPE = torch.bfloat16 + + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + + model = LlamaDecoderLayerBlock(llama_model.model.layers[0].to(DTYPE)) + # llama3 + hidden_states = torch.randn((1, 6, 2048), dtype=DTYPE).cuda() + position_embeddings = ( + torch.randn((1, 6, 64), dtype=DTYPE).cuda(), + torch.randn((1, 6, 64), dtype=DTYPE).cuda(), + ) + + pyt_output = model(hidden_states, position_embeddings) + seq_len = torch.export.Dim("seq_len", min=2, max=2176) + dynamic_shapes = ({1: seq_len}, ({1: seq_len}, {1: seq_len})) + ep = torch.export.export( + model, (hidden_states, position_embeddings), dynamic_shapes=dynamic_shapes + ) + + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + 
ep, + inputs=[hidden_states, position_embeddings], + enabled_precisions=enabled_precisions, + debug=args.debug, + use_fp32_acc=use_fp32_acc, + use_explicit_typing=use_explicit_typing, + ) + trt_output = trt_model(hidden_states, position_embeddings) + + print( + f"Diff b/w pyt and trt: {torch.mean(torch.abs(pyt_output[0] - trt_output[0]))}" + ) + assert torch.allclose(pyt_output[0], trt_output[0], atol=ATOL, rtol=RTOL) + + +def test_llama_decoder_with_static_cache(args): + + class LlamaDecoderLayerBlock(nn.Module): + def __init__(self, model): + super().__init__() + self.config = LLAMA_CONFIG + self.decoder = LlamaDecoderLayer(config=self.config, layer_idx=0) + self.model = model + + def forward(self, hidden_states, position_embeddings): + return self.model(hidden_states, position_embeddings=position_embeddings) + + DTYPE = torch.float32 + if args.precision == "FP16": + DTYPE = torch.float16 + elif args.precision == "BF16": + DTYPE = torch.bfloat16 + + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + model = LlamaDecoderLayerBlock(llama_model.model.layers[0].to(DTYPE)) + + # Inputs + ISL = 2048 + NUM_TOKENS = 128 + OSL = ISL + NUM_TOKENS + hidden_states = torch.randn((1, ISL, 2048), dtype=DTYPE).cuda() + position_embeddings = ( + torch.randn((1, ISL, 64), dtype=DTYPE).cuda(), + torch.randn((1, ISL, 64), dtype=DTYPE).cuda(), + ) + key_cache = torch.zeros(1, 32, OSL, 64).cuda().to(DTYPE) + value_cache = torch.zeros(1, 32, OSL, 64).cuda().to(DTYPE) + start_idx = 0 + end_idx = ISL + is_causal = True + + pyt_output = model(hidden_states, position_embeddings) + seq_len = torch.export.Dim("seq_len", min=2, max=2176) + dynamic_shapes = ({1: seq_len}, ({1: seq_len}, {1: 
seq_len})) + ep = torch.export.export( + model, args=(hidden_states, position_embeddings), dynamic_shapes=dynamic_shapes + ) + import static_cache_v2 + + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + ep, + arg_inputs=[ + hidden_states, + position_embeddings, + key_cache, + value_cache, + start_idx, + end_idx, + is_causal, + ], + enabled_precisions=enabled_precisions, + disable_tf32=True, + debug=args.debug, + # offload_module_to_cpu=True, + use_fp32_acc=use_fp32_acc, + use_explicit_typing=use_explicit_typing, + use_python_runtime=True, + ) + + # Test Prefill + trt_output, key_cache, value_cache = trt_model( + hidden_states, + position_embeddings, + key_cache, + value_cache, + start_idx, + end_idx, + is_causal, + ) + print_diff(pyt_output[0], trt_output, "pyt_output vs trt_output [Prefill]") + + # Test Generate + for start_idx in range(2048, 2176): + end_idx = start_idx + 1 + hidden_states_curr = torch.randn((1, 1, 2048), dtype=DTYPE).cuda() + position_embeddings_curr = ( + torch.randn((1, 1, 64), dtype=DTYPE).cuda(), + torch.randn((1, 1, 64), dtype=DTYPE).cuda(), + ) + # Concatenate the current hidden_states with the previous ones + hidden_states_full = torch.cat((hidden_states, hidden_states_curr), dim=1) + position_embeddings_full = ( + torch.cat((position_embeddings[0], position_embeddings_curr[0]), dim=1), + torch.cat((position_embeddings[1], position_embeddings_curr[1]), dim=1), + ) + + is_causal = False + out_no_cache = model(hidden_states_full, position_embeddings_full) + + out_trt, key_cache, value_cache = trt_model( + hidden_states_curr, + position_embeddings_curr, + key_cache, + value_cache, + start_idx, + end_idx, + is_causal, + ) + out_pyt = out_no_cache[0][:, -1:, :] + print_diff(out_pyt, out_trt, f"pyt_curr_output vs out_trt for idx {start_idx}") + hidden_states = hidden_states_full + position_embeddings = position_embeddings_full + + +def test_llama_model(args): + + DTYPE = 
torch.float32 + if args.precision == "FP16": + DTYPE = torch.float16 + elif args.precision == "BF16": + DTYPE = torch.bfloat16 + + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + + model = llama_model.model.to(DTYPE) + + # Inputs + ISL = 2048 + NUM_TOKENS = 128 + OSL = ISL + NUM_TOKENS + input_ids = torch.randint(1, 20, (1, ISL), dtype=torch.int64).cuda() + position_ids = torch.arange(input_ids.shape[1]).unsqueeze(0).cuda() + + pyt_output = model(input_ids, position_ids) + seq_len = torch.export.Dim("seq_len", min=2, max=2176) + dynamic_shapes = ({1: seq_len}, {1: seq_len}) + kwarg_inputs = {"position_ids": position_ids} + from torch.export._trace import _export + + ep = _export( + model, + args=(input_ids,), + kwargs=kwarg_inputs, + dynamic_shapes=dynamic_shapes, + strict=False, + allow_complex_guards_as_runtime_asserts=True, + ) + + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + ep, + arg_inputs=[], + kwarg_inputs=kwarg_inputs, + enabled_precisions=enabled_precisions, + disable_tf32=True, + debug=args.debug, + offload_module_to_cpu=True, + use_fp32_acc=use_fp32_acc, + use_explicit_typing=use_explicit_typing, + use_python_runtime=True, + ) + + trt_output = trt_model(input_ids, position_ids) + + print( + f"Diff b/w pyt and trt: {torch.mean(torch.abs(pyt_output[0] - trt_output[0]))}" + ) + # print(f"Diff b/w pyt and trt: {torch.mean(torch.abs(pyt_output[1] - trt_output[1]))}") + breakpoint() + assert torch.allclose(pyt_output, trt_output, atol=ATOL, rtol=RTOL) + + +def test_llama_model_with_static_cache(args): + + DTYPE = torch.float32 + if args.precision == "FP16": + DTYPE = torch.float16 + elif 
args.precision == "BF16": + DTYPE = torch.bfloat16 + + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + model = llama_model.model.to(DTYPE) + + # Inputs + ISL = 2048 + NUM_TOKENS = 128 + OSL = ISL + NUM_TOKENS + input_ids = torch.randint(1, 20, (1, ISL), dtype=torch.int64).cuda() + position_ids = torch.arange(input_ids.shape[1]).unsqueeze(0).cuda() + key_cache = torch.zeros(1, 32, OSL, 64).cuda().to(DTYPE) + value_cache = torch.zeros(1, 32, OSL, 64).cuda().to(DTYPE) + start_idx = 0 + end_idx = ISL + is_causal = True + + pyt_output = model(input_ids) + seq_len = torch.export.Dim("seq_len", min=2, max=2176) + dynamic_shapes = ({1: seq_len}, {1: seq_len}) + kwarg_inputs = {"input_ids": input_ids, "position_ids": position_ids} + ep = torch.export.export( + model, args=(), kwargs=kwarg_inputs, dynamic_shapes=dynamic_shapes + ) + + import static_cache_v2 + + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + ep, + arg_inputs=[], + kwarg_inputs=kwarg_inputs, + enabled_precisions=enabled_precisions, + disable_tf32=True, + debug=args.debug, + # offload_module_to_cpu=True, + use_fp32_acc=use_fp32_acc, + use_explicit_typing=use_explicit_typing, + use_python_runtime=True, + ) + + # Test Prefill + trt_output, key_cache, value_cache = trt_model( + input_ids, position_ids, key_cache, value_cache, start_idx, end_idx, is_causal + ) + pyt_output = pyt_output.last_hidden_state + print_diff(pyt_output, trt_output, "pyt_output vs trt_output [Prefill]") + + # Test Generate + for start_idx in range(2048, 2176): + end_idx = start_idx + 1 + input_ids_curr = torch.randint(1, 20, (1, 1), dtype=torch.int64).cuda() + 
position_ids_curr = torch.tensor([[start_idx]], dtype=torch.int64).cuda() + + # Concatenate the current hidden_states with the previous ones + input_ids_full = torch.cat((input_ids, input_ids_curr), dim=1) + position_ids_full = torch.cat((position_ids, position_ids_curr), dim=1) + is_causal = False + kwarg_inputs = {"input_ids": input_ids_full, "position_ids": position_ids_full} + out_no_cache = model(**kwarg_inputs) + + out_trt, key_cache, value_cache = trt_model( + input_ids_curr, + position_ids_curr, + key_cache, + value_cache, + start_idx, + end_idx, + is_causal, + ) + out_pyt = out_no_cache.last_hidden_state[:, -1:, :] + print_diff(out_pyt, out_trt, f"pyt_curr_output vs out_trt for idx {start_idx}") + input_ids = input_ids_full + position_ids = position_ids_full + + +if __name__ == "__main__": + arg_parser = argparse.ArgumentParser( + description="Run test cases for llama attention and decoder" + ) + arg_parser.add_argument( + "--debug", action="store_true", help="Enable debug (default: False)" + ) + arg_parser.add_argument( + "--precision", type=str, default="FP16", help="Precision (default: FP16)" + ) + args = arg_parser.parse_args() + with torch.inference_mode(): + # test_llama_attention(args) + # test_llama_decoder(args) + test_llama_model(args) + # test_llama_attention_with_static_cache(args) + # test_llama_decoder_with_static_cache(args) + # test_llama_model_with_static_cache(args) diff --git a/tools/llm/test_qwen2.5_components.py b/tools/llm/test_qwen2.5_components.py new file mode 100644 index 0000000000..60482bf22d --- /dev/null +++ b/tools/llm/test_qwen2.5_components.py @@ -0,0 +1,193 @@ +import torch + +torch.backends.cuda.matmul.allow_tf32 = False +torch.backends.cudnn.allow_tf32 = False + +import argparse +import os +import sys +from contextlib import nullcontext + +import torch.nn as nn +import torch_tensorrt +from torch.testing._internal.common_utils import TestCase, run_tests +from transformers import AutoModelForCausalLM +from 
transformers.models.llama.configuration_llama import LlamaConfig + +# Register SDPA as a standalone operator. Converter and lowering pass are defined in register_sdpa.py +sys.path.append(os.path.join(os.path.dirname(__file__), "..")) +from register_sdpa import * + +ATOL = 1e-5 +RTOL = 1e-5 + + +qwen2_5_model_name = "Qwen/Qwen2.5-1.5B-Instruct" +qwen2_5_model = ( + AutoModelForCausalLM.from_pretrained( + qwen2_5_model_name, + use_cache=False, + attn_implementation="sdpa", + num_hidden_layers=1, + ) + .eval() + .cuda() +) +QWEN_CONFIG = qwen2_5_model.config + + +def print_diff(tensor1, tensor2, prefix=""): + """ + Print the diff between two tensors + """ + print( + f"[{prefix}] Diff between tensor1 and tensor2: {torch.mean(torch.abs(tensor1 - tensor2))}" + ) + + +def test_qwen_apply_rotary_pos_emb(args): + class QwenApplyRotaryPosEmb(nn.Module): + def __init__(self): + super().__init__() + + def rotate_half(self, x): + x1 = x[..., : x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2 :] + return torch.cat((-x2, x1), dim=-1) + + def apply_rotary_pos_emb(self, q, k, cos, sin, unsqueeze_dim=1): + cos = cos.unsqueeze(unsqueeze_dim) + sin = sin.unsqueeze(unsqueeze_dim) + q_embed = (q * cos) + (self.rotate_half(q) * sin) + k_embed = (k * cos) + (self.rotate_half(k) * sin) + return q_embed, k_embed + + def forward(self, q, k, cos, sin, unsqueeze_dim=1): + return self.apply_rotary_pos_emb(q, k, cos, sin, unsqueeze_dim) + + DTYPE = torch.float32 + if args.precision == "FP16": + DTYPE = torch.float16 + elif args.precision == "BF16": + DTYPE = torch.bfloat16 + + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + + model = QwenApplyRotaryPosEmb().eval().cuda().to(DTYPE) + # Shapes for 
Qwen 2.5 + q = torch.randn((1, 12, 5, 128), dtype=DTYPE).cuda() + k = torch.randn((1, 12, 5, 128), dtype=DTYPE).cuda() + cos = torch.randn((1, 5, 128), dtype=DTYPE).cuda() + sin = torch.randn((1, 5, 128), dtype=DTYPE).cuda() + + pyt_output = model(q, k, cos, sin) + + seq_len = torch.export.Dim("seq_len", min=2, max=2176) + dynamic_shapes = ({2: seq_len}, {2: seq_len}, {1: seq_len}, {1: seq_len}) + ep = torch.export.export(model, (q, k, cos, sin), dynamic_shapes=dynamic_shapes) + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + ep, + inputs=[q, k, cos, sin], + enabled_precisions=enabled_precisions, + disable_tf32=True, + use_fp32_acc=use_fp32_acc, + use_explicit_typing=use_explicit_typing, + debug=args.debug, + ) + trt_output = trt_model(q, k, cos, sin) + + if isinstance(pyt_output, tuple): + print_diff(pyt_output[0], trt_output[0], "Diff b/w pyt and trt") + # print_diff(pyt_output[1], trt_output[1], "Diff b/w pyt and trt") + assert torch.allclose(pyt_output[0], trt_output[0], atol=ATOL, rtol=RTOL) + else: + print_diff(pyt_output, trt_output, "Diff b/w pyt and trt") + assert torch.allclose(pyt_output, trt_output, atol=ATOL, rtol=RTOL) + + +def test_qwen_attention(args): + + DTYPE = torch.float32 + if args.precision == "FP16": + DTYPE = torch.float16 + elif args.precision == "BF16": + DTYPE = torch.bfloat16 + + # Set precision specific flags + use_fp32_acc = False + use_explicit_typing = False + if args.precision == "FP16": + enabled_precisions = {torch.float32} + use_fp32_acc = True + use_explicit_typing = True + elif args.precision == "BF16": + enabled_precisions = {torch.bfloat16} + use_fp32_acc = False + else: + enabled_precisions = {torch.float32} + + model = qwen2_5_model.model.layers[0].self_attn.to(DTYPE) + # qwen2.5 + hidden_states = torch.randn((1, 5, 1536), dtype=DTYPE).cuda() + position_embeddings = ( + torch.randn((1, 5, 128), dtype=DTYPE).cuda(), + torch.randn((1, 5, 128), 
dtype=DTYPE).cuda(), + ) + + pyt_output = model(hidden_states, position_embeddings, None) + + seq_len = torch.export.Dim("seq_len", min=2, max=2176) + dynamic_shapes = ({1: seq_len}, ({1: seq_len}, {1: seq_len}), None) + ep = torch.export.export( + model, (hidden_states, position_embeddings, None), dynamic_shapes=dynamic_shapes + ) + + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + ep, + inputs=[hidden_states, position_embeddings, None], + enabled_precisions=enabled_precisions, + disable_tf32=True, + use_fp32_acc=use_fp32_acc, + use_explicit_typing=use_explicit_typing, + debug=args.debug, + ) + trt_output = trt_model(hidden_states, position_embeddings, None) + + if isinstance(pyt_output, tuple): + print_diff(pyt_output[0], trt_output[0], "Diff b/w pyt and trt") + assert torch.allclose(pyt_output[0], trt_output[0], atol=ATOL, rtol=RTOL) + else: + print_diff(pyt_output, trt_output, "Diff b/w pyt and trt") + assert torch.allclose(pyt_output, trt_output, atol=ATOL, rtol=RTOL) + + +if __name__ == "__main__": + arg_parser = argparse.ArgumentParser( + description="Run test cases for llama attention and decoder" + ) + arg_parser.add_argument( + "--debug", action="store_true", help="Enable debug (default: False)" + ) + arg_parser.add_argument( + "--precision", + type=str, + default="FP16", + help="Precision to use in the model. 
Options: FP16, BF16, FP32", + ) + args = arg_parser.parse_args() + with torch.inference_mode(): + # test_qwen_apply_rotary_pos_emb(args) + test_qwen_attention(args) diff --git a/tools/llm/test_static_cache.py b/tools/llm/test_static_cache.py new file mode 100644 index 0000000000..603f84d3a6 --- /dev/null +++ b/tools/llm/test_static_cache.py @@ -0,0 +1,478 @@ +import argparse +import os +import sys +from contextlib import nullcontext + +import torch +import torch.nn as nn +import torch_tensorrt +from torch.export import export +from torch_tensorrt.dynamo.lowering import ( + get_decompositions, + post_lowering, + pre_export_lowering, +) +from transformers import AutoModelForCausalLM +from transformers.models.llama.configuration_llama import LlamaConfig +from transformers.models.llama.modeling_llama import LlamaAttention, LlamaDecoderLayer + +# Register SDPA as a standalone operator. Converter and lowering pass are defined in register_sdpa.py +sys.path.append(os.path.join(os.path.dirname(__file__), "..")) +import register_sdpa + +ATOL = 1e-5 +RTOL = 1e-5 +torch.backends.cuda.matmul.allow_tf32 = False +torch.backends.cudnn.allow_tf32 = False + + +class DynamicCacheModel(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, q, k, v, k1, v1, flag): + def true_fn(q, k, v, k1, v1): + k_new = torch.cat((k, k1), dim=2) + v_new = torch.cat((v, v1), dim=2) + return torch._C._nn.scaled_dot_product_attention(q, k_new, v_new) + + def false_fn(q, k, v, k1, v1): + return torch._C._nn.scaled_dot_product_attention(q, k, v) + + out = torch.cond(flag, true_fn, false_fn, (q, k, v, k1, v1)) + + return 2 * out + + +class ModelNoCache(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, q, k, v): + return torch._C._nn.scaled_dot_product_attention( + q, k, v, dropout_p=0.0, is_causal=True + ) + + +class StaticCacheModel(nn.Module): + def __init__(self): + super().__init__() + + def forward( + self, q, k, v, key_cache, value_cache, start_idx, 
end_idx, is_causal=True + ): + new_key_cache = torch.cat( + (key_cache[:, :, :start_idx, :], k, key_cache[:, :, end_idx:, :]), dim=2 + ) + new_value_cache = torch.cat( + (value_cache[:, :, :start_idx, :], v, value_cache[:, :, end_idx:, :]), dim=2 + ) + attn_output = torch._C._nn.scaled_dot_product_attention( + q, + new_key_cache[:, :, :end_idx, :], + new_value_cache[:, :, :end_idx, :], + dropout_p=0.0, + is_causal=is_causal, + ) + + return attn_output, new_key_cache, new_value_cache + + def forward( + self, q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal=True + ): + concat_keys = torch.cat((key_cache[:, :, :start_idx, :], k), dim=2) + concat_values = torch.cat((value_cache[:, :, :start_idx, :], v), dim=2) + new_key_cache = torch.cat((concat_keys, key_cache[:, :, end_idx:, :]), dim=2) + new_value_cache = torch.cat( + (concat_values, value_cache[:, :, end_idx:, :]), dim=2 + ) + attn_output = torch._C._nn.scaled_dot_product_attention( + q, concat_keys, concat_values, dropout_p=0.0, is_causal=is_causal + ) + + return attn_output, new_key_cache, new_value_cache + + +def eager_sdpa( + query, + key, + value, + attn_mask=None, + dropout_p=0.0, + is_causal=False, + scale=None, + enable_gqa=False, +) -> torch.Tensor: + """ + Eager implementation of SDPA + """ + import math + + L, S = query.size(-2), key.size(-2) + scale_factor = 1 / math.sqrt(query.size(-1)) if scale is None else scale + attn_bias = torch.zeros(L, S, dtype=query.dtype, device=query.device) + + if is_causal: + assert attn_mask is None + temp_mask = torch.ones(L, S, dtype=torch.bool).tril(diagonal=0).cuda() + attn_bias.masked_fill_(temp_mask.logical_not(), float("-inf")) + attn_bias.to(query.dtype) + + if attn_mask is not None: + if attn_mask.dtype == torch.bool: + attn_bias.masked_fill_(attn_mask.logical_not(), float("-inf")) + else: + attn_bias = attn_mask + attn_bias + + if enable_gqa: + key = key.repeat_interleave(query.size(-3) // key.size(-3), -3) + value = 
value.repeat_interleave(query.size(-3) // value.size(-3), -3) + + attn_weight = query @ key.transpose(-2, -1) * scale_factor + attn_weight += attn_bias + attn_weight = torch.softmax(attn_weight, dim=-1) + attn_weight = torch.dropout(attn_weight, dropout_p, train=True) + return attn_weight @ value + + +def print_diff(tensor1, tensor2, prefix=""): + """ + Print the diff between two tensors + """ + print( + f"[{prefix}] Diff between tensor1 and tensor2: {torch.mean(torch.abs(tensor1 - tensor2))}" + ) + + +def test_no_cache_model_with_torch_tensorrt(args): + """ + Test the no cache model + """ + with torch.inference_mode(): + model_no_cache = ModelNoCache().eval().cuda() + # q = torch.randn(1, 32, 6, 64).cuda() + # k = torch.randn(1, 32, 6, 64).cuda() + # v = torch.randn(1, 32, 6, 64).cuda() + q = torch.load("query.pt") + k = torch.load("key.pt") + v = torch.load("value.pt") + out_no_cache = model_no_cache(q, k, v) + out_eager = eager_sdpa(q, k, v, is_causal=True) + q_seq_len = torch.export.Dim("q_seq_len", min=2, max=2176) + # Export the model + exported_program = torch.export.export( + model_no_cache, + args=(q, k, v), + dynamic_shapes=({2: q_seq_len}, {2: q_seq_len}, {2: q_seq_len}), + strict=False, + ) + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + exported_program, + inputs=[q, k, v], + enabled_precisions={torch.float32}, + disable_tf32=True, + debug=args.debug, + min_block_size=1, + ) + out_trt = trt_model(q, k, v) + + print_diff(out_no_cache, out_eager, "out_no_cache vs out_eager") + print_diff(out_no_cache, out_trt, "out_no_cache vs out_trt") + print_diff(out_eager, out_trt, "out_eager vs out_trt") + breakpoint() + + +def test_static_cache_model(args): + """ + Test the static cache model + """ + with torch.inference_mode(): + model_no_cache = ModelNoCache().eval().cuda() + model_static_cache = StaticCacheModel().eval().cuda() + q = torch.randn(1, 32, 2048, 64).cuda() + k = torch.randn(1, 
32, 2048, 64).cuda() + v = torch.randn(1, 32, 2048, 64).cuda() + key_cache = torch.zeros(1, 32, 2176, 64).cuda() + value_cache = torch.zeros(1, 32, 2176, 64).cuda() + + # Test Prefill + start_idx = 0 + end_idx = 2048 + out_no_cache = model_no_cache(q, k, v) + out_static_cache, new_key_cache, new_value_cache = model_static_cache( + q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal=True + ) + assert torch.allclose(out_no_cache, out_static_cache, atol=ATOL, rtol=RTOL) + + # Test Generate + for start_idx in range(2048, 2176): + end_idx = start_idx + 1 + q_curr = torch.randn(1, 32, 1, 64).cuda() + k_curr = torch.randn(1, 32, 1, 64).cuda() + v_curr = torch.randn(1, 32, 1, 64).cuda() + + # Concatenate the current query, key, and value with the previous ones + q_full = torch.cat((q, q_curr), dim=2) + k_full = torch.cat((k, k_curr), dim=2) + v_full = torch.cat((v, v_curr), dim=2) + + out_no_cache = model_no_cache(q_full, k_full, v_full) + out_static_cache, new_key_cache, new_value_cache = model_static_cache( + q_curr, + k_curr, + v_curr, + new_key_cache, + new_value_cache, + start_idx, + end_idx, + is_causal=False, + ) + + assert torch.allclose( + out_no_cache[:, :, -1:, :], out_static_cache, atol=ATOL, rtol=RTOL + ) + q = q_full + k = k_full + v = v_full + print("============== test_static_cache passed ==============") + + +def transform_gm_with_kv_cache(exported_program: torch.export.ExportedProgram, args): + """ + Transform the graph module by adding key and value cache to the graph + """ + gm = exported_program.module() + # Post lower the model + settings = torch_tensorrt.dynamo.conversion.CompilationSettings( + enabled_precisions={torch.float32}, + disable_tf32=True, + use_python_runtime=True, + debug=args.debug, + min_block_size=1, + ) + exported_program = pre_export_lowering(exported_program, settings) + exported_program = exported_program.run_decompositions(get_decompositions(False)) + + gm = exported_program.module() + gm = post_lowering(gm, settings) 
+ + return gm + + +def test_static_cache_lowering(args): + """ + Test static cache lowering pass applied to the model with no cache and run the graph module + and compare the output with the model with no cache + """ + import static_cache2 + + model_no_cache = ModelNoCache().eval().cuda() + q = torch.randn(1, 32, 2, 64).cuda() + k = torch.randn(1, 32, 2048, 64).cuda() + v = torch.randn(1, 32, 2048, 64).cuda() + key_cache = torch.zeros(1, 32, 2176, 64).cuda() + value_cache = torch.zeros(1, 32, 2176, 64).cuda() + + # Export the model + q_seq_len = torch.export.Dim("q_seq_len", min=2, max=2176) + kv_seq_len = torch.export.Dim("kv_seq_len", min=2, max=2176) + exported_program = export( + model_no_cache, + args=(q, k, v), + dynamic_shapes=({2: q_seq_len}, {2: kv_seq_len}, {2: kv_seq_len}), + strict=False, + ) + + gm = transform_gm_with_kv_cache(exported_program, args) + + # Test Prefill + start_idx = 0 + end_idx = 2048 + is_causal = True + q = torch.randn(1, 32, 2048, 64).cuda() + out_no_cache = model_no_cache(q, k, v) + out_pyt_cache, key_cache, value_cache = gm( + q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal + ) + assert torch.allclose(out_no_cache, out_pyt_cache, atol=ATOL, rtol=RTOL) + + # Test Generate + for start_idx in range(2048, 2176): + end_idx = start_idx + 1 + is_causal = False + q_curr = torch.randn(1, 32, 1, 64).cuda() + k_curr = torch.randn(1, 32, 1, 64).cuda() + v_curr = torch.randn(1, 32, 1, 64).cuda() + # Concatenate the current query, key, and value with the previous ones + q_full = torch.cat((q, q_curr), dim=2) + k_full = torch.cat((k, k_curr), dim=2) + v_full = torch.cat((v, v_curr), dim=2) + + out_no_cache = model_no_cache(q_full, k_full, v_full) + out_pyt_static_cache, key_cache, value_cache = gm( + q_curr, + k_curr, + v_curr, + key_cache, + value_cache, + start_idx, + end_idx, + is_causal, + ) + assert torch.allclose( + out_no_cache[:, :, -1:, :], out_pyt_static_cache, atol=ATOL, rtol=RTOL + ) + q = q_full + k = k_full + v = 
v_full + + print("============== test_static_cache_lowering passed ==============") + + +def test_static_cache_export(args): + """ + Test the static cache model export + """ + model_static_cache = StaticCacheModel().eval().cuda() + q = torch.randn(1, 32, 2048, 64).cuda() + k = torch.randn(1, 32, 2048, 64).cuda() + v = torch.randn(1, 32, 2048, 64).cuda() + key_cache = torch.zeros(1, 32, 2176, 64).cuda() + value_cache = torch.zeros(1, 32, 2176, 64).cuda() + # Test Prefill + start_idx = 0 + end_idx = 2048 + is_causal = True + # Export the model + seq_len = torch.export.Dim("seq_len", min=2, max=2048) + seq_len_dyn_dim = {2: seq_len} + exported_program = export( + model_static_cache, + args=(q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal), + dynamic_shapes=( + seq_len_dyn_dim, + seq_len_dyn_dim, + seq_len_dyn_dim, + None, + None, + torch.export.Dim.DYNAMIC, + torch.export.Dim.DYNAMIC, + None, + ), + strict=False, + ) + + +def test_static_cache_with_torch_tensorrt(args): + """ + Test the static cache model with torch_tensorrt + """ + import static_cache_v2 + + model_no_cache = ModelNoCache().eval().cuda() + q = torch.randn(1, 32, 2, 64).cuda() + k = torch.randn(1, 32, 2048, 64).cuda() + v = torch.randn(1, 32, 2048, 64).cuda() + key_cache = torch.zeros(1, 32, 2176, 64).cuda() + value_cache = torch.zeros(1, 32, 2176, 64).cuda() + + # Export the model + q_seq_len = torch.export.Dim("q_seq_len", min=2, max=2176) + kv_seq_len = torch.export.Dim("kv_seq_len", min=2, max=2176) + exported_program = export( + model_no_cache, + args=(q, k, v), + dynamic_shapes=({2: q_seq_len}, {2: kv_seq_len}, {2: kv_seq_len}), + strict=False, + ) + with torch_tensorrt.logging.debug() if args.debug else nullcontext(): + trt_model = torch_tensorrt.dynamo.compile( + exported_program, + inputs=[q, k, v], + enabled_precisions={torch.float32}, + disable_tf32=True, + use_python_runtime=True, + debug=args.debug, + min_block_size=1, + ) + + start_idx = 0 + end_idx = 2048 + is_causal = 
True + q = torch.randn(1, 32, 2048, 64).cuda() + # out_eager = eager_sdpa(q, k, v, is_causal=is_causal) + out_no_cache = model_no_cache(q, k, v) + out_trt, trt_key_cache, trt_value_cache = trt_model( + q, k, v, key_cache, value_cache, start_idx, end_idx, is_causal + ) + + assert torch.allclose( + out_no_cache, out_trt, atol=ATOL, rtol=RTOL + ), "Prefill TRT logits don't match" + assert torch.allclose( + trt_key_cache[:, :, :end_idx, :], k, atol=ATOL, rtol=RTOL + ), "Prefill TRT key cache don't match" + assert torch.allclose( + trt_value_cache[:, :, :end_idx, :], v, atol=ATOL, rtol=RTOL + ), "Prefill TRT value cache don't match" + + # Test Generate + for start_idx in range(2048, 2176): + end_idx = start_idx + 1 + q_curr = torch.randn(1, 32, 1, 64).cuda() + k_curr = torch.randn(1, 32, 1, 64).cuda() + v_curr = torch.randn(1, 32, 1, 64).cuda() + # Concatenate the current query, key, and value with the previous ones + q_full = torch.cat((q, q_curr), dim=2) + k_full = torch.cat((k, k_curr), dim=2) + v_full = torch.cat((v, v_curr), dim=2) + is_causal = True + out_no_cache = model_no_cache(q_full, k_full, v_full) + out_trt, trt_key_cache, trt_value_cache = trt_model( + q_curr, + k_curr, + v_curr, + trt_key_cache, + trt_value_cache, + start_idx, + end_idx, + is_causal, + ) + # breakpoint() + # print_diff(out_no_cache[:, :, -1:, :], out_trt, f"out_no_cache[:, :, -1:, :] vs out_trt for idx {start_idx}") + # print_diff(trt_key_cache[:, :, :end_idx, :], k_full, f"trt_key_cache[:, :, :end_idx, :] vs k_full for idx {start_idx}") + # print_diff(trt_value_cache[:, :, :end_idx, :], v_full, f"trt_value_cache[:, :, :end_idx, :] vs v_full for idx {start_idx}") + assert torch.allclose( + out_no_cache[:, :, -1:, :], out_trt, atol=ATOL, rtol=RTOL + ), f"Generate TRT logits don't match for idx {start_idx}" + assert torch.allclose( + trt_key_cache[:, :, :end_idx, :], k_full, atol=ATOL, rtol=RTOL + ), f"Generate TRT key cache don't match for idx {start_idx}" + assert torch.allclose( + 
trt_value_cache[:, :, :end_idx, :], v_full, atol=ATOL, rtol=RTOL + ), f"Generate TRT value cache don't match for idx {start_idx}" + q = q_full + k = k_full + v = v_full + + print("============== test_static_cache_with_torch_tensorrt passed ==============") + + +def main(): + arg_parser = argparse.ArgumentParser( + description="Run test cases for llama attention and decoder" + ) + arg_parser.add_argument( + "--debug", action="store_true", help="Enable debug (default: False)" + ) + args = arg_parser.parse_args() + with torch.inference_mode(): + # test_no_cache_model_with_torch_tensorrt(args) + # test_static_cache_model(args) + # test_static_cache_lowering(args) + test_static_cache_with_torch_tensorrt(args) + + +if __name__ == "__main__": + main() diff --git a/examples/dynamo/register_sdpa.py b/tools/llm/torchtrt_ext/register_sdpa.py similarity index 86% rename from examples/dynamo/register_sdpa.py rename to tools/llm/torchtrt_ext/register_sdpa.py index 7436f31939..90a00a5798 100644 --- a/examples/dynamo/register_sdpa.py +++ b/tools/llm/torchtrt_ext/register_sdpa.py @@ -4,7 +4,6 @@ from typing import Callable, Sequence, Tuple import torch -from sdpa_converter import * from torch_tensorrt.dynamo._settings import CompilationSettings from torch_tensorrt.dynamo.conversion.aten_ops_converters import args_bounds_check from torch_tensorrt.dynamo.lowering import TORCH_TRT_DECOMPOSITIONS @@ -15,15 +14,19 @@ clean_up_graph_after_modifications, ) +from .sdpa_converter import * + logger = logging.getLogger(__name__) # Remove decompositions for aten.scaled_dot_product_attention, aten._scaled_dot_product_efficient_attention, aten._scaled_dot_product_flash_attention # This is because we want to have SDPA as a standalone operator in the graph and invoke the custom converter for it. 
-TORCH_TRT_DECOMPOSITIONS.pop(torch.ops.aten.scaled_dot_product_attention.default) +TORCH_TRT_DECOMPOSITIONS.pop(torch.ops.aten.scaled_dot_product_attention.default, None) +TORCH_TRT_DECOMPOSITIONS.pop( + torch.ops.aten._scaled_dot_product_efficient_attention.default, None +) TORCH_TRT_DECOMPOSITIONS.pop( - torch.ops.aten._scaled_dot_product_efficient_attention.default + torch.ops.aten._scaled_dot_product_flash_attention.default, None ) -TORCH_TRT_DECOMPOSITIONS.pop(torch.ops.aten._scaled_dot_product_flash_attention.default) REPLACEABLE_ATEN_OPS = { torch.ops.aten._scaled_dot_product_efficient_attention.default, @@ -59,6 +62,7 @@ def replace_variants_of_sdpa( elif len(node.args) == 5: query, key, value, attn_mask, is_causal = node.args dropout_p = 0.0 + else: raise ValueError( f"Unexpected number of arguments for {node.target} in the graph" @@ -71,6 +75,8 @@ def replace_variants_of_sdpa( query, key, value, dropout_p, is_causal, return_debug_mask = ( node.args ) + if len(node.args) == 5: + query, key, value, dropout_p, is_causal = node.args elif len(node.args) == 3: query, key, value = node.args dropout_p = 0.0 @@ -79,20 +85,21 @@ def replace_variants_of_sdpa( raise ValueError( f"Unexpected number of arguments for {node.target} in the graph" ) - if attn_mask is not None: - logger.warning( - f"This current version of SDPA converter does not support attn_mask for {node.target} in the graph. Ignoring it and using is_causal=True configuration." - ) - - modified_input_args = (query, key, value, None, dropout_p, is_causal) + logger.warning( + f"This current version of SDPA converter only supports attn_mask = None, dropout_p = 0.0 and is_causal = True configuration. This could cause issues with accuracy for models with different configurations." + ) + modified_input_args = (query, key, value, None, dropout_p, True) # Create a new node with torch.nn.functional.scaled_dot_product_attention # The input args is (query, key, value, is_causal). 
kwargs has scale with gm.graph.inserting_after(node): new_node = gm.graph.call_function( torch.nn.functional.scaled_dot_product_attention, args=modified_input_args, - kwargs={"scale": node.kwargs.get("scale", None)}, + kwargs={ + "scale": node.kwargs.get("scale", None), + "use_fp32_acc": settings.use_fp32_acc, + }, ) # Deep copy encounters RuntimeError: Cannot access data pointer of Tensor (e.g. FakeTensor, FunctionalTensor). So we use copy instead. @@ -113,7 +120,7 @@ def replace_variants_of_sdpa( # Clean up the graph clean_up_graph_after_modifications(gm) - logger.info( + logger.debug( "Replaced variants of scaled_dot_product_attention with torch.nn.functional.scaled_dot_product_attention" ) return gm diff --git a/examples/dynamo/sdpa_converter.py b/tools/llm/torchtrt_ext/sdpa_converter.py similarity index 51% rename from examples/dynamo/sdpa_converter.py rename to tools/llm/torchtrt_ext/sdpa_converter.py index 903324dff5..47083c7b48 100644 --- a/examples/dynamo/sdpa_converter.py +++ b/tools/llm/torchtrt_ext/sdpa_converter.py @@ -62,25 +62,15 @@ def scaled_dot_product_attention( ) -> TRTTensor: # TODO: Handle attn_mask and is_causal arguments in the future query, key, value, attn_mask, dropout_p, is_causal = args - logger.info( - "Ignoring attn_mask and is_causal arguments provided by the original graph. " - "This converter expects is_causal to be an input to the graph. 
For prefill phase, is_causal=True " - "and for generate phase, is_causal=False since we pass only 1 input token at a time" - ) # TODO: remove this once we have a better way to handle the causal mask scale = kwargs.get("scale", None) source_ir = SourceIR.ATEN + is_causal = True # implementation as described here: https://pytorch.org/docs/stable/generated/torch.nn.functional.scaled_dot_product_attention.html - mm = impl.matmul.matrix_multiply( - ctx, - target, - source_ir, - name + "_mm", - query, - key, - other_matrix_op=trt.MatrixOperation.TRANSPOSE, - ) + use_fp32_acc = kwargs.get("use_fp32_acc", False) + query_dtype = query.dtype + if scale is None: scale = query.shape[-1] if scale < 0: @@ -90,80 +80,106 @@ def scaled_dot_product_attention( else: # static shape sqrt_scaled = math.sqrt(scale) - scaled = impl.elementwise.div( + key = impl.elementwise.div( ctx, target, source_ir, name + "_scale", - mm, + key, sqrt_scaled, ) else: - scaled = impl.elementwise.mul( + key = impl.elementwise.mul( ctx, target, source_ir, name + "_scale", - mm, + key, scale, ) - # If is_causal is True, we need to generate a causal mask - if is_causal: - L, S = query.shape[-2], key.shape[-2] - if L >= 0 and S >= 0: - # static shape - attn_bias = np.zeros((L, S), dtype=dtype._from(query.dtype).to(np.dtype)) - temp_mask = np.logical_not(np.tril(np.ones((L, S), dtype=np.bool_), k=0)) - attn_bias = np.ma.array(attn_bias, mask=temp_mask).filled(float("-inf")) - attn_bias = get_trt_tensor(ctx, attn_bias, name + "_attn_bias") - else: - # if any of the L or S is dynamic shape - if L < 0: - L = impl.shape.shape( - ctx, target, source_ir, name + "_shape_0", query, 2 - ) - if S < 0: - S = impl.shape.shape(ctx, target, source_ir, name + "_shape_1", key, 2) - - # generate the mask tensor - tril_tensor = tril(ctx, target, source_ir, name + "_tril", L, S) - - temp_mask = impl.unary.logical_not( - ctx, target, source_ir, name + "_logical_not", tril_tensor - ) - temp_mask_casted = cast_trt_tensor( - ctx, 
temp_mask, trt.float32, name + "_casted_bool", target, source_ir - ) - one_minus_temp_mask = impl.elementwise.sub( - ctx, - target, - source_ir, - name + "_one_minus_temp_mask", - 1.0, - temp_mask_casted, - ) - attn_bias = impl.unary.log( - ctx, target, source_ir, name + "_log", one_minus_temp_mask - ) - - scaled_add_attn_bias = impl.elementwise.add( - ctx, target, source_ir, name + "_attn_bias_add", scaled, attn_bias + if use_fp32_acc and query_dtype == trt.float16: + query = cast_trt_tensor( + ctx, query, trt.float32, name + "_query_cast_to_fp32", target, source_ir + ) + key = cast_trt_tensor( + ctx, key, trt.float32, name + "_key_cast_to_fp32", target, source_ir ) + + mm = impl.matmul.matrix_multiply( + ctx, + target, + source_ir, + name + "_mm", + query, + key, + other_matrix_op=trt.MatrixOperation.TRANSPOSE, + ) + + if use_fp32_acc: + mm = cast_trt_tensor( + ctx, mm, query_dtype, name + "_mm_cast_to_fp16", target, source_ir + ) + + L, S = query.shape[-2], key.shape[-2] + if L >= 0 and S >= 0: + # static shape + attn_bias = np.zeros((L, S), dtype=dtype._from(query_dtype).to(np.dtype)) + temp_mask = np.logical_not(np.tril(np.ones((L, S), dtype=np.bool_), k=0)) + attn_bias = np.ma.array(attn_bias, mask=temp_mask).filled(float("-inf")) + attn_bias = get_trt_tensor(ctx, attn_bias, name + "_attn_bias") else: - scaled_add_attn_bias = scaled + # if any of the L or S is dynamic shape + if L < 0: + L = impl.shape.shape(ctx, target, source_ir, name + "_shape_0", query, 2) + if S < 0: + S = impl.shape.shape(ctx, target, source_ir, name + "_shape_1", key, 2) - # Create a if condition to check if is_causal is True - if isinstance(is_causal, TRTTensor): - if_layer = ctx.net.add_if_conditional() - condition, true_branch, false_branch = is_causal, scaled_add_attn_bias, scaled - if_layer.set_condition(condition) - output_layer = if_layer.add_output(true_branch, false_branch) - scaled_add_attn_bias = output_layer.get_output(0) + # generate the mask tensor + tril_tensor = 
tril(ctx, target, source_ir, name + "_tril", L, S) + + temp_mask = impl.unary.logical_not( + ctx, target, source_ir, name + "_logical_not", tril_tensor + ) + + # This need_mask determines if we want to use the causal mask or not + # When KV caching is enabled, L = 1 and != S. In this case, we shouldn't use the causal mask. + # So need_mask will be all False values in this case. + # TODO: Implement more general case where L != 1 and S != L + need_mask = impl.elementwise.eq(ctx, target, source_ir, name + "_eq", L, S) + temp_mask = impl.elementwise.logical_and( + ctx, target, source_ir, name + "_logical_and", need_mask, temp_mask + ) + temp_mask_casted = cast_trt_tensor( + ctx, temp_mask, query_dtype, name + "_casted_bool", target, source_ir + ) + + one_minus_temp_mask = impl.elementwise.sub( + ctx, + target, + source_ir, + name + "_one_minus_temp_mask", + 1.0, + temp_mask_casted, + ) + attn_bias = impl.unary.log( + ctx, target, source_ir, name + "_log", one_minus_temp_mask + ) + + scaled_add_attn_bias = impl.elementwise.add( + ctx, target, source_ir, name + "_attn_bias_add", mm, attn_bias + ) softmax = impl.normalization.softmax( ctx, target, source_ir, name + "_softmax", scaled_add_attn_bias, -1, False ) + if use_fp32_acc: + softmax = cast_trt_tensor( + ctx, softmax, trt.float32, name + "_softmax_cast_to_fp32", target, source_ir + ) + value = cast_trt_tensor( + ctx, value, trt.float32, name + "_value_cast_to_fp32", target, source_ir + ) out = impl.matmul.matrix_multiply( ctx, target, @@ -172,5 +188,9 @@ def scaled_dot_product_attention( softmax, value, ) + if use_fp32_acc: + out = cast_trt_tensor( + ctx, out, query_dtype, name + "_out_cast_to_fp16", target, source_ir + ) return out diff --git a/tools/llm/utils.py b/tools/llm/utils.py new file mode 100644 index 0000000000..2c3434b0ed --- /dev/null +++ b/tools/llm/utils.py @@ -0,0 +1,244 @@ +import copy +import timeit + +import numpy as np +import torch +from transformers import StoppingCriteriaList +from 
transformers.generation.stopping_criteria import ( + EosTokenCriteria, + MaxLengthCriteria, +) + + +def export_llm(model, inputs, min_seq_len=1, max_seq_len=16): + """ + Exports the LLM model into an ExportedProgram with dynamic shapes. + In the case of guard failures due to some PyTorch kernel implements, we also + try to re-export the graph by expressing them as runtime assert nodes + """ + with torch.no_grad(): + # max=1024 has contraint violation error. https://github.com/pytorch/pytorch/issues/125604 + seq_len = torch.export.Dim("seq_len", min=min_seq_len, max=max_seq_len) + position_ids = torch.arange(inputs.shape[1]).unsqueeze(0).to(inputs.device) + try: + print("Trying to export the model using torch.export.export()..") + # strict=False only enables aotautograd tracing and excludes dynamo. + ep = torch.export.export( + model, + args=(inputs,), + kwargs={"position_ids": position_ids}, + dynamic_shapes=({1: seq_len}, {1: seq_len}), + strict=False, + ) + except: + print( + "Trying torch.export._trace._export to trace the graph since torch.export.export() failed" + ) + # This API is used to express the constraint violation guards as asserts in the graph. + ep = torch.export._trace._export( + model, + args=(inputs,), + kwargs={"position_ids": position_ids}, + dynamic_shapes=({1: seq_len}, {1: seq_len}), + strict=False, + allow_complex_guards_as_runtime_asserts=True, + ) + + return ep + + +def get_zeroed_static_cache_inputs(model: torch.fx.GraphModule): + """ + Extracts and returns zeroed static KV cache tensors from a torch.fx.GraphModule. This should only be used for static cache_v1 and static cache_v2. + + This function identifies placeholder nodes in the graph that represent KV cache tensors, + and creates zeroed tensors with the same shape, dtype, and device as the original placeholders. 
+ + Args: + model (torch.fx.GraphModule): The exported model graph containing KV cache placeholders + + Returns: + tuple: A tuple of zeroed tensors corresponding to the KV cache placeholders in the graph + """ + # placeholder nodes are expected to be in the following order: + # input_ids, kv_cache_key, kv_cache_value, start_idx, end_idx + placeholder_nodes = [node for node in model.graph.nodes if node.op == "placeholder"] + # The first two inputs are input_ids, position_ids. The last two inputs are start_idx, end_idx. In between are the KV cache tensors. + kv_cache_inputs = placeholder_nodes[2:-2] + zeroed_kv_cache_inputs = [] + for input in kv_cache_inputs: + zeroed_kv_cache_inputs.append( + torch.zeros( + input.meta["val"].shape, + dtype=input.meta["val"].dtype, + device=torch.device("cuda:0"), + ) + ) + + return tuple(zeroed_kv_cache_inputs) + + +def get_zeroed_dynamic_cache_inputs(model: torch.fx.GraphModule): + """ + Extracts and returns zeroed KV cache tensors from a torch.fx.GraphModule. This should only be used for dynamic cache. + + This function identifies placeholder nodes in the graph that represent KV cache tensors, + and creates zeroed tensors with the same shape, dtype, and device as the original placeholders. + + Args: + model (torch.fx.GraphModule): The exported model graph containing KV cache placeholders + + Returns: + tuple: A tuple of zeroed tensors corresponding to the KV cache placeholders in the graph + """ + # placeholder nodes are expected to be in the following order: + # input_ids, kv_cache_key, kv_cache_value, start_idx, end_idx + placeholder_nodes = [node for node in model.graph.nodes if node.op == "placeholder"] + # The first two inputs are input_ids, position_ids. The last input is is_generate. In between are the KV cache tensors. 
+ kv_cache_inputs = placeholder_nodes[2:-1] + zeroed_kv_cache_inputs = [] + for input in kv_cache_inputs: + zeroed_kv_cache_inputs.append( + torch.zeros( + input.meta["val"].shape, + dtype=input.meta["val"].dtype, + device=torch.device("cuda:0"), + ) + ) + + return tuple(zeroed_kv_cache_inputs) + + +def generate(model, input_seq, max_output_seq_length, eos_token_id, benchmark=True): + """ + Greedy decoding of the model. This generates up to max_tokens. + """ + stopping_criteria = StoppingCriteriaList( + [ + MaxLengthCriteria(max_length=max_output_seq_length), + EosTokenCriteria(eos_token_id=eos_token_id), + ] + ) + isl = input_seq.shape[1] + osl = max_output_seq_length - isl + + num_tokens_generated = 0 + while num_tokens_generated < osl: + position_ids = torch.arange(input_seq.shape[1]).unsqueeze(0).cuda() + outputs = model(input_seq, position_ids=position_ids) + logits = outputs.logits + next_token_logits = logits[:, -1, :] + next_tokens = torch.argmax(next_token_logits, dim=-1) + input_seq = torch.cat([input_seq, next_tokens[:, None]], dim=-1) + num_tokens_generated += 1 + # TODO: Handle batch in this check + if not benchmark and stopping_criteria(input_seq, logits).item(): + break + + return input_seq + + +def generate_with_static_cache(model, input_seq, max_output_seq_length, eos_token_id): + """ + Greedy decoding of the model with static KV cache. 
+ """ + start_idx = 0 + end_idx = input_seq.shape[1] + position_ids = torch.arange(input_seq.shape[1]).unsqueeze(0).cuda() + output_seq = input_seq.clone() + # TODO: Confirm this: When end_idx = max_output_seq_length-1, number of tokens generated = OSL + num_tokens_generated = 0 + kv_cache = get_zeroed_static_cache_inputs(model) + while end_idx < max_output_seq_length: + position_ids = ( + torch.tensor([[start_idx]], dtype=torch.int64).cuda() + if input_seq.shape[1] == 1 + else position_ids + ) + input_signature = (input_seq, position_ids, *kv_cache, start_idx, end_idx) + logits_keys_values = model(*input_signature) + num_tokens_generated += 1 + logits = logits_keys_values[0] + kv_cache = logits_keys_values[1:] + next_token_logits = logits[:, -1, :] + next_tokens = torch.argmax(next_token_logits, dim=-1, keepdim=True) + output_seq = torch.cat([output_seq, next_tokens], dim=-1) + input_seq = next_tokens + start_idx = end_idx + end_idx = start_idx + 1 + return output_seq + + +def generate_with_dynamic_cache(model, input_seq, max_output_seq_length, eos_token_id): + """ + Greedy decoding of the model with dynamic KV cache. 
+ """ + position_ids = torch.arange(input_seq.shape[1]).unsqueeze(0).cuda() + output_seq = input_seq.clone() + num_output_tokens = max_output_seq_length - input_seq.shape[1] + num_tokens_generated = 0 + kv_cache = get_zeroed_dynamic_cache_inputs(model) + last_position_id = position_ids[-1, -1].item() + breakpoint() + while num_tokens_generated < num_output_tokens: + is_generate = False if input_seq.shape[1] > 1 else True + position_ids = ( + torch.tensor([[last_position_id + 1]], dtype=torch.int64).cuda() + if input_seq.shape[1] == 1 + else position_ids + ) + input_signature = (input_seq, position_ids, *kv_cache, is_generate) + logits_keys_values = model(*input_signature) + num_tokens_generated += 1 + logits = logits_keys_values[0] + kv_cache = logits_keys_values[1:] + next_token_logits = logits[:, -1, :] + next_tokens = torch.argmax(next_token_logits, dim=-1, keepdim=True) + output_seq = torch.cat([output_seq, next_tokens], dim=-1) + input_seq = next_tokens + last_position_id += 1 + return output_seq + + +def time_generate( + generate_fn, model, inputs, output_seq_length, eos_token_id, iterations=10 +): + """ + Measure the time for generating a sentence over certain number of iterations + """ + timings = [] + for _ in range(iterations): + start_time = timeit.default_timer() + _ = generate_fn(model, inputs, output_seq_length, eos_token_id) + torch.cuda.synchronize() + end_time = timeit.default_timer() + timings.append(end_time - start_time) + + return timings + + +def record_stats(backend, timings, precision, batch_size=1, compile_time_s=None): + """ + Records different timing stats and adds it to the result + """ + times = np.array(timings) + speeds = batch_size / times + time_mean = np.mean(times).item() + time_med = np.median(times).item() + time_99th = np.percentile(times, 99).item() + time_std = np.std(times, ddof=0).item() + speed_mean = np.mean(speeds).item() + speed_med = np.median(speeds).item() + + stats = { + "Backend": backend, + "Precision": precision, 
+ "Batch size": batch_size, + "Median(FPS)": speed_med, + "Mean(FPS)": speed_mean, + "Median-Latency(ms)": time_med * 1000, + "Mean-Latency(ms)": time_mean * 1000, + "Latency-StdDev(ms)": time_std * 1000, + "Compile Time(s)": compile_time_s, + } + return stats diff --git a/tools/perf/Flux/benchmark.sh b/tools/perf/Flux/benchmark.sh index 79f5e4b66c..3b29ac0989 100644 --- a/tools/perf/Flux/benchmark.sh +++ b/tools/perf/Flux/benchmark.sh @@ -1,9 +1,20 @@ #TODO: Enter the HF Token huggingface-cli login --token HF_TOKEN +nvidia-smi --query-gpu=index,utilization.gpu,utilization.memory,temperature.gpu,temperature.memory,power.draw,clocks.sm,clocks.mem,memory.total,memory.used --format=csv,nounits -lms 500 >> pytorch_fp16_gpu_utilization.txt & +NVIDIA_SMI_PID=$! +python flux_perf.py --pytorch --max_batch_size 3 > pytorch_fp16_benchmark.txt +kill $NVIDIA_SMI_PID + nvidia-smi --query-gpu=index,utilization.gpu,utilization.memory,temperature.gpu,temperature.memory,power.draw,clocks.sm,clocks.mem,memory.total,memory.used --format=csv,nounits -lms 500 >> fp8_gpu_utilization.txt & NVIDIA_SMI_PID=$! -python flux_perf.py --dtype fp8 --low_vram_mode> fp8_benchmark.txt +python flux_perf.py --dtype fp8 --max_batch_size 3 > fp8_benchmark.txt +kill $NVIDIA_SMI_PID + + +nvidia-smi --query-gpu=index,utilization.gpu,utilization.memory,temperature.gpu,temperature.memory,power.draw,clocks.sm,clocks.mem,memory.total,memory.used --format=csv,nounits -lms 500 >> fp16_gpu_utilization.txt & +NVIDIA_SMI_PID=$! 
+python flux_perf.py --dtype fp16 --max_batch_size 3 > fp16_benchmark.txt kill $NVIDIA_SMI_PID diff --git a/tools/perf/Flux/flux_perf.py b/tools/perf/Flux/flux_perf.py index e54952ea10..969f4c93d8 100644 --- a/tools/perf/Flux/flux_perf.py +++ b/tools/perf/Flux/flux_perf.py @@ -3,12 +3,29 @@ import sys from time import time +import torch + sys.path.append(os.path.join(os.path.dirname(__file__), "../../../examples/apps")) from flux_demo import compile_model def benchmark(pipe, prompt, inference_step, batch_size=1, iterations=1): + print(f"Running warmup with {batch_size=} {inference_step=} iterations=10") + # warmup + for i in range(10): + start = time() + images = pipe( + prompt, + output_type="pil", + num_inference_steps=inference_step, + num_images_per_prompt=batch_size, + ).images + print( + f"Warmup {i} done in {time() - start} seconds, with {batch_size=} {inference_step=}, generated {len(images)} images" + ) + # actual benchmark + print(f"Running benchmark with {batch_size=} {inference_step=} {iterations=}") start = time() for i in range(iterations): image = pipe( @@ -18,32 +35,55 @@ def benchmark(pipe, prompt, inference_step, batch_size=1, iterations=1): num_images_per_prompt=batch_size, ).images end = time() - print(f"Batch Size: {batch_size}") print("Time Elapse for", iterations, "iterations:", end - start) print( "Average Latency Per Step:", (end - start) / inference_step / iterations / batch_size, ) - return image + return + + +from diffusers import FluxPipeline def main(args): - pipe, backbone, trt_gm = compile_model(args) - for batch_size in range(1, args.max_batch_size + 1): - benchmark(pipe, ["Test"], 20, batch_size=batch_size, iterations=3) + print(f"Running flux_perfwith args: {args}") + if not args.pytorch: + pipe, backbone, trt_gm = compile_model(args) + else: + pipe = ( + FluxPipeline.from_pretrained( + "black-forest-labs/FLUX.1-dev", + torch_dtype=torch.float16, + ) + .to(torch.float16) + .to("cuda:0") + ) + + benchmark(pipe, ["Test"], 20, 
batch_size=args.max_batch_size, iterations=3) if __name__ == "__main__": parser = argparse.ArgumentParser( description="Run Flux quantization with different dtypes" ) - + parser.add_argument( + "--use_sdpa", + action="store_true", + help="Use sdpa", + default=False, + ) parser.add_argument( "--dtype", - choices=["fp8", "int8", "fp16"], + choices=["fp4", "fp8", "int8", "fp16"], default="fp16", - help="Select the data type to use (fp8 or int8 or fp16)", + help="Select the data type to use (fp4 or fp8 or int8 or fp16)", + ) + parser.add_argument( + "--fp4_mha", + action="store_true", + help="Use NVFP4_FP8_MHA_CONFIG config instead of NVFP4_DEFAULT_CFG", ) parser.add_argument( "--low_vram_mode", @@ -56,6 +96,11 @@ def main(args): action="store_true", help="Use dynamic shapes", ) + parser.add_argument( + "--pytorch", + action="store_true", + help="Use pytorch runtime and no tensorrt", + ) parser.add_argument("--max_batch_size", type=int, default=1) args = parser.parse_args() main(args) diff --git a/uv.lock b/uv.lock index 9c711d13e4..18b5f3d7ed 100644 --- a/uv.lock +++ b/uv.lock @@ -39,36 +39,36 @@ supported-markers = [ [[package]] name = "accelerate" version = "1.7.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and 
sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "psutil", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pyyaml", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "safetensors", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "torch", version = "2.7.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.8.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 
'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.9.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e8a/2a5503d6237b9/accelerate-1.7.0.tar.gz", hash = "sha256:e8a2a5503d6237b9eee73cc8d36cf543f9c2d8dd2c6713450b322f5e6d53a610" } +sdist = { url = "https://files.pythonhosted.org/packages/97/33/47bbd507e3a851d33d19ce7b2141c5ea3689bfae91ba168044d7db24b0e9/accelerate-1.7.0.tar.gz", hash = "sha256:e8a2a5503d6237b9eee73cc8d36cf543f9c2d8dd2c6713450b322f5e6d53a610", size = 376026, upload-time = "2025-05-15T10:00:52.117Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cf5/7165cca28769c/accelerate-1.7.0-py3-none-any.whl", hash = "sha256:cf57165cca28769c6cf2650812371c81b18e05743dfa3c748524b1bb4f2b272f" }, + { url = "https://files.pythonhosted.org/packages/f8/bb/be8146c196ad6e4dec78385d91e92591f8a433576c4e04c342a636fcd811/accelerate-1.7.0-py3-none-any.whl", hash = "sha256:cf57165cca28769c6cf2650812371c81b18e05743dfa3c748524b1bb4f2b272f", size = 362095, upload-time = "2025-05-15T10:00:49.914Z" }, ] [[package]] name = "aiohappyeyeballs" version = "2.6.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c3f/9d0113123803c/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f34/9ba8f4b75cb25/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8" }, + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, ] [[package]] name = "aiohttp" version = "3.12.9" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "aiosignal", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -79,116 +79,116 @@ dependencies = [ { name = "propcache", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "yarl", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2c9/914c8914ff40b/aiohttp-3.12.9.tar.gz", hash = "sha256:2c9914c8914ff40b68c6e4ed5da33e88d4e8f368fddd03ceb0eb3175905ca782" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/43d/bedb626c6bb03/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43dbedb626c6bb03cc8e9ab27b9da4414bc5540d3fe1bce0e687e50c20553689" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/188/97f24e80bac4e/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:18897f24e80bac4e7df5d37375ab22391f8b7beedfe617f8de064dbfd76ca36b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/246/6804eaa42bf63/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2466804eaa42bf6340de28fba7254709db788989b891a7c5bd57a84f5a11c04b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/85d/df89da86915ab/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85ddf89da86915ab327fafe9059540707b9deac7cfad1dfda4621eac6590aa16" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f8d/89c0ea455b8e8/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8d89c0ea455b8e8e386db8b82a55671703d4868c7c1e38cca0d643232f50f8d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2ee/5ca28436b9203/aiohttp-3.12.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ee5ca28436b9203d020924c6dacc1cca4e77acf5f8f5c5d236b123c0158a012" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7ca/2ad779958e1be/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7ca2ad779958e1beb2f139e7d45f84c13f94f6c0f63025e435e31f3247cb5a05" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/daa/e5ea9c06daacb/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:daae5ea9c06daacb056351273a38d4465446fbb5c8c8107a6f93db3e1d5bc4e8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/52c/ec94fa76e488b/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:52cec94fa76e488b0ebc6586507421116d7993c7984ea020529107796b206117" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/db2/aef30d877f447/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:db2aef30d877f44716c8ce4adb2162c7ccb9c58d6153bc68bd2cfb3fbd7d6a95" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/1d2/05549f965bc69/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1d205549f965bc69c377206643b06fd78d77ed20b8735765c54153cf00a51465" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3fd/aaf63a778ae02/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3fdaaf63a778ae020b9bf8a7ae4a80f87deb88152aad259764e994b3efe44d38" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0d0/11b13f3bfcf71/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d011b13f3bfcf711ce9007ea08305a582135ee2105dc3202b011c055c1ac6f1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3c7/b314d565e2350/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3c7b314d565e235051893a46e14ea14ab05bb17fe99bdb2cf85e9adc62b4836c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2bb/6408bc2cb8ee5/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb6408bc2cb8ee5be4efb18bcfcfce4d76448f62237074917e146a425daf425" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b9a/d4fe8d068544b/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9ad4fe8d068544ba5d77500ea2d450f130109a4b0caf6d9197167303250f683" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/557/21245164191ac/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55721245164191ac92808ad39f3b2876195b1e6521ead0aad7f1c9ae69568b1a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b5c/5fbc9217578f5/aiohttp-3.12.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5c5fbc9217578f5c9b5a65f27dfb044283b437cfa9cf52531f3ce94dca1e912" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5c7/e03f6dd8210b7/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:5c7e03f6dd8210b76587cb17088b3e5e0dabfc6787d42db58bc933da932230b7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c89/2b2400c0795bb/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c892b2400c0795bbf00303282029c66e8ba912dc9fabf4728ba69a63046c8020" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4de/97019fec6f236/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4de97019fec6f236671ee5d5831cebf67fbd52ee6bd47e2b8c9941cd39698db1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/941/cd1ce3d1f605f/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:941cd1ce3d1f605fd062857b339f7c3cde5ce83392bfb1029c3de782b8f98b52" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/43f/3d4d6264629d9/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:43f3d4d6264629d97d44a6d75603923c2c63dad6aff2f72b172635c43db739db" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bbe/5ab33a6810e98/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bbe5ab33a6810e9839270b3673eba683b9f91ed011be66feb4823f9fecf1bb73" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bfe/590ddb0dca3cd/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bfe590ddb0dca3cdb601787079276545f00cfb9493f73f00fa011e71dae6f5fd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fc4/41aba05efec5c/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fc441aba05efec5c72127393f56206d0f3fb113aadcd1685033c10da1ff582ad" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1a3/f20a1b72643a0/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a3f20a1b72643a0be5c9fcb97eb22607fcca32f1ca497f09a88d1ec3109daae" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/364/7dd1da43d595a/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3647dd1da43d595a52c5071b68fd8d39c0fd25b80f2cdd83eaabd9d59cd1f139" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/970/bae350cedbabb/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:970bae350cedbabb7c9d0fc8564b004a547d4a27cf12dc986be0abf7d8cc8d81" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7cc/c5a5a4ccfa0ef/aiohttp-3.12.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ccc5a5a4ccfa0ef0191dad2926e9752c37f368d846a70e40095a8529c5fb6eb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/551/97e86994682a3/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:55197e86994682a332e8943eb01b462ae25630b10f245812e517251d7a922f25" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/94d/0cf6606ed9f23/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:94d0cf6606ed9f2373565b8d0005bb070afbb81525ef6fa6e0725b8aec0c0843" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/057/5d7ae9a9c2062/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0575d7ae9a9c206276a6aaa3ce364b467f29f0497c0db4449de060dc341d88d6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9f4/4a4ebd717cc39/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9f44a4ebd717cc39796c4647495bc2901d0c168c71cd0132691ae3d0312215a9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f9c/dadfe84beb8ce/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f9cdadfe84beb8ceafa98ab676e8c0caf1e5d60e8b33c385c11259ee0f7f2587" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/995/b5640969b1250/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:995b5640969b1250e37be6fc92d185e523e8df446f8bfa723b347e52d7ae80f9" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/998/e323c107c3f63/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:998e323c107c3f6396c1f9de72289009057c611942771f24114ae78a76af0af5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/20f/8a6d3af13f043/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:20f8a6d3af13f043a09726add6d096b533f180cf8b43970a8d9c9ca978bf45c5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0bd/0e06c86263610/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bd0e06c8626361027f69df510c8484e17568ba2f91b2de51ea055f86ed3b071" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/64e/22f12dd940a6e/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64e22f12dd940a6e7b923637b10b611b752f6117bc3a780b7e61cc43c9e04892" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/11b/5bf453056b6ac/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b5bf453056b6ac4924ede1188d01e8b8d4801a6aa5351da3a7dbdbc03cb44e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/003/69db59f09860e/aiohttp-3.12.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00369db59f09860e0e26c75035f80f92881103e90f5858c18f29eb4f8cb8970f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/80f/a1efc71d423be/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:80fa1efc71d423be25db9dddefe8dcd90e487fbc9351a59549521b66405e71de" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5ca/de22a0f0a4665/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:5cade22a0f0a4665003ded2bc4d43bb69fde790e5a287187569509c33333a3ab" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d4a/0fe3cd45cf6fb/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:d4a0fe3cd45cf6fb18222deef92af1c3efe090b7f43d477de61b2360c90a4b32" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/97b/036ce251825fd/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:97b036ce251825fd5ab69d302ca8a99d3352af1c616cf40b2306fdb734cd6d30" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eea/c3a965552dbf7/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eeac3a965552dbf79bcc0b9b963b5f7d6364b1542eb609937278d70d27ae997f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4a1/f72b2560beaa9/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a1f72b2560beaa949b5d3b324fc07b66846d39a8e7cc106ca450312a5771e3e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/495/b2ac780e4d4f9/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:495b2ac780e4d4f9a67fc79b7e84f21b09661f362b93d43360204a7bfecc4fec" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/661/2437f2c761dd0/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6612437f2c761dd0b31569b28b8905bccfb88dc1aeecc9ad20fbaf346eafe989" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/435/1fb8d4b12b15f/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4351fb8d4b12b15f39ed076a21d53f9542bc0db09ba973c04503b31ef8268332" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/402/7f160e5109d6a/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4027f160e5109d6aac1537426d8b6e693fcca393dd9488d986ec855caf6dc4f6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/30a/55cdc682d98b8/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30a55cdc682d98b8f7f1e8d3505846ab302a5547ffb7cef85607448b090d691d" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/f91/ee8ed3d9ccb83/aiohttp-3.12.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f91ee8ed3d9ccb832dbc93e6b9d85c2a9dc73a7ea5d0f3ee4c3b64136f6ba598" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/325/acbe0c0225836/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:325acbe0c0225836e720eb758672c2f39e3017e89389de1dfd7fba7977b9bb82" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/075/da814b9a63990/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:075da814b9a639904041d8d50e3ed665ea892df4e99278f8b63ff0ee549eb519" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/579/71e7adbe0984d/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:57971e7adbe0984d9736836d7a34bd615119e628f04dfca302c1bf0ec3d39a77" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/095/4f990f274cfcb/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0954f990f274cfcbbd08d8fdb4a0c7949ac753bc1ea344c540829a85b0a8f34d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/daa/f5a5f2340f462/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:daaf5a5f2340f46291ab7d44f60693cc71a05a8b9104e6efd3bd51c8a6526290" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ba0/843970e8a9cb4/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ba0843970e8a9cb4ddae47281010997f5b1a1c8cbc635fbefc9a0ccaa7c95606" }, +sdist = { url = "https://files.pythonhosted.org/packages/4b/ad/5b0f3451c2275af09966f1d7c0965facd4729a5b7efdc2eb728654679f85/aiohttp-3.12.9.tar.gz", hash = "sha256:2c9914c8914ff40b68c6e4ed5da33e88d4e8f368fddd03ceb0eb3175905ca782", size = 7810207, upload-time = "2025-06-04T16:26:40.157Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cc/f4/9245dd38d760d92504fdc1a1cdaa3468b8642e0692875badc509312728a4/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43dbedb626c6bb03cc8e9ab27b9da4414bc5540d3fe1bce0e687e50c20553689", size = 1646712, upload-time = "2025-06-04T16:23:19.012Z" }, + { url = "https://files.pythonhosted.org/packages/f7/52/907028e57dd34d89424f9adc03bdf2dcbf8ca66b1799a4b0362b3291adf3/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:18897f24e80bac4e7df5d37375ab22391f8b7beedfe617f8de064dbfd76ca36b", size = 1620767, upload-time = "2025-06-04T16:23:21.016Z" }, + { url = "https://files.pythonhosted.org/packages/d8/71/615d3f8fcbec363c998856726daeb8d7a1de348618ddbebf2799694d3f46/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2466804eaa42bf6340de28fba7254709db788989b891a7c5bd57a84f5a11c04b", size = 1693176, upload-time = "2025-06-04T16:23:23.465Z" }, + { url = "https://files.pythonhosted.org/packages/6d/f1/c815a3e91b89f678bbbd053e199438067c554d669f00b5d3a3ddcd4e31e9/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85ddf89da86915ab327fafe9059540707b9deac7cfad1dfda4621eac6590aa16", size = 1735490, upload-time = "2025-06-04T16:23:25.418Z" }, + { url = "https://files.pythonhosted.org/packages/d2/55/ff9a6951fb8aa04d95d4c206f189a62bf616a9ab7a325c8e72f1bd817f84/aiohttp-3.12.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8d89c0ea455b8e8e386db8b82a55671703d4868c7c1e38cca0d643232f50f8d", size = 1640156, upload-time = "2025-06-04T16:23:27.788Z" }, + { url = "https://files.pythonhosted.org/packages/22/97/c7d8d8ac53862a612dc06574f591d30b64326ef910c43bc5c0cbeffb9210/aiohttp-3.12.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:2ee5ca28436b9203d020924c6dacc1cca4e77acf5f8f5c5d236b123c0158a012", size = 1580277, upload-time = "2025-06-04T16:23:29.813Z" }, + { url = "https://files.pythonhosted.org/packages/d2/e8/6cdfe6f65713c4957311a4fad1b343bc93eb3a87b84ef8e5c18f06c77a69/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7ca2ad779958e1beb2f139e7d45f84c13f94f6c0f63025e435e31f3247cb5a05", size = 1624126, upload-time = "2025-06-04T16:23:31.756Z" }, + { url = "https://files.pythonhosted.org/packages/10/1d/9a63f309928ff6494626659c68bb4e0c8e2678dd5aa9e7a22a47305f297c/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:daae5ea9c06daacb056351273a38d4465446fbb5c8c8107a6f93db3e1d5bc4e8", size = 1634913, upload-time = "2025-06-04T16:23:35.171Z" }, + { url = "https://files.pythonhosted.org/packages/92/be/9a90641bc61777d9fbd037b12cafa0208726172c22decfdfbea5b82b931d/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:52cec94fa76e488b0ebc6586507421116d7993c7984ea020529107796b206117", size = 1610367, upload-time = "2025-06-04T16:23:37.519Z" }, + { url = "https://files.pythonhosted.org/packages/0f/63/16730d255cd92bf8f834b0199a7faf850989628129b6fa3684d541a4effe/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:db2aef30d877f44716c8ce4adb2162c7ccb9c58d6153bc68bd2cfb3fbd7d6a95", size = 1689952, upload-time = "2025-06-04T16:23:39.497Z" }, + { url = "https://files.pythonhosted.org/packages/35/aa/e7410f300a66b6db014873e0efcc277206433c89b60502e7434efccde43a/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1d205549f965bc69c377206643b06fd78d77ed20b8735765c54153cf00a51465", size = 1713189, upload-time = "2025-06-04T16:23:41.528Z" }, + { url = "https://files.pythonhosted.org/packages/69/18/d36db9ae9ae972310abbfbd8ebcf53e434e4973a017c9f5677efeb36f31f/aiohttp-3.12.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3fdaaf63a778ae020b9bf8a7ae4a80f87deb88152aad259764e994b3efe44d38", size = 1641531, upload-time = 
"2025-06-04T16:23:43.458Z" }, + { url = "https://files.pythonhosted.org/packages/83/c0/3347524ee435e13a9bfa54ae59a9e479f7cd05bf5062bee8471a6b39d933/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d011b13f3bfcf711ce9007ea08305a582135ee2105dc3202b011c055c1ac6f1", size = 1738567, upload-time = "2025-06-04T16:23:56.431Z" }, + { url = "https://files.pythonhosted.org/packages/98/af/96f10bc9f71aa806cdb1e4af3aa00352e20dc0e70b53a7147526b2f95e81/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3c7b314d565e235051893a46e14ea14ab05bb17fe99bdb2cf85e9adc62b4836c", size = 1687239, upload-time = "2025-06-04T16:23:58.428Z" }, + { url = "https://files.pythonhosted.org/packages/c7/f8/049a08282f9e5a45e903cc81ded19de718133daf21924c715ef0435038b3/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb6408bc2cb8ee5be4efb18bcfcfce4d76448f62237074917e146a425daf425", size = 1786031, upload-time = "2025-06-04T16:24:00.988Z" }, + { url = "https://files.pythonhosted.org/packages/26/3a/dc6ce1731d6a116d927c6c47e9f8dab283582d2e8fb31f49615ea2447b4c/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9ad4fe8d068544ba5d77500ea2d450f130109a4b0caf6d9197167303250f683", size = 1825076, upload-time = "2025-06-04T16:24:03.072Z" }, + { url = "https://files.pythonhosted.org/packages/d6/49/9e635c2f0a4d296e204ef87858ec2d6c590c944d5c3166c01d19813d3dc1/aiohttp-3.12.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55721245164191ac92808ad39f3b2876195b1e6521ead0aad7f1c9ae69568b1a", size = 1727537, upload-time = "2025-06-04T16:24:05.062Z" }, + { url = "https://files.pythonhosted.org/packages/67/92/64cbc47a73282eefca62e44ca44d771ccd40441e295b6b33531eed2d9f8f/aiohttp-3.12.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b5c5fbc9217578f5c9b5a65f27dfb044283b437cfa9cf52531f3ce94dca1e912", size = 1664613, upload-time = "2025-06-04T16:24:08.56Z" }, + { url = "https://files.pythonhosted.org/packages/11/52/8e78137d1145f5bc5e77d39a4072da3bbe4216ddc13624a91d4061913846/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5c7e03f6dd8210b76587cb17088b3e5e0dabfc6787d42db58bc933da932230b7", size = 1712887, upload-time = "2025-06-04T16:24:10.806Z" }, + { url = "https://files.pythonhosted.org/packages/07/e9/beb9b75a38be8746242d76d5d4671d5467e54e53208d654ee921cb331fc5/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c892b2400c0795bbf00303282029c66e8ba912dc9fabf4728ba69a63046c8020", size = 1708045, upload-time = "2025-06-04T16:24:14.321Z" }, + { url = "https://files.pythonhosted.org/packages/f9/14/91da26fd19abf723b61f0861a73a917b15f25b6473191a5d597b67ff9c4e/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4de97019fec6f236671ee5d5831cebf67fbd52ee6bd47e2b8c9941cd39698db1", size = 1688229, upload-time = "2025-06-04T16:24:16.459Z" }, + { url = "https://files.pythonhosted.org/packages/c6/a4/d8a68c5c3f618e29ae978497c93d05718a98614659336672bbac37d227d9/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:941cd1ce3d1f605fd062857b339f7c3cde5ce83392bfb1029c3de782b8f98b52", size = 1781830, upload-time = "2025-06-04T16:24:19.363Z" }, + { url = "https://files.pythonhosted.org/packages/bb/4a/2e526757885e0d69ef796c470b470084073d2f9286784f34457139a8c2a5/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:43f3d4d6264629d97d44a6d75603923c2c63dad6aff2f72b172635c43db739db", size = 1802292, upload-time = "2025-06-04T16:24:21.63Z" }, + { url = "https://files.pythonhosted.org/packages/77/92/5269deb655ee3ec8b48551b228ceccaa21e4fd61d44e7b6720618f09b958/aiohttp-3.12.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bbe5ab33a6810e9839270b3673eba683b9f91ed011be66feb4823f9fecf1bb73", size = 1715349, upload-time = 
"2025-06-04T16:24:23.873Z" }, + { url = "https://files.pythonhosted.org/packages/4d/2d/c6e796e6d7e57a3935772333d80e0407d66e551e2c7c2b930b7e18f527a4/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bfe590ddb0dca3cdb601787079276545f00cfb9493f73f00fa011e71dae6f5fd", size = 1713182, upload-time = "2025-06-04T16:24:36.314Z" }, + { url = "https://files.pythonhosted.org/packages/93/b7/bf9010f6dfe633147d74e93d41ec982b2538bfebcb6521a4139d187d07e3/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fc441aba05efec5c72127393f56206d0f3fb113aadcd1685033c10da1ff582ad", size = 1695833, upload-time = "2025-06-04T16:24:38.599Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b9/fe87b305d1a0272cb5c499402525c06571840349f2b2a4ffdc20e2996ac2/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a3f20a1b72643a0be5c9fcb97eb22607fcca32f1ca497f09a88d1ec3109daae", size = 1750928, upload-time = "2025-06-04T16:24:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/37/24/3ece3ca9c43b95a5836675c11f3be295fb65068ffffaad0e99a7a5b93c84/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3647dd1da43d595a52c5071b68fd8d39c0fd25b80f2cdd83eaabd9d59cd1f139", size = 1797083, upload-time = "2025-06-04T16:24:43.583Z" }, + { url = "https://files.pythonhosted.org/packages/1c/d2/c153f7858d9c6db578b495b15f533182bd95f24c62ab125cc039d97bf588/aiohttp-3.12.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:970bae350cedbabb7c9d0fc8564b004a547d4a27cf12dc986be0abf7d8cc8d81", size = 1716522, upload-time = "2025-06-04T16:24:46.356Z" }, + { url = "https://files.pythonhosted.org/packages/1a/a9/ecfffc1659d8e3f02e109afec4df58a600128a2f48819af7e76a398a1ad3/aiohttp-3.12.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7ccc5a5a4ccfa0ef0191dad2926e9752c37f368d846a70e40095a8529c5fb6eb", size = 1632325, upload-time = "2025-06-04T16:24:48.639Z" }, + { url = "https://files.pythonhosted.org/packages/aa/07/69889c2e598661418f646038fc344769712a6dbc625c4b16f2d0191d872b/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:55197e86994682a332e8943eb01b462ae25630b10f245812e517251d7a922f25", size = 1693386, upload-time = "2025-06-04T16:24:51.032Z" }, + { url = "https://files.pythonhosted.org/packages/c3/fb/23e292231a5d6d7413c998d096ed7dae049e7fb2c3406019eb04cb93c5b7/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:94d0cf6606ed9f2373565b8d0005bb070afbb81525ef6fa6e0725b8aec0c0843", size = 1714841, upload-time = "2025-06-04T16:24:53.227Z" }, + { url = "https://files.pythonhosted.org/packages/80/bf/4d12162630ac2a39025c67bfeae94fdaeaec3b0438e65122f0012a570667/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0575d7ae9a9c206276a6aaa3ce364b467f29f0497c0db4449de060dc341d88d6", size = 1655490, upload-time = "2025-06-04T16:24:56Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/6c4f84197d9d04f548405d89d504afaef4c94dfea3842c52fa852f7f4c28/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9f44a4ebd717cc39796c4647495bc2901d0c168c71cd0132691ae3d0312215a9", size = 1735055, upload-time = "2025-06-04T16:24:59.458Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ae/6a9f1863e5d4b210890fb85b4b33e383351cc0588f1f30ea6866faef2141/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f9cdadfe84beb8ceafa98ab676e8c0caf1e5d60e8b33c385c11259ee0f7f2587", size = 1763027, upload-time = "2025-06-04T16:25:01.841Z" }, + { url = "https://files.pythonhosted.org/packages/5e/8c/7c0ca97b65f38d3453cee496da8d465a7b0b44d302c6b5c1da4d83b62f1b/aiohttp-3.12.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:995b5640969b1250e37be6fc92d185e523e8df446f8bfa723b347e52d7ae80f9", size = 1722637, upload-time = 
"2025-06-04T16:25:04.119Z" }, + { url = "https://files.pythonhosted.org/packages/88/38/5c308d02754e346ca9eae63a086f438aae9a4fc36cdd1708fe41588b3883/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:998e323c107c3f6396c1f9de72289009057c611942771f24114ae78a76af0af5", size = 1702124, upload-time = "2025-06-04T16:25:18.701Z" }, + { url = "https://files.pythonhosted.org/packages/ad/25/ab0af26f80c1b6035794d1c769d5671f7ecb59c93b64ea7dfced28df0dca/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:20f8a6d3af13f043a09726add6d096b533f180cf8b43970a8d9c9ca978bf45c5", size = 1683390, upload-time = "2025-06-04T16:25:20.98Z" }, + { url = "https://files.pythonhosted.org/packages/23/fa/9a510d5ec8e1a75008a1c0e985e1db2ce339b9f82d838c7598b85f8f16d4/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bd0e06c8626361027f69df510c8484e17568ba2f91b2de51ea055f86ed3b071", size = 1735458, upload-time = "2025-06-04T16:25:23.864Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b2/870cabf883512f0f2cd9505bd7bce1e4574d137f132ab8d597ac5367b0ee/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64e22f12dd940a6e7b923637b10b611b752f6117bc3a780b7e61cc43c9e04892", size = 1784830, upload-time = "2025-06-04T16:25:26.212Z" }, + { url = "https://files.pythonhosted.org/packages/68/cd/ab572264f5efbb8059f40d92d411918215bc4e669a7684bfa1ea0617745d/aiohttp-3.12.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b5bf453056b6ac4924ede1188d01e8b8d4801a6aa5351da3a7dbdbc03cb44e", size = 1707162, upload-time = "2025-06-04T16:25:28.663Z" }, + { url = "https://files.pythonhosted.org/packages/19/6f/8a6a1dedb8ee5a4034e49bb3cb81ced4fe239d4d047f6bab538320fcb5bc/aiohttp-3.12.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:00369db59f09860e0e26c75035f80f92881103e90f5858c18f29eb4f8cb8970f", size = 1620865, upload-time = "2025-06-04T16:25:31.092Z" }, + { url = "https://files.pythonhosted.org/packages/ed/cf/6b7ab3b221a900a62e8cf26a47476377278675191aa2ea28327ba105c5c9/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:80fa1efc71d423be25db9dddefe8dcd90e487fbc9351a59549521b66405e71de", size = 1673887, upload-time = "2025-06-04T16:25:33.577Z" }, + { url = "https://files.pythonhosted.org/packages/16/5c/aaa1fe022e86291c34a4e15e41d7cad589b4bdd66d473d6d537420763ab2/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:5cade22a0f0a4665003ded2bc4d43bb69fde790e5a287187569509c33333a3ab", size = 1705551, upload-time = "2025-06-04T16:25:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/0f7393a2ef0df4464945c3081d0629a9cb9bfaefaaa922dba225f7c47824/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d4a0fe3cd45cf6fb18222deef92af1c3efe090b7f43d477de61b2360c90a4b32", size = 1648148, upload-time = "2025-06-04T16:25:38.961Z" }, + { url = "https://files.pythonhosted.org/packages/f9/71/286923ff54ae69c54e84bfbcc741b5833d980f192a93438f8d6cf153dae8/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:97b036ce251825fd5ab69d302ca8a99d3352af1c616cf40b2306fdb734cd6d30", size = 1724280, upload-time = "2025-06-04T16:25:41.423Z" }, + { url = "https://files.pythonhosted.org/packages/58/48/808167d6f115165da3fcc6b7bb49bce6cc648471aa30634bcd47a7c96a32/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eeac3a965552dbf79bcc0b9b963b5f7d6364b1542eb609937278d70d27ae997f", size = 1757753, upload-time = "2025-06-04T16:25:43.893Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1b/949e7965d642cdd82c7d9576fd27c24b27f4e0e35586fceb81057a99f617/aiohttp-3.12.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a1f72b2560beaa949b5d3b324fc07b66846d39a8e7cc106ca450312a5771e3e", size = 1706642, upload-time = 
"2025-06-04T16:25:46.299Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b8/bdc05299241f289dc447dc14f134c887b8e946816c6dcf6903c88c345c29/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:495b2ac780e4d4f9a67fc79b7e84f21b09661f362b93d43360204a7bfecc4fec", size = 1640768, upload-time = "2025-06-04T16:26:01.209Z" }, + { url = "https://files.pythonhosted.org/packages/6d/17/f0c8878103983b87933a8d96b38df51fee89bdb905fcb7b42d639bcf9c72/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6612437f2c761dd0b31569b28b8905bccfb88dc1aeecc9ad20fbaf346eafe989", size = 1615262, upload-time = "2025-06-04T16:26:03.832Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e9/02e56ba92b099b22b6c494bc33e80e85e30c97150c682ad511bade4c7357/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4351fb8d4b12b15f39ed076a21d53f9542bc0db09ba973c04503b31ef8268332", size = 1688888, upload-time = "2025-06-04T16:26:06.929Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c4/8640e86e2cdab212e8c99be747928c20fa37906df9ac653a40deaf248ec0/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4027f160e5109d6aac1537426d8b6e693fcca393dd9488d986ec855caf6dc4f6", size = 1728203, upload-time = "2025-06-04T16:26:09.612Z" }, + { url = "https://files.pythonhosted.org/packages/f3/4f/7c006db1cefbc8ad5a4b772d586548efa56661f4ea6ffce5a57f114d6c31/aiohttp-3.12.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30a55cdc682d98b8f7f1e8d3505846ab302a5547ffb7cef85607448b090d691d", size = 1634443, upload-time = "2025-06-04T16:26:12.163Z" }, + { url = "https://files.pythonhosted.org/packages/bf/46/54c8185145efd8f6890fdc1da177956411badf6a03931bcf01bde02b87d9/aiohttp-3.12.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f91ee8ed3d9ccb832dbc93e6b9d85c2a9dc73a7ea5d0f3ee4c3b64136f6ba598", size = 1569445, upload-time = "2025-06-04T16:26:14.748Z" }, + { url = "https://files.pythonhosted.org/packages/da/cc/d8dd35ccca0f38e107a0eb1a943679cc18a1d291562c9b0741e2f14ecc2d/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:325acbe0c0225836e720eb758672c2f39e3017e89389de1dfd7fba7977b9bb82", size = 1617404, upload-time = "2025-06-04T16:26:17.4Z" }, + { url = "https://files.pythonhosted.org/packages/59/f1/7a9b737306055555d7b5fc57031d4be799203313d5c797afe1baa5585052/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:075da814b9a639904041d8d50e3ed665ea892df4e99278f8b63ff0ee549eb519", size = 1627544, upload-time = "2025-06-04T16:26:20.448Z" }, + { url = "https://files.pythonhosted.org/packages/b6/92/a19201e3218f912390f36631c38b9f25ef9ceea6111680a4c151fda8db24/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:57971e7adbe0984d9736836d7a34bd615119e628f04dfca302c1bf0ec3d39a77", size = 1604189, upload-time = "2025-06-04T16:26:23.236Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ce/e354a05cb26bdc1bb54a52aecf54e47089a9cc46644a79b5610c91f6bca1/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0954f990f274cfcbbd08d8fdb4a0c7949ac753bc1ea344c540829a85b0a8f34d", size = 1691791, upload-time = "2025-06-04T16:26:26.01Z" }, + { url = "https://files.pythonhosted.org/packages/bd/76/85caf3b2910cce3c62c4ea7a395d1dcc14c905a2d5b3d6652a2effa530b1/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:daaf5a5f2340f46291ab7d44f60693cc71a05a8b9104e6efd3bd51c8a6526290", size = 1707463, upload-time = "2025-06-04T16:26:28.518Z" }, + { url = "https://files.pythonhosted.org/packages/d6/b9/292cbc40fddc47e48e971134267f6288d45e851ec39bd01bc1e0228e7afe/aiohttp-3.12.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ba0843970e8a9cb4ddae47281010997f5b1a1c8cbc635fbefc9a0ccaa7c95606", size = 1637131, upload-time = "2025-06-04T16:26:31.551Z" 
}, ] [[package]] name = "aiosignal" version = "1.3.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "frozenlist", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a8c/255c66fafb1e4/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424, upload-time = "2024-12-13T17:10:40.86Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/45c/de58e409a3017/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5" }, + { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597, upload-time = "2024-12-13T17:10:38.469Z" }, ] [[package]] name = "annotated-types" version = "0.7.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/aff/07c09a53a08bc/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/1f0/2e8b43a8fbbc3/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53" }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] [[package]] name = "async-timeout" version = "5.0.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d93/21a7a3d5a6a5e/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/39e/3809566ff8535/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c" }, + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, ] [[package]] name = "attrs" version = "25.3.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/75d/7cefc7fb57674/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b" } +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/427/318ce031701fe/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3" }, + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, ] [[package]] name = "black" version = "25.1.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and sys_platform == 'linux') or (python_full_version >= '3.10' and sys_platform == 'windows')" }, + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and sys_platform == 'linux') or (python_full_version >= '3.10' and sys_platform == 'windows')" }, { name = "mypy-extensions", marker = 
"sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "packaging", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pathspec", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -196,110 +196,110 @@ dependencies = [ { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'windows')" }, { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/334/96d5cd1222ad7/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666" } +sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/055/e59b198df7ac0/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bce/2e264d59c91e5/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3b4/8735872ec5350/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/030/b9759066a4ee5/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d9e/6827d563a2c82/black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/95e/8176dae143ba9/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717" }, + { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886, upload-time = "2025-01-29T04:18:24.432Z" }, + { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699, upload-time = "2025-01-29T04:18:17.688Z" }, + { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" }, + { url = "https://files.pythonhosted.org/packages/ac/b6/98f832e7a6c49aa3a464760c67c7856363aa644f2f3c74cf7d624168607e/black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e", size = 1765963, upload-time = "2025-01-29T04:18:38.116Z" }, + { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, ] [[package]] name = "certifi" version = "2025.4.26" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0a8/16057ea3cdefc/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/303/50364dfe37116/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3" }, + { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, ] [[package]] name = "cfgv" version = "3.4.0" -source = { 
registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e52/591d4c5f5dead/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b72/65b1f29fd3316/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9" }, + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, ] [[package]] name = "charset-normalizer" version = "3.4.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5ba/ececa9ecba31e/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b2d/318c11350e106/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9cb/facf36cb0ec28/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/18d/d2e350387c87d/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/807/5c35cd58273fe/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5bf/4545e3b962767/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7a6/ab32f7210554a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b33/de11b92e9f75a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/875/5483f3c00d6c9/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/68a/328e5f55ec37c/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/21b/2899062867b0e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/aa8/8ca0b1932e93f/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d52/4ba3f1581b35c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/28a/1005facc94196/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fdb/20a30fe1175ec/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0f5/d9ed7f254402c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/efd/387a49825780f/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f0a/a37f3c979cf25/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e70/e990b2137b29d/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/0c8/c57f84ccfc871/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6b6/6f92b17849b85/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cdd/f7bd982eaa998/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fcb/e676a55d7445b/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d41/c4d287cfc6906/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4e5/94135de17ab38/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cf7/13fe9a71ef6fd/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a37/0b3e078e41818/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/a95/5b438e62efdf7/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/722/2ffd5e4de8e57/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bee/093bf902e1d8f/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ded/b8adb91d11846/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eba/9904b0f38a143/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3fd/db7e2c84ac87a/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/98f/862da73774290/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6c9/379d65defcab8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/e63/5b87f01ebc977/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1c9/5a1e2902a8b72/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ef8/de666d6179b00/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/32f/c0341d72e0f73/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/289/200a18fa69894/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4a4/76b06fbcf359a/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e92/fca20c46e9f5e/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/50b/f98d5e563b83c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/721/c76e84fe669be/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/82d/8fd25b7f4675d/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b3d/aeac64d5b371d/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dcc/ab8d5fa1ef9bf/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/aaf/27faa992bfee0/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eb3/0abc20df9ab08/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c72/fbbe68c6f32f2/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/982/bb1e8b4ffda88/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7f5/6930ab0abd1c4/charset_normalizer-3.4.2-py3-none-any.whl", hash = 
"sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0" }, +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, + { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, + { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, + { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, + { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, + { 
url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = 
"2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", size = 144744, upload-time = "2025-05-02T08:34:14.665Z" }, + { url = "https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", size = 154993, upload-time = "2025-05-02T08:34:17.134Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", size = 147382, upload-time = "2025-05-02T08:34:19.081Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", size = 149536, upload-time = "2025-05-02T08:34:21.073Z" }, + { url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", size = 151349, upload-time = "2025-05-02T08:34:23.193Z" }, + { url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", size = 146365, upload-time = "2025-05-02T08:34:25.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", size = 154499, upload-time = "2025-05-02T08:34:27.359Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", size = 157735, upload-time = "2025-05-02T08:34:29.798Z" }, + { url = "https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", size = 154786, upload-time = "2025-05-02T08:34:31.858Z" }, + { url = "https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", size = 150203, upload-time = "2025-05-02T08:34:33.88Z" }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, 
upload-time = "2025-05-02T08:34:40.053Z" }, ] [[package]] name = "clang-format" version = "14.0.6" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d5c/96b500d7f8b5d/clang-format-14.0.6.tar.gz", hash = "sha256:d5c96b500d7f8b5d2db5b75ac035be387512850ad589cdc3019666b861382136" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/92/d57c1b3ea310ae0f48ab51a5aa2c87c4c732c3d79037ad2527f2eed7ca34/clang-format-14.0.6.tar.gz", hash = "sha256:d5c96b500d7f8b5d2db5b75ac035be387512850ad589cdc3019666b861382136", size = 9598, upload-time = "2022-06-27T11:34:36.46Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/13f/2d6d4a2af004a/clang_format-14.0.6-py2.py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13f2d6d4a2af004a783c65f0921afa8f0384bffcdaf500b6c2cb542edeb0b4a5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d7c/1c5e404c58e55/clang_format-14.0.6-py2.py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d7c1c5e404c58e55f0170f01b3c5611dce6c119e62b5d1020347e0ad97d5a047" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dbf/d60528eb3bb7d/clang_format-14.0.6-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbfd60528eb3bb7d7cfe8576faa70845fbf93601f815ef75163d36606e87f388" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c93/580945f75de7e/clang_format-14.0.6-py2.py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c93580945f75de7e01996f1fb3cf67e4dc424f1c864e237c85614fb99a48c7a4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/aaf/4edecc46a24f0/clang_format-14.0.6-py2.py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf4edecc46a24f0b572b82cf5827e292ad1c137903427627c4d5f671668cc2b" }, + { url = 
"https://files.pythonhosted.org/packages/5f/de/f666633c30a4cc9e987d153db992849bfeea03ad200bf1cfa937039c64ff/clang_format-14.0.6-py2.py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13f2d6d4a2af004a783c65f0921afa8f0384bffcdaf500b6c2cb542edeb0b4a5", size = 1259649, upload-time = "2022-06-27T11:34:01.64Z" }, + { url = "https://files.pythonhosted.org/packages/ce/27/df41404419d9116e071d0b8a5ba0a0969d9db7587af689ec81ec75c1f18a/clang_format-14.0.6-py2.py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d7c1c5e404c58e55f0170f01b3c5611dce6c119e62b5d1020347e0ad97d5a047", size = 1147591, upload-time = "2022-06-27T11:34:08.688Z" }, + { url = "https://files.pythonhosted.org/packages/23/e4/ea55429601432913e9fe40686c3c09a79338075c830a523fabc71aa49c69/clang_format-14.0.6-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbfd60528eb3bb7d7cfe8576faa70845fbf93601f815ef75163d36606e87f388", size = 1205157, upload-time = "2022-06-27T11:34:13.842Z" }, + { url = "https://files.pythonhosted.org/packages/8c/67/e1faf73ea166669e1698f55f3ae366369db57d75eb3b6c04c93620ebac12/clang_format-14.0.6-py2.py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c93580945f75de7e01996f1fb3cf67e4dc424f1c864e237c85614fb99a48c7a4", size = 1949067, upload-time = "2022-06-27T11:34:18.984Z" }, + { url = "https://files.pythonhosted.org/packages/cd/3b/3e20072464e98314eafdc5bc5744454ade6e6f5e525fb29f6b4555173811/clang_format-14.0.6-py2.py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf4edecc46a24f0b572b82cf5827e292ad1c137903427627c4d5f671668cc2b", size = 1187836, upload-time = "2022-06-27T11:34:23.88Z" }, ] [[package]] name = "click" version = "8.1.8" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and 
sys_platform == 'linux'", "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -309,15 +309,15 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ed5/3c9d8990d83c2/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/63c/132bbbed01578/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2" }, + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, ] [[package]] name = "click" version = "8.2.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -341,68 +341,68 @@ resolution-markers = [ "python_full_version == '3.11.*' and platform_machine != 'aarch64' and 'tegra' 
in platform_release and sys_platform == 'windows'", "python_full_version == '3.10.*' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/27c/491cc05d968d2/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202" } +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/61a/3265b914e850b/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b" }, + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, ] [[package]] name = "coloredlogs" version = "15.0.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "humanfriendly", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7c9/91aa71a4577af/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/612/ee75c546f53e9/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934" }, + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, ] [[package]] name = "cppimport" version = "22.8.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "mako", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pybind11", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bbb/4957102db41bc/cppimport-22.8.2.tar.gz", hash = "sha256:bbb4957102db41bc99ad72c233bce92f9d1fd91be352fc07878c4361033a401f" } +sdist = { url = "https://files.pythonhosted.org/packages/54/27/01d9078a77b9e31b79b9716e66ca4db74f4744c5232bcb3e8769395c4280/cppimport-22.8.2.tar.gz", hash = "sha256:bbb4957102db41bc99ad72c233bce92f9d1fd91be352fc07878c4361033a401f", size = 26635, upload-time = "2022-08-02T16:50:36.872Z" } [[package]] name = "cupy-cuda12x" version = "13.4.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastrlock", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < 
'3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/113/a4f6b5e89d8e3/cupy_cuda12x-13.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:113a4f6b5e89d8e3f0cb150708fa8586fde5f682d2d5bf4703ad8dde66063a5e" }, 
- { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5ec/b45b8fc581621/cupy_cuda12x-13.4.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:5ecb45b8fc5816214390267a0e0c989b8c7a9ffa8ff5712958aa9b066334abfc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/aaa/81533a0367fd4/cupy_cuda12x-13.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:aaa81533a0367fd42fa5af30ba60e604d9f0bed5f75ae388df0ff6b906627ab1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/081/f543178a118d0/cupy_cuda12x-13.4.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:081f543178a118d08f00e7f9caea77839e37912cbfc6542ecc4245fe6580c4ce" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a71/4db3dae534b9d/cupy_cuda12x-13.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a714db3dae534b9d869951366ae2431f3e72036b07827927ffccd24076507ca8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/061/03dd2dc2ff7f3/cupy_cuda12x-13.4.1-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:06103dd2dc2ff7f36c67d2d01cb658befd68da350fae78a0e113fbab6895755f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/43f/97bedd6e2385f/cupy_cuda12x-13.4.1-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:43f97bedd6e2385f61b939ee37faadff0e1fa701d35f2a328cdc13d5b1b74b48" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d0d/153ac5b24ad18/cupy_cuda12x-13.4.1-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:d0d153ac5b24ad183a7bcbe83693a6df06840355bf94b30c1606c519added468" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/02c/68c237cc92f12/cupy_cuda12x-13.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:02c68c237cc92f1255879a54049e701f0b04020b6e3606397afda8945bd2efb6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/238/bd20bc1b33e17/cupy_cuda12x-13.4.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:238bd20bc1b33e170949f5541795a2617c10f6deb39a758fd5c71c186774df0f" }, + { url = 
"https://files.pythonhosted.org/packages/55/ba/2fd0bbaf59ca964fc519fc3d9c8dc67813c83f75b3728d455fd7f89e6f09/cupy_cuda12x-13.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:113a4f6b5e89d8e3f0cb150708fa8586fde5f682d2d5bf4703ad8dde66063a5e", size = 117383051, upload-time = "2025-03-21T07:24:28.751Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/544f2c890dbbcc9f95d97e7ef0a185b530d5de962593aff1f4460bcde9c6/cupy_cuda12x-13.4.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:5ecb45b8fc5816214390267a0e0c989b8c7a9ffa8ff5712958aa9b066334abfc", size = 104635306, upload-time = "2025-03-21T07:24:36.532Z" }, + { url = "https://files.pythonhosted.org/packages/25/b5/d6e149e5bcc17110e14b965ac5c9458046513645ecef9a305f34413668f4/cupy_cuda12x-13.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:aaa81533a0367fd42fa5af30ba60e604d9f0bed5f75ae388df0ff6b906627ab1", size = 119061973, upload-time = "2025-03-21T07:24:50.303Z" }, + { url = "https://files.pythonhosted.org/packages/09/b2/0c75292a027e1a60b5d83389657bce3fa5b79955c6fb79d1988ad0cf9466/cupy_cuda12x-13.4.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:081f543178a118d08f00e7f9caea77839e37912cbfc6542ecc4245fe6580c4ce", size = 105424326, upload-time = "2025-03-21T07:24:57.496Z" }, + { url = "https://files.pythonhosted.org/packages/b7/6d/a5e08d225b1664b400fb4a87262878d315267c310b93d43efd5b7b0b1f64/cupy_cuda12x-13.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a714db3dae534b9d869951366ae2431f3e72036b07827927ffccd24076507ca8", size = 118354020, upload-time = "2025-03-21T07:25:10.378Z" }, + { url = "https://files.pythonhosted.org/packages/56/58/5bfc83265455ff783d5be65451392a6920a90fe8996a091006ba02512848/cupy_cuda12x-13.4.1-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:06103dd2dc2ff7f36c67d2d01cb658befd68da350fae78a0e113fbab6895755f", size = 105273045, upload-time = "2025-03-21T07:25:17.966Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/59/c5200651fc3c0e1e92393d4e582e7812d5f76f26607c1fb310399c335b21/cupy_cuda12x-13.4.1-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:43f97bedd6e2385f61b939ee37faadff0e1fa701d35f2a328cdc13d5b1b74b48", size = 117957759, upload-time = "2025-03-21T07:25:31.363Z" }, + { url = "https://files.pythonhosted.org/packages/13/33/de71853fcd28aaf961092d895d126bfe5ebecc56d89865ea41ad8e48e559/cupy_cuda12x-13.4.1-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:d0d153ac5b24ad183a7bcbe83693a6df06840355bf94b30c1606c519added468", size = 105047230, upload-time = "2025-03-21T07:25:38.084Z" }, + { url = "https://files.pythonhosted.org/packages/1a/25/194fadde538f3c413f5f6d9407c6926025e4f60e52f92660a76b2bd0becb/cupy_cuda12x-13.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:02c68c237cc92f1255879a54049e701f0b04020b6e3606397afda8945bd2efb6", size = 117471500, upload-time = "2025-03-21T07:25:50.818Z" }, + { url = "https://files.pythonhosted.org/packages/a5/f4/3ed29413a42f0d92bec0c13779badf4c134a21322eea51fbd864d367a0de/cupy_cuda12x-13.4.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:238bd20bc1b33e170949f5541795a2617c10f6deb39a758fd5c71c186774df0f", size = 104703256, upload-time = "2025-03-21T07:25:58.678Z" }, ] [[package]] name = "datasets" version = "3.6.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dill", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "filelock", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "fsspec", extra = ["http"], marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "huggingface-hub", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "multiprocess", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "numpy", version = "1.26.4", source = { registry = 
"https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pandas", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pyarrow", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -411,9 +411,9 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "xxhash", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1b2/bf43b19776e27/datasets-3.6.0.tar.gz", hash = "sha256:1b2bf43b19776e2787e181cfd329cb0ca1a358ea014780c3581e0f276375e041" } +sdist = { url = 
"https://files.pythonhosted.org/packages/1a/89/d3d6fef58a488f8569c82fd293ab7cbd4250244d67f425dcae64c63800ea/datasets-3.6.0.tar.gz", hash = "sha256:1b2bf43b19776e2787e181cfd329cb0ca1a358ea014780c3581e0f276375e041", size = 569336, upload-time = "2025-05-07T15:15:02.659Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/250/00c4a2c0873a7/datasets-3.6.0-py3-none-any.whl", hash = "sha256:25000c4a2c0873a710df127d08a202a06eab7bf42441a6bc278b499c2f72cd1b" }, + { url = "https://files.pythonhosted.org/packages/20/34/a08b0ee99715eaba118cbe19a71f7b5e2425c2718ef96007c325944a1152/datasets-3.6.0-py3-none-any.whl", hash = "sha256:25000c4a2c0873a710df127d08a202a06eab7bf42441a6bc278b499c2f72cd1b", size = 491546, upload-time = "2025-05-07T15:14:59.742Z" }, ] [[package]] @@ -424,8 +424,8 @@ dependencies = [ { name = "filelock", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "huggingface-hub", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "importlib-metadata", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or 
(python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "pillow", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "regex", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "requests", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -439,7 +439,7 @@ wheels = [ [[package]] name = "dill" version = "0.3.8" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ { url = "https://download.pytorch.org/whl/nightly/dill-0.3.8-py3-none-any.whl" }, ] @@ -447,91 +447,91 @@ wheels = [ [[package]] name = "distlib" version = "0.3.9" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a60/f20dea646b8a3/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, upload-time = "2024-10-09T18:35:47.551Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/47f/8c22fd27c27e2/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87" }, + { url = 
"https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, ] [[package]] name = "dllist" version = "2.0.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/741/3ba963aaa1b2b/dllist-2.0.0.tar.gz", hash = "sha256:7413ba963aaa1b2b6827eadd7908e40e635b19108ab431667485eaf75c492bf4" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/ce/dda13123329d55555de67b414206f14f4f80b96cb273ec59d76d5ce6b5c3/dllist-2.0.0.tar.gz", hash = "sha256:7413ba963aaa1b2b6827eadd7908e40e635b19108ab431667485eaf75c492bf4", size = 5369, upload-time = "2025-02-10T15:41:07.637Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cd3/07b1a91bc46fa/dllist-2.0.0-py3-none-any.whl", hash = "sha256:cd307b1a91bc46fae084f8c817d79be7e34951b149a2fd69004772e03573bfb3" }, + { url = "https://files.pythonhosted.org/packages/7b/07/a212ca4cfe56fa35c8315307e37df218e5946c726a792b5c7a795b245c10/dllist-2.0.0-py3-none-any.whl", hash = "sha256:cd307b1a91bc46fae084f8c817d79be7e34951b149a2fd69004772e03573bfb3", size = 5721, upload-time = "2025-02-10T15:41:06.045Z" }, ] [[package]] name = "exceptiongroup" version = "1.3.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b24/1f5885f560bc5/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88" } +sdist = { url = 
"https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4d1/11e6e0c13d064/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10" }, + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, ] [[package]] name = "execnet" version = "2.1.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/518/9b52c6121c24f/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524, upload-time = "2024-04-08T09:04:19.245Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/26d/ee51f1b80cebd/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc" }, + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612, upload-time = "2024-04-08T09:04:17.414Z" }, ] [[package]] name = "expecttest" version = "0.1.6" -source = { registry = 
"https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fd4/9563b6703b9c0/expecttest-0.1.6.tar.gz", hash = "sha256:fd49563b6703b9c060a0bc946dfafc62bad74898867432192927eb1e5f9d8952" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/46/42526e9e0f6d67966bd15364fc3713ac5a3501204868e472583f12c5271d/expecttest-0.1.6.tar.gz", hash = "sha256:fd49563b6703b9c060a0bc946dfafc62bad74898867432192927eb1e5f9d8952", size = 6518, upload-time = "2023-08-01T12:11:19.959Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7cf/2db203c06f9e3/expecttest-0.1.6-py3-none-any.whl", hash = "sha256:7cf2db203c06f9e3173670ca9d09ac00912e535139afac2c7458c1627b1a3ee6" }, + { url = "https://files.pythonhosted.org/packages/c7/39/689391845f5dc48df81b0c22248d5f66919b82da12f2bab1424bc3610529/expecttest-0.1.6-py3-none-any.whl", hash = "sha256:7cf2db203c06f9e3173670ca9d09ac00912e535139afac2c7458c1627b1a3ee6", size = 6535, upload-time = "2023-08-01T12:11:18.902Z" }, ] [[package]] name = "fastrlock" version = "0.8.3" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4af/6734d92eaa3ab/fastrlock-0.8.3.tar.gz", hash = "sha256:4af6734d92eaa3ab4373e6c9a1dd0d5ad1304e172b1521733c6c3b3d73c8fa5d" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7a7/7ebb0a24535ef/fastrlock-0.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7a77ebb0a24535ef4f167da2c5ee35d9be1e96ae192137e9dc3ff75b8dfc08a5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d51/f7fb0db8dab34/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:d51f7fb0db8dab341b7f03a39a3031678cf4a98b18533b176c533c122bfce47d" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/767/ec79b7f6ed9b9/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:767ec79b7f6ed9b9a00eb9ff62f2a51f56fdb221c5092ab2dadec34a9ccbfc6e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0d6/a77b3f396f7d4/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d6a77b3f396f7d41094ef09606f65ae57feeb713f4285e8e417f4021617ca62" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/925/77ff82ef4a94c/fastrlock-0.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:92577ff82ef4a94c5667d6d2841f017820932bc59f31ffd83e4a2c56c1738f90" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3df/8514086e16bb7/fastrlock-0.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3df8514086e16bb7c66169156a8066dc152f3be892c7817e85bf09a27fa2ada2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/55d/42f6286b9d867/fastrlock-0.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:55d42f6286b9d867370af4c27bc70d04ce2d342fe450c4a4fcce14440514e695" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bbc/3bf96dcbd6839/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:bbc3bf96dcbd68392366c477f78c9d5c47e5d9290cb115feea19f20a43ef6d05" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/77a/b8a98417a1f46/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:77ab8a98417a1f467dafcd2226718f7ca0cf18d4b64732f838b8c2b3e4b55cb5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/04b/b5eef8f460d13/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:04bb5eef8f460d13b8c0084ea5a9d3aab2c0573991c880c0a34a56bb14951d30" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/8c9/d459ce344c21f/fastrlock-0.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c9d459ce344c21ff03268212a1845aa37feab634d242131bc16c2a2355d5f65" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/33e/6fa4af4f3af3e/fastrlock-0.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33e6fa4af4f3af3e9c747ec72d1eadc0b7ba2035456c2afb51c24d9e8a56f8fd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/85a/49a1f1e020097/fastrlock-0.8.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:85a49a1f1e020097d087e1963e42cea6f307897d5ebe2cb6daf4af47ffdd3eed" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5f1/3ec08f1adb1aa/fastrlock-0.8.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f13ec08f1adb1aa916c384b05ecb7dbebb8df9ea81abd045f60941c6283a670" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0ea/4e53a04980d64/fastrlock-0.8.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0ea4e53a04980d646def0f5e4b5e8bd8c7884288464acab0b37ca0c65c482bfe" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/383/40f6635bd4ee2/fastrlock-0.8.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:38340f6635bd4ee2a4fb02a3a725759fe921f2ca846cb9ca44531ba739cc17b4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4a9/8ba46b3e14927/fastrlock-0.8.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a98ba46b3e14927550c4baa36b752d0d2f7387b8534864a8767f83cce75c160" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dbd/ea6deeccea191/fastrlock-0.8.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbdea6deeccea1917c6017d353987231c4e46c93d5338ca3e66d6cd88fbce259" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c6e/5bfecbc0d72ff/fastrlock-0.8.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:c6e5bfecbc0d72ff07e43fed81671747914d6794e0926700677ed26d894d4f4f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2a8/3d558470c520e/fastrlock-0.8.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2a83d558470c520ed21462d304e77a12639859b205759221c8144dd2896b958a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/40b/328369005a0b3/fastrlock-0.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:40b328369005a0b32de14b699192aed32f549c2d2b27a5e1f614fb7ac4cec4e9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6cb/fb6f7731b5a28/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:6cbfb6f7731b5a280851c93883624424068fa5b22c2f546d8ae6f1fd9311e36d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1fc/ed4cb0b3f1616/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1fced4cb0b3f1616be68092b70a56e9173713a4a943d02e90eb9c7897a7b5e07" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/387/b2ac642938a20/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:387b2ac642938a20170a50f528817026c561882ea33306c5cbe750ae10d0a7c2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5a0/d31840a28d665/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a0d31840a28d66573047d2df410eb971135a2461fb952894bf51c9533cbfea5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0a9/dc6fa73174f97/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0a9dc6fa73174f974dfb22778d05a44445b611a41d5d3776b0d5daa9e50225c6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/984/2b7722e4923fe/fastrlock-0.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9842b7722e4923fe76b08d8c58a9415a9a50d4c29b80673cffeae4874ea6626a" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/050/29d7080c0c61a/fastrlock-0.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:05029d7080c0c61a81d5fee78e842c9a1bf22552cd56129451a252655290dcef" }, +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/73/b1/1c3d635d955f2b4bf34d45abf8f35492e04dbd7804e94ce65d9f928ef3ec/fastrlock-0.8.3.tar.gz", hash = "sha256:4af6734d92eaa3ab4373e6c9a1dd0d5ad1304e172b1521733c6c3b3d73c8fa5d", size = 79327, upload-time = "2024-12-17T11:03:39.638Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/b4/aae7ed94b8122c325d89eb91336084596cebc505dc629b795fcc9629606d/fastrlock-0.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7a77ebb0a24535ef4f167da2c5ee35d9be1e96ae192137e9dc3ff75b8dfc08a5", size = 48220, upload-time = "2024-12-17T11:01:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/96/87/9807af47617fdd65c68b0fcd1e714542c1d4d3a1f1381f591f1aa7383a53/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:d51f7fb0db8dab341b7f03a39a3031678cf4a98b18533b176c533c122bfce47d", size = 49551, upload-time = "2024-12-17T11:01:52.316Z" }, + { url = "https://files.pythonhosted.org/packages/9d/12/e201634810ac9aee59f93e3953cb39f98157d17c3fc9d44900f1209054e9/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:767ec79b7f6ed9b9a00eb9ff62f2a51f56fdb221c5092ab2dadec34a9ccbfc6e", size = 49398, upload-time = "2024-12-17T11:01:53.514Z" }, + { url = "https://files.pythonhosted.org/packages/15/a1/439962ed439ff6f00b7dce14927e7830e02618f26f4653424220a646cd1c/fastrlock-0.8.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d6a77b3f396f7d41094ef09606f65ae57feeb713f4285e8e417f4021617ca62", size = 53334, upload-time = "2024-12-17T11:01:55.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/9e/1ae90829dd40559ab104e97ebe74217d9da794c4bb43016da8367ca7a596/fastrlock-0.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:92577ff82ef4a94c5667d6d2841f017820932bc59f31ffd83e4a2c56c1738f90", size = 52495, upload-time = "2024-12-17T11:01:57.76Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8c/5e746ee6f3d7afbfbb0d794c16c71bfd5259a4e3fb1dda48baf31e46956c/fastrlock-0.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3df8514086e16bb7c66169156a8066dc152f3be892c7817e85bf09a27fa2ada2", size = 51972, upload-time = "2024-12-17T11:02:01.384Z" }, + { url = "https://files.pythonhosted.org/packages/be/91/5f3afba7d14b8b7d60ac651375f50fff9220d6ccc3bef233d2bd74b73ec7/fastrlock-0.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:55d42f6286b9d867370af4c27bc70d04ce2d342fe450c4a4fcce14440514e695", size = 48911, upload-time = "2024-12-17T11:02:06.173Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/e37bd72d7d70a8a551b3b4610d028bd73ff5d6253201d5d3cf6296468bee/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:bbc3bf96dcbd68392366c477f78c9d5c47e5d9290cb115feea19f20a43ef6d05", size = 50357, upload-time = "2024-12-17T11:02:07.418Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ef/a13b8bab8266840bf38831d7bf5970518c02603d00a548a678763322d5bf/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:77ab8a98417a1f467dafcd2226718f7ca0cf18d4b64732f838b8c2b3e4b55cb5", size = 50222, upload-time = "2024-12-17T11:02:08.745Z" }, + { url = "https://files.pythonhosted.org/packages/01/e2/5e5515562b2e9a56d84659377176aef7345da2c3c22909a1897fe27e14dd/fastrlock-0.8.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:04bb5eef8f460d13b8c0084ea5a9d3aab2c0573991c880c0a34a56bb14951d30", size = 54553, upload-time = 
"2024-12-17T11:02:10.925Z" }, + { url = "https://files.pythonhosted.org/packages/c0/8f/65907405a8cdb2fc8beaf7d09a9a07bb58deff478ff391ca95be4f130b70/fastrlock-0.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c9d459ce344c21ff03268212a1845aa37feab634d242131bc16c2a2355d5f65", size = 53362, upload-time = "2024-12-17T11:02:12.476Z" }, + { url = "https://files.pythonhosted.org/packages/ec/b9/ae6511e52738ba4e3a6adb7c6a20158573fbc98aab448992ece25abb0b07/fastrlock-0.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33e6fa4af4f3af3e9c747ec72d1eadc0b7ba2035456c2afb51c24d9e8a56f8fd", size = 52836, upload-time = "2024-12-17T11:02:13.74Z" }, + { url = "https://files.pythonhosted.org/packages/57/21/ea1511b0ef0d5457efca3bf1823effb9c5cad4fc9dca86ce08e4d65330ce/fastrlock-0.8.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:85a49a1f1e020097d087e1963e42cea6f307897d5ebe2cb6daf4af47ffdd3eed", size = 52201, upload-time = "2024-12-17T11:02:19.512Z" }, + { url = "https://files.pythonhosted.org/packages/80/07/cdecb7aa976f34328372f1c4efd6c9dc1b039b3cc8d3f38787d640009a25/fastrlock-0.8.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f13ec08f1adb1aa916c384b05ecb7dbebb8df9ea81abd045f60941c6283a670", size = 53924, upload-time = "2024-12-17T11:02:20.85Z" }, + { url = "https://files.pythonhosted.org/packages/88/6d/59c497f8db9a125066dd3a7442fab6aecbe90d6fec344c54645eaf311666/fastrlock-0.8.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0ea4e53a04980d646def0f5e4b5e8bd8c7884288464acab0b37ca0c65c482bfe", size = 52140, upload-time = "2024-12-17T11:02:22.263Z" }, + { url = "https://files.pythonhosted.org/packages/62/04/9138943c2ee803d62a48a3c17b69de2f6fa27677a6896c300369e839a550/fastrlock-0.8.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:38340f6635bd4ee2a4fb02a3a725759fe921f2ca846cb9ca44531ba739cc17b4", size = 53261, upload-time = "2024-12-17T11:02:24.418Z" }, + { 
url = "https://files.pythonhosted.org/packages/06/77/f06a907f9a07d26d0cca24a4385944cfe70d549a2c9f1c3e3217332f4f12/fastrlock-0.8.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a98ba46b3e14927550c4baa36b752d0d2f7387b8534864a8767f83cce75c160", size = 50954, upload-time = "2024-12-17T11:02:32.12Z" }, + { url = "https://files.pythonhosted.org/packages/f9/4e/94480fb3fd93991dd6f4e658b77698edc343f57caa2870d77b38c89c2e3b/fastrlock-0.8.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbdea6deeccea1917c6017d353987231c4e46c93d5338ca3e66d6cd88fbce259", size = 52535, upload-time = "2024-12-17T11:02:33.402Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a7/ee82bb55b6c0ca30286dac1e19ee9417a17d2d1de3b13bb0f20cefb86086/fastrlock-0.8.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c6e5bfecbc0d72ff07e43fed81671747914d6794e0926700677ed26d894d4f4f", size = 50942, upload-time = "2024-12-17T11:02:34.688Z" }, + { url = "https://files.pythonhosted.org/packages/63/1d/d4b7782ef59e57dd9dde69468cc245adafc3674281905e42fa98aac30a79/fastrlock-0.8.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2a83d558470c520ed21462d304e77a12639859b205759221c8144dd2896b958a", size = 52044, upload-time = "2024-12-17T11:02:36.613Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ad/c8fb45d5efcdf791f0dba5c09896b39eabbdc108f5b518941a2caae52f23/fastrlock-0.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:40b328369005a0b32de14b699192aed32f549c2d2b27a5e1f614fb7ac4cec4e9", size = 49804, upload-time = "2024-12-17T11:03:25.14Z" }, + { url = "https://files.pythonhosted.org/packages/47/15/365918306c30132bd63ae27b154e2aadb4e71c178297fc635e613aa4e767/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:6cbfb6f7731b5a280851c93883624424068fa5b22c2f546d8ae6f1fd9311e36d", size = 51763, 
upload-time = "2024-12-17T11:03:27.546Z" }, + { url = "https://files.pythonhosted.org/packages/84/39/74fda02c3edeb6cc69cf5a4616e394f5636a227262788f4d33fee8401941/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1fced4cb0b3f1616be68092b70a56e9173713a4a943d02e90eb9c7897a7b5e07", size = 38277, upload-time = "2024-12-17T11:03:28.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/8f/86cf1dfd1d0d027110d0177946ddb34a28a6d0040331899df6dabcf9f332/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:387b2ac642938a20170a50f528817026c561882ea33306c5cbe750ae10d0a7c2", size = 51561, upload-time = "2024-12-17T11:03:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/09/5a/eabdde19fee480da1e0b3af4aef7f285d544c1ea733dc0f3df22a620df23/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a0d31840a28d66573047d2df410eb971135a2461fb952894bf51c9533cbfea5", size = 55191, upload-time = "2024-12-17T11:03:31.604Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e3/bdbe97b6d0d25b44bb2141c8e6be5f5bf573cf6413c9e23a7029af2d8922/fastrlock-0.8.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0a9dc6fa73174f974dfb22778d05a44445b611a41d5d3776b0d5daa9e50225c6", size = 39536, upload-time = "2024-12-17T11:03:32.958Z" }, + { url = "https://files.pythonhosted.org/packages/0a/d0/aa12b01ea28606398bcd781b01c07dad388616029a14e065b1f0ae64d8ca/fastrlock-0.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9842b7722e4923fe76b08d8c58a9415a9a50d4c29b80673cffeae4874ea6626a", size = 54474, upload-time = "2024-12-17T11:03:34.199Z" }, + { url = "https://files.pythonhosted.org/packages/4e/fb/e82f40aa6a4844107f6ace90f70b72c0cd26838a5d1984e44ec4a5d72f30/fastrlock-0.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:05029d7080c0c61a81d5fee78e842c9a1bf22552cd56129451a252655290dcef", size = 54142, upload-time = 
"2024-12-17T11:03:35.587Z" }, ] [[package]] name = "filelock" version = "3.18.0" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ { url = "https://download.pytorch.org/whl/nightly/filelock-3.18.0-py3-none-any.whl" }, ] @@ -539,100 +539,100 @@ wheels = [ [[package]] name = "flatbuffers" version = "25.2.10" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/97e/451377a41262f/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170, upload-time = "2025-02-11T04:26:46.257Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ebb/a5f4d5ea615af/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051" }, + { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953, upload-time = "2025-02-11T04:26:44.484Z" }, ] [[package]] name = "frozenlist" version = "1.6.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eff/c641518696471/frozenlist-1.6.2.tar.gz", hash = "sha256:effc641518696471cf4962e8e32050133bc1f7b2851ae8fd0cb8797dd70dc202" } -wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/0de/575df0135949c/frozenlist-1.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de575df0135949c4049ae42db714c43d1693c590732abc78c47a04228fc1efb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2b6/eaba27ec2b3c0/frozenlist-1.6.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b6eaba27ec2b3c0af7845619a425eeae8d510d5cc83fb3ef80569129238153b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/af1/ee5188d2f63b4/frozenlist-1.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:af1ee5188d2f63b4f09b67cf0c60b8cdacbd1e8d24669eac238e247d8b157581" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/917/9c5186eb996c0/frozenlist-1.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9179c5186eb996c0dd7e4c828858ade4d7a8d1d12dd67320675a6ae7401f2647" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/388/14ebc3c6bb01d/frozenlist-1.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38814ebc3c6bb01dc3bb4d6cffd0e64c19f4f2d03e649978aeae8e12b81bdf43" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0db/cab0531318fc9/frozenlist-1.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dbcab0531318fc9ca58517865fae63a2fe786d5e2d8f3a56058c29831e49f13" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/747/2e477dc5d6a00/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7472e477dc5d6a000945f45b6e38cbb1093fdec189dc1e98e57f8ab53f8aa246" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/17c/230586d473327/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:17c230586d47332774332af86cc1e69ee095731ec70c27e5698dfebb9db167a0" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/946/a41e095592cf1/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:946a41e095592cf1c88a1fcdd154c13d0ef6317b371b817dc2b19b3d93ca0811" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d90/c9b36c669eb48/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d90c9b36c669eb481de605d3c2da02ea98cba6a3f5e93b3fe5881303026b2f14" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/865/1dd2d762d6eef/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8651dd2d762d6eefebe8450ec0696cf3706b0eb5e46463138931f70c667ba612" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/484/00e6a09e21734/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:48400e6a09e217346949c034105b0df516a1b3c5aa546913b70b71b646caa9f5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/91d/d2fb760f4a2c0/frozenlist-1.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91dd2fb760f4a2c04b3330e0191787c3437283f9241f0b379017d4b13cea8f5e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f01/f34f8a5c7b4d7/frozenlist-1.6.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f01f34f8a5c7b4d74a1c65227678822e69801dcf68edd4c11417a7c83828ff6f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f43/f872cc4cfc46d/frozenlist-1.6.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f43f872cc4cfc46d9805d0e71302e9c39c755d5ad7572198cd2ceb3a291176cc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3f9/6cc8ab3a73d42/frozenlist-1.6.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f96cc8ab3a73d42bcdb6d9d41c3dceffa8da8273ac54b71304b891e32de8b13" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9c0/b257123320832/frozenlist-1.6.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9c0b257123320832cce9bea9935c860e4fa625b0e58b10db49fdfef70087df81" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/23d/c4def97ccc023/frozenlist-1.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23dc4def97ccc0232f491836050ae664d3d2352bb43ad4cd34cd3399ad8d1fc8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fcf/3663463c04031/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fcf3663463c040315f025bd6a5f88b3748082cfe111e90fd422f71668c65de52" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/16b/9e7b59ea6eef8/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:16b9e7b59ea6eef876a8a5fac084c95fd4bac687c790c4d48c0d53c6bcde54d1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/308/b40d32a98a8d0/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:308b40d32a98a8d0d09bc28e4cbc13a0b803a0351041d4548564f28f6b148b05" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/baf/585d8968eaad6/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:baf585d8968eaad6c1aae99456c40978a9fa822ccbdb36fd4746b581ef338192" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4df/dbdb671a6af6e/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4dfdbdb671a6af6ea1a363b210373c8233df3925d9a7fb99beaa3824f6b99656" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/949/16e3acaeb8374/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:94916e3acaeb8374d5aea9c37db777c9f0a2b9be46561f5de30064cbbbfae54a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/84d/918b01781c6eb/frozenlist-1.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d918b01781c6ebb5b776c18a87dd3016ff979eb78626aaca928bae69a640c3" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/e28/92d9ab060a847/frozenlist-1.6.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2892d9ab060a847f20fab83fdb886404d0f213f648bdeaebbe76a6134f0973d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bbd/2225d7218e7d3/frozenlist-1.6.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbd2225d7218e7d386f4953d11484b0e38e5d134e85c91f0a6b0f30fb6ae25c4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9b6/79187cba0a99f/frozenlist-1.6.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b679187cba0a99f1162c7ec1b525e34bdc5ca246857544d16c1ed234562df80" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bce/b7bd48849d4b7/frozenlist-1.6.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bceb7bd48849d4b76eac070a6d508aa3a529963f5d9b0a6840fd41fb381d5a09" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/88b/1b79ae86fdacc/frozenlist-1.6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b1b79ae86fdacc4bf842a4e0456540947abba64a84e61b5ae24c87adb089db" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6c5/c3c575148aa73/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6c5c3c575148aa7308a38709906842039d7056bf225da6284b7a11cf9275ac5d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/162/63bd677a31fe1/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:16263bd677a31fe1a5dc2b803b564e349c96f804a81706a62b8698dd14dbba50" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2e5/1b2054886ff7d/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2e51b2054886ff7db71caf68285c2cd936eb7a145a509965165a2aae715c92a7" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/ae1/785b76f641cce/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ae1785b76f641cce4efd7e6f49ca4ae456aa230383af5ab0d4d3922a7e37e763" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/301/55cc481f73f92/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:30155cc481f73f92f47ab1e858a7998f7b1207f9b5cf3b3cba90ec65a7f224f5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e1a/1d82f2eb3d287/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e1a1d82f2eb3d2875a8d139ae3f5026f7797f9de5dce44f53811ab0a883e85e7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d3e/6c0681783723b/frozenlist-1.6.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e6c0681783723bb472b6b8304e61ecfcb4c2b11cf7f243d923813c21ae5d2a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/61b/ae4d345a26550/frozenlist-1.6.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:61bae4d345a26550d0ed9f2c9910ea060f89dbfc642b7b96e9510a95c3a33b3c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/90e/5a84016d0d2fb/frozenlist-1.6.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90e5a84016d0d2fb828f770ede085b5d89155fcb9629b8a3237c960c41c120c3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/55d/c289a064c0481/frozenlist-1.6.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55dc289a064c04819d669e6e8a85a1c0416e6c601782093bdc749ae14a2f39da" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b79/bcf97ca03c95b/frozenlist-1.6.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b79bcf97ca03c95b044532a4fef6e5ae106a2dd863875b75fde64c553e3f4820" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/2e5/e7564d232a782/frozenlist-1.6.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e5e7564d232a782baa3089b25a0d979e2e4d6572d3c7231fcceacc5c22bf0f7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6fc/d8d56880dccdd/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fcd8d56880dccdd376afb18f483ab55a0e24036adc9a83c914d4b7bb5729d4e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4fb/ce985c7fe7baf/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4fbce985c7fe7bafb4d9bf647c835dbe415b465a897b0c79d1bdf0f3fae5fe50" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3bd/12d727cd61638/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3bd12d727cd616387d50fe283abebb2db93300c98f8ff1084b68460acd551926" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/385/44cae535ed697/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:38544cae535ed697960891131731b33bb865b7d197ad62dc380d2dbb1bceff48" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/473/96898f98fae5c/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:47396898f98fae5c9b9bb409c3d2cf6106e409730f35a0926aad09dd7acf1ef5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d10/d835f8ce8571f/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d10d835f8ce8571fd555db42d3aef325af903535dad7e6faa7b9c8abe191bffc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cc4/9f2277e8173ab/frozenlist-1.6.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc49f2277e8173abf028d744f8b7d69fe8cc26bffc2de97d47a3b529599fbf50" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/65e/b9e8a973161bd/frozenlist-1.6.2-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:65eb9e8a973161bdac5fa06ea6bd261057947adc4f47a7a6ef3d6db30c78c5b4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/301/eb2f898d86303/frozenlist-1.6.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:301eb2f898d863031f8c5a56c88a6c5d976ba11a4a08a1438b96ee3acb5aea80" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/207/f717fd5e65fdd/frozenlist-1.6.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:207f717fd5e65fddb77d33361ab8fa939f6d89195f11307e073066886b33f2b8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f83/992722642ee0d/frozenlist-1.6.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f83992722642ee0db0333b1dbf205b1a38f97d51a7382eb304ba414d8c3d1e05" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/12a/f99e6023851b3/frozenlist-1.6.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12af99e6023851b36578e5bcc60618b5b30f4650340e29e565cd1936326dbea7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6f0/1620444a674ea/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6f01620444a674eaad900a3263574418e99c49e2a5d6e5330753857363b5d59f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/82b/94c8948341512/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:82b94c8948341512306ca8ccc702771600b442c6abe5f8ee017e00e452a209e8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/324/a4cf4c220ddb3/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:324a4cf4c220ddb3db1f46ade01e48432c63fa8c26812c710006e7f6cfba4a08" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/695/284e51458dabb/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:695284e51458dabb89af7f7dc95c470aa51fd259207aba5378b187909297feef" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/9cc/beb1c8dda4f42/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:9ccbeb1c8dda4f42d0678076aa5cbde941a232be71c67b9d8ca89fbaf395807c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cbb/df62fcc186491/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cbbdf62fcc1864912c592a1ec748fee94f294c6b23215d5e8e9569becb7723ee" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/97d/cdffe18c0e35c/frozenlist-1.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97dcdffe18c0e35ce57b3d7c1352893a3608e7578b814abb3b2a3cc15907e682" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cc2/28faf4533327e/frozenlist-1.6.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cc228faf4533327e5f1d153217ab598648a2cd5f6b1036d82e63034f079a5861" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0ee/53aba5d0768e2/frozenlist-1.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ee53aba5d0768e2c5c6185ec56a94bab782ef002429f293497ec5c5a3b94bdf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d32/14738024afd53/frozenlist-1.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d3214738024afd53434614ee52aa74353a562414cd48b1771fa82fd982cb1edb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/562/8e6a6f74ef169/frozenlist-1.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5628e6a6f74ef1693adbe25c0bce312eb9aee82e58abe370d287794aff632d0f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ad7/678d3e32cb388/frozenlist-1.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7678d3e32cb3884879f10c679804c08f768df55078436fb56668f3e13e2a5e" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/b77/6ab5217e2bf99/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b776ab5217e2bf99c84b2cbccf4d30407789c0653f72d1653b5f8af60403d28f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b1e/162a99405cb62/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:b1e162a99405cb62d338f747b8625d6bd7b6794383e193335668295fb89b75fb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2de/1ddeb9dd8a073/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2de1ddeb9dd8a07383f6939996217f0f1b2ce07f6a01d74c9adb1db89999d006" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2dc/abe4e7aac889d/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2dcabe4e7aac889d41316c1698df0eb2565ed233b66fab6bc4a5c5b7769cad4c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/06e/28cd2ac31797e/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:06e28cd2ac31797e12ec8c65aa462a89116323f045e8b1930127aba9486aab24" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/86f/908b70043c351/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:86f908b70043c3517f862247bdc621bd91420d40c3e90ede1701a75f025fcd5f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/947/abfcc8c42a329/frozenlist-1.6.2-py3-none-any.whl", hash = "sha256:947abfcc8c42a329bbda6df97a4b9c9cdb4e12c85153b3b57b9d2f02aa5877dc" }, +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/bf/a812e2fe6cb3f6c6cfc8d0303bf1742f2286004e5ec41ac8c89cf68cdb54/frozenlist-1.6.2.tar.gz", hash = "sha256:effc641518696471cf4962e8e32050133bc1f7b2851ae8fd0cb8797dd70dc202", size = 43108, upload-time = "2025-06-03T21:48:04.467Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/36/e33a7ecafa8be33d251e92780d028090a4694160ed0f7b4dde5ac91698fc/frozenlist-1.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:0de575df0135949c4049ae42db714c43d1693c590732abc78c47a04228fc1efb", size = 225206, upload-time = "2025-06-03T21:45:18.671Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1c/07f56515c785c3b861173d2e0e73c614acc4a4f11b0e8f33bf74f8613083/frozenlist-1.6.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b6eaba27ec2b3c0af7845619a425eeae8d510d5cc83fb3ef80569129238153b", size = 220009, upload-time = "2025-06-03T21:45:20.72Z" }, + { url = "https://files.pythonhosted.org/packages/67/78/1427ecc0223fe59e3320bed93fda9b6b4ca7fb3ac9c40e1453a0f2c3bdac/frozenlist-1.6.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:af1ee5188d2f63b4f09b67cf0c60b8cdacbd1e8d24669eac238e247d8b157581", size = 235243, upload-time = "2025-06-03T21:45:22.269Z" }, + { url = "https://files.pythonhosted.org/packages/15/c7/597f042562daffcada159807cf6539363f797777ee80e855c2aa84d4fed9/frozenlist-1.6.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9179c5186eb996c0dd7e4c828858ade4d7a8d1d12dd67320675a6ae7401f2647", size = 228925, upload-time = "2025-06-03T21:45:24.102Z" }, + { url = "https://files.pythonhosted.org/packages/a7/32/736cd296a4792826bc38764d5bd0442b51dbaad3c1a4f5cea01b17df9960/frozenlist-1.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38814ebc3c6bb01dc3bb4d6cffd0e64c19f4f2d03e649978aeae8e12b81bdf43", size = 211781, upload-time = "2025-06-03T21:45:25.983Z" }, + { url = "https://files.pythonhosted.org/packages/f1/cc/041c88e1cdcb176a99b0c1194e1e387ebaeebaae77d1d41938f06b124e74/frozenlist-1.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dbcab0531318fc9ca58517865fae63a2fe786d5e2d8f3a56058c29831e49f13", size = 224409, upload-time = "2025-06-03T21:45:27.411Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/1b/3b60600ae89b7b3d5b3c95423b22fd4b54c966fe0b1f9dee9137019cf9ec/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7472e477dc5d6a000945f45b6e38cbb1093fdec189dc1e98e57f8ab53f8aa246", size = 227850, upload-time = "2025-06-03T21:45:29.336Z" }, + { url = "https://files.pythonhosted.org/packages/77/e3/cd0d75e1c395b08010b94916e8650dd5bd5f25153147b0bb9fda9ecbb94a/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:17c230586d47332774332af86cc1e69ee095731ec70c27e5698dfebb9db167a0", size = 237819, upload-time = "2025-06-03T21:45:31.164Z" }, + { url = "https://files.pythonhosted.org/packages/38/c9/2681be06d34a993782bcc8a7d4d0c2d0970cd1f8c919d5b963ecec3bf4da/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:946a41e095592cf1c88a1fcdd154c13d0ef6317b371b817dc2b19b3d93ca0811", size = 218407, upload-time = "2025-06-03T21:45:32.612Z" }, + { url = "https://files.pythonhosted.org/packages/c6/c1/81f6f745e273454daecc29f06a571cd253f1bf7fc2b49e22a14636539bee/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d90c9b36c669eb481de605d3c2da02ea98cba6a3f5e93b3fe5881303026b2f14", size = 235941, upload-time = "2025-06-03T21:45:34.492Z" }, + { url = "https://files.pythonhosted.org/packages/99/a1/0bc9000642c05a19c7e0b9bb6f636243fc5af9c008e6c3fb31bb1e504738/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8651dd2d762d6eefebe8450ec0696cf3706b0eb5e46463138931f70c667ba612", size = 235766, upload-time = "2025-06-03T21:45:35.946Z" }, + { url = "https://files.pythonhosted.org/packages/a5/12/77effc4e36f69be8bda2284001417d8c85bf616fb36d9aa19e0bd07e292e/frozenlist-1.6.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:48400e6a09e217346949c034105b0df516a1b3c5aa546913b70b71b646caa9f5", size = 225239, upload-time = "2025-06-03T21:45:37.847Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/dc/af7b2d190cb8b553032b7b46e582eaad4563d6f3c30b7e2524a7cdfc3e11/frozenlist-1.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91dd2fb760f4a2c04b3330e0191787c3437283f9241f0b379017d4b13cea8f5e", size = 237242, upload-time = "2025-06-03T21:45:46.388Z" }, + { url = "https://files.pythonhosted.org/packages/27/0c/e8fcde735f8b62421f944e08e95191a88a065bb5cdc5e7a1c9b7806adb3f/frozenlist-1.6.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f01f34f8a5c7b4d74a1c65227678822e69801dcf68edd4c11417a7c83828ff6f", size = 228128, upload-time = "2025-06-03T21:45:47.88Z" }, + { url = "https://files.pythonhosted.org/packages/43/ea/0e7bf5c347387724fc4b77ef94cf4ca317f3720ac154adb1a97e8b68d7ef/frozenlist-1.6.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f43f872cc4cfc46d9805d0e71302e9c39c755d5ad7572198cd2ceb3a291176cc", size = 246343, upload-time = "2025-06-03T21:45:49.765Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ce/223a2fbdaaeeb72428063378b11ff356e801a4cf922cccfeb569fe8a21a4/frozenlist-1.6.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f96cc8ab3a73d42bcdb6d9d41c3dceffa8da8273ac54b71304b891e32de8b13", size = 240659, upload-time = "2025-06-03T21:45:51.216Z" }, + { url = "https://files.pythonhosted.org/packages/2f/9e/77c92740b33523b880683872971da1ed6fa4a30a7a84d3f43540d807b792/frozenlist-1.6.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c0b257123320832cce9bea9935c860e4fa625b0e58b10db49fdfef70087df81", size = 221329, upload-time = "2025-06-03T21:45:52.665Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c3/9dcfc63ae15a51132483fc34c2aad0ff32cabeedb6e51324553423cd2449/frozenlist-1.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:23dc4def97ccc0232f491836050ae664d3d2352bb43ad4cd34cd3399ad8d1fc8", size = 236338, upload-time = "2025-06-03T21:45:54.154Z" }, + { url = "https://files.pythonhosted.org/packages/31/d6/7eaf4bdafa61c227670832f2f21294ecae4505bba25a71a49f16db005a69/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fcf3663463c040315f025bd6a5f88b3748082cfe111e90fd422f71668c65de52", size = 239097, upload-time = "2025-06-03T21:45:55.599Z" }, + { url = "https://files.pythonhosted.org/packages/59/df/3350e94786babdd906ac7d8ca9646e38a97a81f7e1585b598dcabb6ea178/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:16b9e7b59ea6eef876a8a5fac084c95fd4bac687c790c4d48c0d53c6bcde54d1", size = 247310, upload-time = "2025-06-03T21:45:57.045Z" }, + { url = "https://files.pythonhosted.org/packages/ea/26/9a09169158ce073d04ff1851242e4f05df93e6eef4161997f9ff05da2f66/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:308b40d32a98a8d0d09bc28e4cbc13a0b803a0351041d4548564f28f6b148b05", size = 227829, upload-time = "2025-06-03T21:45:58.47Z" }, + { url = "https://files.pythonhosted.org/packages/f1/da/a1e2db77514ffabeeb16c486af74580a1105162206386c6b826a69c0a040/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:baf585d8968eaad6c1aae99456c40978a9fa822ccbdb36fd4746b581ef338192", size = 247808, upload-time = "2025-06-03T21:46:00.462Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d2/457931890fab0f240d07eed45adc51c7be817d474a791d7f12799a5b93f2/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4dfdbdb671a6af6ea1a363b210373c8233df3925d9a7fb99beaa3824f6b99656", size = 247343, upload-time = "2025-06-03T21:46:02.491Z" }, + { url = "https://files.pythonhosted.org/packages/47/4c/34a28b01d8dab8f84630ce75004bcb4313866105248f942df5148604eaf0/frozenlist-1.6.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:94916e3acaeb8374d5aea9c37db777c9f0a2b9be46561f5de30064cbbbfae54a", size = 236482, upload-time = 
"2025-06-03T21:46:04.155Z" }, + { url = "https://files.pythonhosted.org/packages/b1/cb/aa09a825abeabb8165282f3f79cb3f130847486ee6427d72d742efa604d6/frozenlist-1.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d918b01781c6ebb5b776c18a87dd3016ff979eb78626aaca928bae69a640c3", size = 241513, upload-time = "2025-06-03T21:46:13.26Z" }, + { url = "https://files.pythonhosted.org/packages/2c/a3/9c22011770ea8b423adf0e12ec34200cf68ff444348d6c7c3466acc6be53/frozenlist-1.6.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2892d9ab060a847f20fab83fdb886404d0f213f648bdeaebbe76a6134f0973d", size = 234019, upload-time = "2025-06-03T21:46:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/88/39/83c077661ba708d28859dc01d299c9272c9adeb4b9e58dba85da2271cb08/frozenlist-1.6.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbd2225d7218e7d386f4953d11484b0e38e5d134e85c91f0a6b0f30fb6ae25c4", size = 247035, upload-time = "2025-06-03T21:46:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/78/9f/7153e16e51ee8d660e907ef43c5a73882e3dc96582f70b00ece7d8a69b43/frozenlist-1.6.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b679187cba0a99f1162c7ec1b525e34bdc5ca246857544d16c1ed234562df80", size = 244126, upload-time = "2025-06-03T21:46:18.253Z" }, + { url = "https://files.pythonhosted.org/packages/71/1f/e8e6b72f3b285f8a6cfe4c01d14c4bbbf477c40868c8386bd9617298c696/frozenlist-1.6.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bceb7bd48849d4b76eac070a6d508aa3a529963f5d9b0a6840fd41fb381d5a09", size = 224463, upload-time = "2025-06-03T21:46:20.177Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/b5/20ab79daba2e787c3426f6fa7bb2114edfcdffa4cfb2dd1c8e84f6964519/frozenlist-1.6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b1b79ae86fdacc4bf842a4e0456540947abba64a84e61b5ae24c87adb089db", size = 240225, upload-time = "2025-06-03T21:46:21.615Z" }, + { url = "https://files.pythonhosted.org/packages/02/46/5d2e14cec6f577426f53e8726f824028da55703a5a6b41c6eb7a3cdf1372/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6c5c3c575148aa7308a38709906842039d7056bf225da6284b7a11cf9275ac5d", size = 237668, upload-time = "2025-06-03T21:46:23.143Z" }, + { url = "https://files.pythonhosted.org/packages/5d/35/d29a3297954c34b69842f63541833eaca71e50fb6ebbafd9eb95babc1508/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:16263bd677a31fe1a5dc2b803b564e349c96f804a81706a62b8698dd14dbba50", size = 248603, upload-time = "2025-06-03T21:46:28.592Z" }, + { url = "https://files.pythonhosted.org/packages/1e/30/bcb572840d112b22b89d2178168741674ab3766ad507c33e2549fdfee7f0/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2e51b2054886ff7db71caf68285c2cd936eb7a145a509965165a2aae715c92a7", size = 225855, upload-time = "2025-06-03T21:46:30.151Z" }, + { url = "https://files.pythonhosted.org/packages/ac/33/a0d3f75b126a18deb151f1cfb42ff64bbce22d8651fdda061e4fb56cd9b5/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ae1785b76f641cce4efd7e6f49ca4ae456aa230383af5ab0d4d3922a7e37e763", size = 246094, upload-time = "2025-06-03T21:46:32.709Z" }, + { url = "https://files.pythonhosted.org/packages/4d/7c/c5140e62f1b878a2982246505ed9461c4238f17fd53237ae25ddc9dbeb8d/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:30155cc481f73f92f47ab1e858a7998f7b1207f9b5cf3b3cba90ec65a7f224f5", size = 247984, upload-time = "2025-06-03T21:46:35.095Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/da/32ac9c843ee126f8b2c3b164cf39a1bbf05e7a46e57659fef1db4f35e5dc/frozenlist-1.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e1a1d82f2eb3d2875a8d139ae3f5026f7797f9de5dce44f53811ab0a883e85e7", size = 239770, upload-time = "2025-06-03T21:46:36.55Z" }, + { url = "https://files.pythonhosted.org/packages/85/4e/38643ce3ee80d222892b694d02c15ea476c4d564493a6fe530347163744e/frozenlist-1.6.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e6c0681783723bb472b6b8304e61ecfcb4c2b11cf7f243d923813c21ae5d2a", size = 255771, upload-time = "2025-06-03T21:46:46.53Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e6/ceed85a7d5c0f666485384fc393e32353f8088e154a1109e5ef60165d366/frozenlist-1.6.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:61bae4d345a26550d0ed9f2c9910ea060f89dbfc642b7b96e9510a95c3a33b3c", size = 252519, upload-time = "2025-06-03T21:46:48.101Z" }, + { url = "https://files.pythonhosted.org/packages/29/99/9f2e2b90cf918465e3b6ca4eea79e6be53d24fba33937e37d86c3764bbf9/frozenlist-1.6.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90e5a84016d0d2fb828f770ede085b5d89155fcb9629b8a3237c960c41c120c3", size = 263348, upload-time = "2025-06-03T21:46:49.64Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ac/59f3ec4c1b4897186efb4757379915734a48bb16bbc15a9fe0bf0857b679/frozenlist-1.6.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55dc289a064c04819d669e6e8a85a1c0416e6c601782093bdc749ae14a2f39da", size = 257858, upload-time = "2025-06-03T21:46:51.189Z" }, + { url = "https://files.pythonhosted.org/packages/48/4a/19c97510d0c2be1ebaae68383d1b5a256a12a660ca17b0c427b1024d9b92/frozenlist-1.6.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b79bcf97ca03c95b044532a4fef6e5ae106a2dd863875b75fde64c553e3f4820", size = 238248, 
upload-time = "2025-06-03T21:46:52.649Z" }, + { url = "https://files.pythonhosted.org/packages/ef/64/641aa2b0944fa3d881323948e0d8d6fee746dae03d9023eb510bb80bc46a/frozenlist-1.6.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e5e7564d232a782baa3089b25a0d979e2e4d6572d3c7231fcceacc5c22bf0f7", size = 255932, upload-time = "2025-06-03T21:46:54.175Z" }, + { url = "https://files.pythonhosted.org/packages/6c/f8/5b68d5658fac7332e5d26542a4af0ffc2edca8da8f854f6274882889ee1e/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fcd8d56880dccdd376afb18f483ab55a0e24036adc9a83c914d4b7bb5729d4e", size = 253329, upload-time = "2025-06-03T21:46:55.69Z" }, + { url = "https://files.pythonhosted.org/packages/e9/20/379d7a27eb82748b41319bf376bf2c034e7ee11dda94f12b331edcc261ff/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4fbce985c7fe7bafb4d9bf647c835dbe415b465a897b0c79d1bdf0f3fae5fe50", size = 266164, upload-time = "2025-06-03T21:46:57.19Z" }, + { url = "https://files.pythonhosted.org/packages/13/bd/d7dbf94220020850392cb661bedfdf786398bafae85d1045dd108971d261/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3bd12d727cd616387d50fe283abebb2db93300c98f8ff1084b68460acd551926", size = 241641, upload-time = "2025-06-03T21:46:59.769Z" }, + { url = "https://files.pythonhosted.org/packages/a4/70/916fef6284d294077265cd69ad05f228e44f7ed88d9acb690df5a1174049/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:38544cae535ed697960891131731b33bb865b7d197ad62dc380d2dbb1bceff48", size = 261215, upload-time = "2025-06-03T21:47:01.752Z" }, + { url = "https://files.pythonhosted.org/packages/8f/98/1326a7189fa519692698cddf598f56766b0fea6ac71cddaf64760a055397/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:47396898f98fae5c9b9bb409c3d2cf6106e409730f35a0926aad09dd7acf1ef5", size = 262597, upload-time = "2025-06-03T21:47:03.495Z" }, + { url 
= "https://files.pythonhosted.org/packages/f4/d6/0a95ab9289c72e86c37c9b8afe82576556456b6f66a35d242526634130f2/frozenlist-1.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d10d835f8ce8571fd555db42d3aef325af903535dad7e6faa7b9c8abe191bffc", size = 258766, upload-time = "2025-06-03T21:47:05.411Z" }, + { url = "https://files.pythonhosted.org/packages/fd/b3/c4f2f7fca9487b25c39bf64535f029316e184072a82f3660ce72defc5421/frozenlist-1.6.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc49f2277e8173abf028d744f8b7d69fe8cc26bffc2de97d47a3b529599fbf50", size = 310270, upload-time = "2025-06-03T21:47:13.495Z" }, + { url = "https://files.pythonhosted.org/packages/2b/5b/046eb34d8d0fee1a8c9dc91a9ba581283c67a1ace20bcc01c86a53595105/frozenlist-1.6.2-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:65eb9e8a973161bdac5fa06ea6bd261057947adc4f47a7a6ef3d6db30c78c5b4", size = 289062, upload-time = "2025-06-03T21:47:14.92Z" }, + { url = "https://files.pythonhosted.org/packages/48/7b/80991efaa0aa25e867cf93033c28e9d1310f34f90421eb59eb1f2073d937/frozenlist-1.6.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:301eb2f898d863031f8c5a56c88a6c5d976ba11a4a08a1438b96ee3acb5aea80", size = 312202, upload-time = "2025-06-03T21:47:16.436Z" }, + { url = "https://files.pythonhosted.org/packages/78/6b/6fe30bdababdf82c5b34f0093770c4be6211071e23570721b80b11c9d52a/frozenlist-1.6.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:207f717fd5e65fddb77d33361ab8fa939f6d89195f11307e073066886b33f2b8", size = 309557, upload-time = "2025-06-03T21:47:17.939Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ef/b7bf48802fc7d084703ba2173e6a8d0590bea378dcd6a480051c41bddf47/frozenlist-1.6.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f83992722642ee0db0333b1dbf205b1a38f97d51a7382eb304ba414d8c3d1e05", size = 
282135, upload-time = "2025-06-03T21:47:19.521Z" }, + { url = "https://files.pythonhosted.org/packages/af/f8/6911a085bce8d0d0df3dfc2560e3e0fb4d6c19ff101014bcf61aa32ba39a/frozenlist-1.6.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12af99e6023851b36578e5bcc60618b5b30f4650340e29e565cd1936326dbea7", size = 303392, upload-time = "2025-06-03T21:47:21.16Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5d/b4e0cc6dbd6b9282926a470a919da7c6599ff324ab5268c7ecaff82cb858/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6f01620444a674eaad900a3263574418e99c49e2a5d6e5330753857363b5d59f", size = 309402, upload-time = "2025-06-03T21:47:22.705Z" }, + { url = "https://files.pythonhosted.org/packages/0f/1b/bf777de3c810e68e8758337fcc97ee8c956376c87aecee9a61ba19a94123/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:82b94c8948341512306ca8ccc702771600b442c6abe5f8ee017e00e452a209e8", size = 312924, upload-time = "2025-06-03T21:47:24.251Z" }, + { url = "https://files.pythonhosted.org/packages/0e/03/a69b890bc310790fcae61fd3b5be64876811b12db5d50b32e62f65e766bd/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:324a4cf4c220ddb3db1f46ade01e48432c63fa8c26812c710006e7f6cfba4a08", size = 291768, upload-time = "2025-06-03T21:47:25.874Z" }, + { url = "https://files.pythonhosted.org/packages/70/cc/559386adf987b47c8977c929271d11a72efd92778a0a2f4cc97827a9a25b/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:695284e51458dabb89af7f7dc95c470aa51fd259207aba5378b187909297feef", size = 313305, upload-time = "2025-06-03T21:47:29.305Z" }, + { url = "https://files.pythonhosted.org/packages/e7/fa/eb0e21730ffccfb2d0d367d863cbaacf8367bdc277b44eabf72f7329ab91/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:9ccbeb1c8dda4f42d0678076aa5cbde941a232be71c67b9d8ca89fbaf395807c", size = 312228, upload-time = 
"2025-06-03T21:47:30.967Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c1/8471b67172abc9478ad78c70a3f3a5c4fed6d4bcadc748e1b6dfa06ab2ae/frozenlist-1.6.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cbbdf62fcc1864912c592a1ec748fee94f294c6b23215d5e8e9569becb7723ee", size = 309905, upload-time = "2025-06-03T21:47:32.526Z" }, + { url = "https://files.pythonhosted.org/packages/c6/29/a6148cfc933f169c9d1602a06a754fd34dcfa8e53c8641e4a2df23452d5e/frozenlist-1.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97dcdffe18c0e35ce57b3d7c1352893a3608e7578b814abb3b2a3cc15907e682", size = 226545, upload-time = "2025-06-03T21:47:42.228Z" }, + { url = "https://files.pythonhosted.org/packages/69/3b/3fceaf4c9a0b47b9b167242e5e16b0ff76b4c31d6777df96bc84fc92df41/frozenlist-1.6.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cc228faf4533327e5f1d153217ab598648a2cd5f6b1036d82e63034f079a5861", size = 221973, upload-time = "2025-06-03T21:47:43.752Z" }, + { url = "https://files.pythonhosted.org/packages/f5/57/580b9770228995c12d206304cbaec3be7de302de5c59fa3e70fee00764c3/frozenlist-1.6.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ee53aba5d0768e2c5c6185ec56a94bab782ef002429f293497ec5c5a3b94bdf", size = 237506, upload-time = "2025-06-03T21:47:45.213Z" }, + { url = "https://files.pythonhosted.org/packages/6f/21/c92aaa9d44a6281bbbfc8e1de696a71c066d5bfc53d3c46e7e4cfce5537b/frozenlist-1.6.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d3214738024afd53434614ee52aa74353a562414cd48b1771fa82fd982cb1edb", size = 230642, upload-time = "2025-06-03T21:47:46.753Z" }, + { url = "https://files.pythonhosted.org/packages/24/72/19988f1c34fa91bd258aeb66fe3f88041d2ce6075415c11a3b13b218c599/frozenlist-1.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5628e6a6f74ef1693adbe25c0bce312eb9aee82e58abe370d287794aff632d0f", size = 213528, upload-time = "2025-06-03T21:47:48.232Z" }, + { url = "https://files.pythonhosted.org/packages/35/a0/331c2f38759b1aab45455b50899f92ef7d42efb00e2cb8c03cf58a95f8ef/frozenlist-1.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7678d3e32cb3884879f10c679804c08f768df55078436fb56668f3e13e2a5e", size = 225708, upload-time = "2025-06-03T21:47:49.847Z" }, + { url = "https://files.pythonhosted.org/packages/dd/9e/bf1b80cb6f009514a7e7883289eea416b9c97bdc75cc3e1e3cf9d0def806/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b776ab5217e2bf99c84b2cbccf4d30407789c0653f72d1653b5f8af60403d28f", size = 228854, upload-time = "2025-06-03T21:47:51.415Z" }, + { url = "https://files.pythonhosted.org/packages/55/b0/099d8ec52cff40449f7482f62163e7de77514859c79c8803804edf429b43/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:b1e162a99405cb62d338f747b8625d6bd7b6794383e193335668295fb89b75fb", size = 239406, upload-time = "2025-06-03T21:47:52.926Z" }, + { url = "https://files.pythonhosted.org/packages/fc/58/bab811720a62a8a600224e20a50d7a8827b6feb9cf232c7863cdb071ee7e/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2de1ddeb9dd8a07383f6939996217f0f1b2ce07f6a01d74c9adb1db89999d006", size = 220092, upload-time = "2025-06-03T21:47:54.49Z" }, + { url = "https://files.pythonhosted.org/packages/2e/14/5b5c19575d583de815a905c9e313bd626a844a3c19511972436e77eaf6f3/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2dcabe4e7aac889d41316c1698df0eb2565ed233b66fab6bc4a5c5b7769cad4c", size = 237304, upload-time = "2025-06-03T21:47:55.978Z" }, + { url = "https://files.pythonhosted.org/packages/e6/92/8a27ad183b0fe24ca0a74d7037804d91cacedd76ffd51b75ea617463e8a9/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:06e28cd2ac31797e12ec8c65aa462a89116323f045e8b1930127aba9486aab24", size = 237083, upload-time = "2025-06-03T21:47:57.519Z" }, + { url = "https://files.pythonhosted.org/packages/b1/48/6a093630018023757cd2f6412fb1b38b06530b52ddd613e402eb8a44b10a/frozenlist-1.6.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:86f908b70043c3517f862247bdc621bd91420d40c3e90ede1701a75f025fcd5f", size = 226780, upload-time = "2025-06-03T21:47:59.005Z" }, + { url = "https://files.pythonhosted.org/packages/13/be/0ebbb283f2d91b72beaee2d07760b2c47dab875c49c286f5591d3d157198/frozenlist-1.6.2-py3-none-any.whl", hash = "sha256:947abfcc8c42a329bbda6df97a4b9c9cdb4e12c85153b3b57b9d2f02aa5877dc", size = 12582, upload-time = "2025-06-03T21:48:03.201Z" }, ] [[package]] name = "fsspec" version = "2025.3.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a93/5fd1ea872591f/fsspec-2025.3.0.tar.gz", hash = "sha256:a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/34/f4/5721faf47b8c499e776bc34c6a8fc17efdf7fdef0b00f398128bc5dcb4ac/fsspec-2025.3.0.tar.gz", hash = "sha256:a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972", size = 298491, upload-time = "2025-03-07T21:47:56.461Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/efb/87af3efa9103f/fsspec-2025.3.0-py3-none-any.whl", hash = "sha256:efb87af3efa9103f94ca91a7f8cb7a4df91af9f74fc106c9c7ea0efd7277c1b3" }, + { url = "https://files.pythonhosted.org/packages/56/53/eb690efa8513166adef3e0669afd31e95ffde69fb3c52ec2ac7223ed6018/fsspec-2025.3.0-py3-none-any.whl", hash = "sha256:efb87af3efa9103f94ca91a7f8cb7a4df91af9f74fc106c9c7ea0efd7277c1b3", size = 193615, upload-time = "2025-03-07T21:47:54.809Z" }, ] [package.optional-dependencies] @@ -643,28 +643,28 @@ http = [ [[package]] name = "graphviz" version = 
"0.21" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/207/43e7183be82aa/graphviz-0.21.tar.gz", hash = "sha256:20743e7183be82aaaa8ad6c93f8893c923bd6658a04c32ee115edb3c8a835f78" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/b3/3ac91e9be6b761a4b30d66ff165e54439dcd48b83f4e20d644867215f6ca/graphviz-0.21.tar.gz", hash = "sha256:20743e7183be82aaaa8ad6c93f8893c923bd6658a04c32ee115edb3c8a835f78", size = 200434, upload-time = "2025-06-15T09:35:05.824Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/54f/33de9f4f911d7/graphviz-0.21-py3-none-any.whl", hash = "sha256:54f33de9f4f911d7e84e4191749cac8cc5653f815b06738c54db9a15ab8b1e42" }, + { url = "https://files.pythonhosted.org/packages/91/4c/e0ce1ef95d4000ebc1c11801f9b944fa5910ecc15b5e351865763d8657f8/graphviz-0.21-py3-none-any.whl", hash = "sha256:54f33de9f4f911d7e84e4191749cac8cc5653f815b06738c54db9a15ab8b1e42", size = 47300, upload-time = "2025-06-15T09:35:04.433Z" }, ] [[package]] name = "hf-xet" version = "1.1.3" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a5f/09b1dd24e6ff6/hf_xet-1.1.3.tar.gz", hash = "sha256:a5f09b1dd24e6ff6bcedb4b0ddab2d81824098bb002cf8b4ffa780545fa348c3" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/dc/dc091aeeb671e71cbec30e84963f9c0202c17337b24b0a800e7d205543e8/hf_xet-1.1.3.tar.gz", hash = "sha256:a5f09b1dd24e6ff6bcedb4b0ddab2d81824098bb002cf8b4ffa780545fa348c3", size = 488127, upload-time = "2025-06-04T00:47:27.456Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fd2/da210856444a3/hf_xet-1.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd2da210856444a34aad8ada2fc12f70dabed7cc20f37e90754d1d9b43bc0534" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/820/3f52827e3df65/hf_xet-1.1.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8203f52827e3df65981984936654a5b390566336956f65765a8aa58c362bb841" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/30c/575a5306f8e6f/hf_xet-1.1.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:30c575a5306f8e6fda37edb866762140a435037365eba7a17ce7bd0bc0216a8b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7c1/a6aa6abed1f69/hf_xet-1.1.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7c1a6aa6abed1f696f8099aa9796ca04c9ee778a58728a115607de9cc4638ff1" }, + { url = "https://files.pythonhosted.org/packages/52/48/e929e6e3db6e4758c2adf0f2ca2c59287f1b76229d8bdc1a4c9cfc05212e/hf_xet-1.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd2da210856444a34aad8ada2fc12f70dabed7cc20f37e90754d1d9b43bc0534", size = 4820519, upload-time = "2025-06-04T00:47:17.244Z" }, + { url = "https://files.pythonhosted.org/packages/28/2e/03f89c5014a5aafaa9b150655f811798a317036646623bdaace25f485ae8/hf_xet-1.1.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8203f52827e3df65981984936654a5b390566336956f65765a8aa58c362bb841", size = 4964121, upload-time = "2025-06-04T00:47:15.17Z" }, + { url = "https://files.pythonhosted.org/packages/47/8b/5cd399a92b47d98086f55fc72d69bc9ea5e5c6f27a9ed3e0cdd6be4e58a3/hf_xet-1.1.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:30c575a5306f8e6fda37edb866762140a435037365eba7a17ce7bd0bc0216a8b", size = 5283017, upload-time = "2025-06-04T00:47:23.239Z" }, + { url = "https://files.pythonhosted.org/packages/53/e3/2fcec58d2fcfd25ff07feb876f466cfa11f8dcf9d3b742c07fe9dd51ee0a/hf_xet-1.1.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7c1a6aa6abed1f696f8099aa9796ca04c9ee778a58728a115607de9cc4638ff1", size = 4970349, upload-time = "2025-06-04T00:47:25.383Z" }, ] [[package]] name = "huggingface-hub" version = "0.32.4" -source = { registry = 
"https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "fsspec", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -675,33 +675,33 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "typing-extensions", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f61/d45cd338736f5/huggingface_hub-0.32.4.tar.gz", hash = "sha256:f61d45cd338736f59fb0e97550b74c24ee771bcc92c05ae0766b9116abe720be" } +sdist = { url = "https://files.pythonhosted.org/packages/60/c8/4f7d270285c46324fd66f62159eb16739aa5696f422dba57678a8c6b78e9/huggingface_hub-0.32.4.tar.gz", hash = "sha256:f61d45cd338736f59fb0e97550b74c24ee771bcc92c05ae0766b9116abe720be", size = 424494, upload-time = "2025-06-03T09:59:46.105Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/37a/bf8826b38d971/huggingface_hub-0.32.4-py3-none-any.whl", hash = "sha256:37abf8826b38d971f60d3625229221c36e53fe58060286db9baf619cfbf39767" }, + { url = "https://files.pythonhosted.org/packages/67/8b/222140f3cfb6f17b0dd8c4b9a0b36bd4ebefe9fb0098ba35d6960abcda0f/huggingface_hub-0.32.4-py3-none-any.whl", hash = "sha256:37abf8826b38d971f60d3625229221c36e53fe58060286db9baf619cfbf39767", size = 512101, upload-time = "2025-06-03T09:59:44.099Z" }, ] [[package]] name = "humanfriendly" version = "10.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6b0/b831ce8f15f73/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/169/7e1a8a8f550fd/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477" }, + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, ] [[package]] name = "identify" version = "2.6.12" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d8d/e45749f1efb10/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ad9/672d5a72e0d2f/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2" }, + { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" }, ] [[package]] name = "idna" version = "3.10" -source = { 
registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ { url = "https://download.pytorch.org/whl/nightly/idna-3.10-py3-none-any.whl" }, ] @@ -709,37 +709,37 @@ wheels = [ [[package]] name = "importlib-metadata" version = "8.7.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d13/b81ad223b890a/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000" } +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e5d/d1551894c7786/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd" }, + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, ] [[package]] name = "iniconfig" version = "2.1.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3ab/bd2e30b36733f/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9de/ba5723312380e/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" }, + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] [[package]] name = "isort" version = "6.0.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1cb/5df28dfbc742e/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955, upload-time = "2025-02-26T21:13:16.955Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2dc/5d7f65c9678d9/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615" }, + { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186, upload-time = "2025-02-26T21:13:14.911Z" }, ] [[package]] name = "jinja2" version = "3.1.6" -source = { registry = 
"https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ { name = "markupsafe", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] @@ -750,84 +750,84 @@ wheels = [ [[package]] name = "mako" version = "1.3.10" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/995/79a6f39583fa7/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bae/f24a52fc4fc51/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59" }, + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, ] [[package]] name = "markdown-it-py" version = "3.0.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e3f/60a94fa066dc5/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" } +sdist = { url 
= "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/355/216845c60bd96/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1" }, + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, ] [[package]] name = "markupsafe" version = "3.0.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ee5/5d3edf80167e4/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/38a/9ef736c01fccd/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bbc/b445fa71794da/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/57c/b5a3cf367aeb1/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/380/9ede931876f5b/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e07/c3764494e3776/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b42/4c77b206d63d5/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2cb/8438c3cbb25e2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a12/3e330ef0853c6/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1e0/84f686b92e5b8/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d82/13e09c917a951/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5b0/2fb34468b6aaa/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0bf/f5e0ae4ef2e1a/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/1c9/9d261bd2d5f6b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e17/c96c14e192785/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/884/16bd1e65dcea1/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/218/1e67807fc2fa7/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/523/05740fe773d09/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ad1/0d3ded218f103/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/569/511d3b58c8791/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/15a/b75ef81add558/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/f38/18cb119498c06/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cdb/82a876c47801b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cab/c348d87e913db/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/444/dcda765c8a838/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4aa/4e5faecf353ed/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c0e/f13eaeee5b615/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d16/a81a06776313e/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/638/1026f158fdb7c/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3d7/9d162e7be8f99/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/131/a3c7689c85f5a/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1a9/d3f5f0901fdec/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/88b/49a3b9ff31e19/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cfa/d01eed2c2e0c0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/122/5beacc926f536/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/316/9b1eefae02756/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eb7/972a85c54febf/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178" }, +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { 
url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, ] [[package]] name = "mdurl" version = 
"0.1.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bb4/13d29f5eea38f/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/840/08a41e51615a4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8" }, + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] [[package]] name = "mpmath" version = "1.3.0" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ { url = "https://download.pytorch.org/whl/nightly/mpmath-1.3.0-py3-none-any.whl" }, ] @@ -835,91 +835,91 @@ wheels = [ [[package]] name = "multidict" version = "6.4.4" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/69e/e9e6ba214b524/multidict-6.4.4.tar.gz", hash = "sha256:69ee9e6ba214b5245031b76233dd95408a0fd57fdb019ddcc1ead4790932a8e8" } -wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/4ff/c3c6a37e048b5/multidict-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc3c6a37e048b5395ee235e4a2a0d639c2349dffa32d9367a42fc20d399772" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/87c/b72263946b301/multidict-6.4.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87cb72263946b301570b0f63855569a24ee8758aaae2cd182aae7d95fbc92ca7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9bb/f7bd39822fd07/multidict-6.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bbf7bd39822fd07e3609b6b4467af4c404dd2b88ee314837ad1830a7f4a8299" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d1f/7cbd4f1f44ddf/multidict-6.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1f7cbd4f1f44ddf5fd86a8675b7679176eae770f2fc88115d6dddb6cefb59bc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bb5/ac9e5bfce0e62/multidict-6.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5ac9e5bfce0e6282e7f59ff7b7b9a74aa8e5c60d38186a4637f5aa764046ad" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4ef/c31dfef8c4eeb/multidict-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4efc31dfef8c4eeb95b6b17d799eedad88c4902daba39ce637e23a17ea078915" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9fc/ad2945b1b91c2/multidict-6.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9fcad2945b1b91c29ef2b4050f590bfcb68d8ac8e0995a74e659aa57e8d78e01" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d87/7447e7368c732/multidict-6.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d877447e7368c7320832acb7159557e49b21ea10ffeb135c1077dbbc0816b598" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/33a/12ebac9f38071/multidict-6.4.4-cp310-cp310-musllinux_1_2_i686.whl", 
hash = "sha256:33a12ebac9f380714c298cbfd3e5b9c0c4e89c75fe612ae496512ee51028915f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0f1/4ea68d29b43a9/multidict-6.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0f14ea68d29b43a9bf37953881b1e3eb75b2739e896ba4a6aa4ad4c5b9ffa145" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/032/7ad2c747a6600/multidict-6.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0327ad2c747a6600e4797d115d3c38a220fdb28e54983abe8964fd17e95ae83c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d1a/20707492db971/multidict-6.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d1a20707492db9719a05fc62ee215fd2c29b22b47c1b1ba347f9abc831e26683" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5e0/ba18a9afd495f/multidict-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e0ba18a9afd495f17c351d08ebbc4284e9c9f7971d715f196b79636a4d0de44" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9fa/f1b1dcaadf9f9/multidict-6.4.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9faf1b1dcaadf9f900d23a0e6d6c8eadd6a95795a0e57fcca73acce0eb912065" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a4d/1cb1327c6082c/multidict-6.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a4d1cb1327c6082c4fce4e2a438483390964c02213bc6b8d782cf782c9b1471f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/941/f1bec2f5dbd51/multidict-6.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:941f1bec2f5dbd51feeb40aea654c2747f811ab01bdd3422a48a4e4576b7d76a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e5f/8a146184da7ea/multidict-6.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5f8a146184da7ea12910a4cec51ef85e44f6268467fb489c3caf0cd512f29c2" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/232/b7237e57ec3c0/multidict-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232b7237e57ec3c09be97206bfb83a0aa1c5d7d377faa019c68a210fa35831f1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/55a/e0721c1513e5e/multidict-6.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:55ae0721c1513e5e3210bca4fc98456b980b0c2c016679d3d723119b6b202c42" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/51d/662c072579f63/multidict-6.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:51d662c072579f63137919d7bb8fc250655ce79f00c82ecf11cab678f335062e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0e0/5c39962baa0bb/multidict-6.4.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0e05c39962baa0bb19a6b210e9b1422c35c093b651d64246b6c2e1a7e242d9fd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d5b/1cc3ab8c31d9e/multidict-6.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5b1cc3ab8c31d9ebf0faa6e3540fb91257590da330ffe6d2393d4208e638925" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/93e/c84488a384cd7/multidict-6.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:93ec84488a384cd7b8a29c2c7f467137d8a73f6fe38bb810ecf29d1ade011a7c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b30/8402608493638/multidict-6.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b308402608493638763abc95f9dc0030bbd6ac6aff784512e8ac3da73a88af08" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/169/c4ba7858176b7/multidict-6.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:169c4ba7858176b797fe551d6e99040c531c775d2d57b31bcf4de6d7a669847f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b9e/b4c59c54421a3/multidict-6.4.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:b9eb4c59c54421a32b3273d4239865cb14ead53a606db066d7130ac80cc8ec93" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7cf/3bd54c56aa16f/multidict-6.4.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cf3bd54c56aa16fdb40028d545eaa8d051402b61533c21e84046e05513d5780" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f68/2c42003c72641/multidict-6.4.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f682c42003c7264134bfe886376299db4cc0c6cd06a3295b41b347044bcb5482" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a92/0f9cf2abdf6e4/multidict-6.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920f9cf2abdf6e493c519492d892c362007f113c94da4c239ae88429835bad1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/530/d86827a2df650/multidict-6.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:530d86827a2df6504526106b4c104ba19044594f8722d3e87714e847c74a0275" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ecd/e56ea2439b96e/multidict-6.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ecde56ea2439b96ed8a8d826b50c57364612ddac0438c39e473fafad7ae1c23b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dc8/c9736d8574b56/multidict-6.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:dc8c9736d8574b560634775ac0def6bdc1661fc63fa27ffdfc7264c565bcb4f2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7f3/d3b3c34867579/multidict-6.4.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7f3d3b3c34867579ea47cbd6c1f2ce23fbfd20a273b6f9e3177e256584f1eacc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/87a/728af265e08f9/multidict-6.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:87a728af265e08f96b6318ebe3c0f68b9335131f461efab2fc64cc84a44aa6ed" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/9f1/93eeda1857f8e/multidict-6.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9f193eeda1857f8e8d3079a4abd258f42ef4a4bc87388452ed1e1c4d2b0c8740" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/be0/6e73c06415199/multidict-6.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be06e73c06415199200e9a2324a11252a3d62030319919cde5e6950ffeccf72e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9ab/cf56a9511653f/multidict-6.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9abcf56a9511653fa1d052bfc55fbe53dbee8f34e68bd6a5a038731b0ca42d15" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6ed/5ae5605d4ad5a/multidict-6.4.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6ed5ae5605d4ad5a049fad2a28bb7193400700ce2f4ae484ab702d1e3749c3f9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bbf/cb60396f9bcfa/multidict-6.4.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbfcb60396f9bcfa63e017a180c3105b8c123a63e9d1428a36544e7d37ca9e20" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b0f/1987787f5f1e2/multidict-6.4.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0f1987787f5f1e2076b59692352ab29a955b09ccc433c1f6b8e8e18666f608b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1d0/121ccce8c8120/multidict-6.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0121ccce8c812047d8d43d691a1ad7641f72c4f730474878a5aeae1b8ead8c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/83e/c4967114295b8/multidict-6.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83ec4967114295b8afd120a8eec579920c882831a3e4c3331d591a8e5bfbbc0f" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/995/f985e2e268dea/multidict-6.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:995f985e2e268deaf17867801b859a282e0448633f1310e3704b30616d269d69" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d83/2c608f94b9f92/multidict-6.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:d832c608f94b9f92a0ec8b7e949be7792a642b6e535fcf32f3e28fab69eeb046" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d21/c1212171cf7da/multidict-6.4.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d21c1212171cf7da703c5b0b7a0e85be23b720818aef502ad187d627316d5645" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cbe/baa076aaecad3/multidict-6.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cbebaa076aaecad3d4bb4c008ecc73b09274c952cf6a1b78ccfd689e51f5a5b0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c93/a6fb06cc8e5d3/multidict-6.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c93a6fb06cc8e5d3628b2b5fda215a5db01e8f08fc15fadd65662d9b857acbe4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8cd/8f81f13101823/multidict-6.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8cd8f81f1310182362fb0c7898145ea9c9b08a71081c5963b40ee3e3cac589b1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/508/55d03e9e4d66e/multidict-6.4.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50855d03e9e4d66eab6947ba688ffb714616f985838077bc4b490e769e48da51" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5bc/e06b83be23225/multidict-6.4.4-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5bce06b83be23225be1905dcdb6b789064fae92499fbc458f59a8c0e68718601" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/66e/d0731f8e5dfd8/multidict-6.4.4-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66ed0731f8e5dfd8369a883b6e564aca085fb9289aacabd9decd70568b9a30de" }, - { url 
= "https://pypi.jetson-ai-lab.dev/root/pypi/+f/329/ae97fc2f56f44/multidict-6.4.4-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:329ae97fc2f56f44d91bc47fe0972b1f52d21c4b7a2ac97040da02577e2daca2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c27/e5dcf520923d6/multidict-6.4.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c27e5dcf520923d6474d98b96749e6805f7677e93aaaf62656005b8643f907ab" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/058/cc59b9e9b143c/multidict-6.4.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:058cc59b9e9b143cc56715e59e22941a5d868c322242278d28123a5d09cdf6b0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/691/33376bc9a03f8/multidict-6.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:69133376bc9a03f8c47343d33f91f74a99c339e8b58cea90433d8e24bb298031" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d6b/15c55721b1b11/multidict-6.4.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d6b15c55721b1b115c5ba178c77104123745b1417527ad9641a4c5e2047450f0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a88/7b77f51d3d41e/multidict-6.4.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a887b77f51d3d41e6e1a63cf3bc7ddf24de5939d9ff69441387dfefa58ac2e26" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/632/a3bf8f1787f7e/multidict-6.4.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:632a3bf8f1787f7ef7d3c2f68a7bde5be2f702906f8b5842ad6da9d974d0aab3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a14/5c550900deb75/multidict-6.4.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a145c550900deb7540973c5cdb183b0d24bed6b80bf7bddf33ed8f569082535e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cc5/d83c6619ca5c9/multidict-6.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc5d83c6619ca5c9672cb78b39ed8542f1975a803dee2cda114ff73cbb076edd" 
}, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/421/9390fb5bf8e54/multidict-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4219390fb5bf8e548e77b428bb36a21d9382960db5321b74d9d9987148074d6b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3ef/4e9096ff86dfd/multidict-6.4.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef4e9096ff86dfdcbd4a78253090ba13b1d183daa11b973e842465d94ae1772" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/49a/29d7133b1fc21/multidict-6.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49a29d7133b1fc214e818bbe025a77cc6025ed9a4f407d2850373ddde07fd04a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e32/053d6d3a8b0df/multidict-6.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e32053d6d3a8b0dfe49fde05b496731a0e6099a4df92154641c00aa76786aef5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8cc/403092a49509e/multidict-6.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc403092a49509e8ef2d2fd636a8ecefc4698cc57bbe894606b14579bc2a955" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/536/3f9b2a7f3910e/multidict-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5363f9b2a7f3910e5c87d8b1855c478c05a2dc559ac57308117424dfaad6805c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2e5/43a40e4946cf7/multidict-6.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e543a40e4946cf70a88a3be87837a3ae0aebd9058ba49e91cacb0b2cd631e2b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/60d/849912350da55/multidict-6.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:60d849912350da557fe7de20aa8cf394aada6980d0052cc829eeda4a0db1c1db" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/19d/08b4f22eae45b/multidict-6.4.4-cp39-cp39-musllinux_1_2_i686.whl", hash 
= "sha256:19d08b4f22eae45bb018b9f06e2838c1e4b853c67628ef8ae126d99de0da6395" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d69/3307856d1ef08/multidict-6.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d693307856d1ef08041e8b6ff01d5b4618715007d288490ce2c7e29013c12b9a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fad/6daaed4102193/multidict-6.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fad6daaed41021934917f4fb03ca2db8d8a4d79bf89b17ebe77228eb6710c003" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c10/d17371bff801a/multidict-6.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c10d17371bff801af0daf8b073c30b6cf14215784dc08cd5c43ab5b7b8029bbc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bd4/557071b561a8b/multidict-6.4.4-py3-none-any.whl", hash = "sha256:bd4557071b561a8b3b6075c3ce93cf9bfb6182cb241805c3d66ced3b75eff4ac" }, +sdist = { url = "https://files.pythonhosted.org/packages/91/2f/a3470242707058fe856fe59241eee5635d79087100b7042a867368863a27/multidict-6.4.4.tar.gz", hash = "sha256:69ee9e6ba214b5245031b76233dd95408a0fd57fdb019ddcc1ead4790932a8e8", size = 90183, upload-time = "2025-05-19T14:16:37.381Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/8b/fbd9c0fc13966efdb4a47f5bcffff67a4f2a3189fbeead5766eaa4250b20/multidict-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc3c6a37e048b5395ee235e4a2a0d639c2349dffa32d9367a42fc20d399772", size = 220433, upload-time = "2025-05-19T14:13:55.346Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c4/5132b2d75b3ea2daedb14d10f91028f09f74f5b4d373b242c1b8eec47571/multidict-6.4.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87cb72263946b301570b0f63855569a24ee8758aaae2cd182aae7d95fbc92ca7", size = 218059, upload-time = "2025-05-19T14:13:56.993Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/70/f1e818c7a29b908e2d7b4fafb1d7939a41c64868e79de2982eea0a13193f/multidict-6.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bbf7bd39822fd07e3609b6b4467af4c404dd2b88ee314837ad1830a7f4a8299", size = 231120, upload-time = "2025-05-19T14:13:58.333Z" }, + { url = "https://files.pythonhosted.org/packages/b4/7e/95a194d85f27d5ef9cbe48dff9ded722fc6d12fedf641ec6e1e680890be7/multidict-6.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1f7cbd4f1f44ddf5fd86a8675b7679176eae770f2fc88115d6dddb6cefb59bc", size = 227457, upload-time = "2025-05-19T14:13:59.663Z" }, + { url = "https://files.pythonhosted.org/packages/25/2b/590ad220968d1babb42f265debe7be5c5c616df6c5688c995a06d8a9b025/multidict-6.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5ac9e5bfce0e6282e7f59ff7b7b9a74aa8e5c60d38186a4637f5aa764046ad", size = 219111, upload-time = "2025-05-19T14:14:01.019Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f0/b07682b995d3fb5313f339b59d7de02db19ba0c02d1f77c27bdf8212d17c/multidict-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4efc31dfef8c4eeb95b6b17d799eedad88c4902daba39ce637e23a17ea078915", size = 213012, upload-time = "2025-05-19T14:14:02.396Z" }, + { url = "https://files.pythonhosted.org/packages/24/56/c77b5f36feef2ec92f1119756e468ac9c3eebc35aa8a4c9e51df664cbbc9/multidict-6.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9fcad2945b1b91c29ef2b4050f590bfcb68d8ac8e0995a74e659aa57e8d78e01", size = 225408, upload-time = "2025-05-19T14:14:04.826Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b3/e8189b82af9b198b47bc637766208fc917189eea91d674bad417e657bbdf/multidict-6.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d877447e7368c7320832acb7159557e49b21ea10ffeb135c1077dbbc0816b598", size = 214396, upload-time = "2025-05-19T14:14:06.187Z" }, + { 
url = "https://files.pythonhosted.org/packages/20/e0/200d14c84e35ae13ee99fd65dc106e1a1acb87a301f15e906fc7d5b30c17/multidict-6.4.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:33a12ebac9f380714c298cbfd3e5b9c0c4e89c75fe612ae496512ee51028915f", size = 222237, upload-time = "2025-05-19T14:14:07.778Z" }, + { url = "https://files.pythonhosted.org/packages/13/f3/bb3df40045ca8262694a3245298732ff431dc781414a89a6a364ebac6840/multidict-6.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0f14ea68d29b43a9bf37953881b1e3eb75b2739e896ba4a6aa4ad4c5b9ffa145", size = 231425, upload-time = "2025-05-19T14:14:09.516Z" }, + { url = "https://files.pythonhosted.org/packages/85/3b/538563dc18514384dac169bcba938753ad9ab4d4c8d49b55d6ae49fb2579/multidict-6.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0327ad2c747a6600e4797d115d3c38a220fdb28e54983abe8964fd17e95ae83c", size = 226251, upload-time = "2025-05-19T14:14:10.82Z" }, + { url = "https://files.pythonhosted.org/packages/56/79/77e1a65513f09142358f1beb1d4cbc06898590b34a7de2e47023e3c5a3a2/multidict-6.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d1a20707492db9719a05fc62ee215fd2c29b22b47c1b1ba347f9abc831e26683", size = 220363, upload-time = "2025-05-19T14:14:12.638Z" }, + { url = "https://files.pythonhosted.org/packages/3b/37/cbc977cae59277e99d15bbda84cc53b5e0c4929ffd91d958347200a42ad0/multidict-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e0ba18a9afd495f17c351d08ebbc4284e9c9f7971d715f196b79636a4d0de44", size = 226661, upload-time = "2025-05-19T14:14:24.124Z" }, + { url = "https://files.pythonhosted.org/packages/15/cd/7e0b57fbd4dc2fc105169c4ecce5be1a63970f23bb4ec8c721b67e11953d/multidict-6.4.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9faf1b1dcaadf9f900d23a0e6d6c8eadd6a95795a0e57fcca73acce0eb912065", size = 223422, upload-time = "2025-05-19T14:14:25.437Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/01/1de268da121bac9f93242e30cd3286f6a819e5f0b8896511162d6ed4bf8d/multidict-6.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a4d1cb1327c6082c4fce4e2a438483390964c02213bc6b8d782cf782c9b1471f", size = 235447, upload-time = "2025-05-19T14:14:26.793Z" }, + { url = "https://files.pythonhosted.org/packages/d2/8c/8b9a5e4aaaf4f2de14e86181a3a3d7b105077f668b6a06f043ec794f684c/multidict-6.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:941f1bec2f5dbd51feeb40aea654c2747f811ab01bdd3422a48a4e4576b7d76a", size = 231455, upload-time = "2025-05-19T14:14:28.149Z" }, + { url = "https://files.pythonhosted.org/packages/35/db/e1817dcbaa10b319c412769cf999b1016890849245d38905b73e9c286862/multidict-6.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5f8a146184da7ea12910a4cec51ef85e44f6268467fb489c3caf0cd512f29c2", size = 223666, upload-time = "2025-05-19T14:14:29.584Z" }, + { url = "https://files.pythonhosted.org/packages/4a/e1/66e8579290ade8a00e0126b3d9a93029033ffd84f0e697d457ed1814d0fc/multidict-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232b7237e57ec3c09be97206bfb83a0aa1c5d7d377faa019c68a210fa35831f1", size = 217392, upload-time = "2025-05-19T14:14:30.961Z" }, + { url = "https://files.pythonhosted.org/packages/7b/6f/f8639326069c24a48c7747c2a5485d37847e142a3f741ff3340c88060a9a/multidict-6.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:55ae0721c1513e5e3210bca4fc98456b980b0c2c016679d3d723119b6b202c42", size = 228969, upload-time = "2025-05-19T14:14:32.672Z" }, + { url = "https://files.pythonhosted.org/packages/d2/c3/3d58182f76b960eeade51c89fcdce450f93379340457a328e132e2f8f9ed/multidict-6.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:51d662c072579f63137919d7bb8fc250655ce79f00c82ecf11cab678f335062e", size = 217433, upload-time = "2025-05-19T14:14:34.016Z" }, + { 
url = "https://files.pythonhosted.org/packages/e1/4b/f31a562906f3bd375f3d0e83ce314e4a660c01b16c2923e8229b53fba5d7/multidict-6.4.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0e05c39962baa0bb19a6b210e9b1422c35c093b651d64246b6c2e1a7e242d9fd", size = 225418, upload-time = "2025-05-19T14:14:35.376Z" }, + { url = "https://files.pythonhosted.org/packages/99/89/78bb95c89c496d64b5798434a3deee21996114d4d2c28dd65850bf3a691e/multidict-6.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5b1cc3ab8c31d9ebf0faa6e3540fb91257590da330ffe6d2393d4208e638925", size = 235042, upload-time = "2025-05-19T14:14:36.723Z" }, + { url = "https://files.pythonhosted.org/packages/74/91/8780a6e5885a8770442a8f80db86a0887c4becca0e5a2282ba2cae702bc4/multidict-6.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:93ec84488a384cd7b8a29c2c7f467137d8a73f6fe38bb810ecf29d1ade011a7c", size = 230280, upload-time = "2025-05-19T14:14:38.194Z" }, + { url = "https://files.pythonhosted.org/packages/68/c1/fcf69cabd542eb6f4b892469e033567ee6991d361d77abdc55e3a0f48349/multidict-6.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b308402608493638763abc95f9dc0030bbd6ac6aff784512e8ac3da73a88af08", size = 223322, upload-time = "2025-05-19T14:14:40.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/e0/c29d9d462d7cfc5fc8f9bf24f9c6843b40e953c0b55e04eba2ad2cf54fba/multidict-6.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:169c4ba7858176b797fe551d6e99040c531c775d2d57b31bcf4de6d7a669847f", size = 224686, upload-time = "2025-05-19T14:14:48.366Z" }, + { url = "https://files.pythonhosted.org/packages/dc/4a/da99398d7fd8210d9de068f9a1b5f96dfaf67d51e3f2521f17cba4ee1012/multidict-6.4.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9eb4c59c54421a32b3273d4239865cb14ead53a606db066d7130ac80cc8ec93", size = 231061, upload-time = "2025-05-19T14:14:49.952Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/f5/ac11add39a0f447ac89353e6ca46666847051103649831c08a2800a14455/multidict-6.4.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cf3bd54c56aa16fdb40028d545eaa8d051402b61533c21e84046e05513d5780", size = 232412, upload-time = "2025-05-19T14:14:51.812Z" }, + { url = "https://files.pythonhosted.org/packages/d9/11/4b551e2110cded705a3c13a1d4b6a11f73891eb5a1c449f1b2b6259e58a6/multidict-6.4.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f682c42003c7264134bfe886376299db4cc0c6cd06a3295b41b347044bcb5482", size = 231563, upload-time = "2025-05-19T14:14:53.262Z" }, + { url = "https://files.pythonhosted.org/packages/4c/02/751530c19e78fe73b24c3da66618eda0aa0d7f6e7aa512e46483de6be210/multidict-6.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920f9cf2abdf6e493c519492d892c362007f113c94da4c239ae88429835bad1", size = 223811, upload-time = "2025-05-19T14:14:55.232Z" }, + { url = "https://files.pythonhosted.org/packages/c7/cb/2be8a214643056289e51ca356026c7b2ce7225373e7a1f8c8715efee8988/multidict-6.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:530d86827a2df6504526106b4c104ba19044594f8722d3e87714e847c74a0275", size = 216524, upload-time = "2025-05-19T14:14:57.226Z" }, + { url = "https://files.pythonhosted.org/packages/19/f3/6d5011ec375c09081f5250af58de85f172bfcaafebff286d8089243c4bd4/multidict-6.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ecde56ea2439b96ed8a8d826b50c57364612ddac0438c39e473fafad7ae1c23b", size = 229012, upload-time = "2025-05-19T14:14:58.597Z" }, + { url = "https://files.pythonhosted.org/packages/67/9c/ca510785df5cf0eaf5b2a8132d7d04c1ce058dcf2c16233e596ce37a7f8e/multidict-6.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:dc8c9736d8574b560634775ac0def6bdc1661fc63fa27ffdfc7264c565bcb4f2", size = 226765, upload-time = "2025-05-19T14:15:00.048Z" }, + { 
url = "https://files.pythonhosted.org/packages/36/c8/ca86019994e92a0f11e642bda31265854e6ea7b235642f0477e8c2e25c1f/multidict-6.4.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7f3d3b3c34867579ea47cbd6c1f2ce23fbfd20a273b6f9e3177e256584f1eacc", size = 222888, upload-time = "2025-05-19T14:15:01.568Z" }, + { url = "https://files.pythonhosted.org/packages/c6/67/bc25a8e8bd522935379066950ec4e2277f9b236162a73548a2576d4b9587/multidict-6.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:87a728af265e08f96b6318ebe3c0f68b9335131f461efab2fc64cc84a44aa6ed", size = 234041, upload-time = "2025-05-19T14:15:03.759Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a0/70c4c2d12857fccbe607b334b7ee28b6b5326c322ca8f73ee54e70d76484/multidict-6.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9f193eeda1857f8e8d3079a4abd258f42ef4a4bc87388452ed1e1c4d2b0c8740", size = 231046, upload-time = "2025-05-19T14:15:05.698Z" }, + { url = "https://files.pythonhosted.org/packages/c1/0f/52954601d02d39742aab01d6b92f53c1dd38b2392248154c50797b4df7f1/multidict-6.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be06e73c06415199200e9a2324a11252a3d62030319919cde5e6950ffeccf72e", size = 227106, upload-time = "2025-05-19T14:15:07.124Z" }, + { url = "https://files.pythonhosted.org/packages/fa/70/1af3143000eddfb19fd5ca5e78393985ed988ac493bb859800fe0914041f/multidict-6.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9abcf56a9511653fa1d052bfc55fbe53dbee8f34e68bd6a5a038731b0ca42d15", size = 224097, upload-time = "2025-05-19T14:15:15.566Z" }, + { url = "https://files.pythonhosted.org/packages/b1/39/d570c62b53d4fba844e0378ffbcd02ac25ca423d3235047013ba2f6f60f8/multidict-6.4.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6ed5ae5605d4ad5a049fad2a28bb7193400700ce2f4ae484ab702d1e3749c3f9", size = 230768, upload-time = "2025-05-19T14:15:17.308Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/f8/ed88f2c4d06f752b015933055eb291d9bc184936903752c66f68fb3c95a7/multidict-6.4.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbfcb60396f9bcfa63e017a180c3105b8c123a63e9d1428a36544e7d37ca9e20", size = 231331, upload-time = "2025-05-19T14:15:18.73Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/8e07cffa32f483ab887b0d56bbd8747ac2c1acd00dc0af6fcf265f4a121e/multidict-6.4.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0f1987787f5f1e2076b59692352ab29a955b09ccc433c1f6b8e8e18666f608b", size = 230169, upload-time = "2025-05-19T14:15:20.179Z" }, + { url = "https://files.pythonhosted.org/packages/e6/2b/5dcf173be15e42f330110875a2668ddfc208afc4229097312212dc9c1236/multidict-6.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0121ccce8c812047d8d43d691a1ad7641f72c4f730474878a5aeae1b8ead8c", size = 222947, upload-time = "2025-05-19T14:15:21.714Z" }, + { url = "https://files.pythonhosted.org/packages/39/75/4ddcbcebe5ebcd6faa770b629260d15840a5fc07ce8ad295a32e14993726/multidict-6.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83ec4967114295b8afd120a8eec579920c882831a3e4c3331d591a8e5bfbbc0f", size = 215761, upload-time = "2025-05-19T14:15:23.242Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c9/55e998ae45ff15c5608e384206aa71a11e1b7f48b64d166db400b14a3433/multidict-6.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:995f985e2e268deaf17867801b859a282e0448633f1310e3704b30616d269d69", size = 227605, upload-time = "2025-05-19T14:15:24.763Z" }, + { url = "https://files.pythonhosted.org/packages/04/49/c2404eac74497503c77071bd2e6f88c7e94092b8a07601536b8dbe99be50/multidict-6.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:d832c608f94b9f92a0ec8b7e949be7792a642b6e535fcf32f3e28fab69eeb046", size = 226144, upload-time = "2025-05-19T14:15:26.249Z" }, + { 
url = "https://files.pythonhosted.org/packages/62/c5/0cd0c3c6f18864c40846aa2252cd69d308699cb163e1c0d989ca301684da/multidict-6.4.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d21c1212171cf7da703c5b0b7a0e85be23b720818aef502ad187d627316d5645", size = 221100, upload-time = "2025-05-19T14:15:28.303Z" }, + { url = "https://files.pythonhosted.org/packages/71/7b/f2f3887bea71739a046d601ef10e689528d4f911d84da873b6be9194ffea/multidict-6.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cbebaa076aaecad3d4bb4c008ecc73b09274c952cf6a1b78ccfd689e51f5a5b0", size = 232731, upload-time = "2025-05-19T14:15:30.263Z" }, + { url = "https://files.pythonhosted.org/packages/e5/b3/d9de808349df97fa75ec1372758701b5800ebad3c46ae377ad63058fbcc6/multidict-6.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c93a6fb06cc8e5d3628b2b5fda215a5db01e8f08fc15fadd65662d9b857acbe4", size = 229637, upload-time = "2025-05-19T14:15:33.337Z" }, + { url = "https://files.pythonhosted.org/packages/5e/57/13207c16b615eb4f1745b44806a96026ef8e1b694008a58226c2d8f5f0a5/multidict-6.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8cd8f81f1310182362fb0c7898145ea9c9b08a71081c5963b40ee3e3cac589b1", size = 225594, upload-time = "2025-05-19T14:15:34.832Z" }, + { url = "https://files.pythonhosted.org/packages/18/28/a554678898a19583548e742080cf55d169733baf57efc48c2f0273a08583/multidict-6.4.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50855d03e9e4d66eab6947ba688ffb714616f985838077bc4b490e769e48da51", size = 226071, upload-time = "2025-05-19T14:15:42.877Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/7ba6c789d05c310e294f85329efac1bf5b450338d2542498db1491a264df/multidict-6.4.4-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5bce06b83be23225be1905dcdb6b789064fae92499fbc458f59a8c0e68718601", size = 222597, upload-time = "2025-05-19T14:15:44.412Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/4f/34eadbbf401b03768dba439be0fb94b0d187facae9142821a3d5599ccb3b/multidict-6.4.4-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66ed0731f8e5dfd8369a883b6e564aca085fb9289aacabd9decd70568b9a30de", size = 228253, upload-time = "2025-05-19T14:15:46.474Z" }, + { url = "https://files.pythonhosted.org/packages/c0/e6/493225a3cdb0d8d80d43a94503fc313536a07dae54a3f030d279e629a2bc/multidict-6.4.4-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:329ae97fc2f56f44d91bc47fe0972b1f52d21c4b7a2ac97040da02577e2daca2", size = 226146, upload-time = "2025-05-19T14:15:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/2f/70/e411a7254dc3bff6f7e6e004303b1b0591358e9f0b7c08639941e0de8bd6/multidict-6.4.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c27e5dcf520923d6474d98b96749e6805f7677e93aaaf62656005b8643f907ab", size = 220585, upload-time = "2025-05-19T14:15:49.546Z" }, + { url = "https://files.pythonhosted.org/packages/08/8f/beb3ae7406a619100d2b1fb0022c3bb55a8225ab53c5663648ba50dfcd56/multidict-6.4.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:058cc59b9e9b143cc56715e59e22941a5d868c322242278d28123a5d09cdf6b0", size = 212080, upload-time = "2025-05-19T14:15:51.151Z" }, + { url = "https://files.pythonhosted.org/packages/9c/ec/355124e9d3d01cf8edb072fd14947220f357e1c5bc79c88dff89297e9342/multidict-6.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:69133376bc9a03f8c47343d33f91f74a99c339e8b58cea90433d8e24bb298031", size = 226558, upload-time = "2025-05-19T14:15:52.665Z" }, + { url = "https://files.pythonhosted.org/packages/fd/22/d2b95cbebbc2ada3be3812ea9287dcc9712d7f1a012fad041770afddb2ad/multidict-6.4.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d6b15c55721b1b115c5ba178c77104123745b1417527ad9641a4c5e2047450f0", size = 212168, upload-time = "2025-05-19T14:15:55.279Z" }, 
+ { url = "https://files.pythonhosted.org/packages/4d/c5/62bfc0b2f9ce88326dbe7179f9824a939c6c7775b23b95de777267b9725c/multidict-6.4.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a887b77f51d3d41e6e1a63cf3bc7ddf24de5939d9ff69441387dfefa58ac2e26", size = 217970, upload-time = "2025-05-19T14:15:56.806Z" }, + { url = "https://files.pythonhosted.org/packages/79/74/977cea1aadc43ff1c75d23bd5bc4768a8fac98c14e5878d6ee8d6bab743c/multidict-6.4.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:632a3bf8f1787f7ef7d3c2f68a7bde5be2f702906f8b5842ad6da9d974d0aab3", size = 226980, upload-time = "2025-05-19T14:15:58.313Z" }, + { url = "https://files.pythonhosted.org/packages/48/fc/cc4a1a2049df2eb84006607dc428ff237af38e0fcecfdb8a29ca47b1566c/multidict-6.4.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a145c550900deb7540973c5cdb183b0d24bed6b80bf7bddf33ed8f569082535e", size = 220641, upload-time = "2025-05-19T14:15:59.866Z" }, + { url = "https://files.pythonhosted.org/packages/3b/6a/a7444d113ab918701988d4abdde373dbdfd2def7bd647207e2bf645c7eac/multidict-6.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc5d83c6619ca5c9672cb78b39ed8542f1975a803dee2cda114ff73cbb076edd", size = 221728, upload-time = "2025-05-19T14:16:01.535Z" }, + { url = "https://files.pythonhosted.org/packages/aa/20/3aee7910260e7b6f0045b6f48b97ebf041de0cab513c12f87cf6e4e514d3/multidict-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4219390fb5bf8e548e77b428bb36a21d9382960db5321b74d9d9987148074d6b", size = 218106, upload-time = "2025-05-19T14:16:10.962Z" }, + { url = "https://files.pythonhosted.org/packages/a9/79/15f5a65b8de8ae8f3c5da1591a322620675e4fec8d39995b04101d2b2e2c/multidict-6.4.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef4e9096ff86dfdcbd4a78253090ba13b1d183daa11b973e842465d94ae1772", size = 213817, upload-time = "2025-05-19T14:16:12.486Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/a7/90de36db90ce2936fbb1639ca51508965861a8ad5dc2947531d18f3363b9/multidict-6.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49a29d7133b1fc214e818bbe025a77cc6025ed9a4f407d2850373ddde07fd04a", size = 228133, upload-time = "2025-05-19T14:16:14.48Z" }, + { url = "https://files.pythonhosted.org/packages/df/25/5fcd66fda3c8b7d6d6f658a871017791c46824e965dfa20a4c46d4167ad4/multidict-6.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e32053d6d3a8b0dfe49fde05b496731a0e6099a4df92154641c00aa76786aef5", size = 224271, upload-time = "2025-05-19T14:16:16.314Z" }, + { url = "https://files.pythonhosted.org/packages/fd/9a/1011812091fd99b2dddd9d2dbde4b7d69bbf8070e0291fe49c3bb40c2d55/multidict-6.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc403092a49509e8ef2d2fd636a8ecefc4698cc57bbe894606b14579bc2a955", size = 216448, upload-time = "2025-05-19T14:16:18.263Z" }, + { url = "https://files.pythonhosted.org/packages/cf/cc/916e066b7e2686999f95dde87f588be26fa1c2f05e70d9fd472fe2289c0b/multidict-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5363f9b2a7f3910e5c87d8b1855c478c05a2dc559ac57308117424dfaad6805c", size = 210080, upload-time = "2025-05-19T14:16:20.326Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ff/15034b18f2e4179cd559aa13bc3b376a95c22e1fd7c3b88884e078ad5466/multidict-6.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e543a40e4946cf70a88a3be87837a3ae0aebd9058ba49e91cacb0b2cd631e2b", size = 221926, upload-time = "2025-05-19T14:16:22.227Z" }, + { url = "https://files.pythonhosted.org/packages/17/43/4243298a6b0b869a83b6331f3fcc12a2a0544c0995292ee96badf0fec6aa/multidict-6.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:60d849912350da557fe7de20aa8cf394aada6980d0052cc829eeda4a0db1c1db", size = 211318, upload-time = "2025-05-19T14:16:23.914Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/80/bc43c87d60138e401c7d1818a47e5a0f748904c9f3be99012cdab5e31446/multidict-6.4.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:19d08b4f22eae45bb018b9f06e2838c1e4b853c67628ef8ae126d99de0da6395", size = 217611, upload-time = "2025-05-19T14:16:25.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/5d/2ec94209254e48910911ac2404d71b37f06fd97ec83948a92d0c87a11d3c/multidict-6.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d693307856d1ef08041e8b6ff01d5b4618715007d288490ce2c7e29013c12b9a", size = 227893, upload-time = "2025-05-19T14:16:27.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/83/89344adc0cf08fd89d82d43de1a17a2635b03a57dfa680f6cdf2a24d481f/multidict-6.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fad6daaed41021934917f4fb03ca2db8d8a4d79bf89b17ebe77228eb6710c003", size = 221956, upload-time = "2025-05-19T14:16:29.307Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ea/81382bb59cd3a1047d1c2ea9339d2107fc918a63491bbb9399eb1aceda91/multidict-6.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c10d17371bff801af0daf8b073c30b6cf14215784dc08cd5c43ab5b7b8029bbc", size = 216850, upload-time = "2025-05-19T14:16:30.913Z" }, + { url = "https://files.pythonhosted.org/packages/84/5d/e17845bb0fa76334477d5de38654d27946d5b5d3695443987a094a71b440/multidict-6.4.4-py3-none-any.whl", hash = "sha256:bd4557071b561a8b3b6075c3ce93cf9bfb6182cb241805c3d66ced3b75eff4ac", size = 10481, upload-time = "2025-05-19T14:16:36.024Z" }, ] [[package]] name = "multiprocess" version = "0.70.16" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ { name = "dill", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] @@ -934,46 +934,46 @@ wheels = [ [[package]] name = "mypy" version = "1.16.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = 
{ registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pathspec", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'windows')" }, { name = "typing-extensions", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/84b/94283f817e2aa/mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/38/13c2f1abae94d5ea0354e146b95a1be9b2137a0d506728e0da037c4276f6/mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab", size = 3323139, upload-time = "2025-05-29T13:46:12.532Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/936/ccfdd749af476/mypy-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:936ccfdd749af4766be824268bfe22d1db9eb2f34a3ea1d00ffbe5b5265f5491" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/408/6883a73166631/mypy-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4086883a73166631307fdd330c4a9080ce24913d4f4c5ec596c601b3a4bdd777" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fee/c38097f71797d/mypy-1.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:feec38097f71797da0231997e0de3a58108c51845399669ebc532c815f93866b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/089/bedc02307c254/mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/6a2/322896003ba66/mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/021/a68568082c5b3/mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d04/5d33c284e10a0/mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b49/68f14f44c62e2/mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eb1/4a4a871bb8efb/mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c6f/b60cbd85dc65d/mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a7e/32297a437cc91/mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/afe/420c9380ccec3/mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/f2e/d0e0847a80655/mypy-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f2ed0e0847a80655afa2c121835b848ed101cc7b8d8d6ecc5205aedc732b1436" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eb5/fbc8063cb4fde/mypy-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb5fbc8063cb4fde7787e4c0406aa63094a34a2daf4673f359a1fb64050e9cb2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a5f/cfdb7318c6a8d/mypy-1.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a5fcfdb7318c6a8dd127b14b1052743b83e97a970f0edb6c913211507a255e20" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/29e/1499864a3888b/mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031" }, + { url = "https://files.pythonhosted.org/packages/29/59/5fd2400352c3093bed4c09017fe671d26bc5bb7e6ef2d4bf85f2a2488104/mypy-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:936ccfdd749af4766be824268bfe22d1db9eb2f34a3ea1d00ffbe5b5265f5491", size = 11875192, upload-time = "2025-05-29T13:34:54.281Z" }, + { url = "https://files.pythonhosted.org/packages/ad/3e/4bfec74663a64c2012f3e278dbc29ffe82b121bc551758590d1b6449ec0c/mypy-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4086883a73166631307fdd330c4a9080ce24913d4f4c5ec596c601b3a4bdd777", size = 12612939, upload-time = "2025-05-29T13:33:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/88/1f/fecbe3dcba4bf2ca34c26ca016383a9676711907f8db4da8354925cbb08f/mypy-1.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:feec38097f71797da0231997e0de3a58108c51845399669ebc532c815f93866b", size = 12874719, upload-time = "2025-05-29T13:21:52.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/9b/5fd5801a72b5d6fb6ec0105ea1d0e01ab2d4971893076e558d4b6d6b5f80/mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff", size = 11800138, upload-time = "2025-05-29T13:32:55.082Z" }, + { url = "https://files.pythonhosted.org/packages/2e/81/a117441ea5dfc3746431e51d78a4aca569c677aa225bca2cc05a7c239b61/mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666", size = 12533156, upload-time = "2025-05-29T13:19:12.963Z" }, + { url = "https://files.pythonhosted.org/packages/3f/38/88ec57c6c86014d3f06251e00f397b5a7daa6888884d0abf187e4f5f587f/mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c", size = 12742426, upload-time = "2025-05-29T13:20:22.72Z" }, + { url = "https://files.pythonhosted.org/packages/b3/7f/7242062ec6288c33d8ad89574df87c3903d394870e5e6ba1699317a65075/mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1", size = 11828306, upload-time = "2025-05-29T13:21:02.164Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5f/b392f7b4f659f5b619ce5994c5c43caab3d80df2296ae54fa888b3d17f5a/mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8", size = 12702764, upload-time = "2025-05-29T13:20:42.826Z" }, + { url = "https://files.pythonhosted.org/packages/9b/c0/7646ef3a00fa39ac9bc0938626d9ff29d19d733011be929cfea59d82d136/mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730", size = 12896233, 
upload-time = "2025-05-29T13:18:37.446Z" }, + { url = "https://files.pythonhosted.org/packages/14/9b/a943f09319167da0552d5cd722104096a9c99270719b1afeea60d11610aa/mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b", size = 11827764, upload-time = "2025-05-29T13:46:04.47Z" }, + { url = "https://files.pythonhosted.org/packages/ec/64/ff75e71c65a0cb6ee737287c7913ea155845a556c64144c65b811afdb9c7/mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d", size = 12701356, upload-time = "2025-05-29T13:35:13.553Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ad/0e93c18987a1182c350f7a5fab70550852f9fabe30ecb63bfbe51b602074/mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52", size = 12900745, upload-time = "2025-05-29T13:17:24.409Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/145ffe29f4b577219943b7b1dc0a71df7ead3c5bed4898686bd87c5b5cc2/mypy-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f2ed0e0847a80655afa2c121835b848ed101cc7b8d8d6ecc5205aedc732b1436", size = 11879670, upload-time = "2025-05-29T13:17:45.971Z" }, + { url = "https://files.pythonhosted.org/packages/c6/94/0421562d6b046e22986758c9ae31865d10ea0ba607ae99b32c9d18b16f66/mypy-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb5fbc8063cb4fde7787e4c0406aa63094a34a2daf4673f359a1fb64050e9cb2", size = 12610528, upload-time = "2025-05-29T13:34:36.983Z" }, + { url = "https://files.pythonhosted.org/packages/1a/f1/39a22985b78c766a594ae1e0bbb6f8bdf5f31ea8d0c52291a3c211fd3cd5/mypy-1.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:a5fcfdb7318c6a8dd127b14b1052743b83e97a970f0edb6c913211507a255e20", size = 12871923, upload-time = "2025-05-29T13:32:21.823Z" }, + { url = "https://files.pythonhosted.org/packages/99/a3/6ed10530dec8e0fdc890d81361260c9ef1f5e5c217ad8c9b21ecb2b8366b/mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031", size = 2265773, upload-time = "2025-05-29T13:35:18.762Z" }, ] [[package]] name = "mypy-extensions" version = "1.1.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/52e/68efc3284861e/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1be/4cccdb0f24823/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505" }, + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, ] [[package]] name = "networkx" version = "3.2.1" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version < '3.10' and 
platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -990,7 +990,7 @@ wheels = [ [[package]] name = "networkx" version = "3.4.2" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } resolution-markers = [ "python_full_version == '3.10.*' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1007,7 +1007,7 @@ wheels = [ [[package]] name = "networkx" version = "3.5" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1031,36 +1031,36 @@ wheels = [ [[package]] name = "ninja" version = "1.11.1.4" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6aa/39f6e894e0452/ninja-1.11.1.4.tar.gz", hash = "sha256:6aa39f6e894e0452e5b297327db00019383ae55d5d9c57c73b04f13bf79d438a" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/d4/6b0324541018561c5e73e617bd16f20a4fc17d1179bb3b3520b6ca8beb7b/ninja-1.11.1.4.tar.gz", hash = "sha256:6aa39f6e894e0452e5b297327db00019383ae55d5d9c57c73b04f13bf79d438a", size = 201256, upload-time = "2025-03-22T06:46:43.46Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ced/e0af00b58e27b/ninja-1.11.1.4-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:cede0af00b58e27b31f2482ba83292a8e9171cdb9acc2c867a3b6e40b3353e43" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/096/487995473320d/ninja-1.11.1.4-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:096487995473320de7f65d622c3f1d16c3ad174797602218ca8c967f51ec38a0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d30/90d4488fadf60/ninja-1.11.1.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3090d4488fadf6047d0d7a1db0c9643a8d391f0d94729554dbb89b5bdc769d7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ecc/e44a00325a936/ninja-1.11.1.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecce44a00325a93631792974659cf253a815cc6da4ec96f89742925dfc295a0d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9c2/9bb66d2aa46a2/ninja-1.11.1.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c29bb66d2aa46a2409ab369ea804c730faec7652e8c22c1e428cc09216543e5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/055/f386fb550c2c9/ninja-1.11.1.4-py3-none-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:055f386fb550c2c9d6157e45e20a84d29c47968876b9c5794ae2aec46f952306" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f61/86d7607bb090c/ninja-1.11.1.4-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:f6186d7607bb090c3be1e10c8a56b690be238f953616626f5032238c66e56867" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cf4/453679d15babc/ninja-1.11.1.4-py3-none-musllinux_1_1_i686.whl", hash = "sha256:cf4453679d15babc04ba023d68d091bb613091b67101c88f85d2171c6621c6eb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d4a/6f159b08b0ac4/ninja-1.11.1.4-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:d4a6f159b08b0ac4aca5ee1572e3e402f969139e71d85d37c0e2872129098749" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c3b/96bd875f3ef1d/ninja-1.11.1.4-py3-none-musllinux_1_1_s390x.whl", hash = 
"sha256:c3b96bd875f3ef1db782470e9e41d7508905a0986571f219d20ffed238befa15" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cf5/54e73f72c04de/ninja-1.11.1.4-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:cf554e73f72c04deb04d0cf51f5fdb1903d9c9ca3d2344249c8ce3bd616ebc02" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cfd/d09776436a1ff/ninja-1.11.1.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:cfdd09776436a1ff3c4a2558d3fc50a689fb9d7f1bdbc3e6f7b8c2991341ddb3" }, + { url = "https://files.pythonhosted.org/packages/12/42/4c94fdad51fcf1f039a156e97de9e4d564c2a8cc0303782d36f9bd893a4b/ninja-1.11.1.4-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cede0af00b58e27b31f2482ba83292a8e9171cdb9acc2c867a3b6e40b3353e43", size = 472026, upload-time = "2025-03-22T06:46:19.974Z" }, + { url = "https://files.pythonhosted.org/packages/eb/7a/455d2877fe6cf99886849c7f9755d897df32eaf3a0fba47b56e615f880f7/ninja-1.11.1.4-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:096487995473320de7f65d622c3f1d16c3ad174797602218ca8c967f51ec38a0", size = 422814, upload-time = "2025-03-22T06:46:21.235Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ad/fb6cca942528e25e8e0ab0f0cf98fe007319bf05cf69d726c564b815c4af/ninja-1.11.1.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3090d4488fadf6047d0d7a1db0c9643a8d391f0d94729554dbb89b5bdc769d7", size = 156965, upload-time = "2025-03-22T06:46:23.45Z" }, + { url = "https://files.pythonhosted.org/packages/a8/e7/d94a1b60031b115dd88526834b3da69eaacdc3c1a6769773ca8e2b1386b5/ninja-1.11.1.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecce44a00325a93631792974659cf253a815cc6da4ec96f89742925dfc295a0d", size = 179937, upload-time = "2025-03-22T06:46:24.728Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/cc/e9316a28235409e9363794fc3d0b3083e48dd80d441006de66421e55f364/ninja-1.11.1.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c29bb66d2aa46a2409ab369ea804c730faec7652e8c22c1e428cc09216543e5", size = 157020, upload-time = "2025-03-22T06:46:26.046Z" }, + { url = "https://files.pythonhosted.org/packages/e3/30/389b22300541aa5f2e9dad322c4de2f84be4e32aa4e8babd9160d620b5f1/ninja-1.11.1.4-py3-none-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:055f386fb550c2c9d6157e45e20a84d29c47968876b9c5794ae2aec46f952306", size = 130389, upload-time = "2025-03-22T06:46:27.174Z" }, + { url = "https://files.pythonhosted.org/packages/a9/10/e27f35cb92813aabbb7ae771b1685b45be1cc8a0798ce7d4bfd08d142b93/ninja-1.11.1.4-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:f6186d7607bb090c3be1e10c8a56b690be238f953616626f5032238c66e56867", size = 372435, upload-time = "2025-03-22T06:46:28.637Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/e3559619756739aae124c6abf7fe41f7e546ab1209cfbffb13137bff2d2e/ninja-1.11.1.4-py3-none-musllinux_1_1_i686.whl", hash = "sha256:cf4453679d15babc04ba023d68d091bb613091b67101c88f85d2171c6621c6eb", size = 419300, upload-time = "2025-03-22T06:46:30.392Z" }, + { url = "https://files.pythonhosted.org/packages/35/46/809e4e9572570991b8e6f88f3583807d017371ab4cb09171cbc72a7eb3e4/ninja-1.11.1.4-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:d4a6f159b08b0ac4aca5ee1572e3e402f969139e71d85d37c0e2872129098749", size = 420239, upload-time = "2025-03-22T06:46:32.442Z" }, + { url = "https://files.pythonhosted.org/packages/e6/64/5cb5710d15f844edf02ada577f8eddfdcd116f47eec15850f3371a3a4b33/ninja-1.11.1.4-py3-none-musllinux_1_1_s390x.whl", hash = "sha256:c3b96bd875f3ef1db782470e9e41d7508905a0986571f219d20ffed238befa15", size = 415986, upload-time = "2025-03-22T06:46:33.821Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/b2/0e9ab1d926f423b12b09925f78afcc5e48b3c22e7121be3ddf6c35bf06a3/ninja-1.11.1.4-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:cf554e73f72c04deb04d0cf51f5fdb1903d9c9ca3d2344249c8ce3bd616ebc02", size = 379657, upload-time = "2025-03-22T06:46:36.166Z" }, + { url = "https://files.pythonhosted.org/packages/c8/3e/fd6d330d0434168e7fe070d414b57dd99c4c133faa69c05b42a3cbdc6c13/ninja-1.11.1.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:cfdd09776436a1ff3c4a2558d3fc50a689fb9d7f1bdbc3e6f7b8c2991341ddb3", size = 454466, upload-time = "2025-03-22T06:46:37.413Z" }, ] [[package]] name = "nodeenv" version = "1.9.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6ec/12890a2dab794/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ba1/1c9782d29c27c/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9" }, + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] [[package]] name = "numpy" version = "1.26.4" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } resolution-markers = [ "python_full_version 
< '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1096,7 +1096,7 @@ wheels = [ [[package]] name = "numpy" version = "2.2.6" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1108,147 +1108,188 @@ resolution-markers = [ "python_full_version == '3.11.*' and 'tegra' not in platform_release and sys_platform == 'windows'", "python_full_version == '3.10.*' and 'tegra' not in platform_release and sys_platform == 'windows'", ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e29/554e2bef54a90/numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/efd/28d4e9cd7d7a8/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fc7/b73d02efb0e18/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/74d/4531beb257d2c/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8fc/377d995680230/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b64/d8d4d17135e00/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ba1/0f8411898fc41/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bd4/8227a919f1baf/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/955/1a499bf125c1d/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f26/18db89be1b4e0/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fd8/3c01228a68873/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/37c/0ca431f82cd5f/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fe2/7749d33bb772c/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f92/729c95468a2f4/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1bc/23a79bfabc5d0/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e31/43e4451880bed/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b4f/13750ce797515/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e1d/da9c7e08dc141/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f44/7e6acb680fd30/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/389/d771b1623ec92/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8e9/ace4a37db2342/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ce4/7521a4754c8f4/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543" }, +sdist = { url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hash = 
"sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd", size = 20276440, upload-time = "2025-05-17T22:38:04.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83", size = 14297320, upload-time = "2025-05-17T21:29:02.78Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915", size = 16801050, upload-time = "2025-05-17T21:29:27.675Z" }, + { url = "https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680", size = 15807034, upload-time = "2025-05-17T21:29:51.102Z" }, + { url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289", size = 18614185, upload-time = "2025-05-17T21:30:18.703Z" }, + { url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a", size = 14312005, upload-time = "2025-05-17T21:32:23.332Z" }, + { url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf", size = 16821570, upload-time = "2025-05-17T21:32:47.991Z" }, + { url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1", size = 15818548, upload-time = "2025-05-17T21:33:11.728Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab", size = 18620521, upload-time = "2025-05-17T21:33:39.139Z" }, + { url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87", size = 14018462, upload-time = "2025-05-17T21:35:42.174Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249", size = 16527618, upload-time = "2025-05-17T21:36:06.711Z" }, + { url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49", size = 15505511, upload-time = "2025-05-17T21:36:29.965Z" }, + { url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de", size 
= 18313783, upload-time = "2025-05-17T21:36:56.883Z" }, + { url = "https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f", size = 14010719, upload-time = "2025-05-17T21:38:58.433Z" }, + { url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f", size = 16526072, upload-time = "2025-05-17T21:39:22.638Z" }, + { url = "https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868", size = 15503213, upload-time = "2025-05-17T21:39:45.865Z" }, + { url = "https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d", size = 18316632, upload-time = "2025-05-17T21:40:13.331Z" }, + { url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f", size = 14077935, upload-time = "2025-05-17T21:41:49.738Z" }, + { url = "https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa", size = 16600122, upload-time = 
"2025-05-17T21:42:14.046Z" }, + { url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571", size = 15586143, upload-time = "2025-05-17T21:42:37.464Z" }, + { url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1", size = 18385260, upload-time = "2025-05-17T21:43:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543", size = 16643476, upload-time = "2025-05-17T21:45:11.871Z" }, ] [[package]] name = "nvidia-cublas-cu12" -version = "12.8.4.1" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "12.9.1.4" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cublas_cu12-12.9.1.4-py3-none-manylinux_2_27_x86_64.whl" }, ] [[package]] name = "nvidia-cuda-cupti-cu12" -version = "12.8.90" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "12.9.79" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, + { url = 
"https://download.pytorch.org/whl/nightly/cu129/nvidia_cuda_cupti_cu12-12.9.79-py3-none-manylinux_2_25_x86_64.whl" }, ] [[package]] name = "nvidia-cuda-nvrtc-cu12" -version = "12.8.93" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "12.9.86" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cuda_nvrtc_cu12-12.9.86-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl" }, ] [[package]] name = "nvidia-cuda-runtime-cu12" version = "12.8.90" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://pypi.nvidia.com/" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", + "python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", + "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", + "python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", + "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", + "python_full_version >= '3.12' and 'tegra' not in 
platform_release and sys_platform == 'windows'", + "python_full_version == '3.11.*' and 'tegra' not in platform_release and sys_platform == 'windows'", + "python_full_version == '3.10.*' and 'tegra' not in platform_release and sys_platform == 'windows'", + "python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows'", + "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", + "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", + "python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", + "python_full_version == '3.11.*' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", + "python_full_version == '3.10.*' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", + "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", + "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", +] wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, + { url = "https://pypi.nvidia.com/nvidia-cuda-runtime-cu12/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:52bf7bbee900262ffefe5e9d5a2a69a30d97e2bc5bb6cc866688caa976966e3d" }, + { url = 
"https://pypi.nvidia.com/nvidia-cuda-runtime-cu12/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90" }, +] + +[[package]] +name = "nvidia-cuda-runtime-cu12" +version = "12.9.79" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", + "python_full_version == '3.11.*' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", + "python_full_version == '3.10.*' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", + "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", + "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", + "python_full_version == '3.11.*' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", + "python_full_version == '3.10.*' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", + "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", +] +wheels = [ + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cuda_runtime_cu12-12.9.79-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cuda_runtime_cu12-12.9.79-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, ] [[package]] name = "nvidia-cudnn-cu12" -version = "9.8.0.87" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "9.10.2.21" +source = { registry = 
"https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ { name = "nvidia-cublas-cu12", marker = "platform_machine != 'aarch64' and sys_platform == 'linux'" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cudnn_cu12-9.8.0.87-py3-none-manylinux_2_27_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl" }, ] [[package]] name = "nvidia-cufft-cu12" -version = "11.3.3.83" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "11.4.1.4" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ { name = "nvidia-nvjitlink-cu12", marker = "platform_machine != 'aarch64' and sys_platform == 'linux'" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cufft_cu12-11.4.1.4-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, ] [[package]] name = "nvidia-cufile-cu12" -version = "1.13.1.3" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "1.14.1.1" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cufile_cu12-1.14.1.1-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, ] [[package]] name = "nvidia-curand-cu12" -version = "10.3.9.90" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "10.3.10.19" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl" }, 
+ { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_curand_cu12-10.3.10.19-py3-none-manylinux_2_27_x86_64.whl" }, ] [[package]] name = "nvidia-cusolver-cu12" -version = "11.7.3.90" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "11.7.5.82" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ { name = "nvidia-cublas-cu12", marker = "platform_machine != 'aarch64' and sys_platform == 'linux'" }, { name = "nvidia-cusparse-cu12", marker = "platform_machine != 'aarch64' and sys_platform == 'linux'" }, { name = "nvidia-nvjitlink-cu12", marker = "platform_machine != 'aarch64' and sys_platform == 'linux'" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cusolver_cu12-11.7.5.82-py3-none-manylinux_2_27_x86_64.whl" }, ] [[package]] name = "nvidia-cusparse-cu12" -version = "12.5.8.93" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "12.5.10.65" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ { name = "nvidia-nvjitlink-cu12", marker = "platform_machine != 'aarch64' and sys_platform == 'linux'" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cusparse_cu12-12.5.10.65-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, ] [[package]] name = "nvidia-cusparselt-cu12" -version = "0.6.3" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "0.7.1" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = 
"https://download.pytorch.org/whl/nightly/cu128/nvidia_cusparselt_cu12-0.6.3-py3-none-manylinux2014_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl" }, ] [[package]] name = "nvidia-ml-py" version = "12.575.51" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/649/0e93fea99eb4e/nvidia_ml_py-12.575.51.tar.gz", hash = "sha256:6490e93fea99eb4e966327ae18c6eec6256194c921f23459c8767aee28c54581" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/4d/6f017814ed5ac28e08e1b8a62e3a258957da27582c89b7f8f8b15ac3d2e7/nvidia_ml_py-12.575.51.tar.gz", hash = "sha256:6490e93fea99eb4e966327ae18c6eec6256194c921f23459c8767aee28c54581", size = 46597, upload-time = "2025-05-06T20:46:37.962Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eb8/641800d98ce40/nvidia_ml_py-12.575.51-py3-none-any.whl", hash = "sha256:eb8641800d98ce40a22f479873f34b482e214a7e80349c63be51c3919845446e" }, + { url = "https://files.pythonhosted.org/packages/db/24/552ebea28f0570b9e65e62b50287a273804c9f997cc1c2dcd4e2d64b9e7d/nvidia_ml_py-12.575.51-py3-none-any.whl", hash = "sha256:eb8641800d98ce40a22f479873f34b482e214a7e80349c63be51c3919845446e", size = 47547, upload-time = "2025-05-06T20:46:36.457Z" }, ] [[package]] name = "nvidia-modelopt" version = "0.29.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1268,18 +1309,18 @@ resolution-markers = [ ] dependencies = [ { name = "ninja", marker = 
"(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "nvidia-modelopt-core", version = "0.29.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, + { name = "nvidia-modelopt-core", version = "0.29.0", source = { registry = "https://pypi.nvidia.com/" }, marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and 
sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "packaging", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "pydantic", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "rich", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "scipy", version = "1.13.1", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "scipy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, 
+ { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "tqdm", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e66/0c20f8554f963/nvidia_modelopt-0.29.0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:e660c20f8554f9633ebcda59f2cabe8f4b7cc2851f5c50ea5007f8a79b82ad71" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f30/cc8f496e252d8/nvidia_modelopt-0.29.0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:f30cc8f496e252d82af59bd92ff64b1454d93dd1f2f1409f0a2bbfe4452180cd" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt/nvidia_modelopt-0.29.0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:e660c20f8554f9633ebcda59f2cabe8f4b7cc2851f5c50ea5007f8a79b82ad71" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt/nvidia_modelopt-0.29.0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:f30cc8f496e252d82af59bd92ff64b1454d93dd1f2f1409f0a2bbfe4452180cd" }, ] [package.optional-dependencies] @@ -1293,27 +1334,27 @@ all = [ { name = "onnx", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "onnx-graphsurgeon", marker = 
"(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "onnxconverter-common", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "onnxruntime", version = "1.18.1", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "onnxruntime", version = "1.20.1", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "onnxruntime-gpu", version = "1.20.2", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "onnxruntime", version = "1.18.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.10' and 
sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, + { name = "onnxruntime", version = "1.20.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "onnxruntime-gpu", version = "1.20.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "onnxsim", marker = "(python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, { name = "peft", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "pulp", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 
'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "pynvml", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "regex", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "safetensors", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "torch", version = "2.7.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.8.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and 
platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.9.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "torchprofile", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = 
"(python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torchvision", version = "0.23.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.23.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and 
sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "transformers", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, ] [[package]] name = "nvidia-modelopt" version = "0.31.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1331,18 +1372,18 @@ resolution-markers = [ ] dependencies = [ { name = "ninja", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and 
sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "nvidia-modelopt-core", version = "0.31.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "nvidia-modelopt-core", version = "0.31.0", source = { registry = "https://pypi.nvidia.com/" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "packaging", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and 
sys_platform == 'windows')" }, { name = "pydantic", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "rich", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, - { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, + { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "tqdm", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, ] wheels = 
[ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/774/95c50700ef9ed/nvidia_modelopt-0.31.0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:77495c50700ef9ed1782f4999e17265751a0f4002a7f1185dee7bb46d5d05039" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8b1/905122b0615ae/nvidia_modelopt-0.31.0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:8b1905122b0615aeff78f65aa39920c3971d6ebd1966b7ac57ee8da271d49913" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt/nvidia_modelopt-0.31.0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:77495c50700ef9ed1782f4999e17265751a0f4002a7f1185dee7bb46d5d05039" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt/nvidia_modelopt-0.31.0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:8b1905122b0615aeff78f65aa39920c3971d6ebd1966b7ac57ee8da271d49913" }, ] [package.optional-dependencies] @@ -1357,26 +1398,26 @@ all = [ { name = "onnx", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "onnx-graphsurgeon", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "onnxconverter-common", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, - { name 
= "onnxruntime", version = "1.22.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'windows')" }, - { name = "onnxruntime-gpu", version = "1.22.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, + { name = "onnxruntime", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'windows')" }, + { name = "onnxruntime-gpu", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 
'linux') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, { name = "onnxsim", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "peft", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "polygraphy", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "pulp", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "regex", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 
'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, { name = "safetensors", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, - { name = "torch", version = "2.7.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.8.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, + { name = "torch", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or 
(python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.9.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, { name = "torchprofile", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, - { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torchvision", version = "0.23.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in 
platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.23.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, { name = "transformers", marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release 
and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, ] [[package]] name = "nvidia-modelopt-core" version = "0.29.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1395,20 +1436,20 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/085/76558dd4beca4/nvidia_modelopt_core-0.29.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:08576558dd4beca4ca4e1f903735a61e551c24b19555a311657f173cddc2478b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/373/b50ff3090b4f3/nvidia_modelopt_core-0.29.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:373b50ff3090b4f3d2a5d51e4167cd4f045298f11f541e33fde11b2e5283f7a5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d35/3cf0d1ce7fa7a/nvidia_modelopt_core-0.29.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d353cf0d1ce7fa7acdc98838ba00b39ec002058b5770973071f031b3be400e97" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/df4/b5d2022cdd888/nvidia_modelopt_core-0.29.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:df4b5d2022cdd8887058baf2a85c01c4009c2a8d2876f999770774b457e06c53" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f6a/5a9341dab21fe/nvidia_modelopt_core-0.29.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:f6a5a9341dab21fe2a5364adbfbf94cba310769796b7b2bceb3c0fdf2a3755ff" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/db6/26ae54c350999/nvidia_modelopt_core-0.29.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:db626ae54c350999c11365b7687bcf7386d2e70ac6391ba2ed7c4ec80ee4eb12" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c36/d008eb5765eec/nvidia_modelopt_core-0.29.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c36d008eb5765eec94177f9b9ad7a173405b14e72ab7688029c43a38047745c2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b9f/fd78257ee93bb/nvidia_modelopt_core-0.29.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b9ffd78257ee93bb78a44f9027e4658d12affa00a524b062eca294d78cdc99b7" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.29.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:08576558dd4beca4ca4e1f903735a61e551c24b19555a311657f173cddc2478b" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.29.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:373b50ff3090b4f3d2a5d51e4167cd4f045298f11f541e33fde11b2e5283f7a5" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.29.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d353cf0d1ce7fa7acdc98838ba00b39ec002058b5770973071f031b3be400e97" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.29.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:df4b5d2022cdd8887058baf2a85c01c4009c2a8d2876f999770774b457e06c53" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.29.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f6a5a9341dab21fe2a5364adbfbf94cba310769796b7b2bceb3c0fdf2a3755ff" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.29.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:db626ae54c350999c11365b7687bcf7386d2e70ac6391ba2ed7c4ec80ee4eb12" }, 
+ { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.29.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c36d008eb5765eec94177f9b9ad7a173405b14e72ab7688029c43a38047745c2" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.29.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b9ffd78257ee93bb78a44f9027e4658d12affa00a524b062eca294d78cdc99b7" }, ] [[package]] name = "nvidia-modelopt-core" version = "0.31.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1425,103 +1466,95 @@ resolution-markers = [ "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/18e/dfe34b3254647/nvidia_modelopt_core-0.31.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:18edfe34b325464709bf9cc8a9e4d6eca724adfbfa80bb0b41c4c9699ca7852c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e13/81a66e4e6b228/nvidia_modelopt_core-0.31.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:e1381a66e4e6b2282dbde0103b9706e6639480a6ac773b6c23611a8346ed8fca" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8b9/5bb4b6d005d66/nvidia_modelopt_core-0.31.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:8b95bb4b6d005d661ddaf2e65b9628cba1d45d46bb5a4d041629bda57cec45ea" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b1d/43f3ddf9c598f/nvidia_modelopt_core-0.31.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b1d43f3ddf9c598f7579a13d1ef133029b54b2a833cbbad53a486cf56cee0c5f" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/3a0/3f3b081322bda/nvidia_modelopt_core-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3a03f3b081322bdad71982de37898e5407c6a85c65d02a26470b735d8a454e74" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/473/9f00f1797699f/nvidia_modelopt_core-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4739f00f1797699fe4b9c256a5b75114b66e22749250dc87128491a8bdb2ce5a" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.31.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:18edfe34b325464709bf9cc8a9e4d6eca724adfbfa80bb0b41c4c9699ca7852c" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.31.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:e1381a66e4e6b2282dbde0103b9706e6639480a6ac773b6c23611a8346ed8fca" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.31.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:8b95bb4b6d005d661ddaf2e65b9628cba1d45d46bb5a4d041629bda57cec45ea" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.31.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b1d43f3ddf9c598f7579a13d1ef133029b54b2a833cbbad53a486cf56cee0c5f" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3a03f3b081322bdad71982de37898e5407c6a85c65d02a26470b735d8a454e74" }, + { url = "https://pypi.nvidia.com/nvidia-modelopt-core/nvidia_modelopt_core-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4739f00f1797699fe4b9c256a5b75114b66e22749250dc87128491a8bdb2ce5a" }, ] [[package]] name = "nvidia-nccl-cu12" -version = "2.26.5" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "2.27.3" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = 
"https://download.pytorch.org/whl/nightly/cu128/nvidia_nccl_cu12-2.26.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_nccl_cu12-2.27.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, ] [[package]] name = "nvidia-nvjitlink-cu12" -version = "12.8.93" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "12.9.86" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl" }, -] - -[[package]] -name = "nvidia-nvshmem-cu12" -version = "3.2.5" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } -wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_nvshmem_cu12-3.2.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_nvjitlink_cu12-12.9.86-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl" }, ] [[package]] name = "nvidia-nvtx-cu12" -version = "12.8.90" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "12.9.79" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/nvidia_nvtx_cu12-12.9.79-py3-none-manylinux1_x86_64.manylinux_2_5_x86_64.whl" }, ] [[package]] name = "onnx" version = "1.18.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform 
== 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "protobuf", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "typing-extensions", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3d8/dbf9e99662913/onnx-1.18.0.tar.gz", hash = "sha256:3d8dbf9e996629131ba3aa1afd1d8239b660d1f830c6688dd7e03157cccd6b9c" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/60/e56e8ec44ed34006e6d4a73c92a04d9eea6163cc12440e35045aec069175/onnx-1.18.0.tar.gz", hash = "sha256:3d8dbf9e996629131ba3aa1afd1d8239b660d1f830c6688dd7e03157cccd6b9c", size = 12563009, upload-time = "2025-05-12T22:03:09.626Z" } wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/e18/9652dad6e70a0/onnx-1.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e189652dad6e70a0465035c55cc565c27aa38803dd4f4e74e4b952ee1c2de94b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bfb/1f271b1523b29/onnx-1.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb1f271b1523b29f324bfd223f6a4cfbdc5a2f2f16e73563671932d33663365" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/731/60799472e1a86/onnx-1.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73160799472e1a86083f786fecdf864cf43d55325492a9b5a1cfa64d8a523ecc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6ac/afb3823238bbe/onnx-1.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6acafb3823238bbe8f4340c7ac32fb218689442e074d797bee1c5c9a02fdae75" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e4d/a451bf1c5ae38/onnx-1.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4da451bf1c5ae381f32d430004a89f0405bc57a8471b0bddb6325a5b334aa40" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/99a/fac90b4cdb147/onnx-1.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99afac90b4cdb1471432203c3c1f74e16549c526df27056d39f41a9a47cfb4af" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/852/1544987d71394/onnx-1.18.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8521544987d713941ee1e591520044d35e702f73dc87e91e6d4b15a064ae813d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3c1/37eecf6bc618c/onnx-1.18.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c137eecf6bc618c2f9398bcc381474b55c817237992b169dfe728e169549e8f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dc2/2abacfb0d3cd0/onnx-1.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dc22abacfb0d3cd024d6ab784cb5eb5aca9c966a791e8e13b1a4ecb93ddb47d3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/783/9bf2adb494e46/onnx-1.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7839bf2adb494e46ccf375a7936b5d9e241b63e1a84254f3eb2e2e184e3292c8" }, + { url = "https://files.pythonhosted.org/packages/04/5b/3cfd183961a0a872fe29c95f8d07264890ec65c75c94b99a4dabc950df29/onnx-1.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e189652dad6e70a0465035c55cc565c27aa38803dd4f4e74e4b952ee1c2de94b", size = 17422721, upload-time = "2025-05-12T22:01:52.841Z" }, + { url = "https://files.pythonhosted.org/packages/58/52/fa649429016c5790f68c614cdebfbefd3e72ba1c458966305297d540f713/onnx-1.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb1f271b1523b29f324bfd223f6a4cfbdc5a2f2f16e73563671932d33663365", size = 17584220, upload-time = "2025-05-12T22:01:56.458Z" }, + { url = "https://files.pythonhosted.org/packages/02/3a/56475a111120d1e5d11939acbcbb17c92198c8e64a205cd68e00bdfd8a1f/onnx-1.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73160799472e1a86083f786fecdf864cf43d55325492a9b5a1cfa64d8a523ecc", size = 17424359, upload-time = "2025-05-12T22:02:09.866Z" }, + { url = "https://files.pythonhosted.org/packages/cf/03/5eb5e9ef446ed9e78c4627faf3c1bc25e0f707116dd00e9811de232a8df5/onnx-1.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6acafb3823238bbe8f4340c7ac32fb218689442e074d797bee1c5c9a02fdae75", size = 17586006, upload-time = "2025-05-12T22:02:13.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/77/ba50a903a9b5e6f9be0fa50f59eb2fca4a26ee653375408fbc72c3acbf9f/onnx-1.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4da451bf1c5ae381f32d430004a89f0405bc57a8471b0bddb6325a5b334aa40", size = 17421291, upload-time = "2025-05-12T22:02:29.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/23/25ec2ba723ac62b99e8fed6d7b59094dadb15e38d4c007331cc9ae3dfa5f/onnx-1.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99afac90b4cdb1471432203c3c1f74e16549c526df27056d39f41a9a47cfb4af", size = 17584084, upload-time = "2025-05-12T22:02:32.789Z" }, + { url = "https://files.pythonhosted.org/packages/05/e8/762b5fb5ed1a2b8e9a4bc5e668c82723b1b789c23b74e6b5a3356731ae4e/onnx-1.18.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8521544987d713941ee1e591520044d35e702f73dc87e91e6d4b15a064ae813d", size = 17421486, upload-time = "2025-05-12T22:02:48.467Z" }, + { url = "https://files.pythonhosted.org/packages/12/bb/471da68df0364f22296456c7f6becebe0a3da1ba435cdb371099f516da6e/onnx-1.18.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c137eecf6bc618c2f9398bcc381474b55c817237992b169dfe728e169549e8f", size = 17583581, upload-time = "2025-05-12T22:02:51.784Z" }, + { url = "https://files.pythonhosted.org/packages/06/7a/7eca4c27fa96fad2ec76fddc65a69d56c7d898134dddd82fa3331242f927/onnx-1.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc22abacfb0d3cd024d6ab784cb5eb5aca9c966a791e8e13b1a4ecb93ddb47d3", size = 17423027, upload-time = "2025-05-12T22:01:35.54Z" }, + { url = "https://files.pythonhosted.org/packages/9d/05/545c0b2c67421cac9573301e6a3e3d0ddf7c7bc4d1ae09781285cc996f16/onnx-1.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7839bf2adb494e46ccf375a7936b5d9e241b63e1a84254f3eb2e2e184e3292c8", size = 17584166, upload-time = "2025-05-12T22:01:39.096Z" }, ] [[package]] name = "onnx-graphsurgeon" version = "0.5.8" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } dependencies = [ - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = 
"(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "onnx", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6f6/11ea29a8e4740/onnx_graphsurgeon-0.5.8-py2.py3-none-any.whl", hash = "sha256:6f611ea29a8e4740fbab1aae52bf4c40b8b9918f8459058d20b99acc79fce121" }, + { url = "https://pypi.nvidia.com/onnx-graphsurgeon/onnx_graphsurgeon-0.5.8-py2.py3-none-any.whl", hash = "sha256:6f611ea29a8e4740fbab1aae52bf4c40b8b9918f8459058d20b99acc79fce121" }, ] [[package]] name = "onnxconverter-common" version = "1.13.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", version = "1.26.4", source = { 
registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "onnx", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "packaging", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "protobuf", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/03d/b8a6033a3d659/onnxconverter-common-1.13.0.tar.gz", hash = "sha256:03db8a6033a3d6590f22df3f64234079caa826375d1fcb0b37b8123c06bf598c" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/44/54c6b7f1a28d919a15caf642113fb44651087d1bb0658f028c54b93df8e3/onnxconverter-common-1.13.0.tar.gz", hash = 
"sha256:03db8a6033a3d6590f22df3f64234079caa826375d1fcb0b37b8123c06bf598c", size = 73935, upload-time = "2022-11-03T12:11:37.783Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5ee/1c025ef6c3b4a/onnxconverter_common-1.13.0-py2.py3-none-any.whl", hash = "sha256:5ee1c025ef6c3b4abaede8425bc6b393248941a6cf8c21563d0d0e3f04634a0a" }, + { url = "https://files.pythonhosted.org/packages/51/a4/4439174c879c33557eab08e4dd480c1e096bc26c487c85a62e4c0d8f78ff/onnxconverter_common-1.13.0-py2.py3-none-any.whl", hash = "sha256:5ee1c025ef6c3b4abaede8425bc6b393248941a6cf8c21563d0d0e3f04634a0a", size = 83796, upload-time = "2022-11-03T12:11:35.767Z" }, ] [[package]] name = "onnxruntime" version = "1.18.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1534,26 +1567,26 @@ resolution-markers = [ dependencies = [ { name = "coloredlogs", marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "flatbuffers", marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 
'windows')" }, { name = "packaging", marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "protobuf", marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, { name = "sympy", marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fc7/06eb1df06ddf5/onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc706eb1df06ddf55776e15a30519fb15dda7697f987a2bbda4962845e3cec05" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b7d/e69f5ced2a263/onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7de69f5ced2a263531923fa68bbec52a56e793b802fcd81a03487b5e292bc3a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ef3/6f3a8b768506d/onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef36f3a8b768506d02be349ac303fd95d92813ba3ba70304d40c3cd5c25d6a4c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/170/e711393e0618e/onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:170e711393e0618efa8ed27b59b9de0ee2383bd2a1f93622a97006a5ad48e434" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b9e/03c4ba9f73450/onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9e03c4ba9f734500691a4d7d5b381cd71ee2f3ce80a1154ac8f7aed99d1ecaa" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/781/aa9873640f5df/onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:781aa9873640f5df24524f96f6070b8c550c66cb6af35710fd9f92a20b4bfbf6" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/983/9491e77e5c5a1/onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9839491e77e5c5a175cab3621e184d5a88925ee297ff4c311b68897197f4cde9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ad3/187c1faff3ac1/onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad3187c1faff3ac15f7f0e7373ef4788c582cafa655a80fdbb33eaec88976c66" }, + { url = "https://files.pythonhosted.org/packages/cd/ef/4f4e45d49c2587080f0252dba644620a9808e2d19591bb0b327650ace6b6/onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc706eb1df06ddf55776e15a30519fb15dda7697f987a2bbda4962845e3cec05", size = 6013528, upload-time = "2024-06-27T23:52:45.941Z" }, + { url = "https://files.pythonhosted.org/packages/04/da/cd671caf4231942c4f68bf0dc1a959303df91dfd0e1d55c556b924d8e68e/onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7de69f5ced2a263531923fa68bbec52a56e793b802fcd81a03487b5e292bc3a", size = 6788962, upload-time = "2024-06-27T23:52:48.548Z" }, + { url = "https://files.pythonhosted.org/packages/54/4b/f4c52a6b5e62f98f852a946fefc48f12d5838652eb7da5c300dc27a80ba4/onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef36f3a8b768506d02be349ac303fd95d92813ba3ba70304d40c3cd5c25d6a4c", size = 6010462, upload-time = "2024-06-27T23:52:59.835Z" }, + { url = "https://files.pythonhosted.org/packages/dd/ae/163375ec2b6aee385c26889b4a0bd4546133b1da7c66285ef8db180781c5/onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:170e711393e0618efa8ed27b59b9de0ee2383bd2a1f93622a97006a5ad48e434", size = 6794885, upload-time = "2024-06-27T23:53:02.568Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/75/305c44288ad9733d4209c8c5cb7eba6f09f25462bf2d64bbdfca742585c3/onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9e03c4ba9f734500691a4d7d5b381cd71ee2f3ce80a1154ac8f7aed99d1ecaa", size = 6007788, upload-time = "2024-06-27T23:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/a3/0a/89bc7acdf7b311ec5cdf6c01983e8ecb23f7b1ba7a1b2d2fd10d33dfd24a/onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:781aa9873640f5df24524f96f6070b8c550c66cb6af35710fd9f92a20b4bfbf6", size = 6793752, upload-time = "2024-06-27T23:53:15.37Z" }, + { url = "https://files.pythonhosted.org/packages/5f/66/8a1deac3f16fad5fab4aedeea8d4a0baaafef0e16f7c5ae2e162f6af6d34/onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9839491e77e5c5a175cab3621e184d5a88925ee297ff4c311b68897197f4cde9", size = 5995471, upload-time = "2024-06-27T23:53:39.884Z" }, + { url = "https://files.pythonhosted.org/packages/10/f5/67bd1b947bc4055ea62efd4b974cef51a8a6819ac4e8c61ca014901dbfd0/onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad3187c1faff3ac15f7f0e7373ef4788c582cafa655a80fdbb33eaec88976c66", size = 6793994, upload-time = "2024-06-27T23:53:42.554Z" }, ] [[package]] name = "onnxruntime" version = "1.20.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", "python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", @@ -1563,23 +1596,23 @@ resolution-markers = [ dependencies = [ { name = "coloredlogs", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine 
== 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "flatbuffers", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "protobuf", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 
'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "sympy", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7b2/908b50101a19e/onnxruntime-1.20.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b2908b50101a19e99c4d4e97ebb9905561daf61829403061c1adc1b588bc0de" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f62/43e34d74423bd/onnxruntime-1.20.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6243e34d74423bdd1edf0ae9596dd61023b260f546ee17d701723915f06a9f7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f1f/56e898815963d/onnxruntime-1.20.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f56e898815963d6dc4ee1c35fc6c36506466eff6d16f3cb9848cea4e8c8172" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fb4/4b08e017a6489/onnxruntime-1.20.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb44b08e017a648924dbe91b82d89b0c105b1adcfe31e90d1dc06b8677ad37be" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c91/58465745423b2/onnxruntime-1.20.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9158465745423b2b5d97ed25aa7740c7d38d2993ee2e5c3bfacb0c4145c49d8" }, + { url = "https://files.pythonhosted.org/packages/6d/c6/c4c0860bee2fde6037bdd9dcd12d323f6e38cf00fcc9a5065b394337fc55/onnxruntime-1.20.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash 
= "sha256:7b2908b50101a19e99c4d4e97ebb9905561daf61829403061c1adc1b588bc0de", size = 11954010, upload-time = "2024-11-21T00:48:35.254Z" }, + { url = "https://files.pythonhosted.org/packages/a5/da/c44bf9bd66cd6d9018a921f053f28d819445c4d84b4dd4777271b0fe52a2/onnxruntime-1.20.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6243e34d74423bdd1edf0ae9596dd61023b260f546ee17d701723915f06a9f7", size = 11955227, upload-time = "2024-11-21T00:48:54.556Z" }, + { url = "https://files.pythonhosted.org/packages/c5/9d/a42a84e10f1744dd27c6f2f9280cc3fb98f869dd19b7cd042e391ee2ab61/onnxruntime-1.20.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f56e898815963d6dc4ee1c35fc6c36506466eff6d16f3cb9848cea4e8c8172", size = 11952833, upload-time = "2024-11-21T00:49:10.563Z" }, + { url = "https://files.pythonhosted.org/packages/81/0d/13bbd9489be2a6944f4a940084bfe388f1100472f38c07080a46fbd4ab96/onnxruntime-1.20.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb44b08e017a648924dbe91b82d89b0c105b1adcfe31e90d1dc06b8677ad37be", size = 11951459, upload-time = "2024-11-21T00:49:26.269Z" }, + { url = "https://files.pythonhosted.org/packages/d8/55/3821c5fd60b52a6c82a00bba18531793c93c4addfe64fbf061e235c5617a/onnxruntime-1.20.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9158465745423b2b5d97ed25aa7740c7d38d2993ee2e5c3bfacb0c4145c49d8", size = 11950342, upload-time = "2024-11-21T00:49:34.164Z" }, ] [[package]] name = "onnxruntime" version = "1.22.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1593,24 
+1626,24 @@ resolution-markers = [ dependencies = [ { name = "coloredlogs", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'windows')" }, { name = "flatbuffers", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'windows')" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 
'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'windows')" }, { name = "protobuf", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine 
== 'aarch64' and sys_platform == 'windows')" }, { name = "sympy", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 'aarch64' and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/468/c9502a12f6f49/onnxruntime-1.22.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:468c9502a12f6f49ec335c2febd22fdceecc1e4cc96dfc27e419ba237dff5aff" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fef/17d665a917866/onnxruntime-1.22.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fef17d665a917866d1f68f09edc98223b9a27e6cb167dec69da4c66484ad12fd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c86/01128eaef79b6/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8601128eaef79b636152aea76ae6981b7c9fc81a618f584c15d78d42b310f1c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6a6/bbed10bc5e770/onnxruntime-1.22.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a6bbed10bc5e770c04d422893d3045b81acbbadc9fb759a2cd1ca00993da919" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0a2/ac5bd9205d831/onnxruntime-1.22.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2ac5bd9205d831541db4e508e586e764a74f14efdd3f89af7fd20e1bf4a1ed" }, + { url = 
"https://files.pythonhosted.org/packages/54/ab/fd9a3b5285008c060618be92e475337fcfbf8689787953d37273f7b52ab0/onnxruntime-1.22.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:468c9502a12f6f49ec335c2febd22fdceecc1e4cc96dfc27e419ba237dff5aff", size = 14445346, upload-time = "2025-05-09T20:25:41.322Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8b/22989f6b59bc4ad1324f07a945c80b9ab825f0a581ad7a6064b93716d9b7/onnxruntime-1.22.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fef17d665a917866d1f68f09edc98223b9a27e6cb167dec69da4c66484ad12fd", size = 14446302, upload-time = "2025-05-09T20:25:44.299Z" }, + { url = "https://files.pythonhosted.org/packages/03/79/36f910cd9fc96b444b0e728bba14607016079786adf032dae61f7c63b4aa/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8601128eaef79b636152aea76ae6981b7c9fc81a618f584c15d78d42b310f1c", size = 14443220, upload-time = "2025-05-09T20:25:47.078Z" }, + { url = "https://files.pythonhosted.org/packages/e1/89/1dfe1b368831d1256b90b95cb8d11da8ab769febd5c8833ec85ec1f79d21/onnxruntime-1.22.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a6bbed10bc5e770c04d422893d3045b81acbbadc9fb759a2cd1ca00993da919", size = 14443266, upload-time = "2025-05-09T20:25:49.479Z" }, + { url = "https://files.pythonhosted.org/packages/9f/48/d61d5f1ed098161edd88c56cbac49207d7b7b149e613d2cd7e33176c63b3/onnxruntime-1.22.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2ac5bd9205d831541db4e508e586e764a74f14efdd3f89af7fd20e1bf4a1ed", size = 14454003, upload-time = "2025-05-09T20:25:52.287Z" }, ] [[package]] name = "onnxruntime-gpu" version = "1.20.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version == '3.11.*' and platform_machine != 'aarch64' and 'tegra' 
in platform_release and sys_platform == 'linux'", "python_full_version == '3.10.*' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", @@ -1620,19 +1653,23 @@ resolution-markers = [ dependencies = [ { name = "coloredlogs", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "flatbuffers", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' 
and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "protobuf", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "sympy", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and python_full_version < '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+f/f6e/2baa664069470/onnxruntime_gpu-1.20.2-cp310-cp310-linux_aarch64.whl", hash = "sha256:f6e2baa664069470c6574219a79aba315e26c76db49d347678a5a273f1c41c9a" }, + { url = "https://files.pythonhosted.org/packages/04/ad/4e5534dcaafe36f596792ebd0049177f7f0b7afa0f696505974ed1d6f72c/onnxruntime_gpu-1.20.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dfba508f110ec062dedfd3032e6eee8cde325026e9d7c5792884e8b9d4ebb9c3", size = 291522233, upload-time = "2025-03-07T05:46:08.901Z" }, + { url = "https://files.pythonhosted.org/packages/5e/53/9341b875b0ed29953485b43713e94b335a449c3770fed67dddb3c9b84af0/onnxruntime_gpu-1.20.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85057c7006457bee14fc2a57417b7e4f396f10d9c1b08b11aae08ac2b825eeda", size = 291518407, upload-time = "2025-03-07T05:46:22.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/db/c1fcdf45cad147d3b3609cf66a1c6083b54382f58a41d7fc526cd5909090/onnxruntime_gpu-1.20.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa66d2e6de13fe6f4d1554b1c219bd2e4778b540ed9d3dc62957c95a8af43d66", size = 291510804, upload-time = "2025-03-07T05:46:36.178Z" }, + { url = "https://files.pythonhosted.org/packages/48/a4/60f0cf16b24f05d123f90525408a705741fa92e0c38ab122cdf1d239e3fe/onnxruntime_gpu-1.20.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6af5b30b9b0e729d3ca1dfff493a39771f143cfc22af1d77d487022033cae284", size = 291511859, upload-time = "2025-03-07T05:46:49.302Z" }, + { url = "https://files.pythonhosted.org/packages/4e/de/6c692ac8604a451011a2a01e35e94f84bea8775ef97f6830985bbe8de172/onnxruntime_gpu-1.20.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:407e5b7a21d656aac6f994d2e329f5577eb3d7f98b63aa1e49e71a702ffa1da1", size = 291502464, upload-time = "2025-03-07T05:47:03.191Z" }, ] [[package]] name = "onnxruntime-gpu" version = "1.22.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version == '3.11.*' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -1646,124 +1683,128 @@ resolution-markers = [ dependencies = [ { name = "coloredlogs", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 
'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, { name = "flatbuffers", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", 
version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, { name = "protobuf", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, { name = "sympy", marker = "(python_full_version >= '3.10' and python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 
'windows') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+f/869/e41abdc35e093/onnxruntime_gpu-1.22.0-cp310-cp310-linux_aarch64.whl", hash = "sha256:869e41abdc35e09345876f047fce49267d699df3e44b67c2518b0469739484ff" }, + { url = "https://files.pythonhosted.org/packages/27/76/81de592072d6a41553b1523e15447f0ef94392e8f4cb98fda42909f24f9b/onnxruntime_gpu-1.22.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:965da7d33a54917e8e5176f292cc22640819f328370f4fb86087908745b03708", size = 283205327, upload-time = "2025-05-09T19:39:24.231Z" }, + { url = "https://files.pythonhosted.org/packages/4a/10/cd3e7e289f7b46eb93e38b5c90139f735bf1ea7f03d4b17ceb0e998e5bb6/onnxruntime_gpu-1.22.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d30c1512f22b1f01bacb4f177d49cbefd23e0f4bef56066f1282992d133e6ff8", size = 283204403, upload-time = "2025-05-09T19:39:38.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/5c/3f9700ba277d52c121dd2cebc8a672fb60b53e888972fc6682b6692a766c/onnxruntime_gpu-1.22.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86b064c8f6cbe6da03f51f46351237d985f8fd5eb907d3f9997ea91881131a13", size = 283199528, upload-time = "2025-05-09T19:39:54.489Z" }, + { url = "https://files.pythonhosted.org/packages/ae/26/35efe9dae012f453f2f7698dec3604368ce91ee2a0464336d2284fe02e3b/onnxruntime_gpu-1.22.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c3e635792931c5edf48a6a44b8daf4f74a9458e2d60245d24d91e29b6c1c7aa5", size = 283205630, upload-time = "2025-05-09T19:40:12.749Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/ab/943c659cded9288519c67e6d5827973762207d19035972c703a1fefd032c/onnxruntime_gpu-1.22.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1559033601d71023d72a8e279b2575a104de5f46e136f87534206aa2044eb1c", size = 283210584, upload-time = "2025-05-09T19:40:27.372Z" }, ] [[package]] name = "onnxsim" version = "0.4.36" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "onnx", marker = "(python_full_version < '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "rich", marker = "(python_full_version < '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.12' and 'tegra' not in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6e0/ee9d6d4a83042/onnxsim-0.4.36.tar.gz", hash = "sha256:6e0ee9d6d4a83042bdef7319fbe58352d9fda5f253386be2b267c7c27f0638ee" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/9e/f34238413ebeda9a3a8802feeaa5013934455466b9ab390b48ad9c7e184f/onnxsim-0.4.36.tar.gz", hash = "sha256:6e0ee9d6d4a83042bdef7319fbe58352d9fda5f253386be2b267c7c27f0638ee", size = 20993703, upload-time = "2024-03-04T08:25:00.086Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ce8/7837f8975beeb/onnxsim-0.4.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce87837f8975beebdcc98cc01d6d13e84b10900eb2c14035ce1066c3d670d96d" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/fa7/596e6b806ed19/onnxsim-0.4.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa7596e6b806ed19077f7652788a50ee576c172b4d16d421f0593aef1a6fa4c4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e93/7abb8e20a6609/onnxsim-0.4.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e937abb8e20a6609f27ae19639d21dc5e8621c4a5e44ebbafab9292451f75497" }, + { url = "https://files.pythonhosted.org/packages/d9/6e/80c77b5c6ec079994295e6e685097fa42732a1e7c5a22fe9c5c4ca1aac74/onnxsim-0.4.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce87837f8975beebdcc98cc01d6d13e84b10900eb2c14035ce1066c3d670d96d", size = 2255237, upload-time = "2024-03-04T08:24:29.047Z" }, + { url = "https://files.pythonhosted.org/packages/db/94/22aab761b3d416bce02020d9ca98dc692427c2717b0325952e30ce41f83b/onnxsim-0.4.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa7596e6b806ed19077f7652788a50ee576c172b4d16d421f0593aef1a6fa4c4", size = 2255003, upload-time = "2024-03-04T08:24:35.024Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/21c4e4a17c0dae64c0f0c02186baa198d7d5563af9a27361d2fdd75a8e04/onnxsim-0.4.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e937abb8e20a6609f27ae19639d21dc5e8621c4a5e44ebbafab9292451f75497", size = 2255609, upload-time = "2024-03-04T08:24:50.594Z" }, ] [[package]] name = "packaging" version = "25.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d44/3872c98d677bf/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/295/72ef2b1f17581/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484" }, + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] [[package]] name = "pandas" version = "2.3.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = 
"(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "python-dateutil", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pytz", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "tzdata", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/346/00ab34ebf1131/pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f4d/d97c19bd06bc5/pandas-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4dd97c19bd06bc557ad787a15b6489d2614ddaab5d104a0310eb314c724b2d2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/034/abd6f3db8b988/pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:034abd6f3db8b9880aaee98f4f5d4dbec7c4829938463ec046517220b2f8574e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/23c/2b2dc52138102/pandas-2.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23c2b2dc5213810208ca0b80b8666670eb4660bbfd9d45f58592cc4ddcfd62e1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/39f/f73ec07be5e90/pandas-2.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:39ff73ec07be5e90330cc6ff5705c651ace83374189dcdcb46e6ff54b4a72cd6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fa3/5c266c8cd1a67/pandas-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa35c266c8cd1a67d75971a1912b185b492d257092bdd2709bbdebe574ed228d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/14a/0cc77b0f089d2/pandas-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:14a0cc77b0f089d2d2ffe3007db58f170dae9b9f54e569b299db871a3ab5bf46" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c06/f6f144ad0a1bf/pandas-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c06f6f144ad0a1bf84699aeea7eff6068ca5c63ceb404798198af7eb86082e33" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ed1/6339bc354a73e/pandas-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed16339bc354a73e0a609df36d256672c7d296f3f767ac07257801aa064ff73c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9ff/730713d4c4f2f/pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ba2/4af48643b12ff/pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/404/d681c698e3c8a/pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/602/1910b086b3ca7/pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bb3/2dc743b52467d/pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/213/cd63c43263dbb/pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1d2/b33e68d0ce64e/pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/430/a63bae10b5086/pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/951/805d146922aed/pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1a8/81bc1309f3fce/pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e19/91bbb96f4050b/pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bb3/be95802219853/pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bf5/be867a0541a9f/pandas-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5be867a0541a9fb47a4be0c5790a4bccd5b77b92f0a59eeec9375fafc2aa14" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/841/41f722d45d0c2/pandas-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84141f722d45d0c2a89544dd29d35b3abfc13d2250ed7e68394eda7564bd6324" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f95/a2aef32614ed8/pandas-2.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f95a2aef32614ed86216d3c450ab12a4e82084e8102e355707a1d96e33d51c34" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e0f/51973ba93a9f9/pandas-2.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:e0f51973ba93a9f97185049326d75b942b9aeb472bec616a129806facb129ebb" }, +sdist = { url = "https://files.pythonhosted.org/packages/72/51/48f713c4c728d7c55ef7444ba5ea027c26998d96d1a40953b346438602fc/pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133", size = 4484490, upload-time = "2025-06-05T03:27:54.133Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/45/d2599400fad7fe06b849bd40b52c65684bc88fbe5f0a474d0513d057a377/pandas-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4dd97c19bd06bc557ad787a15b6489d2614ddaab5d104a0310eb314c724b2d2", size = 11711963, upload-time = "2025-06-05T03:25:56.855Z" }, + { url = "https://files.pythonhosted.org/packages/66/f8/5508bc45e994e698dbc93607ee6b9b6eb67df978dc10ee2b09df80103d9e/pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:034abd6f3db8b9880aaee98f4f5d4dbec7c4829938463ec046517220b2f8574e", size = 12349446, upload-time = "2025-06-05T03:26:01.292Z" }, + { url = "https://files.pythonhosted.org/packages/f7/fc/17851e1b1ea0c8456ba90a2f514c35134dd56d981cf30ccdc501a0adeac4/pandas-2.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23c2b2dc5213810208ca0b80b8666670eb4660bbfd9d45f58592cc4ddcfd62e1", size = 12920002, upload-time = "2025-06-06T00:00:07.925Z" }, + { url = "https://files.pythonhosted.org/packages/a1/9b/8743be105989c81fa33f8e2a4e9822ac0ad4aaf812c00fee6bb09fc814f9/pandas-2.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:39ff73ec07be5e90330cc6ff5705c651ace83374189dcdcb46e6ff54b4a72cd6", size = 13651218, upload-time = "2025-06-05T03:26:09.731Z" }, + { url = "https://files.pythonhosted.org/packages/ee/3e/8c0fb7e2cf4a55198466ced1ca6a9054ae3b7e7630df7757031df10001fd/pandas-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa35c266c8cd1a67d75971a1912b185b492d257092bdd2709bbdebe574ed228d", size = 11788230, upload-time = 
"2025-06-05T03:26:27.417Z" }, + { url = "https://files.pythonhosted.org/packages/14/22/b493ec614582307faf3f94989be0f7f0a71932ed6f56c9a80c0bb4a3b51e/pandas-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a0cc77b0f089d2d2ffe3007db58f170dae9b9f54e569b299db871a3ab5bf46", size = 12370423, upload-time = "2025-06-05T03:26:34.142Z" }, + { url = "https://files.pythonhosted.org/packages/9f/74/b012addb34cda5ce855218a37b258c4e056a0b9b334d116e518d72638737/pandas-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c06f6f144ad0a1bf84699aeea7eff6068ca5c63ceb404798198af7eb86082e33", size = 12990594, upload-time = "2025-06-06T00:00:13.934Z" }, + { url = "https://files.pythonhosted.org/packages/95/81/b310e60d033ab64b08e66c635b94076488f0b6ce6a674379dd5b224fc51c/pandas-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed16339bc354a73e0a609df36d256672c7d296f3f767ac07257801aa064ff73c", size = 13745952, upload-time = "2025-06-05T03:26:39.475Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ba/a7883d7aab3d24c6540a2768f679e7414582cc389876d469b40ec749d78b/pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09", size = 11262180, upload-time = "2025-06-05T16:50:17.453Z" }, + { url = "https://files.pythonhosted.org/packages/01/a5/931fc3ad333d9d87b10107d948d757d67ebcfc33b1988d5faccc39c6845c/pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d", size = 11991493, upload-time = "2025-06-05T03:26:51.813Z" }, + { url = "https://files.pythonhosted.org/packages/d7/bf/0213986830a92d44d55153c1d69b509431a972eb73f204242988c4e66e86/pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20", size = 12470733, upload-time = "2025-06-06T00:00:18.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/0e/21eb48a3a34a7d4bac982afc2c4eb5ab09f2d988bdf29d92ba9ae8e90a79/pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b", size = 13212406, upload-time = "2025-06-05T03:26:55.992Z" }, + { url = "https://files.pythonhosted.org/packages/e8/6a/47fd7517cd8abe72a58706aab2b99e9438360d36dcdb052cf917b7bf3bdc/pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f", size = 11328359, upload-time = "2025-06-05T03:27:06.431Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b3/463bfe819ed60fb7e7ddffb4ae2ee04b887b3444feee6c19437b8f834837/pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3", size = 12024789, upload-time = "2025-06-05T03:27:09.875Z" }, + { url = "https://files.pythonhosted.org/packages/04/0c/e0704ccdb0ac40aeb3434d1c641c43d05f75c92e67525df39575ace35468/pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8", size = 12480734, upload-time = "2025-06-06T00:00:22.246Z" }, + { url = "https://files.pythonhosted.org/packages/e9/df/815d6583967001153bb27f5cf075653d69d51ad887ebbf4cfe1173a1ac58/pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9", size = 13223381, upload-time = "2025-06-05T03:27:15.641Z" }, + { url = "https://files.pythonhosted.org/packages/81/3a/3806d041bce032f8de44380f866059437fb79e36d6b22c82c187e65f765b/pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c", size = 11439876, upload-time = "2025-06-05T03:27:43.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/aa/3fc3181d12b95da71f5c2537c3e3b3af6ab3a8c392ab41ebb766e0929bc6/pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67", size = 11966182, upload-time = "2025-06-05T03:27:47.652Z" }, + { url = "https://files.pythonhosted.org/packages/37/e7/e12f2d9b0a2c4a2cc86e2aabff7ccfd24f03e597d770abfa2acd313ee46b/pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f", size = 12547686, upload-time = "2025-06-06T00:00:26.142Z" }, + { url = "https://files.pythonhosted.org/packages/39/c2/646d2e93e0af70f4e5359d870a63584dacbc324b54d73e6b3267920ff117/pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249", size = 13231847, upload-time = "2025-06-05T03:27:51.465Z" }, + { url = "https://files.pythonhosted.org/packages/5c/be/3ee7f424367e0f9e2daee93a3145a18b703fbf733ba56e1cf914af4b40d1/pandas-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5be867a0541a9fb47a4be0c5790a4bccd5b77b92f0a59eeec9375fafc2aa14", size = 11736943, upload-time = "2025-06-06T00:01:15.992Z" }, + { url = "https://files.pythonhosted.org/packages/83/95/81c7bb8f1aefecd948f80464177a7d9a1c5e205c5a1e279984fdacbac9de/pandas-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84141f722d45d0c2a89544dd29d35b3abfc13d2250ed7e68394eda7564bd6324", size = 12366679, upload-time = "2025-06-06T00:01:36.162Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/54cf52fb454408317136d683a736bb597864db74977efee05e63af0a7d38/pandas-2.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f95a2aef32614ed86216d3c450ab12a4e82084e8102e355707a1d96e33d51c34", size = 12924072, upload-time = "2025-06-06T00:01:44.243Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/bf/25018e431257f8a42c173080f9da7c592508269def54af4a76ccd1c14420/pandas-2.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e0f51973ba93a9f97185049326d75b942b9aeb472bec616a129806facb129ebb", size = 13696374, upload-time = "2025-06-06T00:02:14.346Z" }, ] [[package]] name = "parameterized" version = "0.9.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7fc/905272cefa4f3/parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/49/00c0c0cc24ff4266025a53e41336b79adaa5a4ebfad214f433d623f9865e/parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1", size = 24351, upload-time = "2023-03-27T02:01:11.592Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4e0/758e3d41bea3b/parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b" }, + { url = "https://files.pythonhosted.org/packages/00/2f/804f58f0b856ab3bf21617cccf5b39206e6c4c94c2cd227bde125ea6105f/parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b", size = 20475, upload-time = "2023-03-27T02:01:09.31Z" }, ] [[package]] name = "pathspec" version = "0.12.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a48/2d51503a1ab33/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash 
= "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a0d/503e138a4c123/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08" }, + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, ] [[package]] name = "peft" version = "0.15.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "accelerate", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "huggingface-hub", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform 
== 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "psutil", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pyyaml", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "safetensors", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "torch", version = "2.7.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.8.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.9.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 
'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, { name = "tqdm", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "transformers", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/705/9029f4d42a092/peft-0.15.2.tar.gz", hash = "sha256:7059029f4d42a092ded1aa117dd366a46084aef638bdd593f6ab0195d5427fcd" } +sdist = { url = "https://files.pythonhosted.org/packages/33/65/faa18cd8ffbe0f742c3f2559770646cce2574b9cd28a2a05e8d36f64e968/peft-0.15.2.tar.gz", hash = "sha256:7059029f4d42a092ded1aa117dd366a46084aef638bdd593f6ab0195d5427fcd", size = 472952, upload-time = "2025-04-15T15:27:53.09Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0df/c942b03b7af4b/peft-0.15.2-py3-none-any.whl", hash = "sha256:0dfc942b03b7af4b7267cd4e30b15e3a4a1d277adc581ce6245fc13f1f93d0a0" }, + { url = "https://files.pythonhosted.org/packages/68/85/8e6ea3d1089f2b6de3c1cd34bbbd7560912af9d34b057be3b8b8fefe1da3/peft-0.15.2-py3-none-any.whl", hash = "sha256:0dfc942b03b7af4b7267cd4e30b15e3a4a1d277adc581ce6245fc13f1f93d0a0", size = 411051, upload-time = "2025-04-15T15:27:50.799Z" }, ] [[package]] name = "pillow" version = "11.2.1" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ { url = "https://download.pytorch.org/whl/nightly/pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }, @@ -1794,33 +1835,33 @@ wheels = [ [[package]] name = "platformdirs" version = "4.3.8" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/3d5/12d96e16bcb95/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ff7/059bb7eb1179e/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4" }, + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, ] [[package]] name = "pluggy" version = "1.6.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7dc/c130b76258d33/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e92/0276dd6813095/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746" }, + { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] [[package]] name = "polygraphy" version = "0.49.24" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1e5/964a24af34d21/polygraphy-0.49.24-py2.py3-none-any.whl", hash = "sha256:1e5964a24af34d21b1f2f1817536b54625ac8c7fd7464d567d0a4fbae9cff8cc" }, + { url = "https://pypi.nvidia.com/polygraphy/polygraphy-0.49.24-py2.py3-none-any.whl", hash = "sha256:1e5964a24af34d21b1f2f1817536b54625ac8c7fd7464d567d0a4fbae9cff8cc" }, ] [[package]] name = "pre-commit" version = "4.2.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cfgv", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "identify", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -1828,306 +1869,306 @@ dependencies = [ { name = "pyyaml", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "virtualenv", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/601/283b9757afd87/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146" } +sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" } wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/a00/9ca7205f1eb49/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd" }, + { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, ] [[package]] name = "propcache" version = "0.3.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/40d/980c337653590/propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/967/a8eec513dbe08/propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5b9/145c35cc87313/propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9e6/4e948ab414119/propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/319/fa8765bfd6a26/propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c66/d8ccbc902ad54/propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2d2/19b0dbabe75e1/propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cd6/a55f65241c551/propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/997/9643ffc69b799/propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4cf/9e93a81979f14/propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2fc/e1df66915909f/propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4d0/dfdd9a2ebc77b/propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f6f/1324db48f001c/propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5cd/b0f3e1eb6dfc9/propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1eb/34d90aac9bfbc/propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f35/c7070eeec2cda/propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b23/c11c2c9e6d4e7/propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3e1/9ea4ea0bf4617/propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bd3/9c92e4c8f6cbf/propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b03/13e8b923b3814/propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e86/1ad8289240848/propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/610/14615c1274df8/propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/71e/be3fe42656a23/propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/aa8/efd8c5adc5a2c/propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c2f/e5c910f6007e7/propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a0a/b8cf8cdd2194f/propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/563/f9d8c03ad6455/propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fb6/e0faf8cb6b4be/propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1c5/c7ab7f2bb3f57/propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/050/b571b2e96ec94/propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e1c/4d24b804b3a87/propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e4f/e2a6d5ce975c1/propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/fec/cd282de1f6322/propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ec3/14cde7314d2dd/propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d24/9609e547c04d1/propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5ce/d33d827625d0a/propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/411/4c4ada8f3181a/propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/975/af16f406ce48f/propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a34/aa3a1abc50740/propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9ce/c3239c85ed15b/propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/055/43250deac8e61/propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5cb/5918253912e08/propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f3b/becd2f34d0e6d/propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/aca/63103895c7d96/propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5a0/a9898fdb99bf1/propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3e5/84b6d388aeb00/propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8a1/7583515a04358/propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5ae/d8d8308215089/propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6d8/e309ff9a0503e/propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/b65/5032b202028a5/propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9f6/4d91b751df779/propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/19a/06db789a4bd89/propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bef/100c88d869286/propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/873/80fb1f3089d2a/propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e47/4fc718e73ba5e/propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/17d/1c688a4433552/propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/83b/e47aa4e35b87c/propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/27c/6ac6aa9fc7bc6/propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035" 
}, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/64a/956dff37080b3/propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/82d/e5da8c8893056/propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0c3/c3a203c375b08/propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b30/3b194c2e6f171/propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/916/cd229b0150129/propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a46/1959ead5b38e2/propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/069/e7212890b0bcf/propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ef2/e4e91fb394576/propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/863/8f99dca15b9df/propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/9a8/ecf38de50a7f5/propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40" }, +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/c8/fdc6686a986feae3541ea23dcaa661bd93972d3940460646c6bb96e21c40/propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf", size = 43651, upload-time = "2025-03-26T03:06:12.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/6c/d01f9dfbbdc613305e0a831016844987a1fb4861dd221cd4c69b1216b43f/propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649", size = 206135, upload-time = "2025-03-26T03:03:40.757Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8a/e6e1c77394088f4cfdace4a91a7328e398ebed745d59c2f6764135c5342d/propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f", size = 220517, upload-time = "2025-03-26T03:03:42.657Z" }, + { url = "https://files.pythonhosted.org/packages/19/3b/6c44fa59d6418f4239d5db8b1ece757351e85d6f3ca126dfe37d427020c8/propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229", size = 218952, upload-time = "2025-03-26T03:03:44.549Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e4/4aeb95a1cd085e0558ab0de95abfc5187329616193a1012a6c4c930e9f7a/propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46", size = 206593, upload-time = "2025-03-26T03:03:46.114Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/6a/29fa75de1cbbb302f1e1d684009b969976ca603ee162282ae702287b6621/propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7", size = 196745, upload-time = "2025-03-26T03:03:48.02Z" }, + { url = "https://files.pythonhosted.org/packages/19/7e/2237dad1dbffdd2162de470599fa1a1d55df493b16b71e5d25a0ac1c1543/propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0", size = 203369, upload-time = "2025-03-26T03:03:49.63Z" }, + { url = "https://files.pythonhosted.org/packages/a4/bc/a82c5878eb3afb5c88da86e2cf06e1fe78b7875b26198dbb70fe50a010dc/propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519", size = 198723, upload-time = "2025-03-26T03:03:51.091Z" }, + { url = "https://files.pythonhosted.org/packages/17/76/9632254479c55516f51644ddbf747a45f813031af5adcb8db91c0b824375/propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd", size = 200751, upload-time = "2025-03-26T03:03:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c3/a90b773cf639bd01d12a9e20c95be0ae978a5a8abe6d2d343900ae76cd71/propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259", size = 210730, upload-time = "2025-03-26T03:03:54.498Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ec/ad5a952cdb9d65c351f88db7c46957edd3d65ffeee72a2f18bd6341433e0/propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e", size = 213499, upload-time = "2025-03-26T03:03:56.054Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/c0/ea5133dda43e298cd2010ec05c2821b391e10980e64ee72c0a76cdbb813a/propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136", size = 207132, upload-time = "2025-03-26T03:03:57.398Z" }, + { url = "https://files.pythonhosted.org/packages/59/f9/4c0a5cf6974c2c43b1a6810c40d889769cc8f84cea676cbe1e62766a45f8/propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744", size = 233633, upload-time = "2025-03-26T03:04:07.044Z" }, + { url = "https://files.pythonhosted.org/packages/e7/64/66f2f4d1b4f0007c6e9078bd95b609b633d3957fe6dd23eac33ebde4b584/propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0", size = 241124, upload-time = "2025-03-26T03:04:08.676Z" }, + { url = "https://files.pythonhosted.org/packages/aa/bf/7b8c9fd097d511638fa9b6af3d986adbdf567598a567b46338c925144c1b/propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5", size = 240283, upload-time = "2025-03-26T03:04:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/fa/c9/e85aeeeaae83358e2a1ef32d6ff50a483a5d5248bc38510d030a6f4e2816/propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256", size = 232498, upload-time = "2025-03-26T03:04:11.616Z" }, + { url = "https://files.pythonhosted.org/packages/8e/66/acb88e1f30ef5536d785c283af2e62931cb934a56a3ecf39105887aa8905/propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073", size = 221486, upload-time = 
"2025-03-26T03:04:13.102Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f9/233ddb05ffdcaee4448508ee1d70aa7deff21bb41469ccdfcc339f871427/propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d", size = 222675, upload-time = "2025-03-26T03:04:14.658Z" }, + { url = "https://files.pythonhosted.org/packages/98/b8/eb977e28138f9e22a5a789daf608d36e05ed93093ef12a12441030da800a/propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f", size = 215727, upload-time = "2025-03-26T03:04:16.207Z" }, + { url = "https://files.pythonhosted.org/packages/89/2d/5f52d9c579f67b8ee1edd9ec073c91b23cc5b7ff7951a1e449e04ed8fdf3/propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0", size = 217878, upload-time = "2025-03-26T03:04:18.11Z" }, + { url = "https://files.pythonhosted.org/packages/7a/fd/5283e5ed8a82b00c7a989b99bb6ea173db1ad750bf0bf8dff08d3f4a4e28/propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a", size = 230558, upload-time = "2025-03-26T03:04:19.562Z" }, + { url = "https://files.pythonhosted.org/packages/90/38/ab17d75938ef7ac87332c588857422ae126b1c76253f0f5b1242032923ca/propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a", size = 233754, upload-time = "2025-03-26T03:04:21.065Z" }, + { url = "https://files.pythonhosted.org/packages/06/5d/3b921b9c60659ae464137508d3b4c2b3f52f592ceb1964aa2533b32fcf0b/propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9", size = 226088, upload-time = "2025-03-26T03:04:22.718Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/b0/911eda0865f90c0c7e9f0415d40a5bf681204da5fd7ca089361a64c16b28/propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f", size = 243031, upload-time = "2025-03-26T03:04:31.977Z" }, + { url = "https://files.pythonhosted.org/packages/0a/06/0da53397c76a74271621807265b6eb61fb011451b1ddebf43213df763669/propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70", size = 249100, upload-time = "2025-03-26T03:04:33.45Z" }, + { url = "https://files.pythonhosted.org/packages/f1/eb/13090e05bf6b963fc1653cdc922133ced467cb4b8dab53158db5a37aa21e/propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7", size = 250170, upload-time = "2025-03-26T03:04:35.542Z" }, + { url = "https://files.pythonhosted.org/packages/3b/4c/f72c9e1022b3b043ec7dc475a0f405d4c3e10b9b1d378a7330fecf0652da/propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25", size = 245000, upload-time = "2025-03-26T03:04:37.501Z" }, + { url = "https://files.pythonhosted.org/packages/e8/fd/970ca0e22acc829f1adf5de3724085e778c1ad8a75bec010049502cb3a86/propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277", size = 230262, upload-time = "2025-03-26T03:04:39.532Z" }, + { url = "https://files.pythonhosted.org/packages/c4/42/817289120c6b9194a44f6c3e6b2c3277c5b70bbad39e7df648f177cc3634/propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8", size = 236772, upload-time = 
"2025-03-26T03:04:41.109Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9c/3b3942b302badd589ad6b672da3ca7b660a6c2f505cafd058133ddc73918/propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e", size = 231133, upload-time = "2025-03-26T03:04:42.544Z" }, + { url = "https://files.pythonhosted.org/packages/98/a1/75f6355f9ad039108ff000dfc2e19962c8dea0430da9a1428e7975cf24b2/propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee", size = 230741, upload-time = "2025-03-26T03:04:44.06Z" }, + { url = "https://files.pythonhosted.org/packages/67/0c/3e82563af77d1f8731132166da69fdfd95e71210e31f18edce08a1eb11ea/propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815", size = 244047, upload-time = "2025-03-26T03:04:45.983Z" }, + { url = "https://files.pythonhosted.org/packages/f7/50/9fb7cca01532a08c4d5186d7bb2da6c4c587825c0ae134b89b47c7d62628/propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5", size = 246467, upload-time = "2025-03-26T03:04:47.699Z" }, + { url = "https://files.pythonhosted.org/packages/a9/02/ccbcf3e1c604c16cc525309161d57412c23cf2351523aedbb280eb7c9094/propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7", size = 241022, upload-time = "2025-03-26T03:04:49.195Z" }, + { url = "https://files.pythonhosted.org/packages/62/37/fc357e345bc1971e21f76597028b059c3d795c5ca7690d7a8d9a03c9708a/propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5", size = 225804, upload-time = "2025-03-26T03:04:57.158Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/f1/16e12c33e3dbe7f8b737809bad05719cff1dccb8df4dafbcff5575002c0e/propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb", size = 230650, upload-time = "2025-03-26T03:04:58.61Z" }, + { url = "https://files.pythonhosted.org/packages/3e/a2/018b9f2ed876bf5091e60153f727e8f9073d97573f790ff7cdf6bc1d1fb8/propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7", size = 234235, upload-time = "2025-03-26T03:05:00.599Z" }, + { url = "https://files.pythonhosted.org/packages/45/5f/3faee66fc930dfb5da509e34c6ac7128870631c0e3582987fad161fcb4b1/propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120", size = 228249, upload-time = "2025-03-26T03:05:02.11Z" }, + { url = "https://files.pythonhosted.org/packages/62/1e/a0d5ebda5da7ff34d2f5259a3e171a94be83c41eb1e7cd21a2105a84a02e/propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654", size = 214964, upload-time = "2025-03-26T03:05:03.599Z" }, + { url = "https://files.pythonhosted.org/packages/db/a0/d72da3f61ceab126e9be1f3bc7844b4e98c6e61c985097474668e7e52152/propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e", size = 222501, upload-time = "2025-03-26T03:05:05.107Z" }, + { url = "https://files.pythonhosted.org/packages/18/6d/a008e07ad7b905011253adbbd97e5b5375c33f0b961355ca0a30377504ac/propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b", size = 217917, upload-time = "2025-03-26T03:05:06.59Z" }, + { url 
= "https://files.pythonhosted.org/packages/98/37/02c9343ffe59e590e0e56dc5c97d0da2b8b19fa747ebacf158310f97a79a/propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53", size = 217089, upload-time = "2025-03-26T03:05:08.1Z" }, + { url = "https://files.pythonhosted.org/packages/53/1b/d3406629a2c8a5666d4674c50f757a77be119b113eedd47b0375afdf1b42/propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5", size = 228102, upload-time = "2025-03-26T03:05:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/cd/a7/3664756cf50ce739e5f3abd48febc0be1a713b1f389a502ca819791a6b69/propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7", size = 230122, upload-time = "2025-03-26T03:05:11.408Z" }, + { url = "https://files.pythonhosted.org/packages/35/36/0bbabaacdcc26dac4f8139625e930f4311864251276033a52fd52ff2a274/propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef", size = 226818, upload-time = "2025-03-26T03:05:12.909Z" }, + { url = "https://files.pythonhosted.org/packages/3e/2f/854e653c96ad1161f96194c6678a41bbb38c7947d17768e8811a77635a08/propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de", size = 292265, upload-time = "2025-03-26T03:05:21.654Z" }, + { url = "https://files.pythonhosted.org/packages/40/8d/090955e13ed06bc3496ba4a9fb26c62e209ac41973cb0d6222de20c6868f/propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6", size = 294412, upload-time = "2025-03-26T03:05:23.147Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/e6/d51601342e53cc7582449e6a3c14a0479fab2f0750c1f4d22302e34219c6/propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7", size = 294290, upload-time = "2025-03-26T03:05:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/3b/4d/be5f1a90abc1881884aa5878989a1acdafd379a91d9c7e5e12cef37ec0d7/propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458", size = 282926, upload-time = "2025-03-26T03:05:26.459Z" }, + { url = "https://files.pythonhosted.org/packages/57/2b/8f61b998c7ea93a2b7eca79e53f3e903db1787fca9373af9e2cf8dc22f9d/propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11", size = 267808, upload-time = "2025-03-26T03:05:28.188Z" }, + { url = "https://files.pythonhosted.org/packages/11/1c/311326c3dfce59c58a6098388ba984b0e5fb0381ef2279ec458ef99bd547/propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c", size = 290916, upload-time = "2025-03-26T03:05:29.757Z" }, + { url = "https://files.pythonhosted.org/packages/4b/74/91939924b0385e54dc48eb2e4edd1e4903ffd053cf1916ebc5347ac227f7/propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf", size = 262661, upload-time = "2025-03-26T03:05:31.472Z" }, + { url = "https://files.pythonhosted.org/packages/c2/d7/e6079af45136ad325c5337f5dd9ef97ab5dc349e0ff362fe5c5db95e2454/propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27", size = 264384, upload-time = "2025-03-26T03:05:32.984Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/d5/ba91702207ac61ae6f1c2da81c5d0d6bf6ce89e08a2b4d44e411c0bbe867/propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757", size = 291420, upload-time = "2025-03-26T03:05:34.496Z" }, + { url = "https://files.pythonhosted.org/packages/58/70/2117780ed7edcd7ba6b8134cb7802aada90b894a9810ec56b7bb6018bee7/propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18", size = 290880, upload-time = "2025-03-26T03:05:36.256Z" }, + { url = "https://files.pythonhosted.org/packages/4a/1f/ecd9ce27710021ae623631c0146719280a929d895a095f6d85efb6a0be2e/propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a", size = 287407, upload-time = "2025-03-26T03:05:37.799Z" }, + { url = "https://files.pythonhosted.org/packages/ae/7e/3e3b36854e96be2e881bc6e87293d59c74dd734dd038dd4981474be44e26/propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894", size = 209214, upload-time = "2025-03-26T03:05:47.366Z" }, + { url = "https://files.pythonhosted.org/packages/11/1a/ac0f757cc0babdc8217056fca85150066cf43bf11db9651e6b7d8e0646d6/propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035", size = 224702, upload-time = "2025-03-26T03:05:48.946Z" }, + { url = "https://files.pythonhosted.org/packages/92/0a/0cf77d0e984b7058019ffa5385b3efd6962cbd5340a8f278ae103032863a/propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908", size = 223085, upload-time = "2025-03-26T03:05:50.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/fc/cb52a0caf803caff9b95b0a99e7c9c87f15b7e34ba0feebfd2572b49013d/propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5", size = 209613, upload-time = "2025-03-26T03:05:52.36Z" }, + { url = "https://files.pythonhosted.org/packages/e5/fc/b1d1fdffbe1e0278ab535f8d21fc6b030889417714a545755bdd5ebe9bb0/propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5", size = 199931, upload-time = "2025-03-26T03:05:54.302Z" }, + { url = "https://files.pythonhosted.org/packages/23/a9/2a2f8d93d8f526c35dd8dbbc4a1ac22a106712cd821e15e2a6530aea8931/propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7", size = 208937, upload-time = "2025-03-26T03:05:56.38Z" }, + { url = "https://files.pythonhosted.org/packages/ef/71/5247a264b95e8d4ba86757cf9ad6a523d764bd4579a2d80007a2d4d2b0ad/propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641", size = 202577, upload-time = "2025-03-26T03:05:58.325Z" }, + { url = "https://files.pythonhosted.org/packages/6f/4e/c8ec771731f1b1e7d07bd8875f1d13c1564b5d60f7483624d021eaef5687/propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294", size = 204669, upload-time = "2025-03-26T03:05:59.849Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b8/bdfcb1170a7b8504226064d7c0b4deb61acbcc6bb2e754ee25fb36c1b72a/propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf", size = 214334, upload-time = "2025-03-26T03:06:01.905Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/c6/fdb9e8ba161a4e12c75a7415cb99314cad195d3b8ae9d770783cec54001e/propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c", size = 218052, upload-time = "2025-03-26T03:06:03.586Z" }, + { url = "https://files.pythonhosted.org/packages/67/3f/0dd87220f61598b61b590a8b3562142ae475a9c0f694ee32bf97e4e41d44/propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe", size = 210852, upload-time = "2025-03-26T03:06:05.045Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d3/c3cb8f1d6ae3b37f83e1de806713a9b3642c5895f0215a62e1a4bd6e5e34/propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40", size = 12376, upload-time = "2025-03-26T03:06:10.5Z" }, ] [[package]] name = "protobuf" version = "6.31.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d8c/ac4c982f0b957/protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/f3/b9655a711b32c19720253f6f06326faf90580834e2e83f840472d752bc8b/protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a", size = 441797, upload-time = "2025-05-28T19:25:54.947Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a40/fc12b84c15488/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4ee/898bf66f7a8b0/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6" }, - { url 
= "https://pypi.jetson-ai-lab.dev/root/pypi/+f/720/a6c7e6b77288b/protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e" }, + { url = "https://files.pythonhosted.org/packages/76/a1/7a5a94032c83375e4fe7e7f56e3976ea6ac90c5e85fac8576409e25c39c3/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39", size = 322115, upload-time = "2025-05-28T19:25:47.128Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/b59d405d64d31999244643d88c45c8241c58f17cc887e73bcb90602327f8/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6", size = 321070, upload-time = "2025-05-28T19:25:50.036Z" }, + { url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e", size = 168724, upload-time = "2025-05-28T19:25:53.926Z" }, ] [[package]] name = "psutil" version = "7.0.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7be/9c3eba38beccb/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1fc/ee592b4c6f146/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4b1/388a4f6875d7e/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a5f/098451abc2828/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993" }, + { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, ] [[package]] name = "pulp" version = "3.2.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fc6/c02c47c06342c/pulp-3.2.1.tar.gz", hash = "sha256:fc6c02c47c06342c586b175924add753cad7638ff6149b3b43e87ac6709ac469" } +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/cd/cb1308632ad5b092ebbfe64d0cd0b9906caec6e52bff88f54ddd3d434694/pulp-3.2.1.tar.gz", hash = "sha256:fc6c02c47c06342c586b175924add753cad7638ff6149b3b43e87ac6709ac469", size = 16297436, upload-time = "2025-05-29T09:25:51.647Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c6c/f7fe84cef1579/pulp-3.2.1-py3-none-any.whl", hash = "sha256:c6cf7fe84cef15795bc7c27e2f3c6784db5cf6ebf68e94d5a659b02415f982c5" }, + { url = "https://files.pythonhosted.org/packages/84/45/2bb878df73b5545405faff0b0b30f72929222356387a41b50ca268951d5d/pulp-3.2.1-py3-none-any.whl", hash = "sha256:c6cf7fe84cef15795bc7c27e2f3c6784db5cf6ebf68e94d5a659b02415f982c5", size = 16383592, upload-time = "2025-05-29T09:25:49.262Z" }, ] [[package]] name = "pyarrow" version = "20.0.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/feb/c4a913592573c/pyarrow-20.0.0.tar.gz", hash = "sha256:febc4a913592573c8d5805091a6c2b5064c8bd6e002131f01061797d91c783c1" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/641/5a0d017448745/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6415a0d0174487456ddc9beaead703d0ded5966129fa4fd3114d76b5d1c5ceae" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/15a/a1b3b2587e743/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15aa1b3b2587e74328a730457068dc6c89e6dcbf438d4369f572af9d320a25ee" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/560/5919fbe67a794/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5605919fbe67a7948c1f03b9f3727d82846c053cd2ce9303ace791855923fd20" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a57/04f29a74b8167/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:a5704f29a74b81673d266e5ec1fe376f060627c2e42c5c7651288ed4b0db29e9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/001/38f79ee1b5aca/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f2d/67ac28f57a362/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5f0/fb1041267e996/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f0fb1041267e9968c6d0d2ce3ff92e3928b243e2b6d11eeb84d9ac547308232" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b8f/f87cc83760153/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ff87cc837601532cc8242d2f7e09b4e02404de1b797aee747dd4ba4bd6313f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7a3/a5dcf54286e61/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:7a3a5dcf54286e6141d5114522cf31dd67a9e7c9133d150799f30ee302a7a1ab" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a6a/d3e7758ecf559/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a6ad3e7758ecf559900261a4df985662df54fb7fdb55e8e3b3aa99b23d526b62" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6bb/830757103a6cb/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6bb830757103a6cb300a04610e08d9636f0cd223d32f388418ea893a3e655f1c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/96e/37f0766ecb451/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:96e37f0766ecb4514a899d9a3554fadda770fb57ddf42b63d80f14bc20aa7db3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4ba/3cf4182828be7/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4ba3cf4182828be7a896cbd232aa8dd6a31bd1f9e32776cc3796c012855e1199" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2c3/a01f313ffe27a/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c3a01f313ffe27ac4126f4c2e5ea0f36a5fc6ab51f8726cf41fee4b256680bd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a27/91f69ad72addd/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:a2791f69ad72addd33510fec7bb14ee06c2a448e06b649e264c094c5b5f7ce28" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/425/0e28a22302ce8/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4250e28a22302ce8692d3a0e8ec9d9dde54ec00d237cff4dfa9c1fbf79e472a8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/89e/030dc58fc760e/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:89e030dc58fc760e4010148e6ff164d2f44441490280ef1e97a542375e41058e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/610/2b4864d77102d/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6102b4864d77102dbbb72965618e204e550135a940c2534711d5ffa787df2a5a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/aa0/d288143a85858/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa0d288143a8585806e3cc7c39566407aab646fb9ece164609dac1cfff45f6ae" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b69/53f0114f8d6f3/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6953f0114f8d6f3d905d98e987d0924dabce59c3cda380bdfaa25a6201563b4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/991/f85b48a8a5e83/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:991f85b48a8a5e839b2128590ce07611fae48a904cae6cab1f089c5955b57eb5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/97c/8dc984ed09cb0/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = 
"sha256:97c8dc984ed09cb07d618d57d8d4b67a5100a30c3818c2fb0b04599f0da2de7b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9b7/1daf534f47458/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9b71daf534f4745818f96c214dbc1e6124d7daf059167330b610fc69b6f3d3e3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e8b/88758f9303fa5/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8b88758f9303fa5a83d6c90e176714b2fd3852e776fc2d7e42a22dd6c2fb368" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7f4/c8534e2ff0597/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4c8534e2ff059765647aa69b75d6543f9fef59e2cd4c6d18015192565d2b70" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3e1/f8a47f4b4ae4c/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1f8a47f4b4ae4c69c4d702cfbdfe4d41e18e5c7ef6f1bb1c50918c1e81c57b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a1f/60dc14658efaa/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:a1f60dc14658efaa927f8214734f6a01a806d7690be4b3232ba526836d216122" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/204/a846dca751428/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:204a846dca751428991346976b914d6d2a82ae5b8316a6ed99789ebf976551e6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f3b/117b922af5e4c/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f3b117b922af5e4c6b9a9115825726cac7d8b1421c37c2b5e24fbacc8930612c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e72/4a3fd23ae5b9c/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e724a3fd23ae5b9c010e7be857f4405ed5e679db5c93e66204db1a69f733936a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cb4/97649e505dc36/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cb497649e505dc36542d0e68eca1a3c94ecbe9799cb67b578b55f2441a247fbc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/115/29a2283cb1f62/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11529a2283cb1f6271d7c23e4a8f9f8b7fd173f7360776b668e509d712a02eec" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6fc/1499ed3b4b57e/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fc1499ed3b4b57ee4e090e1cea6eb3584793fe3d1b4297bbf53f09b434991a5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/db5/3390eaf8a4dab/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:db53390eaf8a4dab4dbd6d93c85c5cf002db24902dbff0ca7d988beb5c9dd15b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/851/c6a8260ad387c/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:851c6a8260ad387caf82d2bbf54759130534723e37083111d4ed481cb253cc0d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e22/f80b97a271f0a/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e22f80b97a271f0a7d9cd07394a7d348f80d3ac63ed7cc38b6d1b696ab3b2619" }, +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/ee/a7810cb9f3d6e9238e61d312076a9859bf3668fd21c69744de9532383912/pyarrow-20.0.0.tar.gz", hash = "sha256:febc4a913592573c8d5805091a6c2b5064c8bd6e002131f01061797d91c783c1", size = 1125187, upload-time = "2025-04-27T12:34:23.264Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/df/4099b69a432b5cb412dd18adc2629975544d656df3d7fda6d73c5dba935d/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6415a0d0174487456ddc9beaead703d0ded5966129fa4fd3114d76b5d1c5ceae", size = 41337051, upload-time = "2025-04-27T12:27:44.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/27/99922a9ac1c9226f346e3a1e15e63dee6f623ed757ff2893f9d6994a69d3/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15aa1b3b2587e74328a730457068dc6c89e6dcbf438d4369f572af9d320a25ee", size = 42404659, upload-time = "2025-04-27T12:27:51.715Z" }, + { url = "https://files.pythonhosted.org/packages/21/d1/71d91b2791b829c9e98f1e0d85be66ed93aff399f80abb99678511847eaa/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5605919fbe67a7948c1f03b9f3727d82846c053cd2ce9303ace791855923fd20", size = 40695446, upload-time = "2025-04-27T12:27:59.643Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ca/ae10fba419a6e94329707487835ec721f5a95f3ac9168500bcf7aa3813c7/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a5704f29a74b81673d266e5ec1fe376f060627c2e42c5c7651288ed4b0db29e9", size = 42278528, upload-time = "2025-04-27T12:28:07.297Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a6/aba40a2bf01b5d00cf9cd16d427a5da1fad0fb69b514ce8c8292ab80e968/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75", size = 42918162, upload-time = "2025-04-27T12:28:15.716Z" }, + { url = "https://files.pythonhosted.org/packages/93/6b/98b39650cd64f32bf2ec6d627a9bd24fcb3e4e6ea1873c5e1ea8a83b1a18/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8", size = 44550319, upload-time = "2025-04-27T12:28:27.026Z" }, + { url = "https://files.pythonhosted.org/packages/44/fb/dfb2dfdd3e488bb14f822d7335653092dde150cffc2da97de6e7500681f9/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f0fb1041267e9968c6d0d2ce3ff92e3928b243e2b6d11eeb84d9ac547308232", size = 41334704, upload-time = "2025-04-27T12:28:55.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/0d/08a95878d38808051a953e887332d4a76bc06c6ee04351918ee1155407eb/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ff87cc837601532cc8242d2f7e09b4e02404de1b797aee747dd4ba4bd6313f", size = 42399836, upload-time = "2025-04-27T12:29:02.13Z" }, + { url = "https://files.pythonhosted.org/packages/f3/cd/efa271234dfe38f0271561086eedcad7bc0f2ddd1efba423916ff0883684/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:7a3a5dcf54286e6141d5114522cf31dd67a9e7c9133d150799f30ee302a7a1ab", size = 40711789, upload-time = "2025-04-27T12:29:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/46/1f/7f02009bc7fc8955c391defee5348f510e589a020e4b40ca05edcb847854/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a6ad3e7758ecf559900261a4df985662df54fb7fdb55e8e3b3aa99b23d526b62", size = 42301124, upload-time = "2025-04-27T12:29:17.187Z" }, + { url = "https://files.pythonhosted.org/packages/4f/92/692c562be4504c262089e86757a9048739fe1acb4024f92d39615e7bab3f/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6bb830757103a6cb300a04610e08d9636f0cd223d32f388418ea893a3e655f1c", size = 42916060, upload-time = "2025-04-27T12:29:24.253Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ec/9f5c7e7c828d8e0a3c7ef50ee62eca38a7de2fa6eb1b8fa43685c9414fef/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:96e37f0766ecb4514a899d9a3554fadda770fb57ddf42b63d80f14bc20aa7db3", size = 44547640, upload-time = "2025-04-27T12:29:32.782Z" }, + { url = "https://files.pythonhosted.org/packages/31/fd/c565e5dcc906a3b471a83273039cb75cb79aad4a2d4a12f76cc5ae90a4b8/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ba3cf4182828be7a896cbd232aa8dd6a31bd1f9e32776cc3796c012855e1199", size = 41334890, upload-time = "2025-04-27T12:29:59.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/a9/3bdd799e2c9b20c1ea6dc6fa8e83f29480a97711cf806e823f808c2316ac/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c3a01f313ffe27ac4126f4c2e5ea0f36a5fc6ab51f8726cf41fee4b256680bd", size = 42421775, upload-time = "2025-04-27T12:30:06.875Z" }, + { url = "https://files.pythonhosted.org/packages/10/f7/da98ccd86354c332f593218101ae56568d5dcedb460e342000bd89c49cc1/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:a2791f69ad72addd33510fec7bb14ee06c2a448e06b649e264c094c5b5f7ce28", size = 40687231, upload-time = "2025-04-27T12:30:13.954Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1b/2168d6050e52ff1e6cefc61d600723870bf569cbf41d13db939c8cf97a16/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4250e28a22302ce8692d3a0e8ec9d9dde54ec00d237cff4dfa9c1fbf79e472a8", size = 42295639, upload-time = "2025-04-27T12:30:21.949Z" }, + { url = "https://files.pythonhosted.org/packages/b2/66/2d976c0c7158fd25591c8ca55aee026e6d5745a021915a1835578707feb3/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:89e030dc58fc760e4010148e6ff164d2f44441490280ef1e97a542375e41058e", size = 42908549, upload-time = "2025-04-27T12:30:29.551Z" }, + { url = "https://files.pythonhosted.org/packages/31/a9/dfb999c2fc6911201dcbf348247f9cc382a8990f9ab45c12eabfd7243a38/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6102b4864d77102dbbb72965618e204e550135a940c2534711d5ffa787df2a5a", size = 44557216, upload-time = "2025-04-27T12:30:36.977Z" }, + { url = "https://files.pythonhosted.org/packages/92/41/fe18c7c0b38b20811b73d1bdd54b1fccba0dab0e51d2048878042d84afa8/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa0d288143a8585806e3cc7c39566407aab646fb9ece164609dac1cfff45f6ae", size = 41327322, upload-time = "2025-04-27T12:31:05.587Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/ab/7dbf3d11db67c72dbf36ae63dcbc9f30b866c153b3a22ef728523943eee6/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6953f0114f8d6f3d905d98e987d0924dabce59c3cda380bdfaa25a6201563b4", size = 42411441, upload-time = "2025-04-27T12:31:15.675Z" }, + { url = "https://files.pythonhosted.org/packages/90/c3/0c7da7b6dac863af75b64e2f827e4742161128c350bfe7955b426484e226/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:991f85b48a8a5e839b2128590ce07611fae48a904cae6cab1f089c5955b57eb5", size = 40677027, upload-time = "2025-04-27T12:31:24.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/27/43a47fa0ff9053ab5203bb3faeec435d43c0d8bfa40179bfd076cdbd4e1c/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:97c8dc984ed09cb07d618d57d8d4b67a5100a30c3818c2fb0b04599f0da2de7b", size = 42281473, upload-time = "2025-04-27T12:31:31.311Z" }, + { url = "https://files.pythonhosted.org/packages/bc/0b/d56c63b078876da81bbb9ba695a596eabee9b085555ed12bf6eb3b7cab0e/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9b71daf534f4745818f96c214dbc1e6124d7daf059167330b610fc69b6f3d3e3", size = 42893897, upload-time = "2025-04-27T12:31:39.406Z" }, + { url = "https://files.pythonhosted.org/packages/92/ac/7d4bd020ba9145f354012838692d48300c1b8fe5634bfda886abcada67ed/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8b88758f9303fa5a83d6c90e176714b2fd3852e776fc2d7e42a22dd6c2fb368", size = 44543847, upload-time = "2025-04-27T12:31:45.997Z" }, + { url = "https://files.pythonhosted.org/packages/d5/bc/e48b4fa544d2eea72f7844180eb77f83f2030b84c8dad860f199f94307ed/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4c8534e2ff059765647aa69b75d6543f9fef59e2cd4c6d18015192565d2b70", size = 41256434, upload-time = "2025-04-27T12:32:11.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/01/974043a29874aa2cf4f87fb07fd108828fc7362300265a2a64a94965e35b/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1f8a47f4b4ae4c69c4d702cfbdfe4d41e18e5c7ef6f1bb1c50918c1e81c57b", size = 42353648, upload-time = "2025-04-27T12:32:20.766Z" }, + { url = "https://files.pythonhosted.org/packages/68/95/cc0d3634cde9ca69b0e51cbe830d8915ea32dda2157560dda27ff3b3337b/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:a1f60dc14658efaa927f8214734f6a01a806d7690be4b3232ba526836d216122", size = 40619853, upload-time = "2025-04-27T12:32:28.1Z" }, + { url = "https://files.pythonhosted.org/packages/29/c2/3ad40e07e96a3e74e7ed7cc8285aadfa84eb848a798c98ec0ad009eb6bcc/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:204a846dca751428991346976b914d6d2a82ae5b8316a6ed99789ebf976551e6", size = 42241743, upload-time = "2025-04-27T12:32:35.792Z" }, + { url = "https://files.pythonhosted.org/packages/eb/cb/65fa110b483339add6a9bc7b6373614166b14e20375d4daa73483755f830/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f3b117b922af5e4c6b9a9115825726cac7d8b1421c37c2b5e24fbacc8930612c", size = 42839441, upload-time = "2025-04-27T12:32:46.64Z" }, + { url = "https://files.pythonhosted.org/packages/98/7b/f30b1954589243207d7a0fbc9997401044bf9a033eec78f6cb50da3f304a/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e724a3fd23ae5b9c010e7be857f4405ed5e679db5c93e66204db1a69f733936a", size = 44503279, upload-time = "2025-04-27T12:32:56.503Z" }, + { url = "https://files.pythonhosted.org/packages/c4/e3/21e5758e46219fdedf5e6c800574dd9d17e962e80014cfe08d6d475be863/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb497649e505dc36542d0e68eca1a3c94ecbe9799cb67b578b55f2441a247fbc", size = 41351968, upload-time = "2025-04-27T12:33:28.215Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/f5/ed6a4c4b11f9215092a35097a985485bb7d879cb79d93d203494e8604f4e/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11529a2283cb1f6271d7c23e4a8f9f8b7fd173f7360776b668e509d712a02eec", size = 42415208, upload-time = "2025-04-27T12:33:37.04Z" }, + { url = "https://files.pythonhosted.org/packages/44/e5/466a63668ba25788ee8d38d55f853a60469ae7ad1cda343db9f3f45e0b0a/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fc1499ed3b4b57ee4e090e1cea6eb3584793fe3d1b4297bbf53f09b434991a5", size = 40708556, upload-time = "2025-04-27T12:33:46.483Z" }, + { url = "https://files.pythonhosted.org/packages/e8/d7/4c4d4e4cf6e53e16a519366dfe9223ee4a7a38e6e28c1c0d372b38ba3fe7/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:db53390eaf8a4dab4dbd6d93c85c5cf002db24902dbff0ca7d988beb5c9dd15b", size = 42291754, upload-time = "2025-04-27T12:33:55.4Z" }, + { url = "https://files.pythonhosted.org/packages/07/d5/79effb32585b7c18897d3047a2163034f3f9c944d12f7b2fd8df6a2edc70/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:851c6a8260ad387caf82d2bbf54759130534723e37083111d4ed481cb253cc0d", size = 42936483, upload-time = "2025-04-27T12:34:03.694Z" }, + { url = "https://files.pythonhosted.org/packages/09/5c/f707603552c058b2e9129732de99a67befb1f13f008cc58856304a62c38b/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e22f80b97a271f0a7d9cd07394a7d348f80d3ac63ed7cc38b6d1b696ab3b2619", size = 44558895, upload-time = "2025-04-27T12:34:13.26Z" }, ] [[package]] name = "pybind11" version = "2.13.6" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ba6/af10348c12b24/pybind11-2.13.6.tar.gz", hash = "sha256:ba6af10348c12b24e92fa086b39cfba0eff619b61ac77c406167d813b096d39a" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d2/c1/72b9622fcb32ff98b054f724e213c7f70d6898baa714f4516288456ceaba/pybind11-2.13.6.tar.gz", hash = "sha256:ba6af10348c12b24e92fa086b39cfba0eff619b61ac77c406167d813b096d39a", size = 218403, upload-time = "2024-09-14T00:35:22.606Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/237/c41e29157b962/pybind11-2.13.6-py3-none-any.whl", hash = "sha256:237c41e29157b962835d356b370ededd57594a26d5894a795960f0047cb5caf5" }, + { url = "https://files.pythonhosted.org/packages/13/2f/0f24b288e2ce56f51c920137620b4434a38fd80583dbbe24fc2a1656c388/pybind11-2.13.6-py3-none-any.whl", hash = "sha256:237c41e29157b962835d356b370ededd57594a26d5894a795960f0047cb5caf5", size = 243282, upload-time = "2024-09-14T00:35:20.361Z" }, ] [[package]] name = "pydantic" version = "2.11.5" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pydantic-core", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "typing-extensions", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "typing-inspection", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7f8/53db3d0ce78ce/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/86/8ce9040065e8f924d642c58e4a344e33163a07f6b57f836d0d734e0ad3fb/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", size = 787102, upload-time = "2025-05-22T21:18:08.761Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f9c/26ba06f974774/pydantic-2.11.5-py3-none-any.whl", hash = 
"sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7" }, + { url = "https://files.pythonhosted.org/packages/b5/69/831ed22b38ff9b4b64b66569f0e5b7b97cf3638346eb95a2147fdb49ad5f/pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7", size = 444229, upload-time = "2025-05-22T21:18:06.329Z" }, ] [[package]] name = "pydantic-core" version = "2.33.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7cb/8bc3605c29176/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/006/9c9acc3f3981b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d53/b22f2032c42ea/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/040/5262705a123b7/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4b2/5d91e288e2c4e/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6bd/fe4b3789761f3/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/efe/c8db3266b76ef/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/031/c57d67ca86902/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f8d/e619080e94434/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/736/62edf539e72a9/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dc4/6a01bf8d62f22/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a14/4d4f717285c6d/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/73c/f6373c21bc80b/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3dc/625f4aa797135/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/881/b21b554949997/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bdc/25f3681f7b785/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fe5/b32187cbc0c86/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bc7/aee6f634a6f4a/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/235/f45e5dbcccf6b/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4e6/1206137cbc65e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eb8/c529b2819c371/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c52/b02ad8b4e2cf1/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/960/81f1605125ba0/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8f5/7a69461af2a5f/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/572/c7e6c8bb4774d/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/db4/b41f9bd95fbe5/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fa8/54f5cf7e33842/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5f4/83cfb75ff7030/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0a9/f2c9dd1965682/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2b0/a451c263b01ac/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1ea/40a64d23faa25/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/0fb/2d542b4d66f94/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9fd/ac5d6ffa1b5a8/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/04a/1a413977ab517/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c8e/7af2f4e0194c2/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5c9/2edd15cd58b3c/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/651/32b7b4a1c0bed/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/952/37e53bb015f67/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c54/c939ee22dc8e2/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/53a/57d2ed685940a/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/09f/b9dd6571aacd0/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0e6/116757f7959a7/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8d5/5ab81c57b8ff8/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c20/c462aa4434b33/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/448/57c3227d3fb5e/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eb9/b459ca4df0e5c/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9fc/d347d2cc5c23b/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/87b/31b6846e361ef/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/aa9/d91b338f2df05/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/205/8a32994f1fde4/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0e0/3262ab796d986/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1a8/695a8d00c73e5/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fa7/54d1850735a0b/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2f8/2865531efd18d/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2bf/b5112df54209d/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/646/32ff9d614e5ee/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/f88/9f7a40498cc07/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/de4/b83bb311557e4/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/82f/68293f055f51b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d3f/26877a748dc42/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dac/89aea9af8cd67/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/970/919794d126ba8/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3eb/3fe62804e8f85/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3ab/cd9392a36025e/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3a1/c81334778f9e3/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039" }, +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, + { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = 
"sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = 
"2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/23/9a/2e70d6388d7cda488ae38f57bc2f7b03ee442fbcf0d75d848304ac7e405b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb", size = 1898467, upload-time = "2025-04-23T18:32:31.119Z" }, + { url = "https://files.pythonhosted.org/packages/ff/2e/1568934feb43370c1ffb78a77f0baaa5a8b6897513e7a91051af707ffdc4/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7", size = 1983041, upload-time = "2025-04-23T18:32:33.655Z" }, + { url = "https://files.pythonhosted.org/packages/01/1a/1a1118f38ab64eac2f6269eb8c120ab915be30e387bb561e3af904b12499/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4", size = 2136503, 
upload-time = "2025-04-23T18:32:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/5c/da/44754d1d7ae0f22d6d3ce6c6b1486fc07ac2c524ed8f6eca636e2e1ee49b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b", size = 2736079, upload-time = "2025-04-23T18:32:37.659Z" }, + { url = "https://files.pythonhosted.org/packages/4d/98/f43cd89172220ec5aa86654967b22d862146bc4d736b1350b4c41e7c9c03/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3", size = 2006508, upload-time = "2025-04-23T18:32:39.637Z" }, + { url = "https://files.pythonhosted.org/packages/2b/cc/f77e8e242171d2158309f830f7d5d07e0531b756106f36bc18712dc439df/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a", size = 2113693, upload-time = "2025-04-23T18:32:41.818Z" }, + { url = "https://files.pythonhosted.org/packages/54/7a/7be6a7bd43e0a47c147ba7fbf124fe8aaf1200bc587da925509641113b2d/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782", size = 2074224, upload-time = "2025-04-23T18:32:44.033Z" }, + { url = "https://files.pythonhosted.org/packages/2a/07/31cf8fadffbb03be1cb520850e00a8490c0927ec456e8293cafda0726184/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9", size = 2245403, upload-time = "2025-04-23T18:32:45.836Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8d/bbaf4c6721b668d44f01861f297eb01c9b35f612f6b8e14173cb204e6240/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e", size = 2242331, upload-time = 
"2025-04-23T18:32:47.618Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, + { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 
2238446, upload-time = "2025-04-23T18:33:10.313Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/d8/60/bc06fa9027c7006cc6dd21e48dbf39076dc39d9abbaf718a1604973a9670/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d", size = 1892858, upload-time = "2025-04-23T18:33:36.933Z" }, + { url = "https://files.pythonhosted.org/packages/f2/40/9d03997d9518816c68b4dfccb88969756b9146031b61cd37f781c74c9b6a/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535", size = 2068498, upload-time = "2025-04-23T18:33:38.997Z" }, + { url = "https://files.pythonhosted.org/packages/d8/62/d490198d05d2d86672dc269f52579cad7261ced64c2df213d5c16e0aecb1/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d", size = 2108428, upload-time = "2025-04-23T18:33:41.18Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ec/4cd215534fd10b8549015f12ea650a1a973da20ce46430b68fc3185573e8/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6", size = 2069854, upload-time = "2025-04-23T18:33:43.446Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1a/abbd63d47e1d9b0d632fee6bb15785d0889c8a6e0a6c3b5a8e28ac1ec5d2/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca", size = 2237859, upload-time = "2025-04-23T18:33:45.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/1c/fa883643429908b1c90598fd2642af8839efd1d835b65af1f75fba4d94fe/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039", size = 2239059, upload-time = "2025-04-23T18:33:47.735Z" }, ] [[package]] name = "pydot" version = "4.0.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyparsing", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/12f/16493337cade2/pydot-4.0.0.tar.gz", hash = "sha256:12f16493337cade2f7631b87c8ccd299ba2e251f3ee5d0732a058df2887afe97" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/c3/6034ed1ebf2e3ba95a0e35fa7c43104e40444c0ed2b5325702c63e824dbf/pydot-4.0.0.tar.gz", hash = "sha256:12f16493337cade2f7631b87c8ccd299ba2e251f3ee5d0732a058df2887afe97", size = 161793, upload-time = "2025-05-04T11:13:03.214Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cf8/6e13a6cfe2a96/pydot-4.0.0-py3-none-any.whl", hash = "sha256:cf86e13a6cfe2a96758a9702537f77e0ac1368db8ef277b4d3b34473ea425c97" }, + { url = "https://files.pythonhosted.org/packages/0a/16/984c0cf5073a23154b1f95c9d131b14c9fea83bfadae4ba8fc169daded11/pydot-4.0.0-py3-none-any.whl", hash = "sha256:cf86e13a6cfe2a96758a9702537f77e0ac1368db8ef277b4d3b34473ea425c97", size = 37535, upload-time = "2025-05-04T11:13:01.458Z" }, ] [[package]] name = "pygments" version = "2.19.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/61c/16d2a8576dc06/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9ea/1544ad55cecf4/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c" }, + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, ] [[package]] name = "pynvml" version = "12.0.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nvidia-ml-py", marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/299/ce2451a6a17e6/pynvml-12.0.0.tar.gz", hash = "sha256:299ce2451a6a17e6822d6faee750103e25b415f06f59abb8db65d30f794166f5" } +sdist = { url = "https://files.pythonhosted.org/packages/26/6f/6b5880ed0239e85b9a39aed103b65b2ef81425beef9f45e5c035bf008330/pynvml-12.0.0.tar.gz", hash = "sha256:299ce2451a6a17e6822d6faee750103e25b415f06f59abb8db65d30f794166f5", size = 33636, upload-time = "2024-12-02T15:04:36.631Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fdf/f84b62a27dbe9/pynvml-12.0.0-py3-none-any.whl", hash = "sha256:fdff84b62a27dbe98e08e1a647eb77342bef1aebe0878bcd15e99a83fcbecb9e" }, + { 
url = "https://files.pythonhosted.org/packages/ed/df/f7cf07a65a96dd11d71f346f9c2863accdd4784da83af7181b067d556cbc/pynvml-12.0.0-py3-none-any.whl", hash = "sha256:fdff84b62a27dbe98e08e1a647eb77342bef1aebe0878bcd15e99a83fcbecb9e", size = 26560, upload-time = "2024-12-02T15:04:35.047Z" }, ] [[package]] name = "pyparsing" version = "3.2.3" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b9c/13f1ab8b3b542/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a74/9938e02d6fd0b/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf" }, + { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, ] [[package]] name = "pytest" version = "8.4.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "exceptiongroup", marker = "(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'windows')" }, { name = "iniconfig", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -2136,71 +2177,65 @@ dependencies = [ { name = "pygments", marker = "sys_platform == 'linux' or sys_platform 
== 'windows'" }, { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/14d/920b48472ea0d/pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/aa/405082ce2749be5398045152251ac69c0f3578c7077efc53431303af97ce/pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6", size = 1515232, upload-time = "2025-06-02T17:36:30.03Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f40/f825768ad76c0/pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e" }, + { url = "https://files.pythonhosted.org/packages/2f/de/afa024cbe022b1b318a3d224125aa24939e99b4ff6f22e0ba639a2eaee47/pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e", size = 363797, upload-time = "2025-06-02T17:36:27.859Z" }, ] [[package]] name = "pytest-xdist" version = "3.7.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "execnet", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pytest", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f92/48c99a7c15b7d/pytest_xdist-3.7.0.tar.gz", hash = "sha256:f9248c99a7c15b7d2f90715df93610353a485827bc06eefb6566d23f6400f126" } +sdist = { url = "https://files.pythonhosted.org/packages/49/dc/865845cfe987b21658e871d16e0a24e871e00884c545f246dd8f6f69edda/pytest_xdist-3.7.0.tar.gz", hash = "sha256:f9248c99a7c15b7d2f90715df93610353a485827bc06eefb6566d23f6400f126", size = 87550, upload-time = "2025-05-26T21:18:20.251Z" } wheels 
= [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7d3/fbd2559982650/pytest_xdist-3.7.0-py3-none-any.whl", hash = "sha256:7d3fbd255998265052435eb9daa4e99b62e6fb9cfb6efd1f858d4d8c0c7f0ca0" }, + { url = "https://files.pythonhosted.org/packages/0d/b2/0e802fde6f1c5b2f7ae7e9ad42b83fd4ecebac18a8a8c2f2f14e39dce6e1/pytest_xdist-3.7.0-py3-none-any.whl", hash = "sha256:7d3fbd255998265052435eb9daa4e99b62e6fb9cfb6efd1f858d4d8c0c7f0ca0", size = 46142, upload-time = "2025-05-26T21:18:18.759Z" }, ] [[package]] name = "python-dateutil" version = "2.9.0.post0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/37d/d54208da7e1cd/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3" } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a8b/2bc7bffae2822/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] [[package]] name = "pytorch-triton" version = "3.3.1+gitc8757738" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = 
"https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ - { name = "setuptools", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'linux')" }, + { name = "setuptools", marker = "platform_machine != 'aarch64' and sys_platform == 'linux'" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp310-cp310-linux_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp311-cp311-linux_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp312-cp312-linux_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp313-cp313-linux_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp313-cp313t-linux_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp39-cp39-linux_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/pytorch_triton-3.3.1%2Bgitc8757738-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl" }, ] [[package]] name = "pytz" version = 
"2025.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/360/b9e3dbb49a209/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5dd/f76296dd8c44c/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00" }, + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] [[package]] name = "pyyaml" version = "6.0.2" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ { url = "https://download.pytorch.org/whl/nightly/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl" }, @@ -2222,67 +2257,67 @@ wheels = [ [[package]] name = "regex" version = "2024.11.6" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7ab/159b063c52a03/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519" } -wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/d36/60c82f209655a/regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d22/326fcdef5e08c/regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f1a/c758ef6aebfc8/regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/997/d6a487ff00807/regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/02a/02d2bb04fec86/regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f02/f93b92358ee3f/regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/06e/b1be98df10e81/regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/040/df6fe1a5504eb/regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/fda/bbfc59f2c6edb/regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/844/7d2d39b5abe38/regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/da8/f5fc57d1933de/regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/106/2b39a0a2b75a9/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/167/ed4852351d8a7/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2d5/48dafee61f06e/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f2a/19f302cd1ce5d/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bec/9931dfb61ddd8/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/971/4398225f299aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/202/eb32e89f60fc1/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/418/1b814e56078e9/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/068/376da5a7e4da5/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ac1/0f2c4184420d8/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b85/c2530be953a89/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bb2/6437975da7dc3/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/abf/a5080c374a76a/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/70b/7fa6606c2881c/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/0c3/2f75920cf99fe/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/982/e6d21414e78e1/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a7c/2155f790e2fb4/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/149/f5008d286636e/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e53/64a4502efca09/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0a8/6e7eeca091c09/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bde/01f35767c4a78/regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b58/3904576650166/regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1c4/de13f06a0d54f/regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3cd/e6e9f2580eb16/regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0d7/f453dca13f40a/regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/59d/fe1ed21aea057/regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b97/c1e0bd37c5cd7/regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f9d/1e379028e0fc2/regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/132/91b39131e2d00/regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4f5/1f88c126370dc/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/505/6b185ca113c88/regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/2e3/4b51b650b23ed/regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/567/0bce7b200273e/regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/089/86dce1339bc93/regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/93c/0b12d3d3bc25a/regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/764/e71f22ab3b305/regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f05/6bf21105c2515/regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/69a/b78f848845569/regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/86f/ddba590aad920/regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/684/d7a212682996d/regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a03/e02f48cd1abbd/regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b" }, +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494, upload-time = "2024-11-06T20:12:31.635Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/a2/6dd36e16341ab95e4c6073426561b9bfdeb1a9c9b63ab1b579c2e96cb105/regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde", size = 782511, upload-time = "2024-11-06T20:09:04.062Z" }, + { url = "https://files.pythonhosted.org/packages/1b/2b/323e72d5d2fd8de0d9baa443e1ed70363ed7e7b2fb526f5950c5cb99c364/regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e", size = 821149, upload-time = "2024-11-06T20:09:06.237Z" }, + { url = "https://files.pythonhosted.org/packages/90/30/63373b9ea468fbef8a907fd273e5c329b8c9535fee36fc8dba5fecac475d/regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2", size = 809707, upload-time = "2024-11-06T20:09:07.715Z" }, + { url = "https://files.pythonhosted.org/packages/f2/98/26d3830875b53071f1f0ae6d547f1d98e964dd29ad35cbf94439120bb67a/regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf", size = 781702, upload-time = "2024-11-06T20:09:10.101Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/55/eb2a068334274db86208ab9d5599ffa63631b9f0f67ed70ea7c82a69bbc8/regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c", size = 771976, upload-time = "2024-11-06T20:09:11.566Z" }, + { url = "https://files.pythonhosted.org/packages/74/c0/be707bcfe98254d8f9d2cff55d216e946f4ea48ad2fd8cf1428f8c5332ba/regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86", size = 697397, upload-time = "2024-11-06T20:09:13.119Z" }, + { url = "https://files.pythonhosted.org/packages/49/dc/bb45572ceb49e0f6509f7596e4ba7031f6819ecb26bc7610979af5a77f45/regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67", size = 768726, upload-time = "2024-11-06T20:09:14.85Z" }, + { url = "https://files.pythonhosted.org/packages/5a/db/f43fd75dc4c0c2d96d0881967897926942e935d700863666f3c844a72ce6/regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d", size = 775098, upload-time = "2024-11-06T20:09:16.504Z" }, + { url = "https://files.pythonhosted.org/packages/99/d7/f94154db29ab5a89d69ff893159b19ada89e76b915c1293e98603d39838c/regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2", size = 839325, upload-time = "2024-11-06T20:09:18.698Z" }, + { url = "https://files.pythonhosted.org/packages/f7/17/3cbfab1f23356fbbf07708220ab438a7efa1e0f34195bf857433f79f1788/regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008", size = 843277, upload-time = "2024-11-06T20:09:21.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/f2/48b393b51900456155de3ad001900f94298965e1cad1c772b87f9cfea011/regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62", size = 773197, upload-time = "2024-11-06T20:09:24.092Z" }, + { url = "https://files.pythonhosted.org/packages/25/4d/ab21047f446693887f25510887e6820b93f791992994f6498b0318904d4a/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114", size = 792121, upload-time = "2024-11-06T20:09:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/45/ee/c867e15cd894985cb32b731d89576c41a4642a57850c162490ea34b78c3b/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3", size = 831275, upload-time = "2024-11-06T20:09:40.371Z" }, + { url = "https://files.pythonhosted.org/packages/b3/12/b0f480726cf1c60f6536fa5e1c95275a77624f3ac8fdccf79e6727499e28/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f", size = 818257, upload-time = "2024-11-06T20:09:43.059Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ce/0d0e61429f603bac433910d99ef1a02ce45a8967ffbe3cbee48599e62d88/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0", size = 792727, upload-time = "2024-11-06T20:09:48.19Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c1/243c83c53d4a419c1556f43777ccb552bccdf79d08fda3980e4e77dd9137/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55", size = 780667, upload-time = 
"2024-11-06T20:09:49.828Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f4/75eb0dd4ce4b37f04928987f1d22547ddaf6c4bae697623c1b05da67a8aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89", size = 776963, upload-time = "2024-11-06T20:09:51.819Z" }, + { url = "https://files.pythonhosted.org/packages/16/5d/95c568574e630e141a69ff8a254c2f188b4398e813c40d49228c9bbd9875/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d", size = 784700, upload-time = "2024-11-06T20:09:53.982Z" }, + { url = "https://files.pythonhosted.org/packages/8e/b5/f8495c7917f15cc6fee1e7f395e324ec3e00ab3c665a7dc9d27562fd5290/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34", size = 848592, upload-time = "2024-11-06T20:09:56.222Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/6dd7118e8cb212c3c60b191b932dc57db93fb2e36fb9e0e92f72a5909af9/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d", size = 852929, upload-time = "2024-11-06T20:09:58.642Z" }, + { url = "https://files.pythonhosted.org/packages/11/9b/5a05d2040297d2d254baf95eeeb6df83554e5e1df03bc1a6687fc4ba1f66/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45", size = 781213, upload-time = "2024-11-06T20:10:00.867Z" }, + { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976, upload-time = "2024-11-06T20:10:13.24Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077, upload-time = "2024-11-06T20:10:15.37Z" }, + { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160, upload-time = "2024-11-06T20:10:19.027Z" }, + { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896, upload-time = "2024-11-06T20:10:21.85Z" }, + { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997, upload-time = "2024-11-06T20:10:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725, upload-time = "2024-11-06T20:10:28.067Z" }, + { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481, upload-time = "2024-11-06T20:10:31.612Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896, upload-time = "2024-11-06T20:10:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138, upload-time = "2024-11-06T20:10:36.142Z" }, + { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692, upload-time = "2024-11-06T20:10:38.394Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fd/37868b75eaf63843165f1d2122ca6cb94bfc0271e4428cf58c0616786dce/regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0", size = 795023, upload-time = "2024-11-06T20:10:51.102Z" }, + { url = "https://files.pythonhosted.org/packages/c4/7c/d4cd9c528502a3dedb5c13c146e7a7a539a3853dc20209c8e75d9ba9d1b2/regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7", size = 833072, upload-time = "2024-11-06T20:10:52.926Z" }, + { url = "https://files.pythonhosted.org/packages/4f/db/46f563a08f969159c5a0f0e722260568425363bea43bb7ae370becb66a67/regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7", size = 823130, upload-time = "2024-11-06T20:10:54.828Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/60/1eeca2074f5b87df394fccaa432ae3fc06c9c9bfa97c5051aed70e6e00c2/regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c", size = 796857, upload-time = "2024-11-06T20:10:56.634Z" }, + { url = "https://files.pythonhosted.org/packages/10/db/ac718a08fcee981554d2f7bb8402f1faa7e868c1345c16ab1ebec54b0d7b/regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3", size = 784006, upload-time = "2024-11-06T20:10:59.369Z" }, + { url = "https://files.pythonhosted.org/packages/c2/41/7da3fe70216cea93144bf12da2b87367590bcf07db97604edeea55dac9ad/regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07", size = 781650, upload-time = "2024-11-06T20:11:02.042Z" }, + { url = "https://files.pythonhosted.org/packages/a7/d5/880921ee4eec393a4752e6ab9f0fe28009435417c3102fc413f3fe81c4e5/regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e", size = 789545, upload-time = "2024-11-06T20:11:03.933Z" }, + { url = "https://files.pythonhosted.org/packages/dc/96/53770115e507081122beca8899ab7f5ae28ae790bfcc82b5e38976df6a77/regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6", size = 853045, upload-time = "2024-11-06T20:11:06.497Z" }, + { url = "https://files.pythonhosted.org/packages/31/d3/1372add5251cc2d44b451bd94f43b2ec78e15a6e82bff6a290ef9fd8f00a/regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4", size = 860182, upload-time = "2024-11-06T20:11:09.06Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/e3/c446a64984ea9f69982ba1a69d4658d5014bc7a0ea468a07e1a1265db6e2/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d", size = 787733, upload-time = "2024-11-06T20:11:11.256Z" }, + { url = "https://files.pythonhosted.org/packages/49/70/c7eaa219efa67a215846766fde18d92d54cb590b6a04ffe43cef30057622/regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b", size = 782012, upload-time = "2024-11-06T20:11:59.218Z" }, + { url = "https://files.pythonhosted.org/packages/89/e5/ef52c7eb117dd20ff1697968219971d052138965a4d3d9b95e92e549f505/regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0", size = 820580, upload-time = "2024-11-06T20:12:01.969Z" }, + { url = "https://files.pythonhosted.org/packages/5f/3f/9f5da81aff1d4167ac52711acf789df13e789fe6ac9545552e49138e3282/regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b", size = 809110, upload-time = "2024-11-06T20:12:04.786Z" }, + { url = "https://files.pythonhosted.org/packages/86/44/2101cc0890c3621b90365c9ee8d7291a597c0722ad66eccd6ffa7f1bcc09/regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef", size = 780919, upload-time = "2024-11-06T20:12:06.944Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2e/3e0668d8d1c7c3c0d397bf54d92fc182575b3a26939aed5000d3cc78760f/regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48", size = 771515, upload-time = 
"2024-11-06T20:12:09.9Z" }, + { url = "https://files.pythonhosted.org/packages/a6/49/1bc4584254355e3dba930a3a2fd7ad26ccba3ebbab7d9100db0aff2eedb0/regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13", size = 696957, upload-time = "2024-11-06T20:12:12.319Z" }, + { url = "https://files.pythonhosted.org/packages/c8/dd/42879c1fc8a37a887cd08e358af3d3ba9e23038cd77c7fe044a86d9450ba/regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2", size = 768088, upload-time = "2024-11-06T20:12:15.149Z" }, + { url = "https://files.pythonhosted.org/packages/89/96/c05a0fe173cd2acd29d5e13c1adad8b706bcaa71b169e1ee57dcf2e74584/regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95", size = 774752, upload-time = "2024-11-06T20:12:17.416Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f3/a757748066255f97f14506483436c5f6aded7af9e37bca04ec30c90ca683/regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9", size = 838862, upload-time = "2024-11-06T20:12:19.639Z" }, + { url = "https://files.pythonhosted.org/packages/5c/93/c6d2092fd479dcaeea40fc8fa673822829181ded77d294a7f950f1dda6e2/regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f", size = 842622, upload-time = "2024-11-06T20:12:21.841Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9c/daa99532c72f25051a90ef90e1413a8d54413a9e64614d9095b0c1c154d0/regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b", size = 772713, upload-time = "2024-11-06T20:12:24.785Z" }, ] [[package]] name = "requests" version 
= "2.32.3" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ { name = "certifi", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "charset-normalizer", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -2296,59 +2331,59 @@ wheels = [ [[package]] name = "rich" version = "14.0.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pygments", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/82f/1bc23a6a21ebc/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1c9/491e1951aac09/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0" }, + { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, ] [[package]] name = "ruff" version = "0.11.13" -source = { registry = 
"https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/26f/a247dc68d1d4e/ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/da/9c6f995903b4d9474b39da91d2d626659af3ff1eeb43e9ae7c119349dba6/ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514", size = 4282054, upload-time = "2025-06-05T21:00:15.721Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4bd/fbf1240533f40/ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ab1/5324140078913/ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6c5/1f93029d54a91/ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/180/8b3ed53e1a777/ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d28/ce58b5ecf0f43/ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/55e/4bc3a77842da3/ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/633/bf2c6f35678c5/ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4ff/bc82d70424b27/ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4a9/ddd3ec62a9a89/ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d23/7a496e0778d71/ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/268/16a218ca6ef02/ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/51c/3f95abd9331dc/ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492" }, + { url = "https://files.pythonhosted.org/packages/7d/ce/a11d381192966e0b4290842cc8d4fac7dc9214ddf627c11c1afff87da29b/ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46", size = 10292516, upload-time = "2025-06-05T20:59:32.944Z" }, + { url = "https://files.pythonhosted.org/packages/8b/5b/f6d94f2980fa1ee854b41568368a2e1252681b9238ab2895e133d303538f/ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a", size = 10646324, upload-time = "2025-06-05T20:59:42.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/9c/b4c2acf24ea4426016d511dfdc787f4ce1ceb835f3c5fbdbcb32b1c63bda/ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc", size = 10174416, upload-time = "2025-06-05T20:59:44.319Z" }, + { url = "https://files.pythonhosted.org/packages/f3/10/e2e62f77c65ede8cd032c2ca39c41f48feabedb6e282bfd6073d81bb671d/ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629", size = 11724197, upload-time = "2025-06-05T20:59:46.935Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f0/466fe8469b85c561e081d798c45f8a1d21e0b4a5ef795a1d7f1a9a9ec182/ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933", size = 12511615, upload-time = "2025-06-05T20:59:49.534Z" }, + { url = "https://files.pythonhosted.org/packages/17/0e/cefe778b46dbd0cbcb03a839946c8f80a06f7968eb298aa4d1a4293f3448/ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165", size = 12117080, upload-time = "2025-06-05T20:59:51.654Z" }, + { url = "https://files.pythonhosted.org/packages/5d/2c/caaeda564cbe103bed145ea557cb86795b18651b0f6b3ff6a10e84e5a33f/ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71", size = 11326315, upload-time = "2025-06-05T20:59:54.469Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/782e7d681d660eda8c536962920c41309e6dd4ebcea9a2714ed5127d44bd/ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9", size = 11555640, upload-time = "2025-06-05T20:59:56.986Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/d4/3d580c616316c7f07fb3c99dbecfe01fbaea7b6fd9a82b801e72e5de742a/ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc", size = 10507364, upload-time = "2025-06-05T20:59:59.154Z" }, + { url = "https://files.pythonhosted.org/packages/5a/dc/195e6f17d7b3ea6b12dc4f3e9de575db7983db187c378d44606e5d503319/ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7", size = 10141462, upload-time = "2025-06-05T21:00:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/8e/39a094af6967faa57ecdeacb91bedfb232474ff8c3d20f16a5514e6b3534/ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432", size = 11121028, upload-time = "2025-06-05T21:00:04.06Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c0/b0b508193b0e8a1654ec683ebab18d309861f8bd64e3a2f9648b80d392cb/ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492", size = 11602992, upload-time = "2025-06-05T21:00:06.249Z" }, ] [[package]] name = "safetensors" version = "0.5.3" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b6b/0d6ecacec39a4/safetensors-0.5.3.tar.gz", hash = "sha256:b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/7e/2d5d6ee7b40c0682315367ec7475693d110f512922d582fef1bd4a63adc3/safetensors-0.5.3.tar.gz", hash = "sha256:b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965", size = 67210, upload-time = "2025-02-26T09:15:13.155Z" } wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/11b/ce6164887cd49/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4a2/43be3590bc330/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8bd/84b12b1670a6f/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/391/ac8cab7c82945/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cea/d1fa41fc54b1e/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/107/7f3e94182d726/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/799/021e78287bac6/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/df2/6da01aaac5043/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/32c/3ef2d7af8b9f5/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl", hash = 
"sha256:32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/37f/1521be045e56f/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04" }, + { url = "https://files.pythonhosted.org/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467", size = 459493, upload-time = "2025-02-26T09:14:51.812Z" }, + { url = "https://files.pythonhosted.org/packages/df/5c/bf2cae92222513cc23b3ff85c4a1bb2811a2c3583ac0f8e8d502751de934/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e", size = 472400, upload-time = "2025-02-26T09:14:53.549Z" }, + { url = "https://files.pythonhosted.org/packages/58/11/7456afb740bd45782d0f4c8e8e1bb9e572f1bf82899fb6ace58af47b4282/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d", size = 522891, upload-time = "2025-02-26T09:14:55.717Z" }, + { url = "https://files.pythonhosted.org/packages/57/3d/fe73a9d2ace487e7285f6e157afee2383bd1ddb911b7cb44a55cf812eae3/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9", size = 537694, upload-time = "2025-02-26T09:14:57.036Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a", size = 471642, upload-time = "2025-02-26T09:15:00.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/20/1fbe16f9b815f6c5a672f5b760951e20e17e43f67f231428f871909a37f6/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d", size = 502241, upload-time = "2025-02-26T09:14:58.303Z" }, + { url = "https://files.pythonhosted.org/packages/5f/18/8e108846b506487aa4629fe4116b27db65c3dde922de2c8e0cc1133f3f29/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b", size = 638001, upload-time = "2025-02-26T09:15:05.79Z" }, + { url = "https://files.pythonhosted.org/packages/82/5a/c116111d8291af6c8c8a8b40628fe833b9db97d8141c2a82359d14d9e078/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff", size = 734013, upload-time = "2025-02-26T09:15:07.892Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ff/41fcc4d3b7de837963622e8610d998710705bbde9a8a17221d85e5d0baad/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135", size = 670687, upload-time = "2025-02-26T09:15:09.979Z" }, + { url = "https://files.pythonhosted.org/packages/40/ad/2b113098e69c985a3d8fbda4b902778eae4a35b7d5188859b4a63d30c161/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04", size = 643147, upload-time = "2025-02-26T09:15:11.185Z" }, ] [[package]] name = "scipy" version = "1.13.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' not in platform_release 
and sys_platform == 'linux'", @@ -2359,28 +2394,28 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/095/a87a0312b08df/scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/00/48c2f661e2816ccf2ecd77982f6605b2950afe60f60a52b4cbbc2504aa8f/scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c", size = 57210720, upload-time = "2024-05-23T03:29:26.079Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cfa/31f1def5c819b/scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f26/264b282b9da09/scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ecc/fa1906eacc02d/scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/e89/369d27f9e7b08/scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a78/b4b3345f1b6f6/scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/454/84bee6d656337/scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/949/ae67db5fa78a8/scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/de3/ade0e53bc1f21/scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2ac/65fb503dad642/scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d53/3654b7d221a6a/scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/637/e98dcf185ba7f/scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a01/4c2b3697bde71/scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c" 
}, + { url = "https://files.pythonhosted.org/packages/c0/66/9cd4f501dd5ea03e4a4572ecd874936d0da296bd04d1c45ae1a4a75d9c3a/scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989", size = 33743202, upload-time = "2024-05-23T03:19:15.138Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ba/7255e5dc82a65adbe83771c72f384d99c43063648456796436c9a5585ec3/scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f", size = 38577335, upload-time = "2024-05-23T03:19:21.984Z" }, + { url = "https://files.pythonhosted.org/packages/49/a5/bb9ded8326e9f0cdfdc412eeda1054b914dfea952bda2097d174f8832cc0/scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94", size = 38820728, upload-time = "2024-05-23T03:19:28.225Z" }, + { url = "https://files.pythonhosted.org/packages/80/ba/8be64fe225360a4beb6840f3cbee494c107c0887f33350d0a47d55400b01/scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299", size = 33694638, upload-time = "2024-05-23T03:19:55.104Z" }, + { url = "https://files.pythonhosted.org/packages/36/07/035d22ff9795129c5a847c64cb43c1fa9188826b59344fee28a3ab02e283/scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa", size = 38569931, upload-time = "2024-05-23T03:20:01.82Z" }, + { url = "https://files.pythonhosted.org/packages/d9/10/f9b43de37e5ed91facc0cfff31d45ed0104f359e4f9a68416cbf4e790241/scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59", size = 38838145, upload-time = "2024-05-23T03:20:09.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/cb/26e4a47364bbfdb3b7fb3363be6d8a1c543bcd70a7753ab397350f5f189a/scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627", size = 33406062, upload-time = "2024-05-23T03:20:36.012Z" }, + { url = "https://files.pythonhosted.org/packages/88/ab/6ecdc526d509d33814835447bbbeedbebdec7cca46ef495a61b00a35b4bf/scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884", size = 38218311, upload-time = "2024-05-23T03:20:42.086Z" }, + { url = "https://files.pythonhosted.org/packages/0b/00/9f54554f0f8318100a71515122d8f4f503b1a2c4b4cfab3b4b68c0eb08fa/scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16", size = 38442493, upload-time = "2024-05-23T03:20:48.292Z" }, + { url = "https://files.pythonhosted.org/packages/6d/0f/aaa55b06d474817cea311e7b10aab2ea1fd5d43bc6a2861ccc9caec9f418/scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004", size = 33732190, upload-time = "2024-05-23T03:21:14.41Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/d0ad1a96f80962ba65e2ce1de6a1e59edecd1f0a7b55990ed208848012e0/scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d", size = 38612244, upload-time = "2024-05-23T03:21:21.827Z" }, + { url = "https://files.pythonhosted.org/packages/8d/02/1165905f14962174e6569076bcc3315809ae1291ed14de6448cc151eedfd/scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c", size = 38845637, upload-time = "2024-05-23T03:21:28.729Z" }, ] [[package]] name = "scipy" version = 
"1.15.3" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -2405,55 +2440,55 @@ resolution-markers = [ "python_full_version == '3.10.*' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.10' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.10' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/eae/3cf522bc7df64/scipy-1.15.3.tar.gz", hash = 
"sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf", size = 59419214, upload-time = "2025-05-08T16:13:05.955Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/263/961f658ce2165/scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9e2/abc762b0811e0/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ed7/284b21a7a0c8f/scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/538/0741e53df2c56/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/721/d6b4ef5dc82ca/scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/39c/b9c62e471b1bb/scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/795/c46999bae8459/scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/18a/aacb735ab38b3/scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c05/045d8b9bfd807/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/271/e3713e645149e/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6cf/d56fc1a8e53f6/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0ff/17c0bb1cb3295/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/791/67bba085c31f3/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c9d/eabd6d547aee2/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dde/4fc32993071ac/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f77/f853d584e72e8/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/0a7/69105537aa07a/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9db/984639887e3df/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/40e/54d5c7e7ebf1a/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5e7/21fed53187e71/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5" }, + { url = "https://files.pythonhosted.org/packages/db/0a/92b1de4a7adc7a15dcf5bddc6e191f6f29ee663b30511ce20467ef9b82e4/scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82", size = 35547617, upload-time = "2025-05-08T16:04:43.546Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6d/41991e503e51fc1134502694c5fa7a1671501a17ffa12716a4a9151af3df/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40", size = 37662964, upload-time = "2025-05-08T16:04:49.431Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/3df8f83cb15f3500478c889be8fb18700813b95e9e087328230b98d547ff/scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e", size = 37238749, upload-time = "2025-05-08T16:04:55.215Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/3e/b3257cf446f2a3533ed7809757039016b74cd6f38271de91682aa844cfc5/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c", size = 40022383, upload-time = "2025-05-08T16:05:01.914Z" }, + { url = "https://files.pythonhosted.org/packages/56/c5/1032cdb565f146109212153339f9cb8b993701e9fe56b1c97699eee12586/scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889", size = 35503415, upload-time = "2025-05-08T16:05:34.699Z" }, + { url = "https://files.pythonhosted.org/packages/bd/37/89f19c8c05505d0601ed5650156e50eb881ae3918786c8fd7262b4ee66d3/scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982", size = 37652622, upload-time = "2025-05-08T16:05:40.762Z" }, + { url = "https://files.pythonhosted.org/packages/7e/31/be59513aa9695519b18e1851bb9e487de66f2d31f835201f1b42f5d4d475/scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9", size = 37244796, upload-time = "2025-05-08T16:05:48.119Z" }, + { url = "https://files.pythonhosted.org/packages/10/c0/4f5f3eeccc235632aab79b27a74a9130c6c35df358129f7ac8b29f562ac7/scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594", size = 40047684, upload-time = "2025-05-08T16:05:54.22Z" }, + { url = "https://files.pythonhosted.org/packages/56/30/a6f08f84ee5b7b28b4c597aca4cbe545535c39fe911845a96414700b64ba/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45", size = 35210199, upload-time = "2025-05-08T16:06:26.159Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/1f/03f52c282437a168ee2c7c14a1a0d0781a9a4a8962d84ac05c06b4c5b555/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49", size = 37309455, upload-time = "2025-05-08T16:06:32.778Z" }, + { url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e", size = 36885140, upload-time = "2025-05-08T16:06:39.249Z" }, + { url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539", size = 39710549, upload-time = "2025-05-08T16:06:45.729Z" }, + { url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825", size = 35174018, upload-time = "2025-05-08T16:07:19.427Z" }, + { url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7", size = 37269716, upload-time = "2025-05-08T16:07:25.712Z" }, + { url = "https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11", size = 36872342, upload-time = "2025-05-08T16:07:31.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126", size = 39670869, upload-time = "2025-05-08T16:07:38.002Z" }, + { url = "https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723", size = 34809045, upload-time = "2025-05-08T16:08:03.929Z" }, + { url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb", size = 36703062, upload-time = "2025-05-08T16:08:09.558Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4", size = 36393132, upload-time = "2025-05-08T16:08:15.34Z" }, + { url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5", size = 38979503, upload-time = "2025-05-08T16:08:21.513Z" }, ] [[package]] name = "setuptools" version = "80.9.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f36/b47402ecde768/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/062/d34222ad13e0c/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922" }, + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, ] [[package]] name = "six" version = "1.17.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ff7/0335d468e7eb6/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/472/1f391ed90541f/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274" }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] [[package]] name = "sympy" version = "1.14.0" -source = { registry = 
"https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } dependencies = [ { name = "mpmath", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] @@ -2464,16 +2499,16 @@ wheels = [ [[package]] name = "tabulate" version = "0.9.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/009/5b12bf5966de5/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/024/ca478df22e934/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f" }, + { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, ] [[package]] name = "tensorrt" version = "10.3.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", @@ -2485,14 +2520,14 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in 
platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "tensorrt-cu12", version = "10.3.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12", version = "10.3.0", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/23b/0dbeeada4ba1c/tensorrt-10.3.0.tar.gz", hash = "sha256:23b0dbeeada4ba1c72021d3ee0a2f172fb7cb60c72ad5e268b62822fab698d1e" } +sdist = { url = "https://pypi.nvidia.com/tensorrt/tensorrt-10.3.0.tar.gz", hash = "sha256:23b0dbeeada4ba1c72021d3ee0a2f172fb7cb60c72ad5e268b62822fab698d1e" } [[package]] name = "tensorrt" version = "10.11.0.33" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -2516,14 +2551,14 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "tensorrt-cu12", version = "10.11.0.33", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in 
platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12", version = "10.11.0.33", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a3d/6048f86e11ea5/tensorrt-10.11.0.33.tar.gz", hash = "sha256:a3d6048f86e11ea5202d473646194d3be866c0c8d578ac0b7eeb91d923f65d0b" } +sdist = { url = "https://pypi.nvidia.com/tensorrt/tensorrt-10.11.0.33.tar.gz", hash = "sha256:a3d6048f86e11ea5202d473646194d3be866c0c8d578ac0b7eeb91d923f65d0b" } [[package]] name = "tensorrt-cu12" version = "10.3.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", @@ -2534,12 +2569,12 @@ resolution-markers = [ "python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/14f/0e60f40713a65/tensorrt-cu12-10.3.0.tar.gz", hash = "sha256:14f0e60f40713a658f9634fffb1a5a665c35feb019be48b2f49e25ac12d2d084" } +sdist = { url = "https://pypi.nvidia.com/tensorrt-cu12/tensorrt-cu12-10.3.0.tar.gz", hash = "sha256:14f0e60f40713a658f9634fffb1a5a665c35feb019be48b2f49e25ac12d2d084" } [[package]] name = 
"tensorrt-cu12" version = "10.11.0.33" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -2563,15 +2598,15 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "tensorrt-cu12-bindings", version = "10.11.0.33", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "tensorrt-cu12-libs", version = "10.11.0.33", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12-bindings", version = "10.11.0.33", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12-libs", version = "10.11.0.33", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine != 'aarch64' and 
sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7e2/9c8b16771c025/tensorrt_cu12-10.11.0.33.tar.gz", hash = "sha256:7e29c8b16771c025320035ba9609c2a074767d9a8c05696a30c9d5c0fdfb37df" } +sdist = { url = "https://pypi.nvidia.com/tensorrt-cu12/tensorrt_cu12-10.11.0.33.tar.gz", hash = "sha256:7e29c8b16771c025320035ba9609c2a074767d9a8c05696a30c9d5c0fdfb37df" } [[package]] name = "tensorrt-cu12-bindings" version = "10.3.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", @@ -2586,7 +2621,7 @@ resolution-markers = [ [[package]] name = "tensorrt-cu12-bindings" version = "10.11.0.33" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -2610,22 +2645,22 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a2d/27745575be5d7/tensorrt_cu12_bindings-10.11.0.33-cp310-none-manylinux_2_28_x86_64.whl", hash = 
"sha256:a2d27745575be5d7f06caa9565230025b8e41a8915ee6a5dc735d41c3faf206d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/546/c7ee976366dc9/tensorrt_cu12_bindings-10.11.0.33-cp310-none-manylinux_2_31_aarch64.whl", hash = "sha256:546c7ee976366dc9cb76ffefbde555dec4feddcfb508b4c99ee626447b8c72de" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e7b/7a5b80174f8c4/tensorrt_cu12_bindings-10.11.0.33-cp311-none-manylinux_2_28_x86_64.whl", hash = "sha256:e7b7a5b80174f8c4ddd8a63bc9fa97cad3320409eafad79428bc2b1e15884068" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/492/e3e91d7c1083b/tensorrt_cu12_bindings-10.11.0.33-cp311-none-manylinux_2_31_aarch64.whl", hash = "sha256:492e3e91d7c1083bff1f7c15fdd8f5fb09a782dcfa6d1d0f8d9034b2e3b38cad" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a8f/374f6d752ce4b/tensorrt_cu12_bindings-10.11.0.33-cp312-none-manylinux_2_28_x86_64.whl", hash = "sha256:a8f374f6d752ce4b0d4a8303d29c3ba9904eb29da0dc95b4db6b75c501997e4a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6a3/b768cea69b153/tensorrt_cu12_bindings-10.11.0.33-cp312-none-manylinux_2_31_aarch64.whl", hash = "sha256:6a3b768cea69b153ed0c2eb50130d150406d5c1498fdb0bf6c8a1be160137a6a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/1ce/da290d1ed79b6/tensorrt_cu12_bindings-10.11.0.33-cp313-none-manylinux_2_28_x86_64.whl", hash = "sha256:1ceda290d1ed79b6107b0eb29eeb178f569d007c1506b72caae8248975d57662" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3c2/7e0d6e36a3b1f/tensorrt_cu12_bindings-10.11.0.33-cp313-none-manylinux_2_31_aarch64.whl", hash = "sha256:3c27e0d6e36a3b1f06e1dc8b735e34f04f5b8aac3e7d9b21762b8264496e825f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9a8/01886f389b75f/tensorrt_cu12_bindings-10.11.0.33-cp39-none-manylinux_2_28_x86_64.whl", hash = "sha256:9a801886f389b75f92e69fc6be40308392ec7746dbf4de4a2b76585d591960f0" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/42e/9b3cc2e3c6bcc/tensorrt_cu12_bindings-10.11.0.33-cp39-none-manylinux_2_31_aarch64.whl", hash = "sha256:42e9b3cc2e3c6bcc0785c9c96b4dd25cd7043ff95e4fd09c8d35331f63ce9634" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp310-none-manylinux_2_28_x86_64.whl", hash = "sha256:a2d27745575be5d7f06caa9565230025b8e41a8915ee6a5dc735d41c3faf206d" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp310-none-manylinux_2_31_aarch64.whl", hash = "sha256:546c7ee976366dc9cb76ffefbde555dec4feddcfb508b4c99ee626447b8c72de" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp311-none-manylinux_2_28_x86_64.whl", hash = "sha256:e7b7a5b80174f8c4ddd8a63bc9fa97cad3320409eafad79428bc2b1e15884068" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp311-none-manylinux_2_31_aarch64.whl", hash = "sha256:492e3e91d7c1083bff1f7c15fdd8f5fb09a782dcfa6d1d0f8d9034b2e3b38cad" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp312-none-manylinux_2_28_x86_64.whl", hash = "sha256:a8f374f6d752ce4b0d4a8303d29c3ba9904eb29da0dc95b4db6b75c501997e4a" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp312-none-manylinux_2_31_aarch64.whl", hash = "sha256:6a3b768cea69b153ed0c2eb50130d150406d5c1498fdb0bf6c8a1be160137a6a" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp313-none-manylinux_2_28_x86_64.whl", hash = "sha256:1ceda290d1ed79b6107b0eb29eeb178f569d007c1506b72caae8248975d57662" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp313-none-manylinux_2_31_aarch64.whl", hash = "sha256:3c27e0d6e36a3b1f06e1dc8b735e34f04f5b8aac3e7d9b21762b8264496e825f" }, + { url = 
"https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp39-none-manylinux_2_28_x86_64.whl", hash = "sha256:9a801886f389b75f92e69fc6be40308392ec7746dbf4de4a2b76585d591960f0" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-bindings/tensorrt_cu12_bindings-10.11.0.33-cp39-none-manylinux_2_31_aarch64.whl", hash = "sha256:42e9b3cc2e3c6bcc0785c9c96b4dd25cd7043ff95e4fd09c8d35331f63ce9634" }, ] [[package]] name = "tensorrt-cu12-libs" version = "10.3.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", @@ -2637,14 +2672,13 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "nvidia-cuda-runtime-cu12", marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.8.90", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d2f/36838e2762b5c/tensorrt_cu12_libs-10.3.0.tar.gz", hash = "sha256:d2f36838e2762b5ceb62f614157ba4764de2fa1f4fe5661c6cfc07e07e6e71da" } [[package]] name = "tensorrt-cu12-libs" version = "10.11.0.33" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = 
"https://pypi.nvidia.com/" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -2668,80 +2702,84 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "nvidia-cuda-runtime-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.8.90", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.9.79", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "platform_machine != 'aarch64' and sys_platform == 'linux'" }, +] +wheels = [ + { url = "https://pypi.nvidia.com/tensorrt-cu12-libs/tensorrt_cu12_libs-10.11.0.33-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:81ace8d3284fdbef0804c444a4d7555343ee079370e79c93cb328c7d9b08f968" }, + { url = "https://pypi.nvidia.com/tensorrt-cu12-libs/tensorrt_cu12_libs-10.11.0.33-py2.py3-none-manylinux_2_31_aarch64.whl", hash = "sha256:b6846dbc32d717a5031d9757f16293dd9e25de8a1c4aae8c00701d52351ef173" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/632/f912b3a5fbccd/tensorrt_cu12_libs-10.11.0.33.tar.gz", hash = 
"sha256:632f912b3a5fbccd317f85d0c0a342c86d58a201e2b65b2c08c179603859e11c" } [[package]] name = "timm" version = "1.0.15" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pyyaml", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "safetensors", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "torch", version = "2.7.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.8.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torchvision", version = "0.23.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name 
= "torch", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.9.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.23.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/756/a3bc30c96565f/timm-1.0.15.tar.gz", hash = "sha256:756a3bc30c96565f056e608a9b559daed904617eaadb6be536f96874879b1055" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/0c/66b0f9b4a4cb9ffdac7b52b17b37c7d3c4f75623b469e388b0c6d89b4e88/timm-1.0.15.tar.gz", hash = "sha256:756a3bc30c96565f056e608a9b559daed904617eaadb6be536f96874879b1055", size = 2230258, upload-time = "2025-02-23T05:05:55.959Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5a3/dc460c24e322e/timm-1.0.15-py3-none-any.whl", hash = 
"sha256:5a3dc460c24e322ecc7fd1f3e3eb112423ddee320cb059cc1956fbc9731748ef" }, + { url = "https://files.pythonhosted.org/packages/6c/d0/179abca8b984b3deefd996f362b612c39da73b60f685921e6cd58b6125b4/timm-1.0.15-py3-none-any.whl", hash = "sha256:5a3dc460c24e322ecc7fd1f3e3eb112423ddee320cb059cc1956fbc9731748ef", size = 2361373, upload-time = "2025-02-23T05:05:53.601Z" }, ] [[package]] name = "tokenizers" version = "0.21.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a1b/b04dc5b448985/tokenizers-0.21.1.tar.gz", hash = "sha256:a1bb04dc5b448985f86ecd4b05407f5a8d97cb2c0532199b2a302a604a0165ab" } +sdist = { url = "https://files.pythonhosted.org/packages/92/76/5ac0c97f1117b91b7eb7323dcd61af80d72f790b4df71249a7850c195f30/tokenizers-0.21.1.tar.gz", hash = "sha256:a1bb04dc5b448985f86ecd4b05407f5a8d97cb2c0532199b2a302a604a0165ab", size = 343256, upload-time = "2025-03-13T10:51:18.189Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/28d/a6b72d4fb14ee/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28da6b72d4fb14ee200a1bd386ff74ade8992d7f725f2bde2c495a9a98cf4d9f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/34d/8cfde551c9916/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34d8cfde551c9916cb92014e040806122295a6800914bab5865deb85623931cf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/aaa/852d23e125b73/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaa852d23e125b73d283c98f007e06d4595732104b65402f46e8ef24b588d9f8" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/a21/a15d5c8e60333/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a21a15d5c8e603331b8a59548bbe113564136dc0f5ad8306dd5033459a226da0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2fd/bd4c067c60a0a/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fdbd4c067c60a0ac7eca14b6bd18a5bebace54eb757c706b47ea93204f7a37c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2dd/9a0061e403546/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd9a0061e403546f7377df940e866c3e678d7d4e9643d0461ea442b4f89e61a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/db9/484aeb2e200c4/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:db9484aeb2e200c43b915a1a0150ea885e35f357a5a8fabf7373af333dcc8dbf" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ed2/48ab5279e601a/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed248ab5279e601a30a4d67bdb897ecbe955a50f1e7bb62bd99f07dd11c2f5b6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9ac/78b12e541d4ce/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:9ac78b12e541d4ce67b4dfd970e44c060a2147b9b2a21f509566d556a509c67d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e5a/69c1a4496b81a/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e5a69c1a4496b81a5ee5d2c1f3f7fbdf95e90a0196101b0ee89ed9956b8a168f" }, + { url = "https://files.pythonhosted.org/packages/4d/7a/a209b29f971a9fdc1da86f917fe4524564924db50d13f0724feed37b2a4d/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28da6b72d4fb14ee200a1bd386ff74ade8992d7f725f2bde2c495a9a98cf4d9f", size = 2937541, upload-time = "2025-03-13T10:50:56.679Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/1e/b788b50ffc6191e0b1fc2b0d49df8cff16fe415302e5ceb89f619d12c5bc/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34d8cfde551c9916cb92014e040806122295a6800914bab5865deb85623931cf", size = 2819058, upload-time = "2025-03-13T10:50:59.525Z" }, + { url = "https://files.pythonhosted.org/packages/36/aa/3626dfa09a0ecc5b57a8c58eeaeb7dd7ca9a37ad9dd681edab5acd55764c/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaa852d23e125b73d283c98f007e06d4595732104b65402f46e8ef24b588d9f8", size = 3133278, upload-time = "2025-03-13T10:51:04.678Z" }, + { url = "https://files.pythonhosted.org/packages/a4/4d/8fbc203838b3d26269f944a89459d94c858f5b3f9a9b6ee9728cdcf69161/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a21a15d5c8e603331b8a59548bbe113564136dc0f5ad8306dd5033459a226da0", size = 3144253, upload-time = "2025-03-13T10:51:01.261Z" }, + { url = "https://files.pythonhosted.org/packages/d8/1b/2bd062adeb7c7511b847b32e356024980c0ffcf35f28947792c2d8ad2288/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fdbd4c067c60a0ac7eca14b6bd18a5bebace54eb757c706b47ea93204f7a37c", size = 3398225, upload-time = "2025-03-13T10:51:03.243Z" }, + { url = "https://files.pythonhosted.org/packages/8a/63/38be071b0c8e06840bc6046991636bcb30c27f6bb1e670f4f4bc87cf49cc/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd9a0061e403546f7377df940e866c3e678d7d4e9643d0461ea442b4f89e61a", size = 3038874, upload-time = "2025-03-13T10:51:06.235Z" }, + { url = "https://files.pythonhosted.org/packages/ec/83/afa94193c09246417c23a3c75a8a0a96bf44ab5630a3015538d0c316dd4b/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:db9484aeb2e200c43b915a1a0150ea885e35f357a5a8fabf7373af333dcc8dbf", size = 9014448, upload-time = "2025-03-13T10:51:10.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/b3/0e1a37d4f84c0f014d43701c11eb8072704f6efe8d8fc2dcdb79c47d76de/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed248ab5279e601a30a4d67bdb897ecbe955a50f1e7bb62bd99f07dd11c2f5b6", size = 8937877, upload-time = "2025-03-13T10:51:12.688Z" }, + { url = "https://files.pythonhosted.org/packages/ac/33/ff08f50e6d615eb180a4a328c65907feb6ded0b8f990ec923969759dc379/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:9ac78b12e541d4ce67b4dfd970e44c060a2147b9b2a21f509566d556a509c67d", size = 9186645, upload-time = "2025-03-13T10:51:14.723Z" }, + { url = "https://files.pythonhosted.org/packages/5f/aa/8ae85f69a9f6012c6f8011c6f4aa1c96154c816e9eea2e1b758601157833/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e5a69c1a4496b81a5ee5d2c1f3f7fbdf95e90a0196101b0ee89ed9956b8a168f", size = 9384380, upload-time = "2025-03-13T10:51:16.526Z" }, ] [[package]] name = "tomli" version = "2.2.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cd4/5e1dc79c835ce/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ece/47d672db52ac6/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/697/2ca9c9cc9f0ac/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c95/4d2250168d287/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/8dd/28b3e155b80f4/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e59/e304978767a54/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/335/80bccab0338d0/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4e3/40144ad7ae153/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/db2/b95f9de791818/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/407/41994320b2325/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/400/e720fe168c0f8/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/02a/be224de6ae62c/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b82/ebccc8c8a36f2/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/a92/ef1a44547e894/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/931/6dc65bed1684c/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e85/e99945e688e32/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ac0/65718db92ca81/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d92/0f33822747519/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a19/8f10c4d1b1375/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/cb5/5c73c5f440877/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc" }, +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, ] [[package]] name = "torch" version = "2.7.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", @@ -2756,21 +2794,26 @@ dependencies = [ { name = "filelock", marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "fsspec", marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "jinja2", marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "networkx", version = "3.2.1", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version == '3.10.*' and 
platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "networkx", version = "3.5", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.11' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.11' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "networkx", version = "3.2.1", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version == '3.10.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "networkx", version = "3.5", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.11' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.11' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "setuptools", marker = "(python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine == 
'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "sympy", marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "typing-extensions", marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+f/6ef/f643c0a7acda9/torch-2.7.0-cp310-cp310-linux_aarch64.whl", hash = "sha256:6eff643c0a7acda92734cc798338f733ff35c7df1a4434576f5ff7c66fc97319" }, + { url = "https://files.pythonhosted.org/packages/46/c2/3fb87940fa160d956ee94d644d37b99a24b9c05a4222bf34f94c71880e28/torch-2.7.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c9afea41b11e1a1ab1b258a5c31afbd646d6319042bfe4f231b408034b51128b", size = 99158447, upload-time = "2025-04-23T14:35:10.557Z" }, + { url = "https://files.pythonhosted.org/packages/40/da/7378d16cc636697f2a94f791cb496939b60fb8580ddbbef22367db2c2274/torch-2.7.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2b7813e904757b125faf1a9a3154e1d50381d539ced34da1992f52440567c156", size = 99159397, upload-time = "2025-04-23T14:35:35.304Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5e/ac759f4c0ab7c01feffa777bd68b43d2ac61560a9770eeac074b450f81d4/torch-2.7.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:36a6368c7ace41ad1c0f69f18056020b6a5ca47bedaca9a2f3b578f5a104c26c", size = 99013250, upload-time = "2025-04-23T14:35:15.589Z" }, + { url = "https://files.pythonhosted.org/packages/14/24/720ea9a66c29151b315ea6ba6f404650834af57a26b2a04af23ec246b2d5/torch-2.7.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:868ccdc11798535b5727509480cd1d86d74220cfdc42842c4617338c1109a205", size = 99015553, upload-time = "2025-04-23T14:34:41.075Z" 
}, + { url = "https://files.pythonhosted.org/packages/cb/b4/8df3f9fe6bdf59e56a0e538592c308d18638eb5f5dc4b08d02abb173c9f0/torch-2.7.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:2a885fc25afefb6e6eb18a7d1e8bfa01cc153e92271d980a49243b250d5ab6d9", size = 99091348, upload-time = "2025-04-23T14:33:48.975Z" }, + { url = "https://files.pythonhosted.org/packages/57/6a/36775d1b553a443ba1453e1bfeae903ef20d94c95ab31aa09225bf52fda1/torch-2.7.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:e362efaa5b3078e5f75c33efc05005b9b46de0d2e899519d5b4cad0e050ed0f7", size = 99197389, upload-time = "2025-04-23T14:32:33.083Z" }, ] [[package]] name = "torch" -version = "2.8.0.dev20250606+cu128" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "2.9.0.dev20250701+cu129" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -2797,13 +2840,13 @@ dependencies = [ { name = "filelock", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, { name = "fsspec", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, { name = "jinja2", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 
'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "networkx", version = "3.2.1", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version == '3.10.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.10.*' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version == '3.10.*' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version == '3.10.*' and 'tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "networkx", version = "3.5", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.11' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version >= '3.11' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.11' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "networkx", version = "3.2.1", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < 
'3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version == '3.10.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.10.*' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version == '3.10.*' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version == '3.10.*' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "networkx", version = "3.5", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version >= '3.11' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.11' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version >= '3.11' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.11' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-runtime-cu12", version = "12.9.79", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cufft-cu12", marker = 
"platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cufile-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, @@ -2813,26 +2856,25 @@ dependencies = [ { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvshmem-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "pytorch-triton", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'linux')" }, + { name = "pytorch-triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "setuptools", marker = "(python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.12' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version >= '3.12' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.12' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "sympy", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, { name = "typing-extensions", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 
'windows')" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp310-cp310-manylinux_2_28_aarch64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp310-cp310-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp311-cp311-manylinux_2_28_aarch64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp311-cp311-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp312-cp312-manylinux_2_28_aarch64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp312-cp312-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp313-cp313-manylinux_2_28_aarch64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp313-cp313-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp313-cp313t-manylinux_2_28_aarch64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp313-cp313t-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp39-cp39-manylinux_2_28_aarch64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torch-2.8.0.dev20250606%2Bcu128-cp39-cp39-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp310-cp310-manylinux_2_28_aarch64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp310-cp310-manylinux_2_28_x86_64.whl" }, + { url = 
"https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp311-cp311-manylinux_2_28_aarch64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp311-cp311-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp312-cp312-manylinux_2_28_aarch64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp312-cp312-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp313-cp313-manylinux_2_28_aarch64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp313-cp313-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp313-cp313t-manylinux_2_28_aarch64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp313-cp313t-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp39-cp39-manylinux_2_28_aarch64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torch-2.9.0.dev20250701%2Bcu129-cp39-cp39-manylinux_2_28_x86_64.whl" }, ] [[package]] @@ -2840,30 +2882,30 @@ name = "torch-tensorrt" source = { editable = "." 
} dependencies = [ { name = "dllist", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "tensorrt", version = "10.3.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "tensorrt", version = "10.11.0.33", source = { registry = 
"https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "tensorrt-cu12", version = "10.3.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "tensorrt-cu12", version = "10.11.0.33", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "tensorrt-cu12-bindings", version = "10.3.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "tensorrt-cu12-bindings", version = "10.11.0.33", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "tensorrt-cu12-libs", version = "10.3.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release 
and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "tensorrt-cu12-libs", version = "10.11.0.33", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.7.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.8.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt", version = "10.3.0", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt", version = "10.11.0.33", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12", version = "10.3.0", source = { registry = 
"https://pypi.nvidia.com/" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12", version = "10.11.0.33", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12-bindings", version = "10.3.0", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12-bindings", version = "10.11.0.33", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12-libs", version = "10.3.0", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "tensorrt-cu12-libs", version = "10.11.0.33", source = { registry = "https://pypi.nvidia.com/" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { 
name = "torch", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.9.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, { name = "typing-extensions", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] [package.optional-dependencies] quantization = [ - { name = "nvidia-modelopt", version = "0.29.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, extra = ["all"], marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, - { name = "nvidia-modelopt", version = "0.31.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, extra = ["all"], marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, + { name = "nvidia-modelopt", version = "0.29.0", source = { registry = "https://pypi.nvidia.com/" }, extra = ["all"], marker = "(python_full_version < '3.12' and 'tegra' in platform_release and sys_platform 
== 'linux') or (python_full_version < '3.12' and 'tegra' in platform_release and sys_platform == 'windows') or (python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows')" }, + { name = "nvidia-modelopt", version = "0.31.0", source = { registry = "https://pypi.nvidia.com/" }, extra = ["all"], marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'windows')" }, ] torchvision = [ - { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torchvision", version = "0.23.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.23.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and 
sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, ] [package.dev-dependencies] @@ -2906,10 +2948,10 @@ requires-dist = [ { name = "tensorrt-cu12-bindings", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release", specifier = ">=10.3.0,<10.4.0" }, { name = "tensorrt-cu12-libs", marker = "platform_machine != 'aarch64' or 'tegra' not in platform_release", specifier = ">=10.11.0,<10.12.0" }, { name = "tensorrt-cu12-libs", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release", specifier = ">=10.3.0,<10.4.0" }, - { name = "torch", marker = "platform_machine != 'aarch64' or 'tegra' not in platform_release", specifier = ">=2.8.0.dev0,<2.9.0", index = "https://download.pytorch.org/whl/nightly/cu128" }, - { name = "torch", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release", specifier = ">=2.7.0,<2.8.0", index = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, - { name = "torchvision", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release and extra == 'torchvision'", specifier = ">=0.22.0,<0.23.0", index = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, - { name = "torchvision", marker = "(platform_machine != 'aarch64' and extra == 'torchvision') or ('tegra' not in platform_release and extra == 'torchvision')", specifier = ">=0.23.0.dev0,<0.24.0", index = "https://download.pytorch.org/whl/nightly/cu128" }, + { name = "torch", marker = "platform_machine != 'aarch64' or 'tegra' not in platform_release", specifier = ">=2.9.0.dev0,<2.10.0", index = "https://download.pytorch.org/whl/nightly/cu129" }, + { name = "torch", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release", specifier = ">=2.7.0,<2.8.0" }, + { name = "torchvision", marker = "platform_machine == 'aarch64' and 'tegra' in platform_release and extra == 'torchvision'", specifier = ">=0.22.0,<0.23.0" }, + 
{ name = "torchvision", marker = "(platform_machine != 'aarch64' and extra == 'torchvision') or ('tegra' not in platform_release and extra == 'torchvision')", specifier = ">=0.23.0.dev0,<0.24.0", index = "https://download.pytorch.org/whl/nightly/cu129" }, { name = "typing-extensions", specifier = ">=4.7.0" }, ] provides-extras = ["torchvision", "quantization"] @@ -2942,24 +2984,24 @@ test = [ [[package]] name = "torchprofile" version = "0.0.4" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.7.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.8.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 
'windows')" }, - { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torchvision", version = "0.23.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.9.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not 
in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.22.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torchvision", version = "0.23.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/96b/6da17d752a06b/torchprofile-0.0.4.tar.gz", hash = "sha256:96b6da17d752a06b02977e078aea95614893b31d4117dd5dcd081f30ce65611b" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/36/574c0c46e818533b78b3c09505211162918188325ab4165ef11a3f295755/torchprofile-0.0.4.tar.gz", hash = "sha256:96b6da17d752a06b02977e078aea95614893b31d4117dd5dcd081f30ce65611b", size = 4557, upload-time = "2021-06-22T04:58:03.592Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/715/1fe88dc770f0e/torchprofile-0.0.4-py3-none-any.whl", hash = "sha256:7151fe88dc770f0eeec241244a4c7feaec2c5e8c7852386bc2d6a8d7dde7384d" }, + { url = "https://files.pythonhosted.org/packages/62/15/71ad4ed163b03cba1315f1d96e0bc8e39d5a97f92974ffa610a729b273ab/torchprofile-0.0.4-py3-none-any.whl", hash = "sha256:7151fe88dc770f0eeec241244a4c7feaec2c5e8c7852386bc2d6a8d7dde7384d", size = 7694, upload-time = "2021-06-22T04:58:02.485Z" }, ] [[package]] name = "torchvision" version = "0.22.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } 
resolution-markers = [ "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", "python_full_version == '3.11.*' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux'", @@ -2971,18 +3013,23 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, { name = "pillow", marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.7.0", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine == 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, ] wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/jp6/cu126/+f/daa/bff3a07259968/torchvision-0.22.0-cp310-cp310-linux_aarch64.whl", hash = "sha256:daabff3a0725996886b92e4b5dd143f5750ef4b181b5c7d01371a9185e8f0402" }, + { url = "https://files.pythonhosted.org/packages/a3/e5/ec4b52041cd8c440521b75864376605756bd2d112d6351ea6a1ab25008c1/torchvision-0.22.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:810ea4af3bc63cf39e834f91f4218ff5999271caaffe2456247df905002bd6c0", size = 2512604, upload-time = "2025-04-23T14:41:56.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/71/ce9a303b94e64fe25d534593522ffc76848c4e64c11e4cbe9f6b8d537210/torchvision-0.22.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6c5620e10ffe388eb6f4744962106ed7cf1508d26e6fdfa0c10522d3249aea24", size = 2514016, upload-time = "2025-04-23T14:41:48.566Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/21f8b6122e13ae045b8e49658029c695fd774cd21083b3fa5c3f9c5d3e35/torchvision-0.22.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8f116bc82e0c076e70ba7776e611ed392b9666aa443662e687808b08993d26af", size = 2514571, upload-time = "2025-04-23T14:41:53.458Z" }, + { url = "https://files.pythonhosted.org/packages/77/77/88f64879483d66daf84f1d1c4d5c31ebb08e640411139042a258d5f7dbfe/torchvision-0.22.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:471c6dd75bb984c6ebe4f60322894a290bf3d4b195e769d80754f3689cd7f238", size = 2471592, upload-time = "2025-04-23T14:41:54.991Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9a/2b59f5758ba7e3f23bc84e16947493bbce97392ec6d18efba7bdf0a3b10e/torchvision-0.22.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:753d3c84eeadd5979a33b3b73a25ecd0aa4af44d6b45ed2c70d44f5e0ac68312", size = 2476555, upload-time = "2025-04-23T14:41:38.357Z" }, + { url = "https://files.pythonhosted.org/packages/2c/40/ca84add0f8e548a5b083b271e832786cd397047a9c2e7fac76c0c1f3de04/torchvision-0.22.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = 
"sha256:4095fac2b2e49a9c30f701e09ec1bdf3d11b1e48b006a76a9015a2ed8b39556e", size = 2512670, upload-time = "2025-04-23T14:41:33.739Z" }, ] [[package]] name = "torchvision" -version = "0.23.0.dev20250606+cu128" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +version = "0.23.0.dev20250701+cu129" +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } resolution-markers = [ "python_full_version >= '3.12' and platform_machine != 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", "python_full_version >= '3.12' and platform_machine == 'aarch64' and 'tegra' not in platform_release and sys_platform == 'linux'", @@ -3006,38 +3053,38 @@ resolution-markers = [ "python_full_version < '3.10' and platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows'", ] dependencies = [ - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "1.26.4", source = { registry = 
"https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.10' and platform_machine != 'aarch64' and sys_platform == 'windows') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version < '3.10' and 'tegra' not in platform_release and sys_platform == 'windows') or (platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'linux') or (platform_machine != 'aarch64' and 'tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "pillow", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, - { name = "torch", version = "2.8.0.dev20250606+cu128", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in platform_release and sys_platform == 'windows')" }, + { name = "torch", version = "2.9.0.dev20250701+cu129", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and sys_platform == 'windows') or ('tegra' not in platform_release and sys_platform == 'linux') or ('tegra' not in 
platform_release and sys_platform == 'windows')" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/nightly/cu128/torchvision-0.23.0.dev20250606%2Bcu128-cp310-cp310-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torchvision-0.23.0.dev20250606%2Bcu128-cp311-cp311-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torchvision-0.23.0.dev20250606%2Bcu128-cp312-cp312-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torchvision-0.23.0.dev20250606%2Bcu128-cp313-cp313-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torchvision-0.23.0.dev20250606%2Bcu128-cp313-cp313t-manylinux_2_28_x86_64.whl" }, - { url = "https://download.pytorch.org/whl/nightly/cu128/torchvision-0.23.0.dev20250606%2Bcu128-cp39-cp39-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torchvision-0.23.0.dev20250701%2Bcu129-cp310-cp310-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torchvision-0.23.0.dev20250701%2Bcu129-cp311-cp311-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torchvision-0.23.0.dev20250701%2Bcu129-cp312-cp312-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torchvision-0.23.0.dev20250701%2Bcu129-cp313-cp313-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torchvision-0.23.0.dev20250701%2Bcu129-cp313-cp313t-manylinux_2_28_x86_64.whl" }, + { url = "https://download.pytorch.org/whl/nightly/cu129/torchvision-0.23.0.dev20250701%2Bcu129-cp39-cp39-manylinux_2_28_x86_64.whl" }, ] [[package]] name = "tqdm" version = "4.67.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f8a/ef9c52c08c13a/tqdm-4.67.1.tar.gz", hash = 
"sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/264/45eca388f82e7/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2" }, + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, ] [[package]] name = "transformers" version = "4.51.3" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "huggingface-hub", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, - { name = "numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu128" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, + { name = 
"numpy", version = "1.26.4", source = { registry = "https://download.pytorch.org/whl/nightly/cu129" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'windows') or ('tegra' in platform_release and sys_platform == 'linux') or ('tegra' in platform_release and sys_platform == 'windows')" }, + { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'linux') or (python_full_version >= '3.10' and 'tegra' not in platform_release and sys_platform == 'windows')" }, { name = "packaging", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "pyyaml", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "regex", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, @@ -3046,15 +3093,15 @@ dependencies = [ { name = "tokenizers", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "tqdm", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e29/2fcab3990c6de/transformers-4.51.3.tar.gz", hash = "sha256:e292fcab3990c6defe6328f0f7d2004283ca81a7a07b2de9a46d67fd81ea1409" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/11/7414d5bc07690002ce4d7553602107bf969af85144bbd02830f9fb471236/transformers-4.51.3.tar.gz", hash = "sha256:e292fcab3990c6defe6328f0f7d2004283ca81a7a07b2de9a46d67fd81ea1409", size = 8941266, upload-time = "2025-04-14T08:15:00.485Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fd3/279633ceb2b77/transformers-4.51.3-py3-none-any.whl", hash = "sha256:fd3279633ceb2b777013234bbf0b4f5c2d23c4626b05497691f00cfda55e8a83" }, + { url = "https://files.pythonhosted.org/packages/a9/b6/5257d04ae327b44db31f15cce39e6020cc986333c715660b1315a9724d82/transformers-4.51.3-py3-none-any.whl", hash = 
"sha256:fd3279633ceb2b777013234bbf0b4f5c2d23c4626b05497691f00cfda55e8a83", size = 10383940, upload-time = "2025-04-14T08:13:43.023Z" }, ] [[package]] name = "typing-extensions" version = "4.14.0" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ { url = "https://download.pytorch.org/whl/nightly/typing_extensions-4.14.0-py3-none-any.whl" }, ] @@ -3062,64 +3109,64 @@ wheels = [ [[package]] name = "typing-inspection" version = "0.4.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6ae/134cc0203c333/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/389/055682238f53b/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51" }, + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, ] [[package]] name = "typos" version = "1.33.1" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/ae5/905af0925cee3/typos-1.33.1.tar.gz", hash = "sha256:ae5905af0925cee3ea97cd3372c5f570b67547b6b8af6b95c678b310fbec55c9" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/1e/bef7ff928af2f9b329cd7cae0cda89671241bf5275fdd353f823d439a577/typos-1.33.1.tar.gz", hash = "sha256:ae5905af0925cee3ea97cd3372c5f570b67547b6b8af6b95c678b310fbec55c9", size = 1506391, upload-time = "2025-06-02T17:58:36.447Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/57c/0e84e63a37535/typos-1.33.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57c0e84e63a37535f3fea0d85659e88cbe59329dbd7b1920d31bbc52e6d63fca" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c17/4e3b756ebd7f5/typos-1.33.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c174e3b756ebd7f576de759a749b4ca4c6d7952801e179d4f9ade1a6bba159c5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/538/889ffcc159b0d/typos-1.33.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:538889ffcc159b0da1f2eeb52fb99a94be2886ca5060a48b84c0bb54893ea604" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4f6/afcb874c6a803/typos-1.33.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4f6afcb874c6a803da9d93813546648cdf3764ee5657844b9cd8692aaeff4790" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2ce/db8e13c3cb9e2/typos-1.33.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2cedb8e13c3cb9e2755061297d464962c66561f0b30bb12432d9d05497483be8" }, + { url = "https://files.pythonhosted.org/packages/23/bd/ae66ad308b186d39fff0b5cf9a817d7f741684ec1ac7ddbdd3cbc1c681f5/typos-1.33.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57c0e84e63a37535f3fea0d85659e88cbe59329dbd7b1920d31bbc52e6d63fca", size = 7546523, upload-time = "2025-06-02T17:58:23.957Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/ae/32cbec711c8c8c6a90f698a6d2b235530cf652d1c05bd3ea52e1d05e7300/typos-1.33.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c174e3b756ebd7f576de759a749b4ca4c6d7952801e179d4f9ade1a6bba159c5", size = 6775099, upload-time = "2025-06-02T17:58:25.567Z" }, + { url = "https://files.pythonhosted.org/packages/d3/73/a243913bcb9914b7a5ce1fa8d5a8aaa554fe94033387060c0952c6446fa5/typos-1.33.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:538889ffcc159b0da1f2eeb52fb99a94be2886ca5060a48b84c0bb54893ea604", size = 7472916, upload-time = "2025-06-02T17:58:27.385Z" }, + { url = "https://files.pythonhosted.org/packages/44/05/411d71a8e4895bc85df4250cba401345864bd858a34781b58777d89fdc11/typos-1.33.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4f6afcb874c6a803da9d93813546648cdf3764ee5657844b9cd8692aaeff4790", size = 6666776, upload-time = "2025-06-02T17:58:29.116Z" }, + { url = "https://files.pythonhosted.org/packages/89/36/1f6aed427f77cf3a72e8b3bb7174e9e387e506b553da6d81b721a1fe0715/typos-1.33.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2cedb8e13c3cb9e2755061297d464962c66561f0b30bb12432d9d05497483be8", size = 7554311, upload-time = "2025-06-02T17:58:30.965Z" }, ] [[package]] name = "tzdata" version = "2025.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b60/a638fcc0daffa/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } wheels = [ - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/1a4/03fada01ff922/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8" }, + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, ] [[package]] name = "urllib3" version = "2.4.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/414/bc6535b787feb/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4e1/6665048960a09/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813" }, + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, ] [[package]] name = "virtualenv" version = "20.31.2" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "filelock", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { 
name = "platformdirs", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e10/c0a9d02835e59/virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af" } +sdist = { url = "https://files.pythonhosted.org/packages/56/2c/444f465fb2c65f40c3a104fd0c495184c4f2336d65baf398e3c75d72ea94/virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af", size = 6076316, upload-time = "2025-05-08T17:58:23.811Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/36e/fd0d9650ee985/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11" }, + { url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11", size = 6057982, upload-time = "2025-05-08T17:58:21.15Z" }, ] [[package]] name = "xxhash" version = "3.5.0" -source = { registry = "https://download.pytorch.org/whl/nightly/cu128" } +source = { registry = "https://download.pytorch.org/whl/nightly/cu129" } wheels = [ { url = "https://download.pytorch.org/whl/nightly/xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { url = "https://download.pytorch.org/whl/nightly/xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" }, @@ -3151,94 +3198,94 @@ wheels = [ [[package]] name = "yarl" version = "1.20.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "multidict", marker = "sys_platform == 'linux' or sys_platform == 'windows'" }, { name = "propcache", marker = "sys_platform == 'linux' or 
sys_platform == 'windows'" }, ] -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/686/d51e51ee5dfe6/yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307" } -wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/759/5498d085becc8/yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/af5/607159085dcdb/yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/95b/50910e4965674/yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b59/4113a301ad537/yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/083/ce0393ea173cd/yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4f1/a350a652bbbe1/yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fb0/caeac4a164aad/yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d88/cc43e923f3242/yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", 
hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/e52/d6ed9ea8fd3ab/yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ce3/60ae48a5e9961/yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/06d/06c9d5b5bc3eb/yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c27/d98f4e5c40605/yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/6d4/09e321e4addf7/yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ea5/2f7328a36960b/yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c87/03517b9244639/yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/077/989b09ffd2f48/yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/0ac/faf1da020253f/yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b42/30ac0b97ec5ee/yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/0a6/a1e6ae21cdd84/yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/86d/e313371ec04dd/yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dd5/9c9dd58ae16ea/yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a0b/c5e05f457b7c1/yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/c94/71ca18e6aeb0e/yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/40e/d574b4df72358/yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/087/e9731884621b1/yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/69d/f35468b66c1a6/yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3b2/992fe29002fd0/yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4c9/03e0b42aab48a/yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bf0/99e2432131093/yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/8a7/f62f5dc70a6c7/yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/54a/c15a8b60382b2/yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/25b/3bc0763a7aca1/yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/b25/86e36dc070fc8/yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/866/349da9d8c5290/yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/33b/b660b390a0554/yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/737/e9f171e5a0703/yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/18e/321617de4ab17/yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/434/5f58719825bba/yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5d9/b980d7234614b/yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/af4/baa8a44597783/yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/123/393db7420e71d/yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/ab4/7acc9332f3de1/yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/4a3/4c52ed158f898/yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/04d/8cfb12714158a/yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/7dc/63ad0d541c38b/yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f9d/02b591a64e4e6/yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/95f/c9876f917cac7/yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bb7/69ae5760cd1c6/yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f16/6eafa78810ddb/yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5d3/d6d14754aefc7/yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/2a8/f64df8ed5d04c/yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/4d9/949eaf05b4d30/yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9c3/66b254082d21c/yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/91b/c450c80a2e968/yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/9c2/aa4387de4bc3a/yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d2c/bca6760a54118/yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/798/a5074e656f06b/yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f10/6e75c45428847/yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3b6/0a86551669c23/yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3e4/29857e341d5e8/yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f" }, - { url = 
"https://pypi.jetson-ai-lab.dev/root/pypi/+f/8d8/a3d54a090e0ff/yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f0c/f05ae2d3d87a8/yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/a88/4b8974729e389/yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/f8d/8aa8dd89ffb9a/yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/3b4/e88d6c3c8672f/yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/bdb/77efde644d6f1/yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/4ba/5e59f14bfe8d2/yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/d0b/f955b96ea44ad/yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/273/59776bc359ee6/yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/04d/9c7a1dc0a26ef/yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/faa/709b66ae0e24c/yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/448/69ee8538208fe/yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0" }, - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/5d0/fe6af927a47a2/yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124" }, +sdist = { url = "https://files.pythonhosted.org/packages/62/51/c0edba5219027f6eab262e139f73e2417b0f4efffa23bf562f6e18f76ca5/yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307", size = 185258, upload-time = "2025-04-17T00:45:14.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/21/e0aa650bcee881fb804331faa2c0f9a5d6be7609970b2b6e3cdd414e174b/yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe", size = 327297, upload-time = "2025-04-17T00:41:34.03Z" }, + { url = "https://files.pythonhosted.org/packages/1a/a4/58f10870f5c17595c5a37da4c6a0b321589b7d7976e10570088d445d0f47/yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195", size = 323578, upload-time = "2025-04-17T00:41:36.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/df/2506b1382cc0c4bb0d22a535dc3e7ccd53da9a59b411079013a7904ac35c/yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10", size = 343212, upload-time = "2025-04-17T00:41:38.396Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4a/d1c901d0e2158ad06bb0b9a92473e32d992f98673b93c8a06293e091bab0/yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634", size = 337956, upload-time = "2025-04-17T00:41:40.519Z" }, + { url = "https://files.pythonhosted.org/packages/8b/fd/10fcf7d86f49b1a11096d6846257485ef32e3d3d322e8a7fdea5b127880c/yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2", size = 333889, upload-time = "2025-04-17T00:41:42.437Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cd/bae926a25154ba31c5fd15f2aa6e50a545c840e08d85e2e2e0807197946b/yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a", size = 322282, upload-time = "2025-04-17T00:41:44.641Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/c3ac3597dfde746c63c637c5422cf3954ebf622a8de7f09892d20a68900d/yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867", size = 336270, upload-time = "2025-04-17T00:41:46.812Z" }, + { url = "https://files.pythonhosted.org/packages/dd/42/417fd7b8da5846def29712370ea8916a4be2553de42a2c969815153717be/yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995", size = 335500, upload-time = "2025-04-17T00:41:48.896Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/aa/c2339683f8f05f4be16831b6ad58d04406cf1c7730e48a12f755da9f5ac5/yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487", size = 339672, upload-time = "2025-04-17T00:41:50.965Z" }, + { url = "https://files.pythonhosted.org/packages/be/12/ab6c4df95f00d7bc9502bf07a92d5354f11d9d3cb855222a6a8d2bd6e8da/yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2", size = 351840, upload-time = "2025-04-17T00:41:53.074Z" }, + { url = "https://files.pythonhosted.org/packages/83/3c/08d58c51bbd3899be3e7e83cd7a691fdcf3b9f78b8699d663ecc2c090ab7/yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61", size = 359550, upload-time = "2025-04-17T00:41:55.517Z" }, + { url = "https://files.pythonhosted.org/packages/8a/15/de7906c506f85fb476f0edac4bd74569f49e5ffdcf98e246a0313bf593b9/yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19", size = 351108, upload-time = "2025-04-17T00:41:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/2c/29/8f291e7922a58a21349683f6120a85701aeefaa02e9f7c8a2dc24fe3f431/yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e", size = 355788, upload-time = "2025-04-17T00:42:09.902Z" }, + { url = "https://files.pythonhosted.org/packages/26/6d/b4892c80b805c42c228c6d11e03cafabf81662d371b0853e7f0f513837d5/yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9", size = 344613, upload-time = "2025-04-17T00:42:11.768Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/0e/517aa28d3f848589bae9593717b063a544b86ba0a807d943c70f48fcf3bb/yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a", size = 370953, upload-time = "2025-04-17T00:42:13.983Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/5bd09d2f1ad6e6f7c2beae9e50db78edd2cca4d194d227b958955573e240/yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2", size = 369204, upload-time = "2025-04-17T00:42:16.386Z" }, + { url = "https://files.pythonhosted.org/packages/9c/85/d793a703cf4bd0d4cd04e4b13cc3d44149470f790230430331a0c1f52df5/yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2", size = 358108, upload-time = "2025-04-17T00:42:18.622Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/b6c71e13549c1f6048fbc14ce8d930ac5fb8bafe4f1a252e621a24f3f1f9/yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8", size = 346610, upload-time = "2025-04-17T00:42:20.9Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1a/d6087d58bdd0d8a2a37bbcdffac9d9721af6ebe50d85304d9f9b57dfd862/yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902", size = 365378, upload-time = "2025-04-17T00:42:22.926Z" }, + { url = "https://files.pythonhosted.org/packages/02/84/e25ddff4cbc001dbc4af76f8d41a3e23818212dd1f0a52044cbc60568872/yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791", size = 356919, upload-time = "2025-04-17T00:42:25.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/76/898ae362353bf8f64636495d222c8014c8e5267df39b1a9fe1e1572fb7d0/yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f", size = 364248, upload-time = "2025-04-17T00:42:27.475Z" }, + { url = "https://files.pythonhosted.org/packages/1b/b0/9d9198d83a622f1c40fdbf7bd13b224a6979f2e1fc2cf50bfb1d8773c495/yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da", size = 378418, upload-time = "2025-04-17T00:42:29.333Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ce/1f50c1cc594cf5d3f5bf4a9b616fca68680deaec8ad349d928445ac52eb8/yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4", size = 383850, upload-time = "2025-04-17T00:42:31.668Z" }, + { url = "https://files.pythonhosted.org/packages/89/1e/a59253a87b35bfec1a25bb5801fb69943330b67cfd266278eb07e0609012/yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5", size = 381218, upload-time = "2025-04-17T00:42:33.523Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4e/929633b249611eeed04e2f861a14ed001acca3ef9ec2a984a757b1515889/yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33", size = 343972, upload-time = "2025-04-17T00:42:45.391Z" }, + { url = "https://files.pythonhosted.org/packages/49/fd/047535d326c913f1a90407a3baf7ff535b10098611eaef2c527e32e81ca1/yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58", size = 339639, upload-time = "2025-04-17T00:42:47.552Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/2f/11566f1176a78f4bafb0937c0072410b1b0d3640b297944a6a7a556e1d0b/yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f", size = 353745, upload-time = "2025-04-17T00:42:49.406Z" }, + { url = "https://files.pythonhosted.org/packages/26/17/07dfcf034d6ae8837b33988be66045dd52f878dfb1c4e8f80a7343f677be/yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae", size = 354178, upload-time = "2025-04-17T00:42:51.588Z" }, + { url = "https://files.pythonhosted.org/packages/15/45/212604d3142d84b4065d5f8cab6582ed3d78e4cc250568ef2a36fe1cf0a5/yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018", size = 349219, upload-time = "2025-04-17T00:42:53.674Z" }, + { url = "https://files.pythonhosted.org/packages/e6/e0/a10b30f294111c5f1c682461e9459935c17d467a760c21e1f7db400ff499/yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672", size = 337266, upload-time = "2025-04-17T00:42:55.49Z" }, + { url = "https://files.pythonhosted.org/packages/33/a6/6efa1d85a675d25a46a167f9f3e80104cde317dfdf7f53f112ae6b16a60a/yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8", size = 360873, upload-time = "2025-04-17T00:42:57.895Z" }, + { url = "https://files.pythonhosted.org/packages/77/67/c8ab718cb98dfa2ae9ba0f97bf3cbb7d45d37f13fe1fbad25ac92940954e/yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7", size = 360524, upload-time = "2025-04-17T00:43:00.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/e8/c3f18660cea1bc73d9f8a2b3ef423def8dadbbae6c4afabdb920b73e0ead/yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594", size = 365370, upload-time = "2025-04-17T00:43:02.242Z" }, + { url = "https://files.pythonhosted.org/packages/c9/99/33f3b97b065e62ff2d52817155a89cfa030a1a9b43fee7843ef560ad9603/yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6", size = 373297, upload-time = "2025-04-17T00:43:04.189Z" }, + { url = "https://files.pythonhosted.org/packages/3d/89/7519e79e264a5f08653d2446b26d4724b01198a93a74d2e259291d538ab1/yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1", size = 378771, upload-time = "2025-04-17T00:43:06.609Z" }, + { url = "https://files.pythonhosted.org/packages/3a/58/6c460bbb884abd2917c3eef6f663a4a873f8dc6f498561fc0ad92231c113/yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b", size = 375000, upload-time = "2025-04-17T00:43:09.01Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ee/7ee43bd4cf82dddd5da97fcaddb6fa541ab81f3ed564c42f146c83ae17ce/yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0", size = 343070, upload-time = "2025-04-17T00:43:21.426Z" }, + { url = "https://files.pythonhosted.org/packages/4a/12/b5eccd1109e2097bcc494ba7dc5de156e41cf8309fab437ebb7c2b296ce3/yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501", size = 337739, upload-time = "2025-04-17T00:43:23.634Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/6b/0eade8e49af9fc2585552f63c76fa59ef469c724cc05b29519b19aa3a6d5/yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc", size = 351338, upload-time = "2025-04-17T00:43:25.695Z" }, + { url = "https://files.pythonhosted.org/packages/45/cb/aaaa75d30087b5183c7b8a07b4fb16ae0682dd149a1719b3a28f54061754/yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d", size = 353636, upload-time = "2025-04-17T00:43:27.876Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/d9cb39ec68a91ba6e66fa86d97003f58570327d6713833edf7ad6ce9dde5/yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0", size = 348061, upload-time = "2025-04-17T00:43:29.788Z" }, + { url = "https://files.pythonhosted.org/packages/72/6b/103940aae893d0cc770b4c36ce80e2ed86fcb863d48ea80a752b8bda9303/yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a", size = 334150, upload-time = "2025-04-17T00:43:31.742Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b2/986bd82aa222c3e6b211a69c9081ba46484cffa9fab2a5235e8d18ca7a27/yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2", size = 362207, upload-time = "2025-04-17T00:43:34.099Z" }, + { url = "https://files.pythonhosted.org/packages/14/7c/63f5922437b873795d9422cbe7eb2509d4b540c37ae5548a4bb68fd2c546/yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9", size = 361277, upload-time = "2025-04-17T00:43:36.202Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/83/450938cccf732466953406570bdb42c62b5ffb0ac7ac75a1f267773ab5c8/yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5", size = 364990, upload-time = "2025-04-17T00:43:38.551Z" }, + { url = "https://files.pythonhosted.org/packages/b4/de/af47d3a47e4a833693b9ec8e87debb20f09d9fdc9139b207b09a3e6cbd5a/yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877", size = 374684, upload-time = "2025-04-17T00:43:40.481Z" }, + { url = "https://files.pythonhosted.org/packages/62/0b/078bcc2d539f1faffdc7d32cb29a2d7caa65f1a6f7e40795d8485db21851/yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e", size = 382599, upload-time = "2025-04-17T00:43:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/74/a9/4fdb1a7899f1fb47fd1371e7ba9e94bff73439ce87099d5dd26d285fffe0/yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384", size = 378573, upload-time = "2025-04-17T00:43:44.797Z" }, + { url = "https://files.pythonhosted.org/packages/28/f4/a2a4c967c8323c03689383dff73396281ced3b35d0ed140580825c826af7/yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1", size = 408231, upload-time = "2025-04-17T00:43:57.825Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a1/66f7ffc0915877d726b70cc7a896ac30b6ac5d1d2760613603b022173635/yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb", size = 390221, upload-time = "2025-04-17T00:44:00.526Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/15/cc248f0504610283271615e85bf38bc014224122498c2016d13a3a1b8426/yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00", size = 411400, upload-time = "2025-04-17T00:44:02.853Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/f0823d7e092bfb97d24fce6c7269d67fcd1aefade97d0a8189c4452e4d5e/yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de", size = 411714, upload-time = "2025-04-17T00:44:04.904Z" }, + { url = "https://files.pythonhosted.org/packages/83/70/be418329eae64b9f1b20ecdaac75d53aef098797d4c2299d82ae6f8e4663/yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5", size = 404279, upload-time = "2025-04-17T00:44:07.721Z" }, + { url = "https://files.pythonhosted.org/packages/19/f5/52e02f0075f65b4914eb890eea1ba97e6fd91dd821cc33a623aa707b2f67/yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a", size = 384044, upload-time = "2025-04-17T00:44:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/6a/36/b0fa25226b03d3f769c68d46170b3e92b00ab3853d73127273ba22474697/yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9", size = 416236, upload-time = "2025-04-17T00:44:11.734Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3a/54c828dd35f6831dfdd5a79e6c6b4302ae2c5feca24232a83cb75132b205/yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145", size = 402034, upload-time = "2025-04-17T00:44:13.975Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/97/c7bf5fba488f7e049f9ad69c1b8fdfe3daa2e8916b3d321aa049e361a55a/yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda", size = 407943, upload-time = "2025-04-17T00:44:16.052Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a4/022d2555c1e8fcff08ad7f0f43e4df3aba34f135bff04dd35d5526ce54ab/yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f", size = 423058, upload-time = "2025-04-17T00:44:18.547Z" }, + { url = "https://files.pythonhosted.org/packages/4c/f6/0873a05563e5df29ccf35345a6ae0ac9e66588b41fdb7043a65848f03139/yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd", size = 423792, upload-time = "2025-04-17T00:44:20.639Z" }, + { url = "https://files.pythonhosted.org/packages/9e/35/43fbbd082708fa42e923f314c24f8277a28483d219e049552e5007a9aaca/yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f", size = 422242, upload-time = "2025-04-17T00:44:22.851Z" }, + { url = "https://files.pythonhosted.org/packages/bf/8d/48edf4d49ca38e5229faf793276bdd6f01704740dcf519cf1d282acac6c6/yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94", size = 332687, upload-time = "2025-04-17T00:44:36.855Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c1/112c516bead873c83abe30e08143714d702d1fffdfed43dc103312b81666/yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d", size = 325390, upload-time = "2025-04-17T00:44:38.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/4c/07aef11f7f23a41049eb0b3b357ceb32bd9798f62042858e0168be9f6f49/yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c", size = 348497, upload-time = "2025-04-17T00:44:42.453Z" }, + { url = "https://files.pythonhosted.org/packages/56/d9/00d5525a2c5e5c66967eaa03866bef6317da4b129ae016582c6641826974/yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c", size = 343670, upload-time = "2025-04-17T00:44:44.822Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7c/2fc733090c6fce82ea5c50f431e70f5dff196d7b54da93b9d6e801031dd2/yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a", size = 335738, upload-time = "2025-04-17T00:44:47.352Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ce/6b22de535b7bc7b19f3cf23c4e744cd2368fa11a0c8f218dfd2ef46b6c3a/yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656", size = 328203, upload-time = "2025-04-17T00:44:49.728Z" }, + { url = "https://files.pythonhosted.org/packages/6b/c8/3fc10db34e731a426baaff348aa1b2c0eb9cb93ff723af4e930e767c058e/yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c", size = 341922, upload-time = "2025-04-17T00:44:52.233Z" }, + { url = "https://files.pythonhosted.org/packages/37/59/f607a63c24b31c66cf288cb819d8dbcac2bd9ec90f39bd03986f33a866b3/yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64", size = 338163, upload-time = "2025-04-17T00:44:54.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/b2/5fd461fe8ab3bb788e19ef6c35a3453f44a5c0d6973f847a08060c4d6183/yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20", size = 343096, upload-time = "2025-04-17T00:44:56.789Z" }, + { url = "https://files.pythonhosted.org/packages/71/d3/7102efd34ed22e6839361f30a27bdad341c0a01f66fcbf09822a1d90b853/yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa", size = 358520, upload-time = "2025-04-17T00:44:58.974Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ab/754b60a5c8be8abaa746543555612b2205ba60c194fc3a0547a34e0b6a53/yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5", size = 359635, upload-time = "2025-04-17T00:45:01.457Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d5/369f994369a7233fcd81f642553062d4f6c657a93069b58258b9046bb87d/yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0", size = 353906, upload-time = "2025-04-17T00:45:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/ea/1f/70c57b3d7278e94ed22d85e09685d3f0a38ebdd8c5c73b65ba4c0d0fe002/yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124", size = 46124, upload-time = "2025-04-17T00:45:12.199Z" }, ] [[package]] name = "zipp" version = "3.22.0" -source = { registry = "https://pypi.jetson-ai-lab.dev/jp6/cu126/+simple" } -sdist = { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/dd2/f28c3ce4bc675/zipp-3.22.0.tar.gz", hash = "sha256:dd2f28c3ce4bc67507bfd3781d21b7bb2be31103b51a4553ad7d90b84e57ace5" } +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/12/b6/7b3d16792fdf94f146bed92be90b4eb4563569eca91513c8609aebf0c167/zipp-3.22.0.tar.gz", hash = "sha256:dd2f28c3ce4bc67507bfd3781d21b7bb2be31103b51a4553ad7d90b84e57ace5", size = 25257, upload-time = "2025-05-26T14:46:32.217Z" } wheels = [ - { url = "https://pypi.jetson-ai-lab.dev/root/pypi/+f/fe2/08f65f2aca48b/zipp-3.22.0-py3-none-any.whl", hash = "sha256:fe208f65f2aca48b81f9e6fd8cf7b8b32c26375266b009b413d45306b6148343" }, + { url = "https://files.pythonhosted.org/packages/ad/da/f64669af4cae46f17b90798a827519ce3737d31dbafad65d391e49643dc4/zipp-3.22.0-py3-none-any.whl", hash = "sha256:fe208f65f2aca48b81f9e6fd8cf7b8b32c26375266b009b413d45306b6148343", size = 9796, upload-time = "2025-05-26T14:46:30.775Z" }, ] diff --git a/version.txt b/version.txt index 11922a5ce1..03e905f0db 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.8.0a0 +2.9.0a0