From 074052b7f16d351729260db12327d1b75221a775 Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 10:12:21 +0200 Subject: [PATCH 01/10] First iteration of refactoring of tasks.py to be fully dynamic instead of hardcoded like before. All code refined and refactored. Not everything is verified right now. --- tasks.py | 615 ++++++++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 516 insertions(+), 99 deletions(-) diff --git a/tasks.py b/tasks.py index a318792..2ea6373 100644 --- a/tasks.py +++ b/tasks.py @@ -1,44 +1,218 @@ +""" +Enhanced Redis Docker Cluster Build Tool using Python Invoke + +This module provides an improved CLI interface for building, pulling, and pushing +Redis cluster Docker images with enhanced documentation and dictionary-based +parameter handling. + +Usage Examples: + invoke pull --version latest + invoke build --version 7.2 --cpu 4 + invoke push --version all + invoke list + invoke list-releases +""" + import multiprocessing import requests - +import re +import sys from multiprocessing import Pool from invoke import task +from invoke.context import Context -latest_version_string = "7.2.5" +def fetch_github_releases(): + """ + Fetch Redis releases from GitHub API and filter valid semver versions. + + Returns list of valid semver release versions (excludes RCs, betas, etc.) + Exits with error if GitHub cannot be reached or no versions found. 
+ """ + releases = [] + + try: + for page in range(1, 10): # Fetch more pages to get comprehensive history + response = requests.get( + "https://api.github.com/repos/redis/redis/releases", + params={"page": page, "per_page": 100}, + timeout=30 + ) + + if response.status_code == 200: + page_releases = response.json() + if not page_releases: # No more releases + break + + for release in page_releases: + tag_name = release.get("tag_name", "") + release_name = release.get("name", "") + + # Use tag_name primarily, fallback to name + version = tag_name if tag_name else release_name + + # Filter valid semver versions (exclude RCs, betas, alphas, etc.) + if is_valid_semver_release(version): + releases.append(version) + + elif response.status_code == 403: + print("Error: GitHub API rate limit exceeded. Please try again later.") + sys.exit(1) + else: + print(f"Error: GitHub API returned status {response.status_code}") + sys.exit(1) + + except requests.exceptions.RequestException as e: + print(f"Error: Cannot connect to GitHub API: {e}") + sys.exit(1) + + if not releases: + print("Error: No valid Redis versions found on GitHub") + sys.exit(1) + + return sorted(set(releases), key=lambda x: version_sort_key(x)) + + +def is_valid_semver_release(version): + """ + Check if a version string is a valid semantic version release. + + Excludes pre-release versions (rc, alpha, beta, etc.) + """ + # Remove 'v' prefix if present + clean_version = version.lstrip('v') + + # Pattern for semantic versioning (major.minor.patch) + semver_pattern = r'^(\d+)\.(\d+)\.(\d+)$' + + # Exclude pre-release versions (rc, alpha, beta, etc.) 
+ exclude_patterns = [ + r'rc\d*', # release candidates + r'alpha', # alpha versions + r'beta', # beta versions + r'pre', # pre-release + r'dev', # development + r'unstable', # unstable + r'-', # any version with dash (pre-release indicator) + ] + + # Check if it matches semver pattern + if not re.match(semver_pattern, clean_version): + return False + + # Check if it contains any excluded patterns + for pattern in exclude_patterns: + if re.search(pattern, clean_version, re.IGNORECASE): + return False + + return True + + +def version_sort_key(version): + """ + Create a sort key for version strings for proper version ordering. + """ + clean_version = version.lstrip('v') + parts = clean_version.split('.') + + try: + return tuple(int(part) for part in parts) + except ValueError: + # Fallback for non-numeric parts + return (0, 0, 0) -# Unpublished versions -version_config_mapping = [] -version_config_mapping += [f"3.0.{i}" for i in range(0, 8)] -version_config_mapping += [f"3.2.{i}" for i in range(0, 14)] -version_config_mapping += [f"4.0.{i}" for i in range(0, 15)] -version_config_mapping += [f"5.0.{i}" for i in range(0, 13)] -version_config_mapping += [f"6.0.{i}" for i in range(0, 21)] -# Published versions -version_config_mapping += [f"6.2.{i}" for i in range(0, 15)] -version_config_mapping += [f"7.0.{i}" for i in range(0, 16)] -version_config_mapping += [f"7.2.{i}" for i in range(0, 6)] -version_config_mapping += ["7.4-rc1"] +def generate_version_ranges(github_releases): + """ + Generate comprehensive version ranges based on GitHub releases. + + Creates complete version mapping including intermediate versions. 
+ """ + version_mapping = [] + + # Group releases by major.minor + major_minor_groups = {} + for version in github_releases: + clean_version = version.lstrip('v') + parts = clean_version.split('.') + + if len(parts) >= 2: + try: + major = int(parts[0]) + minor = int(parts[1]) + patch = int(parts[2]) if len(parts) > 2 else 0 + + key = f"{major}.{minor}" + if key not in major_minor_groups: + major_minor_groups[key] = [] + major_minor_groups[key].append(patch) + except ValueError: + continue + + # Generate ranges for each major.minor group + for major_minor, patches in major_minor_groups.items(): + max_patch = max(patches) + + # Generate all versions from 0 to max_patch + for patch in range(0, max_patch + 1): + version_mapping.append(f"{major_minor}.{patch}") + + return sorted(set(version_mapping), key=lambda x: version_sort_key(x)) + + +def get_latest_version(versions): + """ + Get the latest version from a list of versions. + """ + if not versions: + print("Error: No versions provided to determine latest") + sys.exit(1) + + return max(versions, key=lambda x: version_sort_key(x)) -def version_name_to_version(version): +def initialize_version_data(): """ - Helper method that returns correct versions if you specify either - - all - - latest - - or it will filter your chosen version based on what you inputed as version argument + Initialize version data by fetching from GitHub. + Returns tuple of (version_mapping, latest_version) + """ + print("Fetching Redis releases from GitHub...") + github_releases = fetch_github_releases() + + version_mapping = generate_version_ranges(github_releases) + latest_version = get_latest_version(github_releases) + + print(f"Successfully loaded {len(version_mapping)} versions from GitHub") + print(f"Latest version detected: {latest_version}") + + return version_mapping, latest_version + + +def version_name_to_version(version, version_mapping, latest_version): + """ + Convert version specification to actual version list. 
+ + Handles special version keywords and filters versions based on user input. + - "all": Returns all available versions + - "latest": Returns only the latest stable version + - Specific version pattern: Returns matching versions (e.g., "7.2" returns all 7.2.x versions) """ if version == "all": - return version_config_mapping + return version_mapping elif version == "latest": - return [latest_version_string] + return [latest_version] else: - return filter_versions(version) + return filter_versions(version, version_mapping) def get_pool_size(cpu_from_cli): + """ + Determine optimal multiprocessing pool size. + + Calculates the number of worker processes to use for parallel operations. + If no CPU count is specified, uses system CPU count minus 1 to avoid + overwhelming the system. + """ if cpu_from_cli: pool_size = int(cpu_from_cli) else: @@ -48,10 +222,16 @@ def get_pool_size(cpu_from_cli): return pool_size -def filter_versions(desired_version): +def filter_versions(desired_version, version_mapping): + """ + Filter available versions based on prefix matching. + + Searches through all available Redis versions and returns those + that start with the specified version pattern. + """ result = [] - for version in version_config_mapping: + for version in version_mapping: if version.startswith(desired_version): result.append(version) @@ -60,102 +240,339 @@ def filter_versions(desired_version): def _docker_pull(config): """ - Internal multiprocess method to run docker pull command + Internal multiprocess worker for Docker pull operations. + + Executes docker pull command for a specific Redis cluster version + in a separate process for parallel execution. 
+ + Config dictionary should contain: + - 'context': Invoke context object + - 'version': Redis version to pull """ - c, version = config - print(f" -- Starting docker pull for version : {version}") + context = config['context'] + version = config['version'] + + print(f" -- Starting docker pull for version: {version}") pull_command = f"docker pull grokzen/redis-cluster:{version}" - c.run(pull_command) + + try: + context.run(pull_command) + print(f" -- Successfully pulled version: {version}") + except Exception as e: + print(f" -- Error pulling version {version}: {e}") def _docker_build(config): """ - Internal multiprocess method to run docker build command + Internal multiprocess worker for Docker build operations. + + Executes docker build command for a specific Redis cluster version + in a separate process for parallel execution. + + Config dictionary should contain: + - 'context': Invoke context object + - 'version': Redis version to build """ - c, version = config - print(f" -- Starting docker build for version : {version}") + context = config['context'] + version = config['version'] + + print(f" -- Starting docker build for version: {version}") build_command = f"docker build --build-arg redis_version={version} -t grokzen/redis-cluster:{version} ." - c.run(build_command) + + try: + context.run(build_command) + print(f" -- Successfully built version: {version}") + except Exception as e: + print(f" -- Error building version {version}: {e}") def _docker_push(config): """ - Internal multiprocess method to run docker push command + Internal multiprocess worker for Docker push operations. + + Executes docker push command for a specific Redis cluster version + in a separate process for parallel execution. 
+ + Config dictionary should contain: + - 'context': Invoke context object + - 'version': Redis version to push """ - c, version = config - print(f" -- Starting docker push for version : {version}") - build_command = f"docker push grokzen/redis-cluster:{version}" - c.run(build_command) - - -@task + context = config['context'] + version = config['version'] + + print(f" -- Starting docker push for version: {version}") + push_command = f"docker push grokzen/redis-cluster:{version}" + + try: + context.run(push_command) + print(f" -- Successfully pushed version: {version}") + except Exception as e: + print(f" -- Error pushing version {version}: {e}") + + +@task(help={ + 'version': 'Redis version to pull. Options: "all", "latest", or specific version pattern (e.g., "7.2")', + 'cpu': 'Number of CPU cores to use for parallel processing (default: system cores - 1)' +}) def pull(c, version, cpu=None): - print(f" -- Docker pull version docker-hub : {version}") + """ + Pull Redis cluster Docker images from Docker Hub. + + Downloads pre-built Redis cluster images for the specified version(s) + using parallel processing for improved performance. + + Examples: + Pull latest stable version: + $ invoke pull --version latest + + Pull all 7.2.x versions: + $ invoke pull --version 7.2 + + Pull all versions using 4 CPU cores: + $ invoke pull --version all --cpu 4 + + Note: + Requires Docker to be installed and accessible via command line. + Large version sets may take considerable time to complete. 
+ """ + print(f" -- Docker pull from Docker Hub for version: {version}") + + # Initialize version data from GitHub + version_mapping, latest_version = initialize_version_data() + + versions = version_name_to_version(version, version_mapping, latest_version) + if not versions: + print(f"Error: No versions found matching '{version}'") + print("Use 'invoke list' to see available versions") + return + + print(f" -- Found {len(versions)} version(s) to pull") pool = Pool(get_pool_size(cpu)) - pool.map( - _docker_pull, - [ - [c, version] - for version in version_name_to_version(version) - ], - ) - - -@task + + configs = [ + {'context': c, 'version': v} + for v in versions + ] + + try: + pool.map(_docker_pull, configs) + print(f" -- Completed pulling {len(versions)} version(s)") + finally: + pool.close() + pool.join() + + +@task(help={ + 'version': 'Redis version to build. Options: "all", "latest", or specific version pattern (e.g., "7.2")', + 'cpu': 'Number of CPU cores to use for parallel processing (default: system cores - 1)' +}) def build(c, version, cpu=None): - print(f" -- Docker building version : {version}") + """ + Build Redis cluster Docker images locally. + + Compiles Redis cluster Docker images for the specified version(s) + using parallel processing. Requires a Dockerfile in the current directory. + + Examples: + Build latest stable version: + $ invoke build --version latest + + Build all 6.2.x versions: + $ invoke build --version 6.2 + + Build specific version with custom CPU count: + $ invoke build --version 7.2.5 --cpu 2 + + Note: + Requires Docker and a properly configured Dockerfile. + Building all versions can take several hours and significant disk space. 
+ """ + print(f" -- Docker building version: {version}") + + # Initialize version data from GitHub + version_mapping, latest_version = initialize_version_data() + + versions = version_name_to_version(version, version_mapping, latest_version) + if not versions: + print(f"Error: No versions found matching '{version}'") + print("Use 'invoke list' to see available versions") + return + + print(f" -- Found {len(versions)} version(s) to build") pool = Pool(get_pool_size(cpu)) - pool.map( - _docker_build, - [ - [c, version] - for version in version_name_to_version(version) - ], - ) - - -@task + + configs = [ + {'context': c, 'version': v} + for v in versions + ] + + try: + pool.map(_docker_build, configs) + print(f" -- Completed building {len(versions)} version(s)") + finally: + pool.close() + pool.join() + + +@task(help={ + 'version': 'Redis version to push. Options: "all", "latest", or specific version pattern (e.g., "7.2")', + 'cpu': 'Number of CPU cores to use for parallel processing (default: system cores - 1)' +}) def push(c, version, cpu=None): - print(f" -- Docker push version to docker-hub : {version}") + """ + Push Redis cluster Docker images to Docker Hub. + + Uploads locally built Redis cluster images to Docker Hub registry + using parallel processing for improved performance. + + Examples: + Push latest version: + $ invoke push --version latest + + Push all 7.0.x versions: + $ invoke push --version 7.0 + + Push all versions with limited parallelism: + $ invoke push --version all --cpu 2 + + Note: + Requires Docker Hub authentication and push permissions. + Images must be built locally before pushing. 
+ """ + print(f" -- Docker push to Docker Hub for version: {version}") + + # Initialize version data from GitHub + version_mapping, latest_version = initialize_version_data() + + versions = version_name_to_version(version, version_mapping, latest_version) + if not versions: + print(f"Error: No versions found matching '{version}'") + print("Use 'invoke list' to see available versions") + return + + print(f" -- Found {len(versions)} version(s) to push") pool = Pool(get_pool_size(cpu)) - pool.map( - _docker_push, - [ - [c, version] - for version in version_name_to_version(version) - ], - ) - - -@task + + configs = [ + {'context': c, 'version': v} + for v in versions + ] + + try: + pool.map(_docker_push, configs) + print(f" -- Completed pushing {len(versions)} version(s)") + finally: + pool.close() + pool.join() + + +@task(help={}) def list(c): + """ + Display all available Redis versions. + + Shows a comprehensive list of all Redis versions that can be built, + including both published and unpublished versions. Useful for + identifying available version patterns. + + Examples: + $ invoke list + + Output: + Displays formatted list of all available Redis versions + organized by major.minor version groups. 
+ """ from pprint import pprint - pprint(version_config_mapping, indent=2) - - -@task -def list_releases(c): - releases = [] - - for page in range(1, 5): - data = requests.get("https://api.github.com/repos/redis/redis/releases", params={"page": int(page)}) - - if data.status_code == 200: - for release in data.json(): - r = release["name"] - - if "rc" in r or r.startswith("5"): - pass - else: - releases.append(r) - else: - print("Error, stopping") - - for released_version in releases: - if released_version in version_config_mapping: - print(f"Release found - {released_version}") + + print("Available Redis versions:") + print("=" * 50) + + # Initialize version data from GitHub + version_mapping, latest_version = initialize_version_data() + + # Group versions by major.minor for better readability + version_groups = {} + for version in version_mapping: + if "rc" in version: + key = "Release Candidates" else: - print(f"NOT found - {released_version}") + parts = version.split('.') + if len(parts) >= 2: + key = f"{parts[0]}.{parts[1]}.x" + else: + key = "Other" + + if key not in version_groups: + version_groups[key] = [] + version_groups[key].append(version) + + for group, versions in sorted(version_groups.items()): + print(f"\n{group}:") + pprint(versions, indent=2, width=100) + + print(f"\nTotal versions available: {len(version_mapping)}") + print(f"Latest stable version: {latest_version}") + + +@task(name='list-releases', help={}) +def list_releases(c): + """ + Display GitHub releases and show dynamic version loading status. + + Fetches fresh Redis releases from GitHub API and displays comprehensive + information about available versions, latest releases, and the dynamic + version loading process. + + Examples: + $ invoke list-releases + + Output: + Shows GitHub releases, dynamic version loading status, and statistics. + + Note: + Requires internet connection to access GitHub API. + API rate limits may apply for unauthenticated requests. 
+ """ + print("Fetching fresh GitHub releases...") + print("=" * 60) + + # Fetch fresh releases directly (this will exit on error) + fresh_releases = fetch_github_releases() + latest_version = get_latest_version(fresh_releases) + current_generated = generate_version_ranges(fresh_releases) + + print(f"✓ Successfully fetched {len(fresh_releases)} valid releases from GitHub") + print(f"✓ Latest version found: {latest_version}") + + # Show version range statistics + major_minor_stats = {} + for version in fresh_releases: + parts = version.lstrip('v').split('.') + if len(parts) >= 2: + key = f"{parts[0]}.{parts[1]}.x" + if key not in major_minor_stats: + major_minor_stats[key] = 0 + major_minor_stats[key] += 1 + + print(f"\nVersion Series Statistics:") + print("-" * 30) + for series, count in sorted(major_minor_stats.items(), key=lambda x: version_sort_key(x[0])): + print(f"{series:10} : {count:3} releases") + + # Show recent releases (last 10) + print(f"\nRecent Releases (Latest 10):") + print("-" * 30) + recent_releases = sorted(fresh_releases, key=lambda x: version_sort_key(x), reverse=True)[:10] + for i, version in enumerate(recent_releases, 1): + marker = "← LATEST" if i == 1 else "" + print(f"{i:2}. 
{version} {marker}") + + print(f"\nDynamic Version Generation:") + print("-" * 30) + print(f"GitHub releases found : {len(fresh_releases)}") + print(f"Generated version ranges : {len(current_generated)}") + print(f"Latest version detected : {latest_version}") + print(f"Total available versions : {len(current_generated)}") From 898d3af59c56b922f11545cf4545e039f79e5bec Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 10:23:31 +0200 Subject: [PATCH 02/10] Updates to Makefile --- Makefile | 46 +++++++++++++++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 9 deletions(-) diff --git a/Makefile b/Makefile index 582283f..ac4241c 100644 --- a/Makefile +++ b/Makefile @@ -1,26 +1,54 @@ help: @echo "Please use 'make ' where is one of" - @echo " build builds docker-compose containers" - @echo " up starts docker-compose containers" - @echo " down stops the running docker-compose containers" + @echo "" + @echo "Docker Compose commands:" + @echo " build builds docker compose containers" + @echo " up starts docker compose containers" + @echo " down stops the running docker compose containers" @echo " rebuild rebuilds the image from scratch without using any cached layers" @echo " bash starts bash inside a running container." 
@echo " cli run redis-cli inside the container on the server with port 7000" + @echo "" + @echo "Invoke tasks:" + @echo " pull pull Redis Docker images (use VERSION and CPU variables)" + @echo " build-images build Redis Docker images (use VERSION and CPU variables)" + @echo " push push Redis Docker images (use VERSION and CPU variables)" + @echo " list list all available Redis versions" + @echo " list-releases list Redis releases from GitHub" + +# Default values +CPU ?= 2 build: - docker-compose build + docker compose build up: - docker-compose up + docker compose up down: - docker-compose stop + docker compose stop rebuild: - docker-compose build --no-cache + docker compose build --no-cache bash: - docker-compose exec redis-cluster /bin/bash + docker compose exec redis-cluster /bin/bash cli: - docker-compose exec redis-cluster /redis/src/redis-cli -p 7000 + docker compose exec redis-cluster /redis/src/redis-cli -p 7000 + +# Invoke tasks +pull: + invoke pull --version $(VERSION) --cpu $(CPU) + +build-images: + invoke build --version $(VERSION) --cpu $(CPU) + +push: + invoke push --version $(VERSION) --cpu $(CPU) + +list: + invoke list + +list-releases: + invoke list-releases From 6538e263d992b09be8c521e58fd2484094c2f021 Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 10:50:15 +0200 Subject: [PATCH 03/10] Update README.md after AI suggestions --- README.md | 174 +++++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 126 insertions(+), 48 deletions(-) diff --git a/README.md b/README.md index 3dcea6a..110a237 100644 --- a/README.md +++ b/README.md @@ -82,64 +82,136 @@ docker run -e "IP=0.0.0.0" -p 7000-7005:7000-7005 grokzen/redis-cluster:latest -# Usage - -This git repo is using `invoke` to pull, build, push docker images. You can use it to build your own images if you like. +# Installation -The invoke scripts in this repo is written only for python 3.7 and above +## Prerequisites -Install `invoke` with `pip install invoke`. 
+- Docker (latest version recommended, minimum 1.10) +- Python 3.7+ (for building custom images using invoke) -This script will run `N num of cpu - 1` parralell tasks based on your version input. +## Installing Invoke (for building custom images) -To see available commands run `invoke -l` in the root folder of this repo. Example +If you want to build your own Redis cluster images, install the invoke task runner: +```bash +pip install invoke ``` -(tmp-615229a94c330b9) ➜ docker-redis-cluster git:(invoke) ✗ invoke -l -"Configured multiprocess pool size: 3 -Available tasks: - build - list - pull - push +## Quick Start with Pre-built Images + +The easiest way to get started is using pre-built images from Docker Hub: + +```bash +# Pull and run the latest Redis cluster +docker run -p 7000-7005:7000-7005 grokzen/redis-cluster:latest + +# Or use docker compose +docker compose up ``` -Each command is only taking one required positional argument `version`. Example: +# Usage +## Running the Redis Cluster + +### Using Docker Compose + +Start the Redis cluster with docker compose: + +```bash +# Start the cluster +make up + +# Stop the cluster +make down + +# Rebuild from scratch +make rebuild ``` -(tmp-615229a94c330b9) ➜ docker-redis-cluster git:(invoke) ✗ invoke build 7.0 -... + +### Using Docker Run + +Run directly with docker: + +```bash +docker run -p 7000-7005:7000-7005 grokzen/redis-cluster:latest ``` -and it will run the build step on all versions that starts with 6.0. +### Connecting to the Cluster -The only other optional usefull argument is `--cpu=N` and it will set how many paralell processes will be used. By default you will use n - 1 number of cpu cores that is available on your system. Commands like pull and push aare not very cpu intensive so using a higher number here might speed things up if you have good network bandwidth. 
+Connect using redis-cli: +```bash +# Connect from host +redis-cli -c -p 7000 -## Makefile (legacy) +# Or connect to the container's redis-cli +make cli +``` -Makefile still has a few docker-compose commands that can be used +## Building Custom Images -To build your own image run: +This repository uses `invoke` to manage building, pulling, and pushing Redis cluster images. The invoke scripts support parallel processing using multiple CPU cores. - make build +### Available Invoke Tasks -To run the container run: +```bash +invoke -l +``` - make up +Available tasks: +- `pull` - Pull Redis Docker images from Docker Hub +- `build` - Build Redis Docker images from source +- `push` - Push Redis Docker images to Docker Hub +- `list` - List all available Redis versions +- `list-releases` - List Redis releases from GitHub -To stop the container run: +### Examples - make down +```bash +# Pull all available versions +invoke pull --version all -To connect to your cluster you can use the redis-cli tool: +# Build specific version with 4 CPUs +invoke build --version 7.2 --cpu 4 - redis-cli -c -p 7000 +# Push latest version +invoke push --version latest -Or the built redis-cli tool inside the container that will connect to the cluster inside the container +# List all available versions +invoke list - make cli +# List releases from GitHub +invoke list-releases +``` + +### Makefile Targets + +For convenience, Makefile targets are provided for both docker compose operations and invoke tasks: + +```bash +# Docker Compose commands +make build # Build docker compose containers +make up # Start containers +make down # Stop containers +make rebuild # Rebuild without cache +make bash # Start bash in container +make cli # Connect redis-cli to cluster + +# Invoke tasks (use VERSION and CPU variables) +make pull VERSION=7.2 CPU=4 # Pull specific version +make build-images VERSION=7.2 # Build specific version +make push VERSION=latest # Push latest version +make list # List versions +make 
list-releases # List GitHub releases +``` + +Each invoke command supports version patterns: +- `all` - All available versions +- `latest` - Latest stable version +- `7.2` - All 7.2.x versions +- `7.2.1` - Specific version + +The `--cpu` parameter controls parallel processing (defaults to 2 if not specified). ## Include sentinel instances @@ -148,14 +220,16 @@ Sentinel instances is not enabled by default. If running with plain docker send in `-e SENTINEL=true`. -When running with docker-compose set the environment variable on your system `REDIS_USE_SENTINEL=true` and start your container. +When running with docker compose set the environment variable on your system `REDIS_USE_SENTINEL=true` and start your container. - version: '2' - services: - redis-cluster: - ... - environment: - SENTINEL: 'true' +```yaml +version: '2' +services: + redis-cluster: + ... + environment: + SENTINEL: 'true' +``` ## Change number of nodes @@ -174,14 +248,16 @@ At the docker-compose provided by this repository, ports 7000-7050 are already m Also note that the number of sentinels (if enabled) is the same as the number of masters. The docker-compose file already maps ports 5000-5010 by default. You should also override those values if you have more than 10 masters. - version: '2' - services: - redis-cluster: - ... - environment: - INITIAL_PORT: 9000, - MASTERS: 2, - SLAVES_PER_MASTER: 2 +```yaml +version: '2' +services: + redis-cluster: + ... + environment: + INITIAL_PORT: 9000, + MASTERS: 2, + SLAVES_PER_MASTER: 2 +``` ## IPv6 support @@ -218,8 +294,10 @@ To build a different redis version use the argument `--build-arg` argument. To build a different redis version use the `--build-arg` argument. 
- # Example docker-compose - docker-compose build --build-arg "redis_version=6.0.11" redis-cluster +```bash +# Example docker compose +docker compose build --build-arg "redis_version=6.0.11" redis-cluster +``` From 8b9f9ddb40338a7050ddc0f1cee39e5afe8e3602 Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 10:54:38 +0200 Subject: [PATCH 04/10] Update python requirements file to match modern versions of tools --- dev-requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dev-requirements.txt b/dev-requirements.txt index ee761aa..7f1dd1d 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,2 +1,2 @@ -invoke>=2.0.0 -requests>=2.28.2 +invoke>=2.2.0 +requests>=2.31.0 From f6d023be122a04fe54a2b96d9f9e4a0d16a5d83a Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 11:01:27 +0200 Subject: [PATCH 05/10] Add missing variable to Makefile. Create new github workflow to run some basic checks when pushing to MR:s --- .github/workflows/test-build.yml | 33 ++++++++++++++++++++++++++++++++ Makefile | 1 + 2 files changed, 34 insertions(+) create mode 100644 .github/workflows/test-build.yml diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml new file mode 100644 index 0000000..257f997 --- /dev/null +++ b/.github/workflows/test-build.yml @@ -0,0 +1,33 @@ +name: Test Build + +on: + pull_request: + branches: + - '**' + push: + branches-ignore: + - 'master' + +jobs: + test-build: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r dev-requirements.txt + + - name: Pull latest Redis image + run: invoke pull --version latest + + - name: Build Redis 7.2.0 image + run: invoke build --version 7.2.0 --cpu 1 \ No newline at end of file diff --git a/Makefile b/Makefile 
index ac4241c..8b05054 100644 --- a/Makefile +++ b/Makefile @@ -18,6 +18,7 @@ help: # Default values CPU ?= 2 +VERSION ?= 7.2 build: docker compose build From 16307adf4d1d552d7d6442ca1de6c5c6a154fdfe Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 11:05:39 +0200 Subject: [PATCH 06/10] Refactor pool handling to be a contextmanager for simpler usage and better reuse of code --- tasks.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/tasks.py b/tasks.py index 2ea6373..df2915f 100644 --- a/tasks.py +++ b/tasks.py @@ -20,6 +20,22 @@ from multiprocessing import Pool from invoke import task from invoke.context import Context +from contextlib import contextmanager + + +@contextmanager +def managed_pool(pool_size): + """ + Context manager for multiprocessing Pool that ensures proper cleanup. + + Automatically handles pool.close() and pool.join() when exiting the context. + """ + pool = Pool(pool_size) + try: + yield pool + finally: + pool.close() + pool.join() def fetch_github_releases(): @@ -348,19 +364,14 @@ def pull(c, version, cpu=None): print(f" -- Found {len(versions)} version(s) to pull") - pool = Pool(get_pool_size(cpu)) - configs = [ {'context': c, 'version': v} for v in versions ] - try: + with managed_pool(get_pool_size(cpu)) as pool: pool.map(_docker_pull, configs) print(f" -- Completed pulling {len(versions)} version(s)") - finally: - pool.close() - pool.join() @task(help={ @@ -401,19 +412,14 @@ def build(c, version, cpu=None): print(f" -- Found {len(versions)} version(s) to build") - pool = Pool(get_pool_size(cpu)) - configs = [ {'context': c, 'version': v} for v in versions ] - try: + with managed_pool(get_pool_size(cpu)) as pool: pool.map(_docker_build, configs) print(f" -- Completed building {len(versions)} version(s)") - finally: - pool.close() - pool.join() @task(help={ @@ -454,19 +460,14 @@ def push(c, version, cpu=None): print(f" -- Found {len(versions)} version(s) to push") 
- pool = Pool(get_pool_size(cpu)) - configs = [ {'context': c, 'version': v} for v in versions ] - try: + with managed_pool(get_pool_size(cpu)) as pool: pool.map(_docker_push, configs) print(f" -- Completed pushing {len(versions)} version(s)") - finally: - pool.close() - pool.join() @task(help={}) From 0354fd592e7f878053356c285d7d90210f042603 Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 11:45:04 +0200 Subject: [PATCH 07/10] Modularize tasks code into versions and cache file to split up the big file a bit --- .gitignore | 51 +++++++++ Makefile | 9 ++ cache.py | 65 ++++++++++++ dev-requirements.txt | 1 + tasks.py | 245 +++++-------------------------------------- versions.py | 213 +++++++++++++++++++++++++++++++++++++ 6 files changed, 366 insertions(+), 218 deletions(-) create mode 100644 .gitignore create mode 100644 cache.py create mode 100644 versions.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d28fec1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,51 @@ +# Cache directory +.cache/ + +# Python +__pycache__/ +*.pyc +*.pyo +*.pyd +.Python +env/ +venv/ +.venv/ +pip-log.txt +pip-delete-this-directory.txt +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.log +.git +.mypy_cache +.pytest_cache +.hypothesis + +# Virtual environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# OS +.DS_Store +.DS_Store? 
+._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db \ No newline at end of file diff --git a/Makefile b/Makefile index 8b05054..165c90b 100644 --- a/Makefile +++ b/Makefile @@ -15,6 +15,9 @@ help: @echo " push push Redis Docker images (use VERSION and CPU variables)" @echo " list list all available Redis versions" @echo " list-releases list Redis releases from GitHub" + @echo "" + @echo "Cache management tasks:" + @echo " clear-cache clear the GitHub API cache" # Default values CPU ?= 2 @@ -53,3 +56,9 @@ list: list-releases: invoke list-releases + +# Cache management +clear-cache: + rm -rf .cache/ + +.PHONY: clear-cache diff --git a/cache.py b/cache.py new file mode 100644 index 0000000..2733333 --- /dev/null +++ b/cache.py @@ -0,0 +1,65 @@ +""" +GitHub API caching functionality for Redis Docker Cluster Build Tool. + +Provides persistent caching of GitHub API responses to reduce API calls +and improve performance across multiple invocations. +""" + +import os +import json +import atexit +from time import time +from cachetools import TTLCache, cached + + +# Cache configuration +CACHE_FILE = ".cache/github_releases.json" +cache = TTLCache(maxsize=128, ttl=1800) # 30-minute TTL + + +def ensure_cache_dir(): + """Ensure the cache directory exists.""" + os.makedirs(os.path.dirname(CACHE_FILE), exist_ok=True) + + +def load_cache(): + """Load cache from file if it exists.""" + try: + with open(CACHE_FILE, 'r') as f: + data = json.load(f) + # Only load entries not expired + now = time() + for key, (value, expiry) in data.items(): + if now < expiry: + cache[key] = (value, expiry) + except (FileNotFoundError, json.JSONDecodeError, KeyError): + pass + + +def save_cache(): + """Save cache to file.""" + ensure_cache_dir() + data = {key: (value, expiry) for key, (value, expiry) in cache.items()} + with open(CACHE_FILE, 'w') as f: + json.dump(data, f) + + +# Load cache on module import +load_cache() + +# Save cache on exit +atexit.register(save_cache) + + +@cached(cache) +def 
fetch_github_releases_cached(): + """ + Fetch Redis releases from GitHub API with caching. + + This function is cached for 30 minutes to avoid excessive API calls. + Returns list of valid semver release versions (excludes RCs, betas, etc.) + Exits with error if GitHub cannot be reached or no versions found. + """ + # Import here to avoid circular imports + from .versions import fetch_github_releases + return fetch_github_releases() \ No newline at end of file diff --git a/dev-requirements.txt b/dev-requirements.txt index 7f1dd1d..75db08f 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,2 +1,3 @@ invoke>=2.2.0 requests>=2.31.0 +cachetools>=5.3.0 diff --git a/tasks.py b/tasks.py index df2915f..080da16 100644 --- a/tasks.py +++ b/tasks.py @@ -14,12 +14,19 @@ """ import multiprocessing -import requests -import re import sys from multiprocessing import Pool from invoke import task from invoke.context import Context + +from cache import fetch_github_releases_cached +from versions import ( + generate_version_ranges, + get_latest_version, + initialize_version_data, + version_name_to_version, + version_sort_key, +) from contextlib import contextmanager @@ -38,193 +45,10 @@ def managed_pool(pool_size): pool.join() -def fetch_github_releases(): - """ - Fetch Redis releases from GitHub API and filter valid semver versions. - - Returns list of valid semver release versions (excludes RCs, betas, etc.) - Exits with error if GitHub cannot be reached or no versions found. 
- """ - releases = [] - - try: - for page in range(1, 10): # Fetch more pages to get comprehensive history - response = requests.get( - "https://api.github.com/repos/redis/redis/releases", - params={"page": page, "per_page": 100}, - timeout=30 - ) - - if response.status_code == 200: - page_releases = response.json() - if not page_releases: # No more releases - break - - for release in page_releases: - tag_name = release.get("tag_name", "") - release_name = release.get("name", "") - - # Use tag_name primarily, fallback to name - version = tag_name if tag_name else release_name - - # Filter valid semver versions (exclude RCs, betas, alphas, etc.) - if is_valid_semver_release(version): - releases.append(version) - - elif response.status_code == 403: - print("Error: GitHub API rate limit exceeded. Please try again later.") - sys.exit(1) - else: - print(f"Error: GitHub API returned status {response.status_code}") - sys.exit(1) - - except requests.exceptions.RequestException as e: - print(f"Error: Cannot connect to GitHub API: {e}") - sys.exit(1) - - if not releases: - print("Error: No valid Redis versions found on GitHub") - sys.exit(1) - - return sorted(set(releases), key=lambda x: version_sort_key(x)) - - -def is_valid_semver_release(version): - """ - Check if a version string is a valid semantic version release. - - Excludes pre-release versions (rc, alpha, beta, etc.) - """ - # Remove 'v' prefix if present - clean_version = version.lstrip('v') - - # Pattern for semantic versioning (major.minor.patch) - semver_pattern = r'^(\d+)\.(\d+)\.(\d+)$' - - # Exclude pre-release versions (rc, alpha, beta, etc.) 
- exclude_patterns = [ - r'rc\d*', # release candidates - r'alpha', # alpha versions - r'beta', # beta versions - r'pre', # pre-release - r'dev', # development - r'unstable', # unstable - r'-', # any version with dash (pre-release indicator) - ] - - # Check if it matches semver pattern - if not re.match(semver_pattern, clean_version): - return False - - # Check if it contains any excluded patterns - for pattern in exclude_patterns: - if re.search(pattern, clean_version, re.IGNORECASE): - return False - - return True - - -def version_sort_key(version): - """ - Create a sort key for version strings for proper version ordering. - """ - clean_version = version.lstrip('v') - parts = clean_version.split('.') - - try: - return tuple(int(part) for part in parts) - except ValueError: - # Fallback for non-numeric parts - return (0, 0, 0) - - -def generate_version_ranges(github_releases): - """ - Generate comprehensive version ranges based on GitHub releases. - - Creates complete version mapping including intermediate versions. - """ - version_mapping = [] - - # Group releases by major.minor - major_minor_groups = {} - for version in github_releases: - clean_version = version.lstrip('v') - parts = clean_version.split('.') - - if len(parts) >= 2: - try: - major = int(parts[0]) - minor = int(parts[1]) - patch = int(parts[2]) if len(parts) > 2 else 0 - - key = f"{major}.{minor}" - if key not in major_minor_groups: - major_minor_groups[key] = [] - major_minor_groups[key].append(patch) - except ValueError: - continue - - # Generate ranges for each major.minor group - for major_minor, patches in major_minor_groups.items(): - max_patch = max(patches) - - # Generate all versions from 0 to max_patch - for patch in range(0, max_patch + 1): - version_mapping.append(f"{major_minor}.{patch}") - - return sorted(set(version_mapping), key=lambda x: version_sort_key(x)) - - -def get_latest_version(versions): - """ - Get the latest version from a list of versions. 
- """ - if not versions: - print("Error: No versions provided to determine latest") - sys.exit(1) - - return max(versions, key=lambda x: version_sort_key(x)) - - -def initialize_version_data(): - """ - Initialize version data by fetching from GitHub. - Returns tuple of (version_mapping, latest_version) - """ - print("Fetching Redis releases from GitHub...") - github_releases = fetch_github_releases() - - version_mapping = generate_version_ranges(github_releases) - latest_version = get_latest_version(github_releases) - - print(f"Successfully loaded {len(version_mapping)} versions from GitHub") - print(f"Latest version detected: {latest_version}") - - return version_mapping, latest_version - - -def version_name_to_version(version, version_mapping, latest_version): - """ - Convert version specification to actual version list. - - Handles special version keywords and filters versions based on user input. - - "all": Returns all available versions - - "latest": Returns only the latest stable version - - Specific version pattern: Returns matching versions (e.g., "7.2" returns all 7.2.x versions) - """ - if version == "all": - return version_mapping - elif version == "latest": - return [latest_version] - else: - return filter_versions(version, version_mapping) - - def get_pool_size(cpu_from_cli): """ Determine optimal multiprocessing pool size. - + Calculates the number of worker processes to use for parallel operations. If no CPU count is specified, uses system CPU count minus 1 to avoid overwhelming the system. @@ -238,39 +62,23 @@ def get_pool_size(cpu_from_cli): return pool_size -def filter_versions(desired_version, version_mapping): - """ - Filter available versions based on prefix matching. - - Searches through all available Redis versions and returns those - that start with the specified version pattern. 
- """ - result = [] - - for version in version_mapping: - if version.startswith(desired_version): - result.append(version) - - return result - - def _docker_pull(config): """ Internal multiprocess worker for Docker pull operations. - + Executes docker pull command for a specific Redis cluster version in a separate process for parallel execution. - + Config dictionary should contain: - 'context': Invoke context object - 'version': Redis version to pull """ context = config['context'] version = config['version'] - + print(f" -- Starting docker pull for version: {version}") pull_command = f"docker pull grokzen/redis-cluster:{version}" - + try: context.run(pull_command) print(f" -- Successfully pulled version: {version}") @@ -281,20 +89,20 @@ def _docker_pull(config): def _docker_build(config): """ Internal multiprocess worker for Docker build operations. - + Executes docker build command for a specific Redis cluster version in a separate process for parallel execution. - + Config dictionary should contain: - 'context': Invoke context object - 'version': Redis version to build """ context = config['context'] version = config['version'] - + print(f" -- Starting docker build for version: {version}") build_command = f"docker build --build-arg redis_version={version} -t grokzen/redis-cluster:{version} ." - + try: context.run(build_command) print(f" -- Successfully built version: {version}") @@ -305,20 +113,20 @@ def _docker_build(config): def _docker_push(config): """ Internal multiprocess worker for Docker push operations. - + Executes docker push command for a specific Redis cluster version in a separate process for parallel execution. 
- + Config dictionary should contain: - 'context': Invoke context object - 'version': Redis version to push """ context = config['context'] version = config['version'] - + print(f" -- Starting docker push for version: {version}") push_command = f"docker push grokzen/redis-cluster:{version}" - + try: context.run(push_command) print(f" -- Successfully pushed version: {version}") @@ -523,7 +331,7 @@ def list_releases(c): """ Display GitHub releases and show dynamic version loading status. - Fetches fresh Redis releases from GitHub API and displays comprehensive + Fetches Redis releases from GitHub API (using cache if available) and displays comprehensive information about available versions, latest releases, and the dynamic version loading process. @@ -534,14 +342,15 @@ def list_releases(c): Shows GitHub releases, dynamic version loading status, and statistics. Note: + Uses cached data when available (30 minute TTL). Requires internet connection to access GitHub API. API rate limits may apply for unauthenticated requests. """ - print("Fetching fresh GitHub releases...") + print("Fetching GitHub releases...") print("=" * 60) - # Fetch fresh releases directly (this will exit on error) - fresh_releases = fetch_github_releases() + # Fetch releases (cached if available) + fresh_releases = fetch_github_releases_cached() latest_version = get_latest_version(fresh_releases) current_generated = generate_version_ranges(fresh_releases) diff --git a/versions.py b/versions.py new file mode 100644 index 0000000..f7dc1bf --- /dev/null +++ b/versions.py @@ -0,0 +1,213 @@ +""" +Version management functionality for Redis Docker Cluster Build Tool. + +Handles fetching, filtering, and processing Redis version information +from GitHub API responses. +""" + +import requests +import re +import sys + + +def fetch_github_releases(): + """ + Fetch Redis releases from GitHub API and filter valid semver versions. + + Returns list of valid semver release versions (excludes RCs, betas, etc.) 
+ Exits with error if GitHub cannot be reached or no versions found. + """ + releases = [] + + try: + for page in range(1, 10): # Fetch more pages to get comprehensive history + response = requests.get( + "https://api.github.com/repos/redis/redis/releases", + params={"page": page, "per_page": 100}, + timeout=30 + ) + + if response.status_code == 200: + page_releases = response.json() + if not page_releases: # No more releases + break + + for release in page_releases: + tag_name = release.get("tag_name", "") + release_name = release.get("name", "") + + # Use tag_name primarily, fallback to name + version = tag_name if tag_name else release_name + + # Filter valid semver versions (exclude RCs, betas, alphas, etc.) + if is_valid_semver_release(version): + releases.append(version) + + elif response.status_code == 403: + print("Error: GitHub API rate limit exceeded. Please try again later.") + sys.exit(1) + else: + print(f"Error: GitHub API returned status {response.status_code}") + sys.exit(1) + + except requests.exceptions.RequestException as e: + print(f"Error: Cannot connect to GitHub API: {e}") + sys.exit(1) + + if not releases: + print("Error: No valid Redis versions found on GitHub") + sys.exit(1) + + return sorted(set(releases), key=lambda x: version_sort_key(x)) + + +def is_valid_semver_release(version): + """ + Check if a version string is a valid semantic version release. + + Excludes pre-release versions (rc, alpha, beta, etc.) + """ + # Remove 'v' prefix if present + clean_version = version.lstrip('v') + + # Pattern for semantic versioning (major.minor.patch) + semver_pattern = r'^(\d+)\.(\d+)\.(\d+)$' + + # Exclude pre-release versions (rc, alpha, beta, etc.) 
+ exclude_patterns = [ + r'rc\d*', # release candidates + r'alpha', # alpha versions + r'beta', # beta versions + r'pre', # pre-release + r'dev', # development + r'unstable', # unstable + r'-', # any version with dash (pre-release indicator) + ] + + # Check if it matches semver pattern + if not re.match(semver_pattern, clean_version): + return False + + # Check if it contains any excluded patterns + for pattern in exclude_patterns: + if re.search(pattern, clean_version, re.IGNORECASE): + return False + + return True + + +def version_sort_key(version): + """ + Create a sort key for version strings for proper version ordering. + """ + clean_version = version.lstrip('v') + parts = clean_version.split('.') + + try: + return tuple(int(part) for part in parts) + except ValueError: + # Fallback for non-numeric parts + return (0, 0, 0) + + +def generate_version_ranges(github_releases): + """ + Generate comprehensive version ranges based on GitHub releases. + + Creates complete version mapping including intermediate versions. + """ + version_mapping = [] + + # Group releases by major.minor + major_minor_groups = {} + for version in github_releases: + clean_version = version.lstrip('v') + parts = clean_version.split('.') + + if len(parts) >= 2: + try: + major = int(parts[0]) + minor = int(parts[1]) + patch = int(parts[2]) if len(parts) > 2 else 0 + + key = f"{major}.{minor}" + if key not in major_minor_groups: + major_minor_groups[key] = [] + major_minor_groups[key].append(patch) + except ValueError: + continue + + # Generate ranges for each major.minor group + for major_minor, patches in major_minor_groups.items(): + max_patch = max(patches) + + # Generate all versions from 0 to max_patch + for patch in range(0, max_patch + 1): + version_mapping.append(f"{major_minor}.{patch}") + + return sorted(set(version_mapping), key=lambda x: version_sort_key(x)) + + +def get_latest_version(versions): + """ + Get the latest version from a list of versions. 
+ """ + if not versions: + print("Error: No versions provided to determine latest") + sys.exit(1) + + return max(versions, key=lambda x: version_sort_key(x)) + + +def initialize_version_data(): + """ + Initialize version data by fetching from GitHub. + + Uses cached GitHub release data when available (30 minute TTL). + + Returns tuple of (version_mapping, latest_version) + """ + from .cache import fetch_github_releases_cached + + github_releases = fetch_github_releases_cached() + + version_mapping = generate_version_ranges(github_releases) + latest_version = get_latest_version(github_releases) + + print(f"Successfully loaded {len(version_mapping)} versions from GitHub") + print(f"Latest version detected: {latest_version}") + + return version_mapping, latest_version + + +def version_name_to_version(version, version_mapping, latest_version): + """ + Convert version specification to actual version list. + + Handles special version keywords and filters versions based on user input. + - "all": Returns all available versions + - "latest": Returns only the latest stable version + - Specific version pattern: Returns matching versions (e.g., "7.2" returns all 7.2.x versions) + """ + if version == "all": + return version_mapping + elif version == "latest": + return [latest_version] + else: + return filter_versions(version, version_mapping) + + +def filter_versions(desired_version, version_mapping): + """ + Filter available versions based on prefix matching. + + Searches through all available Redis versions and returns those + that start with the specified version pattern. 
+ """ + result = [] + + for version in version_mapping: + if version.startswith(desired_version): + result.append(version) + + return result \ No newline at end of file From ec9bb876d72c78c19227e803fd8648d7454684c6 Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 11:48:11 +0200 Subject: [PATCH 08/10] Update master build all workflow --- .github/workflows/dockerimage.yml | 14 ---------- .github/workflows/master-build-all-images.yml | 27 +++++++++++++++++++ 2 files changed, 27 insertions(+), 14 deletions(-) delete mode 100644 .github/workflows/dockerimage.yml create mode 100644 .github/workflows/master-build-all-images.yml diff --git a/.github/workflows/dockerimage.yml b/.github/workflows/dockerimage.yml deleted file mode 100644 index 6003798..0000000 --- a/.github/workflows/dockerimage.yml +++ /dev/null @@ -1,14 +0,0 @@ -name: Docker Image CI - -on: - push: - branches: - - master - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Build the latest Docker image - run: make build diff --git a/.github/workflows/master-build-all-images.yml b/.github/workflows/master-build-all-images.yml new file mode 100644 index 0000000..01d7646 --- /dev/null +++ b/.github/workflows/master-build-all-images.yml @@ -0,0 +1,27 @@ +name: Docker Image CI + +on: + push: + branches: + - master + +jobs: + build-all: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.13' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r dev-requirements.txt + + - name: Build all Redis images + run: invoke build --version all --cpu 2 From 8bcac6f6a627ed1e6827563a44049e3cc4c46d2d Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 11:53:01 +0200 Subject: [PATCH 09/10] Test fix for relative wrong imports --- cache.py | 2 +- versions.py | 2 +- 2 files changed, 2 
insertions(+), 2 deletions(-) diff --git a/cache.py b/cache.py index 2733333..fa764b8 100644 --- a/cache.py +++ b/cache.py @@ -61,5 +61,5 @@ def fetch_github_releases_cached(): Exits with error if GitHub cannot be reached or no versions found. """ # Import here to avoid circular imports - from .versions import fetch_github_releases + from versions import fetch_github_releases return fetch_github_releases() \ No newline at end of file diff --git a/versions.py b/versions.py index f7dc1bf..6b5d4f5 100644 --- a/versions.py +++ b/versions.py @@ -167,7 +167,7 @@ def initialize_version_data(): Returns tuple of (version_mapping, latest_version) """ - from .cache import fetch_github_releases_cached + from cache import fetch_github_releases_cached github_releases = fetch_github_releases_cached() From ad9d8e4865ced5d86b8f0c57860d454ef6389686 Mon Sep 17 00:00:00 2001 From: Johan Andersson Date: Sat, 4 Oct 2025 12:28:52 +0200 Subject: [PATCH 10/10] Update changelog to reflect changes in this branch --- CHANGELOG.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b4ee6ee..a7c78fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,16 @@ +## 2025-10-04 + +* Updated Python version to 3.13 across all workflows and development environment +* Implemented comprehensive caching system using cachetools with file-based persistence for GitHub API calls (30-minute TTL) +* Modularized codebase by separating concerns into dedicated modules: cache.py for caching functionality and versions.py for version management +* Updated Python requirements: invoke>=2.2.0, requests>=2.31.0, cachetools>=5.3.0 +* Created GitHub Actions workflow (test-build.yml) for automated testing and building on pull requests and non-master branches +* Refactored multiprocessing pool management to use context managers for proper resource cleanup +* Enhanced Makefile with Invoke task targets, clear-cache command, and CPU variable defaults +* Restructured README.md 
with separate Installation/Usage sections and updated documentation +* Fixed import issues by replacing relative imports with absolute imports for CI compatibility +* Replaced the dockerimage.yml workflow with master-build-all-images.yml to build all Redis images on master branch pushes using parallel processing + ## 2024-06-25 * Added 7.2.x releases and published docker images