diff --git a/glitch/__main__.py b/glitch/__main__.py index 0d8cb9d5..9f9ca501 100644 --- a/glitch/__main__.py +++ b/glitch/__main__.py @@ -18,6 +18,8 @@ from glitch.parsers.puppet import PuppetParser from glitch.parsers.terraform import TerraformParser from glitch.parsers.gha import GithubActionsParser +from glitch.parsers.swarm import SwarmParser +from glitch.parsers.nomad import NomadParser from glitch.exceptions import throw_exception from glitch.repair.interactive.main import run_infrafix from pkg_resources import resource_filename @@ -90,6 +92,10 @@ def __get_parser(tech: Tech) -> Parser: return TerraformParser() elif tech == Tech.gha: return GithubActionsParser() + elif tech == Tech.swarm: + return SwarmParser() + elif tech == Tech.nomad: + return NomadParser() else: raise ValueError(f"Invalid tech: {tech}") diff --git a/glitch/analysis/rules.py b/glitch/analysis/rules.py index 2d2087d5..8110e44a 100644 --- a/glitch/analysis/rules.py +++ b/glitch/analysis/rules.py @@ -18,7 +18,7 @@ class Error: "sec_hard_secr": "Hard-coded secret - Developers should not reveal sensitive information in the source code. (CWE-798)", "sec_hard_pass": "Hard-coded password - Developers should not reveal sensitive information in the source code. (CWE-259)", "sec_hard_user": "Hard-coded user - Developers should not reveal sensitive information in the source code. (CWE-798)", - "sec_invalid_bind": "Invalid IP address binding - Binding to the address 0.0.0.0 allows connections from every possible network which might be a security issues. (CWE-284)", + "sec_invalid_bind": "Invalid IP address binding - Binding to the address 0.0.0.0 allows connections from every possible network which might be a security issue. (CWE-284)", "sec_no_int_check": "No integrity check - The content of files downloaded from the internet should be checked. 
(CWE-353)", "sec_no_default_switch": "Missing default case statement - Not handling every possible input combination might allow an attacker to trigger an error for an unhandled value. (CWE-478)", "sec_full_permission_filesystem": "Full permission to the filesystem - Files should not have full permissions to every user. (CWE-732)", @@ -26,6 +26,32 @@ class Error: Tech.docker: { "sec_non_official_image": "Use of non-official Docker image - Use of non-official images should be avoided or taken into careful consideration. (CWE-829)", }, + Tech.swarm: { + "sec_non_official_image": "Use of non-official Docker image - Use of non-official images should be avoided or taken into careful consideration. (CWE-829)", + "sec_image_integrity": "Use of image is not tagged with digest - The images downloaded from the internet should be checked. (CWE-353)", + "sec_unstable_tag": "Unstable version/release image tag - Prefer specifying a release version or better yet a specific version digest (CWE-353)", + "sec_no_image_tag": "The image is not tagged - Prefer specifying a release version or better yet specific version digest", + "arc_no_apig": "No API Gateway - If following a microservices architecture you should use in front of your services an API Gateway instead of directly exposing them.", + "arc_no_logging": "Log Collection not found - It is advised to setup logging for your services.", + "arc_missing_healthchecks": "Missing Healthchecks - You should setup healthchecks for your services, or check if the images used already have default ones.", + "sec_mounted_docker_socket": "Docker socket mounted to a container - Avoid mounting the Docker socket to container, if the container is compromised its access to the Docker socket allows control of all other containers, and even acquiring control of the host machine.", + "sec_privileged_containers": "Use of Privileged Containers - Developers should always try to give and use the least privileges possible. 
Use of privileged containers severely thins out the security and isolation provided by container runtimes, its use should be avoided as much as possible (CWE-250)", + "sec_depr_off_imgs" : "Use of deprecated official Docker images - Use of official deprecated images should be avoided as it makes you open to vulnerabilities, quality issues and unfixed bugs (CWE-1104)", + }, + Tech.nomad: { + "sec_non_official_image": "Use of non-official Docker image - Use of non-official images should be avoided or taken into careful consideration. (CWE-829)", + "sec_image_integrity": "Container image is not tagged with digest - The images downloaded from the internet should be checked. (CWE-353)", + "sec_unstable_tag": "Unstable version/release image tag - Prefer specifying a release version or better yet a specific version digest (CWE-353)", + "sec_no_image_tag": "Container Image is not tagged - Prefer specifying a release version or better yet specific version digest (CWE-353)", + "arc_no_apig": "No API Gateway - If following a microservices architecture you should use in front of your services an API Gateway instead of directly exposing them.", + "arc_no_logging": "Log Collection not found - It is advised to setup logging for your services.", + "arc_missing_healthchecks": "Missing Healthchecks - You should setup healthchecks for your services, or check if the images used already have default ones.", + "sec_mounted_docker_socket": "Docker socket mounted to a container - Avoid mounting the Docker socket to container, if the container is compromised its access to the Docker socket allows control of all other containers, and even acquiring control of the host machine.", + "sec_privileged_containers": "Use of Privileged Containers - Developers should always try to give and use the least privileges possible. 
Use of privileged containers severely thins out the security and isolation provided by container runtimes, its use should be avoided as much as possible (CWE-250)", + "arc_multiple_services": "Multiple Services per Deployment Unit - If you are following a Microservices architecture you are violating the independent deployability rule by deploying multiple microservices in the same group.", + "sec_depr_off_imgs" : "Use of deprecated official Docker images - Use of official deprecated images should be avoided as it makes you open to vulnerabilities, quality issues and unfixed bugs (CWE-1104)", + "arc_wobbly_service_interaction" : "Wobbly Service Interaction - If you are following a Microservices architecture you are likely compromising the principle of isolation of failures of a microservice. Using a Consul sidecar proxy allows having Circuit Breakers and Timeouts that avoid cascading failures due to wobbly interactions between microservices", + }, Tech.terraform: { "sec_integrity_policy": "Integrity Policy - Image tag is prone to be mutable or integrity monitoring is disabled. (CWE-471)", "sec_ssl_tls_policy": "SSL/TLS/mTLS Policy - Developers should use SSL/TLS/mTLS protocols and their secure versions. 
(CWE-326)", diff --git a/glitch/analysis/security/container_image_tags_smells.py b/glitch/analysis/security/container_image_tags_smells.py new file mode 100644 index 00000000..91f35eaf --- /dev/null +++ b/glitch/analysis/security/container_image_tags_smells.py @@ -0,0 +1,84 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.analysis.security.visitor import SecurityVisitor +from glitch.repr.inter import CodeElement, KeyValue, Hash, String +from glitch.analysis.utils import parse_container_image_name +from typing import List + + +class ContainerImageTagsSmells(SecuritySmellChecker): + # NOTE: This is the implementation for Nomad and Swarm + def check(self, element: CodeElement, file: str) -> List[Error]: + errors: List[Error] = [] + bad_element = element + if isinstance(element, KeyValue) and ( + (element.name == "image" and isinstance(element.value, String)) + or (isinstance(element.value, Hash) and element.name == "config") + ): + image = "" + + if isinstance(element.value, String): + image = element.value.value + else: + for k, v in element.value.value.items(): + if isinstance(k, String) and k.value == "image": + image = v.value + bad_element = v + break + + has_digest, has_tag = False, False + _, tag, digest = parse_container_image_name(image) + + if tag != "": + has_tag = True + if digest != "": + has_digest = True + + if image != "" and has_digest: # image tagged with digest + checksum_s = digest.split(":") + checksum = checksum_s[-1] + if ( + checksum_s[0] == "sha256" and len(checksum) != 64 + ): # sha256 256 digest -> 64 hexadecimal digits + errors.append( + Error( + "sec_image_integrity", + bad_element, + file, + repr(bad_element), + ) + ) + + if image != "" and not has_digest: + errors.append( + Error( + "sec_image_integrity", + bad_element, + file, + repr(bad_element), + ) + ) + if image != "" and has_tag: + tag = tag.lower() + + dangerous_tags: List[str] = 
SecurityVisitor.DANGEROUS_IMAGE_TAGS + + for dt in dangerous_tags: + if dt in tag: + errors.append( + Error( + "sec_unstable_tag", + bad_element, + file, + repr(bad_element), + ) + ) + break + if ( + image != "" and not has_digest and not has_tag + ): # Image not tagged, avoids mistakenly flagging nomad tasks without images (non-docker or non-podman) + errors.append( + Error("sec_no_image_tag", bad_element, file, repr(bad_element)) + ) + + return errors diff --git a/glitch/analysis/security/deprecated_official_images.py b/glitch/analysis/security/deprecated_official_images.py new file mode 100644 index 00000000..9a493c9d --- /dev/null +++ b/glitch/analysis/security/deprecated_official_images.py @@ -0,0 +1,47 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.analysis.security.visitor import SecurityVisitor +from glitch.repr.inter import CodeElement, KeyValue, Hash, String +from glitch.analysis.utils import parse_container_image_name +from typing import List + + +class DeprecatedOfficialDockerImages(SecuritySmellChecker): + def check(self, element: CodeElement, file: str) -> List[Error]: + errors: List[Error] = [] + image = "" + bad_element = element + if isinstance(element, KeyValue) and element.name == "image": + if isinstance(element.value, String): + image = element.value.value + elif ( + isinstance(element, KeyValue) + and element.name == "config" + and isinstance(element.value, Hash) + ): + for k, v in element.value.value.items(): + if isinstance(k, String) and k.value == "image": + image = v.value + bad_element = v + break + if image != "": + img_name, _, _ = parse_container_image_name(image) + for obsolete_img in SecurityVisitor.DEPRECATED_OFFICIAL_DOCKER_IMAGES: + obsolete_img_dockerio = f"docker.io/library/{obsolete_img}" + obsolete_img_library = f"library/{obsolete_img}" + obsolete_img_complete_link = ( + f"registry.hub.docker.com/library/{obsolete_img}" + ) + + if ( + img_name == 
obsolete_img + or img_name == obsolete_img_dockerio + or img_name == obsolete_img_library + or img_name == obsolete_img_complete_link + ): + errors.append( + Error("sec_depr_off_imgs", bad_element, file, repr(bad_element)) + ) + break + + return errors diff --git a/glitch/analysis/security/docker_socket_mounted.py b/glitch/analysis/security/docker_socket_mounted.py new file mode 100644 index 00000000..66f83a93 --- /dev/null +++ b/glitch/analysis/security/docker_socket_mounted.py @@ -0,0 +1,46 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.repr.inter import CodeElement, KeyValue, Hash, String,Array +from typing import List + + +class DockerSocketMountedInsideContainer(SecuritySmellChecker): + def check(self, element: CodeElement, file: str) -> List[Error]: + errors: List[Error] = [] + if isinstance(element, KeyValue): + if element.name == "volumes" and isinstance(element.value, Array): + for volume in element.value.value: + if isinstance(volume, String) and volume.value.split(":")[ + 0 + ].startswith("/var/run/docker.sock"): + errors.append( + Error( + "sec_mounted_docker_socket", volume, file, repr(volume) + ) + ) + break + elif element.name == "config" and isinstance(element.value, Hash): + found_socket_exposed = False + for k, v in element.value.value.items(): + if ( + isinstance(k, String) + and k.value == "volumes" + and isinstance(v, Array) + ): + for volume in v.value: + if isinstance(volume, String) and volume.value.split(":")[ + 0 + ].startswith("/var/run/docker.sock"): + errors.append( + Error( + "sec_mounted_docker_socket", + volume, + file, + repr(volume), + ) + ) + found_socket_exposed = True + break + if found_socket_exposed: + break + return errors diff --git a/glitch/analysis/security/hard_secr.py b/glitch/analysis/security/hard_secr.py index 0088cf7b..4b5a593c 100644 --- a/glitch/analysis/security/hard_secr.py +++ b/glitch/analysis/security/hard_secr.py @@ -28,10 
+28,16 @@ def __check_pair( SecurityVisitor.PASSWORDS + SecurityVisitor.SECRETS + SecurityVisitor.USERS ): secr_checker = StringChecker( - lambda s: re.match( - r"[_A-Za-z0-9$\/\.\[\]-]*{text}\b".format(text=item), s + lambda s: ( + re.match(r"[_A-Za-z0-9$\/\.\[\]-]*{text}\b".format(text=item), s) + is not None + ) + or ( + re.match( + r"[_A-Za-z0-9$\/\.\[\]-]*{text}\b".format(text=item.upper()), s + ) + is not None ) - is not None ) if secr_checker.check(name) and not whitelist_checker.check(name): if not var_checker.check(value): diff --git a/glitch/analysis/security/invalid_bind.py b/glitch/analysis/security/invalid_bind.py index 9f861e7b..10dea455 100644 --- a/glitch/analysis/security/invalid_bind.py +++ b/glitch/analysis/security/invalid_bind.py @@ -5,6 +5,7 @@ from glitch.repr.inter import * from glitch.analysis.expr_checkers.string_checker import StringChecker from typing import List +from shlex import split as shsplit class InvalidBind(SecuritySmellChecker): @@ -27,4 +28,10 @@ def check(self, element: CodeElement, file: str) -> List[Error]: ): return [Error("sec_invalid_bind", element, file, repr(element))] + if isinstance(element, KeyValue) and isinstance(element.value, String): + #HACK: splits a string in command parts as for complete command invocations the regex wasn't + # matching on full command invocations that included a reference to "0.0.0.0" or its http/s variants + for part in shsplit(element.value.value): + if check_invalid.str_check(part): + return [Error("sec_invalid_bind", element, file, repr(element))] return [] diff --git a/glitch/analysis/security/log_aggregation.py b/glitch/analysis/security/log_aggregation.py new file mode 100644 index 00000000..b2e140c9 --- /dev/null +++ b/glitch/analysis/security/log_aggregation.py @@ -0,0 +1,82 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.analysis.security.visitor import SecurityVisitor +from glitch.repr.inter 
import CodeElement, Hash, String, UnitBlock +from glitch.analysis.utils import parse_container_image_name +from typing import List + + +class LogAggregatorAbsence(SecuritySmellChecker): + def check(self, element: CodeElement, file: str) -> List[Error]: + errors: List[Error] = [] + + if isinstance(element, UnitBlock): + # HACK: Besides the ones we are explicitly stating a registry we assume the default, normally Docker hub + log_collectors: List[str] = SecurityVisitor.LOG_AGGREGATORS_AND_COLLECTORS + + log_drivers: List[str] = SecurityVisitor.DOCKER_LOG_DRIVERS + + has_log_collector = False + + for au in element.atomic_units: + if au.type != "service" and not au.type.startswith("task."): + # Don't analyze Unit Blocks which aren't tasks or services + return [] + + for att in au.attributes: + image = "" + if att.name == "config" and isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if isinstance(k, String) and k.value == "image": + image = v.value + break + + elif att.name == "image" and isinstance(att.value, String): + image = att.value.value + + img_name, _, _ = parse_container_image_name(image) + + if image != "": + for lc in log_collectors: + if img_name.startswith(lc): + return [] + break + + # if it doesn't have a log collector/aggregator in the deployment + if not has_log_collector: + for au in element.atomic_units: + has_logging = False + for att in au.attributes: + if has_logging: + break + if att.name == "logging" and isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if ( + k.value == "driver" + and isinstance(v, String) + and v.value in log_drivers + ): + has_logging = True + break + elif att.name == "config" and isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if ( + isinstance(k, String) + and k.value == "logging" + and isinstance(v, Hash) + ): + for _k, _v in v.value.items(): + if ( + _k.value == "type" + and isinstance(_v, String) + and _v.value in log_drivers + ): + has_logging = True + break 
+ if has_logging: + break + + if not has_logging: + errors.append(Error("arc_no_logging", au, file, repr(au))) + + return errors diff --git a/glitch/analysis/security/missing_healthchecks.py b/glitch/analysis/security/missing_healthchecks.py new file mode 100644 index 00000000..272d9454 --- /dev/null +++ b/glitch/analysis/security/missing_healthchecks.py @@ -0,0 +1,160 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.repr.inter import ( + CodeElement, + Hash, + AtomicUnit, + Array, + UnitBlock, + String, + UnitBlockType, +) +from typing import List, Dict, Any + + +class MissingHealthchecks(SecuritySmellChecker): + # NOTE: This class checks for Missing Healthchecks smell in Nomad and Swarm + # But it is checking for the WobblyServiceInteraction in Nomad + def check(self, element: CodeElement, file: str) -> List[Error]: + errors: List[Error] = [] + + # nomad group tasks + if isinstance(element, UnitBlock) and element.type == UnitBlockType.block: + # HACK: avoid wrong for swarm + for au in element.atomic_units: + if not au.type.startswith("task."): + return [] + + services_info: List[Dict[str, bool | str | None]] = [] + + # getting the services and consul sidecars at group level + for att in element.attributes: + if att.name == "service" and isinstance(att.value, Hash): + serv_inf: Dict[str, Any] = { + "has_healthcheck": False, + "has_sidecar": False, + "port": None, + } + + for k, v in att.value.value.items(): + if k.value == "check" and isinstance(v, Hash): + serv_inf["has_healthcheck"] = True + + elif k.value == "connect" and isinstance(v, Hash): + for _k, _v in v.value.items(): + # Checks for use of Consul service mesh, sidecar proxy that + # provides Timeouts and Circuit Breaker mechanisms + # that avoid the Wobbly Service Interaction smell + # the smell is detectable in Nomad if this is not present + if _k.value == "sidecar_service" and isinstance( + _v, Hash + ): + 
serv_inf["has_sidecar"] = True + break + elif k.value == "port" and isinstance(v, String): + serv_inf["port"] = v.value + services_info.append(serv_inf) + + # checking each task + for au in element.atomic_units: + has_healthcheck = False + has_sidecar = False + + for att in au.attributes: + if att.name == "config" and isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if k.value == "ports" and isinstance(v, Array): + str_ports: List[str] = [x.value for x in v.value] + for service in services_info: + if ( + service["port"] is not None + and service["port"] in str_ports + ): + if service["has_sidecar"]: + has_sidecar = True + if service["has_healthcheck"]: + has_healthcheck = True + + if has_healthcheck and has_sidecar: + break + + if att.name == "service" and isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if k.value == "check": + has_healthcheck = True + break + + if not has_sidecar: + errors.append( + Error("arc_wobbly_service_interaction", au, file, repr(au)) + ) + if not has_healthcheck: + errors.append(Error("arc_missing_healthchecks", au, file, repr(au))) + # nomad tasks not in groups + if ( + isinstance(element, UnitBlock) + and element.type == UnitBlockType.script + and element.name == "job" + ): + for au in element.atomic_units: + has_healthcheck = False + for att in au.attributes: + if att.name == "service" and isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if k.value == "check": + has_healthcheck = True + break + if not has_healthcheck: + errors.append(Error("arc_missing_healthchecks", au, file, repr(au))) + + # consul sidecars are only available at group level + errors.append( + Error("arc_wobbly_service_interaction", au, file, repr(au)) + ) + + # swarm + if isinstance(element, AtomicUnit) and element.type == "service": + found_healthcheck = False + + for att in element.attributes: + if found_healthcheck: + break + if att.name == "healthcheck": + found_healthcheck = True + if 
isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if k.value == "disable" and ( + v.value or v.value.lower() == "true" + ): + errors.append( + Error( + "arc_missing_healthchecks", + element, + file, + repr(element), + ) + ) + break + elif k.value == "test": + if isinstance(v.value, Array): + if ( + len(v.value.value) >= 1 + and v.value.value[0] == "NONE" + ): + errors.append( + Error( + "arc_missing_healthchecks", + element, + file, + repr(element), + ) + ) + break + break + + if not found_healthcheck: + errors.append( + Error("arc_missing_healthchecks", element, file, repr(element)) + ) + + return errors diff --git a/glitch/analysis/security/multiple_services_per_deplyment_unit.py b/glitch/analysis/security/multiple_services_per_deplyment_unit.py new file mode 100644 index 00000000..bbbed76b --- /dev/null +++ b/glitch/analysis/security/multiple_services_per_deplyment_unit.py @@ -0,0 +1,44 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.analysis.security.visitor import SecurityVisitor +from glitch.repr.inter import CodeElement, Hash, String, UnitBlock, UnitBlockType +from glitch.analysis.utils import parse_container_image_name +from typing import List + + +class MultipleServicesPerDeploymentUnit(SecuritySmellChecker): + def check(self, element: CodeElement, file: str) -> List[Error]: + # FIXME: Besides log collectors, there are other types of agents/sidecars for observability (some of which are also on the log collector list) + # and proxies which should also be allowed besides the main microservice + errors: List[Error] = [] + if isinstance(element, UnitBlock) and element.type == UnitBlockType.block: + main_service_found = False + for au in element.atomic_units: + if au.type in ["task.docker", "task.podman"]: + image_name = "" + for att in au.attributes: + if att.name == "config" and isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if 
isinstance(k, String) and k.value == "image": + image_name, _, _ = parse_container_image_name( + v.value + ) + break + if image_name != "": + break + + if image_name in SecurityVisitor.LOG_AGGREGATORS_AND_COLLECTORS: + continue + + elif main_service_found: + errors.append( + Error("arc_multiple_services", au, file, repr(au)) + ) + else: + main_service_found = True + elif main_service_found: + # when there are other types of tasks that aren't docker or podman based + # and one that is likely the main microservice has already been found + errors.append(Error("arc_multiple_services", au, file, repr(au))) + + return errors diff --git a/glitch/analysis/security/no_api_gateway.py b/glitch/analysis/security/no_api_gateway.py new file mode 100644 index 00000000..aa72757c --- /dev/null +++ b/glitch/analysis/security/no_api_gateway.py @@ -0,0 +1,128 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.analysis.security.visitor import SecurityVisitor +from glitch.repr.inter import ( + CodeElement, + Hash, + Array, + VariableReference, + String, + UnitBlock, + UnitBlockType, +) +from glitch.analysis.utils import parse_container_image_name +from typing import List, Dict, Any + + +class NoAPIGateway(SecuritySmellChecker): + def check(self, element: CodeElement, file: str) -> List[Error]: + errors: List[Error] = [] + # Tries to follow a logic similar to the one presented for Kubernetes pods on doi: 10.5220/0011845500003488 + + if isinstance(element, UnitBlock) and element.type == UnitBlockType.block: + has_api_gateway = False + + network_info: Dict[str, Any] = { + "mode": "bridge", # default network mode + "ports": [], + } + + network_mode_element = None + for att in element.attributes: + if att.name == "network" and isinstance(att.value, Hash): + for k, v in att.value.value.items(): + if isinstance(k, String) and k.value == "mode": + if v.value == "host": + network_mode_element = v + 
network_info["mode"] = v.value + elif ( + isinstance(k, String) or isinstance(k, VariableReference) + ) and k.value == "port": + port_info: Dict[str, Any] = { + "name": "", + } + for _k, _v in v.value.items(): + if isinstance(_k, String) and _k.value == "port": + port_info["name"] = _v.value + elif isinstance(_k, String) and _k.value in [ + "static", + "to", + ]: + port_info[_k.value] = _v.value + network_info["ports"].append(port_info) + + for au in element.atomic_units: + for att in au.attributes: + if att.name == "config" and isinstance(att.value, Hash): + temp_errors: List[Error] = [] + is_api_gateway = False + + if ( + isinstance(au.name, String) + and "gateway" in au.name.value.strip().lower() + ): + is_api_gateway = True + has_api_gateway = True + + for k, v in att.value.value.items(): + if ( + isinstance(k, String) + and k.value == "ports" + and isinstance(v, Array) + and not is_api_gateway + ): + for port in v.value: + if isinstance(port, String): + for exp_port in network_info["ports"]: + if exp_port["name"] == port.value: + if network_info["mode"] == "host": + temp_errors.append( + Error( + "arc_no_apig", + port, + file, + repr(port), + ) + ) + + elif network_info[ + "mode" + ] == "bridge" and ( + "to" in exp_port.keys() + or "static" in exp_port.keys() + ): + temp_errors.append( + Error( + "arc_no_apig", + port, + file, + repr(port), + ) + ) + + if isinstance(k, String) and k.value == "image": + image_name, _, _ = parse_container_image_name(v.value) + if ( + image_name in SecurityVisitor.API_GATEWAYS + or is_api_gateway + ): + is_api_gateway = True + has_api_gateway = True + else: + errors += temp_errors + temp_errors = [] + + if not is_api_gateway: + errors += temp_errors + + if not has_api_gateway and network_info["mode"] == "host": + errors.append( + Error( + "arc_no_apig", + network_mode_element, + file, + repr(network_mode_element), + ) + ) + + return errors diff --git a/glitch/analysis/security/non_official_container_images.py 
b/glitch/analysis/security/non_official_container_images.py new file mode 100644 index 00000000..b9d297dc --- /dev/null +++ b/glitch/analysis/security/non_official_container_images.py @@ -0,0 +1,50 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.analysis.security.visitor import SecurityVisitor +from glitch.repr.inter import CodeElement, KeyValue, Hash, String +from glitch.analysis.utils import parse_container_image_name +from typing import List + + +class NonOfficialContainerImage(SecuritySmellChecker): + # NOTE: This is the implementation for Nomad and Swarm + def check(self, element: CodeElement, file: str) -> List[Error]: + image = "" + bad_element = element + + if isinstance(element, KeyValue) and element.name == "image": + if isinstance(element.value, String): + image = element.value.value + elif ( + isinstance(element, KeyValue) + and element.name == "config" + and isinstance(element.value, Hash) + ): + for k, v in element.value.value.items(): + if isinstance(k, String) and k.value == "image": + image = v.value + bad_element = v + break + + img_name, _, _ = parse_container_image_name(image) + + if img_name != "": + all_official_images:List[str] = SecurityVisitor.DOCKER_OFFICIAL_IMAGES + SecurityVisitor.DEPRECATED_OFFICIAL_DOCKER_IMAGES + for off_img in all_official_images: + off_img_dockerio = f"docker.io/library/{off_img}" + off_img_library = f"library/{off_img}" + off_img_complete_link = f"registry.hub.docker.com/library/{off_img}" + + if ( + img_name == off_img + or img_name == off_img_dockerio + or img_name == off_img_library + or img_name == off_img_complete_link + ): + return [] + + return [ + Error("sec_non_official_image", bad_element, file, repr(bad_element)) + ] + + return [] diff --git a/glitch/analysis/security/privileged_containers.py b/glitch/analysis/security/privileged_containers.py new file mode 100644 index 00000000..cd16540b --- /dev/null +++ 
b/glitch/analysis/security/privileged_containers.py @@ -0,0 +1,23 @@ +from glitch.analysis.rules import Error +from glitch.analysis.security.smell_checker import SecuritySmellChecker +from glitch.repr.inter import CodeElement, KeyValue, Boolean, Hash, String +from typing import List + + +class PrivilegedContainers(SecuritySmellChecker): + def check(self, element: CodeElement, file: str) -> List[Error]: + if isinstance(element, KeyValue): + if ( + element.name == "privileged" + and isinstance(element.value, Boolean) + and element.value.value + ): + return [ + Error("sec_privileged_containers", element, file, repr(element)) + ] + elif element.name == "config" and isinstance(element.value, Hash): + for k, v in element.value.value.items(): + if isinstance(k, String) and k.value == "privileged" and v.value: + return [Error("sec_privileged_containers", k, file, repr(k))] + + return [] diff --git a/glitch/analysis/security/visitor.py b/glitch/analysis/security/visitor.py index ae81dbc1..bbd922d0 100644 --- a/glitch/analysis/security/visitor.py +++ b/glitch/analysis/security/visitor.py @@ -3,14 +3,14 @@ import json import glitch import configparser -from urllib.parse import urlparse +from urllib.parse import urlparse, parse_qs from glitch.analysis.rules import Error, RuleVisitor, SmellChecker from nltk.tokenize import WordPunctTokenizer # type: ignore from typing import Tuple, List, Optional from glitch.tech import Tech from glitch.repr.inter import * - +from glitch.analysis.expr_checkers.string_checker import StringChecker from glitch.analysis.terraform.smell_checker import TerraformSmellChecker from glitch.analysis.security.smell_checker import SecuritySmellChecker @@ -31,7 +31,11 @@ def check(self, element: CodeElement, file: str) -> List[Error]: ): return [] image = element.name.split(":") - if image[0] not in SecurityVisitor.DOCKER_OFFICIAL_IMAGES: + all_official_imgs = ( + SecurityVisitor.DOCKER_OFFICIAL_IMAGES + + SecurityVisitor.DEPRECATED_OFFICIAL_DOCKER_IMAGES + 
) + if image[0] not in all_official_imgs: return [Error("sec_non_official_image", element, file, repr(element))] return [] @@ -173,6 +177,18 @@ def config(self, config_path: str) -> None: "official_docker_images" ) + SecurityVisitor.DEPRECATED_OFFICIAL_DOCKER_IMAGES = self._load_data_file( + "deprecated_official_docker_images" + ) + SecurityVisitor.LOG_AGGREGATORS_AND_COLLECTORS = self._load_data_file( + "log_collectors_and_aggregators" + ) + SecurityVisitor.DANGEROUS_IMAGE_TAGS = self._load_data_file( + "dangerous_image_tags" + ) + SecurityVisitor.DOCKER_LOG_DRIVERS = self._load_data_file("docker_log_drivers") + SecurityVisitor.API_GATEWAYS = self._load_data_file("api_gateways") + @staticmethod def _load_data_file(file: str) -> List[str]: folder_path = os.path.dirname(os.path.realpath(glitch.__file__)) @@ -232,6 +248,39 @@ def check_dependency(self, d: Dependency, file: str) -> List[Error]: def __check_keyvalue(self, c: KeyValue, file: str) -> List[Error]: errors: List[Error] = [] + + # check https/tls/ssl in Hash values + + ssl_checker = StringChecker(lambda x: self.__is_http_url(x)) + weak_crypt_checker = StringChecker(lambda x: self.__is_weak_crypt(x, "")) + + if isinstance(c.value, Hash): + pairs_to_check = [c.value.value] + + while pairs_to_check: + for _, v in pairs_to_check[0].items(): + if ssl_checker.check(v): + errors.append(Error("sec_https", v, file, repr(v))) + if weak_crypt_checker.check(v): + errors.append(Error("sec_weak_crypt", v, file, repr(v))) + if isinstance(v, Hash): + pairs_to_check.append(v.value) + + pairs_to_check.pop(0) + + elif isinstance(c.value, Array): + for x in c.value.value: + if ssl_checker.check(x): + errors.append(Error("sec_https", x, file, repr(x))) + if weak_crypt_checker.check(x): + errors.append(Error("sec_weak_crypt", x, file, repr(x))) + + else: + if ssl_checker.check(c.value): + errors.append(Error("sec_https", c, file, repr(c))) + if weak_crypt_checker.check(c.value): + errors.append(Error("sec_weak_crypt", c, file, 
repr(c))) + c.name = c.name.strip().lower() # if isinstance(c.value, type(None)): @@ -383,6 +432,13 @@ def check_unitblock(self, u: UnitBlock, file: str) -> List[Error]: missing_integrity_checks = {} for au in u.atomic_units: result = self.check_integrity_check(au, file) + if result is not None and result[0] is None: + if isinstance(result[1], Error): + errors.append(result[1]) + else: + for err in result[1]: + errors.append(err) + continue if result is not None: missing_integrity_checks[result[0]] = result[1] continue @@ -394,10 +450,16 @@ def check_unitblock(self, u: UnitBlock, file: str) -> List[Error]: errors += missing_integrity_checks.values() errors += self.non_off_img.check(u, file) + for checker in self.checkers: + checker.code = self.code + errors += checker.check(u, file) + return errors @staticmethod - def check_integrity_check(au: AtomicUnit, path: str) -> Optional[Tuple[str, Error]]: + def check_integrity_check( + au: AtomicUnit, path: str + ) -> Optional[Tuple[str | None, Error | List[Error]]]: for item in SecurityVisitor.DOWNLOAD: if not isinstance(au.name, str): continue @@ -411,7 +473,7 @@ def check_integrity_check(au: AtomicUnit, path: str) -> Optional[Tuple[str, Erro return os.path.basename(au.name), Error( "sec_no_int_check", au, path, repr(au) ) - + errors: List[Error] = [] for a in au.attributes: value = ( a.value.strip().lower() @@ -419,6 +481,35 @@ def check_integrity_check(au: AtomicUnit, path: str) -> Optional[Tuple[str, Erro else repr(a.value).strip().lower() ) + # Nomad integrity check + if a.name == "artifact" and isinstance(a.value, Hash): + found_checksum = False + + for k, v in a.value.value.items(): + if ( + isinstance(k, String) + and k.value == "options" + and isinstance(v, Hash) + ): + for _k, _ in v.value.items(): + if isinstance(_k, String) and _k.value == "checksum": + found_checksum = True + break + elif ( + isinstance(k, String) + and k.value == "source" + and isinstance(v, String) + ): + # artifact uses 
https://github.com/hashicorp/go-getter + parsed_source = urlparse(v.value) # type: ignore + checksum = parse_qs(parsed_source.query).get("checksum", []) # type: ignore + if checksum: + found_checksum = True + if not found_checksum: + errors.append(Error("sec_no_int_check", a, path, repr(a))) # type: ignore + + if len(errors) > 0: + continue for item in SecurityVisitor.DOWNLOAD: if not re.search( r"(http|https|www)[^ ,]*\.{text}".format(text=item), value @@ -429,6 +520,9 @@ def check_integrity_check(au: AtomicUnit, path: str) -> Optional[Tuple[str, Erro return os.path.basename(a.value), Error( # type: ignore "sec_no_int_check", au, path, repr(a) ) # type: ignore + + if len(errors) > 0: + return (None, errors) return None @staticmethod diff --git a/glitch/analysis/utils.py b/glitch/analysis/utils.py new file mode 100644 index 00000000..a05a8e44 --- /dev/null +++ b/glitch/analysis/utils.py @@ -0,0 +1,24 @@ +from typing import Tuple + + +def parse_container_image_name(img_name: str) -> Tuple[str, str, str]: + image, tag, digest = "", "", "" + + if "@" in img_name: + parts_dig = img_name.split("@") + digest = parts_dig[-1] + if ":" in parts_dig[0]: + parts_tag = parts_dig[0].split(":") + image = parts_tag[0] + tag = parts_tag[1] + else: + image = parts_dig[0] + + elif ":" in img_name: + parts_tag = img_name.split(":") + image = parts_tag[0] + tag = parts_tag[1] + else: + image = img_name + + return image, tag, digest diff --git a/glitch/configs/default.ini b/glitch/configs/default.ini index f2efba19..f5755b29 100644 --- a/glitch/configs/default.ini +++ b/glitch/configs/default.ini @@ -14,7 +14,7 @@ secrets = ["auth_token", "authetication_token","auth-token", "authentication-tok "ssh-key-public-content", "ssh-key-private-content"] misc_secrets = ["key", "cert"] roles = [] -download_extensions = ["iso", "tar", "tar.gz", "tar.bzip2", "zip", +download_extensions = ["iso", "tar", "tar.gz", "tar.bzip2", "tar.bz2","tar.zst", "zip", "rar", "gzip", "gzip2", "deb", "rpm", "sh", 
"run", "bin", "tgz"] ssh_dirs = ["source", "destination", "path", "directory", "src", "dest", "file"] admin = ["admin", "root"] diff --git a/glitch/exceptions.py b/glitch/exceptions.py index f7da3b3c..ce89b842 100644 --- a/glitch/exceptions.py +++ b/glitch/exceptions.py @@ -16,6 +16,8 @@ "DOCKER_UNKNOW_ERROR": "Docker - Unknown Error: {}", "SHELL_COULD_NOT_PARSE": "Shell Command - Could not parse: {}", "TERRAFORM_COULD_NOT_PARSE": "Terraform - Could not parse file: {}", + "DOCKER_SWARM_COULD_NOT_PARSE": "Swarm - Could not parse file: {}", + "HASHICORP_NOMAD_COULD_NOT_PARSE": "Nomad - Could not parse file: {}", } diff --git a/glitch/files/api_gateways b/glitch/files/api_gateways new file mode 100644 index 00000000..9683f5c5 --- /dev/null +++ b/glitch/files/api_gateways @@ -0,0 +1,29 @@ +apache/apisix +bitnami/apisix +kong +kong/kong-gateway +tykio/tyk-gateway +docker.tyk.io/tyk-gateway/tyk-gateway +apache/shenyu-bootstrap +apache/shenyu-admin +krakend +hashicorp/consul +envoyproxy/envoy +bitnami/envoy +express-gateway +nrel/api-umbrella +fusio/fusio +bitnami/nginx +nginx +traefik +linuxserver/nginx +chainguard/nginx +nginxinc/nginx-unprivileged +ubuntu/nginx +kasmweb/nginx +predic8/membrane +traefik/traefik +ubuntu/traefik +kong/kong +openresty/openresty +openwhisk/apigateway diff --git a/glitch/files/dangerous_image_tags b/glitch/files/dangerous_image_tags new file mode 100644 index 00000000..e9723d34 --- /dev/null +++ b/glitch/files/dangerous_image_tags @@ -0,0 +1,20 @@ +stable +unstable +nightly +main +mainline +master +trunk +latest +beta +alpha +current +dev +devel +develop +development +next +staging +test +testing +preview diff --git a/glitch/files/databases_and_kvs b/glitch/files/databases_and_kvs new file mode 100644 index 00000000..edae8b61 --- /dev/null +++ b/glitch/files/databases_and_kvs @@ -0,0 +1,70 @@ +redis +mysql +mongo +chainguard/mongodb +mongodb/mongodb-community-server +mongodb/mongodb-enterprise-server +bitnami/mongodb +bitnami/postgresql 
+bitnami/redis +bitnami/mariadb +bitnami/mysql +bitnami/memcached +bitnami/etcd +bitnami/cassandra +bitnami/neo4j +bitnami/influxdb +bitnami/clickhouse +bitnami/valkey +bitnami/couchdb +bitnami/milvus +bitnami/keydb +eqalpha/keydb +bitnami/scylladb +bitnami/percona-mysql +bitnami/cloudnative-pg +bitnami/arangodb +chainguard/mariadb +chainguard/valkey +chainguard/postgres +chainguard/redis +postgres +ubuntu/postgres +valkey/valkey +valkey +apache/kvrocks +memcached +ubuntu/memcached +mariadb +linuxserver/mariadb +influxdb +cassandra +scylladb/scylla +couchdb +apache/couchdb +couchbase +percona +percona/percona-server +rethinkdb +arangodb +arangodb/arangodb +arangodb/enterprise +crate +crate/crate +orientdb +clickhouse +clickhouse/clickhouse-server +cockroachdb/cockroach +neo4j +milvusdb/milvus +semitechnologies/weaviate +ravendb/ravendb +pgvector/pgvector +tensorchord/pgvecto-rs +tensorchord/vchord-postgres +qdrant/qdrant +chromadb/chroma +pingcap/tidb +pingcap/tikv +mcr.microsoft.com/mssql/server +ferretdb/ferretdb diff --git a/glitch/files/deprecated_official_docker_images b/glitch/files/deprecated_official_docker_images new file mode 100644 index 00000000..729d8ade --- /dev/null +++ b/glitch/files/deprecated_official_docker_images @@ -0,0 +1,38 @@ +adoptopenjdk +celery +centos +clearlinux +clefos +crux +django +docker-dev +emqx +euleros +express-gateway +fsharp +glassfish +hipache +iojs +java +jenkins +jobber +kaazing-gateway +known +mono +nats-streaming +notary +nuxeo +opensuse +owncloud +php-zendserver +piwik +plone +rails +rapidoid +sentry +sl +sourcemage +swarm +thrift +ubuntu-debootstrap +ubuntu-upstart \ No newline at end of file diff --git a/glitch/files/docker_log_drivers b/glitch/files/docker_log_drivers new file mode 100644 index 00000000..3f1e0070 --- /dev/null +++ b/glitch/files/docker_log_drivers @@ -0,0 +1,10 @@ +elastic/elastic-logging-plugin +logzio/logzio-logging-plugin +grafana/loki-docker-driver +sumologic/docker-logging-driver +syslog +gelf 
+fluentd +awslogs +splunk +gcplogs diff --git a/glitch/files/log_collectors_and_aggregators b/glitch/files/log_collectors_and_aggregators new file mode 100644 index 00000000..0f736106 --- /dev/null +++ b/glitch/files/log_collectors_and_aggregators @@ -0,0 +1,52 @@ +amazon/aws-for-fluent-bit +amazon/aws-otel-collector +amazon/cloudwatch-agent +balabit/syslog-ng +bitnami/fluent-bit +bitnami/fluentd +bitnami/grafana-alloy +bitnami/grafana-loki +bitnami/logstash +bitnami/promtail +bitnami/telegraf +chainguard/opentelemetry-collector-contrib +datadoghq/agent +datalust/seq +docker.elastic.co/beats/filebeat +docker.elastic.co/beats/filebeat-wolfi +dynatrace/oneagent +elastic/filebeat +fluent/fluent-bit +fluent/fluentd +fluentd +ghcr.io/mr-karan/nomad-vector-logger +gliderlabs/logspout +grafana/agent +grafana/alloy +grafana/loki +grafana/promtail +graylog/graylog +graylog/graylog-forwarder +linuxserver/syslog-ng +logstash +logzio/docker-collector-logs +netdata/netdata +otel/opentelemetry-collector-contrib +outcoldsolutions/collectorforopenshift +public.ecr.aws/aws-observability/aws-for-fluent-bit +public.ecr.aws/zinclabs/openobserve +sematext/agent +signoz/signoz-otel-collector +sofixa/nomad_follower +splunk/splunk +sumologic/collector +sumologic/sumologic-otel-collector +supabase/logflare +telegraf +timberio/vector +ubuntu/grafana-agent +ubuntu/telegraf +umputun/docker-logger +voxxit/rsyslog +victoriametrics/victoria-logs +zabbix/zabbix-agent diff --git a/glitch/files/message_queues_and_event_brokers b/glitch/files/message_queues_and_event_brokers new file mode 100644 index 00000000..349c03b4 --- /dev/null +++ b/glitch/files/message_queues_and_event_brokers @@ -0,0 +1,18 @@ +rabbitmq +bitnami/rabbitmq +nats +bitnami/nats +bitnami/kafka +ubuntu/kafka +apache/kafka +apachepulsar/pulsar +apachepulsar/pulsar-all +apache/activemq-classic +apache/activemq-artemis +redpandadata/redpanda +redis +bitnami/redis +chainguard/redis +valkey/valkey +bitnami/valkey +chainguard/valkey 
diff --git a/glitch/files/official_docker_images b/glitch/files/official_docker_images index d692e385..1789a557 100644 --- a/glitch/files/official_docker_images +++ b/glitch/files/official_docker_images @@ -1,175 +1,140 @@ +adminer +aerospike +almalinux alpine -busybox -nginx -ubuntu -python -redis -postgres -node -httpd -mongo -mysql -memcached -traefik -mariadb -docker -rabbitmq -hello-world -openjdk -golang -registry -wordpress -centos -debian -influxdb -consul -php -nextcloud -sonarqube -haproxy -ruby +alt +amazoncorretto amazonlinux -elasticsearch -tomcat -eclipse-mosquitto -maven -telegraf -vault -caddy -adminer +api-firewall +arangodb +archlinux +backdrop bash -ghost -kong -perl -neo4j -zookeeper +bonita buildpack-deps -mongo-express -gradle -logstash +busybox +caddy cassandra -couchdb -nats chronograf +cirros +clickhouse +clojure +composer +consul +convertigo +couchbase +couchdb +crate +dart +debian +docker drupal -jenkins -kibana -java -solr -percona -teamspeak -sentry -matomo +eclipse-mosquitto +eclipse-temurin +eggdrop +elasticsearch +elixir +erlang fedora -composer -nats-streaming -adoptopenjdk flink -couchbase -swarm -joomla +fluentd +friendica +gazebo +gcc +geonetwork +ghost +golang +gradle groovy -rethinkdb -rocket.chat -redmine -owncloud -rust -kapacitor -erlang -phpmyadmin +haproxy +haskell +haxe +hello-seattle +hello-world +hitch +hola-mundo +httpd +hylang +ibm-semeru-runtimes +ibmjava +influxdb +irssi +jetty +joomla jruby -elixir -amazoncorretto +julia +kapacitor +kibana +kong +krakend +lightstreamer +liquibase +logstash +mageia +mariadb +matomo +maven mediawiki -mono -pypy -jetty -clojure -arangodb +memcached +mongo +mongo-express +monica +mysql +nats +neo4j +neurodebian +nextcloud +nginx +node odoo -eclipse-temurin -xwiki +open-liberty +openjdk oraclelinux -znc -haxe +orientdb +percona +perl +photon +php +phpmyadmin +postfixadmin +postgres +pypy +python +r-base +rabbitmq +rakudo-star +redis +redmine +registry +rethinkdb +rocket.chat +rockylinux 
ros -hylang -websphere-liberty -django +ruby +rust sapmachine -gcc -archlinux +satosa +scratch +silverpeas +solr +sonarqube +spark +spiped +storm swift +swipl +teamspeak +telegraf +tomcat tomee -piwik -yourls -rockylinux -iojs -crate -aerospike -photon -orientdb -julia +traefik +ubuntu +unit varnish -ibmjava -open-liberty -bonita -monica -neurodebian -opensuse -fluentd -rails -ubuntu-debootstrap -storm -r-base -irssi -haskell -backdrop -clearlinux -plone -notary -cirros -lightstreamer -geonetwork -nuxeo -postfixadmin -gazebo -php-zendserver -convertigo -friendica -hello-seattle -celery -spiped -swipl -fsharp -eggdrop -thrift -rapidoid -almalinux -docker-dev -rakudo-star -express-gateway -Kaazing Gateway -ibm-semeru-runtimes -ubuntu-upstart -silverpeas -mageia -hola-mundo -known -glassfish -dart -crux -euleros -jobber -sourcemage -clefos -alt -sl -hipache -hitch -scratch -satosa -emqx -api-firewall -cheers -dcl2020 \ No newline at end of file +vault +websphere-liberty +wordpress +xwiki +yourls +znc +zookeeper \ No newline at end of file diff --git a/glitch/parsers/nomad.py b/glitch/parsers/nomad.py new file mode 100644 index 00000000..be625b59 --- /dev/null +++ b/glitch/parsers/nomad.py @@ -0,0 +1,174 @@ +# type: ignore +import os + +from hcl2.parser import hcl2 +import glitch.parsers.parser as p +from glitch.parsers.terraform import GLITCHTransformer +from glitch.exceptions import EXCEPTIONS, throw_exception +from glitch.repr.inter import * +from typing import List, Dict, Any + +from lark.tree import Meta +from lark.visitors import v_args + + +class NomadTransformer(GLITCHTransformer): + """Takes a syntax tree generated by the parser and + transforms it into a dict. 
+ """ + + def __init__(self, code: List[str]): + super().__init__(code) + self.path = "" + + @v_args(meta=True) + def block(self, meta: Meta, args: List) -> Any: + if args[0].value == "task": + + def get_task_type(atts: List[Any]) -> str: + for elem in atts: + if isinstance(elem, (Attribute, UnitBlock)): + if elem.name == "driver": + return elem.value.value + + name: String = String(args[1].value, self._get_element_info(args[1])) + au = AtomicUnit(name, f"task.{get_task_type(args[-1])}") + + au.set_element_info(self._get_element_info(meta)) + for elem in args[-1]: + if isinstance(elem, Attribute): + au.add_attribute(elem) + else: + print(f"#####ERROR PARSING ELEMENT:{elem} IN A TASK#####") + + return au + + elif args[0].value == "group": + ub: UnitBlock = UnitBlock(args[1].value, UnitBlockType.block) + ub.path = self.path + for elem in args[-1]: + if isinstance(elem, AtomicUnit): + ub.add_atomic_unit(elem) + elif isinstance(elem, Attribute): + ub.add_attribute(elem) + else: + print(f"#####ERROR PARSING ELEMENT:{elem} IN A GROUP#####") + return ub + + elif args[0].value not in ["job", "group", "task", "port"]: + subatts: Dict[String, Expr] = {} + + for elem in args[-1]: + if isinstance(elem, Attribute): + if isinstance(elem.value, VariableReference): + # Attribute(VariableReference()) + # Review this decision, I might be throwing away important info/expressivity of the IR by doing VariableReference -> String + subatts[ + String(elem.name, ElementInfo.from_code_element(elem)) + ] = String( + elem.value.value, ElementInfo.from_code_element(elem.value) + ) + else: + if isinstance(elem.name, VariableReference): + subatts[elem.name] = elem.value + else: + subatts[ + String(elem.name, ElementInfo.from_code_element(elem)) + ] = elem.value + else: + print(f"#####ERROR PARSING ELEMENT:{elem} IN A BLOCK#####") + + info: ElementInfo = self._get_element_info(meta) + val_hash: Hash = Hash(subatts, info) + + return Attribute(args[0].value, val_hash, info) + + elif args[0].value 
== "port": + # port block inside network block inside resource blocks | args[1] is the string literal that has the port name + atts: Dict[String, Expr] = {} + + for elem in args[-1]: + atts[String(elem.name, ElementInfo.from_code_element(elem))] = ( + elem.value + ) + + val_hash: Hash = Hash( + { + String("port", self._get_element_info(args[1])): String( + args[1].value, self._get_element_info(args[1]) + ), + **atts, + }, + self._get_element_info(meta), + ) + + return Attribute(args[0], val_hash, self._get_element_info(meta)) + + else: + # job + ub = UnitBlock(args[0].value, UnitBlockType.script) + for arg in args[-1]: + if isinstance(arg, AtomicUnit): + ub.add_atomic_unit(arg) + elif isinstance(arg, UnitBlock): + ub.add_unit_block(arg) + elif isinstance(arg, Attribute): + ub.add_attribute(arg) + + ub.set_element_info(self._get_element_info(meta)) + return ub + + +class NomadParser(p.Parser): + def parse_file(self, path: str, type: UnitBlockType) -> UnitBlock: + try: + with open(path) as f: + unit_block = None + tree = hcl2.parse(f.read() + "\n") + f.seek(0, 0) + code = f.readlines() + transformer = NomadTransformer(code) + transformer.path = os.path.abspath(path) + elements = transformer.transform(tree) + + if elements and isinstance(elements[0], UnitBlock): + unit_block = elements[0] + unit_block.path = os.path.abspath(path) + else: + throw_exception(EXCEPTIONS["HASHICORP_NOMAD_COULD_NOT_PARSE"], path) + return None + + for c in transformer.comments: + unit_block.add_comment(c) + except: + throw_exception(EXCEPTIONS["HASHICORP_NOMAD_COULD_NOT_PARSE"], path) + return None + + return unit_block + + def parse_module(self, path: str) -> Module: + res: Module = Module(os.path.basename(os.path.normpath(path)), path) + super().parse_file_structure(res.folder, path) + + files = [ + f.path for f in os.scandir(f"{path}") if f.is_file() and not f.is_symlink() + ] + for f in files: + unit_block = self.parse_file(f, UnitBlockType.unknown) + res.add_block(unit_block) + + 
return res + + def parse_folder(self, path: str) -> Project: + res: Project = Project(os.path.basename(os.path.normpath(path))) + res.add_module(self.parse_module(path)) + + subfolders = [ + f.path for f in os.scandir(f"{path}") if f.is_dir() and not f.is_symlink() + ] + for d in subfolders: + aux = self.parse_folder(d) + res.blocks += aux.blocks + res.modules += aux.modules + + return res diff --git a/glitch/parsers/swarm.py b/glitch/parsers/swarm.py new file mode 100644 index 00000000..47295339 --- /dev/null +++ b/glitch/parsers/swarm.py @@ -0,0 +1,465 @@ +# type: ignore #TODO + +import os +from typing import Any, List, Optional +import re +from ruamel.yaml.main import YAML +from ruamel.yaml.nodes import ( + MappingNode, + ScalarNode, + SequenceNode, + Node, +) +from ruamel.yaml.tokens import Token + +from glitch.exceptions import EXCEPTIONS, throw_exception +from glitch.parsers.yaml import YamlParser +from glitch.repr.inter import ( + AtomicUnit, + Attribute, + Comment, + ElementInfo, + Null, + Expr, + Hash, + Module, + Project, + String, + UnitBlock, + UnitBlockType, + Array, + Dependency, + VariableReference, +) + + +class SwarmParser(YamlParser): + """ + Stack/Compose YAML files parser + """ + + def parse_atomic_unit( + self, type: str, unit_block: UnitBlock, dict: tuple[Any, Any], code: List[str] + ) -> None: + """ + Parses and creates AtomicUnits + """ + + def create_atomic_unit( + start_line: Token | Node, + end_line: Token | Node, + type: str, + name: str, + code: List[str], + ) -> AtomicUnit: + name_info = ElementInfo( + start_line.start_mark.line + 1, + start_line.start_mark.column + 1, + start_line.end_mark.line + 1, + start_line.end_mark.column + 1, + self._get_code(start_line, start_line, code), + ) + str_name = String(name, name_info) + + au = AtomicUnit(str_name, type) + au.line = start_line.start_mark.line + 1 + au.end_line = end_line.end_mark.line + 1 + au.end_column = end_line.end_mark.column + 1 + au.column = start_line.start_mark.column + 1 + 
au.code = self._get_code(start_line, end_line, code) + return au + + au: AtomicUnit = create_atomic_unit( + dict[0], dict[1], type[:-1], dict[0].value, code + ) + au.attributes += self.__parse_attributes(dict[1], code) + + unit_block.add_atomic_unit(au) + + def __parse_attributes(self, val: Any, code: List[str]) -> List[Attribute]: + """ + Parses the Attributes of an AtomicUnit + """ + + def create_attribute( + token: Token | Node | None, name: str, value: Any, _info: ElementInfo = None + ) -> Attribute: + if _info is not None and token is None: + # HACK: (Part of) Handling transforming attributes coming from ">>" inserts to normal attributes + info = _info + else: + info: ElementInfo = ElementInfo( + token.start_mark.line + 1, + token.start_mark.column + 1, + token.end_mark.line + 1, + token.end_mark.column + 1, + self._get_code(token, token, code), + ) + + a: Attribute = Attribute(name, value, info) + attributes.append(a) + return a + + attributes: List[Attribute] = [] + + if isinstance(val, MappingNode): + for att in val.value: + if isinstance(att, tuple): + if isinstance(att[1], ScalarNode) and att[1].tag.endswith("bool"): + # HACK: turn boolean scalar node values strings into + # real booleans values for get_value method, + # taking into account yaml 1.1 using the spec provided regexp (used by compose) + + if re.match( + "y|Y|yes|Yes|YES|true|True|TRUE|on|On|ON", att[1].value + ): + att[1].value = True + elif re.match( + "n|N|no|No|NO|false|False|FALSE|off|Off|OFF", att[1].value + ): + att[1].value = False + + att_value: Expr = self.get_value(att[1], code) + + if ( + att[0].value == "environment" + and isinstance(att[1], MappingNode) + and isinstance(att_value, Hash) + ): + temp_value_copy = att_value.value.copy() + modified = False + for k, v in att_value.value.items(): + if ( + isinstance(v, String) + and v.value.startswith("${") + and v.value.endswith("}") + ): + new_val = VariableReference( + v.value, ElementInfo.from_code_element(v) + ) + 
temp_value_copy[k] = new_val + modified = True + if modified: + att_value = Hash( + temp_value_copy, + ElementInfo.from_code_element(att_value), + ) + + if att[0].value == "environment" and isinstance( + att[1], SequenceNode + ): + """ + HACK: Converts all Sequence/Arrays environments to Hash + environment: + - VAR1=123 + - VAR2=456 + vs + environment: + VAR1 : 123 + VAR2 : 456 + + """ + fixed_env = {} + for elem in att_value.value: + elem_info: ElementInfo = ElementInfo.from_code_element(elem) + curr_str = elem.value + split_str = curr_str.split("=", 1) + if len(split_str) == 2: + key, n_val = split_str + if n_val.strip() in ["\"\"", "''"]: + n_val = "" + if n_val.startswith("${") and n_val.endswith("}"): + val_s = VariableReference(n_val, elem_info) + else: + val_s = String(n_val.strip(), elem_info) + else: + key = curr_str + val_s = Null(elem_info) + + key_s = String(key, elem_info) + fixed_env[key_s] = val_s + att_info = ElementInfo.from_code_element(att_value) + att_value = Hash(fixed_env, att_info) + if isinstance(att[1], MappingNode): + # HACK: Handle transforming attributes coming from ">>" inserts to normal attributes + if isinstance(att_value, Hash): + affected_keys = [] + temp_store = {} + + for k, v in att_value.value.items(): + if k.value == "<<": + affected_keys.append(k) + for _k, _v in v.value.items(): + temp_store[_k] = _v + + att_value.value.update(temp_store) + + for elem in affected_keys: + att_value.value.pop(elem) + if att[0].value == "<<" and isinstance(att_value, Hash): + # HACK: Handle transforming attributes coming from ">>" inserts to normal attributes + for k, v in att_value.value.items(): + create_attribute( + None, k.value, v, ElementInfo.from_code_element(v) + ) + else: + create_attribute(att[0], att[0].value, att_value) + + return attributes + + def parse_file( + self, + path: str, + type: UnitBlockType = UnitBlockType.script, + extends: bool = True, + ) -> Optional[UnitBlock]: + """ + Parses a stack/compose file into a UnitBlock 
each with its respective + AtomicUnits (each of the services,networks,volumes,configs and secrets) + and their Attributes + """ + try: + with open(path, "r") as f: + includes = [] + try: + parsed_file = YAML().compose(f) + f.seek(0, 0) + code: List[str] = f.readlines() + code.append("") + f.seek(0, 0) + except: + throw_exception(EXCEPTIONS["DOCKER_SWARM_COULD_NOT_PARSE"], path) + return None + if isinstance(parsed_file, MappingNode): + file_unit_block: UnitBlock = UnitBlock( + os.path.basename(os.path.normpath(path)), type + ) + file_unit_block.path = os.path.abspath(path) + if isinstance(parsed_file.value, list): + for field in parsed_file.value: + if field[0].value == "version" or field[0].value == "name": + expr: Expr = self.get_value(field[1], code) + info: ElementInfo = ElementInfo( + field[0].start_mark.line + 1, + field[0].start_mark.column + 1, + field[1].end_mark.line + 1, + field[1].end_mark.column + 1, + self._get_code(field[0], field[1], code), + ) + att: Attribute = Attribute(field[0].value, expr, info) + file_unit_block.add_attribute(att) + elif field[0].value == "include": + includes_temp = self.get_value(field[1], code).value + + for elem in includes_temp: + if isinstance(elem, String): + includes.append(elem.value) + elif isinstance(elem, Hash): + for k, v in elem.value.items(): + if k.value == "path": + includes.append(v.value) + for elem in includes: + file_unit_block.add_dependency(Dependency([elem])) + + elif field[0].value in [ + "services", + "networks", + "volumes", + "configs", + "secrets", + ]: + unit_block = UnitBlock( + field[0].value, UnitBlockType.block + ) + unit_block.path = os.path.abspath(path) + unit_block.line = field[0].start_mark.line + unit_block.column = field[0].start_mark.column + unit_block.end_line = field[0].end_mark.line + unit_block.end_column = field[0].end_mark.column + + for unit in field[1].value: + self.parse_atomic_unit( + field[0].value, unit_block, unit, code + ) + file_unit_block.add_unit_block(unit_block) + + 
elif isinstance(field[0], ScalarNode) and isinstance( + field[1], MappingNode + ): + continue + else: + throw_exception( + EXCEPTIONS["DOCKER_SWARM_COULD_NOT_PARSE"], path + ) + + for comment in self._get_comments(parsed_file, f): + c = Comment(comment[1]) + c.line = comment[0] + c.code = code[c.line - 1] + file_unit_block.add_comment(c) + file_unit_block.code = "".join(code) + + to_extend: List[List[AtomicUnit, Attribute]] = [] + + services = [] + for ub in file_unit_block.unit_blocks: + if ub.name == "services": + services = ub.atomic_units + # FIXME: Handling the extends from the same file or from other files, might not be the best way + if extends: + for service in services: + for attribute in service.attributes: + if attribute.name == "extends": + deps = [] + + if isinstance(attribute.value, Hash): + # adds the name of file as a dependency + for k, v in attribute.value.value.items(): + if k.value == "file": + deps.append(v.value) + break + file_unit_block.add_dependency(Dependency(deps)) + to_extend.append([service, attribute]) + break + + for service_to, attribute in to_extend: + att: Attribute = attribute + service_from_list = [] + service_from = "" + + if isinstance(att.value, String): + service_from = att.value.value + service_from_list = services + + elif isinstance(att.value, Hash): + hash_dict = att.value.value + file = "" + service_from = "" + for k, v in hash_dict.items(): + if k.value == "file": + file = v.value + elif k.value == "service": + service_from = v.value + curr_path = os.path.split(path)[0] + joint_path = os.path.normpath(os.path.join(curr_path, file)) + if os.path.normpath(path) != joint_path: + if os.path.exists(joint_path): + service_from_file_unit_block = self.parse_file( + joint_path, extends=False + ) + if service_from_file_unit_block is not None: + for ( + u_block + ) in service_from_file_unit_block.unit_blocks: + if ( + u_block.type == UnitBlockType.block + and u_block.name == "services" + ): + service_from_list += ( + 
u_block.atomic_units + ) + break + + else: + print( + f'Failed to parse extends file expected at "{joint_path}". File not found.' + ) + else: + print( + f'Failed to parse extends file expected at "{joint_path}". File not found.' + ) + else: + service_from_list = services + + for s in service_from_list: + if s.name.value == service_from: + att_names = [x.name for x in service_to.attributes] + + for s_att in s.attributes: + if s_att.name in ["depends_on", "volumes_from"]: + continue + elif s_att.name not in att_names: + service_to.add_attribute(s_att) + elif s_att.name in att_names: + for to_att in service_to.attributes: + if to_att.name == s_att.name: + if isinstance(to_att.value, Array): + self.__handle_array( + s_att.value, to_att.value + ) + elif isinstance(to_att.value, Hash): + self.__handle_hash( + s_att.value, to_att.value + ) + else: + continue + break + break + + return file_unit_block + except: + throw_exception(EXCEPTIONS["DOCKER_SWARM_COULD_NOT_PARSE"], path) + + def __handle_array(self, src: Array, dst: Array) -> None: + temp = [elem for elem in src.value if elem not in dst.value] + for elem in dst.value: + temp.append(elem) + dst.value = temp + + def __handle_hash(self, src: Hash, dst: Hash) -> None: + for k, v in src.value.items(): + if k not in dst.value: + dst.value[k] = v + else: + if isinstance(v, Array): + self.__handle_array(v, dst.value[k]) + elif isinstance(v, Hash): + self.__handle_hash(v, dst.value[k]) + + def parse_folder(self, path: str, root: bool = True) -> Optional[Project]: + """ + Swarm doesn't have a standard/sample directory layout, + but normally the stack/compose files are either at the root of + a projects folder, all in a specific folder or a stack for + different parts of the system are in each part subfolder + we consider each subfolder a Module + """ + + res: Project = Project(os.path.basename(os.path.normpath(path))) + + subfolders = [ + f.path for f in os.scandir(f"{path}") if f.is_dir() and not f.is_symlink() + ] + + for d 
in subfolders: + res.add_module(self.parse_module(d)) + + files = [ + f.path + for f in os.scandir(f"{path}") + if f.is_file() and not f.is_symlink() and f.path.endswith((".yml", ".yaml")) + ] + + for fi in files: + res.add_block(self.parse_file(fi)) + + return res + + def parse_module(self, path) -> Module: + """ + We consider each subfolder of the Project folder a Module + as done for other languagues supported by GLITCH + """ + res: Module = Module(os.path.basename(os.path.normpath(path)), path) + super().parse_file_structure(res.folder, path) + + files = [ + f.path + for f in os.scandir(f"{path}") + if f.is_file() and not f.is_symlink() and f.path.endswith((".yml", ".yaml")) + ] + + for fi in files: + res.add_block(self.parse_file(fi)) + + return res diff --git a/glitch/parsers/terraform.py b/glitch/parsers/terraform.py index e10f1f3e..eefddec7 100644 --- a/glitch/parsers/terraform.py +++ b/glitch/parsers/terraform.py @@ -51,7 +51,7 @@ def __get_element_code( return res - def __get_element_info(self, meta: Meta | Token) -> ElementInfo: + def _get_element_info(self, meta: Meta | Token) -> ElementInfo: return ElementInfo( meta.line, meta.column, @@ -113,9 +113,9 @@ def binary_op(self, meta: Meta, args: List) -> Any: @v_args(meta=True) def unary_op(self, meta: Meta, args: List) -> Any: if args[0] == "-": - return Minus(self.__get_element_info(meta), args[1]) + return Minus(self._get_element_info(meta), args[1]) elif args[0] == "!": - return Not(self.__get_element_info(meta), args[1]) + return Not(self._get_element_info(meta), args[1]) @v_args(meta=True) def get_attr(self, meta: Meta, args: List) -> Any: @@ -127,19 +127,19 @@ def index(self, meta: Meta, args: List) -> Any: @v_args(meta=True) def index_expr_term(self, meta: Meta, args: List) -> Any: - return Access(self.__get_element_info(meta), args[0], args[1]) + return Access(self._get_element_info(meta), args[0], args[1]) @v_args(meta=True) def get_attr_expr_term(self, meta: Meta, args: List) -> Any: - return 
Access(self.__get_element_info(meta), args[0], args[1]) + return Access(self._get_element_info(meta), args[0], args[1]) @v_args(meta=True) def int_lit(self, meta: Meta, args: List) -> int: - return Integer(int("".join(args)), self.__get_element_info(meta)) + return Integer(int("".join(args)), self._get_element_info(meta)) @v_args(meta=True) def float_lit(self, meta: Meta, args: List) -> float: - return Float(float("".join(args)), self.__get_element_info(meta)) + return Float(float("".join(args)), self._get_element_info(meta)) @v_args(meta=True) def interpolation_maybe_nested(self, meta: Meta, args: List) -> Any: @@ -151,7 +151,7 @@ def string_with_interpolation(self, meta: Meta, args: List) -> str: if isinstance(args[0], Token): return String( args[0].value, - self.__get_element_info(meta), + self._get_element_info(meta), ) return args[0] else: @@ -159,7 +159,7 @@ def string_with_interpolation(self, meta: Meta, args: List) -> str: if isinstance(args[i], Token): args[i] = String( args[i].value, - self.__get_element_info(args[i]), + self._get_element_info(args[i]), ) res = Sum( @@ -203,19 +203,19 @@ def string_with_interpolation(self, meta: Meta, args: List) -> str: @v_args(meta=True) def expr_term(self, meta: Meta, args: List) -> Expr: if len(args) == 0: - return Null(self.__get_element_info(meta)) + return Null(self._get_element_info(meta)) elif len(args) == 1: if isinstance(args[0], Tree) and args[0].data == "heredoc_template": return String( self.__parse_heredoc(args[0]), - self.__get_element_info(meta), + self._get_element_info(meta), ) if isinstance(args[0], Expr): return args[0] if args[0].type == "STRING_LIT": return String( args[0].value[1:-1], - self.__get_element_info(args[0]), + self._get_element_info(args[0]), ) return args[0] return args @@ -231,12 +231,12 @@ def object(self, meta: Meta, args: List) -> Any: object_elems = {} for k, v in args: object_elems[k] = v - res = Hash(object_elems, self.__get_element_info(meta)) + res = Hash(object_elems, 
self._get_element_info(meta)) return res @v_args(meta=True) def tuple(self, meta: Meta, args: List) -> Any: - return Array(args, self.__get_element_info(meta)) + return Array(args, self._get_element_info(meta)) @v_args(meta=True) def block(self, meta: Meta, args: List) -> Any: @@ -244,12 +244,12 @@ def block(self, meta: Meta, args: List) -> Any: au = AtomicUnit( String( args[2].value[1:-1], # Remove quotes - self.__get_element_info(args[2]), + self._get_element_info(args[2]), ), args[1].value[1:-1], ) au.attributes = [] - au.set_element_info(self.__get_element_info(meta)) + au.set_element_info(self._get_element_info(meta)) for arg in args[-1]: if isinstance(arg, Attribute): au.attributes.append(arg) @@ -275,7 +275,7 @@ def block(self, meta: Meta, args: List) -> Any: else: ub.add_attribute(arg) - ub.set_element_info(self.__get_element_info(meta)) + ub.set_element_info(self._get_element_info(meta)) return ub def body(self, args: List) -> Any: @@ -299,38 +299,38 @@ def conditional(self, meta: Meta, args: List) -> Any: @v_args(meta=True) def attribute(self, meta: Meta, args: List) -> Attribute: - return Attribute(args[0].value, args[2], self.__get_element_info(meta)) + return Attribute(args[0].value, args[2], self._get_element_info(meta)) @v_args(meta=True) def identifier(self, meta: Meta, value: Any) -> Expr: if value[0] == "null": - return Null(self.__get_element_info(meta)) + return Null(self._get_element_info(meta)) elif value[0] in ["true", "false"]: - return Boolean(value[0] == "true", self.__get_element_info(meta)) + return Boolean(value[0] == "true", self._get_element_info(meta)) name = value[0] if isinstance(name, Token): name = name.value - return VariableReference(name, self.__get_element_info(meta)) + return VariableReference(name, self._get_element_info(meta)) @v_args(meta=True) def attr_splat_expr_term(self, meta: Meta, args: List) -> Any: # TODO: Not supported yet - return Null(self.__get_element_info(meta)) + return Null(self._get_element_info(meta)) 
@v_args(meta=True) def full_splat_expr_term(self, meta: Meta, args: List) -> Any: # TODO: Not supported yet - return Null(self.__get_element_info(meta)) + return Null(self._get_element_info(meta)) @v_args(meta=True) def for_tuple_expr(self, meta: Meta, args: List) -> Any: # TODO: Not supported yet - return Null(self.__get_element_info(meta)) + return Null(self._get_element_info(meta)) @v_args(meta=True) def for_object_expr(self, meta: Meta, args: List) -> Any: # TODO: Not supported yet - return Null(self.__get_element_info(meta)) + return Null(self._get_element_info(meta)) @v_args(meta=True) def function_call(self, meta: Meta, args: List) -> Any: @@ -338,12 +338,12 @@ def function_call(self, meta: Meta, args: List) -> Any: return FunctionCall( args[0], [], - self.__get_element_info(meta), + self._get_element_info(meta), ) return FunctionCall( args[0], args[1], - self.__get_element_info(meta), + self._get_element_info(meta), ) def arguments(self, args: List) -> Any: diff --git a/glitch/stats/print.py b/glitch/stats/print.py index dc0f6dab..a36cb969 100644 --- a/glitch/stats/print.py +++ b/glitch/stats/print.py @@ -100,7 +100,7 @@ def print_stats( "\\textbf{Smell}", "\\textbf{Occurrences}", "\\textbf{Smell density (Smell/KLoC)}", - "\\textbf{Proportion of scripts (%)}", + "\\textbf{Proportion of scripts (\\%)}", ], ) latex = ( # type: ignore diff --git a/glitch/tech.py b/glitch/tech.py index ad321319..167e719f 100644 --- a/glitch/tech.py +++ b/glitch/tech.py @@ -13,3 +13,5 @@ def __init__(self, tech: str, extensions: List[str]): terraform = "terraform", ["tf"] docker = "docker", ["Dockerfile"] gha = "github-actions", ["yml", "yaml"] + swarm = "swarm", ["yml", "yaml"] + nomad = "nomad", ["hcl", "nomad", "job"] diff --git a/glitch/tests/security/nomad/__init__.py b/glitch/tests/security/nomad/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/glitch/tests/security/nomad/files/admin.nomad b/glitch/tests/security/nomad/files/admin.nomad new file mode 
100644 index 00000000..d19b87ee --- /dev/null +++ b/glitch/tests/security/nomad/files/admin.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + user = "root" + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/container_image_tag_smells/no_tag_and_digest.nomad b/glitch/tests/security/nomad/files/container_image_tag_smells/no_tag_and_digest.nomad new file mode 100644 index 00000000..dc607367 --- /dev/null +++ b/glitch/tests/security/nomad/files/container_image_tag_smells/no_tag_and_digest.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/container_image_tag_smells/no_tag_no_digest.nomad b/glitch/tests/security/nomad/files/container_image_tag_smells/no_tag_no_digest.nomad new file mode 100644 index 00000000..19d537aa --- /dev/null +++ b/glitch/tests/security/nomad/files/container_image_tag_smells/no_tag_no_digest.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + 
count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/container_image_tag_smells/normal_tag_and_digest.nomad b/glitch/tests/security/nomad/files/container_image_tag_smells/normal_tag_and_digest.nomad new file mode 100644 index 00000000..2611bd69 --- /dev/null +++ b/glitch/tests/security/nomad/files/container_image_tag_smells/normal_tag_and_digest.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/container_image_tag_smells/normal_tag_and_no_digest.nomad b/glitch/tests/security/nomad/files/container_image_tag_smells/normal_tag_and_no_digest.nomad new file mode 100644 index 00000000..773acd83 --- /dev/null +++ b/glitch/tests/security/nomad/files/container_image_tag_smells/normal_tag_and_no_digest.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" 
+ type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/container_image_tag_smells/unstable_tag_and_digest.nomad b/glitch/tests/security/nomad/files/container_image_tag_smells/unstable_tag_and_digest.nomad new file mode 100644 index 00000000..e50245d4 --- /dev/null +++ b/glitch/tests/security/nomad/files/container_image_tag_smells/unstable_tag_and_digest.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:mainline@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/container_image_tag_smells/unstable_tag_no_digest.nomad b/glitch/tests/security/nomad/files/container_image_tag_smells/unstable_tag_no_digest.nomad new file mode 100644 index 00000000..23d430c2 --- /dev/null +++ b/glitch/tests/security/nomad/files/container_image_tag_smells/unstable_tag_no_digest.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:mainline" + + ports = [ "http" ] + logging { + driver = 
"elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/deprecated_docker_official_image.nomad b/glitch/tests/security/nomad/files/deprecated_docker_official_image.nomad new file mode 100644 index 00000000..352b09ed --- /dev/null +++ b/glitch/tests/security/nomad/files/deprecated_docker_official_image.nomad @@ -0,0 +1,41 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "rails:1.2.3@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/docker_socket_mounted.nomad b/glitch/tests/security/nomad/files/docker_socket_mounted.nomad new file mode 100644 index 00000000..d80f02fa --- /dev/null +++ b/glitch/tests/security/nomad/files/docker_socket_mounted.nomad @@ -0,0 +1,42 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + volumes = [ + "/var/run/docker.sock:/var/run/docker.sock", + ] + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/hard_secr/hard_secr_empty_password.nomad 
b/glitch/tests/security/nomad/files/hard_secr/hard_secr_empty_password.nomad new file mode 100644 index 00000000..c5e0e294 --- /dev/null +++ b/glitch/tests/security/nomad/files/hard_secr/hard_secr_empty_password.nomad @@ -0,0 +1,46 @@ + +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + env { + DEV_PASSWORD = "" + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/hard_secr/hard_secr_password.nomad b/glitch/tests/security/nomad/files/hard_secr/hard_secr_password.nomad new file mode 100644 index 00000000..2fc4f089 --- /dev/null +++ b/glitch/tests/security/nomad/files/hard_secr/hard_secr_password.nomad @@ -0,0 +1,46 @@ + +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + env { + DEV_PASSWORD = "hunter2" + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/hard_secr/hard_secr_secret.nomad b/glitch/tests/security/nomad/files/hard_secr/hard_secr_secret.nomad new file mode 100644 index 00000000..7ea74713 --- /dev/null +++ 
b/glitch/tests/security/nomad/files/hard_secr/hard_secr_secret.nomad @@ -0,0 +1,46 @@ + +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + env { + DEV_SECRET = "iam_superman" + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/hard_secr/hard_secr_user.nomad b/glitch/tests/security/nomad/files/hard_secr/hard_secr_user.nomad new file mode 100644 index 00000000..677e422c --- /dev/null +++ b/glitch/tests/security/nomad/files/hard_secr/hard_secr_user.nomad @@ -0,0 +1,46 @@ + +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + env { + DEV_USER = "clark_kent" + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/https_tls.nomad b/glitch/tests/security/nomad/files/https_tls.nomad new file mode 100644 index 00000000..89f314d0 --- /dev/null +++ b/glitch/tests/security/nomad/files/https_tls.nomad @@ -0,0 +1,45 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = 
"server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + env { + SOME_ENDPOINT = "http://1.1.1.1" + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/invalid_bind.nomad b/glitch/tests/security/nomad/files/invalid_bind.nomad new file mode 100644 index 00000000..d38a6264 --- /dev/null +++ b/glitch/tests/security/nomad/files/invalid_bind.nomad @@ -0,0 +1,45 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + env { + SOME_ENVVAR = "0.0.0.0" + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/missing_healthchecks.nomad b/glitch/tests/security/nomad/files/missing_healthchecks.nomad new file mode 100644 index 00000000..de5ad052 --- /dev/null +++ b/glitch/tests/security/nomad/files/missing_healthchecks.nomad @@ -0,0 +1,34 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + } + + task "example-api" { + driver = "docker" + + config { + image = 
"nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/multiple_services_per_deployment_unit.nomad b/glitch/tests/security/nomad/files/multiple_services_per_deployment_unit.nomad new file mode 100644 index 00000000..0937d987 --- /dev/null +++ b/glitch/tests/security/nomad/files/multiple_services_per_deployment_unit.nomad @@ -0,0 +1,81 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + port "check" { } + } + + service { + name = "server-proxy" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + service { + name = "service-api" + port = "check" + tags = ["check"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + + task "example-service" { + driver = "docker" + + config { + image = "someone/exampleapp:v234.12@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + ports = [ "check" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + + task "example-service2" { + driver = "docker" + + config { + image = "someone/exampleapp2:v234.12@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + ports = [ "check" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/no_api_gateway.nomad b/glitch/tests/security/nomad/files/no_api_gateway.nomad new file mode 
100644 index 00000000..af9280a7 --- /dev/null +++ b/glitch/tests/security/nomad/files/no_api_gateway.nomad @@ -0,0 +1,41 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "host" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "ubuntu:24.04@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/no_log_aggregation.nomad b/glitch/tests/security/nomad/files/no_log_aggregation.nomad new file mode 100644 index 00000000..d8087081 --- /dev/null +++ b/glitch/tests/security/nomad/files/no_log_aggregation.nomad @@ -0,0 +1,37 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + } + } + } +} diff --git a/glitch/tests/security/nomad/files/non_official_image.nomad b/glitch/tests/security/nomad/files/non_official_image.nomad new file mode 100644 index 00000000..42d23af8 --- /dev/null +++ b/glitch/tests/security/nomad/files/non_official_image.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + 
type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "some_random_namespace/nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/priv_container.nomad b/glitch/tests/security/nomad/files/priv_container.nomad new file mode 100644 index 00000000..efb75930 --- /dev/null +++ b/glitch/tests/security/nomad/files/priv_container.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + privileged = true + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/susp_comment.nomad b/glitch/tests/security/nomad/files/susp_comment.nomad new file mode 100644 index 00000000..b6a908a0 --- /dev/null +++ b/glitch/tests/security/nomad/files/susp_comment.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + # FIXME TEST FILE DONT REMOVE + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + 
logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/template_no_smells.nomad b/glitch/tests/security/nomad/files/template_no_smells.nomad new file mode 100644 index 00000000..d1753e8d --- /dev/null +++ b/glitch/tests/security/nomad/files/template_no_smells.nomad @@ -0,0 +1,45 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "grpc_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + + env { + SOME_ENVVAR = "value" + } + + } + } +} diff --git a/glitch/tests/security/nomad/files/weak_crypt.nomad b/glitch/tests/security/nomad/files/weak_crypt.nomad new file mode 100644 index 00000000..3000d72f --- /dev/null +++ b/glitch/tests/security/nomad/files/weak_crypt.nomad @@ -0,0 +1,40 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + connect { + sidecar_service {} + } + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + command = "/bin/sh -c 'wget -O - https://example.com/wow | md5sum > wow.txt'" + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/files/wobbly_service_interaction.nomad 
b/glitch/tests/security/nomad/files/wobbly_service_interaction.nomad new file mode 100644 index 00000000..377136ef --- /dev/null +++ b/glitch/tests/security/nomad/files/wobbly_service_interaction.nomad @@ -0,0 +1,37 @@ +job "example" { + + group "example" { + count = 2 + + network { + mode = "bridge" + port "http" { } + } + + service { + name = "server-api" + port = "http" + tags = ["http"] + + check { + name = "http_probe" + type = "http" + interval = "10s" + timeout = "1s" + } + } + + task "example-api" { + driver = "docker" + + config { + image = "nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5" + + ports = [ "http" ] + logging { + driver = "elastic/elastic-logging-plugin" + } + } + } + } +} diff --git a/glitch/tests/security/nomad/test_security.py b/glitch/tests/security/nomad/test_security.py new file mode 100644 index 00000000..147441e5 --- /dev/null +++ b/glitch/tests/security/nomad/test_security.py @@ -0,0 +1,206 @@ +import unittest + +from glitch.analysis.security.visitor import SecurityVisitor +from glitch.parsers.nomad import NomadParser +from glitch.repr.inter import UnitBlockType +from glitch.tech import Tech +from typing import List + + +class TestSecurity(unittest.TestCase): + def __help_test( + self, path: str, n_errors: int, codes: List[str], lines: List[int] + ) -> None: + parser = NomadParser() + inter = parser.parse(path, UnitBlockType.script, False) + analysis = SecurityVisitor(Tech.nomad) + analysis.config("configs/default.ini") + errors = list( + filter( + lambda e: e.code.startswith("sec_") or e.code.startswith("arc_"), + set(analysis.check(inter)), + ) # type: ignore + ) + errors = sorted(errors, key=lambda e: (e.path, e.line, e.code)) + self.assertEqual(len(errors), n_errors) + for i in range(n_errors): + self.assertEqual(errors[i].code, codes[i]) + self.assertEqual(errors[i].line, lines[i]) + + def test_nomad_admin(self) -> None: + self.__help_test( + 
"tests/security/nomad/files/admin.nomad", + 3, + ["sec_def_admin", "sec_hard_secr", "sec_hard_user"], + [29, 29, 29], + ) + + def test_nomad_empty(self) -> None: + self.__help_test( + "tests/security/nomad/files/hard_secr/hard_secr_empty_password.nomad", + 1, + ["sec_empty_pass"], + [40], + ) + + def test_nomad_hard_secret(self) -> None: + self.__help_test( + "tests/security/nomad/files/hard_secr/hard_secr_password.nomad", + 2, + ["sec_hard_pass", "sec_hard_secr"], + [40, 40], + ) + self.__help_test( + "tests/security/nomad/files/hard_secr/hard_secr_secret.nomad", + 1, + ["sec_hard_secr"], + [40], + ) + self.__help_test( + "tests/security/nomad/files/hard_secr/hard_secr_user.nomad", + 2, + ["sec_hard_secr", "sec_hard_user"], + [40, 40], + ) + + def test_nomad_http(self) -> None: + self.__help_test( + "tests/security/nomad/files/https_tls.nomad", 1, ["sec_https"], [40] + ) + + def test_nomad_inv_bind(self) -> None: + self.__help_test( + "tests/security/nomad/files/invalid_bind.nomad", + 1, + ["sec_invalid_bind"], + [39], + ) + + def test_nomad_non_official_image(self) -> None: + self.__help_test( + "tests/security/nomad/files/non_official_image.nomad", + 1, + ["sec_non_official_image"], + [31], + ) + + def test_nomad_susp(self) -> None: + self.__help_test( + "tests/security/nomad/files/susp_comment.nomad", 1, ["sec_susp_comm"], [5] + ) + + def test_nomad_weak_crypt(self) -> None: + self.__help_test( + "tests/security/nomad/files/weak_crypt.nomad", + 1, + ["sec_weak_crypt"], + [32], + ) + + def test_nomad_container_image_tag_smells(self) -> None: + self.__help_test( + "tests/security/nomad/files/container_image_tag_smells/normal_tag_and_digest.nomad", + 0, + [], + [], + ) + self.__help_test( + "tests/security/nomad/files/container_image_tag_smells/normal_tag_and_no_digest.nomad", + 1, + ["sec_image_integrity"], + [31], + ) + self.__help_test( + "tests/security/nomad/files/container_image_tag_smells/no_tag_and_digest.nomad", + 0, + [], + [], + ) + self.__help_test( + 
"tests/security/nomad/files/container_image_tag_smells/no_tag_no_digest.nomad", + 1, + ["sec_no_image_tag"], + [31], + ) + + self.__help_test( + "tests/security/nomad/files/container_image_tag_smells/unstable_tag_and_digest.nomad", + 1, + ["sec_unstable_tag"], + [31], + ) + self.__help_test( + "tests/security/nomad/files/container_image_tag_smells/unstable_tag_no_digest.nomad", + 2, + ["sec_image_integrity", "sec_unstable_tag"], + [31, 31], + ) + + def test_nomad_deprecated_official_img(self) -> None: + self.__help_test( + "tests/security/nomad/files/deprecated_docker_official_image.nomad", + 1, + ["sec_depr_off_imgs"], + [31], + ) + + def test_nomad_missing_healthchecks(self) -> None: + self.__help_test( + "tests/security/nomad/files/missing_healthchecks.nomad", + 1, + ["arc_missing_healthchecks"], + [21], + ) + + def test_nomad_privileged_container(self) -> None: + self.__help_test( + "tests/security/nomad/files/priv_container.nomad", + 1, + ["sec_privileged_containers"], + [32], + ) + + def test_nomad_docker_socket_mounted(self) -> None: + self.__help_test( + "tests/security/nomad/files/docker_socket_mounted.nomad", + 1, + ["sec_mounted_docker_socket"], + [33], + ) + + def test_nomad_no_log_aggregation(self) -> None: + self.__help_test( + "tests/security/nomad/files/no_log_aggregation.nomad", + 1, + ["arc_no_logging"], + [27], + ) + + def test_nomad_multiple_services_per_deployment_unit(self) -> None: + self.__help_test( + "tests/security/nomad/files/multiple_services_per_deployment_unit.nomad", + 4, + [ + "arc_multiple_services", + "sec_non_official_image", + "arc_multiple_services", + "sec_non_official_image", + ], + [57, 61, 69, 73], + ) + + def test_nomad_no_api_gateway(self) -> None: + self.__help_test( + "tests/security/nomad/files/no_api_gateway.nomad", + 1, + ["arc_no_apig"], + [7], + ) + + def test_nomad_wobbly_service_interaction(self) -> None: + self.__help_test( + "tests/security/nomad/files/wobbly_service_interaction.nomad", + 1, + 
["arc_wobbly_service_interaction"], + [24], + ) diff --git a/glitch/tests/security/swarm/__init__.py b/glitch/tests/security/swarm/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/glitch/tests/security/swarm/files/admin.yml b/glitch/tests/security/swarm/files/admin.yml new file mode 100644 index 00000000..397fd54a --- /dev/null +++ b/glitch/tests/security/swarm/files/admin.yml @@ -0,0 +1,12 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + user: root + logging: + driver: elastic/elastic-logging-plugin + + diff --git a/glitch/tests/security/swarm/files/container_image_tag_smells/no_tag_and_digest.yml b/glitch/tests/security/swarm/files/container_image_tag_smells/no_tag_and_digest.yml new file mode 100644 index 00000000..e28ee8ef --- /dev/null +++ b/glitch/tests/security/swarm/files/container_image_tag_smells/no_tag_and_digest.yml @@ -0,0 +1,9 @@ +services: + service_example: + image: nginx@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/container_image_tag_smells/no_tag_no_digest.yml b/glitch/tests/security/swarm/files/container_image_tag_smells/no_tag_no_digest.yml new file mode 100644 index 00000000..d3ccf6d9 --- /dev/null +++ b/glitch/tests/security/swarm/files/container_image_tag_smells/no_tag_no_digest.yml @@ -0,0 +1,9 @@ +services: + service_example: + image: nginx + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/container_image_tag_smells/normal_tag_and_digest.yml 
b/glitch/tests/security/swarm/files/container_image_tag_smells/normal_tag_and_digest.yml new file mode 100644 index 00000000..5a3d7410 --- /dev/null +++ b/glitch/tests/security/swarm/files/container_image_tag_smells/normal_tag_and_digest.yml @@ -0,0 +1,9 @@ +services: + service_example: + image: nginx:1.28.0-alpine@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/container_image_tag_smells/normal_tag_and_no_digest.yml b/glitch/tests/security/swarm/files/container_image_tag_smells/normal_tag_and_no_digest.yml new file mode 100644 index 00000000..ba84a692 --- /dev/null +++ b/glitch/tests/security/swarm/files/container_image_tag_smells/normal_tag_and_no_digest.yml @@ -0,0 +1,9 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/container_image_tag_smells/unstable_tag_and_digest.yml b/glitch/tests/security/swarm/files/container_image_tag_smells/unstable_tag_and_digest.yml new file mode 100644 index 00000000..a7d9ac91 --- /dev/null +++ b/glitch/tests/security/swarm/files/container_image_tag_smells/unstable_tag_and_digest.yml @@ -0,0 +1,9 @@ +services: + service_example: + image: nginx:mainline@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/container_image_tag_smells/unstable_tag_no_digest.yml b/glitch/tests/security/swarm/files/container_image_tag_smells/unstable_tag_no_digest.yml new file mode 100644 index 
00000000..379c9d29 --- /dev/null +++ b/glitch/tests/security/swarm/files/container_image_tag_smells/unstable_tag_no_digest.yml @@ -0,0 +1,9 @@ +services: + service_example: + image: nginx:latest + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/deprecated_docker_official_image.yml b/glitch/tests/security/swarm/files/deprecated_docker_official_image.yml new file mode 100644 index 00000000..26f4cf4c --- /dev/null +++ b/glitch/tests/security/swarm/files/deprecated_docker_official_image.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: django:example-version@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin + + diff --git a/glitch/tests/security/swarm/files/docker_socket_mounted.yml b/glitch/tests/security/swarm/files/docker_socket_mounted.yml new file mode 100644 index 00000000..f8a4d066 --- /dev/null +++ b/glitch/tests/security/swarm/files/docker_socket_mounted.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + volumes: + - /var/run/docker.sock:/var/run/docker.sock + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/hard_secr/hard_secr_empty_password.yml b/glitch/tests/security/swarm/files/hard_secr/hard_secr_empty_password.yml new file mode 100644 index 00000000..a2ba8b54 --- /dev/null +++ b/glitch/tests/security/swarm/files/hard_secr/hard_secr_empty_password.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: 
nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + environment: + - SOME_PASSWORD="" + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/hard_secr/hard_secr_password.yml b/glitch/tests/security/swarm/files/hard_secr/hard_secr_password.yml new file mode 100644 index 00000000..3a3199b5 --- /dev/null +++ b/glitch/tests/security/swarm/files/hard_secr/hard_secr_password.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + environment: + - SOME_PASSWORD=verysecure + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/hard_secr/hard_secr_secret.yml b/glitch/tests/security/swarm/files/hard_secr/hard_secr_secret.yml new file mode 100644 index 00000000..680f435e --- /dev/null +++ b/glitch/tests/security/swarm/files/hard_secr/hard_secr_secret.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + environment: + - SOME_SECRET=verysecure + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/hard_secr/hard_secr_user.yml b/glitch/tests/security/swarm/files/hard_secr/hard_secr_user.yml new file mode 100644 index 00000000..2f1315b7 --- /dev/null +++ b/glitch/tests/security/swarm/files/hard_secr/hard_secr_user.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: 
"curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + environment: + - SOME_USER=someone + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/https_tls.yml b/glitch/tests/security/swarm/files/https_tls.yml new file mode 100644 index 00000000..ce75935f --- /dev/null +++ b/glitch/tests/security/swarm/files/https_tls.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + environment: + - SOME_ENDPOINT_MAYBE=http://1.1.1.1 + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_array.yml b/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_array.yml new file mode 100644 index 00000000..90827903 --- /dev/null +++ b/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_array.yml @@ -0,0 +1,13 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + entrypoint: + - "some_command" + - "--some_flag" + - "0.0.0.0" + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_exec_form.yml b/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_exec_form.yml new file mode 100644 index 00000000..ba3473f5 --- /dev/null +++ b/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_exec_form.yml @@ -0,0 +1,10 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + command: 
["example_command","--listen","0.0.0.0"] + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_hash.yml b/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_hash.yml new file mode 100644 index 00000000..89298f5a --- /dev/null +++ b/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_hash.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + environment: + - ENDPOINT=0.0.0.0 + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_string.yml b/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_string.yml new file mode 100644 index 00000000..522cf0f1 --- /dev/null +++ b/glitch/tests/security/swarm/files/invalid_bind/invalid_bind_string.yml @@ -0,0 +1,10 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + command: "example_command --listen 0.0.0.0" + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/missing_healthchecks.yml b/glitch/tests/security/swarm/files/missing_healthchecks.yml new file mode 100644 index 00000000..b65607e0 --- /dev/null +++ b/glitch/tests/security/swarm/files/missing_healthchecks.yml @@ -0,0 +1,5 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/no_log_aggregation.yml b/glitch/tests/security/swarm/files/no_log_aggregation.yml new file mode 100644 index 
00000000..f063c68c --- /dev/null +++ b/glitch/tests/security/swarm/files/no_log_aggregation.yml @@ -0,0 +1,9 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + + diff --git a/glitch/tests/security/swarm/files/non_official_image.yml b/glitch/tests/security/swarm/files/non_official_image.yml new file mode 100644 index 00000000..37a616d3 --- /dev/null +++ b/glitch/tests/security/swarm/files/non_official_image.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: example_dockerhub_namespace/nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin + + diff --git a/glitch/tests/security/swarm/files/priv_container.yml b/glitch/tests/security/swarm/files/priv_container.yml new file mode 100644 index 00000000..cc3b1c27 --- /dev/null +++ b/glitch/tests/security/swarm/files/priv_container.yml @@ -0,0 +1,10 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + privileged: true + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/susp_comment.yml b/glitch/tests/security/swarm/files/susp_comment.yml new file mode 100644 index 00000000..5893a533 --- /dev/null +++ b/glitch/tests/security/swarm/files/susp_comment.yml @@ -0,0 +1,11 @@ +services: + service_example: #FIXME TEST BUG DONT FIX + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + 
interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin + + diff --git a/glitch/tests/security/swarm/files/template_no_smells.yml b/glitch/tests/security/swarm/files/template_no_smells.yml new file mode 100644 index 00000000..8fa23e5d --- /dev/null +++ b/glitch/tests/security/swarm/files/template_no_smells.yml @@ -0,0 +1,9 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_array.yml b/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_array.yml new file mode 100644 index 00000000..a5ee6d14 --- /dev/null +++ b/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_array.yml @@ -0,0 +1,13 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + entrypoint: + - "/bin/sh" + - "-c" + - "wget -O - https://example.com/wow | md5sum > wow.txt" + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_array_exec_form.yml b/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_array_exec_form.yml new file mode 100644 index 00000000..289f393c --- /dev/null +++ b/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_array_exec_form.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + command: + ["/bin/sh", "-c", "wget -O - https://example.com/wow | md5sum > wow.txt"] + logging: + driver: 
elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_hash.yml b/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_hash.yml new file mode 100644 index 00000000..1a49f97c --- /dev/null +++ b/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_hash.yml @@ -0,0 +1,11 @@ +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + environment: + - USE_SECURE_HASH=md5 + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_string.yml b/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_string.yml new file mode 100644 index 00000000..1455806e --- /dev/null +++ b/glitch/tests/security/swarm/files/weak_crypt/weak_crypt_string.yml @@ -0,0 +1,12 @@ +version: 3.7 + +services: + service_example: + image: nginx:1.29.1-alpine3.22-perl@sha256:9322c38c12e68706f47d42b53622e1c52a351bd963574f4a157b3048d21772e5 + healthcheck: + test: "curl --fail http://localhost:80/ || exit 1" + interval: 30s + retries: 5s + command: '/bin/sh -c "wget -O - https://example.com/wow | md5sum > wow.txt"' + logging: + driver: elastic/elastic-logging-plugin diff --git a/glitch/tests/security/swarm/test_security.py b/glitch/tests/security/swarm/test_security.py new file mode 100644 index 00000000..bbe55f36 --- /dev/null +++ b/glitch/tests/security/swarm/test_security.py @@ -0,0 +1,213 @@ +import unittest + +from glitch.analysis.security.visitor import SecurityVisitor +from glitch.parsers.swarm import SwarmParser +from glitch.repr.inter import UnitBlockType +from glitch.tech import Tech +from typing import List + + +class TestSecurity(unittest.TestCase): + def __help_test( + self, path: str, n_errors: int, codes: List[str], lines: List[int] + ) -> None: + parser = SwarmParser() + inter = parser.parse(path, 
UnitBlockType.script, False) + analysis = SecurityVisitor(Tech.swarm) + analysis.config("configs/default.ini") + errors = list( + filter( + lambda e: e.code.startswith("sec_") or e.code.startswith("arc_"), + set(analysis.check(inter)), + ) # type: ignore + ) + errors = sorted(errors, key=lambda e: (e.path, e.line, e.code)) + self.assertEqual(len(errors), n_errors) + for i in range(n_errors): + self.assertEqual(errors[i].code, codes[i]) + self.assertEqual(errors[i].line, lines[i]) + + def test_swarm_admin(self) -> None: + self.__help_test( + "tests/security/swarm/files/admin.yml", + 3, + ["sec_def_admin", "sec_hard_secr", "sec_hard_user"], + [8, 8, 8], + ) + + def test_swarm_empty(self) -> None: + self.__help_test( + "tests/security/swarm/files/hard_secr/hard_secr_empty_password.yml", + 1, + ["sec_empty_pass"], + [8], + ) + + def test_swarm_hard_secret(self) -> None: + self.__help_test( + "tests/security/swarm/files/hard_secr/hard_secr_password.yml", + 2, + ["sec_hard_pass", "sec_hard_secr"], + [8, 8], + ) + self.__help_test( + "tests/security/swarm/files/hard_secr/hard_secr_secret.yml", + 1, + ["sec_hard_secr"], + [8], + ) + self.__help_test( + "tests/security/swarm/files/hard_secr/hard_secr_user.yml", + 2, + ["sec_hard_secr", "sec_hard_user"], + [8, 8], + ) + + def test_swarm_http(self) -> None: + self.__help_test( + "tests/security/swarm/files/https_tls.yml", 1, ["sec_https"], [9] + ) + + def test_swarm_inv_bind(self) -> None: + self.__help_test( + "tests/security/swarm/files/invalid_bind/invalid_bind_array.yml", + 1, + ["sec_invalid_bind"], + [8], + ) + self.__help_test( + "tests/security/swarm/files/invalid_bind/invalid_bind_exec_form.yml", + 1, + ["sec_invalid_bind"], + [8], + ) + self.__help_test( + "tests/security/swarm/files/invalid_bind/invalid_bind_hash.yml", + 1, + ["sec_invalid_bind"], + [8], + ) + self.__help_test( + "tests/security/swarm/files/invalid_bind/invalid_bind_string.yml", + 1, + ["sec_invalid_bind"], + [8], + ) + + def 
test_swarm_non_official_image(self) -> None: + self.__help_test( + "tests/security/swarm/files/non_official_image.yml", + 1, + ["sec_non_official_image"], + [3], + ) + + def test_swarm_susp(self) -> None: + self.__help_test( + "tests/security/swarm/files/susp_comment.yml", 1, ["sec_susp_comm"], [2] + ) + + def test_swarm_weak_crypt(self) -> None: + self.__help_test( + "tests/security/swarm/files/weak_crypt/weak_crypt_string.yml", + 1, + ["sec_weak_crypt"], + [10], + ) + self.__help_test( + "tests/security/swarm/files/weak_crypt/weak_crypt_hash.yml", + 1, + ["sec_weak_crypt"], + [9], + ) + self.__help_test( + "tests/security/swarm/files/weak_crypt/weak_crypt_array.yml", + 1, + ["sec_weak_crypt"], + [11], + ) + self.__help_test( + "tests/security/swarm/files/weak_crypt/weak_crypt_array_exec_form.yml", + 1, + ["sec_weak_crypt"], + [9], + ) + + def test_swarm_container_image_tag_smells(self) -> None: + self.__help_test( + "tests/security/swarm/files/container_image_tag_smells/normal_tag_and_digest.yml", + 0, + [], + [], + ) + self.__help_test( + "tests/security/swarm/files/container_image_tag_smells/normal_tag_and_no_digest.yml", + 1, + ["sec_image_integrity"], + [3], + ) + self.__help_test( + "tests/security/swarm/files/container_image_tag_smells/no_tag_and_digest.yml", + 0, + [], + [], + ) + self.__help_test( + "tests/security/swarm/files/container_image_tag_smells/no_tag_no_digest.yml", + 1, + ["sec_no_image_tag"], + [3], + ) + + self.__help_test( + "tests/security/swarm/files/container_image_tag_smells/unstable_tag_and_digest.yml", + 1, + ["sec_unstable_tag"], + [3], + ) + self.__help_test( + "tests/security/swarm/files/container_image_tag_smells/unstable_tag_no_digest.yml", + 2, + ["sec_image_integrity", "sec_unstable_tag"], + [3, 3], + ) + + def test_swarm_deprecated_official_img(self) -> None: + self.__help_test( + "tests/security/swarm/files/deprecated_docker_official_image.yml", + 1, + ["sec_depr_off_imgs"], + [3], + ) + + def 
test_swarm_missing_healthchecks(self) -> None: + self.__help_test( + "tests/security/swarm/files/missing_healthchecks.yml", + 1, + ["arc_missing_healthchecks"], + [2], + ) + + def test_swarm_privileged_container(self) -> None: + self.__help_test( + "tests/security/swarm/files/priv_container.yml", + 1, + ["sec_privileged_containers"], + [8], + ) + + def test_swarm_docker_socket_mounted(self) -> None: + self.__help_test( + "tests/security/swarm/files/docker_socket_mounted.yml", + 1, + ["sec_mounted_docker_socket"], + [9], + ) + + def test_swarm_no_log_aggregation(self) -> None: + self.__help_test( + "tests/security/swarm/files/no_log_aggregation.yml", + 1, + ["arc_no_logging"], + [2], + ) diff --git a/scripts/docker_images_scraper.py b/scripts/docker_images_scraper.py index 4e006df3..abb31345 100644 --- a/scripts/docker_images_scraper.py +++ b/scripts/docker_images_scraper.py @@ -1,15 +1,30 @@ import requests -next_url = "https://hub.docker.com/api/content/v1/products/search?image_filter=official&page=1&page_size=100&q=&type=image" -headers = {"Accept": "application/json", "Search-Version": "v3"} +next_url = "https://hub.docker.com/api/search/v4?badges=official&size=100&query=&type=image" +headers = {"Accept": "application/json"} +ses = requests.Session() images_list = [] +deprecated = [] +res = ses.get(next_url, headers=headers).json() +total = res["total"] +current = 0 -while next_url: - res = requests.get(next_url, headers=headers).json() - next_url = res["next"] - images = [i["name"] for i in res["summaries"]] - images_list += images +while current < total: + for elem in res["results"]: + if "deprecate" in elem["short_description"].lower(): + deprecated.append(elem["name"]) + else: + images_list.append(elem["name"]) + current += len(res["results"]) + if current < total: + res = ses.get(next_url + f"&from={current}", headers=headers).json() -with open("official_images", "w") as f: +with open("official_docker_images", "w") as f: + images_list.sort() 
f.write("\n".join(images_list)) + +with open("deprecated_official_docker_images", "w") as f: + deprecated.sort() + f.write("\n".join(deprecated)) +