|
| 1 | +# |
| 2 | +# Copyright (c) nexB Inc. and others. All rights reserved. |
| 3 | +# VulnerableCode is a trademark of nexB Inc. |
| 4 | +# SPDX-License-Identifier: Apache-2.0 |
| 5 | +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. |
| 6 | +# See https://github.com/aboutcode-org/vulnerablecode for support or download. |
| 7 | +# See https://aboutcode.org for more information about nexB OSS projects. |
| 8 | +# |
| 9 | + |
| 10 | +import json |
| 11 | +import logging |
| 12 | +import shutil |
| 13 | +import tempfile |
| 14 | +from io import DEFAULT_BUFFER_SIZE |
| 15 | +from pathlib import Path |
| 16 | +from typing import Iterable |
| 17 | +from urllib.parse import urljoin |
| 18 | + |
| 19 | +import dateparser |
| 20 | +import requests |
| 21 | +from extractcode import ExtractError |
| 22 | +from packageurl import PackageURL |
| 23 | +from univers.version_range import RpmVersionRange |
| 24 | +from univers.version_range import VersionRange |
| 25 | + |
| 26 | +from vulnerabilities.importer import AdvisoryData |
| 27 | +from vulnerabilities.importer import AffectedPackageV2 |
| 28 | +from vulnerabilities.importer import ReferenceV2 |
| 29 | +from vulnerabilities.importer import VulnerabilitySeverity |
| 30 | +from vulnerabilities.pipelines import VulnerableCodeBaseImporterPipelineV2 |
| 31 | +from vulnerabilities.pipes import extractcode_utils |
| 32 | +from vulnerabilities.severity_systems import REDHAT_AGGREGATE |
| 33 | +from vulnerabilities.utils import load_json |
| 34 | +from vulntotal import vulntotal_utils |
| 35 | + |
| 36 | + |
class RedHatImporterPipeline(VulnerableCodeBaseImporterPipelineV2):
    """Import RedHat Advisories (RHSA, RHEA and RHBA)

    Ingest CSAF advisories published by RedHat, including Red Hat Security Advisory (RHSA),
    Red Hat Enhancement Advisory (RHEA), and Red Hat Bug Fix Advisory (RHBA).
    """

    pipeline_id = "redhat_importer_v2"
    spdx_license_expression = "CC-BY-4.0"
    license_url = "https://access.redhat.com/security/data/"
    url = "https://security.access.redhat.com/data/csaf/v2/advisories/"

    # Seconds before an HTTP request is abandoned; without a timeout a stalled
    # connection would hang the whole pipeline run.
    request_timeout = 30

    @classmethod
    def steps(cls):
        return (
            cls.fetch,
            cls.collect_and_store_advisories,
            cls.clean_download,
        )

    def fetch(self):
        """Download the latest CSAF advisory archive and extract it into a temp directory.

        Sets ``self.latest_archive_name``, ``self.location`` (extraction root) and
        ``self.cleanup_location`` (removed by ``clean_download``).

        Raises:
            requests.HTTPError: if either download returns an error status.
            ExtractError: if the archive cannot be extracted.
        """
        archive_latest_url = urljoin(self.url, "archive_latest.txt")
        response = requests.get(archive_latest_url, timeout=self.request_timeout)
        response.raise_for_status()
        self.latest_archive_name = response.text.strip()

        self.location = self.cleanup_location = Path(tempfile.mkdtemp())
        archive_path = self.location / self.latest_archive_name
        archive_url = urljoin(self.url, self.latest_archive_name)

        # Stream the (large) archive to disk in buffered chunks instead of
        # holding it all in memory.
        response = requests.get(archive_url, stream=True, timeout=self.request_timeout)
        response.raise_for_status()

        with open(archive_path, "wb") as f:
            for chunk in response.iter_content(chunk_size=DEFAULT_BUFFER_SIZE):
                f.write(chunk)

        if errors := extractcode_utils.extract_archive(
            source=archive_path,
            destination=self.location,
        ):
            self.log(
                f"Error while extracting archive {archive_path}: {errors}",
                level=logging.ERROR,
            )
            raise ExtractError(errors)

    def advisories_count(self) -> int:
        """Return the number of extracted JSON advisory files."""
        return sum(1 for _ in self.location.rglob("*.json"))

    def collect_advisories(self) -> Iterable[AdvisoryData]:
        """Yield one AdvisoryData per extracted CSAF JSON file.

        ``parse_advisory`` returns ``None`` for unsupported CSAF versions; those
        records are skipped instead of being yielded into the advisory stream.
        """
        for record in self.location.rglob("*.json"):
            if advisory := self.parse_advisory(record):
                yield advisory

    def parse_advisory(self, record):
        """Parse one CSAF 2.0 JSON file at ``record`` into an AdvisoryData.

        Return ``None`` (after logging an error) when the document declares a
        CSAF version other than 2.0.
        """
        advisory = load_json(record)
        document = advisory.get("document", {})
        if (csaf_version := document.get("csaf_version")) and csaf_version != "2.0":
            self.log(f"Unsupported CSAF version: {csaf_version}.", level=logging.ERROR)
            return

        severities = []
        references = []
        affected_packages = []
        notes = document.get("notes", [])
        adv_sub_path = f"{record.parent.name}/{record.name}"
        url = urljoin(self.url, adv_sub_path)
        advisory_id = get_item(document, "tracking", "id")
        release_date = get_item(document, "tracking", "initial_release_date")

        # Use .get() throughout: CSAF note/vulnerability entries are external
        # data and individual keys may be absent.
        summary = "\n\n".join(
            note.get("text", "")
            for note in notes
            if note.get("category") != "legal_disclaimer"
        )
        # Not every vulnerability entry is guaranteed to carry a "cve" key;
        # filter out the missing ones instead of raising KeyError.
        aliases = [cve for vul in advisory.get("vulnerabilities", []) if (cve := vul.get("cve"))]

        for ref in document.get("references", []):
            ref_url = ref.get("url")
            if not ref_url:
                # A reference without a URL cannot be recorded.
                continue
            if ref_url.startswith("https://bugzilla.redhat.com/"):
                references.append(
                    ReferenceV2(
                        reference_id=ref.get("summary"),
                        reference_type="bug",
                        url=ref_url,
                    )
                )
                continue
            references.append(ReferenceV2.from_url(url=ref_url))

        if aggregate_severity := document.get("aggregate_severity"):
            severities.append(
                VulnerabilitySeverity(
                    system=REDHAT_AGGREGATE,
                    value=aggregate_severity["text"],
                    url=url,
                )
            )

        impacts = get_item(advisory, "product_tree", "branches", 0, "branches", default=[])
        for impact in impacts:
            if impact.get("category") == "product_family":
                continue
            for branch in impact.get("branches", []):
                purl = get_item(
                    branch,
                    "product",
                    "product_identification_helper",
                    "purl",
                    default=None,
                )
                if not purl:
                    continue
                # Only RPM packages are modeled; other ecosystems are skipped.
                if not purl.startswith("pkg:rpm/"):
                    continue
                package_purl = PackageURL.from_string(purl=purl)
                fixed_version = package_purl.version
                if not fixed_version:
                    continue

                # The advisory purl carries the fixed version; every earlier
                # version is considered affected.
                fixed_version_range = RpmVersionRange.from_versions([fixed_version])
                affected_version_range = VersionRange.from_string(f"vers:rpm/<{fixed_version}")
                purl_dict = package_purl.to_dict()
                del purl_dict["version"]
                base_purl = PackageURL(**purl_dict)

                affected_packages.append(
                    AffectedPackageV2(
                        package=base_purl,
                        affected_version_range=affected_version_range,
                        fixed_version_range=fixed_version_range,
                    )
                )

        return AdvisoryData(
            advisory_id=advisory_id,
            aliases=aliases,
            summary=summary,
            references_v2=references,
            affected_packages=affected_packages,
            severities=severities,
            weaknesses=[],
            date_published=dateparser.parse(release_date) if release_date else None,
            url=url,
            original_advisory_text=json.dumps(advisory),
        )

    def clean_download(self):
        """Remove the temporary download/extraction directory, if any."""
        if hasattr(self, "cleanup_location") and self.cleanup_location.exists():
            self.log(f"Removing downloaded archive: {self.latest_archive_name}")
            shutil.rmtree(self.cleanup_location)

    def on_failure(self):
        # Ensure the temp directory is not leaked when any step fails.
        self.clean_download()
| 188 | + |
| 189 | + |
def get_item(entity, *attributes, default=None):
    """Return the nested value reached by following *attributes* into *entity*.

    Delegates to ``vulntotal_utils.get_item`` and returns *default* when any
    step of the lookup fails: a missing key (KeyError), an out-of-range index
    (IndexError), or a non-subscriptable intermediate value (TypeError).
    """
    try:
        return vulntotal_utils.get_item(entity, *attributes)
    except (KeyError, IndexError, TypeError):
        return default
0 commit comments