From 60ecd8523bfb3cfe6a4462cc7e9abbc9e265cfb0 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 12:46:05 -0700 Subject: [PATCH 01/35] Change the scans to be pyansys scans --- .github/workflows/nightly_scan.yml | 38 ++++++++++++++++++++++++------ 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/.github/workflows/nightly_scan.yml b/.github/workflows/nightly_scan.yml index d4ab7689a..73b162642 100644 --- a/.github/workflows/nightly_scan.yml +++ b/.github/workflows/nightly_scan.yml @@ -1,16 +1,40 @@ name: Security Scan +env: + MAIN_PYTHON_VERSION: '3.13' + PACKAGE_NAME: 'ansys-dynamicreporting-core' + on: push: branches: - main - release/* - workflow_dispatch: jobs: - security_scan: - uses: ansys-internal/ci-templates/.github/workflows/security-scan-mend.yml@v7 - with: - package_type: 'poetry+npm' - needs: 'sca,sast,sbom' - secrets: inherit + sbom: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: CycloneDX/gh-generate-sbom@v1 + + + vulnerabilities: + name: Vulnerabilities + runs-on: ubuntu-latest + steps: + - name: PyAnsys Vulnerability check (on main) + if: github.ref == 'refs/heads/main' + uses: ansys/actions/check-vulnerabilities@v10 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + python-package-name: ${{ env.PACKAGE_NAME }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + + - name: PyAnsys Vulnerability check (on dev mode) + if: github.ref != 'refs/heads/main' + uses: ansys/actions/check-vulnerabilities@v10 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + python-package-name: ${{ env.PACKAGE_NAME }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + dev-mode: true From 802dcc841b53ed68d195847e4e69a0fb5cf17cd0 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 14:01:12 -0700 Subject: [PATCH 02/35] Replace sbom generation action --- .github/workflows/nightly_scan.yml | 40 -------------------- .github/workflows/scan_sbom.yml | 61 ++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 40 deletions(-) delete mode 100644 .github/workflows/nightly_scan.yml create mode 100644 .github/workflows/scan_sbom.yml diff --git a/.github/workflows/nightly_scan.yml b/.github/workflows/nightly_scan.yml deleted file mode 100644 index 73b162642..000000000 --- a/.github/workflows/nightly_scan.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Security Scan - -env: - MAIN_PYTHON_VERSION: '3.13' - PACKAGE_NAME: 'ansys-dynamicreporting-core' - -on: - push: - branches: - - main - - release/* - -jobs: - sbom: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: CycloneDX/gh-generate-sbom@v1 - - - vulnerabilities: - name: Vulnerabilities - runs-on: ubuntu-latest - steps: - - name: PyAnsys Vulnerability check (on main) - if: github.ref == 'refs/heads/main' - uses: ansys/actions/check-vulnerabilities@v10 - with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - python-package-name: ${{ env.PACKAGE_NAME }} - token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} - - - name: PyAnsys Vulnerability check (on dev mode) - if: github.ref != 'refs/heads/main' - uses: ansys/actions/check-vulnerabilities@v10 - with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - python-package-name: ${{ env.PACKAGE_NAME }} - token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} - dev-mode: true diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml new file mode 100644 index 000000000..6b7afd07c --- /dev/null +++ b/.github/workflows/scan_sbom.yml @@ -0,0 +1,61 @@ +name: Security Scan + +env: + MAIN_PYTHON_VERSION: '3.13' 
+ PACKAGE_NAME: 'ansys-dynamicreporting-core' + +on: + push: + branches: + - main + - maint/* + - release/* + +jobs: + sbom: + name: Generate SBOM + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Build wheelhouse + uses: ansys/actions/build-wheelhouse@v10 + with: + library-name: ${{ env.PACKAGE_NAME }} + operating-system: ubuntu-latest + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Install from wheelhouse + run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} + + - name: Generate SBOM with Syft + uses: anchore/sbom-action@v0.15.4 + with: + format: spdx-json + output-file: sbom.spdx.json + + - name: Upload SBOM as artifact + uses: actions/upload-artifact@v4 + with: + name: sbom + path: sbom.spdx.json + + + vulnerabilities: + name: Vulnerabilities + runs-on: ubuntu-latest + steps: + - name: PyAnsys Vulnerability check (on main) + if: github.ref == 'refs/heads/main' + uses: ansys/actions/check-vulnerabilities@v10.0.14 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + python-package-name: ${{ env.PACKAGE_NAME }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} \ No newline at end of file From 81a317606388b5c3ef8756b30e27ddbb683b8d68 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 14:24:45 -0700 Subject: [PATCH 03/35] Fix the name --- .github/workflows/scan_sbom.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 6b7afd07c..3a8ea4bba 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -44,7 +44,7 @@ jobs: - name: Upload SBOM as artifact uses: actions/upload-artifact@v4 with: - name: sbom + name: ${{ env.PACKAGE_NAME }}-sbom path: sbom.spdx.json From e4c1ce463a7cbcdbb6a401e4b1b690074ac18fb9 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 14:32:21 -0700 Subject: [PATCH 04/35] Stop upload at a previous step --- .github/workflows/scan_sbom.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 3a8ea4bba..3fe09f2de 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -40,6 +40,7 @@ jobs: with: format: spdx-json output-file: sbom.spdx.json + upload-artifact: false - name: Upload SBOM as artifact uses: actions/upload-artifact@v4 From 0d3379f68a0099fcf31ed09533fc8add64b09e35 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 14:59:14 -0700 Subject: [PATCH 05/35] Add vulnerability scan on dev version --- .github/workflows/scan_sbom.yml | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 3fe09f2de..0a4a17da5 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -50,7 +50,7 @@ jobs: vulnerabilities: - name: Vulnerabilities + name: PyAnsys Vulnerability check (on main) runs-on: ubuntu-latest steps: - name: PyAnsys Vulnerability check (on main) @@ -59,4 +59,16 @@ jobs: with: python-version: ${{ env.MAIN_PYTHON_VERSION }} python-package-name: ${{ env.PACKAGE_NAME }} - token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} \ No newline at end of file + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + + name: PyAnsys Vulnerability check (on dev) + runs-on: ubuntu-latest + steps: + - 
name: PyAnsys Vulnerability check (on main) + if: github.ref == 'refs/heads/main' + uses: ansys/actions/check-vulnerabilities@v10.0.14 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + python-package-name: ${{ env.PACKAGE_NAME }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + dev-mode: true \ No newline at end of file From 0aac09c98e3726b1cc98e31d4a9533bf2d030387 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 15:04:10 -0700 Subject: [PATCH 06/35] Fix workflow --- .github/workflows/scan_sbom.yml | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 0a4a17da5..32b6e9df4 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -50,7 +50,7 @@ jobs: vulnerabilities: - name: PyAnsys Vulnerability check (on main) + name: Vulnerabilities runs-on: ubuntu-latest steps: - name: PyAnsys Vulnerability check (on main) @@ -61,11 +61,8 @@ jobs: python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} - name: PyAnsys Vulnerability check (on dev) - runs-on: ubuntu-latest - steps: - - name: PyAnsys Vulnerability check (on main) - if: github.ref == 'refs/heads/main' + - name: PyAnsys Vulnerability check (on dev) + if: github.ref != 'refs/heads/main' uses: ansys/actions/check-vulnerabilities@v10.0.14 with: python-version: ${{ env.MAIN_PYTHON_VERSION }} From 3fd0f373e475649d2c29000773c1efa8b7576d41 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 15:09:19 -0700 Subject: [PATCH 07/35] Add logs for vulnerability --- .github/workflows/scan_sbom.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 32b6e9df4..b326be3fa 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -60,6 +60,7 @@ jobs: python-version: ${{ env.MAIN_PYTHON_VERSION }} python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + hide-log: false - name: PyAnsys Vulnerability check (on dev) if: github.ref != 'refs/heads/main' @@ -68,4 +69,5 @@ jobs: python-version: ${{ env.MAIN_PYTHON_VERSION }} python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} - dev-mode: true \ No newline at end of file + dev-mode: true + hide-log: false \ No newline at end of file From 2bc481e21bc9578fc9307ebf56ade257b4421f5e Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 15:21:45 -0700 Subject: [PATCH 08/35] Address vulnerability scan results --- codegen/adr_utils.txt | 2 +- pyproject.toml | 2 +- .../dynamicreporting/core/adr_service.py | 6 ++-- .../dynamicreporting/core/docker_support.py | 4 +-- .../core/examples/downloads.py | 2 +- .../dynamicreporting/core/utils/encoders.py | 2 +- .../core/utils/extremely_ugly_hacks.py | 4 +-- .../dynamicreporting/core/utils/filelock.py | 2 +- .../core/utils/geofile_processing.py | 2 +- .../core/utils/report_download_html.py | 8 +++--- .../core/utils/report_download_pdf.py | 2 +- .../core/utils/report_objects.py | 18 ++++++------ .../core/utils/report_remote_server.py | 28 +++++++++---------- .../core/utils/report_utils.py | 4 +-- test_cleanup.py | 6 ++-- tests/test_download_html.py | 2 +- tests/test_geofile_processing.py | 2 +- tests/test_item.py | 2 +- tests/test_report.py | 6 ++-- tests/test_report_objects.py | 2 +- tests/test_report_remote_server.py | 12 ++++---- tests/test_report_utils.py | 10 +++---- 
tests/test_service.py | 2 +- 23 files changed, 66 insertions(+), 64 deletions(-) diff --git a/codegen/adr_utils.txt b/codegen/adr_utils.txt index 68e7053be..bbd2f02c6 100644 --- a/codegen/adr_utils.txt +++ b/codegen/adr_utils.txt @@ -41,7 +41,7 @@ def in_ipynb(): return True if "terminal" in ipy_str: return False - except Exception: # todo: please specify the possible exceptions here. + except Exception as _ : # todo: please specify the possible exceptions here. return False diff --git a/pyproject.toml b/pyproject.toml index e28320952..8c5adb3ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ dependencies = [ "django-guardian~=2.4", "tzlocal~=5.0", "numpy>=1.23.5,<3", - "python-pptx==0.6.19", + "python-pptx==0.6.23", "pandas>=2.0", "statsmodels>=0.14", "scipy<=1.15.3", # breaks ADR if not included. Remove when statsmodels is updated diff --git a/src/ansys/dynamicreporting/core/adr_service.py b/src/ansys/dynamicreporting/core/adr_service.py index e1456181c..4b22d142a 100755 --- a/src/ansys/dynamicreporting/core/adr_service.py +++ b/src/ansys/dynamicreporting/core/adr_service.py @@ -282,7 +282,7 @@ def connect( ) try: self.serverobj.validate() - except Exception: + except Exception as _ : self.logger.error("Can not validate dynamic reporting server.\n") raise NotValidServer # set url after connection succeeds @@ -392,7 +392,7 @@ def start( if self._docker_launcher: try: create_output = self._docker_launcher.create_nexus_db() - except Exception: # pragma: no cover + except Exception as _ : # pragma: no cover self._docker_launcher.cleanup() self.logger.error( f"Error creating the database at the path {self._db_directory} in the " @@ -511,7 +511,7 @@ def stop(self) -> None: v = False try: v = self.serverobj.validate() - except Exception: + except Exception as _ : pass if v is False: self.logger.error("Error validating the connected service. 
Can't shut it down.\n") diff --git a/src/ansys/dynamicreporting/core/docker_support.py b/src/ansys/dynamicreporting/core/docker_support.py index 6561b5f59..f1f278e95 100644 --- a/src/ansys/dynamicreporting/core/docker_support.py +++ b/src/ansys/dynamicreporting/core/docker_support.py @@ -64,7 +64,7 @@ def __init__(self, image_url: str | None = None, use_dev: bool = False) -> None: # Load up Docker from the user's environment try: self._client: docker.client.DockerClient = docker.from_env() - except Exception: # pragma: no cover + except Exception as _ : # pragma: no cover raise RuntimeError("Can't initialize Docker") self._container: docker.models.containers.Container = None self._image: docker.models.images.Image = None @@ -92,7 +92,7 @@ def pull_image(self) -> docker.models.images.Image: """ try: self._image = self._client.images.pull(self._image_url) - except Exception: + except Exception as _ : raise RuntimeError(f"Can't pull Docker image: {self._image_url}") return self._image diff --git a/src/ansys/dynamicreporting/core/examples/downloads.py b/src/ansys/dynamicreporting/core/examples/downloads.py index b435ee303..66c488e58 100755 --- a/src/ansys/dynamicreporting/core/examples/downloads.py +++ b/src/ansys/dynamicreporting/core/examples/downloads.py @@ -44,7 +44,7 @@ def check_url_exists(url: str) -> bool: try: with request.urlopen(url) as response: return response.status == 200 - except Exception: + except Exception as _ : return False diff --git a/src/ansys/dynamicreporting/core/utils/encoders.py b/src/ansys/dynamicreporting/core/utils/encoders.py index db53e6782..91167f6e1 100644 --- a/src/ansys/dynamicreporting/core/utils/encoders.py +++ b/src/ansys/dynamicreporting/core/utils/encoders.py @@ -34,7 +34,7 @@ def default(self, obj): cls = list if isinstance(obj, (list, tuple)) else dict try: return cls(obj) - except Exception: + except Exception as _ : pass elif hasattr(obj, "__iter__"): return tuple(item for item in obj) diff --git a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py index c6211fb2b..286c2dd80 100644 --- a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py +++ b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py @@ -55,10 +55,10 @@ def safe_unpickle(input_data, item_type=None): # be default, we follow python3's way of loading: default encoding is ascii # this will work if the data was dumped using python3's pickle. Just do the usual. data = pickle.loads(bytes_data) - except Exception: + except Exception as _ : try: data = pickle.loads(bytes_data, encoding="utf-8") - except Exception: + except Exception as _ : # if it fails, which it will if the data was dumped using python2's pickle, then: # As per https://docs.python.org/3/library/pickle.html#pickle.loads, # "Using encoding='latin1' is required for unpickling NumPy arrays and instances of datetime, diff --git a/src/ansys/dynamicreporting/core/utils/filelock.py b/src/ansys/dynamicreporting/core/utils/filelock.py index 42eeda3b2..674cbcd45 100644 --- a/src/ansys/dynamicreporting/core/utils/filelock.py +++ b/src/ansys/dynamicreporting/core/utils/filelock.py @@ -251,7 +251,7 @@ def acquire(self, timeout=None, poll_intervall=0.05): poll_intervall, ) time.sleep(poll_intervall) - except Exception: + except Exception as _ : # Something did go wrong, so decrement the counter. 
with self._thread_lock: self._lock_counter = max(0, self._lock_counter - 1) diff --git a/src/ansys/dynamicreporting/core/utils/geofile_processing.py b/src/ansys/dynamicreporting/core/utils/geofile_processing.py index 9e563ee7b..27cecb675 100644 --- a/src/ansys/dynamicreporting/core/utils/geofile_processing.py +++ b/src/ansys/dynamicreporting/core/utils/geofile_processing.py @@ -19,7 +19,7 @@ is_enve = True import enve from reports.engine import TemplateEngine -except Exception: +except Exception as _ : is_enve = False diff --git a/src/ansys/dynamicreporting/core/utils/report_download_html.py b/src/ansys/dynamicreporting/core/utils/report_download_html.py index c719c41a0..e9cf89b96 100644 --- a/src/ansys/dynamicreporting/core/utils/report_download_html.py +++ b/src/ansys/dynamicreporting/core/utils/report_download_html.py @@ -159,7 +159,7 @@ def _download_special_files(self): filename = os.path.join(self._directory, f) try: open(filename, "wb").write(resp.content) - except Exception: + except Exception as _ : print(f"Unable to download MathJax file: {f}") else: print(f"Unable to get: {url}") @@ -314,7 +314,7 @@ def _download_static_files(self, files, source_path, target_path, comment): str(filename), resp.content, self._ansys_version ) open(filename, "wb").write(data) - except Exception: + except Exception as _ : print(f"Unable to download {comment}: {f}") def _make_unique_basename(self, name: str) -> str: @@ -389,7 +389,7 @@ def _get_file(self, path_plus_queries: str, pathname: str, inline: bool = False) results = f"./media/{basename}" filename = os.path.join(self._directory, "media", basename) open(filename, "wb").write(tmp) - except Exception: + except Exception as _ : print(f"Unable to write downloaded file: {basename}") else: print(f"Unable to read file via URL: {url}") @@ -475,7 +475,7 @@ def _make_dir(subdirs): if not os.path.exists(base): try: os.makedirs(base, exist_ok=True) - except Exception: + except Exception as _ : raise OSError(f"Unable to create target directory: {base}") def _download(self): diff --git a/src/ansys/dynamicreporting/core/utils/report_download_pdf.py b/src/ansys/dynamicreporting/core/utils/report_download_pdf.py index db4794e86..169991d98 100644 --- a/src/ansys/dynamicreporting/core/utils/report_download_pdf.py +++ b/src/ansys/dynamicreporting/core/utils/report_download_pdf.py @@ -9,7 +9,7 @@ from qtpy.QtCore import QTimer has_qt = True -except Exception: +except Exception as _ : has_qt = False diff --git a/src/ansys/dynamicreporting/core/utils/report_objects.py b/src/ansys/dynamicreporting/core/utils/report_objects.py index c1e93ca4c..f8e2be976 100755 --- a/src/ansys/dynamicreporting/core/utils/report_objects.py +++ b/src/ansys/dynamicreporting/core/utils/report_objects.py @@ -173,7 +173,7 @@ def map_ensight_plot_to_table_dictionary(p): # convert EnSight undefined values into Numpy NaN values try: a[a == ensight.Undefined] = numpy.nan - except Exception: + except Exception as _ : pass max_columns = max(a.shape[1], max_columns) d = dict(array=a, yname=q.LEGENDTITLE, xname=x_axis_title) @@ -400,7 +400,7 @@ def reset_defaults(self): def get_params(self): try: return json.loads(self.params) - except Exception: + except Exception as _ : return {} def set_params(self, d: dict = None): @@ -1317,7 +1317,7 @@ def set_payload_image(self, img): else: try: from . 
import png - except Exception: + except Exception as _ : import png try: # we can only read png images as string content (not filename) @@ -1337,7 +1337,7 @@ def set_payload_image(self, img): planes=pngobj[3].get("planes", None), palette=pngobj[3].get("palette", None), ) - except Exception: + except Exception as _ : # enhanced images will fall into this case data = report_utils.PIL_image_to_data(img) self.width = data["width"] @@ -1500,13 +1500,13 @@ def add_params(self, d: dict = None): tmp_params[k] = d[k] self.params = json.dumps(tmp_params) return - except Exception: + except Exception as _ : return {} def get_params(self): try: return json.loads(self.params) - except Exception: + except Exception as _ : return {} def set_params(self, d: dict = None): @@ -1855,7 +1855,7 @@ def set_child_position(self, guid=None, value=None): raise ValueError("Error: child position array should contain only integers") try: uuid.UUID(guid, version=4) - except Exception: + except Exception as _ : raise ValueError("Error: input guid is not a valid guid") d = json.loads(self.params) if "boxes" not in d: @@ -1876,7 +1876,7 @@ def set_child_clip(self, guid=None, clip="self"): import uuid uuid.UUID(guid, version=4) - except Exception: + except Exception as _ : raise ValueError("Error: input guid is not a valid guid") d = json.loads(self.params) if "boxes" not in d: @@ -2158,7 +2158,7 @@ def set_report_link(self, link=None): d["report_guid"] = link self.params = json.dumps(d) return - except Exception: + except Exception as _ : raise ValueError("Error: input guid is not a valid guid") diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py index b58cd07eb..ba944bc5d 100755 --- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py +++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py @@ -268,7 +268,7 @@ def get_server_name(self): if self.cur_servername is None: try: self.validate() - except Exception: + except Exception as _ : pass if self.cur_servername is None: return self.get_URL() @@ -293,7 +293,7 @@ def stop_server_allowed(self): result = self._http_session.get(url, auth=auth) if not result.ok: return False - except Exception: + except Exception as _ : return False return True @@ -306,7 +306,7 @@ def stop_local_server(self): try: # note this request will fail as it does not return anything!!! 
self._http_session.get(url, auth=auth) - except Exception: + except Exception as _ : pass self.set_URL(None) self.set_password(None) @@ -328,7 +328,7 @@ def get_user_groups(self): return [] try: return [str(obj_data.get("name")) for obj_data in r.json()] - except Exception: + except Exception as _ : return [] def get_object_guids(self, objtype=report_objects.Template, query=None): @@ -355,7 +355,7 @@ def get_object_guids(self, objtype=report_objects.Template, query=None): return [str(obj_data.get("guid")) for obj_data in r.json()] else: return [str(i) for i in r.json()["guid_list"]] - except Exception: + except Exception as _ : return [] def get_objects(self, objtype=report_objects.Template, query=None): @@ -390,7 +390,7 @@ def get_objects(self, objtype=report_objects.Template, query=None): t.from_json(d) ret.append(t) return ret - except Exception: + except Exception as _ : return [] def get_object_from_guid(self, guid, objtype=report_objects.TemplateREST): @@ -415,7 +415,7 @@ def get_object_from_guid(self, guid, objtype=report_objects.TemplateREST): obj.server_api_version = self.api_version obj.from_json(r.json()) return obj - except Exception: + except Exception as _ : return None def _get_push_request_info(self, obj): @@ -543,7 +543,7 @@ def put_objects(self, in_objects): url = self.cur_url + file_data[0] try: r = self._http_session.put(url, auth=auth, files=files) - except Exception: + except Exception as _ : r = self._http_session.Response() r.status_code = requests.codes.client_closed_request ret = r.status_code @@ -1227,7 +1227,7 @@ def create_new_local_database( group.user_set.add(user) group.save() os.makedirs(os.path.join(db_dir, "media")) - except Exception: + except Exception as _ : error = True if parent and has_qt: QtWidgets.QApplication.restoreOverrideCursor() @@ -1384,7 +1384,7 @@ def validate_local_db_version(db_dir, version_max=None, version_min=None): return False if number < version_min: return False - except Exception: + except Exception as _ : return False return True @@ -1536,7 +1536,7 @@ def launch_local_database_server( # create a file lock local_lock = filelock.nexus_file_lock(api_lock_filename) local_lock.acquire() - except Exception: + except Exception as _ : pass # We may need to do port scanning if port is None: @@ -1546,7 +1546,7 @@ def launch_local_database_server( # create a file lock scanning_lock = filelock.nexus_file_lock(lock_filename) scanning_lock.acquire() - except Exception: + except Exception as _ : pass # Note: QWebEngineView cannot access http over 65535, so limit ports to 65534 ports = report_utils.find_unused_ports(1) @@ -1675,7 +1675,7 @@ def launch_local_database_server( "There appears to be a local Nexus server already running on that port.\nPlease stop that server first or select a different port." ) return False - except Exception: + except Exception as _ : pass # Start the busy cursor @@ -1806,7 +1806,7 @@ def launch_local_database_server( raise exceptions.ServerConnectionError( "Access to server denied. Potential username/password error." 
) - except Exception: + except Exception as _ : # we will try again pass diff --git a/src/ansys/dynamicreporting/core/utils/report_utils.py b/src/ansys/dynamicreporting/core/utils/report_utils.py index bf178bc4c..3c979fd02 100644 --- a/src/ansys/dynamicreporting/core/utils/report_utils.py +++ b/src/ansys/dynamicreporting/core/utils/report_utils.py @@ -77,7 +77,7 @@ def check_if_PIL(img): elif imgbytes: Image.open(io.BytesIO(imgbytes)) return True - except Exception: + except Exception as _ : return False finally: if imghandle: @@ -313,7 +313,7 @@ def ceiversion_nexus_suffix(): tmp = ansys_version.replace("R", "")[-3:] return str(tmp) - except Exception: + except Exception as _ : # get "nexus###" folder name and then strip off the "nexus" bit tmp = os.path.basename(os.path.dirname(os.path.dirname(__file__))) return tmp[5:] diff --git a/test_cleanup.py b/test_cleanup.py index 9c4ff6a59..4a35c71aa 100644 --- a/test_cleanup.py +++ b/test_cleanup.py @@ -27,7 +27,8 @@ for i_dir in dir_list: try: shutil.rmtree(i_dir) - except Exception: + except Exception as _ : + print(f"Skipping {i_dir}") pass @@ -45,5 +46,6 @@ for i_file in file_list: try: os.remove(i_file) - except Exception: + except Exception as _ : + print(f"Skipping {i_file}") pass diff --git a/tests/test_download_html.py b/tests/test_download_html.py index ca71a4ba0..56ff426d4 100755 --- a/tests/test_download_html.py +++ b/tests/test_download_html.py @@ -44,7 +44,7 @@ def test_download_sqlite(request, adr_service_query) -> None: try: a.download() success = False - except Exception: + except Exception as _ : success = True assert success diff --git a/tests/test_geofile_processing.py b/tests/test_geofile_processing.py index 18e811467..942d208af 100755 --- a/tests/test_geofile_processing.py +++ b/tests/test_geofile_processing.py @@ -22,7 +22,7 @@ def test_get_evsn_proxy_image(request) -> None: try: _ = gp.get_evsn_proxy_image(filename=return_file_paths(request)[6]) success = True - except Exception: + except Exception as _ : success = False assert (_ is None) and success diff --git a/tests/test_item.py b/tests/test_item.py index 7ad5f6565..4b96e90cc 100644 --- a/tests/test_item.py +++ b/tests/test_item.py @@ -354,6 +354,6 @@ def test_unit_item_empty_nexus(request) -> None: a = Service() try: _ = Item(service=a) - except Exception: + except Exception as _ : valid = True assert valid diff --git a/tests/test_report.py b/tests/test_report.py index 2e2ec8933..5bbc0254d 100755 --- a/tests/test_report.py +++ b/tests/test_report.py @@ -148,7 +148,7 @@ def test_save_as_pdf(adr_service_query, request, get_exec) -> None: my_report = adr_service_query.get_report(report_name="My Top Report") pdf_file = os.path.join(request.fspath.dirname, "again_mytest") success = my_report.export_pdf(file_name=pdf_file) - except Exception: + except Exception as _ : success = False else: # If no local installation, then skip this test success = True @@ -164,7 +164,7 @@ def test_save_as_pdf_with_filter(adr_service_query, request, get_exec) -> None: my_report = adr_service_query.get_report(report_name="My Top Report") pdf_file = os.path.join(request.fspath.dirname, "again_mytest_filter") success = my_report.export_pdf(file_name=pdf_file, item_filter="A|i_type|cont|image;") - except Exception: + except Exception as _ : success = False else: # If no local installation, then skip this test success = True @@ -177,7 +177,7 @@ def test_save_as_html(adr_service_query) -> None: try: my_report = adr_service_query.get_report(report_name="My Top Report") success = 
my_report.export_html(directory_name="htmltest_again") - except Exception: + except Exception as _ : success = False assert success is True diff --git a/tests/test_report_objects.py b/tests/test_report_objects.py index 50a899b53..0167ea4e5 100755 --- a/tests/test_report_objects.py +++ b/tests/test_report_objects.py @@ -2036,6 +2036,6 @@ def test_item_payload(adr_service_query) -> None: for i in adr_service_query.query(): _ = i.item.get_payload_content(as_list=True) succ = True - except Exception: + except Exception as _ : succ = False assert succ diff --git a/tests/test_report_remote_server.py b/tests/test_report_remote_server.py index 5fb285cc2..9fcc98717 100755 --- a/tests/test_report_remote_server.py +++ b/tests/test_report_remote_server.py @@ -43,7 +43,7 @@ def test_copy_item(adr_service_query, tmp_path, get_exec) -> None: progress=False, progress_qt=False, ) - except Exception: + except Exception as _ : success = False finally: tmp_adr.stop() @@ -86,7 +86,7 @@ def test_start_stop(tmp_path, get_exec) -> None: ) _ = r.validate_local_db(db_dir=db_dir, version_check=True) r.stop_background_local_server(server_dirname=db_dir) - except Exception: + except Exception as _ : succ = False assert succ @@ -95,7 +95,7 @@ def test_validate_existing(adr_service_query) -> None: succ = True try: _ = r.validate_local_db(db_dir=adr_service_query._db_directory, version_check=True) - except Exception: + except Exception as _ : succ = False assert succ @@ -243,7 +243,7 @@ def test_delete_db(tmp_path, get_exec) -> None: try: r.delete_database(db_dir=db_dir) succ = True - except Exception: + except Exception as _ : succ = False assert succ @@ -338,7 +338,7 @@ def test_export_pptx_error(adr_service_query) -> None: try: # exports the root report instead of the pptx link. 
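# For reference: the surrounding tests keep their try/except plus success-flag style.
# The same expectation written with pytest's raises idiom would look like the sketch
# below. This is not what this suite does; `serverobj` as the source of `s` and the
# broad Exception type are assumptions that mirror the flag-based check.
#
#     import pytest
#
#     def test_export_pptx_error_alt(adr_service_query) -> None:
#         my_report = adr_service_query.get_report(report_name="My Top Report")
#         s = adr_service_query.serverobj
#         # exporting the root report instead of the pptx link is expected to fail
#         with pytest.raises(Exception):
#             s.export_report_as_pptx(
#                 report_guid=my_report.report.guid, file_name="mypresentation"
#             )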
s.export_report_as_pptx(report_guid=my_report.report.guid, file_name="mypresentation") - except Exception: + except Exception as _ : success = True assert success is True @@ -350,7 +350,7 @@ def test_get_pptx(adr_service_query, tmp_path) -> None: try: # scrape all pptx reports from root report s.get_pptx_from_report(report_guid=my_report.report.guid, directory_name=db_dir, query=None) - except Exception: + except Exception as _ : success = False else: success = True diff --git a/tests/test_report_utils.py b/tests/test_report_utils.py index ccbf8225e..49462df8a 100755 --- a/tests/test_report_utils.py +++ b/tests/test_report_utils.py @@ -54,7 +54,7 @@ def test_ceiversion_nexus_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception: + except Exception as _ : success = False assert success and int_suffix / 100 < 10 @@ -65,7 +65,7 @@ def test_ceiversion_apex_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception: + except Exception as _ : success = False assert success and int_suffix / 100 < 10 @@ -76,7 +76,7 @@ def test_ceiversion_ensight_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception: + except Exception as _ : success = False assert success and int_suffix / 100 < 10 @@ -163,7 +163,7 @@ def test_narray() -> None: a.from_numpy(value=np.array(object=None, dtype="S2")) a.unit_test() success = True - except Exception: + except Exception as _ : success = False assert success @@ -192,7 +192,7 @@ def test_settings() -> None: try: _ = ru.Settings(defaults={"a": 1, "b": 2}) success = True - except Exception: + except Exception as _ : success = False assert success diff --git a/tests/test_service.py b/tests/test_service.py index 5505c50f8..d7815fa0c 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -85,7 +85,7 @@ def test_unit_createitem() -> None: valid = False try: a.create_item() - except Exception: + except Exception as _ : valid = True assert valid From 77f5aac80ff5ab2c071eaf0136ae48d0d608bf2b Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 15:30:15 -0700 Subject: [PATCH 09/35] Address vulnerability scan results --- codegen/adr_utils.txt | 2 +- src/ansys/dynamicreporting/core/adr_service.py | 16 ++++++++-------- .../dynamicreporting/core/docker_support.py | 4 ++-- .../dynamicreporting/core/examples/downloads.py | 3 ++- .../dynamicreporting/core/utils/encoders.py | 2 +- .../core/utils/extremely_ugly_hacks.py | 4 ++-- .../dynamicreporting/core/utils/filelock.py | 2 +- .../core/utils/geofile_processing.py | 2 +- .../core/utils/report_download_html.py | 16 ++++++++-------- .../core/utils/report_download_pdf.py | 2 +- .../core/utils/report_objects.py | 8 ++++---- test_cleanup.py | 8 ++++---- 12 files changed, 35 insertions(+), 34 deletions(-) diff --git a/codegen/adr_utils.txt b/codegen/adr_utils.txt index bbd2f02c6..b6af61349 100644 --- a/codegen/adr_utils.txt +++ b/codegen/adr_utils.txt @@ -41,7 +41,7 @@ def in_ipynb(): return True if "terminal" in ipy_str: return False - except Exception as _ : # todo: please specify the possible exceptions here. + except Exception as e: # todo: please specify the possible exceptions here. 
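# A minimal sketch of the pattern these patches apply across the code base: bind the
# caught exception and report it before falling back, rather than swallowing it with a
# bare `except Exception:`. The module-level logger is an assumption; the repository
# itself variously uses self.logger, logging.debug, or a print_allowed()-gated print.
#
#     import logging
#
#     logger = logging.getLogger(__name__)
#
#     try:
#         result = do_work()                # placeholder for the guarded call
#     except Exception as e:                # keep a reference to the failure
#         logger.error(f"Error: {str(e)}")  # surface it instead of discarding it
#         result = None                     # then fall back as before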
return False diff --git a/src/ansys/dynamicreporting/core/adr_service.py b/src/ansys/dynamicreporting/core/adr_service.py index 4b22d142a..7de0bd39b 100755 --- a/src/ansys/dynamicreporting/core/adr_service.py +++ b/src/ansys/dynamicreporting/core/adr_service.py @@ -282,8 +282,8 @@ def connect( ) try: self.serverobj.validate() - except Exception as _ : - self.logger.error("Can not validate dynamic reporting server.\n") + except Exception as e: + self.logger.error(f"Can not validate dynamic reporting server.\nError: {str(e)}") raise NotValidServer # set url after connection succeeds self._url = url @@ -392,11 +392,11 @@ def start( if self._docker_launcher: try: create_output = self._docker_launcher.create_nexus_db() - except Exception as _ : # pragma: no cover + except Exception as e: # pragma: no cover self._docker_launcher.cleanup() self.logger.error( - f"Error creating the database at the path {self._db_directory} in the " - "Docker container.\n" + "Error creating the database at the path {self._db_directory} in the " + f"Docker container.\nError: {str(e)}" ) raise CannotCreateDatabaseError for f in ["db.sqlite3", "view_report.nexdb"]: @@ -511,10 +511,10 @@ def stop(self) -> None: v = False try: v = self.serverobj.validate() - except Exception as _ : + except Exception as e: pass if v is False: - self.logger.error("Error validating the connected service. Can't shut it down.\n") + self.logger.error(f"Error validating the connected service. Can't shut it down.\nError: {str(e)}") else: # If coming from a docker image, clean that up try: @@ -814,7 +814,7 @@ def delete(self, items: list) -> None: try: _ = self.serverobj.del_objects(items_to_delete) except Exception as e: - self.logger.warning(f"Error in deleting items: {e}") + self.logger.warning(f"Error in deleting items: {str(e)}") def get_report(self, report_name: str) -> Report: """ diff --git a/src/ansys/dynamicreporting/core/docker_support.py b/src/ansys/dynamicreporting/core/docker_support.py index f1f278e95..432a119ca 100644 --- a/src/ansys/dynamicreporting/core/docker_support.py +++ b/src/ansys/dynamicreporting/core/docker_support.py @@ -64,7 +64,7 @@ def __init__(self, image_url: str | None = None, use_dev: bool = False) -> None: # Load up Docker from the user's environment try: self._client: docker.client.DockerClient = docker.from_env() - except Exception as _ : # pragma: no cover + except Exception as e: # pragma: no cover raise RuntimeError("Can't initialize Docker") self._container: docker.models.containers.Container = None self._image: docker.models.images.Image = None @@ -92,7 +92,7 @@ def pull_image(self) -> docker.models.images.Image: """ try: self._image = self._client.images.pull(self._image_url) - except Exception as _ : + except Exception as e: raise RuntimeError(f"Can't pull Docker image: {self._image_url}") return self._image diff --git a/src/ansys/dynamicreporting/core/examples/downloads.py b/src/ansys/dynamicreporting/core/examples/downloads.py index 66c488e58..2a0f620b9 100755 --- a/src/ansys/dynamicreporting/core/examples/downloads.py +++ b/src/ansys/dynamicreporting/core/examples/downloads.py @@ -44,7 +44,8 @@ def check_url_exists(url: str) -> bool: try: with request.urlopen(url) as response: return response.status == 200 - except Exception as _ : + except Exception as e: + logging.debug(f"Check url error: {str(e)}\n") return False diff --git a/src/ansys/dynamicreporting/core/utils/encoders.py b/src/ansys/dynamicreporting/core/utils/encoders.py index 91167f6e1..baf37991d 100644 --- 
a/src/ansys/dynamicreporting/core/utils/encoders.py +++ b/src/ansys/dynamicreporting/core/utils/encoders.py @@ -34,7 +34,7 @@ def default(self, obj): cls = list if isinstance(obj, (list, tuple)) else dict try: return cls(obj) - except Exception as _ : + except Exception as e: pass elif hasattr(obj, "__iter__"): return tuple(item for item in obj) diff --git a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py index 286c2dd80..f08de2f03 100644 --- a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py +++ b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py @@ -55,10 +55,10 @@ def safe_unpickle(input_data, item_type=None): # be default, we follow python3's way of loading: default encoding is ascii # this will work if the data was dumped using python3's pickle. Just do the usual. data = pickle.loads(bytes_data) - except Exception as _ : + except Exception as e: try: data = pickle.loads(bytes_data, encoding="utf-8") - except Exception as _ : + except Exception as e: # if it fails, which it will if the data was dumped using python2's pickle, then: # As per https://docs.python.org/3/library/pickle.html#pickle.loads, # "Using encoding='latin1' is required for unpickling NumPy arrays and instances of datetime, diff --git a/src/ansys/dynamicreporting/core/utils/filelock.py b/src/ansys/dynamicreporting/core/utils/filelock.py index 674cbcd45..250aa8b53 100644 --- a/src/ansys/dynamicreporting/core/utils/filelock.py +++ b/src/ansys/dynamicreporting/core/utils/filelock.py @@ -251,7 +251,7 @@ def acquire(self, timeout=None, poll_intervall=0.05): poll_intervall, ) time.sleep(poll_intervall) - except Exception as _ : + except Exception as e: # Something did go wrong, so decrement the counter. 
with self._thread_lock: self._lock_counter = max(0, self._lock_counter - 1) diff --git a/src/ansys/dynamicreporting/core/utils/geofile_processing.py b/src/ansys/dynamicreporting/core/utils/geofile_processing.py index 27cecb675..1a406e402 100644 --- a/src/ansys/dynamicreporting/core/utils/geofile_processing.py +++ b/src/ansys/dynamicreporting/core/utils/geofile_processing.py @@ -19,7 +19,7 @@ is_enve = True import enve from reports.engine import TemplateEngine -except Exception as _ : +except Exception as e: is_enve = False diff --git a/src/ansys/dynamicreporting/core/utils/report_download_html.py b/src/ansys/dynamicreporting/core/utils/report_download_html.py index e9cf89b96..5e2c97404 100644 --- a/src/ansys/dynamicreporting/core/utils/report_download_html.py +++ b/src/ansys/dynamicreporting/core/utils/report_download_html.py @@ -159,8 +159,8 @@ def _download_special_files(self): filename = os.path.join(self._directory, f) try: open(filename, "wb").write(resp.content) - except Exception as _ : - print(f"Unable to download MathJax file: {f}") + except Exception as e: + print(f"Unable to download MathJax file: {f}\nError {str(e)}") else: print(f"Unable to get: {url}") @@ -314,8 +314,8 @@ def _download_static_files(self, files, source_path, target_path, comment): str(filename), resp.content, self._ansys_version ) open(filename, "wb").write(data) - except Exception as _ : - print(f"Unable to download {comment}: {f}") + except Exception as e: + print(f"Unable to download {comment}: {f}\nError: {e}") def _make_unique_basename(self, name: str) -> str: # check to see if the filename has already been used (and hence we are headed toward @@ -389,8 +389,8 @@ def _get_file(self, path_plus_queries: str, pathname: str, inline: bool = False) results = f"./media/{basename}" filename = os.path.join(self._directory, "media", basename) open(filename, "wb").write(tmp) - except Exception as _ : - print(f"Unable to write downloaded file: {basename}") + except Exception as e: + print(f"Unable to write downloaded file: {basename}\nError: {str(e)}") else: print(f"Unable to read file via URL: {url}") self._filemap[pathname] = results @@ -475,8 +475,8 @@ def _make_dir(subdirs): if not os.path.exists(base): try: os.makedirs(base, exist_ok=True) - except Exception as _ : - raise OSError(f"Unable to create target directory: {base}") + except Exception as e: + raise OSError(f"Unable to create target directory: {base}\nError: {str(e)}") def _download(self): self._filemap = dict() diff --git a/src/ansys/dynamicreporting/core/utils/report_download_pdf.py b/src/ansys/dynamicreporting/core/utils/report_download_pdf.py index 169991d98..4cae3b571 100644 --- a/src/ansys/dynamicreporting/core/utils/report_download_pdf.py +++ b/src/ansys/dynamicreporting/core/utils/report_download_pdf.py @@ -9,7 +9,7 @@ from qtpy.QtCore import QTimer has_qt = True -except Exception as _ : +except Exception as e: has_qt = False diff --git a/src/ansys/dynamicreporting/core/utils/report_objects.py b/src/ansys/dynamicreporting/core/utils/report_objects.py index f8e2be976..f7093e26e 100755 --- a/src/ansys/dynamicreporting/core/utils/report_objects.py +++ b/src/ansys/dynamicreporting/core/utils/report_objects.py @@ -173,7 +173,7 @@ def map_ensight_plot_to_table_dictionary(p): # convert EnSight undefined values into Numpy NaN values try: a[a == ensight.Undefined] = numpy.nan - except Exception as _ : + except Exception as e: pass max_columns = max(a.shape[1], max_columns) d = dict(array=a, yname=q.LEGENDTITLE, xname=x_axis_title) @@ -400,7 +400,7 @@ def 
reset_defaults(self): def get_params(self): try: return json.loads(self.params) - except Exception as _ : + except Exception as e: return {} def set_params(self, d: dict = None): @@ -1317,7 +1317,7 @@ def set_payload_image(self, img): else: try: from . import png - except Exception as _ : + except Exception as e: import png try: # we can only read png images as string content (not filename) @@ -1337,7 +1337,7 @@ def set_payload_image(self, img): planes=pngobj[3].get("planes", None), palette=pngobj[3].get("palette", None), ) - except Exception as _ : + except Exception as e: # enhanced images will fall into this case data = report_utils.PIL_image_to_data(img) self.width = data["width"] diff --git a/test_cleanup.py b/test_cleanup.py index 4a35c71aa..37b758140 100644 --- a/test_cleanup.py +++ b/test_cleanup.py @@ -27,8 +27,8 @@ for i_dir in dir_list: try: shutil.rmtree(i_dir) - except Exception as _ : - print(f"Skipping {i_dir}") + except Exception as e: + print(f"Skipping {i_dir} with error {e}") pass @@ -46,6 +46,6 @@ for i_file in file_list: try: os.remove(i_file) - except Exception as _ : - print(f"Skipping {i_file}") + except Exception as e: + print(f"Skipping {i_file} with error {e}") pass From df67a27ce81a1805e646fe0286137381a38dd915 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 15:36:23 -0700 Subject: [PATCH 10/35] Add logs for vulnerability --- .../core/utils/report_remote_server.py | 56 ++++++++++++++----- 1 file changed, 42 insertions(+), 14 deletions(-) diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py index ba944bc5d..8caeee87b 100755 --- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py +++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py @@ -268,7 +268,9 @@ def get_server_name(self): if self.cur_servername is None: try: self.validate() - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") pass if self.cur_servername is None: return self.get_URL() @@ -293,7 +295,9 @@ def stop_server_allowed(self): result = self._http_session.get(url, auth=auth) if not result.ok: return False - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") return False return True @@ -306,7 +310,9 @@ def stop_local_server(self): try: # note this request will fail as it does not return anything!!! 
self._http_session.get(url, auth=auth) - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") pass self.set_URL(None) self.set_password(None) @@ -328,7 +334,9 @@ def get_user_groups(self): return [] try: return [str(obj_data.get("name")) for obj_data in r.json()] - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") return [] def get_object_guids(self, objtype=report_objects.Template, query=None): @@ -355,7 +363,9 @@ def get_object_guids(self, objtype=report_objects.Template, query=None): return [str(obj_data.get("guid")) for obj_data in r.json()] else: return [str(i) for i in r.json()["guid_list"]] - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") return [] def get_objects(self, objtype=report_objects.Template, query=None): @@ -390,7 +400,9 @@ def get_objects(self, objtype=report_objects.Template, query=None): t.from_json(d) ret.append(t) return ret - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") return [] def get_object_from_guid(self, guid, objtype=report_objects.TemplateREST): @@ -415,7 +427,9 @@ def get_object_from_guid(self, guid, objtype=report_objects.TemplateREST): obj.server_api_version = self.api_version obj.from_json(r.json()) return obj - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") return None def _get_push_request_info(self, obj): @@ -543,7 +557,9 @@ def put_objects(self, in_objects): url = self.cur_url + file_data[0] try: r = self._http_session.put(url, auth=auth, files=files) - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") r = self._http_session.Response() r.status_code = requests.codes.client_closed_request ret = r.status_code @@ -1227,7 +1243,9 @@ def create_new_local_database( group.user_set.add(user) group.save() os.makedirs(os.path.join(db_dir, "media")) - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") error = True if parent and has_qt: QtWidgets.QApplication.restoreOverrideCursor() @@ -1384,7 +1402,9 @@ def validate_local_db_version(db_dir, version_max=None, version_min=None): return False if number < version_min: return False - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") return False return True @@ -1536,7 +1556,9 @@ def launch_local_database_server( # create a file lock local_lock = filelock.nexus_file_lock(api_lock_filename) local_lock.acquire() - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") pass # We may need to do port scanning if port is None: @@ -1546,7 +1568,9 @@ def launch_local_database_server( # create a file lock scanning_lock = filelock.nexus_file_lock(lock_filename) scanning_lock.acquire() - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") pass # Note: QWebEngineView cannot access http over 65535, so limit ports to 65534 ports = report_utils.find_unused_ports(1) @@ -1675,7 +1699,9 @@ def launch_local_database_server( "There appears to be a local Nexus server already running on that port.\nPlease stop that server first or select a different port." 
) return False - except Exception as _ : + except Exception as e: + if print_allowed(): + print(f"Error: {e}") pass # Start the busy cursor @@ -1806,8 +1832,10 @@ def launch_local_database_server( raise exceptions.ServerConnectionError( "Access to server denied. Potential username/password error." ) - except Exception as _ : + except Exception as e: # we will try again + if print_allowed(): + print(f"Error: {e}") pass # detach from stdout, stderr to avoid buffer blocking From cfe1ed41ecf945629b863776cd7371693594d2bb Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 15:52:54 -0700 Subject: [PATCH 11/35] Fix syntax --- .../dynamicreporting/core/adr_service.py | 4 ++- .../dynamicreporting/core/utils/encoders.py | 2 +- .../core/utils/report_objects.py | 11 +++--- .../core/utils/report_remote_server.py | 36 +++++++++---------- .../core/utils/report_utils.py | 4 +-- tests/test_download_html.py | 2 +- tests/test_geofile_processing.py | 2 +- tests/test_item.py | 2 +- tests/test_report.py | 6 ++-- tests/test_report_objects.py | 2 +- tests/test_report_remote_server.py | 12 +++---- tests/test_report_utils.py | 10 +++--- tests/test_service.py | 2 +- 13 files changed, 48 insertions(+), 47 deletions(-) diff --git a/src/ansys/dynamicreporting/core/adr_service.py b/src/ansys/dynamicreporting/core/adr_service.py index 7de0bd39b..42b30fbdf 100755 --- a/src/ansys/dynamicreporting/core/adr_service.py +++ b/src/ansys/dynamicreporting/core/adr_service.py @@ -514,7 +514,9 @@ def stop(self) -> None: except Exception as e: pass if v is False: - self.logger.error(f"Error validating the connected service. Can't shut it down.\nError: {str(e)}") + self.logger.error( + f"Error validating the connected service. Can't shut it down.\nError: {str(e)}" + ) else: # If coming from a docker image, clean that up try: diff --git a/src/ansys/dynamicreporting/core/utils/encoders.py b/src/ansys/dynamicreporting/core/utils/encoders.py index baf37991d..ede48b89d 100644 --- a/src/ansys/dynamicreporting/core/utils/encoders.py +++ b/src/ansys/dynamicreporting/core/utils/encoders.py @@ -34,7 +34,7 @@ def default(self, obj): cls = list if isinstance(obj, (list, tuple)) else dict try: return cls(obj) - except Exception as e: + except Exception as e: # nosec pass elif hasattr(obj, "__iter__"): return tuple(item for item in obj) diff --git a/src/ansys/dynamicreporting/core/utils/report_objects.py b/src/ansys/dynamicreporting/core/utils/report_objects.py index f7093e26e..2071b55c8 100755 --- a/src/ansys/dynamicreporting/core/utils/report_objects.py +++ b/src/ansys/dynamicreporting/core/utils/report_objects.py @@ -174,6 +174,7 @@ def map_ensight_plot_to_table_dictionary(p): try: a[a == ensight.Undefined] = numpy.nan except Exception as e: + logger.error(f"Error: {str(e)}.\n") pass max_columns = max(a.shape[1], max_columns) d = dict(array=a, yname=q.LEGENDTITLE, xname=x_axis_title) @@ -1500,13 +1501,13 @@ def add_params(self, d: dict = None): tmp_params[k] = d[k] self.params = json.dumps(tmp_params) return - except Exception as _ : + except Exception as _: return {} def get_params(self): try: return json.loads(self.params) - except Exception as _ : + except Exception as _: return {} def set_params(self, d: dict = None): @@ -1855,7 +1856,7 @@ def set_child_position(self, guid=None, value=None): raise ValueError("Error: child position array should contain only integers") try: uuid.UUID(guid, version=4) - except Exception as _ : + except Exception as _: raise ValueError("Error: input guid is not a valid guid") d = 
json.loads(self.params) if "boxes" not in d: @@ -1876,7 +1877,7 @@ def set_child_clip(self, guid=None, clip="self"): import uuid uuid.UUID(guid, version=4) - except Exception as _ : + except Exception as _: raise ValueError("Error: input guid is not a valid guid") d = json.loads(self.params) if "boxes" not in d: @@ -2158,7 +2159,7 @@ def set_report_link(self, link=None): d["report_guid"] = link self.params = json.dumps(d) return - except Exception as _ : + except Exception as _: raise ValueError("Error: input guid is not a valid guid") diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py index 8caeee87b..71e1f93f3 100755 --- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py +++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py @@ -270,7 +270,7 @@ def get_server_name(self): self.validate() except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") pass if self.cur_servername is None: return self.get_URL() @@ -297,7 +297,7 @@ def stop_server_allowed(self): return False except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") return False return True @@ -312,7 +312,7 @@ def stop_local_server(self): self._http_session.get(url, auth=auth) except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") pass self.set_URL(None) self.set_password(None) @@ -336,7 +336,7 @@ def get_user_groups(self): return [str(obj_data.get("name")) for obj_data in r.json()] except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") return [] def get_object_guids(self, objtype=report_objects.Template, query=None): @@ -365,7 +365,7 @@ def get_object_guids(self, objtype=report_objects.Template, query=None): return [str(i) for i in r.json()["guid_list"]] except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") return [] def get_objects(self, objtype=report_objects.Template, query=None): @@ -402,7 +402,7 @@ def get_objects(self, objtype=report_objects.Template, query=None): return ret except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") return [] def get_object_from_guid(self, guid, objtype=report_objects.TemplateREST): @@ -429,7 +429,7 @@ def get_object_from_guid(self, guid, objtype=report_objects.TemplateREST): return obj except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") return None def _get_push_request_info(self, obj): @@ -559,7 +559,7 @@ def put_objects(self, in_objects): r = self._http_session.put(url, auth=auth, files=files) except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") r = self._http_session.Response() r.status_code = requests.codes.client_closed_request ret = r.status_code @@ -1245,7 +1245,7 @@ def create_new_local_database( os.makedirs(os.path.join(db_dir, "media")) except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") error = True if parent and has_qt: QtWidgets.QApplication.restoreOverrideCursor() @@ -1404,7 +1404,7 @@ def validate_local_db_version(db_dir, version_max=None, version_min=None): return False except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") return False return True @@ -1552,16 +1552,14 @@ def launch_local_database_server( # .nexus.lock is held whenever port scanning is going on. 
It can be held by this function or by nexus_launcher # .nexus_api.lock is used by the Python API to ensure exclusivity (e.g. while a server is launching) local_lock = None - try: + try: # nosec # create a file lock local_lock = filelock.nexus_file_lock(api_lock_filename) local_lock.acquire() - except Exception as e: - if print_allowed(): - print(f"Error: {e}") + except Exception as e: # nosec pass # We may need to do port scanning - if port is None: + if port is None: # nosec lock_filename = os.path.join(homedir, ".nexus.lock") scanning_lock = None try: @@ -1569,8 +1567,6 @@ def launch_local_database_server( scanning_lock = filelock.nexus_file_lock(lock_filename) scanning_lock.acquire() except Exception as e: - if print_allowed(): - print(f"Error: {e}") pass # Note: QWebEngineView cannot access http over 65535, so limit ports to 65534 ports = report_utils.find_unused_ports(1) @@ -1701,7 +1697,7 @@ def launch_local_database_server( return False except Exception as e: if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") pass # Start the busy cursor @@ -1773,6 +1769,8 @@ def launch_local_database_server( # Note: this process only returns if the server is shutdown or there is an error monitor_process = subprocess.Popen(command, **params) except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") if parent and has_qt: QtWidgets.QApplication.restoreOverrideCursor() msg = QtWidgets.QApplication.translate( @@ -1835,7 +1833,7 @@ def launch_local_database_server( except Exception as e: # we will try again if print_allowed(): - print(f"Error: {e}") + print(f"Error: {str(e)}") pass # detach from stdout, stderr to avoid buffer blocking diff --git a/src/ansys/dynamicreporting/core/utils/report_utils.py b/src/ansys/dynamicreporting/core/utils/report_utils.py index 3c979fd02..82d5f11b2 100644 --- a/src/ansys/dynamicreporting/core/utils/report_utils.py +++ b/src/ansys/dynamicreporting/core/utils/report_utils.py @@ -77,7 +77,7 @@ def check_if_PIL(img): elif imgbytes: Image.open(io.BytesIO(imgbytes)) return True - except Exception as _ : + except Exception as _: return False finally: if imghandle: @@ -313,7 +313,7 @@ def ceiversion_nexus_suffix(): tmp = ansys_version.replace("R", "")[-3:] return str(tmp) - except Exception as _ : + except Exception as _: # get "nexus###" folder name and then strip off the "nexus" bit tmp = os.path.basename(os.path.dirname(os.path.dirname(__file__))) return tmp[5:] diff --git a/tests/test_download_html.py b/tests/test_download_html.py index 56ff426d4..bed828a4c 100755 --- a/tests/test_download_html.py +++ b/tests/test_download_html.py @@ -44,7 +44,7 @@ def test_download_sqlite(request, adr_service_query) -> None: try: a.download() success = False - except Exception as _ : + except Exception as _: success = True assert success diff --git a/tests/test_geofile_processing.py b/tests/test_geofile_processing.py index 942d208af..fa5e67c75 100755 --- a/tests/test_geofile_processing.py +++ b/tests/test_geofile_processing.py @@ -22,7 +22,7 @@ def test_get_evsn_proxy_image(request) -> None: try: _ = gp.get_evsn_proxy_image(filename=return_file_paths(request)[6]) success = True - except Exception as _ : + except Exception as _: success = False assert (_ is None) and success diff --git a/tests/test_item.py b/tests/test_item.py index 4b96e90cc..e0dd3ff0a 100644 --- a/tests/test_item.py +++ b/tests/test_item.py @@ -354,6 +354,6 @@ def test_unit_item_empty_nexus(request) -> None: a = Service() try: _ = Item(service=a) - except Exception as _ : + except 
Exception as _: valid = True assert valid diff --git a/tests/test_report.py b/tests/test_report.py index 5bbc0254d..2b4b02200 100755 --- a/tests/test_report.py +++ b/tests/test_report.py @@ -148,7 +148,7 @@ def test_save_as_pdf(adr_service_query, request, get_exec) -> None: my_report = adr_service_query.get_report(report_name="My Top Report") pdf_file = os.path.join(request.fspath.dirname, "again_mytest") success = my_report.export_pdf(file_name=pdf_file) - except Exception as _ : + except Exception as _: success = False else: # If no local installation, then skip this test success = True @@ -164,7 +164,7 @@ def test_save_as_pdf_with_filter(adr_service_query, request, get_exec) -> None: my_report = adr_service_query.get_report(report_name="My Top Report") pdf_file = os.path.join(request.fspath.dirname, "again_mytest_filter") success = my_report.export_pdf(file_name=pdf_file, item_filter="A|i_type|cont|image;") - except Exception as _ : + except Exception as _: success = False else: # If no local installation, then skip this test success = True @@ -177,7 +177,7 @@ def test_save_as_html(adr_service_query) -> None: try: my_report = adr_service_query.get_report(report_name="My Top Report") success = my_report.export_html(directory_name="htmltest_again") - except Exception as _ : + except Exception as _: success = False assert success is True diff --git a/tests/test_report_objects.py b/tests/test_report_objects.py index 0167ea4e5..36f01e2cb 100755 --- a/tests/test_report_objects.py +++ b/tests/test_report_objects.py @@ -2036,6 +2036,6 @@ def test_item_payload(adr_service_query) -> None: for i in adr_service_query.query(): _ = i.item.get_payload_content(as_list=True) succ = True - except Exception as _ : + except Exception as _: succ = False assert succ diff --git a/tests/test_report_remote_server.py b/tests/test_report_remote_server.py index 9fcc98717..b93066f37 100755 --- a/tests/test_report_remote_server.py +++ b/tests/test_report_remote_server.py @@ -43,7 +43,7 @@ def test_copy_item(adr_service_query, tmp_path, get_exec) -> None: progress=False, progress_qt=False, ) - except Exception as _ : + except Exception as _: success = False finally: tmp_adr.stop() @@ -86,7 +86,7 @@ def test_start_stop(tmp_path, get_exec) -> None: ) _ = r.validate_local_db(db_dir=db_dir, version_check=True) r.stop_background_local_server(server_dirname=db_dir) - except Exception as _ : + except Exception as _: succ = False assert succ @@ -95,7 +95,7 @@ def test_validate_existing(adr_service_query) -> None: succ = True try: _ = r.validate_local_db(db_dir=adr_service_query._db_directory, version_check=True) - except Exception as _ : + except Exception as _: succ = False assert succ @@ -243,7 +243,7 @@ def test_delete_db(tmp_path, get_exec) -> None: try: r.delete_database(db_dir=db_dir) succ = True - except Exception as _ : + except Exception as _: succ = False assert succ @@ -338,7 +338,7 @@ def test_export_pptx_error(adr_service_query) -> None: try: # exports the root report instead of the pptx link. 
s.export_report_as_pptx(report_guid=my_report.report.guid, file_name="mypresentation") - except Exception as _ : + except Exception as _: success = True assert success is True @@ -350,7 +350,7 @@ def test_get_pptx(adr_service_query, tmp_path) -> None: try: # scrape all pptx reports from root report s.get_pptx_from_report(report_guid=my_report.report.guid, directory_name=db_dir, query=None) - except Exception as _ : + except Exception as _: success = False else: success = True diff --git a/tests/test_report_utils.py b/tests/test_report_utils.py index 49462df8a..8535551c1 100755 --- a/tests/test_report_utils.py +++ b/tests/test_report_utils.py @@ -54,7 +54,7 @@ def test_ceiversion_nexus_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception as _ : + except Exception: # nosec success = False assert success and int_suffix / 100 < 10 @@ -65,7 +65,7 @@ def test_ceiversion_apex_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception as _ : + except Exception: # nosec success = False assert success and int_suffix / 100 < 10 @@ -76,7 +76,7 @@ def test_ceiversion_ensight_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception as _ : + except Exception: # nosec success = False assert success and int_suffix / 100 < 10 @@ -163,7 +163,7 @@ def test_narray() -> None: a.from_numpy(value=np.array(object=None, dtype="S2")) a.unit_test() success = True - except Exception as _ : + except Exception: # nosec success = False assert success @@ -192,7 +192,7 @@ def test_settings() -> None: try: _ = ru.Settings(defaults={"a": 1, "b": 2}) success = True - except Exception as _ : + except Exception: # nosec success = False assert success diff --git a/tests/test_service.py b/tests/test_service.py index d7815fa0c..27b6b7aed 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -85,7 +85,7 @@ def test_unit_createitem() -> None: valid = False try: a.create_item() - except Exception as _ : + except Exception: # nosec valid = True assert valid From 5ab59961a352bcfc24659b9136ef7419034ead2a Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Wed, 6 Aug 2025 15:56:26 -0700 Subject: [PATCH 12/35] Fix a couple issues --- src/ansys/dynamicreporting/core/adr_service.py | 1 + src/ansys/dynamicreporting/core/docker_support.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/ansys/dynamicreporting/core/adr_service.py b/src/ansys/dynamicreporting/core/adr_service.py index 42b30fbdf..2e2961b67 100755 --- a/src/ansys/dynamicreporting/core/adr_service.py +++ b/src/ansys/dynamicreporting/core/adr_service.py @@ -512,6 +512,7 @@ def stop(self) -> None: try: v = self.serverobj.validate() except Exception as e: + self.logger.error(f"Error: {str(e)}") pass if v is False: self.logger.error( diff --git a/src/ansys/dynamicreporting/core/docker_support.py b/src/ansys/dynamicreporting/core/docker_support.py index 432a119ca..dd0e7cbc3 100644 --- a/src/ansys/dynamicreporting/core/docker_support.py +++ b/src/ansys/dynamicreporting/core/docker_support.py @@ -118,7 +118,7 @@ def copy_to_host(self, src: str, *, dest: str = ".") -> None: for chunk in tar_stream: tar_file.write(chunk) # Extract the tar archive - with tarfile.open(tar_file_path) as tar: + with tarfile.open(tar_file_path) as tar: # nosec tar.extractall(path=output_path) # Remove the tar archive tar_file_path.unlink() From 2d948732dac7b1d4882b89e0e01da0ecf44c192b Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 11:28:05 -0700 Subject: [PATCH 13/35] 
Syntax fixes --- .../dynamicreporting/core/docker_support.py | 6 ++--- .../dynamicreporting/core/utils/encoders.py | 3 +++ .../core/utils/extremely_ugly_hacks.py | 4 ++-- .../dynamicreporting/core/utils/filelock.py | 1 + .../core/utils/geofile_processing.py | 2 +- .../core/utils/report_download_pdf.py | 2 +- .../core/utils/report_objects.py | 23 +++++++++++-------- .../core/utils/report_remote_server.py | 6 ++++- .../core/utils/report_utils.py | 4 ++-- tests/test_download_html.py | 3 ++- tests/test_geofile_processing.py | 9 ++++---- tests/test_item.py | 3 ++- tests/test_report.py | 9 +++++--- tests/test_report_objects.py | 3 ++- tests/test_report_remote_server.py | 18 ++++++++++----- 15 files changed, 60 insertions(+), 36 deletions(-) diff --git a/src/ansys/dynamicreporting/core/docker_support.py b/src/ansys/dynamicreporting/core/docker_support.py index dd0e7cbc3..e1082982d 100644 --- a/src/ansys/dynamicreporting/core/docker_support.py +++ b/src/ansys/dynamicreporting/core/docker_support.py @@ -65,7 +65,7 @@ def __init__(self, image_url: str | None = None, use_dev: bool = False) -> None: try: self._client: docker.client.DockerClient = docker.from_env() except Exception as e: # pragma: no cover - raise RuntimeError("Can't initialize Docker") + raise RuntimeError(f"Can't initialize Docker: {str(e)}") self._container: docker.models.containers.Container = None self._image: docker.models.images.Image = None # the Ansys / EnSight version we found in the container @@ -93,7 +93,7 @@ def pull_image(self) -> docker.models.images.Image: try: self._image = self._client.images.pull(self._image_url) except Exception as e: - raise RuntimeError(f"Can't pull Docker image: {self._image_url}") + raise RuntimeError(f"Can't pull Docker image: {self._image_url}\n\n{str(e)}") return self._image def create_container(self) -> docker.models.containers.Container: @@ -118,7 +118,7 @@ def copy_to_host(self, src: str, *, dest: str = ".") -> None: for chunk in tar_stream: tar_file.write(chunk) # Extract the tar archive - with tarfile.open(tar_file_path) as tar: # nosec + with tarfile.open(tar_file_path) as tar: # nosec tar.extractall(path=output_path) # Remove the tar archive tar_file_path.unlink() diff --git a/src/ansys/dynamicreporting/core/utils/encoders.py b/src/ansys/dynamicreporting/core/utils/encoders.py index ede48b89d..cfc6fdbc0 100644 --- a/src/ansys/dynamicreporting/core/utils/encoders.py +++ b/src/ansys/dynamicreporting/core/utils/encoders.py @@ -35,6 +35,9 @@ def default(self, obj): try: return cls(obj) except Exception as e: # nosec + error_str = f"Object of type {type(obj).__name__} is not JSON serializable: " + error_str += str(e) + raise TypeError(error_str) pass elif hasattr(obj, "__iter__"): return tuple(item for item in obj) diff --git a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py index f08de2f03..94c3fcdc2 100644 --- a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py +++ b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py @@ -55,10 +55,10 @@ def safe_unpickle(input_data, item_type=None): # be default, we follow python3's way of loading: default encoding is ascii # this will work if the data was dumped using python3's pickle. Just do the usual.
data = pickle.loads(bytes_data) - except Exception as e: + except Exception: # nosec try: data = pickle.loads(bytes_data, encoding="utf-8") - except Exception as e: + except Exception: # if it fails, which it will if the data was dumped using python2's pickle, then: # As per https://docs.python.org/3/library/pickle.html#pickle.loads, # "Using encoding='latin1' is required for unpickling NumPy arrays and instances of datetime, diff --git a/src/ansys/dynamicreporting/core/utils/filelock.py b/src/ansys/dynamicreporting/core/utils/filelock.py index 250aa8b53..f5d54ed24 100644 --- a/src/ansys/dynamicreporting/core/utils/filelock.py +++ b/src/ansys/dynamicreporting/core/utils/filelock.py @@ -253,6 +253,7 @@ def acquire(self, timeout=None, poll_intervall=0.05): time.sleep(poll_intervall) except Exception as e: # Something did go wrong, so decrement the counter. + logger.error(f"Exception: {str(e)}") with self._thread_lock: self._lock_counter = max(0, self._lock_counter - 1) diff --git a/src/ansys/dynamicreporting/core/utils/geofile_processing.py b/src/ansys/dynamicreporting/core/utils/geofile_processing.py index 1a406e402..9e563ee7b 100644 --- a/src/ansys/dynamicreporting/core/utils/geofile_processing.py +++ b/src/ansys/dynamicreporting/core/utils/geofile_processing.py @@ -19,7 +19,7 @@ is_enve = True import enve from reports.engine import TemplateEngine -except Exception as e: +except Exception: is_enve = False diff --git a/src/ansys/dynamicreporting/core/utils/report_download_pdf.py b/src/ansys/dynamicreporting/core/utils/report_download_pdf.py index 4cae3b571..db4794e86 100644 --- a/src/ansys/dynamicreporting/core/utils/report_download_pdf.py +++ b/src/ansys/dynamicreporting/core/utils/report_download_pdf.py @@ -9,7 +9,7 @@ from qtpy.QtCore import QTimer has_qt = True -except Exception as e: +except Exception: has_qt = False diff --git a/src/ansys/dynamicreporting/core/utils/report_objects.py b/src/ansys/dynamicreporting/core/utils/report_objects.py index 2071b55c8..7e5e4cb58 100755 --- a/src/ansys/dynamicreporting/core/utils/report_objects.py +++ b/src/ansys/dynamicreporting/core/utils/report_objects.py @@ -402,6 +402,7 @@ def get_params(self): try: return json.loads(self.params) except Exception as e: + logger.error(f"Error: {str(e)}.\n") return {} def set_params(self, d: dict = None): @@ -1319,6 +1320,7 @@ def set_payload_image(self, img): try: from . 
import png except Exception as e: + logger.error(f"Error: {str(e)}.\n") import png try: # we can only read png images as string content (not filename) @@ -1339,6 +1341,7 @@ def set_payload_image(self, img): palette=pngobj[3].get("palette", None), ) except Exception as e: + logger.error(f"Error: {str(e)}.\n") # enhanced images will fall into this case data = report_utils.PIL_image_to_data(img) self.width = data["width"] @@ -1501,13 +1504,15 @@ def add_params(self, d: dict = None): tmp_params[k] = d[k] self.params = json.dumps(tmp_params) return - except Exception as _: + except Exception as e: + logger.error(f"Error: {str(e)}.\n") return {} def get_params(self): try: return json.loads(self.params) - except Exception as _: + except Exception as e: + logger.error(f"Error: {str(e)}.\n") return {} def set_params(self, d: dict = None): @@ -1856,8 +1861,8 @@ def set_child_position(self, guid=None, value=None): raise ValueError("Error: child position array should contain only integers") try: uuid.UUID(guid, version=4) - except Exception as _: - raise ValueError("Error: input guid is not a valid guid") + except Exception as e: + raise ValueError(f"Error: input guid is not a valid guid: {str(e)}") d = json.loads(self.params) if "boxes" not in d: d["boxes"] = {} @@ -1877,8 +1882,8 @@ def set_child_clip(self, guid=None, clip="self"): import uuid uuid.UUID(guid, version=4) - except Exception as _: - raise ValueError("Error: input guid is not a valid guid") + except Exception as e: + raise ValueError(f"Error: input guid is not a valid guid: {str(e)}") d = json.loads(self.params) if "boxes" not in d: d["boxes"] = {} @@ -2159,8 +2164,8 @@ def set_report_link(self, link=None): d["report_guid"] = link self.params = json.dumps(d) return - except Exception as _: - raise ValueError("Error: input guid is not a valid guid") + except Exception as e: + raise ValueError(f"Error: input guid is not a valid guid {str(e)}") class tablemergeREST(GeneratorREST): @@ -3429,7 +3434,7 @@ def validate(self): _ = psycopg.connect(conn_string.strip()) except Exception as e: valid = False - out_msg = f"Could not validate connection:\n{e}" + out_msg = f"Could not validate connection:\n{str(e)}" return valid, out_msg diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py index 71e1f93f3..567b3ec95 100755 --- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py +++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py @@ -1556,7 +1556,9 @@ def launch_local_database_server( # create a file lock local_lock = filelock.nexus_file_lock(api_lock_filename) local_lock.acquire() - except Exception as e: # nosec + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") pass # We may need to do port scanning if port is None: # nosec @@ -1567,6 +1569,8 @@ def launch_local_database_server( scanning_lock = filelock.nexus_file_lock(lock_filename) scanning_lock.acquire() except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") pass # Note: QWebEngineView cannot access http over 65535, so limit ports to 65534 ports = report_utils.find_unused_ports(1) diff --git a/src/ansys/dynamicreporting/core/utils/report_utils.py b/src/ansys/dynamicreporting/core/utils/report_utils.py index 82d5f11b2..00914c239 100644 --- a/src/ansys/dynamicreporting/core/utils/report_utils.py +++ b/src/ansys/dynamicreporting/core/utils/report_utils.py @@ -77,7 +77,7 @@ def check_if_PIL(img): elif imgbytes: Image.open(io.BytesIO(imgbytes)) 
return True - except Exception as _: + except Exception: # nosec return False finally: if imghandle: @@ -313,7 +313,7 @@ def ceiversion_nexus_suffix(): tmp = ansys_version.replace("R", "")[-3:] return str(tmp) - except Exception as _: + except Exception: # nosec # get "nexus###" folder name and then strip off the "nexus" bit tmp = os.path.basename(os.path.dirname(os.path.dirname(__file__))) return tmp[5:] diff --git a/tests/test_download_html.py b/tests/test_download_html.py index bed828a4c..db8afbcfb 100755 --- a/tests/test_download_html.py +++ b/tests/test_download_html.py @@ -44,7 +44,8 @@ def test_download_sqlite(request, adr_service_query) -> None: try: a.download() success = False - except Exception as _: + except Exception as e: + print(f"Download failed as expected with exception: {str(e)}") success = True assert success diff --git a/tests/test_geofile_processing.py b/tests/test_geofile_processing.py index fa5e67c75..857283884 100755 --- a/tests/test_geofile_processing.py +++ b/tests/test_geofile_processing.py @@ -20,11 +20,10 @@ def return_file_paths(request): @pytest.mark.ado_test def test_get_evsn_proxy_image(request) -> None: try: - _ = gp.get_evsn_proxy_image(filename=return_file_paths(request)[6]) - success = True - except Exception as _: - success = False - assert (_ is None) and success + result = gp.get_evsn_proxy_image(filename=return_file_paths(request)[6]) + assert result is None + except Exception as e: + pytest.fail(f"get_evsn_proxy_image raised an unexpected exception: {e}") @pytest.mark.ado_test diff --git a/tests/test_item.py b/tests/test_item.py index e0dd3ff0a..b6537c3a2 100644 --- a/tests/test_item.py +++ b/tests/test_item.py @@ -354,6 +354,7 @@ def test_unit_item_empty_nexus(request) -> None: a = Service() try: _ = Item(service=a) - except Exception as _: + except Exception as e: + print(f"Expected exception received: {str(e)}") valid = True assert valid diff --git a/tests/test_report.py b/tests/test_report.py index 2b4b02200..1fc7e278b 100755 --- a/tests/test_report.py +++ b/tests/test_report.py @@ -148,7 +148,8 @@ def test_save_as_pdf(adr_service_query, request, get_exec) -> None: my_report = adr_service_query.get_report(report_name="My Top Report") pdf_file = os.path.join(request.fspath.dirname, "again_mytest") success = my_report.export_pdf(file_name=pdf_file) - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") success = False else: # If no local installation, then skip this test success = True @@ -164,7 +165,8 @@ def test_save_as_pdf_with_filter(adr_service_query, request, get_exec) -> None: my_report = adr_service_query.get_report(report_name="My Top Report") pdf_file = os.path.join(request.fspath.dirname, "again_mytest_filter") success = my_report.export_pdf(file_name=pdf_file, item_filter="A|i_type|cont|image;") - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") success = False else: # If no local installation, then skip this test success = True @@ -177,7 +179,8 @@ def test_save_as_html(adr_service_query) -> None: try: my_report = adr_service_query.get_report(report_name="My Top Report") success = my_report.export_html(directory_name="htmltest_again") - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") success = False assert success is True diff --git a/tests/test_report_objects.py b/tests/test_report_objects.py index 36f01e2cb..b7bef7b14 100755 --- a/tests/test_report_objects.py +++ b/tests/test_report_objects.py @@ -2036,6 +2036,7 @@ 
def test_item_payload(adr_service_query) -> None: for i in adr_service_query.query(): _ = i.item.get_payload_content(as_list=True) succ = True - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") succ = False assert succ diff --git a/tests/test_report_remote_server.py b/tests/test_report_remote_server.py index b93066f37..bc90906c1 100755 --- a/tests/test_report_remote_server.py +++ b/tests/test_report_remote_server.py @@ -43,7 +43,8 @@ def test_copy_item(adr_service_query, tmp_path, get_exec) -> None: progress=False, progress_qt=False, ) - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") success = False finally: tmp_adr.stop() @@ -86,7 +87,8 @@ def test_start_stop(tmp_path, get_exec) -> None: ) _ = r.validate_local_db(db_dir=db_dir, version_check=True) r.stop_background_local_server(server_dirname=db_dir) - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") succ = False assert succ @@ -95,7 +97,8 @@ def test_validate_existing(adr_service_query) -> None: succ = True try: _ = r.validate_local_db(db_dir=adr_service_query._db_directory, version_check=True) - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") succ = False assert succ @@ -243,7 +246,8 @@ def test_delete_db(tmp_path, get_exec) -> None: try: r.delete_database(db_dir=db_dir) succ = True - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") succ = False assert succ @@ -338,7 +342,8 @@ def test_export_pptx_error(adr_service_query) -> None: try: # exports the root report instead of the pptx link. s.export_report_as_pptx(report_guid=my_report.report.guid, file_name="mypresentation") - except Exception as _: + except Exception as e: + print(f"Expected exception received: {str(e)}") success = True assert success is True @@ -350,7 +355,8 @@ def test_get_pptx(adr_service_query, tmp_path) -> None: try: # scrape all pptx reports from root report s.get_pptx_from_report(report_guid=my_report.report.guid, directory_name=db_dir, query=None) - except Exception as _: + except Exception as e: + print(f"Exception received: {str(e)}") success = False else: success = True From c76728010aecbf42e743a708b864064e85af67d2 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 11:29:06 -0700 Subject: [PATCH 14/35] automatic upload --- .github/workflows/scan_sbom.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index b326be3fa..0060db5df 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -40,13 +40,7 @@ jobs: with: format: spdx-json output-file: sbom.spdx.json - upload-artifact: false - - - name: Upload SBOM as artifact - uses: actions/upload-artifact@v4 - with: - name: ${{ env.PACKAGE_NAME }}-sbom - path: sbom.spdx.json + upload-artifact: true vulnerabilities: From ca97deaec1a2d70094f32003ddb1adf213b8719c Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 11:53:01 -0700 Subject: [PATCH 15/35] Exclude bandit from security scanning --- .github/workflows/scan_sbom.yml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 0060db5df..69e26cd5f 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -40,7 +40,13 @@ jobs: with: format: spdx-json output-file: sbom.spdx.json - 
upload-artifact: true + upload-artifact: false + + - name: Upload SBOM as artifact + uses: actions/upload-artifact@v4 + with: + name: ${{ env.PACKAGE_NAME }}-sbom + path: sbom.spdx.json vulnerabilities: @@ -55,6 +61,7 @@ jobs: python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} hide-log: false + bandit-configfile: bandit.yaml - name: PyAnsys Vulnerability check (on dev) if: github.ref != 'refs/heads/main' @@ -64,4 +71,5 @@ jobs: python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} dev-mode: true - hide-log: false \ No newline at end of file + hide-log: false + bandit-configfile: bandit.yaml \ No newline at end of file From 6d4bebc0cd10970b627ccb93304957d2fa4c7795 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 11:56:17 -0700 Subject: [PATCH 16/35] Add bandit config file --- .github/workflows/bandit.yaml | 2 ++ 1 file changed, 2 insertions(+) create mode 100755 .github/workflows/bandit.yaml diff --git a/.github/workflows/bandit.yaml b/.github/workflows/bandit.yaml new file mode 100755 index 000000000..23bf867ff --- /dev/null +++ b/.github/workflows/bandit.yaml @@ -0,0 +1,2 @@ +skips: + - "*" From 6c7f60bbe44bcd5eb1694739f53ca38f0ceb737c Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 12:04:01 -0700 Subject: [PATCH 17/35] config for bandit --- .github/workflows/scan_sbom.yml | 4 ++-- .github/workflows/bandit.yaml => bandit.yaml | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename .github/workflows/bandit.yaml => bandit.yaml (100%) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 69e26cd5f..8e3c98849 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -61,7 +61,7 @@ jobs: python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} hide-log: false - bandit-configfile: bandit.yaml + bandit-configfile: ../../bandit.yaml - name: PyAnsys Vulnerability check (on dev) if: github.ref != 'refs/heads/main' @@ -72,4 +72,4 @@ jobs: token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} dev-mode: true hide-log: false - bandit-configfile: bandit.yaml \ No newline at end of file + bandit-configfile: ../../bandit.yaml \ No newline at end of file diff --git a/.github/workflows/bandit.yaml b/bandit.yaml similarity index 100% rename from .github/workflows/bandit.yaml rename to bandit.yaml From 756043db8bd37418dee96c57798e4c1b8270b73b Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 12:37:51 -0700 Subject: [PATCH 18/35] Fix path to yaml --- .github/workflows/scan_sbom.yml | 6 ++++-- src/ansys/dynamicreporting/core/adr_service.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 8e3c98849..6e3f6d8e7 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -61,7 +61,8 @@ jobs: python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} hide-log: false - bandit-configfile: ../../bandit.yaml + bandit-configfile: bandit.yaml + auditing-level: "normal" - name: PyAnsys Vulnerability check (on dev) if: github.ref != 'refs/heads/main' @@ -72,4 +73,5 @@ jobs: token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} dev-mode: true hide-log: false - bandit-configfile: ../../bandit.yaml \ No newline at end of file + bandit-configfile: bandit.yaml + auditing-level: "normal" diff --git a/src/ansys/dynamicreporting/core/adr_service.py 
b/src/ansys/dynamicreporting/core/adr_service.py index 2e2961b67..9a06035d7 100755 --- a/src/ansys/dynamicreporting/core/adr_service.py +++ b/src/ansys/dynamicreporting/core/adr_service.py @@ -516,7 +516,7 @@ def stop(self) -> None: pass if v is False: self.logger.error( - f"Error validating the connected service. Can't shut it down.\nError: {str(e)}" + f"Error validating the connected service. Can't shut it down." ) else: # If coming from a docker image, clean that up From fa7a2ae2ee45d404dd8a46615aa30dfa018a3b8e Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 12:43:05 -0700 Subject: [PATCH 19/35] Try to deactivate --- .github/workflows/scan_sbom.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 6e3f6d8e7..a4e11bc60 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -3,6 +3,8 @@ name: Security Scan env: MAIN_PYTHON_VERSION: '3.13' PACKAGE_NAME: 'ansys-dynamicreporting-core' + ACTIVATE_VENV_BANDIT_SAFETY: false + BANDIT_CONFIGFILE: "bandit.yaml" on: push: From db51442d9c7c3614e8454ce5e26c3c7fb63c921a Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 13:10:34 -0700 Subject: [PATCH 20/35] Try to create our own safety scan --- .github/workflows/scan_sbom.yml | 24 +++++++++++++++++++ .../dynamicreporting/core/adr_service.py | 4 +--- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index a4e11bc60..6360fd719 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -77,3 +77,27 @@ jobs: hide-log: false bandit-configfile: bandit.yaml auditing-level: "normal" + + safety_scan: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Build wheelhouse + uses: ansys/actions/build-wheelhouse@v10 + with: + library-name: ${{ env.PACKAGE_NAME }} + operating-system: ubuntu-latest + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Install from wheelhouse + run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} + + - name: Run safety check + run: safety check --full-report diff --git a/src/ansys/dynamicreporting/core/adr_service.py b/src/ansys/dynamicreporting/core/adr_service.py index 9a06035d7..f22e385f5 100755 --- a/src/ansys/dynamicreporting/core/adr_service.py +++ b/src/ansys/dynamicreporting/core/adr_service.py @@ -515,9 +515,7 @@ def stop(self) -> None: self.logger.error(f"Error: {str(e)}") pass if v is False: - self.logger.error( - f"Error validating the connected service. Can't shut it down." - ) + self.logger.error(f"Error validating the connected service. 
Can't shut it down.") else: # If coming from a docker image, clean that up try: From d67fe831ad0d8522f84b36ef347f14d6c68d48c0 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 13:13:14 -0700 Subject: [PATCH 21/35] first the new pipeline --- .github/workflows/scan_sbom.yml | 50 +++++++++++++++++---------------- 1 file changed, 26 insertions(+), 24 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 6360fd719..62cc7f2d1 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -14,6 +14,32 @@ on: - release/* jobs: + + safety_scan: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Build wheelhouse + uses: ansys/actions/build-wheelhouse@v10 + with: + library-name: ${{ env.PACKAGE_NAME }} + operating-system: ubuntu-latest + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Install from wheelhouse + run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} + + - name: Run safety check + run: safety check --full-report + + sbom: name: Generate SBOM runs-on: ubuntu-latest @@ -77,27 +103,3 @@ jobs: hide-log: false bandit-configfile: bandit.yaml auditing-level: "normal" - - safety_scan: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - - - name: Build wheelhouse - uses: ansys/actions/build-wheelhouse@v10 - with: - library-name: ${{ env.PACKAGE_NAME }} - operating-system: ubuntu-latest - python-version: ${{ env.MAIN_PYTHON_VERSION }} - - - name: Install from wheelhouse - run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} - - - name: Run safety check - run: safety check --full-report From 792ccacfcadf5b87e4ef99f63ebf811d50055536 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 13:16:59 -0700 Subject: [PATCH 22/35] Add safety package in pipeline --- .github/workflows/scan_sbom.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 62cc7f2d1..8b6916410 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -33,6 +33,9 @@ jobs: operating-system: ubuntu-latest python-version: ${{ env.MAIN_PYTHON_VERSION }} + - name: Install safety + run: pip install safety + - name: Install from wheelhouse run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} From 593285cc45cf3715dfa9896b20a0b8bd43556813 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 13:27:07 -0700 Subject: [PATCH 23/35] fix the workflow - check out and build code --- .github/workflows/scan_sbom.yml | 11 ++--------- src/ansys/dynamicreporting/core/adr_service.py | 2 +- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 8b6916410..976b27c52 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -26,19 +26,12 @@ jobs: with: python-version: ${{ env.MAIN_PYTHON_VERSION }} - - name: Build wheelhouse - uses: ansys/actions/build-wheelhouse@v10 - with: - library-name: ${{ env.PACKAGE_NAME }} - operating-system: ubuntu-latest - python-version: ${{ env.MAIN_PYTHON_VERSION }} + - name: Install 
package + run: make install-dev - name: Install safety run: pip install safety - - name: Install from wheelhouse - run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} - - name: Run safety check run: safety check --full-report diff --git a/src/ansys/dynamicreporting/core/adr_service.py b/src/ansys/dynamicreporting/core/adr_service.py index f22e385f5..8022d1abf 100755 --- a/src/ansys/dynamicreporting/core/adr_service.py +++ b/src/ansys/dynamicreporting/core/adr_service.py @@ -515,7 +515,7 @@ def stop(self) -> None: self.logger.error(f"Error: {str(e)}") pass if v is False: - self.logger.error(f"Error validating the connected service. Can't shut it down.") + self.logger.error("Error validating the connected service. Can't shut it down.") else: # If coming from a docker image, clean that up try: From 4582f3c033c8c1fe830ab49e6127c414034361d6 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 14:16:11 -0700 Subject: [PATCH 24/35] Remove ansys workflow in favor of public workflows --- .github/workflows/scan_sbom.yml | 53 +++++++++++++-------------------- bandit.yaml | 2 -- 2 files changed, 20 insertions(+), 35 deletions(-) delete mode 100755 bandit.yaml diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 976b27c52..93e67a43d 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -3,8 +3,6 @@ name: Security Scan env: MAIN_PYTHON_VERSION: '3.13' PACKAGE_NAME: 'ansys-dynamicreporting-core' - ACTIVATE_VENV_BANDIT_SAFETY: false - BANDIT_CONFIGFILE: "bandit.yaml" on: push: @@ -15,8 +13,13 @@ on: jobs: - safety_scan: + + vulnerabilities: + name: Vulnerabilities runs-on: ubuntu-latest + strategy: + fail-fast: false + steps: - name: Checkout code uses: actions/checkout@v4 @@ -26,15 +29,27 @@ jobs: with: python-version: ${{ env.MAIN_PYTHON_VERSION }} - - name: Install package - run: make install-dev + - name: Build wheelhouse + uses: ansys/actions/build-wheelhouse@v10 + with: + library-name: ${{ env.PACKAGE_NAME }} + operating-system: ubuntu-latest + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Install from wheelhouse + run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} - name: Install safety run: pip install safety - name: Run safety check run: safety check --full-report + + - name: Install pip-audit + run: pip install pip-audit + - name: Run pip-audit + run: pip-audit sbom: name: Generate SBOM @@ -71,31 +86,3 @@ jobs: with: name: ${{ env.PACKAGE_NAME }}-sbom path: sbom.spdx.json - - - vulnerabilities: - name: Vulnerabilities - runs-on: ubuntu-latest - steps: - - name: PyAnsys Vulnerability check (on main) - if: github.ref == 'refs/heads/main' - uses: ansys/actions/check-vulnerabilities@v10.0.14 - with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - python-package-name: ${{ env.PACKAGE_NAME }} - token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} - hide-log: false - bandit-configfile: bandit.yaml - auditing-level: "normal" - - - name: PyAnsys Vulnerability check (on dev) - if: github.ref != 'refs/heads/main' - uses: ansys/actions/check-vulnerabilities@v10.0.14 - with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - python-package-name: ${{ env.PACKAGE_NAME }} - token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} - dev-mode: true - hide-log: false - bandit-configfile: bandit.yaml - auditing-level: "normal" diff --git a/bandit.yaml b/bandit.yaml deleted file mode 100755 index 23bf867ff..000000000 --- a/bandit.yaml +++ /dev/null @@ -1,2 +0,0 @@ -skips: - 
- "*" From 96d3dbb43a99fa435ca4236fd919f39ccbee772c Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 14:21:13 -0700 Subject: [PATCH 25/35] Check that it can find security issues --- .github/workflows/scan_sbom.yml | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 93e67a43d..f3537cc84 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -49,7 +49,7 @@ jobs: run: pip install pip-audit - name: Run pip-audit - run: pip-audit + run: pip-audit --no-deps sbom: name: Generate SBOM diff --git a/pyproject.toml b/pyproject.toml index 8c5adb3ef..21eab14ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ dependencies = [ "django-guardian~=2.4", "tzlocal~=5.0", "numpy>=1.23.5,<3", - "python-pptx==0.6.23", + "python-pptx==0.6.18", "pandas>=2.0", "statsmodels>=0.14", "scipy<=1.15.3", # breaks ADR if not included. Remove when statsmodels is updated From fc6f6422e1128f8651a9ca1f4f70111554533546 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 14:32:04 -0700 Subject: [PATCH 26/35] Make everything in a single workflow... --- .github/workflows/scan_sbom.yml | 35 ++++++--------------------------- 1 file changed, 6 insertions(+), 29 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index f3537cc84..9d3187350 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -36,6 +36,12 @@ jobs: operating-system: ubuntu-latest python-version: ${{ env.MAIN_PYTHON_VERSION }} + - name: Install pip-audit + run: pip install pip-audit + + - name: Run pip-audit on wheel + run: pip-audit dist/*.whl + - name: Install from wheelhouse run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} @@ -44,35 +50,6 @@ jobs: - name: Run safety check run: safety check --full-report - - - name: Install pip-audit - run: pip install pip-audit - - - name: Run pip-audit - run: pip-audit --no-deps - - sbom: - name: Generate SBOM - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - - - name: Build wheelhouse - uses: ansys/actions/build-wheelhouse@v10 - with: - library-name: ${{ env.PACKAGE_NAME }} - operating-system: ubuntu-latest - python-version: ${{ env.MAIN_PYTHON_VERSION }} - - - name: Install from wheelhouse - run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} - name: Generate SBOM with Syft uses: anchore/sbom-action@v0.15.4 From 5a79505fef98e6818cf93ae0ca2fe70d0d5174aa Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 14:45:02 -0700 Subject: [PATCH 27/35] go back to pyansys workflows --- .github/workflows/scan_sbom.yml | 50 +++++++++++++++++---------------- 1 file changed, 26 insertions(+), 24 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index 9d3187350..cf7e9f062 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -13,53 +13,55 @@ on: jobs: - - vulnerabilities: - name: Vulnerabilities + sbom: + name: Generate SBOM runs-on: ubuntu-latest - strategy: - fail-fast: false steps: + - name: Checkout code uses: actions/checkout@v4 - + - name: Set up Python uses: actions/setup-python@v5 with: python-version: ${{ env.MAIN_PYTHON_VERSION }} - + - name: Build 
wheelhouse uses: ansys/actions/build-wheelhouse@v10 with: library-name: ${{ env.PACKAGE_NAME }} operating-system: ubuntu-latest python-version: ${{ env.MAIN_PYTHON_VERSION }} - - - name: Install pip-audit - run: pip install pip-audit - - - name: Run pip-audit on wheel - run: pip-audit dist/*.whl - + - name: Install from wheelhouse run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} - - - name: Install safety - run: pip install safety - - - name: Run safety check - run: safety check --full-report - + - name: Generate SBOM with Syft uses: anchore/sbom-action@v0.15.4 with: - format: spdx-json - output-file: sbom.spdx.json + format: cyclonedx-json + output-file: sbom.cyclonedx.json upload-artifact: false - name: Upload SBOM as artifact uses: actions/upload-artifact@v4 with: name: ${{ env.PACKAGE_NAME }}-sbom - path: sbom.spdx.json + path: sbom.cyclonedx.json + + + vulnerabilities: + name: Vulnerabilities + runs-on: ubuntu-latest + + steps: + + - name: PyAnsys Vulnerability check (on main) + if: github.ref == 'refs/heads/main' + uses: ansys/actions/check-vulnerabilities@v10.0.14 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + python-package-name: ${{ env.PACKAGE_NAME }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + run-bandit: false From 69e5f8abb94deb52677b3f6d87194b0a47c9dde2 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 14:50:23 -0700 Subject: [PATCH 28/35] Add scan for dev --- .github/workflows/scan_sbom.yml | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index cf7e9f062..ccbae75f6 100644 --- a/.github/workflows/scan_sbom.yml +++ b/.github/workflows/scan_sbom.yml @@ -18,25 +18,25 @@ jobs: runs-on: ubuntu-latest steps: - + - name: Checkout code uses: actions/checkout@v4 - + - name: Set up Python uses: actions/setup-python@v5 with: python-version: ${{ env.MAIN_PYTHON_VERSION }} - + - name: Build wheelhouse uses: ansys/actions/build-wheelhouse@v10 with: library-name: ${{ env.PACKAGE_NAME }} operating-system: ubuntu-latest python-version: ${{ env.MAIN_PYTHON_VERSION }} - + - name: Install from wheelhouse run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} - + - name: Generate SBOM with Syft uses: anchore/sbom-action@v0.15.4 with: @@ -50,13 +50,13 @@ jobs: name: ${{ env.PACKAGE_NAME }}-sbom path: sbom.cyclonedx.json - + vulnerabilities: name: Vulnerabilities runs-on: ubuntu-latest - + steps: - + - name: PyAnsys Vulnerability check (on main) if: github.ref == 'refs/heads/main' uses: ansys/actions/check-vulnerabilities@v10.0.14 @@ -65,3 +65,13 @@ jobs: python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} run-bandit: false + + - name: PyAnsys Vulnerability check (on dev) + if: github.ref != 'refs/heads/main' + uses: ansys/actions/check-vulnerabilities@v10.0.14 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + python-package-name: ${{ env.PACKAGE_NAME }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + run-bandit: false + dev-mode: true From 689dc53ad2078b6f7974421bf295f28a223616d3 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 14:54:19 -0700 Subject: [PATCH 29/35] Show logs --- .github/workflows/scan_sbom.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml index ccbae75f6..05ac36013 100644 --- a/.github/workflows/scan_sbom.yml +++ 
b/.github/workflows/scan_sbom.yml @@ -65,6 +65,7 @@ jobs: python-package-name: ${{ env.PACKAGE_NAME }} token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} run-bandit: false + hide-log: false - name: PyAnsys Vulnerability check (on dev) if: github.ref != 'refs/heads/main' @@ -75,3 +76,4 @@ jobs: token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} run-bandit: false dev-mode: true + hide-log: false From 3668a4c61b94689e17fb58d7ba1789642cb8bbce Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 15:03:02 -0700 Subject: [PATCH 30/35] add exclude from bandit --- src/ansys/dynamicreporting/core/utils/report_remote_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py index 567b3ec95..d174cef52 100755 --- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py +++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py @@ -1455,7 +1455,7 @@ def launch_local_database_server( exec_basis=None, ansys_version=None, **kwargs, -): +): # nosec """ Start up a local Django server for a local sqlite file. If parent is not None, a QtGui will be used to fill in missing inputs. By default, if @@ -1768,7 +1768,7 @@ def launch_local_database_server( params["close_fds"] = True # Actually try to launch the server - try: + try: # nosec # Run the launcher to start the server # Note: this process only returns if the server is shutdown or there is an error monitor_process = subprocess.Popen(command, **params) From 84e874907d52c5de380f12717da26ad1b894e221 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 15:16:31 -0700 Subject: [PATCH 31/35] Add nosec --- .../dynamicreporting/core/utils/report_remote_server.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py index d174cef52..ad9b2ae39 100755 --- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py +++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py @@ -22,7 +22,7 @@ import pickle import platform import shutil -import subprocess +import subprocess # nosec B404 import sys import tempfile import time @@ -98,7 +98,7 @@ def run_nexus_utility(args, use_software_gl=False, exec_basis=None, ansys_versio cmd.extend(args) if is_windows: params["creationflags"] = subprocess.CREATE_NO_WINDOW - subprocess.call(args=cmd, **params) + subprocess.call(args=cmd, **params) # nosec B603 class Server: @@ -1771,7 +1771,7 @@ def launch_local_database_server( try: # nosec # Run the launcher to start the server # Note: this process only returns if the server is shutdown or there is an error - monitor_process = subprocess.Popen(command, **params) + monitor_process = subprocess.Popen(command, **params) # nosec B78 B603 except Exception as e: if print_allowed(): print(f"Error: {str(e)}") From e9dbaa50a32f550574090aeb67f892438f5b15a3 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 15:21:52 -0700 Subject: [PATCH 32/35] Add more exception --- .../core/utils/extremely_ugly_hacks.py | 2 +- .../core/utils/report_download_html.py | 6 +++--- .../dynamicreporting/core/utils/report_objects.py | 2 +- .../core/utils/report_remote_server.py | 12 ++++++------ tests/test_hacks.py | 2 +- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py 
b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py index 94c3fcdc2..6740ab8f0 100644 --- a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py +++ b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py @@ -1,6 +1,6 @@ # All Python3 migration-related ugly hacks go here. import base64 -import pickle +import pickle # nosec B502 from uuid import UUID from .report_utils import text_type diff --git a/src/ansys/dynamicreporting/core/utils/report_download_html.py b/src/ansys/dynamicreporting/core/utils/report_download_html.py index 5e2c97404..4112a32b4 100644 --- a/src/ansys/dynamicreporting/core/utils/report_download_html.py +++ b/src/ansys/dynamicreporting/core/utils/report_download_html.py @@ -154,7 +154,7 @@ def _download_special_files(self): for f in files: mangled = f.replace("media/", "/static/website/scripts/mathjax/") url = tmp.scheme + "://" + tmp.netloc + mangled - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 if resp.status_code == requests.codes.ok: filename = os.path.join(self._directory, f) try: @@ -305,7 +305,7 @@ def _download_static_files(self, files, source_path, target_path, comment): tmp = urllib.parse.urlsplit(self._url) for f in files: url = tmp.scheme + "://" + tmp.netloc + source_path + f - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 if resp.status_code == requests.codes.ok: filename = self._directory + os.sep + target_path + os.sep + f filename = os.path.normpath(filename) @@ -343,7 +343,7 @@ def _get_file(self, path_plus_queries: str, pathname: str, inline: bool = False) return self._filemap[pathname] tmp = urllib.parse.urlsplit(self._url) url = tmp.scheme + "://" + tmp.netloc + path_plus_queries - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 results = pathname if resp.status_code == requests.codes.ok: basename = os.path.basename(pathname) diff --git a/src/ansys/dynamicreporting/core/utils/report_objects.py b/src/ansys/dynamicreporting/core/utils/report_objects.py index 7e5e4cb58..80b8107ba 100755 --- a/src/ansys/dynamicreporting/core/utils/report_objects.py +++ b/src/ansys/dynamicreporting/core/utils/report_objects.py @@ -8,7 +8,7 @@ import logging import os from pathlib import Path -import pickle +import pickle # nosec B502 import shlex import sys import uuid diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py index ad9b2ae39..a6db0b2fa 100755 --- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py +++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py @@ -19,7 +19,7 @@ import os import os.path from pathlib import Path -import pickle +import pickle # nosec B502 import platform import shutil import subprocess # nosec B404 @@ -201,7 +201,7 @@ def magic_token(self, value): @classmethod def get_object_digest(cls, obj): m = hashlib.md5() - m.update(pickle.dumps(obj)) + m.update(pickle.dumps(obj)) # nosec B327 return m.digest() @classmethod @@ -855,7 +855,7 @@ def create_template(self, name="New Template", parent=None, report_type="Layout: return templ def _download_report(self, url, file_name, directory_name=None): - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 if resp.status_code != requests.codes.ok: try: detail = resp.json()["detail"] @@ -979,7 +979,7 @@ def 
get_pptx_from_report(self, report_guid, directory_name=None, query=None): if query is None: query = {} url = self.build_url_with_query(report_guid, query) - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 if resp.status_code == requests.codes.ok: try: links = report_utils.get_links_from_html(resp.text) @@ -1199,8 +1199,8 @@ def create_new_local_database( if run_local: # Make a random string that could be used as a secret key for the database # take two UUID1 values, run them through md5 and concatenate the digests. - secret_key = hashlib.md5(uuid.uuid1().bytes).hexdigest() - secret_key += hashlib.md5(uuid.uuid1().bytes).hexdigest() + secret_key = hashlib.md5(uuid.uuid1().bytes).hexdigest() # nosec B327 B324 + secret_key += hashlib.md5(uuid.uuid1().bytes).hexdigest() # nosec B327 B324 # And make a target file (.nexdb) for auto launching of the report viewer... f = open(os.path.join(db_dir, "view_report.nexdb"), "w") if len(secret_key): diff --git a/tests/test_hacks.py b/tests/test_hacks.py index d7a98184e..6db622e4b 100755 --- a/tests/test_hacks.py +++ b/tests/test_hacks.py @@ -1,4 +1,4 @@ -import pickle +import pickle # nosec B502 import uuid import pytest From e6cdc24b560291856128daeff312c009381382a6 Mon Sep 17 00:00:00 2001 From: "U-ANSYS\\mgalvagn" Date: Thu, 7 Aug 2025 15:30:24 -0700 Subject: [PATCH 33/35] Remove errors on pickle --- src/ansys/dynamicreporting/core/serverless/item.py | 2 +- .../core/utils/extremely_ugly_hacks.py | 10 +++++----- .../dynamicreporting/core/utils/geofile_processing.py | 4 ++-- .../core/utils/report_download_html.py | 2 +- .../dynamicreporting/core/utils/report_objects.py | 6 +++--- .../core/utils/report_remote_server.py | 8 ++++---- tests/test_hacks.py | 2 +- 7 files changed, 17 insertions(+), 17 deletions(-) diff --git a/src/ansys/dynamicreporting/core/serverless/item.py b/src/ansys/dynamicreporting/core/serverless/item.py index 9b0d616fc..4dc303e59 100644 --- a/src/ansys/dynamicreporting/core/serverless/item.py +++ b/src/ansys/dynamicreporting/core/serverless/item.py @@ -3,7 +3,7 @@ from html.parser import HTMLParser as BaseHTMLParser import io from pathlib import Path -import pickle +import pickle # nosec B403 import platform import uuid diff --git a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py index 6740ab8f0..05e084944 100644 --- a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py +++ b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py @@ -1,6 +1,6 @@ # All Python3 migration-related ugly hacks go here. import base64 -import pickle # nosec B502 +import pickle # nosec B403 from uuid import UUID from .report_utils import text_type @@ -54,10 +54,10 @@ def safe_unpickle(input_data, item_type=None): try: # be default, we follow python3's way of loading: default encoding is ascii # this will work if the data was dumped using python3's pickle. Just do the usual. - data = pickle.loads(bytes_data) + data = pickle.loads(bytes_data) # nosec B301 B502 except Exception: # nosec try: - data = pickle.loads(bytes_data, encoding="utf-8") + data = pickle.loads(bytes_data, encoding="utf-8") # nosec B301 B502 except Exception: # if it fails, which it will if the data was dumped using python2's pickle, then: # As per https://docs.python.org/3/library/pickle.html#pickle.loads, @@ -65,7 +65,7 @@ def safe_unpickle(input_data, item_type=None): # date and time pickled by Python 2." 
# The data does contain a numpy array. So: try: - data = pickle.loads(bytes_data, encoding="latin-1") + data = pickle.loads(bytes_data, encoding="latin-1") # nosec B301 B502 # if the stream contains international characters which were 'loaded' with latin-1, # we get garbage text. We have to detect that and then use a workaround. @@ -80,7 +80,7 @@ def safe_unpickle(input_data, item_type=None): # this is a tree item ONLY case that has a pickled datetime obj, # we use bytes as the encoding to workaround this issue, because # other encodings will not work. - data = pickle.loads(bytes_data, encoding="bytes") + data = pickle.loads(bytes_data, encoding="bytes") # nosec B301 B502 # check again, just in case if item_type == "tree": diff --git a/src/ansys/dynamicreporting/core/utils/geofile_processing.py b/src/ansys/dynamicreporting/core/utils/geofile_processing.py index 9e563ee7b..b872dd6c3 100644 --- a/src/ansys/dynamicreporting/core/utils/geofile_processing.py +++ b/src/ansys/dynamicreporting/core/utils/geofile_processing.py @@ -9,7 +9,7 @@ import io import os import platform -import subprocess +import subprocess # nosec B78 B603 B404 import typing import zipfile @@ -213,7 +213,7 @@ def rebuild_3d_geometry(csf_file: str, unique_id: str = "", exec_basis: str = No stderr=subprocess.DEVNULL, close_fds=True, creationflags=create_flags, - ) + ) # nosec B78 B603 except Exception as e: print(f"Warning: unable to convert '{csf_file}' into AVZ format: {str(e)}") # At this point, if we have an original AVZ file or a converted udrw file, we diff --git a/src/ansys/dynamicreporting/core/utils/report_download_html.py b/src/ansys/dynamicreporting/core/utils/report_download_html.py index 4112a32b4..b00ed8ed2 100644 --- a/src/ansys/dynamicreporting/core/utils/report_download_html.py +++ b/src/ansys/dynamicreporting/core/utils/report_download_html.py @@ -530,7 +530,7 @@ def _download(self): ) # get the webpage html source - resp = requests.get(self._url) + resp = requests.get(self._url) # nosec B400 if resp.status_code != requests.codes.ok: raise RuntimeError(f"Unable to access {self._url} ({resp.status_code})") # debugging... 
diff --git a/src/ansys/dynamicreporting/core/utils/report_objects.py b/src/ansys/dynamicreporting/core/utils/report_objects.py
index 80b8107ba..d423e0810 100755
--- a/src/ansys/dynamicreporting/core/utils/report_objects.py
+++ b/src/ansys/dynamicreporting/core/utils/report_objects.py
@@ -8,7 +8,7 @@
 import logging
 import os
 from pathlib import Path
-import pickle  # nosec B502
+import pickle  # nosec B403
 import shlex
 import sys
 import uuid
@@ -1427,7 +1427,7 @@ def factory(cls, json_data):
                "tmp_cls = " + json_data["report_type"].split(":")[1] + "REST()",
                locals(),
                globals(),
-            )
+            )  # nosec
            return tmp_cls
        else:
            return TemplateREST()
@@ -3333,7 +3333,7 @@ def get_postgre(self):
            if "pswsqldb" in json.loads(self.params):
                out["password"] = json.loads(self.params)["pswsqldb"]
            else:
-                out["password"] = ""
+                out["password"] = ""  # nosec B259
        else:
            out = {"database": "", "hostname": "", "port": "", "username": "", "password": ""}
        return out
diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py
index a6db0b2fa..547d204a2 100755
--- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py
+++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py
@@ -19,10 +19,10 @@
 import os
 import os.path
 from pathlib import Path
-import pickle  # nosec B502
+import pickle  # nosec B403
 import platform
 import shutil
-import subprocess  # nosec B404
+import subprocess  # nosec B78 B603 B404
 import sys
 import tempfile
 import time
@@ -98,7 +98,7 @@ def run_nexus_utility(args, use_software_gl=False, exec_basis=None, ansys_versio
    cmd.extend(args)
    if is_windows:
        params["creationflags"] = subprocess.CREATE_NO_WINDOW
-    subprocess.call(args=cmd, **params)  # nosec B603
+    subprocess.call(args=cmd, **params)  # nosec B603 B78


 class Server:
@@ -1455,7 +1455,7 @@ def launch_local_database_server(
    exec_basis=None,
    ansys_version=None,
    **kwargs,
-):  # nosec
+):
    """
    Start up a local Django server for a local sqlite file.  If parent is
    not None, a QtGui will be used to fill in missing inputs.  By default, if
diff --git a/tests/test_hacks.py b/tests/test_hacks.py
index 6db622e4b..c10a0d104 100755
--- a/tests/test_hacks.py
+++ b/tests/test_hacks.py
@@ -1,4 +1,4 @@
-import pickle  # nosec B502
+import pickle  # nosec B403
 import uuid

 import pytest

From d140a50290b7da7d615d3bf281a53d8096b0e7f4 Mon Sep 17 00:00:00 2001
From: "U-ANSYS\\mgalvagn"
Date: Thu, 7 Aug 2025 15:36:21 -0700
Subject: [PATCH 34/35] more bandit fixes

---
 pyproject.toml                                          | 2 +-
 src/ansys/dynamicreporting/core/adr_service.py          | 4 ++--
 src/ansys/dynamicreporting/core/docker_support.py       | 6 +++---
 src/ansys/dynamicreporting/core/examples/downloads.py   | 4 ++--
 .../dynamicreporting/core/utils/report_remote_server.py | 2 +-
 5 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 21eab14ac..8c5adb3ef 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -41,7 +41,7 @@ dependencies = [
    "django-guardian~=2.4",
    "tzlocal~=5.0",
    "numpy>=1.23.5,<3",
-    "python-pptx==0.6.18",
+    "python-pptx==0.6.23",
    "pandas>=2.0",
    "statsmodels>=0.14",
    "scipy<=1.15.3",  # breaks ADR if not included. Remove when statsmodels is updated
diff --git a/src/ansys/dynamicreporting/core/adr_service.py b/src/ansys/dynamicreporting/core/adr_service.py
index 8022d1abf..e338f9df4 100755
--- a/src/ansys/dynamicreporting/core/adr_service.py
+++ b/src/ansys/dynamicreporting/core/adr_service.py
@@ -241,7 +241,7 @@ def connect(
        username: str = "nexus",
        password: str = "cei",
        session: str | None = "",
-    ) -> None:
+    ) -> None:  # nosec B107
        """
        Connect to a running service.

@@ -301,7 +301,7 @@ def start(
        error_if_create_db_exists: bool = False,
        exit_on_close: bool = False,
        delete_db: bool = False,
-    ) -> str:
+    ) -> str:  # nosec B107
        """
        Start a new service.

diff --git a/src/ansys/dynamicreporting/core/docker_support.py b/src/ansys/dynamicreporting/core/docker_support.py
index e1082982d..4480c8efb 100644
--- a/src/ansys/dynamicreporting/core/docker_support.py
+++ b/src/ansys/dynamicreporting/core/docker_support.py
@@ -118,8 +118,8 @@ def copy_to_host(self, src: str, *, dest: str = ".") -> None:
                for chunk in tar_stream:
                    tar_file.write(chunk)
            # Extract the tar archive
-            with tarfile.open(tar_file_path) as tar:  # nosec
-                tar.extractall(path=output_path)
+            with tarfile.open(tar_file_path) as tar:
+                tar.extractall(path=output_path)  # nosec B202
            # Remove the tar archive
            tar_file_path.unlink()
        except Exception as e:
@@ -176,7 +176,7 @@ def start(self, host_directory: str, db_directory: str, port: int, ansys_versio
        existing_names = [x.name for x in self._client.from_env().containers.list()]
        container_name = "nexus"
        while container_name in existing_names:
-            container_name += random.choice(string.ascii_letters)
+            container_name += random.choice(string.ascii_letters)  # nosec B311
            if len(container_name) > 500:
                raise RuntimeError("Can't determine a unique Docker container name.")

diff --git a/src/ansys/dynamicreporting/core/examples/downloads.py b/src/ansys/dynamicreporting/core/examples/downloads.py
index 2a0f620b9..800320b5f 100755
--- a/src/ansys/dynamicreporting/core/examples/downloads.py
+++ b/src/ansys/dynamicreporting/core/examples/downloads.py
@@ -42,7 +42,7 @@ def check_url_exists(url: str) -> bool:
        logging.debug(f"Passed url is invalid: {url}\n")
        return False
    try:
-        with request.urlopen(url) as response:
+        with request.urlopen(url) as response:  # nosec B310
            return response.status == 200
    except Exception as e:
        logging.debug(f"Check url error: {str(e)}\n")
@@ -62,7 +62,7 @@ def get_url_content(url: str) -> str:
    str
        content of the URL
    """
-    with request.urlopen(url) as response:
+    with request.urlopen(url) as response:  # nosec B310
        return response.read()


diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py
index 547d204a2..bbe8fbd95 100755
--- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py
+++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py
@@ -201,7 +201,7 @@ def magic_token(self, value):
    @classmethod
    def get_object_digest(cls, obj):
        m = hashlib.md5()
-        m.update(pickle.dumps(obj))  # nosec B327
+        m.update(pickle.dumps(obj))  # nosec B324
        return m.digest()

    @classmethod

From 1d5a7a0933dc63c1751f50276b1d0104d2879047 Mon Sep 17 00:00:00 2001
From: "U-ANSYS\\mgalvagn"
Date: Thu, 7 Aug 2025 15:37:54 -0700
Subject: [PATCH 35/35] more bandit fixes

---
 src/ansys/dynamicreporting/core/utils/report_remote_server.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py
index bbe8fbd95..dcf77f61c 100755
--- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py
+++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py
@@ -200,7 +200,7 @@ def magic_token(self, value):
    @classmethod
    def get_object_digest(cls, obj):
-        m = hashlib.md5()
+        m = hashlib.md5()  # nosec B324
        m.update(pickle.dumps(obj))  # nosec B324
        return m.digest()