diff --git a/.github/workflows/nightly_scan.yml b/.github/workflows/nightly_scan.yml deleted file mode 100644 index d4ab7689a..000000000 --- a/.github/workflows/nightly_scan.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Security Scan - -on: - push: - branches: - - main - - release/* - workflow_dispatch: - -jobs: - security_scan: - uses: ansys-internal/ci-templates/.github/workflows/security-scan-mend.yml@v7 - with: - package_type: 'poetry+npm' - needs: 'sca,sast,sbom' - secrets: inherit diff --git a/.github/workflows/scan_sbom.yml b/.github/workflows/scan_sbom.yml new file mode 100644 index 000000000..05ac36013 --- /dev/null +++ b/.github/workflows/scan_sbom.yml @@ -0,0 +1,79 @@ +name: Security Scan + +env: + MAIN_PYTHON_VERSION: '3.13' + PACKAGE_NAME: 'ansys-dynamicreporting-core' + +on: + push: + branches: + - main + - maint/* + - release/* + +jobs: + + sbom: + name: Generate SBOM + runs-on: ubuntu-latest + + steps: + + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Build wheelhouse + uses: ansys/actions/build-wheelhouse@v10 + with: + library-name: ${{ env.PACKAGE_NAME }} + operating-system: ubuntu-latest + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Install from wheelhouse + run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }} + + - name: Generate SBOM with Syft + uses: anchore/sbom-action@v0.15.4 + with: + format: cyclonedx-json + output-file: sbom.cyclonedx.json + upload-artifact: false + + - name: Upload SBOM as artifact + uses: actions/upload-artifact@v4 + with: + name: ${{ env.PACKAGE_NAME }}-sbom + path: sbom.cyclonedx.json + + + vulnerabilities: + name: Vulnerabilities + runs-on: ubuntu-latest + + steps: + + - name: PyAnsys Vulnerability check (on main) + if: github.ref == 'refs/heads/main' + uses: ansys/actions/check-vulnerabilities@v10.0.14 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + python-package-name: ${{ env.PACKAGE_NAME }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + run-bandit: false + hide-log: false + + - name: PyAnsys Vulnerability check (on dev) + if: github.ref != 'refs/heads/main' + uses: ansys/actions/check-vulnerabilities@v10.0.14 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + python-package-name: ${{ env.PACKAGE_NAME }} + token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + run-bandit: false + dev-mode: true + hide-log: false diff --git a/codegen/adr_utils.txt b/codegen/adr_utils.txt index 68e7053be..b6af61349 100644 --- a/codegen/adr_utils.txt +++ b/codegen/adr_utils.txt @@ -41,7 +41,7 @@ def in_ipynb(): return True if "terminal" in ipy_str: return False - except Exception: # todo: please specify the possible exceptions here. + except Exception as e: # todo: please specify the possible exceptions here. return False diff --git a/pyproject.toml b/pyproject.toml index e28320952..8c5adb3ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ dependencies = [ "django-guardian~=2.4", "tzlocal~=5.0", "numpy>=1.23.5,<3", - "python-pptx==0.6.19", + "python-pptx==0.6.23", "pandas>=2.0", "statsmodels>=0.14", "scipy<=1.15.3", # breaks ADR if not included. 
Remove when statsmodels is updated diff --git a/src/ansys/dynamicreporting/core/adr_service.py b/src/ansys/dynamicreporting/core/adr_service.py index e1456181c..e338f9df4 100755 --- a/src/ansys/dynamicreporting/core/adr_service.py +++ b/src/ansys/dynamicreporting/core/adr_service.py @@ -241,7 +241,7 @@ def connect( username: str = "nexus", password: str = "cei", session: str | None = "", - ) -> None: + ) -> None: # nosec B107 """ Connect to a running service. @@ -282,8 +282,8 @@ def connect( ) try: self.serverobj.validate() - except Exception: - self.logger.error("Can not validate dynamic reporting server.\n") + except Exception as e: + self.logger.error(f"Can not validate dynamic reporting server.\nError: {str(e)}") raise NotValidServer # set url after connection succeeds self._url = url @@ -301,7 +301,7 @@ def start( error_if_create_db_exists: bool = False, exit_on_close: bool = False, delete_db: bool = False, - ) -> str: + ) -> str: # nosec B107 """ Start a new service. @@ -392,11 +392,11 @@ def start( if self._docker_launcher: try: create_output = self._docker_launcher.create_nexus_db() - except Exception: # pragma: no cover + except Exception as e: # pragma: no cover self._docker_launcher.cleanup() self.logger.error( f"Error creating the database at the path {self._db_directory} in the " - "Docker container.\n" + f"Docker container.\nError: {str(e)}" ) raise CannotCreateDatabaseError for f in ["db.sqlite3", "view_report.nexdb"]: @@ -511,10 +511,11 @@ def stop(self) -> None: v = False try: v = self.serverobj.validate() - except Exception: + except Exception as e: + self.logger.error(f"Error: {str(e)}") pass if v is False: - self.logger.error("Error validating the connected service. Can't shut it down.\n") + self.logger.error("Error validating the connected service. 
Can't shut it down.") else: # If coming from a docker image, clean that up try: @@ -814,7 +815,7 @@ def delete(self, items: list) -> None: try: _ = self.serverobj.del_objects(items_to_delete) except Exception as e: - self.logger.warning(f"Error in deleting items: {e}") + self.logger.warning(f"Error in deleting items: {str(e)}") def get_report(self, report_name: str) -> Report: """ diff --git a/src/ansys/dynamicreporting/core/docker_support.py b/src/ansys/dynamicreporting/core/docker_support.py index 6561b5f59..4480c8efb 100644 --- a/src/ansys/dynamicreporting/core/docker_support.py +++ b/src/ansys/dynamicreporting/core/docker_support.py @@ -64,8 +64,8 @@ def __init__(self, image_url: str | None = None, use_dev: bool = False) -> None: # Load up Docker from the user's environment try: self._client: docker.client.DockerClient = docker.from_env() - except Exception: # pragma: no cover - raise RuntimeError("Can't initialize Docker") + except Exception as e: # pragma: no cover + raise RuntimeError(f"Can't initialize Docker: {str(e)}") self._container: docker.models.containers.Container = None self._image: docker.models.images.Image = None # the Ansys / EnSight version we found in the container @@ -92,8 +92,8 @@ def pull_image(self) -> docker.models.images.Image: """ try: self._image = self._client.images.pull(self._image_url) - except Exception: - raise RuntimeError(f"Can't pull Docker image: {self._image_url}") + except Exception as e: + raise RuntimeError(f"Can't pull Docker image: {self._image_url}\n\n{str(e)}") return self._image def create_container(self) -> docker.models.containers.Container: @@ -119,7 +119,7 @@ def copy_to_host(self, src: str, *, dest: str = ".") -> None: tar_file.write(chunk) # Extract the tar archive with tarfile.open(tar_file_path) as tar: - tar.extractall(path=output_path) + tar.extractall(path=output_path) # nosec B202 # Remove the tar archive tar_file_path.unlink() except Exception as e: @@ -176,7 +176,7 @@ def start(self, host_directory: str, db_directory: str, port: int, ansys_version existing_names = [x.name for x in self._client.from_env().containers.list()] container_name = "nexus" while container_name in existing_names: - container_name += random.choice(string.ascii_letters) + container_name += random.choice(string.ascii_letters) # nosec B311 if len(container_name) > 500: raise RuntimeError("Can't determine a unique Docker container name.") diff --git a/src/ansys/dynamicreporting/core/examples/downloads.py b/src/ansys/dynamicreporting/core/examples/downloads.py index b435ee303..800320b5f 100755 --- a/src/ansys/dynamicreporting/core/examples/downloads.py +++ b/src/ansys/dynamicreporting/core/examples/downloads.py @@ -42,9 +42,10 @@ def check_url_exists(url: str) -> bool: logging.debug(f"Passed url is invalid: {url}\n") return False try: - with request.urlopen(url) as response: + with request.urlopen(url) as response: # nosec B310 return response.status == 200 - except Exception: + except Exception as e: + logging.debug(f"Check url error: {str(e)}\n") return False @@ -61,7 +62,7 @@ def get_url_content(url: str) -> str: str content of the URL """ - with request.urlopen(url) as response: + with request.urlopen(url) as response: # nosec B310 return response.read() diff --git a/src/ansys/dynamicreporting/core/serverless/item.py b/src/ansys/dynamicreporting/core/serverless/item.py index 9b0d616fc..4dc303e59 100644 --- a/src/ansys/dynamicreporting/core/serverless/item.py +++ b/src/ansys/dynamicreporting/core/serverless/item.py @@ -3,7 +3,7 @@ from html.parser import 
HTMLParser as BaseHTMLParser import io from pathlib import Path -import pickle +import pickle # nosec B403 import platform import uuid diff --git a/src/ansys/dynamicreporting/core/utils/encoders.py b/src/ansys/dynamicreporting/core/utils/encoders.py index db53e6782..cfc6fdbc0 100644 --- a/src/ansys/dynamicreporting/core/utils/encoders.py +++ b/src/ansys/dynamicreporting/core/utils/encoders.py @@ -34,7 +34,9 @@ def default(self, obj): cls = list if isinstance(obj, (list, tuple)) else dict try: return cls(obj) - except Exception: - pass + except Exception as e: # nosec + error_str = f"Object of type {type(obj).__name__} is not JSON serializable: " + error_str += str(e) + raise TypeError(error_str) elif hasattr(obj, "__iter__"): return tuple(item for item in obj) diff --git a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py index c6211fb2b..05e084944 100644 --- a/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py +++ b/src/ansys/dynamicreporting/core/utils/extremely_ugly_hacks.py @@ -1,6 +1,6 @@ # All Python3 migration-related ugly hacks go here. import base64 -import pickle +import pickle # nosec B403 from uuid import UUID from .report_utils import text_type @@ -54,10 +54,10 @@ def safe_unpickle(input_data, item_type=None): try: # be default, we follow python3's way of loading: default encoding is ascii # this will work if the data was dumped using python3's pickle. Just do the usual. - data = pickle.loads(bytes_data) - except Exception: + data = pickle.loads(bytes_data) # nosec B301 B502 + except Exception: # nosec try: - data = pickle.loads(bytes_data, encoding="utf-8") + data = pickle.loads(bytes_data, encoding="utf-8") # nosec B301 B502 except Exception: # if it fails, which it will if the data was dumped using python2's pickle, then: # As per https://docs.python.org/3/library/pickle.html#pickle.loads, @@ -65,7 +65,7 @@ def safe_unpickle(input_data, item_type=None): # date and time pickled by Python 2." # The data does contain a numpy array. So: try: - data = pickle.loads(bytes_data, encoding="latin-1") + data = pickle.loads(bytes_data, encoding="latin-1") # nosec B301 B502 # if the stream contains international characters which were 'loaded' with latin-1, # we get garbage text. We have to detect that and then use a workaround. @@ -80,7 +80,7 @@ def safe_unpickle(input_data, item_type=None): # this is a tree item ONLY case that has a pickled datetime obj, # we use bytes as the encoding to workaround this issue, because # other encodings will not work. - data = pickle.loads(bytes_data, encoding="bytes") + data = pickle.loads(bytes_data, encoding="bytes") # nosec B301 B502 # check again, just in case if item_type == "tree": diff --git a/src/ansys/dynamicreporting/core/utils/filelock.py b/src/ansys/dynamicreporting/core/utils/filelock.py index 42eeda3b2..f5d54ed24 100644 --- a/src/ansys/dynamicreporting/core/utils/filelock.py +++ b/src/ansys/dynamicreporting/core/utils/filelock.py @@ -251,8 +251,9 @@ def acquire(self, timeout=None, poll_intervall=0.05): poll_intervall, ) time.sleep(poll_intervall) - except Exception: + except Exception as e: # Something did go wrong, so decrement the counter. 
+ logger.error(f"Exception: {str(e)}") with self._thread_lock: self._lock_counter = max(0, self._lock_counter - 1) diff --git a/src/ansys/dynamicreporting/core/utils/geofile_processing.py b/src/ansys/dynamicreporting/core/utils/geofile_processing.py index 9e563ee7b..b872dd6c3 100644 --- a/src/ansys/dynamicreporting/core/utils/geofile_processing.py +++ b/src/ansys/dynamicreporting/core/utils/geofile_processing.py @@ -9,7 +9,7 @@ import io import os import platform -import subprocess +import subprocess # nosec B78 B603 B404 import typing import zipfile @@ -213,7 +213,7 @@ def rebuild_3d_geometry(csf_file: str, unique_id: str = "", exec_basis: str = No stderr=subprocess.DEVNULL, close_fds=True, creationflags=create_flags, - ) + ) # nosec B78 B603 except Exception as e: print(f"Warning: unable to convert '{csf_file}' into AVZ format: {str(e)}") # At this point, if we have an original AVZ file or a converted udrw file, we diff --git a/src/ansys/dynamicreporting/core/utils/report_download_html.py b/src/ansys/dynamicreporting/core/utils/report_download_html.py index c719c41a0..b00ed8ed2 100644 --- a/src/ansys/dynamicreporting/core/utils/report_download_html.py +++ b/src/ansys/dynamicreporting/core/utils/report_download_html.py @@ -154,13 +154,13 @@ def _download_special_files(self): for f in files: mangled = f.replace("media/", "/static/website/scripts/mathjax/") url = tmp.scheme + "://" + tmp.netloc + mangled - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 if resp.status_code == requests.codes.ok: filename = os.path.join(self._directory, f) try: open(filename, "wb").write(resp.content) - except Exception: - print(f"Unable to download MathJax file: {f}") + except Exception as e: + print(f"Unable to download MathJax file: {f}\nError {str(e)}") else: print(f"Unable to get: {url}") @@ -305,7 +305,7 @@ def _download_static_files(self, files, source_path, target_path, comment): tmp = urllib.parse.urlsplit(self._url) for f in files: url = tmp.scheme + "://" + tmp.netloc + source_path + f - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 if resp.status_code == requests.codes.ok: filename = self._directory + os.sep + target_path + os.sep + f filename = os.path.normpath(filename) @@ -314,8 +314,8 @@ def _download_static_files(self, files, source_path, target_path, comment): str(filename), resp.content, self._ansys_version ) open(filename, "wb").write(data) - except Exception: - print(f"Unable to download {comment}: {f}") + except Exception as e: + print(f"Unable to download {comment}: {f}\nError: {e}") def _make_unique_basename(self, name: str) -> str: # check to see if the filename has already been used (and hence we are headed toward @@ -343,7 +343,7 @@ def _get_file(self, path_plus_queries: str, pathname: str, inline: bool = False) return self._filemap[pathname] tmp = urllib.parse.urlsplit(self._url) url = tmp.scheme + "://" + tmp.netloc + path_plus_queries - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 results = pathname if resp.status_code == requests.codes.ok: basename = os.path.basename(pathname) @@ -389,8 +389,8 @@ def _get_file(self, path_plus_queries: str, pathname: str, inline: bool = False) results = f"./media/{basename}" filename = os.path.join(self._directory, "media", basename) open(filename, "wb").write(tmp) - except Exception: - print(f"Unable to write downloaded file: {basename}") + except Exception as 
e: + print(f"Unable to write downloaded file: {basename}\nError: {str(e)}") else: print(f"Unable to read file via URL: {url}") self._filemap[pathname] = results @@ -475,8 +475,8 @@ def _make_dir(subdirs): if not os.path.exists(base): try: os.makedirs(base, exist_ok=True) - except Exception: - raise OSError(f"Unable to create target directory: {base}") + except Exception as e: + raise OSError(f"Unable to create target directory: {base}\nError: {str(e)}") def _download(self): self._filemap = dict() @@ -530,7 +530,7 @@ def _download(self): ) # get the webpage html source - resp = requests.get(self._url) + resp = requests.get(self._url) # nosec B400 if resp.status_code != requests.codes.ok: raise RuntimeError(f"Unable to access {self._url} ({resp.status_code})") # debugging... diff --git a/src/ansys/dynamicreporting/core/utils/report_objects.py b/src/ansys/dynamicreporting/core/utils/report_objects.py index c1e93ca4c..d423e0810 100755 --- a/src/ansys/dynamicreporting/core/utils/report_objects.py +++ b/src/ansys/dynamicreporting/core/utils/report_objects.py @@ -8,7 +8,7 @@ import logging import os from pathlib import Path -import pickle +import pickle # nosec B403 import shlex import sys import uuid @@ -173,7 +173,8 @@ def map_ensight_plot_to_table_dictionary(p): # convert EnSight undefined values into Numpy NaN values try: a[a == ensight.Undefined] = numpy.nan - except Exception: + except Exception as e: + logger.error(f"Error: {str(e)}.\n") pass max_columns = max(a.shape[1], max_columns) d = dict(array=a, yname=q.LEGENDTITLE, xname=x_axis_title) @@ -400,7 +401,8 @@ def reset_defaults(self): def get_params(self): try: return json.loads(self.params) - except Exception: + except Exception as e: + logger.error(f"Error: {str(e)}.\n") return {} def set_params(self, d: dict = None): @@ -1317,7 +1319,8 @@ def set_payload_image(self, img): else: try: from . 
import png - except Exception: + except Exception as e: + logger.error(f"Error: {str(e)}.\n") import png try: # we can only read png images as string content (not filename) @@ -1337,7 +1340,8 @@ def set_payload_image(self, img): planes=pngobj[3].get("planes", None), palette=pngobj[3].get("palette", None), ) - except Exception: + except Exception as e: + logger.error(f"Error: {str(e)}.\n") # enhanced images will fall into this case data = report_utils.PIL_image_to_data(img) self.width = data["width"] @@ -1423,7 +1427,7 @@ def factory(cls, json_data): "tmp_cls = " + json_data["report_type"].split(":")[1] + "REST()", locals(), globals(), - ) + ) # nosec return tmp_cls else: return TemplateREST() @@ -1500,13 +1504,15 @@ def add_params(self, d: dict = None): tmp_params[k] = d[k] self.params = json.dumps(tmp_params) return - except Exception: + except Exception as e: + logger.error(f"Error: {str(e)}.\n") return {} def get_params(self): try: return json.loads(self.params) - except Exception: + except Exception as e: + logger.error(f"Error: {str(e)}.\n") return {} def set_params(self, d: dict = None): @@ -1855,8 +1861,8 @@ def set_child_position(self, guid=None, value=None): raise ValueError("Error: child position array should contain only integers") try: uuid.UUID(guid, version=4) - except Exception: - raise ValueError("Error: input guid is not a valid guid") + except Exception as e: + raise ValueError(f"Error: input guid is not a valid guid: {str(e)}") d = json.loads(self.params) if "boxes" not in d: d["boxes"] = {} @@ -1876,8 +1882,8 @@ def set_child_clip(self, guid=None, clip="self"): import uuid uuid.UUID(guid, version=4) - except Exception: - raise ValueError("Error: input guid is not a valid guid") + except Exception as e: + raise ValueError(f"Error: input guid is not a valid guid: {str(e)}") d = json.loads(self.params) if "boxes" not in d: d["boxes"] = {} @@ -2158,8 +2164,8 @@ def set_report_link(self, link=None): d["report_guid"] = link self.params = json.dumps(d) return - except Exception: - raise ValueError("Error: input guid is not a valid guid") + except Exception as e: + raise ValueError(f"Error: input guid is not a valid guid {str(e)}") class tablemergeREST(GeneratorREST): @@ -3327,7 +3333,7 @@ def get_postgre(self): if "pswsqldb" in json.loads(self.params): out["password"] = json.loads(self.params)["pswsqldb"] else: - out["password"] = "" + out["password"] = "" # nosec B259 else: out = {"database": "", "hostname": "", "port": "", "username": "", "password": ""} return out @@ -3428,7 +3434,7 @@ def validate(self): _ = psycopg.connect(conn_string.strip()) except Exception as e: valid = False - out_msg = f"Could not validate connection:\n{e}" + out_msg = f"Could not validate connection:\n{str(e)}" return valid, out_msg diff --git a/src/ansys/dynamicreporting/core/utils/report_remote_server.py b/src/ansys/dynamicreporting/core/utils/report_remote_server.py index b58cd07eb..dcf77f61c 100755 --- a/src/ansys/dynamicreporting/core/utils/report_remote_server.py +++ b/src/ansys/dynamicreporting/core/utils/report_remote_server.py @@ -19,10 +19,10 @@ import os import os.path from pathlib import Path -import pickle +import pickle # nosec B403 import platform import shutil -import subprocess +import subprocess # nosec B78 B603 B404 import sys import tempfile import time @@ -98,7 +98,7 @@ def run_nexus_utility(args, use_software_gl=False, exec_basis=None, ansys_versio cmd.extend(args) if is_windows: params["creationflags"] = subprocess.CREATE_NO_WINDOW - subprocess.call(args=cmd, **params) + 
subprocess.call(args=cmd, **params) # nosec B603 B78 class Server: @@ -200,8 +200,8 @@ def magic_token(self, value): @classmethod def get_object_digest(cls, obj): - m = hashlib.md5() - m.update(pickle.dumps(obj)) + m = hashlib.md5() # nosec B324 + m.update(pickle.dumps(obj)) # nosec B324 return m.digest() @classmethod @@ -268,7 +268,9 @@ def get_server_name(self): if self.cur_servername is None: try: self.validate() - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") pass if self.cur_servername is None: return self.get_URL() @@ -293,7 +295,9 @@ def stop_server_allowed(self): result = self._http_session.get(url, auth=auth) if not result.ok: return False - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") return False return True @@ -306,7 +310,9 @@ def stop_local_server(self): try: # note this request will fail as it does not return anything!!! self._http_session.get(url, auth=auth) - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") pass self.set_URL(None) self.set_password(None) @@ -328,7 +334,9 @@ def get_user_groups(self): return [] try: return [str(obj_data.get("name")) for obj_data in r.json()] - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") return [] def get_object_guids(self, objtype=report_objects.Template, query=None): @@ -355,7 +363,9 @@ def get_object_guids(self, objtype=report_objects.Template, query=None): return [str(obj_data.get("guid")) for obj_data in r.json()] else: return [str(i) for i in r.json()["guid_list"]] - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") return [] def get_objects(self, objtype=report_objects.Template, query=None): @@ -390,7 +400,9 @@ def get_objects(self, objtype=report_objects.Template, query=None): t.from_json(d) ret.append(t) return ret - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") return [] def get_object_from_guid(self, guid, objtype=report_objects.TemplateREST): @@ -415,7 +427,9 @@ def get_object_from_guid(self, guid, objtype=report_objects.TemplateREST): obj.server_api_version = self.api_version obj.from_json(r.json()) return obj - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") return None def _get_push_request_info(self, obj): @@ -543,7 +557,9 @@ def put_objects(self, in_objects): url = self.cur_url + file_data[0] try: r = self._http_session.put(url, auth=auth, files=files) - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") r = self._http_session.Response() r.status_code = requests.codes.client_closed_request ret = r.status_code @@ -839,7 +855,7 @@ def create_template(self, name="New Template", parent=None, report_type="Layout: return templ def _download_report(self, url, file_name, directory_name=None): - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 if resp.status_code != requests.codes.ok: try: detail = resp.json()["detail"] @@ -963,7 +979,7 @@ def get_pptx_from_report(self, report_guid, directory_name=None, query=None): if query is None: query = {} url = self.build_url_with_query(report_guid, query) - resp = requests.get(url, allow_redirects=True) + resp = requests.get(url, allow_redirects=True) # nosec B400 if resp.status_code == requests.codes.ok: try: links = report_utils.get_links_from_html(resp.text) @@ -1183,8 
+1199,8 @@ def create_new_local_database( if run_local: # Make a random string that could be used as a secret key for the database # take two UUID1 values, run them through md5 and concatenate the digests. - secret_key = hashlib.md5(uuid.uuid1().bytes).hexdigest() - secret_key += hashlib.md5(uuid.uuid1().bytes).hexdigest() + secret_key = hashlib.md5(uuid.uuid1().bytes).hexdigest() # nosec B327 B324 + secret_key += hashlib.md5(uuid.uuid1().bytes).hexdigest() # nosec B327 B324 # And make a target file (.nexdb) for auto launching of the report viewer... f = open(os.path.join(db_dir, "view_report.nexdb"), "w") if len(secret_key): @@ -1227,7 +1243,9 @@ def create_new_local_database( group.user_set.add(user) group.save() os.makedirs(os.path.join(db_dir, "media")) - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") error = True if parent and has_qt: QtWidgets.QApplication.restoreOverrideCursor() @@ -1384,7 +1402,9 @@ def validate_local_db_version(db_dir, version_max=None, version_min=None): return False if number < version_min: return False - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") return False return True @@ -1532,21 +1552,25 @@ def launch_local_database_server( # .nexus.lock is held whenever port scanning is going on. It can be held by this function or by nexus_launcher # .nexus_api.lock is used by the Python API to ensure exclusivity (e.g. while a server is launching) local_lock = None - try: + try: # nosec # create a file lock local_lock = filelock.nexus_file_lock(api_lock_filename) local_lock.acquire() - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") pass # We may need to do port scanning - if port is None: + if port is None: # nosec lock_filename = os.path.join(homedir, ".nexus.lock") scanning_lock = None try: # create a file lock scanning_lock = filelock.nexus_file_lock(lock_filename) scanning_lock.acquire() - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") pass # Note: QWebEngineView cannot access http over 65535, so limit ports to 65534 ports = report_utils.find_unused_ports(1) @@ -1675,7 +1699,9 @@ def launch_local_database_server( "There appears to be a local Nexus server already running on that port.\nPlease stop that server first or select a different port." ) return False - except Exception: + except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") pass # Start the busy cursor @@ -1742,11 +1768,13 @@ def launch_local_database_server( params["close_fds"] = True # Actually try to launch the server - try: + try: # nosec # Run the launcher to start the server # Note: this process only returns if the server is shutdown or there is an error - monitor_process = subprocess.Popen(command, **params) + monitor_process = subprocess.Popen(command, **params) # nosec B78 B603 except Exception as e: + if print_allowed(): + print(f"Error: {str(e)}") if parent and has_qt: QtWidgets.QApplication.restoreOverrideCursor() msg = QtWidgets.QApplication.translate( @@ -1806,8 +1834,10 @@ def launch_local_database_server( raise exceptions.ServerConnectionError( "Access to server denied. Potential username/password error." 
) - except Exception: + except Exception as e: # we will try again + if print_allowed(): + print(f"Error: {str(e)}") pass # detach from stdout, stderr to avoid buffer blocking diff --git a/src/ansys/dynamicreporting/core/utils/report_utils.py b/src/ansys/dynamicreporting/core/utils/report_utils.py index bf178bc4c..00914c239 100644 --- a/src/ansys/dynamicreporting/core/utils/report_utils.py +++ b/src/ansys/dynamicreporting/core/utils/report_utils.py @@ -77,7 +77,7 @@ def check_if_PIL(img): elif imgbytes: Image.open(io.BytesIO(imgbytes)) return True - except Exception: + except Exception: # nosec return False finally: if imghandle: @@ -313,7 +313,7 @@ def ceiversion_nexus_suffix(): tmp = ansys_version.replace("R", "")[-3:] return str(tmp) - except Exception: + except Exception: # nosec # get "nexus###" folder name and then strip off the "nexus" bit tmp = os.path.basename(os.path.dirname(os.path.dirname(__file__))) return tmp[5:] diff --git a/test_cleanup.py b/test_cleanup.py index 9c4ff6a59..37b758140 100644 --- a/test_cleanup.py +++ b/test_cleanup.py @@ -27,7 +27,8 @@ for i_dir in dir_list: try: shutil.rmtree(i_dir) - except Exception: + except Exception as e: + print(f"Skipping {i_dir} with error {e}") pass @@ -45,5 +46,6 @@ for i_file in file_list: try: os.remove(i_file) - except Exception: + except Exception as e: + print(f"Skipping {i_file} with error {e}") pass diff --git a/tests/test_download_html.py b/tests/test_download_html.py index ca71a4ba0..db8afbcfb 100755 --- a/tests/test_download_html.py +++ b/tests/test_download_html.py @@ -44,7 +44,8 @@ def test_download_sqlite(request, adr_service_query) -> None: try: a.download() success = False - except Exception: + except Exception as e: + print(f"Download failed as expected with exception: {str(e)}") success = True assert success diff --git a/tests/test_geofile_processing.py b/tests/test_geofile_processing.py index 18e811467..857283884 100755 --- a/tests/test_geofile_processing.py +++ b/tests/test_geofile_processing.py @@ -20,11 +20,10 @@ def return_file_paths(request): @pytest.mark.ado_test def test_get_evsn_proxy_image(request) -> None: try: - _ = gp.get_evsn_proxy_image(filename=return_file_paths(request)[6]) - success = True - except Exception: - success = False - assert (_ is None) and success + result = gp.get_evsn_proxy_image(filename=return_file_paths(request)[6]) + assert result is None + except Exception as e: + pytest.fail(f"get_evsn_proxy_image raised an unexpected exception: {e}") @pytest.mark.ado_test diff --git a/tests/test_hacks.py b/tests/test_hacks.py index d7a98184e..c10a0d104 100755 --- a/tests/test_hacks.py +++ b/tests/test_hacks.py @@ -1,4 +1,4 @@ -import pickle +import pickle # nosec B403 import uuid import pytest diff --git a/tests/test_item.py b/tests/test_item.py index 7ad5f6565..b6537c3a2 100644 --- a/tests/test_item.py +++ b/tests/test_item.py @@ -354,6 +354,7 @@ def test_unit_item_empty_nexus(request) -> None: a = Service() try: _ = Item(service=a) - except Exception: + except Exception as e: + print(f"Expected exception received: {str(e)}") valid = True assert valid diff --git a/tests/test_report.py b/tests/test_report.py index 2e2ec8933..1fc7e278b 100755 --- a/tests/test_report.py +++ b/tests/test_report.py @@ -148,7 +148,8 @@ def test_save_as_pdf(adr_service_query, request, get_exec) -> None: my_report = adr_service_query.get_report(report_name="My Top Report") pdf_file = os.path.join(request.fspath.dirname, "again_mytest") success = my_report.export_pdf(file_name=pdf_file) - except Exception: + except 
Exception as e: + print(f"Exception received: {str(e)}") success = False else: # If no local installation, then skip this test success = True @@ -164,7 +165,8 @@ def test_save_as_pdf_with_filter(adr_service_query, request, get_exec) -> None: my_report = adr_service_query.get_report(report_name="My Top Report") pdf_file = os.path.join(request.fspath.dirname, "again_mytest_filter") success = my_report.export_pdf(file_name=pdf_file, item_filter="A|i_type|cont|image;") - except Exception: + except Exception as e: + print(f"Exception received: {str(e)}") success = False else: # If no local installation, then skip this test success = True @@ -177,7 +179,8 @@ def test_save_as_html(adr_service_query) -> None: try: my_report = adr_service_query.get_report(report_name="My Top Report") success = my_report.export_html(directory_name="htmltest_again") - except Exception: + except Exception as e: + print(f"Exception received: {str(e)}") success = False assert success is True diff --git a/tests/test_report_objects.py b/tests/test_report_objects.py index 50a899b53..b7bef7b14 100755 --- a/tests/test_report_objects.py +++ b/tests/test_report_objects.py @@ -2036,6 +2036,7 @@ def test_item_payload(adr_service_query) -> None: for i in adr_service_query.query(): _ = i.item.get_payload_content(as_list=True) succ = True - except Exception: + except Exception as e: + print(f"Exception received: {str(e)}") succ = False assert succ diff --git a/tests/test_report_remote_server.py b/tests/test_report_remote_server.py index 5fb285cc2..bc90906c1 100755 --- a/tests/test_report_remote_server.py +++ b/tests/test_report_remote_server.py @@ -43,7 +43,8 @@ def test_copy_item(adr_service_query, tmp_path, get_exec) -> None: progress=False, progress_qt=False, ) - except Exception: + except Exception as e: + print(f"Exception received: {str(e)}") success = False finally: tmp_adr.stop() @@ -86,7 +87,8 @@ def test_start_stop(tmp_path, get_exec) -> None: ) _ = r.validate_local_db(db_dir=db_dir, version_check=True) r.stop_background_local_server(server_dirname=db_dir) - except Exception: + except Exception as e: + print(f"Exception received: {str(e)}") succ = False assert succ @@ -95,7 +97,8 @@ def test_validate_existing(adr_service_query) -> None: succ = True try: _ = r.validate_local_db(db_dir=adr_service_query._db_directory, version_check=True) - except Exception: + except Exception as e: + print(f"Exception received: {str(e)}") succ = False assert succ @@ -243,7 +246,8 @@ def test_delete_db(tmp_path, get_exec) -> None: try: r.delete_database(db_dir=db_dir) succ = True - except Exception: + except Exception as e: + print(f"Exception received: {str(e)}") succ = False assert succ @@ -338,7 +342,8 @@ def test_export_pptx_error(adr_service_query) -> None: try: # exports the root report instead of the pptx link. 
s.export_report_as_pptx(report_guid=my_report.report.guid, file_name="mypresentation") - except Exception: + except Exception as e: + print(f"Expected exception received: {str(e)}") success = True assert success is True @@ -350,7 +355,8 @@ def test_get_pptx(adr_service_query, tmp_path) -> None: try: # scrape all pptx reports from root report s.get_pptx_from_report(report_guid=my_report.report.guid, directory_name=db_dir, query=None) - except Exception: + except Exception as e: + print(f"Exception received: {str(e)}") success = False else: success = True diff --git a/tests/test_report_utils.py b/tests/test_report_utils.py index ccbf8225e..8535551c1 100755 --- a/tests/test_report_utils.py +++ b/tests/test_report_utils.py @@ -54,7 +54,7 @@ def test_ceiversion_nexus_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception: + except Exception: # nosec success = False assert success and int_suffix / 100 < 10 @@ -65,7 +65,7 @@ def test_ceiversion_apex_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception: + except Exception: # nosec success = False assert success and int_suffix / 100 < 10 @@ -76,7 +76,7 @@ def test_ceiversion_ensight_suffix() -> None: try: int_suffix = int(suffix) success = True - except Exception: + except Exception: # nosec success = False assert success and int_suffix / 100 < 10 @@ -163,7 +163,7 @@ def test_narray() -> None: a.from_numpy(value=np.array(object=None, dtype="S2")) a.unit_test() success = True - except Exception: + except Exception: # nosec success = False assert success @@ -192,7 +192,7 @@ def test_settings() -> None: try: _ = ru.Settings(defaults={"a": 1, "b": 2}) success = True - except Exception: + except Exception: # nosec success = False assert success diff --git a/tests/test_service.py b/tests/test_service.py index 5505c50f8..27b6b7aed 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -85,7 +85,7 @@ def test_unit_createitem() -> None: valid = False try: a.create_item() - except Exception: + except Exception: # nosec valid = True assert valid