Skip to content
Merged
Show file tree
Hide file tree
Changes from 14 commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
60ecd85
Change the scans to be pyansys scans
margalva Aug 6, 2025
8d49092
Merge branch 'main' into maint/scans
margalva Aug 6, 2025
802dcc8
Replace sbom generation action
margalva Aug 6, 2025
660ef38
Merge from main
margalva Aug 6, 2025
81a3176
Fix the name
margalva Aug 6, 2025
e4c1ce4
Stop upload at a previous step
margalva Aug 6, 2025
0d3379f
Add vulnerability scan on dev version
margalva Aug 6, 2025
0aac09c
Fix workflow
margalva Aug 6, 2025
3fd0f37
Add logs for vulnerability
margalva Aug 6, 2025
2bc481e
Address vulnerability scan results
margalva Aug 6, 2025
77f5aac
Address vulnerability scan results
margalva Aug 6, 2025
df67a27
Add logs for vulnerability
margalva Aug 6, 2025
cfe1ed4
Fix syntax
margalva Aug 6, 2025
5ab5996
Fix a couple issues
margalva Aug 6, 2025
2d94873
Syntax fixes
margalva Aug 7, 2025
c767280
automatic upload
margalva Aug 7, 2025
ca97dea
Exclude bandit from security scanning
margalva Aug 7, 2025
6d4bebc
Add bandit config file
margalva Aug 7, 2025
6c7f60b
config for bandit
margalva Aug 7, 2025
756043d
Fix path to yaml
margalva Aug 7, 2025
fa7a2ae
Try to deactivate
margalva Aug 7, 2025
db51442
Try to create our own safety scan
margalva Aug 7, 2025
d67fe83
first the new pipeline
margalva Aug 7, 2025
792ccac
Add safety package in pipeline
margalva Aug 7, 2025
593285c
fix the workflow - check out and build code
margalva Aug 7, 2025
4582f3c
Remove ansys workflow in favor of public workflows
margalva Aug 7, 2025
96d3dbb
Check that it can find security issues
margalva Aug 7, 2025
fc6f642
Make everything in a single workflow...
margalva Aug 7, 2025
5a79505
go back to pyansys workflows
margalva Aug 7, 2025
69e5f8a
Add scan for dev
margalva Aug 7, 2025
689dc53
Show logs
margalva Aug 7, 2025
3668a4c
add exclude from bandit
margalva Aug 7, 2025
84e8749
Add nosec
margalva Aug 7, 2025
e9dbaa5
Add more exception
margalva Aug 7, 2025
e6cdc24
Remove errors on pickle
margalva Aug 7, 2025
d140a50
more bandit fixes
margalva Aug 7, 2025
1d5a7a0
more bandit fixes
margalva Aug 7, 2025
77b0bcf
Merge main with latest library updates
margalva Sep 15, 2025
41db02c
Fix spaces for black check
margalva Sep 15, 2025
ea8f72a
Add vulnerability scan to the nightly runs
margalva Sep 16, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 0 additions & 16 deletions .github/workflows/nightly_scan.yml

This file was deleted.

73 changes: 73 additions & 0 deletions .github/workflows/scan_sbom.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Security scanning workflow: generates an SBOM for the built package and
# runs the PyAnsys vulnerability check on every push to main/maint/release.
name: Security Scan

env:
  MAIN_PYTHON_VERSION: '3.13'
  PACKAGE_NAME: 'ansys-dynamicreporting-core'

on:
  push:
    branches:
      - main
      - maint/*
      - release/*

jobs:
  sbom:
    name: Generate SBOM
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.MAIN_PYTHON_VERSION }}

      # Build the wheel plus all pinned dependencies so the SBOM reflects
      # the exact dependency set shipped to users.
      - name: Build wheelhouse
        uses: ansys/actions/build-wheelhouse@v10
        with:
          library-name: ${{ env.PACKAGE_NAME }}
          operating-system: ubuntu-latest
          python-version: ${{ env.MAIN_PYTHON_VERSION }}

      # Install strictly from the wheelhouse (no index) so the scanned
      # environment matches the built artifacts.
      - name: Install from wheelhouse
        run: python -m pip install --no-index --find-links=wheelhouse ${{ env.PACKAGE_NAME }}

      # NOTE(review): action name was garbled in the paste ("anchore/[email protected]");
      # anchore/sbom-action is the Syft action matching this step — confirm the ref.
      - name: Generate SBOM with Syft
        uses: anchore/[email protected]
        with:
          format: spdx-json
          output-file: sbom.spdx.json
          # Upload is done explicitly in the next step instead.
          upload-artifact: false

      - name: Upload SBOM as artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }}-sbom
          path: sbom.spdx.json

  vulnerabilities:
    name: Vulnerabilities
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): action path was garbled in the paste
      # ("ansys/actions/[email protected]"); check-vulnerabilities matches the
      # step names — confirm the subpath before merging.
      - name: PyAnsys Vulnerability check (on main)
        if: github.ref == 'refs/heads/main'
        uses: ansys/actions/[email protected]
        with:
          python-version: ${{ env.MAIN_PYTHON_VERSION }}
          python-package-name: ${{ env.PACKAGE_NAME }}
          token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }}
          hide-log: false

      # Same check in dev-mode for non-main branches (advisories do not fail
      # the build the same way; see action docs).
      - name: PyAnsys Vulnerability check (on dev)
        if: github.ref != 'refs/heads/main'
        uses: ansys/actions/[email protected]
        with:
          python-version: ${{ env.MAIN_PYTHON_VERSION }}
          python-package-name: ${{ env.PACKAGE_NAME }}
          token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }}
          dev-mode: true
          hide-log: false
2 changes: 1 addition & 1 deletion codegen/adr_utils.txt
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def in_ipynb():
return True
if "terminal" in ipy_str:
return False
except Exception: # todo: please specify the possible exceptions here.
except Exception as e: # todo: please specify the possible exceptions here.
return False


Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ dependencies = [
"django-guardian~=2.4",
"tzlocal~=5.0",
"numpy>=1.23.5,<3",
"python-pptx==0.6.19",
"python-pptx==0.6.23",
"pandas>=2.0",
"statsmodels>=0.14",
"scipy<=1.15.3", # breaks ADR if not included. Remove when statsmodels is updated
Expand Down
19 changes: 11 additions & 8 deletions src/ansys/dynamicreporting/core/adr_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,8 +282,8 @@ def connect(
)
try:
self.serverobj.validate()
except Exception:
self.logger.error("Can not validate dynamic reporting server.\n")
except Exception as e:
self.logger.error(f"Can not validate dynamic reporting server.\nError: {str(e)}")
raise NotValidServer
# set url after connection succeeds
self._url = url
Expand Down Expand Up @@ -392,11 +392,11 @@ def start(
if self._docker_launcher:
try:
create_output = self._docker_launcher.create_nexus_db()
except Exception: # pragma: no cover
except Exception as e: # pragma: no cover
self._docker_launcher.cleanup()
self.logger.error(
f"Error creating the database at the path {self._db_directory} in the "
"Docker container.\n"
"Error creating the database at the path {self._db_directory} in the "
f"Docker container.\nError: {str(e)}"
)
raise CannotCreateDatabaseError
for f in ["db.sqlite3", "view_report.nexdb"]:
Expand Down Expand Up @@ -511,10 +511,13 @@ def stop(self) -> None:
v = False
try:
v = self.serverobj.validate()
except Exception:
except Exception as e:
self.logger.error(f"Error: {str(e)}")
pass
if v is False:
self.logger.error("Error validating the connected service. Can't shut it down.\n")
self.logger.error(
f"Error validating the connected service. Can't shut it down.\nError: {str(e)}"
)
else:
# If coming from a docker image, clean that up
try:
Expand Down Expand Up @@ -814,7 +817,7 @@ def delete(self, items: list) -> None:
try:
_ = self.serverobj.del_objects(items_to_delete)
except Exception as e:
self.logger.warning(f"Error in deleting items: {e}")
self.logger.warning(f"Error in deleting items: {str(e)}")

def get_report(self, report_name: str) -> Report:
"""
Expand Down
6 changes: 3 additions & 3 deletions src/ansys/dynamicreporting/core/docker_support.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def __init__(self, image_url: str | None = None, use_dev: bool = False) -> None:
# Load up Docker from the user's environment
try:
self._client: docker.client.DockerClient = docker.from_env()
except Exception: # pragma: no cover
except Exception as e: # pragma: no cover
raise RuntimeError("Can't initialize Docker")
self._container: docker.models.containers.Container = None
self._image: docker.models.images.Image = None
Expand Down Expand Up @@ -92,7 +92,7 @@ def pull_image(self) -> docker.models.images.Image:
"""
try:
self._image = self._client.images.pull(self._image_url)
except Exception:
except Exception as e:
raise RuntimeError(f"Can't pull Docker image: {self._image_url}")
return self._image

Expand All @@ -118,7 +118,7 @@ def copy_to_host(self, src: str, *, dest: str = ".") -> None:
for chunk in tar_stream:
tar_file.write(chunk)
# Extract the tar archive
with tarfile.open(tar_file_path) as tar:
with tarfile.open(tar_file_path) as tar: # nosec
tar.extractall(path=output_path)
# Remove the tar archive
tar_file_path.unlink()
Expand Down
3 changes: 2 additions & 1 deletion src/ansys/dynamicreporting/core/examples/downloads.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,8 @@ def check_url_exists(url: str) -> bool:
try:
with request.urlopen(url) as response:
return response.status == 200
except Exception:
except Exception as e:
logging.debug(f"Check url error: {str(e)}\n")
return False


Expand Down
2 changes: 1 addition & 1 deletion src/ansys/dynamicreporting/core/utils/encoders.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def default(self, obj):
cls = list if isinstance(obj, (list, tuple)) else dict
try:
return cls(obj)
except Exception:
except Exception as e: # nosec
pass
elif hasattr(obj, "__iter__"):
return tuple(item for item in obj)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,10 +55,10 @@ def safe_unpickle(input_data, item_type=None):
# be default, we follow python3's way of loading: default encoding is ascii
# this will work if the data was dumped using python3's pickle. Just do the usual.
data = pickle.loads(bytes_data)
except Exception:
except Exception as e:
try:
data = pickle.loads(bytes_data, encoding="utf-8")
except Exception:
except Exception as e:
# if it fails, which it will if the data was dumped using python2's pickle, then:
# As per https://docs.python.org/3/library/pickle.html#pickle.loads,
# "Using encoding='latin1' is required for unpickling NumPy arrays and instances of datetime,
Expand Down
2 changes: 1 addition & 1 deletion src/ansys/dynamicreporting/core/utils/filelock.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ def acquire(self, timeout=None, poll_intervall=0.05):
poll_intervall,
)
time.sleep(poll_intervall)
except Exception:
except Exception as e:
# Something did go wrong, so decrement the counter.
with self._thread_lock:
self._lock_counter = max(0, self._lock_counter - 1)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
is_enve = True
import enve
from reports.engine import TemplateEngine
except Exception:
except Exception as e:
is_enve = False


Expand Down
16 changes: 8 additions & 8 deletions src/ansys/dynamicreporting/core/utils/report_download_html.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,8 +159,8 @@ def _download_special_files(self):
filename = os.path.join(self._directory, f)
try:
open(filename, "wb").write(resp.content)
except Exception:
print(f"Unable to download MathJax file: {f}")
except Exception as e:
print(f"Unable to download MathJax file: {f}\nError {str(e)}")
else:
print(f"Unable to get: {url}")

Expand Down Expand Up @@ -314,8 +314,8 @@ def _download_static_files(self, files, source_path, target_path, comment):
str(filename), resp.content, self._ansys_version
)
open(filename, "wb").write(data)
except Exception:
print(f"Unable to download {comment}: {f}")
except Exception as e:
print(f"Unable to download {comment}: {f}\nError: {e}")

def _make_unique_basename(self, name: str) -> str:
# check to see if the filename has already been used (and hence we are headed toward
Expand Down Expand Up @@ -389,8 +389,8 @@ def _get_file(self, path_plus_queries: str, pathname: str, inline: bool = False)
results = f"./media/{basename}"
filename = os.path.join(self._directory, "media", basename)
open(filename, "wb").write(tmp)
except Exception:
print(f"Unable to write downloaded file: {basename}")
except Exception as e:
print(f"Unable to write downloaded file: {basename}\nError: {str(e)}")
else:
print(f"Unable to read file via URL: {url}")
self._filemap[pathname] = results
Expand Down Expand Up @@ -475,8 +475,8 @@ def _make_dir(subdirs):
if not os.path.exists(base):
try:
os.makedirs(base, exist_ok=True)
except Exception:
raise OSError(f"Unable to create target directory: {base}")
except Exception as e:
raise OSError(f"Unable to create target directory: {base}\nError: {str(e)}")

def _download(self):
self._filemap = dict()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from qtpy.QtCore import QTimer

has_qt = True
except Exception:
except Exception as e:
has_qt = False


Expand Down
19 changes: 10 additions & 9 deletions src/ansys/dynamicreporting/core/utils/report_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,8 @@ def map_ensight_plot_to_table_dictionary(p):
# convert EnSight undefined values into Numpy NaN values
try:
a[a == ensight.Undefined] = numpy.nan
except Exception:
except Exception as e:
logger.error(f"Error: {str(e)}.\n")
pass
max_columns = max(a.shape[1], max_columns)
d = dict(array=a, yname=q.LEGENDTITLE, xname=x_axis_title)
Expand Down Expand Up @@ -400,7 +401,7 @@ def reset_defaults(self):
def get_params(self):
try:
return json.loads(self.params)
except Exception:
except Exception as e:
return {}

def set_params(self, d: dict = None):
Expand Down Expand Up @@ -1317,7 +1318,7 @@ def set_payload_image(self, img):
else:
try:
from . import png
except Exception:
except Exception as e:
import png
try:
# we can only read png images as string content (not filename)
Expand All @@ -1337,7 +1338,7 @@ def set_payload_image(self, img):
planes=pngobj[3].get("planes", None),
palette=pngobj[3].get("palette", None),
)
except Exception:
except Exception as e:
# enhanced images will fall into this case
data = report_utils.PIL_image_to_data(img)
self.width = data["width"]
Expand Down Expand Up @@ -1500,13 +1501,13 @@ def add_params(self, d: dict = None):
tmp_params[k] = d[k]
self.params = json.dumps(tmp_params)
return
except Exception:
except Exception as _:
return {}

def get_params(self):
try:
return json.loads(self.params)
except Exception:
except Exception as _:
return {}

def set_params(self, d: dict = None):
Expand Down Expand Up @@ -1855,7 +1856,7 @@ def set_child_position(self, guid=None, value=None):
raise ValueError("Error: child position array should contain only integers")
try:
uuid.UUID(guid, version=4)
except Exception:
except Exception as _:
raise ValueError("Error: input guid is not a valid guid")
d = json.loads(self.params)
if "boxes" not in d:
Expand All @@ -1876,7 +1877,7 @@ def set_child_clip(self, guid=None, clip="self"):
import uuid

uuid.UUID(guid, version=4)
except Exception:
except Exception as _:
raise ValueError("Error: input guid is not a valid guid")
d = json.loads(self.params)
if "boxes" not in d:
Expand Down Expand Up @@ -2158,7 +2159,7 @@ def set_report_link(self, link=None):
d["report_guid"] = link
self.params = json.dumps(d)
return
except Exception:
except Exception as _:
raise ValueError("Error: input guid is not a valid guid")


Expand Down
Loading
Loading