Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/ci-cd.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@ jobs:
runs-on: ubuntu-latest

strategy:
# If either the tests for 3.9 or 3.12 fail, all workflows
# If either the tests for 3.11 or 3.12 fail, all workflows
# are terminated to save computing resources.
fail-fast: true
# To save runtime, the least and latest supported versions are
# chosen. For more info see the pyproject.toml
matrix:
python-version: ["3.9", "3.12"]
python-version: ["3.11", "3.12"]

steps:
- uses: actions/checkout@v3
Expand Down
1 change: 0 additions & 1 deletion .python-version

This file was deleted.

10 changes: 5 additions & 5 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,22 +13,20 @@ classifiers = [
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
]

requires-python = ">=3.9"
requires-python = ">=3.11"
dependencies = [
"h5py==3.*",
"scipy==1.*",
"numpy==1.*",
"numpy>2.2",
"plotly==5.*",
"psutil==5.*",
"rich==13.*",
"pandas==2.*",
"scikit_learn==1.*"
"scikit_learn==1.*",
]

[tool.setuptools]
Expand Down Expand Up @@ -89,6 +87,7 @@ select = [
"YTT", # flake8-2020 (Detects outdated Python 2/3 compatibility issues)
"FLY", # flynt (Converts old-style string formatting to f-strings)
"PIE", # flake8-pie
"NPY201", # numpy2-deprecations
# "PL", # pylint
# "RUF", # Ruff-specific rules (Additional optimizations and best practices)
]
Expand All @@ -99,6 +98,7 @@ ignore = [
"S311", # [suspicious-non-cryptographic-random-usage](https://docs.astral.sh/ruff/rules/suspicious-non-cryptographic-random-usage/)
"S404", # [suspicious-subprocess-import](https://docs.astral.sh/ruff/rules/suspicious-subprocess-import/)
"S603", # [subprocess-without-shell-equals-true](https://docs.astral.sh/ruff/rules/subprocess-without-shell-equals-true/)
"UP007", # [non-pep604-annotation-union](https://docs.astral.sh/ruff/rules/non-pep604-annotation-union/#non-pep604-annotation-union-up007)
]

[tool.ruff.lint.per-file-ignores]
Expand Down
2 changes: 1 addition & 1 deletion src/lasso/dimred/graph_laplacian.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def _laplacian_gauss_idw(
for i, (j, d, e, k) in enumerate(
zip(
*tree.query_radius(points, return_distance=True, r=search_radius),
*tree.query(points, return_distance=True, k=1 + min_neighbors),
*tree.query(points, return_distance=True, k=1 + min_neighbors), strict=False,
)
):
# Always search for k neighbors, this prevents strongly connected local areas
Expand Down
2 changes: 1 addition & 1 deletion src/lasso/dimred/hashing.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ def _join_hash_comparison_thread_files(
thread_weights = thread_file["weights"]

for (i_row, i_col), values, matches in zip(
matrix_indexes, matrix_similarities, matrix_matches
matrix_indexes, matrix_similarities, matrix_matches, strict=False
):
smatrix[i_row, i_col] = values
ds_matches[i_row, i_col] = matches
Expand Down
4 changes: 3 additions & 1 deletion src/lasso/dyna/binout.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,9 @@ def as_df(self, *args) -> pd.DataFrame:
if args[0] == "rcforc":
ids = [
(str(i) + "m") if j else (str(i) + "s")
for i, j in zip(self.read("rcforc", "ids"), self.read("rcforc", "side"))
for i, j in zip(
self.read("rcforc", "ids"), self.read("rcforc", "side"), strict=False
)
]
else:
ids = self.read(*args[:-1], "ids")
Expand Down
8 changes: 5 additions & 3 deletions src/lasso/dyna/d3plot.py
Original file line number Diff line number Diff line change
Expand Up @@ -7162,7 +7162,9 @@ def _write_geom_rigid_road_surface(
rigid_road_segment_road_id = self.arrays[ArrayType.rigid_road_segment_road_id]
rigid_road_segment_node_ids = self.arrays[ArrayType.rigid_road_segment_node_ids]

for segment_id, node_ids in zip(rigid_road_segment_road_id, rigid_road_segment_node_ids):
for segment_id, node_ids in zip(
rigid_road_segment_road_id, rigid_road_segment_node_ids, strict=False
):
n_bytes_written += fp.write(settings.pack(segment_id))
n_bytes_written += fp.write(settings.pack(len(node_ids)))
n_bytes_written += fp.write(settings.pack(node_ids, dtype_hint=np.integer))
Expand Down Expand Up @@ -7347,7 +7349,7 @@ def _write_header_part_contact_interface_titles(
title_wordsize = 4
max_len = 18 * title_wordsize
fmt_name = "{0:" + str(max_len) + "}"
for pid, title in zip(part_titles_ids, part_titles):
for pid, title in zip(part_titles_ids, part_titles, strict=False):
title = title.decode("utf-8")
n_bytes_written += fp.write(settings.pack(pid))

Expand Down Expand Up @@ -7387,7 +7389,7 @@ def _write_header_part_contact_interface_titles(

max_len = 18 * self.header.wordsize
fmt_name = "{0:" + str(max_len) + "}"
for pid, title in zip(titles_ids, titles):
for pid, title in zip(titles_ids, titles, strict=False):
n_bytes_written += fp.write(settings.pack(pid))

formatted_title = fmt_name.format(title[:max_len])
Expand Down
2 changes: 1 addition & 1 deletion test/unit_tests/dyna/test_d3plot.py
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ def test_write(self):

with tempfile.TemporaryDirectory() as dirpath:
for d3plot_kwargs in d3plot_kwargs_list:
for d3plot_filepath, _ in zip(filepaths, d3plot_kwargs_list):
for d3plot_filepath, _ in zip(filepaths, d3plot_kwargs_list, strict=False):
print(d3plot_filepath)

# read d3plot
Expand Down
Loading