
Commit b4269d6

saudzahirr, laraibg786 and saudz authored
Using ruff for linting #2 (#79)
* remove black for linting
* bump ruff version
* add config for ruff rules (C only)
* fix linting issues
* format the codebase using ruff
* Update old python syntax and typing

Co-authored-by: laraibg786 <[email protected]>
Co-authored-by: saudz <[email protected]>
1 parent 181bf70 commit b4269d6

27 files changed (+147, -158 lines)

.python-version

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+3.9

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -85,7 +85,7 @@ select = [
     # "I", # isort (Ensures imports are sorted properly)
     # "B", # flake8-bugbear (Detects likely bugs and bad practices)
     # "TID", # flake8-tidy-imports (Checks for banned or misplaced imports)
-    # "UP", # pyupgrade (Automatically updates old Python syntax)
+    "UP", # pyupgrade (Automatically updates old Python syntax)
     # "YTT", # flake8-2020 (Detects outdated Python 2/3 compatibility issues)
     # "FLY", # flynt (Converts old-style string formatting to f-strings)
     # "PIE", # flake8-pie

src/lasso/diffcrash/diffcrash_run.py

Lines changed: 6 additions & 8 deletions
@@ -10,7 +10,7 @@
 import time
 import typing
 from concurrent import futures
-from typing import List, Union
+from typing import Union
 from pathlib import Path
 import psutil

@@ -688,9 +688,7 @@ def run_import(self, pool: futures.ThreadPoolExecutor):

         if n_imports_finished != n_new_imports_finished:
             # pylint: disable = consider-using-f-string
-            msg = "Running Imports ... [{0}/{1}] - {2:3.2f}%\r".format(
-                n_new_imports_finished, len(return_code_futures), percentage
-            )
+            msg = f"Running Imports ... [{n_new_imports_finished}/{len(return_code_futures)}] - {percentage:3.2f}%\r"
             print(str_running(msg), end="", flush=True)
             self.logger.info(msg)

@@ -1091,7 +1089,7 @@ def is_logfile_successful(self, logfile: Path) -> bool:
         success : `bool`
         """

-        with open(logfile, "r", encoding="utf-8") as fp:
+        with open(logfile, encoding="utf-8") as fp:
             for line in fp:
                 if "successfully" in line:
                     return True
@@ -1181,7 +1179,7 @@ def clear_project_dir(self):
         # reinit logger
         self.logger = self._setup_logger()

-    def read_config_file(self, config_file: str) -> List[str]:
+    def read_config_file(self, config_file: str) -> list[str]:
         """Read a diffcrash config file

         Parameters
@@ -1203,7 +1201,7 @@ def read_config_file(self, config_file: str) -> List[str]:
         # pylint: disable = too-many-branches
         # pylint: disable = too-many-statements

-        with open(config_file, "r", encoding="utf-8") as conf:
+        with open(config_file, encoding="utf-8") as conf:
             conf_lines = conf.readlines()
         line = 0

@@ -1303,7 +1301,7 @@ def read_config_file(self, config_file: str) -> List[str]:

         return export_item_list

-    def check_if_logfiles_show_success(self, pattern: str) -> List[str]:
+    def check_if_logfiles_show_success(self, pattern: str) -> list[str]:
         """Check if a logfiles with given pattern show success

         Parameters
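
Note: two details of the rewrites above are easy to miss: dropping the `"r"` argument is safe because it is `open()`'s default mode, and the new f-string keeps the original `:3.2f` format spec, so the progress message stays identical. A small sanity check with hypothetical values (`total` stands in for `len(return_code_futures)`):

    # "r" is open()'s default mode, so open(p) behaves like open(p, "r").
    n_new_imports_finished, total, percentage = 3, 10, 30.0

    old_msg = "Running Imports ... [{0}/{1}] - {2:3.2f}%\r".format(
        n_new_imports_finished, total, percentage
    )
    new_msg = f"Running Imports ... [{n_new_imports_finished}/{total}] - {percentage:3.2f}%\r"
    assert old_msg == new_msg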

src/lasso/dimred/dimred_run.py

Lines changed: 3 additions & 2 deletions
@@ -7,7 +7,8 @@
 import sys
 import time
 from concurrent.futures.process import ProcessPoolExecutor
-from typing import Sequence, Tuple, Union
+from typing import Union
+from collections.abc import Sequence

 import h5py
 import numpy as np
@@ -529,7 +530,7 @@ def _parse_simulation_and_reference_runs(
         reference_run_pattern: Union[None, str],
         exclude_runs: Sequence[str],
         table: Table,
-    ) -> Tuple[Sequence[str], str, Sequence[str]]:
+    ) -> tuple[Sequence[str], str, Sequence[str]]:
         # pylint: disable = too-many-locals

         # search all denoted runs

src/lasso/dimred/hashing.py

Lines changed: 6 additions & 5 deletions
@@ -1,7 +1,8 @@
 import multiprocessing
 import os
 import time
-from typing import List, Tuple, Union, Sequence
+from typing import Union
+from collections.abc import Sequence

 import h5py
 import numpy as np
@@ -104,8 +105,8 @@ def _compute_mode_similarities(
     hashes2: np.ndarray,
     eigenvectors_sub1: np.ndarray,
     eigenvectors_sub2: np.ndarray,
-    matches: List[Tuple[int, int]],
-) -> List[float]:
+    matches: list[tuple[int, int]],
+) -> list[float]:
     """Compute the mode similarity between different meshes

     Parameters
@@ -217,7 +218,7 @@ def _join_hash_comparison_thread_files(

 def run_hash_comparison(
     comparison_filepath: str,
-    hashes_filepaths: List[str],
+    hashes_filepaths: list[str],
     n_threads: int = 1,
     print_progress: bool = False,
 ):
@@ -597,7 +598,7 @@ def curve_normalizer(x: np.ndarray, y: np.ndarray):

 def compute_hashes(
     eig_vecs: np.ndarray, result_field: np.ndarray, n_points: int = 100, bandwidth: float = 0.05
-) -> List[Tuple[np.ndarray, np.ndarray]]:
+) -> list[tuple[np.ndarray, np.ndarray]]:
     """Compute hashes for a result field

     Parameters
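
Note: alongside the `List`/`Tuple` rewrites, the `Sequence` import moves from `typing` to `collections.abc`. Since Python 3.9 the container aliases in `typing` are deprecated, and the `collections.abc` classes are subscriptable, so they serve both as annotations and in runtime checks. A tiny sketch under that assumption (hypothetical `first_match` helper, not from this module):

    from collections.abc import Sequence

    def first_match(pairs: Sequence[tuple[int, int]]) -> tuple[int, int]:
        # The ABC doubles as a generic annotation and a runtime check on 3.9+.
        assert isinstance(pairs, Sequence)
        return pairs[0]

    print(first_match([(1, 2), (3, 4)]))  # -> (1, 2)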

src/lasso/dimred/hashing_sphere.py

Lines changed: 2 additions & 3 deletions
@@ -1,5 +1,4 @@
 import os
-import typing
 import warnings

 import h5py
@@ -15,7 +14,7 @@
 warnings.simplefilter(action="ignore", category=FutureWarning)


-def _create_sphere_mesh(diameter: np.ndarray) -> typing.Tuple[np.ndarray, np.ndarray]:
+def _create_sphere_mesh(diameter: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
     """Compute the alpha and beta increments for a
     meshed sphere for binning the projected values

@@ -70,7 +69,7 @@ def _create_sphere_mesh(diameter: np.ndarray) -> typing.Tuple[np.ndarray, np.nda

 def _project_to_sphere(
     points: np.ndarray, centroid: np.ndarray, axis: str = "Z"
-) -> typing.Tuple[np.ndarray, np.ndarray]:
+) -> tuple[np.ndarray, np.ndarray]:
     """compute the projection vectors of centroid to each point in terms of spherical coordinates

     Parameters

src/lasso/dimred/svd/clustering_betas.py

Lines changed: 5 additions & 4 deletions
@@ -1,4 +1,5 @@
-from typing import Sequence, Tuple, Union
+from typing import Union
+from collections.abc import Sequence

 import numpy as np
 from sklearn.cluster import DBSCAN, OPTICS, KMeans, SpectralClustering
@@ -418,7 +419,7 @@ def document_algorithm(keyword):
 }


-def create_cluster_arg_dict(args: Sequence[str]) -> Union[Tuple[str, dict], str]:
+def create_cluster_arg_dict(args: Sequence[str]) -> Union[tuple[str, dict], str]:
     """Determines which cluster to use and creates a python dictionary to use as cluster_params

     Parameters
@@ -493,7 +494,7 @@ def create_cluster_arg_dict(args: Sequence[str]) -> Union[Tuple[str, dict], str]
     return cluster_type, cluster_arg_dict


-def create_detector_arg_dict(args: Sequence[str]) -> Union[Tuple[str, dict], str]:
+def create_detector_arg_dict(args: Sequence[str]) -> Union[tuple[str, dict], str]:
     """Determines which detector to use and creates a python dictionary to use as detector_params

     Parameters
@@ -579,7 +580,7 @@ def group_betas(
     detector=None,
     cluster_params=None,
     detector_params=None,
-) -> Union[Tuple[list, list], str]:
+) -> Union[tuple[list, list], str]:
     """
     Base function to to group betas into groups, detect outliers. Provides that all different
     clustering and outlier detection algorithms are implemented in an easy to access environment.

src/lasso/dimred/svd/keyword_types.py

Lines changed: 2 additions & 5 deletions
@@ -1,6 +1,3 @@
-import typing
-
-
 class ClusterType:
     """Specifies names of specific clustering algorithms

@@ -22,7 +19,7 @@ class ClusterType:
     SpectralClustering = "SpectralClustering"

     @staticmethod
-    def get_cluster_type_name() -> typing.List[str]:
+    def get_cluster_type_name() -> list[str]:
         """Get the name of the clustering algorithms"""
         return [
             ClusterType.OPTICS,
@@ -51,7 +48,7 @@ class DetectorType:
     # Experimental = "Experimental"

     @staticmethod
-    def get_detector_type_name() -> typing.List[str]:
+    def get_detector_type_name() -> list[str]:
         """Get the name of the detector algorithms"""
         return [
             DetectorType.IsolationForest,

src/lasso/dimred/svd/plot_beta_clusters.py

Lines changed: 11 additions & 8 deletions
@@ -2,7 +2,8 @@
 import re
 import time
 import webbrowser
-from typing import Sequence, Union
+from typing import Union
+from collections.abc import Sequence

 import numpy as np

@@ -118,11 +119,13 @@ def plot_clusters_js(
         id_nr.append(id_group)

     # pylint: disable = consider-using-f-string
-    _three_min_ = '<script type="text/javascript">%s</script>' % _read_file(
-        os.path.join(
-            # move path to "~/lasso/"
-            os.path.split(os.path.split(os.path.dirname(__file__))[0])[0],
-            "plotting/resources/three_latest.min.js",
+    _three_min_ = '<script type="text/javascript">{}</script>'.format(
+        _read_file(
+            os.path.join(
+                # move path to "~/lasso/"
+                os.path.split(os.path.split(os.path.dirname(__file__))[0])[0],
+                "plotting/resources/three_latest.min.js",
+            )
         )
     )

@@ -136,10 +139,10 @@ def plot_clusters_js(
             name = "outliers"
             color = "black"
         else:
-            name = "cluster {i}".format(i=index)
+            name = f"cluster {index}"
             color = colorlist[(index - 1) % 10]
         formatted_trace = TRACE_STRING.format(
-            _traceNr_="trace{i}".format(i=index),
+            _traceNr_=f"trace{index}",
             _name_=name,
             _color_=color,
             _runIDs_=id_cluster[index].tolist(),
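
Note: two different auto-fixes meet in this file: the simple `"cluster {i}".format(i=index)` calls become f-strings, while the multi-line `%`-interpolation is only upgraded to `str.format()`, likely because its argument expression spans several lines. All three forms produce the same text; a quick check with a hypothetical index:

    index = 2
    via_percent = "cluster %s" % index          # old printf-style interpolation
    via_format = "cluster {i}".format(i=index)  # str.format, as in the removed code
    via_fstring = f"cluster {index}"            # what the fix produces here
    assert via_percent == via_format == via_fstring == "cluster 2"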

src/lasso/dimred/svd/pod_functions.py

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
-from typing import Tuple, Union
+from typing import Union

 import numpy as np
 from scipy.sparse import csc_matrix
@@ -37,7 +37,7 @@ def calculate_v_and_betas(
     stacked_sub_displ: np.ndarray,
     progress_bar: Union[None, Progress, PlaceHolderBar] = None,
     task_id: Union[None, TaskID] = None,
-) -> Union[str, Tuple[np.ndarray, np.ndarray]]:
+) -> Union[str, tuple[np.ndarray, np.ndarray]]:
     """
     Calculates the right reduced order Basis V and up to 10 eigenvalues of the subsamples

src/lasso/dimred/svd/subsampling_methods.py

Lines changed: 6 additions & 5 deletions
@@ -1,7 +1,8 @@
 import os
 import random
 import time
-from typing import List, Sequence, Tuple, Union
+from typing import Union
+from collections.abc import Sequence

 import numpy as np
 from sklearn.neighbors import NearestNeighbors
@@ -45,7 +46,7 @@ def _mark_dead_eles(node_indexes: np.ndarray, alive_shells: np.ndarray) -> np.nd

 def _extract_shell_parts(
     part_list: Sequence[int], d3plot: D3plot
-) -> Union[Tuple[np.ndarray, np.ndarray], str]:
+) -> Union[tuple[np.ndarray, np.ndarray], str]:
     """
     Extracts a shell part defined by its part ID out of the given d3plot.
     Returns a new node index, relevant coordinates and displacement
@@ -123,7 +124,7 @@ def _extract_shell_parts(
             return err_msg.format(part)

     def mask_parts(
-        part_list2: List[int], element_part_index: np.ndarray, element_node_index: np.ndarray
+        part_list2: list[int], element_part_index: np.ndarray, element_node_index: np.ndarray
     ) -> np.ndarray:
         element_part_filter = np.full(element_part_index.shape, False)
         proc_parts = []
@@ -180,7 +181,7 @@ def mask_parts(

 def create_reference_subsample(
     load_path: str, parts: Sequence[int], nr_samples=2000
-) -> Union[Tuple[np.ndarray, float, float], str]:
+) -> Union[tuple[np.ndarray, float, float], str]:
     """
     Loads the D3plot at load_path, extracts the node coordinates of part 13, returns
     a random subsample of these nodes
@@ -238,7 +239,7 @@ def create_reference_subsample(

 def remap_random_subsample(
     load_path: str, parts: list, reference_subsample: np.ndarray
-) -> Union[Tuple[np.ndarray, float, float], str]:
+) -> Union[tuple[np.ndarray, float, float], str]:
     """
     Remaps the specified sample onto a new mesh provided by reference subsampl, using knn matching

src/lasso/dyna/array_type.py

Lines changed: 1 addition & 4 deletions
@@ -1,6 +1,3 @@
-import typing
-
-
 class ArrayType:
     """Specifies the names for specific arrays

@@ -494,7 +491,7 @@ class ArrayType:
     rigid_wall_position = "rigid_wall_position"

     @staticmethod
-    def get_state_array_names() -> typing.List[str]:
+    def get_state_array_names() -> list[str]:
         """Get the names of all state arrays

         Returns:

src/lasso/dyna/binout.py

Lines changed: 3 additions & 3 deletions
@@ -1,5 +1,5 @@
 import glob
-from typing import List, Union
+from typing import Union

 import h5py
 import numpy as np
@@ -67,13 +67,13 @@ def __init__(self, filepath: str):

         # check file existence
         if not self.filelist:
-            raise IOError("No file was found.")
+            raise OSError("No file was found.")

         # open lsda buffer
         self.lsda = Lsda(self.filelist, "r")
         self.lsda_root = self.lsda.root

-    def read(self, *path) -> Union[List[str], str, np.ndarray]:
+    def read(self, *path) -> Union[list[str], str, np.ndarray]:
         """Read all data from Binout (top to low level)

         Parameters
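
Note: the `IOError` to `OSError` swap does not change behaviour: since Python 3.3, `IOError` is simply a built-in alias of `OSError`, so any existing `except IOError:` handlers still catch the new exception. A quick illustration:

    # IOError has been an alias of OSError since Python 3.3.
    assert IOError is OSError

    try:
        raise OSError("No file was found.")
    except IOError:  # same class under another name, so this still catches it
        pass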
