diff --git a/Taskfile.yml b/Taskfile.yml index 6bdb4e3..29eecbd 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -9,8 +9,8 @@ tasks: lint: desc: Lints the code and reports on issues. cmds: - - uv run black --check . - uv run ruff check . + # - uv run ruff format . --check build: desc: Builds the python package diff --git a/pyproject.toml b/pyproject.toml index b9bfccc..6d7b5fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,6 @@ dependencies = [ "scipy==1.*", "numpy==1.*", "plotly==5.*", - # "attrs==23.*" "psutil==5.*", "rich==13.*", "pandas==2.*", @@ -43,8 +42,7 @@ dimred = "lasso.dimred.run:main" dev = [ "pytest==8.*", "pytest-cov==5.*", - "black==24.*", - "ruff==0.3.*", + "ruff==0.11.*", "mkdocs==1.*", "mkdocs-material==9.*", "mkdocstrings[python]==0.*", @@ -58,10 +56,50 @@ dev = [ requires = ["setuptools>=78", "setuptools-scm>=8"] build-backend = "setuptools.build_meta" -[tool.black] -# We allow longer lines since 80 is quite short -line-length=100 +[tool.ruff] +required-version = "==0.11.*" +line-length = 100 +indent-width = 4 +preview = true -[tool.flake8] -exclude = [".git", "*migrations*"] -max-line-length = 100 +# Output serialization format for violations. +# Default: "full" (env: RUFF_OUTPUT_FORMAT). +# Possible values: concise, full, json, json-lines, junit, grouped, +# github, gitlab, pylint, rdjson, azure, sarif. +output-format = "grouped" + +[tool.ruff.lint] +isort.lines-after-imports = 2 +select = [ + "C", # Complexity checks (e.g., McCabe complexity, comprehensions) + # "ANN001", "ANN201", "ANN401", # flake8-annotations (require strict type annotations for public functions) + # "S", # flake8-bandit (checks for basic security issues in code) + # "BLE", # flake8-blind-except (flags except blocks that do not specify an exception type) + # "FBT", # flake8-boolean-trap (ensures boolean args are keyword-only) + # "E", # pycodestyle errors (PEP 8 style guide violations) + # "W", # pycodestyle warnings (e.g., extra spaces, indentation issues) + # "DOC", # pydoclint issues (e.g., extra or missing return, yield, warnings) + # "A", # flake8-builtins (checks that variable and function names do not shadow builtins) + # "N", # Naming convention checks (e.g., PEP 8 variable and function names) + # "F", # Pyflakes errors (e.g., unused imports, undefined variables) + # "I", # isort (Ensures imports are sorted properly) + # "B", # flake8-bugbear (Detects likely bugs and bad practices) + # "TID", # flake8-tidy-imports (Checks for banned or misplaced imports) + # "UP", # pyupgrade (Automatically updates old Python syntax) + # "YTT", # flake8-2020 (Detects outdated Python 2/3 compatibility issues) + # "FLY", # flynt (Converts old-style string formatting to f-strings) + # "PIE", # flake8-pie (misc. lints) + # "PL", # pylint + # "RUF", # Ruff-specific rules (Additional optimizations and best practices) +] + +ignore = [ + "PLR2004", # [magic-value-comparison](https://docs.astral.sh/ruff/rules/magic-value-comparison) + "C90", # [mccabe](https://docs.astral.sh/ruff/rules/#mccabe-c90) +] + +[tool.ruff.lint.per-file-ignores] + +[tool.ruff.format] +docstring-code-format = true +skip-magic-trailing-comma = true diff --git a/src/lasso/diffcrash/diffcrash_run.py b/src/lasso/diffcrash/diffcrash_run.py index face8c6..9901e64 100644 --- a/src/lasso/diffcrash/diffcrash_run.py +++ b/src/lasso/diffcrash/diffcrash_run.py @@ -287,7 +287,6 @@ def __init__( self.n_processes = self._parse_n_processes(n_processes) def _setup_logger(self) -> logging.Logger: - # better safe than sorry
os.makedirs(self.logfile_dir, exist_ok=True) @@ -310,7 +309,6 @@ def _setup_logger(self) -> logging.Logger: return logger def _parse_diffcrash_home(self, diffcrash_home) -> str: - diffcrash_home_ok = len(diffcrash_home) != 0 msg = self._msg_option.format("diffcrash-home", diffcrash_home) @@ -328,7 +326,6 @@ def _parse_diffcrash_home(self, diffcrash_home) -> str: return diffcrash_home def _parse_crash_code(self, crash_code) -> str: - # these guys are allowed valid_crash_codes = ["dyna", "radioss", "pam"] @@ -340,8 +337,7 @@ def _parse_crash_code(self, crash_code) -> str: if not crash_code_ok: err_msg = ( - f"Invalid crash code '{crash_code}'. " - f"Please use one of: {str(valid_crash_codes)}" + f"Invalid crash code '{crash_code}'. Please use one of: {str(valid_crash_codes)}" ) self.logger.error(err_msg) raise RuntimeError(str_error(err_msg)) @@ -349,7 +345,6 @@ def _parse_crash_code(self, crash_code) -> str: return crash_code def _parse_reference_run(self, reference_run) -> str: - reference_run_ok = Path(reference_run).is_file() msg = self._msg_option.format("reference-run", reference_run) @@ -364,7 +359,6 @@ def _parse_reference_run(self, reference_run) -> str: return reference_run def _parse_use_id_mapping(self, use_id_mapping) -> bool: - msg = self._msg_option.format("use-id-mapping", use_id_mapping) print(str_info(msg)) self.logger.info(msg) @@ -386,7 +380,6 @@ def _parse_simulation_runs( reference_run: str, exclude_runs: typing.Sequence[str], ): - # search all denoted runs simulation_runs = [] for pattern in simulation_run_patterns: @@ -442,7 +435,6 @@ def natural_keys(text): return simulation_runs def _parse_config_file(self, config_file) -> Union[str, None]: - _msg_config_file = "" if len(config_file) > 0 and not Path(config_file).is_file(): config_file = None @@ -450,11 +442,9 @@ def _parse_config_file(self, config_file) -> Union[str, None]: # missing config file else: - config_file = None _msg_config_file = ( - "Config file missing. " - "Consider specifying the path with the option '--config-file'." + "Config file missing. Consider specifying the path with the option '--config-file'." 
) msg = self._msg_option.format("config-file", config_file) @@ -468,7 +458,6 @@ def _parse_config_file(self, config_file) -> Union[str, None]: return config_file def _parse_parameter_file(self, parameter_file) -> Union[None, str]: - _msg_parameter_file = "" if len(parameter_file) > 0 and not Path(parameter_file).is_file(): parameter_file = None @@ -492,7 +481,6 @@ def _parse_parameter_file(self, parameter_file) -> Union[None, str]: return parameter_file def _parse_n_processes(self, n_processes) -> int: - print(str_info(self._msg_option.format("n-processes", n_processes))) if n_processes <= 0: @@ -640,7 +628,6 @@ def run_import(self, pool: futures.ThreadPoolExecutor): # entry 0 is the reference run, thus we start at 1 # pylint: disable = consider-using-enumerate for i_filepath in range(len(self.simulation_runs)): - # parameter file missing if self.parameter_file is None: if self.use_id_mapping: @@ -691,7 +678,6 @@ def run_import(self, pool: futures.ThreadPoolExecutor): return_code_future.done() for return_code_future in return_code_futures ) while n_imports_finished != len(return_code_futures): - # check again n_new_imports_finished = sum( return_code_future.done() for return_code_future in return_code_futures @@ -717,7 +703,6 @@ def run_import(self, pool: futures.ThreadPoolExecutor): # print failure if any(return_code != 0 for return_code in return_codes): - n_failed_runs = 0 for i_run, return_code in enumerate(return_codes): if return_code != 0: @@ -739,7 +724,6 @@ def run_import(self, pool: futures.ThreadPoolExecutor): # check log files messages = self.check_if_logfiles_show_success("DFC_Import_*.log") if messages: - # print failure msg = f"Running Imports ... done in {time.time() - start_time:.2f}s " print(str_error(msg)) @@ -774,14 +758,12 @@ def run_math(self, pool: futures.ThreadPoolExecutor): start_time = time.time() return_code_future = pool.submit( - run_subprocess, - [self.diffcrash_home / f"DFC_Math_{self.crash_code}", self.project_dir], + run_subprocess, [self.diffcrash_home / f"DFC_Math_{self.crash_code}", self.project_dir] ) return_code = return_code_future.result() # check return code if return_code != 0: - msg = f"Running Math ... done in {time.time() - start_time:.2f}s " print(str_error(msg)) self.logger.error(msg) @@ -793,7 +775,6 @@ def run_math(self, pool: futures.ThreadPoolExecutor): # check logs messages = self.check_if_logfiles_show_success("DFC_MATH*.log") if messages: - # print failure msg = f"Running Math ... done in {time.time() - start_time:.2f}s " print(str_error(msg)) @@ -889,7 +870,6 @@ def run_export(self, pool: futures.ThreadPoolExecutor): # check logs messages = self.check_if_logfiles_show_success("DFC_Export_*") if messages: - # print failure msg = f"Running Export ... done in {time.time() - start_time:.2f}s " print(str_error(msg)) @@ -900,10 +880,7 @@ def run_export(self, pool: futures.ThreadPoolExecutor): print(str_error(msg)) self.logger.error(msg) - msg = ( - "At least one export failed. " - f"Please check the log files in '{self.logfile_dir}'." - ) + msg = f"At least one export failed. Please check the log files in '{self.logfile_dir}'." self.logger.error(msg) raise RuntimeError(str_error(msg)) @@ -945,7 +922,6 @@ def run_matrix(self, pool: futures.ThreadPoolExecutor): # check return code if return_code != 0: - # print failure msg = f"Running Matrix ... 
done in {time.time() - start_time:.2f}s " print(str_error(msg)) @@ -958,7 +934,6 @@ def run_matrix(self, pool: futures.ThreadPoolExecutor): # check log file messages = self.check_if_logfiles_show_success("DFC_Matrix_*") if messages: - # print failure msg = f"Running Matrix ... done in {time.time() - start_time:.2f}s " print(str_error(msg)) @@ -1021,7 +996,6 @@ def run_eigen(self, pool: futures.ThreadPoolExecutor): # check log file messages = self.check_if_logfiles_show_success("DFC_Matrix_*") if messages: - # print failure msg = f"Running Eigen ... done in {time.time() - start_time:.2f}s " print(str_error(msg)) diff --git a/src/lasso/diffcrash/run.py b/src/lasso/diffcrash/run.py index 0a2d862..8fd0541 100644 --- a/src/lasso/diffcrash/run.py +++ b/src/lasso/diffcrash/run.py @@ -17,7 +17,6 @@ def _parse_stages(start_stage: str, end_stage: str): - # check validity if start_stage not in DC_STAGES or end_stage not in DC_STAGES: raise ValueError( @@ -76,7 +75,6 @@ def main(): # initiate threading pool for handling jobs with futures.ThreadPoolExecutor(max_workers=diffcrash_run.n_processes) as pool: - # setup if start_stage_index <= DC_STAGES.index(DC_STAGE_SETUP) <= end_stage_index: diffcrash_run.run_setup(pool) diff --git a/src/lasso/dimred/dimred_run.py b/src/lasso/dimred/dimred_run.py index bed792a..d822395 100644 --- a/src/lasso/dimred/dimred_run.py +++ b/src/lasso/dimred/dimred_run.py @@ -328,7 +328,9 @@ def __init__( self.logfile_filepath = ( logfile_filepath if logfile_filepath - else os.path.join(project_dir, "logfile") if project_dir else "" + else os.path.join(project_dir, "logfile") + if project_dir + else "" ) self._msg_option = "{:16s}: {}" @@ -346,12 +348,10 @@ def __init__( # parse simulation and reference run # if no reference run was set use first simulation run - ( - self.simulation_runs, - self.reference_run, - self.exclude_runs, - ) = self._parse_simulation_and_reference_runs( - simulation_runs, reference_run, tuple() if not exclude_runs else exclude_runs, table + (self.simulation_runs, self.reference_run, self.exclude_runs) = ( + self._parse_simulation_and_reference_runs( + simulation_runs, reference_run, () if not exclude_runs else exclude_runs, table + ) ) # check if basename or foldername serves as unique identifier @@ -361,7 +361,7 @@ def __init__( # set project dir and simulation runs self.project_dir = self._parse_project_dir(project_dir, table) - self.part_ids = part_id_filter if part_id_filter is not None else tuple() + self.part_ids = part_id_filter if part_id_filter is not None else () if self.part_ids is not None and len(self.part_ids) != 0: table.add_row("selected parts", ",".join(str(entry) for entry in self.part_ids)) self.timestep = timestep @@ -449,7 +449,6 @@ def _check_img_path(self, img_path: str) -> str: self.raise_error(err_msg) def _parse_stages(self, start_stage: str, end_stage: str): - # check validity if start_stage not in DIMRED_STAGES: err_msg = f"{start_stage} is not a valid stage. Try: {', '.join(DIMRED_STAGES)}." 
@@ -498,16 +497,14 @@ def _check_valid_stage_skip(self): self.log("Skipped clustering stage", style="warning") def _parse_part_ids(self, part_ids: Union[Sequence[int], None]) -> Sequence[int]: - if not part_ids: - return tuple() + return () assert all(isinstance(pid, int) for pid in part_ids), "All part ids must be of type 'int'" return part_ids def _parse_project_dir(self, project_dir: Union[str, None], table: Table): - if not project_dir: return "" @@ -533,7 +530,6 @@ def _parse_simulation_and_reference_runs( exclude_runs: Sequence[str], table: Table, ) -> Tuple[Sequence[str], str, Sequence[str]]: - # pylint: disable = too-many-locals # search all denoted runs @@ -576,7 +572,6 @@ def _parse_simulation_and_reference_runs( # check for valid reference run reference_run = "" if reference_run_pattern: - reference_run_ok = os.path.isfile(reference_run_pattern) if not reference_run_ok: err_msg = f"Filepath '{reference_run_pattern}' is not a file." @@ -641,7 +636,6 @@ def _parse_cluster_and_outlier_args( self.detector_args = result[1] def _parse_n_processes(self, n_processes: int, table: Table) -> int: - if n_processes <= 0: err_msg = f"n-processes is '{n_processes}' but must be at least 1." self.raise_error(err_msg) @@ -650,7 +644,6 @@ def _parse_n_processes(self, n_processes: int, table: Table) -> int: return n_processes def _parse_html_name(self, html_name_string: str) -> str: - html_name, replace_count = re.subn(r"[!§$%&/()=?\"\[\]{}\\.,;:<>|]", "", html_name_string) html_name = html_name.replace(" ", "-") @@ -750,7 +743,6 @@ def subsample_to_reference_run(self): prog = PlaceHolderBar() with prog: - # define progressbar task task1 = prog.add_task( "[cyan]Subsampling plots [/cyan]", total=len(self.simulation_runs) @@ -876,12 +868,10 @@ def dimension_reduction_svd(self): if entry not in excluded_entries ] - run_timesteps = np.array( - [ - self.h5file[HDF5FileNames.SUBSAMPLED_GROUP_NAME.value][entry].shape[0] - for entry in valid_entries - ] - ) + run_timesteps = np.array([ + self.h5file[HDF5FileNames.SUBSAMPLED_GROUP_NAME.value][entry].shape[0] + for entry in valid_entries + ]) min_step = np.min(run_timesteps) max_step = np.max(run_timesteps) @@ -897,16 +887,12 @@ def dimension_reduction_svd(self): # add task after checking condition, else output looks wonky beta_task = prog.add_task("[cyan]Reducing Plots [/cyan]", total=int(min_step)) - sub_displ = np.stack( - [ - self.h5file[HDF5FileNames.SUBSAMPLED_GROUP_NAME.value][entry][:min_step, :] - for entry in valid_entries - ] - ) + sub_displ = np.stack([ + self.h5file[HDF5FileNames.SUBSAMPLED_GROUP_NAME.value][entry][:min_step, :] + for entry in valid_entries + ]) - result = calculate_v_and_betas( - sub_displ, progress_bar=prog, task_id=beta_task - ) # type: ignore + result = calculate_v_and_betas(sub_displ, progress_bar=prog, task_id=beta_task) # type: ignore # returns string if samplesize to small if isinstance(result, str): self.raise_error(result) @@ -961,13 +947,13 @@ def clustering_results(self): for entry in self.exclude_runs ] - beta_index = np.stack( - [key for key in betas_group.keys() if key not in excluded_entries] - ) + beta_index = np.stack([ + key for key in betas_group.keys() if key not in excluded_entries + ]) try: - betas = np.stack( - [betas_group[entry][self.timestep, :3] for entry in beta_index] - ) # betas_group.keys()]) + betas = np.stack([ + betas_group[entry][self.timestep, :3] for entry in beta_index + ]) # betas_group.keys()]) except ValueError: log_msg = ( "Invalid parameter for timestep. 
Set a valid timestep with --timestep.\n" @@ -1032,7 +1018,6 @@ def visualize_results(self): # check if clustering was performed, else load all betas into one pseudo-cluster if HDF5FileNames.NR_CLUSTER.value not in betas_group.attrs: - # plotfunction expects list of cluster # we have no clusters -> we claim all is in one cluster @@ -1053,9 +1038,9 @@ def visualize_results(self): id_data = np.stack([key for key in betas_group.keys() if key not in excluded_entries]) # create an index referencing each run to a cluster - cluster_index = np.stack( - [betas_group[entry].attrs[HDF5FileNames.CLUSTER.value] for entry in id_data] - ) + cluster_index = np.stack([ + betas_group[entry].attrs[HDF5FileNames.CLUSTER.value] for entry in id_data + ]) # load betas & ids beta_data = np.stack([betas_group[entry][-1] for entry in id_data]) diff --git a/src/lasso/dimred/graph_laplacian.py b/src/lasso/dimred/graph_laplacian.py index 5b81d19..25a01ce 100644 --- a/src/lasso/dimred/graph_laplacian.py +++ b/src/lasso/dimred/graph_laplacian.py @@ -143,7 +143,6 @@ def _laplacian(lapl: csgraph, n_eigenmodes: int = 5): eigen_vecs = np.empty((0, 0)) while n_nonzero_eigenvalues < n_eigenmodes: - eigen_vals, eigen_vecs = map(np.real, eigsh(lapl, n_eigenvalues, which="SA")) i_start = np.argmax(eigen_vals > 1e-7) diff --git a/src/lasso/dimred/hashing.py b/src/lasso/dimred/hashing.py index a030b68..2186a8c 100644 --- a/src/lasso/dimred/hashing.py +++ b/src/lasso/dimred/hashing.py @@ -42,12 +42,10 @@ def _match_modes( mode2_hash_indexes = list(range(len(hashes2))) for i_hash in mode1_hash_indexes: - field1 = eigenvectors_sub1[:, i_hash] found_match = False for j_entry, j_hash in enumerate(mode2_hash_indexes): - field2 = eigenvectors_sub2[:, j_hash] if is_mode_match(field1, field2): @@ -135,7 +133,6 @@ def _compute_mode_similarities( mode_similarities = [] for i_hash, j_hash in matches: - assert hashes1.shape[2] == hashes2.shape[2] field1 = eigenvectors_sub1[:, i_hash] @@ -199,10 +196,8 @@ def _join_hash_comparison_thread_files( ) for thread_filepath in thread_filepaths: - # open thread file with h5py.File(thread_filepath, "r") as thread_file: - # insert matrix entries matrix_indexes = thread_file["matrix_indexes"] matrix_similarities = thread_file["matrix_similarities"] @@ -309,10 +304,8 @@ def _threading_run_comparison(run_indices, comparison_filepath, comm_q): ) def _save_data(computed_results, counter): - start = counter + 1 - len(computed_results) for i_result, result in enumerate(computed_results): - i_run, j_run = result["matrix_index"] similarities = result["similarities"] matches_tmp = result["matches"] @@ -336,7 +329,6 @@ def _save_data(computed_results, counter): counter = None # bugfix computed_results = [] for counter, (i_run, j_run) in enumerate(run_indices): - start = time.time() # get data (io) @@ -456,7 +448,6 @@ def _save_data(computed_results, counter): ] while any(thread.is_alive() for thread in threads): - # fetch data from channel for i_thread, comm_q in enumerate(queues): if not comm_q.empty(): @@ -605,10 +596,7 @@ def curve_normalizer(x: np.ndarray, y: np.ndarray): def compute_hashes( - eig_vecs: np.ndarray, - result_field: np.ndarray, - n_points: int = 100, - bandwidth: float = 0.05, + eig_vecs: np.ndarray, result_field: np.ndarray, n_points: int = 100, bandwidth: float = 0.05 ) -> List[Tuple[np.ndarray, np.ndarray]]: """Compute hashes for a result field @@ -639,7 +627,6 @@ def compute_hashes( hash_functions = [] for i_eigen in range(eig_vecs.shape[1]): - xmin = eig_vecs[:, i_eigen].min() xmax = 
eig_vecs[:, i_eigen].max() diff --git a/src/lasso/dimred/run.py b/src/lasso/dimred/run.py index 24de8bf..3c1b225 100644 --- a/src/lasso/dimred/run.py +++ b/src/lasso/dimred/run.py @@ -15,9 +15,12 @@ def main(): # parse command line stuff parser = parse_dimred_args() - log_theme = Theme( - {"info": "royal_blue1", "success": "green", "warning": "dark_orange3", "error": "bold red"} - ) + log_theme = Theme({ + "info": "royal_blue1", + "success": "green", + "warning": "dark_orange3", + "error": "bold red", + }) console = Console(theme=log_theme, record=True, highlight=False) try: @@ -48,7 +51,6 @@ def main(): # initiate threading pool for handling jobs with dimred_run: - # setup if ( dimred_run.start_stage_index diff --git a/src/lasso/dimred/svd/clustering_betas.py b/src/lasso/dimred/svd/clustering_betas.py index 8f84caf..08938a9 100644 --- a/src/lasso/dimred/svd/clustering_betas.py +++ b/src/lasso/dimred/svd/clustering_betas.py @@ -485,8 +485,7 @@ def create_cluster_arg_dict(args: Sequence[str]) -> Union[Tuple[str, dict], str] val = v_type(values[ind]) except ValueError: err_msg = ( - f"Clustering: Invalid value {values[ind]} " - f"for parameter {param} of type {v_type}" + f"Clustering: Invalid value {values[ind]} for parameter {param} of type {v_type}" ) return err_msg cluster_arg_dict[param] = val @@ -650,7 +649,6 @@ def group_betas( betas, _ = __rescale_betas(betas) if detector == "Experimental": - experimental_results = __detector_dict[detector](betas, beta_index, **detector_params) if not isinstance(experimental_results, bool): outlier_betas, outlier_index, inlier_index = experimental_results diff --git a/src/lasso/dimred/svd/pod_functions.py b/src/lasso/dimred/svd/pod_functions.py index 1abb000..d367539 100644 --- a/src/lasso/dimred/svd/pod_functions.py +++ b/src/lasso/dimred/svd/pod_functions.py @@ -57,17 +57,17 @@ def calculate_v_and_betas( Error message if not enough samples where provided """ - big_mat = stacked_sub_displ.reshape( - ( - stacked_sub_displ.shape[0], - stacked_sub_displ.shape[1], - stacked_sub_displ.shape[2] * stacked_sub_displ.shape[3], - ) - ) - - diff_mat = np.stack([big_mat[:, 0, :] for _ in range(big_mat.shape[1])]).reshape( - (big_mat.shape[0], big_mat.shape[1], big_mat.shape[2]) - ) + big_mat = stacked_sub_displ.reshape(( + stacked_sub_displ.shape[0], + stacked_sub_displ.shape[1], + stacked_sub_displ.shape[2] * stacked_sub_displ.shape[3], + )) + + diff_mat = np.stack([big_mat[:, 0, :] for _ in range(big_mat.shape[1])]).reshape(( + big_mat.shape[0], + big_mat.shape[1], + big_mat.shape[2], + )) # We only want the difference in displacement big_mat = big_mat - diff_mat diff --git a/src/lasso/dimred/svd/subsampling_methods.py b/src/lasso/dimred/svd/subsampling_methods.py index ee3d988..5a26611 100644 --- a/src/lasso/dimred/svd/subsampling_methods.py +++ b/src/lasso/dimred/svd/subsampling_methods.py @@ -125,7 +125,6 @@ def _extract_shell_parts( def mask_parts( part_list2: List[int], element_part_index: np.ndarray, element_node_index: np.ndarray ) -> np.ndarray: - element_part_filter = np.full(element_part_index.shape, False) proc_parts = [] diff --git a/src/lasso/dyna/binout.py b/src/lasso/dyna/binout.py index 9c6710d..be03569 100644 --- a/src/lasso/dyna/binout.py +++ b/src/lasso/dyna/binout.py @@ -109,14 +109,14 @@ def read(self, *path) -> Union[List[str], str, np.ndarray]: ['swforc'] >>> binout.read("swforc") ['title', 'failure', 'ids', 'failure_time', ...] 
- >>> binout.read("swforc","shear").shape + >>> binout.read("swforc", "shear").shape (321L, 26L) - >>> binout.read("swforc","ids").shape + >>> binout.read("swforc", "ids").shape (26L,) - >>> binout.read("swforc","ids") + >>> binout.read("swforc", "ids") array([52890, 52891, 52892, ...]) >>> # read a string value - >>> binout.read("swforc","date") + >>> binout.read("swforc", "date") '11/05/2013' """ @@ -143,11 +143,11 @@ def as_df(self, *args) -> pd.DataFrame: Examples -------- >>> from lasso.dyna import Binout - >>> binout = Binout('path/to/binout') + >>> binout = Binout("path/to/binout") Read a time-dependent array. - >>> binout.as_df('glstat', 'eroded_kinetic_energy') + >>> binout.as_df("glstat", "eroded_kinetic_energy") time 0.00000 0.000000 0.19971 0.000000 @@ -164,7 +164,7 @@ def as_df(self, *args) -> pd.DataFrame: Read a time and id-dependent array. - >>> binout.as_df('secforc', 'x_force') + >>> binout.as_df("secforc", "x_force") 1 2 3 ... 33 34 time . 0.00063 2.168547e-16 2.275245e-15 -3.118639e-14 ... -5.126108e-13 4.592941e-16 @@ -320,7 +320,6 @@ def _get_variable(self, path): time = [] data = [] for subdir_name, subdir_symbol in dir_symbol.children.items(): - # skip metadata if subdir_name == "metadata": continue @@ -465,7 +464,6 @@ def _save_all_variables(self, hdf5_grp, compression, *path): # iterate through subdirs if isinstance(ret, list): - if path_str: hdf5_grp = hdf5_grp.create_group(path_str) diff --git a/src/lasso/dyna/d3plot.py b/src/lasso/dyna/d3plot.py index 5520620..cc59317 100644 --- a/src/lasso/dyna/d3plot.py +++ b/src/lasso/dyna/d3plot.py @@ -225,9 +225,7 @@ def build_header(self): ArrayType.rigid_road_segment_road_id, ] _check_array_occurrence( - self.d3plot, - array_names=required_arrays, - required_array_names=required_arrays, + self.d3plot, array_names=required_arrays, required_array_names=required_arrays ) has_rigid_road = ArrayType.rigid_road_node_ids in self.d3plot.arrays @@ -391,9 +389,7 @@ def build_header(self): ArrayType.element_solid_part_indexes, ] _check_array_occurrence( - self.d3plot, - array_names=required_arrays, - required_array_names=required_arrays, + self.d3plot, array_names=required_arrays, required_array_names=required_arrays ) if ArrayType.element_solid_part_indexes in self.d3plot.arrays: part_indexes = self.d3plot.arrays[ArrayType.element_solid_part_indexes] @@ -434,12 +430,7 @@ def build_header(self): n_solid_hist_vars, _ = self.count_array_state_var( array_type=ArrayType.element_solid_history_variables, - dimension_names=[ - "n_timesteps", - "n_solids", - "n_solid_layers", - "n_history_vars", - ], + dimension_names=["n_timesteps", "n_solids", "n_solid_layers", "n_history_vars"], has_layers=True, n_layers=n_solid_layers, ) @@ -467,14 +458,9 @@ def build_header(self): ) # NUMMAT2 - beam material count - required_arrays = [ - ArrayType.element_beam_node_indexes, - ArrayType.element_beam_part_indexes, - ] + required_arrays = [ArrayType.element_beam_node_indexes, ArrayType.element_beam_part_indexes] _check_array_occurrence( - self.d3plot, - array_names=required_arrays, - required_array_names=required_arrays, + self.d3plot, array_names=required_arrays, required_array_names=required_arrays ) if ArrayType.element_beam_part_indexes in self.d3plot.arrays: part_indexes = self.d3plot.arrays[ArrayType.element_beam_part_indexes] @@ -541,9 +527,7 @@ def build_header(self): ArrayType.element_shell_part_indexes, ] _check_array_occurrence( - self.d3plot, - array_names=required_arrays, - required_array_names=required_arrays, + self.d3plot, 
array_names=required_arrays, required_array_names=required_arrays ) if ArrayType.element_shell_part_indexes in self.d3plot.arrays: part_indexes = self.d3plot.arrays[ArrayType.element_shell_part_indexes] @@ -567,23 +551,13 @@ def build_header(self): ): n_shell_history_vars, n_shell_layers = self.count_array_state_var( array_type=ArrayType.element_shell_history_vars, - dimension_names=[ - "n_timesteps", - "n_shells", - "n_shell_layers", - "n_history_vars", - ], + dimension_names=["n_timesteps", "n_shells", "n_shell_layers", "n_history_vars"], has_layers=True, n_layers=n_shell_layers, ) n_tshell_history_vars, n_tshell_layers = self.count_array_state_var( array_type=ArrayType.element_tshell_history_variables, - dimension_names=[ - "n_timesteps", - "n_tshells", - "n_shell_layers", - "n_history_vars", - ], + dimension_names=["n_timesteps", "n_tshells", "n_shell_layers", "n_history_vars"], has_layers=True, n_layers=n_shell_layers, ) @@ -598,8 +572,7 @@ def build_header(self): # we are tolerant here and simply add zero padding for the other # field later on new_header["neips"] = max( - n_tshell_history_vars // n_tshell_layers, - n_shell_history_vars // n_shell_layers, + n_tshell_history_vars // n_tshell_layers, n_shell_history_vars // n_shell_layers ) else: new_header["neips"] = 0 @@ -829,9 +802,7 @@ def build_header(self): ArrayType.element_tshell_part_indexes, ] _check_array_occurrence( - self.d3plot, - array_names=required_arrays, - required_array_names=required_arrays, + self.d3plot, array_names=required_arrays, required_array_names=required_arrays ) if ArrayType.element_tshell_part_indexes in self.d3plot.arrays: part_indexes = self.d3plot.arrays[ArrayType.element_tshell_part_indexes] @@ -951,9 +922,7 @@ def build_header(self): msg = "Array '{0}' was expected to have {1} dimensions ({2})." 
raise ValueError( msg.format( - ArrayType.rigid_wall_force, - 2, - ",".join(["n_timesteps", "n_rigid_walls"]), + ArrayType.rigid_wall_force, 2, ",".join(["n_timesteps", "n_rigid_walls"]) ) ) n_rigid_walls = array.shape[1] @@ -1049,9 +1018,7 @@ def build_header(self): ArrayType.element_shell_node8_extra_node_indexes, ] _check_array_occurrence( - self.d3plot, - array_names=required_arrays, - required_array_names=required_arrays, + self.d3plot, array_names=required_arrays, required_array_names=required_arrays ) new_header["nel48"] = ( len(self.d3plot.arrays[ArrayType.element_shell_node8_element_index]) @@ -1065,9 +1032,7 @@ def build_header(self): ArrayType.element_solid_node20_extra_node_indexes, ] _check_array_occurrence( - self.d3plot, - array_names=required_arrays, - required_array_names=required_arrays, + self.d3plot, array_names=required_arrays, required_array_names=required_arrays ) if ArrayType.element_solid_node20_element_index in self.d3plot.arrays: new_header["nel20"] = len( @@ -1088,9 +1053,7 @@ def build_header(self): ArrayType.element_solid_node27_extra_node_indexes, ] _check_array_occurrence( - self.d3plot, - array_names=required_arrays, - required_array_names=required_arrays, + self.d3plot, array_names=required_arrays, required_array_names=required_arrays ) if ArrayType.element_solid_node27_element_index in self.d3plot.arrays: new_header["nel27"] = len( @@ -1208,11 +1171,7 @@ def pack(self, value: Any, size=None, dtype_hint=None) -> bytes: raise RuntimeError(msg, type(value), value) def count_array_state_var( - self, - array_type: str, - dimension_names: List[str], - has_layers: bool, - n_layers: int = 0, + self, array_type: str, dimension_names: List[str], has_layers: bool, n_layers: int = 0 ) -> Tuple[int, int]: """This functions checks and updates the variable count for certain types of arrays @@ -1258,8 +1217,7 @@ def count_array_state_var( else: if n_layers != array.shape[2]: msg = ( - "Array '{0}' has '{1}' integration layers" - " but another array used '{2}'." + "Array '{0}' has '{1}' integration layers but another array used '{2}'." 
) raise ValueError(msg.format(array_type, array.shape[2], n_layers)) @@ -1416,9 +1374,7 @@ class RigidBodyInfo: n_rigid_bodies: int = 0 def __init__( - self, - rigid_body_metadata_list: Iterable[RigidBodyMetadata], - n_rigid_bodies: int = 0, + self, rigid_body_metadata_list: Iterable[RigidBodyMetadata], n_rigid_bodies: int = 0 ): self.rigid_body_metadata_list = rigid_body_metadata_list self.n_rigid_bodies = n_rigid_bodies @@ -1434,11 +1390,7 @@ class RigidRoadInfo: motion: int = 0 def __init__( - self, - n_nodes: int = 0, - n_road_segments: int = 0, - n_roads: int = 0, - motion: int = 0, + self, n_nodes: int = 0, n_road_segments: int = 0, n_roads: int = 0, motion: int = 0 ): self.n_nodes = n_nodes self.n_road_segments = n_road_segments @@ -1537,7 +1489,7 @@ def __init__( self._sph_info = SphSectionInfo() self._airbag_info = AirbagInfo() self._numbering_info = NumberingInfo() - self._rigid_body_info = RigidBodyInfo(rigid_body_metadata_list=tuple()) + self._rigid_body_info = RigidBodyInfo(rigid_body_metadata_list=()) self._rigid_road_info = RigidRoadInfo() self._state_info = StateInfo() @@ -1588,9 +1540,7 @@ def __init__( ): LOGGER.debug("Advanced FEMZIP-API used") try: - self._read_states_femzip_advanced( - filepath, - ) + self._read_states_femzip_advanced(filepath) except Exception: trace = traceback.format_exc() warn_msg = ( @@ -1821,7 +1771,7 @@ def _read_d3plot_file_generator( # some status n_files = len(file_infos) - n_states = sum(map(lambda file_info: file_info.n_states, file_infos)) + n_states = sum(file_info.n_states for file_info in file_infos) LOGGER.debug("n_files found: %d", n_files) LOGGER.debug("n_states estimated: %d", n_states) @@ -1859,7 +1809,7 @@ def _read_d3plot_file_generator( LOGGER.debug("buffers: %s", pprint.pformat([info.__dict__ for info in file_infos])) # number of states and if buffered reading is used - n_states_selected = sum(map(lambda file_info: file_info.n_states, file_infos)) + n_states_selected = sum(file_info.n_states for file_info in file_infos) yield n_states_selected sub_file_infos = [file_infos] if not buffered_reading else [[info] for info in file_infos] @@ -1964,10 +1914,7 @@ def _read_states_femzip_advanced(self, filepath: str) -> None: return # filter femzip vars according to requested d3plot vars - file_metadata_filtered = filter_femzip_variables( - file_metadata, - d3plot_array_filter, - ) + file_metadata_filtered = filter_femzip_variables(file_metadata, d3plot_array_filter) # read femzip arrays result_arrays = api.read_variables( @@ -1986,11 +1933,7 @@ def _read_states_femzip_advanced(self, filepath: str) -> None: # global vars if fz_cat == FemzipVariableCategory.GLOBAL: keys_to_remove.append((fz_index, fz_name, fz_cat)) - self._read_states_globals( - state_data=array, - var_index=0, - array_dict=self.arrays, - ) + self._read_states_globals(state_data=array, var_index=0, array_dict=self.arrays) # parts and rigid walls elif fz_cat == FemzipVariableCategory.PART: @@ -2153,14 +2096,11 @@ def _read_material_section(self): if test_nummat != self.header.n_parts: raise RuntimeError( "nmmat (header) != nmmat (material type data): " - f"{self.header.n_parts} != {test_nummat}", + f"{self.header.n_parts} != {test_nummat}" ) self.arrays[ArrayType.part_material_type] = self._buffer.read_ndarray( - position, - self.header.n_parts * self.header.wordsize, - 1, - self.header.itype, + position, self.header.n_parts * self.header.wordsize, 1, self.header.itype ) position += self.header.n_parts * self.header.wordsize @@ -2341,10 +2281,7 @@ def 
_read_particle_data(self): try: # variable typecodes self.arrays[ArrayType.airbag_variable_types] = self._buffer.read_ndarray( - position, - n_airbag_variables * self.header.wordsize, - 1, - self._header.itype, + position, n_airbag_variables * self.header.wordsize, 1, self._header.itype ) position += n_airbag_variables * self.header.wordsize @@ -2402,10 +2339,7 @@ def _read_geometry_data(self): section_word_length = n_dimensions * n_nodes try: node_coordinates = self._buffer.read_ndarray( - position, - section_word_length * self.header.wordsize, - 1, - self.header.ftype, + position, section_word_length * self.header.wordsize, 1, self.header.ftype ).reshape((n_nodes, n_dimensions)) self.arrays[ArrayType.node_coordinates] = node_coordinates except Exception: @@ -2420,10 +2354,7 @@ def _read_geometry_data(self): section_word_length = 9 * n_solids try: elem_solid_data = self._buffer.read_ndarray( - position, - section_word_length * self.header.wordsize, - 1, - self._header.itype, + position, section_word_length * self.header.wordsize, 1, self._header.itype ).reshape((n_solids, 9)) solid_connectivity = elem_solid_data[:, :8] solid_part_indexes = elem_solid_data[:, 8] @@ -2442,10 +2373,7 @@ def _read_geometry_data(self): try: self.arrays[ArrayType.element_solid_extra_nodes] = elem_solid_data = ( self._buffer.read_ndarray( - position, - section_word_length * self.header.wordsize, - 1, - self._header.itype, + position, section_word_length * self.header.wordsize, 1, self._header.itype ).reshape((n_solids, 2)) ) except Exception: @@ -2460,10 +2388,7 @@ def _read_geometry_data(self): section_word_length = 9 * n_thick_shells try: elem_tshell_data = self._buffer.read_ndarray( - position, - section_word_length * self.header.wordsize, - 1, - self._header.itype, + position, section_word_length * self.header.wordsize, 1, self._header.itype ).reshape((self.header.n_thick_shells, 9)) self.arrays[ArrayType.element_tshell_node_indexes] = ( elem_tshell_data[:, :8] - FORTRAN_OFFSET @@ -2483,10 +2408,7 @@ def _read_geometry_data(self): section_word_length = 6 * n_beams try: elem_beam_data = self._buffer.read_ndarray( - position, - section_word_length * self.header.wordsize, - 1, - self._header.itype, + position, section_word_length * self.header.wordsize, 1, self._header.itype ).reshape((n_beams, 6)) self.arrays[ArrayType.element_beam_part_indexes] = elem_beam_data[:, 5] - FORTRAN_OFFSET self.arrays[ArrayType.element_beam_node_indexes] = ( @@ -2504,10 +2426,7 @@ def _read_geometry_data(self): section_word_length = 5 * n_shells try: elem_shell_data = self._buffer.read_ndarray( - position, - section_word_length * self.header.wordsize, - 1, - self._header.itype, + position, section_word_length * self.header.wordsize, 1, self._header.itype ).reshape((self.header.n_shells, 5)) self.arrays[ArrayType.element_shell_node_indexes] = ( elem_shell_data[:, :4] - FORTRAN_OFFSET @@ -2533,34 +2452,22 @@ def _read_user_ids(self): if not self.header.has_numbering_section: self.arrays[ArrayType.node_ids] = np.arange( - FORTRAN_OFFSET, - self.header.n_nodes + FORTRAN_OFFSET, - dtype=self.header.itype, + FORTRAN_OFFSET, self.header.n_nodes + FORTRAN_OFFSET, dtype=self.header.itype ) self.arrays[ArrayType.element_solid_ids] = np.arange( - FORTRAN_OFFSET, - self.header.n_solids + FORTRAN_OFFSET, - dtype=self.header.itype, + FORTRAN_OFFSET, self.header.n_solids + FORTRAN_OFFSET, dtype=self.header.itype ) self.arrays[ArrayType.element_beam_ids] = np.arange( - FORTRAN_OFFSET, - self.header.n_beams + FORTRAN_OFFSET, - 
dtype=self.header.itype, + FORTRAN_OFFSET, self.header.n_beams + FORTRAN_OFFSET, dtype=self.header.itype ) self.arrays[ArrayType.element_shell_ids] = np.arange( - FORTRAN_OFFSET, - self.header.n_shells + FORTRAN_OFFSET, - dtype=self.header.itype, + FORTRAN_OFFSET, self.header.n_shells + FORTRAN_OFFSET, dtype=self.header.itype ) self.arrays[ArrayType.element_tshell_ids] = np.arange( - FORTRAN_OFFSET, - self.header.n_thick_shells + FORTRAN_OFFSET, - dtype=self.header.itype, + FORTRAN_OFFSET, self.header.n_thick_shells + FORTRAN_OFFSET, dtype=self.header.itype ) self.arrays[ArrayType.part_ids] = np.arange( - FORTRAN_OFFSET, - self.header.n_parts + FORTRAN_OFFSET, - dtype=self.header.itype, + FORTRAN_OFFSET, self.header.n_parts + FORTRAN_OFFSET, dtype=self.header.itype ) return @@ -2709,10 +2616,7 @@ def _read_user_ids(self): position += info.n_parts * self.header.wordsize self.arrays[ArrayType.part_ids_cross_references] = self._buffer.read_ndarray( - position, - info.n_parts * self.header.wordsize, - 1, - self._header.itype, + position, info.n_parts * self.header.wordsize, 1, self._header.itype ) position += info.n_parts * self.header.wordsize @@ -2841,8 +2745,7 @@ def _read_sph_node_and_material_list(self): return LOGGER.debug( - "_read_sph_node_and_material_list start at byte %d", - self.geometry_section_size, + "_read_sph_node_and_material_list start at byte %d", self.geometry_section_size ) position = self.geometry_section_size @@ -2868,10 +2771,7 @@ def _read_sph_node_and_material_list(self): # update position self.geometry_section_size += array_length - LOGGER.debug( - "_read_sph_node_and_material_list end at byte %d", - self.geometry_section_size, - ) + LOGGER.debug("_read_sph_node_and_material_list end at byte %d", self.geometry_section_size) def _read_particle_geometry_data(self): """Read the particle geometry data""" @@ -3273,8 +3173,7 @@ def _read_header_part_contact_interface_titles( return geometry_section_size LOGGER.debug( - "_read_header_part_contact_interface_titles start at byte %d", - geometry_section_size, + "_read_header_part_contact_interface_titles start at byte %d", geometry_section_size ) position = geometry_section_size @@ -3330,12 +3229,10 @@ def _read_header_part_contact_interface_titles( titles_wordsize = 4 # part ids and corresponding titles - array_type = np.dtype( - [ - ("ids", header.itype), - ("titles", "S" + str(18 * titles_wordsize)), - ] - ) + array_type = np.dtype([ + ("ids", header.itype), + ("titles", "S" + str(18 * titles_wordsize)), + ]) array_length = (header.wordsize + 18 * titles_wordsize) * int(entry_count) tmp_arrays = buffer.read_ndarray(position, array_length, 1, array_type) position += array_length @@ -3368,10 +3265,7 @@ def _read_header_part_contact_interface_titles( # keywords array_length = 20 * titles_wordsize * int(nline) d3prop_keywords = buffer.read_ndarray( - position, - array_length, - 1, - np.dtype("S" + str(titles_wordsize * 20)), + position, array_length, 1, np.dtype("S" + str(titles_wordsize * 20)) ) position += array_length @@ -3398,8 +3292,7 @@ def _read_header_part_contact_interface_titles( # remember position geometry_section_size = position LOGGER.debug( - "_read_header_part_contact_interface_titles end at byte %d", - geometry_section_size, + "_read_header_part_contact_interface_titles end at byte %d", geometry_section_size ) return geometry_section_size @@ -3470,8 +3363,7 @@ def _read_states_allocate_arrays( n_beams_history_vars = header.n_beam_history_vars n_beam_vars = header.n_beam_vars n_beams_layers = max( - int((-3 * 
n_beams_history_vars + n_beam_vars - 6) / (n_beams_history_vars + 5)), - 0, + int((-3 * n_beams_history_vars + n_beam_vars - 6) / (n_beams_history_vars + 5)), 0 ) # shells n_shells = header.n_shells @@ -3523,17 +3415,9 @@ def _read_states_allocate_arrays( ArrayType.node_residual_forces: [n_states, n_nodes, 3], ArrayType.node_residual_moments: [n_states, n_nodes, 3], # solids - ArrayType.element_solid_thermal_data: [ - n_states, - n_solids, - n_solids_thermal_vars, - ], + ArrayType.element_solid_thermal_data: [n_states, n_solids, n_solids_thermal_vars], ArrayType.element_solid_stress: [n_states, n_solids, n_solid_layers, 6], - ArrayType.element_solid_effective_plastic_strain: [ - n_states, - n_solids, - n_solid_layers, - ], + ArrayType.element_solid_effective_plastic_strain: [n_states, n_solids, n_solid_layers], ArrayType.element_solid_history_variables: [ n_states, n_solids, @@ -3542,18 +3426,8 @@ def _read_states_allocate_arrays( ], ArrayType.element_solid_strain: [n_states, n_solids, n_solid_layers, 6], ArrayType.element_solid_is_alive: [n_states, n_solids], - ArrayType.element_solid_plastic_strain_tensor: [ - n_states, - n_solids, - n_solid_layers, - 6, - ], - ArrayType.element_solid_thermal_strain_tensor: [ - n_states, - n_solids, - n_solid_layers, - 6, - ], + ArrayType.element_solid_plastic_strain_tensor: [n_states, n_solids, n_solid_layers, 6], + ArrayType.element_solid_thermal_strain_tensor: [n_states, n_solids, n_solid_layers, 6], # thick shells ArrayType.element_tshell_stress: [n_states, n_tshells, n_tshells_layers, 6], ArrayType.element_tshell_effective_plastic_strain: [ @@ -3586,12 +3460,7 @@ def _read_states_allocate_arrays( ], ArrayType.element_beam_is_alive: [n_states, n_beams], # shells - ArrayType.element_shell_stress: [ - n_states, - n_shells_reduced, - n_shell_layers, - 6, - ], + ArrayType.element_shell_stress: [n_states, n_shells_reduced, n_shell_layers, 6], ArrayType.element_shell_effective_plastic_strain: [ n_states, n_shells_reduced, @@ -3610,11 +3479,7 @@ def _read_states_allocate_arrays( ArrayType.element_shell_unknown_variables: [n_states, n_shells_reduced, 2], ArrayType.element_shell_internal_energy: [n_states, n_shells_reduced], ArrayType.element_shell_strain: [n_states, n_shells_reduced, 2, 6], - ArrayType.element_shell_thermal_strain_tensor: [ - n_states, - n_shells_reduced, - 6, - ], + ArrayType.element_shell_thermal_strain_tensor: [n_states, n_shells_reduced, 6], ArrayType.element_shell_plastic_strain_tensor: [ n_states, n_shells_reduced, @@ -3642,14 +3507,8 @@ def _read_states_allocate_arrays( ArrayType.airbag_particle_mass: [n_states, n_airbag_particles], ArrayType.airbag_particle_radius: [n_states, n_airbag_particles], ArrayType.airbag_particle_spin_energy: [n_states, n_airbag_particles], - ArrayType.airbag_particle_translation_energy: [ - n_states, - n_airbag_particles, - ], - ArrayType.airbag_particle_nearest_segment_distance: [ - n_states, - n_airbag_particles, - ], + ArrayType.airbag_particle_translation_energy: [n_states, n_airbag_particles], + ArrayType.airbag_particle_nearest_segment_distance: [n_states, n_airbag_particles], ArrayType.airbag_particle_position: [n_states, n_airbag_particles, 3], ArrayType.airbag_particle_velocity: [n_states, n_airbag_particles, 3], # rigid road @@ -3688,7 +3547,7 @@ def _read_states_allocate_arrays( else: raise ValueError( f"Array '{array_name}' is not a state array. 
" - f"Please try one of: {list(state_array_shapes.keys())}", + f"Please try one of: {list(state_array_shapes.keys())}" ) @staticmethod @@ -4210,9 +4069,11 @@ def _read_states_nodes(self, state_data: np.ndarray, var_index: int, array_dict: # displacement if self.header.has_node_displacement: try: - tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape( - (n_states, n_nodes, n_dim) - ) + tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape(( + n_states, + n_nodes, + n_dim, + )) array_dict[ArrayType.node_displacement] = tmp_array except Exception: trb_msg = traceback.format_exc() @@ -4238,9 +4099,11 @@ def _read_states_nodes(self, state_data: np.ndarray, var_index: int, array_dict: # node temperature layers else: try: - tmp_array = state_data[:, var_index : var_index + 3 * n_nodes].reshape( - (n_states, n_nodes, 3) - ) + tmp_array = state_data[:, var_index : var_index + 3 * n_nodes].reshape(( + n_states, + n_nodes, + 3, + )) array_dict[ArrayType.node_temperature] = tmp_array except Exception: trb_msg = traceback.format_exc() @@ -4252,9 +4115,11 @@ def _read_states_nodes(self, state_data: np.ndarray, var_index: int, array_dict: # node heat flux if self.header.has_node_heat_flux: try: - tmp_array = state_data[:, var_index : var_index + 3 * n_nodes].reshape( - (n_states, n_nodes, 3) - ) + tmp_array = state_data[:, var_index : var_index + 3 * n_nodes].reshape(( + n_states, + n_nodes, + 3, + )) array_dict[ArrayType.node_heat_flux] = tmp_array except Exception: trb_msg = traceback.format_exc() @@ -4319,9 +4184,11 @@ def _read_states_nodes(self, state_data: np.ndarray, var_index: int, array_dict: # velocity if self.header.has_node_velocity: try: - tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape( - (n_states, n_nodes, n_dim) - ) + tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape(( + n_states, + n_nodes, + n_dim, + )) array_dict[ArrayType.node_velocity] = tmp_array except Exception: trb_msg = traceback.format_exc() @@ -4333,9 +4200,11 @@ def _read_states_nodes(self, state_data: np.ndarray, var_index: int, array_dict: # acceleration if self.header.has_node_acceleration: try: - tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape( - (n_states, n_nodes, n_dim) - ) + tmp_array = state_data[:, var_index : var_index + n_dim * n_nodes].reshape(( + n_states, + n_nodes, + n_dim, + )) array_dict[ArrayType.node_acceleration] = tmp_array except Exception: trb_msg = traceback.format_exc() @@ -4379,9 +4248,11 @@ def _read_states_solids_thermal( try: tmp_array = state_data[:, var_index : var_index + n_solids * n_thermal_vars] - array_dict[ArrayType.element_solid_thermal_data] = tmp_array.reshape( - (n_states, n_solids, n_thermal_vars) - ) + array_dict[ArrayType.element_solid_thermal_data] = tmp_array.reshape(( + n_states, + n_solids, + n_thermal_vars, + )) except Exception: trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" @@ -4521,9 +4392,7 @@ def _read_states_solids(self, state_data: np.ndarray, var_index: int, array_dict trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_solids, element_solid_plastic_strain_tensor", - trb_msg, + msg, "_read_states_solids, element_solid_plastic_strain_tensor", trb_msg ) # thermal strain tensor @@ -4544,9 +4413,7 @@ def _read_states_solids(self, state_data: np.ndarray, var_index: int, array_dict trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" LOGGER.warning( 
- msg, - "_read_states_solids, element_solid_thermal_strain_tensor", - trb_msg, + msg, "_read_states_solids, element_solid_thermal_strain_tensor", trb_msg ) # catch formatting in solid_state_datra @@ -4637,9 +4504,11 @@ def _read_states_tshell(self, state_data: np.ndarray, var_index: int, array_dict if has_pstrain: try: array_dict[ArrayType.element_tshell_effective_plastic_strain] = ( - tshell_layer_data[:, :, :, i_tshell_layer_var].reshape( - (n_states, n_tshells, n_layers) - ) + tshell_layer_data[:, :, :, i_tshell_layer_var].reshape(( + n_states, + n_tshells, + n_layers, + )) ) except Exception: trb_msg = traceback.format_exc() @@ -4652,10 +4521,7 @@ def _read_states_tshell(self, state_data: np.ndarray, var_index: int, array_dict if n_history_vars: try: array_dict[ArrayType.element_tshell_history_variables] = tshell_layer_data[ - :, - :, - :, - i_tshell_layer_var : i_tshell_layer_var + n_history_vars, + :, :, :, i_tshell_layer_var : i_tshell_layer_var + n_history_vars ].reshape((n_states, n_tshells, n_layers, n_history_vars)) except Exception: trb_msg = traceback.format_exc() @@ -4666,9 +4532,12 @@ def _read_states_tshell(self, state_data: np.ndarray, var_index: int, array_dict if n_strain_vars: try: tshell_nonlayer_data = tshell_nonlayer_data[:, :, :n_strain_vars] - array_dict[ArrayType.element_tshell_strain] = tshell_nonlayer_data.reshape( - (n_states, n_tshells, 2, 6) - ) + array_dict[ArrayType.element_tshell_strain] = tshell_nonlayer_data.reshape(( + n_states, + n_tshells, + 2, + 6, + )) except Exception: trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" @@ -5001,9 +4870,12 @@ def _read_states_shell(self, state_data: np.ndarray, var_index: int, array_dict: shell_strain = shell_nonlayer_data[ :, :, nonlayer_var_index : nonlayer_var_index + n_strain_vars ] - array_dict[ArrayType.element_shell_strain] = shell_strain.reshape( - (n_states, n_shells, 2, 6) - ) + array_dict[ArrayType.element_shell_strain] = shell_strain.reshape(( + n_states, + n_shells, + 2, + 6, + )) except Exception: trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" @@ -5026,9 +4898,7 @@ def _read_states_shell(self, state_data: np.ndarray, var_index: int, array_dict: if n_plastic_strain_tensor: try: pstrain_tensor = shell_nonlayer_data[ - :, - :, - nonlayer_var_index : nonlayer_var_index + n_plastic_strain_tensor, + :, :, nonlayer_var_index : nonlayer_var_index + n_plastic_strain_tensor ] array_dict[ArrayType.element_shell_plastic_strain_tensor] = ( pstrain_tensor.reshape((n_states, n_shells, n_layers, 6)) @@ -5037,9 +4907,7 @@ def _read_states_shell(self, state_data: np.ndarray, var_index: int, array_dict: trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_shells, element_shell_plastic_strain_tensor", - trb_msg, + msg, "_read_states_shells, element_shell_plastic_strain_tensor", trb_msg ) finally: nonlayer_var_index += n_plastic_strain_tensor @@ -5048,9 +4916,7 @@ def _read_states_shell(self, state_data: np.ndarray, var_index: int, array_dict: if n_thermal_strain_tensor: try: thermal_tensor = shell_nonlayer_data[ - :, - :, - nonlayer_var_index : nonlayer_var_index + n_thermal_strain_tensor, + :, :, nonlayer_var_index : nonlayer_var_index + n_thermal_strain_tensor ] array_dict[ArrayType.element_shell_thermal_strain_tensor] = ( thermal_tensor.reshape((n_states, n_shells, 6)) @@ -5059,9 +4925,7 @@ def _read_states_shell(self, state_data: np.ndarray, var_index: int, array_dict: trb_msg = traceback.format_exc() msg = "A 
failure in %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_shells, element_shell_thermal_strain_tensor", - trb_msg, + msg, "_read_states_shells, element_shell_thermal_strain_tensor", trb_msg ) finally: nonlayer_var_index += n_thermal_strain_tensor @@ -5413,15 +5277,16 @@ def get_dtype(type_flag): airbag_state_data = state_data[:, var_index : var_index + n_total_vars] # airbag data - airbag_data = airbag_state_data[:, : n_airbags * n_state_airbag_vars].reshape( - (n_states, n_airbags, n_state_airbag_vars) - ) + airbag_data = airbag_state_data[:, : n_airbags * n_state_airbag_vars].reshape(( + n_states, + n_airbags, + n_state_airbag_vars, + )) airbag_state_offset = n_airbags * n_state_airbag_vars # particle data particle_data = airbag_state_data[ - :, - airbag_state_offset : airbag_state_offset + n_particles * n_particle_vars, + :, airbag_state_offset : airbag_state_offset + n_particles * n_particle_vars ].reshape((n_states, n_particles, n_particle_vars)) # save sh... @@ -5440,9 +5305,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, airbag_n_active_particles", - trb_msg, + msg, "_read_states_airbags, airbag_n_active_particles", trb_msg ) elif var_name.startswith("Bag Vol"): try: @@ -5472,10 +5335,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) # particle chamber id elif var_name.startswith("Cham ID"): @@ -5487,10 +5347,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) # particle leakage elif var_name.startswith("Leakage"): @@ -5502,10 +5359,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) # particle mass elif var_name.startswith("Mass"): @@ -5517,10 +5371,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) # particle radius try: @@ -5531,10 +5382,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) # particle spin energy elif var_name.startswith("Spin En"): @@ -5546,10 +5394,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) # particle translational energy elif var_name.startswith("Tran En"): @@ -5561,10 +5406,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, 
"_read_states_airbags, particle_gas_id", var_name, trb_msg ) # particle segment distance elif var_name.startswith("NS dist"): @@ -5576,10 +5418,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) # particle position elif var_name.startswith("Pos x"): @@ -5598,10 +5437,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) elif var_name.startswith("Pos y"): # handled in Pos x @@ -5626,10 +5462,7 @@ def get_dtype(type_flag): trb_msg = traceback.format_exc() msg = "A failure in %s %s was caught:\n%s" LOGGER.warning( - msg, - "_read_states_airbags, particle_gas_id", - var_name, - trb_msg, + msg, "_read_states_airbags, particle_gas_id", var_name, trb_msg ) except Exception: @@ -5674,9 +5507,12 @@ def _read_states_road_surfaces( try: # read road data - road_data = state_data[:, var_index : var_index + 6 * n_roads].reshape( - (n_states, n_roads, 2, 3) - ) + road_data = state_data[:, var_index : var_index + 6 * n_roads].reshape(( + n_states, + n_roads, + 2, + 3, + )) # DISPLACEMENT try: @@ -5936,10 +5772,7 @@ def _collect_file_infos(self, size_per_state: int) -> List[MemoryInfo]: start = n_blocks * block_length mview = memoryview( mmap.mmap( - fp.fileno(), - offset=start, - length=rest_size, - access=mmap.ACCESS_READ, + fp.fileno(), offset=start, length=rest_size, access=mmap.ACCESS_READ ).read() ) (nz_indexes,) = np.nonzero(mview[::-1]) @@ -6315,10 +6148,7 @@ def plot( webbrowser.open(fp.name) def write_d3plot( - self, - filepath: Union[str, BinaryIO], - block_size_bytes: int = 2048, - single_file: bool = True, + self, filepath: Union[str, BinaryIO], block_size_bytes: int = 2048, single_file: bool = True ): """Write a d3plot file again @@ -6348,14 +6178,16 @@ def write_d3plot( Write a new d3plot from scratch: >>> d3plot = D3plot() - >>> d3plot.arrays[ArrayType.node_coordinates] = np.array([[0, 0, 0], - ... [1, 0, 0], - ... [0, 1, 0]]) + >>> d3plot.arrays[ArrayType.node_coordinates] = np.array([ + ... [0, 0, 0], + ... [1, 0, 0], + ... [0, 1, 0], + ... ]) >>> d3plot.arrays[ArrayType.element_shell_node_indexes] = np.array([[0, 2, 1, 1]]) >>> d3plot.arrays[ArrayType.element_shell_part_indexes] = np.array([0]) - >>> d3plot.arrays[ArrayType.node_displacement] = np.array([[[0, 0, 0], - ... [1, 0, 0], - ... [0, 1, 0]]]) + >>> d3plot.arrays[ArrayType.node_displacement] = np.array([ + ... [[0, 0, 0], [1, 0, 0], [0, 1, 0]] + ... 
]) >>> d3plot.write_d3plot("yay.d3plot") """ @@ -7015,9 +6847,7 @@ def _write_geom_user_ids(self, fp: typing.IO[Any], settings: D3plotWriterSetting self.arrays[ArrayType.node_ids] if ArrayType.node_ids in self.arrays else np.arange( - FORTRAN_OFFSET, - settings.header["numnp"] + FORTRAN_OFFSET, - dtype=settings.itype, + FORTRAN_OFFSET, settings.header["numnp"] + FORTRAN_OFFSET, dtype=settings.itype ) ) n_bytes_written += fp.write(settings.pack(node_ids, dtype_hint=np.integer)) @@ -7027,9 +6857,7 @@ def _write_geom_user_ids(self, fp: typing.IO[Any], settings: D3plotWriterSetting self.arrays[ArrayType.element_solid_ids] if ArrayType.element_solid_ids in self.arrays else np.arange( - FORTRAN_OFFSET, - settings.header["nel8"] + FORTRAN_OFFSET, - dtype=settings.itype, + FORTRAN_OFFSET, settings.header["nel8"] + FORTRAN_OFFSET, dtype=settings.itype ) ) n_bytes_written += fp.write(settings.pack(solid_ids, dtype_hint=np.integer)) @@ -7039,9 +6867,7 @@ def _write_geom_user_ids(self, fp: typing.IO[Any], settings: D3plotWriterSetting self.arrays[ArrayType.element_beam_ids] if ArrayType.element_beam_ids in self.arrays else np.arange( - FORTRAN_OFFSET, - settings.header["nel2"] + FORTRAN_OFFSET, - dtype=settings.itype, + FORTRAN_OFFSET, settings.header["nel2"] + FORTRAN_OFFSET, dtype=settings.itype ) ) n_bytes_written += fp.write(settings.pack(beam_ids, dtype_hint=np.integer)) @@ -7051,9 +6877,7 @@ def _write_geom_user_ids(self, fp: typing.IO[Any], settings: D3plotWriterSetting self.arrays[ArrayType.element_shell_ids] if ArrayType.element_shell_ids in self.arrays else np.arange( - FORTRAN_OFFSET, - settings.header["nel4"] + FORTRAN_OFFSET, - dtype=settings.itype, + FORTRAN_OFFSET, settings.header["nel4"] + FORTRAN_OFFSET, dtype=settings.itype ) ) n_bytes_written += fp.write(settings.pack(shell_ids, dtype_hint=np.integer)) @@ -7063,9 +6887,7 @@ def _write_geom_user_ids(self, fp: typing.IO[Any], settings: D3plotWriterSetting self.arrays[ArrayType.element_tshell_ids] if ArrayType.element_tshell_ids in self.arrays else np.arange( - FORTRAN_OFFSET, - settings.header["nelth"] + FORTRAN_OFFSET, - dtype=settings.itype, + FORTRAN_OFFSET, settings.header["nelth"] + FORTRAN_OFFSET, dtype=settings.itype ) ) n_bytes_written += fp.write(settings.pack(tshell_ids, dtype_hint=np.integer)) @@ -7120,12 +6942,7 @@ def _write_geom_rigid_body_description( if settings.header["ndim"] not in (8, 9): return 0 - _check_ndim( - self, - { - ArrayType.rigid_body_part_indexes: ["n_rigid_bodies"], - }, - ) + _check_ndim(self, {ArrayType.rigid_body_part_indexes: ["n_rigid_bodies"]}) array_dims = { ArrayType.rigid_body_part_indexes: 0, ArrayType.rigid_body_node_indexes_list: 0, @@ -7206,10 +7023,7 @@ def _write_geom_sph_node_and_materials( ArrayType.sph_node_material_index: ["n_sph_nodes"], }, ) - array_dims = { - ArrayType.sph_node_indexes: 0, - ArrayType.sph_node_material_index: 0, - } + array_dims = {ArrayType.sph_node_indexes: 0, ArrayType.sph_node_material_index: 0} array_names = list(array_dims.keys()) _check_array_occurrence(self, array_names, array_names) self.check_array_dims(array_dims, "n_sph_nodes", nmsph) @@ -7292,10 +7106,7 @@ def _write_geom_rigid_road_surface( ArrayType.rigid_road_segment_road_id: ["n_segments"], }, ) - array_dims = { - ArrayType.rigid_road_node_ids: 0, - ArrayType.rigid_road_node_coordinates: 0, - } + array_dims = {ArrayType.rigid_road_node_ids: 0, ArrayType.rigid_road_node_coordinates: 0} n_rigid_road_nodes = self.check_array_dims(array_dims, "rigid_road_n_nodes") 
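The `_write_geom_user_ids` hunks above all reduce to one fallback pattern: when no user-ID array is stored, consecutive 1-based (Fortran-offset) IDs are generated on the fly. A minimal sketch of that pattern, assuming only numpy; `make_default_ids` is an illustrative name, not a function of the library:

import numpy as np

FORTRAN_OFFSET = 1  # d3plot entity IDs are 1-based

def make_default_ids(n_entities: int, dtype=np.int32) -> np.ndarray:
    # mirrors np.arange(FORTRAN_OFFSET, n + FORTRAN_OFFSET, dtype=...) in the
    # hunks above: consecutive IDs 1..n_entities when no explicit IDs exist
    return np.arange(FORTRAN_OFFSET, n_entities + FORTRAN_OFFSET, dtype=dtype)

assert make_default_ids(4).tolist() == [1, 2, 3, 4]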
self.check_array_dims({ArrayType.rigid_road_node_coordinates: 1}, "x_y_z", 3) array_dims = { @@ -7366,7 +7177,7 @@ def _write_geom_extra_node_data( ArrayType.element_solid_node10_extra_node_indexes: [ "n_solids", "2_extra_node_ids", - ], + ] }, ) array_dims = { @@ -7375,9 +7186,7 @@ def _write_geom_extra_node_data( } self.check_array_dims(array_dims, "n_solids") self.check_array_dims( - {ArrayType.element_solid_node10_extra_node_indexes: 1}, - "extra_node_ids", - 2, + {ArrayType.element_solid_node10_extra_node_indexes: 1}, "extra_node_ids", 2 ) extra_nodes = ( @@ -7404,9 +7213,7 @@ def _write_geom_extra_node_data( } self.check_array_dims(array_dims, "n_node8_shells") self.check_array_dims( - {ArrayType.element_shell_node8_extra_node_indexes: 1}, - "extra_node_ids", - 4, + {ArrayType.element_shell_node8_extra_node_indexes: 1}, "extra_node_ids", 4 ) element_indexes = ( @@ -7438,9 +7245,7 @@ def _write_geom_extra_node_data( } self.check_array_dims(array_dims, "n_node20_solids") self.check_array_dims( - {ArrayType.element_solid_node20_extra_node_indexes: 1}, - "extra_node_ids", - 12, + {ArrayType.element_solid_node20_extra_node_indexes: 1}, "extra_node_ids", 12 ) element_indexes = ( @@ -7472,9 +7277,7 @@ def _write_geom_extra_node_data( } self.check_array_dims(array_dims, "n_node27_solids") self.check_array_dims( - {ArrayType.element_solid_node27_extra_node_indexes: 1}, - "extra_node_ids", - 19, + {ArrayType.element_solid_node27_extra_node_indexes: 1}, "extra_node_ids", 19 ) element_indexes = ( @@ -7510,13 +7313,10 @@ def _write_header_part_contact_interface_titles( self, { # ArrayType.part_titles: ["n_parts", "n_chars"], - ArrayType.part_titles_ids: ["n_parts"], + ArrayType.part_titles_ids: ["n_parts"] }, ) - array_dimensions = { - ArrayType.part_titles: 0, - ArrayType.part_titles_ids: 0, - } + array_dimensions = {ArrayType.part_titles: 0, ArrayType.part_titles_ids: 0} if _check_array_occurrence( self, list(array_dimensions.keys()), list(array_dimensions.keys()) ): @@ -7559,10 +7359,7 @@ def _write_header_part_contact_interface_titles( n_bytes_written += fp.write(settings.pack(title2, settings.wordsize * title_size_words)) # CONTACT TITLES - array_dimensions = { - ArrayType.contact_titles: 0, - ArrayType.contact_title_ids: 0, - } + array_dimensions = {ArrayType.contact_titles: 0, ArrayType.contact_title_ids: 0} if _check_array_occurrence( self, list(array_dimensions.keys()), list(array_dimensions.keys()) ): @@ -7597,7 +7394,7 @@ def _write_states( if ArrayType.global_timesteps not in self.arrays: # if any state array is present simply make up a timestep array if any(array_name in self.arrays for array_name in ArrayType.get_state_array_names()): - array_dims = {array_name: 0 for array_name in ArrayType.get_state_array_names()} + array_dims = dict.fromkeys(ArrayType.get_state_array_names(), 0) n_timesteps = self.check_array_dims( array_dimensions=array_dims, dimension_name="n_timesteps" ) @@ -7855,10 +7652,7 @@ def _write_part_field(array_type: str, default_shape: Union[int, Tuple], dtype: ArrayType.rigid_wall_position: 0, } self.check_array_dims(array_dims, "n_timesteps") - array_dims = { - ArrayType.rigid_wall_force: 1, - ArrayType.rigid_wall_position: 1, - } + array_dims = {ArrayType.rigid_wall_force: 1, ArrayType.rigid_wall_position: 1} self.check_array_dims(array_dims, "n_rigid_walls") self.check_array_dims({ArrayType.rigid_wall_position: 2}, "x_y_z", 3) @@ -8050,8 +7844,7 @@ def _write_states_nodes( ) if byte_checksum != byte_checksum_target: msg = ( - "byte checksum wrong: " - 
"{byte_checksum_target} (header) != {byte_checksum} (checksum)" + "byte checksum wrong: {byte_checksum_target} (header) != {byte_checksum} (checksum)" ) raise RuntimeError(msg) @@ -8078,10 +7871,7 @@ def _write_states_solid_thermal_data( }, ) - array_dims = { - ArrayType.global_timesteps: 0, - ArrayType.element_solid_thermal_data: 0, - } + array_dims = {ArrayType.global_timesteps: 0, ArrayType.element_solid_thermal_data: 0} self.check_array_dims(array_dims, "n_timesteps") array_dims = { @@ -8099,8 +7889,7 @@ def _write_states_solid_thermal_data( ) if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -8198,21 +7987,16 @@ def _write_states_solids( self.check_array_dims({ArrayType.element_solid_strain: 3}, "εx_εy_εz_εxy_εyz_εxz", 6) self.check_array_dims( - {ArrayType.element_solid_plastic_strain_tensor: 3}, - "εx_εy_εz_εxy_εyz_εxz", - 6, + {ArrayType.element_solid_plastic_strain_tensor: 3}, "εx_εy_εz_εxy_εyz_εxz", 6 ) self.check_array_dims( - {ArrayType.element_solid_thermal_strain_tensor: 3}, - "εx_εy_εz_εxy_εyz_εxz", - 6, + {ArrayType.element_solid_thermal_strain_tensor: 3}, "εx_εy_εz_εxy_εyz_εxz", 6 ) # allocate array solid_data = np.zeros( - (n_solids, n_solid_layers, n_solid_vars // n_solid_layers), - dtype=settings.ftype, + (n_solids, n_solid_layers, n_solid_vars // n_solid_layers), dtype=settings.ftype ) # SOLID STRESS @@ -8234,9 +8018,7 @@ def _write_states_solids( trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" LOGGER.warning( - msg, - "_write_states_solids, element_solid_effective_plastic_strain", - trb_msg, + msg, "_write_states_solids, element_solid_effective_plastic_strain", trb_msg ) # SOLID HISTORY VARIABLES @@ -8257,9 +8039,7 @@ def _write_states_solids( trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" LOGGER.warning( - msg, - "_write_states_solids, element_solid_history_variables", - trb_msg, + msg, "_write_states_solids, element_solid_history_variables", trb_msg ) # SOLID STRAIN @@ -8286,9 +8066,7 @@ def _write_states_solids( trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" LOGGER.warning( - msg, - "_write_states_solids, element_solid_plastic_strain_tensor", - trb_msg, + msg, "_write_states_solids, element_solid_plastic_strain_tensor", trb_msg ) # THERMAL STRAIN TENSOR @@ -8309,9 +8087,7 @@ def _write_states_solids( trb_msg = traceback.format_exc() msg = "A failure in %s was caught:\n%s" LOGGER.warning( - msg, - "_write_states_solids, element_solid_thermal_strain_tensor", - trb_msg, + msg, "_write_states_solids, element_solid_thermal_strain_tensor", trb_msg ) n_bytes_written = fp.write(settings.pack(solid_data, dtype_hint=np.floating)) @@ -8322,8 +8098,7 @@ def _write_states_solids( ) if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -8400,9 +8175,11 @@ def _write_states_tshells( n_layer_vars = settings.n_shell_layers * (6 * has_stress + has_pstrain + n_history_vars) tshell_data = np.zeros((n_tshells, n_tshell_vars), settings.ftype) - tshell_layer_data = tshell_data[:, :n_layer_vars].reshape( - (n_tshells, settings.n_shell_layers, -1) - ) + tshell_layer_data = tshell_data[:, :n_layer_vars].reshape(( + 
n_tshells, + settings.n_shell_layers, + -1, + )) tshell_nonlayer_data = tshell_data[:, n_layer_vars:] # TSHELL STRESS @@ -8441,8 +8218,7 @@ def _write_states_tshells( ) if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -8468,11 +8244,7 @@ def _write_states_beams( { ArrayType.element_beam_axial_force: ["n_timesteps", "n_beams"], ArrayType.element_beam_shear_force: ["n_timesteps", "n_beams", "fs_ft"], - ArrayType.element_beam_bending_moment: [ - "n_timesteps", - "n_beams", - "ms_mt", - ], + ArrayType.element_beam_bending_moment: ["n_timesteps", "n_beams", "ms_mt"], ArrayType.element_beam_torsion_moment: ["n_timesteps", "n_beams"], ArrayType.element_beam_shear_stress: [ "n_timesteps", @@ -8480,21 +8252,9 @@ def _write_states_beams( "n_beam_layers", "σrs_σtr", ], - ArrayType.element_beam_axial_stress: [ - "n_timesteps", - "n_beams", - "n_beam_layers", - ], - ArrayType.element_beam_plastic_strain: [ - "n_timesteps", - "n_beams", - "n_beam_layers", - ], - ArrayType.element_beam_axial_strain: [ - "n_timesteps", - "n_beams", - "n_beam_layers", - ], + ArrayType.element_beam_axial_stress: ["n_timesteps", "n_beams", "n_beam_layers"], + ArrayType.element_beam_plastic_strain: ["n_timesteps", "n_beams", "n_beam_layers"], + ArrayType.element_beam_axial_strain: ["n_timesteps", "n_beams", "n_beam_layers"], ArrayType.element_beam_history_vars: [ "n_timesteps", "n_beams", @@ -8547,12 +8307,16 @@ def _write_states_beams( # allocate buffer beam_data = np.zeros((n_beams, n_beam_vars), dtype=settings.ftype) n_layer_vars_total = 5 * n_beam_layers - beam_layer_data = beam_data[:, 6 : 6 + n_layer_vars_total].reshape( - (n_beams, n_beam_layers, 5) - ) - beam_history_vars = beam_data[:, 6 + n_layer_vars_total :].reshape( - (n_beams, 3 + n_beam_layers, n_beam_history_vars) - ) + beam_layer_data = beam_data[:, 6 : 6 + n_layer_vars_total].reshape(( + n_beams, + n_beam_layers, + 5, + )) + beam_history_vars = beam_data[:, 6 + n_layer_vars_total :].reshape(( + n_beams, + 3 + n_beam_layers, + n_beam_history_vars, + )) # BEAM AXIAL FORCE if ArrayType.element_beam_axial_force in self.arrays: @@ -8609,8 +8373,7 @@ def _write_states_beams( self.arrays[ArrayType.element_beam_history_vars][i_timestep] if ArrayType.element_beam_history_vars in self.arrays else np.zeros( - (n_beams, n_beam_layers + 3, n_beam_history_vars), - dtype=settings.ftype, + (n_beams, n_beam_layers + 3, n_beam_history_vars), dtype=settings.ftype ) ) beam_history_vars[:, :, :] = array @@ -8621,8 +8384,7 @@ def _write_states_beams( n_bytes_expected = settings.header["nv1d"] * settings.header["nel2"] * settings.wordsize if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -8672,21 +8434,9 @@ def _write_states_shells( "n_shell_layers", "n_shell_history_vars", ], - ArrayType.element_shell_bending_moment: [ - "n_timesteps", - "n_shells", - "mx_my_mxy", - ], - ArrayType.element_shell_shear_force: [ - "n_timesteps", - "n_shells", - "qx_qy", - ], - ArrayType.element_shell_normal_force: [ - "n_timesteps", - "n_shells", - "nx_ny_nxy", - ], + ArrayType.element_shell_bending_moment: ["n_timesteps", "n_shells", "mx_my_mxy"], + ArrayType.element_shell_shear_force: 
["n_timesteps", "n_shells", "qx_qy"], + ArrayType.element_shell_normal_force: ["n_timesteps", "n_shells", "nx_ny_nxy"], ArrayType.element_shell_thickness: ["n_timesteps", "n_shells"], ArrayType.element_shell_unknown_variables: [ "n_timesteps", @@ -8758,9 +8508,11 @@ def _write_states_shells( n_layer_vars = has_stress * 6 + has_pstrain + n_shell_history_vars n_layer_vars_total = n_layer_vars * n_shell_layers - shell_layer_data = shell_data[:, :n_layer_vars_total].reshape( - (n_reduced_shells, n_shell_layers, n_layer_vars) - ) + shell_layer_data = shell_data[:, :n_layer_vars_total].reshape(( + n_reduced_shells, + n_shell_layers, + n_layer_vars, + )) shell_nonlayer_data = shell_data[:, n_layer_vars_total:] start_layer_index = 0 @@ -8780,9 +8532,11 @@ def _write_states_shells( end_layer_index = start_layer_index + has_pstrain if ArrayType.element_shell_effective_plastic_strain in self.arrays: array = self.arrays[ArrayType.element_shell_effective_plastic_strain][i_timestep] - shell_layer_data[:, :, start_layer_index:end_layer_index] = array.reshape( - (n_reduced_shells, n_shell_layers, 1) - ) + shell_layer_data[:, :, start_layer_index:end_layer_index] = array.reshape(( + n_reduced_shells, + n_shell_layers, + 1, + )) # SHELL HISTORY VARS if n_shell_history_vars: @@ -8832,9 +8586,10 @@ def _write_states_shells( start_index2 = start_index end_index2 = start_index + 1 array = self.arrays[ArrayType.element_shell_thickness][i_timestep] - shell_nonlayer_data[:, start_index2:end_index2] = array.reshape( - (n_reduced_shells, 1) - ) + shell_nonlayer_data[:, start_index2:end_index2] = array.reshape(( + n_reduced_shells, + 1, + )) # ELEMENT SPECIFIC VARS if ArrayType.element_shell_unknown_variables in self.arrays: @@ -8852,9 +8607,10 @@ def _write_states_shells( if ArrayType.element_shell_strain in self.arrays: array = self.arrays[ArrayType.element_shell_strain][i_timestep] - shell_nonlayer_data[:, start_index:end_index] = array.reshape( - (n_reduced_shells, 12) - ) + shell_nonlayer_data[:, start_index:end_index] = array.reshape(( + n_reduced_shells, + 12, + )) # INTERNAL ENERGY if has_else: @@ -8872,9 +8628,10 @@ def _write_states_shells( if ArrayType.element_shell_plastic_strain_tensor in self.arrays: array = self.arrays[ArrayType.element_shell_plastic_strain_tensor][i_timestep] - shell_nonlayer_data[:, start_index:end_index] = array.reshape( - (n_reduced_shells, n_shell_layers * 6) - ) + shell_nonlayer_data[:, start_index:end_index] = array.reshape(( + n_reduced_shells, + n_shell_layers * 6, + )) # PLASTIC THERMAL TENSOR if settings.has_thermal_strain_tensor: @@ -8892,8 +8649,7 @@ def _write_states_shells( n_bytes_expected = settings.header["nv2d"] * n_reduced_shells * settings.wordsize if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -8916,16 +8672,10 @@ def _write_states_deletion_info( if settings.mdlopt == 1: _check_ndim(self, {ArrayType.node_is_alive: ["n_timesteps", "n_nodes"]}) - array_dims = { - ArrayType.global_timesteps: 0, - ArrayType.node_is_alive: 0, - } + array_dims = {ArrayType.global_timesteps: 0, ArrayType.node_is_alive: 0} self.check_array_dims(array_dims, "n_timesteps") - array_dims = { - ArrayType.node_coordinates: 0, - ArrayType.node_is_alive: 1, - } + array_dims = {ArrayType.node_coordinates: 0, ArrayType.node_is_alive: 1} self.check_array_dims(array_dims, "n_nodes") n_nodes = 
settings.header["numnp"] @@ -9038,8 +8788,7 @@ def _write_states_deletion_info( # check bytes if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -9061,20 +8810,12 @@ def _write_states_sph( ArrayType.sph_deletion: ["n_timesteps", "n_particles"], ArrayType.sph_radius: ["n_timesteps", "n_particles"], ArrayType.sph_pressure: ["n_timesteps", "n_particles"], - ArrayType.sph_stress: [ - "n_timesteps", - "n_particles", - "σx_σy_σz_σxy_σyz_σxz", - ], + ArrayType.sph_stress: ["n_timesteps", "n_particles", "σx_σy_σz_σxy_σyz_σxz"], ArrayType.sph_effective_plastic_strain: ["n_timesteps", "n_particles"], ArrayType.sph_density: ["n_timesteps", "n_particles"], ArrayType.sph_internal_energy: ["n_timesteps", "n_particles"], ArrayType.sph_n_neighbors: ["n_timesteps", "n_particles"], - ArrayType.sph_strain: [ - "n_timesteps", - "n_particles", - "εx_εy_εz_εxy_εyz_εxz", - ], + ArrayType.sph_strain: ["n_timesteps", "n_particles", "εx_εy_εz_εxy_εyz_εxz"], ArrayType.sph_mass: ["n_timesteps", "n_particles"], }, ) @@ -9210,8 +8951,7 @@ def _write_states_sph( ) if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -9238,24 +8978,10 @@ def _write_states_airbags( ArrayType.airbag_particle_mass: ["n_timesteps", "n_particles"], ArrayType.airbag_particle_radius: ["n_timesteps", "n_particles"], ArrayType.airbag_particle_spin_energy: ["n_timesteps", "n_particles"], - ArrayType.airbag_particle_translation_energy: [ - "n_timesteps", - "n_particles", - ], - ArrayType.airbag_particle_nearest_segment_distance: [ - "n_timesteps", - "n_particles", - ], - ArrayType.airbag_particle_position: [ - "n_timesteps", - "n_particles", - "x_y_z", - ], - ArrayType.airbag_particle_velocity: [ - "n_timesteps", - "n_particles", - "vx_vy_vz", - ], + ArrayType.airbag_particle_translation_energy: ["n_timesteps", "n_particles"], + ArrayType.airbag_particle_nearest_segment_distance: ["n_timesteps", "n_particles"], + ArrayType.airbag_particle_position: ["n_timesteps", "n_particles", "x_y_z"], + ArrayType.airbag_particle_velocity: ["n_timesteps", "n_particles", "vx_vy_vz"], }, ) @@ -9428,8 +9154,7 @@ def _write_states_airbags( n_bytes_expected = (2 * n_airbags + n_particles * 14) * settings.wordsize if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -9448,16 +9173,8 @@ def _write_states_rigid_road( _check_ndim( self, { - ArrayType.rigid_road_displacement: [ - "n_timesteps", - "n_rigid_roads", - "x_y_z", - ], - ArrayType.rigid_road_velocity: [ - "n_timesteps", - "n_rigid_roads", - "vx_vy_vz", - ], + ArrayType.rigid_road_displacement: ["n_timesteps", "n_rigid_roads", "x_y_z"], + ArrayType.rigid_road_velocity: ["n_timesteps", "n_rigid_roads", "vx_vy_vz"], }, ) @@ -9497,8 +9214,7 @@ def _write_states_rigid_road( n_bytes_expected = settings.header["nv1d"] * settings.header["nel2"] * settings.wordsize if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: 
{n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -9521,31 +9237,11 @@ def _write_states_rigid_bodies( _check_ndim( self, { - ArrayType.rigid_body_coordinates: [ - "n_timesteps", - "n_rigid_bodies", - "x_y_z", - ], - ArrayType.rigid_body_rotation_matrix: [ - "n_timesteps", - "n_rigid_bodies", - "matrix", - ], - ArrayType.rigid_body_velocity: [ - "n_timesteps", - "n_rigid_bodies", - "vx_vy_vz", - ], - ArrayType.rigid_body_rot_velocity: [ - "n_timesteps", - "n_rigid_bodies", - "rvx_rvy_rvz", - ], - ArrayType.rigid_body_acceleration: [ - "n_timesteps", - "n_rigid_bodies", - "ax_ay_az", - ], + ArrayType.rigid_body_coordinates: ["n_timesteps", "n_rigid_bodies", "x_y_z"], + ArrayType.rigid_body_rotation_matrix: ["n_timesteps", "n_rigid_bodies", "matrix"], + ArrayType.rigid_body_velocity: ["n_timesteps", "n_rigid_bodies", "vx_vy_vz"], + ArrayType.rigid_body_rot_velocity: ["n_timesteps", "n_rigid_bodies", "rvx_rvy_rvz"], + ArrayType.rigid_body_acceleration: ["n_timesteps", "n_rigid_bodies", "ax_ay_az"], ArrayType.rigid_body_rot_acceleration: [ "n_timesteps", "n_rigid_bodies", @@ -9647,8 +9343,7 @@ def _write_states_rigid_bodies( n_bytes_expected = settings.header["nv1d"] * settings.header["nel2"] * settings.wordsize if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -9659,10 +9354,7 @@ def _write_states_rigid_bodies( return n_bytes_written def check_array_dims( - self, - array_dimensions: Dict[str, int], - dimension_name: str, - dimension_size: int = -1, + self, array_dimensions: Dict[str, int], dimension_name: str, dimension_size: int = -1 ): """This function checks if multiple arrays share an array dimensions with the same size. @@ -9754,8 +9446,7 @@ def _compare_n_bytes_checksum(n_bytes_written: int, n_bytes_expected: int): """ if n_bytes_expected != n_bytes_written: msg = ( - "byte checksum wrong: " - f"{n_bytes_expected} (header) != {n_bytes_written} (checksum)" + f"byte checksum wrong: {n_bytes_expected} (header) != {n_bytes_written} (checksum)" ) raise RuntimeError(msg) @@ -9893,10 +9584,7 @@ def compare(self, d3plot2, array_eps: Union[float, None] = None): return hdr_differences, array_differences def get_part_filter( - self, - filter_type: FilterType, - part_ids: Iterable[int], - for_state_array: bool = True, + self, filter_type: FilterType, part_ids: Iterable[int], for_state_array: bool = True ) -> np.ndarray: """Get a part filter for different entities diff --git a/src/lasso/dyna/d3plot_header.py b/src/lasso/dyna/d3plot_header.py index 061027b..6b62414 100644 --- a/src/lasso/dyna/d3plot_header.py +++ b/src/lasso/dyna/d3plot_header.py @@ -345,7 +345,7 @@ class D3plotHeader: quadratic_elems_has_full_connectivity: bool = False quadratic_elems_has_data_at_integration_points: bool = False n_post_branches: int = 0 - n_types: Tuple[int, ...] = tuple() + n_types: Tuple[int, ...] 
= () # parts n_parts: int = 0 @@ -1076,7 +1076,6 @@ def read_words(self, bb: BinaryBuffer, words_to_read: dict, storage_dict: dict = storage_dict = {} for name, data in words_to_read.items(): - # check buffer length if data[0] >= len(bb): continue @@ -1099,7 +1098,7 @@ def read_words(self, bb: BinaryBuffer, words_to_read: dict, storage_dict: dict = @staticmethod def _determine_file_settings( - bb: Union[BinaryBuffer, None] = None + bb: Union[BinaryBuffer, None] = None, ) -> Tuple[int, Union[np.int32, np.int64], Union[np.float32, np.float64]]: """Determine the precision of the file @@ -1127,7 +1126,6 @@ def _determine_file_settings( # test file type flag (1=d3plot, 5=d3part, 11=d3eigv) if isinstance(bb, BinaryBuffer): - # single precision value = bb.read_number(44, np.int32) if value > 1000: diff --git a/src/lasso/dyna/femzip_mapper.py b/src/lasso/dyna/femzip_mapper.py index 4222cd8..2324385 100644 --- a/src/lasso/dyna/femzip_mapper.py +++ b/src/lasso/dyna/femzip_mapper.py @@ -336,7 +336,7 @@ def femzip_to_d3plot( - result_arrays: Dict[Tuple[int, str, FemzipVariableCategory], np.ndarray] + result_arrays: Dict[Tuple[int, str, FemzipVariableCategory], np.ndarray], ) -> Dict[str, np.ndarray]: """Map femzip arrays to d3plot arrays diff --git a/src/lasso/femzip/femzip_api.py b/src/lasso/femzip/femzip_api.py index 8b3f564..f955826 100644 --- a/src/lasso/femzip/femzip_api.py +++ b/src/lasso/femzip/femzip_api.py @@ -47,10 +47,7 @@ class FemzipError(Structure): Error message """ - _fields_ = [ - ("ier", c_int32), - ("msg", c_char_p), - ] + _fields_ = [("ier", c_int32), ("msg", c_char_p)] class VariableInfo(Structure): @@ -245,7 +242,6 @@ def api(self) -> CDLL: # pylint: disable = too-many-statements if self._api is None: - # Set the base path once base_path = Path(__file__).parent @@ -329,7 +325,7 @@ def api(self) -> CDLL: self._api.is_femunzip_version_ok.restype = FemzipError # femzip status - self._api.get_femzip_status.argtypes = tuple() + self._api.get_femzip_status.argtypes = () self._api.get_femzip_status.restype = FemzipAPIStatus # get part titles @@ -341,7 +337,7 @@ def api(self) -> CDLL: self._api.finish_reading_states.restype = FemzipError # close file - self._api.close_current_file.argtypes = tuple() + self._api.close_current_file.argtypes = () self._api.close_current_file.restype = FemzipError # read single state @@ -360,7 +356,6 @@ def api(self) -> CDLL: @staticmethod def _parse_state_filter(state_filter: Union[Set[int], None], n_timesteps: int) -> Set[int]: - # convert negative indexes state_filter_parsed = ( {entry if entry >= 0 else entry + n_timesteps for entry in state_filter} @@ -479,9 +474,7 @@ def get_part_titles( # do the thing err = self.api.get_part_titles( - filepath.encode("utf-8"), - buffer, - buffer_info_parsed.size_titles, + filepath.encode("utf-8"), buffer, buffer_info_parsed.size_titles ) self._check_femzip_error(err) @@ -556,9 +549,10 @@ def read_state_deletion_info( break # convert buffer into array - array = np.frombuffer(buffer_c, dtype=np.float32).reshape( - (n_timesteps_read, buffer_info.size_activity) - ) + array = np.frombuffer(buffer_c, dtype=np.float32).reshape(( + n_timesteps_read, + buffer_info.size_activity, + )) logging.debug("FemzipAPI.read_state_deletion_info end") @@ -755,10 +749,7 @@ def get_buffer_info(self, filepath: str) -> FemzipBufferInfo: """ buffer_info = FemzipBufferInfo() - err = self.api.get_buffer_info( - filepath.encode("ascii"), - byref(buffer_info), - ) + err = self.api.get_buffer_info(filepath.encode("ascii"), byref(buffer_info)) 
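The femzip_api.py hunks above repeatedly turn a raw ctypes float buffer into a shaped numpy array. A minimal sketch of that conversion with made-up sizes; note that numpy's function is spelled np.frombuffer, so the np.from_buffer call left in the read_states hunk above does not exist and would raise an AttributeError at runtime:

import ctypes
import numpy as np

n_timesteps_read, size_state = 3, 4
# a raw float32 buffer as femzip would hand it over (illustrative values)
buffer_c = (ctypes.c_float * (n_timesteps_read * size_state))(*range(12))

# np.frombuffer accepts any object with the buffer protocol, ctypes arrays included
array = np.frombuffer(buffer_c, dtype=np.float32).reshape((n_timesteps_read, size_state))
assert array.shape == (3, 4)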
self._check_femzip_error(err) # we need to copy the timesteps from C to Python @@ -822,10 +813,7 @@ def read_geometry( # read geometry err = self.api.read_geometry( - filepath.encode("ascii"), - byref(buffer_info), - buffer, - c_int32(close_file), + filepath.encode("ascii"), byref(buffer_info), buffer, c_int32(close_file) ) self._check_femzip_error(err) @@ -876,7 +864,6 @@ def read_states( n_timesteps_read = 0 for i_timestep in range(buffer_info_parsed.n_timesteps): - # forward pointer in buffer buffer_state = buffer[buffer_info.size_state * n_timesteps_read] @@ -890,9 +877,10 @@ def read_states( if not state_filter_valid: break - array = np.from_buffer(buffer, dtype=np.float32).reshape( - (n_timesteps_read, buffer_info_parsed.size_state) - ) + array = np.from_buffer(buffer, dtype=np.float32).reshape(( + n_timesteps_read, + buffer_info_parsed.size_state, + )) return array @@ -963,7 +951,6 @@ def _get_variables_state_buffer_size( variable_name = var_info.name.decode("utf-8") variable_category = FemzipVariableCategory.from_int(var_info.var_type) if variable_category == FemzipVariableCategory.NODE: - variable_multiplier = 1 if ( FemzipArrayType.NODE_DISPLACEMENT.value in variable_name @@ -1037,7 +1024,6 @@ def _decompose_read_variables_array( n_timesteps_read: int, file_metadata: FemzipFileMetadata, ) -> Dict[Tuple[int, str, FemzipVariableCategory], np.ndarray]: - # pylint: disable=too-many-arguments # pylint: disable=too-many-locals # pylint: disable=too-many-branches @@ -1047,7 +1033,6 @@ def _decompose_read_variables_array( result_arrays: Dict[Tuple[int, str, FemzipVariableCategory], np.ndarray] = {} var_pos = 0 for i_var in range(file_metadata.number_of_variables): - var_info: VariableInfo = file_metadata.variable_infos[i_var] variable_name: str = var_info.name.decode("utf-8") variable_index: int = var_info.var_index @@ -1061,9 +1046,11 @@ def _decompose_read_variables_array( FemzipArrayType.NODE_ACCELERATIONS.value, ): array_size = file_metadata.number_of_nodes * 3 - var_array = all_vars_array[:, var_pos : var_pos + array_size].reshape( - (n_timesteps_read, file_metadata.number_of_nodes, 3) - ) + var_array = all_vars_array[:, var_pos : var_pos + array_size].reshape(( + n_timesteps_read, + file_metadata.number_of_nodes, + 3, + )) var_pos += array_size result_arrays[(variable_index, variable_name, FemzipVariableCategory.NODE)] = ( var_array diff --git a/src/lasso/io/files.py b/src/lasso/io/files.py index 13b03d9..1e112dd 100644 --- a/src/lasso/io/files.py +++ b/src/lasso/io/files.py @@ -57,7 +57,7 @@ def collect_files( Examples -------- - >>> png_images, jpeg_images = collect_files('./folder', ['*.png', '*.jpeg']) + >>> png_images, jpeg_images = collect_files("./folder", ["*.png", "*.jpeg"]) """ if not isinstance(dirpath, (list, tuple)): @@ -67,7 +67,6 @@ def collect_files( found_files = [] for pattern in patterns: - files_with_pattern = [] for current_dir in dirpath: # files in root dir diff --git a/src/lasso/math/sampling.py b/src/lasso/math/sampling.py index 3f1efac..a007cd0 100644 --- a/src/lasso/math/sampling.py +++ b/src/lasso/math/sampling.py @@ -66,11 +66,9 @@ def homogenize_density( d_average = np.average(d[:, 1:], axis=1) if target_distance is None: target_distance = np.median(d_average) - is_selected = np.array( - [ - dist >= target_distance or random.random() < (dist / target_distance) ** dim - for i, dist in enumerate(d_average) - ] - ) + is_selected = np.array([ + dist >= target_distance or random.random() < (dist / target_distance) ** dim + for i, dist in 
enumerate(d_average) + ]) random.seed() return is_selected diff --git a/src/lasso/plotting/plot_shell_mesh.py b/src/lasso/plotting/plot_shell_mesh.py index 70ceba5..66ec7a7 100644 --- a/src/lasso/plotting/plot_shell_mesh.py +++ b/src/lasso/plotting/plot_shell_mesh.py @@ -92,17 +92,14 @@ def plot_shell_mesh( # the element values at the 3 corner nodes. Since elements share nodes # we can not use the same nodes, thus we need to create multiple nodes # at the same position but with different fringe. - nodes_xyz = np.concatenate( - [ - node_coordinates[tria_node_indexes].reshape((-1, 3)), - node_coordinates[quad_node_indexes_tria1].reshape((-1, 3)), - node_coordinates[quad_node_indexes_tria2].reshape((-1, 3)), - ] - ) + nodes_xyz = np.concatenate([ + node_coordinates[tria_node_indexes].reshape((-1, 3)), + node_coordinates[quad_node_indexes_tria1].reshape((-1, 3)), + node_coordinates[quad_node_indexes_tria2].reshape((-1, 3)), + ]) # fringe value and hover title if isinstance(field, np.ndarray): - if is_element_field: n_shells = len(shell_node_indexes) n_tria = np.sum(is_tria) @@ -132,13 +129,11 @@ def plot_shell_mesh( node_fringe = node_fringe.flatten() else: # copy & paste ftw - node_fringe = np.concatenate( - [ - field[tria_node_indexes].reshape((-1, 3)), - field[quad_node_indexes_tria1].reshape((-1, 3)), - field[quad_node_indexes_tria2].reshape((-1, 3)), - ] - ) + node_fringe = np.concatenate([ + field[tria_node_indexes].reshape((-1, 3)), + field[quad_node_indexes_tria1].reshape((-1, 3)), + field[quad_node_indexes_tria2].reshape((-1, 3)), + ]) node_fringe = node_fringe.flatten() # element text diff --git a/src/lasso/utils/language.py b/src/lasso/utils/language.py index 8f05b2f..f40b8f1 100644 --- a/src/lasso/utils/language.py +++ b/src/lasso/utils/language.py @@ -43,7 +43,6 @@ def set_var(name, value, context): current_context = context for i_name, current_name in enumerate(name): - # at last level set var if i_name == len(name) - 1: current_context[current_name] = value diff --git a/test/plot_creator_helper.py b/test/plot_creator_helper.py index d18b614..e685e71 100644 --- a/test/plot_creator_helper.py +++ b/test/plot_creator_helper.py @@ -38,12 +38,10 @@ def create_fake_d3plots( # ) # for _ in range(n_nodes_y)]).reshape(((bend_end - bend_start)*n_nodes_y)) - z_bend_mat = np.stack( - [ - np.array([1 + math.sin(x * math.pi / n_nodes_x) for x in range(n_nodes_x)]) - for _ in range(n_nodes_y) - ] - ).reshape((n_nodes_x * n_nodes_y,)) + z_bend_mat = np.stack([ + np.array([1 + math.sin(x * math.pi / n_nodes_x) for x in range(n_nodes_x)]) + for _ in range(n_nodes_y) + ]).reshape((n_nodes_x * n_nodes_y,)) node_coordinates = np.zeros((n_nodes_x * n_nodes_y, 3)) # fill in y coords @@ -151,7 +149,7 @@ def create_n_fake_plots(folder: str, n_nodes_x: int, n_nodes_y: int, n_timesteps # n plots bending down for i in range(int(n / 2)): create_fake_d3plots( - path=os.path.join(folder, plot_name.format(i=f"{i+int(n/2):02d}")), + path=os.path.join(folder, plot_name.format(i=f"{i + int(n / 2):02d}")), element_shell_node_indexes=element_shell_node_indexes, bend_multiplicator=-5 * (1 + randy_random.random()), n_nodes_x=n_nodes_x, diff --git a/test/unit_tests/dimred/svd/test_clustering_betas.py b/test/unit_tests/dimred/svd/test_clustering_betas.py index 1b76dc7..850fd87 100644 --- a/test/unit_tests/dimred/svd/test_clustering_betas.py +++ b/test/unit_tests/dimred/svd/test_clustering_betas.py @@ -22,9 +22,7 @@ def test_group_betas(self): fake_betas, cluster=ClusterType.KMeans, 
detector=DetectorType.LocalOutlierFactor, - cluster_params=dict( - n_clusters=expected_clusters, - ), + cluster_params={"n_clusters": expected_clusters}, ) # verify correct type of output diff --git a/test/unit_tests/dimred/svd/test_pod_functions.py b/test/unit_tests/dimred/svd/test_pod_functions.py index 98db1cd..5f0a544 100644 --- a/test/unit_tests/dimred/svd/test_pod_functions.py +++ b/test/unit_tests/dimred/svd/test_pod_functions.py @@ -44,9 +44,9 @@ def test_calculate_v_and_betas(self): # v_rob and betas should result in difference in displacements of original result reshaped_samples = rand_samples.reshape(samples, timesteps, nodes * dimensions) - delta_displ = reshaped_samples[:, :] - np.stack( - [reshaped_samples[0, :] for _ in range(timesteps)] - ) + delta_displ = reshaped_samples[:, :] - np.stack([ + reshaped_samples[0, :] for _ in range(timesteps) + ]) recacl_displ = np.einsum("ktn, stk -> stn", v_rob, betas) diff --git a/test/unit_tests/dimred/svd/test_subsampling_methods.py b/test/unit_tests/dimred/svd/test_subsampling_methods.py index 106a144..cf100ee 100644 --- a/test/unit_tests/dimred/svd/test_subsampling_methods.py +++ b/test/unit_tests/dimred/svd/test_subsampling_methods.py @@ -14,7 +14,6 @@ def test_create_reference_sample(self): """Tests the creation of reference sample""" with tempfile.TemporaryDirectory() as tmp_dir: - create_n_fake_plots(tmp_dir, 500, 10, n=2) load_path = os.path.join(tmp_dir, "SVDTestPlot00/plot") n_nodes = 200 @@ -54,7 +53,6 @@ def test_remap_random_subsample(self): """Verifies correct subsampling""" with tempfile.TemporaryDirectory() as tmp_dir: - create_n_fake_plots(tmp_dir, 500, 10, n=2) ref_path = os.path.join(tmp_dir, "SVDTestPlot00/plot") sample_path = os.path.join(tmp_dir, "SVDTestPlot01/plot") diff --git a/test/unit_tests/dimred/test_dimred_run.py b/test/unit_tests/dimred/test_dimred_run.py index 3e920b8..7747fec 100644 --- a/test/unit_tests/dimred/test_dimred_run.py +++ b/test/unit_tests/dimred/test_dimred_run.py @@ -17,7 +17,6 @@ def test_run(self): ) with tempfile.TemporaryDirectory() as tmpdir: - # create simulation runs create_n_fake_plots(folder=tmpdir, n_nodes_x=500, n_nodes_y=10) @@ -78,7 +77,7 @@ def test_run(self): # get test betas test_betas_group = test_run.h5file[HDF5FileNames.BETAS_GROUP_NAME.value] - test_ids = np.stack([key for key in test_betas_group.keys()]) + test_ids = np.stack(list(test_betas_group.keys())) test_betas = np.stack([test_betas_group[key][:] for key in test_betas_group.keys()]) # we check if test_ids and test_betas are of correct shape @@ -93,9 +92,9 @@ def test_run(self): # verify that calculated betas are reproducible as expected # first, create displ mat containing difference in displ over time verify_displ_stacked = test_subs.reshape(49, 5, 2000 * 3) - verify_diff_mat = np.stack( - [verify_displ_stacked[:, 0, :] for _ in range(5)] - ).reshape(49, 5, 2000 * 3) + verify_diff_mat = np.stack([ + verify_displ_stacked[:, 0, :] for _ in range(5) + ]).reshape(49, 5, 2000 * 3) verify_displ_stacked = verify_displ_stacked - verify_diff_mat # calculate betas and check if they are similar diff --git a/test/unit_tests/dyna/test_d3plot.py b/test/unit_tests/dyna/test_d3plot.py index 4ba00f9..2cd1ba9 100644 --- a/test/unit_tests/dyna/test_d3plot.py +++ b/test/unit_tests/dyna/test_d3plot.py @@ -13,7 +13,6 @@ class D3plotTest(TestCase): def test_init(self): - # settings self.maxDiff = None @@ -95,7 +94,6 @@ def test_init(self): self.assertDictEqual(array_diff, {}) def test_header(self): - test_header_data = { "title": " ", 
"runtime": 1472027823, @@ -159,7 +157,6 @@ def test_header(self): self.assertEqual(header.raw_header[name], value, "Invalid var %s" % name) def test_beam_integration_points(self): - self.maxDiff = None filepath = "test/test_data/d3plot_beamip/d3plot" @@ -187,7 +184,6 @@ def test_beam_integration_points(self): ) def test_correct_sort_of_more_than_100_state_files(self): - filepath = "test/test_data/order_d3plot/d3plot" d3plot = D3plot(filepath) @@ -196,7 +192,6 @@ def test_correct_sort_of_more_than_100_state_files(self): self.assertListEqual(timesteps.astype(int).tolist(), [1, 2, 10, 11, 12, 22, 100]) def test_femzip_basic(self): - self.maxDiff = None filepath1 = "test/test_data/femzip/d3plot.fz" @@ -207,7 +202,6 @@ def test_femzip_basic(self): D3plot.use_advanced_femzip_api = False for d3plot_kwargs in d3plot_kwargs_list: - d3plot1 = D3plot(filepath1, **d3plot_kwargs) d3plot2 = D3plot(filepath2, **d3plot_kwargs) @@ -217,7 +211,6 @@ def test_femzip_basic(self): self.assertDictEqual(array_diff, {}) def test_femzip_extended(self): - self.maxDiff = None filepath1 = "test/test_data/femzip/d3plot.fz" @@ -232,7 +225,6 @@ def test_femzip_extended(self): D3plot.use_advanced_femzip_api = True for d3plot_kwargs in d3plot_kwargs_list: - d3plot1 = D3plot(filepath1, **d3plot_kwargs) d3plot2 = D3plot(filepath2, **d3plot_kwargs) @@ -242,7 +234,6 @@ def test_femzip_extended(self): self.assertDictEqual(array_diff, {}) def test_part_filter(self): - self.maxDiff = None filepath = "test/test_data/simple_d3plot/d3plot" @@ -259,34 +250,29 @@ def test_part_filter(self): self.assertEqual(len(node_filter), 4915) def test_read_solid_integration_points(self): - filepath = "test/test_data/d3plot_solid_int/d3plot" # data from META - stress_valid = np.array( - [ - 2.132084e02, - 1.792203e02, - 1.397527e02, - 2.307352e02, - 2.132105e02, - 1.792210e02, - 1.397558e02, - 2.307304e02, - ] - ) - pstrain_valid = np.array( - [ - 2.227418e-02, - 2.576126e-03, - 1.909884e-02, - 3.695280e-02, - 2.227416e-02, - 2.576110e-03, - 1.909888e-02, - 3.695256e-02, - ] - ) + stress_valid = np.array([ + 2.132084e02, + 1.792203e02, + 1.397527e02, + 2.307352e02, + 2.132105e02, + 1.792210e02, + 1.397558e02, + 2.307304e02, + ]) + pstrain_valid = np.array([ + 2.227418e-02, + 2.576126e-03, + 1.909884e-02, + 3.695280e-02, + 2.227416e-02, + 2.576110e-03, + 1.909888e-02, + 3.695256e-02, + ]) last_timestep = -1 first_element = 0 stress_xx = 0 @@ -300,7 +286,6 @@ def test_read_solid_integration_points(self): np.array_equal(pstrain[last_timestep, first_element, :], pstrain_valid) def test_negative_to_positive_state_indexes(self) -> None: - indexes = set() new_indexes = _negative_to_positive_state_indexes(indexes, len(indexes)) self.assertSetEqual(indexes, new_indexes) @@ -314,7 +299,6 @@ def test_negative_to_positive_state_indexes(self) -> None: self.assertSetEqual(new_indexes, {0, 7}) def test_is_end_of_file_marker(self) -> None: - # -999999. 
in float32 bb = BinaryBuffer() bb.memoryview = memoryview(struct.pack(" None: result = D3plot._is_end_of_file_marker(bb, 0, np.int32) def test_write(self): - self.maxDiff = None filepaths = [ @@ -351,16 +334,11 @@ def test_write(self): "test/test_data/d3plot_solid_int/d3plot", ] - d3plot_kwargs_list = [ - {}, - {"buffered_reading": True}, - ] + d3plot_kwargs_list = [{}, {"buffered_reading": True}] with tempfile.TemporaryDirectory() as dirpath: - for d3plot_kwargs in d3plot_kwargs_list: for d3plot_filepath, _ in zip(filepaths, d3plot_kwargs_list): - print(d3plot_filepath) # read d3plot @@ -379,7 +357,6 @@ def test_write(self): self.assertDictEqual(array_diff, {}, err_msg) def test_write_new(self): - self.maxDiff = None d3plot1 = D3plot() @@ -418,7 +395,6 @@ def test_write_new(self): self.assertTrue(os.path.isfile(filepath + "01")) def test_append_4_shell_hists_then_read_bug(self): - self.maxDiff = None # we need some d3plot @@ -444,9 +420,12 @@ def test_append_4_shell_hists_then_read_bug(self): d3plot1 = D3plot(filepath1) n_timesteps, n_shells, n_layers = 1, d3plot1.header.n_shells, 3 - d3plot1.arrays[ArrayType.element_shell_history_vars] = np.random.random( - (n_timesteps, n_shells, n_layers, n_history_vars) - ) + d3plot1.arrays[ArrayType.element_shell_history_vars] = np.random.random(( + n_timesteps, + n_shells, + n_layers, + n_history_vars, + )) filepath2 = os.path.join(dirpath, "modified") d3plot1.write_d3plot(filepath2) @@ -455,7 +434,6 @@ def test_append_4_shell_hists_then_read_bug(self): self.assertTrue(ArrayType.element_shell_internal_energy not in d3plot_modif.arrays) def test_reading_selected_states(self): - # read all states filepath = "test/test_data/d3plot_solid_int/d3plot" diff --git a/test/unit_tests/dyna/test_d3plot_header.py b/test/unit_tests/dyna/test_d3plot_header.py index 4d107ed..1b93d44 100644 --- a/test/unit_tests/dyna/test_d3plot_header.py +++ b/test/unit_tests/dyna/test_d3plot_header.py @@ -12,9 +12,7 @@ class D3plotHeaderTest(TestCase): - def test_loading(self): - filepaths = [ "test/test_data/simple_d3plot/d3plot", "test/test_data/d3plot_node_temperature/d3plot", @@ -29,7 +27,6 @@ def test_loading(self): warnings.warn("No assertions of behavior, test is incomplete") def test_get_digit(self) -> None: - number = 1234567890 # the numbers are sorted from the lowest importance @@ -50,19 +47,9 @@ def test_get_digit(self) -> None: self.assertEqual(get_digit(number, 10), 0) def test_d3plot_filetype_from_integer(self) -> None: - - self.assertEqual( - d3plot_filetype_from_integer(1), - D3plotFiletype.D3PLOT, - ) - self.assertEqual( - d3plot_filetype_from_integer(5), - D3plotFiletype.D3PART, - ) - self.assertEqual( - d3plot_filetype_from_integer(11), - D3plotFiletype.D3EIGV, - ) + self.assertEqual(d3plot_filetype_from_integer(1), D3plotFiletype.D3PLOT) + self.assertEqual(d3plot_filetype_from_integer(5), D3plotFiletype.D3PART) + self.assertEqual(d3plot_filetype_from_integer(11), D3plotFiletype.D3EIGV) # INFOR is forbidden with self.assertRaises(ValueError): @@ -72,7 +59,6 @@ def test_d3plot_filetype_from_integer(self) -> None: d3plot_filetype_from_integer(0) def test_determine_file_settings(self) -> None: - # the routine checks the "filetype" flag # if it makes any sense under any circumstances # we assume the corresponding file settings @@ -81,7 +67,6 @@ def test_determine_file_settings(self) -> None: # 88 -> int64 for position in (44, 88): for filetype in (D3plotFiletype.D3PLOT, D3plotFiletype.D3PART, D3plotFiletype.D3EIGV): - bb = BinaryBuffer() bb.memoryview = 
memoryview(bytearray(256)) bb.write_number(position, filetype.value, np.int32) diff --git a/test/unit_tests/dyna/test_mapper.py b/test/unit_tests/dyna/test_mapper.py index 99bc88f..1b59266 100644 --- a/test/unit_tests/dyna/test_mapper.py +++ b/test/unit_tests/dyna/test_mapper.py @@ -322,9 +322,7 @@ def test_tshells(self): def test_internal_shell_energy(self): interal_energy = np.array([[1, 1, 0.12, 2.121202, 2.1123, 7.213]]).reshape(2, 3) - fz = { - (1, "internal_energy", FemzipVariableCategory.SHELL): interal_energy, - } + fz = {(1, "internal_energy", FemzipVariableCategory.SHELL): interal_energy} m = FemzipMapper() m.map(fz) diff --git a/test/unit_tests/io/test_binary_buffer.py b/test/unit_tests/io/test_binary_buffer.py index b79d3e9..3eb3996 100644 --- a/test/unit_tests/io/test_binary_buffer.py +++ b/test/unit_tests/io/test_binary_buffer.py @@ -8,26 +8,22 @@ class BinaryBufferTest(TestCase): def setUp(self): - # read file self.bb = BinaryBuffer("test/test_data/simple_d3plot/d3plot") def test_init(self): - # test some stuff for fun self.assertEqual(self.bb.mv_[40:42].tobytes(), b"\xaf\\") self.assertEqual(len(self.bb), len(self.bb.mv_)) self.assertEqual(len(self.bb), 192512) def test_memoryview(self): - self.assertEqual(self.bb.mv_, self.bb.memoryview) with self.assertRaises(AssertionError): self.bb.memoryview = None self.memoryview = memoryview(bytearray(b"")) def test_reading(self): - # numbers self.assertEqual(self.bb.read_number(44, np.int32), 1) self.assertEqual(self.bb.read_number(56, np.float32), 960.0) @@ -38,14 +34,12 @@ def test_reading(self): self.assertListEqual(self.bb.read_ndarray(60, 12, 1, np.int32).tolist(), [4, 4915, 6]) def test_save(self): - self.bb.save("test/test_data/tmp") eq = filecmp.cmp("test/test_data/simple_d3plot/d3plot", "test/test_data/tmp") os.remove("test/test_data/tmp") self.assertEqual(eq, True) def test_writing(self): - bb = BinaryBuffer("test/test_data/simple_d3plot/d3plot") bb.write_number(44, 13, np.int32) self.assertEqual(bb.read_number(44, np.int32), 13) @@ -55,7 +49,6 @@ def test_writing(self): self.assertListEqual(bb.read_ndarray(44, 16, 1, array.dtype).tolist(), array.tolist()) def test_size(self): - bb = BinaryBuffer("test/test_data/simple_d3plot/d3plot") self.assertEqual(bb.size, 192512) self.assertEqual(bb.size, len(bb)) diff --git a/test/unit_tests/math/test_sampling.py b/test/unit_tests/math/test_sampling.py index 3a3c0b5..a24f41e 100644 --- a/test/unit_tests/math/test_sampling.py +++ b/test/unit_tests/math/test_sampling.py @@ -5,6 +5,5 @@ class Test(unittest.TestCase): def test_unique_subsamples(self): - self.assertEqual(len(set(unique_subsamples(0, 20, 100))), 20) self.assertEqual(len(set(unique_subsamples(0, 200, 100))), 100) diff --git a/uv.lock b/uv.lock index 9c2992d..7b4cf3b 100644 --- a/uv.lock +++ b/uv.lock @@ -48,44 +48,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0c/37/fb6973edeb700f6e3d6ff222400602ab1830446c25c7b4676d8de93e65b8/backrefs-5.8-py39-none-any.whl", hash = "sha256:a66851e4533fb5b371aa0628e1fee1af05135616b86140c9d787a2ffdf4b8fdc", size = 380336 }, ] -[[package]] -name = "black" -version = "24.10.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "mypy-extensions" }, - { name = "packaging" }, - { name = "pathspec" }, - { name = "platformdirs" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/f3/465c0eb5cddf7dbbfe1fecd9b875d1dcf51b88923cd2c1d7e9ab95c6336b/black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812", size = 1623211 }, - { url = "https://files.pythonhosted.org/packages/df/57/b6d2da7d200773fdfcc224ffb87052cf283cec4d7102fab450b4a05996d8/black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea", size = 1457139 }, - { url = "https://files.pythonhosted.org/packages/6e/c5/9023b7673904a5188f9be81f5e129fff69f51f5515655fbd1d5a4e80a47b/black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f", size = 1753774 }, - { url = "https://files.pythonhosted.org/packages/e1/32/df7f18bd0e724e0d9748829765455d6643ec847b3f87e77456fc99d0edab/black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e", size = 1414209 }, - { url = "https://files.pythonhosted.org/packages/c2/cc/7496bb63a9b06a954d3d0ac9fe7a73f3bf1cd92d7a58877c27f4ad1e9d41/black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad", size = 1607468 }, - { url = "https://files.pythonhosted.org/packages/2b/e3/69a738fb5ba18b5422f50b4f143544c664d7da40f09c13969b2fd52900e0/black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50", size = 1437270 }, - { url = "https://files.pythonhosted.org/packages/c9/9b/2db8045b45844665c720dcfe292fdaf2e49825810c0103e1191515fc101a/black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392", size = 1737061 }, - { url = "https://files.pythonhosted.org/packages/a3/95/17d4a09a5be5f8c65aa4a361444d95edc45def0de887810f508d3f65db7a/black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175", size = 1423293 }, - { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, - { url = "https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, - { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, - { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, - { url = "https://files.pythonhosted.org/packages/d0/a0/a993f58d4ecfba035e61fca4e9f64a2ecae838fc9f33ab798c62173ed75c/black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981", size = 1643986 }, - { url = "https://files.pythonhosted.org/packages/37/d5/602d0ef5dfcace3fb4f79c436762f130abd9ee8d950fa2abdbf8bbc555e0/black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b", size = 1448085 }, - { url = "https://files.pythonhosted.org/packages/47/6d/a3a239e938960df1a662b93d6230d4f3e9b4a22982d060fc38c42f45a56b/black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2", size = 1760928 }, - { url = "https://files.pythonhosted.org/packages/dd/cf/af018e13b0eddfb434df4d9cd1b2b7892bab119f7a20123e93f6910982e8/black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b", size = 1436875 }, - { url = "https://files.pythonhosted.org/packages/fe/02/f408c804e0ee78c367dcea0a01aedde4f1712af93b8b6e60df981e0228c7/black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd", size = 1622516 }, - { url = "https://files.pythonhosted.org/packages/f8/b9/9b706ed2f55bfb28b436225a9c57da35990c9005b90b8c91f03924454ad7/black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f", size = 1456181 }, - { url = "https://files.pythonhosted.org/packages/0a/1c/314d7f17434a5375682ad097f6f4cc0e3f414f3c95a9b1bb4df14a0f11f9/black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800", size = 1752801 }, - { url = "https://files.pythonhosted.org/packages/39/a7/20e5cd9237d28ad0b31438de5d9f01c8b99814576f4c0cda1edd62caf4b0/black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7", size = 1413626 }, - { url = "https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, -] - [[package]] name = "certifi" version = "2025.1.31" @@ -593,7 +555,6 @@ dependencies = [ [package.dev-dependencies] dev = [ - { name = "black" }, { name = "git-changelog" }, { name = "go-task-bin" }, { name = "mkdocs" }, @@ -620,7 +581,6 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ - { name = "black", specifier = "==24.*" }, { name = "git-changelog", specifier = "==2.*" }, { name = "go-task-bin" }, { name = "mkdocs", specifier = "==1.*" }, @@ -629,7 +589,7 @@ dev = [ { name = "mkdocstrings", extras = ["python"], specifier = "==0.*" }, { name = "pytest", specifier = "==8.*" }, { name = "pytest-cov", specifier = "==5.*" }, - { name = "ruff", specifier = "==0.3.*" }, + { name = "ruff", specifier = "==0.11.*" }, { name = "twine", specifier = "==5.*" }, ] @@ -887,15 +847,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/23/62/0fe302c6d1be1c777cab0616e6302478251dfbf9055ad426f5d0def75c89/more_itertools-10.6.0-py3-none-any.whl", hash = 
"sha256:6eb054cb4b6db1473f6e15fcc676a08e4732548acd47c708f0e179c2c7c01e89", size = 63038 }, ] -[[package]] -name = "mypy-extensions" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, -] - [[package]] name = "nh3" version = "0.2.21" @@ -1328,26 +1279,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.3.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/6a/5cdb9e5ae04210ddc5b7b6cf31aeca50654de595e73e59961ce1a662656c/ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba", size = 2164419 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/59/8416cddfcc65b710d79374358a81632f2c4810326e8391d5a3c23f1cc422/ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce", size = 16845547 }, - { url = "https://files.pythonhosted.org/packages/94/db/79298ddaddad3ddb7799fe995d508c49c5f83dbcc1a0f88d672105776906/ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b", size = 8634118 }, - { url = "https://files.pythonhosted.org/packages/00/5c/bea349c531f50b8462470b49e5eff11a860f63b2796d8643d4e4e0722b64/ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663", size = 8282193 }, - { url = "https://files.pythonhosted.org/packages/c2/5d/62593a1ec896c07a497fb653fa269595772abc15ce8306d6edda94aa3b54/ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb", size = 7655584 }, - { url = "https://files.pythonhosted.org/packages/b7/b9/00ecf95ea51f82ab68430851d13266a892c60a23c5058604494d5e474bbf/ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51", size = 8843361 }, - { url = "https://files.pythonhosted.org/packages/83/bb/94d0d8f9ae71f6a5384ed6bc2dfd3fd651148604b4aaec9bd44d0754ba1c/ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a", size = 9591014 }, - { url = "https://files.pythonhosted.org/packages/d0/3e/df5317d2f3915cac6a34a88cfd7a7bf7ba8d96cb92b9acd42414ac0738fc/ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2", size = 9277992 }, - { url = "https://files.pythonhosted.org/packages/4c/5a/202bae9d5af45ea2a49f21998dc7f6a8cc0cc7269540043f6cba5dd45cdc/ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea", size = 10179960 }, - { url = 
"https://files.pythonhosted.org/packages/99/b2/4b0796f93d8bd7188e47c198407f2999579599cd5a11e1ed8a66ee18b4ac/ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f", size = 8853914 }, - { url = "https://files.pythonhosted.org/packages/02/ae/7533335b669fa879d5a36d7bb7c3cdc96b4e7d49e9da71218d3bd0a24852/ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1", size = 8177020 }, - { url = "https://files.pythonhosted.org/packages/a8/55/92a6099ea0e49d500199bc169d83158719f9bf2e1b87f5a1b53210ba74d8/ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7", size = 7646460 }, - { url = "https://files.pythonhosted.org/packages/2f/3e/7370e849c14a8461aee6c4f0c87a784f3f2a96ac542c1056fae982cd0504/ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b", size = 8446755 }, - { url = "https://files.pythonhosted.org/packages/3a/5e/acae79c630de116212cd4c3346a80f34fe2b421270fa76640cf1756a62e9/ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4", size = 8899196 }, - { url = "https://files.pythonhosted.org/packages/d3/27/05d3398f0f00742201518c3362d0046ef3a03a50a6c1f1632e9cf36f9a9e/ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f", size = 7766434 }, - { url = "https://files.pythonhosted.org/packages/d6/f4/cdc6a5350ce8c9741f3a79ceca912045204adf20e0b4222632664b3cbd1e/ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74", size = 8650055 }, - { url = "https://files.pythonhosted.org/packages/20/02/8ec400f495308b4a3833f34d344ccc853ebace7ea6dfd886813c2a6de3d8/ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f", size = 8213066 }, +version = "0.11.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/11/bcef6784c7e5d200b8a1f5c2ddf53e5da0efec37e6e5a44d163fb97e04ba/ruff-0.11.6.tar.gz", hash = "sha256:bec8bcc3ac228a45ccc811e45f7eb61b950dbf4cf31a67fa89352574b01c7d79", size = 4010053 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/1f/8848b625100ebcc8740c8bac5b5dd8ba97dd4ee210970e98832092c1635b/ruff-0.11.6-py3-none-linux_armv6l.whl", hash = "sha256:d84dcbe74cf9356d1bdb4a78cf74fd47c740bf7bdeb7529068f69b08272239a1", size = 10248105 }, + { url = "https://files.pythonhosted.org/packages/e0/47/c44036e70c6cc11e6ee24399c2a1e1f1e99be5152bd7dff0190e4b325b76/ruff-0.11.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9bc583628e1096148011a5d51ff3c836f51899e61112e03e5f2b1573a9b726de", size = 11001494 }, + { url = "https://files.pythonhosted.org/packages/ed/5b/170444061650202d84d316e8f112de02d092bff71fafe060d3542f5bc5df/ruff-0.11.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2959049faeb5ba5e3b378709e9d1bf0cab06528b306b9dd6ebd2a312127964a", size = 10352151 }, + { url = "https://files.pythonhosted.org/packages/ff/91/f02839fb3787c678e112c8865f2c3e87cfe1744dcc96ff9fc56cfb97dda2/ruff-0.11.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63c5d4e30d9d0de7fedbfb3e9e20d134b73a30c1e74b596f40f0629d5c28a193", size = 10541951 }, + { url = 
"https://files.pythonhosted.org/packages/9e/f3/c09933306096ff7a08abede3cc2534d6fcf5529ccd26504c16bf363989b5/ruff-0.11.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a4b9a4e1439f7d0a091c6763a100cef8fbdc10d68593df6f3cfa5abdd9246e", size = 10079195 }, + { url = "https://files.pythonhosted.org/packages/e0/0d/a87f8933fccbc0d8c653cfbf44bedda69c9582ba09210a309c066794e2ee/ruff-0.11.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5edf270223dd622218256569636dc3e708c2cb989242262fe378609eccf1308", size = 11698918 }, + { url = "https://files.pythonhosted.org/packages/52/7d/8eac0bd083ea8a0b55b7e4628428203441ca68cd55e0b67c135a4bc6e309/ruff-0.11.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f55844e818206a9dd31ff27f91385afb538067e2dc0beb05f82c293ab84f7d55", size = 12319426 }, + { url = "https://files.pythonhosted.org/packages/c2/dc/d0c17d875662d0c86fadcf4ca014ab2001f867621b793d5d7eef01b9dcce/ruff-0.11.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d8f782286c5ff562e4e00344f954b9320026d8e3fae2ba9e6948443fafd9ffc", size = 11791012 }, + { url = "https://files.pythonhosted.org/packages/f9/f3/81a1aea17f1065449a72509fc7ccc3659cf93148b136ff2a8291c4bc3ef1/ruff-0.11.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01c63ba219514271cee955cd0adc26a4083df1956d57847978383b0e50ffd7d2", size = 13949947 }, + { url = "https://files.pythonhosted.org/packages/61/9f/a3e34de425a668284e7024ee6fd41f452f6fa9d817f1f3495b46e5e3a407/ruff-0.11.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15adac20ef2ca296dd3d8e2bedc6202ea6de81c091a74661c3666e5c4c223ff6", size = 11471753 }, + { url = "https://files.pythonhosted.org/packages/df/c5/4a57a86d12542c0f6e2744f262257b2aa5a3783098ec14e40f3e4b3a354a/ruff-0.11.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4dd6b09e98144ad7aec026f5588e493c65057d1b387dd937d7787baa531d9bc2", size = 10417121 }, + { url = "https://files.pythonhosted.org/packages/58/3f/a3b4346dff07ef5b862e2ba06d98fcbf71f66f04cf01d375e871382b5e4b/ruff-0.11.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:45b2e1d6c0eed89c248d024ea95074d0e09988d8e7b1dad8d3ab9a67017a5b03", size = 10073829 }, + { url = "https://files.pythonhosted.org/packages/93/cc/7ed02e0b86a649216b845b3ac66ed55d8aa86f5898c5f1691797f408fcb9/ruff-0.11.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bd40de4115b2ec4850302f1a1d8067f42e70b4990b68838ccb9ccd9f110c5e8b", size = 11076108 }, + { url = "https://files.pythonhosted.org/packages/39/5e/5b09840fef0eff1a6fa1dea6296c07d09c17cb6fb94ed5593aa591b50460/ruff-0.11.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:77cda2dfbac1ab73aef5e514c4cbfc4ec1fbef4b84a44c736cc26f61b3814cd9", size = 11512366 }, + { url = "https://files.pythonhosted.org/packages/6f/4c/1cd5a84a412d3626335ae69f5f9de2bb554eea0faf46deb1f0cb48534042/ruff-0.11.6-py3-none-win32.whl", hash = "sha256:5151a871554be3036cd6e51d0ec6eef56334d74dfe1702de717a995ee3d5b287", size = 10485900 }, + { url = "https://files.pythonhosted.org/packages/42/46/8997872bc44d43df986491c18d4418f1caff03bc47b7f381261d62c23442/ruff-0.11.6-py3-none-win_amd64.whl", hash = "sha256:cce85721d09c51f3b782c331b0abd07e9d7d5f775840379c640606d3159cae0e", size = 11558592 }, + { url = "https://files.pythonhosted.org/packages/d7/6a/65fecd51a9ca19e1477c3879a7fda24f8904174d1275b419422ac00f6eee/ruff-0.11.6-py3-none-win_arm64.whl", hash = "sha256:3567ba0d07fb170b1b48d944715e3294b77f5b7679e8ba258199a250383ccb79", size = 10682766 }, ] 
[[package]]