From 6672c1717d53fdb8ceb29f0822313cafb3e3a4b0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment=20Robert?=
Date: Sun, 1 Dec 2024 09:59:25 +0100
Subject: [PATCH] STY: apply autofixes for RUF031

---
 .../tests/test_derived_quantities.py          |  4 +-
 .../test_particle_trajectories_pytest.py      |  4 +-
 yt/data_objects/tests/test_rays.py            | 10 ++--
 yt/frontends/amrex/tests/test_outputs.py      | 42 +++++++--------
 yt/frontends/parthenon/tests/test_outputs.py  |  4 +-
 yt/frontends/stream/io.py                     |  2 +-
 .../stream/tests/test_stream_stretched.py     |  2 +-
 .../coordinates/geographic_coordinates.py     |  2 +-
 .../tests/test_sph_pixelization_pytestonly.py | 51 ++++++++-----------
 .../test_offaxisprojection_pytestonly.py      |  2 +-
 10 files changed, 57 insertions(+), 66 deletions(-)

diff --git a/yt/data_objects/tests/test_derived_quantities.py b/yt/data_objects/tests/test_derived_quantities.py
index 35ac7283d42..a47680b1389 100644
--- a/yt/data_objects/tests/test_derived_quantities.py
+++ b/yt/data_objects/tests/test_derived_quantities.py
@@ -68,7 +68,7 @@ def test_average():
         ("gas", "density"), ("gas", "cell_mass")
     )
     a_mean = (ad["gas", "density"] * ad["gas", "cell_mass"]).sum() / ad[
-        ("gas", "cell_mass")
+        "gas", "cell_mass"
     ].sum()
     assert_rel_equal(my_mean, a_mean, 12)
 
@@ -87,7 +87,7 @@ def test_standard_deviation():
         ("gas", "density"), ("gas", "cell_mass")
     )
     a_mean = (ad["gas", "density"] * ad["gas", "cell_mass"]).sum() / ad[
-        ("gas", "cell_mass")
+        "gas", "cell_mass"
     ].sum()
     assert_rel_equal(my_mean, a_mean, 12)
     a_std = np.sqrt(
diff --git a/yt/data_objects/tests/test_particle_trajectories_pytest.py b/yt/data_objects/tests/test_particle_trajectories_pytest.py
index dbeda48fefe..9fa30049820 100644
--- a/yt/data_objects/tests/test_particle_trajectories_pytest.py
+++ b/yt/data_objects/tests/test_particle_trajectories_pytest.py
@@ -122,7 +122,7 @@ def dummy(pfilter, data):
 @pytest.mark.parametrize("ptype", [None, "io"])
 def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
     ds = particle_trajectories_test_dataset[0]
-    ids = ds.all_data()[("all", "particle_index")]
+    ids = ds.all_data()["all", "particle_index"]
     trajs = particle_trajectories_test_dataset.particle_trajectories(
         ids, ptype=ptype, suppress_logging=True
     )
@@ -138,7 +138,7 @@ def test_default_field_tuple(particle_trajectories_test_dataset, ptype):
 @pytest.mark.parametrize("ptype", [None, "io"])
 def test_time_and_index(particle_trajectories_test_dataset, ptype):
     ds = particle_trajectories_test_dataset[0]
-    ids = ds.all_data()[("all", "particle_index")]
+    ids = ds.all_data()["all", "particle_index"]
     trajs = particle_trajectories_test_dataset.particle_trajectories(
         ids, ptype=ptype, suppress_logging=True
     )
diff --git a/yt/data_objects/tests/test_rays.py b/yt/data_objects/tests/test_rays.py
index 3dcc2e97c83..7bafebca39d 100644
--- a/yt/data_objects/tests/test_rays.py
+++ b/yt/data_objects/tests/test_rays.py
@@ -110,9 +110,9 @@ def test_ray_particle2():
     # restricts you to 4 -- 5 digits precision
     assert_equal(ray0["t"].shape, (1,))
     assert_rel_equal(ray0["t"], np.array([0.5]), 5)
-    assert_rel_equal(ray0[("gas", "position")].v, np.array([[0.5, 0.5, 0.5]]), 5)
+    assert_rel_equal(ray0["gas", "position"].v, np.array([[0.5, 0.5, 0.5]]), 5)
     dl0 = integrate_kernel(kernelfunc, b0, hsml0)
-    dl0 *= ray0[("gas", "mass")].v / ray0[("gas", "density")].v
+    dl0 *= ray0["gas", "mass"].v / ray0["gas", "density"].v
     assert_rel_equal(ray0[("dts")].v, dl0 / len0, 4)
 
     ## Ray in the middle of the box:
@@ -133,10 +133,10 @@ def test_ray_particle2():
assert_equal(ray1["t"].shape, (2,)) assert_rel_equal(ray1["t"], np.array([0.25, 0.75]), 5) assert_rel_equal( - ray1[("gas", "position")].v, np.array([[1.5, 0.5, 1.5], [1.5, 0.5, 2.5]]), 5 + ray1["gas", "position"].v, np.array([[1.5, 0.5, 1.5], [1.5, 0.5, 2.5]]), 5 ) dl1 = integrate_kernel(kernelfunc, b1, hsml1) - dl1 *= ray1[("gas", "mass")].v / ray1[("gas", "density")].v + dl1 *= ray1["gas", "mass"].v / ray1["gas", "density"].v assert_rel_equal(ray1[("dts")].v, dl1 / len1, 4) ## Ray missing all particles: @@ -150,4 +150,4 @@ def test_ray_particle2(): ray2.field_data["dts"] = ray2.ds.arr(ray2._generate_container_field_sph("dts")) assert_equal(ray2["t"].shape, (0,)) assert_equal(ray2["dts"].shape, (0,)) - assert_equal(ray2[("gas", "position")].v.shape, (0, 3)) + assert_equal(ray2["gas", "position"].v.shape, (0, 3)) diff --git a/yt/frontends/amrex/tests/test_outputs.py b/yt/frontends/amrex/tests/test_outputs.py index bfde30d0fb6..14e1031243b 100644 --- a/yt/frontends/amrex/tests/test_outputs.py +++ b/yt/frontends/amrex/tests/test_outputs.py @@ -97,17 +97,17 @@ def test_nyx_particle_io(): grid = ds.index.grids[0] npart_grid_0 = 7908 # read directly from the header - assert_equal(grid[("all", "particle_position_x")].size, npart_grid_0) + assert_equal(grid["all", "particle_position_x"].size, npart_grid_0) assert_equal(grid["DM", "particle_position_y"].size, npart_grid_0) assert_equal(grid["all", "particle_position_z"].size, npart_grid_0) ad = ds.all_data() npart = 32768 # read directly from the header - assert_equal(ad[("all", "particle_velocity_x")].size, npart) + assert_equal(ad["all", "particle_velocity_x"].size, npart) assert_equal(ad["DM", "particle_velocity_y"].size, npart) assert_equal(ad["all", "particle_velocity_z"].size, npart) - assert np.all(ad[("all", "particle_mass")] == ad[("all", "particle_mass")][0]) + assert np.all(ad["all", "particle_mass"] == ad["all", "particle_mass"][0]) left_edge = ds.arr([0.0, 0.0, 0.0], "code_length") right_edge = ds.arr([4.0, 4.0, 4.0], "code_length") @@ -117,22 +117,22 @@ def test_nyx_particle_io(): assert np.all( np.logical_and( - reg[("all", "particle_position_x")] <= right_edge[0], - reg[("all", "particle_position_x")] >= left_edge[0], + reg["all", "particle_position_x"] <= right_edge[0], + reg["all", "particle_position_x"] >= left_edge[0], ) ) assert np.all( np.logical_and( - reg[("all", "particle_position_y")] <= right_edge[1], - reg[("all", "particle_position_y")] >= left_edge[1], + reg["all", "particle_position_y"] <= right_edge[1], + reg["all", "particle_position_y"] >= left_edge[1], ) ) assert np.all( np.logical_and( - reg[("all", "particle_position_z")] <= right_edge[2], - reg[("all", "particle_position_z")] >= left_edge[2], + reg["all", "particle_position_z"] <= right_edge[2], + reg["all", "particle_position_z"] >= left_edge[2], ) ) @@ -155,13 +155,13 @@ def test_castro_particle_io(): grid = ds.index.grids[2] npart_grid_2 = 49 # read directly from the header - assert_equal(grid[("all", "particle_position_x")].size, npart_grid_2) + assert_equal(grid["all", "particle_position_x"].size, npart_grid_2) assert_equal(grid["Tracer", "particle_position_y"].size, npart_grid_2) assert_equal(grid["all", "particle_position_y"].size, npart_grid_2) ad = ds.all_data() npart = 49 # read directly from the header - assert_equal(ad[("all", "particle_velocity_x")].size, npart) + assert_equal(ad["all", "particle_velocity_x"].size, npart) assert_equal(ad["Tracer", "particle_velocity_y"].size, npart) assert_equal(ad["all", "particle_velocity_y"].size, npart) @@ 
@@ -173,15 +173,15 @@ def test_castro_particle_io():
 
     assert np.all(
         np.logical_and(
-            reg[("all", "particle_position_x")] <= right_edge[0],
-            reg[("all", "particle_position_x")] >= left_edge[0],
+            reg["all", "particle_position_x"] <= right_edge[0],
+            reg["all", "particle_position_x"] >= left_edge[0],
         )
     )
 
     assert np.all(
         np.logical_and(
-            reg[("all", "particle_position_y")] <= right_edge[1],
-            reg[("all", "particle_position_y")] >= left_edge[1],
+            reg["all", "particle_position_y"] <= right_edge[1],
+            reg["all", "particle_position_y"] >= left_edge[1],
         )
     )
 
@@ -265,22 +265,22 @@ def test_warpx_particle_io():
 
     assert np.all(
         np.logical_and(
-            reg[("all", "particle_position_x")] <= right_edge[0],
-            reg[("all", "particle_position_x")] >= left_edge[0],
+            reg["all", "particle_position_x"] <= right_edge[0],
+            reg["all", "particle_position_x"] >= left_edge[0],
         )
     )
 
     assert np.all(
         np.logical_and(
-            reg[("all", "particle_position_y")] <= right_edge[1],
-            reg[("all", "particle_position_y")] >= left_edge[1],
+            reg["all", "particle_position_y"] <= right_edge[1],
+            reg["all", "particle_position_y"] >= left_edge[1],
        )
     )
 
     assert np.all(
         np.logical_and(
-            reg[("all", "particle_position_z")] <= right_edge[2],
-            reg[("all", "particle_position_z")] >= left_edge[2],
+            reg["all", "particle_position_z"] <= right_edge[2],
+            reg["all", "particle_position_z"] >= left_edge[2],
         )
     )
 
diff --git a/yt/frontends/parthenon/tests/test_outputs.py b/yt/frontends/parthenon/tests/test_outputs.py
index 2737b2cce46..6c5e0ee7db2 100644
--- a/yt/frontends/parthenon/tests/test_outputs.py
+++ b/yt/frontends/parthenon/tests/test_outputs.py
@@ -50,8 +50,8 @@ def field_func(name):
     # reading data of two fields and compare against each other (data is squared in output)
     ad = ds.all_data()
     assert_allclose(
-        ad[("parthenon", "one_minus_advected")] ** 2.0,
-        ad[("parthenon", "one_minus_advected_sq")],
+        ad["parthenon", "one_minus_advected"] ** 2.0,
+        ad["parthenon", "one_minus_advected_sq"],
     )
 
     # check if the peak is in the domain center (and at the highest refinement level)
diff --git a/yt/frontends/stream/io.py b/yt/frontends/stream/io.py
index 3d422992e53..d57da3b576c 100644
--- a/yt/frontends/stream/io.py
+++ b/yt/frontends/stream/io.py
@@ -161,7 +161,7 @@ def _yield_coordinates(self, data_file, needed_ptype=None):
             pos = np.column_stack(
                 [
                     self.fields[data_file.filename][
-                        (ptype, f"particle_position_{ax}")
+                        ptype, f"particle_position_{ax}"
                     ]
                     for ax in "xyz"
                 ]
diff --git a/yt/frontends/stream/tests/test_stream_stretched.py b/yt/frontends/stream/tests/test_stream_stretched.py
index 2b4b3dded40..b93f0bc199d 100644
--- a/yt/frontends/stream/tests/test_stream_stretched.py
+++ b/yt/frontends/stream/tests/test_stream_stretched.py
@@ -87,7 +87,7 @@ def test_cell_width_type(data_cell_widths_N16):
         cell_widths=cell_widths,
     )
 
-    _ = ds.slice(0, ds.domain_center[0])[("stream", "density")]
+    _ = ds.slice(0, ds.domain_center[0])["stream", "density"]
 
 
 def test_cell_width_dimensionality(data_cell_widths_N16):
diff --git a/yt/geometry/coordinates/geographic_coordinates.py b/yt/geometry/coordinates/geographic_coordinates.py
index 50cb4841f6f..036a28ac757 100644
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -160,7 +160,7 @@ def _dlatitude_to_dtheta(field, data):
 
         def _longitude_to_phi(field, data):
             # longitude runs from -180 to 180
-            lonvals = data[("index", "longitude")]
+            lonvals = data["index", "longitude"]
             neglons = lonvals < 0.0
             if np.any(neglons):
                 lonvals[neglons] = lonvals[neglons] + 360.0
diff --git a/yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py b/yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py
index 29dcf63f0dd..ef12dac8242 100644
--- a/yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py
+++ b/yt/geometry/coordinates/tests/test_sph_pixelization_pytestonly.py
@@ -113,7 +113,7 @@ def makemasses(i, j, k):
         center=center,
         data_source=source,
     )
-    img = prj.frb.data[("gas", "density")]
+    img = prj.frb.data["gas", "density"]
     if weighted:
         expected_out = np.zeros(
             (
@@ -240,7 +240,7 @@ def makemasses(i, j, k):
         buff_size=(outgridsize,) * 2,
         center=(_center, "cm"),
     )
-    img = slc.frb.data[("gas", "density")]
+    img = slc.frb.data["gas", "density"]
 
     # center is same in non-projection coords
     if axis == 0:
@@ -272,9 +272,9 @@ def makemasses(i, j, k):
     ad = ds.all_data()
     sphcoords = np.array(
         [
-            (ad[("gas", "x")]).to("cm"),
-            (ad[("gas", "y")]).to("cm"),
-            (ad[("gas", "z")]).to("cm"),
+            (ad["gas", "x"]).to("cm"),
+            (ad["gas", "y"]).to("cm"),
+            (ad["gas", "z"]).to("cm"),
         ]
     ).T
     # print("sphcoords:")
@@ -289,15 +289,12 @@ def makemasses(i, j, k):
     )
     # print("dists <= 1:")
     # print(dists <= 1)
-    sml = (ad[("gas", "smoothing_length")]).to("cm")
+    sml = (ad["gas", "smoothing_length"]).to("cm")
     normkern = cubicspline_python(dists / sml.v[np.newaxis, :])
-    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")]
+    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"]
     contsum = np.sum(sphcontr, axis=1)
     sphweights = (
-        normkern
-        / sml[np.newaxis, :] ** 3
-        * ad[("gas", "mass")]
-        / ad[("gas", "density")]
+        normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"]
     )
     weights = np.sum(sphweights, axis=1)
     nzeromask = np.logical_not(weights == 0)
@@ -406,7 +403,7 @@ def makemasses(i, j, k):
         center=(_center, "cm"),
         north_vector=e2dir,
     )
-    img = slc.frb.data[("gas", "density")]
+    img = slc.frb.data["gas", "density"]
 
     # center is same in x/y (e3dir/e2dir)
     gridcenx = (
@@ -434,9 +431,9 @@ def makemasses(i, j, k):
     ad = ds.all_data()
     sphcoords = np.array(
         [
-            (ad[("gas", "x")]).to("cm"),
-            (ad[("gas", "y")]).to("cm"),
-            (ad[("gas", "z")]).to("cm"),
+            (ad["gas", "x"]).to("cm"),
+            (ad["gas", "y"]).to("cm"),
+            (ad["gas", "z"]).to("cm"),
         ]
     ).T
     dists = distancematrix(
@@ -445,15 +442,12 @@ def makemasses(i, j, k):
         periodic=(periodic,) * 3,
         periods=np.array([3.0, 3.0, 3.0]),
     )
-    sml = (ad[("gas", "smoothing_length")]).to("cm")
+    sml = (ad["gas", "smoothing_length"]).to("cm")
     normkern = cubicspline_python(dists / sml.v[np.newaxis, :])
-    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")]
+    sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"]
     contsum = np.sum(sphcontr, axis=1)
     sphweights = (
-        normkern
-        / sml[np.newaxis, :] ** 3
-        * ad[("gas", "mass")]
-        / ad[("gas", "density")]
+        normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"]
     )
     weights = np.sum(sphweights, axis=1)
     nzeromask = np.logical_not(weights == 0)
@@ -509,9 +503,9 @@ def test_sph_grid(
     ad = ds.all_data()
     sphcoords = np.array(
         [
-            (ad[("gas", "x")]).to("cm"),
-            (ad[("gas", "y")]).to("cm"),
-            (ad[("gas", "z")]).to("cm"),
+            (ad["gas", "x"]).to("cm"),
+            (ad["gas", "y"]).to("cm"),
+            (ad["gas", "z"]).to("cm"),
         ]
     ).T
     gridx, gridy, gridz = np.meshgrid(xcens, ycens, zcens, indexing="ij")
@@ -522,15 +516,12 @@ def test_sph_grid(
     gridcoords = np.array([gridx, gridy, gridz]).T
     periods = bbox[:, 1] - bbox[:, 0]
     dists = distancematrix(gridcoords, sphcoords, periodic=periodic, periods=periods)
-    sml = (ad[("gas", "smoothing_length")]).to("cm")
"smoothing_length")]).to("cm") + sml = (ad["gas", "smoothing_length"]).to("cm") normkern = cubicspline_python(dists / sml.v[np.newaxis, :]) - sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad[("gas", "mass")] + sphcontr = normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] contsum = np.sum(sphcontr, axis=1) sphweights = ( - normkern - / sml[np.newaxis, :] ** 3 - * ad[("gas", "mass")] - / ad[("gas", "density")] + normkern / sml[np.newaxis, :] ** 3 * ad["gas", "mass"] / ad["gas", "density"] ) weights = np.sum(sphweights, axis=1) nzeromask = np.logical_not(weights == 0) diff --git a/yt/visualization/tests/test_offaxisprojection_pytestonly.py b/yt/visualization/tests/test_offaxisprojection_pytestonly.py index ab3928a5dc0..40212b9992c 100644 --- a/yt/visualization/tests/test_offaxisprojection_pytestonly.py +++ b/yt/visualization/tests/test_offaxisprojection_pytestonly.py @@ -125,7 +125,7 @@ def makemasses(i, j, k): north_vector=northvector, depth=depth, ) - img = prj.frb.data[("gas", "density")] + img = prj.frb.data["gas", "density"] if weighted: # periodic shifts will modify the (relative) dl values a bit expected_out = np.zeros(