From 8ec65d1e2a31793bb5ef28aab9d3ef08578cb2e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Thu, 31 Aug 2023 12:40:48 +0200 Subject: [PATCH 1/9] DEP: drop support for CPython 3.8 --- .github/workflows/build-test.yaml | 2 +- .github/workflows/type-checking.yaml | 4 ++-- .github/workflows/wheels.yaml | 2 +- pyproject.toml | 18 ++++++------------ 4 files changed, 10 insertions(+), 16 deletions(-) diff --git a/.github/workflows/build-test.yaml b/.github/workflows/build-test.yaml index ceab2770a0d..64b19d4406f 100644 --- a/.github/workflows/build-test.yaml +++ b/.github/workflows/build-test.yaml @@ -33,7 +33,7 @@ jobs: test-runner: [pytest] include: - os: ubuntu-latest - python-version: '3.8' + python-version: '3.9' dependencies: minimal tests-type: unit test-runner: pytest diff --git a/.github/workflows/type-checking.yaml b/.github/workflows/type-checking.yaml index a7c91184b94..41361c5f458 100644 --- a/.github/workflows/type-checking.yaml +++ b/.github/workflows/type-checking.yaml @@ -29,10 +29,10 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - # run with oldest support python version + # run with oldest supported python version # so that we always get compatible versions of # core dependencies at type-check time - python-version: '3.8' + python-version: '3.9' - name: Build run: | diff --git a/.github/workflows/wheels.yaml b/.github/workflows/wheels.yaml index e467f4032b2..17e4489467e 100644 --- a/.github/workflows/wheels.yaml +++ b/.github/workflows/wheels.yaml @@ -35,7 +35,7 @@ jobs: with: output-dir: dist env: - CIBW_BUILD: "cp38-* cp39-* cp310-* cp311-* cp312-*" + CIBW_BUILD: "cp39-* cp310-* cp311-* cp312-*" CIBW_SKIP: "*-musllinux_*" # numpy doesn't have wheels for musllinux so we can't build some quickly and without bloating CIBW_ARCHS_LINUX: "x86_64" CIBW_ARCHS_MACOS: x86_64 arm64 diff --git a/pyproject.toml b/pyproject.toml index 5c15dccf3c9..aa574c1e9d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,14 +2,12 @@ # keep in sync with .github/workflows/wheels.yaml requires = [ "setuptools>=61.2", - "importlib_resources>=1.3;python_version < '3.9'", # see https://github.com/yt-project/yt/issues/4044 "Cython>=3.0, <3.1", # TODO: simplify requirement after numpy 1.26.0 final is released - "oldest-supported-numpy ; python_version < '3.9'", - "numpy>=1.25, <2.0 ; python_version >= '3.9' and python_version < '3.12.0rc1'", + "numpy>=1.25, <2.0 ; python_version < '3.12.0rc1'", "numpy>=1.26.0b1, <2.0; python_version >= '3.12.0rc1'", # TODO: simplify requirement after ewah-bool-utils 1.1.0 final is released @@ -37,7 +35,6 @@ classifiers = [ "Programming Language :: C", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -48,17 +45,14 @@ classifiers = [ keywords = [ "astronomy astrophysics visualization amr adaptivemeshrefinement", ] -requires-python = ">=3.8" +requires-python = ">=3.9" dependencies = [ "cmyt>=1.1.2", "ewah-bool-utils>=1.0.2", "ipywidgets>=8.0.0", "matplotlib>=3.5", "more-itertools>=8.4", - # when Python 3.8 is dropped, keep minimal requirement in sync with NPY_TARGET_VERSION - # upper cap should be lifted when build-time requirement is bumped to >=2.0, see - # https://github.com/scipy/oldest-supported-numpy/issues/76#issuecomment-1628865694 - "numpy>=1.17.5,<2.0", + "numpy>=1.18,<2.0", # keep minimal requirement in sync with 
NPY_TARGET_VERSION "packaging>=20.9", "pillow>=6.2.1", # transitive dependency via MPL (>=3.3) "tomli-w>=0.4.0", @@ -219,7 +213,7 @@ minimal = [ "ipywidgets==8.0.0", "matplotlib==3.5", "more-itertools==8.4", - "numpy==1.17.5", + "numpy==1.18.0", "packaging==20.9", "pillow==6.2.1", "tomli-w==0.4.0", @@ -260,7 +254,7 @@ namespaces = false [tool.black] line-length = 88 -target-version = ['py38'] +target-version = ['py39'] include = '\.pyi?$' exclude = ''' /( @@ -488,7 +482,7 @@ ignore = [ [tool.mypy] -python_version = 3.8 +python_version = 3.9 show_error_codes = true ignore_missing_imports = true warn_unused_configs = true From 4cfd370a8445abd4620e3853c2c047ee3d649fd7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Thu, 31 Aug 2023 15:55:23 +0200 Subject: [PATCH 2/9] MNT: auto upgrade code base to Python 3.9 --- tests/unpin_requirements.py | 3 +- yt/_maintenance/deprecation.py | 4 +- yt/_typing.py | 18 +++---- yt/data_objects/data_containers.py | 12 ++--- .../index_subobjects/grid_patch.py | 3 +- .../index_subobjects/octree_subset.py | 3 +- yt/data_objects/particle_filters.py | 3 +- .../data_selection_objects.py | 3 +- yt/data_objects/static_output.py | 48 ++++++++----------- yt/data_objects/time_series.py | 4 +- yt/fields/derived_field.py | 3 +- yt/fields/domain_context.py | 5 +- yt/fields/field_detector.py | 6 +-- yt/fields/field_info_container.py | 20 ++++---- yt/fields/field_plugin_registry.py | 4 +- yt/frontends/adaptahop/definitions.py | 4 +- yt/frontends/adaptahop/io.py | 6 +-- yt/frontends/art/data_structures.py | 3 +- yt/frontends/artio/data_structures.py | 6 +-- yt/frontends/boxlib/data_structures.py | 5 +- yt/frontends/cf_radial/data_structures.py | 20 ++++---- yt/frontends/cf_radial/fields.py | 3 +- yt/frontends/chombo/data_structures.py | 3 +- yt/frontends/eagle/data_structures.py | 4 +- yt/frontends/enzo/io.py | 4 +- yt/frontends/enzo_e/data_structures.py | 3 +- yt/frontends/fits/data_structures.py | 3 +- yt/frontends/flash/data_structures.py | 3 +- yt/frontends/gadget/data_structures.py | 9 ++-- yt/frontends/gadget/io.py | 5 +- yt/frontends/open_pmd/data_structures.py | 6 +-- yt/frontends/open_pmd/fields.py | 4 +- yt/frontends/owls/fields.py | 3 +- yt/frontends/ramses/data_structures.py | 8 ++-- yt/frontends/ramses/field_handlers.py | 8 ++-- yt/frontends/ramses/particle_handlers.py | 8 ++-- yt/frontends/rockstar/definitions.py | 12 ++--- yt/frontends/stream/data_structures.py | 6 +-- yt/frontends/stream/misc.py | 6 +-- yt/frontends/ytdata/data_structures.py | 11 ++--- yt/funcs.py | 18 +++---- yt/geometry/coordinates/coordinate_handler.py | 6 +-- yt/geometry/geometry_handler.py | 10 ++-- yt/geometry/grid_geometry_handler.py | 6 +-- yt/geometry/oct_geometry_handler.py | 6 +-- yt/loaders.py | 24 +++++----- yt/startup_tasks.py | 3 +- yt/utilities/command_line.py | 18 +++---- yt/utilities/configure.py | 4 +- yt/utilities/exceptions.py | 3 +- yt/utilities/hierarchy_inspection.py | 3 +- yt/utilities/io_handler.py | 34 +++++-------- yt/utilities/minimal_representation.py | 3 +- yt/utilities/object_registries.py | 11 ++--- yt/utilities/on_demand_imports.py | 4 +- .../parallel_analysis_interface.py | 3 +- yt/visualization/_commons.py | 10 ++-- yt/visualization/_handlers.py | 18 +++---- yt/visualization/base_plot_types.py | 10 ++-- yt/visualization/color_maps.py | 4 +- yt/visualization/fixed_resolution.py | 8 ++-- yt/visualization/geo_plot_utils.py | 8 ++-- yt/visualization/particle_plots.py | 4 +- yt/visualization/plot_container.py | 23 ++++----- 
yt/visualization/plot_modifications.py | 40 ++++++++-------- yt/visualization/plot_window.py | 8 ++-- yt/visualization/profile_plotter.py | 7 +-- .../tests/test_image_comp_2D_plots.py | 5 +- .../volume_rendering/old_camera.py | 3 +- yt/visualization/volume_rendering/scene.py | 4 +- .../volume_rendering/zbuffer_array.py | 6 +-- 71 files changed, 268 insertions(+), 338 deletions(-) diff --git a/tests/unpin_requirements.py b/tests/unpin_requirements.py index fbba183e89e..0da23061dca 100644 --- a/tests/unpin_requirements.py +++ b/tests/unpin_requirements.py @@ -1,6 +1,5 @@ import re import sys -from typing import List import tomli_w @@ -12,7 +11,7 @@ PINNED_VERSION_REGEXP = re.compile(r",?(<|<=|==)([0-9a-z]+\.?)+") -def unpin_requirements(requirements: List[str]) -> List[str]: +def unpin_requirements(requirements: list[str]) -> list[str]: return [re.sub(PINNED_VERSION_REGEXP, "", _) for _ in requirements] diff --git a/yt/_maintenance/deprecation.py b/yt/_maintenance/deprecation.py index 27f8c78b3e2..3d37a8f4969 100644 --- a/yt/_maintenance/deprecation.py +++ b/yt/_maintenance/deprecation.py @@ -1,7 +1,7 @@ import warnings from functools import wraps from types import FunctionType -from typing import Dict, Optional +from typing import Optional def issue_deprecation_warning( @@ -45,7 +45,7 @@ def issue_deprecation_warning( warnings.warn(msg, DeprecationWarning, stacklevel=stacklevel) -def future_positional_only(positions2names: Dict[int, str], /, **depr_kwargs): +def future_positional_only(positions2names: dict[int, str], /, **depr_kwargs): """Warn users when using a future positional-only argument as keyword. Note that positional-only arguments are available from Python 3.8 See https://www.python.org/dev/peps/pep-0570/ diff --git a/yt/_typing.py b/yt/_typing.py index 12aeb0d4de3..86b793f47d6 100644 --- a/yt/_typing.py +++ b/yt/_typing.py @@ -1,31 +1,31 @@ -from typing import List, Optional, Tuple, Union +from typing import Optional, Union import unyt as un from numpy import ndarray -FieldDescT = Tuple[str, Tuple[str, List[str], Optional[str]]] -KnownFieldsT = Tuple[FieldDescT, ...] +FieldDescT = tuple[str, tuple[str, list[str], Optional[str]]] +KnownFieldsT = tuple[FieldDescT, ...] 
ParticleType = str FieldType = str FieldName = str -FieldKey = Tuple[FieldType, FieldName] +FieldKey = tuple[FieldType, FieldName] ImplicitFieldKey = FieldName AnyFieldKey = Union[FieldKey, ImplicitFieldKey] -DomainDimensions = Union[Tuple[int, ...], List[int], ndarray] +DomainDimensions = Union[tuple[int, ...], list[int], ndarray] -ParticleCoordinateTuple = Tuple[ +ParticleCoordinateTuple = tuple[ str, # particle type - Tuple[ndarray, ndarray, ndarray], # xyz + tuple[ndarray, ndarray, ndarray], # xyz Union[float, ndarray], # hsml ] # Geometry specific types AxisName = str -AxisOrder = Tuple[AxisName, AxisName, AxisName] +AxisOrder = tuple[AxisName, AxisName, AxisName] # types that can be converted to un.Unit Unit = Union[un.Unit, str] # types that can be converted to un.unyt_quantity -Quantity = Union[un.unyt_quantity, Tuple[float, Unit]] +Quantity = Union[un.unyt_quantity, tuple[float, Unit]] diff --git a/yt/data_objects/data_containers.py b/yt/data_objects/data_containers.py index 299e28a5625..d804c3c2674 100644 --- a/yt/data_objects/data_containers.py +++ b/yt/data_objects/data_containers.py @@ -2,7 +2,7 @@ import weakref from collections import defaultdict from contextlib import contextmanager -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional import numpy as np @@ -66,14 +66,14 @@ class YTDataContainer(abc.ABC): _chunk_info = None _num_ghost_zones = 0 - _con_args: Tuple[str, ...] = () + _con_args: tuple[str, ...] = () _skip_add = False - _container_fields: Tuple[AnyFieldKey, ...] = () - _tds_attrs: Tuple[str, ...] = () - _tds_fields: Tuple[str, ...] = () + _container_fields: tuple[AnyFieldKey, ...] = () + _tds_attrs: tuple[str, ...] = () + _tds_fields: tuple[str, ...] = () _field_cache = None _index = None - _key_fields: List[str] + _key_fields: list[str] def __init__(self, ds: Optional["Dataset"], field_parameters) -> None: """ diff --git a/yt/data_objects/index_subobjects/grid_patch.py b/yt/data_objects/index_subobjects/grid_patch.py index 3d4bc1c57c3..3c3aea3b16b 100644 --- a/yt/data_objects/index_subobjects/grid_patch.py +++ b/yt/data_objects/index_subobjects/grid_patch.py @@ -1,5 +1,4 @@ import weakref -from typing import List import numpy as np @@ -270,7 +269,7 @@ def retrieve_ghost_zones(self, n_zones, fields, all_levels=False, smoothed=False def get_vertex_centered_data( self, - fields: List[FieldKey], + fields: list[FieldKey], smoothed: bool = True, no_ghost: bool = False, ): diff --git a/yt/data_objects/index_subobjects/octree_subset.py b/yt/data_objects/index_subobjects/octree_subset.py index 0b32c987299..49823d45dec 100644 --- a/yt/data_objects/index_subobjects/octree_subset.py +++ b/yt/data_objects/index_subobjects/octree_subset.py @@ -1,7 +1,6 @@ from contextlib import contextmanager from functools import cached_property from itertools import product, repeat -from typing import Tuple import numpy as np from unyt import unyt_array @@ -39,7 +38,7 @@ class OctreeSubset(YTSelectionContainer): _num_ghost_zones = 0 _type_name = "octree_subset" _skip_add = True - _con_args: Tuple[str, ...] = ("base_region", "domain", "ds") + _con_args: tuple[str, ...] 
= ("base_region", "domain", "ds") _domain_offset = 0 _cell_count = -1 _block_order = "C" diff --git a/yt/data_objects/particle_filters.py b/yt/data_objects/particle_filters.py index 01f7d369ab8..54bbb6df1f4 100644 --- a/yt/data_objects/particle_filters.py +++ b/yt/data_objects/particle_filters.py @@ -1,13 +1,12 @@ import copy from contextlib import contextmanager -from typing import Dict from yt.fields.field_info_container import NullFunc, TranslationFunc from yt.funcs import mylog from yt.utilities.exceptions import YTIllDefinedFilter # One to one mapping -filter_registry: Dict[str, "ParticleFilter"] = {} +filter_registry: dict[str, "ParticleFilter"] = {} class DummyFieldInfo: diff --git a/yt/data_objects/selection_objects/data_selection_objects.py b/yt/data_objects/selection_objects/data_selection_objects.py index 47b6e3f4fd7..228f6d7420d 100644 --- a/yt/data_objects/selection_objects/data_selection_objects.py +++ b/yt/data_objects/selection_objects/data_selection_objects.py @@ -4,7 +4,6 @@ import uuid from collections import defaultdict from contextlib import contextmanager -from typing import Tuple import numpy as np from more_itertools import always_iterable @@ -1398,7 +1397,7 @@ def _get_bbox(self): """ return self.ds.domain_left_edge, self.ds.domain_right_edge - def get_bbox(self) -> Tuple[unyt_array, unyt_array]: + def get_bbox(self) -> tuple[unyt_array, unyt_array]: """ Return the bounding box for this data container. """ diff --git a/yt/data_objects/static_output.py b/yt/data_objects/static_output.py index ee3d2f8e2d5..f491eb7b2d9 100644 --- a/yt/data_objects/static_output.py +++ b/yt/data_objects/static_output.py @@ -14,14 +14,8 @@ from stat import ST_CTIME from typing import ( Any, - DefaultDict, - Dict, - List, Literal, Optional, - Set, - Tuple, - Type, Union, ) @@ -89,11 +83,7 @@ else: from typing_extensions import assert_never -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping - +from collections.abc import MutableMapping # We want to support the movie format in the future. # When such a thing comes to pass, I'll move all the stuff that is constant up @@ -178,26 +168,26 @@ def ireq(self, value): class Dataset(abc.ABC): default_fluid_type = "gas" default_field = ("gas", "density") - fluid_types: Tuple[FieldType, ...] = ("gas", "deposit", "index") - particle_types: Tuple[ParticleType, ...] = ("io",) # By default we have an 'all' - particle_types_raw: Optional[Tuple[ParticleType, ...]] = ("io",) + fluid_types: tuple[FieldType, ...] = ("gas", "deposit", "index") + particle_types: tuple[ParticleType, ...] 
= ("io",) # By default we have an 'all' + particle_types_raw: Optional[tuple[ParticleType, ...]] = ("io",) geometry: Geometry = Geometry.CARTESIAN coordinates = None storage_filename = None - particle_unions: Optional[Dict[ParticleType, ParticleUnion]] = None - known_filters: Optional[Dict[ParticleType, ParticleFilter]] = None - _index_class: Type[Index] - field_units: Optional[Dict[AnyFieldKey, Unit]] = None + particle_unions: Optional[dict[ParticleType, ParticleUnion]] = None + known_filters: Optional[dict[ParticleType, ParticleFilter]] = None + _index_class: type[Index] + field_units: Optional[dict[AnyFieldKey, Unit]] = None derived_field_list = requires_index("derived_field_list") fields = requires_index("fields") - conversion_factors: Optional[Dict[str, float]] = None + conversion_factors: Optional[dict[str, float]] = None # _instantiated represents an instantiation time (since Epoch) # the default is a place holder sentinel, falsy value _instantiated: float = 0 _particle_type_counts = None _proj_type = "quad_proj" _ionization_label_format = "roman_numeral" - _determined_fields: Optional[Dict[str, List[FieldKey]]] = None + _determined_fields: Optional[dict[str, list[FieldKey]]] = None fields_detected = False # these are set in self._parse_parameter_file() @@ -253,7 +243,7 @@ def __init__( self, filename: str, dataset_type: Optional[str] = None, - units_override: Optional[Dict[str, str]] = None, + units_override: Optional[dict[str, str]] = None, # valid unit_system values include all keys from unyt.unit_systems.unit_systems_registry + "code" unit_system: Literal[ "cgs", @@ -281,7 +271,7 @@ def __init__( return self.dataset_type = dataset_type self.conversion_factors = {} - self.parameters: Dict[str, Any] = {} + self.parameters: dict[str, Any] = {} self.region_expression = self.r = RegionExpression(self) self.known_filters = self.known_filters or {} self.particle_unions = self.particle_unions or {} @@ -751,7 +741,7 @@ def setup_deprecated_fields(self): def _setup_coordinate_handler(self, axis_order: Optional[AxisOrder]) -> None: # backward compatibility layer: # turning off type-checker on a per-line basis - cls: Type[CoordinateHandler] + cls: type[CoordinateHandler] if isinstance(self.geometry, tuple): # type: ignore [unreachable] issue_deprecation_warning( # type: ignore [unreachable] @@ -973,7 +963,7 @@ def _get_field_info( # https://github.com/yt-project/yt/issues/3381 return field_info - def _are_ambiguous(candidates: List[FieldKey]) -> bool: + def _are_ambiguous(candidates: list[FieldKey]) -> bool: if len(candidates) < 2: return False @@ -996,7 +986,7 @@ def _are_ambiguous(candidates: List[FieldKey]) -> bool: elif all(ft in self.particle_types for ft in ftypes): ptypes = ftypes - sub_types_list: List[Set[str]] = [] + sub_types_list: list[set[str]] = [] for pt in ptypes: if pt in self.particle_types_raw: sub_types_list.append({pt}) @@ -1026,7 +1016,7 @@ def _get_field_info_helper( self, field: Union[FieldKey, ImplicitFieldKey, DerivedField], /, - ) -> Tuple[DerivedField, List[FieldKey]]: + ) -> tuple[DerivedField, list[FieldKey]]: self.index ftype: str @@ -1041,7 +1031,7 @@ def _get_field_info_helper( raise YTFieldNotParseable(field) if ftype == "unknown": - candidates: List[FieldKey] = [ + candidates: list[FieldKey] = [ (ft, fn) for ft, fn in self.field_info if fn == fname ] @@ -1297,7 +1287,7 @@ def _assign_unit_system( # dimensions as amperes. 
mks_system = False mag_unit: Optional[unyt_quantity] = getattr(self, "magnetic_unit", None) - mag_dims: Optional[Set[Symbol]] + mag_dims: Optional[set[Symbol]] if mag_unit is not None: mag_dims = mag_unit.units.dimensions.free_symbols else: @@ -2075,7 +2065,7 @@ class ParticleFile: start: Optional[int] = None end: Optional[int] = None - total_particles: Optional[DefaultDict[str, int]] = None + total_particles: Optional[defaultdict[str, int]] = None def __init__(self, ds, io, filename, file_id, range=None): self.ds = ds diff --git a/yt/data_objects/time_series.py b/yt/data_objects/time_series.py index 9b270d1d5cb..61f06d3c3b2 100644 --- a/yt/data_objects/time_series.py +++ b/yt/data_objects/time_series.py @@ -5,7 +5,7 @@ import weakref from abc import ABC, abstractmethod from functools import wraps -from typing import Optional, Type +from typing import Optional import numpy as np from more_itertools import always_iterable @@ -146,7 +146,7 @@ class DatasetSeries: # this annotation should really be Optional[Type[Dataset]] # but we cannot import the yt.data_objects.static_output.Dataset # class here without creating a circular import for now - _dataset_cls: Optional[Type] = None + _dataset_cls: Optional[type] = None def __init_subclass__(cls, *args, **kwargs): super().__init_subclass__(*args, **kwargs) diff --git a/yt/fields/derived_field.py b/yt/fields/derived_field.py index e42e2704d28..42b6ff6636c 100644 --- a/yt/fields/derived_field.py +++ b/yt/fields/derived_field.py @@ -1,7 +1,8 @@ import contextlib import inspect import re -from typing import Iterable, Optional, Union +from collections.abc import Iterable +from typing import Optional, Union from more_itertools import always_iterable diff --git a/yt/fields/domain_context.py b/yt/fields/domain_context.py index 7f3f8780123..2e3df797bba 100644 --- a/yt/fields/domain_context.py +++ b/yt/fields/domain_context.py @@ -1,5 +1,4 @@ import abc -from typing import Tuple from yt._typing import FieldKey @@ -12,8 +11,8 @@ def __init__(cls, name, b, d): type.__init__(cls, name, b, d) domain_context_registry[name] = cls - _known_fluid_fields: Tuple[FieldKey, ...] - _known_particle_fields: Tuple[FieldKey, ...] + _known_fluid_fields: tuple[FieldKey, ...] + _known_particle_fields: tuple[FieldKey, ...] def __init__(self, ds): self.ds = ds diff --git a/yt/fields/field_detector.py b/yt/fields/field_detector.py index 85a3102d4d4..be3e2330035 100644 --- a/yt/fields/field_detector.py +++ b/yt/fields/field_detector.py @@ -1,5 +1,5 @@ from collections import defaultdict -from typing import Tuple, Union +from typing import Union import numpy as np @@ -101,7 +101,7 @@ def _reshape_vals(self, arr): return arr return arr.reshape(self.ActiveDimensions, order="C") - def __missing__(self, item: Union[Tuple[str, str], str]): + def __missing__(self, item: Union[tuple[str, str], str]): from yt.fields.derived_field import NullFunc if not isinstance(item, tuple): @@ -117,7 +117,7 @@ def __missing__(self, item: Union[Tuple[str, str], str]): # types not getting correctly identified. # Note that the *only* way this works is if we also fix our field # dependencies during checking. Bug #627 talks about this. 
- _item: Tuple[str, str] = finfo.name + _item: tuple[str, str] = finfo.name if finfo is not None and finfo._function is not NullFunc: try: for param, param_v in permute_params.items(): diff --git a/yt/fields/field_info_container.py b/yt/fields/field_info_container.py index fd48b286446..262cd373e5e 100644 --- a/yt/fields/field_info_container.py +++ b/yt/fields/field_info_container.py @@ -1,7 +1,7 @@ import sys from collections import UserDict from collections.abc import Callable -from typing import Dict, List, Optional, Tuple +from typing import Optional from unyt.exceptions import UnitConversionError @@ -48,17 +48,17 @@ class FieldInfoContainer(UserDict): fallback = None known_other_fields: KnownFieldsT = () known_particle_fields: KnownFieldsT = () - extra_union_fields: Tuple[FieldKey, ...] = () + extra_union_fields: tuple[FieldKey, ...] = () - def __init__(self, ds, field_list: List[FieldKey], slice_info=None): + def __init__(self, ds, field_list: list[FieldKey], slice_info=None): super().__init__() - self._show_field_errors: List[Exception] = [] + self._show_field_errors: list[Exception] = [] self.ds = ds # Now we start setting things up. self.field_list = field_list self.slice_info = slice_info - self.field_aliases: Dict[FieldKey, FieldKey] = {} - self.species_names: List[FieldName] = [] + self.field_aliases: dict[FieldKey, FieldKey] = {} + self.species_names: list[FieldName] = [] self.setup_fluid_aliases() @property @@ -216,8 +216,8 @@ def setup_smoothed_fields(self, ptype, num_neighbors=64, ftype="gas"): self.alias((ftype, "particle_mass"), (ptype, "particle_mass")) # Collect the names for all aliases if geometry is curvilinear - def get_aliases_gallery(self) -> List[FieldName]: - aliases_gallery: List[FieldName] = [] + def get_aliases_gallery(self) -> list[FieldName]: + aliases_gallery: list[FieldName] = [] known_other_fields = dict(self.known_other_fields) if self.ds is None: @@ -229,7 +229,7 @@ def get_aliases_gallery(self) -> List[FieldName]: or geometry is Geometry.CYLINDRICAL or geometry is Geometry.SPHERICAL ): - aliases: List[FieldName] + aliases: list[FieldName] for field in sorted(self.field_list): if field[0] in self.ds.particle_types: continue @@ -452,7 +452,7 @@ def alias( alias_name: FieldKey, original_name: FieldKey, units: Optional[str] = None, - deprecate: Optional[Tuple[str, Optional[str]]] = None, + deprecate: Optional[tuple[str, Optional[str]]] = None, ): """ Alias one field to another field. diff --git a/yt/fields/field_plugin_registry.py b/yt/fields/field_plugin_registry.py index 1f3e725b038..3134000603d 100644 --- a/yt/fields/field_plugin_registry.py +++ b/yt/fields/field_plugin_registry.py @@ -1,7 +1,7 @@ -from typing import Callable, Dict +from typing import Callable FunctionName = str -FieldPluginMap = Dict[FunctionName, Callable] +FieldPluginMap = dict[FunctionName, Callable] field_plugins: FieldPluginMap = {} diff --git a/yt/frontends/adaptahop/definitions.py b/yt/frontends/adaptahop/definitions.py index fd282e6a8fe..89a5c656849 100644 --- a/yt/frontends/adaptahop/definitions.py +++ b/yt/frontends/adaptahop/definitions.py @@ -4,11 +4,11 @@ """ -from typing import Tuple, Union +from typing import Union from yt.funcs import mylog -ATTR_T = Tuple[Tuple[Union[Tuple[str, ...], str], int, str], ...] +ATTR_T = tuple[tuple[Union[tuple[str, ...], str], int, str], ...] 
def HEADER_ATTRIBUTES(*, double: bool, longint: bool) -> ATTR_T: diff --git a/yt/frontends/adaptahop/io.py b/yt/frontends/adaptahop/io.py index de0b42b3168..d4521771b53 100644 --- a/yt/frontends/adaptahop/io.py +++ b/yt/frontends/adaptahop/io.py @@ -9,7 +9,7 @@ from functools import partial from operator import attrgetter -from typing import List, Tuple, Union +from typing import Union import numpy as np @@ -195,12 +195,12 @@ def _todo_from_attributes(attributes: ATTR_T, halo_attributes: ATTR_T): # attributes. This is used to skip fields most of the fields when reading # the tree_brick files. iskip = 0 - todo: List[Union[int, List[Tuple[Union[Tuple[str, ...], str], int, str]]]] = [] + todo: list[Union[int, list[tuple[Union[tuple[str, ...], str], int, str]]]] = [] attributes = tuple(set(attributes)) for i, (attrs, l, k) in enumerate(halo_attributes): - attrs_list: Tuple[str, ...] + attrs_list: tuple[str, ...] if isinstance(attrs, tuple): if not all(isinstance(a, str) for a in attrs): raise TypeError(f"Expected a single str or a tuple of str, got {attrs}") diff --git a/yt/frontends/art/data_structures.py b/yt/frontends/art/data_structures.py index 41fd577eca7..9c836adf78c 100644 --- a/yt/frontends/art/data_structures.py +++ b/yt/frontends/art/data_structures.py @@ -2,7 +2,6 @@ import os import struct import weakref -from typing import Type import numpy as np @@ -133,7 +132,7 @@ def _chunk_io(self, dobj, cache=True, local_only=False): class ARTDataset(Dataset): - _index_class: Type[Index] = ARTIndex + _index_class: type[Index] = ARTIndex _field_info_class = ARTFieldInfo def __init__( diff --git a/yt/frontends/artio/data_structures.py b/yt/frontends/artio/data_structures.py index 7c3523e804a..ec069430fd9 100644 --- a/yt/frontends/artio/data_structures.py +++ b/yt/frontends/artio/data_structures.py @@ -1,7 +1,7 @@ import os import weakref from collections import defaultdict -from typing import Optional, Tuple +from typing import Optional import numpy as np @@ -338,8 +338,8 @@ def _icoords_to_fcoords( self, icoords: np.ndarray, ires: np.ndarray, - axes: Optional[Tuple[int, ...]] = None, - ) -> Tuple[np.ndarray, np.ndarray]: + axes: Optional[tuple[int, ...]] = None, + ) -> tuple[np.ndarray, np.ndarray]: """ Accepts icoords and ires and returns appropriate fcoords and fwidth. 
Mostly useful for cases where we have irregularly spaced or structured diff --git a/yt/frontends/boxlib/data_structures.py b/yt/frontends/boxlib/data_structures.py index 9c5232e86b8..ad66a4d8142 100644 --- a/yt/frontends/boxlib/data_structures.py +++ b/yt/frontends/boxlib/data_structures.py @@ -4,7 +4,6 @@ from collections import namedtuple from functools import cached_property from stat import ST_CTIME -from typing import Type import numpy as np @@ -623,7 +622,7 @@ class BoxlibDataset(Dataset): """ _index_class = BoxlibHierarchy - _field_info_class: Type[FieldInfoContainer] = BoxlibFieldInfo + _field_info_class: type[FieldInfoContainer] = BoxlibFieldInfo _output_prefix = None _default_cparam_filename = "job_info" @@ -933,7 +932,7 @@ def __init__(self, ds, dataset_type="boxlib_native"): class AMReXDataset(BoxlibDataset): - _index_class: Type[BoxlibHierarchy] = AMReXHierarchy + _index_class: type[BoxlibHierarchy] = AMReXHierarchy _subtype_keyword = "amrex" _default_cparam_filename = "job_info" diff --git a/yt/frontends/cf_radial/data_structures.py b/yt/frontends/cf_radial/data_structures.py index 511bdf1e62f..e1b552d2816 100644 --- a/yt/frontends/cf_radial/data_structures.py +++ b/yt/frontends/cf_radial/data_structures.py @@ -7,7 +7,7 @@ import contextlib import os import weakref -from typing import Optional, Tuple +from typing import Optional import numpy as np from unyt import unyt_array @@ -89,10 +89,10 @@ def __init__( dataset_type="cf_radial", storage_filename=None, storage_overwrite: bool = False, - grid_shape: Optional[Tuple[int, int, int]] = None, - grid_limit_x: Optional[Tuple[float, float]] = None, - grid_limit_y: Optional[Tuple[float, float]] = None, - grid_limit_z: Optional[Tuple[float, float]] = None, + grid_shape: Optional[tuple[int, int, int]] = None, + grid_limit_x: Optional[tuple[float, float]] = None, + grid_limit_y: Optional[tuple[float, float]] = None, + grid_limit_z: Optional[tuple[float, float]] = None, units_override=None, ): """ @@ -204,8 +204,8 @@ def _handle(self, filename: Optional[str] = None): yield xrds def _validate_grid_dim( - self, radar, dim: str, grid_limit: Optional[Tuple[float, float]] = None - ) -> Tuple[float, float]: + self, radar, dim: str, grid_limit: Optional[tuple[float, float]] = None + ) -> tuple[float, float]: if grid_limit is None: if dim.lower() == "z": gate_alt = radar.gate_altitude["data"] @@ -234,8 +234,8 @@ def _validate_grid_dim( return grid_limit def _validate_grid_shape( - self, grid_shape: Optional[Tuple[int, int, int]] = None - ) -> Tuple[int, int, int]: + self, grid_shape: Optional[tuple[int, int, int]] = None + ) -> tuple[int, int, int]: if grid_shape is None: grid_shape = (100, 100, 100) mylog.info( @@ -248,7 +248,7 @@ def _validate_grid_shape( ) return grid_shape - def _round_grid_guess(self, bounds: Tuple[float, float], unit_str: str): + def _round_grid_guess(self, bounds: tuple[float, float], unit_str: str): # rounds the bounds to the closest 10 km increment that still contains # the grid_limit for findstr, repstr in self._field_info_class.unit_subs: diff --git a/yt/frontends/cf_radial/fields.py b/yt/frontends/cf_radial/fields.py index b91152b2008..073aacd1439 100644 --- a/yt/frontends/cf_radial/fields.py +++ b/yt/frontends/cf_radial/fields.py @@ -4,7 +4,6 @@ """ -from typing import List from yt.fields.field_info_container import FieldInfoContainer @@ -13,7 +12,7 @@ class CFRadialFieldInfo(FieldInfoContainer): known_other_fields = () # fields are set dynamically known_particle_fields = () units_to_ignore = ("dBz", "dBZ", 
"ratio") # set as nondimensional if found - field_units_ignored: List[str] = [] # fields for which units have been ignored + field_units_ignored: list[str] = [] # fields for which units have been ignored # (find, replace) pairs for sanitizing: unit_subs = (("degrees", "degree"), ("meters", "m"), ("_per_", "/")) diff --git a/yt/frontends/chombo/data_structures.py b/yt/frontends/chombo/data_structures.py index 125ab2ac425..a6b6cc324ed 100644 --- a/yt/frontends/chombo/data_structures.py +++ b/yt/frontends/chombo/data_structures.py @@ -1,7 +1,6 @@ import os import re import weakref -from typing import Type import numpy as np @@ -236,7 +235,7 @@ def _reconstruct_parent_child(self): class ChomboDataset(Dataset): _index_class = ChomboHierarchy - _field_info_class: Type[FieldInfoContainer] = ChomboFieldInfo + _field_info_class: type[FieldInfoContainer] = ChomboFieldInfo def __init__( self, diff --git a/yt/frontends/eagle/data_structures.py b/yt/frontends/eagle/data_structures.py index d3bb34e76d9..ff8f1bbad53 100644 --- a/yt/frontends/eagle/data_structures.py +++ b/yt/frontends/eagle/data_structures.py @@ -1,5 +1,3 @@ -from typing import Type - import numpy as np import yt.units @@ -13,7 +11,7 @@ class EagleDataset(GadgetHDF5Dataset): _particle_mass_name = "Mass" - _field_info_class: Type[FieldInfoContainer] = OWLSFieldInfo + _field_info_class: type[FieldInfoContainer] = OWLSFieldInfo _time_readin_ = "Time" def _parse_parameter_file(self): diff --git a/yt/frontends/enzo/io.py b/yt/frontends/enzo/io.py index 37e8f49cf41..1c38f196888 100644 --- a/yt/frontends/enzo/io.py +++ b/yt/frontends/enzo/io.py @@ -1,5 +1,3 @@ -from typing import Dict - import numpy as np from yt.geometry.selection_routines import GridSelector @@ -9,7 +7,7 @@ _convert_mass = ("particle_mass", "mass") -_particle_position_names: Dict[str, str] = {} +_particle_position_names: dict[str, str] = {} class IOHandlerPackedHDF5(BaseIOHandler): diff --git a/yt/frontends/enzo_e/data_structures.py b/yt/frontends/enzo_e/data_structures.py index a00f9463f65..c237eac7705 100644 --- a/yt/frontends/enzo_e/data_structures.py +++ b/yt/frontends/enzo_e/data_structures.py @@ -1,6 +1,5 @@ import os from functools import cached_property -from typing import Tuple import numpy as np @@ -289,7 +288,7 @@ class EnzoEDataset(Dataset): _index_class = EnzoEHierarchy _field_info_class = EnzoEFieldInfo _suffix = ".block_list" - particle_types: Tuple[str, ...] = () + particle_types: tuple[str, ...] 
= () particle_types_raw = None def __init__( diff --git a/yt/frontends/fits/data_structures.py b/yt/frontends/fits/data_structures.py index be188ab0a00..dbe87283c36 100644 --- a/yt/frontends/fits/data_structures.py +++ b/yt/frontends/fits/data_structures.py @@ -5,7 +5,6 @@ import weakref from collections import defaultdict from functools import cached_property -from typing import Type import numpy as np import numpy.core.defchararray as np_char @@ -316,7 +315,7 @@ def check_sky_coords(filename, ndim): class FITSDataset(Dataset): _index_class = FITSHierarchy - _field_info_class: Type[FieldInfoContainer] = FITSFieldInfo + _field_info_class: type[FieldInfoContainer] = FITSFieldInfo _dataset_type = "fits" _handle = None diff --git a/yt/frontends/flash/data_structures.py b/yt/frontends/flash/data_structures.py index 5452b928cde..608143add1e 100644 --- a/yt/frontends/flash/data_structures.py +++ b/yt/frontends/flash/data_structures.py @@ -1,6 +1,5 @@ import os import weakref -from typing import Type import numpy as np @@ -164,7 +163,7 @@ def _populate_grid_objects(self): class FLASHDataset(Dataset): - _index_class: Type[Index] = FLASHHierarchy + _index_class: type[Index] = FLASHHierarchy _field_info_class = FLASHFieldInfo _handle = None diff --git a/yt/frontends/gadget/data_structures.py b/yt/frontends/gadget/data_structures.py index f7c828203d6..dbf3ed7d34d 100644 --- a/yt/frontends/gadget/data_structures.py +++ b/yt/frontends/gadget/data_structures.py @@ -1,6 +1,5 @@ import os import struct -from typing import Type import numpy as np @@ -210,9 +209,9 @@ def _initialize_frontend_specific(self): class GadgetDataset(SPHDataset): - _index_class: Type[Index] = GadgetBinaryIndex - _file_class: Type[ParticleFile] = GadgetBinaryFile - _field_info_class: Type[FieldInfoContainer] = GadgetFieldInfo + _index_class: type[Index] = GadgetBinaryIndex + _file_class: type[ParticleFile] = GadgetBinaryFile + _field_info_class: type[FieldInfoContainer] = GadgetFieldInfo _particle_mass_name = "Mass" _particle_coordinates_name = "Coordinates" _particle_velocity_name = "Velocities" @@ -565,7 +564,7 @@ class GadgetHDF5File(ParticleFile): class GadgetHDF5Dataset(GadgetDataset): _file_class = GadgetHDF5File _index_class = SPHParticleIndex - _field_info_class: Type[FieldInfoContainer] = GadgetFieldInfo + _field_info_class: type[FieldInfoContainer] = GadgetFieldInfo _particle_mass_name = "Masses" _sph_ptypes = ("PartType0",) _suffix = ".hdf5" diff --git a/yt/frontends/gadget/io.py b/yt/frontends/gadget/io.py index 8cf4114a743..fd60da00060 100644 --- a/yt/frontends/gadget/io.py +++ b/yt/frontends/gadget/io.py @@ -1,7 +1,6 @@ import os from collections import defaultdict from functools import cached_property -from typing import Tuple import numpy as np @@ -38,7 +37,7 @@ class IOHandlerGadgetHDF5(IOHandlerSPH): _coord_name = "Coordinates" @cached_property - def var_mass(self) -> Tuple[str, ...]: + def var_mass(self) -> tuple[str, ...]: vm = [] for i, v in enumerate(self.ds["Massarr"]): if v == 0: @@ -351,7 +350,7 @@ def __init__(self, ds, *args, **kwargs): super().__init__(ds, *args, **kwargs) @cached_property - def var_mass(self) -> Tuple[str, ...]: + def var_mass(self) -> tuple[str, ...]: vm = [] for i, v in enumerate(self.ds["Massarr"]): if v == 0: diff --git a/yt/frontends/open_pmd/data_structures.py b/yt/frontends/open_pmd/data_structures.py index df3b1947044..9f5f2aef7fe 100644 --- a/yt/frontends/open_pmd/data_structures.py +++ b/yt/frontends/open_pmd/data_structures.py @@ -2,7 +2,7 @@ from operator import mul from os 
import listdir, path from re import match -from typing import List, Optional +from typing import Optional import numpy as np from packaging.version import Version @@ -34,8 +34,8 @@ class OpenPMDGrid(AMRGridPatch): __slots__ = ["_level_id"] # Every particle species and mesh might have different hdf5-indices and offsets - ftypes: Optional[List[str]] = [] - ptypes: Optional[List[str]] = [] + ftypes: Optional[list[str]] = [] + ptypes: Optional[list[str]] = [] findex = 0 foffset = 0 pindex = 0 diff --git a/yt/frontends/open_pmd/fields.py b/yt/frontends/open_pmd/fields.py index 7ded77a3337..c67dfc18dee 100644 --- a/yt/frontends/open_pmd/fields.py +++ b/yt/frontends/open_pmd/fields.py @@ -1,5 +1,3 @@ -from typing import List - import numpy as np from yt.fields.field_info_container import FieldInfoContainer @@ -141,7 +139,7 @@ class OpenPMDFieldInfo(FieldInfoContainer): * [1] http://yt-project.org/docs/dev/reference/field_list.html#universal-fields """ - _mag_fields: List[str] = [] + _mag_fields: list[str] = [] def __init__(self, ds, field_list): f = ds._handle diff --git a/yt/frontends/owls/fields.py b/yt/frontends/owls/fields.py index f1256ae810b..5f0b9f691f4 100644 --- a/yt/frontends/owls/fields.py +++ b/yt/frontends/owls/fields.py @@ -1,5 +1,4 @@ import os -from typing import Tuple import numpy as np @@ -44,7 +43,7 @@ def _get_ion_mass_frac(ion, ftype, itab, data): class OWLSFieldInfo(SPHFieldInfo): - _ions: Tuple[str, ...] = ( + _ions: tuple[str, ...] = ( "c1", "c2", "c3", diff --git a/yt/frontends/ramses/data_structures.py b/yt/frontends/ramses/data_structures.py index 59acf69030a..27cd9642e59 100644 --- a/yt/frontends/ramses/data_structures.py +++ b/yt/frontends/ramses/data_structures.py @@ -3,7 +3,7 @@ from collections import defaultdict from itertools import product from pathlib import Path -from typing import Optional, Tuple +from typing import Optional import numpy as np @@ -109,7 +109,7 @@ def check_standard_files(folder, iout): @staticmethod def _match_output_and_group( path: Path, - ) -> Tuple[Path, Optional[Path], Optional[str]]: + ) -> tuple[Path, Optional[Path], Optional[str]]: # Make sure we work with a directory of the form `output_XXXXX` for p in (path, path.parent): match = OUTPUT_DIR_RE.match(p.name) @@ -133,7 +133,7 @@ def _match_output_and_group( @classmethod def test_with_folder_name( cls, output_dir: Path - ) -> Tuple[bool, Optional[Path], Optional[Path], Optional[Path]]: + ) -> tuple[bool, Optional[Path], Optional[Path], Optional[Path]]: output_dir, group_dir, iout = cls._match_output_and_group(output_dir) ok = output_dir.is_dir() and iout is not None @@ -151,7 +151,7 @@ def test_with_folder_name( @classmethod def test_with_standard_file( cls, filename: Path - ) -> Tuple[bool, Optional[Path], Optional[Path], Optional[Path]]: + ) -> tuple[bool, Optional[Path], Optional[Path], Optional[Path]]: output_dir, group_dir, iout = cls._match_output_and_group(filename.parent) ok = ( filename.is_file() diff --git a/yt/frontends/ramses/field_handlers.py b/yt/frontends/ramses/field_handlers.py index 57cc46f242d..10860b5afd5 100644 --- a/yt/frontends/ramses/field_handlers.py +++ b/yt/frontends/ramses/field_handlers.py @@ -1,7 +1,7 @@ import abc import glob import os -from typing import List, Optional, Set, Tuple, Type +from typing import Optional from yt.config import ytcfg from yt.funcs import mylog @@ -10,7 +10,7 @@ from .io import _read_fluid_file_descriptor from .io_utils import read_offset -FIELD_HANDLERS: Set[Type["FieldFileHandler"]] = set() +FIELD_HANDLERS: 
set[type["FieldFileHandler"]] = set() def get_field_handlers(): @@ -144,7 +144,7 @@ class FieldFileHandler(abc.ABC, HandlerMixin): ftype: Optional[str] = None # The name to give to the field type fname: Optional[str] = None # The name of the file(s) attrs: Optional[ - Tuple[Tuple[str, int, str], ...] + tuple[tuple[str, int, str], ...] ] = None # The attributes of the header known_fields = None # A list of tuple containing the field name and its type config_field: Optional[str] = None # Name of the config section (if any) @@ -280,7 +280,7 @@ def offset(self): return self._offset @classmethod - def load_fields_from_yt_config(cls) -> List[str]: + def load_fields_from_yt_config(cls) -> list[str]: if cls.config_field and ytcfg.has_section(cls.config_field): cfg = ytcfg.get(cls.config_field, "fields") fields = [_.strip() for _ in cfg if _.strip() != ""] diff --git a/yt/frontends/ramses/particle_handlers.py b/yt/frontends/ramses/particle_handlers.py index aa4897c58d0..ea341132072 100644 --- a/yt/frontends/ramses/particle_handlers.py +++ b/yt/frontends/ramses/particle_handlers.py @@ -1,6 +1,6 @@ import abc import os -from typing import List, Optional, Set, Tuple, Type +from typing import Optional from yt._typing import FieldKey from yt.config import ytcfg @@ -15,7 +15,7 @@ _read_part_csv_file_descriptor, ) -PARTICLE_HANDLERS: Set[Type["ParticleFileHandler"]] = set() +PARTICLE_HANDLERS: set[type["ParticleFileHandler"]] = set() def get_particle_handlers(): @@ -43,9 +43,9 @@ class ParticleFileHandler(abc.ABC, HandlerMixin): fname: Optional[str] = None # The name of the file(s). file_descriptor: Optional[str] = None # The name of the file descriptor (if any) - attrs: Tuple[Tuple[str, int, str], ...] # The attributes of the header + attrs: tuple[tuple[str, int, str], ...] # The attributes of the header known_fields: Optional[ - List[FieldKey] + list[FieldKey] ] = None # A list of tuple containing the field name and its type config_field: Optional[str] = None # Name of the config section (if any) diff --git a/yt/frontends/rockstar/definitions.py b/yt/frontends/rockstar/definitions.py index f5c4c5e96f3..7e6753f8308 100644 --- a/yt/frontends/rockstar/definitions.py +++ b/yt/frontends/rockstar/definitions.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Tuple, Union +from typing import Any, Union import numpy as np @@ -25,11 +25,11 @@ # Note the final field here, which is a field for min/max format revision in # which the field appears. 
-KNOWN_REVISIONS: List[int] = [0, 1, 2] +KNOWN_REVISIONS: list[int] = [0, 1, 2] # using typing.Any here in lieu of numpy.typing.DTypeLike (should be backported for numpy < 1.20) -HaloDataType = Union[Tuple[str, Any], Tuple[str, Any, Tuple[int, int]]] -halo_dt: List[HaloDataType] = [ +HaloDataType = Union[tuple[str, Any], tuple[str, Any, tuple[int, int]]] +halo_dt: list[HaloDataType] = [ ("particle_identifier", np.int64), ("particle_position_x", np.float32), ("particle_position_y", np.float32), @@ -99,8 +99,8 @@ ] # using typing.Any here in lieu of numpy.typing.DTypeLike (should be backported for numpy < 1.20) -halo_dts_tmp: Dict[int, List[HaloDataType]] = {} -halo_dts: Dict[int, np.dtype] = {} +halo_dts_tmp: dict[int, list[HaloDataType]] = {} +halo_dts: dict[int, np.dtype] = {} for rev in KNOWN_REVISIONS: halo_dts_tmp[rev] = [] diff --git a/yt/frontends/stream/data_structures.py b/yt/frontends/stream/data_structures.py index dfc6110cf4f..1cb132fd3fe 100644 --- a/yt/frontends/stream/data_structures.py +++ b/yt/frontends/stream/data_structures.py @@ -6,7 +6,7 @@ from functools import cached_property from itertools import chain, product, repeat from numbers import Number as numeric_type -from typing import Optional, Tuple, Type +from typing import Optional import numpy as np from more_itertools import always_iterable @@ -321,7 +321,7 @@ def update_data(self, data): class StreamDataset(Dataset): - _index_class: Type[Index] = StreamHierarchy + _index_class: type[Index] = StreamHierarchy _field_info_class = StreamFieldInfo _dataset_type = "stream" @@ -462,7 +462,7 @@ def _find_particle_types(self): class StreamDictFieldHandler(UserDict): - _additional_fields: Tuple[FieldKey, ...] = () + _additional_fields: tuple[FieldKey, ...] = () @property def all_fields(self): diff --git a/yt/frontends/stream/misc.py b/yt/frontends/stream/misc.py index f67254ded09..7379b87660b 100644 --- a/yt/frontends/stream/misc.py +++ b/yt/frontends/stream/misc.py @@ -1,14 +1,12 @@ -from typing import List - import numpy as np from yt._typing import DomainDimensions def _validate_cell_widths( - cell_widths: List[np.ndarray], + cell_widths: list[np.ndarray], domain_dimensions: DomainDimensions, -) -> List[List[np.ndarray]]: +) -> list[list[np.ndarray]]: # check dimensionality if (nwids := len(cell_widths)) != (ndims := len(domain_dimensions)): raise ValueError( diff --git a/yt/frontends/ytdata/data_structures.py b/yt/frontends/ytdata/data_structures.py index ab91f1ce910..bed7e5ec3de 100644 --- a/yt/frontends/ytdata/data_structures.py +++ b/yt/frontends/ytdata/data_structures.py @@ -3,7 +3,6 @@ from collections import defaultdict from functools import cached_property from numbers import Number as numeric_type -from typing import Tuple, Type import numpy as np @@ -44,7 +43,7 @@ class SavedDataset(Dataset): """ geometry = Geometry.CARTESIAN - _con_attrs: Tuple[str, ...] = () + _con_attrs: tuple[str, ...] 
= () def _parse_parameter_file(self): self.refine_by = 2 @@ -239,7 +238,7 @@ class YTDataContainerDataset(YTDataset): _index_class = ParticleIndex _file_class = YTDataHDF5File - _field_info_class: Type[FieldInfoContainer] = YTDataContainerFieldInfo + _field_info_class: type[FieldInfoContainer] = YTDataContainerFieldInfo _suffix = ".h5" fluid_types = ("grid", "gas", "deposit", "index") @@ -480,12 +479,12 @@ def _populate_grid_objects(self): class YTGridDataset(YTDataset): """Dataset for saved covering grids, arbitrary grids, and FRBs.""" - _index_class: Type[Index] = YTGridHierarchy + _index_class: type[Index] = YTGridHierarchy _field_info_class = YTGridFieldInfo _dataset_type = "ytgridhdf5" geometry = Geometry.CARTESIAN default_fluid_type = "grid" - fluid_types: Tuple[str, ...] = ("grid", "gas", "deposit", "index") + fluid_types: tuple[str, ...] = ("grid", "gas", "deposit", "index") def __init__(self, filename, unit_system="cgs"): super().__init__(filename, self._dataset_type, unit_system=unit_system) @@ -717,7 +716,7 @@ class YTNonspatialDataset(YTGridDataset): _dataset_type = "ytnonspatialhdf5" geometry = Geometry.CARTESIAN default_fluid_type = "data" - fluid_types: Tuple[str, ...] = ("data", "gas") + fluid_types: tuple[str, ...] = ("data", "gas") def _parse_parameter_file(self): super(YTGridDataset, self)._parse_parameter_file() diff --git a/yt/funcs.py b/yt/funcs.py index a43a0a4ad30..6ee5c7f8ea0 100644 --- a/yt/funcs.py +++ b/yt/funcs.py @@ -18,7 +18,7 @@ from copy import deepcopy from functools import lru_cache, wraps from numbers import Number as numeric_type -from typing import Any, Callable, Dict, Optional, Type +from typing import Any, Callable, Optional import numpy as np from more_itertools import always_iterable, collapse, first @@ -547,10 +547,8 @@ def get_git_version(path): def get_yt_version(): - if sys.version_info >= (3, 9): - import importlib.resources as importlib_resources - else: - import importlib_resources + import importlib.resources as importlib_resources + version = get_git_version(os.path.dirname(importlib_resources.files("yt"))) if version is None: return version @@ -1002,10 +1000,8 @@ def matplotlib_style_context(style="yt.default", after_reset=False): """ # FUTURE: this function should be deprecated in favour of matplotlib.style.context # after support for matplotlib 3.6 and older versions is dropped. - if sys.version_info >= (3, 9): - import importlib.resources as importlib_resources - else: - import importlib_resources + import importlib.resources as importlib_resources + import matplotlib as mpl import matplotlib.style @@ -1350,7 +1346,7 @@ def sglob(pattern): return sorted(glob.glob(pattern)) -def dictWithFactory(factory: Callable[[Any], Any]) -> Type: +def dictWithFactory(factory: Callable[[Any], Any]) -> type: """ Create a dictionary class with a default factory function. Contrary to `collections.defaultdict`, the factory takes @@ -1454,7 +1450,7 @@ def validate_moment(moment, weight_field): ) -def setdefault_mpl_metadata(save_kwargs: Dict[str, Any], name: str) -> None: +def setdefault_mpl_metadata(save_kwargs: dict[str, Any], name: str) -> None: """ Set a default Software metadata entry for use with Matplotlib outputs. 
""" diff --git a/yt/geometry/coordinates/coordinate_handler.py b/yt/geometry/coordinates/coordinate_handler.py index d579a6209e8..22c88279232 100644 --- a/yt/geometry/coordinates/coordinate_handler.py +++ b/yt/geometry/coordinates/coordinate_handler.py @@ -2,7 +2,7 @@ import weakref from functools import cached_property from numbers import Number -from typing import Optional, Tuple +from typing import Optional import numpy as np @@ -36,7 +36,7 @@ def _vert(field, data): return _vert -def _setup_dummy_cartesian_coords_and_widths(registry, axes: Tuple[str]): +def _setup_dummy_cartesian_coords_and_widths(registry, axes: tuple[str]): for ax in axes: registry.add_field( ("index", f"d{ax}"), sampling_type="cell", function=_unknown_coord @@ -313,7 +313,7 @@ def cylindrical_to_cartesian(coord, center=(0, 0, 0)): return c2 -def _get_polar_bounds(self: CoordinateHandler, axes: Tuple[str, str]): +def _get_polar_bounds(self: CoordinateHandler, axes: tuple[str, str]): # a small helper function that is needed by two unrelated classes ri = self.axis_id[axes[0]] pi = self.axis_id[axes[1]] diff --git a/yt/geometry/geometry_handler.py b/yt/geometry/geometry_handler.py index 5f713256a9d..6906c171d39 100644 --- a/yt/geometry/geometry_handler.py +++ b/yt/geometry/geometry_handler.py @@ -1,7 +1,7 @@ import abc import os import weakref -from typing import Optional, Tuple +from typing import Optional import numpy as np @@ -22,8 +22,8 @@ class Index(ParallelAnalysisInterface, abc.ABC): """The base index class""" - _unsupported_objects: Tuple[str, ...] = () - _index_properties: Tuple[str, ...] = () + _unsupported_objects: tuple[str, ...] = () + _index_properties: tuple[str, ...] = () def __init__(self, ds, dataset_type): ParallelAnalysisInterface.__init__(self) @@ -54,8 +54,8 @@ def _icoords_to_fcoords( self, icoords: np.ndarray, ires: np.ndarray, - axes: Optional[Tuple[int, ...]] = None, - ) -> Tuple[np.ndarray, np.ndarray]: + axes: Optional[tuple[int, ...]] = None, + ) -> tuple[np.ndarray, np.ndarray]: # What's the use of raising NotImplementedError for this, when it's an # abstract base class? Well, only *some* of the subclasses have it -- # and for those that *don't*, we should not be calling it -- and since diff --git a/yt/geometry/grid_geometry_handler.py b/yt/geometry/grid_geometry_handler.py index e3d9a3d1857..4e1f55c8025 100644 --- a/yt/geometry/grid_geometry_handler.py +++ b/yt/geometry/grid_geometry_handler.py @@ -1,7 +1,7 @@ import abc import weakref from collections import defaultdict -from typing import Optional, Tuple +from typing import Optional import numpy as np @@ -452,8 +452,8 @@ def _icoords_to_fcoords( self, icoords: np.ndarray, ires: np.ndarray, - axes: Optional[Tuple[int, ...]] = None, - ) -> Tuple[np.ndarray, np.ndarray]: + axes: Optional[tuple[int, ...]] = None, + ) -> tuple[np.ndarray, np.ndarray]: """ Accepts icoords and ires and returns appropriate fcoords and fwidth. 
Mostly useful for cases where we have irregularly spaced or structured diff --git a/yt/geometry/oct_geometry_handler.py b/yt/geometry/oct_geometry_handler.py index 656a917233e..65d35fc6a62 100644 --- a/yt/geometry/oct_geometry_handler.py +++ b/yt/geometry/oct_geometry_handler.py @@ -1,4 +1,4 @@ -from typing import Optional, Tuple +from typing import Optional import numpy as np @@ -123,8 +123,8 @@ def _icoords_to_fcoords( self, icoords: np.ndarray, ires: np.ndarray, - axes: Optional[Tuple[int, ...]] = None, - ) -> Tuple[np.ndarray, np.ndarray]: + axes: Optional[tuple[int, ...]] = None, + ) -> tuple[np.ndarray, np.ndarray]: """ Accepts icoords and ires and returns appropriate fcoords and fwidth. Mostly useful for cases where we have irregularly spaced or structured diff --git a/yt/loaders.py b/yt/loaders.py index 0242df0f438..46636ef9c8a 100644 --- a/yt/loaders.py +++ b/yt/loaders.py @@ -9,7 +9,7 @@ import types import warnings from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union, cast from urllib.parse import urlsplit import numpy as np @@ -173,8 +173,8 @@ def load_simulation(fn, simulation_type, find_outputs=False): def _sanitize_axis_order_args( - geometry: Union[str, Tuple[str, AxisOrder]], axis_order: Optional[AxisOrder] -) -> Tuple[str, Optional[AxisOrder]]: + geometry: Union[str, tuple[str, AxisOrder]], axis_order: Optional[AxisOrder] +) -> tuple[str, Optional[AxisOrder]]: # this entire function should be removed at the end of its deprecation cycle geometry_str: str if isinstance(geometry, tuple): @@ -322,7 +322,7 @@ def load_uniform_grid( if number_of_particles > 0: particle_types = set_particle_types(data) # Used much further below. - pdata: Dict[Union[str, FieldKey], Any] = { + pdata: dict[Union[str, FieldKey], Any] = { "number_of_particles": number_of_particles } for key in list(data.keys()): @@ -667,7 +667,7 @@ def load_amr_grids( def load_particles( - data: Dict[AnyFieldKey, np.ndarray], + data: dict[AnyFieldKey, np.ndarray], length_unit=None, bbox=None, sim_time=None, @@ -806,7 +806,7 @@ def parse_unit(unit, dimension): field_units, data, _ = process_data(data) sfh = StreamDictFieldHandler() - pdata: Dict[AnyFieldKey, np.ndarray] = {} + pdata: dict[AnyFieldKey, np.ndarray] = {} for key in data.keys(): field: FieldKey if not isinstance(key, tuple): @@ -1534,13 +1534,13 @@ def load_sample( registry_table = get_data_registry_table() - known_names: List[str] = registry_table.dropna()["filename"].to_list() + known_names: list[str] = registry_table.dropna()["filename"].to_list() if topdir not in known_names: msg = f"'{topdir}' is not an available dataset." - lexical_distances: List[Tuple[str, int]] = [ + lexical_distances: list[tuple[str, int]] = [ (name, levenshtein_distance(name, topdir)) for name in known_names ] - suggestions: List[str] = [name for name, dist in lexical_distances if dist < 4] + suggestions: list[str] = [name for name, dist in lexical_distances if dist < 4] if len(suggestions) == 1: msg += f" Did you mean '{suggestions[0]}' ?" 
elif suggestions: @@ -1640,7 +1640,7 @@ def safe_extract(tar, path=".", members=None, *, numeric_owner=False): def _mount_helper( - archive: str, mountPoint: str, ratarmount_kwa: Dict, conn: "Connection" + archive: str, mountPoint: str, ratarmount_kwa: dict, conn: "Connection" ): try: fuseOperationsObject = ratarmount.TarMount( @@ -1667,7 +1667,7 @@ def _mount_helper( def load_archive( fn: Union[str, Path], path: str, - ratarmount_kwa: Optional[Dict] = None, + ratarmount_kwa: Optional[dict] = None, mount_timeout: float = 1.0, *args, **kwargs, @@ -1771,7 +1771,7 @@ def del_callback(self): def load_hdf5_file( fn: Union[str, "os.PathLike[str]"], root_node: Optional[str] = "/", - fields: Optional[List[str]] = None, + fields: Optional[list[str]] = None, bbox: Optional[np.ndarray] = None, nchunks: int = 0, dataset_arguments: Optional[dict] = None, diff --git a/yt/startup_tasks.py b/yt/startup_tasks.py index 19944fc953a..fd17a9b8549 100644 --- a/yt/startup_tasks.py +++ b/yt/startup_tasks.py @@ -4,7 +4,6 @@ import os import signal import sys -from typing import List from yt.config import ytcfg from yt.funcs import ( @@ -138,7 +137,7 @@ def error(self, message): if not hasattr(sys, "argv") or sys.argv is None: sys.argv = [] -unparsed_args: List[str] = [] +unparsed_args: list[str] = [] parallel_capable = False if not ytcfg.get("yt", "internals", "command_line"): diff --git a/yt/utilities/command_line.py b/yt/utilities/command_line.py index 7b59d1f4c19..62bf70af612 100644 --- a/yt/utilities/command_line.py +++ b/yt/utilities/command_line.py @@ -6,7 +6,7 @@ import sys import textwrap import urllib -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union import numpy as np from more_itertools import always_iterable @@ -186,8 +186,8 @@ def __init__(cls, name, b, d): class YTCommand(metaclass=YTCommandSubtype): - args: Tuple[Union[str, Dict[str, Any]], ...] = () - name: Optional[Union[str, List[str]]] = None + args: tuple[Union[str, dict[str, Any]], ...] 
= () + name: Optional[Union[str, list[str]]] = None description: str = "" aliases = () ndatasets: int = 1 @@ -660,10 +660,8 @@ class YTInstInfoCmd(YTCommand): """ def __call__(self, opts): - if sys.version_info >= (3, 9): - import importlib.resources as importlib_resources - else: - import importlib_resources + import importlib.resources as importlib_resources + path = os.path.dirname(importlib_resources.files("yt")) vstring = _print_installation_information(path) if vstring is not None: @@ -1183,10 +1181,8 @@ class YTUpdateCmd(YTCommand): """ def __call__(self, opts): - if sys.version_info >= (3, 9): - import importlib.resources as importlib_resources - else: - import importlib_resources + import importlib.resources as importlib_resources + path = os.path.dirname(importlib_resources.files("yt")) vstring = _print_installation_information(path) if vstring is not None: diff --git a/yt/utilities/configure.py b/yt/utilities/configure.py index 542dbb4e8b9..657b4a18986 100644 --- a/yt/utilities/configure.py +++ b/yt/utilities/configure.py @@ -2,13 +2,13 @@ import sys import warnings from pathlib import Path -from typing import Callable, List +from typing import Callable from more_itertools import always_iterable from yt.utilities.configuration_tree import ConfigLeaf, ConfigNode -configuration_callbacks: List[Callable[["YTConfig"], None]] = [] +configuration_callbacks: list[Callable[["YTConfig"], None]] = [] def config_dir(): diff --git a/yt/utilities/exceptions.py b/yt/utilities/exceptions.py index 0035a21a287..bb96ef7c79d 100644 --- a/yt/utilities/exceptions.py +++ b/yt/utilities/exceptions.py @@ -1,6 +1,5 @@ # We don't need to import 'exceptions' import os.path -from typing import List from unyt.exceptions import UnitOperationError @@ -79,7 +78,7 @@ def __init__(self, field, ds): self.field = field self.ds = ds - def _get_suggestions(self) -> List[FieldKey]: + def _get_suggestions(self) -> list[FieldKey]: from yt.funcs import levenshtein_distance field = self.field diff --git a/yt/utilities/hierarchy_inspection.py b/yt/utilities/hierarchy_inspection.py index 65f9385c2bd..fd6c54af3a3 100644 --- a/yt/utilities/hierarchy_inspection.py +++ b/yt/utilities/hierarchy_inspection.py @@ -1,11 +1,10 @@ import inspect from collections import Counter -from typing import List, Type from more_itertools import flatten -def find_lowest_subclasses(candidates: List[Type]) -> List[Type]: +def find_lowest_subclasses(candidates: list[type]) -> list[type]: """ This function takes a list of classes, and returns only the ones that are are not super classes of any others in the list. i.e. 
the ones that are at diff --git a/yt/utilities/io_handler.py b/yt/utilities/io_handler.py index ee0e6da9090..f1ef0f2522a 100644 --- a/yt/utilities/io_handler.py +++ b/yt/utilities/io_handler.py @@ -1,20 +1,12 @@ import os -import sys from collections import defaultdict +from collections.abc import Iterator, Mapping from contextlib import contextmanager from functools import _make_key, lru_cache -from typing import DefaultDict, Dict, List - -from yt._typing import FieldKey - -if sys.version_info >= (3, 9): - from collections.abc import Iterator, Mapping -else: - from typing import Iterator, Mapping import numpy as np -from yt._typing import ParticleCoordinateTuple +from yt._typing import FieldKey, ParticleCoordinateTuple from yt.geometry.selection_routines import GridSelector from yt.utilities.on_demand_imports import _h5py as h5py @@ -30,7 +22,7 @@ def _make_io_key(args, *_args, **kwargs): class BaseIOHandler: - _vector_fields: Dict[str, int] = {} + _vector_fields: dict[str, int] = {} _dataset_type: str _particle_reader = False _cache_on = False @@ -57,7 +49,7 @@ def __init__(self, ds): # We need a function for reading a list of sets # and a function for *popping* from a queue all the appropriate sets @contextmanager - def preload(self, chunk, fields: List[FieldKey], max_size): + def preload(self, chunk, fields: list[FieldKey], max_size): yield self def peek(self, grid, field): @@ -103,7 +95,7 @@ def _read_data(self, grid, field): pass def _read_fluid_selection( - self, chunks, selector, fields: List[FieldKey], size + self, chunks, selector, fields: list[FieldKey], size ) -> Mapping[FieldKey, np.ndarray]: # This function has an interesting history. It previously was mandate # to be defined by all of the subclasses. But, to avoid having to @@ -132,7 +124,7 @@ def _read_fluid_selection( ind[field] += obj.select(selector, data, rv[field], ind[field]) return rv - def io_iter(self, chunks, fields: List[FieldKey]): + def io_iter(self, chunks, fields: list[FieldKey]): raise NotImplementedError( "subclassing Dataset.io_iter this is required in order to use the default " "implementation of Dataset._read_fluid_selection. " @@ -158,7 +150,7 @@ def _read_chunk_data(self, chunk, fields): return {} def _read_particle_coords( - self, chunks, ptf: DefaultDict[str, List[str]] + self, chunks, ptf: defaultdict[str, list[str]] ) -> Iterator[ParticleCoordinateTuple]: # An iterator that yields particle coordinates for each chunk by particle # type. Must be implemented by each frontend. 
Must yield a tuple of @@ -172,15 +164,15 @@ def _read_particle_data_file(self, data_file, ptf, selector=None): raise NotImplementedError def _read_particle_selection( - self, chunks, selector, fields: List[FieldKey] - ) -> Dict[FieldKey, np.ndarray]: - data: Dict[FieldKey, List[np.ndarray]] = {} + self, chunks, selector, fields: list[FieldKey] + ) -> dict[FieldKey, np.ndarray]: + data: dict[FieldKey, list[np.ndarray]] = {} # Initialize containers for tracking particle, field information # ptf (particle field types) maps particle type to list of on-disk fields to read # field_maps stores fields, accounting for field unions - ptf: DefaultDict[str, List[str]] = defaultdict(list) - field_maps: DefaultDict[FieldKey, List[FieldKey]] = defaultdict(list) + ptf: defaultdict[str, list[str]] = defaultdict(list) + field_maps: defaultdict[FieldKey, list[FieldKey]] = defaultdict(list) # We first need a set of masks for each particle type chunks = list(chunks) @@ -204,7 +196,7 @@ def _read_particle_selection( for field_f in field_maps[field_r]: data[field_f].append(vals) - rv: Dict[FieldKey, np.ndarray] = {} # the return dictionary + rv: dict[FieldKey, np.ndarray] = {} # the return dictionary fields = list(data.keys()) for field_f in fields: # We need to ensure the arrays have the right shape if there are no diff --git a/yt/utilities/minimal_representation.py b/yt/utilities/minimal_representation.py index 42c3c70dc8f..920a4faa4f4 100644 --- a/yt/utilities/minimal_representation.py +++ b/yt/utilities/minimal_representation.py @@ -1,7 +1,6 @@ import abc import json import os -from typing import Tuple from uuid import uuid4 import numpy as np @@ -181,7 +180,7 @@ def _generate_post(self): class MinimalMappableData(MinimalRepresentation): - _attr_list: Tuple[str, ...] = ( + _attr_list: tuple[str, ...] 
= ( "field_data", "field", "weight_field", diff --git a/yt/utilities/object_registries.py b/yt/utilities/object_registries.py index fde2edd5619..3da10e313ed 100644 --- a/yt/utilities/object_registries.py +++ b/yt/utilities/object_registries.py @@ -3,19 +3,18 @@ # type hints are simplified as raw Type (instead of, e.g., Type[Dataset]) # to workaround circular imports -from typing import Dict, Type # subclasses of yt.data_objects.analyzer_objects.AnalysisTask -analysis_task_registry: Dict[str, Type] = {} +analysis_task_registry: dict[str, type] = {} # subclasses of yt.data_objects.data_containers.YTDataContainer -data_object_registry: Dict[str, Type] = {} +data_object_registry: dict[str, type] = {} # suclasses of yt.data_objects.derived_quantity.DerivedQuantity -derived_quantity_registry: Dict[str, Type] = {} +derived_quantity_registry: dict[str, type] = {} # suclasses of yt.data_objects.static_outputs.Dataset -output_type_registry: Dict[str, Type] = {} +output_type_registry: dict[str, type] = {} # subclasses of yt.data_objects.time_series.DatasetSeries -simulation_time_series_registry: Dict[str, Type] = {} +simulation_time_series_registry: dict[str, type] = {} diff --git a/yt/utilities/on_demand_imports.py b/yt/utilities/on_demand_imports.py index 03919271eae..e77a482e34f 100644 --- a/yt/utilities/on_demand_imports.py +++ b/yt/utilities/on_demand_imports.py @@ -1,7 +1,7 @@ import sys from functools import wraps from importlib.util import find_spec -from typing import Optional, Type +from typing import Optional class NotAModule: @@ -47,7 +47,7 @@ def __repr__(self) -> str: class OnDemand: - _default_factory: Type[NotAModule] = NotAModule + _default_factory: type[NotAModule] = NotAModule def __init_subclass__(cls): if not cls.__name__.endswith("_imports"): diff --git a/yt/utilities/parallel_tools/parallel_analysis_interface.py b/yt/utilities/parallel_tools/parallel_analysis_interface.py index 6ca12e7d096..e06a5a408e0 100644 --- a/yt/utilities/parallel_tools/parallel_analysis_interface.py +++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py @@ -5,7 +5,6 @@ import traceback from functools import wraps from io import StringIO -from typing import List import numpy as np from more_itertools import always_iterable @@ -663,7 +662,7 @@ def parallel_ring(objects, generator_func, mutable=False): class CommunicationSystem: - communicators: List["Communicator"] = [] + communicators: list["Communicator"] = [] def __init__(self): self.communicators.append(Communicator(None)) diff --git a/yt/visualization/_commons.py b/yt/visualization/_commons.py index 180a5c2c35a..065831c88b2 100644 --- a/yt/visualization/_commons.py +++ b/yt/visualization/_commons.py @@ -2,7 +2,7 @@ import sys import warnings from functools import wraps -from typing import TYPE_CHECKING, Optional, Type, TypeVar +from typing import TYPE_CHECKING, Optional, TypeVar import matplotlib as mpl from matplotlib.ticker import SymmetricalLogLocator @@ -25,10 +25,8 @@ def get_default_font_properties(): global _DEFAULT_FONT_PROPERTIES if _DEFAULT_FONT_PROPERTIES is None: - if sys.version_info >= (3, 9): - import importlib.resources as importlib_resources - else: - import importlib_resources + import importlib.resources as importlib_resources + _yt_style = mpl.rc_params_from_file( importlib_resources.files("yt") / "default.mplstyle", use_default_template=False, @@ -58,7 +56,7 @@ def _get_supported_canvas_classes(): ) -def get_canvas_class(suffix: str) -> Type["FigureCanvasBase"]: +def get_canvas_class(suffix: str) -> 
type["FigureCanvasBase"]: s = normalize_extension_string(suffix) if s not in _get_supported_image_file_formats(): raise ValueError(f"Unsupported file format '{suffix}'.") diff --git a/yt/visualization/_handlers.py b/yt/visualization/_handlers.py index c1f620e724f..fe674fb177c 100644 --- a/yt/visualization/_handlers.py +++ b/yt/visualization/_handlers.py @@ -1,7 +1,7 @@ import sys import weakref from numbers import Real -from typing import Any, Dict, List, Literal, Optional, Tuple, Type, Union +from typing import Any, Literal, Optional, Union import matplotlib as mpl import numpy as np @@ -47,7 +47,7 @@ class NormHandler: "_norm", "prefer_log", ) - _constraint_attrs: List[str] = [ + _constraint_attrs: list[str] = [ "vmin", "vmax", "dynamic_range", @@ -63,7 +63,7 @@ def __init__( vmin: Optional[un.unyt_quantity] = None, vmax: Optional[un.unyt_quantity] = None, dynamic_range: Optional[float] = None, - norm_type: Optional[Type[Normalize]] = None, + norm_type: Optional[type[Normalize]] = None, norm: Optional[Normalize] = None, linthresh: Optional[float] = None, ): @@ -85,7 +85,7 @@ def __init__( "A norm cannot be passed along other constraints." ) - def _get_constraints(self) -> Dict[str, Any]: + def _get_constraints(self) -> dict[str, Any]: return { attr: getattr(self, attr) for attr in self.__class__._constraint_attrs @@ -205,7 +205,7 @@ def dynamic_range(self, newval: Optional[float]) -> None: def get_dynamic_range( self, dvmin: Optional[float], dvmax: Optional[float] - ) -> Tuple[float, float]: + ) -> tuple[float, float]: if self.dynamic_range is None: raise RuntimeError( "Something went terribly wrong in setting up a dynamic range" @@ -235,11 +235,11 @@ def get_dynamic_range( ) @property - def norm_type(self) -> Optional[Type[Normalize]]: + def norm_type(self) -> Optional[type[Normalize]]: return self._norm_type @norm_type.setter - def norm_type(self, newval: Optional[Type[Normalize]]) -> None: + def norm_type(self, newval: Optional[type[Normalize]]) -> None: if not ( newval is None or (isinstance(newval, type) and issubclass(newval, Normalize)) @@ -314,7 +314,7 @@ def get_norm(self, data: np.ndarray, *args, **kw) -> Normalize: dvmax = 1 * getattr(data, "units", 1) kw.setdefault("vmax", dvmax) - norm_type: Type[Normalize] + norm_type: type[Normalize] if data.ndim == 3: assert data.shape[-1] == 4 # this is an RGBA array, only linear normalization makes sense here @@ -399,7 +399,7 @@ def get_minmax(data): BackgroundColor: TypeAlias = Union[ - Tuple[float, float, float, float], + tuple[float, float, float, float], # np.ndarray is only runtime-subscribtable since numpy 1.22 "np.ndarray[Any, Any]", str, diff --git a/yt/visualization/base_plot_types.py b/yt/visualization/base_plot_types.py index 734827857c4..ed94d08c98e 100644 --- a/yt/visualization/base_plot_types.py +++ b/yt/visualization/base_plot_types.py @@ -2,7 +2,7 @@ import warnings from abc import ABC from io import BytesIO -from typing import TYPE_CHECKING, Optional, Tuple, TypedDict, Union +from typing import TYPE_CHECKING, Optional, TypedDict, Union import matplotlib import numpy as np @@ -37,7 +37,7 @@ class FormatKwargs(TypedDict): style: Literal["scientific"] - scilimits: Tuple[int, int] + scilimits: tuple[int, int] useMathText: bool @@ -106,7 +106,7 @@ class PlotMPL: def __init__( self, fsize, - axrect: Tuple[float, float, float, float], + axrect: tuple[float, float, float, float], *, norm_handler: NormHandler, figure: Optional["Figure"] = None, @@ -142,7 +142,7 @@ def __init__( self.norm_handler = norm_handler - def 
_create_axes(self, axrect: Tuple[float, float, float, float]) -> None: + def _create_axes(self, axrect: tuple[float, float, float, float]) -> None: self.axes = self.figure.add_axes(axrect) def _get_canvas_classes(self): @@ -267,7 +267,7 @@ def __init__( self.cax = cax def _setup_layout_constraints( - self, figure_size: Union[Tuple[float, float], float], fontsize: float + self, figure_size: Union[tuple[float, float], float], fontsize: float ): # Setup base layout attributes # derived classes need to call this before super().__init__ diff --git a/yt/visualization/color_maps.py b/yt/visualization/color_maps.py index 14e451bc9af..02cc49b16f9 100644 --- a/yt/visualization/color_maps.py +++ b/yt/visualization/color_maps.py @@ -1,4 +1,4 @@ -from typing import Tuple, Union +from typing import Union import cmyt # noqa: F401 import matplotlib as mpl @@ -74,7 +74,7 @@ def register_yt_colormaps_from_cmyt(): mylog.warning("cannot register colormap '%s' (naming collision)", k) -def get_colormap_lut(cmap_id: Union[Tuple[str, str], str]): +def get_colormap_lut(cmap_id: Union[tuple[str, str], str]): # "lut" stands for "lookup table". This function provides a consistent and # reusable accessor to a hidden (and by default, uninitialized) attribute # (`_lut`) in registered colormaps, from matplotlib or palettable. diff --git a/yt/visualization/fixed_resolution.py b/yt/visualization/fixed_resolution.py index 2a7c7eca0ce..7487de50152 100644 --- a/yt/visualization/fixed_resolution.py +++ b/yt/visualization/fixed_resolution.py @@ -1,6 +1,6 @@ import weakref from functools import partial -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Optional import numpy as np @@ -109,14 +109,14 @@ def __init__( antialias=True, periodic=False, *, - filters: Optional[List["FixedResolutionBufferFilter"]] = None, + filters: Optional[list["FixedResolutionBufferFilter"]] = None, ): self.data_source = data_source self.ds = data_source.ds self.bounds = bounds self.buff_size = (int(buff_size[0]), int(buff_size[1])) self.antialias = antialias - self.data: Dict[str, np.ndarray] = {} + self.data: dict[str, np.ndarray] = {} self.axis = data_source.axis self.periodic = periodic self._data_valid = False @@ -130,7 +130,7 @@ def __init__( FixedResolutionBufferFilter, ) - self._filters: List[FixedResolutionBufferFilter] = ( + self._filters: list[FixedResolutionBufferFilter] = ( filters if filters is not None else [] ) diff --git a/yt/visualization/geo_plot_utils.py b/yt/visualization/geo_plot_utils.py index db6716e42e8..c7bd8197c0f 100644 --- a/yt/visualization/geo_plot_utils.py +++ b/yt/visualization/geo_plot_utils.py @@ -1,7 +1,7 @@ from types import FunctionType -from typing import Any, Dict, Optional, Tuple +from typing import Any, Optional -valid_transforms: Dict[str, FunctionType] = {} +valid_transforms: dict[str, FunctionType] = {} transform_list = [ "PlateCarree", @@ -71,8 +71,8 @@ def get_mpl_transform(mpl_proj) -> Optional[FunctionType]: # check to see if mpl_proj is a string or tuple, and construct args and # kwargs to pass to cartopy function based on that. 
key: Optional[str] = None - args: Tuple = () - kwargs: Dict[str, Any] = {} + args: tuple = () + kwargs: dict[str, Any] = {} if isinstance(mpl_proj, str): key = mpl_proj instantiated_func = valid_transforms[key](*args, **kwargs) diff --git a/yt/visualization/particle_plots.py b/yt/visualization/particle_plots.py index 45454dd60e5..6d4b92670bd 100644 --- a/yt/visualization/particle_plots.py +++ b/yt/visualization/particle_plots.py @@ -1,5 +1,3 @@ -from typing import List - import numpy as np from yt._maintenance.deprecation import issue_deprecation_warning @@ -25,7 +23,7 @@ class ParticleDummyDataSource: _dimensionality = 2 _con_args = ("center", "axis", "width", "fields", "weight_field") _tds_attrs = () - _key_fields: List[str] = [] + _key_fields: list[str] = [] def __init__( self, diff --git a/yt/visualization/plot_container.py b/yt/visualization/plot_container.py index 38d63dbd80b..ce389521037 100644 --- a/yt/visualization/plot_container.py +++ b/yt/visualization/plot_container.py @@ -2,11 +2,10 @@ import base64 import builtins import os -import sys import warnings from collections import defaultdict from functools import wraps -from typing import Any, Dict, Final, List, Literal, Optional, Tuple, Type, Union +from typing import Any, Final, Literal, Optional, Union import matplotlib from matplotlib.colors import LogNorm, Normalize, SymLogNorm @@ -114,7 +113,7 @@ def __init__(self, data_source, default_factory=None): class PlotContainer(abc.ABC): """A container for generic plots""" - _plot_dict_type: Type[PlotDictionary] = PlotDictionary + _plot_dict_type: type[PlotDictionary] = PlotDictionary _plot_type: Optional[str] = None _plot_valid = False @@ -133,16 +132,13 @@ def __init__(self, data_source, figure_size=None, fontsize: Optional[float] = No if fontsize is None: fontsize = self.__class__._default_font_size - if sys.version_info >= (3, 9): - font_dict = get_default_font_properties() | {"size": fontsize} - else: - font_dict = {**get_default_font_properties(), "size": fontsize} + font_dict = get_default_font_properties() | {"size": fontsize} self._font_properties = FontProperties(**font_dict) self._font_color = None self._xlabel = None self._ylabel = None - self._minorticks: Dict[FieldKey, bool] = {} + self._minorticks: dict[FieldKey, bool] = {} @accepts_all_fields @invalidate_plot @@ -458,10 +454,7 @@ def set_font(self, font_dict=None): # Set default values if the user does not explicitly set them. # this prevents reverting to the matplotlib defaults. _default_size = {"size": self.__class__._default_font_size} - if sys.version_info >= (3, 9): - font_dict = get_default_font_properties() | _default_size | font_dict - else: - font_dict = {**get_default_font_properties(), **_default_size, **font_dict} + font_dict = get_default_font_properties() | _default_size | font_dict self._font_properties = FontProperties(**font_dict) return self @@ -508,9 +501,9 @@ def set_figure_size(self, size): @validate_plot def save( self, - name: Optional[Union[str, List[str], Tuple[str, ...]]] = None, + name: Optional[Union[str, list[str], tuple[str, ...]]] = None, suffix: Optional[str] = None, - mpl_kwargs: Optional[Dict[str, Any]] = None, + mpl_kwargs: Optional[dict[str, Any]] = None, ): """saves the plot to disk. 
@@ -902,7 +895,7 @@ def __init__(self, data_source, figure_size, fontsize): def _get_default_handlers( self, field, default_display_units: Unit - ) -> Tuple[NormHandler, ColorbarHandler]: + ) -> tuple[NormHandler, ColorbarHandler]: usr_units_str = get_default_from_config( self.data_source, field=field, keys="units", defaults=[None] ) diff --git a/yt/visualization/plot_modifications.py b/yt/visualization/plot_modifications.py index 42848a0bfa2..bf977e9f95b 100644 --- a/yt/visualization/plot_modifications.py +++ b/yt/visualization/plot_modifications.py @@ -5,7 +5,7 @@ from abc import ABC, abstractmethod from functools import update_wrapper from numbers import Integral, Number -from typing import Any, Dict, Optional, Tuple, Type, Union +from typing import Any, Optional, Union import matplotlib import numpy as np @@ -56,10 +56,10 @@ else: from typing_extensions import assert_never -callback_registry: Dict[str, Type["PlotCallback"]] = {} +callback_registry: dict[str, type["PlotCallback"]] = {} -def _validate_factor_tuple(factor) -> Tuple[int, int]: +def _validate_factor_tuple(factor) -> tuple[int, int]: if ( is_sequence(factor) and len(factor) == 2 @@ -85,8 +85,8 @@ class PlotCallback(ABC): # "figure" this is disregarded. If "force" is included in the tuple, it # will *not* check whether or not the coord_system is in axis or figure, # and will only look at the geometries. - _supported_geometries: Optional[Tuple[str, ...]] = None - _incompatible_plot_types: Tuple[str, ...] = () + _supported_geometries: Optional[tuple[str, ...]] = None + _incompatible_plot_types: tuple[str, ...] = () def __init_subclass__(cls, *args, **kwargs): if inspect.isabstract(cls): @@ -438,7 +438,7 @@ class VelocityCallback(PlotCallback): def __init__( self, - factor: Union[Tuple[int, int], int] = 16, + factor: Union[tuple[int, int], int] = 16, *, scale=None, scale_units=None, @@ -581,7 +581,7 @@ class MagFieldCallback(PlotCallback): def __init__( self, - factor: Union[Tuple[int, int], int] = 16, + factor: Union[tuple[int, int], int] = 16, *, scale=None, scale_units=None, @@ -695,7 +695,7 @@ def __init__( field_y, field_c=None, *, - factor: Union[Tuple[int, int], int] = 16, + factor: Union[tuple[int, int], int] = 16, scale=None, scale_units=None, normalize=False, @@ -786,7 +786,7 @@ class QuiverCallback(BaseQuiverCallback): """ _type_name = "quiver" - _supported_geometries: Tuple[str, ...] = ( + _supported_geometries: tuple[str, ...] 
= ( "cartesian", "spectral_cube", "polar", @@ -800,7 +800,7 @@ def __init__( field_y, field_c=None, *, - factor: Union[Tuple[int, int], int] = 16, + factor: Union[tuple[int, int], int] = 16, scale=None, scale_units=None, normalize=False, @@ -904,13 +904,13 @@ def __init__( field: AnyFieldKey, levels: int = 5, *, - factor: Union[Tuple[int, int], int] = 4, - clim: Optional[Tuple[float, float]] = None, + factor: Union[tuple[int, int], int] = 4, + clim: Optional[tuple[float, float]] = None, label: bool = False, take_log: Optional[bool] = None, data_source: Optional[YTDataContainer] = None, - plot_args: Optional[Dict[str, Any]] = None, - text_args: Optional[Dict[str, Any]] = None, + plot_args: Optional[dict[str, Any]] = None, + text_args: Optional[dict[str, Any]] = None, ncont: Optional[int] = None, # deprecated ) -> None: if ncont is not None: @@ -1016,7 +1016,7 @@ def __call__(self, plot) -> None: if take_log: zi = np.log10(zi) - clim: Optional[Tuple[float, float]] + clim: Optional[tuple[float, float]] if take_log and self.clim is not None: clim = np.log10(self.clim[0]), np.log10(self.clim[1]) else: @@ -1285,7 +1285,7 @@ def __init__( linewidth_upscaling: float = 1.0, color: Optional[Union[_ColorType, FieldKey]] = None, color_threshold: Union[float, unyt_quantity] = float("-inf"), - factor: Union[Tuple[int, int], int] = 16, + factor: Union[tuple[int, int], int] = 16, field_color=None, # deprecated display_threshold=None, # deprecated plot_args=None, # deprecated @@ -1480,7 +1480,7 @@ class LinePlotCallback(PlotCallback): """ _type_name = "line" - _supported_geometries: Tuple[str, ...] = ( + _supported_geometries: tuple[str, ...] = ( "cartesian", "spectral_cube", "polar", @@ -1493,7 +1493,7 @@ def __init__( p2, *, coord_system="data", - plot_args: Optional[Dict[str, Any]] = None, + plot_args: Optional[dict[str, Any]] = None, **kwargs, ): self.p1 = p1 @@ -1774,7 +1774,7 @@ def __init__( head_length=0.01, starting_pos=None, coord_system="data", - plot_args: Optional[Dict[str, Any]] = None, # deprecated + plot_args: Optional[dict[str, Any]] = None, # deprecated **kwargs, ): self.pos = pos @@ -2135,7 +2135,7 @@ class TextLabelCallback(PlotCallback): """ _type_name = "text" - _supported_geometries: Tuple[str, ...] = ( + _supported_geometries: tuple[str, ...] 
= ( "cartesian", "spectral_cube", "polar", diff --git a/yt/visualization/plot_window.py b/yt/visualization/plot_window.py index 0cca1b6ac70..ebe64ad4d65 100644 --- a/yt/visualization/plot_window.py +++ b/yt/visualization/plot_window.py @@ -1,7 +1,7 @@ import abc from collections import defaultdict from numbers import Number -from typing import List, Optional, Type, Union +from typing import Optional, Union import matplotlib import numpy as np @@ -852,7 +852,7 @@ class PWViewerMPL(PlotWindow): """Viewer using matplotlib as a backend via the WindowPlotMPL.""" _current_field = None - _frb_generator: Optional[Type[FixedResolutionBuffer]] = None + _frb_generator: Optional[type[FixedResolutionBuffer]] = None _plot_type: Optional[str] = None def __init__(self, *args, **kwargs) -> None: @@ -870,7 +870,7 @@ def __init__(self, *args, **kwargs) -> None: # that it happens no later than instantiation from yt.visualization.plot_modifications import PlotCallback - self._callbacks: List[PlotCallback] = [] + self._callbacks: list[PlotCallback] = [] @property def _data_valid(self) -> bool: @@ -2251,7 +2251,7 @@ def __init__( class OffAxisProjectionDummyDataSource: _type_name = "proj" - _key_fields: List[str] = [] + _key_fields: list[str] = [] def __init__( self, diff --git a/yt/visualization/profile_plotter.py b/yt/visualization/profile_plotter.py index 79fab314c71..792ea667519 100644 --- a/yt/visualization/profile_plotter.py +++ b/yt/visualization/profile_plotter.py @@ -1,8 +1,9 @@ import base64 import builtins import os +from collections.abc import Iterable from functools import wraps -from typing import Any, Dict, Iterable, Optional, Tuple, Union +from typing import Any, Optional, Union import matplotlib import numpy as np @@ -270,7 +271,7 @@ def save( self, name: Optional[str] = None, suffix: Optional[str] = None, - mpl_kwargs: Optional[Dict[str, Any]] = None, + mpl_kwargs: Optional[dict[str, Any]] = None, ): r""" Saves a 1d profile plot. 
@@ -291,7 +292,7 @@ def save( # Mypy is hardly convinced that we have a `profiles` attribute # at this stage, so we're lasily going to deactivate it locally unique = set(self.plots.values()) - iters: Iterable[Tuple[Union[int, FieldKey], PlotMPL]] + iters: Iterable[tuple[Union[int, FieldKey], PlotMPL]] if len(unique) < len(self.plots): iters = enumerate(sorted(unique)) else: diff --git a/yt/visualization/tests/test_image_comp_2D_plots.py b/yt/visualization/tests/test_image_comp_2D_plots.py index d32b38ee61d..a01351e4f89 100644 --- a/yt/visualization/tests/test_image_comp_2D_plots.py +++ b/yt/visualization/tests/test_image_comp_2D_plots.py @@ -1,6 +1,5 @@ # image tests using pytest-mpl from itertools import chain -from typing import Dict import numpy as np import numpy.testing as npt @@ -161,7 +160,7 @@ def setup_class(cls): [("gas", "density"), ("index", "radius")], [("gas", "velocity_magnitude"), ("gas", "mass")], ] - cls.profiles: Dict[str, ProfilePlot] = {} + cls.profiles: dict[str, ProfilePlot] = {} for i_reg, reg in enumerate(regions): id_prefix = str(i_reg) for x_field, y_field in pr_fields: @@ -240,7 +239,7 @@ def setup_class(cls): ("gas", "velocity_magnitude"), ], ] - cls.profiles: Dict[str, PhasePlot] = {} + cls.profiles: dict[str, PhasePlot] = {} for i_reg, reg in enumerate(regions): id_prefix = str(i_reg) for x_field, y_field, z_field in pr_fields: diff --git a/yt/visualization/volume_rendering/old_camera.py b/yt/visualization/volume_rendering/old_camera.py index 1957871940d..d924226a876 100644 --- a/yt/visualization/volume_rendering/old_camera.py +++ b/yt/visualization/volume_rendering/old_camera.py @@ -1,6 +1,5 @@ import builtins from copy import deepcopy -from typing import List import numpy as np @@ -1177,7 +1176,7 @@ def rotation(self, theta, n_steps, rot_vector=None, clip_ratio=None): class InteractiveCamera(Camera): - frames: List[ImageArray] = [] + frames: list[ImageArray] = [] def snapshot(self, fn=None, clip_ratio=None): self._pyplot.figure(2) diff --git a/yt/visualization/volume_rendering/scene.py b/yt/visualization/volume_rendering/scene.py index 4b629a18853..0ef232f3f10 100644 --- a/yt/visualization/volume_rendering/scene.py +++ b/yt/visualization/volume_rendering/scene.py @@ -1,7 +1,7 @@ import builtins import functools from collections import OrderedDict -from typing import List, Optional +from typing import Optional import numpy as np @@ -368,7 +368,7 @@ def save_annotated( dpi: int = 100, sigma_clip: Optional[float] = None, render: bool = True, - tf_rect: Optional[List[float]] = None, + tf_rect: Optional[list[float]] = None, ): r"""Saves the most recently rendered image of the Scene to disk, including an image of the transfer function and and user-defined diff --git a/yt/visualization/volume_rendering/zbuffer_array.py b/yt/visualization/volume_rendering/zbuffer_array.py index 27dd47d6e5c..2b7ae746f92 100644 --- a/yt/visualization/volume_rendering/zbuffer_array.py +++ b/yt/visualization/volume_rendering/zbuffer_array.py @@ -1,5 +1,3 @@ -from typing import List, Tuple - import numpy as np @@ -76,8 +74,8 @@ def paint(self, ind, value, z): if __name__ == "__main__": - shape: Tuple[int, ...] = (64, 64) - shapes: List[Tuple[int, ...]] = [(64, 64), (16, 16, 4), (128,), (16, 32)] + shape: tuple[int, ...] 
= (64, 64) + shapes: list[tuple[int, ...]] = [(64, 64), (16, 16, 4), (128,), (16, 32)] for shape in shapes: b1 = ZBuffer(np.random.random(shape), np.ones(shape)) b2 = ZBuffer(np.random.random(shape), np.zeros(shape)) From 01ea5a94de6297fa91a184c669f84f576f1f3e32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Sun, 3 Sep 2023 18:38:58 +0200 Subject: [PATCH 3/9] MNT: ignore auto-formatting commit in git blame --- .git-blame-ignore-revs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index ae2c68af519..32a4c19e5eb 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -44,3 +44,6 @@ ef783151bfd7c6777fa25e9e06f95fe47653b3aa # apply linting to ipynb files ec8bb45ea1603f3862041fa9e8ec274afd9bbbfd + +# auto upgrade typing idioms from Python 3.8 to 3.9 +4cfd370a8445abd4620e3853c2c047ee3d649fd7 From 0e39b6d39565f4b6b0f0a88d75fcd6e611f62243 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Sun, 3 Sep 2023 19:42:20 +0200 Subject: [PATCH 4/9] DEP: update minimal dependencies for Python 3.9 --- pyproject.toml | 13 +++++-------- setupext.py | 2 +- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index aa574c1e9d4..f4e0ee22626 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,13 +52,12 @@ dependencies = [ "ipywidgets>=8.0.0", "matplotlib>=3.5", "more-itertools>=8.4", - "numpy>=1.18,<2.0", # keep minimal requirement in sync with NPY_TARGET_VERSION + "numpy>=1.19.3, <2.0", # keep minimal requirement in sync with NPY_TARGET_VERSION "packaging>=20.9", - "pillow>=6.2.1", # transitive dependency via MPL (>=3.3) + "pillow>=8.0.0", "tomli-w>=0.4.0", "tqdm>=3.4.0", "unyt>=2.9.2,<3.0", # see https://github.com/yt-project/yt/issues/4162 - "importlib_resources>=1.3;python_version < '3.9'", "tomli>=1.2.3;python_version < '3.11'", "typing-extensions>=4.1.0;python_version < '3.11'", ] @@ -134,7 +133,7 @@ ytdata = ["yt[HDF5]"] # "full" should contain all optional dependencies intended for users (not devs) # in particular it should enable support for all frontends full = [ - "cartopy>=0.22.0; python_version >= '3.9'", + "cartopy>=0.22.0", "firefly>=3.2.0", "glueviz>=0.13.3", "ipython>=2.0.0", @@ -213,13 +212,12 @@ minimal = [ "ipywidgets==8.0.0", "matplotlib==3.5", "more-itertools==8.4", - "numpy==1.18.0", + "numpy==1.19.3", "packaging==20.9", - "pillow==6.2.1", + "pillow==8.0.0", "tomli-w==0.4.0", "tqdm==3.4.0", "unyt==2.9.2", - "importlib_resources==1.3;python_version < '3.9'", "tomli==1.2.3;python_version < '3.11'", "typing-extensions==4.1.0;python_version < '3.11'", ] @@ -237,7 +235,6 @@ typecheck = [ "types-PyYAML==6.0.12.2", "types-chardet==5.0.4", "types-requests==2.28.11.5", - "importlib_resources==1.3;python_version < '3.9'", "typing-extensions==4.1.0;python_version < '3.11'", ] diff --git a/setupext.py b/setupext.py index 19f68a17062..0ff7c5ee8c9 100644 --- a/setupext.py +++ b/setupext.py @@ -414,7 +414,7 @@ def finalize_options(self): ] if sys.version_info >= (3, 9): # keep in sync with runtime requirements (pyproject.toml) - define_macros.append(("NPY_TARGET_VERSION", "NPY_1_18_API_VERSION")) + define_macros.append(("NPY_TARGET_VERSION", "NPY_1_19_API_VERSION")) else: pass From 0762d27855626e12cf54e3b648247f1f1a63d9e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Sun, 3 Sep 2023 19:57:01 +0200 Subject: [PATCH 5/9] MNT: manually upgrade some python-38-only paths --- setupext.py | 19 ++++--------------- 
yt/frontends/amrvac/data_structures.py | 17 ----------------- yt/sample_data/api.py | 5 +---- yt/visualization/_commons.py | 15 +++------------ 4 files changed, 8 insertions(+), 48 deletions(-) diff --git a/setupext.py b/setupext.py index 0ff7c5ee8c9..c351765444e 100644 --- a/setupext.py +++ b/setupext.py @@ -17,11 +17,7 @@ from setuptools.command.build_ext import build_ext as _build_ext from setuptools.command.sdist import sdist as _sdist from setuptools.errors import CompileError, LinkError - -if sys.version_info >= (3, 9): - import importlib.resources as importlib_resources -else: - import importlib_resources +import importlib.resources as importlib_resources log = logging.getLogger("setupext") @@ -205,11 +201,7 @@ def check_CPP14_flags(possible_compile_flags): return [] def get_ewah_bool_utils_path(): - if sys.version_info >= (3, 9): - return os.path.abspath(importlib_resources.files("ewah_bool_utils")) - else: - from pkg_resources import resource_filename - return os.path.dirname(os.path.abspath(resource_filename("ewah_bool_utils", "ewah_bool_wrap.pxd"))) + return os.path.abspath(importlib_resources.files("ewah_bool_utils")) def check_for_pyembree(std_libs): embree_libs = [] @@ -411,12 +403,9 @@ def finalize_options(self): define_macros = [ ("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION"), - ] - if sys.version_info >= (3, 9): # keep in sync with runtime requirements (pyproject.toml) - define_macros.append(("NPY_TARGET_VERSION", "NPY_1_19_API_VERSION")) - else: - pass + ("NPY_TARGET_VERSION", "NPY_1_19_API_VERSION"), + ] if self.define is None: self.define = define_macros diff --git a/yt/frontends/amrvac/data_structures.py b/yt/frontends/amrvac/data_structures.py index d5c66bda339..9625a0792a9 100644 --- a/yt/frontends/amrvac/data_structures.py +++ b/yt/frontends/amrvac/data_structures.py @@ -6,7 +6,6 @@ """ import os import struct -import sys import warnings import weakref from pathlib import Path @@ -26,22 +25,6 @@ from .fields import AMRVACFieldInfo from .io import read_amrvac_namelist -if sys.version_info < (3, 9): - # This is directly taken from the standard library, - # but only available from Python 3.9 - def _is_relative_to(self, *other): - """Return True if the path is relative to another path or False.""" - try: - self.relative_to(*other) - return True - except ValueError: - return False - - Path.is_relative_to = _is_relative_to # type: ignore -else: - # an else block is mandated for pyupgrade to enable auto-cleanup - pass - def _parse_geometry(geometry_tag: str) -> Geometry: """Translate AMRVAC's geometry tag to yt's format. 
diff --git a/yt/sample_data/api.py b/yt/sample_data/api.py index f712a7f648d..3d4e84fb3ea 100644 --- a/yt/sample_data/api.py +++ b/yt/sample_data/api.py @@ -79,10 +79,7 @@ def _parse_byte_size(s: str): def _get_sample_data_registry(): - if sys.version_info >= (3, 9): - import importlib.resources as importlib_resources - else: - import importlib_resources + import importlib.resources as importlib_resources return json.loads( importlib_resources.files("yt") diff --git a/yt/visualization/_commons.py b/yt/visualization/_commons.py index 065831c88b2..49fdc33c156 100644 --- a/yt/visualization/_commons.py +++ b/yt/visualization/_commons.py @@ -57,7 +57,7 @@ def _get_supported_canvas_classes(): def get_canvas_class(suffix: str) -> type["FigureCanvasBase"]: - s = normalize_extension_string(suffix) + s = suffix.removeprefix(".") if s not in _get_supported_image_file_formats(): raise ValueError(f"Unsupported file format '{suffix}'.") for cls in _get_supported_canvas_classes(): @@ -70,15 +70,6 @@ def get_canvas_class(suffix: str) -> type["FigureCanvasBase"]: ) -def normalize_extension_string(s: str) -> str: - if sys.version_info < (3, 9): - if s.startswith("."): - return s[1:] - return s - else: - return s.removeprefix(".") - - def validate_image_name(filename, suffix: Optional[str] = None) -> str: """ Build a valid image filename with a specified extension (default to png). @@ -86,10 +77,10 @@ def validate_image_name(filename, suffix: Optional[str] = None) -> str: Otherwise, suffix is appended to the filename, replacing any existing extension. """ name, psuffix = os.path.splitext(filename) - psuffix = normalize_extension_string(psuffix) + psuffix = psuffix.removeprefix(".") if suffix is not None: - suffix = normalize_extension_string(suffix) + suffix = suffix.removeprefix(".") if psuffix in _get_supported_image_file_formats(): if suffix in _get_supported_image_file_formats() and suffix != psuffix: From 38889a89a6f1094e41897ad33f05adc38871016f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Sun, 3 Sep 2023 20:10:34 +0200 Subject: [PATCH 6/9] TYP: upgrade mypy (1.0.0 -> 1.5.1) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f4e0ee22626..d0720f1db21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -231,7 +231,7 @@ test = [ "nose-timer~=1.0.0; python_version < '3.10'", ] typecheck = [ - "mypy==1.0.0", + "mypy==1.5.1", "types-PyYAML==6.0.12.2", "types-chardet==5.0.4", "types-requests==2.28.11.5", From 0ffc9f1ad6d21628e3cb068759fceb21fd4d96ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Sun, 3 Sep 2023 20:11:15 +0200 Subject: [PATCH 7/9] TYP: fix type checking errors relative to object registries --- yt/utilities/object_registries.py | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/yt/utilities/object_registries.py b/yt/utilities/object_registries.py index 3da10e313ed..6fa31e47f3a 100644 --- a/yt/utilities/object_registries.py +++ b/yt/utilities/object_registries.py @@ -1,20 +1,22 @@ # These are some of the data object registries that are used in different places in the # code. Not all of the self-registering objects are included in these. 
-# type hints are simplified as raw Type (instead of, e.g., Type[Dataset]) -# to workaround circular imports +from typing import TYPE_CHECKING -# subclasses of yt.data_objects.analyzer_objects.AnalysisTask -analysis_task_registry: dict[str, type] = {} +if TYPE_CHECKING: + from typing import Union -# subclasses of yt.data_objects.data_containers.YTDataContainer -data_object_registry: dict[str, type] = {} + from yt.data_objects.analyzer_objects import AnalysisTask + from yt.data_objects.data_containers import YTDataContainer + from yt.data_objects.derived_quantities import DerivedQuantity + from yt.data_objects.static_output import Dataset + from yt.data_objects.time_series import DatasetSeries + from yt.visualization.volume_rendering.old_camera import Camera -# suclasses of yt.data_objects.derived_quantity.DerivedQuantity -derived_quantity_registry: dict[str, type] = {} +analysis_task_registry: dict[str, type["AnalysisTask"]] = {} +derived_quantity_registry: dict[str, type["DerivedQuantity"]] = {} +output_type_registry: dict[str, type["Dataset"]] = {} +simulation_time_series_registry: dict[str, type["DatasetSeries"]] = {} -# suclasses of yt.data_objects.static_outputs.Dataset -output_type_registry: dict[str, type] = {} - -# subclasses of yt.data_objects.time_series.DatasetSeries -simulation_time_series_registry: dict[str, type] = {} +# TODO: split into 2 registries to avoid a typing.Union +data_object_registry: dict[str, "Union[type[YTDataContainer], type[Camera]]"] = {} From bdbef8fb0077da14a0697f202f66195ea8d8ce83 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Mon, 4 Sep 2023 08:17:59 +0200 Subject: [PATCH 8/9] MNT: cleanup dangling imports --- setup.py | 5 +++-- setupext.py | 2 -- yt/data_objects/static_output.py | 9 ++------- yt/visualization/plot_window.py | 11 +++++------ 4 files changed, 10 insertions(+), 17 deletions(-) diff --git a/setup.py b/setup.py index e9c1881f705..33bf62f5035 100644 --- a/setup.py +++ b/setup.py @@ -2,6 +2,7 @@ import os from collections import defaultdict from distutils.ccompiler import get_default_compiler +from importlib import resources as importlib_resources from setuptools import Distribution, setup @@ -10,7 +11,6 @@ check_for_openmp, check_for_pyembree, create_build_ext, - get_ewah_bool_utils_path, install_ccompiler, ) @@ -49,7 +49,8 @@ "yt/frontends/artio/artio_headers/", ], "STD_LIBS": std_libs, - "EWAH_LIBS": std_libs + [get_ewah_bool_utils_path()], + "EWAH_LIBS": std_libs + + [os.path.abspath(importlib_resources.files("ewah_bool_utils"))], "OMP_ARGS": omp_args, "FIXED_INTERP": "yt/utilities/lib/fixed_interpolator.cpp", "ARTIO_SOURCE": sorted(glob.glob("yt/frontends/artio/artio_headers/*.c")), diff --git a/setupext.py b/setupext.py index c351765444e..fb7681a5141 100644 --- a/setupext.py +++ b/setupext.py @@ -200,8 +200,6 @@ def check_CPP14_flags(possible_compile_flags): ) return [] -def get_ewah_bool_utils_path(): - return os.path.abspath(importlib_resources.files("ewah_bool_utils")) def check_for_pyembree(std_libs): embree_libs = [] diff --git a/yt/data_objects/static_output.py b/yt/data_objects/static_output.py index f491eb7b2d9..9401fee3e39 100644 --- a/yt/data_objects/static_output.py +++ b/yt/data_objects/static_output.py @@ -9,15 +9,11 @@ import warnings import weakref from collections import defaultdict +from collections.abc import MutableMapping from functools import cached_property from importlib.util import find_spec from stat import ST_CTIME -from typing import ( - Any, - Literal, - Optional, - Union, -) +from 
typing import Any, Literal, Optional, Union import numpy as np import unyt as un @@ -83,7 +79,6 @@ else: from typing_extensions import assert_never -from collections.abc import MutableMapping # We want to support the movie format in the future. # When such a thing comes to pass, I'll move all the stuff that is constant up diff --git a/yt/visualization/plot_window.py b/yt/visualization/plot_window.py index ebe64ad4d65..90e44423fa5 100644 --- a/yt/visualization/plot_window.py +++ b/yt/visualization/plot_window.py @@ -1,4 +1,5 @@ import abc +import sys from collections import defaultdict from numbers import Number from typing import Optional, Union @@ -53,18 +54,16 @@ invalidate_plot, ) -import sys # isort: skip +if sys.version_info >= (3, 10): + pass +else: + from yt._maintenance.backports import zip if sys.version_info >= (3, 11): from typing import assert_never else: from typing_extensions import assert_never -if sys.version_info >= (3, 10): - pass -else: - from yt._maintenance.backports import zip - def get_window_parameters(axis, center, width, ds): width = ds.coordinates.sanitize_width(axis, width, None) From e55b1242834d2311cb2242eb64313b04bd945ded Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Robert?= Date: Mon, 4 Sep 2023 09:09:01 +0200 Subject: [PATCH 9/9] MNT: manually upgrade typing.Callable to collections.abc.Callable --- yt/fields/field_functions.py | 2 +- yt/fields/field_plugin_registry.py | 2 +- yt/fields/local_fields.py | 3 ++- yt/funcs.py | 3 ++- yt/utilities/configure.py | 2 +- yt/utilities/logger.py | 3 ++- 6 files changed, 9 insertions(+), 6 deletions(-) diff --git a/yt/fields/field_functions.py b/yt/fields/field_functions.py index 2acb949c819..12db58bef47 100644 --- a/yt/fields/field_functions.py +++ b/yt/fields/field_functions.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from inspect import signature -from typing import Callable import numpy as np diff --git a/yt/fields/field_plugin_registry.py b/yt/fields/field_plugin_registry.py index 3134000603d..e03813305ce 100644 --- a/yt/fields/field_plugin_registry.py +++ b/yt/fields/field_plugin_registry.py @@ -1,4 +1,4 @@ -from typing import Callable +from collections.abc import Callable FunctionName = str FieldPluginMap = dict[FunctionName, Callable] diff --git a/yt/fields/local_fields.py b/yt/fields/local_fields.py index 6cec3d0a8ca..9f1db66e864 100644 --- a/yt/fields/local_fields.py +++ b/yt/fields/local_fields.py @@ -1,5 +1,6 @@ +from collections.abc import Callable from functools import partial -from typing import Any, Callable, TypeVar +from typing import Any, TypeVar from yt.funcs import is_sequence from yt.utilities.logger import ytLogger as mylog diff --git a/yt/funcs.py b/yt/funcs.py index 6ee5c7f8ea0..7db129e52cc 100644 --- a/yt/funcs.py +++ b/yt/funcs.py @@ -15,10 +15,11 @@ import traceback import urllib from collections import UserDict +from collections.abc import Callable from copy import deepcopy from functools import lru_cache, wraps from numbers import Number as numeric_type -from typing import Any, Callable, Optional +from typing import Any, Optional import numpy as np from more_itertools import always_iterable, collapse, first diff --git a/yt/utilities/configure.py b/yt/utilities/configure.py index 657b4a18986..7d355e33a96 100644 --- a/yt/utilities/configure.py +++ b/yt/utilities/configure.py @@ -1,8 +1,8 @@ import os import sys import warnings +from collections.abc import Callable from pathlib import Path -from typing import Callable from more_itertools import always_iterable diff --git 
a/yt/utilities/logger.py b/yt/utilities/logger.py index d6e726caf24..87acf7437dd 100644 --- a/yt/utilities/logger.py +++ b/yt/utilities/logger.py @@ -1,6 +1,7 @@ import logging import sys -from typing import Callable, Optional +from collections.abc import Callable +from typing import Optional from yt.utilities.configure import YTConfig, configuration_callbacks
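
For quick reference, here is a minimal standalone sketch (not part of the patch series, and not yt code) of the Python 3.9-only idioms these patches converge on: PEP 585 builtin generics, collections.abc ABCs in place of the deprecated typing aliases, PEP 584 dict union, str.removeprefix, and stdlib importlib.resources.files replacing the importlib_resources backport. The names score_names and evens are illustrative assumptions, not functions from yt.

# Illustrative sketch only -- requires Python >= 3.9, standard library only.
import importlib.resources as importlib_resources
from collections.abc import Callable, Iterator


def score_names(names: list[str], score: Callable[[str], int]) -> dict[str, int]:
    # PEP 585: builtin generics replace typing.List / typing.Dict / typing.Callable
    return {name: score(name) for name in names}


def evens(limit: int) -> Iterator[int]:
    # collections.abc.Iterator replaces typing.Iterator
    yield from range(0, limit, 2)


font_dict = {"size": 14.0} | {"size": 18.0}  # PEP 584 dict union, replaces {**a, **b}
suffix = ".png".removeprefix(".")            # str.removeprefix replaces manual slicing
pkg_dir = importlib_resources.files("json")  # importlib.resources.files, 3.9+ stdlib
print(score_names(["yt", "numpy"], len), list(evens(6)), font_dict, suffix, pkg_dir)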