Commit 2e21ed5

Use type union operator in 34 files
- Remove use of typing.{Optional,Union}.
- Import generics from collections.abc.
1 parent 7df3aa3 commit 2e21ed5

35 files changed: +365 −404 lines
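
The substitution applied throughout the diffs below is the PEP 604/PEP 585 style available from Python 3.10: unions written with the `|` operator instead of `typing.Optional`/`typing.Union`, and generic ABCs imported from `collections.abc` instead of `typing`. A minimal sketch of the before/after spellings (the function `first_or_default` is illustrative only, not code from this commit):

# Before: from typing import Iterable, Optional, Union
#         def first_or_default(values: Iterable[int], default: Optional[str] = None) -> Union[int, str, None]: ...
# After, in the style applied by this commit:
from collections.abc import Iterable  # generic ABCs now come from collections.abc


def first_or_default(values: Iterable[int], default: str | None = None) -> int | str | None:
    """Return the first element of `values`, or `default` when it is empty."""
    return next(iter(values), default)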

sdmx/client.py

Lines changed: 5 additions & 5 deletions
@@ -1,6 +1,6 @@
 import logging
 from functools import partial
-from typing import IO, TYPE_CHECKING, Any, Optional, Union
+from typing import IO, TYPE_CHECKING, Any
 from warnings import warn

 import requests
@@ -74,7 +74,7 @@ def __init__(
         self,
         source=None,
         *,
-        session: Optional["requests.Session"] = None,
+        session: "requests.Session" | None = None,
         log_level=None,
         **session_opts,
     ):
@@ -333,9 +333,9 @@ def _collect(*keywords):

     def get(
         self,
-        resource_type: Union[str, Resource, None] = None,
-        resource_id: Optional[str] = None,
-        tofile: Union["os.PathLike", IO, None] = None,
+        resource_type: str | Resource | None = None,
+        resource_id: str | None = None,
+        tofile: "os.PathLike" | IO | None = None,
         use_cache: bool = False,
         dry_run: bool = False,
         **kwargs,
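
For orientation, `Client.get()` edited above is the generic request method. A usage sketch based only on the signatures in this hunk ("ECB" and the resource type are illustrative values, not part of the commit):

import sdmx

client = sdmx.Client("ECB")  # `source` is the first positional argument
# resource_id, tofile, use_cache, and dry_run are the keyword arguments shown above
msg = client.get(resource_type="dataflow", use_cache=True)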

sdmx/compare.py

Lines changed: 4 additions & 4 deletions
@@ -9,7 +9,7 @@
 from copy import copy
 from dataclasses import dataclass, fields, is_dataclass
 from functools import singledispatch
-from typing import Any, TypeVar, Union
+from typing import Any, TypeVar

 import lxml.etree

@@ -157,7 +157,7 @@ def compare_dataclass(left, right, opts: Options, context: str) -> bool:
 @compare.register(int)
 @compare.register(str)
 @compare.register(datetime.date)
-def _eq(left: Union[int, str, datetime.date], right, opts, context=""):
+def _eq(left: int | str | datetime.date, right, opts, context=""):
     """Built-in types that must compare equal."""
     return left == right or (not opts.strict and right is None)

@@ -168,7 +168,7 @@ def _eq(left: Union[int, str, datetime.date], right, opts, context=""):
 @compare.register(float)
 @compare.register(type)
 @compare.register(enum.Enum)
-def _is(left: Union[None, bool, float, type, enum.Enum], right, opts, context):
+def _is(left: None | bool | float | type | enum.Enum, right, opts, context):
     """Built-in types that must compare identical."""
     return left is right or (not opts.strict and right is None or left is None)

@@ -203,7 +203,7 @@ def _(left: dict, right, opts, context=""):
 # TODO When dropping support for Python <=3.10, change to '@compare.register'
 @compare.register(list)
 @compare.register(set)
-def _(left: Union[list, set], right, opts, context=""):
+def _(left: list | set, right, opts, context=""):
     if len(left) != len(right):
         opts.log(f"Mismatched length: {len(left)} != {len(right)}")
         return False
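
The stacked `@compare.register(...)` calls above, and the TODO about Python <=3.10, relate to :func:`functools.singledispatch`: only from Python 3.11 can a bare `register` infer the dispatch types from a `|` union annotation. A small sketch with a hypothetical `describe` function (not sdmx code):

from functools import singledispatch


@singledispatch
def describe(value, strict: bool = True) -> str:
    return f"other: {value!r}"


# Python <= 3.10: each type must be registered explicitly, as compare.py does.
@describe.register(int)
@describe.register(str)
def _(value: int | str, strict: bool = True) -> str:
    return f"scalar: {value!r}"


# Python >= 3.11 only: a bare @describe.register could infer int and str from the
# `int | str` annotation, which is what the TODO in compare.py anticipates.
print(describe(3), describe("x"), describe([1]))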

sdmx/convert/pandas.py

Lines changed: 20 additions & 24 deletions
@@ -5,7 +5,7 @@
 from dataclasses import InitVar, dataclass, field
 from itertools import chain, product, repeat
 from types import SimpleNamespace
-from typing import TYPE_CHECKING, Any, Optional, Union, cast
+from typing import TYPE_CHECKING, Any, cast
 from warnings import warn

 import numpy as np
@@ -34,7 +34,7 @@
 class ToDatetimeKeywords(TypedDict, total=False):
     format: str

-KeyOrAttributeValue = Union["common.KeyValue", "common.AttributeValue"]
+KeyOrAttributeValue = common.KeyValue | common.AttributeValue


 _HAS_PANDAS_2 = pd.__version__.split(".")[0] >= "2"
@@ -150,8 +150,8 @@ class ColumnSpec:

     def __init__(
         self,
-        pc: Optional["PandasConverter"] = None,
-        ds: Optional["common.BaseDataSet"] = None,
+        pc: "PandasConverter" | None = None,
+        ds: "common.BaseDataSet" | None = None,
     ) -> None:
         if pc is None or ds is None:
             return  # Empty/placeholder
@@ -227,9 +227,9 @@ def __init__(

     @staticmethod
     def _maybe_construct_dsd(
-        dsd: Optional["common.BaseDataStructureDefinition"],
+        dsd: "common.BaseDataStructureDefinition" | None,
         obs: "common.BaseObservation",
-    ) -> Union["v21.DataStructureDefinition", "v30.DataStructure"]:
+    ) -> "v21.DataStructureDefinition" | "v30.DataStructure":
         """If `dsd` is None, construct a DSD by inspection of `obs`."""
         if dsd is not None:
             return dsd
@@ -275,7 +275,7 @@ def convert_obs(self, obs: "common.BaseObservation") -> list:
         key = obs.key
         if self.constraint and key not in self.constraint:
             # Emit an empty row to be dropped
-            result: Iterable[Union[str, None]] = repeat(None, len(self.obs))
+            result: Iterable[str | None] = repeat(None, len(self.obs))
         else:
             # Observation values
             # FIXME Handled CodedObservationValue, similar to AttributeValue
@@ -288,7 +288,7 @@ def convert_obs(self, obs: "common.BaseObservation") -> list:
             self.add_obs_attrib(avs)

         # - Convert the observation Key using key Columns.
-        # - Convert the value to Optional[str].
+        # - Convert the value to str | None.
         # - Convert the attribute values using attribute Columns.
         result = chain(
             [c(key.values) for c in self.key],
@@ -314,26 +314,26 @@ class PandasConverter(DispatchConverter):
     attributes: Attributes = Attributes.none

     #: If given, only Observations included by the *constraint* are returned.
-    constraint: Optional["ContentConstraint"] = None
+    constraint: "ContentConstraint | None" = None

     #: Datatype for observation values. If :any:`None`, data values remain
     #: :class:`object`/:class:`str`.
-    dtype: Union[type["np.generic"], type["ExtensionDtype"], str, None] = np.float64
+    dtype: type["np.generic"] | type["ExtensionDtype"] | str | None = np.float64

     #: Axis on which to place a time dimension. One of:
     #:
     #: - :py:`-1`: disabled.
     #: - :py:`0, "index"`: first/index axis.
     #: - :py:`1, "columns"`: second/columns axis.
-    datetime_axis: Union[int, str] = -1
+    datetime_axis: int | str = -1

     #: Dimension to convert to :class:`pandas.DatetimeIndex`. A :class:`str` value is
     #: interpreted as a dimension ID.
-    datetime_dimension: Optional["common.DimensionComponent"] = None
+    datetime_dimension: "common.DimensionComponent | None" = None

     #: Frequency for conversion to :class:`pandas.PeriodIndex`. A :class:`str` value is
     #: interpreted as one of the :ref:`pd:timeseries.period_aliases`.
-    datetime_freq: Optional["DateOffset"] = None
+    datetime_freq: "DateOffset | None" = None

     #: include : iterable of str or str, optional
     #: One or more of the attributes of the StructureMessage ('category_scheme',
@@ -367,9 +367,7 @@ class PandasConverter(DispatchConverter):
     # Columns to be set as index levels, then unstacked.
     _unstack: list[str] = field(default_factory=list)

-    _context: dict[Union[str, type], Any] = field(
-        default_factory=lambda: dict(compat=False)
-    )
+    _context: dict[str | type, Any] = field(default_factory=lambda: dict(compat=False))

     def get_components(self, kind) -> list["common.Component"]:
         """Return an appropriate list of dimensions or attributes."""
@@ -448,7 +446,7 @@ def handle_datetime(self, value: Any) -> None:
         else:
             raise TypeError(f"PandasConverter(…, datetime={type(value)})")

-    def __post_init__(self, datetime: Any, rtype: Optional[str]) -> None:
+    def __post_init__(self, datetime: Any, rtype: str | None) -> None:
         """Transform and validate arguments."""
         # Raise on unsupported arguments
         if isinstance(
@@ -626,7 +624,7 @@ def convert_structuremessage(c: "PandasConverter", obj: message.StructureMessage
     Keys are StructureMessage attributes; values are pandas objects.
     """
     attrs = sorted(c.include)
-    result: DictLike[str, Union[pd.Series, pd.DataFrame]] = DictLike()
+    result: DictLike[str, pd.Series | pd.DataFrame] = DictLike()
     for a in attrs:
         dl = c.convert(getattr(obj, a))
         if len(dl):
@@ -755,17 +753,15 @@ def _convert_datetime(df: "pd.DataFrame", c: "PandasConverter") -> "pd.DataFrame
     return df.assign(**{dim.id: pd.to_datetime(df[dim.id], **dt_kw)})


-def _ensure_multiindex(obj: Union[pd.Series, pd.DataFrame]):
+def _ensure_multiindex(obj: pd.Series | pd.DataFrame):
     if not isinstance(obj.index, pd.MultiIndex):
         obj.index = pd.MultiIndex.from_product(
             [obj.index.to_list()], names=[obj.index.name]
         )
     return obj


-def _reshape(
-    df: "pd.DataFrame", c: "PandasConverter"
-) -> Union[pd.Series, pd.DataFrame]:
+def _reshape(df: "pd.DataFrame", c: "PandasConverter") -> pd.Series | pd.DataFrame:
     """Reshape `df` to provide expected return types."""

     if c._strict:
@@ -790,7 +786,7 @@ def _reshape(
     return result


-def _to_periodindex(obj: Union["pd.Series", "pd.DataFrame"], c: "PandasConverter"):
+def _to_periodindex(obj: "pd.Series" | "pd.DataFrame", c: "PandasConverter"):
     """Convert a 1-D datetime index on `obj` to a PeriodIndex."""
     result = obj

@@ -887,7 +883,7 @@ def add_item(item):
         add_item(item)

     # Convert to DataFrame
-    result: Union[pd.DataFrame, pd.Series] = pd.DataFrame.from_dict(
+    result: pd.DataFrame | pd.Series = pd.DataFrame.from_dict(
         items,
         orient="index",
         dtype=object,  # type: ignore [arg-type]
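
Two spellings of forward references appear in the hunks above: the whole union quoted ("ContentConstraint | None") and only one operand quoted ("PandasConverter" | None). The distinction matters when annotations are evaluated at runtime; a minimal sketch, with a hypothetical Widget class rather than any sdmx type:

from __future__ import annotations  # defer annotation evaluation (PEP 563)


class Widget: ...


# With deferred evaluation, both annotations below are stored as strings and are
# not evaluated at definition time:
def a(w: "Widget" | None = None) -> None: ...
def b(w: "Widget | None" = None) -> None: ...


# Without the __future__ import, only b() imports cleanly: in a(), "Widget" | None
# applies | to a str and raises TypeError at definition time, while the fully quoted
# union in b() stays a string. Tools that resolve annotations later (for example
# typing.get_type_hints) also handle the fully quoted form more reliably.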

sdmx/dictlike.py

Lines changed: 4 additions & 4 deletions
@@ -1,7 +1,7 @@
 import logging
-import typing
+from collections.abc import MutableMapping
 from dataclasses import fields
-from typing import Generic, TypeVar, Union, get_args, get_origin
+from typing import Generic, TypeVar, get_args, get_origin

 from sdmx.compare import Comparable

@@ -11,7 +11,7 @@
 VT = TypeVar("VT")


-class DictLike(dict, typing.MutableMapping[KT, VT], Comparable):
+class DictLike(dict, MutableMapping[KT, VT], Comparable):
     """Container with features of :class:`dict`, attribute access, and validation."""

     __slots__ = ("__dict__", "_types")
@@ -32,7 +32,7 @@ def with_types(cls, key_type, value_type):
         result._types = (key_type, value_type)
         return result

-    def __getitem__(self, key: Union[KT, int]) -> VT:
+    def __getitem__(self, key: KT | int) -> VT:
         """:meth:`dict.__getitem__` with integer access."""
         try:
             return super().__getitem__(key)
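
The DictLike change relies on PEP 585: since Python 3.9 the ABCs in collections.abc are directly subscriptable, so typing.MutableMapping is no longer needed to parameterize a base class. A minimal sketch mirroring the pattern (the Registry class is hypothetical, not the real DictLike):

from collections.abc import MutableMapping
from typing import TypeVar

KT = TypeVar("KT")
VT = TypeVar("VT")


class Registry(dict, MutableMapping[KT, VT]):
    """A dict subclass that uses the subscripted collections.abc ABC as a base."""


r: Registry[str, int] = Registry()  # subscripting the subclass still works at runtime
r["answer"] = 42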

sdmx/experimental.py

Lines changed: 4 additions & 4 deletions
@@ -13,7 +13,7 @@

 """

-from typing import Optional, Text
+from typing import Text

 import pandas as pd

@@ -31,10 +31,10 @@

 class DataSet(AnnotableArtefact):
     # SDMX-IM features
-    action: Optional[ActionType] = None
+    action: ActionType | None = None
     attrib: DictLike[str, AttributeValue] = DictLike()
-    valid_from: Optional[Text] = None
-    structured_by: Optional[DataStructureDefinition] = None
+    valid_from: Text | None = None
+    structured_by: DataStructureDefinition | None = None

     # Internal storage: a pd.DataFrame with columns:
     # - 'value': the Observation value.

sdmx/format/__init__.py

Lines changed: 3 additions & 3 deletions
@@ -2,7 +2,7 @@
 from dataclasses import InitVar, dataclass, field
 from enum import Enum, IntFlag
 from functools import lru_cache
-from typing import Literal, Optional, Union
+from typing import Literal

 from sdmx.util import parse_content_type

@@ -48,12 +48,12 @@ class MediaType:

     #: Format version.
     version: Version = field(init=False)
-    _version: InitVar[Union[str, Version]]
+    _version: InitVar[str | Version]

     flags: Flag = Flag(0)

     #: Specify the full media type string.
-    full: Optional[str] = None
+    full: str | None = None

     def __post_init__(self, _version):
         self.__dict__["version"] = Version[_version]

sdmx/format/xml/common.py

Lines changed: 14 additions & 13 deletions
@@ -1,12 +1,13 @@
 import logging
 import re
 import zipfile
+from collections.abc import Iterable, Mapping
 from functools import lru_cache
 from itertools import chain
 from operator import itemgetter
 from pathlib import Path
 from shutil import copytree
-from typing import IO, Iterable, Mapping, Optional, Union, cast
+from typing import IO, cast

 from lxml import etree
 from lxml.etree import QName
@@ -98,9 +99,9 @@


 def validate_xml(
-    msg: Union[Path, IO],
-    schema_dir: Optional[Path] = None,
-    version: Union[str, Version] = Version["2.1"],
+    msg: Path | IO,
+    schema_dir: Path | None = None,
+    version: str | Version = Version["2.1"],
     max_errors: int = -1,
 ) -> bool:
     """Validate SDMX-ML in `msg` against the XML Schema (XSD) documents.
@@ -160,8 +161,8 @@ def validate_xml(


 def construct_schema(
-    schema_dir: Optional[Path] = None,
-    version: Union[str, Version] = Version["2.1"],
+    schema_dir: Path | None = None,
+    version: str | Version = Version["2.1"],
 ) -> "etree.XMLSchema":
     """Construct a :class:`lxml.etree.XMLSchema` for SDMX-ML of the given `version`.

@@ -279,7 +280,7 @@ def _extracted_zipball(version: Version, force: bool = False) -> Path:


 def _handle_validate_args(
-    schema_dir: Optional[Path], version: Union[str, Version]
+    schema_dir: Path | None, version: str | Version
 ) -> tuple[Path, Version]:
     """Handle arguments for :func:`.install_schemas` and :func:`.validate_xml`."""
     import platformdirs
@@ -294,16 +295,16 @@
         ) from None

     # If the user has no preference, download the schemas to the local cache directory
-    if not schema_dir:
+    if schema_dir is None:
         schema_dir = platformdirs.user_cache_path("sdmx") / version.name
     schema_dir.mkdir(exist_ok=True, parents=True)

     return schema_dir, version


 def install_schemas(
-    schema_dir: Optional[Path] = None,
-    version: Union[str, Version] = Version["2.1"],
+    schema_dir: Path | None = None,
+    version: str | Version = Version["2.1"],
 ) -> Path:
     """Install SDMX-ML XML Schema documents for use with :func:`.validate_xml`.

@@ -334,7 +335,7 @@ install_schemas(
 class XMLFormat(Format):
     """Information about an SDMX-ML format."""

-    NS: Mapping[str, Optional[str]]
+    NS: Mapping[str, str | None]
     _class_tag: list

     def __init__(self, model, base_ns: str, class_tag: Iterable[tuple[str, str]]):
@@ -370,7 +371,7 @@ def ns_prefix(self, url) -> str:
     _NS_PATTERN = re.compile(r"(\{(?P<ns>.*)\}|(?P<ns_prefix>.*):)?(?P<localname>.*)")

     @lru_cache()
-    def qname(self, ns_or_name: str, name: Optional[str] = None) -> QName:
+    def qname(self, ns_or_name: str, name: str | None = None) -> QName:
         """Return a fully-qualified tag `name` in namespace `ns`."""
         if isinstance(ns_or_name, QName):
             # Already a QName; do nothing
@@ -395,7 +396,7 @@ def qname(self, ns_or_name: str, name: Optional[str] = None) -> QName:
         return QName(ns, name)

     @lru_cache()
-    def class_for_tag(self, tag) -> Optional[type]:
+    def class_for_tag(self, tag) -> type | None:
         """Return a message or model class for an XML tag."""
         qname = self.qname(tag)
         results = map(itemgetter(0), filter(lambda ct: ct[1] == qname, self._class_tag))
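
Taken together, the cleaned-up signatures above read as follows in use. A sketch based only on what this diff shows (the file name is illustrative, and the import path assumes these helpers are called directly from this module):

from pathlib import Path

from sdmx.format.xml.common import install_schemas, validate_xml

schema_dir = install_schemas(version="2.1")  # schema_dir omitted -> local cache directory
ok = validate_xml(Path("message.xml"), schema_dir, version="2.1")
print("valid" if ok else "invalid")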
