Work with pydantic 2.10 #87

Merged
merged 2 commits on Nov 28, 2024

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "labthings-fastapi"
version = "0.0.6"
version = "0.0.7"
authors = [
{ name="Richard Bowman", email="[email protected]" },
]
13 changes: 9 additions & 4 deletions src/labthings_fastapi/thing_description/__init__.py
@@ -15,10 +15,10 @@

from __future__ import annotations
from collections.abc import Mapping, Sequence
from typing import Any, Optional, Union
from typing import Any, Optional
import json

from pydantic import TypeAdapter, ValidationError, BaseModel
from pydantic import TypeAdapter, ValidationError
from .model import DataSchema


@@ -43,7 +43,7 @@
each path component in turn.
"""
if not reference.startswith("#/"):
raise NotImplementedError(
"Built-in resolver can only dereference internal JSON references "
"(i.e. starting with #)."
)
@@ -52,8 +52,8 @@
for key in reference[2:].split("/"):
resolved = resolved[key]
return resolved
except KeyError as ke:
raise KeyError(
f"The JSON reference {reference} was not found in the schema "
f"(original error {ke})."
)
@@ -107,29 +107,29 @@
"""
if "prefixItems" not in d:
return d
out: JSONSchema = d.copy()
if "items" in out:
raise ValueError(f"Overwrote the `items` key on {out}.")
out["items"] = out["prefixItems"]
del out["prefixItems"]
return out


def convert_additionalproperties(d: JSONSchema) -> JSONSchema:
"""Move additionalProperties into properties, or remove it"""
if "additionalProperties" not in d:
return d
out: JSONSchema = d.copy()
if "properties" in out and "additionalProperties" not in out["properties"]:
out["properties"]["additionalProperties"] = out["additionalProperties"]
del out["additionalProperties"]
return out


def check_recursion(depth: int, limit: int):
"""Check the recursion count is less than the limit"""
if depth > limit:
raise ValueError(
f"Recursion depth of {limit} exceeded - perhaps there is a circular "
"reference?"
)
@@ -192,7 +192,7 @@
return output


def type_to_dataschema(t: Union[type, BaseModel], **kwargs) -> DataSchema:
def type_to_dataschema(t: type, **kwargs) -> DataSchema:
"""Convert a Python type to a Thing Description DataSchema

This makes use of pydantic's `schema_of` function to create a
@@ -205,9 +205,14 @@
is passed in. Typically you'll want to use this for the
`title` field.
"""
if isinstance(t, BaseModel):
if hasattr(t, "model_json_schema"):
# The input should be a `BaseModel` subclass, in which case this works:
json_schema = t.model_json_schema()
else:
# In principle, the below should work for any type, though some
# deferred annotations can go wrong.
# Some attempt at looking up the environment of functions might help
# here.
json_schema = TypeAdapter(t).json_schema()
schema_dict = jsonschema_to_dataschema(json_schema)
# Definitions of referenced ($ref) schemas are put in a
@@ -221,12 +226,12 @@
schema_dict.update(kwargs)
try:
return DataSchema(**schema_dict)
except ValidationError as ve:
print(
"Error while constructing DataSchema from the "
"following dictionary:\n"
+ json.dumps(schema_dict, indent=2)
+ "Before conversion, the JSONSchema was:\n"
+ json.dumps(json_schema, indent=2)
)
raise ve
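
The core of this change is the dispatch in `type_to_dataschema`: anything exposing `model_json_schema()` (i.e. a `BaseModel` subclass) is asked for its schema directly, and everything else goes through `TypeAdapter`. A minimal sketch of the distinction the `hasattr` check relies on (illustrative only, not part of the diff):

from pydantic import BaseModel, TypeAdapter

class Point(BaseModel):
    x: float
    y: float

# A BaseModel subclass carries the classmethod, so the first branch is used:
assert hasattr(Point, "model_json_schema")
print(Point.model_json_schema()["title"])  # "Point"

# A plain type does not, so it falls back to TypeAdapter:
assert not hasattr(list[int], "model_json_schema")
print(TypeAdapter(list[int]).json_schema())
# {'items': {'type': 'integer'}, 'type': 'array'}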

11 changes: 7 additions & 4 deletions src/labthings_fastapi/types/numpy.py
@@ -30,14 +30,15 @@ def double(arr: NDArray) -> NDArray:
WrapSerializer,
)
from typing import Annotated, Any, List, Union
from typing_extensions import TypeAlias
from collections.abc import Mapping, Sequence


# Define a nested list of floats with 0-6 dimensions
# This would be most elegantly defined as a recursive type
# but the below gets the job done for now.
Number = Union[int, float]
NestedListOfNumbers = Union[
Number: TypeAlias = Union[int, float]
NestedListOfNumbers: TypeAlias = Union[
Number,
List[Number],
List[List[Number]],
@@ -68,10 +69,12 @@ def listoflists_to_np(lol: Union[NestedListOfNumbers, np.ndarray]) -> np.ndarray


# Define an annotated type so Pydantic can cope with numpy
NDArray = Annotated[
NDArray: TypeAlias = Annotated[
np.ndarray,
PlainValidator(listoflists_to_np),
PlainSerializer(np_to_listoflists, when_used="json-unless-none"),
PlainSerializer(
np_to_listoflists, when_used="json-unless-none", return_type=NestedListOfNumbers
),
WithJsonSchema(NestedListOfNumbersModel.model_json_schema(), mode="validation"),
]
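
The `NDArray` alias defined above pairs a validator that coerces nested lists (or arrays) into numpy arrays with a serializer that turns them back into nested lists of numbers for JSON; the new `return_type` annotation simply makes that return type explicit. A short usage sketch, assuming the import path used in the tests below; the `Reading` model is hypothetical:

import numpy as np
from pydantic import BaseModel

from labthings_fastapi.types.numpy import NDArray


class Reading(BaseModel):  # hypothetical model, for illustration only
    values: NDArray


r = Reading(values=[[1, 2], [3, 4]])
assert isinstance(r.values, np.ndarray)  # nested lists are validated into a real array
print(r.model_dump_json())  # serialised back to lists, e.g. {"values":[[1,2],[3,4]]}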

13 changes: 12 additions & 1 deletion tests/test_numpy_type.py
@@ -1,13 +1,17 @@
from __future__ import annotations

from pydantic import BaseModel
from pydantic import BaseModel, RootModel
import numpy as np

from labthings_fastapi.types.numpy import NDArray, DenumpifyingDict
from labthings_fastapi.thing import Thing
from labthings_fastapi.decorators import thing_action


class ArrayModel(RootModel):
root: NDArray


def check_field_works_with_list(data):
class Model(BaseModel):
a: NDArray
@@ -86,3 +90,10 @@ def test_denumpifying_dict():
assert dump["e"] is None
assert dump["f"] == 1
d.model_dump_json()


def test_rootmodel():
for input in [[0, 1, 2], np.arange(3)]:
m = ArrayModel(root=input)
assert isinstance(m.root, np.ndarray)
assert (m.model_dump() == [0, 1, 2]).all()
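
The new test checks validation from both a plain list and a numpy array; a JSON round-trip works the same way under this setup. An illustrative companion check (not part of the PR), using the same `ArrayModel` and `np` names as the test above:

def test_rootmodel_json_roundtrip():
    # Hypothetical extra test: dump to JSON, then validate back again.
    m = ArrayModel(root=np.arange(3))
    as_json = m.model_dump_json()  # '[0,1,2]'
    m2 = ArrayModel.model_validate_json(as_json)
    assert isinstance(m2.root, np.ndarray)
    assert (m2.root == np.arange(3)).all()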