18 changes: 8 additions & 10 deletions tiled/_tests/adapters/test_sql.py
@@ -205,11 +205,11 @@ def test_psql(adapter_psql_one_partition: SQLAdapter) -> None:
     ],
Contributor:
Could we instead shrink all the very similar SQLAdapter fixtures into one fixture that selects the right database scheme and number of partitions? Something like:

async def _postgresql_uri() -> AsyncGenerator[str, None]:
    uri = os.getenv("TILED_TEST_POSTGRESQL_URI")
    if uri is None:
        pytest.skip("TILED_TEST_POSTGRESQL_URI is not set")
    async with temp_postgres(uri) as uri_with_database_name:
        yield uri_with_database_name


async def _database_uri(scheme: str, tmp_path: Path) -> AsyncGenerator[str, None]:
    if scheme == "postgresql":
        async for uri in _postgresql_uri():
            yield uri
    else:
        yield f"{scheme}:///{tmp_path}/test.db"


@pytest_asyncio.fixture
async def adapter(
    tmp_path: Path, request: pytest.FixtureRequest
) -> AsyncGenerator[SQLAdapter, None]:
    scheme, num_partitions = request.param
    async for uri in _database_uri(scheme, tmp_path):
        data_source = data_source_from_init_storage(uri, num_partitions)
        yield SQLAdapter(
            data_source.assets[0].data_uri,
            data_source.structure,
            data_source.parameters["table_name"],
            data_source.parameters["dataset_id"],
        )


@pytest.mark.parametrize(
    "adapter",
    [
        ("sqlite", 1),
        ("duckdb", 1),
        ("postgresql", 1),
    ],
    indirect=True,
)
def test_write_read_one_batch_one_part(adapter: SQLAdapter) -> None:

And adapter_psql_many_partitions becomes ("postgresql", 3) etc.?

Contributor:
(I just don't like the pattern of fetching the fixture inside the test; this way the skip happens before the first line of the test.)
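A standalone sketch of that difference (hypothetical fixture and test names, not from this PR): with indirect=True, a pytest.skip() raised inside the fixture fires during setup, before the test body ever runs.

import pytest

@pytest.fixture
def backend(request: pytest.FixtureRequest) -> str:
    if request.param == "postgresql":
        pytest.skip("no PostgreSQL server configured")  # skips during fixture setup
    return request.param

@pytest.mark.parametrize("backend", ["sqlite", "postgresql"], indirect=True)
def test_roundtrip(backend: str) -> None:
    # By the time the test body runs, the skip has already happened (or not).
    assert backend == "sqlite"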

 )
 def test_write_read_one_batch_one_part(
-    adapter: SQLAdapter, request: pytest.FixtureRequest
+    adapter: str, request: pytest.FixtureRequest
 ) -> None:
-    # get adapter from fixture
     adapter = request.getfixturevalue(adapter)
 
+    assert isinstance(adapter, SQLAdapter)
     # test appending and reading a table as a whole
     test_table = pa.Table.from_arrays(data0, names)

@@ -237,11 +237,11 @@ def test_write_read_list_batch_one_part(
     ],
 )
 def test_write_read_list_batch_one_part(
-    adapter: SQLAdapter, request: pytest.FixtureRequest
+    adapter: str, request: pytest.FixtureRequest
 ) -> None:
-    # get adapter from fixture
     adapter = request.getfixturevalue(adapter)
 
+    assert isinstance(adapter, SQLAdapter)
     test_table = pa.Table.from_batches([batch0, batch1, batch2])
     # test appending a list of batches to a table and read as a whole
     adapter.append_partition([batch0, batch1, batch2], 0)
@@ -293,12 +293,10 @@ def assert_same_rows(table1: pa.Table, table2: pa.Table) -> None:
         ("adapter_psql_many_partitions"),
     ],
 )
-def test_append_single_partition(
-    adapter: SQLAdapter, request: pytest.FixtureRequest
-) -> None:
-    # get adapter from fixture
+def test_append_single_partition(adapter: str, request: pytest.FixtureRequest) -> None:
     adapter = request.getfixturevalue(adapter)
 
+    assert isinstance(adapter, SQLAdapter)
     # test writing an entire pyarrow table to a single partition
     table = pa.Table.from_batches([batch0, batch1, batch2])
     adapter.append_partition(table, 0)
@@ -321,11 +319,11 @@ def test_append_single_partition(
     ],
 )
 def test_write_read_one_batch_many_part(
-    adapter: SQLAdapter, request: pytest.FixtureRequest
+    adapter: str, request: pytest.FixtureRequest
 ) -> None:
-    # get adapter from fixture
     adapter = request.getfixturevalue(adapter)
 
+    assert isinstance(adapter, SQLAdapter)
     # test writing to many partitions and reading it whole
     adapter.append_partition(batch0, 0)
     adapter.append_partition(batch1, 1)
4 changes: 2 additions & 2 deletions tiled/_tests/adapters/test_sql_types.py
@@ -27,12 +27,12 @@ async def postgresql_uri() -> AsyncGenerator[str, None]:
     # yield uri_with_database_name.rsplit("/", 1)[0]
 
 
-@pytest_asyncio.fixture
+@pytest.fixture
 def sqlite_uri(tmp_path: Path) -> Generator[str, None, None]:
     yield f"sqlite:///{tmp_path}/test.db"
 
 
-@pytest_asyncio.fixture
+@pytest.fixture
 def duckdb_uri(tmp_path: Path) -> Generator[str, None, None]:
     yield f"duckdb:///{tmp_path}/test.db"
 
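This change looks right: these URI fixtures are plain synchronous generators, and pytest_asyncio.fixture is only needed for async def fixtures. A minimal sketch of the distinction (illustrative names only):

import pytest
import pytest_asyncio

@pytest.fixture
def sync_uri(tmp_path) -> str:
    # ordinary fixture, no event loop involved
    return f"sqlite:///{tmp_path}/test.db"

@pytest_asyncio.fixture
async def async_uri() -> str:
    # async fixtures (e.g. ones that await database setup) need pytest_asyncio
    return "postgresql://localhost/test"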
3 changes: 2 additions & 1 deletion tiled/_tests/test_protocols.py
@@ -1,3 +1,4 @@
+import uuid
 from pathlib import Path
 from typing import Any, Dict, List, Literal, Optional, Set, Tuple, Union

@@ -420,7 +421,7 @@ async def test_accesspolicy_protocol(mocker: MockFixture) -> None:
     metadata: JSON = {"foo": "bar"}
     container = DirectoryContainer(directory=Path("somedirectory"), form={})
     principal = Principal(
-        uuid="12345678124123412345678123456781", type=PrincipalType.user
+        uuid=uuid.UUID(int=0x12345678124123412345678123456781), type=PrincipalType.user
     )
     authn_scopes = {"abc", "baz"}
     scopes = {"abc"}
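A quick stdlib check (not part of the diff) that the new UUID(int=...) spelling carries the same value as the old 32-hex-digit string:

import uuid

u = uuid.UUID(int=0x12345678124123412345678123456781)
# The old test passed this value as a bare hex string.
assert u == uuid.UUID("12345678124123412345678123456781")
assert u.hex == "12345678124123412345678123456781"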
2 changes: 1 addition & 1 deletion tiled/adapters/hdf5.py
@@ -232,12 +232,12 @@ def from_catalog(
         array = cls.lazy_load_hdf5_array(
             *file_paths, dataset=dataset, swmr=swmr, libver=libver
         )
 
-        if slice:
+        if isinstance(slice, str):
             slice = NDSlice.from_numpy_str(slice)
             array = array[slice]
         if squeeze:
             assert isinstance(array, dask.array.Array)
             array = array.squeeze()
 
         if array.shape != tuple(structure.shape):
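For context, NDSlice.from_numpy_str turns a numpy-style slice string into something the array can be indexed with. A hand-rolled stand-in sketching the general idea (simplified; this is not tiled's implementation, and it ignores steps and ellipsis):

import numpy as np

def parse_numpy_slice(expr: str) -> tuple:
    """Parse e.g. "0:5, :, 2" into a tuple of slices/ints (simplified)."""
    parts = []
    for token in expr.strip("()").split(","):
        token = token.strip()
        if ":" in token:
            fields = [int(f) if f else None for f in token.split(":")]
            parts.append(slice(*fields))
        else:
            parts.append(int(token))
    return tuple(parts)

arr = np.arange(100).reshape(10, 10)
assert arr[parse_numpy_slice("0:5, 2")].shape == (5,)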
4 changes: 2 additions & 2 deletions tiled/adapters/mapping.py
@@ -36,7 +36,7 @@
     StructureFamilyQuery,
 )
 from ..query_registration import QueryTranslationRegistry
-from ..server.schemas import SortingItem
+from ..server.schemas import SortingDirection, SortingItem
 from ..storage import Storage
 from ..structures.core import Spec, StructureFamily
 from ..structures.table import TableStructure

@@ -111,7 +111,7 @@ def __init__(
Expand Down Expand Up @@ -111,7 +111,7 @@ def __init__(
# This is a special case that means, "the given ordering".
# By giving that a name ("_") we enable requests to asking for the
# last N by requesting the sorting ("_", -1).
sorting = [SortingItem(key="_", direction=1)]
sorting = [SortingItem(key="_", direction=SortingDirection.ASCENDING)]
Contributor:
Does this actually work? Or as discussed, is the type hint wrong and the method works with what it was previously receiving?
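If SortingDirection is an int-valued enum, as the ASCENDING/DESCENDING pairing suggests, pydantic coerces a bare 1 to the same validated member, so both spellings should behave identically. A minimal sketch under that assumption (the enum definition here mirrors what tiled.server.schemas presumably contains, not a verified copy):

from enum import Enum
from pydantic import BaseModel

class SortingDirection(int, Enum):  # assumed shape
    ASCENDING = 1
    DESCENDING = -1

class SortingItem(BaseModel):
    key: str
    direction: SortingDirection

# pydantic coerces the bare int to the enum member, so these are equal:
assert SortingItem(key="_", direction=1) == SortingItem(
    key="_", direction=SortingDirection.ASCENDING
)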

         self._sorting = sorting
         self._metadata = metadata or {}
         self.specs = specs or []
4 changes: 2 additions & 2 deletions tiled/adapters/netcdf.py
@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Any, List, Optional, Set, Union
+from typing import Any, Optional, Set
 
 import xarray
 
@@ -10,7 +10,7 @@
 from .xarray import DatasetAdapter
 
 
-def read_netcdf(filepath: Union[str, List[str], Path]) -> DatasetAdapter:
+def read_netcdf(filepath: str | Path) -> DatasetAdapter:
     """
     Parameters
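One caveat on the new annotation (general Python, not specific to tiled): the X | Y union syntax is only valid at runtime on Python 3.10+, unless annotation evaluation is deferred:

from __future__ import annotations  # makes str | Path legal on Python < 3.10
from pathlib import Path

def read_netcdf(filepath: str | Path) -> None:
    ...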
2 changes: 1 addition & 1 deletion tiled/adapters/sequence.py
@@ -18,7 +18,7 @@
 from .utils import init_adapter_from_catalog
 
 
-def force_reshape(arr: np.array, desired_shape: Tuple[int, ...]) -> np.array:
+def force_reshape(arr: np.ndarray, desired_shape: Tuple[int, ...]) -> np.ndarray:
     """Reshape a numpy array to match the desired shape, if possible.
 
     Parameters
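The fix is correct: np.array is a factory function, not a type, so np.ndarray is the right annotation. A quick illustration:

import numpy as np

# np.array is the constructor function; np.ndarray is the class it returns.
arr: np.ndarray = np.array([1.0, 2.0, 3.0])
assert isinstance(arr, np.ndarray)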