
Commit 33ef4b6

simplify
1 parent 954622d commit 33ef4b6

File tree

4 files changed: +158 -156 lines


.github/workflows/scripts.yml

Lines changed: 0 additions & 1 deletion

@@ -33,7 +33,6 @@ jobs:
         run: |
           python -m pip install pytest
           cd tests
-          python generate_test_data.py
           pytest test_json_schema.py -v
 
   test-json-metadata:
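
With the generation step gone, the CI job only installs pytest and runs the suite directly. The same check can presumably be reproduced locally with something like "cd tests && pytest test_json_schema.py -v" (assuming pytest and jsonschema are installed), since the test cases are now built in memory instead of being read from pre-generated files.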

.gitignore

Lines changed: 1 addition & 2 deletions

@@ -1,4 +1,3 @@
 # Ignore GeoPackage file used in conversion to GeoParquet
 *.gpkg*
-tests/valid/*
-tests/invalid/*
+tests/data/*

tests/generate_test_data.py

Lines changed: 0 additions & 139 deletions
This file was deleted.
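
Its purpose has not disappeared entirely: per the new test module below, the same JSON fixtures can still be written out on demand by running the test file as a script (for example "python test_json_schema.py" from the tests/ directory), which drops metadata_*.json files into tests/data/, the directory now covered by the updated .gitignore entry.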

tests/test_json_schema.py

Lines changed: 157 additions & 14 deletions

@@ -1,3 +1,5 @@
+
+import copy
 import json
 import pathlib
 
@@ -11,14 +13,139 @@
 SCHEMA = json.loads(SCHEMA_SRC.read_text())
 
 
-@pytest.mark.parametrize(
-    "path", list((HERE / "valid").iterdir()), ids=lambda path: path.name
-)
-def test_valid_schema(path):
+# # Define test cases
+
+valid_cases = {}
+invalid_cases = {}
+
+
+metadata_template = {
+    "version": "0.5.0-dev",
+    "primary_column": "geometry",
+    "columns": {
+        "geometry": {
+            "encoding": "WKB",
+            "geometry_types": [],
+        },
+    },
+}
+
+
+# Minimum required metadata
+
+metadata = copy.deepcopy(metadata_template)
+valid_cases["minimal"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata.pop("version")
+invalid_cases["missing_version"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata.pop("primary_column")
+invalid_cases["missing_primary_column"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata.pop("columns")
+invalid_cases["missing_columns"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"] = {}
+invalid_cases["missing_columns_entry"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"].pop("encoding")
+invalid_cases["missing_geometry_encoding"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"].pop("geometry_types")
+invalid_cases["missing_geometry_type"] = metadata
+
+
+# Geometry column name
+
+metadata = copy.deepcopy(metadata_template)
+metadata["primary_column"] = "geom"
+metadata["columns"]["geom"] = metadata["columns"].pop("geometry")
+valid_cases["geometry_column_name"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["primary_column"] = ""
+invalid_cases["geometry_column_name_empty"] = metadata
+
+
+# Encoding
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["encoding"] = "WKT"
+invalid_cases["encoding"] = metadata
+
+
+# Geometry type - non-empty list
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["geometry_types"] = ["Point"]
+valid_cases["geometry_type_list"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["geometry_types"] = "Point"
+invalid_cases["geometry_type_string"] = metadata
 
-    with open(path, "r") as f:
-        metadata = json.load(f)["geo"]
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["geometry_types"] = ["Curve"]
+invalid_cases["geometry_type_nonexistent"] = metadata
 
+# CRS - explicit null
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["crs"] = None
+valid_cases["crs_null"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["crs"] = "EPSG:4326"
+invalid_cases["crs_string"] = metadata
+
+
+# Orientation
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["orientation"] = "counterclockwise"
+valid_cases["orientation"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["orientation"] = "clockwise"
+invalid_cases["orientation"] = metadata
+
+# Edges
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["edges"] = "planar"
+valid_cases["edges_planar"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["edges"] = "spherical"
+valid_cases["edges_spherical"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["edges"] = "ellipsoid"
+invalid_cases["edges"] = metadata
+
+# Epoch
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["epoch"] = 2015.1
+valid_cases["epoch"] = metadata
+
+metadata = copy.deepcopy(metadata_template)
+metadata["columns"]["geometry"]["epoch"] = "2015.1"
+invalid_cases["epoch_string"] = metadata
+
+
+# # Tests
+
+@pytest.mark.parametrize(
+    "metadata", valid_cases.values(), ids=valid_cases.keys()
+)
+def test_valid_schema(request, metadata):
     errors = Draft7Validator(SCHEMA).iter_errors(metadata)
 
     msgs = []
@@ -31,25 +158,41 @@ def test_valid_schema(path):
         msgs.append(msg)
 
     if not valid:
-        raise AssertionError(f"Error while validation {path.name}:\n" + "\n".join(msgs))
+        raise AssertionError(
+            f"Error while validating '{request.node.callspec.id}':\n"
+            + json.dumps({"geo": metadata}, indent=2, sort_keys=True)
+            + "\n\nErrors:\n" + "\n".join(msgs)
+        )
 
 
 @pytest.mark.parametrize(
-    "path", list((HERE / "invalid").iterdir()), ids=lambda path: path.name
+    "metadata", invalid_cases.values(), ids=invalid_cases.keys()
 )
-def test_invalid_schema(request, path):
-    if "missing_columns_entry" in path.name:
+def test_invalid_schema(request, metadata):
+    if "missing_columns_entry" in request.node.callspec.id:
         request.node.add_marker(
             pytest.mark.xfail(reason="Not yet working", strict=True)
         )
 
-    with open(path, "r") as f:
-        metadata = json.load(f)["geo"]
-
     errors = Draft7Validator(SCHEMA).iter_errors(metadata)
 
     if not len(list(errors)):
         raise AssertionError(
             "This is an invalid GeoParquet file, but no validation error "
-            f"occurred for {path.name}."
+            f"occurred for '{request.node.callspec.id}':\n"
+            + json.dumps({"geo": metadata}, indent=2, sort_keys=True)
         )
+
+
+if __name__ == "__main__":
+    (HERE / "data").mkdir(exist_ok=True)
+
+    def write_metadata_json(metadata, name):
+        with open(HERE / "data" / ("metadata_" + name + ".json"), "w") as f:
+            json.dump({"geo": metadata}, f, indent=2, sort_keys=True)
+
+    for case, metadata in valid_cases.items():
+        write_metadata_json(metadata, "valid_" + case)
+
+    for case, metadata in invalid_cases.items():
+        write_metadata_json(metadata, "invalid_" + case)

0 commit comments
