Skip to content

Commit fdef919

Browse files
committed
fix typeguard/CI errors in tests/test_soma_sparse_nd_array.py
1 parent 74ee1dc commit fdef919

File tree

3 files changed

+20
-9
lines changed

3 files changed

+20
-9
lines changed

apis/python/tests/__init__.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,20 @@
11
import pyarrow as pa
22
from typeguard.importhook import TypeguardFinder, install_import_hook
33

4+
45
class CustomFinder(TypeguardFinder):
56
"""
67
As noted in apis/python/src/tiledbsoma/query_condition.py we intentionally
78
suppress ``mypy`` there. However we need this extra step to also suppress
89
``typeguard`` there.
910
"""
11+
1012
def should_instrument(self, module_name: str):
11-
if module_name == 'tiledbsoma.query_condition':
13+
if module_name == "tiledbsoma.query_condition":
1214
return False
1315
return True
1416

17+
1518
install_import_hook("tiledbsoma", cls=CustomFinder)
1619

1720
"""Types supported in a SOMA*NdArray """

apis/python/tests/test_soma_dense_nd_array.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from typing import Tuple
1+
from typing import List, Tuple, Union
22

33
import numpy as np
44
import pyarrow as pa
@@ -21,7 +21,7 @@ def test_soma_dense_nd_array_ok_no_storage():
2121
)
2222
@pytest.mark.parametrize("element_type", NDARRAY_ARROW_TYPES_SUPPORTED)
2323
def test_soma_dense_nd_array_create_ok(
24-
tmp_path, shape: Tuple[int, ...], element_type: pa.DataType
24+
tmp_path, shape: Union[Tuple[int, ...], List], element_type: pa.DataType
2525
):
2626
"""
2727
Test all cases we expect "create" to succeed.

apis/python/tests/test_soma_sparse_nd_array.py

Lines changed: 14 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from typing import Optional, Tuple, Union
1+
from typing import List, Optional, Tuple, Union
22

33
import numpy as np
44
import pyarrow as pa
@@ -24,7 +24,7 @@ def test_soma_sparse_nd_array_ok_no_storage():
2424
)
2525
@pytest.mark.parametrize("element_type", NDARRAY_ARROW_TYPES_SUPPORTED)
2626
def test_soma_sparse_nd_array_create_ok(
27-
tmp_path, shape: Tuple[int, ...], element_type: pa.DataType
27+
tmp_path, shape: Union[Tuple[int, ...], List[int]], element_type: pa.DataType
2828
):
2929
"""
3030
Test all cases we expect "create" to succeed.
@@ -250,7 +250,7 @@ def test_soma_sparse_nd_array_read_write_sparse_tensor(
250250
assert a.shape == shape
251251

252252
# make a random sample in the desired format
253-
data = create_random_tensor(format, shape, np.float64)
253+
data = create_random_tensor(format, shape, np.dtype(np.float64))
254254
a.write_sparse_tensor(data)
255255
del a
256256

@@ -271,7 +271,7 @@ def test_soma_sparse_nd_array_read_write_table(
271271
assert a.shape == shape
272272

273273
# make a random sample in the desired format
274-
data = create_random_tensor("table", shape, np.float32)
274+
data = create_random_tensor("table", shape, np.dtype(np.float32))
275275
a.write_table(data)
276276
del a
277277

@@ -282,7 +282,15 @@ def test_soma_sparse_nd_array_read_write_table(
282282
assert tables_are_same_value(data, t)
283283

284284

285-
@pytest.mark.parametrize("dtype", [np.float32, np.float64, np.int32, np.int64])
285+
@pytest.mark.parametrize(
286+
"dtype",
287+
[
288+
np.dtype(np.float32),
289+
np.dtype(np.float64),
290+
np.dtype(np.int32),
291+
np.dtype(np.int64),
292+
],
293+
)
286294
@pytest.mark.parametrize("shape", [(1,), (23, 14), (35, 3, 2), (8, 4, 2, 30)])
287295
def test_soma_sparse_nd_array_read_as_pandas(
288296
tmp_path, dtype: np.dtype, shape: Tuple[int, ...]
@@ -378,7 +386,7 @@ def test_soma_sparse_nd_array_nnz(tmp_path):
378386
assert a.nnz == 0
379387

380388
t: pa.SparseCOOTensor = create_random_tensor(
381-
"coo", a.shape, pa.int32().to_pandas_dtype(), 0.1
389+
"coo", a.shape, np.dtype(pa.int32().to_pandas_dtype()), 0.1
382390
)
383391
a.write_sparse_tensor(t)
384392
with pytest.raises(NotImplementedError):

0 commit comments

Comments (0)