Commit

make all tests pass
jhamman committed Feb 7, 2024
1 parent f47c872 commit e00ce0b
Showing 6 changed files with 726 additions and 721 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -78,5 +78,6 @@ src/zarr/_version.py
#doesnotexist
#test_sync*
data/*
+ src/fixture/

.DS_Store
12 changes: 6 additions & 6 deletions src/zarr/v3/codecs/sharding.py
@@ -354,7 +354,7 @@ async def decode_partial(
for chunk_coords in all_chunk_coords:
chunk_byte_slice = shard_index.get_chunk_slice(chunk_coords)
if chunk_byte_slice:
- chunk_bytes = await store_path.get_async(chunk_byte_slice)
+ chunk_bytes = await store_path.get(chunk_byte_slice)
if chunk_bytes:
shard_dict[chunk_coords] = chunk_bytes

@@ -533,9 +533,9 @@ async def _write_chunk(
)

if shard_builder.index.is_all_empty():
- await store_path.delete_async()
+ await store_path.delete()
else:
- await store_path.set_async(
+ await store_path.set(
await shard_builder.finalize(
self.configuration.index_location,
self._encode_shard_index,
@@ -561,9 +561,9 @@ def _shard_index_size(self) -> int:
async def _load_shard_index_maybe(self, store_path: StorePath) -> Optional[_ShardIndex]:
shard_index_size = self._shard_index_size()
if self.configuration.index_location == ShardingCodecIndexLocation.start:
- index_bytes = await store_path.get_async((0, shard_index_size))
+ index_bytes = await store_path.get((0, shard_index_size))
else:
- index_bytes = await store_path.get_async((-shard_index_size, None))
+ index_bytes = await store_path.get((-shard_index_size, None))
if index_bytes is not None:
return await self._decode_shard_index(index_bytes)
return None
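
Side note (not part of the diff): the two byte ranges above read the shard index from opposite ends of the shard, matching how MemoryStore.get slices the stored value further down. A minimal sketch of the equivalent slicing on raw bytes (the function name is illustrative only):

def index_slice(shard: bytes, index_size: int, at_start: bool) -> bytes:
    # (0, index_size) is a prefix read; (-index_size, None) is a suffix read,
    # i.e. value[byte_range[0]:byte_range[1]] as in MemoryStore.get.
    return shard[0:index_size] if at_start else shard[-index_size:None]
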
@@ -574,7 +574,7 @@ async def _load_shard_index(self, store_path: StorePath) -> _ShardIndex:
)

async def _load_full_shard_maybe(self, store_path: StorePath) -> Optional[_ShardProxy]:
- shard_bytes = await store_path.get_async()
+ shard_bytes = await store_path.get()

return await _ShardProxy.from_bytes(shard_bytes, self) if shard_bytes else None

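For orientation, a minimal usage sketch of the renamed StorePath methods (signatures are assumptions inferred from the calls above; this is not part of the commit):

from zarr.v3.store import MemoryStore, StorePath

async def storepath_roundtrip() -> None:
    path = StorePath(MemoryStore()) / "shard/0.0"
    await path.set(b"\x00" * 16)      # write the full value
    prefix = await path.get((0, 4))   # byte-range read, as the shard index reads do
    whole = await path.get()          # no range: the whole value
    await path.delete()               # drop the key, as all-empty shards are dropped
    assert prefix == b"\x00" * 4 and whole is not None
    assert await path.get() is None   # a missing key reads back as None
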
6 changes: 3 additions & 3 deletions src/zarr/v3/group.py
@@ -191,7 +191,7 @@ async def delitem(self, key: str) -> None:

async def _save_metadata(self) -> None:
to_save = self.metadata.to_bytes()
- awaitables = [(self.store_path / key).set_async(value) for key, value in to_save.items()]
+ awaitables = [(self.store_path / key).set(value) for key, value in to_save.items()]
await asyncio.gather(*awaitables)

@property
@@ -227,9 +227,9 @@ async def update_attributes(self, new_attributes: Dict[str, Any]):
to_save = self.metadata.to_bytes()
if self.metadata.zarr_format == 2:
# only save the .zattrs object
- await (self.store_path / ZATTRS_JSON).set_async(to_save[ZATTRS_JSON])
+ await (self.store_path / ZATTRS_JSON).set(to_save[ZATTRS_JSON])
else:
- await (self.store_path / ZARR_JSON).set_async(to_save[ZARR_JSON])
+ await (self.store_path / ZARR_JSON).set(to_save[ZARR_JSON])

self.metadata.attributes.clear()
self.metadata.attributes.update(new_attributes)
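A standalone sketch of the fan-out write pattern _save_metadata uses above (the helper name and arguments are illustrative, not from the codebase):

import asyncio

async def write_documents(store_path, documents: dict) -> None:
    # One StorePath.set() per key, awaited concurrently, mirroring _save_metadata.
    awaitables = [(store_path / key).set(value) for key, value in documents.items()]
    await asyncio.gather(*awaitables)
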
2 changes: 1 addition & 1 deletion src/zarr/v3/store/memory.py
@@ -34,7 +34,7 @@ async def get(
value = value[byte_range[0] : byte_range[1]]
return value
except KeyError:
- return None # Q(JH): why not raise?
+ return None

async def get_partial_values(
self, key_ranges: List[Tuple[str, Tuple[int, int]]]
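Design note on the change above: a missing key is reported as None rather than a KeyError, which is what the shard loaders and the chunk-deletion tests below rely on. A small sketch, assuming MemoryStore.get takes a key and an optional byte range:

from zarr.v3.store import MemoryStore

async def missing_key_is_none() -> None:
    store = MemoryStore()
    # No KeyError for absent keys; callers simply check for None.
    assert await store.get("does/not/exist") is None
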
53 changes: 27 additions & 26 deletions tests/test_codecs_v3.py
@@ -13,7 +13,8 @@
from zarr.v3.indexing import morton_order_iter
from zarr.v3.metadata import CodecMetadata, ShardingCodecIndexLocation, runtime_configuration

- from zarr.v3.store import MemoryStore, Store
+ from zarr.v3.abc.store import Store
+ from zarr.v3.store import MemoryStore, StorePath


@frozen
@@ -38,7 +39,7 @@ async def set(self, value: np.ndarray):

@pytest.fixture
def store() -> Iterator[Store]:
- yield MemoryStore()
+ yield StorePath(MemoryStore())
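
The fixture now yields a StorePath wrapping a MemoryStore, which is why the assertions below join keys onto the store with the / operator and call get(), instead of store.get_async(key). A sketch of the new access pattern (return type assumed bytes-like; not part of the commit):

from typing import Optional
from zarr.v3.store import MemoryStore, StorePath

async def read_chunk(store: StorePath, key: str) -> Optional[bytes]:
    # New style used throughout the tests: join the key onto the StorePath, then get().
    # Old style: await store.get_async(key) on the bare MemoryStore.
    return await (store / key).get()

# e.g. await read_chunk(StorePath(MemoryStore()), "order/0.0") is None until written.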


@pytest.fixture
@@ -283,7 +284,7 @@ async def test_order(
fill_value=1,
)
z[:, :] = data
- assert await store.get_async("order/0.0") == z._store["0.0"]
+ assert await (store / "order/0.0").get() == z._store["0.0"]


@pytest.mark.parametrize("input_order", ["F", "C"])
@@ -395,7 +396,7 @@ async def test_transpose(
fill_value=1,
)
z[:, :] = data
- assert await store.get_async("transpose/0.0") == await store.get_async("transpose_zarr/0.0")
+ assert await (store / "transpose/0.0").get() == await (store / "transpose_zarr/0.0").get()


def test_transpose_invalid(
@@ -606,7 +607,7 @@ async def test_delete_empty_chunks(store: Store):
await _AsyncArrayProxy(a)[:16, :16].set(np.zeros((16, 16)))
await _AsyncArrayProxy(a)[:16, :16].set(data)
assert np.array_equal(await _AsyncArrayProxy(a)[:16, :16].get(), data)
- assert await store.get_async("delete_empty_chunks/c0/0") is None
+ assert await (store / "delete_empty_chunks/c0/0").get() is None


@pytest.mark.asyncio
@@ -630,8 +631,8 @@ async def test_delete_empty_sharded_chunks(store: Store):
data = np.ones((16, 16), dtype="uint16")
data[:8, :8] = 0
assert np.array_equal(data, await _AsyncArrayProxy(a)[:, :].get())
- assert await store.get_async("delete_empty_sharded_chunks/c/1/0") is None
- chunk_bytes = await store.get_async("delete_empty_sharded_chunks/c/0/0")
+ assert await (store / "delete_empty_sharded_chunks/c/1/0").get() is None
+ chunk_bytes = await (store / "delete_empty_sharded_chunks/c/0/0").get()
assert chunk_bytes is not None and len(chunk_bytes) == 16 * 2 + 8 * 8 * 2 + 4


@@ -661,10 +662,10 @@ async def test_zarr_compat(store: Store):
assert np.array_equal(data, await _AsyncArrayProxy(a)[:16, :18].get())
assert np.array_equal(data, z2[:16, :18])

- assert z2._store["0.0"] == await store.get_async("zarr_compat3/0.0")
- assert z2._store["0.1"] == await store.get_async("zarr_compat3/0.1")
- assert z2._store["1.0"] == await store.get_async("zarr_compat3/1.0")
- assert z2._store["1.1"] == await store.get_async("zarr_compat3/1.1")
+ assert z2._store["0.0"] == await (store / "zarr_compat3/0.0").get()
+ assert z2._store["0.1"] == await (store / "zarr_compat3/0.1").get()
+ assert z2._store["1.0"] == await (store / "zarr_compat3/1.0").get()
+ assert z2._store["1.1"] == await (store / "zarr_compat3/1.1").get()


@pytest.mark.asyncio
@@ -695,10 +696,10 @@ async def test_zarr_compat_F(store: Store):
assert np.array_equal(data, await _AsyncArrayProxy(a)[:16, :18].get())
assert np.array_equal(data, z2[:16, :18])

- assert z2._store["0.0"] == await store.get_async("zarr_compatF3/0.0")
- assert z2._store["0.1"] == await store.get_async("zarr_compatF3/0.1")
- assert z2._store["1.0"] == await store.get_async("zarr_compatF3/1.0")
- assert z2._store["1.1"] == await store.get_async("zarr_compatF3/1.1")
+ assert z2._store["0.0"] == await (store / "zarr_compatF3/0.0").get()
+ assert z2._store["0.1"] == await (store / "zarr_compatF3/0.1").get()
+ assert z2._store["1.0"] == await (store / "zarr_compatF3/1.0").get()
+ assert z2._store["1.1"] == await (store / "zarr_compatF3/1.1").get()


@pytest.mark.asyncio
@@ -728,7 +729,7 @@ async def test_dimension_names(store: Store):
)

assert (await AsyncArray.open(store / "dimension_names2")).metadata.dimension_names is None
- zarr_json_bytes = await (store / "dimension_names2" / "zarr.json").get_async()
+ zarr_json_bytes = await (store / "dimension_names2" / "zarr.json").get()
assert zarr_json_bytes is not None
assert "dimension_names" not in json.loads(zarr_json_bytes)

@@ -794,7 +795,7 @@ async def test_endian(store: Store, endian: Literal["big", "little"]):
fill_value=1,
)
z[:, :] = data
- assert await store.get_async("endian/0.0") == z._store["0.0"]
+ assert await (store / "endian/0.0").get() == z._store["0.0"]


@pytest.mark.parametrize("dtype_input_endian", [">u2", "<u2"])
Expand Down Expand Up @@ -830,7 +831,7 @@ async def test_endian_write(
fill_value=1,
)
z[:, :] = data
- assert await store.get_async("endian/0.0") == z._store["0.0"]
+ assert await (store / "endian/0.0").get() == z._store["0.0"]


def test_invalid_metadata(store: Store):
Expand Down Expand Up @@ -932,17 +933,17 @@ async def test_resize(store: Store):
)

await _AsyncArrayProxy(a)[:16, :18].set(data)
- assert await store.get_async("resize/0.0") is not None
- assert await store.get_async("resize/0.1") is not None
- assert await store.get_async("resize/1.0") is not None
- assert await store.get_async("resize/1.1") is not None
+ assert await (store / "resize/0.0").get() is not None
+ assert await (store / "resize/0.1").get() is not None
+ assert await (store / "resize/1.0").get() is not None
+ assert await (store / "resize/1.1").get() is not None

a = await a.resize((10, 12))
assert a.metadata.shape == (10, 12)
- assert await store.get_async("resize/0.0") is not None
- assert await store.get_async("resize/0.1") is not None
- assert await store.get_async("resize/1.0") is None
- assert await store.get_async("resize/1.1") is None
+ assert await (store / "resize/0.0").get() is not None
+ assert await (store / "resize/0.1").get() is not None
+ assert await (store / "resize/1.0").get() is None
+ assert await (store / "resize/1.1").get() is None


def test_exists_ok(store: Store):
(diff for 1 remaining changed file not shown)
