Run black 24.4.0 against the source. #482

Open · wants to merge 1 commit into base: main
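This PR is a mechanical reformatting pass: black 24.4.0 run against the source. Two rewrites account for most of the churn below: redundant parentheses around for-loop targets are removed, and function bodies consisting only of `...` are collapsed onto the signature line. A minimal, hypothetical module showing both rewrites (the names `stats`, `ShareStat`, and `JSONAble` here are stand-ins, not the project's real definitions):

```python
from typing import Protocol


class ShareStat:
    """Stand-in for the project's real ShareStat class."""


stats: dict[int, ShareStat] = {0: ShareStat(), 1: ShareStat()}

# Before this PR the code read:
#
#     for (shnum, stat) in stats.items():
#         ...
#
#     class JSONAble(Protocol):
#         def to_json_v1(self) -> object:
#             ...

# After black 24.4.0: the parentheses around the loop target are gone and the
# "..." body sits on the signature line.
for shnum, stat in stats.items():
    ...


class JSONAble(Protocol):
    def to_json_v1(self) -> object: ...
```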
2 changes: 1 addition & 1 deletion src/_zkapauthorizer/_storage_client.py
@@ -272,7 +272,7 @@ async def stat_shares(
        )

        known_stats: dict[int, ShareStat] = {}
-        for (shnum, stat) in stats.items():
+        for shnum, stat in stats.items():
            if not isinstance(shnum, int) or not isinstance(stat, ShareStat):
                raise ValueError(
                    f"expected stat_share to return list of dict of int:ShareStat, instead got item of {type(shnum)}:{type(stat)}"
4 changes: 2 additions & 2 deletions src/_zkapauthorizer/_storage_server.py
@@ -560,7 +560,7 @@ def _slot_testv_and_readv_and_writev(
        # We're not exactly sure what to do with mutable container truncations
        # and the official client doesn't ever use that feature so just
        # disable it by rejecting all attempts here.
-        for (testv, writev, new_length) in tw_vectors.values():
+        for testv, writev, new_length in tw_vectors.values():
            if new_length is not None:
                raise NewLengthRejected(new_length)

@@ -976,7 +976,7 @@ def add_leases_for_writev(
    Add a new lease using the given secrets to all shares written by
    ``tw_vectors``.
    """
-    for (sharenum, sharepath) in get_all_share_paths(storage_server, storage_index):
+    for sharenum, sharepath in get_all_share_paths(storage_server, storage_index):
        testv, datav, new_length = tw_vectors.get(sharenum, (None, b"", None))
        if datav or (new_length is not None):
            # It has data or a new length - it is a write.
1 change: 1 addition & 0 deletions src/_zkapauthorizer/controller.py
@@ -56,6 +56,7 @@

StorageAnnouncement: TypeAlias = Optional[dict[str, Any]]

+
# It would be nice to have frozen exception types but Failure.cleanFailure
# interacts poorly with these.
# https://twistedmatrix.com/trac/ticket/9641
2 changes: 1 addition & 1 deletion src/_zkapauthorizer/lease_maintenance.py
@@ -103,7 +103,7 @@ async def visit_storage_indexes(
        # Produce consistent results by forcing some consistent ordering
        # here. This will sort by name.
        stable_children = sorted(children.items())
-        for (name, (child_node, child_metadata)) in stable_children:
+        for name, (child_node, child_metadata) in stable_children:
            stack.append(child_node)

9 changes: 4 additions & 5 deletions src/_zkapauthorizer/model.py
@@ -80,8 +80,7 @@ class JSONAble(Protocol):
    An object which can marshal itself to JSON-compatible types.
    """

-    def to_json_v1(self) -> JSON:
-        ...
+    def to_json_v1(self) -> JSON: ...


def aware_now() -> datetime:

@@ -1347,9 +1346,9 @@ def from_json_v1(cls, values: dict[str, JSON]) -> "Voucher":
        return cls(
            number=number.encode("ascii"),
            expected_tokens=expected_tokens,
-            created=None
-            if values["created"] is None
-            else parse_datetime(values["created"]),
+            created=(
+                None if values["created"] is None else parse_datetime(values["created"])
+            ),
            state=state,
        )

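The `from_json_v1` hunk above also shows black parenthesizing a multi-line conditional expression used as a keyword-argument value. A small, hypothetical sketch of the same rewrite (`parse_datetime` below is a stand-in for the project's real parser, not its actual helper):

```python
from datetime import datetime
from typing import Optional


def parse_datetime(text: str) -> datetime:
    # Stand-in parser for illustration only.
    return datetime.fromisoformat(text)


def build(created: Optional[str]) -> dict[str, Optional[datetime]]:
    # Before, the conditional wrapped across lines with no parentheses:
    #
    #     created=None
    #     if created is None
    #     else parse_datetime(created),
    #
    # black 24.4.0 wraps the whole conditional expression in parentheses:
    return dict(
        created=(None if created is None else parse_datetime(created)),
    )


print(build("2024-04-01T00:00:00"))
```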
2 changes: 1 addition & 1 deletion src/_zkapauthorizer/replicate.py
@@ -1069,7 +1069,7 @@ def observed_event(
        # individual argument tuple with its statement.
        events = []
        any_important = False
-        for (important, sql, manyargs) in all_changes:
+        for important, sql, manyargs in all_changes:
            any_important = any_important or important
            for args in manyargs:
                events.append((sql, args))
45 changes: 15 additions & 30 deletions src/_zkapauthorizer/sql.py
@@ -33,55 +33,43 @@ class AbstractCursor(Protocol):
    """

    @property
-    def lastrowid(self) -> Optional[int]:
-        ...
+    def lastrowid(self) -> Optional[int]: ...

    @property
-    def rowcount(self) -> Optional[int]:
-        ...
+    def rowcount(self) -> Optional[int]: ...

-    def execute(self, statement: str, args: "Parameters", /) -> "AbstractCursor":
-        ...
+    def execute(self, statement: str, args: "Parameters", /) -> "AbstractCursor": ...

    def executemany(
        self, statement: str, args: Iterable["Parameters"]
-    ) -> "AbstractCursor":
-        ...
+    ) -> "AbstractCursor": ...

-    def close(self) -> None:
-        ...
+    def close(self) -> None: ...

-    def fetchall(self) -> list[Any]:
-        ...
+    def fetchall(self) -> list[Any]: ...

-    def fetchmany(self, n: int) -> list[Any]:
-        ...
+    def fetchmany(self, n: int) -> list[Any]: ...

-    def fetchone(self) -> Any:
-        ...
+    def fetchone(self) -> Any: ...


class AbstractConnection(Protocol):
    """
    A SQLite3 database connection.
    """

-    def iterdump(self) -> Iterable[str]:
-        ...
+    def iterdump(self) -> Iterable[str]: ...

-    def cursor(self, cursorClass: None = None) -> AbstractCursor:
-        ...
+    def cursor(self, cursorClass: None = None) -> AbstractCursor: ...

-    def __enter__(self) -> AbstractContextManager["AbstractConnection"]:
-        ...
+    def __enter__(self) -> AbstractContextManager["AbstractConnection"]: ...

    def __exit__(
        self,
        exc_type: Optional[type],
        exc_value: Optional[BaseException],
        exc_tb: Optional[Any],
-    ) -> bool:
-        ...
+    ) -> bool: ...


Connection = AbstractConnection

@@ -155,14 +143,11 @@ class Table:

class Statement(Protocol):
    @property
-    def table_name(self) -> str:
-        ...
+    def table_name(self) -> str: ...

-    def statement(self) -> str:
-        ...
+    def statement(self) -> str: ...

-    def arguments(self) -> tuple[SQLType, ...]:
-        ...
+    def arguments(self) -> tuple[SQLType, ...]: ...


@frozen
2 changes: 1 addition & 1 deletion src/_zkapauthorizer/tahoe.py
@@ -320,7 +320,7 @@ def dirnode(entry: dict[str, Any]) -> DirectoryNode:
        return DirectoryNode(readonly_directory_from_string(entry["ro_uri"]))

    r: _DirectoryListing = {}
-    for (name, (entry_kind, entry)) in details["children"].items():
+    for name, (entry_kind, entry) in details["children"].items():
        if entry_kind == "filenode":
            r[name] = filenode(entry)
        else:
4 changes: 2 additions & 2 deletions src/_zkapauthorizer/tests/_sql_matchers.py
@@ -44,7 +44,7 @@ def structured_dump(db: Connection) -> Iterator[Union[str, Insert]]:
    formatting.
    """
    tables = list(_structured_dump_tables(db))
-    for (name, sql) in tables:
+    for name, sql in tables:
        yield sql
        yield from _structured_dump_table(db, name)

@@ -134,7 +134,7 @@ def match(self, actual: Statement) -> Optional[Mismatch]:
            return Mismatch(
                f"length {len(actual.fields)} != {len(self.reference.fields)}",
            )
-        for (actual_field, reference_field) in zip(
+        for actual_field, reference_field in zip(
            actual.fields, self.reference.fields
        ):
            matcher = _get_matcher(reference_field, actual_field)
4 changes: 2 additions & 2 deletions src/_zkapauthorizer/tests/matchers.py
@@ -178,9 +178,9 @@ def leases_current(
    """

    def get_relevant_stats(storage_server: StorageServer) -> Iterator[ShareStat]:
-        for (storage_index, shares) in storage_server.buckets.items():
+        for storage_index, shares in storage_server.buckets.items():
            if storage_index in relevant_storage_indexes:
-                for (sharenum, stat) in shares.items():
+                for sharenum, stat in shares.items():
                    yield stat

    return AfterPreprocessing(  # type: ignore[no-any-return]
14 changes: 8 additions & 6 deletions src/_zkapauthorizer/tests/resources.py
@@ -205,9 +205,11 @@ def addDetail(self, case: TestCase) -> None:
            f"{node_type}-create-output",
            Content(
                UTF8_TEXT,
-                lambda: [self.create_output.encode("utf-8")]
-                if self.create_output is not None
-                else [],
+                lambda: (
+                    [self.create_output.encode("utf-8")]
+                    if self.create_output is not None
+                    else []
+                ),
            ),
        )

@@ -386,9 +388,9 @@ class TahoeClient(TahoeNode):
    """

    storage: Optional[TahoeStorage] = None
-    make_storage_announcement: Callable[
-        [TahoeStorage], JSON
-    ] = make_anonymous_storage_announcement
+    make_storage_announcement: Callable[[TahoeStorage], JSON] = (
+        make_anonymous_storage_announcement
+    )

    @property
    def node_type(self) -> str:
2 changes: 1 addition & 1 deletion src/_zkapauthorizer/tests/storage_common.py
@@ -82,7 +82,7 @@ def write_toy_shares(
        sharenums,
        size,
    )
-    for (sharenum, writer) in allocated.items():
+    for sharenum, writer in allocated.items():
        writer.write(0, bytes_for_share(sharenum, size))
        writer.close()

36 changes: 26 additions & 10 deletions src/_zkapauthorizer/tests/strategies.py
@@ -213,7 +213,7 @@ def tahoe_config_texts(
    def merge_shares(
        shares: tuple[int, int, int], the_rest: dict[str, object]
    ) -> dict[str, object]:
-        for (k, v) in zip(("needed", "happy", "total"), shares):
+        for k, v in zip(("needed", "happy", "total"), shares):
            if v is not None:
                the_rest["shares." + k] = f"{v}"
        return the_rest

@@ -606,7 +606,10 @@ def set_paths(basedir: str, portnumfile: str) -> Config:

        return set_paths

-    return direct_tahoe_configs(zkapauthz_v2_configuration, shares,).map(
+    return direct_tahoe_configs(
+        zkapauthz_v2_configuration,
+        shares,
+    ).map(
        path_setter,
    )

@@ -615,7 +618,10 @@ def vouchers() -> SearchStrategy[bytes]:
    """
    Build byte strings in the format of vouchers.
    """
-    return binary(min_size=32, max_size=32,).map(
+    return binary(
+        min_size=32,
+        max_size=32,
+    ).map(
        urlsafe_b64encode,
    )

@@ -717,7 +723,10 @@ def byte_strings(label: bytes, length: int, entropy: int) -> SearchStrategy[byte
                length,
            )
        )
-    return binary(min_size=entropy, max_size=entropy,).map(
+    return binary(
+        min_size=entropy,
+        max_size=entropy,
+    ).map(
        lambda bs: label + b"x" * (length - entropy - len(label)) + bs,
    )

@@ -1019,9 +1028,9 @@ def slot_test_and_write_vectors() -> SearchStrategy[TestAndWriteVectors]:
    )


-def slot_test_and_write_vectors_for_shares() -> SearchStrategy[
-    dict[int, TestAndWriteVectors]
-]:
+def slot_test_and_write_vectors_for_shares() -> (
+    SearchStrategy[dict[int, TestAndWriteVectors]]
+):
    """
    Build Tahoe-LAFS test and write vectors for a number of shares.
    """

@@ -1077,7 +1086,7 @@ def get_storage_index(self) -> bytes:
    # For testing
    def flatten(self) -> list[IFilesystemNode]:
        result = [self]
-        for (node, _) in self._children.values():
+        for node, _ in self._children.values():
            result.extend(node.flatten())
        return result

@@ -1120,7 +1129,10 @@ def storage_indexes_are_distinct(nodes: IFilesystemNode) -> bool:
            seen.add(si)
        return True

-    return recursive(leaf_nodes(), directory_nodes,).filter(
+    return recursive(
+        leaf_nodes(),
+        directory_nodes,
+    ).filter(
        storage_indexes_are_distinct,
    )

@@ -1443,6 +1455,10 @@ def make(

    stmts: SearchStrategy[Callable[[str, Table], SearchStrategy[Statement]]]
    stmts = sampled_from([inserts, deletes, updates])
-    return tuples(stmts, sql_identifiers(), tables(),).flatmap(
+    return tuples(
+        stmts,
+        sql_identifiers(),
+        tables(),
+    ).flatmap(
        make,
    )
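Most of the added lines in strategies.py come from one-line calls whose argument lists already ended with a trailing comma being split to one argument per line, which matches black's documented "magic trailing comma" handling. A tiny, hypothetical illustration:

```python
# With a trailing comma inside the call, black keeps one argument per line:
exploded = max(
    32,
    64,
)

# Without the trailing comma, the same call stays on a single line:
compact = max(32, 64)

print(exploded, compact)
```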
5 changes: 4 additions & 1 deletion src/_zkapauthorizer/tests/test_client_resource.py
@@ -198,6 +198,7 @@ def directory_writes() -> SearchStrategy[DirectoryWriteCapability]:

TRANSIENT_ERROR = "something went wrong, who knows what"

+
# Helper to work-around https://github.com/twisted/treq/issues/161
def uncooperator(started: bool = True) -> Cooperator:
    def schedule(f: Callable[[], object]) -> DelayedCall:

@@ -1784,7 +1785,9 @@ def bad_calculate_price_requests() -> SearchStrategy[Request]:
    good_headers = just({b"content-type": [b"application/json"]})
    bad_headers = fixed_dictionaries(
        {
-            b"content-type": mime_types(blacklist={"application/json"},).map(
+            b"content-type": mime_types(
+                blacklist={"application/json"},
+            ).map(
                lambda content_type: [content_type.encode("utf-8")],
            ),
        }
2 changes: 1 addition & 1 deletion src/_zkapauthorizer/tests/test_lease_maintenance.py
@@ -528,7 +528,7 @@ def test_renewed(
        # Make sure that the storage brokers have shares at the storage
        # indexes we're going to operate on.
        for storage_server in storage_broker.get_connected_servers():
-            for (storage_index, shares) in buckets:
+            for storage_index, shares in buckets:
                for sharenum, expiration_time in shares.items():
                    try:
                        create_share(
4 changes: 2 additions & 2 deletions src/_zkapauthorizer/tests/test_model.py
@@ -882,12 +882,12 @@ def test_lease_maintenance_activity(
        ).store

        expected = None
-        for (start_delay, sizes, finish_delay) in activity:
+        for start_delay, sizes, finish_delay in activity:
            now += start_delay
            started = now
            x = store.start_lease_maintenance()
            passes_required = 0
-            for (num_passes, trim_size) in sizes:
+            for num_passes, trim_size in sizes:
                passes_required += num_passes
                trim_size %= store.pass_value
                x.observe(
5 changes: 3 additions & 2 deletions src/_zkapauthorizer/tests/test_storage_protocol.py
@@ -384,7 +384,7 @@ def test_create_immutable(
            Equals(sharenums),
            "server did not return all buckets we wrote",
        )
-        for (sharenum, bucket) in readers.items():
+        for sharenum, bucket in readers.items():
            self.expectThat(
                bucket.remote_read(0, size),
                Equals(bytes_for_share(sharenum, size)),

@@ -435,6 +435,7 @@ def test_shares_already_exist(
        When the remote *allocate_buckets* implementation reports that shares
        already exist, passes are not spent for those shares.
        """
+
        # A helper that only varies on sharenums.
        async def allocate_buckets(sharenums: set[int]) -> None:
            alreadygot, writers = await self.client.allocate_buckets(

@@ -1292,7 +1293,7 @@ def assert_read_back_data(
        for sharenum, vectors in test_and_write_vectors_for_shares.items():
            length = max(offset + len(data) for (offset, data) in vectors.write_vector)
            expected = b"\x00" * length
-            for (offset, data) in vectors.write_vector:
+            for offset, data in vectors.write_vector:
                expected = expected[:offset] + data + expected[offset + len(data) :]
                if vectors.new_length is not None and vectors.new_length < length:
                    expected = expected[: vectors.new_length]