diff --git a/src/_zkapauthorizer/_storage_client.py b/src/_zkapauthorizer/_storage_client.py
index f89c9384..19c8ee2b 100644
--- a/src/_zkapauthorizer/_storage_client.py
+++ b/src/_zkapauthorizer/_storage_client.py
@@ -272,7 +272,7 @@ async def stat_shares(
             )
 
         known_stats: dict[int, ShareStat] = {}
-        for (shnum, stat) in stats.items():
+        for shnum, stat in stats.items():
             if not isinstance(shnum, int) or not isinstance(stat, ShareStat):
                 raise ValueError(
                     f"expected stat_share to return list of dict of int:ShareStat, instead got item of {type(shnum)}:{type(stat)}"
diff --git a/src/_zkapauthorizer/_storage_server.py b/src/_zkapauthorizer/_storage_server.py
index 470c5863..e60b1937 100644
--- a/src/_zkapauthorizer/_storage_server.py
+++ b/src/_zkapauthorizer/_storage_server.py
@@ -560,7 +560,7 @@ def _slot_testv_and_readv_and_writev(
         # We're not exactly sure what to do with mutable container truncations
         # and the official client doesn't ever use that feature so just
        # disable it by rejecting all attempts here.
-        for (testv, writev, new_length) in tw_vectors.values():
+        for testv, writev, new_length in tw_vectors.values():
             if new_length is not None:
                 raise NewLengthRejected(new_length)
 
@@ -976,7 +976,7 @@ def add_leases_for_writev(
     Add a new lease using the given secrets to all shares written by
     ``tw_vectors``.
     """
-    for (sharenum, sharepath) in get_all_share_paths(storage_server, storage_index):
+    for sharenum, sharepath in get_all_share_paths(storage_server, storage_index):
         testv, datav, new_length = tw_vectors.get(sharenum, (None, b"", None))
         if datav or (new_length is not None):
             # It has data or a new length - it is a write.
diff --git a/src/_zkapauthorizer/controller.py b/src/_zkapauthorizer/controller.py
index b6c0aedf..aa728e2d 100644
--- a/src/_zkapauthorizer/controller.py
+++ b/src/_zkapauthorizer/controller.py
@@ -56,6 +56,7 @@
 
 StorageAnnouncement: TypeAlias = Optional[dict[str, Any]]
 
+
 # It would be nice to have frozen exception types but Failure.cleanFailure
 # interacts poorly with these.
 # https://twistedmatrix.com/trac/ticket/9641
diff --git a/src/_zkapauthorizer/lease_maintenance.py b/src/_zkapauthorizer/lease_maintenance.py
index d2ab6818..0746a33a 100644
--- a/src/_zkapauthorizer/lease_maintenance.py
+++ b/src/_zkapauthorizer/lease_maintenance.py
@@ -103,7 +103,7 @@ async def visit_storage_indexes(
             # Produce consistent results by forcing some consistent ordering
             # here. This will sort by name.
             stable_children = sorted(children.items())
-            for (name, (child_node, child_metadata)) in stable_children:
+            for name, (child_node, child_metadata) in stable_children:
                 stack.append(child_node)
 
 
diff --git a/src/_zkapauthorizer/model.py b/src/_zkapauthorizer/model.py
index 5979931b..47fa3a1e 100644
--- a/src/_zkapauthorizer/model.py
+++ b/src/_zkapauthorizer/model.py
@@ -80,8 +80,7 @@ class JSONAble(Protocol):
     An object which can marshal itself to JSON-compatible types.
     """
 
-    def to_json_v1(self) -> JSON:
-        ...
+    def to_json_v1(self) -> JSON: ...
 
 
 def aware_now() -> datetime:
@@ -1347,9 +1346,9 @@ def from_json_v1(cls, values: dict[str, JSON]) -> "Voucher":
         return cls(
             number=number.encode("ascii"),
             expected_tokens=expected_tokens,
-            created=None
-            if values["created"] is None
-            else parse_datetime(values["created"]),
+            created=(
+                None if values["created"] is None else parse_datetime(values["created"])
+            ),
             state=state,
         )
 
diff --git a/src/_zkapauthorizer/replicate.py b/src/_zkapauthorizer/replicate.py
index f4f10d34..1b56cd47 100644
--- a/src/_zkapauthorizer/replicate.py
+++ b/src/_zkapauthorizer/replicate.py
@@ -1069,7 +1069,7 @@ def observed_event(
         # individual argument tuple with its statement.
         events = []
         any_important = False
-        for (important, sql, manyargs) in all_changes:
+        for important, sql, manyargs in all_changes:
             any_important = any_important or important
             for args in manyargs:
                 events.append((sql, args))
diff --git a/src/_zkapauthorizer/sql.py b/src/_zkapauthorizer/sql.py
index 857d5d36..42afde4f 100644
--- a/src/_zkapauthorizer/sql.py
+++ b/src/_zkapauthorizer/sql.py
@@ -33,32 +33,24 @@ class AbstractCursor(Protocol):
     """
 
     @property
-    def lastrowid(self) -> Optional[int]:
-        ...
+    def lastrowid(self) -> Optional[int]: ...
 
     @property
-    def rowcount(self) -> Optional[int]:
-        ...
+    def rowcount(self) -> Optional[int]: ...
 
-    def execute(self, statement: str, args: "Parameters", /) -> "AbstractCursor":
-        ...
+    def execute(self, statement: str, args: "Parameters", /) -> "AbstractCursor": ...
 
     def executemany(
         self, statement: str, args: Iterable["Parameters"]
-    ) -> "AbstractCursor":
-        ...
+    ) -> "AbstractCursor": ...
 
-    def close(self) -> None:
-        ...
+    def close(self) -> None: ...
 
-    def fetchall(self) -> list[Any]:
-        ...
+    def fetchall(self) -> list[Any]: ...
 
-    def fetchmany(self, n: int) -> list[Any]:
-        ...
+    def fetchmany(self, n: int) -> list[Any]: ...
 
-    def fetchone(self) -> Any:
-        ...
+    def fetchone(self) -> Any: ...
 
 
 class AbstractConnection(Protocol):
@@ -66,22 +58,18 @@ class AbstractConnection(Protocol):
     A SQLite3 database connection.
     """
 
-    def iterdump(self) -> Iterable[str]:
-        ...
+    def iterdump(self) -> Iterable[str]: ...
 
-    def cursor(self, cursorClass: None = None) -> AbstractCursor:
-        ...
+    def cursor(self, cursorClass: None = None) -> AbstractCursor: ...
 
-    def __enter__(self) -> AbstractContextManager["AbstractConnection"]:
-        ...
+    def __enter__(self) -> AbstractContextManager["AbstractConnection"]: ...
 
     def __exit__(
         self,
         exc_type: Optional[type],
         exc_value: Optional[BaseException],
         exc_tb: Optional[Any],
-    ) -> bool:
-        ...
+    ) -> bool: ...
 
 
 Connection = AbstractConnection
@@ -155,14 +143,11 @@ class Table:
 
 class Statement(Protocol):
     @property
-    def table_name(self) -> str:
-        ...
+    def table_name(self) -> str: ...
 
-    def statement(self) -> str:
-        ...
+    def statement(self) -> str: ...
 
-    def arguments(self) -> tuple[SQLType, ...]:
-        ...
+    def arguments(self) -> tuple[SQLType, ...]: ...
 
 
 @frozen
diff --git a/src/_zkapauthorizer/tahoe.py b/src/_zkapauthorizer/tahoe.py
index 6148e7dd..b5c4a881 100644
--- a/src/_zkapauthorizer/tahoe.py
+++ b/src/_zkapauthorizer/tahoe.py
@@ -320,7 +320,7 @@ def dirnode(entry: dict[str, Any]) -> DirectoryNode:
         return DirectoryNode(readonly_directory_from_string(entry["ro_uri"]))
 
     r: _DirectoryListing = {}
-    for (name, (entry_kind, entry)) in details["children"].items():
+    for name, (entry_kind, entry) in details["children"].items():
         if entry_kind == "filenode":
             r[name] = filenode(entry)
         else:
diff --git a/src/_zkapauthorizer/tests/_sql_matchers.py b/src/_zkapauthorizer/tests/_sql_matchers.py
index da2a7c59..f1a5aecb 100644
--- a/src/_zkapauthorizer/tests/_sql_matchers.py
+++ b/src/_zkapauthorizer/tests/_sql_matchers.py
@@ -44,7 +44,7 @@ def structured_dump(db: Connection) -> Iterator[Union[str, Insert]]:
     formatting.
     """
     tables = list(_structured_dump_tables(db))
-    for (name, sql) in tables:
+    for name, sql in tables:
         yield sql
         yield from _structured_dump_table(db, name)
 
@@ -134,7 +134,7 @@ def match(self, actual: Statement) -> Optional[Mismatch]:
             return Mismatch(
                 f"length {len(actual.fields)} != {len(self.reference.fields)}",
             )
-        for (actual_field, reference_field) in zip(
+        for actual_field, reference_field in zip(
             actual.fields, self.reference.fields
         ):
             matcher = _get_matcher(reference_field, actual_field)
diff --git a/src/_zkapauthorizer/tests/matchers.py b/src/_zkapauthorizer/tests/matchers.py
index 107f9d2a..ae562838 100644
--- a/src/_zkapauthorizer/tests/matchers.py
+++ b/src/_zkapauthorizer/tests/matchers.py
@@ -178,9 +178,9 @@ def leases_current(
     """
 
     def get_relevant_stats(storage_server: StorageServer) -> Iterator[ShareStat]:
-        for (storage_index, shares) in storage_server.buckets.items():
+        for storage_index, shares in storage_server.buckets.items():
             if storage_index in relevant_storage_indexes:
-                for (sharenum, stat) in shares.items():
+                for sharenum, stat in shares.items():
                     yield stat
 
     return AfterPreprocessing(  # type: ignore[no-any-return]
diff --git a/src/_zkapauthorizer/tests/resources.py b/src/_zkapauthorizer/tests/resources.py
index 19e1d621..14222c9f 100644
--- a/src/_zkapauthorizer/tests/resources.py
+++ b/src/_zkapauthorizer/tests/resources.py
@@ -205,9 +205,11 @@ def addDetail(self, case: TestCase) -> None:
             f"{node_type}-create-output",
             Content(
                 UTF8_TEXT,
-                lambda: [self.create_output.encode("utf-8")]
-                if self.create_output is not None
-                else [],
+                lambda: (
+                    [self.create_output.encode("utf-8")]
+                    if self.create_output is not None
+                    else []
+                ),
             ),
         )
 
@@ -386,9 +388,9 @@ class TahoeClient(TahoeNode):
     """
 
     storage: Optional[TahoeStorage] = None
-    make_storage_announcement: Callable[
-        [TahoeStorage], JSON
-    ] = make_anonymous_storage_announcement
+    make_storage_announcement: Callable[[TahoeStorage], JSON] = (
+        make_anonymous_storage_announcement
+    )
 
     @property
     def node_type(self) -> str:
diff --git a/src/_zkapauthorizer/tests/storage_common.py b/src/_zkapauthorizer/tests/storage_common.py
index 2910a922..07365a01 100644
--- a/src/_zkapauthorizer/tests/storage_common.py
+++ b/src/_zkapauthorizer/tests/storage_common.py
@@ -82,7 +82,7 @@ def write_toy_shares(
         sharenums,
         size,
     )
-    for (sharenum, writer) in allocated.items():
+    for sharenum, writer in allocated.items():
         writer.write(0, bytes_for_share(sharenum, size))
         writer.close()
 
diff --git a/src/_zkapauthorizer/tests/strategies.py b/src/_zkapauthorizer/tests/strategies.py
index 3c594cd1..e140e150 100644
--- a/src/_zkapauthorizer/tests/strategies.py
+++ b/src/_zkapauthorizer/tests/strategies.py
@@ -213,7 +213,7 @@ def tahoe_config_texts(
     def merge_shares(
         shares: tuple[int, int, int], the_rest: dict[str, object]
     ) -> dict[str, object]:
-        for (k, v) in zip(("needed", "happy", "total"), shares):
+        for k, v in zip(("needed", "happy", "total"), shares):
             if v is not None:
                 the_rest["shares." + k] = f"{v}"
         return the_rest
@@ -606,7 +606,10 @@ def set_paths(basedir: str, portnumfile: str) -> Config:
 
         return set_paths
 
-    return direct_tahoe_configs(zkapauthz_v2_configuration, shares,).map(
+    return direct_tahoe_configs(
+        zkapauthz_v2_configuration,
+        shares,
+    ).map(
         path_setter,
     )
 
@@ -615,7 +618,10 @@ def vouchers() -> SearchStrategy[bytes]:
     """
     Build byte strings in the format of vouchers.
     """
-    return binary(min_size=32, max_size=32,).map(
+    return binary(
+        min_size=32,
+        max_size=32,
+    ).map(
         urlsafe_b64encode,
     )
 
@@ -717,7 +723,10 @@ def byte_strings(label: bytes, length: int, entropy: int) -> SearchStrategy[byte
                 length,
             )
         )
-    return binary(min_size=entropy, max_size=entropy,).map(
+    return binary(
+        min_size=entropy,
+        max_size=entropy,
+    ).map(
         lambda bs: label + b"x" * (length - entropy - len(label)) + bs,
     )
 
@@ -1019,9 +1028,9 @@ def slot_test_and_write_vectors() -> SearchStrategy[TestAndWriteVectors]:
     )
 
 
-def slot_test_and_write_vectors_for_shares() -> SearchStrategy[
-    dict[int, TestAndWriteVectors]
-]:
+def slot_test_and_write_vectors_for_shares() -> (
+    SearchStrategy[dict[int, TestAndWriteVectors]]
+):
     """
     Build Tahoe-LAFS test and write vectors for a number of shares.
     """
@@ -1077,7 +1086,7 @@ def get_storage_index(self) -> bytes:
     # For testing
     def flatten(self) -> list[IFilesystemNode]:
         result = [self]
-        for (node, _) in self._children.values():
+        for node, _ in self._children.values():
             result.extend(node.flatten())
         return result
 
@@ -1120,7 +1129,10 @@ def storage_indexes_are_distinct(nodes: IFilesystemNode) -> bool:
             seen.add(si)
         return True
 
-    return recursive(leaf_nodes(), directory_nodes,).filter(
+    return recursive(
+        leaf_nodes(),
+        directory_nodes,
+    ).filter(
         storage_indexes_are_distinct,
     )
 
@@ -1443,6 +1455,10 @@ def make(
 
     stmts: SearchStrategy[Callable[[str, Table], SearchStrategy[Statement]]]
     stmts = sampled_from([inserts, deletes, updates])
-    return tuples(stmts, sql_identifiers(), tables(),).flatmap(
+    return tuples(
+        stmts,
+        sql_identifiers(),
+        tables(),
+    ).flatmap(
         make,
     )
diff --git a/src/_zkapauthorizer/tests/test_client_resource.py b/src/_zkapauthorizer/tests/test_client_resource.py
index 2bb0f792..77e3f75f 100644
--- a/src/_zkapauthorizer/tests/test_client_resource.py
+++ b/src/_zkapauthorizer/tests/test_client_resource.py
@@ -198,6 +198,7 @@ def directory_writes() -> SearchStrategy[DirectoryWriteCapability]:
 
 TRANSIENT_ERROR = "something went wrong, who knows what"
 
+
 # Helper to work-around https://github.com/twisted/treq/issues/161
 def uncooperator(started: bool = True) -> Cooperator:
     def schedule(f: Callable[[], object]) -> DelayedCall:
@@ -1784,7 +1785,9 @@ def bad_calculate_price_requests() -> SearchStrategy[Request]:
     good_headers = just({b"content-type": [b"application/json"]})
     bad_headers = fixed_dictionaries(
         {
-            b"content-type": mime_types(blacklist={"application/json"},).map(
+            b"content-type": mime_types(
+                blacklist={"application/json"},
+            ).map(
                 lambda content_type: [content_type.encode("utf-8")],
             ),
         }
diff --git a/src/_zkapauthorizer/tests/test_lease_maintenance.py b/src/_zkapauthorizer/tests/test_lease_maintenance.py
index e6fd596c..63454bdf 100644
--- a/src/_zkapauthorizer/tests/test_lease_maintenance.py
+++ b/src/_zkapauthorizer/tests/test_lease_maintenance.py
@@ -528,7 +528,7 @@ def test_renewed(
         # Make sure that the storage brokers have shares at the storage
         # indexes we're going to operate on.
         for storage_server in storage_broker.get_connected_servers():
-            for (storage_index, shares) in buckets:
+            for storage_index, shares in buckets:
                 for sharenum, expiration_time in shares.items():
                     try:
                         create_share(
diff --git a/src/_zkapauthorizer/tests/test_model.py b/src/_zkapauthorizer/tests/test_model.py
index 49529b29..dcb823db 100644
--- a/src/_zkapauthorizer/tests/test_model.py
+++ b/src/_zkapauthorizer/tests/test_model.py
@@ -882,12 +882,12 @@ def test_lease_maintenance_activity(
         ).store
 
         expected = None
-        for (start_delay, sizes, finish_delay) in activity:
+        for start_delay, sizes, finish_delay in activity:
             now += start_delay
             started = now
             x = store.start_lease_maintenance()
             passes_required = 0
-            for (num_passes, trim_size) in sizes:
+            for num_passes, trim_size in sizes:
                 passes_required += num_passes
                 trim_size %= store.pass_value
                 x.observe(
diff --git a/src/_zkapauthorizer/tests/test_storage_protocol.py b/src/_zkapauthorizer/tests/test_storage_protocol.py
index 50ea168e..3529baec 100644
--- a/src/_zkapauthorizer/tests/test_storage_protocol.py
+++ b/src/_zkapauthorizer/tests/test_storage_protocol.py
@@ -384,7 +384,7 @@ def test_create_immutable(
             Equals(sharenums),
             "server did not return all buckets we wrote",
         )
-        for (sharenum, bucket) in readers.items():
+        for sharenum, bucket in readers.items():
             self.expectThat(
                 bucket.remote_read(0, size),
                 Equals(bytes_for_share(sharenum, size)),
@@ -435,6 +435,7 @@ def test_shares_already_exist(
         When the remote *allocate_buckets* implementation reports that shares
         already exist, passes are not spent for those shares.
         """
+
         # A helper that only varies on sharenums.
         async def allocate_buckets(sharenums: set[int]) -> None:
             alreadygot, writers = await self.client.allocate_buckets(
@@ -1292,7 +1293,7 @@ def assert_read_back_data(
         for sharenum, vectors in test_and_write_vectors_for_shares.items():
             length = max(offset + len(data) for (offset, data) in vectors.write_vector)
             expected = b"\x00" * length
-            for (offset, data) in vectors.write_vector:
+            for offset, data in vectors.write_vector:
                 expected = expected[:offset] + data + expected[offset + len(data) :]
             if vectors.new_length is not None and vectors.new_length < length:
                 expected = expected[: vectors.new_length]
diff --git a/src/_zkapauthorizer/validators.py b/src/_zkapauthorizer/validators.py
index f402fc1b..5546c35e 100644
--- a/src/_zkapauthorizer/validators.py
+++ b/src/_zkapauthorizer/validators.py
@@ -105,8 +105,7 @@ def validate_has_length(
 
 
 class Ordered(Protocol):
-    def __gt__(self: _T, other: _T) -> bool:
-        ...
+    def __gt__(self: _T, other: _T) -> bool: ...
 
 
 def greater_than(expected: Ordered) -> ValidatorType[Ordered]: