Fix all the flake8 warnings and errors
There should be no functional changes here. Mostly these were line-too-long
warnings. After this, both flake8 and black pass.
dbnicholson committed Apr 25, 2024
1 parent efa4818 commit dd676fc
Showing 22 changed files with 181 additions and 139 deletions.
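Most of the fixes below follow two patterns: comments re-wrapped at the line-length limit, and long strings split with implicit concatenation so that black's output also satisfies flake8's line-too-long check. A minimal sketch of the string pattern, mirroring the gitchecker change but with illustrative names rather than code from this repository:

    def describe_failure(url: str, pattern: str) -> str:
        # Adjacent string literals are joined at compile time, so the
        # message is identical to the over-long single-line version.
        return (
            f"{url} has no tags matching "
            f"'{pattern}'"
        )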
15 changes: 8 additions & 7 deletions src/checkers/debianrepochecker.py
@@ -163,9 +163,10 @@ async def check(self, external_data: ExternalBase):

         new_version = ExternalFile(
             url=candidate.uri,
-            # FIXME: apt.package.Version.{md5,sha1,sha256} can raise an exception
-            # if given hash isn't set, while sha512 isn't accessible at all.
-            # Raw hashes are handy, but accessible only through protected property.
+            # FIXME: apt.package.Version.{md5,sha1,sha256} can raise an
+            # exception if given hash isn't set, while sha512 isn't accessible
+            # at all. Raw hashes are handy, but accessible only through
+            # protected property.
             checksum=read_deb_hashes(candidate._records.hashes),
             size=candidate.size,
             version=candidate.version,
@@ -180,10 +181,10 @@ def _translate_arch(self, arch: str) -> str:
         return arches.get(arch, arch)

     async def _get_timestamp_for_candidate(self, candidate: apt.Version):
-        # TODO: fetch package, parse changelog, get the date from there.
-        # python-apt can fetch changelogs from Debian and Ubuntu's changelog
-        # server, but most packages this checker will be used for are not from these repos.
-        # We'd have to open-code it.
+        # TODO: fetch package, parse changelog, get the date from there. python-apt can
+        # fetch changelogs from Debian and Ubuntu's changelog server, but most packages
+        # this checker will be used for are not from these repos. We'd have to open-code
+        # it.
         # https://salsa.debian.org/apt-team/python-apt/blob/master/apt/package.py#L1245-1417
         assert candidate.uri
         return await get_timestamp_from_url(candidate.uri, self.session)
3 changes: 2 additions & 1 deletion src/checkers/gitchecker.py
@@ -141,7 +141,8 @@ async def _check_has_new(external_data: ExternalGitRepo):
             latest_tag = sorted_tags[-1]
         except IndexError as err:
             raise CheckerQueryError(
-                f"{external_data.current_version.url} has no tags matching '{tag_pattern}'"
+                f"{external_data.current_version.url} has no tags matching "
+                f"'{tag_pattern}'"
             ) from err

         new_version = ExternalGitRef(
3 changes: 2 additions & 1 deletion src/checkers/gnomechecker.py
@@ -60,7 +60,8 @@ async def check(self, external_data: ExternalBase):
         proj_url = GNOME_MIRROR / "sources" / project_name
         try:
             async with self.session.get(proj_url / "cache.json") as cache_resp:
-                # Some mirrors may sand invalid content-type; don't require it to be application/json
+                # Some mirrors may sand invalid content-type; don't require it to be
+                # application/json
                 cache_json = await cache_resp.json(content_type=None)
         except NETWORK_ERRORS as err:
             raise CheckerQueryError from err
10 changes: 6 additions & 4 deletions src/checkers/htmlchecker.py
@@ -120,8 +120,9 @@ async def _get_text(self, url: t.Union[URL, str]) -> str:
         try:
             async with self.session.get(url) as response:
                 encoding = await self._get_encoding(response)
-                # We use streaming decoding in order to get decode error and abort the check
-                # as early as possible, without preloading the whole raw contents into memory
+                # We use streaming decoding in order to get decode error and abort the
+                # check as early as possible, without preloading the whole raw contents
+                # into memory
                 decoder_cls = codecs.getincrementaldecoder(encoding)
                 decoder = decoder_cls(errors="strict")
                 with io.StringIO() as buf:
@@ -184,8 +185,9 @@ def _get_latest(pattern: re.Pattern, ver_group: int) -> re.Match:
         )

         try:
-            # NOTE Returning last match when sort is requested and first match otherwise
-            # doesn't seem sensible, but we need to retain backward compatibility
+            # NOTE Returning last match when sort is requested and first match
+            # otherwise doesn't seem sensible, but we need to retain backward
+            # compatibility
             result = matches[-1 if sort_matches else 0]
         except IndexError as err:
             raise CheckerQueryError(
7 changes: 4 additions & 3 deletions src/lib/externaldata.py
@@ -129,9 +129,10 @@ class State(IntFlag):
     checker_data: t.Dict[str, t.Any]
     module: t.Optional[BuilderModule]
     parent: t.Optional[BuilderSource] = dataclasses.field(init=False, default=None)
-    # fmt: off
-    checked: asyncio.Event = dataclasses.field(init=False, default_factory=asyncio.Event)
-    # fmt: on
+    checked: asyncio.Event = dataclasses.field(
+        init=False,
+        default_factory=asyncio.Event,
+    )

     @classmethod
     def __init_subclass__(cls, *args, **kwargs):
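The externaldata.py change above drops the "# fmt: off" / "# fmt: on" escape hatch: wrapping the dataclasses.field() call across several lines is a form both black and flake8 accept. A self-contained sketch of the same pattern, with an illustrative class name:

    import asyncio
    import dataclasses

    @dataclasses.dataclass
    class Example:
        # Wrapping the field() call keeps each line under the limit
        # without disabling black formatting around it.
        checked: asyncio.Event = dataclasses.field(
            init=False,
            default_factory=asyncio.Event,
        )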
11 changes: 7 additions & 4 deletions src/lib/utils.py
@@ -160,11 +160,13 @@ def content_type_rejected(content_type: t.Optional[str]) -> bool:
         async for chunk in response.content.iter_chunked(HTTP_CHUNK_SIZE):
             if first_chunk:
                 first_chunk = False
-                # determine content type from magic number since http header may be wrong
+                # determine content type from magic number since http header may be
+                # wrong
                 actual_content_type = magic.from_buffer(chunk, mime=True)
                 if content_type_rejected(actual_content_type):
                     raise CheckerFetchError(
-                        f"Wrong content type '{actual_content_type}' received from '{url}'"
+                        f"Wrong content type '{actual_content_type}' received "
+                        f"from '{url}'"
                     )

             checksum.update(chunk)
@@ -429,7 +431,7 @@ async def git_ls_remote(url: str) -> t.Dict[str, str]:
         raise CheckerQueryError("Listing Git remote failed") from err
     git_stdout = git_stdout_raw.decode()

-    return {r: c for c, r in (l.split() for l in git_stdout.splitlines())}
+    return {r: c for c, r in (line.split() for line in git_stdout.splitlines())}


 async def extract_appimage_version(appimg_io: t.IO):
@@ -549,7 +551,8 @@ def dump_manifest(contents: t.Dict, manifest_path: t.Union[Path, str]):
     # Determine max line length preference
     if max_line_length := conf.get("max_line_length"):
         try:
-            _yaml.width = int(max_line_length)  # type: ignore # See https://sourceforge.net/p/ruamel-yaml/tickets/322/
+            # See https://sourceforge.net/p/ruamel-yaml/tickets/322/
+            _yaml.width = int(max_line_length)  # type: ignore
         except ValueError:
             log.warning("Ignoring invalid max_line_length %r", max_line_length)
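The git_ls_remote change renames the single-letter variable l, which flake8 flags as E741 (ambiguous variable name), to line; behaviour is unchanged. A standalone sketch of the parsing it performs, fed with illustrative ls-remote output:

    sample = "a1b2c3d\trefs/heads/main\ne4f5a6b\trefs/tags/v1.0"

    # Map each ref to its commit hash, mirroring the comprehension in
    # utils.py; "line" replaces the ambiguous name "l".
    refs = {r: c for c, r in (line.split() for line in sample.splitlines())}
    print(refs)  # {'refs/heads/main': 'a1b2c3d', 'refs/tags/v1.0': 'e4f5a6b'}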
10 changes: 7 additions & 3 deletions src/main.py
@@ -435,9 +435,13 @@ def parse_cli_args(cli_args=None):
     )
     parser.add_argument(
         "--require-important-update",
-        help="Require an update to at least one source with is-important or is-main-source to save changes to the manifest. "
-        "If no instances of is-important or is-main-source are found, assume normal behaviour and always save changes to the manifest. "
-        "This is useful to avoid PRs generated to update a singular unimportant source.",
+        help=(
+            "Require an update to at least one source with is-important or "
+            "is-main-source to save changes to the manifest. If no instances of "
+            "is-important or is-main-source are found, assume normal behaviour and "
+            "always save changes to the manifest. This is useful to avoid PRs "
+            "generated to update a singular unimportant source."
+        ),
         action="store_true",
     )
10 changes: 5 additions & 5 deletions src/manifest.py
@@ -510,10 +510,10 @@ def _update_appdata(self):
             log.debug("Version didn't change, not adding release")

     def update_manifests(self) -> t.List[str]:
-        """
-        Updates references to external data in manifests.
-        If require_important_update is True, only update the manifest
-        if at least one source with IMPORTANT_SRC_PROP or MAIN_SRC_PROP received an update.
+        """Updates references to external data in manifests.
+
+        If require_important_update is True, only update the manifest if at least one
+        source with IMPORTANT_SRC_PROP or MAIN_SRC_PROP received an update.
         """
         # We want a list, without duplicates; Python provides an
         # insertion-order-preserving dictionary so we use that.
@@ -526,7 +526,7 @@ def update_manifests(self) -> t.List[str]:
         for data in self.get_external_data():
             important = data.checker_data.get(IMPORTANT_SRC_PROP)
             main = data.checker_data.get(MAIN_SRC_PROP)
-            if important or (main and important != False):
+            if important or (main and important is not False):
                 log.debug("Found an important source: %s", data)

                 found_important_update = data.has_version_changed
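The second manifest.py hunk also fixes flake8's E712: important != False becomes important is not False. The two spellings are not always interchangeable, since != uses equality while "is not" tests identity against the False singleton; for plain None/True/False values they agree, but for 0 they do not. A quick illustration:

    for value in (True, False, None, 0):
        # 0 == False under equality, but 0 is a different object than False.
        print(value, value != False, value is not False)  # noqa: E712
    # True True True / False False False / None True True / 0 False True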
12 changes: 6 additions & 6 deletions tests/test_anityachecker.py
@@ -25,7 +25,7 @@ async def test_check(self):
                 self.assertIsInstance(data.new_version, ExternalFile)
                 self.assertRegex(
                     data.new_version.url,
-                    r"^https://download.gnome.org/sources/glib-networking/\d+.\d+/glib-networking-[\d.]+.tar.xz$",
+                    r"^https://download.gnome.org/sources/glib-networking/\d+.\d+/glib-networking-[\d.]+.tar.xz$",  # noqa: E501
                 )
                 self.assertIsNotNone(data.new_version.version)
                 self.assertGreater(
@@ -38,15 +38,15 @@ async def test_check(self):
                 self.assertNotEqual(
                     data.new_version.checksum,
                     MultiDigest(
-                        sha256="1f185aaef094123f8e25d8fa55661b3fd71020163a0174adb35a37685cda613b",
+                        sha256="1f185aaef094123f8e25d8fa55661b3fd71020163a0174adb35a37685cda613b",  # noqa: E501
                     ),
                 )
             elif data.filename == "boost_1_74_0.tar.bz2":
                 self.assertIsNotNone(data.new_version)
                 self.assertIsInstance(data.new_version, ExternalFile)
                 self.assertRegex(
                     data.new_version.url,
-                    r"^https://boostorg\.jfrog\.io/artifactory/main/release/[\d.]+/source/boost_[\d]+_[\d]+_[\d]+.tar.bz2$",
+                    r"^https://boostorg\.jfrog\.io/artifactory/main/release/[\d.]+/source/boost_[\d]+_[\d]+_[\d]+.tar.bz2$",  # noqa: E501
                 )
                 self.assertIsNotNone(data.new_version.version)
                 self.assertGreater(
@@ -59,15 +59,15 @@ async def test_check(self):
                 self.assertNotEqual(
                     data.new_version.checksum,
                     MultiDigest(
-                        sha256="83bfc1507731a0906e387fc28b7ef5417d591429e51e788417fe9ff025e116b1"
+                        sha256="83bfc1507731a0906e387fc28b7ef5417d591429e51e788417fe9ff025e116b1"  # noqa: E501
                     ),
                 )
             elif data.filename == "flatpak-1.8.2.tar.xz":
                 self.assertIsNotNone(data.new_version)
                 self.assertIsInstance(data.new_version, ExternalFile)
                 self.assertRegex(
                     data.new_version.url,
-                    r"^https://github.com/flatpak/flatpak/releases/download/[\w\d.]+/flatpak-[\w\d.]+.tar.xz$",
+                    r"^https://github.com/flatpak/flatpak/releases/download/[\w\d.]+/flatpak-[\w\d.]+.tar.xz$",  # noqa: E501
                 )
                 self.assertIsNotNone(data.new_version.version)
                 self.assertEqual(
@@ -80,7 +80,7 @@ async def test_check(self):
                 self.assertNotEqual(
                     data.new_version.checksum,
                     MultiDigest(
-                        sha256="7926625df7c2282a5ee1a8b3c317af53d40a663b1bc6b18a2dc8747e265085b0"
+                        sha256="7926625df7c2282a5ee1a8b3c317af53d40a663b1bc6b18a2dc8747e265085b0"  # noqa: E501
                     ),
                 )
             elif data.filename == "ostree.git":
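The test fixes rely on flake8's per-line suppression comment, # noqa: E501, for lines that cannot reasonably be split, such as URL regexes and SHA-256 literals. A minimal sketch using one of the regexes above:

    import re

    # Splitting a regex or a checksum hurts readability more than a long
    # line does, so E501 (line too long) is suppressed for this line only.
    PATTERN = re.compile(r"^https://download.gnome.org/sources/glib-networking/\d+.\d+/glib-networking-[\d.]+.tar.xz$")  # noqa: E501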