Fix style issues found by Ruff #1741

Merged · 20 commits · Nov 9, 2023

Commits
82c6e76  Fix PT023 pytest-fixture-incorrect-parentheses-style (brianhelba, Nov 3, 2023)
bb89d47  Fix RSE102 Unnecessary parentheses on raised exception (brianhelba, Nov 3, 2023)
9fe7f6c  Fix INP001 File is part of an implicit namespace package (brianhelba, Nov 3, 2023)
3fe3ccc  Fix PT001 Use `@pytest.fixture()` over `@pytest.fixture` (brianhelba, Nov 3, 2023)
15b7c7a  Fix TD004 Missing colon in TODO (brianhelba, Nov 3, 2023)
e30606f  Fix PT006 Wrong name(s) type in `@pytest.mark.parametrize`, expected … (brianhelba, Nov 3, 2023)
ea99701  Fix RUF005 Consider unpacking instead of concatenation (brianhelba, Nov 6, 2023)
b26e241  Fix RET504 Unnecessary assignment before `return` statement (brianhelba, Nov 6, 2023)
2f332f7  Fix SIM108 Use ternary operator instead of `if`-`else`-block (brianhelba, Nov 6, 2023)
759dd61  Fix C419 Unnecessary list comprehension (brianhelba, Nov 6, 2023)
23baa2e  Fix TRY201 Use `raise` without specifying exception name (brianhelba, Nov 6, 2023)
7189c77  Fix PERF102 When using only the values of a dict use the `values()` m… (brianhelba, Nov 6, 2023)
548d42f  Fix PIE808 Unnecessary `start` argument in `range` (brianhelba, Nov 6, 2023)
74d630b  Fix RUF010 Use explicit conversion flag (brianhelba, Nov 6, 2023)
7e8273c  Fix FLY002 Consider f-expression instead of string join (brianhelba, Nov 6, 2023)
9c66a59  Fix SIM110 Use `any` instead of `for` loop (brianhelba, Nov 6, 2023)
d922712  Fix PTH123 `open()` should be replaced by `Path.open()` (brianhelba, Nov 6, 2023)
727a533  Fix PT018 Assertion should be broken down into multiple parts (brianhelba, Nov 6, 2023)
9f1e45b  Fix PT012 `pytest.raises()` block should contain a single simple stat… (brianhelba, Nov 6, 2023)
ddc64a9  Merge remote-tracking branch 'origin/master' into ruff-fixes (brianhelba, Nov 9, 2023)
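The pytest-style fixes for PT006, PT018, and PT012 above do not appear in the visible hunks below. A minimal sketch of what each rule asks for (illustrative tests, not code from this PR; `compute_value` and `parse` are stand-in helpers so the sketch is self-contained):

import pytest


def compute_value() -> str:
    """Stand-in setup helper for the PT012 example."""
    return 'not-a-number'


def parse(value: str) -> int:
    """Stand-in function under test; int() raises ValueError here."""
    return int(value)


# PT006: pass parametrize argnames as a tuple of strings, not one
# comma-separated string (the exact expected form is configurable).
@pytest.mark.parametrize(('value', 'expected'), [(1, 2), (2, 4)])
def test_double(value, expected):
    assert value * 2 == expected


def test_split_assertion():
    result = {'active': True, 'email': 'dev@example.org'}
    # PT018: split a compound assertion so the failure report shows which
    # half was false.  Before: assert result['active'] and result['email']
    assert result['active']
    assert result['email']


def test_error():
    # PT012: keep a single simple statement inside pytest.raises() so the
    # raising line is unambiguous; do setup outside the block.
    value = compute_value()
    with pytest.raises(ValueError):
        parse(value)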
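The RUF005, PERF102, PIE808, RUF010, FLY002, and PTH123 fixes above are likewise outside the visible hunks. Before/after sketches of each rule (illustrative values only):

from pathlib import Path

first = ['a', 'b']
counts = {'a': 1, 'b': 2}

# RUF005: unpack instead of concatenating.
items = [*first, 'c']  # before: items = first + ['c']

# PERF102: iterate over values() when the keys are unused.
total = sum(count for count in counts.values())  # before: for _, count in counts.items()

# PIE808: range() already starts at 0.
indices = list(range(10))  # before: list(range(0, 10))

# RUF010: use an explicit conversion flag in f-strings.
message = f'got {items!r}'  # before: f'got {repr(items)}'

# FLY002: a join of a static list reads better as an f-string.
joined = f'{first[0]}/{first[1]}'  # before: '/'.join([first[0], first[1]])

# PTH123: prefer Path.open() over the open() builtin.
with Path('example.txt').open('w') as handle:  # before: open('example.txt', 'w')
    handle.write('hello')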
Empty file.
10 changes: 5 additions & 5 deletions dandiapi/analytics/tests/test_download_counts.py
@@ -11,12 +11,12 @@
 )


-@pytest.fixture
+@pytest.fixture()
 def s3_log_bucket():
     return create_s3_storage(settings.DANDI_DANDISETS_LOG_BUCKET_NAME).bucket_name


-@pytest.fixture
+@pytest.fixture()
 def s3_log_file(s3_log_bucket, asset_blob):
     embargoed = s3_log_bucket == settings.DANDI_DANDISETS_EMBARGO_LOG_BUCKET_NAME
     s3 = get_boto_client(get_storage() if not embargoed else get_embargo_storage())
@@ -48,7 +48,7 @@ def s3_log_file(s3_log_bucket, asset_blob):
     s3.delete_object(Bucket=s3_log_bucket, Key=log_file_name)


-@pytest.mark.django_db
+@pytest.mark.django_db()
 def test_processing_s3_log_files(s3_log_bucket, s3_log_file, asset_blob):
     collect_s3_log_records_task(s3_log_bucket)
     asset_blob.refresh_from_db()
@@ -57,7 +57,7 @@ def test_processing_s3_log_files(s3_log_bucket, s3_log_file, asset_blob):
     assert asset_blob.download_count == 1


-@pytest.mark.django_db
+@pytest.mark.django_db()
 def test_processing_s3_log_files_idempotent(s3_log_bucket, s3_log_file, asset_blob):
     # this tests that the outer task which collects the log files to process is
     # idempotent, in other words, it uses StartAfter correctly.
@@ -70,7 +70,7 @@ def test_processing_s3_log_files_idempotent(s3_log_bucket, s3_log_file, asset_blob):
     assert asset_blob.download_count == 1


-@pytest.mark.django_db
+@pytest.mark.django_db()
 def test_processing_s3_log_file_task_idempotent(s3_log_bucket, s3_log_file, asset_blob):
     # this tests that the inner task which processes a single log file is
     # idempotent, utilizing the unique constraint on ProcessedS3Log correctly.
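The idempotency these tests describe comes from S3's ListObjectsV2 `StartAfter` parameter. A rough sketch of that pattern (bucket, region, and checkpoint key are illustrative, not from this codebase):

import boto3

s3 = boto3.client('s3', region_name='us-east-1')  # region is illustrative


def iter_unprocessed_logs(bucket: str, last_processed_key: str = ''):
    """Yield log object keys lexically after the last key already processed."""
    paginator = s3.get_paginator('list_objects_v2')
    # StartAfter skips everything at or before the checkpoint, so re-running
    # the collection task does not double-count the same log files.
    for page in paginator.paginate(Bucket=bucket, StartAfter=last_processed_key):
        for obj in page.get('Contents', []):
            yield obj['Key']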
2 changes: 1 addition & 1 deletion dandiapi/api/__init__.py
@@ -1,3 +1,3 @@
 # flake8: noqa
-# TODO remove this after migration is complete
+# TODO: remove this after migration is complete
 import dandiapi.api.user_migration
4 changes: 1 addition & 3 deletions dandiapi/api/admin.py
@@ -166,9 +166,7 @@ def get_queryset(self, request: HttpRequest) -> QuerySet:

         # Using the `asset_count` property here results in N queries being made
         # for N versions. Instead, use annotation to make one query for N versions.
-        qs = qs.annotate(number_of_assets=Count('assets'))
-
-        return qs
+        return qs.annotate(number_of_assets=Count('assets'))

     def number_of_assets(self, obj):
         return obj.number_of_assets
2 changes: 1 addition & 1 deletion dandiapi/api/asset_paths.py
@@ -130,7 +130,7 @@ def insert_asset_paths(asset: Asset, version: Version):
         # If there are simultaneous requests to create the same asset, this check constraint can
         # fail, and should be handled directly, rather than be allowed to bubble up
         if 'unique-version-path' in str(e):
-            raise AssetAlreadyExists()
+            raise AssetAlreadyExists

         # Re-raise original exception otherwise
         raise
2 changes: 1 addition & 1 deletion dandiapi/api/checks.py
@@ -6,7 +6,7 @@

 @register()
 def check_doi_settings(app_configs, **kwargs):
-    any_doi_setting = any([setting is not None for setting, _ in DANDI_DOI_SETTINGS])
+    any_doi_setting = any(setting is not None for setting, _ in DANDI_DOI_SETTINGS)
     if not any_doi_setting:
         # If no DOI settings are defined, DOIs will not be created on publish.
         return []
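Dropping the list brackets here is more than cosmetic: with a generator argument, any() can short-circuit on the first non-None setting instead of materializing the whole list before testing it.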
10 changes: 5 additions & 5 deletions dandiapi/api/doi.py
@@ -15,7 +15,7 @@


 def doi_configured() -> bool:
-    return any([setting is not None for setting, _ in DANDI_DOI_SETTINGS])
+    return any(setting is not None for setting, _ in DANDI_DOI_SETTINGS)


 def _generate_doi_data(version: Version):
@@ -50,7 +50,7 @@ def create_doi(version: Version) -> str:
         logging.error(request_body)
         if e.response:
             logging.error(e.response.text)
-        raise e
+        raise
     return doi


@@ -69,10 +69,10 @@ def delete_doi(doi: str) -> None:
             return
         else:
             logging.error('Failed to fetch data for DOI %s', doi)
-            raise e
+            raise
     if r.json()['data']['attributes']['state'] == 'draft':
         try:
             s.delete(doi_url).raise_for_status()
-        except requests.exceptions.HTTPError as e:
+        except requests.exceptions.HTTPError:
             logging.error('Failed to delete DOI %s', doi)
-            raise e
+            raise
@@ -26,7 +26,7 @@ def migrate_published_version_metadata(dandiset: str, published_version: str, to
     except Exception as e:
         click.echo(f'Failed to migrate {dandiset}/{published_version}')
         click.echo(e)
-        raise click.Abort()
+        raise click.Abort

     if metadata == metanew:
         click.echo('No changes detected')
3 changes: 1 addition & 2 deletions dandiapi/api/manifests.py
@@ -18,8 +18,7 @@ def _s3_url(path: str) -> str:
     signed_url = storage.url(path)
     # Strip off the query parameters from the presigning, as they are different every time
     parsed = urlparse(signed_url)
-    s3_url = urlunparse((parsed[0], parsed[1], parsed[2], '', '', ''))
-    return s3_url
+    return urlunparse((parsed[0], parsed[1], parsed[2], '', '', ''))


 def _manifests_path(version: Version) -> str:
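For instance (made-up URL), the urlunparse call keeps only the scheme, host, and path, reducing a presigned URL to its stable part:

from urllib.parse import urlparse, urlunparse

signed = 'https://bucket.s3.amazonaws.com/manifest.json?X-Amz-Signature=abc123'
parsed = urlparse(signed)
# Empty params, query, and fragment drop the per-request signature.
assert urlunparse((parsed[0], parsed[1], parsed[2], '', '', '')) == (
    'https://bucket.s3.amazonaws.com/manifest.json'
)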
5 changes: 1 addition & 4 deletions dandiapi/api/migrations/0019_usermetadata.py
@@ -14,10 +14,7 @@

 def update_existing_users(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor):
     """Automatically approve any existing users."""
     for user in User.objects.all():
-        if user.is_active:
-            status = UserMetadata.Status.APPROVED
-        else:
-            status = UserMetadata.Status.REJECTED
+        status = UserMetadata.Status.APPROVED if user.is_active else UserMetadata.Status.REJECTED
         UserMetadata.objects.create(user=user, status=status)
3 changes: 1 addition & 2 deletions dandiapi/api/models/asset.py
@@ -88,8 +88,7 @@ def s3_url(self) -> str:
         signed_url = self.blob.url
         # Strip off the query parameters from the presigning, as they are different every time
         parsed = urlparse(signed_url)
-        s3_url = urlunparse((parsed[0], parsed[1], parsed[2], '', '', ''))
-        return s3_url
+        return urlunparse((parsed[0], parsed[1], parsed[2], '', '', ''))

     def __str__(self) -> str:
         return self.blob.name
4 changes: 2 additions & 2 deletions dandiapi/api/models/metadata.py
@@ -8,15 +8,15 @@ def published_by(self, now: datetime.datetime):
             'id': uuid4().urn,
             'name': 'DANDI publish',
             'startDate': now.isoformat(),
-            # TODO https://github.com/dandi/dandi-api/issues/465
+            # TODO: https://github.com/dandi/dandi-api/issues/465
             # endDate needs to be defined before publish is complete
             'endDate': now.isoformat(),
             'wasAssociatedWith': [
                 {
                     'id': uuid4().urn,
                     'identifier': 'RRID:SCR_017571',
                     'name': 'DANDI API',
-                    # TODO version the API
+                    # TODO: version the API
                     'version': '0.1.0',
                     'schemaKey': 'Software',
                 }
7 changes: 2 additions & 5 deletions dandiapi/api/models/oauth.py
@@ -28,11 +28,8 @@ def clean(self):
         except ValidationError as e:
             # don't validate URLs so we can use wildcards too
             if 'Enter a valid URL.' not in str(e):
-                raise e
+                raise

     def redirect_uri_allowed(self, uri):
         """Check whether or not `uri` is a valid redirect_uri using wildcard matching."""
-        for allowed_uri in self.redirect_uris.split():
-            if fnmatch(uri, allowed_uri):
-                return True
-        return False
+        return any(fnmatch(uri, allowed_uri) for allowed_uri in self.redirect_uris.split())
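As a usage sketch (illustrative URIs, not from this application's configuration), the fnmatch-based check lets one registered wildcard URI match varying ports or paths:

from fnmatch import fnmatch

allowed = 'http://localhost:*/callback'
# fnmatch translates '*' to '.*', so it also matches across ':' and '/'.
assert fnmatch('http://localhost:8080/callback', allowed)
assert not fnmatch('https://example.com/callback', allowed)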
5 changes: 1 addition & 4 deletions dandiapi/api/models/version.py
@@ -133,10 +133,7 @@ def next_published_version(cls, dandiset: Dandiset) -> str:
     def citation(cls, metadata):
         year = datetime.datetime.now().year
         name = metadata['name'].rstrip('.')
-        if 'doi' in metadata:
-            url = f'https://doi.org/{metadata["doi"]}'
-        else:
-            url = metadata['url']
+        url = f'https://doi.org/{metadata["doi"]}' if 'doi' in metadata else metadata['url']
         version = metadata['version']
         # If we can't find any contributors, use this citation format
         citation = f'{name} ({year}). (Version {version}) [Data set]. DANDI archive. {url}'
18 changes: 9 additions & 9 deletions dandiapi/api/services/asset/__init__.py
@@ -58,9 +58,9 @@ def change_asset(
     assert 'path' in new_metadata, 'Path must be present in new_metadata'

     if not user.has_perm('owner', version.dandiset):
-        raise DandisetOwnerRequired()
+        raise DandisetOwnerRequired
     elif version.version != 'draft':
-        raise DraftDandisetNotModifiable()
+        raise DraftDandisetNotModifiable

     path = new_metadata['path']
     new_metadata_stripped = Asset.strip_metadata(new_metadata)
@@ -75,7 +75,7 @@

     # Verify we aren't changing path to the same value as an existing asset
     if version.assets.filter(path=path).exclude(asset_id=asset.asset_id).exists():
-        raise AssetAlreadyExists()
+        raise AssetAlreadyExists

     with transaction.atomic():
         remove_asset_from_version(user=user, asset=asset, version=version)
@@ -109,14 +109,14 @@ def add_asset_to_version(
     assert 'path' in metadata, 'Path must be present in metadata'

     if not user.has_perm('owner', version.dandiset):
-        raise DandisetOwnerRequired()
+        raise DandisetOwnerRequired
     elif version.version != 'draft':
-        raise DraftDandisetNotModifiable()
+        raise DraftDandisetNotModifiable

     # Check if there are already any assets with the same path
     path = metadata['path']
     if version.assets.filter(path=path).exists():
-        raise AssetAlreadyExists()
+        raise AssetAlreadyExists

     # Check if there are any assets that conflict with this path
     conflicts = get_conflicting_paths(path, version)
@@ -125,7 +125,7 @@

     # Ensure zarr archive doesn't already belong to a dandiset
     if zarr_archive and zarr_archive.dandiset != version.dandiset:
-        raise ZarrArchiveBelongsToDifferentDandiset()
+        raise ZarrArchiveBelongsToDifferentDandiset

     if isinstance(asset_blob, EmbargoedAssetBlob):
         embargoed_asset_blob = asset_blob
@@ -155,9 +155,9 @@

 def remove_asset_from_version(*, user, asset: Asset, version: Version) -> Version:
     if not user.has_perm('owner', version.dandiset):
-        raise DandisetOwnerRequired()
+        raise DandisetOwnerRequired
     elif version.version != 'draft':
-        raise DraftDandisetNotModifiable()
+        raise DraftDandisetNotModifiable

     with transaction.atomic():
         # Remove asset paths and asset itself from version
4 changes: 2 additions & 2 deletions dandiapi/api/services/embargo/__init__.py
@@ -13,7 +13,7 @@
 def _unembargo_asset(asset: Asset):
     """Unembargo an asset by copying its blob to the public bucket."""
     if asset.embargoed_blob is None:
-        raise AssetNotEmbargoed()
+        raise AssetNotEmbargoed

     # Use existing AssetBlob if possible
     etag = asset.embargoed_blob.etag
@@ -75,6 +75,6 @@ def unembargo_dandiset(*, user: User, dandiset: Dandiset):
         raise DandisetNotEmbargoed

     if not user.has_perm('owner', dandiset):
-        raise DandisetOwnerRequired()
+        raise DandisetOwnerRequired

     unembargo_dandiset_task.delay(dandiset.id)
6 changes: 3 additions & 3 deletions dandiapi/api/services/metadata/__init__.py
@@ -38,7 +38,7 @@ def validate_asset_metadata(*, asset: Asset) -> bool:

     # Published assets are immutable
     if asset.published:
-        raise AssetHasBeenPublished()
+        raise AssetHasBeenPublished

     # track the state of the asset before to use optimistic locking
     asset_state = asset.status
@@ -76,7 +76,7 @@

 def version_aggregate_assets_summary(version: Version) -> None:
     if version.version != 'draft':
-        raise VersionHasBeenPublished()
+        raise VersionHasBeenPublished

     version.metadata['assetsSummary'] = aggregate_assets_summary(
         asset.full_metadata
@@ -117,7 +117,7 @@ def _build_validatable_version_metadata(version: Version) -> dict:

     # Published versions are immutable
     if version.version != 'draft':
-        raise VersionHasBeenPublished()
+        raise VersionHasBeenPublished

     with transaction.atomic():
         # validating version metadata needs to lock the version to avoid racing with
14 changes: 7 additions & 7 deletions dandiapi/api/services/publish/__init__.py
@@ -45,7 +45,7 @@ def _lock_dandiset_for_publishing(*, user: User, dandiset: Dandiset) -> None:
         not user.has_perm('owner', dandiset)
         or dandiset.embargo_status != Dandiset.EmbargoStatus.OPEN
     ):
-        raise NotAllowed()
+        raise NotAllowed
     if dandiset.zarr_archives.exists() or dandiset.embargoed_zarr_archives.exists():
         # TODO: return a string instead of a list here
         raise NotAllowed(['Cannot publish dandisets which contain zarrs'], 400)  # type: ignore
@@ -55,17 +55,17 @@
     if not draft_version.publishable:
         match draft_version.status:
             case Version.Status.PUBLISHED:
-                raise DandisetAlreadyPublished()
+                raise DandisetAlreadyPublished
             case Version.Status.PUBLISHING:
-                raise DandisetAlreadyPublishing()
+                raise DandisetAlreadyPublishing
             case Version.Status.VALIDATING:
-                raise DandisetBeingValidated()
+                raise DandisetBeingValidated
             case Version.Status.INVALID:
-                raise DandisetInvalidMetadata()
+                raise DandisetInvalidMetadata
             case Version.Status.PENDING:
-                raise DandisetValidationPending()
+                raise DandisetValidationPending
             case Version.Status.VALID:
-                raise DandisetInvalidMetadata()
+                raise DandisetInvalidMetadata
             case other:
                 raise NotImplementedError(
                     f'Draft version of dandiset {dandiset.identifier} '
4 changes: 1 addition & 3 deletions dandiapi/api/services/version/metadata.py
@@ -41,6 +41,4 @@ def _normalize_version_metadata(
     }
     # Run the version_metadata through the pydantic model to automatically include any boilerplate
     # like the access or repository fields
-    version_metadata = PydanticDandiset.unvalidated(**version_metadata).json_dict()
-
-    return version_metadata
+    return PydanticDandiset.unvalidated(**version_metadata).json_dict()
4 changes: 2 additions & 2 deletions dandiapi/api/storage.py
@@ -152,7 +152,7 @@ def generate_presigned_put_object_url(self, blob_name: str, base64md5: str) -> str:
                 'ACL': 'bucket-owner-full-control',
                 'ContentMD5': base64md5,
             },
-            ExpiresIn=600,  # TODO proper expiration
+            ExpiresIn=600,  # TODO: proper expiration
         )

     def generate_presigned_head_object_url(self, key: str) -> str:
@@ -211,7 +211,7 @@ def generate_presigned_put_object_url(self, blob_name: str, _: str) -> str:
         return self.base_url_client.presigned_put_object(
             bucket_name=self.bucket_name,
             object_name=blob_name,
-            expires=timedelta(seconds=600),  # TODO proper expiration
+            expires=timedelta(seconds=600),  # TODO: proper expiration
         )

     def generate_presigned_head_object_url(self, key: str) -> str:
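Since the first backend signs the upload against a Content-MD5, a client consuming one of these presigned PUT URLs has to send the matching header. A sketch of that client side (URL and payload are placeholders; the x-amz-acl header is an assumption based on the ACL being included in the signed Params):

import base64
import hashlib

import requests

payload = b'example blob contents'
base64md5 = base64.b64encode(hashlib.md5(payload).digest()).decode()

# In practice this URL would come from generate_presigned_put_object_url(...).
presigned_url = 'https://bucket.s3.amazonaws.com/blobs/example?X-Amz-Signature=...'
response = requests.put(
    presigned_url,
    data=payload,
    headers={
        # Must match the ContentMD5 the URL was signed with, or S3 rejects the PUT.
        'Content-MD5': base64md5,
        # Assumed: signed request parameters must be echoed as headers.
        'x-amz-acl': 'bucket-owner-full-control',
    },
)
response.raise_for_status()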