Skip to content

Commit

Permalink
Merge pull request #1748 from dandi/ruff-fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
brianhelba authored Nov 29, 2023
2 parents e45e3ed + 66861ca commit 366a9a3
Show file tree
Hide file tree
Showing 17 changed files with 65 additions and 61 deletions.
16 changes: 8 additions & 8 deletions dandiapi/analytics/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,6 @@


class ProcessedS3Log(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(
fields=['name', 'embargoed'],
name='%(app_label)s_%(class)s_unique_name_embargoed',
)
]

name = models.CharField(
max_length=36,
validators=[
Expand All @@ -20,3 +12,11 @@ class Meta:
)
# This is necessary to determine which bucket the logfile corresponds to
embargoed = models.BooleanField()

class Meta:
constraints = [
models.UniqueConstraint(
fields=['name', 'embargoed'],
name='%(app_label)s_%(class)s_unique_name_embargoed',
)
]
5 changes: 2 additions & 3 deletions dandiapi/api/asset_paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ def insert_asset_paths(asset: Asset, version: Version):
# If there are simultaneous requests to create the same asset, this check constraint can
# fail, and should be handled directly, rather than be allowed to bubble up
if 'unique-version-path' in str(e):
raise AssetAlreadyExistsError
raise AssetAlreadyExistsError from e

# Re-raise original exception otherwise
raise
Expand Down Expand Up @@ -240,7 +240,7 @@ def update_asset_paths(old_asset: Asset, new_asset: Asset, version: Version):
@transaction.atomic
def add_version_asset_paths(version: Version):
"""Add every asset from a version."""
print('\t Leaves...')
# Leaves
for asset in tqdm(version.assets.all()):
insert_asset_paths(asset, version)

Expand All @@ -249,7 +249,6 @@ def add_version_asset_paths(version: Version):
# https://stackoverflow.com/a/60221875

# Get all nodes which haven't been computed yet
print('\t Nodes...')
nodes = AssetPath.objects.filter(version=version, aggregate_files=0)
for node in tqdm(nodes):
child_link_ids = node.child_links.distinct('child_id').values_list('child_id', flat=True)
Expand Down
10 changes: 5 additions & 5 deletions dandiapi/api/doi.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,10 @@ def create_doi(version: Version) -> str:
timeout=30,
).raise_for_status()
except requests.exceptions.HTTPError as e:
logging.error('Failed to create DOI %s', doi)
logging.error(request_body)
logging.exception('Failed to create DOI %s', doi)
logging.exception(request_body)
if e.response:
logging.error(e.response.text)
logging.exception(e.response.text)
raise
return doi

Expand All @@ -69,11 +69,11 @@ def delete_doi(doi: str) -> None:
logging.warning('Tried to get data for nonexistent DOI %s', doi)
return
else:
logging.error('Failed to fetch data for DOI %s', doi)
logging.exception('Failed to fetch data for DOI %s', doi)
raise
if r.json()['data']['attributes']['state'] == 'draft':
try:
s.delete(doi_url).raise_for_status()
except requests.exceptions.HTTPError:
logging.error('Failed to delete DOI %s', doi)
logging.exception('Failed to delete DOI %s', doi)
raise
4 changes: 2 additions & 2 deletions dandiapi/api/management/commands/ingest_asset_paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@
@click.command()
def ingest_asset_paths():
    """(Re)compute the AssetPath table for every Version in the database.

    Iterates versions lazily (``.iterator()`` avoids loading the whole queryset
    into memory) and reports per-version progress via ``click.echo``, which is
    the idiomatic output channel inside a click command (respects redirection
    and testing capture, unlike bare ``print``).
    """
    for version in Version.objects.iterator():
        click.echo(f'Version: {version}')
        # NOTE(review): assets.count() issues one extra COUNT query per
        # version; acceptable for a one-off management command.
        click.echo(f'\t {version.assets.count()} assets')
        add_version_asset_paths(version)
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def migrate_published_version_metadata(dandiset: str, published_version: str, to
except Exception as e:
click.echo(f'Failed to migrate {dandiset}/{published_version}')
click.echo(e)
raise click.Abort
raise click.Abort from e

if metadata == metanew:
click.echo('No changes detected')
Expand Down
8 changes: 4 additions & 4 deletions dandiapi/api/management/commands/migrate_version_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,17 +7,17 @@
@click.command()
@click.argument('to_version')
def migrate_version_metadata(to_version: str):
print(f'Migrating all version metadata to version {to_version}')
click.echo(f'Migrating all version metadata to version {to_version}')
for version in Version.objects.filter(version='draft'):
print(f'Migrating {version.dandiset.identifier}/{version.version}')
click.echo(f'Migrating {version.dandiset.identifier}/{version.version}')

metadata = version.metadata

try:
metanew = migrate(metadata, to_version=to_version, skip_validation=True)
except Exception as e:
print(f'Failed to migrate {version.dandiset.identifier}/{version.version}')
print(e)
click.echo(f'Failed to migrate {version.dandiset.identifier}/{version.version}')
click.echo(e)
continue

if version.metadata != metanew:
Expand Down
8 changes: 4 additions & 4 deletions dandiapi/api/models/upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,6 @@
class BaseUpload(models.Model):
ETAG_REGEX = r'[0-9a-f]{32}(-[1-9][0-9]*)?'

class Meta:
indexes = [models.Index(fields=['etag'])]
abstract = True

created = CreationDateTimeField()

# This is the key used to generate the object key, and the primary identifier for the upload.
Expand All @@ -41,6 +37,10 @@ class Meta:
multipart_upload_id = models.CharField(max_length=128, unique=True, db_index=True)
size = models.PositiveBigIntegerField()

class Meta:
indexes = [models.Index(fields=['etag'])]
abstract = True

@staticmethod
@abstractmethod
def object_key(upload_id, *, dandiset: Dandiset): # noqa: N805
Expand Down
2 changes: 1 addition & 1 deletion dandiapi/api/models/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ def next_published_version(cls, dandiset: Dandiset) -> str:

@classmethod
def citation(cls, metadata):
year = datetime.datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
name = metadata['name'].rstrip('.')
url = f'https://doi.org/{metadata["doi"]}' if 'doi' in metadata else metadata['url']
version = metadata['version']
Expand Down
4 changes: 2 additions & 2 deletions dandiapi/api/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@ class ChecksumCalculatorFile:
def __init__(self):
self.h = hashlib.sha256()

def write(self, data: bytes) -> None:
    """Feed *data* into the running SHA-256 digest.

    Implements the file-like ``write`` protocol so this object can be passed
    anywhere a writable binary stream is expected; the parameter is named
    ``data`` rather than ``bytes`` to avoid shadowing the builtin type.
    """
    self.h.update(data)

@property
def checksum(self):
Expand Down
4 changes: 3 additions & 1 deletion dandiapi/api/tests/factories.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,9 @@ def extra_data(self):
# Supply a fake created date at least 1 year before now
created = (
faker.Faker()
.date_time_between(end_date=datetime.datetime.now() - datetime.timedelta(days=365))
.date_time_between(
end_date=datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=365)
)
.isoformat()
)

Expand Down
10 changes: 5 additions & 5 deletions dandiapi/api/tests/test_dandiset.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from datetime import datetime
import datetime

from django.conf import settings
from django.contrib.auth.models import AnonymousUser
Expand Down Expand Up @@ -357,7 +357,7 @@ def test_dandiset_rest_create(api_client, user):
assert dandiset.draft_version.status == Version.Status.PENDING

# Verify that computed metadata was injected
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
url = f'{settings.DANDI_WEB_APP_URL}/dandiset/{dandiset.identifier}/draft'
assert dandiset.draft_version.metadata == {
**metadata,
Expand Down Expand Up @@ -450,7 +450,7 @@ def test_dandiset_rest_create_with_identifier(api_client, admin_user):
assert dandiset.draft_version.status == Version.Status.PENDING

# Verify that computed metadata was injected
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
url = f'{settings.DANDI_WEB_APP_URL}/dandiset/{dandiset.identifier}/draft'
assert dandiset.draft_version.metadata == {
**metadata,
Expand Down Expand Up @@ -557,7 +557,7 @@ def test_dandiset_rest_create_with_contributor(api_client, admin_user):
assert dandiset.draft_version.status == Version.Status.PENDING

# Verify that computed metadata was injected
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
url = f'{settings.DANDI_WEB_APP_URL}/dandiset/{dandiset.identifier}/draft'
assert dandiset.draft_version.metadata == {
**metadata,
Expand Down Expand Up @@ -647,7 +647,7 @@ def test_dandiset_rest_create_embargoed(api_client, user):
assert dandiset.draft_version.status == Version.Status.PENDING

# Verify that computed metadata was injected
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
url = f'{settings.DANDI_WEB_APP_URL}/dandiset/{dandiset.identifier}/draft'
assert dandiset.draft_version.metadata == {
**metadata,
Expand Down
16 changes: 8 additions & 8 deletions dandiapi/api/tests/test_version.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from __future__ import annotations

from datetime import datetime
import datetime
from time import sleep
from typing import TYPE_CHECKING

Expand Down Expand Up @@ -138,7 +138,7 @@ def test_published_version_metadata_computed(published_version: Version):
@pytest.mark.django_db()
def test_version_metadata_citation_draft(draft_version):
name = draft_version.metadata['name'].rstrip('.')
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
url = f'{settings.DANDI_WEB_APP_URL}/dandiset/{draft_version.dandiset.identifier}/{draft_version.version}' # noqa: E501
assert (
draft_version.metadata['citation']
Expand All @@ -149,7 +149,7 @@ def test_version_metadata_citation_draft(draft_version):
@pytest.mark.django_db()
def test_version_metadata_citation_published(published_version):
name = published_version.metadata['name'].rstrip('.')
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
url = f'https://doi.org/{published_version.doi}'
assert (
published_version.metadata['citation']
Expand All @@ -163,7 +163,7 @@ def test_version_metadata_citation_no_contributors(version):
version.save()

name = version.metadata['name'].rstrip('.')
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
assert version.metadata['citation'].startswith(
f'{name} ({year}). (Version {version.version}) [Data set]. DANDI archive. '
)
Expand All @@ -178,7 +178,7 @@ def test_version_metadata_citation_contributor_not_in_citation(version):
version.save()

name = version.metadata['name'].rstrip('.')
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
assert version.metadata['citation'].startswith(
f'{name} ({year}). (Version {version.version}) [Data set]. DANDI archive. '
)
Expand All @@ -190,7 +190,7 @@ def test_version_metadata_citation_contributor(version):
version.save()

name = version.metadata['name'].rstrip('.')
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
assert version.metadata['citation'].startswith(
f'Doe, Jane ({year}) {name} (Version {version.version}) [Data set]. DANDI archive. '
)
Expand All @@ -205,7 +205,7 @@ def test_version_metadata_citation_multiple_contributors(version):
version.save()

name = version.metadata['name'].rstrip('.')
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
assert version.metadata['citation'].startswith(
f'John Doe; Jane Doe ({year}) {name} (Version {version.version}) [Data set]. '
f'DANDI archive. '
Expand Down Expand Up @@ -507,7 +507,7 @@ def test_version_rest_update(api_client, user, draft_version):
# This should be stripped out
'dateCreated': 'foobar',
}
year = datetime.now().year
year = datetime.datetime.now(datetime.UTC).year
url = f'{settings.DANDI_WEB_APP_URL}/dandiset/{draft_version.dandiset.identifier}/draft'
saved_metadata = {
**new_metadata,
Expand Down
4 changes: 2 additions & 2 deletions dandiapi/api/views/auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,8 +147,8 @@ def user_questionnaire_form_view(request: HttpRequest) -> HttpResponse:
try:
# questions to display in the form
questions = json.loads(request.GET.get('QUESTIONS'))
except (JSONDecodeError, TypeError):
raise Http404
except (JSONDecodeError, TypeError) as e:
raise Http404 from e

return render(
request,
Expand Down
19 changes: 11 additions & 8 deletions dandiapi/api/views/dandiset.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,8 +146,8 @@ def get_object(self):
lookup_url = self.kwargs[self.lookup_url_kwarg]
try:
lookup_value = int(lookup_url)
except ValueError:
raise Http404('Not a valid identifier.')
except ValueError as e:
raise Http404('Not a valid identifier.') from e
self.kwargs[self.lookup_url_kwarg] = lookup_value

dandiset = super().get_object()
Expand Down Expand Up @@ -402,16 +402,19 @@ def users(self, request, dandiset__pk):
send_ownership_change_emails(dandiset, removed_owners, added_owners)

owners = []
for owner in dandiset.owners:
for owner_user in dandiset.owners:
try:
owner = SocialAccount.objects.get(user=owner)
owner_dict = {'username': owner.extra_data['login']}
if 'name' in owner.extra_data:
owner_dict['name'] = owner.extra_data['name']
owner_account = SocialAccount.objects.get(user=owner_user)
owner_dict = {'username': owner_account.extra_data['login']}
if 'name' in owner_account.extra_data:
owner_dict['name'] = owner_account.extra_data['name']
owners.append(owner_dict)
except SocialAccount.DoesNotExist:
# Just in case some users aren't using social accounts, have a fallback
owners.append(
{'username': owner.username, 'name': f'{owner.first_name} {owner.last_name}'}
{
'username': owner_user.username,
'name': f'{owner_user.first_name} {owner_user.last_name}',
}
)
return Response(owners, status=status.HTTP_200_OK)
6 changes: 3 additions & 3 deletions dandiapi/api/views/upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ def upload_complete_view(request: Request, upload_id: str) -> HttpResponseBase:
except Upload.DoesNotExist:
upload = get_object_or_404(EmbargoedUpload, upload_id=upload_id)
if not request.user.has_perm('owner', upload.dandiset):
raise Http404
raise Http404 from None

completion = TransferredParts(
object_key=upload.blob.name,
Expand Down Expand Up @@ -237,7 +237,7 @@ def upload_validate_view(request: Request, upload_id: str) -> HttpResponseBase:
except Upload.DoesNotExist:
upload = get_object_or_404(EmbargoedUpload, upload_id=upload_id)
if not request.user.has_perm('owner', upload.dandiset):
raise Http404
raise Http404 from None

# Verify that the upload was successful
if not upload.object_key_exists():
Expand Down Expand Up @@ -275,7 +275,7 @@ def upload_validate_view(request: Request, upload_id: str) -> HttpResponseBase:
asset_blob = upload.to_asset_blob()
asset_blob.save()
else:
raise ValueError(f'Unknown Upload type {type(upload)}')
raise ValueError(f'Unknown Upload type {type(upload)}') from None

# Clean up the upload
upload.delete()
Expand Down
4 changes: 2 additions & 2 deletions dandiapi/search/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,13 @@ def visible_to(self, user: User) -> models.QuerySet[AssetSearch]:


class AssetSearch(models.Model):
objects = AssetSearchManager()

dandiset_id = models.PositiveBigIntegerField()
asset_id = models.PositiveBigIntegerField(primary_key=True)
asset_metadata = models.JSONField()
asset_size = models.PositiveBigIntegerField()

objects = AssetSearchManager()

class Meta:
managed = False
db_table = 'asset_search'
Expand Down
4 changes: 2 additions & 2 deletions dandiapi/zarr/views/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,8 +138,8 @@ def create(self, request):
zarr_archive: ZarrArchive = ZarrArchive(name=name, dandiset=dandiset)
try:
zarr_archive.save()
except IntegrityError:
raise ValidationError('Zarr already exists')
except IntegrityError as e:
raise ValidationError('Zarr already exists') from e

serializer = ZarrSerializer(instance=zarr_archive)
return Response(serializer.data, status=status.HTTP_200_OK)
Expand Down

0 comments on commit 366a9a3

Please sign in to comment.