From ba6b1476b5e42670acbebd7177a99723af2c3be7 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sun, 3 Mar 2024 14:34:07 +0100 Subject: [PATCH 01/35] Move video upload workflow to celery --- backend/api/schedule/types/schedule_item.py | 15 +- backend/conferences/admin.py | 2 +- ...nce_video_description_template_and_more.py | 23 ++ backend/conferences/models/conference.py | 9 + backend/google_api/admin.py | 10 +- backend/google_api/models.py | 12 +- backend/google_api/sdk.py | 32 +-- backend/pdm.lock | 40 +++- backend/pycon/celery.py | 2 + backend/pyproject.toml | 1 + backend/schedule/admin.py | 50 +++- .../0052_scheduleitemsentforvideoupload.py | 33 +++ ...eitemsentforvideoupload_last_attempt_at.py | 18 ++ backend/schedule/models.py | 62 +++++ backend/schedule/tasks.py | 139 +++++++++++ backend/schedule/video_upload.py | 127 ++++++++++ backend/video_upload/__init__.py | 0 backend/video_upload/activities.py | 216 ------------------ backend/video_upload/workflows/__init__.py | 0 ...ch_multiple_schedule_items_video_upload.py | 28 --- .../delayed_upload_video_thumbnail.py | 51 ----- .../workflows/upload_schedule_item_video.py | 214 ----------------- backend/worker.py | 57 ----- temporal-config/development.yml | 6 - 24 files changed, 520 insertions(+), 627 deletions(-) create mode 100644 backend/conferences/migrations/0042_conference_video_description_template_and_more.py create mode 100644 backend/schedule/migrations/0052_scheduleitemsentforvideoupload.py create mode 100644 backend/schedule/migrations/0053_scheduleitemsentforvideoupload_last_attempt_at.py create mode 100644 backend/schedule/video_upload.py delete mode 100644 backend/video_upload/__init__.py delete mode 100644 backend/video_upload/activities.py delete mode 100644 backend/video_upload/workflows/__init__.py delete mode 100644 backend/video_upload/workflows/batch_multiple_schedule_items_video_upload.py delete mode 100644 backend/video_upload/workflows/delayed_upload_video_thumbnail.py delete mode 100644 backend/video_upload/workflows/upload_schedule_item_video.py delete mode 100644 backend/worker.py delete mode 100644 temporal-config/development.yml diff --git a/backend/api/schedule/types/schedule_item.py b/backend/api/schedule/types/schedule_item.py index 7fd90c5a25..b31bfac69c 100644 --- a/backend/api/schedule/types/schedule_item.py +++ b/backend/api/schedule/types/schedule_item.py @@ -34,19 +34,8 @@ class ScheduleItem: ] | None youtube_video_id: str | None - @strawberry.field - def abstract(self) -> str: - if self.submission_id: - return self.submission.abstract.localize(self.language.code) - - return "" - - @strawberry.field - def elevator_pitch(self) -> str: - if self.submission_id: - return self.submission.elevator_pitch.localize(self.language.code) - - return "" + abstract: str + elevator_pitch: str @strawberry.field def has_limited_capacity(self) -> bool: diff --git a/backend/conferences/admin.py b/backend/conferences/admin.py index 360e1cf62f..a5ee62dac8 100644 --- a/backend/conferences/admin.py +++ b/backend/conferences/admin.py @@ -178,7 +178,7 @@ class ConferenceAdmin(OrderedInlineModelAdminMixin, admin.ModelAdmin): ) }, ), - ("YouTube", {"fields": ("youtube_video_bottom_text",)}), + ("YouTube", {"fields": ("video_title_template", "video_description_template")}), ) inlines = [DeadlineInline, DurationInline, SponsorLevelInline, IncludedEventInline] diff --git a/backend/conferences/migrations/0042_conference_video_description_template_and_more.py 
b/backend/conferences/migrations/0042_conference_video_description_template_and_more.py new file mode 100644 index 0000000000..468294b144 --- /dev/null +++ b/backend/conferences/migrations/0042_conference_video_description_template_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.7 on 2024-03-03 11:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('conferences', '0041_remove_conference_visa_application_form_link'), + ] + + operations = [ + migrations.AddField( + model_name='conference', + name='video_description_template', + field=models.TextField(blank=True, default=''), + ), + migrations.AddField( + model_name='conference', + name='video_title_template', + field=models.TextField(blank=True, default=''), + ), + ] diff --git a/backend/conferences/models/conference.py b/backend/conferences/models/conference.py index a6ebacdb2d..8d8e648a5d 100644 --- a/backend/conferences/models/conference.py +++ b/backend/conferences/models/conference.py @@ -133,6 +133,15 @@ class Conference(GeoLocalizedModel, TimeFramedModel, TimeStampedModel): default=None, ) + video_title_template = models.TextField( + default="", + blank=True, + ) + video_description_template = models.TextField( + default="", + blank=True, + ) + youtube_video_bottom_text = models.TextField( default="", blank=True, diff --git a/backend/google_api/admin.py b/backend/google_api/admin.py index f0fb7e6f97..73abdbaa0a 100644 --- a/backend/google_api/admin.py +++ b/backend/google_api/admin.py @@ -71,8 +71,6 @@ def build_google_flow(self, request, obj, *, state=None): reverse("admin:google-api-oauth-callback") ) - # flow.redirect_uri = flow.redirect_uri + f"?obj_id={obj.id}" - return flow def auth_callback(self, request): @@ -115,15 +113,15 @@ def google_oauth_obj_id_for_state(self, state): return f"google_api:data:{state}" def get_urls(self): - return super().get_urls() + [ + return [ path( - "/auth", + "/auth/", self.admin_site.admin_view(self.auth), name="google-api-oauth-auth", ), path( - "auth-callback", + "auth-callback/", self.admin_site.admin_view(self.auth_callback), name="google-api-oauth-callback", ), - ] + ] + super().get_urls() diff --git a/backend/google_api/models.py b/backend/google_api/models.py index 6f729e11a4..d847ebd06e 100644 --- a/backend/google_api/models.py +++ b/backend/google_api/models.py @@ -10,8 +10,8 @@ class GoogleCloudOAuthCredentialQuerySet(models.QuerySet): - async def get_by_client_id(self, client_id: str) -> "GoogleCloudOAuthCredential": - return await self.filter(client_id=client_id).afirst() + def get_by_client_id(self, client_id: str) -> "GoogleCloudOAuthCredential": + return self.filter(client_id=client_id).first() def with_quota_left(self, service: str): midnight_pacific_time = ( @@ -56,11 +56,11 @@ class GoogleCloudOAuthCredential(models.Model): objects = GoogleCloudOAuthCredentialQuerySet.as_manager() @staticmethod - async def get_available_credentials_token( + def get_available_credentials_token( service: str, min_quota: int ) -> Optional["GoogleCloudToken"]: credential = ( - await GoogleCloudOAuthCredential.objects.with_quota_left(service) + GoogleCloudOAuthCredential.objects.with_quota_left(service) .annotate( has_token=models.Exists( GoogleCloudToken.objects.filter( @@ -75,9 +75,9 @@ async def get_available_credentials_token( }, ) .order_by(f"{service}_quota_left") - .afirst() + .first() ) - return (await credential.googlecloudtoken_set.afirst()) if credential else None + return credential.googlecloudtoken_set.first() if 
credential else None class Meta: verbose_name = "Google Cloud OAuth Credential" diff --git a/backend/google_api/sdk.py b/backend/google_api/sdk.py index 8a642fa818..3dd4da392e 100644 --- a/backend/google_api/sdk.py +++ b/backend/google_api/sdk.py @@ -8,8 +8,8 @@ GOOGLE_CLOUD_SCOPES = ["https://www.googleapis.com/auth/youtube"] -async def get_available_credentials(service, min_quota): - token = await GoogleCloudOAuthCredential.get_available_credentials_token( +def get_available_credentials(service, min_quota): + token = GoogleCloudOAuthCredential.get_available_credentials_token( service=service, min_quota=min_quota ) return Credentials.from_authorized_user_info( @@ -25,36 +25,36 @@ async def get_available_credentials(service, min_quota): def count_quota(service: str, quota: int): - async def _add_quota(credentials): - credential_object = await GoogleCloudOAuthCredential.objects.get_by_client_id( + def _add_quota(credentials): + credential_object = GoogleCloudOAuthCredential.objects.get_by_client_id( credentials.client_id ) - await UsedRequestQuota.objects.acreate( + UsedRequestQuota.objects.create( credentials=credential_object, cost=quota, service=service, ) def wrapper(func): - if inspect.isasyncgenfunction(func): + if inspect.isgeneratorfunction(func): - async def wrapped(*args, **kwargs): - credentials = await get_available_credentials(service, quota) + def wrapped(*args, **kwargs): + credentials = get_available_credentials(service, quota) try: - async for value in func(*args, credentials=credentials, **kwargs): + for value in func(*args, credentials=credentials, **kwargs): yield value finally: - await _add_quota(credentials) + _add_quota(credentials) else: - async def wrapped(*args, **kwargs): - credentials = await get_available_credentials(service, quota) + def wrapped(*args, **kwargs): + credentials = get_available_credentials(service, quota) try: - ret_value = await func(*args, credentials=credentials, **kwargs) + ret_value = func(*args, credentials=credentials, **kwargs) finally: - await _add_quota(credentials) + _add_quota(credentials) return ret_value return wrapped @@ -63,7 +63,7 @@ async def wrapped(*args, **kwargs): @count_quota("youtube", 1600) -async def youtube_videos_insert( +def youtube_videos_insert( *, title: str, description: str, @@ -103,7 +103,7 @@ async def youtube_videos_insert( @count_quota("youtube", 50) -async def youtube_videos_set_thumbnail( +def youtube_videos_set_thumbnail( *, video_id: str, thumbnail_path: str, credentials: Credentials ): youtube = build("youtube", "v3", credentials=credentials) diff --git a/backend/pdm.lock b/backend/pdm.lock index f81d7b73f6..b6512c0143 100644 --- a/backend/pdm.lock +++ b/backend/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "lambda"] strategy = ["cross_platform"] lock_version = "4.4" -content_hash = "sha256:3b3ff616a54bd22aba45f7faab7c2cc2d1f60d1850ae8e6747848ede7fb587de" +content_hash = "sha256:d503642e635606fcc04a0ff2d0f32e85b8982dd343e91fbaee6b4e53bf2e0966" [[package]] name = "amqp" @@ -332,6 +332,21 @@ files = [ {file = "celery-5.3.6.tar.gz", hash = "sha256:870cc71d737c0200c397290d730344cc991d13a057534353d124c9380267aab9"}, ] +[[package]] +name = "celery-heimdall" +version = "1.0.0" +requires_python = ">=3.8,<4.0" +summary = "Helpful celery extensions." 
+dependencies = [ + "celery<6.0.0,>=5.2.7", + "importlib-metadata<=4.13", + "redis<5.0.0,>=4.3.4", +] +files = [ + {file = "celery_heimdall-1.0.0-py3-none-any.whl", hash = "sha256:cff237872334a74fb1f6a51e4206472d337e5865d851e308b6b55f6645da2327"}, + {file = "celery_heimdall-1.0.0.tar.gz", hash = "sha256:f357ea06cc588c702020375d9ba81d3dc7959ae2966b592123343d1bd0f5f925"}, +] + [[package]] name = "certifi" version = "2021.10.8" @@ -1233,6 +1248,19 @@ files = [ {file = "idna-2.8.tar.gz", hash = "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"}, ] +[[package]] +name = "importlib-metadata" +version = "4.13.0" +requires_python = ">=3.7" +summary = "Read metadata from Python packages" +dependencies = [ + "zipp>=0.5", +] +files = [ + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, +] + [[package]] name = "inflection" version = "0.5.1" @@ -2915,3 +2943,13 @@ files = [ {file = "xlwt-1.3.0-py2.py3-none-any.whl", hash = "sha256:a082260524678ba48a297d922cc385f58278b8aa68741596a87de01a9c628b2e"}, {file = "xlwt-1.3.0.tar.gz", hash = "sha256:c59912717a9b28f1a3c2a98fd60741014b06b043936dcecbc113eaaada156c88"}, ] + +[[package]] +name = "zipp" +version = "3.17.0" +requires_python = ">=3.8" +summary = "Backport of pathlib-compatible object wrapper for zip files" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] diff --git a/backend/pycon/celery.py b/backend/pycon/celery.py index ff2de2e811..e3fac81821 100644 --- a/backend/pycon/celery.py +++ b/backend/pycon/celery.py @@ -22,9 +22,11 @@ def setup_periodic_tasks(sender, **kwargs): from association_membership.tasks import ( check_association_membership_subscriptions, ) + from schedule.tasks import process_schedule_items_videos_to_upload add = sender.add_periodic_task add(timedelta(minutes=5), check_association_membership_subscriptions) + add(timedelta(minutes=10), process_schedule_items_videos_to_upload) except Exception: logger.exception("setup_periodic_tasks") diff --git a/backend/pyproject.toml b/backend/pyproject.toml index ee756afcf2..fafb6ef233 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -98,6 +98,7 @@ dependencies = [ "wagtail-localize==1.5.2", "celery>=5.3.6", "wagtail-headless-preview>=0.7.0", + "celery-heimdall>=1.0.0", ] name = "backend" version = "0.1.0" diff --git a/backend/schedule/admin.py b/backend/schedule/admin.py index 31dc493447..6883e562f9 100644 --- a/backend/schedule/admin.py +++ b/backend/schedule/admin.py @@ -1,3 +1,4 @@ +from django.db import transaction from custom_admin.admin import validate_single_conference_selection from import_export.resources import ModelResource from django.db.models import Prefetch @@ -22,15 +23,12 @@ from conferences.models import SpeakerVoucher from pretix import user_has_admission_ticket from schedule.tasks import ( + process_schedule_items_videos_to_upload, send_schedule_invitation_email, send_speaker_communication_email, send_submission_time_slot_changed_email, ) from users.admin_mixins import ConferencePermissionMixin -from video_upload.workflows.batch_multiple_schedule_items_video_upload import ( - BatchMultipleScheduleItemsVideoUpload, -) 
-from temporal.sdk import start_workflow from schedule.forms import EmailSpeakersForm from .models import ( @@ -41,6 +39,7 @@ ScheduleItemAdditionalSpeaker, ScheduleItemAttendee, ScheduleItemInvitation, + ScheduleItemSentForVideoUpload, Slot, ) @@ -205,16 +204,18 @@ def upload_videos_to_youtube(modeladmin, request, queryset): videos = queryset.filter(youtube_video_id__exact="").exclude( video_uploaded_path__exact="" ) - conference_id = queryset.first().conference_id - start_workflow( - workflow=BatchMultipleScheduleItemsVideoUpload.run, - id=f"batch-upload-video-conference-{conference_id}", - task_queue="default", - arg=BatchMultipleScheduleItemsVideoUpload.input( - schedule_items_ids=list(videos.values_list("id", flat=True)) - ), + + sent_for_video_upload_objs = [] + for video in videos: + sent_for_video_upload_objs.append( + ScheduleItemSentForVideoUpload(schedule_item=video) + ) + + ScheduleItemSentForVideoUpload.objects.bulk_create( + sent_for_video_upload_objs, ignore_conflicts=True ) + transaction.on_commit(process_schedule_items_videos_to_upload.delay) messages.add_message( request, messages.INFO, f"Scheduled {videos.count()} videos to upload" ) @@ -691,3 +692,28 @@ class DayAdmin(OrderedInlineModelAdminMixin, admin.ModelAdmin): list_display = ("day", "conference") list_filter = ("conference",) inlines = (SlotInline, DayRoomThroughModelInline) + + +@admin.action(description="Retry video upload") +def retry_video_upload(modeladmin, request, queryset): + queryset.update( + status=ScheduleItemSentForVideoUpload.Status.pending, last_attempt_at=None + ) + process_schedule_items_videos_to_upload.delay() + messages.add_message( + request, messages.INFO, f"Scheduled {queryset.count()} videos to upload" + ) + + +@admin.register(ScheduleItemSentForVideoUpload) +class ScheduleItemSentForVideoUploadAdmin(admin.ModelAdmin): + list_display = ( + "schedule_item", + "status", + "video_uploaded", + "thumbnail_uploaded", + ) + list_filter = ("status", "schedule_item__conference") + search_fields = ("schedule_item__title",) + autocomplete_fields = ("schedule_item",) + actions = [retry_video_upload] diff --git a/backend/schedule/migrations/0052_scheduleitemsentforvideoupload.py b/backend/schedule/migrations/0052_scheduleitemsentforvideoupload.py new file mode 100644 index 0000000000..7e9fe10380 --- /dev/null +++ b/backend/schedule/migrations/0052_scheduleitemsentforvideoupload.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.7 on 2024-03-03 11:44 + +from django.db import migrations, models +import django.db.models.deletion +import django.utils.timezone +import model_utils.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('schedule', '0051_scheduleitem_plain_thread_id'), + ] + + operations = [ + migrations.CreateModel( + name='ScheduleItemSentForVideoUpload', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')), + ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')), + ('status', models.CharField(choices=[('pending', 'Pending'), ('processing', 'Processing'), ('completed', 'Completed'), ('failed', 'Failed')], default='pending', max_length=100, verbose_name='Status')), + ('attempts', models.PositiveIntegerField(default=0, verbose_name='Video upload attempts')), + ('video_uploaded', models.BooleanField(default=False, 
verbose_name='Video uploaded')), + ('thumbnail_uploaded', models.BooleanField(default=False, verbose_name='Thumbnail uploaded')), + ('failed_reason', models.TextField(blank=True, default='', verbose_name='Failed reason')), + ('schedule_item', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='schedule.scheduleitem')), + ], + options={ + 'abstract': False, + }, + ), + ] diff --git a/backend/schedule/migrations/0053_scheduleitemsentforvideoupload_last_attempt_at.py b/backend/schedule/migrations/0053_scheduleitemsentforvideoupload_last_attempt_at.py new file mode 100644 index 0000000000..de703534a7 --- /dev/null +++ b/backend/schedule/migrations/0053_scheduleitemsentforvideoupload_last_attempt_at.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.7 on 2024-03-03 13:26 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('schedule', '0052_scheduleitemsentforvideoupload'), + ] + + operations = [ + migrations.AddField( + model_name='scheduleitemsentforvideoupload', + name='last_attempt_at', + field=models.DateTimeField(blank=True, null=True, verbose_name='Last attempt at'), + ), + ] diff --git a/backend/schedule/models.py b/backend/schedule/models.py index 4108de9c78..1645fe9f94 100644 --- a/backend/schedule/models.py +++ b/backend/schedule/models.py @@ -122,6 +122,44 @@ class Meta: ordering = ["hour"] +class ScheduleItemSentForVideoUploadQuerySet(QuerySet): + def to_upload(self): + return self.filter(status=ScheduleItemSentForVideoUpload.Status.pending) + + +class ScheduleItemSentForVideoUpload(TimeStampedModel): + class Status(models.TextChoices): + pending = "pending", _("Pending") + processing = "processing", _("Processing") + completed = "completed", _("Completed") + failed = "failed", _("Failed") + + status = models.CharField( + _("Status"), + max_length=100, + choices=Status.choices, + default=Status.pending, + ) + schedule_item = models.OneToOneField( + "schedule.ScheduleItem", + on_delete=models.CASCADE, + ) + attempts = models.PositiveIntegerField(_("Video upload attempts"), default=0) + last_attempt_at = models.DateTimeField(_("Last attempt at"), null=True, blank=True) + video_uploaded = models.BooleanField(_("Video uploaded"), default=False) + thumbnail_uploaded = models.BooleanField(_("Thumbnail uploaded"), default=False) + failed_reason = models.TextField( + _("Failed reason"), + blank=True, + default="", + ) + objects = ScheduleItemSentForVideoUploadQuerySet().as_manager() + + @property + def is_pending(self): + return self.status == self.Status.pending + + class ScheduleItemQuerySet(QuerySet, ConferenceQuerySetMixin): pass @@ -335,6 +373,30 @@ def get_admin_url(self): args=(self.pk,), ) + def __str__(self): + return self.title + + @property + def abstract(self): + language_code = self.language.code + + if self.submission_id: + return self.submission.abstract.localize(language_code) + + if self.keynote_id: + return self.keynote.description.localize(language_code) + + return self.description + + @property + def elevator_pitch(self): + language_code = self.language.code + + if self.submission_id: + return self.submission.elevator_pitch.localize(language_code) + + return "" + class Meta: verbose_name = _("Schedule item") verbose_name_plural = _("Schedule items") diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py index 63d70e0331..e85e05fe60 100644 --- a/backend/schedule/tasks.py +++ b/backend/schedule/tasks.py @@ -1,3 +1,7 @@ +from django.db.models import Q +from googleapiclient.errors import 
HttpError +from celery_heimdall import HeimdallTask +from google_api.sdk import youtube_videos_insert, youtube_videos_set_thumbnail from integrations import plain from pythonit_toolkit.emails.utils import mark_safe from pretix import user_has_admission_ticket @@ -11,6 +15,13 @@ from integrations import slack from pycon.celery import app +from schedule.models import ScheduleItemSentForVideoUpload +from schedule.video_upload import ( + cleanup_local_files, + create_video_info, + download_video_file, + extract_video_thumbnail, +) from users.models import User logger = logging.getLogger(__name__) @@ -256,3 +267,131 @@ def send_schedule_invitation_plain_message(*, schedule_item_id, message): schedule_item.plain_thread_id = thread_id schedule_item.save(update_fields=["plain_thread_id"]) + + +@app.task +def upload_schedule_item_video(*, sent_for_video_upload_state_id): + sent_for_video_upload = ScheduleItemSentForVideoUpload.objects.get( + id=sent_for_video_upload_state_id + ) + + if not sent_for_video_upload.is_pending: + logger.info( + "Schedule Item Sent for upload %s is not pending but %s, skipping", + sent_for_video_upload_state_id, + sent_for_video_upload.status, + ) + return + + sent_for_video_upload.status = ScheduleItemSentForVideoUpload.Status.processing + sent_for_video_upload.failed_reason = "" + sent_for_video_upload.attempts += 1 + sent_for_video_upload.last_attempt_at = timezone.now() + sent_for_video_upload.save( + update_fields=["status", "attempts", "failed_reason", "last_attempt_at"] + ) + + schedule_item = sent_for_video_upload.schedule_item + remote_video_path = schedule_item.video_uploaded_path + video_id = None + + if not sent_for_video_upload.video_uploaded: + logger.info("Uploading video for schedule_item_id=%s", schedule_item.id) + + video_info = create_video_info(schedule_item) + + logger.info("Downloading video file %s", remote_video_path) + + local_video_path = download_video_file(schedule_item.id, remote_video_path) + + for response in youtube_videos_insert( + title=video_info.title, + description=video_info.description, + tags=video_info.tags_as_str, + file_path=local_video_path, + ): + logger.info( + "schedule_item_id=%s Video uploading: %s", schedule_item.id, response + ) + + sent_for_video_upload.video_uploaded = True + sent_for_video_upload.save(update_fields=["video_uploaded"]) + + video_id = response["id"] + schedule_item.youtube_video_id = video_id + schedule_item.save(update_fields=["youtube_video_id"]) + else: + logger.info("Video already uploaded for schedule_item_id=%s", schedule_item.id) + + if not sent_for_video_upload.thumbnail_uploaded: + video_id = video_id or schedule_item.youtube_video_id + logger.info("Extracting thumbnail for schedule_item_id=%s", schedule_item.id) + + thumbnail_path = extract_video_thumbnail( + remote_video_path, + schedule_item.id, + ) + + try: + youtube_videos_set_thumbnail( + video_id=video_id, + thumbnail_path=thumbnail_path, + ) + except HttpError as e: + if e.status_code == 429: + # we reached the daily thumbnail limit + logger.warning( + "Reached the daily thumbnail limit! 
schedule_item_id=%s moved back to pending", + schedule_item.id, + ) + sent_for_video_upload.status = ( + ScheduleItemSentForVideoUpload.Status.pending + ) + sent_for_video_upload.save(update_fields=["status"]) + return + + raise + + sent_for_video_upload.thumbnail_uploaded = True + sent_for_video_upload.save(update_fields=["thumbnail_uploaded"]) + + cleanup_local_files(schedule_item.id) + + logger.info("Video uploaded for schedule_item_id=%s", schedule_item.id) + sent_for_video_upload.status = ScheduleItemSentForVideoUpload.Status.completed + sent_for_video_upload.save(update_fields=["status"]) + + +@app.task( + base=HeimdallTask, + heimdall={ + "unique": True, + }, +) +def process_schedule_items_videos_to_upload(): + statuses = ( + ScheduleItemSentForVideoUpload.objects.filter( + Q(last_attempt_at__isnull=True) + | Q( + last_attempt_at__lt=timezone.now() - timezone.timedelta(hours=1), + ) + ) + .to_upload() + .order_by("last_attempt_at") + ) + for sent_for_video_upload_state in statuses: + try: + upload_schedule_item_video( + sent_for_video_upload_state_id=sent_for_video_upload_state.id + ) + except Exception as e: + logger.exception( + "Error processing schedule item %s video upload: %s", + sent_for_video_upload_state.schedule_item.id, + e, + ) + sent_for_video_upload_state.status = ( + ScheduleItemSentForVideoUpload.Status.failed + ) + sent_for_video_upload_state.failed_reason = str(e) + sent_for_video_upload_state.save(update_fields=["status", "failed_reason"]) diff --git a/backend/schedule/video_upload.py b/backend/schedule/video_upload.py new file mode 100644 index 0000000000..8424e0cb93 --- /dev/null +++ b/backend/schedule/video_upload.py @@ -0,0 +1,127 @@ +from dataclasses import dataclass +import cv2 +from django.core.files.storage import FileSystemStorage +from django.core.files.storage import storages +from schedule.models import ScheduleItem + +from django.template import Context, Template + +local_storage = FileSystemStorage() +local_storage.base_location = "/tmp/" + + +@dataclass +class VideoInfo: + title: str + description: str + tags: list[str] + + @property + def tags_as_str(self) -> str: + return ",".join(self.tags) + + +def create_video_info(schedule_item: ScheduleItem) -> VideoInfo: + all_speakers = schedule_item.speakers + count_speakers = len(all_speakers) + speakers_names = ", ".join([speaker.fullname for speaker in all_speakers]) + tags = [ + clean_tag(tag) + for tag in ( + schedule_item.submission.tags.values_list("name", flat=True) + if schedule_item.submission_id + else [] + ) + ] + + context = { + "count_speakers": count_speakers, + "speakers_names": speakers_names, + "title": schedule_item.title, + "abstract": schedule_item.abstract, + "elevator_pitch": schedule_item.elevator_pitch, + "conference_name": schedule_item.conference.name.localize("en"), + "hashtags": [f"#{tag}" for tag in tags], + } + + title = _process_string_template( + schedule_item.conference.video_title_template, context + ) + + if len(title) > 100: + title = schedule_item.title + + description = _process_string_template( + schedule_item.conference.video_description_template, context + ) + + return VideoInfo( + title=replace_invalid_chars_with_lookalikes(title), + description=replace_invalid_chars_with_lookalikes(description), + tags=tags, + ) + + +def _process_string_template(template_string: str, context) -> str: + return Template(template_string).render(Context(context)).strip() + + +def download_video_file(id: int, path: str) -> str: + storage = storages["conferencevideos"] + filename = 
get_video_file_name(id) + + if not local_storage.exists(filename): + local_storage.save(filename, storage.open(path)) + + return local_storage.path(filename) + + +def extract_video_thumbnail(remote_video_path: str, id: int) -> str: + thumbnail_file_name = get_thumbnail_file_name(id) + file_path = local_storage.path(thumbnail_file_name) + + if local_storage.exists(thumbnail_file_name): + return file_path + + local_video_path = download_video_file(id, remote_video_path) + + video_capture = cv2.VideoCapture(local_video_path) + success, image = video_capture.read() + + if not success: + raise ValueError("Unable to extract frame") + + cv2.imwrite(file_path, image) + return file_path + + +def cleanup_local_files(id: int): + thumbnail_file_name = get_thumbnail_file_name(id) + if local_storage.exists(thumbnail_file_name): + local_storage.delete(thumbnail_file_name) + + video_file_name = get_video_file_name(id) + if local_storage.exists(video_file_name): + local_storage.delete(video_file_name) + + +def get_thumbnail_file_name(id: int) -> str: + return f"{id}-thumbnail.jpg" + + +def get_video_file_name(id: int) -> str: + return f"{id}-video_upload" + + +def replace_invalid_chars_with_lookalikes(text: str) -> str: + homoglyphs = { + "<": "\u1438", + ">": "\u1433", + } + for char, homoglyph in homoglyphs.items(): + text = text.replace(char, homoglyph) + return text + + +def clean_tag(tag: str) -> str: + return tag.strip().replace(" ", "").replace("-", "").lower() diff --git a/backend/video_upload/__init__.py b/backend/video_upload/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/backend/video_upload/activities.py b/backend/video_upload/activities.py deleted file mode 100644 index db5a668da3..0000000000 --- a/backend/video_upload/activities.py +++ /dev/null @@ -1,216 +0,0 @@ -from googleapiclient.errors import HttpError -import cv2 -import logging -from dataclasses import dataclass -from typing import Any -from google_api.sdk import youtube_videos_insert, youtube_videos_set_thumbnail -from temporalio import activity -from schedule.models import ScheduleItem -from django.core.files.storage import FileSystemStorage -from django.core.files.storage import storages - -logger = logging.getLogger(__name__) - -local_storage = FileSystemStorage() -local_storage.base_location = "/tmp/" - - -class DailyThumbnailLimitException(Exception): - pass - - -@dataclass -class ScheduleItemData: - id: int - slug: str - title: str - type: str - description: str - keynote_description: str - abstract: str - elevator_pitch: str - video_uploaded_path: str - tags: list[str] - speakers_ids: list[int] - conference_name: str - conference_youtube_video_bottom_text: str - has_submission: bool - - @property - def clean_tags(self) -> list[str]: - return [tag.replace(" ", "").replace("-", "").lower() for tag in self.tags] - - @property - def hashtags(self) -> list[str]: - return [f"#{tag}" for tag in self.clean_tags] - - -@activity.defn -async def fetch_schedule_item(schedule_item_id: int) -> ScheduleItemData: - schedule_item = await ScheduleItem.objects.prefetch_related( - "submission", - "submission__tags", - "conference", - "language", - "additional_speakers", - "keynote__speakers", - ).aget(id=schedule_item_id) - - speakers_ids = schedule_item.speakers - language_code = schedule_item.language.code - - return ScheduleItemData( - id=schedule_item.id, - slug=schedule_item.slug, - type=schedule_item.type, - title=schedule_item.title.strip(), - description=schedule_item.description.strip(), - 
abstract=schedule_item.submission.abstract.localize(language_code).strip() - if schedule_item.submission_id - else "", - keynote_description=schedule_item.keynote.description.localize( - language_code - ).strip() - if schedule_item.keynote_id - else "", - elevator_pitch=schedule_item.submission.elevator_pitch.localize( - language_code - ).strip() - if schedule_item.submission_id - else "", - tags=[tag.name for tag in schedule_item.submission.tags.all()] - if schedule_item.submission_id - else [], - video_uploaded_path=schedule_item.video_uploaded_path, - speakers_ids=speakers_ids, - conference_name=schedule_item.conference.name.localize(language_code), - conference_youtube_video_bottom_text=schedule_item.conference.youtube_video_bottom_text, - has_submission=schedule_item.submission_id is not None, - ) - - -@activity.defn -async def fetch_speakers_data( - speakers_ids: list[int], -) -> dict[str, dict[str, Any]]: - raise ValueError("update me :)") - - -@dataclass -class AddYouTubeIDToScheduleItemInput: - schedule_item_id: int - youtube_id: str - - -@activity.defn -async def add_youtube_id_to_schedule_item( - input: AddYouTubeIDToScheduleItemInput, -) -> None: - schedule_item = await ScheduleItem.objects.aget(id=input.schedule_item_id) - schedule_item.youtube_video_id = input.youtube_id - await schedule_item.asave(update_fields=["youtube_video_id"]) - - -@dataclass -class DownloadVideoFileInput: - path: str - id: int - - -@activity.defn -async def download_video_file(input: DownloadVideoFileInput) -> str: - logger.warning(f"downloading {input.path}") - storage = storages["conferencevideos"] - filename = f"yt_upload_{input.id}" - - if not local_storage.exists(filename): - local_storage.save(filename, storage.open(input.path)) - - return local_storage.path(filename) - - -@dataclass -class UploadVideoToYouTubeInput: - title: str - description: str - file_path: str - tags: list[str] - - @property - def tags_as_str(self) -> list[str]: - return ",".join(self.tags) - - -@activity.defn -async def upload_video_to_youtube(input: UploadVideoToYouTubeInput): - async for response in youtube_videos_insert( - title=input.title, - description=input.description, - tags=input.tags_as_str, - file_path=input.file_path, - ): - activity.heartbeat("video uploading") - - return response - - -@dataclass -class ExtractVideoThumbnailInput: - file_path: str - schedule_item_id: int - - -@activity.defn -async def extract_video_thumbnail(input: ExtractVideoThumbnailInput): - thumbnail_file_name = f"{input.schedule_item_id}-thumbnail.jpg" - file_path = local_storage.path(thumbnail_file_name) - - if local_storage.exists(thumbnail_file_name): - return file_path - - video_capture = cv2.VideoCapture(input.file_path) - success, image = video_capture.read() - - if not success: - raise ValueError("Unable to extract frame") - - cv2.imwrite(file_path, image) - return file_path - - -@dataclass -class SetThumbnailToYouTubeVideoInput: - youtube_id: str - thumbnail_path: str - - -@activity.defn -async def set_thumbnail_to_youtube_video(input: SetThumbnailToYouTubeVideoInput): - try: - return await youtube_videos_set_thumbnail( - video_id=input.youtube_id, - thumbnail_path=input.thumbnail_path, - ) - except HttpError as e: - if e.status_code == 429: - # we reached the daily thumbnail limit - raise DailyThumbnailLimitException() - raise - - -@dataclass -class CleanupLocalVideoFilesInput: - schedule_item_id: int - delete_thumbnail: bool - - -@activity.defn -async def cleanup_local_video_files(input: CleanupLocalVideoFilesInput): - 
thumbnail_name = f"{input.schedule_item_id}-thumbnail.jpg" - - if input.delete_thumbnail and local_storage.exists(thumbnail_name): - local_storage.delete(thumbnail_name) - - video_name = f"yt_upload_{input.schedule_item_id}" - if local_storage.exists(video_name): - local_storage.delete(video_name) diff --git a/backend/video_upload/workflows/__init__.py b/backend/video_upload/workflows/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/backend/video_upload/workflows/batch_multiple_schedule_items_video_upload.py b/backend/video_upload/workflows/batch_multiple_schedule_items_video_upload.py deleted file mode 100644 index 3f76e5f5e4..0000000000 --- a/backend/video_upload/workflows/batch_multiple_schedule_items_video_upload.py +++ /dev/null @@ -1,28 +0,0 @@ -from dataclasses import dataclass -from temporalio import workflow - -with workflow.unsafe.imports_passed_through(): - from video_upload.workflows.upload_schedule_item_video import ( - UploadScheduleItemVideoWorkflow, - ) - - -@dataclass -class BatchMultipleScheduleItemsVideoUploadInput: - schedule_items_ids: list - - -@workflow.defn -class BatchMultipleScheduleItemsVideoUpload: - input = BatchMultipleScheduleItemsVideoUploadInput - - @workflow.run - async def run(self, input: BatchMultipleScheduleItemsVideoUploadInput): - for schedule_item_id in input.schedule_items_ids: - await workflow.execute_child_workflow( - UploadScheduleItemVideoWorkflow, - UploadScheduleItemVideoWorkflow.input( - schedule_item_id=schedule_item_id - ), - id=f"upload_schedule_item_video_{schedule_item_id}", - ) diff --git a/backend/video_upload/workflows/delayed_upload_video_thumbnail.py b/backend/video_upload/workflows/delayed_upload_video_thumbnail.py deleted file mode 100644 index 2c951495af..0000000000 --- a/backend/video_upload/workflows/delayed_upload_video_thumbnail.py +++ /dev/null @@ -1,51 +0,0 @@ -import asyncio -from dataclasses import dataclass -from datetime import timedelta -from temporalio import workflow -from temporalio.common import RetryPolicy - -with workflow.unsafe.imports_passed_through(): - from video_upload.activities import ( - set_thumbnail_to_youtube_video, - SetThumbnailToYouTubeVideoInput, - CleanupLocalVideoFilesInput, - cleanup_local_video_files, - ) - - -@dataclass -class DelayedUploadVideoThumbnailInput: - schedule_item_id: int - youtube_id: int - thumbnail_path: str - - -@workflow.defn -class DelayedUploadVideoThumbnail: - input = DelayedUploadVideoThumbnailInput - - @workflow.run - async def run(self, input: DelayedUploadVideoThumbnailInput): - # sleep for 12 hours at the start - await asyncio.sleep(12 * 60 * 60) - await workflow.execute_activity( - set_thumbnail_to_youtube_video, - SetThumbnailToYouTubeVideoInput( - youtube_id=input.youtube_id, - thumbnail_path=input.thumbnail_path, - ), - schedule_to_close_timeout=timedelta(minutes=1), - retry_policy=RetryPolicy( - maximum_attempts=6, - backoff_coefficient=12 * 60 * 60, - ), - ) - - await workflow.execute_activity( - cleanup_local_video_files, - CleanupLocalVideoFilesInput( - schedule_item_id=input.schedule_item_id, - delete_thumbnail=True, - ), - start_to_close_timeout=timedelta(seconds=30), - ) diff --git a/backend/video_upload/workflows/upload_schedule_item_video.py b/backend/video_upload/workflows/upload_schedule_item_video.py deleted file mode 100644 index 14dbe1bb8b..0000000000 --- a/backend/video_upload/workflows/upload_schedule_item_video.py +++ /dev/null @@ -1,214 +0,0 @@ -from dataclasses import dataclass -from datetime import timedelta -from 
temporalio import workflow -from temporalio.exceptions import ActivityError -from temporalio.common import RetryPolicy - -with workflow.unsafe.imports_passed_through(): - from video_upload.activities import fetch_schedule_item, download_video_file - from video_upload.activities import ( - upload_video_to_youtube, - add_youtube_id_to_schedule_item, - fetch_speakers_data, - UploadVideoToYouTubeInput, - extract_video_thumbnail, - set_thumbnail_to_youtube_video, - SetThumbnailToYouTubeVideoInput, - cleanup_local_video_files, - CleanupLocalVideoFilesInput, - ) - from video_upload.workflows.delayed_upload_video_thumbnail import ( - DelayedUploadVideoThumbnail, - ) - from video_upload.activities import ( - AddYouTubeIDToScheduleItemInput, - ExtractVideoThumbnailInput, - ) - from video_upload.activities import ScheduleItemData - from video_upload.activities import DownloadVideoFileInput - - -@dataclass -class UploadScheduleItemVideoWorkflowInput: - schedule_item_id: int - - -@workflow.defn -class UploadScheduleItemVideoWorkflow: - input = UploadScheduleItemVideoWorkflowInput - - @workflow.run - async def run(self, input: UploadScheduleItemVideoWorkflowInput): - schedule_item = await workflow.execute_activity( - fetch_schedule_item, - input.schedule_item_id, - schedule_to_close_timeout=timedelta(seconds=5), - retry_policy=RetryPolicy( - maximum_attempts=3, - ), - ) - - speakers_data = await workflow.execute_activity( - fetch_speakers_data, - schedule_item.speakers_ids, - schedule_to_close_timeout=timedelta(seconds=5), - retry_policy=RetryPolicy( - maximum_attempts=3, - ), - ) - - media_file_path = await workflow.execute_activity( - download_video_file, - DownloadVideoFileInput( - path=schedule_item.video_uploaded_path, - id=schedule_item.id, - ), - schedule_to_close_timeout=timedelta(minutes=20), - retry_policy=RetryPolicy( - maximum_attempts=1, non_retryable_error_types=["ResourceNotFoundError"] - ), - ) - - upload_video_input = self.create_youtube_video_input( - schedule_item=schedule_item, - speakers_data=speakers_data, - media_file_path=media_file_path, - ) - - if len(upload_video_input.title) > 100: - raise ValueError("YouTube title is too long") - - if len(upload_video_input.description) > 5000: - raise ValueError("YouTube description is too long") - - response = await workflow.execute_activity( - upload_video_to_youtube, - upload_video_input, - schedule_to_close_timeout=timedelta(minutes=30), - retry_policy=RetryPolicy( - maximum_attempts=1, - ), - ) - - await workflow.execute_activity( - add_youtube_id_to_schedule_item, - AddYouTubeIDToScheduleItemInput( - schedule_item_id=schedule_item.id, youtube_id=response["id"] - ), - schedule_to_close_timeout=timedelta(seconds=5), - retry_policy=RetryPolicy( - maximum_attempts=3, - ), - ) - - thumbnail_path = await workflow.execute_activity( - extract_video_thumbnail, - ExtractVideoThumbnailInput( - file_path=media_file_path, - schedule_item_id=schedule_item.id, - ), - schedule_to_close_timeout=timedelta(minutes=20), - retry_policy=RetryPolicy( - maximum_attempts=30, - backoff_coefficient=1, - ), - ) - - delete_thumbnail = True - try: - await workflow.execute_activity( - set_thumbnail_to_youtube_video, - SetThumbnailToYouTubeVideoInput( - youtube_id=response["id"], - thumbnail_path=thumbnail_path, - ), - schedule_to_close_timeout=timedelta(minutes=1), - retry_policy=RetryPolicy( - maximum_attempts=1, - backoff_coefficient=2.0, - ), - ) - except ActivityError as exc: - if exc.cause == "DailyThumbnailLimitException": - workflow.start_child_workflow( - 
DelayedUploadVideoThumbnail.run, - DelayedUploadVideoThumbnail.input( - schedule_item_id=schedule_item.id, - youtube_id=response["id"], - thumbnail_path=thumbnail_path, - ), - id=f"upload_video_thumbnail-{schedule_item.id}", - ) - delete_thumbnail = False - else: - raise - - await workflow.execute_activity( - cleanup_local_video_files, - CleanupLocalVideoFilesInput( - schedule_item_id=schedule_item.id, delete_thumbnail=delete_thumbnail - ), - start_to_close_timeout=timedelta(seconds=30), - ) - - def create_youtube_video_input( - self, - *, - schedule_item: ScheduleItemData, - speakers_data: dict, - media_file_path: str, - ) -> UploadVideoToYouTubeInput: - count_speakers = len(schedule_item.speakers_ids) - speakers_names = ", ".join( - [ - speakers_data[str(speaker_id)]["fullname"] - for speaker_id in schedule_item.speakers_ids - ] - ) - - if count_speakers == 0 or count_speakers > 2: - title = f"{schedule_item.title} - {schedule_item.conference_name}" - else: - title = f"{schedule_item.title} - {speakers_names}" - - if len(title) > 100: - title = schedule_item.title - - description = ( - f"{schedule_item.title} - " - f"{speakers_names} - {schedule_item.conference_name}\n\n" - ) - - if schedule_item.elevator_pitch: - description += f"{schedule_item.elevator_pitch}\n\n" - elif schedule_item.description: - description += f"{schedule_item.description}\n\n" - elif schedule_item.keynote_description: - description += f"{schedule_item.keynote_description}\n\n" - - if schedule_item.type.lower() != "custom": - description += ( - f"Full Abstract: https://2023.pycon.it/event/{schedule_item.slug}\n\n" - ) - - description += f"{schedule_item.conference_youtube_video_bottom_text}\n\n" - - if schedule_item.clean_tags: - description += " ".join(schedule_item.hashtags) - - return UploadVideoToYouTubeInput( - title=replace_invalid_chars_with_lookalikes(title), - description=replace_invalid_chars_with_lookalikes(description), - file_path=media_file_path, - tags=schedule_item.clean_tags, - ) - - -def replace_invalid_chars_with_lookalikes(text: str) -> str: - homoglyphs = { - "<": "\u1438", - ">": "\u1433", - } - for char, homoglyph in homoglyphs.items(): - text = text.replace(char, homoglyph) - return text diff --git a/backend/worker.py b/backend/worker.py deleted file mode 100644 index 9360a5198a..0000000000 --- a/backend/worker.py +++ /dev/null @@ -1,57 +0,0 @@ -import os -import asyncio -from django.apps import apps -from django.conf import settings -from temporalio.client import Client -from temporalio.worker import Worker - - -async def main(): - apps.populate(settings.INSTALLED_APPS) - - from video_upload.activities import ( - fetch_schedule_item, - download_video_file, - upload_video_to_youtube, - fetch_speakers_data, - add_youtube_id_to_schedule_item, - extract_video_thumbnail, - set_thumbnail_to_youtube_video, - cleanup_local_video_files, - ) - from video_upload.workflows.upload_schedule_item_video import ( - UploadScheduleItemVideoWorkflow, - ) - from video_upload.workflows.batch_multiple_schedule_items_video_upload import ( - BatchMultipleScheduleItemsVideoUpload, - ) - from video_upload.workflows.delayed_upload_video_thumbnail import ( - DelayedUploadVideoThumbnail, - ) - - client = await Client.connect(os.getenv("TEMPORAL_ADDRESS")) - worker = Worker( - client, - task_queue="default", - workflows=[ - UploadScheduleItemVideoWorkflow, - BatchMultipleScheduleItemsVideoUpload, - DelayedUploadVideoThumbnail, - ], - activities=[ - fetch_schedule_item, - download_video_file, - upload_video_to_youtube, - 
fetch_speakers_data, - add_youtube_id_to_schedule_item, - extract_video_thumbnail, - set_thumbnail_to_youtube_video, - cleanup_local_video_files, - ], - ) - print("Starting worker") - await worker.run() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/temporal-config/development.yml b/temporal-config/development.yml deleted file mode 100644 index 8862dfad72..0000000000 --- a/temporal-config/development.yml +++ /dev/null @@ -1,6 +0,0 @@ -limit.maxIDLength: - - value: 255 - constraints: {} -system.forceSearchAttributesCacheRefreshOnRead: - - value: true # Dev setup only. Please don't turn this on in production. - constraints: {} From 2c175e90194b74bc8cbe1b730873647f0f3cb562 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sun, 3 Mar 2024 17:38:06 +0100 Subject: [PATCH 02/35] Fix tags, tests --- .../api/conferences/tests/test_query_talk.py | 42 +++++++++++++++++-- backend/schedule/admin.py | 2 + backend/schedule/tasks.py | 5 +++ backend/schedule/video_upload.py | 6 +-- 4 files changed, 49 insertions(+), 6 deletions(-) diff --git a/backend/api/conferences/tests/test_query_talk.py b/backend/api/conferences/tests/test_query_talk.py index f73793c7ad..ccae36a573 100644 --- a/backend/api/conferences/tests/test_query_talk.py +++ b/backend/api/conferences/tests/test_query_talk.py @@ -1,6 +1,6 @@ import datetime -from conferences.tests.factories import ConferenceFactory +from conferences.tests.factories import ConferenceFactory, KeynoteFactory from i18n.strings import LazyI18nString from languages.models import Language import pytest @@ -81,7 +81,7 @@ def test_exposes_abstract_elevator_pitch_in_correct_language( } -def test_empty_abstract_elevator_pitch_with_no_submission(graphql_client, user): +def test_abstract_fallbacks_to_description(graphql_client, user): graphql_client.force_login(user) schedule_item = ScheduleItemFactory( @@ -90,6 +90,7 @@ def test_empty_abstract_elevator_pitch_with_no_submission(graphql_client, user): type=ScheduleItem.TYPES.talk, conference=ConferenceFactory(), attendees_total_capacity=None, + description="Description", ) response = graphql_client.query( @@ -105,7 +106,42 @@ def test_empty_abstract_elevator_pitch_with_no_submission(graphql_client, user): ) assert response["data"]["conference"]["talk"] == { - "abstract": "", + "abstract": "Description", + "elevatorPitch": "", + } + + +def test_abstract_shows_keynote_description(graphql_client, user): + graphql_client.force_login(user) + conference = ConferenceFactory() + + schedule_item = ScheduleItemFactory( + status=ScheduleItem.STATUS.confirmed, + submission=None, + keynote=KeynoteFactory( + conference=conference, + description=LazyI18nString({"en": "Description Keynote", "it": ""}), + ), + type=ScheduleItem.TYPES.keynote, + conference=conference, + attendees_total_capacity=None, + description="Description", + ) + + response = graphql_client.query( + """query($slug: String!, $code: String!) 
{ + conference(code: $code) { + talk(slug: $slug) { + abstract + elevatorPitch + } + } + }""", + variables={"slug": schedule_item.slug, "code": schedule_item.conference.code}, + ) + + assert response["data"]["conference"]["talk"] == { + "abstract": "Description Keynote", "elevatorPitch": "", } diff --git a/backend/schedule/admin.py b/backend/schedule/admin.py index 6883e562f9..4ed8a50537 100644 --- a/backend/schedule/admin.py +++ b/backend/schedule/admin.py @@ -712,6 +712,8 @@ class ScheduleItemSentForVideoUploadAdmin(admin.ModelAdmin): "status", "video_uploaded", "thumbnail_uploaded", + "attempts", + "last_attempt_at", ) list_filter = ("status", "schedule_item__conference") search_fields = ("schedule_item__title",) diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py index e85e05fe60..4a238a3b3c 100644 --- a/backend/schedule/tasks.py +++ b/backend/schedule/tasks.py @@ -325,6 +325,8 @@ def upload_schedule_item_video(*, sent_for_video_upload_state_id): if not sent_for_video_upload.thumbnail_uploaded: video_id = video_id or schedule_item.youtube_video_id + assert video_id, "Video marked as uploaded but Video ID is missing" + logger.info("Extracting thumbnail for schedule_item_id=%s", schedule_item.id) thumbnail_path = extract_video_thumbnail( @@ -332,6 +334,9 @@ def upload_schedule_item_video(*, sent_for_video_upload_state_id): schedule_item.id, ) + # we don't need the video file anymore as we already extracted the thumbnail + cleanup_local_files(schedule_item.id, delete_thumbnail=False) + try: youtube_videos_set_thumbnail( video_id=video_id, diff --git a/backend/schedule/video_upload.py b/backend/schedule/video_upload.py index 8424e0cb93..34040dbfe1 100644 --- a/backend/schedule/video_upload.py +++ b/backend/schedule/video_upload.py @@ -41,7 +41,7 @@ def create_video_info(schedule_item: ScheduleItem) -> VideoInfo: "abstract": schedule_item.abstract, "elevator_pitch": schedule_item.elevator_pitch, "conference_name": schedule_item.conference.name.localize("en"), - "hashtags": [f"#{tag}" for tag in tags], + "hashtags": " ".join([f"#{tag}" for tag in tags]), } title = _process_string_template( @@ -95,9 +95,9 @@ def extract_video_thumbnail(remote_video_path: str, id: int) -> str: return file_path -def cleanup_local_files(id: int): +def cleanup_local_files(id: int, delete_thumbnail: bool = True): thumbnail_file_name = get_thumbnail_file_name(id) - if local_storage.exists(thumbnail_file_name): + if delete_thumbnail and local_storage.exists(thumbnail_file_name): local_storage.delete(thumbnail_file_name) video_file_name = get_video_file_name(id) From 97cbb4f769437cb5ea3c626b78dc421be070adba Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Mon, 4 Mar 2024 01:35:55 +0100 Subject: [PATCH 03/35] remove celery-heimdall --- backend/pdm.lock | 521 +++++++++++++-------------- backend/pycon/settings/base.py | 2 + backend/pycon/settings/test.py | 6 + backend/pyproject.toml | 3 +- backend/schedule/tasks.py | 15 +- backend/schedule/tests/factories.py | 9 + backend/schedule/tests/test_tasks.py | 50 ++- docker-compose.yml | 4 +- 8 files changed, 315 insertions(+), 295 deletions(-) diff --git a/backend/pdm.lock b/backend/pdm.lock index b6512c0143..8e3bda2441 100644 --- a/backend/pdm.lock +++ b/backend/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "lambda"] strategy = ["cross_platform"] lock_version = "4.4" -content_hash = "sha256:d503642e635606fcc04a0ff2d0f32e85b8982dd343e91fbaee6b4e53bf2e0966" +content_hash = "sha256:4b558cd3dfe7dbdea16fb2896c34f8fc83949a6dc6cf3bcdbd5d4a035a5abd6e" 
[[package]] name = "amqp" @@ -51,7 +51,7 @@ files = [ [[package]] name = "anyio" -version = "4.2.0" +version = "4.3.0" requires_python = ">=3.8" summary = "High level compatibility layer for multiple asynchronous event loop implementations" dependencies = [ @@ -59,8 +59,8 @@ dependencies = [ "sniffio>=1.1", ] files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [[package]] @@ -174,7 +174,7 @@ files = [ [[package]] name = "azure-core" -version = "1.30.0" +version = "1.30.1" requires_python = ">=3.7" summary = "Microsoft Azure Core Library for Python" dependencies = [ @@ -183,8 +183,8 @@ dependencies = [ "typing-extensions>=4.6.0", ] files = [ - {file = "azure-core-1.30.0.tar.gz", hash = "sha256:6f3a7883ef184722f6bd997262eddaf80cfe7e5b3e0caaaf8db1695695893d35"}, - {file = "azure_core-1.30.0-py3-none-any.whl", hash = "sha256:3dae7962aad109610e68c9a7abb31d79720e1d982ddf61363038d175a5025e89"}, + {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"}, + {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"}, ] [[package]] @@ -273,22 +273,22 @@ files = [ [[package]] name = "boto3" -version = "1.34.44" +version = "1.34.54" requires_python = ">= 3.8" summary = "The AWS SDK for Python" dependencies = [ - "botocore<1.35.0,>=1.34.44", + "botocore<1.35.0,>=1.34.54", "jmespath<2.0.0,>=0.7.1", "s3transfer<0.11.0,>=0.10.0", ] files = [ - {file = "boto3-1.34.44-py3-none-any.whl", hash = "sha256:40f89fb2acee0a0879effe81badffcd801a348e715483227223241ae311c48fc"}, - {file = "boto3-1.34.44.tar.gz", hash = "sha256:86bcf79a56631609a9f8023fe8f53e2869702bdd4c9047c6d9f091eb39c9b0fa"}, + {file = "boto3-1.34.54-py3-none-any.whl", hash = "sha256:f201b6a416f809283d554c652211eecec9fe3a52ed4063dab3f3e7aea7571d9c"}, + {file = "boto3-1.34.54.tar.gz", hash = "sha256:8b3f5cc7fbedcbb22271c328039df8a6ab343001e746e0cdb24774c426cadcf8"}, ] [[package]] name = "botocore" -version = "1.34.44" +version = "1.34.54" requires_python = ">= 3.8" summary = "Low-level, data-driven core of boto 3." 
dependencies = [ @@ -297,18 +297,18 @@ dependencies = [ "urllib3<2.1,>=1.25.4; python_version >= \"3.10\"", ] files = [ - {file = "botocore-1.34.44-py3-none-any.whl", hash = "sha256:8d9837fb33256e70b9c8955a32d3e60fa70a0b72849a909737cf105fcc3b5deb"}, - {file = "botocore-1.34.44.tar.gz", hash = "sha256:b0f40c54477e8e0a5c43377a927b8959a86bb8824aaef2d28db7c9c367cdefaa"}, + {file = "botocore-1.34.54-py3-none-any.whl", hash = "sha256:bf215d93e9d5544c593962780d194e74c6ee40b883d0b885e62ef35fc0ec01e5"}, + {file = "botocore-1.34.54.tar.gz", hash = "sha256:4061ff4be3efcf53547ebadf2c94d419dfc8be7beec24e9fa1819599ffd936fa"}, ] [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" requires_python = ">=3.7" summary = "Extensible memoizing collections and decorators" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -332,21 +332,6 @@ files = [ {file = "celery-5.3.6.tar.gz", hash = "sha256:870cc71d737c0200c397290d730344cc991d13a057534353d124c9380267aab9"}, ] -[[package]] -name = "celery-heimdall" -version = "1.0.0" -requires_python = ">=3.8,<4.0" -summary = "Helpful celery extensions." -dependencies = [ - "celery<6.0.0,>=5.2.7", - "importlib-metadata<=4.13", - "redis<5.0.0,>=4.3.4", -] -files = [ - {file = "celery_heimdall-1.0.0-py3-none-any.whl", hash = "sha256:cff237872334a74fb1f6a51e4206472d337e5865d851e308b6b55f6645da2327"}, - {file = "celery_heimdall-1.0.0.tar.gz", hash = "sha256:f357ea06cc588c702020375d9ba81d3dc7959ae2966b592123343d1bd0f5f925"}, -] - [[package]] name = "certifi" version = "2021.10.8" @@ -483,109 +468,109 @@ files = [ [[package]] name = "coverage" -version = "7.4.1" +version = "7.4.3" requires_python = ">=3.8" summary = "Code coverage measurement for Python" files = [ - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = 
"coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + 
{file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, ] [[package]] name = "coverage" -version = "7.4.1" +version = "7.4.3" extras = ["toml"] requires_python = ">=3.8" summary = "Code coverage measurement for Python" dependencies = [ - "coverage==7.4.1", -] -files = [ - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = 
"coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + "coverage==7.4.3", +] +files = [ + {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = 
"sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, ] [[package]] name = "cryptography" -version = "42.0.3" +version = "42.0.5" requires_python = ">=3.7" summary = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
dependencies = [ "cffi>=1.12; platform_python_implementation != \"PyPy\"", ] files = [ - {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:de5086cd475d67113ccb6f9fae6d8fe3ac54a4f9238fd08bfdb07b03d791ff0a"}, - {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:935cca25d35dda9e7bd46a24831dfd255307c55a07ff38fd1a92119cffc34857"}, - {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20100c22b298c9eaebe4f0b9032ea97186ac2555f426c3e70670f2517989543b"}, - {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eb6368d5327d6455f20327fb6159b97538820355ec00f8cc9464d617caecead"}, - {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39d5c93e95bcbc4c06313fc6a500cee414ee39b616b55320c1904760ad686938"}, - {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d96ea47ce6d0055d5b97e761d37b4e84195485cb5a38401be341fabf23bc32a"}, - {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d1998e545081da0ab276bcb4b33cce85f775adb86a516e8f55b3dac87f469548"}, - {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93fbee08c48e63d5d1b39ab56fd3fdd02e6c2431c3da0f4edaf54954744c718f"}, - {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:90147dad8c22d64b2ff7331f8d4cddfdc3ee93e4879796f837bdbb2a0b141e0c"}, - {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4dcab7c25e48fc09a73c3e463d09ac902a932a0f8d0c568238b3696d06bf377b"}, - {file = "cryptography-42.0.3-cp37-abi3-win32.whl", hash = "sha256:1e935c2900fb53d31f491c0de04f41110351377be19d83d908c1fd502ae8daa5"}, - {file = "cryptography-42.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:762f3771ae40e111d78d77cbe9c1035e886ac04a234d3ee0856bf4ecb3749d54"}, - {file = "cryptography-42.0.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3ec384058b642f7fb7e7bff9664030011ed1af8f852540c76a1317a9dd0d20"}, - {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35772a6cffd1f59b85cb670f12faba05513446f80352fe811689b4e439b5d89e"}, - {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04859aa7f12c2b5f7e22d25198ddd537391f1695df7057c8700f71f26f47a129"}, - {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c3d1f5a1d403a8e640fa0887e9f7087331abb3f33b0f2207d2cc7f213e4a864c"}, - {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df34312149b495d9d03492ce97471234fd9037aa5ba217c2a6ea890e9166f151"}, - {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:de4ae486041878dc46e571a4c70ba337ed5233a1344c14a0790c4c4be4bbb8b4"}, - {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0fab2a5c479b360e5e0ea9f654bcebb535e3aa1e493a715b13244f4e07ea8eec"}, - {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25b09b73db78facdfd7dd0fa77a3f19e94896197c86e9f6dc16bce7b37a96504"}, - {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d5cf11bc7f0b71fb71af26af396c83dfd3f6eed56d4b6ef95d57867bf1e4ba65"}, - {file = "cryptography-42.0.3-cp39-abi3-win32.whl", hash = "sha256:0fea01527d4fb22ffe38cd98951c9044400f6eff4788cf52ae116e27d30a1ba3"}, - {file = "cryptography-42.0.3-cp39-abi3-win_amd64.whl", hash = 
"sha256:2619487f37da18d6826e27854a7f9d4d013c51eafb066c80d09c63cf24505306"}, - {file = "cryptography-42.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ead69ba488f806fe1b1b4050febafdbf206b81fa476126f3e16110c818bac396"}, - {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:20180da1b508f4aefc101cebc14c57043a02b355d1a652b6e8e537967f1e1b46"}, - {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fbf0f3f0fac7c089308bd771d2c6c7b7d53ae909dce1db52d8e921f6c19bb3a"}, - {file = "cryptography-42.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c23f03cfd7d9826cdcbad7850de67e18b4654179e01fe9bc623d37c2638eb4ef"}, - {file = "cryptography-42.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db0480ffbfb1193ac4e1e88239f31314fe4c6cdcf9c0b8712b55414afbf80db4"}, - {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:6c25e1e9c2ce682d01fc5e2dde6598f7313027343bd14f4049b82ad0402e52cd"}, - {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9541c69c62d7446539f2c1c06d7046aef822940d248fa4b8962ff0302862cc1f"}, - {file = "cryptography-42.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b797099d221df7cce5ff2a1d272761d1554ddf9a987d3e11f6459b38cd300fd"}, - {file = "cryptography-42.0.3.tar.gz", hash = "sha256:069d2ce9be5526a44093a0991c450fe9906cdf069e0e7cd67d9dee49a62b9ebe"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [[package]] @@ -758,7 +743,7 @@ files = [ [[package]] name = "django-modelcluster" -version = "6.2.1" +version = "6.3" requires_python = ">=3.8" summary = "Django extension to allow working with 'clusters' of models as a single unit, independently of the database" dependencies = [ @@ -766,8 +751,8 @@ dependencies = [ "pytz>=2022.4", ] files = [ - {file = "django-modelcluster-6.2.1.tar.gz", hash = "sha256:3f53d47e1af7aec5e238724be16bbebdac9c518e4788b31429e773dcd8e8ea49"}, - {file = "django_modelcluster-6.2.1-py2.py3-none-any.whl", hash = "sha256:725c005de22191165b690ea05588b1c702ac34b1e9cd8a8b26fadc2a35fcd337"}, + {file = "django-modelcluster-6.3.tar.gz", hash = 
"sha256:0caed8a0e889f3abb92f144670878a466ef954ffa6c4c7b9c80e6426b720a49d"}, + {file = "django_modelcluster-6.3-py2.py3-none-any.whl", hash = "sha256:a8783d6565a0663f41cd6003ea361c3a5711e8a2a326160f1ec1eceb3e973d4f"}, ] [[package]] @@ -946,15 +931,15 @@ files = [ [[package]] name = "faker" -version = "23.2.1" +version = "23.3.0" requires_python = ">=3.8" summary = "Faker is a Python package that generates fake data for you." dependencies = [ "python-dateutil>=2.4", ] files = [ - {file = "Faker-23.2.1-py3-none-any.whl", hash = "sha256:0520a6b97e07c658b2798d7140971c1d5bc4bcd5013e7937fe075fd054aa320c"}, - {file = "Faker-23.2.1.tar.gz", hash = "sha256:f07b64d27f67b62c7f0536a72f47813015b3b51cd4664918454011094321e464"}, + {file = "Faker-23.3.0-py3-none-any.whl", hash = "sha256:117ce1a2805c1bc5ca753b3dc6f9d567732893b2294b827d3164261ee8f20267"}, + {file = "Faker-23.3.0.tar.gz", hash = "sha256:458d93580de34403a8dec1e8d5e6be2fee96c4deca63b95d71df7a6a80a690de"}, ] [[package]] @@ -996,11 +981,12 @@ files = [ [[package]] name = "future" -version = "0.18.3" +version = "1.0.0" requires_python = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" summary = "Clean single-source support for Python 3 and 2" files = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, + {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, ] [[package]] @@ -1021,7 +1007,7 @@ files = [ [[package]] name = "google-api-python-client" -version = "2.118.0" +version = "2.120.0" requires_python = ">=3.7" summary = "Google API Client Library for Python" dependencies = [ @@ -1032,13 +1018,13 @@ dependencies = [ "uritemplate<5,>=3.0.1", ] files = [ - {file = "google-api-python-client-2.118.0.tar.gz", hash = "sha256:ebf4927a3f5184096647be8f705d090e7f06d48ad82b0fa431a2fe80c2cbe182"}, - {file = "google_api_python_client-2.118.0-py2.py3-none-any.whl", hash = "sha256:9d83b178496b180e058fd206ebfb70ea1afab49f235dd326f557513f56f496d5"}, + {file = "google-api-python-client-2.120.0.tar.gz", hash = "sha256:a0c8769cad9576768bcb3191cb1f550f6ab3290cba042badb0fb17bba03f70cc"}, + {file = "google_api_python_client-2.120.0-py2.py3-none-any.whl", hash = "sha256:e2cdf4497bfc758fb44a4b487920cc1ca0571c2428187697a8e43e3b9feba1c9"}, ] [[package]] name = "google-auth" -version = "2.28.0" +version = "2.28.1" requires_python = ">=3.7" summary = "Google Authentication Library" dependencies = [ @@ -1047,8 +1033,8 @@ dependencies = [ "rsa<5,>=3.1.4", ] files = [ - {file = "google-auth-2.28.0.tar.gz", hash = "sha256:3cfc1b6e4e64797584fb53fc9bd0b7afa9b7c0dba2004fa7dcc9349e58cc3195"}, - {file = "google_auth-2.28.0-py2.py3-none-any.whl", hash = "sha256:7634d29dcd1e101f5226a23cbc4a0c6cda6394253bf80e281d9c5c6797869c53"}, + {file = "google-auth-2.28.1.tar.gz", hash = "sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885"}, + {file = "google_auth-2.28.1-py2.py3-none-any.whl", hash = "sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72"}, ] [[package]] @@ -1196,7 +1182,7 @@ files = [ [[package]] name = "httpcore" -version = "1.0.3" +version = "1.0.4" requires_python = ">=3.8" summary = "A minimal low-level HTTP client." 
dependencies = [ @@ -1204,8 +1190,8 @@ dependencies = [ "h11<0.15,>=0.13", ] files = [ - {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, - {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, + {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, + {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, ] [[package]] @@ -1248,19 +1234,6 @@ files = [ {file = "idna-2.8.tar.gz", hash = "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"}, ] -[[package]] -name = "importlib-metadata" -version = "4.13.0" -requires_python = ">=3.7" -summary = "Read metadata from Python packages" -dependencies = [ - "zipp>=0.5", -] -files = [ - {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, - {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, -] - [[package]] name = "inflection" version = "0.5.1" @@ -1357,28 +1330,28 @@ files = [ [[package]] name = "libcst" -version = "1.1.0" -requires_python = ">=3.8" -summary = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." +version = "1.2.0" +requires_python = ">=3.9" +summary = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.12 programs." dependencies = [ "pyyaml>=5.2", "typing-extensions>=3.7.4.2", "typing-inspect>=0.4.0", ] files = [ - {file = "libcst-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d68c34e3038d3d1d6324eb47744cbf13f2c65e1214cf49db6ff2a6603c1cd838"}, - {file = "libcst-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9dffa1795c2804d183efb01c0f1efd20a7831db6a21a0311edf90b4100d67436"}, - {file = "libcst-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc9b6ac36d7ec9db2f053014ea488086ca2ed9c322be104fbe2c71ca759da4bb"}, - {file = "libcst-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b7a38ec4c1c009ac39027d51558b52851fb9234669ba5ba62283185963a31c"}, - {file = "libcst-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5297a16e575be8173185e936b7765c89a3ca69d4ae217a4af161814a0f9745a7"}, - {file = "libcst-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:7ccaf53925f81118aeaadb068a911fac8abaff608817d7343da280616a5ca9c1"}, - {file = "libcst-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:75816647736f7e09c6120bdbf408456f99b248d6272277eed9a58cf50fb8bc7d"}, - {file = "libcst-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c8f26250f87ca849a7303ed7a4fd6b2c7ac4dec16b7d7e68ca6a476d7c9bfcdb"}, - {file = "libcst-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d37326bd6f379c64190a28947a586b949de3a76be00176b0732c8ee87d67ebe"}, - {file = "libcst-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d8cf974cfa2487b28f23f56c4bff90d550ef16505e58b0dca0493d5293784b"}, - {file = "libcst-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d1271403509b0a4ee6ff7917c2d33b5a015f44d1e208abb1da06ba93b2a378"}, - {file = "libcst-1.1.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:bca1841693941fdd18371824bb19a9702d5784cd347cb8231317dbdc7062c5bc"}, - {file = "libcst-1.1.0.tar.gz", hash = "sha256:0acbacb9a170455701845b7e940e2d7b9519db35a86768d86330a0b0deae1086"}, + {file = "libcst-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f080e9af843ff609f8f35fc7275c8bf08b02c31115e7cd5b77ca3b6a56c75096"}, + {file = "libcst-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3c7c0edfe3b878d64877671261c7b3ffe9d23181774bfad5d8fcbdbbbde9f064"}, + {file = "libcst-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b5fecb2b26fa3c1efe6e05ef1420522bd31bb4dae239e4c41fdf3ddbd853aeb"}, + {file = "libcst-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:968b93400e66e6711a29793291365e312d206dbafd3fc80219cfa717f0f01ad5"}, + {file = "libcst-1.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e01879aa8cd478bb8b1e4285cfd0607e64047116f7ab52bc2a787cde584cd686"}, + {file = "libcst-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:b4066dcadf92b183706f81ae0b4342e7624fc1d9c5ca2bf2b44066cb74bf863f"}, + {file = "libcst-1.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5c0d548d92c6704bb07ce35d78c0e054cdff365def0645c1b57c856c8e112bb4"}, + {file = "libcst-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82373a35711a8bb2a664dba2b7aeb20bbcce92a4db40af964e9cb2b976f989e7"}, + {file = "libcst-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb92398236566f0b73a0c73f8a41a9c4906c793e8f7c2745f30e3fb141a34b5"}, + {file = "libcst-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13ca9fe82326d82feb2c7b0f5a320ce7ed0d707c32919dd36e1f40792459bf6f"}, + {file = "libcst-1.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dded0e4f2e18150c4b07fedd7ef84a9abc7f9bd2d47cc1c485248ee1ec58e5cc"}, + {file = "libcst-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:dece0362540abfc39cd2cf5c98cde238b35fd74a1b0167e2563e4b8bb5f47489"}, + {file = "libcst-1.2.0.tar.gz", hash = "sha256:71dd69fff76e7edaf8fae0f63ffcdbf5016e8cd83165b1d0688d6856aa48186a"}, ] [[package]] @@ -1476,17 +1449,17 @@ files = [ [[package]] name = "msal" -version = "1.26.0" +version = "1.27.0" requires_python = ">=2.7" summary = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
dependencies = [ "PyJWT[crypto]<3,>=1.0.0", - "cryptography<44,>=0.6", + "cryptography<45,>=0.6", "requests<3,>=2.0.0", ] files = [ - {file = "msal-1.26.0-py2.py3-none-any.whl", hash = "sha256:be77ba6a8f49c9ff598bbcdc5dfcf1c9842f3044300109af738e8c3e371065b5"}, - {file = "msal-1.26.0.tar.gz", hash = "sha256:224756079fe338be838737682b49f8ebc20a87c1c5eeaf590daae4532b83de15"}, + {file = "msal-1.27.0-py2.py3-none-any.whl", hash = "sha256:572d07149b83e7343a85a3bcef8e581167b4ac76befcbbb6eef0c0e19643cdc0"}, + {file = "msal-1.27.0.tar.gz", hash = "sha256:3109503c038ba6b307152b0e8d34f98113f2e7a78986e28d0baf5b5303afda52"}, ] [[package]] @@ -1883,71 +1856,71 @@ files = [ [[package]] name = "pydantic" -version = "2.6.1" +version = "2.6.3" requires_python = ">=3.8" summary = "Data validation using Python type hints" dependencies = [ "annotated-types>=0.4.0", - "pydantic-core==2.16.2", + "pydantic-core==2.16.3", "typing-extensions>=4.6.1", ] files = [ - {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, - {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, + {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, + {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, ] [[package]] name = "pydantic-core" -version = "2.16.2" +version = "2.16.3" requires_python = ">=3.8" summary = "" dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] files = [ - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, - {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, - {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = 
"sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, - {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, - {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, - {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, - {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", 
hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, - {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = 
"sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = 
"pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, ] [[package]] @@ -2204,15 +2177,15 @@ files = [ [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Extensions to the standard Python datetime module" dependencies = [ "six>=1.5", ] files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [[package]] @@ -2282,30 +2255,30 @@ files = [ [[package]] name = "redis" -version = "4.6.0" +version = "5.0.2" requires_python = ">=3.7" summary = "Python client for Redis database and key-value store" dependencies = [ - "async-timeout>=4.0.2; python_full_version <= \"3.11.2\"", + "async-timeout>=4.0.3", ] files = [ - {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, - {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, + {file = "redis-5.0.2-py3-none-any.whl", hash = "sha256:4caa8e1fcb6f3c0ef28dba99535101d80934b7d4cd541bbb47f4a3826ee472d1"}, + {file = "redis-5.0.2.tar.gz", hash = "sha256:3f82cc80d350e93042c8e6e7a5d0596e4dd68715babffba79492733e1f367037"}, ] [[package]] name = "redis" -version = "4.6.0" +version = "5.0.2" extras = ["hiredis"] requires_python = ">=3.7" summary = "Python client for Redis database and key-value store" dependencies = [ "hiredis>=1.0.0", - "redis==4.6.0", + "redis==5.0.2", ] files = [ - {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, - {file = "redis-4.6.0.tar.gz", hash = 
"sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, + {file = "redis-5.0.2-py3-none-any.whl", hash = "sha256:4caa8e1fcb6f3c0ef28dba99535101d80934b7d4cd541bbb47f4a3826ee472d1"}, + {file = "redis-5.0.2.tar.gz", hash = "sha256:3f82cc80d350e93042c8e6e7a5d0596e4dd68715babffba79492733e1f367037"}, ] [[package]] @@ -2353,7 +2326,7 @@ files = [ [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" requires_python = ">=3.7.0" summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" dependencies = [ @@ -2361,8 +2334,8 @@ dependencies = [ "pygments<3.0.0,>=2.13.0", ] files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [[package]] @@ -2406,12 +2379,12 @@ files = [ [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [[package]] @@ -2435,12 +2408,12 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" requires_python = ">=3.7" summary = "Sniff out which async library your code is running under" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -2565,37 +2538,37 @@ files = [ [[package]] name = "time-machine" -version = "2.13.0" +version = "2.14.0" requires_python = ">=3.8" summary = "Travel through time in your tests." 
dependencies = [ "python-dateutil", ] files = [ - {file = "time_machine-2.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e9a9d150e098be3daee5c9f10859ab1bd14a61abebaed86e6d71f7f18c05b9d7"}, - {file = "time_machine-2.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2bd4169b808745d219a69094b3cb86006938d45e7293249694e6b7366225a186"}, - {file = "time_machine-2.13.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:8d526cdcaca06a496877cfe61cc6608df2c3a6fce210e076761964ebac7f77cc"}, - {file = "time_machine-2.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfef4ebfb4f055ce3ebc7b6c1c4d0dbfcffdca0e783ad8c6986c992915a57ed3"}, - {file = "time_machine-2.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f128db8997c3339f04f7f3946dd9bb2a83d15e0a40d35529774da1e9e501511"}, - {file = "time_machine-2.13.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21bef5854d49b62e2c33848b5c3e8acf22a3b46af803ef6ff19529949cb7cf9f"}, - {file = "time_machine-2.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:32b71e50b07f86916ac04bd1eefc2bd2c93706b81393748b08394509ee6585dc"}, - {file = "time_machine-2.13.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ac8ff145c63cd0dcfd9590fe694b5269aacbc130298dc7209b095d101f8cdde"}, - {file = "time_machine-2.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:19a3b10161c91ca8e0fd79348665cca711fd2eac6ce336ff9e6b447783817f93"}, - {file = "time_machine-2.13.0-cp311-cp311-win32.whl", hash = "sha256:5f87787d562e42bf1006a87eb689814105b98c4d5545874a281280d0f8b9a2d9"}, - {file = "time_machine-2.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:62fd14a80b8b71726e07018628daaee0a2e00937625083f96f69ed6b8e3304c0"}, - {file = "time_machine-2.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:e9935aff447f5400a2665ab10ed2da972591713080e1befe1bb8954e7c0c7806"}, - {file = "time_machine-2.13.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:34dcdbbd25c1e124e17fe58050452960fd16a11f9d3476aaa87260e28ecca0fd"}, - {file = "time_machine-2.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e58d82fe0e59d6e096ada3281d647a2e7420f7da5453b433b43880e1c2e8e0c5"}, - {file = "time_machine-2.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71acbc1febbe87532c7355eca3308c073d6e502ee4ce272b5028967847c8e063"}, - {file = "time_machine-2.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dec0ec2135a4e2a59623e40c31d6e8a8ae73305ade2634380e4263d815855750"}, - {file = "time_machine-2.13.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e3a2611f8788608ebbcb060a5e36b45911bc3b8adc421b1dc29d2c81786ce4d"}, - {file = "time_machine-2.13.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:42ef5349135626ad6cd889a0a81400137e5c6928502b0817ea9e90bb10702000"}, - {file = "time_machine-2.13.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6c16d90a597a8c2d3ce22d6be2eb3e3f14786974c11b01886e51b3cf0d5edaf7"}, - {file = "time_machine-2.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f2ae8d0e359b216b695f1e7e7256f208c390db0480601a439c5dd1e1e4e16ce"}, - {file = "time_machine-2.13.0-cp312-cp312-win32.whl", hash = "sha256:f5fa9610f7e73fff42806a2ed8b06d862aa59ce4d178a52181771d6939c3e237"}, - {file = "time_machine-2.13.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:02b33a8c19768c94f7ffd6aa6f9f64818e88afce23250016b28583929d20fb12"}, - {file = "time_machine-2.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:0cc116056a8a2a917a4eec85661dfadd411e0d8faae604ef6a0e19fe5cd57ef1"}, - {file = "time_machine-2.13.0.tar.gz", hash = "sha256:c23b2408e3adcedec84ea1131e238f0124a5bc0e491f60d1137ad7239b37c01a"}, + {file = "time-machine-2.14.0.tar.gz", hash = "sha256:b1076afb7825122a89a7be157d3a02f69f07d6fa0bacfaec463c71ac0488bd58"}, + {file = "time_machine-2.14.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:412ace2c9053a7f4c513d8723f78bec3a5c2b4721e6bbf60f33de94abc88503a"}, + {file = "time_machine-2.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ed812603f0233770faba6f7e60f5ed04bae1a5290c8159f19cb8c6888f99fc1"}, + {file = "time_machine-2.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e00a9cff6df58cfe584ab55cbb21acdaa3ecc6d75414d59cf65726b2e3d90a6c"}, + {file = "time_machine-2.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10e30c8e9b5ef1e4b10e588d3e789888ff2a94bcc9120d300954116a5d83556b"}, + {file = "time_machine-2.14.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1b006d483d11f0dfe64b2a7f17d5fa16c3fd2940042731f5b3bd1533c7d827"}, + {file = "time_machine-2.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cb9f6c62a205f12f6f054a027df221927f8066b2bca2b82477793291460410fa"}, + {file = "time_machine-2.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f01da787c2ac4c05e3722e94bf70da9698548c13ccfe6ca44ca2633c4b1cc24d"}, + {file = "time_machine-2.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d0d56a67b4656ae527b8152dd682642e31735559de653619116e92ab345b86a"}, + {file = "time_machine-2.14.0-cp311-cp311-win32.whl", hash = "sha256:14a82de9b00ed8427e4b9136a6d8e10a8c330b5cea62b5813fbedde978701c4a"}, + {file = "time_machine-2.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:26bf274f6b591ddc0f41e54b4b3a74d83748177dd96c5cfb8496adae1ada00ab"}, + {file = "time_machine-2.14.0-cp311-cp311-win_arm64.whl", hash = "sha256:9d2fac0e454c3aa63c10b331f5349fa2c961d58c4d430113f14698aac9565b3c"}, + {file = "time_machine-2.14.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a55717b8e3a153e4b7a9b6f551cd89e9d037db7e7732fc909c436d94e79628"}, + {file = "time_machine-2.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f92693a7ceedde14d507e906a26600ef11b80ca17cccfa91906266510f07b024"}, + {file = "time_machine-2.14.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:4a2670120780ad67c327f065eed03be917209cecd6fb0e9ada29720dbc1411e9"}, + {file = "time_machine-2.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e5a9ff08c585b8aac5d3db80a828dc549f5962c07297e1441e04cb0825464ac"}, + {file = "time_machine-2.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4e1a3c8bca77201dc6684d3c1d65d3ca4249872beb7ee9283c0b6e2df5cb677"}, + {file = "time_machine-2.14.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb603f46281c2d7f5c9607dd195107c9642af9bb36806386f66087b2741d0327"}, + {file = "time_machine-2.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9227c26a8d9e0cb0727917aa6470855320bde85f65deba58b988a8c0cc04bf9a"}, + {file = "time_machine-2.14.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:4df6ee8f1ed9d9ca4aa7750e5cfc0d8bc0143c2cac068258af5bad5f50e3b3e8"}, + {file = "time_machine-2.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a59bee89bf015f3ff1db012436bc7905fd99a4aa827d2feb73f1301afb0cb5c6"}, + {file = "time_machine-2.14.0-cp312-cp312-win32.whl", hash = "sha256:892ee00cc176c9da6b465cf9d44da408fa3297d72fcb45aec1aac09d8e381f22"}, + {file = "time_machine-2.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:9ca7c08ded824e6ae138280524d9ebcceaf50623e5263f24e38a28259215fb37"}, + {file = "time_machine-2.14.0-cp312-cp312-win_arm64.whl", hash = "sha256:b604d904dbe5aa36be37df61b47c15d87c359764dadb70f3a8eae7191e382bd4"}, ] [[package]] @@ -2613,12 +2586,12 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.3" +version = "0.12.4" requires_python = ">=3.7" summary = "Style preserving TOML library" files = [ - {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, - {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, + {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, + {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, ] [[package]] @@ -2637,22 +2610,22 @@ files = [ [[package]] name = "types-protobuf" -version = "4.24.0.20240129" +version = "4.24.0.20240302" requires_python = ">=3.8" summary = "Typing stubs for protobuf" files = [ - {file = "types-protobuf-4.24.0.20240129.tar.gz", hash = "sha256:8a83dd3b9b76a33e08d8636c5daa212ace1396418ed91837635fcd564a624891"}, - {file = "types_protobuf-4.24.0.20240129-py3-none-any.whl", hash = "sha256:23be68cc29f3f5213b5c5878ac0151706182874040e220cfb11336f9ee642ead"}, + {file = "types-protobuf-4.24.0.20240302.tar.gz", hash = "sha256:f22c00cc0cea9722e71e14d389bba429af9e35a74a949719c167203a5abbe2e4"}, + {file = "types_protobuf-4.24.0.20240302-py3-none-any.whl", hash = "sha256:5c607990f50f14606c2edaf379f8acc7418fef1451b227aa3c6a8a2cbc6ff14a"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" requires_python = ">=3.8" summary = "Backported and Experimental Type Hints for Python 3.8+" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -2770,15 +2743,15 @@ files = [ [[package]] name = "wagtail-headless-preview" -version = "0.7.0" +version = "0.8.0" requires_python = ">=3.8" summary = "Enhance Wagtail previews in headless setups." 
dependencies = [ "Wagtail>=4.1", ] files = [ - {file = "wagtail_headless_preview-0.7.0-py3-none-any.whl", hash = "sha256:105a62e130b7cfe3f39b1ff2cf8aef5080f578e98553af02fb4099cdacf5dfa9"}, - {file = "wagtail_headless_preview-0.7.0.tar.gz", hash = "sha256:730e86ea2de91602b64cc2412b5ff3e0b3f3679e3767eb417c109c9110209c07"}, + {file = "wagtail_headless_preview-0.8.0-py3-none-any.whl", hash = "sha256:91b305c36573490856e212f9e5645ada81f72ad37f1bb00e6feddc5537252358"}, + {file = "wagtail_headless_preview-0.8.0.tar.gz", hash = "sha256:581d8419cd1ef1f7de88235445e9695e5591d46259283d56bfe814e8620fa1d5"}, ] [[package]] @@ -2943,13 +2916,3 @@ files = [ {file = "xlwt-1.3.0-py2.py3-none-any.whl", hash = "sha256:a082260524678ba48a297d922cc385f58278b8aa68741596a87de01a9c628b2e"}, {file = "xlwt-1.3.0.tar.gz", hash = "sha256:c59912717a9b28f1a3c2a98fd60741014b06b043936dcecbc113eaaada156c88"}, ] - -[[package]] -name = "zipp" -version = "3.17.0" -requires_python = ">=3.8" -summary = "Backport of pathlib-compatible object wrapper for zip files" -files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, -] diff --git a/backend/pycon/settings/base.py b/backend/pycon/settings/base.py index df63998cf6..933fc653a0 100644 --- a/backend/pycon/settings/base.py +++ b/backend/pycon/settings/base.py @@ -368,3 +368,5 @@ } X_FRAME_OPTIONS = "SAMEORIGIN" + +CELERY_TASK_IGNORE_RESULT = True diff --git a/backend/pycon/settings/test.py b/backend/pycon/settings/test.py index 79e18c521f..3c2019a4ca 100644 --- a/backend/pycon/settings/test.py +++ b/backend/pycon/settings/test.py @@ -1,3 +1,4 @@ +from .base import env from .base import * # noqa SECRET_KEY = "this-key-should-only-be-used-for-tests" @@ -35,3 +36,8 @@ CELERY_TASK_ALWAYS_EAGER = True CELERY_TASK_EAGER_PROPAGATES = True + +PASSWORD_HASHERS = ("django.contrib.auth.hashers.MD5PasswordHasher",) + +CELERY_BROKER_URL = env("CELERY_BROKER_URL") +CELERY_RESULT_BACKEND = env("CELERY_RESULT_BACKEND") diff --git a/backend/pyproject.toml b/backend/pyproject.toml index fafb6ef233..8a7edeeeec 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -83,7 +83,7 @@ dependencies = [ "psycopg2<3.0.0,>=2.9.5", "django-imagekit<5.0.0,>=4.1.0", "pillow<11.0.0,>=10.0.1", - "redis[hiredis]<5.0.0,>=4.5.5", + "redis[hiredis]==5.0.2", "google-api-python-client<3.0.0,>=2.94.0", "google-auth<3.0.0,>=2.22.0", "google-auth-oauthlib<2.0.0,>=1.0.0", @@ -98,7 +98,6 @@ dependencies = [ "wagtail-localize==1.5.2", "celery>=5.3.6", "wagtail-headless-preview>=0.7.0", - "celery-heimdall>=1.0.0", ] name = "backend" version = "0.1.0" diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py index 4a238a3b3c..4b6d825c1c 100644 --- a/backend/schedule/tasks.py +++ b/backend/schedule/tasks.py @@ -1,6 +1,5 @@ from django.db.models import Q from googleapiclient.errors import HttpError -from celery_heimdall import HeimdallTask from google_api.sdk import youtube_videos_insert, youtube_videos_set_thumbnail from integrations import plain from pythonit_toolkit.emails.utils import mark_safe @@ -269,8 +268,8 @@ def send_schedule_invitation_plain_message(*, schedule_item_id, message): schedule_item.save(update_fields=["plain_thread_id"]) -@app.task -def upload_schedule_item_video(*, sent_for_video_upload_state_id): +@app.task() +def upload_schedule_item_video(*, sent_for_video_upload_state_id: int): 
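With celery-heimdall gone from the dependencies, the hunks here also drop the HeimdallTask base class that previously guaranteed only one run of process_schedule_items_videos_to_upload at a time; the series leans on the ScheduleItemSentForVideoUpload status transitions instead. If overlapping runs ever need to be prevented again, a minimal cache-based guard could look like the following sketch (a hypothetical task_lock helper, not part of this patch; it assumes an atomic cache backend such as Redis):

    from contextlib import contextmanager

    from django.core.cache import cache


    @contextmanager
    def task_lock(name: str, timeout: int = 30 * 60):
        # cache.add is atomic and only sets the key when it is absent,
        # so a second worker asking for the same lock gets False back.
        acquired = cache.add(f"task-lock:{name}", "locked", timeout)
        try:
            yield acquired
        finally:
            if acquired:
                cache.delete(f"task-lock:{name}")

A task body would then open with `with task_lock("process-videos") as acquired:` and return early when `acquired` is false.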
sent_for_video_upload = ScheduleItemSentForVideoUpload.objects.get( id=sent_for_video_upload_state_id ) @@ -367,12 +366,7 @@ def upload_schedule_item_video(*, sent_for_video_upload_state_id): sent_for_video_upload.save(update_fields=["status"]) -@app.task( - base=HeimdallTask, - heimdall={ - "unique": True, - }, -) +@app.task() def process_schedule_items_videos_to_upload(): statuses = ( ScheduleItemSentForVideoUpload.objects.filter( @@ -384,7 +378,8 @@ def process_schedule_items_videos_to_upload(): .to_upload() .order_by("last_attempt_at") ) - for sent_for_video_upload_state in statuses: + + for sent_for_video_upload_state in statuses.iterator(): try: upload_schedule_item_video( sent_for_video_upload_state_id=sent_for_video_upload_state.id diff --git a/backend/schedule/tests/factories.py b/backend/schedule/tests/factories.py index a6665a77d7..d7537dcfc0 100644 --- a/backend/schedule/tests/factories.py +++ b/backend/schedule/tests/factories.py @@ -13,6 +13,7 @@ ScheduleItem, ScheduleItemAdditionalSpeaker, ScheduleItemAttendee, + ScheduleItemSentForVideoUpload, Slot, ) from submissions.tests.factories import SubmissionFactory @@ -112,3 +113,11 @@ class ScheduleItemAttendeeFactory(DjangoModelFactory): class Meta: model = ScheduleItemAttendee + + +@register +class ScheduleItemSentForVideoUploadFactory(DjangoModelFactory): + schedule_item = factory.SubFactory(ScheduleItemFactory) + + class Meta: + model = ScheduleItemSentForVideoUpload diff --git a/backend/schedule/tests/test_tasks.py b/backend/schedule/tests/test_tasks.py index 036fc70d31..06d7c9c62e 100644 --- a/backend/schedule/tests/test_tasks.py +++ b/backend/schedule/tests/test_tasks.py @@ -1,3 +1,4 @@ +from unittest import mock from conferences.tests.factories import SpeakerVoucherFactory from i18n.strings import LazyI18nString from datetime import datetime, timezone @@ -6,18 +7,22 @@ from schedule.tasks import ( notify_new_schedule_invitation_answer_slack, + process_schedule_items_videos_to_upload, send_schedule_invitation_email, send_schedule_invitation_plain_message, send_speaker_communication_email, send_speaker_voucher_email, send_submission_time_slot_changed_email, ) -from schedule.tests.factories import ScheduleItemFactory +from schedule.tests.factories import ( + ScheduleItemFactory, + ScheduleItemSentForVideoUploadFactory, +) from submissions.tests.factories import SubmissionFactory import time_machine from conferences.models.speaker_voucher import SpeakerVoucher from users.tests.factories import UserFactory -from schedule.models import ScheduleItem +from schedule.models import ScheduleItem, ScheduleItemSentForVideoUpload from pythonit_toolkit.emails.templates import EmailTemplate import pytest @@ -366,3 +371,44 @@ def test_send_schedule_invitation_plain_message_with_existing_thread(mocker): schedule_item.refresh_from_db() assert schedule_item.plain_thread_id == "thread_id" + + +def test_process_schedule_items_videos_to_upload(mocker): + mock_process = mocker.patch("schedule.tasks.upload_schedule_item_video") + + ScheduleItemSentForVideoUploadFactory( + last_attempt_at=None, + status=ScheduleItemSentForVideoUpload.Status.failed, + ) + + # Never attempted - should be scheduled + sent_for_upload_1 = ScheduleItemSentForVideoUploadFactory( + last_attempt_at=None, + status=ScheduleItemSentForVideoUpload.Status.pending, + ) + + # Recently attempted + sent_for_upload_2 = ScheduleItemSentForVideoUploadFactory( + last_attempt_at=datetime(2020, 1, 1, 10, 0, 0, tzinfo=timezone.utc), + status=ScheduleItemSentForVideoUpload.Status.pending, + 
)
+
+    with time_machine.travel("2020-01-01 10:30:00Z", tick=False):
+        process_schedule_items_videos_to_upload()
+
+    mock_process.assert_called_once_with(
+        sent_for_video_upload_state_id=sent_for_upload_1.id
+    )
+    mock_process.reset_mock()
+
+    # more than an hour has passed since upload 2's attempt, so it should be rescheduled
+    with time_machine.travel("2020-01-01 11:20:00Z", tick=False):
+        process_schedule_items_videos_to_upload()
+
+    mock_process.assert_has_calls(
+        [
+            mock.call(sent_for_video_upload_state_id=sent_for_upload_1.id),
+            mock.call(sent_for_video_upload_state_id=sent_for_upload_2.id),
+        ],
+        any_order=True,
+    )
diff --git a/docker-compose.yml b/docker-compose.yml
index 9158c6f29a..137bd7e796 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -24,7 +24,6 @@ x-defaults:
     <<: *enviroment_defaults
     CACHE_URL: redis://redis:6379/0
     DATABASE_URL: psql://pycon:pycon@backend-db/pycon
-    DJANGO_SETTINGS_MODULE: pycon.settings.dev
     ALLOWED_HOSTS: "*"
     PRETIX_API_TOKEN: ${PRETIX_API_TOKEN}
     MAILCHIMP_SECRET_KEY: ${MAILCHIMP_SECRET_KEY}
@@ -53,7 +52,8 @@ services:
       dockerfile: ../Dockerfile.python.local
     networks: [pycon_net]
     entrypoint: ""
-    command: sh -c "pdm install &&
+    command: sh -c "export DJANGO_SETTINGS_MODULE=pycon.settings.dev &&
+      pdm install &&
       pdm run python manage.py migrate &&
       pdm run python manage.py create_admin &&
       touch /.ready &&

From c699cd4e7e9d859b4fbe004bcc46d6a3a786cd0a Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 00:33:29 +0100
Subject: [PATCH 04/35] tests

---
 backend/pycon/settings/base.py | 4 +
 backend/pycon/settings/test.py | 7 +-
 backend/schedule/tests/test_tasks.py | 252 +++++++++++++++++++++++++++
 backend/schedule/video_upload.py | 4 +-
 4 files changed, 262 insertions(+), 5 deletions(-)

diff --git a/backend/pycon/settings/base.py b/backend/pycon/settings/base.py
index 933fc653a0..d98052283f 100644
--- a/backend/pycon/settings/base.py
+++ b/backend/pycon/settings/base.py
@@ -224,6 +224,10 @@
     "conferencevideos": {
         "BACKEND": "pycon.storages.ConferenceVideosStorage",
     },
+    "localstorage": {
+        "BACKEND": "django.core.files.storage.FileSystemStorage",
+        "LOCATION": "/tmp/",
+    },
     "staticfiles": {
         "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
     },
diff --git a/backend/pycon/settings/test.py b/backend/pycon/settings/test.py
index 3c2019a4ca..aba6aee302 100644
--- a/backend/pycon/settings/test.py
+++ b/backend/pycon/settings/test.py
@@ -17,10 +17,13 @@
 STORAGES = {
     "default": {
-        "BACKEND": "django.core.files.storage.FileSystemStorage",
+        "BACKEND": "django.core.files.storage.memory.InMemoryStorage",
     },
     "conferencevideos": {
-        "BACKEND": "django.core.files.storage.FileSystemStorage",
+        "BACKEND": "django.core.files.storage.memory.InMemoryStorage",
+    },
+    "localstorage": {
+        "BACKEND": "django.core.files.storage.memory.InMemoryStorage",
     },
     "staticfiles": {
         "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
diff --git a/backend/schedule/tests/test_tasks.py b/backend/schedule/tests/test_tasks.py
index 06d7c9c62e..667702c574 100644
--- a/backend/schedule/tests/test_tasks.py
+++ b/backend/schedule/tests/test_tasks.py
@@ -1,3 +1,8 @@
+from googleapiclient.errors import HttpError
+import numpy as np
+from io import BytesIO
+from django.core.files.storage import storages
+from django.core.files.uploadedfile import InMemoryUploadedFile
 from unittest import mock
 from conferences.tests.factories import SpeakerVoucherFactory
 from i18n.strings import LazyI18nString
 from datetime import datetime, timezone
@@ -13,6 +18,7 @@
     send_speaker_communication_email,
send_speaker_voucher_email, send_submission_time_slot_changed_email, + upload_schedule_item_video, ) from schedule.tests.factories import ( ScheduleItemFactory, @@ -412,3 +418,249 @@ def test_process_schedule_items_videos_to_upload(mocker): ], any_order=True, ) + + +def test_process_schedule_items_videos_to_upload_sets_failed_status(mocker): + mock_process = mocker.patch( + "schedule.tasks.upload_schedule_item_video", + side_effect=ValueError("test error"), + ) + + sent_for_upload = ScheduleItemSentForVideoUploadFactory( + last_attempt_at=None, + status=ScheduleItemSentForVideoUpload.Status.pending, + ) + + with time_machine.travel("2020-01-01 10:30:00Z", tick=False): + process_schedule_items_videos_to_upload() + + mock_process.assert_called_once_with( + sent_for_video_upload_state_id=sent_for_upload.id + ) + + sent_for_upload.refresh_from_db() + assert sent_for_upload.status == ScheduleItemSentForVideoUpload.Status.failed + assert sent_for_upload.failed_reason == "test error" + + +def test_upload_schedule_item_video_flow(mocker): + file_content = BytesIO(b"File") + file_content.name = "test.mp4" + file_content.seek(0) + + image_content = BytesIO(b"Image") + image_content.name = "test.jpg" + image_content.seek(0) + + conferencevideos_storage = storages["conferencevideos"] + conferencevideos_storage.save( + "videos/test.mp4", + InMemoryUploadedFile( + file=file_content, + field_name="file_field", + name="test.txt", + content_type="text/plain", + size=file_content.getbuffer().nbytes, + charset="utf-8", + content_type_extra=None, + ), + ) + + mock_yt_insert = mocker.patch( + "schedule.tasks.youtube_videos_insert", + autospec=True, + return_value=[{"id": "vid_123"}], + ) + mock_yt_set_thumbnail = mocker.patch( + "schedule.tasks.youtube_videos_set_thumbnail", autospec=True + ) + mocker.patch( + "schedule.video_upload.cv2.VideoCapture.read", + return_value=(True, np.array([1])), + ) + + sent_for_upload = ScheduleItemSentForVideoUploadFactory( + last_attempt_at=None, + status=ScheduleItemSentForVideoUpload.Status.pending, + video_uploaded=False, + thumbnail_uploaded=False, + schedule_item__type=ScheduleItem.TYPES.talk, + schedule_item__submission=None, + schedule_item__title="Test Title", + schedule_item__description="Test Description", + schedule_item__video_uploaded_path=conferencevideos_storage.path( + "videos/test.mp4" + ), + schedule_item__conference__video_title_template="{{ title }}", + schedule_item__conference__video_description_template="{{ abstract }}", + ) + upload_schedule_item_video( + sent_for_video_upload_state_id=sent_for_upload.id, + ) + + mock_yt_insert.assert_called_with( + title="Test Title", + description="Test Description", + tags="", + file_path=mock.ANY, + ) + mock_yt_set_thumbnail.assert_called_with( + video_id="vid_123", thumbnail_path=mock.ANY + ) + + sent_for_upload.refresh_from_db() + assert sent_for_upload.attempts == 1 + assert sent_for_upload.failed_reason == "" + + assert sent_for_upload.status == ScheduleItemSentForVideoUpload.Status.completed + assert sent_for_upload.video_uploaded + assert sent_for_upload.thumbnail_uploaded + + sent_for_upload.schedule_item.refresh_from_db() + assert sent_for_upload.schedule_item.youtube_video_id == "vid_123" + + +def test_upload_schedule_item_with_only_thumbnail_to_upload(mocker): + file_content = BytesIO(b"File") + file_content.name = "test.mp4" + file_content.seek(0) + + image_content = BytesIO(b"Image") + image_content.name = "test.jpg" + image_content.seek(0) + + conferencevideos_storage = storages["conferencevideos"] + 
conferencevideos_storage.save( + "videos/test.mp4", + InMemoryUploadedFile( + file=file_content, + field_name="file_field", + name="test.txt", + content_type="text/plain", + size=file_content.getbuffer().nbytes, + charset="utf-8", + content_type_extra=None, + ), + ) + + mock_yt_insert = mocker.patch( + "schedule.tasks.youtube_videos_insert", + autospec=True, + return_value=[{"id": "vid_123"}], + ) + mock_yt_set_thumbnail = mocker.patch( + "schedule.tasks.youtube_videos_set_thumbnail", autospec=True + ) + mocker.patch( + "schedule.video_upload.cv2.VideoCapture.read", + return_value=(True, np.array([1])), + ) + + sent_for_upload = ScheduleItemSentForVideoUploadFactory( + last_attempt_at=None, + status=ScheduleItemSentForVideoUpload.Status.pending, + video_uploaded=True, + thumbnail_uploaded=False, + schedule_item__type=ScheduleItem.TYPES.talk, + schedule_item__submission=None, + schedule_item__youtube_video_id="vid_10", + schedule_item__title="Test Title", + schedule_item__description="Test Description", + schedule_item__video_uploaded_path=conferencevideos_storage.path( + "videos/test.mp4" + ), + schedule_item__conference__video_title_template="{{ title }}", + schedule_item__conference__video_description_template="{{ abstract }}", + ) + upload_schedule_item_video( + sent_for_video_upload_state_id=sent_for_upload.id, + ) + + mock_yt_insert.assert_not_called() + mock_yt_set_thumbnail.assert_called_with(video_id="vid_10", thumbnail_path=mock.ANY) + + sent_for_upload.refresh_from_db() + assert sent_for_upload.attempts == 1 + assert sent_for_upload.failed_reason == "" + + assert sent_for_upload.status == ScheduleItemSentForVideoUpload.Status.completed + assert sent_for_upload.video_uploaded + assert sent_for_upload.thumbnail_uploaded + + sent_for_upload.schedule_item.refresh_from_db() + assert sent_for_upload.schedule_item.youtube_video_id == "vid_10" + + +def test_upload_schedule_item_video_with_failing_thumbnail_is_rescheduled(mocker): + file_content = BytesIO(b"File") + file_content.name = "test.mp4" + file_content.seek(0) + + image_content = BytesIO(b"Image") + image_content.name = "test.jpg" + image_content.seek(0) + + conferencevideos_storage = storages["conferencevideos"] + conferencevideos_storage.save( + "videos/test.mp4", + InMemoryUploadedFile( + file=file_content, + field_name="file_field", + name="test.txt", + content_type="text/plain", + size=file_content.getbuffer().nbytes, + charset="utf-8", + content_type_extra=None, + ), + ) + + mock_yt_insert = mocker.patch( + "schedule.tasks.youtube_videos_insert", + autospec=True, + return_value=[{"id": "vid_123"}], + ) + mock_yt_set_thumbnail = mocker.patch( + "schedule.tasks.youtube_videos_set_thumbnail", + side_effect=HttpError(resp=mock.Mock(status=429), content=b""), + ) + mocker.patch( + "schedule.video_upload.cv2.VideoCapture.read", + return_value=(True, np.array([1])), + ) + + sent_for_upload = ScheduleItemSentForVideoUploadFactory( + last_attempt_at=None, + status=ScheduleItemSentForVideoUpload.Status.pending, + video_uploaded=False, + thumbnail_uploaded=False, + schedule_item__type=ScheduleItem.TYPES.talk, + schedule_item__submission=None, + schedule_item__title="Test Title", + schedule_item__description="Test Description", + schedule_item__video_uploaded_path=conferencevideos_storage.path( + "videos/test.mp4" + ), + schedule_item__conference__video_title_template="{{ title }}", + schedule_item__conference__video_description_template="{{ abstract }}", + ) + upload_schedule_item_video( + 
sent_for_video_upload_state_id=sent_for_upload.id, + ) + + mock_yt_insert.assert_called_with( + title="Test Title", + description="Test Description", + tags="", + file_path=mock.ANY, + ) + mock_yt_set_thumbnail.assert_called_with( + video_id="vid_123", thumbnail_path=mock.ANY + ) + + sent_for_upload.refresh_from_db() + assert sent_for_upload.video_uploaded + assert not sent_for_upload.thumbnail_uploaded + assert sent_for_upload.status == ScheduleItemSentForVideoUpload.Status.pending + + sent_for_upload.schedule_item.refresh_from_db() + assert sent_for_upload.schedule_item.youtube_video_id == "vid_123" diff --git a/backend/schedule/video_upload.py b/backend/schedule/video_upload.py index 34040dbfe1..b5b526a553 100644 --- a/backend/schedule/video_upload.py +++ b/backend/schedule/video_upload.py @@ -1,13 +1,11 @@ from dataclasses import dataclass import cv2 -from django.core.files.storage import FileSystemStorage from django.core.files.storage import storages from schedule.models import ScheduleItem from django.template import Context, Template -local_storage = FileSystemStorage() -local_storage.base_location = "/tmp/" +local_storage = storages["localstorage"] @dataclass From c3d29d497b4b6f3e4ffbeeba07a40a03329a925e Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 13:17:00 +0100 Subject: [PATCH 05/35] improvements --- backend/schedule/tests/test_tasks.py | 87 +++++++++++++++++++++++++++- 1 file changed, 86 insertions(+), 1 deletion(-) diff --git a/backend/schedule/tests/test_tasks.py b/backend/schedule/tests/test_tasks.py index 667702c574..50f1e278fd 100644 --- a/backend/schedule/tests/test_tasks.py +++ b/backend/schedule/tests/test_tasks.py @@ -420,7 +420,7 @@ def test_process_schedule_items_videos_to_upload(mocker): ) -def test_process_schedule_items_videos_to_upload_sets_failed_status(mocker): +def test_failing_to_process_schedule_items_videos_to_upload_sets_failed_status(mocker): mock_process = mocker.patch( "schedule.tasks.upload_schedule_item_video", side_effect=ValueError("test error"), @@ -591,6 +591,32 @@ def test_upload_schedule_item_with_only_thumbnail_to_upload(mocker): assert sent_for_upload.schedule_item.youtube_video_id == "vid_10" +@pytest.mark.parametrize( + "status", + [ + ScheduleItemSentForVideoUpload.Status.completed, + ScheduleItemSentForVideoUpload.Status.failed, + ScheduleItemSentForVideoUpload.Status.processing, + ], +) +def test_upload_schedule_item_ignores_non_pending_jobs(status): + sent_for_upload = ScheduleItemSentForVideoUploadFactory( + last_attempt_at=None, + status=status, + schedule_item__type=ScheduleItem.TYPES.talk, + schedule_item__submission=None, + schedule_item__title="Test Title", + schedule_item__description="Test Description", + schedule_item__conference__video_title_template="{{ title }}", + schedule_item__conference__video_description_template="{{ abstract }}", + ) + upload_schedule_item_video( + sent_for_video_upload_state_id=sent_for_upload.id, + ) + + assert sent_for_upload.status == status + + def test_upload_schedule_item_video_with_failing_thumbnail_is_rescheduled(mocker): file_content = BytesIO(b"File") file_content.name = "test.mp4" @@ -664,3 +690,62 @@ def test_upload_schedule_item_video_with_failing_thumbnail_is_rescheduled(mocker sent_for_upload.schedule_item.refresh_from_db() assert sent_for_upload.schedule_item.youtube_video_id == "vid_123" + + +def test_upload_schedule_item_video_with_failing_thumbnail_upload_fails(mocker): + file_content = BytesIO(b"File") + file_content.name = "test.mp4" + file_content.seek(0) + + 
image_content = BytesIO(b"Image") + image_content.name = "test.jpg" + image_content.seek(0) + + conferencevideos_storage = storages["conferencevideos"] + conferencevideos_storage.save( + "videos/test.mp4", + InMemoryUploadedFile( + file=file_content, + field_name="file_field", + name="test.txt", + content_type="text/plain", + size=file_content.getbuffer().nbytes, + charset="utf-8", + content_type_extra=None, + ), + ) + + mocker.patch( + "schedule.tasks.youtube_videos_insert", + autospec=True, + return_value=[{"id": "vid_123"}], + ) + mocker.patch( + "schedule.tasks.youtube_videos_set_thumbnail", + side_effect=HttpError(resp=mock.Mock(status=400), content=b""), + ) + mocker.patch( + "schedule.video_upload.cv2.VideoCapture.read", + return_value=(True, np.array([1])), + ) + + sent_for_upload = ScheduleItemSentForVideoUploadFactory( + last_attempt_at=None, + status=ScheduleItemSentForVideoUpload.Status.pending, + video_uploaded=False, + thumbnail_uploaded=False, + schedule_item__type=ScheduleItem.TYPES.talk, + schedule_item__submission=None, + schedule_item__title="Test Title", + schedule_item__description="Test Description", + schedule_item__video_uploaded_path=conferencevideos_storage.path( + "videos/test.mp4" + ), + schedule_item__conference__video_title_template="{{ title }}", + schedule_item__conference__video_description_template="{{ abstract }}", + ) + + with pytest.raises(HttpError): + upload_schedule_item_video( + sent_for_video_upload_state_id=sent_for_upload.id, + ) From 71f2fe75dfaea6907ec2a38a3f39f6b5b137189f Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 14:46:09 +0100 Subject: [PATCH 06/35] more tests --- backend/pdm.lock | 44 ++++- backend/pyproject.toml | 1 + backend/schedule/tests/test_tasks.py | 21 ++- backend/schedule/tests/test_video_upload.py | 193 ++++++++++++++++++++ backend/schedule/video_upload.py | 24 ++- 5 files changed, 275 insertions(+), 8 deletions(-) create mode 100644 backend/schedule/tests/test_video_upload.py diff --git a/backend/pdm.lock b/backend/pdm.lock index 8e3bda2441..b86bb712f2 100644 --- a/backend/pdm.lock +++ b/backend/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "lambda"] strategy = ["cross_platform"] lock_version = "4.4" -content_hash = "sha256:4b558cd3dfe7dbdea16fb2896c34f8fc83949a6dc6cf3bcdbd5d4a035a5abd6e" +content_hash = "sha256:f9ba1cfc33d63fa0a6ffd94345d0949f694251b4c907c36522be36544fa5d6d6" [[package]] name = "amqp" @@ -1276,6 +1276,19 @@ files = [ {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] +[[package]] +name = "jinja2" +version = "3.1.3" +requires_python = ">=3.7" +summary = "A very fast and expressive template engine." +dependencies = [ + "MarkupSafe>=2.0", +] +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + [[package]] name = "jmespath" version = "1.0.1" @@ -1427,6 +1440,35 @@ files = [ {file = "MarkupPy-1.14.tar.gz", hash = "sha256:1adee2c0a542af378fe84548ff6f6b0168f3cb7f426b46961038a2bcfaad0d5f"}, ] +[[package]] +name = "markupsafe" +version = "2.1.5" +requires_python = ">=3.7" +summary = "Safely add untrusted strings to HTML/XML markup." 
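Jinja2 arrives in the lock file here (MarkupSafe, whose entry follows, is its escaping dependency) because schedule/video_upload.py switches from Django templates to Jinja2 for the video title and description templates later in this series. A small standalone sketch of why the trim_blocks/lstrip_blocks options matter for those multi-line {% if %} templates (illustrative template only, not one from the patch):

    from jinja2 import Environment

    env = Environment(trim_blocks=True, lstrip_blocks=True)
    template = env.from_string(
        "{% if speakers %}\n"
        "{{ title }} - {{ speakers }}\n"
        "{% else %}\n"
        "{{ title }}\n"
        "{% endif %}"
    )
    # trim_blocks removes the newline that follows each {% ... %} tag and
    # lstrip_blocks strips the indentation before it, so only the rendered
    # branch ends up in the output.
    print(template.render(title="My Talk", speakers="Jane Doe").strip())
    # My Talk - Jane Doe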
+files = [ + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + [[package]] name = "mccabe" version = "0.7.0" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 8a7edeeeec..741c62bdc7 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -98,6 +98,7 @@ dependencies = [ "wagtail-localize==1.5.2", 
"celery>=5.3.6", "wagtail-headless-preview>=0.7.0", + "Jinja2>=3.1.3", ] name = "backend" version = "0.1.0" diff --git a/backend/schedule/tests/test_tasks.py b/backend/schedule/tests/test_tasks.py index 50f1e278fd..c9ecf8958c 100644 --- a/backend/schedule/tests/test_tasks.py +++ b/backend/schedule/tests/test_tasks.py @@ -24,6 +24,7 @@ ScheduleItemFactory, ScheduleItemSentForVideoUploadFactory, ) +from schedule.video_upload import get_thumbnail_file_name, get_video_file_name from submissions.tests.factories import SubmissionFactory import time_machine from conferences.models.speaker_voucher import SpeakerVoucher @@ -452,6 +453,8 @@ def test_upload_schedule_item_video_flow(mocker): image_content.name = "test.jpg" image_content.seek(0) + localstorage = storages["localstorage"] + conferencevideos_storage = storages["conferencevideos"] conferencevideos_storage.save( "videos/test.mp4", @@ -516,8 +519,13 @@ def test_upload_schedule_item_video_flow(mocker): assert sent_for_upload.video_uploaded assert sent_for_upload.thumbnail_uploaded - sent_for_upload.schedule_item.refresh_from_db() - assert sent_for_upload.schedule_item.youtube_video_id == "vid_123" + schedule_item = sent_for_upload.schedule_item + schedule_item.refresh_from_db() + assert schedule_item.youtube_video_id == "vid_123" + + # Make sure we cleanup files at the end + assert not localstorage.exists(get_thumbnail_file_name(schedule_item.id)) + assert not localstorage.exists(get_video_file_name(schedule_item.id)) def test_upload_schedule_item_with_only_thumbnail_to_upload(mocker): @@ -691,6 +699,15 @@ def test_upload_schedule_item_video_with_failing_thumbnail_is_rescheduled(mocker sent_for_upload.schedule_item.refresh_from_db() assert sent_for_upload.schedule_item.youtube_video_id == "vid_123" + localstorage = storages["localstorage"] + # thumbnail is not deleted in this case, but the video is + assert localstorage.exists( + get_thumbnail_file_name(sent_for_upload.schedule_item.id) + ) + assert not localstorage.exists( + get_video_file_name(sent_for_upload.schedule_item.id) + ) + def test_upload_schedule_item_video_with_failing_thumbnail_upload_fails(mocker): file_content = BytesIO(b"File") diff --git a/backend/schedule/tests/test_video_upload.py b/backend/schedule/tests/test_video_upload.py new file mode 100644 index 0000000000..33e683cf76 --- /dev/null +++ b/backend/schedule/tests/test_video_upload.py @@ -0,0 +1,193 @@ +from i18n.strings import LazyI18nString + +from schedule.models import ScheduleItem +from schedule.tests.factories import ( + ScheduleItemFactory, +) +import pytest +from schedule.video_upload import create_video_info + + +pytestmark = pytest.mark.django_db + +VIDEO_TITLE_TEMPLATE = """ +{% if has_zero_or_more_than_2_speakers %} +{{title}} - {{conference_name}} +{% else %} +{{title}} - {{speakers_names}} +{% endif %} +""" +VIDEO_DESCRIPTION_TEMPLATE = """ +{% if has_zero_or_more_than_2_speakers %} +{{title}} - {{conference_name}} +{% else %} +{{title}} - {{speakers_names}} - {{conference_name}} +{% endif %} + +{% if elevator_pitch %} +Elevator Pitch: +{{elevator_pitch}} +{% endif %} + +{% if abstract %} +Description: +{{abstract}} +{% endif %} + +{{hashtags}} +""" + + +def test_create_video_info_case1(): + schedule_item = ScheduleItemFactory( + conference__name=LazyI18nString({"en": "Conf Name", "it": "Conf Name"}), + type=ScheduleItem.TYPES.talk, + submission=None, + title="Schedule Item Title", + description="Schedule Item Description", + conference__video_title_template=VIDEO_TITLE_TEMPLATE, + 
conference__video_description_template=VIDEO_DESCRIPTION_TEMPLATE, + ) + + output = create_video_info(schedule_item) + + assert output.title == "Schedule Item Title - Conf Name" + assert ( + output.description + == """Schedule Item Title - Conf Name + +Description: +Schedule Item Description""" + ) + + +def test_create_video_info_case2_with_elevator_pitch_and_abstract(): + schedule_item = ScheduleItemFactory( + conference__name=LazyI18nString({"en": "Conf Name", "it": "Conf Name"}), + type=ScheduleItem.TYPES.talk, + submission__abstract=LazyI18nString({"en": "Abstract", "it": "Abstract"}), + submission__speaker__full_name="SpeakerName", + submission__elevator_pitch=LazyI18nString( + {"en": "Elevator Pitch", "it": "Elevator Pitch"} + ), + title="Schedule Item Title", + description="Schedule Item Description", + conference__video_title_template=VIDEO_TITLE_TEMPLATE, + conference__video_description_template=VIDEO_DESCRIPTION_TEMPLATE, + ) + + output = create_video_info(schedule_item) + + assert output.title == "Schedule Item Title - SpeakerName" + assert ( + output.description + == """Schedule Item Title - SpeakerName - Conf Name + +Elevator Pitch: +Elevator Pitch + +Description: +Abstract""" + ) + + +def test_create_video_info_case3_with_tags(): + schedule_item = ScheduleItemFactory( + conference__name=LazyI18nString({"en": "Conf Name", "it": "Conf Name"}), + type=ScheduleItem.TYPES.talk, + submission__abstract=LazyI18nString({"en": "Abstract", "it": "Abstract"}), + submission__speaker__full_name="SpeakerName", + submission__elevator_pitch=LazyI18nString( + {"en": "Elevator Pitch", "it": "Elevator Pitch"} + ), + submission__tags=["django", "php"], + title="Schedule Item Title", + description="Schedule Item Description", + conference__video_title_template=VIDEO_TITLE_TEMPLATE, + conference__video_description_template=VIDEO_DESCRIPTION_TEMPLATE, + ) + + output = create_video_info(schedule_item) + + assert output.title == "Schedule Item Title - SpeakerName" + assert ( + output.description + == """Schedule Item Title - SpeakerName - Conf Name + +Elevator Pitch: +Elevator Pitch + +Description: +Abstract + +#django #php""" + ) + + +def test_create_video_info_with_long_title_fallbacks_to_schedule_item_title(): + schedule_item = ScheduleItemFactory( + conference__name=LazyI18nString({"en": "Conf Name", "it": "Conf Name"}), + type=ScheduleItem.TYPES.talk, + submission__abstract=LazyI18nString({"en": "Abstract", "it": "Abstract"}), + submission__speaker__full_name="SpeakerName", + submission__elevator_pitch=LazyI18nString( + {"en": "Elevator Pitch", "it": "Elevator Pitch"} + ), + title="I am a very long title, in fact I am so long that I am long, almost 100 chars! Stop me, really! No", + description="Schedule Item Description", + conference__video_title_template=VIDEO_TITLE_TEMPLATE, + conference__video_description_template=VIDEO_DESCRIPTION_TEMPLATE, + ) + + output = create_video_info(schedule_item) + + assert ( + output.title + == "I am a very long title, in fact I am so long that I am long, almost 100 chars! Stop me, really! No" + ) + assert ( + output.description + == """I am a very long title, in fact I am so long that I am long, almost 100 chars! Stop me, really! 
No - SpeakerName - Conf Name
+
+Elevator Pitch:
+Elevator Pitch
+
+Description:
+Abstract"""
+    )
+
+
+def test_create_video_info_special_chars_are_replaced():
+    schedule_item = ScheduleItemFactory(
+        conference__name=LazyI18nString({"en": "Conf Name", "it": "Conf Name"}),
+        type=ScheduleItem.TYPES.talk,
+        submission=None,
+        title="This is an <html/> talk!",
+        description="We like to <<talk>> about HTML.",
+        conference__video_title_template="{{title}}",
+        conference__video_description_template="{{abstract}}",
+    )
+
+    output = create_video_info(schedule_item)
+
+    assert output.title == "This is an \u1438html/\u1433 talk!"
+    assert output.description == "We like to \u1438\u1438talk\u1433\u1433 about HTML."
+
+
+def test_create_video_info_tags():
+    schedule_item = ScheduleItemFactory(
+        conference__name=LazyI18nString({"en": "Conf Name", "it": "Conf Name"}),
+        type=ScheduleItem.TYPES.talk,
+        submission__abstract=LazyI18nString({"en": "Abstract", "it": "Abstract"}),
+        submission__speaker__full_name="SpeakerName",
+        submission__tags=["Django Girls", "tag2", "php-5.3"],
+        title="Schedule Item Title",
+        description="Schedule Item Description",
+        conference__video_title_template=VIDEO_TITLE_TEMPLATE,
+        conference__video_description_template=VIDEO_DESCRIPTION_TEMPLATE,
+    )
+
+    output = create_video_info(schedule_item)
+
+    assert output.tags == ["djangogirls", "tag2", "php53"]
+    assert output.tags_as_str == "djangogirls,tag2,php53"
diff --git a/backend/schedule/video_upload.py b/backend/schedule/video_upload.py
index b5b526a553..2a7f0cd5eb 100644
--- a/backend/schedule/video_upload.py
+++ b/backend/schedule/video_upload.py
@@ -1,9 +1,10 @@
 from dataclasses import dataclass
 import cv2
+from django.core.files.base import ContentFile
 from django.core.files.storage import storages
 from schedule.models import ScheduleItem
-from django.template import Context, Template
+from jinja2 import Environment
 
 local_storage = storages["localstorage"]
 
 
 @dataclass
@@ -33,6 +34,9 @@ def create_video_info(schedule_item: ScheduleItem) -> VideoInfo:
     ]
 
     context = {
+        "has_speakers": count_speakers > 0,
+        "has_more_than_2_speakers": count_speakers > 2,
+        "has_zero_or_more_than_2_speakers": count_speakers == 0 or count_speakers > 2,
         "count_speakers": count_speakers,
         "speakers_names": speakers_names,
         "title": schedule_item.title,
@@ -61,7 +65,12 @@
 def _process_string_template(template_string: str, context) -> str:
-    return Template(template_string).render(Context(context)).strip()
+    env = Environment(
+        trim_blocks=True,
+        lstrip_blocks=True,
+    )
+    template = env.from_string(template_string)
+    return template.render(context).strip().replace("\n\n\n", "\n\n")
 
 
 def download_video_file(id: int, path: str) -> str:
@@ -89,8 +98,13 @@ def extract_video_thumbnail(remote_video_path: str, id: int) -> str:
     if not success:
         raise ValueError("Unable to extract frame")
 
-    cv2.imwrite(file_path, image)
-    return file_path
+    ret, buffer = cv2.imencode(".jpg", image)
+    if not ret:
+        raise ValueError("Unable to encode frame")
+
+    content_file = ContentFile(buffer.tobytes())
+    local_storage.save(thumbnail_file_name, content_file)
+    return thumbnail_file_name
 
 
 def cleanup_local_files(id: int, delete_thumbnail: bool = True):
@@ -122,4 +136,4 @@ def replace_invalid_chars_with_lookalikes(text: str) -> str:
 
 
 def clean_tag(tag: str) -> str:
-    return tag.strip().replace(" ", "").replace("-", "").lower()
+    return tag.strip().replace(" ", "").replace("-", "").replace(".", "").lower()
From eda54fda787a248b24b34afb68b39ebc72e6dc6c Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 15:04:48 +0100
Subject: [PATCH 07/35] changes

---
 .github/workflows/backend-checks.yml | 2 +-
 backend/schedule/tests/test_video_upload.py | 4 ++--
 backend/schedule/video_upload.py | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/backend-checks.yml b/.github/workflows/backend-checks.yml
index f27820daa1..a869288395 100644
--- a/.github/workflows/backend-checks.yml
+++ b/.github/workflows/backend-checks.yml
@@ -35,7 +35,7 @@ jobs:
       - name: Check missing not pushed migrations
         run: pdm run python manage.py makemigrations --check
         env:
-          DJANGO_SETTINGS_MODULE: pycon.settings.test
+          DJANGO_SETTINGS_MODULE: pycon.settings.dev
           STRIPE_SECRET_API_KEY: ""
           STRIPE_SUBSCRIPTION_PRICE_ID: ""
           STRIPE_WEBHOOK_SIGNATURE_SECRET: ""
diff --git a/backend/schedule/tests/test_video_upload.py b/backend/schedule/tests/test_video_upload.py
index 33e683cf76..26a613d322 100644
--- a/backend/schedule/tests/test_video_upload.py
+++ b/backend/schedule/tests/test_video_upload.py
@@ -189,5 +189,5 @@ def test_create_video_info_tags():
 
     output = create_video_info(schedule_item)
 
-    assert output.tags == ["djangogirls", "tag2", "php53"]
-    assert output.tags_as_str == "djangogirls,tag2,php53"
+    assert output.tags == ["DjangoGirls", "tag2", "php53"]
+    assert output.tags_as_str == "DjangoGirls,tag2,php53"
diff --git a/backend/schedule/video_upload.py b/backend/schedule/video_upload.py
index 2a7f0cd5eb..c0a31875ee 100644
--- a/backend/schedule/video_upload.py
+++ b/backend/schedule/video_upload.py
@@ -136,4 +136,4 @@ def replace_invalid_chars_with_lookalikes(text: str) -> str:
 
 
 def clean_tag(tag: str) -> str:
-    return tag.strip().replace(" ", "").replace("-", "").replace(".", "").lower()
+    return tag.strip().replace(" ", "").replace("-", "").replace(".", "")

From 4982c3078294b92ec3f669a3a42837ea00ae6bd0 Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 15:14:45 +0100
Subject: [PATCH 08/35] check container build in ci

---
 .github/workflows/backend-checks.yml | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/.github/workflows/backend-checks.yml b/.github/workflows/backend-checks.yml
index a869288395..fde8c1b98c 100644
--- a/.github/workflows/backend-checks.yml
+++ b/.github/workflows/backend-checks.yml
@@ -39,3 +39,23 @@
           STRIPE_SECRET_API_KEY: ""
           STRIPE_SUBSCRIPTION_PRICE_ID: ""
           STRIPE_WEBHOOK_SIGNATURE_SECRET: ""
+
+  check-building-container:
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: backend
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Build test
+        uses: docker/build-push-action@v4.0.0
+        with:
+          context: ./${{ matrix.service.dir }}
+          file: ./${{ matrix.service.dir }}/Dockerfile
+          builder: ${{ steps.buildx.outputs.name }}
+          provenance: false
+          push: false
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache
+          platforms: linux/amd64

From 3a02991b67d8b25c9382bf49c035430bac8fd995 Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 15:17:46 +0100
Subject: [PATCH 09/35] docker buildx

---
 .github/workflows/backend-checks.yml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.github/workflows/backend-checks.yml b/.github/workflows/backend-checks.yml
index fde8c1b98c..4ed70f49e8 100644
--- a/.github/workflows/backend-checks.yml
+++ b/.github/workflows/backend-checks.yml
@@ -48,6 +48,14 @@ jobs:
     steps:
       - uses: actions/checkout@v2
+      - name: Set up Docker Buildx
+        id: 
buildx + uses: docker/setup-buildx-action@v2 + - name: Cache Docker layers + uses: actions/cache@v2 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ matrix.service.name }} - name: Build test uses: docker/build-push-action@v4.0.0 with: From eea66e872e234cac1e3487f55f3ca10605527828 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 15:18:23 +0100 Subject: [PATCH 10/35] test --- .github/workflows/backend-checks.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/backend-checks.yml b/.github/workflows/backend-checks.yml index 4ed70f49e8..4933e6849d 100644 --- a/.github/workflows/backend-checks.yml +++ b/.github/workflows/backend-checks.yml @@ -32,6 +32,11 @@ jobs: if: steps.cache-deps.outputs.cache-hit != 'true' run: pdm install + - name: Setup upterm session + uses: lhotari/action-upterm@v1 + with: + limit-access-to-actor: true + - name: Check missing not pushed migrations run: pdm run python manage.py makemigrations --check env: From bcec78f6fe72c71a10319e93d2b23783c60afd0c Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 15:35:06 +0100 Subject: [PATCH 11/35] fix job --- .github/workflows/backend-checks.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/backend-checks.yml b/.github/workflows/backend-checks.yml index 4933e6849d..90ac6d207a 100644 --- a/.github/workflows/backend-checks.yml +++ b/.github/workflows/backend-checks.yml @@ -40,10 +40,12 @@ jobs: - name: Check missing not pushed migrations run: pdm run python manage.py makemigrations --check env: - DJANGO_SETTINGS_MODULE: pycon.settings.dev + DJANGO_SETTINGS_MODULE: pycon.settings.test STRIPE_SECRET_API_KEY: "" STRIPE_SUBSCRIPTION_PRICE_ID: "" STRIPE_WEBHOOK_SIGNATURE_SECRET: "" + CELERY_BROKER_URL: "" + CELERY_RESULT_BACKEND: "" check-building-container: runs-on: ubuntu-latest From ca923c0f9c71aa20d7b3ef9ab402c152bf9849f5 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 15:35:20 +0100 Subject: [PATCH 12/35] remove debug --- .github/workflows/backend-checks.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.github/workflows/backend-checks.yml b/.github/workflows/backend-checks.yml index 90ac6d207a..a426c0fd62 100644 --- a/.github/workflows/backend-checks.yml +++ b/.github/workflows/backend-checks.yml @@ -32,11 +32,6 @@ jobs: if: steps.cache-deps.outputs.cache-hit != 'true' run: pdm install - - name: Setup upterm session - uses: lhotari/action-upterm@v1 - with: - limit-access-to-actor: true - - name: Check missing not pushed migrations run: pdm run python manage.py makemigrations --check env: From 2a571e56a5e0f8d3326503d273c00b6db31006f2 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 15:35:54 +0100 Subject: [PATCH 13/35] fix ref --- .github/workflows/backend-checks.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/backend-checks.yml b/.github/workflows/backend-checks.yml index a426c0fd62..42bf390c7a 100644 --- a/.github/workflows/backend-checks.yml +++ b/.github/workflows/backend-checks.yml @@ -61,8 +61,8 @@ jobs: - name: Build test uses: docker/build-push-action@v4.0.0 with: - context: ./${{ matrix.service.dir }} - file: ./${{ matrix.service.dir }}/Dockerfile + context: ./backend + file: ./backend/Dockerfile builder: ${{ steps.buildx.outputs.name }} provenance: false push: false From 7b68c3628484cdd8def23e2cb76b7aee4b1b4188 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 15:55:08 +0100 Subject: [PATCH 14/35] 
Stop processing when the quota is finished --- backend/google_api/exceptions.py | 2 ++ backend/google_api/models.py | 3 ++- backend/google_api/sdk.py | 5 +++++ backend/pycon/celery.py | 2 +- backend/schedule/admin.py | 10 ++++++++- backend/schedule/tasks.py | 12 +++++++++++ backend/schedule/tests/test_tasks.py | 31 ++++++++++++++++++++++++++++ 7 files changed, 62 insertions(+), 3 deletions(-) create mode 100644 backend/google_api/exceptions.py diff --git a/backend/google_api/exceptions.py b/backend/google_api/exceptions.py new file mode 100644 index 0000000000..499e45086f --- /dev/null +++ b/backend/google_api/exceptions.py @@ -0,0 +1,2 @@ +class NoGoogleCloudQuotaLeftError(Exception): + pass diff --git a/backend/google_api/models.py b/backend/google_api/models.py index d847ebd06e..1703760b5e 100644 --- a/backend/google_api/models.py +++ b/backend/google_api/models.py @@ -60,7 +60,8 @@ def get_available_credentials_token( service: str, min_quota: int ) -> Optional["GoogleCloudToken"]: credential = ( - GoogleCloudOAuthCredential.objects.with_quota_left(service) + GoogleCloudOAuthCredential.objects.has_token() + .with_quota_left(service) .annotate( has_token=models.Exists( GoogleCloudToken.objects.filter( diff --git a/backend/google_api/sdk.py b/backend/google_api/sdk.py index 3dd4da392e..9c70c06162 100644 --- a/backend/google_api/sdk.py +++ b/backend/google_api/sdk.py @@ -1,4 +1,5 @@ import inspect +from google_api.exceptions import NoGoogleCloudQuotaLeftError from google_api.models import GoogleCloudOAuthCredential, UsedRequestQuota from googleapiclient.discovery import build from apiclient.http import MediaFileUpload @@ -12,6 +13,10 @@ def get_available_credentials(service, min_quota): token = GoogleCloudOAuthCredential.get_available_credentials_token( service=service, min_quota=min_quota ) + + if not token: + raise NoGoogleCloudQuotaLeftError() + return Credentials.from_authorized_user_info( { "token": token.token, diff --git a/backend/pycon/celery.py b/backend/pycon/celery.py index e3fac81821..d7f5f56785 100644 --- a/backend/pycon/celery.py +++ b/backend/pycon/celery.py @@ -27,6 +27,6 @@ def setup_periodic_tasks(sender, **kwargs): add = sender.add_periodic_task add(timedelta(minutes=5), check_association_membership_subscriptions) - add(timedelta(minutes=10), process_schedule_items_videos_to_upload) + add(timedelta(minutes=30), process_schedule_items_videos_to_upload) except Exception: logger.exception("setup_periodic_tasks") diff --git a/backend/schedule/admin.py b/backend/schedule/admin.py index 4ed8a50537..20d4e71866 100644 --- a/backend/schedule/admin.py +++ b/backend/schedule/admin.py @@ -705,6 +705,14 @@ def retry_video_upload(modeladmin, request, queryset): ) +@admin.action(description="Mark as failed") +def mark_as_failed(modeladmin, request, queryset): + queryset.update(status=ScheduleItemSentForVideoUpload.Status.failed) + messages.add_message( + request, messages.INFO, f"Marked {queryset.count()} videos as failed" + ) + + @admin.register(ScheduleItemSentForVideoUpload) class ScheduleItemSentForVideoUploadAdmin(admin.ModelAdmin): list_display = ( @@ -718,4 +726,4 @@ class ScheduleItemSentForVideoUploadAdmin(admin.ModelAdmin): list_filter = ("status", "schedule_item__conference") search_fields = ("schedule_item__title",) autocomplete_fields = ("schedule_item",) - actions = [retry_video_upload] + actions = [retry_video_upload, mark_as_failed] diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py index 4b6d825c1c..0b8f9d63c5 100644 --- a/backend/schedule/tasks.py +++ 
b/backend/schedule/tasks.py
@@ -1,4 +1,5 @@
 from django.db.models import Q
+from google_api.exceptions import NoGoogleCloudQuotaLeftError
 from googleapiclient.errors import HttpError
 from google_api.sdk import youtube_videos_insert, youtube_videos_set_thumbnail
 from integrations import plain
@@ -384,6 +385,17 @@ def process_schedule_items_videos_to_upload():
             upload_schedule_item_video(
                 sent_for_video_upload_state_id=sent_for_video_upload_state.id
             )
+        except NoGoogleCloudQuotaLeftError:
+            logger.info(
+                "No google cloud quota left to upload the schedule item %s. Moving back to pending and stopping processing.",
+                sent_for_video_upload_state.schedule_item.id,
+            )
+            sent_for_video_upload_state.status = (
+                ScheduleItemSentForVideoUpload.Status.pending
+            )
+            sent_for_video_upload_state.failed_reason = "No Google Cloud Quota Left"
+            sent_for_video_upload_state.save(update_fields=["status", "failed_reason"])
+            break
         except Exception as e:
             logger.exception(
                 "Error processing schedule item %s video upload: %s",
diff --git a/backend/schedule/tests/test_tasks.py b/backend/schedule/tests/test_tasks.py
index c9ecf8958c..eff4ef4be8 100644
--- a/backend/schedule/tests/test_tasks.py
+++ b/backend/schedule/tests/test_tasks.py
@@ -1,3 +1,4 @@
+from google_api.exceptions import NoGoogleCloudQuotaLeftError
 from googleapiclient.errors import HttpError
 import numpy as np
 from io import BytesIO
@@ -421,6 +422,36 @@ def test_process_schedule_items_videos_to_upload(mocker):
     )
 
 
+def test_process_schedule_items_videos_stops_processing_when_the_quota_is_finished(
+    mocker,
+):
+    mock_process = mocker.patch(
+        "schedule.tasks.upload_schedule_item_video",
+        side_effect=NoGoogleCloudQuotaLeftError(),
+    )
+
+    sent_for_upload_1 = ScheduleItemSentForVideoUploadFactory(
+        last_attempt_at=None,
+        status=ScheduleItemSentForVideoUpload.Status.pending,
+    )
+
+    ScheduleItemSentForVideoUploadFactory(
+        last_attempt_at=None,
+        status=ScheduleItemSentForVideoUpload.Status.pending,
+    )
+
+    with time_machine.travel("2020-01-01 10:30:00Z", tick=False):
+        process_schedule_items_videos_to_upload()
+
+    mock_process.assert_called_once_with(
+        sent_for_video_upload_state_id=sent_for_upload_1.id
+    )
+    mock_process.reset_mock()
+
+    sent_for_upload_1.refresh_from_db()
+    assert sent_for_upload_1.status == ScheduleItemSentForVideoUpload.Status.pending
+
+
 def test_failing_to_process_schedule_items_videos_to_upload_sets_failed_status(mocker):
     mock_process = mocker.patch(
         "schedule.tasks.upload_schedule_item_video",

From 072c9b606b5b652913c058980589886c4e6c8b57 Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 16:29:10 +0100
Subject: [PATCH 15/35] more tests

---
 backend/google_api/models.py | 3 +-
 backend/google_api/tests/test_models.py | 40 ++++++++++++++-----
 backend/google_api/tests/test_sdk.py | 53 +++++++++++++++++++++++++
 3 files changed, 85 insertions(+), 11 deletions(-)
 create mode 100644 backend/google_api/tests/test_sdk.py

diff --git a/backend/google_api/models.py b/backend/google_api/models.py
index 1703760b5e..d847ebd06e 100644
--- a/backend/google_api/models.py
+++ b/backend/google_api/models.py
@@ -60,8 +60,7 @@ def get_available_credentials_token(
         service: str, min_quota: int
     ) -> Optional["GoogleCloudToken"]:
         credential = (
-            GoogleCloudOAuthCredential.objects.has_token()
-            .with_quota_left(service)
+            GoogleCloudOAuthCredential.objects.with_quota_left(service)
             .annotate(
                 has_token=models.Exists(
                     GoogleCloudToken.objects.filter(
diff --git a/backend/google_api/tests/test_models.py 
b/backend/google_api/tests/test_models.py index 35debb921d..1d7f253318 100644 --- a/backend/google_api/tests/test_models.py +++ b/backend/google_api/tests/test_models.py @@ -1,17 +1,39 @@ +import time_machine +from google_api.models import GoogleCloudOAuthCredential, UsedRequestQuota import pytest pytestmark = pytest.mark.django_db -# def test_get_available_credentials(): -# credential = GoogleCloudOAuthCredential.objects.create( -# quota_limit_for_youtube=10_000, -# ) +def test_with_quota_left(): + credential = GoogleCloudOAuthCredential.objects.create( + quota_limit_for_youtube=10_000, + ) -# found = GoogleCloudOAuthCredential.get_available_credentials( -# service="youtube", -# min_quota=1600, -# ) + with time_machine.travel("2023-10-10 00:00:00", tick=False): + result = GoogleCloudOAuthCredential.objects.with_quota_left("youtube").get() + assert result.youtube_quota_left == 10_000 -# assert found.id == credential.id + UsedRequestQuota.objects.create( + credentials=credential, + cost=1000, + service="youtube", + ) + + result = GoogleCloudOAuthCredential.objects.with_quota_left("youtube").get() + assert result.youtube_quota_left == 9_000 + + with time_machine.travel("2023-10-10 08:00:00", tick=False): + result = GoogleCloudOAuthCredential.objects.with_quota_left("youtube").get() + assert result.youtube_quota_left == 10_000 + + +def test_get_by_client_id(): + credential = GoogleCloudOAuthCredential.objects.create(client_id="test123") + + assert ( + GoogleCloudOAuthCredential.objects.get_by_client_id("test123").id + == credential.id + ) + assert GoogleCloudOAuthCredential.objects.get_by_client_id("invalid") is None diff --git a/backend/google_api/tests/test_sdk.py b/backend/google_api/tests/test_sdk.py new file mode 100644 index 0000000000..9ee1518155 --- /dev/null +++ b/backend/google_api/tests/test_sdk.py @@ -0,0 +1,53 @@ +import datetime +import time_machine +from google_api.models import GoogleCloudOAuthCredential, GoogleCloudToken +from google_api.exceptions import NoGoogleCloudQuotaLeftError +from google_api.sdk import count_quota, get_available_credentials +import pytest + +pytestmark = pytest.mark.django_db + + +def test_get_available_credentials(admin_user): + stored_credential = GoogleCloudOAuthCredential.objects.create() + GoogleCloudToken.objects.create( + oauth_credential=stored_credential, token="token", admin_user=admin_user + ) + + available_credentials = get_available_credentials("youtube", 1000) + + assert available_credentials.token == "token" + + +def test_get_available_credentials_fails_when_no_quota_is_left(admin_user): + stored_credential = GoogleCloudOAuthCredential.objects.create( + quota_limit_for_youtube=500 + ) + GoogleCloudToken.objects.create( + oauth_credential=stored_credential, token="token", admin_user=admin_user + ) + + with pytest.raises(NoGoogleCloudQuotaLeftError): + get_available_credentials("youtube", 1000) + + +def test_count_quota(admin_user): + stored_credential = GoogleCloudOAuthCredential.objects.create() + GoogleCloudToken.objects.create( + oauth_credential=stored_credential, token="token", admin_user=admin_user + ) + + @count_quota("youtube", 1000) + def test_function(*, credentials): + return credentials + + with time_machine.travel("2023-10-10 12:00:00", tick=False): + credentials = test_function() + + assert credentials.token == "token" + assert stored_credential.usedrequestquota_set.count() == 1 + + used_quota = stored_credential.usedrequestquota_set.first() + assert used_quota.cost == 1000 + assert used_quota.service == "youtube" + assert 
used_quota.used_at == datetime.datetime.now(tz=datetime.timezone.utc) From b7710155a2fc3ef8f6544ade13d34b7a35af19dc Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 16:36:07 +0100 Subject: [PATCH 16/35] generator function --- backend/google_api/tests/test_sdk.py | 29 ++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/backend/google_api/tests/test_sdk.py b/backend/google_api/tests/test_sdk.py index 9ee1518155..e9ffd79cbb 100644 --- a/backend/google_api/tests/test_sdk.py +++ b/backend/google_api/tests/test_sdk.py @@ -51,3 +51,32 @@ def test_function(*, credentials): assert used_quota.cost == 1000 assert used_quota.service == "youtube" assert used_quota.used_at == datetime.datetime.now(tz=datetime.timezone.utc) + + +def test_count_quota_with_generator_function(admin_user): + stored_credential = GoogleCloudOAuthCredential.objects.create() + GoogleCloudToken.objects.create( + oauth_credential=stored_credential, token="token", admin_user=admin_user + ) + + @count_quota("youtube", 1000) + def test_generator_function(*, credentials): + yield 1 + yield 2 + yield 3 + + with time_machine.travel("2023-10-20 12:00:00", tick=False): + generator = test_generator_function() + vals = [] + + for val in generator: + vals.append(val) + + assert vals == [1, 2, 3] + + assert stored_credential.usedrequestquota_set.count() == 1 + + used_quota = stored_credential.usedrequestquota_set.first() + assert used_quota.cost == 1000 + assert used_quota.service == "youtube" + assert used_quota.used_at == datetime.datetime.now(tz=datetime.timezone.utc) From 8e74153b83ed4f4e877e24bcd98d5db64409dd62 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 16:53:50 +0100 Subject: [PATCH 17/35] remove temporal --- backend/pdm.lock | 101 +++---- backend/pycon/settings/base.py | 2 - backend/pyproject.toml | 1 - backend/temporal/__init__.py | 0 backend/temporal/sdk.py | 9 - docker-compose.yml | 2 - infrastructure/applications/applications.tf | 8 - .../applications/pycon_backend/main.tf | 10 - .../applications/pycon_backend/variables.tf | 1 - .../applications/pycon_backend/worker.tf | 4 - infrastructure/applications/temporal/ecr.tf | 11 - .../applications/temporal/githash.py | 12 - infrastructure/applications/temporal/main.tf | 247 ------------------ .../applications/temporal/secrets.tf | 13 - .../applications/temporal/security.tf | 50 ---- .../applications/temporal/user_data.sh | 16 -- .../applications/temporal/variables.tf | 1 - infrastructure/applications/temporal/vpc.tf | 35 --- .../global/vpc/lambda_security_group.tf | 9 - 19 files changed, 36 insertions(+), 496 deletions(-) delete mode 100644 backend/temporal/__init__.py delete mode 100644 backend/temporal/sdk.py delete mode 100644 infrastructure/applications/temporal/ecr.tf delete mode 100644 infrastructure/applications/temporal/githash.py delete mode 100644 infrastructure/applications/temporal/main.tf delete mode 100644 infrastructure/applications/temporal/secrets.tf delete mode 100644 infrastructure/applications/temporal/security.tf delete mode 100644 infrastructure/applications/temporal/user_data.sh delete mode 100644 infrastructure/applications/temporal/variables.tf delete mode 100644 infrastructure/applications/temporal/vpc.tf diff --git a/backend/pdm.lock b/backend/pdm.lock index b86bb712f2..16e39aef66 100644 --- a/backend/pdm.lock +++ b/backend/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "lambda"] strategy = ["cross_platform"] lock_version = "4.4" -content_hash = 
"sha256:f9ba1cfc33d63fa0a6ffd94345d0949f694251b4c907c36522be36544fa5d6d6" +content_hash = "sha256:b64dac79b33562fbcf33024422734fd39550c1eafe90c60795c23bb636def78f" [[package]] name = "amqp" @@ -205,7 +205,7 @@ files = [ [[package]] name = "azure-storage-blob" -version = "12.19.0" +version = "12.19.1" requires_python = ">=3.7" summary = "Microsoft Azure Blob Storage Client Library for Python" dependencies = [ @@ -215,8 +215,8 @@ dependencies = [ "typing-extensions>=4.3.0", ] files = [ - {file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"}, - {file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"}, + {file = "azure-storage-blob-12.19.1.tar.gz", hash = "sha256:13e16ba42fc54ac2c7e8f976062173a5c82b9ec0594728e134aac372965a11b0"}, + {file = "azure_storage_blob-12.19.1-py3-none-any.whl", hash = "sha256:c5530dc51c21c9564e4eb706cd499befca8819b10dd89716d3fc90d747556243"}, ] [[package]] @@ -273,22 +273,22 @@ files = [ [[package]] name = "boto3" -version = "1.34.54" +version = "1.34.59" requires_python = ">= 3.8" summary = "The AWS SDK for Python" dependencies = [ - "botocore<1.35.0,>=1.34.54", + "botocore<1.35.0,>=1.34.59", "jmespath<2.0.0,>=0.7.1", "s3transfer<0.11.0,>=0.10.0", ] files = [ - {file = "boto3-1.34.54-py3-none-any.whl", hash = "sha256:f201b6a416f809283d554c652211eecec9fe3a52ed4063dab3f3e7aea7571d9c"}, - {file = "boto3-1.34.54.tar.gz", hash = "sha256:8b3f5cc7fbedcbb22271c328039df8a6ab343001e746e0cdb24774c426cadcf8"}, + {file = "boto3-1.34.59-py3-none-any.whl", hash = "sha256:004e67b078be58d34469406f93cc8b95bc43becef4bbe44523a0b8e51f84c668"}, + {file = "boto3-1.34.59.tar.gz", hash = "sha256:162edf182e53c198137a28432a626dba103f787a8f5000ed4758b73ccd203fa0"}, ] [[package]] name = "botocore" -version = "1.34.54" +version = "1.34.59" requires_python = ">= 3.8" summary = "Low-level, data-driven core of boto 3." dependencies = [ @@ -297,8 +297,8 @@ dependencies = [ "urllib3<2.1,>=1.25.4; python_version >= \"3.10\"", ] files = [ - {file = "botocore-1.34.54-py3-none-any.whl", hash = "sha256:bf215d93e9d5544c593962780d194e74c6ee40b883d0b885e62ef35fc0ec01e5"}, - {file = "botocore-1.34.54.tar.gz", hash = "sha256:4061ff4be3efcf53547ebadf2c94d419dfc8be7beec24e9fa1819599ffd936fa"}, + {file = "botocore-1.34.59-py3-none-any.whl", hash = "sha256:4bc112dafb1679ab571117593f7656604726a3da0e5ae5bad00ea772fa40e75c"}, + {file = "botocore-1.34.59.tar.gz", hash = "sha256:24edb4d21d7c97dea0c6c4a80d36b3809b1443a30b0bd5e317d6c319dfac823f"}, ] [[package]] @@ -931,15 +931,15 @@ files = [ [[package]] name = "faker" -version = "23.3.0" +version = "24.1.0" requires_python = ">=3.8" summary = "Faker is a Python package that generates fake data for you." 
dependencies = [ "python-dateutil>=2.4", ] files = [ - {file = "Faker-23.3.0-py3-none-any.whl", hash = "sha256:117ce1a2805c1bc5ca753b3dc6f9d567732893b2294b827d3164261ee8f20267"}, - {file = "Faker-23.3.0.tar.gz", hash = "sha256:458d93580de34403a8dec1e8d5e6be2fee96c4deca63b95d71df7a6a80a690de"}, + {file = "Faker-24.1.0-py3-none-any.whl", hash = "sha256:89ae0932f4f269754790569828859eaa0ae2ce73d1f3eb1f30ae7c20d4daf5ce"}, + {file = "Faker-24.1.0.tar.gz", hash = "sha256:4fb0c16c71ad35d278a5fa7a4106a5c26c2b2b5c5efc47c1d67635db90b6071e"}, ] [[package]] @@ -1007,7 +1007,7 @@ files = [ [[package]] name = "google-api-python-client" -version = "2.120.0" +version = "2.121.0" requires_python = ">=3.7" summary = "Google API Client Library for Python" dependencies = [ @@ -1018,13 +1018,13 @@ dependencies = [ "uritemplate<5,>=3.0.1", ] files = [ - {file = "google-api-python-client-2.120.0.tar.gz", hash = "sha256:a0c8769cad9576768bcb3191cb1f550f6ab3290cba042badb0fb17bba03f70cc"}, - {file = "google_api_python_client-2.120.0-py2.py3-none-any.whl", hash = "sha256:e2cdf4497bfc758fb44a4b487920cc1ca0571c2428187697a8e43e3b9feba1c9"}, + {file = "google-api-python-client-2.121.0.tar.gz", hash = "sha256:df863ece4db8b36ce1053ebd983e43fbc5b664209eed78e82cc84ae56ddac6c0"}, + {file = "google_api_python_client-2.121.0-py2.py3-none-any.whl", hash = "sha256:bb4da677150dd16c45818620baca8a63208c6f4180a0691ad1c1708b384c10be"}, ] [[package]] name = "google-auth" -version = "2.28.1" +version = "2.28.2" requires_python = ">=3.7" summary = "Google Authentication Library" dependencies = [ @@ -1033,8 +1033,8 @@ dependencies = [ "rsa<5,>=3.1.4", ] files = [ - {file = "google-auth-2.28.1.tar.gz", hash = "sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885"}, - {file = "google_auth-2.28.1-py2.py3-none-any.whl", hash = "sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72"}, + {file = "google-auth-2.28.2.tar.gz", hash = "sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30"}, + {file = "google_auth-2.28.2-py2.py3-none-any.whl", hash = "sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38"}, ] [[package]] @@ -1522,7 +1522,7 @@ files = [ [[package]] name = "mypy" -version = "1.8.0" +version = "1.9.0" requires_python = ">=3.8" summary = "Optional static typing for Python" dependencies = [ @@ -1530,18 +1530,18 @@ dependencies = [ "typing-extensions>=4.1.0", ] files = [ - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [[package]] @@ -2059,12 +2059,12 @@ files = [ [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" requires_python = ">=3.6.8" summary = "pyparsing module - Classes and methods to define and execute parsing grammars" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [[package]] @@ -2559,25 +2559,6 @@ files = [ {file = "telepath-0.3.1.tar.gz", hash = "sha256:925c0609e0a8a6488ec4a55b19d485882cf72223b2b19fe2359a50fddd813c9c"}, ] -[[package]] -name = "temporalio" -version = "1.4.0" -requires_python = ">=3.7,<4.0" -summary = "Temporal.io Python SDK" -dependencies = [ - "protobuf>=3.20", - "types-protobuf>=3.20", - "typing-extensions<5.0.0,>=4.2.0", -] -files = [ - {file = "temporalio-1.4.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:344a75f559d9a0aabde59da0072688523a0c344e8c916d5b5597e6a618696dd4"}, - {file = "temporalio-1.4.0-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f39b7977d1f7f2e3938172980764ec5c50755653fd986d6e68c6b4db242594d6"}, - {file = "temporalio-1.4.0-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58e29300bdb586160b8d8f2208106ff9e9d9f740bfb3385b82a509fa38d9a743"}, - {file = "temporalio-1.4.0-cp37-abi3-win_amd64.whl", hash = "sha256:16bc76cb4d57e633f9e26381d941e3c251a42b5bef6b3668cc4cc2fc08ad6696"}, - {file = "temporalio-1.4.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:209369da8e529838a238fc6ec464727f7c922d9314ec0ab8f4c19be6dddf0339"}, - {file = "temporalio-1.4.0.tar.gz", hash = "sha256:56ef81b4b57e709e8f52d49e16b5c7a77d4b63cb697b1d991dd9eaa7fbbda591"}, -] - [[package]] name = "time-machine" version = "2.14.0" @@ -2650,16 +2631,6 @@ files = [ {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, ] -[[package]] -name = "types-protobuf" -version = "4.24.0.20240302" -requires_python = ">=3.8" -summary = "Typing stubs for protobuf" -files = [ - {file = "types-protobuf-4.24.0.20240302.tar.gz", hash = "sha256:f22c00cc0cea9722e71e14d389bba429af9e35a74a949719c167203a5abbe2e4"}, - {file = "types_protobuf-4.24.0.20240302-py3-none-any.whl", hash = "sha256:5c607990f50f14606c2edaf379f8acc7418fef1451b227aa3c6a8a2cbc6ff14a"}, -] - [[package]] name = "typing-extensions" version = "4.10.0" diff --git a/backend/pycon/settings/base.py b/backend/pycon/settings/base.py index d98052283f..8b0ca3bd16 100644 --- a/backend/pycon/settings/base.py +++ b/backend/pycon/settings/base.py @@ -315,8 +315,6 @@ "default": env.cache(default="locmemcache://snowflake"), } -TEMPORAL_ADDRESS = env("TEMPORAL_ADDRESS", default="") - SESSION_COOKIE_NAME = "pythonitalia_sessionid" CSRF_USE_SESSIONS = True CSRF_COOKIE_SECURE = DEBUG is False diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 741c62bdc7..1e116570bb 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -88,7 +88,6 @@ dependencies = [ "google-auth<3.0.0,>=2.22.0", "google-auth-oauthlib<2.0.0,>=1.0.0", "google-auth-httplib2<1.0.0,>=0.1.0", - "temporalio==1.4.0", "opencv-python<5.0.0.0,>=4.8.0.74", "argon2-cffi<24.0.0,>=23.1.0", "stripe<8.0.0,>=7.0.0", diff --git a/backend/temporal/__init__.py b/backend/temporal/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/backend/temporal/sdk.py b/backend/temporal/sdk.py deleted file mode 100644 index 2aad860391..0000000000 --- a/backend/temporal/sdk.py +++ /dev/null @@ -1,9 +0,0 @@ -from temporalio.client import Client -from asgiref.sync import async_to_sync -from django.conf import settings - - -@async_to_sync -async def start_workflow(*args, **kwargs): - client = await Client.connect(settings.TEMPORAL_ADDRESS) - return await client.start_workflow(*args, **kwargs) diff --git a/docker-compose.yml b/docker-compose.yml index 137bd7e796..68e96d4784 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,8 +14,6 @@ x-defaults: STRIPE_SUBSCRIPTION_PRICE_ID: price_1IkVzxD5MZ3GejSORRBZCvK6 # URLs ASSOCIATION_FRONTEND_URL: http://localhost:3020 - # Services URLs - TEMPORAL_ADDRESS: temporal:7233 # stripe STRIPE_SECRET_API_KEY: ${STRIPE_SECRET_API_KEY} STRIPE_WEBHOOK_SIGNATURE_SECRET: ${STRIPE_WEBHOOK_SIGNATURE_SECRET} diff --git a/infrastructure/applications/applications.tf b/infrastructure/applications/applications.tf index 1fc0027b12..380a5c6791 100644 --- 
a/infrastructure/applications/applications.tf +++ b/infrastructure/applications/applications.tf @@ -1,7 +1,6 @@ locals { is_prod = terraform.workspace == "production" deploy_pretix = local.is_prod - deploy_temporal = false enable_proxy = local.is_prod ? false : false } @@ -13,16 +12,9 @@ module "pretix" { enable_proxy = local.enable_proxy } -module "temporal" { - source = "./temporal" - count = local.deploy_temporal ? 1 : 0 - enable_proxy = local.enable_proxy -} - module "pycon_backend" { source = "./pycon_backend" enable_proxy = local.enable_proxy - deploy_temporal = local.deploy_temporal providers = { aws = aws diff --git a/infrastructure/applications/pycon_backend/main.tf b/infrastructure/applications/pycon_backend/main.tf index 53d4e85487..64b0f37b27 100644 --- a/infrastructure/applications/pycon_backend/main.tf +++ b/infrastructure/applications/pycon_backend/main.tf @@ -55,15 +55,6 @@ data "aws_elasticache_cluster" "redis" { cluster_id = "production-pretix" } -data "aws_instance" "temporal_machine" { - count = var.deploy_temporal ? 1 : 0 - - filter { - name = "tag:Name" - values = ["production-temporal-instance"] - } -} - module "lambda" { source = "../../components/application_lambda" @@ -104,7 +95,6 @@ module "lambda" { PLAIN_API = "https://core-api.uk.plain.com/graphql/v1" PLAIN_API_TOKEN = module.secrets.value.plain_api_token CACHE_URL = local.is_prod ? "redis://${data.aws_elasticache_cluster.redis.cache_nodes.0.address}/8" : "locmemcache://snowflake" - TEMPORAL_ADDRESS = var.deploy_temporal ? "${data.aws_instance.temporal_machine[0].private_ip}:7233" : "" STRIPE_WEBHOOK_SIGNATURE_SECRET = module.secrets.value.stripe_webhook_secret STRIPE_SUBSCRIPTION_PRICE_ID = module.secrets.value.stripe_membership_price_id STRIPE_SECRET_API_KEY = module.secrets.value.stripe_secret_api_key diff --git a/infrastructure/applications/pycon_backend/variables.tf b/infrastructure/applications/pycon_backend/variables.tf index ffb550941f..a286bf9688 100644 --- a/infrastructure/applications/pycon_backend/variables.tf +++ b/infrastructure/applications/pycon_backend/variables.tf @@ -5,4 +5,3 @@ locals { } variable "enable_proxy" {} -variable "deploy_temporal" {} diff --git a/infrastructure/applications/pycon_backend/worker.tf b/infrastructure/applications/pycon_backend/worker.tf index 820b649908..5c630b41aa 100644 --- a/infrastructure/applications/pycon_backend/worker.tf +++ b/infrastructure/applications/pycon_backend/worker.tf @@ -128,10 +128,6 @@ locals { name = "CACHE_URL", value = local.is_prod ? "redis://${data.aws_elasticache_cluster.redis.cache_nodes.0.address}/8" : "locmemcache://snowflake" }, - { - name = "TEMPORAL_ADDRESS", - value = var.deploy_temporal ? 
"${data.aws_instance.temporal_machine[0].private_ip}:7233" : "" - }, { name = "STRIPE_WEBHOOK_SIGNATURE_SECRET", value = module.secrets.value.stripe_webhook_secret diff --git a/infrastructure/applications/temporal/ecr.tf b/infrastructure/applications/temporal/ecr.tf deleted file mode 100644 index b3091adbc1..0000000000 --- a/infrastructure/applications/temporal/ecr.tf +++ /dev/null @@ -1,11 +0,0 @@ -locals { - image_uri_prefix = "${data.aws_caller_identity.current.account_id}.dkr.ecr.eu-central-1.amazonaws.com" - pycon_be_image_uri = "${local.image_uri_prefix}/pythonit/pycon-backend:${data.external.githash_pycon_be.result.githash}" -} - -data "aws_caller_identity" "current" {} - -data "external" "githash_pycon_be" { - program = ["python", abspath("${path.module}/githash.py")] - working_dir = abspath("${path.root}/../../backend") -} diff --git a/infrastructure/applications/temporal/githash.py b/infrastructure/applications/temporal/githash.py deleted file mode 100644 index 14cef63fd8..0000000000 --- a/infrastructure/applications/temporal/githash.py +++ /dev/null @@ -1,12 +0,0 @@ -import json -import subprocess -import sys - -git_output = subprocess.check_output( - ["git", "rev-list", "-1", "HEAD", "--", "."], -) -githash = git_output.decode().strip() - -output = {"githash": githash} -output_json = json.dumps(output) -sys.stdout.write(output_json) diff --git a/infrastructure/applications/temporal/main.tf b/infrastructure/applications/temporal/main.tf deleted file mode 100644 index 54b2520483..0000000000 --- a/infrastructure/applications/temporal/main.tf +++ /dev/null @@ -1,247 +0,0 @@ -locals { - is_prod = terraform.workspace == "production" - db_connection_pycon_be = var.enable_proxy ? "postgres://${data.aws_db_instance.database.master_username}:${module.common_secrets.value.database_password}@${data.aws_db_proxy.proxy[0].endpoint}:${data.aws_db_instance.database.port}/pycon" : "postgres://${data.aws_db_instance.database.master_username}:${module.common_secrets.value.database_password}@${data.aws_db_instance.database.address}:${data.aws_db_instance.database.port}/pycon" - - users_backend_url = local.is_prod ? "https://users-api.python.it" : "https://${terraform.workspace}-users-api.python.it" - association_backend_url = local.is_prod ? "https://association-api.python.it" : "https://${terraform.workspace}-association-api.python.it" -} - -data "aws_db_instance" "database" { - db_instance_identifier = "pythonit-${terraform.workspace}" -} - -data "aws_db_proxy" "proxy" { - count = var.enable_proxy ? 
1 : 0 - name = "pythonit-${terraform.workspace}-database-proxy" -} - -data "aws_elasticache_cluster" "redis" { - cluster_id = "production-pretix" -} - -resource "aws_ecs_cluster" "temporal" { - name = "${terraform.workspace}-temporal" -} - -data "template_file" "user_data" { - template = file("${path.module}/user_data.sh") - vars = { - ecs_cluster = aws_ecs_cluster.temporal.name - } -} - -data "aws_iam_instance_profile" "instance" { - name = "${terraform.workspace}-pretix-instance-profile" -} - -resource "aws_instance" "temporal" { - ami = "ami-0d24d62eae192fc54" - instance_type = "t3.small" - subnet_id = data.aws_subnet.private.id - availability_zone = "eu-central-1a" - vpc_security_group_ids = [ - aws_security_group.instance.id, - data.aws_security_group.rds.id - ] - source_dest_check = false - user_data = data.template_file.user_data.rendered - iam_instance_profile = data.aws_iam_instance_profile.instance.name - key_name = "pretix" - - tags = { - Name = "${terraform.workspace}-temporal-instance" - } - - root_block_device { - volume_size = 30 - } -} - -resource "aws_ecs_task_definition" "temporal_service" { - family = "${terraform.workspace}-temporal" - container_definitions = jsonencode([ - { - name = "temporal" - image = "temporalio/auto-setup:1.21.2.0" - cpu = 512 - memory = 512 - essential = true - environment = [ - { - name = "DB" - value = "postgres" - }, - { - name = "DB_PORT", - value = "5432" - }, - { - name = "POSTGRES_USER" - value = data.aws_db_instance.database.master_username - }, - { - name = "POSTGRES_PWD" - value = module.common_secrets.value.database_password - }, - { - name = "POSTGRES_DB" - value = "temporal" - }, - { - name = "POSTGRES_SEEDS" - value = var.enable_proxy ? data.aws_db_proxy.proxy[0].endpoint : data.aws_db_instance.database.address - }, - ] - portMappings = [ - { - containerPort = 7233 - hostPort = 7233 - protocol = "tcp" - } - ] - mountPoints = [] - systemControls = [] - }, - { - name = "temporal-ui", - image = "temporalio/ui:2.16.2" - cpu = 512 - memory = 512 - environment = [ - { - name = "TEMPORAL_ADDRESS", - value = "172.17.0.1:7233" - } - ], - portMappings = [ - { - containerPort = 8080 - hostPort = 8080 - protocol = "tcp" - } - ] - }, - { - name = "admin-tools" - cpu = 100 - memory = 100 - image = "temporalio/admin-tools:1.21.2.0" - environment = [ - { - name = "TEMPORAL_ADDRESS", - value = "172.17.0.1:7233" - }, - { - name = "TEMPORAL_CLI_ADDRESS", - value = "172.17.0.1:7233" - } - ] - }, - { - name = "pycon-backend-worker", - cpu = 512 - memory = 512 - image = local.pycon_be_image_uri - entrypoint = ["/home/app/.venv/bin/python"] - command = ["worker.py"] - environment = [ - { - name = "DJANGO_SETTINGS_MODULE", - value = "pycon.settings.prod" - }, - { - name = "TEMPORAL_ADDRESS", - value = "172.17.0.1:7233" - }, - { - name = "DATABASE_URL" - value = local.db_connection_pycon_be - }, - { - name = "DEBUG", - value = "False" - }, - { - name = "SECRET_KEY", - value = module.pycon_be_secrets.value.secret_key - }, - { - name = "SENTRY_DSN", - value = module.pycon_be_secrets.value.sentry_dsn - }, - { - name = "SPEAKERS_EMAIL_ADDRESS", - value = module.pycon_be_secrets.value.speakers_email_address - }, - { - name = "EMAIL_BACKEND", - value = "django_ses.SESBackend" - }, - { - name = "PYTHONIT_EMAIL_BACKEND", - value = "pythonit_toolkit.emails.backends.ses.SESEmailBackend" - }, - { - name = "PRETIX_API", - value = "https://tickets.pycon.it/api/v1/" - }, - { - name = "PRETIX_API_TOKEN", - value = module.pycon_be_secrets.value.pretix_api_token - }, - { - 
name = "ASSOCIATION_BACKEND_SERVICE", - value = local.association_backend_url - }, - { - name = "USERS_SERVICE", - value = local.users_backend_url - }, - { - name = "SERVICE_TO_SERVICE_SECRET", - value = module.common_secrets.value.service_to_service_secret - }, - { - name = "PASTAPORTO_SECRET", - value = module.common_secrets.value.pastaporto_secret - }, - { - name = "AZURE_STORAGE_ACCOUNT_NAME", - value = module.pycon_be_secrets.value.azure_storage_account_name - }, - { - name = "AZURE_STORAGE_ACCOUNT_KEY", - value = module.pycon_be_secrets.value.azure_storage_account_key - }, - { - name = "CACHE_URL", - value = local.is_prod ? "redis://${data.aws_elasticache_cluster.redis.cache_nodes.0.address}/8" : "locmemcache://snowflake" - } - ] - mountPoints = [ - { - sourceVolume = "tmp" - containerPath = "/tmp" - } - ] - } - ]) - - volume { - name = "tmp" - host_path = "/tmp" - } - - requires_compatibilities = [] - tags = {} -} - - -resource "aws_ecs_service" "temporal" { - name = "${terraform.workspace}-temporal-service" - cluster = aws_ecs_cluster.temporal.id - task_definition = aws_ecs_task_definition.temporal_service.arn - desired_count = 1 - deployment_minimum_healthy_percent = 0 - deployment_maximum_percent = 100 -} diff --git a/infrastructure/applications/temporal/secrets.tf b/infrastructure/applications/temporal/secrets.tf deleted file mode 100644 index 4afbddf746..0000000000 --- a/infrastructure/applications/temporal/secrets.tf +++ /dev/null @@ -1,13 +0,0 @@ -module "secrets" { - source = "../../components/secrets" - service = "temporal" -} - -module "pycon_be_secrets" { - source = "../../components/secrets" - service = "pycon-backend" -} - -module "common_secrets" { - source = "../../components/secrets" -} diff --git a/infrastructure/applications/temporal/security.tf b/infrastructure/applications/temporal/security.tf deleted file mode 100644 index 155ed8f789..0000000000 --- a/infrastructure/applications/temporal/security.tf +++ /dev/null @@ -1,50 +0,0 @@ -resource "aws_security_group" "instance" { - name = "${terraform.workspace}-temporal-instance" - description = "${terraform.workspace} temporal instance" - vpc_id = data.aws_vpc.default.id -} - -resource "aws_security_group_rule" "allow_http" { - type = "egress" - from_port = 80 - to_port = 80 - protocol = "tcp" - security_group_id = aws_security_group.instance.id - cidr_blocks = ["0.0.0.0/0"] -} - -resource "aws_security_group_rule" "allow_https" { - type = "egress" - from_port = 443 - to_port = 443 - protocol = "tcp" - security_group_id = aws_security_group.instance.id - cidr_blocks = ["0.0.0.0/0"] -} - -resource "aws_security_group_rule" "ssh" { - type = "ingress" - from_port = 22 - to_port = 22 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0"] - security_group_id = aws_security_group.instance.id -} - -resource "aws_security_group_rule" "temporal_ui" { - type = "ingress" - from_port = 8080 - to_port = 8080 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0"] - security_group_id = aws_security_group.instance.id -} - -resource "aws_security_group_rule" "temporal" { - type = "ingress" - from_port = 7233 - to_port = 7233 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0"] - security_group_id = aws_security_group.instance.id -} diff --git a/infrastructure/applications/temporal/user_data.sh b/infrastructure/applications/temporal/user_data.sh deleted file mode 100644 index 2d37e49040..0000000000 --- a/infrastructure/applications/temporal/user_data.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -set -x - -# Config ECS agent -echo 
"ECS_CLUSTER=${ecs_cluster}" > /etc/ecs/ecs.config - -# Reclaim unused Docker disk space -cat << "EOF" > /usr/local/bin/claimspace.sh -#!/bin/bash -# Run fstrim on the host OS periodically to reclaim the unused container data blocks -docker ps -q | xargs docker inspect --format='{{ .State.Pid }}' | xargs -IZ sudo fstrim /proc/Z/root/ -exit $? -EOF - -chmod +x /usr/local/bin/claimspace.sh -echo "0 0 * * * root /usr/local/bin/claimspace.sh" > /etc/cron.d/claimspace diff --git a/infrastructure/applications/temporal/variables.tf b/infrastructure/applications/temporal/variables.tf deleted file mode 100644 index 0a600173f7..0000000000 --- a/infrastructure/applications/temporal/variables.tf +++ /dev/null @@ -1 +0,0 @@ -variable "enable_proxy" {} diff --git a/infrastructure/applications/temporal/vpc.tf b/infrastructure/applications/temporal/vpc.tf deleted file mode 100644 index 534918cf3d..0000000000 --- a/infrastructure/applications/temporal/vpc.tf +++ /dev/null @@ -1,35 +0,0 @@ -data "aws_vpc" "default" { - filter { - name = "tag:Name" - values = ["pythonit-vpc"] - } -} - -data "aws_security_group" "rds" { - name = "pythonit-rds-security-group" -} - -data "aws_subnet" "private" { - vpc_id = data.aws_vpc.default.id - - filter { - name = "tag:Type" - values = ["private"] - } - - filter { - name = "tag:AZ" - values = ["eu-central-1a"] - } -} - -data "aws_subnets" "private" { - filter { - name = "vpc-id" - values = [data.aws_vpc.default.id] - } - - tags = { - Type = "private" - } -} diff --git a/infrastructure/global/vpc/lambda_security_group.tf b/infrastructure/global/vpc/lambda_security_group.tf index 4fe55455d7..cd6a436a8b 100644 --- a/infrastructure/global/vpc/lambda_security_group.tf +++ b/infrastructure/global/vpc/lambda_security_group.tf @@ -34,12 +34,3 @@ resource "aws_security_group_rule" "allow_outbound_redis" { security_group_id = aws_security_group.lambda.id cidr_blocks = ["0.0.0.0/0"] } - -resource "aws_security_group_rule" "allow_outbound_temporal" { - type = "egress" - from_port = 7233 - to_port = 7233 - protocol = "tcp" - security_group_id = aws_security_group.lambda.id - cidr_blocks = ["0.0.0.0/0"] -} From 6c968f6bbbff7f5d684d34284eb18cd6d2745302 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 17:20:19 +0100 Subject: [PATCH 18/35] more tests --- backend/google_api/tests/test_sdk.py | 135 ++++++++++++++++++++++++++- 1 file changed, 133 insertions(+), 2 deletions(-) diff --git a/backend/google_api/tests/test_sdk.py b/backend/google_api/tests/test_sdk.py index e9ffd79cbb..310ad68f9c 100644 --- a/backend/google_api/tests/test_sdk.py +++ b/backend/google_api/tests/test_sdk.py @@ -1,8 +1,18 @@ import datetime +from unittest import mock import time_machine -from google_api.models import GoogleCloudOAuthCredential, GoogleCloudToken +from google_api.models import ( + GoogleCloudOAuthCredential, + GoogleCloudToken, + UsedRequestQuota, +) from google_api.exceptions import NoGoogleCloudQuotaLeftError -from google_api.sdk import count_quota, get_available_credentials +from google_api.sdk import ( + count_quota, + get_available_credentials, + youtube_videos_insert, + youtube_videos_set_thumbnail, +) import pytest pytestmark = pytest.mark.django_db @@ -80,3 +90,124 @@ def test_generator_function(*, credentials): assert used_quota.cost == 1000 assert used_quota.service == "youtube" assert used_quota.used_at == datetime.datetime.now(tz=datetime.timezone.utc) + + +def test_youtube_videos_insert(mocker, admin_user): + stored_credential = GoogleCloudOAuthCredential.objects.create() + 
GoogleCloudToken.objects.create( + oauth_credential=stored_credential, token="token", admin_user=admin_user + ) + + mock_build = mocker.patch("google_api.sdk.build") + mocker.patch("google_api.sdk.MediaFileUpload") + + mock_youtube = mocker.Mock() + mock_build.return_value = mock_youtube + + mock_upload_request = mocker.Mock() + mock_youtube.videos.return_value.insert.return_value = mock_upload_request + mock_upload_request.next_chunk.side_effect = [(None, {"id": "12345"})] + + response = list( + youtube_videos_insert( + title="Title", + description="Description", + tags="Tag1,Tag2", + file_path="/file/test.mp4", + ) + ) + + mock_youtube.videos.return_value.insert.assert_called_with( + part="snippet,status", + notifySubscribers=False, + body={ + "snippet": { + "title": "Title", + "description": "Description", + "tags": "Tag1,Tag2", + }, + "status": { + "privacyStatus": "public", + "selfDeclaredMadeForKids": False, + }, + }, + media_body=mock.ANY, + ) + + assert response[0] is None + assert response[1]["id"] == "12345" + + assert UsedRequestQuota.objects.filter(service="youtube", cost=1600).exists() + + +def test_youtube_videos_insert_when_failing_raises_an_error(mocker, admin_user): + stored_credential = GoogleCloudOAuthCredential.objects.create() + GoogleCloudToken.objects.create( + oauth_credential=stored_credential, token="token", admin_user=admin_user + ) + + mock_build = mocker.patch("google_api.sdk.build") + mocker.patch("google_api.sdk.MediaFileUpload") + + mock_youtube = mocker.Mock() + mock_build.return_value = mock_youtube + + mock_upload_request = mocker.Mock() + mock_youtube.videos.return_value.insert.return_value = mock_upload_request + mock_upload_request.next_chunk.side_effect = [(None, {"error": "Message"})] + + with pytest.raises(ValueError) as exc: + list( + youtube_videos_insert( + title="Title", + description="Description", + tags="Tag1,Tag2", + file_path="/file/test.mp4", + ) + ) + + assert "The upload failed with an unexpected response: {'error': 'Message'}" == str( + exc.value + ) + + mock_youtube.videos.return_value.insert.assert_called_with( + part="snippet,status", + notifySubscribers=False, + body={ + "snippet": { + "title": "Title", + "description": "Description", + "tags": "Tag1,Tag2", + }, + "status": { + "privacyStatus": "public", + "selfDeclaredMadeForKids": False, + }, + }, + media_body=mock.ANY, + ) + + assert UsedRequestQuota.objects.filter(service="youtube", cost=1600).exists() + + +def test_youtube_videos_set_thumbnail(mocker, admin_user): + stored_credential = GoogleCloudOAuthCredential.objects.create() + GoogleCloudToken.objects.create( + oauth_credential=stored_credential, token="token", admin_user=admin_user + ) + + mock_build = mocker.patch("google_api.sdk.build") + mocker.patch("google_api.sdk.MediaFileUpload") + + mock_youtube = mocker.Mock() + mock_build.return_value = mock_youtube + + mock_youtube.thumbnails.return_value.set.return_value.execute.return_value = {} + + youtube_videos_set_thumbnail(video_id="123", thumbnail_path="/test.png") + + mock_youtube.thumbnails.return_value.set.assert_called_once_with( + videoId="123", media_body=mock.ANY + ) + + assert UsedRequestQuota.objects.filter(service="youtube", cost=50).exists() From 31b18f704ebc7ae7d092a3f8b5d4e0b93c268a7a Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 17:21:36 +0100 Subject: [PATCH 19/35] assert build call --- backend/google_api/tests/test_sdk.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/backend/google_api/tests/test_sdk.py 
b/backend/google_api/tests/test_sdk.py index 310ad68f9c..c4704c92a2 100644 --- a/backend/google_api/tests/test_sdk.py +++ b/backend/google_api/tests/test_sdk.py @@ -117,6 +117,8 @@ def test_youtube_videos_insert(mocker, admin_user): ) ) + mock_build.assert_called_with("youtube", "v3", credentials=mocker.ANY) + mock_youtube.videos.return_value.insert.assert_called_with( part="snippet,status", notifySubscribers=False, @@ -206,6 +208,8 @@ def test_youtube_videos_set_thumbnail(mocker, admin_user): youtube_videos_set_thumbnail(video_id="123", thumbnail_path="/test.png") + mock_build.assert_called_with("youtube", "v3", credentials=mocker.ANY) + mock_youtube.thumbnails.return_value.set.assert_called_once_with( videoId="123", media_body=mock.ANY ) From 17c69823fcce1cc63e70cc807da9779a7028f344 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 17:24:41 +0100 Subject: [PATCH 20/35] remove never working action --- .github/workflows/terraform-lint.yml | 95 ---------------------------- 1 file changed, 95 deletions(-) delete mode 100644 .github/workflows/terraform-lint.yml diff --git a/.github/workflows/terraform-lint.yml b/.github/workflows/terraform-lint.yml deleted file mode 100644 index 5e03c7ac0e..0000000000 --- a/.github/workflows/terraform-lint.yml +++ /dev/null @@ -1,95 +0,0 @@ -on: - pull_request: - paths: - - "infrastructure/applications/**/*" - - "infrastructure/applications/*" - - "infrastructure/azure-applications/*" - - "infrastructure/azure-applications/**/*" - -name: Terraform Lint - -env: - TF_WORKSPACE: "production" - -jobs: - tfsec: - name: TFSec - runs-on: ubuntu-latest - - permissions: - contents: read - pull-requests: write - - steps: - - name: Clone repo - uses: actions/checkout@master - - name: tfsec - uses: aquasecurity/tfsec-pr-commenter-action@v1.2.0 - with: - github_token: ${{ github.token }} - - lint: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - name: Terraform Format - id: fmt - run: terraform fmt -check - env: - TF_IN_AUTOMATION: 1 - - plan: - name: Plan [AWS] - runs-on: ubuntu-latest - defaults: - run: - working-directory: ./infrastructure/applications - steps: - - uses: actions/checkout@v2 - - uses: hashicorp/setup-terraform@v1 - with: - terraform_version: 1.2.4 - - name: Terraform Init - run: terraform init - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TF_IN_AUTOMATION: 1 - - name: Terraform Validate - id: validate - run: terraform validate -no-color - - name: Terraform Plan - id: plan - run: terraform plan -no-color &> /dev/null - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: eu-central-1 - TF_IN_AUTOMATION: 1 - - name: Find Plan comment - uses: peter-evans/find-comment@v1 - id: find_comment - with: - token: ${{ secrets.BOT_TOKEN }} - issue-number: ${{ github.event.pull_request.number }} - comment-author: pythonitaliabot - body-includes: "# Terraform" - - name: Create or Update comment - uses: peter-evans/create-or-update-comment@v1 - if: steps.find_comment.outputs.comment-id != '' - with: - token: ${{ secrets.BOT_TOKEN }} - comment-id: ${{ steps.find_comment.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - edit-mode: replace - body: | - # Terraform - - ## Terraform Validation 🤖 - ${{ steps.validate.outputs.stdout }} - - ## Terraform Plan 
📖 - ${{ steps.plan.outcome }} From 4daaec3cdbb61334e8d024659d0766451c21431b Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 17:25:04 +0100 Subject: [PATCH 21/35] Revert "remove never working action" This reverts commit 17c69823fcce1cc63e70cc807da9779a7028f344. --- .github/workflows/terraform-lint.yml | 95 ++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 .github/workflows/terraform-lint.yml diff --git a/.github/workflows/terraform-lint.yml b/.github/workflows/terraform-lint.yml new file mode 100644 index 0000000000..5e03c7ac0e --- /dev/null +++ b/.github/workflows/terraform-lint.yml @@ -0,0 +1,95 @@ +on: + pull_request: + paths: + - "infrastructure/applications/**/*" + - "infrastructure/applications/*" + - "infrastructure/azure-applications/*" + - "infrastructure/azure-applications/**/*" + +name: Terraform Lint + +env: + TF_WORKSPACE: "production" + +jobs: + tfsec: + name: TFSec + runs-on: ubuntu-latest + + permissions: + contents: read + pull-requests: write + + steps: + - name: Clone repo + uses: actions/checkout@master + - name: tfsec + uses: aquasecurity/tfsec-pr-commenter-action@v1.2.0 + with: + github_token: ${{ github.token }} + + lint: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Terraform Format + id: fmt + run: terraform fmt -check + env: + TF_IN_AUTOMATION: 1 + + plan: + name: Plan [AWS] + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./infrastructure/applications + steps: + - uses: actions/checkout@v2 + - uses: hashicorp/setup-terraform@v1 + with: + terraform_version: 1.2.4 + - name: Terraform Init + run: terraform init + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + TF_IN_AUTOMATION: 1 + - name: Terraform Validate + id: validate + run: terraform validate -no-color + - name: Terraform Plan + id: plan + run: terraform plan -no-color &> /dev/null + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: eu-central-1 + TF_IN_AUTOMATION: 1 + - name: Find Plan comment + uses: peter-evans/find-comment@v1 + id: find_comment + with: + token: ${{ secrets.BOT_TOKEN }} + issue-number: ${{ github.event.pull_request.number }} + comment-author: pythonitaliabot + body-includes: "# Terraform" + - name: Create or Update comment + uses: peter-evans/create-or-update-comment@v1 + if: steps.find_comment.outputs.comment-id != '' + with: + token: ${{ secrets.BOT_TOKEN }} + comment-id: ${{ steps.find_comment.outputs.comment-id }} + issue-number: ${{ github.event.pull_request.number }} + edit-mode: replace + body: | + # Terraform + + ## Terraform Validation 🤖 + ${{ steps.validate.outputs.stdout }} + + ## Terraform Plan 📖 + ${{ steps.plan.outcome }} From 2384e003481cdab24e1ae2bd144c87a20ff219e1 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 17:25:17 +0100 Subject: [PATCH 22/35] remove plan --- .github/workflows/terraform-lint.yml | 55 ---------------------------- 1 file changed, 55 deletions(-) diff --git a/.github/workflows/terraform-lint.yml b/.github/workflows/terraform-lint.yml index 5e03c7ac0e..4487206e89 100644 --- a/.github/workflows/terraform-lint.yml +++ b/.github/workflows/terraform-lint.yml @@ -38,58 +38,3 @@ jobs: run: terraform fmt -check env: TF_IN_AUTOMATION: 1 - - plan: - name: Plan [AWS] - runs-on: ubuntu-latest - defaults: 
- run: - working-directory: ./infrastructure/applications - steps: - - uses: actions/checkout@v2 - - uses: hashicorp/setup-terraform@v1 - with: - terraform_version: 1.2.4 - - name: Terraform Init - run: terraform init - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TF_IN_AUTOMATION: 1 - - name: Terraform Validate - id: validate - run: terraform validate -no-color - - name: Terraform Plan - id: plan - run: terraform plan -no-color &> /dev/null - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: eu-central-1 - TF_IN_AUTOMATION: 1 - - name: Find Plan comment - uses: peter-evans/find-comment@v1 - id: find_comment - with: - token: ${{ secrets.BOT_TOKEN }} - issue-number: ${{ github.event.pull_request.number }} - comment-author: pythonitaliabot - body-includes: "# Terraform" - - name: Create or Update comment - uses: peter-evans/create-or-update-comment@v1 - if: steps.find_comment.outputs.comment-id != '' - with: - token: ${{ secrets.BOT_TOKEN }} - comment-id: ${{ steps.find_comment.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - edit-mode: replace - body: | - # Terraform - - ## Terraform Validation 🤖 - ${{ steps.validate.outputs.stdout }} - - ## Terraform Plan 📖 - ${{ steps.plan.outcome }} From bd9a0fbbb82d6de7daf22f3c0e43d742303bc84b Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 17:25:48 +0100 Subject: [PATCH 23/35] remove plan --- .github/workflows/global-terraform-lint.yml | 54 --------------------- 1 file changed, 54 deletions(-) diff --git a/.github/workflows/global-terraform-lint.yml b/.github/workflows/global-terraform-lint.yml index 6ef309f3e0..a1e89a1080 100644 --- a/.github/workflows/global-terraform-lint.yml +++ b/.github/workflows/global-terraform-lint.yml @@ -20,57 +20,3 @@ jobs: run: terraform fmt -check env: TF_IN_AUTOMATION: 1 - - plan: - runs-on: ubuntu-latest - defaults: - run: - working-directory: ./infrastructure/global - steps: - - uses: actions/checkout@v2 - - uses: hashicorp/setup-terraform@v1 - with: - terraform_version: 1.2.4 - - name: Terraform Init - run: terraform init - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - TF_IN_AUTOMATION: 1 - - name: Terraform Validate - id: validate - run: terraform validate -no-color - - name: Terraform Plan - id: plan - run: terraform plan -no-color &> /dev/null - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: eu-central-1 - TF_IN_AUTOMATION: 1 - - name: Find Plan comment - uses: peter-evans/find-comment@v1 - id: find_comment - with: - token: ${{ secrets.BOT_TOKEN }} - issue-number: ${{ github.event.pull_request.number }} - comment-author: pythonitaliabot - body-includes: "# Global Terraform" - - name: Create or Update comment - uses: peter-evans/create-or-update-comment@v1 - if: steps.find_comment.outputs.comment-id != '' - with: - token: ${{ secrets.BOT_TOKEN }} - comment-id: ${{ steps.find_comment.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - edit-mode: replace - body: | - # Global Terraform - - ## Terraform Validation 🤖 - ${{ 
steps.validate.outputs.stdout }} - - ## Terraform Plan 📖 - ${{ steps.plan.outcome }} From 2e9c94ca4c73abf2c5e1334e7e0be13ff5882e02 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 17:26:50 +0100 Subject: [PATCH 24/35] reduce sleep time --- .github/workflows/deploy.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 84494efbb3..31ba5338d2 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -250,8 +250,8 @@ jobs: runs-on: ubuntu-latest needs: [terraform] steps: - - name: Sleep for 20 seconds - run: sleep 20s + - name: Sleep for 10 seconds + run: sleep 10s shell: bash # Migrate the database From 2c547bbf49626bf0398db72fa660a44c4fbf4a7d Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 17:48:13 +0100 Subject: [PATCH 25/35] dummy lock --- backend/schedule/tasks.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py index 0b8f9d63c5..33a2d92446 100644 --- a/backend/schedule/tasks.py +++ b/backend/schedule/tasks.py @@ -13,7 +13,7 @@ from django.conf import settings import logging from integrations import slack - +from django.core.cache import cache from pycon.celery import app from schedule.models import ScheduleItemSentForVideoUpload from schedule.video_upload import ( @@ -367,7 +367,26 @@ def upload_schedule_item_video(*, sent_for_video_upload_state_id: int): sent_for_video_upload.save(update_fields=["status"]) +def lock_task(func): + # This is a dummy lock until we can get celery-heimdall + def wrapper(*args, **kwargs): + lock_id = f"celery_lock_{func.__name__}" + lock = cache.add(lock_id, "locked", timeout=60 * 60 * 1) + + if not lock: + logger.info("Task %s is already running, skipping", func.__name__) + return + + try: + return func(*args, **kwargs) + finally: + cache.delete(lock_id) + + return wrapper + + @app.task() +@lock_task def process_schedule_items_videos_to_upload(): statuses = ( ScheduleItemSentForVideoUpload.objects.filter( From 1ae0373f48c03ac250b055782f7b748df74f3ccf Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 18:22:50 +0100 Subject: [PATCH 26/35] dummy lock --- .github/workflows/backend-test.yml | 5 ++-- backend/pycon/settings/base.py | 4 ++++ backend/schedule/tasks.py | 37 ++++++++++++++++++++++++------ 3 files changed, 37 insertions(+), 9 deletions(-) diff --git a/.github/workflows/backend-test.yml b/.github/workflows/backend-test.yml index 07ffe0d6ba..e086499a51 100644 --- a/.github/workflows/backend-test.yml +++ b/.github/workflows/backend-test.yml @@ -62,8 +62,9 @@ jobs: exit $STATUS env: DATABASE_URL: postgresql://postgres:postgres@localhost:${{ job.services.postgres.ports['5432'] }}/postgres - CELERY_BROKER_URL: redis://redis:6379/0 - CELERY_RESULT_BACKEND: redis://redis:6379/1 + CACHE_URL: redis://redis:6379/0 + CELERY_BROKER_URL: redis://redis:6379/1 + CELERY_RESULT_BACKEND: redis://redis:6379/2 STRIPE_SECRET_API_KEY: fake-key CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} MEDIA_FILES_STORAGE_BACKEND: django.core.files.storage.FileSystemStorage diff --git a/backend/pycon/settings/base.py b/backend/pycon/settings/base.py index 8b0ca3bd16..565f7f72f7 100644 --- a/backend/pycon/settings/base.py +++ b/backend/pycon/settings/base.py @@ -311,6 +311,10 @@ IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY = "imagekit.cachefiles.strategies.Optimistic" +REDIS_URL = env("CACHE_URL") +if not REDIS_URL.startswith("redis://"): + raise 
ValueError("We expect Cache URL to be Redis URL") + CACHES = { "default": env.cache(default="locmemcache://snowflake"), } diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py index 33a2d92446..1d6d32e142 100644 --- a/backend/schedule/tasks.py +++ b/backend/schedule/tasks.py @@ -1,3 +1,7 @@ +import time +import threading + +import redis from django.db.models import Q from google_api.exceptions import NoGoogleCloudQuotaLeftError from googleapiclient.errors import HttpError @@ -13,7 +17,6 @@ from django.conf import settings import logging from integrations import slack -from django.core.cache import cache from pycon.celery import app from schedule.models import ScheduleItemSentForVideoUpload from schedule.video_upload import ( @@ -367,21 +370,41 @@ def upload_schedule_item_video(*, sent_for_video_upload_state_id: int): sent_for_video_upload.save(update_fields=["status"]) +def renew_lock(lock, interval): + while lock.locked: + try: + lock.extend(interval, replace_ttl=True) + except Exception as e: + logger.exception("Error renewing lock: %s", e) + break + + if lock.locked: + time.sleep(interval) + + def lock_task(func): # This is a dummy lock until we can get celery-heimdall def wrapper(*args, **kwargs): + timeout = 60 * 5 lock_id = f"celery_lock_{func.__name__}" - lock = cache.add(lock_id, "locked", timeout=60 * 60 * 1) + client = redis.Redis.from_url(settings.REDIS_URL) + lock = client.lock(lock_id, timeout=timeout, thread_local=False) + + if lock.acquire(blocking=False): + renewer_thread = threading.Thread(target=renew_lock, args=(lock, timeout)) + renewer_thread.daemon = True + renewer_thread.start() + + try: + return func(*args, **kwargs) + finally: + lock.release() + renewer_thread.join() if not lock: logger.info("Task %s is already running, skipping", func.__name__) return - try: - return func(*args, **kwargs) - finally: - cache.delete(lock_id) - return wrapper From 84e12ddce9a349fe55a3df115cc59f0f4eafe018 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 18:52:14 +0100 Subject: [PATCH 27/35] changes --- .dockerignore | 2 ++ backend/.dockerignore | 16 ++++++++++++++++ docker-compose.yml | 8 ++++---- 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/.dockerignore b/.dockerignore index 47bba41e0a..fda38bc40a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -10,3 +10,5 @@ **/.pytest_cache/** Dockerfile **/_dist/** +**/backend/media +*.sqlite3 diff --git a/backend/.dockerignore b/backend/.dockerignore index 12decee1ab..cecbad5e1f 100644 --- a/backend/.dockerignore +++ b/backend/.dockerignore @@ -7,3 +7,19 @@ cov.xml **/__pycache__/** **/node_modules/** *.sqlite3 +**/node_modules +**/.env +**/.npmrc +**/.pdm-build +**/.venv/** +**/.ruff_cache/** +**/.pnpm-store/** +**/__pycache__/** +.pdm-python +**/.pytest_cache/** +Dockerfile +**/_dist/** +**/backend/media +*.sqlite3 +db.sqlite3 +**/.virtualenv diff --git a/docker-compose.yml b/docker-compose.yml index 68e96d4784..d15b29170e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -49,13 +49,13 @@ services: context: ./backend dockerfile: ../Dockerfile.python.local networks: [pycon_net] - entrypoint: "" command: sh -c "export DJANGO_SETTINGS_MODULE=pycon.settings.dev && + eval $(pdm venv activate) && pdm install && - pdm run python manage.py migrate && - pdm run python manage.py create_admin && + python manage.py migrate && + python manage.py create_admin && touch /.ready && - pdm run python manage.py runserver 0.0.0.0:8000" + python manage.py runserver 0.0.0.0:8000" depends_on: redis: 
        condition: service_healthy

From 0c2e6cd0c3981fc816fc9b71d0c0abc85fc06014 Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 18:54:39 +0100
Subject: [PATCH 28/35] changes

---
 .gitignore         | 1 +
 docker-compose.yml | 7 +++----
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.gitignore b/.gitignore
index 035350db6d..56903b6d4e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -134,3 +134,4 @@ badge-service/badges.zip
 *.generated.ts
 backend/custom_admin/src/types.ts
 backend/schema.graphql
+backend/__pypackages__/
diff --git a/docker-compose.yml b/docker-compose.yml
index d15b29170e..7d1ab75df1 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -50,12 +50,11 @@ services:
       dockerfile: ../Dockerfile.python.local
     networks: [pycon_net]
     command: sh -c "export DJANGO_SETTINGS_MODULE=pycon.settings.dev &&
-      eval $(pdm venv activate) &&
       pdm install &&
-      python manage.py migrate &&
-      python manage.py create_admin &&
+      pdm run python manage.py migrate &&
+      pdm run python manage.py create_admin &&
       touch /.ready &&
-      python manage.py runserver 0.0.0.0:8000"
+      pdm run python manage.py runserver 0.0.0.0:8000"
     depends_on:
       redis:
         condition: service_healthy

From 1fcf9d08261c0160b23d75fbea2102367da5aabd Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 19:00:36 +0100
Subject: [PATCH 29/35] changes

---
 backend/pycon/settings/base.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/backend/pycon/settings/base.py b/backend/pycon/settings/base.py
index 565f7f72f7..fe3344f98c 100644
--- a/backend/pycon/settings/base.py
+++ b/backend/pycon/settings/base.py
@@ -311,9 +311,7 @@
 IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY = "imagekit.cachefiles.strategies.Optimistic"
 
-REDIS_URL = env("CACHE_URL")
-if not REDIS_URL.startswith("redis://"):
-    raise ValueError("We expect Cache URL to be Redis URL")
+REDIS_URL = env("CACHE_URL", default="")
 
 CACHES = {
     "default": env.cache(default="locmemcache://snowflake"),
 }

From 7c05404870da9ee9a87fcb3bb22c530dabc89a6e Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 19:06:53 +0100
Subject: [PATCH 30/35] fix

---
 .github/workflows/backend-test.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/backend-test.yml b/.github/workflows/backend-test.yml
index e086499a51..7335aa2eec 100644
--- a/.github/workflows/backend-test.yml
+++ b/.github/workflows/backend-test.yml
@@ -31,6 +31,8 @@ jobs:
       redis:
         image: redis:7.2.3
         options: --health-cmd "redis-cli ping" --health-interval 10s --health-timeout 5s --health-retries 5
+        ports:
+          - 6379/tcp
 
     steps:
       - uses: actions/checkout@v2

From c110ff7b9c5af68a5819c029877e169de98abf7d Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 19:09:42 +0100
Subject: [PATCH 31/35] redis

---
 .github/workflows/backend-test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/backend-test.yml b/.github/workflows/backend-test.yml
index 7335aa2eec..29d96073e1 100644
--- a/.github/workflows/backend-test.yml
+++ b/.github/workflows/backend-test.yml
@@ -32,7 +32,7 @@ jobs:
         image: redis:7.2.3
         options: --health-cmd "redis-cli ping" --health-interval 10s --health-timeout 5s --health-retries 5
         ports:
-          - 6379/tcp
+          - 6379:6379
 
     steps:
       - uses: actions/checkout@v2

From 1ba8f25d2d785ff8685334e0c4aae670c55e7bec Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 19:25:47 +0100
Subject: [PATCH 32/35] fix CI redis host, pdm version, and dockerignore

---
 .dockerignore                      | 1 +
 .github/workflows/backend-test.yml | 2 +-
 Dockerfile.python.local            | 2 +-
 backend/pdm.lock                   | 2 +-
 4 files
changed, 4 insertions(+), 3 deletions(-) diff --git a/.dockerignore b/.dockerignore index fda38bc40a..431cbaa8fc 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,6 +6,7 @@ **/.ruff_cache/** **/.pnpm-store/** **/__pycache__/** +**/__pypackages__/** .pdm-python **/.pytest_cache/** Dockerfile diff --git a/.github/workflows/backend-test.yml b/.github/workflows/backend-test.yml index 29d96073e1..03bd44981b 100644 --- a/.github/workflows/backend-test.yml +++ b/.github/workflows/backend-test.yml @@ -64,7 +64,7 @@ jobs: exit $STATUS env: DATABASE_URL: postgresql://postgres:postgres@localhost:${{ job.services.postgres.ports['5432'] }}/postgres - CACHE_URL: redis://redis:6379/0 + CACHE_URL: redis://localhost:6379/0 CELERY_BROKER_URL: redis://redis:6379/1 CELERY_RESULT_BACKEND: redis://redis:6379/2 STRIPE_SECRET_API_KEY: fake-key diff --git a/Dockerfile.python.local b/Dockerfile.python.local index f6f949aaad..72798b07ec 100644 --- a/Dockerfile.python.local +++ b/Dockerfile.python.local @@ -9,7 +9,7 @@ ARG FUNCTION_DIR RUN mkdir -p ${FUNCTION_DIR} WORKDIR ${FUNCTION_DIR} -RUN pip install pdm==2.10.4 +RUN pip install pdm==2.12.4 COPY pyproject.toml pdm.lock ${FUNCTION_DIR} diff --git a/backend/pdm.lock b/backend/pdm.lock index 16e39aef66..08d9af6b50 100644 --- a/backend/pdm.lock +++ b/backend/pdm.lock @@ -4,7 +4,7 @@ [metadata] groups = ["default", "dev", "lambda"] strategy = ["cross_platform"] -lock_version = "4.4" +lock_version = "4.4.1" content_hash = "sha256:b64dac79b33562fbcf33024422734fd39550c1eafe90c60795c23bb636def78f" [[package]] From cf0210738d824c19bc56bef8814886c558cf7940 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 19:32:33 +0100 Subject: [PATCH 33/35] parallel --- backend/pycon/settings/base.py | 2 ++ backend/schedule/tasks.py | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/backend/pycon/settings/base.py b/backend/pycon/settings/base.py index fe3344f98c..dbed273601 100644 --- a/backend/pycon/settings/base.py +++ b/backend/pycon/settings/base.py @@ -374,3 +374,5 @@ X_FRAME_OPTIONS = "SAMEORIGIN" CELERY_TASK_IGNORE_RESULT = True + +PYTEST_XDIST_WORKER = env("PYTEST_XDIST_WORKER", default="") diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py index 1d6d32e142..7dc658f8c1 100644 --- a/backend/schedule/tasks.py +++ b/backend/schedule/tasks.py @@ -387,6 +387,10 @@ def lock_task(func): def wrapper(*args, **kwargs): timeout = 60 * 5 lock_id = f"celery_lock_{func.__name__}" + + if settings.PYTEST_XDIST_WORKER: + lock_id = f"{lock_id}_{settings.PYTEST_XDIST_WORKER}" + client = redis.Redis.from_url(settings.REDIS_URL) lock = client.lock(lock_id, timeout=timeout, thread_local=False) From 7ffc106cbae3782e672d4b7e6335c0afe23424e3 Mon Sep 17 00:00:00 2001 From: Marco Acierno Date: Sat, 9 Mar 2024 19:53:37 +0100 Subject: [PATCH 34/35] worker --- backend/pycon/settings/base.py | 2 -- backend/schedule/tasks.py | 16 +++++++++------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/backend/pycon/settings/base.py b/backend/pycon/settings/base.py index dbed273601..fe3344f98c 100644 --- a/backend/pycon/settings/base.py +++ b/backend/pycon/settings/base.py @@ -374,5 +374,3 @@ X_FRAME_OPTIONS = "SAMEORIGIN" CELERY_TASK_IGNORE_RESULT = True - -PYTEST_XDIST_WORKER = env("PYTEST_XDIST_WORKER", default="") diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py index 7dc658f8c1..aaebbe7e8c 100644 --- a/backend/schedule/tasks.py +++ b/backend/schedule/tasks.py @@ -1,3 +1,4 @@ +import os import time import threading @@ -373,12 +374,12 @@ def 
upload_schedule_item_video(*, sent_for_video_upload_state_id: int):
 
 def renew_lock(lock, interval):
     while lock.locked:
         try:
-            lock.extend(interval, replace_ttl=True)
+            extended = lock.extend(interval, replace_ttl=True)
         except Exception as e:
             logger.exception("Error renewing lock: %s", e)
             break
 
-        if lock.locked:
+        if extended and lock.locked:
             time.sleep(interval)
 
@@ -386,10 +387,12 @@ def lock_task(func):
     # This is a dummy lock until we can get celery-heimdall
     def wrapper(*args, **kwargs):
         timeout = 60 * 5
+
         lock_id = f"celery_lock_{func.__name__}"
+        PYTEST_XDIST_WORKER = os.environ.get("PYTEST_XDIST_WORKER")
 
-        if settings.PYTEST_XDIST_WORKER:
-            lock_id = f"{lock_id}_{settings.PYTEST_XDIST_WORKER}"
+        if PYTEST_XDIST_WORKER:
+            lock_id = f"{lock_id}_{PYTEST_XDIST_WORKER}"
 
         client = redis.Redis.from_url(settings.REDIS_URL)
         lock = client.lock(lock_id, timeout=timeout, thread_local=False)
@@ -403,9 +406,8 @@ def wrapper(*args, **kwargs):
                 return func(*args, **kwargs)
             finally:
                 lock.release()
-                renewer_thread.join()
-
-        if not lock:
+                renewer_thread.join(1)
+        else:
             logger.info("Task %s is already running, skipping", func.__name__)
             return

From c2577c6349eb0ae4c22a2bfce5289223d454dc69 Mon Sep 17 00:00:00 2001
From: Marco Acierno
Date: Sat, 9 Mar 2024 19:58:49 +0100
Subject: [PATCH 35/35] lock improvement

---
 backend/schedule/tasks.py | 25 +++++++++++++++----------
 1 file changed, 15 insertions(+), 10 deletions(-)

diff --git a/backend/schedule/tasks.py b/backend/schedule/tasks.py
index aaebbe7e8c..9ac2dfe4c3 100644
--- a/backend/schedule/tasks.py
+++ b/backend/schedule/tasks.py
@@ -1,5 +1,4 @@
 import os
-import time
 import threading
 
 import redis
@@ -371,23 +370,26 @@ def upload_schedule_item_video(*, sent_for_video_upload_state_id: int):
     sent_for_video_upload.save(update_fields=["status"])
 
 
-def renew_lock(lock, interval):
-    while lock.locked:
+def renew_lock(lock, interval, _stop_event):
+    while not _stop_event.wait(timeout=interval):
+        if not lock.locked():
+            return
+
+        if _stop_event.is_set():
+            return
+
         try:
-            extended = lock.extend(interval, replace_ttl=True)
+            lock.extend(interval, replace_ttl=True)
         except Exception as e:
             logger.exception("Error renewing lock: %s", e)
             break
 
-        if extended and lock.locked:
-            time.sleep(interval)
-
 
 def lock_task(func):
     # This is a dummy lock until we can get celery-heimdall
     def wrapper(*args, **kwargs):
         timeout = 60 * 5
-
+        _stop_event = threading.Event()
         lock_id = f"celery_lock_{func.__name__}"
         PYTEST_XDIST_WORKER = os.environ.get("PYTEST_XDIST_WORKER")
@@ -398,7 +400,9 @@ def wrapper(*args, **kwargs):
         lock = client.lock(lock_id, timeout=timeout, thread_local=False)
 
         if lock.acquire(blocking=False):
-            renewer_thread = threading.Thread(target=renew_lock, args=(lock, timeout))
+            renewer_thread = threading.Thread(
+                target=renew_lock, args=(lock, timeout, _stop_event)
+            )
             renewer_thread.daemon = True
             renewer_thread.start()
@@ -406,7 +410,8 @@ def wrapper(*args, **kwargs):
             return func(*args, **kwargs)
         finally:
             lock.release()
-            renewer_thread.join(1)
+            _stop_event.set()
+            renewer_thread.join()
         else:
             logger.info("Task %s is already running, skipping", func.__name__)
             return
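
The end state of the series is worth spelling out: process_schedule_items_videos_to_upload runs under a per-task Redis lock with a five-minute TTL, a daemon thread keeps extending the TTL while the task body runs, and a threading.Event tells that thread to stop before the lock is released. Below is a minimal standalone sketch of the same pattern outside Celery. Assumptions: redis-py is installed, Redis is reachable at redis://localhost:6379/0, and run_exclusively plus the demo task name are illustrative only, not code from the patch.

    # Sketch of the PATCH 35 locking pattern, outside Celery (illustrative only).
    # Assumes redis-py and a Redis server at redis://localhost:6379/0.
    import threading
    import time

    import redis

    client = redis.Redis.from_url("redis://localhost:6379/0")


    def run_exclusively(name, func, timeout=60 * 5):
        # One lock key per task name; the TTL means a crashed worker
        # cannot hold the lock forever. thread_local=False lets the
        # renewer thread extend a lock acquired by the main thread.
        lock = client.lock(f"celery_lock_{name}", timeout=timeout, thread_local=False)
        stop = threading.Event()

        def renew():
            # Wake up every `timeout` seconds; exit once stop is set or
            # the lock key has disappeared (expired or released).
            while not stop.wait(timeout=timeout):
                if not lock.locked():
                    return
                try:
                    lock.extend(timeout, replace_ttl=True)
                except redis.exceptions.LockError:
                    return

        if not lock.acquire(blocking=False):
            print(f"{name} is already running, skipping")
            return

        renewer = threading.Thread(target=renew, daemon=True)
        renewer.start()
        try:
            func()
        finally:
            stop.set()  # stop the renewer before the lock goes away
            lock.release()
            renewer.join()


    if __name__ == "__main__":
        run_exclusively("process_schedule_items_videos_to_upload", lambda: time.sleep(2))

One caveat this sketch shares with the patch: the renewal interval equals the lock TTL, so the key can expire in the instant before the renewer wakes. Renewing every timeout / 2 seconds would leave headroom, at the cost of slightly more Redis traffic.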