diff --git a/ajapaik/ajapaik/management/commands/fb_rescrape.py b/ajapaik/ajapaik/management/commands/fb_rescrape.py index 4f7dfeadf..a5fdcba03 100644 --- a/ajapaik/ajapaik/management/commands/fb_rescrape.py +++ b/ajapaik/ajapaik/management/commands/fb_rescrape.py @@ -9,8 +9,7 @@ class Command(BaseCommand): def handle(self, *args, **options): photos = Photo.objects.filter(latest_comment__isnull=False) - query_string = 'https://graph.facebook.com/?id=%s&scrape=true' - url_template = 'https://ajapaik.ee/foto/%d/' + for p in photos: - url = url_template % p.id - requests.post(query_string % url) + url = f'https://ajapaik.ee/photo/{p.id}' + requests.post(f'https://graph.facebook.com/?id={url}&scrape=true') diff --git a/ajapaik/ajapaik/management/commands/set_display_name_for_all_users.py b/ajapaik/ajapaik/management/commands/set_display_name_for_all_users.py index b862d5c66..1fceaeb39 100644 --- a/ajapaik/ajapaik/management/commands/set_display_name_for_all_users.py +++ b/ajapaik/ajapaik/management/commands/set_display_name_for_all_users.py @@ -14,17 +14,23 @@ def handle(self, *args, **options): for profile in profiles: if profile.first_name and profile.last_name: profile.display_name = '%s %s' % (profile.first_name, profile.last_name) + elif profile.google_plus_name: profile.display_name = profile.google_plus_name + elif profile.fb_name: profile.display_name = profile.fb_name + elif profile.google_plus_email: try: profile.display_name = profile.google_plus_email.split('@')[0] except: # noqa pass + elif profile.first_name: profile.display_name = profile.first_name + elif profile.last_name: profile.display_name = profile.last_name + profile.save() diff --git a/ajapaik/ajapaik/models.py b/ajapaik/ajapaik/models.py index b665fc77d..ef030d8aa 100644 --- a/ajapaik/ajapaik/models.py +++ b/ajapaik/ajapaik/models.py @@ -1,4 +1,5 @@ import hashlib +import json import os import re from contextlib import closing @@ -57,10 +58,9 @@ class EstimatedCountQuerySet(QuerySet): # Get 
count from cache if it is available - def cached_count(self): - cached_count = 0 - key = "query: " + str(hashlib.md5(str(self.query).encode()).hexdigest()) - cached_count = cache.get(key, cached_count) + def cached_count(self, query): + key = "query:" + str(hashlib.md5(query.encode()).hexdigest()) + cached_count = cache.get(key, 0) # not in cache or small then query exact value if cached_count < 100000: @@ -118,7 +118,7 @@ class BooleanValue(Transform): def as_sql(self, compiler, connection): sql, params = compiler.compile(self.lhs) - sql = 'CAST(%s AS BOOL)' % sql + sql = f'CAST({sql} AS BOOL)' return sql, params @@ -217,11 +217,10 @@ class Meta: def __str__(self): if self.profile: - profilename = self.profile.get_display_name - else: - profilename = 'None' + profile_name = self.profile.get_display_name + return f'{self.album_id} - {self.photo_id} - {self.TYPE_CHOICES[self.type][1]} - {profile_name}' - return '%d - %d - %s - %s' % (self.album_id, self.photo_id, self.TYPE_CHOICES[self.type][1], profilename) + return f'{self.album_id} - {self.photo_id} - {self.TYPE_CHOICES[self.type][1]}' def delete(self, *args, **kwargs): if self.album.atype == Album.CURATED: @@ -308,9 +307,16 @@ def __init__(self, *args, **kwargs): self.original_lat = self.lat self.original_lon = self.lon - def save(self, *args, **kwargs): - if not self.id: - super(Album, self).save(*args, **kwargs) + def save(self, *args, update_fields=None, force_insert=False, **kwargs): + # We should not have such complex logic in save methods, because currently we perform save multiple times + # which results in objects.create not working, since we can only save with `force_insert=True` only once + # but our complicated save methods calls it multiple times. 
+ if not self.id or force_insert: + super(Album, self).save(*args, update_fields=update_fields, force_insert=force_insert, **kwargs) + + elif update_fields: + super(Album, self).save(*args, update_fields=update_fields, **kwargs) + return self.set_calculated_fields() if not self.cover_photo and self.photo_count_with_subalbums > 0: @@ -318,26 +324,33 @@ def save(self, *args, **kwargs): self.cover_photo = random_photo if random_photo and random_photo.flip: self.cover_photo_flipped = random_photo.flip + if not self.lat and not self.lon: random_photo_with_location = self.get_geotagged_historic_photo_queryset_with_subalbums().first() if random_photo_with_location: self.lat = random_photo_with_location.lat self.lon = random_photo_with_location.lon + if self.lat and self.lon and self.lat != self.original_lat and self.lon != self.original_lon: self.geography = Point(x=float(self.lon), y=float(self.lat), srid=4326) + self.original_lat = self.lat self.original_lon = self.lon super(Album, self).save(*args, **kwargs) + if self.subalbum_of: self.subalbum_of.save() connections['default'].get_unified_index().get_index(Album).update_object(self) def get_historic_photos_queryset_with_subalbums(self): sa_ids = [self.id] + for sa in self.subalbums.filter(atype__in=[Album.CURATED, Album.PERSON]): sa_ids.append(sa.id) + qs = Photo.objects.filter(rephoto_of__isnull=True).prefetch_related('albumphoto').filter( albumphoto__album__in=sa_ids) + return qs.distinct('id') def get_geotagged_historic_photo_queryset_with_subalbums(self): @@ -348,6 +361,7 @@ def get_rephotos_queryset_with_subalbums(self): historic_photo_qs = self.get_historic_photos_queryset_with_subalbums().only('id').order_by() qs = Photo.objects.filter(rephoto_of__isnull=False, rephoto_of__in=historic_photo_qs.values('id').order_by()).order_by() + return qs.distinct('pk') # All photos = historical photos + rephotos @@ -358,6 +372,7 @@ def get_all_photos_queryset_with_subalbums(self): 'id').distinct('id').order_by() 
historic_photo_list = list(historic_photo_qs.values_list('id', flat=True)) + for p in rephoto_qs: historic_photo_list.append(p['id']) @@ -767,14 +782,13 @@ def get_next_photo_to_geotag(qs, request): return [Photo.get_game_json_format_photo(ret), user_seen_all, nothing_more_to_show] def __str__(self): - return u'%s - %s (%s) (%s)' % (self.id, self.get_display_text, self.date_text, self.source_key) + return f'{self.id} - {self.get_display_text} {self.date_text} {self.source_key}' def __init__(self, *args, **kwargs): super(Photo, self).__init__(*args, **kwargs) self.original_lat = self.lat self.original_lon = self.lon self.original_flip = self.flip - self.original_invert = self.invert self.original_rotated = self.rotated self.original_height = self.height @@ -782,6 +796,18 @@ def get_detail_url(self): # Legacy URL needs to stay this way for now for Facebook return reverse('photo', args=(self.pk,)) + def _generate_thumbnail_and_apply_action(self, size, action, parameter=None): + thumb_path = f'{settings.MEDIA_ROOT}/{get_thumbnail(self.image, f"{size}x{size}", upscale=False).name}' + img_thumb = Image.open(thumb_path) + + if isinstance(action, str): + img_thumb = getattr(img_thumb, action)(parameter) + else: + img_thumb = action(img_thumb) + + img_thumb.save(thumb_path) + img_thumb.close() + def do_flip(self): photo_path = f'{settings.MEDIA_ROOT}/{str(self.image)}' img = Image.open(photo_path) @@ -790,20 +816,15 @@ def do_flip(self): img.close() flipped_image.close() self.flip = not self.flip - small_thumb_path = f'{settings.MEDIA_ROOT}/{get_thumbnail(self.image, "400x400", upscale=False).name}' - img_small_thumb = Image.open(small_thumb_path) - img_small_thumb = img_small_thumb.transpose(Image.FLIP_LEFT_RIGHT) - img_small_thumb.save(small_thumb_path) - img_small_thumb.close() - bigger_thumb_path = f'{settings.MEDIA_ROOT}/{get_thumbnail(self.image, "1024x1024", upscale=False).name}' - img_bigger_thumb = Image.open(bigger_thumb_path) - img_bigger_thumb = 
img_bigger_thumb.transpose(Image.FLIP_LEFT_RIGHT) - img_bigger_thumb.save(bigger_thumb_path) - img_bigger_thumb.close() + self._generate_thumbnail_and_apply_action(400, 'transpose', Image.FLIP_LEFT_RIGHT) + self._generate_thumbnail_and_apply_action(1024, 'transpose', Image.FLIP_LEFT_RIGHT) + if self.image_unscaled != '': delete(self.image_unscaled, delete_file=False) + if self.image_no_watermark != '': delete(self.image_no_watermark, delete_file=False) + self.original_flip = self.flip face_recognition_rectangles = apps.get_model( @@ -839,29 +860,24 @@ def do_invert(self): self.perceptual_hash = phash(inverted_image) inverted_image.close() self.invert = not self.invert - small_thumb_path = f'{settings.MEDIA_ROOT}/{get_thumbnail(self.image, "400x400", upscale=False).name}' - img_small_thumb = Image.open(small_thumb_path) - img_small_thumb = ImageOps.invert(img_small_thumb) - img_small_thumb.save(small_thumb_path) - img_small_thumb.close() - bigger_thumb_path = f'{settings.MEDIA_ROOT}/{get_thumbnail(self.image, "1024x1024", upscale=False).name}' - img_bigger_thumb = Image.open(bigger_thumb_path) - img_bigger_thumb = ImageOps.invert(img_bigger_thumb) - img_bigger_thumb.save(bigger_thumb_path) - img_bigger_thumb.close() + self._generate_thumbnail_and_apply_action(400, ImageOps.invert) + self._generate_thumbnail_and_apply_action(1024, ImageOps.invert) + if self.image_unscaled != '': delete(self.image_unscaled, delete_file=False) if self.image_no_watermark != '': delete(self.image_no_watermark, delete_file=False) - self.original_invert = self.invert + self.light_save() def do_rotate(self, degrees): photo_path = f'{settings.MEDIA_ROOT}/{str(self.image)}' img = Image.open(photo_path) original_degrees = 0 + if self.original_rotated is not None: original_degrees = self.original_rotated + rotation_degrees = degrees - original_degrees rotated_image = img.rotate(rotation_degrees, expand=True) rotated_image.save(photo_path) @@ -869,16 +885,9 @@ def do_rotate(self, degrees): 
self.perceptual_hash = phash(rotated_image) rotated_image.close() self.rotated = degrees - small_thumb_path = f'{settings.MEDIA_ROOT}/{get_thumbnail(self.image, "400x400", upscale=False).name}' - img_small_thumb = Image.open(small_thumb_path) - img_small_thumb = img_small_thumb.rotate(rotation_degrees, expand=True) - img_small_thumb.save(small_thumb_path) - img_small_thumb.close() - bigger_thumb_path = f'{settings.MEDIA_ROOT}/{get_thumbnail(self.image, "1024x1024", upscale=False).name}' - img_bigger_thumb = Image.open(bigger_thumb_path) - img_bigger_thumb = img_bigger_thumb.rotate(rotation_degrees, expand=True) - img_bigger_thumb.save(bigger_thumb_path) - img_bigger_thumb.close() + self._generate_thumbnail_and_apply_action(400, 'rotate', rotation_degrees) + self._generate_thumbnail_and_apply_action(1024, 'rotate', rotation_degrees) + if self.image_unscaled != '': delete(self.image_unscaled, delete_file=False) if self.image_no_watermark != '': @@ -899,24 +908,31 @@ def do_rotate(self, degrees): 'ajapaik_face_recognition.FaceRecognitionRectangle').objects.filter(photo_id=self.id) if face_recognition_rectangles is None: return + for face_recognition_rectangle in face_recognition_rectangles: top, right, bottom, left = face_recognition_rectangle.coordinates.strip('[').strip(']').split(', ') + if rotation_degrees == 0: return + elif rotation_degrees == 90 or rotation_degrees == -270: face_recognition_rectangle.coordinates = \ f'[{str(self.height - int(right))}, {bottom}, {str(self.height - int(left))}, {top}]' + elif rotation_degrees == 180 or rotation_degrees == -180: face_recognition_rectangle.coordinates = \ f'[{str(self.height - int(bottom))}, {str(self.width - int(left))},' + \ f'{str(self.height - int(top))}, {str(self.width - int(right))}]' + elif rotation_degrees == 270 or rotation_degrees == -90: face_recognition_rectangle.coordinates = \ f'[{left}, {str(self.width - int(top))}, {right}, {str(self.width - int(bottom))}]' + face_recognition_rectangle.save() 
object_recognition_rectangles = apps.get_model( 'ajapaik_object_recognition.ObjectDetectionAnnotation').objects.filter(photo_id=self.id) + if object_recognition_rectangles is None: return for object_recognition_rectangle in object_recognition_rectangles: @@ -924,58 +940,67 @@ def do_rotate(self, degrees): right = object_recognition_rectangle.x2 bottom = object_recognition_rectangle.y2 left = object_recognition_rectangle.x1 + if rotation_degrees == 0: return + elif rotation_degrees == 90 or rotation_degrees == -270: object_recognition_rectangle.y1 = self.height - right object_recognition_rectangle.x2 = bottom object_recognition_rectangle.y2 = self.height - left object_recognition_rectangle.x1 = top + elif rotation_degrees == 180 or rotation_degrees == -180: object_recognition_rectangle.y1 = self.height - bottom object_recognition_rectangle.x2 = self.width - left object_recognition_rectangle.y2 = self.height - top object_recognition_rectangle.x1 = self.width - right + elif rotation_degrees == 270 or rotation_degrees == -90: object_recognition_rectangle.y1 = left object_recognition_rectangle.x2 = self.width - top object_recognition_rectangle.y2 = right object_recognition_rectangle.x1 = self.width - bottom + object_recognition_rectangle.save() self.light_save() def set_aspect_ratio(self): - if self.height is not None and self.width is not None: + if self.height and self.width: self.aspect_ratio = self.width / self.height - self.light_save() + self.save(update_fields=['aspect_ratio']) def calculate_phash(self): img = Image.open(f'{settings.MEDIA_ROOT}/{str(self.image)}') self.perceptual_hash = phash(img) - self.light_save() + self.save(update_fields=['perceptual_hash']) def find_similar(self): if not settings.DEBUG: - img = Image.open(f'{settings.MEDIA_ROOT}/{str(self.image)}') - self.perceptual_hash = phash(img) + self.calculate_phash() query = 'SELECT * FROM project_photo WHERE rephoto_of_id IS NULL AND perceptual_hash <@ (%s, 8) ' \ 'AND NOT id=%s AND aspect_ratio > 
%s AND aspect_ratio < %s' - if self.aspect_ratio is None: - self.aspect_ratio = self.width / self.height + + if not self.aspect_ratio: + self.set_aspect_ratio() + photos = Photo.objects.raw(query, [str(self.perceptual_hash), self.id, self.aspect_ratio * 0.8, self.aspect_ratio * 1.25]) for similar in photos: ImageSimilarity.add_or_update(self, similar) similar.light_save() - self.light_save() + + self.save(update_fields=['perceptual_hash']) def find_similar_for_existing_photo(self): if self.rephoto_of_id is not None: return - if self.aspect_ratio is None: - self.aspect_ratio = self.width / self.height - if not (self.lat is None and self.lon is None): + + if not self.aspect_ratio: + self.set_aspect_ratio() + + if self.lat and self.lon: query = 'SELECT * FROM project_photo WHERE perceptual_hash <@ (%s, 8) AND rephoto_of_id IS NULL ' \ 'AND NOT id=%s AND lat < %s AND lon < %s AND lat > %s AND lon > %s AND aspect_ratio > %s ' \ 'AND aspect_ratio < %s' @@ -991,10 +1016,9 @@ def find_similar_for_existing_photo(self): for similar in photos: list1 = ImageSimilarity.objects.filter(Q(from_photo=self) & Q(to_photo=similar)) list2 = ImageSimilarity.objects.filter(Q(from_photo=similar) & Q(to_photo=self)) - if list1.count() < 1 or list2.count() < 1: + + if not list1 or not list2: ImageSimilarity.add_or_update(self, similar) - similar.light_save() - self.light_save() def watermark(self): # For ETERA @@ -1027,10 +1051,13 @@ def get_pseudo_slug(self): def get_heatmap_points(self): valid_geotags = self.geotags.distinct('user_id').order_by('user_id', '-created') data = [] + for each in valid_geotags: serialized = [each.lat, each.lon] + if each.azimuth: serialized.append(each.azimuth) + data.append(serialized) return data @@ -1039,6 +1066,7 @@ def reverse_geocode_location(self): url = f'https://maps.googleapis.com/maps/api/geocode/json?latlng=%0.5f,%0.5f&key={settings.GOOGLE_API_KEY}' lat = None lon = None + if self.lat and self.lon: lat = self.lat lon = self.lon @@ -1048,6 +1076,7 @@ 
def reverse_geocode_location(self): lat = a.lat lon = a.lon break + if lat and lon: cached_response = GoogleMapsReverseGeocode.objects.filter(lat='{:.5f}'.format(lat), lon='{:.5f}'.format(lon)).first() @@ -1064,9 +1093,11 @@ def reverse_geocode_location(self): response=response.text ).save() response = decoded_response + if response['status'] == 'OK': most_accurate_result = response['results'][0] self.address = most_accurate_result['formatted_address'] + elif response['status'] == 'OVER_QUERY_LIMIT': return @@ -1077,52 +1108,70 @@ def set_backside(self, opposite): opposite.save() def save(self, *args, **kwargs): + update_fields = kwargs.get('update_fields') + + if update_fields: + super(Photo, self).save(*args, **kwargs) + return + super(Photo, self).save(*args, **kwargs) + kwargs = {**kwargs, 'force_insert': False} + if self.lat and self.lon and self.lat != self.original_lat and self.lon != self.original_lon: self.geography = Point(x=float(self.lon), y=float(self.lat), srid=4326) self.reverse_geocode_location() + if self.flip is None: self.flip = False + if self.original_flip is None: self.original_flip = False + if self.flip != self.original_flip: self.do_flip() + self.original_lat = self.lat self.original_lon = self.lon + if not self.first_rephoto: first_rephoto = self.rephotos.order_by('created').first() if first_rephoto: self.first_rephoto = first_rephoto.created last_rephoto = self.rephotos.order_by('-created').first() + if last_rephoto: self.latest_rephoto = last_rephoto.created self.rephoto_count = self.rephotos.count() + super(Photo, self).save(*args, **kwargs) + if not settings.DEBUG: connections['default'].get_unified_index().get_index(Photo).update_object(self) + if self.aspect_ratio is None: self.set_aspect_ratio() - def add_to_source_album(self, *args, **kwargs): + def add_to_source_album(self): if self.source_id is not None and self.source_id > 0: - sourceAlbum = Album.objects.filter(source_id=self.source_id).first() - if sourceAlbum is None: - 
sourceAlbum = Album( + source_album = Album.objects.filter(source_id=self.source_id).first() + + if not source_album: + source_album = Album( name=self.source.name, slug=self.source.name, atype=Album.COLLECTION, cover_photo=self, source=self.source ) - sourceAlbum.save() + source_album.save() AlbumPhoto( type=AlbumPhoto.COLLECTION, photo=self, - album=sourceAlbum + album=source_album ).save() - - sourceAlbum.save() + source_album.set_calculated_fields_new() + source_album.light_save() def light_save(self, *args, **kwargs): super(Photo, self).save(*args, **kwargs) @@ -1143,6 +1192,7 @@ def get_nearest_point(set_of_points, point_of_reference): for point in set_of_points: point = (point[1], point[0]) dist = great_circle(point_of_reference, point).meters + if (closest_dist is None) or (dist < closest_dist): closest_point = point closest_dist = dist @@ -1158,18 +1208,22 @@ def fill_untranslated_fields(self): if getattr(self, key): translation_source = key original_languages.append(each) + self.description_original_language = ','.join(original_languages) if translation_source: translation_done = False + for each in settings.TARTUNLP_LANGUAGES: key = f'description_{each}' current_value = getattr(self, key) + if not current_value: headers = {'Content-Type': 'application/json', 'x-api-key': 'public', 'application': 'ajapaik'} json = {'text': getattr(self, translation_source), 'tgt': each} response = requests.post(settings.TARTUNLP_API_URL, headers=headers, json=json).json() setattr(self, key, response['result']) translation_done = True + if translation_done: self.light_save() @@ -1300,15 +1354,18 @@ def __update__(self, qs): similarity_type=self.similarity_type) suggestions = ImageSimilaritySuggestion.objects.filter(image_similarity_id=imageSimilarity.id).order_by( 'proposer_id', '-created').all().distinct('proposer_id') + if self.similarity_type is not None: first_suggestion = 0 if self.similarity_type == 1 else 1 second_suggestion = 0 if self.similarity_type == 2 else 2 + 
if suggestions.filter(similarity_type=self.similarity_type).count() >= ( suggestions.filter(similarity_type=second_suggestion).count() - 1) \ and suggestions.filter(similarity_type=self.similarity_type).count() >= ( suggestions.filter(similarity_type=first_suggestion).count() - 1): suggestion.proposer = self.user_last_modified imageSimilarity.similarity_type = self.similarity_type + if self.similarity_type == 0: has_similar = ImageSimilarity.objects.filter( Q(from_photo_id=imageSimilarity.from_photo.id) & @@ -1316,6 +1373,7 @@ def __update__(self, qs): Q(similarity_type__gt=0)).first() is not None imageSimilarity.from_photo.has_similar = has_similar imageSimilarity.to_photo.has_similar = has_similar + imageSimilarity.save() imageSimilarity.to_photo.has_similar = ImageSimilarity.objects.filter( from_photo_id=imageSimilarity.from_photo.id).exclude(similarity_type=0).first() is not None @@ -1332,12 +1390,12 @@ def __update__(self, qs): def __add_or_update__(self): qs = ImageSimilarity.objects.filter(from_photo=self.from_photo, to_photo=self.to_photo) - points = 0 if len(qs) == 0: points, suggestion = self.__add__() else: points, suggestion = self.__update__(qs) - if points > 0: + + if points: Points( user=self.user_last_modified, action=Points.CONFIRM_IMAGE_SIMILARITY, @@ -1345,6 +1403,7 @@ def __add_or_update__(self): image_similarity_confirmation=suggestion, created=timezone.now() ).save() + return points @staticmethod @@ -1464,7 +1523,7 @@ class Meta: ('user', 'image_similarity_confirmation')) def __str__(self): - return u'%d - %s - %d' % (self.user_id, self.ACTION_CHOICES[self.action], self.points) + return f'{self.user_id} - {self.ACTION_CHOICES[self.action]} - {self.points}' class Transcription(Model): @@ -1544,10 +1603,11 @@ def save(self, *args, **kwargs): super(GeoTag, self).save(*args, **kwargs) def __str__(self): - # Django admin may crash with too long names importer = self.user.get_display_name if self.user else 'Ajapaik' photo = self.photo + if importer: 
+ # Django admin may crash with too long names return f'{str(self.id)} - {str(photo.id)} - {photo.get_display_text[:50]} - {importer}' @@ -1577,7 +1637,7 @@ def url_read(uri): return request.read() def get_user(self, access_token): - data = loads(self.url_read('https://graph.facebook.com/v7.0/me?access_token=%s' % access_token)) + data = loads(self.url_read(f'https://graph.facebook.com/v7.0/me?access_token={access_token}')) if not data: raise Exception('Facebook did not return anything useful for this access token') @@ -1648,7 +1708,7 @@ class Meta: db_table = 'project_googlemapsreversegeocode' def __str__(self): - if self.response.get('results') and self.response.get('results')[0]: + if self.response.get('results'): location = self.response.get('results')[0].get('formatted_address') return f'{location};{self.lat};{self.lon}' else: @@ -1722,10 +1782,8 @@ class Meta: db_table = 'project_video' def save(self, *args, **kwargs): + self.slug = slugify(self.name) super(Video, self).save(*args, **kwargs) - if not self.slug: - self.slug = slugify(self.name) - super(Video, self).save(*args, **kwargs) def __str__(self): return self.name @@ -1743,17 +1801,19 @@ def save(self, **kwargs): if photo: if not photo.first_comment: photo.first_comment = self.submit_date + if not photo.latest_comment or photo.latest_comment < self.submit_date: photo.latest_comment = self.submit_date - photo.comment_count = MyXtdComment.objects.filter( - object_pk=self.object_pk, is_removed=False - ).count() + + photo.comment_count = MyXtdComment.objects.filter(object_pk=self.object_pk, is_removed=False).count() photo.light_save() def delete(self, *args, **kwargs): + # It evaluates to a string, so there is no need to deepcopy it — strings are immutable, not passed by reference.
object_pk = deepcopy(self.object_pk) super(MyXtdComment, self).delete(*args, **kwargs) photo = Photo.objects.filter(pk=object_pk).first() + if photo: comments = MyXtdComment.objects.filter( object_pk=self.object_pk, is_removed=False @@ -1921,12 +1981,16 @@ def is_legit(self): def get_display_name(self): if self.display_name: return self.display_name - elif self.first_name and self.last_name: - return '%s %s' % (self.first_name, self.last_name) + + elif self.first_name or self.last_name: + return f'{self.first_name or ""} {self.last_name or ""}'.strip() + elif self.google_plus_name: return self.google_plus_name + elif self.fb_name: return self.fb_name + elif self.google_plus_email: try: return self.google_plus_email.split('@')[0] diff --git a/ajapaik/ajapaik/opendata.py b/ajapaik/ajapaik/opendata.py index c600bbb69..0f03e49e8 100644 --- a/ajapaik/ajapaik/opendata.py +++ b/ajapaik/ajapaik/opendata.py @@ -47,21 +47,10 @@ class Meta: fields = '__all__' -class CustomLimitOffsetPagination(LimitOffsetPagination): - def get_count(self, queryset): - """ - Determine an object count, supporting either querysets or regular lists. 
- try: - return queryset.cached_count() - except (AttributeError, TypeError): - return len(queryset) - - class PhotoViewSet(viewsets.ModelViewSet): queryset = Photo.objects.filter(rephoto_of__isnull=True) serializer_class = PhotoSerializer - pagination_class = CustomLimitOffsetPagination + pagination_class = LimitOffsetPagination filter_backends = [filters.SearchFilter] search_fields = list(PhotoIndex.fields) search_fields.remove('text') diff --git a/ajapaik/ajapaik/socialaccount/providers/wikimedia_commons/client.py b/ajapaik/ajapaik/socialaccount/providers/wikimedia_commons/client.py index 536a9f789..e9159672d 100644 --- a/ajapaik/ajapaik/socialaccount/providers/wikimedia_commons/client.py +++ b/ajapaik/ajapaik/socialaccount/providers/wikimedia_commons/client.py @@ -17,13 +17,17 @@ def get_redirect_url(self, authorization_url, extra_params): 'scope': self.scope, 'response_type': 'code' } + if self.state: params['state'] = self.state + params.update(extra_params) sorted_params = OrderedDict() + for param in sorted(params): sorted_params[param] = params[param] - return '%s?%s' % (authorization_url, urlencode(sorted_params)) + + return f'{authorization_url}?{urlencode(sorted_params)}' def get_access_token(self, code): data = {'client_id': self.consumer_key, diff --git a/ajapaik/ajapaik/sorl_overrides.py b/ajapaik/ajapaik/sorl_overrides.py index 38efc4931..3f4949f94 100644 --- a/ajapaik/ajapaik/sorl_overrides.py +++ b/ajapaik/ajapaik/sorl_overrides.py @@ -1,4 +1,5 @@ import os + from sorl.thumbnail.base import ThumbnailBackend, EXTENSIONS from sorl.thumbnail.conf.defaults import THUMBNAIL_PREFIX from sorl.thumbnail.helpers import tokey, serialize @@ -10,6 +11,6 @@ def _get_thumbnail_filename(self, source, geometry_string, options): filename, _ext = os.path.splitext(os.path.basename(source.name)) - path = '%s/%s' % (key, filename) + path = f'{key}/{filename}' - return '%s%s.%s' % (THUMBNAIL_PREFIX, path, EXTENSIONS[options['format']]) + return 
f'{THUMBNAIL_PREFIX}{path}.{EXTENSIONS[options["format"]]}' diff --git a/ajapaik/ajapaik/templatetags/ajapaik_templatetags.py b/ajapaik/ajapaik/templatetags/ajapaik_templatetags.py index 26629b1f0..9d1996f2a 100644 --- a/ajapaik/ajapaik/templatetags/ajapaik_templatetags.py +++ b/ajapaik/ajapaik/templatetags/ajapaik_templatetags.py @@ -15,9 +15,11 @@ def __init__(self, values): def render(self, context): req = Variable('request').resolve(context) params = req.GET.copy() + for key, value in self.values.items(): params[key] = value.resolve(context) - return '?%s' % params.urlencode() + + return f'?{params.urlencode()}' @register.tag diff --git a/ajapaik/ajapaik/urls_opendata.py b/ajapaik/ajapaik/urls_opendata.py index 5b4bfc684..32f644807 100644 --- a/ajapaik/ajapaik/urls_opendata.py +++ b/ajapaik/ajapaik/urls_opendata.py @@ -1,4 +1,4 @@ -from django.urls import re_path, path +from django.urls import path from django.views.generic import TemplateView from rest_framework.routers import DefaultRouter from rest_framework.urlpatterns import format_suffix_patterns @@ -14,5 +14,5 @@ path('photos//geotags/', PhotoGeoTagViewSet.as_view({'get': 'retrieve'}), name='opendata-photo-geotags'), - re_path(r'^robots\.txt', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')), + path('robots.txt', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')), ]) diff --git a/ajapaik/ajapaik/views.py b/ajapaik/ajapaik/views.py index 73d674a88..a8027928a 100644 --- a/ajapaik/ajapaik/views.py +++ b/ajapaik/ajapaik/views.py @@ -486,7 +486,7 @@ def _get_filtered_data_for_frontpage(request, album_id=None, page_override=None) # In some cases it is faster to get number of photos before we annotate new columns to it albumsize_before_sorting = 0 if not album: - albumsize_before_sorting = Photo.objects.filter(pk__in=photos).cached_count() + albumsize_before_sorting = Photo.objects.filter(pk__in=photos).cached_count(q) # SORTING BELOW THIS LINE @@ -660,7 
+660,7 @@ def _get_filtered_data_for_frontpage(request, album_id=None, page_override=None) p[3] = p[14] + (". " if p[14][-1] != "." else " ") + p[ 3] # add title to image description if both are present. - # Failback width/height for photos which imagedata arent saved yet + # Fallback width/height for photos whose image data is not saved yet if p[1] == '' or p[1] is None: p[1] = 400 if p[2] == '' or p[2] is None: diff --git a/ajapaik/ajapaik_curator/views.py b/ajapaik/ajapaik_curator/views.py index f6a6fc950..8cae4d266 100644 --- a/ajapaik/ajapaik_curator/views.py +++ b/ajapaik/ajapaik_curator/views.py @@ -1,6 +1,8 @@ import datetime import json +import os import ssl +import traceback from urllib.request import build_opener import requests @@ -9,6 +11,7 @@ from django.core.exceptions import ObjectDoesNotExist from django.core.files.base import ContentFile from django.db.models import Q, F +from django.db.transaction import atomic from django.http import HttpResponse from django.shortcuts import render from django.urls import reverse @@ -19,7 +22,8 @@ from ajapaik.ajapaik.forms import CuratorWholeSetAlbumsSelectionForm, CuratorAlbumEditForm, CuratorPhotoUploadForm from ajapaik.ajapaik.fotis_utils import parse_fotis_timestamp_data -from ajapaik.ajapaik.models import Album, AlbumPhoto, Photo, Licence, Source, GeoTag, Points, Dating +from ajapaik.ajapaik.models import Album, AlbumPhoto, Photo, Licence, Source, GeoTag, Points, Dating, \ + ApplicationException from ajapaik.ajapaik.serializers import CuratorMyAlbumListAlbumSerializer, CuratorAlbumSelectionAlbumSerializer, \ CuratorAlbumInfoSerializer from ajapaik.ajapaik.utils import ImportBlacklistService, _join_2_json_objects @@ -36,14 +40,14 @@ @ensure_csrf_cookie def curator(request): last_created_album = Album.objects.filter(is_public=True).order_by('-created').first() - # FIXME: Ugly - curator_random_image_ids = None - if last_created_album: + if last_created_album and last_created_album.photo_count_with_subalbums > 
5: curator_random_image_ids = AlbumPhoto.objects.filter( album_id=last_created_album.id).order_by('?').values_list('photo_id', flat=True) - if not curator_random_image_ids or curator_random_image_ids.count() < 5: + else: curator_random_image_ids = AlbumPhoto.objects.order_by('?').values_list('photo_id', flat=True) - curator_random_images = Photo.objects.filter(pk__in=curator_random_image_ids)[:5] + + image_count = min(5, curator_random_image_ids.count()) + curator_random_images = Photo.objects.filter(pk__in=curator_random_image_ids)[:image_count] context = { 'description': _('Search for old photos, add them to Ajapaik, ' 'determine their locations and share the resulting album!'), @@ -63,13 +67,17 @@ def curator_search(request): form = CuratorSearchForm(request.POST) response = json.dumps({}) if form.is_valid(): + if form.cleaned_data['useMUIS'] or form.cleaned_data['useMKA'] or form.cleaned_data['useDIGAR'] or \ form.cleaned_data['useETERA'] or form.cleaned_data['useUTLIB']: + valimimoodul_driver = ValimimoodulDriver() + if form.cleaned_data['ids']: response = valimimoodul_driver.transform_response( valimimoodul_driver.get_by_ids(form.cleaned_data['ids']), form.cleaned_data['filterExisting']) + else: valimimoodul_driver = None @@ -77,23 +85,28 @@ def curator_search(request): if valimimoodul_driver and not form.cleaned_data['ids']: response = _join_2_json_objects(response, valimimoodul_driver.transform_response( valimimoodul_driver.search(form.cleaned_data), form.cleaned_data['filterExisting'])) + if form.cleaned_data['useFlickr']: flickr_driver = FlickrCommonsDriver() response = _join_2_json_objects(response, flickr_driver.transform_response( flickr_driver.search(form.cleaned_data), form.cleaned_data['filterExisting'])) + if form.cleaned_data['useCommons']: commons_driver = CommonsDriver() response = _join_2_json_objects(response, commons_driver.transform_response( commons_driver.search(form.cleaned_data), form.cleaned_data['filterExisting'])) + if 
form.cleaned_data['useEuropeana']: europeana_driver = EuropeanaDriver() response = _join_2_json_objects(response, europeana_driver.transform_response( europeana_driver.search(form.cleaned_data), form.cleaned_data['filterExisting'])) + if form.cleaned_data['useFinna']: finna_driver = FinnaDriver() response = _join_2_json_objects(response, finna_driver.transform_response( finna_driver.search(form.cleaned_data), form.cleaned_data['filterExisting'], form.cleaned_data['driverPage'])) + if form.cleaned_data['useFotis']: fotis_driver = FotisDriver() fotis_data = fotis_driver.search(form.cleaned_data) @@ -175,362 +188,386 @@ def curator_update_my_album(request): def curator_photo_upload_handler(request): profile = request.get_user().profile - etera_token = request.POST.get('eteraToken') - curator_album_selection_form = CuratorWholeSetAlbumsSelectionForm(request.POST) + selection_json = request.POST.get('selection') - selection_json = request.POST.get('selection') or None - selection = None - if selection_json is not None: + if selection_json: selection = json.loads(selection_json) + else: + selection = None - all_curating_points = [] - total_points_for_curating = 0 - context = { - 'photos': {} - } + context = {'photos': {}} - if selection and profile is not None and curator_album_selection_form.is_valid(): - general_albums = Album.objects.filter(id__in=request.POST.getlist('albums')) - if general_albums.exists(): - context['album_id'] = general_albums[0].pk + if not selection or not profile or not curator_album_selection_form.is_valid(): + if not selection: + error = _('Please add pictures to your album') else: - context['album_id'] = None - default_album = Album( - name=f'{str(profile.id)}-{str(timezone.now())}', - atype=Album.AUTO, - profile=profile, - is_public=False, - ) - default_album.save() - # 15 => unknown copyright - unknown_licence = Licence.objects.get(pk=15) - flickr_licence = Licence.objects.filter(url='https://www.flickr.com/commons/usage/').first() - 
import_blacklist_service = ImportBlacklistService() - - for k, v in selection.items(): - upload_form = CuratorPhotoUploadForm(v) - created_album_photo_links = [] - awarded_curator_points = [] - if upload_form.is_valid(): - source_key = upload_form.cleaned_data['identifyingNumber'] - - if source_key and import_blacklist_service.is_blacklisted(source_key): - context['photos'][k] = { - 'error': _( - f'Could not import picture, as it is blacklisted from being imported: {upload_form.cleaned_data["imageUrl"]}')} - context['photos'][k]['success'] = False - continue - - if not upload_form.cleaned_data['institution']: - licence = unknown_licence - source = Source.objects.get(name='AJP') + error = _('Not enough data submitted') + context = { + 'error': error + } + + return HttpResponse(json.dumps(context), content_type='application/json') + + total_points_for_curating = 0 + general_albums = Album.objects.filter(id__in=request.POST.getlist('albums')) + + if general_albums: + context['album_id'] = general_albums[0].pk + else: + context['album_id'] = None + + # 15 => unknown copyright + unknown_licence = Licence.objects.get(pk=15) + flickr_licence = Licence.objects.filter(url='https://www.flickr.com/commons/usage/').first() + import_blacklist_service = ImportBlacklistService() + + institution = None + + # UGLY! WE should only create in ATOMIC blocks... + default_album = Album.objects.create( + name=f'{str(profile.id)}-{str(timezone.now())}', + atype=Album.AUTO, + profile=profile, + is_public=False, + ) + + all_curating_points = [] + awarded_curator_points = [] + + # DO We really need to support importing from two different sources at the same time. 
+ for k, v in selection.items(): + upload_form = CuratorPhotoUploadForm(v) + + if not upload_form.is_valid(): + context['photos'][k] = {} + context['photos'][k]['error'] = _('Error uploading file: %s (%s)' + % (upload_form.errors, upload_form.cleaned_data['imageUrl'])) + context['total_points_for_curating'] = total_points_for_curating + return HttpResponse(json.dumps(context), content_type='application/json') + + source_key = upload_form.cleaned_data['identifyingNumber'] + + if source_key and import_blacklist_service.is_blacklisted(source_key): + context['photos'][k] = { + 'error': _( + f'Could not import picture, as it is blacklisted from being imported: {upload_form.cleaned_data["imageUrl"]}')} + context['photos'][k]['success'] = False + continue + + institution_changed = institution != upload_form.cleaned_data['institution'] + licence = None + + if institution_changed: + institution = upload_form.cleaned_data['institution'] + + if not institution: + licence = unknown_licence + source = Source.objects.get(name='AJP') + + elif institution == 'Flickr Commons': + licence = flickr_licence + + elif institution and institution.split(',')[0] == 'ETERA': + institution = 'TLÜAR ETERA' + + # For Finna + elif not institution and upload_form.cleaned_data['licence']: + licence = Licence.objects.filter( + Q(name=upload_form.cleaned_data['licence']) | Q(url=upload_form.cleaned_data['licenceUrl'])).first() + + if not licence: + if upload_form.cleaned_data['licence'] != upload_form.cleaned_data['licenceUrl']: + licence_name = upload_form.cleaned_data['licence'] else: - if upload_form.cleaned_data['institution'] == 'Flickr Commons': - licence = flickr_licence - else: - # For Finna - if upload_form.cleaned_data['licence']: - licence = Licence.objects.filter( - name=upload_form.cleaned_data['licence']).first() or Licence.objects.filter( - url=upload_form.cleaned_data['licenceUrl']).first() - - if not licence: - if upload_form.cleaned_data['licence'] != 
upload_form.cleaned_data['licenceUrl']: - licence_name = upload_form.cleaned_data['licence'] - else: - licence_name = _get_licence_name_from_url(upload_form.cleaned_data['licenceUrl']) - - licence = Licence( - name=licence_name, - url=upload_form.cleaned_data['licenceUrl'] or '' - ) - licence.save() - else: - licence = unknown_licence - institution = upload_form.cleaned_data['institution'].split(',')[0] - if upload_form.cleaned_data['institution'] == 'ETERA': - upload_form.cleaned_data['institution'] = 'TLÜAR ETERA' - try: - source = Source.objects.get(description=institution) - except ObjectDoesNotExist: - source = Source.objects.create(name=institution, description=institution) - - existing_photo = None - if upload_form.cleaned_data['id'] and upload_form.cleaned_data['id'] != '': - if upload_form.cleaned_data['collections'] == 'DIGAR': - incoming_muis_id = source_key - else: - incoming_muis_id = upload_form.cleaned_data['id'] - if 'ETERA' in upload_form.cleaned_data['institution']: - upload_form.cleaned_data['types'] = 'photo' - if '_' in incoming_muis_id \ - and not ('finna.fi' in upload_form.cleaned_data['urlToRecord']) \ - and not ('europeana.eu' in upload_form.cleaned_data['urlToRecord']): - muis_id = incoming_muis_id.split('_')[0] - muis_media_id = incoming_muis_id.split('_')[1] - else: - muis_id = incoming_muis_id - muis_media_id = None - if upload_form.cleaned_data['collections'] == 'DIGAR': - source_key = \ - f'nlib-digar:{upload_form.cleaned_data["identifyingNumber"]}' - muis_media_id = 1 - try: - if muis_media_id: - existing_photo = Photo.objects.filter( - source=source, external_id=muis_id, external_sub_id=muis_media_id).get() + licence_name = _get_licence_name_from_url(upload_form.cleaned_data['licenceUrl']) + + licence = Licence(name=licence_name, url=upload_form.cleaned_data['licenceUrl']) + licence.save() + + if institution_changed and institution: + source = Source.objects.filter(description=institution).first() + + if not source: + 
Source.objects.create(name=institution, description=institution) + source = Source.objects.get(name=institution, description=institution) + + if not licence: + licence = unknown_licence + + if upload_form.cleaned_data['id']: + + if upload_form.cleaned_data['collections'] == 'DIGAR': + incoming_muis_id = source_key + else: + incoming_muis_id = upload_form.cleaned_data['id'] + + if 'ETERA' in institution: + upload_form.cleaned_data['types'] = 'photo' + + if '_' in incoming_muis_id \ + and not ('finna.fi' in upload_form.cleaned_data['urlToRecord']) \ + and not ('europeana.eu' in upload_form.cleaned_data['urlToRecord']): + muis_id = incoming_muis_id.split('_')[0] + muis_media_id = incoming_muis_id.split('_')[1] + else: + muis_id = incoming_muis_id + muis_media_id = None + + if upload_form.cleaned_data['collections'] == 'DIGAR': + source_key = \ + f'nlib-digar:{upload_form.cleaned_data["identifyingNumber"]}' + muis_media_id = 1 + + existing_photos = Photo.objects.filter(source=source, external_id=muis_id) + if muis_media_id: + existing_photo = existing_photos.filter(external_sub_id=muis_media_id).first() + else: + existing_photo = existing_photos.first() + + if existing_photo: + if general_albums.exists(): + for a in general_albums: + ap = AlbumPhoto(photo=existing_photo, album=a, profile=profile, + type=AlbumPhoto.RECURATED) + ap.save() + points_for_recurating = Points(user=profile, action=Points.PHOTO_RECURATION, + photo=existing_photo, points=30, + album=general_albums[0], created=timezone.now()) + points_for_recurating.save() + all_curating_points.append(points_for_recurating) + dap = AlbumPhoto(photo=existing_photo, album=default_album, profile=profile, + type=AlbumPhoto.RECURATED) + dap.save() + context['photos'][k] = {} + context['photos'][k]['success'] = True + context['photos'][k]['message'] = _('Photo already exists in Ajapaik') + + else: + if upload_form.cleaned_data['date'] == '[]': + upload_form.cleaned_data['date'] = None + + photo_path = None + try: + 
with atomic(): + photo = Photo.objects.create( + user=profile, + author=upload_form.cleaned_data['creators'], + description=upload_form.cleaned_data['title'].rstrip(), + source=source, + types=upload_form.cleaned_data['types'] if upload_form.cleaned_data['types'] else None, + keywords=upload_form.cleaned_data['keywords'].strip() if upload_form.cleaned_data[ + 'keywords'] else None, + date_text=upload_form.cleaned_data['date'] if upload_form.cleaned_data[ + 'date'] else None, + licence=licence, + external_id=muis_id, + external_sub_id=muis_media_id, + source_key=source_key, + source_url=upload_form.cleaned_data['urlToRecord'], + flip=upload_form.cleaned_data['flip'], + invert=upload_form.cleaned_data['invert'], + stereo=upload_form.cleaned_data['stereo'], + rotated=upload_form.cleaned_data['rotated'] + ) + + if upload_form.cleaned_data['collections'] == 'DIGAR': + photo.image = f'uploads/DIGAR_{str(photo.source_key).split(":")[1]}_1.jpg' else: - existing_photo = Photo.objects.filter( - source=source, external_id=muis_id).get() - except ObjectDoesNotExist: - pass - if not existing_photo: - new_photo = None - if upload_form.cleaned_data['date'] == '[]': - upload_form.cleaned_data['date'] = None - try: - new_photo = Photo( - user=profile, - author=upload_form.cleaned_data['creators'], - description=upload_form.cleaned_data['title'].rstrip(), - source=source, - types=upload_form.cleaned_data['types'] if upload_form.cleaned_data['types'] else None, - keywords=upload_form.cleaned_data['keywords'].strip() if upload_form.cleaned_data[ - 'keywords'] else None, - date_text=upload_form.cleaned_data['date'] if upload_form.cleaned_data[ - 'date'] else None, - licence=licence, - external_id=muis_id, - external_sub_id=muis_media_id, - source_key=source_key, - source_url=upload_form.cleaned_data['urlToRecord'], - flip=upload_form.cleaned_data['flip'], - invert=upload_form.cleaned_data['invert'], - stereo=upload_form.cleaned_data['stereo'], - 
rotated=upload_form.cleaned_data['rotated'] - ) - new_photo.save() - if upload_form.cleaned_data['collections'] == 'DIGAR': - new_photo.image = f'uploads/DIGAR_{str(new_photo.source_key).split(":")[1]}_1.jpg' + # Enable plain http and broken SSL + ssl._create_default_https_context = ssl._create_unverified_context + opener = build_opener() + headers = [('User-Agent', settings.UA)] + + if etera_token: + headers.append(('Authorization', f'Bearer {etera_token}')) + opener.addheaders = headers + img_response = opener.open(upload_form.cleaned_data['imageUrl']) + + if 'ETERA' in photo.source.description: + img = ContentFile(img_response.read()) + photo.image_no_watermark.save('etera.jpg', img) + photo.watermark() else: - # Enable plain http and broken SSL - ssl._create_default_https_context = ssl._create_unverified_context - opener = build_opener() - headers = [('User-Agent', settings.UA)] - if etera_token: - headers.append(('Authorization', f'Bearer {etera_token}')) - opener.addheaders = headers - img_response = opener.open(upload_form.cleaned_data['imageUrl']) - if 'ETERA' in new_photo.source.description: - img = ContentFile(img_response.read()) - new_photo.image_no_watermark.save('etera.jpg', img) - new_photo.watermark() - else: - new_photo.image.save('muis.jpg', ContentFile(img_response.read())) - if new_photo.invert: - photo_path = f'{settings.MEDIA_ROOT}/{str(new_photo.image)}' - img = Image.open(photo_path) + photo.image.save('muis.jpg', ContentFile(img_response.read())) + + if photo.invert or photo.rotated or photo.flip: + photo_path = f'{settings.MEDIA_ROOT}/{str(photo.image)}' + img = Image.open(photo_path) + + if photo.invert: inverted_grayscale_image = ImageOps.invert(img).convert('L') inverted_grayscale_image.save(photo_path) - if new_photo.rotated is not None and new_photo.rotated > 0: - photo_path = f'{settings.MEDIA_ROOT}/{str(new_photo.image)}' - img = Image.open(photo_path) - rot = img.rotate(new_photo.rotated, expand=1) + if photo.rotated: + rot = 
img.rotate(photo.rotated, expand=1) rot.save(photo_path) - new_photo.width, new_photo.height = rot.size - if new_photo.flip: - photo_path = f'{settings.MEDIA_ROOT}/{str(new_photo.image)}' - img = Image.open(photo_path) + photo.width, photo.height = rot.size + if photo.flip: flipped_image = img.transpose(Image.FLIP_LEFT_RIGHT) flipped_image.save(photo_path) - context['photos'][k] = {} - context['photos'][k]['message'] = _('OK') - - lat = upload_form.cleaned_data.get('latitude') - lng = upload_form.cleaned_data.get('longitude') - - gt_exists = GeoTag.objects.filter(type=GeoTag.SOURCE_GEOTAG, - photo__source_key=new_photo.source_key).exists() - if lat and lng and not gt_exists: - source_geotag = GeoTag( - lat=lat, - lon=lng, - origin=GeoTag.SOURCE, - type=GeoTag.SOURCE_GEOTAG, - map_type=GeoTag.NO_MAP, - photo=new_photo, - is_correct=True, - trustworthiness=0.07 - ) - source_geotag.save() - new_photo.latest_geotag = source_geotag.created - new_photo.set_calculated_fields() - new_photo.image - new_photo.save() - new_photo.set_aspect_ratio() - new_photo.add_to_source_album() - new_photo.find_similar() - points_for_curating = Points(action=Points.PHOTO_CURATION, photo=new_photo, points=50, - user=profile, created=new_photo.created, - album=general_albums[0]) - points_for_curating.save() - awarded_curator_points.append(points_for_curating) - if general_albums.exists(): - for a in general_albums: - ap = AlbumPhoto(photo=new_photo, album=a, profile=profile, type=AlbumPhoto.CURATED) - ap.save() - created_album_photo_links.append(ap) - if not a.cover_photo: - a.cover_photo = new_photo - a.light_save() - for b in general_albums[1:]: - points_for_curating = Points(action=Points.PHOTO_RECURATION, photo=new_photo, - points=30, - user=profile, created=new_photo.created, - album=b) - points_for_curating.save() - awarded_curator_points.append(points_for_curating) - all_curating_points.append(points_for_curating) - ap = AlbumPhoto(photo=new_photo, album=default_album, 
profile=profile, - type=AlbumPhoto.CURATED) - ap.save() - created_album_photo_links.append(ap) - - persons = upload_form.cleaned_data.get('persons', []) - - if persons: - existing_albums = Album.objects.filter(name__in=persons, atype=Album.PERSON) - album_ids = list(existing_albums.values_list('id', flat=True)) - - for album in existing_albums: - person_ap = AlbumPhoto.objects.create( - photo=new_photo, - album=album, - type=AlbumPhoto.FACE_TAGGED - ) - created_album_photo_links.append(person_ap) - - existing_names = existing_albums.values_list('name', flat=True) - new_names = list(set(persons) - set(existing_names)) - - for person_name in new_names: - album = Album.objects.create( - name=person_name, - atype=Album.PERSON, - ) - album_ids.append(album.id) - person_ap = AlbumPhoto.objects.create( - photo=new_photo, - album=album, - type=AlbumPhoto.FACE_TAGGED - ) - created_album_photo_links.append(person_ap) - - affected_albums = Album.objects.filter(id__in=album_ids) - affected_albums.update(photo_count_with_subalbums=F('photo_count_with_subalbums') + 1) - - if lat and lng: - affected_albums.update( - geotagged_photo_count_with_subalbums=F( - 'geotagged_photo_count_with_subalbums') + 1) - - affected_albums_without_photo = affected_albums.filter(cover_photo=None) - affected_albums_without_photo.update(cover_photo=new_photo) - - start_date = upload_form.cleaned_data.get('start_date') - end_date = upload_form.cleaned_data.get('end_date') - - if start_date or end_date: - date_start_accuracy = upload_form.cleaned_data.get('date_start_accuracy') - date_end_accuracy = upload_form.cleaned_data.get('date_end_accuracy') - - start_accuracy, raw_start_pattern = parse_fotis_timestamp_data(date_start_accuracy) - end_accuracy, raw_end_pattern = parse_fotis_timestamp_data(date_end_accuracy) - - raw_start = datetime.datetime.fromisoformat(start_date).strftime( - raw_start_pattern) if start_date else None - raw_end = datetime.datetime.fromisoformat(end_date).strftime( - 
raw_end_pattern) if end_date else None - - start = datetime.datetime.fromisoformat(start_date).strftime( - '%Y-%m-%d') if start_date else None - - end = datetime.datetime.fromisoformat(end_date).strftime( - '%Y-%m-%d') if end_date else None - - dating = Dating.objects.create( - photo=new_photo, - start=start or end, - end=start or end, - raw=f'{raw_start}-{raw_end}' if start and end else start and f'{raw_start}' or end and f"-{raw_end}", - start_accuracy=start_accuracy, - end_accuracy=end_accuracy, - start_approximate=start_accuracy != Dating.DAY, - end_approximate=end_accuracy != Dating.DAY, - comment=f'Data from FOTIS / Andmed FOTIS-est' - ) - new_photo.dating_count = 1 - new_photo.first_dating = dating.created - new_photo.latest_dating = dating.created - new_photo.light_save(update_fields=['dating_count', 'latest_dating', 'dating_count']) - - context['photos'][k]['success'] = True - all_curating_points.append(points_for_curating) - except Exception as e: - if new_photo: - new_photo.image.delete() - new_photo.delete() - for ap in created_album_photo_links: - ap.delete() - for cp in awarded_curator_points: - cp.delete() - context['photos'][k] = {'error': _('Error uploading file: %s (%s)' % - (e, upload_form.cleaned_data['imageUrl']))} - else: + context['photos'][k] = {} + context['photos'][k]['message'] = _('OK') + + lat = upload_form.cleaned_data.get('latitude') + lng = upload_form.cleaned_data.get('longitude') + + gt_exists = GeoTag.objects.filter(type=GeoTag.SOURCE_GEOTAG, + photo__source_key=photo.source_key).exists() + + if lat and lng and not gt_exists: + source_geotag = GeoTag( + lat=lat, + lon=lng, + origin=GeoTag.SOURCE, + type=GeoTag.SOURCE_GEOTAG, + map_type=GeoTag.NO_MAP, + photo=photo, + is_correct=True, + trustworthiness=0.07 + ) + source_geotag.save() + photo.latest_geotag = source_geotag.created + photo.set_calculated_fields() + + photo.image + photo.save() + photo.add_to_source_album() + photo.find_similar() + points_for_curating = 
Points.objects.create(action=Points.PHOTO_CURATION, photo=photo, + points=50, + user=profile, created=photo.created, + album=general_albums[0]) + awarded_curator_points.append(points_for_curating) + if general_albums.exists(): for a in general_albums: - ap = AlbumPhoto(photo=existing_photo, album=a, profile=profile, - type=AlbumPhoto.RECURATED) + ap = AlbumPhoto(photo=photo, album=a, profile=profile, type=AlbumPhoto.CURATED) ap.save() - points_for_recurating = Points(user=profile, action=Points.PHOTO_RECURATION, - photo=existing_photo, points=30, - album=general_albums[0], created=timezone.now()) - points_for_recurating.save() - all_curating_points.append(points_for_recurating) - dap = AlbumPhoto(photo=existing_photo, album=default_album, profile=profile, - type=AlbumPhoto.RECURATED) - dap.save() - context['photos'][k] = {} + + if not a.cover_photo: + a.cover_photo = photo + a.light_save() + + for b in general_albums[1:]: + points_for_curating = Points.objects.create(action=Points.PHOTO_RECURATION, photo=photo, + points=30, + user=profile, created=photo.created, + album=b) + awarded_curator_points.append(points_for_curating) + all_curating_points.append(points_for_curating) + + AlbumPhoto.objects.create( + photo=photo, + album=default_album, + profile=profile, + type=AlbumPhoto.CURATED + ) + persons = upload_form.cleaned_data.get('persons', []) + + if persons: + existing_albums = Album.objects.filter(name__in=persons, atype=Album.PERSON) + album_ids = list(existing_albums.values_list('id', flat=True)) + + for album in existing_albums: + AlbumPhoto.objects.create( + photo=photo, + album=album, + type=AlbumPhoto.FACE_TAGGED + ) + + existing_names = existing_albums.values_list('name', flat=True) + new_names = list(set(persons) - set(existing_names)) + + for person_name in new_names: + album = Album.objects.create( + name=person_name, + atype=Album.PERSON, + ) + album_ids.append(album.id) + AlbumPhoto.objects.create( + photo=photo, + album=album, + 
type=AlbumPhoto.FACE_TAGGED + ) + + affected_albums = Album.objects.filter(id__in=album_ids) + affected_albums.update(photo_count_with_subalbums=F('photo_count_with_subalbums') + 1) + + if lat and lng: + affected_albums.update( + geotagged_photo_count_with_subalbums=F( + 'geotagged_photo_count_with_subalbums') + 1) + + affected_albums_without_photo = affected_albums.filter(cover_photo=None) + affected_albums_without_photo.update(cover_photo=photo) + + start_date = upload_form.cleaned_data.get('start_date') + end_date = upload_form.cleaned_data.get('end_date') + + if start_date or end_date: + date_start_accuracy = upload_form.cleaned_data.get('date_start_accuracy') + date_end_accuracy = upload_form.cleaned_data.get('date_end_accuracy') + + start_accuracy, raw_start_pattern = parse_fotis_timestamp_data(date_start_accuracy) + end_accuracy, raw_end_pattern = parse_fotis_timestamp_data(date_end_accuracy) + + raw_start = datetime.datetime.fromisoformat(start_date).strftime( + raw_start_pattern) if start_date else None + raw_end = datetime.datetime.fromisoformat(end_date).strftime( + raw_end_pattern) if end_date else None + + start = datetime.datetime.fromisoformat(start_date).strftime( + '%Y-%m-%d') if start_date else None + + end = datetime.datetime.fromisoformat(end_date).strftime( + '%Y-%m-%d') if end_date else None + + dating = Dating.objects.create( + photo=photo, + start=start or end, + end=start or end, + raw=f'{raw_start}-{raw_end}' if start and end else start and f'{raw_start}' or end and f"-{raw_end}", + start_accuracy=start_accuracy, + end_accuracy=end_accuracy, + start_approximate=start_accuracy != Dating.DAY, + end_approximate=end_accuracy != Dating.DAY, + comment=f'Data from FOTIS / Andmed FOTIS-est' + ) + + photo.dating_count = 1 + photo.first_dating = dating.created + photo.latest_dating = dating.created + photo.light_save(update_fields=['dating_count', 'latest_dating', 'dating_count']) + context['photos'][k]['success'] = True - 
context['photos'][k]['message'] = _('Photo already exists in Ajapaik') - else: - context['photos'][k] = {} - context['photos'][k]['error'] = _('Error uploading file: %s (%s)' - % (upload_form.errors, upload_form.cleaned_data['imageUrl'])) - - if general_albums: - game_reverse = request.build_absolute_uri(reverse('game')) - for ga in general_albums: - requests.post( - f'https://graph.facebook.com/v7.0/?id={game_reverse}?album={str(ga.id)}&scrape=true' - ) - for cp in all_curating_points: - total_points_for_curating += cp.points - context['total_points_for_curating'] = total_points_for_curating - if general_albums.exists(): - for album in general_albums: - album.save() - if album.subalbum_of: - album.subalbum_of.save() - else: - if not selection or len(selection) == 0: - error = _('Please add pictures to your album') - else: - error = _('Not enough data submitted') - context = { - 'error': error - } + all_curating_points.append(points_for_curating) + except Exception as e: + ApplicationException.objects.create(exception=traceback.format_exc()) + if photo_path: + os.remove(photo_path) + + context['photos'][k] = {'error': _('Error uploading file: %s (%s)' % + (e, upload_form.cleaned_data['imageUrl']))} + + raise e + + if general_albums: + game_reverse = request.build_absolute_uri(reverse('game')) + for ga in general_albums: + requests.post( + f'https://graph.facebook.com/v7.0/?id={game_reverse}?album={str(ga.id)}&scrape=true' + ) + + for cp in all_curating_points: + total_points_for_curating += cp.points + + context['total_points_for_curating'] = total_points_for_curating + if general_albums.exists(): + for album in general_albums: + album.save() + if album.subalbum_of: + album.subalbum_of.save() + return HttpResponse(json.dumps(context), content_type='application/json') diff --git a/ajapaik/ajapaik_upload/rephoto/views.py b/ajapaik/ajapaik_upload/rephoto/views.py index 09907b7e4..c5815a925 100644 --- a/ajapaik/ajapaik_upload/rephoto/views.py +++ 
b/ajapaik/ajapaik_upload/rephoto/views.py @@ -77,6 +77,7 @@ def rephoto_upload(request, photo_id): for each in photo.albums.all(): each.rephoto_count_with_subalbums = each.get_rephotos_queryset_with_subalbums().count() each.save(update_fields=['rephoto_count_with_subalbums']) + rephoto.image.save('rephoto.jpg', file_obj) if rephoto.cam_scale_factor: diff --git a/ajapaik/ajapaik_upload/views.py b/ajapaik/ajapaik_upload/views.py index 9f2d47601..643cf7e8c 100644 --- a/ajapaik/ajapaik_upload/views.py +++ b/ajapaik/ajapaik_upload/views.py @@ -13,54 +13,65 @@ def user_upload(request): 'is_user_upload': True, 'show_albums_error': False } + if request.method == 'POST': form = UserPhotoUploadForm(request.POST, request.FILES) albums = request.POST.getlist('albums') - if form.is_valid() and albums is not None and len(albums) > 0: + + if form.is_valid() and albums and len(albums) > 0: photo = form.save(commit=False) photo.user = request.user.profile + if photo.uploader_is_author: photo.author = request.user.profile.get_display_name photo.licence = Licence.objects.get(id=17) # CC BY 4.0 + photo.save() photo.set_aspect_ratio() photo.find_similar() albums = request.POST.getlist('albums') album_photos = [] - for each in albums: + + for album_id in albums: + album = Album.objects.filter(id=album_id).first() + + if not album: + continue + + album.set_calculated_fields() + album.light_save() + album_photos.append( AlbumPhoto(photo=photo, - album=Album.objects.filter(id=each).first(), + album=Album.objects.filter(id=album.id).first(), type=AlbumPhoto.UPLOADED, profile=request.user.profile )) + AlbumPhoto.objects.bulk_create(album_photos) - for a in albums: - album = Album.objects.filter(id=a).first() - if album is not None: - album.set_calculated_fields() - album.light_save() - form = UserPhotoUploadForm() photo.add_to_source_album() + if request.POST.get('geotag') == 'true': return redirect(f'{reverse("frontpage_photos")}?photo={str(photo.id)}&locationToolsOpen=1') else: 
context['message'] = _('Photo uploaded') + if albums is None or len(albums) < 1: context['show_albums_error'] = True else: form = UserPhotoUploadForm() - context['form'] = form - return render(request, 'user_upload/user_upload.html', context) + return render(request, 'user_upload/user_upload.html', {**context, 'form': form}) def user_upload_add_album(request): context = { 'ajapaik_facebook_link': settings.AJAPAIK_FACEBOOK_LINK } + if request.method == 'POST': form = UserPhotoUploadAddAlbumForm(request.POST, profile=request.user.profile) + if form.is_valid(): album = form.save(commit=False) album.profile = request.user.profile @@ -68,6 +79,7 @@ def user_upload_add_album(request): context['message'] = _('Album created') else: form = UserPhotoUploadAddAlbumForm(profile=request.user.profile) + context['form'] = form return render(request, 'user_upload/user_upload_add_album.html', context) @@ -79,6 +91,6 @@ def photo_upload_modal(request, photo_id): context = { 'photo': photo, 'licence': licence, - 'next': request.META['HTTP_REFERER'] + 'next': request.META.get('HTTP_REFERER') } return render(request, 'rephoto_upload/_rephoto_upload_modal_content.html', context) diff --git a/ajapaik/settings/default.py b/ajapaik/settings/default.py index 7bdecf262..9ba699bbe 100644 --- a/ajapaik/settings/default.py +++ b/ajapaik/settings/default.py @@ -23,13 +23,13 @@ AJAPAIK_FACEBOOK_LINK = 'https://www.facebook.com/ajapaik' ABSOLUTE_PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')) -ABSOLUTE_TEMPLATES_PATH = '%s/templates' % ABSOLUTE_PROJECT_ROOT +ABSOLUTE_TEMPLATES_PATH = f'{ABSOLUTE_PROJECT_ROOT}/templates' if ABSOLUTE_PROJECT_ROOT not in sys.path: sys.path.insert(0, ABSOLUTE_PROJECT_ROOT) -STATIC_ROOT = '%s/static-collected' % ABSOLUTE_PROJECT_ROOT -MEDIA_ROOT = '%s/media' % ABSOLUTE_PROJECT_ROOT +STATIC_ROOT = f'{ABSOLUTE_PROJECT_ROOT}/static-collected' +MEDIA_ROOT = f'{ABSOLUTE_PROJECT_ROOT}/media' VANALINNAD_ROOT = 
'/home/ajapaik/vanalinnad.mooo.com' STATIC_URL = '/static/' MEDIA_URL = '/media/' @@ -39,11 +39,11 @@ STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage' STATICFILES_DIRS = ( - '%s/ajapaik/ajapaik/static' % ABSOLUTE_PROJECT_ROOT, + f'{ABSOLUTE_PROJECT_ROOT}/ajapaik/ajapaik/static', ) LOCALE_PATHS = ( - '%s/ajapaik/ajapaik/locale' % ABSOLUTE_PROJECT_ROOT, + f'{ABSOLUTE_PROJECT_ROOT}/ajapaik/ajapaik/locale', ) ADMINS = ( diff --git a/ajapaik/utils.py b/ajapaik/utils.py index 8b287979d..08cc58465 100644 --- a/ajapaik/utils.py +++ b/ajapaik/utils.py @@ -124,15 +124,15 @@ def can_action_be_done(model, photo, profile, key, new_value): setattr(new_suggestion, key, new_value) all_suggestions = model.objects.filter( - photo=photo - ).exclude( - proposer=profile - ).order_by( - 'proposer_id', - '-created' - ).all().distinct( - 'proposer_id' - ) + photo=photo + ).exclude( + proposer=profile + ).order_by( + 'proposer_id', + '-created' + ).all().distinct( + 'proposer_id' + ) if all_suggestions is not None: suggestions = [new_value] @@ -172,7 +172,7 @@ def suggest_photo_edit(photo_suggestions, key, new_value, Points, score, action_ all_suggestions = model.objects.filter(photo=photo).exclude(proposer=profile) \ .order_by('proposer_id', '-created').all().distinct('proposer_id') - if all_suggestions is not None: + if all_suggestions: suggestions = [new_value] for suggestion in all_suggestions: @@ -182,14 +182,17 @@ def suggest_photo_edit(photo_suggestions, key, new_value, Points, score, action_ if new_value != most_common_choice: response = SUGGESTION_SAVED_BUT_CONSENSUS_NOT_AFFECTED was_action_successful = False + new_value = most_common_choice if function_name is not None: old_value = getattr(photo, key) if function_name == 'do_rotate' and (old_value is None or (new_value != old_value)): getattr(photo, function_name)(new_value) + elif (function_name != 'do_rotate') and ( (old_value or new_value is True) and old_value != new_value): + 
getattr(photo, function_name)() else: setattr(photo, key, new_value)