From 8cbe46baf60b6dd4c2922e79a05fabd3c1277c0d Mon Sep 17 00:00:00 2001
From: Simon Hildrew
Date: Mon, 1 Feb 2021 13:39:58 +0000
Subject: [PATCH 1/2] Add scalafmt to the project

---
 .scalafmt.conf      | 1 +
 project/plugins.sbt | 2 ++
 2 files changed, 3 insertions(+)
 create mode 100644 .scalafmt.conf

diff --git a/.scalafmt.conf b/.scalafmt.conf
new file mode 100644
index 0000000000..33df8a757b
--- /dev/null
+++ b/.scalafmt.conf
@@ -0,0 +1 @@
+version = 2.7.5
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 71b797b402..d8a351df7a 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -11,3 +11,5 @@ addSbtPlugin("com.gu" % "sbt-riffraff-artifact" % "1.1.9")
 addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.3")
 
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9")
+
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.0")

From deb86277c923e29d93e40760256b85028c218849 Mon Sep 17 00:00:00 2001
From: Simon Hildrew
Date: Mon, 1 Feb 2021 13:40:05 +0000
Subject: [PATCH 2/2] Apply scalafmt on the project

---
 .../dev/app/AdminToolsComponents.scala | 20 +-
 admin-tools/dev/app/AppLoader.scala | 3 +-
 .../dev/app/controllers/AdminToolsCtr.scala | 41 +-
 .../dev/app/lib/AdminToolsConfig.scala | 3 +-
 .../BatchIndexLambdaHandler.scala | 4 +-
 .../ImageProjectionLambdaHandler.scala | 50 +-
 .../ImagesGroupByProgressState.scala | 8 +-
 .../ResetImageBatchIndexTable.scala | 3 +-
 .../gu/mediaservice/ResetKnownErrors.scala | 22 +-
 auth/app/auth/AuthComponents.scala | 24 +-
 auth/app/auth/AuthConfig.scala | 3 +-
 auth/app/auth/AuthController.scala | 124 +-
 collections/app/AppLoader.scala | 3 +-
 collections/app/CollectionsComponents.scala | 16 +-
 .../controllers/CollectionsController.scala | 182 ++-
 .../ImageCollectionsController.scala | 72 +-
 collections/app/lib/CollectionsConfig.scala | 4 +-
 collections/app/lib/CollectionsMetrics.scala | 3 +-
 collections/app/lib/Notifications.scala | 3 +-
 collections/app/model/Node.scala | 45 +-
 collections/app/store/CollectionsStore.scala | 12 +-
 collections/test/model/NodeTest.scala | 27 +-
 .../com/gu/mediaservice/lib/BaseStore.scala | 16 +-
 .../gu/mediaservice/lib/DateTimeUtils.scala | 11 +-
 .../scala/com/gu/mediaservice/lib/Files.scala | 20 +-
 .../com/gu/mediaservice/lib/ImageFields.scala | 18 +-
 .../lib/ImageIngestOperations.scala | 102 +-
 .../lib/ImageQuarantineOperations.scala | 28 +-
 .../gu/mediaservice/lib/ImageStorage.scala | 9 +-
 .../scala/com/gu/mediaservice/lib/MD5.scala | 1 -
 .../com/gu/mediaservice/lib/Processes.scala | 14 +-
 .../gu/mediaservice/lib/S3ImageStorage.scala | 32 +-
 .../mediaservice/lib/argo/ArgoHelpers.scala | 61 +-
 .../mediaservice/lib/argo/model/Action.scala | 3 +-
 .../lib/argo/model/CollectionResponse.scala | 22 +-
 .../lib/argo/model/EmbeddedEntity.scala | 15 +-
 .../lib/argo/model/EntityResponse.scala | 19 +-
 .../lib/argo/model/ErrorResponse.scala | 17 +-
 .../gu/mediaservice/lib/argo/model/Link.scala | 3 +-
 .../mediaservice/lib/auth/ApiAccessor.scala | 14 +-
 .../lib/auth/Authentication.scala | 107 +-
 .../gu/mediaservice/lib/auth/KeyStore.scala | 8 +-
 .../lib/auth/PermissionsHandler.scala | 36 +-
 .../ApiKeyAuthenticationProvider.scala | 68 +-
 .../provider/AuthenticationProvider.scala | 39 +-
 .../provider/AuthenticationProviders.scala | 5 +-
 .../auth/provider/AuthenticationStatus.scala | 10 +-
 .../lib/aws/AwsClientBuilderUtils.scala | 29 +-
 .../gu/mediaservice/lib/aws/DynamoDB.scala | 227 ++--
 .../com/gu/mediaservice/lib/aws/Kinesis.scala | 27 +-
 .../com/gu/mediaservice/lib/aws/S3.scala | 140 ++-
.../com/gu/mediaservice/lib/aws/SNS.scala | 7 +- .../lib/aws/SqsMessageConsumer.scala | 66 +- .../lib/aws/ThrallMessageSender.scala | 52 +- .../lib/bbc/BBCMetadataProcessor.scala | 24 +- .../lib/bbc/BBCSupplierProcessors.scala | 118 +- .../BBCImageProcessorsDependencies.scala | 48 +- .../bbc/components/BBCMetadataConfig.scala | 50 +- .../lib/bbc/components/BBCMetadataStore.scala | 33 +- .../bbc/components/BBCUsageRightsConfig.scala | 19 +- .../bbc/components/BBCUsageRightsStore.scala | 28 +- .../cleanup/AttributeCreditFromByline.scala | 29 +- .../lib/cleanup/BylineCreditReorganise.scala | 52 +- .../lib/cleanup/CapitaliseProperty.scala | 6 +- .../lib/cleanup/CleanRubbishLocation.scala | 8 +- .../lib/cleanup/CountryCode.scala | 22 +- .../lib/cleanup/DropRedundantTitle.scala | 13 +- .../ExtractGuardianCreditFromByline.scala | 31 +- .../lib/cleanup/GuardianStyleByline.scala | 5 +- .../lib/cleanup/ImageProcessor.scala | 20 +- .../lib/cleanup/ImageProcessorResources.scala | 8 +- .../lib/cleanup/InitialJoinerByline.scala | 7 +- .../lib/cleanup/MetadataCleaner.scala | 44 +- .../lib/cleanup/PhotographerRenamer.scala | 4 +- .../lib/cleanup/RedundantTokenRemover.scala | 34 +- .../lib/cleanup/StripCopyrightPrefix.scala | 10 +- .../lib/cleanup/SupplierProcessors.scala | 325 +++-- .../cleanup/UseCanonicalGuardianCredit.scala | 9 +- .../lib/collections/CollectionsManager.scala | 54 +- .../config/AuthenticationProviderLoader.scala | 18 +- .../lib/config/CommonConfig.scala | 44 +- .../lib/config/GridConfigLoader.scala | 14 +- .../lib/config/GridConfigResources.scala | 5 +- .../lib/config/ImageProcessorLoader.scala | 5 +- .../lib/config/MetadataConfig.scala | 221 ++-- .../mediaservice/lib/config/Properties.scala | 3 +- .../lib/config/ProviderLoader.scala | 221 +++- .../gu/mediaservice/lib/config/Services.scala | 68 +- .../lib/config/StageIdentifier.scala | 15 +- .../gu/mediaservice/lib/discovery/EC2.scala | 31 +- .../elasticsearch/ElasticSearchClient.scala | 162 ++- .../elasticsearch/ElasticSearchConfig.scala | 8 +- .../ElasticSearchException.scala | 70 +- .../ElasticSearchExecutions.scala | 65 +- .../lib/elasticsearch/IndexSettings.scala | 22 +- .../lib/elasticsearch/Mappings.scala | 265 ++-- .../mediaservice/lib/formatting/package.scala | 15 +- .../auth/PandaAuthenticationProvider.scala | 213 +++- .../lib/imaging/ImageOperations.scala | 258 ++-- .../lib/imaging/im4jwrapper/ExifTool.scala | 17 +- .../lib/imaging/im4jwrapper/ImageMagick.scala | 38 +- .../lib/json/JsonByteArrayUtil.scala | 10 +- .../lib/json/PlayJsonHelpers.scala | 8 +- .../gu/mediaservice/lib/json/package.scala | 1 - .../lib/logging/GridLogging.scala | 43 +- .../mediaservice/lib/logging/LogConfig.scala | 64 +- .../lib/logging/MarkerUtils.scala | 22 +- .../lib/logging/RequestLoggingContext.scala | 8 +- .../mediaservice/lib/logging/Stopwatch.scala | 19 +- .../lib/management/Management.scala | 31 +- .../lib/metadata/ImageMetadataConverter.scala | 168 +-- .../mediaservice/lib/metadata/Subject.scala | 90 +- .../lib/metrics/CloudWatchMetrics.scala | 104 +- .../lib/metrics/FutureSyntax.scala | 13 +- .../com/gu/mediaservice/lib/net/URI.scala | 14 +- .../lib/play/ConnectionBrokenFilter.scala | 21 +- .../mediaservice/lib/play/GridAppLoader.scala | 11 +- .../lib/play/GridComponents.scala | 62 +- .../lib/play/RequestLoggingFilter.scala | 52 +- .../lib/play/RequestMetricFilter.scala | 17 +- .../lib/resource/FutureResources.scala | 7 +- .../lib/usage/ItemToMediaUsage.scala | 68 +- .../mediaservice/lib/usage/UsageBuilder.scala | 97 +- 
.../com/gu/mediaservice/model/Asset.scala | 33 +- .../gu/mediaservice/model/Collection.scala | 16 +- .../com/gu/mediaservice/model/Cost.scala | 15 +- .../com/gu/mediaservice/model/Crop.scala | 76 +- .../com/gu/mediaservice/model/Edits.scala | 96 +- .../com/gu/mediaservice/model/Export.scala | 35 +- .../gu/mediaservice/model/FileMetadata.scala | 72 +- .../model/FileMetadataAggregator.scala | 95 +- .../com/gu/mediaservice/model/Image.scala | 98 +- .../gu/mediaservice/model/ImageMetadata.scala | 52 +- .../com/gu/mediaservice/model/MimeType.scala | 7 +- .../gu/mediaservice/model/Photoshoot.scala | 2 +- .../gu/mediaservice/model/SourceImage.scala | 13 +- .../model/SyndicationRights.scala | 66 +- .../model/SyndicationStatus.scala | 19 +- .../gu/mediaservice/model/UsageRights.scala | 352 ++++-- .../model/leases/LeasesByMedia.scala | 19 +- .../model/leases/MediaLease.scala | 75 +- .../model/usage/DigitalUsageMetadata.scala | 19 +- .../model/usage/DownloadUsageMetadata.scala | 8 +- .../model/usage/FrontUsageMetadata.scala | 10 +- .../mediaservice/model/usage/MediaUsage.scala | 37 +- .../model/usage/PrintUsageMetadata.scala | 57 +- .../usage/SyndicationUsageMetadata.scala | 8 +- .../gu/mediaservice/model/usage/Usage.scala | 30 +- .../model/usage/UsageNotice.scala | 16 +- .../model/usage/UsageReference.scala | 6 +- .../model/usage/UsageReferenceType.scala | 34 +- .../model/usage/UsageStatus.scala | 19 +- .../model/usage/UsageTableFullKey.scala | 2 +- .../mediaservice/model/usage/UsageType.scala | 15 +- .../syntax/ElasticSearchSyntax.scala | 59 +- .../mediaservice/syntax/PlayJsonSyntax.scala | 13 +- .../com/gu/mediaservice/syntax/package.scala | 10 +- .../mediaservice/lib/DateTimeUtilsTest.scala | 6 +- .../gu/mediaservice/lib/ProcessesSpec.scala | 11 +- .../ApiKeyAuthenticationProviderTest.scala | 94 +- .../lib/auth/AuthenticationTest.scala | 204 +++- .../mediaservice/lib/aws/DynamoDBTest.scala | 83 +- .../lib/aws/ThrallMessageSenderTest.scala | 12 +- .../AttributeCreditFromBylineTest.scala | 13 +- .../cleanup/BylineCreditReorganiseTest.scala | 69 +- .../lib/cleanup/CapitalisationFixerTest.scala | 13 +- .../lib/cleanup/CapitaliseBylineTest.scala | 9 +- .../cleanup/CleanRubbishLocationTest.scala | 32 +- .../lib/cleanup/CountryCodeTest.scala | 24 +- .../lib/cleanup/DropRedundantTitleTest.scala | 28 +- .../ExtractGuardianCreditFromBylineTest.scala | 94 +- .../lib/cleanup/GuardianStyleBylineTest.scala | 9 +- .../lib/cleanup/InitialJoinerBylineTest.scala | 13 +- .../lib/cleanup/MetadataHelper.scala | 31 +- .../lib/cleanup/PhotographerRenamerTest.scala | 14 +- .../cleanup/RedundantTokenRemoverTest.scala | 25 +- .../cleanup/StripCopyrightPrefixTest.scala | 40 +- .../lib/cleanup/SupplierProcessorsTest.scala | 237 ++-- .../UseCanonicalGuardianCreditTest.scala | 17 +- .../collections/CollectionsManagerTest.scala | 53 +- .../lib/config/ProviderLoaderTest.scala | 189 ++- .../PandaAuthenticationProviderTest.scala | 21 +- .../lib/imaging/ImageOperationsTest.scala | 29 +- .../lib/json/JsonByteArrayUtilTest.scala | 12 +- .../lib/json/JsonOrderingTest.scala | 39 +- .../lib/logging/StopwatchTest.scala | 4 +- .../metadata/ImageMetadataConverterTest.scala | 618 +++++++--- .../model/FileMetadataAggregatorTest.scala | 210 ++-- .../mediaservice/model/FileMetadataTest.scala | 135 +- .../com/gu/mediaservice/model/ImageTest.scala | 93 +- .../gu/mediaservice/model/MimeTypeTest.scala | 51 +- .../gu/mediaservice/model/PropertyTest.scala | 4 +- .../model/SyndicationRightsTest.scala | 15 +- 
.../mediaservice/model/UsageRightsTest.scala | 69 +- cropper/app/CropperComponents.scala | 23 +- .../app/controllers/CropperController.scala | 164 ++- cropper/app/lib/AspectRatio.scala | 8 +- cropper/app/lib/CropStore.scala | 187 ++- cropper/app/lib/CropperConfig.scala | 7 +- cropper/app/lib/Crops.scala | 211 +++- cropper/app/lib/Notifications.scala | 3 +- cropper/app/model/ExportRequest.scala | 60 +- cropper/test/lib/AspectRatioTest.scala | 23 +- cropper/test/lib/CropsTest.scala | 24 +- image-loader/app/AppLoader.scala | 3 +- image-loader/app/ImageLoaderComponents.scala | 50 +- .../controllers/ImageLoaderController.scala | 148 ++- image-loader/app/lib/BodyParsers.scala | 46 +- image-loader/app/lib/Downloader.scala | 2 +- image-loader/app/lib/FailureResponse.scala | 31 +- image-loader/app/lib/ImageLoaderConfig.scala | 33 +- image-loader/app/lib/ImageLoaderStore.scala | 7 +- image-loader/app/lib/Notifications.scala | 3 +- image-loader/app/lib/QuarantineStore.scala | 3 +- .../app/lib/imaging/FileMetadataReader.scala | 291 +++-- .../app/lib/imaging/MimeTypeDetection.scala | 31 +- image-loader/app/lib/imaging/exceptions.scala | 13 +- image-loader/app/model/Projector.scala | 138 ++- .../app/model/QuarantineUploader.scala | 24 +- image-loader/app/model/Uploader.scala | 356 ++++-- .../app/model/upload/OptimiseOps.scala | 56 +- .../app/model/upload/UploadRequest.scala | 28 +- .../lib/imaging/FileMetadataReaderTest.scala | 130 +- .../lib/imaging/MimeTypeDetectionTest.scala | 24 +- .../test/scala/model/ImageUploadTest.scala | 128 +- .../test/scala/model/ProjectorTest.scala | 147 ++- kahuna/app/KahunaComponents.scala | 28 +- kahuna/app/controllers/KahunaController.scala | 35 +- kahuna/app/lib/KahunaConfig.scala | 18 +- leases/app/LeasesComponents.scala | 14 +- .../controllers/MediaLeaseController.scala | 151 ++- leases/app/lib/LeaseNotifier.scala | 25 +- leases/app/lib/LeaseStore.scala | 26 +- leases/app/lib/LeasesConfig.scala | 11 +- media-api/app/MediaApiComponents.scala | 63 +- .../controllers/AggregationController.scala | 13 +- media-api/app/controllers/MediaApi.scala | 384 ++++-- .../controllers/SuggestionController.scala | 52 +- .../app/controllers/UsageController.scala | 57 +- media-api/app/lib/ImageExtras.scala | 79 +- .../app/lib/ImagePersistenceReasons.scala | 44 +- media-api/app/lib/ImageResponse.scala | 326 +++-- media-api/app/lib/MediaApiConfig.scala | 32 +- media-api/app/lib/MediaApiMetrics.scala | 14 +- media-api/app/lib/S3Client.scala | 32 +- media-api/app/lib/UsageQuota.scala | 4 +- media-api/app/lib/UsageStore.scala | 143 ++- .../app/lib/elasticsearch/ElasticSearch.scala | 316 +++-- .../elasticsearch/ElasticSearchModel.scala | 159 +-- .../app/lib/elasticsearch/IsQueryFilter.scala | 40 +- .../app/lib/elasticsearch/MatchFields.scala | 27 +- .../app/lib/elasticsearch/QueryBuilder.scala | 114 +- .../app/lib/elasticsearch/SearchFilters.scala | 101 +- .../lib/elasticsearch/SyndicationFilter.scala | 59 +- media-api/app/lib/elasticsearch/filters.scala | 24 +- media-api/app/lib/elasticsearch/sorts.scala | 15 +- .../app/lib/querysyntax/DateRangeParser.scala | 14 +- media-api/app/lib/querysyntax/Parser.scala | 22 +- .../app/lib/querysyntax/QuerySyntax.scala | 182 +-- media-api/app/lib/querysyntax/model.scala | 3 +- .../app/lib/usagerights/CostCalculator.scala | 36 +- .../lib/ImagePersistenceReasonsTest.scala | 135 +- media-api/test/lib/ImageResponseTest.scala | 33 +- media-api/test/lib/UsageStoreTest.scala | 13 +- .../lib/elasticsearch/ConditionFixtures.scala | 35 +- 
.../lib/elasticsearch/ElasticSearchTest.scala | 452 ++++--- .../elasticsearch/ElasticSearchTestBase.scala | 74 +- .../test/lib/elasticsearch/Fixtures.scala | 104 +- .../lib/elasticsearch/QueryBuilderTest.scala | 107 +- .../test/scala/lib/ImageExtrasTest.scala | 61 +- .../scala/lib/querysyntax/ParserTest.scala | 570 +++++---- .../test/usagerights/CostCalculatorTest.scala | 10 +- metadata-editor/app/AppLoader.scala | 3 +- .../app/MetadataEditorComponents.scala | 22 +- .../app/controllers/EditsApi.scala | 52 +- .../app/controllers/EditsController.scala | 188 +-- metadata-editor/app/lib/EditsConfig.scala | 4 +- metadata-editor/app/lib/EditsStore.scala | 3 +- .../app/lib/MetadataEditorMetrics.scala | 3 +- .../app/lib/MetadataSqsMessageConsumer.scala | 21 +- metadata-editor/app/lib/Notifications.scala | 3 +- .../app/lib/UsageRightsMetadataMapper.scala | 31 +- .../app/model/UsageRightsProperty.scala | 254 ++-- .../test/UsageRightsMetadataMapperTest.scala | 42 +- thrall/app/ThrallComponents.scala | 44 +- thrall/app/controllers/HealthCheck.scala | 10 +- thrall/app/controllers/ThrallController.scala | 4 +- .../app/lib/MetadataEditorNotifications.scala | 6 +- thrall/app/lib/OrderedFutureRunner.scala | 4 +- thrall/app/lib/RetryHandler.scala | 74 +- thrall/app/lib/ThrallConfig.scala | 42 +- thrall/app/lib/ThrallMetrics.scala | 7 +- thrall/app/lib/ThrallStore.scala | 8 +- thrall/app/lib/ThrallStreamProcessor.scala | 80 +- .../app/lib/elasticsearch/ElasticSearch.scala | 500 +++++--- .../elasticsearch/SyndicationRightsOps.scala | 245 ++-- thrall/app/lib/kinesis/KinesisConfig.scala | 50 +- thrall/app/lib/kinesis/MessageProcessor.scala | 255 ++-- .../app/lib/kinesis/ThrallEventConsumer.scala | 127 +- thrall/test/helpers/Fixtures.scala | 172 ++- thrall/test/lib/OrderedFutureRunnerTest.scala | 7 +- .../test/lib/ThrallStreamProcessorTest.scala | 45 +- .../lib/elasticsearch/ElasticSearchTest.scala | 1081 +++++++++++++---- .../elasticsearch/ElasticSearchTestBase.scala | 63 +- .../SyndicationRightsOpsTest.scala | 171 ++- .../lib/kinesis/MessageProcessorTest.scala | 63 +- .../lib/kinesis/ThrallEventConsumerTest.scala | 1 - usage/app/UsageComponents.scala | 30 +- usage/app/controllers/UsageApi.scala | 325 +++-- usage/app/lib/ContentApi.scala | 8 +- usage/app/lib/ContentStream.scala | 12 +- usage/app/lib/CrierStreamReader.scala | 40 +- usage/app/lib/EventProcessor.scala | 83 +- usage/app/lib/MediaUsageBuilder.scala | 47 +- usage/app/lib/Notifications.scala | 3 +- usage/app/lib/SingleThreadedScheduler.scala | 6 +- usage/app/lib/UsageConfig.scala | 20 +- usage/app/lib/UsageMetadataBuilder.scala | 4 +- usage/app/lib/UsageMetrics.scala | 3 +- usage/app/lib/UsageNotifier.scala | 20 +- usage/app/lib/UsageRecorder.scala | 138 ++- usage/app/lib/UsageStream.scala | 28 +- usage/app/model/ContentWrapper.scala | 19 +- usage/app/model/DownloadUsageRequest.scala | 21 +- usage/app/model/FrontUsageRequest.scala | 20 +- usage/app/model/PrintUsageRequest.scala | 17 +- usage/app/model/SyndicationUsageRequest.scala | 20 +- usage/app/model/UsageGroup.scala | 227 ++-- usage/app/model/UsageIdBuilder.scala | 64 +- usage/app/model/UsageRecord.scala | 52 +- usage/app/model/UsageTable.scala | 113 +- 331 files changed, 14347 insertions(+), 7028 deletions(-) diff --git a/admin-tools/dev/app/AdminToolsComponents.scala b/admin-tools/dev/app/AdminToolsComponents.scala index 2d59c1286f..c53da3e935 100644 --- a/admin-tools/dev/app/AdminToolsComponents.scala +++ b/admin-tools/dev/app/AdminToolsComponents.scala @@ -8,19 +8,23 @@ import router.Routes 
object AdminToolsComponents { def config(resources: GridConfigResources) = new AdminToolsConfig( - resources.configuration ++ Configuration.from(Map( - "domain.root" -> "local.dev-gutools.co.uk", - "auth.keystore.bucket" -> "not-used", - "thrall.kinesis.stream.name"-> "not-used", - "thrall.kinesis.lowPriorityStream.name"-> "not-used" - )) + resources.configuration ++ Configuration.from( + Map( + "domain.root" -> "local.dev-gutools.co.uk", + "auth.keystore.bucket" -> "not-used", + "thrall.kinesis.stream.name" -> "not-used", + "thrall.kinesis.lowPriorityStream.name" -> "not-used" + ) + ) ) } -class AdminToolsComponents(context: Context) extends GridComponents(context, AdminToolsComponents.config) { +class AdminToolsComponents(context: Context) + extends GridComponents(context, AdminToolsComponents.config) { final override val buildInfo = utils.buildinfo.BuildInfo val controller = new AdminToolsCtr(config, controllerComponents) - override lazy val router = new Routes(httpErrorHandler, controller, management) + override lazy val router = + new Routes(httpErrorHandler, controller, management) } diff --git a/admin-tools/dev/app/AppLoader.scala b/admin-tools/dev/app/AppLoader.scala index d6bb9f7333..61bd1726ba 100644 --- a/admin-tools/dev/app/AppLoader.scala +++ b/admin-tools/dev/app/AppLoader.scala @@ -1,3 +1,4 @@ import com.gu.mediaservice.lib.play.GridAppLoader -class AppLoader extends GridAppLoader("admin-tools", new AdminToolsComponents(_)) +class AppLoader + extends GridAppLoader("admin-tools", new AdminToolsComponents(_)) diff --git a/admin-tools/dev/app/controllers/AdminToolsCtr.scala b/admin-tools/dev/app/controllers/AdminToolsCtr.scala index b4eeadb4f5..ca29bf644d 100644 --- a/admin-tools/dev/app/controllers/AdminToolsCtr.scala +++ b/admin-tools/dev/app/controllers/AdminToolsCtr.scala @@ -3,17 +3,30 @@ package controllers import com.gu.mediaservice.lib.argo.ArgoHelpers import com.gu.mediaservice.lib.argo.model.Link import com.gu.mediaservice.model.Image._ -import com.gu.mediaservice.{FullImageProjectionFailed, FullImageProjectionSuccess, ImageDataMerger, ImageDataMergerConfig} +import com.gu.mediaservice.{ + FullImageProjectionFailed, + FullImageProjectionSuccess, + ImageDataMerger, + ImageDataMergerConfig +} import lib.AdminToolsConfig import play.api.libs.json.Json import play.api.mvc.{BaseController, ControllerComponents} import scala.concurrent.ExecutionContext -class AdminToolsCtr(config: AdminToolsConfig, override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers { +class AdminToolsCtr( + config: AdminToolsConfig, + override val controllerComponents: ControllerComponents +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers { - private val cfg = ImageDataMergerConfig(apiKey = config.apiKey, domainRoot = config.domainRoot, imageLoaderEndpointOpt = None) + private val cfg = ImageDataMergerConfig( + apiKey = config.apiKey, + domainRoot = config.domainRoot, + imageLoaderEndpointOpt = None + ) private val merger = new ImageDataMerger(cfg) @@ -39,13 +52,23 @@ class AdminToolsCtr(config: AdminToolsConfig, override val controllerComponents: case Some(img) => Ok(Json.toJson(img)).as(ArgoMediaType) case _ => - respondError(NotFound, "not-found", s"image with mediaId: $mediaId not found") + respondError( + NotFound, + "not-found", + s"image with mediaId: $mediaId not found" + ) } case FullImageProjectionFailed(expMessage, downstreamMessage) => - respondError(InternalServerError, 
"image-projection-failed", Json.obj( - "errorMessage" -> expMessage, - "downstreamErrorMessage" -> downstreamMessage - ).toString) + respondError( + InternalServerError, + "image-projection-failed", + Json + .obj( + "errorMessage" -> expMessage, + "downstreamErrorMessage" -> downstreamMessage + ) + .toString + ) } } } diff --git a/admin-tools/dev/app/lib/AdminToolsConfig.scala b/admin-tools/dev/app/lib/AdminToolsConfig.scala index 791e321c59..cf0be68779 100644 --- a/admin-tools/dev/app/lib/AdminToolsConfig.scala +++ b/admin-tools/dev/app/lib/AdminToolsConfig.scala @@ -3,7 +3,8 @@ package lib import com.gu.mediaservice.lib.config.CommonConfig import play.api.Configuration -class AdminToolsConfig(playAppConfiguration: Configuration) extends CommonConfig(playAppConfiguration) { +class AdminToolsConfig(playAppConfiguration: Configuration) + extends CommonConfig(playAppConfiguration) { // hardcoded for dev val apiKey: String = "dev-" diff --git a/admin-tools/lambda/src/main/scala/com/gu/mediaservice/BatchIndexLambdaHandler.scala b/admin-tools/lambda/src/main/scala/com/gu/mediaservice/BatchIndexLambdaHandler.scala index e71ca9a0cb..5afa5f1c82 100644 --- a/admin-tools/lambda/src/main/scala/com/gu/mediaservice/BatchIndexLambdaHandler.scala +++ b/admin-tools/lambda/src/main/scala/com/gu/mediaservice/BatchIndexLambdaHandler.scala @@ -17,12 +17,12 @@ class BatchIndexLambdaHandler { threshold = sys.env.get("LATENCY_THRESHOLD").map(t => Integer.parseInt(t)), maxSize = sys.env("MAX_SIZE").toInt, startState = IndexInputCreation.get(sys.env("START_STATE").toInt), - checkerStartState = IndexInputCreation.get(sys.env("CHECKER_START_STATE").toInt) + checkerStartState = + IndexInputCreation.get(sys.env("CHECKER_START_STATE").toInt) ) private val batchIndex = new BatchIndexHandler(cfg) - def handleRequest() = { batchIndex.processImages() } diff --git a/admin-tools/lambda/src/main/scala/com/gu/mediaservice/ImageProjectionLambdaHandler.scala b/admin-tools/lambda/src/main/scala/com/gu/mediaservice/ImageProjectionLambdaHandler.scala index 9ec4515799..63c53ae650 100644 --- a/admin-tools/lambda/src/main/scala/com/gu/mediaservice/ImageProjectionLambdaHandler.scala +++ b/admin-tools/lambda/src/main/scala/com/gu/mediaservice/ImageProjectionLambdaHandler.scala @@ -1,7 +1,10 @@ package com.gu.mediaservice import com.amazonaws.services.lambda.runtime.Context -import com.amazonaws.services.lambda.runtime.events.{APIGatewayProxyRequestEvent, APIGatewayProxyResponseEvent} +import com.amazonaws.services.lambda.runtime.events.{ + APIGatewayProxyRequestEvent, + APIGatewayProxyResponseEvent +} import com.gu.mediaservice.lib.auth.provider.ApiKeyAuthenticationProvider import com.gu.mediaservice.model.Image import com.typesafe.scalalogging.LazyLogging @@ -12,7 +15,10 @@ import scala.concurrent.ExecutionContext.Implicits.global class ImageProjectionLambdaHandler extends LazyLogging { - def handleRequest(event: APIGatewayProxyRequestEvent, context: Context): APIGatewayProxyResponseEvent = { + def handleRequest( + event: APIGatewayProxyRequestEvent, + context: Context + ): APIGatewayProxyResponseEvent = { logger.info(s"handleImageProjection event: $event") @@ -27,14 +33,19 @@ class ImageProjectionLambdaHandler extends LazyLogging { apiKey match { case Some(key) => - val cfg: ImageDataMergerConfig = ImageDataMergerConfig(apiKey = key, domainRoot = domainRoot, imageLoaderEndpointOpt = imageLoaderEndpoint) + val cfg: ImageDataMergerConfig = ImageDataMergerConfig( + apiKey = key, + domainRoot = domainRoot, + imageLoaderEndpointOpt = 
imageLoaderEndpoint + ) if (!cfg.isValidApiKey()) return getUnauthorisedResponse logger.info(s"starting handleImageProjection for mediaId=$mediaId") logger.info(s"with config: $cfg") val merger = new ImageDataMerger(cfg) - val result: FullImageProjectionResult = merger.getMergedImageData(mediaId.asInstanceOf[String]) + val result: FullImageProjectionResult = + merger.getMergedImageData(mediaId.asInstanceOf[String]) result match { case FullImageProjectionSuccess(mayBeImage) => mayBeImage match { @@ -60,7 +71,8 @@ class ImageProjectionLambdaHandler extends LazyLogging { } private def getNotFoundResponse(mediaId: String) = { - val emptyRes = Json.obj("message" -> s"image with id=$mediaId not-found").toString + val emptyRes = + Json.obj("message" -> s"image with id=$mediaId not-found").toString logger.info(s"image not projected \n $emptyRes") new APIGatewayProxyResponseEvent() .withStatusCode(404) @@ -68,11 +80,18 @@ class ImageProjectionLambdaHandler extends LazyLogging { .withBody(emptyRes) } - private def getErrorFoundResponse(message: String, downstreamMessage: String) = { - val res = Json.obj("message" -> Json.obj( - "errorMessage" -> message, - "downstreamErrorMessage" -> downstreamMessage - )).toString + private def getErrorFoundResponse( + message: String, + downstreamMessage: String + ) = { + val res = Json + .obj( + "message" -> Json.obj( + "errorMessage" -> message, + "downstreamErrorMessage" -> downstreamMessage + ) + ) + .toString logger.info(s"image not projected due to error \n $res") @@ -83,7 +102,8 @@ class ImageProjectionLambdaHandler extends LazyLogging { } private def getUnauthorisedResponse = { - val res = Json.obj("message" -> s"missing or invalid api key header").toString + val res = + Json.obj("message" -> s"missing or invalid api key header").toString new APIGatewayProxyResponseEvent() .withStatusCode(401) @@ -93,8 +113,10 @@ class ImageProjectionLambdaHandler extends LazyLogging { private def getAuthKeyFrom(headers: Map[String, String]) = { // clients like curl or API gateway may lowerCases custom header names, yay! 
- headers.find { - case (k, _) => k.equalsIgnoreCase(ApiKeyAuthenticationProvider.apiKeyHeaderName) - }.map(_._2) + headers + .find { case (k, _) => + k.equalsIgnoreCase(ApiKeyAuthenticationProvider.apiKeyHeaderName) + } + .map(_._2) } } diff --git a/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ImagesGroupByProgressState.scala b/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ImagesGroupByProgressState.scala index ace3cfdfb7..0efd7aa114 100644 --- a/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ImagesGroupByProgressState.scala +++ b/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ImagesGroupByProgressState.scala @@ -7,7 +7,8 @@ import scala.collection.JavaConverters._ object ImagesGroupByProgressState extends App with LazyLogging { - if (args.isEmpty) throw new IllegalArgumentException("please provide dynamo table name") + if (args.isEmpty) + throw new IllegalArgumentException("please provide dynamo table name") import InputIdsStore._ @@ -19,7 +20,8 @@ object ImagesGroupByProgressState extends App with LazyLogging { def stateNameToCount(progressType: ProduceProgress): (String, Int) = { logger.info(s"calculating stateNameToCount for $progressType") - val queryRes = stateIndex.query(getAllMediaIdsWithinProgressQuery(progressType)) + val queryRes = + stateIndex.query(getAllMediaIdsWithinProgressQuery(progressType)) val result = progressType.name -> queryRes.iterator.asScala.length logger.info(s"result=$result") result @@ -42,7 +44,7 @@ object ImagesGroupByProgressState extends App with LazyLogging { stateNameToCount(NotFound), stateNameToCount(KnownError), stateNameToCount(NotStarted), - stateNameToCount(InProgress), + stateNameToCount(InProgress) ).mkString("\n") logger.info(s"results from dynamoTable=$dynamoTable") diff --git a/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ResetImageBatchIndexTable.scala b/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ResetImageBatchIndexTable.scala index 3f4ba7c402..449ef67e73 100644 --- a/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ResetImageBatchIndexTable.scala +++ b/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ResetImageBatchIndexTable.scala @@ -8,7 +8,8 @@ import scala.concurrent.duration.Duration object ResetImageBatchIndexTable extends App with LazyLogging { - if (args.isEmpty) throw new IllegalArgumentException("please provide dynamo table name") + if (args.isEmpty) + throw new IllegalArgumentException("please provide dynamo table name") private val dynamoTable = args(0) private val ddbClient = AwsHelpers.buildDynamoTableClient(dynamoTable) diff --git a/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ResetKnownErrors.scala b/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ResetKnownErrors.scala index 4ad4873e1f..0cc1aea49d 100644 --- a/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ResetKnownErrors.scala +++ b/admin-tools/scripts/src/main/scala/com/gu/mediaservice/ResetKnownErrors.scala @@ -8,7 +8,8 @@ import scala.collection.JavaConverters._ object ResetKnownErrors extends App with LazyLogging { - if (args.isEmpty) throw new IllegalArgumentException("please provide dynamo table name") + if (args.isEmpty) + throw new IllegalArgumentException("please provide dynamo table name") import IndexInputCreation._ import InputIdsStore._ @@ -21,13 +22,18 @@ object ResetKnownErrors extends App with LazyLogging { val ignoredAtThisScript = 100 val InputIdsStore = new InputIdsStore(ddbClient, ignoredAtThisScript) - val mediaIDsWithKnownErrors = 
stateIndex.query(getAllMediaIdsWithinProgressQuery(KnownError)) - .asScala.toList.map { it => - val json = Json.parse(it.toJSON).as[JsObject] - (json \ PKField).as[String] - } - - logger.info(s"got ${mediaIDsWithKnownErrors.size}, mediaIds blacklisted as KnownError") + val mediaIDsWithKnownErrors = stateIndex + .query(getAllMediaIdsWithinProgressQuery(KnownError)) + .asScala + .toList + .map { it => + val json = Json.parse(it.toJSON).as[JsObject] + (json \ PKField).as[String] + } + + logger.info( + s"got ${mediaIDsWithKnownErrors.size}, mediaIds blacklisted as KnownError" + ) InputIdsStore.resetItemsState(mediaIDsWithKnownErrors) } diff --git a/auth/app/auth/AuthComponents.scala b/auth/app/auth/AuthComponents.scala index e8fe7bd8aa..61f587e36a 100644 --- a/auth/app/auth/AuthComponents.scala +++ b/auth/app/auth/AuthComponents.scala @@ -7,22 +7,28 @@ import play.api.{Configuration, Environment} import play.api.http.HttpConfiguration import router.Routes -class AuthComponents(context: Context) extends GridComponents(context, new AuthConfig(_)) { - final override lazy val httpConfiguration = AuthHttpConfig(configuration, context.environment) +class AuthComponents(context: Context) + extends GridComponents(context, new AuthConfig(_)) { + final override lazy val httpConfiguration = + AuthHttpConfig(configuration, context.environment) final override val buildInfo = utils.buildinfo.BuildInfo - val controller = new AuthController(auth, providers, config, controllerComponents) - val permissionsAwareManagement = new ManagementWithPermissions(controllerComponents, controller, buildInfo) + val controller = + new AuthController(auth, providers, config, controllerComponents) + val permissionsAwareManagement = + new ManagementWithPermissions(controllerComponents, controller, buildInfo) - override val router = new Routes(httpErrorHandler, controller, permissionsAwareManagement) + override val router = + new Routes(httpErrorHandler, controller, permissionsAwareManagement) } object AuthHttpConfig { - def apply(playConfig: Configuration, environment: Environment): HttpConfiguration = { + def apply( + playConfig: Configuration, + environment: Environment + ): HttpConfiguration = { val base = HttpConfiguration.fromConfiguration(playConfig, environment) - base.copy(session = - base.session.copy(sameSite = None) - ) + base.copy(session = base.session.copy(sameSite = None)) } } diff --git a/auth/app/auth/AuthConfig.scala b/auth/app/auth/AuthConfig.scala index 6d1256e1ad..e949730eae 100644 --- a/auth/app/auth/AuthConfig.scala +++ b/auth/app/auth/AuthConfig.scala @@ -2,7 +2,8 @@ package auth import com.gu.mediaservice.lib.config.{CommonConfig, GridConfigResources} -class AuthConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) { +class AuthConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) { val rootUri: String = services.authBaseUri val mediaApiUri: String = services.apiBaseUri val kahunaUri = services.kahunaBaseUri diff --git a/auth/app/auth/AuthController.scala b/auth/app/auth/AuthController.scala index dc10bde039..b0e85955f1 100644 --- a/auth/app/auth/AuthController.scala +++ b/auth/app/auth/AuthController.scala @@ -3,28 +3,39 @@ package auth import java.net.URI import com.gu.mediaservice.lib.argo.ArgoHelpers import com.gu.mediaservice.lib.argo.model.Link -import com.gu.mediaservice.lib.auth.Authentication.{MachinePrincipal, UserPrincipal} +import com.gu.mediaservice.lib.auth.Authentication.{ + MachinePrincipal, + UserPrincipal +} import 
com.gu.mediaservice.lib.auth.provider.AuthenticationProviders -import com.gu.mediaservice.lib.auth.{Authentication, Permissions, PermissionsHandler} +import com.gu.mediaservice.lib.auth.{ + Authentication, + Permissions, + PermissionsHandler +} import play.api.libs.json.Json import play.api.mvc.{BaseController, ControllerComponents, Result} import scala.concurrent.{ExecutionContext, Future} import scala.util.Try -class AuthController(auth: Authentication, providers: AuthenticationProviders, val config: AuthConfig, - override val controllerComponents: ControllerComponents)(implicit ec: ExecutionContext) - extends BaseController - with ArgoHelpers - with PermissionsHandler { +class AuthController( + auth: Authentication, + providers: AuthenticationProviders, + val config: AuthConfig, + override val controllerComponents: ControllerComponents +)(implicit ec: ExecutionContext) + extends BaseController + with ArgoHelpers + with PermissionsHandler { val indexResponse = { val indexData = Map("description" -> "This is the Auth API") val indexLinks = List( - Link("root", config.mediaApiUri), - Link("login", config.services.loginUriTemplate), - Link("ui:logout", s"${config.rootUri}/logout"), - Link("session", s"${config.rootUri}/session") + Link("root", config.mediaApiUri), + Link("login", config.services.loginUriTemplate), + Link("ui:logout", s"${config.rootUri}/logout"), + Link("session", s"${config.rootUri}/session") ) respond(indexData, indexLinks) } @@ -35,43 +46,46 @@ class AuthController(auth: Authentication, providers: AuthenticationProviders, v val showPaid = hasPermission(request.user, Permissions.ShowPaid) request.user match { case UserPrincipal(firstName, lastName, email, _) => - respond( - Json.obj("user" -> - Json.obj( - "name" -> s"$firstName $lastName", - "firstName" -> firstName, - "lastName" -> lastName, - "email" -> email, - "permissions" -> - Json.obj( - "showPaid" -> showPaid - ) - ) + Json.obj( + "user" -> + Json.obj( + "name" -> s"$firstName $lastName", + "firstName" -> firstName, + "lastName" -> lastName, + "email" -> email, + "permissions" -> + Json.obj( + "showPaid" -> showPaid + ) + ) ) ) - case MachinePrincipal(accessor, _) => respond( - Json.obj("api-key" -> + case MachinePrincipal(accessor, _) => + respond( Json.obj( - "name" -> accessor.identity, - "tier" -> accessor.tier.toString, - "permissions" -> + "api-key" -> Json.obj( - "showPaid" -> showPaid + "name" -> accessor.identity, + "tier" -> accessor.tier.toString, + "permissions" -> + Json.obj( + "showPaid" -> showPaid + ) ) ) ) - ) } } - def isOwnDomainAndSecure(uri: URI): Boolean = { uri.getHost.endsWith(config.domainRoot) && uri.getScheme == "https" } def isValidDomain(inputUri: String): Boolean = { - val success = Try(URI.create(inputUri)).filter(isOwnDomainAndSecure).isSuccess - if (!success) logger.warn(s"Provided login redirect URI is invalid: $inputUri") + val success = + Try(URI.create(inputUri)).filter(isOwnDomainAndSecure).isSuccess + if (!success) + logger.warn(s"Provided login redirect URI is invalid: $inputUri") success } @@ -81,32 +95,50 @@ class AuthController(auth: Authentication, providers: AuthenticationProviders, v // Trigger the auth cycle // If a redirectUri is provided, redirect the browser there once auth'd, // else return a dummy page (e.g. 
for automatically re-auth'ing in the background) - def doLogin(redirectUri: Option[String] = None) = Action.async { implicit req => - val checkedRedirectUri = redirectUri collect { - case uri if isValidDomain(uri) => uri - } - providers.userProvider.sendForAuthentication match { - case Some(authCallback) => - authCallback(req).map(_.addingToSession(checkedRedirectUri.map(REDIRECT_SESSION_KEY -> _).toSeq:_*)) - case None => - Future.successful(InternalServerError("Login not supported by configured authentication provider")) - } + def doLogin(redirectUri: Option[String] = None) = Action.async { + implicit req => + val checkedRedirectUri = redirectUri collect { + case uri if isValidDomain(uri) => uri + } + providers.userProvider.sendForAuthentication match { + case Some(authCallback) => + authCallback(req).map( + _.addingToSession( + checkedRedirectUri.map(REDIRECT_SESSION_KEY -> _).toSeq: _* + ) + ) + case None => + Future.successful( + InternalServerError( + "Login not supported by configured authentication provider" + ) + ) + } } def oauthCallback = Action.async { implicit request => providers.userProvider.sendForAuthenticationCallback match { case Some(callback) => val maybeRedirectUri = request.session.get(REDIRECT_SESSION_KEY) - callback(request, maybeRedirectUri).map(_.removingFromSession(REDIRECT_SESSION_KEY)) + callback(request, maybeRedirectUri).map( + _.removingFromSession(REDIRECT_SESSION_KEY) + ) case None => - Future.successful(InternalServerError("No callback for configured authentication provider")) + Future.successful( + InternalServerError( + "No callback for configured authentication provider" + ) + ) } } def logout = Action { implicit request => val result: Result = providers.userProvider.flushToken match { case Some(callback) => callback(request, Ok("Logged out")) - case None => InternalServerError("Logout not supported by configured authentication provider") + case None => + InternalServerError( + "Logout not supported by configured authentication provider" + ) } result.withNewSession } diff --git a/collections/app/AppLoader.scala b/collections/app/AppLoader.scala index 29fa43c60c..988472c211 100644 --- a/collections/app/AppLoader.scala +++ b/collections/app/AppLoader.scala @@ -1,3 +1,4 @@ import com.gu.mediaservice.lib.play.GridAppLoader -class AppLoader extends GridAppLoader("collections", new CollectionsComponents(_)) +class AppLoader + extends GridAppLoader("collections", new CollectionsComponents(_)) diff --git a/collections/app/CollectionsComponents.scala b/collections/app/CollectionsComponents.scala index a4117de8e1..1a9fc819ea 100644 --- a/collections/app/CollectionsComponents.scala +++ b/collections/app/CollectionsComponents.scala @@ -5,15 +5,23 @@ import play.api.ApplicationLoader.Context import router.Routes import store.CollectionsStore -class CollectionsComponents(context: Context) extends GridComponents(context, new CollectionsConfig(_)) { +class CollectionsComponents(context: Context) + extends GridComponents(context, new CollectionsConfig(_)) { final override val buildInfo = utils.buildinfo.BuildInfo val store = new CollectionsStore(config) val metrics = new CollectionsMetrics(config) val notifications = new Notifications(config) - val collections = new CollectionsController(auth, config, store, controllerComponents) - val imageCollections = new ImageCollectionsController(auth, config, notifications, controllerComponents) + val collections = + new CollectionsController(auth, config, store, controllerComponents) + val imageCollections = new 
ImageCollectionsController( + auth, + config, + notifications, + controllerComponents + ) - override val router = new Routes(httpErrorHandler, collections, imageCollections, management) + override val router = + new Routes(httpErrorHandler, collections, imageCollections, management) } diff --git a/collections/app/controllers/CollectionsController.scala b/collections/app/controllers/CollectionsController.scala index 4d3d39e235..c272195232 100644 --- a/collections/app/controllers/CollectionsController.scala +++ b/collections/app/controllers/CollectionsController.scala @@ -15,52 +15,77 @@ import play.api.libs.functional.syntax._ import play.api.libs.json._ import play.api.mvc.{BaseController, ControllerComponents} import store.{CollectionsStore, CollectionsStoreError} -import com.gu.mediaservice.lib.net.{ URI => UriOps } +import com.gu.mediaservice.lib.net.{URI => UriOps} import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future - case class HasChildrenError(message: String) extends Throwable case class InvalidPrinciple(message: String) extends Throwable -case class AppIndex(name: String, description: String, config: Map[String, String] = Map()) +case class AppIndex( + name: String, + description: String, + config: Map[String, String] = Map() +) object AppIndex { implicit def jsonWrites: Writes[AppIndex] = Json.writes[AppIndex] } -class CollectionsController(authenticated: Authentication, config: CollectionsConfig, store: CollectionsStore, - val controllerComponents: ControllerComponents) extends BaseController with ArgoHelpers { +class CollectionsController( + authenticated: Authentication, + config: CollectionsConfig, + store: CollectionsStore, + val controllerComponents: ControllerComponents +) extends BaseController + with ArgoHelpers { import CollectionsManager.{getCssColour, isValidPathBit, pathToUri, uriToPath} // Stupid name clash between Argo and Play import com.gu.mediaservice.lib.argo.model.{Action => ArgoAction} -def uri(u: String) = URI.create(u) + def uri(u: String) = URI.create(u) val collectionUri = uri(s"${config.rootUri}/collections") def collectionUri(p: List[String] = Nil) = { - val path = if(p.nonEmpty) s"/${pathToUri(p)}" else "" + val path = if (p.nonEmpty) s"/${pathToUri(p)}" else "" uri(s"${config.rootUri}/collections$path") } - val appIndex = AppIndex("media-collections", "The one stop shop for collections") + val appIndex = + AppIndex("media-collections", "The one stop shop for collections") val indexLinks = List(Link("collections", collectionUri.toString)) - def addChildAction(pathId: List[String] = Nil): Option[ArgoAction] = Some(ArgoAction("add-child", collectionUri(pathId), "POST")) - def addChildAction(n: Node[Collection]): Option[ArgoAction] = addChildAction(n.fullPath) - def removeNodeAction(n: Node[Collection]): Option[ArgoAction] = if (n.children.nonEmpty) None else Some( - ArgoAction("remove", collectionUri(n.fullPath), "DELETE") + def addChildAction(pathId: List[String] = Nil): Option[ArgoAction] = Some( + ArgoAction("add-child", collectionUri(pathId), "POST") + ) + def addChildAction(n: Node[Collection]): Option[ArgoAction] = addChildAction( + n.fullPath ) + def removeNodeAction(n: Node[Collection]): Option[ArgoAction] = if ( + n.children.nonEmpty + ) None + else + Some( + ArgoAction("remove", collectionUri(n.fullPath), "DELETE") + ) def index = authenticated { req => respond(appIndex, links = indexLinks) } def collectionNotFound(path: String) = - respondError(NotFound, "collection-not-found", s"Could not find collection: 
$path") + respondError( + NotFound, + "collection-not-found", + s"Could not find collection: $path" + ) def invalidJson(json: JsValue) = - respondError(BadRequest, "invalid-json", s"Could not parse json: ${json.toString}") + respondError( + BadRequest, + "invalid-json", + s"Could not parse json: ${json.toString}" + ) def invalidTreeOperationError(message: String) = respondError(BadRequest, "invalid-tree-operation", message) @@ -77,11 +102,19 @@ def uri(u: String) = URI.create(u) val tree = Node.fromList[Collection]( collections, (collection) => collection.path, - (collection) => collection.description) + (collection) => collection.description + ) val correctTree = tree hackmap { node => - val correctedCollection = node.data.map(c => c.copy(path = node.correctPath)) - Node(node.basename, node.children, node.fullPath, node.correctPath, correctedCollection) + val correctedCollection = + node.data.map(c => c.copy(path = node.correctPath)) + Node( + node.basename, + node.children, + node.fullPath, + node.correctPath, + correctedCollection + ) } val futures = correctTree.toList(Nil) map { correctedCollection => @@ -98,7 +131,8 @@ def uri(u: String) = URI.create(u) Node.fromList[Collection]( collections, (collection) => collection.path, - (collection) => collection.description) + (collection) => collection.description + ) } def getCollections = authenticated.async { req => @@ -107,37 +141,57 @@ def uri(u: String) = URI.create(u) Json.toJson(tree)(asArgo), actions = List(addChildAction()).flatten ) - } recover { - case e: CollectionsStoreError => storeError(e.message) + } recover { case e: CollectionsStoreError => + storeError(e.message) } } // Basically default parameters, which Play doesn't support def addChildToRoot = addChildTo(None) - def addChildToCollection(collectionPathId: String) = addChildTo(Some(collectionPathId)) - def addChildTo(collectionPathId: Option[String]) = authenticated.async(parse.json) { req => - (req.body \ "data").asOpt[String] map { child => - if (isValidPathBit(child)) { - val path = collectionPathId.map(uriToPath).getOrElse(Nil) :+ child - val collection = Collection.build(path, ActionData(getIdentity(req.user), DateTime.now)) - - store.add(collection).map { collection => - val node = Node(collection.path.last, Nil, collection.path, collection.path, Some(collection)) - logger.info(req.user.accessor, s"Adding collection ${path.mkString("/")}") - respond(node, actions = getActions(node)) - } recover { - case e: CollectionsStoreError => storeError(e.message) + def addChildToCollection(collectionPathId: String) = addChildTo( + Some(collectionPathId) + ) + def addChildTo(collectionPathId: Option[String]) = + authenticated.async(parse.json) { req => + (req.body \ "data").asOpt[String] map { child => + if (isValidPathBit(child)) { + val path = collectionPathId.map(uriToPath).getOrElse(Nil) :+ child + val collection = Collection.build( + path, + ActionData(getIdentity(req.user), DateTime.now) + ) + + store.add(collection).map { collection => + val node = Node( + collection.path.last, + Nil, + collection.path, + collection.path, + Some(collection) + ) + logger.info( + req.user.accessor, + s"Adding collection ${path.mkString("/")}" + ) + respond(node, actions = getActions(node)) + } recover { case e: CollectionsStoreError => + storeError(e.message) + } + } else { + Future.successful( + respondError( + BadRequest, + "invalid-input", + "You cannot have slashes or double quotes in your path name" + ) + ) } - } else { - Future.successful(respondError(BadRequest, "invalid-input", 
"You cannot have slashes or double quotes in your path name")) - } - } getOrElse Future.successful(invalidJson(req.body)) - } + } getOrElse Future.successful(invalidJson(req.body)) + } type MaybeTree = Option[Node[Collection]] def hasChildren(path: List[String]): Future[Boolean] = allCollections.map { tree => - // Traverse the tree using the path val maybeTree = path .foldLeft[MaybeTree](Some(tree))((optBranch, nodeName) => { @@ -153,46 +207,66 @@ def uri(u: String) = URI.create(u) val path = CollectionsManager.uriToPath(UriOps.encodePlus(collectionPath)) hasChildren(path).flatMap { noRemove => - if(noRemove) { + if (noRemove) { throw HasChildrenError( s"$collectionPath has children, can't delete!" ) } else { - logger.info(req.user.accessor, s"Deleting collection ${path.mkString("/")}") + logger.info( + req.user.accessor, + s"Deleting collection ${path.mkString("/")}" + ) store.remove(path).map(_ => Accepted) } } recover { case e: CollectionsStoreError => storeError(e.message) - case e: HasChildrenError => invalidTreeOperationError(e.message) + case e: HasChildrenError => invalidTreeOperationError(e.message) } } // We have to do this as Play's serialisation doesn't work all that well. // Especially around types with subtypes, so we have to be very explicit here. - implicit def collectionEntityWrites: Writes[Node[EmbeddedEntity[Collection]]] = ( + implicit def collectionEntityWrites + : Writes[Node[EmbeddedEntity[Collection]]] = ( (__ \ "name").write[String] ~ - (__ \ "children").lazyWrite(Writes.seq[Node[EmbeddedEntity[Collection]]](collectionEntityWrites)) ~ - (__ \ "fullPath").write[List[String]] ~ - (__ \ "data").writeNullable[EmbeddedEntity[Collection]] + (__ \ "children").lazyWrite( + Writes.seq[Node[EmbeddedEntity[Collection]]](collectionEntityWrites) + ) ~ + (__ \ "fullPath").write[List[String]] ~ + (__ \ "data").writeNullable[EmbeddedEntity[Collection]] )(node => (node.basename, node.children, node.fullPath, node.data)) type CollectionsEntity = Seq[EmbeddedEntity[Node[Collection]]] implicit def asArgo: Writes[Node[Collection]] = ( (__ \ "basename").write[String] ~ - (__ \ "children").lazyWrite[CollectionsEntity](Writes[CollectionsEntity] + (__ \ "children") + .lazyWrite[CollectionsEntity]( + Writes[CollectionsEntity] // This is so we don't have to rewrite the Write[Seq[T]] - (seq => Json.toJson(seq))).contramap(collectionsEntity) ~ + (seq => Json.toJson(seq)) + ) + .contramap(collectionsEntity) ~ (__ \ "fullPath").write[List[String]] ~ (__ \ "data").writeNullable[Collection] ~ (__ \ "cssColour").writeNullable[String] - )(node => (node.basename, node.children, node.fullPath, node.data, getCssColour(node.fullPath))) - + )(node => + ( + node.basename, + node.children, + node.fullPath, + node.data, + getCssColour(node.fullPath) + ) + ) def collectionsEntity(nodes: List[Node[Collection]]): CollectionsEntity = { - nodes.map(n => EmbeddedEntity(collectionUri(n.fullPath), Some(n), actions = getActions(n))) + nodes.map(n => + EmbeddedEntity( + collectionUri(n.fullPath), + Some(n), + actions = getActions(n) + ) + ) } } - - - diff --git a/collections/app/controllers/ImageCollectionsController.scala b/collections/app/controllers/ImageCollectionsController.scala index 1ba211c53a..183df4e75f 100644 --- a/collections/app/controllers/ImageCollectionsController.scala +++ b/collections/app/controllers/ImageCollectionsController.scala @@ -15,10 +15,13 @@ import play.api.mvc.{BaseController, ControllerComponents} import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future 
- -class ImageCollectionsController(authenticated: Authentication, config: CollectionsConfig, notifications: Notifications, - override val controllerComponents: ControllerComponents) - extends BaseController with ArgoHelpers { +class ImageCollectionsController( + authenticated: Authentication, + config: CollectionsConfig, + notifications: Notifications, + override val controllerComponents: ControllerComponents +) extends BaseController + with ArgoHelpers { import CollectionsManager.onlyLatest @@ -27,47 +30,56 @@ class ImageCollectionsController(authenticated: Authentication, config: Collecti def getCollections(id: String) = authenticated.async { req => dynamo.listGet[Collection](id, "collections").map { collections => respond(onlyLatest(collections)) - } recover { - case NoItemFound => respondNotFound("No collections found") + } recover { case NoItemFound => + respondNotFound("No collections found") } } def addCollection(id: String) = authenticated.async(parse.json) { req => (req.body \ "data").asOpt[List[String]].map { path => - val collection = Collection.build(path, ActionData(getIdentity(req.user), DateTime.now())) - dynamo.listAdd(id, "collections", collection) + val collection = Collection + .build(path, ActionData(getIdentity(req.user), DateTime.now())) + dynamo + .listAdd(id, "collections", collection) .map(publish(id)) .map(cols => respond(collection)) - } getOrElse Future.successful(respondError(BadRequest, "invalid-form-data", "Invalid form data")) + } getOrElse Future.successful( + respondError(BadRequest, "invalid-form-data", "Invalid form data") + ) } - - def removeCollection(id: String, collectionString: String) = authenticated.async { req => - val path = CollectionsManager.uriToPath(UriOps.encodePlus(collectionString)) - // We do a get to be able to find the index of the current collection, then remove it. - // Given that we're using Dynamo Lists this seemed like a decent way to do it. - // Dynamo Lists, like other lists do respect order. - dynamo.listGet[Collection](id, "collections") flatMap { collections => - CollectionsManager.findIndexes(path, collections) match { - case Nil => - Future.successful(respondNotFound(s"Collection $collectionString not found")) - case indexes => - dynamo.listRemoveIndexes[Collection](id, "collections", indexes) - .map(publish(id)) - .map(cols => respond(cols)) + def removeCollection(id: String, collectionString: String) = + authenticated.async { req => + val path = + CollectionsManager.uriToPath(UriOps.encodePlus(collectionString)) + // We do a get to be able to find the index of the current collection, then remove it. + // Given that we're using Dynamo Lists this seemed like a decent way to do it. + // Dynamo Lists, like other lists do respect order. 
+ dynamo.listGet[Collection](id, "collections") flatMap { collections => + CollectionsManager.findIndexes(path, collections) match { + case Nil => + Future.successful( + respondNotFound(s"Collection $collectionString not found") + ) + case indexes => + dynamo + .listRemoveIndexes[Collection](id, "collections", indexes) + .map(publish(id)) + .map(cols => respond(cols)) + } + } recover { case NoItemFound => + respondNotFound("No collections found") } - } recover { - case NoItemFound => respondNotFound("No collections found") } - } def publish(id: String)(collections: List[Collection]): List[Collection] = { val onlyLatestCollections = onlyLatest(collections) - val updateMessage = UpdateMessage(subject = "set-image-collections", id = Some(id), collections = Some(onlyLatestCollections)) + val updateMessage = UpdateMessage( + subject = "set-image-collections", + id = Some(id), + collections = Some(onlyLatestCollections) + ) notifications.publish(updateMessage) onlyLatestCollections } } - - - diff --git a/collections/app/lib/CollectionsConfig.scala b/collections/app/lib/CollectionsConfig.scala index 3d73c00572..58060e7bc9 100644 --- a/collections/app/lib/CollectionsConfig.scala +++ b/collections/app/lib/CollectionsConfig.scala @@ -2,8 +2,8 @@ package lib import com.gu.mediaservice.lib.config.{CommonConfig, GridConfigResources} - -class CollectionsConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) { +class CollectionsConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) { val collectionsTable = string("dynamo.table.collections") val imageCollectionsTable = string("dynamo.table.imageCollections") diff --git a/collections/app/lib/CollectionsMetrics.scala b/collections/app/lib/CollectionsMetrics.scala index 2afba42ab0..439d57942d 100644 --- a/collections/app/lib/CollectionsMetrics.scala +++ b/collections/app/lib/CollectionsMetrics.scala @@ -2,7 +2,8 @@ package lib import com.gu.mediaservice.lib.metrics.CloudWatchMetrics -class CollectionsMetrics(config: CollectionsConfig) extends CloudWatchMetrics(s"${config.stage}/Collections", config) { +class CollectionsMetrics(config: CollectionsConfig) + extends CloudWatchMetrics(s"${config.stage}/Collections", config) { val processingLatency = new TimeMetric("ProcessingLatency") diff --git a/collections/app/lib/Notifications.scala b/collections/app/lib/Notifications.scala index 803219f617..5a639d872d 100644 --- a/collections/app/lib/Notifications.scala +++ b/collections/app/lib/Notifications.scala @@ -2,4 +2,5 @@ package lib import com.gu.mediaservice.lib.aws.ThrallMessageSender -class Notifications(config: CollectionsConfig) extends ThrallMessageSender(config.thrallKinesisStreamConfig) +class Notifications(config: CollectionsConfig) + extends ThrallMessageSender(config.thrallKinesisStreamConfig) diff --git a/collections/app/model/Node.scala b/collections/app/model/Node.scala index 7346a8f180..29373aa06d 100644 --- a/collections/app/model/Node.scala +++ b/collections/app/model/Node.scala @@ -4,7 +4,13 @@ import play.api.libs.functional.syntax._ import play.api.libs.json._ // TODO: Convert fullPath to NonEmptylist -case class Node[T](basename: String, children: List[Node[T]], fullPath: List[String], correctPath: List[String], data: Option[T]) { +case class Node[T]( + basename: String, + children: List[Node[T]], + fullPath: List[String], + correctPath: List[String], + data: Option[T] +) { // This is a hackmap that should map from T => V def hackmap[V](f: Node[T] => Node[V]): Node[V] = { val 
newNode = f(this) @@ -20,9 +26,17 @@ case class Node[T](basename: String, children: List[Node[T]], fullPath: List[Str } } object Node { - def fromList[T](list: List[T], getPath: T => List[String], getCorrectPathBit: T => String): Node[T] = { + def fromList[T]( + list: List[T], + getPath: T => List[String], + getCorrectPathBit: T => String + ): Node[T] = { // returns children for a given path - def loop(ts: List[T], fullPath: List[String], correctPath: List[String]): List[Node[T]] = { + def loop( + ts: List[T], + fullPath: List[String], + correctPath: List[String] + ): List[Node[T]] = { ts // group by slug at current level .groupBy(getPath(_).drop(fullPath.size).head) @@ -33,11 +47,19 @@ object Node { getPath(t).size == fullPath.size + 1 } val thisFullPath = fullPath :+ currentSlug - val thisCorrectPath = thisLevel.headOption.map(getCorrectPathBit).map(c => correctPath :+ c).getOrElse(Nil) - + val thisCorrectPath = thisLevel.headOption + .map(getCorrectPathBit) + .map(c => correctPath :+ c) + .getOrElse(Nil) // use the T at this level or an empty node to hold children - Node(currentSlug, loop(children, thisFullPath, thisCorrectPath), thisFullPath, thisCorrectPath, thisLevel.headOption) + Node( + currentSlug, + loop(children, thisFullPath, thisCorrectPath), + thisFullPath, + thisCorrectPath, + thisLevel.headOption + ) } .sortBy(node => (node.children.isEmpty, node.basename)) } @@ -46,9 +68,12 @@ object Node { implicit def nodeFormat[T: Format]: Format[Node[T]] = ( (__ \ "basename").format[String] ~ - (__ \ "children").lazyFormat(Reads.list(nodeFormat[T]), Writes.list(nodeFormat[T])) ~ - (__ \ "fullPath").format[List[String]] ~ - (__ \ "correctPath").format[List[String]] ~ - (__ \ "data").formatNullable[T] + (__ \ "children").lazyFormat( + Reads.list(nodeFormat[T]), + Writes.list(nodeFormat[T]) + ) ~ + (__ \ "fullPath").format[List[String]] ~ + (__ \ "correctPath").format[List[String]] ~ + (__ \ "data").formatNullable[T] )(Node.apply, unlift(Node.unapply)) } diff --git a/collections/app/store/CollectionsStore.scala b/collections/app/store/CollectionsStore.scala index 3814f5b392..3032a9f230 100644 --- a/collections/app/store/CollectionsStore.scala +++ b/collections/app/store/CollectionsStore.scala @@ -14,21 +14,21 @@ class CollectionsStore(config: CollectionsConfig) { def getAll: Future[List[Collection]] = dynamo.scan map { jsonList => jsonList.flatMap(json => (json \ "collection").asOpt[Collection]) - } recover { - case e => throw CollectionsStoreError(e) + } recover { case e => + throw CollectionsStoreError(e) } def add(collection: Collection): Future[Collection] = { dynamo.objPut(collection.pathId, "collection", collection) - } recover { - case e => throw CollectionsStoreError(e) + } recover { case e => + throw CollectionsStoreError(e) } def remove(collectionPath: List[String]): Future[Unit] = { val path = CollectionsManager.pathToPathId(collectionPath) dynamo.deleteItem(path) - } recover { - case e => throw CollectionsStoreError(e) + } recover { case e => + throw CollectionsStoreError(e) } } diff --git a/collections/test/model/NodeTest.scala b/collections/test/model/NodeTest.scala index 4a7310ea84..1b7f7bf064 100644 --- a/collections/test/model/NodeTest.scala +++ b/collections/test/model/NodeTest.scala @@ -38,7 +38,12 @@ class NodeTest extends FunSpec with Matchers with OptionValues { it("should have second level children") { val tree = buildTree val g2 = tree.children.find(_.basename == "g2").value - g2.children.map(_.basename).toSet shouldEqual Set("features", "food", "health", 
"lifestyle") + g2.children.map(_.basename).toSet shouldEqual Set( + "features", + "food", + "health", + "lifestyle" + ) } it("should attach the content to the Node if available") { @@ -55,7 +60,6 @@ class NodeTest extends FunSpec with Matchers with OptionValues { } - describe("hackmap") { val wrongList = List( TestNodeData(List("all"), "All"), @@ -63,24 +67,31 @@ class NodeTest extends FunSpec with Matchers with OptionValues { TestNodeData(List("all", "lower", "case"), "CaSe") ) - val wrongTree = Node.fromList[TestNodeData](wrongList, (d) => d.path, (d) => d.right) + val wrongTree = + Node.fromList[TestNodeData](wrongList, (d) => d.path, (d) => d.right) val rightTree = wrongTree hackmap { node => val correctedData = node.data.map(d => d.copy(path = node.correctPath)) - Node(node.basename, node.children, node.fullPath, node.correctPath, correctedData) + Node( + node.basename, + node.children, + node.fullPath, + node.correctPath, + correctedData + ) } val rightTreeList = rightTree.toList(Nil) - rightTreeList(0) shouldEqual TestNodeData(List("All"), "All") rightTreeList(1) shouldEqual TestNodeData(List("All", "LOWER"), "LOWER") - rightTreeList(2) shouldEqual TestNodeData(List("All", "LOWER", "CaSe"), "CaSe") + rightTreeList(2) shouldEqual TestNodeData( + List("All", "LOWER", "CaSe"), + "CaSe" + ) } - } - case class TestNodeData(path: List[String], right: String) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/BaseStore.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/BaseStore.scala index 28e307ee69..2e46fdb131 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/BaseStore.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/BaseStore.scala @@ -13,20 +13,25 @@ import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext import scala.concurrent.duration._ - -abstract class BaseStore[TStoreKey, TStoreVal](bucket: String, config: CommonConfig)(implicit ec: ExecutionContext) - extends GridLogging { +abstract class BaseStore[TStoreKey, TStoreVal]( + bucket: String, + config: CommonConfig +)(implicit ec: ExecutionContext) + extends GridLogging { val s3 = new S3(config) protected val store: Box[Map[TStoreKey, TStoreVal]] = Box(Map.empty) protected val lastUpdated: Box[DateTime] = Box(DateTime.now()) - protected def getS3Object(key: String): Option[String] = s3.getObjectAsString(bucket, key) + protected def getS3Object(key: String): Option[String] = + s3.getObjectAsString(bucket, key) protected def getLatestS3Stream: Option[InputStream] = { val objects = s3.client - .listObjects(bucket).getObjectSummaries.asScala + .listObjects(bucket) + .getObjectSummaries + .asScala .filterNot(_.getKey == "AMAZON_SES_SETUP_NOTIFICATION") if (objects.nonEmpty) { @@ -53,4 +58,3 @@ abstract class BaseStore[TStoreKey, TStoreVal](bucket: String, config: CommonCon def update(): Unit } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/DateTimeUtils.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/DateTimeUtils.scala index ab43fe1362..7a0928038f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/DateTimeUtils.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/DateTimeUtils.scala @@ -12,10 +12,15 @@ object DateTimeUtils { def now(): ZonedDateTime = ZonedDateTime.now(EuropeLondonZone) - def toString(zonedDateTime: ZonedDateTime): String = zonedDateTime.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME) + def toString(zonedDateTime: ZonedDateTime): String = + zonedDateTime.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME) - 
def toString(instant: Instant): String = instant.atZone(EuropeLondonZone).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME) + def toString(instant: Instant): String = instant + .atZone(EuropeLondonZone) + .format(DateTimeFormatter.ISO_OFFSET_DATE_TIME) // TODO move this to a LocalDateTime - def fromValueOrNow(value: Option[String]): DateTime = Try{new DateTime(value.get)}.getOrElse(DateTime.now) + def fromValueOrNow(value: Option[String]): DateTime = Try { + new DateTime(value.get) + }.getOrElse(DateTime.now) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/Files.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/Files.scala index 045a1c8987..5c8cde8232 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/Files.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/Files.scala @@ -7,12 +7,16 @@ import java.util.concurrent.Executors import scala.concurrent.{ExecutionContext, Future} - object Files { - private implicit val ctx = ExecutionContext.fromExecutor(Executors.newCachedThreadPool) + private implicit val ctx = + ExecutionContext.fromExecutor(Executors.newCachedThreadPool) - def createTempFile(prefix: String, suffix: String, tempDir: File): Future[File] = + def createTempFile( + prefix: String, + suffix: String, + tempDir: File + ): Future[File] = Future { File.createTempFile(prefix, suffix, tempDir) } @@ -24,12 +28,16 @@ object Files { output.getChannel.transferFrom(channel, 0, java.lang.Long.MAX_VALUE) } - def tempFileFromURL(from: URL, prefix: String, suffix: String, tempDir: File): Future[File] = + def tempFileFromURL( + from: URL, + prefix: String, + suffix: String, + tempDir: File + ): Future[File] = for { tempFile <- createTempFile(prefix, suffix, tempDir: File) _ <- transferFromURL(from, tempFile) - } - yield tempFile + } yield tempFile def delete(file: File): Future[Unit] = Future(file.delete()) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageFields.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageFields.scala index 4edc784915..4e96cc2d32 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageFields.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageFields.scala @@ -38,20 +38,20 @@ trait ImageFields { val collectionsFields = List("path", "pathId", "pathHierarchy") val usagesFields = List("status", "platform", "dateAdded") - def identifierField(field: String) = s"identifiers.$field" - def metadataField(field: String) = s"metadata.$field" - def editsField(field: String) = s"userMetadata.$field" + def identifierField(field: String) = s"identifiers.$field" + def metadataField(field: String) = s"metadata.$field" + def editsField(field: String) = s"userMetadata.$field" def usageRightsField(field: String) = s"usageRights.$field" def collectionsField(field: String) = s"collections.$field" - def usagesField(field: String) = s"usages.$field" - def sourceField(field: String) = s"source.$field" + def usagesField(field: String) = s"usages.$field" + def sourceField(field: String) = s"source.$field" def photoshootField(field: String) = editsField(s"photoshoot.$field") val aliases = Map( - "crops" -> "exports", + "crops" -> "exports", "croppedBy" -> "exports.author", - "filename" -> "uploadInfo.filename", - "photoshoot"-> photoshootField("title"), + "filename" -> "uploadInfo.filename", + "photoshoot" -> photoshootField("title"), "leases" -> "leases.leases", "leasedBy" -> "leases.leases.leasedBy", "people" -> metadataField("peopleInImage") @@ -64,7 +64,7 @@ trait ImageFields { case f if 
collectionsFields.contains(f) => collectionsField(f) case f if usagesFields.contains(f) => usagesField(f) case f if sourceFields.contains(f) => sourceField(f) - case f => aliases.getOrElse(f, f) + case f => aliases.getOrElse(f, f) } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageIngestOperations.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageIngestOperations.scala index 1b070331fd..e0b41d8484 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageIngestOperations.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageIngestOperations.scala @@ -12,36 +12,65 @@ import scala.concurrent.Future object ImageIngestOperations { def fileKeyFromId(id: String): String = id.take(6).mkString("/") + "/" + id - def optimisedPngKeyFromId(id: String): String = "optimised/" + fileKeyFromId(id: String) + def optimisedPngKeyFromId(id: String): String = + "optimised/" + fileKeyFromId(id: String) } -class ImageIngestOperations(imageBucket: String, thumbnailBucket: String, config: CommonConfig, isVersionedS3: Boolean = false) - extends S3ImageStorage(config) { +class ImageIngestOperations( + imageBucket: String, + thumbnailBucket: String, + config: CommonConfig, + isVersionedS3: Boolean = false +) extends S3ImageStorage(config) { import ImageIngestOperations.{fileKeyFromId, optimisedPngKeyFromId} - def store(storableImage: StorableImage) - (implicit logMarker: LogMarker): Future[S3Object] = storableImage match { - case s:StorableOriginalImage => storeOriginalImage(s) - case s:StorableThumbImage => storeThumbnailImage(s) - case s:StorableOptimisedImage => storeOptimisedImage(s) + def store( + storableImage: StorableImage + )(implicit logMarker: LogMarker): Future[S3Object] = storableImage match { + case s: StorableOriginalImage => storeOriginalImage(s) + case s: StorableThumbImage => storeThumbnailImage(s) + case s: StorableOptimisedImage => storeOptimisedImage(s) } - private def storeOriginalImage(storableImage: StorableOriginalImage) - (implicit logMarker: LogMarker): Future[S3Object] = - storeImage(imageBucket, fileKeyFromId(storableImage.id), storableImage.file, Some(storableImage.mimeType), storableImage.meta) + private def storeOriginalImage( + storableImage: StorableOriginalImage + )(implicit logMarker: LogMarker): Future[S3Object] = + storeImage( + imageBucket, + fileKeyFromId(storableImage.id), + storableImage.file, + Some(storableImage.mimeType), + storableImage.meta + ) - private def storeThumbnailImage(storableImage: StorableThumbImage) - (implicit logMarker: LogMarker): Future[S3Object] = - storeImage(thumbnailBucket, fileKeyFromId(storableImage.id), storableImage.file, Some(storableImage.mimeType)) + private def storeThumbnailImage( + storableImage: StorableThumbImage + )(implicit logMarker: LogMarker): Future[S3Object] = + storeImage( + thumbnailBucket, + fileKeyFromId(storableImage.id), + storableImage.file, + Some(storableImage.mimeType) + ) - private def storeOptimisedImage(storableImage: StorableOptimisedImage) - (implicit logMarker: LogMarker): Future[S3Object] = - storeImage(imageBucket, optimisedPngKeyFromId(storableImage.id), storableImage.file, Some(storableImage.mimeType)) + private def storeOptimisedImage( + storableImage: StorableOptimisedImage + )(implicit logMarker: LogMarker): Future[S3Object] = + storeImage( + imageBucket, + optimisedPngKeyFromId(storableImage.id), + storableImage.file, + Some(storableImage.mimeType) + ) - def deleteOriginal(id: String): Future[Unit] = if(isVersionedS3) deleteVersionedImage(imageBucket, 
fileKeyFromId(id)) else deleteImage(imageBucket, fileKeyFromId(id)) - def deleteThumbnail(id: String): Future[Unit] = deleteImage(thumbnailBucket, fileKeyFromId(id)) - def deletePng(id: String): Future[Unit] = deleteImage(imageBucket, optimisedPngKeyFromId(id)) + def deleteOriginal(id: String): Future[Unit] = if (isVersionedS3) + deleteVersionedImage(imageBucket, fileKeyFromId(id)) + else deleteImage(imageBucket, fileKeyFromId(id)) + def deleteThumbnail(id: String): Future[Unit] = + deleteImage(thumbnailBucket, fileKeyFromId(id)) + def deletePng(id: String): Future[Unit] = + deleteImage(imageBucket, optimisedPngKeyFromId(id)) } sealed trait ImageWrapper { @@ -52,11 +81,32 @@ sealed trait ImageWrapper { } sealed trait StorableImage extends ImageWrapper -case class StorableThumbImage(id: String, file: File, mimeType: MimeType, meta: Map[String, String] = Map.empty) extends StorableImage -case class StorableOriginalImage(id: String, file: File, mimeType: MimeType, meta: Map[String, String] = Map.empty) extends StorableImage -case class StorableOptimisedImage(id: String, file: File, mimeType: MimeType, meta: Map[String, String] = Map.empty) extends StorableImage -case class BrowserViewableImage(id: String, file: File, mimeType: MimeType, meta: Map[String, String] = Map.empty, mustUpload: Boolean = false) extends ImageWrapper { - def asStorableOptimisedImage = StorableOptimisedImage(id, file, mimeType, meta) +case class StorableThumbImage( + id: String, + file: File, + mimeType: MimeType, + meta: Map[String, String] = Map.empty +) extends StorableImage +case class StorableOriginalImage( + id: String, + file: File, + mimeType: MimeType, + meta: Map[String, String] = Map.empty +) extends StorableImage +case class StorableOptimisedImage( + id: String, + file: File, + mimeType: MimeType, + meta: Map[String, String] = Map.empty +) extends StorableImage +case class BrowserViewableImage( + id: String, + file: File, + mimeType: MimeType, + meta: Map[String, String] = Map.empty, + mustUpload: Boolean = false +) extends ImageWrapper { + def asStorableOptimisedImage = + StorableOptimisedImage(id, file, mimeType, meta) def asStorableThumbImage = StorableThumbImage(id, file, mimeType, meta) } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageQuarantineOperations.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageQuarantineOperations.scala index 73d9fe81dd..e161b925a4 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageQuarantineOperations.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageQuarantineOperations.scala @@ -9,13 +9,23 @@ import com.gu.mediaservice.model.MimeType import scala.concurrent.Future -class ImageQuarantineOperations(quarantineBucket: String, config: CommonConfig, isVersionedS3: Boolean = false) - extends S3ImageStorage(config) { - - def storeQuarantineImage(id: String, file: File, mimeType: Option[MimeType], meta: Map[String, String] = Map.empty) - (implicit logMarker: LogMarker): Future[S3Object] = - storeImage(quarantineBucket, ImageIngestOperations.fileKeyFromId(id), file, mimeType, meta) +class ImageQuarantineOperations( + quarantineBucket: String, + config: CommonConfig, + isVersionedS3: Boolean = false +) extends S3ImageStorage(config) { + + def storeQuarantineImage( + id: String, + file: File, + mimeType: Option[MimeType], + meta: Map[String, String] = Map.empty + )(implicit logMarker: LogMarker): Future[S3Object] = + storeImage( + quarantineBucket, + ImageIngestOperations.fileKeyFromId(id), + file, + mimeType, + 
meta + ) } - - - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageStorage.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageStorage.scala index ad4ea3f1c6..80493fc404 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageStorage.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/ImageStorage.scala @@ -30,8 +30,13 @@ trait ImageStorage { /** Store a copy of the given file and return the URI of that copy. * The file can safely be deleted afterwards. */ - def storeImage(bucket: String, id: String, file: File, mimeType: Option[MimeType], meta: Map[String, String] = Map.empty) - (implicit logMarker: LogMarker): Future[S3Object] + def storeImage( + bucket: String, + id: String, + file: File, + mimeType: Option[MimeType], + meta: Map[String, String] = Map.empty + )(implicit logMarker: LogMarker): Future[S3Object] def deleteImage(bucket: String, id: String): Future[Unit] } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/MD5.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/MD5.scala index d14884742d..53ed4004b9 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/MD5.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/MD5.scala @@ -1,6 +1,5 @@ package lib - object MD5 { def hash(s: String) = { val m = java.security.MessageDigest.getInstance("MD5") diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/Processes.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/Processes.scala index ac24e3c091..ca4cf4f416 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/Processes.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/Processes.scala @@ -16,15 +16,16 @@ object Processes { def unchunk[O]: Process1[Seq[O], O] = process1.id[Seq[O]].flatMap(emitAll) - def sleepIfEmpty[A](duration: FiniteDuration)(p: Process[Task, Seq[A]]): Process[Task, Seq[A]] = + def sleepIfEmpty[A](duration: FiniteDuration)( + p: Process[Task, Seq[A]] + ): Process[Task, Seq[A]] = p.flatMap(xs => if (xs.isEmpty) sleep(duration) else emit(xs)) implicit class SourceSyntax[O](self: Process[Task, O]) { - /** - * Emits a chunk whenever `maxSize` elements have accumulated, or at every - * `maxAge` time when elements are buffered, whichever is sooner. - */ + /** Emits a chunk whenever `maxSize` elements have accumulated, or at every + * `maxAge` time when elements are buffered, whichever is sooner. 
+ */ def chunkTimed(maxAge: Duration, maxSize: Int): Process[Task, Vector[O]] = { def go(buf: Vector[O], lastEmit: Duration): Wye[Duration, O, Vector[O]] = awaitBoth[Duration, O].flatMap { @@ -32,7 +33,8 @@ object Processes { if (buf.nonEmpty) emit(buf) fby go(Vector(), t) else go(buf, lastEmit) case ReceiveR(o) => - if (buf.size >= (maxSize - 1)) emit(buf :+ o) fby go(Vector(), lastEmit) + if (buf.size >= (maxSize - 1)) + emit(buf :+ o) fby go(Vector(), lastEmit) else go(buf :+ o, lastEmit) case HaltL(e) => Halt(e) case HaltR(e) => Halt(e) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/S3ImageStorage.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/S3ImageStorage.scala index 0c7ed5d767..f62c6a416c 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/S3ImageStorage.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/S3ImageStorage.scala @@ -12,16 +12,30 @@ import scala.collection.JavaConverters._ import scala.concurrent.Future // TODO: If deleteObject fails - we should be catching the errors here to avoid them bubbling to the application -class S3ImageStorage(config: CommonConfig) extends S3(config) with ImageStorage { +class S3ImageStorage(config: CommonConfig) + extends S3(config) + with ImageStorage { private val log = LoggerFactory.getLogger(getClass) private val cacheSetting = Some(cacheForever) - def storeImage(bucket: String, id: String, file: File, mimeType: Option[MimeType], meta: Map[String, String] = Map.empty) - (implicit logMarker: LogMarker) = { + def storeImage( + bucket: String, + id: String, + file: File, + mimeType: Option[MimeType], + meta: Map[String, String] = Map.empty + )(implicit logMarker: LogMarker) = { store(bucket, id, file, mimeType, meta, cacheSetting) - .map( _ => + .map(_ => // TODO this is just giving back the stuff we passed in and should be factored out. - S3Ops.projectFileAsS3Object(bucket, id, file, mimeType, meta, cacheSetting) + S3Ops.projectFileAsS3Object( + bucket, + id, + file, + mimeType, + meta, + cacheSetting + ) ) } @@ -37,8 +51,8 @@ class S3ImageStorage(config: CommonConfig) extends S3(config) with ImageStorage } def deleteFolder(bucket: String, id: String) = Future { - val files = client.listObjects(bucket, id).getObjectSummaries.asScala - files.foreach(file => client.deleteObject(bucket, file.getKey)) - log.info(s"Deleting images in folder $id from bucket $bucket") - } + val files = client.listObjects(bucket, id).getObjectSummaries.asScala + files.foreach(file => client.deleteObject(bucket, file.getKey)) + log.info(s"Deleting images in folder $id from bucket $bucket") + } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/ArgoHelpers.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/ArgoHelpers.scala index 9212c38121..6b247e9f8b 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/ArgoHelpers.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/ArgoHelpers.scala @@ -8,34 +8,41 @@ import com.gu.mediaservice.lib.argo.model._ import com.gu.mediaservice.lib.logging.GridLogging import com.typesafe.scalalogging.Logger - trait ArgoHelpers extends Results with GridLogging { val ArgoMediaType = "application/vnd.argo+json" // FIXME: DSL to append links and actions? 
- def respond[T](data: T, links: List[Link] = Nil, actions: List[Action] = Nil, uri: Option[URI] = None) - (implicit writes: Writes[T]): Result = { + def respond[T]( + data: T, + links: List[Link] = Nil, + actions: List[Action] = Nil, + uri: Option[URI] = None + )(implicit writes: Writes[T]): Result = { val response = EntityResponse( - uri = uri, - data = data, - links = links, + uri = uri, + data = data, + links = links, actions = actions ) serializeAndWrap(response, Ok) } - def respondCollection[T](data: Seq[T], offset: Option[Long] = None, total: Option[Long] = None, - links: List[Link] = Nil, uri: Option[URI] = None) - (implicit writes: Writes[T]): Result = { + def respondCollection[T]( + data: Seq[T], + offset: Option[Long] = None, + total: Option[Long] = None, + links: List[Link] = Nil, + uri: Option[URI] = None + )(implicit writes: Writes[T]): Result = { val response = CollectionResponse( - uri = uri, + uri = uri, offset = offset, length = Some(data.size), - total = total, - data = data, - links = links + total = total, + data = data, + links = links ) serializeAndWrap(response, Ok) @@ -55,13 +62,20 @@ trait ArgoHelpers extends Results with GridLogging { // } // TODO: find a nicer way to serialise ErrorResponse[Nothing] without this hack - def respondError(errorStatus: Status, errorKey: String, errorMessage: String, links: List[Link] = Nil): Result = { - logger.warn(s"[$errorKey] Responding with error status ${errorStatus.header.status}, $errorMessage") + def respondError( + errorStatus: Status, + errorKey: String, + errorMessage: String, + links: List[Link] = Nil + ): Result = { + logger.warn( + s"[$errorKey] Responding with error status ${errorStatus.header.status}, $errorMessage" + ) val response = ErrorResponse[Int]( - errorKey = errorKey, + errorKey = errorKey, errorMessage = errorMessage, - data = None, - links = links + data = None, + links = links ) serializeAndWrap(response, errorStatus) @@ -69,17 +83,18 @@ trait ArgoHelpers extends Results with GridLogging { def respondNotFound(errorMessage: String): Result = { val response = ErrorResponse[Int]( - errorKey = "not-found", + errorKey = "not-found", errorMessage = errorMessage, - data = None, - links = Nil + data = None, + links = Nil ) serializeAndWrap(response, Status(404)) } - - private def serializeAndWrap[T](response: T, status: Status)(implicit writes: Writes[T]): Result = { + private def serializeAndWrap[T](response: T, status: Status)(implicit + writes: Writes[T] + ): Result = { val json = Json.toJson(response) status(json).as(ArgoMediaType) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/Action.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/Action.scala index bcda18f89a..9c85e44587 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/Action.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/Action.scala @@ -5,7 +5,6 @@ import java.net.URI import play.api.libs.json._ import play.api.libs.functional.syntax._ - // TODO: add specification of parameters and body structure, mimeType case class Action(name: String, href: URI, method: String) @@ -15,6 +14,6 @@ object Action { (__ \ "name").write[String] ~ (__ \ "href").write[String].contramap((_: URI).toString) ~ (__ \ "method").write[String] - )(unlift(Action.unapply)) + )(unlift(Action.unapply)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/CollectionResponse.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/CollectionResponse.scala 
index b37d40fa0e..95e140ef8f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/CollectionResponse.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/CollectionResponse.scala @@ -7,25 +7,27 @@ import play.api.libs.functional.syntax._ import com.gu.mediaservice.lib.argo.WriteHelpers - case class CollectionResponse[T]( - uri: Option[URI] = None, - offset: Option[Long] = None, - length: Option[Long], - total: Option[Long] = None, - data: Seq[T], - links: List[Link] = List() + uri: Option[URI] = None, + offset: Option[Long] = None, + length: Option[Long], + total: Option[Long] = None, + data: Seq[T], + links: List[Link] = List() ) object CollectionResponse extends WriteHelpers { - implicit def collectionResponseWrites[T: Writes]: Writes[CollectionResponse[T]] = ( - (__ \ "uri").writeNullable[String].contramap((_: Option[URI]).map(_.toString)) ~ + implicit def collectionResponseWrites[T: Writes] + : Writes[CollectionResponse[T]] = ( + (__ \ "uri") + .writeNullable[String] + .contramap((_: Option[URI]).map(_.toString)) ~ (__ \ "offset").writeNullable[Long] ~ (__ \ "length").writeNullable[Long] ~ (__ \ "total").writeNullable[Long] ~ (__ \ "data").write[Seq[T]] ~ (__ \ "links").writeNullable[List[Link]].contramap(someListOrNone[Link]) - )(unlift(CollectionResponse.unapply[T])) + )(unlift(CollectionResponse.unapply[T])) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/EmbeddedEntity.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/EmbeddedEntity.scala index f952e5bbe0..d82a5d856d 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/EmbeddedEntity.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/EmbeddedEntity.scala @@ -7,12 +7,11 @@ import play.api.libs.functional.syntax._ import com.gu.mediaservice.lib.argo.WriteHelpers - case class EmbeddedEntity[T]( - uri: URI, - data: Option[T], - links: List[Link] = Nil, - actions: List[Action] = Nil + uri: URI, + data: Option[T], + links: List[Link] = Nil, + actions: List[Action] = Nil ) object EmbeddedEntity extends WriteHelpers { @@ -21,7 +20,9 @@ object EmbeddedEntity extends WriteHelpers { (__ \ "uri").write[String].contramap((_: URI).toString) ~ (__ \ "data").writeNullable[T] ~ (__ \ "links").writeNullable[List[Link]].contramap(someListOrNone[Link]) ~ - (__ \ "actions").writeNullable[List[Action]].contramap(someListOrNone[Action]) - )(unlift(EmbeddedEntity.unapply[T])) + (__ \ "actions") + .writeNullable[List[Action]] + .contramap(someListOrNone[Action]) + )(unlift(EmbeddedEntity.unapply[T])) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/EntityResponse.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/EntityResponse.scala index 084c37d58f..57d2574e49 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/EntityResponse.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/EntityResponse.scala @@ -7,21 +7,24 @@ import play.api.libs.functional.syntax._ import com.gu.mediaservice.lib.argo.WriteHelpers - case class EntityResponse[T]( - uri: Option[URI] = None, - data: T, - links: List[Link] = Nil, - actions: List[Action] = Nil + uri: Option[URI] = None, + data: T, + links: List[Link] = Nil, + actions: List[Action] = Nil ) object EntityResponse extends WriteHelpers { implicit def entityResponseWrites[T: Writes]: Writes[EntityResponse[T]] = ( - (__ \ "uri").writeNullable[String].contramap((_: Option[URI]).map(_.toString)) ~ + (__ \ "uri") + 
.writeNullable[String] + .contramap((_: Option[URI]).map(_.toString)) ~ (__ \ "data").write[T] ~ (__ \ "links").writeNullable[List[Link]].contramap(someListOrNone[Link]) ~ - (__ \ "actions").writeNullable[List[Action]].contramap(someListOrNone[Action]) - )(unlift(EntityResponse.unapply[T])) + (__ \ "actions") + .writeNullable[List[Action]] + .contramap(someListOrNone[Action]) + )(unlift(EntityResponse.unapply[T])) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/ErrorResponse.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/ErrorResponse.scala index 003d1ba196..ea480a98f9 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/ErrorResponse.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/ErrorResponse.scala @@ -7,23 +7,24 @@ import play.api.libs.functional.syntax._ import com.gu.mediaservice.lib.argo.WriteHelpers - case class ErrorResponse[T]( - uri: Option[URI] = None, - errorKey: String, - errorMessage: String, - data: Option[T], - links: List[Link] = List() + uri: Option[URI] = None, + errorKey: String, + errorMessage: String, + data: Option[T], + links: List[Link] = List() ) object ErrorResponse extends WriteHelpers { implicit def errorResponseWrites[T: Writes]: Writes[ErrorResponse[T]] = ( - (__ \ "uri").writeNullable[String].contramap((_: Option[URI]).map(_.toString)) ~ + (__ \ "uri") + .writeNullable[String] + .contramap((_: Option[URI]).map(_.toString)) ~ (__ \ "errorKey").write[String] ~ (__ \ "errorMessage").write[String] ~ (__ \ "data").writeNullable[T] ~ (__ \ "links").writeNullable[List[Link]].contramap(someListOrNone[Link]) - )(unlift(ErrorResponse.unapply[T])) + )(unlift(ErrorResponse.unapply[T])) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/Link.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/Link.scala index 5f1dfcf09c..483a6b605a 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/Link.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/argo/model/Link.scala @@ -3,7 +3,6 @@ package com.gu.mediaservice.lib.argo.model import play.api.libs.json._ import play.api.libs.functional.syntax._ - // TODO: or uri template? 
case class Link(rel: String, href: String) @@ -12,6 +11,6 @@ object Link { implicit val linkWrites: Writes[Link] = ( (__ \ "rel").write[String] ~ (__ \ "href").write[String] - )(unlift(Link.unapply)) + )(unlift(Link.unapply)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/ApiAccessor.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/ApiAccessor.scala index 3e1924451d..98880557b8 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/ApiAccessor.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/ApiAccessor.scala @@ -11,9 +11,9 @@ case object Syndication extends Tier object Tier { def apply(value: String): Tier = value.toLowerCase match { - case "internal" => Internal + case "internal" => Internal case "syndication" => Syndication - case _ => ReadOnly // readonly by default + case _ => ReadOnly // readonly by default } } @@ -26,9 +26,15 @@ object ApiAccessor extends ArgoHelpers { ApiAccessor(name, tier) } - def hasAccess(apiKey: ApiAccessor, request: RequestHeader, services: Services): Boolean = apiKey.tier match { + def hasAccess( + apiKey: ApiAccessor, + request: RequestHeader, + services: Services + ): Boolean = apiKey.tier match { case Internal => true case ReadOnly => request.method == "GET" - case Syndication => request.method == "GET" && request.host == services.apiHost && request.path.startsWith("/images") + case Syndication => + request.method == "GET" && request.host == services.apiHost && request.path + .startsWith("/images") } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/Authentication.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/Authentication.scala index 57adcc2b7c..35abafa238 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/Authentication.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/Authentication.scala @@ -2,7 +2,12 @@ package com.gu.mediaservice.lib.auth import com.gu.mediaservice.lib.argo.ArgoHelpers import com.gu.mediaservice.lib.argo.model.Link -import com.gu.mediaservice.lib.auth.Authentication.{MachinePrincipal, UserPrincipal, OnBehalfOfPrincipal, Principal} +import com.gu.mediaservice.lib.auth.Authentication.{ + MachinePrincipal, + UserPrincipal, + OnBehalfOfPrincipal, + Principal +} import com.gu.mediaservice.lib.auth.provider._ import com.gu.mediaservice.lib.config.CommonConfig import play.api.libs.typedmap.TypedMap @@ -12,61 +17,97 @@ import play.api.mvc._ import scala.concurrent.{ExecutionContext, Future} -class Authentication(config: CommonConfig, - providers: AuthenticationProviders, - override val parser: BodyParser[AnyContent], - override val executionContext: ExecutionContext) - extends ActionBuilder[Authentication.Request, AnyContent] with ArgoHelpers { +class Authentication( + config: CommonConfig, + providers: AuthenticationProviders, + override val parser: BodyParser[AnyContent], + override val executionContext: ExecutionContext +) extends ActionBuilder[Authentication.Request, AnyContent] + with ArgoHelpers { // make the execution context implicit so it will be picked up appropriately implicit val ec: ExecutionContext = executionContext val loginLinks: List[Link] = providers.userProvider.loginLink match { case DisableLoginLink => Nil - case BuiltInAuthService => List(Link("login", config.services.loginUriTemplate)) + case BuiltInAuthService => + List(Link("login", config.services.loginUriTemplate)) case ExternalLoginLink(link) => List(Link("login", link)) } - def unauthorised(errorMessage: String, throwable: 
Option[Throwable] = None): Future[Result] = { + def unauthorised( + errorMessage: String, + throwable: Option[Throwable] = None + ): Future[Result] = { logger.info(s"Authentication failure $errorMessage", throwable.orNull) - Future.successful(respondError(Unauthorized, "authentication-failure", "Authentication failure", loginLinks)) + Future.successful( + respondError( + Unauthorized, + "authentication-failure", + "Authentication failure", + loginLinks + ) + ) } def forbidden(errorMessage: String): Future[Result] = { logger.info(s"User not authorised: $errorMessage") - Future.successful(respondError(Forbidden, "principal-not-authorised", "Principal not authorised", loginLinks)) + Future.successful( + respondError( + Forbidden, + "principal-not-authorised", + "Principal not authorised", + loginLinks + ) + ) } def expired(user: UserPrincipal): Future[Result] = { logger.info(s"User token expired for ${user.email}, return 419") - Future.successful(respondError(new Status(419), errorKey = "authentication-expired", errorMessage = "User authentication token has expired", loginLinks)) + Future.successful( + respondError( + new Status(419), + errorKey = "authentication-expired", + errorMessage = "User authentication token has expired", + loginLinks + ) + ) } def authenticationStatus(requestHeader: RequestHeader) = { def flushToken(resultWhenAbsent: Result): Result = { - providers.userProvider.flushToken.fold(resultWhenAbsent)(_(requestHeader, resultWhenAbsent)) + providers.userProvider.flushToken.fold(resultWhenAbsent)( + _(requestHeader, resultWhenAbsent) + ) } // Authenticate request. Try with API authenticator first and then with user authenticator providers.apiProvider.authenticateRequest(requestHeader) match { - case Authenticated(authedUser) => Right(authedUser) + case Authenticated(authedUser) => Right(authedUser) case Invalid(message, throwable) => Left(unauthorised(message, throwable)) - case NotAuthorised(message) => Left(forbidden(s"Principal not authorised: $message")) + case NotAuthorised(message) => + Left(forbidden(s"Principal not authorised: $message")) case NotAuthenticated => providers.userProvider.authenticateRequest(requestHeader) match { - case NotAuthenticated => Left(unauthorised("Not authenticated")) - case Expired(principal) => Left(expired(principal)) + case NotAuthenticated => Left(unauthorised("Not authenticated")) + case Expired(principal) => Left(expired(principal)) case Authenticated(authedUser) => Right(authedUser) - case Invalid(message, throwable) => Left(unauthorised(message, throwable).map(flushToken)) - case NotAuthorised(message) => Left(forbidden(s"Principal not authorised: $message")) + case Invalid(message, throwable) => + Left(unauthorised(message, throwable).map(flushToken)) + case NotAuthorised(message) => + Left(forbidden(s"Principal not authorised: $message")) } } } - override def invokeBlock[A](request: Request[A], block: Authentication.Request[A] => Future[Result]): Future[Result] = { + override def invokeBlock[A]( + request: Request[A], + block: Authentication.Request[A] => Future[Result] + ): Future[Result] = { authenticationStatus(request) match { // we have a principal, so process the block - case Right(principal) => block(new AuthenticatedRequest(principal, request)) + case Right(principal) => + block(new AuthenticatedRequest(principal, request)) // no principal so return a result which will either be an error or a form of redirect case Left(result) => result } @@ -74,11 +115,15 @@ class Authentication(config: CommonConfig, def 
getOnBehalfOfPrincipal(principal: Principal): OnBehalfOfPrincipal = { val provider: AuthenticationProvider = principal match { - case _:MachinePrincipal => providers.apiProvider - case _:UserPrincipal => providers.userProvider + case _: MachinePrincipal => providers.apiProvider + case _: UserPrincipal => providers.userProvider } - val maybeEnrichFn: Either[String, WSRequest => WSRequest] = provider.onBehalfOf(principal) - maybeEnrichFn.fold(error => throw new IllegalStateException(error), identity) + val maybeEnrichFn: Either[String, WSRequest => WSRequest] = + provider.onBehalfOf(principal) + maybeEnrichFn.fold( + error => throw new IllegalStateException(error), + identity + ) } } @@ -87,12 +132,22 @@ object Authentication { def accessor: ApiAccessor def attributes: TypedMap } + /** A human user with a name */ - case class UserPrincipal(firstName: String, lastName: String, email: String, attributes: TypedMap = TypedMap.empty) extends Principal { + case class UserPrincipal( + firstName: String, + lastName: String, + email: String, + attributes: TypedMap = TypedMap.empty + ) extends Principal { def accessor: ApiAccessor = ApiAccessor(identity = email, tier = Internal) } + /** A machine user doing work automatically for its human programmers */ - case class MachinePrincipal(accessor: ApiAccessor, attributes: TypedMap = TypedMap.empty) extends Principal + case class MachinePrincipal( + accessor: ApiAccessor, + attributes: TypedMap = TypedMap.empty + ) extends Principal type Request[A] = AuthenticatedRequest[A, Principal] diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/KeyStore.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/KeyStore.scala index 9ea804a277..e7a596afb3 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/KeyStore.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/KeyStore.scala @@ -7,8 +7,9 @@ import org.joda.time.DateTime import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext -class KeyStore(bucket: String, config: CommonConfig)(implicit ec: ExecutionContext) - extends BaseStore[String, ApiAccessor](bucket, config)(ec) { +class KeyStore(bucket: String, config: CommonConfig)(implicit + ec: ExecutionContext +) extends BaseStore[String, ApiAccessor](bucket, config)(ec) { def lookupIdentity(key: String): Option[ApiAccessor] = store.get().get(key) @@ -20,7 +21,8 @@ class KeyStore(bucket: String, config: CommonConfig)(implicit ec: ExecutionConte } private def fetchAll: Map[String, ApiAccessor] = { - val keys = s3.client.listObjects(bucket).getObjectSummaries.asScala.map(_.getKey) + val keys = + s3.client.listObjects(bucket).getObjectSummaries.asScala.map(_.getKey) keys.flatMap(k => getS3Object(k).map(k -> ApiAccessor(_))).toMap } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/PermissionsHandler.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/PermissionsHandler.scala index e81de454dc..fda0c271b1 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/PermissionsHandler.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/PermissionsHandler.scala @@ -1,6 +1,10 @@ package com.gu.mediaservice.lib.auth -import com.gu.mediaservice.lib.auth.Authentication.{MachinePrincipal, UserPrincipal, Principal} +import com.gu.mediaservice.lib.auth.Authentication.{ + MachinePrincipal, + UserPrincipal, + Principal +} import com.gu.mediaservice.lib.aws.S3Ops import com.gu.mediaservice.lib.config.CommonConfig import com.gu.permissions._ @@ -14,13 
+18,26 @@ trait PermissionsHandler { private val permissions: PermissionsProvider = config.awsLocalEndpoint match { case Some(_) if config.isDev && config.useLocalAuth => { - val provider = new S3PermissionsProvider(config.permissionsBucket, "permissions.json", 1.minute, PermissionsS3(S3Ops.buildS3Client(config))) + val provider = new S3PermissionsProvider( + config.permissionsBucket, + "permissions.json", + 1.minute, + PermissionsS3(S3Ops.buildS3Client(config)) + ) provider.start() provider } case _ => { - val permissionsStage = if(config.isProd) { "PROD" } else { "CODE" } - PermissionsProvider(PermissionsConfig(permissionsStage, config.awsRegion, config.awsCredentials, config.permissionsBucket)) + val permissionsStage = if (config.isProd) { "PROD" } + else { "CODE" } + PermissionsProvider( + PermissionsConfig( + permissionsStage, + config.awsRegion, + config.awsCredentials, + config.permissionsBucket + ) + ) } } @@ -28,11 +45,16 @@ trait PermissionsHandler { permissions.storeIsEmpty } - def hasPermission(user: Principal, permission: PermissionDefinition): Boolean = { + def hasPermission( + user: Principal, + permission: PermissionDefinition + ): Boolean = { user match { - case UserPrincipal(_, _, email, _) => permissions.hasPermission(permission, email) + case UserPrincipal(_, _, email, _) => + permissions.hasPermission(permission, email) // think about only allowing certain services i.e. on `service.name`? - case service: MachinePrincipal if service.accessor.tier == Internal => true + case service: MachinePrincipal if service.accessor.tier == Internal => + true case _ => false } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/ApiKeyAuthenticationProvider.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/ApiKeyAuthenticationProvider.scala index 1c71aca703..8ec0133d8c 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/ApiKeyAuthenticationProvider.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/ApiKeyAuthenticationProvider.scala @@ -10,16 +10,25 @@ import play.api.mvc.RequestHeader import scala.concurrent.{ExecutionContext, Future} object ApiKeyAuthenticationProvider { - val ApiKeyHeader: TypedKey[(String, String)] = TypedKey[(String, String)]("ApiKeyHeader") + val ApiKeyHeader: TypedKey[(String, String)] = + TypedKey[(String, String)]("ApiKeyHeader") val apiKeyHeaderName = "X-Gu-Media-Key" } -class ApiKeyAuthenticationProvider(configuration: Configuration, resources: AuthenticationProviderResources) extends MachineAuthenticationProvider with StrictLogging { - implicit val executionContext: ExecutionContext = resources.controllerComponents.executionContext +class ApiKeyAuthenticationProvider( + configuration: Configuration, + resources: AuthenticationProviderResources +) extends MachineAuthenticationProvider + with StrictLogging { + implicit val executionContext: ExecutionContext = + resources.controllerComponents.executionContext var keyStorePlaceholder: Option[KeyStore] = _ override def initialise(): Unit = { - val store = new KeyStore(configuration.get[String]("authKeyStoreBucket"), resources.commonConfig) + val store = new KeyStore( + configuration.get[String]("authKeyStoreBucket"), + resources.commonConfig + ) store.scheduleUpdates(resources.actorSystem.scheduler) keyStorePlaceholder = Some(store) } @@ -28,32 +37,51 @@ class ApiKeyAuthenticationProvider(configuration: Configuration, resources: Auth keyStorePlaceholder.foreach(_.stopUpdates()) } - def keyStore: KeyStore = 
keyStorePlaceholder.getOrElse(throw new IllegalStateException("Not initialised")) + def keyStore: KeyStore = keyStorePlaceholder.getOrElse( + throw new IllegalStateException("Not initialised") + ) - /** - * Establish the authentication status of the given request header. This can return an authenticated user or a number + /** Establish the authentication status of the given request header. This can return an authenticated user or a number * of reasons why a user is not authenticated. * * @param request The request header containing cookies and other request headers that can be used to establish the * authentication status of a request. * @return An authentication status expressing whether the */ - override def authenticateRequest(request: RequestHeader): ApiAuthenticationStatus = { + override def authenticateRequest( + request: RequestHeader + ): ApiAuthenticationStatus = { request.headers.get(ApiKeyAuthenticationProvider.apiKeyHeaderName) match { case Some(key) => keyStore.lookupIdentity(key) match { // api key provided case Some(apiKey) => // valid api key - if (ApiAccessor.hasAccess(apiKey, request, resources.commonConfig.services)) { + if ( + ApiAccessor.hasAccess( + apiKey, + request, + resources.commonConfig.services + ) + ) { // valid api key which has access // store the header that was used in the attributes map of the principal for use in onBehalfOf calls - val accessor = MachinePrincipal(apiKey, TypedMap(ApiKeyAuthenticationProvider.ApiKeyHeader -> (ApiKeyAuthenticationProvider.apiKeyHeaderName -> key))) - logger.info(s"Using api key with name ${apiKey.identity} and tier ${apiKey.tier}", apiKey) + val accessor = MachinePrincipal( + apiKey, + TypedMap( + ApiKeyAuthenticationProvider.ApiKeyHeader -> (ApiKeyAuthenticationProvider.apiKeyHeaderName -> key) + ) + ) + logger.info( + s"Using api key with name ${apiKey.identity} and tier ${apiKey.tier}", + apiKey + ) Authenticated(accessor) } else { // valid api key which doesn't have access - NotAuthorised(s"API key ${apiKey.identity} valid but not authorised for this request") + NotAuthorised( + s"API key ${apiKey.identity} valid but not authorised for this request" + ) } // provided api key not known case None => Invalid("API key not valid") @@ -63,12 +91,18 @@ class ApiKeyAuthenticationProvider(configuration: Configuration, resources: Auth } } - override def onBehalfOf(principal: Principal): Either[String, WSRequest => WSRequest] = { + override def onBehalfOf( + principal: Principal + ): Either[String, WSRequest => WSRequest] = { principal.attributes.get(ApiKeyAuthenticationProvider.ApiKeyHeader) match { - case Some(apiKeyHeaderTuple) => Right { - wsRequest: WSRequest => wsRequest.addHttpHeaders(apiKeyHeaderTuple) - } - case None => Left(s"API key not found in request, no header ${ApiKeyAuthenticationProvider.apiKeyHeaderName}") + case Some(apiKeyHeaderTuple) => + Right { wsRequest: WSRequest => + wsRequest.addHttpHeaders(apiKeyHeaderTuple) + } + case None => + Left( + s"API key not found in request, no header ${ApiKeyAuthenticationProvider.apiKeyHeaderName}" + ) } } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationProvider.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationProvider.scala index 7d599483ed..35bc69c757 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationProvider.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationProvider.scala @@ -9,18 +9,19 @@ import 
play.api.mvc.{ControllerComponents, RequestHeader, Result} import scala.concurrent.Future -/** - * Case class containing useful resources for authentication providers to allow concurrent processing and external +/** Case class containing useful resources for authentication providers to allow concurrent processing and external * API calls to be conducted. * @param commonConfig the Grid common config object * @param actorSystem an actor system * @param wsClient a play WSClient for making API calls * @param controllerComponents play components, including the execution context for example */ -case class AuthenticationProviderResources(commonConfig: CommonConfig, - actorSystem: ActorSystem, - wsClient: WSClient, - controllerComponents: ControllerComponents) +case class AuthenticationProviderResources( + commonConfig: CommonConfig, + actorSystem: ActorSystem, + wsClient: WSClient, + controllerComponents: ControllerComponents +) sealed trait LoginLink case object BuiltInAuthService extends LoginLink @@ -31,8 +32,7 @@ sealed trait AuthenticationProvider { def initialise(): Unit = {} def shutdown(): Future[Unit] = Future.successful(()) - /** - * A function that allows downstream API calls to be made using the credentials of the current principal. + /** A function that allows downstream API calls to be made using the credentials of the current principal. * It is recommended that any data required for this downstream request enrichment is put into the principal's * attribute map when the principal is created in the authenticateRequest call. * @param principal The principal for the current request @@ -47,8 +47,8 @@ object AuthenticationProvider { } trait UserAuthenticationProvider extends AuthenticationProvider { - /** - * Establish the authentication status of the given request header. This can return an authenticated user or a number + + /** Establish the authentication status of the given request header. This can return an authenticated user or a number * of reasons why a user is not authenticated. * @param request The request header containing cookies and other request headers that can be used to establish the * authentication status of a request. @@ -56,32 +56,29 @@ trait UserAuthenticationProvider extends AuthenticationProvider { */ def authenticateRequest(request: RequestHeader): AuthenticationStatus - /** - * If this provider supports sending a user that is not authorised to a federated auth provider then it should + /** If this provider supports sending a user that is not authorised to a federated auth provider then it should * provide a function here to redirect the user. The function signature takes the the request and returns a result * which is likely a redirect to an external authentication system. */ def sendForAuthentication: Option[RequestHeader => Future[Result]] - /** - * If this provider supports sending a user that is not authorised to a federated auth provider then it should + /** If this provider supports sending a user that is not authorised to a federated auth provider then it should * provide a function here that deals with the return of a user from a federated provider. This should be * used to set a cookie or similar to ensure that a subsequent call to authenticateRequest will succeed. If * authentication failed then this should return an appropriate 4xx result. * The function should take the Play request header and the redirect URI that the user should be * sent to on successful completion of the authentication. 
*/ - def sendForAuthenticationCallback: Option[(RequestHeader, Option[RedirectUri]) => Future[Result]] + def sendForAuthenticationCallback + : Option[(RequestHeader, Option[RedirectUri]) => Future[Result]] - /** - * If this provider is able to clear user tokens (i.e. by clearing cookies) then it should provide a function to + /** If this provider is able to clear user tokens (i.e. by clearing cookies) then it should provide a function to * do that here which will be used to log users out and also if the token is invalid. * This function takes the request header and a result to modify and returns the modified result. */ def flushToken: Option[(RequestHeader, Result) => Result] - /** - * The login link is provided to the client to tell them where to go if they are + /** The login link is provided to the client to tell them where to go if they are * not authenticated. By default the Grid provides a link to the authentication * microservice but this behaviour can be modified. If it is not possible to login * or authentication is handled by a proxy you can set this to DisableLoginLink. @@ -92,8 +89,8 @@ trait UserAuthenticationProvider extends AuthenticationProvider { } trait MachineAuthenticationProvider extends AuthenticationProvider { - /** - * Establish the authentication status of the given request header. This can return an authenticated user or a number + + /** Establish the authentication status of the given request header. This can return an authenticated user or a number * of reasons why a user is not authenticated. * @param request The request header containing cookies and other request headers that can be used to establish the * authentication status of a request. diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationProviders.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationProviders.scala index 1d219d8383..588424bd00 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationProviders.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationProviders.scala @@ -1,3 +1,6 @@ package com.gu.mediaservice.lib.auth.provider -case class AuthenticationProviders(userProvider: UserAuthenticationProvider, apiProvider: MachineAuthenticationProvider) +case class AuthenticationProviders( + userProvider: UserAuthenticationProvider, + apiProvider: MachineAuthenticationProvider +) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationStatus.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationStatus.scala index 581eebba17..f34e9da2bd 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationStatus.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/auth/provider/AuthenticationStatus.scala @@ -15,11 +15,15 @@ sealed trait ApiAuthenticationStatus extends AuthenticationStatus /** User authentication is valid */ case class Authenticated(authedUser: Principal) extends ApiAuthenticationStatus + /** User authentication is OK but the user is not authorised to use this system - might be a group or 2FA check failure */ case class NotAuthorised(message: String) extends ApiAuthenticationStatus + /** User authentication token or key (cookie, header, query param) exists but isn't valid - - * the message and exception will be logged but not leaked to user */ -case class Invalid(message: String, throwable: Option[Throwable] = None) extends ApiAuthenticationStatus + * 
the message and exception will be logged but not leaked to user + */ +case class Invalid(message: String, throwable: Option[Throwable] = None) + extends ApiAuthenticationStatus + /** User authentication token doesn't exist */ case object NotAuthenticated extends ApiAuthenticationStatus - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/AwsClientBuilderUtils.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/AwsClientBuilderUtils.scala index 5960315cdb..28d246944b 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/AwsClientBuilderUtils.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/AwsClientBuilderUtils.scala @@ -1,7 +1,11 @@ package com.gu.mediaservice.lib.aws import com.amazonaws.auth.profile.ProfileCredentialsProvider -import com.amazonaws.auth.{AWSCredentialsProvider, AWSCredentialsProviderChain, InstanceProfileCredentialsProvider} +import com.amazonaws.auth.{ + AWSCredentialsProvider, + AWSCredentialsProviderChain, + InstanceProfileCredentialsProvider +} import com.amazonaws.client.builder.AwsClientBuilder import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration import com.gu.mediaservice.lib.logging.GridLogging @@ -16,16 +20,25 @@ trait AwsClientBuilderUtils extends GridLogging { InstanceProfileCredentialsProvider.getInstance() ) - final def awsEndpointConfiguration: Option[EndpointConfiguration] = awsLocalEndpoint match { - case Some(endpoint) if isDev => Some(new EndpointConfiguration(endpoint, awsRegion)) - case _ => None - } + final def awsEndpointConfiguration: Option[EndpointConfiguration] = + awsLocalEndpoint match { + case Some(endpoint) if isDev => + Some(new EndpointConfiguration(endpoint, awsRegion)) + case _ => None + } - final def withAWSCredentials[T, S <: AwsClientBuilder[S, T]](builder: AwsClientBuilder[S, T], localstackAware: Boolean = true): S = { + final def withAWSCredentials[T, S <: AwsClientBuilder[S, T]]( + builder: AwsClientBuilder[S, T], + localstackAware: Boolean = true + ): S = { awsEndpointConfiguration match { case Some(endpointConfiguration) if localstackAware => { - logger.info(s"creating aws client with local endpoint $endpointConfiguration") - builder.withCredentials(awsCredentials).withEndpointConfiguration(endpointConfiguration) + logger.info( + s"creating aws client with local endpoint $endpointConfiguration" + ) + builder + .withCredentials(awsCredentials) + .withEndpointConfiguration(endpointConfiguration) } case _ => builder.withCredentials(awsCredentials).withRegion(awsRegion) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/DynamoDB.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/DynamoDB.scala index 486eccbdf8..8d4385cdab 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/DynamoDB.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/DynamoDB.scala @@ -1,11 +1,19 @@ package com.gu.mediaservice.lib.aws import com.amazonaws.AmazonServiceException -import com.amazonaws.services.dynamodbv2.document.spec.{DeleteItemSpec, GetItemSpec, PutItemSpec, UpdateItemSpec} +import com.amazonaws.services.dynamodbv2.document.spec.{ + DeleteItemSpec, + GetItemSpec, + PutItemSpec, + UpdateItemSpec +} import com.amazonaws.services.dynamodbv2.document.utils.ValueMap import com.amazonaws.services.dynamodbv2.document.{DynamoDB => AwsDynamoDB, _} import com.amazonaws.services.dynamodbv2.model.ReturnValue -import com.amazonaws.services.dynamodbv2.{AmazonDynamoDBAsync, AmazonDynamoDBAsyncClientBuilder} +import 
com.amazonaws.services.dynamodbv2.{ + AmazonDynamoDBAsync, + AmazonDynamoDBAsyncClientBuilder +} import com.gu.mediaservice.lib.config.CommonConfig import play.api.libs.json._ @@ -16,26 +24,29 @@ object NoItemFound extends Throwable("item not found") class DynamoDB(config: CommonConfig, tableName: String) { - lazy val client: AmazonDynamoDBAsync = config.withAWSCredentials(AmazonDynamoDBAsyncClientBuilder.standard()).build() + lazy val client: AmazonDynamoDBAsync = config + .withAWSCredentials(AmazonDynamoDBAsyncClientBuilder.standard()) + .build() lazy val dynamo = new AwsDynamoDB(client) lazy val table: Table = dynamo.getTable(tableName) val IdKey = "id" - def exists(id: String)(implicit ex: ExecutionContext): Future[Boolean] = Future { + def exists(id: String)(implicit ex: ExecutionContext): Future[Boolean] = + Future { table.getItem(new GetItemSpec().withPrimaryKey(IdKey, id)) - } map(Option(_).isDefined) + } map (Option(_).isDefined) - def get(id: String) - (implicit ex: ExecutionContext): Future[JsObject] = Future { - table.getItem( - new GetItemSpec(). - withPrimaryKey(IdKey, id) - ) - } flatMap itemOrNotFound map asJsObject + def get(id: String)(implicit ex: ExecutionContext): Future[JsObject] = + Future { + table.getItem( + new GetItemSpec().withPrimaryKey(IdKey, id) + ) + } flatMap itemOrNotFound map asJsObject - private def get(id: String, key: String) - (implicit ex: ExecutionContext): Future[Item] = Future { + private def get(id: String, key: String)(implicit + ex: ExecutionContext + ): Future[Item] = Future { table.getItem( new GetItemSpec() .withPrimaryKey(IdKey, id) @@ -50,119 +61,139 @@ class DynamoDB(config: CommonConfig, tableName: String) { } } - def removeKey(id: String, key: String) - (implicit ex: ExecutionContext): Future[JsObject] = + def removeKey(id: String, key: String)(implicit + ex: ExecutionContext + ): Future[JsObject] = update( id, s"REMOVE $key" ) - def deleteItem(id: String)(implicit ex: ExecutionContext): Future[Unit] = Future { - table.deleteItem(new DeleteItemSpec().withPrimaryKey(IdKey, id)) - } + def deleteItem(id: String)(implicit ex: ExecutionContext): Future[Unit] = + Future { + table.deleteItem(new DeleteItemSpec().withPrimaryKey(IdKey, id)) + } - def booleanGet(id: String, key: String) - (implicit ex: ExecutionContext): Future[Option[Boolean]] = + def booleanGet(id: String, key: String)(implicit + ex: ExecutionContext + ): Future[Option[Boolean]] = // TODO: add Option to item as it can be null - get(id, key).map{ item => item.get(key) match { - case b: java.lang.Boolean => Some(b.booleanValue) - case _ => None - }} + get(id, key).map { item => + item.get(key) match { + case b: java.lang.Boolean => Some(b.booleanValue) + case _ => None + } + } - def booleanSet(id: String, key: String, value: Boolean) - (implicit ex: ExecutionContext): Future[JsObject] = + def booleanSet(id: String, key: String, value: Boolean)(implicit + ex: ExecutionContext + ): Future[JsObject] = update( id, s"SET $key = :value", new ValueMap().withBoolean(":value", value) ) - def booleanSetOrRemove(id: String, key: String, value: Boolean) - (implicit ex: ExecutionContext): Future[JsObject] = + def booleanSetOrRemove(id: String, key: String, value: Boolean)(implicit + ex: ExecutionContext + ): Future[JsObject] = if (value) booleanSet(id, key, value) else removeKey(id, key) - def stringSet(id: String, key: String, value: JsValue) - (implicit ex: ExecutionContext): Future[JsObject] = + def stringSet(id: String, key: String, value: JsValue)(implicit + ex: ExecutionContext + ): 
Future[JsObject] = update( id, s"SET $key = :value", valueMapWithNullForEmptyString(Map(":value" -> value)) ) - - def setGet(id: String, key: String) - (implicit ex: ExecutionContext): Future[Set[String]] = - get(id, key).map{ item => Option(item.getStringSet(key)) match { + def setGet(id: String, key: String)(implicit + ex: ExecutionContext + ): Future[Set[String]] = + get(id, key).map { item => + Option(item.getStringSet(key)) match { case Some(set) => set.asScala.toSet case None => Set() } } - def setAdd(id: String, key: String, value: String) - (implicit ex: ExecutionContext): Future[JsObject] = + def setAdd(id: String, key: String, value: String)(implicit + ex: ExecutionContext + ): Future[JsObject] = update( id, s"ADD $key :value", new ValueMap().withStringSet(":value", value) ) - def setAdd(id: String, key: String, value: List[String]) - (implicit ex: ExecutionContext): Future[JsObject] = + def setAdd(id: String, key: String, value: List[String])(implicit + ex: ExecutionContext + ): Future[JsObject] = update( id, s"ADD $key :value", - new ValueMap().withStringSet(":value", value:_*) + new ValueMap().withStringSet(":value", value: _*) ) - - def jsonGet(id: String, key: String) - (implicit ex: ExecutionContext): Future[JsValue] = - get(id, key).map(item => asJsObject(item)) + def jsonGet(id: String, key: String)(implicit + ex: ExecutionContext + ): Future[JsValue] = + get(id, key).map(item => asJsObject(item)) // We cannot update, so make sure you send over the WHOLE document - def jsonAdd(id: String, key: String, value: Map[String, JsValue]) - (implicit ex: ExecutionContext): Future[JsObject] = + def jsonAdd(id: String, key: String, value: Map[String, JsValue])(implicit + ex: ExecutionContext + ): Future[JsObject] = update( id, s"SET $key = :value", - new ValueMap().withMap(":value", valueMapWithNullForEmptyString(value)) + new ValueMap().withMap(":value", valueMapWithNullForEmptyString(value)) ) - def setDelete(id: String, key: String, value: String) - (implicit ex: ExecutionContext): Future[JsObject] = + def setDelete(id: String, key: String, value: String)(implicit + ex: ExecutionContext + ): Future[JsObject] = update( id, s"DELETE $key :value", new ValueMap().withStringSet(":value", value) ) - def listGet[T](id: String, key: String) - (implicit ex: ExecutionContext, reads: Reads[T]): Future[List[T]] = { + def listGet[T](id: String, key: String)(implicit + ex: ExecutionContext, + reads: Reads[T] + ): Future[List[T]] = { get(id, key) map { item => Option(item.toJSON) match { case Some(json) => (Json.parse(json) \ key).as[List[T]] - case None => Nil + case None => Nil } } } - def listAdd[T](id: String, key: String, value: T) - (implicit ex: ExecutionContext, tjs: Writes[T], rjs: Reads[T]): Future[List[T]] = { + def listAdd[T](id: String, key: String, value: T)(implicit + ex: ExecutionContext, + tjs: Writes[T], + rjs: Reads[T] + ): Future[List[T]] = { // TODO: Deal with the case that we don't have JSON serialisers, for now we just fail. val json = Json.toJson(value).as[JsObject] val valueMap = DynamoDB.jsonToValueMap(json) def append = update( - id, s"SET $key = list_append($key, :value)", + id, + s"SET $key = list_append($key, :value)", new ValueMap().withList(":value", valueMap) ) def create = update( - id, s"SET $key = :value", + id, + s"SET $key = :value", new ValueMap().withList(":value", valueMap) ) @@ -171,20 +202,28 @@ class DynamoDB(config: CommonConfig, tableName: String) { // Append to the list => if it doesn't exist => create it with the initial value. 
append.map(j => (j \ key).as[List[T]]) recoverWith { case err: AmazonServiceException => create.map(j => (j \ key).as[List[T]]) - case err => throw err + case err => throw err } } - def listRemoveIndexes[T](id: String, key: String, indexes: List[Int]) - (implicit ex: ExecutionContext, rjs: Reads[T]): Future[List[T]] = + def listRemoveIndexes[T](id: String, key: String, indexes: List[Int])(implicit + ex: ExecutionContext, + rjs: Reads[T] + ): Future[List[T]] = update( - id, s"REMOVE ${indexes.map(i => s"$key[$i]").mkString(",")}" - ) map(j => (j \ key).as[List[T]]) + id, + s"REMOVE ${indexes.map(i => s"$key[$i]").mkString(",")}" + ) map (j => (j \ key).as[List[T]]) - def objPut[T](id: String, key: String, value: T) - (implicit ex: ExecutionContext, wjs: Writes[T], rjs: Reads[T]): Future[T] = Future { + def objPut[T](id: String, key: String, value: T)(implicit + ex: ExecutionContext, + wjs: Writes[T], + rjs: Reads[T] + ): Future[T] = Future { - val item = new Item().withPrimaryKey(IdKey, id).withJSON(key, Json.toJson(value).toString) + val item = new Item() + .withPrimaryKey(IdKey, id) + .withJSON(key, Json.toJson(value).toString) val spec = new PutItemSpec().withItem(item) table.putItem(spec) @@ -196,24 +235,26 @@ class DynamoDB(config: CommonConfig, tableName: String) { table.scan().iterator.asScala.toList } map (_.map(asJsObject)) - def update(id: String, expression: String, valueMap: ValueMap) - (implicit ex: ExecutionContext): Future[JsObject] = + def update(id: String, expression: String, valueMap: ValueMap)(implicit + ex: ExecutionContext + ): Future[JsObject] = update(id, expression, Some(valueMap)) - def update(id: String, expression: String, valueMap: Option[ValueMap] = None) - (implicit ex: ExecutionContext): Future[JsObject] = Future { + def update(id: String, expression: String, valueMap: Option[ValueMap] = None)( + implicit ex: ExecutionContext + ): Future[JsObject] = Future { - val baseUpdateSpec = new UpdateItemSpec(). - withPrimaryKey(IdKey, id). - withUpdateExpression(expression). - withReturnValues(ReturnValue.ALL_NEW) + val baseUpdateSpec = new UpdateItemSpec() + .withPrimaryKey(IdKey, id) + .withUpdateExpression(expression) + .withReturnValues(ReturnValue.ALL_NEW) - val updateSpec = valueMap.map(baseUpdateSpec.withValueMap(_)) getOrElse baseUpdateSpec + val updateSpec = + valueMap.map(baseUpdateSpec.withValueMap(_)) getOrElse baseUpdateSpec table.updateItem(updateSpec) } map asJsObject - // FIXME: surely there must be a better way to convert? def asJsObject(item: Item): JsObject = { jsonWithNullAsEmptyString(Json.parse(item.toJSON)).as[JsObject] - IdKey @@ -227,21 +268,25 @@ class DynamoDB(config: CommonConfig, tableName: String) { // fenced in this Dynamo play area. `null` is continual and big annoyance with AWS libs. 
// see: https://forums.aws.amazon.com/message.jspa?messageID=389032 // see: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html - def mapJsValue(jsValue: JsValue)(f: JsValue => JsValue): JsValue = jsValue match { - case JsObject(items) => JsObject(items.map{ case (k, v) => k -> mapJsValue(v)(f) }) - case JsArray(items) => JsArray(items.map(f)) - case value => f(value) - } + def mapJsValue(jsValue: JsValue)(f: JsValue => JsValue): JsValue = + jsValue match { + case JsObject(items) => + JsObject(items.map { case (k, v) => k -> mapJsValue(v)(f) }) + case JsArray(items) => JsArray(items.map(f)) + case value => f(value) + } - def jsonWithNullAsEmptyString(jsValue: JsValue): JsValue = mapJsValue(jsValue) { - case JsNull => JsString("") - case value => value - } + def jsonWithNullAsEmptyString(jsValue: JsValue): JsValue = + mapJsValue(jsValue) { + case JsNull => JsString("") + case value => value + } def valueMapWithNullForEmptyString(value: Map[String, JsValue]) = { val valueMap = new ValueMap() - value.map { case(k, v) => (k, if (v == JsNull) null else v) } - .foreach { case(k, v) => valueMap.withJSON(k, Json.stringify(v)) } + value + .map { case (k, v) => (k, if (v == JsNull) null else v) } + .foreach { case (k, v) => valueMap.withJSON(k, Json.stringify(v)) } valueMap } @@ -261,10 +306,14 @@ object DynamoDB { // TODO: Lists of different Types? JsArray is not type safe (because json lists aren't) // so this leaves us in a bit of a pickle when converting them. So for now we only support // List[String] - case v: JsArray => valueMap.withList(key, v.value.map { - case i: JsString => i.value - case i: JsValue => i.toString - }: _*) + case v: JsArray => + valueMap.withList( + key, + v.value.map { + case i: JsString => i.value + case i: JsValue => i.toString + }: _* + ) case _ => valueMap } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/Kinesis.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/Kinesis.scala index 05453cfa90..e943b64cf2 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/Kinesis.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/Kinesis.scala @@ -4,7 +4,10 @@ import java.nio.ByteBuffer import java.util.UUID import com.amazonaws.services.kinesis.model.PutRecordRequest -import com.amazonaws.services.kinesis.{AmazonKinesis, AmazonKinesisClientBuilder} +import com.amazonaws.services.kinesis.{ + AmazonKinesis, + AmazonKinesisClientBuilder +} import com.gu.mediaservice.lib.config.CommonConfig import com.gu.mediaservice.lib.json.JsonByteArrayUtil import com.gu.mediaservice.model.usage.UsageNotice @@ -15,30 +18,33 @@ import com.gu.mediaservice.lib.logging.GridLogging import org.joda.time.DateTime case class KinesisSenderConfig( - override val awsRegion: String, - override val awsCredentials: AWSCredentialsProvider, - override val awsLocalEndpoint: Option[String], - override val isDev: Boolean, - streamName: String + override val awsRegion: String, + override val awsCredentials: AWSCredentialsProvider, + override val awsLocalEndpoint: Option[String], + override val isDev: Boolean, + streamName: String ) extends AwsClientBuilderUtils -class Kinesis(config: KinesisSenderConfig) extends GridLogging{ +class Kinesis(config: KinesisSenderConfig) extends GridLogging { private val builder = AmazonKinesisClientBuilder.standard() - private def getKinesisClient: AmazonKinesis = config.withAWSCredentials(builder).build() + private def getKinesisClient: AmazonKinesis = + config.withAWSCredentials(builder).build() 
private lazy val kinesisClient: AmazonKinesis = getKinesisClient def publish(message: UpdateMessage) { val partitionKey = UUID.randomUUID().toString - implicit val yourJodaDateWrites: Writes[DateTime] = JodaWrites.JodaDateTimeWrites + implicit val yourJodaDateWrites: Writes[DateTime] = + JodaWrites.JodaDateTimeWrites implicit val unw: Writes[UsageNotice] = Json.writes[UsageNotice] val payload = JsonByteArrayUtil.toByteArray(message) - val markers: LogstashMarker = message.toLogMarker.and(Markers.append("compressed-size", payload.length)) + val markers: LogstashMarker = + message.toLogMarker.and(Markers.append("compressed-size", payload.length)) logger.info(markers, "Publishing message to kinesis") val data = ByteBuffer.wrap(payload) @@ -58,4 +64,3 @@ class Kinesis(config: KinesisSenderConfig) extends GridLogging{ } } } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/S3.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/S3.scala index fafc81872b..2706569991 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/S3.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/S3.scala @@ -19,9 +19,16 @@ import scala.concurrent.{ExecutionContext, Future} case class S3Object(uri: URI, size: Long, metadata: S3Metadata) -case class S3Metadata(userMetadata: Map[String, String], objectMetadata: S3ObjectMetadata) +case class S3Metadata( + userMetadata: Map[String, String], + objectMetadata: S3ObjectMetadata +) -case class S3ObjectMetadata(contentType: Option[MimeType], cacheControl: Option[String], lastModified: Option[DateTime] = None) +case class S3ObjectMetadata( + contentType: Option[MimeType], + cacheControl: Option[String], + lastModified: Option[DateTime] = None +) class S3(config: CommonConfig) extends GridLogging { type Bucket = String @@ -32,7 +39,8 @@ class S3(config: CommonConfig) extends GridLogging { lazy val client: AmazonS3 = S3Ops.buildS3Client(config) // also create a legacy client that uses v2 signatures for URL signing - private lazy val legacySigningClient: AmazonS3 = S3Ops.buildS3Client(config, forceV2Sigs = true) + private lazy val legacySigningClient: AmazonS3 = + S3Ops.buildS3Client(config, forceV2Sigs = true) private val log = LoggerFactory.getLogger(getClass) private def removeExtension(filename: String): String = { @@ -40,11 +48,15 @@ class S3(config: CommonConfig) extends GridLogging { regex.replaceAllIn(filename, "") } - private def getContentDispositionFilename(image: Image, imageType: ImageType, charset: Charset): String = { + private def getContentDispositionFilename( + image: Image, + imageType: ImageType, + charset: Charset + ): String = { val asset = imageType match { - case Source => image.source - case Thumbnail => image.thumbnail.getOrElse(image.source) + case Source => image.source + case Thumbnail => image.thumbnail.getOrElse(image.source) case OptimisedPng => image.optimisedPng.getOrElse(image.source) } @@ -57,7 +69,7 @@ class S3(config: CommonConfig) extends GridLogging { val baseFilename: String = image.uploadInfo.filename match { case Some(f) => s"${removeExtension(f)} (${image.id})$extension" - case _ => s"${image.id}$extension" + case _ => s"${image.id}$extension" } charset.displayName() match { @@ -73,24 +85,43 @@ class S3(config: CommonConfig) extends GridLogging { // use both `filename` and `filename*` parameters for compatibility with user agents not implementing RFC 5987 // they'll fallback to `filename`, which will be a UTF-8 string decoded as Latin-1 - this is a rubbish string, but only rubbish 
browsers don't support RFC 5987 (IE8 back) // See http://tools.ietf.org/html/rfc6266#section-5 - s"""attachment; filename="${getContentDispositionFilename(image, imageType, StandardCharsets.ISO_8859_1)}"; filename*=UTF-8''${getContentDispositionFilename(image, imageType, StandardCharsets.UTF_8)}""" + s"""attachment; filename="${getContentDispositionFilename( + image, + imageType, + StandardCharsets.ISO_8859_1 + )}"; filename*=UTF-8''${getContentDispositionFilename( + image, + imageType, + StandardCharsets.UTF_8 + )}""" } - private def roundDateTime(t: DateTime, d: Duration): DateTime = t minus (t.getMillis - (t.getMillis.toDouble / d.getMillis).round * d.getMillis) + private def roundDateTime(t: DateTime, d: Duration): DateTime = + t minus (t.getMillis - (t.getMillis.toDouble / d.getMillis).round * d.getMillis) // Round expiration time to try and hit the cache as much as possible // TODO: do we really need these expiration tokens? they kill our ability to cache... - private def defaultExpiration: DateTime = roundDateTime(DateTime.now, Duration.standardMinutes(10)).plusMinutes(20) - - def signUrl(bucket: Bucket, url: URI, image: Image, expiration: DateTime = defaultExpiration, imageType: ImageType = Source): String = { + private def defaultExpiration: DateTime = + roundDateTime(DateTime.now, Duration.standardMinutes(10)).plusMinutes(20) + + def signUrl( + bucket: Bucket, + url: URI, + image: Image, + expiration: DateTime = defaultExpiration, + imageType: ImageType = Source + ): String = { // get path and remove leading `/` val key: Key = url.getPath.drop(1) val contentDisposition = getContentDisposition(image, imageType) - val headers = new ResponseHeaderOverrides().withContentDisposition(contentDisposition) + val headers = + new ResponseHeaderOverrides().withContentDisposition(contentDisposition) - val request = new GeneratePresignedUrlRequest(bucket, key).withExpiration(expiration.toDate).withResponseHeaders(headers) + val request = new GeneratePresignedUrlRequest(bucket, key) + .withExpiration(expiration.toDate) + .withResponseHeaders(headers) legacySigningClient.generatePresignedUrl(request).toExternalForm } @@ -109,14 +140,19 @@ class S3(config: CommonConfig) extends GridLogging { case e: AmazonServiceException if e.getErrorCode == "NoSuchKey" => log.warn(s"Cannot find key: $key in bucket: $bucket") None - } - finally { + } finally { stream.close() } } - def store(bucket: Bucket, id: Key, file: File, mimeType: Option[MimeType], meta: UserMetadata = Map.empty, cacheControl: Option[String] = None) - (implicit ex: ExecutionContext, logMarker: LogMarker): Future[Unit] = + def store( + bucket: Bucket, + id: Key, + file: File, + mimeType: Option[MimeType], + meta: UserMetadata = Map.empty, + cacheControl: Option[String] = None + )(implicit ex: ExecutionContext, logMarker: LogMarker): Future[Unit] = Future { val metadata = new ObjectMetadata mimeType.foreach(m => metadata.setContentType(m.name)) @@ -126,26 +162,38 @@ class S3(config: CommonConfig) extends GridLogging { val fileMarkers = Map( "bucket" -> bucket, "fileName" -> id, - "mimeType" -> mimeType.getOrElse("none"), + "mimeType" -> mimeType.getOrElse("none") ) val markers = logMarker ++ fileMarkers val req = new PutObjectRequest(bucket, id, file).withMetadata(metadata) - Stopwatch(s"S3 client.putObject ($req)"){ + Stopwatch(s"S3 client.putObject ($req)") { client.putObject(req) }(markers) } - def list(bucket: Bucket, prefixDir: String) - (implicit ex: ExecutionContext): Future[List[S3Object]] = + def list(bucket: Bucket, prefixDir: 
String)(implicit + ex: ExecutionContext + ): Future[List[S3Object]] = Future { - val req = new ListObjectsRequest().withBucketName(bucket).withPrefix(s"$prefixDir/") + val req = new ListObjectsRequest() + .withBucketName(bucket) + .withPrefix(s"$prefixDir/") val listing = client.listObjects(req) val summaries = listing.getObjectSummaries.asScala - summaries.map(summary => (summary.getKey, summary)).foldLeft(List[S3Object]()) { - case (memo: List[S3Object], (key: String, summary: S3ObjectSummary)) => - S3Object(objectUrl(bucket, key), summary.getSize, getMetadata(bucket, key)) :: memo - } + summaries + .map(summary => (summary.getKey, summary)) + .foldLeft(List[S3Object]()) { + case ( + memo: List[S3Object], + (key: String, summary: S3ObjectSummary) + ) => + S3Object( + objectUrl(bucket, key), + summary.getSize, + getMetadata(bucket, key) + ) :: memo + } } def getMetadata(bucket: Bucket, key: Key): S3Metadata = { @@ -165,7 +213,9 @@ class S3(config: CommonConfig) extends GridLogging { client.getObjectMetadata(bucket, key).getUserMetadata.asScala.toMap def syncFindKey(bucket: Bucket, prefixName: String): Option[Key] = { - val req = new ListObjectsRequest().withBucketName(bucket).withPrefix(s"$prefixName-") + val req = new ListObjectsRequest() + .withBucketName(bucket) + .withPrefix(s"$prefixName-") val listing = client.listObjects(req) val summaries = listing.getObjectSummaries.asScala summaries.headOption.map(_.getKey) @@ -178,7 +228,11 @@ object S3Ops { // TODO: Make this region aware - i.e. RegionUtils.getRegion(region).getServiceEndpoint(AmazonS3.ENDPOINT_PREFIX) private val s3Endpoint = "s3.amazonaws.com" - def buildS3Client(config: CommonConfig, forceV2Sigs: Boolean = false, localstackAware: Boolean = true): AmazonS3 = { + def buildS3Client( + config: CommonConfig, + forceV2Sigs: Boolean = false, + localstackAware: Boolean = true + ): AmazonS3 = { val clientConfig = new ClientConfiguration() // Option to disable v4 signatures (https://github.com/aws/aws-sdk-java/issues/372) which is required by imgops @@ -193,7 +247,8 @@ object S3Ops { // see https://github.com/localstack/localstack/issues/1512 AmazonS3ClientBuilder.standard().withPathStyleAccessEnabled(true) } - case _ => AmazonS3ClientBuilder.standard().withClientConfiguration(clientConfig) + case _ => + AmazonS3ClientBuilder.standard().withClientConfiguration(clientConfig) } config.withAWSCredentials(builder, localstackAware).build() @@ -204,7 +259,13 @@ object S3Ops { new URI("http", bucketUrl, s"/$key", null) } - def projectFileAsS3Object(url: URI, file: File, mimeType: Option[MimeType], meta: Map[String, String], cacheControl: Option[String]): S3Object = { + def projectFileAsS3Object( + url: URI, + file: File, + mimeType: Option[MimeType], + meta: Map[String, String], + cacheControl: Option[String] + ): S3Object = { S3Object( url, file.length, @@ -218,7 +279,20 @@ object S3Ops { ) } - def projectFileAsS3Object(bucket: String, key: String, file: File, mimeType: Option[MimeType], meta: Map[String, String] = Map.empty, cacheControl: Option[String] = None): S3Object = { - projectFileAsS3Object(objectUrl(bucket, key), file, mimeType, meta, cacheControl) + def projectFileAsS3Object( + bucket: String, + key: String, + file: File, + mimeType: Option[MimeType], + meta: Map[String, String] = Map.empty, + cacheControl: Option[String] = None + ): S3Object = { + projectFileAsS3Object( + objectUrl(bucket, key), + file, + mimeType, + meta, + cacheControl + ) } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/SNS.scala 
b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/SNS.scala index e1152e0594..0714f84202 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/SNS.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/SNS.scala @@ -7,10 +7,13 @@ import com.gu.mediaservice.lib.logging.GridLogging import play.api.libs.json.{JsValue, Json} class SNS(config: CommonConfig, topicArn: String) extends GridLogging { - lazy val client: AmazonSNS = config.withAWSCredentials(AmazonSNSClientBuilder.standard()).build() + lazy val client: AmazonSNS = + config.withAWSCredentials(AmazonSNSClientBuilder.standard()).build() def publish(message: JsValue, subject: String) { - val result = client.publish(new PublishRequest(topicArn, Json.stringify(message), subject)) + val result = client.publish( + new PublishRequest(topicArn, Json.stringify(message), subject) + ) logger.info(s"Published message: $result") } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/SqsMessageConsumer.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/SqsMessageConsumer.scala index 803cae9f2a..7182e2e058 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/SqsMessageConsumer.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/SqsMessageConsumer.scala @@ -7,7 +7,11 @@ import _root_.play.api.libs.functional.syntax._ import _root_.play.api.libs.json._ import akka.actor.ActorSystem import com.amazonaws.services.cloudwatch.model.Dimension -import com.amazonaws.services.sqs.model.{DeleteMessageRequest, ReceiveMessageRequest, Message => SQSMessage} +import com.amazonaws.services.sqs.model.{ + DeleteMessageRequest, + ReceiveMessageRequest, + Message => SQSMessage +} import com.amazonaws.services.sqs.{AmazonSQS, AmazonSQSClientBuilder} import com.gu.mediaservice.lib.ImageId import com.gu.mediaservice.lib.config.CommonConfig @@ -22,7 +26,11 @@ import scala.collection.JavaConverters._ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} -abstract class SqsMessageConsumer(queueUrl: String, config: CommonConfig, metric: Metric[Long]) extends ImageId { +abstract class SqsMessageConsumer( + queueUrl: String, + config: CommonConfig, + metric: Metric[Long] +) extends ImageId { val actorSystem = ActorSystem("MessageConsumer") private implicit val ctx: ExecutionContext = @@ -31,7 +39,8 @@ abstract class SqsMessageConsumer(queueUrl: String, config: CommonConfig, metric def startSchedule(): Unit = actorSystem.scheduler.scheduleOnce(0.seconds)(processMessages()) - lazy val client: AmazonSQS = config.withAWSCredentials(AmazonSQSClientBuilder.standard()).build() + lazy val client: AmazonSQS = + config.withAWSCredentials(AmazonSQSClientBuilder.standard()).build() def chooseProcessor(subject: String): Option[JsValue => Future[Any]] @@ -42,11 +51,15 @@ abstract class SqsMessageConsumer(queueUrl: String, config: CommonConfig, metric // http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-long-polling.html for (msg <- getMessages(waitTime = 20, maxMessages = 1)) { val future = for { - message <- Future(extractSNSMessage(msg) getOrElse sys.error("Invalid message structure (not via SNS?)")) + message <- Future( + extractSNSMessage(msg) getOrElse sys.error( + "Invalid message structure (not via SNS?)" + ) + ) processor = message.subject.flatMap(chooseProcessor) _ <- processor.fold( - sys.error(s"Unrecognised message subject ${message.subject}"))( - _.apply(message.body)) + sys.error(s"Unrecognised message subject ${message.subject}") + 
)(_.apply(message.body)) _ = recordMessageCount(message) } yield () future |> deleteOnSuccess(msg) @@ -57,8 +70,9 @@ abstract class SqsMessageConsumer(queueUrl: String, config: CommonConfig, metric private def recordMessageCount(message: SNSMessage) = { val dimensions = message.subject match { - case Some(subject) => List(new Dimension().withName("subject").withValue(subject)) - case None => List() + case Some(subject) => + List(new Dimension().withName("subject").withValue(subject)) + case None => List() } metric.runRecordOne(1L, dimensions) } @@ -67,17 +81,25 @@ abstract class SqsMessageConsumer(queueUrl: String, config: CommonConfig, metric f.foreach { _ => deleteMessage(msg) } private def getMessages(waitTime: Int, maxMessages: Int): Seq[SQSMessage] = - client.receiveMessage( - new ReceiveMessageRequest(queueUrl) - .withWaitTimeSeconds(waitTime) - .withMaxNumberOfMessages(maxMessages) - ).getMessages.asScala.toList + client + .receiveMessage( + new ReceiveMessageRequest(queueUrl) + .withWaitTimeSeconds(waitTime) + .withMaxNumberOfMessages(maxMessages) + ) + .getMessages + .asScala + .toList private def extractSNSMessage(sqsMessage: SQSMessage): Option[SNSMessage] = - Json.fromJson[SNSMessage](Json.parse(sqsMessage.getBody)) <| logParseErrors |> (_.asOpt) + Json.fromJson[SNSMessage]( + Json.parse(sqsMessage.getBody) + ) <| logParseErrors |> (_.asOpt) private def deleteMessage(message: SQSMessage): Unit = - client.deleteMessage(new DeleteMessageRequest(queueUrl, message.getReceiptHandle)) + client.deleteMessage( + new DeleteMessageRequest(queueUrl, message.getReceiptHandle) + ) } // TODO: improve and use this (for logging especially) else where. @@ -85,12 +107,12 @@ case class EsResponse(message: String) case class SNSBodyParseError(message: String) extends Exception case class SNSMessage( - messageType: String, - messageId: String, - topicArn: String, - subject: Option[String], - timestamp: DateTime, - body: JsValue + messageType: String, + messageId: String, + topicArn: String, + subject: Option[String], + timestamp: DateTime, + body: JsValue ) object SNSMessage { @@ -105,5 +127,5 @@ object SNSMessage { (__ \ "Subject").readNullable[String] ~ (__ \ "Timestamp").read[String].map(parseTimestamp) ~ (__ \ "Message").read[String].map(Json.parse) - )(SNSMessage(_, _, _, _, _, _)) + )(SNSMessage(_, _, _, _, _, _)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/ThrallMessageSender.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/ThrallMessageSender.scala index e482dc79ba..ce886df3b1 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/ThrallMessageSender.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/aws/ThrallMessageSender.scala @@ -20,8 +20,8 @@ class ThrallMessageSender(config: KinesisSenderConfig) { } case class BulkIndexRequest( - bucket: String, - key: String + bucket: String, + key: String ) object BulkIndexRequest { @@ -30,7 +30,8 @@ object BulkIndexRequest { } object UpdateMessage extends GridLogging { - implicit val yourJodaDateReads = JodaReads.DefaultJodaDateTimeReads.map(d => d.withZone(DateTimeZone.UTC)) + implicit val yourJodaDateReads = + JodaReads.DefaultJodaDateTimeReads.map(d => d.withZone(DateTimeZone.UTC)) implicit val yourJodaDateWrites = JodaWrites.JodaDateTimeWrites implicit val unw = Json.writes[UsageNotice] implicit val unr = Json.reads[UsageNotice] @@ -43,13 +44,18 @@ object UpdateMessage extends GridLogging { (__ \ "usageNotice").readNullable[UsageNotice] ~ (__ \ "edits").readNullable[Edits] ~ // We 
seem to get messages from _somewhere which don't have last modified on them. - (__ \ "lastModified").readNullable[DateTime].map{ d => d match { - case Some(date) => date - case None => { - logger.warn("Message received without a last modified date", __.toJsonString) - DateTime.now(DateTimeZone.UTC) + (__ \ "lastModified").readNullable[DateTime].map { d => + d match { + case Some(date) => date + case None => { + logger.warn( + "Message received without a last modified date", + __.toJsonString + ) + DateTime.now(DateTimeZone.UTC) + } } - }} ~ + } ~ (__ \ "collections").readNullable[Seq[Collection]] ~ (__ \ "leaseId").readNullable[String] ~ (__ \ "crops").readNullable[Seq[Crop]] ~ @@ -62,23 +68,23 @@ object UpdateMessage extends GridLogging { // TODO add RequestID case class UpdateMessage( - subject: String, - image: Option[Image] = None, - id: Option[String] = None, - usageNotice: Option[UsageNotice] = None, - edits: Option[Edits] = None, - lastModified: DateTime = DateTime.now(DateTimeZone.UTC), - collections: Option[Seq[Collection]] = None, - leaseId: Option[String] = None, - crops: Option[Seq[Crop]] = None, - mediaLease: Option[MediaLease] = None, - leases: Option[Seq[MediaLease]] = None, - syndicationRights: Option[SyndicationRights] = None, - bulkIndexRequest: Option[BulkIndexRequest] = None + subject: String, + image: Option[Image] = None, + id: Option[String] = None, + usageNotice: Option[UsageNotice] = None, + edits: Option[Edits] = None, + lastModified: DateTime = DateTime.now(DateTimeZone.UTC), + collections: Option[Seq[Collection]] = None, + leaseId: Option[String] = None, + crops: Option[Seq[Crop]] = None, + mediaLease: Option[MediaLease] = None, + leases: Option[Seq[MediaLease]] = None, + syndicationRights: Option[SyndicationRights] = None, + bulkIndexRequest: Option[BulkIndexRequest] = None ) extends LogMarker { override def markerContents = { val message = Json.stringify(Json.toJson(this)) - Map ( + Map( "subject" -> subject, "id" -> id.getOrElse(image.map(_.id).getOrElse("none")), "size" -> message.getBytes.length, diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/BBCMetadataProcessor.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/BBCMetadataProcessor.scala index b52ef1a5f6..052a4191e6 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/BBCMetadataProcessor.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/BBCMetadataProcessor.scala @@ -1,7 +1,14 @@ package com.gu.mediaservice.lib.bbc -import com.gu.mediaservice.lib.bbc.components.{BBCDependenciesConfig, BBCImageProcessorsDependencies} -import com.gu.mediaservice.lib.cleanup.{ImageProcessor, ImageProcessorResources, MetadataCleaners} +import com.gu.mediaservice.lib.bbc.components.{ + BBCDependenciesConfig, + BBCImageProcessorsDependencies +} +import com.gu.mediaservice.lib.cleanup.{ + ImageProcessor, + ImageProcessorResources, + MetadataCleaners +} import com.gu.mediaservice.lib.config.CommonConfig import com.gu.mediaservice.model.Image @@ -13,17 +20,18 @@ image.processors = [ "com.gu.mediaservice.lib.bbc.BBCMetadataProcessor", ... 
] -*/ -class BBCMetadataProcessor(resources: ImageProcessorResources) extends ImageProcessor { + */ +class BBCMetadataProcessor(resources: ImageProcessorResources) + extends ImageProcessor { val config = BBCDependenciesConfig(resources) val metadataStore = BBCImageProcessorsDependencies.metadataStore(config) override def apply(image: Image): Image = { - val metadataConfig = metadataStore.get - val allPhotographers = metadataConfig.allPhotographers - val metaDataCleaner = new MetadataCleaners(allPhotographers) - metaDataCleaner.apply(image) + val metadataConfig = metadataStore.get + val allPhotographers = metadataConfig.allPhotographers + val metaDataCleaner = new MetadataCleaners(allPhotographers) + metaDataCleaner.apply(image) } override def description: String = "BBC Metadata Processor" diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/BBCSupplierProcessors.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/BBCSupplierProcessors.scala index 014fd249a5..86585cdf91 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/BBCSupplierProcessors.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/BBCSupplierProcessors.scala @@ -1,10 +1,36 @@ package com.gu.mediaservice.lib.bbc -import com.gu.mediaservice.lib.bbc.components.{BBCDependenciesConfig, BBCImageProcessorsDependencies} -import com.gu.mediaservice.lib.cleanup.{AapParser, ActionImagesParser, AlamyParser, AllStarParser, ApParser, ComposeImageProcessors, CorbisParser, EpaParser, GettyCreditParser, GettyXmpParser, ImageProcessor, ImageProcessorResources, PaParser, PhotographerParser, ReutersParser, RexParser, RonaldGrantParser} +import com.gu.mediaservice.lib.bbc.components.{ + BBCDependenciesConfig, + BBCImageProcessorsDependencies +} +import com.gu.mediaservice.lib.cleanup.{ + AapParser, + ActionImagesParser, + AlamyParser, + AllStarParser, + ApParser, + ComposeImageProcessors, + CorbisParser, + EpaParser, + GettyCreditParser, + GettyXmpParser, + ImageProcessor, + ImageProcessorResources, + PaParser, + PhotographerParser, + ReutersParser, + RexParser, + RonaldGrantParser +} import com.gu.mediaservice.lib.config.{CommonConfig, KnownPhotographer} import com.gu.mediaservice.lib.config.PhotographersList.caseInsensitiveLookup -import com.gu.mediaservice.model.{ContractPhotographer, Image, Photographer, StaffPhotographer} +import com.gu.mediaservice.model.{ + ContractPhotographer, + Image, + Photographer, + StaffPhotographer +} import play.api.Configuration /* @@ -16,52 +42,70 @@ image.processors = [ "com.gu.mediaservice.lib.bbc.BBCPhotographerParser" ... 
] -*/ + */ -object BBCSupplierProcessors extends ComposeImageProcessors( - GettyXmpParser, - GettyCreditParser, - AapParser, - ActionImagesParser, - AlamyParser, - AllStarParser, - ApParser, - CorbisParser, - EpaParser, - PaParser, - ReutersParser, - RexParser, - RonaldGrantParser -) +object BBCSupplierProcessors + extends ComposeImageProcessors( + GettyXmpParser, + GettyCreditParser, + AapParser, + ActionImagesParser, + AlamyParser, + AllStarParser, + ApParser, + CorbisParser, + EpaParser, + PaParser, + ReutersParser, + RexParser, + RonaldGrantParser + ) -class BBCPhotographerParser(resources: ImageProcessorResources) extends ImageProcessor { +class BBCPhotographerParser(resources: ImageProcessorResources) + extends ImageProcessor { import com.gu.mediaservice.lib.bbc.components.BBCMetadataConfig.companyPhotographersMap val config = BBCDependenciesConfig(resources) val metadataStore = BBCImageProcessorsDependencies.metadataStore(config) lazy val staffPhotographersBBC = metadataStore.get.staffPhotographers - lazy val contractedPhotographersBBC = metadataStore.get.contractedPhotographersMap - + lazy val contractedPhotographersBBC = + metadataStore.get.contractedPhotographersMap def getPhotographer(photographer: String): Option[Photographer] = { - caseInsensitiveLookup(companyPhotographersMap(staffPhotographersBBC), photographer).map { - case KnownPhotographer(name, publication) => StaffPhotographer(name, publication) - }.orElse(caseInsensitiveLookup(companyPhotographersMap(contractedPhotographersBBC), photographer).map { - case KnownPhotographer(name, publication) => ContractPhotographer(name, Some(publication)) - }) + caseInsensitiveLookup( + companyPhotographersMap(staffPhotographersBBC), + photographer + ).map { case KnownPhotographer(name, publication) => + StaffPhotographer(name, publication) + }.orElse( + caseInsensitiveLookup( + companyPhotographersMap(contractedPhotographersBBC), + photographer + ).map { case KnownPhotographer(name, publication) => + ContractPhotographer(name, Some(publication)) + } + ) } override def apply(image: Image): Image = { image.metadata.byline.flatMap { byline => - getPhotographer(byline).map{ - case p: StaffPhotographer => image.copy( - usageRights = p, - metadata = image.metadata.copy(credit = Some(p.publication), byline = Some(p.photographer)) - ) - case p: ContractPhotographer => image.copy( - usageRights = p, - metadata = image.metadata.copy(credit = p.publication, byline = Some(p.photographer)) - ) + getPhotographer(byline).map { + case p: StaffPhotographer => + image.copy( + usageRights = p, + metadata = image.metadata.copy( + credit = Some(p.publication), + byline = Some(p.photographer) + ) + ) + case p: ContractPhotographer => + image.copy( + usageRights = p, + metadata = image.metadata.copy( + credit = p.publication, + byline = Some(p.photographer) + ) + ) case _ => image } } @@ -69,5 +113,3 @@ class BBCPhotographerParser(resources: ImageProcessorResources) extends ImagePro override def description: String = "BBC Supplier Processor" } - - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCImageProcessorsDependencies.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCImageProcessorsDependencies.scala index d9653b6a76..6222b7c547 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCImageProcessorsDependencies.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCImageProcessorsDependencies.scala @@ -15,10 +15,12 @@ trait BBCDependenciesConfig 
{ } object BBCDependenciesConfig { - def apply(resources: ImageProcessorResources): BBCDependenciesConfig = new BBCDependenciesConfig { - override def commonConfiguration: CommonConfig = resources.commonConfiguration - override def actorSystem: ActorSystem = resources.actorSystem - } + def apply(resources: ImageProcessorResources): BBCDependenciesConfig = + new BBCDependenciesConfig { + override def commonConfiguration: CommonConfig = + resources.commonConfiguration + override def actorSystem: ActorSystem = resources.actorSystem + } } object BBCImageProcessorsDependencies { @@ -39,21 +41,27 @@ object BBCImageProcessorsDependencies { } /* - * The laziness here guarantees that only the used dependencies are loaded - * */ - lazy val metadataStore: BBCDependenciesConfig => BBCMetadataStore = memoizeOnce { resources => - val bbcImageProcessorConfig = new BBCImageProcessorConfig(resources.commonConfiguration.configuration) - val bucket = bbcImageProcessorConfig.configBucket - val metadataStore = new BBCMetadataStore(bucket, resources.commonConfiguration) - metadataStore.scheduleUpdates(resources.actorSystem.scheduler) - metadataStore - } + * The laziness here guarantees that only the used dependencies are loaded + * */ + lazy val metadataStore: BBCDependenciesConfig => BBCMetadataStore = + memoizeOnce { resources => + val bbcImageProcessorConfig = + new BBCImageProcessorConfig(resources.commonConfiguration.configuration) + val bucket = bbcImageProcessorConfig.configBucket + val metadataStore = + new BBCMetadataStore(bucket, resources.commonConfiguration) + metadataStore.scheduleUpdates(resources.actorSystem.scheduler) + metadataStore + } - lazy val usageRightsStore: BBCDependenciesConfig => BBCUsageRightsStore = memoizeOnce { resources => - val bbcImageProcessorConfig = new BBCImageProcessorConfig(resources.commonConfiguration.configuration) - val bucket = bbcImageProcessorConfig.configBucket - val usageRightsStore = new BBCUsageRightsStore(bucket, resources.commonConfiguration) - usageRightsStore.scheduleUpdates(resources.actorSystem.scheduler) - usageRightsStore - } + lazy val usageRightsStore: BBCDependenciesConfig => BBCUsageRightsStore = + memoizeOnce { resources => + val bbcImageProcessorConfig = + new BBCImageProcessorConfig(resources.commonConfiguration.configuration) + val bucket = bbcImageProcessorConfig.configBucket + val usageRightsStore = + new BBCUsageRightsStore(bucket, resources.commonConfiguration) + usageRightsStore.scheduleUpdates(resources.actorSystem.scheduler) + usageRightsStore + } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCMetadataConfig.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCMetadataConfig.scala index 3b92b3da09..e71c6ca649 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCMetadataConfig.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCMetadataConfig.scala @@ -4,20 +4,26 @@ import com.gu.mediaservice.lib.config.KnownPhotographer import play.api.libs.json._ case class BBCMetadataConfig( - staffIllustrators: List[String], - creativeCommonsLicense: List[String], - externalStaffPhotographers: List[Company], - internalStaffPhotographers: List[Company], - contractedPhotographers: List[Company], - contractIllustrators: List[Company]) { - - val staffPhotographers: Map[String, List[String]] = BBCMetadataConfig.flattenCompanyListMap( - internalStaffPhotographers ++ externalStaffPhotographers) - - val allPhotographers: Map[String, List[String]] 
= BBCMetadataConfig.flattenCompanyListMap( - internalStaffPhotographers ++ externalStaffPhotographers ++ contractedPhotographers) - - val contractedPhotographersMap: Map[String, List[String]] = BBCMetadataConfig.flattenCompanyListMap(contractedPhotographers) + staffIllustrators: List[String], + creativeCommonsLicense: List[String], + externalStaffPhotographers: List[Company], + internalStaffPhotographers: List[Company], + contractedPhotographers: List[Company], + contractIllustrators: List[Company] +) { + + val staffPhotographers: Map[String, List[String]] = + BBCMetadataConfig.flattenCompanyListMap( + internalStaffPhotographers ++ externalStaffPhotographers + ) + + val allPhotographers: Map[String, List[String]] = + BBCMetadataConfig.flattenCompanyListMap( + internalStaffPhotographers ++ externalStaffPhotographers ++ contractedPhotographers + ) + + val contractedPhotographersMap: Map[String, List[String]] = + BBCMetadataConfig.flattenCompanyListMap(contractedPhotographers) } @@ -29,7 +35,9 @@ object Company { object BBCMetadataConfig { implicit val metadataConfigClassFormats = Json.format[BBCMetadataConfig] - def companyPhotographersMap(companyPhotographers: Map[String, List[String]]): List[KnownPhotographer] = { + def companyPhotographersMap( + companyPhotographers: Map[String, List[String]] + ): List[KnownPhotographer] = { companyPhotographers.flatMap { companyPhotographersItem => val company = companyPhotographersItem._1 val photographers = companyPhotographersItem._2 @@ -42,10 +50,14 @@ object BBCMetadataConfig { def flattenCompanyList(companies: List[Company]): List[Company] = companies .groupBy(_.name) - .map { case (group, companies) => Company(group, companies.flatMap(company => company.photographers)) } + .map { case (group, companies) => + Company(group, companies.flatMap(company => company.photographers)) + } .toList - def flattenCompanyListMap(companies: List[Company]) : Map[String, List[String]] = flattenCompanyList(companies) - .map {company => company.name -> company.photographers} - .toMap + def flattenCompanyListMap( + companies: List[Company] + ): Map[String, List[String]] = flattenCompanyList(companies).map { company => + company.name -> company.photographers + }.toMap } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCMetadataStore.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCMetadataStore.scala index 8ff821d0b8..4dfb981212 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCMetadataStore.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCMetadataStore.scala @@ -9,31 +9,37 @@ import play.api.libs.json.Json import scala.concurrent.ExecutionContext import scala.util.{Failure, Success, Try} -class BBCMetadataStore(bucket: String, config: CommonConfig)(implicit ec: ExecutionContext) - extends BaseStore[String, BBCMetadataConfig](bucket, config)(ec) { +class BBCMetadataStore(bucket: String, config: CommonConfig)(implicit + ec: ExecutionContext +) extends BaseStore[String, BBCMetadataConfig](bucket, config)(ec) { val metadataMapKey = "metadataConfig" val metadataStoreKey = "photographers.json" def apply() = fetchAll match { case Some(_) => Logger.info("Metadata config read in from config bucket") - case None => throw FailedToLoadMetadataConfigJson + case None => throw FailedToLoadMetadataConfigJson } def update() { lastUpdated.send(_ => DateTime.now()) fetchAll match { case Some(config) => store.send(_ => config) - case None => Logger.warn("Could not parse 
metadata config JSON into MetadataConfig class") + case None => + Logger.warn( + "Could not parse metadata config JSON into MetadataConfig class" + ) } } private def fetchAll: Option[Map[String, BBCMetadataConfig]] = { getS3Object(metadataStoreKey) match { - case Some(fileContents) => Try(Json.parse(fileContents).as[BBCMetadataConfig]) match { - case Success(metadataConfigClass) => Some(Map(metadataMapKey -> metadataConfigClass)) - case Failure(_) => None - } + case Some(fileContents) => + Try(Json.parse(fileContents).as[BBCMetadataConfig]) match { + case Success(metadataConfigClass) => + Some(Map(metadataMapKey -> metadataConfigClass)) + case Failure(_) => None + } case None => None } } @@ -42,14 +48,19 @@ class BBCMetadataStore(bucket: String, config: CommonConfig)(implicit ec: Execut } object BBCMetadataStore { - def apply(bucket: String, config: CommonConfig)(implicit ec: ExecutionContext): BBCMetadataStore = { + def apply(bucket: String, config: CommonConfig)(implicit + ec: ExecutionContext + ): BBCMetadataStore = { val store = new BBCMetadataStore(bucket, config)(ec) store.fetchAll match { case Some(_) => Logger.info("Metadata config read in from config bucket") - case None => throw FailedToLoadMetadataConfigJson + case None => throw FailedToLoadMetadataConfigJson } store } } -case object FailedToLoadMetadataConfigJson extends Exception("Failed to load metadataConfig from S3 config bucket on start up") +case object FailedToLoadMetadataConfigJson + extends Exception( + "Failed to load metadataConfig from S3 config bucket on start up" + ) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCUsageRightsConfig.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCUsageRightsConfig.scala index 54ed2b0699..0134733ef1 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCUsageRightsConfig.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCUsageRightsConfig.scala @@ -4,12 +4,13 @@ import com.gu.mediaservice.model.Cost import play.api.libs.json._ case class BBCUsageRightsConfig( - supplierCreditMatches: List[SupplierMatch], - supplierParsers: List[String], - supplierCostings: Map[String, Cost], - usageRights: List[String], - freeSuppliers: List[String], - suppliersCollectionExcl: Map[String, List[String]]) { + supplierCreditMatches: List[SupplierMatch], + supplierParsers: List[String], + supplierCostings: Map[String, Cost], + usageRights: List[String], + freeSuppliers: List[String], + suppliersCollectionExcl: Map[String, List[String]] +) { def isFreeSupplier(supplier: String) = freeSuppliers.contains(supplier) @@ -17,7 +18,11 @@ case class BBCUsageRightsConfig( suppliersCollectionExcl.get(supplier).exists(_.contains(supplierColl)) } -case class SupplierMatch(name: String, creditMatches: List[String], sourceMatches: List[String]) +case class SupplierMatch( + name: String, + creditMatches: List[String], + sourceMatches: List[String] +) object SupplierMatch { implicit val supplierMatchesFormats = Json.format[SupplierMatch] diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCUsageRightsStore.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCUsageRightsStore.scala index 4b85f6720a..b44c40358c 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCUsageRightsStore.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/bbc/components/BBCUsageRightsStore.scala @@ -9,14 +9,16 @@ import play.api.libs.json.Json 
import scala.concurrent.ExecutionContext import scala.util.{Failure, Success, Try} -class BBCUsageRightsStore(bucket: String, config: CommonConfig)(implicit ec: ExecutionContext) - extends BaseStore[String, BBCUsageRightsConfig](bucket, config)(ec) { +class BBCUsageRightsStore(bucket: String, config: CommonConfig)(implicit + ec: ExecutionContext +) extends BaseStore[String, BBCUsageRightsConfig](bucket, config)(ec) { val usageRightsMapKey = "usageRights" val usageRightsStoreKey = "usage_rights.json" def apply() = fetchAll match { - case Some(_) => Logger.info("Usage Rights config read in from config bucket") + case Some(_) => + Logger.info("Usage Rights config read in from config bucket") case None => throw FailedToLoadUsageRightsConfigJson } @@ -24,7 +26,10 @@ class BBCUsageRightsStore(bucket: String, config: CommonConfig)(implicit ec: Exe lastUpdated.send(_ => DateTime.now()) fetchAll match { case Some(config) => store.send(_ => config) - case None => Logger.warn("Could not parse usage rights config JSON into UsageRightsConfig class") + case None => + Logger.warn( + "Could not parse usage rights config JSON into UsageRightsConfig class" + ) } } @@ -32,7 +37,8 @@ class BBCUsageRightsStore(bucket: String, config: CommonConfig)(implicit ec: Exe getS3Object(usageRightsStoreKey) match { case Some(fileContents) => { Try(Json.parse(fileContents).as[BBCUsageRightsConfig]) match { - case Success(usageRightsConfigClass) => Some(Map(usageRightsMapKey -> usageRightsConfigClass)) + case Success(usageRightsConfigClass) => + Some(Map(usageRightsMapKey -> usageRightsConfigClass)) case Failure(e) => None } } @@ -45,14 +51,20 @@ class BBCUsageRightsStore(bucket: String, config: CommonConfig)(implicit ec: Exe } object UsageRightsStore { - def apply(bucket: String, config: CommonConfig)(implicit ec: ExecutionContext): BBCUsageRightsStore = { + def apply(bucket: String, config: CommonConfig)(implicit + ec: ExecutionContext + ): BBCUsageRightsStore = { val store = new BBCUsageRightsStore(bucket, config)(ec) store.fetchAll match { - case Some(_) => Logger.info("Usage rights config read in from config bucket") + case Some(_) => + Logger.info("Usage rights config read in from config bucket") case None => throw FailedToLoadMetadataConfigJson } store } } -case object FailedToLoadUsageRightsConfigJson extends Exception("Failed to load UsageRightsConfig from S3 config bucket on start up") +case object FailedToLoadUsageRightsConfigJson + extends Exception( + "Failed to load UsageRightsConfig from S3 config bucket on start up" + ) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/AttributeCreditFromByline.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/AttributeCreditFromByline.scala index 2ac92ecc65..bfff7059b5 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/AttributeCreditFromByline.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/AttributeCreditFromByline.scala @@ -2,28 +2,35 @@ package com.gu.mediaservice.lib.cleanup import com.gu.mediaservice.model.ImageMetadata -/** - * A generally useful cleaner that assigns credits based on bylines. +/** A generally useful cleaner that assigns credits based on bylines. * TODO: Make this more usefully configurable from config or similar? 
* @param bylines * @param credit */ -case class AttributeCreditFromByline(bylines: List[String], credit: String) extends MetadataCleaner { +case class AttributeCreditFromByline(bylines: List[String], credit: String) + extends MetadataCleaner { val lowercaseBylines = bylines.map(_.toLowerCase) - override def clean(metadata: ImageMetadata): ImageMetadata = metadata.byline match { - case Some(byline) if lowercaseBylines.contains(byline.toLowerCase) => metadata.copy(credit = Some(credit)) - case _ => metadata - } + override def clean(metadata: ImageMetadata): ImageMetadata = + metadata.byline match { + case Some(byline) if lowercaseBylines.contains(byline.toLowerCase) => + metadata.copy(credit = Some(credit)) + case _ => metadata + } override def description: String = s"AttributeCreditFromByline($credit)" } object AttributeCreditFromByline { - def fromCreditBylineMap(creditBylineMap: Map[String, List[String]]): ImageProcessor = { - ImageProcessor.compose("AttributeCreditFromBylines", creditBylineMap.map { case (credit, bylines) => - AttributeCreditFromByline(bylines, credit) - }.toSeq:_*) + def fromCreditBylineMap( + creditBylineMap: Map[String, List[String]] + ): ImageProcessor = { + ImageProcessor.compose( + "AttributeCreditFromBylines", + creditBylineMap.map { case (credit, bylines) => + AttributeCreditFromByline(bylines, credit) + }.toSeq: _* + ) } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/BylineCreditReorganise.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/BylineCreditReorganise.scala index 6f2be99527..7958db3318 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/BylineCreditReorganise.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/BylineCreditReorganise.scala @@ -23,33 +23,37 @@ object BylineCreditReorganise extends MetadataCleaner { } def removeBylineFromCredit(bylineField: Field, creditField: Field) = - bylineField.map { byline => - val credit = creditField.getOrElse("") - val bylineParts = byline.split(" via |/").filter(!_.isEmpty) - val creditParts = credit.split(" via |/").filter(!_.isEmpty) - - // It's very difficult to decide how to reorganise the byline or credits if they're both single tokens - // since we'd need to know what's likely to be a name and what's likely to be an organisation. - val ambiguousBylineCredit = bylineParts.length == 0 || (bylineParts.length == 1 && creditParts.length == 1) - - if (ambiguousBylineCredit) { - (byline, credit) - } else { - val outputByline = bylineParts.head - - val outputCredit = (bylineParts.tail.filter(!creditParts.contains(_)) ++ creditParts.filter(_ != outputByline)).distinct.mkString("/") - - (outputByline, outputCredit) + bylineField + .map { byline => + val credit = creditField.getOrElse("") + val bylineParts = byline.split(" via |/").filter(!_.isEmpty) + val creditParts = credit.split(" via |/").filter(!_.isEmpty) + + // It's very difficult to decide how to reorganise the byline or credits if they're both single tokens + // since we'd need to know what's likely to be a name and what's likely to be an organisation. 
+ val ambiguousBylineCredit = + bylineParts.length == 0 || (bylineParts.length == 1 && creditParts.length == 1) + + if (ambiguousBylineCredit) { + (byline, credit) + } else { + val outputByline = bylineParts.head + + val outputCredit = (bylineParts.tail.filter( + !creditParts.contains(_) + ) ++ creditParts.filter(_ != outputByline)).distinct.mkString("/") + + (outputByline, outputCredit) + } } - } - // Convert the strings back to `Option`s - .map{ case (b, c) => (Some(b), Some(c).filter(!_.isEmpty)) } - // return the defaults if they both didn't exist - .getOrElse((bylineField, creditField)) + // Convert the strings back to `Option`s + .map { case (b, c) => (Some(b), Some(c).filter(!_.isEmpty)) } + // return the defaults if they both didn't exist + .getOrElse((bylineField, creditField)) def cleanField(field: Field) = field.map(condenseSpaceySlashes) - def condenseSpaceySlashes(s: String): String = SpaceySlashes.replaceAllIn(s, "/") + def condenseSpaceySlashes(s: String): String = + SpaceySlashes.replaceAllIn(s, "/") } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CapitaliseProperty.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CapitaliseProperty.scala index 665c73cc2b..b4f7611819 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CapitaliseProperty.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CapitaliseProperty.scala @@ -7,7 +7,8 @@ import com.gu.mediaservice.model.ImageMetadata */ object CapitaliseByline extends MetadataCleaner with CapitalisationFixer { // Note: probably not exhaustive list - override val joinWords = List("van", "der", "den", "dem", "von", "de", "du", "la", "et") + override val joinWords = + List("van", "der", "den", "dem", "von", "de", "du", "la", "et") def clean(metadata: ImageMetadata) = metadata.copy(byline = metadata.byline.map(fixNameCapitalisation)) @@ -44,8 +45,6 @@ object CapitaliseSubLocation extends MetadataCleaner with CapitalisationFixer { metadata.copy(subLocation = metadata.subLocation.map(fixCapitalisation)) } - - trait CapitalisationFixer { def fixCapitalisation(s: String): String = @@ -80,7 +79,6 @@ trait CapitalisationFixer { def capitaliseAround(s: String, delimiter: String): String = s.split(delimiter).map(_.capitalize).mkString(delimiter) - def isAllUpperCase(s: String): Boolean = s == s.toUpperCase def isAllLowerCase(s: String): Boolean = s == s.toLowerCase } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CleanRubbishLocation.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CleanRubbishLocation.scala index 49dfabade0..706fece853 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CleanRubbishLocation.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CleanRubbishLocation.scala @@ -13,13 +13,13 @@ object CleanRubbishLocation extends MetadataCleaner { override def clean(metadata: ImageMetadata): ImageMetadata = metadata.copy( subLocation = metadata.subLocation.flatMap(cleanRubbish), - city = metadata.city.flatMap(cleanRubbish), - state = metadata.state.flatMap(cleanRubbish), - country = metadata.country.flatMap(cleanRubbish) + city = metadata.city.flatMap(cleanRubbish), + state = metadata.state.flatMap(cleanRubbish), + country = metadata.country.flatMap(cleanRubbish) ) def cleanRubbish(s: String): Option[String] = s match { case Rubbish(_) => None - case _ => Some(s) + case _ => Some(s) } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CountryCode.scala 
b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CountryCode.scala index 79efab2cde..4f9a174b44 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CountryCode.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/CountryCode.scala @@ -5,12 +5,11 @@ import java.util.Locale import com.gu.mediaservice.lib.logging.GridLogging import com.gu.mediaservice.model.ImageMetadata -/** - * Cleaner that maps 2/3 letter country codes onto country names +/** Cleaner that maps 2/3 letter country codes onto country names */ object CountryCode extends MetadataCleaner with GridLogging { - val TwoLetterCode = """([A-Z]{2})""".r + val TwoLetterCode = """([A-Z]{2})""".r val ThreeLetterCode = """([A-Z]{3})""".r val allLocales = Locale.getISOCountries.map(new Locale("", _)) @@ -34,11 +33,14 @@ object CountryCode extends MetadataCleaner with GridLogging { case c => c } - override def clean(metadata: ImageMetadata): ImageMetadata = metadata.country match { - case Some(TwoLetterCode(code)) => metadata.copy(country = Some(mapTwoLetterCode(code))) - case Some(ThreeLetterCode(code)) => metadata.copy(country = Some(mapThreeLetterCode(code))) - // No country or not a code, just pass through - case Some(country) => metadata - case None => metadata - } + override def clean(metadata: ImageMetadata): ImageMetadata = + metadata.country match { + case Some(TwoLetterCode(code)) => + metadata.copy(country = Some(mapTwoLetterCode(code))) + case Some(ThreeLetterCode(code)) => + metadata.copy(country = Some(mapThreeLetterCode(code))) + // No country or not a code, just pass through + case Some(country) => metadata + case None => metadata + } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/DropRedundantTitle.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/DropRedundantTitle.scala index 629dcc157e..7c29f5411a 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/DropRedundantTitle.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/DropRedundantTitle.scala @@ -2,14 +2,15 @@ package com.gu.mediaservice.lib.cleanup import com.gu.mediaservice.model.ImageMetadata -/** - * Generic data cleaner that drops the title from an image if the text matches the start of the description. +/** Generic data cleaner that drops the title from an image if the text matches the start of the description. 
*/ object DropRedundantTitle extends MetadataCleaner { - override def clean(metadata: ImageMetadata): ImageMetadata = (metadata.title, metadata.description) match { - case (Some(title), Some(description)) => metadata.copy(title = cleanTitle(title, description)) - case _ => metadata - } + override def clean(metadata: ImageMetadata): ImageMetadata = + (metadata.title, metadata.description) match { + case (Some(title), Some(description)) => + metadata.copy(title = cleanTitle(title, description)) + case _ => metadata + } def cleanTitle(title: String, description: String): Option[String] = if (description.startsWith(title)) { diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ExtractGuardianCreditFromByline.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ExtractGuardianCreditFromByline.scala index d56460991e..99c30b8417 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ExtractGuardianCreditFromByline.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ExtractGuardianCreditFromByline.scala @@ -2,25 +2,28 @@ package com.gu.mediaservice.lib.cleanup import com.gu.mediaservice.model.ImageMetadata -/** - * Super Guardian-specific - really only important for old pictures. +/** Super Guardian-specific - really only important for old pictures. */ object ExtractGuardianCreditFromByline extends MetadataCleaner { val BylineForTheGuardian = """(?i)(.+) for the (Guardian|Observer)[.]?""".r val BylineForTheTrunc = """(?i)(.+) for the (.+)[.]?""".r - override def clean(metadata: ImageMetadata): ImageMetadata = metadata.byline match { - case Some(BylineForTheGuardian(byline, org)) => - val orgName = org.toLowerCase.capitalize - metadata.copy(byline = Some(byline), credit = Some(s"The $orgName")) - // Catch truncated bylines (IPTC allows 32 chars only) - case Some(field @ BylineForTheTrunc(byline, org)) if isThe("Guardian", org) && field.length == 31 => - metadata.copy(byline = Some(byline), credit = Some(s"The Guardian")) - case Some(field @ BylineForTheTrunc(byline, org)) if isThe("Observer", org) && field.length == 31 => - metadata.copy(byline = Some(byline), credit = Some(s"The Observer")) - case _ => metadata - } + override def clean(metadata: ImageMetadata): ImageMetadata = + metadata.byline match { + case Some(BylineForTheGuardian(byline, org)) => + val orgName = org.toLowerCase.capitalize + metadata.copy(byline = Some(byline), credit = Some(s"The $orgName")) + // Catch truncated bylines (IPTC allows 32 chars only) + case Some(field @ BylineForTheTrunc(byline, org)) + if isThe("Guardian", org) && field.length == 31 => + metadata.copy(byline = Some(byline), credit = Some(s"The Guardian")) + case Some(field @ BylineForTheTrunc(byline, org)) + if isThe("Observer", org) && field.length == 31 => + metadata.copy(byline = Some(byline), credit = Some(s"The Observer")) + case _ => metadata + } - private def isThe(s: String, full: String) = s.toLowerCase.startsWith(full.toLowerCase) + private def isThe(s: String, full: String) = + s.toLowerCase.startsWith(full.toLowerCase) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/GuardianStyleByline.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/GuardianStyleByline.scala index d2301a47a5..b4f73b2105 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/GuardianStyleByline.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/GuardianStyleByline.scala @@ -2,8 +2,7 @@ package com.gu.mediaservice.lib.cleanup import 
com.gu.mediaservice.model.ImageMetadata -/** - * TODO: Split this into two cleaners? +/** TODO: Split this into two cleaners? */ object GuardianStyleByline extends MetadataCleaner { override def clean(metadata: ImageMetadata): ImageMetadata = { @@ -12,7 +11,7 @@ object GuardianStyleByline extends MetadataCleaner { ) } - private def applyCleaners(byline: String): String = { + private def applyCleaners(byline: String): String = { val curly = replaceStraightQuoteWithCurly(byline) cleanInitials(curly) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ImageProcessor.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ImageProcessor.scala index 973ca6ceb8..9dba1a790e 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ImageProcessor.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ImageProcessor.scala @@ -2,8 +2,7 @@ package com.gu.mediaservice.lib.cleanup import com.gu.mediaservice.model.Image -/** - * An image processor has a single apply method that takes an `Image` and returns an `Image`. This can be used +/** An image processor has a single apply method that takes an `Image` and returns an `Image`. This can be used * to modify the image in any number of ways and is primarily used to identify and allocate images from different * suppliers and also to clean and conform metadata. */ @@ -21,12 +20,16 @@ object ImageProcessor { override def apply(image: Image): Image = image override def description: String = "identity" } + /** A convenience method that creates a new ComposedImageProcessor from the provided image processors * @param name The string name used to identify this composition * @param imageProcessors the underlying image processors that are to be composed * @return a new image processor that composes the provided image processors in order - * */ - def compose(name: String, imageProcessors: ImageProcessor*): ComposedImageProcessor = new ComposedImageProcessor { + */ + def compose( + name: String, + imageProcessors: ImageProcessor* + ): ComposedImageProcessor = new ComposedImageProcessor { def apply(image: Image): Image = imageProcessors .foldLeft(image) { case (i, processor) => processor(i) } @@ -39,12 +42,13 @@ object ImageProcessor { } } -/** - * An image processor that simply composes a number of other image processors together. +/** An image processor that simply composes a number of other image processors together. 
* @param imageProcessors the underlying image processors that are to be applied when this imageProcessor is used */ -class ComposeImageProcessors(val imageProcessors: ImageProcessor*) extends ComposedImageProcessor { - val underlying: ComposedImageProcessor = ImageProcessor.compose(getClass.getCanonicalName, imageProcessors:_*) +class ComposeImageProcessors(val imageProcessors: ImageProcessor*) + extends ComposedImageProcessor { + val underlying: ComposedImageProcessor = + ImageProcessor.compose(getClass.getCanonicalName, imageProcessors: _*) override def apply(image: Image): Image = underlying.apply(image) override def description: String = underlying.description override def processors: Seq[ImageProcessor] = underlying.processors diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ImageProcessorResources.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ImageProcessorResources.scala index 1de5e60543..e08a939025 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ImageProcessorResources.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/ImageProcessorResources.scala @@ -4,7 +4,9 @@ import akka.actor.ActorSystem import com.gu.mediaservice.lib.config.CommonConfig import play.api.Configuration -/** - * Resources that can be injected into a dynamically loaded ImageProcessor +/** Resources that can be injected into a dynamically loaded ImageProcessor */ -case class ImageProcessorResources(commonConfiguration: CommonConfig, actorSystem: ActorSystem) +case class ImageProcessorResources( + commonConfiguration: CommonConfig, + actorSystem: ActorSystem +) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/InitialJoinerByline.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/InitialJoinerByline.scala index b2e2dab8e8..33b16da5cf 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/InitialJoinerByline.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/InitialJoinerByline.scala @@ -2,14 +2,15 @@ package com.gu.mediaservice.lib.cleanup import com.gu.mediaservice.model.ImageMetadata -/** - * Guardian style, depends on the GuardianStyleByline and the CapitaliseByline processors +/** Guardian style, depends on the GuardianStyleByline and the CapitaliseByline processors */ object InitialJoinerByline extends MetadataCleaner { // Squish together pairs of dangling initials. 
For example: "C P Scott" -> "CP Scott" override def clean(metadata: ImageMetadata): ImageMetadata = { metadata.copy( - byline = metadata.byline.map(_.replaceAll("\\b(\\p{Lu})\\s(\\p{Lu}(\\s|$))", "$1$2")) + byline = metadata.byline.map( + _.replaceAll("\\b(\\p{Lu})\\s(\\p{Lu}(\\s|$))", "$1$2") + ) ) } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/MetadataCleaner.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/MetadataCleaner.scala index e87620586a..ea9e9912cc 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/MetadataCleaner.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/MetadataCleaner.scala @@ -6,31 +6,33 @@ import com.gu.mediaservice.model.{Image, ImageMetadata} trait MetadataCleaner extends ImageProcessor { def clean(metadata: ImageMetadata): ImageMetadata - override def apply(image: Image): Image = image.copy(metadata = clean(image.metadata)) + override def apply(image: Image): Image = + image.copy(metadata = clean(image.metadata)) } -class GuardianMetadataCleaners extends MetadataCleaners(MetadataConfig.allPhotographersMap) +class GuardianMetadataCleaners + extends MetadataCleaners(MetadataConfig.allPhotographersMap) class MetadataCleaners(creditBylineMap: Map[String, List[String]]) - extends ComposeImageProcessors( - CleanRubbishLocation, - StripCopyrightPrefix, - RedundantTokenRemover, - BylineCreditReorganise, - UseCanonicalGuardianCredit, - ExtractGuardianCreditFromByline, - AttributeCreditFromByline.fromCreditBylineMap(creditBylineMap), - CountryCode, - GuardianStyleByline, - CapitaliseByline, - InitialJoinerByline, - CapitaliseCountry, - CapitaliseState, - CapitaliseCity, - CapitaliseSubLocation, - DropRedundantTitle, - PhotographerRenamer - ) + extends ComposeImageProcessors( + CleanRubbishLocation, + StripCopyrightPrefix, + RedundantTokenRemover, + BylineCreditReorganise, + UseCanonicalGuardianCredit, + ExtractGuardianCreditFromByline, + AttributeCreditFromByline.fromCreditBylineMap(creditBylineMap), + CountryCode, + GuardianStyleByline, + CapitaliseByline, + InitialJoinerByline, + CapitaliseCountry, + CapitaliseState, + CapitaliseCity, + CapitaliseSubLocation, + DropRedundantTitle, + PhotographerRenamer + ) // By vague order of importance: diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/PhotographerRenamer.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/PhotographerRenamer.scala index c33cc936dd..7215b8c441 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/PhotographerRenamer.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/PhotographerRenamer.scala @@ -474,6 +474,8 @@ object PhotographerRenamer extends MetadataCleaner { ) override def clean(metadata: ImageMetadata): ImageMetadata = { - metadata.copy(byline = metadata.byline.flatMap(names.get(_).orElse(metadata.byline))) + metadata.copy(byline = + metadata.byline.flatMap(names.get(_).orElse(metadata.byline)) + ) } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/RedundantTokenRemover.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/RedundantTokenRemover.scala index 51d56bff5f..db9b46c7a3 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/RedundantTokenRemover.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/RedundantTokenRemover.scala @@ -1,8 +1,7 @@ package com.gu.mediaservice.lib.cleanup import com.gu.mediaservice.model.ImageMetadata -/** - * Possibly generic cleaner that 
removes common tokens from byline/credit that are meaningless. Will never leave the credit empty. +/** Possibly generic cleaner that removes common tokens from byline/credit that are meaningless. Will never leave the credit empty. */ object RedundantTokenRemover extends MetadataCleaner { val toRemove = List( @@ -27,20 +26,29 @@ object RedundantTokenRemover extends MetadataCleaner { ) override def clean(metadata: ImageMetadata): ImageMetadata = metadata.copy( - byline = metadata.byline.map(removeHandoutTokens).filter(_.trim.nonEmpty).map(_.trim), - credit = metadata.credit.map(removeHandoutTokens).flatMap { c => - if (c.isEmpty) { - metadata.credit.flatMap(c => c.split(" via |/").lastOption) - } else { - Some(c) + byline = metadata.byline + .map(removeHandoutTokens) + .filter(_.trim.nonEmpty) + .map(_.trim), + credit = metadata.credit + .map(removeHandoutTokens) + .flatMap { c => + if (c.isEmpty) { + metadata.credit.flatMap(c => c.split(" via |/").lastOption) + } else { + Some(c) + } } - }.map(_.trim), + .map(_.trim) ) def removeHandoutTokens(text: String): String = { - text.split(" via |/").filter { tok => - val trimmedToken = tok.trim - !toRemove.contains(trimmedToken) - }.mkString("/") + text + .split(" via |/") + .filter { tok => + val trimmedToken = tok.trim + !toRemove.contains(trimmedToken) + } + .mkString("/") } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/StripCopyrightPrefix.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/StripCopyrightPrefix.scala index 3549014498..670484259b 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/StripCopyrightPrefix.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/StripCopyrightPrefix.scala @@ -2,23 +2,23 @@ package com.gu.mediaservice.lib.cleanup import com.gu.mediaservice.model.ImageMetadata -/** - * Remove any explicit copyright character or string. +/** Remove any explicit copyright character or string. * TODO: Remove processing from copyright field (see PR#2778) */ object StripCopyrightPrefix extends MetadataCleaner { // Prefix-match any combination of copyright (separated by whitespace) - val WithoutCopyrightPrefix = """(?i)(?:©|Copyright(?: of)?|\(c\)|\s|:)*(.*)""".r + val WithoutCopyrightPrefix = + """(?i)(?:©|Copyright(?: of)?|\(c\)|\s|:)*(.*)""".r override def clean(metadata: ImageMetadata): ImageMetadata = metadata.copy( byline = metadata.byline.map(stripCopyrightPrefix), - credit = metadata.credit.map(stripCopyrightPrefix), + credit = metadata.credit.map(stripCopyrightPrefix) ) def stripCopyrightPrefix(s: String): String = s match { case WithoutCopyrightPrefix(rest) => rest - case _ => s + case _ => s } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/SupplierProcessors.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/SupplierProcessors.scala index 9bd5202a6f..93c3fb07e7 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/SupplierProcessors.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/SupplierProcessors.scala @@ -1,44 +1,52 @@ package com.gu.mediaservice.lib.cleanup -import com.gu.mediaservice.model.{Agencies, Agency, Image, StaffPhotographer, ContractPhotographer} +import com.gu.mediaservice.model.{ + Agencies, + Agency, + Image, + StaffPhotographer, + ContractPhotographer +} import com.gu.mediaservice.lib.config.PhotographersList -/** - * This is largely generic or close to generic processing aside from the Guardian Photographer parser. 
+/** This is largely generic or close to generic processing aside from the Guardian Photographer parser. */ object SupplierProcessors - extends ComposeImageProcessors( - GettyXmpParser, - GettyCreditParser, - AapParser, - ActionImagesParser, - AlamyParser, - AllStarParser, - ApParser, - CorbisParser, - EpaParser, - PaParser, - ReutersParser, - RexParser, - RonaldGrantParser, - PhotographerParser - ) + extends ComposeImageProcessors( + GettyXmpParser, + GettyCreditParser, + AapParser, + ActionImagesParser, + AlamyParser, + AllStarParser, + ApParser, + CorbisParser, + EpaParser, + PaParser, + ReutersParser, + RexParser, + RonaldGrantParser, + PhotographerParser + ) -/** - * Guardian specific logic to correctly identify Guardian and Observer photographers and their contracts +/** Guardian specific logic to correctly identify Guardian and Observer photographers and their contracts */ object PhotographerParser extends ImageProcessor { def apply(image: Image): Image = { image.metadata.byline.flatMap { byline => - PhotographersList.getPhotographer(byline).map{ - case p: StaffPhotographer => image.copy( - usageRights = p, - metadata = image.metadata.copy(credit = Some(p.publication), byline = Some(p.photographer)) - ) - case p: ContractPhotographer => image.copy( - usageRights = p, - metadata = image.metadata.copy(credit = p.publication, byline = Some(p.photographer)) - ) + PhotographersList.getPhotographer(byline).map { + case p: StaffPhotographer => + image.copy( + usageRights = p, + metadata = image.metadata + .copy(credit = Some(p.publication), byline = Some(p.photographer)) + ) + case p: ContractPhotographer => + image.copy( + usageRights = p, + metadata = image.metadata + .copy(credit = p.publication, byline = Some(p.photographer)) + ) case _ => image } } @@ -47,29 +55,35 @@ object PhotographerParser extends ImageProcessor { object AapParser extends ImageProcessor { def apply(image: Image): Image = image.metadata.credit match { - case Some("AAPIMAGE") | Some("AAP IMAGE") | Some("AAP") => image.copy( - usageRights = Agencies.get("aap"), - metadata = image.metadata.copy(credit = Some("AAP")) - ) + case Some("AAPIMAGE") | Some("AAP IMAGE") | Some("AAP") => + image.copy( + usageRights = Agencies.get("aap"), + metadata = image.metadata.copy(credit = Some("AAP")) + ) case _ => image } } object ActionImagesParser extends ImageProcessor { def apply(image: Image): Image = image.metadata.credit match { - case Some("Action Images") | Some("Action Images/Reuters") => image.copy( - usageRights = Agency("Action Images") - ) + case Some("Action Images") | Some("Action Images/Reuters") => + image.copy( + usageRights = Agency("Action Images") + ) case _ => image } } object AlamyParser extends ImageProcessor { def apply(image: Image): Image = image.metadata.credit match { - case Some(credit) if credit.contains("Alamy") && !credit.contains("Alamy Live News") => image.copy( - usageRights = Agencies.get("alamy"), - metadata = image.metadata.copy(credit = Some(credit.replace("Alamy Stock Photo", "Alamy"))) - ) + case Some(credit) + if credit.contains("Alamy") && !credit.contains("Alamy Live News") => + image.copy( + usageRights = Agencies.get("alamy"), + metadata = image.metadata.copy(credit = + Some(credit.replace("Alamy Stock Photo", "Alamy")) + ) + ) case _ => image } } @@ -82,7 +96,7 @@ object AllStarParser extends ImageProcessor { case Some("Allstar Picture Library") => withAllstarRights(image)(None) case Some(SlashAllstar(prefix)) => withAllstarRights(image)(Some(prefix)) case Some(AllstarSlash(suffix)) => 
withAllstarRights(image)(Some(suffix)) - case _ => image + case _ => image } def withAllstarRights(image: Image) = @@ -90,33 +104,41 @@ object AllStarParser extends ImageProcessor { stripAllstarFromByline andThen stripDuplicateByline - def asAllstarAgency(image: Image, suppliersCollection: Option[String]) = image.copy( - usageRights = Agency("Allstar Picture Library", suppliersCollection) - ) + def asAllstarAgency(image: Image, suppliersCollection: Option[String]) = + image.copy( + usageRights = Agency("Allstar Picture Library", suppliersCollection) + ) def stripAllstarFromByline(image: Image) = image.copy( - metadata = image.metadata.copy(byline = image.metadata.byline.map(stripAllstarSuffix)) + metadata = image.metadata.copy(byline = + image.metadata.byline.map(stripAllstarSuffix) + ) ) def stripAllstarSuffix(byline: String): String = byline match { case SlashAllstar(name) => name - case _ => byline + case _ => byline } // If suppliersCollection same as byline, remove byline but its byline casing for suppliersCollection and credit, // as they otherwise tend to be in ugly uppercase - def stripDuplicateByline(image: Image) = (image.usageRights, image.metadata.byline) match { - case (agency @ Agency(supplier, Some(supplColl), _), Some(byline)) if supplColl.toLowerCase == byline.toLowerCase => { - image.copy( - usageRights = agency.copy(suppliersCollection = image.metadata.byline), - metadata = image.metadata.copy( - credit = image.metadata.credit.map(credit => credit.replace(supplColl, byline)), - byline = None + def stripDuplicateByline(image: Image) = + (image.usageRights, image.metadata.byline) match { + case (agency @ Agency(supplier, Some(supplColl), _), Some(byline)) + if supplColl.toLowerCase == byline.toLowerCase => { + image.copy( + usageRights = + agency.copy(suppliersCollection = image.metadata.byline), + metadata = image.metadata.copy( + credit = image.metadata.credit.map(credit => + credit.replace(supplColl, byline) + ), + byline = None + ) ) - ) + } + case _ => image } - case _ => image - } } @@ -124,33 +146,38 @@ object ApParser extends ImageProcessor { val InvisionFor = "^invision for (.+)".r val PersonInvisionAp = "(.+)\\s*/invision/ap$".r - def apply(image: Image): Image = image.metadata.credit.map(_.toLowerCase) match { - case Some("ap") | Some("associated press") => image.copy( - usageRights = Agency("AP"), - metadata = image.metadata.copy(credit = Some("AP")) - ) - case Some("invision") | Some("invision/ap") | - Some(InvisionFor(_)) | Some(PersonInvisionAp(_)) => image.copy( - usageRights = Agency("AP", Some("Invision")) - ) - case _ => image - } + def apply(image: Image): Image = + image.metadata.credit.map(_.toLowerCase) match { + case Some("ap") | Some("associated press") => + image.copy( + usageRights = Agency("AP"), + metadata = image.metadata.copy(credit = Some("AP")) + ) + case Some("invision") | Some("invision/ap") | Some(InvisionFor(_)) | + Some(PersonInvisionAp(_)) => + image.copy( + usageRights = Agency("AP", Some("Invision")) + ) + case _ => image + } } object CorbisParser extends ImageProcessor { def apply(image: Image): Image = image.metadata.source match { - case Some("Corbis") => image.copy( - usageRights = Agency("Corbis") - ) + case Some("Corbis") => + image.copy( + usageRights = Agency("Corbis") + ) case _ => image } } object EpaParser extends ImageProcessor { def apply(image: Image): Image = image.metadata.credit match { - case Some(x) if x.matches(".*\\bEPA\\b.*") => image.copy( - usageRights = Agency("EPA") - ) + case Some(x) if 
x.matches(".*\\bEPA\\b.*") => + image.copy( + usageRights = Agency("EPA") + ) case _ => image } } @@ -164,33 +191,80 @@ trait GettyProcessor { object GettyXmpParser extends ImageProcessor with GettyProcessor { def apply(image: Image): Image = { val excludedCredit = List( - "Replay Images", "newspix international", "i-images", "photoshot", "Ian Jones", "Photo News/Panoramic", - "Panoramic/Avalon", "Panoramic", "Avalon", "INS News Agency Ltd", "Discovery.", "EPA", "EMPICS", "Empics News", - "S&G and Barratts/EMPICS Sport", "EMPICS Sport", "EMPICS SPORT", "EMPICS Sports Photo Agency", - "Empics Sports Photography Ltd.", "EMPICS Entertainment", "Empics Entertainment", "MatchDay Images Limited", - "S&G and Barratts/EMPICS Archive", "PPAUK", "SWNS.COM", "Euan Cherry", "Plumb Images", "Mercury Press", "SWNS", - "Athena Pictures", "Flick.digital", "Matthew Horwood", "Focus Images Ltd", "www.scottishphotographer.com", + "Replay Images", + "newspix international", + "i-images", + "photoshot", + "Ian Jones", + "Photo News/Panoramic", + "Panoramic/Avalon", + "Panoramic", + "Avalon", + "INS News Agency Ltd", + "Discovery.", + "EPA", + "EMPICS", + "Empics News", + "S&G and Barratts/EMPICS Sport", + "EMPICS Sport", + "EMPICS SPORT", + "EMPICS Sports Photo Agency", + "Empics Sports Photography Ltd.", + "EMPICS Entertainment", + "Empics Entertainment", + "MatchDay Images Limited", + "S&G and Barratts/EMPICS Archive", + "PPAUK", + "SWNS.COM", + "Euan Cherry", + "Plumb Images", + "Mercury Press", + "SWNS", + "Athena Pictures", + "Flick.digital", + "Matthew Horwood", + "Focus Images Ltd", + "www.scottishphotographer.com", "ZUMAPRESS.com" ) val excludedSource = List( - "www.capitalpictures.com", "Replay Images", "UKTV", "PinPep", "Pinnacle Photo Agency Ltd", "News Images", - "London News Pictures Ltd", "Showtime", "Propaganda", "Equinox Features", "Athena Picture Agency Ltd", - "www.edinburghelitemedia.co.uk", "WALES NEWS SERVICE", "Sports Inc", "UK Sports Pics Ltd", "Blitz Pictures", - "Consolidated News Photos", "MI News & Sport Ltd", "Parsons Media" + "www.capitalpictures.com", + "Replay Images", + "UKTV", + "PinPep", + "Pinnacle Photo Agency Ltd", + "News Images", + "London News Pictures Ltd", + "Showtime", + "Propaganda", + "Equinox Features", + "Athena Picture Agency Ltd", + "www.edinburghelitemedia.co.uk", + "WALES NEWS SERVICE", + "Sports Inc", + "UK Sports Pics Ltd", + "Blitz Pictures", + "Consolidated News Photos", + "MI News & Sport Ltd", + "Parsons Media" ) - val isExcludedByCredit = image.metadata.credit.exists(isExcluded(_, excludedCredit)) - val isExcludedBySource = image.metadata.source.exists(isExcluded(_, excludedSource)) + val isExcludedByCredit = + image.metadata.credit.exists(isExcluded(_, excludedCredit)) + val isExcludedBySource = + image.metadata.source.exists(isExcluded(_, excludedSource)) val hasGettyMetadata = image.fileMetadata.getty.nonEmpty - if(!hasGettyMetadata || isExcludedByCredit || isExcludedBySource) { + if (!hasGettyMetadata || isExcludedByCredit || isExcludedBySource) { image } else { image.copy( usageRights = gettyAgencyWithCollection(image.metadata.source), // Set a default "credit" for when Getty is too lazy to provide one - metadata = image.metadata.copy(credit = Some(image.metadata.credit.getOrElse("Getty Images"))) + metadata = image.metadata.copy(credit = + Some(image.metadata.credit.getOrElse("Getty Images")) + ) ) } } @@ -209,18 +283,20 @@ object GettyCreditParser extends ImageProcessor with GettyProcessor { val SlashGetty = ".+/Getty(?: .*)?".r def apply(image: 
Image): Image = image.metadata.credit match { - case Some(IncludesGetty()) | Some(ViaGetty()) | Some(SlashGetty()) => image.copy( - usageRights = gettyAgencyWithCollection(image.metadata.source) - ) + case Some(IncludesGetty()) | Some(ViaGetty()) | Some(SlashGetty()) => + image.copy( + usageRights = gettyAgencyWithCollection(image.metadata.source) + ) case Some(credit) => knownGettyCredits(image, credit) - case _ => image + case _ => image } def knownGettyCredits(image: Image, credit: String): Image = gettyCredits.find(_.toLowerCase == credit.toLowerCase) match { - case collection @ Some(_) => image.copy( - usageRights = gettyAgencyWithCollection(collection) - ) + case collection @ Some(_) => + image.copy( + usageRights = gettyAgencyWithCollection(collection) + ) case _ => image } } @@ -240,9 +316,11 @@ object PaParser extends ImageProcessor { ).map(_.toLowerCase) def apply(image: Image): Image = { - val isPa = List(image.metadata.credit, image.metadata.source).flatten.exists { creditOrSource => - paCredits.contains(creditOrSource.toLowerCase) - } + val isPa = + List(image.metadata.credit, image.metadata.source).flatten.exists { + creditOrSource => + paCredits.contains(creditOrSource.toLowerCase) + } if (isPa) { image.copy(usageRights = Agency("PA")) } else image @@ -253,18 +331,23 @@ object ReutersParser extends ImageProcessor { def apply(image: Image): Image = image.metadata.credit match { // Reuters and other misspellings // TODO: use case-insensitive matching instead once credit is no longer indexed as case-sensitive - case Some("REUTERS") | Some("Reuters") | Some("RETUERS") | Some("REUETRS") | Some("REUTERS/") | Some("via REUTERS") | Some("VIA REUTERS") | Some("via Reuters") => image.copy( - usageRights = Agency("Reuters"), - metadata = image.metadata.copy(credit = Some("Reuters")) - ) + case Some("REUTERS") | Some("Reuters") | Some("RETUERS") | Some("REUETRS") | + Some("REUTERS/") | Some("via REUTERS") | Some("VIA REUTERS") | + Some("via Reuters") => + image.copy( + usageRights = Agency("Reuters"), + metadata = image.metadata.copy(credit = Some("Reuters")) + ) // Others via Reuters - case Some("USA TODAY Sports") => image.copy( - metadata = image.metadata.copy(credit = Some("USA Today Sports")), - usageRights = Agency("Reuters") - ) - case Some("USA Today Sports") | Some("TT NEWS AGENCY") => image.copy( - usageRights = Agency("Reuters") - ) + case Some("USA TODAY Sports") => + image.copy( + metadata = image.metadata.copy(credit = Some("USA Today Sports")), + usageRights = Agency("Reuters") + ) + case Some("USA Today Sports") | Some("TT NEWS AGENCY") => + image.copy( + usageRights = Agency("Reuters") + ) case _ => image } } @@ -273,21 +356,23 @@ object RexParser extends ImageProcessor { val rexAgency = Agencies.get("rex") val SlashRex = ".+/ Rex Features".r - def apply(image: Image): Image = (image.metadata.source, image.metadata.credit) match { - // TODO: cleanup byline/credit - case (Some("Rex Features"), _) => image.copy(usageRights = rexAgency) - case (_, Some(SlashRex())) => image.copy(usageRights = rexAgency) - case (Some("REX/Shutterstock"), _) => image.copy(usageRights = rexAgency) - case _ => image - } + def apply(image: Image): Image = + (image.metadata.source, image.metadata.credit) match { + // TODO: cleanup byline/credit + case (Some("Rex Features"), _) => image.copy(usageRights = rexAgency) + case (_, Some(SlashRex())) => image.copy(usageRights = rexAgency) + case (Some("REX/Shutterstock"), _) => image.copy(usageRights = rexAgency) + case _ => image + } } object 
RonaldGrantParser extends ImageProcessor { def apply(image: Image): Image = image.metadata.credit match { - case Some("www.ronaldgrantarchive.com") | Some("Ronald Grant Archive") => image.copy( - usageRights = Agency("Ronald Grant Archive"), - metadata = image.metadata.copy(credit = Some("Ronald Grant")) - ) + case Some("www.ronaldgrantarchive.com") | Some("Ronald Grant Archive") => + image.copy( + usageRights = Agency("Ronald Grant Archive"), + metadata = image.metadata.copy(credit = Some("Ronald Grant")) + ) case _ => image } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/UseCanonicalGuardianCredit.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/UseCanonicalGuardianCredit.scala index e95a5ce6d7..a9b7dac39e 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/UseCanonicalGuardianCredit.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/cleanup/UseCanonicalGuardianCredit.scala @@ -9,8 +9,9 @@ import com.gu.mediaservice.model.ImageMetadata object UseCanonicalGuardianCredit extends MetadataCleaner { // Map "Guardian" credit (old style) to canonical "The Guardian" - override def clean(metadata: ImageMetadata): ImageMetadata = metadata.credit match { - case Some("Guardian") => metadata.copy(credit = Some("The Guardian")) - case _ => metadata - } + override def clean(metadata: ImageMetadata): ImageMetadata = + metadata.credit match { + case Some("Guardian") => metadata.copy(credit = Some("The Guardian")) + case _ => metadata + } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/collections/CollectionsManager.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/collections/CollectionsManager.scala index 60b3a001c2..78815db08f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/collections/CollectionsManager.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/collections/CollectionsManager.scala @@ -3,7 +3,6 @@ package com.gu.mediaservice.lib.collections import com.gu.mediaservice.lib.net.URI.{encode, decode} import com.gu.mediaservice.model.Collection - object CollectionsManager { val delimiter = "/" val doublequotes = "\"" @@ -16,16 +15,29 @@ object CollectionsManager { def sortBy(c: Collection) = c.pathId - def add(collection: Collection, collections: List[Collection]): List[Collection] = - (collection :: collections.filter(col => col.path != collection.path)).sortBy(sortBy) + def add( + collection: Collection, + collections: List[Collection] + ): List[Collection] = + (collection :: collections.filter(col => col.path != collection.path)) + .sortBy(sortBy) - def remove(path: List[String], collections: List[Collection]): List[Collection] = + def remove( + path: List[String], + collections: List[Collection] + ): List[Collection] = collections.filter(col => col.path != path) - def find(path: List[String], collections: List[Collection]): Option[Collection] = + def find( + path: List[String], + collections: List[Collection] + ): Option[Collection] = collections.find(col => col.path == path) - def findIndexes(path: List[String], collections: List[Collection]): List[Int] = + def findIndexes( + path: List[String], + collections: List[Collection] + ): List[Int] = collections.zipWithIndex.collect { case (collection, i) if collection.path == path => i } @@ -33,26 +45,32 @@ object CollectionsManager { def onlyLatest(collections: List[Collection]): List[Collection] = collections filter { collection => // if there isn't a collection with the same path created after itself. 
- !collections.exists { col => { - col.path == collection.path && col.actionData.date.isAfter(collection.actionData.date) - }} + !collections.exists { col => + { + col.path == collection.path && col.actionData.date.isAfter( + collection.actionData.date + ) + } + } } // We could use `ValidationNel`s here, but that's overkill - def isValidPathBit(s: String) = if (s.contains(delimiter) || s.contains(doublequotes)) false else true + def isValidPathBit(s: String) = + if (s.contains(delimiter) || s.contains(doublequotes)) false else true val collectionColours = Map( - "australia" -> "#ffb93e", - "culture" -> "#d1008b", + "australia" -> "#ffb93e", + "culture" -> "#d1008b", "film & music" -> "#b1532f", - "g2" -> "#000000", - "guide" -> "#8F1AB6", - "observer" -> "#006f94", - "sport" -> "#008000", - "travel" -> "#65C5FB" + "g2" -> "#000000", + "guide" -> "#8F1AB6", + "observer" -> "#006f94", + "sport" -> "#008000", + "travel" -> "#65C5FB" ) def getCollectionColour(s: String) = collectionColours.get(s) - def getCssColour(path: List[String]) = path.headOption.map(_.toLowerCase).flatMap(getCollectionColour) + def getCssColour(path: List[String]) = + path.headOption.map(_.toLowerCase).flatMap(getCollectionColour) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/AuthenticationProviderLoader.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/AuthenticationProviderLoader.scala index 5a264a6edf..cee5909888 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/AuthenticationProviderLoader.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/AuthenticationProviderLoader.scala @@ -1,6 +1,18 @@ package com.gu.mediaservice.lib.config -import com.gu.mediaservice.lib.auth.provider.{MachineAuthenticationProvider, AuthenticationProviderResources, UserAuthenticationProvider} +import com.gu.mediaservice.lib.auth.provider.{ + MachineAuthenticationProvider, + AuthenticationProviderResources, + UserAuthenticationProvider +} -object ApiAuthenticationProviderLoader extends ProviderLoader[MachineAuthenticationProvider, AuthenticationProviderResources]("api authentication provider") -object UserAuthenticationProviderLoader extends ProviderLoader[UserAuthenticationProvider, AuthenticationProviderResources]("user authentication provider") +object ApiAuthenticationProviderLoader + extends ProviderLoader[ + MachineAuthenticationProvider, + AuthenticationProviderResources + ]("api authentication provider") +object UserAuthenticationProviderLoader + extends ProviderLoader[ + UserAuthenticationProvider, + AuthenticationProviderResources + ]("user authentication provider") diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/CommonConfig.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/CommonConfig.scala index 8b5a876e84..8588f18aa7 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/CommonConfig.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/CommonConfig.scala @@ -9,8 +9,9 @@ import play.api.Configuration import scala.util.Try - -abstract class CommonConfig(val configuration: Configuration) extends AwsClientBuilderUtils with StrictLogging { +abstract class CommonConfig(val configuration: Configuration) + extends AwsClientBuilderUtils + with StrictLogging { final val elasticsearchStack = "media-service" final val elasticsearchApp = "elasticsearch" @@ -28,19 +29,26 @@ abstract class CommonConfig(val configuration: Configuration) extends AwsClientB override val awsRegion: String = 
stringDefault("aws.region", "eu-west-1") - override val awsLocalEndpoint: Option[String] = if(isDev) stringOpt("aws.local.endpoint") else None + override val awsLocalEndpoint: Option[String] = + if (isDev) stringOpt("aws.local.endpoint") else None val useLocalAuth: Boolean = isDev && boolean("auth.useLocal") - val permissionsBucket: String = stringDefault("permissions.bucket", "permissions-cache") + val permissionsBucket: String = + stringDefault("permissions.bucket", "permissions-cache") - val localLogShipping: Boolean = sys.env.getOrElse("LOCAL_LOG_SHIPPING", "false").toBoolean + val localLogShipping: Boolean = + sys.env.getOrElse("LOCAL_LOG_SHIPPING", "false").toBoolean val thrallKinesisStream = string("thrall.kinesis.stream.name") - val thrallKinesisLowPriorityStream = string("thrall.kinesis.lowPriorityStream.name") + val thrallKinesisLowPriorityStream = string( + "thrall.kinesis.lowPriorityStream.name" + ) val thrallKinesisStreamConfig = getKinesisConfigForStream(thrallKinesisStream) - val thrallKinesisLowPriorityStreamConfig = getKinesisConfigForStream(thrallKinesisLowPriorityStream) + val thrallKinesisLowPriorityStreamConfig = getKinesisConfigForStream( + thrallKinesisLowPriorityStream + ) val requestMetricsEnabled: Boolean = boolean("metrics.request.enabled") @@ -61,18 +69,27 @@ abstract class CommonConfig(val configuration: Configuration) extends AwsClientB stringDefault("hosts.authPrefix", s"$rootAppName-auth.") ) - val corsAllowedOrigins: Set[String] = getStringSet("security.cors.allowedOrigins") + val corsAllowedOrigins: Set[String] = getStringSet( + "security.cors.allowedOrigins" + ) val services = new Services(domainRoot, serviceHosts, corsAllowedOrigins) - private def getKinesisConfigForStream(streamName: String) = KinesisSenderConfig(awsRegion, awsCredentials, awsLocalEndpoint, isDev, streamName) + private def getKinesisConfigForStream(streamName: String) = + KinesisSenderConfig( + awsRegion, + awsCredentials, + awsLocalEndpoint, + isDev, + streamName + ) final def getStringSet(key: String): Set[String] = Try { configuration.get[Seq[String]](key) - }.recover { - case _:ConfigException.WrongType => configuration.get[String](key).split(",").toSeq.map(_.trim) + }.recover { case _: ConfigException.WrongType => + configuration.get[String](key).split(",").toSeq.map(_.trim) }.map(_.toSet) - .getOrElse(Set.empty) + .getOrElse(Set.empty) final def apply(key: String): String = string(key) @@ -83,7 +100,8 @@ abstract class CommonConfig(val configuration: Configuration) extends AwsClientB final def stringDefault(key: String, default: String): String = configuration.getOptional[String](key) getOrElse default - final def stringOpt(key: String): Option[String] = configuration.getOptional[String](key) + final def stringOpt(key: String): Option[String] = + configuration.getOptional[String](key) final def int(key: String): Int = configuration.getOptional[Int](key) getOrElse missing(key, "integer") diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/GridConfigLoader.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/GridConfigLoader.scala index dbe8eee5da..e72eacba1f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/GridConfigLoader.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/GridConfigLoader.scala @@ -24,10 +24,12 @@ object GridConfigLoader extends StrictLogging { s"/etc/grid/$appName.conf" ) - val baseConfig = Configuration.from(Map( - STAGE_KEY -> stageIdentifier.stage, - APP_KEY -> appName - )) + val baseConfig = 
Configuration.from( + Map( + STAGE_KEY -> stageIdentifier.stage, + APP_KEY -> appName + ) + ) val fileConfiguration: Configuration = { if (mode == Mode.Test) { @@ -47,7 +49,9 @@ object GridConfigLoader extends StrictLogging { if (file.exists) { logger.info(s"Loading config from $file") if (file.getPath.endsWith(".properties")) { - logger.warn(s"Configuring the Grid with Java properties files is deprecated as of #3011, please switch to .conf files. See #3037 for a conversion utility.") + logger.warn( + s"Configuring the Grid with Java properties files is deprecated as of #3011, please switch to .conf files. See #3037 for a conversion utility." + ) } Configuration(ConfigFactory.parseFile(file)) } else { diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/GridConfigResources.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/GridConfigResources.scala index 0520f92465..de700d0819 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/GridConfigResources.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/GridConfigResources.scala @@ -3,4 +3,7 @@ package com.gu.mediaservice.lib.config import akka.actor.ActorSystem import play.api.Configuration -case class GridConfigResources(configuration: Configuration, actorSystem: ActorSystem) +case class GridConfigResources( + configuration: Configuration, + actorSystem: ActorSystem +) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/ImageProcessorLoader.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/ImageProcessorLoader.scala index d5792bfa0f..51ad0d2b8f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/ImageProcessorLoader.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/ImageProcessorLoader.scala @@ -2,4 +2,7 @@ package com.gu.mediaservice.lib.config import com.gu.mediaservice.lib.cleanup.{ImageProcessor, ImageProcessorResources} -object ImageProcessorLoader extends ProviderLoader[ImageProcessor, ImageProcessorResources]("image processor") +object ImageProcessorLoader + extends ProviderLoader[ImageProcessor, ImageProcessorResources]( + "image processor" + ) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/MetadataConfig.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/MetadataConfig.scala index b3ba44679d..191283d4eb 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/MetadataConfig.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/MetadataConfig.scala @@ -4,84 +4,98 @@ package com.gu.mediaservice.lib.config import scalaz._ import Scalaz._ - - -import com.gu.mediaservice.model.{StaffPhotographer, ContractPhotographer, Photographer} +import com.gu.mediaservice.model.{ + StaffPhotographer, + ContractPhotographer, + Photographer +} case class KnownPhotographer(name: String, publication: String) object PhotographersList { type CreditBylineMap = Map[String, List[String]] - import MetadataConfig.{ Store, staffPhotographers, contractedPhotographers } + import MetadataConfig.{Store, staffPhotographers, contractedPhotographers} def creditBylineMap(store: Store): CreditBylineMap = store - .groupBy{ case KnownPhotographer(_, publication) => publication } - .map{ case (publication, photographers) => publication -> photographers.map(_.name).sortWith(_.toLowerCase < _.toLowerCase) } + .groupBy { case KnownPhotographer(_, publication) => publication } + .map { case (publication, photographers) => + publication -> photographers + .map(_.name) + 
.sortWith(_.toLowerCase < _.toLowerCase) + } - def list(store: Store) = store.map(_.name).sortWith(_.toLowerCase < _.toLowerCase) + def list(store: Store) = + store.map(_.name).sortWith(_.toLowerCase < _.toLowerCase) def caseInsensitiveLookup(store: Store, lookup: String) = - store.reverse.find{case KnownPhotographer(name, _) => name.toLowerCase == lookup.toLowerCase} + store.reverse.find { case KnownPhotographer(name, _) => + name.toLowerCase == lookup.toLowerCase + } def getPhotographer(photographer: String): Option[Photographer] = { - caseInsensitiveLookup(staffPhotographers, photographer).map { - case KnownPhotographer(name, publication) => StaffPhotographer(name, publication) - }.orElse(caseInsensitiveLookup(contractedPhotographers, photographer).map { - case KnownPhotographer(name, publication) => ContractPhotographer(name, Some(publication)) - }) + caseInsensitiveLookup(staffPhotographers, photographer) + .map { case KnownPhotographer(name, publication) => + StaffPhotographer(name, publication) + } + .orElse(caseInsensitiveLookup(contractedPhotographers, photographer).map { + case KnownPhotographer(name, publication) => + ContractPhotographer(name, Some(publication)) + }) } } object MetadataConfig { - type Store = List[KnownPhotographer] // not a Map at this point to allow for duplicate keys (as some photographers take pics for multiple publications) + type Store = + List[ + KnownPhotographer + ] // not a Map at this point to allow for duplicate keys (as some photographers take pics for multiple publications) implicit class KnownPhotographerOps(name: String) { - def ->(publication: String): KnownPhotographer = KnownPhotographer(name, publication) + def ->(publication: String): KnownPhotographer = + KnownPhotographer(name, publication) } val externalStaffPhotographers: Store = List( // Current - "Ben Doherty" -> "The Guardian", - "Bill Code" -> "The Guardian", + "Ben Doherty" -> "The Guardian", + "Bill Code" -> "The Guardian", "Calla Wahlquist" -> "The Guardian", - "David Sillitoe" -> "The Guardian", - "Graham Turner" -> "The Guardian", - "Helen Davidson" -> "The Guardian", - "Jill Mead" -> "The Guardian", + "David Sillitoe" -> "The Guardian", + "Graham Turner" -> "The Guardian", + "Helen Davidson" -> "The Guardian", + "Jill Mead" -> "The Guardian", //"Jonny Weeks" -> "The Guardian", (Commented out as Jonny's photo's aren't always as Staff.) 
"Joshua Robertson" -> "The Guardian", - "Rachel Vere" -> "The Guardian", - "Roger Tooth" -> "The Guardian", - "Sean Smith" -> "The Guardian", - "Melissa Davey" -> "The Guardian", - "Michael Safi" -> "The Guardian", - "Michael Slezak" -> "The Guardian", - "Sean Smith" -> "The Guardian", - "Carly Earl" -> "The Guardian", - + "Rachel Vere" -> "The Guardian", + "Roger Tooth" -> "The Guardian", + "Sean Smith" -> "The Guardian", + "Melissa Davey" -> "The Guardian", + "Michael Safi" -> "The Guardian", + "Michael Slezak" -> "The Guardian", + "Sean Smith" -> "The Guardian", + "Carly Earl" -> "The Guardian", // Past - "Dan Chung" -> "The Guardian", - "Denis Thorpe" -> "The Guardian", - "Don McPhee" -> "The Guardian", - "Frank Baron" -> "The Guardian", - "Frank Martin" -> "The Guardian", - "Garry Weaser" -> "The Guardian", - "Graham Finlayson" -> "The Guardian", - "Martin Argles" -> "The Guardian", - "Peter Johns" -> "The Guardian", - "Robert Smithies" -> "The Guardian", - "Tom Stuttard" -> "The Guardian", + "Dan Chung" -> "The Guardian", + "Denis Thorpe" -> "The Guardian", + "Don McPhee" -> "The Guardian", + "Frank Baron" -> "The Guardian", + "Frank Martin" -> "The Guardian", + "Garry Weaser" -> "The Guardian", + "Graham Finlayson" -> "The Guardian", + "Martin Argles" -> "The Guardian", + "Peter Johns" -> "The Guardian", + "Robert Smithies" -> "The Guardian", + "Tom Stuttard" -> "The Guardian", "Tricia De Courcy Ling" -> "The Guardian", - "Walter Doughty" -> "The Guardian", - "Eric Wadsworth" -> "The Guardian", - - "David Newell Smith" -> "The Observer", - "Tony McGrath" -> "The Observer", - "Catherine Shaw" -> "The Observer", - "John Reardon" -> "The Observer", - "Sean Gibson" -> "The Observer" + "Walter Doughty" -> "The Guardian", + "Eric Wadsworth" -> "The Guardian", + "David Newell Smith" -> "The Observer", + "Tony McGrath" -> "The Observer", + "Catherine Shaw" -> "The Observer", + "John Reardon" -> "The Observer", + "Sean Gibson" -> "The Observer" ) // these are people who aren't photographers by trade, but have taken photographs for us. @@ -89,43 +103,43 @@ object MetadataConfig { // them correctly. // TODO: Think about removin these once Picdar is dead. 
val internalStaffPhotographers: Store = List( - "E Hamilton West" -> "The Guardian", - "Harriet St Johnston" -> "The Guardian", - "Lorna Roach" -> "The Guardian", - "Rachel Vere" -> "The Guardian", - "Ken Saunders" -> "The Guardian" + "E Hamilton West" -> "The Guardian", + "Harriet St Johnston" -> "The Guardian", + "Lorna Roach" -> "The Guardian", + "Rachel Vere" -> "The Guardian", + "Ken Saunders" -> "The Guardian" ) - val staffPhotographers: Store = externalStaffPhotographers ++ internalStaffPhotographers + val staffPhotographers: Store = + externalStaffPhotographers ++ internalStaffPhotographers val contractedPhotographers: Store = List( - "Alicia Canter" -> "The Guardian", - "Antonio Olmos" -> "The Guardian", + "Alicia Canter" -> "The Guardian", + "Antonio Olmos" -> "The Guardian", "Christopher Thomond" -> "The Guardian", - "David Levene" -> "The Guardian", - "Eamonn McCabe" -> "The Guardian", - "Graeme Robertson" -> "The Guardian", - "Johanna Parkin" -> "The Guardian", - "Linda Nylind" -> "The Guardian", - "Louise Hagger" -> "The Guardian", - "Martin Godwin" -> "The Guardian", - "Mike Bowers" -> "The Guardian", - "Murdo MacLeod" -> "The Guardian", - "Sarah Lee" -> "The Guardian", - "Tom Jenkins" -> "The Guardian", - "Tristram Kenton" -> "The Guardian", - "Jill Mead" -> "The Guardian", - - "Andy Hall" -> "The Observer", - "Antonio Olmos" -> "The Observer", - "Gary Calton" -> "The Observer", - "Jane Bown" -> "The Observer", - "Jonathan Lovekin" -> "The Observer", - "Karen Robinson" -> "The Observer", + "David Levene" -> "The Guardian", + "Eamonn McCabe" -> "The Guardian", + "Graeme Robertson" -> "The Guardian", + "Johanna Parkin" -> "The Guardian", + "Linda Nylind" -> "The Guardian", + "Louise Hagger" -> "The Guardian", + "Martin Godwin" -> "The Guardian", + "Mike Bowers" -> "The Guardian", + "Murdo MacLeod" -> "The Guardian", + "Sarah Lee" -> "The Guardian", + "Tom Jenkins" -> "The Guardian", + "Tristram Kenton" -> "The Guardian", + "Jill Mead" -> "The Guardian", + "Andy Hall" -> "The Observer", + "Antonio Olmos" -> "The Observer", + "Gary Calton" -> "The Observer", + "Jane Bown" -> "The Observer", + "Jonathan Lovekin" -> "The Observer", + "Karen Robinson" -> "The Observer", "Katherine Anne Rose" -> "The Observer", - "Richard Saker" -> "The Observer", - "Sophia Evans" -> "The Observer", - "Suki Dhanda" -> "The Observer" + "Richard Saker" -> "The Observer", + "Sophia Evans" -> "The Observer", + "Suki Dhanda" -> "The Observer" ) val staffIllustrators = List( @@ -135,35 +149,40 @@ object MetadataConfig { ) val contractIllustrators: Store = List( - "Ben Lamb" -> "The Guardian", - "Andrzej Krauze" -> "The Guardian", - "David Squires" -> "The Guardian", + "Ben Lamb" -> "The Guardian", + "Andrzej Krauze" -> "The Guardian", + "David Squires" -> "The Guardian", "First Dog on the Moon" -> "The Guardian", - "Harry Venning" -> "The Guardian", - "Martin Rowson" -> "The Guardian", - "Matt Kenyon" -> "The Guardian", - "Matthew Blease" -> "The Guardian", - "Nicola Jennings" -> "The Guardian", - "Rosalind Asquith" -> "The Guardian", - "Steve Bell" -> "The Guardian", - "Steven Appleby" -> "The Guardian", - "Ben Jennings" -> "The Guardian", - - "Chris Riddell" -> "The Observer", - "David Foldvari" -> "The Observer", - "David Simonds" -> "The Observer", + "Harry Venning" -> "The Guardian", + "Martin Rowson" -> "The Guardian", + "Matt Kenyon" -> "The Guardian", + "Matthew Blease" -> "The Guardian", + "Nicola Jennings" -> "The Guardian", + "Rosalind Asquith" -> "The Guardian", + "Steve Bell" -> "The 
Guardian", + "Steven Appleby" -> "The Guardian", + "Ben Jennings" -> "The Guardian", + "Chris Riddell" -> "The Observer", + "David Foldvari" -> "The Observer", + "David Simonds" -> "The Observer" ) val allPhotographers = staffPhotographers ++ contractedPhotographers - val externalPhotographersMap = PhotographersList.creditBylineMap(externalStaffPhotographers) - val staffPhotographersMap = PhotographersList.creditBylineMap(staffPhotographers) - val contractPhotographersMap = PhotographersList.creditBylineMap(contractedPhotographers) + val externalPhotographersMap = + PhotographersList.creditBylineMap(externalStaffPhotographers) + val staffPhotographersMap = + PhotographersList.creditBylineMap(staffPhotographers) + val contractPhotographersMap = + PhotographersList.creditBylineMap(contractedPhotographers) val allPhotographersMap = PhotographersList.creditBylineMap(allPhotographers) - val contractIllustratorsMap = PhotographersList.creditBylineMap(contractIllustrators) + val contractIllustratorsMap = + PhotographersList.creditBylineMap(contractIllustrators) val creativeCommonsLicense = List( - "CC BY-4.0", "CC BY-SA-4.0", "CC BY-ND-4.0" + "CC BY-4.0", + "CC BY-SA-4.0", + "CC BY-ND-4.0" ) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/Properties.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/Properties.scala index f2cbbed282..28db130f67 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/Properties.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/Properties.scala @@ -25,7 +25,8 @@ object Properties { def fromStream(stream: InputStream): Map[String, String] = { val props = new java.util.Properties - try props.load(stream) finally stream.close() + try props.load(stream) + finally stream.close() props.asScala.toMap } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/ProviderLoader.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/ProviderLoader.scala index 2820adf65b..833dc783cc 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/ProviderLoader.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/ProviderLoader.scala @@ -11,41 +11,77 @@ import scala.reflect.ClassTag import scala.util.Try import scala.util.control.NonFatal -case class ProviderResources[Resources](configuration: Configuration, resources: Resources) +case class ProviderResources[Resources]( + configuration: Configuration, + resources: Resources +) -class ProviderLoader[ProviderType, ResourcesType](providerDescription: String)(implicit providerTag: ClassTag[ProviderType], resourcesTag: ClassTag[ResourcesType]) extends StrictLogging { +class ProviderLoader[ProviderType, ResourcesType](providerDescription: String)( + implicit + providerTag: ClassTag[ProviderType], + resourcesTag: ClassTag[ResourcesType] +) extends StrictLogging { - private case class ConfigDetails(className: String, - config: Option[Configuration], - resources: ResourcesType, - origin: ConfigOrigin, - path: String) + private case class ConfigDetails( + className: String, + config: Option[Configuration], + resources: ResourcesType, + origin: ConfigOrigin, + path: String + ) - def seqConfigLoader(resources: ResourcesType): ConfigLoader[Seq[ProviderType]] = (config: Config, path: String) => { + def seqConfigLoader( + resources: ResourcesType + ): ConfigLoader[Seq[ProviderType]] = (config: Config, path: String) => { config .getList(path) .iterator() - .asScala.map { configValue => - parseConfigValue(configValue, path, 
resources) - }.map(loadProvider).toList + .asScala + .map { configValue => + parseConfigValue(configValue, path, resources) + } + .map(loadProvider) + .toList } - def singletonConfigLoader(resources: ResourcesType): ConfigLoader[ProviderType] = (config: Config, path: String) => { + def singletonConfigLoader( + resources: ResourcesType + ): ConfigLoader[ProviderType] = (config: Config, path: String) => { val configDetails = parseConfigValue(config.getValue(path), path, resources) loadProvider(configDetails) } - private def parseConfigValue(configValue: ConfigValue, path: String, resources: ResourcesType): ConfigDetails = { + private def parseConfigValue( + configValue: ConfigValue, + path: String, + resources: ResourcesType + ): ConfigDetails = { configValue match { case plainClass if plainClass.valueType == ConfigValueType.STRING => - ConfigDetails(plainClass.unwrapped.asInstanceOf[String], None, resources, plainClass.origin, path) - case withConfig:ConfigObject if validConfigObject(withConfig) => + ConfigDetails( + plainClass.unwrapped.asInstanceOf[String], + None, + resources, + plainClass.origin, + path + ) + case withConfig: ConfigObject if validConfigObject(withConfig) => val config = withConfig.toConfig val className = config.getString("className") val processorConfig = config.getConfig("config") - ConfigDetails(className, Some(Configuration(processorConfig)), resources, withConfig.origin, path) + ConfigDetails( + className, + Some(Configuration(processorConfig)), + resources, + withConfig.origin, + path + ) case _ => - throw new BadValue(configValue.origin, path, s"A ${providerDescription} can either be a class name (string) or object with className (string) and config (object) fields. This ${configValue.valueType} is not valid.") + throw new BadValue( + configValue.origin, + path, + s"A ${providerDescription} can either be a class name (string) or object with className (string) and config (object) fields. This ${configValue.valueType} is not valid." 
+ ) } } @@ -55,47 +91,72 @@ class ProviderLoader[ProviderType, ResourcesType](providerDescription: String)(i } private def loadProvider(details: ConfigDetails): ProviderType = { - logger.info(s"Dynamically loading provider from ${details.className} as specified by config path ${details.path}") - val config = ProviderResources(details.config.getOrElse(Configuration.empty), details.resources) + logger.info( + s"Dynamically loading provider from ${details.className} as specified by config path ${details.path}" + ) + val config = ProviderResources( + details.config.getOrElse(Configuration.empty), + details.resources + ) loadProvider(details.className, config) match { case Right(provider) => provider case Left(error) => - val configError = s"Unable to instantiate ${providerDescription} from config: $error" + val configError = + s"Unable to instantiate ${providerDescription} from config: $error" logger.error(configError) throw new BadValue(details.origin, details.path, configError) } } - def loadProvider(className: String, config: ProviderResources[ResourcesType]): Either[String, ProviderType] = { + def loadProvider( + className: String, + config: ProviderResources[ResourcesType] + ): Either[String, ProviderType] = { for { imageProcessorClass <- loadClass(className) imageProcessorInstance <- instantiate(imageProcessorClass, config) } yield imageProcessorInstance } - private def loadClass(className: String): Either[String, Class[_]] = catchNonFatal(Class.forName(className)) { - case _: ClassNotFoundException => s"Unable to find ${providerDescription} class $className" - case other => - logger.error(s"Error whilst loading $className", other) - s"Unknown error whilst loading $className, check logs" - } + private def loadClass(className: String): Either[String, Class[_]] = + catchNonFatal(Class.forName(className)) { + case _: ClassNotFoundException => + s"Unable to find ${providerDescription} class $className" + case other => + logger.error(s"Error whilst loading $className", other) + s"Unknown error whilst loading $className, check logs" + } trait ProviderClassType - case class ProviderCompanionObject(companionObject: AnyRef) extends ProviderClassType + case class ProviderCompanionObject(companionObject: AnyRef) + extends ProviderClassType case class ProviderConstructor(ctor: Constructor[_]) extends ProviderClassType - private def instantiate(clazz: Class[_], resources: ProviderResources[ResourcesType]): Either[String, ProviderType] = { + private def instantiate( + clazz: Class[_], + resources: ProviderResources[ResourcesType] + ): Either[String, ProviderType] = { for { - providerClassType <- discoverProviderClassType(clazz, resources.configuration.keys.nonEmpty) + providerClassType <- discoverProviderClassType( + clazz, + resources.configuration.keys.nonEmpty + ) instance <- getProviderInstance(providerClassType, resources) castInstance <- castProvider(instance) } yield castInstance } - private def discoverProviderClassType(clazz: Class[_], configProvided: Boolean): Either[String, ProviderClassType] = { + private def discoverProviderClassType( + clazz: Class[_], + configProvided: Boolean + ): Either[String, ProviderClassType] = { Try(clazz.getField("MODULE$").get(clazz)).toOption match { - case Some(companionObject) if configProvided => Left(s"Configuration provided but ${clazz.getCanonicalName} is a companion object and doesn't take configuration.") - case Some(companionObject) => Right(ProviderCompanionObject(companionObject)) + case Some(companionObject) if configProvided => + Left( + 
s"Configuration provided but ${clazz.getCanonicalName} is a companion object and doesn't take configuration." + ) + case Some(companionObject) => + Right(ProviderCompanionObject(companionObject)) case None => for { ctor <- findConstructor(clazz, configProvided) @@ -103,7 +164,10 @@ class ProviderLoader[ProviderType, ResourcesType](providerDescription: String)(i } } - private def findConstructor(clazz: Class[_], configurationProvided: Boolean): Either[String, Constructor[_]] = { + private def findConstructor( + clazz: Class[_], + configurationProvided: Boolean + ): Either[String, Constructor[_]] = { /* if config is provided but the constructor doesn't take config then it violates our contract */ def configViolation(ctor: Constructor[_]): Boolean = { val paramTypes = ctor.getParameterTypes.toList @@ -114,53 +178,75 @@ class ProviderLoader[ProviderType, ResourcesType](providerDescription: String)(i // get all constructors val allConstructors = clazz.getConstructors.toList // get a list of constructors that we know how to use (this should be size one) - val validConstructors: List[Constructor[_]] = allConstructors.filter(validProviderConstructor) + val validConstructors: List[Constructor[_]] = + allConstructors.filter(validProviderConstructor) validConstructors match { - case configViolationConstructor :: Nil if configViolation(configViolationConstructor) => - Left(s"Configuration provided but constructor of ${clazz.getCanonicalName} with args ${constructorParamsString(configViolationConstructor)} doesn't take it.") + case configViolationConstructor :: Nil + if configViolation(configViolationConstructor) => + Left( + s"Configuration provided but constructor of ${clazz.getCanonicalName} with args ${constructorParamsString(configViolationConstructor)} doesn't take it." + ) case singleValidConstructor :: Nil => Right(singleValidConstructor) case otherCombinations => - Left(s"""A provider must have one and only one valid constructors taking arguments of type - |${resourcesTag.runtimeClass.getCanonicalName} or ${classOf[Configuration].getCanonicalName}. + Left( + s"""A provider must have one and only one valid constructors taking arguments of type + |${resourcesTag.runtimeClass.getCanonicalName} or ${classOf[ + Configuration + ].getCanonicalName}. 
|${clazz.getCanonicalName} has ${otherCombinations.length} constructors: - |${otherCombinations.map(constructorParamsString)}""".stripMargin) + |${otherCombinations.map( + constructorParamsString + )}""".stripMargin + ) } } - private def validProviderConstructor: Constructor[_] => Boolean = { constructor => - val paramTypes = constructor.getParameterTypes.toList - // if the same type appears twice then we don't know what to do - val noDuplicates = paramTypes.length == paramTypes.toSet.size - // only pick constructors that take types of resources or config - val onlyKnownTypes = paramTypes.forall { paramType => - paramType == resourcesTag.runtimeClass || paramType == classOf[Configuration] - } - noDuplicates && onlyKnownTypes + private def validProviderConstructor: Constructor[_] => Boolean = { + constructor => + val paramTypes = constructor.getParameterTypes.toList + // if the same type appears twice then we don't know what to do + val noDuplicates = paramTypes.length == paramTypes.toSet.size + // only pick constructors that take types of resources or config + val onlyKnownTypes = paramTypes.forall { paramType => + paramType == resourcesTag.runtimeClass || paramType == classOf[ + Configuration + ] + } + noDuplicates && onlyKnownTypes } - private def getProviderInstance(providerType: ProviderClassType, resources: ProviderResources[ResourcesType]): Either[String, ProviderType] = { + private def getProviderInstance( + providerType: ProviderClassType, + resources: ProviderResources[ResourcesType] + ): Either[String, ProviderType] = { for { instance <- providerType match { case ProviderCompanionObject(companionObject) => Right(companionObject) - case ProviderConstructor(ctor) => catchNonFatal(ctor.newInstance(paramsFor(ctor, resources):_*)){ - case ite: InvocationTargetException => - val cause = Option(ite.getCause) - val error = s"${cause.map(_.getClass.getName).getOrElse("Unknown exception")} thrown when executing constructor ${ctor.getClass.getCanonicalName}${constructorParamsString(ctor)}. Search logs for stack trace." - logger.error(error, cause.getOrElse(ite)) - error - case NonFatal(other) => - val error = s"${other.getClass.getName} thrown whilst creating a new instance using constructor ${ctor.getClass.getCanonicalName}${constructorParamsString(ctor)}. Search logs for stack trace." - logger.error(error, other) - error - } + case ProviderConstructor(ctor) => + catchNonFatal(ctor.newInstance(paramsFor(ctor, resources): _*)) { + case ite: InvocationTargetException => + val cause = Option(ite.getCause) + val error = + s"${cause.map(_.getClass.getName).getOrElse("Unknown exception")} thrown when executing constructor ${ctor.getClass.getCanonicalName}${constructorParamsString(ctor)}. Search logs for stack trace." + logger.error(error, cause.getOrElse(ite)) + error + case NonFatal(other) => + val error = + s"${other.getClass.getName} thrown whilst creating a new instance using constructor ${ctor.getClass.getCanonicalName}${constructorParamsString(ctor)}. Search logs for stack trace." 
+ logger.error(error, other) + error + } } castInstance <- castProvider(instance) } yield castInstance } - private def paramsFor(ctor: Constructor[_], resources: ProviderResources[ResourcesType]): Array[Object] = { + private def paramsFor( + ctor: Constructor[_], + resources: ProviderResources[ResourcesType] + ): Array[Object] = { val array = new Array[Object](ctor.getParameterCount) ctor.getParameters.zipWithIndex.foreach { case (param, index) => @@ -178,13 +264,18 @@ class ProviderLoader[ProviderType, ResourcesType](providerDescription: String)(i if (providerTag.runtimeClass.isAssignableFrom(instance.getClass)) { Right(instance.asInstanceOf[ProviderType]) } else { - Left(s"Failed to cast ${instance.getClass.getCanonicalName} to a ${providerTag.runtimeClass.getCanonicalName}") + Left( + s"Failed to cast ${instance.getClass.getCanonicalName} to a ${providerTag.runtimeClass.getCanonicalName}" + ) } } - private def constructorParamsString(ctor: Constructor[_]): String = ctor.getParameterTypes.map(_.getCanonicalName).mkString("(", ", ", ")") + private def constructorParamsString(ctor: Constructor[_]): String = + ctor.getParameterTypes.map(_.getCanonicalName).mkString("(", ", ", ")") - private def catchNonFatal[T](block: => T)(error: Throwable => String): Either[String, T] = { + private def catchNonFatal[T]( + block: => T + )(error: Throwable => String): Either[String, T] = { try { Right(block) } catch { diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/Services.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/Services.scala index 5df7ae6b14..e6a531b65d 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/Services.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/Services.scala @@ -1,17 +1,17 @@ package com.gu.mediaservice.lib.config case class ServiceHosts( - kahunaPrefix: String, - apiPrefix: String, - loaderPrefix: String, - cropperPrefix: String, - adminToolsPrefix: String, - metadataPrefix: String, - imgopsPrefix: String, - usagePrefix: String, - collectionsPrefix: String, - leasesPrefix: String, - authPrefix: String + kahunaPrefix: String, + apiPrefix: String, + loaderPrefix: String, + cropperPrefix: String, + adminToolsPrefix: String, + metadataPrefix: String, + imgopsPrefix: String, + usagePrefix: String, + collectionsPrefix: String, + leasesPrefix: String, + authPrefix: String ) object ServiceHosts { @@ -36,30 +36,34 @@ object ServiceHosts { } } -class Services(val domainRoot: String, hosts: ServiceHosts, corsAllowedOrigins: Set[String]) { - val kahunaHost: String = s"${hosts.kahunaPrefix}$domainRoot" - val apiHost: String = s"${hosts.apiPrefix}$domainRoot" - val loaderHost: String = s"${hosts.loaderPrefix}$domainRoot" - val cropperHost: String = s"${hosts.cropperPrefix}$domainRoot" - val metadataHost: String = s"${hosts.metadataPrefix}$domainRoot" - val imgopsHost: String = s"${hosts.imgopsPrefix}$domainRoot" - val usageHost: String = s"${hosts.usagePrefix}$domainRoot" +class Services( + val domainRoot: String, + hosts: ServiceHosts, + corsAllowedOrigins: Set[String] +) { + val kahunaHost: String = s"${hosts.kahunaPrefix}$domainRoot" + val apiHost: String = s"${hosts.apiPrefix}$domainRoot" + val loaderHost: String = s"${hosts.loaderPrefix}$domainRoot" + val cropperHost: String = s"${hosts.cropperPrefix}$domainRoot" + val metadataHost: String = s"${hosts.metadataPrefix}$domainRoot" + val imgopsHost: String = s"${hosts.imgopsPrefix}$domainRoot" + val usageHost: String = s"${hosts.usagePrefix}$domainRoot" val 
collectionsHost: String = s"${hosts.collectionsPrefix}$domainRoot" - val leasesHost: String = s"${hosts.leasesPrefix}$domainRoot" - val authHost: String = s"${hosts.authPrefix}$domainRoot" - val adminToolsHost: String = s"${hosts.adminToolsPrefix}$domainRoot" + val leasesHost: String = s"${hosts.leasesPrefix}$domainRoot" + val authHost: String = s"${hosts.authPrefix}$domainRoot" + val adminToolsHost: String = s"${hosts.adminToolsPrefix}$domainRoot" - val kahunaBaseUri = baseUri(kahunaHost) - val apiBaseUri = baseUri(apiHost) - val loaderBaseUri = baseUri(loaderHost) - val cropperBaseUri = baseUri(cropperHost) - val metadataBaseUri = baseUri(metadataHost) - val imgopsBaseUri = baseUri(imgopsHost) - val usageBaseUri = baseUri(usageHost) + val kahunaBaseUri = baseUri(kahunaHost) + val apiBaseUri = baseUri(apiHost) + val loaderBaseUri = baseUri(loaderHost) + val cropperBaseUri = baseUri(cropperHost) + val metadataBaseUri = baseUri(metadataHost) + val imgopsBaseUri = baseUri(imgopsHost) + val usageBaseUri = baseUri(usageHost) val collectionsBaseUri = baseUri(collectionsHost) - val leasesBaseUri = baseUri(leasesHost) - val authBaseUri = baseUri(authHost) - val adminToolsBaseUri = baseUri(adminToolsHost) + val leasesBaseUri = baseUri(leasesHost) + val authBaseUri = baseUri(authHost) + val adminToolsBaseUri = baseUri(adminToolsHost) val guardianWitnessBaseUri: String = "https://n0ticeapis.com" diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/StageIdentifier.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/StageIdentifier.scala index 49f2d2e796..7f539f2a98 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/config/StageIdentifier.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/config/StageIdentifier.scala @@ -6,7 +6,9 @@ import scala.io.Source.fromFile class StageIdentifier { final val stage: String = - loadStageFile("/etc/grid/stage") orElse loadStageFile("/etc/gu/stage") getOrElse "DEV" + loadStageFile("/etc/grid/stage") orElse loadStageFile( + "/etc/gu/stage" + ) getOrElse "DEV" val isProd: Boolean = stage == "PROD" val isDev: Boolean = stage == "DEV" @@ -15,11 +17,12 @@ class StageIdentifier { val file = new File(fileName) if (file.exists) { val source = fromFile(file) - val stage = try { - source.mkString.trim - } finally { - source.close() - } + val stage = + try { + source.mkString.trim + } finally { + source.close() + } Some(stage.trim) } else None } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/discovery/EC2.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/discovery/EC2.scala index 59782e853c..86a1c3b6c5 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/discovery/EC2.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/discovery/EC2.scala @@ -1,7 +1,11 @@ package com.gu.mediaservice.lib.discovery import com.amazonaws.services.ec2.AmazonEC2 -import com.amazonaws.services.ec2.model.{DescribeInstancesRequest, Filter, InstanceStateName} +import com.amazonaws.services.ec2.model.{ + DescribeInstancesRequest, + Filter, + InstanceStateName +} import com.gu.mediaservice.lib.logging.GridLogging import scala.collection.JavaConverters._ @@ -10,16 +14,26 @@ import scala.util.Random object EC2 extends GridLogging { @annotation.tailrec - def findElasticsearchHostByTags(client: AmazonEC2, tags: Map[String, Seq[String]]): String = { - val instances = client.describeInstances(new DescribeInstancesRequest().withFilters( - new Filter("instance-state-name", 
List(InstanceStateName.Running.toString).asJava) +: - tagFilters(tags): _* - )) + def findElasticsearchHostByTags( + client: AmazonEC2, + tags: Map[String, Seq[String]] + ): String = { + val instances = client.describeInstances( + new DescribeInstancesRequest().withFilters( + new Filter( + "instance-state-name", + List(InstanceStateName.Running.toString).asJava + ) +: + tagFilters(tags): _* + ) + ) val hosts = instances.getReservations.asScala .flatMap(_.getInstances.asScala) .map(_.getPublicDnsName) - logger.info(s"Available Elasticsearch hosts in EC2: [${hosts.mkString(", ")}]") + logger.info( + s"Available Elasticsearch hosts in EC2: [${hosts.mkString(", ")}]" + ) Random.shuffle(hosts).headOption match { case None => @@ -33,6 +47,7 @@ object EC2 extends GridLogging { } def tagFilters(tags: Map[String, Seq[String]]): List[Filter] = - for ((key, values) <- tags.toList) yield new Filter(s"tag:$key", values.asJava) + for ((key, values) <- tags.toList) + yield new Filter(s"tag:$key", values.asJava) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchClient.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchClient.scala index b420646b05..351f867967 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchClient.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchClient.scala @@ -11,9 +11,11 @@ import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.concurrent.{Await, Future} -case class ElasticSearchImageCounts(catCount: Long, - searchResponseCount: Long, - indexStatsCount: Long) +case class ElasticSearchImageCounts( + catCount: Long, + searchResponseCount: Long, + indexStatsCount: Long +) trait ElasticSearchClient extends ElasticSearchExecutions with GridLogging { @@ -51,44 +53,62 @@ trait ElasticSearchClient extends ElasticSearchExecutions with GridLogging { def waitUntilHealthy(): Unit = { logger.info("waiting for cluster health to be green") - val clusterHealthResponse = Await.result(client.execute(clusterHealth().waitForStatus(HealthStatus.Green).timeout("25s")), thirtySeconds) + val clusterHealthResponse = Await.result( + client.execute( + clusterHealth().waitForStatus(HealthStatus.Green).timeout("25s") + ), + thirtySeconds + ) logger.info("await cluster health response: " + clusterHealthResponse) if (clusterHealthResponse.isError) { - throw new RuntimeException("cluster health could not be confirmed as green") // TODO Exception isn't great but our callers aren't looking at our return value + throw new RuntimeException( + "cluster health could not be confirmed as green" + ) // TODO Exception isn't great but our callers aren't looking at our return value } } def healthCheck(): Future[Boolean] = { implicit val logMarker = MarkerMap() val request = search(imagesAlias) limit 0 - executeAndLog(request, "Healthcheck").map { _ => true}.recover { case _ => false} + executeAndLog(request, "Healthcheck").map { _ => true }.recover { case _ => + false + } } - def countImages(): Future[ElasticSearchImageCounts] = { implicit val logMarker = MarkerMap() - val queryCatCount = catCount("images") // document count only of index including live documents, not deleted documents which have not yet been removed by the merge process - val queryImageSearch = search("images") limit 0 // hits that match the query defined in the request - val queryStats = indexStats("images") // total accumulated values of an index for both 
primary and replica shards + val queryCatCount = catCount( + "images" + ) // document count only of index including live documents, not deleted documents which have not yet been removed by the merge process + val queryImageSearch = + search( + "images" + ) limit 0 // hits that match the query defined in the request + val queryStats = indexStats( + "images" + ) // total accumulated values of an index for both primary and replica shards for { catCount <- executeAndLog(queryCatCount, "Images cat count") imageSearch <- executeAndLog(queryImageSearch, "Images search") stats <- executeAndLog(queryStats, "Stats aggregation") - } yield - ElasticSearchImageCounts(catCount.result.count, - imageSearch.result.hits.total.value, - stats.result.indices("images").total.docs.count) + } yield ElasticSearchImageCounts( + catCount.result.count, + imageSearch.result.hits.total.value, + stats.result.indices("images").total.docs.count + ) } def ensureIndexExists(index: String): Unit = { logger.info("Checking index exists…") - val eventualIndexExistsResponse: Future[Response[IndexExistsResponse]] = client.execute { - indexExists(index) - } + val eventualIndexExistsResponse: Future[Response[IndexExistsResponse]] = + client.execute { + indexExists(index) + } - val indexExistsResponse = Await.result(eventualIndexExistsResponse, tenSeconds) + val indexExistsResponse = + Await.result(eventualIndexExistsResponse, tenSeconds) logger.info("Got index exists result: " + indexExistsResponse.result) logger.info("Index exists: " + indexExistsResponse.result.exists) @@ -98,37 +118,45 @@ trait ElasticSearchClient extends ElasticSearchExecutions with GridLogging { } def createImageIndex(index: String): Unit = { - logger.info(s"Creating image index '$index' with $shards shards and $replicas replicas") - - val eventualCreateIndexResponse: Future[Response[CreateIndexResponse]] = client.execute { - // File metadata indexing creates a potentially unbounded number of dynamic files; Elastic 1 had no limit. - // Elastic 6 limits it over index disk usage concerns. - // When this limit is hit, no new images with previously unseen fields can be indexed. - // https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html - // Do we really need to store all raw metadata in the index; only taking a bounded subset would greatly reduce the size of the index and - // remove the risk of field exhaustion bug striking in productions - val maximumFieldsOverride = Map("mapping.total_fields.limit" -> Integer.MAX_VALUE) - - // Deep pagination. It's fairly easy to scroll the grid past the default Elastic 6 pagination limit. - // Elastic start talking about why this is problematic in the 2.x docs and by 6 it's been defaulted to 10k. - // https://www.elastic.co/guide/en/elasticsearch/guide/current/pagination.html - // Override to 100,000 to preserve the existing behaviour without comprising the Elastic cluster. - // The grid UI should consider scrolling by datetime offsets if possible. - val maximumPaginationOverride = Map("max_result_window" -> 25000) - - val nonRecommendenedIndexSettingOverrides = maximumFieldsOverride ++ maximumPaginationOverride - logger.warn("Applying non recommended index setting overrides; please consider altering the application " + - "to remove the need for these: " + nonRecommendenedIndexSettingOverrides) - - createIndex(index). - mapping(Mappings.imageMapping). - analysis(IndexSettings.analysis). - settings(nonRecommendenedIndexSettingOverrides). - shards(shards). 
- replicas(replicas) - } - - val createIndexResponse = Await.result(eventualCreateIndexResponse, tenSeconds) + logger.info( + s"Creating image index '$index' with $shards shards and $replicas replicas" + ) + + val eventualCreateIndexResponse: Future[Response[CreateIndexResponse]] = + client.execute { + // File metadata indexing creates a potentially unbounded number of dynamic files; Elastic 1 had no limit. + // Elastic 6 limits it over index disk usage concerns. + // When this limit is hit, no new images with previously unseen fields can be indexed. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html + // Do we really need to store all raw metadata in the index; only taking a bounded subset would greatly reduce the size of the index and + // remove the risk of field exhaustion bug striking in productions + val maximumFieldsOverride = + Map("mapping.total_fields.limit" -> Integer.MAX_VALUE) + + // Deep pagination. It's fairly easy to scroll the grid past the default Elastic 6 pagination limit. + // Elastic start talking about why this is problematic in the 2.x docs and by 6 it's been defaulted to 10k. + // https://www.elastic.co/guide/en/elasticsearch/guide/current/pagination.html + // Override to 100,000 to preserve the existing behaviour without comprising the Elastic cluster. + // The grid UI should consider scrolling by datetime offsets if possible. + val maximumPaginationOverride = Map("max_result_window" -> 25000) + + val nonRecommendenedIndexSettingOverrides = + maximumFieldsOverride ++ maximumPaginationOverride + logger.warn( + "Applying non recommended index setting overrides; please consider altering the application " + + "to remove the need for these: " + nonRecommendenedIndexSettingOverrides + ) + + createIndex(index) + .mapping(Mappings.imageMapping) + .analysis(IndexSettings.analysis) + .settings(nonRecommendenedIndexSettingOverrides) + .shards(shards) + .replicas(replicas) + } + + val createIndexResponse = + Await.result(eventualCreateIndexResponse, tenSeconds) logger.info("Got index create result: " + createIndexResponse) if (createIndexResponse.isError) { @@ -148,25 +176,35 @@ trait ElasticSearchClient extends ElasticSearchExecutions with GridLogging { def assignAliasTo(index: String): Unit = { logger.info(s"Assigning alias $imagesAlias to $index") - val aliasActionResponse = Await.result(client.execute { - aliases( - addAlias(imagesAlias, index) - ) - }, tenSeconds) + val aliasActionResponse = Await.result( + client.execute { + aliases( + addAlias(imagesAlias, index) + ) + }, + tenSeconds + ) logger.info("Got alias action response: " + aliasActionResponse) } - def changeAliasTo(newIndex: String, oldIndex: String, alias: String = imagesAlias): Unit = { + def changeAliasTo( + newIndex: String, + oldIndex: String, + alias: String = imagesAlias + ): Unit = { logger.info(s"Assigning alias $alias to $newIndex") - val aliasActionResponse = Await.result(client.execute { - aliases( - removeAlias(alias, oldIndex), - addAlias(alias, newIndex) - ) - }, tenSeconds) + val aliasActionResponse = Await.result( + client.execute { + aliases( + removeAlias(alias, oldIndex), + addAlias(alias, newIndex) + ) + }, + tenSeconds + ) logger.info("Got alias action response: " + aliasActionResponse) } - def removeAliasFrom(index: String) = ??? + def removeAliasFrom(index: String) = ??? 
} diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchConfig.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchConfig.scala index 0bd5432f79..6fe22a044c 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchConfig.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchConfig.scala @@ -1,3 +1,9 @@ package com.gu.mediaservice.lib.elasticsearch -case class ElasticSearchConfig(alias: String, url: String, cluster: String, shards: Int, replicas: Int) +case class ElasticSearchConfig( + alias: String, + url: String, + cluster: String, + shards: Int, + replicas: Int +) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchException.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchException.scala index f34953a5f1..7987ed5dd4 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchException.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchException.scala @@ -12,46 +12,82 @@ trait ElasticSearchError { object ElasticSearchException { - def causes(c: ElasticError.CausedBy):List[(String, Any)] = { + def causes(c: ElasticError.CausedBy): List[(String, Any)] = { val script = c.other("script").getOrElse("no script") val lang = c.other("lang").getOrElse("no language") - List("causedBy" -> c.toString(), "scriptStack" -> c.scriptStack.mkString("\n"), "script" -> script, "lang" -> lang ) + List( + "causedBy" -> c.toString(), + "scriptStack" -> c.scriptStack.mkString("\n"), + "script" -> script, + "lang" -> lang + ) } def apply(e: ElasticError): Exception with ElasticSearchError = { e match { - case ElasticError(t, r, _, _, _, Seq(), None, _, _, _) => // No root causes provided. - new Exception(s"query failed because: $r type: $t") with ElasticSearchError { + case ElasticError( + t, + r, + _, + _, + _, + Seq(), + None, + _, + _, + _ + ) => // No root causes provided. 
+ new Exception(s"query failed because: $r type: $t") + with ElasticSearchError { override def error: ElasticError = e - override def markerContents: Map[String, Any] = Map("reason" -> r, "type" -> t) + override def markerContents: Map[String, Any] = + Map("reason" -> r, "type" -> t) } case ElasticError(t, r, _, _, _, Seq(), Some(c), _, _, _) => - new Exception(s"query failed because: $r type: $t caused by $c") with ElasticSearchError { + new Exception(s"query failed because: $r type: $t caused by $c") + with ElasticSearchError { override def error: ElasticError = e - override def markerContents: Map[String, Any] = (List("reason" -> r, "type" -> t) ::: causes(c)).toMap + override def markerContents: Map[String, Any] = + (List("reason" -> r, "type" -> t) ::: causes(c)).toMap } case ElasticError(t, r, _, _, _, s, None, _, _, _) => - new Exception(s"query failed because: $r type: $t root cause ${s.mkString(",\n ")}") with ElasticSearchError { + new Exception( + s"query failed because: $r type: $t root cause ${s.mkString(",\n ")}" + ) with ElasticSearchError { override def error: ElasticError = e - override def markerContents: Map[String, Any] = Map("reason" -> r, "type" -> t, "rootCause" -> s.mkString(",\n")) + override def markerContents: Map[String, Any] = + Map("reason" -> r, "type" -> t, "rootCause" -> s.mkString(",\n")) } case ElasticError(t, r, _, _, _, s, Some(c), _, _, _) => - new Exception(s"query failed because: $r type: $t root cause ${s.mkString(", ")}, caused by $c") with ElasticSearchError { + new Exception( + s"query failed because: $r type: $t root cause ${s.mkString(", ")}, caused by $c" + ) with ElasticSearchError { override def error: ElasticError = e - override def markerContents: Map[String, Any] = (List("reason" -> r, "type" -> t, "rootCause" -> s.mkString(",\n"), "causedBy" -> c.toString()) ::: causes(c)).toMap + override def markerContents: Map[String, Any] = (List( + "reason" -> r, + "type" -> t, + "rootCause" -> s.mkString(",\n"), + "causedBy" -> c.toString() + ) ::: causes(c)).toMap } - case _ => new Exception(s"query failed because: unknown error") with ElasticSearchError { - override def error: ElasticError = e + case _ => + new Exception(s"query failed because: unknown error") + with ElasticSearchError { + override def error: ElasticError = e - override def markerContents: Map[String, Any] = Map("reason" -> "unknown Elastic Search error") - } + override def markerContents: Map[String, Any] = Map( + "reason" -> "unknown Elastic Search error" + ) + } } } - def unapply(arg: ElasticSearchError): Option[(ElasticError, LogMarker)] = Some((arg.error, MarkerMap(arg.markerContents))) + def unapply(arg: ElasticSearchError): Option[(ElasticError, LogMarker)] = + Some((arg.error, MarkerMap(arg.markerContents))) } -case object ElasticNotFoundException extends Exception(s"Elastic Search Document Not Found") +case object ElasticNotFoundException + extends Exception(s"Elastic Search Document Not Found") diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchExecutions.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchExecutions.scala index 229f3f3064..a4b044eace 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchExecutions.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/ElasticSearchExecutions.scala @@ -9,13 +9,17 @@ trait ElasticSearchExecutions extends GridLogging { def client: ElasticClient - def executeAndLog[T, U](request: T, message: 
String, notFoundSuccessful: Boolean = false)(implicit - functor: Functor[Future], - executor: Executor[Future], - handler: Handler[T, U], - manifest: Manifest[U], - executionContext: ExecutionContext, - logMarkers: LogMarker + def executeAndLog[T, U]( + request: T, + message: String, + notFoundSuccessful: Boolean = false + )(implicit + functor: Functor[Future], + executor: Executor[Future], + handler: Handler[T, U], + manifest: Manifest[U], + executionContext: ExecutionContext, + logMarkers: LogMarker ): Future[Response[U]] = { val stopwatch = Stopwatch.start @@ -23,36 +27,53 @@ trait ElasticSearchExecutions extends GridLogging { case Success(r) => r.isSuccess match { case true => Success(r) - case false => r.status match { - case 404 if notFoundSuccessful => { - logger.warn(s"No image found for $message.") - Success(r) + case false => + r.status match { + case 404 if notFoundSuccessful => { + logger.warn(s"No image found for $message.") + Success(r) + } + case 404 => Failure(ElasticNotFoundException) + case _ => Failure(ElasticSearchException(r.error)) } - case 404 => Failure(ElasticNotFoundException) - case _ => Failure(ElasticSearchException(r.error)) - } } case Failure(f) => Failure(f) } result.foreach { r => val elapsed = stopwatch.elapsed - logger.info(combineMarkers(logMarkers, elapsed), s"$message - query returned successfully in ${elapsed.toMillis} ms") + logger.info( + combineMarkers(logMarkers, elapsed), + s"$message - query returned successfully in ${elapsed.toMillis} ms" + ) } result.failed.foreach { e => val elapsed = stopwatch.elapsed e match { - case ElasticNotFoundException => logger.error( - combineMarkers(logMarkers, elapsed, MarkerMap(Map("reason" -> "ElasticNotFoundException"))), - s"$message - query failed: Document not Found" - ) + case ElasticNotFoundException => + logger.error( + combineMarkers( + logMarkers, + elapsed, + MarkerMap(Map("reason" -> "ElasticNotFoundException")) + ), + s"$message - query failed: Document not Found" + ) case ElasticSearchException(error, marker) => - logger.error(combineMarkers(logMarkers, elapsed, marker), s"$message - query failed", e) + logger.error( + combineMarkers(logMarkers, elapsed, marker), + s"$message - query failed", + e + ) case _ => logger.error( - combineMarkers(logMarkers, elapsed, MarkerMap(Map("reason" -> "unknown es error"))), - s"$message - query failed", + combineMarkers( + logMarkers, + elapsed, + MarkerMap(Map("reason" -> "unknown es error")) + ), + s"$message - query failed", e ) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/IndexSettings.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/IndexSettings.scala index 7daa237cbb..4482e693b2 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/IndexSettings.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/IndexSettings.scala @@ -1,7 +1,18 @@ package com.gu.mediaservice.lib.elasticsearch -import com.sksamuel.elastic4s.requests.analysis.{Analysis, CustomAnalyzer, PathHierarchyTokenizer, StandardTokenizer, StemmerTokenFilter, StopTokenFilter, TokenFilter} -import com.sksamuel.elastic4s.requests.analyzers.{AsciiFoldingTokenFilter, LowercaseTokenFilter} +import com.sksamuel.elastic4s.requests.analysis.{ + Analysis, + CustomAnalyzer, + PathHierarchyTokenizer, + StandardTokenizer, + StemmerTokenFilter, + StopTokenFilter, + TokenFilter +} +import com.sksamuel.elastic4s.requests.analyzers.{ + AsciiFoldingTokenFilter, + LowercaseTokenFilter +} import 
org.elasticsearch.index.analysis.ASCIIFoldingTokenFilterFactory object IndexSettings { @@ -25,7 +36,10 @@ object IndexSettings { // I (Justin) don't think we need to specify these, but can just refer to them by name (below) // LowercaseTokenFilter, // AsciiFoldingTokenFilter, - StemmerTokenFilter(name = english_possessive_stemmer, lang = "possessive_english"), + StemmerTokenFilter( + name = english_possessive_stemmer, + lang = "possessive_english" + ), StopTokenFilter(name = gu_stopwords, stopwords = Seq("_english_")), StemmerTokenFilter(name = s_stemmer, lang = "minimal_english") ) @@ -55,7 +69,7 @@ object IndexSettings { Analysis( analyzers, tokenizers, - filters, + filters ) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/Mappings.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/Mappings.scala index d9b645befb..76fdb6adab 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/Mappings.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/elasticsearch/Mappings.scala @@ -1,8 +1,16 @@ package com.gu.mediaservice.lib.elasticsearch import com.sksamuel.elastic4s.ElasticDsl._ -import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.{DynamicMapping, DynamicTemplateRequest} -import com.sksamuel.elastic4s.requests.mappings.{FieldDefinition, MappingDefinition, NestedField, ObjectField} +import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.{ + DynamicMapping, + DynamicTemplateRequest +} +import com.sksamuel.elastic4s.requests.mappings.{ + FieldDefinition, + MappingDefinition, + NestedField, + ObjectField +} import org.yaml.snakeyaml.introspector.FieldProperty import play.api.libs.json.{JsObject, Json} @@ -19,17 +27,24 @@ object Mappings { val maximumBytesOfKeywordInUnderlyingLuceneIndex = 32766 // Unicode characters require more than 1 byte so allow some head room - val maximumStringLengthToStore = (maximumBytesOfKeywordInUnderlyingLuceneIndex * .9).toInt - - dynamicTemplate("file_metadata_fields_as_keywords"). - mapping(dynamicKeywordField().index(false).store(true).ignoreAbove(maximumStringLengthToStore)). - pathMatch("fileMetadata.*").matchMappingType("string") + val maximumStringLengthToStore = + (maximumBytesOfKeywordInUnderlyingLuceneIndex * .9).toInt + + dynamicTemplate("file_metadata_fields_as_keywords") + .mapping( + dynamicKeywordField() + .index(false) + .store(true) + .ignoreAbove(maximumStringLengthToStore) + ) + .pathMatch("fileMetadata.*") + .matchMappingType("string") } def storedJsonObjectTemplate: DynamicTemplateRequest = { - dynamicTemplate("stored_json_object_template"). - mapping(dynamicType().index(false).store(true)). 
- pathMatch("fileMetadata.*") + dynamicTemplate("stored_json_object_template") + .mapping(dynamicType().index(false).store(true)) + .pathMatch("fileMetadata.*") } MappingDefinition( @@ -58,8 +73,12 @@ object Mappings { simpleSuggester("suggestMetadataCredit"), usagesMapping("usages"), keywordField("usagesPlatform"), - keywordField("usagesStatus"), // TODO ES1 include_in_parent emulated with explict copy_to rollup field for nested field which is also used for image filtering - dateField("usagesLastModified"), // TODO ES1 include_in_parent emulated with explict copy_to rollup field for nested field which is also used for image filtering + keywordField( + "usagesStatus" + ), // TODO ES1 include_in_parent emulated with explict copy_to rollup field for nested field which is also used for image filtering + dateField( + "usagesLastModified" + ), // TODO ES1 include_in_parent emulated with explict copy_to rollup field for nested field which is also used for image filtering leasesMapping("leases"), collectionMapping("collections") ) @@ -79,66 +98,78 @@ object Mappings { dimensionsMapping("dimensions") ) - def metadataMapping(name: String): ObjectField = nonDynamicObjectField(name).fields( - dateField("dateTaken"), - sStemmerAnalysed("description"), - standardAnalysed("byline").copyTo("metadata.englishAnalysedCatchAll"), - standardAnalysed("bylineTitle"), - sStemmerAnalysed("title"), - keywordField("credit").copyTo("metadata.englishAnalysedCatchAll"), - keywordField("creditUri"), - standardAnalysed("copyright"), - standardAnalysed("suppliersReference").copyTo("metadata.englishAnalysedCatchAll"), - keywordField("source").copyTo("metadata.englishAnalysedCatchAll"), - nonAnalysedList("keywords").copyTo("metadata.englishAnalysedCatchAll"), - nonAnalysedList("subjects"), - keywordField("specialInstructions"), - standardAnalysed("subLocation").copyTo("metadata.englishAnalysedCatchAll"), - standardAnalysed("city").copyTo("metadata.englishAnalysedCatchAll"), - standardAnalysed("state").copyTo("metadata.englishAnalysedCatchAll"), - standardAnalysed("country").copyTo("metadata.englishAnalysedCatchAll"), - nonAnalysedList("peopleInImage").copyTo("metadata.englishAnalysedCatchAll"), - sStemmerAnalysed("englishAnalysedCatchAll") - ) + def metadataMapping(name: String): ObjectField = + nonDynamicObjectField(name).fields( + dateField("dateTaken"), + sStemmerAnalysed("description"), + standardAnalysed("byline").copyTo("metadata.englishAnalysedCatchAll"), + standardAnalysed("bylineTitle"), + sStemmerAnalysed("title"), + keywordField("credit").copyTo("metadata.englishAnalysedCatchAll"), + keywordField("creditUri"), + standardAnalysed("copyright"), + standardAnalysed("suppliersReference").copyTo( + "metadata.englishAnalysedCatchAll" + ), + keywordField("source").copyTo("metadata.englishAnalysedCatchAll"), + nonAnalysedList("keywords").copyTo("metadata.englishAnalysedCatchAll"), + nonAnalysedList("subjects"), + keywordField("specialInstructions"), + standardAnalysed("subLocation").copyTo( + "metadata.englishAnalysedCatchAll" + ), + standardAnalysed("city").copyTo("metadata.englishAnalysedCatchAll"), + standardAnalysed("state").copyTo("metadata.englishAnalysedCatchAll"), + standardAnalysed("country").copyTo("metadata.englishAnalysedCatchAll"), + nonAnalysedList("peopleInImage").copyTo( + "metadata.englishAnalysedCatchAll" + ), + sStemmerAnalysed("englishAnalysedCatchAll") + ) - def usageRightsMapping(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("category"), - 
standardAnalysed("restrictions"), - keywordField("supplier"), - keywordField("suppliersCollection"), - standardAnalysed("photographer"), - keywordField("publication"), - keywordField("creator"), - keywordField("licence"), - keywordField("source"), - keywordField("contentLink"), - standardAnalysed("suppliers") - ) + def usageRightsMapping(name: String): ObjectField = + nonDynamicObjectField(name).fields( + keywordField("category"), + standardAnalysed("restrictions"), + keywordField("supplier"), + keywordField("suppliersCollection"), + standardAnalysed("photographer"), + keywordField("publication"), + keywordField("creator"), + keywordField("licence"), + keywordField("source"), + keywordField("contentLink"), + standardAnalysed("suppliers") + ) - def syndicationRightsPropertiesMapping(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("propertyCode"), - dateField("expiresOn"), - keywordField("value") - ) + def syndicationRightsPropertiesMapping(name: String): ObjectField = + nonDynamicObjectField(name).fields( + keywordField("propertyCode"), + dateField("expiresOn"), + keywordField("value") + ) - def syndicationRightsListMapping(name: String) = nonDynamicObjectField(name).fields( - keywordField("rightCode"), - booleanField("acquired"), - syndicationRightsPropertiesMapping("properties") - ) + def syndicationRightsListMapping(name: String) = + nonDynamicObjectField(name).fields( + keywordField("rightCode"), + booleanField("acquired"), + syndicationRightsPropertiesMapping("properties") + ) - def suppliersMapping(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("supplierId"), - keywordField("supplierName"), - booleanField("prAgreement") - ) + def suppliersMapping(name: String): ObjectField = + nonDynamicObjectField(name).fields( + keywordField("supplierId"), + keywordField("supplierName"), + booleanField("prAgreement") + ) - def syndicationRightsMapping(name: String) = nonDynamicObjectField(name).fields( - dateField("published"), - suppliersMapping("suppliers"), - syndicationRightsListMapping("rights"), - booleanField("isInferred") - ) + def syndicationRightsMapping(name: String) = + nonDynamicObjectField(name).fields( + dateField("published"), + suppliersMapping("suppliers"), + syndicationRightsListMapping("rights"), + booleanField("isInferred") + ) def exportsMapping(name: String) = nonDynamicObjectField(name).fields( keywordField("id"), @@ -190,9 +221,10 @@ object Mappings { photoshootMapping("photoshoot") ) - def uploadInfoMapping(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("filename") - ) + def uploadInfoMapping(name: String): ObjectField = + nonDynamicObjectField(name).fields( + keywordField("filename") + ) def usageReference(name: String): ObjectField = { nonDynamicObjectField(name).fields( @@ -227,28 +259,31 @@ object Mappings { ) } - def digitalUsageMetadata(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("webTitle"), - keywordField("webUrl"), - keywordField("sectionId"), - keywordField("composerUrl") - ) + def digitalUsageMetadata(name: String): ObjectField = + nonDynamicObjectField(name).fields( + keywordField("webTitle"), + keywordField("webUrl"), + keywordField("sectionId"), + keywordField("composerUrl") + ) - def syndicationUsageMetadata(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("partnerName") - ) + def syndicationUsageMetadata(name: String): ObjectField = + nonDynamicObjectField(name).fields( + 
keywordField("partnerName") + ) - def frontUsageMetadata(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("addedBy"), - keywordField("front") - ) + def frontUsageMetadata(name: String): ObjectField = + nonDynamicObjectField(name).fields( + keywordField("addedBy"), + keywordField("front") + ) - def downloadUsageMetadata(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("downloadedBy") - ) + def downloadUsageMetadata(name: String): ObjectField = + nonDynamicObjectField(name).fields( + keywordField("downloadedBy") + ) - def usagesMapping(name: String): NestedField = nestedField(name). - fields( + def usagesMapping(name: String): NestedField = nestedField(name).fields( keywordField("id"), sStemmerAnalysed("title"), usageReference("references"), @@ -265,45 +300,55 @@ object Mappings { downloadUsageMetadata("downloadUsageMetadata") ) - def leaseMapping(name: String): ObjectField = nonDynamicObjectField(name).fields( - keywordField("id"), - keywordField("leasedBy"), - dateField("startDate"), - dateField("endDate"), - keywordField("access"), - keywordField("active"), - sStemmerAnalysed("notes"), - keywordField("mediaId"), - dateField("createdAt") - ) + def leaseMapping(name: String): ObjectField = + nonDynamicObjectField(name).fields( + keywordField("id"), + keywordField("leasedBy"), + dateField("startDate"), + dateField("endDate"), + keywordField("access"), + keywordField("active"), + sStemmerAnalysed("notes"), + keywordField("mediaId"), + dateField("createdAt") + ) - def leasesMapping(name: String): ObjectField = nonDynamicObjectField(name).fields( - leaseMapping("leases"), - dateField("lastModified") - ) + def leasesMapping(name: String): ObjectField = + nonDynamicObjectField(name).fields( + leaseMapping("leases"), + dateField("lastModified") + ) - private def nonDynamicObjectField(name: String) = ObjectField(name).dynamic("strict") + private def nonDynamicObjectField(name: String) = + ObjectField(name).dynamic("strict") - private def nestedField(name: String) = NestedField(name).dynamic("strict") // ES1 include_in_parent needs to be emulated with field bby field copy_tos + private def nestedField(name: String) = NestedField(name).dynamic( + "strict" + ) // ES1 include_in_parent needs to be emulated with field bby field copy_tos private def dynamicObj(name: String) = objectField(name).dynamic(true) private def nonIndexedString(name: String) = textField(name).index(false) - private def sStemmerAnalysed(name: String) = textField(name).analyzer(IndexSettings.englishSStemmerAnalyzerName) + private def sStemmerAnalysed(name: String) = + textField(name).analyzer(IndexSettings.englishSStemmerAnalyzerName) - private def hierarchyAnalysed(name: String) = textField(name).analyzer(IndexSettings.hierarchyAnalyserName) + private def hierarchyAnalysed(name: String) = + textField(name).analyzer(IndexSettings.hierarchyAnalyserName) - private def standardAnalysed(name: String) = textField(name).analyzer("standard") + private def standardAnalysed(name: String) = + textField(name).analyzer("standard") - private def simpleSuggester(name: String) = completionField(name).analyzer("simple").searchAnalyzer("simple") + private def simpleSuggester(name: String) = + completionField(name).analyzer("simple").searchAnalyzer("simple") //def nonAnalysedList(indexName: String) = Json.obj("type" -> "string", "index" -> "not_analyzed", "index_name" -> indexName) private def nonAnalysedList(name: String) = { keywordField(name) // TODO index_name } - private def 
withIndexName(indexName: String, obj: JsObject) = Json.obj("index_Name" -> indexName) ++ obj + private def withIndexName(indexName: String, obj: JsObject) = + Json.obj("index_Name" -> indexName) ++ obj // TODO could have kept this bit of indirection //val nonAnalyzedString = Json.obj("type" -> "string", "index" -> "not_analyzed") diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/formatting/package.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/formatting/package.scala index 3308e2f0d9..074f72dde8 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/formatting/package.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/formatting/package.scala @@ -14,16 +14,14 @@ package object formatting { ISODateTimeFormat.dateTime, ISODateTimeFormat.dateTimeNoMillis ).map(_.getParser) - new DateTimeFormatterBuilder(). - append(null, parsers). - toFormatter. - withZoneUTC + new DateTimeFormatterBuilder().append(null, parsers).toFormatter.withZoneUTC } val dateTimeFormat = parseDateTimeFormat.withZoneUTC def printDateTime(date: DateTime): String = date.toString() - def printOptDateTime(date: Option[DateTime]): Option[String] = date.map(printDateTime) + def printOptDateTime(date: Option[DateTime]): Option[String] = + date.map(printDateTime) // Only use this on dates that have been confidently written using printDateTime def unsafeParseDateTime(string: String): DateTime = @@ -31,11 +29,14 @@ package object formatting { def parseDateTime(string: String): Option[DateTime] = Try(parseDateTimeFormat.parseDateTime(string)).toOption - def parseOptDateTime(string: Option[String]): Option[DateTime] = string.flatMap(parseDateTime) + def parseOptDateTime(string: Option[String]): Option[DateTime] = + string.flatMap(parseDateTime) /** Parses either a UTC timestamp, or a duration before the current time (e.g. 
"30.days") */ def parseDateFromQuery(string: String): Option[DateTime] = - parseDateTime(string) orElse (parseDuration(string) map (DateTime.now minus _.toMillis)) + parseDateTime(string) orElse (parseDuration( + string + ) map (DateTime.now minus _.toMillis)) def parseDuration(string: String): Option[Duration] = Try(Duration(string)).toOption diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/guardian/auth/PandaAuthenticationProvider.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/guardian/auth/PandaAuthenticationProvider.scala index 891eefd02f..6dbcebec9f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/guardian/auth/PandaAuthenticationProvider.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/guardian/auth/PandaAuthenticationProvider.scala @@ -8,7 +8,16 @@ import com.gu.mediaservice.lib.auth.provider._ import com.gu.mediaservice.lib.aws.S3Ops import com.gu.pandomainauth.PanDomainAuthSettingsRefresher import com.gu.pandomainauth.action.AuthActions -import com.gu.pandomainauth.model.{AuthenticatedUser, User, Authenticated => PandaAuthenticated, Expired => PandaExpired, GracePeriod => PandaGracePeriod, InvalidCookie => PandaInvalidCookie, NotAuthenticated => PandaNotAuthenticated, NotAuthorized => PandaNotAuthorised} +import com.gu.pandomainauth.model.{ + AuthenticatedUser, + User, + Authenticated => PandaAuthenticated, + Expired => PandaExpired, + GracePeriod => PandaGracePeriod, + InvalidCookie => PandaInvalidCookie, + NotAuthenticated => PandaNotAuthenticated, + NotAuthorized => PandaNotAuthorised +} import com.gu.pandomainauth.service.{Google2FAGroupChecker, OAuthException} import com.typesafe.scalalogging.StrictLogging import play.api.Configuration @@ -20,115 +29,163 @@ import play.api.mvc.{ControllerComponents, Cookie, RequestHeader, Result} import scala.concurrent.{ExecutionContext, Future} import scala.util.Try -class PandaAuthenticationProvider(resources: AuthenticationProviderResources, providerConfiguration: Configuration) - extends UserAuthenticationProvider with AuthActions with StrictLogging with ArgoHelpers with HeaderNames { +class PandaAuthenticationProvider( + resources: AuthenticationProviderResources, + providerConfiguration: Configuration +) extends UserAuthenticationProvider + with AuthActions + with StrictLogging + with ArgoHelpers + with HeaderNames { implicit val ec: ExecutionContext = controllerComponents.executionContext - final override def authCallbackUrl: String = s"${resources.commonConfig.services.authBaseUri}/oauthCallback" - override lazy val panDomainSettings: PanDomainAuthSettingsRefresher = buildPandaSettings() + final override def authCallbackUrl: String = + s"${resources.commonConfig.services.authBaseUri}/oauthCallback" + override lazy val panDomainSettings: PanDomainAuthSettingsRefresher = + buildPandaSettings() override def wsClient: WSClient = resources.wsClient - override def controllerComponents: ControllerComponents = resources.controllerComponents + override def controllerComponents: ControllerComponents = + resources.controllerComponents val loginLinks = List( Link("login", resources.commonConfig.services.loginUriTemplate) ) - /** - * Establish the authentication status of the given request header. This can return an authenticated user or a number + /** Establish the authentication status of the given request header. This can return an authenticated user or a number * of reasons why a user is not authenticated. 
* * @param request The request header containing cookies and other request headers that can be used to establish the * authentication status of a request. * @return An authentication status expressing whether the */ - override def authenticateRequest(request: RequestHeader): AuthenticationStatus = { + override def authenticateRequest( + request: RequestHeader + ): AuthenticationStatus = { val pandaStatus = extractAuth(request) val providerStatus = pandaStatus match { case PandaNotAuthenticated => NotAuthenticated - case PandaInvalidCookie(e) => Invalid("error checking user's auth, clear cookie and re-auth", Some(e)) - case PandaExpired(authedUser) => Expired(gridUserFrom(authedUser.user, request)) - case PandaGracePeriod(authedUser) => Authenticated(gridUserFrom(authedUser.user, request)) - case PandaNotAuthorised(authedUser) => NotAuthorised(s"${authedUser.user.email} not authorised to use application") - case PandaAuthenticated(authedUser) => Authenticated(gridUserFrom(authedUser.user, request)) + case PandaInvalidCookie(e) => + Invalid("error checking user's auth, clear cookie and re-auth", Some(e)) + case PandaExpired(authedUser) => + Expired(gridUserFrom(authedUser.user, request)) + case PandaGracePeriod(authedUser) => + Authenticated(gridUserFrom(authedUser.user, request)) + case PandaNotAuthorised(authedUser) => + NotAuthorised( + s"${authedUser.user.email} not authorised to use application" + ) + case PandaAuthenticated(authedUser) => + Authenticated(gridUserFrom(authedUser.user, request)) } - logger.info(s"Authenticating request ${request.uri}. Panda $pandaStatus Provider $providerStatus") + logger.info( + s"Authenticating request ${request.uri}. Panda $pandaStatus Provider $providerStatus" + ) providerStatus } - /** - * If this provider supports sending a user that is not authorised to a federated auth provider then it should + /** If this provider supports sending a user that is not authorised to a federated auth provider then it should * provide a function here to redirect the user. */ - override def sendForAuthentication: Option[RequestHeader => Future[Result]] = Some({ requestHeader: RequestHeader => - val maybePrincipal = authenticateRequest(requestHeader) match { - case Expired(principal) => Some(principal) - case Authenticated(principal: UserPrincipal) => Some(principal) - case _ => None - } - val email = maybePrincipal.map(_.email) - sendForAuth(requestHeader, email) - }) + override def sendForAuthentication: Option[RequestHeader => Future[Result]] = + Some({ requestHeader: RequestHeader => + val maybePrincipal = authenticateRequest(requestHeader) match { + case Expired(principal) => Some(principal) + case Authenticated(principal: UserPrincipal) => Some(principal) + case _ => None + } + val email = maybePrincipal.map(_.email) + sendForAuth(requestHeader, email) + }) - /** - * If this provider supports sending a user that is not authorised to a federated auth provider then it should + /** If this provider supports sending a user that is not authorised to a federated auth provider then it should * provide an Play action here that deals with the return of a user from a federated provider. This should be * used to set a cookie or similar to ensure that a subsequent call to authenticateRequest will succeed. If * authentication failed then this should return an appropriate 4xx result. 
*/ - override def sendForAuthenticationCallback: Option[(RequestHeader, Option[RedirectUri]) => Future[Result]] = + override def sendForAuthenticationCallback + : Option[(RequestHeader, Option[RedirectUri]) => Future[Result]] = Some({ (requestHeader: RequestHeader, maybeUri: Option[RedirectUri]) => // We use the `Try` here as the `GoogleAuthException` are thrown before we // get to the asynchronicity of the `Future` it returns. // We then have to flatten the Future[Future[T]]. Fiddly... - Future.fromTry(Try(processOAuthCallback()(requestHeader))).flatten.recover { - // This is when session session args are missing - case e: OAuthException => respondError(BadRequest, "google-auth-exception", e.getMessage, loginLinks) - - // Class `missing anti forgery token` as a 4XX - // see https://github.com/guardian/pan-domain-authentication/blob/master/pan-domain-auth-play_2-6/src/main/scala/com/gu/pandomainauth/service/GoogleAuth.scala#L63 - case e: IllegalArgumentException if e.getMessage == "The anti forgery token did not match" => { - logger.error("Anti-forgery exception encountered", e) - respondError(BadRequest, "google-auth-exception", e.getMessage, loginLinks) + Future + .fromTry(Try(processOAuthCallback()(requestHeader))) + .flatten + .recover { + // This is when session session args are missing + case e: OAuthException => + respondError( + BadRequest, + "google-auth-exception", + e.getMessage, + loginLinks + ) + + // Class `missing anti forgery token` as a 4XX + // see https://github.com/guardian/pan-domain-authentication/blob/master/pan-domain-auth-play_2-6/src/main/scala/com/gu/pandomainauth/service/GoogleAuth.scala#L63 + case e: IllegalArgumentException + if e.getMessage == "The anti forgery token did not match" => { + logger.error("Anti-forgery exception encountered", e) + respondError( + BadRequest, + "google-auth-exception", + e.getMessage, + loginLinks + ) + } + } + .map { + // not very elegant, but this will override the redirect from panda with any alternative destination + case overrideRedirect + if overrideRedirect.header.headers + .contains(LOCATION) && maybeUri.nonEmpty => + val uri = maybeUri.get + Redirect(uri).copy( + newCookies = overrideRedirect.newCookies, + newSession = overrideRedirect.newSession + ) + case other => other } - }.map { - // not very elegant, but this will override the redirect from panda with any alternative destination - case overrideRedirect if overrideRedirect.header.headers.contains(LOCATION) && maybeUri.nonEmpty => - val uri = maybeUri.get - Redirect(uri).copy(newCookies = overrideRedirect.newCookies, newSession = overrideRedirect.newSession) - case other => other - } }) - /** - * If this provider is able to clear user tokens (i.e. by clearing cookies) then it should provide a function to + /** If this provider is able to clear user tokens (i.e. by clearing cookies) then it should provide a function to * do that here which will be used to log users out and also if the token is invalid. 
* * @return */ - override def flushToken: Option[(RequestHeader, Result) => Result] = Some((rh, _) => processLogout(rh)) + override def flushToken: Option[(RequestHeader, Result) => Result] = + Some((rh, _) => processLogout(rh)) val PandaCookieKey: TypedKey[Cookie] = TypedKey[Cookie]("PandaCookie") - /** - * A function that allows downstream API calls to be made using the credentials of the inflight request + /** A function that allows downstream API calls to be made using the credentials of the inflight request * * @param request The request header of the inflight call * @return A function that adds appropriate data to a WSRequest */ - override def onBehalfOf(request: Principal): Either[String, WSRequest => WSRequest] = { + override def onBehalfOf( + request: Principal + ): Either[String, WSRequest => WSRequest] = { val cookieName = panDomainSettings.settings.cookieSettings.cookieName request.attributes.get(PandaCookieKey) match { - case Some(cookie) => Right { wsRequest: WSRequest => - wsRequest.addCookies(DefaultWSCookie(cookieName, cookie.value)) - } - case None => Left(s"Pan domain cookie $cookieName is missing in principal.") + case Some(cookie) => + Right { wsRequest: WSRequest => + wsRequest.addCookies(DefaultWSCookie(cookieName, cookie.value)) + } + case None => + Left(s"Pan domain cookie $cookieName is missing in principal.") } } - private def gridUserFrom(pandaUser: User, request: RequestHeader): UserPrincipal = { - val maybePandaCookie: Option[TypedEntry[Cookie]] = request.cookies.get(panDomainSettings.settings.cookieSettings.cookieName).map(TypedEntry[Cookie](PandaCookieKey, _)) - val attributes = TypedMap.empty + (maybePandaCookie.toSeq:_*) + private def gridUserFrom( + pandaUser: User, + request: RequestHeader + ): UserPrincipal = { + val maybePandaCookie: Option[TypedEntry[Cookie]] = request.cookies + .get(panDomainSettings.settings.cookieSettings.cookieName) + .map(TypedEntry[Cookie](PandaCookieKey, _)) + val attributes = TypedMap.empty + (maybePandaCookie.toSeq: _*) UserPrincipal( firstName = pandaUser.firstName, lastName = pandaUser.lastName, @@ -140,24 +197,46 @@ class PandaAuthenticationProvider(resources: AuthenticationProviderResources, pr private def buildPandaSettings() = { new PanDomainAuthSettingsRefresher( domain = resources.commonConfig.services.domainRoot, - system = providerConfiguration.getOptional[String]("panda.system").getOrElse("media-service"), - bucketName = providerConfiguration.getOptional[String]("panda.bucketName").getOrElse("pan-domain-auth-settings"), - settingsFileKey = providerConfiguration.getOptional[String]("panda.settingsFileKey").getOrElse(s"${resources.commonConfig.services.domainRoot}.settings"), - s3Client = S3Ops.buildS3Client(resources.commonConfig, localstackAware=resources.commonConfig.useLocalAuth) + system = providerConfiguration + .getOptional[String]("panda.system") + .getOrElse("media-service"), + bucketName = providerConfiguration + .getOptional[String]("panda.bucketName") + .getOrElse("pan-domain-auth-settings"), + settingsFileKey = providerConfiguration + .getOptional[String]("panda.settingsFileKey") + .getOrElse(s"${resources.commonConfig.services.domainRoot}.settings"), + s3Client = S3Ops.buildS3Client( + resources.commonConfig, + localstackAware = resources.commonConfig.useLocalAuth + ) ) } - private val userValidationEmailDomain = resources.commonConfig.stringOpt("panda.userDomain").getOrElse("guardian.co.uk") + private val userValidationEmailDomain = resources.commonConfig + .stringOpt("panda.userDomain") + 
.getOrElse("guardian.co.uk") final override def validateUser(authedUser: AuthenticatedUser): Boolean = { - PandaAuthenticationProvider.validateUser(authedUser, userValidationEmailDomain, multifactorChecker) + PandaAuthenticationProvider.validateUser( + authedUser, + userValidationEmailDomain, + multifactorChecker + ) } } object PandaAuthenticationProvider { - def validateUser(authedUser: AuthenticatedUser, userValidationEmailDomain: String, multifactorChecker: Option[Google2FAGroupChecker]): Boolean = { - val isValidDomain = authedUser.user.email.endsWith("@" + userValidationEmailDomain) - val passesMultifactor = if(multifactorChecker.nonEmpty) { authedUser.multiFactor } else { true } + def validateUser( + authedUser: AuthenticatedUser, + userValidationEmailDomain: String, + multifactorChecker: Option[Google2FAGroupChecker] + ): Boolean = { + val isValidDomain = + authedUser.user.email.endsWith("@" + userValidationEmailDomain) + val passesMultifactor = if (multifactorChecker.nonEmpty) { + authedUser.multiFactor + } else { true } isValidDomain && passesMultifactor } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/ImageOperations.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/ImageOperations.scala index c8f12e0580..813737d8d4 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/ImageOperations.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/ImageOperations.scala @@ -5,8 +5,15 @@ import java.io._ import org.im4java.core.IMOperation import com.gu.mediaservice.lib.Files._ import com.gu.mediaservice.lib.StorableThumbImage -import com.gu.mediaservice.lib.imaging.ImageOperations.{optimisedMimeType, thumbMimeType} -import com.gu.mediaservice.lib.imaging.im4jwrapper.ImageMagick.{addImage, format, runIdentifyCmd} +import com.gu.mediaservice.lib.imaging.ImageOperations.{ + optimisedMimeType, + thumbMimeType +} +import com.gu.mediaservice.lib.imaging.im4jwrapper.ImageMagick.{ + addImage, + format, + runIdentifyCmd +} import com.gu.mediaservice.lib.imaging.im4jwrapper.{ExifTool, ImageMagick} import com.gu.mediaservice.lib.logging.GridLogging import com.gu.mediaservice.model._ @@ -14,8 +21,11 @@ import com.gu.mediaservice.model._ import scala.concurrent.{ExecutionContext, Future} import scala.sys.process._ - -case class ExportResult(id: String, masterCrop: Asset, othersizings: List[Asset]) +case class ExportResult( + id: String, + masterCrop: Asset, + othersizings: List[Asset] +) class UnsupportedCropOutputTypeException extends Exception class ImageOperations(playPath: String) extends GridLogging { @@ -24,12 +34,16 @@ class ImageOperations(playPath: String) extends GridLogging { private def profilePath(fileName: String): String = s"$playPath/$fileName" - private def profileLocation(colourModel: String, optimised: Boolean = false): String = colourModel match { + private def profileLocation( + colourModel: String, + optimised: Boolean = false + ): String = colourModel match { case "RGB" if optimised => profilePath("facebook-TINYsRGB_c2.icc") case "RGB" => profilePath("srgb.icc") case "CMYK" => profilePath("cmyk.icc") case "GRAYSCALE" => profilePath("grayscale.icc") - case model => throw new Exception(s"Profile for invalid colour model requested: $model") + case model => + throw new Exception(s"Profile for invalid colour model requested: $model") } private def tagFilter(metadata: ImageMetadata) = { @@ -40,86 +54,131 @@ class ImageOperations(playPath: String) extends GridLogging { ).collect { case (key, Some(value)) => (key, 
value) } } - private def applyOutputProfile(base: IMOperation, optimised: Boolean = false) = profile(base)(profileLocation("RGB", optimised)) + private def applyOutputProfile( + base: IMOperation, + optimised: Boolean = false + ) = profile(base)(profileLocation("RGB", optimised)) // Optionally apply transforms to the base operation if the colour space // in the ICC profile doesn't match the colour model of the image data - private def correctColour(base: IMOperation)(iccColourSpace: Option[String], colourModel: Option[String]) = { + private def correctColour( + base: IMOperation + )(iccColourSpace: Option[String], colourModel: Option[String]) = { (iccColourSpace, colourModel) match { // If matching, all is well, just pass through case (icc, model) if icc == model => base // If no colour model detected, we can't do anything anyway so just hope all is well - case (_, None) => base + case (_, None) => base // If mismatching, strip any (incorrect) ICC profile and inject a profile matching the model // Note: Strip both ICC and ICM (Windows variant?) to be safe - case (_, Some(model)) => profile(stripProfile(base)("icm,icc"))(profileLocation(model)) + case (_, Some(model)) => + profile(stripProfile(base)("icm,icc"))(profileLocation(model)) } } - def cropImage(sourceFile: File, sourceMimeType: Option[MimeType], bounds: Bounds, qual: Double = 100d, tempDir: File, - iccColourSpace: Option[String], colourModel: Option[String], fileType: MimeType): Future[File] = { + def cropImage( + sourceFile: File, + sourceMimeType: Option[MimeType], + bounds: Bounds, + qual: Double = 100d, + tempDir: File, + iccColourSpace: Option[String], + colourModel: Option[String], + fileType: MimeType + ): Future[File] = { for { - outputFile <- createTempFile(s"crop-", s"${fileType.fileExtension}", tempDir) - cropSource = addImage(sourceFile) - qualified = quality(cropSource)(qual) - corrected = correctColour(qualified)(iccColourSpace, colourModel) - converted = applyOutputProfile(corrected) - stripped = stripMeta(converted) - profiled = applyOutputProfile(stripped) - cropped = crop(profiled)(bounds) + outputFile <- createTempFile( + s"crop-", + s"${fileType.fileExtension}", + tempDir + ) + cropSource = addImage(sourceFile) + qualified = quality(cropSource)(qual) + corrected = correctColour(qualified)(iccColourSpace, colourModel) + converted = applyOutputProfile(corrected) + stripped = stripMeta(converted) + profiled = applyOutputProfile(stripped) + cropped = crop(profiled)(bounds) depthAdjusted = depth(cropped)(8) - addOutput = addDestImage(depthAdjusted)(outputFile) - _ <- runConvertCmd(addOutput, useImageMagick = sourceMimeType.contains(Tiff)) - _ <- checkForOutputFileChange(outputFile) - } - yield outputFile + addOutput = addDestImage(depthAdjusted)(outputFile) + _ <- runConvertCmd( + addOutput, + useImageMagick = sourceMimeType.contains(Tiff) + ) + _ <- checkForOutputFileChange(outputFile) + } yield outputFile } // Updates metadata on existing file - def appendMetadata(sourceFile: File, metadata: ImageMetadata): Future[File] = { + def appendMetadata( + sourceFile: File, + metadata: ImageMetadata + ): Future[File] = { runExiftoolCmd( setTags(tagSource(sourceFile))(tagFilter(metadata)) - ).map(_ => sourceFile) + ).map(_ => sourceFile) } - def resizeImage(sourceFile: File, sourceMimeType: Option[MimeType], dimensions: Dimensions, - qual: Double = 100d, tempDir: File, fileType: MimeType): Future[File] = { + def resizeImage( + sourceFile: File, + sourceMimeType: Option[MimeType], + dimensions: Dimensions, + qual: Double = 
100d, + tempDir: File, + fileType: MimeType + ): Future[File] = { for { - outputFile <- createTempFile(s"resize-", s".${fileType.fileExtension}", tempDir) + outputFile <- createTempFile( + s"resize-", + s".${fileType.fileExtension}", + tempDir + ) resizeSource = addImage(sourceFile) - qualified = quality(resizeSource)(qual) - resized = scale(qualified)(dimensions) - addOutput = addDestImage(resized)(outputFile) - _ <- runConvertCmd(addOutput, useImageMagick = sourceMimeType.contains(Tiff)) - } - yield outputFile + qualified = quality(resizeSource)(qual) + resized = scale(qualified)(dimensions) + addOutput = addDestImage(resized)(outputFile) + _ <- runConvertCmd( + addOutput, + useImageMagick = sourceMimeType.contains(Tiff) + ) + } yield outputFile } - def optimiseImage(resizedFile: File, mediaType: MimeType): File = mediaType match { - case Png => - val fileName: String = resizedFile.getAbsolutePath + def optimiseImage(resizedFile: File, mediaType: MimeType): File = + mediaType match { + case Png => + val fileName: String = resizedFile.getAbsolutePath - val optimisedImageName: String = fileName.split('.')(0) + "optimised.png" - Seq("pngquant", "--quality", "1-85", fileName, "--output", optimisedImageName).! + val optimisedImageName: String = + fileName.split('.')(0) + "optimised.png" + Seq( + "pngquant", + "--quality", + "1-85", + fileName, + "--output", + optimisedImageName + ).! - new File(optimisedImageName) - case Jpeg => resizedFile + new File(optimisedImageName) + case Jpeg => resizedFile - // This should never happen as we only ever crop as PNG or JPEG. See `Crops.cropType` and `CropsTest` - // TODO We should create a `CroppingMimeType` to enforce this at the type level. - // However we'd need to change the `Asset` model as source image and crop use this model - // and a source can legally be a `Tiff`. It's not a small change... - case Tiff => - logger.error("Attempting to optimize a Tiff crop. Cropping as Tiff is not supported.") - throw new UnsupportedCropOutputTypeException - } + // This should never happen as we only ever crop as PNG or JPEG. See `Crops.cropType` and `CropsTest` + // TODO We should create a `CroppingMimeType` to enforce this at the type level. + // However we'd need to change the `Asset` model as source image and crop use this model + // and a source can legally be a `Tiff`. It's not a small change... + case Tiff => + logger.error( + "Attempting to optimize a Tiff crop. Cropping as Tiff is not supported." + ) + throw new UnsupportedCropOutputTypeException + } val thumbUnsharpRadius = 0.5d val thumbUnsharpSigma = 0.5d val thumbUnsharpAmount = 0.8d - /** - * Given a source file containing a png (the 'browser viewable' file), + /** Given a source file containing a png (the 'browser viewable' file), * construct a thumbnail file in the provided temp directory, and return * the file with metadata about it. * @param sourceFile File containing browser viewable (ie not too big or colourful) image @@ -131,30 +190,42 @@ class ImageOperations(playPath: String) extends GridLogging { * @param colourModel Colour model - eg RGB or CMYK * @return The file created and the mimetype of the content of that file, in a future. 
*/ - def createThumbnail(sourceFile: File, - sourceMimeType: Option[MimeType], - width: Int, - qual: Double = 100d, - tempDir: File, - iccColourSpace: Option[String], - colourModel: Option[String]): Future[(File, MimeType)] = { - val cropSource = addImage(sourceFile) + def createThumbnail( + sourceFile: File, + sourceMimeType: Option[MimeType], + width: Int, + qual: Double = 100d, + tempDir: File, + iccColourSpace: Option[String], + colourModel: Option[String] + ): Future[(File, MimeType)] = { + val cropSource = addImage(sourceFile) val thumbnailed = thumbnail(cropSource)(width) - val corrected = correctColour(thumbnailed)(iccColourSpace, colourModel) - val converted = applyOutputProfile(corrected, optimised = true) - val stripped = stripMeta(converted) - val profiled = applyOutputProfile(stripped, optimised = true) - val unsharpened = unsharp(profiled)(thumbUnsharpRadius, thumbUnsharpSigma, thumbUnsharpAmount) - val qualified = quality(unsharpened)(qual) - val addOutput = {file:File => addDestImage(qualified)(file)} + val corrected = correctColour(thumbnailed)(iccColourSpace, colourModel) + val converted = applyOutputProfile(corrected, optimised = true) + val stripped = stripMeta(converted) + val profiled = applyOutputProfile(stripped, optimised = true) + val unsharpened = unsharp(profiled)( + thumbUnsharpRadius, + thumbUnsharpSigma, + thumbUnsharpAmount + ) + val qualified = quality(unsharpened)(qual) + val addOutput = { file: File => addDestImage(qualified)(file) } for { - outputFile <- createTempFile(s"thumb-", thumbMimeType.fileExtension, tempDir) - _ <- runConvertCmd(addOutput(outputFile), useImageMagick = sourceMimeType.contains(Tiff)) + outputFile <- createTempFile( + s"thumb-", + thumbMimeType.fileExtension, + tempDir + ) + _ <- runConvertCmd( + addOutput(outputFile), + useImageMagick = sourceMimeType.contains(Tiff) + ) } yield (outputFile, thumbMimeType) } - /** - * Given a source file containing a file which requires optimising to make it suitable for viewing in + /** Given a source file containing a file which requires optimising to make it suitable for viewing in * a browser, construct a new image file in the provided temp directory, and return * * the file with metadata about it. * @param sourceFile File containing browser viewable (ie not too big or colourful) image @@ -162,14 +233,25 @@ class ImageOperations(playPath: String) extends GridLogging { * @param tempDir Location to create optimised file * @return The file created and the mimetype of the content of that file, in a future. 
*/ - def transformImage(sourceFile: File, sourceMimeType: Option[MimeType], tempDir: File): Future[(File, MimeType)] = { + def transformImage( + sourceFile: File, + sourceMimeType: Option[MimeType], + tempDir: File + ): Future[(File, MimeType)] = { for { // png suffix is used by imagemagick to infer the required type - outputFile <- createTempFile(s"transformed-", optimisedMimeType.fileExtension, tempDir) + outputFile <- createTempFile( + s"transformed-", + optimisedMimeType.fileExtension, + tempDir + ) transformSource = addImage(sourceFile) - addOutput = addDestImage(transformSource)(outputFile) - _ <- runConvertCmd(addOutput, useImageMagick = sourceMimeType.contains(Tiff)) - _ <- checkForOutputFileChange(outputFile) + addOutput = addDestImage(transformSource)(outputFile) + _ <- runConvertCmd( + addOutput, + useImageMagick = sourceMimeType.contains(Tiff) + ) + _ <- checkForOutputFileChange(outputFile) } yield (outputFile, optimisedMimeType) } @@ -195,13 +277,17 @@ class ImageOperations(playPath: String) extends GridLogging { } @scala.annotation.tailrec - private def cleanUpLayerFiles(mainPart: String, extension: String, index: Int):Unit = { - val newFile = List(s"$mainPart-$index", extension).mkString(".") - val f3 = new File(newFile) - if (f3.exists()) { - f3.delete() - cleanUpLayerFiles(mainPart, extension, index+1) - } + private def cleanUpLayerFiles( + mainPart: String, + extension: String, + index: Int + ): Unit = { + val newFile = List(s"$mainPart-$index", extension).mkString(".") + val f3 = new File(newFile) + if (f3.exists()) { + f3.delete() + cleanUpLayerFiles(mainPart, extension, index + 1) + } } } @@ -209,7 +295,9 @@ class ImageOperations(playPath: String) extends GridLogging { object ImageOperations { val thumbMimeType = Jpeg val optimisedMimeType = Png - def identifyColourModel(sourceFile: File, mimeType: MimeType)(implicit ec: ExecutionContext): Future[Option[String]] = { + def identifyColourModel(sourceFile: File, mimeType: MimeType)(implicit + ec: ExecutionContext + ): Future[Option[String]] = { // TODO: use mimeType to lookup other properties once we support other formats mimeType match { diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/im4jwrapper/ExifTool.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/im4jwrapper/ExifTool.scala index a1870cdc35..92149502f8 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/im4jwrapper/ExifTool.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/im4jwrapper/ExifTool.scala @@ -7,18 +7,23 @@ import scala.concurrent.{Future, ExecutionContext} import org.im4java.core.{ETOperation, ExiftoolCmd} import scalaz.syntax.id._ - object ExifTool { private implicit val ctx: ExecutionContext = - ExecutionContext.fromExecutor(Executors.newFixedThreadPool(Config.imagingThreadPoolSize)) + ExecutionContext.fromExecutor( + Executors.newFixedThreadPool(Config.imagingThreadPoolSize) + ) - def tagSource(source: File) = (new ETOperation()) <| (_.addImage(source.getAbsolutePath)) + def tagSource(source: File) = + (new ETOperation()) <| (_.addImage(source.getAbsolutePath)) - def setTags(ops: ETOperation)(tags: Map[String, String]): ETOperation = { - tags.foldLeft(ops) { case (ops, (key, value)) => ops <| (_.setTags(s"$key=$value")) } + def setTags(ops: ETOperation)(tags: Map[String, String]): ETOperation = { + tags.foldLeft(ops) { case (ops, (key, value)) => + ops <| (_.setTags(s"$key=$value")) + } } - def overwriteOriginal(ops: ETOperation): ETOperation = ops <| 
(_.overwrite_original()) + def overwriteOriginal(ops: ETOperation): ETOperation = + ops <| (_.overwrite_original()) def runExiftoolCmd(ops: ETOperation): Future[Unit] = { // Set overwrite original to ensure temporary file deletion diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/im4jwrapper/ImageMagick.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/im4jwrapper/ImageMagick.scala index 3d4c43346d..1c8aa41d06 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/im4jwrapper/ImageMagick.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/imaging/im4jwrapper/ImageMagick.scala @@ -13,24 +13,38 @@ import scalaz.syntax.id._ import com.gu.mediaservice.model.{Bounds, Dimensions} object ImageMagick extends GridLogging { implicit val ctx: ExecutionContext = - ExecutionContext.fromExecutor(Executors.newFixedThreadPool(Config.imagingThreadPoolSize)) + ExecutionContext.fromExecutor( + Executors.newFixedThreadPool(Config.imagingThreadPoolSize) + ) - def addImage(source: File) = (new IMOperation()) <| { op => { op.addImage(source.getAbsolutePath) }} + def addImage(source: File) = (new IMOperation()) <| { op => + { op.addImage(source.getAbsolutePath) } + } def quality(op: IMOperation)(qual: Double) = op <| (_.quality(qual)) - def unsharp(op: IMOperation)(radius: Double, sigma: Double, amount: Double) = op <| (_.unsharp(radius, sigma, amount)) + def unsharp(op: IMOperation)(radius: Double, sigma: Double, amount: Double) = + op <| (_.unsharp(radius, sigma, amount)) def stripMeta(op: IMOperation) = op <| (_.strip()) - def stripProfile(op: IMOperation)(profile: String) = op <| (_.p_profile(profile)) - def addDestImage(op: IMOperation)(dest: File) = op <| (_.addImage(dest.getAbsolutePath)) - def crop(op: IMOperation)(b: Bounds): IMOperation = op <| (_.crop(b.width, b.height, b.x, b.y)) - def profile(op: IMOperation)(profileFileLocation: String): IMOperation = op <| (_.profile(profileFileLocation)) - def thumbnail(op: IMOperation)(width: Int): IMOperation = op <| (_.thumbnail(width)) - def resize(op: IMOperation)(maxSize: Int): IMOperation = op <| (_.resize(maxSize, maxSize)) - def scale(op: IMOperation)(dimensions: Dimensions): IMOperation = op <| (_.scale(dimensions.width, dimensions.height)) - def format(op: IMOperation)(definition: String): IMOperation = op <| (_.format(definition)) + def stripProfile(op: IMOperation)(profile: String) = + op <| (_.p_profile(profile)) + def addDestImage(op: IMOperation)(dest: File) = + op <| (_.addImage(dest.getAbsolutePath)) + def crop(op: IMOperation)(b: Bounds): IMOperation = + op <| (_.crop(b.width, b.height, b.x, b.y)) + def profile(op: IMOperation)(profileFileLocation: String): IMOperation = + op <| (_.profile(profileFileLocation)) + def thumbnail(op: IMOperation)(width: Int): IMOperation = + op <| (_.thumbnail(width)) + def resize(op: IMOperation)(maxSize: Int): IMOperation = + op <| (_.resize(maxSize, maxSize)) + def scale(op: IMOperation)(dimensions: Dimensions): IMOperation = + op <| (_.scale(dimensions.width, dimensions.height)) + def format(op: IMOperation)(definition: String): IMOperation = + op <| (_.format(definition)) def depth(op: IMOperation)(depth: Int): IMOperation = op <| (_.depth(depth)) def runConvertCmd(op: IMOperation, useImageMagick: Boolean): Future[Unit] = { - logger.info(s"Using ${if(useImageMagick) { "imagemagick" } else { "graphicsmagick" }} for imaging operation $op") + logger.info(s"Using ${if (useImageMagick) { "imagemagick" } + else { "graphicsmagick" }} for imaging operation 
$op") Future((new ConvertCmd(!useImageMagick)).run(op)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/json/JsonByteArrayUtil.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/json/JsonByteArrayUtil.scala index 654b84756c..4f7712e736 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/json/JsonByteArrayUtil.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/json/JsonByteArrayUtil.scala @@ -32,11 +32,15 @@ object JsonByteArrayUtil extends PlayJsonHelpers with GridLogging { decompressedBytes } - def hasCompressionMarker(bytes: Array[Byte]) = bytes.head == compressionMarkerByte + def hasCompressionMarker(bytes: Array[Byte]) = + bytes.head == compressionMarkerByte - def toByteArray[T](obj: T)(implicit writes: Writes[T]): Array[Byte] = compress(Json.toBytes(Json.toJson(obj))) + def toByteArray[T](obj: T)(implicit writes: Writes[T]): Array[Byte] = + compress(Json.toBytes(Json.toJson(obj))) - def fromByteArray[T](bytes: Array[Byte])(implicit reads: Reads[T]): Option[T] = { + def fromByteArray[T]( + bytes: Array[Byte] + )(implicit reads: Reads[T]): Option[T] = { val string = new String( if (hasCompressionMarker(bytes)) decompress(bytes) else bytes, StandardCharsets.UTF_8 diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/json/PlayJsonHelpers.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/json/PlayJsonHelpers.scala index 0fa2ad80c8..f91f19a3bd 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/json/PlayJsonHelpers.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/json/PlayJsonHelpers.scala @@ -7,7 +7,6 @@ import scala.PartialFunction.condOpt import play.api.libs.json._ import play.api.libs.json.JsString - trait PlayJsonHelpers { protected def logger: Logger @@ -15,9 +14,12 @@ trait PlayJsonHelpers { def logParseErrors(parseResult: JsResult[_]): Unit = parseResult.fold( _ map { case (path, errors) => - logger.error(s"Validation errors at $path: [${errors.map(_.message).mkString(", ")}]") + logger.error( + s"Validation errors at $path: [${errors.map(_.message).mkString(", ")}]" + ) }, - _ => ()) + _ => () + ) def string(v: JsValue): Option[String] = condOpt(v) { case JsString(s) => s } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/json/package.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/json/package.scala index c38ddf5462..01ce60d914 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/json/package.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/json/package.scala @@ -2,5 +2,4 @@ package com.gu.mediaservice.lib import com.gu.mediaservice.lib.logging.GridLogging - package object json extends PlayJsonHelpers with GridLogging diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/GridLogging.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/GridLogging.scala index 7b7fdc0f5a..3753b29d78 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/GridLogging.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/GridLogging.scala @@ -9,22 +9,33 @@ trait GridLogging extends StrictLogging { case class ImageId(id: String) implicit class LoggerWithHelpers(logger: Logger) { - def info(markers: Map[String, Any], message: String): Unit = logger.info(Markers.appendEntries(markers.asJava), message) - def info(markers: LogMarker, message: String): Unit = logger.info(markers.toLogMarker, message) - - def warn(markers: Map[String, Any], message: String): Unit = logger.warn(Markers.appendEntries(markers.asJava), 
message) - def warn(markers: LogMarker, message: String): Unit = logger.warn(markers.toLogMarker, message) - def warn(markers: LogMarker, message: String, cause: Throwable): Unit = logger.warn(markers.toLogMarker, message, cause) - - def error(markers: Map[String, Any], message: String): Unit = logger.error(Markers.appendEntries(markers.asJava), message) - def error(markers: LogMarker, message: String): Unit = logger.error(markers.toLogMarker, message) - def error(markers: LogMarker, message: String, cause: Throwable): Unit = logger.error(markers.toLogMarker, message, cause) - - def info(apiKey: ApiAccessor, message: String): Unit = info(apiKeyMarkers(apiKey), message) - - def info(apiKey: ApiAccessor, imageId: ImageId, message: String): Unit = info(apiKeyMarkers(apiKey) ++ imageIdMarker(imageId), message) - - def info(message: String, imageId: ImageId): Unit = info(imageIdMarker(imageId), message) + def info(markers: Map[String, Any], message: String): Unit = + logger.info(Markers.appendEntries(markers.asJava), message) + def info(markers: LogMarker, message: String): Unit = + logger.info(markers.toLogMarker, message) + + def warn(markers: Map[String, Any], message: String): Unit = + logger.warn(Markers.appendEntries(markers.asJava), message) + def warn(markers: LogMarker, message: String): Unit = + logger.warn(markers.toLogMarker, message) + def warn(markers: LogMarker, message: String, cause: Throwable): Unit = + logger.warn(markers.toLogMarker, message, cause) + + def error(markers: Map[String, Any], message: String): Unit = + logger.error(Markers.appendEntries(markers.asJava), message) + def error(markers: LogMarker, message: String): Unit = + logger.error(markers.toLogMarker, message) + def error(markers: LogMarker, message: String, cause: Throwable): Unit = + logger.error(markers.toLogMarker, message, cause) + + def info(apiKey: ApiAccessor, message: String): Unit = + info(apiKeyMarkers(apiKey), message) + + def info(apiKey: ApiAccessor, imageId: ImageId, message: String): Unit = + info(apiKeyMarkers(apiKey) ++ imageIdMarker(imageId), message) + + def info(message: String, imageId: ImageId): Unit = + info(imageIdMarker(imageId), message) def apiKeyMarkers(apiKey: ApiAccessor) = Map( "key-tier" -> apiKey.tier.toString, diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/LogConfig.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/LogConfig.scala index 7734f3ed6d..9663309b44 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/LogConfig.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/LogConfig.scala @@ -18,30 +18,45 @@ import scalaz.syntax.id._ import scala.util.Try - object LogConfig { - val rootLogger: LogbackLogger = LoggerFactory.getLogger(SLFLogger.ROOT_LOGGER_NAME).asInstanceOf[LogbackLogger] + val rootLogger: LogbackLogger = LoggerFactory + .getLogger(SLFLogger.ROOT_LOGGER_NAME) + .asInstanceOf[LogbackLogger] private val BUFFER_SIZE = 1000 - case class KinesisAppenderConfig(stream: String, region: String, roleArn: String, bufferSize: Int) + case class KinesisAppenderConfig( + stream: String, + region: String, + roleArn: String, + bufferSize: Int + ) private def makeCustomFields(config: CommonConfig): String = { val instanceId = Option(EC2MetadataUtils.getInstanceId).getOrElse("unknown") - Json.toJson(Map( - "stack" -> config.stackName, - "stage" -> config.stage.toUpperCase, - "app" -> config.appName, - "sessionId" -> config.sessionId, - "instanceId" -> instanceId - )).toString() + Json + .toJson( + Map( + 
"stack" -> config.stackName, + "stage" -> config.stage.toUpperCase, + "app" -> config.appName, + "sessionId" -> config.sessionId, + "instanceId" -> instanceId + ) + ) + .toString() } - private def makeLayout(customFields: String) = new LogstashLayout() <| (_.setCustomFields(customFields)) + private def makeLayout(customFields: String) = + new LogstashLayout() <| (_.setCustomFields(customFields)) - private def makeKinesisAppender(layout: LogstashLayout, context: LoggerContext, appenderConfig: KinesisAppenderConfig) = + private def makeKinesisAppender( + layout: LogstashLayout, + context: LoggerContext, + appenderConfig: KinesisAppenderConfig + ) = new KinesisAppender[ILoggingEvent]() <| { a => a.setStreamName(appenderConfig.stream) a.setRegion(appenderConfig.region) @@ -53,9 +68,12 @@ object LogConfig { layout.start() a.start() - } + } - private def makeLogstashAppender(config: CommonConfig, context: LoggerContext): LogstashTcpSocketAppender = { + private def makeLogstashAppender( + config: CommonConfig, + context: LoggerContext + ): LogstashTcpSocketAppender = { val customFields = makeCustomFields(config) new LogstashTcpSocketAppender() <| { appender => @@ -73,14 +91,14 @@ object LogConfig { } def initLocalLogShipping(config: CommonConfig): Unit = { - if(config.isDev && config.localLogShipping) { + if (config.isDev && config.localLogShipping) { Try { rootLogger.info("Configuring local logstash log shipping") val appender = makeLogstashAppender(config, rootLogger.getLoggerContext) rootLogger.addAppender(appender) rootLogger.info("Local logstash log shipping configured") - } recover { - case e => rootLogger.error("LogConfig Failed!", e) + } recover { case e => + rootLogger.error("LogConfig Failed!", e) } } } @@ -94,10 +112,12 @@ object LogConfig { rootLogger.info("Configuring Logback") val customFields = makeCustomFields(config) - val context = rootLogger.getLoggerContext - val layout = makeLayout(customFields) + val context = rootLogger.getLoggerContext + val layout = makeLayout(customFields) - val appender = makeKinesisAppender(layout, context, + val appender = makeKinesisAppender( + layout, + context, KinesisAppenderConfig( config.string("logger.kinesis.stream"), config.string("logger.kinesis.region"), @@ -108,8 +128,8 @@ object LogConfig { rootLogger.addAppender(appender) rootLogger.info("Configured Logback") - } recover { - case e => rootLogger.error("LogConfig Failed!", e) + } recover { case e => + rootLogger.error("LogConfig Failed!", e) } } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/MarkerUtils.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/MarkerUtils.scala index 4423accb17..1078171459 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/MarkerUtils.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/MarkerUtils.scala @@ -12,25 +12,33 @@ trait LogMarker { def markerContents: Map[String, Any] def +(marker: (String, Any)): LogMarker = MarkerMap(markerContents + marker) - def ++(marker: Map[String, Any]): LogMarker = MarkerMap(markerContents ++ marker) + def ++(marker: Map[String, Any]): LogMarker = MarkerMap( + markerContents ++ marker + ) } case class MarkerMap(markerContents: Map[String, Any]) extends LogMarker object MarkerMap { - def apply(entries: (String, Any)*):MarkerMap = MarkerMap(entries.toMap) + def apply(entries: (String, Any)*): MarkerMap = MarkerMap(entries.toMap) } trait MarkerUtils { val FALLBACK: String = "unknown" - def combineMarkers(markers: LogMarker*): LogMarker = 
MarkerMap(markers.flatMap(_.markerContents.toSeq).toMap) + def combineMarkers(markers: LogMarker*): LogMarker = MarkerMap( + markers.flatMap(_.markerContents.toSeq).toMap + ) - def addLogMarkers(markers: LogMarker*)(implicit marker: LogMarker): LogMarker = combineMarkers(markers :+ marker:_*) + def addLogMarkers(markers: LogMarker*)(implicit + marker: LogMarker + ): LogMarker = combineMarkers(markers :+ marker: _*) - def addMarkers(markers: (String, Any)*)(implicit marker: LogMarker): LogMarker = { + def addMarkers( + markers: (String, Any)* + )(implicit marker: LogMarker): LogMarker = { combineMarkers(MarkerMap(markers.toMap), marker) } - implicit def fromLogMarker(logMarker: LogMarker):MarkerContext = MarkerContext(logMarker.toLogMarker) + implicit def fromLogMarker(logMarker: LogMarker): MarkerContext = + MarkerContext(logMarker.toLogMarker) } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/RequestLoggingContext.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/RequestLoggingContext.scala index 3a92a63c53..32a45ef378 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/RequestLoggingContext.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/RequestLoggingContext.scala @@ -6,6 +6,10 @@ import net.logstash.logback.marker.{LogstashMarker, Markers} import scala.collection.JavaConverters._ -case class RequestLoggingContext(requestId: UUID = UUID.randomUUID(), initialMarkers: Map[String, String] = Map.empty) extends LogMarker { - override def markerContents: Map[String, Any] = (initialMarkers + ("requestId" -> requestId)) +case class RequestLoggingContext( + requestId: UUID = UUID.randomUUID(), + initialMarkers: Map[String, String] = Map.empty +) extends LogMarker { + override def markerContents: Map[String, Any] = + (initialMarkers + ("requestId" -> requestId)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/Stopwatch.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/Stopwatch.scala index 9a03f1b5c8..0f0cc10f9f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/Stopwatch.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/logging/Stopwatch.scala @@ -5,7 +5,8 @@ import com.gu.mediaservice.lib.DateTimeUtils import scala.concurrent.duration._ -case class DurationForLogging(startTime: ZonedDateTime, duration: Duration) extends LogMarker { +case class DurationForLogging(startTime: ZonedDateTime, duration: Duration) + extends LogMarker { def toMillis: Long = duration.toMillis override def markerContents: Map[String, Any] = Map( "start" -> DateTimeUtils.toString(startTime), @@ -17,7 +18,7 @@ case class DurationForLogging(startTime: ZonedDateTime, duration: Duration) exte } -class Stopwatch { +class Stopwatch { // This method can only be used to measure elapsed time and is not related to any other notion of system or wall-clock time. // Therefore we additionally have `startTime` to track the time. 
@@ -26,7 +27,8 @@ class Stopwatch { private val startTime = DateTimeUtils.now() - def elapsed: DurationForLogging = DurationForLogging(startTime, (System.nanoTime() - startedAt).nanos) + def elapsed: DurationForLogging = + DurationForLogging(startTime, (System.nanoTime() - startedAt).nanos) } object Stopwatch extends GridLogging { @@ -37,12 +39,17 @@ object Stopwatch extends GridLogging { val stopwatch = new Stopwatch try { val result = body - logger.info(addMarkers("elapsed" -> stopwatch.elapsed.duration.toString).toLogMarker, s"Stopwatch: $label") + logger.info( + addMarkers( + "elapsed" -> stopwatch.elapsed.duration.toString + ).toLogMarker, + s"Stopwatch: $label" + ) result } catch { - case e: Exception => logger.error(s"Stopwatch: $label ${stopwatch.elapsed} ns", e); throw e + case e: Exception => + logger.error(s"Stopwatch: $label ${stopwatch.elapsed} ns", e); throw e } } } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/management/Management.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/management/Management.scala index 741666aec0..a6e778ebba 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/management/Management.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/management/Management.scala @@ -2,7 +2,10 @@ package com.gu.mediaservice.lib.management import com.gu.mediaservice.lib.argo._ import com.gu.mediaservice.lib.auth.PermissionsHandler -import com.gu.mediaservice.lib.elasticsearch.{ElasticSearchClient, ElasticSearchImageCounts} +import com.gu.mediaservice.lib.elasticsearch.{ + ElasticSearchClient, + ElasticSearchImageCounts +} import com.gu.mediaservice.lib.logging.GridLogging import play.api.libs.json.{Format, Json} import play.api.mvc.{Action, AnyContent, BaseController, ControllerComponents} @@ -19,7 +22,10 @@ trait HealthCheck extends BaseController { } } -trait ManagementController extends HealthCheck with BaseController with ArgoHelpers { +trait ManagementController + extends HealthCheck + with BaseController + with ArgoHelpers { def buildInfo: BuildInfo def disallowRobots = Action { @@ -31,11 +37,18 @@ trait ManagementController extends HealthCheck with BaseController with ArgoHelp } } -class Management(override val controllerComponents: ControllerComponents, override val buildInfo: BuildInfo) extends ManagementController +class Management( + override val controllerComponents: ControllerComponents, + override val buildInfo: BuildInfo +) extends ManagementController -class ManagementWithPermissions(override val controllerComponents: ControllerComponents, permissionedController: PermissionsHandler, override val buildInfo: BuildInfo) extends ManagementController { +class ManagementWithPermissions( + override val controllerComponents: ControllerComponents, + permissionedController: PermissionsHandler, + override val buildInfo: BuildInfo +) extends ManagementController { override def healthCheck = Action { - if(permissionedController.storeIsEmpty) { + if (permissionedController.storeIsEmpty) { ServiceUnavailable("Permissions store is empty") } else { Ok("ok") @@ -43,8 +56,12 @@ class ManagementWithPermissions(override val controllerComponents: ControllerCom } } -class ElasticSearchHealthCheck(override val controllerComponents: ControllerComponents, elasticsearch: ElasticSearchClient)(implicit val ec: ExecutionContext) - extends HealthCheck with GridLogging { +class ElasticSearchHealthCheck( + override val controllerComponents: ControllerComponents, + elasticsearch: ElasticSearchClient +)(implicit val ec: ExecutionContext) + 
extends HealthCheck + with GridLogging { override def healthCheck: Action[AnyContent] = Action.async { elasticHealth.map { diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/metadata/ImageMetadataConverter.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/metadata/ImageMetadataConverter.scala index e650139bf7..39c47be21b 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/metadata/ImageMetadataConverter.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/metadata/ImageMetadataConverter.scala @@ -13,7 +13,8 @@ object ImageMetadataConverter extends GridLogging { private def extractSubjects(fileMetadata: FileMetadata): List[String] = { val supplementalCategories = fileMetadata.iptc .get("Supplemental Category(s)") - .toList.flatMap(_.split("\\s+")) + .toList + .flatMap(_.split("\\s+")) val category = fileMetadata.iptc .get("Category") @@ -24,23 +25,31 @@ object ImageMetadataConverter extends GridLogging { .distinct } - private def extractXMPArrayStrings(field: String, fileMetadata: FileMetadata): Seq[String] = fileMetadata.xmp.get(field) match { - case Some(JsArray(items)) => items.toList.flatMap { - case JsString(value) => Some(value) - case _ => None - } + private def extractXMPArrayStrings( + field: String, + fileMetadata: FileMetadata + ): Seq[String] = fileMetadata.xmp.get(field) match { + case Some(JsArray(items)) => + items.toList.flatMap { + case JsString(value) => Some(value) + case _ => None + } case Some(value) => List(value.toString) - case _ => List() + case _ => List() } - private def extractPeople(fileMetadata: FileMetadata): Set[String] = { - val xmpIptcPeople = extractXMPArrayStrings("Iptc4xmpExt:PersonInImage", fileMetadata) - val xmpGettyPeople = extractXMPArrayStrings("GettyImagesGIFT:Personality", fileMetadata) + val xmpIptcPeople = + extractXMPArrayStrings("Iptc4xmpExt:PersonInImage", fileMetadata) + val xmpGettyPeople = + extractXMPArrayStrings("GettyImagesGIFT:Personality", fileMetadata) (xmpIptcPeople ++ xmpGettyPeople).toSet } - def fromFileMetadata(fileMetadata: FileMetadata, latestAllowedDateTime: Option[DateTime] = None): ImageMetadata = { + def fromFileMetadata( + fileMetadata: FileMetadata, + latestAllowedDateTime: Option[DateTime] = None + ): ImageMetadata = { val xmp = fileMetadata.xmp val readXmpHeadStringProp: String => Option[String] = (name: String) => { val res = xmp.get(name) match { @@ -53,48 +62,58 @@ object ImageMetadataConverter extends GridLogging { } ImageMetadata( - dateTaken = (fileMetadata.exifSub.get("Date/Time Original Composite") flatMap (parseRandomDate(_, latestAllowedDateTime))) orElse - (fileMetadata.iptc.get("Date Time Created Composite") flatMap (parseRandomDate(_, latestAllowedDateTime))) orElse - (readXmpHeadStringProp("photoshop:DateCreated") flatMap (parseRandomDate(_, latestAllowedDateTime))), - description = readXmpHeadStringProp("dc:description") orElse - fileMetadata.iptc.get("Caption/Abstract") orElse - fileMetadata.exif.get("Image Description"), - credit = readXmpHeadStringProp("photoshop:Credit") orElse - fileMetadata.iptc.get("Credit"), - byline = readXmpHeadStringProp("dc:creator") orElse - fileMetadata.iptc.get("By-line") orElse - fileMetadata.exif.get("Artist"), - bylineTitle = readXmpHeadStringProp("photoshop:AuthorsPosition") orElse - fileMetadata.iptc.get("By-line Title"), - title = readXmpHeadStringProp("photoshop:Headline") orElse - fileMetadata.iptc.get("Headline"), - copyright = readXmpHeadStringProp("dc:Rights") orElse - fileMetadata.iptc.get("Copyright Notice") orElse - 
fileMetadata.exif.get("Copyright"), + dateTaken = (fileMetadata.exifSub.get( + "Date/Time Original Composite" + ) flatMap (parseRandomDate(_, latestAllowedDateTime))) orElse + (fileMetadata.iptc.get( + "Date Time Created Composite" + ) flatMap (parseRandomDate(_, latestAllowedDateTime))) orElse + (readXmpHeadStringProp( + "photoshop:DateCreated" + ) flatMap (parseRandomDate(_, latestAllowedDateTime))), + description = readXmpHeadStringProp("dc:description") orElse + fileMetadata.iptc.get("Caption/Abstract") orElse + fileMetadata.exif.get("Image Description"), + credit = readXmpHeadStringProp("photoshop:Credit") orElse + fileMetadata.iptc.get("Credit"), + byline = readXmpHeadStringProp("dc:creator") orElse + fileMetadata.iptc.get("By-line") orElse + fileMetadata.exif.get("Artist"), + bylineTitle = readXmpHeadStringProp("photoshop:AuthorsPosition") orElse + fileMetadata.iptc.get("By-line Title"), + title = readXmpHeadStringProp("photoshop:Headline") orElse + fileMetadata.iptc.get("Headline"), + copyright = readXmpHeadStringProp("dc:Rights") orElse + fileMetadata.iptc.get("Copyright Notice") orElse + fileMetadata.exif.get("Copyright"), // Here we combine two separate fields, based on bad habits of our suppliers. - suppliersReference = readXmpHeadStringProp("photoshop:TransmissionReference") orElse - fileMetadata.iptc.get("Original Transmission Reference") orElse - readXmpHeadStringProp("dc:title") orElse - fileMetadata.iptc.get("Object Name"), - source = readXmpHeadStringProp("photoshop:Source") orElse - fileMetadata.iptc.get("Source"), - specialInstructions = readXmpHeadStringProp("photoshop:Instructions") orElse - fileMetadata.iptc.get("Special Instructions"), + suppliersReference = + readXmpHeadStringProp("photoshop:TransmissionReference") orElse + fileMetadata.iptc.get("Original Transmission Reference") orElse + readXmpHeadStringProp("dc:title") orElse + fileMetadata.iptc.get("Object Name"), + source = readXmpHeadStringProp("photoshop:Source") orElse + fileMetadata.iptc.get("Source"), + specialInstructions = + readXmpHeadStringProp("photoshop:Instructions") orElse + fileMetadata.iptc.get("Special Instructions"), // FIXME: Read XMP dc:subject array: - keywords = fileMetadata.iptc.get("Keywords") map (_.split(Array(';', ',')).distinct.map(_.trim).toList) getOrElse Nil, + keywords = fileMetadata.iptc.get("Keywords") map (_.split( + Array(';', ',') + ).distinct.map(_.trim).toList) getOrElse Nil, // FIXME: Parse newest location schema: http://www.iptc.org/std/photometadata/specification/IPTC-PhotoMetadata#location-structure - subLocation = readXmpHeadStringProp("Iptc4xmpCore:Location") orElse - fileMetadata.iptc.get("Sub-location"), - city = readXmpHeadStringProp("photoshop:City") orElse - fileMetadata.iptc.get("City"), - state = readXmpHeadStringProp("photoshop:State") orElse - fileMetadata.iptc.get("Province/State"), - country = readXmpHeadStringProp("Iptc4xmpCore:CountryCode") orElse - fileMetadata.iptc.get("Country/Primary Location Code") orElse - readXmpHeadStringProp("photoshop:Country") orElse - fileMetadata.iptc.get("Country/Primary Location Name"), - subjects = extractSubjects(fileMetadata), - peopleInImage = extractPeople(fileMetadata) + subLocation = readXmpHeadStringProp("Iptc4xmpCore:Location") orElse + fileMetadata.iptc.get("Sub-location"), + city = readXmpHeadStringProp("photoshop:City") orElse + fileMetadata.iptc.get("City"), + state = readXmpHeadStringProp("photoshop:State") orElse + fileMetadata.iptc.get("Province/State"), + country = 
readXmpHeadStringProp("Iptc4xmpCore:CountryCode") orElse + fileMetadata.iptc.get("Country/Primary Location Code") orElse + readXmpHeadStringProp("photoshop:Country") orElse + fileMetadata.iptc.get("Country/Primary Location Name"), + subjects = extractSubjects(fileMetadata), + peopleInImage = extractPeople(fileMetadata) ) } @@ -105,18 +124,15 @@ object ImageMetadataConverter extends GridLogging { DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZZ"), DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss' 'ZZ"), DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZZ"), - // no timezone provided so force UTC rather than use the machine's timezone DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZoneUTC, DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS").withZoneUTC, - // 2014-12-16T02:23+01:00 - Same as above but missing seconds lol DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm.SSSZZ"), DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mmZZ"), // Tue Dec 16 01:23:45 GMT 2014 - Let's make machine metadata human readable! DateTimeFormat.forPattern("E MMM dd HH:mm:ss.SSS z yyyy"), DateTimeFormat.forPattern("E MMM dd HH:mm:ss z yyyy"), - /* `BST` can be: - British Summer Time @@ -125,39 +141,53 @@ object ImageMetadataConverter extends GridLogging { See https://24timezones.com/time-zone/bst Be ignorant and assume British Summer Time because we're Europe centric */ - DateTimeFormat.forPattern("E MMM dd HH:mm:ss.SSS 'BST' yyyy").withZone(DateTimeZone.forOffsetHours(1)), - DateTimeFormat.forPattern("E MMM dd HH:mm:ss 'BST' yyyy").withZone(DateTimeZone.forOffsetHours(1)), - + DateTimeFormat + .forPattern("E MMM dd HH:mm:ss.SSS 'BST' yyyy") + .withZone(DateTimeZone.forOffsetHours(1)), + DateTimeFormat + .forPattern("E MMM dd HH:mm:ss 'BST' yyyy") + .withZone(DateTimeZone.forOffsetHours(1)), DateTimeFormat.forPattern("yyyyMMdd"), DateTimeFormat.forPattern("yyyyMM"), DateTimeFormat.forPattern("yyyyddMM"), DateTimeFormat.forPattern("yyyy"), DateTimeFormat.forPattern("yyyy-MM"), - // 2014-12-16 - Maybe it's just a date // no timezone provided so force UTC rather than use the machine's timezone ISODateTimeFormat.date.withZoneUTC ) - private[metadata] def parseRandomDate(str: String, maxDate: Option[DateTime] = None): Option[DateTime] = { - dateTimeFormatters.foldLeft[Option[DateTime]](None){ - case (successfulDate@Some(_), _) => successfulDate - // NB We refuse parse results which result in future dates, if a max date is provided. - // eg If we get a pic today (22nd January 2021) with a date string of 20211201 we can be pretty sure - // that it should be parsed as (eg) US (12th Jan 2021), not EU (1st Dec 2021). - // So we refuse the (apparently successful) EU parse result. - case (None, formatter) => safeParsing(formatter.parseDateTime(str)) - .filter(d => maxDate.forall(md => d.isBefore(md))) - }.map(_.withZone(DateTimeZone.UTC)) + private[metadata] def parseRandomDate( + str: String, + maxDate: Option[DateTime] = None + ): Option[DateTime] = { + dateTimeFormatters + .foldLeft[Option[DateTime]](None) { + case (successfulDate @ Some(_), _) => successfulDate + // NB We refuse parse results which result in future dates, if a max date is provided. + // eg If we get a pic today (22nd January 2021) with a date string of 20211201 we can be pretty sure + // that it should be parsed as (eg) US (12th Jan 2021), not EU (1st Dec 2021). + // So we refuse the (apparently successful) EU parse result. 
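
As an aside, a minimal sketch of the disambiguation the comment above describes, using Joda-Time as the surrounding code does. The patch itself only reformats the existing foldLeft; this simplified version just shows why the future-dated parse is refused. The sample date string and ingest date come from the comment; everything else is illustrative.

    import org.joda.time.DateTime
    import org.joda.time.format.DateTimeFormat
    import scala.util.Try

    // "20211201" ingested on 22 Jan 2021: yyyyMMdd would give 1 Dec 2021 (future),
    // yyyyddMM gives 12 Jan 2021 (past), so the latter wins.
    val maxDate = new DateTime(2021, 1, 22, 0, 0)
    val parsed = Seq("yyyyMMdd", "yyyyddMM").view
      .map(DateTimeFormat.forPattern)
      .flatMap(f => Try(f.parseDateTime("20211201")).toOption)
      .find(_.isBefore(maxDate)) // refuse anything after the ingest date
    // parsed == Some(12 Jan 2021)
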
+ case (None, formatter) => + safeParsing(formatter.parseDateTime(str)) + .filter(d => maxDate.forall(md => d.isBefore(md))) + } + .map(_.withZone(DateTimeZone.UTC)) } private def safeParsing[A](parse: => A): Option[A] = Try(parse).toOption private def cleanDateFormat = ISODateTimeFormat.dateTime - def cleanDate(dirtyDate: String, fieldName: String = "none", imageId:String = "none"): String = parseRandomDate(dirtyDate) match { + def cleanDate( + dirtyDate: String, + fieldName: String = "none", + imageId: String = "none" + ): String = parseRandomDate(dirtyDate) match { case Some(cleanDate) => cleanDateFormat.print(cleanDate) case None => { - logger.info(s"Unable to parse date $dirtyDate from field $fieldName for image $imageId") + logger.info( + s"Unable to parse date $dirtyDate from field $fieldName for image $imageId" + ) dirtyDate } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/metadata/Subject.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/metadata/Subject.scala index a682fcbbef..c56d8c332a 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/metadata/Subject.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/metadata/Subject.scala @@ -1,6 +1,5 @@ package com.gu.mediaservice.lib.metadata - object Subject extends Enumeration { type SupplierCategory = Value val Arts = Value("arts") @@ -24,52 +23,53 @@ object Subject extends Enumeration { val Weather = Value("weather") // These category codes are now deprecated but still populated - def create(category: String): Option[Subject.Value] = category.toLowerCase match { - // ANPA-1312 Codes: https://en.wikipedia.org/wiki/ANPA-1312 - // http://www.eznews.com/help/ezsend/index.html?ANPAStandard - case "f" => Some(Finance) - case "l" => Some(Lifestyle) - case "e" => Some(Arts) - case "s" => Some(Sport) - case "o" => Some(Weather) - case "p" => Some(Politics) - case "i" => Some(News) - case "a" => Some(News) + def create(category: String): Option[Subject.Value] = + category.toLowerCase match { + // ANPA-1312 Codes: https://en.wikipedia.org/wiki/ANPA-1312 + // http://www.eznews.com/help/ezsend/index.html?ANPAStandard + case "f" => Some(Finance) + case "l" => Some(Lifestyle) + case "e" => Some(Arts) + case "s" => Some(Sport) + case "o" => Some(Weather) + case "p" => Some(Politics) + case "i" => Some(News) + case "a" => Some(News) - // See: https://www.iptc.org/std/photometadata/documentation/GenericGuidelines/index.htm#!Documents/guidelineformappingcategorycodestosubjectnewscodes.htm - case "ace" => Some(Arts) - case "clj" => Some(Crime) - case "dis" => Some(Disaster) - case "fin" => Some(Finance) - case "edu" => Some(Education) - case "evn" => Some(Environment) - case "hth" => Some(Health) - case "hum" => Some(Human) - case "lab" => Some(Labour) - case "lif" => Some(Lifestyle) - case "pol" => Some(Politics) - case "rel" => Some(Religion) - case "sci" => Some(Science) - case "soi" => Some(Social) - case "spo" => Some(Sport) - case "war" => Some(War) - case "wea" => Some(Weather) + // See: https://www.iptc.org/std/photometadata/documentation/GenericGuidelines/index.htm#!Documents/guidelineformappingcategorycodestosubjectnewscodes.htm + case "ace" => Some(Arts) + case "clj" => Some(Crime) + case "dis" => Some(Disaster) + case "fin" => Some(Finance) + case "edu" => Some(Education) + case "evn" => Some(Environment) + case "hth" => Some(Health) + case "hum" => Some(Human) + case "lab" => Some(Labour) + case "lif" => Some(Lifestyle) + case "pol" => Some(Politics) + case "rel" => Some(Religion) + case "sci" 
=> Some(Science) + case "soi" => Some(Social) + case "spo" => Some(Sport) + case "war" => Some(War) + case "wea" => Some(Weather) - // Added from an internally supplied list - case "ani" => Some(Nature) - case "nat" => Some(Nature) - case "wld" => Some(Nature) - case "biz" => Some(Finance) - case "max" => Some(Finance) - case "ent" => Some(Arts) - case "cel" => Some(Arts) - case "odd" => Some(Lifestyle) + // Added from an internally supplied list + case "ani" => Some(Nature) + case "nat" => Some(Nature) + case "wld" => Some(Nature) + case "biz" => Some(Finance) + case "max" => Some(Finance) + case "ent" => Some(Arts) + case "cel" => Some(Arts) + case "odd" => Some(Lifestyle) - // Other vaues used in supplemental categories - case "entertainment" => Some(Arts) - case "fashion" => Some(Arts) - case "showbiz" => Some(Arts) + // Other vaues used in supplemental categories + case "entertainment" => Some(Arts) + case "fashion" => Some(Arts) + case "showbiz" => Some(Arts) - case _ => None - } + case _ => None + } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/metrics/CloudWatchMetrics.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/metrics/CloudWatchMetrics.scala index 05d38357ee..64ab22972b 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/metrics/CloudWatchMetrics.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/metrics/CloudWatchMetrics.scala @@ -1,7 +1,10 @@ package com.gu.mediaservice.lib.metrics import com.amazonaws.services.cloudwatch.model._ -import com.amazonaws.services.cloudwatch.{AmazonCloudWatch, AmazonCloudWatchClientBuilder} +import com.amazonaws.services.cloudwatch.{ + AmazonCloudWatch, + AmazonCloudWatchClientBuilder +} import com.gu.mediaservice.lib.config.CommonConfig import org.slf4j.LoggerFactory import scalaz.concurrent.Task @@ -46,14 +49,17 @@ abstract class CloudWatchMetrics(namespace: String, config: CommonConfig) { class CountMetric(name: String) extends CloudWatchMetric[Long](name) { - protected def toDatum(a: Long, dimensions: List[Dimension]) = datum(StandardUnit.Count, a, dimensions) + protected def toDatum(a: Long, dimensions: List[Dimension]) = + datum(StandardUnit.Count, a, dimensions) - def increment(dimensions: List[Dimension] = Nil, n: Long = 1): Task[Unit] = recordOne(n, dimensions) + def increment(dimensions: List[Dimension] = Nil, n: Long = 1): Task[Unit] = + recordOne(n, dimensions) } class TimeMetric(name: String) extends CloudWatchMetric[Long](name) { - protected def toDatum(a: Long, dimensions: List[Dimension]) = datum(StandardUnit.Milliseconds, a, dimensions) + protected def toDatum(a: Long, dimensions: List[Dimension]) = + datum(StandardUnit.Milliseconds, a, dimensions) } private lazy val logger = LoggerFactory.getLogger(getClass) @@ -61,46 +67,73 @@ abstract class CloudWatchMetrics(namespace: String, config: CommonConfig) { private val topic: Topic[MetricDatum] = async.topic[MetricDatum]() private val sink: Sink[Task, Seq[MetricDatum]] = constant { data => - putData(data).handle { case e: RuntimeException => logger.error(s"Error while publishing metrics", e) } + putData(data).handle { case e: RuntimeException => + logger.error(s"Error while publishing metrics", e) + } } - private val client: AmazonCloudWatch = config.withAWSCredentials(AmazonCloudWatchClientBuilder.standard()).build() + private val client: AmazonCloudWatch = + config.withAWSCredentials(AmazonCloudWatchClientBuilder.standard()).build() private def putData(data: Seq[MetricDatum]): Task[Unit] = Task { val aggregatedMetrics: 
Seq[MetricDatum] = data .groupBy(metric => (metric.getMetricName, metric.getDimensions)) .map { case (_, values) => - values.reduce((m1, m2) => m1.clone() - .withValue(null) - .withStatisticValues(aggregateMetricStats(m1,m2))) + values.reduce((m1, m2) => + m1.clone() + .withValue(null) + .withStatisticValues(aggregateMetricStats(m1, m2)) + ) } .toSeq - aggregatedMetrics.grouped(20).foreach(chunkedMetrics => { //can only send max 20 metrics to CW at a time - client.putMetricData(new PutMetricDataRequest() - .withNamespace(namespace) - .withMetricData(chunkedMetrics.asJava)) - } + aggregatedMetrics + .grouped(20) + .foreach( + chunkedMetrics => { //can only send max 20 metrics to CW at a time + client.putMetricData( + new PutMetricDataRequest() + .withNamespace(namespace) + .withMetricData(chunkedMetrics.asJava) + ) + } + ) + + logger.info( + s"Put ${data.size} metric data points (aggregated to ${aggregatedMetrics.size} points) to namespace $namespace" ) - - logger.info(s"Put ${data.size} metric data points (aggregated to ${aggregatedMetrics.size} points) to namespace $namespace") } - private def aggregateMetricStats(metricDatumOriginal: MetricDatum, metricDatumNew: MetricDatum): StatisticSet = { + private def aggregateMetricStats( + metricDatumOriginal: MetricDatum, + metricDatumNew: MetricDatum + ): StatisticSet = { metricDatumOriginal.getStatisticValues match { case stats if stats == null => new StatisticSet() - .withMinimum(Math.min(metricDatumOriginal.getValue, metricDatumNew.getValue)) - .withMaximum(Math.max(metricDatumOriginal.getValue, metricDatumNew.getValue)) + .withMinimum( + Math.min(metricDatumOriginal.getValue, metricDatumNew.getValue) + ) + .withMaximum( + Math.max(metricDatumOriginal.getValue, metricDatumNew.getValue) + ) .withSum(metricDatumOriginal.getValue + metricDatumNew.getValue) - .withSampleCount(if (metricDatumOriginal.getUnit.equals(StandardUnit.Count.toString)) 1d else 2d) + .withSampleCount( + if (metricDatumOriginal.getUnit.equals(StandardUnit.Count.toString)) + 1d + else 2d + ) case stats => new StatisticSet() .withMinimum(Math.min(stats.getMinimum, metricDatumNew.getValue)) .withMaximum(Math.max(stats.getMinimum, metricDatumNew.getValue)) .withSum(stats.getSum + metricDatumNew.getValue) - .withSampleCount(if (metricDatumOriginal.getUnit.equals(StandardUnit.Count.toString)) 1d else stats.getSampleCount + 1) + .withSampleCount( + if (metricDatumOriginal.getUnit.equals(StandardUnit.Count.toString)) + 1d + else stats.getSampleCount + 1 + ) } } @@ -109,21 +142,32 @@ abstract class CloudWatchMetrics(namespace: String, config: CommonConfig) { final def recordOne(a: A, dimensions: List[Dimension] = Nil): Task[Unit] = topic.publishOne(toDatum(a, dimensions).withTimestamp(new java.util.Date)) - final def recordMany(as: Seq[A], dimensions: List[Dimension] = Nil): Task[Unit] = - emitAll(as map (a => toDatum(a, dimensions).withTimestamp(new java.util.Date))) - .toSource.to(topic.publish).run + final def recordMany( + as: Seq[A], + dimensions: List[Dimension] = Nil + ): Task[Unit] = + emitAll( + as map (a => toDatum(a, dimensions).withTimestamp(new java.util.Date)) + ).toSource.to(topic.publish).run final def runRecordOne(a: A, dimensions: List[Dimension] = Nil): Unit = recordOne(a, dimensions).runAsync(loggingErrors) - final def runRecordMany(as: Seq[A], dimensions: List[Dimension] = Nil): Unit = + final def runRecordMany( + as: Seq[A], + dimensions: List[Dimension] = Nil + ): Unit = recordMany(as, dimensions).runAsync(loggingErrors) /** Must be implemented to provide a 
way to turn an `A` into a `MetricDatum` */ protected def toDatum(a: A, dimensions: List[Dimension]): MetricDatum /** Convenience method for instantiating a `MetricDatum` with this metric's `name` and `dimension` */ - protected def datum(unit: StandardUnit, value: Double, dimensions: List[Dimension]): MetricDatum = + protected def datum( + unit: StandardUnit, + value: Double, + dimensions: List[Dimension] + ): MetricDatum = new MetricDatum() .withMetricName(name) .withUnit(unit) @@ -135,6 +179,10 @@ abstract class CloudWatchMetrics(namespace: String, config: CommonConfig) { import com.gu.mediaservice.lib.Processes._ /** Subscribe the metric publishing sink to the topic */ - topic.subscribe.chunkTimed(maxAge, maxChunkSize).to(sink).run.runAsync(loggingErrors) + topic.subscribe + .chunkTimed(maxAge, maxChunkSize) + .to(sink) + .run + .runAsync(loggingErrors) -} \ No newline at end of file +} diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/metrics/FutureSyntax.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/metrics/FutureSyntax.scala index 1e7bc6aa9c..1032daec40 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/metrics/FutureSyntax.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/metrics/FutureSyntax.scala @@ -10,18 +10,23 @@ trait FutureSyntax { implicit class FutureOps[A](self: Future[A])(implicit ex: ExecutionContext) { - def incrementOnSuccess[N](metric: Option[Metric[N]])(implicit N: Numeric[N]): Future[A] = + def incrementOnSuccess[N](metric: Option[Metric[N]])(implicit + N: Numeric[N] + ): Future[A] = toMetric(metric)(_ => N.fromInt(1)) - def incrementOnFailure[B](metric: Option[Metric[B]])(pfn: PartialFunction[Throwable, Boolean]) - (implicit B: Numeric[B]): Future[A] = { + def incrementOnFailure[B](metric: Option[Metric[B]])( + pfn: PartialFunction[Throwable, Boolean] + )(implicit B: Numeric[B]): Future[A] = { self.failed.foreach(pfn.andThen { b => if (b) metric.foreach(_.runRecordOne(B.fromInt(1))) }) self } - def toMetric[B](metric: Option[Metric[B]], dims: List[Dimension] = List())(f: A => B): Future[A] = { + def toMetric[B](metric: Option[Metric[B]], dims: List[Dimension] = List())( + f: A => B + ): Future[A] = { self.foreach { case a => metric.foreach(_.runRecordOne(f(a), dims)) } self } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/net/URI.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/net/URI.scala index c62805bbab..ad6d2e084a 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/net/URI.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/net/URI.scala @@ -3,7 +3,8 @@ package com.gu.mediaservice.lib.net import java.net.{URI => JURI, URLDecoder, URLEncoder} object URI { - def encode(uri: String): String = URLEncoder.encode(uri, "UTF-8").replace("+", "%20") + def encode(uri: String): String = + URLEncoder.encode(uri, "UTF-8").replace("+", "%20") def decode(uri: String): String = URLDecoder.decode(uri, "UTF-8") def encodePlus(uri: String): String = uri.replace("+", "%2B") @@ -11,11 +12,12 @@ object URI { def ensureSecure(str: String): JURI = { val uri = JURI.create(str) - val secureString : String = (Option(uri.getScheme), Option(uri.getHost)) match { - case (Some("https"), _) => str - case (Some("http"), Some(host)) => s"https://$host" - case (_, _) => s"https://$str" - } + val secureString: String = + (Option(uri.getScheme), Option(uri.getHost)) match { + case (Some("https"), _) => str + case (Some("http"), Some(host)) => s"https://$host" + case (_, _) => s"https://$str" + } 
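
As an aside, the (scheme, host) match above implies the following behaviour for a few hypothetical inputs (hostnames are illustrative only; note that for plain-http URLs only the host survives):

    import com.gu.mediaservice.lib.net.URI

    URI.ensureSecure("https://images.example.com/i.jpg") // returned unchanged
    URI.ensureSecure("http://images.example.com/i.jpg")  // https://images.example.com (host only)
    URI.ensureSecure("images.example.com/i.jpg")         // https://images.example.com/i.jpg
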
JURI.create(secureString) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/ConnectionBrokenFilter.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/ConnectionBrokenFilter.scala index 3681aa17c5..1d65e85d51 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/ConnectionBrokenFilter.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/ConnectionBrokenFilter.scala @@ -7,8 +7,7 @@ import play.api.mvc.{Filter, RequestHeader, Result, Results} import scala.concurrent.{ExecutionContext, Future} -/** - * When the GRID is reloaded during an upload, or the network is throttled / flaky +/** When the GRID is reloaded during an upload, or the network is throttled / flaky * an attempt to POST a large file will result in an EntityStreamException being thrown * on attempt to read the input stream. * This is, by default, logged as a server error (5XX) but cannot be usefully addressed @@ -18,13 +17,17 @@ import scala.concurrent.{ExecutionContext, Future} * * The client is almost certainly ignoring the response anyway. */ -class ConnectionBrokenFilter(override val mat: Materializer)(implicit ec: ExecutionContext) - extends Filter with Results with StrictLogging { - override def apply(next: (RequestHeader) => Future[Result])(rh: RequestHeader): Future[Result] = { - next(rh) recover { - case _:EntityStreamException => - logger.info(s"Upload failed with EntityStreamException. Request = $rh") - UnprocessableEntity("The upload did not complete") +class ConnectionBrokenFilter(override val mat: Materializer)(implicit + ec: ExecutionContext +) extends Filter + with Results + with StrictLogging { + override def apply( + next: (RequestHeader) => Future[Result] + )(rh: RequestHeader): Future[Result] = { + next(rh) recover { case _: EntityStreamException => + logger.info(s"Upload failed with EntityStreamException. 
Request = $rh") + UnprocessableEntity("The upload did not complete") } } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/GridAppLoader.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/GridAppLoader.scala index 99e86e6d59..a3028f7012 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/GridAppLoader.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/GridAppLoader.scala @@ -5,12 +5,19 @@ import com.gu.mediaservice.lib.logging.LogConfig import play.api.ApplicationLoader.Context import play.api.{Application, ApplicationLoader} -abstract class GridAppLoader[Config <: CommonConfig](appName: String, loadFn: Context => GridComponents[Config]) extends ApplicationLoader { +abstract class GridAppLoader[Config <: CommonConfig]( + appName: String, + loadFn: Context => GridComponents[Config] +) extends ApplicationLoader { final override def load(context: Context): Application = { LogConfig.initPlayLogging(context) val fileConfig = GridConfigLoader.read(appName, context.environment.mode) - val gridApp = loadFn(context.copy(initialConfiguration = context.initialConfiguration ++ fileConfig)) + val gridApp = loadFn( + context.copy(initialConfiguration = + context.initialConfiguration ++ fileConfig + ) + ) gridApp.application } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/GridComponents.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/GridComponents.scala index 6f8c775487..e4745ba4f6 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/GridComponents.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/GridComponents.scala @@ -1,8 +1,18 @@ package com.gu.mediaservice.lib.play import com.gu.mediaservice.lib.auth.Authentication -import com.gu.mediaservice.lib.auth.provider.{MachineAuthenticationProvider, AuthenticationProviderResources, AuthenticationProviders, UserAuthenticationProvider} -import com.gu.mediaservice.lib.config.{ApiAuthenticationProviderLoader, CommonConfig, GridConfigResources, UserAuthenticationProviderLoader} +import com.gu.mediaservice.lib.auth.provider.{ + MachineAuthenticationProvider, + AuthenticationProviderResources, + AuthenticationProviders, + UserAuthenticationProvider +} +import com.gu.mediaservice.lib.config.{ + ApiAuthenticationProviderLoader, + CommonConfig, + GridConfigResources, + UserAuthenticationProviderLoader +} import com.gu.mediaservice.lib.logging.LogConfig import com.gu.mediaservice.lib.management.{BuildInfo, Management} import play.api.ApplicationLoader.Context @@ -16,10 +26,18 @@ import play.filters.gzip.GzipFilterComponents import scala.concurrent.ExecutionContext -abstract class GridComponents[Config <: CommonConfig](context: Context, val loadConfig: GridConfigResources => Config) extends BuiltInComponentsFromContext(context) - with AhcWSComponents with HttpFiltersComponents with CORSComponents with GzipFilterComponents { +abstract class GridComponents[Config <: CommonConfig]( + context: Context, + val loadConfig: GridConfigResources => Config +) extends BuiltInComponentsFromContext(context) + with AhcWSComponents + with HttpFiltersComponents + with CORSComponents + with GzipFilterComponents { // first of all create the config for the service - val config: Config = loadConfig(GridConfigResources(configuration, actorSystem)) + val config: Config = loadConfig( + GridConfigResources(configuration, actorSystem) + ) // next thing is to set up log shipping LogConfig.initKinesisLogging(config) LogConfig.initLocalLogShipping(config) @@ -38,9 
+56,16 @@ abstract class GridComponents[Config <: CommonConfig](context: Context, val load new RequestMetricFilter(config, materializer) ) - final override lazy val corsConfig: CORSConfig = CORSConfig.fromConfiguration(context.initialConfiguration).copy( - allowedOrigins = Origins.Matching(Set(config.services.kahunaBaseUri, config.services.apiBaseUri) ++ config.services.corsAllowedDomains) - ) + final override lazy val corsConfig: CORSConfig = CORSConfig + .fromConfiguration(context.initialConfiguration) + .copy( + allowedOrigins = Origins.Matching( + Set( + config.services.kahunaBaseUri, + config.services.apiBaseUri + ) ++ config.services.corsAllowedDomains + ) + ) lazy val management = new Management(controllerComponents, buildInfo) private val authProviderResources = AuthenticationProviderResources( @@ -51,13 +76,28 @@ abstract class GridComponents[Config <: CommonConfig](context: Context, val load ) val providers: AuthenticationProviders = AuthenticationProviders( - userProvider = config.configuration.get[UserAuthenticationProvider]("authentication.providers.user")(UserAuthenticationProviderLoader.singletonConfigLoader(authProviderResources)), - apiProvider = config.configuration.get[MachineAuthenticationProvider]("authentication.providers.machine")(ApiAuthenticationProviderLoader.singletonConfigLoader(authProviderResources)) + userProvider = config.configuration + .get[UserAuthenticationProvider]("authentication.providers.user")( + UserAuthenticationProviderLoader.singletonConfigLoader( + authProviderResources + ) + ), + apiProvider = config.configuration + .get[MachineAuthenticationProvider]("authentication.providers.machine")( + ApiAuthenticationProviderLoader.singletonConfigLoader( + authProviderResources + ) + ) ) providers.userProvider.initialise() applicationLifecycle.addStopHook(() => providers.userProvider.shutdown()) providers.apiProvider.initialise() applicationLifecycle.addStopHook(() => providers.apiProvider.shutdown()) - val auth = new Authentication(config, providers, controllerComponents.parsers.default, executionContext) + val auth = new Authentication( + config, + providers, + controllerComponents.parsers.default, + executionContext + ) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/RequestLoggingFilter.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/RequestLoggingFilter.scala index 221f228a6e..d2a8518dcb 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/RequestLoggingFilter.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/RequestLoggingFilter.scala @@ -10,11 +10,15 @@ import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} -class RequestLoggingFilter(override val mat: Materializer)(implicit ec: ExecutionContext) extends Filter { +class RequestLoggingFilter(override val mat: Materializer)(implicit + ec: ExecutionContext +) extends Filter { private val logger = Logger("request") - override def apply(next: (RequestHeader) => Future[Result])(rh: RequestHeader): Future[Result] = { + override def apply( + next: (RequestHeader) => Future[Result] + )(rh: RequestHeader): Future[Result] = { val start = System.currentTimeMillis() val result = next(rh) @@ -31,10 +35,16 @@ class RequestLoggingFilter(override val mat: Materializer)(implicit ec: Executio result } - private def logSuccess(request: RequestHeader, response: Result, duration: Long): Unit = { - val originIp = 
request.headers.get("X-Forwarded-For").getOrElse(request.remoteAddress) + private def logSuccess( + request: RequestHeader, + response: Result, + duration: Long + ): Unit = { + val originIp = + request.headers.get("X-Forwarded-For").getOrElse(request.remoteAddress) val referer = request.headers.get("Referer").getOrElse("") - val originalService = request.headers.get(Authentication.originalServiceHeaderName) + val originalService = + request.headers.get(Authentication.originalServiceHeaderName) val length = response.header.headers.getOrElse("Content-Length", 0) val mandatoryMarkers = Map( @@ -46,17 +56,27 @@ class RequestLoggingFilter(override val mat: Materializer)(implicit ec: Executio ) val optionalMarkers = originalService - .map { s => Map(Authentication.originalServiceHeaderName -> s ) } + .map { s => Map(Authentication.originalServiceHeaderName -> s) } .getOrElse(Map.empty) - val markers = MarkerContext(appendEntries((mandatoryMarkers ++ optionalMarkers).asJava)) - logger.info(s"""$originIp - "${request.method} ${request.uri} ${request.version}" ${response.header.status} $length "$referer" ${duration}ms""")(markers) + val markers = MarkerContext( + appendEntries((mandatoryMarkers ++ optionalMarkers).asJava) + ) + logger.info( + s"""$originIp - "${request.method} ${request.uri} ${request.version}" ${response.header.status} $length "$referer" ${duration}ms""" + )(markers) } - private def logFailure(request: RequestHeader, throwable: Throwable, duration: Long): Unit = { - val originIp = request.headers.get("X-Forwarded-For").getOrElse(request.remoteAddress) + private def logFailure( + request: RequestHeader, + throwable: Throwable, + duration: Long + ): Unit = { + val originIp = + request.headers.get("X-Forwarded-For").getOrElse(request.remoteAddress) val referer = request.headers.get("Referer").getOrElse("") - val originalService = request.headers.get(Authentication.originalServiceHeaderName) + val originalService = + request.headers.get(Authentication.originalServiceHeaderName) val mandatoryMarkers = Map( "origin" -> originIp, @@ -66,11 +86,15 @@ class RequestLoggingFilter(override val mat: Materializer)(implicit ec: Executio ) val optionalMarkers = originalService - .map { s => Map(Authentication.originalServiceHeaderName -> s ) } + .map { s => Map(Authentication.originalServiceHeaderName -> s) } .getOrElse(Map.empty) - val markers = MarkerContext(appendEntries((mandatoryMarkers ++ optionalMarkers).asJava)) - logger.info(s"""$originIp - "${request.method} ${request.uri} ${request.version}" ERROR "$referer" ${duration}ms""")(markers) + val markers = MarkerContext( + appendEntries((mandatoryMarkers ++ optionalMarkers).asJava) + ) + logger.info( + s"""$originIp - "${request.method} ${request.uri} ${request.version}" ERROR "$referer" ${duration}ms""" + )(markers) logger.error(s"Error for ${request.method} ${request.uri}", throwable) } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/RequestMetricFilter.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/RequestMetricFilter.scala index 0a977d7d6a..5497fc4f5b 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/play/RequestMetricFilter.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/play/RequestMetricFilter.scala @@ -8,8 +8,13 @@ import play.api.mvc.{Filter, RequestHeader, Result} import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} -class RequestMetricFilter(val config: CommonConfig, override val mat: Materializer)(implicit ec: ExecutionContext) 
extends Filter { - val namespace: String = s"${config.stage}/${config.appName.split('-').map(_.toLowerCase.capitalize).mkString("")}" +class RequestMetricFilter( + val config: CommonConfig, + override val mat: Materializer +)(implicit ec: ExecutionContext) + extends Filter { + val namespace: String = + s"${config.stage}/${config.appName.split('-').map(_.toLowerCase.capitalize).mkString("")}" val enabled: Boolean = config.requestMetricsEnabled object RequestMetrics extends CloudWatchMetrics(namespace, config) { @@ -19,7 +24,9 @@ class RequestMetricFilter(val config: CommonConfig, override val mat: Materializ val requestDuration = new TimeMetric("RequestDuration") } - override def apply(next: RequestHeader => Future[Result])(rh: RequestHeader): Future[Result] = { + override def apply( + next: RequestHeader => Future[Result] + )(rh: RequestHeader): Future[Result] = { val start = System.currentTimeMillis() val result = next(rh) @@ -45,7 +52,7 @@ class RequestMetricFilter(val config: CommonConfig, override val mat: Materializ def shouldRecord(request: RequestHeader): Boolean = { request.path match { case "/management/healthcheck" => false - case _ => true + case _ => true } } -} \ No newline at end of file +} diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/resource/FutureResources.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/resource/FutureResources.scala index f75c6c5a2e..f200f36c19 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/resource/FutureResources.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/resource/FutureResources.scala @@ -7,10 +7,9 @@ object FutureResources { /** Bracket the creation of a Future with resource creation and cleanup actions. * The cleanup is run regardless of whether the Future was successful. 
*/ - def bracket[R, A](acquire: => Future[R]) - (cleanup: R => Unit) - (f: R => Future[A]) - (implicit ctx: ExecutionContext): Future[A] = + def bracket[R, A](acquire: => Future[R])( + cleanup: R => Unit + )(f: R => Future[A])(implicit ctx: ExecutionContext): Future[A] = acquire.flatMap { resource => val future = f(resource) future.onComplete(_ => cleanup(resource)) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/usage/ItemToMediaUsage.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/usage/ItemToMediaUsage.scala index c7fdf7bb69..35782c255d 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/usage/ItemToMediaUsage.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/usage/ItemToMediaUsage.scala @@ -21,15 +21,20 @@ object ItemToMediaUsage { item.getString("media_type"), UsageStatus(item.getString("usage_status")), Option(item.getMap[Any]("print_metadata")) - .map(_.asScala.toMap).flatMap(buildPrint), + .map(_.asScala.toMap) + .flatMap(buildPrint), Option(item.getMap[Any]("digital_metadata")) - .map(_.asScala.toMap).flatMap(buildDigital), + .map(_.asScala.toMap) + .flatMap(buildDigital), Option(item.getMap[Any]("syndication_metadata")) - .map(_.asScala.toMap).flatMap(buildSyndication), + .map(_.asScala.toMap) + .flatMap(buildSyndication), Option(item.getMap[Any]("front_metadata")) - .map(_.asScala.toMap).flatMap(buildFront), + .map(_.asScala.toMap) + .flatMap(buildFront), Option(item.getMap[Any]("download_metadata")) - .map(_.asScala.toMap).flatMap(buildDownload), + .map(_.asScala.toMap) + .flatMap(buildDownload), new DateTime(item.getLong("last_modified")), Try { item.getLong("date_added") @@ -40,7 +45,9 @@ object ItemToMediaUsage { ) } - private def buildFront(metadataMap: Map[String, Any]): Option[FrontUsageMetadata] = { + private def buildFront( + metadataMap: Map[String, Any] + ): Option[FrontUsageMetadata] = { Try { FrontUsageMetadata( metadataMap("addedBy").asInstanceOf[String], @@ -49,7 +56,9 @@ object ItemToMediaUsage { }.toOption } - private def buildSyndication(metadataMap: Map[String, Any]): Option[SyndicationUsageMetadata] = { + private def buildSyndication( + metadataMap: Map[String, Any] + ): Option[SyndicationUsageMetadata] = { Try { SyndicationUsageMetadata( metadataMap("partnerName").asInstanceOf[String] @@ -57,31 +66,50 @@ object ItemToMediaUsage { }.toOption } - private def buildDigital(metadataMap: Map[String, Any]): Option[DigitalUsageMetadata] = { + private def buildDigital( + metadataMap: Map[String, Any] + ): Option[DigitalUsageMetadata] = { Try { DigitalUsageMetadata( URI.create(metadataMap("webUrl").asInstanceOf[String]), metadataMap("webTitle").asInstanceOf[String], metadataMap("sectionId").asInstanceOf[String], - metadataMap.get("composerUrl").map(x => URI.create(x.asInstanceOf[String])) + metadataMap + .get("composerUrl") + .map(x => URI.create(x.asInstanceOf[String])) ) }.toOption } - private def buildPrint(metadataMap: Map[String, Any]): Option[PrintUsageMetadata] = { + private def buildPrint( + metadataMap: Map[String, Any] + ): Option[PrintUsageMetadata] = { type JStringNumMap = java.util.LinkedHashMap[String, java.math.BigDecimal] Try { PrintUsageMetadata( sectionName = metadataMap.apply("sectionName").asInstanceOf[String], - issueDate = metadataMap.get("issueDate").map(_.asInstanceOf[String]) - .map(ISODateTimeFormat.dateTimeParser().parseDateTime).get, - pageNumber = metadataMap.apply("pageNumber").asInstanceOf[java.math.BigDecimal].intValue, + issueDate = metadataMap + .get("issueDate") + 
.map(_.asInstanceOf[String]) + .map(ISODateTimeFormat.dateTimeParser().parseDateTime) + .get, + pageNumber = metadataMap + .apply("pageNumber") + .asInstanceOf[java.math.BigDecimal] + .intValue, storyName = metadataMap.apply("storyName").asInstanceOf[String], - publicationCode = metadataMap.apply("publicationCode").asInstanceOf[String], - publicationName = metadataMap.apply("publicationName").asInstanceOf[String], - layoutId = metadataMap.get("layoutId").map(_.asInstanceOf[java.math.BigDecimal].intValue), - edition = metadataMap.get("edition").map(_.asInstanceOf[java.math.BigDecimal].intValue), - size = metadataMap.get("size") + publicationCode = + metadataMap.apply("publicationCode").asInstanceOf[String], + publicationName = + metadataMap.apply("publicationName").asInstanceOf[String], + layoutId = metadataMap + .get("layoutId") + .map(_.asInstanceOf[java.math.BigDecimal].intValue), + edition = metadataMap + .get("edition") + .map(_.asInstanceOf[java.math.BigDecimal].intValue), + size = metadataMap + .get("size") .map(_.asInstanceOf[JStringNumMap]) .map(m => PrintImageSize(m.get("x").intValue, m.get("y").intValue)), orderedBy = metadataMap.get("orderedBy").map(_.asInstanceOf[String]), @@ -92,7 +120,9 @@ object ItemToMediaUsage { }.toOption } - private def buildDownload(metadataMap: Map[String, Any]): Option[DownloadUsageMetadata] = { + private def buildDownload( + metadataMap: Map[String, Any] + ): Option[DownloadUsageMetadata] = { Try { DownloadUsageMetadata( metadataMap("downloadedBy").asInstanceOf[String] diff --git a/common-lib/src/main/scala/com/gu/mediaservice/lib/usage/UsageBuilder.scala b/common-lib/src/main/scala/com/gu/mediaservice/lib/usage/UsageBuilder.scala index d015333dea..f4bd543817 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/lib/usage/UsageBuilder.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/lib/usage/UsageBuilder.scala @@ -21,7 +21,8 @@ object UsageBuilder { usage.downloadUsageMetadata ) - private def buildStatusString(usage: MediaUsage): UsageStatus = if (usage.isRemoved) RemovedUsageStatus else usage.status + private def buildStatusString(usage: MediaUsage): UsageStatus = + if (usage.isRemoved) RemovedUsageStatus else usage.status private def buildId(usage: MediaUsage): String = { UsageTableFullKey.build(usage).toString @@ -29,55 +30,81 @@ object UsageBuilder { private def buildUsageReference(usage: MediaUsage): List[UsageReference] = { usage.usageType match { - case DigitalUsage => buildDigitalUsageReference(usage) - case PrintUsage => buildPrintUsageReference(usage) + case DigitalUsage => buildDigitalUsageReference(usage) + case PrintUsage => buildPrintUsageReference(usage) case SyndicationUsage => buildSyndicationUsageReference(usage) - case DownloadUsage => buildDownloadUsageReference(usage) + case DownloadUsage => buildDownloadUsageReference(usage) } } - private def buildPrintUsageReference(usage: MediaUsage):List[UsageReference] = - usage.printUsageMetadata.map(metadata => { - val title = List( - new DateTime(metadata.issueDate).toString("YYYY-MM-dd"), - metadata.publicationName, - metadata.sectionName, - s"Page ${metadata.pageNumber}" - ).mkString(", ") + private def buildPrintUsageReference( + usage: MediaUsage + ): List[UsageReference] = + usage.printUsageMetadata + .map(metadata => { + val title = List( + new DateTime(metadata.issueDate).toString("YYYY-MM-dd"), + metadata.publicationName, + metadata.sectionName, + s"Page ${metadata.pageNumber}" + ).mkString(", ") - List(UsageReference(InDesignUsageReference, None, Some(title))) + 
List(UsageReference(InDesignUsageReference, None, Some(title))) - }).getOrElse(List[UsageReference]()) + }) + .getOrElse(List[UsageReference]()) - private def buildDigitalUsageReference(usage: MediaUsage): List[UsageReference] = { + private def buildDigitalUsageReference( + usage: MediaUsage + ): List[UsageReference] = { (usage.digitalUsageMetadata, usage.frontUsageMetadata) match { - case (Some(metadata), None) => List( - UsageReference(FrontendUsageReference, Some(metadata.webUrl), Some(metadata.webTitle)) - ) ++ metadata.composerUrl.map(url => UsageReference(ComposerUsageReference, Some(url))) - case (None, Some(metadata)) => List( - UsageReference(FrontUsageReference, None, name = Some(metadata.front)) - ) + case (Some(metadata), None) => + List( + UsageReference( + FrontendUsageReference, + Some(metadata.webUrl), + Some(metadata.webTitle) + ) + ) ++ metadata.composerUrl.map(url => + UsageReference(ComposerUsageReference, Some(url)) + ) + case (None, Some(metadata)) => + List( + UsageReference(FrontUsageReference, None, name = Some(metadata.front)) + ) case (_, _) => List[UsageReference]() } } - private def buildSyndicationUsageReference(usage: MediaUsage): List[UsageReference] = usage.syndicationUsageMetadata.map (metadata => { - List( - UsageReference( - SyndicationUsageReference, None, Some(metadata.partnerName) + private def buildSyndicationUsageReference( + usage: MediaUsage + ): List[UsageReference] = usage.syndicationUsageMetadata + .map(metadata => { + List( + UsageReference( + SyndicationUsageReference, + None, + Some(metadata.partnerName) + ) ) + }) + .getOrElse( + List[UsageReference]() ) - }).getOrElse( - List[UsageReference]() - ) - private def buildDownloadUsageReference(usage: MediaUsage): List[UsageReference] = usage.downloadUsageMetadata.map (metadata => { - List( - UsageReference( - DownloadUsageReference, None, Some(metadata.downloadedBy) + private def buildDownloadUsageReference( + usage: MediaUsage + ): List[UsageReference] = usage.downloadUsageMetadata + .map(metadata => { + List( + UsageReference( + DownloadUsageReference, + None, + Some(metadata.downloadedBy) + ) ) + }) + .getOrElse( + List[UsageReference]() ) - }).getOrElse( - List[UsageReference]() - ) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/Asset.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/Asset.scala index e52efdaa1f..03552dca05 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/Asset.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/Asset.scala @@ -5,22 +5,27 @@ import play.api.libs.json._ import play.api.libs.functional.syntax._ import com.gu.mediaservice.lib.aws.S3Object - // FIXME: size, mimeType and dimensions not optional (must backfill first) -case class Asset(file: URI, size: Option[Long], mimeType: Option[MimeType], dimensions: Option[Dimensions], secureUrl: Option[URL] = None) +case class Asset( + file: URI, + size: Option[Long], + mimeType: Option[MimeType], + dimensions: Option[Dimensions], + secureUrl: Option[URL] = None +) object Asset { def fromS3Object(s3Object: S3Object, dims: Option[Dimensions]): Asset = { - val userMetadata = s3Object.metadata.userMetadata + val userMetadata = s3Object.metadata.userMetadata val objectMetadata = s3Object.metadata.objectMetadata Asset( - file = s3Object.uri, - size = Some(s3Object.size), - mimeType = objectMetadata.contentType, + file = s3Object.uri, + size = Some(s3Object.size), + mimeType = objectMetadata.contentType, dimensions = dims, - secureUrl = None + secureUrl = None ) } @@ 
-29,16 +34,18 @@ object Asset { (__ \ "size").readNullable[Long] ~ (__ \ "mimeType").readNullable[MimeType] ~ (__ \ "dimensions").readNullable[Dimensions] ~ - (__ \ "secureUrl").readNullable[String].map(_.map(new URL(_))) - )(Asset.apply _) + (__ \ "secureUrl").readNullable[String].map(_.map(new URL(_))))( + Asset.apply _ + ) implicit val assetWrites: Writes[Asset] = ((__ \ "file").write[String].contramap((_: URI).toString) ~ (__ \ "size").writeNullable[Long] ~ (__ \ "mimeType").writeNullable[MimeType] ~ (__ \ "dimensions").writeNullable[Dimensions] ~ - (__ \ "secureUrl").writeNullable[String].contramap((_: Option[URL]).map(_.toString)) - )(unlift(Asset.unapply)) + (__ \ "secureUrl") + .writeNullable[String] + .contramap((_: Option[URL]).map(_.toString)))(unlift(Asset.unapply)) } @@ -47,7 +54,5 @@ object Dimensions { implicit val dimensionsReads: Reads[Dimensions] = Json.reads[Dimensions] implicit val dimensionsWrites: Writes[Dimensions] = ((__ \ "width").write[Int] ~ - (__ \ "height").write[Int] - )(unlift(Dimensions.unapply)) + (__ \ "height").write[Int])(unlift(Dimensions.unapply)) } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/Collection.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/Collection.scala index d54f4cf70d..58461176a9 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/Collection.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/Collection.scala @@ -8,7 +8,11 @@ import play.api.libs.functional.syntax._ import com.gu.mediaservice.lib.collections.CollectionsManager -case class Collection private (path: List[String], actionData: ActionData, description: String) { +case class Collection private ( + path: List[String], + actionData: ActionData, + description: String +) { // We lowercase on pathId so that we can search case-insensitively val pathId = CollectionsManager.pathToPathId(path) } @@ -17,10 +21,12 @@ object Collection { val reads: Reads[Collection] = Json.reads[Collection] val writes: Writes[Collection] = ( (__ \ "path").write[List[String]] ~ - (__ \ "pathId").write[String] ~ - (__ \ "description").write[String] ~ - (__ \ "actionData").write[ActionData] - ){ col: Collection => (col.path, col.pathId, col.description, col.actionData) } + (__ \ "pathId").write[String] ~ + (__ \ "description").write[String] ~ + (__ \ "actionData").write[ActionData] + ) { col: Collection => + (col.path, col.pathId, col.description, col.actionData) + } implicit val formats: Format[Collection] = Format(reads, writes) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/Cost.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/Cost.scala index 166a96275f..89aec4b645 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/Cost.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/Cost.scala @@ -3,17 +3,13 @@ package com.gu.mediaservice.model import play.api.libs.json._ sealed trait Cost -case object Free - extends Cost { override def toString = "free" } +case object Free extends Cost { override def toString = "free" } -case object Conditional - extends Cost { override def toString = "conditional" } +case object Conditional extends Cost { override def toString = "conditional" } -case object Pay - extends Cost { override def toString = "pay" } +case object Pay extends Cost { override def toString = "pay" } -case object Overquota - extends Cost { override def toString = "overquota" } +case object Overquota extends Cost { override def toString = "overquota" } object Cost { def fromString(string: 
String): Cost = @@ -21,6 +17,7 @@ object Cost { implicit val CostReads: Reads[Cost] = __.read[String].map(fromString) - implicit val CostWrites: Writes[Cost] = Writes[Cost](c => JsString(c.toString)) + implicit val CostWrites: Writes[Cost] = + Writes[Cost](c => JsString(c.toString)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/Crop.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/Crop.scala index 879ddd4c17..fe481aa8c7 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/Crop.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/Crop.scala @@ -8,38 +8,64 @@ import JodaReads._ //FIXME: Both id and file size here should not be an Option and are awaiting backfilling the correct data in ES -case class Crop(id: Option[String], author: Option[String], date: Option[DateTime], specification: CropSpec, master: Option[Asset], assets: List[Asset]) +case class Crop( + id: Option[String], + author: Option[String], + date: Option[DateTime], + specification: CropSpec, + master: Option[Asset], + assets: List[Asset] +) object Crop { import com.gu.mediaservice.lib.formatting._ def getCropId(b: Bounds) = List(b.x, b.y, b.width, b.height).mkString("_") - def createFromCropSource(by: Option[String], timeRequested: Option[DateTime], specification: CropSpec, master: Option[Asset] = None, cropSizings: List[Asset] = Nil): Crop = - Crop(Some(getCropId(specification.bounds)), by, timeRequested, specification, master, cropSizings) + def createFromCropSource( + by: Option[String], + timeRequested: Option[DateTime], + specification: CropSpec, + master: Option[Asset] = None, + cropSizings: List[Asset] = Nil + ): Crop = + Crop( + Some(getCropId(specification.bounds)), + by, + timeRequested, + specification, + master, + cropSizings + ) def createFromCrop(crop: Crop, master: Asset, assets: List[Asset]): Crop = - Crop(crop.id, crop.author, crop.date, crop.specification, Some(master), assets) + Crop( + crop.id, + crop.author, + crop.date, + crop.specification, + Some(master), + assets + ) implicit val cropReads: Reads[Crop] = ( (__ \ "id").readNullable[String] ~ - (__ \ "author").readNullable[String] ~ - (__ \ "date").readNullable[DateTime] ~ - (__ \ "specification").read[CropSpec] ~ - (__ \ "master").readNullable[Asset] ~ - (__ \ "assets").read[List[Asset]] + (__ \ "author").readNullable[String] ~ + (__ \ "date").readNullable[DateTime] ~ + (__ \ "specification").read[CropSpec] ~ + (__ \ "master").readNullable[Asset] ~ + (__ \ "assets").read[List[Asset]] )(Crop.apply _) implicit val cropWrites: Writes[Crop] = ( (__ \ "id").writeNullable[String] ~ - (__ \ "author").writeNullable[String] ~ - (__ \ "date").writeNullable[String].contramap(printOptDateTime) ~ - (__ \ "specification").write[CropSpec] ~ - (__ \ "master").writeNullable[Asset] ~ - (__ \ "assets").write[List[Asset]] + (__ \ "author").writeNullable[String] ~ + (__ \ "date").writeNullable[String].contramap(printOptDateTime) ~ + (__ \ "specification").write[CropSpec] ~ + (__ \ "master").writeNullable[Asset] ~ + (__ \ "assets").write[List[Asset]] )(unlift(Crop.unapply)) } - sealed trait ExportType { val name: String } case object CropExport extends ExportType { val name = "crop" } case object FullExport extends ExportType { val name = "full" } @@ -53,25 +79,31 @@ object ExportType { case "full" => FullExport } - implicit val exportTypeWrites: Writes[ExportType] = Writes[ExportType](t => JsString(t.name)) + implicit val exportTypeWrites: Writes[ExportType] = + Writes[ExportType](t => JsString(t.name)) implicit val 
exportTypeReads: Reads[ExportType] = __.read[String].map(valueOf) } - -case class CropSpec(uri: String, bounds: Bounds, aspectRatio: Option[String], `type`: ExportType = ExportType.default) +case class CropSpec( + uri: String, + bounds: Bounds, + aspectRatio: Option[String], + `type`: ExportType = ExportType.default +) object CropSpec { implicit val cropSpecWrites: Writes[CropSpec] = Json.writes[CropSpec] implicit val cropSpecReads: Reads[CropSpec] = ( (__ \ "uri").read[String] ~ - (__ \ "bounds").read[Bounds] ~ - (__ \ "aspectRatio").readNullable[String] ~ - (__ \ "type").readNullable[ExportType].map(_.getOrElse(ExportType.default)) + (__ \ "bounds").read[Bounds] ~ + (__ \ "aspectRatio").readNullable[String] ~ + (__ \ "type") + .readNullable[ExportType] + .map(_.getOrElse(ExportType.default)) )(CropSpec.apply _) } - case class Bounds(x: Int, y: Int, width: Int, height: Int) { def isPortrait: Boolean = width < height } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/Edits.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/Edits.scala index 43da8ddeea..77eb2ee63d 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/Edits.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/Edits.scala @@ -6,13 +6,12 @@ import com.gu.mediaservice.lib.argo.model.{Action, EmbeddedEntity} import play.api.libs.json._ import play.api.libs.functional.syntax._ - case class Edits( - archived: Boolean = false, - labels: List[String] = List(), - metadata: ImageMetadata, - usageRights: Option[UsageRights] = None, - photoshoot: Option[Photoshoot] = None + archived: Boolean = false, + labels: List[String] = List(), + metadata: ImageMetadata, + usageRights: Option[UsageRights] = None, + photoshoot: Option[Photoshoot] = None ) object Edits { @@ -20,24 +19,28 @@ object Edits { implicit val EditsReads: Reads[Edits] = ( (__ \ "archived").readNullable[Boolean].map(_ getOrElse false) ~ - (__ \ "labels").readNullable[List[String]].map(_ getOrElse Nil) ~ - (__ \ "metadata").readNullable[ImageMetadata].map(_ getOrElse emptyMetadata) ~ - (__ \ "usageRights").readNullable[UsageRights] ~ - (__ \ "photoshoot").readNullable[Photoshoot] + (__ \ "labels").readNullable[List[String]].map(_ getOrElse Nil) ~ + (__ \ "metadata") + .readNullable[ImageMetadata] + .map(_ getOrElse emptyMetadata) ~ + (__ \ "usageRights").readNullable[UsageRights] ~ + (__ \ "photoshoot").readNullable[Photoshoot] )(Edits.apply _) implicit val EditsWrites: Writes[Edits] = ( (__ \ "archived").write[Boolean] ~ - (__ \ "labels").write[List[String]] ~ - (__ \ "metadata").writeNullable[ImageMetadata].contramap(noneIfEmptyMetadata) ~ - (__ \ "usageRights").writeNullable[UsageRights] ~ - (__ \ "photoshoot").writeNullable[Photoshoot] + (__ \ "labels").write[List[String]] ~ + (__ \ "metadata") + .writeNullable[ImageMetadata] + .contramap(noneIfEmptyMetadata) ~ + (__ \ "usageRights").writeNullable[UsageRights] ~ + (__ \ "photoshoot").writeNullable[Photoshoot] )(unlift(Edits.unapply)) def getEmpty = Edits(metadata = emptyMetadata) def noneIfEmptyMetadata(m: ImageMetadata): Option[ImageMetadata] = - if(m == emptyMetadata) None else Some(m) + if (m == emptyMetadata) None else Some(m) } @@ -55,33 +58,64 @@ trait EditsResponse { // the types are in the arguments because of a whining scala compiler def editsEntity(id: String): Writes[Edits] = ( - (__ \ "archived").write[ArchivedEntity].contramap(archivedEntity(id, _: Boolean)) ~ - (__ \ "labels").write[SetEntity].contramap(setEntity(id, "labels", _: List[String])) ~ - (__ \ 
"metadata").write[MetadataEntity].contramap(metadataEntity(id, _: ImageMetadata)) ~ - (__ \ "usageRights").write[UsageRightsEntity].contramap(usageRightsEntity(id, _: Option[UsageRights])) ~ - (__ \ "photoshoot").write[PhotoshootEntity].contramap(photoshootEntity(id, _: Option[Photoshoot])) - )(unlift(Edits.unapply)) - - def photoshootEntity(id: String, photoshoot: Option[Photoshoot]): PhotoshootEntity = + (__ \ "archived") + .write[ArchivedEntity] + .contramap(archivedEntity(id, _: Boolean)) ~ + (__ \ "labels") + .write[SetEntity] + .contramap(setEntity(id, "labels", _: List[String])) ~ + (__ \ "metadata") + .write[MetadataEntity] + .contramap(metadataEntity(id, _: ImageMetadata)) ~ + (__ \ "usageRights") + .write[UsageRightsEntity] + .contramap(usageRightsEntity(id, _: Option[UsageRights])) ~ + (__ \ "photoshoot") + .write[PhotoshootEntity] + .contramap(photoshootEntity(id, _: Option[Photoshoot])) + )(unlift(Edits.unapply)) + + def photoshootEntity( + id: String, + photoshoot: Option[Photoshoot] + ): PhotoshootEntity = EmbeddedEntity(entityUri(id, "/photoshoot"), photoshoot) def archivedEntity(id: String, a: Boolean): ArchivedEntity = EmbeddedEntity(entityUri(id, "/archived"), Some(a)) def metadataEntity(id: String, m: ImageMetadata): MetadataEntity = - EmbeddedEntity(entityUri(id, "/metadata"), Some(m), actions = List( - Action("set-from-usage-rights", entityUri(id, "/metadata/set-from-usage-rights"), "POST") - )) + EmbeddedEntity( + entityUri(id, "/metadata"), + Some(m), + actions = List( + Action( + "set-from-usage-rights", + entityUri(id, "/metadata/set-from-usage-rights"), + "POST" + ) + ) + ) def usageRightsEntity(id: String, u: Option[UsageRights]): UsageRightsEntity = u.map(i => EmbeddedEntity(entityUri(id, "/usage-rights"), Some(i))) - .getOrElse(EmbeddedEntity(entityUri(id, "/usage-rights"), None)) + .getOrElse(EmbeddedEntity(entityUri(id, "/usage-rights"), None)) def setEntity(id: String, setName: String, labels: List[String]): SetEntity = - EmbeddedEntity(entityUri(id, s"/$setName"), Some(labels.map(setUnitEntity(id, setName, _)))) - - def setUnitEntity(id: String, setName: String, name: String): EmbeddedEntity[String] = - EmbeddedEntity(entityUri(id, s"/$setName/${URLEncoder.encode(name, "UTF-8")}"), Some(name)) + EmbeddedEntity( + entityUri(id, s"/$setName"), + Some(labels.map(setUnitEntity(id, setName, _))) + ) + + def setUnitEntity( + id: String, + setName: String, + name: String + ): EmbeddedEntity[String] = + EmbeddedEntity( + entityUri(id, s"/$setName/${URLEncoder.encode(name, "UTF-8")}"), + Some(name) + ) private def entityUri(id: String, endpoint: String = ""): URI = URI.create(s"$metadataBaseUri/metadata/$id$endpoint") diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/Export.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/Export.scala index 6e9398d379..d388f133b7 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/Export.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/Export.scala @@ -5,35 +5,34 @@ import play.api.libs.functional.syntax._ import org.joda.time.DateTime - case class Export( - id: Option[String], - author: Option[String], - date: Option[DateTime], - specification: CropSpec, - master: Option[Asset], - assets: List[Asset] + id: Option[String], + author: Option[String], + date: Option[DateTime], + specification: CropSpec, + master: Option[Asset], + assets: List[Asset] ) object Export { import com.gu.mediaservice.lib.formatting._ def fromCrop(crop: Crop): Export = Export( - crop.id, - crop.author, - 
crop.date, - crop.specification, - crop.master, - crop.assets + crop.id, + crop.author, + crop.date, + crop.specification, + crop.master, + crop.assets ) implicit val exportWrites: Writes[Export] = ( (__ \ "id").writeNullable[String] ~ - (__ \ "author").writeNullable[String] ~ - (__ \ "date").writeNullable[String].contramap(printOptDateTime) ~ - (__ \ "specification").write[CropSpec] ~ - (__ \ "master").writeNullable[Asset] ~ - (__ \ "assets").write[List[Asset]] + (__ \ "author").writeNullable[String] ~ + (__ \ "date").writeNullable[String].contramap(printOptDateTime) ~ + (__ \ "specification").write[CropSpec] ~ + (__ \ "master").writeNullable[Asset] ~ + (__ \ "assets").write[List[Asset]] )(unlift(Export.unapply)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/FileMetadata.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/FileMetadata.scala index cfe6b38cc5..eca1c18064 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/FileMetadata.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/FileMetadata.scala @@ -8,17 +8,17 @@ import play.api.libs.functional.syntax._ import scala.collection.JavaConverters._ case class FileMetadata( - iptc: Map[String, String] = Map(), - exif: Map[String, String] = Map(), - exifSub: Map[String, String] = Map(), - xmp: Map[String, JsValue] = Map(), - icc: Map[String, String] = Map(), - getty: Map[String, String] = Map(), - colourModel: Option[String] = None, - colourModelInformation: Map[String, String] = Map() + iptc: Map[String, String] = Map(), + exif: Map[String, String] = Map(), + exifSub: Map[String, String] = Map(), + xmp: Map[String, JsValue] = Map(), + icc: Map[String, String] = Map(), + getty: Map[String, String] = Map(), + colourModel: Option[String] = None, + colourModelInformation: Map[String, String] = Map() ) { def toLogMarker: LogMarker = { - val fieldCountMarkers = Map ( + val fieldCountMarkers = Map( "iptcFieldCount" -> iptc.size, "exifFieldCount" -> exif.size, "exifSubFieldCount" -> exifSub.size, @@ -39,35 +39,43 @@ object FileMetadata { // TODO: reindex all images to make the getty map always present // for data consistency, so we can fallback to use the default Reads implicit val ImageMetadataReads: Reads[FileMetadata] = ( - (__ \ "iptc").read[Map[String,String]] ~ - (__ \ "exif").read[Map[String,String]] ~ - (__ \ "exifSub").read[Map[String,String]] ~ - (__ \ "xmp").read[Map[String,JsValue]] ~ - (__ \ "icc").readNullable[Map[String,String]].map(_ getOrElse Map()).map(removeLongValues) ~ - (__ \ "getty").readNullable[Map[String,String]].map(_ getOrElse Map()) ~ - (__ \ "colourModel").readNullable[String] ~ - (__ \ "colourModelInformation").readNullable[Map[String,String]].map(_ getOrElse Map()) - + (__ \ "iptc").read[Map[String, String]] ~ + (__ \ "exif").read[Map[String, String]] ~ + (__ \ "exifSub").read[Map[String, String]] ~ + (__ \ "xmp").read[Map[String, JsValue]] ~ + (__ \ "icc") + .readNullable[Map[String, String]] + .map(_ getOrElse Map()) + .map(removeLongValues) ~ + (__ \ "getty").readNullable[Map[String, String]].map(_ getOrElse Map()) ~ + (__ \ "colourModel").readNullable[String] ~ + (__ \ "colourModelInformation") + .readNullable[Map[String, String]] + .map(_ getOrElse Map()) )(FileMetadata.apply _) private val maximumValueLengthBytes = 5000 - private def removeLongValues = { m:Map[String, String] => { - val (short, long) = m.partition(_._2.length <= maximumValueLengthBytes) - if (long.size>0) { - short + ("removedFields" -> long.map(_._1).mkString(", ")) - } else { - m + 
private def removeLongValues = { m: Map[String, String] => + { + val (short, long) = m.partition(_._2.length <= maximumValueLengthBytes) + if (long.size > 0) { + short + ("removedFields" -> long.map(_._1).mkString(", ")) + } else { + m + } } - } } + } implicit val FileMetadataWrites: Writes[FileMetadata] = ( - (JsPath \ "iptc").write[Map[String,String]] and - (JsPath \ "exif").write[Map[String,String]] and - (JsPath \ "exifSub").write[Map[String,String]] and - (JsPath \ "xmp").write[Map[String,JsValue]] and - (JsPath \ "icc").write[Map[String,String]].contramap[Map[String, String]](removeLongValues) and - (JsPath \ "getty").write[Map[String,String]] and + (JsPath \ "iptc").write[Map[String, String]] and + (JsPath \ "exif").write[Map[String, String]] and + (JsPath \ "exifSub").write[Map[String, String]] and + (JsPath \ "xmp").write[Map[String, JsValue]] and + (JsPath \ "icc") + .write[Map[String, String]] + .contramap[Map[String, String]](removeLongValues) and + (JsPath \ "getty").write[Map[String, String]] and (JsPath \ "colourModel").writeNullable[String] and - (JsPath \ "colourModelInformation").write[Map[String,String]] + (JsPath \ "colourModelInformation").write[Map[String, String]] )(unlift(FileMetadata.unapply)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/FileMetadataAggregator.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/FileMetadataAggregator.scala index 4d928c848b..2580efa01e 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/FileMetadataAggregator.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/FileMetadataAggregator.scala @@ -14,18 +14,25 @@ object FileMetadataAggregator { private case class MetadataEntry(index: Int, jsValue: JsValue) - private def entryToAggregatedKeyAndJsValue(k: String, v: MetadataEntry): (String, MetadataEntry) = { + private def entryToAggregatedKeyAndJsValue( + k: String, + v: MetadataEntry + ): (String, MetadataEntry) = { def toCustomObjectKeyAndValue(k: String, v: MetadataEntry) = { val slashIdx = k.lastIndexOf("/") val objectName = k.substring(0, slashIdx) val objectFieldName = k.substring(slashIdx + 1) - val stringifiedObj = Json.stringify(JsObject(Seq((objectFieldName, v.jsValue)))) + val stringifiedObj = Json + .stringify(JsObject(Seq((objectFieldName, v.jsValue)))) .replace("\"", "'") val newJsVal = JsArray(Seq(JsString(stringifiedObj))) - (objectName, v.copy( - jsValue = newJsVal - )) + ( + objectName, + v.copy( + jsValue = newJsVal + ) + ) } def toArrayKeyAndValue(k: String, v: MetadataEntry) = { @@ -33,12 +40,17 @@ object FileMetadataAggregator { (normaliseArrayKey(k), v.copy(jsValue = arrValue)) } - if (isArrayKey(k)) toArrayKeyAndValue(k, v) else if (isCustomObjectKey(k)) toCustomObjectKeyAndValue(k, v) else (k, v) + if (isArrayKey(k)) toArrayKeyAndValue(k, v) + else if (isCustomObjectKey(k)) toCustomObjectKeyAndValue(k, v) + else (k, v) } - private def getIdxBetweenArrayBrackets(k: String): Int = k.substring(k.lastIndexOf("[") + 1, k.lastIndexOf("]")).trim.toInt + private def getIdxBetweenArrayBrackets(k: String): Int = + k.substring(k.lastIndexOf("[") + 1, k.lastIndexOf("]")).trim.toInt - private def aggregateCurrentMetadataLevel(nodes: Map[String, MetadataEntry]): Map[String, MetadataEntry] = { + private def aggregateCurrentMetadataLevel( + nodes: Map[String, MetadataEntry] + ): Map[String, MetadataEntry] = { def toEntriesWithUpdatedIndexes(nodes: Map[String, MetadataEntry]) = { nodes.map { case (k, v) => @@ -48,22 +60,24 @@ object FileMetadataAggregator { } } - val 
entriesWithIndexes: Map[String, MetadataEntry] = toEntriesWithUpdatedIndexes(nodes) - - val mutableMap = scala.collection.mutable.Map[String, Either[MetadataEntry, List[MetadataEntry]]]() - - entriesWithIndexes.foreach { - case (k, v) => - val (aggregatedKey, newMetadataEntry) = entryToAggregatedKeyAndJsValue(k, v) - if (mutableMap.contains(aggregatedKey)) { - val updated: List[MetadataEntry] = mutableMap(aggregatedKey) match { - case scala.util.Left(value) => List(value, newMetadataEntry) - case scala.util.Right(value) => newMetadataEntry +: value - } - mutableMap(aggregatedKey) = scala.util.Right(updated) - } else { - mutableMap.put(aggregatedKey, scala.util.Left(newMetadataEntry)) + val entriesWithIndexes: Map[String, MetadataEntry] = + toEntriesWithUpdatedIndexes(nodes) + + val mutableMap = scala.collection.mutable + .Map[String, Either[MetadataEntry, List[MetadataEntry]]]() + + entriesWithIndexes.foreach { case (k, v) => + val (aggregatedKey, newMetadataEntry) = + entryToAggregatedKeyAndJsValue(k, v) + if (mutableMap.contains(aggregatedKey)) { + val updated: List[MetadataEntry] = mutableMap(aggregatedKey) match { + case scala.util.Left(value) => List(value, newMetadataEntry) + case scala.util.Right(value) => newMetadataEntry +: value } + mutableMap(aggregatedKey) = scala.util.Right(updated) + } else { + mutableMap.put(aggregatedKey, scala.util.Left(newMetadataEntry)) + } } val mapWithSortedValuesAtCurrentLevel = mutableMap.mapValues { @@ -71,12 +85,17 @@ object FileMetadataAggregator { case scala.util.Right(value) => { val sortedList = value.sortBy(_.index) - val (jsArrays, jsStrings) = sortedList.map(_.jsValue).partition(_.isInstanceOf[JsArray]) + val (jsArrays, jsStrings) = + sortedList.map(_.jsValue).partition(_.isInstanceOf[JsArray]) - val aggJsArrays: JsArray = jsArrays.map(_.as[JsArray]).foldLeft(JsArray.empty)((acc, arrayItem) => acc ++ arrayItem) - val aggJsStrings: JsArray = jsStrings.map(_.as[JsString]).foldLeft(JsArray.empty)((acc, item) => acc.append(item)) + val aggJsArrays: JsArray = jsArrays + .map(_.as[JsArray]) + .foldLeft(JsArray.empty)((acc, arrayItem) => acc ++ arrayItem) + val aggJsStrings: JsArray = jsStrings + .map(_.as[JsString]) + .foldLeft(JsArray.empty)((acc, item) => acc.append(item)) - val sorted: JsArray = aggJsArrays ++ aggJsStrings + val sorted: JsArray = aggJsArrays ++ aggJsStrings MetadataEntry(sortedList.head.index, sorted) } } @@ -87,8 +106,8 @@ object FileMetadataAggregator { if (isArrayKey(k) || isCustomObjectArrayKey(k)) { getIdxBetweenArrayBrackets(k) } else { - /** - * eventually any array key will become a simple value key + + /** eventually any array key will become a simple value key * that is why we have - 1 here as we want to prioritise it every iteration * such that simple values will be prioritised over custom nested objects * for example we want @@ -103,7 +122,9 @@ object FileMetadataAggregator { } } - def aggregateMetadataMap(flatProperties: Map[String, String]): Map[String, JsValue] = { + def aggregateMetadataMap( + flatProperties: Map[String, String] + ): Map[String, JsValue] = { def toInitialEntriesWithIndexes(nodes: Map[String, JsValue]) = { val previousIndex = Int.MaxValue @@ -113,18 +134,24 @@ object FileMetadataAggregator { } } - val initialMetadataStructure = toInitialEntriesWithIndexes(flatProperties.mapValues(JsString)) + val initialMetadataStructure = toInitialEntriesWithIndexes( + flatProperties.mapValues(JsString) + ) var aggMetadata = aggregateCurrentMetadataLevel(initialMetadataStructure) def anyKeyIsArrayKey(keys: 
Set[String]) = keys.exists(isArrayKey) - def anyKeyIsDynamicObjectKey(keys: Set[String]) = keys.exists(isCustomObjectKey) + def anyKeyIsDynamicObjectKey(keys: Set[String]) = + keys.exists(isCustomObjectKey) - while (anyKeyIsArrayKey(aggMetadata.keySet) || anyKeyIsDynamicObjectKey(aggMetadata.keySet)) aggMetadata = aggregateCurrentMetadataLevel(aggMetadata) + while ( + anyKeyIsArrayKey(aggMetadata.keySet) || anyKeyIsDynamicObjectKey( + aggMetadata.keySet + ) + ) aggMetadata = aggregateCurrentMetadataLevel(aggMetadata) aggMetadata.mapValues(_.jsValue) } } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/Image.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/Image.scala index d923b2f13e..ecae7b9bfe 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/Image.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/Image.scala @@ -1,35 +1,39 @@ package com.gu.mediaservice.model import com.gu.mediaservice.lib.logging._ -import com.gu.mediaservice.model.leases.{AllowSyndicationLease, DenySyndicationLease, LeasesByMedia} +import com.gu.mediaservice.model.leases.{ + AllowSyndicationLease, + DenySyndicationLease, + LeasesByMedia +} import com.gu.mediaservice.model.usage.{SyndicationUsage, Usage} import org.joda.time.DateTime import play.api.libs.functional.syntax._ import play.api.libs.json._ - case class Image( - id: String, - uploadTime: DateTime, - uploadedBy: String, - lastModified: Option[DateTime], - identifiers: Map[String, String], - uploadInfo: UploadInfo, - source: Asset, - thumbnail: Option[Asset], - optimisedPng: Option[Asset], - fileMetadata: FileMetadata, - userMetadata: Option[Edits], - metadata: ImageMetadata, - originalMetadata: ImageMetadata, - usageRights: UsageRights, - originalUsageRights: UsageRights, - exports: List[Crop] = Nil, - usages: List[Usage] = Nil, - leases: LeasesByMedia = LeasesByMedia.empty, - collections: List[Collection] = Nil, - syndicationRights: Option[SyndicationRights] = None, - userMetadataLastModified: Option[DateTime] = None) extends LogMarker { + id: String, + uploadTime: DateTime, + uploadedBy: String, + lastModified: Option[DateTime], + identifiers: Map[String, String], + uploadInfo: UploadInfo, + source: Asset, + thumbnail: Option[Asset], + optimisedPng: Option[Asset], + fileMetadata: FileMetadata, + userMetadata: Option[Edits], + metadata: ImageMetadata, + originalMetadata: ImageMetadata, + usageRights: UsageRights, + originalUsageRights: UsageRights, + exports: List[Crop] = Nil, + usages: List[Usage] = Nil, + leases: LeasesByMedia = LeasesByMedia.empty, + collections: List[Collection] = Nil, + syndicationRights: Option[SyndicationRights] = None, + userMetadataLastModified: Option[DateTime] = None +) extends LogMarker { def hasExports = exports.nonEmpty @@ -39,7 +43,8 @@ case class Image( def rcsPublishDate: Option[DateTime] = syndicationRights.flatMap(_.published) - def hasInferredSyndicationRightsOrNoRights: Boolean = syndicationRights.forall(_.isInferred) + def hasInferredSyndicationRightsOrNoRights: Boolean = + syndicationRights.forall(_.isInferred) def hasNonInferredRights: Boolean = !hasInferredSyndicationRightsOrNoRights @@ -54,13 +59,15 @@ case class Image( if (hasSyndicationUsage) { SentForSyndication } else { - val allowSyndicationLease = leases.leases.find(_.access == AllowSyndicationLease) - val denySyndicationLease = leases.leases.find(_.access == DenySyndicationLease) + val allowSyndicationLease = + leases.leases.find(_.access == AllowSyndicationLease) + val denySyndicationLease = + 
leases.leases.find(_.access == DenySyndicationLease) (allowSyndicationLease, denySyndicationLease) match { case (Some(_), None) => QueuedForSyndication case (None, Some(_)) => BlockedForSyndication - case (_, _) => AwaitingReviewForSyndication + case (_, _) => AwaitingReviewForSyndication } } } @@ -83,24 +90,38 @@ object Image { (__ \ "uploadTime").read[String].map(unsafeParseDateTime) ~ (__ \ "uploadedBy").read[String] ~ (__ \ "lastModified").readNullable[String].map(parseOptDateTime) ~ - (__ \ "identifiers").readNullable[Map[String, String]].map(_ getOrElse Map()) ~ - (__ \ "uploadInfo").readNullable[UploadInfo].map(_ getOrElse UploadInfo()) ~ + (__ \ "identifiers") + .readNullable[Map[String, String]] + .map(_ getOrElse Map()) ~ + (__ \ "uploadInfo") + .readNullable[UploadInfo] + .map(_ getOrElse UploadInfo()) ~ (__ \ "source").read[Asset] ~ (__ \ "thumbnail").readNullable[Asset] ~ (__ \ "optimisedPng").readNullable[Asset] ~ - (__ \ "fileMetadata").readNullable[FileMetadata].map(_ getOrElse FileMetadata()) ~ + (__ \ "fileMetadata") + .readNullable[FileMetadata] + .map(_ getOrElse FileMetadata()) ~ (__ \ "userMetadata").readNullable[Edits] ~ (__ \ "metadata").read[ImageMetadata] ~ - (__ \ "originalMetadata").readNullable[ImageMetadata].map(_ getOrElse ImageMetadata()) ~ + (__ \ "originalMetadata") + .readNullable[ImageMetadata] + .map(_ getOrElse ImageMetadata()) ~ (__ \ "usageRights").readNullable[UsageRights].map(_ getOrElse NoRights) ~ - (__ \ "originalUsageRights").readNullable[UsageRights].map(_ getOrElse NoRights) ~ + (__ \ "originalUsageRights") + .readNullable[UsageRights] + .map(_ getOrElse NoRights) ~ (__ \ "exports").readNullable[List[Crop]].map(_ getOrElse List()) ~ (__ \ "usages").readNullable[List[Usage]].map(_ getOrElse List()) ~ - (__ \ "leases").readNullable[LeasesByMedia].map(_ getOrElse LeasesByMedia.empty) ~ + (__ \ "leases") + .readNullable[LeasesByMedia] + .map(_ getOrElse LeasesByMedia.empty) ~ (__ \ "collections").readNullable[List[Collection]].map(_ getOrElse Nil) ~ (__ \ "syndicationRights").readNullable[SyndicationRights] ~ - (__ \ "userMetadataLastModified").readNullable[String].map(parseOptDateTime) - )(Image.apply _) + (__ \ "userMetadataLastModified") + .readNullable[String] + .map(parseOptDateTime) + )(Image.apply _) implicit val ImageWrites: Writes[Image] = ( (__ \ "id").write[String] ~ @@ -123,8 +144,9 @@ object Image { (__ \ "leases").write[LeasesByMedia] ~ (__ \ "collections").write[List[Collection]] ~ (__ \ "syndicationRights").writeNullable[SyndicationRights] ~ - (__ \ "userMetadataLastModified").writeNullable[String].contramap(printOptDateTime) - )(unlift(Image.unapply)) + (__ \ "userMetadataLastModified") + .writeNullable[String] + .contramap(printOptDateTime) + )(unlift(Image.unapply)) } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/ImageMetadata.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/ImageMetadata.scala index 41adb8c407..e3071d3505 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/ImageMetadata.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/ImageMetadata.scala @@ -8,24 +8,24 @@ import play.api.libs.json._ /* following are standard metadata fields that exist in multiple schemas, most canonical being https://www.iptc.org/std/photometadata/specification/IPTC-PhotoMetadata */ case class ImageMetadata( - dateTaken: Option[DateTime] = None, - description: Option[String] = None, - credit: Option[String] = None, - creditUri: Option[String] = None, - byline: Option[String] = None, - 
bylineTitle: Option[String] = None, - title: Option[String] = None, - copyright: Option[String] = None, - suppliersReference: Option[String] = None, - source: Option[String] = None, - specialInstructions: Option[String] = None, - keywords: List[String] = Nil, - subLocation: Option[String] = None, - city: Option[String] = None, - state: Option[String] = None, - country: Option[String] = None, - subjects: List[String] = Nil, - peopleInImage: Set[String] = Set(), + dateTaken: Option[DateTime] = None, + description: Option[String] = None, + credit: Option[String] = None, + creditUri: Option[String] = None, + byline: Option[String] = None, + bylineTitle: Option[String] = None, + title: Option[String] = None, + copyright: Option[String] = None, + suppliersReference: Option[String] = None, + source: Option[String] = None, + specialInstructions: Option[String] = None, + keywords: List[String] = Nil, + subLocation: Option[String] = None, + city: Option[String] = None, + state: Option[String] = None, + country: Option[String] = None, + subjects: List[String] = Nil, + peopleInImage: Set[String] = Set() ) object ImageMetadata { @@ -50,7 +50,7 @@ object ImageMetadata { (__ \ "country").readNullable[String] ~ (__ \ "subjects").readNullable[List[String]].map(_ getOrElse Nil) ~ (__ \ "peopleInImage").readNullable[Set[String]].map(_ getOrElse Set()) - )(ImageMetadata.apply _) + )(ImageMetadata.apply _) implicit val IptcMetadataWrites: Writes[ImageMetadata] = ( (__ \ "dateTaken").writeNullable[String].contramap(printOptDateTime) ~ @@ -64,13 +64,19 @@ object ImageMetadata { (__ \ "suppliersReference").writeNullable[String] ~ (__ \ "source").writeNullable[String] ~ (__ \ "specialInstructions").writeNullable[String] ~ - (__ \ "keywords").writeNullable[List[String]].contramap((l: List[String]) => if (l.isEmpty) None else Some(l)) ~ + (__ \ "keywords") + .writeNullable[List[String]] + .contramap((l: List[String]) => if (l.isEmpty) None else Some(l)) ~ (__ \ "subLocation").writeNullable[String] ~ (__ \ "city").writeNullable[String] ~ (__ \ "state").writeNullable[String] ~ (__ \ "country").writeNullable[String] ~ - (__ \ "subjects").writeNullable[List[String]].contramap((l: List[String]) => if (l.isEmpty) None else Some(l)) ~ - (__ \ "peopleInImage").writeNullable[Set[String]].contramap((l: Set[String]) => if (l.isEmpty) None else Some(l)) - )(unlift(ImageMetadata.unapply)) + (__ \ "subjects") + .writeNullable[List[String]] + .contramap((l: List[String]) => if (l.isEmpty) None else Some(l)) ~ + (__ \ "peopleInImage") + .writeNullable[Set[String]] + .contramap((l: Set[String]) => if (l.isEmpty) None else Some(l)) + )(unlift(ImageMetadata.unapply)) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/MimeType.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/MimeType.scala index 13e527625f..3542b9592f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/MimeType.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/MimeType.scala @@ -8,7 +8,7 @@ class UnsupportedMimeTypeException(val mimeType: String) extends Exception sealed trait MimeType { def name: String = this match { case Jpeg => "image/jpeg" - case Png => "image/png" + case Png => "image/png" case Tiff => "image/tiff" } @@ -20,7 +20,7 @@ sealed trait MimeType { object MimeType extends GridLogging { def apply(value: String): MimeType = value.toLowerCase match { case "image/jpeg" => Jpeg - case "image/png" => Png + case "image/png" => Png case "image/tiff" => Tiff // Support crops created in the early years of Grid 
(~2016) which state mime type w/out an 'image/' prefix @@ -42,7 +42,8 @@ object MimeType extends GridLogging { implicit val reads: Reads[MimeType] = JsPath.read[String].map(MimeType(_)) - implicit val writer: Writes[MimeType] = (mimeType: MimeType) => JsString(mimeType.toString) + implicit val writer: Writes[MimeType] = (mimeType: MimeType) => + JsString(mimeType.toString) } object Jpeg extends MimeType { diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/Photoshoot.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/Photoshoot.scala index abf52f23fe..fc94705922 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/Photoshoot.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/Photoshoot.scala @@ -3,7 +3,7 @@ package com.gu.mediaservice.model import play.api.libs.json._ case class Photoshoot( - title: String + title: String ) object Photoshoot { diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/SourceImage.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/SourceImage.scala index 7c85427ef0..780022c4f2 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/SourceImage.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/SourceImage.scala @@ -3,8 +3,13 @@ package com.gu.mediaservice.model import play.api.libs.json._ import play.api.libs.functional.syntax._ - -case class SourceImage(id: String, source: Asset, valid: Boolean, metadata: ImageMetadata, fileMetadata: FileMetadata) +case class SourceImage( + id: String, + source: Asset, + valid: Boolean, + metadata: ImageMetadata, + fileMetadata: FileMetadata +) object SourceImage { implicit val sourceImageReads: Reads[SourceImage] = @@ -12,6 +17,6 @@ object SourceImage { (__ \ "data" \ "source").read[Asset] ~ (__ \ "data" \ "valid").read[Boolean] ~ (__ \ "data" \ "metadata").read[ImageMetadata] ~ - (__ \ "data" \ "fileMetadata" \ "data").read[FileMetadata] - )(SourceImage.apply _) + (__ \ "data" \ "fileMetadata" \ "data") + .read[FileMetadata])(SourceImage.apply _) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/SyndicationRights.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/SyndicationRights.scala index 78d25127b5..b5e59eeff8 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/SyndicationRights.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/SyndicationRights.scala @@ -8,13 +8,14 @@ import play.api.libs.json._ import play.api.libs.functional.syntax._ case class SyndicationRights( - published: Option[DateTime], - suppliers: Seq[Supplier], - rights: Seq[Right], - isInferred: Boolean = false + published: Option[DateTime], + suppliers: Seq[Supplier], + rights: Seq[Right], + isInferred: Boolean = false ) { def isRightsAcquired: Boolean = rights.flatMap(_.acquired).contains(true) - def isAvailableForSyndication: Boolean = isRightsAcquired && published.exists(_.isBeforeNow) + def isAvailableForSyndication: Boolean = + isRightsAcquired && published.exists(_.isBeforeNow) } object SyndicationRights { implicit val dateWrites = jodaDateWrites("yyyy-MM-dd'T'HH:mm:ss.SSSZZ") @@ -23,55 +24,60 @@ object SyndicationRights { val reads: Reads[SyndicationRights] = ( (__ \ "published").readNullable[String].map(parseOptDateTime) ~ - (__ \ "suppliers").read[Seq[Supplier]] ~ - (__ \ "rights").read[Seq[Right]] ~ - (__ \ "isInferred").readNullable[Boolean].map(_.getOrElse(false)) + (__ \ "suppliers").read[Seq[Supplier]] ~ + (__ \ "rights").read[Seq[Right]] ~ + (__ \ 
"isInferred").readNullable[Boolean].map(_.getOrElse(false)) )(SyndicationRights.apply _) val writes: Writes[SyndicationRights] = ( (__ \ "published").writeNullable[DateTime] ~ - (__ \ "suppliers").write[Seq[Supplier]] ~ - (__ \ "rights").write[Seq[Right]] ~ - (__ \ "isInferred").write[Boolean] - ){ sr: SyndicationRights => (sr.published, sr.suppliers, sr.rights, sr.isInferred) } + (__ \ "suppliers").write[Seq[Supplier]] ~ + (__ \ "rights").write[Seq[Right]] ~ + (__ \ "isInferred").write[Boolean] + ) { sr: SyndicationRights => + (sr.published, sr.suppliers, sr.rights, sr.isInferred) + } implicit val formats: Format[SyndicationRights] = Format(reads, writes) } case class Supplier( - supplierName: Option[String], - supplierId: Option[String], - prAgreement: Option[Boolean]) + supplierName: Option[String], + supplierId: Option[String], + prAgreement: Option[Boolean] +) object Supplier { val reads: Reads[Supplier] = Json.reads[Supplier] val writes: Writes[Supplier] = ( (__ \ "supplierName").writeNullable[String] ~ - (__ \ "supplierId").writeNullable[String] ~ - (__ \ "prAgreement").writeNullable[Boolean] - ){ s: Supplier => (s.supplierName, s.supplierId, s.prAgreement) } + (__ \ "supplierId").writeNullable[String] ~ + (__ \ "prAgreement").writeNullable[Boolean] + ) { s: Supplier => (s.supplierName, s.supplierId, s.prAgreement) } implicit val formats: Format[Supplier] = Format(reads, writes) } case class Right( - rightCode: String, - acquired: Option[Boolean], - properties: Seq[Property]) + rightCode: String, + acquired: Option[Boolean], + properties: Seq[Property] +) object Right { val reads: Reads[Right] = Json.reads[Right] val writes: Writes[Right] = ( (__ \ "rightCode").write[String] ~ - (__ \ "acquired").writeNullable[Boolean] ~ - (__ \ "properties").write[Seq[Property]] - ){ r: Right => (r.rightCode, r.acquired, r.properties) } + (__ \ "acquired").writeNullable[Boolean] ~ + (__ \ "properties").write[Seq[Property]] + ) { r: Right => (r.rightCode, r.acquired, r.properties) } implicit val formats: Format[Right] = Format(reads, writes) } case class Property( - propertyCode: String, - expiresOn: Option[DateTime], - value: Option[String]) + propertyCode: String, + expiresOn: Option[DateTime], + value: Option[String] +) object Property { implicit val dateWrites = jodaDateWrites("yyyy-MM-dd'T'HH:mm:ss.SSSZZ") implicit val dateReads = jodaDateReads("yyyy-MM-dd'T'HH:mm:ss.SSSZZ") @@ -79,9 +85,9 @@ object Property { val reads: Reads[Property] = Json.reads[Property] val writes: Writes[Property] = ( (__ \ "propertyCode").write[String] ~ - (__ \ "expiresOn").writeNullable[DateTime] ~ - (__ \ "value").writeNullable[String] - ){ r: Property => (r.propertyCode, r.expiresOn, r.value) } + (__ \ "expiresOn").writeNullable[DateTime] ~ + (__ \ "value").writeNullable[String] + ) { r: Property => (r.propertyCode, r.expiresOn, r.value) } implicit val formats: Format[Property] = Format(reads, writes) } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/SyndicationStatus.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/SyndicationStatus.scala index 0a48d641fa..5229da60ac 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/SyndicationStatus.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/SyndicationStatus.scala @@ -4,24 +4,25 @@ import play.api.libs.json._ sealed trait SyndicationStatus { override def toString: String = this match { - case SentForSyndication => "sent" - case QueuedForSyndication => "queued" - case BlockedForSyndication => "blocked" + case 
SentForSyndication => "sent" + case QueuedForSyndication => "queued" + case BlockedForSyndication => "blocked" case AwaitingReviewForSyndication => "review" - case UnsuitableForSyndication => "unsuitable" + case UnsuitableForSyndication => "unsuitable" } } object SyndicationStatus { def apply(status: String): SyndicationStatus = status.toLowerCase match { - case "sent" => SentForSyndication - case "queued" => QueuedForSyndication - case "blocked" => BlockedForSyndication - case "review" => AwaitingReviewForSyndication + case "sent" => SentForSyndication + case "queued" => QueuedForSyndication + case "blocked" => BlockedForSyndication + case "review" => AwaitingReviewForSyndication case "unsuitable" => UnsuitableForSyndication } - implicit val reads: Reads[SyndicationStatus] = JsPath.read[String].map(SyndicationStatus(_)) + implicit val reads: Reads[SyndicationStatus] = + JsPath.read[String].map(SyndicationStatus(_)) implicit val writer = new Writes[SyndicationStatus] { def writes(status: SyndicationStatus) = JsString(status.toString) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/UsageRights.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/UsageRights.scala index 3154ccad9c..ff6fe4815a 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/UsageRights.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/UsageRights.scala @@ -2,7 +2,6 @@ package com.gu.mediaservice.model import play.api.libs.json._ - sealed trait UsageRights { // These two properties are used to infer cost // TODO: Remove these as they have nothing to do with the model really @@ -24,23 +23,43 @@ sealed trait UsageRightsSpec { object UsageRights { val all = List( - NoRights, Handout, PrImage, Screengrab, SocialMedia, - Agency, CommissionedAgency, Chargeable, Bylines, - StaffPhotographer, ContractPhotographer, CommissionedPhotographer, - CreativeCommons, GuardianWitness, Pool, CrownCopyright, Obituary, - ContractIllustrator, CommissionedIllustrator, StaffIllustrator, - Composite, PublicDomain + NoRights, + Handout, + PrImage, + Screengrab, + SocialMedia, + Agency, + CommissionedAgency, + Chargeable, + Bylines, + StaffPhotographer, + ContractPhotographer, + CommissionedPhotographer, + CreativeCommons, + GuardianWitness, + Pool, + CrownCopyright, + Obituary, + ContractIllustrator, + CommissionedIllustrator, + StaffIllustrator, + Composite, + PublicDomain ) - val photographer: List[UsageRightsSpec] = List(StaffPhotographer, ContractPhotographer, CommissionedPhotographer) - val illustrator: List[UsageRightsSpec] = List(StaffIllustrator, ContractIllustrator, CommissionedIllustrator) + val photographer: List[UsageRightsSpec] = + List(StaffPhotographer, ContractPhotographer, CommissionedPhotographer) + val illustrator: List[UsageRightsSpec] = + List(StaffIllustrator, ContractIllustrator, CommissionedIllustrator) val whollyOwned: List[UsageRightsSpec] = photographer ++ illustrator // this is a convenience method so that we use the same formatting for all subtypes // i.e. use the standard `Json.writes`. I still can't find a not have to pass the `f:Format[T]` // explicitly and inferring the type, but I think that has to do with the reflection that's used // in the serialisation. 
- def subtypeFormat[T <: UsageRights](category: String)(f: Format[T]): Format[T] = { + def subtypeFormat[T <: UsageRights]( + category: String + )(f: Format[T]): Format[T] = { val writes = Writes[T] { u => Json.obj("category" -> category) ++ f.writes(u).as[JsObject] } @@ -57,73 +76,77 @@ object UsageRights { // TODO: I haven't figured out why Json.toJson[T](o) doesn't work here, it'd // be good to know though. implicit def jsonWrites[T <: UsageRights]: Writes[T] = Writes[T] { - case o: Chargeable => Chargeable.formats.writes(o) - case o: Agency => Agency.formats.writes(o) - case o: CommissionedAgency => CommissionedAgency.formats.writes(o) - case o: PrImage => PrImage.formats.writes(o) - case o: Handout => Handout.formats.writes(o) - case o: Screengrab => Screengrab.formats.writes(o) - case o: GuardianWitness => GuardianWitness.formats.writes(o) - case o: SocialMedia => SocialMedia.formats.writes(o) - case o: Bylines => Bylines.formats.writes(o) - case o: Obituary => Obituary.formats.writes(o) - case o: StaffPhotographer => StaffPhotographer.formats.writes(o) + case o: Chargeable => Chargeable.formats.writes(o) + case o: Agency => Agency.formats.writes(o) + case o: CommissionedAgency => CommissionedAgency.formats.writes(o) + case o: PrImage => PrImage.formats.writes(o) + case o: Handout => Handout.formats.writes(o) + case o: Screengrab => Screengrab.formats.writes(o) + case o: GuardianWitness => GuardianWitness.formats.writes(o) + case o: SocialMedia => SocialMedia.formats.writes(o) + case o: Bylines => Bylines.formats.writes(o) + case o: Obituary => Obituary.formats.writes(o) + case o: StaffPhotographer => StaffPhotographer.formats.writes(o) case o: ContractPhotographer => ContractPhotographer.formats.writes(o) - case o: CommissionedPhotographer => CommissionedPhotographer.formats.writes(o) - case o: Pool => Pool.formats.writes(o) - case o: CrownCopyright => CrownCopyright.formats.writes(o) - case o: ContractIllustrator => ContractIllustrator.formats.writes(o) - case o: StaffIllustrator => StaffIllustrator.formats.writes(o) + case o: CommissionedPhotographer => + CommissionedPhotographer.formats.writes(o) + case o: Pool => Pool.formats.writes(o) + case o: CrownCopyright => CrownCopyright.formats.writes(o) + case o: ContractIllustrator => ContractIllustrator.formats.writes(o) + case o: StaffIllustrator => StaffIllustrator.formats.writes(o) case o: CommissionedIllustrator => CommissionedIllustrator.formats.writes(o) - case o: CreativeCommons => CreativeCommons.formats.writes(o) - case o: Composite => Composite.formats.writes(o) - case o: PublicDomain => PublicDomain.formats.writes(o) - case o: NoRights.type => NoRights.jsonWrites.writes(o) + case o: CreativeCommons => CreativeCommons.formats.writes(o) + case o: Composite => Composite.formats.writes(o) + case o: PublicDomain => PublicDomain.formats.writes(o) + case o: NoRights.type => NoRights.jsonWrites.writes(o) } implicit val jsonReads: Reads[UsageRights] = Reads[UsageRights] { json => - val category = (json \ "category").asOpt[String] - - // We use supplier as an indicator that an image is an Agency - // image as some images have been indexed without a category. 
- // TODO: Fix with reindex - val supplier = (json \ "supplier").asOpt[String] - - (category flatMap { - case Chargeable.category => json.asOpt[Chargeable] - case Agency.category => json.asOpt[Agency] - case CommissionedAgency.category => json.asOpt[CommissionedAgency] - case PrImage.category => json.asOpt[PrImage] - case Handout.category => json.asOpt[Handout] - case Screengrab.category => json.asOpt[Screengrab] - case GuardianWitness.category => json.asOpt[GuardianWitness] - case SocialMedia.category => json.asOpt[SocialMedia] - case Bylines.category => json.asOpt[Bylines] - case Obituary.category => json.asOpt[Obituary] - case StaffPhotographer.category => json.asOpt[StaffPhotographer] - case ContractPhotographer.category => json.asOpt[ContractPhotographer] - case CommissionedPhotographer.category => json.asOpt[CommissionedPhotographer] - case Pool.category => json.asOpt[Pool] - case CrownCopyright.category => json.asOpt[CrownCopyright] - case ContractIllustrator.category => json.asOpt[ContractIllustrator] - case StaffIllustrator.category => json.asOpt[StaffIllustrator] - case CommissionedIllustrator.category => json.asOpt[CommissionedIllustrator] - case CreativeCommons.category => json.asOpt[CreativeCommons] - case Composite.category => json.asOpt[Composite] - case PublicDomain.category => json.asOpt[PublicDomain] - case _ => None - }) - .orElse(supplier.flatMap(_ => json.asOpt[Agency])) - .orElse(json.asOpt[NoRights.type]) - .map(JsSuccess(_)) - .getOrElse(JsError(s"No such usage rights category: ${category.getOrElse("None")}")) - } + val category = (json \ "category").asOpt[String] + + // We use supplier as an indicator that an image is an Agency + // image as some images have been indexed without a category. + // TODO: Fix with reindex + val supplier = (json \ "supplier").asOpt[String] + + (category flatMap { + case Chargeable.category => json.asOpt[Chargeable] + case Agency.category => json.asOpt[Agency] + case CommissionedAgency.category => json.asOpt[CommissionedAgency] + case PrImage.category => json.asOpt[PrImage] + case Handout.category => json.asOpt[Handout] + case Screengrab.category => json.asOpt[Screengrab] + case GuardianWitness.category => json.asOpt[GuardianWitness] + case SocialMedia.category => json.asOpt[SocialMedia] + case Bylines.category => json.asOpt[Bylines] + case Obituary.category => json.asOpt[Obituary] + case StaffPhotographer.category => json.asOpt[StaffPhotographer] + case ContractPhotographer.category => json.asOpt[ContractPhotographer] + case CommissionedPhotographer.category => + json.asOpt[CommissionedPhotographer] + case Pool.category => json.asOpt[Pool] + case CrownCopyright.category => json.asOpt[CrownCopyright] + case ContractIllustrator.category => json.asOpt[ContractIllustrator] + case StaffIllustrator.category => json.asOpt[StaffIllustrator] + case CommissionedIllustrator.category => + json.asOpt[CommissionedIllustrator] + case CreativeCommons.category => json.asOpt[CreativeCommons] + case Composite.category => json.asOpt[Composite] + case PublicDomain.category => json.asOpt[PublicDomain] + case _ => None + }) + .orElse(supplier.flatMap(_ => json.asOpt[Agency])) + .orElse(json.asOpt[NoRights.type]) + .map(JsSuccess(_)) + .getOrElse( + JsError(s"No such usage rights category: ${category.getOrElse("None")}") + ) + } } // We have a custom writes and reads for NoRights as it is represented by `{}` // in the DB layer. 
-case object NoRights - extends UsageRights with UsageRightsSpec { +case object NoRights extends UsageRights with UsageRightsSpec { val category = "" val defaultCost = None val restrictions = None @@ -137,13 +160,15 @@ case object NoRights lazy val jsonVal = Json.obj() implicit val jsonReads: Reads[NoRights.type] = Reads[NoRights.type] { json => - if (json == jsonVal) JsSuccess(NoRights) else JsError("Value should be {} for no rights") + if (json == jsonVal) JsSuccess(NoRights) + else JsError("Value should be {} for no rights") } - implicit val jsonWrites: Writes[NoRights.type] = Writes[NoRights.type](_ => jsonVal) + implicit val jsonWrites: Writes[NoRights.type] = + Writes[NoRights.type](_ => jsonVal) } - -final case class Chargeable(restrictions: Option[String] = None) extends UsageRights { +final case class Chargeable(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = Chargeable.defaultCost } object Chargeable extends UsageRightsSpec { @@ -170,17 +195,22 @@ object Agencies { def get(id: String) = all.getOrElse(id, Agency(id)) def lookupId(lookupSupplierName: String): Option[String] = all.collectFirst { - case (id, Agency(supplierName, _, _)) if lookupSupplierName == supplierName => { id } + case (id, Agency(supplierName, _, _)) + if lookupSupplierName == supplierName => { id } } def getWithCollection(id: String, suppliersCollection: Option[String]) = - all.get(id) + all + .get(id) .map(_.copy(suppliersCollection = suppliersCollection)) .getOrElse(Agency(id, suppliersCollection)) } -final case class Agency(supplier: String, suppliersCollection: Option[String] = None, - restrictions: Option[String] = None) extends UsageRights { +final case class Agency( + supplier: String, + suppliersCollection: Option[String] = None, + restrictions: Option[String] = None +) extends UsageRights { val defaultCost = Agency.defaultCost def id: Option[String] = Agencies.lookupId(supplier) } @@ -195,8 +225,10 @@ object Agency extends UsageRightsSpec { UsageRights.subtypeFormat(Agency.category)(Json.format[Agency]) } - -final case class CommissionedAgency(supplier: String, restrictions: Option[String] = None) extends UsageRights { +final case class CommissionedAgency( + supplier: String, + restrictions: Option[String] = None +) extends UsageRights { val defaultCost = CommissionedAgency.defaultCost } object CommissionedAgency extends UsageRightsSpec { @@ -206,11 +238,13 @@ object CommissionedAgency extends UsageRightsSpec { val description = "Images commissioned from agencies on an ad hoc basis." implicit val formats: Format[CommissionedAgency] = - UsageRights.subtypeFormat(CommissionedAgency.category)(Json.format[CommissionedAgency]) + UsageRights.subtypeFormat(CommissionedAgency.category)( + Json.format[CommissionedAgency] + ) } - -final case class PrImage(restrictions: Option[String] = None) extends UsageRights { +final case class PrImage(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = PrImage.defaultCost } object PrImage extends UsageRightsSpec { @@ -222,14 +256,16 @@ object PrImage extends UsageRightsSpec { "promotional images, etc." override val caution = - Some("For use only within the context originally provided for (please state it below).") + Some( + "For use only within the context originally provided for (please state it below)." 
+ ) implicit val formats: Format[PrImage] = UsageRights.subtypeFormat(PrImage.category)(Json.format[PrImage]) } - -final case class Handout(restrictions: Option[String] = None) extends UsageRights { +final case class Handout(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = Handout.defaultCost } object Handout extends UsageRightsSpec { @@ -241,16 +277,20 @@ object Handout extends UsageRightsSpec { "stories, family shots in biographical pieces, etc." override val caution = - Some("For use only within the context originally provided for (please state it below).") + Some( + "For use only within the context originally provided for (please state it below)." + ) implicit val formats: Format[Handout] = UsageRights.subtypeFormat(Handout.category)(Json.format[Handout]) } - // TODO: `source` should not be an Option, but because we added it later, we would need to backfill // the data -final case class Screengrab(source: Option[String], restrictions: Option[String] = None) extends UsageRights { +final case class Screengrab( + source: Option[String], + restrictions: Option[String] = None +) extends UsageRights { val defaultCost = Screengrab.defaultCost } object Screengrab extends UsageRightsSpec { @@ -265,8 +305,8 @@ object Screengrab extends UsageRightsSpec { UsageRights.subtypeFormat(Screengrab.category)(Json.format[Screengrab]) } - -final case class GuardianWitness(restrictions: Option[String] = None) extends UsageRights { +final case class GuardianWitness(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = GuardianWitness.defaultCost } object GuardianWitness extends UsageRightsSpec { @@ -277,11 +317,13 @@ object GuardianWitness extends UsageRightsSpec { "Images provided by readers in response to callouts and assignments on GuardianWitness." implicit val formats: Format[GuardianWitness] = - UsageRights.subtypeFormat(GuardianWitness.category)(Json.format[GuardianWitness]) + UsageRights.subtypeFormat(GuardianWitness.category)( + Json.format[GuardianWitness] + ) } - -final case class SocialMedia(restrictions: Option[String] = None) extends UsageRights { +final case class SocialMedia(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = SocialMedia.defaultCost } object SocialMedia extends UsageRightsSpec { @@ -293,13 +335,16 @@ object SocialMedia extends UsageRightsSpec { "from usual sources." 
override val caution = - Some("Approval needed from senior editor if permission from owner cannot be acquired") + Some( + "Approval needed from senior editor if permission from owner cannot be acquired" + ) implicit val formats: Format[SocialMedia] = UsageRights.subtypeFormat(SocialMedia.category)(Json.format[SocialMedia]) } -final case class Bylines(restrictions: Option[String] = None) extends UsageRights { +final case class Bylines(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = Bylines.defaultCost } object Bylines extends UsageRightsSpec { @@ -313,7 +358,8 @@ object Bylines extends UsageRightsSpec { UsageRights.subtypeFormat(Bylines.category)(Json.format[Bylines]) } -final case class Obituary(restrictions: Option[String] = None) extends UsageRights { +final case class Obituary(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = Obituary.defaultCost } object Obituary extends UsageRightsSpec { @@ -327,9 +373,11 @@ object Obituary extends UsageRightsSpec { UsageRights.subtypeFormat(Obituary.category)(Json.format[Obituary]) } - -final case class StaffPhotographer(photographer: String, publication: String, - restrictions: Option[String] = None) extends Photographer { +final case class StaffPhotographer( + photographer: String, + publication: String, + restrictions: Option[String] = None +) extends Photographer { val defaultCost = StaffPhotographer.defaultCost } object StaffPhotographer extends UsageRightsSpec { @@ -340,12 +388,16 @@ object StaffPhotographer extends UsageRightsSpec { "Images from photographers who are or were members of staff." implicit val formats: Format[StaffPhotographer] = - UsageRights.subtypeFormat(StaffPhotographer.category)(Json.format[StaffPhotographer]) + UsageRights.subtypeFormat(StaffPhotographer.category)( + Json.format[StaffPhotographer] + ) } - -final case class ContractPhotographer(photographer: String, publication: Option[String] = None, - restrictions: Option[String] = None) extends Photographer { +final case class ContractPhotographer( + photographer: String, + publication: Option[String] = None, + restrictions: Option[String] = None +) extends Photographer { val defaultCost = ContractPhotographer.defaultCost } object ContractPhotographer extends UsageRightsSpec { @@ -356,12 +408,16 @@ object ContractPhotographer extends UsageRightsSpec { "Images from freelance photographers on fixed-term contracts." implicit val formats: Format[ContractPhotographer] = - UsageRights.subtypeFormat(ContractPhotographer.category)(Json.format[ContractPhotographer]) + UsageRights.subtypeFormat(ContractPhotographer.category)( + Json.format[ContractPhotographer] + ) } - -final case class CommissionedPhotographer(photographer: String, publication: Option[String] = None, - restrictions: Option[String] = None) extends Photographer { +final case class CommissionedPhotographer( + photographer: String, + publication: Option[String] = None, + restrictions: Option[String] = None +) extends Photographer { val defaultCost = CommissionedPhotographer.defaultCost } object CommissionedPhotographer extends UsageRightsSpec { @@ -372,10 +428,11 @@ object CommissionedPhotographer extends UsageRightsSpec { "Images commissioned from freelance photographers on an ad hoc basis." 
implicit val formats: Format[CommissionedPhotographer] = - UsageRights.subtypeFormat(CommissionedPhotographer.category)(Json.format[CommissionedPhotographer]) + UsageRights.subtypeFormat(CommissionedPhotographer.category)( + Json.format[CommissionedPhotographer] + ) } - final case class Pool(restrictions: Option[String] = None) extends UsageRights { val defaultCost = Pool.defaultCost } @@ -391,8 +448,8 @@ object Pool extends UsageRightsSpec { UsageRights.subtypeFormat(Pool.category)(Json.format[Pool]) } - -final case class CrownCopyright(restrictions: Option[String] = None) extends UsageRights { +final case class CrownCopyright(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = CrownCopyright.defaultCost } object CrownCopyright extends UsageRightsSpec { @@ -404,11 +461,15 @@ object CrownCopyright extends UsageRightsSpec { "acknowledgement." implicit val formats: Format[CrownCopyright] = - UsageRights.subtypeFormat(CrownCopyright.category)(Json.format[CrownCopyright]) + UsageRights.subtypeFormat(CrownCopyright.category)( + Json.format[CrownCopyright] + ) } -final case class StaffIllustrator(creator: String, restrictions: Option[String] = None) - extends Illustrator { +final case class StaffIllustrator( + creator: String, + restrictions: Option[String] = None +) extends Illustrator { val defaultCost = StaffIllustrator.defaultCost } object StaffIllustrator extends UsageRightsSpec { @@ -419,11 +480,16 @@ object StaffIllustrator extends UsageRightsSpec { "Images from illustrators who are or were members of staff." implicit val formats: Format[StaffIllustrator] = - UsageRights.subtypeFormat(StaffIllustrator.category)(Json.format[StaffIllustrator]) + UsageRights.subtypeFormat(StaffIllustrator.category)( + Json.format[StaffIllustrator] + ) } -final case class ContractIllustrator(creator: String, publication: Option[String] = None, restrictions: Option[String] = None) - extends Illustrator { +final case class ContractIllustrator( + creator: String, + publication: Option[String] = None, + restrictions: Option[String] = None +) extends Illustrator { val defaultCost = ContractIllustrator.defaultCost } object ContractIllustrator extends UsageRightsSpec { @@ -434,11 +500,16 @@ object ContractIllustrator extends UsageRightsSpec { "Illustrations from freelance illustrators on fixed-term contracts." implicit val formats: Format[ContractIllustrator] = - UsageRights.subtypeFormat(ContractIllustrator.category)(Json.format[ContractIllustrator]) + UsageRights.subtypeFormat(ContractIllustrator.category)( + Json.format[ContractIllustrator] + ) } -final case class CommissionedIllustrator(creator: String, publication: Option[String] = None, restrictions: Option[String] = None) - extends Illustrator { +final case class CommissionedIllustrator( + creator: String, + publication: Option[String] = None, + restrictions: Option[String] = None +) extends Illustrator { val defaultCost = CommissionedIllustrator.defaultCost } object CommissionedIllustrator extends UsageRightsSpec { @@ -449,12 +520,18 @@ object CommissionedIllustrator extends UsageRightsSpec { "Illustrations commissioned from freelance illustrators on an ad hoc basis." 
implicit val formats: Format[CommissionedIllustrator] = - UsageRights.subtypeFormat(CommissionedIllustrator.category)(Json.format[CommissionedIllustrator]) -} - - -final case class CreativeCommons(licence: String, source: String, creator: String, contentLink: String, - restrictions: Option[String] = None) extends UsageRights { + UsageRights.subtypeFormat(CommissionedIllustrator.category)( + Json.format[CommissionedIllustrator] + ) +} + +final case class CreativeCommons( + licence: String, + source: String, + creator: String, + contentLink: String, + restrictions: Option[String] = None +) extends UsageRights { val defaultCost = CreativeCommons.defaultCost } object CreativeCommons extends UsageRightsSpec { @@ -465,14 +542,20 @@ object CreativeCommons extends UsageRightsSpec { "Images made available by rights holders on open licence terms that grant third parties " + "permission to use and share copyright material for free." - override val caution = Some("This only applies to COMMERCIAL creative commons licences.") + override val caution = Some( + "This only applies to COMMERCIAL creative commons licences." + ) implicit val formats: Format[CreativeCommons] = - UsageRights.subtypeFormat(CreativeCommons.category)(Json.format[CreativeCommons]) + UsageRights.subtypeFormat(CreativeCommons.category)( + Json.format[CreativeCommons] + ) } - -final case class Composite(suppliers: String, restrictions: Option[String] = None) extends UsageRights { +final case class Composite( + suppliers: String, + restrictions: Option[String] = None +) extends UsageRights { val defaultCost = Composite.defaultCost } object Composite extends UsageRightsSpec { @@ -482,13 +565,16 @@ object Composite extends UsageRightsSpec { val description = "Any restricted images within the composite must be identified." - override val caution = Some("All images should be free to use, or restrictions applied") + override val caution = Some( + "All images should be free to use, or restrictions applied" + ) implicit val formats: Format[Composite] = UsageRights.subtypeFormat(Composite.category)(Json.format[Composite]) } -final case class PublicDomain(restrictions: Option[String] = None) extends UsageRights { +final case class PublicDomain(restrictions: Option[String] = None) + extends UsageRights { val defaultCost = PublicDomain.defaultCost } object PublicDomain extends UsageRightsSpec { @@ -498,7 +584,9 @@ object PublicDomain extends UsageRightsSpec { val description = "Images out of copyright or bequeathed to the public." 
- override val caution = Some("ONLY use if out of copyright or bequeathed to public") + override val caution = Some( + "ONLY use if out of copyright or bequeathed to public" + ) implicit val formats: Format[PublicDomain] = UsageRights.subtypeFormat(PublicDomain.category)(Json.format[PublicDomain]) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/leases/LeasesByMedia.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/leases/LeasesByMedia.scala index 443dde88b2..3bf99d4e98 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/leases/LeasesByMedia.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/leases/LeasesByMedia.scala @@ -5,8 +5,8 @@ import org.joda.time.DateTime import JodaWrites._ case class LeasesByMedia private[leases] ( - leases: List[MediaLease], - lastModified: Option[DateTime] + leases: List[MediaLease], + lastModified: Option[DateTime] ) object LeasesByMedia { @@ -22,18 +22,23 @@ object LeasesByMedia { } } - implicit def dateTimeOrdering: Ordering[DateTime] = Ordering.fromLessThan(_ isBefore _) + implicit def dateTimeOrdering: Ordering[DateTime] = + Ordering.fromLessThan(_ isBefore _) def empty = LeasesByMedia(Nil, None) - private[leases] def apply(leases: List[MediaLease], lastModified: Option[DateTime]): LeasesByMedia = new LeasesByMedia(leases, lastModified) + private[leases] def apply( + leases: List[MediaLease], + lastModified: Option[DateTime] + ): LeasesByMedia = new LeasesByMedia(leases, lastModified) - def build (leases: List[MediaLease]) = { - val lastModified = leases.sortBy(_.createdAt).reverse.headOption.map(_.createdAt) + def build(leases: List[MediaLease]) = { + val lastModified = + leases.sortBy(_.createdAt).reverse.headOption.map(_.createdAt) LeasesByMedia(leases, lastModified) } - def toJson(leases: JsValue, lastModified: JsValue) : JsObject = { + def toJson(leases: JsValue, lastModified: JsValue): JsObject = { JsObject( Seq( "leases" -> leases, diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/leases/MediaLease.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/leases/MediaLease.scala index 4a73119922..28089793b2 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/leases/MediaLease.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/leases/MediaLease.scala @@ -10,65 +10,73 @@ sealed trait MediaLeaseType { def name: String } object MediaLeaseType { implicit val reads: Reads[MediaLeaseType] = { JsPath.read[String].map { - case "allow-use" => AllowUseLease - case "deny-use" => DenyUseLease - case "allow-syndication" => AllowSyndicationLease - case "deny-syndication" => DenySyndicationLease + case "allow-use" => AllowUseLease + case "deny-use" => DenyUseLease + case "allow-syndication" => AllowSyndicationLease + case "deny-syndication" => DenySyndicationLease // legacy values before syndication leases existed - case "allow" => AllowUseLease - case "deny" => DenyUseLease + case "allow" => AllowUseLease + case "deny" => DenyUseLease } } - implicit val writer: Writes[MediaLeaseType] = (mediaLeaseType: MediaLeaseType) => JsString(mediaLeaseType.name) + implicit val writer: Writes[MediaLeaseType] = + (mediaLeaseType: MediaLeaseType) => JsString(mediaLeaseType.name) def apply(leaseType: String): MediaLeaseType = leaseType match { - case "AllowUseLease" => AllowUseLease - case "DenyUseLease" => DenyUseLease + case "AllowUseLease" => AllowUseLease + case "DenyUseLease" => DenyUseLease case "AllowSyndicationLease" => AllowSyndicationLease - case "DenySyndicationLease" => 
DenySyndicationLease + case "DenySyndicationLease" => DenySyndicationLease } } case object AllowUseLease extends MediaLeaseType { val name = "allow-use" } case object DenyUseLease extends MediaLeaseType { val name = "deny-use" } -case object AllowSyndicationLease extends MediaLeaseType { val name = "allow-syndication" } -case object DenySyndicationLease extends MediaLeaseType { val name = "deny-syndication" } +case object AllowSyndicationLease extends MediaLeaseType { + val name = "allow-syndication" +} +case object DenySyndicationLease extends MediaLeaseType { + val name = "deny-syndication" +} case class MediaLease( - id: Option[String], - leasedBy: Option[String], - startDate: Option[DateTime] = None, - endDate: Option[DateTime] = None, - access: MediaLeaseType = AllowUseLease, - notes: Option[String], - mediaId: String, - createdAt: DateTime = new DateTime() + id: Option[String], + leasedBy: Option[String], + startDate: Option[DateTime] = None, + endDate: Option[DateTime] = None, + access: MediaLeaseType = AllowUseLease, + notes: Option[String], + mediaId: String, + createdAt: DateTime = new DateTime() ) { - private def afterStart = startDate.forall(start => new DateTime().isAfter(start)) - private def beforeEnd = endDate.forall(end => new DateTime().isBefore(end)) + private def afterStart = + startDate.forall(start => new DateTime().isAfter(start)) + private def beforeEnd = endDate.forall(end => new DateTime().isBefore(end)) private def withValidNotesField: MediaLease = notes match { - case Some(note) if note.trim.length == 0 => this.copy(notes = None) // cannot save empty string in dynamo + case Some(note) if note.trim.length == 0 => + this.copy(notes = None) // cannot save empty string in dynamo case _ => this } private def withValidEndDateField: MediaLease = - if (access == AllowSyndicationLease) this.copy(endDate = None) // an allow-syndication cannot end + if (access == AllowSyndicationLease) + this.copy(endDate = None) // an allow-syndication cannot end else this private def withValidStartDateField: MediaLease = - if (access == DenySyndicationLease) this.copy(startDate = None) // a deny-syndication cannot start + if (access == DenySyndicationLease) + this.copy(startDate = None) // a deny-syndication cannot start else this - def prepareForSave: MediaLease = this - .withValidNotesField - .withValidStartDateField - .withValidEndDateField + def prepareForSave: MediaLease = + this.withValidNotesField.withValidStartDateField.withValidEndDateField def active = afterStart && beforeEnd - def isSyndication = access == AllowSyndicationLease || access == DenySyndicationLease + def isSyndication = + access == AllowSyndicationLease || access == DenySyndicationLease def isUse = access == AllowUseLease || access == DenyUseLease } @@ -78,8 +86,11 @@ object MediaLease { val MediaLeasePlainWrites: OWrites[MediaLease] = Json.writes[MediaLease] - implicit val MediaLeaseWrites: Writes[MediaLease] = (mediaLease: MediaLease) => - Json.toJson(mediaLease)(MediaLeasePlainWrites).as[JsObject] + ("active" -> JsBoolean(mediaLease.active)) + implicit val MediaLeaseWrites: Writes[MediaLease] = + (mediaLease: MediaLease) => + Json + .toJson(mediaLease)(MediaLeasePlainWrites) + .as[JsObject] + ("active" -> JsBoolean(mediaLease.active)) def toJson(lease: MediaLease): JsValue = Json.obj( "id" -> lease.mediaId, diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/DigitalUsageMetadata.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/DigitalUsageMetadata.scala index 
df6337a5f8..23978c3a9f 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/DigitalUsageMetadata.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/DigitalUsageMetadata.scala @@ -4,14 +4,15 @@ import java.net.URI import play.api.libs.json._ import com.gu.mediaservice.syntax._ -case class DigitalUsageMetadata ( - webUrl: URI, - webTitle: String, - sectionId: String, - composerUrl: Option[URI] = None +case class DigitalUsageMetadata( + webUrl: URI, + webTitle: String, + sectionId: String, + composerUrl: Option[URI] = None ) extends UsageMetadata { private val placeholderWebTitle = "No title given" - private val dynamoSafeWebTitle = if(webTitle.isEmpty) placeholderWebTitle else webTitle + private val dynamoSafeWebTitle = + if (webTitle.isEmpty) placeholderWebTitle else webTitle override def toMap: Map[String, String] = Map( "webUrl" -> webUrl.toString, @@ -21,6 +22,8 @@ case class DigitalUsageMetadata ( } object DigitalUsageMetadata { - implicit val reader: Reads[DigitalUsageMetadata] = Json.reads[DigitalUsageMetadata] - implicit val writer: Writes[DigitalUsageMetadata] = Json.writes[DigitalUsageMetadata] + implicit val reader: Reads[DigitalUsageMetadata] = + Json.reads[DigitalUsageMetadata] + implicit val writer: Writes[DigitalUsageMetadata] = + Json.writes[DigitalUsageMetadata] } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/DownloadUsageMetadata.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/DownloadUsageMetadata.scala index 019968846d..afc1380a24 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/DownloadUsageMetadata.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/DownloadUsageMetadata.scala @@ -3,7 +3,7 @@ package com.gu.mediaservice.model.usage import play.api.libs.json.{Json, Reads, Writes} case class DownloadUsageMetadata( - downloadedBy: String + downloadedBy: String ) extends UsageMetadata { override def toMap: Map[String, Any] = Map( "downloadedBy" -> downloadedBy @@ -11,6 +11,8 @@ case class DownloadUsageMetadata( } object DownloadUsageMetadata { - implicit val reader: Reads[DownloadUsageMetadata] = Json.reads[DownloadUsageMetadata] - implicit val writer: Writes[DownloadUsageMetadata] = Json.writes[DownloadUsageMetadata] + implicit val reader: Reads[DownloadUsageMetadata] = + Json.reads[DownloadUsageMetadata] + implicit val writer: Writes[DownloadUsageMetadata] = + Json.writes[DownloadUsageMetadata] } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/FrontUsageMetadata.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/FrontUsageMetadata.scala index 28edaa86cf..016d9388f9 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/FrontUsageMetadata.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/FrontUsageMetadata.scala @@ -3,8 +3,8 @@ package com.gu.mediaservice.model.usage import play.api.libs.json._ case class FrontUsageMetadata( - addedBy: String, - front: String + addedBy: String, + front: String ) extends UsageMetadata { override def toMap: Map[String, Any] = Map( "addedBy" -> addedBy, @@ -13,6 +13,8 @@ case class FrontUsageMetadata( } object FrontUsageMetadata { - implicit val reader: Reads[FrontUsageMetadata] = Json.reads[FrontUsageMetadata] - implicit val writer: Writes[FrontUsageMetadata] = Json.writes[FrontUsageMetadata] + implicit val reader: Reads[FrontUsageMetadata] = + Json.reads[FrontUsageMetadata] + implicit val writer: Writes[FrontUsageMetadata] = + 
Json.writes[FrontUsageMetadata] } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/MediaUsage.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/MediaUsage.scala index fcaa236d59..03166aebbc 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/MediaUsage.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/MediaUsage.scala @@ -7,26 +7,28 @@ case class UsageId(id: String) { } case class MediaUsage( - usageId: UsageId, - grouping: String, - mediaId: String, - usageType: UsageType, - mediaType: String, - status: UsageStatus, - printUsageMetadata: Option[PrintUsageMetadata], - digitalUsageMetadata: Option[DigitalUsageMetadata], - syndicationUsageMetadata: Option[SyndicationUsageMetadata], - frontUsageMetadata: Option[FrontUsageMetadata], - downloadUsageMetadata: Option[DownloadUsageMetadata], - lastModified: DateTime, - dateAdded: Option[DateTime] = None, - dateRemoved: Option[DateTime] = None + usageId: UsageId, + grouping: String, + mediaId: String, + usageType: UsageType, + mediaType: String, + status: UsageStatus, + printUsageMetadata: Option[PrintUsageMetadata], + digitalUsageMetadata: Option[DigitalUsageMetadata], + syndicationUsageMetadata: Option[SyndicationUsageMetadata], + frontUsageMetadata: Option[FrontUsageMetadata], + downloadUsageMetadata: Option[DownloadUsageMetadata], + lastModified: DateTime, + dateAdded: Option[DateTime] = None, + dateRemoved: Option[DateTime] = None ) extends GridLogging { def isGridLikeId: Boolean = { if (mediaId.startsWith("gu-image-")) { // remove events from CAPI that represent images previous to Grid existing - logger.info(s"MediaId $mediaId doesn't look like a Grid image. Ignoring usage $usageId.") + logger.info( + s"MediaId $mediaId doesn't look like a Grid image. Ignoring usage $usageId." 
+ ) false } else { true @@ -42,10 +44,9 @@ case class MediaUsage( override def equals(obj: Any): Boolean = obj match { case mediaUsage: MediaUsage => { usageId == mediaUsage.usageId && - grouping == mediaUsage.grouping && - dateRemoved.isEmpty + grouping == mediaUsage.grouping && + dateRemoved.isEmpty } // TODO: This will work for checking if new items have been added/removed case _ => false } } - diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/PrintUsageMetadata.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/PrintUsageMetadata.scala index c8249346d2..30b434596e 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/PrintUsageMetadata.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/PrintUsageMetadata.scala @@ -6,8 +6,8 @@ import org.joda.time.DateTime import scala.collection.JavaConverters._ case class PrintImageSize( - x: Int, - y: Int + x: Int, + y: Int ) extends UsageMetadata { override def toMap = Map( "x" -> x, @@ -20,25 +20,25 @@ object PrintImageSize { } case class PrintUsageMetadata( - sectionName: String, - issueDate: DateTime, - pageNumber: Int, - storyName: String, - publicationCode: String, - publicationName: String, - layoutId: Option[Long] = None, - edition: Option[Int], - size: Option[PrintImageSize] = None, - orderedBy: Option[String] = None, - sectionCode: String, - notes: Option[String] = None, - source: Option[String] = None + sectionName: String, + issueDate: DateTime, + pageNumber: Int, + storyName: String, + publicationCode: String, + publicationName: String, + layoutId: Option[Long] = None, + edition: Option[Int], + size: Option[PrintImageSize] = None, + orderedBy: Option[String] = None, + sectionCode: String, + notes: Option[String] = None, + source: Option[String] = None ) extends UsageMetadata { type MapStringIntElement = List[(String, java.util.Map[String, Int])] - type StringElement = List[(String,String)] - type LongElement = List[(String,Long)] - type IntElement = List[(String,Int)] + type StringElement = List[(String, String)] + type LongElement = List[(String, Long)] + type IntElement = List[(String, Int)] override def toMap = Map( "sectionName" -> sectionName, @@ -48,17 +48,24 @@ case class PrintUsageMetadata( "publicationCode" -> publicationCode, "publicationName" -> publicationName, "sectionCode" -> sectionCode - ) ++ size.foldLeft[MapStringIntElement](Nil)((_,m) => List("size" -> m.toMap.asJava)) ++ - orderedBy.foldLeft[StringElement](Nil)((_,s) => List("orderedBy" -> s)) ++ - layoutId.foldLeft[LongElement](Nil)((_,l) => List("layoutId" -> l)) ++ - edition.foldLeft[IntElement](Nil)((_,i) => List("edition" -> i)) ++ - notes.foldLeft[StringElement](Nil)((_,s) => if(s.isEmpty) Nil else List("notes" -> s)) ++ - source.foldLeft[StringElement](Nil)((_,s) => if(s.isEmpty) Nil else List("source" -> s)) + ) ++ size.foldLeft[MapStringIntElement](Nil)((_, m) => + List("size" -> m.toMap.asJava) + ) ++ + orderedBy.foldLeft[StringElement](Nil)((_, s) => List("orderedBy" -> s)) ++ + layoutId.foldLeft[LongElement](Nil)((_, l) => List("layoutId" -> l)) ++ + edition.foldLeft[IntElement](Nil)((_, i) => List("edition" -> i)) ++ + notes.foldLeft[StringElement](Nil)((_, s) => + if (s.isEmpty) Nil else List("notes" -> s) + ) ++ + source.foldLeft[StringElement](Nil)((_, s) => + if (s.isEmpty) Nil else List("source" -> s) + ) } object PrintUsageMetadata { import JodaWrites._ import JodaReads._ implicit val reads: Reads[PrintUsageMetadata] = Json.reads[PrintUsageMetadata] - implicit val writes: 
Writes[PrintUsageMetadata] = Json.writes[PrintUsageMetadata] + implicit val writes: Writes[PrintUsageMetadata] = + Json.writes[PrintUsageMetadata] } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/SyndicationUsageMetadata.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/SyndicationUsageMetadata.scala index 4ede81dd58..dd3fe09afa 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/SyndicationUsageMetadata.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/SyndicationUsageMetadata.scala @@ -3,7 +3,7 @@ package com.gu.mediaservice.model.usage import play.api.libs.json._ case class SyndicationUsageMetadata( - partnerName: String + partnerName: String ) extends UsageMetadata { override def toMap: Map[String, Any] = Map( "partnerName" -> partnerName @@ -11,6 +11,8 @@ case class SyndicationUsageMetadata( } object SyndicationUsageMetadata { - implicit val reader: Reads[SyndicationUsageMetadata] = Json.reads[SyndicationUsageMetadata] - implicit val writer: Writes[SyndicationUsageMetadata] = Json.writes[SyndicationUsageMetadata] + implicit val reader: Reads[SyndicationUsageMetadata] = + Json.reads[SyndicationUsageMetadata] + implicit val writer: Writes[SyndicationUsageMetadata] = + Json.writes[SyndicationUsageMetadata] } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/Usage.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/Usage.scala index 89837c761e..5ed5f15a5a 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/Usage.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/Usage.scala @@ -3,23 +3,21 @@ package com.gu.mediaservice.model.usage import play.api.libs.json._ import org.joda.time.DateTime - case class Usage( - id: String, - references: List[UsageReference], - platform: UsageType, - media: String, - status: UsageStatus, - dateAdded: Option[DateTime], - dateRemoved: Option[DateTime], - lastModified: DateTime, - - // TODO collapse this field into an `Option[UsageMetadata]` - printUsageMetadata: Option[PrintUsageMetadata] = None, - digitalUsageMetadata: Option[DigitalUsageMetadata] = None, - syndicationUsageMetadata: Option[SyndicationUsageMetadata] = None, - frontUsageMetadata: Option[FrontUsageMetadata] = None, - downloadUsageMetadata: Option[DownloadUsageMetadata] = None + id: String, + references: List[UsageReference], + platform: UsageType, + media: String, + status: UsageStatus, + dateAdded: Option[DateTime], + dateRemoved: Option[DateTime], + lastModified: DateTime, + // TODO collapse this field into an `Option[UsageMetadata]` + printUsageMetadata: Option[PrintUsageMetadata] = None, + digitalUsageMetadata: Option[DigitalUsageMetadata] = None, + syndicationUsageMetadata: Option[SyndicationUsageMetadata] = None, + frontUsageMetadata: Option[FrontUsageMetadata] = None, + downloadUsageMetadata: Option[DownloadUsageMetadata] = None ) object Usage { import JodaWrites._ diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageNotice.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageNotice.scala index 27abc9ec48..5dc2aebad5 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageNotice.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageNotice.scala @@ -13,15 +13,19 @@ case class UsageNotice(mediaId: String, usageJson: JsArray) { override def equals(o: Any) = o match { case that: UsageNotice => that.hashCode == this.hashCode - case _ => false + case _ => false } 
override def hashCode = { - val result = Json.toJson( - usageJson.as[List[JsObject]] - .map(_ - "lastModified") - .map(_ - "dateAdded") - ).as[JsArray].toString + val result = Json + .toJson( + usageJson + .as[List[JsObject]] + .map(_ - "lastModified") + .map(_ - "dateAdded") + ) + .as[JsArray] + .toString result.hashCode } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageReference.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageReference.scala index 85c42d278d..0ff3f4ab06 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageReference.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageReference.scala @@ -5,9 +5,9 @@ import play.api.libs.json._ import com.gu.mediaservice.syntax._ case class UsageReference( - `type`: UsageReferenceType, - uri: Option[URI] = None, - name: Option[String] = None + `type`: UsageReferenceType, + uri: Option[URI] = None, + name: Option[String] = None ) object UsageReference { implicit val writes: Writes[UsageReference] = Json.writes[UsageReference] diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageReferenceType.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageReferenceType.scala index 5602318a01..f316917714 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageReferenceType.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageReferenceType.scala @@ -4,27 +4,31 @@ import play.api.libs.json._ trait UsageReferenceType { override def toString: String = this match { - case InDesignUsageReference => "indesign" - case FrontendUsageReference => "frontend" - case ComposerUsageReference => "composer" + case InDesignUsageReference => "indesign" + case FrontendUsageReference => "frontend" + case ComposerUsageReference => "composer" case SyndicationUsageReference => "syndication" - case FrontUsageReference => "front" - case DownloadUsageReference => "download" + case FrontUsageReference => "front" + case DownloadUsageReference => "download" } } object UsageReferenceType { - implicit val reads: Reads[UsageReferenceType] = JsPath.read[String].map(UsageReferenceType(_)) - implicit val writer: Writes[UsageReferenceType] = (usageReferenceType: UsageReferenceType) => JsString(usageReferenceType.toString) + implicit val reads: Reads[UsageReferenceType] = + JsPath.read[String].map(UsageReferenceType(_)) + implicit val writer: Writes[UsageReferenceType] = + (usageReferenceType: UsageReferenceType) => + JsString(usageReferenceType.toString) - def apply(usageReferenceType: String): UsageReferenceType = usageReferenceType.toLowerCase match { - case "indesign" => InDesignUsageReference - case "frontend" => FrontendUsageReference - case "composer" => ComposerUsageReference - case "syndication" => SyndicationUsageReference - case "front" => FrontUsageReference - case "download" => DownloadUsageReference - } + def apply(usageReferenceType: String): UsageReferenceType = + usageReferenceType.toLowerCase match { + case "indesign" => InDesignUsageReference + case "frontend" => FrontendUsageReference + case "composer" => ComposerUsageReference + case "syndication" => SyndicationUsageReference + case "front" => FrontUsageReference + case "download" => DownloadUsageReference + } } object InDesignUsageReference extends UsageReferenceType diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageStatus.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageStatus.scala index 
cbafb45fe7..61bbbe195a 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageStatus.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageStatus.scala @@ -4,26 +4,27 @@ import play.api.libs.json._ sealed trait UsageStatus { override def toString = this match { - case PendingUsageStatus => "pending" - case PublishedUsageStatus => "published" - case RemovedUsageStatus => "removed" + case PendingUsageStatus => "pending" + case PublishedUsageStatus => "published" + case RemovedUsageStatus => "removed" case SyndicatedUsageStatus => "syndicated" case DownloadedUsageStatus => "downloaded" - case UnknownUsageStatus => "unknown" + case UnknownUsageStatus => "unknown" } } object UsageStatus { def apply(status: String): UsageStatus = status.toLowerCase match { - case "pending" => PendingUsageStatus - case "published" => PublishedUsageStatus - case "removed" => RemovedUsageStatus + case "pending" => PendingUsageStatus + case "published" => PublishedUsageStatus + case "removed" => RemovedUsageStatus case "syndicated" => SyndicatedUsageStatus case "downloaded" => DownloadedUsageStatus - case "unknown" => UnknownUsageStatus + case "unknown" => UnknownUsageStatus } - implicit val reads: Reads[UsageStatus] = JsPath.read[String].map(UsageStatus(_)) + implicit val reads: Reads[UsageStatus] = + JsPath.read[String].map(UsageStatus(_)) implicit val writer = new Writes[UsageStatus] { def writes(usageStatus: UsageStatus) = JsString(usageStatus.toString) diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageTableFullKey.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageTableFullKey.scala index 07008a44aa..b176e7e5df 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageTableFullKey.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageTableFullKey.scala @@ -18,6 +18,6 @@ object UsageTableFullKey { def build(combinedKey: String): Option[UsageTableFullKey] = { val pair = combinedKey.split(keyDelimiter) - Try { pair match { case Array(h,r) => UsageTableFullKey(h, r) } }.toOption + Try { pair match { case Array(h, r) => UsageTableFullKey(h, r) } }.toOption } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageType.scala b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageType.scala index 247eba5310..ea20e68e95 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageType.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/model/usage/UsageType.scala @@ -4,22 +4,23 @@ import play.api.libs.json._ trait UsageType { override def toString = this match { - case PrintUsage => "print" - case DigitalUsage => "digital" + case PrintUsage => "print" + case DigitalUsage => "digital" case SyndicationUsage => "syndication" - case DownloadUsage => "download" + case DownloadUsage => "download" } } object UsageType { implicit val reads: Reads[UsageType] = JsPath.read[String].map(UsageType(_)) - implicit val writer: Writes[UsageType] = (usageType: UsageType) => JsString(usageType.toString) + implicit val writer: Writes[UsageType] = (usageType: UsageType) => + JsString(usageType.toString) def apply(usageType: String): UsageType = usageType.toLowerCase match { - case "print" => PrintUsage - case "digital" => DigitalUsage + case "print" => PrintUsage + case "digital" => DigitalUsage case "syndication" => SyndicationUsage - case "download" => DownloadUsage + case "download" => DownloadUsage } } diff --git 
a/common-lib/src/main/scala/com/gu/mediaservice/syntax/ElasticSearchSyntax.scala b/common-lib/src/main/scala/com/gu/mediaservice/syntax/ElasticSearchSyntax.scala index b21731b94c..b229375a53 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/syntax/ElasticSearchSyntax.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/syntax/ElasticSearchSyntax.scala @@ -4,7 +4,12 @@ package syntax import java.util.regex.Pattern import org.elasticsearch.action.get.GetResponse -import org.elasticsearch.action.{ActionRequest, ActionRequestBuilder, ActionResponse, ListenableActionFuture} +import org.elasticsearch.action.{ + ActionRequest, + ActionRequestBuilder, + ActionResponse, + ListenableActionFuture +} import org.elasticsearch.search.SearchHit import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder import play.api.{Logger, MarkerContext} @@ -14,40 +19,57 @@ import net.logstash.logback.marker.Markers.appendEntries import scala.collection.JavaConverters._ import scala.concurrent.{ExecutionContext, Future} - trait ElasticSearchSyntax { - final implicit class ListenableActionFutureSyntax[A](self: ListenableActionFuture[A]) { + final implicit class ListenableActionFutureSyntax[A]( + self: ListenableActionFuture[A] + ) { def asScala: Future[A] = FutureConversions(self) } final implicit class GetResponseSyntax(self: GetResponse) { - def sourceOpt: Option[JsValue] = Option(self.getSourceAsString) map Json.parse + def sourceOpt: Option[JsValue] = + Option(self.getSourceAsString) map Json.parse } - final implicit class ActionRequestBuilderSyntax[A <: ActionResponse] - (self: ActionRequestBuilder[_ <: ActionRequest[_], A, _, _]) { + final implicit class ActionRequestBuilderSyntax[A <: ActionResponse]( + self: ActionRequestBuilder[_ <: ActionRequest[_], A, _, _] + ) { - def executeAndLog(message: => String)(implicit ex: ExecutionContext): Future[A] = { + def executeAndLog( + message: => String + )(implicit ex: ExecutionContext): Future[A] = { val start = System.currentTimeMillis() val result = self.execute().asScala result.foreach { _ => val elapsed = System.currentTimeMillis() - start - val markers = MarkerContext(appendEntries(Map( - "duration" -> elapsed - ).asJava)) - - Logger.info(s"$message - query returned successfully in $elapsed ms")(markers) + val markers = MarkerContext( + appendEntries( + Map( + "duration" -> elapsed + ).asJava + ) + ) + + Logger.info(s"$message - query returned successfully in $elapsed ms")( + markers + ) } result.failed.foreach { e => val elapsed = System.currentTimeMillis() - start - val markers = MarkerContext(appendEntries(Map( - "duration" -> elapsed - ).asJava)) - - Logger.error(s"$message - query failed after $elapsed ms: ${e.getMessage} cs: ${e.getCause}")(markers) + val markers = MarkerContext( + appendEntries( + Map( + "duration" -> elapsed + ).asJava + ) + ) + + Logger.error( + s"$message - query failed after $elapsed ms: ${e.getMessage} cs: ${e.getCause}" + )(markers) } result @@ -55,7 +77,8 @@ trait ElasticSearchSyntax { } final implicit class SearchHitSyntax(self: SearchHit) { - def sourceOpt: Option[JsValue] = Option(self.getSourceAsString) map Json.parse + def sourceOpt: Option[JsValue] = + Option(self.getSourceAsString) map Json.parse } final implicit class TermsBuilderSyntax(self: TermsBuilder) { diff --git a/common-lib/src/main/scala/com/gu/mediaservice/syntax/PlayJsonSyntax.scala b/common-lib/src/main/scala/com/gu/mediaservice/syntax/PlayJsonSyntax.scala index fb6be7ea5d..8cfaf99618 100644 --- 
a/common-lib/src/main/scala/com/gu/mediaservice/syntax/PlayJsonSyntax.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/syntax/PlayJsonSyntax.scala @@ -19,12 +19,13 @@ trait PlayJsonSyntax { implicit val uriReads = new Reads[URI] { override def reads(json: JsValue): JsResult[URI] = json match { - case JsString(uriInJson) => Try { - new URI(uriInJson) - } match { - case Success(uri) => JsSuccess(uri) - case Failure(_) => JsError(s"Could not parse $uriInJson as valid URI") - } + case JsString(uriInJson) => + Try { + new URI(uriInJson) + } match { + case Success(uri) => JsSuccess(uri) + case Failure(_) => JsError(s"Could not parse $uriInJson as valid URI") + } case _ => JsError("URI as String expected") } } diff --git a/common-lib/src/main/scala/com/gu/mediaservice/syntax/package.scala b/common-lib/src/main/scala/com/gu/mediaservice/syntax/package.scala index 1bb57bf199..292a7ca4b6 100644 --- a/common-lib/src/main/scala/com/gu/mediaservice/syntax/package.scala +++ b/common-lib/src/main/scala/com/gu/mediaservice/syntax/package.scala @@ -3,8 +3,8 @@ package com.gu.mediaservice import com.gu.mediaservice.lib.metrics.FutureSyntax package object syntax - extends ElasticSearchSyntax - with PlayJsonSyntax - with RequestHeaderSyntax - with FutureSyntax - with ProcessSyntax + extends ElasticSearchSyntax + with PlayJsonSyntax + with RequestHeaderSyntax + with FutureSyntax + with ProcessSyntax diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/DateTimeUtilsTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/DateTimeUtilsTest.scala index 1fb7ce3fe4..08176d5ff8 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/DateTimeUtilsTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/DateTimeUtilsTest.scala @@ -4,19 +4,19 @@ import org.joda.time.DateTime import org.scalatest.{FunSpec, Matchers} class DateTimeUtilsTest extends FunSpec with Matchers { - it ("should convert a string to a DateTime") { + it("should convert a string to a DateTime") { val dateString = "2020-01-01T12:34:56.000Z" val actual = DateTimeUtils.fromValueOrNow(Some(dateString)) actual shouldBe a[DateTime] actual.toString shouldBe dateString } - it ("should handle an invalid date string input and return a DateTime") { + it("should handle an invalid date string input and return a DateTime") { val actual = DateTimeUtils.fromValueOrNow(Some("nonsense")) actual shouldBe a[DateTime] } - it ("should return a date with no input") { + it("should return a date with no input") { val actual = DateTimeUtils.fromValueOrNow(None) actual shouldBe a[DateTime] } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/ProcessesSpec.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/ProcessesSpec.scala index 0446925866..36680dc9bc 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/ProcessesSpec.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/ProcessesSpec.scala @@ -21,11 +21,12 @@ object ProcessesSpec extends Properties("Processes") { val smallPosInt = choose(1, 10) - property("emitEveryNth") = forAll(listOf(arbitrary[Int]), smallPosInt) { (xs, n) => - val p = Process(xs: _*) - val ys = p.pipe(emitEveryNth(n)).toList - val counts = xs.foldMap(x => Map(x -> 1)) - xs.forall(x => counts(x) / n == ys.count(_ == x)) + property("emitEveryNth") = forAll(listOf(arbitrary[Int]), smallPosInt) { + (xs, n) => + val p = Process(xs: _*) + val ys = p.pipe(emitEveryNth(n)).toList + val counts = xs.foldMap(x => Map(x -> 1)) + xs.forall(x => counts(x) / n == ys.count(_ == x)) } } diff 
--git a/common-lib/src/test/scala/com/gu/mediaservice/lib/auth/ApiKeyAuthenticationProviderTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/auth/ApiKeyAuthenticationProviderTest.scala index 587c2efdbb..d587ce15ef 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/auth/ApiKeyAuthenticationProviderTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/auth/ApiKeyAuthenticationProviderTest.scala @@ -2,7 +2,14 @@ package com.gu.mediaservice.lib.auth import akka.actor.ActorSystem import com.gu.mediaservice.lib.auth.Authentication.MachinePrincipal -import com.gu.mediaservice.lib.auth.provider.{ApiKeyAuthenticationProvider, Authenticated, AuthenticationProviderResources, Invalid, NotAuthenticated, NotAuthorised} +import com.gu.mediaservice.lib.auth.provider.{ + ApiKeyAuthenticationProvider, + Authenticated, + AuthenticationProviderResources, + Invalid, + NotAuthenticated, + NotAuthorised +} import com.gu.mediaservice.lib.config.CommonConfig import org.scalatest.Inside.inside import org.scalatest.{AsyncFreeSpec, BeforeAndAfterAll, EitherValues, Matchers} @@ -14,41 +21,61 @@ import scala.concurrent.ExecutionContext.global import scala.concurrent.Future //noinspection NotImplementedCode,SpellCheckingInspection -class ApiKeyAuthenticationProviderTest extends AsyncFreeSpec with Matchers with EitherValues with BeforeAndAfterAll { +class ApiKeyAuthenticationProviderTest + extends AsyncFreeSpec + with Matchers + with EitherValues + with BeforeAndAfterAll { private val actorSystem: ActorSystem = ActorSystem() private val wsClient = new WsTestClient.InternalWSClient("https", 443) - private val config = new CommonConfig(Configuration.load(Environment.simple())) {} + private val config = new CommonConfig( + Configuration.load(Environment.simple()) + ) {} private val providerConfig = Configuration.empty - private val controllerComponents: DefaultControllerComponents = DefaultControllerComponents(null, null, null, null, null, global) - private val resources = AuthenticationProviderResources(config, actorSystem, wsClient, controllerComponents) - private val provider = new ApiKeyAuthenticationProvider(providerConfig, resources) { - override def initialise(): Unit = { /* do nothing */ } + private val controllerComponents: DefaultControllerComponents = + DefaultControllerComponents(null, null, null, null, null, global) + private val resources = AuthenticationProviderResources( + config, + actorSystem, + wsClient, + controllerComponents + ) + private val provider = + new ApiKeyAuthenticationProvider(providerConfig, resources) { + override def initialise(): Unit = { /* do nothing */ } - override def shutdown(): Future[Unit] = { /* do nothing */ - Future.successful(()) - } + override def shutdown(): Future[Unit] = { /* do nothing */ + Future.successful(()) + } - override def keyStore: KeyStore = new KeyStore("not-used", resources.commonConfig) { - override def lookupIdentity(key: String): Option[ApiAccessor] = { - key match { - case "key-chuckle" => Some(ApiAccessor("brothers", Internal)) - case "key-limited" => Some(ApiAccessor("locked-down", ReadOnly)) - case _ => None + override def keyStore: KeyStore = + new KeyStore("not-used", resources.commonConfig) { + override def lookupIdentity(key: String): Option[ApiAccessor] = { + key match { + case "key-chuckle" => Some(ApiAccessor("brothers", Internal)) + case "key-limited" => Some(ApiAccessor("locked-down", ReadOnly)) + case _ => None + } + } } - } } - } "requestAuthentication" - { "should return Authenticated if the key is valid" 
in { - val testHeader = ApiKeyAuthenticationProvider.apiKeyHeaderName -> "key-chuckle" - val status = provider.authenticateRequest(FakeRequest().withHeaders(testHeader)) + val testHeader = + ApiKeyAuthenticationProvider.apiKeyHeaderName -> "key-chuckle" + val status = + provider.authenticateRequest(FakeRequest().withHeaders(testHeader)) inside(status) { case Authenticated(MachinePrincipal(apiAccessor, attributes)) => apiAccessor shouldBe ApiAccessor("brothers", Internal) - attributes.contains(ApiKeyAuthenticationProvider.ApiKeyHeader) shouldBe true - attributes.get(ApiKeyAuthenticationProvider.ApiKeyHeader) shouldBe Some(testHeader) + attributes.contains( + ApiKeyAuthenticationProvider.ApiKeyHeader + ) shouldBe true + attributes.get( + ApiKeyAuthenticationProvider.ApiKeyHeader + ) shouldBe Some(testHeader) } } "should return NotAuthenticated if the header is missing" in { @@ -56,19 +83,22 @@ class ApiKeyAuthenticationProviderTest extends AsyncFreeSpec with Matchers with status shouldBe NotAuthenticated } "should return Invalid if the key is invalid" in { - val testHeader = ApiKeyAuthenticationProvider.apiKeyHeaderName -> "key-banana" - val status = provider.authenticateRequest(FakeRequest().withHeaders(testHeader)) - inside(status) { - case Invalid(message, _) => - message shouldBe "API key not valid" + val testHeader = + ApiKeyAuthenticationProvider.apiKeyHeaderName -> "key-banana" + val status = + provider.authenticateRequest(FakeRequest().withHeaders(testHeader)) + inside(status) { case Invalid(message, _) => + message shouldBe "API key not valid" } } "should return NotAuthorised if the key doesn't have enough permissions" in { - val testHeader = ApiKeyAuthenticationProvider.apiKeyHeaderName -> "key-limited" - val status = provider.authenticateRequest(FakeRequest().withHeaders(testHeader).withMethod("POST")) - inside(status) { - case NotAuthorised(message) => - message shouldBe "API key locked-down valid but not authorised for this request" + val testHeader = + ApiKeyAuthenticationProvider.apiKeyHeaderName -> "key-limited" + val status = provider.authenticateRequest( + FakeRequest().withHeaders(testHeader).withMethod("POST") + ) + inside(status) { case NotAuthorised(message) => + message shouldBe "API key locked-down valid but not authorised for this request" } } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/auth/AuthenticationTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/auth/AuthenticationTest.scala index ddc0c1a305..36c144431f 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/auth/AuthenticationTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/auth/AuthenticationTest.scala @@ -2,7 +2,11 @@ package com.gu.mediaservice.lib.auth import akka.actor.ActorSystem import akka.stream.ActorMaterializer -import com.gu.mediaservice.lib.auth.Authentication.{MachinePrincipal, OnBehalfOfPrincipal, UserPrincipal} +import com.gu.mediaservice.lib.auth.Authentication.{ + MachinePrincipal, + OnBehalfOfPrincipal, + UserPrincipal +} import com.gu.mediaservice.lib.auth.provider.AuthenticationProvider.RedirectUri import com.gu.mediaservice.lib.auth.provider._ import com.gu.mediaservice.lib.config.{CommonConfig, TestProvider} @@ -13,7 +17,13 @@ import play.api.http.Status import play.api.libs.json.{Format, Json} import play.api.libs.typedmap.{TypedKey, TypedMap} import play.api.libs.ws.{DefaultWSCookie, WSRequest} -import play.api.mvc.{Cookie, DiscardingCookie, PlayBodyParsers, RequestHeader, Result} +import play.api.mvc.{ + Cookie, + 
DiscardingCookie, + PlayBodyParsers, + RequestHeader, + Result +} import play.api.test.Helpers.defaultAwaitTimeout import play.api.test.{FakeRequest, Helpers, WsTestClient} import play.libs.ws.WSCookie @@ -24,18 +34,34 @@ import scala.concurrent.Future import scala.util.Try //noinspection NotImplementedCode,SpellCheckingInspection -class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues with BeforeAndAfterAll { +class AuthenticationTest + extends AsyncFreeSpec + with Matchers + with EitherValues + with BeforeAndAfterAll { implicit val actorSystem: ActorSystem = ActorSystem() implicit val materializer: ActorMaterializer = ActorMaterializer() private val COOKIE_NAME = "TestGridAuth" private val HEADER_NAME = "X-TestMachine-Auth" - private case class AuthToken(firstName: String, lastName: String, email: String, expired: Boolean, veryExpired: Boolean) { + private case class AuthToken( + firstName: String, + lastName: String, + email: String, + expired: Boolean, + veryExpired: Boolean + ) { def user: UserPrincipal = UserPrincipal(firstName, lastName, email) } private implicit val cookieFormats: Format[AuthToken] = Json.format[AuthToken] - private def makeCookie(firstName: String = "Test", lastName: String = "User", email: String = "test@user", expired: Boolean = false, veryExpired: Boolean = false): Cookie = { + private def makeCookie( + firstName: String = "Test", + lastName: String = "User", + email: String = "test@user", + expired: Boolean = false, + veryExpired: Boolean = false + ): Cookie = { val data = AuthToken(firstName, lastName, email, expired, veryExpired) val value = Json.stringify(Json.toJson(data)) Cookie(COOKIE_NAME, value) @@ -44,7 +70,9 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w Try(Json.parse(cookie.value)).toOption.flatMap(_.asOpt[AuthToken]) } - def makeAuthenticationInstance(testProviders: AuthenticationProviders): Authentication = { + def makeAuthenticationInstance( + testProviders: AuthenticationProviders + ): Authentication = { val config = new CommonConfig(Configuration.load(Environment.simple())) {} new Authentication( config = config, @@ -57,36 +85,54 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w "authenticationStatus" - { val testProviders = AuthenticationProviders( new UserAuthenticationProvider { - override def authenticateRequest(request: RequestHeader): AuthenticationStatus = { + override def authenticateRequest( + request: RequestHeader + ): AuthenticationStatus = { request.cookies.get(COOKIE_NAME) match { case None => NotAuthenticated case Some(cookie) => parseCookie(cookie) match { case None => Invalid("Token not valid") - case Some(token@AuthToken(_, _, _, _, true)) => Expired(token.user) - case Some(token) if token.email == "test@user" => Authenticated(token.user) - case Some(token) => NotAuthorised(s"${token.email} not authorised") + case Some(token @ AuthToken(_, _, _, _, true)) => + Expired(token.user) + case Some(token) if token.email == "test@user" => + Authenticated(token.user) + case Some(token) => + NotAuthorised(s"${token.email} not authorised") } } } - override def sendForAuthentication: Option[RequestHeader => Future[Result]] = ??? - override def sendForAuthenticationCallback: Option[(RequestHeader, Option[RedirectUri]) => Future[Result]] = ??? 
- override def flushToken: Option[(RequestHeader, Result) => Result] = Some({(_: RequestHeader, result: Result) => - result.discardingCookies(DiscardingCookie(COOKIE_NAME)) - }) - override def onBehalfOf(request: Authentication.Principal): Either[String, WSRequest => WSRequest] = ??? + override def sendForAuthentication + : Option[RequestHeader => Future[Result]] = ??? + override def sendForAuthenticationCallback + : Option[(RequestHeader, Option[RedirectUri]) => Future[Result]] = + ??? + override def flushToken: Option[(RequestHeader, Result) => Result] = + Some({ (_: RequestHeader, result: Result) => + result.discardingCookies(DiscardingCookie(COOKIE_NAME)) + }) + override def onBehalfOf( + request: Authentication.Principal + ): Either[String, WSRequest => WSRequest] = ??? }, new MachineAuthenticationProvider { - override def authenticateRequest(request: RequestHeader): ApiAuthenticationStatus = { + override def authenticateRequest( + request: RequestHeader + ): ApiAuthenticationStatus = { request.headers.get(HEADER_NAME) match { case None => NotAuthenticated - case Some(key) if key.startsWith("key-") && key.endsWith("-blocked") => NotAuthorised(s"$key is blocked") - case Some(key) if key.startsWith("key-") => Authenticated(MachinePrincipal(ApiAccessor(key, Internal))) + case Some(key) + if key.startsWith("key-") && key.endsWith("-blocked") => + NotAuthorised(s"$key is blocked") + case Some(key) if key.startsWith("key-") => + Authenticated(MachinePrincipal(ApiAccessor(key, Internal))) case Some(_) => Invalid("Key doesn't start with 'key-'") } } - override def onBehalfOf(request: Authentication.Principal): Either[String, WSRequest => WSRequest] = ??? + override def onBehalfOf( + request: Authentication.Principal + ): Either[String, WSRequest => WSRequest] = ??? 
} ) @@ -96,7 +142,10 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w val authStatus = auth.authenticationStatus(FakeRequest()) authStatus.left.value.map { result => result.header.status shouldBe Status.UNAUTHORIZED - Helpers.contentAsJson(Future.successful(result)).\("errorKey").as[String] shouldBe "authentication-failure" + Helpers + .contentAsJson(Future.successful(result)) + .\("errorKey") + .as[String] shouldBe "authentication-failure" } } "should return invalid if the cookie is present but invalid" in { @@ -104,7 +153,10 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w val authStatus = auth.authenticationStatus(request) authStatus.left.value.map { result => result.header.status shouldBe Status.UNAUTHORIZED - Helpers.contentAsJson(Future.successful(result)).\("errorKey").as[String] shouldBe "authentication-failure" + Helpers + .contentAsJson(Future.successful(result)) + .\("errorKey") + .as[String] shouldBe "authentication-failure" } } "should return user when valid" in { @@ -122,7 +174,10 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w val authStatus = auth.authenticationStatus(request) authStatus.left.value.map { result => result.header.status shouldBe 419 - Helpers.contentAsJson(Future.successful(result)).\("errorKey").as[String] shouldBe "authentication-expired" + Helpers + .contentAsJson(Future.successful(result)) + .\("errorKey") + .as[String] shouldBe "authentication-expired" } } "should return forbidden when user is not authorised by provider" in { @@ -130,20 +185,28 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w val authStatus = auth.authenticationStatus(request) authStatus.left.value.map { result => result.header.status shouldBe Status.FORBIDDEN - Helpers.contentAsJson(Future.successful(result)).\("errorKey").as[String] shouldBe "principal-not-authorised" + Helpers + .contentAsJson(Future.successful(result)) + .\("errorKey") + .as[String] shouldBe "principal-not-authorised" } } "should authenticate with an API key" in { val request = FakeRequest().withHeaders(HEADER_NAME -> "key-client") val authStatus = auth.authenticationStatus(request) - authStatus.right.value shouldBe MachinePrincipal(ApiAccessor("key-client", Internal)) + authStatus.right.value shouldBe MachinePrincipal( + ApiAccessor("key-client", Internal) + ) } "should return unauthorised when the API key is garbage" in { val request = FakeRequest().withHeaders(HEADER_NAME -> "garbage") val authStatus = auth.authenticationStatus(request) authStatus.left.value.map { result => result.header.status shouldBe Status.UNAUTHORIZED - Helpers.contentAsJson(Future.successful(result)).\("errorKey").as[String] shouldBe "authentication-failure" + Helpers + .contentAsJson(Future.successful(result)) + .\("errorKey") + .as[String] shouldBe "authentication-failure" } } "should return forbidden if valid key is blocked" in { @@ -151,13 +214,20 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w val authStatus = auth.authenticationStatus(request) authStatus.left.value.map { result => result.header.status shouldBe Status.FORBIDDEN - Helpers.contentAsJson(Future.successful(result)).\("errorKey").as[String] shouldBe "principal-not-authorised" + Helpers + .contentAsJson(Future.successful(result)) + .\("errorKey") + .as[String] shouldBe "principal-not-authorised" } } "should prioritise machine authentication over user authentication" in { - val request = 
FakeRequest().withCookies(makeCookie()).withHeaders(HEADER_NAME -> "key-client") + val request = FakeRequest() + .withCookies(makeCookie()) + .withHeaders(HEADER_NAME -> "key-client") val authStatus = auth.authenticationStatus(request) - authStatus.right.value shouldBe MachinePrincipal(ApiAccessor("key-client", Internal)) + authStatus.right.value shouldBe MachinePrincipal( + ApiAccessor("key-client", Internal) + ) } } @@ -166,29 +236,59 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w val HeaderKey: TypedKey[(String, String)] = TypedKey("header-key") val testProviders = AuthenticationProviders( new UserAuthenticationProvider { - override def authenticateRequest(request: RequestHeader): AuthenticationStatus = ??? - override def sendForAuthentication: Option[RequestHeader => Future[Result]] = ??? - override def sendForAuthenticationCallback: Option[(RequestHeader, Option[RedirectUri]) => Future[Result]] = ??? + override def authenticateRequest( + request: RequestHeader + ): AuthenticationStatus = ??? + override def sendForAuthentication + : Option[RequestHeader => Future[Result]] = ??? + override def sendForAuthenticationCallback + : Option[(RequestHeader, Option[RedirectUri]) => Future[Result]] = + ??? override def flushToken: Option[(RequestHeader, Result) => Result] = ??? - override def onBehalfOf(request: Authentication.Principal): Either[String, WSRequest => WSRequest] = request match { - case UserPrincipal(_,_,_,attributes) if attributes.contains(CookieKey) => Right(req => req.addCookies(DefaultWSCookie(COOKIE_NAME, attributes.get(CookieKey).get.value))) - case UserPrincipal(_, _, email, _) => Left(s"Unable to build onBehalfOf function for $email") + override def onBehalfOf( + request: Authentication.Principal + ): Either[String, WSRequest => WSRequest] = request match { + case UserPrincipal(_, _, _, attributes) + if attributes.contains(CookieKey) => + Right(req => + req.addCookies( + DefaultWSCookie( + COOKIE_NAME, + attributes.get(CookieKey).get.value + ) + ) + ) + case UserPrincipal(_, _, email, _) => + Left(s"Unable to build onBehalfOf function for $email") } }, new MachineAuthenticationProvider { - override def authenticateRequest(request: RequestHeader): ApiAuthenticationStatus = ??? - override def onBehalfOf(request: Authentication.Principal): Either[String, WSRequest => WSRequest] = request match { - case MachinePrincipal(_, attributes) if attributes.contains(HeaderKey) => Right(req => req.addHttpHeaders(attributes.get(HeaderKey).get)) - case MachinePrincipal(ApiAccessor(identity, _), _) => Left(s"Unable to build onBehalfOf function for $identity") + override def authenticateRequest( + request: RequestHeader + ): ApiAuthenticationStatus = ??? 
+ override def onBehalfOf( + request: Authentication.Principal + ): Either[String, WSRequest => WSRequest] = request match { + case MachinePrincipal(_, attributes) + if attributes.contains(HeaderKey) => + Right(req => req.addHttpHeaders(attributes.get(HeaderKey).get)) + case MachinePrincipal(ApiAccessor(identity, _), _) => + Left(s"Unable to build onBehalfOf function for $identity") } } ) val auth: Authentication = makeAuthenticationInstance(testProviders) "return function for user principal" in { - val testUser = UserPrincipal("Test", "User", "test@user", TypedMap(CookieKey -> Cookie(COOKIE_NAME, "this is my cookie value"))) - val onBehalfOfFn: OnBehalfOfPrincipal = auth.getOnBehalfOfPrincipal(testUser) - WsTestClient.withClient{ client => + val testUser = UserPrincipal( + "Test", + "User", + "test@user", + TypedMap(CookieKey -> Cookie(COOKIE_NAME, "this is my cookie value")) + ) + val onBehalfOfFn: OnBehalfOfPrincipal = + auth.getOnBehalfOfPrincipal(testUser) + WsTestClient.withClient { client => val req = client.url("https://example.com") val modifiedReq = onBehalfOfFn(req) val maybeCookie = modifiedReq.cookies.find(_.name == COOKIE_NAME) @@ -200,15 +300,20 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w "fail to get function for user principal if the user doesn't have the cookie" in { val testUser = UserPrincipal("Test", "User", "test@user") - the [IllegalStateException] thrownBy { - val onBehalfOfFn: OnBehalfOfPrincipal = auth.getOnBehalfOfPrincipal(testUser) + the[IllegalStateException] thrownBy { + val onBehalfOfFn: OnBehalfOfPrincipal = + auth.getOnBehalfOfPrincipal(testUser) } should have message "Unable to build onBehalfOf function for test@user" } "return function for machine principal" in { - val apiAccessor = MachinePrincipal(ApiAccessor("my-client-id", Internal), TypedMap(HeaderKey -> (HEADER_NAME -> "my-client-id-key"))) - val onBehalfOfFn: OnBehalfOfPrincipal = auth.getOnBehalfOfPrincipal(apiAccessor) - WsTestClient.withClient{ client => + val apiAccessor = MachinePrincipal( + ApiAccessor("my-client-id", Internal), + TypedMap(HeaderKey -> (HEADER_NAME -> "my-client-id-key")) + ) + val onBehalfOfFn: OnBehalfOfPrincipal = + auth.getOnBehalfOfPrincipal(apiAccessor) + WsTestClient.withClient { client => val req = client.url("https://example.com") val modifiedReq = onBehalfOfFn(req) val maybeHeader = modifiedReq.headers.get(HEADER_NAME) @@ -219,8 +324,9 @@ class AuthenticationTest extends AsyncFreeSpec with Matchers with EitherValues w "fail to get function for user principal if the api accessor doesn't have the header" in { val apiAccessor = MachinePrincipal(ApiAccessor("my-client-id", Internal)) - the [IllegalStateException] thrownBy { - val onBehalfOfFn: OnBehalfOfPrincipal = auth.getOnBehalfOfPrincipal(apiAccessor) + the[IllegalStateException] thrownBy { + val onBehalfOfFn: OnBehalfOfPrincipal = + auth.getOnBehalfOfPrincipal(apiAccessor) } should have message "Unable to build onBehalfOf function for my-client-id" } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/aws/DynamoDBTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/aws/DynamoDBTest.scala index a37d2fc4f0..b715fb59ae 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/aws/DynamoDBTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/aws/DynamoDBTest.scala @@ -9,27 +9,37 @@ import play.api.libs.json.{Format, JsObject, Json} class DynamoDBTest extends FunSpec with Matchers { describe("jsonToValueMap") { - it ("should convert a 
simple JsObject to a valueMap") { - val json = Json.toJson(SimpleDynamoDBObj("this is a string", 100, true, List("list"))).as[JsObject] + it("should convert a simple JsObject to a valueMap") { + val json = Json + .toJson(SimpleDynamoDBObj("this is a string", 100, true, List("list"))) + .as[JsObject] val valueMap = DynamoDB.jsonToValueMap(json) - // This is the only way to get stuff type safely out of the valueMap // It's not a problem as we shoulnd't be doing this anywhere else val s: String = valueMap.get("s").asInstanceOf[String] val d: BigDecimal = valueMap.get("d").asInstanceOf[java.math.BigDecimal] val b: Boolean = valueMap.get("b").asInstanceOf[Boolean] - val a: List[String] = List(valueMap.get("a").asInstanceOf[java.util.ArrayList[String]].toArray(): _*).asInstanceOf[List[String]] - - - s should be ("this is a string") - d should be (100) + val a: List[String] = List( + valueMap + .get("a") + .asInstanceOf[java.util.ArrayList[String]] + .toArray(): _* + ).asInstanceOf[List[String]] + + s should be("this is a string") + d should be(100) b should equal(true) a should equal(List("list")) } - it ("should convert a nested JsObject to a valueMap") { - val nestedObj = NestedDynamoDBObj("string", 100, false, SimpleDynamoDBObj("strang", 500, true, List("list"))) + it("should convert a nested JsObject to a valueMap") { + val nestedObj = NestedDynamoDBObj( + "string", + 100, + false, + SimpleDynamoDBObj("strang", 500, true, List("list")) + ) val json = Json.toJson(nestedObj).as[JsObject] val valueMap = DynamoDB.jsonToValueMap(json) @@ -42,43 +52,64 @@ class DynamoDBTest extends FunSpec with Matchers { val s: String = simpleMap.get("s").asInstanceOf[String] val d: BigDecimal = simpleMap.get("d").asInstanceOf[java.math.BigDecimal] val b: Boolean = simpleMap.get("b").asInstanceOf[Boolean] - val a: List[String] = List(simpleMap.get("a").asInstanceOf[java.util.ArrayList[String]].toArray: _*).asInstanceOf[List[String]] - + val a: List[String] = List( + simpleMap.get("a").asInstanceOf[java.util.ArrayList[String]].toArray: _* + ).asInstanceOf[List[String]] - ss should be ("string") - dd should be (100) + ss should be("string") + dd should be(100) bb should equal(false) - s should be ("strang") - d should be (500) + s should be("strang") + d should be(500) b should equal(true) a should equal(List("list")) } - it ("should convert a Collection to ValueMap") { - val collection = Collection.build(List("g2", "art", "batik"), ActionData("mighty.mouse@guardian.co.uk", DateTime.now)) + it("should convert a Collection to ValueMap") { + val collection = Collection.build( + List("g2", "art", "batik"), + ActionData("mighty.mouse@guardian.co.uk", DateTime.now) + ) val json = Json.toJson(collection).as[JsObject] val valueMap = DynamoDB.jsonToValueMap(json) - val path = List(valueMap.get("path").asInstanceOf[java.util.ArrayList[String]].toArray: _*).asInstanceOf[List[String]] + val path = List( + valueMap + .get("path") + .asInstanceOf[java.util.ArrayList[String]] + .toArray: _* + ).asInstanceOf[List[String]] val pathId = valueMap.get("pathId").asInstanceOf[String] val actionData = valueMap.get("actionData").asInstanceOf[ValueMap] val author = actionData.get("author").asInstanceOf[String] - pathId should be (collection.pathId) - author should be (collection.actionData.author) - path should be (collection.path) + pathId should be(collection.pathId) + author should be(collection.actionData.author) + path should be(collection.path) } } } -case class SimpleDynamoDBObj(s: String, d: BigDecimal, b: Boolean, a: 
List[String]) +case class SimpleDynamoDBObj( + s: String, + d: BigDecimal, + b: Boolean, + a: List[String] +) object SimpleDynamoDBObj { - implicit def formats: Format[SimpleDynamoDBObj] = Json.format[SimpleDynamoDBObj] + implicit def formats: Format[SimpleDynamoDBObj] = + Json.format[SimpleDynamoDBObj] } -case class NestedDynamoDBObj(ss: String, dd: BigDecimal, bb: Boolean, simple: SimpleDynamoDBObj) +case class NestedDynamoDBObj( + ss: String, + dd: BigDecimal, + bb: Boolean, + simple: SimpleDynamoDBObj +) object NestedDynamoDBObj { - implicit def formats: Format[NestedDynamoDBObj] = Json.format[NestedDynamoDBObj] + implicit def formats: Format[NestedDynamoDBObj] = + Json.format[NestedDynamoDBObj] } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/aws/ThrallMessageSenderTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/aws/ThrallMessageSenderTest.scala index 1a1a578333..aa846efc90 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/aws/ThrallMessageSenderTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/aws/ThrallMessageSenderTest.scala @@ -10,21 +10,25 @@ class ThrallMessageSenderTest extends FunSpec with Matchers { describe("json to message and back") { // This is most interested for ensuring time zone correctness - it ("should convert a message to json and back again") { + it("should convert a message to json and back again") { val m = UpdateMessage(subject = "test") val j = Json.toJson(m).toString() val m2 = Json.parse(j).as[UpdateMessage] m2 shouldEqual m } - it ("should convert a message from an external source which does not have last modified") { + it( + "should convert a message from an external source which does not have last modified" + ) { val subject = "test" val j = s"""{"subject":"$subject"}""" val m = Json.parse(j).as[UpdateMessage] - m.lastModified.getZone.toString should be ("UTC") + m.lastModified.getZone.toString should be("UTC") } - it ("should convert a message last modified with an offset timezone to UTC") { + it( + "should convert a message last modified with an offset timezone to UTC" + ) { val now = DateTime.now(DateTimeZone.forOffsetHours(9)) val nowUtc = new DateTime(now.getMillis()).toDateTime(DateTimeZone.UTC) val m = UpdateMessage(subject = "test", lastModified = now) diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/AttributeCreditFromBylineTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/AttributeCreditFromBylineTest.scala index 45345b5a50..51359f5201 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/AttributeCreditFromBylineTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/AttributeCreditFromBylineTest.scala @@ -2,19 +2,24 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class AttributeCreditFromBylineTest extends FunSpec with Matchers with MetadataHelper { +class AttributeCreditFromBylineTest + extends FunSpec + with Matchers + with MetadataHelper { val bylines = List("Sigmund Loch") val testCleaner = AttributeCreditFromByline(bylines, "Some Credit") it("should set the credit if the byline matches the configured list") { val metadata = createImageMetadata("byline" -> "Sigmund Loch") - testCleaner.clean(metadata).credit should be (Some("Some Credit")) + testCleaner.clean(metadata).credit should be(Some("Some Credit")) } - it("should not set the credit if the byline doesn't matches the configured list") { + it( + "should not set the credit if the byline doesn't matches the 
configured list" + ) { val metadata = createImageMetadata("byline" -> "Someone else") - testCleaner.clean(metadata).credit should be (None) + testCleaner.clean(metadata).credit should be(None) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/BylineCreditReorganiseTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/BylineCreditReorganiseTest.scala index 6910176137..7dbbf5a8ef 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/BylineCreditReorganiseTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/BylineCreditReorganiseTest.scala @@ -2,84 +2,92 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class BylineCreditReorganiseTest extends FunSpec with Matchers with MetadataHelper { +class BylineCreditReorganiseTest + extends FunSpec + with Matchers + with MetadataHelper { - it ("should leave non matching, slashed credits") { + it("should leave non matching, slashed credits") { CreditByline("Ilyas Akengin", "AFP/Getty Images") - .whenCleaned("Ilyas Akengin", "AFP/Getty Images") + .whenCleaned("Ilyas Akengin", "AFP/Getty Images") } - it ("should remove spaces between slashes") { + it("should remove spaces between slashes") { CreditByline("Man /In /Suit", "Presseye/ INPHO /REX") - .whenCleaned("Man" , "In/Suit/Presseye/INPHO/REX") + .whenCleaned("Man", "In/Suit/Presseye/INPHO/REX") } - it ("should clean credits from byline but leave non-matching name") { + it("should clean credits from byline but leave non-matching name") { CreditByline("Ella/BPI/REX", "Ella Ling/BPI/REX") - .whenCleaned("Ella", "Ella Ling/BPI/REX") + .whenCleaned("Ella", "Ella Ling/BPI/REX") } - it ("should normalise via to slash") { + it("should normalise via to slash") { CreditByline("Philip Glass", "Anadolu Agency via Getty Images") .whenCleaned("Philip Glass", "Anadolu Agency/Getty Images") } - it ("should remove matching byline from credit in triple slash") { + it("should remove matching byline from credit in triple slash") { CreditByline("Ella Ling/BPI/REX", "Ella Ling/BPI/REX") - .whenCleaned("Ella Ling" , "BPI/REX") + .whenCleaned("Ella Ling", "BPI/REX") } - it ("should remove matching byline from double slash credit") { - CreditByline("Joe Newman / National Pictures", "Joe Newman / National Pictures") - .whenCleaned("Joe Newman" , "National Pictures") + it("should remove matching byline from double slash credit") { + CreditByline( + "Joe Newman / National Pictures", + "Joe Newman / National Pictures" + ) + .whenCleaned("Joe Newman", "National Pictures") } - it ("should remove the byline from credit if matching") { + it("should remove the byline from credit if matching") { CreditByline("Andy Rowland", "Andy Rowland/UK Sports Pics Ltd") - .whenCleaned("Andy Rowland", "UK Sports Pics Ltd") + .whenCleaned("Andy Rowland", "UK Sports Pics Ltd") } - it ("should remove the byline from credit if matching, via case") { + it("should remove the byline from credit if matching, via case") { CreditByline("Andy Rowland", "Andy Rowland via UK Sports Pics Ltd") .whenCleaned("Andy Rowland", "UK Sports Pics Ltd") } - it ("should return the same if matching") { - CreditByline("Barcroft Media", "Barcroft Media") + it("should return the same if matching") { + CreditByline("Barcroft Media", "Barcroft Media") .whenCleaned("Barcroft Media", "Barcroft Media") } - it ("should return the same if no slashes") { - CreditByline("Barcroft Media", "Philip Glass") + it("should return the same if no slashes") { + CreditByline("Barcroft 
Media", "Philip Glass") .whenCleaned("Barcroft Media", "Philip Glass") } - it ("should remove organisation from byline") { + it("should remove organisation from byline") { CreditByline("Philip Glass/Barcroft Media", "Barcroft Media") .whenCleaned("Philip Glass", "Barcroft Media") } - it ("should remove organisation from byline, via case") { + it("should remove organisation from byline, via case") { CreditByline("Philip Glass via Barcroft Media", "Barcroft Media") .whenCleaned("Philip Glass", "Barcroft Media") } - it ("should handle empty byline") { + it("should handle empty byline") { CreditByline("", "Barcroft Media") .whenCleaned("", "Barcroft Media") } - it ("should handle empty credit") { + it("should handle empty credit") { CreditByline("John Doe", "") .whenCleaned("John Doe", None) } - it ("should handle empty credit when byline has organisation names") { + it("should handle empty credit when byline has organisation names") { CreditByline("John Doe/BPI/REX", "") .whenCleaned("John Doe", "BPI/REX") } - it ("should handle empty credit when byline has organisation names, via case") { + it( + "should handle empty credit when byline has organisation names, via case" + ) { CreditByline("John Doe via BPI/REX", "") .whenCleaned("John Doe", "BPI/REX") } @@ -93,8 +101,8 @@ class BylineCreditReorganiseTest extends FunSpec with Matchers with MetadataHelp val cleanMetadata = BylineCreditReorganise.clean(metadata) - cleanMetadata.byline should be (Some(cByline)) - cleanMetadata.credit should be (cCredit) + cleanMetadata.byline should be(Some(cByline)) + cleanMetadata.credit should be(cCredit) } def whenCleaned(cByline: String, cCredit: String) = { @@ -104,10 +112,9 @@ class BylineCreditReorganiseTest extends FunSpec with Matchers with MetadataHelp ) val cleanMetadata = BylineCreditReorganise.clean(metadata) - cleanMetadata.byline should be (Some(cByline)) - cleanMetadata.credit should be (Some(cCredit)) + cleanMetadata.byline should be(Some(cByline)) + cleanMetadata.credit should be(Some(cCredit)) } } - } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CapitalisationFixerTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CapitalisationFixerTest.scala index 2b0f260680..05b7ecbeba 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CapitalisationFixerTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CapitalisationFixerTest.scala @@ -2,19 +2,22 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} - -class CapitalisationFixerTest extends FunSpec with Matchers with MetadataHelper with CapitalisationFixer { +class CapitalisationFixerTest + extends FunSpec + with Matchers + with MetadataHelper + with CapitalisationFixer { it("should capitalise single words") { - fixCapitalisation("NIGERIA") should be ("Nigeria") + fixCapitalisation("NIGERIA") should be("Nigeria") } it("should capitalise multiple words") { - fixCapitalisation("united states") should be ("United States") + fixCapitalisation("united states") should be("United States") } it("should capitalise hyphenated words") { - fixCapitalisation("JOUXTENS-MÉZERY") should be ("Jouxtens-Mézery") + fixCapitalisation("JOUXTENS-MÉZERY") should be("Jouxtens-Mézery") } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CapitaliseBylineTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CapitaliseBylineTest.scala index b6476e38fc..b450d43164 100644 --- 
a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CapitaliseBylineTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CapitaliseBylineTest.scala @@ -2,7 +2,6 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} - class CapitaliseBylineTest extends FunSpec with Matchers with MetadataHelper { it("should not apply capitalisation to single words") { @@ -60,7 +59,6 @@ class CapitaliseBylineTest extends FunSpec with Matchers with MetadataHelper { expectUnchanged("KGC-03") } - // Helpers def expectUnchanged(in: String): Unit = { @@ -68,11 +66,10 @@ class CapitaliseBylineTest extends FunSpec with Matchers with MetadataHelper { } def expectCleaned(in: String, out: String): Unit = { - val cleaned = CapitaliseByline.clean(createImageMetadata("byline" -> in)).byline + val cleaned = + CapitaliseByline.clean(createImageMetadata("byline" -> in)).byline - cleaned should be (Some(out)) + cleaned should be(Some(out)) } } - - diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CleanRubbishLocationTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CleanRubbishLocationTest.scala index fa3ee4b9f2..2d52510775 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CleanRubbishLocationTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CleanRubbishLocationTest.scala @@ -2,41 +2,45 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class CleanRubbishLocationTest extends FunSpec with Matchers with MetadataHelper { +class CleanRubbishLocationTest + extends FunSpec + with Matchers + with MetadataHelper { it("should not change a valid name") { - CleanRubbishLocation.cleanRubbish("Switzerland") should be (Some("Switzerland")) + CleanRubbishLocation.cleanRubbish("Switzerland") should be( + Some("Switzerland") + ) } it("should strip whitespace names") { - CleanRubbishLocation.cleanRubbish(" ") should be (None) + CleanRubbishLocation.cleanRubbish(" ") should be(None) } it("should strip '.' 
names") { - CleanRubbishLocation.cleanRubbish(".") should be (None) + CleanRubbishLocation.cleanRubbish(".") should be(None) } it("should strip '-' names") { - CleanRubbishLocation.cleanRubbish("-") should be (None) + CleanRubbishLocation.cleanRubbish("-") should be(None) } it("should strip '-' names with whitespace") { - CleanRubbishLocation.cleanRubbish(" - ") should be (None) + CleanRubbishLocation.cleanRubbish(" - ") should be(None) } - it("should clean all location fields") { val metadata = createImageMetadata( "subLocation" -> "-", - "city" -> "-", - "state" -> "-", - "country" -> "-" + "city" -> "-", + "state" -> "-", + "country" -> "-" ) val cleanedMetadata = CleanRubbishLocation.clean(metadata) - cleanedMetadata.subLocation should be (None) - cleanedMetadata.city should be (None) - cleanedMetadata.state should be (None) - cleanedMetadata.country should be (None) + cleanedMetadata.subLocation should be(None) + cleanedMetadata.city should be(None) + cleanedMetadata.state should be(None) + cleanedMetadata.country should be(None) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CountryCodeTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CountryCodeTest.scala index 31258e4443..4c994b0b3c 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CountryCodeTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/CountryCodeTest.scala @@ -2,58 +2,54 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} - class CountryCodeTest extends FunSpec with Matchers with MetadataHelper { it("should not change a correct country name") { - clean("Switzerland") should be (Some("Switzerland")) + clean("Switzerland") should be(Some("Switzerland")) } it("should not change an uppercase country name") { - clean("SWITZERLAND") should be (Some("SWITZERLAND")) + clean("SWITZERLAND") should be(Some("SWITZERLAND")) } it("should map a 2-letter country code to its name") { - clean("CH") should be (Some("Switzerland")) + clean("CH") should be(Some("Switzerland")) } it("should not change an invalid 2-letter country code") { - clean("XX") should be (Some("XX")) + clean("XX") should be(Some("XX")) } it("should map a 3-letter country code to its name") { - clean("CHN") should be (Some("China")) + clean("CHN") should be(Some("China")) } it("should not change an invalid 3-letter country code") { - clean("XXX") should be (Some("XXX")) + clean("XXX") should be(Some("XXX")) } // Exception: United Kingdom it("should map the UK country code to United Kingdom") { - clean("UK") should be (Some("United Kingdom")) + clean("UK") should be(Some("United Kingdom")) } it("should map the GB country code to United Kingdom") { - clean("GB") should be (Some("United Kingdom")) + clean("GB") should be(Some("United Kingdom")) } // Exception: United States it("should map the US country code to United States") { - clean("US") should be (Some("United States")) + clean("US") should be(Some("United States")) } it("should map the USA country code to United States") { - clean("USA") should be (Some("United States")) + clean("USA") should be(Some("United States")) } - def clean(country: String): Option[String] = { CountryCode.clean(createImageMetadata("country" -> country)).country } } - - diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/DropRedundantTitleTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/DropRedundantTitleTest.scala index 97414dcfc3..5ed2bf460a 100644 --- 
a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/DropRedundantTitleTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/DropRedundantTitleTest.scala @@ -5,28 +5,38 @@ import org.scalatest.{Matchers, FunSpec} class DropRedundantTitleTest extends FunSpec with Matchers with MetadataHelper { it("should be None if no title") { - val imageMetadata = createImageMetadata("description" -> "Brief description") - DropRedundantTitle.clean(imageMetadata).title should be (None) + val imageMetadata = + createImageMetadata("description" -> "Brief description") + DropRedundantTitle.clean(imageMetadata).title should be(None) } it("should be the title if no description") { val imageMetadata = createImageMetadata("title" -> "Brief title") - DropRedundantTitle.clean(imageMetadata).title should be (Some("Brief title")) + DropRedundantTitle.clean(imageMetadata).title should be(Some("Brief title")) } it("should be the title if not a prefix of the description") { - val imageMetadata = createImageMetadata("title" -> "Brief title", "description" -> "Brief description") - DropRedundantTitle.clean(imageMetadata).title should be (Some("Brief title")) + val imageMetadata = createImageMetadata( + "title" -> "Brief title", + "description" -> "Brief description" + ) + DropRedundantTitle.clean(imageMetadata).title should be(Some("Brief title")) } it("should be None if exactly the description") { - val imageMetadata = createImageMetadata("title" -> "Brief title", "description" -> "Brief title") - DropRedundantTitle.clean(imageMetadata).title should be (None) + val imageMetadata = createImageMetadata( + "title" -> "Brief title", + "description" -> "Brief title" + ) + DropRedundantTitle.clean(imageMetadata).title should be(None) } it("should be None if a prefix of the description") { - val imageMetadata = createImageMetadata("title" -> "Brief title", "description" -> "Brief title. Also more description.") - DropRedundantTitle.clean(imageMetadata).title should be (None) + val imageMetadata = createImageMetadata( + "title" -> "Brief title", + "description" -> "Brief title. Also more description." 
+ ) + DropRedundantTitle.clean(imageMetadata).title should be(None) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/ExtractGuardianCreditFromBylineTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/ExtractGuardianCreditFromBylineTest.scala index 70c646be47..72bce2235a 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/ExtractGuardianCreditFromBylineTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/ExtractGuardianCreditFromBylineTest.scala @@ -2,69 +2,97 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class ExtractGuardianCreditFromBylineTest extends FunSpec with Matchers with MetadataHelper { +class ExtractGuardianCreditFromBylineTest + extends FunSpec + with Matchers + with MetadataHelper { it("should not infer any credit from a plain byline") { - val metadata = createImageMetadata("byline" -> "Helmut Schon", "credit" -> "Getty Images") + val metadata = createImageMetadata( + "byline" -> "Helmut Schon", + "credit" -> "Getty Images" + ) val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Helmut Schon")) - mappedMetadata.credit should be (Some("Getty Images")) + mappedMetadata.byline should be(Some("Helmut Schon")) + mappedMetadata.credit should be(Some("Getty Images")) } it("should extract a Guardian credit from a 'for the Guardian' byline") { - val metadata = createImageMetadata("byline" -> "Helmut Schon for the Guardian") + val metadata = + createImageMetadata("byline" -> "Helmut Schon for the Guardian") val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Helmut Schon")) - mappedMetadata.credit should be (Some("The Guardian")) + mappedMetadata.byline should be(Some("Helmut Schon")) + mappedMetadata.credit should be(Some("The Guardian")) } - it("should extract a Guardian credit from a 'for The GUARDIAN' byline with bad capitalisation") { - val metadata = createImageMetadata("byline" -> "Helmut Schon for The GUARDIAN") + it( + "should extract a Guardian credit from a 'for The GUARDIAN' byline with bad capitalisation" + ) { + val metadata = + createImageMetadata("byline" -> "Helmut Schon for The GUARDIAN") val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Helmut Schon")) - mappedMetadata.credit should be (Some("The Guardian")) + mappedMetadata.byline should be(Some("Helmut Schon")) + mappedMetadata.credit should be(Some("The Guardian")) } - it("should extract a Guardian credit from a 'for the Guardian.' byline with trailing dot") { - val metadata = createImageMetadata("byline" -> "Helmut Schon for the Guardian.") + it( + "should extract a Guardian credit from a 'for the Guardian.' 
byline with trailing dot" + ) { + val metadata = + createImageMetadata("byline" -> "Helmut Schon for the Guardian.") val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Helmut Schon")) - mappedMetadata.credit should be (Some("The Guardian")) + mappedMetadata.byline should be(Some("Helmut Schon")) + mappedMetadata.credit should be(Some("The Guardian")) } - it("should extract a Guardian credit from a 'for the Guardian' byline and override any existing one") { - val metadata = createImageMetadata("byline" -> "Helmut Schon for the Guardian", "credit" -> "Whatever") + it( + "should extract a Guardian credit from a 'for the Guardian' byline and override any existing one" + ) { + val metadata = createImageMetadata( + "byline" -> "Helmut Schon for the Guardian", + "credit" -> "Whatever" + ) val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Helmut Schon")) - mappedMetadata.credit should be (Some("The Guardian")) + mappedMetadata.byline should be(Some("Helmut Schon")) + mappedMetadata.credit should be(Some("The Guardian")) } - it("should extract an Observer credit from a 'for the Observer' byline and override any existing one") { - val metadata = createImageMetadata("byline" -> "Helmut Schon for the Observer", "credit" -> "Whatever") + it( + "should extract an Observer credit from a 'for the Observer' byline and override any existing one" + ) { + val metadata = createImageMetadata( + "byline" -> "Helmut Schon for the Observer", + "credit" -> "Whatever" + ) val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Helmut Schon")) - mappedMetadata.credit should be (Some("The Observer")) + mappedMetadata.byline should be(Some("Helmut Schon")) + mappedMetadata.credit should be(Some("The Observer")) } it("should extract a truncated Guardian credit from a 'for the Gua' byline") { - val metadata = createImageMetadata("byline" -> "Christopher Thomond for the Gua") + val metadata = + createImageMetadata("byline" -> "Christopher Thomond for the Gua") val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Christopher Thomond")) - mappedMetadata.credit should be (Some("The Guardian")) + mappedMetadata.byline should be(Some("Christopher Thomond")) + mappedMetadata.credit should be(Some("The Guardian")) } it("should extract a truncated Observer credit from a 'for the O' byline") { - val metadata = createImageMetadata("byline" -> "Christopher Thomondxx for the O") + val metadata = + createImageMetadata("byline" -> "Christopher Thomondxx for the O") val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Christopher Thomondxx")) - mappedMetadata.credit should be (Some("The Observer")) + mappedMetadata.byline should be(Some("Christopher Thomondxx")) + mappedMetadata.credit should be(Some("The Observer")) } - it("should not extract a truncated non-Guardian credit from a 'for the Garden' byline") { - val metadata = createImageMetadata("byline" -> "Christopher Thom for the Garden") + it( + "should not extract a truncated non-Guardian credit from a 'for the Garden' byline" + ) { + val metadata = + createImageMetadata("byline" -> "Christopher Thom for the Garden") val mappedMetadata = ExtractGuardianCreditFromByline.clean(metadata) - mappedMetadata.byline should be (Some("Christopher Thom for the Garden")) - mappedMetadata.credit should be (None) 
+ mappedMetadata.byline should be(Some("Christopher Thom for the Garden")) + mappedMetadata.credit should be(None) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/GuardianStyleBylineTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/GuardianStyleBylineTest.scala index d0de2f7f7f..31dd5c1037 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/GuardianStyleBylineTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/GuardianStyleBylineTest.scala @@ -2,19 +2,22 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class GuardianStyleBylineTest extends FunSpec with Matchers with MetadataHelper { +class GuardianStyleBylineTest + extends FunSpec + with Matchers + with MetadataHelper { it("should replace straight quotes with curly quotes") { val metadata = createImageMetadata("byline" -> "Sam O'neill") val cleanedMetadata = GuardianStyleByline.clean(metadata) - cleanedMetadata.byline should be (Some("Sam O’neill")) + cleanedMetadata.byline should be(Some("Sam O’neill")) } it("should remove dots in initials") { val metadata = createImageMetadata("byline" -> "First M. Last") val cleanedMetadata = GuardianStyleByline.clean(metadata) - cleanedMetadata.byline should be (Some("First M Last")) + cleanedMetadata.byline should be(Some("First M Last")) } it("should remove dots in initials and insert spaces in unusual cases") { diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/InitialJoinerBylineTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/InitialJoinerBylineTest.scala index 44502aa27e..67bdfe760b 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/InitialJoinerBylineTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/InitialJoinerBylineTest.scala @@ -2,7 +2,10 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class InitialJoinerBylineTest extends FunSpec with Matchers with MetadataHelper { +class InitialJoinerBylineTest + extends FunSpec + with Matchers + with MetadataHelper { it("should squish initials together at the start") { val metadata = createImageMetadata("byline" -> "C P Scott") val cleanedMetadata = InitialJoinerByline.clean(metadata) @@ -24,14 +27,18 @@ class InitialJoinerBylineTest extends FunSpec with Matchers with MetadataHelper cleanedMetadata.byline should be(Some("First AB")) } - it("should not squish together if it's actually part of a name, with straight quote") { + it( + "should not squish together if it's actually part of a name, with straight quote" + ) { val metadata = createImageMetadata("byline" -> "First A D'Last") val cleanedMetadata = InitialJoinerByline.clean(metadata) cleanedMetadata.byline should be(Some("First A D'Last")) } - it("should not squish together if it's actually part of a name, with curly quote") { + it( + "should not squish together if it's actually part of a name, with curly quote" + ) { val metadata = createImageMetadata("byline" -> "First A D’Last") val cleanedMetadata = InitialJoinerByline.clean(metadata) diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/MetadataHelper.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/MetadataHelper.scala index c970890a2e..b4cd34f11a 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/MetadataHelper.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/MetadataHelper.scala @@ -18,7 +18,8 @@ trait MetadataHelper { 
lastModified = None, identifiers = Map(), uploadInfo = UploadInfo(), - source = Asset(URI.create("http://example.com/image.jpg"), Some(0), None, None), + source = + Asset(URI.create("http://example.com/image.jpg"), Some(0), None, None), thumbnail = None, optimisedPng = None, fileMetadata = FileMetadata(), @@ -36,20 +37,20 @@ trait MetadataHelper { def createImageMetadata(metadata: Map[String, String]): ImageMetadata = ImageMetadata( - dateTaken = None, - description = metadata.get("description"), - credit = metadata.get("credit"), - byline = metadata.get("byline"), - bylineTitle = metadata.get("bylineTitle"), - title = metadata.get("title"), - copyright = metadata.get("copyright"), - suppliersReference = metadata.get("suppliersReference"), - source = metadata.get("source"), + dateTaken = None, + description = metadata.get("description"), + credit = metadata.get("credit"), + byline = metadata.get("byline"), + bylineTitle = metadata.get("bylineTitle"), + title = metadata.get("title"), + copyright = metadata.get("copyright"), + suppliersReference = metadata.get("suppliersReference"), + source = metadata.get("source"), specialInstructions = metadata.get("specialInstructions"), - keywords = List(), - subLocation = metadata.get("subLocation"), - city = metadata.get("city"), - state = metadata.get("state"), - country = metadata.get("country"), + keywords = List(), + subLocation = metadata.get("subLocation"), + city = metadata.get("city"), + state = metadata.get("state"), + country = metadata.get("country") ) } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/PhotographerRenamerTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/PhotographerRenamerTest.scala index 591d1d44fb..b3d774cd93 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/PhotographerRenamerTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/PhotographerRenamerTest.scala @@ -2,24 +2,28 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class PhotographerRenamerTest extends FunSpec with Matchers with MetadataHelper { +class PhotographerRenamerTest + extends FunSpec + with Matchers + with MetadataHelper { it("should rename a known misspelled byline") { val metadata = createImageMetadata("byline" -> "Czarek Sokolowski") val cleanedMetadata = PhotographerRenamer.clean(metadata) - cleanedMetadata.byline should be (Some("Czarek Sokołowski")) + cleanedMetadata.byline should be(Some("Czarek Sokołowski")) } it("should not rename an unknown byline") { val metadata = createImageMetadata("byline" -> "Sam Cutler") val cleanedMetadata = PhotographerRenamer.clean(metadata) - cleanedMetadata.byline should be (Some("Sam Cutler")) + cleanedMetadata.byline should be(Some("Sam Cutler")) } it("should leave byline alone if the match is not exact") { - val metadata = createImageMetadata("byline" -> "Czarek Sokolowski/Agencja Gazeta") + val metadata = + createImageMetadata("byline" -> "Czarek Sokolowski/Agencja Gazeta") val cleanedMetadata = PhotographerRenamer.clean(metadata) - cleanedMetadata.byline should be (Some("Czarek Sokolowski/Agencja Gazeta")) + cleanedMetadata.byline should be(Some("Czarek Sokolowski/Agencja Gazeta")) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/RedundantTokenRemoverTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/RedundantTokenRemoverTest.scala index f338a84270..015f427886 100644 ---
a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/RedundantTokenRemoverTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/RedundantTokenRemoverTest.scala @@ -2,37 +2,44 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class RedundantTokenRemoverTest extends FunSpec with Matchers with MetadataHelper { +class RedundantTokenRemoverTest + extends FunSpec + with Matchers + with MetadataHelper { // We've seen "/", " via " and " / " in the wild so test with both val separators = List("/", " / ", " via ") separators.foreach { s => - it (s"Remove redundant byline, keep redundant credit - '$s' separator") { + it(s"Remove redundant byline, keep redundant credit - '$s' separator") { BylineCredit("HANDOUT", "HANDOUT") .whenCleaned(None, "HANDOUT") } - it (s"Clean partially redundant byline, clean partially redundant credit - '$s' separator") { + it( + s"Clean partially redundant byline, clean partially redundant credit - '$s' separator" + ) { BylineCredit(s"HANDOUT${s}Byline", s"POOL${s}Credit") .whenCleaned(Some("Byline"), "Credit") } - it (s"Keep good byline, clean partially redundant credit - '$s' separator") { + it(s"Keep good byline, clean partially redundant credit - '$s' separator") { BylineCredit("Byline", s"POOL${s}Credit") .whenCleaned(Some("Byline"), "Credit") } - it (s"Keep good byline, simplify redundant credit (use the rightmost) - '$s' separator") { + it( + s"Keep good byline, simplify redundant credit (use the rightmost) - '$s' separator" + ) { BylineCredit("Byline", s"POOL${s}HANDOUT") .whenCleaned(Some("Byline"), "HANDOUT") } - it (s"Remove redundant byline, keep good credit - '$s' separator") { + it(s"Remove redundant byline, keep good credit - '$s' separator") { BylineCredit("HANDOUT", "Credit") .whenCleaned(None, "Credit") } - it (s"Keep good byline, keep good credit - '$s' separator") { + it(s"Keep good byline, keep good credit - '$s' separator") { BylineCredit("Byline", "Credit") .whenCleaned(Some("Byline"), "Credit") } @@ -46,8 +53,8 @@ class RedundantTokenRemoverTest extends FunSpec with Matchers with MetadataHelpe ) val cleanMetadata = RedundantTokenRemover.clean(metadata) - cleanMetadata.byline should be (cByline) - cleanMetadata.credit should be (Some(cCredit)) + cleanMetadata.byline should be(cByline) + cleanMetadata.credit should be(Some(cCredit)) } } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/StripCopyrightPrefixTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/StripCopyrightPrefixTest.scala index dfef232113..fef0146abb 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/StripCopyrightPrefixTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/StripCopyrightPrefixTest.scala @@ -2,70 +2,76 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class StripCopyrightPrefixTest extends FunSpec with Matchers with MetadataHelper { +class StripCopyrightPrefixTest + extends FunSpec + with Matchers + with MetadataHelper { it("should leave empty copyright empty") { val metadata = createImageMetadata() val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.copyright should be (None) + cleanedMetadata.copyright should be(None) } it("should leave unprefixed byline as-is") { val metadata = createImageMetadata("credit" -> "Acme Corporation") val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.credit should be (Some("Acme Corporation")) + 
cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should strip any copyright symbol prefix") { val metadata = createImageMetadata("credit" -> "© Acme Corporation") val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.credit should be (Some("Acme Corporation")) + cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should strip any Copyright text prefix") { val metadata = createImageMetadata("credit" -> "Copyright Acme Corporation") val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.credit should be (Some("Acme Corporation")) + cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should strip lowercase copyright prefix") { val metadata = createImageMetadata("credit" -> "copyright Acme Corporation") val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.credit should be (Some("Acme Corporation")) + cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should strip Copyright of prefix") { - val metadata = createImageMetadata("credit" -> "Copyright of Acme Corporation") + val metadata = + createImageMetadata("credit" -> "Copyright of Acme Corporation") val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.credit should be (Some("Acme Corporation")) + cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should strip copyright followed by colon prefix") { - val metadata = createImageMetadata("credit" -> "Copyright : Acme Corporation") + val metadata = + createImageMetadata("credit" -> "Copyright : Acme Corporation") val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.credit should be (Some("Acme Corporation")) + cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should strip any (c) prefix") { val metadata = createImageMetadata("credit" -> "(c) Acme Corporation") val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.credit should be (Some("Acme Corporation")) + cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should strip a combination of copyright prefixes") { - val metadata = createImageMetadata("credit" -> "Copyright (c) Acme Corporation") + val metadata = + createImageMetadata("credit" -> "Copyright (c) Acme Corporation") val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.credit should be (Some("Acme Corporation")) + cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should strip these from byline and credit") { val metadata = createImageMetadata( "byline" -> "© Acme Corporation", - "credit" -> "© Acme Corporation", + "credit" -> "© Acme Corporation" ) val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.byline should be (Some("Acme Corporation")) - cleanedMetadata.credit should be (Some("Acme Corporation")) + cleanedMetadata.byline should be(Some("Acme Corporation")) + cleanedMetadata.credit should be(Some("Acme Corporation")) } it("should leave these in fields like description") { @@ -73,7 +79,7 @@ class StripCopyrightPrefixTest extends FunSpec with Matchers with MetadataHelper "description" -> "© Acme Corporation" ) val cleanedMetadata = StripCopyrightPrefix.clean(metadata) - cleanedMetadata.description should be (Some("© Acme Corporation")) + cleanedMetadata.description should be(Some("© Acme Corporation")) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/SupplierProcessorsTest.scala 
b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/SupplierProcessorsTest.scala index 9503e36c64..8dee7c5acf 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/SupplierProcessorsTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/SupplierProcessorsTest.scala @@ -10,35 +10,43 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { val processedImage = applyProcessors(image) processedImage.usageRights should be(NoRights) - processedImage.metadata.credit should be (None) + processedImage.metadata.credit should be(None) } - it("should leave supplier and suppliersCollection empty if credit doesn't match") { + it( + "should leave supplier and suppliersCollection empty if credit doesn't match" + ) { val image = createImageFromMetadata("credit" -> "Unknown Party") val processedImage = applyProcessors(image) - processedImage.usageRights should be (NoRights) - processedImage.metadata.credit should be (Some("Unknown Party")) + processedImage.usageRights should be(NoRights) + processedImage.metadata.credit should be(Some("Unknown Party")) } describe("Photographer") { it("should match StaffPhotographer byline") { val image = createImageFromMetadata("byline" -> "Graham Turner") val processedImage = applyProcessors(image) - processedImage.usageRights should be(StaffPhotographer("Graham Turner", "The Guardian")) + processedImage.usageRights should be( + StaffPhotographer("Graham Turner", "The Guardian") + ) processedImage.metadata.credit should be(Some("The Guardian")) } it("should match ContractPhotographer byline") { val image = createImageFromMetadata("byline" -> "Linda Nylind") val processedImage = applyProcessors(image) - processedImage.usageRights should be(ContractPhotographer("Linda Nylind", Option("The Guardian"))) + processedImage.usageRights should be( + ContractPhotographer("Linda Nylind", Option("The Guardian")) + ) processedImage.metadata.credit should be(Some("The Guardian")) } - it ("should correct casing of photographer") { + it("should correct casing of photographer") { val image = createImageFromMetadata("byline" -> "Murdo MacLeod") val processedImage = applyProcessors(image) - processedImage.usageRights should be(ContractPhotographer("Murdo MacLeod", Option("The Guardian"))) + processedImage.usageRights should be( + ContractPhotographer("Murdo MacLeod", Option("The Guardian")) + ) processedImage.metadata.byline should be(Some("Murdo MacLeod")) } } @@ -52,7 +60,6 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } } - describe("Action Images") { it("should match Action Images credit") { val image = createImageFromMetadata("credit" -> "Action Images") @@ -75,29 +82,37 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { it("should match 'Alamy' credit") { val image = createImageFromMetadata("credit" -> "Alamy") val processedImage = applyProcessors(image) - processedImage.usageRights should be (Agency("Alamy")) + processedImage.usageRights should be(Agency("Alamy")) processedImage.metadata.credit should be(Some("Alamy")) } - it("should match 'Alamy Stock Photo' credit, and replace 'Alamy Stock Photo' with 'Alamy'") { + it( + "should match 'Alamy Stock Photo' credit, and replace 'Alamy Stock Photo' with 'Alamy'" + ) { val image = createImageFromMetadata("credit" -> "Alamy Stock Photo") val processedImage = applyProcessors(image) - processedImage.usageRights should be (Agency("Alamy")) + processedImage.usageRights should be(Agency("Alamy")) 
processedImage.metadata.credit should be(Some("Alamy")) } it("should match credit with Alamy as a suffix with '/'") { - val image = createImageFromMetadata("credit" -> "Prod.DB/Alamy Stock Photo") + val image = + createImageFromMetadata("credit" -> "Prod.DB/Alamy Stock Photo") val processedImage = applyProcessors(image) - processedImage.usageRights should be (Agency("Alamy")) + processedImage.usageRights should be(Agency("Alamy")) processedImage.metadata.credit should be(Some("Prod.DB/Alamy")) } - it("should not match credit with Alamy when the credit contains 'Alamy Live News', because we only have rights after 48 hours, and there's no provision to add a 'deny' lease for that period yet") { - val image = createImageFromMetadata("credit" -> "Alamy Live News/Alamy Live News") + it( + "should not match credit with Alamy when the credit contains 'Alamy Live News', because we only have rights after 48 hours, and there's no provision to add a 'deny' lease for that period yet" + ) { + val image = + createImageFromMetadata("credit" -> "Alamy Live News/Alamy Live News") val processedImage = applyProcessors(image) - processedImage.usageRights should be (NoRights) - processedImage.metadata.credit should be(Some("Alamy Live News/Alamy Live News")) + processedImage.usageRights should be(NoRights) + processedImage.metadata.credit should be( + Some("Alamy Live News/Alamy Live News") + ) } } @@ -105,40 +120,56 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { it("should match 'Allstar Picture Library' credit") { val image = createImageFromMetadata("credit" -> "Allstar Picture Library") val processedImage = applyProcessors(image) - processedImage.usageRights should be (Agency("Allstar Picture Library")) + processedImage.usageRights should be(Agency("Allstar Picture Library")) processedImage.metadata.credit should be(Some("Allstar Picture Library")) } it("should match 'Sportsphoto Ltd./Allstar' credit") { - val image = createImageFromMetadata("credit" -> "Sportsphoto Ltd./Allstar") + val image = + createImageFromMetadata("credit" -> "Sportsphoto Ltd./Allstar") val processedImage = applyProcessors(image) - processedImage.usageRights should be (Agency("Allstar Picture Library", Some("Sportsphoto Ltd."))) + processedImage.usageRights should be( + Agency("Allstar Picture Library", Some("Sportsphoto Ltd.")) + ) processedImage.metadata.credit should be(Some("Sportsphoto Ltd./Allstar")) } it("should match 'Allstar/UNIVERSAL' credit") { val image = createImageFromMetadata("credit" -> "Allstar/UNIVERSAL") val processedImage = applyProcessors(image) - processedImage.usageRights should be (Agency("Allstar Picture Library", Some("UNIVERSAL"))) + processedImage.usageRights should be( + Agency("Allstar Picture Library", Some("UNIVERSAL")) + ) processedImage.metadata.credit should be(Some("Allstar/UNIVERSAL")) } - it("should strip redundant byline but use it as canonical casing for credit") { - val image = createImageFromMetadata("credit" -> "Allstar/UNIVERSAL PICTURES", "byline" -> "Universal Pictures") - val processedImage = applyProcessors(image) - processedImage.usageRights should be (Agency("Allstar Picture Library", Some("Universal Pictures"))) - processedImage.metadata.credit should be(Some("Allstar/Universal Pictures")) + it( + "should strip redundant byline but use it as canonical casing for credit" + ) { + val image = createImageFromMetadata( + "credit" -> "Allstar/UNIVERSAL PICTURES", + "byline" -> "Universal Pictures" + ) + val processedImage = applyProcessors(image) + 
processedImage.usageRights should be( + Agency("Allstar Picture Library", Some("Universal Pictures")) + ) + processedImage.metadata.credit should be( + Some("Allstar/Universal Pictures") + ) processedImage.metadata.byline should be(None) } it("should strip '___/Allstar' suffix from byline") { - val image = createImageFromMetadata("credit" -> "Sportsphoto Ltd./Allstar", "byline" -> "David Gadd/Allstar") + val image = createImageFromMetadata( + "credit" -> "Sportsphoto Ltd./Allstar", + "byline" -> "David Gadd/Allstar" + ) val processedImage = applyProcessors(image) processedImage.metadata.byline should be(Some("David Gadd")) } } - describe("AP") { it("should match AP credit") { val image = createImageFromMetadata("credit" -> "AP") @@ -183,10 +214,12 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } } - describe("Corbis") { it("should match Corbis source") { - val image = createImageFromMetadata("credit" -> "Demotix/Corbis", "source" -> "Corbis") + val image = createImageFromMetadata( + "credit" -> "Demotix/Corbis", + "source" -> "Corbis" + ) val processedImage = applyProcessors(image) processedImage.usageRights should be(Agency("Corbis")) processedImage.metadata.credit should be(Some("Demotix/Corbis")) @@ -194,7 +227,6 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } } - describe("EPA") { it("should match EPA credit") { val image = createImageFromMetadata("credit" -> "EPA") @@ -203,34 +235,51 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } } - describe("Getty Images") { - it("should detect getty file metadata and use source as suppliersCollection") { - val image = createImageFromMetadata("credit" -> "AFP/Getty", "source" -> "AFP") - val gettyImage = image.copy(fileMetadata = FileMetadata(getty = Map("Original Filename" -> "lol.jpg"))) + it( + "should detect getty file metadata and use source as suppliersCollection" + ) { + val image = + createImageFromMetadata("credit" -> "AFP/Getty", "source" -> "AFP") + val gettyImage = image.copy(fileMetadata = + FileMetadata(getty = Map("Original Filename" -> "lol.jpg")) + ) val processedImage = applyProcessors(gettyImage) processedImage.usageRights should be(Agency("Getty Images", Some("AFP"))) processedImage.metadata.credit should be(Some("AFP/Getty")) processedImage.metadata.source should be(Some("AFP")) } - it("should exclude images that have Getty metadata that aren't from Getty") { + it( + "should exclude images that have Getty metadata that aren't from Getty" + ) { val image = createImageFromMetadata("credit" -> "NEWSPIX INTERNATIONAL") - val notGettyImage = image.copy(fileMetadata = FileMetadata(getty = Map("Composition" -> "Headshot"))) + val notGettyImage = image.copy(fileMetadata = + FileMetadata(getty = Map("Composition" -> "Headshot")) + ) val processedImage = applyProcessors(notGettyImage) processedImage.usageRights should be(NoRights) } - it("should exclude images that have Getty metadata that also have 'Pinnacle Photo Agency Ltd' as source") { - val image = createImageFromMetadata("source" -> "Pinnacle Photo Agency Ltd") - val notGettyImage = image.copy(fileMetadata = FileMetadata(getty = Map("dummy" -> "metadata"))) + it( + "should exclude images that have Getty metadata that also have 'Pinnacle Photo Agency Ltd' as source" + ) { + val image = + createImageFromMetadata("source" -> "Pinnacle Photo Agency Ltd") + val notGettyImage = image.copy(fileMetadata = + FileMetadata(getty = Map("dummy" -> "metadata")) + ) val processedImage 
= applyProcessors(notGettyImage) processedImage.usageRights should be(NoRights) } - it("should use 'Getty Images' as credit if missing from the file metadata") { + it( + "should use 'Getty Images' as credit if missing from the file metadata" + ) { val image = createImageFromMetadata() - val gettyImage = image.copy(fileMetadata = FileMetadata(getty = Map("Original Filename" -> "lol.jpg"))) + val gettyImage = image.copy(fileMetadata = + FileMetadata(getty = Map("Original Filename" -> "lol.jpg")) + ) val processedImage = applyProcessors(gettyImage) processedImage.metadata.credit should be(Some("Getty Images")) } @@ -251,37 +300,66 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { // Truncation FTW! it("should match 'The LIFE Images Collection/Getty' credit") { - val image = createImageFromMetadata("credit" -> "The LIFE Images Collection/Getty", "source" -> "The LIFE Images Collection") + val image = createImageFromMetadata( + "credit" -> "The LIFE Images Collection/Getty", + "source" -> "The LIFE Images Collection" + ) val processedImage = applyProcessors(image) - processedImage.usageRights should be(Agency("Getty Images", Some("The LIFE Images Collection"))) - processedImage.metadata.credit should be(Some("The LIFE Images Collection/Getty")) + processedImage.usageRights should be( + Agency("Getty Images", Some("The LIFE Images Collection")) + ) + processedImage.metadata.credit should be( + Some("The LIFE Images Collection/Getty") + ) } it("should match 'Getty Images/Ikon Images' credit") { - val image = createImageFromMetadata("credit" -> "Getty Images/Ikon Images", "source" -> "Ikon Images") - val processedImage = applyProcessors(image) - processedImage.usageRights should be(Agency("Getty Images", Some("Ikon Images"))) + val image = createImageFromMetadata( + "credit" -> "Getty Images/Ikon Images", + "source" -> "Ikon Images" + ) + val processedImage = applyProcessors(image) + processedImage.usageRights should be( + Agency("Getty Images", Some("Ikon Images")) + ) processedImage.metadata.credit should be(Some("Getty Images/Ikon Images")) } it("should match 'Bloomberg/Getty Images' credit") { - val image = createImageFromMetadata("credit" -> "Bloomberg/Getty Images", "source" -> "Bloomberg") - val processedImage = applyProcessors(image) - processedImage.usageRights should be(Agency("Getty Images", Some("Bloomberg"))) + val image = createImageFromMetadata( + "credit" -> "Bloomberg/Getty Images", + "source" -> "Bloomberg" + ) + val processedImage = applyProcessors(image) + processedImage.usageRights should be( + Agency("Getty Images", Some("Bloomberg")) + ) processedImage.metadata.credit should be(Some("Bloomberg/Getty Images")) } it("should match 'Some Long Provider/Getty Im' credit") { - val image = createImageFromMetadata("credit" -> "Some Long Provider/Getty Im", "source" -> "Some Long Provider") + val image = createImageFromMetadata( + "credit" -> "Some Long Provider/Getty Im", + "source" -> "Some Long Provider" + ) val processedImage = applyProcessors(image) - processedImage.usageRights should be(Agency("Getty Images", Some("Some Long Provider"))) - processedImage.metadata.credit should be(Some("Some Long Provider/Getty Im")) + processedImage.usageRights should be( + Agency("Getty Images", Some("Some Long Provider")) + ) + processedImage.metadata.credit should be( + Some("Some Long Provider/Getty Im") + ) } it("should match 'Getty Images for Apple' credit") { - val image = createImageFromMetadata("credit" -> "Getty Images for Apple", "source" -> "Getty 
Images Europe") - val processedImage = applyProcessors(image) - processedImage.usageRights should be(Agency("Getty Images", Some("Getty Images Europe"))) + val image = createImageFromMetadata( + "credit" -> "Getty Images for Apple", + "source" -> "Getty Images Europe" + ) + val processedImage = applyProcessors(image) + processedImage.usageRights should be( + Agency("Getty Images", Some("Getty Images Europe")) + ) processedImage.metadata.credit should be(Some("Getty Images for Apple")) } @@ -300,24 +378,29 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { it("should match 'FilmMagic' credit") { val image = createImageFromMetadata("credit" -> "FilmMagic") val processedImage = applyProcessors(image) - processedImage.usageRights should be(Agency("Getty Images", Some("FilmMagic"))) + processedImage.usageRights should be( + Agency("Getty Images", Some("FilmMagic")) + ) processedImage.metadata.credit should be(Some("FilmMagic")) } it("should match 'WireImage' credit") { val image = createImageFromMetadata("credit" -> "WireImage") val processedImage = applyProcessors(image) - processedImage.usageRights should be(Agency("Getty Images", Some("WireImage"))) + processedImage.usageRights should be( + Agency("Getty Images", Some("WireImage")) + ) processedImage.metadata.credit should be(Some("WireImage")) } it("should match 'Hulton' credit") { val image = createImageFromMetadata("credit" -> "Hulton") val processedImage = applyProcessors(image) - processedImage.usageRights should be(Agency("Getty Images", Some("Hulton"))) + processedImage.usageRights should be( + Agency("Getty Images", Some("Hulton")) + ) processedImage.metadata.credit should be(Some("Hulton")) } } - describe("PA") { it("should match PA credit") { val image = createImageFromMetadata("credit" -> "PA") @@ -326,7 +409,8 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } it("should match PA source if credit doesn't match") { - val image = createImageFromMetadata("credit" -> "BBC/PA", "source" -> "PA") + val image = + createImageFromMetadata("credit" -> "BBC/PA", "source" -> "PA") val processedImage = applyProcessors(image) processedImage.usageRights should be(Agency("PA")) } @@ -338,7 +422,8 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } it("should match 'Press Association Images' credit") { - val image = createImageFromMetadata("credit" -> "Press Association Images") + val image = + createImageFromMetadata("credit" -> "Press Association Images") val processedImage = applyProcessors(image) processedImage.usageRights should be(Agency("PA")) } @@ -350,7 +435,6 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } } - describe("Reuters") { it("should match REUTERS credit") { val image = createImageFromMetadata("credit" -> "REUTERS") @@ -388,35 +472,40 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } } - describe("Rex Features") { it("should match Rex Features source") { - val image = createImageFromMetadata("credit" -> "Tim Ireland/REX Shutterstock", "source" -> "Rex Features") + val image = createImageFromMetadata( + "credit" -> "Tim Ireland/REX Shutterstock", + "source" -> "Rex Features" + ) val processedImage = applyProcessors(image) processedImage.usageRights match { case u: Agency => { - u.supplier should be ("Rex Features") + u.supplier should be("Rex Features") u.suppliersCollection should be(None) } case _ => } - processedImage.metadata.credit should be(Some("Tim 
Ireland/REX Shutterstock")) + processedImage.metadata.credit should be( + Some("Tim Ireland/REX Shutterstock") + ) processedImage.metadata.source should be(Some("Rex Features")) } it("should match '*/ Rex Features' credit") { - val image = createImageFromMetadata("credit" -> "Bleddyn Butcher / Rex Features") + val image = + createImageFromMetadata("credit" -> "Bleddyn Butcher / Rex Features") val processedImage = applyProcessors(image) - processedImage.usageRights should be (Agency("Rex Features")) + processedImage.usageRights should be(Agency("Rex Features")) } } - describe("Ronald Grant") { it("should match www.ronaldgrantarchive.com credit") { - val image = createImageFromMetadata("credit" -> "www.ronaldgrantarchive.com") + val image = + createImageFromMetadata("credit" -> "www.ronaldgrantarchive.com") val processedImage = applyProcessors(image) processedImage.usageRights should be(Agency("Ronald Grant Archive")) processedImage.metadata.credit should be(Some("Ronald Grant")) @@ -430,9 +519,7 @@ class SupplierProcessorsTest extends FunSpec with Matchers with MetadataHelper { } } - def applyProcessors(image: Image): Image = SupplierProcessors.apply(image) - } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/UseCanonicalGuardianCreditTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/UseCanonicalGuardianCreditTest.scala index 7121a4590a..3aad133105 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/UseCanonicalGuardianCreditTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/cleanup/UseCanonicalGuardianCreditTest.scala @@ -2,21 +2,30 @@ package com.gu.mediaservice.lib.cleanup import org.scalatest.{FunSpec, Matchers} -class UseCanonicalGuardianCreditTest extends FunSpec with Matchers with MetadataHelper { +class UseCanonicalGuardianCreditTest + extends FunSpec + with Matchers + with MetadataHelper { it("should not change a non-Guardian credit") { val metadata = createImageMetadata("credit" -> "Getty Images") - UseCanonicalGuardianCredit.clean(metadata).credit should be (Some("Getty Images")) + UseCanonicalGuardianCredit.clean(metadata).credit should be( + Some("Getty Images") + ) } it("should not change a 'The Guardian' credit") { val metadata = createImageMetadata("credit" -> "The Guardian") - UseCanonicalGuardianCredit.clean(metadata).credit should be (Some("The Guardian")) + UseCanonicalGuardianCredit.clean(metadata).credit should be( + Some("The Guardian") + ) } it("should change a 'Guardian' credit") { val metadata = createImageMetadata("credit" -> "Guardian") - UseCanonicalGuardianCredit.clean(metadata).credit should be (Some("The Guardian")) + UseCanonicalGuardianCredit.clean(metadata).credit should be( + Some("The Guardian") + ) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/collections/CollectionsManagerTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/collections/CollectionsManagerTest.scala index f4f7794c3f..4b41085381 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/collections/CollectionsManagerTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/collections/CollectionsManagerTest.scala @@ -10,30 +10,39 @@ class CollectionsManagerTest extends FunSpec with Matchers { describe("CollectionManager") { describe("serialisation") { - it ("should convert path to string with /") { - CollectionsManager.pathToString(List("g2", "art", "film")) shouldBe "g2/art/film" + it("should convert path to string with /") { + CollectionsManager.pathToString( + 
List("g2", "art", "film") + ) shouldBe "g2/art/film" } - it ("should convert a string to a path") { - CollectionsManager.stringToPath("g2/art/film") shouldBe List("g2", "art", "film") + it("should convert a string to a path") { + CollectionsManager + .stringToPath("g2/art/film") shouldBe List("g2", "art", "film") } - it ("should convert a URI to a path") { - CollectionsManager.uriToPath("g2/art/rhythm+blues") shouldBe List("g2", "art", "rhythm blues") + it("should convert a URI to a path") { + CollectionsManager.uriToPath("g2/art/rhythm+blues") shouldBe List( + "g2", + "art", + "rhythm blues" + ) } - it ("should convert a path to a URI") { - CollectionsManager.pathToUri(List("g2", "art", "rhythm&blues")) shouldBe "g2/art/rhythm%26blues" + it("should convert a path to a URI") { + CollectionsManager.pathToUri( + List("g2", "art", "rhythm&blues") + ) shouldBe "g2/art/rhythm%26blues" } } describe("validation") { - it ("should allow strings") { + it("should allow strings") { CollectionsManager.isValidPathBit("{something¬hing}") shouldBe true } - it ("should not allow /") { + it("should not allow /") { CollectionsManager.isValidPathBit("this/that") shouldBe false } @@ -42,21 +51,27 @@ class CollectionsManagerTest extends FunSpec with Matchers { describe("create") { it("should lowercase pathId on creation") { - val col = Collection.build(List("G2", "ArT", "CasECrazY"), ActionData("me@you.com", DateTime.now)) + val col = Collection.build( + List("G2", "ArT", "CasECrazY"), + ActionData("me@you.com", DateTime.now) + ) col.path shouldEqual List("G2", "ArT", "CasECrazY") col.pathId shouldEqual "g2/art/casecrazy" } } - it ("should only show the latest collection with same ID") { + it("should only show the latest collection with same ID") { val date = DateTime.now() val laterDate = date.minusDays(5) val evenLaterDate = laterDate.minusDays(5) - val collection1 = Collection.build(List("g2"), ActionData("me@you.com", date)) - val collection2 = Collection.build(List("g2"), ActionData("you@me.com", laterDate)) - val collection3 = Collection.build(List("g2"), ActionData("them@they.com", evenLaterDate)) + val collection1 = + Collection.build(List("g2"), ActionData("me@you.com", date)) + val collection2 = + Collection.build(List("g2"), ActionData("you@me.com", laterDate)) + val collection3 = + Collection.build(List("g2"), ActionData("them@they.com", evenLaterDate)) val duped = List(collection2, collection1, collection3) @@ -67,7 +82,7 @@ class CollectionsManagerTest extends FunSpec with Matchers { } - it ("should find the index of a collection in a list") { + it("should find the index of a collection in a list") { val actionData = ActionData("me@you.com", DateTime.now()) val collections = List( Collection.build(List("g2"), actionData), @@ -78,8 +93,10 @@ class CollectionsManagerTest extends FunSpec with Matchers { ) val index = CollectionsManager.findIndexes(List("g2", "art"), collections) - val noIndex = CollectionsManager.findIndexes(List("not", "there"), collections) - val multiIndex = CollectionsManager.findIndexes(List("observer", "feature"), collections) + val noIndex = + CollectionsManager.findIndexes(List("not", "there"), collections) + val multiIndex = + CollectionsManager.findIndexes(List("observer", "feature"), collections) index shouldBe List(1) noIndex shouldBe Nil diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/config/ProviderLoaderTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/config/ProviderLoaderTest.scala index 2726848ee4..e2a556d6f8 100644 --- 
a/common-lib/src/test/scala/com/gu/mediaservice/lib/config/ProviderLoaderTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/config/ProviderLoaderTest.scala @@ -20,7 +20,8 @@ class NoArgTestProvider extends TestProvider { override def info: String = "no-arg-test-provider" } -case class ResourceTestProvider(resources: TestProviderResources) extends TestProvider { +case class ResourceTestProvider(resources: TestProviderResources) + extends TestProvider { override def info: String = s"resource-test-provider ${resources.aResource}" } @@ -28,29 +29,42 @@ case class ConfigTestProvider(config: Configuration) extends TestProvider { override def info: String = s"config-test-provider ${config.hashCode}" } -case class ConfigResourceTestProvider(config: Configuration, resources: TestProviderResources) extends TestProvider { - override def info: String = s"config-resource-test-provider ${config.hashCode} ${resources.aResource}" +case class ConfigResourceTestProvider( + config: Configuration, + resources: TestProviderResources +) extends TestProvider { + override def info: String = + s"config-resource-test-provider ${config.hashCode} ${resources.aResource}" } -case class ResourceConfigTestProvider(resources: TestProviderResources, config: Configuration) extends TestProvider { - override def info: String = s"resource-config-test-provider ${resources.aResource} ${config.hashCode}" +case class ResourceConfigTestProvider( + resources: TestProviderResources, + config: Configuration +) extends TestProvider { + override def info: String = + s"resource-config-test-provider ${resources.aResource} ${config.hashCode}" } -case class BadTestProvider(resources: TestProviderResources, config: Configuration) extends TestProvider { +case class BadTestProvider( + resources: TestProviderResources, + config: Configuration +) extends TestProvider { throw new IllegalArgumentException("Oh dear, something went wrong") - override def info: String = s"resource-config-test-provider ${resources.aResource} ${config.hashCode}" + override def info: String = + s"resource-config-test-provider ${resources.aResource} ${config.hashCode}" } class NotATestProvider { def monkey: String = "not-test-provider" } -class TestProviderWithStringConstructor(configString: String) extends TestProvider { +class TestProviderWithStringConstructor(configString: String) + extends TestProvider { def info: String = s"not-test-provider $configString" } -object TestProviderLoader extends ProviderLoader[TestProvider, TestProviderResources]("test provider") - +object TestProviderLoader + extends ProviderLoader[TestProvider, TestProviderResources]("test provider") class ProviderLoaderTest extends FreeSpec with Matchers with EitherValues { @@ -60,42 +74,66 @@ class ProviderLoaderTest extends FreeSpec with Matchers with EitherValues { val providerResources = ProviderResources(emptyConfig, resources) "should successfully load a no arg TestProvider instance" in { - val instance = TestProviderLoader.loadProvider(classOf[NoArgTestProvider].getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + classOf[NoArgTestProvider].getCanonicalName, + providerResources + ) instance.right.value.info shouldBe "no-arg-test-provider" } "should successfully load a companion object TestProvider" in { - val instance = TestProviderLoader.loadProvider(ObjectTestProvider.getClass.getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + ObjectTestProvider.getClass.getCanonicalName, + providerResources + ) 
instance.right.value.info shouldBe s"object-test-provider" } "should successfully load a config arg TestProvider instance" in { - val instance = TestProviderLoader.loadProvider(classOf[ConfigTestProvider].getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + classOf[ConfigTestProvider].getCanonicalName, + providerResources + ) instance.right.value.info shouldBe s"config-test-provider ${emptyConfig.hashCode}" } "should successfully load a resource arg TestProvider instance" in { - val instance = TestProviderLoader.loadProvider(classOf[ResourceTestProvider].getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + classOf[ResourceTestProvider].getCanonicalName, + providerResources + ) instance.right.value.info shouldBe s"resource-test-provider sausages" } "should successfully load a config, resource arg TestProvider instance" in { - val instance = TestProviderLoader.loadProvider(classOf[ConfigResourceTestProvider].getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + classOf[ConfigResourceTestProvider].getCanonicalName, + providerResources + ) instance.right.value.info shouldBe s"config-resource-test-provider ${emptyConfig.hashCode} sausages" } "should successfully load a resource, config arg TestProvider instance" in { - val instance = TestProviderLoader.loadProvider(classOf[ResourceConfigTestProvider].getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + classOf[ResourceConfigTestProvider].getCanonicalName, + providerResources + ) instance.right.value.info shouldBe s"resource-config-test-provider sausages ${emptyConfig.hashCode}" } "should fail to load something that isn't an TestProvider" in { - val instance = TestProviderLoader.loadProvider(classOf[NotATestProvider].getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + classOf[NotATestProvider].getCanonicalName, + providerResources + ) instance.left.value shouldBe "Failed to cast com.gu.mediaservice.lib.config.NotATestProvider to a com.gu.mediaservice.lib.config.TestProvider" } "should fail to load something that doesn't have a suitable constructor" in { - val instance = TestProviderLoader.loadProvider(classOf[TestProviderWithStringConstructor].getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + classOf[TestProviderWithStringConstructor].getCanonicalName, + providerResources + ) instance.left.value shouldBe """A provider must have one and only one valid constructors taking arguments of type |com.gu.mediaservice.lib.config.TestProviderResources or play.api.Configuration. 
|com.gu.mediaservice.lib.config.TestProviderWithStringConstructor has 0 constructors: @@ -103,25 +141,38 @@ class ProviderLoaderTest extends FreeSpec with Matchers with EitherValues { } "should fail to load something that doesn't exist" in { - val instance = TestProviderLoader.loadProvider("com.gu.mediaservice.lib.config.TestProviderThatDoesntExist", providerResources) + val instance = TestProviderLoader.loadProvider( + "com.gu.mediaservice.lib.config.TestProviderThatDoesntExist", + providerResources + ) instance.left.value shouldBe "Unable to find test provider class com.gu.mediaservice.lib.config.TestProviderThatDoesntExist" } val nonEmptyConfig = Configuration.from(Map("key" -> "value")) - val nonEmptyConfigProviderResources = ProviderResources(nonEmptyConfig, resources) + val nonEmptyConfigProviderResources = + ProviderResources(nonEmptyConfig, resources) "should fail to load a no arg processor that doesn't take configuration with non-empty configuration" in { - val instance = TestProviderLoader.loadProvider(classOf[NoArgTestProvider].getCanonicalName, nonEmptyConfigProviderResources) + val instance = TestProviderLoader.loadProvider( + classOf[NoArgTestProvider].getCanonicalName, + nonEmptyConfigProviderResources + ) instance.left.value shouldBe "Configuration provided but constructor of com.gu.mediaservice.lib.config.NoArgTestProvider with args () doesn't take it." } "should fail to load an object processor that doesn't take configuration with non-empty configuration" in { - val instance = TestProviderLoader.loadProvider(ObjectTestProvider.getClass.getCanonicalName, nonEmptyConfigProviderResources) + val instance = TestProviderLoader.loadProvider( + ObjectTestProvider.getClass.getCanonicalName, + nonEmptyConfigProviderResources + ) instance.left.value shouldBe "Configuration provided but com.gu.mediaservice.lib.config.ObjectTestProvider$ is a companion object and doesn't take configuration." } "should fail to load a provider if the constructor throws an exception" in { - val instance = TestProviderLoader.loadProvider(classOf[BadTestProvider].getCanonicalName, providerResources) + val instance = TestProviderLoader.loadProvider( + classOf[BadTestProvider].getCanonicalName, + providerResources + ) instance.left.value shouldBe "java.lang.IllegalArgumentException thrown when executing constructor java.lang.reflect.Constructor(com.gu.mediaservice.lib.config.TestProviderResources, play.api.Configuration). Search logs for stack trace." 
} } @@ -129,56 +180,63 @@ class ProviderLoaderTest extends FreeSpec with Matchers with EitherValues { "The config loader" - { val resources = TestProviderResources("sausages") - implicit val testProviderConfigLoader: ConfigLoader[TestProvider] = TestProviderLoader.singletonConfigLoader(resources) - implicit val testProvidersConfigLoader: ConfigLoader[Seq[TestProvider]] = TestProviderLoader.seqConfigLoader(resources) + implicit val testProviderConfigLoader: ConfigLoader[TestProvider] = + TestProviderLoader.singletonConfigLoader(resources) + implicit val testProvidersConfigLoader: ConfigLoader[Seq[TestProvider]] = + TestProviderLoader.seqConfigLoader(resources) "should load an image processor from a classname" in { - val conf:Configuration = Configuration.from(Map( - "some.path" -> List( - "com.gu.mediaservice.lib.config.NoArgTestProvider" + val conf: Configuration = Configuration.from( + Map( + "some.path" -> List( + "com.gu.mediaservice.lib.config.NoArgTestProvider" + ) ) - )) + ) val processors = conf.get[Seq[TestProvider]]("some.path") processors.head shouldBe a[NoArgTestProvider] } "should load an image processor which has configuration" in { - val conf:Configuration = Configuration.from(Map( - "some.path" -> List( - Map( - "className" -> "com.gu.mediaservice.lib.config.ConfigTestProvider", - "config" -> Map("parameter" -> "value") + val conf: Configuration = Configuration.from( + Map( + "some.path" -> List( + Map( + "className" -> "com.gu.mediaservice.lib.config.ConfigTestProvider", + "config" -> Map("parameter" -> "value") + ) ) ) - )) + ) val processors = conf.get[Seq[TestProvider]]("some.path") val processor = processors.head - inside(processor) { - case ConfigTestProvider(config) => config.get[String]("parameter") shouldBe "value" + inside(processor) { case ConfigTestProvider(config) => + config.get[String]("parameter") shouldBe "value" } } "should load multiple image processors of mixed config types" in { - val conf:Configuration = Configuration.from(Map( - "some.path" -> List( - "com.gu.mediaservice.lib.config.NoArgTestProvider", - Map( - "className" -> "com.gu.mediaservice.lib.config.ConfigTestProvider", - "config" -> Map("parameter" -> "value") + val conf: Configuration = Configuration.from( + Map( + "some.path" -> List( + "com.gu.mediaservice.lib.config.NoArgTestProvider", + Map( + "className" -> "com.gu.mediaservice.lib.config.ConfigTestProvider", + "config" -> Map("parameter" -> "value") + ) ) ) - )) + ) val processors = conf.get[Seq[TestProvider]]("some.path") processors.length shouldBe 2 processors.toList should matchPattern { - case (_:NoArgTestProvider) :: ConfigTestProvider(_) :: Nil => + case (_: NoArgTestProvider) :: ConfigTestProvider(_) :: Nil => } } "should load multiple image processors of mixed config types from HOCON" in { - val conf:Configuration = Configuration(ConfigFactory.parseString( - """ + val conf: Configuration = Configuration(ConfigFactory.parseString(""" |some.path: [ | com.gu.mediaservice.lib.config.NoArgTestProvider, | { @@ -192,38 +250,45 @@ class ProviderLoaderTest extends FreeSpec with Matchers with EitherValues { val processors = conf.get[Seq[TestProvider]]("some.path") processors.length shouldBe 2 processors.toList should matchPattern { - case (_:NoArgTestProvider) :: ConfigTestProvider(_) :: Nil => + case (_: NoArgTestProvider) :: ConfigTestProvider(_) :: Nil => } } "should fail to load multiple image processors if they don't meet the spec" in { - val conf:Configuration = Configuration.from(Map( - "some.path" -> List( - 
"com.gu.mediaservice.lib.config.NoArgTestProvider", - Map( - "noClassName" -> "com.gu.mediaservice.lib.config.ConfigTestProvider", - "config" -> Map("parameter" -> "value") + val conf: Configuration = Configuration.from( + Map( + "some.path" -> List( + "com.gu.mediaservice.lib.config.NoArgTestProvider", + Map( + "noClassName" -> "com.gu.mediaservice.lib.config.ConfigTestProvider", + "config" -> Map("parameter" -> "value") + ) ) ) - )) + ) val thrown = the[BadValue] thrownBy { conf.get[Seq[TestProvider]]("some.path") } - thrown.getMessage should include ("A test provider can either be a class name (string) or object with className (string) and config (object) fields. This OBJECT is not valid.") + thrown.getMessage should include( + "A test provider can either be a class name (string) or object with className (string) and config (object) fields. This OBJECT is not valid." + ) } "should fail to load an image processors if the config isn't a string" in { - val conf:Configuration = Configuration.from(Map( - "some.path" -> List( - List("fred") + val conf: Configuration = Configuration.from( + Map( + "some.path" -> List( + List("fred") + ) ) - )) + ) val thrown = the[BadValue] thrownBy { conf.get[Seq[TestProvider]]("some.path") } - thrown.getMessage should include ("A test provider can either be a class name (string) or object with className (string) and config (object) fields. This LIST is not valid") + thrown.getMessage should include( + "A test provider can either be a class name (string) or object with className (string) and config (object) fields. This LIST is not valid" + ) } - } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/guardian/auth/PandaAuthenticationProviderTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/guardian/auth/PandaAuthenticationProviderTest.scala index 1bb9f11303..c095860190 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/guardian/auth/PandaAuthenticationProviderTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/guardian/auth/PandaAuthenticationProviderTest.scala @@ -8,8 +8,13 @@ import java.time.Instant class PandaAuthenticationProviderTest extends FunSuite with MustMatchers { import com.gu.mediaservice.lib.guardian.auth.PandaAuthenticationProvider.validateUser - val user: AuthenticatedUser = AuthenticatedUser(User("Barry", "Chuckle", "barry.chuckle@guardian.co.uk", None), - "media-service", Set("media-service"), Instant.now().plusSeconds(100).toEpochMilli, multiFactor = true) + val user: AuthenticatedUser = AuthenticatedUser( + User("Barry", "Chuckle", "barry.chuckle@guardian.co.uk", None), + "media-service", + Set("media-service"), + Instant.now().plusSeconds(100).toEpochMilli, + multiFactor = true + ) test("user fails email domain validation") { validateUser(user, "chucklevision.biz", None) must be(false) @@ -20,11 +25,19 @@ class PandaAuthenticationProviderTest extends FunSuite with MustMatchers { } test("user passes mfa check if no mfa checker configured") { - validateUser(user.copy(multiFactor = false), "guardian.co.uk", None) must be(true) + validateUser( + user.copy(multiFactor = false), + "guardian.co.uk", + None + ) must be(true) } test("user fails mfa check if missing mfa") { - validateUser(user.copy(multiFactor = false), "guardian.co.uk", Some(null)) must be(false) + validateUser( + user.copy(multiFactor = false), + "guardian.co.uk", + Some(null) + ) must be(false) } test("user passes mfa check") { diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/imaging/ImageOperationsTest.scala 
b/common-lib/src/test/scala/com/gu/mediaservice/lib/imaging/ImageOperationsTest.scala index 01ad5aa454..e51e9468bd 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/imaging/ImageOperationsTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/imaging/ImageOperationsTest.scala @@ -13,30 +13,37 @@ import scala.concurrent.ExecutionContext.Implicits.global @Ignore class ImageOperationsTest extends FunSpec with Matchers with ScalaFutures { - implicit override val patienceConfig = PatienceConfig(timeout = Span(1000, Millis), interval = Span(25, Millis)) + implicit override val patienceConfig = + PatienceConfig(timeout = Span(1000, Millis), interval = Span(25, Millis)) describe("identifyColourModel") { - it("should return RGB for a JPG image with RGB image data and no embedded profile") { + it( + "should return RGB for a JPG image with RGB image data and no embedded profile" + ) { val image = fileAt("rgb-wo-profile.jpg") val colourModelFuture = ImageOperations.identifyColourModel(image, Jpeg) whenReady(colourModelFuture) { colourModel => - colourModel should be (Some("RGB")) + colourModel should be(Some("RGB")) } } - it("should return RGB for a JPG image with RGB image data and an RGB embedded profile") { + it( + "should return RGB for a JPG image with RGB image data and an RGB embedded profile" + ) { val image = fileAt("rgb-with-rgb-profile.jpg") val colourModelFuture = ImageOperations.identifyColourModel(image, Jpeg) whenReady(colourModelFuture) { colourModel => - colourModel should be (Some("RGB")) + colourModel should be(Some("RGB")) } } - it("should return RGB for a JPG image with RGB image data and an incorrect CMYK embedded profile") { + it( + "should return RGB for a JPG image with RGB image data and an incorrect CMYK embedded profile" + ) { val image = fileAt("rgb-with-cmyk-profile.jpg") val colourModelFuture = ImageOperations.identifyColourModel(image, Jpeg) whenReady(colourModelFuture) { colourModel => - colourModel should be (Some("RGB")) + colourModel should be(Some("RGB")) } } @@ -44,15 +51,17 @@ class ImageOperationsTest extends FunSpec with Matchers with ScalaFutures { val image = fileAt("cmyk.jpg") val colourModelFuture = ImageOperations.identifyColourModel(image, Jpeg) whenReady(colourModelFuture) { colourModel => - colourModel should be (Some("CMYK")) + colourModel should be(Some("CMYK")) } } - it("should return GRAYSCALE for a JPG image with GRAYSCALE image data and no embedded profile") { + it( + "should return GRAYSCALE for a JPG image with GRAYSCALE image data and no embedded profile" + ) { val image = fileAt("grayscale-wo-profile.jpg") val colourModelFuture = ImageOperations.identifyColourModel(image, Jpeg) whenReady(colourModelFuture) { colourModel => - colourModel should be (Some("GRAYSCALE")) + colourModel should be(Some("GRAYSCALE")) } } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/json/JsonByteArrayUtilTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/json/JsonByteArrayUtilTest.scala index d86fa55224..05a5f1c2ab 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/json/JsonByteArrayUtilTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/json/JsonByteArrayUtilTest.scala @@ -35,12 +35,18 @@ class JsonByteArrayUtilTest extends FunSuite with Matchers { val compressedBytes = JsonByteArrayUtil.toByteArray(shapes) compressedBytes.length < uncompressedBytes.length shouldBe true - JsonByteArrayUtil.fromByteArray[List[Shape]](uncompressedBytes) shouldBe Some(shapes) - 
JsonByteArrayUtil.fromByteArray[List[Shape]](compressedBytes) shouldBe Some(shapes) + JsonByteArrayUtil.fromByteArray[List[Shape]]( + uncompressedBytes + ) shouldBe Some(shapes) + JsonByteArrayUtil.fromByteArray[List[Shape]](compressedBytes) shouldBe Some( + shapes + ) } test("An uncompressed message can be read") { val uncompressedJson = Json.toBytes(Json.toJson(circle)) - JsonByteArrayUtil.fromByteArray[Shape](uncompressedJson) shouldBe Some(circle) + JsonByteArrayUtil.fromByteArray[Shape](uncompressedJson) shouldBe Some( + circle + ) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/json/JsonOrderingTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/json/JsonOrderingTest.scala index aa492eb8a3..0e17e81013 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/json/JsonOrderingTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/json/JsonOrderingTest.scala @@ -1,6 +1,13 @@ package com.gu.mediaservice.lib.json -import com.gu.mediaservice.model.{Asset, FileMetadata, Handout, Image, ImageMetadata, UploadInfo} +import com.gu.mediaservice.model.{ + Asset, + FileMetadata, + Handout, + Image, + ImageMetadata, + UploadInfo +} import org.joda.time.{DateTime, DateTimeZone} import org.scalatest.Inside.inside import org.scalatest.{FreeSpec, Matchers} @@ -9,8 +16,8 @@ import play.api.libs.json.{JsObject, JsString, Json} import java.net.URI class JsonOrderingTest extends FreeSpec with Matchers { - /** - * The order of JSON documents is not strictly in accordance with the RFC but the Play library did maintain it + + /** The order of JSON documents is not strictly in accordance with the RFC but the Play library did maintain it * until 2.6.11 and does again from 2.8.0. See https://github.com/playframework/play-json/pull/253 * This is helpful for debugging and for the super-power-users that look at the API as it means that related fields * are grouped together throughout our API. @@ -18,8 +25,17 @@ class JsonOrderingTest extends FreeSpec with Matchers { * need to jump to Play 2.8 for our next upgrade (which is likely to be what we do anyway...) 
*/ "Play Json writes maintain ordering" in { - val dt = new DateTime(2021,1,20,12,0,0, DateTimeZone.forID("America/New_York")) - val image = Image(id = "id", + val dt = new DateTime( + 2021, + 1, + 20, + 12, + 0, + 0, + DateTimeZone.forID("America/New_York") + ) + val image = Image( + id = "id", uploadTime = dt, identifiers = Map.empty, uploadedBy = "Biden", @@ -33,15 +49,14 @@ class JsonOrderingTest extends FreeSpec with Matchers { userMetadata = None, thumbnail = None, metadata = ImageMetadata(), - usageRights = Handout(None), + usageRights = Handout(None) ) val json = Json.toJson(image) - inside(json) { - case jso: JsObject => - /* this only seems to break when an extra field is added to the JsObject - * presumably this is done somewhere inside Play which was causing the mis-ordering */ - val newJso = jso + ("extraField" -> JsString("value")) - newJso.fields.map(_._1) shouldBe Seq( + inside(json) { case jso: JsObject => + /* this only seems to break when an extra field is added to the JsObject + * presumably this is done somewhere inside Play which was causing the mis-ordering */ + val newJso = jso + ("extraField" -> JsString("value")) + newJso.fields.map(_._1) shouldBe Seq( "id", "uploadTime", "uploadedBy", diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/logging/StopwatchTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/logging/StopwatchTest.scala index aed66cd506..9264f58d97 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/logging/StopwatchTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/logging/StopwatchTest.scala @@ -14,6 +14,8 @@ class StopwatchTest extends FunSpec with Matchers { markers.contains("start") shouldBe true markers.contains("end") shouldBe true - markers("duration").toString.toLong should be >= fiveSeconds // >= as time is needed to call the function + markers( + "duration" + ).toString.toLong should be >= fiveSeconds // >= as time is needed to call the function } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/lib/metadata/ImageMetadataConverterTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/lib/metadata/ImageMetadataConverterTest.scala index 74b84532d1..5389165d32 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/lib/metadata/ImageMetadataConverterTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/lib/metadata/ImageMetadataConverterTest.scala @@ -11,25 +11,27 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { it("should return an empty ImageMetadata for empty FileMetadata") { val fileMetadata = FileMetadata(Map(), Map(), Map(), Map()) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - imageMetadata.dateTaken should be ('empty) - imageMetadata.description should be ('empty) - imageMetadata.credit should be ('empty) - imageMetadata.byline should be ('empty) - imageMetadata.bylineTitle should be ('empty) - imageMetadata.title should be ('empty) - imageMetadata.copyright should be ('empty) - imageMetadata.suppliersReference should be ('empty) - imageMetadata.source should be ('empty) - imageMetadata.specialInstructions should be ('empty) - imageMetadata.keywords should be ('empty) - imageMetadata.subLocation should be ('empty) - imageMetadata.city should be ('empty) - imageMetadata.state should be ('empty) - imageMetadata.country should be ('empty) - imageMetadata.peopleInImage should be ('empty) - } - - it("should populate string fields of ImageMetadata from default FileMetadata fields") { + imageMetadata.dateTaken should 
be('empty) + imageMetadata.description should be('empty) + imageMetadata.credit should be('empty) + imageMetadata.byline should be('empty) + imageMetadata.bylineTitle should be('empty) + imageMetadata.title should be('empty) + imageMetadata.copyright should be('empty) + imageMetadata.suppliersReference should be('empty) + imageMetadata.source should be('empty) + imageMetadata.specialInstructions should be('empty) + imageMetadata.keywords should be('empty) + imageMetadata.subLocation should be('empty) + imageMetadata.city should be('empty) + imageMetadata.state should be('empty) + imageMetadata.country should be('empty) + imageMetadata.peopleInImage should be('empty) + } + + it( + "should populate string fields of ImageMetadata from default FileMetadata fields" + ) { val fileMetadata = FileMetadata( iptc = Map( "Caption/Abstract" -> "the description", @@ -49,7 +51,6 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { "Copyright" -> "the copyright" ), exifSub = Map( - ), xmp = Map() ) @@ -63,14 +64,18 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { imageMetadata.copyright should be(Some("the copyright")) imageMetadata.suppliersReference should be(Some("the suppliers reference")) imageMetadata.source should be(Some("the source")) - imageMetadata.specialInstructions should be(Some("the special instructions")) + imageMetadata.specialInstructions should be( + Some("the special instructions") + ) imageMetadata.subLocation should be(Some("the sub location")) imageMetadata.city should be(Some("the city")) imageMetadata.state should be(Some("the state")) imageMetadata.country should be(Some("the country")) } - it("should populate string fields of ImageMetadata from default FileMetadata fields mainly from xmp") { + it( + "should populate string fields of ImageMetadata from default FileMetadata fields mainly from xmp" + ) { val fileMetadata = FileMetadata( iptc = Map( "Caption/Abstract" -> "the description", @@ -90,17 +95,20 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { "Copyright" -> "the copyright" ), exifSub = Map( - ), xmp = Map( - "dc:description" -> JsArray(Seq( - JsString("the xmp description"), - JsArray(Seq("{'xml:lang':'x-default'}").map(JsString)), - )), - "dc:title" -> JsArray(Seq( - JsString("the xmp title"), - JsArray(Seq("{'xml:lang':'x-default'}").map(JsString)), - )), + "dc:description" -> JsArray( + Seq( + JsString("the xmp description"), + JsArray(Seq("{'xml:lang':'x-default'}").map(JsString)) + ) + ), + "dc:title" -> JsArray( + Seq( + JsString("the xmp title"), + JsArray(Seq("{'xml:lang':'x-default'}").map(JsString)) + ) + ), "dc:creator" -> JsArray(Seq(JsString("xmp creator"))), "photoshop:DateCreated" -> JsString("2018-06-27T13:54:55"), "photoshop:Credit" -> JsString("xmp credit"), @@ -113,7 +121,7 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { "Iptc4xmpCore:Location" -> JsString("xmp subLocation"), "photoshop:City" -> JsString("xmp City"), "photoshop:State" -> JsString("xmp State"), - "photoshop:Country" -> JsString("xmp Country"), + "photoshop:Country" -> JsString("xmp Country") ) ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) @@ -134,25 +142,35 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { imageMetadata.country should be(Some("xmp Country")) } - it("should populate string fields of ImageMetadata from xmp fileMetadata properly " + - "even if xmp input had mixed order of entries") { + it( + "should populate string fields of ImageMetadata from xmp 
fileMetadata properly " + + "even if xmp input had mixed order of entries" + ) { val fileMetadata = FileMetadata( xmp = Map( - "dc:description" -> JsArray(Seq( - JsArray(Seq("{'xml:lang':'x-default'}").map(JsString)), - JsString("the xmp description"), - )), - "dc:title" -> JsArray(Seq( - JsArray(Seq( - "{'test:2':'test2'}", - "{'xml:lang':'x-default'}", - "{'test:1':'test1'}", - ).map(JsString)), - JsString("the xmp title"), - JsArray(Seq( - "{'test:3':'test3'}", - ).map(JsString)), - )), + "dc:description" -> JsArray( + Seq( + JsArray(Seq("{'xml:lang':'x-default'}").map(JsString)), + JsString("the xmp description") + ) + ), + "dc:title" -> JsArray( + Seq( + JsArray( + Seq( + "{'test:2':'test2'}", + "{'xml:lang':'x-default'}", + "{'test:1':'test1'}" + ).map(JsString) + ), + JsString("the xmp title"), + JsArray( + Seq( + "{'test:3':'test3'}" + ).map(JsString) + ) + ) + ), "dc:creator" -> JsArray(Seq(JsString("xmp creator"))), "photoshop:DateCreated" -> JsString("2018-06-27T13:54:55"), "photoshop:Credit" -> JsString("xmp credit"), @@ -165,7 +183,7 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { "Iptc4xmpCore:Location" -> JsString("xmp subLocation"), "photoshop:City" -> JsString("xmp City"), "photoshop:State" -> JsString("xmp State"), - "photoshop:Country" -> JsString("xmp Country"), + "photoshop:Country" -> JsString("xmp Country") ) ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) @@ -186,123 +204,257 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { imageMetadata.country should be(Some("xmp Country")) } - it("should fallback to Object Name for suppliersReference field of ImageMetadata if Original Transmission Reference is missing") { - val fileMetadata = FileMetadata(Map("Object Name" -> "the object name"), Map(), Map(), Map()) + it( + "should fallback to Object Name for suppliersReference field of ImageMetadata if Original Transmission Reference is missing" + ) { + val fileMetadata = + FileMetadata(Map("Object Name" -> "the object name"), Map(), Map(), Map()) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.suppliersReference should be(Some("the object name")) } - // Date Taken - private def parseDate(dateTime: String) = DateTime.parse(dateTime).withZone(DateTimeZone.UTC) + private def parseDate(dateTime: String) = + DateTime.parse(dateTime).withZone(DateTimeZone.UTC) - it("should populate the dateTaken field of ImageMetadata from EXIF Date/Time Original Composite (Mon Jun 18 01:23:45 BST 2018)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map("Date/Time Original Composite" -> "Mon Jun 18 01:23:45 BST 2018"), xmp = Map()) + it( + "should populate the dateTaken field of ImageMetadata from EXIF Date/Time Original Composite (Mon Jun 18 01:23:45 BST 2018)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = + Map("Date/Time Original Composite" -> "Mon Jun 18 01:23:45 BST 2018"), + xmp = Map() + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(Some(parseDate("2018-06-18T00:23:45Z"))) } - it("should populate the dateTaken field of ImageMetadata from EXIF Date/Time Original Composite with milliseconds (Mon Jun 18 01:23:45.025 BST 2018)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map("Date/Time Original Composite" -> "Mon Jun 18 01:23:45.025 BST 2018"), xmp = Map()) + it( + "should populate the dateTaken field of ImageMetadata from EXIF 
Date/Time Original Composite with milliseconds (Mon Jun 18 01:23:45.025 BST 2018)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map( + "Date/Time Original Composite" -> "Mon Jun 18 01:23:45.025 BST 2018" + ), + xmp = Map() + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - imageMetadata.dateTaken should be(Some(parseDate("2018-06-18T00:23:45.025Z"))) + imageMetadata.dateTaken should be( + Some(parseDate("2018-06-18T00:23:45.025Z")) + ) } - it("should populate the dateTaken field of ImageMetadata from IPTC Date Time Created Composite (Mon Jun 18 01:23:45 BST 2018)") { - val fileMetadata = FileMetadata(iptc = Map("Date Time Created Composite" -> "Mon Jun 18 01:23:45 BST 2018"), exif = Map(), exifSub = Map(), xmp = Map()) + it( + "should populate the dateTaken field of ImageMetadata from IPTC Date Time Created Composite (Mon Jun 18 01:23:45 BST 2018)" + ) { + val fileMetadata = FileMetadata( + iptc = + Map("Date Time Created Composite" -> "Mon Jun 18 01:23:45 BST 2018"), + exif = Map(), + exifSub = Map(), + xmp = Map() + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(Some(parseDate("2018-06-18T00:23:45Z"))) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2014-12-16T02:23:45+01:00)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("2014-12-16T02:23:45+01:00"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2014-12-16T02:23:45+01:00)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = + Map("photoshop:DateCreated" -> JsString("2014-12-16T02:23:45+01:00")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(Some(parseDate("2014-12-16T01:23:45Z"))) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2014-12-16T02:23+01:00)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("2014-12-16T02:23+01:00"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2014-12-16T02:23+01:00)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = Map("photoshop:DateCreated" -> JsString("2014-12-16T02:23+01:00")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(Some(parseDate("2014-12-16T01:23:00Z"))) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2018-06-27T13:54:55)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("2018-06-27T13:54:55"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2018-06-27T13:54:55)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = Map("photoshop:DateCreated" -> JsString("2018-06-27T13:54:55")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(Some(parseDate("2018-06-27T13:54:55Z"))) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2018-06-27T13:54:55.123)") { - val fileMetadata = 
FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("2018-06-27T13:54:55.123"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2018-06-27T13:54:55.123)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = Map("photoshop:DateCreated" -> JsString("2018-06-27T13:54:55.123")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - imageMetadata.dateTaken should be(Some(parseDate("2018-06-27T13:54:55.123Z"))) + imageMetadata.dateTaken should be( + Some(parseDate("2018-06-27T13:54:55.123Z")) + ) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (Tue Dec 16 01:23:45 GMT 2014)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("Tue Dec 16 01:23:45 GMT 2014"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (Tue Dec 16 01:23:45 GMT 2014)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = + Map("photoshop:DateCreated" -> JsString("Tue Dec 16 01:23:45 GMT 2014")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(Some(parseDate("2014-12-16T01:23:45Z"))) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (Tue Dec 16 01:23:45 UTC 2014)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("Tue Dec 16 01:23:45 UTC 2014"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (Tue Dec 16 01:23:45 UTC 2014)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = + Map("photoshop:DateCreated" -> JsString("Tue Dec 16 01:23:45 UTC 2014")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(Some(parseDate("2014-12-16T01:23:45Z"))) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (Tue Dec 16 01:23:45 BST 2014)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("Tue Dec 16 01:23:45 BST 2014"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (Tue Dec 16 01:23:45 BST 2014)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = + Map("photoshop:DateCreated" -> JsString("Tue Dec 16 01:23:45 BST 2014")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(Some(parseDate("2014-12-16T00:23:45Z"))) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (Tue Dec 16 01:23:45 PDT 2014)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("Tue Dec 16 01:23:45 PDT 2014"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (Tue Dec 16 01:23:45 PDT 2014)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = + Map("photoshop:DateCreated" -> JsString("Tue Dec 16 01:23:45 PDT 2014")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - 
imageMetadata.dateTaken should be(Some(parseDate("2014-12-16T01:23:45-08:00"))) + imageMetadata.dateTaken should be( + Some(parseDate("2014-12-16T01:23:45-08:00")) + ) } - it("should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2014-12-16)") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("2014-12-16"))) + it( + "should populate the dateTaken field of ImageMetadata from XMP photoshop:DateCreated (2014-12-16)" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = Map("photoshop:DateCreated" -> JsString("2014-12-16")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - imageMetadata.dateTaken should be(Some(DateTime.parse("2014-12-16T00:00:00Z"))) + imageMetadata.dateTaken should be( + Some(DateTime.parse("2014-12-16T00:00:00Z")) + ) } - it("should leave the dateTaken field of ImageMetadata empty if no date present") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map()) + it( + "should leave the dateTaken field of ImageMetadata empty if no date present" + ) { + val fileMetadata = + FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map()) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(None) } - it("should leave the dateTaken field of ImageMetadata empty if EXIF Date/Time Original Composite is not a valid date") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map("Date/Time Original Composite" -> "not a date"), xmp = Map()) + it( + "should leave the dateTaken field of ImageMetadata empty if EXIF Date/Time Original Composite is not a valid date" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map("Date/Time Original Composite" -> "not a date"), + xmp = Map() + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(None) } - it("should leave the dateTaken field of ImageMetadata empty if IPTC Date Time Created Composite is not a valid date") { - val fileMetadata = FileMetadata(iptc = Map("Date Time Created Composite" -> "not a date"), exif = Map(), exifSub = Map(), xmp = Map()) + it( + "should leave the dateTaken field of ImageMetadata empty if IPTC Date Time Created Composite is not a valid date" + ) { + val fileMetadata = FileMetadata( + iptc = Map("Date Time Created Composite" -> "not a date"), + exif = Map(), + exifSub = Map(), + xmp = Map() + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(None) } - it("should leave the dateTaken field of ImageMetadata empty if XMP photoshop:DateCreated is not a valid date") { - val fileMetadata = FileMetadata(iptc = Map(), exif = Map(), exifSub = Map(), xmp = Map("photoshop:DateCreated" -> JsString("not a date"))) + it( + "should leave the dateTaken field of ImageMetadata empty if XMP photoshop:DateCreated is not a valid date" + ) { + val fileMetadata = FileMetadata( + iptc = Map(), + exif = Map(), + exifSub = Map(), + xmp = Map("photoshop:DateCreated" -> JsString("not a date")) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.dateTaken should be(None) } // Keywords - it("should populate keywords field of ImageMetadata from comma-separated list of keywords") { - val fileMetadata = FileMetadata(Map("Keywords" -> "Foo,Bar, Baz"), Map(), Map(), 
Map()) + it( + "should populate keywords field of ImageMetadata from comma-separated list of keywords" + ) { + val fileMetadata = + FileMetadata(Map("Keywords" -> "Foo,Bar, Baz"), Map(), Map(), Map()) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.keywords should be(List("Foo", "Bar", "Baz")) } - it("should populate keywords field of ImageMetadata from semi-colon-separated list of keywords") { - val fileMetadata = FileMetadata(Map("Keywords" -> "Foo;Bar; Baz"), Map(), Map(), Map()) + it( + "should populate keywords field of ImageMetadata from semi-colon-separated list of keywords" + ) { + val fileMetadata = + FileMetadata(Map("Keywords" -> "Foo;Bar; Baz"), Map(), Map(), Map()) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) imageMetadata.keywords should be(List("Foo", "Bar", "Baz")) } @@ -312,104 +464,182 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { } it("should clean up 'just date' dates into iso format") { - ImageMetadataConverter.cleanDate("2014-12-16") shouldBe "2014-12-16T00:00:00.000Z" + ImageMetadataConverter.cleanDate( + "2014-12-16" + ) shouldBe "2014-12-16T00:00:00.000Z" } it("should clean up iso dates with seconds into iso format") { - ImageMetadataConverter.cleanDate("2014-12-16T01:02:03.040Z") shouldBe "2014-12-16T01:02:03.040Z" + ImageMetadataConverter.cleanDate( + "2014-12-16T01:02:03.040Z" + ) shouldBe "2014-12-16T01:02:03.040Z" } it("should clean up iso dates without sub-second precision into iso format") { - ImageMetadataConverter.cleanDate("2014-12-16T01:02:03Z") shouldBe "2014-12-16T01:02:03.000Z" + ImageMetadataConverter.cleanDate( + "2014-12-16T01:02:03Z" + ) shouldBe "2014-12-16T01:02:03.000Z" } it("should clean up iso dates without seconds into iso format") { - ImageMetadataConverter.cleanDate("2014-12-16T01:02Z") shouldBe "2014-12-16T01:02:00.000Z" + ImageMetadataConverter.cleanDate( + "2014-12-16T01:02Z" + ) shouldBe "2014-12-16T01:02:00.000Z" } - it("should clean up iso dates without seconds but with fractional seconds 'lol' into iso format") { - ImageMetadataConverter.cleanDate("2014-12-16T01:02.040Z") shouldBe "2014-12-16T01:02:00.040Z" + it( + "should clean up iso dates without seconds but with fractional seconds 'lol' into iso format" + ) { + ImageMetadataConverter.cleanDate( + "2014-12-16T01:02.040Z" + ) shouldBe "2014-12-16T01:02:00.040Z" } - it("should clean up machine dates with GMT time zone with subsecond precision into iso format") { - ImageMetadataConverter.cleanDate("Tue Dec 16 01:02:03.040 GMT 2014") shouldBe "2014-12-16T01:02:03.040Z" + it( + "should clean up machine dates with GMT time zone with subsecond precision into iso format" + ) { + ImageMetadataConverter.cleanDate( + "Tue Dec 16 01:02:03.040 GMT 2014" + ) shouldBe "2014-12-16T01:02:03.040Z" } - it("should clean up machine dates with GMT time zone without subsecond precision into iso format") { - ImageMetadataConverter.cleanDate("Tue Dec 16 01:02:03 GMT 2014") shouldBe "2014-12-16T01:02:03.000Z" + it( + "should clean up machine dates with GMT time zone without subsecond precision into iso format" + ) { + ImageMetadataConverter.cleanDate( + "Tue Dec 16 01:02:03 GMT 2014" + ) shouldBe "2014-12-16T01:02:03.000Z" } - it("should clean up machine dates with valid BST time zone and subsecond precision into iso format") { - ImageMetadataConverter.cleanDate("Sat Aug 16 01:02:03.040 BST 2014") shouldBe "2014-08-16T00:02:03.040Z" + it( + "should clean up machine dates with valid BST time zone and subsecond 
precision into iso format" + ) { + ImageMetadataConverter.cleanDate( + "Sat Aug 16 01:02:03.040 BST 2014" + ) shouldBe "2014-08-16T00:02:03.040Z" } - it("should clean up machine dates with valid BST time zone without subsecond precision into iso format") { - ImageMetadataConverter.cleanDate("Sat Aug 16 01:02:03 BST 2014") shouldBe "2014-08-16T00:02:03.000Z" + it( + "should clean up machine dates with valid BST time zone without subsecond precision into iso format" + ) { + ImageMetadataConverter.cleanDate( + "Sat Aug 16 01:02:03 BST 2014" + ) shouldBe "2014-08-16T00:02:03.000Z" } - it("should clean up machine dates with invalid BST time zone and subsecond precision into iso format") { - ImageMetadataConverter.cleanDate("Tue Dec 16 01:02:03.040 BST 2014") shouldBe "2014-12-16T00:02:03.040Z" + it( + "should clean up machine dates with invalid BST time zone and subsecond precision into iso format" + ) { + ImageMetadataConverter.cleanDate( + "Tue Dec 16 01:02:03.040 BST 2014" + ) shouldBe "2014-12-16T00:02:03.040Z" } - it("should clean up machine dates with invalid BST time zone without subsecond precision into iso format") { - ImageMetadataConverter.cleanDate("Tue Dec 16 01:02:03 BST 2014") shouldBe "2014-12-16T00:02:03.000Z" + it( + "should clean up machine dates with invalid BST time zone without subsecond precision into iso format" + ) { + ImageMetadataConverter.cleanDate( + "Tue Dec 16 01:02:03 BST 2014" + ) shouldBe "2014-12-16T00:02:03.000Z" } - // People in Image - it("should populate peopleInImage field of ImageMetadata from corresponding xmp iptc ext fields") { - val fileMetadata = FileMetadata(Map(), Map(), Map(), Map("Iptc4xmpExt:PersonInImage" -> JsArray(Seq(JsString("person 1"))))) + it( + "should populate peopleInImage field of ImageMetadata from corresponding xmp iptc ext fields" + ) { + val fileMetadata = FileMetadata( + Map(), + Map(), + Map(), + Map("Iptc4xmpExt:PersonInImage" -> JsArray(Seq(JsString("person 1")))) + ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - imageMetadata.peopleInImage should be (Set("person 1")) + imageMetadata.peopleInImage should be(Set("person 1")) } - it("should populate peopleInImage field of ImageMetadata from multiple corresponding people xmp fields") { + it( + "should populate peopleInImage field of ImageMetadata from multiple corresponding people xmp fields" + ) { val fileMetadata = FileMetadata( - Map(), Map(), Map(), - Map("Iptc4xmpExt:PersonInImage" -> - JsArray(Seq( - JsString("person 1"), - JsString("person 2"), - JsString("person 3"))), - "GettyImagesGIFT:Personality" -> - JsArray(Seq(JsString("person 4"))) + Map(), + Map(), + Map(), + Map( + "Iptc4xmpExt:PersonInImage" -> + JsArray( + Seq( + JsString("person 1"), + JsString("person 2"), + JsString("person 3") + ) + ), + "GettyImagesGIFT:Personality" -> + JsArray(Seq(JsString("person 4"))) ) ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - imageMetadata.peopleInImage should be (Set("person 1","person 2","person 3","person 4")) - } - - it("should distinctly populate peopleInImage field of ImageMetadata from multiple corresponding xmp iptc ext fields") { - val fileMetadata = FileMetadata(Map(), Map(), Map(), - Map("Iptc4xmpExt:PersonInImage" -> - JsArray(Seq( - JsString("person 1"), - JsString("person 2"), - JsString("person 2") - )) - )) - val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - imageMetadata.peopleInImage should be (Set("person 1","person 2")) + imageMetadata.peopleInImage should be( + 
Set("person 1", "person 2", "person 3", "person 4") + ) } - it("should distinctly populate peopleInImage field of ImageMetadata from multiple corresponding xmp people fields") { - val fileMetadata = FileMetadata(Map(), Map(), Map(), + it( + "should distinctly populate peopleInImage field of ImageMetadata from multiple corresponding xmp iptc ext fields" + ) { + val fileMetadata = FileMetadata( + Map(), + Map(), + Map(), Map( - "Iptc4xmpExt:PersonInImage" -> JsArray(Seq( - JsString("person 1"), - JsString("person 2") - )), - "GettyImagesGIFT:Personality" -> JsArray(Seq( - JsString("person 2") - )) + "Iptc4xmpExt:PersonInImage" -> + JsArray( + Seq( + JsString("person 1"), + JsString("person 2"), + JsString("person 2") + ) + ) ) ) val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) - imageMetadata.peopleInImage should be (Set("person 1","person 2")) + imageMetadata.peopleInImage should be(Set("person 1", "person 2")) } - private def day(y:Int, M:Int = 1, d:Int = 1, h:Int = 0, m:Int = 0, s:Int = 0, ss:Int = 0) = + it( + "should distinctly populate peopleInImage field of ImageMetadata from multiple corresponding xmp people fields" + ) { + val fileMetadata = FileMetadata( + Map(), + Map(), + Map(), + Map( + "Iptc4xmpExt:PersonInImage" -> JsArray( + Seq( + JsString("person 1"), + JsString("person 2") + ) + ), + "GettyImagesGIFT:Personality" -> JsArray( + Seq( + JsString("person 2") + ) + ) + ) + ) + val imageMetadata = ImageMetadataConverter.fromFileMetadata(fileMetadata) + imageMetadata.peopleInImage should be(Set("person 1", "person 2")) + } + + private def day( + y: Int, + M: Int = 1, + d: Int = 1, + h: Int = 0, + m: Int = 0, + s: Int = 0, + ss: Int = 0 + ) = new DateTime() .withZone(DateTimeZone.UTC) .withYear(y) @@ -421,69 +651,109 @@ class ImageMetadataConverterTest extends FunSpec with Matchers { .withMillisOfSecond(ss) it("should cope with full date formats") { - ImageMetadataConverter.parseRandomDate("2001-02-03T04:05:06.007Z") should be(Some(day(2001, 2, 3, 4, 5, 6, 7))) + ImageMetadataConverter.parseRandomDate( + "2001-02-03T04:05:06.007Z" + ) should be(Some(day(2001, 2, 3, 4, 5, 6, 7))) } it("should cope with offset with space date formats") { - ImageMetadataConverter.parseRandomDate("2001-02-03T04:05:06 +00:00") should be (Some(day(2001, 2, 3, 4, 5, 6))) + ImageMetadataConverter.parseRandomDate( + "2001-02-03T04:05:06 +00:00" + ) should be(Some(day(2001, 2, 3, 4, 5, 6))) } it("should cope with offset without space date formats") { - ImageMetadataConverter.parseRandomDate("2001-02-03T04:05:06+00:00") should be (Some(day(2001, 2, 3, 4, 5, 6))) + ImageMetadataConverter.parseRandomDate( + "2001-02-03T04:05:06+00:00" + ) should be(Some(day(2001, 2, 3, 4, 5, 6))) } it("should cope with no offset date formats") { - ImageMetadataConverter.parseRandomDate("2001-02-03T04:05:06") should be (Some(day(2001, 2, 3, 4, 5, 6))) + ImageMetadataConverter.parseRandomDate("2001-02-03T04:05:06") should be( + Some(day(2001, 2, 3, 4, 5, 6)) + ) } it("should cope with nbo offset, no millis date formats") { - ImageMetadataConverter.parseRandomDate("2001-02-03T04:05:06.007") should be (Some(day(2001, 2, 3, 4, 5, 6, 7))) + ImageMetadataConverter.parseRandomDate("2001-02-03T04:05:06.007") should be( + Some(day(2001, 2, 3, 4, 5, 6, 7)) + ) } it("should cope with long seconds date formats") { - ImageMetadataConverter.parseRandomDate("2001-02-03T04:05.006+00:00") should be (Some(day(2001, 2, 3, 4, 5, 0, 6))) + ImageMetadataConverter.parseRandomDate( + "2001-02-03T04:05.006+00:00" + ) should 
be(Some(day(2001, 2, 3, 4, 5, 0, 6))) } it("should cope with no seconds date formats") { - ImageMetadataConverter.parseRandomDate("2001-02-03T04:05+00:00") should be (Some(day(2001, 2, 3, 4, 5))) + ImageMetadataConverter.parseRandomDate("2001-02-03T04:05+00:00") should be( + Some(day(2001, 2, 3, 4, 5)) + ) } it("should cope with full, textual zone, date formats") { - ImageMetadataConverter.parseRandomDate("Sat Feb 03 04:05:06.007 UTC 2001") should be (Some(day(2001, 2, 3, 4, 5, 6, 7))) + ImageMetadataConverter.parseRandomDate( + "Sat Feb 03 04:05:06.007 UTC 2001" + ) should be(Some(day(2001, 2, 3, 4, 5, 6, 7))) } it("should cope with full, textual zone, no millis date formats") { - ImageMetadataConverter.parseRandomDate("Sat Feb 03 04:05:06 UTC 2001") should be (Some(day(2001, 2, 3, 4, 5, 6))) + ImageMetadataConverter.parseRandomDate( + "Sat Feb 03 04:05:06 UTC 2001" + ) should be(Some(day(2001, 2, 3, 4, 5, 6))) } it("should cope with full, textual zone, non-UTC date formats") { - ImageMetadataConverter.parseRandomDate("Tue Jul 03 04:05:06.007 BST 2001") should be (Some(day(2001, 7, 3, 3, 5, 6, 7))) + ImageMetadataConverter.parseRandomDate( + "Tue Jul 03 04:05:06.007 BST 2001" + ) should be(Some(day(2001, 7, 3, 3, 5, 6, 7))) } - it("should cope with full, textual zone, non-UTC, no millis expected date formats") { - ImageMetadataConverter.parseRandomDate("Tue Jul 03 04:05:06 BST 2001") should be (Some(day(2001, 7, 3, 3, 5, 6))) + it( + "should cope with full, textual zone, non-UTC, no millis expected date formats" + ) { + ImageMetadataConverter.parseRandomDate( + "Tue Jul 03 04:05:06 BST 2001" + ) should be(Some(day(2001, 7, 3, 3, 5, 6))) } it("should cope with just year date formats") { - ImageMetadataConverter.parseRandomDate("2001") should be (Some(day(2001))) + ImageMetadataConverter.parseRandomDate("2001") should be(Some(day(2001))) } it("should cope with year, dash, month date formats") { - ImageMetadataConverter.parseRandomDate("2001-02") should be (Some(day(2001, 2))) + ImageMetadataConverter.parseRandomDate("2001-02") should be( + Some(day(2001, 2)) + ) } it("should cope with year month day date formats") { - ImageMetadataConverter.parseRandomDate("20010203") should be (Some(day(2001, 2, 3))) + ImageMetadataConverter.parseRandomDate("20010203") should be( + Some(day(2001, 2, 3)) + ) } it("should cope with US-style year day month date formats") { - ImageMetadataConverter.parseRandomDate("20012802") should be (Some(day(2001, 2, 28))) + ImageMetadataConverter.parseRandomDate("20012802") should be( + Some(day(2001, 2, 28)) + ) } it("should cope with year month date formats") { - ImageMetadataConverter.parseRandomDate("20012") should be (Some(day(2001, 2))) + ImageMetadataConverter.parseRandomDate("20012") should be( + Some(day(2001, 2)) + ) } it("should cope with year dash month dash day date formats") { - ImageMetadataConverter.parseRandomDate("2001-02-03") should be (Some(day(2001, 2, 3))) + ImageMetadataConverter.parseRandomDate("2001-02-03") should be( + Some(day(2001, 2, 3)) + ) } it("should cope with invalid dates and return None") { ImageMetadataConverter.parseRandomDate("2000-02-31") should be(None) } - it("should refuse future dates, if a 'maximum' date is provided which is before the image date") { + it( + "should refuse future dates, if a 'maximum' date is provided which is before the image date" + ) { val yesterday = ImageMetadataConverter.parseRandomDate("2020-12-31").get - val parsedDate = ImageMetadataConverter.parseRandomDate("2021-01-01", Some(yesterday)) - 
parsedDate.isDefined should be (false) + val parsedDate = + ImageMetadataConverter.parseRandomDate("2021-01-01", Some(yesterday)) + parsedDate.isDefined should be(false) } - it("should accept past dates, if a 'maximum' date is provided which is after the image date") { + it( + "should accept past dates, if a 'maximum' date is provided which is after the image date" + ) { val tomorrow = ImageMetadataConverter.parseRandomDate("2021-01-01").get - val parsedDate = ImageMetadataConverter.parseRandomDate("2020-12-31", Some(tomorrow)) - parsedDate.isDefined should be (true) + val parsedDate = + ImageMetadataConverter.parseRandomDate("2020-12-31", Some(tomorrow)) + parsedDate.isDefined should be(true) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/model/FileMetadataAggregatorTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/model/FileMetadataAggregatorTest.scala index a5ebbf6057..5c7285c501 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/model/FileMetadataAggregatorTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/model/FileMetadataAggregatorTest.scala @@ -79,116 +79,158 @@ class FileMetadataAggregatorTest extends FlatSpec with Matchers { "xmpMM:History[2]/stEvt:action" -> "saved", "xmpMM:InstanceID" -> "xmp.iid:d9500a13-3c27-401c-a2cc-1fd027b0424f", "schema:imageHasSubject" -> "http://id.ukpds.org/Cz0WNho9", - "schema:imageHasSubject/rdf:type" -> "http://id.ukpds.org/schema/Person", + "schema:imageHasSubject/rdf:type" -> "http://id.ukpds.org/schema/Person" ) val actual = FileMetadataAggregator.aggregateMetadataMap(testInput) val expected = Map( - "schema:imageHasSubject" -> JsArray(Seq( - "{'rdf:type':'http://id.ukpds.org/schema/Person'}", - "http://id.ukpds.org/Cz0WNho9" - ).map(JsString)), + "schema:imageHasSubject" -> JsArray( + Seq( + "{'rdf:type':'http://id.ukpds.org/schema/Person'}", + "http://id.ukpds.org/Cz0WNho9" + ).map(JsString) + ), "dc:format" -> JsString("image/png"), "photoshop:ColorMode" -> JsString("3"), - "dc:description" -> JsArray(Seq( - JsString("the xmp description"), - JsArray(Seq( - "{'test:2':'test2'}", - "{'xml:lang':'x-default'}", - "{'test:1':'test1'}", - ).map(JsString)), - )), - "dc:title" -> JsArray(Seq( - JsString("the xmp title"), - JsArray(Seq("{'xml:lang':'x-default'}").map(JsString)), - )), + "dc:description" -> JsArray( + Seq( + JsString("the xmp description"), + JsArray( + Seq( + "{'test:2':'test2'}", + "{'xml:lang':'x-default'}", + "{'test:1':'test1'}" + ).map(JsString) + ) + ) + ), + "dc:title" -> JsArray( + Seq( + JsString("the xmp title"), + JsArray(Seq("{'xml:lang':'x-default'}").map(JsString)) + ) + ), "xmp:MetadataDate" -> JsString("2019-07-04T13:12:26.000Z"), - "xmpMM:DerivedFrom" -> JsArray(Seq( - "{'stRef:instanceID':'xmp.iid:adbc5207-3f5b-4480-9e67-ed2a1871deb9'}", - "{'stRef:documentID':'xmp.did:65d63b5e-a24e-4e51-89bd-6693ce193404'}", - "{'stRef:originalDocumentID':'xmp.did:65d63b5e-a24e-4e51-89bd-6693ce193404'}", - ).map(JsString)), + "xmpMM:DerivedFrom" -> JsArray( + Seq( + "{'stRef:instanceID':'xmp.iid:adbc5207-3f5b-4480-9e67-ed2a1871deb9'}", + "{'stRef:documentID':'xmp.did:65d63b5e-a24e-4e51-89bd-6693ce193404'}", + "{'stRef:originalDocumentID':'xmp.did:65d63b5e-a24e-4e51-89bd-6693ce193404'}" + ).map(JsString) + ), "exif:PixelXDimension" -> JsString("2000"), "photoshop:ICCProfile" -> JsString("sRGB IEC61966-2.1"), "xmp:ModifyDate" -> JsString("2019-07-04T13:12:26.000Z"), - "xmpMM:OriginalDocumentID" -> JsString("xmp.did:65d63b5e-a24e-4e51-89bd-6693ce193404"), + "xmpMM:OriginalDocumentID" -> 
JsString( + "xmp.did:65d63b5e-a24e-4e51-89bd-6693ce193404" + ), "tiff:YResolution" -> JsString("1181100/10000"), "exif:PixelYDimension" -> JsString("2000"), "xmp:CreateDate" -> JsString("2018-02-06T16:36:48.000Z"), - "dc:rights" -> JsArray(Seq( - JsString("B814F57A-329B-441B-8564-F6D3A0973F14"), - JsArray(Seq( - "{'xml:lang':'x-default'}" - ).map(JsString)), - )), + "dc:rights" -> JsArray( + Seq( + JsString("B814F57A-329B-441B-8564-F6D3A0973F14"), + JsArray( + Seq( + "{'xml:lang':'x-default'}" + ).map(JsString) + ) + ) + ), "xmp:CreatorTool" -> JsString("Adobe Photoshop CC 2019 (Macintosh)"), - "photoshop:DocumentAncestors" -> JsArray(Seq( - "0", - "00116C18A16B635936270C3F4DD02EF9", - "0024E0DBC7EAA19ECC90B9B2F5F1E071", - "00A4B614125CF2B9AC52D7A1198EE974" - ).map(JsString)), + "photoshop:DocumentAncestors" -> JsArray( + Seq( + "0", + "00116C18A16B635936270C3F4DD02EF9", + "0024E0DBC7EAA19ECC90B9B2F5F1E071", + "00A4B614125CF2B9AC52D7A1198EE974" + ).map(JsString) + ), "2darr:test" -> JsArray( Seq( JsArray(Seq("a", "b", "c").map(JsString)), JsArray(Seq("a").map(JsString)), - JsArray(Seq("a", "b", "c", "d").map(JsString)), + JsArray(Seq("a", "b", "c", "d").map(JsString)) ) ), - "test:nested-object" -> JsArray(Seq( - JsArray(Seq( - "{'prop':['0','1','2']}", - "{'prop2':['0']}", - ).map(JsString)), - JsArray(Seq( - "{'prop':['a']}", - ).map(JsString)) - )), - "xmpMM:DocumentID" -> JsString("adobe:docid:photoshop:b55c9154-805d-a14a-a383-6b3945315d73"), + "test:nested-object" -> JsArray( + Seq( + JsArray( + Seq( + "{'prop':['0','1','2']}", + "{'prop2':['0']}" + ).map(JsString) + ), + JsArray( + Seq( + "{'prop':['a']}" + ).map(JsString) + ) + ) + ), + "xmpMM:DocumentID" -> JsString( + "adobe:docid:photoshop:b55c9154-805d-a14a-a383-6b3945315d73" + ), "tiff:Orientation" -> JsString("1"), "dc:creator" -> JsArray(Seq(JsString("tmp"))), "exif:ColorSpace" -> JsString("1"), - "xmpMM:History" -> JsArray(Seq( - JsArray(Seq( - "{'stEvt:softwareAgent':'Adobe Photoshop CC (Macintosh)'}", - "{'stEvt:action':'created'}", - "{'stEvt:instanceID':'xmp.iid:65d63b5e-a24e-4e51-89bd-6693ce193404'}", - "{'stEvt:when':'2018-02-06T16:36:48Z'}", - ).map(JsString)), - JsArray(Seq( - "{'stEvt:action':'saved'}", - "{'stEvt:softwareAgent':'Adobe Photoshop CC (Macintosh)'}", - "{'stEvt:instanceID':'xmp.iid:f9859689-1601-43ae-99a2-9bfb3c159ded'}", - "{'stEvt:changed':'/'}", - "{'stEvt:when':'2018-02-06T16:37:53Z'}", - ).map(JsString)), - JsArray(Seq( - "{'stEvt:action':'saved'}", - "{'stEvt:when':'2019-07-04T14:12:26+01:00'}", - "{'stEvt:softwareAgent':'Adobe Photoshop CC 2019 (Macintosh)'}", - "{'stEvt:changed':'/'}", - "{'stEvt:instanceID':'xmp.iid:adbc5207-3f5b-4480-9e67-ed2a1871deb9'}", - ).map(JsString)), - JsArray(Seq( - "{'stEvt:parameters':'from application/vnd.adobe.photoshop to image/png'}", - "{'stEvt:action':'converted'}", - ).map(JsString)), - JsArray(Seq( - "{'stEvt:parameters':'converted from application/vnd.adobe.photoshop to image/png'}", - "{'stEvt:action':'derived'}", - ).map(JsString)), - JsArray(Seq( - "{'stEvt:changed':'/'}", - "{'stEvt:softwareAgent':'Adobe Photoshop CC 2019 (Macintosh)'}", - "{'stEvt:when':'2019-07-04T14:12:26+01:00'}", - "{'stEvt:instanceID':'xmp.iid:d9500a13-3c27-401c-a2cc-1fd027b0424f'}", - "{'stEvt:action':'saved'}", - ).map(JsString)) - )), + "xmpMM:History" -> JsArray( + Seq( + JsArray( + Seq( + "{'stEvt:softwareAgent':'Adobe Photoshop CC (Macintosh)'}", + "{'stEvt:action':'created'}", + "{'stEvt:instanceID':'xmp.iid:65d63b5e-a24e-4e51-89bd-6693ce193404'}", + 
"{'stEvt:when':'2018-02-06T16:36:48Z'}" + ).map(JsString) + ), + JsArray( + Seq( + "{'stEvt:action':'saved'}", + "{'stEvt:softwareAgent':'Adobe Photoshop CC (Macintosh)'}", + "{'stEvt:instanceID':'xmp.iid:f9859689-1601-43ae-99a2-9bfb3c159ded'}", + "{'stEvt:changed':'/'}", + "{'stEvt:when':'2018-02-06T16:37:53Z'}" + ).map(JsString) + ), + JsArray( + Seq( + "{'stEvt:action':'saved'}", + "{'stEvt:when':'2019-07-04T14:12:26+01:00'}", + "{'stEvt:softwareAgent':'Adobe Photoshop CC 2019 (Macintosh)'}", + "{'stEvt:changed':'/'}", + "{'stEvt:instanceID':'xmp.iid:adbc5207-3f5b-4480-9e67-ed2a1871deb9'}" + ).map(JsString) + ), + JsArray( + Seq( + "{'stEvt:parameters':'from application/vnd.adobe.photoshop to image/png'}", + "{'stEvt:action':'converted'}" + ).map(JsString) + ), + JsArray( + Seq( + "{'stEvt:parameters':'converted from application/vnd.adobe.photoshop to image/png'}", + "{'stEvt:action':'derived'}" + ).map(JsString) + ), + JsArray( + Seq( + "{'stEvt:changed':'/'}", + "{'stEvt:softwareAgent':'Adobe Photoshop CC 2019 (Macintosh)'}", + "{'stEvt:when':'2019-07-04T14:12:26+01:00'}", + "{'stEvt:instanceID':'xmp.iid:d9500a13-3c27-401c-a2cc-1fd027b0424f'}", + "{'stEvt:action':'saved'}" + ).map(JsString) + ) + ) + ), "tiff:ResolutionUnit" -> JsString("3"), "tiff:XResolution" -> JsString("1181100/10000"), - "xmpMM:InstanceID" -> JsString("xmp.iid:d9500a13-3c27-401c-a2cc-1fd027b0424f") + "xmpMM:InstanceID" -> JsString( + "xmp.iid:d9500a13-3c27-401c-a2cc-1fd027b0424f" + ) ) actual shouldEqual expected diff --git a/common-lib/src/test/scala/com/gu/mediaservice/model/FileMetadataTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/model/FileMetadataTest.scala index 2c4b5444d7..c13d6fa4b0 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/model/FileMetadataTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/model/FileMetadataTest.scala @@ -4,50 +4,147 @@ import org.scalatest.prop.{Checkers, PropertyChecks} import org.scalatest.{FreeSpec, Matchers} import play.api.libs.json.Json -class FileMetadataTest extends FreeSpec with Matchers with Checkers with PropertyChecks { +class FileMetadataTest + extends FreeSpec + with Matchers + with Checkers + with PropertyChecks { "Dehydrate a non-empty object" - { "Leave all short values alone" in { - val fm = new FileMetadata(Map(), Map(), Map(), Map(), Map(("hello" -> "goodbye")), Map(), None, Map()) + val fm = new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map(("hello" -> "goodbye")), + Map(), + None, + Map() + ) val json = Json.toJson(fm).toString() - json should be ("{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{\"hello\":\"goodbye\"},\"getty\":{},\"colourModelInformation\":{}}") + json should be( + "{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{\"hello\":\"goodbye\"},\"getty\":{},\"colourModelInformation\":{}}" + ) } "Remove a single long value" in { val A5000 = (1 to 5000).toList.mkString(",") - val fm = new FileMetadata(Map(), Map(), Map(), Map(), Map("hello" -> "goodbye", "A5000" -> A5000), Map(), None, Map()) + val fm = new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map("hello" -> "goodbye", "A5000" -> A5000), + Map(), + None, + Map() + ) val json = Json.toJson(fm).toString() - json should be ("{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{\"hello\":\"goodbye\",\"removedFields\":\"A5000\"},\"getty\":{},\"colourModelInformation\":{}}") + json should be( + 
"{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{\"hello\":\"goodbye\",\"removedFields\":\"A5000\"},\"getty\":{},\"colourModelInformation\":{}}" + ) } "Remove multiple long values" in { val A5000 = (1 to 5000).toList.mkString(",") val B5000 = (1 to 10000).toList.mkString(",") - val fm = new FileMetadata(Map(), Map(), Map(), Map(), Map("hello" -> "goodbye", "A5000" -> A5000, "B5000" -> B5000), Map(), None, Map()) + val fm = new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map("hello" -> "goodbye", "A5000" -> A5000, "B5000" -> B5000), + Map(), + None, + Map() + ) val json = Json.toJson(fm).toString() - json should be ("{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{\"hello\":\"goodbye\",\"removedFields\":\"A5000, B5000\"},\"getty\":{},\"colourModelInformation\":{}}") + json should be( + "{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{\"hello\":\"goodbye\",\"removedFields\":\"A5000, B5000\"},\"getty\":{},\"colourModelInformation\":{}}" + ) } } "Dehydrate and rehydrate a non-empty object" - { "Leave all short values alone" in { - val fm = new FileMetadata(Map(), Map(), Map(), Map(), Map(("hello" -> "goodbye")), Map(), None, Map()) + val fm = new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map(("hello" -> "goodbye")), + Map(), + None, + Map() + ) val json = Json.toJson(fm).toString() val fmRehydrated = Json.fromJson[FileMetadata](Json.parse(json)).get - fmRehydrated should be (new FileMetadata(Map(), Map(), Map(), Map(), Map("hello" -> "goodbye"), Map(), None, Map())) + fmRehydrated should be( + new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map("hello" -> "goodbye"), + Map(), + None, + Map() + ) + ) } "Remove a single long value" in { val A5000 = (1 to 5000).toList.mkString(",") - val fm = new FileMetadata(Map(), Map(), Map(), Map(), Map("hello" -> "goodbye", "A5000" -> A5000), Map(), None, Map()) + val fm = new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map("hello" -> "goodbye", "A5000" -> A5000), + Map(), + None, + Map() + ) val json = Json.toJson(fm).toString() val fmRehydrated = Json.fromJson[FileMetadata](Json.parse(json)).get - fmRehydrated should be ( new FileMetadata(Map(), Map(), Map(), Map(), Map("hello" -> "goodbye", "removedFields" -> "A5000"), Map(), None, Map())) + fmRehydrated should be( + new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map("hello" -> "goodbye", "removedFields" -> "A5000"), + Map(), + None, + Map() + ) + ) } "Remove multiple long values" in { val A5000 = (1 to 5000).toList.mkString(",") val B5000 = (1 to 10000).toList.mkString(",") - val fm = new FileMetadata(Map(), Map(), Map(), Map(), Map("hello" -> "goodbye", "A5000" -> A5000, "B5000" -> B5000), Map(), None, Map()) + val fm = new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map("hello" -> "goodbye", "A5000" -> A5000, "B5000" -> B5000), + Map(), + None, + Map() + ) val json = Json.toJson(fm).toString() val fmRehydrated = Json.fromJson[FileMetadata](Json.parse(json)).get - fmRehydrated should be ( new FileMetadata(Map(), Map(), Map(), Map(), Map("hello" -> "goodbye", "removedFields" -> "A5000, B5000"), Map(), None, Map())) + fmRehydrated should be( + new FileMetadata( + Map(), + Map(), + Map(), + Map(), + Map("hello" -> "goodbye", "removedFields" -> "A5000, B5000"), + Map(), + None, + Map() + ) + ) } } @@ -55,15 +152,19 @@ class FileMetadataTest extends FreeSpec with Matchers with Checkers with Propert "Dehydrate" in { val fm = new FileMetadata() val json = Json.toJson(fm).toString() - json should be 
("{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{},\"getty\":{},\"colourModelInformation\":{}}") + json should be( + "{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{},\"getty\":{},\"colourModelInformation\":{}}" + ) } "Rehydrate" in { - val json = "{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{},\"getty\":{},\"colourModelInformation\":{}}" + val json = + "{\"iptc\":{},\"exif\":{},\"exifSub\":{},\"xmp\":{},\"icc\":{},\"getty\":{},\"colourModelInformation\":{}}" val fm = Json.fromJson[FileMetadata](Json.parse(json)).get - fm should be (new FileMetadata(Map(), Map(), Map(), Map(), Map(), Map(), None, Map())) + fm should be( + new FileMetadata(Map(), Map(), Map(), Map(), Map(), Map(), None, Map()) + ) } } - } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/model/ImageTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/model/ImageTest.scala index 52daed65fb..70c8e9cdc5 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/model/ImageTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/model/ImageTest.scala @@ -3,14 +3,24 @@ package com.gu.mediaservice.model import java.net.URI import java.util.UUID -import com.gu.mediaservice.model.leases.{AllowSyndicationLease, DenySyndicationLease, LeasesByMedia, MediaLease} +import com.gu.mediaservice.model.leases.{ + AllowSyndicationLease, + DenySyndicationLease, + LeasesByMedia, + MediaLease +} import com.gu.mediaservice.model.usage._ import org.joda.time.DateTime import org.scalatest.{FunSpec, Matchers} class ImageTest extends FunSpec with Matchers { - def createImage(id: String = UUID.randomUUID().toString, usages: List[Usage] = List(), leases: Option[LeasesByMedia] = None, syndicationRights: Option[SyndicationRights] = None): Image = { + def createImage( + id: String = UUID.randomUUID().toString, + usages: List[Usage] = List(), + leases: Option[LeasesByMedia] = None, + syndicationRights: Option[SyndicationRights] = None + ): Image = { Image( id = id, uploadTime = DateTime.now(), @@ -29,12 +39,15 @@ class ImageTest extends FunSpec with Matchers { optimisedPng = None, fileMetadata = FileMetadata(), userMetadata = None, - metadata = ImageMetadata(dateTaken = None, title = Some(s"Test image $id"), keywords = List()), + metadata = ImageMetadata( + dateTaken = None, + title = Some(s"Test image $id"), + keywords = List() + ), originalMetadata = ImageMetadata(), usageRights = StaffPhotographer("T. Hanks", "The Guardian"), originalUsageRights = StaffPhotographer("T. 
Hanks", "The Guardian"), exports = Nil, - syndicationRights = syndicationRights, usages = usages, leases = leases.getOrElse(LeasesByMedia.build(Nil)) @@ -63,8 +76,10 @@ class ImageTest extends FunSpec with Matchers { DateTime.now() ) - val rightsAcquired = SyndicationRights(None, Nil, List(Right("rights-code", Some(true), Nil))) - val noRightsAcquired = SyndicationRights(None, Nil, List(Right("rights-code", Some(false), Nil))) + val rightsAcquired = + SyndicationRights(None, Nil, List(Right("rights-code", Some(true), Nil))) + val noRightsAcquired = + SyndicationRights(None, Nil, List(Right("rights-code", Some(false), Nil))) describe("Image syndication status") { it("should be UnsuitableForSyndication by default") { @@ -77,7 +92,9 @@ class ImageTest extends FunSpec with Matchers { image.syndicationStatus shouldBe UnsuitableForSyndication } - it("should be AwaitingReviewForSyndication if syndication rights are acquired") { + it( + "should be AwaitingReviewForSyndication if syndication rights are acquired" + ) { val image = createImage( syndicationRights = Some(rightsAcquired) ) @@ -85,7 +102,9 @@ class ImageTest extends FunSpec with Matchers { image.syndicationStatus shouldBe AwaitingReviewForSyndication } - it("should be UnsuitableForSyndication if syndication rights are not acquired") { + it( + "should be UnsuitableForSyndication if syndication rights are not acquired" + ) { val image = createImage( syndicationRights = Some(noRightsAcquired) ) @@ -102,13 +121,15 @@ class ImageTest extends FunSpec with Matchers { ) val leaseByMedia = LeasesByMedia.build( - leases = List(MediaLease( - id = None, - leasedBy = None, - access = AllowSyndicationLease, - notes = None, - mediaId = imageId - )) + leases = List( + MediaLease( + id = None, + leasedBy = None, + access = AllowSyndicationLease, + notes = None, + mediaId = imageId + ) + ) ) val image = createImage( @@ -135,16 +156,22 @@ class ImageTest extends FunSpec with Matchers { } } - it("should be QueuedForSyndication if there is an allow syndication lease and no syndication usage") { + it( + "should be QueuedForSyndication if there is an allow syndication lease and no syndication usage" + ) { val imageId = UUID.randomUUID().toString - val leaseByMedia = LeasesByMedia.build(leases = List(MediaLease( - id = None, - leasedBy = None, - access = AllowSyndicationLease, - notes = None, - mediaId = imageId - ))) + val leaseByMedia = LeasesByMedia.build(leases = + List( + MediaLease( + id = None, + leasedBy = None, + access = AllowSyndicationLease, + notes = None, + mediaId = imageId + ) + ) + ) val usages = List( digitalUsage @@ -160,17 +187,21 @@ class ImageTest extends FunSpec with Matchers { image.syndicationStatus shouldBe QueuedForSyndication } - it("should be BlockedForSyndication if there is a deny syndication lease and no syndication usage") { + it( + "should be BlockedForSyndication if there is a deny syndication lease and no syndication usage" + ) { val imageId = UUID.randomUUID().toString val leaseByMedia = LeasesByMedia.build( - leases = List(MediaLease( - id = None, - leasedBy = None, - access = DenySyndicationLease, - notes = None, - mediaId = imageId - )) + leases = List( + MediaLease( + id = None, + leasedBy = None, + access = DenySyndicationLease, + notes = None, + mediaId = imageId + ) + ) ) val usages = List( diff --git a/common-lib/src/test/scala/com/gu/mediaservice/model/MimeTypeTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/model/MimeTypeTest.scala index 01bf4a0322..7658f4b5fc 100644 --- 
a/common-lib/src/test/scala/com/gu/mediaservice/model/MimeTypeTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/model/MimeTypeTest.scala @@ -5,51 +5,56 @@ import play.api.libs.json._ class MimeTypeTest extends FunSpec with Matchers { it("should construct a mime type from a known string") { - MimeType("image/jpeg") should be (Jpeg) - MimeType("image/png") should be (Png) - MimeType("image/tiff") should be (Tiff) + MimeType("image/jpeg") should be(Jpeg) + MimeType("image/png") should be(Png) + MimeType("image/tiff") should be(Tiff) } it("should construct a mime type from a legacy string") { - MimeType("jpg") should be (Jpeg) - MimeType("png") should be (Png) + MimeType("jpg") should be(Jpeg) + MimeType("png") should be(Png) } - it("should raise an UnsupportedMimeTypeException with an unsupported mime type") { - an [UnsupportedMimeTypeException] should be thrownBy MimeType("audio/mp3") + it( + "should raise an UnsupportedMimeTypeException with an unsupported mime type" + ) { + an[UnsupportedMimeTypeException] should be thrownBy MimeType("audio/mp3") } it("should be able to go to a string and back") { val mimeTypeString = Jpeg.toString - mimeTypeString should be ("image/jpeg") - MimeType(mimeTypeString) should be (Jpeg) + mimeTypeString should be("image/jpeg") + MimeType(mimeTypeString) should be(Jpeg) } it("should have a name") { - Jpeg.name should be ("image/jpeg") - Png.name should be ("image/png") - Tiff.name should be ("image/tiff") + Jpeg.name should be("image/jpeg") + Png.name should be("image/png") + Tiff.name should be("image/tiff") } it("should have a file extension") { - Jpeg.fileExtension should be (".jpg") - Png.fileExtension should be (".png") - Tiff.fileExtension should be (".tiff") + Jpeg.fileExtension should be(".jpg") + Png.fileExtension should be(".png") + Tiff.fileExtension should be(".tiff") } it("should serialise to json") { - Json.toJson(Jpeg) should be (JsString("image/jpeg")) - Json.toJson(Png) should be (JsString("image/png")) - Json.toJson(Tiff) should be (JsString("image/tiff")) + Json.toJson(Jpeg) should be(JsString("image/jpeg")) + Json.toJson(Png) should be(JsString("image/png")) + Json.toJson(Tiff) should be(JsString("image/tiff")) } it("should deserialise from json") { - JsString("image/jpeg").as[MimeType] should be (Jpeg) - JsString("image/png").as[MimeType] should be (Png) - JsString("image/tiff").as[MimeType] should be (Tiff) + JsString("image/jpeg").as[MimeType] should be(Jpeg) + JsString("image/png").as[MimeType] should be(Png) + JsString("image/tiff").as[MimeType] should be(Tiff) } - it("should raise an UnsupportedMimeTypeException when deserialising an unsupported mime type") { - an [UnsupportedMimeTypeException] should be thrownBy JsString("audio/mp3").as[MimeType] + it( + "should raise an UnsupportedMimeTypeException when deserialising an unsupported mime type" + ) { + an[UnsupportedMimeTypeException] should be thrownBy JsString("audio/mp3") + .as[MimeType] } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/model/PropertyTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/model/PropertyTest.scala index 0373983060..35dae53520 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/model/PropertyTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/model/PropertyTest.scala @@ -8,13 +8,13 @@ class PropertyTest extends FunSpec with Matchers { val property = Property("foo", None, None) val actual = Json.stringify(Json.toJson(property)) val expected = """{"propertyCode":"foo"}""" - actual should be (expected) + 
actual should be(expected) } it("should write optional fields that have a value") { val property = Property("foo", None, Some("bar")) val actual = Json.stringify(Json.toJson(property)) val expected = """{"propertyCode":"foo","value":"bar"}""" - actual should be (expected) + actual should be(expected) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/model/SyndicationRightsTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/model/SyndicationRightsTest.scala index 44bc26b8da..ea104c3daa 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/model/SyndicationRightsTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/model/SyndicationRightsTest.scala @@ -17,8 +17,9 @@ class SyndicationRightsTest extends FunSpec with Matchers { |""".stripMargin val parsedRightsJson = Json.parse(serialisedRights) println(parsedRightsJson) - val rights: SyndicationRights = Json.fromJson[SyndicationRights](parsedRightsJson).get - rights.isInferred should be (false) + val rights: SyndicationRights = + Json.fromJson[SyndicationRights](parsedRightsJson).get + rights.isInferred should be(false) } it("should deserialise with all fields except published") { @@ -32,8 +33,9 @@ class SyndicationRightsTest extends FunSpec with Matchers { |""".stripMargin val parsedRightsJson = Json.parse(serialisedRights) println(parsedRightsJson) - val rights: SyndicationRights = Json.fromJson[SyndicationRights](parsedRightsJson).get - rights.isInferred should be (false) + val rights: SyndicationRights = + Json.fromJson[SyndicationRights](parsedRightsJson).get + rights.isInferred should be(false) } it("should deserialise with all fields except isInferred") { @@ -47,8 +49,9 @@ class SyndicationRightsTest extends FunSpec with Matchers { |""".stripMargin val parsedRightsJson = Json.parse(serialisedRights) println(parsedRightsJson) - val rights: SyndicationRights = Json.fromJson[SyndicationRights](parsedRightsJson).get - rights.isInferred should be (false) + val rights: SyndicationRights = + Json.fromJson[SyndicationRights](parsedRightsJson).get + rights.isInferred should be(false) } } diff --git a/common-lib/src/test/scala/com/gu/mediaservice/model/UsageRightsTest.scala b/common-lib/src/test/scala/com/gu/mediaservice/model/UsageRightsTest.scala index 1c51f9f0fd..dd25f4bb7d 100644 --- a/common-lib/src/test/scala/com/gu/mediaservice/model/UsageRightsTest.scala +++ b/common-lib/src/test/scala/com/gu/mediaservice/model/UsageRightsTest.scala @@ -3,7 +3,6 @@ package com.gu.mediaservice.model import org.scalatest.{FunSpec, Matchers} import play.api.libs.json._ - case class TestImage(name: String, usageRights: UsageRights) object TestImage { implicit val jsonReads: Reads[TestImage] = Json.reads[TestImage] @@ -13,29 +12,30 @@ object TestImage { class UsageRightsTest extends FunSpec with Matchers { val invalidCategory = "animated-gif" - val invalidJson = Json.parse(s"""{ "category": "$invalidCategory", "fps": "∞" }""") + val invalidJson = + Json.parse(s"""{ "category": "$invalidCategory", "fps": "∞" }""") - it ("should serialise to JSON correctly") { + it("should serialise to JSON correctly") { val supplier = "Getty Images" val suppliersCollection = "AFP" val restrictions = Some("Don't use this") - val usageRights: UsageRights = Agency(supplier, Some(suppliersCollection), restrictions = restrictions) + val usageRights: UsageRights = + Agency(supplier, Some(suppliersCollection), restrictions = restrictions) val json = Json.toJson(usageRights) - (json \ "category").as[String] should be (Agency.category) - (json \ 
"supplier").as[String] should be (supplier) - (json \ "suppliersCollection").as[String] should be (suppliersCollection) - (json \ "restrictions").asOpt[String] should be (restrictions) + (json \ "category").as[String] should be(Agency.category) + (json \ "supplier").as[String] should be(supplier) + (json \ "suppliersCollection").as[String] should be(suppliersCollection) + (json \ "restrictions").asOpt[String] should be(restrictions) } - it ("should deserialise from JSON correctly") { + it("should deserialise from JSON correctly") { val supplier = "Getty Images" val suppliersCollection = "AFP" val category = "agency" - val json = Json.parse( - s""" + val json = Json.parse(s""" { "category": "agency", "supplier": "$supplier", @@ -45,61 +45,66 @@ class UsageRightsTest extends FunSpec with Matchers { val usageRights = json.as[UsageRights] - usageRights should be (Agency(supplier, Some(suppliersCollection))) + usageRights should be(Agency(supplier, Some(suppliersCollection))) } - // we have a slight edge case where NoRights is symbolised by `{}` - it ("should deserialise to NoRights from {}") { + it("should deserialise to NoRights from {}") { val json = Json.parse("{}") val usageRights = json.as[UsageRights] - usageRights should be (NoRights) + usageRights should be(NoRights) } - it ("should serialise to {} from NoRights") { + it("should serialise to {} from NoRights") { val jsonString = Json.toJson(NoRights).toString() - jsonString should be ("{}") + jsonString should be("{}") } - // invalid JSON - it ("should return None if it cannot deserialise the JSON") { + it("should return None if it cannot deserialise the JSON") { val usageRights = invalidJson.asOpt[UsageRights] - usageRights should be (None) + usageRights should be(None) } - it ("should through a `JsResultException` if you try to deserialise thr JSON with `as`") { + it( + "should through a `JsResultException` if you try to deserialise thr JSON with `as`" + ) { val jsError = intercept[JsResultException] { invalidJson.as[UsageRights] } jsError.errors.headOption.foreach { case (path, errors) => - errors.head.message should be (s"No such usage rights category: $invalidCategory") + errors.head.message should be( + s"No such usage rights category: $invalidCategory" + ) } } - it ("should deserialise as a property of a case class") { + it("should deserialise as a property of a case class") { val noRights = TestImage("test", NoRights) val agency = TestImage("test", Agency("Getty Images")) - (Json.toJson(noRights) \ "usageRights").get should be (NoRights.jsonVal) - (Json.toJson(agency) \ "usageRights" \ "supplier").as[String] should be ("Getty Images") + (Json.toJson(noRights) \ "usageRights").get should be(NoRights.jsonVal) + (Json.toJson(agency) \ "usageRights" \ "supplier").as[String] should be( + "Getty Images" + ) } - it ("should serialise as a property of a case class") { - val noRightsJson = Json.parse("""{ "name": "Test Image", "usageRights": {} }""") - val agencyJson = Json.parse("""{ "name": "Test Image", "usageRights": { "category": "agency", "supplier": "Getty Images" } }""") + it("should serialise as a property of a case class") { + val noRightsJson = + Json.parse("""{ "name": "Test Image", "usageRights": {} }""") + val agencyJson = Json.parse( + """{ "name": "Test Image", "usageRights": { "category": "agency", "supplier": "Getty Images" } }""" + ) val noRightsImage = noRightsJson.as[TestImage] - noRightsImage.usageRights should be (NoRights) + noRightsImage.usageRights should be(NoRights) val agencyImage = agencyJson.as[TestImage] 
- agencyImage.usageRights should be (Agency("Getty Images")) + agencyImage.usageRights should be(Agency("Getty Images")) } } - - diff --git a/cropper/app/CropperComponents.scala b/cropper/app/CropperComponents.scala index fffae5c6bc..b833b3407c 100644 --- a/cropper/app/CropperComponents.scala +++ b/cropper/app/CropperComponents.scala @@ -6,17 +6,30 @@ import lib.{CropStore, CropperConfig, Crops, Notifications} import play.api.ApplicationLoader.Context import router.Routes -class CropperComponents(context: Context) extends GridComponents(context, new CropperConfig(_)) { +class CropperComponents(context: Context) + extends GridComponents(context, new CropperConfig(_)) { final override val buildInfo = utils.buildinfo.BuildInfo val store = new CropStore(config) - val imageOperations = new ImageOperations(context.environment.rootPath.getAbsolutePath) + val imageOperations = new ImageOperations( + context.environment.rootPath.getAbsolutePath + ) val crops = new Crops(config, store, imageOperations) val notifications = new Notifications(config) - val controller = new CropperController(auth, crops, store, notifications, config, controllerComponents, wsClient) - val permissionsAwareManagement = new ManagementWithPermissions(controllerComponents, controller, buildInfo) + val controller = new CropperController( + auth, + crops, + store, + notifications, + config, + controllerComponents, + wsClient + ) + val permissionsAwareManagement = + new ManagementWithPermissions(controllerComponents, controller, buildInfo) - override lazy val router = new Routes(httpErrorHandler, controller, permissionsAwareManagement) + override lazy val router = + new Routes(httpErrorHandler, controller, permissionsAwareManagement) } diff --git a/cropper/app/controllers/CropperController.scala b/cropper/app/controllers/CropperController.scala index 6303d78664..0b7b950cb4 100644 --- a/cropper/app/controllers/CropperController.scala +++ b/cropper/app/controllers/CropperController.scala @@ -20,16 +20,23 @@ import java.net.URI import scala.concurrent.{ExecutionContext, Future} import scala.util.control.NonFatal - -case object InvalidSource extends Exception("Invalid source URI, not a media API URI") +case object InvalidSource + extends Exception("Invalid source URI, not a media API URI") case object ImageNotFound extends Exception("No such image found") case object ApiRequestFailed extends Exception("Failed to fetch the source") -class CropperController(auth: Authentication, crops: Crops, store: CropStore, notifications: Notifications, - override val config: CropperConfig, - override val controllerComponents: ControllerComponents, - ws: WSClient)(implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers with PermissionsHandler { +class CropperController( + auth: Authentication, + crops: Crops, + store: CropStore, + notifications: Notifications, + override val config: CropperConfig, + override val controllerComponents: ControllerComponents, + ws: WSClient +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers + with PermissionsHandler { // Stupid name clash between Argo and Play import com.gu.mediaservice.lib.argo.model.{Action => ArgoAction} @@ -49,47 +56,70 @@ class CropperController(auth: Authentication, crops: Crops, store: CropStore, no val user = httpRequest.user val onBehalfOfPrincipal = auth.getOnBehalfOfPrincipal(user) - executeRequest(exportRequest, user, onBehalfOfPrincipal).map { case (imageId, export) => - val cropJson = Json.toJson(export).as[JsObject] - val 
updateImageExports = "update-image-exports" - val updateMessage = UpdateMessage(subject = updateImageExports, id = Some(imageId), crops = Some(Seq(export))) - notifications.publish(updateMessage) + executeRequest(exportRequest, user, onBehalfOfPrincipal).map { + case (imageId, export) => + val cropJson = Json.toJson(export).as[JsObject] + val updateImageExports = "update-image-exports" + val updateMessage = UpdateMessage( + subject = updateImageExports, + id = Some(imageId), + crops = Some(Seq(export)) + ) + notifications.publish(updateMessage) - Ok(cropJson).as(ArgoMediaType) + Ok(cropJson).as(ArgoMediaType) } recover { - case InvalidSource => respondError(BadRequest, "invalid-source", InvalidSource.getMessage) - case ImageNotFound => respondError(BadRequest, "image-not-found", ImageNotFound.getMessage) - case InvalidImage => respondError(BadRequest, "invalid-image", InvalidImage.getMessage) - case MissingSecureSourceUrl => respondError(BadRequest, "no-source-image", MissingSecureSourceUrl.getMessage) - case InvalidCropRequest => respondError(BadRequest, "invalid-crop", InvalidCropRequest.getMessage) - case ApiRequestFailed => respondError(BadGateway, "api-failed", ApiRequestFailed.getMessage) + case InvalidSource => + respondError(BadRequest, "invalid-source", InvalidSource.getMessage) + case ImageNotFound => + respondError(BadRequest, "image-not-found", ImageNotFound.getMessage) + case InvalidImage => + respondError(BadRequest, "invalid-image", InvalidImage.getMessage) + case MissingSecureSourceUrl => + respondError( + BadRequest, + "no-source-image", + MissingSecureSourceUrl.getMessage + ) + case InvalidCropRequest => + respondError( + BadRequest, + "invalid-crop", + InvalidCropRequest.getMessage + ) + case ApiRequestFailed => + respondError(BadGateway, "api-failed", ApiRequestFailed.getMessage) } - } recoverTotal { - case e => - val validationErrors = for { - (_, errors) <- e.errors - errorDetails <- errors - } yield errorDetails.message - val errorMessage = validationErrors.headOption getOrElse "Invalid export request" - Future.successful(respondError(BadRequest, "bad-request", errorMessage)) + } recoverTotal { case e => + val validationErrors = for { + (_, errors) <- e.errors + errorDetails <- errors + } yield errorDetails.message + val errorMessage = + validationErrors.headOption getOrElse "Invalid export request" + Future.successful(respondError(BadRequest, "bad-request", errorMessage)) } } def getCrops(id: String) = auth.async { httpRequest => - store.listCrops(id) map (_.toList) map { crops => val deleteCropsAction = - ArgoAction("delete-crops", URI.create(s"${config.rootUri}/crops/$id"), "DELETE") + ArgoAction( + "delete-crops", + URI.create(s"${config.rootUri}/crops/$id"), + "DELETE" + ) val links = (for { crop <- crops.headOption link = Link("image", crop.specification.uri) } yield List(link)) getOrElse List() - val canDeleteCrops = hasPermission(httpRequest.user, Permissions.DeleteCrops) + val canDeleteCrops = + hasPermission(httpRequest.user, Permissions.DeleteCrops) - if(canDeleteCrops && crops.nonEmpty) { + if (canDeleteCrops && crops.nonEmpty) { respond(crops, links, List(deleteCropsAction)) } else { respond(crops, links) @@ -98,22 +128,34 @@ class CropperController(auth: Authentication, crops: Crops, store: CropStore, no } def deleteCrops(id: String) = auth.async { httpRequest => - val canDeleteCrops = hasPermission(httpRequest.user, Permissions.DeleteCrops) + val canDeleteCrops = + hasPermission(httpRequest.user, Permissions.DeleteCrops) - if(canDeleteCrops) { + if 
(canDeleteCrops) { store.deleteCrops(id).map { _ => - val updateMessage = UpdateMessage(subject = "delete-image-exports", id = Some(id)) + val updateMessage = + UpdateMessage(subject = "delete-image-exports", id = Some(id)) notifications.publish(updateMessage) Accepted - } recover { - case _ => respondError(BadRequest, "deletion-error", "Could not delete crops") + } recover { case _ => + respondError(BadRequest, "deletion-error", "Could not delete crops") } } else { - Future.successful(respondError(Unauthorized, "permission-denied", "You cannot delete crops")) + Future.successful( + respondError( + Unauthorized, + "permission-denied", + "You cannot delete crops" + ) + ) } } - def executeRequest(exportRequest: ExportRequest, user: Principal, onBehalfOfPrincipal: Authentication.OnBehalfOfPrincipal): Future[(String, Crop)] = { + def executeRequest( + exportRequest: ExportRequest, + user: Principal, + onBehalfOfPrincipal: Authentication.OnBehalfOfPrincipal + ): Future[(String, Crop)] = { implicit val context: RequestLoggingContext = RequestLoggingContext( initialMarkers = Map( "requestType" -> "executeRequest" @@ -127,7 +169,10 @@ class CropperController(auth: Authentication, crops: Crops, store: CropStore, no // Image should always have dimensions, but we want to safely extract the Option dimensions <- ifDefined(apiImage.source.dimensions, InvalidImage) cropSpec = ExportRequest.toCropSpec(exportRequest, dimensions) - _ <- verify(crops.isWithinImage(cropSpec.bounds, dimensions), InvalidCropRequest) + _ <- verify( + crops.isWithinImage(cropSpec.bounds, dimensions), + InvalidCropRequest + ) crop = Crop.createFromCropSource( by = Some(Authentication.getIdentity(user)), timeRequested = Some(new DateTime()), @@ -141,12 +186,20 @@ class CropperController(auth: Authentication, crops: Crops, store: CropStore, no // TODO: lame, parse into URI object and compare host instead def isMediaApiUri(uri: String): Boolean = uri.startsWith(config.apiUri) - def fetchSourceFromApi(uri: String, onBehalfOfPrincipal: Authentication.OnBehalfOfPrincipal): Future[SourceImage] = { + def fetchSourceFromApi( + uri: String, + onBehalfOfPrincipal: Authentication.OnBehalfOfPrincipal + ): Future[SourceImage] = { - case class HttpClientResponse(status: Int, statusText: String, json: JsValue) + case class HttpClientResponse( + status: Int, + statusText: String, + json: JsValue + ) // TODO we should proxy authentication from the original request rather than have a dedicated cropper API key - val baseRequest = ws.url(uri) + val baseRequest = ws + .url(uri) .withQueryStringParameters("include" -> "fileMetadata") .withHttpHeaders(Authentication.originalServiceHeaderName -> "cropper") @@ -156,23 +209,24 @@ class CropperController(auth: Authentication, crops: Crops, store: CropStore, no HttpClientResponse(r.status, r.statusText, Json.parse(r.body)) } - responseFuture recoverWith { - case NonFatal(e) => - Logger.warn(s"HTTP request to fetch source failed: $e") - Future.failed(ApiRequestFailed) + responseFuture recoverWith { case NonFatal(e) => + Logger.warn(s"HTTP request to fetch source failed: $e") + Future.failed(ApiRequestFailed) } for (resp <- responseFuture) - yield { - if (resp.status == 404) { - throw ImageNotFound - } else if (resp.status != 200) { - Logger.warn(s"HTTP status ${resp.status} ${resp.statusText} from $uri") - throw ApiRequestFailed - } else { - resp.json.as[SourceImage] + yield { + if (resp.status == 404) { + throw ImageNotFound + } else if (resp.status != 200) { + Logger.warn( + s"HTTP status ${resp.status} 
${resp.statusText} from $uri" + ) + throw ApiRequestFailed + } else { + resp.json.as[SourceImage] + } } - } } def verify(cond: => Boolean, error: Throwable): Future[Unit] = diff --git a/cropper/app/lib/AspectRatio.scala b/cropper/app/lib/AspectRatio.scala index b093f1b587..dc85379f55 100644 --- a/cropper/app/lib/AspectRatio.scala +++ b/cropper/app/lib/AspectRatio.scala @@ -14,19 +14,21 @@ object AspectRatio { def clean(aspect: String): Option[Float] = knownRatios .find(_.friendly == aspect) - .map(ratio => (ratio.width.toFloat/ratio.height.toFloat)) + .map(ratio => (ratio.width.toFloat / ratio.height.toFloat)) @tailrec def gcd(a: Int, b: Int): Int = if (b == 0) a else gcd(b, a % b) - def calculate(width: Int, height: Int, tolerance: Int = 3) : Option[Ratio] = { + def calculate(width: Int, height: Int, tolerance: Int = 3): Option[Ratio] = { val matchingRatio = for { w <- width - tolerance until width + tolerance h <- height - tolerance until height + tolerance g = gcd(w, h) simplifiedWidth = w / g simplifiedHeight = h / g - ratio <- knownRatios.find(ratio => ratio.width == simplifiedWidth && ratio.height == simplifiedHeight) + ratio <- knownRatios.find(ratio => + ratio.width == simplifiedWidth && ratio.height == simplifiedHeight + ) } yield ratio matchingRatio.headOption } diff --git a/cropper/app/lib/CropStore.scala b/cropper/app/lib/CropStore.scala index 1e43eeace4..47ed86f92e 100644 --- a/cropper/app/lib/CropStore.scala +++ b/cropper/app/lib/CropStore.scala @@ -14,28 +14,45 @@ class CropStore(config: CropperConfig) extends S3ImageStorage(config) { import com.gu.mediaservice.lib.formatting._ def getSecureCropUri(uri: URI): Option[URL] = - config.imgPublishingSecureHost.map(new URI("https", _, uri.getPath, uri.getFragment).toURL) - - def storeCropSizing(file: File, filename: String, mimeType: MimeType, crop: Crop, dimensions: Dimensions)(implicit requestContext: RequestLoggingContext) : Future[Asset] = { + config.imgPublishingSecureHost.map( + new URI("https", _, uri.getPath, uri.getFragment).toURL + ) + + def storeCropSizing( + file: File, + filename: String, + mimeType: MimeType, + crop: Crop, + dimensions: Dimensions + )(implicit requestContext: RequestLoggingContext): Future[Asset] = { val CropSpec(sourceUri, Bounds(x, y, w, h), r, t) = crop.specification - val metadata = Map("source" -> sourceUri, - "bounds-x" -> x, - "bounds-y" -> y, - "bounds-width" -> w, - "bounds-height" -> h, - "type" -> t.name, - "author" -> crop.author, - "date" -> crop.date.map(printDateTime), - "width" -> dimensions.width, - "height" -> dimensions.height - ) ++ r.map("aspect-ratio" -> _) - - val filteredMetadata = metadata.collect { - case (key, Some(value)) => key -> value - case (key, value) => key -> value - }.mapValues(_.toString) - - storeImage(config.imgPublishingBucket, filename, file, Some(mimeType), filteredMetadata) map { s3Object => + val metadata = Map( + "source" -> sourceUri, + "bounds-x" -> x, + "bounds-y" -> y, + "bounds-width" -> w, + "bounds-height" -> h, + "type" -> t.name, + "author" -> crop.author, + "date" -> crop.date.map(printDateTime), + "width" -> dimensions.width, + "height" -> dimensions.height + ) ++ r.map("aspect-ratio" -> _) + + val filteredMetadata = metadata + .collect { + case (key, Some(value)) => key -> value + case (key, value) => key -> value + } + .mapValues(_.toString) + + storeImage( + config.imgPublishingBucket, + filename, + file, + Some(mimeType), + filteredMetadata + ) map { s3Object => Asset( translateImgHost(s3Object.uri), Some(s3Object.size), @@ -46,60 +63,96 @@ 
class CropStore(config: CropperConfig) extends S3ImageStorage(config) { } } - private def getOrElseOrNone(theMap: Map[String, String], preferredKey: String, fallbackKey: String): Option[String] = { + private def getOrElseOrNone( + theMap: Map[String, String], + preferredKey: String, + fallbackKey: String + ): Option[String] = { // Return the `preferredKey` value in `theMap` or the `fallbackKey` or `None` theMap.get(preferredKey).orElse(theMap.get(fallbackKey)) } def listCrops(id: String): Future[List[Crop]] = { list(config.imgPublishingBucket, id).map { crops => - crops.foldLeft(Map[String, Crop]()) { - case (map, (s3Object)) => { - val filename::containingFolder::_ = s3Object.uri.getPath.split("/").reverse.toList - var isMaster = containingFolder == "master" - val userMetadata = s3Object.metadata.userMetadata - val objectMetadata = s3Object.metadata.objectMetadata - - val updatedCrop = for { - // Note: if any is missing, the entry won't be registered - source <- userMetadata.get("source") - - // we've moved to kebab-case as localstack doesn't like `_` - // fallback to reading old values for older crops - // see https://github.com/localstack/localstack/issues/459 - x <- getOrElseOrNone(userMetadata, "bounds-x", "bounds_x").map(_.toInt) - y <- getOrElseOrNone(userMetadata, "bounds-y", "bounds_y").map(_.toInt) - w <- getOrElseOrNone(userMetadata, "bounds-width", "bounds_w").map(_.toInt) - h <- getOrElseOrNone(userMetadata, "bounds-height", "bounds_h").map(_.toInt) - width <- userMetadata.get("width").map(_.toInt) - height <- userMetadata.get("height").map(_.toInt) - - cid = s"$id-$x-$y-$w-$h" - ratio = getOrElseOrNone(userMetadata, "aspect-ratio", "aspect_ratio") - author = userMetadata.get("author") - date = userMetadata.get("date").flatMap(parseDateTime) - exportType = userMetadata.get("type").map(ExportType.valueOf).getOrElse(ExportType.default) - cropSource = CropSpec(source, Bounds(x, y, w, h), ratio, exportType) - dimensions = Dimensions(width, height) - - sizing = - Asset( - translateImgHost(s3Object.uri), - Some(s3Object.size), - objectMetadata.contentType, - Some(dimensions), - getSecureCropUri(s3Object.uri) + crops + .foldLeft(Map[String, Crop]()) { + case (map, (s3Object)) => { + val filename :: containingFolder :: _ = + s3Object.uri.getPath.split("/").reverse.toList + var isMaster = containingFolder == "master" + val userMetadata = s3Object.metadata.userMetadata + val objectMetadata = s3Object.metadata.objectMetadata + + val updatedCrop = for { + // Note: if any is missing, the entry won't be registered + source <- userMetadata.get("source") + + // we've moved to kebab-case as localstack doesn't like `_` + // fallback to reading old values for older crops + // see https://github.com/localstack/localstack/issues/459 + x <- getOrElseOrNone(userMetadata, "bounds-x", "bounds_x").map( + _.toInt ) - lastCrop = map.getOrElse(cid, Crop.createFromCropSource(author, date, cropSource)) - lastSizings = lastCrop.assets - - currentSizings = if (isMaster) lastSizings else lastSizings :+ sizing - masterSizing = if (isMaster) Some(sizing) else lastCrop.master - } yield cid -> Crop.createFromCropSource(author, date, cropSource, masterSizing, currentSizings) - - map ++ updatedCrop + y <- getOrElseOrNone(userMetadata, "bounds-y", "bounds_y").map( + _.toInt + ) + w <- getOrElseOrNone(userMetadata, "bounds-width", "bounds_w") + .map(_.toInt) + h <- getOrElseOrNone(userMetadata, "bounds-height", "bounds_h") + .map(_.toInt) + width <- userMetadata.get("width").map(_.toInt) + height <- 
userMetadata.get("height").map(_.toInt) + + cid = s"$id-$x-$y-$w-$h" + ratio = getOrElseOrNone( + userMetadata, + "aspect-ratio", + "aspect_ratio" + ) + author = userMetadata.get("author") + date = userMetadata.get("date").flatMap(parseDateTime) + exportType = userMetadata + .get("type") + .map(ExportType.valueOf) + .getOrElse(ExportType.default) + cropSource = CropSpec( + source, + Bounds(x, y, w, h), + ratio, + exportType + ) + dimensions = Dimensions(width, height) + + sizing = + Asset( + translateImgHost(s3Object.uri), + Some(s3Object.size), + objectMetadata.contentType, + Some(dimensions), + getSecureCropUri(s3Object.uri) + ) + lastCrop = map.getOrElse( + cid, + Crop.createFromCropSource(author, date, cropSource) + ) + lastSizings = lastCrop.assets + + currentSizings = + if (isMaster) lastSizings else lastSizings :+ sizing + masterSizing = if (isMaster) Some(sizing) else lastCrop.master + } yield cid -> Crop.createFromCropSource( + author, + date, + cropSource, + masterSizing, + currentSizings + ) + + map ++ updatedCrop + } } - }.collect { case (cid, s) => s }.toList + .collect { case (cid, s) => s } + .toList } } diff --git a/cropper/app/lib/CropperConfig.scala b/cropper/app/lib/CropperConfig.scala index f24992f520..b2cd8e136f 100644 --- a/cropper/app/lib/CropperConfig.scala +++ b/cropper/app/lib/CropperConfig.scala @@ -4,13 +4,14 @@ import com.gu.mediaservice.lib.config.{CommonConfig, GridConfigResources} import java.io.File - -class CropperConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) { +class CropperConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) { val imgPublishingBucket = string("publishing.image.bucket") val imgPublishingHost = string("publishing.image.host") // Note: work around CloudFormation not allowing optional parameters - val imgPublishingSecureHost = stringOpt("publishing.image.secure.host").filterNot(_.isEmpty) + val imgPublishingSecureHost = + stringOpt("publishing.image.secure.host").filterNot(_.isEmpty) val rootUri = services.cropperBaseUri val apiUri = services.apiBaseUri diff --git a/cropper/app/lib/Crops.scala b/cropper/app/lib/Crops.scala index c309dbcfdd..b6636dde3c 100644 --- a/cropper/app/lib/Crops.scala +++ b/cropper/app/lib/Crops.scala @@ -12,13 +12,25 @@ import scala.concurrent.Future import scala.util.Try case object InvalidImage extends Exception("Invalid image cannot be cropped") -case object MissingMimeType extends Exception("Missing mimeType from source API") -case object MissingSecureSourceUrl extends Exception("Missing secureUrl from source API") -case object InvalidCropRequest extends Exception("Crop request invalid for image dimensions") - -case class MasterCrop(sizing: Future[Asset], file: File, dimensions: Dimensions, aspectRatio: Float) - -class Crops(config: CropperConfig, store: CropStore, imageOperations: ImageOperations) { +case object MissingMimeType + extends Exception("Missing mimeType from source API") +case object MissingSecureSourceUrl + extends Exception("Missing secureUrl from source API") +case object InvalidCropRequest + extends Exception("Crop request invalid for image dimensions") + +case class MasterCrop( + sizing: Future[Asset], + file: File, + dimensions: Dimensions, + aspectRatio: Float +) + +class Crops( + config: CropperConfig, + store: CropStore, + imageOperations: ImageOperations +) { import Files._ import scala.concurrent.ExecutionContext.Implicits.global @@ -26,99 +38,190 @@ class Crops(config: CropperConfig, store: CropStore, 
imageOperations: ImageOpera private val cropQuality = 75d private val masterCropQuality = 95d - def outputFilename(source: SourceImage, bounds: Bounds, outputWidth: Int, fileType: MimeType, isMaster: Boolean = false): String = { + def outputFilename( + source: SourceImage, + bounds: Bounds, + outputWidth: Int, + fileType: MimeType, + isMaster: Boolean = false + ): String = { val masterString: String = if (isMaster) "master/" else "" s"${source.id}/${Crop.getCropId(bounds)}/${masterString}$outputWidth${fileType.fileExtension}" } - def createMasterCrop(apiImage: SourceImage, sourceFile: File, crop: Crop, mediaType: MimeType, colourModel: Option[String], - colourType: String)(implicit requestContext: RequestLoggingContext): Future[MasterCrop] = { + def createMasterCrop( + apiImage: SourceImage, + sourceFile: File, + crop: Crop, + mediaType: MimeType, + colourModel: Option[String], + colourType: String + )(implicit requestContext: RequestLoggingContext): Future[MasterCrop] = { - val source = crop.specification + val source = crop.specification val metadata = apiImage.metadata - val iccColourSpace = FileMetadataHelper.normalisedIccColourSpace(apiImage.fileMetadata) + val iccColourSpace = + FileMetadataHelper.normalisedIccColourSpace(apiImage.fileMetadata) for { - strip <- imageOperations.cropImage(sourceFile, apiImage.source.mimeType, source.bounds, masterCropQuality, config.tempDir, iccColourSpace, colourModel, mediaType) + strip <- imageOperations.cropImage( + sourceFile, + apiImage.source.mimeType, + source.bounds, + masterCropQuality, + config.tempDir, + iccColourSpace, + colourModel, + mediaType + ) file: File <- imageOperations.appendMetadata(strip, metadata) - dimensions = Dimensions(source.bounds.width, source.bounds.height) - filename = outputFilename(apiImage, source.bounds, dimensions.width, mediaType, isMaster = true) - sizing = store.storeCropSizing(file, filename, mediaType, crop, dimensions) + dimensions = Dimensions(source.bounds.width, source.bounds.height) + filename = outputFilename( + apiImage, + source.bounds, + dimensions.width, + mediaType, + isMaster = true + ) + sizing = store.storeCropSizing( + file, + filename, + mediaType, + crop, + dimensions + ) dirtyAspect = source.bounds.width.toFloat / source.bounds.height - aspect = crop.specification.aspectRatio.flatMap(AspectRatio.clean).getOrElse(dirtyAspect) - } - yield MasterCrop(sizing, file, dimensions, aspect) + aspect = crop.specification.aspectRatio + .flatMap(AspectRatio.clean) + .getOrElse(dirtyAspect) + } yield MasterCrop(sizing, file, dimensions, aspect) } - def createCrops(sourceFile: File, dimensionList: List[Dimensions], apiImage: SourceImage, crop: Crop, cropType: MimeType)(implicit requestContext: RequestLoggingContext): Future[List[Asset]] = { + def createCrops( + sourceFile: File, + dimensionList: List[Dimensions], + apiImage: SourceImage, + crop: Crop, + cropType: MimeType + )(implicit requestContext: RequestLoggingContext): Future[List[Asset]] = { Future.sequence[Asset, List](dimensionList.map { dimensions => for { - file <- imageOperations.resizeImage(sourceFile, apiImage.source.mimeType, dimensions, cropQuality, config.tempDir, cropType) + file <- imageOperations.resizeImage( + sourceFile, + apiImage.source.mimeType, + dimensions, + cropQuality, + config.tempDir, + cropType + ) optimisedFile = imageOperations.optimiseImage(file, cropType) - filename = outputFilename(apiImage, crop.specification.bounds, dimensions.width, cropType) - sizing <- store.storeCropSizing(optimisedFile, filename, cropType, crop, 
dimensions) - _ <- delete(file) - _ <- delete(optimisedFile) - } - yield sizing + filename = outputFilename( + apiImage, + crop.specification.bounds, + dimensions.width, + cropType + ) + sizing <- store.storeCropSizing( + optimisedFile, + filename, + cropType, + crop, + dimensions + ) + _ <- delete(file) + _ <- delete(optimisedFile) + } yield sizing }) } def deleteCrops(id: String): Future[Unit] = store.deleteCrops(id) - def dimensionsFromConfig(bounds: Bounds, aspectRatio: Float): List[Dimensions] = if (bounds.isPortrait) - config.portraitCropSizingHeights.filter(_ <= bounds.height).map(h => Dimensions(math.round(h * aspectRatio), h)) - else - config.landscapeCropSizingWidths.filter(_ <= bounds.width).map(w => Dimensions(w, math.round(w / aspectRatio))) + def dimensionsFromConfig( + bounds: Bounds, + aspectRatio: Float + ): List[Dimensions] = if (bounds.isPortrait) + config.portraitCropSizingHeights + .filter(_ <= bounds.height) + .map(h => Dimensions(math.round(h * aspectRatio), h)) + else + config.landscapeCropSizingWidths + .filter(_ <= bounds.width) + .map(w => Dimensions(w, math.round(w / aspectRatio))) def isWithinImage(bounds: Bounds, dimensions: Dimensions): Boolean = { - val positiveCoords = List(bounds.x, bounds.y ).forall(_ >= 0) - val strictlyPositiveSize = List(bounds.width, bounds.height).forall(_ > 0) - val withinBounds = (bounds.x + bounds.width <= dimensions.width ) && - (bounds.y + bounds.height <= dimensions.height) + val positiveCoords = List(bounds.x, bounds.y).forall(_ >= 0) + val strictlyPositiveSize = List(bounds.width, bounds.height).forall(_ > 0) + val withinBounds = (bounds.x + bounds.width <= dimensions.width) && + (bounds.y + bounds.height <= dimensions.height) positiveCoords && strictlyPositiveSize && withinBounds } - def export(apiImage: SourceImage, crop: Crop)(implicit requestContext: RequestLoggingContext): Future[ExportResult] = { - val source = crop.specification + def export(apiImage: SourceImage, crop: Crop)(implicit + requestContext: RequestLoggingContext + ): Future[ExportResult] = { + val source = crop.specification val mimeType = apiImage.source.mimeType.getOrElse(throw MissingMimeType) - val secureUrl = apiImage.source.secureUrl.getOrElse(throw MissingSecureSourceUrl) - val colourType = apiImage.fileMetadata.colourModelInformation.getOrElse("colorType", "") - val hasAlpha = apiImage.fileMetadata.colourModelInformation.get("hasAlpha").flatMap(a => Try(a.toBoolean).toOption).getOrElse(true) + val secureUrl = + apiImage.source.secureUrl.getOrElse(throw MissingSecureSourceUrl) + val colourType = + apiImage.fileMetadata.colourModelInformation.getOrElse("colorType", "") + val hasAlpha = apiImage.fileMetadata.colourModelInformation + .get("hasAlpha") + .flatMap(a => Try(a.toBoolean).toOption) + .getOrElse(true) val cropType = Crops.cropType(mimeType, colourType, hasAlpha) for { - sourceFile <- tempFileFromURL(secureUrl, "cropSource", "", config.tempDir) + sourceFile <- tempFileFromURL(secureUrl, "cropSource", "", config.tempDir) colourModel <- ImageOperations.identifyColourModel(sourceFile, mimeType) - masterCrop <- createMasterCrop(apiImage, sourceFile, crop, cropType, colourModel, colourType) - - outputDims = dimensionsFromConfig(source.bounds, masterCrop.aspectRatio) :+ masterCrop.dimensions - - sizes <- createCrops(masterCrop.file, outputDims, apiImage, crop, cropType) + masterCrop <- createMasterCrop( + apiImage, + sourceFile, + crop, + cropType, + colourModel, + colourType + ) + + outputDims = dimensionsFromConfig( + source.bounds, + 
masterCrop.aspectRatio + ) :+ masterCrop.dimensions + + sizes <- createCrops( + masterCrop.file, + outputDims, + apiImage, + crop, + cropType + ) masterSize <- masterCrop.sizing - _ <- Future.sequence(List(masterCrop.file,sourceFile).map(delete)) - } - yield ExportResult(apiImage.id, masterSize, sizes) + _ <- Future.sequence(List(masterCrop.file, sourceFile).map(delete)) + } yield ExportResult(apiImage.id, masterSize, sizes) } } object Crops { - /** - * The aim here is to decide whether the crops should be JPEG or PNGs depending on a predicted quality/size trade-off. + + /** The aim here is to decide whether the crops should be JPEG or PNGs depending on a predicted quality/size trade-off. * - If the image has transparency then it should always be a PNG as the transparency is not available in JPEG * - If the image is not true colour then we assume it is a graphic that should be retained as a PNG */ - def cropType(mediaType: MimeType, colourType: String, hasAlpha: Boolean): MimeType = { + def cropType( + mediaType: MimeType, + colourType: String, + hasAlpha: Boolean + ): MimeType = { val isGraphic = !colourType.matches("True[ ]?Color.*") val outputAsPng = hasAlpha || isGraphic mediaType match { - case Png if outputAsPng => Png + case Png if outputAsPng => Png case Tiff if outputAsPng => Png - case _ => Jpeg + case _ => Jpeg } } } diff --git a/cropper/app/lib/Notifications.scala b/cropper/app/lib/Notifications.scala index 0ce5d1486c..21d765e89e 100644 --- a/cropper/app/lib/Notifications.scala +++ b/cropper/app/lib/Notifications.scala @@ -2,4 +2,5 @@ package lib import com.gu.mediaservice.lib.aws.ThrallMessageSender -class Notifications(config: CropperConfig) extends ThrallMessageSender(config.thrallKinesisStreamConfig) +class Notifications(config: CropperConfig) + extends ThrallMessageSender(config.thrallKinesisStreamConfig) diff --git a/cropper/app/model/ExportRequest.scala b/cropper/app/model/ExportRequest.scala index f99bc6c019..223a12c168 100644 --- a/cropper/app/model/ExportRequest.scala +++ b/cropper/app/model/ExportRequest.scala @@ -7,15 +7,14 @@ import play.api.libs.json.Reads._ import com.gu.mediaservice.model._ - sealed trait ExportRequest { val uri: String } case class FullExportRequest(uri: String) extends ExportRequest -case class CropRequest(uri: String, bounds: Bounds, aspectRatio: Option[String]) extends ExportRequest - +case class CropRequest(uri: String, bounds: Bounds, aspectRatio: Option[String]) + extends ExportRequest object ExportRequest { @@ -23,35 +22,42 @@ object ExportRequest { private val readCropRequest: Reads[CropRequest] = ( (__ \ "source").read[String] ~ - __.read[Bounds] ~ - (__ \ "aspectRatio").readNullable[String](pattern(aspectRatioLike)) + __.read[Bounds] ~ + (__ \ "aspectRatio").readNullable[String](pattern(aspectRatioLike)) )(CropRequest.apply _) private val readFullExportRequest: Reads[FullExportRequest] = (__ \ "source").read[String].map(FullExportRequest.apply) - implicit val readExportRequest: Reads[ExportRequest] = Reads[ExportRequest](jsValue => - (jsValue \ "type").validate[String] match { - case JsSuccess("crop", _) => readCropRequest.reads(jsValue) - case JsSuccess("full", _) => readFullExportRequest.reads(jsValue) - case _ => JsError("invalid type") + implicit val readExportRequest: Reads[ExportRequest] = + Reads[ExportRequest](jsValue => + (jsValue \ "type").validate[String] match { + case JsSuccess("crop", _) => readCropRequest.reads(jsValue) + case JsSuccess("full", _) => readFullExportRequest.reads(jsValue) + case _ => JsError("invalid 
type") + } + ) + + def boundsFill(dimensions: Dimensions): Bounds = + Bounds(0, 0, dimensions.width, dimensions.height) + + def toCropSpec(cropRequest: ExportRequest, dimensions: Dimensions): CropSpec = + cropRequest match { + case FullExportRequest(uri) => + CropSpec( + uri, + boundsFill(dimensions), + AspectRatio + .calculate(dimensions.width, dimensions.height) + .map(_.friendly), + FullExport + ) + // Map "crop" that covers the whole image to a "full" export + case CropRequest(uri, bounds, ratio) + if bounds == boundsFill(dimensions) => + CropSpec(uri, boundsFill(dimensions), ratio, FullExport) + case CropRequest(uri, bounds, ratio) => + CropSpec(uri, bounds, ratio, CropExport) } - ) - - def boundsFill(dimensions: Dimensions): Bounds = Bounds(0, 0, dimensions.width, dimensions.height) - - def toCropSpec(cropRequest: ExportRequest, dimensions: Dimensions): CropSpec = cropRequest match { - case FullExportRequest(uri) => - CropSpec( - uri, - boundsFill(dimensions), - AspectRatio.calculate(dimensions.width, dimensions.height).map(_.friendly), - FullExport - ) - // Map "crop" that covers the whole image to a "full" export - case CropRequest(uri, bounds, ratio) if bounds == boundsFill(dimensions) - => CropSpec(uri, boundsFill(dimensions), ratio, FullExport) - case CropRequest(uri, bounds, ratio) => CropSpec(uri, bounds, ratio, CropExport) - } } diff --git a/cropper/test/lib/AspectRatioTest.scala b/cropper/test/lib/AspectRatioTest.scala index ec8d51a6d7..b4e39471e4 100644 --- a/cropper/test/lib/AspectRatioTest.scala +++ b/cropper/test/lib/AspectRatioTest.scala @@ -1,6 +1,6 @@ package lib -import org.scalatest.{ FunSpec, Matchers } +import org.scalatest.{FunSpec, Matchers} class AspectRatioTest extends FunSpec with Matchers { @@ -34,20 +34,25 @@ class AspectRatioTest extends FunSpec with Matchers { (9001, 1337) ) - val allExamples = fiveThreeExamples ++ twoThreeExamples ++ sixteenNineExamples ++ squareExamples + val allExamples = + fiveThreeExamples ++ twoThreeExamples ++ sixteenNineExamples ++ squareExamples describe("calculate") { - allExamples.foreach( r => - it(s"should correctly identify ${r.width} / ${r.height} as ${r.friendly}"){ - AspectRatio.calculate(r.width, r.height, 6).map(_.friendly) shouldEqual Some(r.friendly) - } + allExamples.foreach(r => + it( + s"should correctly identify ${r.width} / ${r.height} as ${r.friendly}" + ) { + AspectRatio + .calculate(r.width, r.height, 6) + .map(_.friendly) shouldEqual Some(r.friendly) + } ) - it("should return None for unknown ratios"){ - unknownRatios.foreach( r => + it("should return None for unknown ratios") { + unknownRatios.foreach(r => AspectRatio.calculate(r._1, r._2, 6) shouldEqual None ) } - } + } } diff --git a/cropper/test/lib/CropsTest.scala b/cropper/test/lib/CropsTest.scala index 3b8abcfb19..41f5fc2170 100644 --- a/cropper/test/lib/CropsTest.scala +++ b/cropper/test/lib/CropsTest.scala @@ -15,11 +15,15 @@ class CropsTest extends FunSpec with Matchers with MockitoSugar { Crops.cropType(Png, "Monkey", hasAlpha = true) shouldBe Png } - it("should return PNG when the input type is PNG and it has alpha even if it is True Color") { + it( + "should return PNG when the input type is PNG and it has alpha even if it is True Color" + ) { Crops.cropType(Png, "True Color", hasAlpha = true) shouldBe Png } - it("should return PNG when the input type is PNG and it is NOT true color (a graphic)") { + it( + "should return PNG when the input type is PNG and it is NOT true color (a graphic)" + ) { Crops.cropType(Png, "Monkey", hasAlpha = false) 
shouldBe Png } @@ -31,18 +35,28 @@ class CropsTest extends FunSpec with Matchers with MockitoSugar { Crops.cropType(Tiff, "Monkey", hasAlpha = true) shouldBe Png } - it("should return PNG when the input type is TIFF and it doesn't have alpha or is true color") { + it( + "should return PNG when the input type is TIFF and it doesn't have alpha or is true color" + ) { Crops.cropType(Tiff, "Monkey", hasAlpha = false) shouldBe Png } - it("should return JPEG when the input type is TIFF and it doesn't have alpha and it is true color") { + it( + "should return JPEG when the input type is TIFF and it doesn't have alpha and it is true color" + ) { Crops.cropType(Tiff, "TrueColor", hasAlpha = false) shouldBe Jpeg } private val config = mock[CropperConfig] private val store = mock[CropStore] private val imageOperations: ImageOperations = mock[ImageOperations] - private val source: SourceImage = SourceImage("test", mock[Asset], valid = true, mock[ImageMetadata], mock[FileMetadata]) + private val source: SourceImage = SourceImage( + "test", + mock[Asset], + valid = true, + mock[ImageMetadata], + mock[FileMetadata] + ) private val bounds: Bounds = Bounds(10, 20, 30, 40) private val outputWidth = 1234 diff --git a/image-loader/app/AppLoader.scala b/image-loader/app/AppLoader.scala index 88633900ba..dcea1a7156 100644 --- a/image-loader/app/AppLoader.scala +++ b/image-loader/app/AppLoader.scala @@ -1,3 +1,4 @@ import com.gu.mediaservice.lib.play.GridAppLoader -class AppLoader extends GridAppLoader("image-loader", new ImageLoaderComponents(_)) +class AppLoader + extends GridAppLoader("image-loader", new ImageLoaderComponents(_)) diff --git a/image-loader/app/ImageLoaderComponents.scala b/image-loader/app/ImageLoaderComponents.scala index b6c47b8212..433571bd31 100644 --- a/image-loader/app/ImageLoaderComponents.scala +++ b/image-loader/app/ImageLoaderComponents.scala @@ -8,30 +8,52 @@ import model.{Projector, Uploader, QuarantineUploader} import play.api.ApplicationLoader.Context import router.Routes -class ImageLoaderComponents(context: Context) extends GridComponents(context, new ImageLoaderConfig(_)) with GridLogging { +class ImageLoaderComponents(context: Context) + extends GridComponents(context, new ImageLoaderConfig(_)) + with GridLogging { final override val buildInfo = utils.buildinfo.BuildInfo - logger.info(s"Loaded ${config.imageProcessor.processors.size} image processors:") - config.imageProcessor.processors.zipWithIndex.foreach { case (processor, index) => - logger.info(s" $index -> ${processor.description}") + logger.info( + s"Loaded ${config.imageProcessor.processors.size} image processors:" + ) + config.imageProcessor.processors.zipWithIndex.foreach { + case (processor, index) => + logger.info(s" $index -> ${processor.description}") } val store = new ImageLoaderStore(config) - val imageOperations = new ImageOperations(context.environment.rootPath.getAbsolutePath) + val imageOperations = new ImageOperations( + context.environment.rootPath.getAbsolutePath + ) val notifications = new Notifications(config) val downloader = new Downloader() val uploader = new Uploader(store, config, imageOperations, notifications) val projector = Projector(config, imageOperations) - val quarantineUploader: Option[QuarantineUploader] = (config.uploadToQuarantineEnabled, config.quarantineBucket) match { - case (true, Some(bucketName)) =>{ - val quarantineStore = new QuarantineStore(config) - Some(new QuarantineUploader(quarantineStore, config)) + val quarantineUploader: Option[QuarantineUploader] = + 
(config.uploadToQuarantineEnabled, config.quarantineBucket) match { + case (true, Some(bucketName)) => { + val quarantineStore = new QuarantineStore(config) + Some(new QuarantineUploader(quarantineStore, config)) + } + case (true, None) => + throw new IllegalArgumentException( + s"Quarantining is enabled. upload.quarantine.enabled = ${config.uploadToQuarantineEnabled} but no bucket is configured. s3.quarantine.bucket isn't configured." + ) + case (false, _) => None } - case (true, None) => throw new IllegalArgumentException(s"Quarantining is enabled. upload.quarantine.enabled = ${config.uploadToQuarantineEnabled} but no bucket is configured. s3.quarantine.bucket isn't configured.") - case (false, _) => None - } val controller = new ImageLoaderController( - auth, downloader, store, notifications, config, uploader, quarantineUploader, projector, controllerComponents, wsClient) + auth, + downloader, + store, + notifications, + config, + uploader, + quarantineUploader, + projector, + controllerComponents, + wsClient + ) - override lazy val router = new Routes(httpErrorHandler, controller, management) + override lazy val router = + new Routes(httpErrorHandler, controller, management) } diff --git a/image-loader/app/controllers/ImageLoaderController.scala b/image-loader/app/controllers/ImageLoaderController.scala index b1eb106710..1dddbd293e 100644 --- a/image-loader/app/controllers/ImageLoaderController.scala +++ b/image-loader/app/controllers/ImageLoaderController.scala @@ -7,7 +7,12 @@ import com.drew.imaging.ImageProcessingException import com.gu.mediaservice.lib.argo.ArgoHelpers import com.gu.mediaservice.lib.argo.model.Link import com.gu.mediaservice.lib.auth._ -import com.gu.mediaservice.lib.logging.{FALLBACK, GridLogging, LogMarker, RequestLoggingContext} +import com.gu.mediaservice.lib.logging.{ + FALLBACK, + GridLogging, + LogMarker, + RequestLoggingContext +} import com.gu.mediaservice.lib.{DateTimeUtils, ImageIngestOperations} import com.gu.mediaservice.model.UnsupportedMimeTypeException import lib._ @@ -22,35 +27,52 @@ import scala.concurrent.{ExecutionContext, Future} import scala.util.Try import scala.util.control.NonFatal -class ImageLoaderController(auth: Authentication, - downloader: Downloader, - store: ImageLoaderStore, - notifications: Notifications, - config: ImageLoaderConfig, - uploader: Uploader, - quarantineUploader: Option[QuarantineUploader], - projector: Projector, - override val controllerComponents: ControllerComponents, - wSClient: WSClient) - (implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers { +class ImageLoaderController( + auth: Authentication, + downloader: Downloader, + store: ImageLoaderStore, + notifications: Notifications, + config: ImageLoaderConfig, + uploader: Uploader, + quarantineUploader: Option[QuarantineUploader], + projector: Projector, + override val controllerComponents: ControllerComponents, + wSClient: WSClient +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers { private lazy val indexResponse: Result = { val indexData = Map("description" -> "This is the Loader Service") val indexLinks = List( - Link("load", s"${config.rootUri}/images{?uploadedBy,identifiers,uploadTime,filename}"), - Link("import", s"${config.rootUri}/imports{?uri,uploadedBy,identifiers,uploadTime,filename}") + Link( + "load", + s"${config.rootUri}/images{?uploadedBy,identifiers,uploadTime,filename}" + ), + Link( + "import", + s"${config.rootUri}/imports{?uri,uploadedBy,identifiers,uploadTime,filename}" + ) ) 
respond(indexData, indexLinks) } def index: Action[AnyContent] = auth { indexResponse } - def quarantineOrStoreImage(uploadRequest: UploadRequest)(implicit logMarker: LogMarker) = { - quarantineUploader.map(_.quarantineFile(uploadRequest)).getOrElse(uploader.storeFile(uploadRequest)) + def quarantineOrStoreImage( + uploadRequest: UploadRequest + )(implicit logMarker: LogMarker) = { + quarantineUploader + .map(_.quarantineFile(uploadRequest)) + .getOrElse(uploader.storeFile(uploadRequest)) } - - def loadImage(uploadedBy: Option[String], identifiers: Option[String], uploadTime: Option[String], filename: Option[String]): Action[DigestedFile] = { + + def loadImage( + uploadedBy: Option[String], + identifiers: Option[String], + uploadTime: Option[String], + filename: Option[String] + ): Action[DigestedFile] = { implicit val context: RequestLoggingContext = RequestLoggingContext( initialMarkers = Map( @@ -77,26 +99,32 @@ class ImageLoaderController(auth: Authentication, identifiers, DateTimeUtils.fromValueOrNow(uploadTime), filename.flatMap(_.trim.nonEmptyOpt), - context.requestId) + context.requestId + ) result <- quarantineOrStoreImage(uploadRequest) } yield result - result.onComplete( _ => Try { deleteTempFile(tempFile) } ) + result.onComplete(_ => Try { deleteTempFile(tempFile) }) result map { r => val result = Accepted(r).as(ArgoMediaType) logger.info("loadImage request end") result - } recover { - case e => - logger.error("loadImage request ended with a failure", e) - (e match { - case e: UnsupportedMimeTypeException => FailureResponse.unsupportedMimeType(e, config.supportedMimeTypes) - case e: ImageProcessingException => FailureResponse.notAnImage(e, config.supportedMimeTypes).as(ArgoMediaType) - case e: java.io.IOException => FailureResponse.badImage(e).as(ArgoMediaType) - case _ => - logger.error("Failed upload", e) - InternalServerError(Json.obj("error" -> e.getMessage)).as(ArgoMediaType) - }).as(ArgoMediaType) + } recover { case e => + logger.error("loadImage request ended with a failure", e) + (e match { + case e: UnsupportedMimeTypeException => + FailureResponse.unsupportedMimeType(e, config.supportedMimeTypes) + case e: ImageProcessingException => + FailureResponse + .notAnImage(e, config.supportedMimeTypes) + .as(ArgoMediaType) + case e: java.io.IOException => + FailureResponse.badImage(e).as(ArgoMediaType) + case _ => + logger.error("Failed upload", e) + InternalServerError(Json.obj("error" -> e.getMessage)) + .as(ArgoMediaType) + }).as(ArgoMediaType) } } } @@ -111,20 +139,32 @@ class ImageLoaderController(auth: Authentication, ) val tempFile = createTempFile(s"projection-$imageId") auth.async { _ => - val result= projector.projectS3ImageById(projector, imageId, tempFile, context.requestId) + val result = projector.projectS3ImageById( + projector, + imageId, + tempFile, + context.requestId + ) - result.onComplete( _ => Try { deleteTempFile(tempFile) } ) + result.onComplete(_ => Try { deleteTempFile(tempFile) }) result.map { case Some(img) => logger.info("image found") Ok(Json.toJson(img)).as(ArgoMediaType) case None => - val s3Path = "s3://" + config.imageBucket + "/" + ImageIngestOperations.fileKeyFromId(imageId) + val s3Path = + "s3://" + config.imageBucket + "/" + ImageIngestOperations + .fileKeyFromId(imageId) logger.info("image not found") - respondError(NotFound, "image-not-found", s"Could not find image: $imageId in s3 at $s3Path") + respondError( + NotFound, + "image-not-found", + s"Could not find image: $imageId in s3 at $s3Path" + ) } recover { - case _: 
NoSuchImageExistsInS3 => NotFound(Json.obj("imageId" -> imageId)) + case _: NoSuchImageExistsInS3 => + NotFound(Json.obj("imageId" -> imageId)) case _ => InternalServerError(Json.obj("imageId" -> imageId)) } @@ -132,12 +172,12 @@ class ImageLoaderController(auth: Authentication, } def importImage( - uri: String, - uploadedBy: Option[String], - identifiers: Option[String], - uploadTime: Option[String], - filename: Option[String] - ): Action[AnyContent] = { + uri: String, + uploadedBy: Option[String], + identifiers: Option[String], + uploadTime: Option[String], + filename: Option[String] + ): Action[AnyContent] = { auth.async { request => implicit val context: RequestLoggingContext = RequestLoggingContext( initialMarkers = Map( @@ -160,15 +200,16 @@ class ImageLoaderController(auth: Authentication, identifiers, DateTimeUtils.fromValueOrNow(uploadTime), filename.flatMap(_.trim.nonEmptyOpt), - context.requestId) + context.requestId + ) result <- uploader.storeFile(uploadRequest) } yield result - result.onComplete( _ => Try { deleteTempFile(tempFile) } ) + result.onComplete(_ => Try { deleteTempFile(tempFile) }) result - .map { - r => { + .map { r => + { logger.info("importImage request end") // NB This return code (202) is explicitly required by s3-watcher // Anything else (eg 200) will be logged as an error. DAMHIKIJKOK. @@ -176,11 +217,12 @@ class ImageLoaderController(auth: Authentication, } } .recover { - case e: UnsupportedMimeTypeException => FailureResponse.unsupportedMimeType(e, config.supportedMimeTypes) + case e: UnsupportedMimeTypeException => + FailureResponse.unsupportedMimeType(e, config.supportedMimeTypes) case _: IllegalArgumentException => FailureResponse.invalidUri case e: UserImageLoaderException => FailureResponse.badUserInput(e) - case NonFatal(_) => FailureResponse.failedUriDownload - } + case NonFatal(_) => FailureResponse.failedUriDownload + } } } @@ -197,7 +239,9 @@ class ImageLoaderController(auth: Authentication, tempFile } - def deleteTempFile(tempFile: File)(implicit logMarker: LogMarker): Future[Unit] = Future { + def deleteTempFile( + tempFile: File + )(implicit logMarker: LogMarker): Future[Unit] = Future { if (tempFile.delete()) { logger.info(s"Deleted temp file $tempFile") } else { @@ -205,4 +249,4 @@ class ImageLoaderController(auth: Authentication, } } -} \ No newline at end of file +} diff --git a/image-loader/app/lib/BodyParsers.scala b/image-loader/app/lib/BodyParsers.scala index bd58c279e7..c7c167f9b1 100644 --- a/image-loader/app/lib/BodyParsers.scala +++ b/image-loader/app/lib/BodyParsers.scala @@ -34,15 +34,19 @@ object DigestBodyParser extends ArgoHelpers { s"Incorrect content-length. The specified content-length does match that of the received file." 
) - def slurp(to: File)(implicit ec: ExecutionContext): Accumulator[ByteString, (MessageDigest, FileOutputStream)] = { - Accumulator(Sink.fold[(MessageDigest, FileOutputStream), ByteString]( - (MessageDigest.getInstance("SHA-1"), new FileOutputStream(to))) { - case ((md, os), data) => + def slurp(to: File)(implicit + ec: ExecutionContext + ): Accumulator[ByteString, (MessageDigest, FileOutputStream)] = { + Accumulator( + Sink.fold[(MessageDigest, FileOutputStream), ByteString]( + (MessageDigest.getInstance("SHA-1"), new FileOutputStream(to)) + ) { case ((md, os), data) => md.update(data.toArray) os.write(data.toArray) (md, os) - }) + } + ) } def failValidation(foo: Result, message: String) = { @@ -50,22 +54,36 @@ object DigestBodyParser extends ArgoHelpers { Left(foo) } - def validate(request: RequestHeader, to: File, md: MessageDigest): Either[Result, DigestedFile] = { + def validate( + request: RequestHeader, + to: File, + md: MessageDigest + ): Either[Result, DigestedFile] = { request.headers.get("Content-Length") match { case Some(contentLength) => if (to.length == contentLength.toInt) Right(DigestedFile(to, md.digest)) - else failValidation(incorrectContentLengthError, "Received file does not match specified 'Content-Length'") + else + failValidation( + incorrectContentLengthError, + "Received file does not match specified 'Content-Length'" + ) case None => - failValidation(missingContentLengthError, "Missing content-length. Please specify a correct 'Content-Length' header") + failValidation( + missingContentLengthError, + "Missing content-length. Please specify a correct 'Content-Length' header" + ) } } - def create(to: File)(implicit ex: ExecutionContext): BodyParser[DigestedFile] = - BodyParser("digested file, to=" + to) { request => { - slurp(to).map { case (md, os) => - os.close() - validate(request, to, md) + def create( + to: File + )(implicit ex: ExecutionContext): BodyParser[DigestedFile] = + BodyParser("digested file, to=" + to) { request => + { + slurp(to).map { case (md, os) => + os.close() + validate(request, to, md) + } } } - } } diff --git a/image-loader/app/lib/Downloader.scala b/image-loader/app/lib/Downloader.scala index e2ba899684..b7b5f96419 100644 --- a/image-loader/app/lib/Downloader.scala +++ b/image-loader/app/lib/Downloader.scala @@ -25,7 +25,7 @@ class Downloader(implicit ec: ExecutionContext) extends GridLogging { val request = new Request.Builder().url(uri.toString).build() val response = client.newCall(request).execute() - val maybeExpectedSize = Try{response.header("Content-Length").toInt} + val maybeExpectedSize = Try { response.header("Content-Length").toInt } maybeExpectedSize match { case Failure(exception) => { diff --git a/image-loader/app/lib/FailureResponse.scala b/image-loader/app/lib/FailureResponse.scala index ed5adc0ed1..6f3dd7fd05 100644 --- a/image-loader/app/lib/FailureResponse.scala +++ b/image-loader/app/lib/FailureResponse.scala @@ -19,19 +19,35 @@ object FailureResponse extends ArgoHelpers { val failedUriDownload: Result = { logger.warn("importImage request failed") - respondError(BadRequest, "failed-uri-download", s"The provided 'uri' could not be downloaded") + respondError( + BadRequest, + "failed-uri-download", + s"The provided 'uri' could not be downloaded" + ) } - def unsupportedMimeType(unsupported: UnsupportedMimeTypeException, supportedMimeTypes: List[MimeType]): Result = { - logger.info(s"Rejected request to load file: mime-type is not supported", unsupported) + def unsupportedMimeType( + unsupported: 
UnsupportedMimeTypeException, + supportedMimeTypes: List[MimeType] + ): Result = { + logger.info( + s"Rejected request to load file: mime-type is not supported", + unsupported + ) respondError( UnsupportedMediaType, "unsupported-type", s"Unsupported mime-type: ${unsupported.mimeType}. Supported: ${supportedMimeTypes.mkString(", ")}" ) } - def notAnImage(exception: Exception, supportedMimeTypes: List[MimeType]): Result = { - logger.info(s"Rejected request to load file: file type is not supported", exception) + def notAnImage( + exception: Exception, + supportedMimeTypes: List[MimeType] + ): Result = { + logger.info( + s"Rejected request to load file: file type is not supported", + exception + ) respondError( UnsupportedMediaType, @@ -41,7 +57,10 @@ object FailureResponse extends ArgoHelpers { } def badImage(exception: Exception): Result = { - logger.info(s"Rejected request to load file: image file is not good", exception) + logger.info( + s"Rejected request to load file: image file is not good", + exception + ) respondError( UnsupportedMediaType, diff --git a/image-loader/app/lib/ImageLoaderConfig.scala b/image-loader/app/lib/ImageLoaderConfig.scala index 9a6657054d..f1f20970e4 100644 --- a/image-loader/app/lib/ImageLoaderConfig.scala +++ b/image-loader/app/lib/ImageLoaderConfig.scala @@ -1,12 +1,22 @@ package lib import java.io.File -import com.gu.mediaservice.lib.cleanup.{ComposedImageProcessor, ImageProcessor, ImageProcessorResources} -import com.gu.mediaservice.lib.config.{CommonConfig, GridConfigResources, ImageProcessorLoader} +import com.gu.mediaservice.lib.cleanup.{ + ComposedImageProcessor, + ImageProcessor, + ImageProcessorResources +} +import com.gu.mediaservice.lib.config.{ + CommonConfig, + GridConfigResources, + ImageProcessorLoader +} import com.gu.mediaservice.model._ import com.typesafe.scalalogging.StrictLogging -class ImageLoaderConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) with StrictLogging { +class ImageLoaderConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) + with StrictLogging { val imageBucket: String = string("s3.image.bucket") val thumbnailBucket: String = string("s3.thumb.bucket") @@ -22,11 +32,12 @@ class ImageLoaderConfig(resources: GridConfigResources) extends CommonConfig(res val apiUri: String = services.apiBaseUri val loginUriTemplate: String = services.loginUriTemplate - val transcodedMimeTypes: List[MimeType] = getStringSet("transcoded.mime.types").toList.map(MimeType(_)) - val supportedMimeTypes: List[MimeType] = List(Jpeg, Png) ::: transcodedMimeTypes + val transcodedMimeTypes: List[MimeType] = + getStringSet("transcoded.mime.types").toList.map(MimeType(_)) + val supportedMimeTypes: List[MimeType] = + List(Jpeg, Png) ::: transcodedMimeTypes - /** - * Load in the chain of image processors from config. This can be a list of + /** Load in the chain of image processors from config. This can be a list of * companion objects, class names, both with and without config. * For example: * {{{ @@ -53,9 +64,11 @@ class ImageLoaderConfig(resources: GridConfigResources) extends CommonConfig(res * If a configuration is needed by is not provided by the config, the module configuration will be used instead. 
*/ val imageProcessor: ComposedImageProcessor = { - val configLoader = ImageProcessorLoader.seqConfigLoader(ImageProcessorResources(this, resources.actorSystem)) + val configLoader = ImageProcessorLoader.seqConfigLoader( + ImageProcessorResources(this, resources.actorSystem) + ) val processors = configuration .get[Seq[ImageProcessor]]("image.processors")(configLoader) - ImageProcessor.compose("ImageConfigLoader-imageProcessor", processors:_*) + ImageProcessor.compose("ImageConfigLoader-imageProcessor", processors: _*) } -} \ No newline at end of file +} diff --git a/image-loader/app/lib/ImageLoaderStore.scala b/image-loader/app/lib/ImageLoaderStore.scala index 55e6474db8..edce699b06 100644 --- a/image-loader/app/lib/ImageLoaderStore.scala +++ b/image-loader/app/lib/ImageLoaderStore.scala @@ -3,4 +3,9 @@ package lib.storage import lib.ImageLoaderConfig import com.gu.mediaservice.lib -class ImageLoaderStore(config: ImageLoaderConfig) extends lib.ImageIngestOperations(config.imageBucket, config.thumbnailBucket, config) +class ImageLoaderStore(config: ImageLoaderConfig) + extends lib.ImageIngestOperations( + config.imageBucket, + config.thumbnailBucket, + config + ) diff --git a/image-loader/app/lib/Notifications.scala b/image-loader/app/lib/Notifications.scala index ca2c69a0ee..97411e347c 100644 --- a/image-loader/app/lib/Notifications.scala +++ b/image-loader/app/lib/Notifications.scala @@ -2,4 +2,5 @@ package lib import com.gu.mediaservice.lib.aws.ThrallMessageSender -class Notifications(config: ImageLoaderConfig) extends ThrallMessageSender(config.thrallKinesisStreamConfig) +class Notifications(config: ImageLoaderConfig) + extends ThrallMessageSender(config.thrallKinesisStreamConfig) diff --git a/image-loader/app/lib/QuarantineStore.scala b/image-loader/app/lib/QuarantineStore.scala index 1c750bb7c1..fe5a0b9d4b 100644 --- a/image-loader/app/lib/QuarantineStore.scala +++ b/image-loader/app/lib/QuarantineStore.scala @@ -3,4 +3,5 @@ package lib.storage import lib.ImageLoaderConfig import com.gu.mediaservice.lib -class QuarantineStore(config: ImageLoaderConfig) extends lib.ImageQuarantineOperations(config.quarantineBucket.get, config) \ No newline at end of file +class QuarantineStore(config: ImageLoaderConfig) + extends lib.ImageQuarantineOperations(config.quarantineBucket.get, config) diff --git a/image-loader/app/lib/imaging/FileMetadataReader.scala b/image-loader/app/lib/imaging/FileMetadataReader.scala index 9bc49be092..64631cb716 100644 --- a/image-loader/app/lib/imaging/FileMetadataReader.scala +++ b/image-loader/app/lib/imaging/FileMetadataReader.scala @@ -5,7 +5,11 @@ import java.util.concurrent.Executors import com.adobe.internal.xmp.XMPMetaFactory import com.drew.imaging.ImageMetadataReader -import com.drew.metadata.exif.{ExifDirectoryBase, ExifIFD0Directory, ExifSubIFDDirectory} +import com.drew.metadata.exif.{ + ExifDirectoryBase, + ExifIFD0Directory, + ExifSubIFDDirectory +} import com.drew.metadata.icc.IccDirectory import com.drew.metadata.iptc.IptcDirectory import com.drew.metadata.jpeg.JpegDirectory @@ -47,28 +51,42 @@ object FileMetadataReader { private val namespaces = Map( "GettyImagesGIFT" -> "http://xmp.gettyimages.com/gift/1.0/" ) - for ((prefix, namespaceUri) <- namespaces) XMPMetaFactory.getSchemaRegistry.registerNamespace(namespaceUri, prefix) + for ((prefix, namespaceUri) <- namespaces) + XMPMetaFactory.getSchemaRegistry.registerNamespace(namespaceUri, prefix) private implicit val ctx: ExecutionContext = ExecutionContext.fromExecutor(Executors.newCachedThreadPool) - 
def fromIPTCHeaders(image: File, imageId:String): Future[FileMetadata] = + def fromIPTCHeaders(image: File, imageId: String): Future[FileMetadata] = for { metadata <- readMetadata(image) - } - yield getMetadataWithIPTCHeaders(metadata, imageId) // FIXME: JPEG, JFIF, Photoshop, GPS, File + } yield getMetadataWithIPTCHeaders( + metadata, + imageId + ) // FIXME: JPEG, JFIF, Photoshop, GPS, File def fromIPTCHeadersWithColorInfo(image: ImageWrapper): Future[FileMetadata] = fromIPTCHeadersWithColorInfo(image.file, image.id, image.mimeType) - def fromIPTCHeadersWithColorInfo(image: File, imageId:String, mimeType: MimeType): Future[FileMetadata] = + def fromIPTCHeadersWithColorInfo( + image: File, + imageId: String, + mimeType: MimeType + ): Future[FileMetadata] = for { metadata <- readMetadata(image) - colourModelInformation <- getColorModelInformation(image, metadata, mimeType) - } - yield getMetadataWithIPTCHeaders(metadata, imageId).copy(colourModelInformation = colourModelInformation) - - private def getMetadataWithIPTCHeaders(metadata: Metadata, imageId:String): FileMetadata = + colourModelInformation <- getColorModelInformation( + image, + metadata, + mimeType + ) + } yield getMetadataWithIPTCHeaders(metadata, imageId) + .copy(colourModelInformation = colourModelInformation) + + private def getMetadataWithIPTCHeaders( + metadata: Metadata, + imageId: String + ): FileMetadata = FileMetadata( iptc = exportDirectory(metadata, classOf[IptcDirectory]), exif = exportDirectory(metadata, classOf[ExifIFD0Directory]), @@ -81,31 +99,45 @@ object FileMetadataReader { ) // Export all the metadata in the directory - private def exportDirectory[T <: Directory](metadata: Metadata, directoryClass: Class[T]): Map[String, String] = + private def exportDirectory[T <: Directory]( + metadata: Metadata, + directoryClass: Class[T] + ): Map[String, String] = Option(metadata.getFirstDirectoryOfType(directoryClass)) map { directory => - val metaTagsMap = directory.getTags.asScala. - filter(tag => tag.hasTagName). + val metaTagsMap = directory.getTags.asScala + .filter(tag => tag.hasTagName) + . // Ignore seemingly useless "Padding" fields // see: https://github.com/drewnoakes/metadata-extractor/issues/100 - filter(tag => tag.getTagName != "Padding"). + filter(tag => tag.getTagName != "Padding") + . // Ignore meta-metadata - filter(tag => tag.getTagName != "XMP Value Count"). 
- flatMap { tag => - nonEmptyTrimmed(tag.getDescription) map { value => tag.getTagName -> value } - }.toMap + filter(tag => tag.getTagName != "XMP Value Count") + .flatMap { tag => + nonEmptyTrimmed(tag.getDescription) map { value => + tag.getTagName -> value + } + } + .toMap directory match { case d: IptcDirectory => val dateTimeCreated = - Option(d.getDateCreated).map(d => dateToUTCString(new DateTime(d))).map("Date Time Created Composite" -> _) + Option(d.getDateCreated) + .map(d => dateToUTCString(new DateTime(d))) + .map("Date Time Created Composite" -> _) val digitalDateTimeCreated = - Option(d.getDigitalDateCreated).map(d => dateToUTCString(new DateTime(d))).map("Digital Date Time Created Composite" -> _) + Option(d.getDigitalDateCreated) + .map(d => dateToUTCString(new DateTime(d))) + .map("Digital Date Time Created Composite" -> _) metaTagsMap ++ dateTimeCreated ++ digitalDateTimeCreated case d: ExifSubIFDDirectory => - val dateTimeCreated = Option(d.getDateOriginal).map(d => dateToUTCString(new DateTime(d))).map("Date/Time Original Composite" -> _) + val dateTimeCreated = Option(d.getDateOriginal) + .map(d => dateToUTCString(new DateTime(d))) + .map("Date/Time Original Composite" -> _) metaTagsMap ++ dateTimeCreated case _ => metaTagsMap @@ -113,24 +145,38 @@ object FileMetadataReader { } getOrElse Map() private val datePattern = "(.*[Dd]ate.*)".r - private def xmpDirectoryToMap(directory: XmpDirectory, imageId: String): Map[String, String] = { - directory.getXmpProperties.asScala.toMap.mapValues(nonEmptyTrimmed).collect { - case (datePattern(key), Some(value)) => key -> ImageMetadataConverter.cleanDate(value, key, imageId) - case (key, Some(value)) => key -> value - } + private def xmpDirectoryToMap( + directory: XmpDirectory, + imageId: String + ): Map[String, String] = { + directory.getXmpProperties.asScala.toMap + .mapValues(nonEmptyTrimmed) + .collect { + case (datePattern(key), Some(value)) => + key -> ImageMetadataConverter.cleanDate(value, key, imageId) + case (key, Some(value)) => key -> value + } } - private def exportRawXmpProperties(metadata: Metadata, imageId:String): Map[String, String] = { - val directories = metadata.getDirectoriesOfType(classOf[XmpDirectory]).asScala.toList - val props: Map[String, String] = directories.foldLeft[Map[String, String]](Map.empty)((acc, dir) => { - // An image can have multiple xmp directories. A directory has multiple xmp properties. - // A property can be repeated across directories and its value may not be unique. - // Keep the first value encountered on the basis that there will only be multiple directories - // if there is no space in the previous one as directories have a maximum size. - acc ++ xmpDirectoryToMap(dir, imageId).filterKeys(k => !acc.contains(k)) - }) + private def exportRawXmpProperties( + metadata: Metadata, + imageId: String + ): Map[String, String] = { + val directories = + metadata.getDirectoriesOfType(classOf[XmpDirectory]).asScala.toList + val props: Map[String, String] = + directories.foldLeft[Map[String, String]](Map.empty)((acc, dir) => { + // An image can have multiple xmp directories. A directory has multiple xmp properties. + // A property can be repeated across directories and its value may not be unique. + // Keep the first value encountered on the basis that there will only be multiple directories + // if there is no space in the previous one as directories have a maximum size. 
+ acc ++ xmpDirectoryToMap(dir, imageId).filterKeys(k => !acc.contains(k)) + }) props } - private def exportXmpPropertiesInTransformedSchema(metadata: Metadata, imageId:String): Map[String, JsValue] = { + private def exportXmpPropertiesInTransformedSchema( + metadata: Metadata, + imageId: String + ): Map[String, JsValue] = { val props = exportRawXmpProperties(metadata, imageId) FileMetadataAggregator.aggregateMetadataMap(props) } @@ -138,101 +184,150 @@ object FileMetadataReader { // Getty made up their own XMP namespace. // We're awaiting actual documentation of the properties available, so // this only extracts a small subset of properties as a means to identify Getty images. - private def exportGettyDirectory(metadata: Metadata, imageId:String): Map[String, String] = { - val xmpProperties = exportRawXmpProperties(metadata, imageId) - - def readProperty(name: String): Option[String] = xmpProperties.get(name) - - def readAssetId: Option[String] = readProperty("GettyImagesGIFT:AssetId").orElse(readProperty("GettyImagesGIFT:AssetID")) - - Map( - "Asset ID" -> readAssetId, - "Call For Image" -> readProperty("GettyImagesGIFT:CallForImage"), - "Camera Filename" -> readProperty("GettyImagesGIFT:CameraFilename"), - "Camera Make Model" -> readProperty("GettyImagesGIFT:CameraMakeModel"), - "Composition" -> readProperty("GettyImagesGIFT:Composition"), - "Exclusive Coverage" -> readProperty("GettyImagesGIFT:ExclusiveCoverage"), - "Image Rank" -> readProperty("GettyImagesGIFT:ImageRank"), - "Original Create Date Time" -> readProperty("GettyImagesGIFT:OriginalCreateDateTime"), - "Original Filename" -> readProperty("GettyImagesGIFT:OriginalFilename"), - "Personality" -> readProperty("GettyImagesGIFT:Personality"), - "Time Shot" -> readProperty("GettyImagesGIFT:TimeShot") - ).flattenOptions + private def exportGettyDirectory( + metadata: Metadata, + imageId: String + ): Map[String, String] = { + val xmpProperties = exportRawXmpProperties(metadata, imageId) + + def readProperty(name: String): Option[String] = xmpProperties.get(name) + + def readAssetId: Option[String] = + readProperty("GettyImagesGIFT:AssetId").orElse( + readProperty("GettyImagesGIFT:AssetID") + ) + + Map( + "Asset ID" -> readAssetId, + "Call For Image" -> readProperty("GettyImagesGIFT:CallForImage"), + "Camera Filename" -> readProperty("GettyImagesGIFT:CameraFilename"), + "Camera Make Model" -> readProperty("GettyImagesGIFT:CameraMakeModel"), + "Composition" -> readProperty("GettyImagesGIFT:Composition"), + "Exclusive Coverage" -> readProperty("GettyImagesGIFT:ExclusiveCoverage"), + "Image Rank" -> readProperty("GettyImagesGIFT:ImageRank"), + "Original Create Date Time" -> readProperty( + "GettyImagesGIFT:OriginalCreateDateTime" + ), + "Original Filename" -> readProperty("GettyImagesGIFT:OriginalFilename"), + "Personality" -> readProperty("GettyImagesGIFT:Personality"), + "Time Shot" -> readProperty("GettyImagesGIFT:TimeShot") + ).flattenOptions } - private def dateToUTCString(date: DateTime): String = ISODateTimeFormat.dateTime.print(date.withZone(DateTimeZone.UTC)) + private def dateToUTCString(date: DateTime): String = + ISODateTimeFormat.dateTime.print(date.withZone(DateTimeZone.UTC)) - def dimensions(image: File, mimeType: Option[MimeType]): Future[Option[Dimensions]] = + def dimensions( + image: File, + mimeType: Option[MimeType] + ): Future[Option[Dimensions]] = for { metadata <- readMetadata(image) - } - yield { + } yield { mimeType match { - case Some(Jpeg) => for { - jpegDir <- 
Option(metadata.getFirstDirectoryOfType(classOf[JpegDirectory])) - - } yield Dimensions(jpegDir.getImageWidth, jpegDir.getImageHeight) - - case Some(Png) => for { - pngDir <- Option(metadata.getFirstDirectoryOfType(classOf[PngDirectory])) - - } yield { - val width = pngDir.getInt(PngDirectory.TAG_IMAGE_WIDTH) - val height = pngDir.getInt(PngDirectory.TAG_IMAGE_HEIGHT) - Dimensions(width, height) - } - - case Some(Tiff) => for { - exifDir <- Option(metadata.getFirstDirectoryOfType(classOf[ExifIFD0Directory])) - - } yield { - val width = exifDir.getInt(ExifDirectoryBase.TAG_IMAGE_WIDTH) - val height = exifDir.getInt(ExifDirectoryBase.TAG_IMAGE_HEIGHT) - Dimensions(width, height) - } + case Some(Jpeg) => + for { + jpegDir <- Option( + metadata.getFirstDirectoryOfType(classOf[JpegDirectory]) + ) + + } yield Dimensions(jpegDir.getImageWidth, jpegDir.getImageHeight) + + case Some(Png) => + for { + pngDir <- Option( + metadata.getFirstDirectoryOfType(classOf[PngDirectory]) + ) + + } yield { + val width = pngDir.getInt(PngDirectory.TAG_IMAGE_WIDTH) + val height = pngDir.getInt(PngDirectory.TAG_IMAGE_HEIGHT) + Dimensions(width, height) + } + + case Some(Tiff) => + for { + exifDir <- Option( + metadata.getFirstDirectoryOfType(classOf[ExifIFD0Directory]) + ) + + } yield { + val width = exifDir.getInt(ExifDirectoryBase.TAG_IMAGE_WIDTH) + val height = exifDir.getInt(ExifDirectoryBase.TAG_IMAGE_HEIGHT) + Dimensions(width, height) + } case _ => None } } - def getColorModelInformation(image: File, metadata: Metadata, mimeType: MimeType): Future[Map[String, String]] = { + def getColorModelInformation( + image: File, + metadata: Metadata, + mimeType: MimeType + ): Future[Map[String, String]] = { val source = addImage(image) val formatter = format(source)("%r") - runIdentifyCmd(formatter).map{ imageType => getColourInformation(metadata, imageType.headOption, mimeType) } + runIdentifyCmd(formatter) + .map { imageType => + getColourInformation(metadata, imageType.headOption, mimeType) + } .recover { case _ => getColourInformation(metadata, None, mimeType) } } - private def getColourInformation(metadata: Metadata, maybeImageType: Option[String], mimeType: MimeType): Map[String, String] = { + private def getColourInformation( + metadata: Metadata, + maybeImageType: Option[String], + mimeType: MimeType + ): Map[String, String] = { - val hasAlpha = maybeImageType.map(imageType => if (imageType.contains("Matte")) "true" else "false") + val hasAlpha = maybeImageType.map(imageType => + if (imageType.contains("Matte")) "true" else "false" + ) mimeType match { - case Png => val metaDir = metadata.getFirstDirectoryOfType(classOf[PngDirectory]) + case Png => + val metaDir = metadata.getFirstDirectoryOfType(classOf[PngDirectory]) Map( "hasAlpha" -> hasAlpha, - "colorType" -> Option(metaDir.getDescription(PngDirectory.TAG_COLOR_TYPE)), - "bitsPerSample" -> Option(metaDir.getDescription(PngDirectory.TAG_BITS_PER_SAMPLE)), - "paletteHasTransparency" -> Option(metaDir.getDescription(PngDirectory.TAG_PALETTE_HAS_TRANSPARENCY)), - "paletteSize" -> Option(metaDir.getDescription(PngDirectory.TAG_PALETTE_SIZE)), - "iccProfileName" -> Option(metaDir.getDescription(PngDirectory.TAG_ICC_PROFILE_NAME)) + "colorType" -> Option( + metaDir.getDescription(PngDirectory.TAG_COLOR_TYPE) + ), + "bitsPerSample" -> Option( + metaDir.getDescription(PngDirectory.TAG_BITS_PER_SAMPLE) + ), + "paletteHasTransparency" -> Option( + metaDir.getDescription(PngDirectory.TAG_PALETTE_HAS_TRANSPARENCY) + ), + "paletteSize" -> Option( + 
metaDir.getDescription(PngDirectory.TAG_PALETTE_SIZE) + ), + "iccProfileName" -> Option( + metaDir.getDescription(PngDirectory.TAG_ICC_PROFILE_NAME) + ) ).flattenOptions - case _ => val metaDir = Option(metadata.getFirstDirectoryOfType(classOf[ExifIFD0Directory])) + case _ => + val metaDir = Option( + metadata.getFirstDirectoryOfType(classOf[ExifIFD0Directory]) + ) Map( "hasAlpha" -> hasAlpha, "colorType" -> maybeImageType, - "photometricInterpretation" -> metaDir.map(_.getDescription(ExifDirectoryBase.TAG_PHOTOMETRIC_INTERPRETATION)), - "bitsPerSample" -> metaDir.map(_.getDescription(ExifDirectoryBase.TAG_BITS_PER_SAMPLE)) + "photometricInterpretation" -> metaDir.map( + _.getDescription(ExifDirectoryBase.TAG_PHOTOMETRIC_INTERPRETATION) + ), + "bitsPerSample" -> metaDir.map( + _.getDescription(ExifDirectoryBase.TAG_BITS_PER_SAMPLE) + ) ).flattenOptions } - - } private def nonEmptyTrimmed(nullableStr: String): Option[String] = diff --git a/image-loader/app/lib/imaging/MimeTypeDetection.scala b/image-loader/app/lib/imaging/MimeTypeDetection.scala index 1285ad6a0c..b8611045c6 100644 --- a/image-loader/app/lib/imaging/MimeTypeDetection.scala +++ b/image-loader/app/lib/imaging/MimeTypeDetection.scala @@ -9,27 +9,42 @@ import org.apache.tika.Tika import scala.util.{Failure, Success, Try} object MimeTypeDetection extends GridLogging { - def guessMimeType(file: File): Either[UnsupportedMimeTypeException, MimeType] = Try(usingTika(file)) match { + def guessMimeType( + file: File + ): Either[UnsupportedMimeTypeException, MimeType] = Try( + usingTika(file) + ) match { case Success(mimeType) => Right(mimeType) case Failure(tikaAttempt: UnsupportedMimeTypeException) => { Try(usingMetadataExtractor(file)) match { case Success(mimeType) => { - logger.info(s"Using mime type from metadata extractor as tika mime type is unsupported (${tikaAttempt.mimeType})") + logger.info( + s"Using mime type from metadata extractor as tika mime type is unsupported (${tikaAttempt.mimeType})" + ) Right(mimeType) } - case Failure(metadataExtractorAttempt: UnsupportedMimeTypeException) => { - logger.warn(s"Unsupported mime type: tika was ${tikaAttempt.mimeType}, metadata extractor was ${metadataExtractorAttempt.mimeType}", metadataExtractorAttempt) + case Failure( + metadataExtractorAttempt: UnsupportedMimeTypeException + ) => { + logger.warn( + s"Unsupported mime type: tika was ${tikaAttempt.mimeType}, metadata extractor was ${metadataExtractorAttempt.mimeType}", + metadataExtractorAttempt + ) Left(metadataExtractorAttempt) } - case Failure(_: Throwable) => Left(new UnsupportedMimeTypeException(FALLBACK)) + case Failure(_: Throwable) => + Left(new UnsupportedMimeTypeException(FALLBACK)) } } - case Failure(_: Throwable) => Left(new UnsupportedMimeTypeException(FALLBACK)) + case Failure(_: Throwable) => + Left(new UnsupportedMimeTypeException(FALLBACK)) } - private def usingTika(file: File): MimeType = MimeType(new Tika().detect(file)) + private def usingTika(file: File): MimeType = MimeType( + new Tika().detect(file) + ) - private def usingMetadataExtractor(file: File) : MimeType = { + private def usingMetadataExtractor(file: File): MimeType = { val stream = new BufferedInputStream(new FileInputStream(file)) val fileType = FileTypeDetector.detectFileType(stream) MimeType(fileType.getMimeType) diff --git a/image-loader/app/lib/imaging/exceptions.scala b/image-loader/app/lib/imaging/exceptions.scala index 54d6ed2ae0..29ade62783 100644 --- a/image-loader/app/lib/imaging/exceptions.scala +++ 
b/image-loader/app/lib/imaging/exceptions.scala @@ -1,9 +1,14 @@ package lib.imaging -class ImageLoaderException(val message: String) extends RuntimeException(message) +class ImageLoaderException(val message: String) + extends RuntimeException(message) -class UserImageLoaderException(override val message: String) extends ImageLoaderException(message) +class UserImageLoaderException(override val message: String) + extends ImageLoaderException(message) -class ServerImageLoaderException(override val message: String) extends ImageLoaderException(message) +class ServerImageLoaderException(override val message: String) + extends ImageLoaderException(message) class NoSuchImageExistsInS3(val bucket: String, val key: String) - extends ServerImageLoaderException(s"Could not find image in $bucket with key $key") + extends ServerImageLoaderException( + s"Could not find image in $bucket with key $key" + ) diff --git a/image-loader/app/model/Projector.scala b/image-loader/app/model/Projector.scala index e28fe981f7..a062526344 100644 --- a/image-loader/app/model/Projector.scala +++ b/image-loader/app/model/Projector.scala @@ -5,7 +5,12 @@ import java.util.UUID import com.amazonaws.services.s3.AmazonS3 import com.amazonaws.services.s3.model.{ObjectMetadata, S3Object} -import com.gu.mediaservice.lib.{ImageIngestOperations, StorableOptimisedImage, StorableOriginalImage, StorableThumbImage} +import com.gu.mediaservice.lib.{ + ImageIngestOperations, + StorableOptimisedImage, + StorableOriginalImage, + StorableThumbImage +} import com.gu.mediaservice.lib.aws.S3Ops import com.gu.mediaservice.lib.cleanup.ImageProcessor import com.gu.mediaservice.lib.imaging.ImageOperations @@ -27,15 +32,21 @@ object Projector { import Uploader.toImageUploadOpsCfg - def apply(config: ImageLoaderConfig, imageOps: ImageOperations)(implicit ec: ExecutionContext): Projector - = new Projector(toImageUploadOpsCfg(config), S3Ops.buildS3Client(config), imageOps, config.imageProcessor) + def apply(config: ImageLoaderConfig, imageOps: ImageOperations)(implicit + ec: ExecutionContext + ): Projector = new Projector( + toImageUploadOpsCfg(config), + S3Ops.buildS3Client(config), + imageOps, + config.imageProcessor + ) } case class S3FileExtractedMetadata( - uploadedBy: String, - uploadTime: DateTime, - uploadFileName: Option[String], - picdarUrn: Option[String] + uploadedBy: String, + uploadTime: DateTime, + uploadFileName: Option[String], + picdarUrn: Option[String] ) object S3FileExtractedMetadata { @@ -44,7 +55,8 @@ object S3FileExtractedMetadata { val fileUserMetadata = s3ObjectMetadata.getUserMetadata.asScala.toMap val uploadedBy = fileUserMetadata.getOrElse("uploaded_by", "re-ingester") - val uploadedTimeRaw = fileUserMetadata.getOrElse("upload_time", lastModified) + val uploadedTimeRaw = + fileUserMetadata.getOrElse("upload_time", lastModified) val uploadTime = new DateTime(uploadedTimeRaw).withZone(DateTimeZone.UTC) val picdarUrn = fileUserMetadata.get("identifier!picdarurn") @@ -56,20 +68,30 @@ object S3FileExtractedMetadata { uploadedBy = uploadedBy, uploadTime = uploadTime, uploadFileName = uploadFileName, - picdarUrn = picdarUrn, + picdarUrn = picdarUrn ) } } -class Projector(config: ImageUploadOpsCfg, - s3: AmazonS3, - imageOps: ImageOperations, - processor: ImageProcessor) { - - private val imageUploadProjectionOps = new ImageUploadProjectionOps(config, imageOps, processor) - - def projectS3ImageById(imageUploadProjector: Projector, imageId: String, tempFile: File, requestId: UUID) - (implicit ec: ExecutionContext, logMarker: 
LogMarker): Future[Option[Image]] = { +class Projector( + config: ImageUploadOpsCfg, + s3: AmazonS3, + imageOps: ImageOperations, + processor: ImageProcessor +) { + + private val imageUploadProjectionOps = + new ImageUploadProjectionOps(config, imageOps, processor) + + def projectS3ImageById( + imageUploadProjector: Projector, + imageId: String, + tempFile: File, + requestId: UUID + )(implicit + ec: ExecutionContext, + logMarker: LogMarker + ): Future[Option[Image]] = { Future { import ImageIngestOperations.fileKeyFromId val s3Key = fileKeyFromId(imageId) @@ -77,31 +99,45 @@ class Projector(config: ImageUploadOpsCfg, if (!s3.doesObjectExist(config.originalFileBucket, s3Key)) throw new NoSuchImageExistsInS3(config.originalFileBucket, s3Key) - Logger.info(s"object exists, getting s3 object at s3://${config.originalFileBucket}/$s3Key to perform Image projection") + Logger.info( + s"object exists, getting s3 object at s3://${config.originalFileBucket}/$s3Key to perform Image projection" + ) val s3Source = s3.getObject(config.originalFileBucket, s3Key) - val digestedFile = getSrcFileDigestForProjection(s3Source, imageId, tempFile) + val digestedFile = + getSrcFileDigestForProjection(s3Source, imageId, tempFile) val extractedS3Meta = S3FileExtractedMetadata(s3Source.getObjectMetadata) - val finalImageFuture = imageUploadProjector.projectImage(digestedFile, extractedS3Meta, requestId) + val finalImageFuture = imageUploadProjector.projectImage( + digestedFile, + extractedS3Meta, + requestId + ) val finalImage = Await.result(finalImageFuture, Duration.Inf) Some(finalImage) } } - private def getSrcFileDigestForProjection(s3Src: S3Object, imageId: String, tempFile: File) = { + private def getSrcFileDigestForProjection( + s3Src: S3Object, + imageId: String, + tempFile: File + ) = { IOUtils.copy(s3Src.getObjectContent, new FileOutputStream(tempFile)) DigestedFile(tempFile, imageId) } - def projectImage(srcFileDigest: DigestedFile, extractedS3Meta: S3FileExtractedMetadata, requestId: UUID) - (implicit ec: ExecutionContext, logMarker: LogMarker): Future[Image] = { + def projectImage( + srcFileDigest: DigestedFile, + extractedS3Meta: S3FileExtractedMetadata, + requestId: UUID + )(implicit ec: ExecutionContext, logMarker: LogMarker): Future[Image] = { import extractedS3Meta._ val DigestedFile(tempFile_, id_) = srcFileDigest // TODO more identifiers_ to rehydrate val identifiers_ = picdarUrn match { case Some(value) => Map[String, String]("picdarURN" -> value) - case _ => Map[String, String]() + case _ => Map[String, String]() } val uploadInfo_ = UploadInfo(filename = uploadFileName) @@ -123,22 +159,34 @@ class Projector(config: ImageUploadOpsCfg, } } -class ImageUploadProjectionOps(config: ImageUploadOpsCfg, - imageOps: ImageOperations, - processor: ImageProcessor) { +class ImageUploadProjectionOps( + config: ImageUploadOpsCfg, + imageOps: ImageOperations, + processor: ImageProcessor +) { import Uploader.{fromUploadRequestShared, toMetaMap} - - def projectImageFromUploadRequest(uploadRequest: UploadRequest) - (implicit ec: ExecutionContext, logMarker: LogMarker): Future[Image] = { - val dependenciesWithProjectionsOnly = ImageUploadOpsDependencies(config, imageOps, - projectOriginalFileAsS3Model, projectThumbnailFileAsS3Model, projectOptimisedPNGFileAsS3Model) - fromUploadRequestShared(uploadRequest, dependenciesWithProjectionsOnly, processor) + def projectImageFromUploadRequest( + uploadRequest: UploadRequest + )(implicit ec: ExecutionContext, logMarker: LogMarker): Future[Image] = { + val 
dependenciesWithProjectionsOnly = ImageUploadOpsDependencies( + config, + imageOps, + projectOriginalFileAsS3Model, + projectThumbnailFileAsS3Model, + projectOptimisedPNGFileAsS3Model + ) + fromUploadRequestShared( + uploadRequest, + dependenciesWithProjectionsOnly, + processor + ) } - private def projectOriginalFileAsS3Model(storableOriginalImage: StorableOriginalImage) - (implicit ec: ExecutionContext)= Future { + private def projectOriginalFileAsS3Model( + storableOriginalImage: StorableOriginalImage + )(implicit ec: ExecutionContext) = Future { val key = ImageIngestOperations.fileKeyFromId(storableOriginalImage.id) S3Ops.projectFileAsS3Object( config.originalFileBucket, @@ -149,9 +197,13 @@ class ImageUploadProjectionOps(config: ImageUploadOpsCfg, ) } - private def projectThumbnailFileAsS3Model(storableThumbImage: StorableThumbImage)(implicit ec: ExecutionContext) = Future { + private def projectThumbnailFileAsS3Model( + storableThumbImage: StorableThumbImage + )(implicit ec: ExecutionContext) = Future { val key = ImageIngestOperations.fileKeyFromId(storableThumbImage.id) - val thumbMimeType = Some(OptimiseWithPngQuant.optimiseMimeType) // this IS what we will generate. + val thumbMimeType = Some( + OptimiseWithPngQuant.optimiseMimeType + ) // this IS what we will generate. S3Ops.projectFileAsS3Object( config.thumbBucket, key, @@ -160,9 +212,13 @@ class ImageUploadProjectionOps(config: ImageUploadOpsCfg, ) } - private def projectOptimisedPNGFileAsS3Model(storableOptimisedImage: StorableOptimisedImage)(implicit ec: ExecutionContext) = Future { - val key = ImageIngestOperations.optimisedPngKeyFromId(storableOptimisedImage.id) - val optimisedPngMimeType = Some(ImageOperations.thumbMimeType) // this IS what we will generate. + private def projectOptimisedPNGFileAsS3Model( + storableOptimisedImage: StorableOptimisedImage + )(implicit ec: ExecutionContext) = Future { + val key = + ImageIngestOperations.optimisedPngKeyFromId(storableOptimisedImage.id) + val optimisedPngMimeType = + Some(ImageOperations.thumbMimeType) // this IS what we will generate. 
S3Ops.projectFileAsS3Object( config.originalFileBucket, key, diff --git a/image-loader/app/model/QuarantineUploader.scala b/image-loader/app/model/QuarantineUploader.scala index 529ee1d932..8add09638c 100644 --- a/image-loader/app/model/QuarantineUploader.scala +++ b/image-loader/app/model/QuarantineUploader.scala @@ -1,6 +1,5 @@ package model - import com.gu.mediaservice.lib.argo.ArgoHelpers import com.gu.mediaservice.lib.auth.Authentication import com.gu.mediaservice.lib.auth.Authentication.Principal @@ -22,12 +21,15 @@ import java.net.URLEncoder import java.nio.charset.StandardCharsets import scala.concurrent.{ExecutionContext, Future} -class QuarantineUploader(val store: QuarantineStore, - val config: ImageLoaderConfig) - (implicit val ec: ExecutionContext) extends ArgoHelpers { +class QuarantineUploader( + val store: QuarantineStore, + val config: ImageLoaderConfig +)(implicit val ec: ExecutionContext) + extends ArgoHelpers { - private def storeQuarantineFile(uploadRequest: UploadRequest) - (implicit logMarker: LogMarker) = { + private def storeQuarantineFile( + uploadRequest: UploadRequest + )(implicit logMarker: LogMarker) = { val meta = Uploader.toMetaMap(uploadRequest) store.storeQuarantineImage( uploadRequest.imageId, @@ -36,10 +38,10 @@ class QuarantineUploader(val store: QuarantineStore, meta ) } - - def quarantineFile(uploadRequest: UploadRequest)( - implicit ec: ExecutionContext, - logMarker: LogMarker): Future[JsObject] = { + + def quarantineFile( + uploadRequest: UploadRequest + )(implicit ec: ExecutionContext, logMarker: LogMarker): Future[JsObject] = { logger.info("Quarantining file") @@ -50,4 +52,4 @@ class QuarantineUploader(val store: QuarantineStore, Json.obj("uri" -> uri) } } -} \ No newline at end of file +} diff --git a/image-loader/app/model/Uploader.scala b/image-loader/app/model/Uploader.scala index df36117605..0e383925ee 100644 --- a/image-loader/app/model/Uploader.scala +++ b/image-loader/app/model/Uploader.scala @@ -9,14 +9,26 @@ import java.util.UUID import com.gu.mediaservice.lib.argo.ArgoHelpers import com.gu.mediaservice.lib.auth.Authentication import com.gu.mediaservice.lib.auth.Authentication.Principal -import com.gu.mediaservice.lib.{BrowserViewableImage, StorableOptimisedImage, StorableOriginalImage, StorableThumbImage} +import com.gu.mediaservice.lib.{ + BrowserViewableImage, + StorableOptimisedImage, + StorableOriginalImage, + StorableThumbImage +} import com.gu.mediaservice.lib.aws.{S3Object, UpdateMessage} -import com.gu.mediaservice.lib.cleanup.{ImageProcessor, MetadataCleaners, SupplierProcessors} +import com.gu.mediaservice.lib.cleanup.{ + ImageProcessor, + MetadataCleaners, + SupplierProcessors +} import com.gu.mediaservice.lib.config.MetadataConfig import com.gu.mediaservice.lib.formatting._ import com.gu.mediaservice.lib.imaging.ImageOperations import com.gu.mediaservice.lib.logging._ -import com.gu.mediaservice.lib.metadata.{FileMetadataHelper, ImageMetadataConverter} +import com.gu.mediaservice.lib.metadata.{ + FileMetadataHelper, + ImageMetadataConverter +} import com.gu.mediaservice.model._ import lib.{DigestedFile, ImageLoaderConfig, Notifications} import lib.imaging.{FileMetadataReader, MimeTypeDetection} @@ -31,10 +43,18 @@ import scala.concurrent.{ExecutionContext, Future} case class ImageUpload(uploadRequest: UploadRequest, image: Image) case object ImageUpload { - val metadataCleaners = new MetadataCleaners(MetadataConfig.allPhotographersMap) - - def createImage(uploadRequest: UploadRequest, source: Asset, thumbnail: Asset, png: 
Option[Asset], - fileMetadata: FileMetadata, metadata: ImageMetadata): Image = { + val metadataCleaners = new MetadataCleaners( + MetadataConfig.allPhotographersMap + ) + + def createImage( + uploadRequest: UploadRequest, + source: Asset, + thumbnail: Asset, + png: Option[Asset], + fileMetadata: FileMetadata, + metadata: ImageMetadata + ): Image = { val usageRights = NoRights Image( uploadRequest.imageId, @@ -59,20 +79,20 @@ case object ImageUpload { } case class ImageUploadOpsCfg( - tempDir: File, - thumbWidth: Int, - thumbQuality: Double, - transcodedMimeTypes: List[MimeType], - originalFileBucket: String, - thumbBucket: String + tempDir: File, + thumbWidth: Int, + thumbQuality: Double, + transcodedMimeTypes: List[MimeType], + originalFileBucket: String, + thumbBucket: String ) case class ImageUploadOpsDependencies( - config: ImageUploadOpsCfg, - imageOps: ImageOperations, - storeOrProjectOriginalFile: StorableOriginalImage => Future[S3Object], - storeOrProjectThumbFile: StorableThumbImage => Future[S3Object], - storeOrProjectOptimisedImage: StorableOptimisedImage => Future[S3Object] + config: ImageUploadOpsCfg, + imageOps: ImageOperations, + storeOrProjectOriginalFile: StorableOriginalImage => Future[S3Object], + storeOrProjectThumbFile: StorableThumbImage => Future[S3Object], + storeOrProjectOptimisedImage: StorableOptimisedImage => Future[S3Object] ) object Uploader extends GridLogging { @@ -88,14 +108,21 @@ object Uploader extends GridLogging { ) } - def fromUploadRequestShared(uploadRequest: UploadRequest, deps: ImageUploadOpsDependencies, processor: ImageProcessor) - (implicit ec: ExecutionContext, logMarker: LogMarker): Future[Image] = { + def fromUploadRequestShared( + uploadRequest: UploadRequest, + deps: ImageUploadOpsDependencies, + processor: ImageProcessor + )(implicit ec: ExecutionContext, logMarker: LogMarker): Future[Image] = { import deps._ logger.info("Starting image ops") - val fileMetadataFuture = toFileMetadata(uploadRequest.tempFile, uploadRequest.imageId, uploadRequest.mimeType) + val fileMetadataFuture = toFileMetadata( + uploadRequest.tempFile, + uploadRequest.imageId, + uploadRequest.mimeType + ) logger.info("Have read file headers") @@ -108,30 +135,41 @@ object Uploader extends GridLogging { uploadRequest, deps, fileMetadata, - processor)(ec, addLogMarkers(fileMetadata.toLogMarker)) + processor + )(ec, addLogMarkers(fileMetadata.toLogMarker)) }) } - private[model] def uploadAndStoreImage(storeOrProjectOriginalFile: StorableOriginalImage => Future[S3Object], - storeOrProjectThumbFile: StorableThumbImage => Future[S3Object], - storeOrProjectOptimisedFile: StorableOptimisedImage => Future[S3Object], - optimiseOps: OptimiseOps, - uploadRequest: UploadRequest, - deps: ImageUploadOpsDependencies, - fileMetadata: FileMetadata, - processor: ImageProcessor) - (implicit ec: ExecutionContext, logMarker: LogMarker) = { + private[model] def uploadAndStoreImage( + storeOrProjectOriginalFile: StorableOriginalImage => Future[S3Object], + storeOrProjectThumbFile: StorableThumbImage => Future[S3Object], + storeOrProjectOptimisedFile: StorableOptimisedImage => Future[S3Object], + optimiseOps: OptimiseOps, + uploadRequest: UploadRequest, + deps: ImageUploadOpsDependencies, + fileMetadata: FileMetadata, + processor: ImageProcessor + )(implicit ec: ExecutionContext, logMarker: LogMarker) = { val originalMimeType = uploadRequest.mimeType - .orElse(MimeTypeDetection.guessMimeType(uploadRequest.tempFile).toOption) - match { + .orElse( + 
MimeTypeDetection.guessMimeType(uploadRequest.tempFile).toOption + ) match { case Some(a) => a - case None => throw new Exception("File of unknown and undetectable mime type") + case None => + throw new Exception("File of unknown and undetectable mime type") } - val makeNewDirInTempDirHere: File = Files.createTempDirectory(deps.config.tempDir.toPath, "upload").toFile + val makeNewDirInTempDirHere: File = + Files.createTempDirectory(deps.config.tempDir.toPath, "upload").toFile - val colourModelFuture = ImageOperations.identifyColourModel(uploadRequest.tempFile, originalMimeType) - val sourceDimensionsFuture = FileMetadataReader.dimensions(uploadRequest.tempFile, Some(originalMimeType)) + val colourModelFuture = ImageOperations.identifyColourModel( + uploadRequest.tempFile, + originalMimeType + ) + val sourceDimensionsFuture = FileMetadataReader.dimensions( + uploadRequest.tempFile, + Some(originalMimeType) + ) val storableOriginalImage = StorableOriginalImage( uploadRequest.imageId, @@ -140,32 +178,61 @@ object Uploader extends GridLogging { toMetaMap(uploadRequest) ) val sourceStoreFuture = storeOrProjectOriginalFile(storableOriginalImage) - val eventualBrowserViewableImage = createBrowserViewableFileFuture(uploadRequest, makeNewDirInTempDirHere, deps) - + val eventualBrowserViewableImage = createBrowserViewableFileFuture( + uploadRequest, + makeNewDirInTempDirHere, + deps + ) val eventualImage = for { browserViewableImage <- eventualBrowserViewableImage s3Source <- sourceStoreFuture - optimisedFileMetadata <- FileMetadataReader.fromIPTCHeadersWithColorInfo(browserViewableImage) - thumbViewableImage <- createThumbFuture(optimisedFileMetadata, colourModelFuture, browserViewableImage, deps) + optimisedFileMetadata <- FileMetadataReader.fromIPTCHeadersWithColorInfo( + browserViewableImage + ) + thumbViewableImage <- createThumbFuture( + optimisedFileMetadata, + colourModelFuture, + browserViewableImage, + deps + ) s3Thumb <- storeOrProjectThumbFile(thumbViewableImage) - maybeStorableOptimisedImage <- getStorableOptimisedImage(makeNewDirInTempDirHere, optimiseOps, browserViewableImage, optimisedFileMetadata) + maybeStorableOptimisedImage <- getStorableOptimisedImage( + makeNewDirInTempDirHere, + optimiseOps, + browserViewableImage, + optimisedFileMetadata + ) s3PngOption <- maybeStorableOptimisedImage match { - case Some(storableOptimisedImage) => storeOrProjectOptimisedFile(storableOptimisedImage).map(a=>Some(a)) + case Some(storableOptimisedImage) => + storeOrProjectOptimisedFile(storableOptimisedImage).map(a => Some(a)) case None => Future.successful(None) } sourceDimensions <- sourceDimensionsFuture - thumbDimensions <- FileMetadataReader.dimensions(thumbViewableImage.file, Some(Jpeg)) + thumbDimensions <- FileMetadataReader.dimensions( + thumbViewableImage.file, + Some(Jpeg) + ) colourModel <- colourModelFuture } yield { val fullFileMetadata = fileMetadata.copy(colourModel = colourModel) - val metadata = ImageMetadataConverter.fromFileMetadata(fullFileMetadata, s3Source.metadata.objectMetadata.lastModified) + val metadata = ImageMetadataConverter.fromFileMetadata( + fullFileMetadata, + s3Source.metadata.objectMetadata.lastModified + ) val sourceAsset = Asset.fromS3Object(s3Source, sourceDimensions) val thumbAsset = Asset.fromS3Object(s3Thumb, thumbDimensions) val pngAsset = s3PngOption.map(Asset.fromS3Object(_, sourceDimensions)) - val baseImage = ImageUpload.createImage(uploadRequest, sourceAsset, thumbAsset, pngAsset, fullFileMetadata, metadata) + val baseImage = 
ImageUpload.createImage( + uploadRequest, + sourceAsset, + thumbAsset, + pngAsset, + fullFileMetadata, + metadata + ) val processedImage = processor(baseImage) @@ -176,7 +243,7 @@ object Uploader extends GridLogging { originalUsageRights = processedImage.usageRights ) } - eventualImage.onComplete{ _ => + eventualImage.onComplete { _ => makeNewDirInTempDirHere.listFiles().map(f => f.delete()) makeNewDirInTempDirHere.delete() } @@ -184,15 +251,33 @@ object Uploader extends GridLogging { } private def getStorableOptimisedImage( - tempDir: File, - optimiseOps: OptimiseOps, - browserViewableImage: BrowserViewableImage, - optimisedFileMetadata: FileMetadata) - (implicit ec: ExecutionContext, logMarker: LogMarker): Future[Option[StorableOptimisedImage]] = { - if (optimiseOps.shouldOptimise(Some(browserViewableImage.mimeType), optimisedFileMetadata)) { + tempDir: File, + optimiseOps: OptimiseOps, + browserViewableImage: BrowserViewableImage, + optimisedFileMetadata: FileMetadata + )(implicit + ec: ExecutionContext, + logMarker: LogMarker + ): Future[Option[StorableOptimisedImage]] = { + if ( + optimiseOps.shouldOptimise( + Some(browserViewableImage.mimeType), + optimisedFileMetadata + ) + ) { for { - (optimisedFile: File, optimisedMimeType: MimeType) <- optimiseOps.toOptimisedFile(browserViewableImage.file, browserViewableImage, tempDir) - } yield Some(browserViewableImage.copy(file = optimisedFile).copy(mimeType = optimisedMimeType).asStorableOptimisedImage) + (optimisedFile: File, optimisedMimeType: MimeType) <- optimiseOps + .toOptimisedFile( + browserViewableImage.file, + browserViewableImage, + tempDir + ) + } yield Some( + browserViewableImage + .copy(file = optimisedFile) + .copy(mimeType = optimisedMimeType) + .asStorableOptimisedImage + ) } else if (browserViewableImage.mustUpload) { Future.successful(Some(browserViewableImage.asStorableOptimisedImage)) } else @@ -206,42 +291,69 @@ object Uploader extends GridLogging { ) ++ uploadRequest.identifiersMeta uploadRequest.uploadInfo.filename match { - case Some(f) => baseMeta ++ Map("file_name" -> URLEncoder.encode(f, StandardCharsets.UTF_8.name())) + case Some(f) => + baseMeta ++ Map( + "file_name" -> URLEncoder.encode(f, StandardCharsets.UTF_8.name()) + ) case _ => baseMeta } } - private def toFileMetadata(f: File, imageId: String, mimeType: Option[MimeType]): Future[FileMetadata] = { + private def toFileMetadata( + f: File, + imageId: String, + mimeType: Option[MimeType] + ): Future[FileMetadata] = { mimeType match { - case Some(Png | Tiff) => FileMetadataReader.fromIPTCHeadersWithColorInfo(f, imageId, mimeType.get) + case Some(Png | Tiff) => + FileMetadataReader.fromIPTCHeadersWithColorInfo( + f, + imageId, + mimeType.get + ) case _ => FileMetadataReader.fromIPTCHeaders(f, imageId) } } - private def createThumbFuture(fileMetadata: FileMetadata, - colourModelFuture: Future[Option[String]], - browserViewableImage: BrowserViewableImage, - deps: ImageUploadOpsDependencies)(implicit ec: ExecutionContext) = { + private def createThumbFuture( + fileMetadata: FileMetadata, + colourModelFuture: Future[Option[String]], + browserViewableImage: BrowserViewableImage, + deps: ImageUploadOpsDependencies + )(implicit ec: ExecutionContext) = { import deps._ for { colourModel <- colourModelFuture iccColourSpace = FileMetadataHelper.normalisedIccColourSpace(fileMetadata) (thumb, thumbMimeType) <- imageOps - .createThumbnail(browserViewableImage.file, Some(browserViewableImage.mimeType), config.thumbWidth, - config.thumbQuality, config.tempDir, 
iccColourSpace, colourModel) + .createThumbnail( + browserViewableImage.file, + Some(browserViewableImage.mimeType), + config.thumbWidth, + config.thumbQuality, + config.tempDir, + iccColourSpace, + colourModel + ) } yield browserViewableImage .copy(file = thumb, mimeType = thumbMimeType) .asStorableThumbImage } - private def createBrowserViewableFileFuture(uploadRequest: UploadRequest, - tempDir: File, - deps: ImageUploadOpsDependencies)(implicit ec: ExecutionContext): Future[BrowserViewableImage] = { + private def createBrowserViewableFileFuture( + uploadRequest: UploadRequest, + tempDir: File, + deps: ImageUploadOpsDependencies + )(implicit ec: ExecutionContext): Future[BrowserViewableImage] = { import deps._ uploadRequest.mimeType match { case Some(mime) if config.transcodedMimeTypes.contains(mime) => for { - (file, mimeType) <- imageOps.transformImage(uploadRequest.tempFile, uploadRequest.mimeType, tempDir) + (file, mimeType) <- imageOps.transformImage( + uploadRequest.tempFile, + uploadRequest.mimeType, + tempDir + ) } yield BrowserViewableImage( uploadRequest.imageId, file = file, @@ -253,52 +365,76 @@ object Uploader extends GridLogging { BrowserViewableImage( uploadRequest.imageId, file = uploadRequest.tempFile, - mimeType = mimeType) + mimeType = mimeType + ) + ) + case None => + Future.failed( + new Exception( + "This file is not an image with an identifiable mime type" + ) ) - case None => Future.failed(new Exception("This file is not an image with an identifiable mime type")) } } } -class Uploader(val store: ImageLoaderStore, - val config: ImageLoaderConfig, - val imageOps: ImageOperations, - val notifications: Notifications) - (implicit val ec: ExecutionContext) extends ArgoHelpers { - - - - - def fromUploadRequest(uploadRequest: UploadRequest) - (implicit logMarker: LogMarker): Future[ImageUpload] = { - val sideEffectDependencies = ImageUploadOpsDependencies(toImageUploadOpsCfg(config), imageOps, - storeSource, storeThumbnail, storeOptimisedImage) - val finalImage = fromUploadRequestShared(uploadRequest, sideEffectDependencies, config.imageProcessor) - finalImage.map(img => Stopwatch("finalImage"){ImageUpload(uploadRequest, img)}) +class Uploader( + val store: ImageLoaderStore, + val config: ImageLoaderConfig, + val imageOps: ImageOperations, + val notifications: Notifications +)(implicit val ec: ExecutionContext) + extends ArgoHelpers { + + def fromUploadRequest( + uploadRequest: UploadRequest + )(implicit logMarker: LogMarker): Future[ImageUpload] = { + val sideEffectDependencies = ImageUploadOpsDependencies( + toImageUploadOpsCfg(config), + imageOps, + storeSource, + storeThumbnail, + storeOptimisedImage + ) + val finalImage = fromUploadRequestShared( + uploadRequest, + sideEffectDependencies, + config.imageProcessor + ) + finalImage.map(img => + Stopwatch("finalImage") { ImageUpload(uploadRequest, img) } + ) } - private def storeSource(storableOriginalImage: StorableOriginalImage) - (implicit logMarker: LogMarker) = store.store(storableOriginalImage) - - private def storeThumbnail(storableThumbImage: StorableThumbImage) - (implicit logMarker: LogMarker) = store.store(storableThumbImage) - - private def storeOptimisedImage(storableOptimisedImage: StorableOptimisedImage) - (implicit logMarker: LogMarker) = store.store(storableOptimisedImage) - - def loadFile(digestedFile: DigestedFile, - user: Principal, - uploadedBy: Option[String], - identifiers: Option[String], - uploadTime: DateTime, - filename: Option[String], - requestId: UUID) - (implicit ec:ExecutionContext, - 
logMarker: LogMarker): Future[UploadRequest] = Future { + private def storeSource(storableOriginalImage: StorableOriginalImage)(implicit + logMarker: LogMarker + ) = store.store(storableOriginalImage) + + private def storeThumbnail(storableThumbImage: StorableThumbImage)(implicit + logMarker: LogMarker + ) = store.store(storableThumbImage) + + private def storeOptimisedImage( + storableOptimisedImage: StorableOptimisedImage + )(implicit logMarker: LogMarker) = store.store(storableOptimisedImage) + + def loadFile( + digestedFile: DigestedFile, + user: Principal, + uploadedBy: Option[String], + identifiers: Option[String], + uploadTime: DateTime, + filename: Option[String], + requestId: UUID + )(implicit + ec: ExecutionContext, + logMarker: LogMarker + ): Future[UploadRequest] = Future { val DigestedFile(tempFile, id) = digestedFile // TODO: should error if the JSON parsing failed - val identifiersMap = identifiers.map(Json.parse(_).as[Map[String, String]]) getOrElse Map() + val identifiersMap = + identifiers.map(Json.parse(_).as[Map[String, String]]) getOrElse Map() MimeTypeDetection.guessMimeType(tempFile) match { case util.Left(unsupported) => @@ -319,15 +455,18 @@ class Uploader(val store: ImageLoaderStore, } } - def storeFile(uploadRequest: UploadRequest) - (implicit ec:ExecutionContext, - logMarker: LogMarker): Future[JsObject] = { + def storeFile( + uploadRequest: UploadRequest + )(implicit ec: ExecutionContext, logMarker: LogMarker): Future[JsObject] = { logger.info("Storing file") for { imageUpload <- fromUploadRequest(uploadRequest) - updateMessage = UpdateMessage(subject = "image", image = Some(imageUpload.image)) + updateMessage = UpdateMessage( + subject = "image", + image = Some(imageUpload.image) + ) _ <- Future { notifications.publish(updateMessage) } // TODO: centralise where all these URLs are constructed uri = s"${config.apiUri}/images/${uploadRequest.imageId}" @@ -338,4 +477,3 @@ class Uploader(val store: ImageLoaderStore, } } - diff --git a/image-loader/app/model/upload/OptimiseOps.scala b/image-loader/app/model/upload/OptimiseOps.scala index 6bd497227e..247a678049 100644 --- a/image-loader/app/model/upload/OptimiseOps.scala +++ b/image-loader/app/model/upload/OptimiseOps.scala @@ -10,10 +10,16 @@ import scala.concurrent.{ExecutionContext, Future} import scala.sys.process._ trait OptimiseOps { - def toOptimisedFile(file: File, imageWrapper: ImageWrapper, tempDir: File) - (implicit ec: ExecutionContext, logMarker: LogMarker): Future[(File, MimeType)] + def toOptimisedFile(file: File, imageWrapper: ImageWrapper, tempDir: File)( + implicit + ec: ExecutionContext, + logMarker: LogMarker + ): Future[(File, MimeType)] def isTransformedFilePath(filePath: String): Boolean - def shouldOptimise(mimeType: Option[MimeType], fileMetadata: FileMetadata): Boolean + def shouldOptimise( + mimeType: Option[MimeType], + fileMetadata: FileMetadata + ): Boolean def optimiseMimeType: MimeType } @@ -21,36 +27,54 @@ object OptimiseWithPngQuant extends OptimiseOps { override def optimiseMimeType: MimeType = Png - def toOptimisedFile(file: File, imageWrapper: ImageWrapper, tempDir: File) - (implicit ec: ExecutionContext, logMarker: LogMarker): Future[(File, MimeType)] = Future { + def toOptimisedFile(file: File, imageWrapper: ImageWrapper, tempDir: File)( + implicit + ec: ExecutionContext, + logMarker: LogMarker + ): Future[(File, MimeType)] = Future { - val optimisedFilePath = tempDir.getAbsolutePath + "/optimisedpng - " + imageWrapper.id + optimiseMimeType.fileExtension + val optimisedFilePath 
= + tempDir.getAbsolutePath + "/optimisedpng - " + imageWrapper.id + optimiseMimeType.fileExtension Stopwatch("pngquant") { - val result = Seq("pngquant", "--quality", "1-85", file.getAbsolutePath, "--output", optimisedFilePath).! - if (result>0) - throw new Exception(s"pngquant failed to convert to optimised png file (rc = $result)") + val result = Seq( + "pngquant", + "--quality", + "1-85", + file.getAbsolutePath, + "--output", + optimisedFilePath + ).! + if (result > 0) + throw new Exception( + s"pngquant failed to convert to optimised png file (rc = $result)" + ) } val optimisedFile = new File(optimisedFilePath) if (optimisedFile.exists()) { (optimisedFile, Png) } else { - throw new Exception(s"Attempted to optimise PNG file ${optimisedFile.getPath}") + throw new Exception( + s"Attempted to optimise PNG file ${optimisedFile.getPath}" + ) } } - def isTransformedFilePath(filePath: String): Boolean = filePath.contains("transformed-") + def isTransformedFilePath(filePath: String): Boolean = + filePath.contains("transformed-") - def shouldOptimise(mimeType: Option[MimeType], fileMetadata: FileMetadata): Boolean = + def shouldOptimise( + mimeType: Option[MimeType], + fileMetadata: FileMetadata + ): Boolean = mimeType match { case Some(Png) => fileMetadata.colourModelInformation.get("colorType") match { - case Some("True Color") => true + case Some("True Color") => true case Some("True Color with Alpha") => true - case _ => false + case _ => false } case Some(Tiff) => true - case _ => false + case _ => false } } - diff --git a/image-loader/app/model/upload/UploadRequest.scala b/image-loader/app/model/upload/UploadRequest.scala index b256776a0a..7893765cb9 100644 --- a/image-loader/app/model/upload/UploadRequest.scala +++ b/image-loader/app/model/upload/UploadRequest.scala @@ -10,17 +10,19 @@ import org.joda.time.{DateTime, DateTimeZone} import scala.collection.JavaConverters._ case class UploadRequest( - requestId: UUID, - imageId: String, - tempFile: File, - mimeType: Option[MimeType], - uploadTime: DateTime, - uploadedBy: String, - identifiers: Map[String, String], - uploadInfo: UploadInfo - ) { - - val identifiersMeta: Map[String, String] = identifiers.map { case (k, v) => (s"identifier!$k", v) } + requestId: UUID, + imageId: String, + tempFile: File, + mimeType: Option[MimeType], + uploadTime: DateTime, + uploadedBy: String, + identifiers: Map[String, String], + uploadInfo: UploadInfo +) { + + val identifiersMeta: Map[String, String] = identifiers.map { case (k, v) => + (s"identifier!$k", v) + } def toLogMarker: LogstashMarker = { val fallback = "none" @@ -29,7 +31,9 @@ case class UploadRequest( "requestId" -> requestId, "imageId" -> imageId, "mimeType" -> mimeType.getOrElse(fallback), - "uploadTime" -> ISODateTimeFormat.dateTime.print(uploadTime.withZone(DateTimeZone.UTC)), + "uploadTime" -> ISODateTimeFormat.dateTime.print( + uploadTime.withZone(DateTimeZone.UTC) + ), "uploadedBy" -> uploadedBy, "filename" -> uploadInfo.filename.getOrElse(fallback), "filesize" -> tempFile.length diff --git a/image-loader/test/scala/lib/imaging/FileMetadataReaderTest.scala b/image-loader/test/scala/lib/imaging/FileMetadataReaderTest.scala index a05be8e859..0f16bb8b2d 100644 --- a/image-loader/test/scala/lib/imaging/FileMetadataReaderTest.scala +++ b/image-loader/test/scala/lib/imaging/FileMetadataReaderTest.scala @@ -7,18 +7,18 @@ import org.scalatest.time.{Millis, Span} import org.scalatest.{FunSpec, Matchers} import play.api.libs.json.{JsArray, JsString, JsValue} -/** - * Test that the Reader returns 
the expected FileMetadata. - * - * This is somewhat akin to a unit test of the drew metadata - * library (and our thin integration above it). It is meant to help - * highlight differences and integration issues when upgrading the library. - */ +/** Test that the Reader returns the expected FileMetadata. + * + * This is somewhat akin to a unit test of the drew metadata + * library (and our thin integration above it). It is meant to help + * highlight differences and integration issues when upgrading the library. + */ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { import test.lib.ResourceHelpers._ - implicit override val patienceConfig = PatienceConfig(timeout = Span(1000, Millis), interval = Span(25, Millis)) + implicit override val patienceConfig = + PatienceConfig(timeout = Span(1000, Millis), interval = Span(25, Millis)) it("should read the correct dimensions for a JPG image") { val image = fileAt("getty.jpg") @@ -91,27 +91,37 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { val xmp = Map( "GettyImagesGIFT:ImageRank" -> JsString("3"), "GettyImagesGIFT:OriginalFilename" -> JsString("43885812_SEA.jpg"), - "dc:title" -> JsArray(Seq( - JsString("536991815"), - JsArray(Seq(JsString("{'xml:lang':'x-default'}"))), - )), + "dc:title" -> JsArray( + Seq( + JsString("536991815"), + JsArray(Seq(JsString("{'xml:lang':'x-default'}"))) + ) + ), "dc:creator" -> JsArray(Seq(JsString("CHRISTOF STACHE"))), "photoshop:SupplementalCategories" -> JsArray(Seq(JsString("SKI"))), - "photoshop:Headline" -> JsString("Austria's Matthias Mayer attends the men"), + "photoshop:Headline" -> JsString( + "Austria's Matthias Mayer attends the men" + ), "photoshop:TransmissionReference" -> JsString("-"), "photoshop:AuthorsPosition" -> JsString("Stringer"), "photoshop:CaptionWriter" -> JsString("CS/IW"), "plus:ImageSupplierImageId" -> JsString("DV1945213"), - "dc:description" -> JsArray(Seq( - JsString("Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images"), - JsArray(Seq(JsString("{'xml:lang':'x-default'}"))), - )), + "dc:description" -> JsArray( + Seq( + JsString( + "Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. 
AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images" + ), + JsArray(Seq(JsString("{'xml:lang':'x-default'}"))) + ) + ), "photoshop:City" -> JsString("KITZBUEHEL"), "GettyImagesGIFT:ExclusiveCoverage" -> JsString("False"), "photoshop:DateCreated" -> JsString("2015-01-22T00:00:00.000Z"), "photoshop:Credit" -> JsString("AFP/Getty Images"), "dc:Rights" -> JsString("CHRISTOF STACHE"), - "GettyImagesGIFT:OriginalCreateDateTime" -> JsString("0001-01-01T00:00:00.000Z"), + "GettyImagesGIFT:OriginalCreateDateTime" -> JsString( + "0001-01-01T00:00:00.000Z" + ), "Iptc4xmpCore:CountryCode" -> JsString("AUT"), "GettyImagesGIFT:CallForImage" -> JsString("False"), "photoshop:Country" -> JsString("AUSTRIA"), @@ -127,7 +137,9 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { } } - it("should read the xmp metadata as stored in the image (process image using GettyImagesGIFT prefix first)") { + it( + "should read the xmp metadata as stored in the image (process image using GettyImagesGIFT prefix first)" + ) { val rawPrefix0Xmp: Map[String, String] = Map( "GettyImagesGIFT:ImageRank" -> "3", "GettyImagesGIFT:OriginalFilename" -> "2008208_81774706JM148_England_v_Cze.jpg", @@ -188,40 +200,54 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { // `getty.jpg` uses the `GettyImagesGIFT` prefix, processing it first will populate the `XMPSchemaRegistry` cache, // resulting in `cech.jpg` to be read differently from the content in the file which uses the `prefix0` prefix. - val gettyGiftXmpFuture = FileMetadataReader.fromIPTCHeaders(fileAt("getty.jpg"), "dummy") + val gettyGiftXmpFuture = + FileMetadataReader.fromIPTCHeaders(fileAt("getty.jpg"), "dummy") whenReady(gettyGiftXmpFuture) { _ => - val prefix0MetadataFuture = FileMetadataReader.fromIPTCHeaders(fileAt("cech.jpg"), "dummy") + val prefix0MetadataFuture = + FileMetadataReader.fromIPTCHeaders(fileAt("cech.jpg"), "dummy") whenReady(prefix0MetadataFuture) { metadata => sameMaps(metadata.xmp, expected) } } } - it("should read the xmp metadata as stored in the image (process image using prefix0 prefix first)") { + it( + "should read the xmp metadata as stored in the image (process image using prefix0 prefix first)" + ) { val gettyGiftXmp: Map[String, JsValue] = Map( "GettyImagesGIFT:ImageRank" -> JsString("3"), "GettyImagesGIFT:OriginalFilename" -> JsString("43885812_SEA.jpg"), "dc:creator" -> JsArray(Seq(JsString("CHRISTOF STACHE"))), - "dc:title" -> JsArray(Seq( - JsString("536991815"), - JsArray(Seq(JsString("{'xml:lang':'x-default'}"))), - )), + "dc:title" -> JsArray( + Seq( + JsString("536991815"), + JsArray(Seq(JsString("{'xml:lang':'x-default'}"))) + ) + ), "photoshop:SupplementalCategories" -> JsArray(Seq(JsString("SKI"))), - "photoshop:Headline" -> JsString("Austria's Matthias Mayer attends the men"), + "photoshop:Headline" -> JsString( + "Austria's Matthias Mayer attends the men" + ), "photoshop:TransmissionReference" -> JsString("-"), "photoshop:AuthorsPosition" -> JsString("Stringer"), "photoshop:CaptionWriter" -> JsString("CS/IW"), "plus:ImageSupplierImageId" -> JsString("DV1945213"), - "dc:description" -> JsArray(Seq( - JsString("Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. 
AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images"), - JsArray(Seq(JsString("{'xml:lang':'x-default'}"))), - )), + "dc:description" -> JsArray( + Seq( + JsString( + "Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images" + ), + JsArray(Seq(JsString("{'xml:lang':'x-default'}"))) + ) + ), "photoshop:City" -> JsString("KITZBUEHEL"), "GettyImagesGIFT:ExclusiveCoverage" -> JsString("False"), "photoshop:DateCreated" -> JsString("2015-01-22T00:00:00.000Z"), "photoshop:Credit" -> JsString("AFP/Getty Images"), "dc:Rights" -> JsString("CHRISTOF STACHE"), - "GettyImagesGIFT:OriginalCreateDateTime" -> JsString("0001-01-01T00:00:00.000Z"), + "GettyImagesGIFT:OriginalCreateDateTime" -> JsString( + "0001-01-01T00:00:00.000Z" + ), "Iptc4xmpCore:CountryCode" -> JsString("AUT"), "GettyImagesGIFT:CallForImage" -> JsString("False"), "photoshop:Country" -> JsString("AUSTRIA"), @@ -231,16 +257,20 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { // `cech.jpg` uses the `prefix0` prefix, processing it first will populate the `XMPSchemaRegistry` cache, // resulting in `getty.jpg` to be read differently from the content in the file which uses the `GettyImagesGIFT` prefix. - val prefix0MetadataFuture = FileMetadataReader.fromIPTCHeaders(fileAt("cech.jpg"), "dummy") + val prefix0MetadataFuture = + FileMetadataReader.fromIPTCHeaders(fileAt("cech.jpg"), "dummy") whenReady(prefix0MetadataFuture) { _ => - val gettyGiftXmpFuture = FileMetadataReader.fromIPTCHeaders(fileAt("getty.jpg"), "dummy") + val gettyGiftXmpFuture = + FileMetadataReader.fromIPTCHeaders(fileAt("getty.jpg"), "dummy") whenReady(gettyGiftXmpFuture) { metadata => sameMaps(metadata.xmp, gettyGiftXmp) } } } - it("should always use the GettyImagesGIFT namespace for XMP metadata using the Getty schema") { + it( + "should always use the GettyImagesGIFT namespace for XMP metadata using the Getty schema" + ) { val rawExpected: Map[String, String] = Map( "GettyImagesGIFT:ImageRank" -> "3", "GettyImagesGIFT:OriginalFilename" -> "2008208_81774706JM148_England_v_Cze.jpg", @@ -298,9 +328,9 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { ) val aggExpected = FileMetadataAggregator.aggregateMetadataMap(rawExpected) - val metadataFuture = FileMetadataReader.fromIPTCHeaders(fileAt("cech.jpg"), "dummy") + val metadataFuture = + FileMetadataReader.fromIPTCHeaders(fileAt("cech.jpg"), "dummy") whenReady(metadataFuture) { metadata => - sameMaps(metadata.xmp, aggExpected) } } @@ -596,7 +626,8 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { it("should read the correct metadata for a grayscale png") { val image = fileAt("schaik.com_pngsuite/basn0g08.png") - val metadataFuture = FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Png) + val metadataFuture = + FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Png) whenReady(metadataFuture) { metadata => metadata.colourModelInformation should contain( "colorType" -> "Greyscale" @@ -606,7 +637,8 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { it("should read the correct metadata for a colour 8bit paletted png") { val image = fileAt("schaik.com_pngsuite/basn3p08.png") - val metadataFuture = FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Png) + val metadataFuture = + 
FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Png) whenReady(metadataFuture) { metadata => metadata.colourModelInformation should contain( "colorType" -> "Indexed Color" @@ -614,9 +646,12 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { } } - it("should read the correct metadata for a truecolour png without alpha channel") { + it( + "should read the correct metadata for a truecolour png without alpha channel" + ) { val image = fileAt("schaik.com_pngsuite/basn2c08.png") - val metadataFuture = FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Png) + val metadataFuture = + FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Png) whenReady(metadataFuture) { metadata => metadata.colourModelInformation should contain( "colorType" -> "True Color" @@ -624,9 +659,12 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { } } - it("should read the correct metadata for a truecolour pnd with alpha channel") { + it( + "should read the correct metadata for a truecolour pnd with alpha channel" + ) { val image = fileAt("schaik.com_pngsuite/basn6a08.png") - val metadataFuture = FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Png) + val metadataFuture = + FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Png) whenReady(metadataFuture) { metadata => metadata.colourModelInformation should contain( "colorType" -> "True Color with Alpha" @@ -636,7 +674,8 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { it("should read the correct colour metadata for a greyscale tiff") { val image = fileAt("flower.tif") - val metadataFuture = FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Tiff) + val metadataFuture = + FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Tiff) whenReady(metadataFuture) { metadata => metadata.colourModelInformation should contain( "photometricInterpretation" -> "BlackIsZero" @@ -646,7 +685,8 @@ class FileMetadataReaderTest extends FunSpec with Matchers with ScalaFutures { it("should read the correct colour metadata for an alpha tiff") { val image = fileAt("lighthouse.tif") - val metadataFuture = FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Tiff) + val metadataFuture = + FileMetadataReader.fromIPTCHeadersWithColorInfo(image, "dummy", Tiff) whenReady(metadataFuture) { metadata => metadata.colourModelInformation should contain( "photometricInterpretation" -> "RGB" diff --git a/image-loader/test/scala/lib/imaging/MimeTypeDetectionTest.scala b/image-loader/test/scala/lib/imaging/MimeTypeDetectionTest.scala index 1380f2b80c..ae75f689c4 100644 --- a/image-loader/test/scala/lib/imaging/MimeTypeDetectionTest.scala +++ b/image-loader/test/scala/lib/imaging/MimeTypeDetectionTest.scala @@ -9,23 +9,37 @@ class MimeTypeDetectionTest extends FunSpec with Matchers with ScalaFutures { import test.lib.ResourceHelpers._ it("should detect jpeg mime types for images") { - for (fileName <- List("getty.jpg", "corbis.jpg", "guardian-turner.jpg", "pa.jpg")) { + for ( + fileName <- List( + "getty.jpg", + "corbis.jpg", + "guardian-turner.jpg", + "pa.jpg" + ) + ) { val image = fileAt(fileName) - MimeTypeDetection.guessMimeType(image) should be (Right(Jpeg)) + MimeTypeDetection.guessMimeType(image) should be(Right(Jpeg)) } } it("should detect png mime types for images") { - for (fileName <- List("basn0g08.png", "basn2c08.png", "basn3p08.png", "basn6a08.png")) { + for ( + fileName <- List( + "basn0g08.png", + 
"basn2c08.png", + "basn3p08.png", + "basn6a08.png" + ) + ) { val image = fileAt("schaik.com_pngsuite/" + fileName) - MimeTypeDetection.guessMimeType(image) should be (Right(Png)) + MimeTypeDetection.guessMimeType(image) should be(Right(Png)) } } it("should detect tiff mime types for images") { for (fileName <- List("flag.tif")) { val image = fileAt(fileName) - MimeTypeDetection.guessMimeType(image) should be (Right(Tiff)) + MimeTypeDetection.guessMimeType(image) should be(Right(Tiff)) } } diff --git a/image-loader/test/scala/model/ImageUploadTest.scala b/image-loader/test/scala/model/ImageUploadTest.scala index 4e6ec20e16..af31a7f9ea 100644 --- a/image-loader/test/scala/model/ImageUploadTest.scala +++ b/image-loader/test/scala/model/ImageUploadTest.scala @@ -5,12 +5,29 @@ import java.net.URI import java.util.UUID import com.drew.imaging.ImageProcessingException -import com.gu.mediaservice.lib.{StorableImage, StorableOptimisedImage, StorableOriginalImage, StorableThumbImage} -import com.gu.mediaservice.lib.aws.{S3Metadata, S3Object, S3ObjectMetadata, S3Ops} +import com.gu.mediaservice.lib.{ + StorableImage, + StorableOptimisedImage, + StorableOriginalImage, + StorableThumbImage +} +import com.gu.mediaservice.lib.aws.{ + S3Metadata, + S3Object, + S3ObjectMetadata, + S3Ops +} import com.gu.mediaservice.lib.cleanup.ImageProcessor import com.gu.mediaservice.lib.imaging.ImageOperations import com.gu.mediaservice.lib.logging.LogMarker -import com.gu.mediaservice.model.{FileMetadata, Jpeg, MimeType, Png, Tiff, UploadInfo} +import com.gu.mediaservice.model.{ + FileMetadata, + Jpeg, + MimeType, + Png, + Tiff, + UploadInfo +} import lib.imaging.MimeTypeDetection import model.upload.{OptimiseWithPngQuant, UploadRequest} import org.joda.time.DateTime @@ -29,21 +46,30 @@ class ImageUploadTest extends AsyncFunSuite with Matchers with MockitoSugar { } private implicit val logMarker: MockLogMarker = new MockLogMarker() - // For mime type info, see https://github.com/guardian/grid/pull/2568 - val tempDir = new File("/tmp") - val mockConfig: ImageUploadOpsCfg = ImageUploadOpsCfg(tempDir, 256, 85d, List(Tiff), "img-bucket", "thumb-bucket") - - /** - * @todo: I flailed about until I found a path that worked, but + // For mime type info, see https://github.com/guardian/grid/pull/2568 + val tempDir = new File("/tmp") + val mockConfig: ImageUploadOpsCfg = ImageUploadOpsCfg( + tempDir, + 256, + 85d, + List(Tiff), + "img-bucket", + "thumb-bucket" + ) + + /** @todo: I flailed about until I found a path that worked, but * what arcane magic System.getProperty relies upon, and exactly * _how_ it will break in CI, I do not know */ - val imageOps: ImageOperations = new ImageOperations(System.getProperty("user.dir")) + val imageOps: ImageOperations = new ImageOperations( + System.getProperty("user.dir") + ) private def imageUpload( - fileName: String, - expectedOriginalMimeType: MimeType, - expectOptimisedFile: Boolean = false): Future[Assertion] = { + fileName: String, + expectedOriginalMimeType: MimeType, + expectOptimisedFile: Boolean = false + ): Future[Assertion] = { val uuid = UUID.randomUUID() val randomId = UUID.randomUUID().toString + fileName @@ -53,12 +79,21 @@ class ImageUploadTest extends AsyncFunSuite with Matchers with MockitoSugar { def mockStore = (a: StorableImage) => Future.successful( - S3Ops.projectFileAsS3Object(new URI("http://madeupname/"), a.file, Some(a.mimeType), a.meta, None) + S3Ops.projectFileAsS3Object( + new URI("http://madeupname/"), + a.file, + Some(a.mimeType), + a.meta, + None + ) ) - 
def storeOrProjectOriginalFile: StorableOriginalImage => Future[S3Object] = mockStore - def storeOrProjectThumbFile: StorableThumbImage => Future[S3Object] = mockStore - def storeOrProjectOptimisedPNG: StorableOptimisedImage => Future[S3Object] = mockStore + def storeOrProjectOriginalFile: StorableOriginalImage => Future[S3Object] = + mockStore + def storeOrProjectThumbFile: StorableThumbImage => Future[S3Object] = + mockStore + def storeOrProjectOptimisedPNG: StorableOptimisedImage => Future[S3Object] = + mockStore val mockDependencies = ImageUploadOpsDependencies( mockConfig, @@ -97,15 +132,29 @@ class ImageUploadTest extends AsyncFunSuite with Matchers with MockitoSugar { futureImage.map(i => { // Assertions on original request assert(i.id == randomId, "Correct id comes back") - assert(i.source.mimeType.contains(expectedOriginalMimeType), "Should have the correct mime type") + assert( + i.source.mimeType.contains(expectedOriginalMimeType), + "Should have the correct mime type" + ) // Assertions on generated thumbnail image assert(i.thumbnail.isDefined, "Should always create a thumbnail") - assert(i.thumbnail.get.mimeType.get == Jpeg, "Should have correct thumb mime type") + assert( + i.thumbnail.get.mimeType.get == Jpeg, + "Should have correct thumb mime type" + ) // Assertions on optional generated optimised png image - assert(i.optimisedPng.isDefined == expectOptimisedFile, "Should have optimised file") - assert(!expectOptimisedFile || i.optimisedPng.flatMap(p => p.mimeType).contains(Png), "Should have correct optimised mime type") + assert( + i.optimisedPng.isDefined == expectOptimisedFile, + "Should have optimised file" + ) + assert( + !expectOptimisedFile || i.optimisedPng + .flatMap(p => p.mimeType) + .contains(Png), + "Should have correct optimised mime type" + ) }) } @@ -115,8 +164,14 @@ class ImageUploadTest extends AsyncFunSuite with Matchers with MockitoSugar { ignore("An opaque tiff file which requires optimising for UI") { imageUpload("lighthouse.tif", Tiff, expectOptimisedFile = true) } - ignore("A layered tiff file (will require renaming extracted file) which requires optimising for UI") { - imageUpload("tiff_8bpc_layered_withTransparency.tif", Tiff, expectOptimisedFile = true) + ignore( + "A layered tiff file (will require renaming extracted file) which requires optimising for UI" + ) { + imageUpload( + "tiff_8bpc_layered_withTransparency.tif", + Tiff, + expectOptimisedFile = true + ) } ignore("Another opaque tiff file which requires optimising for UI") { imageUpload("tiff_8bpc_flat.tif", Tiff, expectOptimisedFile = true) @@ -124,24 +179,33 @@ class ImageUploadTest extends AsyncFunSuite with Matchers with MockitoSugar { ignore("A png which is suitable for UI viewing") { imageUpload("IndexedColor.png", Png) } - ignore("A png which is not suitable (too many colours + transparency) for UI viewing") { - imageUpload("bgan6a16_TrueColorWithAlpha_16bit.png", Png, expectOptimisedFile = true) + ignore( + "A png which is not suitable (too many colours + transparency) for UI viewing" + ) { + imageUpload( + "bgan6a16_TrueColorWithAlpha_16bit.png", + Png, + expectOptimisedFile = true + ) } ignore("A png which is not suitable (too many colours) for UI viewing") { imageUpload("basn2c16_TrueColor_16bit.png", Png, expectOptimisedFile = true) } ignore("not an image but looks like one") { - imageUpload("thisisnotanimage.jpg", Png, expectOptimisedFile = true).transformWith{ - case Success(_) => fail("Should have thrown an error") - case Failure(e) => e match { - case e: 
ImageProcessingException => assert(e.getMessage == "File format could not be determined") + imageUpload("thisisnotanimage.jpg", Png, expectOptimisedFile = true) + .transformWith { + case Success(_) => fail("Should have thrown an error") + case Failure(e) => + e match { + case e: ImageProcessingException => + assert(e.getMessage == "File format could not be determined") + } } - } } ignore("not an image and does not look like one") { // this exception is thrown before the futures are resolved, and so does not need transformWith - val caught = the [Exception] thrownBy - imageUpload("thisisnotanimage.stupid", Png, expectOptimisedFile = true) + val caught = the[Exception] thrownBy + imageUpload("thisisnotanimage.stupid", Png, expectOptimisedFile = true) assert(caught.getMessage == "File of unknown and undetectable mime type") } } diff --git a/image-loader/test/scala/model/ProjectorTest.scala b/image-loader/test/scala/model/ProjectorTest.scala index ba860dc3e9..bb0614aee9 100644 --- a/image-loader/test/scala/model/ProjectorTest.scala +++ b/image-loader/test/scala/model/ProjectorTest.scala @@ -21,20 +21,33 @@ import test.lib.ResourceHelpers import scala.concurrent.ExecutionContext.Implicits.global -class ProjectorTest extends FunSuite with Matchers with ScalaFutures with MockitoSugar { +class ProjectorTest + extends FunSuite + with Matchers + with ScalaFutures + with MockitoSugar { import ResourceHelpers.fileAt - implicit override val patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(1000, Millis), interval = Span(25, Millis)) + implicit override val patienceConfig: PatienceConfig = + PatienceConfig(timeout = Span(1000, Millis), interval = Span(25, Millis)) private val ctxPath = new File("image-loader/").getAbsolutePath private val imageOperations = new ImageOperations(ctxPath) - private val config = ImageUploadOpsCfg(new File("/tmp"), 256, 85d, Nil, "img-bucket", "thumb-bucket") + private val config = ImageUploadOpsCfg( + new File("/tmp"), + 256, + 85d, + Nil, + "img-bucket", + "thumb-bucket" + ) private val s3 = mock[AmazonS3] - private val projector = new Projector(config, s3, imageOperations, ImageProcessor.identity) + private val projector = + new Projector(config, s3, imageOperations, ImageProcessor.identity) // FIXME temporary ignored as test is not executable in CI/CD machine // because graphic lib files like srgb.icc, cmyk.icc are in root directory instead of resources @@ -44,7 +57,8 @@ class ProjectorTest extends FunSuite with Matchers with ScalaFutures with Mockit val testFile = fileAt("resources/getty.jpg") val fileDigest = DigestedFile(testFile, "id123") val uploadedBy = "test" - val uploadTime = new DateTime("2020-01-24T17:36:08.456Z").withZone(DateTimeZone.UTC) + val uploadTime = + new DateTime("2020-01-24T17:36:08.456Z").withZone(DateTimeZone.UTC) val uploadFileName = Some("getty.jpg") // expected @@ -85,27 +99,37 @@ class ProjectorTest extends FunSuite with Matchers with ScalaFutures with Mockit val xmp = Map( "GettyImagesGIFT:ImageRank" -> JsString("3"), "GettyImagesGIFT:OriginalFilename" -> JsString("43885812_SEA.jpg"), - "dc:title" -> JsArray(Seq( - JsString("536991815"), - JsArray(Seq(JsString("{'xml:lang':'x-default'}"))), - )), + "dc:title" -> JsArray( + Seq( + JsString("536991815"), + JsArray(Seq(JsString("{'xml:lang':'x-default'}"))) + ) + ), "dc:creator" -> JsArray(Seq(JsString("CHRISTOF STACHE"))), "photoshop:SupplementalCategories" -> JsArray(Seq(JsString("SKI"))), - "photoshop:Headline" -> JsString("Austria's Matthias Mayer attends the men"), + 
"photoshop:Headline" -> JsString( + "Austria's Matthias Mayer attends the men" + ), "photoshop:TransmissionReference" -> JsString("-"), "photoshop:AuthorsPosition" -> JsString("Stringer"), "photoshop:CaptionWriter" -> JsString("CS/IW"), "plus:ImageSupplierImageId" -> JsString("DV1945213"), - "dc:description" -> JsArray(Seq( - JsString("Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images"), - JsArray(Seq(JsString("{'xml:lang':'x-default'}"))), - )), + "dc:description" -> JsArray( + Seq( + JsString( + "Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images" + ), + JsArray(Seq(JsString("{'xml:lang':'x-default'}"))) + ) + ), "photoshop:City" -> JsString("KITZBUEHEL"), "GettyImagesGIFT:ExclusiveCoverage" -> JsString("False"), "photoshop:DateCreated" -> JsString("2015-01-22T00:00:00.000Z"), "photoshop:Credit" -> JsString("AFP/Getty Images"), "dc:Rights" -> JsString("CHRISTOF STACHE"), - "GettyImagesGIFT:OriginalCreateDateTime" -> JsString("0001-01-01T00:00:00.000Z"), + "GettyImagesGIFT:OriginalCreateDateTime" -> JsString( + "0001-01-01T00:00:00.000Z" + ), "Iptc4xmpCore:CountryCode" -> JsString("AUT"), "GettyImagesGIFT:CallForImage" -> JsString("False"), "photoshop:Country" -> JsString("AUSTRIA"), @@ -113,38 +137,89 @@ class ProjectorTest extends FunSuite with Matchers with ScalaFutures with Mockit "photoshop:Category" -> JsString("S") ) - val gettyFileMetadataExpected = FileMetadata(iptc = iptc, exif = exif, xmp = xmp, getty = getty, colourModel = Some("RGB")) + val gettyFileMetadataExpected = FileMetadata( + iptc = iptc, + exif = exif, + xmp = xmp, + getty = getty, + colourModel = Some("RGB") + ) val expected = Image( id = "id123", - uploadTime = new DateTime("2020-01-24T17:36:08.456Z").withZone(DateTimeZone.UTC), + uploadTime = + new DateTime("2020-01-24T17:36:08.456Z").withZone(DateTimeZone.UTC), uploadedBy = "test", - lastModified = Some(new DateTime("2020-01-24T17:36:08.456Z").withZone(DateTimeZone.UTC)), + lastModified = Some( + new DateTime("2020-01-24T17:36:08.456Z").withZone(DateTimeZone.UTC) + ), identifiers = Map(), uploadInfo = UploadInfo(Some("getty.jpg")), - source = Asset(new URI("http://img-bucket.s3.amazonaws.com/i/d/1/2/3/id123"), + source = Asset( + new URI("http://img-bucket.s3.amazonaws.com/i/d/1/2/3/id123"), Some(12666), Some(Jpeg), - Some(Dimensions(100, 60)), None), - thumbnail = Some(Asset(new URI("http://thumb-bucket.s3.amazonaws.com/i/d/1/2/3/id123"), - Some(6404), - Some(Jpeg), - Some(Dimensions(256, 154)), None)), + Some(Dimensions(100, 60)), + None + ), + thumbnail = Some( + Asset( + new URI("http://thumb-bucket.s3.amazonaws.com/i/d/1/2/3/id123"), + Some(6404), + Some(Jpeg), + Some(Dimensions(256, 154)), + None + ) + ), optimisedPng = None, fileMetadata = gettyFileMetadataExpected, userMetadata = None, metadata = ImageMetadata( - Some(new DateTime("2015-01-22T00:00:00.000Z").withZone(DateTimeZone.UTC)), - Some("Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. 
AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images"), + Some( + new DateTime("2015-01-22T00:00:00.000Z").withZone(DateTimeZone.UTC) + ), + Some( + "Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images" + ), Some("AFP/Getty Images"), - None, Some("Christof Stache"), Some("Stringer"), None, Some("CHRISTOF STACHE"), - Some("-"), Some("AFP"), None, Nil, None, Some("Kitzbuehel"), None, Some("Austria"), List("sport")), + None, + Some("Christof Stache"), + Some("Stringer"), + None, + Some("CHRISTOF STACHE"), + Some("-"), + Some("AFP"), + None, + Nil, + None, + Some("Kitzbuehel"), + None, + Some("Austria"), + List("sport") + ), originalMetadata = ImageMetadata( - Some(new DateTime("2015-01-22T00:00:00.000Z").withZone(DateTimeZone.UTC)), - Some("Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images"), - Some("AFP/Getty Images"), None, Some("Christof Stache"), Some("Stringer"), - None, Some("CHRISTOF STACHE"), Some("-"), - Some("AFP"), None, Nil, None, Some("Kitzbuehel"), None, Some("Austria"), List("sport")), + Some( + new DateTime("2015-01-22T00:00:00.000Z").withZone(DateTimeZone.UTC) + ), + Some( + "Austria's Matthias Mayer attends the men's downhill training of the FIS Alpine Skiing World Cup in Kitzbuehel, Austria, on January 22, 2015. AFP PHOTO / CHRISTOF STACHECHRISTOF STACHE/AFP/Getty Images" + ), + Some("AFP/Getty Images"), + None, + Some("Christof Stache"), + Some("Stringer"), + None, + Some("CHRISTOF STACHE"), + Some("-"), + Some("AFP"), + None, + Nil, + None, + Some("Kitzbuehel"), + None, + Some("Austria"), + List("sport") + ), usageRights = Agency("Getty Images", Some("AFP"), None), originalUsageRights = Agency("Getty Images", Some("AFP"), None), exports = Nil, @@ -159,12 +234,13 @@ class ProjectorTest extends FunSuite with Matchers with ScalaFutures with Mockit uploadedBy = uploadedBy, uploadTime = uploadTime, uploadFileName = uploadFileName, - picdarUrn = None, + picdarUrn = None ) implicit val requestLoggingContext = RequestLoggingContext() - val actualFuture = projector.projectImage(fileDigest, extractedS3Meta, UUID.randomUUID()) + val actualFuture = + projector.projectImage(fileDigest, extractedS3Meta, UUID.randomUUID()) whenReady(actualFuture) { actual => actual shouldEqual expected @@ -172,4 +248,3 @@ class ProjectorTest extends FunSuite with Matchers with ScalaFutures with Mockit } } - diff --git a/kahuna/app/KahunaComponents.scala b/kahuna/app/KahunaComponents.scala index 5fb2978b45..b61c4e75a3 100644 --- a/kahuna/app/KahunaComponents.scala +++ b/kahuna/app/KahunaComponents.scala @@ -7,18 +7,25 @@ import play.api.Configuration import play.filters.headers.SecurityHeadersConfig import router.Routes -class KahunaComponents(context: Context) extends GridComponents(context, new KahunaConfig(_)) with AssetsComponents { - final override lazy val securityHeadersConfig: SecurityHeadersConfig = KahunaSecurityConfig(config, context.initialConfiguration) +class KahunaComponents(context: Context) + extends GridComponents(context, new KahunaConfig(_)) + with AssetsComponents { + final override lazy val securityHeadersConfig: SecurityHeadersConfig = + KahunaSecurityConfig(config, context.initialConfiguration) final override val buildInfo = utils.buildinfo.BuildInfo val controller = new 
KahunaController(auth, config, controllerComponents) - final override val router = new Routes(httpErrorHandler, controller, assets, management) + final override val router = + new Routes(httpErrorHandler, controller, assets, management) } object KahunaSecurityConfig { - def apply(config: KahunaConfig, playConfig: Configuration): SecurityHeadersConfig = { + def apply( + config: KahunaConfig, + playConfig: Configuration + ): SecurityHeadersConfig = { val base = SecurityHeadersConfig.fromConfiguration(playConfig) val services = List( @@ -34,9 +41,12 @@ object KahunaSecurityConfig { config.services.guardianWitnessBaseUri ) - val frameSources = s"frame-src ${config.services.authBaseUri} ${config.services.kahunaBaseUri} https://accounts.google.com" - val frameAncestors = s"frame-ancestors ${config.frameAncestors.mkString(" ")}" - val connectSources = s"connect-src ${(services :+ config.imageOrigin).mkString(" ")} 'self' www.google-analytics.com" + val frameSources = + s"frame-src ${config.services.authBaseUri} ${config.services.kahunaBaseUri} https://accounts.google.com" + val frameAncestors = + s"frame-ancestors ${config.frameAncestors.mkString(" ")}" + val connectSources = + s"connect-src ${(services :+ config.imageOrigin).mkString(" ")} 'self' www.google-analytics.com" val imageSources: List[String] = List( "data:", @@ -56,7 +66,9 @@ object KahunaSecurityConfig { // covered by frame-ancestors in contentSecurityPolicy frameOptions = None, // We use inline styles and script tags - contentSecurityPolicy = Some(s"$frameSources; $frameAncestors; $connectSources; $fontSources; img-src ${imageSources.mkString(" ")}; default-src 'unsafe-inline' 'self'; script-src 'self' 'unsafe-inline' www.google-analytics.com;") + contentSecurityPolicy = Some( + s"$frameSources; $frameAncestors; $connectSources; $fontSources; img-src ${imageSources.mkString(" ")}; default-src 'unsafe-inline' 'self'; script-src 'self' 'unsafe-inline' www.google-analytics.com;" + ) ) } } diff --git a/kahuna/app/controllers/KahunaController.scala b/kahuna/app/controllers/KahunaController.scala index f1369a723d..58a9a01096 100644 --- a/kahuna/app/controllers/KahunaController.scala +++ b/kahuna/app/controllers/KahunaController.scala @@ -7,26 +7,33 @@ import play.api.mvc.{BaseController, ControllerComponents} import scala.concurrent.ExecutionContext -class KahunaController(auth: Authentication, config: KahunaConfig, override val controllerComponents: ControllerComponents) - (implicit val ec: ExecutionContext) extends BaseController with ArgoHelpers { +class KahunaController( + auth: Authentication, + config: KahunaConfig, + override val controllerComponents: ControllerComponents +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers { def index(ignored: String) = Action { req => val okPath = routes.KahunaController.ok.url // If the auth is successful, we redirect to the kahuna domain so the iframe // is on the same domain and can be read by the JS val returnUri = config.rootUri + okPath - Ok(views.html.main( - config.mediaApiUri, - config.authUri, - s"${config.authUri}/login?redirectUri=$returnUri", - config.sentryDsn, - config.sessionId, - config.googleTrackingId, - config.feedbackFormLink, - config.usageRightsHelpLink, - config.invalidSessionHelpLink, - config.supportEmail - )) + Ok( + views.html.main( + config.mediaApiUri, + config.authUri, + s"${config.authUri}/login?redirectUri=$returnUri", + config.sentryDsn, + config.sessionId, + config.googleTrackingId, + config.feedbackFormLink, + 
config.usageRightsHelpLink, + config.invalidSessionHelpLink, + config.supportEmail + ) + ) } def quotas = auth { req => diff --git a/kahuna/app/lib/KahunaConfig.scala b/kahuna/app/lib/KahunaConfig.scala index 256467ece8..da37352846 100644 --- a/kahuna/app/lib/KahunaConfig.scala +++ b/kahuna/app/lib/KahunaConfig.scala @@ -2,7 +2,8 @@ package lib import com.gu.mediaservice.lib.config.{CommonConfig, GridConfigResources} -class KahunaConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) { +class KahunaConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) { val rootUri: String = services.kahunaBaseUri val mediaApiUri: String = services.apiBaseUri val authUri: String = services.authBaseUri @@ -13,12 +14,17 @@ class KahunaConfig(resources: GridConfigResources) extends CommonConfig(resource val fullOrigin: String = string("origin.full") val cropOrigin: String = string("origin.crops") val imageOrigin: String = string("origin.images") - val googleTrackingId: Option[String] = stringOpt("google.tracking.id").filterNot(_.isEmpty) + val googleTrackingId: Option[String] = + stringOpt("google.tracking.id").filterNot(_.isEmpty) - val feedbackFormLink: Option[String]= stringOpt("links.feedbackForm").filterNot(_.isEmpty) - val usageRightsHelpLink: Option[String]= stringOpt("links.usageRightsHelp").filterNot(_.isEmpty) - val invalidSessionHelpLink: Option[String]= stringOpt("links.invalidSessionHelp").filterNot(_.isEmpty) - val supportEmail: Option[String]= stringOpt("links.supportEmail").filterNot(_.isEmpty) + val feedbackFormLink: Option[String] = + stringOpt("links.feedbackForm").filterNot(_.isEmpty) + val usageRightsHelpLink: Option[String] = + stringOpt("links.usageRightsHelp").filterNot(_.isEmpty) + val invalidSessionHelpLink: Option[String] = + stringOpt("links.invalidSessionHelp").filterNot(_.isEmpty) + val supportEmail: Option[String] = + stringOpt("links.supportEmail").filterNot(_.isEmpty) val frameAncestors: Set[String] = getStringSet("security.frameAncestors") } diff --git a/leases/app/LeasesComponents.scala b/leases/app/LeasesComponents.scala index 61562cf8bb..3e99491401 100644 --- a/leases/app/LeasesComponents.scala +++ b/leases/app/LeasesComponents.scala @@ -4,12 +4,20 @@ import lib.{LeaseNotifier, LeaseStore, LeasesConfig} import play.api.ApplicationLoader.Context import router.Routes -class LeasesComponents(context: Context) extends GridComponents(context, new LeasesConfig(_)) { +class LeasesComponents(context: Context) + extends GridComponents(context, new LeasesConfig(_)) { final override val buildInfo = utils.buildinfo.BuildInfo val store = new LeaseStore(config) val notifications = new LeaseNotifier(config, store) - val controller = new MediaLeaseController(auth, store, config, notifications, controllerComponents) - override lazy val router = new Routes(httpErrorHandler, controller, management) + val controller = new MediaLeaseController( + auth, + store, + config, + notifications, + controllerComponents + ) + override lazy val router = + new Routes(httpErrorHandler, controller, management) } diff --git a/leases/app/controllers/MediaLeaseController.scala b/leases/app/controllers/MediaLeaseController.scala index 8cc286b99d..1dc6371bb2 100644 --- a/leases/app/controllers/MediaLeaseController.scala +++ b/leases/app/controllers/MediaLeaseController.scala @@ -12,47 +12,75 @@ import play.api.mvc._ import scala.concurrent.{ExecutionContext, Future} -case class AppIndex(name: String, - description: String, - config: Map[String, 
String] = Map()) +case class AppIndex( + name: String, + description: String, + config: Map[String, String] = Map() +) object AppIndex { implicit def jsonWrites: Writes[AppIndex] = Json.writes[AppIndex] } -class MediaLeaseController(auth: Authentication, store: LeaseStore, config: LeasesConfig, notifications: LeaseNotifier, - override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers { +class MediaLeaseController( + auth: Authentication, + store: LeaseStore, + config: LeasesConfig, + notifications: LeaseNotifier, + override val controllerComponents: ControllerComponents +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers { private val notFound = respondNotFound("MediaLease not found") private val indexResponse = { val appIndex = AppIndex("media-leases", "Media leases service", Map()) - val indexLinks = List( + val indexLinks = List( Link("leases", s"${config.rootUri}/leases/{id}"), - Link("by-media-id", s"${config.rootUri}/leases/media/{id}")) + Link("by-media-id", s"${config.rootUri}/leases/media/{id}") + ) respond(appIndex, indexLinks) } private def clearLease(id: String) = store.get(id).map { lease => - store.delete(id).map { _ => notifications.sendRemoveLease(lease.mediaId, id)} + store.delete(id).map { _ => + notifications.sendRemoveLease(lease.mediaId, id) + } } - private def clearLeases(id: String) = Future.sequence(store.getForMedia(id) - .flatMap(_.id) - .flatten(clearLease)) - - private def badRequest(e: Seq[(JsPath, Seq[JsonValidationError])]) = - respondError(BadRequest, "media-leases-parse-failed", JsError.toJson(e).toString) + private def clearLeases(id: String) = Future.sequence( + store + .getForMedia(id) + .flatMap(_.id) + .flatten(clearLease) + ) + + private def badRequest(e: Seq[(JsPath, Seq[JsonValidationError])]) = + respondError( + BadRequest, + "media-leases-parse-failed", + JsError.toJson(e).toString + ) - private def prepareLeaseForSave(mediaLease: MediaLease, userId: Option[String]): MediaLease = - mediaLease.prepareForSave.copy(id = Some(UUID.randomUUID().toString), leasedBy = userId) + private def prepareLeaseForSave( + mediaLease: MediaLease, + userId: Option[String] + ): MediaLease = + mediaLease.prepareForSave.copy( + id = Some(UUID.randomUUID().toString), + leasedBy = userId + ) private def addLease(mediaLease: MediaLease, userId: Option[String]) = { val lease = prepareLeaseForSave(mediaLease, userId) if (lease.isSyndication) { val leasesForMedia = store.getForMedia(mediaLease.mediaId) val leasesWithoutSyndication = leasesForMedia.filter(!_.isSyndication) - replaceLeases(leasesWithoutSyndication :+ lease, mediaLease.mediaId, userId) + replaceLeases( + leasesWithoutSyndication :+ lease, + mediaLease.mediaId, + userId + ) } else { store.put(lease).map { _ => notifications.sendAddLease(lease) @@ -60,7 +88,11 @@ class MediaLeaseController(auth: Authentication, store: LeaseStore, config: Leas } } - private def replaceLeases(mediaLeases: List[MediaLease], imageId: String, userId: Option[String]) = { + private def replaceLeases( + mediaLeases: List[MediaLease], + imageId: String, + userId: Option[String] + ) = { val preparedMediaLeases = mediaLeases.map(prepareLeaseForSave(_, userId)) for { _ <- clearLeases(imageId) @@ -72,68 +104,91 @@ class MediaLeaseController(auth: Authentication, store: LeaseStore, config: Leas def index = auth { _ => indexResponse } - def reindex = auth.async { _ => Future { - store.forEach { leases => - leases - 
.foldLeft(Set[String]())((ids, lease) => ids + lease.mediaId) - .foreach(notifications.sendReindexLeases) + def reindex = auth.async { _ => + Future { + store.forEach { leases => + leases + .foldLeft(Set[String]())((ids, lease) => ids + lease.mediaId) + .foreach(notifications.sendReindexLeases) + } + Accepted } - Accepted - }} + } def postLease = auth.async(parse.json) { implicit request => request.body.validate[MediaLease] match { case JsSuccess(mediaLease, _) => - addLease(mediaLease, Some(Authentication.getIdentity(request.user))).map(_ => Accepted) + addLease(mediaLease, Some(Authentication.getIdentity(request.user))) + .map(_ => Accepted) case JsError(errors) => Future.successful(badRequest(errors)) } } - def deleteLease(id: String) = auth.async { implicit request => Future { + def deleteLease(id: String) = auth.async { implicit request => + Future { clearLease(id) Accepted } } - def getLease(id: String) = auth.async { _ => Future { + def getLease(id: String) = auth.async { _ => + Future { val leases = store.get(id) - leases.foldLeft(notFound)((_, lease) => respond[MediaLease]( + leases.foldLeft(notFound)((_, lease) => + respond[MediaLease]( uri = config.leaseUri(id), data = lease, links = lease.id .map(config.mediaApiLink) .toList - )) + ) + ) } } - - def deleteLeasesForMedia(id: String) = auth.async { _ => Future { + def deleteLeasesForMedia(id: String) = auth.async { _ => + Future { clearLeases(id) Accepted } } - def validateLeases(leases: List[MediaLease]) = leases.count { _.isSyndication } <= 1 - - def replaceLeasesForMedia(id: String) = auth.async(parse.json) { implicit request => Future { - request.body.validate[List[MediaLease]].fold( - badRequest, - mediaLeases => { - if (validateLeases(mediaLeases)) { - replaceLeases(mediaLeases, id, Some(Authentication.getIdentity(request.user))) - Accepted - } else { - respondError(BadRequest, "validation-error", "No more than one syndication lease per image") - } + def validateLeases(leases: List[MediaLease]) = leases.count { + _.isSyndication + } <= 1 + + def replaceLeasesForMedia(id: String) = auth.async(parse.json) { + implicit request => + Future { + request.body + .validate[List[MediaLease]] + .fold( + badRequest, + mediaLeases => { + if (validateLeases(mediaLeases)) { + replaceLeases( + mediaLeases, + id, + Some(Authentication.getIdentity(request.user)) + ) + Accepted + } else { + respondError( + BadRequest, + "validation-error", + "No more than one syndication lease per image" + ) + } + } + ) } - ) - }} + } - def getLeasesForMedia(id: String) = auth.async { _ => Future { + def getLeasesForMedia(id: String) = auth.async { _ => + Future { val leases = store.getForMedia(id) respond[LeasesByMedia]( diff --git a/leases/app/lib/LeaseNotifier.scala b/leases/app/lib/LeaseNotifier.scala index dc50164e26..31a292ac2d 100644 --- a/leases/app/lib/LeaseNotifier.scala +++ b/leases/app/lib/LeaseNotifier.scala @@ -4,30 +4,45 @@ import com.gu.mediaservice.lib.aws.{ThrallMessageSender, UpdateMessage} import com.gu.mediaservice.model.leases.MediaLease import org.joda.time.DateTime -class LeaseNotifier(config: LeasesConfig, store: LeaseStore) extends ThrallMessageSender(config.thrallKinesisStreamConfig) { +class LeaseNotifier(config: LeasesConfig, store: LeaseStore) + extends ThrallMessageSender(config.thrallKinesisStreamConfig) { def sendReindexLeases(mediaId: String) = { val replaceImageLeases = "replace-image-leases" val leases = store.getForMedia(mediaId) - val updateMessage = UpdateMessage(subject = replaceImageLeases, leases = Some(leases), id = 
Some(mediaId) ) + val updateMessage = UpdateMessage( + subject = replaceImageLeases, + leases = Some(leases), + id = Some(mediaId) + ) publish(updateMessage) } def sendAddLease(mediaLease: MediaLease) = { val addImageLease = "add-image-lease" - val updateMessage = UpdateMessage(subject = addImageLease, mediaLease = Some(mediaLease), id = Some(mediaLease.mediaId)) + val updateMessage = UpdateMessage( + subject = addImageLease, + mediaLease = Some(mediaLease), + id = Some(mediaLease.mediaId) + ) publish(updateMessage) } def sendAddLeases(mediaLeases: List[MediaLease], mediaId: String) = { val replaceImageLeases = "replace-image-leases" - val updateMessage = UpdateMessage(subject = replaceImageLeases, leases = Some(mediaLeases), id = Some(mediaId)) + val updateMessage = UpdateMessage( + subject = replaceImageLeases, + leases = Some(mediaLeases), + id = Some(mediaId) + ) publish(updateMessage) } def sendRemoveLease(mediaId: String, leaseId: String) = { val removeImageLease = "remove-image-lease" - val updateMessage = UpdateMessage(subject = removeImageLease, id = Some(mediaId), + val updateMessage = UpdateMessage( + subject = removeImageLease, + id = Some(mediaId), leaseId = Some(leaseId) ) publish(updateMessage) diff --git a/leases/app/lib/LeaseStore.scala b/leases/app/lib/LeaseStore.scala index 1fde3ae0fa..4aa1e33591 100644 --- a/leases/app/lib/LeaseStore.scala +++ b/leases/app/lib/LeaseStore.scala @@ -8,11 +8,16 @@ import org.joda.time.DateTime import scala.concurrent.ExecutionContext -class LeaseStore(config: LeasesConfig) extends DynamoDB(config, config.leasesTable) { +class LeaseStore(config: LeasesConfig) + extends DynamoDB(config, config.leasesTable) { implicit val dateTimeFormat = - DynamoFormat.coercedXmap[DateTime, String, IllegalArgumentException](DateTime.parse)(_.toString) + DynamoFormat.coercedXmap[DateTime, String, IllegalArgumentException]( + DateTime.parse + )(_.toString) implicit val enumFormat = - DynamoFormat.coercedXmap[MediaLeaseType, String, IllegalArgumentException](MediaLeaseType(_))(_.toString) + DynamoFormat.coercedXmap[MediaLeaseType, String, IllegalArgumentException]( + MediaLeaseType(_) + )(_.toString) private val leasesTable = Table[MediaLease](config.leasesTable) @@ -21,7 +26,9 @@ class LeaseStore(config: LeasesConfig) extends DynamoDB(config, config.leasesTab } def getForMedia(id: String): List[MediaLease] = { - Scanamo.exec(client)(leasesTable.index("mediaId").query('mediaId -> id)).flatMap(_.toOption) + Scanamo + .exec(client)(leasesTable.index("mediaId").query('mediaId -> id)) + .flatMap(_.toOption) } def put(lease: MediaLease)(implicit ec: ExecutionContext) = { @@ -36,9 +43,10 @@ class LeaseStore(config: LeasesConfig) extends DynamoDB(config, config.leasesTab ScanamoAsync.exec(client)(leasesTable.delete('id -> id)) } - def forEach(run: List[MediaLease] => Unit)(implicit ec: ExecutionContext) = ScanamoAsync.exec(client)( - leasesTable.scan - .map(ops => ops.flatMap(_.toOption)) - .map(run) - ) + def forEach(run: List[MediaLease] => Unit)(implicit ec: ExecutionContext) = + ScanamoAsync.exec(client)( + leasesTable.scan + .map(ops => ops.flatMap(_.toOption)) + .map(run) + ) } diff --git a/leases/app/lib/LeasesConfig.scala b/leases/app/lib/LeasesConfig.scala index e5cb887221..af2658bdc0 100644 --- a/leases/app/lib/LeasesConfig.scala +++ b/leases/app/lib/LeasesConfig.scala @@ -6,7 +6,8 @@ import com.gu.mediaservice.lib.config.{CommonConfig, GridConfigResources} import java.net.URI import scala.util.Try -class LeasesConfig(resources: GridConfigResources) extends 
CommonConfig(resources.configuration) { +class LeasesConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) { val leasesTable = string("dynamo.tablename.leasesTable") val rootUri: String = services.leasesBaseUri @@ -17,8 +18,12 @@ class LeasesConfig(resources: GridConfigResources) extends CommonConfig(resource private val leasesUri = uri(s"$rootUri/leases") - def leaseUri(leaseId: String): Option[URI] = Try { URI.create(s"$leasesUri/$leaseId") }.toOption - def leasesMediaUri(mediaId: String) = Try { URI.create(s"$leasesUri/media/$mediaId") }.toOption + def leaseUri(leaseId: String): Option[URI] = Try { + URI.create(s"$leasesUri/$leaseId") + }.toOption + def leasesMediaUri(mediaId: String) = Try { + URI.create(s"$leasesUri/media/$mediaId") + }.toOption private def mediaApiUri(id: String) = s"${services.apiBaseUri}/images/$id" def mediaApiLink(id: String) = Link("media", mediaApiUri(id)) diff --git a/media-api/app/MediaApiComponents.scala b/media-api/app/MediaApiComponents.scala index b6df3a850d..5e9e46dfb3 100644 --- a/media-api/app/MediaApiComponents.scala +++ b/media-api/app/MediaApiComponents.scala @@ -1,7 +1,10 @@ import com.gu.mediaservice.lib.aws.ThrallMessageSender import com.gu.mediaservice.lib.elasticsearch.ElasticSearchConfig import com.gu.mediaservice.lib.imaging.ImageOperations -import com.gu.mediaservice.lib.management.{ElasticSearchHealthCheck, ManagementWithPermissions} +import com.gu.mediaservice.lib.management.{ + ElasticSearchHealthCheck, + ManagementWithPermissions +} import com.gu.mediaservice.lib.play.GridComponents import controllers._ import lib._ @@ -11,10 +14,13 @@ import router.Routes import scala.concurrent.Future -class MediaApiComponents(context: Context) extends GridComponents(context, new MediaApiConfig(_)) { +class MediaApiComponents(context: Context) + extends GridComponents(context, new MediaApiConfig(_)) { final override val buildInfo = utils.buildinfo.BuildInfo - val imageOperations = new ImageOperations(context.environment.rootPath.getAbsolutePath) + val imageOperations = new ImageOperations( + context.environment.rootPath.getAbsolutePath + ) val messageSender = new ThrallMessageSender(config.thrallKinesisStreamConfig) val mediaApiMetrics = new MediaApiMetrics(config) @@ -32,19 +38,52 @@ class MediaApiComponents(context: Context) extends GridComponents(context, new M val usageQuota = new UsageQuota(config, actorSystem.scheduler) usageQuota.quotaStore.update() usageQuota.scheduleUpdates() - applicationLifecycle.addStopHook(() => Future{usageQuota.stopUpdates()}) + applicationLifecycle.addStopHook(() => Future { usageQuota.stopUpdates() }) - val elasticSearch = new ElasticSearch(config, mediaApiMetrics, es6Config, () => usageQuota.usageStore.overQuotaAgencies) + val elasticSearch = new ElasticSearch( + config, + mediaApiMetrics, + es6Config, + () => usageQuota.usageStore.overQuotaAgencies + ) elasticSearch.ensureAliasAssigned() val imageResponse = new ImageResponse(config, s3Client, usageQuota) - val mediaApi = new MediaApi(auth, messageSender, elasticSearch, imageResponse, config, controllerComponents, s3Client, mediaApiMetrics, wsClient) - val suggestionController = new SuggestionController(auth, elasticSearch, controllerComponents) - val aggController = new AggregationController(auth, elasticSearch, controllerComponents) - val usageController = new UsageController(auth, config, elasticSearch, usageQuota, controllerComponents) - val elasticSearchHealthCheck = new ElasticSearchHealthCheck(controllerComponents, 
elasticSearch) - val healthcheckController = new ManagementWithPermissions(controllerComponents, mediaApi, buildInfo) + val mediaApi = new MediaApi( + auth, + messageSender, + elasticSearch, + imageResponse, + config, + controllerComponents, + s3Client, + mediaApiMetrics, + wsClient + ) + val suggestionController = + new SuggestionController(auth, elasticSearch, controllerComponents) + val aggController = + new AggregationController(auth, elasticSearch, controllerComponents) + val usageController = new UsageController( + auth, + config, + elasticSearch, + usageQuota, + controllerComponents + ) + val elasticSearchHealthCheck = + new ElasticSearchHealthCheck(controllerComponents, elasticSearch) + val healthcheckController = + new ManagementWithPermissions(controllerComponents, mediaApi, buildInfo) - override val router = new Routes(httpErrorHandler, mediaApi, suggestionController, aggController, usageController, elasticSearchHealthCheck, healthcheckController) + override val router = new Routes( + httpErrorHandler, + mediaApi, + suggestionController, + aggController, + usageController, + elasticSearchHealthCheck, + healthcheckController + ) } diff --git a/media-api/app/controllers/AggregationController.scala b/media-api/app/controllers/AggregationController.scala index 22bf3c8828..8c5258c3c5 100644 --- a/media-api/app/controllers/AggregationController.scala +++ b/media-api/app/controllers/AggregationController.scala @@ -6,14 +6,19 @@ import play.api.mvc._ import scala.concurrent.ExecutionContext -class AggregationController(auth: Authentication, elasticSearch: ElasticSearch, - override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext) - extends BaseController with AggregateResponses { +class AggregationController( + auth: Authentication, + elasticSearch: ElasticSearch, + override val controllerComponents: ControllerComponents +)(implicit val ec: ExecutionContext) + extends BaseController + with AggregateResponses { def dateHistogram(field: String, q: Option[String]) = auth.async { request => implicit val r = request - elasticSearch.dateHistogramAggregate(AggregateSearchParams(field, request)) + elasticSearch + .dateHistogramAggregate(AggregateSearchParams(field, request)) .map(aggregateResponse) } diff --git a/media-api/app/controllers/MediaApi.scala b/media-api/app/controllers/MediaApi.scala index 8fb85f6a85..abcc329d26 100644 --- a/media-api/app/controllers/MediaApi.scala +++ b/media-api/app/controllers/MediaApi.scala @@ -27,22 +27,47 @@ import play.api.mvc._ import scala.concurrent.{ExecutionContext, Future} class MediaApi( - auth: Authentication, - messageSender: ThrallMessageSender, - elasticSearch: ElasticSearch, - imageResponse: ImageResponse, - override val config: MediaApiConfig, - override val controllerComponents: ControllerComponents, - s3Client: S3Client, - mediaApiMetrics: MediaApiMetrics, - ws: WSClient -)(implicit val ec: ExecutionContext) extends BaseController with ArgoHelpers with PermissionsHandler { - - private val searchParamList = List("q", "ids", "offset", "length", "orderBy", - "since", "until", "modifiedSince", "modifiedUntil", "takenSince", "takenUntil", - "uploadedBy", "archived", "valid", "free", "payType", - "hasExports", "hasIdentifier", "missingIdentifier", "hasMetadata", - "persisted", "usageStatus", "usagePlatform", "hasRightsAcquired", "syndicationStatus").mkString(",") + auth: Authentication, + messageSender: ThrallMessageSender, + elasticSearch: ElasticSearch, + imageResponse: ImageResponse, + override val config: 
MediaApiConfig, + override val controllerComponents: ControllerComponents, + s3Client: S3Client, + mediaApiMetrics: MediaApiMetrics, + ws: WSClient +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers + with PermissionsHandler { + + private val searchParamList = List( + "q", + "ids", + "offset", + "length", + "orderBy", + "since", + "until", + "modifiedSince", + "modifiedUntil", + "takenSince", + "takenUntil", + "uploadedBy", + "archived", + "valid", + "free", + "payType", + "hasExports", + "hasIdentifier", + "missingIdentifier", + "hasMetadata", + "persisted", + "usageStatus", + "usagePlatform", + "hasRightsAcquired", + "syndicationStatus" + ).mkString(",") private val searchLinkHref = s"${config.rootUri}/images{?$searchParamList}" @@ -55,42 +80,70 @@ class MediaApi( ) val indexLinks = List( searchLink, - Link("image", s"${config.rootUri}/images/{id}"), + Link("image", s"${config.rootUri}/images/{id}"), // FIXME: credit is the only field available for now as it's the only on // that we are indexing as a completion suggestion - Link("metadata-search", s"${config.rootUri}/suggest/metadata/{field}{?q}"), - Link("label-search", s"${config.rootUri}/images/edits/label{?q}"), - Link("cropper", config.cropperUri), - Link("loader", config.loaderUri), - Link("edits", config.metadataUri), - Link("session", s"${config.authUri}/session"), - Link("witness-report", s"${config.services.guardianWitnessBaseUri}/2/report/{id}"), - Link("collections", config.collectionsUri), - Link("permissions", s"${config.rootUri}/permissions"), - Link("leases", config.leasesUri), - Link("admin-tools", config.adminToolsUri) + Link( + "metadata-search", + s"${config.rootUri}/suggest/metadata/{field}{?q}" + ), + Link("label-search", s"${config.rootUri}/images/edits/label{?q}"), + Link("cropper", config.cropperUri), + Link("loader", config.loaderUri), + Link("edits", config.metadataUri), + Link("session", s"${config.authUri}/session"), + Link( + "witness-report", + s"${config.services.guardianWitnessBaseUri}/2/report/{id}" + ), + Link("collections", config.collectionsUri), + Link("permissions", s"${config.rootUri}/permissions"), + Link("leases", config.leasesUri), + Link("admin-tools", config.adminToolsUri) ) respond(indexData, indexLinks) } - private def ImageCannotBeDeleted = respondError(MethodNotAllowed, "cannot-delete", "Cannot delete persisted images") - private def ImageDeleteForbidden = respondError(Forbidden, "delete-not-allowed", "No permission to delete this image") - private def ImageEditForbidden = respondError(Forbidden, "edit-not-allowed", "No permission to edit this image") - private def ImageNotFound(id: String) = respondError(NotFound, "image-not-found", s"No image found with the given id $id") - private def ExportNotFound = respondError(NotFound, "export-not-found", "No export found with the given id") + private def ImageCannotBeDeleted = respondError( + MethodNotAllowed, + "cannot-delete", + "Cannot delete persisted images" + ) + private def ImageDeleteForbidden = respondError( + Forbidden, + "delete-not-allowed", + "No permission to delete this image" + ) + private def ImageEditForbidden = respondError( + Forbidden, + "edit-not-allowed", + "No permission to edit this image" + ) + private def ImageNotFound(id: String) = respondError( + NotFound, + "image-not-found", + s"No image found with the given id $id" + ) + private def ExportNotFound = respondError( + NotFound, + "export-not-found", + "No export found with the given id" + ) def index = auth { indexResponse } - def 
getIncludedFromParams(request: AuthenticatedRequest[AnyContent, Principal]): List[String] = { + def getIncludedFromParams( + request: AuthenticatedRequest[AnyContent, Principal] + ): List[String] = { val includedQuery: Option[String] = request.getQueryString("include") includedQuery.map(_.split(",").map(_.trim).toList).getOrElse(List()) } private def isUploaderOrHasPermission( - request: AuthenticatedRequest[AnyContent, Principal], - image: Image, - permission: PermissionDefinition + request: AuthenticatedRequest[AnyContent, Principal], + image: Image, + permission: PermissionDefinition ) = { request.user match { case user: UserPrincipal => @@ -99,26 +152,38 @@ class MediaApi( } else { hasPermission(user, permission) } - case service: MachinePrincipal if service.accessor.tier == Internal => true + case service: MachinePrincipal if service.accessor.tier == Internal => + true case _ => false } } - def canUserWriteMetadata(request: AuthenticatedRequest[AnyContent, Principal], image: Image): Boolean = { + def canUserWriteMetadata( + request: AuthenticatedRequest[AnyContent, Principal], + image: Image + ): Boolean = { isUploaderOrHasPermission(request, image, Permissions.EditMetadata) } - def canUserDeleteImage(request: AuthenticatedRequest[AnyContent, Principal], image: Image): Boolean = { + def canUserDeleteImage( + request: AuthenticatedRequest[AnyContent, Principal], + image: Image + ): Boolean = { isUploaderOrHasPermission(request, image, Permissions.DeleteImage) } - def canUserDeleteCropsOrUsages(user: Principal): Boolean = hasPermission(user, Permissions.DeleteCrops) + def canUserDeleteCropsOrUsages(user: Principal): Boolean = + hasPermission(user, Permissions.DeleteCrops) - private def isAvailableForSyndication(image: Image): Boolean = image.syndicationRights.exists(_.isAvailableForSyndication) + private def isAvailableForSyndication(image: Image): Boolean = + image.syndicationRights.exists(_.isAvailableForSyndication) - private def hasPermission(request: Authentication.Request[Any], image: Image): Boolean = request.user.accessor.tier match { + private def hasPermission( + request: Authentication.Request[Any], + image: Image + ): Boolean = request.user.accessor.tier match { case Syndication => isAvailableForSyndication(image) - case _ => true + case _ => true } def getImage(id: String) = auth.async { request => @@ -129,8 +194,7 @@ class MediaApi( } } - /** - * Get the raw response from ElasticSearch. + /** Get the raw response from ElasticSearch. 
*/ def getImageFromElasticSearch(id: String) = auth.async { request => getImageResponseFromES(id, request) map { @@ -166,15 +230,18 @@ class MediaApi( } } - def getImageExport(imageId: String, exportId: String) = auth.async { request => - implicit val r = request + def getImageExport(imageId: String, exportId: String) = auth.async { + request => + implicit val r = request - elasticSearch.getImageById(imageId) map { - case Some(source) if hasPermission(request, source) => - val exportOption = source.exports.find(_.id.contains(exportId)) - exportOption.foldLeft(ExportNotFound)((memo, export) => respond(export)) - case _ => ImageNotFound(imageId) - } + elasticSearch.getImageById(imageId) map { + case Some(source) if hasPermission(request, source) => + val exportOption = source.exports.find(_.id.contains(exportId)) + exportOption.foldLeft(ExportNotFound)((memo, export) => + respond(export) + ) + case _ => ImageNotFound(imageId) + } } @@ -190,7 +257,8 @@ class MediaApi( if (canDelete) { val deleteImage = "delete-image" - val updateMessage = UpdateMessage(subject = deleteImage, id = Some(id)) + val updateMessage = + UpdateMessage(subject = deleteImage, id = Some(id)) messageSender.publish(updateMessage) Accepted } else { @@ -210,62 +278,125 @@ class MediaApi( elasticSearch.getImageById(id) flatMap { case Some(image) if hasPermission(request, image) => { val apiKey = request.user.accessor - logger.info(s"Download original image: $id from user: ${Authentication.getIdentity(request.user)}", apiKey, id) - mediaApiMetrics.incrementImageDownload(apiKey, mediaApiMetrics.OriginalDownloadType) + logger.info( + s"Download original image: $id from user: ${Authentication + .getIdentity(request.user)}", + apiKey, + id + ) + mediaApiMetrics.incrementImageDownload( + apiKey, + mediaApiMetrics.OriginalDownloadType + ) val s3Object = s3Client.getObject(config.imageBucket, image.source.file) - val file = StreamConverters.fromInputStream(() => s3Object.getObjectContent) - val entity = HttpEntity.Streamed(file, image.source.size, image.source.mimeType.map(_.name)) + val file = + StreamConverters.fromInputStream(() => s3Object.getObjectContent) + val entity = HttpEntity.Streamed( + file, + image.source.size, + image.source.mimeType.map(_.name) + ) - if(config.recordDownloadAsUsage) { - postToUsages(config.usageUri + "/usages/download", auth.getOnBehalfOfPrincipal(request.user), id, Authentication.getIdentity(request.user)) + if (config.recordDownloadAsUsage) { + postToUsages( + config.usageUri + "/usages/download", + auth.getOnBehalfOfPrincipal(request.user), + id, + Authentication.getIdentity(request.user) + ) } - Future.successful( - Result(ResponseHeader(OK), entity).withHeaders("Content-Disposition" -> s3Client.getContentDisposition(image, Source)) + Future.successful( + Result(ResponseHeader(OK), entity).withHeaders( + "Content-Disposition" -> s3Client.getContentDisposition( + image, + Source + ) ) + ) } case _ => Future.successful(ImageNotFound(id)) } } - def downloadOptimisedImage(id: String, width: Integer, height: Integer, quality: Integer) = auth.async { request => + def downloadOptimisedImage( + id: String, + width: Integer, + height: Integer, + quality: Integer + ) = auth.async { request => implicit val r = request elasticSearch.getImageById(id) flatMap { case Some(image) if hasPermission(request, image) => { val apiKey = request.user.accessor - logger.info(s"Download optimised image: $id from user: ${Authentication.getIdentity(request.user)}", apiKey, id) - mediaApiMetrics.incrementImageDownload(apiKey, 
mediaApiMetrics.OptimisedDownloadType) + logger.info( + s"Download optimised image: $id from user: ${Authentication + .getIdentity(request.user)}", + apiKey, + id + ) + mediaApiMetrics.incrementImageDownload( + apiKey, + mediaApiMetrics.OptimisedDownloadType + ) val sourceImageUri = - new URI(s3Client.signUrl(config.imageBucket, image.optimisedPng.getOrElse(image.source).file, image, imageType = image.optimisedPng match { - case Some(_) => OptimisedPng - case _ => Source - })) + new URI( + s3Client.signUrl( + config.imageBucket, + image.optimisedPng.getOrElse(image.source).file, + image, + imageType = image.optimisedPng match { + case Some(_) => OptimisedPng + case _ => Source + } + ) + ) - if(config.recordDownloadAsUsage) { - postToUsages(config.usageUri + "/usages/download", auth.getOnBehalfOfPrincipal(request.user), id, Authentication.getIdentity(request.user)) + if (config.recordDownloadAsUsage) { + postToUsages( + config.usageUri + "/usages/download", + auth.getOnBehalfOfPrincipal(request.user), + id, + Authentication.getIdentity(request.user) + ) } Future.successful( - Redirect(config.imgopsUri + List(sourceImageUri.getPath, sourceImageUri.getRawQuery).mkString("?") + s"&w=$width&h=$height&q=$quality") + Redirect( + config.imgopsUri + List( + sourceImageUri.getPath, + sourceImageUri.getRawQuery + ).mkString("?") + s"&w=$width&h=$height&q=$quality" + ) ) } case _ => Future.successful(ImageNotFound(id)) } } - def postToUsages(uri: String, onBehalfOfPrincipal: Authentication.OnBehalfOfPrincipal, mediaId: String, user: String) = { - val baseRequest = ws.url(uri) - .withHttpHeaders(Authentication.originalServiceHeaderName -> config.appName, + def postToUsages( + uri: String, + onBehalfOfPrincipal: Authentication.OnBehalfOfPrincipal, + mediaId: String, + user: String + ) = { + val baseRequest = ws + .url(uri) + .withHttpHeaders( + Authentication.originalServiceHeaderName -> config.appName, HttpHeaders.ORIGIN -> config.rootUri, - HttpHeaders.CONTENT_TYPE -> ContentType.APPLICATION_JSON.getMimeType) + HttpHeaders.CONTENT_TYPE -> ContentType.APPLICATION_JSON.getMimeType + ) val request = onBehalfOfPrincipal(baseRequest) - val usagesMetadata = Map("mediaId" -> mediaId, + val usagesMetadata = Map( + "mediaId" -> mediaId, "dateAdded" -> printDateTime(DateTime.now()), - "downloadedBy" -> user) + "downloadedBy" -> user + ) logger.info(s"Making usages download request") request.post(Json.toJson(Map("data" -> usagesMetadata))) //fire and forget @@ -275,16 +406,33 @@ class MediaApi( val include = getIncludedFromParams(request) - def hitToImageEntity(elasticId: String, image: Image): EmbeddedEntity[JsValue] = { + def hitToImageEntity( + elasticId: String, + image: Image + ): EmbeddedEntity[JsValue] = { val writePermission = canUserWriteMetadata(request, image) val deletePermission = canUserDeleteImage(request, image) - val deleteCropsOrUsagePermission = canUserDeleteCropsOrUsages(request.user) + val deleteCropsOrUsagePermission = + canUserDeleteCropsOrUsages(request.user) val (imageData, imageLinks, imageActions) = - imageResponse.create(elasticId, image, writePermission, deletePermission, deleteCropsOrUsagePermission, include, request.user.accessor.tier) + imageResponse.create( + elasticId, + image, + writePermission, + deletePermission, + deleteCropsOrUsagePermission, + include, + request.user.accessor.tier + ) val id = (imageData \ "id").as[String] val imageUri = URI.create(s"${config.rootUri}/images/$id") - EmbeddedEntity(uri = imageUri, data = Some(imageData), imageLinks, imageActions) + 
EmbeddedEntity( + uri = imageUri, + data = Some(imageData), + imageLinks, + imageActions + ) } def respondSuccess(searchParams: SearchParams) = for { @@ -293,21 +441,37 @@ class MediaApi( prevLink = getPrevLink(searchParams) nextLink = getNextLink(searchParams, totalCount) links = List(prevLink, nextLink).flatten - } yield respondCollection(imageEntities, Some(searchParams.offset), Some(totalCount), links) + } yield respondCollection( + imageEntities, + Some(searchParams.offset), + Some(totalCount), + links + ) val searchParams = SearchParams(request) - SearchParams.validate(searchParams).fold( - // TODO: respondErrorCollection? - errors => Future.successful(respondError(UnprocessableEntity, InvalidUriParams.errorKey, - // Annoyingly `NonEmptyList` and `IList` don't have `mkString` - errors.map(_.message).list.reduce(_+ ", " +_), List(searchLink)) - ), - params => respondSuccess(params) - ) + SearchParams + .validate(searchParams) + .fold( + // TODO: respondErrorCollection? + errors => + Future.successful( + respondError( + UnprocessableEntity, + InvalidUriParams.errorKey, + // Annoyingly `NonEmptyList` and `IList` don't have `mkString` + errors.map(_.message).list.reduce(_ + ", " + _), + List(searchLink) + ) + ), + params => respondSuccess(params) + ) } - private def getImageResponseFromES(id: String, request: Authentication.Request[AnyContent]): Future[Option[(Image, JsValue, List[Link], List[Action])]] = { + private def getImageResponseFromES( + id: String, + request: Authentication.Request[AnyContent] + ): Future[Option[(Image, JsValue, List[Link], List[Action])]] = { implicit val r: Authentication.Request[AnyContent] = request val include = getIncludedFromParams(request) @@ -316,7 +480,8 @@ class MediaApi( case Some(source) if hasPermission(request, source) => val writePermission = canUserWriteMetadata(request, source) val deleteImagePermission = canUserDeleteImage(request, source) - val deleteCropsOrUsagePermission = canUserDeleteCropsOrUsages(request.user) + val deleteCropsOrUsagePermission = + canUserDeleteCropsOrUsages(request.user) val (imageData, imageLinks, imageActions) = imageResponse.create( id, @@ -334,24 +499,34 @@ class MediaApi( } } - private def getSearchUrl(searchParams: SearchParams, updatedOffset: Int, length: Int): String = { + private def getSearchUrl( + searchParams: SearchParams, + updatedOffset: Int, + length: Int + ): String = { // Enforce a toDate to exclude new images since the current request val toDate = searchParams.until.getOrElse(DateTime.now) - val paramMap: Map[String, String] = SearchParams.toStringMap(searchParams) ++ Map( - "offset" -> updatedOffset.toString, - "length" -> length.toString, - "toDate" -> printDateTime(toDate) - ) + val paramMap: Map[String, String] = + SearchParams.toStringMap(searchParams) ++ Map( + "offset" -> updatedOffset.toString, + "length" -> length.toString, + "toDate" -> printDateTime(toDate) + ) - paramMap.foldLeft(UriTemplate()){ (acc, pair) => acc.expandAny(pair._1, pair._2)}.toString + paramMap + .foldLeft(UriTemplate()) { (acc, pair) => + acc.expandAny(pair._1, pair._2) + } + .toString } private def getPrevLink(searchParams: SearchParams): Option[Link] = { val prevOffset = List(searchParams.offset - searchParams.length, 0).max if (searchParams.offset > 0) { // adapt length to avoid overlapping with current - val prevLength = List(searchParams.length, searchParams.offset - prevOffset).min + val prevLength = + List(searchParams.length, searchParams.offset - prevOffset).min val prevUrl = getSearchUrl(searchParams, 
prevOffset, prevLength) Some(Link("prev", prevUrl)) } else { @@ -359,7 +534,10 @@ class MediaApi( } } - private def getNextLink(searchParams: SearchParams, totalCount: Long): Option[Link] = { + private def getNextLink( + searchParams: SearchParams, + totalCount: Long + ): Option[Link] = { val nextOffset = searchParams.offset + searchParams.length if (nextOffset < totalCount) { val nextUrl = getSearchUrl(searchParams, nextOffset, searchParams.length) diff --git a/media-api/app/controllers/SuggestionController.scala b/media-api/app/controllers/SuggestionController.scala index 278423f251..59525c9571 100644 --- a/media-api/app/controllers/SuggestionController.scala +++ b/media-api/app/controllers/SuggestionController.scala @@ -3,18 +3,30 @@ package controllers import com.gu.mediaservice.lib.ImageFields import com.gu.mediaservice.lib.argo.ArgoHelpers import com.gu.mediaservice.lib.auth.Authentication -import lib.elasticsearch.{AggregateSearchParams, CompletionSuggestionResults, ElasticSearch} +import lib.elasticsearch.{ + AggregateSearchParams, + CompletionSuggestionResults, + ElasticSearch +} import play.api.mvc.{BaseController, ControllerComponents} import scala.concurrent.{ExecutionContext, Future} -class SuggestionController(auth: Authentication, elasticSearch: ElasticSearch, - override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers with ImageFields with AggregateResponses { +class SuggestionController( + auth: Authentication, + elasticSearch: ElasticSearch, + override val controllerComponents: ControllerComponents +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers + with ImageFields + with AggregateResponses { - def suggestMetadataCredit(q: Option[String], size: Option[Int]) = suggestion("suggestMetadataCredit", q, size) + def suggestMetadataCredit(q: Option[String], size: Option[Int]) = + suggestion("suggestMetadataCredit", q, size) - def suggestPhotoshoot(q: Option[String], size: Option[Int]) = suggestion(photoshootField("suggest"), q, size) + def suggestPhotoshoot(q: Option[String], size: Option[Int]) = + suggestion(photoshootField("suggest"), q, size) // TODO: work with analysed fields // TODO: recover with HTTP error if invalid field @@ -22,23 +34,35 @@ class SuggestionController(auth: Authentication, elasticSearch: ElasticSearch, def metadataSearch(field: String, q: Option[String]) = auth.async { request => implicit val r = request - elasticSearch.metadataSearch(AggregateSearchParams(field, request)) map aggregateResponse + elasticSearch.metadataSearch( + AggregateSearchParams(field, request) + ) map aggregateResponse } def editsSearch(field: String, q: Option[String]) = auth.async { request => implicit val r = request - elasticSearch.editsSearch(AggregateSearchParams(field, request)) map aggregateResponse + elasticSearch.editsSearch( + AggregateSearchParams(field, request) + ) map aggregateResponse } - private def suggestion(field: String, query: Option[String], size: Option[Int]) = auth.async { request => + private def suggestion( + field: String, + query: Option[String], + size: Option[Int] + ) = auth.async { request => implicit val r = request - query.flatMap(q => if (q.nonEmpty) Some(q) else None).map { q => - elasticSearch.completionSuggestion(field, q, size.getOrElse(10)) - }.getOrElse( - Future.successful(CompletionSuggestionResults(List.empty)) - ).map(c => respondCollection(c.results)) + query + .flatMap(q => if (q.nonEmpty) Some(q) else None) + .map { q => + 
elasticSearch.completionSuggestion(field, q, size.getOrElse(10)) + } + .getOrElse( + Future.successful(CompletionSuggestionResults(List.empty)) + ) + .map(c => respondCollection(c.results)) } } diff --git a/media-api/app/controllers/UsageController.scala b/media-api/app/controllers/UsageController.scala index f512addab8..e75b5e6338 100644 --- a/media-api/app/controllers/UsageController.scala +++ b/media-api/app/controllers/UsageController.scala @@ -11,63 +11,78 @@ import play.api.mvc._ import scala.concurrent.{ExecutionContext, Future} - -class UsageController(auth: Authentication, config: MediaApiConfig, elasticSearch: ElasticSearch, usageQuota: UsageQuota, - override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers { +class UsageController( + auth: Authentication, + config: MediaApiConfig, + elasticSearch: ElasticSearch, + usageQuota: UsageQuota, + override val controllerComponents: ControllerComponents +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers { val numberOfDayInPeriod = 30 def bySupplier = auth.async { request => implicit val r = request - Future.sequence( - Agencies.all.keys.map(elasticSearch.usageForSupplier(_, numberOfDayInPeriod))) - .map(_.toList) - .map((s: List[SupplierUsageSummary]) => respond(s)) - .recover { - case e => respondError(InternalServerError, "unknown-error", e.toString) - } + Future + .sequence( + Agencies.all.keys.map( + elasticSearch.usageForSupplier(_, numberOfDayInPeriod) + ) + ) + .map(_.toList) + .map((s: List[SupplierUsageSummary]) => respond(s)) + .recover { case e => + respondError(InternalServerError, "unknown-error", e.toString) + } } def forSupplier(id: String) = auth.async { request => implicit val r = request - elasticSearch.usageForSupplier(id, numberOfDayInPeriod) + elasticSearch + .usageForSupplier(id, numberOfDayInPeriod) .map((s: SupplierUsageSummary) => respond(s)) - .recover { - case e => respondError(InternalServerError, "unknown-error", e.toString) + .recover { case e => + respondError(InternalServerError, "unknown-error", e.toString) } } - def usageStatusForImage(id: String)(implicit request: AuthenticatedRequest[AnyContent, Principal]): Future[UsageStatus] = for { + def usageStatusForImage(id: String)(implicit + request: AuthenticatedRequest[AnyContent, Principal] + ): Future[UsageStatus] = for { imageOption <- elasticSearch.getImageById(id) image <- Future { imageOption.get } .recover { case _ => throw new ImageNotFound } - usageStatus <- usageQuota.usageStore.getUsageStatusForUsageRights(image.usageRights) + usageStatus <- usageQuota.usageStore.getUsageStatusForUsageRights( + image.usageRights + ) } yield usageStatus - def quotaForImage(id: String) = auth.async { request => implicit val r = request usageStatusForImage(id) .map((u: UsageStatus) => respond(u)) .recover { - case e: ImageNotFound => respondError(NotFound, "image-not-found", e.toString) + case e: ImageNotFound => + respondError(NotFound, "image-not-found", e.toString) case e => respondError(InternalServerError, "unknown-error", e.toString) } } def quotas = auth.async { request => - usageQuota.usageStore.getUsageStatus() + usageQuota.usageStore + .getUsageStatus() .map((s: StoreAccess) => respond(s)) - .recover { - case e => respondError(InternalServerError, "unknown-error", e.toString) + .recover { case e => + respondError(InternalServerError, "unknown-error", e.toString) } } } diff --git a/media-api/app/lib/ImageExtras.scala 
b/media-api/app/lib/ImageExtras.scala index 45bbdec9a4..612b491d04 100644 --- a/media-api/app/lib/ImageExtras.scala +++ b/media-api/app/lib/ImageExtras.scala @@ -1,10 +1,19 @@ package lib import com.gu.mediaservice.model._ -import com.gu.mediaservice.model.leases.{AllowUseLease, DenyUseLease, LeasesByMedia, MediaLease} +import com.gu.mediaservice.model.leases.{ + AllowUseLease, + DenyUseLease, + LeasesByMedia, + MediaLease +} import lib.usagerights.CostCalculator -case class ValidityCheck(invalid: Boolean, overrideable: Boolean, shouldOverride: Boolean) { +case class ValidityCheck( + invalid: Boolean, + overrideable: Boolean, + shouldOverride: Boolean +) { val isValid: Boolean = !invalid || (overrideable && shouldOverride) } @@ -13,16 +22,19 @@ object ImageExtras { type ValidMap = Map[String, ValidityCheck] val validityDescription = Map( - "no_rights" -> "No rights to use this image", - "missing_credit" -> "Missing credit information *", - "missing_description" -> "Missing description *", - "paid_image" -> "Paid imagery requires a lease", - "over_quota" -> "The quota for this supplier has been exceeded", - "conditional_paid" -> "This image is restricted use", - "current_deny_lease" -> "Cropping has been denied using a lease" + "no_rights" -> "No rights to use this image", + "missing_credit" -> "Missing credit information *", + "missing_description" -> "Missing description *", + "paid_image" -> "Paid imagery requires a lease", + "over_quota" -> "The quota for this supplier has been exceeded", + "conditional_paid" -> "This image is restricted use", + "current_deny_lease" -> "Cropping has been denied using a lease" ) - def validityOverrides(image: Image, withWritePermission: Boolean): Map[String, Boolean] = Map( + def validityOverrides( + image: Image, + withWritePermission: Boolean + ): Map[String, Boolean] = Map( "current_allow_lease" -> hasCurrentAllowLease(image.leases), "has_write_permission" -> withWritePermission ) @@ -31,28 +43,44 @@ object ImageExtras { def hasCredit(meta: ImageMetadata) = meta.credit.isDefined def hasDescription(meta: ImageMetadata) = meta.description.isDefined - private def isCurrent(lease: MediaLease): Boolean = lease.active && lease.isUse + private def isCurrent(lease: MediaLease): Boolean = + lease.active && lease.isUse - def hasCurrentAllowLease(leases: LeasesByMedia): Boolean = leases.leases.exists(lease => lease.access == AllowUseLease && isCurrent(lease)) + def hasCurrentAllowLease(leases: LeasesByMedia): Boolean = + leases.leases.exists(lease => + lease.access == AllowUseLease && isCurrent(lease) + ) - def hasCurrentDenyLease(leases: LeasesByMedia): Boolean = leases.leases.exists(lease => lease.access == DenyUseLease && isCurrent(lease)) + def hasCurrentDenyLease(leases: LeasesByMedia): Boolean = + leases.leases.exists(lease => + lease.access == DenyUseLease && isCurrent(lease) + ) - def validityMap(image: Image, withWritePermission: Boolean)( - implicit cost: CostCalculator, quotas: UsageQuota): ValidMap = { + def validityMap(image: Image, withWritePermission: Boolean)(implicit + cost: CostCalculator, + quotas: UsageQuota + ): ValidMap = { - val shouldOverride = validityOverrides(image, withWritePermission).exists(_._2 == true) + val shouldOverride = + validityOverrides(image, withWritePermission).exists(_._2 == true) def createCheck(validCheck: Boolean, overrideable: Boolean = true) = ValidityCheck(validCheck, overrideable, shouldOverride) Map( - "paid_image" -> createCheck(cost.isPay(image.usageRights)), - "conditional_paid" -> 
createCheck(cost.isConditional(image.usageRights)), - "no_rights" -> createCheck(!hasRights(image.usageRights)), - "missing_credit" -> createCheck(!hasCredit(image.metadata), overrideable = false), - "missing_description" -> createCheck(!hasDescription(image.metadata), overrideable = false), - "current_deny_lease" -> createCheck(hasCurrentDenyLease(image.leases)), - "over_quota" -> createCheck(quotas.isOverQuota(image.usageRights)) + "paid_image" -> createCheck(cost.isPay(image.usageRights)), + "conditional_paid" -> createCheck(cost.isConditional(image.usageRights)), + "no_rights" -> createCheck(!hasRights(image.usageRights)), + "missing_credit" -> createCheck( + !hasCredit(image.metadata), + overrideable = false + ), + "missing_description" -> createCheck( + !hasDescription(image.metadata), + overrideable = false + ), + "current_deny_lease" -> createCheck(hasCurrentDenyLease(image.leases)), + "over_quota" -> createCheck(quotas.isOverQuota(image.usageRights)) ) } @@ -61,8 +89,9 @@ object ImageExtras { .map { case (id, _) => id -> validityDescription.get(id) } .map { case (id, Some(reason)) => id -> reason - case (id, None) => id -> s"Validity error: $id" + case (id, None) => id -> s"Validity error: $id" } - def isValid(validityMap: ValidMap): Boolean = validityMap.values.forall(_.isValid) + def isValid(validityMap: ValidMap): Boolean = + validityMap.values.forall(_.isValid) } diff --git a/media-api/app/lib/ImagePersistenceReasons.scala b/media-api/app/lib/ImagePersistenceReasons.scala index 2f0b65a60d..ff0b2acdf5 100644 --- a/media-api/app/lib/ImagePersistenceReasons.scala +++ b/media-api/app/lib/ImagePersistenceReasons.scala @@ -1,15 +1,28 @@ package lib -import com.gu.mediaservice.model.{CommissionedAgency, Illustrator, Image, ImageMetadata, Photographer, UsageRights} +import com.gu.mediaservice.model.{ + CommissionedAgency, + Illustrator, + Image, + ImageMetadata, + Photographer, + UsageRights +} import scala.collection.mutable.ListBuffer object ImagePersistenceReasons { - def apply(persistedRootCollections: List[String], persistenceIdentifier: String): ImagePersistenceReasons = + def apply( + persistedRootCollections: List[String], + persistenceIdentifier: String + ): ImagePersistenceReasons = new ImagePersistenceReasons(persistedRootCollections, persistenceIdentifier) } -class ImagePersistenceReasons(persistedRootCollections: List[String], persistenceIdentifier: String) { +class ImagePersistenceReasons( + persistedRootCollections: List[String], + persistenceIdentifier: String +) { def getImagePersistenceReasons(image: Image) = { val reasons = ListBuffer[String]() @@ -53,15 +66,20 @@ class ImagePersistenceReasons(persistedRootCollections: List[String], persistenc reasons.toList } - private def isInPersistedCollection(image: Image, persistedRootCollections: List[String]): Boolean = { + private def isInPersistedCollection( + image: Image, + persistedRootCollections: List[String] + ): Boolean = { // list of the first element of each collection's `path`, i.e all the root collections - val collectionPaths: List[String] = image.collections.flatMap(_.path.headOption) + val collectionPaths: List[String] = + image.collections.flatMap(_.path.headOption) // is image in at least one persisted collection? 
(collectionPaths diff persistedRootCollections).length < collectionPaths.length } - private def hasLabels(image: Image) = image.userMetadata.exists(_.labels.nonEmpty) + private def hasLabels(image: Image) = + image.userMetadata.exists(_.labels.nonEmpty) private def hasUserEdits(image: Image) = image.userMetadata.exists(ed => ed.metadata != ImageMetadata.empty) @@ -69,24 +87,28 @@ class ImagePersistenceReasons(persistedRootCollections: List[String], persistenc private def isIllustratorCategory[T <: UsageRights](usageRights: T) = usageRights match { case _: Illustrator => true - case _ => false + case _ => false } private def isAgencyCommissionedCategory[T <: UsageRights](usageRights: T) = usageRights match { case _: CommissionedAgency => true - case _ => false + case _ => false } private def isPhotographerCategory[T <: UsageRights](usageRights: T) = usageRights match { case _: Photographer => true - case _ => false + case _ => false } - private def hasPhotoshoot(image: Image): Boolean = image.userMetadata.exists(_.photoshoot.isDefined) + private def hasPhotoshoot(image: Image): Boolean = + image.userMetadata.exists(_.photoshoot.isDefined) - private def hasPersistenceIdentifier(image: Image, persistenceIdentifier: String) = { + private def hasPersistenceIdentifier( + image: Image, + persistenceIdentifier: String + ) = { image.identifiers.contains(persistenceIdentifier) } diff --git a/media-api/app/lib/ImageResponse.scala b/media-api/app/lib/ImageResponse.scala index 8fcd985fc8..4d68c477e9 100644 --- a/media-api/app/lib/ImageResponse.scala +++ b/media-api/app/lib/ImageResponse.scala @@ -18,8 +18,12 @@ import play.utils.UriEncoding import scala.util.{Failure, Try} -class ImageResponse(config: MediaApiConfig, s3Client: S3Client, usageQuota: UsageQuota) - extends EditsResponse with GridLogging { +class ImageResponse( + config: MediaApiConfig, + s3Client: S3Client, + usageQuota: UsageQuota +) extends EditsResponse + with GridLogging { // implicit val dateTimeFormat = DateFormat implicit val usageQuotas = usageQuota @@ -40,37 +44,54 @@ class ImageResponse(config: MediaApiConfig, s3Client: S3Client, usageQuota: Usag type MediaLeaseEntity = EmbeddedEntity[MediaLease] type MediaLeasesEntity = EmbeddedEntity[LeasesByMedia] - private val imgPersistenceReasons = ImagePersistenceReasons.apply(config.persistedRootCollections, config.persistenceIdentifier) + private val imgPersistenceReasons = ImagePersistenceReasons.apply( + config.persistedRootCollections, + config.persistenceIdentifier + ) - def imagePersistenceReasons(image: Image): List[String] = imgPersistenceReasons.getImagePersistenceReasons(image) + def imagePersistenceReasons(image: Image): List[String] = + imgPersistenceReasons.getImagePersistenceReasons(image) def canBeDeleted(image: Image) = image.canBeDeleted def create( - id: String, - image: Image, - withWritePermission: Boolean, - withDeleteImagePermission: Boolean, - withDeleteCropsOrUsagePermission: Boolean, - included: List[String] = List(), tier: Tier): (JsValue, List[Link], List[Action]) = { + id: String, + image: Image, + withWritePermission: Boolean, + withDeleteImagePermission: Boolean, + withDeleteCropsOrUsagePermission: Boolean, + included: List[String] = List(), + tier: Tier + ): (JsValue, List[Link], List[Action]) = { val source = Try { - Json.toJson(image)(imageResponseWrites(image.id, included.contains("fileMetadata"))) - }.recoverWith { - case e => - logger.error(s"Failed to read ElasticSearch response $id into Image object: ${e.getMessage}") - Failure(e) + 
Json.toJson(image)( + imageResponseWrites(image.id, included.contains("fileMetadata")) + ) + }.recoverWith { case e => + logger.error( + s"Failed to read ElasticSearch response $id into Image object: ${e.getMessage}" + ) + Failure(e) }.get val pngFileUri = image.optimisedPng.map(_.file) val fileUri = image.source.file - val imageUrl = s3Client.signUrl(config.imageBucket, fileUri, image, imageType = Source) + val imageUrl = + s3Client.signUrl(config.imageBucket, fileUri, image, imageType = Source) val pngUrl: Option[String] = pngFileUri - .map(s3Client.signUrl(config.imageBucket, _, image, imageType = OptimisedPng)) - - def s3SignedThumbUrl = s3Client.signUrl(config.thumbBucket, fileUri, image, imageType = Thumbnail) + .map( + s3Client.signUrl(config.imageBucket, _, image, imageType = OptimisedPng) + ) + + def s3SignedThumbUrl = s3Client.signUrl( + config.thumbBucket, + fileUri, + image, + imageType = Thumbnail + ) val thumbUrl = config.cloudFrontDomainThumbBucket .flatMap(s3Client.signedCloudFrontUrl(_, fileUri.getPath.drop(1))) @@ -83,63 +104,110 @@ class ImageResponse(config: MediaApiConfig, s3Client: S3Client, usageQuota: Usag val persistenceReasons = imagePersistenceReasons(image) val isPersisted = persistenceReasons.nonEmpty - val data = source.transform(addSecureSourceUrl(imageUrl)) + val data = source + .transform(addSecureSourceUrl(imageUrl)) .flatMap(_.transform(wrapUserMetadata(id))) .flatMap(_.transform(addSecureThumbUrl(thumbUrl))) - .flatMap(_.transform( - pngUrl - .map(url => addSecureOptimisedPngUrl(url)) - .getOrElse(__.json.pick) - )) + .flatMap( + _.transform( + pngUrl + .map(url => addSecureOptimisedPngUrl(url)) + .getOrElse(__.json.pick) + ) + ) .flatMap(_.transform(addValidity(valid))) .flatMap(_.transform(addInvalidReasons(invalidReasons))) .flatMap(_.transform(addUsageCost(source))) .flatMap(_.transform(addPersistedState(isPersisted, persistenceReasons))) - .flatMap(_.transform(addSyndicationStatus(image))).get + .flatMap(_.transform(addSyndicationStatus(image))) + .get val links: List[Link] = tier match { - case Internal => imageLinks(id, imageUrl, pngUrl, withWritePermission, valid) + case Internal => + imageLinks(id, imageUrl, pngUrl, withWritePermission, valid) case _ => List(downloadLink(id), downloadOptimisedLink(id)) } val isDeletable = canBeDeleted(image) && withDeleteImagePermission - val actions: List[Action] = if (tier == Internal) imageActions(id, isDeletable, withWritePermission, withDeleteCropsOrUsagePermission) else Nil + val actions: List[Action] = + if (tier == Internal) + imageActions( + id, + isDeletable, + withWritePermission, + withDeleteCropsOrUsagePermission + ) + else Nil (data, links, actions) } - def downloadLink(id: String) = Link("download", s"${config.rootUri}/images/$id/download") - def downloadOptimisedLink(id: String) = Link("downloadOptimised", s"${config.rootUri}/images/$id/downloadOptimised?{&width,height,quality}") - - def imageLinks(id: String, secureUrl: String, securePngUrl: Option[String], withWritePermission: Boolean, valid: Boolean) = { + def downloadLink(id: String) = + Link("download", s"${config.rootUri}/images/$id/download") + def downloadOptimisedLink(id: String) = Link( + "downloadOptimised", + s"${config.rootUri}/images/$id/downloadOptimised?{&width,height,quality}" + ) + + def imageLinks( + id: String, + secureUrl: String, + securePngUrl: Option[String], + withWritePermission: Boolean, + valid: Boolean + ) = { val cropLink = Link("crops", s"${config.cropperUri}/crops/$id") val editLink = Link("edits", 
s"${config.metadataUri}/metadata/$id") val optimisedLink = Link("optimised", makeImgopsUri(new URI(secureUrl))) val optimisedPngLink = securePngUrl match { - case Some(secureUrl) => Some(Link("optimisedPng", makeImgopsUri(new URI(secureUrl)))) + case Some(secureUrl) => + Some(Link("optimisedPng", makeImgopsUri(new URI(secureUrl)))) case _ => None } val imageLink = Link("ui:image", s"${config.kahunaUri}/images/$id") val usageLink = Link("usages", s"${config.usageUri}/usages/media/$id") val leasesLink = Link("leases", s"${config.leasesUri}/leases/media/$id") - val fileMetadataLink = Link("fileMetadata", s"${config.rootUri}/images/$id/fileMetadata") + val fileMetadataLink = + Link("fileMetadata", s"${config.rootUri}/images/$id/fileMetadata") val baseLinks = if (withWritePermission) { - List(editLink, optimisedLink, imageLink, usageLink, leasesLink, fileMetadataLink, downloadLink(id), downloadOptimisedLink(id)) + List( + editLink, + optimisedLink, + imageLink, + usageLink, + leasesLink, + fileMetadataLink, + downloadLink(id), + downloadOptimisedLink(id) + ) } else { - List(optimisedLink, imageLink, usageLink, leasesLink, fileMetadataLink, downloadLink(id), downloadOptimisedLink(id)) + List( + optimisedLink, + imageLink, + usageLink, + leasesLink, + fileMetadataLink, + downloadLink(id), + downloadOptimisedLink(id) + ) } val baseLinksWithOptimised = optimisedPngLink match { case Some(link) => link :: baseLinks - case None => baseLinks + case None => baseLinks } if (valid) cropLink :: baseLinksWithOptimised else baseLinksWithOptimised } - def imageActions(id: String, isDeletable: Boolean, withWritePermission: Boolean, withDeleteCropsOrUsagePermission: Boolean): List[Action] = { + def imageActions( + id: String, + isDeletable: Boolean, + withWritePermission: Boolean, + withDeleteCropsOrUsagePermission: Boolean + ): List[Action] = { val imageUri = URI.create(s"${config.rootUri}/images/$id") val reindexUri = URI.create(s"${config.rootUri}/images/$id/reindex") @@ -183,20 +251,33 @@ class ImageResponse(config: MediaApiConfig, s3Client: S3Client, usageQuota: Usag val cost = Costing.getCost(usageRights) - __.json.update(__.read[JsObject].map(_ ++ Json.obj("cost" -> cost.toString))) + __.json.update( + __.read[JsObject].map(_ ++ Json.obj("cost" -> cost.toString)) + ) } def addSyndicationStatus(image: Image): Reads[JsObject] = { - __.json.update(__.read[JsObject]).map(_ ++ Json.obj( - "syndicationStatus" -> image.syndicationStatus - )) + __.json + .update(__.read[JsObject]) + .map( + _ ++ Json.obj( + "syndicationStatus" -> image.syndicationStatus + ) + ) } - def addPersistedState(isPersisted: Boolean, persistenceReasons: List[String]): Reads[JsObject] = - __.json.update(__.read[JsObject]).map(_ ++ Json.obj( - "persisted" -> Json.obj( - "value" -> isPersisted, - "reasons" -> persistenceReasons))) + def addPersistedState( + isPersisted: Boolean, + persistenceReasons: List[String] + ): Reads[JsObject] = + __.json + .update(__.read[JsObject]) + .map( + _ ++ Json.obj( + "persisted" -> Json + .obj("value" -> isPersisted, "reasons" -> persistenceReasons) + ) + ) def wrapUserMetadata(id: String): Reads[JsObject] = __.read[JsObject].map { root => @@ -207,31 +288,45 @@ class ImageResponse(config: MediaApiConfig, s3Client: S3Client, usageQuota: Usag } def addSecureSourceUrl(url: String): Reads[JsObject] = - (__ \ "source").json.update(__.read[JsObject].map(_ ++ Json.obj("secureUrl" -> url))) + (__ \ "source").json.update( + __.read[JsObject].map(_ ++ Json.obj("secureUrl" -> url)) + ) def 
addSecureOptimisedPngUrl(url: String): Reads[JsObject] = - (__ \ "optimisedPng").json.update(__.read[JsObject].map(_ ++ Json.obj("secureUrl" -> url))) + (__ \ "optimisedPng").json.update( + __.read[JsObject].map(_ ++ Json.obj("secureUrl" -> url)) + ) def addSecureThumbUrl(url: String): Reads[JsObject] = - (__ \ "thumbnail").json.update(__.read[JsObject].map(_ ++ Json.obj("secureUrl" -> url))) + (__ \ "thumbnail").json.update( + __.read[JsObject].map(_ ++ Json.obj("secureUrl" -> url)) + ) def addValidity(valid: Boolean): Reads[JsObject] = __.json.update(__.read[JsObject]).map(_ ++ Json.obj("valid" -> valid)) def addInvalidReasons(reasons: Map[String, String]): Reads[JsObject] = - __.json.update(__.read[JsObject]).map(_ ++ Json.obj("invalidReasons" -> Json.toJson(reasons))) + __.json + .update(__.read[JsObject]) + .map(_ ++ Json.obj("invalidReasons" -> Json.toJson(reasons))) def makeImgopsUri(uri: URI): String = - config.imgopsUri + List(uri.getPath, uri.getRawQuery).mkString("?") + "{&w,h,q}" + config.imgopsUri + List(uri.getPath, uri.getRawQuery).mkString( + "?" + ) + "{&w,h,q}" def makeOptimisedPngImageopsUri(uri: URI): String = { - config.imgopsUri + List(uri.getPath, uri.getRawQuery).mkString("?") + "{&w, h, q}" + config.imgopsUri + List(uri.getPath, uri.getRawQuery).mkString( + "?" + ) + "{&w, h, q}" } - import play.api.libs.json.JodaWrites._ - def imageResponseWrites(id: String, expandFileMetaData: Boolean): Writes[Image] = ( + def imageResponseWrites( + id: String, + expandFileMetaData: Boolean + ): Writes[Image] = ( (__ \ "id").write[String] ~ (__ \ "uploadTime").write[DateTime] ~ (__ \ "uploadedBy").write[String] ~ @@ -241,58 +336,90 @@ class ImageResponse(config: MediaApiConfig, s3Client: S3Client, usageQuota: Usag (__ \ "source").write[Asset] ~ (__ \ "thumbnail").writeNullable[Asset] ~ (__ \ "optimisedPng").writeNullable[Asset] ~ - (__ \ "fileMetadata").write[FileMetadataEntity] - .contramap(fileMetadataEntity(id, expandFileMetaData, _: FileMetadata)) ~ + (__ \ "fileMetadata") + .write[FileMetadataEntity] + .contramap( + fileMetadataEntity(id, expandFileMetaData, _: FileMetadata) + ) ~ (__ \ "userMetadata").writeNullable[Edits] ~ - (__ \ "metadata").write[ImageMetadata](ImageResponse.newlineNormalisingImageMetadataWriter) ~ + (__ \ "metadata").write[ImageMetadata]( + ImageResponse.newlineNormalisingImageMetadataWriter + ) ~ (__ \ "originalMetadata").write[ImageMetadata] ~ (__ \ "usageRights").write[UsageRights] ~ (__ \ "originalUsageRights").write[UsageRights] ~ - (__ \ "exports").write[List[Export]] + (__ \ "exports") + .write[List[Export]] .contramap((crops: List[Crop]) => crops.map(Export.fromCrop(_: Crop))) ~ - (__ \ "usages").write[UsagesEntity] + (__ \ "usages") + .write[UsagesEntity] .contramap(usagesEntity(id, _: List[Usage])) ~ - (__ \ "leases").write[MediaLeasesEntity] + (__ \ "leases") + .write[MediaLeasesEntity] .contramap(leasesEntity(id, _: LeasesByMedia)) ~ - (__ \ "collections").write[List[EmbeddedEntity[CollectionResponse]]] - .contramap((collections: List[Collection]) => collections.map(c => collectionsEntity(id, c))) ~ + (__ \ "collections") + .write[List[EmbeddedEntity[CollectionResponse]]] + .contramap((collections: List[Collection]) => + collections.map(c => collectionsEntity(id, c)) + ) ~ (__ \ "syndicationRights").writeNullable[SyndicationRights] ~ (__ \ "usermetaDataLastModified").writeNullable[DateTime] + )(unlift(Image.unapply)) - ) (unlift(Image.unapply)) - - def fileMetaDataUri(id: String) = URI.create(s"${config.rootUri}/images/$id/fileMetadata") + 
def fileMetaDataUri(id: String) = + URI.create(s"${config.rootUri}/images/$id/fileMetadata") def usagesUri(id: String) = URI.create(s"${config.usageUri}/usages/media/$id") def usageUri(id: String) = { - URI.create(s"${config.usageUri}/usages/${UriEncoding.encodePathSegment(id, "UTF-8")}") + URI.create( + s"${config.usageUri}/usages/${UriEncoding.encodePathSegment(id, "UTF-8")}" + ) } - def leasesUri(id: String) = URI.create(s"${config.leasesUri}/leases/media/$id") + def leasesUri(id: String) = + URI.create(s"${config.leasesUri}/leases/media/$id") - def usageEntity(usage: Usage) = EmbeddedEntity[Usage](usageUri(usage.id), Some(usage)) + def usageEntity(usage: Usage) = + EmbeddedEntity[Usage](usageUri(usage.id), Some(usage)) def usagesEntity(id: String, usages: List[Usage]) = - EmbeddedEntity[List[UsageEntity]](usagesUri(id), Some(usages.map(usageEntity))) + EmbeddedEntity[List[UsageEntity]]( + usagesUri(id), + Some(usages.map(usageEntity)) + ) def leasesEntity(id: String, leaseByMedia: LeasesByMedia) = EmbeddedEntity[LeasesByMedia](leasesUri(id), Some(leaseByMedia)) - def collectionsEntity(id: String, c: Collection): EmbeddedEntity[CollectionResponse] = + def collectionsEntity( + id: String, + c: Collection + ): EmbeddedEntity[CollectionResponse] = collectionEntity(config.collectionsUri, id, c) def collectionEntity(rootUri: String, imageId: String, c: Collection) = { // TODO: Currently the GET for this URI does nothing - val uri = URI.create(s"$rootUri/images/$imageId/${CollectionsManager.pathToUri(c.path)}") + val uri = URI.create( + s"$rootUri/images/$imageId/${CollectionsManager.pathToUri(c.path)}" + ) val response = CollectionResponse.build(c) - EmbeddedEntity(uri, Some(response), actions = List( - Action("remove", uri, "DELETE") - )) + EmbeddedEntity( + uri, + Some(response), + actions = List( + Action("remove", uri, "DELETE") + ) + ) } - def fileMetadataEntity(id: String, expandFileMetaData: Boolean, fileMetadata: FileMetadata) = { - val displayableMetadata = if (expandFileMetaData) Some(fileMetadata) else None + def fileMetadataEntity( + id: String, + expandFileMetaData: Boolean, + fileMetadata: FileMetadata + ) = { + val displayableMetadata = + if (expandFileMetaData) Some(fileMetadata) else None EmbeddedEntity[FileMetadata](fileMetaDataUri(id), displayableMetadata) } @@ -300,20 +427,26 @@ class ImageResponse(config: MediaApiConfig, s3Client: S3Client, usageQuota: Usag object ImageResponse { - val newlineNormalisingImageMetadataWriter: Writes[ImageMetadata] = (input: ImageMetadata) => { - Json.toJson(normaliseNewLinesInImageMeta(input)) - } + val newlineNormalisingImageMetadataWriter: Writes[ImageMetadata] = + (input: ImageMetadata) => { + Json.toJson(normaliseNewLinesInImageMeta(input)) + } - def normaliseNewLinesInImageMeta(imageMetadata: ImageMetadata): ImageMetadata = imageMetadata.modifyAll( - _.description, - _.copyright, - _.specialInstructions, - _.suppliersReference - ).using(_.map(ImageResponse.normaliseNewlineChars)) + def normaliseNewLinesInImageMeta( + imageMetadata: ImageMetadata + ): ImageMetadata = imageMetadata + .modifyAll( + _.description, + _.copyright, + _.specialInstructions, + _.suppliersReference + ) + .using(_.map(ImageResponse.normaliseNewlineChars)) private val pattern = """[\r\n]+""".r - def normaliseNewlineChars(string: String): String = pattern.replaceAllIn(string, "\n") + def normaliseNewlineChars(string: String): String = + pattern.replaceAllIn(string, "\n") def canImgBeDeleted(image: Image) = !hasExports(image) && !hasUsages(image) @@ -323,11 +456,24 
@@ object ImageResponse { } // We're using this to slightly hydrate the json response -case class CollectionResponse private(path: List[String], pathId: String, description: String, cssColour: Option[String], actionData: ActionData) +case class CollectionResponse private ( + path: List[String], + pathId: String, + description: String, + cssColour: Option[String], + actionData: ActionData +) object CollectionResponse { - implicit def writes: Writes[CollectionResponse] = Json.writes[CollectionResponse] + implicit def writes: Writes[CollectionResponse] = + Json.writes[CollectionResponse] def build(c: Collection) = - CollectionResponse(c.path, c.pathId, c.description, CollectionsManager.getCssColour(c.path), c.actionData) + CollectionResponse( + c.path, + c.pathId, + c.description, + CollectionsManager.getCssColour(c.path), + c.actionData + ) } diff --git a/media-api/app/lib/MediaApiConfig.scala b/media-api/app/lib/MediaApiConfig.scala index ae4e957d14..9889ad5dc0 100644 --- a/media-api/app/lib/MediaApiConfig.scala +++ b/media-api/app/lib/MediaApiConfig.scala @@ -6,11 +6,12 @@ import org.joda.time.DateTime import scala.util.Try case class StoreConfig( - storeBucket: String, - storeKey: String + storeBucket: String, + storeKey: String ) -class MediaApiConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) { +class MediaApiConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) { val configBucket: String = string("s3.config.bucket") val usageMailBucket: String = string("s3.usagemail.bucket") @@ -18,13 +19,14 @@ class MediaApiConfig(resources: GridConfigResources) extends CommonConfig(resour val quotaStoreConfig: StoreConfig = StoreConfig(configBucket, quotaStoreKey) // quota updates can only be turned off in DEV - val quotaUpdateEnabled: Boolean = if (isDev) boolean("quota.update.enabled") else true + val quotaUpdateEnabled: Boolean = + if (isDev) boolean("quota.update.enabled") else true val recordDownloadAsUsage: Boolean = boolean("image.record.download") val imagesAlias: String = string("es.index.aliases.read") - val elasticsearch6Url: String = string("es6.url") + val elasticsearch6Url: String = string("es6.url") val elasticsearch6Cluster: String = string("es6.cluster") val elasticsearch6Shards: Int = string("es6.shards").toInt val elasticsearch6Replicas: Int = string("es6.replicas").toInt @@ -37,9 +39,13 @@ class MediaApiConfig(resources: GridConfigResources) extends CommonConfig(resour "/etc/gu/ssl/private/cloudfront.pem" // TODO - remove once migrated away from ) - val cloudFrontDomainImageBucket: Option[String] = stringOpt("cloudfront.domain.imagebucket") - val cloudFrontDomainThumbBucket: Option[String] = stringOpt("cloudfront.domain.thumbbucket") - val cloudFrontKeyPairId: Option[String] = stringOpt("cloudfront.keypair.id") + val cloudFrontDomainImageBucket: Option[String] = stringOpt( + "cloudfront.domain.imagebucket" + ) + val cloudFrontDomainThumbBucket: Option[String] = stringOpt( + "cloudfront.domain.thumbbucket" + ) + val cloudFrontKeyPairId: Option[String] = stringOpt("cloudfront.keypair.id") val rootUri: String = services.apiBaseUri val kahunaUri: String = services.kahunaBaseUri @@ -59,14 +65,18 @@ class MediaApiConfig(resources: GridConfigResources) extends CommonConfig(resour val persistenceIdentifier = string("persistence.identifier") val queriableIdentifiers = Seq(persistenceIdentifier) - val persistedRootCollections: List[String] = stringOpt("persistence.collections") match { + val persistedRootCollections: 
List[String] = stringOpt( + "persistence.collections" + ) match { case Some(collections) => collections.split(',').toList - case None => List("GNM Archive") + case None => List("GNM Archive") } def convertToInt(s: String): Option[Int] = Try { s.toInt }.toOption val syndicationStartDate: Option[DateTime] = Try { - stringOpt("syndication.start").map(d => DateTime.parse(d).withTimeAtStartOfDay()) + stringOpt("syndication.start").map(d => + DateTime.parse(d).withTimeAtStartOfDay() + ) }.toOption.flatten } diff --git a/media-api/app/lib/MediaApiMetrics.scala b/media-api/app/lib/MediaApiMetrics.scala index 41e68adf45..e394882916 100644 --- a/media-api/app/lib/MediaApiMetrics.scala +++ b/media-api/app/lib/MediaApiMetrics.scala @@ -4,7 +4,8 @@ import com.amazonaws.services.cloudwatch.model.Dimension import com.gu.mediaservice.lib.auth.{ApiAccessor, Syndication} import com.gu.mediaservice.lib.metrics.CloudWatchMetrics -class MediaApiMetrics(config: MediaApiConfig) extends CloudWatchMetrics(s"${config.stage}/MediaApi", config) { +class MediaApiMetrics(config: MediaApiConfig) + extends CloudWatchMetrics(s"${config.stage}/MediaApi", config) { val searchQueries = new TimeMetric("ElasticSearch") @@ -21,17 +22,22 @@ class MediaApiMetrics(config: MediaApiConfig) extends CloudWatchMetrics(s"${conf val metricName = "OptimisedImageDownload" } - def incrementImageDownload(apiKey: ApiAccessor, downloadType: DownloadType) = { + def incrementImageDownload( + apiKey: ApiAccessor, + downloadType: DownloadType + ) = { val metric = new CountMetric(apiKey.tier.toString) // CW Metrics have a maximum of 10 dimensions per metric. // Create a separate dimension per syndication partner and group other Tier types together. val dimensionValue: String = apiKey.tier match { case Syndication => apiKey.identity - case _ => apiKey.tier.toString + case _ => apiKey.tier.toString } - val dimension = new Dimension().withName(downloadType.metricName).withValue(dimensionValue) + val dimension = new Dimension() + .withName(downloadType.metricName) + .withValue(dimensionValue) metric.increment(List(dimension)).run } diff --git a/media-api/app/lib/S3Client.scala b/media-api/app/lib/S3Client.scala index 2088d432b5..8877a82fbe 100644 --- a/media-api/app/lib/S3Client.scala +++ b/media-api/app/lib/S3Client.scala @@ -19,18 +19,32 @@ trait CloudFrontDistributable { private def expiresAt: Date = DateTime.now.plusMinutes(validForMinutes).toDate private lazy val privateKeyFile: File = - privateKeyLocations.map { location => - new File(location) - }.find(_.exists).get - - def signedCloudFrontUrl(cloudFrontDomain: String, s3ObjectPath: String): Option[String] = Try { + privateKeyLocations + .map { location => + new File(location) + } + .find(_.exists) + .get + + def signedCloudFrontUrl( + cloudFrontDomain: String, + s3ObjectPath: String + ): Option[String] = Try { CloudFrontUrlSigner.getSignedURLWithCannedPolicy( - protocol, cloudFrontDomain, privateKeyFile, s3ObjectPath, keyPairId.get, expiresAt) + protocol, + cloudFrontDomain, + privateKeyFile, + s3ObjectPath, + keyPairId.get, + expiresAt + ) }.toOption } -class S3Client(config: MediaApiConfig) extends S3(config) with CloudFrontDistributable { - lazy val privateKeyLocations: Seq[String] = config.cloudFrontPrivateKeyLocations +class S3Client(config: MediaApiConfig) + extends S3(config) + with CloudFrontDistributable { + lazy val privateKeyLocations: Seq[String] = + config.cloudFrontPrivateKeyLocations lazy val keyPairId: Option[String] = config.cloudFrontKeyPairId } - diff --git 
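// A sketch of the dimension-picking rule in MediaApiMetrics.incrementImageDownload
// above: each syndication partner gets its own dimension value (the original
// comment notes CloudWatch's 10-dimension-per-metric limit), while all other
// tiers are grouped under their tier name. Tier and ApiAccessor here are
// simplified stand-ins for the real Grid types; only the AWS SDK v1 CloudWatch
// model already used above is assumed.
import com.amazonaws.services.cloudwatch.model.Dimension

object DownloadDimensionSketch {
  sealed trait Tier
  case object Syndication extends Tier
  case object Internal extends Tier

  final case class ApiAccessor(identity: String, tier: Tier)

  def downloadDimension(apiKey: ApiAccessor, metricName: String): Dimension = {
    val dimensionValue = apiKey.tier match {
      case Syndication => apiKey.identity // one dimension per partner
      case _           => apiKey.tier.toString // everything else grouped by tier
    }
    new Dimension().withName(metricName).withValue(dimensionValue)
  }
}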
a/media-api/app/lib/UsageQuota.scala b/media-api/app/lib/UsageQuota.scala index 7b88105852..12ff539bdc 100644 --- a/media-api/app/lib/UsageQuota.scala +++ b/media-api/app/lib/UsageQuota.scala @@ -38,7 +38,7 @@ class UsageQuota(config: MediaApiConfig, scheduler: Scheduler) { def isOverQuota(rights: UsageRights, waitMillis: Int = 100): Boolean = Try { Await.result( usageStore.getUsageStatusForUsageRights(rights), - waitMillis.millis) + waitMillis.millis + ) }.toOption.exists(_.exceeded) && FeatureToggle.get("usage-quota-ui") } - diff --git a/media-api/app/lib/UsageStore.scala b/media-api/app/lib/UsageStore.scala index 42b0e54219..e6fa5a5f51 100644 --- a/media-api/app/lib/UsageStore.scala +++ b/media-api/app/lib/UsageStore.scala @@ -20,12 +20,12 @@ case class SupplierUsageQuota(agency: Agency, count: Int) object SupplierUsageQuota { implicit val writes: Writes[SupplierUsageQuota] = ( (__ \ "agency").write[String].contramap((a: Agency) => a.supplier) ~ - (__ \ "count").write[Int] + (__ \ "count").write[Int] )(unlift(SupplierUsageQuota.unapply)) implicit val customReads: Reads[SupplierUsageQuota] = ( (__ \ "agency").read[String].map(Agency(_)) ~ - (__ \ "count").read[Int] + (__ \ "count").read[Int] )(SupplierUsageQuota.apply _) } @@ -33,20 +33,20 @@ case class SupplierUsageSummary(agency: Agency, count: Long) object SupplierUsageSummary { implicit val customReads: Reads[SupplierUsageSummary] = ( (__ \ "Supplier").read[String].map(Agency(_)) ~ - (__ \ "Usage").read[Long] + (__ \ "Usage").read[Long] )(SupplierUsageSummary.apply _) implicit val writes: Writes[SupplierUsageSummary] = ( (__ \ "agency").write[String].contramap((a: Agency) => a.supplier) ~ - (__ \ "count").write[Long] + (__ \ "count").write[Long] )(unlift(SupplierUsageSummary.unapply)) } case class UsageStatus( - exceeded: Boolean, - fractionOfQuota: Float, - usage: SupplierUsageSummary, - quota: Option[SupplierUsageQuota] + exceeded: Boolean, + fractionOfQuota: Float, + usage: SupplierUsageSummary, + quota: Option[SupplierUsageQuota] ) object UsageStatus { implicit val writes: Writes[UsageStatus] = Json.writes[UsageStatus] @@ -66,10 +66,13 @@ object UsageStore extends GridLogging { message.getContent match { case content: MimeMultipart => - val parts = for(n <- 0 until content.getCount) yield content.getBodyPart(n) + val parts = + for (n <- 0 until content.getCount) yield content.getBodyPart(n) val part = parts - .collectFirst { case part: MimeBodyPart if part.getEncoding == "base64" => part } + .collectFirst { + case part: MimeBodyPart if part.getEncoding == "base64" => part + } .map(_.getContent) part match { @@ -95,7 +98,7 @@ object UsageStore extends GridLogging { .map(_.map(stripQuotes)) .map(_.toList) - if(lines.exists(_.length != 2)) { + if (lines.exists(_.length != 2)) { logger.error("CSV header error. Expected 2 columns") throw new IllegalArgumentException("CSV header error. Expected 2 columns") } @@ -108,34 +111,47 @@ object UsageStore extends GridLogging { case _ => logger.error("CSV body error. Expected 2 columns") - throw new IllegalArgumentException("CSV body error. Expected 2 columns") + throw new IllegalArgumentException( + "CSV body error. Expected 2 columns" + ) } case other => - logger.error(s"Unexpected CSV headers [${other.mkString(",")}]. Expected [CproName, Id]") - throw new IllegalArgumentException(s"Unexpected CSV headers [${other.mkString(",")}]. Expected [CproName, Id]") + logger.error( + s"Unexpected CSV headers [${other.mkString(",")}]. 
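// A sketch of the bounded-wait pattern in UsageQuota.isOverQuota above: block on
// the usage-status Future for a short time and treat a timeout or failure as
// "not over quota". Pure standard library; UsageStatus is a cut-down stand-in
// and the feature-toggle check is omitted.
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.util.Try

object OverQuotaSketch {
  final case class UsageStatus(exceeded: Boolean)

  def isOverQuota(status: Future[UsageStatus], waitMillis: Int = 100): Boolean =
    Try(Await.result(status, waitMillis.millis)).toOption.exists(_.exceeded)

  def main(args: Array[String]): Unit = {
    println(isOverQuota(Future.successful(UsageStatus(exceeded = true)))) // true
    println(isOverQuota(Future.never)) // false: the wait times out
  }
}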
Expected [CproName, Id]" + ) + throw new IllegalArgumentException( + s"Unexpected CSV headers [${other.mkString(",")}]. Expected [CproName, Id]" + ) } } } class UsageStore( - bucket: String, - config: MediaApiConfig, - quotaStore: QuotaStore -)(implicit val ec: ExecutionContext) extends BaseStore[String, UsageStatus](bucket, config) with GridLogging { + bucket: String, + config: MediaApiConfig, + quotaStore: QuotaStore +)(implicit val ec: ExecutionContext) + extends BaseStore[String, UsageStatus](bucket, config) + with GridLogging { import UsageStore._ - def getUsageStatusForUsageRights(usageRights: UsageRights): Future[UsageStatus] = { + def getUsageStatusForUsageRights( + usageRights: UsageRights + ): Future[UsageStatus] = { usageRights match { - case agency: Agency => Future.successful(store.get().getOrElse(agency.supplier, { throw NoUsageQuota() })) + case agency: Agency => + Future.successful( + store.get().getOrElse(agency.supplier, { throw NoUsageQuota() }) + ) case _ => Future.failed(new Exception("Image is not supplied by Agency")) } } def getUsageStatus(): Future[StoreAccess] = Future.successful((for { - s <- store - l <- lastUpdated - } yield StoreAccess(s,l)).get()) + s <- store + l <- lastUpdated + } yield StoreAccess(s, l)).get()) def overQuotaAgencies: List[Agency] = store.get.collect { case (_, status) if status.exceeded => status.usage.agency @@ -156,36 +172,47 @@ class UsageStore( logger.info(s"Last usage file has ${lines.length} lines") val summary: List[SupplierUsageSummary] = csvParser(lines) - def copyAgency(supplier: SupplierUsageSummary, id: String) = Agencies.all.get(id) - .map(a => supplier.copy(agency = a)) - .getOrElse(supplier) + def copyAgency(supplier: SupplierUsageSummary, id: String) = + Agencies.all + .get(id) + .map(a => supplier.copy(agency = a)) + .getOrElse(supplier) val cleanedSummary = summary .map { - case s if s.agency.supplier.contains("Rex Features") => copyAgency(s, "rex") - case s if s.agency.supplier.contains("Getty Images") => copyAgency(s, "getty") - case s if s.agency.supplier.contains("Australian Associated Press") => copyAgency(s, "aap") - case s if s.agency.supplier.contains("Alamy") => copyAgency(s, "alamy") + case s if s.agency.supplier.contains("Rex Features") => + copyAgency(s, "rex") + case s if s.agency.supplier.contains("Getty Images") => + copyAgency(s, "getty") + case s + if s.agency.supplier.contains("Australian Associated Press") => + copyAgency(s, "aap") + case s if s.agency.supplier.contains("Alamy") => + copyAgency(s, "alamy") case s => s } - quotaStore.getQuota.map { supplierQuota => { - cleanedSummary - .groupBy(_.agency.supplier) - .mapValues(_.head) - .mapValues((summary: SupplierUsageSummary) => { - val quota = summary.agency.id.flatMap(id => supplierQuota.get(id)) - val exceeded = quota.exists(q => summary.count > q.count) - val fractionOfQuota: Float = quota.map(q => summary.count.toFloat / q.count).getOrElse(0F) - - UsageStatus( - exceeded, - fractionOfQuota, - summary, - quota - ) - }) - }} + quotaStore.getQuota.map { supplierQuota => + { + cleanedSummary + .groupBy(_.agency.supplier) + .mapValues(_.head) + .mapValues((summary: SupplierUsageSummary) => { + val quota = + summary.agency.id.flatMap(id => supplierQuota.get(id)) + val exceeded = quota.exists(q => summary.count > q.count) + val fractionOfQuota: Float = + quota.map(q => summary.count.toFloat / q.count).getOrElse(0f) + + UsageStatus( + exceeded, + fractionOfQuota, + summary, + quota + ) + }) + } + } } case _ => Future.successful(Map.empty) } @@ -193,18 
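// A sketch of the per-supplier arithmetic in UsageStore above: a supplier has
// exceeded its quota when the reported usage count is greater than the quota
// count, and fractionOfQuota is usage divided by quota (0 when no quota is set).
// Quota and Usage are illustrative stand-ins for SupplierUsageQuota/Summary.
object QuotaArithmeticSketch {
  final case class Quota(count: Int)
  final case class Usage(count: Long)

  def status(usage: Usage, quota: Option[Quota]): (Boolean, Float) = {
    val exceeded = quota.exists(q => usage.count > q.count)
    val fractionOfQuota = quota.map(q => usage.count.toFloat / q.count).getOrElse(0f)
    (exceeded, fractionOfQuota)
  }

  def main(args: Array[String]): Unit = {
    println(status(Usage(120), Some(Quota(100)))) // (true,1.2)
    println(status(Usage(50), None)) // (false,0.0)
  }
}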
+220,22 @@ class UsageStore( } class QuotaStore( - quotaFile: String, - bucket: String, - config: MediaApiConfig -)(implicit ec: ExecutionContext) extends BaseStore[String, SupplierUsageQuota](bucket, config)(ec) { + quotaFile: String, + bucket: String, + config: MediaApiConfig +)(implicit ec: ExecutionContext) + extends BaseStore[String, SupplierUsageQuota](bucket, config)(ec) { - def getQuota: Future[Map[String, SupplierUsageQuota]] = Future.successful(store.get()) + def getQuota: Future[Map[String, SupplierUsageQuota]] = + Future.successful(store.get()) def update() { if (config.quotaUpdateEnabled) { store.send(_ => fetchQuota) } else { - logger.info("Quota store updates disabled. Set quota.update.enabled in media-api.properties to enable.") + logger.info( + "Quota store updates disabled. Set quota.update.enabled in media-api.properties to enable." + ) } } @@ -215,8 +246,8 @@ class QuotaStore( .parse(quotaFileString) .as[List[SupplierUsageQuota]] - summary.foldLeft(Map[String,SupplierUsageQuota]())((memo, quota) => { - memo + (quota.agency.supplier -> quota) - }) + summary.foldLeft(Map[String, SupplierUsageQuota]())((memo, quota) => { + memo + (quota.agency.supplier -> quota) + }) } } diff --git a/media-api/app/lib/elasticsearch/ElasticSearch.scala b/media-api/app/lib/elasticsearch/ElasticSearch.scala index 6afc0b74b5..7dd96b0692 100644 --- a/media-api/app/lib/elasticsearch/ElasticSearch.scala +++ b/media-api/app/lib/elasticsearch/ElasticSearch.scala @@ -4,13 +4,24 @@ import java.util.concurrent.TimeUnit import com.gu.mediaservice.lib.ImageFields import com.gu.mediaservice.lib.auth.Authentication.Principal -import com.gu.mediaservice.lib.elasticsearch.{ElasticSearchClient, ElasticSearchConfig, ElasticSearchExecutions, Mappings} +import com.gu.mediaservice.lib.elasticsearch.{ + ElasticSearchClient, + ElasticSearchConfig, + ElasticSearchExecutions, + Mappings +} import com.gu.mediaservice.lib.logging.{GridLogging, MarkerMap} import com.gu.mediaservice.lib.metrics.FutureSyntax import com.gu.mediaservice.model.{Agencies, Agency, Image} import com.sksamuel.elastic4s.ElasticDsl import com.sksamuel.elastic4s.ElasticDsl._ -import com.sksamuel.elastic4s.requests.searches.{Aggregations, DateHistogramInterval, SearchHit, SearchRequest, SearchResponse} +import com.sksamuel.elastic4s.requests.searches.{ + Aggregations, + DateHistogramInterval, + SearchHit, + SearchRequest, + SearchResponse +} import com.sksamuel.elastic4s.requests.searches.queries.Query import lib.elasticsearch._ import lib.querysyntax.{HierarchyField, Match, Phrase} @@ -26,8 +37,16 @@ import scala.concurrent.duration.FiniteDuration import scala.concurrent.{ExecutionContext, Future} import com.sksamuel.elastic4s.requests.searches.aggs.Aggregation -class ElasticSearch(val config: MediaApiConfig, mediaApiMetrics: MediaApiMetrics, elasticConfig: ElasticSearchConfig, overQuotaAgencies: () => List[Agency]) - extends ElasticSearchClient with ImageFields with MatchFields with FutureSyntax with GridLogging { +class ElasticSearch( + val config: MediaApiConfig, + mediaApiMetrics: MediaApiMetrics, + elasticConfig: ElasticSearchConfig, + overQuotaAgencies: () => List[Agency] +) extends ElasticSearchClient + with ImageFields + with MatchFields + with FutureSyntax + with GridLogging { lazy val imagesAlias = elasticConfig.alias lazy val url = elasticConfig.url @@ -38,72 +57,101 @@ class ElasticSearch(val config: MediaApiConfig, mediaApiMetrics: MediaApiMetrics private val SearchQueryTimeout = FiniteDuration(10, TimeUnit.SECONDS) // there is 15 
seconds timeout set on cluster level as well - /** - * int terms of search query timeout in GRID, - * there is a additional config `allow_partial_search_results` - * which is set to true by default, - * which means for example if i ask ES to give me photos that have field foo=bar without timeout it can give me 6500 results - * if i ask the same query with 1ms timeout it may give me for example 4000 results instead - **/ + /** int terms of search query timeout in GRID, + * there is a additional config `allow_partial_search_results` + * which is set to true by default, + * which means for example if i ask ES to give me photos that have field foo=bar without timeout it can give me 6500 results + * if i ask the same query with 1ms timeout it may give me for example 4000 results instead + */ val searchFilters = new SearchFilters(config) val syndicationFilter = new SyndicationFilter(config) val queryBuilder = new QueryBuilder(matchFields, overQuotaAgencies) - def getImageById(id: String)(implicit ex: ExecutionContext, request: AuthenticatedRequest[AnyContent, Principal]): Future[Option[Image]] = { + def getImageById(id: String)(implicit + ex: ExecutionContext, + request: AuthenticatedRequest[AnyContent, Principal] + ): Future[Option[Image]] = { implicit val logMarker = MarkerMap("id" -> id) executeAndLog(get(imagesAlias, id), s"get image by id $id").map { r => r.status match { case Status.OK => mapImageFrom(r.result.sourceAsString, id) - case _ => None + case _ => None } } } - def search(params: SearchParams)(implicit ex: ExecutionContext, request: AuthenticatedRequest[AnyContent, Principal]): Future[SearchResults] = { + def search(params: SearchParams)(implicit + ex: ExecutionContext, + request: AuthenticatedRequest[AnyContent, Principal] + ): Future[SearchResults] = { implicit val logMarker = MarkerMap() def resolveHit(hit: SearchHit) = mapImageFrom(hit.sourceAsString, hit.id) val query: Query = queryBuilder.makeQuery(params.structuredQuery) - val uploadTimeFilter = filters.date("uploadTime", params.since, params.until) - val lastModTimeFilter = filters.date("lastModified", params.modifiedSince, params.modifiedUntil) - val takenTimeFilter = filters.date("metadata.dateTaken", params.takenSince, params.takenUntil) + val uploadTimeFilter = + filters.date("uploadTime", params.since, params.until) + val lastModTimeFilter = + filters.date("lastModified", params.modifiedSince, params.modifiedUntil) + val takenTimeFilter = + filters.date("metadata.dateTaken", params.takenSince, params.takenUntil) // we only inject filters if there are actual date parameters - val dateFilterList = List(uploadTimeFilter, lastModTimeFilter, takenTimeFilter).flatten.toNel - val dateFilter = dateFilterList.map(dateFilters => filters.and(dateFilters.list: _*)) + val dateFilterList = + List(uploadTimeFilter, lastModTimeFilter, takenTimeFilter).flatten.toNel + val dateFilter = + dateFilterList.map(dateFilters => filters.and(dateFilters.list: _*)) val idsFilter = params.ids.map(filters.ids) val labelFilter = params.labels.toNel.map(filters.terms("labels", _)) - val metadataFilter = params.hasMetadata.map(metadataField).toNel.map(filters.exists) - val archivedFilter = params.archived.map(filters.existsOrMissing(editsField("archived"), _)) - val hasExports = params.hasExports.map(filters.existsOrMissing("exports", _)) - val hasIdentifier = params.hasIdentifier.map(idName => filters.exists(NonEmptyList(identifierField(idName)))) - val missingIdentifier = params.missingIdentifier.map(idName => 
filters.missing(NonEmptyList(identifierField(idName)))) - val uploadedByFilter = params.uploadedBy.map(uploadedBy => filters.terms("uploadedBy", NonEmptyList(uploadedBy))) - val simpleCostFilter = params.free.flatMap(free => if (free) searchFilters.freeFilter else searchFilters.nonFreeFilter) + val metadataFilter = + params.hasMetadata.map(metadataField).toNel.map(filters.exists) + val archivedFilter = + params.archived.map(filters.existsOrMissing(editsField("archived"), _)) + val hasExports = + params.hasExports.map(filters.existsOrMissing("exports", _)) + val hasIdentifier = params.hasIdentifier.map(idName => + filters.exists(NonEmptyList(identifierField(idName))) + ) + val missingIdentifier = params.missingIdentifier.map(idName => + filters.missing(NonEmptyList(identifierField(idName))) + ) + val uploadedByFilter = params.uploadedBy.map(uploadedBy => + filters.terms("uploadedBy", NonEmptyList(uploadedBy)) + ) + val simpleCostFilter = params.free.flatMap(free => + if (free) searchFilters.freeFilter else searchFilters.nonFreeFilter + ) val costFilter = params.payType match { - case Some(PayType.Free) => searchFilters.freeFilter + case Some(PayType.Free) => searchFilters.freeFilter case Some(PayType.MaybeFree) => searchFilters.maybeFreeFilter - case Some(PayType.Pay) => searchFilters.nonFreeFilter - case _ => None + case Some(PayType.Pay) => searchFilters.nonFreeFilter + case _ => None } - val hasRightsCategory = params.hasRightsCategory.filter(_ == true).map(_ => searchFilters.hasRightsCategoryFilter) + val hasRightsCategory = params.hasRightsCategory + .filter(_ == true) + .map(_ => searchFilters.hasRightsCategoryFilter) - val validityFilter = params.valid.flatMap(valid => if (valid) searchFilters.validFilter else searchFilters.invalidFilter) + val validityFilter = params.valid.flatMap(valid => + if (valid) searchFilters.validFilter else searchFilters.invalidFilter + ) val persistFilter = params.persisted map { - case true => searchFilters.persistedFilter + case true => searchFilters.persistedFilter case false => searchFilters.nonPersistedFilter } val usageFilter = - params.usageStatus.toNel.map(status => filters.terms("usagesStatus", status.map(_.toString))) ++ + params.usageStatus.toNel.map(status => + filters.terms("usagesStatus", status.map(_.toString)) + ) ++ params.usagePlatform.toNel.map(filters.terms("usagesPlatform", _)) - val syndicationStatusFilter = params.syndicationStatus.map(status => syndicationFilter.statusFilter(status)) + val syndicationStatusFilter = params.syndicationStatus.map(status => + syndicationFilter.statusFilter(status) + ) // Port of special case code in elastic1 sorts. 
Using the dateAddedToCollection sort implies an additional filter for reasons unknown val dateAddedToCollectionFilter = { @@ -111,7 +159,7 @@ class ElasticSearch(val config: MediaApiConfig, mediaApiMetrics: MediaApiMetrics case Some("dateAddedToCollection") => { val pathHierarchyOpt = params.structuredQuery.flatMap { case Match(HierarchyField, Phrase(value)) => Some(value) - case _ => None + case _ => None }.headOption pathHierarchyOpt.map { pathHierarchy => @@ -141,30 +189,43 @@ class ElasticSearch(val config: MediaApiConfig, mediaApiMetrics: MediaApiMetrics ++ searchFilters.tierFilter(params.tier) ++ syndicationStatusFilter ++ dateAddedToCollectionFilter - ).toNel.map(filter => filter.list.reduceLeft(filters.and(_, _))) + ).toNel.map(filter => filter.list.reduceLeft(filters.and(_, _))) - val withFilter = filterOpt.map { f => - boolQuery must (query) filter f - }.getOrElse(query) + val withFilter = filterOpt + .map { f => + boolQuery must (query) filter f + } + .getOrElse(query) val sort = params.orderBy match { - case Some("dateAddedToCollection") => sorts.dateAddedToCollectionDescending + case Some("dateAddedToCollection") => + sorts.dateAddedToCollectionDescending case _ => sorts.createSort(params.orderBy) } // We need to set trackHits to ensure that the total number of hits we return to users is accurate. // See https://www.elastic.co/guide/en/elasticsearch/reference/current/breaking-changes-7.0.html#hits-total-now-object-search-response - val searchRequest = prepareSearch(withFilter).copy(trackHits = Some(true)) from params.offset size params.length sortBy sort - - executeAndLog(searchRequest, "image search"). - toMetric(Some(mediaApiMetrics.searchQueries), List(mediaApiMetrics.searchTypeDimension("results")))(_.result.took).map { r => - logSearchQueryIfTimedOut(searchRequest, r.result) - val imageHits = r.result.hits.hits.map(resolveHit).toSeq.flatten.map(i => (i.id, i)) - SearchResults(hits = imageHits, total = r.result.totalHits) - } + val searchRequest = prepareSearch(withFilter).copy(trackHits = + Some(true) + ) from params.offset size params.length sortBy sort + + executeAndLog(searchRequest, "image search") + .toMetric( + Some(mediaApiMetrics.searchQueries), + List(mediaApiMetrics.searchTypeDimension("results")) + )(_.result.took) + .map { r => + logSearchQueryIfTimedOut(searchRequest, r.result) + val imageHits = + r.result.hits.hits.map(resolveHit).toSeq.flatten.map(i => (i.id, i)) + SearchResults(hits = imageHits, total = r.result.totalHits) + } } - def usageForSupplier(id: String, numDays: Int)(implicit ex: ExecutionContext, request: AuthenticatedRequest[AnyContent, Principal]): Future[SupplierUsageSummary] = { + def usageForSupplier(id: String, numDays: Int)(implicit + ex: ExecutionContext, + request: AuthenticatedRequest[AnyContent, Principal] + ): Future[SupplierUsageSummary] = { implicit val logMarker = MarkerMap() val supplier = Agencies.get(id) val supplierName = supplier.supplier @@ -179,7 +240,9 @@ class ElasticSearch(val config: MediaApiConfig, mediaApiMetrics: MediaApiMetrics val beSupplier = termQuery("usageRights.supplier", supplierName) val haveNestedUsage = nestedQuery("usages", haveUsageInLastPeriod) - val query = boolQuery.must(matchAllQuery()).filter(boolQuery().must(beSupplier, haveNestedUsage)) + val query = boolQuery + .must(matchAllQuery()) + .filter(boolQuery().must(beSupplier, haveNestedUsage)) val search = prepareSearch(query) size 0 @@ -190,74 +253,133 @@ class ElasticSearch(val config: MediaApiConfig, mediaApiMetrics: MediaApiMetrics } } - def 
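// A sketch of the optional-filter plumbing in ElasticSearch.search above: each
// request parameter contributes an Option[Filter], present filters are AND-ed
// together, and the query is left unfiltered when none apply. The elastic4s
// types are replaced by a toy Filter/Query ADT with illustrative names.
object FilterCombinationSketch {
  sealed trait Filter
  final case class Term(field: String, value: String) extends Filter
  final case class And(clauses: List[Filter]) extends Filter

  final case class Query(text: String, filter: Option[Filter] = None)

  def combine(optional: List[Option[Filter]]): Option[Filter] =
    optional.flatten match {
      case Nil      => None
      case f :: Nil => Some(f)
      case fs       => Some(And(fs))
    }

  def main(args: Array[String]): Unit = {
    val uploadedByFilter = Some(Term("uploadedBy", "alice"))
    val labelFilter: Option[Filter] = None

    val filterOpt = combine(List(uploadedByFilter, labelFilter))
    val withFilter = filterOpt
      .map(f => Query("dogs", Some(f)))
      .getOrElse(Query("dogs"))

    println(withFilter) // Query(dogs,Some(Term(uploadedBy,alice)))
  }
}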
dateHistogramAggregate(params: AggregateSearchParams)(implicit ex: ExecutionContext, request: AuthenticatedRequest[AnyContent, Principal]): Future[AggregateSearchResults] = { - - def fromDateHistogramAggregation(name: String, aggregations: Aggregations): Seq[BucketResult] = aggregations.dateHistogram(name). - buckets.map(b => BucketResult(b.date, b.docCount)) - - val aggregation = dateHistogramAggregation(params.field). - field(params.field). - calendarInterval(DateHistogramInterval.Month). - minDocCount(0) - aggregateSearch(params.field, params, aggregation, fromDateHistogramAggregation) + def dateHistogramAggregate(params: AggregateSearchParams)(implicit + ex: ExecutionContext, + request: AuthenticatedRequest[AnyContent, Principal] + ): Future[AggregateSearchResults] = { + + def fromDateHistogramAggregation( + name: String, + aggregations: Aggregations + ): Seq[BucketResult] = aggregations + .dateHistogram(name) + .buckets + .map(b => BucketResult(b.date, b.docCount)) + + val aggregation = dateHistogramAggregation(params.field) + .field(params.field) + .calendarInterval(DateHistogramInterval.Month) + .minDocCount(0) + aggregateSearch( + params.field, + params, + aggregation, + fromDateHistogramAggregation + ) } - def metadataSearch(params: AggregateSearchParams)(implicit ex: ExecutionContext, request: AuthenticatedRequest[AnyContent, Principal]): Future[AggregateSearchResults] = { - aggregateSearch("metadata", params, termsAggregation("metadata").field(metadataField(params.field)), fromTermAggregation) + def metadataSearch(params: AggregateSearchParams)(implicit + ex: ExecutionContext, + request: AuthenticatedRequest[AnyContent, Principal] + ): Future[AggregateSearchResults] = { + aggregateSearch( + "metadata", + params, + termsAggregation("metadata").field(metadataField(params.field)), + fromTermAggregation + ) } - def editsSearch(params: AggregateSearchParams)(implicit ex: ExecutionContext, request: AuthenticatedRequest[AnyContent, Principal]): Future[AggregateSearchResults] = { + def editsSearch(params: AggregateSearchParams)(implicit + ex: ExecutionContext, + request: AuthenticatedRequest[AnyContent, Principal] + ): Future[AggregateSearchResults] = { logger.info("Edit aggregation requested with params.field: " + params.field) val field = "labels" // TODO was - params.field - aggregateSearch("edits", params, termsAggregation("edits").field(editsField(field)), fromTermAggregation) + aggregateSearch( + "edits", + params, + termsAggregation("edits").field(editsField(field)), + fromTermAggregation + ) } - private def fromTermAggregation(name: String, aggregations: Aggregations): Seq[BucketResult] = aggregations.terms(name). 
- buckets.map(b => BucketResult(b.key, b.docCount)) - - private def aggregateSearch(name: String, params: AggregateSearchParams, aggregation: Aggregation, extract: (String, Aggregations) => Seq[BucketResult])(implicit ex: ExecutionContext): Future[AggregateSearchResults] = { + private def fromTermAggregation( + name: String, + aggregations: Aggregations + ): Seq[BucketResult] = + aggregations.terms(name).buckets.map(b => BucketResult(b.key, b.docCount)) + + private def aggregateSearch( + name: String, + params: AggregateSearchParams, + aggregation: Aggregation, + extract: (String, Aggregations) => Seq[BucketResult] + )(implicit ex: ExecutionContext): Future[AggregateSearchResults] = { implicit val logMarker = MarkerMap() - logger.info("aggregate search: " + name + " / " + params + " / " + aggregation) + logger.info( + "aggregate search: " + name + " / " + params + " / " + aggregation + ) val query = queryBuilder.makeQuery(params.structuredQuery) val search = prepareSearch(query) aggregations aggregation size 0 executeAndLog(search, s"$name aggregate search") - .toMetric(Some(mediaApiMetrics.searchQueries), List(mediaApiMetrics.searchTypeDimension("aggregate")))(_.result.took).map { r => - logSearchQueryIfTimedOut(search, r.result) - searchResultToAggregateResponse(r.result, name, extract) - } + .toMetric( + Some(mediaApiMetrics.searchQueries), + List(mediaApiMetrics.searchTypeDimension("aggregate")) + )(_.result.took) + .map { r => + logSearchQueryIfTimedOut(search, r.result) + searchResultToAggregateResponse(r.result, name, extract) + } } - private def searchResultToAggregateResponse(response: SearchResponse, aggregateName: String, extract: (String, Aggregations) => Seq[BucketResult]): AggregateSearchResults = { + private def searchResultToAggregateResponse( + response: SearchResponse, + aggregateName: String, + extract: (String, Aggregations) => Seq[BucketResult] + ): AggregateSearchResults = { val results = extract(aggregateName, response.aggregations) AggregateSearchResults(results, results.size) } - def completionSuggestion(name: String, q: String, size: Int)(implicit ex: ExecutionContext, request: AuthenticatedRequest[AnyContent, Principal]): Future[CompletionSuggestionResults] = { + def completionSuggestion(name: String, q: String, size: Int)(implicit + ex: ExecutionContext, + request: AuthenticatedRequest[AnyContent, Principal] + ): Future[CompletionSuggestionResults] = { implicit val logMarker = MarkerMap() val completionSuggestion = ElasticDsl.completionSuggestion(name, name).text(q).skipDuplicates(true) val search = ElasticDsl.search(imagesAlias) suggestions completionSuggestion - executeAndLog(search, "completion suggestion query"). 
- toMetric(Some(mediaApiMetrics.searchQueries), List(mediaApiMetrics.searchTypeDimension("suggestion-completion")))(_.result.took).map { r => - logSearchQueryIfTimedOut(search, r.result) - val x = r.result.suggestions.get(name).map { suggestions => - suggestions.flatMap { s => - s.toCompletion.options.map { o => - CompletionSuggestionResult(o.text, o.score.toFloat) + executeAndLog(search, "completion suggestion query") + .toMetric( + Some(mediaApiMetrics.searchQueries), + List(mediaApiMetrics.searchTypeDimension("suggestion-completion")) + )(_.result.took) + .map { r => + logSearchQueryIfTimedOut(search, r.result) + val x = r.result.suggestions + .get(name) + .map { suggestions => + suggestions.flatMap { s => + s.toCompletion.options.map { o => + CompletionSuggestionResult(o.text, o.score.toFloat) + } + } } - } - }.getOrElse(Seq.empty) - CompletionSuggestionResults(x.toList) - } + .getOrElse(Seq.empty) + CompletionSuggestionResults(x.toList) + } } - def totalImages()(implicit ex: ExecutionContext): Future[Long] = client.execute(ElasticDsl.search(imagesAlias)).map { - _.result.totalHits - } + def totalImages()(implicit ex: ExecutionContext): Future[Long] = + client.execute(ElasticDsl.search(imagesAlias)).map { + _.result.totalHits + } - def withSearchQueryTimeout(sr: SearchRequest): SearchRequest = sr timeout SearchQueryTimeout + def withSearchQueryTimeout(sr: SearchRequest): SearchRequest = + sr timeout SearchQueryTimeout private def prepareSearch(query: Query): SearchRequest = { val sr = ElasticDsl.search(imagesAlias) query query @@ -268,12 +390,20 @@ class ElasticSearch(val config: MediaApiConfig, mediaApiMetrics: MediaApiMetrics Json.parse(sourceAsString).validate[Image] match { case i: JsSuccess[Image] => Some(i.value) case e: JsError => - logger.error("Failed to parse image from source string " + id + ": " + e.toString) + logger.error( + "Failed to parse image from source string " + id + ": " + e.toString + ) None } } - private def logSearchQueryIfTimedOut(req: SearchRequest, res: SearchResponse) = - if (res.isTimedOut) logger.info(s"SearchQuery was TimedOut after $SearchQueryTimeout \nquery: ${req.show}") + private def logSearchQueryIfTimedOut( + req: SearchRequest, + res: SearchResponse + ) = + if (res.isTimedOut) + logger.info( + s"SearchQuery was TimedOut after $SearchQueryTimeout \nquery: ${req.show}" + ) } diff --git a/media-api/app/lib/elasticsearch/ElasticSearchModel.scala b/media-api/app/lib/elasticsearch/ElasticSearchModel.scala index 12ad4b273b..5986ea670e 100644 --- a/media-api/app/lib/elasticsearch/ElasticSearchModel.scala +++ b/media-api/app/lib/elasticsearch/ElasticSearchModel.scala @@ -25,7 +25,9 @@ object CompletionSuggestionResult { implicit val jsonWrites = Json.writes[CompletionSuggestionResult] } -case class CompletionSuggestionResults(results: List[CompletionSuggestionResult]) +case class CompletionSuggestionResults( + results: List[CompletionSuggestionResult] +) object CompletionSuggestionResults { implicit val jsonWrites = Json.writes[CompletionSuggestionResults] @@ -37,14 +39,19 @@ object BucketResult { implicit val jsonWrites = Json.writes[BucketResult] } -case class AggregateSearchParams(field: String, - q: Option[String], - structuredQuery: List[Condition]) +case class AggregateSearchParams( + field: String, + q: Option[String], + structuredQuery: List[Condition] +) object AggregateSearchParams { def parseIntFromQuery(s: String): Option[Int] = Try(s.toInt).toOption - def apply(field: String, request: Request[AnyContent]): AggregateSearchParams = { + def 
apply( + field: String, + request: Request[AnyContent] + ): AggregateSearchParams = { val query = request.getQueryString("q") val structuredQuery = query.map(Parser.run) getOrElse List[Condition]() new AggregateSearchParams( @@ -56,35 +63,35 @@ object AggregateSearchParams { } case class SearchParams( - query: Option[String] = None, - structuredQuery: List[Condition] = List.empty, - ids: Option[List[String]] = None, - offset: Int = 0, - length: Int = 10, - orderBy: Option[String] = None, - since: Option[DateTime] = None, - until: Option[DateTime] = None, - modifiedSince: Option[DateTime] = None, - modifiedUntil: Option[DateTime] = None, - takenSince: Option[DateTime] = None, - takenUntil: Option[DateTime] = None, - archived: Option[Boolean] = None, - hasExports: Option[Boolean] = None, - hasIdentifier: Option[String] = None, - missingIdentifier: Option[String] = None, - valid: Option[Boolean] = None, - free: Option[Boolean] = None, - payType: Option[PayType.Value] = None, - hasRightsCategory: Option[Boolean] = None, - uploadedBy: Option[String] = None, - labels: List[String] = List.empty, - hasMetadata: List[String] = List.empty, - persisted: Option[Boolean] = None, - usageStatus: List[UsageStatus] = List.empty, - usagePlatform: List[String] = List.empty, - tier: Tier, - syndicationStatus: Option[SyndicationStatus] = None - ) + query: Option[String] = None, + structuredQuery: List[Condition] = List.empty, + ids: Option[List[String]] = None, + offset: Int = 0, + length: Int = 10, + orderBy: Option[String] = None, + since: Option[DateTime] = None, + until: Option[DateTime] = None, + modifiedSince: Option[DateTime] = None, + modifiedUntil: Option[DateTime] = None, + takenSince: Option[DateTime] = None, + takenUntil: Option[DateTime] = None, + archived: Option[Boolean] = None, + hasExports: Option[Boolean] = None, + hasIdentifier: Option[String] = None, + missingIdentifier: Option[String] = None, + valid: Option[Boolean] = None, + free: Option[Boolean] = None, + payType: Option[PayType.Value] = None, + hasRightsCategory: Option[Boolean] = None, + uploadedBy: Option[String] = None, + labels: List[String] = List.empty, + hasMetadata: List[String] = List.empty, + persisted: Option[Boolean] = None, + usageStatus: List[UsageStatus] = List.empty, + usagePlatform: List[String] = List.empty, + tier: Tier, + syndicationStatus: Option[SyndicationStatus] = None +) case class InvalidUriParams(message: String) extends Throwable object InvalidUriParams { @@ -99,27 +106,34 @@ object PayType extends Enumeration { val Pay = Value("pay") def create(s: String) = s match { - case "free" => Some(Free) + case "free" => Some(Free) case "maybe-free" => Some(MaybeFree) - case "all" => Some(All) - case "pay" => Some(Pay) - case _ => None + case "all" => Some(All) + case "pay" => Some(Pay) + case _ => None } } object SearchParams { def commasToList(s: String): List[String] = s.trim.split(',').toList - def listToCommas(list: List[String]): Option[String] = list.toNel.map(_.list.mkString(",")) + def listToCommas(list: List[String]): Option[String] = + list.toNel.map(_.list.mkString(",")) // TODO: return descriptive 400 error if invalid def parseIntFromQuery(s: String): Option[Int] = Try(s.toInt).toOption - def parsePayTypeFromQuery(s: String): Option[PayType.Value] = PayType.create(s) - def parseBooleanFromQuery(s: String): Option[Boolean] = Try(s.toBoolean).toOption - def parseSyndicationStatus(s: String): Option[SyndicationStatus] = Some(SyndicationStatus(s)) + def parsePayTypeFromQuery(s: String): Option[PayType.Value] 
= + PayType.create(s) + def parseBooleanFromQuery(s: String): Option[Boolean] = Try( + s.toBoolean + ).toOption + def parseSyndicationStatus(s: String): Option[SyndicationStatus] = Some( + SyndicationStatus(s) + ) def apply(request: Authentication.Request[Any]): SearchParams = { - def commaSep(key: String): List[String] = request.getQueryString(key).toList.flatMap(commasToList) + def commaSep(key: String): List[String] = + request.getQueryString(key).toList.flatMap(commasToList) val query = request.getQueryString("q") val structuredQuery = query.map(Parser.run) getOrElse List() @@ -156,35 +170,34 @@ object SearchParams { ) } - def toStringMap(searchParams: SearchParams): Map[String, String] = Map( - "q" -> searchParams.query, - "ids" -> searchParams.ids.map(_.mkString(",")), - "offset" -> Some(searchParams.offset.toString), - "length" -> Some(searchParams.length.toString), - "since" -> searchParams.since.map(printDateTime), - "until" -> searchParams.until.map(printDateTime), - "modifiedSince" -> searchParams.modifiedSince.map(printDateTime), - "modifiedUntil" -> searchParams.modifiedUntil.map(printDateTime), - "takenSince" -> searchParams.takenSince.map(printDateTime), - "takenUntil" -> searchParams.takenUntil.map(printDateTime), - "archived" -> searchParams.archived.map(_.toString), - "hasExports" -> searchParams.hasExports.map(_.toString), - "hasIdentifier" -> searchParams.hasIdentifier, + "q" -> searchParams.query, + "ids" -> searchParams.ids.map(_.mkString(",")), + "offset" -> Some(searchParams.offset.toString), + "length" -> Some(searchParams.length.toString), + "since" -> searchParams.since.map(printDateTime), + "until" -> searchParams.until.map(printDateTime), + "modifiedSince" -> searchParams.modifiedSince.map(printDateTime), + "modifiedUntil" -> searchParams.modifiedUntil.map(printDateTime), + "takenSince" -> searchParams.takenSince.map(printDateTime), + "takenUntil" -> searchParams.takenUntil.map(printDateTime), + "archived" -> searchParams.archived.map(_.toString), + "hasExports" -> searchParams.hasExports.map(_.toString), + "hasIdentifier" -> searchParams.hasIdentifier, "missingIdentifier" -> searchParams.missingIdentifier, - "valid" -> searchParams.valid.map(_.toString), - "free" -> searchParams.free.map(_.toString), - "payType" -> searchParams.payType.map(_.toString), - "uploadedBy" -> searchParams.uploadedBy, - "labels" -> listToCommas(searchParams.labels), - "hasMetadata" -> listToCommas(searchParams.hasMetadata), - "persisted" -> searchParams.persisted.map(_.toString), - "usageStatus" -> listToCommas(searchParams.usageStatus.map(_.toString)), - "usagePlatform" -> listToCommas(searchParams.usagePlatform) + "valid" -> searchParams.valid.map(_.toString), + "free" -> searchParams.free.map(_.toString), + "payType" -> searchParams.payType.map(_.toString), + "uploadedBy" -> searchParams.uploadedBy, + "labels" -> listToCommas(searchParams.labels), + "hasMetadata" -> listToCommas(searchParams.hasMetadata), + "persisted" -> searchParams.persisted.map(_.toString), + "usageStatus" -> listToCommas(searchParams.usageStatus.map(_.toString)), + "usagePlatform" -> listToCommas(searchParams.usagePlatform) ).foldLeft(Map[String, String]()) { case (acc, (key, Some(value))) => acc + (key -> value) - case (acc, (_, None)) => acc + case (acc, (_, None)) => acc } type SearchParamValidation = Validation[InvalidUriParams, SearchParams] @@ -194,15 +207,21 @@ object SearchParams { def validate(searchParams: SearchParams): SearchParamValidations = { // we just need to return the first `searchParams` 
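// A sketch of the Option-dropping fold at the end of SearchParams.toStringMap
// above: parameters whose value is None are omitted from the resulting
// query-string map, present values are unwrapped. The same result could also be
// written as params.collect { case (k, Some(v)) => k -> v }.
object QueryStringMapSketch {
  def toStringMap(params: Map[String, Option[String]]): Map[String, String] =
    params.foldLeft(Map[String, String]()) {
      case (acc, (key, Some(value))) => acc + (key -> value)
      case (acc, (_, None))          => acc
    }

  def main(args: Array[String]): Unit = {
    println(toStringMap(Map("q" -> Some("dogs"), "free" -> None, "length" -> Some("10"))))
    // Map(q -> dogs, length -> 10)
  }
}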
as we don't need to manipulate them // TODO: try reduce these - (validateLength(searchParams).toValidationNel |@| validateOffset(searchParams).toValidationNel)((s1, s2) => s1) + (validateLength(searchParams).toValidationNel |@| validateOffset( + searchParams + ).toValidationNel)((s1, s2) => s1) } def validateOffset(searchParams: SearchParams): SearchParamValidation = { - if (searchParams.offset < 0) InvalidUriParams("offset cannot be less than 0").failure else searchParams.success + if (searchParams.offset < 0) + InvalidUriParams("offset cannot be less than 0").failure + else searchParams.success } def validateLength(searchParams: SearchParams): SearchParamValidation = { - if (searchParams.length > maxSize) InvalidUriParams(s"length cannot exceed $maxSize").failure else searchParams.success + if (searchParams.length > maxSize) + InvalidUriParams(s"length cannot exceed $maxSize").failure + else searchParams.success } } diff --git a/media-api/app/lib/elasticsearch/IsQueryFilter.scala b/media-api/app/lib/elasticsearch/IsQueryFilter.scala index 28f545d8da..cc535622d9 100644 --- a/media-api/app/lib/elasticsearch/IsQueryFilter.scala +++ b/media-api/app/lib/elasticsearch/IsQueryFilter.scala @@ -10,44 +10,60 @@ sealed trait IsQueryFilter extends Query with ImageFields { def query: Query override def toString: String = this match { - case IsOwnedPhotograph => "gnm-owned-photo" + case IsOwnedPhotograph => "gnm-owned-photo" case IsOwnedIllustration => "gnm-owned-illustration" - case IsOwnedImage => "gnm-owned" - case _: IsUnderQuota => "under-quota" + case IsOwnedImage => "gnm-owned" + case _: IsUnderQuota => "under-quota" } } object IsQueryFilter { // for readability, the client capitalises gnm, so `toLowerCase` it before matching - def apply(value: String, overQuotaAgencies: () => List[Agency]): Option[IsQueryFilter] = value.toLowerCase match { - case "gnm-owned-photo" => Some(IsOwnedPhotograph) + def apply( + value: String, + overQuotaAgencies: () => List[Agency] + ): Option[IsQueryFilter] = value.toLowerCase match { + case "gnm-owned-photo" => Some(IsOwnedPhotograph) case "gnm-owned-illustration" => Some(IsOwnedIllustration) - case "gnm-owned" => Some(IsOwnedImage) - case "under-quota" => Some(IsUnderQuota(overQuotaAgencies())) - case _ => None + case "gnm-owned" => Some(IsOwnedImage) + case "under-quota" => Some(IsUnderQuota(overQuotaAgencies())) + case _ => None } } object IsOwnedPhotograph extends IsQueryFilter { override def query: Query = filters.or( - filters.terms(usageRightsField("category"), UsageRights.photographer.toNel.get.map(_.category)) + filters.terms( + usageRightsField("category"), + UsageRights.photographer.toNel.get.map(_.category) + ) ) } object IsOwnedIllustration extends IsQueryFilter { override def query: Query = filters.or( - filters.terms(usageRightsField("category"), UsageRights.illustrator.toNel.get.map(_.category)) + filters.terms( + usageRightsField("category"), + UsageRights.illustrator.toNel.get.map(_.category) + ) ) } object IsOwnedImage extends IsQueryFilter { override def query: Query = filters.or( - filters.terms(usageRightsField("category"), UsageRights.whollyOwned.toNel.get.map(_.category)) + filters.terms( + usageRightsField("category"), + UsageRights.whollyOwned.toNel.get.map(_.category) + ) ) } case class IsUnderQuota(overQuotaAgencies: List[Agency]) extends IsQueryFilter { override def query: Query = overQuotaAgencies.toNel - .map(agency => filters.mustNot(filters.terms(usageRightsField("supplier"), agency.map(_.supplier)))) + .map(agency => + 
filters.mustNot( + filters.terms(usageRightsField("supplier"), agency.map(_.supplier)) + ) + ) .getOrElse(matchAllQuery) } diff --git a/media-api/app/lib/elasticsearch/MatchFields.scala b/media-api/app/lib/elasticsearch/MatchFields.scala index 69154aee94..3fc9a91eff 100644 --- a/media-api/app/lib/elasticsearch/MatchFields.scala +++ b/media-api/app/lib/elasticsearch/MatchFields.scala @@ -7,12 +7,25 @@ trait MatchFields extends ImageFields { def config: MediaApiConfig - val matchFields: Seq[String] = Seq("id") ++ Seq("mimeType").map(sourceField) ++ - Seq("description", "title", "byline", "source", "credit", "keywords", - "subLocation", "city", "state", "country", "suppliersReference", - "peopleInImage", "englishAnalysedCatchAll").map(metadataField) ++ - Seq("labels").map(editsField) ++ - config.queriableIdentifiers.map(identifierField) ++ - Seq("restrictions").map(usageRightsField) + val matchFields: Seq[String] = + Seq("id") ++ Seq("mimeType").map(sourceField) ++ + Seq( + "description", + "title", + "byline", + "source", + "credit", + "keywords", + "subLocation", + "city", + "state", + "country", + "suppliersReference", + "peopleInImage", + "englishAnalysedCatchAll" + ).map(metadataField) ++ + Seq("labels").map(editsField) ++ + config.queriableIdentifiers.map(identifierField) ++ + Seq("restrictions").map(usageRightsField) } diff --git a/media-api/app/lib/elasticsearch/QueryBuilder.scala b/media-api/app/lib/elasticsearch/QueryBuilder.scala index cf00e25f92..1a53274957 100644 --- a/media-api/app/lib/elasticsearch/QueryBuilder.scala +++ b/media-api/app/lib/elasticsearch/QueryBuilder.scala @@ -9,54 +9,87 @@ import com.sksamuel.elastic4s.ElasticDsl import com.sksamuel.elastic4s.ElasticDsl._ import com.sksamuel.elastic4s.requests.common.Operator import com.sksamuel.elastic4s.requests.searches.queries.Query -import com.sksamuel.elastic4s.requests.searches.queries.matches.{MultiMatchQuery, MultiMatchQueryBuilderType} +import com.sksamuel.elastic4s.requests.searches.queries.matches.{ + MultiMatchQuery, + MultiMatchQueryBuilderType +} import lib.querysyntax._ -class QueryBuilder(matchFields: Seq[String], overQuotaAgencies: () => List[Agency]) extends ImageFields with GridLogging { +class QueryBuilder( + matchFields: Seq[String], + overQuotaAgencies: () => List[Agency] +) extends ImageFields + with GridLogging { // For some sad reason, there was no helpful alias for this in the ES library - private def multiMatchPhraseQuery(value: String, fields: Seq[String]): MultiMatchQuery = - ElasticDsl.multiMatchQuery(value).fields(fields).matchType(MultiMatchQueryBuilderType.PHRASE) + private def multiMatchPhraseQuery( + value: String, + fields: Seq[String] + ): MultiMatchQuery = + ElasticDsl + .multiMatchQuery(value) + .fields(fields) + .matchType(MultiMatchQueryBuilderType.PHRASE) - private def makeMultiQuery(value: Value, fields: Seq[String]): MultiMatchQuery = value match { - case Words(value) => ElasticDsl.multiMatchQuery(value).fields(fields). - operator(Operator.AND). 
- matchType(MultiMatchQueryBuilderType.CROSS_FIELDS) + private def makeMultiQuery( + value: Value, + fields: Seq[String] + ): MultiMatchQuery = value match { + case Words(value) => + ElasticDsl + .multiMatchQuery(value) + .fields(fields) + .operator(Operator.AND) + .matchType(MultiMatchQueryBuilderType.CROSS_FIELDS) case Phrase(string) => multiMatchPhraseQuery(string, fields) // That's OK, we only do date queries on a single field at a time case e => throw InvalidQuery(s"Cannot do multiQuery on $e") } private def makeQueryBit(condition: Match): Query = condition.field match { - case AnyField => makeMultiQuery(condition.value, matchFields) + case AnyField => makeMultiQuery(condition.value, matchFields) case MultipleField(fields) => makeMultiQuery(condition.value, fields) - case SingleField(field) => condition.value match { - // Force AND operator else it will only require *any* of the words, not *all* - case Words(value) => matchQuery(getFieldPath(field), value).operator(Operator.AND) - case Phrase(value) => matchPhraseQuery(getFieldPath(field), value) - case DateRange(start, end) => rangeQuery(getFieldPath(field)).gte(printDateTime(start)).lte(printDateTime(end)) - case e => throw InvalidQuery(s"Cannot do single field query on $e") - } - case HierarchyField => condition.value match { - case Phrase(value) => termQuery(getFieldPath("pathHierarchy"), value) - case _ => throw InvalidQuery("Cannot accept non-Phrase value for HierarchyField Match") - } - case HasField => condition.value match { - case HasValue(value) => boolQuery().filter(existsQuery(getFieldPath(value))) - case _ => throw InvalidQuery(s"Cannot perform has field on ${condition.value}") - } - case IsField => condition.value match { - case IsValue(value) => IsQueryFilter.apply(value, overQuotaAgencies) match { - case Some(isQuery) => isQuery.query + case SingleField(field) => + condition.value match { + // Force AND operator else it will only require *any* of the words, not *all* + case Words(value) => + matchQuery(getFieldPath(field), value).operator(Operator.AND) + case Phrase(value) => matchPhraseQuery(getFieldPath(field), value) + case DateRange(start, end) => + rangeQuery(getFieldPath(field)) + .gte(printDateTime(start)) + .lte(printDateTime(end)) + case e => throw InvalidQuery(s"Cannot do single field query on $e") + } + case HierarchyField => + condition.value match { + case Phrase(value) => termQuery(getFieldPath("pathHierarchy"), value) + case _ => + throw InvalidQuery( + "Cannot accept non-Phrase value for HierarchyField Match" + ) + } + case HasField => + condition.value match { + case HasValue(value) => + boolQuery().filter(existsQuery(getFieldPath(value))) + case _ => + throw InvalidQuery(s"Cannot perform has field on ${condition.value}") + } + case IsField => + condition.value match { + case IsValue(value) => + IsQueryFilter.apply(value, overQuotaAgencies) match { + case Some(isQuery) => isQuery.query + case _ => { + logger.info(s"Cannot perform IS query on ${condition.value}") + matchNoneQuery + } + } case _ => { logger.info(s"Cannot perform IS query on ${condition.value}") matchNoneQuery } } - case _ => { - logger.info(s"Cannot perform IS query on ${condition.value}") - matchNoneQuery - } - } } def makeQuery(conditions: List[Condition]) = conditions match { @@ -69,9 +102,9 @@ class QueryBuilder(matchFields: Seq[String], overQuotaAgencies: () => List[Agenc ) val query = normal.foldLeft(boolQuery) { - case (query, Negation(cond)) => query.withNot(makeQueryBit(cond)) - case (query, cond@Match(_, _)) => 
query.withMust(makeQueryBit(cond)) - case (query, _) => query + case (query, Negation(cond)) => query.withNot(makeQueryBit(cond)) + case (query, cond @ Match(_, _)) => query.withMust(makeQueryBit(cond)) + case (query, _) => query } val nestedQueries = nested @@ -80,16 +113,21 @@ class QueryBuilder(matchFields: Seq[String], overQuotaAgencies: () => List[Agenc case (parent: SingleField, n: List[Nested]) => { val nested = n.foldLeft(boolQuery) { - case (query, Nested(_, f, v)) => query.withMust(makeQueryBit(Match(f, v))) + case (query, Nested(_, f, v)) => + query.withMust(makeQueryBit(Match(f, v))) case (query, _) => query } nestedQuery(parent.name, nested) } - case _ => throw InvalidQuery("Can only accept SingleField for Nested Query parent") + case _ => + throw InvalidQuery( + "Can only accept SingleField for Nested Query parent" + ) - }.toList + } + .toList nestedQueries.foldLeft(query) { case (q, nestedQ) => q.withMust(nestedQ) } } diff --git a/media-api/app/lib/elasticsearch/SearchFilters.scala b/media-api/app/lib/elasticsearch/SearchFilters.scala index a5e1d73ef9..1f2c98b7d8 100644 --- a/media-api/app/lib/elasticsearch/SearchFilters.scala +++ b/media-api/app/lib/elasticsearch/SearchFilters.scala @@ -9,7 +9,7 @@ import lib.MediaApiConfig import scalaz.NonEmptyList import scalaz.syntax.std.list._ -class SearchFilters(config: MediaApiConfig) extends ImageFields { +class SearchFilters(config: MediaApiConfig) extends ImageFields { val syndicationFilter = new SyndicationFilter(config) @@ -17,38 +17,60 @@ class SearchFilters(config: MediaApiConfig) extends ImageFields { // Warning: The current media-api definition of invalid includes other requirements // so does not match this filter exactly! - val validFilter: Option[Query] = config.requiredMetadata.map(metadataField).toNel.map(filters.exists) - val invalidFilter: Option[Query] = config.requiredMetadata.map(metadataField).toNel.map(filters.anyMissing) + val validFilter: Option[Query] = + config.requiredMetadata.map(metadataField).toNel.map(filters.exists) + val invalidFilter: Option[Query] = + config.requiredMetadata.map(metadataField).toNel.map(filters.anyMissing) - val (suppliersWithExclusions, suppliersNoExclusions) = freeSuppliers.partition(suppliersCollectionExcl.contains) + val (suppliersWithExclusions, suppliersNoExclusions) = + freeSuppliers.partition(suppliersCollectionExcl.contains) val suppliersWithExclusionsFilters: List[Query] = for { - supplier <- suppliersWithExclusions - excludedCollections <- suppliersCollectionExcl.get(supplier).flatMap(_.toNel) + supplier <- suppliersWithExclusions + excludedCollections <- suppliersCollectionExcl + .get(supplier) + .flatMap(_.toNel) } yield { filters.mustWithMustNot( filters.term(usageRightsField("supplier"), supplier), - filters.terms(usageRightsField("suppliersCollection"), excludedCollections) + filters.terms( + usageRightsField("suppliersCollection"), + excludedCollections + ) ) } - val suppliersWithExclusionsFilter: Option[Query] = suppliersWithExclusionsFilters.toNel.map(filters.or) - val suppliersNoExclusionsFilter: Option[Query] = suppliersNoExclusions.toNel.map(filters.terms(usageRightsField("supplier"), _)) - val freeSupplierFilter: Option[Query] = filterOrFilter(suppliersWithExclusionsFilter, suppliersNoExclusionsFilter) + val suppliersWithExclusionsFilter: Option[Query] = + suppliersWithExclusionsFilters.toNel.map(filters.or) + val suppliersNoExclusionsFilter: Option[Query] = + suppliersNoExclusions.toNel.map( + filters.terms(usageRightsField("supplier"), _) + ) + val 
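// A sketch of the condition fold in QueryBuilder.makeQuery above: Match
// conditions become MUST clauses, Negations become MUST NOT, and nested
// conditions are handled separately. The elastic4s bool query is replaced by a
// toy accumulator; the Condition types are cut-down stand-ins for lib.querysyntax.
object QueryFoldSketch {
  sealed trait Condition
  final case class Match(field: String, value: String) extends Condition
  final case class Negation(cond: Match) extends Condition
  final case class Nested(parent: String, field: String, value: String) extends Condition

  final case class Bool(must: List[Match] = Nil, mustNot: List[Match] = Nil)

  def makeQuery(conditions: List[Condition]): Bool =
    conditions.foldLeft(Bool()) {
      case (q, Negation(cond))     => q.copy(mustNot = q.mustNot :+ cond)
      case (q, cond @ Match(_, _)) => q.copy(must = q.must :+ cond)
      case (q, _)                  => q // nested conditions handled separately
    }

  def main(args: Array[String]): Unit = {
    println(makeQuery(List(Match("credit", "Reuters"), Negation(Match("labels", "archive")))))
    // Bool(List(Match(credit,Reuters)),List(Match(labels,archive)))
  }
}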
freeSupplierFilter: Option[Query] = + filterOrFilter(suppliersWithExclusionsFilter, suppliersNoExclusionsFilter) // We're showing `Conditional` here too as we're considering them potentially // free. We could look into sending over the search query as a cost filter // that could take a comma separated list e.g. `cost=free,conditional`. - val freeUsageRightsFilter: Option[Query] = freeToUseCategories.toNel.map(filters.terms(usageRightsField("category"), _)) + val freeUsageRightsFilter: Option[Query] = freeToUseCategories.toNel.map( + filters.terms(usageRightsField("category"), _) + ) - val hasRightsCategoryFilter: Query = filters.existsOrMissing(usageRightsField("category"), exists = true) + val hasRightsCategoryFilter: Query = + filters.existsOrMissing(usageRightsField("category"), exists = true) - val freeFilter: Option[Query] = filterOrFilter(freeSupplierFilter, freeUsageRightsFilter) + val freeFilter: Option[Query] = + filterOrFilter(freeSupplierFilter, freeUsageRightsFilter) val nonFreeFilter: Option[Query] = freeFilter.map(filters.not) - val maybeFreeFilter: Option[Query] = filterOrFilter(freeFilter, Some(filters.not(hasRightsCategoryFilter))) + val maybeFreeFilter: Option[Query] = + filterOrFilter(freeFilter, Some(filters.not(hasRightsCategoryFilter))) lazy val freeToUseCategories: List[String] = - UsageRights.all.filter(ur => ur.defaultCost.exists(cost => cost == Free || cost == Conditional)).map(ur => ur.category) + UsageRights.all + .filter(ur => + ur.defaultCost.exists(cost => cost == Free || cost == Conditional) + ) + .map(ur => ur.category) val persistedCategories = NonEmptyList( StaffPhotographer.category, @@ -60,13 +82,25 @@ class SearchFilters(config: MediaApiConfig) extends ImageFields { CommissionedAgency.category ) - val hasCrops = filters.bool.must(filters.existsOrMissing("exports", exists = true)) - val usedInContent = filters.nested("usages", filters.exists(NonEmptyList("usages"))) - val existedPreGrid = filters.exists(NonEmptyList(identifierField(config.persistenceIdentifier))) - val addedToLibrary = filters.bool.must(filters.boolTerm(editsField("archived"), value = true)) - val hasUserEditsToImageMetadata = filters.exists(NonEmptyList(editsField("metadata"))) - val hasPersistedUsageRights = filters.bool.must(filters.terms(usageRightsField("category"), persistedCategories)) - val addedGNMArchiveOrPersistedCollections = filters.bool.must(filters.terms(collectionsField("path"), config.persistedRootCollections.toNel.get)) + val hasCrops = + filters.bool.must(filters.existsOrMissing("exports", exists = true)) + val usedInContent = + filters.nested("usages", filters.exists(NonEmptyList("usages"))) + val existedPreGrid = + filters.exists(NonEmptyList(identifierField(config.persistenceIdentifier))) + val addedToLibrary = + filters.bool.must(filters.boolTerm(editsField("archived"), value = true)) + val hasUserEditsToImageMetadata = + filters.exists(NonEmptyList(editsField("metadata"))) + val hasPersistedUsageRights = filters.bool.must( + filters.terms(usageRightsField("category"), persistedCategories) + ) + val addedGNMArchiveOrPersistedCollections = filters.bool.must( + filters.terms( + collectionsField("path"), + config.persistedRootCollections.toNel.get + ) + ) val addedToPhotoshoot = filters.exists(NonEmptyList(editsField("photoshoot"))) val hasLabels = filters.exists(NonEmptyList(editsField("labels"))) @@ -85,18 +119,27 @@ class SearchFilters(config: MediaApiConfig) extends ImageFields { val nonPersistedFilter: Query = filters.not(persistedFilter) def tierFilter(tier: 
Tier): Option[Query] = tier match { - case Syndication => Some(syndicationFilter.statusFilter(QueuedForSyndication)) + case Syndication => + Some(syndicationFilter.statusFilter(QueuedForSyndication)) case _ => None } - def filterOrFilter(filter: Option[Query], orFilter: Option[Query]): Option[Query] = (filter, orFilter) match { - case (Some(someFilter), Some(orSomeFilter)) => Some(filters.or(someFilter, orSomeFilter)) - case (filterOpt, orFilterOpt) => filterOpt orElse orFilterOpt + def filterOrFilter( + filter: Option[Query], + orFilter: Option[Query] + ): Option[Query] = (filter, orFilter) match { + case (Some(someFilter), Some(orSomeFilter)) => + Some(filters.or(someFilter, orSomeFilter)) + case (filterOpt, orFilterOpt) => filterOpt orElse orFilterOpt } - def filterAndFilter(filter: Option[Query], andFilter: Option[Query]): Option[Query] = (filter, andFilter) match { - case (Some(someFilter), Some(andSomeFilter)) => Some(filters.and(someFilter, andSomeFilter)) - case (filterOpt, andFilterOpt) => filterOpt orElse andFilterOpt + def filterAndFilter( + filter: Option[Query], + andFilter: Option[Query] + ): Option[Query] = (filter, andFilter) match { + case (Some(someFilter), Some(andSomeFilter)) => + Some(filters.and(someFilter, andSomeFilter)) + case (filterOpt, andFilterOpt) => filterOpt orElse andFilterOpt } } diff --git a/media-api/app/lib/elasticsearch/SyndicationFilter.scala b/media-api/app/lib/elasticsearch/SyndicationFilter.scala index 69c060b09b..dc6fd892e0 100644 --- a/media-api/app/lib/elasticsearch/SyndicationFilter.scala +++ b/media-api/app/lib/elasticsearch/SyndicationFilter.scala @@ -2,7 +2,10 @@ package lib.elasticsearch import com.gu.mediaservice.lib.ImageFields import com.gu.mediaservice.model._ -import com.gu.mediaservice.model.leases.{AllowSyndicationLease, DenySyndicationLease} +import com.gu.mediaservice.model.leases.{ + AllowSyndicationLease, + DenySyndicationLease +} import com.gu.mediaservice.model.usage.SyndicationUsage import com.sksamuel.elastic4s.requests.searches.queries.Query import lib.MediaApiConfig @@ -12,13 +15,15 @@ class SyndicationFilter(config: MediaApiConfig) extends ImageFields { val isSyndicationDateFilterActive = config.isProd - private def syndicationRightsAcquired(acquired: Boolean): Query = filters.boolTerm( - field = "syndicationRights.rights.acquired", - value = acquired - ) + private def syndicationRightsAcquired(acquired: Boolean): Query = + filters.boolTerm( + field = "syndicationRights.rights.acquired", + value = acquired + ) private val noRightsAcquired: Query = filters.or( - filters.existsOrMissing("syndicationRights.rights.acquired", exists = false), + filters + .existsOrMissing("syndicationRights.rights.acquired", exists = false), syndicationRightsAcquired(false) ) @@ -57,24 +62,27 @@ class SyndicationFilter(config: MediaApiConfig) extends ImageFields { private val syndicatableCategory: Query = IsOwnedPhotograph.query def statusFilter(status: SyndicationStatus): Query = status match { - case SentForSyndication => filters.and( - hasRightsAcquired, - hasAllowLease, - hasSyndicationUsage - ) - case QueuedForSyndication => filters.and( - hasRightsAcquired, - filters.mustNot(hasSyndicationUsage), + case SentForSyndication => filters.and( + hasRightsAcquired, hasAllowLease, - leaseHasStarted, - syndicationRightsPublished + hasSyndicationUsage + ) + case QueuedForSyndication => + filters.and( + hasRightsAcquired, + filters.mustNot(hasSyndicationUsage), + filters.and( + hasAllowLease, + leaseHasStarted, + syndicationRightsPublished + ) + ) + 
case BlockedForSyndication => + filters.and( + hasRightsAcquired, + hasDenyLease ) - ) - case BlockedForSyndication => filters.and( - hasRightsAcquired, - hasDenyLease - ) case AwaitingReviewForSyndication => { val rightsAcquiredNoLeaseFilter = filters.and( hasRightsAcquired, @@ -89,10 +97,11 @@ class SyndicationFilter(config: MediaApiConfig) extends ImageFields { ) config.syndicationStartDate match { - case Some(date) if config.isProd => filters.and( - filters.date("uploadTime", Some(date), None).get, - rightsAcquiredNoLeaseFilter - ) + case Some(date) if config.isProd => + filters.and( + filters.date("uploadTime", Some(date), None).get, + rightsAcquiredNoLeaseFilter + ) case _ => rightsAcquiredNoLeaseFilter } } diff --git a/media-api/app/lib/elasticsearch/filters.scala b/media-api/app/lib/elasticsearch/filters.scala index 581d6b4c0a..2bf80b9aa6 100644 --- a/media-api/app/lib/elasticsearch/filters.scala +++ b/media-api/app/lib/elasticsearch/filters.scala @@ -3,7 +3,11 @@ package lib.elasticsearch import com.gu.mediaservice.lib.formatting.printDateTime import com.sksamuel.elastic4s.ElasticDsl import com.sksamuel.elastic4s.ElasticDsl._ -import com.sksamuel.elastic4s.requests.searches.queries.{BoolQuery, NestedQuery, Query} +import com.sksamuel.elastic4s.requests.searches.queries.{ + BoolQuery, + NestedQuery, + Query +} import org.joda.time.DateTime import scalaz.NonEmptyList import scalaz.syntax.foldable1._ @@ -20,7 +24,11 @@ object filters { def boolTerm(field: String, value: Boolean): Query = termQuery(field, value) - def date(field: String, from: Option[DateTime], to: Option[DateTime]): Option[Query] = + def date( + field: String, + from: Option[DateTime], + to: Option[DateTime] + ): Option[Query] = if (from.isDefined || to.isDefined) { val builder = rangeQuery(field) val withFrom = from.fold(builder)(f => builder.gt(printDateTime(f))) @@ -62,11 +70,13 @@ object filters { } def mustWithMustNot(mustClause: Query, mustNotClause: Query): Query = { - bool.must( - mustClause - ).withNot( - mustNotClause - ) + bool + .must( + mustClause + ) + .withNot( + mustNotClause + ) } def nested(path: String, query: Query) = NestedQuery(path, query) diff --git a/media-api/app/lib/elasticsearch/sorts.scala b/media-api/app/lib/elasticsearch/sorts.scala index cc6c4e7781..760f3c6d7a 100644 --- a/media-api/app/lib/elasticsearch/sorts.scala +++ b/media-api/app/lib/elasticsearch/sorts.scala @@ -5,7 +5,8 @@ import com.sksamuel.elastic4s.requests.searches.sort.{Sort, SortOrder} object sorts { - private val UploadTimeDescending: Sort = fieldSort("uploadTime").order(SortOrder.DESC) + private val UploadTimeDescending: Sort = + fieldSort("uploadTime").order(SortOrder.DESC) private val HasDescFieldPrefix = "-(.+)".r def createSort(sortBy: Option[String]): Seq[Sort] = { @@ -13,13 +14,15 @@ object sorts { } // This is a special case in the elastic1 code which does not fit well as it also effects the query criteria - def dateAddedToCollectionDescending: Seq[Sort] = Seq(fieldSort("collections.actionData.date").order(SortOrder.DESC)) + def dateAddedToCollectionDescending: Seq[Sort] = Seq( + fieldSort("collections.actionData.date").order(SortOrder.DESC) + ) private def parseSortBy(sortBy: String): Seq[Sort] = { - sortBy.split(',').toList.map { - case HasDescFieldPrefix(field) => fieldSort(field).order(SortOrder.DESC) - case field => fieldSort(field).order(SortOrder.ASC) - } + sortBy.split(',').toList.map { + case HasDescFieldPrefix(field) => fieldSort(field).order(SortOrder.DESC) + case field => 
fieldSort(field).order(SortOrder.ASC) + } } } diff --git a/media-api/app/lib/querysyntax/DateRangeParser.scala b/media-api/app/lib/querysyntax/DateRangeParser.scala index df37179904..b67f4224dd 100644 --- a/media-api/app/lib/querysyntax/DateRangeParser.scala +++ b/media-api/app/lib/querysyntax/DateRangeParser.scala @@ -10,22 +10,28 @@ trait DateParser { lazy val formatter = DateTimeFormat.forPattern(format) def parseRange(expr: String): Option[DateRange] - def parseDate(expr: String): Option[DateTime] = Try(DateTime.parse(expr, formatter)).toOption + def parseDate(expr: String): Option[DateTime] = Try( + DateTime.parse(expr, formatter) + ).toOption } -case class DateAliasParser(alias: String, start: DateTime, end: DateTime) extends DateParser { +case class DateAliasParser(alias: String, start: DateTime, end: DateTime) + extends DateParser { val format = "" def parseRange(expr: String) = if (expr == alias) Some(DateRange(start, end.minusMillis(1))) else None } -case class DateFormatParser(format: String, calculateEnd: Option[(DateTime) => DateTime] = None) extends DateParser { +case class DateFormatParser( + format: String, + calculateEnd: Option[(DateTime) => DateTime] = None +) extends DateParser { def parseRange(expr: String): Option[DateRange] = parseDate(expr).map(start => { val sameDay = (start: DateTime) => start.plusDays(1) - val end = calculateEnd.getOrElse(sameDay)(start).minusMillis(1) + val end = calculateEnd.getOrElse(sameDay)(start).minusMillis(1) DateRange(start, end) }) diff --git a/media-api/app/lib/querysyntax/Parser.scala b/media-api/app/lib/querysyntax/Parser.scala index 187a090d9f..c18910e8af 100644 --- a/media-api/app/lib/querysyntax/Parser.scala +++ b/media-api/app/lib/querysyntax/Parser.scala @@ -9,14 +9,18 @@ object Parser { new QuerySyntax(input.trim).Query.run().map(_.toList) getOrElse List() // Post-hoc normalisation that are harder to do via the PEG grammar - def normalise(conditions: List[Condition]): List[Condition] = conditions match { - // Merge consecutive terms into a single match (e.g. "cats and dogs") - case Match(AnyField, Words(words1)) :: Match(AnyField, Words(words2)) :: xs => - normalise(Match(AnyField, Words(s"$words1 $words2")) :: xs) - // Else, recursively match the next list tail - case x :: xs => x :: normalise(xs) - // Until we reach the end of the list - case Nil => Nil - } + def normalise(conditions: List[Condition]): List[Condition] = + conditions match { + // Merge consecutive terms into a single match (e.g. 
"cats and dogs") + case Match(AnyField, Words(words1)) :: Match( + AnyField, + Words(words2) + ) :: xs => + normalise(Match(AnyField, Words(s"$words1 $words2")) :: xs) + // Else, recursively match the next list tail + case x :: xs => x :: normalise(xs) + // Until we reach the end of the list + case Nil => Nil + } } diff --git a/media-api/app/lib/querysyntax/QuerySyntax.scala b/media-api/app/lib/querysyntax/QuerySyntax.scala index 2ec6df9d6e..a27350b904 100644 --- a/media-api/app/lib/querysyntax/QuerySyntax.scala +++ b/media-api/app/lib/querysyntax/QuerySyntax.scala @@ -25,17 +25,17 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { def NestedFilter = rule { NestedMatch ~> Nested | - NestedDateMatch + NestedDateMatch } def Filter = rule { HasMatch ~> Match | - IsMatch ~> Match | - ScopedMatch ~> Match | HashMatch | CollectionRule | - DateConstraintMatch | - DateRangeMatch ~> Match | AtMatch | - FileTypeMatch ~> Match | - AnyMatch + IsMatch ~> Match | + ScopedMatch ~> Match | HashMatch | CollectionRule | + DateConstraintMatch | + DateRangeMatch ~> Match | AtMatch | + FileTypeMatch ~> Match | + AnyMatch } def HasMatch = rule { HasMatchField ~ ':' ~ HasMatchValue } @@ -48,26 +48,31 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { def IsFieldName = rule { "is" } def IsMatchValue = rule { String ~> IsValue } - def NestedMatch = rule { ParentField ~ "@" ~ NestedField ~ ':' ~ ExactMatchValue } - def NestedDateMatch = rule { ParentField ~ "@" ~ DateConstraintMatch ~> ( - (parentField: Field, dateMatch: Match) => { - Nested(parentField, dateMatch.field, dateMatch.value) - } - )} - - def DateConstraintMatch = rule { DateConstraint ~ DateMatch ~> ( - (constraint: String, dateMatch: Match) => { - val dateRange = dateMatch.value match { - case Date(d) => constraint match { - case ">" => DateRange(d, tomorrow) - case "<" => DateRange(beginningOfTime, d) - } + def NestedMatch = rule { + ParentField ~ "@" ~ NestedField ~ ':' ~ ExactMatchValue + } + def NestedDateMatch = rule { + ParentField ~ "@" ~ DateConstraintMatch ~> ( + (parentField: Field, dateMatch: Match) => { + Nested(parentField, dateMatch.field, dateMatch.value) + } + ) + } + + def DateConstraintMatch = rule { + DateConstraint ~ DateMatch ~> ((constraint: String, dateMatch: Match) => { + val dateRange = dateMatch.value match { + case Date(d) => + constraint match { + case ">" => DateRange(d, tomorrow) + case "<" => DateRange(beginningOfTime, d) + } case _ => throw new InvalidQuery("No date for date constraint!") } Match(dateMatch.field, dateRange) - } - )} + }) + } def DateConstraint = rule { capture(AllowedDateConstraints) } def AllowedDateConstraints = rule { @@ -76,22 +81,30 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { def ScopedMatch = rule { MatchField ~ ':' ~ MatchValue } - def HashMatch = rule { '#' ~ MatchValue ~> ( - label => Match( - SingleField(getFieldPath("labels")), - label + def HashMatch = rule { + '#' ~ MatchValue ~> (label => + Match( + SingleField(getFieldPath("labels")), + label + ) ) - )} + } - def CollectionRule = rule { '~' ~ ExactMatchValue ~> ( - collection => Match( - HierarchyField, - Phrase(collection.string.toLowerCase) + def CollectionRule = rule { + '~' ~ ExactMatchValue ~> (collection => + Match( + HierarchyField, + Phrase(collection.string.toLowerCase) + ) ) - )} + } - def ParentField = rule { capture(AllowedParentFieldName) ~> resolveNamedField _ } - def NestedField = rule { capture(AllowedNestedFieldName) ~> 
resolveNamedField _ } + def ParentField = rule { + capture(AllowedParentFieldName) ~> resolveNamedField _ + } + def NestedField = rule { + capture(AllowedNestedFieldName) ~> resolveNamedField _ + } def MatchField = rule { capture(AllowedFieldName) ~> resolveNamedField _ } def AllowedParentFieldName = rule { "usages" } @@ -100,24 +113,24 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { } def AllowedFieldName = rule { "illustrator" | - "uploader" | - "location" | "city" | "state" | "country" | "in" | - "byline" | "by" | "photographer" | - "description" | - "credit" | - "copyright" | - "source" | - "category" | - "subject" | - "supplier" | - "collection" | - "keyword" | - "label" | - "croppedBy" | - "filename" | - "photoshoot" | - "leasedBy" | - "person" + "uploader" | + "location" | "city" | "state" | "country" | "in" | + "byline" | "by" | "photographer" | + "description" | + "credit" | + "copyright" | + "source" | + "category" | + "subject" | + "supplier" | + "collection" | + "keyword" | + "label" | + "croppedBy" | + "filename" | + "photoshoot" | + "leasedBy" | + "person" } def resolveNamedField(name: String): Field = (name match { @@ -132,10 +145,15 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { case "person" => "peopleInImage" case fieldName => fieldName }) match { - case "publication" => MultipleField(List("publicationName", "publicationCode")) - case "section" => MultipleField(List("sectionId","sectionCode")) - case "reference" => MultipleField(List("references.uri", "references.name").map(usagesField)) - case "in" => MultipleField(List("subLocation", "city", "state", "country").map(getFieldPath)) + case "publication" => + MultipleField(List("publicationName", "publicationCode")) + case "section" => MultipleField(List("sectionId", "sectionCode")) + case "reference" => + MultipleField(List("references.uri", "references.name").map(usagesField)) + case "in" => + MultipleField( + List("subLocation", "city", "state", "country").map(getFieldPath) + ) case field => SingleField(getFieldPath(field)) } @@ -149,7 +167,9 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { def String = rule { capture(Chars) } def DateMatch = rule { - MatchDateField ~ ':' ~ MatchDateValue ~> ((field, date) => Match(field, Date(date))) + MatchDateField ~ ':' ~ MatchDateValue ~> ((field, date) => + Match(field, Date(date)) + ) } def DateRangeMatch = rule { @@ -160,9 +180,15 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { MatchMimeTypeField ~ ':' ~ MatchMimeTypeValue } - def AtMatch = rule { '@' ~ MatchDateRangeValue ~> (range => Match(SingleField(getFieldPath("uploadTime")), range)) } + def AtMatch = rule { + '@' ~ MatchDateRangeValue ~> (range => + Match(SingleField(getFieldPath("uploadTime")), range) + ) + } - def MatchDateField = rule { capture(AllowedDateFieldName) ~> resolveDateField _ } + def MatchDateField = rule { + capture(AllowedDateFieldName) ~> resolveDateField _ + } def MatchMimeTypeField = rule { capture("fileType") ~> resolveMimeTypeField _ @@ -187,9 +213,11 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { } def MatchDateRangeValue = rule { - (QuotedString | String) ~> normaliseDateExpr _ ~> parseDateRange _ ~> (d => { - test(d.isDefined) ~ push(d.get) - }) + (QuotedString | String) ~> normaliseDateExpr _ ~> parseDateRange _ ~> ( + d => { + test(d.isDefined) ~ push(d.get) + } + ) } def MatchMimeTypeValue = rule { @@ -201,23 +229,25 @@ class QuerySyntax(val 
input: ParserInput) extends Parser with ImageFields { def translateMimeType(expr: String): MimeType = expr match { case s if s.equals("tif") || s.equals("tiff") => Tiff case s if s.equals("jpg") || s.equals("jpeg") => Jpeg - case s if s.equals("png") => Png + case s if s.equals("png") => Png } - def parseMimeType(expr: String): Value = Words(translateMimeType(expr).toString) + def parseMimeType(expr: String): Value = Words( + translateMimeType(expr).toString + ) def normaliseDateExpr(expr: String): String = expr.replaceAll("\\.", " ") - val todayParser = DateAliasParser("today", today, tomorrow) - val yesterdayParser = DateAliasParser("yesterday", yesterday, today) + val todayParser = DateAliasParser("today", today, tomorrow) + val yesterdayParser = DateAliasParser("yesterday", yesterday, today) - val humanDateParser = DateFormatParser("dd MMMMM YYYY") - val slashDateParser = DateFormatParser("d/M/YYYY") + val humanDateParser = DateFormatParser("dd MMMMM YYYY") + val slashDateParser = DateFormatParser("d/M/YYYY") val paddedslashDateParser = DateFormatParser("dd/MM/YYYY") - val isoDateParser = DateFormatParser("YYYY-MM-dd") + val isoDateParser = DateFormatParser("YYYY-MM-dd") val humanMonthParser = DateFormatParser("MMMMM YYYY", Some(_.plusMonths(1))) - val yearParser = DateFormatParser("YYYY", Some(_.plusYears(1))) + val yearParser = DateFormatParser("YYYY", Some(_.plusYears(1))) val dateParsers: List[DateParser] = List( todayParser, @@ -245,8 +275,10 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { // Quoted strings def SingleQuote = "'" def DoubleQuote = "\"" - def QuotedString = rule { SingleQuote ~ capture(NotSingleQuote) ~ SingleQuote | - DoubleQuote ~ capture(NotDoubleQuote) ~ DoubleQuote } + def QuotedString = rule { + SingleQuote ~ capture(NotSingleQuote) ~ SingleQuote | + DoubleQuote ~ capture(NotDoubleQuote) ~ DoubleQuote + } // TODO: unless escaped? def NotSingleQuote = rule { oneOrMore(noneOf(SingleQuote)) } def NotDoubleQuote = rule { oneOrMore(noneOf(DoubleQuote)) } @@ -254,7 +286,6 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { def Whitespace = rule { oneOrMore(' ') } def Chars = rule { oneOrMore(visibleChars) } - // Note: this is a somewhat arbitrarily list of common Unicode ranges that we // expect people to want to use (e.g. Latin1 accented characters, curly quotes, etc). // This is likely not exhaustive and will need reviewing in the future. 
@@ -263,7 +294,8 @@ class QuerySyntax(val input: ParserInput) extends Parser with ImageFields { val latin1ExtendedB = CharPredicate('\u0180' to '\u024f') val generalPunctuation = CharPredicate('\u2010' to '\u203d') val latin1ExtendedAdditional = CharPredicate('\u1e00' to '\u1eff') - val extraVisibleCharacters = latin1SupplementSubset ++ latin1ExtendedA ++ latin1ExtendedB ++ generalPunctuation + val extraVisibleCharacters = + latin1SupplementSubset ++ latin1ExtendedA ++ latin1ExtendedB ++ generalPunctuation val visibleChars = CharPredicate.Visible ++ extraVisibleCharacters diff --git a/media-api/app/lib/querysyntax/model.scala b/media-api/app/lib/querysyntax/model.scala index 1176e4a556..07800d21fb 100644 --- a/media-api/app/lib/querysyntax/model.scala +++ b/media-api/app/lib/querysyntax/model.scala @@ -5,7 +5,8 @@ import org.joda.time.DateTime sealed trait Condition final case class Negation(m: Match) extends Condition final case class Match(field: Field, value: Value) extends Condition -final case class Nested(parentField: Field, field: Field, value: Value) extends Condition +final case class Nested(parentField: Field, field: Field, value: Value) + extends Condition sealed trait Field final case object AnyField extends Field diff --git a/media-api/app/lib/usagerights/CostCalculator.scala b/media-api/app/lib/usagerights/CostCalculator.scala index bb67eba1a5..e0306331d0 100644 --- a/media-api/app/lib/usagerights/CostCalculator.scala +++ b/media-api/app/lib/usagerights/CostCalculator.scala @@ -11,8 +11,10 @@ trait CostCalculator { val quotas: UsageQuota def getCost(supplier: String, collection: Option[String]): Option[Cost] = { - val free = isFreeSupplier(supplier) && ! collection.exists(isExcludedColl(supplier, _)) - if (free) Some(Free) else None + val free = isFreeSupplier(supplier) && !collection.exists( + isExcludedColl(supplier, _) + ) + if (free) Some(Free) else None } def isConditional(usageRights: UsageRights): Boolean = @@ -29,22 +31,24 @@ trait CostCalculator { } def getCost(usageRights: UsageRights): Cost = { - val restricted : Option[Cost] = usageRights.restrictions.map(r => Conditional) - val categoryCost: Option[Cost] = usageRights.defaultCost - val overQuota: Option[Cost] = getOverQuota(usageRights) - val supplierCost: Option[Cost] = usageRights match { - case u: Agency => getCost(u.supplier, u.suppliersCollection) - case _ => None - } - - restricted - .orElse(overQuota) - .orElse(categoryCost) - .orElse(supplierCost) - .getOrElse(defaultCost) + val restricted: Option[Cost] = + usageRights.restrictions.map(r => Conditional) + val categoryCost: Option[Cost] = usageRights.defaultCost + val overQuota: Option[Cost] = getOverQuota(usageRights) + val supplierCost: Option[Cost] = usageRights match { + case u: Agency => getCost(u.supplier, u.suppliersCollection) + case _ => None + } + + restricted + .orElse(overQuota) + .orElse(categoryCost) + .orElse(supplierCost) + .getOrElse(defaultCost) } - private def isFreeSupplier(supplier: String) = freeSuppliers.contains(supplier) + private def isFreeSupplier(supplier: String) = + freeSuppliers.contains(supplier) private def isExcludedColl(supplier: String, supplierColl: String) = suppliersCollectionExcl.get(supplier).exists(_.contains(supplierColl)) diff --git a/media-api/test/lib/ImagePersistenceReasonsTest.scala b/media-api/test/lib/ImagePersistenceReasonsTest.scala index b652f7c133..951824b11b 100644 --- a/media-api/test/lib/ImagePersistenceReasonsTest.scala +++ b/media-api/test/lib/ImagePersistenceReasonsTest.scala @@ -14,42 +14,109 @@ 
class ImagePersistenceReasonsTest extends FunSpec with Matchers { val persistedIdentifier = "test-p-id" val persistedRootCollections = List("coll1", "coll2", "coll3") - val imgPersistenceReasons = ImagePersistenceReasons.apply(persistedRootCollections, persistedIdentifier) + val imgPersistenceReasons = ImagePersistenceReasons.apply( + persistedRootCollections, + persistedIdentifier + ) imgPersistenceReasons.getImagePersistenceReasons(img) shouldBe Nil - val imgWithPersistenceIdentifier = img.copy(identifiers = Map(persistedIdentifier -> "test-id")) - imgPersistenceReasons.getImagePersistenceReasons(imgWithPersistenceIdentifier) shouldBe List("persistence-identifier") - val imgWithExports = img.copy(exports = List(Crop(None, None, None, null, None, Nil))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithExports) shouldBe List("exports") - val imgWithUsages = img.copy(usages = List(Usage("test", Nil, null, "img", null, None, None, now()))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithUsages) shouldBe List("usages") - val imgWithArchive = img.copy(userMetadata = Some(Edits(archived = true, metadata = ImageMetadata.empty))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithArchive) shouldBe List("archived") - val imgWithPhotographerCategory = img.copy(usageRights = ContractPhotographer("test")) - imgPersistenceReasons.getImagePersistenceReasons(imgWithPhotographerCategory) shouldBe List("photographer-category") - val imgWithIllustratorCategory = img.copy(usageRights = StaffIllustrator("test")) - imgPersistenceReasons.getImagePersistenceReasons(imgWithIllustratorCategory) shouldBe List("illustrator-category") - val imgWithAgencyCommissionedCategory = img.copy(usageRights = CommissionedAgency("test")) - imgPersistenceReasons.getImagePersistenceReasons(imgWithAgencyCommissionedCategory) shouldBe List(CommissionedAgency.category) - val imgWithLeases = img.copy(leases = LeasesByMedia.build(List(MediaLease(id = None, leasedBy = None, notes = None, mediaId = "test")))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithLeases) shouldBe List("leases") - val imgWithPersistedRootCollections = img.copy(collections = List(Collection.build(persistedRootCollections.tail, ActionData("testAuthor", now())))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithPersistedRootCollections) shouldBe List("persisted-collection") - - val imgWithPhotoshoot = img.copy(userMetadata = Some(Edits(metadata = ImageMetadata.empty, photoshoot = Some(Photoshoot("test"))))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithPhotoshoot) shouldBe List("photoshoot") - - val imgWithUserEdits = img.copy(userMetadata = Some(Edits(metadata = ImageMetadata(title = Some("test"))))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithUserEdits) shouldBe List("edited") - - val imgWithLabels = img.copy(userMetadata = Some(Edits(metadata = ImageMetadata.empty, labels = List("test-label")))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithLabels) shouldBe List("labeled") - - val imgWithMultipleReasons = img.copy(userMetadata = Some(Edits( - labels = List("test-label"), - metadata = ImageMetadata(title = Some("test")), - photoshoot = Some(Photoshoot("test"))))) - imgPersistenceReasons.getImagePersistenceReasons(imgWithMultipleReasons) should contain theSameElementsAs List("labeled", "edited", "photoshoot") + val imgWithPersistenceIdentifier = + img.copy(identifiers = Map(persistedIdentifier -> "test-id")) + imgPersistenceReasons.getImagePersistenceReasons( + 
imgWithPersistenceIdentifier + ) shouldBe List("persistence-identifier") + val imgWithExports = + img.copy(exports = List(Crop(None, None, None, null, None, Nil))) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithExports + ) shouldBe List("exports") + val imgWithUsages = img.copy(usages = + List(Usage("test", Nil, null, "img", null, None, None, now())) + ) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithUsages + ) shouldBe List("usages") + val imgWithArchive = img.copy(userMetadata = + Some(Edits(archived = true, metadata = ImageMetadata.empty)) + ) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithArchive + ) shouldBe List("archived") + val imgWithPhotographerCategory = + img.copy(usageRights = ContractPhotographer("test")) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithPhotographerCategory + ) shouldBe List("photographer-category") + val imgWithIllustratorCategory = + img.copy(usageRights = StaffIllustrator("test")) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithIllustratorCategory + ) shouldBe List("illustrator-category") + val imgWithAgencyCommissionedCategory = + img.copy(usageRights = CommissionedAgency("test")) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithAgencyCommissionedCategory + ) shouldBe List(CommissionedAgency.category) + val imgWithLeases = img.copy(leases = + LeasesByMedia.build( + List( + MediaLease(id = None, leasedBy = None, notes = None, mediaId = "test") + ) + ) + ) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithLeases + ) shouldBe List("leases") + val imgWithPersistedRootCollections = img.copy(collections = + List( + Collection.build( + persistedRootCollections.tail, + ActionData("testAuthor", now()) + ) + ) + ) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithPersistedRootCollections + ) shouldBe List("persisted-collection") + + val imgWithPhotoshoot = img.copy(userMetadata = + Some( + Edits( + metadata = ImageMetadata.empty, + photoshoot = Some(Photoshoot("test")) + ) + ) + ) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithPhotoshoot + ) shouldBe List("photoshoot") + + val imgWithUserEdits = img.copy(userMetadata = + Some(Edits(metadata = ImageMetadata(title = Some("test")))) + ) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithUserEdits + ) shouldBe List("edited") + + val imgWithLabels = img.copy(userMetadata = + Some(Edits(metadata = ImageMetadata.empty, labels = List("test-label"))) + ) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithLabels + ) shouldBe List("labeled") + + val imgWithMultipleReasons = img.copy(userMetadata = + Some( + Edits( + labels = List("test-label"), + metadata = ImageMetadata(title = Some("test")), + photoshoot = Some(Photoshoot("test")) + ) + ) + ) + imgPersistenceReasons.getImagePersistenceReasons( + imgWithMultipleReasons + ) should contain theSameElementsAs List("labeled", "edited", "photoshoot") } } diff --git a/media-api/test/lib/ImageResponseTest.scala b/media-api/test/lib/ImageResponseTest.scala index 7c198a6244..04ee618c7b 100644 --- a/media-api/test/lib/ImageResponseTest.scala +++ b/media-api/test/lib/ImageResponseTest.scala @@ -18,7 +18,9 @@ class ImageResponseTest extends FunSpec with Matchers { normalisedText shouldBe "Here is some text\nthat spans across\nmultiple lines\n" } - it("not cause a stack overflow when many consecutive newline characters are present") { + it( + "not cause a stack overflow when many consecutive newline characters are present" + ) { val text = 
"\n\r\n\n\n\r\r\r\n" * 10000 val normalisedText = ImageResponse.normaliseNewlineChars(text) normalisedText shouldBe "\n" @@ -30,18 +32,37 @@ class ImageResponseTest extends FunSpec with Matchers { normalisedText shouldBe "Here is some text\nthat spans across\nmultiple lines\n" } - it("should indicate if image can be deleted" + - "(it can be deleted if there is no exports or usages)") { + it( + "should indicate if image can be deleted" + + "(it can be deleted if there is no exports or usages)" + ) { import TestUtils._ - val testCrop = Crop(Some("crop-id"), None, None, CropSpec("test-uri", Bounds(0, 0, 0, 0), None), None, Nil) - val testUsage = Usage(id = "usage-id", references = Nil, platform = PrintUsage, media = "test", status = PendingUsageStatus, dateAdded = None, dateRemoved = None, now()) + val testCrop = Crop( + Some("crop-id"), + None, + None, + CropSpec("test-uri", Bounds(0, 0, 0, 0), None), + None, + Nil + ) + val testUsage = Usage( + id = "usage-id", + references = Nil, + platform = PrintUsage, + media = "test", + status = PendingUsageStatus, + dateAdded = None, + dateRemoved = None, + now() + ) val imgWithNoExportsAndUsages = img import ImageResponse.canImgBeDeleted canImgBeDeleted(imgWithNoExportsAndUsages) shouldEqual true - val imgWithExportsAndUsages = img.copy(exports = List(testCrop)).copy(usages = List(testUsage)) + val imgWithExportsAndUsages = + img.copy(exports = List(testCrop)).copy(usages = List(testUsage)) canImgBeDeleted(imgWithExportsAndUsages) shouldEqual false val imgWithOnlyUsages = img.copy(usages = List(testUsage)) canImgBeDeleted(imgWithOnlyUsages) shouldEqual false diff --git a/media-api/test/lib/UsageStoreTest.scala b/media-api/test/lib/UsageStoreTest.scala index 70fffebc58..5af5b26b0d 100644 --- a/media-api/test/lib/UsageStoreTest.scala +++ b/media-api/test/lib/UsageStoreTest.scala @@ -10,12 +10,19 @@ class UsageStoreTest extends FunSpec with Matchers { val lines = UsageStore.extractEmail(stream) - lines.head should be ("\"Cpro Name\",\"Id\"") - lines.tail.head should be ("\"Australian Associated Press Pty Limited (Stacey Shipton)\",\"397\"") + lines.head should be("\"Cpro Name\",\"Id\"") + lines.tail.head should be( + "\"Australian Associated Press Pty Limited (Stacey Shipton)\",\"397\"" + ) val list = UsageStore.csvParser(lines) - list.head should be (SupplierUsageSummary(Agency("Australian Associated Press Pty Limited (Stacey Shipton)"), 397)) + list.head should be( + SupplierUsageSummary( + Agency("Australian Associated Press Pty Limited (Stacey Shipton)"), + 397 + ) + ) } } } diff --git a/media-api/test/lib/elasticsearch/ConditionFixtures.scala b/media-api/test/lib/elasticsearch/ConditionFixtures.scala index a40aebe461..33df2df823 100644 --- a/media-api/test/lib/elasticsearch/ConditionFixtures.scala +++ b/media-api/test/lib/elasticsearch/ConditionFixtures.scala @@ -7,26 +7,41 @@ trait ConditionFixtures { val fieldPhraseMatchCondition = Match(SingleField("afield"), Phrase("avalue")) val wordsMatchCondition = Match(SingleField("awordfield"), Words("foo bar")) - val anotherFieldPhraseMatchCondition = Match(SingleField("anotherfield"), Phrase("anothervalue")) + val anotherFieldPhraseMatchCondition = + Match(SingleField("anotherfield"), Phrase("anothervalue")) - val dateRangeStart: DateTime = new DateTime(2016, 1, 1, 0, 0).withZone(DateTimeZone.UTC) + val dateRangeStart: DateTime = + new DateTime(2016, 1, 1, 0, 0).withZone(DateTimeZone.UTC) val dateRangeEnd: DateTime = dateRangeStart.plusHours(1) - val dateMatchCondition = Match(SingleField("adatefield"), 
DateRange(dateRangeStart, dateRangeEnd)) + val dateMatchCondition = + Match(SingleField("adatefield"), DateRange(dateRangeStart, dateRangeEnd)) val hasFieldCondition = Match(HasField, HasValue("foo")) - val isOwnedPhotoCondition = Match(IsField, IsValue(IsOwnedPhotograph.toString)) - val isOwnedIllustrationCondition = Match(IsField, IsValue(IsOwnedIllustration.toString)) + val isOwnedPhotoCondition = + Match(IsField, IsValue(IsOwnedPhotograph.toString)) + val isOwnedIllustrationCondition = + Match(IsField, IsValue(IsOwnedIllustration.toString)) val isOwnedImageCondition = Match(IsField, IsValue(IsOwnedImage.toString)) - val isUnderQuotaCondition = Match(IsField, IsValue(IsUnderQuota(Nil).toString)) + val isUnderQuotaCondition = + Match(IsField, IsValue(IsUnderQuota(Nil).toString)) val isInvalidCondition = Match(IsField, IsValue("a-random-string")) val hierarchyFieldPhraseCondition = Match(HierarchyField, Phrase("foo")) val anyFieldPhraseCondition = Match(AnyField, Phrase("cats and dogs")) val anyFieldWordsCondition = Match(AnyField, Words("cats dogs")) - val multipleFieldWordsCondition = Match(MultipleField(List("foo", "bar")), Phrase("cats and dogs")) - - val nestedCondition: Condition = Nested(SingleField("usages"), SingleField("usages.status"), Words("pending")) - val anotherNestedCondition: Condition = Nested(SingleField("something"), SingleField("something.field"), Phrase("dogs")) + val multipleFieldWordsCondition = + Match(MultipleField(List("foo", "bar")), Phrase("cats and dogs")) + + val nestedCondition: Condition = Nested( + SingleField("usages"), + SingleField("usages.status"), + Words("pending") + ) + val anotherNestedCondition: Condition = Nested( + SingleField("something"), + SingleField("something.field"), + Phrase("dogs") + ) } diff --git a/media-api/test/lib/elasticsearch/ElasticSearchTest.scala b/media-api/test/lib/elasticsearch/ElasticSearchTest.scala index 3c18e3d709..c9b1862da0 100644 --- a/media-api/test/lib/elasticsearch/ElasticSearchTest.scala +++ b/media-api/test/lib/elasticsearch/ElasticSearchTest.scala @@ -3,7 +3,10 @@ package lib.elasticsearch import com.gu.mediaservice.lib.auth.Authentication.Principal import com.gu.mediaservice.lib.auth.{Internal, ReadOnly, Syndication} import com.gu.mediaservice.lib.config.{GridConfigLoader, GridConfigResources} -import com.gu.mediaservice.lib.elasticsearch.{ElasticSearchConfig, ElasticSearchExecutions} +import com.gu.mediaservice.lib.elasticsearch.{ + ElasticSearchConfig, + ElasticSearchExecutions +} import com.gu.mediaservice.lib.logging.{LogMarker, MarkerMap} import com.gu.mediaservice.model._ import com.gu.mediaservice.model.leases.DenySyndicationLease @@ -25,7 +28,11 @@ import play.api.mvc.Security.AuthenticatedRequest import scala.concurrent.duration._ import scala.concurrent.{Await, Future} -class ElasticSearchTest extends ElasticSearchTestBase with Eventually with ElasticSearchExecutions with MockitoSugar { +class ElasticSearchTest + extends ElasticSearchTestBase + with Eventually + with ElasticSearchExecutions + with MockitoSugar { implicit val request = mock[AuthenticatedRequest[AnyContent, Principal]] @@ -50,28 +57,53 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast "grid.appName" ) - private val mediaApiConfig = new MediaApiConfig(GridConfigResources( - Configuration.from(Map( - "es6.shards" -> 0, - "es6.replicas" -> 0 - ) ++ MOCK_CONFIG_KEYS.map(_ -> NOT_USED_IN_TEST).toMap), - null - )) + private val mediaApiConfig = new MediaApiConfig( + GridConfigResources( + 
Configuration.from( + Map( + "es6.shards" -> 0, + "es6.replicas" -> 0 + ) ++ MOCK_CONFIG_KEYS.map(_ -> NOT_USED_IN_TEST).toMap + ), + null + ) + ) private val mediaApiMetrics = new MediaApiMetrics(mediaApiConfig) - val elasticConfig = ElasticSearchConfig(alias = "readalias", url = es6TestUrl, - cluster = "media-service-test", shards = 1, replicas = 0) + val elasticConfig = ElasticSearchConfig( + alias = "readalias", + url = es6TestUrl, + cluster = "media-service-test", + shards = 1, + replicas = 0 + ) - private val ES = new ElasticSearch(mediaApiConfig, mediaApiMetrics, elasticConfig, () => List.empty) + private val ES = new ElasticSearch( + mediaApiConfig, + mediaApiMetrics, + elasticConfig, + () => List.empty + ) val client = ES.client - def esContainer = if (useEsDocker) Some(DockerContainer("docker.elastic.co/elasticsearch/elasticsearch:7.5.2") - .withPorts(9200 -> Some(9200)) - .withEnv("cluster.name=media-service", "xpack.security.enabled=false", "discovery.type=single-node", "network.host=0.0.0.0") - .withReadyChecker( - DockerReadyChecker.HttpResponseCode(9200, "/", Some("0.0.0.0")).within(10.minutes).looped(40, 1250.millis) + def esContainer = if (useEsDocker) + Some( + DockerContainer("docker.elastic.co/elasticsearch/elasticsearch:7.5.2") + .withPorts(9200 -> Some(9200)) + .withEnv( + "cluster.name=media-service", + "xpack.security.enabled=false", + "discovery.type=single-node", + "network.host=0.0.0.0" + ) + .withReadyChecker( + DockerReadyChecker + .HttpResponseCode(9200, "/", Some("0.0.0.0")) + .within(10.minutes) + .looped(40, 1250.millis) + ) ) - ) else None + else None private val expectedNumberOfImages = images.size @@ -86,26 +118,33 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast Await.ready(saveImages(images), 1.minute) // allow the cluster to distribute documents... eventual consistency! 
- eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(totalImages shouldBe expectedNumberOfImages) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + totalImages shouldBe expectedNumberOfImages + ) } override def afterAll = purgeTestImages describe("Native elastic search sanity checks") { - def eventualMatchAllSearchResponse = client.execute(ElasticDsl.search(index) size expectedNumberOfImages * 2) + def eventualMatchAllSearchResponse = + client.execute(ElasticDsl.search(index) size expectedNumberOfImages * 2) it("images are actually persisted in Elastic search") { - val searchResponse = Await.result(eventualMatchAllSearchResponse, fiveSeconds) + val searchResponse = + Await.result(eventualMatchAllSearchResponse, fiveSeconds) searchResponse.result.totalHits shouldBe expectedNumberOfImages searchResponse.result.hits.size shouldBe expectedNumberOfImages } it("image hits read back from Elastic search can be parsed as images") { - val searchResponse = Await.result(eventualMatchAllSearchResponse, fiveSeconds) + val searchResponse = + Await.result(eventualMatchAllSearchResponse, fiveSeconds) - val reloadedImages = searchResponse.result.hits.hits.flatMap(h => Json.parse(h.sourceAsString).validate[Image].asOpt) + val reloadedImages = searchResponse.result.hits.hits.flatMap(h => + Json.parse(h.sourceAsString).validate[Image].asOpt + ) reloadedImages.size shouldBe expectedNumberOfImages } @@ -162,12 +201,19 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast describe("usages for supplier") { it("can count published agency images within the last number of days") { - val publishedAgencyImages = images.filter(i => i.usageRights.isInstanceOf[Agency] && i.usages.exists(_.status == PublishedUsageStatus)) + val publishedAgencyImages = images.filter(i => + i.usageRights.isInstanceOf[Agency] && i.usages.exists( + _.status == PublishedUsageStatus + ) + ) publishedAgencyImages.size shouldBe 2 // Reporting date range is implemented as round down to last full day - val withinReportedDateRange = publishedAgencyImages.filter(i => i.usages. 
- exists(u => u.dateAdded.exists(_.isBefore(DateTime.now.withTimeAtStartOfDay())))) + val withinReportedDateRange = publishedAgencyImages.filter(i => + i.usages.exists(u => + u.dateAdded.exists(_.isBefore(DateTime.now.withTimeAtStartOfDay())) + ) + ) withinReportedDateRange.size shouldBe 1 val results = Await.result(ES.usageForSupplier("ACME", 5), fiveSeconds) @@ -178,18 +224,32 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast describe("aggregations") { it("can load date aggregations") { - val aggregateSearchParams = AggregateSearchParams(field = "uploadTime", q = None, structuredQuery = List.empty) + val aggregateSearchParams = AggregateSearchParams( + field = "uploadTime", + q = None, + structuredQuery = List.empty + ) - val results = Await.result(ES.dateHistogramAggregate(aggregateSearchParams), fiveSeconds) + val results = Await.result( + ES.dateHistogramAggregate(aggregateSearchParams), + fiveSeconds + ) results.total shouldBe 2 - results.results.foldLeft(0: Long)((a, b) => a + b.count) shouldBe images.size + results.results.foldLeft(0: Long)((a, b) => + a + b.count + ) shouldBe images.size } it("can load metadata aggregations") { - val aggregateSearchParams = AggregateSearchParams(field = "keywords", q = None, structuredQuery = List.empty) + val aggregateSearchParams = AggregateSearchParams( + field = "keywords", + q = None, + structuredQuery = List.empty + ) - val results = Await.result(ES.metadataSearch(aggregateSearchParams), fiveSeconds) + val results = + Await.result(ES.metadataSearch(aggregateSearchParams), fiveSeconds) results.total shouldBe 2 results.results.find(b => b.key == "es").get.count shouldBe images.size @@ -198,7 +258,9 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast } describe("Tiered API access") { - it("ES should return only rights acquired pictures with an allow syndication lease for a syndication tier search") { + it( + "ES should return only rights acquired pictures with an allow syndication lease for a syndication tier search" + ) { val searchParams = SearchParams(tier = Syndication) val searchResult = ES.search(searchParams) whenReady(searchResult, timeout, interval) { result => @@ -230,16 +292,26 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast } describe("syndicationStatus query on the Syndication tier") { - it("should return 0 results if a Syndication tier queries for SentForSyndication images") { - val search = SearchParams(tier = Syndication, syndicationStatus = Some(SentForSyndication)) + it( + "should return 0 results if a Syndication tier queries for SentForSyndication images" + ) { + val search = SearchParams( + tier = Syndication, + syndicationStatus = Some(SentForSyndication) + ) val searchResult = ES.search(search) whenReady(searchResult, timeout, interval) { result => result.total shouldBe 0 } } - it("should return 3 results if a Syndication tier queries for QueuedForSyndication images") { - val search = SearchParams(tier = Syndication, syndicationStatus = Some(QueuedForSyndication)) + it( + "should return 3 results if a Syndication tier queries for QueuedForSyndication images" + ) { + val search = SearchParams( + tier = Syndication, + syndicationStatus = Some(QueuedForSyndication) + ) val searchResult = ES.search(search) whenReady(searchResult, timeout, interval) { result => result.total shouldBe 3 @@ -252,16 +324,26 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast } } - it("should return 0 results if a 
Syndication tier queries for BlockedForSyndication images") { - val search = SearchParams(tier = Syndication, syndicationStatus = Some(BlockedForSyndication)) + it( + "should return 0 results if a Syndication tier queries for BlockedForSyndication images" + ) { + val search = SearchParams( + tier = Syndication, + syndicationStatus = Some(BlockedForSyndication) + ) val searchResult = ES.search(search) whenReady(searchResult, timeout, interval) { result => result.total shouldBe 0 } } - it("should return 0 results if a Syndication tier queries for AwaitingReviewForSyndication images") { - val search = SearchParams(tier = Syndication, syndicationStatus = Some(AwaitingReviewForSyndication)) + it( + "should return 0 results if a Syndication tier queries for AwaitingReviewForSyndication images" + ) { + val search = SearchParams( + tier = Syndication, + syndicationStatus = Some(AwaitingReviewForSyndication) + ) val searchResult = ES.search(search) whenReady(searchResult, timeout, interval) { result => result.total shouldBe 0 @@ -270,35 +352,57 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast } describe("syndicationStatus query on the internal tier") { - it("should return 1 image if an Internal tier queries for SentForSyndication images") { - val search = SearchParams(tier = Internal, syndicationStatus = Some(SentForSyndication)) + it( + "should return 1 image if an Internal tier queries for SentForSyndication images" + ) { + val search = SearchParams( + tier = Internal, + syndicationStatus = Some(SentForSyndication) + ) val searchResult = ES.search(search) whenReady(searchResult, timeout, interval) { result => result.total shouldBe 1 } } - it("should return 3 images if an Internal tier queries for QueuedForSyndication images") { - val search = SearchParams(tier = Internal, syndicationStatus = Some(QueuedForSyndication)) + it( + "should return 3 images if an Internal tier queries for QueuedForSyndication images" + ) { + val search = SearchParams( + tier = Internal, + syndicationStatus = Some(QueuedForSyndication) + ) val searchResult = ES.search(search) whenReady(searchResult, timeout, interval) { result => result.total shouldBe 3 } } - it("should return 3 images if an Internal tier queries for BlockedForSyndication images") { - val search = SearchParams(tier = Internal, syndicationStatus = Some(BlockedForSyndication)) + it( + "should return 3 images if an Internal tier queries for BlockedForSyndication images" + ) { + val search = SearchParams( + tier = Internal, + syndicationStatus = Some(BlockedForSyndication) + ) val searchResult = ES.search(search) whenReady(searchResult, timeout, interval) { result => result.hits.forall(h => h._2.leases.leases.nonEmpty) shouldBe true - result.hits.forall(h => h._2.leases.leases.forall(l => l.access == DenySyndicationLease)) shouldBe true + result.hits.forall(h => + h._2.leases.leases.forall(l => l.access == DenySyndicationLease) + ) shouldBe true result.total shouldBe 3 } } - it("should return 3 images if an Internal tier queries for AwaitingReviewForSyndication images") { + it( + "should return 3 images if an Internal tier queries for AwaitingReviewForSyndication images" + ) { // Elastic1 implementation is returning the images with reviewed and blocked syndicationStatus - val search = SearchParams(tier = Internal, syndicationStatus = Some(AwaitingReviewForSyndication)) + val search = SearchParams( + tier = Internal, + syndicationStatus = Some(AwaitingReviewForSyndication) + ) val searchResult = ES.search(search) 
whenReady(searchResult, timeout, interval) { result => result.total shouldBe 3 @@ -311,38 +415,62 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast val hasTitleCondition = Match(HasField, HasValue("title")) val unknownFieldCondition = Match(HasField, HasValue("unknownfield")) - val hasTitleSearch = SearchParams(tier = Internal, structuredQuery = List(hasTitleCondition)) + val hasTitleSearch = + SearchParams(tier = Internal, structuredQuery = List(hasTitleCondition)) whenReady(ES.search(hasTitleSearch), timeout, interval) { result => result.total shouldBe expectedNumberOfImages } - val hasUnknownFieldTitleSearch = SearchParams(tier = Internal, structuredQuery = List(unknownFieldCondition)) - whenReady(ES.search(hasUnknownFieldTitleSearch), timeout, interval) { result => - result.total shouldBe 0 + val hasUnknownFieldTitleSearch = SearchParams( + tier = Internal, + structuredQuery = List(unknownFieldCondition) + ) + whenReady(ES.search(hasUnknownFieldTitleSearch), timeout, interval) { + result => + result.total shouldBe 0 } } - it("should be able to filter images with fileMetadata even though fileMetadata fields are not indexed") { + it( + "should be able to filter images with fileMetadata even though fileMetadata fields are not indexed" + ) { val hasFileMetadataCondition = Match(HasField, HasValue("fileMetadata")) - val hasFileMetadataSearch = SearchParams(tier = Internal, structuredQuery = List(hasFileMetadataCondition)) + val hasFileMetadataSearch = SearchParams( + tier = Internal, + structuredQuery = List(hasFileMetadataCondition) + ) whenReady(ES.search(hasFileMetadataSearch), timeout, interval) { result => result.total shouldBe 1 result.hits.head._2.fileMetadata.xmp.nonEmpty shouldBe true } } - it("should be able to filter images which have specific fileMetadata fields even though fileMetadata fields are not indexed") { - val hasFileMetadataCondition = Match(HasField, HasValue("fileMetadata.xmp.foo")) - val hasFileMetadataSearch = SearchParams(tier = Internal, structuredQuery = List(hasFileMetadataCondition)) + it( + "should be able to filter images which have specific fileMetadata fields even though fileMetadata fields are not indexed" + ) { + val hasFileMetadataCondition = + Match(HasField, HasValue("fileMetadata.xmp.foo")) + val hasFileMetadataSearch = SearchParams( + tier = Internal, + structuredQuery = List(hasFileMetadataCondition) + ) whenReady(ES.search(hasFileMetadataSearch), timeout, interval) { result => result.total shouldBe 1 - result.hits.head._2.fileMetadata.xmp.get("foo") shouldBe Some(JsString("bar")) + result.hits.head._2.fileMetadata.xmp.get("foo") shouldBe Some( + JsString("bar") + ) } } - it("file metadata files which are too long cannot by persisted as keywords and will not contribute to has field search results") { - val hasFileMetadataCondition = Match(HasField, HasValue("fileMetadata.xmp.toolong")) - val hasFileMetadataSearch = SearchParams(tier = Internal, structuredQuery = List(hasFileMetadataCondition)) + it( + "file metadata files which are too long cannot by persisted as keywords and will not contribute to has field search results" + ) { + val hasFileMetadataCondition = + Match(HasField, HasValue("fileMetadata.xmp.toolong")) + val hasFileMetadataSearch = SearchParams( + tier = Internal, + structuredQuery = List(hasFileMetadataCondition) + ) whenReady(ES.search(hasFileMetadataSearch), timeout, interval) { result => result.total shouldBe 0 } @@ -351,106 +479,139 @@ class ElasticSearchTest extends ElasticSearchTestBase with 
Eventually with Elast describe("is field filter") { it("should return no images with an invalid search") { - val search = SearchParams(tier = Internal, structuredQuery = List(isInvalidCondition)) - whenReady(ES.search(search), timeout, interval) { result => { - result.total shouldBe 0 - } + val search = SearchParams( + tier = Internal, + structuredQuery = List(isInvalidCondition) + ) + whenReady(ES.search(search), timeout, interval) { result => + { + result.total shouldBe 0 + } } } it("should return owned photographs") { - val search = SearchParams(tier = Internal, structuredQuery = List(isOwnedPhotoCondition), length = 50) - whenReady(ES.search(search), timeout, interval) { result => { - val expected = List( - "iron-suit", - "green-leaf", - "test-image-1", - "test-image-2", - "test-image-3", - "test-image-4", - "test-image-5", - "test-image-6", - "test-image-7", - "test-image-8", - "test-image-12", - "test-image-13" - ) - - val imageIds = result.hits.map(_._1) - imageIds.size shouldBe expected.size - expected.foreach(imageIds.contains(_) shouldBe true) - } + val search = SearchParams( + tier = Internal, + structuredQuery = List(isOwnedPhotoCondition), + length = 50 + ) + whenReady(ES.search(search), timeout, interval) { result => + { + val expected = List( + "iron-suit", + "green-leaf", + "test-image-1", + "test-image-2", + "test-image-3", + "test-image-4", + "test-image-5", + "test-image-6", + "test-image-7", + "test-image-8", + "test-image-12", + "test-image-13" + ) + + val imageIds = result.hits.map(_._1) + imageIds.size shouldBe expected.size + expected.foreach(imageIds.contains(_) shouldBe true) + } } } it("should return owned illustrations") { - val search = SearchParams(tier = Internal, structuredQuery = List(isOwnedIllustrationCondition)) - whenReady(ES.search(search), timeout, interval) { result => { - val expected = List( - "green-giant", - "hammer-hammer-hammer" - ) - - val imageIds = result.hits.map(_._1) - imageIds.size shouldBe expected.size - expected.foreach(imageIds.contains(_) shouldBe true) - } + val search = SearchParams( + tier = Internal, + structuredQuery = List(isOwnedIllustrationCondition) + ) + whenReady(ES.search(search), timeout, interval) { result => + { + val expected = List( + "green-giant", + "hammer-hammer-hammer" + ) + + val imageIds = result.hits.map(_._1) + imageIds.size shouldBe expected.size + expected.foreach(imageIds.contains(_) shouldBe true) + } } } it("should return all owned images") { - val search = SearchParams(tier = Internal, structuredQuery = List(isOwnedImageCondition), length = 50) - whenReady(ES.search(search), timeout, interval) { result => { - val expected = List( - "iron-suit", - "green-leaf", - "test-image-1", - "test-image-2", - "test-image-3", - "test-image-4", - "test-image-5", - "test-image-6", - "test-image-7", - "test-image-8", - "test-image-12", - "test-image-13", - "green-giant", - "hammer-hammer-hammer" - ) - - val imageIds = result.hits.map(_._1) - imageIds.size shouldBe expected.size - expected.foreach(imageIds.contains(_) shouldBe true) - } + val search = SearchParams( + tier = Internal, + structuredQuery = List(isOwnedImageCondition), + length = 50 + ) + whenReady(ES.search(search), timeout, interval) { result => + { + val expected = List( + "iron-suit", + "green-leaf", + "test-image-1", + "test-image-2", + "test-image-3", + "test-image-4", + "test-image-5", + "test-image-6", + "test-image-7", + "test-image-8", + "test-image-12", + "test-image-13", + "green-giant", + "hammer-hammer-hammer" + ) + + val imageIds = 
result.hits.map(_._1) + imageIds.size shouldBe expected.size + expected.foreach(imageIds.contains(_) shouldBe true) + } } } it("should return all images when no agencies are over quota") { - val search = SearchParams(tier = Internal, structuredQuery = List(isUnderQuotaCondition)) + val search = SearchParams( + tier = Internal, + structuredQuery = List(isUnderQuotaCondition) + ) - whenReady(ES.search(search), timeout, interval) { result => { - result.total shouldBe images.size - } + whenReady(ES.search(search), timeout, interval) { result => + { + result.total shouldBe images.size + } } } it("should return any image whose agency is not over quota") { def overQuotaAgencies = List(Agency("Getty Images"), Agency("AP")) - val search = SearchParams(tier = Internal, structuredQuery = List(isUnderQuotaCondition), length = 50) - val elasticsearch = new ElasticSearch(mediaApiConfig, mediaApiMetrics, elasticConfig, () => overQuotaAgencies) + val search = SearchParams( + tier = Internal, + structuredQuery = List(isUnderQuotaCondition), + length = 50 + ) + val elasticsearch = new ElasticSearch( + mediaApiConfig, + mediaApiMetrics, + elasticConfig, + () => overQuotaAgencies + ) - whenReady(elasticsearch.search(search), timeout, interval) { result => { - val overQuotaImages = List( - "getty-image-1", - "getty-image-2", - "ap-image-1" - ) - val expectedUnderQuotaImages = images.map(_.id).filterNot(overQuotaImages.contains) - result.total shouldBe expectedUnderQuotaImages.size - val imageIds = result.hits.map(_._1) - expectedUnderQuotaImages.foreach(imageIds.contains(_) shouldBe true) - } + whenReady(elasticsearch.search(search), timeout, interval) { result => + { + val overQuotaImages = List( + "getty-image-1", + "getty-image-2", + "ap-image-1" + ) + val expectedUnderQuotaImages = + images.map(_.id).filterNot(overQuotaImages.contains) + result.total shouldBe expectedUnderQuotaImages.size + val imageIds = result.hits.map(_._1) + expectedUnderQuotaImages.foreach(imageIds.contains(_) shouldBe true) + } } } } @@ -459,19 +620,26 @@ class ElasticSearchTest extends ElasticSearchTestBase with Eventually with Elast implicit val logMarker: LogMarker = MarkerMap() Future.sequence(images.map { i => - executeAndLog(indexInto(index) id i.id source Json.stringify(Json.toJson(i)), s"Indexing test image") + executeAndLog( + indexInto(index) id i.id source Json.stringify(Json.toJson(i)), + s"Indexing test image" + ) }) } - private def totalImages: Long = Await.result(ES.totalImages(), oneHundredMilliseconds) + private def totalImages: Long = + Await.result(ES.totalImages(), oneHundredMilliseconds) private def purgeTestImages = { implicit val logMarker: LogMarker = MarkerMap() - def deleteImages = executeAndLog(deleteByQuery(index, matchAllQuery()), s"Deleting images") + def deleteImages = + executeAndLog(deleteByQuery(index, matchAllQuery()), s"Deleting images") Await.result(deleteImages, fiveSeconds) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(totalImages shouldBe 0) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + totalImages shouldBe 0 + ) } } diff --git a/media-api/test/lib/elasticsearch/ElasticSearchTestBase.scala b/media-api/test/lib/elasticsearch/ElasticSearchTestBase.scala index dff5f75558..5472e35614 100644 --- a/media-api/test/lib/elasticsearch/ElasticSearchTestBase.scala +++ b/media-api/test/lib/elasticsearch/ElasticSearchTestBase.scala @@ -17,13 +17,22 @@ import play.api.libs.json.JsString import scala.concurrent.duration._ import scala.util.Properties -trait 
ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers with ScalaFutures with Fixtures with DockerKit with DockerTestKit with DockerKitSpotify with ConditionFixtures { - +trait ElasticSearchTestBase + extends FunSpec + with BeforeAndAfterAll + with Matchers + with ScalaFutures + with Fixtures + with DockerKit + with DockerTestKit + with DockerKitSpotify + with ConditionFixtures { val interval = Interval(Span(100, Milliseconds)) val timeout = Timeout(Span(10, Seconds)) - val useEsDocker = Properties.envOrElse("USE_DOCKER_FOR_TESTS", "true").toBoolean + val useEsDocker = + Properties.envOrElse("USE_DOCKER_FOR_TESTS", "true").toBoolean val es6TestUrl = Properties.envOrElse("ES6_TEST_URL", "http://localhost:9200") def esContainer: Option[DockerContainer] @@ -42,19 +51,23 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers createImage("iron-suit", CommissionedPhotographer("Iron Man")), createImage("green-giant", StaffIllustrator("Hulk")), createImage("hammer-hammer-hammer", ContractIllustrator("Thor")), - createImage("green-leaf", StaffPhotographer("Yellow Giraffe", "The Guardian")), - createImage(UUID.randomUUID().toString, Handout(), usages = List(createDigitalUsage())), - + createImage( + "green-leaf", + StaffPhotographer("Yellow Giraffe", "The Guardian") + ), + createImage( + UUID.randomUUID().toString, + Handout(), + usages = List(createDigitalUsage()) + ), createImageUploadedInThePast("persisted-because-edited").copy( - userMetadata = Some(Edits(metadata = ImageMetadata(credit = Some("author")))) + userMetadata = + Some(Edits(metadata = ImageMetadata(credit = Some("author")))) ), - createImageUploadedInThePast("test-image-14-unedited"), - createImageUploadedInThePast("persisted-because-usage").copy( usages = List(createPrintUsage()) ), - // available for syndication createImageForSyndication( id = "test-image-1", @@ -62,7 +75,6 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers Some(DateTime.parse("2018-01-01T00:00:00")), Some(createSyndicationLease(allowed = true, "test-image-1")) ), - // has a digital usage, still eligible for syndication createImageForSyndication( id = "test-image-2", @@ -71,7 +83,6 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers Some(createSyndicationLease(allowed = true, "test-image-2")), List(createDigitalUsage()) ), - // has syndication usage, not available for syndication createImageForSyndication( id = "test-image-3", @@ -80,7 +91,6 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers Some(createSyndicationLease(allowed = true, "test-image-3")), List(createDigitalUsage(), createSyndicationUsage()) ), - // rights acquired, explicit allow syndication lease and unknown publish date, available for syndication createImageForSyndication( id = "test-image-4", @@ -88,7 +98,6 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers None, Some(createSyndicationLease(allowed = true, "test-image-4")) ), - // explicit deny syndication lease with no end date, not available for syndication createImageForSyndication( id = "test-image-5", @@ -96,15 +105,19 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers None, Some(createSyndicationLease(allowed = false, "test-image-5")) ), - // explicit deny syndication lease with end date before now, available for syndication createImageForSyndication( id = "test-image-6", rightsAcquired = true, 
Some(DateTime.parse("2018-01-01T00:00:00")), - Some(createSyndicationLease(allowed = false, "test-image-6", endDate = Some(DateTime.parse("2018-01-01T00:00:00")))) + Some( + createSyndicationLease( + allowed = false, + "test-image-6", + endDate = Some(DateTime.parse("2018-01-01T00:00:00")) + ) + ) ), - // images published after "today", not available for syndication createImageForSyndication( id = "test-image-7", @@ -112,22 +125,28 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers Some(DateTime.parse("2018-07-02T00:00:00")), Some(createSyndicationLease(allowed = false, "test-image-7")) ), - // with fileMetadata createImageForSyndication( id = "test-image-8", rightsAcquired = true, Some(DateTime.parse("2018-07-03T00:00:00")), None, - fileMetadata = Some(FileMetadata(xmp = Map( - "foo" -> JsString("bar"), - "toolong" -> JsString(stringLongerThan(100000)) - ))) + fileMetadata = Some( + FileMetadata(xmp = + Map( + "foo" -> JsString("bar"), + "toolong" -> JsString(stringLongerThan(100000)) + ) + ) + ) ), - // no rights acquired, not available for syndication - createImageForSyndication("test-image-13", rightsAcquired = false, None, None), - + createImageForSyndication( + "test-image-13", + rightsAcquired = false, + None, + None + ), // Agency image with published usage yesterday createImageForSyndication( id = "test-image-9", @@ -137,7 +156,6 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers usageRights = agency, usages = List(createDigitalUsage(date = DateTime.now.minusDays(1))) ), - // Agency image with published just now createImageForSyndication( id = "test-image-10", @@ -147,7 +165,6 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers usageRights = agency, usages = List(createDigitalUsage(date = DateTime.now)) ), - // Screen grab with rights acquired, not eligible for syndication review createImageForSyndication( id = "test-image-11", @@ -157,7 +174,6 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers usageRights = screengrab, usages = List(createDigitalUsage(date = DateTime.now)) ), - // Staff photographer with rights acquired, eligible for syndication review createImageForSyndication( id = "test-image-12", @@ -166,7 +182,7 @@ trait ElasticSearchTestBase extends FunSpec with BeforeAndAfterAll with Matchers lease = None, usageRights = staffPhotographer, usages = List(createDigitalUsage(date = DateTime.now)) - ), + ) // TODO this test image *should* be in `AwaitingReviewForSyndication` but instead its in `BlockedForSyndication` // see https://www.elastic.co/guide/en/elasticsearch/reference/current/nested.html to understand why diff --git a/media-api/test/lib/elasticsearch/Fixtures.scala b/media-api/test/lib/elasticsearch/Fixtures.scala index 2f23cceead..c09c8e6f52 100644 --- a/media-api/test/lib/elasticsearch/Fixtures.scala +++ b/media-api/test/lib/elasticsearch/Fixtures.scala @@ -3,7 +3,12 @@ package lib.elasticsearch import java.net.URI import java.util.UUID -import com.gu.mediaservice.model.leases.{AllowSyndicationLease, DenySyndicationLease, LeasesByMedia, MediaLease} +import com.gu.mediaservice.model.leases.{ + AllowSyndicationLease, + DenySyndicationLease, + LeasesByMedia, + MediaLease +} import com.gu.mediaservice.model.usage.{UsageStatus => Status, _} import com.gu.mediaservice.model.{StaffPhotographer, _} import org.joda.time.DateTime @@ -16,12 +21,12 @@ trait Fixtures { val screengrab = Screengrab(None, None) def createImage( - id: String, - 
usageRights: UsageRights, - syndicationRights: Option[SyndicationRights] = None, - leases: Option[LeasesByMedia] = None, - usages: List[Usage] = Nil, - fileMetadata: Option[FileMetadata] = None + id: String, + usageRights: UsageRights, + syndicationRights: Option[SyndicationRights] = None, + leases: Option[LeasesByMedia] = None, + usages: List[Usage] = Nil, + fileMetadata: Option[FileMetadata] = None ): Image = { Image( id = id, @@ -35,17 +40,25 @@ trait Fixtures { size = Some(292265L), mimeType = Some(Jpeg), dimensions = Some(Dimensions(width = 2800, height = 1600)), - secureUrl = None), - thumbnail = Some(Asset( - file = new URI(s"http://file/thumbnail/$id"), - size = Some(292265L), - mimeType = Some(Jpeg), - dimensions = Some(Dimensions(width = 800, height = 100)), - secureUrl = None)), + secureUrl = None + ), + thumbnail = Some( + Asset( + file = new URI(s"http://file/thumbnail/$id"), + size = Some(292265L), + mimeType = Some(Jpeg), + dimensions = Some(Dimensions(width = 800, height = 100)), + secureUrl = None + ) + ), optimisedPng = None, fileMetadata = fileMetadata.getOrElse(FileMetadata()), userMetadata = None, - metadata = ImageMetadata(dateTaken = None, title = Some(s"Test image $id"), keywords = List("test", "es")), + metadata = ImageMetadata( + dateTaken = None, + title = Some(s"Test image $id"), + keywords = List("test", "es") + ), originalMetadata = ImageMetadata(), usageRights = usageRights, originalUsageRights = usageRights, @@ -56,18 +69,19 @@ trait Fixtures { ) } - def createImageUploadedInThePast(id: String): Image = createImage(id = id, Handout()).copy( - uploadTime = DateTime.now.minusMonths(1) - ) + def createImageUploadedInThePast(id: String): Image = + createImage(id = id, Handout()).copy( + uploadTime = DateTime.now.minusMonths(1) + ) def createImageForSyndication( - id: String, - rightsAcquired: Boolean, - rcsPublishDate: Option[DateTime], - lease: Option[MediaLease], - usages: List[Usage] = Nil, - usageRights: UsageRights = staffPhotographer, - fileMetadata: Option[FileMetadata] = None + id: String, + rightsAcquired: Boolean, + rcsPublishDate: Option[DateTime], + lease: Option[MediaLease], + usages: List[Usage] = Nil, + usageRights: UsageRights = staffPhotographer, + fileMetadata: Option[FileMetadata] = None ): Image = { val rights = List( Right("test", Some(rightsAcquired), Nil) @@ -77,10 +91,22 @@ trait Fixtures { val leaseByMedia = lease.map(l => LeasesByMedia.build(List(l))) - createImage(id, usageRights, Some(syndicationRights), leaseByMedia, usages, fileMetadata) + createImage( + id, + usageRights, + Some(syndicationRights), + leaseByMedia, + usages, + fileMetadata + ) } - def createSyndicationLease(allowed: Boolean, imageId: String, startDate: Option[DateTime] = None, endDate: Option[DateTime] = None): MediaLease = { + def createSyndicationLease( + allowed: Boolean, + imageId: String, + startDate: Option[DateTime] = None, + endDate: Option[DateTime] = None + ): MediaLease = { MediaLease( id = None, leasedBy = None, @@ -93,14 +119,25 @@ trait Fixtures { } def createSyndicationUsage(date: DateTime = DateTime.now): Usage = { - createUsage(SyndicationUsageReference, SyndicationUsage, SyndicatedUsageStatus, date) + createUsage( + SyndicationUsageReference, + SyndicationUsage, + SyndicatedUsageStatus, + date + ) } def createDigitalUsage(date: DateTime = DateTime.now): Usage = { - createUsage(ComposerUsageReference, DigitalUsage, PublishedUsageStatus, date) + createUsage( + ComposerUsageReference, + DigitalUsage, + PublishedUsageStatus, + date + ) } - def 
createPrintUsage(date: DateTime = DateTime.now): Usage = createUsage(InDesignUsageReference, PrintUsage, PendingUsageStatus, date) + def createPrintUsage(date: DateTime = DateTime.now): Usage = + createUsage(InDesignUsageReference, PrintUsage, PendingUsageStatus, date) def stringLongerThan(i: Int): String = { var out = "" @@ -110,7 +147,12 @@ trait Fixtures { out } - private def createUsage(t: UsageReferenceType, usageType: UsageType, status: Status, date: DateTime): Usage = { + private def createUsage( + t: UsageReferenceType, + usageType: UsageType, + status: Status, + date: DateTime + ): Usage = { Usage( UUID.randomUUID().toString, List(UsageReference(t)), diff --git a/media-api/test/lib/elasticsearch/QueryBuilderTest.scala b/media-api/test/lib/elasticsearch/QueryBuilderTest.scala index da84ed4ed4..d883936240 100644 --- a/media-api/test/lib/elasticsearch/QueryBuilderTest.scala +++ b/media-api/test/lib/elasticsearch/QueryBuilderTest.scala @@ -5,8 +5,16 @@ import com.sksamuel.elastic4s.ElasticDsl import com.sksamuel.elastic4s.requests.common.Operator import com.sksamuel.elastic4s.requests.searches.queries.QueryBuilderFn import com.sksamuel.elastic4s.requests.searches.queries._ -import com.sksamuel.elastic4s.requests.searches.queries.matches.{MatchPhrase, MatchQuery, MultiMatchQuery, MultiMatchQueryBuilderType} -import com.sksamuel.elastic4s.requests.searches.queries.term.{TermQuery, TermsQuery} +import com.sksamuel.elastic4s.requests.searches.queries.matches.{ + MatchPhrase, + MatchQuery, + MultiMatchQuery, + MultiMatchQueryBuilderType +} +import com.sksamuel.elastic4s.requests.searches.queries.term.{ + TermQuery, + TermsQuery +} import lib.querysyntax.Negation import org.scalatest.{FunSpec, Matchers} @@ -40,7 +48,11 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { } it("multiple conditions should give multiple must conditions") { - val query = queryBuilder.makeQuery(List(fieldPhraseMatchCondition, anotherFieldPhraseMatchCondition)).asInstanceOf[BoolQuery] + val query = queryBuilder + .makeQuery( + List(fieldPhraseMatchCondition, anotherFieldPhraseMatchCondition) + ) + .asInstanceOf[BoolQuery] query.must.size shouldBe 2 query.must(0).asInstanceOf[MatchPhrase].field shouldBe "afield" @@ -50,15 +62,20 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { it("negated conditions should be expressed using must not clauses") { val negatedCondition = Negation(fieldPhraseMatchCondition) - val query = queryBuilder.makeQuery(List(negatedCondition)).asInstanceOf[BoolQuery] + val query = + queryBuilder.makeQuery(List(negatedCondition)).asInstanceOf[BoolQuery] query.not.size shouldBe 1 query.not.head.asInstanceOf[MatchPhrase].field shouldBe "afield" query.not.head.asInstanceOf[MatchPhrase].value shouldBe "avalue" } - it("word list matches should set the AND operator so that all words need to match") { - val query = queryBuilder.makeQuery(List(wordsMatchCondition)).asInstanceOf[BoolQuery] + it( + "word list matches should set the AND operator so that all words need to match" + ) { + val query = queryBuilder + .makeQuery(List(wordsMatchCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 1 val wordsClause = query.must.head.asInstanceOf[MatchQuery] @@ -67,8 +84,11 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { wordsClause.operator shouldBe Some(Operator.And) } - it("date ranges are expressed range queries which include the lower and upper bounds") { - val query = 
queryBuilder.makeQuery(List(dateMatchCondition)).asInstanceOf[BoolQuery] + it( + "date ranges are expressed range queries which include the lower and upper bounds" + ) { + val query = + queryBuilder.makeQuery(List(dateMatchCondition)).asInstanceOf[BoolQuery] query.must.size shouldBe 1 val dateRangeClause = query.must.head.asInstanceOf[RangeQuery] @@ -77,24 +97,31 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { } it("has field conditions are expressed as exists filters") { - val query = queryBuilder.makeQuery(List(hasFieldCondition)).asInstanceOf[BoolQuery] + val query = + queryBuilder.makeQuery(List(hasFieldCondition)).asInstanceOf[BoolQuery] query.must.size shouldBe 1 val hasClause = query.must.head.asInstanceOf[BoolQuery] hasClause.must.size shouldBe 0 hasClause.filters.size shouldBe 1 hasClause.filters.head.asInstanceOf[ExistsQuery].field shouldBe "foo" - } + } it("hierarchy field phrase is expressed as a term query") { - val query = queryBuilder.makeQuery(List(hierarchyFieldPhraseCondition)).asInstanceOf[BoolQuery] + val query = queryBuilder + .makeQuery(List(hierarchyFieldPhraseCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 1 query.must.head.asInstanceOf[TermQuery].value shouldBe "foo" } - it("any field phrase queries should be applied to all of the match fields") { - val query = queryBuilder.makeQuery(List(anyFieldPhraseCondition)).asInstanceOf[BoolQuery] + it( + "any field phrase queries should be applied to all of the match fields" + ) { + val query = queryBuilder + .makeQuery(List(anyFieldPhraseCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 1 val multiMatchClause = query.must.head.asInstanceOf[MultiMatchQuery] @@ -103,19 +130,29 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { multiMatchClause.`type` shouldBe Some(MultiMatchQueryBuilderType.PHRASE) } - it("any field words queries should be applied to all of the match fields with cross fields type, operator and analyzers set") { - val query = queryBuilder.makeQuery(List(anyFieldWordsCondition)).asInstanceOf[BoolQuery] + it( + "any field words queries should be applied to all of the match fields with cross fields type, operator and analyzers set" + ) { + val query = queryBuilder + .makeQuery(List(anyFieldWordsCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 1 val multiMatchClause = query.must.head.asInstanceOf[MultiMatchQuery] multiMatchClause.text shouldBe "cats dogs" multiMatchClause.fields.map(_.field) shouldBe matchFields multiMatchClause.operator shouldBe Some(Operator.AND) - multiMatchClause.`type` shouldBe Some(MultiMatchQueryBuilderType.CROSS_FIELDS) + multiMatchClause.`type` shouldBe Some( + MultiMatchQueryBuilderType.CROSS_FIELDS + ) } - it("multiple field queries should query against the requested fields only") { - val query = queryBuilder.makeQuery(List(multipleFieldWordsCondition)).asInstanceOf[BoolQuery] + it( + "multiple field queries should query against the requested fields only" + ) { + val query = queryBuilder + .makeQuery(List(multipleFieldWordsCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 1 val multiMatchClause = query.must.head.asInstanceOf[MultiMatchQuery] @@ -124,18 +161,25 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { } it("nested queries should be expressed using nested queries") { - val query = queryBuilder.makeQuery(List(nestedCondition)).asInstanceOf[BoolQuery] + val query = + 
queryBuilder.makeQuery(List(nestedCondition)).asInstanceOf[BoolQuery] query.must.size shouldBe 1 val nestedQuery = query.must.head.asInstanceOf[NestedQuery] - val nestedMatchQuery = nestedQuery.query.asInstanceOf[BoolQuery].must.head.asInstanceOf[MatchQuery] + val nestedMatchQuery = nestedQuery.query + .asInstanceOf[BoolQuery] + .must + .head + .asInstanceOf[MatchQuery] nestedMatchQuery.field shouldBe "usages.status" nestedMatchQuery.value shouldBe "pending" nestedMatchQuery.operator shouldBe Some(Operator.AND) } it("multiple nested queries result in multiple must clauses") { - val query = queryBuilder.makeQuery(List(nestedCondition, anotherNestedCondition)).asInstanceOf[BoolQuery] + val query = queryBuilder + .makeQuery(List(nestedCondition, anotherNestedCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 2 } @@ -143,7 +187,9 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { describe("is search filter") { it("should correctly construct an is owned photo query") { - val query = queryBuilder.makeQuery(List(isOwnedPhotoCondition)).asInstanceOf[BoolQuery] + val query = queryBuilder + .makeQuery(List(isOwnedPhotoCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 1 @@ -159,7 +205,9 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { } it("should correctly construct an is owned illustration query") { - val query = queryBuilder.makeQuery(List(isOwnedIllustrationCondition)).asInstanceOf[BoolQuery] + val query = queryBuilder + .makeQuery(List(isOwnedIllustrationCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 1 @@ -175,7 +223,9 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { } it("should correctly construct an is owned image query") { - val query = queryBuilder.makeQuery(List(isOwnedImageCondition)).asInstanceOf[BoolQuery] + val query = queryBuilder + .makeQuery(List(isOwnedImageCondition)) + .asInstanceOf[BoolQuery] query.must.size shouldBe 1 @@ -191,7 +241,8 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { } it("should return the match none query on an invalid is query") { - val query = queryBuilder.makeQuery(List(isInvalidCondition)).asInstanceOf[BoolQuery] + val query = + queryBuilder.makeQuery(List(isInvalidCondition)).asInstanceOf[BoolQuery] query.must.size shouldBe 1 query.must.head shouldBe ElasticDsl.matchNoneQuery() @@ -199,7 +250,8 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { it("should return the match all query when no agencies are over quota") { val qBuilder = new QueryBuilder(matchFields, () => List.empty) - val query = qBuilder.makeQuery(List(isUnderQuotaCondition)).asInstanceOf[BoolQuery] + val query = + qBuilder.makeQuery(List(isUnderQuotaCondition)).asInstanceOf[BoolQuery] query.must.size shouldBe 1 query.must.head shouldBe ElasticDsl.matchAllQuery() } @@ -208,7 +260,8 @@ class QueryBuilderTest extends FunSpec with Matchers with ConditionFixtures { def overQuotaAgencies = List(Agency("Getty Images"), Agency("AP")) val qBuilder = new QueryBuilder(matchFields, () => overQuotaAgencies) - val query = qBuilder.makeQuery(List(isUnderQuotaCondition)).asInstanceOf[BoolQuery] + val query = + qBuilder.makeQuery(List(isUnderQuotaCondition)).asInstanceOf[BoolQuery] query.must.size shouldBe 1 val mustQuery = query.must.head.asInstanceOf[BoolQuery] diff --git a/media-api/test/scala/lib/ImageExtrasTest.scala b/media-api/test/scala/lib/ImageExtrasTest.scala index 865275a07c..5b0ddc3842 100644 --- 
a/media-api/test/scala/lib/ImageExtrasTest.scala +++ b/media-api/test/scala/lib/ImageExtrasTest.scala @@ -48,40 +48,74 @@ class ImageExtrasTest extends FunSpec with Matchers with MockitoSugar { ) val baseValidityMap = Map( - "paid_image" -> ValidityCheck(invalid = true,overrideable = true,shouldOverride = false), - "missing_description" -> ValidityCheck(invalid = true,overrideable = false,shouldOverride = false), - "missing_credit" -> ValidityCheck(invalid = true,overrideable = false,shouldOverride = false), - "over_quota" -> ValidityCheck(invalid = false,overrideable = true,shouldOverride = false), - "current_deny_lease" -> ValidityCheck(invalid = false,overrideable = true,shouldOverride = false), - "no_rights" -> ValidityCheck(invalid = true,overrideable = true,shouldOverride = false), - "conditional_paid" -> ValidityCheck(invalid = false,overrideable = true,shouldOverride = false) + "paid_image" -> ValidityCheck( + invalid = true, + overrideable = true, + shouldOverride = false + ), + "missing_description" -> ValidityCheck( + invalid = true, + overrideable = false, + shouldOverride = false + ), + "missing_credit" -> ValidityCheck( + invalid = true, + overrideable = false, + shouldOverride = false + ), + "over_quota" -> ValidityCheck( + invalid = false, + overrideable = true, + shouldOverride = false + ), + "current_deny_lease" -> ValidityCheck( + invalid = false, + overrideable = true, + shouldOverride = false + ), + "no_rights" -> ValidityCheck( + invalid = true, + overrideable = true, + shouldOverride = false + ), + "conditional_paid" -> ValidityCheck( + invalid = false, + overrideable = true, + shouldOverride = false + ) ) describe("Invalid Images") { it("should generate validityMaps") { - baseValidityMap should be(ImageExtras.validityMap(baseImage, withWritePermission = false)) + baseValidityMap should be( + ImageExtras.validityMap(baseImage, withWritePermission = false) + ) } it("should report validity") { - val validityMap = ImageExtras.validityMap(baseImage, withWritePermission = false) + val validityMap = + ImageExtras.validityMap(baseImage, withWritePermission = false) val validity = ImageExtras.isValid(validityMap) validity should be(false) } it("should report overriden validity") { val overrideableImage = baseImage.copy(metadata = validImageMetadata) - val validityMap = ImageExtras.validityMap(overrideableImage, withWritePermission = true) + val validityMap = + ImageExtras.validityMap(overrideableImage, withWritePermission = true) val validity = ImageExtras.isValid(validityMap) validity should be(true) } it("should report invalid when fields cannot be overriden") { - val validityMap = ImageExtras.validityMap(baseImage, withWritePermission = true) + val validityMap = + ImageExtras.validityMap(baseImage, withWritePermission = true) val validity = ImageExtras.isValid(validityMap) validity should be(false) } it("should report all invalid reasons") { - val validityMap = ImageExtras.validityMap(baseImage, withWritePermission = false) + val validityMap = + ImageExtras.validityMap(baseImage, withWritePermission = false) val invalidReasons = ImageExtras.invalidReasons(validityMap) val expectedInvalidReasons = Map( "paid_image" -> "Paid imagery requires a lease", @@ -94,7 +128,8 @@ class ImageExtrasTest extends FunSpec with Matchers with MockitoSugar { } it("should report all invalid reasons if write permissions are true") { - val validityMap = ImageExtras.validityMap(baseImage, withWritePermission = true) + val validityMap = + ImageExtras.validityMap(baseImage, withWritePermission = 
true) val invalidReasons = ImageExtras.invalidReasons(validityMap) val expectedInvalidReasons = Map( "missing_description" -> "Missing description *", diff --git a/media-api/test/scala/lib/querysyntax/ParserTest.scala b/media-api/test/scala/lib/querysyntax/ParserTest.scala index c32c1cee77..2e3c15ec8e 100644 --- a/media-api/test/scala/lib/querysyntax/ParserTest.scala +++ b/media-api/test/scala/lib/querysyntax/ParserTest.scala @@ -5,54 +5,72 @@ import org.scalatest.{BeforeAndAfter, FunSpec, Matchers} import org.joda.time.DateTime import org.joda.time.DateTimeUtils -class ParserTest extends FunSpec with Matchers with BeforeAndAfter with ImageFields { - val creditField = SingleField(getFieldPath("credit")) - val bylineField = SingleField(getFieldPath("byline")) - val labelsField = SingleField(getFieldPath("labels")) +class ParserTest + extends FunSpec + with Matchers + with BeforeAndAfter + with ImageFields { + val creditField = SingleField(getFieldPath("credit")) + val bylineField = SingleField(getFieldPath("byline")) + val labelsField = SingleField(getFieldPath("labels")) val uploadTimeField = SingleField(getFieldPath("uploadTime")) describe("text") { it("should match single terms") { - Parser.run("cats") should be (List(Match(AnyField, Words("cats")))) + Parser.run("cats") should be(List(Match(AnyField, Words("cats")))) } it("should match single terms with accents") { - Parser.run("séb") should be (List(Match(AnyField, Words("séb")))) + Parser.run("séb") should be(List(Match(AnyField, Words("séb")))) } it("should match single terms with curly apostrophe") { - Parser.run("l’apostrophe") should be (List(Match(AnyField, Words("l’apostrophe")))) + Parser.run("l’apostrophe") should be( + List(Match(AnyField, Words("l’apostrophe"))) + ) } it("should ignore surrounding whitespace") { - Parser.run(" cats ") should be (List(Match(AnyField, Words("cats")))) + Parser.run(" cats ") should be(List(Match(AnyField, Words("cats")))) } it("should match multiple terms") { - Parser.run("cats dogs") should be (List(Match(AnyField, Words("cats dogs")))) + Parser.run("cats dogs") should be( + List(Match(AnyField, Words("cats dogs"))) + ) } it("should match multiple terms separated by multiple whitespace") { - Parser.run("cats dogs") should be (List(Match(AnyField, Words("cats dogs")))) + Parser.run("cats dogs") should be( + List(Match(AnyField, Words("cats dogs"))) + ) } it("should match multiple terms including 'in'") { - Parser.run("cats in dogs") should be (List(Match(AnyField, Words("cats in dogs")))) + Parser.run("cats in dogs") should be( + List(Match(AnyField, Words("cats in dogs"))) + ) } it("should match multiple terms including 'by'") { - Parser.run("cats by dogs") should be (List(Match(AnyField, Words("cats by dogs")))) + Parser.run("cats by dogs") should be( + List(Match(AnyField, Words("cats by dogs"))) + ) } it("should match multiple terms including apostrophes") { - Parser.run("it's a cat") should be (List(Match(AnyField, Words("it's a cat")))) + Parser.run("it's a cat") should be( + List(Match(AnyField, Words("it's a cat"))) + ) } it("should match multiple terms including commas") { - Parser.run("cats, dogs") should be (List(Match(AnyField, Words("cats, dogs")))) + Parser.run("cats, dogs") should be( + List(Match(AnyField, Words("cats, dogs"))) + ) } it("should match multiple terms including single double quotes") { - Parser.run("5\" cats") should be (List(Match(AnyField, Words("5\" cats")))) + Parser.run("5\" cats") should be(List(Match(AnyField, Words("5\" cats")))) } // it("should match 
multiple terms including '#' character") { @@ -61,129 +79,163 @@ class ParserTest extends FunSpec with Matchers with BeforeAndAfter with ImageFie // } it("should match a quoted phrase") { - Parser.run(""""cats dogs"""") should be (List(Match(AnyField, Phrase("cats dogs")))) + Parser.run(""""cats dogs"""") should be( + List(Match(AnyField, Phrase("cats dogs"))) + ) } it("should match faceted terms") { - Parser.run("credit:cats") should be (List(Match(creditField, Words("cats")))) + Parser.run("credit:cats") should be( + List(Match(creditField, Words("cats"))) + ) } it("should match multiple faceted terms on the same facet") { - Parser.run("label:cats label:dogs") should be (List( - Match(labelsField, Words("cats")), - Match(labelsField, Words("dogs")) - )) + Parser.run("label:cats label:dogs") should be( + List( + Match(labelsField, Words("cats")), + Match(labelsField, Words("dogs")) + ) + ) } it("should match multiple faceted terms on different facets") { - Parser.run("credit:cats label:dogs") should be (List( - Match(creditField, Words("cats")), - Match(labelsField, Words("dogs")) - )) + Parser.run("credit:cats label:dogs") should be( + List( + Match(creditField, Words("cats")), + Match(labelsField, Words("dogs")) + ) + ) } } - describe("date") { describe("exact") { it("should match year") { - Parser.run("date:2014") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), - new DateTime("2014-12-31T23:59:59.999Z") + Parser.run("date:2014") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-12-31T23:59:59.999Z") + ) ) - )) + ) ) } it("should match month, quoted") { - Parser.run("""date:"january 2014"""") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), - new DateTime("2014-01-31T23:59:59.999Z") + Parser.run("""date:"january 2014"""") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-01-31T23:59:59.999Z") + ) ) - )) + ) ) } it("should match month, with dot") { - Parser.run("date:january.2014") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), - new DateTime("2014-01-31T23:59:59.999Z") + Parser.run("date:january.2014") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-01-31T23:59:59.999Z") + ) ) - )) + ) ) } it("should match human date, quoted") { - Parser.run("""date:"1 january 2014"""") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), - new DateTime("2014-01-01T23:59:59.999Z") + Parser.run("""date:"1 january 2014"""") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-01-01T23:59:59.999Z") + ) ) - )) + ) ) } it("should match human date, with dot") { - Parser.run("date:1.january.2014") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), - new DateTime("2014-01-01T23:59:59.999Z") + Parser.run("date:1.january.2014") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-01-01T23:59:59.999Z") + ) ) - )) + ) ) } it("should match human date, with dot and capitals") { - Parser.run("date:1.January.2014") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), 
- new DateTime("2014-01-01T23:59:59.999Z") + Parser.run("date:1.January.2014") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-01-01T23:59:59.999Z") + ) ) - )) + ) ) } it("should match date with slashes") { - Parser.run("date:1/1/2014") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), - new DateTime("2014-01-01T23:59:59.999Z") + Parser.run("date:1/1/2014") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-01-01T23:59:59.999Z") + ) ) - )) + ) ) } it("should match date with slashes and zero-padding") { - Parser.run("date:01/01/2014") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), - new DateTime("2014-01-01T23:59:59.999Z") + Parser.run("date:01/01/2014") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-01-01T23:59:59.999Z") + ) ) - )) + ) ) } it("should match date") { - Parser.run("date:2014-01-01") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2014-01-01T00:00:00.000Z"), - new DateTime("2014-01-01T23:59:59.999Z") + Parser.run("date:2014-01-01") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2014-01-01T00:00:00.000Z"), + new DateTime("2014-01-01T23:59:59.999Z") + ) ) - )) + ) ) } @@ -191,7 +243,6 @@ class ParserTest extends FunSpec with Matchers with BeforeAndAfter with ImageFie } - describe("relative") { // Mock current time so we can assert based on the fake "now" before { @@ -204,24 +255,30 @@ class ParserTest extends FunSpec with Matchers with BeforeAndAfter with ImageFie } it("should match today") { - Parser.run("date:today") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2000-01-02T00:00:00.000Z"), - new DateTime("2000-01-02T23:59:59.999Z") + Parser.run("date:today") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2000-01-02T00:00:00.000Z"), + new DateTime("2000-01-02T23:59:59.999Z") + ) ) - )) + ) ) } it("should match yesterday") { - Parser.run("date:yesterday") should be (List( - Match(uploadTimeField, - DateRange( - new DateTime("2000-01-01T00:00:00.000Z"), - new DateTime("2000-01-01T23:59:59.999Z") + Parser.run("date:yesterday") should be( + List( + Match( + uploadTimeField, + DateRange( + new DateTime("2000-01-01T00:00:00.000Z"), + new DateTime("2000-01-01T23:59:59.999Z") + ) ) - )) + ) ) } @@ -236,38 +293,48 @@ class ParserTest extends FunSpec with Matchers with BeforeAndAfter with ImageFie describe("nested usage") { it("should match nested usage status query") { - Parser.run("usages@status:pending") should be (List( - Nested( - SingleField("usages"), - SingleField("usages.status"), - Phrase("pending") - )) + Parser.run("usages@status:pending") should be( + List( + Nested( + SingleField("usages"), + SingleField("usages.status"), + Phrase("pending") + ) + ) ) } it("should match nested usage reference query") { - Parser.run("usages@reference:foo") should be (List( - Nested( - SingleField("usages"), - MultipleField(List( - "usages.references.uri", - "usages.references.name" - )), - Phrase("foo") - )) + Parser.run("usages@reference:foo") should be( + List( + Nested( + SingleField("usages"), + MultipleField( + List( + "usages.references.uri", + "usages.references.name" + ) + ), + Phrase("foo") + ) + ) ) } it("should match nested usage reference query with 
url supplied") { - Parser.run("usages@reference:https://generic.cms/1234") should be (List( - Nested( - SingleField("usages"), - MultipleField(List( - "usages.references.uri", - "usages.references.name" - )), - Phrase("https://generic.cms/1234") - )) + Parser.run("usages@reference:https://generic.cms/1234") should be( + List( + Nested( + SingleField("usages"), + MultipleField( + List( + "usages.references.uri", + "usages.references.name" + ) + ), + Phrase("https://generic.cms/1234") + ) + ) ) } } @@ -275,26 +342,33 @@ class ParserTest extends FunSpec with Matchers with BeforeAndAfter with ImageFie describe("date constraint") { it("should match date constraint") { - Parser.run(" messageConsumer.actorSystem.terminate() + context.lifecycle.addStopHook { () => + messageConsumer.actorSystem.terminate() } - val editsController = new EditsController(auth, store, notifications, config, controllerComponents) + val editsController = new EditsController( + auth, + store, + notifications, + config, + controllerComponents + ) val controller = new EditsApi(auth, config, controllerComponents) - override val router = new Routes(httpErrorHandler, controller, editsController, management) + override val router = + new Routes(httpErrorHandler, controller, editsController, management) } diff --git a/metadata-editor/app/controllers/EditsApi.scala b/metadata-editor/app/controllers/EditsApi.scala index 2d49740cab..e4d0d9e041 100644 --- a/metadata-editor/app/controllers/EditsApi.scala +++ b/metadata-editor/app/controllers/EditsApi.scala @@ -11,20 +11,23 @@ import play.api.mvc.{BaseController, ControllerComponents} import scala.concurrent.ExecutionContext -class EditsApi(auth: Authentication, config: EditsConfig, - override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers { +class EditsApi( + auth: Authentication, + config: EditsConfig, + override val controllerComponents: ControllerComponents +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers { - - // TODO: add links to the different responses esp. to the reference image + // TODO: add links to the different responses esp. 
to the reference image val indexResponse = { val indexData = Map("description" -> "This is the Metadata Editor Service") val indexLinks = List( - Link("edits", s"${config.rootUri}/metadata/{id}"), - Link("archived", s"${config.rootUri}/metadata/{id}/archived"), - Link("labels", s"${config.rootUri}/metadata/{id}/labels"), - Link("usageRights", s"${config.rootUri}/metadata/{id}/usage-rights"), - Link("metadata", s"${config.rootUri}/metadata/{id}/metadata"), + Link("edits", s"${config.rootUri}/metadata/{id}"), + Link("archived", s"${config.rootUri}/metadata/{id}/archived"), + Link("labels", s"${config.rootUri}/metadata/{id}/labels"), + Link("usageRights", s"${config.rootUri}/metadata/{id}/usage-rights"), + Link("metadata", s"${config.rootUri}/metadata/{id}/metadata"), Link("usage-rights-list", s"${config.rootUri}/usage-rights/categories") ) respond(indexData, indexLinks) @@ -42,28 +45,29 @@ class EditsApi(auth: Authentication, config: EditsConfig, } case class CategoryResponse( - value: String, - name: String, - cost: String, - description: String, - defaultRestrictions: Option[String], - caution: Option[String], - properties: List[UsageRightsProperty] = List() + value: String, + name: String, + cost: String, + description: String, + defaultRestrictions: Option[String], + caution: Option[String], + properties: List[UsageRightsProperty] = List() ) object CategoryResponse { // I'd like to have an override of the `apply`, but who knows how you do that // with the JSON parsing stuff def fromUsageRights(u: UsageRightsSpec): CategoryResponse = CategoryResponse( - value = u.category, - name = u.name, - cost = u.defaultCost.getOrElse(Pay).toString, - description = u.description, + value = u.category, + name = u.name, + cost = u.defaultCost.getOrElse(Pay).toString, + description = u.description, defaultRestrictions = u.defaultRestrictions, - caution = u.caution, - properties = UsageRightsProperty.getPropertiesForSpec(u) + caution = u.caution, + properties = UsageRightsProperty.getPropertiesForSpec(u) ) - implicit val categoryResponseWrites: Writes[CategoryResponse] = Json.writes[CategoryResponse] + implicit val categoryResponseWrites: Writes[CategoryResponse] = + Json.writes[CategoryResponse] } diff --git a/metadata-editor/app/controllers/EditsController.scala b/metadata-editor/app/controllers/EditsController.scala index f2d867326e..57b23ab822 100644 --- a/metadata-editor/app/controllers/EditsController.scala +++ b/metadata-editor/app/controllers/EditsController.scala @@ -1,6 +1,5 @@ package controllers - import java.net.URI import java.net.URLDecoder.decode @@ -20,7 +19,6 @@ import play.api.mvc.{BaseController, ControllerComponents} import scala.concurrent.{ExecutionContext, Future} - // FIXME: the argoHelpers are all returning `Ok`s (200) // Some of these responses should be `Accepted` (202) // TODO: Look at adding actions e.g. 
to collections / sets where we could `PUT` @@ -39,9 +37,16 @@ import scala.concurrent.{ExecutionContext, Future} // } // } -class EditsController(auth: Authentication, store: EditsStore, notifications: Notifications, config: EditsConfig, - override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers with EditsResponse { +class EditsController( + auth: Authentication, + store: EditsStore, + notifications: Notifications, + config: EditsConfig, + override val controllerComponents: ControllerComponents +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers + with EditsResponse { import UsageRightsMetadataMapper.usageRightsToMetadata @@ -53,7 +58,8 @@ class EditsController(auth: Authentication, store: EditsStore, notifications: No def getAllMetadata(id: String) = auth.async { val emptyResponse = respond(Edits.getEmpty)(editsEntity(id)) store.get(id) map { dynamoEntry => - dynamoEntry.asOpt[Edits] + dynamoEntry + .asOpt[Edits] .map(respond(_)(editsEntity(id))) .getOrElse(emptyResponse) } recover { case NoItemFound => emptyResponse } @@ -69,104 +75,122 @@ class EditsController(auth: Authentication, store: EditsStore, notifications: No def getArchived(id: String) = auth.async { store.booleanGet(id, "archived") map { archived => respond(archived.getOrElse(false)) - } recover { - case NoItemFound => respond(false) + } recover { case NoItemFound => + respond(false) } } def setArchived(id: String) = auth.async(parse.json) { implicit req => - (req.body \ "data").validate[Boolean].fold( - errors => - Future.successful(BadRequest(errors.toString())), - archived => - store.booleanSetOrRemove(id, "archived", archived) - .map(publish(id)) - .map(edits => respond(edits.archived)) - ) + (req.body \ "data") + .validate[Boolean] + .fold( + errors => Future.successful(BadRequest(errors.toString())), + archived => + store + .booleanSetOrRemove(id, "archived", archived) + .map(publish(id)) + .map(edits => respond(edits.archived)) + ) } def unsetArchived(id: String) = auth.async { - store.removeKey(id, "archived") + store + .removeKey(id, "archived") .map(publish(id)) .map(_ => respond(false)) } - def getLabels(id: String) = auth.async { - store.setGet(id, "labels") + store + .setGet(id, "labels") .map(labelsCollection(id, _)) - .map {case (uri, labels) => respondCollection(labels)} recover { + .map { case (uri, labels) => respondCollection(labels) } recover { case NoItemFound => respond(Array[String]()) } } def getPhotoshoot(id: String) = auth.async { - store.jsonGet(id, "photoshoot").map(dynamoEntry => { - (dynamoEntry \ "photoshoot").toOption match { - case Some(photoshoot) => respond(photoshoot.as[Photoshoot]) - case None => respondNotFound("No photoshoot found") - } - }) recover { - case NoItemFound => respondNotFound("No photoshoot found") + store + .jsonGet(id, "photoshoot") + .map(dynamoEntry => { + (dynamoEntry \ "photoshoot").toOption match { + case Some(photoshoot) => respond(photoshoot.as[Photoshoot]) + case None => respondNotFound("No photoshoot found") + } + }) recover { case NoItemFound => + respondNotFound("No photoshoot found") } } - def setPhotoshoot(id: String) = auth.async(parse.json) { req => { - (req.body \ "data").asOpt[Photoshoot].map(photoshoot => { - store.jsonAdd(id, "photoshoot", caseClassToMap(photoshoot)) - .map(publish(id, "update-image-photoshoot")) - .map(_ => respond(photoshoot)) - }).getOrElse( - Future.successful(respondError(BadRequest, "invalid-form-data", "Invalid form data")) - 
) - }} + def setPhotoshoot(id: String) = auth.async(parse.json) { req => + { + (req.body \ "data") + .asOpt[Photoshoot] + .map(photoshoot => { + store + .jsonAdd(id, "photoshoot", caseClassToMap(photoshoot)) + .map(publish(id, "update-image-photoshoot")) + .map(_ => respond(photoshoot)) + }) + .getOrElse( + Future.successful( + respondError(BadRequest, "invalid-form-data", "Invalid form data") + ) + ) + } + } def deletePhotoshoot(id: String) = auth.async { - store.removeKey(id, "photoshoot") + store + .removeKey(id, "photoshoot") .map(publish(id, "update-image-photoshoot")) .map(_ => Accepted) } def addLabels(id: String) = auth.async(parse.json) { req => - (req.body \ "data").validate[List[String]].fold( - errors => - Future.successful(BadRequest(errors.toString())), - labels => - store - .setAdd(id, "labels", labels) - .map(publish(id)) - .map(edits => labelsCollection(id, edits.labels.toSet)) - .map { case (uri, l) => respondCollection(l) } recover { + (req.body \ "data") + .validate[List[String]] + .fold( + errors => Future.successful(BadRequest(errors.toString())), + labels => + store + .setAdd(id, "labels", labels) + .map(publish(id)) + .map(edits => labelsCollection(id, edits.labels.toSet)) + .map { case (uri, l) => respondCollection(l) } recover { case _: AmazonServiceException => BadRequest } - ) + ) } def removeLabel(id: String, label: String) = auth.async { - store.setDelete(id, "labels", decodeUriParam(label)) + store + .setDelete(id, "labels", decodeUriParam(label)) .map(publish(id)) .map(edits => labelsCollection(id, edits.labels.toSet)) - .map {case (uri, labels) => respondCollection(labels, uri=Some(uri))} + .map { case (uri, labels) => respondCollection(labels, uri = Some(uri)) } } - def getMetadata(id: String) = auth.async { store.jsonGet(id, "metadata").map { dynamoEntry => val metadata = (dynamoEntry \ "metadata").as[ImageMetadata] respond(metadata) - } recover { - case NoItemFound => respond(Json.toJson(JsObject(Nil))) + } recover { case NoItemFound => + respond(Json.toJson(JsObject(Nil))) } } def setMetadata(id: String) = auth.async(parse.json) { req => - (req.body \ "data").validate[ImageMetadata].fold( - errors => Future.successful(BadRequest(errors.toString())), - metadata => - store.jsonAdd(id, "metadata", metadataAsMap(metadata)) - .map(publish(id)) - .map(edits => respond(edits.metadata)) - ) + (req.body \ "data") + .validate[ImageMetadata] + .fold( + errors => Future.successful(BadRequest(errors.toString())), + metadata => + store + .jsonAdd(id, "metadata", metadataAsMap(metadata)) + .map(publish(id)) + .map(edits => respond(edits.metadata)) + ) } def setMetadataFromUsageRights(id: String) = auth.async { req => @@ -181,15 +205,16 @@ class EditsController(auth: Authentication, store: EditsStore, notifications: No credit = metadata.credit orElse originalMetadata.credit ) - store.jsonAdd(id, "metadata", metadataAsMap(mergedMetadata)) + store + .jsonAdd(id, "metadata", metadataAsMap(mergedMetadata)) .map(publish(id)) .map(edits => respond(edits.metadata, uri = Some(metadataUri(id)))) } getOrElse { // just return the unmodified Future.successful(respond(edits.metadata, uri = Some(metadataUri(id)))) } - } recover { - case NoItemFound => respondError(NotFound, "item-not-found", "Could not find image") + } recover { case NoItemFound => + respondError(NotFound, "item-not-found", "Could not find image") } } @@ -197,17 +222,25 @@ class EditsController(auth: Authentication, store: EditsStore, notifications: No store.jsonGet(id, "usageRights").map { dynamoEntry => val 
usageRights = (dynamoEntry \ "usageRights").as[UsageRights] respond(usageRights) - } recover { - case NoItemFound => respondNotFound("No usage rights overrides found") + } recover { case NoItemFound => + respondNotFound("No usage rights overrides found") } } def setUsageRights(id: String) = auth.async(parse.json) { req => - (req.body \ "data").asOpt[UsageRights].map(usageRight => { - store.jsonAdd(id, "usageRights", caseClassToMap(usageRight)) - .map(publish(id)) - .map(edits => respond(usageRight)) - }).getOrElse(Future.successful(respondError(BadRequest, "invalid-form-data", "Invalid form data"))) + (req.body \ "data") + .asOpt[UsageRights] + .map(usageRight => { + store + .jsonAdd(id, "usageRights", caseClassToMap(usageRight)) + .map(publish(id)) + .map(edits => respond(usageRight)) + }) + .getOrElse( + Future.successful( + respondError(BadRequest, "invalid-form-data", "Invalid form data") + ) + ) } def deleteUsageRights(id: String) = auth.async { req => @@ -215,15 +248,23 @@ class EditsController(auth: Authentication, store: EditsStore, notifications: No } // TODO: Move this to the dynamo lib - def caseClassToMap[T](caseClass: T)(implicit tjs: Writes[T]): Map[String, JsValue] = + def caseClassToMap[T](caseClass: T)(implicit + tjs: Writes[T] + ): Map[String, JsValue] = Json.toJson[T](caseClass).as[JsObject].as[Map[String, JsValue]] - def labelsCollection(id: String, labels: Set[String]): (URI, Seq[EmbeddedEntity[String]]) = + def labelsCollection( + id: String, + labels: Set[String] + ): (URI, Seq[EmbeddedEntity[String]]) = (labelsUri(id), labels.map(setUnitEntity(id, "labels", _)).toSeq) - def publish(id: String, subject: String = "update-image-user-metadata")(metadata: JsObject): Edits = { + def publish(id: String, subject: String = "update-image-user-metadata")( + metadata: JsObject + ): Edits = { val edits = metadata.as[Edits] - val updateMessage = UpdateMessage(subject = subject, id = Some(id), edits = Some(edits)) + val updateMessage = + UpdateMessage(subject = subject, id = Some(id), edits = Some(edits)) notifications.publish(updateMessage) edits } @@ -232,7 +273,6 @@ class EditsController(auth: Authentication, store: EditsStore, notifications: No (Json.toJson(metadata).as[JsObject]).as[Map[String, JsValue]] } - } case class EditsValidationError(key: String, message: String) extends Throwable diff --git a/metadata-editor/app/lib/EditsConfig.scala b/metadata-editor/app/lib/EditsConfig.scala index ea29b60921..74f8322b83 100644 --- a/metadata-editor/app/lib/EditsConfig.scala +++ b/metadata-editor/app/lib/EditsConfig.scala @@ -3,8 +3,8 @@ package lib import com.amazonaws.regions.{Region, RegionUtils} import com.gu.mediaservice.lib.config.{CommonConfig, GridConfigResources} - -class EditsConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) { +class EditsConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) { val dynamoRegion: Region = RegionUtils.getRegion(string("aws.region")) val collectionsBucket: String = string("s3.collections.bucket") diff --git a/metadata-editor/app/lib/EditsStore.scala b/metadata-editor/app/lib/EditsStore.scala index e9c9946f64..ddc0b6b980 100644 --- a/metadata-editor/app/lib/EditsStore.scala +++ b/metadata-editor/app/lib/EditsStore.scala @@ -2,4 +2,5 @@ package lib import com.gu.mediaservice.lib.aws.DynamoDB -class EditsStore(config: EditsConfig) extends DynamoDB(config, config.editsTable) +class EditsStore(config: EditsConfig) + extends DynamoDB(config, config.editsTable) diff --git 
a/metadata-editor/app/lib/MetadataEditorMetrics.scala b/metadata-editor/app/lib/MetadataEditorMetrics.scala index 592e88e11f..7d2ba5d453 100644 --- a/metadata-editor/app/lib/MetadataEditorMetrics.scala +++ b/metadata-editor/app/lib/MetadataEditorMetrics.scala @@ -2,7 +2,8 @@ package lib import com.gu.mediaservice.lib.metrics.CloudWatchMetrics -class MetadataEditorMetrics(config: EditsConfig) extends CloudWatchMetrics(s"${config.stage}/MetadataEditor", config) { +class MetadataEditorMetrics(config: EditsConfig) + extends CloudWatchMetrics(s"${config.stage}/MetadataEditor", config) { val snsMessage = new CountMetric("SNSMessage") diff --git a/metadata-editor/app/lib/MetadataSqsMessageConsumer.scala b/metadata-editor/app/lib/MetadataSqsMessageConsumer.scala index 060fd06704..40dacfb7b8 100644 --- a/metadata-editor/app/lib/MetadataSqsMessageConsumer.scala +++ b/metadata-editor/app/lib/MetadataSqsMessageConsumer.scala @@ -6,15 +6,24 @@ import play.api.libs.json._ import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future -class MetadataSqsMessageConsumer(config: EditsConfig, metadataEditorMetrics: MetadataEditorMetrics, store: EditsStore) extends SqsMessageConsumer( - config.queueUrl, config, metadataEditorMetrics.snsMessage) { +class MetadataSqsMessageConsumer( + config: EditsConfig, + metadataEditorMetrics: MetadataEditorMetrics, + store: EditsStore +) extends SqsMessageConsumer( + config.queueUrl, + config, + metadataEditorMetrics.snsMessage + ) { - override def chooseProcessor(subject: String): Option[JsValue => Future[Any]] = - PartialFunction.condOpt(subject) { - case "image-deleted" => processDeletedImage + override def chooseProcessor( + subject: String + ): Option[JsValue => Future[Any]] = + PartialFunction.condOpt(subject) { case "image-deleted" => + processDeletedImage } def processDeletedImage(message: JsValue) = Future { - withImageId(message)(id => store.deleteItem(id)) + withImageId(message)(id => store.deleteItem(id)) } } diff --git a/metadata-editor/app/lib/Notifications.scala b/metadata-editor/app/lib/Notifications.scala index b46f79d382..4d382cf7ba 100644 --- a/metadata-editor/app/lib/Notifications.scala +++ b/metadata-editor/app/lib/Notifications.scala @@ -2,4 +2,5 @@ package lib import com.gu.mediaservice.lib.aws.ThrallMessageSender -class Notifications(config: EditsConfig) extends ThrallMessageSender(config.thrallKinesisStreamConfig) +class Notifications(config: EditsConfig) + extends ThrallMessageSender(config.thrallKinesisStreamConfig) diff --git a/metadata-editor/app/lib/UsageRightsMetadataMapper.scala b/metadata-editor/app/lib/UsageRightsMetadataMapper.scala index bdefbd7932..a7a3a93758 100644 --- a/metadata-editor/app/lib/UsageRightsMetadataMapper.scala +++ b/metadata-editor/app/lib/UsageRightsMetadataMapper.scala @@ -5,16 +5,27 @@ import com.gu.mediaservice.model._ object UsageRightsMetadataMapper { def usageRightsToMetadata(usageRights: UsageRights): Option[ImageMetadata] = { - val toImageMetadata: PartialFunction[UsageRights, ImageMetadata] = (ur: UsageRights) => ur match { - case u: StaffPhotographer => ImageMetadata(byline = Some(u.photographer), credit = Some(u.publication)) - case u: ContractPhotographer => ImageMetadata(byline = Some(u.photographer), credit = u.publication) - case u: CommissionedPhotographer => ImageMetadata(byline = Some(u.photographer), credit = u.publication) - case u: ContractIllustrator => ImageMetadata(byline = Some(u.creator), credit = u.publication) - case u: StaffIllustrator => ImageMetadata(byline = 
Some(u.creator), credit = Some(u.creator)) - case u: CommissionedIllustrator => ImageMetadata(byline = Some(u.creator), credit = u.publication) - case u: Composite => ImageMetadata(credit = Some(u.suppliers)) - case u: Screengrab => ImageMetadata(credit = u.source) - } + val toImageMetadata: PartialFunction[UsageRights, ImageMetadata] = + (ur: UsageRights) => + ur match { + case u: StaffPhotographer => + ImageMetadata( + byline = Some(u.photographer), + credit = Some(u.publication) + ) + case u: ContractPhotographer => + ImageMetadata(byline = Some(u.photographer), credit = u.publication) + case u: CommissionedPhotographer => + ImageMetadata(byline = Some(u.photographer), credit = u.publication) + case u: ContractIllustrator => + ImageMetadata(byline = Some(u.creator), credit = u.publication) + case u: StaffIllustrator => + ImageMetadata(byline = Some(u.creator), credit = Some(u.creator)) + case u: CommissionedIllustrator => + ImageMetadata(byline = Some(u.creator), credit = u.publication) + case u: Composite => ImageMetadata(credit = Some(u.suppliers)) + case u: Screengrab => ImageMetadata(credit = u.source) + } // if we don't match, return None toImageMetadata.lift(usageRights) diff --git a/metadata-editor/app/model/UsageRightsProperty.scala b/metadata-editor/app/model/UsageRightsProperty.scala index 6db0a0e925..f821627bd9 100644 --- a/metadata-editor/app/model/UsageRightsProperty.scala +++ b/metadata-editor/app/model/UsageRightsProperty.scala @@ -4,119 +4,209 @@ import play.api.libs.json._ import com.gu.mediaservice.lib.config.{MetadataConfig, UsageRightsConfig} import com.gu.mediaservice.model._ - // TODO: We'll be able to deprecate this and build it up directly from case // classes. // TODO: turn this into a case class? case class UsageRightsProperty( - name: String, - label: String, - `type`: String, - required: Boolean, - options: Option[List[String]] = None, - optionsMap: Option[Map[String, List[String]]] = None, - optionsMapKey: Option[String] = None, - examples: Option[String] = None + name: String, + label: String, + `type`: String, + required: Boolean, + options: Option[List[String]] = None, + optionsMap: Option[Map[String, List[String]]] = None, + optionsMapKey: Option[String] = None, + examples: Option[String] = None ) - object UsageRightsProperty { type OptionsMap = Map[String, List[String]] type Options = List[String] - import MetadataConfig.{contractPhotographersMap, staffPhotographersMap, contractIllustratorsMap, staffIllustrators, creativeCommonsLicense} + import MetadataConfig.{ + contractPhotographersMap, + staffPhotographersMap, + contractIllustratorsMap, + staffIllustrators, + creativeCommonsLicense + } import UsageRightsConfig.freeSuppliers - implicit val jsonWrites: Writes[UsageRightsProperty] = Json.writes[UsageRightsProperty] + implicit val jsonWrites: Writes[UsageRightsProperty] = + Json.writes[UsageRightsProperty] def sortList(l: List[String]) = l.sortWith(_.toLowerCase < _.toLowerCase) val props: List[(UsageRightsSpec) => List[UsageRightsProperty]] = List(categoryUsageRightsProperties, restrictionProperties) - def getPropertiesForSpec(u: UsageRightsSpec): List[UsageRightsProperty] = props.flatMap(f => f(u)) + def getPropertiesForSpec(u: UsageRightsSpec): List[UsageRightsProperty] = + props.flatMap(f => f(u)) private def requiredStringField( - name: String, - label: String, - options: Option[List[String]] = None, - examples: Option[String] = None, - optionsMap: Option[Map[String, List[String]]] = None, - optionsMapKey: Option[String] = None - ) = 
UsageRightsProperty(name, label, "string", required = true, options, - optionsMap, optionsMapKey, examples) - - private def publicationField(required: Boolean) = - UsageRightsProperty("publication", "Publication", "string", required, - Some(sortList(staffPhotographersMap.keys.toList))) + name: String, + label: String, + options: Option[List[String]] = None, + examples: Option[String] = None, + optionsMap: Option[Map[String, List[String]]] = None, + optionsMapKey: Option[String] = None + ) = UsageRightsProperty( + name, + label, + "string", + required = true, + options, + optionsMap, + optionsMapKey, + examples + ) + + private def publicationField(required: Boolean) = + UsageRightsProperty( + "publication", + "Publication", + "string", + required, + Some(sortList(staffPhotographersMap.keys.toList)) + ) private def photographerField(examples: String) = - requiredStringField("photographer", "Photographer", examples = Some(examples)) + requiredStringField( + "photographer", + "Photographer", + examples = Some(examples) + ) private def photographerField(photographers: OptionsMap, key: String) = - requiredStringField("photographer", "Photographer", - optionsMap = Some(photographers), optionsMapKey = Some(key)) + requiredStringField( + "photographer", + "Photographer", + optionsMap = Some(photographers), + optionsMapKey = Some(key) + ) private def illustratorField(illustrators: OptionsMap, key: String) = - requiredStringField("creator", "Illustrator", - optionsMap = Some(illustrators), optionsMapKey = Some(key)) + requiredStringField( + "creator", + "Illustrator", + optionsMap = Some(illustrators), + optionsMapKey = Some(key) + ) - private def restrictionProperties(u: UsageRightsSpec): List[UsageRightsProperty] = u match { + private def restrictionProperties( + u: UsageRightsSpec + ): List[UsageRightsProperty] = u match { case NoRights => List() - case _ => List(UsageRightsProperty("restrictions", "Restrictions", "text", u.defaultCost.contains(Conditional))) + case _ => + List( + UsageRightsProperty( + "restrictions", + "Restrictions", + "text", + u.defaultCost.contains(Conditional) + ) + ) } def categoryUsageRightsProperties(u: UsageRightsSpec) = u match { - case Agency => List( - requiredStringField("supplier", "Supplier", Some(sortList(freeSuppliers))), - UsageRightsProperty( - "suppliersCollection", "Collection", "string", required = false, - examples = Some("AFP, FilmMagic, WireImage")) - ) - - case CommissionedAgency => List(requiredStringField("supplier", "Supplier", examples = Some("Demotix"))) - - case StaffPhotographer => List( - publicationField(true), - photographerField(staffPhotographersMap, "publication") - ) - - case ContractPhotographer => List( - publicationField(true), - photographerField(contractPhotographersMap, "publication") - ) - - case CommissionedPhotographer => List( - publicationField(false), - photographerField("Sophia Evans, Murdo MacLeod") - ) - - case ContractIllustrator => List( - publicationField(true), - illustratorField(contractIllustratorsMap, "publication") - ) - - case StaffIllustrator => List( - requiredStringField("creator", "Illustrator", Some(sortList(staffIllustrators)))) - - case CommissionedIllustrator => List( - publicationField(false), - requiredStringField("creator", "Illustrator", examples = Some("Ellie Foreman Peck, Matt Bors"))) - - case CreativeCommons => List( - requiredStringField("licence", "Licence", Some(creativeCommonsLicense)), - requiredStringField("source", "Source", examples = Some("Wikimedia Commons")), - 
requiredStringField("creator", "Owner", examples = Some("User:Colin")), - requiredStringField("contentLink", "Link to content", examples = Some("https://commons.wikimedia.org/wiki/File:Foreign_and_Commonwealth_Office_-_Durbar_Court.jpg")) - ) - - case Composite => List( - requiredStringField("suppliers", "Suppliers", examples = Some("REX/Getty Images/Corbis, Corbis/Reuters")) - ) - - case Screengrab => List( - requiredStringField("source", "Source", examples = Some("BBC News, HBO, ITV")) - ) + case Agency => + List( + requiredStringField( + "supplier", + "Supplier", + Some(sortList(freeSuppliers)) + ), + UsageRightsProperty( + "suppliersCollection", + "Collection", + "string", + required = false, + examples = Some("AFP, FilmMagic, WireImage") + ) + ) + + case CommissionedAgency => + List( + requiredStringField("supplier", "Supplier", examples = Some("Demotix")) + ) + + case StaffPhotographer => + List( + publicationField(true), + photographerField(staffPhotographersMap, "publication") + ) + + case ContractPhotographer => + List( + publicationField(true), + photographerField(contractPhotographersMap, "publication") + ) + + case CommissionedPhotographer => + List( + publicationField(false), + photographerField("Sophia Evans, Murdo MacLeod") + ) + + case ContractIllustrator => + List( + publicationField(true), + illustratorField(contractIllustratorsMap, "publication") + ) + + case StaffIllustrator => + List( + requiredStringField( + "creator", + "Illustrator", + Some(sortList(staffIllustrators)) + ) + ) + + case CommissionedIllustrator => + List( + publicationField(false), + requiredStringField( + "creator", + "Illustrator", + examples = Some("Ellie Foreman Peck, Matt Bors") + ) + ) + + case CreativeCommons => + List( + requiredStringField("licence", "Licence", Some(creativeCommonsLicense)), + requiredStringField( + "source", + "Source", + examples = Some("Wikimedia Commons") + ), + requiredStringField("creator", "Owner", examples = Some("User:Colin")), + requiredStringField( + "contentLink", + "Link to content", + examples = Some( + "https://commons.wikimedia.org/wiki/File:Foreign_and_Commonwealth_Office_-_Durbar_Court.jpg" + ) + ) + ) + + case Composite => + List( + requiredStringField( + "suppliers", + "Suppliers", + examples = Some("REX/Getty Images/Corbis, Corbis/Reuters") + ) + ) + + case Screengrab => + List( + requiredStringField( + "source", + "Source", + examples = Some("BBC News, HBO, ITV") + ) + ) case _ => List() } diff --git a/metadata-editor/test/UsageRightsMetadataMapperTest.scala b/metadata-editor/test/UsageRightsMetadataMapperTest.scala index 2d369fc454..265c512e2b 100644 --- a/metadata-editor/test/UsageRightsMetadataMapperTest.scala +++ b/metadata-editor/test/UsageRightsMetadataMapperTest.scala @@ -9,49 +9,61 @@ class UsageRightsMetadataMapperTest extends FunSpec with Matchers { describe("UsageRights => ImageMetadata") { - it ("should convert StaffPhotographers") { + it("should convert StaffPhotographers") { val ur = StaffPhotographer("Alicia Canter", "The Guardian") usageRightsToMetadata(ur) should be - Some(ImageMetadata(credit = Some("The Guardian"), byline = Some("Alicia Canter"))) + Some( + ImageMetadata( + credit = Some("The Guardian"), + byline = Some("Alicia Canter") + ) + ) } - it ("should convert ContractPhotographers") { + it("should convert ContractPhotographers") { val ur = StaffPhotographer("Andy Hall", "The Observer") usageRightsToMetadata(ur) should be - Some(ImageMetadata(credit = Some("The Observer"), byline = Some("Andy Hall"))) + Some( + 
ImageMetadata(credit = Some("The Observer"), byline = Some("Andy Hall")) + ) } - it ("should convert CommissionedPhotographers") { + it("should convert CommissionedPhotographers") { val ur = CommissionedPhotographer("Mr. Photo", Some("Weekend Magazine")) usageRightsToMetadata(ur) should be - Some(ImageMetadata(credit = Some("Weekend Magazine"), byline = Some("Mr. Photo"))) + Some( + ImageMetadata( + credit = Some("Weekend Magazine"), + byline = Some("Mr. Photo") + ) + ) } - it ("should convert ContractIllustrators") { + it("should convert ContractIllustrators") { val ur = ContractIllustrator("First Dog on the Moon Institute") usageRightsToMetadata(ur) should be - Some(ImageMetadata(credit = Some("First Dog on the Moon Institute"))) + Some(ImageMetadata(credit = Some("First Dog on the Moon Institute"))) } - it ("should convert CommissionedIllustrators") { + it("should convert CommissionedIllustrators") { val ur = CommissionedIllustrator("Roger Rabbit") usageRightsToMetadata(ur) should be - Some(ImageMetadata(credit = Some("Roger Rabit"))) + Some(ImageMetadata(credit = Some("Roger Rabit"))) } - it ("should convert Composites") { + it("should convert Composites") { val ur = Composite("REX/Getty Images") usageRightsToMetadata(ur) should be - Some(ImageMetadata(credit = Some("REX/Getty Images"))) + Some(ImageMetadata(credit = Some("REX/Getty Images"))) } - it ("should convert Screengrabs") { + it("should convert Screengrabs") { val ur = Screengrab(Some("BBC News")) usageRightsToMetadata(ur) should be - Some(ImageMetadata(credit = Some("BBC News"))) + Some(ImageMetadata(credit = Some("BBC News"))) } - it ("should not convert Agencies") { + it("should not convert Agencies") { val ur = Agency("Rex Features") usageRightsToMetadata(ur) should be(None) } diff --git a/thrall/app/ThrallComponents.scala b/thrall/app/ThrallComponents.scala index db8179bd1b..edd0598c64 100644 --- a/thrall/app/ThrallComponents.scala +++ b/thrall/app/ThrallComponents.scala @@ -13,7 +13,8 @@ import router.Routes import scala.concurrent.Future -class ThrallComponents(context: Context) extends GridComponents(context, new ThrallConfig(_)) { +class ThrallComponents(context: Context) + extends GridComponents(context, new ThrallConfig(_)) { final override val buildInfo = utils.buildinfo.BuildInfo val store = new ThrallStore(config) @@ -31,19 +32,44 @@ class ThrallComponents(context: Context) extends GridComponents(context, new Thr val es = new ElasticSearch(esConfig, Some(thrallMetrics)) es.ensureAliasAssigned() - val highPriorityKinesisConfig: KinesisClientLibConfiguration = KinesisConfig.kinesisConfig(config.kinesisConfig) - val lowPriorityKinesisConfig: KinesisClientLibConfiguration = KinesisConfig.kinesisConfig(config.kinesisLowPriorityConfig) + val highPriorityKinesisConfig: KinesisClientLibConfiguration = + KinesisConfig.kinesisConfig(config.kinesisConfig) + val lowPriorityKinesisConfig: KinesisClientLibConfiguration = + KinesisConfig.kinesisConfig(config.kinesisLowPriorityConfig) - val highPrioritySource: Source[KinesisRecord, Future[Done]] = KinesisSource(highPriorityKinesisConfig) - val lowPrioritySource: Source[KinesisRecord, Future[Done]] = KinesisSource(lowPriorityKinesisConfig) + val highPrioritySource: Source[KinesisRecord, Future[Done]] = KinesisSource( + highPriorityKinesisConfig + ) + val lowPrioritySource: Source[KinesisRecord, Future[Done]] = KinesisSource( + lowPriorityKinesisConfig + ) - val thrallEventConsumer = new ThrallEventConsumer(es, thrallMetrics, store, metadataEditorNotifications, new 
SyndicationRightsOps(es), actorSystem) - val thrallStreamProcessor = new ThrallStreamProcessor(highPrioritySource, lowPrioritySource, thrallEventConsumer, actorSystem, materializer) + val thrallEventConsumer = new ThrallEventConsumer( + es, + thrallMetrics, + store, + metadataEditorNotifications, + new SyndicationRightsOps(es), + actorSystem + ) + val thrallStreamProcessor = new ThrallStreamProcessor( + highPrioritySource, + lowPrioritySource, + thrallEventConsumer, + actorSystem, + materializer + ) val streamRunning: Future[Done] = thrallStreamProcessor.run() val thrallController = new ThrallController(controllerComponents) - val healthCheckController = new HealthCheck(es, streamRunning.isCompleted, config, controllerComponents) + val healthCheckController = + new HealthCheck(es, streamRunning.isCompleted, config, controllerComponents) - override lazy val router = new Routes(httpErrorHandler, thrallController, healthCheckController, management) + override lazy val router = new Routes( + httpErrorHandler, + thrallController, + healthCheckController, + management + ) } diff --git a/thrall/app/controllers/HealthCheck.scala b/thrall/app/controllers/HealthCheck.scala index 3147a8bd52..0e5409c795 100644 --- a/thrall/app/controllers/HealthCheck.scala +++ b/thrall/app/controllers/HealthCheck.scala @@ -9,8 +9,14 @@ import play.api.mvc._ import scala.concurrent.{ExecutionContext, Future} -class HealthCheck(elasticsearch: ElasticSearch, streamRunning: => Boolean, config: ThrallConfig, override val controllerComponents: ControllerComponents)(implicit override val ec: ExecutionContext) - extends ElasticSearchHealthCheck(controllerComponents, elasticsearch) with ArgoHelpers { +class HealthCheck( + elasticsearch: ElasticSearch, + streamRunning: => Boolean, + config: ThrallConfig, + override val controllerComponents: ControllerComponents +)(implicit override val ec: ExecutionContext) + extends ElasticSearchHealthCheck(controllerComponents, elasticsearch) + with ArgoHelpers { override def healthCheck = Action.async { elasticHealth.map { esHealth => diff --git a/thrall/app/controllers/ThrallController.scala b/thrall/app/controllers/ThrallController.scala index 5af3c58d82..322d85c5e2 100644 --- a/thrall/app/controllers/ThrallController.scala +++ b/thrall/app/controllers/ThrallController.scala @@ -6,7 +6,9 @@ import play.api.mvc.{BaseController, ControllerComponents} import scala.concurrent.ExecutionContext -class ThrallController(override val controllerComponents: ControllerComponents)(implicit val ec: ExecutionContext) extends BaseController { +class ThrallController(override val controllerComponents: ControllerComponents)( + implicit val ec: ExecutionContext +) extends BaseController { private implicit val ctx: ExecutionContext = ExecutionContext.fromExecutor(Executors.newCachedThreadPool) diff --git a/thrall/app/lib/MetadataEditorNotifications.scala b/thrall/app/lib/MetadataEditorNotifications.scala index df855e5e9f..fc860ce063 100644 --- a/thrall/app/lib/MetadataEditorNotifications.scala +++ b/thrall/app/lib/MetadataEditorNotifications.scala @@ -5,6 +5,8 @@ import play.api.libs.json.Json import scala.concurrent.ExecutionContext -class MetadataEditorNotifications(config: ThrallConfig) extends SNS(config, config.metadataTopicArn) { - def publishImageDeletion(id: String)(implicit ec: ExecutionContext) = publish(Json.obj("id" -> id), "image-deleted") +class MetadataEditorNotifications(config: ThrallConfig) + extends SNS(config, config.metadataTopicArn) { + def publishImageDeletion(id: String)(implicit ec: 
ExecutionContext) = + publish(Json.obj("id" -> id), "image-deleted") } diff --git a/thrall/app/lib/OrderedFutureRunner.scala b/thrall/app/lib/OrderedFutureRunner.scala index 2ab42d0ae0..8b649179b6 100644 --- a/thrall/app/lib/OrderedFutureRunner.scala +++ b/thrall/app/lib/OrderedFutureRunner.scala @@ -5,7 +5,9 @@ import scala.concurrent.{Await, Future} import scala.util.Try object OrderedFutureRunner { - def run[A, B](f: A => Future[B], timeout: Duration)(as: List[A]): List[Try[B]] = { + def run[A, B](f: A => Future[B], timeout: Duration)( + as: List[A] + ): List[Try[B]] = { as.map { a => Try(Await.result(f(a), timeout)) } diff --git a/thrall/app/lib/RetryHandler.scala b/thrall/app/lib/RetryHandler.scala index 5e83717120..44397be82d 100644 --- a/thrall/app/lib/RetryHandler.scala +++ b/thrall/app/lib/RetryHandler.scala @@ -1,6 +1,5 @@ package lib - import akka.actor.ActorSystem import akka.pattern.{after, retry} import com.gu.mediaservice.lib.logging.{LogMarker, MarkerMap, combineMarkers} @@ -13,50 +12,69 @@ import scala.util.{Failure, Success} object RetryHandler { type WithMarkers[T] = (LogMarker) => Future[T] - def handleWithRetryAndTimeout[T](f: WithMarkers[T], - retries: Int, - timeout: FiniteDuration, - delay: FiniteDuration, - marker: LogMarker - )(implicit actorSystem: ActorSystem, - executionContext: ExecutionContext, - ): Future[T] = { - def logFailures[T](f: WithMarkers[T]):WithMarkers[T] = { - (marker: LogMarker) => { - f(marker).transform { - case Success(x) => Success(x) - case Failure(t: TimeoutException) => { - Logger.error("Failed with timeout. Will retry")(marker.toLogMarker) - Failure(t) - } - case Failure(exception) => { - Logger.error("Failed with exception.", exception)(marker.toLogMarker) - Failure(exception) + def handleWithRetryAndTimeout[T]( + f: WithMarkers[T], + retries: Int, + timeout: FiniteDuration, + delay: FiniteDuration, + marker: LogMarker + )(implicit + actorSystem: ActorSystem, + executionContext: ExecutionContext + ): Future[T] = { + def logFailures[T](f: WithMarkers[T]): WithMarkers[T] = { + (marker: LogMarker) => + { + f(marker).transform { + case Success(x) => Success(x) + case Failure(t: TimeoutException) => { + Logger.error("Failed with timeout. 
Will retry")( + marker.toLogMarker + ) + Failure(t) + } + case Failure(exception) => { + Logger.error("Failed with exception.", exception)( + marker.toLogMarker + ) + Failure(exception) + } } } - } } - def handleWithTimeout[T](f: WithMarkers[T], attemptTimeout: FiniteDuration): WithMarkers[T] = ( marker ) => { - val timeout = after(attemptTimeout, using = actorSystem.scheduler)(Future.failed( - new TimeoutException(s"Timeout of $attemptTimeout reached.") - )) + def handleWithTimeout[T]( + f: WithMarkers[T], + attemptTimeout: FiniteDuration + ): WithMarkers[T] = (marker) => { + val timeout = after(attemptTimeout, using = actorSystem.scheduler)( + Future.failed( + new TimeoutException(s"Timeout of $attemptTimeout reached.") + ) + ) Future.firstCompletedOf(Seq(timeout, f(marker))) } - def handleWithRetry[T](f: WithMarkers[T], retries: Int, delay: FiniteDuration): WithMarkers[T] = (marker) => { + def handleWithRetry[T]( + f: WithMarkers[T], + retries: Int, + delay: FiniteDuration + ): WithMarkers[T] = (marker) => { implicit val scheduler = actorSystem.scheduler var count = 0 def attempt = () => { count = count + 1 - val markerWithRetry = combineMarkers(marker, MarkerMap("retryCount" -> count)) + val markerWithRetry = + combineMarkers(marker, MarkerMap("retryCount" -> count)) Logger.info(s"Attempt $count of $retries")(markerWithRetry) f(markerWithRetry) } retry(attempt, retries, delay) } - handleWithRetry(handleWithTimeout(logFailures(f), timeout), retries, delay)(marker) + handleWithRetry(handleWithTimeout(logFailures(f), timeout), retries, delay)( + marker + ) } } diff --git a/thrall/app/lib/ThrallConfig.scala b/thrall/app/lib/ThrallConfig.scala index 253459761b..61cc3f9100 100644 --- a/thrall/app/lib/ThrallConfig.scala +++ b/thrall/app/lib/ThrallConfig.scala @@ -8,17 +8,21 @@ import org.joda.time.DateTime import org.joda.time.format.ISODateTimeFormat case class KinesisReceiverConfig( - override val awsRegion: String, - override val awsCredentials: AWSCredentialsProvider, - override val awsLocalEndpoint: Option[String], - override val isDev: Boolean, - streamName: String, - rewindFrom: Option[DateTime], - metricsLevel: MetricsLevel = MetricsLevel.DETAILED + override val awsRegion: String, + override val awsCredentials: AWSCredentialsProvider, + override val awsLocalEndpoint: Option[String], + override val isDev: Boolean, + streamName: String, + rewindFrom: Option[DateTime], + metricsLevel: MetricsLevel = MetricsLevel.DETAILED ) extends AwsClientBuilderUtils object KinesisReceiverConfig { - def apply(streamName: String, rewindFrom: Option[DateTime], thrallConfig: ThrallConfig): KinesisReceiverConfig = KinesisReceiverConfig( + def apply( + streamName: String, + rewindFrom: Option[DateTime], + thrallConfig: ThrallConfig + ): KinesisReceiverConfig = KinesisReceiverConfig( thrallConfig.awsRegion, thrallConfig.awsCredentials, thrallConfig.awsLocalEndpoint, @@ -28,25 +32,35 @@ object KinesisReceiverConfig { ) } -class ThrallConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) { +class ThrallConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) { val imageBucket: String = string("s3.image.bucket") val writeAlias: String = string("es.index.aliases.write") val thumbnailBucket: String = string("s3.thumb.bucket") - val elasticsearch6Url: String = string("es6.url") + val elasticsearch6Url: String = string("es6.url") val elasticsearch6Cluster: String = string("es6.cluster") val elasticsearch6Shards: Int = string("es6.shards").toInt val 
elasticsearch6Replicas: Int = string("es6.replicas").toInt val metadataTopicArn: String = string("indexed.image.sns.topic.arn") - val rewindFrom: Option[DateTime] = stringOpt("thrall.kinesis.stream.rewindFrom").map(ISODateTimeFormat.dateTime.parseDateTime) - val lowPriorityRewindFrom: Option[DateTime] = stringOpt("thrall.kinesis.lowPriorityStream.rewindFrom").map(ISODateTimeFormat.dateTime.parseDateTime) + val rewindFrom: Option[DateTime] = stringOpt( + "thrall.kinesis.stream.rewindFrom" + ).map(ISODateTimeFormat.dateTime.parseDateTime) + val lowPriorityRewindFrom: Option[DateTime] = stringOpt( + "thrall.kinesis.lowPriorityStream.rewindFrom" + ).map(ISODateTimeFormat.dateTime.parseDateTime) val isVersionedS3: Boolean = boolean("s3.image.versioned") - def kinesisConfig: KinesisReceiverConfig = KinesisReceiverConfig(thrallKinesisStream, rewindFrom, this) - def kinesisLowPriorityConfig: KinesisReceiverConfig = KinesisReceiverConfig(thrallKinesisLowPriorityStream, lowPriorityRewindFrom, this) + def kinesisConfig: KinesisReceiverConfig = + KinesisReceiverConfig(thrallKinesisStream, rewindFrom, this) + def kinesisLowPriorityConfig: KinesisReceiverConfig = KinesisReceiverConfig( + thrallKinesisLowPriorityStream, + lowPriorityRewindFrom, + this + ) } diff --git a/thrall/app/lib/ThrallMetrics.scala b/thrall/app/lib/ThrallMetrics.scala index d3e5efb85c..baa716c1da 100644 --- a/thrall/app/lib/ThrallMetrics.scala +++ b/thrall/app/lib/ThrallMetrics.scala @@ -2,7 +2,8 @@ package lib import com.gu.mediaservice.lib.metrics.CloudWatchMetrics -class ThrallMetrics(config: ThrallConfig) extends CloudWatchMetrics(s"${config.stage}/Thrall", config) { +class ThrallMetrics(config: ThrallConfig) + extends CloudWatchMetrics(s"${config.stage}/Thrall", config) { val indexedImages = new CountMetric("IndexedImages") @@ -18,7 +19,9 @@ class ThrallMetrics(config: ThrallConfig) extends CloudWatchMetrics(s"${config.s val failedUsagesUpdates = new CountMetric("FailedUsagesUpdates") - val failedSyndicationRightsUpdates = new CountMetric("FailedSyndicationRightsUpdates") + val failedSyndicationRightsUpdates = new CountMetric( + "FailedSyndicationRightsUpdates" + ) val failedQueryUpdates = new CountMetric("FailedQueryUpdates") diff --git a/thrall/app/lib/ThrallStore.scala b/thrall/app/lib/ThrallStore.scala index a221c0471c..c1ecf620f7 100644 --- a/thrall/app/lib/ThrallStore.scala +++ b/thrall/app/lib/ThrallStore.scala @@ -2,4 +2,10 @@ package lib import com.gu.mediaservice.lib -class ThrallStore(config: ThrallConfig) extends lib.ImageIngestOperations(config.imageBucket, config.thumbnailBucket, config, config.isVersionedS3) +class ThrallStore(config: ThrallConfig) + extends lib.ImageIngestOperations( + config.imageBucket, + config.thumbnailBucket, + config, + config.isVersionedS3 + ) diff --git a/thrall/app/lib/ThrallStreamProcessor.scala b/thrall/app/lib/ThrallStreamProcessor.scala index dc231601bd..e228a59a7e 100644 --- a/thrall/app/lib/ThrallStreamProcessor.scala +++ b/thrall/app/lib/ThrallStreamProcessor.scala @@ -19,64 +19,82 @@ case object LowPriority extends Priority { case object HighPriority extends Priority { override def toString = "high" } -case class TaggedRecord(record: KinesisRecord, priority: Priority) extends LogMarker { +case class TaggedRecord(record: KinesisRecord, priority: Priority) + extends LogMarker { override def markerContents = Map( "recordPriority" -> priority.toString, - "recordArrivalTime" -> DateTimeUtils.toString(record.approximateArrivalTimestamp) + "recordArrivalTime" -> 
DateTimeUtils.toString( + record.approximateArrivalTimestamp + ) ) } class ThrallStreamProcessor( - highPrioritySource: Source[KinesisRecord, Future[Done]], - lowPrioritySource: Source[KinesisRecord, Future[Done]], - consumer: ThrallEventConsumer, - actorSystem: ActorSystem, - materializer: Materializer) extends GridLogging { + highPrioritySource: Source[KinesisRecord, Future[Done]], + lowPrioritySource: Source[KinesisRecord, Future[Done]], + consumer: ThrallEventConsumer, + actorSystem: ActorSystem, + materializer: Materializer +) extends GridLogging { implicit val mat = materializer implicit val dispatcher = actorSystem.getDispatcher - val mergedKinesisSource: Source[TaggedRecord, NotUsed] = Source.fromGraph(GraphDSL.create() { implicit g => - import GraphDSL.Implicits._ - val highPriorityKinesisSource = highPrioritySource.map(TaggedRecord(_, HighPriority)) - val lowPriorityKinesisSource = lowPrioritySource.map(TaggedRecord(_, LowPriority)) + val mergedKinesisSource: Source[TaggedRecord, NotUsed] = + Source.fromGraph(GraphDSL.create() { implicit g => + import GraphDSL.Implicits._ + val highPriorityKinesisSource = + highPrioritySource.map(TaggedRecord(_, HighPriority)) + val lowPriorityKinesisSource = + lowPrioritySource.map(TaggedRecord(_, LowPriority)) - val mergePreferred = g.add(MergePreferred[TaggedRecord](1)) + val mergePreferred = g.add(MergePreferred[TaggedRecord](1)) - highPriorityKinesisSource ~> mergePreferred.preferred - lowPriorityKinesisSource ~> mergePreferred.in(0) + highPriorityKinesisSource ~> mergePreferred.preferred + lowPriorityKinesisSource ~> mergePreferred.in(0) - SourceShape(mergePreferred.out) - }) + SourceShape(mergePreferred.out) + }) - def createStream(): Source[(TaggedRecord, Stopwatch, Option[UpdateMessage]), NotUsed] = { - mergedKinesisSource.map{ taggedRecord => - taggedRecord -> ThrallEventConsumer.parseRecord(taggedRecord.record.data.toArray, taggedRecord.record.approximateArrivalTimestamp) - }.mapAsync(1) { result => - val stopwatch = Stopwatch.start - result match { - case (record, Some(updateMessage)) => - consumer.processUpdateMessage(updateMessage) - .recover { case _ => () } - .map(_ => (record, stopwatch, Some(updateMessage))) - case (record, _) => Future.successful((record, stopwatch, None)) + def createStream() + : Source[(TaggedRecord, Stopwatch, Option[UpdateMessage]), NotUsed] = { + mergedKinesisSource + .map { taggedRecord => + taggedRecord -> ThrallEventConsumer.parseRecord( + taggedRecord.record.data.toArray, + taggedRecord.record.approximateArrivalTimestamp + ) + } + .mapAsync(1) { result => + val stopwatch = Stopwatch.start + result match { + case (record, Some(updateMessage)) => + consumer + .processUpdateMessage(updateMessage) + .recover { case _ => () } + .map(_ => (record, stopwatch, Some(updateMessage))) + case (record, _) => Future.successful((record, stopwatch, None)) + } } - } } def run(): Future[Done] = { val stream = this.createStream().runForeach { case (taggedRecord, stopwatch, maybeUpdateMessage) => val basicMakers = combineMarkers(taggedRecord, stopwatch.elapsed) - val markers = maybeUpdateMessage.map(combineMarkers(basicMakers, _)).getOrElse(basicMakers) + val markers = maybeUpdateMessage + .map(combineMarkers(basicMakers, _)) + .getOrElse(basicMakers) logger.info(markers, "Record processed") taggedRecord.record.markProcessed() } stream.onComplete { - case Failure(exception) => logger.error("stream completed with failure", exception) - case Success(_) => logger.info("Stream completed with done, probably shutting down") + 
case Failure(exception) => + logger.error("stream completed with failure", exception) + case Success(_) => + logger.info("Stream completed with done, probably shutting down") } stream diff --git a/thrall/app/lib/elasticsearch/ElasticSearch.scala b/thrall/app/lib/elasticsearch/ElasticSearch.scala index d00084db45..6336faab4d 100644 --- a/thrall/app/lib/elasticsearch/ElasticSearch.scala +++ b/thrall/app/lib/elasticsearch/ElasticSearch.scala @@ -1,7 +1,11 @@ package lib.elasticsearch import com.gu.mediaservice.lib.ImageFields -import com.gu.mediaservice.lib.elasticsearch.{ElasticSearchClient, ElasticSearchConfig, ElasticSearchExecutions} +import com.gu.mediaservice.lib.elasticsearch.{ + ElasticSearchClient, + ElasticSearchConfig, + ElasticSearchExecutions +} import com.gu.mediaservice.lib.formatting.printDateTime import com.gu.mediaservice.lib.logging.LogMarker import com.gu.mediaservice.model._ @@ -22,8 +26,10 @@ import scala.concurrent.{ExecutionContext, Future} object ImageNotDeletable extends Throwable("Image cannot be deleted") -class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) extends ElasticSearchClient - with ImageFields with ElasticSearchExecutions { +class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) + extends ElasticSearchClient + with ImageFields + with ElasticSearchExecutions { lazy val imagesAlias: String = config.alias lazy val url: String = config.url @@ -31,30 +37,38 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) lazy val shards: Int = config.shards lazy val replicas: Int = config.replicas - def bulkInsert(images: Seq[Image])(implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchBulkUpdateResponse]] = { + def bulkInsert(images: Seq[Image])(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchBulkUpdateResponse]] = { val (requests, totalSize) = - images.foldLeft[(Seq[IndexRequest], Int)](List(), 0) - { (collector: (Seq[IndexRequest], Int), img) => - val (requestsSoFar, sizeSoFar) = collector - val document = Json.stringify(Json.toJson(img)) - ( - requestsSoFar :+ - indexInto(imagesAlias) - .id(img.id) - .source(document), - sizeSoFar + document.length() - ) - } + images.foldLeft[(Seq[IndexRequest], Int)](List(), 0) { + (collector: (Seq[IndexRequest], Int), img) => + val (requestsSoFar, sizeSoFar) = collector + val document = Json.stringify(Json.toJson(img)) + ( + requestsSoFar :+ + indexInto(imagesAlias) + .id(img.id) + .source(document), + sizeSoFar + document.length() + ) + } val request = bulk { requests } - val response = executeAndLog(request, s"Bulk inserting ${images.length} images, total size $totalSize") + val response = executeAndLog( + request, + s"Bulk inserting ${images.length} images, total size $totalSize" + ) List(response.map(_ => ElasticSearchBulkUpdateResponse())) } - def indexImage(id: String, image: Image, lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def indexImage(id: String, image: Image, lastModified: DateTime)(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { // On insert, we know we will not have a lastModified to consider, so we always take the one we get val insertImage = image.copy(lastModified = Some(lastModified)) val insertImageAsJson = Json.toJson(insertImage) @@ -83,13 +97,15 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: 
Option[ThrallMetrics]) |$refreshEditsScript | """) - val script: Script = prepareScript(scriptSource, lastModified, + val script: Script = prepareScript( + scriptSource, + lastModified, ("update_doc", asNestedMap(asImageUpdate(upsertImageAsJson))) ) - val indexRequest = updateById(imagesAlias, id). - upsert(Json.stringify(insertImageAsJson)). - script(script) + val indexRequest = updateById(imagesAlias, id) + .upsert(Json.stringify(insertImageAsJson)) + .script(script) val indexResponse = executeAndLog(indexRequest, s"ES6 indexing image $id") @@ -98,7 +114,10 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) }) } - def getImage(id: String)(implicit ex: ExecutionContext, logMarker: LogMarker): Future[Option[Image]] = { + def getImage(id: String)(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): Future[Option[Image]] = { executeAndLog(get(imagesAlias, id), s"ES6 get image by $id").map { r => if (r.result.found) { Some(Json.parse(r.result.sourceAsString).as[Image]) @@ -108,8 +127,11 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) } } - def updateImageUsages(id: String, usages: Seq[Usage], lastModified: DateTime) - (implicit ex: ExecutionContext,logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def updateImageUsages(id: String, usages: Seq[Usage], lastModified: DateTime)( + implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val replaceUsagesScript = loadUpdatingModificationPainless(s""" | def lastUpdatedDate = ctx._source.usagesLastModified != null ? Date.from(Instant.from(DateTimeFormatter.ISO_DATE_TIME.parse(ctx._source.usagesLastModified))) : null; | if (lastUpdatedDate == null || modificationDate.after(lastUpdatedDate)) { @@ -119,34 +141,64 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) """) val usagesParameter = usages.map(i => asNestedMap(Json.toJson(i))) - val updateRequest: UpdateRequest = prepareUpdateRequest(id, replaceUsagesScript, lastModified, ("usages", usagesParameter) + val updateRequest: UpdateRequest = prepareUpdateRequest( + id, + replaceUsagesScript, + lastModified, + ("usages", usagesParameter) ) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 updating usages on image $id") - .incrementOnFailure(metrics.map(_.failedUsagesUpdates)){case _ => true} + val eventualUpdateResponse = + executeAndLog(updateRequest, s"ES6 updating usages on image $id") + .incrementOnFailure(metrics.map(_.failedUsagesUpdates)) { case _ => + true + } List(eventualUpdateResponse.map(_ => ElasticSearchUpdateResponse())) } - def updateImageSyndicationRights(id: String, rights: Option[SyndicationRights], lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def updateImageSyndicationRights( + id: String, + rights: Option[SyndicationRights], + lastModified: DateTime + )(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val replaceSyndicationRightsScript = """ | ctx._source.syndicationRights = params.syndicationRights; """.stripMargin - val rightsParameter = rights.map(sr => asNestedMap(sr)).orNull - val scriptSource = loadUpdatingModificationPainless(replaceSyndicationRightsScript) + val scriptSource = loadUpdatingModificationPainless( + replaceSyndicationRightsScript + ) - val updateRequest: UpdateRequest = prepareUpdateRequest(id, scriptSource, lastModified, 
("syndicationRights", rightsParameter)) + val updateRequest: UpdateRequest = prepareUpdateRequest( + id, + scriptSource, + lastModified, + ("syndicationRights", rightsParameter) + ) - List(executeAndLog(updateRequest, s"ES6 updating syndicationRights on image $id with rights $rightsParameter").map(_ => ElasticSearchUpdateResponse())) + List( + executeAndLog( + updateRequest, + s"ES6 updating syndicationRights on image $id with rights $rightsParameter" + ).map(_ => ElasticSearchUpdateResponse()) + ) } - def applyImageMetadataOverride(id: String, metadata: Edits, lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def applyImageMetadataOverride( + id: String, + metadata: Edits, + lastModified: DateTime + )(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val photoshootSuggestionScript = """ | if (ctx._source.userMetadata.photoshoot != null) { @@ -154,7 +206,8 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) | } """.stripMargin - val metadataParameter = JsDefined(Json.toJson(metadata)).toOption.map(asNestedMap).orNull + val metadataParameter = + JsDefined(Json.toJson(metadata)).toOption.map(asNestedMap).orNull val replaceUserMetadata = """ @@ -173,85 +226,129 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) """ ) - val updateRequest: UpdateRequest = prepareUpdateRequest(id, scriptSource, lastModified, ("userMetadata", metadataParameter)) + val updateRequest: UpdateRequest = prepareUpdateRequest( + id, + scriptSource, + lastModified, + ("userMetadata", metadataParameter) + ) - List(executeAndLog(updateRequest, s"ES6 updating user metadata on image $id with lastModified $lastModified").map(_ => ElasticSearchUpdateResponse())) + List( + executeAndLog( + updateRequest, + s"ES6 updating user metadata on image $id with lastModified $lastModified" + ).map(_ => ElasticSearchUpdateResponse()) + ) } - def getInferredSyndicationRightsImages(photoshoot: Photoshoot, excludedImageId: Option[String]) - (implicit ex: ExecutionContext, logMarker: LogMarker): Future[List[Image]] = { // TODO could be a Seq - val inferredSyndicationRights = not(termQuery("syndicationRights.isInferred", false)) // Using 'not' to include nulls + def getInferredSyndicationRightsImages( + photoshoot: Photoshoot, + excludedImageId: Option[String] + )(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): Future[List[Image]] = { // TODO could be a Seq + val inferredSyndicationRights = not( + termQuery("syndicationRights.isInferred", false) + ) // Using 'not' to include nulls val filter = excludedImageId match { - case Some(imageId) => boolQuery must( - inferredSyndicationRights, - not(idsQuery(imageId)) - ) + case Some(imageId) => + boolQuery must ( + inferredSyndicationRights, + not(idsQuery(imageId)) + ) case _ => inferredSyndicationRights } - val filteredMatches: BoolQuery = boolQuery must( + val filteredMatches: BoolQuery = boolQuery must ( matchQuery(photoshootField("title"), photoshoot.title), filter ) - val request = search(imagesAlias) bool filteredMatches limit 200 // TODO no order? + val request = + search(imagesAlias) bool filteredMatches limit 200 // TODO no order? 
- executeAndLog(request, s"ES6 get images in photoshoot ${photoshoot.title} with inferred syndication rights (excluding $excludedImageId)").map { r => + executeAndLog( + request, + s"ES6 get images in photoshoot ${photoshoot.title} with inferred syndication rights (excluding $excludedImageId)" + ).map { r => r.result.hits.hits.toList.map { h => Json.parse(h.sourceAsString).as[Image] } } } - def getLatestSyndicationRights(photoshoot: Photoshoot, excludedImageId: Option[String]) - (implicit ex: ExecutionContext, logMarker: LogMarker): Future[Option[Image]] = { - val nonInferredSyndicationRights = termQuery("syndicationRights.isInferred", false) + def getLatestSyndicationRights( + photoshoot: Photoshoot, + excludedImageId: Option[String] + )(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): Future[Option[Image]] = { + val nonInferredSyndicationRights = + termQuery("syndicationRights.isInferred", false) val filter = excludedImageId match { - case Some(imageId) => boolQuery must( - nonInferredSyndicationRights, - not(idsQuery(imageId)) - ) + case Some(imageId) => + boolQuery must ( + nonInferredSyndicationRights, + not(idsQuery(imageId)) + ) case _ => nonInferredSyndicationRights } - val filteredMatches = boolQuery must( + val filteredMatches = boolQuery must ( matchQuery(photoshootField("title"), photoshoot.title), filter ) - val syndicationRightsPublishedDescending = fieldSort("syndicationRights.published").order(SortOrder.DESC) + val syndicationRightsPublishedDescending = + fieldSort("syndicationRights.published").order(SortOrder.DESC) - val request = search(imagesAlias) bool filteredMatches sortBy syndicationRightsPublishedDescending + val request = search( + imagesAlias + ) bool filteredMatches sortBy syndicationRightsPublishedDescending - executeAndLog(request, s"ES6 get image in photoshoot ${photoshoot.title} with latest rcs syndication rights (excluding $excludedImageId)").map { r => + executeAndLog( + request, + s"ES6 get image in photoshoot ${photoshoot.title} with latest rcs syndication rights (excluding $excludedImageId)" + ).map { r => r.result.hits.hits.toList.headOption.map { h => Json.parse(h.sourceAsString).as[Image] } } } - def deleteImage(id: String) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchDeleteResponse]] = { + def deleteImage(id: String)(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchDeleteResponse]] = { // search for the image first, and then only delete and succeed // this is because the delete query does not respond with anything useful // TODO: is there a more efficient way to do this? 
- val deletableImage = boolQuery.withMust( - idsQuery(id)).withNot( - existsQuery("exports"), - nestedQuery("usages").query(existsQuery("usages")) - ) + val deletableImage = boolQuery + .withMust(idsQuery(id)) + .withNot( + existsQuery("exports"), + nestedQuery("usages").query(existsQuery("usages")) + ) - val eventualDeleteResponse = executeAndLog(count(imagesAlias).query(deletableImage), s"ES6 searching for image to delete: $id").flatMap { r => + val eventualDeleteResponse = executeAndLog( + count(imagesAlias).query(deletableImage), + s"ES6 searching for image to delete: $id" + ).flatMap { r => val deleteFuture = r.result.count match { - case 1 => executeAndLog(deleteById(imagesAlias, id), s"ES6 deleting image $id") + case 1 => + executeAndLog(deleteById(imagesAlias, id), s"ES6 deleting image $id") case _ => Future.failed(ImageNotDeletable) } deleteFuture .incrementOnSuccess(metrics.map(_.deletedImages)) - .incrementOnFailure(metrics.map(_.failedDeletedImages)) { case ImageNotDeletable => true } + .incrementOnFailure(metrics.map(_.failedDeletedImages)) { + case ImageNotDeletable => true + } } List(eventualDeleteResponse.map { _ => @@ -259,37 +356,53 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) }) } - def deleteAllImageUsages(id: String, - lastModified: DateTime - ) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { - val deleteUsagesScript = loadUpdatingModificationPainless("ctx._source.remove('usages');") + def deleteAllImageUsages(id: String, lastModified: DateTime)(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { + val deleteUsagesScript = loadUpdatingModificationPainless( + "ctx._source.remove('usages');" + ) - val updateRequest = prepareUpdateRequest(id, deleteUsagesScript, lastModified) + val updateRequest = + prepareUpdateRequest(id, deleteUsagesScript, lastModified) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 removing all usages on image $id", true) - .incrementOnFailure(metrics.map(_.failedUsagesUpdates)){case _ => true} + val eventualUpdateResponse = executeAndLog( + updateRequest, + s"ES6 removing all usages on image $id", + true + ) + .incrementOnFailure(metrics.map(_.failedUsagesUpdates)) { case _ => true } List(eventualUpdateResponse.map(response => { - if(response.status == 404){ + if (response.status == 404) { logger.warn("Attempted to delete usages for non-existant image.") } ElasticSearchUpdateResponse() })) } - def deleteSyndicationRights(id: String, lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def deleteSyndicationRights(id: String, lastModified: DateTime)(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val deleteSyndicationRightsScript = s""" | $modificationDateFormatting | ctx._source.remove('syndicationRights'); | $updateLastModifiedScript """.stripMargin - val updateRequest= prepareUpdateRequest(id, deleteSyndicationRightsScript, lastModified) + val updateRequest = + prepareUpdateRequest(id, deleteSyndicationRightsScript, lastModified) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 removing syndication rights on image $id", true) - .incrementOnFailure(metrics.map(_.failedSyndicationRightsUpdates)){case _ => true} + val eventualUpdateResponse = executeAndLog( + updateRequest, + s"ES6 removing syndication rights on image $id", + true + ) + 
.incrementOnFailure(metrics.map(_.failedSyndicationRightsUpdates)) { + case _ => true + } List(eventualUpdateResponse.map(_ => ElasticSearchUpdateResponse())) } @@ -298,8 +411,14 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) updateById(imagesAlias, id) .script(Script(script = script).lang("painless")) - def replaceImageLeases(id: String, leases: Seq[MediaLease], lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def replaceImageLeases( + id: String, + leases: Seq[MediaLease], + lastModified: DateTime + )(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val replaceLeasesScript = """ | ctx._source.leases = ["leases": params.leases, "lastModified": params.lastModified]; @@ -308,25 +427,58 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) val scriptSource = loadUpdatingModificationPainless(replaceLeasesScript) val leasesParameter = leases.map(l => asNestedMap(Json.toJson(l))) - val updateRequest: UpdateRequest = prepareUpdateRequest(id, scriptSource, lastModified, ("leases", leasesParameter)) + val updateRequest: UpdateRequest = prepareUpdateRequest( + id, + scriptSource, + lastModified, + ("leases", leasesParameter) + ) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 updating all leases on image $id with: ${leases.toString}") - .incrementOnFailure(metrics.map(_.failedSyndicationRightsUpdates)){case _ => true} + val eventualUpdateResponse = executeAndLog( + updateRequest, + s"ES6 updating all leases on image $id with: ${leases.toString}" + ) + .incrementOnFailure(metrics.map(_.failedSyndicationRightsUpdates)) { + case _ => true + } List(eventualUpdateResponse.map(_ => ElasticSearchUpdateResponse())) } - private def prepareScript(scriptSource: String, lastModified: DateTime, params: (String, Object)*) = - Script(script = scriptSource).lang("painless").param("lastModified", printDateTime(lastModified)).params(params) - - private def prepareUpdateRequest(id: String, scriptSource: String, lastModified: DateTime, params: (String, Object)*) = - updateById(imagesAlias, id).script(prepareScript(scriptSource, lastModified, params:_*)) + private def prepareScript( + scriptSource: String, + lastModified: DateTime, + params: (String, Object)* + ) = + Script(script = scriptSource) + .lang("painless") + .param("lastModified", printDateTime(lastModified)) + .params(params) + + private def prepareUpdateRequest( + id: String, + scriptSource: String, + lastModified: DateTime, + params: (String, Object)* + ) = + updateById(imagesAlias, id).script( + prepareScript(scriptSource, lastModified, params: _*) + ) - private def prepareUpdateRequest(id: String, scriptSource: String, lastModified: DateTime) = - updateById(imagesAlias, id).script(prepareScript(scriptSource, lastModified)) + private def prepareUpdateRequest( + id: String, + scriptSource: String, + lastModified: DateTime + ) = + updateById(imagesAlias, id).script( + prepareScript(scriptSource, lastModified) + ) - def addImageLease(id: String, lease: MediaLease, lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def addImageLease(id: String, lease: MediaLease, lastModified: DateTime)( + implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val addLeaseScript = """| if (ctx._source.leases == null || 
ctx._source.leases.leases == null) { @@ -339,18 +491,35 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) val scriptSource = loadUpdatingModificationPainless(addLeaseScript) - val leaseParameter = JsDefined(Json.toJson(lease)).toOption.map(_.as[MediaLease]).map(i => asNestedMap(Json.toJson(i))).orNull + val leaseParameter = JsDefined(Json.toJson(lease)).toOption + .map(_.as[MediaLease]) + .map(i => asNestedMap(Json.toJson(i))) + .orNull - val updateRequest: UpdateRequest = prepareUpdateRequest(id, scriptSource, lastModified, ("lease", leaseParameter)) + val updateRequest: UpdateRequest = prepareUpdateRequest( + id, + scriptSource, + lastModified, + ("lease", leaseParameter) + ) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 adding lease on image $id with: $leaseParameter") - .incrementOnFailure(metrics.map(_.failedUsagesUpdates)){case _ => true} + val eventualUpdateResponse = executeAndLog( + updateRequest, + s"ES6 adding lease on image $id with: $leaseParameter" + ) + .incrementOnFailure(metrics.map(_.failedUsagesUpdates)) { case _ => true } List(eventualUpdateResponse.map(_ => ElasticSearchUpdateResponse())) } - def removeImageLease(id: String, leaseId: Option[String], lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def removeImageLease( + id: String, + leaseId: Option[String], + lastModified: DateTime + )(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val removeLeaseScript = """| | for(int i = 0; i < ctx._source.leases.leases.size(); i++) { @@ -363,22 +532,37 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) val scriptSource = loadUpdatingModificationPainless(removeLeaseScript) - val leaseIdParameter = JsDefined(Json.toJson(leaseId)).toOption.map(_.as[String]).orNull + val leaseIdParameter = + JsDefined(Json.toJson(leaseId)).toOption.map(_.as[String]).orNull - val updateRequest = prepareUpdateRequest(id, scriptSource, lastModified, ("leaseId", leaseIdParameter)) + val updateRequest = prepareUpdateRequest( + id, + scriptSource, + lastModified, + ("leaseId", leaseIdParameter) + ) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 removing lease with id $leaseIdParameter from image $id", true) + val eventualUpdateResponse = executeAndLog( + updateRequest, + s"ES6 removing lease with id $leaseIdParameter from image $id", + true + ) .incrementOnFailure(metrics.map(_.failedUsagesUpdates)) { case _ => true } List(eventualUpdateResponse.map(_ => ElasticSearchUpdateResponse())) } - def updateImageExports(id: String, exports: Seq[Crop], lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { - + def updateImageExports( + id: String, + exports: Seq[Crop], + lastModified: DateTime + )(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val addExportsScript = - """| if (ctx._source.exports == null) { + """| if (ctx._source.exports == null) { | ctx._source.exports = params.exports; | } else { | ctx._source.exports.addAll(params.exports); @@ -387,49 +571,81 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) val scriptSource = loadUpdatingModificationPainless(addExportsScript) - val exportsParameter = JsDefined(Json.toJson(exports)).toOption.map { cs: JsValue => // TODO deduplicate with set collections - 
cs.as[JsArray].value.map { c => - asNestedMap(c) - } + val exportsParameter = JsDefined(Json.toJson(exports)).toOption.map { + cs: JsValue => // TODO deduplicate with set collections + cs.as[JsArray].value.map { c => + asNestedMap(c) + } }.orNull - val updateRequest: UpdateRequest = prepareUpdateRequest(id, scriptSource, lastModified, ("exports", exportsParameter)) + val updateRequest: UpdateRequest = prepareUpdateRequest( + id, + scriptSource, + lastModified, + ("exports", exportsParameter) + ) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 updating exports on image $id") - .incrementOnFailure(metrics.map(_.failedExportsUpdates)) { case _ => true } + val eventualUpdateResponse = + executeAndLog(updateRequest, s"ES6 updating exports on image $id") + .incrementOnFailure(metrics.map(_.failedExportsUpdates)) { case _ => + true + } List(eventualUpdateResponse.map(_ => ElasticSearchUpdateResponse())) } - def deleteImageExports(id: String, lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { + def deleteImageExports(id: String, lastModified: DateTime)(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { val deleteExportsScript = "ctx._source.remove('exports');" val scriptSource = loadUpdatingModificationPainless(deleteExportsScript) val updateRequest = prepareUpdateRequest(id, scriptSource, lastModified) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 removing exports from image $id", true) - .incrementOnFailure(metrics.map(_.failedExportsUpdates)) { case _ => true } + val eventualUpdateResponse = + executeAndLog(updateRequest, s"ES6 removing exports from image $id", true) + .incrementOnFailure(metrics.map(_.failedExportsUpdates)) { case _ => + true + } List(eventualUpdateResponse.map(_ => ElasticSearchUpdateResponse())) } - def setImageCollections(id: String, collections: Seq[Collection], lastModified: DateTime) - (implicit ex: ExecutionContext, logMarker: LogMarker): List[Future[ElasticSearchUpdateResponse]] = { - val setImagesCollectionScript = "ctx._source.collections = params.collections;" - val setImageCollectionsScript = loadUpdatingModificationPainless(setImagesCollectionScript) - - val collectionsParameter = JsDefined(Json.toJson(collections)).toOption.map { cs: JsValue => - cs.as[JsArray].value.map { c => - asNestedMap(c) - } - }.orNull + def setImageCollections( + id: String, + collections: Seq[Collection], + lastModified: DateTime + )(implicit + ex: ExecutionContext, + logMarker: LogMarker + ): List[Future[ElasticSearchUpdateResponse]] = { + val setImagesCollectionScript = + "ctx._source.collections = params.collections;" + val setImageCollectionsScript = loadUpdatingModificationPainless( + setImagesCollectionScript + ) - val updateRequest: UpdateRequest = prepareUpdateRequest(id, setImageCollectionsScript, lastModified, ("collections", collectionsParameter)) + val collectionsParameter = + JsDefined(Json.toJson(collections)).toOption.map { cs: JsValue => + cs.as[JsArray].value.map { c => + asNestedMap(c) + } + }.orNull + + val updateRequest: UpdateRequest = prepareUpdateRequest( + id, + setImageCollectionsScript, + lastModified, + ("collections", collectionsParameter) + ) - val eventualUpdateResponse = executeAndLog(updateRequest, s"ES6 setting collections on image $id") - .incrementOnFailure(metrics.map(_.failedCollectionsUpdates)) { case _ => true } + val eventualUpdateResponse = + executeAndLog(updateRequest, s"ES6 
setting collections on image $id") + .incrementOnFailure(metrics.map(_.failedCollectionsUpdates)) { case _ => + true + } List(eventualUpdateResponse.map(_ => ElasticSearchUpdateResponse())) } @@ -457,10 +673,16 @@ class ElasticSearch(config: ElasticSearchConfig, metrics: Option[ThrallMetrics]) | } """.stripMargin - private val refreshEditsScript = refreshMetadataScript + refreshUsageRightsScript + private val refreshEditsScript = + refreshMetadataScript + refreshUsageRightsScript - private def loadPainless(str: String) = str.stripMargin.split('\n').map(_.trim.filter(_ >= ' ')).mkString // remove ctrl chars and leading, trailing whitespace - private def loadUpdatingModificationPainless(str: String) = loadPainless(modificationDateFormatting + "\n" + str + "\n" + updateLastModifiedScript) + private def loadPainless(str: String) = str.stripMargin + .split('\n') + .map(_.trim.filter(_ >= ' ')) + .mkString // remove ctrl chars and leading, trailing whitespace + private def loadUpdatingModificationPainless(str: String) = loadPainless( + modificationDateFormatting + "\n" + str + "\n" + updateLastModifiedScript + ) private val modificationDateFormatting = """ diff --git a/thrall/app/lib/elasticsearch/SyndicationRightsOps.scala b/thrall/app/lib/elasticsearch/SyndicationRightsOps.scala index e523f25367..15708e7d1a 100644 --- a/thrall/app/lib/elasticsearch/SyndicationRightsOps.scala +++ b/thrall/app/lib/elasticsearch/SyndicationRightsOps.scala @@ -7,92 +7,144 @@ import org.joda.time.DateTime import scala.concurrent.{ExecutionContext, Future} -class SyndicationRightsOps(es: ElasticSearch)(implicit ex: ExecutionContext) extends GridLogging { - /** - * Upserting syndication rights and updating photoshoots accordingly. +class SyndicationRightsOps(es: ElasticSearch)(implicit ex: ExecutionContext) + extends GridLogging { + + /** Upserting syndication rights and updating photoshoots accordingly. * @param image - image that has been updated. Should include any new rights. 
* @param currentPhotoshootOpt - new photoshoot if that's the case * @param previousPhotoshootOpt - old photoshoot; defined when image had been moved to (or removed from) a photoshoot * @return */ - def upsertOrRefreshRights(image: Image, - currentPhotoshootOpt: Option[Photoshoot] = None, - previousPhotoshootOpt: Option[Photoshoot] = None, - lastModified: DateTime)( - implicit logMarker: LogMarker - ): Future[Unit] = for { + def upsertOrRefreshRights( + image: Image, + currentPhotoshootOpt: Option[Photoshoot] = None, + previousPhotoshootOpt: Option[Photoshoot] = None, + lastModified: DateTime + )(implicit + logMarker: LogMarker + ): Future[Unit] = for { _ <- refreshPreviousPhotoshoot(image, previousPhotoshootOpt, lastModified) _ <- newRightsUpsert(image, currentPhotoshootOpt, lastModified) } yield () - private def newRightsUpsert(image: Image, - currentPhotoshootOpt: Option[Photoshoot], - lastModified: DateTime - ) - ( - implicit logMarker: LogMarker - ): Future[Unit] = + private def newRightsUpsert( + image: Image, + currentPhotoshootOpt: Option[Photoshoot], + lastModified: DateTime + )(implicit + logMarker: LogMarker + ): Future[Unit] = image.syndicationRights match { - case Some(_) => for { - _ <- Future.sequence(es.updateImageSyndicationRights(image.id, image.syndicationRights, lastModified)) - _ <- refreshCurrentPhotoshoot(image, currentPhotoshootOpt, lastModified) - } yield () + case Some(_) => + for { + _ <- Future.sequence( + es.updateImageSyndicationRights( + image.id, + image.syndicationRights, + lastModified + ) + ) + _ <- refreshCurrentPhotoshoot( + image, + currentPhotoshootOpt, + lastModified + ) + } yield () case None => refreshCurrentPhotoshoot(image, currentPhotoshootOpt, lastModified) } - private def refreshPreviousPhotoshoot(image: Image, - previousPhotoshootOpt: Option[Photoshoot], - lastModified: DateTime - )( - implicit logMarker: LogMarker - ): Future[Unit] = + private def refreshPreviousPhotoshoot( + image: Image, + previousPhotoshootOpt: Option[Photoshoot], + lastModified: DateTime + )(implicit + logMarker: LogMarker + ): Future[Unit] = previousPhotoshootOpt match { - case Some(photoshoot) => refreshPhotoshoot(image, photoshoot, Some(image.id), lastModified) + case Some(photoshoot) => + refreshPhotoshoot(image, photoshoot, Some(image.id), lastModified) case None => Future.successful(()) } - private def refreshCurrentPhotoshoot(image: Image, - currentPhotoshootOpt: Option[Photoshoot], - lastModified: DateTime)( - implicit logMarker: LogMarker - ): Future[Unit] = + private def refreshCurrentPhotoshoot( + image: Image, + currentPhotoshootOpt: Option[Photoshoot], + lastModified: DateTime + )(implicit + logMarker: LogMarker + ): Future[Unit] = currentPhotoshootOpt match { - case Some(photoshoot) => refreshPhotoshoot(image, photoshoot, None, lastModified) + case Some(photoshoot) => + refreshPhotoshoot(image, photoshoot, None, lastModified) case None => if (image.hasInferredSyndicationRightsOrNoRights) - Future.sequence(es.deleteSyndicationRights(image.id, lastModified)).map(_ => ()) + Future + .sequence(es.deleteSyndicationRights(image.id, lastModified)) + .map(_ => ()) else Future.successful(()) } - private def refreshPhotoshoot(image: Image, - photoshoot: Photoshoot, - excludedImageId: Option[String] = None, - lastModified: DateTime - )( - implicit logMarker: LogMarker - ): Future[Unit] = + private def refreshPhotoshoot( + image: Image, + photoshoot: Photoshoot, + excludedImageId: Option[String] = None, + lastModified: DateTime + )(implicit + logMarker: LogMarker + ): 
Future[Unit] = for { - latestRights <- getLatestSyndicationRights(image, photoshoot, excludedImageId) - inferredImages <- getInferredSyndicationRightsImages(image, photoshoot, excludedImageId) - } yield updateRights(image, photoshoot, latestRights, inferredImages, lastModified) + latestRights <- getLatestSyndicationRights( + image, + photoshoot, + excludedImageId + ) + inferredImages <- getInferredSyndicationRightsImages( + image, + photoshoot, + excludedImageId + ) + } yield updateRights( + image, + photoshoot, + latestRights, + inferredImages, + lastModified + ) - private def updateRights(image: Image, - photoshoot: Photoshoot, - latestRights: Option[SyndicationRights], - inferredImages: List[Image], - lastModified: DateTime - )( - implicit logMarker: LogMarker - ): Unit = + private def updateRights( + image: Image, + photoshoot: Photoshoot, + latestRights: Option[SyndicationRights], + inferredImages: List[Image], + lastModified: DateTime + )(implicit + logMarker: LogMarker + ): Unit = latestRights match { - case updatedRights@Some(rights) if updateRequired(image, inferredImages) => - logger.info(s"Using rights ${Json.toJson(rights)} to infer syndication rights for ${inferredImages.length} image id(s) in photoshoot $photoshoot: ${inferredImages.map(_.id)}") - inferredImages.foreach(img => es.updateImageSyndicationRights(img.id, updatedRights.map(_.copy(isInferred = true)), lastModified)) + case updatedRights @ Some(rights) + if updateRequired(image, inferredImages) => + logger.info( + s"Using rights ${Json.toJson(rights)} to infer syndication rights for ${inferredImages.length} image id(s) in photoshoot $photoshoot: ${inferredImages + .map(_.id)}" + ) + inferredImages.foreach(img => + es.updateImageSyndicationRights( + img.id, + updatedRights.map(_.copy(isInferred = true)), + lastModified + ) + ) case None if image.hasNonInferredRights => - logger.info(s"Removing rights from images (photoshoot $photoshoot): ${inferredImages.map(_.id)} (total = ${inferredImages.length}).") - inferredImages.foreach(img => es.updateImageSyndicationRights(img.id, None, lastModified)) + logger.info( + s"Removing rights from images (photoshoot $photoshoot): ${inferredImages + .map(_.id)} (total = ${inferredImages.length})." + ) + inferredImages.foreach(img => + es.updateImageSyndicationRights(img.id, None, lastModified) + ) case _ => logger.info(s"No rights to refresh in photoshoot $photoshoot") } @@ -102,7 +154,12 @@ class SyndicationRightsOps(es: ElasticSearch)(implicit ex: ExecutionContext) ext * - the list of images with inferred rights has at least one image with syndication rights (inferred or not inferred) * Both these condition indicate something has changed in the state of the photoshoot and we need to update the rights. */ - private def updateRequired(image: Image, inferredImages: List[Image]): Boolean = image.hasNonInferredRights || inferredImages.exists(_.syndicationRights.isDefined) + private def updateRequired( + image: Image, + inferredImages: List[Image] + ): Boolean = image.hasNonInferredRights || inferredImages.exists( + _.syndicationRights.isDefined + ) /* The following methods are needed because ES is eventually consistent. * When we move an image into a photoshoot we have to refresh the photoshoot by querying for the latest syndication @@ -111,46 +168,62 @@ class SyndicationRightsOps(es: ElasticSearch)(implicit ex: ExecutionContext) ext * risk missing important rights information. * Therefore, we have to make sure we are taking it into consideration. 
*/ - private def getLatestSyndicationRights(image: Image, - photoshoot: Photoshoot, - excludedImageId: Option[String] = None - )( - implicit logMarker: LogMarker - ): Future[Option[SyndicationRights]] = + private def getLatestSyndicationRights( + image: Image, + photoshoot: Photoshoot, + excludedImageId: Option[String] = None + )(implicit + logMarker: LogMarker + ): Future[Option[SyndicationRights]] = excludedImageId match { - case Some(_) => es.getLatestSyndicationRights(photoshoot, excludedImageId).map(_.flatMap(_.syndicationRights)) + case Some(_) => + es.getLatestSyndicationRights(photoshoot, excludedImageId) + .map(_.flatMap(_.syndicationRights)) case None => - val hasInferredRights: Boolean = image.hasInferredSyndicationRightsOrNoRights + val hasInferredRights: Boolean = + image.hasInferredSyndicationRightsOrNoRights es.getLatestSyndicationRights(photoshoot, None).map { case Some(dbImage) => - if (!hasInferredRights) mostRecentSyndicationRights(dbImage, image) else dbImage.syndicationRights + if (!hasInferredRights) mostRecentSyndicationRights(dbImage, image) + else dbImage.syndicationRights case None => if (!hasInferredRights) image.syndicationRights else None } } - def mostRecentSyndicationRights(image1: Image, image2: Image): Option[SyndicationRights] = (image1.rcsPublishDate, image2.rcsPublishDate) match { - case (Some(date1), Some(date2)) => if(date1.isAfter(date2)) image1.syndicationRights else image2.syndicationRights - case (Some(_), None) => image1.syndicationRights - case (None, Some(_)) => image2.syndicationRights - case (None, None) => None - } + def mostRecentSyndicationRights( + image1: Image, + image2: Image + ): Option[SyndicationRights] = + (image1.rcsPublishDate, image2.rcsPublishDate) match { + case (Some(date1), Some(date2)) => + if (date1.isAfter(date2)) image1.syndicationRights + else image2.syndicationRights + case (Some(_), None) => image1.syndicationRights + case (None, Some(_)) => image2.syndicationRights + case (None, None) => None + } - private def getInferredSyndicationRightsImages(image: Image, - photoshoot: Photoshoot, - excludedImageId: Option[String] = None - )( - implicit logMarker: LogMarker - ): Future[List[Image]] = + private def getInferredSyndicationRightsImages( + image: Image, + photoshoot: Photoshoot, + excludedImageId: Option[String] = None + )(implicit + logMarker: LogMarker + ): Future[List[Image]] = excludedImageId match { - case Some(_) => es.getInferredSyndicationRightsImages(photoshoot, excludedImageId) + case Some(_) => + es.getInferredSyndicationRightsImages(photoshoot, excludedImageId) case None => - val imageId = if (!image.hasInferredSyndicationRightsOrNoRights) Some(image.id) else None - es.getInferredSyndicationRightsImages(photoshoot, imageId).map { images => - if (image.hasInferredSyndicationRightsOrNoRights) - images :+ image - else - images + val imageId = + if (!image.hasInferredSyndicationRightsOrNoRights) Some(image.id) + else None + es.getInferredSyndicationRightsImages(photoshoot, imageId).map { + images => + if (image.hasInferredSyndicationRightsOrNoRights) + images :+ image + else + images } } } diff --git a/thrall/app/lib/kinesis/KinesisConfig.scala b/thrall/app/lib/kinesis/KinesisConfig.scala index 49e082634c..e060d852b3 100644 --- a/thrall/app/lib/kinesis/KinesisConfig.scala +++ b/thrall/app/lib/kinesis/KinesisConfig.scala @@ -3,15 +3,21 @@ package lib.kinesis import java.net.InetAddress import java.util.UUID -import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{InitialPositionInStream, 
KinesisClientLibConfiguration} +import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{ + InitialPositionInStream, + KinesisClientLibConfiguration +} import com.amazonaws.services.kinesis.metrics.interfaces.MetricsLevel import com.gu.mediaservice.lib.logging.GridLogging import lib.KinesisReceiverConfig import org.joda.time.DateTime object KinesisConfig extends GridLogging { - private val workerId = InetAddress.getLocalHost.getCanonicalHostName + ":" + UUID.randomUUID() + private val workerId = + InetAddress.getLocalHost.getCanonicalHostName + ":" + UUID.randomUUID() - def kinesisConfig(config: KinesisReceiverConfig): KinesisClientLibConfiguration = { + def kinesisConfig( + config: KinesisReceiverConfig + ): KinesisClientLibConfiguration = { val clientConfig = kinesisClientLibConfig( kinesisAppName = config.streamName, streamName = config.streamName, @@ -20,13 +26,23 @@ object KinesisConfig extends GridLogging { config.metricsLevel ) - config.awsLocalEndpoint.map(endpoint => { - logger.info(s"creating kinesis consumer with endpoint=$endpoint") - clientConfig.withKinesisEndpoint(endpoint).withDynamoDBEndpoint(endpoint) - }).getOrElse(clientConfig) + config.awsLocalEndpoint + .map(endpoint => { + logger.info(s"creating kinesis consumer with endpoint=$endpoint") + clientConfig + .withKinesisEndpoint(endpoint) + .withDynamoDBEndpoint(endpoint) + }) + .getOrElse(clientConfig) } - private def kinesisClientLibConfig(kinesisAppName: String, streamName: String, config: KinesisReceiverConfig, from: Option[DateTime], metricsLevel: MetricsLevel): KinesisClientLibConfiguration = { + private def kinesisClientLibConfig( + kinesisAppName: String, + streamName: String, + config: KinesisReceiverConfig, + from: Option[DateTime], + metricsLevel: MetricsLevel + ): KinesisClientLibConfiguration = { val credentialsProvider = config.awsCredentials val kinesisConfig = new KinesisClientLibConfiguration( @@ -36,16 +52,18 @@ object KinesisConfig extends GridLogging { credentialsProvider, credentialsProvider, workerId - ).withRegionName(config.awsRegion). - withMaxRecords(100). - withIdleMillisBetweenCalls(1000). - withIdleTimeBetweenReadsInMillis(250). - withCallProcessRecordsEvenForEmptyRecordList(true). 
- withMetricsLevel(metricsLevel) + ).withRegionName(config.awsRegion) + .withMaxRecords(100) + .withIdleMillisBetweenCalls(1000) + .withIdleTimeBetweenReadsInMillis(250) + .withCallProcessRecordsEvenForEmptyRecordList(true) + .withMetricsLevel(metricsLevel) from.fold( - kinesisConfig.withInitialPositionInStream(InitialPositionInStream.TRIM_HORIZON) - ){ f => + kinesisConfig.withInitialPositionInStream( + InitialPositionInStream.TRIM_HORIZON + ) + ) { f => kinesisConfig.withTimestampAtInitialPositionInStream(f.toDate) } } diff --git a/thrall/app/lib/kinesis/MessageProcessor.scala b/thrall/app/lib/kinesis/MessageProcessor.scala index ef6d73bc7b..b54bdae7f1 100644 --- a/thrall/app/lib/kinesis/MessageProcessor.scala +++ b/thrall/app/lib/kinesis/MessageProcessor.scala @@ -13,132 +13,215 @@ import play.api.libs.json._ import scala.concurrent.{ExecutionContext, Future} +class MessageProcessor( + es: ElasticSearch, + store: ThrallStore, + metadataEditorNotifications: MetadataEditorNotifications, + syndicationRightsOps: SyndicationRightsOps +) extends GridLogging { -class MessageProcessor(es: ElasticSearch, - store: ThrallStore, - metadataEditorNotifications: MetadataEditorNotifications, - syndicationRightsOps: SyndicationRightsOps - ) extends GridLogging { - - def process(updateMessage: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext): Future[Any] = { + def process(updateMessage: UpdateMessage, logMarker: LogMarker)(implicit + ec: ExecutionContext + ): Future[Any] = { updateMessage.subject match { - case "image" => indexImage(updateMessage, logMarker) + case "image" => indexImage(updateMessage, logMarker) case "reingest-image" => indexImage(updateMessage, logMarker) - case "delete-image" => deleteImage(updateMessage, logMarker) - case "update-image" => indexImage(updateMessage, logMarker) - case "delete-image-exports" => deleteImageExports(updateMessage, logMarker) - case "update-image-exports" => updateImageExports(updateMessage, logMarker) - case "update-image-user-metadata" => updateImageUserMetadata(updateMessage, logMarker) + case "delete-image" => deleteImage(updateMessage, logMarker) + case "update-image" => indexImage(updateMessage, logMarker) + case "delete-image-exports" => + deleteImageExports(updateMessage, logMarker) + case "update-image-exports" => + updateImageExports(updateMessage, logMarker) + case "update-image-user-metadata" => + updateImageUserMetadata(updateMessage, logMarker) case "update-image-usages" => updateImageUsages(updateMessage, logMarker) - case "replace-image-leases" => replaceImageLeases(updateMessage, logMarker) - case "add-image-lease" => addImageLease(updateMessage, logMarker) + case "replace-image-leases" => + replaceImageLeases(updateMessage, logMarker) + case "add-image-lease" => addImageLease(updateMessage, logMarker) case "remove-image-lease" => removeImageLease(updateMessage, logMarker) - case "set-image-collections" => setImageCollections(updateMessage, logMarker) + case "set-image-collections" => + setImageCollections(updateMessage, logMarker) case "delete-usages" => deleteAllUsages(updateMessage, logMarker) - case "upsert-rcs-rights" => upsertSyndicationRights(updateMessage, logMarker) - case "update-image-photoshoot" => updateImagePhotoshoot(updateMessage, logMarker) - case unknownSubject => Future.failed(ProcessorNotFoundException(unknownSubject)) - } + case "upsert-rcs-rights" => + upsertSyndicationRights(updateMessage, logMarker) + case "update-image-photoshoot" => + updateImagePhotoshoot(updateMessage, logMarker) + case 
unknownSubject => + Future.failed(ProcessorNotFoundException(unknownSubject)) + } } - def updateImageUsages(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext): Future[List[ElasticSearchUpdateResponse]] = { + def updateImageUsages(message: UpdateMessage, logMarker: LogMarker)(implicit + ec: ExecutionContext + ): Future[List[ElasticSearchUpdateResponse]] = { implicit val unw: OWrites[UsageNotice] = Json.writes[UsageNotice] implicit val lm: LogMarker = logMarker withId(message) { id => withUsageNotice(message) { usageNotice => val usages = usageNotice.usageJson.as[Seq[Usage]] - Future.traverse(es.updateImageUsages(id, usages, message.lastModified))(_.recoverWith { - case ElasticNotFoundException => Future.successful(ElasticSearchUpdateResponse()) - }) + Future.traverse(es.updateImageUsages(id, usages, message.lastModified))( + _.recoverWith { case ElasticNotFoundException => + Future.successful(ElasticSearchUpdateResponse()) + } + ) } } } - private def reindexImage(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = { - logger.info(logMarker, s"Reindexing image: ${message.image.map(_.id).getOrElse("image not found")}") + private def reindexImage(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ) = { + logger.info( + logMarker, + s"Reindexing image: ${message.image.map(_.id).getOrElse("image not found")}" + ) indexImage(message, logMarker) } - private def indexImage(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = + private def indexImage(message: UpdateMessage, logMarker: LogMarker)(implicit + ec: ExecutionContext + ) = withImage(message)(i => Future.sequence( - es.indexImage(i.id, i, message.lastModified)(ec,logMarker) + es.indexImage(i.id, i, message.lastModified)(ec, logMarker) ) ) - private def updateImageExports(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = + private def updateImageExports(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ) = withId(message)(id => withCrops(message)(crops => Future.sequence( - es.updateImageExports(id, crops, message.lastModified)(ec,logMarker)))) + es.updateImageExports(id, crops, message.lastModified)(ec, logMarker) + ) + ) + ) - private def deleteImageExports(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = + private def deleteImageExports(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ) = withId(message)(id => Future.sequence( - es.deleteImageExports(id, message.lastModified)(ec, logMarker))) + es.deleteImageExports(id, message.lastModified)(ec, logMarker) + ) + ) - private def updateImageUserMetadata(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = - withEdits(message)( edits => + private def updateImageUserMetadata( + message: UpdateMessage, + logMarker: LogMarker + )(implicit ec: ExecutionContext) = + withEdits(message)(edits => withId(message)(id => - Future.sequence(es.applyImageMetadataOverride(id, edits, message.lastModified)(ec,logMarker)))) + Future.sequence( + es.applyImageMetadataOverride(id, edits, message.lastModified)( + ec, + logMarker + ) + ) + ) + ) - private def replaceImageLeases(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = + private def replaceImageLeases(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ) = withId(message)(id => withLeases(message)(leases => - 
Future.sequence(es.replaceImageLeases(id, leases, message.lastModified)(ec, logMarker)))) + Future.sequence( + es.replaceImageLeases(id, leases, message.lastModified)(ec, logMarker) + ) + ) + ) - private def addImageLease(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = + private def addImageLease(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ) = withId(message)(id => - withLease(message)( mediaLease => - Future.sequence(es.addImageLease(id, mediaLease, message.lastModified)(ec, logMarker)))) + withLease(message)(mediaLease => + Future.sequence( + es.addImageLease(id, mediaLease, message.lastModified)(ec, logMarker) + ) + ) + ) - private def removeImageLease(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext): Future[List[ElasticSearchUpdateResponse]] = + private def removeImageLease(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ): Future[List[ElasticSearchUpdateResponse]] = withId(message)(id => - withLeaseId(message)( leaseId => - Future.sequence(es.removeImageLease(id, Some(leaseId), message.lastModified)(ec, logMarker)))) + withLeaseId(message)(leaseId => + Future.sequence( + es.removeImageLease(id, Some(leaseId), message.lastModified)( + ec, + logMarker + ) + ) + ) + ) - private def setImageCollections(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = + private def setImageCollections(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ) = withId(message)(id => withCollections(message)(collections => - Future.sequence(es.setImageCollections(id, collections, message.lastModified)(ec, logMarker)))) + Future.sequence( + es.setImageCollections(id, collections, message.lastModified)( + ec, + logMarker + ) + ) + ) + ) - private def deleteImage(updateMessage: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = { + private def deleteImage(updateMessage: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ) = { Future.sequence( withId(updateMessage) { id => - implicit val marker: LogMarker = logMarker ++ logger.imageIdMarker(ImageId(id)) + implicit val marker: LogMarker = + logMarker ++ logger.imageIdMarker(ImageId(id)) // if we cannot delete the image as it's "protected", succeed and delete // the message anyway. 
logger.info(marker, "ES6 Deleting image: " + id) es.deleteImage(id).map { requests => - requests.map { - _: ElasticSearchDeleteResponse => - store.deleteOriginal(id) - store.deleteThumbnail(id) - store.deletePng(id) - metadataEditorNotifications.publishImageDeletion(id) - EsResponse(s"Image deleted: $id") - } recoverWith { - case ImageNotDeletable => - logger.info(marker, "Could not delete image") - Future.successful(EsResponse(s"Image cannot be deleted: $id")) + requests.map { _: ElasticSearchDeleteResponse => + store.deleteOriginal(id) + store.deleteThumbnail(id) + store.deletePng(id) + metadataEditorNotifications.publishImageDeletion(id) + EsResponse(s"Image deleted: $id") + } recoverWith { case ImageNotDeletable => + logger.info(marker, "Could not delete image") + Future.successful(EsResponse(s"Image cannot be deleted: $id")) } } } ) } - private def deleteAllUsages(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext) = + private def deleteAllUsages(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ) = withId(message)(id => - Future.sequence(es.deleteAllImageUsages(id, message.lastModified)(ec, logMarker))) + Future.sequence( + es.deleteAllImageUsages(id, message.lastModified)(ec, logMarker) + ) + ) - def upsertSyndicationRights(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext): Future[Any] = - withId(message){ id => - implicit val marker: LogMarker = logMarker ++ logger.imageIdMarker(ImageId(id)) + def upsertSyndicationRights(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ): Future[Any] = + withId(message) { id => + implicit val marker: LogMarker = + logMarker ++ logger.imageIdMarker(ImageId(id)) withSyndicationRights(message)(syndicationRights => es.getImage(id) map { case Some(image) => val photoshoot = image.userMetadata.flatMap(_.photoshoot) - logger.info(marker, s"Upserting syndication rights for image $id in photoshoot $photoshoot with rights ${Json.toJson(syndicationRights)}") + logger.info( + marker, + s"Upserting syndication rights for image $id in photoshoot $photoshoot with rights ${Json + .toJson(syndicationRights)}" + ) syndicationRightsOps.upsertOrRefreshRights( image = image.copy(syndicationRights = Some(syndicationRights)), currentPhotoshootOpt = photoshoot, @@ -150,16 +233,24 @@ class MessageProcessor(es: ElasticSearch, ) } - def updateImagePhotoshoot(message: UpdateMessage, logMarker: LogMarker)(implicit ec: ExecutionContext): Future[Unit] = { + def updateImagePhotoshoot(message: UpdateMessage, logMarker: LogMarker)( + implicit ec: ExecutionContext + ): Future[Unit] = { withId(message) { id => - implicit val marker: LogMarker = logMarker ++ logger.imageIdMarker(ImageId(id)) + implicit val marker: LogMarker = + logMarker ++ logger.imageIdMarker(ImageId(id)) withEdits(message) { upcomingEdits => for { imageOpt <- es.getImage(id) - prevPhotoshootOpt = imageOpt.flatMap(_.userMetadata.flatMap(_.photoshoot)) + prevPhotoshootOpt = imageOpt.flatMap( + _.userMetadata.flatMap(_.photoshoot) + ) _ <- updateImageUserMetadata(message, logMarker) _ <- { - logger.info(marker, s"Upserting syndication rights for image $id. Moving from photoshoot $prevPhotoshootOpt to ${upcomingEdits.photoshoot}.") + logger.info( + marker, + s"Upserting syndication rights for image $id. Moving from photoshoot $prevPhotoshootOpt to ${upcomingEdits.photoshoot}." 
+ ) syndicationRightsOps.upsertOrRefreshRights( image = imageOpt.get, currentPhotoshootOpt = upcomingEdits.photoshoot, @@ -167,7 +258,10 @@ class MessageProcessor(es: ElasticSearch, message.lastModified ) } - } yield logger.info(marker, s"Moved image $id from $prevPhotoshootOpt to ${upcomingEdits.photoshoot}") + } yield logger.info( + marker, + s"Moved image $id from $prevPhotoshootOpt to ${upcomingEdits.photoshoot}" + ) } } } @@ -184,7 +278,9 @@ class MessageProcessor(es: ElasticSearch, } } - private def withCollections[A](message: UpdateMessage)(f: Seq[Collection] => A): A = { + private def withCollections[A]( + message: UpdateMessage + )(f: Seq[Collection] => A): A = { message.collections.map(f).getOrElse { sys.error(s"No edits present in message: $message") } @@ -208,7 +304,9 @@ class MessageProcessor(es: ElasticSearch, } } - private def withLeases[A](message: UpdateMessage)(f: Seq[MediaLease] => A): A = { + private def withLeases[A]( + message: UpdateMessage + )(f: Seq[MediaLease] => A): A = { message.leases.map(f).getOrElse { sys.error(s"No media leases present in message: $message") } @@ -220,13 +318,19 @@ class MessageProcessor(es: ElasticSearch, } } - private def withSyndicationRights[A](message: UpdateMessage)(f: SyndicationRights => A): A = { + private def withSyndicationRights[A]( + message: UpdateMessage + )(f: SyndicationRights => A): A = { message.syndicationRights.map(f).getOrElse { - sys.error(s"No syndication rights present on data field message: $message") + sys.error( + s"No syndication rights present on data field message: $message" + ) } } - private def withUsageNotice[A](message: UpdateMessage)(f: UsageNotice => A): A = { + private def withUsageNotice[A]( + message: UpdateMessage + )(f: UsageNotice => A): A = { message.usageNotice.map(f).getOrElse { sys.error(s"No usage notice present in message: $message") } @@ -234,4 +338,5 @@ class MessageProcessor(es: ElasticSearch, } -case class ProcessorNotFoundException(unknownSubject: String) extends Exception(s"Could not find processor for $unknownSubject message") +case class ProcessorNotFoundException(unknownSubject: String) + extends Exception(s"Could not find processor for $unknownSubject message") diff --git a/thrall/app/lib/kinesis/ThrallEventConsumer.scala b/thrall/app/lib/kinesis/ThrallEventConsumer.scala index ff73565ef9..f3114a564b 100644 --- a/thrall/app/lib/kinesis/ThrallEventConsumer.scala +++ b/thrall/app/lib/kinesis/ThrallEventConsumer.scala @@ -17,84 +17,109 @@ import scala.concurrent.duration.{FiniteDuration, MILLISECONDS, SECONDS} import scala.concurrent.{ExecutionContext, Future, TimeoutException} import scala.util.{Failure, Success, Try} -class ThrallEventConsumer(es: ElasticSearch, - thrallMetrics: ThrallMetrics, - store: ThrallStore, - metadataEditorNotifications: MetadataEditorNotifications, - syndicationRightsOps: SyndicationRightsOps, - actorSystem: ActorSystem) extends PlayJsonHelpers with GridLogging { +class ThrallEventConsumer( + es: ElasticSearch, + thrallMetrics: ThrallMetrics, + store: ThrallStore, + metadataEditorNotifications: MetadataEditorNotifications, + syndicationRightsOps: SyndicationRightsOps, + actorSystem: ActorSystem +) extends PlayJsonHelpers + with GridLogging { private val attemptTimeout = FiniteDuration(20, SECONDS) private val delay = FiniteDuration(1, MILLISECONDS) private val attempts = 2 private val timeout = attemptTimeout * attempts + delay * (attempts - 1) - private val messageProcessor = new MessageProcessor(es, store, metadataEditorNotifications, syndicationRightsOps) 
+ private val messageProcessor = new MessageProcessor( + es, + store, + metadataEditorNotifications, + syndicationRightsOps + ) private implicit val implicitActorSystem: ActorSystem = actorSystem private implicit val executionContext: ExecutionContext = ExecutionContext.fromExecutor(Executors.newCachedThreadPool) - def processUpdateMessage(updateMessage: UpdateMessage): Future[UpdateMessage] = { + def processUpdateMessage( + updateMessage: UpdateMessage + ): Future[UpdateMessage] = { val marker = updateMessage val stopwatch = Stopwatch.start //Try to process the update message twice, and give them both 30 seconds to run. - RetryHandler.handleWithRetryAndTimeout( - /* - * Brief note on retry strategy: - * Trying a second time might be dangerous, hopefully waiting a reasonable length of time should mitigate this. - * From the logs, trying again after 30 seconds should only affect 1/300,000 messages. - * - */ - (marker) => { - messageProcessor.process(updateMessage, marker) - }, attempts, attemptTimeout, delay, marker - ).transform { - case Success(_) => { - logger.info( - combineMarkers(marker, stopwatch.elapsed), - s"Completed processing of ${updateMessage.subject} message" - ) - Success(updateMessage) - } - case Failure(processorNotFoundException: ProcessorNotFoundException) => { - logger.error( - s"Could not find processor for ${processorNotFoundException.unknownSubject} message; message will be ignored" - ) - Failure(processorNotFoundException) - } - case Failure(timeoutException: TimeoutException) => { - logger.error( - combineMarkers(marker, stopwatch.elapsed), - s"Timeout of $timeout reached while processing ${updateMessage.subject} message; message will be ignored:", - timeoutException - ) - Failure(timeoutException) - } - case Failure(e: Throwable) => { - logger.error( - combineMarkers(marker, stopwatch.elapsed), - s"Failed to process ${updateMessage.subject} message; message will be ignored:", e - ) - Failure(e) - } + RetryHandler + .handleWithRetryAndTimeout( + /* + * Brief note on retry strategy: + * Trying a second time might be dangerous, hopefully waiting a reasonable length of time should mitigate this. + * From the logs, trying again after 30 seconds should only affect 1/300,000 messages. 
+ * + */ + (marker) => { + messageProcessor.process(updateMessage, marker) + }, + attempts, + attemptTimeout, + delay, + marker + ) + .transform { + case Success(_) => { + logger.info( + combineMarkers(marker, stopwatch.elapsed), + s"Completed processing of ${updateMessage.subject} message" + ) + Success(updateMessage) + } + case Failure( + processorNotFoundException: ProcessorNotFoundException + ) => { + logger.error( + s"Could not find processor for ${processorNotFoundException.unknownSubject} message; message will be ignored" + ) + Failure(processorNotFoundException) + } + case Failure(timeoutException: TimeoutException) => { + logger.error( + combineMarkers(marker, stopwatch.elapsed), + s"Timeout of $timeout reached while processing ${updateMessage.subject} message; message will be ignored:", + timeoutException + ) + Failure(timeoutException) + } + case Failure(e: Throwable) => { + logger.error( + combineMarkers(marker, stopwatch.elapsed), + s"Failed to process ${updateMessage.subject} message; message will be ignored:", + e + ) + Failure(e) } } + } } object ThrallEventConsumer extends GridLogging { - def parseRecord(r: Array[Byte], timestamp: Instant):Option[UpdateMessage] = { + def parseRecord(r: Array[Byte], timestamp: Instant): Option[UpdateMessage] = { Try(JsonByteArrayUtil.fromByteArray[UpdateMessage](r)) match { case Success(Some(updateMessage: UpdateMessage)) => { - logger.info(updateMessage.toLogMarker, s"Received ${updateMessage.subject} message at $timestamp") + logger.info( + updateMessage.toLogMarker, + s"Received ${updateMessage.subject} message at $timestamp" + ) Some(updateMessage) } - case Success(None)=> { - logger.warn(s"No message present in record at $timestamp", new String(r)) + case Success(None) => { + logger.warn( + s"No message present in record at $timestamp", + new String(r) + ) None //No message received } case Failure(e) => { diff --git a/thrall/test/helpers/Fixtures.scala b/thrall/test/helpers/Fixtures.scala index d8a97e9821..1503a5515a 100644 --- a/thrall/test/helpers/Fixtures.scala +++ b/thrall/test/helpers/Fixtures.scala @@ -5,61 +5,75 @@ import java.util.UUID import com.gu.mediaservice.model._ import com.gu.mediaservice.model.leases.{LeasesByMedia, MediaLease} -import com.gu.mediaservice.model.usage.{DigitalUsage, PublishedUsageStatus, Usage} +import com.gu.mediaservice.model.usage.{ + DigitalUsage, + PublishedUsageStatus, + Usage +} import org.joda.time.{DateTime, DateTimeZone} trait Fixtures { def createImage( - id: String, - usageRights: UsageRights, - syndicationRights: Option[SyndicationRights] = None, - leases: Option[LeasesByMedia] = None, - usages: List[Usage] = Nil, - optPhotoshoot: Option[Photoshoot] = None, - fileMetadata: Option[FileMetadata] = None - ): Image = + id: String, + usageRights: UsageRights, + syndicationRights: Option[SyndicationRights] = None, + leases: Option[LeasesByMedia] = None, + usages: List[Usage] = Nil, + optPhotoshoot: Option[Photoshoot] = None, + fileMetadata: Option[FileMetadata] = None + ): Image = Image( - id = id, - uploadTime = now, - uploadedBy = "yellow.giraffe@theguardian.com", - lastModified = None, - identifiers = Map.empty, - uploadInfo = UploadInfo(filename = Some(s"test_$id.jpeg")), - source = Asset( - file = new URI(s"http://file/$id"), - size = Some(292265L), - mimeType = Some(Jpeg), - dimensions = Some(Dimensions(width = 2800, height = 1600)), - secureUrl = None), - thumbnail = Some(Asset( - file = new URI(s"http://file/thumbnail/$id"), - size = Some(292265L), - mimeType = Some(Jpeg), - dimensions = 
Some(Dimensions(width = 800, height = 100)), - secureUrl = None)), - optimisedPng = None, - fileMetadata = fileMetadata.getOrElse(FileMetadata()), - userMetadata = optPhotoshoot.map(_ => Edits(metadata = ImageMetadata(), photoshoot = optPhotoshoot)), - metadata = ImageMetadata(dateTaken = None, title = Some(s"Test image $id"), keywords = List("test", "es")), - originalMetadata = ImageMetadata(), - usageRights = usageRights, - originalUsageRights = usageRights, - exports = Nil, - syndicationRights = syndicationRights, - leases = leases.getOrElse(LeasesByMedia.build(Nil)), - usages = usages - ) + id = id, + uploadTime = now, + uploadedBy = "yellow.giraffe@theguardian.com", + lastModified = None, + identifiers = Map.empty, + uploadInfo = UploadInfo(filename = Some(s"test_$id.jpeg")), + source = Asset( + file = new URI(s"http://file/$id"), + size = Some(292265L), + mimeType = Some(Jpeg), + dimensions = Some(Dimensions(width = 2800, height = 1600)), + secureUrl = None + ), + thumbnail = Some( + Asset( + file = new URI(s"http://file/thumbnail/$id"), + size = Some(292265L), + mimeType = Some(Jpeg), + dimensions = Some(Dimensions(width = 800, height = 100)), + secureUrl = None + ) + ), + optimisedPng = None, + fileMetadata = fileMetadata.getOrElse(FileMetadata()), + userMetadata = optPhotoshoot.map(_ => + Edits(metadata = ImageMetadata(), photoshoot = optPhotoshoot) + ), + metadata = ImageMetadata( + dateTaken = None, + title = Some(s"Test image $id"), + keywords = List("test", "es") + ), + originalMetadata = ImageMetadata(), + usageRights = usageRights, + originalUsageRights = usageRights, + exports = Nil, + syndicationRights = syndicationRights, + leases = leases.getOrElse(LeasesByMedia.build(Nil)), + usages = usages + ) def createImageForSyndication( - id: String, - rightsAcquired: Boolean, - rcsPublishDate: Option[DateTime], - lease: Option[MediaLease], - usages: List[Usage] = Nil, - fileMetadata: Option[FileMetadata] = None, - leasesLastModified: Option[DateTime] = None - ): Image = { + id: String, + rightsAcquired: Boolean, + rcsPublishDate: Option[DateTime], + lease: Option[MediaLease], + usages: List[Usage] = Nil, + fileMetadata: Option[FileMetadata] = None, + leasesLastModified: Option[DateTime] = None + ): Image = { val rights = List( Right("test", Some(rightsAcquired), Nil) ) @@ -68,28 +82,66 @@ trait Fixtures { val leaseByMedia = lease.map(l => LeasesByMedia.build(List(l))) - createImage(id, StaffPhotographer("Tom Jenkins", "The Guardian"), Some(syndicationRights), leaseByMedia, usages, fileMetadata = fileMetadata) + createImage( + id, + StaffPhotographer("Tom Jenkins", "The Guardian"), + Some(syndicationRights), + leaseByMedia, + usages, + fileMetadata = fileMetadata + ) } private def now = DateTime.now(DateTimeZone.UTC) - def someSyndRights = Some(SyndicationRights( - published = Some(now), - suppliers = List(Supplier(supplierName = Some("supplier"), supplierId = Some("supplier-id"), prAgreement = Some(true))), - rights = List(Right(rightCode = "code", acquired = Some(true), properties = Seq.empty)), - isInferred = false)) - - def imageWithNoSyndRights: Image = createImage(id = UUID.randomUUID().toString, usageRights = StaffPhotographer("Tom Jenkins", "The Guardian")) - def imageWithSyndRights: Image = createImage(id = UUID.randomUUID().toString, usageRights = StaffPhotographer("Tom Jenkins", "The Guardian"), syndicationRights = someSyndRights) - - def imageWithPhotoshoot(photoshoot: Photoshoot): Image = createImage(id = UUID.randomUUID().toString, StaffPhotographer("Tom Jenkins", 
"The Guardian"), optPhotoshoot = Some(photoshoot)) + def someSyndRights = Some( + SyndicationRights( + published = Some(now), + suppliers = List( + Supplier( + supplierName = Some("supplier"), + supplierId = Some("supplier-id"), + prAgreement = Some(true) + ) + ), + rights = List( + Right(rightCode = "code", acquired = Some(true), properties = Seq.empty) + ), + isInferred = false + ) + ) + + def imageWithNoSyndRights: Image = createImage( + id = UUID.randomUUID().toString, + usageRights = StaffPhotographer("Tom Jenkins", "The Guardian") + ) + def imageWithSyndRights: Image = createImage( + id = UUID.randomUUID().toString, + usageRights = StaffPhotographer("Tom Jenkins", "The Guardian"), + syndicationRights = someSyndRights + ) + + def imageWithPhotoshoot(photoshoot: Photoshoot): Image = createImage( + id = UUID.randomUUID().toString, + StaffPhotographer("Tom Jenkins", "The Guardian"), + optPhotoshoot = Some(photoshoot) + ) def crop = { - val cropSpec = CropSpec("/test", Bounds(0,0,0,0), None) + val cropSpec = CropSpec("/test", Bounds(0, 0, 0, 0), None) Crop(None, None, None, cropSpec: CropSpec, None, List.empty) } - def usage(id: String = UUID.randomUUID().toString) = Usage(id, List.empty, DigitalUsage, "test", PublishedUsageStatus, None, None, now) + def usage(id: String = UUID.randomUUID().toString) = Usage( + id, + List.empty, + DigitalUsage, + "test", + PublishedUsageStatus, + None, + None, + now + ) def stringLongerThan(i: Int): String = { var out = "" diff --git a/thrall/test/lib/OrderedFutureRunnerTest.scala b/thrall/test/lib/OrderedFutureRunnerTest.scala index b0bab77b18..81442ff122 100644 --- a/thrall/test/lib/OrderedFutureRunnerTest.scala +++ b/thrall/test/lib/OrderedFutureRunnerTest.scala @@ -13,7 +13,6 @@ class OrderedFutureRunnerTest extends FlatSpec with Matchers { private var current = 0 implicit val executionContext = ExecutionContext.Implicits.global - "OrderedFutureRunner.run" should "execute the futures in order" in { def runner(n: Int) = { val x = current @@ -41,8 +40,10 @@ class OrderedFutureRunnerTest extends FlatSpec with Matchers { } } - noException should be thrownBy OrderedFutureRunner.run(runner, Duration(1, SECONDS))(list) - + noException should be thrownBy OrderedFutureRunner.run( + runner, + Duration(1, SECONDS) + )(list) } } diff --git a/thrall/test/lib/ThrallStreamProcessorTest.scala b/thrall/test/lib/ThrallStreamProcessorTest.scala index 678c97ed83..2a4f0defad 100644 --- a/thrall/test/lib/ThrallStreamProcessorTest.scala +++ b/thrall/test/lib/ThrallStreamProcessorTest.scala @@ -15,25 +15,50 @@ import org.scalatest.{BeforeAndAfterAll, FunSpec, Matchers} import scala.concurrent.duration._ import scala.concurrent.{Await, Future} -private class ThrallStreamProcessorTest extends FunSpec with BeforeAndAfterAll with Matchers with MockitoSugar { +private class ThrallStreamProcessorTest + extends FunSpec + with BeforeAndAfterAll + with Matchers + with MockitoSugar { private implicit val actorSystem: ActorSystem = ActorSystem() private implicit val materializer: ActorMaterializer = ActorMaterializer() - def createRecord: KinesisRecord = KinesisRecord(ByteString.empty, "", None, "", None, OffsetDateTime.now().toInstant, "") + def createRecord: KinesisRecord = KinesisRecord( + ByteString.empty, + "", + None, + "", + None, + OffsetDateTime.now().toInstant, + "" + ) - val highPrioritySource: Source[KinesisRecord, Future[Done.type]] = Source.repeat(createRecord).mapMaterializedValue(_ => Future.successful(Done)).take(100) - val lowPrioritySource: Source[KinesisRecord, 
Future[Done.type]] = Source.repeat(createRecord).mapMaterializedValue(_ => Future.successful(Done)).take(100) + val highPrioritySource: Source[KinesisRecord, Future[Done.type]] = Source + .repeat(createRecord) + .mapMaterializedValue(_ => Future.successful(Done)) + .take(100) + val lowPrioritySource: Source[KinesisRecord, Future[Done.type]] = Source + .repeat(createRecord) + .mapMaterializedValue(_ => Future.successful(Done)) + .take(100) lazy val mockConsumer: ThrallEventConsumer = mock[ThrallEventConsumer] - lazy val streamProcessor = new ThrallStreamProcessor(highPrioritySource, lowPrioritySource, mockConsumer, actorSystem, materializer) + lazy val streamProcessor = new ThrallStreamProcessor( + highPrioritySource, + lowPrioritySource, + mockConsumer, + actorSystem, + materializer + ) describe("Stream merging strategy") { it("should process high priority events first") { val stream = streamProcessor.createStream() - val prioritiesFromMessages = Await.result(stream.take(200).runWith(Sink.seq), 5.minutes).map { - case (record, _, _) => record.priority - } + val prioritiesFromMessages = + Await.result(stream.take(200).runWith(Sink.seq), 5.minutes).map { + case (record, _, _) => record.priority + } prioritiesFromMessages.length shouldBe 200 @@ -42,7 +67,9 @@ private class ThrallStreamProcessorTest extends FunSpec with BeforeAndAfterAll w // `alternatingSegment` represents this behaviour val alternatingSegment = Seq(LowPriority, HighPriority, LowPriority) - val expected: Seq[Priority] = alternatingSegment ++ (1 to 98).map(_ => HighPriority) ++ alternatingSegment ++ (1 to 96).map(_ => LowPriority) + val expected: Seq[Priority] = alternatingSegment ++ (1 to 98).map(_ => + HighPriority + ) ++ alternatingSegment ++ (1 to 96).map(_ => LowPriority) prioritiesFromMessages.toList shouldBe expected.toList } } diff --git a/thrall/test/lib/elasticsearch/ElasticSearchTest.scala b/thrall/test/lib/elasticsearch/ElasticSearchTest.scala index 2b54343b5e..f24064b8cc 100644 --- a/thrall/test/lib/elasticsearch/ElasticSearchTest.scala +++ b/thrall/test/lib/elasticsearch/ElasticSearchTest.scala @@ -16,36 +16,61 @@ import scala.concurrent.{Await, Future} class ElasticSearchTest extends ElasticSearchTestBase { "Elasticsearch" - { - implicit val logMarker: LogMarker = MarkerMap() - + implicit val logMarker: LogMarker = MarkerMap() "images" - { "bulk inserting" - { "can bulk insert images" in { - val imageOne = createImage("batman", StaffPhotographer("Bruce Wayne", "Wayne Enterprises")).copy( - userMetadata = Some(Edits(labels = List("foo", "bar"), metadata = ImageMetadata(description = Some("my description")))) + val imageOne = createImage( + "batman", + StaffPhotographer("Bruce Wayne", "Wayne Enterprises") + ).copy( + userMetadata = Some( + Edits( + labels = List("foo", "bar"), + metadata = ImageMetadata(description = Some("my description")) + ) + ) ) - val imageTwo = createImage("superman", StaffPhotographer("Clark Kent", "Kent Farm")).copy( + val imageTwo = createImage( + "superman", + StaffPhotographer("Clark Kent", "Kent Farm") + ).copy( usages = List(usage()) ) val images: List[Image] = List(imageOne, imageTwo) // in a clean index, we should have 0 documents - ES.client.execute(ElasticDsl.count(ES.initialImagesIndex)).await.result.count shouldBe 0 + ES.client + .execute(ElasticDsl.count(ES.initialImagesIndex)) + .await + .result + .count shouldBe 0 Await.result(Future.sequence(ES.bulkInsert(images)), fiveSeconds) // force ES to refresh 
https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-refresh.html - Await.result(ES.client.execute(ElasticDsl.refreshIndex(ES.initialImagesIndex)), fiveSeconds) + Await.result( + ES.client.execute(ElasticDsl.refreshIndex(ES.initialImagesIndex)), + fiveSeconds + ) // after bulk inserting, we should have 2 documents - ES.client.execute(ElasticDsl.count(ES.initialImagesIndex)).await.result.count shouldBe images.length - - Json.toJson(reloadedImage("batman").get) shouldBe Json.toJson(imageOne) - Json.toJson(reloadedImage("superman").get) shouldBe Json.toJson(imageTwo) + ES.client + .execute(ElasticDsl.count(ES.initialImagesIndex)) + .await + .result + .count shouldBe images.length + + Json.toJson(reloadedImage("batman").get) shouldBe Json.toJson( + imageOne + ) + Json.toJson(reloadedImage("superman").get) shouldBe Json.toJson( + imageTwo + ) } } @@ -53,19 +78,32 @@ class ElasticSearchTest extends ElasticSearchTestBase { "can index and retrieve images by id" in { val id = UUID.randomUUID().toString - val userMetadata = Some(Edits(metadata = ImageMetadata( - description = Some("My boring image"), - title = Some("User supplied title"), - subjects = List("foo", "bar"), - specialInstructions = Some("Testing") - ))) + val userMetadata = Some( + Edits(metadata = + ImageMetadata( + description = Some("My boring image"), + title = Some("User supplied title"), + subjects = List("foo", "bar"), + specialInstructions = Some("Testing") + ) + ) + ) - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None). - copy(userMetadata = userMetadata) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ).copy(userMetadata = userMetadata) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) reloadedImage(id).get.id shouldBe image.id } @@ -73,37 +111,73 @@ class ElasticSearchTest extends ElasticSearchTestBase { "file metadata fields longer than the index keyword limit are still persisted" in { val id = UUID.randomUUID().toString val reallyLongTRC = stringLongerThan(250000) - val fileMetadata = FileMetadata(xmp = Map("foo" -> JsString("bar")), exif = Map("Green TRC" -> reallyLongTRC)) + val fileMetadata = FileMetadata( + xmp = Map("foo" -> JsString("bar")), + exif = Map("Green TRC" -> reallyLongTRC) + ) - val imageWithReallyLongMetadataField = createImageForSyndication(id = UUID.randomUUID().toString, + val imageWithReallyLongMetadataField = createImageForSyndication( + id = UUID.randomUUID().toString, rightsAcquired = true, rcsPublishDate = Some(now), - lease = None, fileMetadata = Some(fileMetadata)) + lease = None, + fileMetadata = Some(fileMetadata) + ) - Await.result(Future.sequence(ES.indexImage(id, imageWithReallyLongMetadataField, now)), fiveSeconds) + Await.result( + Future.sequence( + ES.indexImage(id, imageWithReallyLongMetadataField, now) + ), + fiveSeconds + ) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithReallyLongMetadataField.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some( + 
imageWithReallyLongMetadataField.id + ) + ) reloadedImage(id).get.id shouldBe imageWithReallyLongMetadataField.id - reloadedImage(id).get.fileMetadata.exif("Green TRC").length shouldBe reallyLongTRC.length + reloadedImage(id).get.fileMetadata + .exif("Green TRC") + .length shouldBe reallyLongTRC.length } "initial indexing does not add lastModified to the leases object" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) val loadedImage = reloadedImage(id).get loadedImage.leases.lastModified shouldBe None } "updating an existing image should set the last modified date" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) val lastModified = reloadedImage(id).get.lastModified lastModified.nonEmpty shouldBe true @@ -111,11 +185,24 @@ class ElasticSearchTest extends ElasticSearchTestBase { "initial index calls do not refresh metadata from user metadata" in { val id = UUID.randomUUID().toString - val originalUserMetadata = Some(Edits(metadata = ImageMetadata(description = Some("My boring image"), title = Some("User supplied title")))) - val imageWithBoringMetadata = createImageForSyndication(id = id, true, Some(now), None).copy(userMetadata = originalUserMetadata) + val originalUserMetadata = Some( + Edits(metadata = + ImageMetadata( + description = Some("My boring image"), + title = Some("User supplied title") + ) + ) + ) + val imageWithBoringMetadata = + createImageForSyndication(id = id, true, Some(now), None) + .copy(userMetadata = originalUserMetadata) ES.indexImage(id, imageWithBoringMetadata, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some( + imageWithBoringMetadata.id + ) + ) reloadedImage(id).get.metadata.title shouldBe Some("Test image " + id) reloadedImage(id).get.metadata.description shouldBe None @@ -123,43 +210,98 @@ class ElasticSearchTest extends ElasticSearchTestBase { "reindex calls refresh metadata from user metadata" in { val id = UUID.randomUUID().toString - val originalUserMetadata = Some(Edits(metadata = ImageMetadata(description = Some("My boring image"), title = Some("User supplied title")))) - val imageWithBoringMetadata = createImageForSyndication(id = id, true, Some(now), None).copy(userMetadata = originalUserMetadata) + val originalUserMetadata = Some( + Edits(metadata = + ImageMetadata( + description = Some("My boring image"), + title = Some("User supplied title") + ) + ) + ) + val imageWithBoringMetadata = + createImageForSyndication(id = id, true, Some(now), None) + .copy(userMetadata = originalUserMetadata) 
ES.indexImage(id, imageWithBoringMetadata, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some( + imageWithBoringMetadata.id + ) + ) ES.indexImage(id, imageWithBoringMetadata, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).get.metadata.title shouldBe Some("User supplied title")) - reloadedImage(id).get.metadata.description shouldBe Some("My boring image") + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).get.metadata.title shouldBe Some( + "User supplied title" + ) + ) + reloadedImage(id).get.metadata.description shouldBe Some( + "My boring image" + ) } "empty user metadata fields should be omitted from updated user metadata" in { val id = UUID.randomUUID().toString - val originalUserMetadata = Some(Edits(metadata = ImageMetadata(description = Some("My boring image"), title = Some("User supplied title"), credit = Some("")))) - val image = createImageForSyndication(id = id, true, Some(now), None).copy(userMetadata = originalUserMetadata) + val originalUserMetadata = Some( + Edits(metadata = + ImageMetadata( + description = Some("My boring image"), + title = Some("User supplied title"), + credit = Some("") + ) + ) + ) + val image = createImageForSyndication(id = id, true, Some(now), None) + .copy(userMetadata = originalUserMetadata) ES.indexImage(id, image, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).get.metadata.title shouldBe Some("User supplied title")) - reloadedImage(id).get.metadata.description shouldBe Some("My boring image") + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).get.metadata.title shouldBe Some( + "User supplied title" + ) + ) + reloadedImage(id).get.metadata.description shouldBe Some( + "My boring image" + ) reloadedImage(id).get.metadata.credit shouldBe None } "reindex calls refresh usage rights from user metadata" in { val id = UUID.randomUUID().toString - val updatedUsageRights: UsageRights = StaffPhotographer("Test", "Testing") - val usageMetadata = Some(Edits(usageRights = Some(updatedUsageRights), metadata = ImageMetadata(description = Some("My boring image"), title = Some("User supplied title")))) - val image = createImageForSyndication(id = id, true, Some(now), None).copy(userMetadata = usageMetadata) + val updatedUsageRights: UsageRights = + StaffPhotographer("Test", "Testing") + val usageMetadata = Some( + Edits( + usageRights = Some(updatedUsageRights), + metadata = ImageMetadata( + description = Some("My boring image"), + title = Some("User supplied title") + ) + ) + ) + val image = createImageForSyndication(id = id, true, Some(now), None) + .copy(userMetadata = usageMetadata) ES.indexImage(id, image, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), 
interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) ES.indexImage(id, image, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).get.usageRights.asInstanceOf[StaffPhotographer].photographer shouldBe "Test") + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).get.usageRights + .asInstanceOf[StaffPhotographer] + .photographer shouldBe "Test" + ) } "reindexing should preserve existing identifiers" in { @@ -173,21 +315,34 @@ class ElasticSearchTest extends ElasticSearchTestBase { "reindexing does not over write certain existing uploadTime, userMetadata, exports, uploadedBy, collections, leases and usages fields" in { val id = UUID.randomUUID().toString - val updatedUsageRights: UsageRights = StaffPhotographer("Test", "Testing") - val usageMetadata = Some(Edits(usageRights = Some(updatedUsageRights), metadata = ImageMetadata(description = Some("My boring image"), title = Some("User supplied title")))) - val image = createImageForSyndication(id = id, true, Some(now), None).copy(userMetadata = usageMetadata) + val updatedUsageRights: UsageRights = + StaffPhotographer("Test", "Testing") + val usageMetadata = Some( + Edits( + usageRights = Some(updatedUsageRights), + metadata = ImageMetadata( + description = Some("My boring image"), + title = Some("User supplied title") + ) + ) + ) + val image = createImageForSyndication(id = id, true, Some(now), None) + .copy(userMetadata = usageMetadata) ES.indexImage(id, image, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) val attemptedOverwrite = image.copy( uploadTime = DateTime.now, uploadedBy = "someone else" - ) ES.indexImage(id, attemptedOverwrite, now) - reloadedImage(id).get.uploadTime.getMillis shouldBe image.uploadTime.getMillis + reloadedImage( + id + ).get.uploadTime.getMillis shouldBe image.uploadTime.getMillis reloadedImage(id).get.uploadedBy shouldBe image.uploadedBy } @@ -196,9 +351,19 @@ class ElasticSearchTest extends ElasticSearchTestBase { "deleting" - { "can delete image" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(indexedImage(id).map(_.id) shouldBe Some(image.id)) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + indexedImage(id).map(_.id) shouldBe Some(image.id) + ) Await.result(Future.sequence(ES.deleteImage(id)), fiveSeconds) @@ -207,9 +372,19 @@ class ElasticSearchTest extends ElasticSearchTestBase { "failed deletes are indiciated with a failed future" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + 
Some(now), + None + ) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) val unknownImage = UUID.randomUUID().toString @@ -220,9 +395,19 @@ class ElasticSearchTest extends ElasticSearchTestBase { "should not delete images with usages" in { val id = UUID.randomUUID().toString - val imageWithUsages = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None).copy(usages = List(usage())) - Await.result(Future.sequence(ES.indexImage(id, imageWithUsages, now)), fiveSeconds) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithUsages.id)) + val imageWithUsages = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ).copy(usages = List(usage())) + Await.result( + Future.sequence(ES.indexImage(id, imageWithUsages, now)), + fiveSeconds + ) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(imageWithUsages.id) + ) whenReady(ES.deleteImage(id).head.failed) { ex => ex shouldBe ImageNotDeletable @@ -231,9 +416,19 @@ class ElasticSearchTest extends ElasticSearchTestBase { "should not delete images with exports" in { val id = UUID.randomUUID().toString - val imageWithExports = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None).copy(exports = List(crop)) - Await.result(Future.sequence(ES.indexImage(id, imageWithExports, now)), fiveSeconds) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithExports.id)) + val imageWithExports = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ).copy(exports = List(crop)) + Await.result( + Future.sequence(ES.indexImage(id, imageWithExports, now)), + fiveSeconds + ) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(imageWithExports.id) + ) whenReady(ES.deleteImage(id).head.failed) { ex => ex shouldBe ImageNotDeletable @@ -247,18 +442,39 @@ class ElasticSearchTest extends ElasticSearchTestBase { "can set image collections" in { val id = UUID.randomUUID().toString - val imageWithExports = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None).copy(exports = List(crop)) - Await.result(Future.sequence(ES.indexImage(id, imageWithExports, now)), fiveSeconds) + val imageWithExports = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ).copy(exports = List(crop)) + Await.result( + Future.sequence(ES.indexImage(id, imageWithExports, now)), + fiveSeconds + ) - val collection = Collection(path = List("/somewhere"), actionData = ActionData("Test author", DateTime.now), "A test collection") - val anotherCollection = Collection(path = List("/somewhere-else"), actionData = ActionData("Test author", DateTime.now), "Another test collection") + val collection = Collection( + path = List("/somewhere"), + actionData = ActionData("Test author", DateTime.now), + "A test collection" + ) + val anotherCollection = Collection( + path = List("/somewhere-else"), + actionData = ActionData("Test author", DateTime.now), + "Another test collection" + ) val collections = List(collection, anotherCollection) - Await.result(Future.sequence(ES.setImageCollections(id, collections, now)), 
fiveSeconds) + Await.result( + Future.sequence(ES.setImageCollections(id, collections, now)), + fiveSeconds + ) reloadedImage(id).get.collections.size shouldBe 2 - reloadedImage(id).get.collections.head.description shouldEqual "A test collection" + reloadedImage( + id + ).get.collections.head.description shouldEqual "A test collection" } } @@ -266,12 +482,23 @@ class ElasticSearchTest extends ElasticSearchTestBase { "can add exports" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) reloadedImage(id).get.exports.isEmpty shouldBe true val exports = List(crop) - Await.result(Future.sequence(ES.updateImageExports(id, exports, now)), fiveSeconds) // TODO rename to add + Await.result( + Future.sequence(ES.updateImageExports(id, exports, now)), + fiveSeconds + ) // TODO rename to add reloadedImage(id).get.exports.nonEmpty shouldBe true reloadedImage(id).get.exports.head.id shouldBe crop.id @@ -279,18 +506,32 @@ class ElasticSearchTest extends ElasticSearchTestBase { "can delete exports" in { val id = UUID.randomUUID().toString - val imageWithExports = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None).copy(exports = List(crop)) - Await.result(Future.sequence(ES.indexImage(id, imageWithExports, now)), fiveSeconds) + val imageWithExports = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ).copy(exports = List(crop)) + Await.result( + Future.sequence(ES.indexImage(id, imageWithExports, now)), + fiveSeconds + ) reloadedImage(id).get.exports.nonEmpty shouldBe true - Await.result(Future.sequence(ES.deleteImageExports(id, now)), fiveSeconds) + Await.result( + Future.sequence(ES.deleteImageExports(id, now)), + fiveSeconds + ) reloadedImage(id).get.exports.isEmpty shouldBe true } "deleting exports for a non-existant image is not an error" in { val id = UUID.randomUUID().toString - val result = Await.result(Future.sequence(ES.deleteImageExports(id, now)), fiveSeconds) + val result = Await.result( + Future.sequence(ES.deleteImageExports(id, now)), + fiveSeconds + ) result should have length 1 } } @@ -305,8 +546,12 @@ class ElasticSearchTest extends ElasticSearchTestBase { true, Some(now), None, - leasesLastModified = Some(timeBeforeEdit)) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + leasesLastModified = Some(timeBeforeEdit) + ) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) reloadedImage(id).get.leases.leases.isEmpty shouldBe true val lease = model.leases.MediaLease( @@ -316,7 +561,10 @@ class ElasticSearchTest extends ElasticSearchTestBase { mediaId = UUID.randomUUID().toString ) - Await.result(Future.sequence(ES.addImageLease(id, lease, now)), fiveSeconds) + Await.result( + Future.sequence(ES.addImageLease(id, lease, now)), + fiveSeconds + ) val newLeases = reloadedImage(id).get.leases newLeases.leases.nonEmpty shouldBe true @@ -325,40 +573,70 @@ class ElasticSearchTest extends ElasticSearchTestBase { } "can remove image lease" in { - val lease = model.leases.MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("A test lease"), mediaId = UUID.randomUUID().toString) + val lease = 
model.leases.MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("A test lease"), + mediaId = UUID.randomUUID().toString + ) val id = UUID.randomUUID().toString - val image = createImageForSyndication(id, true, Some(now), lease = Some(lease)) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + val image = + createImageForSyndication(id, true, Some(now), lease = Some(lease)) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) reloadedImage(id).get.leases.leases.nonEmpty shouldBe true - Await.result(Future.sequence(ES.removeImageLease(id, lease.id, now)), fiveSeconds) + Await.result( + Future.sequence(ES.removeImageLease(id, lease.id, now)), + fiveSeconds + ) reloadedImage(id).get.leases.leases.isEmpty shouldBe true } "can remove image lease for an image which doesn't exist" in { - val lease = model.leases.MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("A test lease"), mediaId = UUID.randomUUID().toString) + val lease = model.leases.MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("A test lease"), + mediaId = UUID.randomUUID().toString + ) val id = UUID.randomUUID().toString - val result = Await.result(Future.sequence(ES.removeImageLease(id, lease.id, now)), fiveSeconds) + val result = Await.result( + Future.sequence(ES.removeImageLease(id, lease.id, now)), + fiveSeconds + ) result should have length 1 } "removing a lease should update the leases last modified time" in { - val lease = model.leases.MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("A test lease"), mediaId = UUID.randomUUID().toString) + val lease = model.leases.MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("A test lease"), + mediaId = UUID.randomUUID().toString + ) val timeBeforeEdit = DateTime.now val id = UUID.randomUUID().toString val image = createImageForSyndication( id = UUID.randomUUID().toString, - true, Some(now), lease = Some(lease) ) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) reloadedImage(id).get.leases.leases.nonEmpty shouldBe true - Await.result(Future.sequence(ES.removeImageLease(id, lease.id, now)), fiveSeconds) + Await.result( + Future.sequence(ES.removeImageLease(id, lease.id, now)), + fiveSeconds + ) val newLeases = reloadedImage(id).get.leases newLeases.leases.isEmpty shouldBe true @@ -366,7 +644,12 @@ class ElasticSearchTest extends ElasticSearchTestBase { } "can replace leases" in { - val lease = MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("A test lease"), mediaId = UUID.randomUUID().toString) + val lease = MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("A test lease"), + mediaId = UUID.randomUUID().toString + ) val id = UUID.randomUUID().toString val timeBeforeEdit = DateTime.now val image = createImageForSyndication( @@ -376,14 +659,30 @@ class ElasticSearchTest extends ElasticSearchTestBase { lease = Some(lease), leasesLastModified = Some(timeBeforeEdit) ) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) - val updatedLease = MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("An updated lease"), mediaId = UUID.randomUUID().toString) - val 
anotherUpdatedLease = MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("Another updated lease"), mediaId = UUID.randomUUID().toString) + val updatedLease = MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("An updated lease"), + mediaId = UUID.randomUUID().toString + ) + val anotherUpdatedLease = MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("Another updated lease"), + mediaId = UUID.randomUUID().toString + ) val updatedLeases = Seq(updatedLease, anotherUpdatedLease) updatedLeases.size shouldBe 2 - Await.result(Future.sequence(ES.replaceImageLeases(id, updatedLeases, now)), fiveSeconds) + Await.result( + Future.sequence(ES.replaceImageLeases(id, updatedLeases, now)), + fiveSeconds + ) val newLeases = reloadedImage(id).get.leases newLeases.leases.size shouldBe 2 @@ -392,7 +691,12 @@ class ElasticSearchTest extends ElasticSearchTestBase { } "can replace leases when they are empty" in { - val lease = MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("A test lease"), mediaId = UUID.randomUUID().toString) + val lease = MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("A test lease"), + mediaId = UUID.randomUUID().toString + ) val id = UUID.randomUUID().toString val timeBeforeEdit = DateTime.now val image = createImageForSyndication( @@ -402,14 +706,30 @@ class ElasticSearchTest extends ElasticSearchTestBase { lease = None, leasesLastModified = Some(timeBeforeEdit) ).copy(leases = LeasesByMedia.empty) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) - val updatedLease = MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("An updated lease"), mediaId = UUID.randomUUID().toString) - val anotherUpdatedLease = MediaLease(id = Some(UUID.randomUUID().toString), leasedBy = None, notes = Some("Another updated lease"), mediaId = UUID.randomUUID().toString) + val updatedLease = MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("An updated lease"), + mediaId = UUID.randomUUID().toString + ) + val anotherUpdatedLease = MediaLease( + id = Some(UUID.randomUUID().toString), + leasedBy = None, + notes = Some("Another updated lease"), + mediaId = UUID.randomUUID().toString + ) val updatedLeases = Seq(updatedLease, anotherUpdatedLease) updatedLeases.size shouldBe 2 - Await.result(Future.sequence(ES.replaceImageLeases(id, updatedLeases, now)), fiveSeconds) + Await.result( + Future.sequence(ES.replaceImageLeases(id, updatedLeases, now)), + fiveSeconds + ) val newLeases = reloadedImage(id).get.leases newLeases.leases.size shouldBe 2 @@ -422,14 +742,18 @@ class ElasticSearchTest extends ElasticSearchTestBase { "initial write populates last modified" in { val id = UUID.randomUUID().toString - val image = createImage(id, StaffPhotographer("Bruce Wayne", "Wayne Enterprises")) + val image = + createImage(id, StaffPhotographer("Bruce Wayne", "Wayne Enterprises")) val date = now.withSecondOfMinute(0) // Write date - Await.result(Future.sequence(ES.indexImage(id, image, date)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, date)), + fiveSeconds + ) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( { + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))({ val image = reloadedImage(id) image.get.lastModified.get 
shouldBe date }) @@ -437,17 +761,25 @@ class ElasticSearchTest extends ElasticSearchTestBase { "last modified gets updated in normal order" in { val id = UUID.randomUUID().toString - val image = createImage(id, StaffPhotographer("Bruce Wayne", "Wayne Enterprises")) + val image = + createImage(id, StaffPhotographer("Bruce Wayne", "Wayne Enterprises")) val earlierDate = now.withSecondOfMinute(0) - val laterDate = earlierDate.withSecondOfMinute(30) // Clearly thirty seconds later. + val laterDate = + earlierDate.withSecondOfMinute(30) // Clearly thirty seconds later. // Write first date first - Await.result(Future.sequence(ES.indexImage(id, image, earlierDate)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, earlierDate)), + fiveSeconds + ) // Write second date second - Await.result(Future.sequence(ES.indexImage(id, image, laterDate)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, laterDate)), + fiveSeconds + ) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( { + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))({ val image = reloadedImage(id) image.get.lastModified.get shouldBe laterDate }) @@ -455,26 +787,37 @@ class ElasticSearchTest extends ElasticSearchTestBase { "last modified does not get updated in wrong order" in { val id = UUID.randomUUID().toString - val image = createImage(id, StaffPhotographer("Bruce Wayne", "Wayne Enterprises")) + val image = + createImage(id, StaffPhotographer("Bruce Wayne", "Wayne Enterprises")) val earlierDate = now.withSecondOfMinute(0) - val laterDate = earlierDate.withSecondOfMinute(30) // Clearly thirty seconds later. + val laterDate = + earlierDate.withSecondOfMinute(30) // Clearly thirty seconds later. // Write second date first - Await.result(Future.sequence(ES.indexImage(id, image, laterDate)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, laterDate)), + fiveSeconds + ) - val updatedImage = eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( { - val image = reloadedImage(id) - image.get - }) - .copy(lastModified = Some(earlierDate)) - .copy(usageRights = StaffPhotographer("Dr. Pamela Lillian Isley", "Poison Ivy Inc.")) + val updatedImage = + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))({ + val image = reloadedImage(id) + image.get + }) + .copy(lastModified = Some(earlierDate)) + .copy(usageRights = + StaffPhotographer("Dr. 
Pamela Lillian Isley", "Poison Ivy Inc.") + ) // Write first date second - Await.result(Future.sequence(ES.indexImage(id, updatedImage, earlierDate)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, updatedImage, earlierDate)), + fiveSeconds + ) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( { - val image = reloadedImage(id) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))({ + val image = reloadedImage(id) image.get.lastModified.get shouldBe laterDate }) } @@ -486,42 +829,83 @@ class ElasticSearchTest extends ElasticSearchTestBase { "can delete all usages for an image which does not exist" in { val id = UUID.randomUUID().toString - val result = Await.result(Future.sequence(ES.deleteAllImageUsages(id, now)), fiveSeconds) + val result = Await.result( + Future.sequence(ES.deleteAllImageUsages(id, now)), + fiveSeconds + ) result should have length 1 } "can delete all usages for an image" in { val id = UUID.randomUUID().toString - val imageWithUsages = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None).copy(usages = List(usage())) - Await.result(Future.sequence(ES.indexImage(id, imageWithUsages, now)), fiveSeconds) + val imageWithUsages = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ).copy(usages = List(usage())) + Await.result( + Future.sequence(ES.indexImage(id, imageWithUsages, now)), + fiveSeconds + ) - Await.result(Future.sequence(ES.deleteAllImageUsages(id, now)), fiveSeconds) + Await.result( + Future.sequence(ES.deleteAllImageUsages(id, now)), + fiveSeconds + ) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).get.usages.isEmpty shouldBe true) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).get.usages.isEmpty shouldBe true + ) } "can update usages" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) - Await.result(Future.sequence(ES.updateImageUsages(id, List(usage()), now)), fiveSeconds) + Await.result( + Future.sequence(ES.updateImageUsages(id, List(usage()), now)), + fiveSeconds + ) reloadedImage(id).get.usages.size shouldBe 1 } "can update usages if the modification date of the update is new than the existing one" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) val existingUsage = usage(id = "existing") - Await.result(Future.sequence(ES.updateImageUsages(id, List(existingUsage), now)), fiveSeconds) + Await.result( + Future.sequence(ES.updateImageUsages(id, List(existingUsage), now)), + fiveSeconds + ) reloadedImage(id).get.usages.head.id shouldEqual ("existing") val moreRecentUsage = usage(id = "most-recent") - Await.result(Future.sequence(ES.updateImageUsages(id, List(moreRecentUsage), now)), fiveSeconds) + Await.result( + Future.sequence(ES.updateImageUsages(id, 
List(moreRecentUsage), now)), + fiveSeconds + ) reloadedImage(id).get.usages.size shouldBe 1 reloadedImage(id).get.usages.head.id shouldEqual ("most-recent") @@ -529,15 +913,31 @@ class ElasticSearchTest extends ElasticSearchTestBase { "should ignore usage update requests when the proposed last modified date is older than the current" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) val mostRecentUsage = usage(id = "recent") - Await.result(Future.sequence(ES.updateImageUsages(id, List(mostRecentUsage), now)), fiveSeconds) + Await.result( + Future.sequence(ES.updateImageUsages(id, List(mostRecentUsage), now)), + fiveSeconds + ) val staleUsage = usage(id = "stale") val staleLastModified = DateTime.now.minusWeeks(1) - Await.result(Future.sequence(ES.updateImageUsages(id, List(staleUsage), staleLastModified)), fiveSeconds) + Await.result( + Future.sequence( + ES.updateImageUsages(id, List(staleUsage), staleLastModified) + ), + fiveSeconds + ) reloadedImage(id).get.usages.head.id shouldEqual ("recent") } @@ -546,15 +946,33 @@ class ElasticSearchTest extends ElasticSearchTestBase { "syndication rights" - { "updated syndication rights should be persisted" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, image, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) - val newSyndicationRights = SyndicationRights(published = Some(now), suppliers = Seq.empty, rights = Seq.empty) + val newSyndicationRights = SyndicationRights( + published = Some(now), + suppliers = Seq.empty, + rights = Seq.empty + ) - Await.result(Future.sequence(ES.updateImageSyndicationRights(id, Some(newSyndicationRights), now)), fiveSeconds) + Await.result( + Future.sequence( + ES.updateImageSyndicationRights(id, Some(newSyndicationRights), now) + ), + fiveSeconds + ) - reloadedImage(id).flatMap(_.syndicationRights) shouldEqual Some(newSyndicationRights) + reloadedImage(id).flatMap(_.syndicationRights) shouldEqual Some( + newSyndicationRights + ) } "updating syndication rights should update last modified date" in { @@ -568,23 +986,45 @@ class ElasticSearchTest extends ElasticSearchTestBase { leasesLastModified = Some(beforeUpdate) ) ES.indexImage(id, image, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) - val newSyndicationRights = SyndicationRights(published = Some(now.minusWeeks(1)), suppliers = Seq.empty, rights = Seq.empty) + val newSyndicationRights = SyndicationRights( + published = Some(now.minusWeeks(1)), + suppliers = Seq.empty, + rights = Seq.empty + ) - Await.result(Future.sequence(ES.updateImageSyndicationRights(id, Some(newSyndicationRights), now)), 
fiveSeconds) + Await.result( + Future.sequence( + ES.updateImageSyndicationRights(id, Some(newSyndicationRights), now) + ), + fiveSeconds + ) - reloadedImage(id).get.lastModified.get.isAfter(beforeUpdate) shouldEqual true + reloadedImage(id).get.lastModified.get + .isAfter(beforeUpdate) shouldEqual true } "can delete syndication rights" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, image, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) reloadedImage(id).get.syndicationRights.nonEmpty shouldBe true - Await.result(Future.sequence(ES.deleteSyndicationRights(id, now)), fiveSeconds) + Await.result( + Future.sequence(ES.deleteSyndicationRights(id, now)), + fiveSeconds + ) reloadedImage(id).get.syndicationRights.isEmpty shouldBe true } @@ -592,7 +1032,10 @@ class ElasticSearchTest extends ElasticSearchTestBase { "can delete syndication rights from an image which does not exist" in { val id = UUID.randomUUID().toString - val result = Await.result(Future.sequence(ES.deleteSyndicationRights(id, now)), fiveSeconds) + val result = Await.result( + Future.sequence(ES.deleteSyndicationRights(id, now)), + fiveSeconds + ) result should have length 1 } @@ -601,169 +1044,331 @@ class ElasticSearchTest extends ElasticSearchTestBase { "user metadata" - { "can update user metadata for an existing image" in { val id = UUID.randomUUID().toString - val imageWithBoringMetadata = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val imageWithBoringMetadata = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, imageWithBoringMetadata, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id) + ) - val updatedMetadata = Edits(metadata = imageWithBoringMetadata.metadata.copy(description = Some("An interesting image"))) + val updatedMetadata = Edits(metadata = + imageWithBoringMetadata.metadata.copy(description = + Some("An interesting image") + ) + ) val updatedLastModifiedDate = DateTime.now - Await.result(Future.sequence( - ES.applyImageMetadataOverride(id, - updatedMetadata, - updatedLastModifiedDate)), - fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride( + id, + updatedMetadata, + updatedLastModifiedDate + ) + ), + fiveSeconds + ) - reloadedImage(id).flatMap(_.userMetadata.get.metadata.description) shouldBe Some("An interesting image") + reloadedImage(id).flatMap( + _.userMetadata.get.metadata.description + ) shouldBe Some("An interesting image") } "updating user metadata should update the image and user meta data last modified dates" in { val id = UUID.randomUUID().toString - val imageWithBoringMetadata = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val imageWithBoringMetadata = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, imageWithBoringMetadata, 
now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id) + ) - val updatedMetadata = Edits(metadata = imageWithBoringMetadata.metadata.copy(description = Some("An updated image"))) + val updatedMetadata = Edits(metadata = + imageWithBoringMetadata.metadata.copy(description = + Some("An updated image") + ) + ) val updatedLastModifiedDate = DateTime.now.withZone(DateTimeZone.UTC) - Await.result(Future.sequence( - ES.applyImageMetadataOverride(id, - updatedMetadata, - updatedLastModifiedDate)), - fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride( + id, + updatedMetadata, + updatedLastModifiedDate + ) + ), + fiveSeconds + ) - reloadedImage(id).flatMap(_.userMetadataLastModified) shouldEqual Some(updatedLastModifiedDate) - reloadedImage(id).flatMap(_.lastModified) shouldEqual Some(updatedLastModifiedDate) + reloadedImage(id).flatMap(_.userMetadataLastModified) shouldEqual Some( + updatedLastModifiedDate + ) + reloadedImage(id).flatMap(_.lastModified) shouldEqual Some( + updatedLastModifiedDate + ) } "original metadata is unchanged by a user metadata edit" in { val id = UUID.randomUUID().toString - val imageWithBoringMetadata = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val imageWithBoringMetadata = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, imageWithBoringMetadata, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id) + ) - val updatedMetadata = Edits(metadata = imageWithBoringMetadata.metadata.copy(description = Some("An interesting image"))) + val updatedMetadata = Edits(metadata = + imageWithBoringMetadata.metadata.copy(description = + Some("An interesting image") + ) + ) val updatedLastModifiedDate = DateTime.now - Await.result(Future.sequence( - ES.applyImageMetadataOverride(id, - updatedMetadata, - updatedLastModifiedDate)), - fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride( + id, + updatedMetadata, + updatedLastModifiedDate + ) + ), + fiveSeconds + ) - reloadedImage(id).map(_.originalMetadata) shouldEqual Some(imageWithBoringMetadata.originalMetadata) + reloadedImage(id).map(_.originalMetadata) shouldEqual Some( + imageWithBoringMetadata.originalMetadata + ) } "should apply metadata update if user metadata is set but before new modified date" in { val id = UUID.randomUUID().toString - val image = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, image, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) - val userMetadata = ImageMetadata(description = Some("An updated image"), subjects = List("sausages")) + val userMetadata = ImageMetadata( + description = Some("An updated image"), + subjects = List("sausages") 
+ ) val updatedLastModifiedDate = DateTime.now.withZone(DateTimeZone.UTC) - Await.result(Future.sequence( - ES.applyImageMetadataOverride(id, - Edits(labels = List("foo"), metadata = userMetadata), - updatedLastModifiedDate)), - fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride( + id, + Edits(labels = List("foo"), metadata = userMetadata), + updatedLastModifiedDate + ) + ), + fiveSeconds + ) - reloadedImage(id).flatMap(_.userMetadataLastModified) shouldEqual Some(updatedLastModifiedDate) - reloadedImage(id).get.userMetadata.get.metadata.subjects shouldEqual List("sausages") + reloadedImage(id).flatMap(_.userMetadataLastModified) shouldEqual Some( + updatedLastModifiedDate + ) + reloadedImage( + id + ).get.userMetadata.get.metadata.subjects shouldEqual List("sausages") reloadedImage(id).get.userMetadata.get.labels shouldEqual List("foo") - val furtherUpdatedMetadata = userMetadata.copy(description = Some("A further updated image"), subjects = List("sausages", "chips")) + val furtherUpdatedMetadata = userMetadata.copy( + description = Some("A further updated image"), + subjects = List("sausages", "chips") + ) - Await.result(Future.sequence( - ES.applyImageMetadataOverride(id, - Edits(labels = List("foo", "bar"), metadata = furtherUpdatedMetadata), - updatedLastModifiedDate.plusSeconds(1))), - fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride( + id, + Edits( + labels = List("foo", "bar"), + metadata = furtherUpdatedMetadata + ), + updatedLastModifiedDate.plusSeconds(1) + ) + ), + fiveSeconds + ) - reloadedImage(id).flatMap(_.userMetadata.get.metadata.description) shouldEqual Some("A further updated image") - reloadedImage(id).get.userMetadata.get.metadata.subjects shouldEqual List("sausages", "chips") - reloadedImage(id).get.userMetadata.get.labels shouldEqual List("foo", "bar") + reloadedImage(id).flatMap( + _.userMetadata.get.metadata.description + ) shouldEqual Some("A further updated image") + reloadedImage( + id + ).get.userMetadata.get.metadata.subjects shouldEqual List( + "sausages", + "chips" + ) + reloadedImage(id).get.userMetadata.get.labels shouldEqual List( + "foo", + "bar" + ) } "should ignore update if the proposed modification date is older than the current user metadata last modified date" in { val id = UUID.randomUUID().toString - val imageWithBoringMetadata = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val imageWithBoringMetadata = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, imageWithBoringMetadata, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id) + ) - val latestMetadata = Edits(metadata = imageWithBoringMetadata.metadata.copy(description = Some("Latest edit"))) + val latestMetadata = Edits(metadata = + imageWithBoringMetadata.metadata.copy(description = + Some("Latest edit") + ) + ) val latestLastModifiedDate = DateTime.now.withZone(DateTimeZone.UTC) - Await.result(Future.sequence( - ES.applyImageMetadataOverride(id, - latestMetadata, - latestLastModifiedDate)), - fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride( + id, + latestMetadata, + latestLastModifiedDate + ) + ), + fiveSeconds + ) - val staleMetadata = Edits(metadata = 
imageWithBoringMetadata.metadata.copy(description = Some("A stale edit"))) + val staleMetadata = Edits(metadata = + imageWithBoringMetadata.metadata.copy(description = + Some("A stale edit") + ) + ) val staleLastModifiedDate = latestLastModifiedDate.minusSeconds(1) - Await.result(Future.sequence( - ES.applyImageMetadataOverride(id, - staleMetadata, - staleLastModifiedDate)), - fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride( + id, + staleMetadata, + staleLastModifiedDate + ) + ), + fiveSeconds + ) - reloadedImage(id).flatMap(_.userMetadata.get.metadata.description) shouldBe Some("Latest edit") - reloadedImage(id).flatMap(_.userMetadataLastModified) shouldEqual Some(latestLastModifiedDate) + reloadedImage(id).flatMap( + _.userMetadata.get.metadata.description + ) shouldBe Some("Latest edit") + reloadedImage(id).flatMap(_.userMetadataLastModified) shouldEqual Some( + latestLastModifiedDate + ) } "updating user metadata with new usage rights should update usage rights" in { val id = UUID.randomUUID().toString - val imageWithUsageRights = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val imageWithUsageRights = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, imageWithUsageRights, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithUsageRights.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(imageWithUsageRights.id) + ) - val newPhotographer = StaffPhotographer(photographer = "Test Photographer", publication = "Testing") + val newPhotographer = StaffPhotographer( + photographer = "Test Photographer", + publication = "Testing" + ) - val metadataWithUpdatedUsageRights = Edits(usageRights = Some(newPhotographer), metadata = imageWithUsageRights.metadata) + val metadataWithUpdatedUsageRights = Edits( + usageRights = Some(newPhotographer), + metadata = imageWithUsageRights.metadata + ) - Await.result(Future.sequence( - ES.applyImageMetadataOverride(id, - metadataWithUpdatedUsageRights, - DateTime.now.withZone(DateTimeZone.UTC))), - fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride( + id, + metadataWithUpdatedUsageRights, + DateTime.now.withZone(DateTimeZone.UTC) + ) + ), + fiveSeconds + ) - reloadedImage(id).get.usageRights.asInstanceOf[StaffPhotographer].photographer shouldEqual "Test Photographer" + reloadedImage(id).get.usageRights + .asInstanceOf[StaffPhotographer] + .photographer shouldEqual "Test Photographer" } "updating user metadata should update photoshoot suggestions" in { val id = UUID.randomUUID().toString - val imageWithBoringMetadata = createImageForSyndication(id = UUID.randomUUID().toString, true, Some(now), None) + val imageWithBoringMetadata = createImageForSyndication( + id = UUID.randomUUID().toString, + true, + Some(now), + None + ) ES.indexImage(id, imageWithBoringMetadata, now) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(imageWithBoringMetadata.id) + ) val newPhotoshoot = Photoshoot("Test photoshoot") - val updatedMetadata = Edits(photoshoot = Some(newPhotoshoot), metadata = imageWithBoringMetadata.metadata.copy()) + val updatedMetadata = Edits( + photoshoot = 
Some(newPhotoshoot), + metadata = imageWithBoringMetadata.metadata.copy() + ) - Await.result(Future.sequence(ES.applyImageMetadataOverride(id, updatedMetadata, now)), fiveSeconds) + Await.result( + Future.sequence( + ES.applyImageMetadataOverride(id, updatedMetadata, now) + ), + fiveSeconds + ) - reloadedImage(id).flatMap(_.userMetadata.get.photoshoot.map(_.title)) shouldEqual Some("Test photoshoot") + reloadedImage(id).flatMap( + _.userMetadata.get.photoshoot.map(_.title) + ) shouldEqual Some("Test photoshoot") // TODO how to assert that the suggestion was added? } } - "date checks" - { - "correct zone" in { - import com.gu.mediaservice.lib.formatting.parseOptDateTime - val parsedDate = parseOptDateTime(Some("2021-01-13T15:26:27.234Z")) - parsedDate.get.getZone shouldEqual(DateTimeZone.UTC) + "date checks" - { + "correct zone" in { + import com.gu.mediaservice.lib.formatting.parseOptDateTime + val parsedDate = parseOptDateTime(Some("2021-01-13T15:26:27.234Z")) + parsedDate.get.getZone shouldEqual (DateTimeZone.UTC) + } } - } } private def now = DateTime.now(DateTimeZone.UTC) diff --git a/thrall/test/lib/elasticsearch/ElasticSearchTestBase.scala b/thrall/test/lib/elasticsearch/ElasticSearchTestBase.scala index c1c6727605..36683252ff 100644 --- a/thrall/test/lib/elasticsearch/ElasticSearchTestBase.scala +++ b/thrall/test/lib/elasticsearch/ElasticSearchTestBase.scala @@ -17,24 +17,48 @@ import scala.concurrent.Await import scala.concurrent.duration._ import scala.util.Properties -trait ElasticSearchTestBase extends FreeSpec with Matchers with Fixtures with BeforeAndAfterAll with BeforeAndAfterEach with Eventually with ScalaFutures with DockerKit with DockerTestKit with DockerKitSpotify { - - val useEsDocker = Properties.envOrElse("USE_DOCKER_FOR_TESTS", "true").toBoolean +trait ElasticSearchTestBase + extends FreeSpec + with Matchers + with Fixtures + with BeforeAndAfterAll + with BeforeAndAfterEach + with Eventually + with ScalaFutures + with DockerKit + with DockerTestKit + with DockerKitSpotify { + + val useEsDocker = + Properties.envOrElse("USE_DOCKER_FOR_TESTS", "true").toBoolean val esTestUrl = Properties.envOrElse("ES6_TEST_URL", "http://localhost:9200") val oneHundredMilliseconds = Duration(100, MILLISECONDS) val fiveSeconds = Duration(5, SECONDS) - val elasticSearchConfig = ElasticSearchConfig("writealias", esTestUrl, "media-service-test", 1, 0) + val elasticSearchConfig = + ElasticSearchConfig("writealias", esTestUrl, "media-service-test", 1, 0) val ES = new ElasticSearch(elasticSearchConfig, None) - val esContainer = if (useEsDocker) Some(DockerContainer("docker.elastic.co/elasticsearch/elasticsearch:7.5.2") - .withPorts(9200 -> Some(9200)) - .withEnv("cluster.name=media-service", "xpack.security.enabled=false", "discovery.type=single-node", "network.host=0.0.0.0") - .withReadyChecker( - DockerReadyChecker.HttpResponseCode(9200, "/", Some("0.0.0.0")).within(10.minutes).looped(40, 1250.millis) - ) - ) else None + val esContainer = + if (useEsDocker) + Some( + DockerContainer("docker.elastic.co/elasticsearch/elasticsearch:7.5.2") + .withPorts(9200 -> Some(9200)) + .withEnv( + "cluster.name=media-service", + "xpack.security.enabled=false", + "discovery.type=single-node", + "network.host=0.0.0.0" + ) + .withReadyChecker( + DockerReadyChecker + .HttpResponseCode(9200, "/", Some("0.0.0.0")) + .within(10.minutes) + .looped(40, 1250.millis) + ) + ) + else None override def beforeAll { super.beforeAll() @@ -46,11 +70,17 @@ trait ElasticSearchTestBase extends FreeSpec with Matchers with 
Fixtures with Be // Ensure to reset the state of ES between tests by deleting all documents... Await.ready( ES.client.execute( - ElasticDsl.deleteByQuery(ES.initialImagesIndex, ElasticDsl.matchAllQuery()) - ), fiveSeconds) + ElasticDsl + .deleteByQuery(ES.initialImagesIndex, ElasticDsl.matchAllQuery()) + ), + fiveSeconds + ) // ...and then forcing a refresh. These operations need to be done in serial. - Await.result(ES.client.execute(ElasticDsl.refreshIndex(ES.initialImagesIndex)), fiveSeconds) + Await.result( + ES.client.execute(ElasticDsl.refreshIndex(ES.initialImagesIndex)), + fiveSeconds + ) } override def afterAll: Unit = { @@ -62,7 +92,6 @@ trait ElasticSearchTestBase extends FreeSpec with Matchers with Fixtures with Be final override val StartContainersTimeout = 1.minute - def reloadedImage(id: String) = { implicit val logMarker: LogMarker = MarkerMap() Await.result(ES.getImage(id), fiveSeconds) @@ -74,5 +103,7 @@ trait ElasticSearchTestBase extends FreeSpec with Matchers with Fixtures with Be Await.result(ES.getImage(id), fiveSeconds) } - def asJsLookup(d: DateTime): JsLookupResult = JsDefined(Json.toJson(d.toString)) + def asJsLookup(d: DateTime): JsLookupResult = JsDefined( + Json.toJson(d.toString) + ) } diff --git a/thrall/test/lib/elasticsearch/SyndicationRightsOpsTest.scala b/thrall/test/lib/elasticsearch/SyndicationRightsOpsTest.scala index 8474a91d96..1cbdb37d36 100644 --- a/thrall/test/lib/elasticsearch/SyndicationRightsOpsTest.scala +++ b/thrall/test/lib/elasticsearch/SyndicationRightsOpsTest.scala @@ -19,7 +19,9 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { test(image) } - def withPhotoshoot(photoshoot: Photoshoot)(test: List[Image] => Unit): Unit = { + def withPhotoshoot( + photoshoot: Photoshoot + )(test: List[Image] => Unit): Unit = { implicit val logMarker: LogMarker = MarkerMap() val images = (1 to 5).map { _ => @@ -31,20 +33,40 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { test(images) } - private def addSyndicationRights(image: Image, someRights: Option[SyndicationRights]): Image = image.copy(syndicationRights = someRights) + private def addSyndicationRights( + image: Image, + someRights: Option[SyndicationRights] + ): Image = image.copy(syndicationRights = someRights) - private def makeSyndicationRightsInferred(imageWithRights: Image): Option[SyndicationRights] = imageWithRights.syndicationRights.map(_.copy(isInferred = true)) + private def makeSyndicationRightsInferred( + imageWithRights: Image + ): Option[SyndicationRights] = + imageWithRights.syndicationRights.map(_.copy(isInferred = true)) implicit val logMarker: MarkerMap = MarkerMap() - implicit val defaultPatience: PatienceConfig = PatienceConfig(timeout = Span(30, Seconds), interval = Span(250, Millis)) + implicit val defaultPatience: PatienceConfig = + PatienceConfig(timeout = Span(30, Seconds), interval = Span(250, Millis)) "SyndicationRightsOps" - { "General logic" - { "return the most recent syndication rights" in { - val image1 = createImageForSyndication(UUID.randomUUID().toString, rightsAcquired = true, Some(DateTime.now()), None) - val image2 = createImageForSyndication(UUID.randomUUID().toString, rightsAcquired = true, Some(DateTime.now().minusDays(5)), None) + val image1 = createImageForSyndication( + UUID.randomUUID().toString, + rightsAcquired = true, + Some(DateTime.now()), + None + ) + val image2 = createImageForSyndication( + UUID.randomUUID().toString, + rightsAcquired = true, + Some(DateTime.now().minusDays(5)), + None + ) - 
syndRightsOps.mostRecentSyndicationRights(image1, image2) shouldBe image1.syndicationRights + syndRightsOps.mostRecentSyndicationRights( + image1, + image2 + ) shouldBe image1.syndicationRights } } @@ -52,7 +74,14 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { val syndRights = someSyndRights "save rights on image" in { withImage(imageWithNoSyndRights) { image => - whenReady(syndRightsOps.upsertOrRefreshRights(image = addSyndicationRights(image, syndRights), currentPhotoshootOpt = None, previousPhotoshootOpt = None, lastModified = now)) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = addSyndicationRights(image, syndRights), + currentPhotoshootOpt = None, + previousPhotoshootOpt = None, + lastModified = now + ) + ) { _ => whenReady(ES.getImage(image.id)) { optImg => optImg.get.syndicationRights shouldBe defined optImg.get.syndicationRights shouldBe syndRights @@ -68,10 +97,19 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { withPhotoshoot(photoshootTitle) { images => withImage(imageWithSyndRights) { imageWithRights => - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithRights, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), lastModified = now)) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithRights, + previousPhotoshootOpt = None, + currentPhotoshootOpt = Some(photoshootTitle), + lastModified = now + ) + ) { _ => images.foreach { img => whenReady(ES.getImage(img.id)) { optImg => - optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred(imageWithRights) + optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred( + imageWithRights + ) } } } @@ -84,7 +122,14 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { withPhotoshoot(photoshootTitle) { images => withImage(imageWithNoSyndRights) { imageWithNoRights => - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithNoRights, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), lastModified = now)) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithNoRights, + previousPhotoshootOpt = None, + currentPhotoshootOpt = Some(photoshootTitle), + lastModified = now + ) + ) { _ => images.foreach { img => whenReady(ES.getImage(img.id)) { optImg => optImg.get.syndicationRights shouldBe None @@ -103,13 +148,15 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { val imageWithNoRights = images.head val otherImagesInShoot = images.tail - val imageWithRights = addSyndicationRights(imageWithNoRights, syndRights) + val imageWithRights = + addSyndicationRights(imageWithNoRights, syndRights) whenReady( syndRightsOps.upsertOrRefreshRights( image = imageWithRights, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), - lastModified = now) + lastModified = now + ) ) { _ => whenReady(ES.getImage(imageWithNoRights.id)) { img => withClue("the original image should have syndication rights") { @@ -118,8 +165,12 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { } otherImagesInShoot.foreach { img => whenReady(ES.getImage(img.id)) { optImg => - withClue("the other images in the shoot should gain inferred syndication rights") { - optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred(imageWithRights) + withClue( + "the other images in the shoot should gain inferred syndication rights" + ) { + optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred( + imageWithRights + ) } } } @@ 
-135,11 +186,27 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { withPhotoshoot(photoshootTitle) { images => withImage(imageWithSyndRights) { imageWithRights1 => withImage(imageWithSyndRights) { imageWithRights2 => - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithRights1, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), lastModified = now)) { _ => - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithRights2, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), lastModified = now)) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithRights1, + previousPhotoshootOpt = None, + currentPhotoshootOpt = Some(photoshootTitle), + lastModified = now + ) + ) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithRights2, + previousPhotoshootOpt = None, + currentPhotoshootOpt = Some(photoshootTitle), + lastModified = now + ) + ) { _ => images.foreach { img => whenReady(ES.getImage(img.id)) { optImg => - optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred(imageWithRights2) + optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred( + imageWithRights2 + ) } } } @@ -155,13 +222,30 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { withPhotoshoot(photoshootTitle) { images => val imageWithNoRights = images.head val syndRights = someSyndRights - val imageWithRights = addSyndicationRights(imageWithNoRights, syndRights) - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithRights, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), lastModified = now)) { _ => + val imageWithRights = + addSyndicationRights(imageWithNoRights, syndRights) + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithRights, + previousPhotoshootOpt = None, + currentPhotoshootOpt = Some(photoshootTitle), + lastModified = now + ) + ) { _ => withImage(imageWithNoSyndRights) { imageWithNoRights => - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithNoRights, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), lastModified = now)) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithNoRights, + previousPhotoshootOpt = None, + currentPhotoshootOpt = Some(photoshootTitle), + lastModified = now + ) + ) { _ => (images.tail :+ imageWithNoRights).foreach { img => whenReady(ES.getImage(img.id)) { optImg => - optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred(imageWithRights) + optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred( + imageWithRights + ) } } } @@ -175,8 +259,22 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { withPhotoshoot(photoshootTitle) { images => withImage(imageWithSyndRights) { imageWithRights => - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithRights, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), lastModified = now)) { _ => - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithRights, previousPhotoshootOpt = Some(photoshootTitle), currentPhotoshootOpt = None, lastModified = now)) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithRights, + previousPhotoshootOpt = None, + currentPhotoshootOpt = Some(photoshootTitle), + lastModified = now + ) + ) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithRights, + previousPhotoshootOpt = Some(photoshootTitle), + currentPhotoshootOpt = 
None, + lastModified = now + ) + ) { _ => images.foreach { img => whenReady(ES.getImage(img.id)) { optImg => optImg.get.syndicationRights shouldBe None @@ -196,11 +294,30 @@ class SyndicationRightsOpsTest extends ElasticSearchTestBase { withPhotoshoot(photoshootTitle) { images => withImage(imageWithSyndRights) { imageWithRights => - whenReady(syndRightsOps.upsertOrRefreshRights(image = imageWithRights, previousPhotoshootOpt = None, currentPhotoshootOpt = Some(photoshootTitle), lastModified = now)) { _ => - whenReady(syndRightsOps.upsertOrRefreshRights(image = addSyndicationRights(images.head, makeSyndicationRightsInferred(imageWithRights)), previousPhotoshootOpt = Some(photoshootTitle), currentPhotoshootOpt = None, lastModified = now)) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = imageWithRights, + previousPhotoshootOpt = None, + currentPhotoshootOpt = Some(photoshootTitle), + lastModified = now + ) + ) { _ => + whenReady( + syndRightsOps.upsertOrRefreshRights( + image = addSyndicationRights( + images.head, + makeSyndicationRightsInferred(imageWithRights) + ), + previousPhotoshootOpt = Some(photoshootTitle), + currentPhotoshootOpt = None, + lastModified = now + ) + ) { _ => images.tail.foreach { img => whenReady(ES.getImage(img.id)) { optImg => - optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred(imageWithRights) + optImg.get.syndicationRights shouldBe makeSyndicationRightsInferred( + imageWithRights + ) } } whenReady(ES.getImage(images.head.id)) { optImg => diff --git a/thrall/test/lib/kinesis/MessageProcessorTest.scala b/thrall/test/lib/kinesis/MessageProcessorTest.scala index d70040ee31..c2b0711ec4 100644 --- a/thrall/test/lib/kinesis/MessageProcessorTest.scala +++ b/thrall/test/lib/kinesis/MessageProcessorTest.scala @@ -7,7 +7,11 @@ import com.gu.mediaservice.lib.logging.MarkerMap import com.gu.mediaservice.model.usage.UsageNotice import com.gu.mediaservice.model.{Edits, ImageMetadata} import lib.{MetadataEditorNotifications, ThrallStore} -import lib.elasticsearch.{ElasticSearchTestBase, ElasticSearchUpdateResponse, SyndicationRightsOps} +import lib.elasticsearch.{ + ElasticSearchTestBase, + ElasticSearchUpdateResponse, + SyndicationRightsOps +} import org.joda.time.{DateTime, DateTimeZone} import org.scalatest.mockito.MockitoSugar import play.api.libs.json.JsArray @@ -15,7 +19,6 @@ import play.api.libs.json.JsArray import scala.concurrent.{Await, Future} import scala.util.{Success, Try} - class MessageProcessorTest extends ElasticSearchTestBase with MockitoSugar { implicit val logMarker: MarkerMap = MarkerMap() "MessageProcessor" - { @@ -23,29 +26,44 @@ class MessageProcessorTest extends ElasticSearchTestBase with MockitoSugar { es = ES, store = mock[ThrallStore], metadataEditorNotifications = mock[MetadataEditorNotifications], - syndicationRightsOps = mock[SyndicationRightsOps]) + syndicationRightsOps = mock[SyndicationRightsOps] + ) "usages" - { "adds usages" - { "for an image that exists" in { - val expected: Success[List[ElasticSearchUpdateResponse]] = Success(List(ElasticSearchUpdateResponse())) + val expected: Success[List[ElasticSearchUpdateResponse]] = + Success(List(ElasticSearchUpdateResponse())) val id = UUID.randomUUID().toString - val userMetadata = Some(Edits(metadata = ImageMetadata( - description = Some("My boring image"), - title = Some("User supplied title"), - subjects = List("foo", "bar"), - specialInstructions = Some("Testing") - ))) + val userMetadata = Some( + Edits(metadata = + ImageMetadata( + description = Some("My 
boring image"), + title = Some("User supplied title"), + subjects = List("foo", "bar"), + specialInstructions = Some("Testing") + ) + ) + ) - val image = createImageForSyndication(id = UUID.randomUUID().toString, rightsAcquired = true, Some(DateTime.now()), None). - copy(userMetadata = userMetadata) + val image = createImageForSyndication( + id = UUID.randomUUID().toString, + rightsAcquired = true, + Some(DateTime.now()), + None + ).copy(userMetadata = userMetadata) - Await.result(Future.sequence(ES.indexImage(id, image, now)), fiveSeconds) + Await.result( + Future.sequence(ES.indexImage(id, image, now)), + fiveSeconds + ) - eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))(reloadedImage(id).map(_.id) shouldBe Some(image.id)) + eventually(timeout(fiveSeconds), interval(oneHundredMilliseconds))( + reloadedImage(id).map(_.id) shouldBe Some(image.id) + ) val message = UpdateMessage( "update-image-usages", @@ -61,10 +79,16 @@ class MessageProcessorTest extends ElasticSearchTestBase with MockitoSugar { syndicationRights = None, bulkIndexRequest = None ) - Try(Await.result(messageProcessor.updateImageUsages(message, logMarker), fiveSeconds)) shouldBe expected + Try( + Await.result( + messageProcessor.updateImageUsages(message, logMarker), + fiveSeconds + ) + ) shouldBe expected } "not crash for an image that doesn't exist 👻🖼" in { - val expected: Success[List[ElasticSearchUpdateResponse]] = Success(List(ElasticSearchUpdateResponse())) + val expected: Success[List[ElasticSearchUpdateResponse]] = + Success(List(ElasticSearchUpdateResponse())) val id = UUID.randomUUID().toString val message = UpdateMessage( @@ -81,7 +105,12 @@ class MessageProcessorTest extends ElasticSearchTestBase with MockitoSugar { syndicationRights = None, bulkIndexRequest = None ) - Try(Await.result(messageProcessor.updateImageUsages(message, logMarker), fiveSeconds)) shouldBe expected + Try( + Await.result( + messageProcessor.updateImageUsages(message, logMarker), + fiveSeconds + ) + ) shouldBe expected } } } diff --git a/thrall/test/lib/kinesis/ThrallEventConsumerTest.scala b/thrall/test/lib/kinesis/ThrallEventConsumerTest.scala index 1c855b24eb..82ed392342 100644 --- a/thrall/test/lib/kinesis/ThrallEventConsumerTest.scala +++ b/thrall/test/lib/kinesis/ThrallEventConsumerTest.scala @@ -32,4 +32,3 @@ class ThrallEventConsumerTest extends ElasticSearchTestBase with MockitoSugar { } } } - diff --git a/usage/app/UsageComponents.scala b/usage/app/UsageComponents.scala index fddb911d2f..2d4b02a438 100644 --- a/usage/app/UsageComponents.scala +++ b/usage/app/UsageComponents.scala @@ -7,7 +7,8 @@ import router.Routes import scala.concurrent.Future -class UsageComponents(context: Context) extends GridComponents(context, new UsageConfig(_)) { +class UsageComponents(context: Context) + extends GridComponents(context, new UsageConfig(_)) { final override val buildInfo = utils.buildinfo.BuildInfo @@ -19,10 +20,16 @@ class UsageComponents(context: Context) extends GridComponents(context, new Usag val usageMetrics = new UsageMetrics(config) val usageNotifier = new UsageNotifier(config, usageTable) val usageStream = new UsageStream(usageGroup) - val usageRecorder = new UsageRecorder(usageMetrics, usageTable, usageStream, usageNotifier, usageNotifier) + val usageRecorder = new UsageRecorder( + usageMetrics, + usageTable, + usageStream, + usageNotifier, + usageNotifier + ) val notifications = new Notifications(config) - if(!config.apiOnly) { + if (!config.apiOnly) { val crierReader = new CrierStreamReader(config) 
crierReader.start() } @@ -33,7 +40,18 @@ class UsageComponents(context: Context) extends GridComponents(context, new Usag Future.successful(()) }) - val controller = new UsageApi(auth, usageTable, usageGroup, notifications, config, usageRecorder, liveContentApi, controllerComponents, playBodyParsers) - - override lazy val router = new Routes(httpErrorHandler, controller, management) + val controller = new UsageApi( + auth, + usageTable, + usageGroup, + notifications, + config, + usageRecorder, + liveContentApi, + controllerComponents, + playBodyParsers + ) + + override lazy val router = + new Routes(httpErrorHandler, controller, management) } diff --git a/usage/app/controllers/UsageApi.scala b/usage/app/controllers/UsageApi.scala index f19e5be12b..a7187bbdda 100644 --- a/usage/app/controllers/UsageApi.scala +++ b/usage/app/controllers/UsageApi.scala @@ -4,7 +4,11 @@ import java.net.URI import com.gu.contentapi.client.model.ItemQuery import com.gu.mediaservice.lib.argo.ArgoHelpers -import com.gu.mediaservice.lib.argo.model.{EntityResponse, Link, Action => ArgoAction} +import com.gu.mediaservice.lib.argo.model.{ + EntityResponse, + Link, + Action => ArgoAction +} import com.gu.mediaservice.lib.auth.Authentication import com.gu.mediaservice.lib.aws.UpdateMessage import com.gu.mediaservice.lib.usage.UsageBuilder @@ -18,9 +22,19 @@ import play.utils.UriEncoding import scala.concurrent.{ExecutionContext, Future} import scala.util.Try -class UsageApi(auth: Authentication, usageTable: UsageTable, usageGroup: UsageGroupOps, notifications: Notifications, config: UsageConfig, usageRecorder: UsageRecorder, liveContentApi: LiveContentApi, - override val controllerComponents: ControllerComponents, playBodyParsers: PlayBodyParsers)(implicit val ec: ExecutionContext) - extends BaseController with ArgoHelpers { +class UsageApi( + auth: Authentication, + usageTable: UsageTable, + usageGroup: UsageGroupOps, + notifications: Notifications, + config: UsageConfig, + usageRecorder: UsageRecorder, + liveContentApi: LiveContentApi, + override val controllerComponents: ControllerComponents, + playBodyParsers: PlayBodyParsers +)(implicit val ec: ExecutionContext) + extends BaseController + with ArgoHelpers { private def wrapUsage(usage: Usage): EntityResponse[Usage] = { EntityResponse( @@ -54,25 +68,34 @@ class UsageApi(auth: Authentication, usageTable: UsageTable, usageGroup: UsageGr logger.info(s"Request for single usage $usageId") val usageFuture = usageTable.queryByUsageId(usageId) - usageFuture.map[play.api.mvc.Result]((mediaUsageOption: Option[MediaUsage]) => { - mediaUsageOption.foldLeft( - respondNotFound("No usages found.") - )((_, mediaUsage: MediaUsage) => { - val usage = UsageBuilder.build(mediaUsage) - val mediaId = mediaUsage.mediaId - - val uri = usageUri(usage.id) - val links = List( - Link("media", s"${config.services.apiBaseUri}/images/$mediaId"), - Link("media-usage", s"${config.services.usageBaseUri}/usages/media/$mediaId") - ) + usageFuture + .map[play.api.mvc.Result]((mediaUsageOption: Option[MediaUsage]) => { + mediaUsageOption.foldLeft( + respondNotFound("No usages found.") + )((_, mediaUsage: MediaUsage) => { + val usage = UsageBuilder.build(mediaUsage) + val mediaId = mediaUsage.mediaId - respond[Usage](data = usage, uri = uri, links = links) + val uri = usageUri(usage.id) + val links = List( + Link("media", s"${config.services.apiBaseUri}/images/$mediaId"), + Link( + "media-usage", + s"${config.services.usageBaseUri}/usages/media/$mediaId" + ) + ) + + respond[Usage](data = usage, uri = 
uri, links = links) + }) }) - }).recover { case error: Exception => - logger.error("UsageApi returned an error.", error) - respondError(InternalServerError, "usage-retrieve-failed", error.getMessage) - } + .recover { case error: Exception => + logger.error("UsageApi returned an error.", error) + respondError( + InternalServerError, + "usage-retrieve-failed", + error.getMessage + ) + } } @@ -81,24 +104,29 @@ class UsageApi(auth: Authentication, usageTable: UsageTable, usageGroup: UsageGr .showFields("all") .showElements("all") - val result = liveContentApi.getResponse(query).map(response => { - response.content match { - case Some(content) => - val contentFirstPublished = - liveContentApi.getContentFirstPublished(content) - val container = contentFirstPublished - .map(LiveContentItem(content, _)) - .map(_.copy(isReindex = true)) - - container.foreach(LiveCrierContentStream.observable.onNext(_)) - case _ => Unit - } - }) + val result = liveContentApi + .getResponse(query) + .map(response => { + response.content match { + case Some(content) => + val contentFirstPublished = + liveContentApi.getContentFirstPublished(content) + val container = contentFirstPublished + .map(LiveContentItem(content, _)) + .map(_.copy(isReindex = true)) + + container.foreach(LiveCrierContentStream.observable.onNext(_)) + case _ => Unit + } + }) result .map(_ => Accepted) .recover { case error: Exception => - logger.error(s"UsageApi reindex for for content ($contentId) failed!", error) + logger.error( + s"UsageApi reindex for for content ($contentId) failed!", + error + ) InternalServerError } } @@ -106,44 +134,65 @@ class UsageApi(auth: Authentication, usageTable: UsageTable, usageGroup: UsageGr def forMedia(mediaId: String) = auth.async { val usagesFuture = usageTable.queryByImageId(mediaId) - usagesFuture.map[play.api.mvc.Result]((mediaUsages: Set[MediaUsage]) => { - val usages = mediaUsages.toList.map(UsageBuilder.build) + usagesFuture + .map[play.api.mvc.Result]((mediaUsages: Set[MediaUsage]) => { + val usages = mediaUsages.toList.map(UsageBuilder.build) - usages match { - case Nil => respondNotFound("No usages found.") - case usage :: _ => - val uri = Try { URI.create(s"${config.services.usageBaseUri}/usages/media/$mediaId") }.toOption - val links = List( - Link("media", s"${config.services.apiBaseUri}/images/$mediaId") - ) + usages match { + case Nil => respondNotFound("No usages found.") + case usage :: _ => + val uri = Try { + URI.create( + s"${config.services.usageBaseUri}/usages/media/$mediaId" + ) + }.toOption + val links = List( + Link("media", s"${config.services.apiBaseUri}/images/$mediaId") + ) - respondCollection[EntityResponse[Usage]]( - uri = uri, - links = links, - data = usages.map(wrapUsage) + respondCollection[EntityResponse[Usage]]( + uri = uri, + links = links, + data = usages.map(wrapUsage) + ) + } + }) + .recover { + case error: BadInputException => + logger.error("UsageApi returned an error.", error) + respondError( + BadRequest, + "image-usage-retrieve-failed", + error.getMessage + ) + case error: Exception => + logger.error("UsageApi returned an error.", error) + respondError( + InternalServerError, + "image-usage-retrieve-failed", + error.getMessage ) } - }).recover { - case error: BadInputException => - logger.error("UsageApi returned an error.", error) - respondError(BadRequest, "image-usage-retrieve-failed", error.getMessage) - case error: Exception => - logger.error("UsageApi returned an error.", error) - respondError(InternalServerError, "image-usage-retrieve-failed", 
error.getMessage) - } } val maxPrintRequestLength: Int = 1024 * config.maxPrintRequestLengthInKb - val setPrintRequestBodyParser: BodyParser[JsValue] = playBodyParsers.json(maxLength = maxPrintRequestLength) + val setPrintRequestBodyParser: BodyParser[JsValue] = + playBodyParsers.json(maxLength = maxPrintRequestLength) - def setPrintUsages = auth(setPrintRequestBodyParser) { request => { + def setPrintUsages = auth(setPrintRequestBodyParser) { request => + { val printUsageRequestResult = request.body.validate[PrintUsageRequest] printUsageRequestResult.fold( e => { - respondError(BadRequest, "print-usage-request-parse-failed", JsError.toJson(e).toString) + respondError( + BadRequest, + "print-usage-request-parse-failed", + JsError.toJson(e).toString + ) }, printUsageRequest => { - val usageGroups = usageGroup.build(printUsageRequest.printUsageRecords) + val usageGroups = + usageGroup.build(printUsageRequest.printUsageRecords) usageGroups.foreach(usageRecorder.usageSubject.onNext) Accepted @@ -152,70 +201,104 @@ class UsageApi(auth: Authentication, usageTable: UsageTable, usageGroup: UsageGr } } - def setSyndicationUsages() = auth(parse.json) { req => { - val syndicationUsageRequest = (req.body \ "data").validate[SyndicationUsageRequest] - syndicationUsageRequest.fold( - e => respondError( - BadRequest, - errorKey = "syndication-usage-parse-failed", - errorMessage = JsError.toJson(e).toString - ), - sur => { - logger.info(req.user.accessor, ImageId(sur.mediaId), "recording syndication usage") - val group = usageGroup.build(sur) - usageRecorder.usageSubject.onNext(group) - Accepted - } - ) - }} - - def setFrontUsages() = auth(parse.json) { req => { - val request = (req.body \ "data").validate[FrontUsageRequest] - request.fold( - e => respondError( - BadRequest, - errorKey = "front-usage-parse-failed", - errorMessage = JsError.toJson(e).toString - ), - fur => { - logger.info(req.user.accessor, ImageId(fur.mediaId), "recording front usage") - val group = usageGroup.build(fur) - usageRecorder.usageSubject.onNext(group) - Accepted - } - ) - }} - - def setDownloadUsages() = auth(parse.json) { req => { - val request = (req.body \ "data").validate[DownloadUsageRequest] - request.fold( - e => respondError( - BadRequest, - errorKey = "download-usage-parse-failed", - errorMessage = JsError.toJson(e).toString - ), - usageRequest => { - logger.info(req.user.accessor, ImageId(usageRequest.mediaId), "recording download usage") - val group = usageGroup.build(usageRequest) - usageRecorder.usageSubject.onNext(group) - Accepted - } - ) - }} + def setSyndicationUsages() = auth(parse.json) { req => + { + val syndicationUsageRequest = + (req.body \ "data").validate[SyndicationUsageRequest] + syndicationUsageRequest.fold( + e => + respondError( + BadRequest, + errorKey = "syndication-usage-parse-failed", + errorMessage = JsError.toJson(e).toString + ), + sur => { + logger.info( + req.user.accessor, + ImageId(sur.mediaId), + "recording syndication usage" + ) + val group = usageGroup.build(sur) + usageRecorder.usageSubject.onNext(group) + Accepted + } + ) + } + } - def deleteUsages(mediaId: String) = auth.async { - usageTable.queryByImageId(mediaId).map(usages => { - usages.foreach(usageTable.deleteRecord) - }).recover{ - case error: BadInputException => - logger.warn("UsageApi returned an error.", error) - respondError(BadRequest, "image-usage-delete-failed", error.getMessage) - case error: Exception => - logger.error("UsageApi returned an error.", error) - respondError(InternalServerError, 
"image-usage-delete-failed", error.getMessage) + def setFrontUsages() = auth(parse.json) { req => + { + val request = (req.body \ "data").validate[FrontUsageRequest] + request.fold( + e => + respondError( + BadRequest, + errorKey = "front-usage-parse-failed", + errorMessage = JsError.toJson(e).toString + ), + fur => { + logger.info( + req.user.accessor, + ImageId(fur.mediaId), + "recording front usage" + ) + val group = usageGroup.build(fur) + usageRecorder.usageSubject.onNext(group) + Accepted + } + ) } + } + + def setDownloadUsages() = auth(parse.json) { req => + { + val request = (req.body \ "data").validate[DownloadUsageRequest] + request.fold( + e => + respondError( + BadRequest, + errorKey = "download-usage-parse-failed", + errorMessage = JsError.toJson(e).toString + ), + usageRequest => { + logger.info( + req.user.accessor, + ImageId(usageRequest.mediaId), + "recording download usage" + ) + val group = usageGroup.build(usageRequest) + usageRecorder.usageSubject.onNext(group) + Accepted + } + ) + } + } + + def deleteUsages(mediaId: String) = auth.async { + usageTable + .queryByImageId(mediaId) + .map(usages => { + usages.foreach(usageTable.deleteRecord) + }) + .recover { + case error: BadInputException => + logger.warn("UsageApi returned an error.", error) + respondError( + BadRequest, + "image-usage-delete-failed", + error.getMessage + ) + case error: Exception => + logger.error("UsageApi returned an error.", error) + respondError( + InternalServerError, + "image-usage-delete-failed", + error.getMessage + ) + } - val updateMessage = UpdateMessage(subject = "delete-usages", id = Some(mediaId)) + val updateMessage = + UpdateMessage(subject = "delete-usages", id = Some(mediaId)) notifications.publish(updateMessage) Future.successful(Ok) } diff --git a/usage/app/lib/ContentApi.scala b/usage/app/lib/ContentApi.scala index fdd8e34104..6ef4afeb32 100644 --- a/usage/app/lib/ContentApi.scala +++ b/usage/app/lib/ContentApi.scala @@ -13,9 +13,11 @@ trait ContentHelpers { } -class LiveContentApi(config: UsageConfig) extends ContentApiRequestBuilder(config) { +class LiveContentApi(config: UsageConfig) + extends ContentApiRequestBuilder(config) { override val targetUrl = config.capiLiveUrl } -class ContentApiRequestBuilder(config: UsageConfig) extends GuardianContentClient(apiKey = config.capiApiKey) with ContentHelpers - +class ContentApiRequestBuilder(config: UsageConfig) + extends GuardianContentClient(apiKey = config.capiApiKey) + with ContentHelpers diff --git a/usage/app/lib/ContentStream.scala b/usage/app/lib/ContentStream.scala index 5b511a119a..a57a7d33cf 100644 --- a/usage/app/lib/ContentStream.scala +++ b/usage/app/lib/ContentStream.scala @@ -21,5 +21,13 @@ trait ContentContainer { val isReindex: Boolean } -case class LiveContentItem(content: Content, lastModified: DateTime, isReindex: Boolean = false) extends ContentContainer -case class PreviewContentItem(content: Content, lastModified: DateTime, isReindex: Boolean = false) extends ContentContainer +case class LiveContentItem( + content: Content, + lastModified: DateTime, + isReindex: Boolean = false +) extends ContentContainer +case class PreviewContentItem( + content: Content, + lastModified: DateTime, + isReindex: Boolean = false +) extends ContentContainer diff --git a/usage/app/lib/CrierStreamReader.scala b/usage/app/lib/CrierStreamReader.scala index f4b08a91ba..47ac54344e 100644 --- a/usage/app/lib/CrierStreamReader.scala +++ b/usage/app/lib/CrierStreamReader.scala @@ -6,13 +6,21 @@ import java.util.UUID import 
com.amazonaws.auth._ import com.amazonaws.auth.InstanceProfileCredentialsProvider import com.amazonaws.auth.profile.ProfileCredentialsProvider -import com.amazonaws.services.kinesis.clientlibrary.interfaces.{IRecordProcessor, IRecordProcessorFactory} -import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{InitialPositionInStream, KinesisClientLibConfiguration, Worker} +import com.amazonaws.services.kinesis.clientlibrary.interfaces.{ + IRecordProcessor, + IRecordProcessorFactory +} +import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{ + InitialPositionInStream, + KinesisClientLibConfiguration, + Worker +} import com.gu.mediaservice.lib.logging.GridLogging class CrierStreamReader(config: UsageConfig) extends GridLogging { - lazy val workerId: String = InetAddress.getLocalHost.getCanonicalHostName + ":" + UUID.randomUUID() + lazy val workerId: String = + InetAddress.getLocalHost.getCanonicalHostName + ":" + UUID.randomUUID() val credentialsProvider = new AWSCredentialsProviderChain( new ProfileCredentialsProvider("media-service"), @@ -24,10 +32,12 @@ class CrierStreamReader(config: UsageConfig) extends GridLogging { lazy val sessionId: String = "session" + Math.random() val initialPosition = InitialPositionInStream.TRIM_HORIZON - private def kinesisCredentialsProvider(arn: String) = new AWSCredentialsProviderChain( - new ProfileCredentialsProvider("capi"), - new STSAssumeRoleSessionCredentialsProvider.Builder(arn, sessionId).build() - ) + private def kinesisCredentialsProvider(arn: String) = + new AWSCredentialsProviderChain( + new ProfileCredentialsProvider("capi"), + new STSAssumeRoleSessionCredentialsProvider.Builder(arn, sessionId) + .build() + ) private def kinesisClientLibConfig(kinesisReaderConfig: KinesisReaderConfig) = new KinesisClientLibConfiguration( @@ -38,7 +48,7 @@ class CrierStreamReader(config: UsageConfig) extends GridLogging { credentialsProvider, workerId ).withInitialPositionInStream(initialPosition) - .withRegionName(config.awsRegionName) + .withRegionName(config.awsRegionName) private lazy val liveConfig = config.liveKinesisReaderConfig.map(kinesisClientLibConfig) @@ -56,8 +66,18 @@ class CrierStreamReader(config: UsageConfig) extends GridLogging { new CrierPreviewEventProcessor(config) } - lazy val liveWorker = liveConfig.map(new Worker.Builder().recordProcessorFactory(LiveEventProcessorFactory).config(_).build()) - lazy val previewWorker = previewConfig.map(new Worker.Builder().recordProcessorFactory(PreviewEventProcessorFactory).config(_).build()) + lazy val liveWorker = liveConfig.map( + new Worker.Builder() + .recordProcessorFactory(LiveEventProcessorFactory) + .config(_) + .build() + ) + lazy val previewWorker = previewConfig.map( + new Worker.Builder() + .recordProcessorFactory(PreviewEventProcessorFactory) + .config(_) + .build() + ) private def makeThread(worker: Runnable) = new Thread(worker, s"${getClass.getSimpleName}-$workerId") diff --git a/usage/app/lib/EventProcessor.scala b/usage/app/lib/EventProcessor.scala index be9e28ee92..90f9252224 100644 --- a/usage/app/lib/EventProcessor.scala +++ b/usage/app/lib/EventProcessor.scala @@ -3,7 +3,10 @@ package lib import java.nio.ByteBuffer import java.util.{List => JList} -import com.amazonaws.services.kinesis.clientlibrary.interfaces.{IRecordProcessor, IRecordProcessorCheckpointer} +import com.amazonaws.services.kinesis.clientlibrary.interfaces.{ + IRecordProcessor, + IRecordProcessorCheckpointer +} import com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShutdownReason import 
com.amazonaws.services.kinesis.model.Record import com.fasterxml.jackson.databind.util.ByteBufferBackedInputStream @@ -20,7 +23,9 @@ import scala.collection.JavaConverters._ import scala.concurrent.ExecutionContext.Implicits.global import scala.util.Try -abstract class EventProcessor(config: UsageConfig) extends IRecordProcessor with GridLogging { +abstract class EventProcessor(config: UsageConfig) + extends IRecordProcessor + with GridLogging { implicit val codec = Event @@ -30,10 +35,15 @@ abstract class EventProcessor(config: UsageConfig) extends IRecordProcessor with logger.debug(s"Initialized an event processor for shard $shardId") } - override def processRecords(records: JList[Record], checkpointer: IRecordProcessorCheckpointer): Unit - + override def processRecords( + records: JList[Record], + checkpointer: IRecordProcessorCheckpointer + ): Unit - override def shutdown(checkpointer: IRecordProcessorCheckpointer, reason: ShutdownReason): Unit = { + override def shutdown( + checkpointer: IRecordProcessorCheckpointer, + reason: ShutdownReason + ): Unit = { if (reason == ShutdownReason.TERMINATE) { checkpointer.checkpoint() } @@ -41,24 +51,24 @@ abstract class EventProcessor(config: UsageConfig) extends IRecordProcessor with def getContentItem(content: Content, time: DateTime): ContentContainer - def processEvent(event: Event): Unit = { val dateTime: DateTime = new DateTime(event.dateTime) event.eventType match { case EventType.Update => - event.payload match { case Some(content: EventPayload.Content) => val container = getContentItem(content.content, dateTime) contentStream.observable.onNext(container) - case _ => logger.debug(s"Received crier update for ${event.payloadId} without payload") + case _ => + logger.debug( + s"Received crier update for ${event.payloadId} without payload" + ) } case EventType.Delete => - //TODO: how do we deal with a piece of content that has been deleted? + //TODO: how do we deal with a piece of content that has been deleted? 
case EventType.RetrievableUpdate => - event.payload match { case Some(retrievableContent: EventPayload.RetrievableContent) => val capiUrl = retrievableContent.retrievableContent.capiUrl @@ -67,17 +77,24 @@ abstract class EventProcessor(config: UsageConfig) extends IRecordProcessor with val query = ItemQuery(capiUrl, Map()) - capi.getResponse(query).map(response => { - - response.content match { - case Some(content) => - - val container = new LiveContentItem(content, dateTime) - LiveCrierContentStream.observable.onNext(container) - case _ => logger.debug(s"Received retrievable update for ${retrievableContent.retrievableContent.id} without content") - } - }) - case _ => logger.debug(s"Received crier update for ${event.payloadId} without payload") + capi + .getResponse(query) + .map(response => { + + response.content match { + case Some(content) => + val container = new LiveContentItem(content, dateTime) + LiveCrierContentStream.observable.onNext(container) + case _ => + logger.debug( + s"Received retrievable update for ${retrievableContent.retrievableContent.id} without content" + ) + } + }) + case _ => + logger.debug( + s"Received crier update for ${event.payloadId} without payload" + ) } case _ => logger.debug(s"Unsupported event type $EventType") @@ -85,16 +102,20 @@ abstract class EventProcessor(config: UsageConfig) extends IRecordProcessor with } } -private class CrierLiveEventProcessor(config: UsageConfig) extends EventProcessor(config) { +private class CrierLiveEventProcessor(config: UsageConfig) + extends EventProcessor(config) { val contentStream = LiveCrierContentStream - def getContentItem(content: Content, date: DateTime): ContentContainer = LiveContentItem(content, date) + def getContentItem(content: Content, date: DateTime): ContentContainer = + LiveContentItem(content, date) - override def processRecords(records: JList[Record], checkpointer: IRecordProcessorCheckpointer): Unit = { + override def processRecords( + records: JList[Record], + checkpointer: IRecordProcessorCheckpointer + ): Unit = { records.asScala.map { record => - val buffer: Array[Byte] = record.getData.array() ThriftDeserializer.deserialize(buffer).map(processEvent) @@ -104,16 +125,20 @@ private class CrierLiveEventProcessor(config: UsageConfig) extends EventProcesso } } -private class CrierPreviewEventProcessor(config: UsageConfig) extends EventProcessor(config) { +private class CrierPreviewEventProcessor(config: UsageConfig) + extends EventProcessor(config) { val contentStream = PreviewCrierContentStream - def getContentItem(content: Content, date: DateTime): ContentContainer = PreviewContentItem(content, date) + def getContentItem(content: Content, date: DateTime): ContentContainer = + PreviewContentItem(content, date) - override def processRecords(records: JList[Record], checkpointer: IRecordProcessorCheckpointer): Unit = { + override def processRecords( + records: JList[Record], + checkpointer: IRecordProcessorCheckpointer + ): Unit = { records.asScala.map { record => - val buffer: Array[Byte] = record.getData.array() ThriftDeserializer.deserialize(buffer).map(processEvent) diff --git a/usage/app/lib/MediaUsageBuilder.scala b/usage/app/lib/MediaUsageBuilder.scala index 02abf7b47a..b449bfe3dc 100644 --- a/usage/app/lib/MediaUsageBuilder.scala +++ b/usage/app/lib/MediaUsageBuilder.scala @@ -3,23 +3,23 @@ package lib import com.gu.mediaservice.model.usage._ import model._ - object MediaUsageBuilder { - def build(printUsage: PrintUsageRecord, usageId: UsageId, grouping: String) = MediaUsage( - usageId, - 
grouping, - printUsage.mediaId, - PrintUsage, - "image", - printUsage.usageStatus, - Some(printUsage.printUsageMetadata), - None, - None, - None, - None, - printUsage.dateAdded - ) + def build(printUsage: PrintUsageRecord, usageId: UsageId, grouping: String) = + MediaUsage( + usageId, + grouping, + printUsage.mediaId, + PrintUsage, + "image", + printUsage.usageStatus, + Some(printUsage.printUsageMetadata), + None, + None, + None, + None, + printUsage.dateAdded + ) def build(mediaWrapper: MediaWrapper): MediaUsage = { val usageId = UsageIdBuilder.build(mediaWrapper) @@ -40,7 +40,10 @@ object MediaUsageBuilder { ) } - def build(syndicationUsageRequest: SyndicationUsageRequest, groupId: String): MediaUsage = { + def build( + syndicationUsageRequest: SyndicationUsageRequest, + groupId: String + ): MediaUsage = { val usageId = UsageIdBuilder.build(syndicationUsageRequest) MediaUsage( usageId, @@ -58,7 +61,10 @@ object MediaUsageBuilder { ) } - def build(frontUsageRequest: FrontUsageRequest, groupId: String): MediaUsage = { + def build( + frontUsageRequest: FrontUsageRequest, + groupId: String + ): MediaUsage = { val usageId = UsageIdBuilder.build(frontUsageRequest) MediaUsage( @@ -77,10 +83,13 @@ object MediaUsageBuilder { ) } - def build(downloadUsageRequest: DownloadUsageRequest, groupId: String): MediaUsage = { + def build( + downloadUsageRequest: DownloadUsageRequest, + groupId: String + ): MediaUsage = { val usageId = UsageIdBuilder.build(downloadUsageRequest) - MediaUsage ( + MediaUsage( usageId, groupId, downloadUsageRequest.mediaId, diff --git a/usage/app/lib/Notifications.scala b/usage/app/lib/Notifications.scala index 0aa1a605fe..5d10acc872 100644 --- a/usage/app/lib/Notifications.scala +++ b/usage/app/lib/Notifications.scala @@ -2,4 +2,5 @@ package lib import com.gu.mediaservice.lib.aws.ThrallMessageSender -class Notifications(config: UsageConfig) extends ThrallMessageSender(config.thrallKinesisStreamConfig) +class Notifications(config: UsageConfig) + extends ThrallMessageSender(config.thrallKinesisStreamConfig) diff --git a/usage/app/lib/SingleThreadedScheduler.scala b/usage/app/lib/SingleThreadedScheduler.scala index e05fc75ea6..741ae308f4 100644 --- a/usage/app/lib/SingleThreadedScheduler.scala +++ b/usage/app/lib/SingleThreadedScheduler.scala @@ -8,7 +8,7 @@ import scala.concurrent.ExecutionContext trait SingleThreadedScheduler { private val singleThreadedExecutor = Executors.newSingleThreadExecutor() - val scheduler = ExecutionContextScheduler(ExecutionContext.fromExecutor(singleThreadedExecutor)) + val scheduler = ExecutionContextScheduler( + ExecutionContext.fromExecutor(singleThreadedExecutor) + ) } - - diff --git a/usage/app/lib/UsageConfig.scala b/usage/app/lib/UsageConfig.scala index c05d34efdc..8f4d15791f 100644 --- a/usage/app/lib/UsageConfig.scala +++ b/usage/app/lib/UsageConfig.scala @@ -8,10 +8,11 @@ import com.gu.mediaservice.lib.net.URI.ensureSecure import scala.util.Try - case class KinesisReaderConfig(streamName: String, arn: String, appName: String) -class UsageConfig(resources: GridConfigResources) extends CommonConfig(resources.configuration) with GridLogging { +class UsageConfig(resources: GridConfigResources) + extends CommonConfig(resources.configuration) + with GridLogging { val rootUri: String = services.metadataBaseUri val kahunaUri: String = services.kahunaBaseUri val usageUri: String = services.usageBaseUri @@ -23,14 +24,16 @@ class UsageConfig(resources: GridConfigResources) extends CommonConfig(resources val defaultMaxPrintRequestSizeInKb = 500 val 
defaultDateLimit = "2016-01-01T00:00:00+00:00" - val maxPrintRequestLengthInKb: Int = intDefault("api.setPrint.maxLength", defaultMaxPrintRequestSizeInKb) + val maxPrintRequestLengthInKb: Int = + intDefault("api.setPrint.maxLength", defaultMaxPrintRequestSizeInKb) val capiLiveUrl = string("capi.live.url") val capiApiKey = string("capi.apiKey") val capiPageSize: Int = intDefault("capi.page.size", defaultPageSize) val capiMaxRetries: Int = intDefault("capi.maxRetries", defaultMaxRetries) - val usageDateLimit: String = stringDefault("usage.dateLimit", defaultDateLimit) + val usageDateLimit: String = + stringDefault("usage.dateLimit", defaultDateLimit) private val composerBaseUrlProperty: String = string("composer.baseUrl") private val composerBaseUrl = ensureSecure(composerBaseUrlProperty) @@ -58,14 +61,17 @@ class UsageConfig(resources: GridConfigResources) extends CommonConfig(resources previewArn <- crierPreviewArn } yield KinesisReaderConfig(previewStream, previewArn, previewAppName) - private val iamClient: AmazonIdentityManagement = withAWSCredentials(AmazonIdentityManagementClientBuilder.standard()).build() + private val iamClient: AmazonIdentityManagement = + withAWSCredentials(AmazonIdentityManagementClientBuilder.standard()).build() val postfix: String = if (isDev) { try { iamClient.getUser.getUser.getUserName } catch { - case e:com.amazonaws.AmazonServiceException=> - logger.warn("Unable to determine current IAM user, probably because you're using temp credentials. Usage may not be able to determine the live/preview app names") + case e: com.amazonaws.AmazonServiceException => + logger.warn( + "Unable to determine current IAM user, probably because you're using temp credentials. Usage may not be able to determine the live/preview app names" + ) "tempcredentials" } } else { diff --git a/usage/app/lib/UsageMetadataBuilder.scala b/usage/app/lib/UsageMetadataBuilder.scala index 4f342f7837..b4563d569e 100644 --- a/usage/app/lib/UsageMetadataBuilder.scala +++ b/usage/app/lib/UsageMetadataBuilder.scala @@ -15,7 +15,9 @@ class UsageMetadataBuilder(config: UsageConfig) { Try(URI.create(s"${config.composerContentBaseUrl}/$composerId")).toOption }) - def buildDownload(metadataMap: Map[String, Any]): Option[DownloadUsageMetadata] = { + def buildDownload( + metadataMap: Map[String, Any] + ): Option[DownloadUsageMetadata] = { Try { DownloadUsageMetadata( metadataMap("downloadedBy").asInstanceOf[String] diff --git a/usage/app/lib/UsageMetrics.scala b/usage/app/lib/UsageMetrics.scala index 2fe61c1b9b..f82dfbdad5 100644 --- a/usage/app/lib/UsageMetrics.scala +++ b/usage/app/lib/UsageMetrics.scala @@ -2,7 +2,8 @@ package lib import com.gu.mediaservice.lib.metrics.CloudWatchMetrics -class UsageMetrics(config: UsageConfig) extends CloudWatchMetrics(s"${config.stage}/Usage", config) { +class UsageMetrics(config: UsageConfig) + extends CloudWatchMetrics(s"${config.stage}/Usage", config) { def incrementUpdated = updates.increment().run def incrementErrors = errors.increment().run diff --git a/usage/app/lib/UsageNotifier.scala b/usage/app/lib/UsageNotifier.scala index 1da45154c1..4ceca2579d 100644 --- a/usage/app/lib/UsageNotifier.scala +++ b/usage/app/lib/UsageNotifier.scala @@ -12,17 +12,25 @@ import rx.lang.scala.Observable import scala.concurrent.ExecutionContext.Implicits.global class UsageNotifier(config: UsageConfig, usageTable: UsageTable) - extends ThrallMessageSender(config.thrallKinesisLowPriorityStreamConfig) with GridLogging { + extends 
ThrallMessageSender(config.thrallKinesisLowPriorityStreamConfig) + with GridLogging { def build(mediaId: String) = Observable.from( - usageTable.queryByImageId(mediaId).map((usages: Set[MediaUsage]) => { - val usageJson = Json.toJson(usages.map(UsageBuilder.build)).as[JsArray] - UsageNotice(mediaId, usageJson) - })) + usageTable + .queryByImageId(mediaId) + .map((usages: Set[MediaUsage]) => { + val usageJson = Json.toJson(usages.map(UsageBuilder.build)).as[JsArray] + UsageNotice(mediaId, usageJson) + }) + ) def send(usageNotice: UsageNotice) = { logger.info(s"Sending usage notice for ${usageNotice.mediaId}") - val updateMessage = UpdateMessage(subject = "update-image-usages", id = Some(usageNotice.mediaId), usageNotice = Some(usageNotice)) + val updateMessage = UpdateMessage( + subject = "update-image-usages", + id = Some(usageNotice.mediaId), + usageNotice = Some(usageNotice) + ) publish(updateMessage) } } diff --git a/usage/app/lib/UsageRecorder.scala b/usage/app/lib/UsageRecorder.scala index 6e4c676999..9fa0851f33 100644 --- a/usage/app/lib/UsageRecorder.scala +++ b/usage/app/lib/UsageRecorder.scala @@ -9,12 +9,21 @@ import rx.lang.scala.{Observable, Subscriber, Subscription} case class ResetException() extends Exception -class UsageRecorder(usageMetrics: UsageMetrics, usageTable: UsageTable, usageStream: UsageStream, usageNotice: UsageNotifier, usageNotifier: UsageNotifier) extends StrictLogging { +class UsageRecorder( + usageMetrics: UsageMetrics, + usageTable: UsageTable, + usageStream: UsageStream, + usageNotice: UsageNotifier, + usageNotifier: UsageNotifier +) extends StrictLogging { val usageSubject = PublishSubject[UsageGroup]() - val previewUsageStream: Observable[UsageGroup] = usageStream.previewObservable.merge(usageSubject) - val liveUsageStream: Observable[UsageGroup] = usageStream.liveObservable.merge(usageSubject) + val previewUsageStream: Observable[UsageGroup] = + usageStream.previewObservable.merge(usageSubject) + val liveUsageStream: Observable[UsageGroup] = + usageStream.liveObservable.merge(usageSubject) - val subscriber = Subscriber((_:Any) => logger.debug(s"Sent Usage Notification")) + val subscriber = + Subscriber((_: Any) => logger.debug(s"Sent Usage Notification")) var subscribeToPreview: Option[Subscription] = None var subscribeToLive: Option[Subscription] = None @@ -26,11 +35,16 @@ class UsageRecorder(usageMetrics: UsageMetrics, usageTable: UsageTable, usageStr update } - val previewDbMatchStream: Observable[MatchedUsageGroup] = previewUsageStream.flatMap(matchDb) - val liveDbMatchStream: Observable[MatchedUsageGroup] = liveUsageStream.flatMap(matchDb) + val previewDbMatchStream: Observable[MatchedUsageGroup] = + previewUsageStream.flatMap(matchDb) + val liveDbMatchStream: Observable[MatchedUsageGroup] = + liveUsageStream.flatMap(matchDb) case class MatchedUsageGroup(usageGroup: UsageGroup, dbUsageGroup: UsageGroup) - case class MatchedUsageUpdate(updates: Seq[JsObject], matchUsageGroup: MatchedUsageGroup) + case class MatchedUsageUpdate( + updates: Seq[JsObject], + matchUsageGroup: MatchedUsageGroup + ) def start(): Unit = { // Eval subscription to start stream @@ -43,35 +57,51 @@ class UsageRecorder(usageMetrics: UsageMetrics, usageTable: UsageTable, usageStr subscribeToLive.foreach(_.unsubscribe()) } - def matchDb(usageGroup: UsageGroup): Observable[MatchedUsageGroup] = usageTable.matchUsageGroup(usageGroup) - .retry((_, error) => { - logger.error(s"Encountered an error trying to match usage group (${usageGroup.grouping}", error) + def matchDb(usageGroup: 
UsageGroup): Observable[MatchedUsageGroup] = + usageTable + .matchUsageGroup(usageGroup) + .retry((_, error) => { + logger.error( + s"Encountered an error trying to match usage group (${usageGroup.grouping}", + error + ) - true - }) - .map(MatchedUsageGroup(usageGroup, _)) - .map(matchedUsageGroup => { - logger.info(s"Built MatchedUsageGroup for ${usageGroup.grouping}") - - matchedUsageGroup - }) - - val previewDbUpdateStream: Observable[MatchedUsageUpdate] = getUpdatesStream(previewDbMatchStream) - val liveDbUpdateStream: Observable[MatchedUsageUpdate] = getUpdatesStream(liveDbMatchStream) - - val previewNotificationStream: Observable[UsageNotice] = getNotificationStream(previewDbUpdateStream) - val liveNotificationStream: Observable[UsageNotice] = getNotificationStream(liveDbUpdateStream) - - val distinctPreviewNotificationStream: Observable[UsageNotice] = previewNotificationStream.groupBy(_.mediaId).flatMap { - case (_, s) => s.distinctUntilChanged - } + true + }) + .map(MatchedUsageGroup(usageGroup, _)) + .map(matchedUsageGroup => { + logger.info(s"Built MatchedUsageGroup for ${usageGroup.grouping}") - val distinctLiveNotificationStream: Observable[UsageNotice] = liveNotificationStream.groupBy(_.mediaId).flatMap { - case (_, s) => s.distinctUntilChanged - } + matchedUsageGroup + }) - val previewNotifiedStream: Observable[Unit] = distinctPreviewNotificationStream.map(usageNotifier.send) - val liveNotifiedStream: Observable[Unit] = distinctLiveNotificationStream.map(usageNotifier.send) + val previewDbUpdateStream: Observable[MatchedUsageUpdate] = getUpdatesStream( + previewDbMatchStream + ) + val liveDbUpdateStream: Observable[MatchedUsageUpdate] = getUpdatesStream( + liveDbMatchStream + ) + + val previewNotificationStream: Observable[UsageNotice] = + getNotificationStream(previewDbUpdateStream) + val liveNotificationStream: Observable[UsageNotice] = getNotificationStream( + liveDbUpdateStream + ) + + val distinctPreviewNotificationStream: Observable[UsageNotice] = + previewNotificationStream.groupBy(_.mediaId).flatMap { case (_, s) => + s.distinctUntilChanged + } + + val distinctLiveNotificationStream: Observable[UsageNotice] = + liveNotificationStream.groupBy(_.mediaId).flatMap { case (_, s) => + s.distinctUntilChanged + } + + val previewNotifiedStream: Observable[Unit] = + distinctPreviewNotificationStream.map(usageNotifier.send) + val liveNotifiedStream: Observable[Unit] = + distinctLiveNotificationStream.map(usageNotifier.send) def reportStreamError(i: Int, error: Throwable): Boolean = { logger.error("UsageRecorder encountered an error.", error) @@ -80,16 +110,18 @@ class UsageRecorder(usageMetrics: UsageMetrics, usageTable: UsageTable, usageStr true } - val previewObservable: Observable[Unit] = previewNotifiedStream.retry((i, e) => reportStreamError(i,e)) - val liveObservable: Observable[Unit] = liveNotifiedStream.retry((i, e) => reportStreamError(i,e)) + val previewObservable: Observable[Unit] = + previewNotifiedStream.retry((i, e) => reportStreamError(i, e)) + val liveObservable: Observable[Unit] = + liveNotifiedStream.retry((i, e) => reportStreamError(i, e)) - private def getUpdatesStream(dbMatchStream: Observable[MatchedUsageGroup]) = { + private def getUpdatesStream(dbMatchStream: Observable[MatchedUsageGroup]) = { dbMatchStream.flatMap(matchUsageGroup => { // Generate unique UUID to track extract job val uuid = java.util.UUID.randomUUID.toString val dbUsageGroup = matchUsageGroup.dbUsageGroup - val usageGroup = matchUsageGroup.usageGroup + val usageGroup = 
matchUsageGroup.usageGroup dbUsageGroup.usages.foreach(g => { logger.info(s"Seen DB Usage for ${g.mediaId} (job-$uuid)") @@ -98,32 +130,46 @@ class UsageRecorder(usageMetrics: UsageMetrics, usageTable: UsageTable, usageStr logger.info(s"Seen Stream Usage for ${g.mediaId} (job-$uuid)") }) - val deletes = (dbUsageGroup.usages -- usageGroup.usages).map(usageTable.delete) - val creates = (if(usageGroup.isReindex) usageGroup.usages else usageGroup.usages -- dbUsageGroup.usages) + val deletes = + (dbUsageGroup.usages -- usageGroup.usages).map(usageTable.delete) + val creates = (if (usageGroup.isReindex) usageGroup.usages + else usageGroup.usages -- dbUsageGroup.usages) .map(usageTable.create) - val updates = (if(usageGroup.isReindex) Set() else usageGroup.usages & dbUsageGroup.usages) + val updates = (if (usageGroup.isReindex) Set() + else usageGroup.usages & dbUsageGroup.usages) .map(usageTable.update) - logger.info(s"DB Operations d(${deletes.size}), u(${updates.size}), c(${creates.size}) (job-$uuid)") + logger.info( + s"DB Operations d(${deletes.size}), u(${updates.size}), c(${creates.size}) (job-$uuid)" + ) - Observable.from(deletes ++ updates ++ creates).flatten[JsObject] + Observable + .from(deletes ++ updates ++ creates) + .flatten[JsObject] .map(recordUpdate) - .toSeq.map(MatchedUsageUpdate(_, matchUsageGroup)) + .toSeq + .map(MatchedUsageUpdate(_, matchUsageGroup)) }) } - private def getNotificationStream(dbUpdateStream: Observable[MatchedUsageUpdate]) = { + private def getNotificationStream( + dbUpdateStream: Observable[MatchedUsageUpdate] + ) = { dbUpdateStream.flatMap(matchedUsageUpdates => { def buildNotifications(usages: Set[MediaUsage]) = Observable.from( usages .filter(_.isGridLikeId) .map(_.mediaId) - .toList.distinct.map(usageNotice.build)) + .toList + .distinct + .map(usageNotice.build) + ) val usageGroup = matchedUsageUpdates.matchUsageGroup.usageGroup val dbUsageGroup = matchedUsageUpdates.matchUsageGroup.dbUsageGroup - buildNotifications(usageGroup.usages ++ dbUsageGroup.usages).flatten[UsageNotice] + buildNotifications(usageGroup.usages ++ dbUsageGroup.usages) + .flatten[UsageNotice] }) } diff --git a/usage/app/lib/UsageStream.scala b/usage/app/lib/UsageStream.scala index 29888405ab..2a7dddf1ba 100644 --- a/usage/app/lib/UsageStream.scala +++ b/usage/app/lib/UsageStream.scala @@ -1,30 +1,42 @@ package lib -import com.gu.mediaservice.model.usage.{PendingUsageStatus, PublishedUsageStatus} +import com.gu.mediaservice.model.usage.{ + PendingUsageStatus, + PublishedUsageStatus +} import model._ import rx.lang.scala.Observable class UsageStream(usageGroup: UsageGroupOps) { - val previewContentStream: Observable[ContentContainer] = PreviewCrierContentStream.observable - val liveContentStream: Observable[ContentContainer] = LiveCrierContentStream.observable + val previewContentStream: Observable[ContentContainer] = + PreviewCrierContentStream.observable + val liveContentStream: Observable[ContentContainer] = + LiveCrierContentStream.observable - val previewObservable: Observable[UsageGroup] = getObservable(previewContentStream) + val previewObservable: Observable[UsageGroup] = getObservable( + previewContentStream + ) val liveObservable: Observable[UsageGroup] = getObservable(liveContentStream) def createStatus(container: ContentContainer) = container match { - case PreviewContentItem(_,_,_) => PendingUsageStatus - case LiveContentItem(_,_,_) => PublishedUsageStatus + case PreviewContentItem(_, _, _) => PendingUsageStatus + case LiveContentItem(_, _, _) => PublishedUsageStatus } 
private def getObservable(contentStream: Observable[ContentContainer]) = { contentStream.flatMap((container: ContentContainer) => { - val usageGroupOption: Option[UsageGroup] = usageGroup.build(container.content, createStatus(container), container.lastModified, container.isReindex) + val usageGroupOption: Option[UsageGroup] = usageGroup.build( + container.content, + createStatus(container), + container.lastModified, + container.isReindex + ) val observable: Observable[UsageGroup] = usageGroupOption match { case Some(usgGroup) => Observable.from(Some(usgGroup)) - case _ => Observable.empty + case _ => Observable.empty } observable diff --git a/usage/app/model/ContentWrapper.scala b/usage/app/model/ContentWrapper.scala index cfe42ef58c..4bd08a6e9b 100644 --- a/usage/app/model/ContentWrapper.scala +++ b/usage/app/model/ContentWrapper.scala @@ -5,19 +5,24 @@ import com.gu.mediaservice.model.usage.UsageStatus import org.joda.time.DateTime - case class ContentWrapper( - id: String, - status: UsageStatus, - lastModified: DateTime, - content: Content + id: String, + status: UsageStatus, + lastModified: DateTime, + content: Content ) object ContentWrapper { - def build(content: Content, status: UsageStatus, lastModified: DateTime): Option[ContentWrapper] = { + def build( + content: Content, + status: UsageStatus, + lastModified: DateTime + ): Option[ContentWrapper] = { extractId(content).map(ContentWrapper(_, status, lastModified, content)) } def extractId(content: Content): Option[String] = { - content.fields.flatMap(_.internalComposerCode).map(composerId => s"composer/${composerId}") + content.fields + .flatMap(_.internalComposerCode) + .map(composerId => s"composer/${composerId}") } } diff --git a/usage/app/model/DownloadUsageRequest.scala b/usage/app/model/DownloadUsageRequest.scala index 86ffbc3e8b..21eb7ec98c 100644 --- a/usage/app/model/DownloadUsageRequest.scala +++ b/usage/app/model/DownloadUsageRequest.scala @@ -1,13 +1,17 @@ package model -import com.gu.mediaservice.model.usage.{DownloadUsageMetadata, DownloadedUsageStatus, UsageStatus} +import com.gu.mediaservice.model.usage.{ + DownloadUsageMetadata, + DownloadedUsageStatus, + UsageStatus +} import org.joda.time.DateTime import play.api.libs.json.{JodaReads, JodaWrites, Json, Reads, Writes} -case class DownloadUsageRequest ( - dateAdded: DateTime, - downloadedBy: String, - mediaId: String +case class DownloadUsageRequest( + dateAdded: DateTime, + downloadedBy: String, + mediaId: String ) { val metadata: DownloadUsageMetadata = DownloadUsageMetadata(downloadedBy) val status: UsageStatus = DownloadedUsageStatus @@ -16,7 +20,8 @@ object DownloadUsageRequest { import JodaWrites._ import JodaReads._ - implicit val reads: Reads[DownloadUsageRequest] = Json.reads[DownloadUsageRequest] - implicit val writes: Writes[DownloadUsageRequest] = Json.writes[DownloadUsageRequest] + implicit val reads: Reads[DownloadUsageRequest] = + Json.reads[DownloadUsageRequest] + implicit val writes: Writes[DownloadUsageRequest] = + Json.writes[DownloadUsageRequest] } - diff --git a/usage/app/model/FrontUsageRequest.scala b/usage/app/model/FrontUsageRequest.scala index 0bca72db42..7073fbc028 100644 --- a/usage/app/model/FrontUsageRequest.scala +++ b/usage/app/model/FrontUsageRequest.scala @@ -1,15 +1,18 @@ package model -import com.gu.mediaservice.model.usage.{FrontUsageMetadata, UnknownUsageStatus, UsageStatus} +import com.gu.mediaservice.model.usage.{ + FrontUsageMetadata, + UnknownUsageStatus, + UsageStatus +} import org.joda.time.DateTime import 
play.api.libs.json._ - -case class FrontUsageRequest ( - dateAdded: DateTime, - addedBy: String, - front: String, - mediaId: String +case class FrontUsageRequest( + dateAdded: DateTime, + addedBy: String, + front: String, + mediaId: String ) { val metadata: FrontUsageMetadata = FrontUsageMetadata(addedBy, front) val status: UsageStatus = UnknownUsageStatus @@ -20,5 +23,6 @@ object FrontUsageRequest { import JodaReads._ implicit val reads: Reads[FrontUsageRequest] = Json.reads[FrontUsageRequest] - implicit val writes: Writes[FrontUsageRequest] = Json.writes[FrontUsageRequest] + implicit val writes: Writes[FrontUsageRequest] = + Json.writes[FrontUsageRequest] } diff --git a/usage/app/model/PrintUsageRequest.scala b/usage/app/model/PrintUsageRequest.scala index 99bd93c630..9027277008 100644 --- a/usage/app/model/PrintUsageRequest.scala +++ b/usage/app/model/PrintUsageRequest.scala @@ -4,19 +4,19 @@ import com.gu.mediaservice.model.usage.{PrintUsageMetadata, UsageStatus} import org.joda.time.DateTime import play.api.libs.json._ - case class PrintUsageRequest(printUsageRecords: List[PrintUsageRecord]) object PrintUsageRequest { implicit val reads: Reads[PrintUsageRequest] = Json.reads[PrintUsageRequest] - implicit val writes: Writes[PrintUsageRequest] = Json.writes[PrintUsageRequest] + implicit val writes: Writes[PrintUsageRequest] = + Json.writes[PrintUsageRequest] } case class PrintUsageRecord( - dateAdded: DateTime, - mediaId: String, - printUsageMetadata: PrintUsageMetadata, - containerId: String, - usageId: String, - usageStatus: UsageStatus + dateAdded: DateTime, + mediaId: String, + printUsageMetadata: PrintUsageMetadata, + containerId: String, + usageId: String, + usageStatus: UsageStatus ) object PrintUsageRecord { import JodaWrites._ @@ -25,4 +25,3 @@ object PrintUsageRecord { implicit val reads: Reads[PrintUsageRecord] = Json.reads[PrintUsageRecord] implicit val writes: Writes[PrintUsageRecord] = Json.writes[PrintUsageRecord] } - diff --git a/usage/app/model/SyndicationUsageRequest.scala b/usage/app/model/SyndicationUsageRequest.scala index ff05282034..2e5702350f 100644 --- a/usage/app/model/SyndicationUsageRequest.scala +++ b/usage/app/model/SyndicationUsageRequest.scala @@ -1,13 +1,17 @@ package model -import com.gu.mediaservice.model.usage.{SyndicatedUsageStatus, SyndicationUsageMetadata, UsageStatus} +import com.gu.mediaservice.model.usage.{ + SyndicatedUsageStatus, + SyndicationUsageMetadata, + UsageStatus +} import org.joda.time.DateTime import play.api.libs.json._ -case class SyndicationUsageRequest ( - partnerName: String, - mediaId: String, - dateAdded: DateTime +case class SyndicationUsageRequest( + partnerName: String, + mediaId: String, + dateAdded: DateTime ) { val status: UsageStatus = SyndicatedUsageStatus val metadata: SyndicationUsageMetadata = SyndicationUsageMetadata(partnerName) @@ -16,6 +20,8 @@ object SyndicationUsageRequest { import JodaWrites._ import JodaReads._ - implicit val reads: Reads[SyndicationUsageRequest] = Json.reads[SyndicationUsageRequest] - implicit val writes: Writes[SyndicationUsageRequest] = Json.writes[SyndicationUsageRequest] + implicit val reads: Reads[SyndicationUsageRequest] = + Json.reads[SyndicationUsageRequest] + implicit val writes: Writes[SyndicationUsageRequest] = + Json.writes[SyndicationUsageRequest] } diff --git a/usage/app/model/UsageGroup.scala b/usage/app/model/UsageGroup.scala index acf2c0b2bc..5064c55e53 100644 --- a/usage/app/model/UsageGroup.scala +++ b/usage/app/model/UsageGroup.scala @@ -4,63 +4,92 @@ import 
play.api.libs.json._
 import com.gu.contentapi.client.model.v1.{Content, Element, ElementType}
 import com.gu.contentatom.thrift.{Atom, AtomData}
 import com.gu.mediaservice.lib.logging.GridLogging
-import com.gu.mediaservice.model.usage.{DigitalUsageMetadata, MediaUsage, PublishedUsageStatus, UsageStatus}
+import com.gu.mediaservice.model.usage.{
+  DigitalUsageMetadata,
+  MediaUsage,
+  PublishedUsageStatus,
+  UsageStatus
+}
 import lib.{LiveContentApi, MD5, UsageConfig, UsageMetadataBuilder}
 import org.joda.time.DateTime
 import lib.MediaUsageBuilder

 case class UsageGroup(
-  usages: Set[MediaUsage],
-  grouping: String,
-  status: UsageStatus,
-  lastModified: DateTime,
-  isReindex: Boolean = false
+    usages: Set[MediaUsage],
+    grouping: String,
+    status: UsageStatus,
+    lastModified: DateTime,
+    isReindex: Boolean = false
 )

-class UsageGroupOps(config: UsageConfig, liveContentApi: LiveContentApi, mediaWrapperOps: MediaWrapperOps)
-  extends GridLogging {
+class UsageGroupOps(
+    config: UsageConfig,
+    liveContentApi: LiveContentApi,
+    mediaWrapperOps: MediaWrapperOps
+) extends GridLogging {

   def buildId(contentWrapper: ContentWrapper) = contentWrapper.id
-  def buildId(printUsage: PrintUsageRecord) = s"print/${MD5.hash(List(
-    Some(printUsage.mediaId),
-    Some(printUsage.printUsageMetadata.pageNumber),
-    Some(printUsage.printUsageMetadata.sectionCode),
-    Some(printUsage.printUsageMetadata.issueDate)
-  ).flatten.map(_.toString).mkString("_"))}"
-
-  def buildId(syndicationUsageRequest: SyndicationUsageRequest): String = s"syndication/${
-    MD5.hash(List(
-      syndicationUsageRequest.metadata.partnerName,
-      syndicationUsageRequest.mediaId
-    ).mkString("_"))
-  }"
-
-  def buildId(frontUsageRequest: FrontUsageRequest): String = s"front/${
-    MD5.hash(List(
-      frontUsageRequest.mediaId,
-      frontUsageRequest.metadata.front
-    ).mkString("_"))
-  }"
-
-  def buildId(downloadUsageRequest: DownloadUsageRequest): String = s"download/${
-    MD5.hash(List(
-      downloadUsageRequest.mediaId,
-      downloadUsageRequest.metadata.downloadedBy
-    ).mkString("_"))
-  }"
-
-  def build(content: Content, status: UsageStatus, lastModified: DateTime, isReindex: Boolean) =
-    ContentWrapper.build(content, status, lastModified).map(contentWrapper => {
-      val usages = createUsages(contentWrapper, isReindex)
-      logger.info(s"Built UsageGroup: ${contentWrapper.id}")
-      UsageGroup(usages.toSet, contentWrapper.id, status, lastModified, isReindex)
-    })
+  def buildId(printUsage: PrintUsageRecord) = s"print/${MD5.hash(
+    List(
+      Some(printUsage.mediaId),
+      Some(printUsage.printUsageMetadata.pageNumber),
+      Some(printUsage.printUsageMetadata.sectionCode),
+      Some(printUsage.printUsageMetadata.issueDate)
+    ).flatten.map(_.toString).mkString("_")
+  )}"
+
+  def buildId(syndicationUsageRequest: SyndicationUsageRequest): String =
+    s"syndication/${MD5.hash(
+      List(
+        syndicationUsageRequest.metadata.partnerName,
+        syndicationUsageRequest.mediaId
+      ).mkString("_")
+    )}"
+
+  def buildId(frontUsageRequest: FrontUsageRequest): String =
+    s"front/${MD5.hash(
+      List(
+        frontUsageRequest.mediaId,
+        frontUsageRequest.metadata.front
+      ).mkString("_")
+    )}"
+
+  def buildId(downloadUsageRequest: DownloadUsageRequest): String =
+    s"download/${MD5.hash(
+      List(
+        downloadUsageRequest.mediaId,
+        downloadUsageRequest.metadata.downloadedBy
+      ).mkString("_")
+    )}"
+
+  def build(
+      content: Content,
+      status: UsageStatus,
+      lastModified: DateTime,
+      isReindex: Boolean
+  ) =
+    ContentWrapper
+      .build(content, status, lastModified)
+      .map(contentWrapper => {
+        val usages = createUsages(contentWrapper, isReindex)
+        logger.info(s"Built UsageGroup: ${contentWrapper.id}")
+        UsageGroup(
+          usages.toSet,
+          contentWrapper.id,
+          status,
+          lastModified,
+          isReindex
+        )
+      })

   def build(printUsageRecords: List[PrintUsageRecord]) =
     printUsageRecords.map(printUsageRecord => {
       val usageId = UsageIdBuilder.build(printUsageRecord)
       UsageGroup(
-        Set(MediaUsageBuilder.build(printUsageRecord, usageId, buildId(printUsageRecord))),
+        Set(
+          MediaUsageBuilder
+            .build(printUsageRecord, usageId, buildId(printUsageRecord))
+        ),
         usageId.toString,
         printUsageRecord.usageStatus,
         printUsageRecord.dateAdded
@@ -106,20 +135,34 @@ class UsageGroupOps(config: UsageConfig, liveContentApi: LiveContentApi, mediaWr
     logger.info(s"Extracting images (job-$uuid) from ${content.id}")

-    val mediaAtomsUsages = extractMediaAtoms(uuid, content, usageStatus, isReindex).zipWithIndex.flatMap { case (atom, index) =>
-      getImageId(atom) match {
-        case Some(id) =>
-          val mediaWrapper = mediaWrapperOps.build(index, id, contentWrapper, buildId(contentWrapper))
+    val mediaAtomsUsages =
+      extractMediaAtoms(uuid, content, usageStatus, isReindex).zipWithIndex
+        .flatMap { case (atom, index) =>
+          getImageId(atom) match {
+            case Some(id) =>
+              val mediaWrapper = mediaWrapperOps.build(
+                index,
+                id,
+                contentWrapper,
+                buildId(contentWrapper)
+              )
+              val usage = MediaUsageBuilder.build(mediaWrapper)
+              Seq(createUsagesLogging(usage))
+            case None => Seq.empty
+          }
+        }
+    val imageElementUsages =
+      extractImageElements(uuid, content, usageStatus, isReindex).zipWithIndex
+        .map { case (element, index) =>
+          val mediaWrapper = mediaWrapperOps.build(
+            index,
+            element.id,
+            contentWrapper,
+            buildId(contentWrapper)
+          )
           val usage = MediaUsageBuilder.build(mediaWrapper)
-          Seq(createUsagesLogging(usage))
-        case None => Seq.empty
-      }
-    }
-    val imageElementUsages = extractImageElements(uuid, content, usageStatus, isReindex).zipWithIndex.map { case (element, index) =>
-      val mediaWrapper = mediaWrapperOps.build(index, element.id, contentWrapper, buildId(contentWrapper))
-      val usage = MediaUsageBuilder.build(mediaWrapper)
-      createUsagesLogging(usage)
-    }
+          createUsagesLogging(usage)
+        }

     mediaAtomsUsages ++ imageElementUsages
   }
@@ -137,16 +180,25 @@ class UsageGroupOps(config: UsageConfig, liveContentApi: LiveContentApi, mediaWr
     usage
   }

-  private def isNewContent(content: Content, usageStatus: UsageStatus): Boolean = {
+  private def isNewContent(
+      content: Content,
+      usageStatus: UsageStatus
+  ): Boolean = {
     val dateLimit = new DateTime(config.usageDateLimit)
     val contentFirstPublished = liveContentApi.getContentFirstPublished(content)
     usageStatus match {
-      case PublishedUsageStatus => contentFirstPublished.exists(_.isAfter(dateLimit))
+      case PublishedUsageStatus =>
+        contentFirstPublished.exists(_.isAfter(dateLimit))
       case _ => true
     }
   }

-  private def extractMediaAtoms(uuid: String, content: Content, usageStatus: UsageStatus, isReindex: Boolean) = {
+  private def extractMediaAtoms(
+      uuid: String,
+      content: Content,
+      usageStatus: UsageStatus,
+      isReindex: Boolean
+  ) = {
     val isNew = isNewContent(content, usageStatus)
     val shouldRecordUsages = isNew || isReindex
@@ -157,13 +209,19 @@ class UsageGroupOps(config: UsageConfig, liveContentApi: LiveContentApi, mediaWr
       if (groupedMediaAtoms.isEmpty) {
         logger.info(s"No Matching media atoms found (job-$uuid)")
       } else {
-        logger.info(s"${groupedMediaAtoms.length} media atoms found (job-$uuid)")
-        groupedMediaAtoms.foreach(atom => logger.info(s"Matching media atom ${atom.id} found (job-$uuid)"))
+        logger.info(
+          s"${groupedMediaAtoms.length} media atoms found (job-$uuid)"
+        )
+        groupedMediaAtoms.foreach(atom =>
+          logger.info(s"Matching media atom ${atom.id} found (job-$uuid)")
+        )
       }
       groupedMediaAtoms
     } else {
-      logger.info(s"Failed shouldRecordUsages for media atoms: isNew-$isNew isReindex-$isReindex (job-$uuid)")
+      logger.info(
+        s"Failed shouldRecordUsages for media atoms: isNew-$isNew isReindex-$isReindex (job-$uuid)"
+      )
       Seq.empty
     }
   }
@@ -173,7 +231,7 @@ class UsageGroupOps(config: UsageConfig, liveContentApi: LiveContentApi, mediaWr
       case Some(atoms) =>
         atoms.media match {
           case Some(mediaAtoms) => filterOutAtomsWithNoImage(mediaAtoms)
-          case _ => Seq.empty
+          case _                => Seq.empty
         }
       case _ => Seq.empty
     }
@@ -192,7 +250,8 @@ class UsageGroupOps(config: UsageConfig, liveContentApi: LiveContentApi, mediaWr
     try {
       val posterImage = atom.data.asInstanceOf[AtomData.Media].media.posterImage
       posterImage match {
-        case Some(image) => Some(image.mediaId.replace(s"${config.apiUri}/images/", ""))
+        case Some(image) =>
+          Some(image.mediaId.replace(s"${config.apiUri}/images/", ""))
         case _ => None
       }
     } catch {
@@ -200,7 +259,12 @@ class UsageGroupOps(config: UsageConfig, liveContentApi: LiveContentApi, mediaWr
     }
   }

-  private def extractImageElements(uuid: String, content: Content, usageStatus: UsageStatus, isReindex: Boolean): Seq[Element] = {
+  private def extractImageElements(
+      uuid: String,
+      content: Content,
+      usageStatus: UsageStatus,
+      isReindex: Boolean
+  ): Seq[Element] = {
     val isNew = isNewContent(content, usageStatus)
     val shouldRecordUsages = isNew || isReindex
@@ -213,22 +277,28 @@ class UsageGroupOps(config: UsageConfig, liveContentApi: LiveContentApi, mediaWr
       } else {
         groupedElements.foreach(elements => {
           logger.info(s"${elements.length} elements found (job-$uuid)")
-          elements.foreach(element => logger.info(s"Matching element ${element.id} found (job-$uuid)"))
+          elements.foreach(element =>
+            logger.info(s"Matching element ${element.id} found (job-$uuid)")
+          )
         })
       }
       groupedElements.getOrElse(Seq.empty)
     } else {
-      logger.info(s"Failed shouldRecordUsages: isNew-$isNew isReindex-$isReindex (job-$uuid)")
+      logger.info(
+        s"Failed shouldRecordUsages: isNew-$isNew isReindex-$isReindex (job-$uuid)"
+      )
       Seq.empty
     }
   }

   private def groupImageElements(content: Content): Option[Seq[Element]] = {
     content.elements.map(elements => {
-      elements.filter(_.`type` == ElementType.Image)
+      elements
+        .filter(_.`type` == ElementType.Image)
         .groupBy(_.id)
-        .map(_._2.head).to[collection.immutable.Seq]
+        .map(_._2.head)
+        .to[collection.immutable.Seq]
     })
   }
 }
@@ -239,11 +309,24 @@ case class MediaWrapper(
     usageGroupId: String,
     contentStatus: UsageStatus,
     usageMetadata: DigitalUsageMetadata,
-    lastModified: DateTime)
+    lastModified: DateTime
+)

 class MediaWrapperOps(usageMetadataBuilder: UsageMetadataBuilder) {
-  def build(index: Int, mediaId: String, contentWrapper: ContentWrapper, usageGroupId: String): MediaWrapper = {
+  def build(
+      index: Int,
+      mediaId: String,
+      contentWrapper: ContentWrapper,
+      usageGroupId: String
+  ): MediaWrapper = {
     val usageMetadata = usageMetadataBuilder.build(contentWrapper.content)
-    MediaWrapper(index, mediaId, usageGroupId, contentWrapper.status, usageMetadata, contentWrapper.lastModified)
+    MediaWrapper(
+      index,
+      mediaId,
+      usageGroupId,
+      contentWrapper.status,
+      usageMetadata,
+      contentWrapper.lastModified
+    )
   }
 }
diff --git a/usage/app/model/UsageIdBuilder.scala b/usage/app/model/UsageIdBuilder.scala
index ca0e7f16f2..eb3959e975 100644
--- a/usage/app/model/UsageIdBuilder.scala
+++ b/usage/app/model/UsageIdBuilder.scala
@@ -7,35 +7,45 @@ object UsageIdBuilder {
   def buildId(parts: List[Option[Any]]) =
     UsageId(MD5.hash(parts.flatten.map(_.toString).mkString("_")))

-  def build(printUsageRecord: PrintUsageRecord) = buildId(List(
-    Some(printUsageRecord.mediaId),
-    Some(printUsageRecord.printUsageMetadata.pageNumber),
-    Some(printUsageRecord.printUsageMetadata.sectionCode),
-    Some(printUsageRecord.printUsageMetadata.issueDate),
-    Some(printUsageRecord.usageStatus)
-  ))
+  def build(printUsageRecord: PrintUsageRecord) = buildId(
+    List(
+      Some(printUsageRecord.mediaId),
+      Some(printUsageRecord.printUsageMetadata.pageNumber),
+      Some(printUsageRecord.printUsageMetadata.sectionCode),
+      Some(printUsageRecord.printUsageMetadata.issueDate),
+      Some(printUsageRecord.usageStatus)
+    )
+  )

-  def build(mediaWrapper: MediaWrapper) = buildId(List(
-    Some(mediaWrapper.mediaId),
-    Some(mediaWrapper.index),
-    Some(mediaWrapper.contentStatus)
-  ))
+  def build(mediaWrapper: MediaWrapper) = buildId(
+    List(
+      Some(mediaWrapper.mediaId),
+      Some(mediaWrapper.index),
+      Some(mediaWrapper.contentStatus)
+    )
+  )

-  def build(syndicationUsageRequest: SyndicationUsageRequest) = buildId(List(
-    Some(syndicationUsageRequest.mediaId),
-    Some(syndicationUsageRequest.metadata.partnerName),
-    Some(syndicationUsageRequest.status)
-  ))
+  def build(syndicationUsageRequest: SyndicationUsageRequest) = buildId(
+    List(
+      Some(syndicationUsageRequest.mediaId),
+      Some(syndicationUsageRequest.metadata.partnerName),
+      Some(syndicationUsageRequest.status)
+    )
+  )

-  def build(frontUsageRequest: FrontUsageRequest) = buildId(List(
-    Some(frontUsageRequest.mediaId),
-    Some(frontUsageRequest.metadata.front),
-    Some(frontUsageRequest.status)
-  ))
+  def build(frontUsageRequest: FrontUsageRequest) = buildId(
+    List(
+      Some(frontUsageRequest.mediaId),
+      Some(frontUsageRequest.metadata.front),
+      Some(frontUsageRequest.status)
+    )
+  )

-  def build(downloadUsageRequest: DownloadUsageRequest) = buildId(List(
-    Some(downloadUsageRequest.mediaId),
-    Some(downloadUsageRequest.metadata.downloadedBy),
-    Some(downloadUsageRequest.status)
-  ))
+  def build(downloadUsageRequest: DownloadUsageRequest) = buildId(
+    List(
+      Some(downloadUsageRequest.mediaId),
+      Some(downloadUsageRequest.metadata.downloadedBy),
+      Some(downloadUsageRequest.status)
+    )
+  )
 }
diff --git a/usage/app/model/UsageRecord.scala b/usage/app/model/UsageRecord.scala
index 5ef81ff517..213d3761a1 100644
--- a/usage/app/model/UsageRecord.scala
+++ b/usage/app/model/UsageRecord.scala
@@ -9,22 +9,22 @@ import scala.collection.JavaConverters._
 import org.joda.time.DateTime

 case class UsageRecord(
-  hashKey: String,
-  rangeKey: String,
-  mediaId: Option[String] = None,
-  usageType: Option[UsageType] = None,
-  mediaType: Option[String] = None,
-  lastModified: Option[DateTime] = None,
-  usageStatus: Option[String] = None,
-  printUsageMetadata: Option[PrintUsageMetadata] = None,
-  digitalUsageMetadata: Option[DigitalUsageMetadata] = None,
-  syndicationUsageMetadata: Option[SyndicationUsageMetadata] = None,
-  frontUsageMetadata: Option[FrontUsageMetadata] = None,
-  downloadUsageMetadata: Option[DownloadUsageMetadata] = None,
-  dateAdded: Option[DateTime] = None,
-  // Either is used here to represent 3 possible states:
-  // remove-date, add-date and no-date
-  dateRemoved: Either[String, Option[DateTime]] = Right(None)
+    hashKey: String,
+    rangeKey: String,
+    mediaId: Option[String] = None,
+    usageType: Option[UsageType] = None,
+    mediaType: Option[String] = None,
+    lastModified: Option[DateTime] = None,
+    usageStatus: Option[String] = None,
+    printUsageMetadata: Option[PrintUsageMetadata] = None,
+    digitalUsageMetadata: Option[DigitalUsageMetadata] = None,
+    syndicationUsageMetadata: Option[SyndicationUsageMetadata] = None,
+    frontUsageMetadata: Option[FrontUsageMetadata] = None,
+    downloadUsageMetadata: Option[DownloadUsageMetadata] = None,
+    dateAdded: Option[DateTime] = None,
+    // Either is used here to represent 3 possible states:
+    // remove-date, add-date and no-date
+    dateRemoved: Either[String, Option[DateTime]] = Right(None)
 ) {
   def toXSpec = {
     (new ExpressionSpecBuilder() <| (xspec => {
@@ -34,11 +34,21 @@ case class UsageRecord(
       mediaType.filter(_.nonEmpty).map(S("media_type").set(_)),
       lastModified.map(lastMod => N("last_modified").set(lastMod.getMillis)),
       usageStatus.filter(_.nonEmpty).map(S("usage_status").set(_)),
-      printUsageMetadata.map(_.toMap).map(map => M("print_metadata").set(map.asJava)),
-      digitalUsageMetadata.map(_.toMap).map(map => M("digital_metadata").set(map.asJava)),
-      syndicationUsageMetadata.map(_.toMap).map(map => M("syndication_metadata").set(map.asJava)),
-      frontUsageMetadata.map(_.toMap).map(map => M("front_metadata").set(map.asJava)),
-      downloadUsageMetadata.map(_.toMap).map(map => M("download_metadata").set(map.asJava)),
+      printUsageMetadata
+        .map(_.toMap)
+        .map(map => M("print_metadata").set(map.asJava)),
+      digitalUsageMetadata
+        .map(_.toMap)
+        .map(map => M("digital_metadata").set(map.asJava)),
+      syndicationUsageMetadata
+        .map(_.toMap)
+        .map(map => M("syndication_metadata").set(map.asJava)),
+      frontUsageMetadata
+        .map(_.toMap)
+        .map(map => M("front_metadata").set(map.asJava)),
+      downloadUsageMetadata
+        .map(_.toMap)
+        .map(map => M("download_metadata").set(map.asJava)),
       dateAdded.map(dateAdd => N("date_added").set(dateAdd.getMillis)),
       dateRemoved.fold(
         _ => Some(N("date_removed").remove),
diff --git a/usage/app/model/UsageTable.scala b/usage/app/model/UsageTable.scala
index bcc99a9880..21f1238eb7 100644
--- a/usage/app/model/UsageTable.scala
+++ b/usage/app/model/UsageTable.scala
@@ -1,12 +1,23 @@
 package model

-import com.amazonaws.services.dynamodbv2.document.spec.{DeleteItemSpec, UpdateItemSpec}
-import com.amazonaws.services.dynamodbv2.document.{KeyAttribute, RangeKeyCondition}
+import com.amazonaws.services.dynamodbv2.document.spec.{
+  DeleteItemSpec,
+  UpdateItemSpec
+}
+import com.amazonaws.services.dynamodbv2.document.{
+  KeyAttribute,
+  RangeKeyCondition
+}
 import com.amazonaws.services.dynamodbv2.model.ReturnValue
 import com.gu.mediaservice.lib.aws.DynamoDB
 import com.gu.mediaservice.lib.logging.GridLogging
 import com.gu.mediaservice.lib.usage.ItemToMediaUsage
-import com.gu.mediaservice.model.usage.{MediaUsage, PendingUsageStatus, PublishedUsageStatus, UsageTableFullKey}
+import com.gu.mediaservice.model.usage.{
+  MediaUsage,
+  PendingUsageStatus,
+  PublishedUsageStatus,
+  UsageTableFullKey
+}
 import lib.{BadInputException, UsageConfig}
 import org.joda.time.DateTime
 import play.api.libs.json._
@@ -17,21 +28,27 @@ import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.Future
 import scala.util.Try

-class UsageTable(config: UsageConfig) extends DynamoDB(config, config.usageRecordTable) with GridLogging {
+class UsageTable(config: UsageConfig)
+    extends DynamoDB(config, config.usageRecordTable)
+    with GridLogging {

   val hashKeyName = "grouping"
   val rangeKeyName = "usage_id"
   val imageIndexName = "media_id"

   def queryByUsageId(id: String): Future[Option[MediaUsage]] = Future {
-    UsageTableFullKey.build(id).flatMap((tableFullKey: UsageTableFullKey) => {
-      val keyAttribute: KeyAttribute = new KeyAttribute(hashKeyName, tableFullKey.hashKey)
-      val rangeKeyCondition: RangeKeyCondition = new RangeKeyCondition(rangeKeyName).eq(tableFullKey.rangeKey)
+    UsageTableFullKey
+      .build(id)
+      .flatMap((tableFullKey: UsageTableFullKey) => {
+        val keyAttribute: KeyAttribute =
+          new KeyAttribute(hashKeyName, tableFullKey.hashKey)
+        val rangeKeyCondition: RangeKeyCondition =
+          new RangeKeyCondition(rangeKeyName).eq(tableFullKey.rangeKey)

-      val queryResult = table.query(keyAttribute, rangeKeyCondition)
+        val queryResult = table.query(keyAttribute, rangeKeyCondition)

-      queryResult.asScala.map(ItemToMediaUsage.transform).headOption
-    })
+        queryResult.asScala.map(ItemToMediaUsage.transform).headOption
+      })
   }

   def queryByImageId(id: String): Future[Set[MediaUsage]] = Future {
@@ -43,32 +60,35 @@ class UsageTable(config: UsageConfig) extends DynamoDB(config, config.usageRecor
     val keyAttribute = new KeyAttribute(imageIndexName, id)
     val queryResult = imageIndex.query(keyAttribute)

-    val fullSet = queryResult.asScala.map(ItemToMediaUsage.transform).toSet[MediaUsage]
+    val fullSet =
+      queryResult.asScala.map(ItemToMediaUsage.transform).toSet[MediaUsage]

-    hidePendingIfPublished(
-      hidePendingIfRemoved(fullSet))
+    hidePendingIfPublished(hidePendingIfRemoved(fullSet))
   }

-  def hidePendingIfRemoved(usages: Set[MediaUsage]): Set[MediaUsage] = usages.filterNot((mediaUsage: MediaUsage) => {
-    mediaUsage.status match {
-      case PendingUsageStatus => mediaUsage.isRemoved
-      case _ => false
-    }
-  })
+  def hidePendingIfRemoved(usages: Set[MediaUsage]): Set[MediaUsage] =
+    usages.filterNot((mediaUsage: MediaUsage) => {
+      mediaUsage.status match {
+        case PendingUsageStatus => mediaUsage.isRemoved
+        case _                  => false
+      }
+    })

-  def hidePendingIfPublished(usages: Set[MediaUsage]): Set[MediaUsage] = usages.groupBy(_.grouping).flatMap {
-    case (grouping, groupedUsages) =>
+  def hidePendingIfPublished(usages: Set[MediaUsage]): Set[MediaUsage] = usages
+    .groupBy(_.grouping)
+    .flatMap { case (grouping, groupedUsages) =>
       val publishedUsage = groupedUsages.find(_.status match {
         case PublishedUsageStatus => true
-        case _ => false
+        case _                    => false
       })

       if (publishedUsage.isEmpty) {
-        groupedUsages.headOption
+        groupedUsages.headOption
       } else {
-        publishedUsage
+        publishedUsage
       }
-  }.toSet
+    }
+    .toSet

   def matchUsageGroup(usageGroup: UsageGroup): Observable[UsageGroup] = {
     logger.info(s"Trying to match UsageGroup: ${usageGroup.grouping}")
@@ -86,7 +106,9 @@ class UsageTable(config: UsageConfig) extends DynamoDB(config, config.usageRecor
         .filter(_.status == status)
         .toSet

-      logger.info(s"Built matched UsageGroup ${usageGroup.grouping} (${usages.size})")
+      logger.info(
+        s"Built matched UsageGroup ${usageGroup.grouping} (${usages.size})"
+      )

       UsageGroup(usages, grouping, usageGroup.status, new DateTime)
     })
@@ -104,7 +126,9 @@ class UsageTable(config: UsageConfig) extends DynamoDB(config, config.usageRecor
   def deleteRecord(mediaUsage: MediaUsage) = {
     val record = UsageRecord.buildDeleteRecord(mediaUsage)

-    logger.info(s"deleting usage ${mediaUsage.usageId} for media id ${mediaUsage.mediaId}")
+    logger.info(
+      s"deleting usage ${mediaUsage.usageId} for media id ${mediaUsage.mediaId}"
+    )

     val deleteSpec = new DeleteItemSpec()
       .withPrimaryKey(
@@ -117,24 +141,25 @@ class UsageTable(config: UsageConfig) extends DynamoDB(config, config.usageRecor
     table.deleteItem(deleteSpec)
   }

-  def updateFromRecord(record: UsageRecord): Observable[JsObject] = Observable.from(Future {
+  def updateFromRecord(record: UsageRecord): Observable[JsObject] = Observable
+    .from(Future {

-    val updateSpec = new UpdateItemSpec()
-      .withPrimaryKey(
-        hashKeyName,
-        record.hashKey,
-        rangeKeyName,
-        record.rangeKey
-      )
-      .withExpressionSpec(record.toXSpec)
-      .withReturnValues(ReturnValue.ALL_NEW)
+      val updateSpec = new UpdateItemSpec()
+        .withPrimaryKey(
+          hashKeyName,
+          record.hashKey,
+          rangeKeyName,
+          record.rangeKey
+        )
+        .withExpressionSpec(record.toXSpec)
+        .withReturnValues(ReturnValue.ALL_NEW)

-    table.updateItem(updateSpec)
+      table.updateItem(updateSpec)

-  })
-    .onErrorResumeNext(e => {
-      logger.error(s"Dynamo update fail for $record!", e)
-      Observable.error(e)
-    })
-    .map(asJsObject)
+    })
+    .onErrorResumeNext(e => {
+      logger.error(s"Dynamo update fail for $record!", e)
+      Observable.error(e)
+    })
+    .map(asJsObject)
 }