Skip to content

Commit

Permalink
feat: added Vary: Accept to all http/200 responses (#408)
Browse files — browse the repository at this point in the history
Also:
- formatting with prettier
- bumped some dependencies
  • Loading branch information
thisismana committed Jun 25, 2024
1 parent d72261a commit fdf557e
Show file tree
Hide file tree
Showing 8 changed files with 396 additions and 383 deletions.
6 changes: 6 additions & 0 deletions .prettierrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
{
"singleQuote": true,
"trailingComma": "all",
"arrowParens": "avoid",
"printWidth": 120
}
20 changes: 10 additions & 10 deletions source/image-handler/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,21 +5,21 @@
"version": "5.1.0",
"private": true,
"dependencies": {
"@aws-sdk/client-s3": "3.433.0",
"sharp": "0.33.0"
"@aws-sdk/client-s3": "3.600.0",
"sharp": "0.33.4"
},
"devDependencies": {
"@aws-lambda-powertools/logger": "1.14.0",
"@aws-lambda-powertools/logger": "1.18.1",
"@types/color": "^3.0.6",
"@types/color-name": "^1.1.3",
"@types/color-name": "^1.1.4",
"@types/sharp": "^0.32.0",
"@types/aws-lambda": "8.10.130",
"aws-sdk-client-mock": "3.0.0",
"aws-sdk-client-mock-jest": "3.0.0",
"@types/aws-lambda": "8.10.140",
"aws-sdk-client-mock": "4.0.1",
"aws-sdk-client-mock-jest": "4.0.1",
"@aws-sdk/util-stream-node": "3.374.0",
"prettier": "3.0.3",
"tsup": "7.2.0",
"vitest": "^1.0.4"
"prettier": "3.3.2",
"tsup": "7.3.0",
"vitest": "^1.6.0"
},
"scripts": {
"pretest": "npm i --quiet",
Expand Down
158 changes: 80 additions & 78 deletions source/image-handler/src/image-handler.ts
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

import {Logger} from "@aws-lambda-powertools/logger";
import {S3} from "@aws-sdk/client-s3";
import { Logger } from '@aws-lambda-powertools/logger';
import { S3 } from '@aws-sdk/client-s3';
import sharp from 'sharp';

import {LogStashFormatter} from "./lib/logging/LogStashFormatter";
import {ImageRequest} from "./image-request";
import { LogStashFormatter } from './lib/logging/LogStashFormatter';
import { ImageRequest } from './image-request';

const logger = new Logger({
serviceName: process.env.AWS_LAMBDA_FUNCTION_NAME ?? 'image-handler',
logFormatter: new LogStashFormatter(),
})
});

const ApiGWResponseSizeLimit = 6 * 1024 * 1024;

Expand Down Expand Up @@ -39,18 +39,18 @@ export class ImageHandler {
if (hasEdits || hasCropping) {
const keys = Object.keys(edits);

if (keys.includes("rotate") && edits.rotate === null) {
image = sharp(originalImage, {failOnError: false}).withMetadata();
if (keys.includes('rotate') && edits.rotate === null) {
image = sharp(originalImage, { failOnError: false }).withMetadata();
} else {
const metadata = await sharp(originalImage, {
failOnError: false
failOnError: false,
}).metadata();
if (metadata.orientation) {
image = sharp(originalImage, {failOnError: false}).withMetadata({
orientation: metadata.orientation
image = sharp(originalImage, { failOnError: false }).withMetadata({
orientation: metadata.orientation,
});
} else {
image = sharp(originalImage, {failOnError: false}).withMetadata();
image = sharp(originalImage, { failOnError: false }).withMetadata();
}
}

Expand All @@ -61,54 +61,53 @@ export class ImageHandler {
if (cropping.left + cropping.width > width || cropping.top + cropping.height > height) {
throw {
status: 400,
code: "CropOutOfBounds",
message:
`The cropping ${cropping.left},${cropping.top}x${cropping.width}:${cropping.height} is outside the image boundary of ${width}x${height}`
code: 'CropOutOfBounds',
message: `The cropping ${cropping.left},${cropping.top}x${cropping.width}:${cropping.height} is outside the image boundary of ${width}x${height}`,
};
}
if (cropping.width === 0 || cropping.height === 0) {
throw {
status: 400,
code: "CropHasZeroDimension",
message: `The cropping with dimension ${cropping.width}x${cropping.height} is invalid`
}
code: 'CropHasZeroDimension',
message: `The cropping with dimension ${cropping.width}x${cropping.height} is invalid`,
};
}
image = image.extract(cropping);
}
if (hasEdits) {
image = await this.applyEdits(image, edits);
}
} else {
image = sharp(originalImage, {failOnError: false}).withMetadata();
image = sharp(originalImage, { failOnError: false }).withMetadata();
}

if ('image/webp' === request.ContentType && request.outputFormat === "webp") {
image.webp({effort: 6, alphaQuality: 75});
} else if ("image/png" === request.ContentType) {
image.png({quality: 100, effort: 7, compressionLevel: 6});
} else if ("image/jpeg" === request.ContentType) {
image.jpeg({mozjpeg: true});
if ('image/webp' === request.ContentType && request.outputFormat === 'webp') {
image.webp({ effort: 6, alphaQuality: 75 });
} else if ('image/png' === request.ContentType) {
image.png({ palette: true, quality: 100, effort: 7, compressionLevel: 6 });
} else if ('image/jpeg' === request.ContentType) {
image.jpeg({ mozjpeg: true });
} else if (request.outputFormat !== undefined) {
image.toFormat(request.outputFormat);
}

try {
const bufferImage = await image.toBuffer();
returnImage = bufferImage.toString("base64");
returnImage = bufferImage.toString('base64');
} catch (e) {
throw {
status: 400,
code: 'Cropping failed',
message: `Cropping failed with "${e}"`
}
message: `Cropping failed with "${e}"`,
};
}

// If the converted image is larger than Lambda's payload hard limit, throw an error.
if (returnImage.length > ApiGWResponseSizeLimit) {
throw {
status: 413,
code: "TooLargeImageException",
message: `The converted image is too large to return. Actual = ${returnImage.length} - max ${ApiGWResponseSizeLimit}`
code: 'TooLargeImageException',
message: `The converted image is too large to return. Actual = ${returnImage.length} - max ${ApiGWResponseSizeLimit}`,
};
}

Expand All @@ -124,7 +123,7 @@ export class ImageHandler {
async applyEdits(image: sharp.Sharp, edits: any) {
if (edits.resize === undefined) {
edits.resize = {};
edits.resize.fit = "inside";
edits.resize.fit = 'inside';
} else {
if (edits.resize.width) edits.resize.width = Math.round(Number(edits.resize.width));
if (edits.resize.height) edits.resize.height = Math.round(Number(edits.resize.height));
Expand All @@ -133,69 +132,60 @@ export class ImageHandler {
// Apply the image edits
for (const editKey in edits) {
const value = edits[editKey];
if (editKey === "overlayWith") {
if (editKey === 'overlayWith') {
let imageMetadata = await image.metadata();
if (edits.resize) {
let imageBuffer = await image.toBuffer();
imageMetadata = await sharp(imageBuffer)
.resize(edits.resize)
.metadata();
imageMetadata = await sharp(imageBuffer).resize(edits.resize).metadata();
}

const {bucket, key, wRatio, hRatio, alpha} = value;
const overlay = await this.getOverlayImage(
bucket,
key,
wRatio,
hRatio,
alpha,
imageMetadata
);
const { bucket, key, wRatio, hRatio, alpha } = value;
const overlay = await this.getOverlayImage(bucket, key, wRatio, hRatio, alpha, imageMetadata);
const overlayMetadata = await sharp(overlay).metadata();

let {options} = value;
let { options } = value;
if (options) {
if (options.left !== undefined) {
let left = options.left;
if (isNaN(left) && left.endsWith("p")) {
left = parseInt(left.replace("p", ""));
if (isNaN(left) && left.endsWith('p')) {
left = parseInt(left.replace('p', ''));
if (left < 0) {
left = imageMetadata.width + (imageMetadata.width * left / 100) - overlayMetadata.width;
left = imageMetadata.width + (imageMetadata.width * left) / 100 - overlayMetadata.width;
} else {
left = imageMetadata.width * left / 100;
left = (imageMetadata.width * left) / 100;
}
} else {
left = parseInt(left);
if (left < 0) {
left = imageMetadata.width + left - overlayMetadata.width;
}
}
isNaN(left) ? delete options.left : options.left = left;
isNaN(left) ? delete options.left : (options.left = left);
}
if (options.top !== undefined) {
let top = options.top;
if (isNaN(top) && top.endsWith("p")) {
top = parseInt(top.replace("p", ""));
if (isNaN(top) && top.endsWith('p')) {
top = parseInt(top.replace('p', ''));
if (top < 0) {
top = imageMetadata.height + (imageMetadata.height * top / 100) - overlayMetadata.height;
top = imageMetadata.height + (imageMetadata.height * top) / 100 - overlayMetadata.height;
} else {
top = imageMetadata.height * top / 100;
top = (imageMetadata.height * top) / 100;
}
} else {
top = parseInt(top);
if (top < 0) {
top = imageMetadata.height + top - overlayMetadata.height;
}
}
isNaN(top) ? delete options.top : options.top = top;
isNaN(top) ? delete options.top : (options.top = top);
}
}

const params = [{...options, input: overlay}];
const params = [{ ...options, input: overlay }];
image.composite(params);
} else if (editKey === 'roundCrop') {
const options = value;
const imageBuffer = await image.toBuffer({resolveWithObject: true});
const imageBuffer = await image.toBuffer({ resolveWithObject: true });
let width = imageBuffer.info.width;
let height = imageBuffer.info.height;

Expand All @@ -206,14 +196,16 @@ export class ImageHandler {
const leftOffset = options.left && options.left >= 0 ? options.left : width / 2;

if (options) {
const ellipse = Buffer.from(`<svg viewBox="0 0 ${width} ${height}"> <ellipse cx="${leftOffset}" cy="${topOffset}" rx="${radiusX}" ry="${radiusY}" /></svg>`);
const params: any = [{input: ellipse, blend: 'dest-in'}];
let data = await image.composite(params)
const ellipse = Buffer.from(
`<svg viewBox="0 0 ${width} ${height}"> <ellipse cx="${leftOffset}" cy="${topOffset}" rx="${radiusX}" ry="${radiusY}" /></svg>`,
);
const params: any = [{ input: ellipse, blend: 'dest-in' }];
let data = await image
.composite(params)
.png() // transparent background instead of black background
.toBuffer();
image = sharp(data).withMetadata().trim();
}

} else {
image[editKey](value);
}
Expand All @@ -232,22 +224,29 @@ export class ImageHandler {
* @param {number} alpha - The transparency alpha to the overlay.
* @param {object} sourceImageMetadata - The metadata of the source image.
*/
async getOverlayImage(bucket: any, key: any, wRatio: any, hRatio: any, alpha: any, sourceImageMetadata: sharp.Metadata): Promise<Buffer> {
const params = {Bucket: bucket, Key: key};
async getOverlayImage(
bucket: any,
key: any,
wRatio: any,
hRatio: any,
alpha: any,
sourceImageMetadata: sharp.Metadata,
): Promise<Buffer> {
const params = { Bucket: bucket, Key: key };
try {
const {width, height}: sharp.Metadata = sourceImageMetadata;
const { width, height }: sharp.Metadata = sourceImageMetadata;
const overlayImage = await this.s3.getObject(params);
let resize: Record<any, any> = {
fit: 'inside'
fit: 'inside',
};

// Set width and height of the watermark image based on the ratio
const zeroToHundred = /^(100|[1-9]?[0-9])$/;
if (zeroToHundred.test(wRatio)) {
resize['width'] = Math.floor(width! * wRatio / 100);
resize['width'] = Math.floor((width! * wRatio) / 100);
}
if (zeroToHundred.test(hRatio)) {
resize['height'] = Math.floor(height! * hRatio / 100);
resize['height'] = Math.floor((height! * hRatio) / 100);
}

// If alpha is not within 0-100, the default alpha is 0 (fully opaque).
Expand All @@ -260,22 +259,25 @@ export class ImageHandler {
let input = Buffer.from(await overlayImage.Body?.transformToByteArray()!);
return await sharp(input)
.resize(resize)
.composite([{
input: Buffer.from([255, 255, 255, 255 * (1 - alpha / 100)]),
raw: {
width: 1,
height: 1,
channels: 4
.composite([
{
input: Buffer.from([255, 255, 255, 255 * (1 - alpha / 100)]),
raw: {
width: 1,
height: 1,
channels: 4,
},
tile: true,
blend: 'dest-in',
},
tile: true,
blend: 'dest-in'
}]).toBuffer();
])
.toBuffer();
} catch (err: any) {
throw {
status: err.statusCode ? err.statusCode : 500,
code: (err.code).toString(),
message: err.message
code: err.code.toString(),
message: err.message,
};
}
}
}
}
Loading

0 comments on commit fdf557e

Please sign in to comment.