Commit fa956ad

Merge pull request #3771 from nulib/3751-posters-dimensions
Save width and height as S3 metadata on posters
kdid authored Jan 16, 2024
2 parents 0949368 + e4adf06 commit fa956ad
Showing 3 changed files with 103 additions and 38 deletions.
16 changes: 16 additions & 0 deletions README.md
````diff
@@ -88,6 +88,22 @@ And force a re-index:
 Meadow.Data.Indexer.reindex_all()
 ```
 
+### Doing development on the Meadow Pipeline lambdas
+
+In the AWS developer environment, the lambdas associated with the pipeline are shared among developers. To develop on them and see whether your changes work, you can override the configuration to use your local files instead of the deployed lambdas. See the example below (you don't have to override them all, just the ones you need).
+
+Edit `config/dev.local.exs` to run the lambdas from your local copies through the port:
+
+```elixir
+config :meadow, :lambda,
+  digester: {:local, {Path.expand("../lambdas/digester/index.js"), "handler"}},
+  exif: {:local, {Path.expand("../lambdas/exif/index.js"), "handler"}},
+  frame_extractor: {:local, {Path.expand("../lambdas/frame-extractor/index.js"), "handler"}},
+  mediainfo: {:local, {Path.expand("../lambdas/mediainfo/index.js"), "handler"}},
+  mime_type: {:local, {Path.expand("../lambdas/mime-type/index.js"), "handler"}},
+  tiff: {:local, {Path.expand("../lambdas/pyramid-tiff/index.js"), "handler"}}
+```
+
 ### TypeScript/GraphQL Types
 
 Meadow now supports TypeScript and GraphQL types in the React app. To generate types, run the following commands:
````
7 changes: 7 additions & 0 deletions app/test/pipeline/actions/generate_poster_image_test.exs
```diff
@@ -61,6 +61,13 @@ defmodule Meadow.Pipeline.Actions.GeneratePosterImageTest do
 
       assert(object_exists?(FileSets.poster_uri_for(file_set)))
 
+      with {:ok, %{headers: headers}} <-
+             ExAws.S3.head_object(@pyramid_bucket, "posters/#{Pairtree.poster_path(file_set.id)}")
+             |> ExAws.request() do
+        assert headers |> Enum.member?({"x-amz-meta-width", "1920"})
+        assert headers |> Enum.member?({"x-amz-meta-height", "1080"})
+      end
+
       assert(
         FileSets.get_file_set!(file_set.id).derivatives["poster"] ==
           FileSets.poster_uri_for(file_set)
```
118 changes: 80 additions & 38 deletions lambdas/frame-extractor/index.js
```diff
@@ -39,32 +39,52 @@ const extractFrameFromPlaylist = async (source, destination, offset) => {
       .then(({ location, segmentOffset }) => {
         const segOffInSeconds = segmentOffset / 1000;
 
+        let dimensions = {};
+
         const s3Client = new S3ClientShim({ httpOptions: { timeout: 600000 } });
         s3Client
           .send(new GetObjectCommand({ Bucket: uri.host, Key: location }))
           .then(({ Body: readStream }) => {
             readStream.on("error", (error) => console.error(error));
 
-            let ffmpegProcess = new ffmpeg(readStream)
-              .seek(segOffInSeconds)
-              .outputOptions(["-vframes 1"])
-              .toFormat("image2")
-              .on("error", function (err, _stdout, _stderr) {
-                console.error("Cannot process video: " + err.message);
-              })
-              .on("end", function (_stdout, _stderr) {
-                console.log("Transcoding succeeded");
-              });
-
-            const uploadStream = concat((data) => {
-              uploadToS3(data, destination)
-                .then((result) => resolve(result))
-                .catch((err) => reject(err));
+            let ffmpegProcess = ffmpeg(readStream);
+
+            ffmpegProcess.ffprobe((err, data) => {
+              if (err) {
+                reject("Error running ffprobe: " + err.message);
+              } else {
+                dimensions.width = data.streams[0].width;
+                dimensions.height = data.streams[0].height;
+
+                s3Client
+                  .send(new GetObjectCommand({ Bucket: uri.host, Key: location }))
+                  .then(({ Body: secondReadStream }) => {
+                    secondReadStream.on("error", (error) => console.error(error));
+
+                    ffmpegProcess = ffmpeg(secondReadStream)
+                      .seek(segOffInSeconds)
+                      .outputOptions(["-vframes 1"])
+                      .toFormat("image2")
+                      .on("error", function (err, _stdout, _stderr) {
+                        console.error("Cannot process video: " + err.message);
+                      })
+                      .on("end", function (_stdout, _stderr) {
+                        console.log("Transcoding succeeded");
+                      });
+
+                    const uploadStream = concat((data) => {
+                      uploadToS3(data, destination, dimensions)
+                        .then((result) => resolve(result))
+                        .catch((err) => reject(err));
+                    });
+
+                    ffmpegProcess.pipe(uploadStream, { end: true });
+                  })
+                  .catch((err) => reject(err));
+              }
             });
-
-            ffmpegProcess.pipe(uploadStream, { end: true });
           })
           .catch((err) => reject(err));
       })
       .catch((err) => reject(err));
   })
   .catch((err) => reject(err));
});
```
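The dimensions come from fluent-ffmpeg's `ffprobe` wrapper, and probing consumes the read stream, which is presumably why the code fetches the object from S3 a second time before extracting the frame. A minimal sketch of the probe step, using the static form of the same call against a hypothetical local file rather than an S3 stream:

```javascript
// Sketch only: "sample.mp4" is a placeholder path, not part of this commit.
const ffmpeg = require("fluent-ffmpeg");

ffmpeg.ffprobe("sample.mp4", (err, data) => {
  if (err) return console.error("ffprobe failed: " + err.message);

  // The lambda reads data.streams[0]; since the video stream is not
  // guaranteed to come first, looking it up by codec_type is a more
  // defensive variant.
  const video = data.streams.find((s) => s.codec_type === "video");
  console.log(video.width + "x" + video.height); // e.g. 1920x1080
});
```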
```diff
@@ -80,24 +100,44 @@ const extractFrameFromVideo = async (source, destination, offset) => {
 
   return new Promise((resolve, reject) => {
     try {
-      let ffmpegProcess = new ffmpeg(readStream)
-        .seek(offset / 1000.0)
-        .outputOptions(["-vframes 1"])
-        .toFormat("image2")
-        .on("error", function (err, _stdout, _stderr) {
-          console.error("Cannot process video: " + err.message);
-        })
-        .on("end", function (_stdout, _stderr) {
-          console.log("Transcoding succeeded !");
-        });
-
-      const uploadStream = concat((data) => {
-        uploadToS3(data, destination)
-          .then((result) => resolve(result))
-          .catch((err) => reject(err));
-      });
-
-      ffmpegProcess.pipe(uploadStream, { end: true });
+      let dimensions = {};
+      let ffmpegProcess = new ffmpeg(readStream);
+
+      ffmpegProcess.ffprobe((err, data) => {
+        if (err) {
+          reject("Error running ffprobe: " + err.message);
+        } else {
+          dimensions.width = data.streams[0].width;
+          dimensions.height = data.streams[0].height;
+          console.log("Video dimensions: ", dimensions.width, "x", dimensions.height);
+
+          s3Client
+            .send(new GetObjectCommand({ Bucket: uri.host, Key: key }))
+            .then(async ({ Body: secondReadStream }) => {
+              secondReadStream.on("error", (error) => console.error(error));
+
+              ffmpegProcess = new ffmpeg(secondReadStream)
+                .seek(offset / 1000.0)
+                .outputOptions(["-vframes 1"])
+                .toFormat("image2")
+                .on("error", function (err, _stdout, _stderr) {
+                  console.error("Cannot process video: " + err.message);
+                })
+                .on("end", function (_stdout, _stderr) {
+                  console.log("Transcoding succeeded");
+                });
+
+              const uploadStream = concat((data) => {
+                uploadToS3(data, destination, dimensions)
+                  .then((result) => resolve(result))
+                  .catch((err) => reject(err));
+              });
+
+              ffmpegProcess.pipe(uploadStream, { end: true });
+            })
+            .catch((err) => reject(err));
+        }
+      });
     } catch (err) {
       reject(err);
     }
```
```diff
@@ -147,9 +187,11 @@ const parsePlaylist = async (bucket, key, offset) => {
   }
 };
 
-const uploadToS3 = (data, destination) => {
+const uploadToS3 = (data, destination, dimensions) => {
   const metadata = {
-    "Content-Type": "image"
+    "Content-Type": "image",
+    width: dimensions.width.toString(),
+    height: dimensions.height.toString()
   };
   return new Promise((resolve, reject) => {
     const poster = URI.parse(destination);
```
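The `width` and `height` entries above are user-defined S3 object metadata. S3 stores such keys under an `x-amz-meta-` prefix and returns them as response headers, which is what the new Elixir test asserts on; it is also why the diff calls `.toString()` on the probed values, since S3 metadata values must be strings. A sketch of that round trip with the AWS SDK for JavaScript v3 (the bucket and key are placeholders, and this is not the lambda's actual upload helper):

```javascript
const {
  S3Client,
  PutObjectCommand,
  HeadObjectCommand,
} = require("@aws-sdk/client-s3");

const s3 = new S3Client({});

// Placeholder names for illustration only.
const Bucket = "example-pyramid-bucket";
const Key = "posters/example-poster";

async function putPosterWithDimensions(body) {
  // User-defined metadata is stored by S3 under an x-amz-meta- prefix.
  await s3.send(
    new PutObjectCommand({
      Bucket,
      Key,
      Body: body,
      ContentType: "image/jpeg",
      Metadata: { width: "1920", height: "1080" },
    })
  );

  // A HEAD request returns the same metadata; over raw HTTP these are the
  // x-amz-meta-width / x-amz-meta-height headers the test checks for.
  const { Metadata } = await s3.send(new HeadObjectCommand({ Bucket, Key }));
  console.log(Metadata); // => { width: "1920", height: "1080" }
}
```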
