From c6b1685f8979da3fb81336266ac332e563af2e2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eero=20H=C3=A4kkinen?= Date: Sun, 6 Feb 2022 00:16:55 +0200 Subject: [PATCH] fixup! Face detection Incorporate mediacapture-transform changes. --- index.html | 38 ++++++++++++++++++-------------------- 1 file changed, 18 insertions(+), 20 deletions(-) diff --git a/index.html b/index.html index 1d87d4c..c8de8d7 100644 --- a/index.html +++ b/index.html @@ -808,41 +808,39 @@

Examples

// Open camera with face detection enabled const stream = await navigator.mediaDevices.getUserMedia({ - video: { faceDetectionMode: 'contour', - faceDetectionNumContourPoints: { exact: 4 } } + video: { + faceDetectionMode: 'contour', + faceDetectionNumContourPoints: {exact: 4} + } }); const [videoTrack] = stream.getVideoTracks(); // Use a video worker and show to user. -const videoElement = document.querySelector("video"); -const videoGenerator = new MediaStreamTrackGenerator({kind: 'video'}); -const videoProcessor = new MediaStreamTrackProcessor({track: videoTrack}); -const videoSettings = videoTrack.getSettings(); +const videoElement = document.querySelector('video'); const videoWorker = new Worker('video-worker.js'); -videoWorker.postMessage({ - videoReadable: videoProcessor.readable, - videoWritable: videoGenerator.writable -}, [videoProcessor.readable, videoGenerator.writable]); -videoElement.srcObject = new MediaStream([videoGenerator]); +videoWorker.postMessage({track: videoTrack}, [videoTrack]); +const {data} = await new Promise(r => videoWorker.onmessage = r); +videoElement.srcObject = new MediaStream([data.videoTrack]); // video-worker.js: -self.onmessage = async function(e) { - const videoTransformer = new TransformStream({ - async transform(videoFrame, controller) { - for (const face of videoFrame.detectedFaces) { +self.onmessage = async ({data: {track}}) => { + const generator = new VideoTrackGenerator(); + self.postMessage({videoTrack: generator.track}, [generator.track]); + const {readable} = new MediaStreamTrackProcessor({track}); + const transformer = new TransformStream({ + async transform(frame, controller) { + for (const face of frame.detectedFaces) { console.log( `Face @ (${face.contour[0].x}, ${face.contour[0].y}), ` + `(${face.contour[1].x}, ${face.contour[1].y}), ` + `(${face.contour[2].x}, ${face.contour[2].y}), ` + `(${face.contour[3].x}, ${face.contour[3].y})`); - controller.enqueue(videoFrame); + controller.enqueue(frame); } } }); - 
e.data.videoReadable - .pipeThrough(videoTransformer) - .pipeTo(e.data.videoWritable); -} + await readable.pipeThrough(transformer).pipeTo(generator.writable); +};