我尝试使用 Affectiva 的 FrameDetector(affdex.js)来处理现有的MP4文件。我已将视频文件和下面的HTML / JavaScript放在node.js服务器中并在localhost上运行,因此不应该出现严重的CORS问题。
探测器正确启动,但当我向其发送图像数据时,每次都会发生同样的事情:先是收到一个空的 faces 数组,随后控制台报错 worker code reported an exception14920304。我不确定如何处理该消息——有人有任何建议吗?
//- FrameDetector.pug
//- Page skeleton: a canvas that receives copied video frames, the source
//- video element, the Affectiva SDK, and the driver script.
//- (Fix: the indentation — which is structurally significant in Pug — was
//- lost; restored the conventional nesting so the template renders a valid
//- html > head/body tree.)
html
  head
    title FrameDetector Demo
    script(src='https://download.affectiva.com/js/3.2/affdex.js')
  body
    canvas#canvas
    video#video-to-analyze(preload="auto" controls="true")
      source(type="video/mp4" src="video/my-video.mp4")
    script(src='js/FrameDetector.js')
和
// FrameDetector.js
// `heartbeat` holds the setInterval id for the 1 Hz analysis loop;
// `startTimestamp` is the wall-clock time (in seconds) when analysis began.
var heartbeat, startTimestamp;
// Once the DOM is ready, size the canvas and hook the video's 'play' event
// so frames get copied onto the canvas while the video runs.
document.addEventListener('DOMContentLoaded', function(){
var v = document.getElementById('video-to-analyze');
var canvas = document.getElementById('canvas');
var context = canvas.getContext('2d');
// NOTE(review): dividing the client size by 100 shrinks the canvas bitmap to
// a handful of pixels, yet analyzeVideoFrame() reads a 640x360 region from
// it — this size mismatch appears to be what makes the detector's worker
// fail (the corrected answer below sizes the canvas 640x360 instead).
var cw = Math.floor(canvas.clientWidth / 100);
var ch = Math.floor(canvas.clientHeight / 100);
canvas.width = cw;
canvas.height = ch;
// Start the frame-copy loop as soon as playback begins.
v.addEventListener('play', function(){
draw(this,context,cw,ch);
},false);
},false);
// Repeatedly paint the current video frame onto the canvas context `c`
// at the given width/height. Returns false (and stops rescheduling) once
// the video is paused or has ended; otherwise runs again every ~20 ms.
function draw(v, c, w, h) {
  var finished = v.paused || v.ended;
  if (finished) {
    return false;
  }
  c.drawImage(v, 0, 0, w, h);
  setTimeout(function () {
    draw(v, c, w, h);
  }, 20);
}
// Snapshot the canvas pixels and hand them to the Affectiva detector,
// stamped with the elapsed time in seconds since analysis started.
function analyzeVideoFrame() {
  // Canvas that the <video> frames are being painted onto.
  var frameCanvas = document.getElementById("canvas");
  var ctx2d = frameCanvas.getContext('2d');
  // Grab a fixed 640x360 pixel region.
  var imageData = ctx2d.getImageData(0, 0, 640, 360);
  console.log("Captured imageData.", imageData);
  // Seconds elapsed since the first frame.
  var deltaTime = (new Date()).getTime() / 1000 - startTimestamp;
  detector.process(imageData, deltaTime);
}
// Success callback: log the frame timestamp, how many faces were detected,
// and the first face object (undefined when none were found).
function onImageResultsSuccess(faces, image, timestamp) {
  var firstFace = faces[0];
  console.log("onImageResultsSuccess:", timestamp, faces.length, firstFace);
}
// Failure callback: report the detector's error detail for this frame,
// then stop the polling interval so no further frames are submitted.
function onImageResultsFailure(image, timestamp, err_detail) {
  console.error("onImageResultsFailure:", timestamp, err_detail);
  clearInterval(heartbeat);
}
// Guard: the Affectiva SDK script must have loaded before this point.
if (typeof(affdex)=="undefined") {
console.log("The affdex global variable has not been loaded.");
}
// Create a frame detector tuned for large faces and enable every metric.
var detector = new affdex.FrameDetector(affdex.FaceDetectorMode.LARGE_FACES);
detector.detectAllExpressions();
detector.detectAllEmotions();
detector.detectAllAppearance();
// Once the detector is ready, start playback and sample one frame per second.
detector.addEventListener("onInitializeSuccess", function() {
document.getElementById('video-to-analyze').play();
startTimestamp = (new Date()).getTime() / 1000;
heartbeat = setInterval(analyzeVideoFrame, 1000);
});
detector.addEventListener("onInitializeFailure", function() {
console.error("Affectiva failed to initialize.");
});
// Wire the per-frame result callbacks, then kick off initialization.
detector.addEventListener("onImageResultsSuccess", onImageResultsSuccess);
detector.addEventListener("onImageResultsFailure", onImageResultsFailure);
detector.start();
控制台中的输出:
Captured imageData. ImageData {data: Uint8ClampedArray(921600), width: 640, height: 360}
onImageResultsSuccess: 0.005000114440917969 0 undefined
Captured imageData. ImageData {data: Uint8ClampedArray(921600), width: 640, height: 360}
onImageResultsFailure: 0.0009999275207519531 worker code reported an exception14920304
答案 0(得分:0)
知道了。一旦我将图像正确绘制到canvas元素,Affectiva代码就可以正常工作。这是我更正后的代码:
//- FrameDetector.pug
//- Corrected page skeleton: the canvas now matches the 640x360 region that
//- the script samples, and jQuery is loaded for the $(ready) handler.
//- (Fix: the indentation — structurally significant in Pug — was lost;
//- restored the conventional nesting so the template renders correctly.)
html
  head
    title FrameDetector Demo
    script(src='http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js')
    script(src='https://download.affectiva.com/js/3.2/affdex.js')
  body
    canvas#canvas(width="640" height="360" style="display:none;")
    video#video(preload="auto" controls="true")
      source(type="video/mp4" src="video/my-video.mp4")
    script(src='js/FrameDetector.js')
和
// FrameDetector.js
// `heartbeat` holds the setInterval id for the 1 Hz analysis loop;
// `startTimestamp` is the wall-clock time (in seconds) when analysis began.
var heartbeat, startTimestamp;
// 'play' handler for the <video>: copies frames onto the hidden canvas
// at ~30fps until the video pauses or ends.
// Fix: the original referenced `ctx`, a variable declared with `var` inside
// the $(ready) callback and therefore NOT in scope here — calling it threw a
// ReferenceError. Look the 2d context up from the DOM instead.
function onVideoPlay() {
  var $this = this; // cache the <video> element for the closure
  var ctx = document.getElementById('canvas').getContext('2d');
  (function loop() {
    if (!$this.paused && !$this.ended) {
      ctx.drawImage($this, 0, 0);
      setTimeout(loop, 1000 / 30); // drawing at 30fps
    }
  })();
}
// Capture the current canvas contents and feed them to the Affectiva
// detector together with the elapsed time (seconds) since analysis began.
function analyzeVideoFrame() {
  var canvasEl = document.getElementById("canvas");
  // Snapshot a fixed 640x360 region of the canvas.
  var imageData = canvasEl.getContext('2d').getImageData(0, 0, 640, 360);
  console.log("Captured imageData.", imageData);
  var nowSeconds = (new Date()).getTime() / 1000;
  // Elapsed seconds since the first frame was captured.
  detector.process(imageData, nowSeconds - startTimestamp);
}
function onImageResultsSuccess(faces, image, timestamp) {
  // Log the timestamp, the number of detected faces, and the first one
  // (undefined when no face was found in this frame).
  var count = faces.length;
  console.log("onImageResultsSuccess:", timestamp, count, faces[0]);
}
function onImageResultsFailure(image, timestamp, err_detail) {
  // Surface the detector's error detail, then stop the 1 Hz polling loop.
  console.error("onImageResultsFailure:", timestamp, err_detail);
  clearInterval(heartbeat);
}
// Wire everything up once the DOM is ready.
$(function() {
  // Guard: the Affectiva SDK script must have loaded before this point.
  if (typeof(affdex)=="undefined") {
    console.log("The affdex global variable has not been loaded.");
  }
  var canvas = document.getElementById('canvas');
  // Fix: `ctx` and `detector` were declared with `var` inside this callback,
  // but onVideoPlay() and analyzeVideoFrame() are top-level functions that
  // reference them by name — as locals they were invisible there and caused
  // ReferenceErrors. Publish both on window so the globals resolve.
  window.ctx = canvas.getContext('2d');
  var video = document.getElementById('video');
  window.detector = new affdex.FrameDetector(affdex.FaceDetectorMode.LARGE_FACES);
  var detector = window.detector;
  // Set up a loop to draw frames to the canvas element.
  // (Fix: the third addEventListener argument should be a boolean, not 0.)
  video.addEventListener('play', onVideoPlay, false);
  // Enable every metric, then set up and start the detector.
  detector.detectAllExpressions();
  detector.detectAllEmotions();
  detector.detectAllAppearance();
  // Once the detector is ready, start playback and sample 1 frame/second.
  detector.addEventListener("onInitializeSuccess", function() {
    document.getElementById('video').play();
    startTimestamp = (new Date()).getTime() / 1000;
    heartbeat = setInterval(analyzeVideoFrame, 1000);
  });
  detector.addEventListener("onInitializeFailure", function() {
    console.error("Affectiva failed to initialize.");
  });
  detector.addEventListener("onImageResultsSuccess", onImageResultsSuccess);
  detector.addEventListener("onImageResultsFailure", onImageResultsFailure);
  detector.start();
});