I am developing an application that detects motion in webcam frames. To do this I use an IBasicVideoEffect to pull frames one by one from MediaCapture: I created a class, CustomEffect, that implements IBasicVideoEffect. I use OpenCV for the motion detection and it works fine; it also gives me a motion level. When the motion level exceeds a threshold, I want to raise an event from CustomEffect.

The code for the videoDefinition is:
var videoDefinition = new VideoEffectDefinition(typeof(CustomEffect).ToString());
The VideoEffectDefinition constructor only takes a ClassID, so how can I get an event from the CustomEffect object? I want to raise an event from CustomEffect (for example: MotionDetectedEvent).

Here is my CustomEffect class:
public sealed class CustomEffect : IBasicVideoEffect
{
    private OpenCVHelper _helper;
    private IPropertySet _configuration;

    internal event EventHandler<EventArgs> MotionDetected;

    public void SetProperties(IPropertySet configuration)
    {
        _configuration = configuration;
    }

    public void SetEncodingProperties(VideoEncodingProperties encodingProperties,
        IDirect3DDevice device)
    {
    }

    private bool IsToDetectMotion
    {
        get
        {
            object val;
            if (_configuration != null &&
                _configuration.TryGetValue("IsToDetectMotion", out val))
                return (bool) val;
            return false;
        }
    }

    public void ProcessFrame(ProcessVideoFrameContext context)
    {
        var tempBitmap = context.OutputFrame.SoftwareBitmap;
        context.InputFrame.SoftwareBitmap.CopyTo(tempBitmap);

        var originalBitmap = SoftwareBitmap.Convert(tempBitmap, BitmapPixelFormat.Bgra8,
            BitmapAlphaMode.Straight);
        var outputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8,
            originalBitmap.PixelWidth, originalBitmap.PixelHeight,
            BitmapAlphaMode.Straight);

        if (!IsToDetectMotion)
        {
            context.InputFrame.SoftwareBitmap.CopyTo(context.OutputFrame.SoftwareBitmap);
            return;
        }

        if (_helper == null)
            _helper = new OpenCVHelper();

        var level = _helper.MotionDetector(tempBitmap, outputBitmap);
        RaiseMotionDetectedEvent();
        Debug.WriteLine(level.ToString());

        outputBitmap.CopyTo(context.OutputFrame.SoftwareBitmap);
    }

    private void RaiseMotionDetectedEvent()
    {
        if (MotionDetected != null)
            MotionDetected(this, new EventArgs());
    }

    public void Close(MediaEffectClosedReason reason)
    {
    }

    public void DiscardQueuedFrames()
    {
    }

    public bool IsReadOnly { get; }

    public IReadOnlyList<VideoEncodingProperties> SupportedEncodingProperties
    {
        get
        {
            var encodingProperties = new VideoEncodingProperties();
            encodingProperties.Subtype = "ARGB32";
            return new List<VideoEncodingProperties> {encodingProperties};

            // If the list is empty, the encoding type will be ARGB32.
            // return new List<VideoEncodingProperties>();
        }
    }

    public MediaMemoryTypes SupportedMemoryTypes { get; }

    public bool TimeIndependent { get; }
}
Answer 0 (score: 0)

The VideoEffectDefinition only carries the class name; the media pipeline activates the effect instance itself, so the app never holds a reference to the CustomEffect object and cannot subscribe to an event declared on it. Instead, pass a delegate into the effect through SetProperties / IPropertySet and invoke it from inside the effect:
// in the Windows Runtime Component
public sealed class FrameArgs
{
    public FrameArgs(int frameCount)
    {
        FrameCount = frameCount;
    }

    public int FrameCount { get; }
}

// Note: the original CustomEffect declaration must also be marked 'partial'.
public sealed partial class CustomEffect
{
    #region ProcessFrameCompleted

    // Not a regular event: the handler is read from the IPropertySet that the
    // app supplies through videoEffect.SetProperties(...).
    public EventHandler<Object> ProcessFrameCompleted
    {
        get
        {
            object val;
            if (_configuration != null && _configuration.TryGetValue(nameof(ProcessFrameCompleted), out val))
            {
                return (EventHandler<Object>)val;
            }
            return null;
        }
    }

    public void RaiseProcessFrameCompleted(FrameArgs args)
    {
        ProcessFrameCompleted?.Invoke(null, (Object)args);
    }

    #endregion

    // call as necessary, e.g. from ProcessFrame:
    // RaiseProcessFrameCompleted(new FrameArgs(frameCount));
}
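For the question's motion-detection scenario, the same mechanism can carry the OpenCV level back to the app. The sketch below is an illustrative adaptation, not part of the original answer: the MotionArgs type, the "MotionDetected" key, and the 0.5 threshold are assumptions.

// Hypothetical adaptation (names and threshold are assumptions).
public sealed class MotionArgs
{
    public MotionArgs(double level)
    {
        Level = level;
    }

    public double Level { get; }
}

public sealed partial class CustomEffect
{
    // Read the handler from the same IPropertySet, mirroring ProcessFrameCompleted above.
    private EventHandler<Object> MotionDetectedHandler
    {
        get
        {
            object val;
            if (_configuration != null && _configuration.TryGetValue("MotionDetected", out val))
                return (EventHandler<Object>)val;
            return null;
        }
    }

    // Call from ProcessFrame once the OpenCV level is known, e.g.:
    //   var level = _helper.MotionDetector(tempBitmap, outputBitmap);
    //   if (level > 0.5) RaiseMotionDetected(level);   // threshold chosen for illustration
    private void RaiseMotionDetected(double level)
    {
        MotionDetectedHandler?.Invoke(null, new MotionArgs(level));
    }
}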
// in your app
public static async Task<IMediaExtension> AddCustomEffect(MediaCapture mediaCapture, EventHandler<FrameArgs> callBack)
{
    if (mediaCapture == null)
    {
        throw new ArgumentException("Parameter cannot be null", nameof(mediaCapture));
    }

    var videoEffectDefinition =
        // ReSharper disable once AssignNullToNotNullAttribute
        new VideoEffectDefinition(typeof(CustomEffect).FullName);
    var videoEffect =
        await mediaCapture.AddVideoEffectAsync(videoEffectDefinition, MediaStreamType.VideoPreview);

    videoEffect.SetProperties(
        new PropertySet()
        {
            {
                "ProcessFrameCompleted",
                new EventHandler<object>((sender, e) =>
                {
                    var args = (FrameArgs)e;
                    int frameCount = args.FrameCount;
                    callBack?.Invoke(sender, args);
                })
            }
        });

    return videoEffect;
}
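Finally, a minimal call-site sketch; the _mediaCapture field and the surrounding async method are assumptions, not part of the original answer.

// Hypothetical call site inside an async method of the app.
var effect = await AddCustomEffect(_mediaCapture, (sender, args) =>
{
    Debug.WriteLine($"Frame {args.FrameCount} processed");
});

// To enable the question's motion detection, add { "IsToDetectMotion", true } to the
// same PropertySet that AddCustomEffect passes to SetProperties. Avoid calling
// SetProperties a second time: the effect keeps only the most recently supplied
// property set, so a second call would drop the ProcessFrameCompleted handler.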