I have been trying to find a working sample that records video on iOS (using Xamarin), but there is always something missing or it doesn't work for me. My best attempt, pieced together from several forum posts and samples, is below:
using System;
using CoreGraphics;
using Foundation;
using UIKit;
using AVFoundation;
using CoreVideo;
using CoreMedia;
using CoreFoundation;
using System.IO;
using AssetsLibrary;
namespace avcaptureframes {
    public partial class AppDelegate : UIApplicationDelegate {
        public static UIImageView ImageView;
        UIWindow window;   // main window; created here unless it is already declared in another part of this partial class
        UIViewController vc;
        AVCaptureSession session;
        OutputRecorder outputRecorder;
        DispatchQueue queue;

        public override bool FinishedLaunching (UIApplication application, NSDictionary launchOptions)
        {
            window = new UIWindow (UIScreen.MainScreen.Bounds);

            ImageView = new UIImageView (new CGRect (10f, 10f, 200f, 200f));
            ImageView.ContentMode = UIViewContentMode.Top;

            vc = new UIViewController {
                View = ImageView
            };

            window.RootViewController = vc;
            window.MakeKeyAndVisible ();
            window.BackgroundColor = UIColor.Black;

            if (!SetupCaptureSession ())
                window.AddSubview (new UILabel (new CGRect (20f, 20f, 200f, 60f)) {
                    Text = "No input device"
                });

            return true;
        }
        bool SetupCaptureSession ()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
            if (captureDevice == null) {
                Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
                return false;
            }

            // Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
            NSError error = null;
            captureDevice.LockForConfiguration (out error);
            if (error != null) {
                Console.WriteLine (error);
                captureDevice.UnlockForConfiguration ();
                return false;
            }

            if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
                captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);
            captureDevice.UnlockForConfiguration ();

            var input = AVCaptureDeviceInput.FromDevice (captureDevice);
            if (input == null) {
                Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
                return false;
            }
            session.AddInput (input);

            // create a VideoDataOutput and add it to the session
            var settings = new CVPixelBufferAttributes {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };
            using (var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary }) {
                queue = new DispatchQueue ("myQueue");
                outputRecorder = new OutputRecorder ();
                output.SetSampleBufferDelegate (outputRecorder, queue);
                session.AddOutput (output);
            }

            session.StartRunning ();
            return true;
        }

        public override void OnActivated (UIApplication application)
        {
        }
        public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
        {
            AVAssetWriter writer = null;
            AVAssetWriterInput writerinput = null;
            CMTime lastSampleTime;
            int frame = 0;
            NSUrl url;

            public OutputRecorder()
            {
                // Write the recording to a temporary file; delete any leftover from a previous run.
                string tempFile = Path.Combine(Path.GetTempPath(), "NewVideo.mp4");
                if (File.Exists(tempFile)) File.Delete(tempFile);
                url = NSUrl.FromFilename(tempFile);

                // Set up an AVAssetWriter that compresses incoming frames into a 300x300 H.264 MP4.
                NSError assetWriterError;
                writer = new AVAssetWriter(url, AVFileType.Mpeg4, out assetWriterError);

                var outputSettings = new AVVideoSettingsCompressed()
                {
                    Height = 300,
                    Width = 300,
                    Codec = AVVideoCodec.H264,
                    CodecSettings = new AVVideoCodecSettings
                    {
                        AverageBitRate = 1000000
                    }
                };

                writerinput = new AVAssetWriterInput(mediaType: AVMediaType.Video, outputSettings: outputSettings);
                writerinput.ExpectsMediaDataInRealTime = false;
                writer.AddInput(writerinput);
            }
            public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                try
                {
                    lastSampleTime = sampleBuffer.PresentationTimeStamp;
                    var image = ImageFromSampleBuffer(sampleBuffer);

                    // On the first frame, start writing and open the writer session at that frame's timestamp.
                    if (frame == 0)
                    {
                        writer.StartWriting();
                        writer.StartSessionAtSourceTime(lastSampleTime);
                        frame = 1;
                    }

                    String infoString = "";
                    if (writerinput.ReadyForMoreMediaData)
                    {
                        if (!writerinput.AppendSampleBuffer(sampleBuffer))
                        {
                            infoString = "Failed to append sample buffer";
                        }
                        else
                        {
                            infoString = String.Format("{0} frames captured", frame++);
                        }
                    }
                    else
                    {
                        infoString = "Writer not ready";
                    }
                    Console.WriteLine(infoString);

                    ImageView.BeginInvokeOnMainThread(() => ImageView.Image = image);
                }
                catch (Exception e)
                {
                    Console.WriteLine(e);
                }
                finally
                {
                    sampleBuffer.Dispose();
                }
            }
            UIImage ImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
            {
                // Get the CoreVideo image
                using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer)
                {
                    // Lock the base address
                    pixelBuffer.Lock (CVOptionFlags.None);

                    // Get the base address and the layout of the pixel buffer
                    var baseAddress = pixelBuffer.BaseAddress;
                    var bytesPerRow = (int)pixelBuffer.BytesPerRow;
                    var width = (int)pixelBuffer.Width;
                    var height = (int)pixelBuffer.Height;
                    var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

                    // Create a CGImage on the RGB colorspace from the configured parameters above
                    using (var cs = CGColorSpace.CreateDeviceRGB ())
                    {
                        using (var context = new CGBitmapContext (baseAddress, width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo)flags))
                        {
                            using (CGImage cgImage = context.ToImage ())
                            {
                                pixelBuffer.Unlock (CVOptionFlags.None);
                                return UIImage.FromImage (cgImage);
                            }
                        }
                    }
                }
            }

            void TryDispose (IDisposable obj)
            {
                if (obj != null)
                    obj.Dispose ();
            }
        }
    }
}
This shows the live camera image, and I can see the "frames captured" messages in the console, but I have no idea how to actually record to a file.
I read somewhere about adding a VideoCapture, but I don't know how to hook it into my code.
Any help is welcome.
Answer 0 (score: 1)
In your code, the constructor of the OutputRecorder class already defines the URL where the recording is saved:
string tempFile = Path.Combine(Path.GetTempPath(), "NewVideo.mp4");
if (File.Exists(tempFile)) File.Delete(tempFile);
url = NSUrl.FromFilename(tempFile);
This means the video is saved in the tmp folder of the app sandbox. If you want to use the video later, I recommend changing the folder to Documents, like this:
string filePath = Path.Combine(NSSearchPath.GetDirectories(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomain.User)[0], "NewVideo.mp4");
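For illustration, a minimal sketch of how the start of the OutputRecorder constructor might look with the Documents folder instead of tmp (only the path changes; the AVAssetWriter setup stays as in the question):
public OutputRecorder ()
{
    // Save into the app's Documents folder so the recording survives and can be read back later.
    string documents = NSSearchPath.GetDirectories (NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomain.User)[0];
    string filePath = Path.Combine (documents, "NewVideo.mp4");
    if (File.Exists (filePath))
        File.Delete (filePath);
    url = NSUrl.FromFilename (filePath);

    // ... AVAssetWriter / AVAssetWriterInput setup unchanged from the question ...
}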
I noticed you already call session.StartRunning(); in the method bool SetupCaptureSession() to start recording. Add session.StopRunning(); to end the recording; the video will then be saved at the path defined above.
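As a minimal sketch of how this could be wired up: the StopRecording/FinishRecording helpers below are illustrative names, not part of the original code, and finalizing the AVAssetWriter with MarkAsFinished/FinishWriting is an extra step the writer needs so the MP4 file is actually playable.
// In AppDelegate: illustrative helper, e.g. called from a "Stop" button.
public void StopRecording ()
{
    session.StopRunning ();             // stop delivering frames to OutputRecorder
    outputRecorder.FinishRecording ();  // finalize the file (see below)
}

// In OutputRecorder: illustrative helper that closes the AVAssetWriter.
public void FinishRecording ()
{
    writerinput.MarkAsFinished ();
    writer.FinishWriting (() => Console.WriteLine ("Video written to " + url.Path));
}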
You can then retrieve the video from that path with:
NSData videoData = NSData.FromFile(filePath);
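As a sketch of what could be done with the recording afterwards: the ALAssetsLibrary call is only a suggestion based on the using AssetsLibrary; directive already present in the question's code, it is not required by the answer.
NSData videoData = NSData.FromFile (filePath);
Console.WriteLine ("Recorded {0} bytes", videoData == null ? 0 : (long)videoData.Length);

// Optionally copy the recording into the device photo library (AssetsLibrary is deprecated
// on newer iOS versions; the Photos framework is the modern replacement).
var library = new ALAssetsLibrary ();
library.WriteVideoToSavedPhotosAlbum (NSUrl.FromFilename (filePath), (assetUrl, error) => {
    if (error != null)
        Console.WriteLine ("Could not save to photo album: " + error);
    else
        Console.WriteLine ("Saved to photo album: " + assetUrl);
});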