Hi, I followed Jared Davidson's course to create a custom camera view and save pictures using AVFoundation: https://www.youtube.com/watch?v=w0O3ZGUS3pk
However, I would like to record and save video rather than images. Can someone help me with this? I'm sure it's simple, but Apple's documentation is written in Objective-C and I can't decipher it.
This is my code. Thanks.
import UIKit
import AVFoundation

class ViewController: UIViewController {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Front {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            captureSession.startRunning()

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }
                    }
                }
                catch {
                    print("Error")
                }
            }
        }
    }

    @IBAction func TakePhoto(sender: AnyObject) {
        if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo) {
            sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: {
                buffer, error in
                let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
                UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil)
            })
        }
    }
}
Answer 0 (Score: 19)
You can record video to a file by creating an AVCaptureMovieFileOutput, adding it to your capture session, and conforming your ViewController to AVCaptureFileOutputRecordingDelegate.
This example records 5 seconds of video to a file called "output.mov" in the app's Documents directory.
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(animated: Bool) {
        self.cameraView = self.view

        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for device in devices {
            if device.position == AVCaptureDevicePosition.Front {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }

                        captureSession.addOutput(movieOutput)
                        captureSession.startRunning()

                        let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
                        let fileUrl = paths[0].URLByAppendingPathComponent("output.mov")
                        try? NSFileManager.defaultManager().removeItemAtURL(fileUrl)
                        movieOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)

                        let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(5 * Double(NSEC_PER_SEC)))
                        dispatch_after(delayTime, dispatch_get_main_queue()) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }
                }
                catch {
                    print("Error")
                }
            }
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil)
        }
    }
}
Answer 1 (Score: 7)
Thank you; this was very helpful. Here is a version of Rhythmic Fistman's answer ported to Swift 3, with the required import statements and delegate methods.
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(_ animated: Bool) {
        self.cameraView = self.view

        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
        for device in devices! {
            if (device as AnyObject).position == AVCaptureDevicePosition.front {
                do {
                    let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG]

                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)

                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                            cameraView.layer.addSublayer(previewLayer)

                            previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
                            previewLayer.bounds = cameraView.frame
                        }

                        captureSession.addOutput(movieOutput)
                        captureSession.startRunning()

                        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
                        let fileUrl = paths[0].appendingPathComponent("output.mov")
                        try? FileManager.default.removeItem(at: fileUrl)
                        movieOutput.startRecording(toOutputFileURL: fileUrl, recordingDelegate: self)

                        let delayTime = DispatchTime.now() + 5
                        DispatchQueue.main.asyncAfter(deadline: delayTime) {
                            print("stopping")
                            self.movieOutput.stopRecording()
                        }
                    }
                }
                catch {
                    print("Error")
                }
            }
        }
    }

    //MARK: AVCaptureFileOutputRecordingDelegate Methods

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
        }
    }
}
Answer 2 (Score: 2)
You can use this code to save your recorded video to the photo library. You have to provide the following parameters; the most important one is outputFileURL.path, the file-system path to the movie file to be saved to the Camera Roll album. For the remaining parameters you can either pass the respective values or assign nil, depending on your needs (a sketch of the completion selector follows the snippet below).
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if (error != nil) {
        print("Error recording movie: \(error!.localizedDescription)")
    } else {
        // The completion target must be non-nil, or the selector is never called.
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, self, #selector(CameraController.video(_:didFinishSavingWithError:contextInfo:)), nil)
    }
    outputURL = nil // a stored property of this controller, cleared after saving
}
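For reference, here is a minimal sketch of the completion selector named above; the answer only references it, so this body is an assumption:

// Hypothetical implementation of the selector passed to
// UISaveVideoAtPathToSavedPhotosAlbum above; add it to CameraController.
@objc func video(_ videoPath: String, didFinishSavingWithError error: Error?, contextInfo: UnsafeRawPointer) {
    if let error = error {
        print("Error saving video: \(error.localizedDescription)")
    } else {
        print("Video saved to Camera Roll at \(videoPath)")
    }
}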
Answer 3 (Score: 0)
After
if (device as AnyObject).position == AVCaptureDevicePosition.front {
add the following:
// Audio Input
let audioInputDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

do {
    let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)

    // Add Audio Input
    if captureSession.canAddInput(audioInput) {
        captureSession.addInput(audioInput)
    } else {
        NSLog("Can't Add Audio Input")
    }
} catch let error {
    NSLog("Error Getting Input Device: \(error)")
}
Thanks.
Answer 4 (Score: 0)
For the audio recording issue, add this code when creating the captureSession:

askMicroPhonePermission(completion: { (isMicrophonePermissionGiven) in
    if isMicrophonePermissionGiven {
        do {
            try self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureAudio))
        } catch {
            print("Error adding audio input")
        }
    }
})
////////////////////////////////////////////////////////////////

The askMicroPhonePermission function is as follows:
func askMicroPhonePermission(completion: @escaping (_ success: Bool) -> Void) {
    switch AVAudioSession.sharedInstance().recordPermission() {
    case AVAudioSessionRecordPermission.granted:
        completion(true)
    case AVAudioSessionRecordPermission.denied:
        completion(false) // show alert if required
    case AVAudioSessionRecordPermission.undetermined:
        AVAudioSession.sharedInstance().requestRecordPermission({ (granted) in
            if granted {
                completion(true)
            } else {
                completion(false) // show alert if required
            }
        })
    default:
        completion(false)
    }
}
You have to add the NSMicrophoneUsageDescription key to your Info.plist file.
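A minimal sketch of that entry in Info.plist source; the description string here is only an example:

<key>NSMicrophoneUsageDescription</key>
<string>This app uses the microphone to record audio with your videos.</string>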
Answer 5 (Score: 0)
// Generates a thumbnail image from the video at the given URL (frame at ~1 second).
func getCurrentFrame(url: String) -> UIImage? {
    let asset = AVAsset(url: URL(string: url)!)
    let assetImgGenerate = AVAssetImageGenerator(asset: asset)
    assetImgGenerate.appliesPreferredTrackTransform = true
    //Can set this to improve performance if target size is known before hand
    //assetImgGenerate.maximumSize = CGSize(width,height)
    let time = CMTimeMakeWithSeconds(1.0, preferredTimescale: 600)
    do {
        let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
        let thumbnail = UIImage(cgImage: img)
        return thumbnail
    } catch {
        print(error.localizedDescription)
        return nil
    }
}
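A hypothetical usage, assuming fileUrl is the Documents-directory URL from the recording answers above and thumbnailImageView is some UIImageView of yours:

// Hypothetical: grab a thumbnail of the finished recording.
if let thumbnail = getCurrentFrame(url: fileUrl.absoluteString) {
    thumbnailImageView.image = thumbnail
}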
Answer 6 (Score: 0)
Just a small tip for anyone whose delegate isn't being called (I'm on Xcode 12.x / iOS 14.5):
Suppose you have a function that sets up the local destination URL (the code from the previous examples ... Documents directory ...).
If you do this:
DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
    self.captureSession.startRunning()
    self.addDest()
}
it works. But if you swap the two calls:
DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
    self.addDest()
    self.captureSession.startRunning()
}
the delegate is not called.
Call "startRunning" after setting up the outputs:
if captureSession.canAddInput(input!) && captureSession.canAddOutput(stillImageOutput) {
    captureSession.addInput(input!)
    captureSession.addOutput(stillImageOutput)
    setupLivePreview()

    self.videoPreviewLayer.frame = self.previewView.bounds

    if addVideoOutput() {
        DispatchQueue.global(qos: .userInitiated).async { //[weak self] in
            self.captureSession.startRunning()
            self.addDest()
        }
    }
    ...
where
func addVideoOutput() -> Bool {
    movieOutput = AVCaptureMovieFileOutput()
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
        return true
    }
    return false
}