我正在尝试录制视频并在其上添加动态文本,然后将其保存到PhotoGallery。当我保存该文件时,只有视频文件在没有音频的情况下被保存,我仍然不确定我将如何在每个帧上添加文本标签。
以下是我的VideoWriter和控制器代码。
VideoWriter.swift
import Foundation
import AVFoundation
import PhotosUI
import AssetsLibrary
class VideoWriter : NSObject{
var fileWriter: AVAssetWriter!
var videoInput: AVAssetWriterInput!
var audioInput: AVAssetWriterInput!
var fileUrl: URL!
let fileManager = FileManager.default
init(height:Int, width:Int, channels:Int = 2, samples:Float64 = 44100){
let directory = VideoWriter.directoryForNewVideo()
fileUrl = directory.appendingPathComponent(UUID().uuidString + "video.mp4")
fileWriter = try? AVAssetWriter(outputURL: fileUrl as URL, fileType: AVFileType.mov)
let videoOutputSettings: Dictionary<String, AnyObject> = [
AVVideoCodecKey : AVVideoCodecType.h264 as AnyObject,
AVVideoWidthKey : width as AnyObject,
AVVideoHeightKey : height as AnyObject
];
videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
videoInput.expectsMediaDataInRealTime = true
fileWriter.add(videoInput)
let audioOutputSettings: Dictionary<String, AnyObject> = [
AVFormatIDKey : Int(kAudioFormatMPEG4AAC) as AnyObject,
AVNumberOfChannelsKey : channels as AnyObject,
AVSampleRateKey : samples as AnyObject,
AVEncoderBitRateKey : 128000 as AnyObject
]
audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
audioInput.expectsMediaDataInRealTime = true
fileWriter.add(audioInput)
}
func write(sample: CMSampleBuffer, isVideo: Bool){
if CMSampleBufferDataIsReady(sample) {
if fileWriter.status == AVAssetWriterStatus.unknown {
let startTime = CMSampleBufferGetPresentationTimeStamp(sample)
fileWriter.startWriting()
fileWriter.startSession(atSourceTime: startTime)
}
if fileWriter.status == AVAssetWriterStatus.failed {
return
}
if isVideo {
if videoInput.isReadyForMoreMediaData {
videoInput.append(sample)
}
}else{
if audioInput.isReadyForMoreMediaData {
audioInput.append(sample)
}
}
}
}
static func directoryForNewVideo() -> URL {
let videoDir = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first?.appendingPathComponent("videos")
return videoDir!
}
// MARK: - Helpers
func saveVideoToLibrary(videoURL:URL) {
let videoLibrary = PHPhotoLibrary.shared()
videoLibrary.performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: videoURL)
})
}
func finish(callback: @escaping () -> ()){
fileWriter.finishWriting(completionHandler: callback)
self.saveVideoToLibrary(videoURL: self.fileUrl)
if fileManager.isDeletableFile(atPath: fileUrl.absoluteString){
do{
try fileManager.removeItem(atPath: fileUrl.absoluteString)
}
catch{
print("Error")
}
}
}
}
CameraViewController.swift
import UIKit
import AVFoundation
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
@IBAction func switchCamera(_ sender: UIBarButtonItem) {self.switchCamera()}
@IBAction func backButton(_ sender: UIBarButtonItem) { self.backToHome()}
@IBOutlet var bgImage: UIImageView!
@IBAction func recording(_ sender: Any) {self.startCapture()}
@IBOutlet var camPreview: UIView!
let outputSettings: Dictionary<CFString, Any> = [kCVPixelBufferPixelFormatTypeKey:kCVPixelFormatType_32BGRA]
var assetWriterInput:AVAssetWriterInput!
let cameraButton = UIView()
let captureSession = AVCaptureSession()
let movieOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
var previewLayer: AVCaptureVideoPreviewLayer!
var activeInput: AVCaptureDeviceInput!
var outputURL: URL!
var currentCameraPosition: CameraPosition?
var frontCameraInput: AVCaptureDeviceInput?
var frontCamera: AVCaptureDevice?
var rearCamera: AVCaptureDevice?
var rearCameraInput: AVCaptureDeviceInput?
var videoWriter:VideoWriter!
var isRecording = false
override func viewDidLoad() {
super.viewDidLoad()
if setupSession() {
setupPreview()
self.bgImage.isHidden = true
startSession()
self.videoWriter = VideoWriter(height: Int(self.camPreview.frame.height), width: Int(self.camPreview.frame.width))
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// self.write(image: UIImage(named: "STANDARD")!, toBuffer: sampleBuffer)
let isVideo:Bool = output == movieOutput
self.videoWriter.write(sample: sampleBuffer, isVideo: isVideo)
}
func write(image overlayImage:UIImage, toBuffer sample:CMSampleBuffer){
if let pixelBuffer = CMSampleBufferGetImageBuffer(sample){
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
var bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue
bitmapInfo |= CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue
let context = CGContext(data: CVPixelBufferGetBaseAddress(pixelBuffer), width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: CGColorSpaceCreateDeviceRGB(), bitmapInfo: bitmapInfo)
context?.draw(overlayImage.cgImage!, in: CGRect(x: 0.0, y: 0.0, width: overlayImage.size.width, height: overlayImage.size.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
}
}
func switchCamera(){
// Make sure the device has more than 1 camera.
if AVCaptureDevice.devices(for: AVMediaType.video).count > 1 {
// Check which position the active camera is.
var newPosition: AVCaptureDevice.Position!
if activeInput.device.position == AVCaptureDevice.Position.back {
newPosition = AVCaptureDevice.Position.front
} else {
newPosition = AVCaptureDevice.Position.back
}
// Get camera at new position.
var newCamera: AVCaptureDevice!
let devices = AVCaptureDevice.devices(for: AVMediaType.video)
for device in devices {
if (device as AnyObject).position == newPosition {
newCamera = device
}
}
// Create new input and update capture session.
do {
let input = try AVCaptureDeviceInput(device: newCamera)
captureSession.beginConfiguration()
// Remove input for active camera.
captureSession.removeInput(activeInput)
// Add input for new camera.
if captureSession.canAddInput(input) {
captureSession.addInput(input)
activeInput = input
} else {
captureSession.addInput(activeInput)
}
captureSession.commitConfiguration()
} catch {
print("Error switching cameras: \(error)")
}
}
}
func backToHome(){
let storyboard = UIStoryboard(name: "Main", bundle: nil)
let vc = storyboard.instantiateViewController(withIdentifier: "HomeViewController") as! FrontPageTableViewController
self.navigationController?.pushViewController(vc, animated: true)
}
// MARK: - Setup session and preview
func setupSession() -> Bool {
self.assetWriterInput = AVAssetWriterInput.init(mediaType: .video, outputSettings: outputSettings as [String : Any])
captureSession.sessionPreset = AVCaptureSession.Preset.medium
// Setup Camera
let camera = AVCaptureDevice.default(for: AVMediaType.video)
do {
let input = try AVCaptureDeviceInput(device: camera!)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
activeInput = input
}
} catch {
print("Error setting device video input: \(error)")
return false
}
// Setup Microphone
let microphone = AVCaptureDevice.default(for: AVMediaType.audio)
do {
let micInput = try AVCaptureDeviceInput(device: microphone!)
if captureSession.canAddInput(micInput){
captureSession.addInput(micInput)
}
} catch {
print("Error setting device audio input: \(error)")
return false
}
// Movie output
self.movieOutput.videoSettings = [
kCVPixelBufferPixelFormatTypeKey as AnyHashable as! String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
]
let captureSessionQueue = DispatchQueue(label: "CameraSessionQueue", attributes: [])
self.movieOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)
if captureSession.canAddOutput(movieOutput) {
captureSession.addOutput(movieOutput)
}
if captureSession.canAddOutput(audioOutput){
captureSession.addOutput(audioOutput)
}
self.captureSession.commitConfiguration()
return true
}
func setupPreview() {
// Configure previewLayer
camPreview.frame = CGRect(x: 0, y: 0, width: self.view.frame.width, height: self.camPreview.frame.height)
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = camPreview.bounds
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
camPreview.layer.addSublayer(previewLayer)
}
//MARK:- Camera Session
func startSession() {
if !captureSession.isRunning {
videoQueue().async {
self.captureSession.startRunning()
}
}
}
func stopSession() {
if captureSession.isRunning {
videoQueue().async {
self.captureSession.stopRunning()
}
}
}
func videoQueue() -> DispatchQueue {
return DispatchQueue.main
}
// setup URL
func tempURL() -> URL? {
let directory = NSTemporaryDirectory() as NSString
if directory != "" {
let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
return URL(fileURLWithPath: path)
}
return nil
}
func setupCaptureMode(_ mode: Int) {
// Video Mode
}
// Camera Orientation
func currentVideoOrientation() -> AVCaptureVideoOrientation {
var orientation: AVCaptureVideoOrientation
switch UIDevice.current.orientation {
case .portrait:
orientation = AVCaptureVideoOrientation.portrait
case .landscapeRight:
orientation = AVCaptureVideoOrientation.landscapeLeft
case .portraitUpsideDown:
orientation = AVCaptureVideoOrientation.portraitUpsideDown
default:
orientation = AVCaptureVideoOrientation.landscapeRight
}
return orientation
}
@objc func startCapture() {
self.startRecording()
}
// StartRecording
func startRecording() {
if !self.isRecording{
self.isRecording = true
let connection = movieOutput.connection(with: AVMediaType.video)
if (connection?.isVideoOrientationSupported)! {
connection?.videoOrientation = currentVideoOrientation()
}
if (connection?.isVideoStabilizationSupported)! {
connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
}
let device = activeInput.device
if (device.isSmoothAutoFocusSupported) {
do {
try device.lockForConfiguration()
device.isSmoothAutoFocusEnabled = false
device.unlockForConfiguration()
} catch {
print("Error setting configuration: \(error)")
}
}
}else{
self.isRecording = false
self.stopRecording()
}
}
func stopRecording() {
self.captureSession.stopRunning()
self.videoWriter.finish(callback: {})
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
let vc = segue.destination as! VideoPlayback
vc.videoURL = sender as! URL
}
}
enum CameraPosition {
case front
case rear
}
extension CameraViewController {
enum CameraControllerError: Swift.Error {
case captureSessionAlreadyRunning
case captureSessionIsMissing
case inputsAreInvalid
case invalidOperation
case noCamerasAvailable
case unknown
}
}
我需要帮助解决此代码中的两件事 -
1)使用音频保存视频
2)在每个框架上添加计时器或任意随机文字。