When I record video with the following view controller:
class AVCameraViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        initializeMotionManager()
        sessionQueue.async {
            let movieFileOutput = AVCaptureMovieFileOutput()

            if self.session.canAddOutput(movieFileOutput) {
                self.session.beginConfiguration()
                self.session.addOutput(movieFileOutput)
                self.session.sessionPreset = .high

                if let connection = movieFileOutput.connection(with: .video) {
                    if connection.isVideoStabilizationSupported {
                        connection.preferredVideoStabilizationMode = .auto
                    }
                }

                self.session.commitConfiguration()

                movieFileOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 60)

                self.movieFileOutput = movieFileOutput

                DispatchQueue.main.async {
                    self.recordButton.isEnabled = true
                }
            }
        }
    }

    func fileOutput(_ output: AVCaptureFileOutput,
                    didFinishRecordingTo outputFileURL: URL,
                    from connections: [AVCaptureConnection],
                    error: Error?) {
        // Note: Since we use a unique file path for each recording, a new recording won't overwrite a recording mid-save.
        UIApplication.shared.isIdleTimerDisabled = false

        func cleanup() {
            let path = outputFileURL.path
            if FileManager.default.fileExists(atPath: path) {
                do {
                    try FileManager.default.removeItem(atPath: path)
                } catch {
                    print("Could not remove file at url: \(outputFileURL)")
                }
            }

            if let currentBackgroundRecordingID = backgroundRecordingID {
                backgroundRecordingID = UIBackgroundTaskIdentifier.invalid

                if currentBackgroundRecordingID != UIBackgroundTaskIdentifier.invalid {
                    UIApplication.shared.endBackgroundTask(currentBackgroundRecordingID)
                }
            }
        }

        var success = true

        if error != nil {
            print("Movie file finishing error: \(String(describing: error))")
            success = (((error! as NSError).userInfo[AVErrorRecordingSuccessfullyFinishedKey] as AnyObject).boolValue)!
        }

        if success {
            // Check authorization status.
            UIView.animate(withDuration: 0.5) {
                self.overlay.alpha = 0.9
                self.navigationController?.navigationBar.isTranslucent = false
            }
            footageURL = outputFileURL
            performSegue(withIdentifier: "TrimFootage", sender: nil)
        } else {
            cleanup()
        }

        // Enable the Camera and Record buttons to let the user switch camera and start another recording.
        DispatchQueue.main.async {
            // Only enable the ability to change camera if the device has more than one camera.
            self.recordButton.isEnabled = true
            // self.recordButton.setImage(#imageLiteral(resourceName: "CaptureVideo"), for: [])
        }
    }
}
As you can see, I set maxRecordedDuration to 2 minutes. After the recording finishes successfully, it eventually segues to another view controller.
The problem is that it now only records for one minute, then stops recording and moves on. I'm not sure whether I'm setting maxRecordedDuration incorrectly or whether I have to do something else.
Answer 0 (score: 0)
I assume self.session refers to an instance of AVCaptureSession. Try moving the maxRecordedDuration assignment to just after the definition of movieFileOutput:
let movieFileOutput = AVCaptureMovieFileOutput()
movieFileOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 1)
You may be committing the session configuration too early, with an incorrect setting in place.
(Also, use a preferredTimescale of 1, which represents whole seconds. But I think you already pointed that out in the comments above.)
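If it helps, here is roughly how the question's configuration block might look with that change applied. This is only a sketch: session, sessionQueue, the movieFileOutput property, and recordButton are the names from the question, not new API.

sessionQueue.async {
    let movieFileOutput = AVCaptureMovieFileOutput()
    // Set the limit right after creating the output, before it is added to the
    // session and before the configuration is committed.
    movieFileOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 1)

    if self.session.canAddOutput(movieFileOutput) {
        self.session.beginConfiguration()
        self.session.addOutput(movieFileOutput)
        self.session.sessionPreset = .high
        if let connection = movieFileOutput.connection(with: .video),
           connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .auto
        }
        self.session.commitConfiguration()
        self.movieFileOutput = movieFileOutput
        DispatchQueue.main.async {
            self.recordButton.isEnabled = true
        }
    }
}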
Answer 1 (score: 0)
I just used the following code. (Most of it comes from this accepted answer. Thanks @gwinyai, you should upvote his answer. I already did ;)
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    @IBOutlet weak var camPreview: UIView!

    let cameraButton = UIView()
    let captureSession = AVCaptureSession()
    let movieOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var activeInput: AVCaptureDeviceInput!
    var outputURL: URL!

    override func viewDidLoad() {
        super.viewDidLoad()

        movieOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 600)

        if setupSession() {
            setupPreview()
            startSession()
        }

        cameraButton.isUserInteractionEnabled = true
        let cameraButtonRecognizer = UITapGestureRecognizer(target: self, action: #selector(ViewController.startCapture))
        cameraButton.addGestureRecognizer(cameraButtonRecognizer)
        cameraButton.frame = CGRect(x: 0, y: 0, width: 100, height: 100)
        cameraButton.backgroundColor = UIColor.red
        camPreview.addSubview(cameraButton)
    }

    func setupPreview() {
        // Configure previewLayer
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = camPreview.bounds
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        camPreview.layer.addSublayer(previewLayer)
    }

    // MARK: - Setup Camera

    func setupSession() -> Bool {
        captureSession.sessionPreset = AVCaptureSession.Preset.high

        // Setup Camera
        let camera = AVCaptureDevice.default(for: AVMediaType.video)!
        do {
            let input = try AVCaptureDeviceInput(device: camera)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                activeInput = input
            }
        } catch {
            print("Error setting device video input: \(error)")
            return false
        }

        // Setup Microphone
        let microphone = AVCaptureDevice.default(for: AVMediaType.audio)!
        do {
            let micInput = try AVCaptureDeviceInput(device: microphone)
            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }
        } catch {
            print("Error setting device audio input: \(error)")
            return false
        }

        // Movie output
        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }

        return true
    }

    func setupCaptureMode(_ mode: Int) {
        // Video Mode
    }

    // MARK: - Camera Session

    func startSession() {
        if !captureSession.isRunning {
            videoQueue().async {
                self.captureSession.startRunning()
            }
        }
    }

    func stopSession() {
        if captureSession.isRunning {
            videoQueue().async {
                self.captureSession.stopRunning()
            }
        }
    }

    func videoQueue() -> DispatchQueue {
        return DispatchQueue.main
    }

    func currentVideoOrientation() -> AVCaptureVideoOrientation {
        var orientation: AVCaptureVideoOrientation
        switch UIDevice.current.orientation {
        case .portrait:
            orientation = AVCaptureVideoOrientation.portrait
        case .landscapeRight:
            orientation = AVCaptureVideoOrientation.landscapeLeft
        case .portraitUpsideDown:
            orientation = AVCaptureVideoOrientation.portraitUpsideDown
        default:
            orientation = AVCaptureVideoOrientation.landscapeRight
        }
        return orientation
    }

    @objc func startCapture() {
        if movieOutput.isRecording == false {
            startRecording()
            print("-------- startRecording --------")
        } else {
            stopRecording()
            print("-------- stopRecording --------")
        }
    }

    func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString
        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
            return URL(fileURLWithPath: path)
        }
        return nil
    }

    func startRecording() {
        if movieOutput.isRecording == false {
            let connection = movieOutput.connection(with: AVMediaType.video)

            if (connection?.isVideoOrientationSupported)! {
                connection?.videoOrientation = currentVideoOrientation()
            }

            if (connection?.isVideoStabilizationSupported)! {
                connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
            }

            let device = activeInput.device
            if device.isSmoothAutoFocusSupported {
                do {
                    try device.lockForConfiguration()
                    device.isSmoothAutoFocusEnabled = false
                    device.unlockForConfiguration()
                } catch {
                    print("Error setting configuration: \(error)")
                }
            }

            outputURL = tempURL()
            movieOutput.startRecording(to: outputURL, recordingDelegate: self)
        } else {
            stopRecording()
        }
    }

    func stopRecording() {
        if movieOutput.isRecording == true {
            movieOutput.stopRecording()
        }
    }

    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if let error = error,
           let nserror = error as NSError? {
            switch nserror.code {
            case AVError.Code.maximumDurationReached.rawValue:
                // No error here, because we want to stop when the max duration is reached.
                print(output.maxRecordedDuration.seconds, "<<<<<<<<<<<<")
                recordingEnded()
                return
            default:
                // Handle the error.
                print(nserror.userInfo)
                break
            }
        } else {
            // The user manually stopped the video before maxRecordedDuration was reached.
            recordingEnded()
        }
    }

    func recordingEnded() -> Void {
        print("recording ended successfully")
        let videoRecorded = outputURL! as URL
    }
}
It stops recording after 120 seconds.
It works! You just need to add camPreview to your storyboard, and make sure you have added Privacy - Microphone Usage Description and Privacy - Camera Usage Description to your .plist.
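Those keys only declare why access is needed; at runtime the user still has to grant it. As a side note, a minimal sketch of asking for both permissions before starting the session could look like this (requestCaptureAccess is a hypothetical helper name, not part of the code above):

import AVFoundation

// Hypothetical helper: request camera and microphone access, then report the result.
func requestCaptureAccess(completion: @escaping (Bool) -> Void) {
    AVCaptureDevice.requestAccess(for: .video) { cameraGranted in
        AVCaptureDevice.requestAccess(for: .audio) { micGranted in
            DispatchQueue.main.async {
                completion(cameraGranted && micGranted)
            }
        }
    }
}

You could call this from viewDidLoad and only run setupSession() once the completion reports true.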
Why movieOutput.maxRecordedDuration = CMTime(seconds: 120, preferredTimescale: 600)?
Apple recommends a timescale of 600 for video, explaining that 600 is a multiple of the common video frame rates (24, 25, and 30 FPS). You may want to raise it to 60,000 or higher if you need sample-exact indexing on audio files. ....
See here.
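For illustration, here is a small snippet (assumed values, just to show the idea) demonstrating that the timescale only changes the resolution the duration is stored at, not the duration itself:

import CoreMedia

let duration = CMTime(seconds: 120, preferredTimescale: 600)
print(duration.value)      // 72000: 120 seconds expressed in 1/600-second units
print(duration.timescale)  // 600
print(duration.seconds)    // 120.0: the wall-clock duration is unchanged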
Answer 2 (score: 0)
If you can't sort it out with maxRecordedDuration, I'd suggest removing it and setting up a timer instead. Start it when recording begins and let it run for 120 seconds. If you press stop earlier, invalidate it so it doesn't fire. If the timer runs to the end, just call your stopRecording function, which stops the recording and triggers the segue you want.
Would that solve it?
private var timer: Timer?

private func startRecording() {
    // Code to start recording; you can start the timer here once recording begins.
    self.timer = Timer.scheduledTimer(withTimeInterval: 120, repeats: false, block: { [weak self] (t) in
        guard let welf = self else { return }
        welf.stopRecording()
    })
}

private func stopRecording() {
    // Invalidate the timer so it doesn't fire if the user stopped the recording early.
    self.timer?.invalidate()
    self.timer = nil
    // Code to stop recording and segue further
}