I want to record the iPad screen for the App Store submission. This code stops working when a movie recording (QuickTime Player) is running in parallel on a Mac, and the Apple reviewer rejected the submission because of this issue. The app crashes at this call:
inputNode.installTap(onBus: 0, bufferSize: 1024, format: format)
Crash Log:
2019-11-05 13:29:44.554421+0530 SampleApp[711:75220] [] tcp_input [C7.1:3] flags=[R.] seq=1082348669, ack=827396102, win=237 state=FIN_WAIT_2 rcv_nxt=1082348669, snd_una=827396102
2019-11-05 13:29:47.929097+0530 SampleApp[711:74972] [avae] AVAEInternal.h:76 required condition is false: [AVAudioIONodeImpl.mm:1064:SetOutputFormat: (format.sampleRate == hwFormat.sampleRate)]
2019-11-05 13:29:47.930526+0530 SampleApp[711:74972] *** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: format.sampleRate == hwFormat.sampleRate'
*** First throw call stack:
(0x1b58d380c 0x1b55fbfa4 0x1b57d5c4c 0x1c2375b8c 0x1c2375afc 0x1c2410888 0x1c23b6378 0x1c23a88a8 0x1c24248d4 0x1c240b228 0x100b8e0e8 0x100b9161c 0x100b8b2c0 0x100b8b484 0x1b993dad8 0x101102088 0x1b937594c 0x1b9375cb0 0x1b9374ccc 0x1b99772b0 0x1b99785f0 0x1b9954ba0 0x101101ee8 0x1b99ccf20 0x1b99cf47c 0x1b99c82f0 0x1b58517c4 0x1b585171c 0x1b5850eb4 0x1b584c000 0x1b584b8a0 0x1bf7a3328 0x1b993c768 0x100a71b20 0x1b56d6360)
libc++abi.dylib: terminating with uncaught exception of type NSException
It works fine when the screen is not being recorded. These are the recording settings I am using:
let settings = [AVFormatIDKey: kAudioFormatLinearPCM, AVLinearPCMBitDepthKey: 16, AVLinearPCMIsFloatKey: true, AVSampleRateKey: Float64(44100), AVNumberOfChannelsKey: 1] as [String: Any]
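Based on the assertion in the crash log (format.sampleRate == hwFormat.sampleRate), my guess is that the fixed 44100 Hz in these settings stops matching the hardware sample rate while the Mac-side movie recording is attached. A minimal check to compare the two values (my assumption about where to look, not part of the original project) would be:

let session = AVAudioSession.sharedInstance()
// Sample rate the audio session/hardware currently reports; it may differ from 44100
// while QuickTime movie recording is running on the Mac
print("session sampleRate:", session.sampleRate)
// Sample rate of the input node's hardware-facing format
print("input hw sampleRate:", audioEngine.inputNode.outputFormat(forBus: 0).sampleRate)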
//changing for iOS-13 crash fix
var audioEngine: AVAudioEngine!

private func startRecording() {
    //changing for iOS-13 crash fix
    self.audioEngine = AVAudioEngine()
    self.isAudioRunning(true)
    runTimer()

    // Update the UI for the recording state
    self.recordButton.backgroundColor = UIColor(red: 0.84, green: 0.33, blue: 0.31, alpha: 1)
    self.recordButton.setTitle("Stop", for: .normal)
    if testType == .hardware {
        self.clickOnRecordLabel.text = "Click on the 'Stop' button to stop the recording."
    }
    self.recordingTs = NSDate().timeIntervalSince1970
    self.silenceTs = 0

    // Configure the shared audio session for recording and playback
    do {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(.playAndRecord, mode: .default)
        try session.setActive(true)
    } catch let error as NSError {
        print(error.localizedDescription)
        return
    }

    let inputNode = self.audioEngine.inputNode
    guard let format = self.format() else {
        return
    }

    // The sample-rate assertion from the crash log is raised here when the format
    // returned by self.format() does not match the current hardware sample rate
    inputNode.installTap(onBus: 0, bufferSize: 1024, format: format) { [unowned self] (buffer, _) in
        self.audioMetering(buffer: buffer)

        // Compute the average power of the buffer to detect silence
        let level: Float = -50
        let length: UInt32 = 1024
        buffer.frameLength = length
        let channels = UnsafeBufferPointer(start: buffer.floatChannelData, count: Int(buffer.format.channelCount))
        var value: Float = 0
        vDSP_meamgv(channels[0], 1, &value, vDSP_Length(length))
        var average: Float = ((value == 0) ? -100 : 20.0 * log10f(value))
        if average > 0 {
            average = 0
        } else if average < -100 {
            average = -100
        }
        let silent = average < level

        // Throttle waveform UI updates to roughly every 0.1 s
        let ts = NSDate().timeIntervalSince1970
        if ts - self.renderTs > 0.1 {
            let floats = UnsafeBufferPointer(start: channels[0], count: Int(buffer.frameLength))
            let frame = floats.map({ (f) -> Int in
                return Int(f * Float(Int16.max))
            })
            DispatchQueue.main.async {
                _ = (ts - self.recordingTs)
                //self.labelTimer.text = seconds.toTimeString
                self.renderTs = ts
                let len = self.audioView.waveforms.count
                for i in 0 ..< len {
                    let idx = ((frame.count - 1) * i) / len
                    let f: Float = sqrt(1.5 * abs(Float(frame[idx])) / Float(Int16.max))
                    self.audioView.waveforms[i] = min(49, Int(f * 50))
                }
                self.audioView.active = !silent
                self.audioView.setNeedsDisplay()
            }
        }

        // Append the buffer to the output file
        let write = true
        if write {
            if self.audioFile == nil {
                self.audioFile = self.createAudioRecordFile()
            }
            if let f = self.audioFile {
                do {
                    try f.write(from: buffer)
                } catch let error as NSError {
                    print(error.localizedDescription)
                }
            }
        }
    }

    do {
        self.audioEngine.prepare()
        try self.audioEngine.start()
    } catch let error as NSError {
        print(error.localizedDescription)
        return
    }

    UIApplication.shared.isIdleTimerDisabled = true
    self.audioView.isHidden = false
    self.labelTimer.isHidden = false
}
How can I prevent this crash while the recording is running?
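One direction I am considering (a sketch only, not verified): install the tap with the input node's own output format instead of my fixed 44.1 kHz format, so format.sampleRate always equals hwFormat.sampleRate even if the Mac-side recording changes the hardware rate. The conversion idea in the comment is my assumption, not part of the current project.

let inputNode = self.audioEngine.inputNode
// Use the hardware-facing format of the input node for the tap
let hwFormat = inputNode.outputFormat(forBus: 0)

inputNode.installTap(onBus: 0, bufferSize: 1024, format: hwFormat) { [unowned self] (buffer, _) in
    // same metering / file-writing code as above; if the file must stay at 44.1 kHz,
    // each buffer could be converted here with an AVAudioConverter from hwFormat
    // to the format returned by self.format()
    self.audioMetering(buffer: buffer)
}

I am also wondering whether I should observe NSNotification.Name.AVAudioEngineConfigurationChange and reinstall the tap / restart the engine when the hardware configuration changes mid-recording. Is this the right approach, or is there a better way?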