Crashing with no crash report

Date: 2018-09-05 15:47:35

Tags: ios swift crash crash-reports avassetexportsession

I have a class in my app that merges a number of video files together to produce one combined video. There is one main asset that is used for most of the output, and the other assets are laid on top of it. The only audio that is used comes from the main asset. Here is the code:

import UIKit
import AVFoundation
import Photos


class Merger: NSObject {

    var controller:EditVideoViewController!
    var button:AddAssetButton!
    var view:UIView!
    var difference:Double!
    var changed:Bool = false
    var AI:AIView!

    convenience init(controller:EditVideoViewController, button:AddAssetButton) {
        self.init()
        self.controller = controller
        self.button = button

        self.view = UIView(frame: controller.view.bounds)
        self.view.backgroundColor = UIColor.black.withAlphaComponent(0.7)
        self.controller.view.addSubview(self.view)
    }  

    func setupAI() {
        self.AI = AIView(view: self.view)
        self.AI.start()
    }

    func removeAI() {
        self.AI.stop()
        self.AI.removeEverything()
    }

    //The video is displaying in Portrait after merge.
    func merge(completion:@escaping () -> Void, assets:[Asset]) {

        self.setupAI()

        let assets = assets.sorted(by: { $0.layer.zPosition < $1.layer.zPosition })
        if let firstAsset = controller.firstAsset {

            let mixComposition = AVMutableComposition()

            let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                                     preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

            do {
                try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.controller.realDuration),
                                           of: firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                           at: kCMTimeZero)
            } catch _ {
                print("Failed to load first track")
            }

            var myTracks:[AVMutableCompositionTrack] = []

            for asset in assets {

                let secondTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                                          preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
                secondTrack.preferredTransform = asset.asset.preferredTransform
                do {
                    try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.endTime-asset.beginTime),
                                               of: asset.asset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                               at: CMTime(seconds: CMTimeGetSeconds(asset.beginTime), preferredTimescale: 600000))
                } catch _ {
                    print("Failed to load second track")
                }
                myTracks.append(secondTrack)
            }

            if let loadedAudioAsset = controller.audioAsset {
                let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
                do {
                    try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.controller.realDuration),
                                               of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0] ,
                                               at: kCMTimeZero)
                } catch _ {
                    print("Failed to load Audio track")
                }
            }

            let mainInstruction = AVMutableVideoCompositionInstruction()
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.controller.realDuration)

            let firstInstruction = videoCompositionInstructionForTrack(firstTrack, firstAsset)
            var instructions:[AVMutableVideoCompositionLayerInstruction] = []
            var counter:Int = 0
            for tracks in myTracks {
                firstInstruction.setOpacity(0.0, at: assets[counter].beginTime)
                let secondInstruction = videoCompositionInstructionForTrack(tracks, assets[counter].asset, type:true)
                secondInstruction.setOpacity(0.0, at: assets[counter].endTime)
                firstInstruction.setOpacity(1.0, at: assets[counter].endTime)
                instructions.append(secondInstruction)
                counter += 1
            }

            mainInstruction.layerInstructions = [firstInstruction] + instructions
            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(1, 30)
            mainComposition.renderSize = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
            let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo.mov")
            let url = URL(fileURLWithPath: savePath)
            _ = try? FileManager().removeItem(at: url)

            guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
            exporter.outputFileType = AVFileTypeMPEG4
            exporter.outputURL = url
            exporter.videoComposition = mainComposition


            exporter.exportAsynchronously(completionHandler: {
                DispatchQueue.main.async(execute: {
                    self.exportDidFinish(exporter)
                    self.removeAI()
                    completion()
                })
            })
        }
    }
    func exportDidFinish(_ exporter:AVAssetExportSession) {
         if(exporter.status == AVAssetExportSessionStatus.completed) {
            print("cool")
        }
        else if(exporter.status == AVAssetExportSessionStatus.failed) {
            print(exporter.error as Any)
        }
    }

    func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, _ asset: AVAsset, type:Bool = false) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

        var transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform)
        let width = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width/assetTrack.naturalSize.width
        var height = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.height

        if assetInfo.isPortrait {
            //Vert Video taken from camera -- vert video from lib
            height = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.width
            transform = transform.scaledBy(x: height, y: height)
            let movement = ((1/height)*assetTrack.naturalSize.height)-assetTrack.naturalSize.height
            transform = transform.translatedBy(x: 0, y: movement)
            let totalBlackDistance = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width-transform.tx
            transform = transform.translatedBy(x: 0, y: -(totalBlackDistance/2)*(1/height))

        } else {
            //Main Video -- hor photo from camera -- hor video from camera -- hor photo from lib -- hor vid from lib -- vert photos lib - vert photos camera
            transform = transform.scaledBy(x: width, y: height)
            let scale:CGFloat = ((self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.height)*(assetTrack.naturalSize.width))/self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width
            transform = transform.scaledBy(x: scale, y: 1)
            let movement = ((self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width-((self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.height)*(assetTrack.naturalSize.width)))/2)*(1/(self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.height))
            transform = transform.translatedBy(x: movement, y: 0)
        }
        instruction.setTransform(transform, at: kCMTimeZero)
        return instruction
    }

    func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }

        return (assetOrientation, isPortrait)
    }
}

On my own phone, an iPhone 6s, it never crashes and I have never run into any problems.

One of my testers (who has an iPhone 5s) gets random crashes during the export. When his phone crashes, it does not seem to crash particularly hard: the app simply shuts down completely and nothing else happens. No crash report is sent to my Organizer window (which normally does happen), and nothing else appears to be wrong. Another problem with testing this is that it happens on his device, not mine. Even with the same assets selected, it does not crash every time. If I could reproduce the problem reliably through some fixed set of steps it would not be such a big issue, but I am not sure where to go from here.

This tester is not located near me — I collect crash reports through Apple's Organizer window.

Potential solutions:

Memory: I thought this might be a memory problem, since I know the OS will kill the app if memory use gets too high. However, nothing was ever delivered to didReceiveMemoryWarning(); the app just terminated.
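Since the memory theory is hard to confirm remotely, one option would be to have the build log its own memory footprint and any memory-warning notifications during the export, so the tester can report what he sees. The sketch below is only an illustration of that idea and is not part of the original code: the MemoryLogger type, its method names, and the places it would be called from are all assumptions.

import UIKit

// Minimal sketch (assumed helper, not from the question): logs the process's
// resident memory size and listens for memory-warning notifications, which a
// plain NSObject such as Merger can observe even though it has no
// didReceiveMemoryWarning() of its own.
final class MemoryLogger {

    // Resident memory size of the process in bytes, or nil if the query fails.
    static func residentBytes() -> UInt64? {
        var info = mach_task_basic_info()
        var count = mach_msg_type_number_t(MemoryLayout<mach_task_basic_info>.size / MemoryLayout<natural_t>.size)
        let result = withUnsafeMutablePointer(to: &info) {
            $0.withMemoryRebound(to: integer_t.self, capacity: Int(count)) {
                task_info(mach_task_self_, task_flavor_t(MACH_TASK_BASIC_INFO), $0, &count)
            }
        }
        return result == KERN_SUCCESS ? info.resident_size : nil
    }

    // Prints the current footprint with a label so log lines can be matched
    // to points in the export.
    static func log(_ label: String) {
        if let bytes = residentBytes() {
            print("[\(label)] resident memory: \(bytes / 1_048_576) MB")
        }
    }

    // The observer token is deliberately discarded in this sketch; a real
    // implementation would keep it so the observer can be removed later.
    static func startObservingMemoryWarnings() {
        _ = NotificationCenter.default.addObserver(forName: .UIApplicationDidReceiveMemoryWarning,
                                                   object: nil,
                                                   queue: .main) { _ in
            MemoryLogger.log("memory warning")
        }
    }
}

Calling MemoryLogger.log(...) right before exporter.exportAsynchronously and again inside its completion handler would at least show whether memory climbs toward the point where an iPhone 5s (1 GB of RAM) is likely to be killed by the system without a conventional crash.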

Any suggestions or solutions? All help is appreciated.

1 Answer:

Answer 0 (score: 0)

The Xcode Organizer does not provide every crash report for your app, and only Apple knows how they decide which reports are provided and which are not. For beta testing through TestFlight, crash report data is only shared with the developer if the user has approved it, and even then it can sometimes take days for a report to show up.

Even when the app is killed by the OS for some other reason, for example excessive memory consumption, a crash report is written on the device and delivered to Apple, provided all the requirements for delivering it are met, such as the user having approved sharing this data.

Without a report there is no way to say what happened or why, so the assumption that this is a memory usage problem is a wild guess, and the stated reasoning for it is not valid.

You need to get the crash report and symbolicate it. Your user can find crash reports on the device by navigating to Settings > General > About > Diagnostics & Usage Data, then copy the contents and paste them, for example, into an email and send it to you.