首页
学习
活动
专区
圈层
工具
发布
社区首页 >问答首页 >无报告坠机

没有崩溃报告的崩溃
EN

Stack Overflow用户
提问于 2018-09-05 15:47:34
回答 1查看 151关注 0票数 1

我在我的程序中有一个类，它将多个视频文件合并在一起，生成一个整体视频。我有一个主资产，我主要使用它，并把其他资产叠加在它上面。最终视频只使用来自主资产的音频。以下是代码：

代码语言:javascript
复制
import UIKit
import AVFoundation
import Photos


/// Composites overlay assets on top of a main video asset and exports the
/// result as a single movie file in the app's Documents directory.
class Merger: NSObject {

    // MARK: - Dependencies & state

    var controller:EditVideoViewController!
    var button:AddAssetButton!
    var view:UIView!
    var difference:Double!
    var changed:Bool = false
    var AI:AIView!

    /// Binds the merger to the editing controller and installs a dimming
    /// overlay on top of it while work is in progress.
    convenience init(controller:EditVideoViewController, button:AddAssetButton) {
        self.init()
        self.controller = controller
        self.button = button

        self.view = UIView(frame: controller.view.bounds)
        self.view.backgroundColor = UIColor.black.withAlphaComponent(0.7)
        self.controller.view.addSubview(self.view)
    }

    /// Starts the activity-indicator overlay.
    func setupAI() {
        self.AI = AIView(view: self.view)
        self.AI.start()
    }

    /// Stops and tears down the activity-indicator overlay.
    /// NOTE(review): `AI` is implicitly unwrapped — this crashes if
    /// `setupAI()` was never called. `merge` always calls it first.
    func removeAI() {
        self.AI.stop()
        self.AI.removeEverything()
    }

    //The video is displaying in Portrait after merge.
    /// Builds an `AVMutableComposition` from the controller's main asset plus
    /// the given overlay `assets` (sorted by z-position), then exports it to
    /// Documents/mergeVideo.mov.
    /// - Parameters:
    ///   - completion: Invoked on the main queue when the export finishes,
    ///     whether it succeeded or failed.
    ///   - assets: Overlay assets; each hides the main layer during its own
    ///     [beginTime, endTime] range.
    func merge(completion:@escaping () -> Void, assets:[Asset]) {

        self.setupAI()

        let assets = assets.sorted(by: { $0.layer.zPosition < $1.layer.zPosition })
        if let firstAsset = controller.firstAsset {

            // Query the main asset's video track once; the original code
            // re-fetched it on every use of its naturalSize.
            let firstVideoTrack = firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0]

            let mixComposition = AVMutableComposition()

            let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                            preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

            do {
                try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.controller.realDuration),
                                               of: firstVideoTrack,
                                               at: kCMTimeZero)
            } catch let error {
                // Surface the underlying error instead of discarding it.
                print("Failed to load first track: \(error)")
            }

            var overlayTracks:[AVMutableCompositionTrack] = []

            for asset in assets {

                let overlayTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                                  preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
                overlayTrack.preferredTransform = asset.asset.preferredTransform
                do {
                    try overlayTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.endTime-asset.beginTime),
                                                     of: asset.asset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                                     at: CMTime(seconds: CMTimeGetSeconds(asset.beginTime), preferredTimescale: 600000))
                } catch let error {
                    print("Failed to load second track: \(error)")
                }
                overlayTracks.append(overlayTrack)
            }

            if let loadedAudioAsset = controller.audioAsset {
                // BUG FIX: was `preferredTrackID: 0`. Apple documents
                // kCMPersistentTrackID_Invalid as the sentinel that lets the
                // composition choose a unique track ID; 0 is not guaranteed
                // to be unique and can collide with an existing track.
                let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio,
                                                                preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
                do {
                    try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.controller.realDuration),
                                                   of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0] ,
                                                   at: kCMTimeZero)
                } catch let error {
                    print("Failed to load Audio track: \(error)")
                }
            }

            // One instruction spans the whole timeline; each overlay hides
            // the main layer during its own time range and reveals it again
            // at its end time.
            let mainInstruction = AVMutableVideoCompositionInstruction()
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.controller.realDuration)

            let firstInstruction = videoCompositionInstructionForTrack(firstTrack, firstAsset)
            var overlayInstructions:[AVMutableVideoCompositionLayerInstruction] = []
            for (index, overlayTrack) in overlayTracks.enumerated() {
                firstInstruction.setOpacity(0.0, at: assets[index].beginTime)
                let overlayInstruction = videoCompositionInstructionForTrack(overlayTrack, assets[index].asset, type:true)
                overlayInstruction.setOpacity(0.0, at: assets[index].endTime)
                firstInstruction.setOpacity(1.0, at: assets[index].endTime)
                overlayInstructions.append(overlayInstruction)
            }

            mainInstruction.layerInstructions = [firstInstruction] + overlayInstructions
            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(1, 30)
            mainComposition.renderSize = firstVideoTrack.naturalSize

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
            let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo.mov")
            let url = URL(fileURLWithPath: savePath)
            // Remove any previous export; a pre-existing file makes the
            // export session fail.
            _ = try? FileManager.default.removeItem(at: url)

            guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
            // BUG FIX: the output path ends in ".mov" but the file type was
            // AVFileTypeMPEG4. AVAssetExportSession fails when the URL's
            // path extension does not match the output container type, so
            // use QuickTime to match the ".mov" extension.
            exporter.outputFileType = AVFileTypeQuickTimeMovie
            exporter.outputURL = url
            exporter.videoComposition = mainComposition

            exporter.exportAsynchronously(completionHandler: {
                DispatchQueue.main.async(execute: {
                    self.exportDidFinish(exporter)
                    self.removeAI()
                    completion()
                })
            })
        }
    }

    /// Logs the outcome of a finished export session.
    func exportDidFinish(_ exporter:AVAssetExportSession) {
         if(exporter.status == AVAssetExportSessionStatus.completed) {
            print("cool")
        }
        else if(exporter.status == AVAssetExportSessionStatus.failed) {
            print(exporter.error as Any)
        }
    }

    /// Builds a layer instruction that scales and positions `asset`'s first
    /// video track inside the main asset's render rectangle.
    /// The transform math is order-sensitive and is preserved exactly from
    /// the original; only the repeated main-track lookups were hoisted into
    /// locals with identical values.
    func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, _ asset: AVAsset, type:Bool = false) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

        // Main asset's video track, fetched once. The force unwrap mirrors
        // the original; this method is only reached from merge(), which has
        // already confirmed firstAsset exists.
        let mainSize = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize

        var transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform)
        let width = mainSize.width/assetTrack.naturalSize.width
        var height = mainSize.height/assetTrack.naturalSize.height

        if assetInfo.isPortrait {
            //Vert Video taken from camera -- vert video from lib
            height = mainSize.height/assetTrack.naturalSize.width
            transform = transform.scaledBy(x: height, y: height)
            let movement = ((1/height)*assetTrack.naturalSize.height)-assetTrack.naturalSize.height
            transform = transform.translatedBy(x: 0, y: movement)
            let totalBlackDistance = mainSize.width-transform.tx
            transform = transform.translatedBy(x: 0, y: -(totalBlackDistance/2)*(1/height))

        } else {
            //Main Video -- hor photo from camera -- hor video from camera -- hor photo frmo lib -- hor vid frmo lib -- vert photos lib - vert photos camera
            transform = transform.scaledBy(x: width, y: height)
            // `height` here equals mainSize.height/assetTrack.naturalSize.height,
            // so these are the original expressions with that ratio substituted.
            let scale:CGFloat = (height*assetTrack.naturalSize.width)/mainSize.width
            transform = transform.scaledBy(x: scale, y: 1)
            // Center horizontally: half the leftover width, expressed in the
            // track's (pre-scale) coordinate space.
            let movement = ((mainSize.width-(height*assetTrack.naturalSize.width))/2)*(1/height)
            transform = transform.translatedBy(x: movement, y: 0)
        }
        instruction.setTransform(transform, at: kCMTimeZero)
        return instruction
    }

    /// Decodes a track's preferred transform into a UI orientation and a
    /// portrait flag, by matching the four axis-aligned rotation matrices.
    func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right      // 90° clockwise
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left       // 90° counter-clockwise
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up         // identity
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down       // 180°
        }

        return (assetOrientation, isPortrait)
    }
}

在我自己的手机（iPhone 6）上，这段代码从未崩溃过，我也从未遇到过问题。

我的一位测试人员使用的是 iPhone 5，在导出过程中会随机崩溃。当他的手机崩溃时，应用并不像被系统终止那样退出——它直接完全关闭，没有任何提示。崩溃报告也没有发送到 Xcode 的 Organizer 窗口（正常情况下应该会出现），看起来就像什么问题都没有发生过。测试的另一个难点是，崩溃出现的时机每次都不一样：即使选择了相同的资产，它也不是每次都崩溃。如果我能用某种标准化的方法稳定地复现这个问题，事情就好办多了。但目前不太确定接下来该怎么做。

这位测试人员不在我身边——我是通过 Xcode 的 Organizer（管理器）窗口获取崩溃报告的。

潜在的解决方案：

内存：我在想这可能是内存问题，因为我知道如果内存占用过高，系统会直接终止应用程序。然而，`didReceiveMemoryWarning` 并没有被调用，应用程序就这样被终止了。

有什么建议/解决方案吗?所有的帮助都很感激。

EN

回答 1

Stack Overflow用户

发布于 2018-09-06 18:32:36

Xcode 的 Organizer 并不会为您的应用程序提供所有崩溃报告，只有 Apple 知道它们如何挑选哪些提供、哪些不提供。通过 TestFlight 进行测试时，如果用户同意向开发人员提供崩溃报告数据，报告也可能需要几天时间才能到达。

即使该应用程序由于另一个原因而被操作系统关闭,例如内存消耗过大,如果提供该应用程序的所有要求都是肯定的,则会向该设备写入崩溃报告,并提供给Apple,例如用户批准提供这些数据。

如果没有报告,就不可能说出发生了什么和原因,因此,关于内存使用问题的假设是空想的,而声明的原因是无效的。

你需要拿到崩溃报告并将其符号化（symbolicate）。用户可以在设备上找到崩溃报告，方法是导航到"设置 > 通用 > 关于本机 > 诊断与用量"，然后复制内容、粘贴到电子邮件中发送给您。

票数 0
EN
页面原文内容由Stack Overflow提供。腾讯云小微IT领域专用引擎提供翻译支持
原文链接:

https://stackoverflow.com/questions/52189091

复制
相关文章

相似问题

领券
问题归档专栏文章快讯文章归档关键词归档开发者手册归档开发者手册 Section 归档