Xamarin.iOS-合并多个视频时为每个视频设置音频采样率

时间:2018-11-06 17:50:56

标签: c# audio xamarin.forms xamarin.ios avfoundation

尝试使用AVAssetTrack和AVMutableCompositionTrack拼接多个视频。上传的视频可能具有不同的音频设置(例如采样率)-将AVAssetTracks添加到AVMutableComposition时,传递所需音频设置的最佳方法是什么?当前使用以下代码,在IE / Edge上播放时,某些48KHz与44.1KHz缝合的视频不会发出声音,但在其他浏览器上可以播放

// Stitch the clips in SelectedVideoLocations end-to-end into a single
// AVMutableComposition (one video track, one audio track), then export the
// result as an MP4 via AVAssetExportSession.
Composition = new AVMutableComposition();
AVMutableCompositionTrack VideoCompositionTrack = Composition.AddMutableTrack(AVMediaType.Video, 0);
AVMutableCompositionTrack AudioCompositionTrack = Composition.AddMutableTrack(AVMediaType.Audio, 1);
AVMutableVideoCompositionLayerInstruction LayerInstruction = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(VideoCompositionTrack);

CMTime StartTime = CMTime.Zero;
AVUrlAssetOptions Options = new AVUrlAssetOptions
{
    // Precise duration/timing is needed so the CMTime arithmetic below lines
    // clips up without gaps or overlaps, especially when the sources use
    // different audio sample rates (48 kHz vs 44.1 kHz).
    PreferPreciseDurationAndTiming = true
};
CMTimeRange TimeRange;
NSError InsertError = null;
int Counter = 0;
CGSize FinalRenderSize = new CGSize();

foreach (NSUrl VideoLocation in SelectedVideoLocations)
{
    using (AVAsset Asset = new AVUrlAsset(VideoLocation, Options))
    {
        // Each clip is inserted in full, starting at the running StartTime.
        TimeRange = new CMTimeRange()
        {
            Start = CMTime.Zero,
            Duration = Asset.Duration
        };

        if (Asset.TracksWithMediaType(AVMediaType.Video).Length > 0)
        {
            using (AVAssetTrack VideoAssetTrack = Asset.TracksWithMediaType(AVMediaType.Video)[0])
            {
                // The output render size is taken from the first clip only.
                if (Counter == 0)
                {
                    FinalRenderSize = VideoAssetTrack.NaturalSize;
                }

                LayerInstruction.SetTransform(VideoAssetTrack.PreferredTransform, StartTime);
                VideoCompositionTrack.InsertTimeRange(TimeRange, VideoAssetTrack, StartTime, out InsertError);
                // Surface insertion failures instead of silently producing a
                // composition with missing video.
                if (InsertError != null)
                {
                    throw new Exception(InsertError.LocalizedDescription);
                }
            }
        }

        if (Asset.TracksWithMediaType(AVMediaType.Audio).Length > 0)
        {
            using (AVAssetTrack AudioAssetTrack = Asset.TracksWithMediaType(AVMediaType.Audio)[0])
            {
                // FIX: the original code called LayerInstruction.SetTransform
                // with the AUDIO track's PreferredTransform here, clobbering the
                // video transform set above. A geometric transform has no meaning
                // for audio; the audio track only needs to be inserted.
                // NOTE(review): the export preset re-encodes all audio into a
                // single stream, which should normalize mixed 48 kHz / 44.1 kHz
                // sources — if IE/Edge still drops audio, inspect the exported
                // file's audio stream parameters to confirm.
                AudioCompositionTrack.InsertTimeRange(TimeRange, AudioAssetTrack, StartTime, out InsertError);
                if (InsertError != null)
                {
                    throw new Exception(InsertError.LocalizedDescription);
                }
            }
        }

        // Advance the insertion point by the whole asset duration so the next
        // clip starts exactly where this one ends.
        StartTime = CMTime.Add(StartTime, Asset.Duration);
        Counter++;
    }
}

// One instruction covering the whole stitched video track, driven by the
// single layer instruction that carries each clip's transform.
AVMutableVideoCompositionInstruction MainInstruction = new AVMutableVideoCompositionInstruction
{
    TimeRange = VideoCompositionTrack.TimeRange,
    LayerInstructions = new AVVideoCompositionLayerInstruction[1] { LayerInstruction }
};

AVMutableVideoComposition CompositionInstruction = AVMutableVideoComposition.Create(Composition);
CompositionInstruction.Instructions = new AVMutableVideoCompositionInstruction[1] { MainInstruction };
CompositionInstruction.FrameDuration = new CMTime(1, 30); // 30 fps output
CompositionInstruction.RenderScale = 1.0f;
CompositionInstruction.RenderSize = FinalRenderSize;
string FilePath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Personal), FolderName, FileName);
// Remove any previous export at the same path; AVAssetExportSession fails if
// the output file already exists.
await LocalStorage.DeleteFileAsync(FilePath);
NSUrl FilePathURL = NSUrl.CreateFileUrl(new string[] { FilePath });
MediaURL = FilePath;

// Dispose the export session deterministically once the export finishes.
using (AVAssetExportSession ExportSession = new AVAssetExportSession(Composition, AVAssetExportSessionPreset.Preset960x540))
{
    if (CompositionInstruction != null)
    {
        ExportSession.VideoComposition = CompositionInstruction;
    }
    ExportSession.ShouldOptimizeForNetworkUse = true;
    ExportSession.OutputUrl = FilePathURL;
    ExportSession.OutputFileType = AVFileType.Mpeg4;

    await ExportSession.ExportTaskAsync();
    if (ExportSession.Status != AVAssetExportSessionStatus.Completed)
    {
        // Guard against a null Error (e.g. cancelled exports) and report the
        // user-readable description rather than DebugDescription.
        throw new Exception(ExportSession.Error?.LocalizedDescription ?? "Video export did not complete.");
    }
}

0 个答案:

没有答案