swift – AVAssetWriter continuous segments

I want to record a series of clips that play back seamlessly when played together, either in a video player or after joining them with ffmpeg -f concat.

In either case, I get a very noticeable audio hiccup at every segment join point.

My current strategy is to maintain two AVAssetWriter instances. At each cut-off point I start a new writer, wait until it is ready, and then begin feeding it samples. Once the video and audio samples are complete up to that point in time, I close out the previous writer.

How can I modify this to get continuous clip recording, and what is the root cause of the problem?

import Foundation
import UIKit
import AVFoundation

class StreamController: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
    @IBOutlet weak var previewView: UIView!

    var closingVideoInput: AVAssetWriterInput?
    var closingAudioInput: AVAssetWriterInput?
    var closingAssetWriter: AVAssetWriter?

    var currentVideoInput: AVAssetWriterInput?
    var currentAudioInput: AVAssetWriterInput?
    var currentAssetWriter: AVAssetWriter?

    var nextVideoInput: AVAssetWriterInput?
    var nextAudioInput: AVAssetWriterInput?
    var nextAssetWriter: AVAssetWriter?

    var previewLayer: AVCaptureVideoPreviewLayer?
    var videoHelper: VideoHelper?

    var startTime: NSTimeInterval = 0
    override func viewDidLoad() {
        super.viewDidLoad()
        startTime = NSDate().timeIntervalSince1970
        createSegmentWriter()
        videoHelper = VideoHelper()
        videoHelper!.delegate = self
        videoHelper!.startSession()
        NSTimer.scheduledTimerWithTimeInterval(5, target: self, selector: "createSegmentWriter", userInfo: nil, repeats: true)
    }

    func createSegmentWriter() {
        print("Creating segment writer at t=(NSDate().timeIntervalSince1970 - self.startTime)")
        nextAssetWriter = try! AVAssetWriter(URL: NSURL(fileURLWithPath: OutputFileNameHelper.instance.pathForOutput()), fileType: AVFileTypeMPEG4)
        nextAssetWriter!.shouldOptimizeForNetworkUse = true

        let videoSettings: [String: AnyObject] = [AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: 960, AVVideoHeightKey: 540]
        nextVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        nextVideoInput!.expectsMediaDataInRealTime = true
        nextAssetWriter?.addInput(nextVideoInput!)

        let audioSettings: [String: AnyObject] = [
                AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
                AVSampleRateKey: 44100.0,
                AVNumberOfChannelsKey: 2,
        ]
        nextAudioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
        nextAudioInput!.expectsMediaDataInRealTime = true
        nextAssetWriter?.addInput(nextAudioInput!)

        nextAssetWriter!.startWriting()
    }

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        previewLayer = AVCaptureVideoPreviewLayer(session: videoHelper!.captureSession)
        previewLayer!.frame = self.previewView.bounds
        previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
        if ((previewLayer?.connection?.supportsVideoOrientation) != nil) {
            previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
        }
        self.previewView.layer.addSublayer(previewLayer!)
    }

    func closeWriter() {
        if videoFinished && audioFinished {
            let outputFile = closingAssetWriter?.outputURL.pathComponents?.last
            closingAssetWriter?.finishWritingWithCompletionHandler() {
                let delta = NSDate().timeIntervalSince1970 - self.startTime
                print("segment (outputFile) finished at t=(delta)")
            }
            self.closingAudioInput = nil
            self.closingVideoInput = nil
            self.closingAssetWriter = nil
            audioFinished = false
            videoFinished = false
        }
    }

    func closingVideoFinished() {
        if closingVideoInput != nil {
            videoFinished = true
            closeWriter()
        }
    }

    func closingAudioFinished() {
        if closingAudioInput != nil {
            audioFinished = true
            closeWriter()
        }
    }

    var closingTime: CMTime = kCMTimeZero
    var audioFinished = false
    var videoFinished = false
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection!) {
        let sampleTime: CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        if let nextWriter = nextAssetWriter {
            if nextWriter.status.rawValue != 0 {
                print("Switching asset writers at t=(NSDate().timeIntervalSince1970 - self.startTime)")

                closingAssetWriter = currentAssetWriter
                closingVideoInput = currentVideoInput
                closingAudioInput = currentAudioInput

                currentAssetWriter = nextAssetWriter
                currentVideoInput = nextVideoInput
                currentAudioInput = nextAudioInput

                nextAssetWriter = nil
                nextVideoInput = nil
                nextAudioInput = nil

                closingTime = sampleTime
                currentAssetWriter!.startSessionAtSourceTime(sampleTime)
            }
        }

        if currentAssetWriter != nil {
            if let _ = captureOutput as? AVCaptureVideoDataOutput {
                if (CMTimeCompare(sampleTime, closingTime) < 0) {
                    if closingVideoInput?.readyForMoreMediaData == true {
                        closingVideoInput?.appendSampleBuffer(sampleBuffer)
                    }
                } else {
                    closingVideoFinished()
                    if currentVideoInput?.readyForMoreMediaData == true {
                        currentVideoInput?.appendSampleBuffer(sampleBuffer)
                    }
                }

            } else if let _ = captureOutput as? AVCaptureAudioDataOutput {
                if (CMTimeCompare(sampleTime, closingTime) < 0) {
                    if currentAudioInput?.readyForMoreMediaData == true {
                        currentAudioInput?.appendSampleBuffer(sampleBuffer)
                    }
                } else {
                    closingAudioFinished()
                    if currentAudioInput?.readyForMoreMediaData == true {
                        currentAudioInput?.appendSampleBuffer(sampleBuffer)
                    }
                }
            }
        }
    }

    override func shouldAutorotate() -> Bool {
        return true
    }

    override func supportedInterfaceOrientations() -> UIInterfaceOrientationMask {
        return [UIInterfaceOrientationMask.LandscapeRight]
    }
}

Solution

I think the root cause is that the video and audio CMSampleBuffers cover different time intervals. You need to split and join the audio CMSampleBuffers so that they slot seamlessly into the AVAssetWriter's timeline, which is presumably based on the video presentation timestamps.

Why does the audio have to change and not the video? It seems asymmetric, but I suppose it is because audio has the higher sample rate.

P.S. Actually, creating the new split sample buffers looks daunting: CMSampleBufferCreate takes a ton of arguments. CMSampleBufferCopySampleBufferForRange is probably easier to use, and more efficient.
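
To make that concrete: AVCaptureAudioDataOutput hands you uncompressed LPCM buffers in which each CMSampleBuffer sample is a single audio frame, and a 1024-frame buffer at 44.1 kHz spans roughly 23 ms while a 30 fps video frame spans about 33 ms, so the cut time will usually land somewhere inside an audio buffer. The sketch below, written in the same Swift 2 style as the question's code, shows how CMSampleBufferCopySampleBufferForRange might be used to split the straddling buffer; the helper name and its return tuple are illustrative, not part of the original answer.

// A rough sketch (not from the original answer) of splitting the audio buffer
// that straddles the cut time. `cutTime` corresponds to `closingTime` above.
func splitAudioSampleBuffer(buffer: CMSampleBuffer, atTime cutTime: CMTime) -> (before: CMSampleBuffer?, after: CMSampleBuffer?) {
    let pts = CMSampleBufferGetPresentationTimeStamp(buffer)
    let duration = CMSampleBufferGetDuration(buffer)
    let sampleCount = CMSampleBufferGetNumSamples(buffer)

    // Work out how many audio frames fall before the cut, as a fraction of the buffer's duration.
    let elapsed = CMTimeGetSeconds(CMTimeSubtract(cutTime, pts))
    let fraction = elapsed / CMTimeGetSeconds(duration)
    let splitIndex = max(0, min(sampleCount, Int(Double(sampleCount) * fraction)))

    // Leading frames belong to the closing writer...
    var before: CMSampleBuffer?
    if splitIndex > 0 {
        CMSampleBufferCopySampleBufferForRange(kCFAllocatorDefault, buffer, CFRangeMake(0, splitIndex), &before)
    }

    // ...and the trailing frames belong to the new writer.
    var after: CMSampleBuffer?
    if splitIndex < sampleCount {
        CMSampleBufferCopySampleBufferForRange(kCFAllocatorDefault, buffer, CFRangeMake(splitIndex, sampleCount - splitIndex), &after)
    }

    return (before: before, after: after)
}

The portion before the cut would then be appended to closingAudioInput, and the remainder, once the new writer's session has been started at the cut time, to currentAudioInput.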
