swift – 无法使用AVVideoCompositionCoreAnimationTool在视频中叠加动画图层
更新6:
我已经设法完全解决了我的问题,但我仍然希望得到一个更好的解释(而不只是我自己的猜测)来说明为什么之前的做法不起作用. 我一直试图在视频上叠加精灵表动画,但每次导出视频时,最终结果都只是我最初的示例视频. 这是我的代码: 首先是我的自定义CALayer,用来处理我自己的精灵表 class SpriteLayer: CALayer { var frameIndex: Int override init() { // Using 0 as a default state self.frameIndex = 0 super.init() } required init?(coder aDecoder: NSCoder) { self.frameIndex = 0 super.init(coder: aDecoder) } override func display() { let currentFrameIndex = self.frameIndex if currentFrameIndex == 0 { return } let frameSize = self.contentsRect.size self.contentsRect = CGRect(x: 0, y: CGFloat(currentFrameIndex - 1) * frameSize.height, width: frameSize.width, height: frameSize.height) } override func action(forKey event: String) -> CAAction? { if event == "contentsRect" { return nil } return super.action(forKey: event) } override class func needsDisplay(forKey key: String) -> Bool { return key == "frameIndex" } } Gif是一个没有花哨功能的基本类,工作得很好. gif.strip是表示gif的垂直精灵表的UIImage. 下面是应该导出新视频的方法(它是用于导出的一个更大的类的一部分). func convertAndExport(to url: URL, completion: @escaping () -> Void) { // Get Initial info and make sure our destination is available self.outputURL = url let stripCgImage = self.gif.strip!.cgImage! // This is used to time how long the export took let start = DispatchTime.now() do { try FileManager.default.removeItem(at: outputURL) } catch { print("Remove Error: \(error.localizedDescription)") print(error) } // Find and load "sample.mp4" as a AVAsset let videoPath = Bundle.main.path(forResource: "sample", ofType: "mp4")! let videoUrl = URL(fileURLWithPath: videoPath) let videoAsset = AVAsset(url: videoUrl) // Start a new mutable Composition with the same base video track let mixComposition = AVMutableComposition() let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)! let clipVideoTrack = videoAsset.tracks(withMediaType: .video).first! 
do { try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero) } catch { print("Insert Error: \(error.localizedDescription)") print(error) return } compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform // Quick access to the video size let videoSize = clipVideoTrack.naturalSize // Setup CALayer and it's animation let aLayer = SpriteLayer() aLayer.contents = stripCgImage aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) aLayer.opacity = 1.0 aLayer.masksToBounds = true aLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) aLayer.contentsRect = CGRect(x: 0, y: 0, width: 1, height: 1.0 / 3.0) let spriteAnimation = CABasicAnimation(keyPath: "frameIndex") spriteAnimation.fromValue = 1 spriteAnimation.toValue = 4 spriteAnimation.duration = 2.25 spriteAnimation.repeatCount = .infinity spriteAnimation.autoreverses = false spriteAnimation.beginTime = AVCoreAnimationBeginTimeAtZero aLayer.add(spriteAnimation, forKey: nil) // Setup Layers for AVVideoCompositionCoreAnimationTool let parentLayer = CALayer() let videoLayer = CALayer() parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) parentLayer.addSublayer(videoLayer) parentLayer.addSublayer(aLayer) // Create the mutable video composition let videoComp = AVMutableVideoComposition() videoComp.renderSize = videoSize videoComp.frameDuration = CMTimeMake(1, 30) videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer) // Set the video composition to apply to the composition's video track let instruction = AVMutableVideoCompositionInstruction() instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration) let videoTrack = mixComposition.tracks(withMediaType: .video).first! 
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) instruction.layerInstructions = [layerInstruction] videoComp.instructions = [instruction] // Initialize export session let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)! assetExport.videoComposition = videoComp assetExport.outputFileType = AVFileType.mp4 assetExport.outputURL = self.outputURL assetExport.shouldOptimizeForNetworkUse = true // Export assetExport.exportAsynchronously { let status = assetExport.status switch status { case .failed: print("Export Failed") print("Export Error: \(assetExport.error!.localizedDescription)") print(assetExport.error!) case .unknown: print("Export Unknown") case .exporting: print("Export Exporting") case .waiting: print("Export Waiting") case .cancelled: print("Export Cancelled") case .completed: let end = DispatchTime.now() let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds let timeInterval = Double(nanoTime) / 1_000_000_000 // Function is now over, we can print how long it took print("Time to generate video: \(timeInterval) seconds") completion() } } } 编辑: > SpriteLayer and how to use it 更新1: 更新2: let aLayer = CATextLayer() aLayer.string = "This is a test" aLayer.fontSize = videoSize.height / 6 aLayer.alignmentMode = kCAAlignmentCenter aLayer.foregroundColor = UIColor.white.cgColor aLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height / 6) 更新3: convenience init(postProcessingAsVideoLayer videoLayer: CALayer, in animationLayer: CALayer) 意思是我的parentLayer是它的animationLayer,可能意味着任何动画都应该在这一层完成. 
更新4: let aLayer = CALayer() aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) aLayer.backgroundColor = UIColor.white.cgColor 好吧,这绝对没有任何效果,我在我的outputUrl上得到的仍然是原始的示例视频(如果你想自己“玩一玩”,我开始在Playground里用以下代码测试它) import PlaygroundSupport import UIKit import Foundation import AVFoundation func convertAndExport(to url: URL, completion: @escaping () -> Void) { let start = DispatchTime.now() do { try FileManager.default.removeItem(at: url) } catch { print("Remove Error: \(error.localizedDescription)") print(error) } let videoPath = Bundle.main.path(forResource: "sample", ofType: "mp4")! let videoUrl = URL(fileURLWithPath: videoPath) let videoAsset = AVURLAsset(url: videoUrl) let mixComposition = AVMutableComposition() let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)! let clipVideoTrack = videoAsset.tracks(withMediaType: .video).first! do { try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero) } catch { print("Insert Error: \(error.localizedDescription)") print(error) return } compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform let videoSize = clipVideoTrack.naturalSize print("Video Size Detected: \(videoSize.width) x \(videoSize.height)") let aLayer = CALayer() aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) aLayer.backgroundColor = UIColor.white.cgColor let parentLayer = CALayer() let videoLayer = CALayer() parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) parentLayer.addSublayer(videoLayer) parentLayer.addSublayer(aLayer) aLayer.setNeedsDisplay() let videoComp = AVMutableVideoComposition() videoComp.renderSize = videoSize videoComp.frameDuration = CMTimeMake(1, 30) videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer) let instruction = AVMutableVideoCompositionInstruction() instruction.timeRange = 
CMTimeRangeMake(kCMTimeZero, mixComposition.duration) let videoTrack = mixComposition.tracks(withMediaType: .video).first! let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) instruction.layerInstructions = [layerInstruction] videoComp.instructions = [instruction] let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)! assetExport.videoComposition = videoComp assetExport.outputFileType = AVFileType.mp4 assetExport.outputURL = url assetExport.shouldOptimizeForNetworkUse = true assetExport.exportAsynchronously { let status = assetExport.status switch status { case .failed: print("Export Failed") print("Export Error: \(assetExport.error!.localizedDescription)") print(assetExport.error!) case .unknown: print("Export Unknown") case .exporting: print("Export Exporting") case .waiting: print("Export Waiting") case .cancelled: print("Export Cancelled") case .completed: let end = DispatchTime.now() let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds let timeInterval = Double(nanoTime) / 1_000_000_000 print("Time to generate video: \(timeInterval) seconds") completion() } } } let outputUrl = FileManager.default.temporaryDirectory.appendingPathComponent("test.mp4") convertAndExport(to: outputUrl) { print(outputUrl) } 请有人帮我理解我做错了什么…… 更新5: 解决方法
好的,终于让它按照我一直想要的方式工作.
首先,即使他已经删除了他的评论,也要感谢Matt链接到一个可工作的示例,帮助我找出了我代码中的错误. >首先 let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)! 我需要使用AVAssetExportPresetHighestQuality而不是AVAssetExportPresetPassthrough.我的猜测是,直通(passthrough)预设意味着不进行任何重新编码,因此将其设置为最高质量(因为我导出的视频超过400×400,所以没有选中等质量)使得导出时可以真正重新编码我的视频.我猜这就是之前导出的视频不包含我尝试添加的任何CALayer(甚至用白色图层覆盖视频也不行)的原因. >其次(不确定这是否真的有影响,我稍后会再试) parentLayer.addSublayer(aLayer) 我将其替换为: videoLayer.addSublayer(aLayer) 不确定这是否真的重要,但我的理解是,videoLayer才是AVVideoCompositionCoreAnimationTool实际进行动画的图层,而parentLayer只是一个容器,并不意味着包含更多内容,但我可能错了. >我做的第三个改动 let spriteAnimation = CABasicAnimation(keyPath: "frameIndex") spriteAnimation.fromValue = 1 spriteAnimation.toValue = 4 spriteAnimation.duration = 2.25 spriteAnimation.repeatCount = .infinity spriteAnimation.autoreverses = false spriteAnimation.beginTime = AVCoreAnimationBeginTimeAtZero aLayer.add(spriteAnimation, forKey: nil) 我改成了这个: let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contentsRect)) animation.duration = 2.25 animation.calculationMode = kCAAnimationDiscrete animation.repeatCount = .infinity animation.values = [ CGRect(x: 0, y: 0, width: 1, height: 1/3.0), CGRect(x: 0, y: 1/3.0, width: 1, height: 1/3.0), CGRect(x: 0, y: 2/3.0, width: 1, height: 1/3.0) ] as [CGRect] animation.beginTime = AVCoreAnimationBeginTimeAtZero animation.fillMode = kCAFillModeBackwards animation.isRemovedOnCompletion = false aLayer.add(animation, forKey: nil) 这个改动主要是去掉了我为精灵表写的自定义动画(因为它的内容总是一样的,我首先想要一个能工作的例子,之后再把它泛化,并可能把它加到我的私有UI Pod里).但最重要的是animation.isRemovedOnCompletion = false:我注意到去掉这一行会使动画在导出的视频上根本不播放.因此,如果有人的CABasicAnimation在导出后没有对视频进行动画处理,请先检查你的动画上是否正确设置了isRemovedOnCompletion. 我认为这差不多就是我所做的全部改动. 虽然我在技术上回答了自己的问题,但我仍然想理解AVVideoCompositionCoreAnimationTool和AVAssetExportSession究竟是如何工作的,以及为什么必须做这些更改才能最终让它工作;如果有人有兴趣解释,欢迎补充. 再次感谢Matt,你通过向我展示你是如何做到的来帮助了我. (编辑:李大同) 【声明】本站内容均来自网络,其相关言论仅代表作者个人观点,不代表本站立场。若无意侵犯到您的权利,请及时与站长联系删除相关内容!