Merging two video files in an iPhone application
In one of my applications I need to insert some images into the middle of a video. So I cut the video into two parts and build a third video from the images. Now I want to join these three video files into a single one, but I don't know how to combine them. I have seen some code here, but it did not help me. For splitting the video and for making a video from the images I currently use the code below; what I need is code that merges all of these videos.
Any other idea for putting a snapshot of the current view between the two halves of the video would also be welcome.

Splitting the video file:

NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Affagogato" ofType:@"mp4"]];
AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:url options:nil];

for (int i = 0; i < 2; i++) {
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:anAsset presetName:AVAssetExportPresetLowQuality];

    // Pick a temporary output path that does not exist yet.
    NSString *filePath = nil;
    NSUInteger count = 0;
    do {
        filePath = NSTemporaryDirectory();
        NSString *numberString = count > 0 ? [NSString stringWithFormat:@"-%i", (int)count] : @"";
        filePath = [filePath stringByAppendingPathComponent:[NSString stringWithFormat:@"Output-%@.mov", numberString]];
        count++;
    } while ([[NSFileManager defaultManager] fileExistsAtPath:filePath]);

    exportSession.outputURL = [NSURL fileURLWithPath:filePath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;

    CMTimeRange range;
    if (i == 0) {
        // First segment: 0 s to 10 s.
        CMTime start = CMTimeMakeWithSeconds(0.0, 600);
        CMTime duration = CMTimeMakeWithSeconds(10.0, 600);
        range = CMTimeRangeMake(start, duration);
    } else {
        // Second segment: 10 s to the end of the asset.
        // (The original snippet left `range` unset on this branch;
        // this reconstruction follows the obvious intent.)
        CMTime start = CMTimeMakeWithSeconds(10.0, 600);
        range = CMTimeRangeMake(start, CMTimeSubtract(anAsset.duration, start));
    }
    exportSession.timeRange = range;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exportSession Tag:i];
        });
    }];
}
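One thing worth flagging in the splitting code: both exports run asynchronously, so the later merge step must not start until both completion handlers have fired. Below is a minimal coordination sketch using a dispatch group (ARC assumed; `ExportSegments` and its parameters are illustrative names, not part of the original code):

#import <AVFoundation/AVFoundation.h>

// Export each requested time range of `asset` to the matching URL, then
// invoke `completion` once every asynchronous export has finished.
static void ExportSegments(AVAsset *asset,
                           NSArray *ranges,     // CMTimeRanges boxed via +[NSValue valueWithCMTimeRange:]
                           NSArray *outputURLs, // one NSURL per range
                           dispatch_block_t completion)
{
    dispatch_group_t group = dispatch_group_create();
    for (NSUInteger i = 0; i < [ranges count]; i++) {
        AVAssetExportSession *session =
            [[AVAssetExportSession alloc] initWithAsset:asset
                                             presetName:AVAssetExportPresetLowQuality];
        session.outputURL = [outputURLs objectAtIndex:i];
        session.outputFileType = AVFileTypeQuickTimeMovie;
        session.timeRange = [[ranges objectAtIndex:i] CMTimeRangeValue];

        dispatch_group_enter(group);
        [session exportAsynchronouslyWithCompletionHandler:^{
            dispatch_group_leave(group); // one segment done
        }];
    }
    // Runs on the main queue only after every segment has been written,
    // which is the safe point to start merging.
    dispatch_group_notify(group, dispatch_get_main_queue(), completion);
}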
Making a video from the images:

CGRect rect = CGRectMake(0, 0, 320, 480);
view = [[UIView alloc] initWithFrame:rect];

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
NSString *path = [documentsDirectory stringByAppendingPathComponent:[@"video2" stringByAppendingString:@".mov"]];
CGSize size = self.view.frame.size;

NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:320] forKey:(NSString *)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:480] forKey:(NSString *)kCVPixelBufferHeightKey];

NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                       fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                               nil];
AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];

// Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

// Convert a UIImage to a CVPixelBuffer and append it at time zero.
xPixel = 0;
yPixel = 250;
buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]];
CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

for (int i = 0; i < 2; i++) {
    if ([writerInput isReadyForMoreMediaData]) {
        for (int pframetime = 1; pframetime <= 2; pframetime++) {
            CMTime frameTime = CMTimeMake(pframetime, 25);
            CMTime lastTime = CMTimeMake(i, 1);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);
            if (i == 0)
                buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]];
            else
                buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"2.jpeg"] CGImage]];
            // Block until the writer input can take another buffer.
            while (![writerInput isReadyForMoreMediaData]) {
                [NSThread sleepForTimeInterval:0.05];
            }
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            i++;
        }
        if (buffer)
            CVBufferRelease(buffer);
    }
}

[writerInput markAsFinished];
[videoWriter finishWriting];
[videoPathArray addObject:path];

// Finish the session:
[videoWriter release];
[writerInput release];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
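The code above calls a `pixelBufferFromCGImage:` helper that the question does not show. For completeness, here is a common shape for such a helper (a sketch of the usual implementation from that era, not necessarily the asker's exact version):

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], (NSString *)kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                        kCVPixelFormatType_32ARGB,
                        (CFDictionaryRef)options, &pxbuffer);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    // Draw the CGImage into a bitmap context backed by the pixel buffer.
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGContextRelease(context);
    CGColorSpaceRelease(rgbColorSpace);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer; // caller releases with CVBufferRelease
}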
For merging the video files I tried the code below, but it does not work; there are blank screens between the videos:

AVMutableComposition *mixComposition = [AVMutableComposition composition];

NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];

NSString *video_inputFilePath1 = [videoPathArray objectAtIndex:1];
NSURL *video_inputFileUrl1 = [NSURL fileURLWithPath:video_inputFilePath1];
NSString *video_inputFilePath2 = [videoPathArray objectAtIndex:0];
NSURL *video_inputFileUrl2 = [NSURL fileURLWithPath:video_inputFilePath2];
NSString *video_inputFilePath3 = [videoPathArray objectAtIndex:2];
NSURL *video_inputFileUrl3 = [NSURL fileURLWithPath:video_inputFilePath3];

NSString *outputFileName = @"outputFile.mov";
NSString *outputFilePath = [NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, outputFileName];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset *videoAsset1 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl1 options:nil];
AVURLAsset *videoAsset2 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl2 options:nil];
AVURLAsset *videoAsset3 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl3 options:nil];

CMTimeRange video_timeRange1 = CMTimeRangeMake(kCMTimeZero, videoAsset1.duration);
AVMutableCompositionTrack *a_compositionVideoTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack1 insertTimeRange:video_timeRange1 ofTrack:[[videoAsset1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

CMTimeRange video_timeRange3 = CMTimeRangeMake(nextClipStartTime, videoAsset3.duration);
[a_compositionVideoTrack1 insertTimeRange:video_timeRange3 ofTrack:[[videoAsset3 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoAsset1.duration error:nil];

CMTimeRange video_timeRange2 = CMTimeRangeMake(nextClipStartTime, videoAsset1.duration);
[a_compositionVideoTrack1 insertTimeRange:video_timeRange2 ofTrack:[[videoAsset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoAsset1.duration error:nil];

AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality];
_assetExport.shouldOptimizeForNetworkUse = YES;
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
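As posted, the snippet stops after configuring `_assetExport`; presumably the export is then started the same way as in the splitting code, along these lines:

[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // Inspect _assetExport.status / _assetExport.error here.
}];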
Solution

Try watching the session called "Working with Media in AV Foundation" on the Apple developer portal. It shows how to do exactly what you are describing.
https://developer.apple.com/videos/wwdc/2011/
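For reference, a plausible cause of the blank frames in the merge code above: `nextClipStartTime` is never advanced, so the second and third clips are inserted with overlapping insertion times and mismatched ranges, which can leave empty stretches in the composition. Below is a minimal sketch of sequential merging (assuming `videoPathArray` holds the three clip paths in playback order and `outputFileUrl` is built as in the question):

AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *track =
    [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                             preferredTrackID:kCMPersistentTrackID_Invalid];

CMTime insertTime = kCMTimeZero;
for (NSString *clipPath in videoPathArray) {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:clipPath] options:nil];
    AVAssetTrack *clipTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // Take the whole clip, measured from the clip's own time zero.
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                   ofTrack:clipTrack
                    atTime:insertTime
                     error:nil];

    // Advance the cursor so the next clip starts exactly where this one
    // ends; any gap here would play back as blank frames.
    insertTime = CMTimeAdd(insertTime, asset.duration);
}

AVAssetExportSession *exporter =
    [[AVAssetExportSession alloc] initWithAsset:composition
                                     presetName:AVAssetExportPresetLowQuality];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.outputURL = outputFileUrl;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    // Check exporter.status before using the merged file.
}];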