
Mixing video streams on iOS

I need to mix two video streams from different inputs into one video file. I want to embed the frames of one video inside the frames of the other.

My sources are a screen recording of the iOS app and the front camera.

The output should be a video file, in any format. How can this be done?

Answer
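This merges the two videos sequentially, one after the other, rather than overlaying them. The code builds an AVMutableComposition with the two clips on separate video tracks, works out each clip's orientation from its preferredTransform, scales each clip to a 320-point width, and exports the result with AVAssetExportSession. A sketch of a true picture-in-picture overlay, closer to what the question asks for, follows after the listing.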
    NSString *str_first = [NSString stringWithFormat:@"%@", [ary_urls objectAtIndex:0]];
    NSString *str_secnd = [NSString stringWithFormat:@"%@", [ary_urls objectAtIndex:1]];
    NSString *str_third = [NSString stringWithFormat:@"%@", [ary_urls objectAtIndex:2]];
    NSURL *url_frst = [[NSURL alloc] initWithString:str_first];
    NSURL *url_second = [[NSURL alloc] initWithString:str_secnd];
    NSURL *url_third = [[NSURL alloc] initWithString:str_third];
    firstAsset = [AVAsset assetWithURL:url_frst];
    secondAsset = [AVAsset assetWithURL:url_second];
    thirdAsset = [AVAsset assetWithURL:url_third]; //loaded here but never used below
    if (firstAsset != nil && secondAsset != nil) {
     [ActivityView startAnimating]; 
     //Create the AVMutableComposition object. It will hold our multiple AVMutableCompositionTracks.
     AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init]; 

     //VIDEO TRACKS: the second clip is inserted at firstAsset.duration,
     //so the composition plays the clips back to back
     AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
     [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

     AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
     [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:firstAsset.duration error:nil];

//  AUDIO TRACK 
//    if(audioAsset!=nil){ 
//     AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
//     [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil]; 
//    } 
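//   (uncomment this block to lay a single audio track under the combined video)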

     AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
     MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)); 
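     //this one instruction spans both clips; the layer instructions
     //attached below control what is visible during each half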


     //FIXING ORIENTATION// 


     AVMutableVideoCompositionLayerInstruction *firstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack]; 
     AVAssetTrack * firstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
     UIImageOrientation firstAssetOrientation_ = UIImageOrientationUp; 
     BOOL isFirstAssetPortrait_ = NO; 
     CGAffineTransform firstTransform = firstAssetTrack.preferredTransform; 
     if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) { 
      firstAssetOrientation_ = UIImageOrientationRight; 
      isFirstAssetPortrait_ = YES; 
     } 
     if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) { 
      firstAssetOrientation_ = UIImageOrientationLeft; 
      isFirstAssetPortrait_ = YES; 
     } 
     if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) { 
      firstAssetOrientation_ = UIImageOrientationUp; 
     } 
     if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) { 
      firstAssetOrientation_ = UIImageOrientationDown; 
     } 
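     //the four checks above map the track's preferredTransform to a
     //UIImageOrientation; only the portrait flag drives the scaling below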


     CGFloat FirstAssetScaleToFitRatio = 320.0/firstAssetTrack.naturalSize.width; 
     if(isFirstAssetPortrait_) 
     { 
      FirstAssetScaleToFitRatio = 320.0/firstAssetTrack.naturalSize.height; 
      CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
      [firstlayerInstruction setTransform:CGAffineTransformConcat(firstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero]; 
     }else
     {
      CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
      //CGAffineTransformMakeTranslation(0, 0) is the identity, so it was dropped
      [firstlayerInstruction setTransform:CGAffineTransformConcat(firstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
     }
     [firstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration]; 


     //same orientation detection and scaling for the second clip
     AVMutableVideoCompositionLayerInstruction *secondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
     AVAssetTrack *secondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
     UIImageOrientation secondAssetOrientation_ = UIImageOrientationUp;
     BOOL isSecondAssetPortrait_ = NO;
     CGAffineTransform secondTransform = secondAssetTrack.preferredTransform;
     if (secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {
      secondAssetOrientation_ = UIImageOrientationRight;
      isSecondAssetPortrait_ = YES;
     }
     if (secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {
      secondAssetOrientation_ = UIImageOrientationLeft;
      isSecondAssetPortrait_ = YES;
     }
     if (secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {
      secondAssetOrientation_ = UIImageOrientationUp;
     }
     if (secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {
      secondAssetOrientation_ = UIImageOrientationDown;
     }

     CGFloat SecondAssetScaleToFitRatio = 320.0/secondAssetTrack.naturalSize.width;
     if(isSecondAssetPortrait_){
      SecondAssetScaleToFitRatio = 320.0/secondAssetTrack.naturalSize.height;
      CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
      [secondlayerInstruction setTransform:CGAffineTransformConcat(secondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
     }else{
      CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
      //CGAffineTransformMakeTranslation(0, 0) is the identity, so it was dropped
      [secondlayerInstruction setTransform:CGAffineTransformConcat(secondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
     }



     MainInstruction.layerInstructions = [NSArray arrayWithObjects:firstlayerInstruction, secondlayerInstruction,nil]; 
     AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition]; 
     mainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction]; 
     mainCompositionInst.frameDuration = CMTimeMake(1, 30); 
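     //one frame every 1/30th of a second, i.e. 30 fps output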

     CGSize naturalSizeFirst, naturalSizeSecond; 
     if(isFirstAssetPortrait_){ 
      naturalSizeFirst = CGSizeMake(firstAssetTrack.naturalSize.height, firstAssetTrack.naturalSize.width); 
     } else { 
      naturalSizeFirst = firstAssetTrack.naturalSize; 
     } 
     if(isSecondAssetPortrait_){ 
      naturalSizeSecond = CGSizeMake(secondAssetTrack.naturalSize.height, secondAssetTrack.naturalSize.width); 
     } else { 
      naturalSizeSecond = secondAssetTrack.naturalSize; 
     } 

     float renderWidth, renderHeight; 
     if(naturalSizeFirst.width > naturalSizeSecond.width) { 
      renderWidth = naturalSizeFirst.width; 
     } else { 
      renderWidth = naturalSizeSecond.width; 
     } 
     if(naturalSizeFirst.height > naturalSizeSecond.height) { 
      renderHeight = naturalSizeFirst.height; 
     } else { 
      renderHeight = naturalSizeSecond.height; 
     } 
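     //use the larger of the two sizes in each dimension so neither
     //clip is cropped by the render canvas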


     mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight); 
     NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
     NSString *documentsDirectory = [paths objectAtIndex:0]; 
     NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%u.mov", arc4random() % 1000]]; //.mov to match AVFileTypeQuickTimeMovie below
     url_album = [NSURL fileURLWithPath:myPathDocs]; 
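     //the random suffix keeps repeated runs from colliding: the export
     //fails if a file already exists at the output URL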


     [ary_temp_url replaceObjectAtIndex:0 withObject:url_album]; 


     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 
     exporter.outputURL=url_album; 
     exporter.outputFileType = AVFileTypeQuickTimeMovie; 
     exporter.videoComposition = mainCompositionInst; 
     exporter.shouldOptimizeForNetworkUse = YES; 
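     //writes the movie header at the front of the file so playback can
     //start before the whole file is downloaded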
     [exporter exportAsynchronouslyWithCompletionHandler:^ 
     { 
      dispatch_async(dispatch_get_main_queue(), ^{ 
       [self exportDidFinish:exporter]; 
      }); 
     }]; 
    } 
}
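
The listing above plays the clips one after the other. For the picture-in-picture mix the question actually asks for, insert both tracks at kCMTimeZero and let a single instruction's layer instructions place the tracks: the camera feed is scaled down into a corner while the screen recording fills the frame. Below is a minimal sketch of that approach; the method name overlayCameraAsset:onScreenAsset:outputURL:, the 0.25 scale, and the 20-point inset are illustrative assumptions, not part of the original answer.

    #import <AVFoundation/AVFoundation.h>

    //sketch: belongs inside an @implementation block
    - (void)overlayCameraAsset:(AVAsset *)cameraAsset
                 onScreenAsset:(AVAsset *)screenAsset
                     outputURL:(NSURL *)outputURL
    {
        AVMutableComposition *mix = [AVMutableComposition composition];
        AVAssetTrack *screenSrc = [[screenAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        AVAssetTrack *cameraSrc = [[cameraAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        NSError *error = nil;

        //both clips are inserted at time zero, so they run in parallel
        AVMutableCompositionTrack *screenTrack = [mix addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [screenTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, screenAsset.duration) ofTrack:screenSrc atTime:kCMTimeZero error:&error];
        AVMutableCompositionTrack *cameraTrack = [mix addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [cameraTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, cameraAsset.duration) ofTrack:cameraSrc atTime:kCMTimeZero error:&error];

        //one instruction covers the shared time range; its layer
        //instructions decide where each track is drawn
        AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, screenAsset.duration);

        //shrink the camera feed to a quarter of its size and move it 20
        //points in from the top-left corner, after its orientation fix
        AVMutableVideoCompositionLayerInstruction *cameraLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:cameraTrack];
        CGAffineTransform pip = CGAffineTransformConcat(CGAffineTransformMakeScale(0.25, 0.25), CGAffineTransformMakeTranslation(20, 20));
        [cameraLayer setTransform:CGAffineTransformConcat(cameraSrc.preferredTransform, pip) atTime:kCMTimeZero];

        //the screen recording fills the frame underneath
        AVMutableVideoCompositionLayerInstruction *screenLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:screenTrack];

        //the first entry in layerInstructions is rendered on top
        instruction.layerInstructions = @[cameraLayer, screenLayer];

        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.instructions = @[instruction];
        videoComposition.frameDuration = CMTimeMake(1, 30);
        videoComposition.renderSize = screenSrc.naturalSize;

        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mix presetName:AVAssetExportPresetHighestQuality];
        exporter.outputURL = outputURL;
        exporter.outputFileType = AVFileTypeQuickTimeMovie;
        exporter.videoComposition = videoComposition;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            //check exporter.status and exporter.error before using the file
        }];
    }

The stacking order comes from the layerInstructions array (the first entry renders on top), and because both layer instructions fall inside the same instruction's time range, both tracks stay visible at once.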