AVAssetExportSession audio mix not working with AVMutableComposition

I built an app that combines two videos, merges them into one, and then lays a custom audio track over the video. All of that works perfectly. Now I want to fade the audio out, but the fade has no effect and I don't know why. Here is my code:

let composition = AVMutableComposition()

// The merged video tracks are already in `composition` at this point (merging code not shown)
let duration = composition.duration
let durationInSeconds = CMTimeGetSeconds(duration) * 10

let item = AVPlayerItem(asset: composition)
let params = AVMutableAudioMixInputParameters(track: composition.tracks.first! as AVAssetTrack)

let lastSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(durationInSeconds - 10, 10), CMTimeMakeWithSeconds(1, 1))

params.setVolumeRamp(fromStartVolume: 1, toEndVolume: 0, timeRange: lastSecond) 

let mix = AVMutableAudioMix() 
mix.inputParameters = [params] 

item.audioMix = mix 

// Put the track under the video 
do { 
    try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, current), of: backgroundAudio.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero) 
} catch _ { 
    print("Failed to load Audio track") 
} 

guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return } 
exporter.audioMix = mix 
exporter.outputURL = URL(fileURLWithPath: finalVideoPath) 
exporter.outputFileType = AVFileTypeMPEG4 
exporter.shouldOptimizeForNetworkUse = true 

After this block of code the video itself is rendered in another method, and that part keeps working. Can someone explain why the fade has no effect and how I can fix it?
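For reference, the export is presumably started in that other method with something like the following (a minimal sketch, not the actual code; only `exporter` and `finalVideoPath` come from the snippet above):

exporter.exportAsynchronously {
    switch exporter.status {
    case .completed:
        print("export finished: \(finalVideoPath)")
    case .failed, .cancelled:
        print("export failed: \(String(describing: exporter.error))")
    default:
        break
    }
}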

Thanks in advance.

Answer

Here is code that adds a video URL and an audio URL to a composition and sets a separate volume for each of the two audio tracks; the same AVMutableAudioMix can then be used to give the resulting video a fade-in/fade-out effect.

import AVFoundation

func mergeVideoAndMusicWithVolume(videoURL: NSURL, audioURL: NSURL, startAudioTime: Float64, volumeVideo: Float, volumeAudio: Float, complete: (NSURL?) -> Void) -> Void {

    //The goal is merging a video and a music from iPod library, and set it a volume 

    //Get the path of App Document Directory 
    let dirPaths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true) 
    let docsDir = dirPaths[0] as String 

    //Create Asset from record and music 
    let assetVideo: AVURLAsset = AVURLAsset(URL: videoURL) 
    let assetMusic: AVURLAsset = AVURLAsset(URL: audioURL) 

    let composition: AVMutableComposition = AVMutableComposition() 
    let compositionVideo : AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID()) 
    let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID()) 
    let compositionAudioMusic: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID()) 

    //Add video to the final record 

    do {
        try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetVideo.duration), ofTrack: assetVideo.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: kCMTimeZero)
    } catch _ {
    }

    //Extract audio from the video and the music 
    let audioMix: AVMutableAudioMix = AVMutableAudioMix() 
    var audioMixParam: [AVMutableAudioMixInputParameters] = [] 

    let assetVideoTrack: AVAssetTrack = assetVideo.tracksWithMediaType(AVMediaTypeAudio)[0] 
    let assetMusicTrack: AVAssetTrack = assetMusic.tracksWithMediaType(AVMediaTypeAudio)[0] 

    let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack) 
    videoParam.trackID = compositionAudioVideo.trackID 

    let musicParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetMusicTrack) 
    musicParam.trackID = compositionAudioMusic.trackID 

    //Set final volume of the audio record and the music 
    videoParam.setVolume(volumeVideo, atTime: kCMTimeZero) 
    musicParam.setVolume(volumeAudio, atTime: kCMTimeZero) 

    //Add setting 
    audioMixParam.append(musicParam) 
    audioMixParam.append(videoParam) 

    //Add audio on final record 
    //First: the audio of the record and Second: the music 
    do {
        try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetVideo.duration), ofTrack: assetVideoTrack, atTime: kCMTimeZero)
    } catch _ {
        assertionFailure()
    }

    do {
        try compositionAudioMusic.insertTimeRange(CMTimeRangeMake(CMTimeMake(Int64(startAudioTime * 10000), 10000), assetVideo.duration), ofTrack: assetMusicTrack, atTime: kCMTimeZero)
    } catch _ {
        assertionFailure()
    }

    //Add parameter 
    audioMix.inputParameters = audioMixParam 

    //Remove the previous temp video if it exists
    let filemgr = NSFileManager.defaultManager()
    do {
        if filemgr.fileExistsAtPath("\(docsDir)/movie-merge-music.mp4") {
            try filemgr.removeItemAtPath("\(docsDir)/movie-merge-music.mp4")
        }
    } catch _ {
    }

    //Export the final record
    //`randomString` is a small helper (not shown here) that returns a random file name
    let completeMovie = "\(docsDir)/\(randomString(5)).mp4"
    let completeMovieUrl = NSURL(fileURLWithPath: completeMovie)
    let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)! 

    exporter.outputURL = completeMovieUrl 
    exporter.outputFileType = AVFileTypeMPEG4 
    exporter.audioMix = audioMix 
    exporter.exportAsynchronouslyWithCompletionHandler({

        switch exporter.status {

        case AVAssetExportSessionStatus.Completed:
            print("success with output url \(completeMovieUrl)")
            complete(completeMovieUrl)
        case AVAssetExportSessionStatus.Failed:
            print("failed \(exporter.error)")
            complete(nil)
        case AVAssetExportSessionStatus.Cancelled:
            print("cancelled \(exporter.error)")
            complete(nil)
        default:
            print("complete")
            complete(nil)
        }
    })
} 
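The code above only sets constant volumes. To get the fade-out asked about in the question, a volume ramp can be added to musicParam before the parameters are assigned to the audio mix. A minimal sketch in the same Swift 2 style (the one-second fade window is an assumption, not part of the original answer):

// Hypothetical addition: fade the music out over the last second of the video.
// Insert this before `audioMix.inputParameters = audioMixParam`.
let fadeDuration = CMTimeMakeWithSeconds(1, 600)
let fadeStart = CMTimeSubtract(assetVideo.duration, fadeDuration)
musicParam.setVolumeRampFromStartVolume(volumeAudio, toEndVolume: 0, timeRange: CMTimeRangeMake(fadeStart, fadeDuration))

The same call on videoParam would fade the video's own audio as well.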