iOS - Merge two videos into a single video file with audio

I want to merge two video files into a single file with audio. I can merge the two video files, but after merging, the output video file has no audio, even though both source videos have proper sound. I am following this tutorial: https://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios Any suggestion would be great. Thanks, guys. My code:

- (IBAction)MergeAndSave:(id)sender{ 
if(firstAsset !=nil && secondAsset!=nil){ 
    [ActivityView startAnimating]; 
    //Create an AVMutableComposition object. This object will hold our multiple AVMutableCompositionTracks. 
    AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init]; 

    //VIDEO TRACK 
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil]; 

    AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:firstAsset.duration error:nil]; 

    //AUDIO TRACK 
    if(audioAsset!=nil){ 
     AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
     [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil]; 
    } 

    AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)); 

    //FIXING ORIENTATION// 
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack]; 
    AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp; 
    BOOL isFirstAssetPortrait_ = NO; 
    CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform; 
    if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;} 
    if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;} 
    if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;} 
    if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;} 
    CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width; 
    if(isFirstAssetPortrait_){ 
     FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height; 
     CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
     [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero]; 
    }else{ 
     CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
     [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero]; 
    } 
    [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration]; 

    AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack]; 
    AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    UIImageOrientation SecondAssetOrientation_ = UIImageOrientationUp; 
    BOOL isSecondAssetPortrait_ = NO; 
    CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform; 
    if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;} 
    if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {SecondAssetOrientation_ = UIImageOrientationLeft; isSecondAssetPortrait_ = YES;} 
    if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {SecondAssetOrientation_ = UIImageOrientationUp;} 
    if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;} 
    CGFloat SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.width; 
    if(isSecondAssetPortrait_){ 
     SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.height; 
     CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio); 
     [SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration]; 
    }else{ 
     CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio); 
     [SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:firstAsset.duration]; 
    } 


    MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,SecondlayerInstruction,nil]; 

    AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition]; 
    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction]; 
    MainCompositionInst.frameDuration = CMTimeMake(1, 30); 
    MainCompositionInst.renderSize = CGSizeMake(320.0, 480.0); 

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
    NSString *documentsDirectory = [paths objectAtIndex:0]; 
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]]; 

    NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 
    exporter.outputURL=url; 
    exporter.outputFileType = AVFileTypeQuickTimeMovie; 
    exporter.videoComposition = MainCompositionInst; 
    exporter.shouldOptimizeForNetworkUse = YES; 
    [exporter exportAsynchronouslyWithCompletionHandler:^ 
    { 
     dispatch_async(dispatch_get_main_queue(), ^{ 
      [self exportDidFinish:exporter]; 
     }); 
    }]; 
} 

}


This tutorial might help you: https://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios – iMHitesh

Answer


I have created a library that merges multiple videos into a single video with audio:

Objective-C

https://github.com/Datt1994/DPVideoMerger

Swift

https://github.com/Datt1994/DPVideoMerger-Swift

Use the code below:

// Pull the audio straight from each video asset and insert it on the
// timeline at the same start time as the corresponding video track.
AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
[AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil]; 

AVMutableCompositionTrack *AudioTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
[AudioTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:firstAsset.duration error:nil]; 

instead of your code below:

if(audioAsset!=nil){ 
    AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil]; 
} 
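
A minimal defensive sketch (not part of the original answer; the helper name appendAudioFromAsset is mine): [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] crashes if a clip has no audio track at all, and passing error:nil hides insertion failures. Guarding on the track array and capturing the NSError makes the merge fail loudly instead of silently producing a silent video:

#import <AVFoundation/AVFoundation.h>

// Hypothetical helper: copies an asset's first audio track into the
// composition at the given start time, if the asset has audio at all.
static BOOL appendAudioFromAsset(AVAsset *asset,
                                 AVMutableComposition *mixComposition,
                                 CMTime atTime,
                                 NSError **error) {
    NSArray<AVAssetTrack *> *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    if (audioTracks.count == 0) {
        return YES; // This clip is silent; nothing to insert.
    }
    AVMutableCompositionTrack *audioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    return [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                               ofTrack:audioTracks.firstObject
                                atTime:atTime
                                 error:error];
}

Called in place of the two insertions above, it would look like:

NSError *audioError = nil;
if (!appendAudioFromAsset(firstAsset, mixComposition, kCMTimeZero, &audioError) ||
    !appendAudioFromAsset(secondAsset, mixComposition, firstAsset.duration, &audioError)) {
    NSLog(@"Audio insertion failed: %@", audioError);
}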

Your repo is not very well documented. Can you explain how to solve the problem in case someone doesn't want to use your repo? –


Your repo also doesn't handle video clips with different orientations. – Vats


@Datt1994 Nice work on the GitHub demo... :) – sohil