How can I merge two videos in an iOS app while still preserving each video's orientation?

I am working on merging two videos and ran into a strange problem: when I try to merge a portrait video and a landscape video, the output shows the landscape video rotated into portrait, which is not what I want. I am fairly sure I am missing something fundamental about handling orientation.

Here is my code:

NSMutableArray *videoClipPaths = [[NSMutableArray alloc] init];

[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL1]];
[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL2]];
[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL1]];

float start_time = startSeconds;
float end_time = endSeconds;

AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
__block CMTime time = kCMTimeZero;
__block CGAffineTransform translate;
__block CGSize size;

for (int i=0; i<[videoClipPaths count]; i++)
{
    AVURLAsset *assetClip = [AVURLAsset URLAssetWithURL:[videoClipPaths objectAtIndex:i] options:nil];
    AVAssetTrack *clipVideoTrackB = [[assetClip tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:[videoClipPaths objectAtIndex:i] options:nil];
    CMTime start;
    CMTime duration;
    CMTimeRange video_timeRange;
    if (i==0){
        start = CMTimeMakeWithSeconds(0.0f, anAsset.duration.timescale);
        duration = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
        video_timeRange = CMTimeRangeMake(kCMTimeZero,duration);
        [compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:start error:nil];

        size = CGSizeMake(clipVideoTrackB.naturalSize.height, clipVideoTrackB.naturalSize.height);

        translate = CGAffineTransformMakeTranslation(-420, 0);
        CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
        [compositionTrack setPreferredTransform:newTransform];
        time = CMTimeAdd(time, duration);

    }else if (i==1){
        CMTime duration = anAsset.duration;
        float seconds = CMTimeGetSeconds(duration);

        start = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
        duration = CMTimeMakeWithSeconds(seconds, anAsset.duration.timescale);
        video_timeRange = CMTimeRangeMake(kCMTimeZero,duration);
        [compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:start error:nil];

        translate = CGAffineTransformMakeTranslation(-420, 0);
        CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
        [compositionTrack setPreferredTransform:newTransform];
        time = CMTimeAdd(time, duration);
    }
    else if (i==2){
        CMTime duration = anAsset.duration;
        float seconds = CMTimeGetSeconds(duration);

        start = CMTimeMakeWithSeconds(end_time, anAsset.duration.timescale);
        duration = CMTimeMakeWithSeconds(seconds-start_time, anAsset.duration.timescale);
        //            duration = CMTimeMakeWithSeconds(anAsset.duration.timescale, anAsset.duration.timescale);
        video_timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale),duration);
        [compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:kCMTimeInvalid error:nil];

        translate = CGAffineTransformMakeTranslation(-420, 0);
        CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
        [compositionTrack setPreferredTransform:newTransform];
        time = CMTimeAdd(time, duration);
    }
    //        video_timeRange = CMTimeRangeMake(start,duration);

    //merge audio of video files
    AVAssetTrack *clipVideoTrackB1 = [[assetClip tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    /*CMTime start1;
     CMTime duration1;
     CMTimeRange video_timeRange1;
     if (i==0){
     start1 = CMTimeMakeWithSeconds(0.0, anAsset.duration.timescale);
     duration1 = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);

     }else if (i==1){
     start1 = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
     duration1 = CMTimeMakeWithSeconds(end_time-start_time, anAsset.duration.timescale);
     }
     else if (i==2){
     start1 = CMTimeMakeWithSeconds(end_time, anAsset.duration.timescale);
     duration1 = CMTimeMakeWithSeconds(anAsset.duration.timescale, anAsset.duration.timescale);
     }
     video_timeRange1 = CMTimeRangeMake(start,duration);*/
    [compositionTrack2 insertTimeRange:video_timeRange ofTrack:clipVideoTrackB1 atTime:start error:nil];

}

The code above defines time ranges for three clips (really only two videos), but what I actually need is to insert the second video inside the first one at a given time interval.
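To make it clearer, this is roughly the behaviour I am after: the first clip up to the split point, then the whole second clip, then the rest of the first clip, all on one composition track. A minimal Swift sketch of the idea (urlA, urlB and splitSeconds are placeholder names, not from my project):

import AVFoundation

// Hypothetical helper: insert clip B into the middle of clip A at splitSeconds.
func insertClip(_ urlB: URL, intoMiddleOf urlA: URL, atSeconds splitSeconds: Double) -> AVMutableComposition {
    let assetA = AVURLAsset(url: urlA)
    let assetB = AVURLAsset(url: urlB)
    let composition = AVMutableComposition()
    let videoTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                 preferredTrackID: kCMPersistentTrackID_Invalid)

    let trackA = assetA.tracks(withMediaType: AVMediaTypeVideo)[0]
    let trackB = assetB.tracks(withMediaType: AVMediaTypeVideo)[0]
    let split  = CMTimeMakeWithSeconds(splitSeconds, assetA.duration.timescale)

    do {
        // 1. First part of clip A: [0, split)
        try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, split), of: trackA, at: kCMTimeZero)
        // 2. All of clip B, right after the first part
        try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, assetB.duration), of: trackB, at: split)
        // 3. Remaining part of clip A: [split, end), after clip B
        try videoTrack.insertTimeRange(CMTimeRangeMake(split, CMTimeSubtract(assetA.duration, split)),
                                       of: trackA, at: CMTimeAdd(split, assetB.duration))
    } catch {
        print(error.localizedDescription)
    }
    return composition
}

I realise a single preferredTransform on the composition track can only describe one orientation, which is presumably part of what I am missing when the clips have different orientations.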

The code below creates the video composition for all the assets I have:

AVMutableVideoCompositionInstruction *vtemp = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
vtemp.timeRange = CMTimeRangeMake(kCMTimeZero, time);
NSLog(@"\nInstruction vtemp's time range is %f %f", CMTimeGetSeconds( vtemp.timeRange.start),
      CMTimeGetSeconds(vtemp.timeRange.duration));

// Also tried videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack
AVMutableVideoCompositionLayerInstruction *vLayerInstruction = [AVMutableVideoCompositionLayerInstruction
                                                                videoCompositionLayerInstructionWithAssetTrack:compositionTrack];


[vLayerInstruction setTransform:compositionTrack.preferredTransform atTime:kCMTimeZero];
vtemp.layerInstructions = @[vLayerInstruction];

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = size;
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.instructions = @[vtemp];

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
NSParameterAssert(exporter != nil);

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsPath = [paths objectAtIndex:0]; //Get the docs directory
documentsPath=[documentsPath stringByAppendingString:@"/MergeVideos"];

.....the rest saves the video to the Photos library

Thanks in advance!


— Jigar Tank, 03.08.2014
Comment: This might help you: stackoverflow.com/questions/13246557/ — iOSAaronDavid, 13.08.2014


Answers (1)


I have a Swift version, and mine works perfectly. After a few hours of coding I finally got it working. Sorry for posting the answer in Swift; I hope you find it useful.

readyVideoURLs contains all the video URLs, and you can merge any number of videos with this code.
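For context, a hypothetical call site (the file names below are placeholders; only readyVideoURLs and mergeVideos() come from my code):

// Hypothetical usage: collect the clip URLs, then merge them.
readyVideoURLs = [URL(fileURLWithPath: NSHomeDirectory().appending("/Documents/clip1.mp4")),
                  URL(fileURLWithPath: NSHomeDirectory().appending("/Documents/clip2.mp4"))]
mergeVideos()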

I also make the output video square:

mainComposition.renderSize = CGSize(width: 600, height: 600)

func mergeVideos() {

        let composition = AVMutableComposition()
        let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)

        var current = kCMTimeZero
        var assetArr = [AVURLAsset]()

        let mainInstruction = AVMutableVideoCompositionInstruction()

        for url in readyVideoURLs {
            assetArr.append(AVURLAsset(url: url))
            print("readyVideoURL vid url:- \(url)")

        }

        for asset in assetArr {
            do {
                // Append each clip after the previous one; inserting every clip
                // at kCMTimeZero would not keep them in order on a single track.
                try compositionTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: asset.tracks(withMediaType: AVMediaTypeVideo)[0], at: current)

            } catch let error {
                print(error.localizedDescription)
            }


            let instruction = videoCompositionInstructionForTrack(track: compositionTrack, asset: asset)
            mainInstruction.layerInstructions.append(instruction)
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(current, asset.duration))
            current = CMTimeAdd(current, asset.duration)
        }


        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: 600, height: 600)


        //add audio track
//        if let loadedAudioAsset = audioAsset {
//            let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
//            do {
//                try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, current),
//                                               of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
//            } catch _ {
//                print("Failed to load Audio track")
//            }
//        }

        let finalVideoPath = NSHomeDirectory().appending("/Documents/finalDailyVideo.mp4")
        if FileManager.default.fileExists(atPath: finalVideoPath) {
            do {
                try FileManager.default.removeItem(atPath: finalVideoPath)
            } catch let error {
                print(error.localizedDescription)
            }
        }

        guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputURL = URL(fileURLWithPath: finalVideoPath)
        exporter.outputFileType = AVFileTypeMPEG4
        exporter.shouldOptimizeForNetworkUse = true
        // Without this the layer instructions (and the 600x600 render size) are ignored,
        // so the orientation transforms never get applied to the exported file.
        exporter.videoComposition = mainComposition

        saveToAlbum(mergedVidUrl: URL(fileURLWithPath: finalVideoPath), exporter: exporter)

    }

Saving to the album:

func saveToAlbum(mergedVidUrl : URL, exporter : AVAssetExportSession)  {
        exportFinalVideo(exporter: exporter) { (completed) in
            if completed {

                let assetLib = ALAssetsLibrary()
                assetLib.writeVideoAtPath(toSavedPhotosAlbum: mergedVidUrl, completionBlock: { (assetUrl, error) in
                    if error == nil {
                        DispatchQueue.main.async {
                            self.showAlert(title: "Congrats..",message: "Your daily Vlog was saved in albums", actionTitle: "Got it")
                        }
                    }

                })
            }
        }
    }

Getting the exporter's completion:

func exportFinalVideo(exporter: AVAssetExportSession, completion:@escaping(Bool) -> ())  {
        exporter.exportAsynchronously() {

            if exporter.status == .exporting {
                print("EXPORTING...")

            } else if exporter.status == .completed {
                print("merged video exporting DONE")
                DispatchQueue.main.async {
                    self.dismiss(animated: true, completion: nil)
                }
                completion(true)
            } else if exporter.status == .failed {
                completion(false)
                self.showAlert(title: "Oops!", message: "Something went wrong. Video could not be created.", actionTitle: "Okay!")

            }
        }

    }

Video layer instructions that handle orientation:

func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform: transform)

        var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor),
                                     at: kCMTimeZero)
        } else {
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
            if assetInfo.orientation == .down {
                let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                let windowBounds = UIScreen.main.bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
            }
            instruction.setTransform(concat, at: kCMTimeZero)
        }

        return instruction
    }

    func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }
— MacKa, 25.04.2017
Comment: This didn't work for me… the orientation was wrong! — 26.08.2020