15

このコードは、以前は iOS9 でビデオに透かしとテキストを追加するために機能していましたが、iOS10 以降では機能しなくなりました。提出されたiOS 10 のバグがありますが、Apple からの回答はありません。ビデオに透かしとテキストを追加するための回避策を実装できませんでした。このコードを使用すると、ビデオが正常にエクスポートされる場合もありますが、ほとんどの場合、エクスポートされません。

AVVideoCompositionCoreAnimationTooliOS9と同じよう に使うにはどうすればいいですか?

let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()

videoComposition.frameDuration = CMTimeMake(1, 60)
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height)


let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))

// transformer is applied to set the video in portrait otherwise it is rotated by 90 degrees
let transformer: AVMutableVideoCompositionLayerInstruction =
    AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)

let t1: CGAffineTransform = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height)/2)

let t2: CGAffineTransform = t1.rotated(by: CGFloat(M_PI_2))

var finalTransform: CGAffineTransform = t2

transformer.setTransform(finalTransform, at: kCMTimeZero)

instruction.layerInstructions = NSArray(object: transformer) as! [AVVideoCompositionLayerInstruction]

videoComposition.instructions = NSArray(object: instruction) as! [AVVideoCompositionInstructionProtocol]



let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)


do {
    try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: clipVideoTrack, at: kCMTimeZero)
} catch {
    print(error)
}


//Add watermark


let myImage = UIImage(named: "logo")

let aLayer = CALayer()
aLayer.contents = myImage!.cgImage
aLayer.frame = CGRect(x: (clipVideoTrack.naturalSize.height*(self.view.bounds.width-45))/self.view.bounds.width, y: (clipVideoTrack.naturalSize.height*(self.view.bounds.width-40))/self.view.bounds.width, width: (clipVideoTrack.naturalSize.height*40)/self.view.bounds.width, height: (clipVideoTrack.naturalSize.height*40)/self.view.bounds.width)

let titleLayer = CATextLayer()
titleLayer.string = "text"
titleLayer.font = UIFont(name: "helvetica", size: 0)
titleLayer.fontSize = clipVideoTrack.naturalSize.height/16
titleLayer.shadowOpacity = 0.5
titleLayer.alignmentMode = kCAAlignmentCenter
titleLayer.frame = CGRect(x: 0, y: 0, width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height/6)
titleLayer.display()


let videoSize = asset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height)
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height)

parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(aLayer)
parentLayer.addSublayer(titleLayer)


videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)



do { try FileManager.default.removeItem(at: filePath) }
catch let error as NSError {
    NSLog("\(error), \(error.localizedDescription)")
}



var exportUrl: URL = filePath
self.videoUrl = filePath as NSURL


var exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality)

exporter!.videoComposition = videoComposition
exporter!.outputFileType = AVFileTypeQuickTimeMovie
exporter!.outputURL = URL(fileURLWithPath: exportUrl.path)


exporter!.exportAsynchronously(completionHandler: {

    DispatchQueue.main.async {


        self.view.layer.addSublayer(self.avPlayerLayer)

        let item = AVPlayerItem(url: exportUrl)
        self.player.replaceCurrentItem(with: item)

        if (self.player.currentItem != nil) {
            print("Starting playback!")
            self.player.play()
        }

    }

})

注意: 削除するAVVideoCompositionCoreAnimationToolと、ビデオは常にエクスポートされますが、ビデオに透かしやテキストは表示されません。AVVideoCompositionCoreAnimationToolと競合しないようにするにはどうすればよいAVAssetExportSessionですか?

プロトコルを使用した回避策が実装されているものもcustomVideoCompositorClassありAVVideoCompositingますが、これは以前の動作に比べて重い回避策のようです。

4

3 に答える 3

3

私はここからこの答えを得ました、そしてそれは私のために働いています。それがあなたのために働いているかどうかを確認してください。

import UIKit
import AssetsLibrary
import AVFoundation

enum QUWatermarkPosition {
    case TopLeft
    case TopRight
    case BottomLeft
    case BottomRight
    case Default
}

class QUWatermarkManager: NSObject {

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status: status, session: session, outputURL: outputURL)
        }
    }

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status: status, session: session, outputURL: outputURL)
        }
    }

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {

        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), { () -> Void in
            var mixComposition = AVMutableComposition()

            var compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            var clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
            compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), ofTrack: clipVideoTrack, atTime: kCMTimeZero, error: nil)
            clipVideoTrack.preferredTransform

            let videoSize = clipVideoTrack.naturalSize

            var parentLayer = CALayer()
            var videoLayer = CALayer()
            parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
            videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
            parentLayer.addSublayer(videoLayer)

            if text != nil {
                var titleLayer = CATextLayer()
                titleLayer.backgroundColor = UIColor.redColor().CGColor
                titleLayer.string = text
                titleLayer.font = "Helvetica"
                titleLayer.fontSize = 15
                titleLayer.alignmentMode = kCAAlignmentCenter
                titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height)
                parentLayer.addSublayer(titleLayer)
            } else if name != nil {
                var watermarkImage = UIImage(named: name)
                var imageLayer = CALayer()
                imageLayer.contents = watermarkImage?.CGImage

                var xPosition : CGFloat = 0.0
                var yPosition : CGFloat = 0.0
                let imageSize : CGFloat = 57.0

                switch (position) {
                case .TopLeft:
                    xPosition = 0
                    yPosition = 0
                    break
                case .TopRight:
                    xPosition = videoSize.width - imageSize
                    yPosition = 0
                    break
                case .BottomLeft:
                    xPosition = 0
                    yPosition = videoSize.height - imageSize
                    break
                case .BottomRight, .Default:
                    xPosition = videoSize.width - imageSize
                    yPosition = videoSize.height - imageSize
                    break
                default:
                    break
                }


                imageLayer.frame = CGRectMake(xPosition, yPosition, imageSize, imageSize)
                imageLayer.opacity = 0.65
                parentLayer.addSublayer(imageLayer)
            }

            var videoComp = AVMutableVideoComposition()
            videoComp.renderSize = videoSize
            videoComp.frameDuration = CMTimeMake(1, 30)
            videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

            var instruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
            var videoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack

            let layerInstruction = self.videoCompositionInstructionForTrack(compositionVideoTrack, asset: videoAsset)

            instruction.layerInstructions = [layerInstruction]
            videoComp.instructions = [instruction]

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as! String
            var dateFormatter = NSDateFormatter()
            dateFormatter.dateStyle = .LongStyle
            dateFormatter.timeStyle = .ShortStyle
            let date = dateFormatter.stringFromDate(NSDate())
            let savePath = documentDirectory.stringByAppendingPathComponent("watermarkVideo-\(date).mov")
            let url = NSURL(fileURLWithPath: savePath)

            let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
            exporter.outputURL = url
            exporter.outputFileType = AVFileTypeQuickTimeMovie
            exporter.shouldOptimizeForNetworkUse = true
            exporter.videoComposition = videoComp

            exporter.exportAsynchronouslyWithCompletionHandler() {
                dispatch_async(dispatch_get_main_queue(), { () -> Void in
                    if exporter.status == AVAssetExportSessionStatus.Completed {
                        let outputURL = exporter.outputURL
                        if flag {
                            // Save to library
                            let library = ALAssetsLibrary()
                            if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
                                library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
                                    completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
                                        completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL)
                                })
                            }
                        } else {
                            completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL)
                        }

                    } else {
                        // Error
                        completion!(status: exporter.status, session: exporter, outputURL: nil)
                    }
                })
            }
        })
    }


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.Up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .Right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .Left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .Up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .Down
        }
        return (assetOrientation, isPortrait)
    }

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack

        var transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform)

        var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
            instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
                atTime: kCMTimeZero)
        } else {
            let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
            var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
            if assetInfo.orientation == .Down {
                let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
                let windowBounds = UIScreen.mainScreen().bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
                concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
            }
            instruction.setTransform(concat, atTime: kCMTimeZero)
        }

        return instruction
    }
}
于 2016-11-18T11:03:23.103 に答える