animate a moving watermark over video using calayer - swift

I am on swift and trying to make a watermark move positions over a video. so far all I'm getting is a watermark that stays still over the video that is playing. how can I make it move? Basically what I'm trying to replicate is like the watermark from tiktok videos that pop in from one location to the other. I am really confused as to why this doesn't work. please help. thanks!
func processVideoWithElements(item: MediaItem, completion: #escaping ProcessCompletionHandler) {
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
let clipVideoTrack = item.sourceAsset.tracks(withMediaType: AVMediaType.video).first
let clipAudioTrack = item.sourceAsset.tracks(withMediaType: AVMediaType.audio).first
do {
try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: item.sourceAsset.duration), of: clipVideoTrack!, at: CMTime.zero)
} catch {
completion(MediaProcessResult(processedUrl: nil, image: nil), error)
}
if (clipAudioTrack != nil) {
let compositionAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
do {
try compositionAudioTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: item.sourceAsset.duration), of: clipAudioTrack!, at: CMTime.zero)
} catch {
completion(MediaProcessResult(processedUrl: nil, image: nil), error)
}
}
compositionVideoTrack?.preferredTransform = (item.sourceAsset.tracks(withMediaType: AVMediaType.video).first?.preferredTransform)!
let sizeOfVideo = item.size
let optionalLayer = CALayer()
processAndAddElements(item: item, layer: optionalLayer)
optionalLayer.frame = CGRect(x: 0, y: 0, width: sizeOfVideo.width, height: sizeOfVideo.height)
optionalLayer.masksToBounds = true
// animate layer ---------------------------------------
let positionAnimation = CABasicAnimation(keyPath: #keyPath(CALayer.position))
positionAnimation.beginTime = CMTime.zero.seconds
positionAnimation.fromValue = CGPoint(x: -20, y: 100)
positionAnimation.toValue = CGPoint(x: 280, y: 100)
positionAnimation.duration = 3//item.sourceAsset.duration.seconds
positionAnimation.repeatCount = 2
positionAnimation.autoreverses = true
optionalLayer.add(positionAnimation, forKey: #keyPath(CALayer.position))
//------------------------------------
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: sizeOfVideo.width, height: sizeOfVideo.height)
videoLayer.frame = CGRect(x: 0, y: 0, width: sizeOfVideo.width, height: sizeOfVideo.height)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(optionalLayer)
let videoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(value: kMediaContentTimeValue, timescale: kMediaContentTimeScale)
videoComposition.renderSize = sizeOfVideo
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: mixComposition.duration)
let videoTrack = mixComposition.tracks(withMediaType: AVMediaType.video).first
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack!)
layerInstruction.setTransform(transform(avAsset: item.sourceAsset, scaleFactor: kMediaContentDefaultScale), at: CMTime.zero)
instruction.layerInstructions = [layerInstruction]
videoComposition.instructions = [instruction]
let processedUrl = processedMoviePath()
clearTemporaryData(url: processedUrl, completion: completion)
let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.videoComposition = videoComposition
exportSession?.outputURL = processedUrl
exportSession?.outputFileType = AVFileType.mp4
exportSession?.exportAsynchronously(completionHandler: {
if exportSession?.status == AVAssetExportSession.Status.completed {
completion(MediaProcessResult(processedUrl: processedUrl, image: nil), nil)
} else {
completion(MediaProcessResult(processedUrl: nil, image: nil), exportSession?.error)
}
})

Related

How to combine (hstack) multiple videos side by side with AVMutableVideoComposition?

I'm trying to combine 3 videos into single video where videos are cropped and put side by side in hstack.
Here is my current solution which puts only the first video 3 times (repeating) side by side. I can not figure out what am I doing wrong.
import Foundation
import AVFoundation
func hstackVideos() {
let videoPaths: [String] = [
"path/to/video1.mp4",
"path/to/video2.mp4",
"path/to/video3.mp4",
]
let composition = AVMutableComposition()
let assetInfos: [(AVURLAsset, AVAssetTrack, AVMutableCompositionTrack)] = videoPaths.map {
let asset = AVURLAsset(url: URL(fileURLWithPath: $0))
let track = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)!
let videoAssetTrack = asset.tracks(withMediaType: .video)[0]
try! track.insertTimeRange(CMTimeRangeMake(start: videoAssetTrack.timeRange.start, duration: videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: videoAssetTrack.timeRange.start)
return (asset, videoAssetTrack, track)
}
let stackComposition = AVMutableVideoComposition()
stackComposition.renderSize = CGSize(width: 512, height: 288)
stackComposition.frameDuration = CMTime(seconds: 1/30, preferredTimescale: 600)
var i = 0
let instructions: [AVMutableVideoCompositionLayerInstruction] = assetInfos.map { (asset, assetTrack, compTrack) in
let lInst = AVMutableVideoCompositionLayerInstruction(assetTrack: assetTrack)
let w: CGFloat = 512/CGFloat(assetInfos.count)
let inRatio = assetTrack.naturalSize.width / assetTrack.naturalSize.height
let cropRatio = w / 288
let scale: CGFloat
if inRatio < cropRatio {
scale = w / assetTrack.naturalSize.width
} else {
scale = 288 / assetTrack.naturalSize.height
}
lInst.setCropRectangle(CGRect(x: w/scale, y: 0, width: w/scale, height: 288/scale), at: CMTime.zero)
let transform = CGAffineTransform(scaleX: scale, y: scale)
let t2 = transform.concatenating(CGAffineTransform(translationX: -w + CGFloat(i)*w, y: 0))
lInst.setTransform(t2, at: CMTime.zero)
i += 1
return lInst
}
let inst = AVMutableVideoCompositionInstruction()
inst.timeRange = CMTimeRange(start: CMTime.zero, duration: assetInfos[0].0.duration)
inst.layerInstructions = instructions
stackComposition.instructions = [inst]
let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
let outPath = "path/to/finalVideo.mp4"
let outUrl = URL(fileURLWithPath: outPath)
try? FileManager.default.removeItem(at: outUrl)
exporter.outputURL = outUrl
exporter.videoComposition = stackComposition
exporter.outputFileType = .mp4
exporter.shouldOptimizeForNetworkUse = true
let group = DispatchGroup()
group.enter()
exporter.exportAsynchronously(completionHandler: {
switch exporter.status {
case .completed:
print("SUCCESS!")
if exporter.error != nil {
print("Error: \(String(describing: exporter.error))")
print("Description: \(exporter.description)")
}
group.leave()
case .exporting:
let progress = exporter.progress
print("Progress: \(progress)")
case .failed:
print("Error: \(String(describing: exporter.error))")
print("Description: \(exporter.description)")
group.leave()
default:
break
}
})
group.wait()
}
You're applying instructions to source asset track, but you need to apply them to output composition track, like this:
let instructions: [AVMutableVideoCompositionLayerInstruction] = assetInfos.map { (asset, assetTrack, compTrack) in
let lInst = AVMutableVideoCompositionLayerInstruction(assetTrack: compTrack)
let w: CGFloat = 512/CGFloat(assetInfos.count)
let inRatio = compTrack.naturalSize.width / compTrack.naturalSize.height
let cropRatio = w / 288
let scale: CGFloat
if inRatio < cropRatio {
scale = w / compTrack.naturalSize.width
} else {
scale = 288 / compTrack.naturalSize.height
}
lInst.setCropRectangle(CGRect(x: w/scale, y: 0, width: w/scale, height: 288/scale), at: CMTime.zero)
let transform = CGAffineTransform(scaleX: scale, y: scale)
let t2 = transform.concatenating(CGAffineTransform(translationX: -w + CGFloat(i)*w, y: 0))
lInst.setTransform(t2, at: CMTime.zero)
i += 1
return lInst
}

Why is my CGImage changing colors when exporting through AVAssetExportSession?

I'm overlaying a CGImage on a video through AVMutableComposition and that works great (see Screenshot A below). The problem is when I export the video with AVAssetExportSession, I get some weird outlines and colors (see Screenshot B). How do I export this video retaining the proper colors of the CGImage and without the odd yellow/black outlines?
Here's the code I'm using to export:
func saveVideo(composition: AVMutableComposition, vidComp: AVMutableVideoComposition, audioMix: AVMutableAudioMix) {
let preset = "AVAssetExportPreset1920x1080"
guard let exportSession = AVAssetExportSession(asset: composition, presetName: preset) else {
return
}
let videoSize: CGSize = vidComp.renderSize
let parentLayer = CALayer()
parentLayer.isGeometryFlipped = true
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
let videoLayer = CALayer()
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
parentLayer.addSublayer(videoLayer)
let url = URL(fileURLWithPath: Bundle.main.path(forResource: "Fire1", ofType: "mov")!)
let asset = AVURLAsset(url: url)
let generator = AVAssetImageGenerator(asset: asset)
generator.appliesPreferredTrackTransform = true
generator.requestedTimeToleranceBefore = .zero
generator.requestedTimeToleranceAfter = .zero
do {
let cgImg = try generator.copyCGImage(at: CMTime(seconds: 0.5, preferredTimescale: 1000), actualTime: nil)
let imgLayer = CALayer()
imgLayer.contentsGravity = .resizeAspect
imgLayer.contents = cgImg
imgLayer.frame = parentLayer.bounds
parentLayer.addSublayer(imgLayer)
} catch {
print("No thumbnail")
}
vidComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
exportSession.videoComposition = vidComp
exportSession.audioMix = audioMix
exportSession.outputURL = self.uniqueURLForSave()
exportSession.outputFileType = AVFileType.mov
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously {
switch exportSession.status {
case .completed:
UISaveVideoAtPathToSavedPhotosAlbum(exportSession.outputURL!.relativePath, self, nil, nil)
default:
print("Error")
}
}
}
Screenshot A (proper image):
Screenshot B (faulty image with yellow and black outlines):

How to make a square video?

Trying to make a square video through animationTool.
See code below. The video is enlarged (https://i.stack.imgur.com/HscTk.jpg), how can i fix it?
let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
let strFilePath: String = generateMergedVideoFilePath()
try? FileManager.default.removeItem(atPath: strFilePath)
exportSession?.outputURL = URL(fileURLWithPath: strFilePath)
exportSession?.outputFileType = .mp4
exportSession?.shouldOptimizeForNetworkUse = true
let mutableVideoComposition = AVMutableVideoComposition(propertiesOf: composition)
mutableVideoComposition.instructions = instructions
mutableVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
mutableVideoComposition.renderSize = CGSize(width: 1080, height: 1080)
let parentLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: 1080, height: 1080)
let videoLayer = CALayer()
videoLayer.frame.size = videoSize
videoLayer.position = parentLayer.position
videoLayer.contentsGravity = .resizeAspectFill
parentLayer.addSublayer(videoLayer)
mutableVideoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
mutableVideoComposition.renderSize = CGSize (width: 1080, height: 1080)
With this resolution, it takes the image relative to the top point, so I moved transform up
let coeConst = videoAssetWidth/videoAssetHeight
transform.translatedBy(x: -(videoAssetHeight-videoAssetHeight*coeConst)/2, y: 0)

Can't show animated CALayer in video using AVVideoCompositionCoreAnimationTool

UPDATE 6:
I've managed to fix my issue completely but I still would like a better explanation than what I'm guessing is the reason it didn't work if I'm incorrect
I've been trying to animate a sprite sheet over a video but every time I export the video the end result is the sample video I start with.
Here's my code:
First up my custom CALayer to handle my own sprite sheets
class SpriteLayer: CALayer {
var frameIndex: Int
override init() {
// Using 0 as a default state
self.frameIndex = 0
super.init()
}
required init?(coder aDecoder: NSCoder) {
self.frameIndex = 0
super.init(coder: aDecoder)
}
override func display() {
let currentFrameIndex = self.frameIndex
if currentFrameIndex == 0 {
return
}
let frameSize = self.contentsRect.size
self.contentsRect = CGRect(x: 0, y: CGFloat(currentFrameIndex - 1) * frameSize.height, width: frameSize.width, height: frameSize.height)
}
override func action(forKey event: String) -> CAAction? {
if event == "contentsRect" {
return nil
}
return super.action(forKey: event)
}
override class func needsDisplay(forKey key: String) -> Bool {
return key == "frameIndex"
}
}
Gif is a basic class with nothing fancy and works just fine. gif.Strip is a UIImage of a vertical sprite sheet representing the gif.
Now comes the method that should export a new video (it is part of a larger class used for exporting.
func convertAndExport(to url :URL , completion: #escaping () -> Void ) {
// Get Initial info and make sure our destination is available
self.outputURL = url
let stripCgImage = self.gif.strip!.cgImage!
// This is used to time how long the export took
let start = DispatchTime.now()
do {
try FileManager.default.removeItem(at: outputURL)
} catch {
print("Remove Error: \(error.localizedDescription)")
print(error)
}
// Find and load "sample.mp4" as a AVAsset
let videoPath = Bundle.main.path(forResource: "sample", ofType: "mp4")!
let videoUrl = URL(fileURLWithPath: videoPath)
let videoAsset = AVAsset(url: videoUrl)
// Start a new mutable Composition with the same base video track
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!
let clipVideoTrack = videoAsset.tracks(withMediaType: .video).first!
do {
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
} catch {
print("Insert Error: \(error.localizedDescription)")
print(error)
return
}
compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform
// Quick access to the video size
let videoSize = clipVideoTrack.naturalSize
// Setup CALayer and it's animation
let aLayer = SpriteLayer()
aLayer.contents = stripCgImage
aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
aLayer.opacity = 1.0
aLayer.masksToBounds = true
aLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
aLayer.contentsRect = CGRect(x: 0, y: 0, width: 1, height: 1.0 / 3.0)
let spriteAnimation = CABasicAnimation(keyPath: "frameIndex")
spriteAnimation.fromValue = 1
spriteAnimation.toValue = 4
spriteAnimation.duration = 2.25
spriteAnimation.repeatCount = .infinity
spriteAnimation.autoreverses = false
spriteAnimation.beginTime = AVCoreAnimationBeginTimeAtZero
aLayer.add(spriteAnimation, forKey: nil)
// Setup Layers for AVVideoCompositionCoreAnimationTool
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(aLayer)
// Create the mutable video composition
let videoComp = AVMutableVideoComposition()
videoComp.renderSize = videoSize
videoComp.frameDuration = CMTimeMake(1, 30)
videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
// Set the video composition to apply to the composition's video track
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
let videoTrack = mixComposition.tracks(withMediaType: .video).first!
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
instruction.layerInstructions = [layerInstruction]
videoComp.instructions = [instruction]
// Initialize export session
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!
assetExport.videoComposition = videoComp
assetExport.outputFileType = AVFileType.mp4
assetExport.outputURL = self.outputURL
assetExport.shouldOptimizeForNetworkUse = true
// Export
assetExport.exportAsynchronously {
let status = assetExport.status
switch status {
case .failed:
print("Export Failed")
print("Export Error: \(assetExport.error!.localizedDescription)")
print(assetExport.error!)
case .unknown:
print("Export Unknown")
case .exporting:
print("Export Exporting")
case .waiting:
print("Export Waiting")
case .cancelled:
print("Export Cancelled")
case .completed:
let end = DispatchTime.now()
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
let timeInterval = Double(nanoTime) / 1_000_000_000
// Function is now over, we can print how long it took
print("Time to generate video: \(timeInterval) seconds")
completion()
}
}
}
EDIT:
I based my code on the following links
SpriteLayer and how to use it
CABasicAnimation on a video
Using AVVideoCompositionCoreAnimationTool and AVAssetExportSession to save the new video
UPDATE 1:
I've tried removing the CABasicAnimation part of my code and played around with my CALayer but to no avail. I can't even get the image to show up.
To test things out I tried animating this sprite sheet using a CAKeyframeAnimation on contentsRect in a Xcode Playground and it worked fine so I don't think the issue is with the CABasicAnimation, and maybe not even with the CALayer itself. I could really use some help on this because I don't understand why I can't even get an image to show over my sample video on the export.
UPDATE 2:
In response to matt's comment I've tried forgetting about the sprite sheet for a bit and changed it into a CATextLayer but still not seeing anything on my video (it has dark images so white text should be perfectly visible)
let aLayer = CATextLayer()
aLayer.string = "This is a test"
aLayer.fontSize = videoSize.height / 6
aLayer.alignmentMode = kCAAlignmentCenter
aLayer.foregroundColor = UIColor.white.cgColor
aLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height / 6)
UPDATE 3:
As per Matt's request I tried changing parentLayer.addSublayer(aLayer) to videoLayer.addSublayer(aLayer) but still nothing changed, but I thought as much because the documentation for the AVVideoCompositionCoreAnimationTool is as follows
convenience init(postProcessingAsVideoLayer videoLayer: CALayer,
in animationLayer: CALayer)
meaning my parentLayer is it's animationLayer and probably means any animations should be done in this layer.
UPDATE 4:
I'm starting to go crazy over here, I've given up for now the idea of showing text or an animated image I just want to affect my video in any way possible so I changed aLayer to this:
let aLayer = CALayer()
aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
aLayer.backgroundColor = UIColor.white.cgColor
Well, this does absolutely nothing, I still get my sample video at my outputUrl (I started testing this in a playground with the following code if you want to "play" along)
import PlaygroundSupport
import UIKit
import Foundation
import AVFoundation
func convertAndExport(to url :URL , completion: #escaping () -> Void ) {
let start = DispatchTime.now()
do {
try FileManager.default.removeItem(at: url)
} catch {
print("Remove Error: \(error.localizedDescription)")
print(error)
}
let videoPath = Bundle.main.path(forResource: "sample", ofType: "mp4")!
let videoUrl = URL(fileURLWithPath: videoPath)
let videoAsset = AVURLAsset(url: videoUrl)
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!
let clipVideoTrack = videoAsset.tracks(withMediaType: .video).first!
do {
try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
} catch {
print("Insert Error: \(error.localizedDescription)")
print(error)
return
}
compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform
let videoSize = clipVideoTrack.naturalSize
print("Video Size Detected: \(videoSize.width) x \(videoSize.height)")
let aLayer = CALayer()
aLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
aLayer.backgroundColor = UIColor.white.cgColor
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(aLayer)
aLayer.setNeedsDisplay()
let videoComp = AVMutableVideoComposition()
videoComp.renderSize = videoSize
videoComp.frameDuration = CMTimeMake(1, 30)
videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
let videoTrack = mixComposition.tracks(withMediaType: .video).first!
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
instruction.layerInstructions = [layerInstruction]
videoComp.instructions = [instruction]
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!
assetExport.videoComposition = videoComp
assetExport.outputFileType = AVFileType.mp4
assetExport.outputURL = url
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously {
let status = assetExport.status
switch status {
case .failed:
print("Export Failed")
print("Export Error: \(assetExport.error!.localizedDescription)")
print(assetExport.error!)
case .unknown:
print("Export Unknown")
case .exporting:
print("Export Exporting")
case .waiting:
print("Export Waiting")
case .cancelled:
print("Export Cancelled")
case .completed:
let end = DispatchTime.now()
let nanoTime = end.uptimeNanoseconds - start.uptimeNanoseconds
let timeInterval = Double(nanoTime) / 1_000_000_000
print("Time to generate video: \(timeInterval) seconds")
completion()
}
}
}
let outputUrl = FileManager.default.temporaryDirectory.appendingPathComponent("test.mp4")
convertAndExport(to: outputUrl) {
print(outputUrl)
}
Please someone help me understand what I'm doing wrong...
UPDATE 5:
I am running everything except playground tests from an iPad Air 2 (so no simulator) because I use the camera to take pictures and then stitch them into a sprite sheet I then planned on animating on a video I would send by email. I started doing Playground testing because every test from the iPad required me to go through the whole app cycle (countdown, photos, form, email sending/receiving)
Ok, Finally got it to work as I always wanted it to.
First off even if he deleted his comments, thanks to Matt for the link to a working example that helped me piece together what was wrong with my code.
First off
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough)!
I needed to use AVAssetExportPresetHighestQuality instead of AVAssetExportPresetPassthrough. My guess is that the passthrough preset means you don't do any re-encoding so setting it to highest (not medium because my exported video is of over 400x400) made it so that I could actually re-encode my video. I'm guessing this is what was stopping the exported video from containing any of the CALayer I was trying out (even covering the video in white).
Secondly (not sure if this affects really but I'll try later)
parentLayer.addSublayer(aLayer)
I replaced this with:
videoLayer.addSublayer(aLayer)
Not sure if this really mattered but my understanding was that this was actually the animation layer for AVVideoCompositionCoreAnimationTool and parentLayer was just a container not meant to contain more than this, but I'm likely wrong.
Third change I did
let spriteAnimation = CABasicAnimation(keyPath: "frameIndex")
spriteAnimation.fromValue = 1
spriteAnimation.toValue = 4
spriteAnimation.duration = 2.25
spriteAnimation.repeatCount = .infinity
spriteAnimation.autoreverses = false
spriteAnimation.beginTime = AVCoreAnimationBeginTimeAtZero
aLayer.add(spriteAnimation, forKey: nil)
I changed it to this:
let animation = CAKeyframeAnimation(keyPath: #keyPath(CALayer.contentsRect))
animation.duration = 2.25
animation.calculationMode = kCAAnimationDiscrete
animation.repeatCount = .infinity
animation.values = [
CGRect(x: 0, y: 0, width: 1, height: 1/3.0),
CGRect(x: 0, y: 1/3.0, width: 1, height: 1/3.0),
CGRect(x: 0, y: 2/3.0, width: 1, height: 1/3.0)
] as [CGRect]
animation.beginTime = AVCoreAnimationBeginTimeAtZero
animation.fillMode = kCAFillModeBackwards
animation.isRemovedOnCompletion = false
aLayer.add(animation, forKey: nil)
This change was mainly removing my custom animations for the sprite sheet (since it will always be the same I first wanted a working example then I'll generalise it and probably add it to my private UI Pod). But most importantly animation.isRemovedOnCompletion = false I noticed that removing this makes it so the animation simply does not play on the exported video. So for anyone with CABasicAnimation not animating on the video after an export, try looking if your isRemovedOnCompletion is set correctly on your animation.
I think that's pretty much all the changed I did.
Although I technically answered my question my bounty remains to understand how AVVideoCompositionCoreAnimationTool and AVAssetExport work and why I had to do the changes I did to finally get it to work if anyone is interested in explaining.
Thanks again to Matt, you helped me out by showing me how you did it.

Trimming a video, while keeping quality

I am trimming an existing (stored in the gallery) video into smaller chunks; but I find the resulting video quality to be too degraded:
var chunkNumber = 1
let startTime = CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(600.0))
let theDuration = CMTime(seconds: 10.0, preferredTimescale: CMTimeScale(600.0))
// the AVMutableComposition holds the track instances
let mixCompostion = AVMutableComposition()
// video
let videoTrack = mixCompostion.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
do {
try videoTrack?.insertTimeRange(CMTimeRangeMake(startTime, theDuration),
of: (asset?.tracks(withMediaType: AVMediaType.video)[0])!, at: kCMTimeZero)
} catch {
print ("failed to load the video track")
}
// audio
if ((asset?.tracks(withMediaType: AVMediaType.audio).count)! > 0) {
let audioTrack = mixCompostion.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: 0)
do {
try audioTrack?.insertTimeRange(CMTimeRangeMake(startTime, theDuration),
of: (asset?.tracks(withMediaType: AVMediaType.audio)[0])!, at: kCMTimeZero)
} catch {
print ("failed to load the audio track")
}
}
// layers
let parentLayer = CALayer()
let videoLayer = CALayer()
let layerFrame = CGRect(x: 0.0, y: 0.0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
parentLayer.frame = layerFrame
videoLayer.frame = layerFrame
parentLayer.addSublayer(videoLayer)
// master instruction wraps entire set of instructions
let masterInstruction = AVMutableVideoCompositionInstruction()
masterInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, theDuration)
let videoLayerInstruction = videoCompositionInstructionForTrack(track: videoTrack!, asset: asset!)
// add instructions to master, prepare composition
masterInstruction.layerInstructions = [videoLayerInstruction]
let mainComposition = AVMutableVideoComposition()
mainComposition.instructions = [masterInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = CGSize(width: mainCompositionWidth, height: mainCompositionHeight)
mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
// get path
let date = Date()
let savePath = (documentDirectory as NSString).appendingPathComponent("\(date)-\(chunkNumber).mov")
let url = URL(fileURLWithPath: savePath)
chunkNumber += 1
// create exporter
guard let exporter = AVAssetExportSession(asset: mixCompostion, presetName: AVAssetExportPresetHighestQuality) else { return }
exporter.outputURL = url
exporter.outputFileType = AVFileType.mov
exporter.shouldOptimizeForNetworkUse = true
exporter.videoComposition = mainComposition
// export
exporter.exportAsynchronously(completionHandler: { () -> Void in
DispatchQueue.global().async {
self.exportDidFinish(session: exporter)
}
})
func exportDidFinish(session: AVAssetExportSession) {
if session.status == AVAssetExportSessionStatus.completed {
PHPhotoLibrary.shared().performChanges({
_ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: session.outputURL!)
}, completionHandler: { (success, error) in
let completionString = success ? "Success." : error?.localizedDescription
print ("Finished updating asset: \(String(describing: completionString))")
})
}
else if session.status == AVAssetExportSessionStatus.failed {
print ("Export failed -> Reason: \(String(describing: session.error))")
delegate?.exportFailed()
}
}
Is there anything different I could be doing, to keep the quality of the video as equal, or at least closer to the original?
For better control over video manipulation you could use ffmpeg via one of the wrapper libraries - see these links for example:
https://github.com/chrisballinger/FFmpeg-iOS
https://cocoapods.org/pods/FFmpegWrapper
I have not used one of the iOS ones, but I have done the same thing on Android and it works well, with the usual caveat that video processing on mobile devices is slow.