AVAssetWriterInput ReadyForMoreMediaData always false - swift

I'm trying to record CVPixelBuffers in real time, but I can't append a buffer because assetWriterInput.isReadyForMoreMediaData always returns false.
Can someone explain why this value is always false? Thanks.
class VideoRecorder{
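// Note: `throw "..."` on String literals assumes an `extension String: Error {}` elsewhere in the project,
// and `toCMTime()` is assumed to be a custom TimeInterval extension.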
static var shared = VideoRecorder()
var avAssetWriter: AVAssetWriter?
var Adaptor: AVAssetWriterInputPixelBufferAdaptor?
var Settings: RecorderSetting?
struct RecorderSetting{
var videoSetting: [String : Any]
var Path: URL
}
func makeVideoSettings(width: Int, height: Int, BitRate: Double) -> [String : Any]{
let VideoCompressionProperties = [
AVVideoAverageBitRateKey: Double(width * height) * BitRate
]
let videoSettings:[String : Any] = [
AVVideoCodecKey: AVVideoCodecType.hevc,
AVVideoWidthKey: width,
AVVideoHeightKey: height,
AVVideoCompressionPropertiesKey: VideoCompressionProperties
]
return videoSettings
}
func makePath(FileName: String) -> URL{
return URL(fileURLWithPath:
NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + "/\(FileName).mp4")
}
func setup(width: Int, height: Int,
BitRate: Double, FileName: String){
let setting = makeVideoSettings(width: width, height: height, BitRate: BitRate)
let Path = makePath(FileName: FileName)
Settings = RecorderSetting(videoSetting: setting, Path: Path)
}
func StartSession(FirstFrame: CVPixelBuffer) throws{
let attribute: [String : Any] = [
kCVPixelBufferPixelFormatTypeKey as String: CVPixelBufferGetPixelFormatType(FirstFrame),
kCVPixelBufferWidthKey as String: CVPixelBufferGetWidth(FirstFrame),
kCVPixelBufferHeightKey as String: CVPixelBufferGetHeight(FirstFrame)
]
if (Settings == nil){throw "Settings invalid"}
let writerInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: Settings!.videoSetting)
Adaptor =
AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: attribute)
Adaptor?.assetWriterInput.expectsMediaDataInRealTime = true
do{
avAssetWriter = try AVAssetWriter(url: Settings!.Path, fileType: AVFileType.mp4)
if (avAssetWriter!.canAdd(writerInput)){
let StartTime = Date().timeIntervalSince1970.toCMTime()
avAssetWriter?.startWriting()
avAssetWriter?.startSession(atSourceTime: StartTime)
try? WriteBuffer(Buffer: FirstFrame, time: StartTime)
}else{
throw "Add AVWriterInput Error"
}
}catch{
throw "Initializing Error"
}
}
func StopSession(){
if(Adaptor?.assetWriterInput.isReadyForMoreMediaData == false){return}
Adaptor?.assetWriterInput.markAsFinished()
avAssetWriter?.finishWriting(completionHandler: {
if let outputPath = self.Settings?.Path{
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputPath)
}) { saved, error in
if saved {
try? FileManager().removeItem(at: outputPath)
print("Saved ")
/*let fetchOptions = PHFetchOptions()
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
let fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions).firstObject
// fetchResult is your latest video PHAsset
// To fetch latest image replace .video with .image*/
}
}
}
})
}
func WriteBuffer(Buffer: CVPixelBuffer, time: CMTime) throws{
if(self.Adaptor != nil){
if (self.Adaptor!.assetWriterInput.isReadyForMoreMediaData){
let whetherPixelBufferAppendedtoAdaptor = self.Adaptor!.append(Buffer, withPresentationTime: time)
if(!whetherPixelBufferAppendedtoAdaptor){
print(avAssetWriter?.error as Any)
}
}else{
throw "Writer Input is not Ready"
}
}else{
throw "PixelBufferAdaptor invalild"
}
}
}

Found the problem.
AVAssetWriter.canAdd(_:) only checks whether the asset writer can add the input; it does not add it.
You need to call add(_:) to actually attach the input to the writer before you start writing.
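A minimal sketch of the corrected StartSession body, reusing the names from the question (the String errors assume the same String: Error extension):
avAssetWriter = try AVAssetWriter(url: Settings!.Path, fileType: .mp4)
if avAssetWriter!.canAdd(writerInput) {
    avAssetWriter!.add(writerInput) // this call was missing
    let StartTime = Date().timeIntervalSince1970.toCMTime()
    avAssetWriter!.startWriting()
    avAssetWriter!.startSession(atSourceTime: StartTime)
    try? WriteBuffer(Buffer: FirstFrame, time: StartTime)
} else {
    throw "Add AVWriterInput Error"
}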

Related

Share ringtone to Garageband via UIActivityViewController

I am implementing a ringtone application and I would like to share the audio file to GarageBand so users can continue the process.
Here is the code for how I share the media:
fileprivate func shareMedia(url: URL) {
let activityViewController:UIActivityViewController = UIActivityViewController(activityItems: [url], applicationActivities: nil)
activityViewController.popoverPresentationController?.sourceView = saveAsRingtone
activityViewController.popoverPresentationController?.sourceRect = saveAsRingtone.frame
present(activityViewController, animated: true, completion: nil)
}
To answer my own question, here is the full answer:
class AudioUtils {
class func trimAudio(audioToTrim: URL, startTime: Double, setExportSession: (AVAssetExportSession) -> Void, completion: @escaping (URL?, String?) -> Void) {
let audioFileInput = audioToTrim
let mixedAudio: String = "ringtone.aif"
let exportPathDirectory = NSTemporaryDirectory()
// let audioFileDirectory = URL(fileURLWithPath: exportPathDirectory).appendingPathComponent("Project").appendingPathExtension("band")
let projectBandDirectory = exportPathDirectory + "Project.band"
if (FileManager.default.fileExists(atPath: projectBandDirectory)) {
try! FileManager.default.removeItem(at: URL(fileURLWithPath: projectBandDirectory))
}
try! FileManager.default.createDirectory(atPath: projectBandDirectory, withIntermediateDirectories: true, attributes: nil)
let exportPath: String = projectBandDirectory + "/"
let bundlePath = Bundle.main.path(forResource: "projectData", ofType: "")
let fullDestPath = NSURL(fileURLWithPath: exportPath).appendingPathComponent("projectData")
let fullDestPathString = (fullDestPath?.path)!
try! FileManager.default.createDirectory(atPath: exportPath + "Media", withIntermediateDirectories: true, attributes: nil)
try! FileManager.default.createDirectory(atPath: exportPath + "Output", withIntermediateDirectories: true, attributes: nil)
let audioFileOutput = URL(fileURLWithPath: exportPathDirectory + mixedAudio)//.appendingPathExtension("band")
print("Will export to \(audioFileOutput.absoluteString)")
try? FileManager.default.removeItem(at: audioFileOutput)
let asset = AVAsset(url: audioFileInput)
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough)
if (exportSession == nil) {
completion(nil, "ExportSession is nil")
return
}
setExportSession(exportSession!)
let startCMTime = CMTimeMakeWithSeconds(startTime, preferredTimescale: 1)
let stopCMtime = CMTimeMakeWithSeconds(startTime + 30, preferredTimescale: 1)
let exportTimeRange = CMTimeRangeFromTimeToTime(start: startCMTime, end: stopCMtime)
exportSession?.outputURL = audioFileOutput
exportSession?.outputFileType = AVFileType.caf
exportSession?.timeRange = exportTimeRange
exportSession?.exportAsynchronously {
switch (exportSession?.status) {
case .completed:
var options = AKConverter.Options()
// any options left nil will assume the value of the input file
options.format = "aif"
options.sampleRate = 48000
options.bitDepth = 24
let sourceUrl = audioFileOutput
let destUrl = URL(fileURLWithPath: exportPath + "Media/ringtone.aiff")
convertAudio(sourceUrl, outputURL: destUrl)
let fileData = try! Data.init(contentsOf: destUrl)
var fileStream:String = fileData.base64EncodedString(options: NSData.Base64EncodingOptions.init(rawValue: 0))
fileStream = String(fileStream.dropLast())
let playersDictionary = NSMutableDictionary(contentsOfFile: bundlePath!)
let playersNamesArray = (playersDictionary?.object(forKey: "$objects"))! as! NSMutableArray
let nsDataParentDictionary = (playersNamesArray.object(at: 4)) as! NSMutableDictionary
nsDataParentDictionary.setValue(fileStream, forKey: "NS.data")
playersNamesArray.removeObject(at: 4)
playersNamesArray.insert(nsDataParentDictionary, at: 4)
playersDictionary?.setValue(playersNamesArray, forKey: "$objects")
playersDictionary?.write(toFile: fullDestPathString, atomically: true)
do {
try FileManager.default.copyItem(atPath: bundlePath!, toPath: fullDestPathString)
completion(URL(fileURLWithPath: exportPath), nil)
} catch let exception {
completion(nil, exception.localizedDescription)
}
break
default:
completion(nil, exportSession?.error?.localizedDescription)
break
}
}
}
class func convertAudio(_ url: URL, outputURL: URL) {
var error : OSStatus = noErr
var destinationFile : ExtAudioFileRef? = nil
var sourceFile : ExtAudioFileRef? = nil
var srcFormat : AudioStreamBasicDescription = AudioStreamBasicDescription()
var dstFormat : AudioStreamBasicDescription = AudioStreamBasicDescription()
ExtAudioFileOpenURL(url as CFURL, &sourceFile)
var thePropertySize: UInt32 = UInt32(MemoryLayout.stride(ofValue: srcFormat))
ExtAudioFileGetProperty(sourceFile!,
kExtAudioFileProperty_FileDataFormat,
&thePropertySize, &srcFormat)
dstFormat.mSampleRate = 44100 //Set sample rate
dstFormat.mFormatID = kAudioFormatLinearPCM
dstFormat.mChannelsPerFrame = 1
dstFormat.mBitsPerChannel = 16
dstFormat.mBytesPerPacket = 2 * dstFormat.mChannelsPerFrame
dstFormat.mBytesPerFrame = 2 * dstFormat.mChannelsPerFrame
dstFormat.mFramesPerPacket = 1
dstFormat.mFormatFlags = kAudioFormatFlagIsBigEndian |
kAudioFormatFlagIsSignedInteger
// Create destination file
error = ExtAudioFileCreateWithURL(
outputURL as CFURL,
kAudioFileAIFFType,
&dstFormat,
nil,
AudioFileFlags.eraseFile.rawValue,
&destinationFile)
print("Error = \(error)")
error = ExtAudioFileSetProperty(sourceFile!,
kExtAudioFileProperty_ClientDataFormat,
thePropertySize,
&dstFormat)
print("Error = \(error)")
error = ExtAudioFileSetProperty(destinationFile!,
kExtAudioFileProperty_ClientDataFormat,
thePropertySize,
&dstFormat)
print("Error = \(error)")
let bufferByteSize : UInt32 = 32768
var srcBuffer = [UInt8](repeating: 0, count: 32768)
var sourceFrameOffset: UInt32 = 0
while(true){
var fillBufList = AudioBufferList(
mNumberBuffers: 1,
mBuffers: AudioBuffer(
mNumberChannels: 2,
mDataByteSize: UInt32(srcBuffer.count),
mData: &srcBuffer
)
)
var numFrames : UInt32 = 0
if(dstFormat.mBytesPerFrame > 0){
numFrames = bufferByteSize / dstFormat.mBytesPerFrame
}
error = ExtAudioFileRead(sourceFile!, &numFrames, &fillBufList)
print("Error = \(error)")
if(numFrames == 0){
error = noErr;
break;
}
sourceFrameOffset += numFrames
error = ExtAudioFileWrite(destinationFile!, numFrames, &fillBufList)
print("Error = \(error)")
}
error = ExtAudioFileDispose(destinationFile!)
print("Error = \(error)")
error = ExtAudioFileDispose(sourceFile!)
print("Error = \(error)")
}
}
What happens here?
First of all, there is a function that trims the audio to 30 seconds.
I inspected an existing .band project file, identified its components, and recreated those files programmatically. Please feel free to ask me anything about the code.
There is a projectData file that you should use; it is found in every GarageBand project folder:
https://www.dropbox.com/s/7r7uh2ekzigyy3u/projectData?dl=0
There's no problem with this method for sharing a track to GarageBand.
Just make sure that your shared file's extension is "band":
.appendingPathExtension("band"), not anything else.

Recording video and audio with AVAssetWriter in Swift

I'm trying to add the device's microphone audio to a video recording from the device's camera. The video is filtered with a CIFilter and works as expected. My problem is that the mic audio is not attached to the video once saved.
I have tried setting the audio settings manually like this:
let audioSettings : [String : Any] = [
AVFormatIDKey : kAudioFormatMPEG4AAC,
AVNumberOfChannelsKey: 1,
AVSampleRateKey : 44100,
AVEncoderBitRateKey : 64000
]
but using the recommendedAudioSettingsForAssetWriter method seems like the correct approach, since the video side works with the corresponding recommendedVideoSettingsForAssetWriter method.
Can anyone tell me how to achieve this or point me in the right direction?
My code so far:
import UIKit
import AVFoundation
class VideoViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
@IBOutlet weak var imageView: UIImageView!
lazy var cameraDevice: AVCaptureDevice? = {
return AVCaptureDevice.default(for: .video)
}()
lazy var micDevice: AVCaptureDevice? = {
return AVCaptureDevice.default(for: .audio)
}()
var captureSession = AVCaptureSession()
var outputURL: URL!
var orientation: AVCaptureVideoOrientation = .landscapeRight
var filterObject = FilterObject()
var assetWriter: AVAssetWriter?
var assetWriterInput: AVAssetWriterInput?
var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor?
var fileName = ""
var recordingState = RecordingState.idle
var time: Double = 0
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
let context = CIContext()
override func viewDidLoad() {
super.viewDidLoad()
setupCameraDevice()
setupAudioDevice()
setupInputOutput()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
setUpAuthStatus()
}
@IBAction func recordPressed(_ sender: UIButton) {
switch recordingState {
case .idle:
recordingState = .start
case .capturing:
recordingState = .end
default:
break
}
}
func setUpAuthStatus() {
if AVCaptureDevice.authorizationStatus(for: AVMediaType.video) != .authorized {
AVCaptureDevice.requestAccess(for: AVMediaType.video, completionHandler: { (authorized) in
DispatchQueue.main.async {
if authorized {
self.setupInputOutput()
}
}
})
}
if AVCaptureDevice.authorizationStatus(for: AVMediaType.audio) != .authorized {
AVCaptureDevice.requestAccess(for: AVMediaType.audio, completionHandler: { (authorized) in
DispatchQueue.main.async {
if authorized {
self.setupInputOutput()
}
}
})
}
}
func setupCameraDevice() {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: .video, position: .unspecified)
let devices = deviceDiscoverySession.devices
for device in devices {
if device.position == .back {
cameraDevice = device
}
}
}
func setupAudioDevice() {
let audioDeviceDisoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInMicrophone], mediaType: .audio, position: .unspecified)
let devices = audioDeviceDisoverySession.devices
micDevice = devices[0]
}
func setupInputOutput() {
do {
guard let cameraDevice = cameraDevice else { return }
let captureDeviceInput = try AVCaptureDeviceInput(device: cameraDevice)
guard let micDevice = micDevice else { return }
let micDeviceInput = try AVCaptureDeviceInput(device: micDevice)
captureSession.sessionPreset = AVCaptureSession.Preset.hd1920x1080
if captureSession.canAddInput(captureDeviceInput) {
captureSession.addInput(captureDeviceInput)
}
if captureSession.canAddInput(micDeviceInput) {
captureSession.addInput(micDeviceInput)
}
let queue = DispatchQueue(label: "com.apple.sample.capturepipeline.video", attributes: [])
if captureSession.canAddOutput(videoOutput) {
videoOutput.setSampleBufferDelegate(self, queue: queue)
captureSession.addOutput(videoOutput)
}
if captureSession.canAddOutput(audioOutput) {
audioOutput.setSampleBufferDelegate(self, queue: queue)
captureSession.addOutput(audioOutput)
}
captureSession.commitConfiguration()
captureSession.startRunning()
} catch {
print(error)
}
}
func captureOutput(_ output: AVCaptureOutput,
didOutput sampleBuffer: CMSampleBuffer,
from connection: AVCaptureConnection) {
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
audioOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {return}
let cameraImage = CIImage(cvImageBuffer: imageBuffer)
guard let name = filterObject.name else {return}
let effect = FilterType.genericFilter(name: name, cameraImage: cameraImage)
effect.setValue(cameraImage, forKey: kCIInputImageKey)
TableData.setFilterValues(withFilterName: name, effect: effect, values: [value1, value2])
guard let outputImage = effect.outputImage else { return }
context.render(outputImage, to: imageBuffer)
guard let cgImage = self.context.createCGImage(outputImage, from: cameraImage.extent) else { return }
DispatchQueue.main.async {
let filteredImage = UIImage(cgImage: cgImage)
self.imageView.image = filteredImage
}
let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds
switch recordingState {
case .start:
fileName = UUID().uuidString
let videoPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(fileName).mov")
let writer = try! AVAssetWriter(outputURL: videoPath, fileType: .mov)
let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mov)
let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
videoInput.mediaTimeScale = CMTimeScale(bitPattern: 600)
videoInput.expectsMediaDataInRealTime = true
let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .m4a)
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings as? [String : Any])
audioInput.expectsMediaDataInRealTime = true
//videoInput.transform = CGAffineTransform(rotationAngle: .pi/2)
let pixelAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: nil)
if writer.canAdd(videoInput) {
writer.add(videoInput)
}
if writer.canAdd(audioInput) {
writer.add(audioInput)
}
writer.startWriting()
writer.startSession(atSourceTime: .zero)
assetWriter = writer
assetWriterInput = videoInput
pixelBufferAdaptor = pixelAdapter
recordingState = .capturing
time = timestamp
case .capturing:
if assetWriterInput?.isReadyForMoreMediaData == true {
let newTime = CMTime(seconds: timestamp - time, preferredTimescale: CMTimeScale(600))
pixelBufferAdaptor?.append(imageBuffer, withPresentationTime: newTime)
}
break
case .end:
guard assetWriterInput?.isReadyForMoreMediaData == true, assetWriter!.status != .failed else { break }
let url = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(fileName).mov")
assetWriterInput?.markAsFinished()
assetWriter?.finishWriting { [weak self] in
self?.recordingState = .idle
self?.assetWriter = nil
self?.assetWriterInput = nil
DispatchQueue.main.async {
let activity = UIActivityViewController(activityItems: [url], applicationActivities: nil)
self?.present(activity, animated: true, completion: nil)
}
}
default:
break
}
}
}
Your audio settings do not look correct. The AVSampleRateKey value should come from the format description of the first audio sample buffer that comes in, not be hard-coded. Your value of 44100 looks like it was meant for the AVEncoderBitRateKey, and that should maybe be set to AVEncoderBitRateKey: Int(48_000).
To get the sample rate, first call
let fmt = CMSampleBufferGetFormatDescription(sampleBuffer)
let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt!)
and then the sample rate will be asbd?.pointee.mSampleRate, and that should be set as the AVSampleRateKey value in the audio settings (I think).
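Putting that together, a sketch of building the audio settings from the incoming buffer (hedged: the surrounding delegate method, the AAC format choice, and the 48 kbps bit rate are assumptions):
guard let fmt = CMSampleBufferGetFormatDescription(sampleBuffer),
      let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt) else { return }
let audioSettings: [String: Any] = [
    AVFormatIDKey: kAudioFormatMPEG4AAC,
    AVNumberOfChannelsKey: Int(asbd.pointee.mChannelsPerFrame),
    AVSampleRateKey: asbd.pointee.mSampleRate, // taken from the live format, not hard-coded
    AVEncoderBitRateKey: 48_000
]
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
audioInput.expectsMediaDataInRealTime = true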

Video local file URL not compatible with saved photos album

I am converting a single image into a video and then trying to save the video to the camera roll, but UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(url.path) always returns false.
Selecting image from camera roll:
if UIImagePickerController.isSourceTypeAvailable(.photoLibrary){
let imagePicker = UIImagePickerController()
imagePicker.delegate = self
imagePicker.sourceType = .photoLibrary
imagePicker.allowsEditing = false
present(imagePicker, animated: true, completion: nil)
}
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
dismiss(animated: true, completion: nil)
var message = ""
if let img = info[UIImagePickerController.InfoKey.originalImage] as? UIImage {
self.img = img
message = "image chosen"
} else { print("FAILED IN PICKER"); return }
let alert = UIAlertController(title: "Asset Loaded", message: message, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.cancel, handler: nil))
present(alert, animated: true, completion: nil)
}
On a button press, I try to convert the image to a video and save it to the camera roll:
guard let img = img else { return }
let images = [img]
VideoCreator.buildVideoFromImageArray(with: images) { (url) in
print("PATH: " + url.path)
if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(url.path) {
print("PATHHHHH")
UISaveVideoAtPathToSavedPhotosAlbum(
url.path,
self,
#selector(self.video(_:didFinishSavingWithError:contextInfo:)),
nil)
}
}
Video Creator: (ignore the lack of refactoring and gross code)
import UIKit
import AVFoundation
class VideoCreator {
static private var outputSize = CGSize(width: 1920, height: 1280) // placeholder
static private let imagesPerSecond: TimeInterval = 10 // how long each image stays on screen, in seconds
static private var selectedPhotosArray = [UIImage]()
static private var imageArrayToVideoURL = NSURL()
static private let audioIsEnabled: Bool = false //if your video has no sound
static private var asset: AVAsset!
static func buildVideoFromImageArray(with images: [UIImage], completionHandler: @escaping (_ url: URL) -> Void) {
outputSize = images[0].size
for image in images {
selectedPhotosArray.append(image)
}
guard let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return }
imageArrayToVideoURL = NSURL(fileURLWithPath: documentDirectory.path + "/video.mov")
removeFileAtURLIfExists(url: imageArrayToVideoURL)
guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileType.mov) else {
fatalError("AVAssetWriter error")
}
let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
fatalError("Negative : Can't apply the Output settings...")
}
let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
if videoWriter.canAdd(videoWriterInput) {
videoWriter.add(videoWriterInput)
}
if videoWriter.startWriting() {
let zeroTime = CMTimeMake(value: Int64(imagesPerSecond),timescale: Int32(1))
videoWriter.startSession(atSourceTime: zeroTime)
assert(pixelBufferAdaptor.pixelBufferPool != nil)
let media_queue = DispatchQueue(label: "mediaInputQueue")
videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
let fps: Int32 = 1
let framePerSecond: Int64 = Int64(self.imagesPerSecond)
let frameDuration = CMTimeMake(value: Int64(self.imagesPerSecond), timescale: fps)
var frameCount: Int64 = 0
var appendSucceeded = true
while (!self.selectedPhotosArray.isEmpty) {
if (videoWriterInput.isReadyForMoreMediaData) {
let nextPhoto = self.selectedPhotosArray.remove(at: 0)
let lastFrameTime = CMTimeMake(value: frameCount * framePerSecond, timescale: fps)
let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
var pixelBuffer: CVPixelBuffer? = nil
let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
if let pixelBuffer = pixelBuffer, status == 0 {
let managedPixelBuffer = pixelBuffer
CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))
let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
//let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
} else {
print("Failed to allocate pixel buffer")
appendSucceeded = false
}
}
if !appendSucceeded {
break
}
frameCount += 1
}
videoWriterInput.markAsFinished()
videoWriter.finishWriting { () -> Void in
print("-----video1 url = \(self.imageArrayToVideoURL)")
completionHandler(self.imageArrayToVideoURL as URL)
}
})
}
}
private static func removeFileAtURLIfExists(url: NSURL) {
if let filePath = url.path {
let fileManager = FileManager.default
if fileManager.fileExists(atPath: filePath) {
do{
try fileManager.removeItem(atPath: filePath)
} catch let error as NSError {
print("Couldn't remove existing destination file: \(error)")
}
}
}
}
}
You can always save your file and check whether it was successfully saved to Photos using:
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: tempUrl)
})
UISaveVideoAtPathToSavedPhotosAlbum(_:_:_:_:) doesn't work for some.
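A slightly fuller sketch with the completion handler, so a failure actually surfaces (tempUrl as in the snippet above):
PHPhotoLibrary.shared().performChanges({
    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: tempUrl)
}) { saved, error in
    if saved {
        print("Video saved to Photos")
    } else {
        print("Save failed: \(String(describing: error))")
    }
}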

Appending to AVAssetWriterInputPixelBufferAdaptor fails

I want to output a video file from the depth camera. Displaying the data works, but I cannot append the pixel buffer to the AVAssetWriterInputPixelBufferAdaptor. Unfortunately, there is no more info than a negative boolean.
In this function we set up the writer:
func createWriter() {
do {
assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileType.mov)
} catch let error as NSError {
print(error.localizedDescription)
return
}
let outputSettings = [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : Int(currentVideoDimensions!.width),
AVVideoHeightKey : Int(currentVideoDimensions!.height)
] as [String : Any]
let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(.pi / 2.0))
assetWriterVideoInput.expectsMediaDataInRealTime = true
let sourcePixelBufferAttributesDictionary = [
String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
String(kCVPixelFormatOpenGLESCompatibility) : kCFBooleanTrue!
] as [String : Any]
assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
if assetWriter!.canAdd(assetWriterVideoInput) {
assetWriter!.add(assetWriterVideoInput)
} else {
print("no way\(assetWriterVideoInput)")
}
}
which is used later to append the pixel buffers after a frame is recorded:
public func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
autoreleasepool {
connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;
let pixelBuffer : CVPixelBuffer? = CMSampleBufferGetImageBuffer(sampleBuffer)
let cameraImage = CIImage(cvPixelBuffer: pixelBuffer!)
let previewImage: CIImage
previewImage = depthMap ?? cameraImage
let displayImage = UIImage(ciImage: previewImage)
let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
if self.isWriting {
if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
let success = self.assetWriterPixelBufferInput?.append(pixelBuffer!, withPresentationTime: self.currentSampleTime!)
if success == false {
print("Pixel Buffer failed")
}
}
}
DispatchQueue.main.async { [weak self] in
self?.imageView.image = displayImage
}
}
}
We checked the image and buffer sizes and they were the same; appending still doesn't work and there is no proper error message.
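One way to get more than a boolean out of a failed append is to inspect the writer itself (a hedged diagnostic sketch, not part of the original question):
if success == false {
    print("Pixel buffer append failed")
    print("Writer status: \(assetWriter!.status.rawValue)")
    if let error = assetWriter!.error {
        print("Writer error: \(error)") // often carries the underlying AVFoundation error
    }
}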

How to add transitions to set images exported as video in Swift

I am making an app which has functionality for transitions, for example the video should play from top to bottom, or with a left-to-right transition effect. I am able to generate a video from an array of images in Swift 3.0, but I am not getting how to add the transitions.
I got the code from this answer: How do I export UIImage array as a movie?
I am using it like this and it works fine; the only problem is getting the transitions.
func writeImagesAsMovie(allImages: [UIImage], videoPath: String, videoSize: CGSize, videoFPS: Int32) {
// Create AVAssetWriter to write video
guard let assetWriter = createAssetWriter(videoPath, size: videoSize) else {
print("Error converting images to video: AVAssetWriter not created")
return
}
// If here, AVAssetWriter exists so create AVAssetWriterInputPixelBufferAdaptor
let writerInput = assetWriter.inputs.filter{ $0.mediaType == AVMediaTypeVideo }.first!
let sourceBufferAttributes : [String : AnyObject] = [
kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_32ARGB) as AnyObject,
kCVPixelBufferWidthKey as String : videoSize.width as AnyObject,
kCVPixelBufferHeightKey as String : videoSize.height as AnyObject,
]
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: sourceBufferAttributes)
// Start writing session
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: kCMTimeZero)
if (pixelBufferAdaptor.pixelBufferPool == nil) {
print("Error converting images to video: pixelBufferPool nil after starting session")
return
}
// -- Create queue for <requestMediaDataWhenReadyOnQueue>
let mediaQueue = DispatchQueue(label: "mediaInputQueue", attributes: [])
// -- Set video parameters
let frameDuration = CMTimeMake(1, videoFPS)
var frameCount = 0
// -- Add images to video
let numImages = allImages.count
writerInput.requestMediaDataWhenReady(on: mediaQueue, using: { () -> Void in
// Append unadded images to video but only while input ready
while (writerInput.isReadyForMoreMediaData && frameCount < numImages) {
let lastFrameTime = CMTimeMake(Int64(frameCount), videoFPS)
let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
if !self.appendPixelBufferForImageAtURL(image: allImages[frameCount], pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
print("Error converting images to video: AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer")
return
}
frameCount += 1
}
// No more images to add? End video.
if (frameCount >= numImages) {
writerInput.markAsFinished()
assetWriter.finishWriting {
if (assetWriter.error != nil) {
print("Error converting images to video: \(String(describing: assetWriter.error))")
} else {
print("Converted images to movie # \(videoPath)")
}
}
}
})
}
func createAssetWriter(_ path: String, size: CGSize) -> AVAssetWriter? {
// Convert <path> to NSURL object
let pathURL = URL(fileURLWithPath: path)
// Return new asset writer or nil
do {
// Create asset writer
let newWriter = try AVAssetWriter(outputURL: pathURL, fileType: AVFileTypeMPEG4)
// Define settings for video input
let videoSettings: [String : AnyObject] = [
AVVideoCodecKey : AVVideoCodecH264 as AnyObject,
AVVideoWidthKey : size.width as AnyObject,
AVVideoHeightKey : size.height as AnyObject,
]
// Add video input to writer
let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
newWriter.add(assetWriterVideoInput)
// Return writer
print("Created asset writer for \(size.width)x\(size.height) video")
return newWriter
} catch {
print("Error creating asset writer: \(error)")
return nil
}
}
func appendPixelBufferForImageAtURL(image: UIImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
var appendSucceeded = false
autoreleasepool {
if let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
let pixelBufferPointer = UnsafeMutablePointer<CVPixelBuffer?>.allocate(capacity:1)
let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
kCFAllocatorDefault,
pixelBufferPool,
pixelBufferPointer
)
if let pixelBuffer = pixelBufferPointer.pointee , status == 0 {
fillPixelBufferFromImage(image: image, pixelBuffer: pixelBuffer)
appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
pixelBufferPointer.deinitialize()
} else {
NSLog("Error: Failed to allocate pixel buffer from pool")
}
pixelBufferPointer.deallocate(capacity: 1)
}
}
return appendSucceeded
}
func fillPixelBufferFromImage(image: UIImage, pixelBuffer: CVPixelBuffer) {
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
// Create CGBitmapContext
let context = CGContext(
data: pixelData,
width: Int(image.size.width),
height: Int(image.size.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
space: rgbColorSpace,
bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
)
// Draw image into context
context?.draw(image.cgImage!, in: CGRect(x: 0.0, y: 0.0, width: image.size.width, height: image.size.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
}