Video local file URL not compatible with saved photos album - Swift

I am converting a single image into a video and then trying to save the video to the camera roll, but UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(url.path) is always false.
Selecting the image from the camera roll:
if UIImagePickerController.isSourceTypeAvailable(.photoLibrary){
let imagePicker = UIImagePickerController()
imagePicker.delegate = self
imagePicker.sourceType = .photoLibrary
imagePicker.allowsEditing = false
present(imagePicker, animated: true, completion: nil)
}
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
dismiss(animated: true, completion: nil)
var message = ""
if let img = info[UIImagePickerController.InfoKey.originalImage] as? UIImage {
self.img = img
message = "image chosen"
} else { print("FAILED IN PICKER"); return }
let alert = UIAlertController(title: "Asset Loaded", message: message, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: UIAlertAction.Style.cancel, handler: nil))
present(alert, animated: true, completion: nil)
}
On a button press, I try to convert the image to a video and save it to the camera roll:
guard let img = img else { return }
let images = [img]
VideoCreator.buildVideoFromImageArray(with: images) { (url) in
print("PATH: " + url.path)
if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(url.path) {
print("PATHHHHH")
UISaveVideoAtPathToSavedPhotosAlbum(
url.path,
self,
#selector(self.video(_:didFinishSavingWithError:contextInfo:)),
nil)
}
}
Video Creator: (ignore the lack of refactoring and gross code)
import UIKit
import AVFoundation
class VideoCreator {
static private var outputSize = CGSize(width: 1920, height: 1280) // placeholder
static private let imagesPerSecond: TimeInterval = 10 // despite the name, each image stays on screen for this many seconds
static private var selectedPhotosArray = [UIImage]()
static private var imageArrayToVideoURL = NSURL()
static private let audioIsEnabled: Bool = false // set to false if your video has no sound
static private var asset: AVAsset!
static func buildVideoFromImageArray(with images: [UIImage], completionHandler: @escaping (_ url: URL) -> Void) {
outputSize = images[0].size
for image in images {
selectedPhotosArray.append(image)
}
guard let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { return }
imageArrayToVideoURL = NSURL(fileURLWithPath: documentDirectory.path + "/video.mov")
removeFileAtURLIfExists(url: imageArrayToVideoURL)
guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileType.mov) else {
fatalError("AVAssetWriter error")
}
let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
fatalError("Negative : Can't apply the Output settings...")
}
let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
if videoWriter.canAdd(videoWriterInput) {
videoWriter.add(videoWriterInput)
}
if videoWriter.startWriting() {
let zeroTime = CMTimeMake(value: Int64(imagesPerSecond), timescale: Int32(1))
videoWriter.startSession(atSourceTime: zeroTime)
assert(pixelBufferAdaptor.pixelBufferPool != nil)
let media_queue = DispatchQueue(label: "mediaInputQueue")
videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
let fps: Int32 = 1
let framePerSecond: Int64 = Int64(self.imagesPerSecond)
let frameDuration = CMTimeMake(value: Int64(self.imagesPerSecond), timescale: fps)
var frameCount: Int64 = 0
var appendSucceeded = true
while (!self.selectedPhotosArray.isEmpty) {
if (videoWriterInput.isReadyForMoreMediaData) {
let nextPhoto = self.selectedPhotosArray.remove(at: 0)
let lastFrameTime = CMTimeMake(value: frameCount * framePerSecond, timescale: fps)
let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
var pixelBuffer: CVPixelBuffer? = nil
let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
if let pixelBuffer = pixelBuffer, status == 0 {
let managedPixelBuffer = pixelBuffer
CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))
let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
//let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
} else {
print("Failed to allocate pixel buffer")
appendSucceeded = false
}
}
if !appendSucceeded {
break
}
frameCount += 1
}
videoWriterInput.markAsFinished()
videoWriter.finishWriting { () -> Void in
print("-----video1 url = \(self.imageArrayToVideoURL)")
completionHandler(self.imageArrayToVideoURL as URL)
}
})
}
}
private static func removeFileAtURLIfExists(url: NSURL) {
if let filePath = url.path {
let fileManager = FileManager.default
if fileManager.fileExists(atPath: filePath) {
do{
try fileManager.removeItem(atPath: filePath)
} catch let error as NSError {
print("Couldn't remove existing destination file: \(error)")
}
}
}
}
}

You can always save your file and check whether it was successfully saved to Photos using:
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: tempUrl)
})
UISaveVideoAtPathToSavedPhotosAlbum(_:_:_:_:) doesn't work for some people.
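For completeness, performChanges has a completion handler that reports whether the save actually succeeded. A minimal sketch (assuming tempUrl is the finished video's file URL, import Photos, and photo library authorization already granted):

import Photos

PHPhotoLibrary.shared().performChanges({
    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: tempUrl)
}) { saved, error in
    if saved {
        print("Video saved to Photos")
    } else {
        print("Save failed: \(String(describing: error))")
    }
}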

Related

AVAssetWriterInput ReadyForMoreMediaData always false

I'm trying to record CVPixelBuffers in real time, but I can't append a buffer because assetWriterInput.isReadyForMoreMediaData always returns false.
Can someone explain why this value is always false? Thanks.
import AVFoundation
import Photos

// Note: this code relies on helpers not shown in the question, e.g.
// `extension String: Error {}` (so plain strings can be thrown) and a
// `TimeInterval.toCMTime()` extension.
class VideoRecorder {
static var shared = VideoRecorder()
var avAssetWriter: AVAssetWriter?
var Adaptor: AVAssetWriterInputPixelBufferAdaptor?
var Settings: RecorderSetting?
struct RecorderSetting{
var videoSetting: [String : Any]
var Path: URL
}
func makeVideoSettings(width: Int, height: Int, BitRate: Double) -> [String : Any]{
let VideoCompressionProperties = [
AVVideoAverageBitRateKey: Double(width * height) * BitRate
]
let videoSettings:[String : Any] = [
AVVideoCodecKey: AVVideoCodecType.hevc,
AVVideoWidthKey: width,
AVVideoHeightKey: height,
AVVideoCompressionPropertiesKey: VideoCompressionProperties
]
return videoSettings
}
func makePath(FileName: String) -> URL{
return URL(fileURLWithPath:
NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + "/\(FileName).mp4")
}
func setup(width: Int, height: Int,
BitRate: Double, FileName: String){
let setting = makeVideoSettings(width: width, height: height, BitRate: BitRate)
let Path = makePath(FileName: FileName)
Settings = RecorderSetting(videoSetting: setting, Path: Path)
}
func StartSession(FirstFrame: CVPixelBuffer) throws{
let attribute: [String : Any] = [
kCVPixelBufferPixelFormatTypeKey as String: CVPixelBufferGetPixelFormatType(FirstFrame),
kCVPixelBufferWidthKey as String: CVPixelBufferGetWidth(FirstFrame),
kCVPixelBufferHeightKey as String: CVPixelBufferGetHeight(FirstFrame)
]
if (Settings == nil){throw "Settings invalid"}
let writerInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: Settings!.videoSetting)
Adaptor =
AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: attribute)
Adaptor?.assetWriterInput.expectsMediaDataInRealTime = true
do{
avAssetWriter = try AVAssetWriter(url: Settings!.Path, fileType: AVFileType.mp4)
if (avAssetWriter!.canAdd(writerInput)){
let StartTime = Date().timeIntervalSince1970.toCMTime()
avAssetWriter?.startWriting()
avAssetWriter?.startSession(atSourceTime: StartTime)
try? WriteBuffer(Buffer: FirstFrame, time: StartTime)
}else{
throw "Add AVWriterInput Error"
}
}catch{
throw "Initializing Error"
}
}
func StopSession(){
if(Adaptor?.assetWriterInput.isReadyForMoreMediaData == false){return}
Adaptor?.assetWriterInput.markAsFinished()
avAssetWriter?.finishWriting(completionHandler: {
if let outputPath = self.Settings?.Path{
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputPath)
}) { saved, error in
if saved {
try? FileManager().removeItem(at: outputPath)
print("Saved ")
/*let fetchOptions = PHFetchOptions()
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
let fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions).firstObject
// fetchResult is your latest video PHAsset
// To fetch latest image replace .video with .image*/
}
}
}
})
}
func WriteBuffer(Buffer: CVPixelBuffer, time: CMTime) throws{
if(self.Adaptor != nil){
if (self.Adaptor!.assetWriterInput.isReadyForMoreMediaData){
let whetherPixelBufferAppendedtoAdaptor = self.Adaptor!.append(Buffer, withPresentationTime: time)
if(!whetherPixelBufferAppendedtoAdaptor){
print(avAssetWriter?.error as Any)
}
}else{
throw "Writer Input is not Ready"
}
}else{
throw "PixelBufferAdaptor invalild"
}
}
}
I found the problem: AVAssetWriter.canAdd(_:) only checks whether the asset writer can add the input.
You need to call add(_:) to actually add the input before you start writing.
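Applied to the question's code, the fix is a single line in StartSession (a sketch using the same names):

if avAssetWriter!.canAdd(writerInput) {
    avAssetWriter!.add(writerInput) // this call was missing
    let StartTime = Date().timeIntervalSince1970.toCMTime()
    avAssetWriter?.startWriting()
    avAssetWriter?.startSession(atSourceTime: StartTime)
    try? WriteBuffer(Buffer: FirstFrame, time: StartTime)
} else {
    throw "Add AVWriterInput Error"
}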

Video recording fails when adding timestamp to video

I'm making an iOS video recording app. I convert each CMSampleBuffer to a UIImage, draw a timestamp on it, convert it back to a CMSampleBuffer, and append it to the AVAssetWriterInput. When I don't add the timestamp and append the received CMSampleBuffer directly to the AVAssetWriterInput, recording finishes correctly, but when I add the timestamp to the received image, recording fails. How can I fix this?
private func setupCaptureSession() {
session.sessionPreset = .vga640x480
guard
let videoDevice = AVCaptureDevice.default(for: .video),
let audioDevice = AVCaptureDevice.default(for: .audio),
let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
let audioInput = try? AVCaptureDeviceInput(device: audioDevice) else {
fatalError()
}
session.beginConfiguration()
session.addInput(videoInput)
session.addInput(audioInput)
session.addOutput(videoOutput)
session.addOutput(audioOutput)
session.commitConfiguration()
DispatchQueue.main.async { [weak self] in
self?.session.startRunning()
}
}
private func startRecording() {
self.startUnixtime = DateUtility.getUnixtime()
self.startTimeForDisplayingTimeCounter = Date()
self.startTimer()
self.elapsedTimeLabel.text = "00:00:00"
// AVAssetWriter
assetWriter = try! AVAssetWriter(outputURL: self.exportURL!, fileType: .mov)
// video
let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : 640,
AVVideoHeightKey : 480
])
videoInput.expectsMediaDataInRealTime = true
assetWriter?.add(videoInput)
// audio
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
audioInput.expectsMediaDataInRealTime = true
assetWriter?.add(audioInput)
assetWriter?.startWriting()
// Delegate
let queue = DispatchQueue.global()
videoOutput.setSampleBufferDelegate(self, queue: queue)
audioOutput.setSampleBufferDelegate(self, queue: queue)
}
private func finishRecording() {
self.endUnixtime = DateUtility.getUnixtime()
self.elapsedTimeLabel.text = "00:00:00"
self.timer?.invalidate()
videoOutput.setSampleBufferDelegate(nil, queue: nil)
audioOutput.setSampleBufferDelegate(nil, queue: nil)
startTime = nil
assetWriter?.finishWriting { [weak self] in
guard let self = self else { return }
guard self.assetWriter!.status == .completed else { fatalError("failed recording") }
self.saveToPhotoLibrary { isSaveToPhotoLibrarySucceed in
print("vide saved to photo library")
guard isSaveToPhotoLibrarySucceed else {
print("Save to photo library failed")
return
}
self.saveToRealmFromTempVideo {
self.uploadVideoToServer()
}
}
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard isRecording else { fatalError() }
guard CMSampleBufferDataIsReady(sampleBuffer) else {
print("not ready")
return
}
if startTime == nil {
startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
assetWriter?.startSession(atSourceTime: startTime!)
}
// Append video or audio
let mediaType: AVMediaType = output is AVCaptureVideoDataOutput ? .video : .audio
if mediaType == .video {
appendVideo(from: sampleBuffer)
} else if mediaType == .audio {
appendAudio(from: sampleBuffer)
} else {
fatalError("should not reach here")
}
}
// Append Video
func appendVideo(from sampleBuffer: CMSampleBuffer) {
// - Guards
guard let videoInput = assetWriter?.inputs.first(where: { $0.mediaType == .video }) else {
print("video input not found")
return
}
guard videoInput.isReadyForMoreMediaData else {
print("video input not ready for more media data")
return
}
// - Timestamp
let sample: Sample = Sample(sampleBuffer: sampleBuffer)
guard let ciImage = generateCIImage(from: sampleBuffer) else {
print("CIImage creation from sampleBuffer failed")
return
}
let uiImage = UIImage(ciImage: ciImage)
guard let timestampAddedImage = self.addTimestamp(on: uiImage) else {
fatalError("should not reach here")
}
guard let timestampAddedCvpixelBuffer = timestampAddedImage.toCVPixelBuffer() else {
print("CVPixelBuffer creation from CIImage failed")
return
}
guard let timestampAddedSampleBuffer = generateCMSampleBuffer(from: timestampAddedCvpixelBuffer, timingInfo: sample.timingInfo) else {
print("CMSampleBuffer creation from CVPixelBufer failed")
return
}
DispatchQueue.main.sync { [weak self] in
self?.compositeImageView.image = timestampAddedImage
}
print("append video")
videoInput.append(timestampAddedSampleBuffer)
}
func addTimestamp(on image: UIImage) -> UIImage? {
let imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
image.draw(in: imageRect)
// Text Attributes
let textColor = UIColor.white
let textFont = UIFont.systemFont(ofSize: FontSize.sizeL, weight: .bold)
let textFontAttributes = [
NSAttributedString.Key.font: textFont,
NSAttributedString.Key.foregroundColor: textColor,
NSAttributedString.Key.backgroundColor: UIColor(hex: ColorConstants.black, alpha: 0.4)
]
let formatter = DateFormatter()
formatter.dateFormat = "yyyy/MM/dd HH:mm:ss:SSS"
let timestamp: NSString = formatter.string(from: Date()) as NSString
let textRect = CGRect(x: 6.0, y: 6.0, width: image.size.width, height: 32)
timestamp.draw(in: textRect, withAttributes: textFontAttributes)
// New Image
let newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext()
return newImage
}
func generateCMSampleBuffer(from cvPixelBuffer: CVPixelBuffer, timingInfo: CMSampleTimingInfo) -> CMSampleBuffer? {
var sampleBuffer: CMSampleBuffer?
var timingInfo = timingInfo // mutable copy; CMSampleBufferCreateForImageBuffer needs a pointer
var videoInfo: CMVideoFormatDescription!
CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: cvPixelBuffer, formatDescriptionOut: &videoInfo)
CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
imageBuffer: cvPixelBuffer,
dataReady: true,
makeDataReadyCallback: nil,
refcon: nil,
formatDescription: videoInfo,
sampleTiming: &timimgInfo,
sampleBufferOut: &sampleBuffer)
return sampleBuffer
}
private extension UIImage {
func toCVPixelBuffer() -> CVPixelBuffer? {
let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
var pixelBuffer : CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(self.size.width), Int(self.size.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
guard status == kCVReturnSuccess else {
return nil
}
if let pixelBuffer = pixelBuffer {
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: pixelData, width: Int(self.size.width), height: Int(self.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
context?.translateBy(x: 0, y: self.size.height)
context?.scaleBy(x: 1.0, y: -1.0)
UIGraphicsPushContext(context!)
self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
UIGraphicsPopContext()
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
return nil
}
}
private final class Sample {
let timingInfo: CMSampleTimingInfo
init(sampleBuffer: CMSampleBuffer) {
let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let duration = CMSampleBufferGetDuration(sampleBuffer)
let decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: presentationTimeStamp, decodeTimeStamp: decodeTimeStamp)
}
}
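As an aside, the UIImage round trip can be avoided entirely: Core Image can render the timestamped CIImage straight into a CVPixelBuffer. A minimal sketch (the composited CIImage and a destination buffer, e.g. one from a pixel buffer pool, are assumed):

import CoreImage

let ciContext = CIContext() // create once and reuse; contexts are expensive

func render(_ image: CIImage, into pixelBuffer: CVPixelBuffer) {
    // Draws the CIImage directly into the buffer, skipping UIGraphics entirely.
    ciContext.render(image, to: pixelBuffer)
}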

Cannot generate thumbnail from video URL in iOS 13

I am using the code below to generate a frame from a video URL. It was working fine for some time, but right now it throws an error saying it cannot decode the file and that the file may be damaged. Can someone help with this?
func previewImageFromVideo(url: NSURL) -> UIImage? {
let url = url as URL
let request = URLRequest(url: url)
let cache = URLCache.shared
if
let cachedResponse = cache.cachedResponse(for: request),
let image = UIImage(data: cachedResponse.data)
{
return image
}
let asset = AVAsset(url: url)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
imageGenerator.maximumSize = CGSize(width: 250, height: 120)
var time = asset.duration
time.value = min(time.value, 2)
var image: UIImage?
do {
let cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
image = UIImage(cgImage: cgImage)
} catch { }
if
let image = image,
let data = UIImagePNGRepresentation(image),
let response = HTTPURLResponse(url: url, statusCode: 200, httpVersion: nil, headerFields: nil)
{
let cachedResponse = CachedURLResponse(response: response, data: data)
cache.storeCachedResponse(cachedResponse, for: request)
}
return image
}
This code throws an error while capturing a frame from the URL video. It says the file might be damaged.
I tried your code in a Playground and it works perfectly. The file could be damaged, as the error message says.
import UIKit
import AVKit
import PlaygroundSupport
let container = UIView(frame: CGRect(x: 0, y: 0, width: 200, height: 200))
let imageView = UIImageView(frame: CGRect(x: 0, y: 0, width: 200, height: 200))
container.addSubview(imageView)
PlaygroundPage.current.liveView = container
PlaygroundPage.current.needsIndefiniteExecution = true
func previewImageFromVideo(url: NSURL) -> UIImage? {
let url = url as URL
let request = URLRequest(url: url)
let cache = URLCache.shared
if
let cachedResponse = cache.cachedResponse(for: request),
let image = UIImage(data: cachedResponse.data)
{
return image
}
let asset = AVAsset(url: url)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
imageGenerator.maximumSize = CGSize(width: 250, height: 120)
var time = asset.duration
time.value = min(time.value, 2)
var image: UIImage?
do {
let cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
image = UIImage(cgImage: cgImage)
} catch { }
if
let image = image,
let data = image.pngData(),
let response = HTTPURLResponse(url: url, statusCode: 200, httpVersion: nil, headerFields: nil)
{
let cachedResponse = CachedURLResponse(response: response, data: data)
cache.storeCachedResponse(cachedResponse, for: request)
}
return image
}
imageView.image = previewImageFromVideo(url: NSURL(string: "https://www.w3schools.com/html/mov_bbb.mp4")!)
I have made some changes to Suh's answer and run it on a background thread, so the UI won't be blocked while the thumbnail is generated.
func createVideoThumbnail(url: String?, completion: @escaping ((_ image: UIImage?) -> Void)) {
guard let url = URL(string: url ?? "") else { return }
DispatchQueue.global().async {
let url = url as URL
let request = URLRequest(url: url)
let cache = URLCache.shared
if
let cachedResponse = cache.cachedResponse(for: request),
let image = UIImage(data: cachedResponse.data)
{
DispatchQueue.main.async {
completion(image)
}
return // bail out here so completion isn't called a second time below
}
let asset = AVAsset(url: url)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
var time = asset.duration
time.value = min(time.value, 2)
var image: UIImage?
do {
let cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
image = UIImage(cgImage: cgImage)
} catch {
DispatchQueue.main.async {
completion(nil)
}
return // don't fall through and call completion again
}
if
let image = image,
let data = image.pngData(),
let response = HTTPURLResponse(url: url, statusCode: 200, httpVersion: nil, headerFields: nil)
{
let cachedResponse = CachedURLResponse(response: response, data: data)
cache.storeCachedResponse(cachedResponse, for: request)
}
DispatchQueue.main.async {
completion(image)
}
}
}
Usage:
createVideoThumbnail(url: data.url ?? "") { [weak self] (img) in
guard let strongSelf = self else { return }
if let image = img {
strongSelf.mediaImg.image = image
}
}

How to create a thumbnail from local files

I want to create a thumbnail image for files (Word, Excel, video, ...).
This is what I did:
import QuickLook
class ThumbsCreator: NSObject {
private var file : File?
init(file: File?) {
super.init()
self.file = file
}
func createThumb() {
let url = URL(string: self.file?.path() ?? "")
}
}
After a lot of search, I found this solution :
import PDFKit
import AVKit
import WebKit
func createThumb() {
let url = URL(string: self.file?.path() ?? "")
switch file?.type {
case FileType.image.rawValue:
let image = UIImage(contentsOfFile: (url?.path)!)
_finalImage = self.createScaledImage(image: image!)
break
case FileType.office.rawValue:
//Loading.......
break
case FileType.Pdf.rawValue:
guard let doc = PDFDocument(url: url!) else {return}
guard let page = doc.page(at: 0) else {return}
_finalImage = page.thumbnail(of: CGSize(width: 768, height: 1024), for: .cropBox)
break
case FileType.video.rawValue:
let asset = AVAsset(url: url!)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
let time = CMTime(seconds: 2, preferredTimescale: 1)
do {
let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: nil)
_finalImage = UIImage(cgImage: imageRef)
} catch let error{
print("Error: \(error)")
}
break
default:
break
}
}
func createScaledImage(image: UIImage) {
let THUMB_WIDTH = 150.0 - 40.0
let THUMB_HEIGHT = THUMB_WIDTH - 23.0
var itemThumb = resizeImage(image: image, constraintSize: CGSize(width: THUMB_WIDTH, height: THUMB_HEIGHT))
let thumbRect = CGRect(x: 0, y: 0, width: 10, height: 10)
UIGraphicsBeginImageContextWithOptions(thumbRect.size, true, 0.0)
let context = UIGraphicsGetCurrentContext()
// Fill a white rect
context?.setFillColor(gray: 1.0, alpha: 1.0)
context?.fill(thumbRect)
// Stroke a gray rect
let comps : [CGFloat] = [0.8, 0.8, 0.8, 1]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let strokeColor = CGColor(colorSpace: colorSpace, components: comps)
context?.setStrokeColor(strokeColor!)
UIRectFrame(thumbRect)
//CGColorRelease(strokeColor!)
itemThumb.draw(in: thumbRect.insetBy(dx: 1, dy: 1))
itemThumb = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
self.finishThumCreation(image: image)
}
}
Starting from iOS 13 and macOS 10.15, there is the QuickLook Thumbnailing API. It supports any file format for which the OS can provide a preview: either because the OS knows this format or because the owner of the third-party format provided a QuickLook plugin.
Here is an example based on Apple's tutorial:
import QuickLookThumbnailing

func thumbnail(for fileURL: URL, size: CGSize, scale: CGFloat) {
let request = QLThumbnailGenerator
.Request(fileAt: fileURL, size: size, scale: scale,
representationTypes: .lowQualityThumbnail)
QLThumbnailGenerator.shared.generateRepresentations(for: request)
{ (thumbnail, type, error) in
DispatchQueue.main.async {
if thumbnail == nil || error != nil {
// Handle the error case gracefully.
} else {
// Display the thumbnail that you created.
}
}
}
}
On macOS before 10.15, in my app I fall back to NSWorkspace.shared.icon(forFile:), which provides a document icon based on the file type (but not a thumbnail).
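That fallback is a one-liner; a sketch (macOS, assuming fileURL points at the document):

import AppKit

let icon: NSImage = NSWorkspace.shared.icon(forFile: fileURL.path)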
You can use https://developer.apple.com/documentation/uikit/uidocumentinteractioncontroller/1616801-icons
var icons: [UIImage] { get }
let controller = UIDocumentInteractionController(url: someUrl)
print(controller.icons.first)
Only for a video
extension UIViewController {
func thumbnail(_ sourceURL:URL) -> UIImage {
let asset = AVAsset(url: sourceURL)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
let time = CMTime(seconds: 1, preferredTimescale: 1)
do {
let imageRef = try imageGenerator.copyCGImage(at: time, actualTime: nil)
return UIImage(cgImage: imageRef)
} catch {
print(error)
return UIImage(named: "NoVideos")!
}
}
}
There's no good API for this yet. There is NSURLThumbnailDictionaryKey, but YMMV. You can indeed get icons via UIDocumentInteractionController.

How to add transitions to set images exported as video in Swift

I am making an app with transition functionality: for example, the video should play with a top-to-bottom or left-to-right transition effect. I am able to generate a video from an array of images in Swift 3.0, but I cannot work out how to add the transitions.
I got the code from this answer How do I export UIImage array as a movie?
I am using it like this and it works fine; the only problem is getting the transitions:
func writeImagesAsMovie(allImages: [UIImage], videoPath: String, videoSize: CGSize, videoFPS: Int32) {
// Create AVAssetWriter to write video
guard let assetWriter = createAssetWriter(videoPath, size: videoSize) else {
print("Error converting images to video: AVAssetWriter not created")
return
}
// If here, AVAssetWriter exists so create AVAssetWriterInputPixelBufferAdaptor
let writerInput = assetWriter.inputs.filter{ $0.mediaType == AVMediaTypeVideo }.first!
let sourceBufferAttributes : [String : AnyObject] = [
kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_32ARGB) as AnyObject,
kCVPixelBufferWidthKey as String : videoSize.width as AnyObject,
kCVPixelBufferHeightKey as String : videoSize.height as AnyObject,
]
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: sourceBufferAttributes)
// Start writing session
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: kCMTimeZero)
if (pixelBufferAdaptor.pixelBufferPool == nil) {
print("Error converting images to video: pixelBufferPool nil after starting session")
return
}
// -- Create queue for <requestMediaDataWhenReadyOnQueue>
let mediaQueue = DispatchQueue(label: "mediaInputQueue", attributes: [])
// -- Set video parameters
let frameDuration = CMTimeMake(1, videoFPS)
var frameCount = 0
// -- Add images to video
let numImages = allImages.count
writerInput.requestMediaDataWhenReady(on: mediaQueue, using: { () -> Void in
// Append unadded images to video but only while input ready
while (writerInput.isReadyForMoreMediaData && frameCount < numImages) {
let lastFrameTime = CMTimeMake(Int64(frameCount), videoFPS)
let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
if !self.appendPixelBufferForImageAtURL(image: allImages[frameCount], pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
print("Error converting images to video: AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer")
return
}
frameCount += 1
}
// No more images to add? End video.
if (frameCount >= numImages) {
writerInput.markAsFinished()
assetWriter.finishWriting {
if (assetWriter.error != nil) {
print("Error converting images to video: \(String(describing: assetWriter.error))")
} else {
print("Converted images to movie # \(videoPath)")
}
}
}
})
}
func createAssetWriter(_ path: String, size: CGSize) -> AVAssetWriter? {
// Convert <path> to NSURL object
let pathURL = URL(fileURLWithPath: path)
// Return new asset writer or nil
do {
// Create asset writer
let newWriter = try AVAssetWriter(outputURL: pathURL, fileType: AVFileTypeMPEG4)
// Define settings for video input
let videoSettings: [String : AnyObject] = [
AVVideoCodecKey : AVVideoCodecH264 as AnyObject,
AVVideoWidthKey : size.width as AnyObject,
AVVideoHeightKey : size.height as AnyObject,
]
// Add video input to writer
let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
newWriter.add(assetWriterVideoInput)
// Return writer
print("Created asset writer for \(size.width)x\(size.height) video")
return newWriter
} catch {
print("Error creating asset writer: \(error)")
return nil
}
}
func appendPixelBufferForImageAtURL(image: UIImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
var appendSucceeded = false
autoreleasepool {
if let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
let pixelBufferPointer = UnsafeMutablePointer<CVPixelBuffer?>.allocate(capacity:1)
let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
kCFAllocatorDefault,
pixelBufferPool,
pixelBufferPointer
)
if let pixelBuffer = pixelBufferPointer.pointee , status == 0 {
fillPixelBufferFromImage(image: image, pixelBuffer: pixelBuffer)
appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
pixelBufferPointer.deinitialize()
} else {
NSLog("Error: Failed to allocate pixel buffer from pool")
}
pixelBufferPointer.deallocate(capacity: 1)
}
}
return appendSucceeded
}
func fillPixelBufferFromImage(image: UIImage, pixelBuffer: CVPixelBuffer) {
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
// Create CGBitmapContext
let context = CGContext(
data: pixelData,
width: Int(image.size.width),
height: Int(image.size.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
space: rgbColorSpace,
bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
)
// Draw image into context
context?.draw(image.cgImage!, in: CGRect(x: 0.0, y: 0.0, width: image.size.width, height: image.size.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
}