Swift video compression removes sound when the video exceeds a certain length

I'm using this code to compress video:

func compressVideo(inputURL: URL,
                   outputURL: URL,
                   handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
    let urlAsset = AVURLAsset(url: inputURL, options: nil)
    guard let exportSession = AVAssetExportSession(asset: urlAsset,
                                                   presetName: AVAssetExportPresetLowQuality) else {
        handler(nil)
        return
    }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = .mp4
    exportSession.exportAsynchronously {
        handler(exportSession)
    }
}

But when the video goes over a certain length, the sound is removed/deleted from the output. What can I do to resolve this problem?

I found the solution on my own. I've changed the video format to .mov and it worked.
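For reference, the fix amounts to changing the output container on the export session; a minimal sketch against the compressVideo function above (give outputURL a matching .mov extension):

exportSession.outputURL = outputURL   // outputURL should end in .mov to match
exportSession.outputFileType = .mov   // was .mp4
exportSession.exportAsynchronously {
    handler(exportSession)
}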

Related

Swift code to use AVWriter/AVReader to extract .wav audio from .mp4 video writes a zero-byte file

I've gone through the documentation and all the posts here, and I've gotten this far.
The function below should take an AVAsset and write out a .wav file. However, the file written out is of zero bytes. I'm not sure I can even inspect what the writer is writing at each step.
What am I missing?
static func writeAudioTrackToUrl(asset: AVAsset, _ url: URL) throws {
    // initialize asset reader, writer
    let assetReader = try AVAssetReader(asset: asset)
    let assetWriter = try AVAssetWriter(outputURL: URL(fileURLWithPath: "/tmp/audiowav.wav"), fileType: .wav)

    // get audio track
    let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first!

    // configure output audio settings
    let audioSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatLinearPCM,
        AVSampleRateKey: 22050.0,
        AVNumberOfChannelsKey: 1,
        AVLinearPCMBitDepthKey: 16,
        AVLinearPCMIsFloatKey: false,
        AVLinearPCMIsBigEndianKey: false,
        AVLinearPCMIsNonInterleaved: false
    ]
    let assetReaderAudioOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioSettings)

    if assetReader.canAdd(assetReaderAudioOutput) {
        assetReader.add(assetReaderAudioOutput)
    } else {
        fatalError("could not add audio output reader")
    }

    let inputAudioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatLinearPCM]
    let audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: inputAudioSettings, sourceFormatHint: (audioTrack.formatDescriptions[0] as! CMFormatDescription))
    let audioInputQueue = DispatchQueue(label: "audioQueue")

    assetWriter.add(audioInput)
    assetWriter.startWriting()
    assetReader.startReading()
    assetWriter.startSession(atSourceTime: CMTime.zero)

    audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
        while audioInput.isReadyForMoreMediaData {
            if let sample = assetReaderAudioOutput.copyNextSampleBuffer() {
                audioInput.append(sample)
            } else {
                audioInput.markAsFinished()
                DispatchQueue.main.async {
                    assetWriter.finishWriting {
                        assetReader.cancelReading()
                    }
                }
                break
            }
        }
    }
}
The problem here is that you convert the input audio to the LPCM format described by audioSettings, but then you give a sourceFormatHint of audioTrack.formatDescriptions[0] to the AVAssetWriterInput.
This is a problem because the audio track's format descriptions are not going to be LPCM but a compressed format, like kAudioFormatMPEG4AAC.
Just drop the hint; I think it's for passing through compressed formats anyway.
Further, the LPCM in inputAudioSettings is underspecified - why not pass audioSettings directly?
In summary, try this:
let audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioSettings)
P.S. Don't forget to delete the output file before running; AVAssetWriter doesn't seem to overwrite existing files.
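That cleanup step could look like this (a sketch reusing the hard-coded path from the question; adjust to wherever you actually write):

// Remove any stale output before starting the writer.
let outputURL = URL(fileURLWithPath: "/tmp/audiowav.wav")
if FileManager.default.fileExists(atPath: outputURL.path) {
    try? FileManager.default.removeItem(at: outputURL)
}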

Can't get video tracks from AVURLAsset for HLS videos (.m3u8 format) for AVPlayer?

I am developing a custom video player to stream HLS videos from a server. I can successfully play HLS videos using AVPlayerItem and AVPlayer.
After that, I want to add a subtitle track and audio tracks to my video player, so I used AVMutableComposition. The issue is that when I create an AVURLAsset for an HLS video, I can't get video tracks from the AVURLAsset; it always gives me 0 tracks. I tried loadValuesAsynchronously on the AVURLAsset and adding a KVO observer for "tracks" on the AVPlayerItem, but neither produced any positive result.
I am using the following code.
func playVideo() {
    let videoAsset = AVURLAsset(url: videoURL!)
    let composition = AVMutableComposition()

    // Video
    let videoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        let tracks = videoAsset.tracks(withMediaType: .video)
        guard let track = tracks.first else {
            print("Can't get first video track")
            return
        }
        try videoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: track, at: kCMTimeZero)
    } catch {
        print(error)
        return
    }

    guard let subtitlesUrl = Bundle.main.url(forResource: "en", withExtension: "vtt") else {
        print("Can't load en.vtt from bundle")
        return
    }

    // Subtitles
    let subtitleAsset = AVURLAsset(url: subtitlesUrl)
    let subtitleTrack = composition.addMutableTrack(withMediaType: .text, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
        let subTracks = subtitleAsset.tracks(withMediaType: AVMediaType.text)
        guard let subTrack = subTracks.first else {
            print("Can't get first subtitles track")
            return
        }
        try subtitleTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: subTrack, at: kCMTimeZero)
    } catch {
        print(error)
        return
    }

    // Prepare item and play it
    let item = AVPlayerItem(asset: composition)
    self.player = AVPlayer(playerItem: item)
    self.playerLayer = AVPlayerLayer.init()
    self.playerLayer.frame = self.bounds
    self.playerLayer.contentsGravity = kCAGravityResizeAspect
    self.playerLayer.player = player
    self.layer.addSublayer(self.playerLayer)
    self.player.addObserver(self, forKeyPath: "currentItem.loadedTimeRanges", options: .new, context: nil)
    self.player.play()
}
This procedure works well for .mp4 videos, but not for HLS videos (.m3u8). Does anyone have a working solution for this?
Or: how can we get tracks from HLS videos using AVURLAsset? If this is not possible, how can I achieve a similar result?
Please let me know your feedback.
Many thanks in advance.
For HLS video, tracks(withMediaType: .video) will return an empty array.
Use this instead: player.currentItem.presentationSize.width and player.currentItem.presentationSize.height.
Please let me know if it works.
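A minimal sketch of that approach, assuming an AVPlayer named player whose item is already loaded:

// presentationSize is only meaningful once the item is ready to play.
if let item = player.currentItem, item.status == .readyToPlay {
    let size = item.presentationSize
    print("video dimensions: \(size.width) x \(size.height)")
}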
I didn't have the exact same problem as you, but I got around a similar problem (querying for HDR) by querying the tracks on the AVPlayerItem instead of on the AVURLAsset.
Set up an observer on the item status:
// Note: observe(_:options:changeHandler:) returns an NSKeyValueObservation token
// that must be retained (e.g. in a property) for the observation to stay alive.
player?.observe(\AVPlayer.currentItem?.status,
                options: [.new, .initial],
                changeHandler: { [weak self] player, _ in
    DispatchQueue.main.async {
        self?.observedItemStatus(from: player)
    }
})
Then query the AVMediaType of your choice (in your case text).
func observedItemStatus(from avPlayer: AVPlayer) {
    guard let currentItem = avPlayer.currentItem else { return }
    // ideally execute code based on currentItem.status... for the brevity of this example I won't.
    let hasLegibleMedia = currentItem.tracks.first(where: {
        $0.assetTrack?.mediaType == AVMediaType.text
    })?.assetTrack?.hasMediaCharacteristic(.legible)
}
Alternatively, if you need more than just a Bool, you could loop over the tracks to access the assetTrack you really want.
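A sketch of such a loop, under the same setup as above (the printed fields are illustrative):

for playerItemTrack in currentItem.tracks {
    // assetTrack is optional on AVPlayerItemTrack
    guard let assetTrack = playerItemTrack.assetTrack,
          assetTrack.mediaType == .text else { continue }
    print("text track \(assetTrack.trackID), legible: \(assetTrack.hasMediaCharacteristic(.legible))")
}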

Why do I get this output when using AVPlayer in Swift?

I am trying to play an audio file using AVPlayer in Swift. When I play a file I generated by combining two files, I get this output:
playing file:"file location".m4a -- file:///
However, when I play another pre-made sound file it plays fine, and I don't get the -- file:/// in the output after playing it.
This is how I am playing the audio:
func play(url: NSURL) {
    do {
        soundPlayer = AVPlayer(url: url as URL)
        soundPlayer.volume = 1.0
        soundPlayer.play()
    } catch let error as NSError {
        print(error.localizedDescription)
    } catch {
        print("failed")
    }
}
And this is what I am using to concatenate the two audio files:
func makeSounds(sounds: [NSURL], preName: String) {
    let composition = AVMutableComposition()
    print(sounds)

    for sound in sounds {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        let avAsset = AVURLAsset(url: sound as URL)
        let track = avAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
        let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
        try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
    }

    let documentDirectoryURL = NSURL(fileURLWithPath: Urls.user)
    var fileDestinationUrl = documentDirectoryURL.appendingPathComponent("\(SoundData.Name)\(preName).m4a")

    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = fileDestinationUrl
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown \(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting \(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting \(assetExport?.error)")
        default:
            soundsToPlay.soundLocation = String(describing: fileDestinationUrl!)
            print("Audio Concatenation Complete")
        }
    })
}
The audio file location appears to match the URL I am exporting to, but the sound file doesn't play; I just get that output.
AVAssetExportSession needs a path string in 'file' URL format, which is usually obtained through URL.relativeString and includes the 'file:///' scheme.
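To illustrate the difference (the paths below are hypothetical): a string that already carries the file:/// scheme round-trips through URL(string:), while a bare filesystem path needs URL(fileURLWithPath:):

// Hypothetical example paths.
let fileUrlString = "file:///var/mobile/Documents/sound.m4a"
let barePath = "/var/mobile/Documents/sound.m4a"

let fromString = URL(string: fileUrlString)      // keeps the file:/// scheme
let fromPath = URL(fileURLWithPath: barePath)    // adds the scheme for you

print(fromString?.path == fromPath.path)         // true - same file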

Extract audio from video file

How can I extract audio from a video file without using FFmpeg?
I want to use AVMutableComposition and AVURLAsset to solve it, e.g. a conversion from .mov to a .m4a file.
The following Swift 5 / iOS 12.3 code shows how to extract audio from a movie file (.mov) and convert it to an audio file (.m4a) by using AVURLAsset, AVMutableComposition and AVAssetExportSession:
import UIKit
import AVFoundation

class ViewController: UIViewController {

    @IBAction func extractAudioAndExport(_ sender: UIButton) {
        // Create a composition
        let composition = AVMutableComposition()
        do {
            let sourceUrl = Bundle.main.url(forResource: "Movie", withExtension: "mov")!
            let asset = AVURLAsset(url: sourceUrl)
            guard let audioAssetTrack = asset.tracks(withMediaType: AVMediaType.audio).first else { return }
            guard let audioCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
            try audioCompositionTrack.insertTimeRange(audioAssetTrack.timeRange, of: audioAssetTrack, at: CMTime.zero)
        } catch {
            print(error)
        }

        // Get url for output
        let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory() + "out.m4a")
        if FileManager.default.fileExists(atPath: outputUrl.path) {
            try? FileManager.default.removeItem(atPath: outputUrl.path)
        }

        // Create an export session
        let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)!
        exportSession.outputFileType = AVFileType.m4a
        exportSession.outputURL = outputUrl

        // Export file
        exportSession.exportAsynchronously {
            guard exportSession.status == AVAssetExportSession.Status.completed else { return }
            DispatchQueue.main.async {
                // Present a UIActivityViewController to share audio file
                guard let outputURL = exportSession.outputURL else { return }
                let activityViewController = UIActivityViewController(activityItems: [outputURL], applicationActivities: [])
                self.present(activityViewController, animated: true, completion: nil)
            }
        }
    }
}
In all multimedia formats, audio is encoded separately from video, and their frames are interleaved in the file. So removing the video from a multimedia file does not require any messing with encoders and decoders: you can write a file format parser that will drop the video track, without using the multimedia APIs on the phone.
To do this without using a 3rd party library, you need to write the parser from scratch, which could be simple or difficult depending on the file format you wish to use. For example, FLV is very simple, so stripping a track out of it is very easy (just go over the stream, detect the frame beginnings and drop the 0x09 = video frames). MP4 is a bit more complex: its header (MOOV) has a hierarchical structure in which you have headers for each of the tracks (TRAK atoms). You need to drop the video TRAK, and then copy the interleaved bitstream atom (MDAT), skipping all the video data clusters as you copy.
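To make the atom structure concrete, here is a minimal sketch that walks the top-level boxes of an MP4 file and prints their types and sizes; it assumes 32-bit box sizes and does not attempt any actual track surgery:

import Foundation

func listTopLevelAtoms(of fileURL: URL) throws {
    let data = try Data(contentsOf: fileURL)
    var offset = 0
    while offset + 8 <= data.count {
        // Each box starts with a 4-byte big-endian size and a 4-byte ASCII type.
        let size = data[offset..<offset+4].reduce(0) { ($0 << 8) | Int($1) }
        let type = String(bytes: data[offset+4..<offset+8], encoding: .ascii) ?? "????"
        print("atom \(type), \(size) bytes at offset \(offset)")
        guard size >= 8 else { break } // guards against corrupt or 64-bit 'largesize' boxes
        offset += size
    }
}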
There are 3rd party libraries you can use, aside from ffmpeg. One that comes to mind is GPAC MP4BOX (LGPL license). If the LGPL is a problem, there are plenty of commercial SDKs that you can use.

How to record a video and make it slow motion

I am working on an iPhone app for school and need some help. The app should record video, make it slow motion (about 2x), then save it to the photo library. So far I have everything except how to make the video slow motion. I know it can be done as there is already an app in the App Store that does it.
How can I take a video I've saved to a temp url and adjust the speed before saving it to the photo library?
If you need to export your video, then you need to use the AVMutableComposition class.
Add your video as an AVAsset to an AVMutableComposition and scale it with:
- (void)scaleTimeRange:(CMTimeRange)timeRange toDuration:(CMTime)duration
Finally, you export it using the AVAssetExportSession class.
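In Swift, the scaling step looks roughly like this (a sketch assuming a composition video track named compositionTrack that is already populated from your asset; a toDuration longer than the original yields slow motion):

// Stretch the whole track to twice its duration => 0.5x playback speed.
let original = compositionTrack.timeRange
compositionTrack.scaleTimeRange(
    CMTimeRange(start: .zero, duration: original.duration),
    toDuration: CMTimeMultiply(original.duration, multiplier: 2))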
I wrote code that turns your video into slow motion and saves it to the Photos library. The main thing is that this code works in Swift 5. Creating a slow-motion video in iOS with Swift is not easy; many of the "slow motion" examples I came across either didn't work or used deprecated code. So I finally figured out a way to do it.
This code can also be used for 120 fps and higher frame rates. Just pass in the URL of your video and it slows it down.
Here is the code snippet I created for achieving slow motion:
func slowMotion(pathUrl: URL) {
    let videoAsset = AVURLAsset(url: pathUrl, options: nil)
    let vdoTrack = videoAsset.tracks(withMediaType: .video)[0]
    let mixComposition = AVMutableComposition()
    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)

    do {
        try compositionVideoTrack?.insertTimeRange(
            CMTimeRangeMake(start: .zero, duration: videoAsset.duration),
            of: vdoTrack,
            at: .zero)
    } catch {
        // handle error
        return
    }

    //MARK: This constant (videoScaleFactor) is what achieves the slow motion.
    // Increasing videoScaleFactor stretches the timeline further, i.e. plays the video more slowly.
    let videoScaleFactor = 2.0
    let videoDuration = videoAsset.duration

    compositionVideoTrack?.scaleTimeRange(
        CMTimeRangeMake(start: .zero, duration: videoDuration),
        toDuration: CMTimeMake(value: videoDuration.value * Int64(videoScaleFactor), timescale: videoDuration.timescale))
    compositionVideoTrack?.preferredTransform = vdoTrack.preferredTransform

    let dirPaths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).map(\.path)
    let docsDir = dirPaths[0]
    let outputFilePath = URL(fileURLWithPath: docsDir).appendingPathComponent("slowMotion\(UUID().uuidString).mp4").path
    if FileManager.default.fileExists(atPath: outputFilePath) {
        try? FileManager.default.removeItem(atPath: outputFilePath)
    }
    let filePath = URL(fileURLWithPath: outputFilePath)

    let assetExport = AVAssetExportSession(
        asset: mixComposition,
        presetName: AVAssetExportPresetHighestQuality)
    assetExport?.outputURL = filePath
    assetExport?.outputFileType = .mp4
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport?.status {
        case .failed:
            print("asset output media url = \(String(describing: assetExport?.outputURL))")
            print("Export session failed with error: \(String(describing: assetExport?.error))")
            DispatchQueue.main.async {
                // completion(nil)
            }
        case .completed:
            print("Successful")
            let outputURL = assetExport!.outputURL
            print("url path = \(String(describing: outputURL))")
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
            }) { saved, error in
                if saved {
                    print("video successfully saved to the Photos gallery")
                }
                if error != nil {
                    print("error in saving video \(String(describing: error?.localizedDescription))")
                }
            }
            DispatchQueue.main.async {
                // completion(filePath)
            }
        default:
            break
        }
    })
}
slowmoVideo is an OSS project which appears to do this very nicely, though I don't know whether it would work on an iPhone.
It does not simply make your videos play at 0.01× speed. You can smoothly slow down and speed up your footage, optionally with motion blur. How does slow motion work? slowmoVideo tries to find out where pixels move in the video (this information is called Optical Flow), and then uses this information to calculate the additional frames.