Accessing Pixels Outside of the CVPixelBuffer that has been extended with Padding - swift

I am trying to extend a CVPixelBuffer so that accesses to memory outside of the buffer do not cause an EXC_BAD_ACCESS error, by reinitializing the CVPixelBuffer with padding. However, it doesn't seem to work. Any tips on what I am doing wrong would be greatly appreciated.
// NOTE(review): the kCVPixelBufferExtendedPixels*Key entries are *descriptive*
// attributes. With CVPixelBufferCreateWithBytes the buffer wraps the
// caller-supplied `srcData` as-is, so no additional padding memory is
// allocated — reads outside the original allocation still fault. The
// "* 40 + 1" scaling of the padding amounts also looks arbitrary; TODO confirm
// what it was meant to achieve.
let paddingLeft = abs(min(cropX, 0))
let paddingRight = max((cropX + cropWidth) - (srcWidth - 1), 0)
let paddingBottom = max((cropY + cropHeight) - (srcHeight - 1), 0)
let paddingTop = abs(min(cropY, 0))
let attr = [kCVPixelBufferExtendedPixelsLeftKey: paddingLeft*40 + 1 as CFNumber,
kCVPixelBufferExtendedPixelsTopKey: paddingTop*40 + 1 as CFNumber,
kCVPixelBufferExtendedPixelsRightKey: paddingRight*40 + 1 as CFNumber,
kCVPixelBufferExtendedPixelsBottomKey: paddingBottom*40 + 1 as CFNumber]
// Wraps the existing srcData; the attributes above do not grow the allocation.
guard kCVReturnSuccess == CVPixelBufferCreateWithBytes(kCFAllocatorDefault, srcWidth, srcHeight, pixelFormat, srcData, srcBytesPerRow, nil, nil, attr as CFDictionary, &paddedSrcPixelBuffer) else {
print("failed to allocate a new padded pixel buffer")
return nil
}
With an extended CVPixelBuffer, accessing data outside of the CVPixelBuffer (such as when the x,y is negative or bigger than the width/height of the buffer) should be defined behavior based on my understanding. Yet the following piece of code crashes on the last line, inside vImageScale_ARGB8888, with EXC_BAD_ACCESS Code 1.
This presumably means that the data being accessed is being unmapped.
// NOTE(review): this is the crashing snippet. `paddedSrcPixelBuffer` above was
// created by wrapping the original pixel data, so the "extended pixel"
// attributes did not allocate any extra rows/columns. Advancing the base
// address by a negative or overflowing `offset` therefore points at unmapped
// memory, which vImageScale then reads.
guard let paddedSrcData = CVPixelBufferGetBaseAddress(paddedSrcPixelBuffer) else {
print("Error: could not get padded pixel buffer base address")
return nil
}
// `offset` can be negative when cropX/cropY < 0, stepping before the allocation.
srcBuffer = vImage_Buffer(data: paddedSrcData.advanced(by: offset),
height: vImagePixelCount(cropHeight),
width: vImagePixelCount(cropWidth),
rowBytes: srcBytesPerRow)
let destBytesPerRow = scaleWidth*4
// NOTE(review): malloc result used unchecked here (question code as posted).
let destData = malloc(scaleHeight*destBytesPerRow)
var destBuffer = vImage_Buffer(data: destData,
height: vImagePixelCount(scaleHeight),
width: vImagePixelCount(scaleWidth),
rowBytes: destBytesPerRow)
// kvImageEdgeExtend replicates edges *within* the described buffer; it cannot
// make an out-of-bounds base address safe to read.
let vImageFlags: vImage_Flags = vImage_Flags(kvImageEdgeExtend)
let error = vImageScale_ARGB8888(&srcBuffer, &destBuffer, nil, vImageFlags) // crashes here due to EXC_BAD_ACCESS Code: 1
Many Thanks!

Here is a modified version of a resize function that will create a padded buffer by copying contiguous sections of memory into a newly allocated buffer with the correct shape.
/// Crops a region from `srcPixelBuffer` and scales it to
/// `scaleWidth` x `scaleHeight`, returning the result as a new pixel buffer.
///
/// The crop rectangle may extend beyond the bounds of the source (negative
/// origin, or extent past the right/bottom edge); in that case the source is
/// first copied into a larger, zero-filled "padded" buffer so every pixel the
/// scaler reads is mapped memory.
///
/// Assumes a 4-bytes-per-pixel format (e.g. BGRA/ARGB), since offsets are
/// computed as `x * 4` — TODO confirm callers only pass such formats.
///
/// - Parameters:
///   - srcPixelBuffer: The source pixel buffer.
///   - cropX: X origin of the crop; may be negative.
///   - cropY: Y origin of the crop; may be negative.
///   - cropWidth: Width of the crop region in pixels.
///   - cropHeight: Height of the crop region in pixels.
///   - scaleWidth: Output width in pixels.
///   - scaleHeight: Output height in pixels.
/// - Returns: The cropped-and-scaled pixel buffer, or `nil` on any failure.
public func resizePixelBuffer(_ srcPixelBuffer: CVPixelBuffer,
                              cropX: Int,
                              cropY: Int,
                              cropWidth: Int,
                              cropHeight: Int,
                              scaleWidth: Int,
                              scaleHeight: Int) -> CVPixelBuffer? {
    let flags = CVPixelBufferLockFlags(rawValue: 0)
    let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer)
    guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(srcPixelBuffer, flags) else {
        return nil
    }
    defer { CVPixelBufferUnlockBaseAddress(srcPixelBuffer, flags) }
    guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer) else {
        print("Error: could not get pixel buffer base address")
        return nil
    }
    let srcHeight = CVPixelBufferGetHeight(srcPixelBuffer)
    let srcWidth = CVPixelBufferGetWidth(srcPixelBuffer)
    let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)

    // The padded buffer (if any) must stay locked until the scale below has
    // finished reading from it. BUG FIX: the original only unlocked it on the
    // success path, leaking the lock on every early return.
    var paddedSrcPixelBuffer: CVPixelBuffer?
    defer {
        if let padded = paddedSrcPixelBuffer {
            CVPixelBufferUnlockBaseAddress(padded, flags)
        }
    }

    var srcBuffer: vImage_Buffer
    if cropX < 0 || cropY < 0 || cropX + cropWidth > srcWidth || cropY + cropHeight > srcHeight {
        // How far the crop sticks out past each edge of the source.
        let paddingLeft = max(-cropX, 0)
        let paddingTop = max(-cropY, 0)
        // BUG FIX: the original computed these against (srcWidth - 1) /
        // (srcHeight - 1), adding a spurious extra pixel of padding and
        // disagreeing with the bounds test above.
        let paddingRight = max((cropX + cropWidth) - srcWidth, 0)
        let paddingBottom = max((cropY + cropHeight) - srcHeight, 0)
        let paddedWidth = paddingLeft + srcWidth + paddingRight
        let paddedHeight = paddingTop + srcHeight + paddingBottom

        var newBuffer: CVPixelBuffer?
        guard kCVReturnSuccess == CVPixelBufferCreate(kCFAllocatorDefault, paddedWidth, paddedHeight, pixelFormat, nil, &newBuffer),
              let paddedBuffer = newBuffer else {
            print("failed to allocate a new padded pixel buffer")
            return nil
        }
        guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(paddedBuffer, flags) else {
            return nil
        }
        paddedSrcPixelBuffer = paddedBuffer  // unlocked by the defer above
        guard let paddedSrcData = CVPixelBufferGetBaseAddress(paddedBuffer) else {
            print("Error: could not get padded pixel buffer base address")
            return nil
        }
        let paddedBytesPerRow = CVPixelBufferGetBytesPerRow(paddedBuffer)

        // BUG FIX: CVPixelBufferCreate does not zero its backing store, so the
        // padding area held garbage in the original. Clear everything first so
        // out-of-bounds crop pixels come back as zeros.
        memset(paddedSrcData, 0, paddedHeight * paddedBytesPerRow)

        // Copy the source image row by row into the interior of the padded
        // buffer. Copy only the visible pixels (srcWidth * 4 bytes); the
        // original copied srcBytesPerRow bytes, which includes row-alignment
        // slack and can overrun the destination row.
        let rowBytesToCopy = min(srcWidth * 4, srcBytesPerRow)
        for row in 0..<srcHeight {
            let dstRowStart = paddedSrcData.advanced(by: (row + paddingTop) * paddedBytesPerRow + paddingLeft * 4)
            let srcRowStart = srcData.advanced(by: row * srcBytesPerRow)
            dstRowStart.copyMemory(from: srcRowStart, byteCount: rowBytesToCopy)
        }

        // Origin of the crop inside the padded buffer (always in-bounds now).
        let paddedOffset = (cropY + paddingTop) * paddedBytesPerRow + (cropX + paddingLeft) * 4
        srcBuffer = vImage_Buffer(data: paddedSrcData.advanced(by: paddedOffset),
                                  height: vImagePixelCount(cropHeight),
                                  width: vImagePixelCount(cropWidth),
                                  rowBytes: paddedBytesPerRow)
    } else {
        // Crop lies entirely inside the source; point vImage straight at it.
        let offset = cropY * srcBytesPerRow + cropX * 4
        srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
                                  height: vImagePixelCount(cropHeight),
                                  width: vImagePixelCount(cropWidth),
                                  rowBytes: srcBytesPerRow)
    }

    let destBytesPerRow = scaleWidth * 4
    guard let destData = malloc(scaleHeight * destBytesPerRow) else {
        print("Error: out of memory")
        return nil
    }
    var destBuffer = vImage_Buffer(data: destData,
                                   height: vImagePixelCount(scaleHeight),
                                   width: vImagePixelCount(scaleWidth),
                                   rowBytes: destBytesPerRow)
    let error = vImageScale_ARGB8888(&srcBuffer, &destBuffer, nil, vImage_Flags(kvImageEdgeExtend))
    if error != kvImageNoError {
        print("Error:", error)
        free(destData)
        return nil
    }

    // Hand ownership of destData to the output pixel buffer; it is freed when
    // the buffer is released.
    let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in
        if let ptr = ptr {
            free(UnsafeMutableRawPointer(mutating: ptr))
        }
    }
    var dstPixelBuffer: CVPixelBuffer?
    let status = CVPixelBufferCreateWithBytes(nil, scaleWidth, scaleHeight,
                                              pixelFormat, destData,
                                              destBytesPerRow, releaseCallback,
                                              nil, nil, &dstPixelBuffer)
    if status != kCVReturnSuccess {
        print("Error: could not create new pixel buffer")
        free(destData)
        return nil
    }
    return dstPixelBuffer
}

Related

Get Image resolution from an URL in the background thread

I'm trying to get the resolution of an image from URL without actually downloading it. So I have a function for that:
/// Reads the pixel dimensions of an image at `url` from its metadata headers
/// (via ImageIO), without decoding the full image.
/// - Parameter url: String form of the image URL.
/// - Returns: The image size in pixels, or `nil` if the URL is invalid or the
///   pixel-size properties cannot be read.
public func resolutionForImage(url: String) -> CGSize? {
    guard let imageURL = URL(string: url),
          let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil) else {
        return nil
    }
    // Avoid caching the decoded image; we only want the header properties.
    let propertiesOptions = [kCGImageSourceShouldCache: false] as CFDictionary
    guard let imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, propertiesOptions) as? [CFString: Any],
          let pixelWidth = imageProperties[kCGImagePropertyPixelWidth] as? CGFloat,
          let pixelHeight = imageProperties[kCGImagePropertyPixelHeight] as? CGFloat else {
        return nil
    }
    return CGSize(width: pixelWidth, height: pixelHeight)
}
It works fine, but I need to run it on a background thread; on the main thread it blocks the UI.
And this function is called from another function in a collectionView cell, so in the end the calculateImageHeight output should be delivered on the main thread. Could anyone help me manage this? I'm still not good at threading.
/// Computes the display height for the cell's image view.
///
/// Uses `ratio` (height/width) when provided; when it is 0.0, derives the
/// ratio from the resolution of the image at `imageUrl`.
/// - Parameter ratio: Precomputed height/width ratio, or 0.0 to look it up.
/// - Returns: `imageViewWidth * ratio`, or 100 when the result is not finite.
public func calculateImageHeight(ratio: Double = 0.0) -> CGFloat {
    var effectiveRatio = ratio
    if effectiveRatio == 0.0, let imageSize = resolutionForImage(url: imageUrl) {
        effectiveRatio = imageSize.height / imageSize.width
    }
    let computedHeight = imageViewWidth * effectiveRatio
    return computedHeight.isFinite ? computedHeight : 100
}

How can mp3 data in memory be loaded into an AVAudioPCMBuffer in Swift?

I have a class method to read an mp3 file into an AVAudioPCMBuffer as follows:
// Decoded PCM audio for the most recently loaded file; nil until initAudio succeeds.
private(set) var fullAudio: AVAudioPCMBuffer?
/// Reads the audio file at `audioFileURL` fully into `fullAudio`.
/// - Parameter audioFileURL: Location of the audio file (e.g. an mp3).
/// - Returns: `true` if the file was opened and read without throwing.
func initAudio(audioFileURL: URL) -> Bool {
var status = true
do {
let audioFile = try AVAudioFile(forReading: audioFileURL)
// processingFormat is the decoded (PCM) format, not the on-disk format.
let audioFormat = audioFile.processingFormat
let audioFrameLength = UInt32(audioFile.length)
// NOTE(review): this initializer is failable; if it returns nil, `status`
// stays true even though nothing was read — TODO confirm intended.
fullAudio = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameLength)
if let fullAudio = fullAudio {
try audioFile.read(into: fullAudio)
// processing of full audio
}
} catch {
status = false
}
return status
}
However, I now need to be able to read the same mp3 info from memory (rather than a file) into the AVAudioPCMBuffer without using the file system, where the info is held in the Data type, for example using a function declaration of the form
func initAudio(audioFileData: Data) -> Bool {
// some code setting up fullAudio
}
How can this be done? I've looked to see whether there is a route from Data holding mp3 info to AVAudioPCMBuffer (e.g. via AVAudioBuffer or AVAudioCompressedBuffer), but haven't seen a way forward.
I went down the rabbit hole on this one. Here is what probably amounts to a Rube Goldberg-esque solution:
A lot of the pain comes from using C from Swift.
/// AudioFile read callback: copies up to `requestCount` bytes starting at
/// `inPosition` from the backing `Data` into `buffer`.
///
/// - Parameters:
///   - inClientData: Pointer to the `Data` instance supplying the bytes.
///   - inPosition: Absolute byte offset to read from.
///   - requestCount: Maximum number of bytes to copy.
///   - buffer: Destination; must hold at least `requestCount` bytes.
///   - actualCount: On return, the number of bytes actually copied (0 at EOF).
/// - Returns: `noErr` always; short reads are reported via `actualCount`.
func data_AudioFile_ReadProc(_ inClientData: UnsafeMutableRawPointer, _ inPosition: Int64, _ requestCount: UInt32, _ buffer: UnsafeMutableRawPointer, _ actualCount: UnsafeMutablePointer<UInt32>) -> OSStatus {
    let data = inClientData.assumingMemoryBound(to: Data.self).pointee
    // BUG FIX: clamp the requested range to the data's bounds. The original
    // unconditionally copied inPosition ..< inPosition + requestCount, which
    // traps (precondition failure) when AudioFile requests bytes at or past
    // the end of the data — which it routinely does near EOF.
    let start = Int(inPosition)
    let available = data.count - start
    guard available > 0 else {
        actualCount.pointee = 0
        return noErr
    }
    let count = min(Int(requestCount), available)
    let bufferPointer = UnsafeMutableRawBufferPointer(start: buffer, count: count)
    let copied = data.copyBytes(to: bufferPointer, from: start ..< start + count)
    actualCount.pointee = UInt32(copied)
    return noErr
}
/// AudioFile size callback: reports the total byte count of the backing `Data`.
/// - Parameter inClientData: Pointer to the `Data` instance being read.
/// - Returns: The data's length in bytes.
func data_AudioFile_GetSizeProc(_ inClientData: UnsafeMutableRawPointer) -> Int64 {
    let backingData = inClientData.assumingMemoryBound(to: Data.self).pointee
    return Int64(backingData.count)
}
extension Data {
/// Decodes the audio data in `self` (e.g. an mp3) into an `AVAudioPCMBuffer`
/// in the given client `format`, using AudioFile/ExtAudioFile callbacks so no
/// temporary file is needed.
/// - Parameter format: The desired decoded PCM format.
/// - Returns: The decoded buffer, or `nil` if any Core Audio call fails.
func convertedTo(_ format: AVAudioFormat) -> AVAudioPCMBuffer? {
// Local mutable copy whose address is handed to the C callbacks; it must
// stay alive for the lifetime of the AudioFileID (it does: same scope).
var data = self
var af: AudioFileID? = nil
// Open an "audio file" whose reads are served from `data` via
// data_AudioFile_ReadProc / data_AudioFile_GetSizeProc.
var status = AudioFileOpenWithCallbacks(&data, data_AudioFile_ReadProc, nil, data_AudioFile_GetSizeProc(_:), nil, 0, &af)
guard status == noErr, af != nil else {
return nil
}
defer {
AudioFileClose(af!)
}
// Wrap in an ExtAudioFile so Core Audio performs the decode/convert.
var eaf: ExtAudioFileRef? = nil
status = ExtAudioFileWrapAudioFileID(af!, false, &eaf)
guard status == noErr, eaf != nil else {
return nil
}
defer {
ExtAudioFileDispose(eaf!)
}
// Ask ExtAudioFile to deliver frames already converted to `format`.
var clientFormat = format.streamDescription.pointee
status = ExtAudioFileSetProperty(eaf!, kExtAudioFileProperty_ClientDataFormat, UInt32(MemoryLayout.size(ofValue: clientFormat)), &clientFormat)
guard status == noErr else {
return nil
}
if let channelLayout = format.channelLayout {
var clientChannelLayout = channelLayout.layout.pointee
status = ExtAudioFileSetProperty(eaf!, kExtAudioFileProperty_ClientChannelLayout, UInt32(MemoryLayout.size(ofValue: clientChannelLayout)), &clientChannelLayout)
guard status == noErr else {
return nil
}
}
// Total decoded length in frames (at the client format), used to size the
// output buffer up front.
var frameLength: Int64 = 0
var propertySize: UInt32 = UInt32(MemoryLayout.size(ofValue: frameLength))
status = ExtAudioFileGetProperty(eaf!, kExtAudioFileProperty_FileLengthFrames, &propertySize, &frameLength)
guard status == noErr else {
return nil
}
guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(frameLength)) else {
return nil
}
// Scratch AudioBufferList for each 512-frame read: one buffer per channel
// when deinterleaved, a single interleaved buffer otherwise.
let bufferSizeFrames = 512
let bufferSizeBytes = Int(format.streamDescription.pointee.mBytesPerFrame) * bufferSizeFrames
let numBuffers = format.isInterleaved ? 1 : Int(format.channelCount)
let numInterleavedChannels = format.isInterleaved ? Int(format.channelCount) : 1
let audioBufferList = AudioBufferList.allocate(maximumBuffers: numBuffers)
for i in 0 ..< numBuffers {
audioBufferList[i] = AudioBuffer(mNumberChannels: UInt32(numInterleavedChannels), mDataByteSize: UInt32(bufferSizeBytes), mData: malloc(bufferSizeBytes))
}
defer {
for buffer in audioBufferList {
free(buffer.mData)
}
free(audioBufferList.unsafeMutablePointer)
}
// Pull decoded frames until EOF, appending each chunk into pcmBuffer.
while true {
var frameCount: UInt32 = UInt32(bufferSizeFrames)
status = ExtAudioFileRead(eaf!, &frameCount, audioBufferList.unsafeMutablePointer)
guard status == noErr else {
return nil
}
if frameCount == 0 {
// EOF reached.
break
}
let src = audioBufferList
let dst = UnsafeMutableAudioBufferListPointer(pcmBuffer.mutableAudioBufferList)
if src.count != dst.count {
return nil
}
for i in 0 ..< src.count {
let srcBuf = src[i]
let dstBuf = dst[i]
// Append at the destination's current write position. This relies on
// dstBuf.mDataByteSize tracking the bytes already written (updated when
// pcmBuffer.frameLength is advanced below) — TODO confirm against the
// AVAudioPCMBuffer.mutableAudioBufferList contract.
memcpy(dstBuf.mData?.advanced(by: Int(dstBuf.mDataByteSize)), srcBuf.mData, Int(srcBuf.mDataByteSize))
}
pcmBuffer.frameLength += frameCount
}
return pcmBuffer
}
}
A more robust solution would probably read the sample rate and channel count and give the option to preserve them.
Tested using:
let url = URL(fileURLWithPath: "/tmp/test.mp3")
let data = try! Data(contentsOf: url)
let format = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)!
if let d = data.convertedTo(format) {
let avf = try! AVAudioFile(forWriting: URL(fileURLWithPath: "/tmp/foo.wav"), settings: format.settings, commonFormat: format.commonFormat, interleaved: format.isInterleaved)
try! avf.write(from: d)
}

Sometimes methods fails with Fatal error: UnsafeMutablePointer.initialize overlapping range

I have the following code to decompress some Data back to a String in Swift 5. The method mostly works fine, but sometimes it fails with the following error message:
Thread 1: Fatal error: UnsafeMutablePointer.initialize overlapping range
extension Data
{
    /// Decompresses this data (a ZLIB deflate stream) into a UTF-8 string.
    ///
    /// - Parameter destinationSize: Capacity of the output buffer; must be at
    ///   least as large as the decompressed payload.
    /// - Returns: The decompressed string, "" when decompression produced no
    ///   bytes, or `nil` when the source bytes are unavailable.
    func decompress(destinationSize: Int) -> String?
    {
        let destinationBuffer = UnsafeMutablePointer<UInt8>.allocate(capacity: destinationSize)
        // BUG FIX: the original never released this buffer, leaking
        // destinationSize bytes on every call.
        defer { destinationBuffer.deallocate() }
        return self.withUnsafeBytes
        {
            (unsafeRawBufferPointer: UnsafeRawBufferPointer) -> String? in
            let unsafeBufferPointer = unsafeRawBufferPointer.bindMemory(to: UInt8.self)
            guard let unsafePointer = unsafeBufferPointer.baseAddress else
            {
                return nil
            }
            let decompressedSize = compression_decode_buffer(destinationBuffer, destinationSize, unsafePointer, self.count, nil, COMPRESSION_ZLIB)
            if decompressedSize == 0
            {
                return ""
            }
            // BUG FIX: the original used String(cString:), which scans for a
            // NUL terminator. compression_decode_buffer does not NUL-terminate
            // its output, so that scan could run past the buffer — the cause
            // of the intermittent "initialize overlapping range" fatal error.
            // Build the string from exactly the decoded bytes instead.
            return String(decoding: UnsafeBufferPointer(start: destinationBuffer, count: decompressedSize), as: UTF8.self)
        }
    }
}
The error occurs at the following line:
let string = String(cString: destinationBuffer)
Can someone please explain why this (sometimes) fails?
I have switched to the following code and now everything works fine (Swift 5):
import Compression
extension Data
{
/// Compresses this data as a ZLIB deflate stream.
/// - Returns: Compressed bytes, or `nil` on failure.
/// NOTE(review): `baseAddress!` below traps for empty Data (nil base
/// address) — TODO confirm callers never pass empty input.
func compress() -> Data?
{
return self.withUnsafeBytes
{
dataBytes in
let sourcePtr: UnsafePointer<UInt8> = dataBytes.baseAddress!.assumingMemoryBound(to: UInt8.self)
return self.perform(operation: COMPRESSION_STREAM_ENCODE, source: sourcePtr, sourceSize: self.count)
}
}
/// Decompresses this data (ZLIB deflate stream).
/// - Returns: Decompressed bytes, or `nil` on failure or empty input.
func decompress() -> Data?
{
return self.withUnsafeBytes
{
unsafeRawBufferPointer -> Data? in
let unsafeBufferPointer = unsafeRawBufferPointer.bindMemory(to: UInt8.self)
if let unsafePointer = unsafeBufferPointer.baseAddress
{
return self.perform(operation: COMPRESSION_STREAM_DECODE, source: unsafePointer, sourceSize: self.count)
}
return nil
}
}
/// Runs a single-shot compression_stream encode/decode over `source`.
/// - Parameters:
///   - operation: COMPRESSION_STREAM_ENCODE or COMPRESSION_STREAM_DECODE.
///   - source: Input bytes (must remain valid for the duration of the call).
///   - sourceSize: Number of input bytes; must be > 0.
///   - preload: Bytes prepended to the result before any stream output.
/// - Returns: The accumulated output, or `nil` on any stream error.
fileprivate func perform(operation: compression_stream_operation, source: UnsafePointer<UInt8>, sourceSize: Int, preload: Data = Data()) -> Data?
{
guard sourceSize > 0 else { return nil }
// NOTE(review): `stream` below is a value copy of *uninitialized* memory;
// the heap allocation is never used afterwards except to deallocate. A
// plain `var stream = compression_stream(...)` (or init'ing the pointee)
// would be equivalent and clearer — compression_stream_init fills it in.
let streamBase = UnsafeMutablePointer<compression_stream>.allocate(capacity: 1)
defer { streamBase.deallocate() }
var stream = streamBase.pointee
let status = compression_stream_init(&stream, operation, COMPRESSION_ZLIB)
guard status != COMPRESSION_STATUS_ERROR else { return nil }
defer { compression_stream_destroy(&stream) }
var result = preload
// All input is provided up front, so FINALIZE is set from the start.
var flags: Int32 = Int32(COMPRESSION_STREAM_FINALIZE.rawValue)
let blockLimit = 64 * 1024
var bufferSize = Swift.max(sourceSize, 64)
if sourceSize > blockLimit
{
bufferSize = blockLimit
}
let buffer = UnsafeMutablePointer<UInt8>.allocate(capacity: bufferSize)
defer { buffer.deallocate() }
stream.dst_ptr = buffer
stream.dst_size = bufferSize
stream.src_ptr = source
stream.src_size = sourceSize
while true
{
switch compression_stream_process(&stream, flags)
{
case COMPRESSION_STATUS_OK:
// OK with space left in dst would mean the stream stalled; bail out.
guard stream.dst_size == 0 else { return nil }
// Flush the filled output buffer and reset it for the next pass.
result.append(buffer, count: stream.dst_ptr - buffer)
stream.dst_ptr = buffer
stream.dst_size = bufferSize
// NOTE(review): dead code — `flags` starts as FINALIZE and is never
// reset to 0, so this branch can never execute.
if flags == 0 && stream.src_size == 0
{
flags = Int32(COMPRESSION_STREAM_FINALIZE.rawValue)
}
case COMPRESSION_STATUS_END:
// Stream finished: append whatever remains in the output buffer.
result.append(buffer, count: stream.dst_ptr - buffer)
return result
default:
return nil
}
}
}
}

FBSimulatorControl : Convert CVPixelBufferRef data to jpeg data for streaming

I am developing a CLI in Swift and using FBSimulatorControl in it. I want to stream the data obtained from the consumeData: delegate method; the data obtained in that method comes from a CVPixelBufferRef.
I logged an issue in the github repo of FBSimulatorControl.
I tried to regenerate the CVPixelBuffer from the data and tried to obtain the CIImage from it and convert that to jpeg data. But it does not seem to work. Can anyone help me with this? Adding below the code I've tried
// Rebuild a 750x1334 BGRA pixel buffer from the raw frame bytes in `data`.
var pixelBuffer:CVPixelBuffer? = nil
let result = CVPixelBufferCreate(kCFAllocatorDefault, 750, 1334, kCVPixelFormatType_32BGRA, nil, &pixelBuffer)
if result != kCVReturnSuccess {
// NOTE(review): message is misleading — this branch runs on *failure*.
print("pixel buffer create success")
return
}
CVPixelBufferLockBaseAddress(pixelBuffer!, .init(rawValue: 0))
// NOTE(review): 32BGRA is a non-planar format; GetBaseAddressOfPlane(_, 0)
// may return NULL for it — CVPixelBufferGetBaseAddress is presumably the
// intended call. TODO confirm.
let yDestPlane:UnsafeMutableRawPointer? = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer!, 0)
if yDestPlane == nil {
print("failed to create ydestplane")
return
}
let nsData = data as NSData
let rawPtr = nsData.bytes
// NOTE(review): this copies 750*1334*4 contiguous bytes, ignoring the
// buffer's bytes-per-row (which may exceed 750*4 due to row alignment);
// that would skew the image — copying row by row would be safer.
memcpy(yDestPlane!, rawPtr, 750*1334*4)
CVPixelBufferUnlockBaseAddress(pixelBuffer!, .init(rawValue: 0))
// NOTE(review): stray line — looks like a leftover fragment of a
// CVPixelBufferCreateWithBytes call from an earlier edit; remove it.
kCVPixelFormatType_32BGRA, getbasea, 2208, NULL, NULL, nil, &pixelBuffer)
if #available(OSX 10.12, *) {
// Render the pixel buffer to a CGImage via Core Image, then JPEG-encode.
let ciImage = CIImage.init(cvPixelBuffer: pixelBuffer!)
let tempContext = CIContext.init(options: nil)
let videoImage = tempContext.createCGImage(ciImage, from: CGRect.init(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer!), height: CVPixelBufferGetHeight(pixelBuffer!)))
let imageSize: NSSize = NSMakeSize(750, 1334)
let nsImageTest = NSImage(cgImage: videoImage!, size: imageSize)
if let bits = nsImageTest.representations.first as? NSBitmapImageRep {
let jpegFinalData = bits.representation(using: .JPEG, properties: [:])
if self.isStreaming {
// Find the websocket connection associated with this simulator's udid.
var simIndex:Int?
for i in 0...(AUConnectionSimulatorMap.instance.simConnectionMap.count-1) {
if sim.udid == AUConnectionSimulatorMap.instance.simConnectionMap[i].sim.sim.udid {
simIndex = i
break
}
}
// Pack the JPEG into a message-pack dictionary and push it over the socket.
var finalData:Data = Data()
let finalDict = ["data":["type":"onScreenFrame","value":jpegFinalData!]] as Dictionary<String,Any>
try! finalData.pack(finalDict)
AUConnectionSimulatorMap.instance.simConnectionMap[simIndex!].ws?.write(data: finalData)
}
}
} else {
// Fallback on earlier versions
}

Why can't I iterate through the full array in Metal

I have the following code, which I think allocates 2 arrays of 16kb of memory, fills one with 7 and the other with 4.
I then run the code and find that only the first 511 items get added together. I'm 80% certain it's to do with the size of the thread groups/thread counts. Any Ideas?
/// Allocates two page-aligned 16 KB float buffers, fills them with 7.0 and
/// 4.0, and runs a Metal compute kernel that adds them element-wise in place.
/// NOTE(review): relies on external state — initMetal(), and the
/// `kernelFunction` / `pipelineState` / `title` globals are defined elsewhere.
func test()
{
var (device, commandQueue, defaultLibrary, commandBuffer, computeCommandEncoder) = initMetal()
// 16 KB page alignment, required by makeBuffer(bytesNoCopy:).
let alignment:Int = 0x4000
var xpointer: UnsafeMutableRawPointer? = nil
var ypointer: UnsafeMutableRawPointer? = nil
let numberOfFloats = 4096 //56
let numberOfBytes: Int = 16384
let retx = posix_memalign(&xpointer, alignment, numberOfBytes)
if retx != noErr {
let err = String(validatingUTF8: strerror(retx)) ?? "unknown error"
fatalError("Unable to allocate aligned memory: \(err).")
}
let rety = posix_memalign(&ypointer, alignment, numberOfBytes)
if rety != noErr {
let err = String(validatingUTF8: strerror(rety)) ?? "unknown error"
fatalError("Unable to allocate aligned memory: \(err).")
}
let datax = xpointer!.bindMemory(to: Float.self, capacity: numberOfFloats)
for index in 0..<numberOfFloats {
datax[index] = 7.0
}
let datay = ypointer!.bindMemory(to: Float.self, capacity: numberOfFloats)
for index in 0..<numberOfFloats {
datay[index] = 4.0
}
// NOTE(review): loads a function named "sigmoid", but the kernel shown
// below is `addTest` — TODO confirm which shader is actually intended.
kernelFunction = defaultLibrary.makeFunction(name: "sigmoid")
do
{
pipelineState = try device.makeComputePipelineState(function: kernelFunction!)
}
catch
{
fatalError("Unable to create pipeline state")
}
let startTime = CFAbsoluteTimeGetCurrent()
computeCommandEncoder.setComputePipelineState(pipelineState)
// Wrap the aligned allocations without copying; buffers are shared with CPU.
var xvectorBufferNoCopy = device.makeBuffer(bytesNoCopy: xpointer!, length: numberOfBytes, options: [], deallocator: nil)
computeCommandEncoder.setBuffer(xvectorBufferNoCopy, offset: 0, at: 0)
var yvectorBufferNoCopy = device.makeBuffer(bytesNoCopy: ypointer!, length: numberOfBytes, options: [], deallocator: nil)
computeCommandEncoder.setBuffer(yvectorBufferNoCopy, offset: 0, at: 1)
// NOTE(review): this dispatches 1024 threadgroups x 32 threads = 32768
// threads, but the buffers hold only 4096 floats. With an unguarded kernel
// (see addTest below) the extra threads read/write out of bounds —
// presumably this should be numberOfFloats / 32 = 128 threadgroups, and is
// the likely cause of the "only the first items are correct" symptom.
var threadgroupCounts = MTLSize(width:32,height:1,depth:1)
var threadgroups = MTLSize(width:1024, height:1, depth:1)
computeCommandEncoder.dispatchThreadgroups(threadgroups, threadsPerThreadgroup: threadgroupCounts)
computeCommandEncoder.endEncoding()
commandBuffer.commit()
commandBuffer.waitUntilCompleted()
let timeElapsed = CFAbsoluteTimeGetCurrent() - startTime
print("Time elapsed for \(title): \(timeElapsed) s")
// Read the results back through the shared y buffer.
let data = ypointer!.bindMemory(to: Float.self, capacity: numberOfFloats)
for index in 0..<numberOfFloats {
print("\(index) - \(data[index])")
}
}
// Element-wise add: outVector[id] += inVector[id].
// NOTE(review): there is no bounds guard on `id`; if the host dispatches more
// threads than there are elements (as the code above does), the extra threads
// read and write past the end of both buffers.
kernel void addTest(const device float *inVector [[ buffer(0) ]],
device float *outVector [[ buffer(1) ]],
uint id [[ thread_position_in_grid ]]) {
outVector[id] = inVector[id] + outVector[id];
}