How to create custom empty Texture correctly? - swift

I need to create a MTLTexture with my custom data (which is currently filled with 0), in order to do it I use such an implementation
private func createTexture(frame: UnsafeMutableRawPointer) -> MTLTexture? {
let width = 2048
let height = 2048
let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
pixelFormat: MTLPixelFormat.rgba8Unorm,
width: width,
height: height,
mipmapped: false)
textureDescriptor.usage = [.shaderWrite, .shaderRead]
guard let texture: MTLTexture = device?.makeTexture(descriptor: textureDescriptor) else
{
logger?.log(severity: .error, msg: "create texture FAILED.")
return nil
}
let region = MTLRegion.init(origin: MTLOrigin.init(x: 0, y: 0, z: 0), size: MTLSize.init(width: texture.width, height: texture.height, depth: 4));
//MARK: >>> JUST FOR TEST
let count = width * height * 4
let stride = MemoryLayout<CChar>.stride
let alignment = MemoryLayout<CChar>.alignment
let byteCount = stride * count
let p = UnsafeMutableRawPointer.allocate(byteCount: byteCount, alignment: alignment)
let data = p.initializeMemory(as: CChar.self, repeating: 0, count: count)
//MARK: <<<
texture.replace(region: region, mipmapLevel: 0, withBytes: data, bytesPerRow: width * 4)
return texture
}
So, here I created a descriptor with 4 channels, then I created a region with depth: 4, then created UnsafeMutableRawPointer filled with data stride * count size, but I get an error in this line
texture.replace(region: region, mipmapLevel: 0, withBytes: data, bytesPerRow: width * 4)
_validateReplaceRegion:155: failed assertion `(origin.z + size.depth)(4) must be <= depth(1).'
what am I doing wrong?

The depth property in the following line is incorrect:
let region = MTLRegion.init(origin: MTLOrigin.init(x: 0, y: 0, z: 0), size: MTLSize.init(width: texture.width, height: texture.height, depth: 4));
The depth property describes the number of elements in the z dimension. For 2D texture it should be 1.

Related

How to create NSImage without device screen scaling

I would like to generate 64x64 pixel NSImage and then perform pixel by pixel processing on the data. However, when I generate the image in a fairly canonical way on my retina iMac, the resulting image is 128x128. I tried adding a custom imageRep, but that had no effect. Here is some playground code:
let side = 64
let size = NSSize(width: side, height: side)
let imRect = NSRect(origin: NSPoint(x: 0, y: 0), size: size)
let result = NSImage(size: size)
let rep2 = NSBitmapImageRep(bitmapDataPlanes: nil,
pixelsWide: side, pixelsHigh: side,
bitsPerSample: 8, samplesPerPixel: 4,
hasAlpha: true, isPlanar: false,
colorSpaceName: NSColorSpaceName.calibratedRGB,
bytesPerRow: side * 4, bitsPerPixel: 32)
result.addRepresentation(rep2!)
let backColor = NSColor.green
result.lockFocus()
backColor.setFill()
imRect.fill()
randomAttrGlyph.draw(at: NSPoint(x: 2, y: 2))
result.unlockFocus()
let cgImg = result.cgImage(forProposedRect: nil, context: nil, hints: nil)
result and cgImg are both 128x128.
Also, pulling the RGBA data produces junk, for example a .white pixel produces an
RGBA data of (0, 60, 0, 60) instead of the expected (255, 255, 255, 255)
The code I'm using for pixel data is
let size = image.size
let cgImg = (image.cgImage(forProposedRect: nil, context: nil, hints: nil)!)
let cgWidth = cgImg.width
let cgHeight = cgImg.height
var returnPixels = [[Pixel]](repeating: [Pixel](repeating: black, count: Int(cgWidth)), count: Int(cgHeight)) // two d array for scaled grayscale
let pixelData = (image.cgImage(forProposedRect: nil, context: nil, hints: nil)!).dataProvider!.data
let data: UnsafePointer<UInt8> = CFDataGetBytePtr(pixelData)
for y in 0..<Int(cgHeight) {
for x in 0..<Int(cgWidth) {
let pos = CGPoint(x: x, y: y)
let pixelInfo: Int = ((Int(image.size.width) * Int(pos.y) * 4) + Int(pos.x) * 4)
let r = data[pixelInfo]
let g = data[pixelInfo + 1]
let b = data[pixelInfo + 2]
let a = data[pixelInfo + 3]
let sum = UInt16(r) + UInt16(g) + UInt16(b)
returnPixels[y][x] = Pixel(a: a, r: r, g: g, b: b, grey: sum) //no alpha
}
}
Any suggestions?

Swift: Apply LUT (Lookup Table) to an image using CIColorCube slow performance

I'm applying a LUT (from a .png - Example LUT Image) to an image using CIColorCube. It works well. The only problem I'm facing is that when I create the buttons thumbnails the app stops for a few seconds.
The buttons look like this -> Buttons Example Image
This is my code:
#IBOutlet weak var filtersScrollView: UIScrollView!
var filters = ["-", "Filter1", "Filter2", "Filter3", "Filter4"]
override func viewDidLoad() {
createFilters()
}
func createFilters() {
var x: CGFloat = 10
let y: CGFloat = 0
let width: CGFloat = 60
let height: CGFloat = 83
let gap: CGFloat = 2
for i in 0..<filters.count {
let filterButton = UIButton(type: .custom)
filterButton.frame = CGRect(x: x, y: y, width: width, height: height)
filterButton.imageView?.contentMode = .scaleAspectFill
filterButton.setTitleColor(#colorLiteral(red: 1, green: 1, blue: 1, alpha: 0), for: .normal)
let text = UILabel()
text.frame = CGRect(x: 0, y: height - 21, width: filterButton.frame.width, height: 21)
text.textAlignment = .center
text.backgroundColor = #colorLiteral(red: 0.9372549057, green: 0.3490196168, blue: 0.1921568662, alpha: 1)
text.textColor = .white
text.font = .systemFont(ofSize: 8.5, weight: .medium)
filterButton.addSubview(text)
filtersScrollView.insertSubview(filterButton, at: 1)
x += width + gap
if i == 0 {
filterButton.setImage(originalImage, for: .normal)
text.text = "-"
text.backgroundColor = #colorLiteral(red: 0.1215686275, green: 0.1215686275, blue: 0.1215686275, alpha: 1)
}
else {
// THIS LINE MAKES THE APP STOP FOR A FEW SECONDS
let filteredImage = filterFromLUT(inputImage: originalCIImage, lut: "\(filters[i])")?.outputImage
filterButton.setImage(UIImage(ciImage: filteredImage!), for: .normal)
text.text = "\(filters[i])"
}
}
filtersScrollView.contentSize = CGSize(width: x, height: height)
}
func filterFromLUT(inputImage: CIImage, lut: String) -> CIFilter? {
let dimension = 64
let lutImage = UIImage(named: lut)!.cgImage
let width = lutImage!.width
let height = lutImage!.height
let rowNum = width / dimension
let columnNum = height / dimension
let bitmap = createBitmap(image: lutImage!)
let dataSize = dimension * dimension * dimension * MemoryLayout<Float>.size * 4
var array = Array<Float>(repeating: 0, count: dataSize)
var bitmapOffest: Int = 0
var z: Int = 0
for _ in stride(from: 0, to: rowNum, by: 1) {
for y in stride(from: 0, to: dimension, by: 1) {
let tmp = z
for _ in stride(from: 0, to: columnNum, by: 1) {
for x in stride(from: 0, to: dimension, by: 1) {
let dataOffset = (z * dimension * dimension + y * dimension + x) * 4
let position = bitmap!
.advanced(by: bitmapOffest)
array[dataOffset + 0] = Float(position
.advanced(by: 0)
.pointee) / 255
array[dataOffset + 1] = Float(position
.advanced(by: 1)
.pointee) / 255
array[dataOffset + 2] = Float(position
.advanced(by: 2)
.pointee) / 255
array[dataOffset + 3] = Float(position
.advanced(by: 3)
.pointee) / 255
bitmapOffest += 4
}
z += 1
}
z = tmp
}
z += columnNum
}
free(bitmap)
let data = Data.init(bytes: array, count: dataSize)
// Create CIColorCube filter
let filter = CIFilter.colorCube()
filter.inputImage = inputImage
filter.cubeData = data
filter.cubeDimension = Float(dimension)
return filter
}
func createBitmap(image: CGImage) -> UnsafeMutablePointer<UInt8>? {
let width = image.width
let height = image.height
let bitsPerComponent = 8
let bytesPerRow = width * 4
let bitmapSize = bytesPerRow * height
guard let data = malloc(bitmapSize) else {
return nil
}
let context = CGContext(
data: data,
width: width,
height: height,
bitsPerComponent: bitsPerComponent,
bytesPerRow: bytesPerRow,
space: CGColorSpaceCreateDeviceRGB(),
bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue,
releaseCallback: nil,
releaseInfo: nil)
context!.draw(image, in: CGRect(x: 0, y: 0, width: width, height: height))
return data.bindMemory(to: UInt8.self, capacity: bitmapSize)
}
I think that maybe the CGContext inside the createBitmap() function is causing this. Does anyone know how to solve this?
There are a few things you can do to improve performance:
Currently, you are processing the original input image (which I assume is pretty large) just to display the result in a 60 x 83 button. Consider scaling the image down first before putting it through the filters.
You can avoid blocking the UI by making the image processing code asynchronous. Just create the buttons in their proper size and DispatchQueue.global().async { ... } the image processing.
Don't use .setImage(UIImage(ciImage: filteredImage). In my experience creating a UIImage from a CIImage this way to very unpredictable. Rather use a CIContext to render the filtered image into a CGImage and convert that into a UIImage afterward. Also try to re-use a single CIContext instead of re-creating it again for each image.
The code for converting the LUT image into a float data array can be sped-up by using vDSP (see below).
Using vDSP for creating the LUT data:
let lutImage = UIImage(named: lut)!.cgImage
let dimension = lutImage.height
// get data from image
let lutImageData = lutImage.dataProvider?.data
let lutImageDataPtr = CFDataGetBytePtr(lutImageData)!
// convert to float and divide by 255
let numElements = dimension * dimension * dimension * 4
let inDataFloat = UnsafeMutablePointer<Float>.allocate(capacity: numElements)
vDSP_vfltu8(lutImageDataPtr, 1, inDataFloat, 1, vDSP_Length(numElements))
var div: Float = 255.0
vDSP_vsdiv(inDataFloat, 1, &div, inDataFloat, 1, vDSP_Length(numElements))
// convert buffer pointer to data
let lutData = NSData(bytesNoCopy: inDataFloat, length: numElements * MemoryLayout<Float>.size, freeWhenDone: true)

Create solid color texture from a scratch

I'm creating an engine and need a dummy single-pixel texture for objects which do not have one of required materials. It should be 4 channel texture and example invocation could look as follows:
let dummy = device.makeSolidTexture(device: device, color: simd_float4(0, 0, 1, 1))!
Code I'm using
extension MTLDevice {
func makeSolidTexture(device: MTLDevice,
color: simd_float4,
pixelFormat: MTLPixelFormat = .bgra8Unorm) -> MTLTexture? {
let descriptor = MTLTextureDescriptor()
descriptor.width = 1
descriptor.height = 1
descriptor.mipmapLevelCount = 1
descriptor.storageMode = .managed
descriptor.arrayLength = 1
descriptor.sampleCount = 1
descriptor.cpuCacheMode = .writeCombined
descriptor.allowGPUOptimizedContents = false
descriptor.pixelFormat = pixelFormat
descriptor.textureType = .type2D
descriptor.usage = .shaderRead
guard let texture = device.makeTexture(descriptor: descriptor) else {
return nil
}
let origin = MTLOrigin(x: 0, y: 0, z: 0)
let size = MTLSize(width: texture.width, height: texture.height, depth: texture.depth)
let region = MTLRegion(origin: origin, size: size)
withUnsafePointer(to: color) { ptr in
texture.replace(region: region, mipmapLevel: 0, withBytes: ptr, bytesPerRow: 4)
}
return texture
}
}
The problem is that when I inspect the texture after capturing a frame I see:
What do I miss? Why is it black?
It turned out that I made two mistakes there. First is that a textures MUST be aligned to 256 bytes, so I extended to size to 8 x 8 what considering the pixel format meets the requirement. My second mistake was to create a texture from float vector directly. Underlying type of the texture store the number in range 0-255, so the float vector passed to the function must be scaled and then converted to simd_uchar4 type.
Here is how it should look like
extension MTLDevice {
func makeSolid2DTexture(device: MTLDevice,
color: simd_float4,
pixelFormat: MTLPixelFormat = .bgra8Unorm) -> MTLTexture? {
let descriptor = MTLTextureDescriptor()
descriptor.width = 8
descriptor.height = 8
descriptor.mipmapLevelCount = 1
descriptor.storageMode = .managed
descriptor.arrayLength = 1
descriptor.sampleCount = 1
descriptor.cpuCacheMode = .writeCombined
descriptor.allowGPUOptimizedContents = false
descriptor.pixelFormat = pixelFormat
descriptor.textureType = .type2D
descriptor.usage = .shaderRead
guard let texture = device.makeTexture(descriptor: descriptor) else {
return nil
}
let origin = MTLOrigin(x: 0, y: 0, z: 0)
let size = MTLSize(width: texture.width, height: texture.height, depth: texture.depth)
let region = MTLRegion(origin: origin, size: size)
let mappedColor = simd_uchar4(color * 255)
Array<simd_uchar4>(repeating: mappedColor, count: 64).withUnsafeBytes { ptr in
texture.replace(region: region, mipmapLevel: 0, withBytes: ptr.baseAddress!, bytesPerRow: 32)
}
return texture
}
}
// bgra format in range [0, 1]
let foo = device.makeSolid2DTexture(device: device, color: simd_float4(1, 0, 0, 1))!

Get Alpha Percentage of image Swift

I have function to calculate alpha of image. But I got crash for iPhone 5, work well with iPhone 6 and later.
private func alphaOnlyPersentage(img: UIImage) -> Float {
let width = Int(img.size.width)
let height = Int(img.size.height)
let bitmapBytesPerRow = width
let bitmapByteCount = bitmapBytesPerRow * height
let pixelData = UnsafeMutablePointer<UInt8>.allocate(capacity: bitmapByteCount)
let colorSpace = CGColorSpaceCreateDeviceGray()
let context = CGContext(data: pixelData,
width: width,
height: height,
bitsPerComponent: 8,
bytesPerRow: bitmapBytesPerRow,
space: colorSpace,
bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.alphaOnly.rawValue).rawValue)!
let rect = CGRect(x: 0, y: 0, width: width, height: height)
context.clear(rect)
context.draw(img.cgImage!, in: rect)
var alphaOnlyPixels = 0
for x in 0...Int(width) {
for y in 0...Int(height) {
if pixelData[y * width + x] == 0 {
alphaOnlyPixels += 1
}
}
}
free(pixelData)
return Float(alphaOnlyPixels) / Float(bitmapByteCount)
}
Please help me to fix! Thank you. Sorry I am newbie with iOS coding.
Replace ... with ..< otherwise you are accessing one row and one column too many.
Note the crash is random depending on how the memory is allocated and whether you have access to the bytes at the given address, which is outside of the chunk allocated for you.
Or replace the iteration by simpler:
for i in 0 ..< bitmapByteCount {
if pixelData[i] == 0 {
alphaOnlyPixels += 1
}
}
You can also use Data to create your bytes, which will later simplify iteration:
var pixelData = Data(count: bitmapByteCount)
pixelData.withUnsafeMutableBytes { (bytes: UnsafeMutablePointer<UInt8>) in
let context = CGContext(data: bytes,
width: width,
height: height,
bitsPerComponent: 8,
bytesPerRow: bitmapBytesPerRow,
space: colorSpace,
bitmapInfo: CGImageAlphaInfo.alphaOnly.rawValue)!
let rect = CGRect(x: 0, y: 0, width: width, height: height)
context.clear(rect)
context.draw(img.cgImage!, in: rect)
}
let alphaOnlyPixels = pixelData.filter { $0 == 0 }.count

Metal Texture buffer not getting flushed in Swift 2.2

I am using Metal to display 3D objects with texture.
On first load the texture is showing fine. But if I change it runtime with a new texture then the new one is getting merger with the old one.
My code is:
let texture = MetalTexture(resourceName: "", ext: "", mipmaped: true)
// texture.path = "\(NSBundle.mainBundle().bundlePath)/smlroot/models/bodies/FB020.png"
texture.path = pngPath
texture.loadTexture(device: device, commandQ: commandQ, flip: false)
super.readPly(vertices,faces: faces, texture: texture.texture)
Here I am creating a new texture object everytime I change the texture runtime. Then
Using this code to load it:
func loadTexture(device device: MTLDevice, commandQ: MTLCommandQueue, flip: Bool){
let image = UIImage(contentsOfFile: path)?.CGImage
let colorSpace = CGColorSpaceCreateDeviceRGB()
width = CGImageGetWidth(image!)
height = CGImageGetHeight(image!)
let rowBytes = width * bytesPerPixel
let context = CGBitmapContextCreate(nil, width, height, bitsPerComponent, rowBytes, colorSpace, CGImageAlphaInfo.PremultipliedLast.rawValue)
let bounds = CGRect(x: 0, y: 0, width: Int(width), height: Int(height))
CGContextClearRect(context!, bounds)
if flip == false{
CGContextTranslateCTM(context!, 0, CGFloat(self.height))
CGContextScaleCTM(context!, 1.0, -1.0)
}
CGContextDrawImage(context!, bounds, image!)
let texDescriptor = MTLTextureDescriptor.texture2DDescriptorWithPixelFormat(MTLPixelFormat.RGBA8Unorm, width: Int(width), height: Int(height), mipmapped: isMipmaped)
target = texDescriptor.textureType
texture = device.newTextureWithDescriptor(texDescriptor)
let pixelsData = CGBitmapContextGetData(context!)
let region = MTLRegionMake2D(0, 0, Int(width), Int(height))
texture.replaceRegion(region, mipmapLevel: 0, withBytes: pixelsData, bytesPerRow: Int(rowBytes))
if (isMipmaped == true){
generateMipMapLayersUsingSystemFunc(texture, device: device, commandQ: commandQ, block: { (buffer) -> Void in
print("mips generated")
})
}
print("mipCount:\(texture.mipmapLevelCount)")
}
while rendering the object I am using the bellow pipeline to load it:
renderEncoder.setVertexBuffer(vertexBuffer, offset: 0, atIndex: 0)
renderEncoder.setCullMode(.None)
renderEncoder.setFragmentTexture(texture, atIndex: 0)
if let samplerState = samplerState{
renderEncoder.setFragmentSamplerState(samplerState, atIndex: 0)
}
let nodeModelMatrix = self.modelMatrix(rotationY)
nodeModelMatrix.multiplyLeft(parentModelViewMatrix)
let uniformBuffer = bufferProvider.nextUniformsBuffer(projectionMatrix, modelViewMatrix: nodeModelMatrix, light: light)
renderEncoder.setVertexBuffer(uniformBuffer, offset: 0, atIndex: 1)
renderEncoder.setFragmentBuffer(uniformBuffer, offset: 0, atIndex: 1)
//change this.
//renderEncoder.drawPrimitives(.Triangle, vertexStart: 0, vertexCount: vertexCount)
renderEncoder.drawIndexedPrimitives(.Triangle, indexCount: indexCount, indexType: .UInt32, indexBuffer: indexBuffer, indexBufferOffset: 0)
And I am getting a overlapped texture. Can anyone help me to identify the problem?