I have this cameraView and on top of it I have this UIImageView. The UIImageView is basically a filter that changes the color of the cameraView. I was able to take a picture with the code below, but the output image turns the portrait capture into landscape. Why does that happen? Thanks!
@IBAction func takePicture(_ sender: Any) {
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    stillImageOutput.capturePhoto(with: settings, delegate: self)
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    let renderer = UIGraphicsImageRenderer(size: view.bounds.size)
    let image = renderer.image { ctx in
        view.drawHierarchy(in: view.bounds, afterScreenUpdates: true)
    }
    if let imageData = photo.fileDataRepresentation() {
        if let cameraImage = UIImage(data: imageData) {
            let newImage = self.composite(image: cameraImage, overlay: image, scaleOverlay: true)
            captureImageView.image = newImage
        }
    }
}
// combine the image view with the UIView
func composite(image: UIImage, overlay: UIImage, scaleOverlay: Bool = false) -> UIImage? {
    UIGraphicsBeginImageContext(image.size)
    defer { UIGraphicsEndImageContext() }  // end the context so it doesn't leak
    var rect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
    image.draw(in: rect)
    if scaleOverlay == false {
        rect = CGRect(x: 0, y: 0, width: overlay.size.width, height: overlay.size.height)
    }
    overlay.draw(in: rect)
    return UIGraphicsGetImageFromCurrentImageContext()
}
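As an aside on the portrait-to-landscape question: one common cause (it can't be confirmed from the code alone) is that the photo output's video connection stays in its default landscape orientation, so the JPEG is delivered rotated relative to the portrait preview. A minimal sketch, assuming stillImageOutput is your AVCapturePhotoOutput and the app is locked to portrait (otherwise derive the value from the current interface orientation):

@IBAction func takePicture(_ sender: Any) {
    // Keep the captured photo's orientation in sync with the portrait UI.
    if let connection = stillImageOutput.connection(with: .video), connection.isVideoOrientationSupported {
        connection.videoOrientation = .portrait
    }
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    stillImageOutput.capturePhoto(with: settings, delegate: self)
}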
I'm taking a snapshot of a PDFView (PDFKit) for streaming (20 times per second), and I use this extension:
extension UIView {
    func asImageBackground(viewLayer: CALayer, viewBounds: CGRect) -> UIImage {
        let renderer = UIGraphicsImageRenderer(bounds: viewBounds)
        return renderer.image { rendererContext in
            viewLayer.render(in: rendererContext.cgContext)
        }
    }
}
But the output UIImage from this extension has a high resolution, which makes it difficult to stream. I can reduce it with this extension:
extension UIImage {
    func resize(_ max_size: CGFloat) -> UIImage {
        // adjust for device pixel density
        let max_size_pixels = max_size / UIScreen.main.scale
        // work out aspect ratio
        let aspectRatio = size.width / size.height
        // variables for storing calculated data
        var width: CGFloat
        var height: CGFloat
        var newImage: UIImage
        if aspectRatio > 1 {
            // landscape
            width = max_size_pixels
            height = max_size_pixels / aspectRatio
        } else {
            // portrait
            height = max_size_pixels
            width = max_size_pixels * aspectRatio
        }
        // create an image renderer of the correct size
        let renderer = UIGraphicsImageRenderer(size: CGSize(width: width, height: height), format: UIGraphicsImageRendererFormat.default())
        // render the image
        newImage = renderer.image { (context) in
            self.draw(in: CGRect(x: 0, y: 0, width: width, height: height))
        }
        // return the image
        return newImage
    }
}
But it adds extra work, which makes the process even worse. Is there a better way?
Thanks
You can downsample it using ImageIO, which is what Apple recommends:
extension UIImage {
    func downsample(to resolution: CGSize) -> UIImage? {
        let imageSourceOptions = [kCGImageSourceShouldCache: false] as CFDictionary
        guard let data = self.jpegData(compressionQuality: 0.75) as? CFData,
              let imageSource = CGImageSourceCreateWithData(data, imageSourceOptions) else {
            return nil
        }
        let maxDimensionInPixels = Swift.max(resolution.width, resolution.height) * 3
        let downsampleOptions = [
            kCGImageSourceCreateThumbnailFromImageAlways: true,
            kCGImageSourceShouldCacheImmediately: true,
            kCGImageSourceCreateThumbnailWithTransform: true,
            kCGImageSourceThumbnailMaxPixelSize: maxDimensionInPixels
        ] as CFDictionary
        guard let downsampledImage = CGImageSourceCreateThumbnailAtIndex(imageSource, 0, downsampleOptions) else {
            return nil
        }
        return UIImage(cgImage: downsampledImage)
    }
}
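A hypothetical usage sketch, assuming frame is the UIImage returned by asImageBackground above and that 640×480 is small enough for your stream; send(_:) stands in for whatever hand-off your streaming pipeline uses:

let targetResolution = CGSize(width: 640, height: 480)  // assumed stream size
if let smallFrame = frame.downsample(to: targetResolution) {
    send(smallFrame)  // hypothetical hand-off to the streaming code
}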
This is the code I am using
extension UIImage {
    var ellipseMasked: UIImage? {
        guard let cgImage = cgImage else { return nil }
        let rect = CGRect(origin: .zero, size: size)
        return UIGraphicsImageRenderer(size: size, format: imageRendererFormat)
            .image { _ in
                UIBezierPath(ovalIn: rect).addClip()
                UIImage(cgImage: cgImage, scale: scale, orientation: imageOrientation)
                    .draw(in: rect)
            }
    }
}
This is the image I got
The background color is black.
How can I make the background transparent?
I tried different ways but haven't made it work yet.
You can subclass UIImageView and mask its CALayer instead of clipping the image itself:
extension CAShapeLayer {
    convenience init(path: UIBezierPath) {
        self.init()
        self.path = path.cgPath
    }
}

class EllipsedView: UIImageView {
    override func layoutSubviews() {
        super.layoutSubviews()
        layer.mask = CAShapeLayer(path: .init(ovalIn: bounds))
    }
}
let profilePicture = UIImage(data: try! Data(contentsOf: URL(string:"http://i.stack.imgur.com/Xs4RX.jpg")!))!
let iv = EllipsedView(image: profilePicture)
edit/update
If you need to clip the UIImage itself, you can do it as follows:
extension UIImage {
    var ellipseMasked: UIImage? {
        UIGraphicsBeginImageContextWithOptions(size, false, scale)
        defer { UIGraphicsEndImageContext() }
        UIBezierPath(ovalIn: .init(origin: .zero, size: size)).addClip()
        draw(in: .init(origin: .zero, size: size))
        return UIGraphicsGetImageFromCurrentImageContext()
    }
}
For iOS 10+ you can use UIGraphicsImageRenderer:
extension UIImage {
    var ellipseMasked: UIImage {
        let rect = CGRect(origin: .zero, size: size)
        let format = imageRendererFormat
        format.opaque = false
        return UIGraphicsImageRenderer(size: size, format: format).image { _ in
            UIBezierPath(ovalIn: rect).addClip()
            draw(in: rect)
        }
    }
}
let profilePicture = UIImage(data: try! Data(contentsOf: URL(string:"http://i.stack.imgur.com/Xs4RX.jpg")!))!
profilePicture.ellipseMasked
Here are two solutions using SwiftUI.
This solution can be used to clip the image shape to a circle.
Image("imagename").resizable()
.clipShape(Circle())
.scaledToFit()
This solution can be used to get more of an ellipse or oval shape from the image.
Image("imagename").resizable()
.cornerRadius(100)
.scaledToFit()
.padding()
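If you want a true ellipse rather than a fixed corner radius, SwiftUI also provides an Ellipse shape; a minimal sketch, assuming the same image asset:

Image("imagename")
    .resizable()
    .scaledToFit()
    .clipShape(Ellipse())  // clips to an ellipse that fills the view's bounds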
I have an application that uses ARSCNView. I'm trying to take a screenshot on the tap of a button and save that image to the gallery. But when I take a screenshot, it does not show the content on the screen: it shows just the scene image. I have some labels on it, but they do not appear in the image. This is my code:
@IBAction func captureImage(_ sender: Any) {
    image = sceneView.snapshot()
    UIImageWriteToSavedPhotosAlbum(image!, nil, nil, nil)
}
How can I include the labels and buttons on the ARSCNView in the screenshot?
snapshot() will only take a screenshot of the scene.
To take a screenshot of the scene together with the labels and buttons, use the method below:
func snapshot(of rect: CGRect? = nil) -> UIImage? {
    UIGraphicsBeginImageContextWithOptions(self.view.bounds.size, self.view.isOpaque, 0)
    self.view.drawHierarchy(in: self.view.bounds, afterScreenUpdates: true)
    let fullImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    guard let image = fullImage, let rect = rect else { return fullImage }
    let scale = image.scale
    let scaledRect = CGRect(x: rect.origin.x * scale, y: rect.origin.y * scale,
                            width: rect.size.width * scale, height: rect.size.height * scale)
    guard let cgImage = image.cgImage?.cropping(to: scaledRect) else { return nil }
    return UIImage(cgImage: cgImage, scale: scale, orientation: .up)
}
To take a screenshot:
@IBAction func takeScreenShotTapped(_ sender: Any) {
    let screenShot = snapshot(of: CGRect(x: 80, y: 80, width: 100, height: 100))
}
If you want a screenshot of the full screen, just call snapshot().
Hope this helps :)
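A hypothetical version of the original capture action using this helper, assuming snapshot(of:) lives in the same view controller:

@IBAction func captureImage(_ sender: Any) {
    // Capture the whole view hierarchy (scene plus labels and buttons) and save it.
    if let fullScreenShot = snapshot() {
        UIImageWriteToSavedPhotosAlbum(fullScreenShot, nil, nil, nil)
    }
}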
I am changing the color of a UISlider thumb by setting .thumbTintColor:
@IBAction func slider1Master(sender: AnyObject) {
    slider1.thumbTintColor = UIColor.orangeColor()
}
It works, but I want the color to change back to its original state when the touch ends (the user lifts their finger).
Does anyone have any suggestions? Thank you.
You can use "setThumbImage" instead.
Then you have the option of setting an image for a specific state of action.
For the image, just create a rounder image with the color you desire.
// Creating an image with rounded corners:
extension UIImage {
    class func createThumbImage(size: CGFloat, color: UIColor) -> UIImage {
        let layerFrame = CGRectMake(0, 0, size, size)
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = CGPathCreateWithEllipseInRect(layerFrame.insetBy(dx: 1, dy: 1), nil)
        shapeLayer.fillColor = color.CGColor
        shapeLayer.strokeColor = color.colorWithAlphaComponent(0.65).CGColor
        let layer = CALayer.init()
        layer.frame = layerFrame
        layer.addSublayer(shapeLayer)
        return self.imageFromLayer(layer)
    }

    class func imageFromLayer(layer: CALayer) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(layer.frame.size, false, UIScreen.mainScreen().scale)
        layer.renderInContext(UIGraphicsGetCurrentContext()!)
        let outputImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return outputImage
    }
}
//Setting the image for a selected state of UISlider:
func setupSlider() {
    let size: CGFloat = 12
    let highlightedStateOrangeColorImage = UIImage.createThumbImage(size, color: UIColor.orangeColor())
    let defaultStateBlueColorImage = UIImage.createThumbImage(size, color: UIColor.blueColor())
    self.slider.setThumbImage(highlightedStateOrangeColorImage, forState: UIControlState.Highlighted)
    self.slider.setThumbImage(defaultStateBlueColorImage, forState: UIControlState.Normal)
}
You can safely accept McMatan's solution as your answer. It is good for several reasons:
- the colour changes back to its original state when the user lifts a finger, as you requested
- using the extension to create a shape does away with image assets for the UISlider
- it could also be used to draw images for circular UIButtons and circular UIViews
- it can also create a shape with colours matching other UISlider design elements (if so desired)
The code below does just that. I used McMatan’s UIImage extension with no changes other than translation to Swift 3. But I have split his function setUpSlider() into two, one for drawing the circular image in its default state, the other for drawing it in its highlighted state.
By accepting McMatan’s solution, you will encourage those who contribute their experience and free time to continue making this forum worthwhile for the rest of us. So please do.
class ViewController: UIViewController {
    var slider: UISlider!
    let defaultColour = UIColor.blue
    let highlightedColour = UIColor.orange
    let thumbSize = 20

    override func viewDidLoad() {
        super.viewDidLoad()
        slider = UISlider(frame: CGRect(x: 0, y: 0, width: 200, height: 23))
        slider.minimumValue = 0
        slider.minimumTrackTintColor = defaultColour
        slider.maximumValue = 100
        slider.maximumTrackTintColor = highlightedColour
        slider.center = view.center
        slider.value = slider.maximumValue / 2.0
        let highlightedImage = makeHighlightedImage()
        let defaultImage = makeDefaultImage()
        slider.setThumbImage(highlightedImage, for: UIControlState.highlighted)
        slider.setThumbImage(defaultImage, for: UIControlState.normal)
        slider.isContinuous = false
        view.addSubview(slider)
        slider.addTarget(self, action: #selector(sliderValueChanged), for: UIControlEvents.valueChanged)
    }

    func sliderValueChanged(sender: UISlider) {
        print(sender.value)
    }

    func makeHighlightedImage() -> (UIImage) {
        let size = CGFloat(thumbSize)
        let highlightedStateImage = UIImage.createThumbImage(size: size, color: highlightedColour)
        return (highlightedStateImage)
    }

    func makeDefaultImage() -> (UIImage) {
        let size = CGFloat(thumbSize)
        let defaultStateImage = UIImage.createThumbImage(size: size, color: defaultColour)
        return (defaultStateImage)
    }
}
Extension translated to Swift 3
import UIKit

extension UIImage {
    class func createThumbImage(size: CGFloat, color: UIColor) -> UIImage {
        let layerFrame = CGRect(x: 0, y: 0, width: size, height: size)
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = CGPath(ellipseIn: layerFrame.insetBy(dx: 1, dy: 1), transform: nil)
        shapeLayer.fillColor = color.cgColor
        shapeLayer.strokeColor = color.withAlphaComponent(0.65).cgColor
        let layer = CALayer.init()
        layer.frame = layerFrame
        layer.addSublayer(shapeLayer)
        return self.imageFromLayer(layer: layer)
    }

    class func imageFromLayer(layer: CALayer) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(layer.frame.size, false, UIScreen.main.scale)
        layer.render(in: UIGraphicsGetCurrentContext()!)
        let outputImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return outputImage!
    }
}
I came up with an answer similar to MCMatan's but without the need for a UIImage extension:
func setThumbnailImage(for slider: UISlider, thumbnailHeight: CGFloat, thumbnailColor: UIColor) {
    let cornerRadius: CGFloat = 25
    let rect = CGRect(x: 0, y: 0, width: thumbnailHeight, height: thumbnailHeight)
    let size = CGSize(width: thumbnailHeight, height: thumbnailHeight)
    UIGraphicsBeginImageContextWithOptions(size, false, 0)
    // this is what makes it round
    UIBezierPath(roundedRect: rect, cornerRadius: cornerRadius).addClip()
    thumbnailColor.setFill()
    UIRectFill(rect)
    if let newImage = UIGraphicsGetImageFromCurrentImageContext() {
        slider.setThumbImage(nil, for: .normal)
        slider.setThumbImage(newImage, for: .normal)
    }
    UIGraphicsEndImageContext()
}
To use:
override func viewDidLoad() {
    super.viewDidLoad()
    setThumbnailImage(for: yourSlider, thumbnailHeight: 20.0, thumbnailColor: UIColor.red)
}
or
func someActionJustFinishedNowUpdateThumbnail() {
    setThumbnailImage(for: yourSlider, thumbnailHeight: 40.0, thumbnailColor: UIColor.blue)
}
or
func setThumbnailToSliderHeight() {
    let sliderHeight = yourSlider.frame.size.height
    setThumbnailImage(for: yourSlider, thumbnailHeight: sliderHeight, thumbnailColor: UIColor.purple)
}
I want just a specific part of my captured image to be cropped in a circular shape right before it's viewed.
Here is the code for capturing still images:
@IBAction func takePhoto(sender: UIButton) {
    self.stillImageOutput!.captureStillImageAsynchronouslyFromConnection(self.stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo)) { (buffer: CMSampleBuffer!, error: NSError!) -> Void in
        var image = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
        var data_image = UIImage(data: image)
        self.previewImage.image = data_image
    }
}
Thank you
if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) {
    if let image = UIImage(data: imageData) {
        // Use the shorter side so the crop square fits inside the photo.
        let square = image.size.width < image.size.height ? CGSize(width: image.size.width, height: image.size.width) : CGSize(width: image.size.height, height: image.size.height)
        // Configure a temporary image view with a circular mask...
        let imageView = UIImageView(frame: CGRect(origin: CGPoint(x: 0, y: 0), size: square))
        imageView.contentMode = UIViewContentMode.ScaleAspectFill
        imageView.image = image
        imageView.layer.cornerRadius = square.width / 2
        imageView.layer.masksToBounds = true
        // ...and render it into a context to get the circular image.
        UIGraphicsBeginImageContext(imageView.bounds.size)
        imageView.layer.renderInContext(UIGraphicsGetCurrentContext()!)
        let result = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        self.previewImage.image = result
    }
}