Mask VisualEffect blur with drawn image - swift

In my Swift app I'm allowing the user to paint with their finger on touch. I'm using CGContext to do this. After the user lifts their finger and the touch ends, I dynamically add a visual effect view on top of the shape, with the same height and width as the drawn shape. What I want to do next is use the shape as a mask for the visual effect view. The problem right now is that if I try to mask the visual effect view with the shape, the masked view does not show unless the origin point of the shape is (0,0). Here is a link to how I'm currently attempting to implement this (https://pastebin.com/JaM9kx4G):
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    swiped = false
    if let touch = touches.first {
        if touch.view == sideView {
            return
        }
        tempPath = UIBezierPath()
        lastPoint = touch.location(in: view)
        tempPath.move(to: lastPoint)
    }
}

func drawLineFrom(fromPoint: CGPoint, toPoint: CGPoint) {
    tempPath.addLine(to: CGPoint(x: toPoint.x, y: toPoint.y))
    UIGraphicsBeginImageContext(view.frame.size)
    let context = UIGraphicsGetCurrentContext()
    otherImageView.image?.draw(in: CGRect(x: 0, y: 0, width: view.frame.size.width, height: view.frame.size.height))
    // 2
    context!.move(to: CGPoint(x: fromPoint.x, y: fromPoint.y))
    context!.addLine(to: CGPoint(x: toPoint.x, y: toPoint.y))
    // 3
    context!.setLineCap(CGLineCap.round)
    context!.setLineWidth(brushWidth)
    context!.setStrokeColor(red: red, green: green, blue: blue, alpha: 1.0)
    context!.setBlendMode(CGBlendMode.normal)
    // 4
    context!.strokePath()
    // 5
    otherImageView.image = UIGraphicsGetImageFromCurrentImageContext()
    otherImageView.alpha = opacity
    UIGraphicsEndImageContext()
}

override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
    swiped = true
    if let touch = touches.first {
        let currentPoint = touch.location(in: view)
        drawLineFrom(fromPoint: lastPoint, toPoint: currentPoint)
        // 7
        lastPoint = currentPoint
    }
}

override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    tempPath.close()
    let tempImage = UIImageView(frame: CGRect(x: 0, y: 0, width: view.frame.size.width, height: view.frame.size.height))
    UIGraphicsBeginImageContext(tempImage.frame.size)
    tempImage.image?.draw(in: CGRect(x: 0, y: 0, width: view.frame.size.width, height: view.frame.size.height), blendMode: CGBlendMode.normal, alpha: 1.0)
    otherImageView.image?.draw(in: CGRect(x: 0, y: 0, width: view.frame.size.width, height: view.frame.size.height), blendMode: CGBlendMode.normal, alpha: opacity)
    let context = UIGraphicsGetCurrentContext()
    let image = UIGraphicsGetImageFromCurrentImageContext()
    tempImage.image = image
    otherImageView.image = nil
    imageView.addSubview(tempImage)
    let blur = VisualEffectView()
    blur.frame = CGRect(x: tempPath.bounds.origin.x, y: tempPath.bounds.origin.y, width: tempPath.bounds.width, height: tempPath.bounds.height)
    blur.blurRadius = 5
    blur.layer.mask = tempImage.layer
}
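
Worth noting: a CALayer mask is positioned in the coordinate space of the layer it masks, not of its superview, which is why the mask only lines up when the shape's origin is (0,0). A minimal sketch of one way around that, reusing tempPath, view and the VisualEffectView from above (shiftedPath and maskLayer are names introduced here for illustration, not from the original post):

let blur = VisualEffectView()
blur.frame = tempPath.bounds
blur.blurRadius = 5
view.addSubview(blur)

// Rebuild the path in the blur view's own coordinate space by shifting it
// back by the blur view's origin, then use a shape layer as the mask
// instead of another view's layer.
let shiftedPath = UIBezierPath(cgPath: tempPath.cgPath)
shiftedPath.apply(CGAffineTransform(translationX: -blur.frame.origin.x,
                                    y: -blur.frame.origin.y))
let maskLayer = CAShapeLayer()
maskLayer.frame = blur.bounds
maskLayer.path = shiftedPath.cgPath
blur.layer.mask = maskLayer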

Related

Swift: Image losing aspect ratio when drawing starts

I am attempting to make an image editing VC for my app and encountered the above issue. Whenever I start drawing on my image, the image warps and loses its aspect ratio.
My full code is:
class DrawImageController: UIViewController {

    var canvasImageView: UIImageView = {
        let iv = UIImageView()
        iv.translatesAutoresizingMaskIntoConstraints = false
        iv.backgroundColor = .yellow
        iv.contentMode = .scaleAspectFit
        return iv
    }()

    var lastTouch = CGPoint.zero

    override func viewDidLoad() {
        super.viewDidLoad()
        setupViews()
    }

    func setupViews() {
        view.backgroundColor = .black
        view.addSubview(canvasImageView)
        canvasImageView.centerYAnchor.constraint(equalTo: view.centerYAnchor).isActive = true
        canvasImageView.leadingAnchor.constraint(equalTo: view.safeAreaLayoutGuide.leadingAnchor).isActive = true
        canvasImageView.trailingAnchor.constraint(equalTo: view.safeAreaLayoutGuide.trailingAnchor).isActive = true
        canvasImageView.heightAnchor.constraint(equalToConstant: 300).isActive = true
        canvasImageView.image = UIImage(named: "testImage")
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        if let firstTouch = touches.first {
            lastTouch = firstTouch.location(in: canvasImageView)
        }
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        if let firstTouch = touches.first {
            let touchLocation = firstTouch.location(in: canvasImageView)
            drawLine(from: lastTouch, to: touchLocation)
            lastTouch = touchLocation
        }
    }

    func drawLine(from: CGPoint, to: CGPoint) {
        UIGraphicsBeginImageContext(canvasImageView.frame.size)
        if let context = UIGraphicsGetCurrentContext() {
            canvasImageView.image?.draw(in: CGRect(x: 0, y: 0, width: canvasImageView.frame.size.width, height: canvasImageView.frame.size.height))
            context.move(to: from)
            context.addLine(to: to)
            context.setLineCap(.round)
            context.setLineWidth(5.0)
            context.setStrokeColor(UIColor.blue.cgColor)
            context.strokePath()
            let image = UIGraphicsGetImageFromCurrentImageContext()
            canvasImageView.image = image
            UIGraphicsEndImageContext()
        }
    }
}
I adapted my draw method from various tutorials on YouTube, GitHub and SO. Where have I gone wrong?
Solved
With the advice from @Sweeper, I have modified my code in setupViews() and drawLine(from:to:) to account for the aspect ratios of the image and the image view.
func setupViews() {
    view.backgroundColor = .black
    view.addSubview(canvasImageView)
    canvasImageView.centerYAnchor.constraint(equalTo: view.centerYAnchor).isActive = true
    canvasImageView.leadingAnchor.constraint(equalTo: view.safeAreaLayoutGuide.leadingAnchor).isActive = true
    canvasImageView.trailingAnchor.constraint(equalTo: view.safeAreaLayoutGuide.trailingAnchor).isActive = true
    let aspectRatio = getImageAspectRatio(image: UIImage(named: "testImage")!)
    let screenWidth = UIScreen.main.bounds.width
    let height = CGFloat(1.0) / aspectRatio * screenWidth
    canvasImageView.heightAnchor.constraint(equalToConstant: height).isActive = true
    canvasImageView.image = UIImage(named: "testImage")
}

func drawLine(from: CGPoint, to: CGPoint) {
    UIGraphicsBeginImageContext(canvasImageView.frame.size)
    guard let context = UIGraphicsGetCurrentContext() else { return }
    if let canvasImage = canvasImageView.image {
        let imageViewAspectRatio = getAspectRatio(frame: canvasImageView.frame)
        let imageAspectRatio = getImageAspectRatio(image: canvasImage)
        if imageViewAspectRatio > imageAspectRatio {
            canvasImageView.image?.draw(in: CGRect(x: 0, y: 0, width: imageAspectRatio * canvasImageView.frame.size.height, height: canvasImageView.frame.size.height))
        } else if imageViewAspectRatio < imageAspectRatio {
            canvasImageView.image?.draw(in: CGRect(x: 0, y: 0, width: canvasImageView.frame.size.width, height: CGFloat(1.0) / imageAspectRatio * canvasImageView.frame.size.width))
        } else {
            canvasImageView.image?.draw(in: CGRect(x: 0, y: 0, width: canvasImageView.frame.size.width, height: canvasImageView.frame.size.height))
        }
        context.move(to: from)
        context.addLine(to: to)
        context.setLineCap(.round)
        context.setLineWidth(5.0)
        context.setStrokeColor(UIColor.blue.cgColor)
        context.strokePath()
        let image = UIGraphicsGetImageFromCurrentImageContext()
        canvasImageView.image = image
        UIGraphicsEndImageContext()
    }
}
The problem is here:
canvasImageView.image?.draw(in: CGRect(x: 0, y: 0, width: canvasImageView.frame.size.width, height: canvasImageView.frame.size.height))
You are drawing the image using the image view's frame. This stretches the image.
You need to draw the image as if contentMode is .scaleAspectFit.
To do this, first determine the image's aspect ratio (W:H). You can do this by accessing the size property of UIImage. Compare this ratio to the aspect ratio of the image view.
If the image's ratio is smaller than the view's, then that means the height at which you draw the image can be the same as the image view height, and the image width can be calculated using the aspect ratio of the image.
If the image's ratio is larger than the view's, then that means the width at which you draw the image can be the same as the image view width, and the image height can be calculated using the aspect ratio of the image.
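
As a concrete illustration of that comparison, a small helper along these lines could compute the draw rectangle (aspectFitRect is a name introduced here for illustration; it assumes the image is drawn from the top-left corner, as in the poster's solution above):

func aspectFitRect(for image: UIImage, in bounds: CGRect) -> CGRect {
    let imageRatio = image.size.width / image.size.height
    let viewRatio = bounds.width / bounds.height
    if imageRatio < viewRatio {
        // Image is relatively taller: pin the height, derive the width.
        let width = bounds.height * imageRatio
        return CGRect(x: 0, y: 0, width: width, height: bounds.height)
    } else {
        // Image is relatively wider (or equal): pin the width, derive the height.
        let height = bounds.width / imageRatio
        return CGRect(x: 0, y: 0, width: bounds.width, height: height)
    }
}

The three-way branch in drawLine could then be replaced by a single canvasImage.draw(in: aspectFitRect(for: canvasImage, in: canvasImageView.bounds)).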

merging imageviews in drawing app swift

I am working on a drawing app. I have three image views:
imageView - contains the base image
tempImageView - for drawing annotations. The drawLineFrom function takes a point and then draws lines on tempImageView:
func drawLineFrom(fromPoint: CGPoint, toPoint: CGPoint) {
    //print("drawLineFrom")
    let mid1 = CGPoint(x: (prevPoint1.x + prevPoint2.x) * 0.5, y: (prevPoint1.y + prevPoint2.y) * 0.5)
    let mid2 = CGPoint(x: (toPoint.x + prevPoint1.x) * 0.5, y: (toPoint.y + prevPoint1.y) * 0.5)
    UIGraphicsBeginImageContextWithOptions(self.tempImageView.bounds.size, false, 0.0)
    if let context = UIGraphicsGetCurrentContext() {
        tempImageView.image?.draw(in: CGRect(x: 0, y: 0, width: self.tempImageView.frame.size.width, height: self.tempImageView.frame.size.height))
        let annotaionPath = UIBezierPath()
        annotaionPath.move(to: CGPoint(x: mid1.x, y: mid1.y))
        annotaionPath.addQuadCurve(to: CGPoint(x: mid2.x, y: mid2.y), controlPoint: CGPoint(x: prevPoint1.x, y: prevPoint1.y))
        annotaionPath.lineCapStyle = CGLineCap.round
        annotaionPath.lineJoinStyle = CGLineJoin.round
        annotaionPath.lineWidth = editorPanelView.brushWidth
        context.setStrokeColor(editorPanelView.drawingColor.cgColor)
        annotaionPath.stroke()
        tempImageView.image = UIGraphicsGetImageFromCurrentImageContext()
        tempImageView.alpha = editorPanelView.opacity
        UIGraphicsEndImageContext()
    }
}
drawingImageView - after each touchesEnded I merge tempImageView into drawingImageView and set tempImageView.image = nil.
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    isDrawing = false
    if !swiped {
        drawLineFrom(fromPoint: lastPoint, toPoint: lastPoint)
    }
    annotationArray.append(annotationsPoints)
    annotationsPoints.removeAll()
    // Merge tempImageView into drawingImageView
    UIGraphicsBeginImageContext(drawingImageView.frame.size)
    drawingImageView.image?.draw(in: CGRect(x: 0, y: 0, width: drawingImageView.frame.size.width, height: drawingImageView.frame.size.height), blendMode: CGBlendMode.normal, alpha: 1.0)
    tempImageView.image?.draw(in: CGRect(x: 0, y: 0, width: drawingImageView.frame.size.width, height: drawingImageView.frame.size.height), blendMode: CGBlendMode.normal, alpha: editorPanelView.opacity)
    drawingImageView.image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    tempImageView.image = nil
}
When the save button is clicked:
let drawingImage = self.drawingImageView.image
let combinedImage = self.imageView.combineWithOverlay(overlayImageView: self.drawingImageView)
and I am saving combinedImage.
The problem is that when I merge tempImageView into drawingImageView, the annotations get blurred. I want to maintain the same clarity. I am not able to find any solution for this. Any help (even if it's just a kick in the right direction) would be appreciated.
I think the issue is with using UIGraphicsBeginImageContext(drawingImageView.frame.size).
The default scale it uses is 1.0, so if you're on a Retina screen the content will be scaled up 2 or 3 times, causing the blurry appearance.
You should use UIGraphicsBeginImageContextWithOptions, like you do in drawLineFrom, with a scale of 0.0, which defaults to the screen's scale.
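
Applied to the merge in touchesEnded, that suggestion would look roughly like this (a sketch, keeping the same view names as above):

// Open the context at the screen's scale (0.0) so the merged image is
// rendered at full Retina resolution instead of 1x.
UIGraphicsBeginImageContextWithOptions(drawingImageView.frame.size, false, 0.0)
let rect = CGRect(origin: .zero, size: drawingImageView.frame.size)
drawingImageView.image?.draw(in: rect, blendMode: .normal, alpha: 1.0)
tempImageView.image?.draw(in: rect, blendMode: .normal, alpha: editorPanelView.opacity)
drawingImageView.image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
tempImageView.image = nil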

strange bug with drawing in UISplitViewController

I'm trying to make a screen with a view in which the user can draw. I created a custom view with this code:
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    swiped = false
    if let touch = touches.first {
        lastPoint = touch.location(in: imageView)
    }
}

override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
    swiped = true
    if let touch = touches.first {
        let currentPoint = touch.location(in: imageView)
        drawLine(fromPoint: lastPoint, toPoint: currentPoint)
        lastPoint = currentPoint
    }
}

override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    if !swiped {
        // draw a single point
        drawLine(fromPoint: lastPoint, toPoint: lastPoint)
    }
}
and the drawing function:
func drawLine(fromPoint: CGPoint, toPoint: CGPoint) {
    UIGraphicsBeginImageContext(imageView.frame.size)
    let context = UIGraphicsGetCurrentContext()
    imageView.image?.draw(in: CGRect(x: 0, y: 0, width: imageView.frame.size.width, height: imageView.frame.size.height))
    context?.move(to: fromPoint)
    context?.addLine(to: toPoint)
    context?.setLineCap(.round)
    context?.setLineWidth(lineWidth)
    context?.setStrokeColor(lineColor.cgColor)
    context?.strokePath()
    imageView.image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
}
When I show that view in a view controller, everything is normal:
But when I show it in the detail view of a UISplitViewController, as the user continues drawing, part of the already drawn image moves and fades out:
I can't find anything about this bug on the web and have no idea what produces such behaviour.
Does anybody have any ideas?
Here is an example project where you can reproduce the bug:
https://github.com/fizzy871/DrawingBug
By the way, in the real project not only the master view of the split view controller but also the navigation bar affects the drawing.
It turned out that it behaves this way because the imageView frame has a fractional size.
I just multiplied the drawing context size by 2 and the problem was solved:
func drawLine(fromPoint: CGPoint, toPoint: CGPoint) {
    // multiply to avoid problems when the imageView frame value is XX.5
    let fixedFrameForDrawing = CGRect(x: 0, y: 0, width: imageView.frame.size.width * 2, height: imageView.frame.size.height * 2)
    let point1 = CGPoint(x: fromPoint.x * 2, y: fromPoint.y * 2)
    let point2 = CGPoint(x: toPoint.x * 2, y: toPoint.y * 2)
    UIGraphicsBeginImageContext(fixedFrameForDrawing.size)
    if let context = UIGraphicsGetCurrentContext() {
        imageView.image?.draw(in: fixedFrameForDrawing)
        context.move(to: point1)
        context.addLine(to: point2)
        context.setLineCap(.round)
        context.setLineWidth(lineWidth * 2)
        context.setStrokeColor(lineColor.cgColor)
        context.strokePath()
        let imageFromContext = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        imageView.image = imageFromContext
    }
}
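
An alternative that follows from the same diagnosis, though it is not part of the original answer, would be to round the fractional frame away before opening the context, for example:

// Rounding the drawing size up to whole points avoids the fractional
// frame (e.g. a height of 333.5) that caused the artefacts.
let drawingSize = CGSize(width: imageView.frame.size.width.rounded(.up),
                         height: imageView.frame.size.height.rounded(.up))
UIGraphicsBeginImageContext(drawingSize)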

How to draw a line in Swift 3

I would like the user to touch 2 points and then a line is drawn between those two points. Here is what I have so far:
func drawline() {
    let context = UIGraphicsGetCurrentContext()
    context!.beginPath()
    context?.move(to: pointA)
    context?.addLine(to: pointB)
    context!.strokePath()
}
pointA is the first point the user touched and pointB is the second point. I get the error:
Thread 1: EXC_BREAKPOINT
Thanks in advance for your help.
To draw a line between two points, the first thing you need is to get the CGPoints from the current UIView; there are several ways to achieve this. For the sake of the sample I am going to use a UITapGestureRecognizer to detect each tap.
The next step, once you have the two points saved, is to draw the line between them. For this you can again use the graphics context as you tried before, or use a CAShapeLayer.
Translating the above into code, we get the following:
class ViewController: UIViewController {

    var tapGestureRecognizer: UITapGestureRecognizer!
    var firstPoint: CGPoint?
    var secondPoint: CGPoint?

    override func viewDidLoad() {
        super.viewDidLoad()
        tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.showMoreActions(touch:)))
        tapGestureRecognizer.numberOfTapsRequired = 1
        view.addGestureRecognizer(tapGestureRecognizer)
    }

    func showMoreActions(touch: UITapGestureRecognizer) {
        let touchPoint = touch.location(in: self.view)
        guard let _ = firstPoint else {
            firstPoint = touchPoint
            return
        }
        guard let _ = secondPoint else {
            secondPoint = touchPoint
            addLine(fromPoint: firstPoint!, toPoint: secondPoint!)
            firstPoint = nil
            secondPoint = nil
            return
        }
    }

    func addLine(fromPoint start: CGPoint, toPoint end: CGPoint) {
        let line = CAShapeLayer()
        let linePath = UIBezierPath()
        linePath.move(to: start)
        linePath.addLine(to: end)
        line.path = linePath.cgPath
        line.strokeColor = UIColor.red.cgColor
        line.lineWidth = 1
        line.lineJoin = kCALineJoinRound
        self.view.layer.addSublayer(line)
    }
}
The above code draws a line every time two points are selected, and you can customize the function as you like.
I hope this helps.
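
As a side note (not from the original answer): the EXC_BREAKPOINT in the question most likely comes from force-unwrapping UIGraphicsGetCurrentContext() outside of draw(_:), where there is no current context. If you prefer the Core Graphics route, the drawing has to happen inside a UIView subclass's draw(_:), roughly like this:

class LineView: UIView {
    var pointA: CGPoint = .zero
    var pointB: CGPoint = .zero

    override func draw(_ rect: CGRect) {
        // Inside draw(_:) UIKit has already set up a current context for us.
        guard let context = UIGraphicsGetCurrentContext() else { return }
        context.beginPath()
        context.move(to: pointA)
        context.addLine(to: pointB)
        context.setStrokeColor(UIColor.red.cgColor)
        context.setLineWidth(1)
        context.strokePath()
    }
}

Setting pointA and pointB and then calling setNeedsDisplay() on the view triggers the redraw.
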
Draw line in Swift 4.1
class MyViewController: UIViewController {

    @IBOutlet weak var imgViewDraw: UIImageView!
    var lastPoint = CGPoint.zero
    var red: CGFloat = 0.0
    var green: CGFloat = 0.0
    var blue: CGFloat = 0.0
    var brushWidth: CGFloat = 10.0
    var opacity: CGFloat = 1.0
    var isSwiping: Bool!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: Touch events

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        isSwiping = false
        if let touch = touches.first {
            lastPoint = touch.location(in: imgViewDraw)
        }
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        isSwiping = true
        if let touch = touches.first {
            let currentPoint = touch.location(in: imgViewDraw)
            UIGraphicsBeginImageContext(self.imgViewDraw.frame.size)
            self.imgViewDraw.image?.draw(in: CGRect(x: 0, y: 0, width: self.imgViewDraw.frame.size.width, height: self.imgViewDraw.frame.size.height))
            UIGraphicsGetCurrentContext()?.move(to: CGPoint(x: lastPoint.x, y: lastPoint.y))
            UIGraphicsGetCurrentContext()?.addLine(to: CGPoint(x: currentPoint.x, y: currentPoint.y))
            UIGraphicsGetCurrentContext()?.setLineCap(CGLineCap.round)
            UIGraphicsGetCurrentContext()?.setLineWidth(self.brushWidth)
            UIGraphicsGetCurrentContext()?.setStrokeColor(red: red, green: green, blue: blue, alpha: 1.0)
            UIGraphicsGetCurrentContext()?.strokePath()
            self.imgViewDraw.image = UIGraphicsGetImageFromCurrentImageContext()
            UIGraphicsEndImageContext()
            lastPoint = currentPoint
        }
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        if !isSwiping {
            // This is a single touch, draw a point
            UIGraphicsBeginImageContext(self.imgViewDraw.frame.size)
            self.imgViewDraw.image?.draw(in: CGRect(x: 0, y: 0, width: self.imgViewDraw.frame.size.width, height: self.imgViewDraw.frame.size.height))
            UIGraphicsGetCurrentContext()?.setLineCap(CGLineCap.round)
            UIGraphicsGetCurrentContext()?.setLineWidth(self.brushWidth)
            UIGraphicsGetCurrentContext()?.move(to: CGPoint(x: lastPoint.x, y: lastPoint.y))
            UIGraphicsGetCurrentContext()?.addLine(to: CGPoint(x: lastPoint.x, y: lastPoint.y))
            UIGraphicsGetCurrentContext()?.setStrokeColor(red: red, green: green, blue: blue, alpha: 1.0)
            UIGraphicsGetCurrentContext()?.strokePath()
            self.imgViewDraw.image = UIGraphicsGetImageFromCurrentImageContext()
            UIGraphicsEndImageContext()
        }
    }
}

Drawing on UIImage in UIScrollView

The Problem
This is going to sound crazy. I'm making a drawing app and I want users to be able to draw on images that are bigger or smaller than the screen. When the user selects an image from their photo library, it is put into an image view inside a scroll view. The user draws on image views that have the same dimensions as the selected image, held in another scroll view on top of the first one. The scrolling of the two scroll views is synchronized, so when you draw and then scroll, the drawing appears above the image in the right place. For some reason, however, when the user selects a long image (say 400 x 2000), drawing works at the top of the image, but when you scroll down to draw, the lines you draw go to the top. I can't figure out what's going wrong... My code is below.
About The Code
cameraStill is the image view containing the image
drawable is the height of the image
myScroll is the scroll view for the image
mainImageView, tempImageView, undo1, undo2, undo3 are the drawing layers
drawScroll is the scroll view for the drawing layers
Image Selection
func imagePickerController(picker: UIImagePickerController, didFinishPickingImage image: UIImage!, editingInfo: [NSObject : AnyObject]!) {
    self.dismissViewControllerAnimated(true, completion: { () -> Void in
    })
    if (image != nil) {
        self.cameraStill.contentMode = UIViewContentMode.ScaleAspectFit
        cameraStill.frame = CGRectMake(0, 0, screenWidth, screenWidth*(image.size.height/image.size.width))
        // change uiimageviews size
        mainImageView.frame = CGRectMake(0, 0, screenWidth, screenWidth*(image.size.height/image.size.width))
        tempImageView.frame = CGRectMake(0, 0, screenWidth, screenWidth*(image.size.height/image.size.width))
        undo1.frame = CGRectMake(0, 0, screenWidth, screenWidth*(image.size.height/image.size.width))
        undo2.frame = CGRectMake(0, 0, screenWidth, screenWidth*(image.size.height/image.size.width))
        undo3.frame = CGRectMake(0, 0, screenWidth, screenWidth*(image.size.height/image.size.width))
        drawable = screenWidth*(image.size.height/image.size.width)
        myScroll.contentSize = CGSize(width: screenWidth, height: screenWidth*(image.size.height/image.size.width))
        drawScroll.contentSize = CGSize(width: screenWidth, height: screenWidth*(image.size.height/image.size.width))
        if (screenWidth*(image.size.height/image.size.width) > (screenHeight-130)) {
            myScroll.scrollEnabled = true
            drawScroll.scrollEnabled = true
        }
        else {
            myScroll.scrollEnabled = false
            drawScroll.scrollEnabled = false
            cameraStill.center = CGPoint(x: screenWidth/2, y: (screenHeight-130)/2)
            mainImageView.center = CGPoint(x: screenWidth/2, y: (screenHeight-130)/2)
            tempImageView.center = CGPoint(x: screenWidth/2, y: (screenHeight-130)/2)
            undo1.center = CGPoint(x: screenWidth/2, y: (screenHeight-130)/2)
            undo2.center = CGPoint(x: screenWidth/2, y: (screenHeight-130)/2)
            undo3.center = CGPoint(x: screenWidth/2, y: (screenHeight-130)/2)
        }
        self.camera!.stopCamera()
    }
    //drawView.alpha = 1.0
}
Drawing
override func touchesBegan(touches: Set<NSObject>, withEvent event: UIEvent) {
    println("began")
    if (drawingEnabled == true) {
        c1 = 3
        closeAllExtras()
        swiped = false
        if let touch = touches.first as? UITouch {
            lastPoint = touch.locationInView(self.view)
        }
    }
}
func drawLineFrom(fromPoint: CGPoint, toPoint: CGPoint) {
    //if (fromPoint.y > 50 && fromPoint.y < screenHeight-80 && toPoint.y > 50 && toPoint.y < screenHeight-80) {
    // 1
    UIGraphicsBeginImageContext(CGSize(width: view.frame.size.width, height: drawable))
    let context = UIGraphicsGetCurrentContext()
    tempImageView.image?.drawInRect(CGRect(x: 0, y: 0, width: view.frame.size.width, height: drawable))
    // 2
    CGContextMoveToPoint(context, fromPoint.x, fromPoint.y)
    CGContextAddLineToPoint(context, toPoint.x, toPoint.y)
    // 3
    CGContextSetLineCap(context, kCGLineCapRound)
    CGContextSetLineWidth(context, brushWidth)
    CGContextSetRGBStrokeColor(context, red, green, blue, 1.0)
    CGContextSetBlendMode(context, kCGBlendModeNormal)
    // 4
    CGContextStrokePath(context)
    // 5
    tempImageView.image = UIGraphicsGetImageFromCurrentImageContext()
    tempImageView.alpha = opacity
    UIGraphicsEndImageContext()
    //}
}
override func touchesMoved(touches: Set<NSObject>, withEvent event: UIEvent) {
    // 6
    if (drawingEnabled == true) {
        swiped = true
        if let touch = touches.first as? UITouch {
            let currentPoint = touch.locationInView(view)
            drawLineFrom(lastPoint, toPoint: currentPoint)
            // 7
            lastPoint = currentPoint
        }
    }
}
func mergeViewContext(v1: UIImageView, v2: UIImageView) {
    UIGraphicsBeginImageContext(v1.frame.size)
    v1.image?.drawInRect(CGRect(x: 0, y: 0, width: view.frame.size.width, height: drawable), blendMode: kCGBlendModeNormal, alpha: 1.0)
    v2.image?.drawInRect(CGRect(x: 0, y: 0, width: view.frame.size.width, height: drawable), blendMode: kCGBlendModeNormal, alpha: 1.0)
    v1.image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    v2.image = nil
}
override func touchesEnded(touches: Set<NSObject>, withEvent event: UIEvent) {
    if (drawingEnabled == true) {
        if !swiped {
            // draw a single point
            drawLineFrom(lastPoint, toPoint: lastPoint)
        }
        mergeViewContext(mainImageView, v2: undo1)
        undo1.image = undo2.image
        undo2.image = nil
        undo2.image = undo3.image
        undo3.image = nil
        UIGraphicsBeginImageContext(undo3.frame.size)
        undo3.image?.drawInRect(CGRect(x: 0, y: 0, width: view.frame.size.width, height: drawable), blendMode: kCGBlendModeNormal, alpha: 1.0)
        tempImageView.image?.drawInRect(CGRect(x: 0, y: 0, width: view.frame.size.width, height: drawable), blendMode: kCGBlendModeNormal, alpha: opacity)
        undo3.image = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        tempImageView.image = nil
    }
}
Synching Both Scroll Views
func scrollViewDidScroll(scrollView: UIScrollView) {
    if (scrollView == drawScroll) {
        let offset = scrollView.contentOffset
        myScroll.setContentOffset(offset, animated: false)
    }
}
I got it to work by correcting the following values with the offset of the scroll view. However, I get some blurring for long images and a very strange bug with short ones. No idea what's wrong.
CGContextMoveToPoint(context, fromPoint.x, fromPoint.y)
CGContextAddLineToPoint(context, toPoint.x, toPoint.y)
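
For what it's worth, the offset correction presumably means adding the drawing scroll view's contentOffset to the touch points before stroking, along these lines (a sketch, not the poster's exact code; drawScroll is the scroll view from the question):

// Touch points are in self.view coordinates, but the drawing context covers
// the full image height, so shift the points by the scroll offset.
let yOffset = drawScroll.contentOffset.y
CGContextMoveToPoint(context, fromPoint.x, fromPoint.y + yOffset)
CGContextAddLineToPoint(context, toPoint.x, toPoint.y + yOffset)

The blurring on long images is likely the 1x scale of UIGraphicsBeginImageContext again, so UIGraphicsBeginImageContextWithOptions with a scale of 0 may help there.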