How to implement different gesture recognizers on the same image views? - iphone

I am implementing rotation, pinch, and tap gesture recognizers in my app. I have an image view that shows the user's image, and a button that moves to the stamps view, which contains 120 scrollable stamp images of 1000*100. The problem is that when I choose one stamp image, the gestures work fine. But when I go back to the stamp view and choose another stamp, the first stamp becomes static and no longer recognizes any gesture; only the current stamp responds.
What I am trying to do is select multiple stamps and then be able to rotate, stretch, and pinch each of them.
Here is the code I am using. Please help me figure out how to achieve this.
-(void)viewWillAppear:(BOOL)animated
{
    if (stampImageView) {
        [stampImageView release];
    }
    stampImageView = [[UIImageView alloc] initWithFrame:CGRectMake(self.view.center.x-100, 200, 80, 80)];
    stampImageView.tag = (int)mAppDel.frameImageString;
    NSLog(@"tag is %d", stampImageView.tag);
    stampImageView.userInteractionEnabled = YES;
    if (mAppDel.frameImageString)
        stampImageView.image = [UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:mAppDel.frameImageString ofType:@"png"]];
    [self.view addSubview:stampImageView];
    stampImageView.userInteractionEnabled = YES;
    [self.view bringSubviewToFront:stampImageView];

    UIRotationGestureRecognizer *rotationGesture = [[UIRotationGestureRecognizer alloc] initWithTarget:self action:@selector(rotatePiece:)];
    [stampImageView addGestureRecognizer:rotationGesture];
    [rotationGesture release];

    UIPinchGestureRecognizer *pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(scalePiece:)];
    [pinchGesture setDelegate:self];
    [stampImageView addGestureRecognizer:pinchGesture];
    [pinchGesture release];

    UIPanGestureRecognizer *panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(panPiece:)];
    [panGesture setMaximumNumberOfTouches:1];
    [panGesture setDelegate:self];
    [stampImageView addGestureRecognizer:panGesture];
    [panGesture release];
}

Did you try using the delegate method
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer
shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer;
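A minimal sketch of that delegate method, assuming this view controller conforms to UIGestureRecognizerDelegate and is set as the delegate of each recognizer, so pinch, rotation, and pan can be recognized at the same time on a stamp:
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer
shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
{
    // Only combine recognizers that are attached to the same stamp image view.
    return gestureRecognizer.view == otherGestureRecognizer.view;
}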

I implemented this logic for MonoTouch, but you can use it in your app:
void AddGestureRecognizersToImage (UIImageView imgView)
{
    imgView.UserInteractionEnabled = true;
    // rotate the images
    var rotationGesture = new UIRotationGestureRecognizer (this, new Selector ("RotateImage"));
    imgView.AddGestureRecognizer (rotationGesture);
    // Zoom the image
    var pinchGesture = new UIPinchGestureRecognizer (this, new Selector ("ScaleImage"));
    //pinchGesture.Enabled = true;
    pinchGesture.Delegate = new GestureDelegate (this);
    imgView.AddGestureRecognizer (pinchGesture);
    var panGesture = new UIPanGestureRecognizer (this, new Selector ("PanImage"));
    //panGesture.Enabled = true;
    panGesture.MaximumNumberOfTouches = 2;
    panGesture.Delegate = new GestureDelegate (this);
    imgView.AddGestureRecognizer (panGesture);
    var longPressGesture = new UILongPressGestureRecognizer (this, new Selector ("ShowResetMenu"));
    imgView.AddGestureRecognizer (longPressGesture);
}

void AdjustAnchorPointForGestureRecognizer (UIGestureRecognizer gestureRecognizer)
{
    if (gestureRecognizer.State == UIGestureRecognizerState.Began)
    {
        var image = gestureRecognizer.View;
        var locationInView = gestureRecognizer.LocationInView (image);
        var locationInSuperview = gestureRecognizer.LocationInView (image.Superview);
        image.Layer.AnchorPoint = new PointF (locationInView.X / image.Bounds.Size.Width, locationInView.Y / image.Bounds.Size.Height);
        image.Center = locationInSuperview;
    }
}

[Export("RotateImage")]
void RotateImage (UIRotationGestureRecognizer gestureRecognizer)
{
    AdjustAnchorPointForGestureRecognizer (gestureRecognizer);
    if (gestureRecognizer.State == UIGestureRecognizerState.Began || gestureRecognizer.State == UIGestureRecognizerState.Changed)
    {
        gestureRecognizer.View.Transform *= CGAffineTransform.MakeRotation (gestureRecognizer.Rotation);
        // Reset the gesture recognizer's rotation - the next callback will get a delta from the current rotation.
        gestureRecognizer.Rotation = 0;
    }
}

// Zoom the image by the current scale
[Export("ScaleImage")]
void ScaleImage (UIPinchGestureRecognizer gestureRecognizer)
{
    AdjustAnchorPointForGestureRecognizer (gestureRecognizer);
    if (gestureRecognizer.State == UIGestureRecognizerState.Began || gestureRecognizer.State == UIGestureRecognizerState.Changed)
    {
        gestureRecognizer.View.Transform *= CGAffineTransform.MakeScale (gestureRecognizer.Scale, gestureRecognizer.Scale);
        // Reset the gesture recognizer's scale - the next callback will get a delta from the current scale.
        gestureRecognizer.Scale = 1;
    }
}

// Shift the image's center by the pan amount
[Export("PanImage")]
void PanImage (UIPanGestureRecognizer gestureRecognizer)
{
    gestureRecognizer.Enabled = true;
    AdjustAnchorPointForGestureRecognizer (gestureRecognizer);
    var image = gestureRecognizer.View;
    if (gestureRecognizer.State == UIGestureRecognizerState.Began || gestureRecognizer.State == UIGestureRecognizerState.Changed)
    {
        var translation = gestureRecognizer.TranslationInView (this.window);
        gestureRecognizer.View.Center = new PointF (gestureRecognizer.View.Center.X + translation.X, gestureRecognizer.View.Center.Y + translation.Y);
        //image.Center = new PointF (image.Center.X + translation.X, image.Center.Y + translation.Y);
        // Reset the gesture recognizer's translation to {0, 0} - the next callback will get a delta from the current position.
        gestureRecognizer.SetTranslation (PointF.Empty, image);
    }
}

Related

Users should be able to drag the images and place them on an image view in iOS

I am creating an app in which users can design their own stage with the images available there. I have created buttons as images; now I want some code that lets the user drag and drop the images (buttons) into a particular image view area.
You can achieve this with tap gestures (e.g. tap first image view, tap on destination image view), but it's not very intuitive. Doing a proper drag-and-drop with a UIPanGestureRecognizer will be far more intuitive for your end user.
Technically, you're not dragging an image from the image view, but rather you'll actually be dragging the image view itself. (An image, itself, has no visual representation without an image view.) And when you let go, you'll animate the changing of the image view frames to complete the illusion.
If you had an NSArray of image views, you could add a gesture recognizer to their superview:
@property (nonatomic, strong) NSMutableArray *imageViews;

- (void)viewDidLoad
{
    [super viewDidLoad];
    [self createImageViewArray];
    UIPanGestureRecognizer *gesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)];
    [self.view addGestureRecognizer:gesture];
}

- (void)createImageViewArray
{
    self.imageViews = [NSMutableArray array];
}

- (void)handlePan:(UIPanGestureRecognizer *)gesture
{
    static UIImageView *draggedImage = nil;
    static CGRect draggedImageOriginalFrame;

    CGPoint location = [gesture locationInView:gesture.view];

    if (gesture.state == UIGestureRecognizerStateBegan)
    {
        draggedImage = [self determineImageForLocation:location];
        if (draggedImage)
        {
            draggedImageOriginalFrame = draggedImage.frame;
            [draggedImage.superview bringSubviewToFront:draggedImage];
        }
    }
    else if (gesture.state == UIGestureRecognizerStateChanged && draggedImage != nil)
    {
        CGPoint translation = [gesture translationInView:gesture.view];
        CGRect frame = draggedImageOriginalFrame;
        frame.origin.x += translation.x;
        frame.origin.y += translation.y;
        draggedImage.frame = frame;
    }
    else if (draggedImage != nil && (gesture.state == UIGestureRecognizerStateEnded ||
                                     gesture.state == UIGestureRecognizerStateCancelled ||
                                     gesture.state == UIGestureRecognizerStateFailed))
    {
        UIImageView *droppedOver = nil;
        if (gesture.state == UIGestureRecognizerStateEnded)
            droppedOver = [self draggedImageView:draggedImage toLocation:location];

        if (droppedOver == nil)
        {
            [UIView animateWithDuration:0.25
                             animations:^{
                                 draggedImage.frame = draggedImageOriginalFrame;
                             }];
        }
        else
        {
            [droppedOver.superview bringSubviewToFront:droppedOver];
            [UIView animateWithDuration:0.25
                             animations:^{
                                 draggedImage.frame = droppedOver.frame;
                                 droppedOver.frame = draggedImageOriginalFrame;
                             }];
        }
    }
}

- (UIImageView *)draggedImageView:(UIImageView *)draggedView toLocation:(CGPoint)location
{
    for (UIImageView *imageview in self.imageViews)
        if (CGRectContainsPoint(imageview.frame, location) && imageview != draggedView)
            return imageview;
    return nil;
}

- (UIImageView *)determineImageForLocation:(CGPoint)location
{
    return [self draggedImageView:nil toLocation:location];
}

Pinch gesture not working with imageView

I am trying to zoom an imageView in and out. Here is the code:
- (void)pinch:(UIPinchGestureRecognizer *)gesture
{
    if (handSelected == YES)
    {
        if (gesture.state == UIGestureRecognizerStateEnded || gesture.state == UIGestureRecognizerStateChanged)
        {
            NSLog(@"gesture.scale = %f", gesture.scale);
            CGFloat currentScale = self.imgHand.frame.size.width / self.imgHand.bounds.size.width;
            CGFloat newScale = currentScale * gesture.scale;
            if (newScale < 1.0) {
                newScale = 1.0;
            }
            if (newScale > 4.0) {
                newScale = 4.0;
            }
            CGAffineTransform transform = CGAffineTransformMakeScale(newScale, newScale);
            self.imgHand.transform = transform;
            gesture.scale = 1;
        }
    }
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    [self adjustRingPressed:self];
    self.view.multipleTouchEnabled = YES;
    self.imgHand.multipleTouchEnabled = YES;
    UIPinchGestureRecognizer *gst = [[UIPinchGestureRecognizer alloc] initWithTarget:self.imgHand action:@selector(pinch:)];
    [gst setDelegate:self];
    [self.imgHand addGestureRecognizer:gst];
}
It seems that my pinch code never runs.
Try adding:
self.imgHand.userInteractionEnabled = TRUE;
By default, a UIImageView has userInteractionEnabled set to FALSE.
And make the handler a function of the gesture, like the template code below:
- (IBAction)handlePinch:(UIPinchGestureRecognizer *)recognizer
{
    recognizer.view.transform = CGAffineTransformScale(recognizer.view.transform, recognizer.scale, recognizer.scale);
    recognizer.scale = 1;
}

Scaling UIImageView inside UIScrollView while maintaining the rotation

I have a problem scaling a UIImageView that is placed inside a UIScrollView. I have googled and checked all the questions related to my problem on StackOverflow, and tried all the answers posted there as well. Nothing worked for me.
First I place the UIImageView inside the UIScrollView in the nib file, take the image from the camera roll, and fill the image view with it. Then I use a UIRotationGestureRecognizer to rotate the image.
Here is the code that I am trying to do.
- (void)viewDidLoad
{
    [super viewDidLoad];
    NSLog(@"%@", [[UIDevice currentDevice] model]);
    // Do any additional setup after loading the view, typically from a nib.
    self.imagePicker = [[[UIImagePickerController alloc] init] autorelease];
    self.picChosenImageView.layer.shouldRasterize = YES;
    self.picChosenImageView.layer.rasterizationScale = [UIScreen mainScreen].scale;
    self.picChosenImageView.layer.contents = (id)[UIImage imageNamed:@"test"].CGImage;
    self.picChosenImageView.layer.shadowColor = [UIColor blackColor].CGColor;
    self.picChosenImageView.layer.shadowOpacity = 0.8f;
    self.picChosenImageView.layer.shadowRadius = 8;
    self.picChosenImageView.layer.shadowPath = [UIBezierPath bezierPathWithRect:self.picChosenImageView.bounds].CGPath;
    UIRotationGestureRecognizer *rotationRecognizer = [[[UIRotationGestureRecognizer alloc] initWithTarget:self
                                                                                                    action:@selector(handleRotate:)] autorelease];
    rotationRecognizer.delegate = self;
    [self.picChosenImageView addGestureRecognizer:rotationRecognizer];
    self.containerView.delegate = self;
    self.containerView.contentSize = self.picChosenImageView.layer.frame.size;
    self.containerView.maximumZoomScale = 4.0f;
    self.containerView.minimumZoomScale = 1.0f;
    angle = 0.0f;
    useRotation = 0.0;
    isRotationStarted = FALSE;
    isZoomingStarted = FALSE;
}

- (void)lockZoom
{
    maximumZoomScale = self.containerView.maximumZoomScale;
    minimumZoomScale = self.containerView.minimumZoomScale;
    self.containerView.maximumZoomScale = 1.0;
    self.containerView.minimumZoomScale = 1.0;
    self.containerView.clipsToBounds = false;
    self.containerView.scrollEnabled = false;
}

- (void)unlockZoom
{
    self.containerView.maximumZoomScale = maximumZoomScale;
    self.containerView.minimumZoomScale = minimumZoomScale;
    self.containerView.clipsToBounds = true;
    self.containerView.scrollEnabled = true;
}

#pragma mark - ScrollView delegate methods

- (UIView *)viewForZoomingInScrollView:(UIScrollView *)scrollView
{
    return self.picChosenImageView;
}

- (void)scrollViewDidZoom:(UIScrollView *)scrollView
{
    CGRect frame = self.picChosenImageView.frame;
    frame.origin = CGPointZero;
    self.picChosenImageView.frame = frame;
    //self.picChosenImageView.transform = prevTransform;
}

- (void)scrollViewWillBeginZooming:(UIScrollView *)scrollView withView:(UIView *)view
{
    if (!isZoomingStarted)
    {
        self.picChosenImageView.transform = CGAffineTransformRotate(self.picChosenImageView.transform, angle);
        NSLog(@"The zooming started");
        isZoomingStarted = TRUE;
        CGSize contentSize = self.containerView.bounds.size;
        CGRect contentFrame = self.containerView.bounds;
        NSLog(@"frame on start: %@", NSStringFromCGRect(contentFrame));
        NSLog(@"size on start: %@", NSStringFromCGSize(contentSize));
        //prevTransform = self.picChosenImageView.transform;
    }
}

- (void)scrollViewDidEndZooming:(UIScrollView *)scrollView withView:(UIView *)view atScale:(float)scale
{
    if (isZoomingStarted)
    {
        self.picChosenImageView.transform = CGAffineTransformRotate(self.picChosenImageView.transform, angle);
        isZoomingStarted = FALSE;
        CGSize contentSize = self.containerView.contentSize;
        CGRect contentFrame = self.containerView.bounds;
        NSLog(@"frame on end: %@", NSStringFromCGRect(contentFrame));
        NSLog(@"size on end: %@", NSStringFromCGSize(contentSize));
    }
}

#pragma mark - GestureRecognizer methods

- (void)handleRotate:(UIRotationGestureRecognizer *)recognizer
{
    if (isZoomingStarted == FALSE)
    {
        if ([recognizer state] == UIGestureRecognizerStateBegan)
        {
            angle = 0.0f;
            [self lockZoom];
        }
        useRotation += recognizer.rotation;
        while (useRotation < -M_PI)
        {
            useRotation += M_PI * 2;
        }
        while (useRotation > M_PI)
        {
            useRotation -= M_PI * 2;
        }
        NSLog(@"The rotated value is %f", RADIANS_TO_DEGREES(useRotation));
        self.picChosenImageView.transform = CGAffineTransformRotate([self.picChosenImageView transform],
                                                                    recognizer.rotation);
        [recognizer setRotation:0];
        if ([recognizer state] == UIGestureRecognizerStateEnded)
        {
            angle = useRotation;
            useRotation = 0.0f;
            isRotationStarted = FALSE;
            self.containerView.hidden = NO;
            //prevTransform = self.picChosenImageView.transform;
            [self unlockZoom];
        }
    }
}
My problem is, I can zoom in and out successfully, and I can rotate the UIImageView as I want to. But after rotating the UIImageView to a certain angle, when I try to zoom in, the image view snaps back to its original position (rotates itself back to zero degrees) and only then does the zooming happen. I want to retain the rotation and also zoom. I tried saving the previous transform and assigning it back in the scrollViewDidZoom: and scrollViewWillBeginZooming: delegate methods. Neither worked. Please help me to spot the mistake I am overlooking.
try using CGAffineTransformScale instead of just resizing the frame for zooming:
anImage.transform = CGAffineTransformScale(anImage.transform, 2.0, 2.0);
Changing the transform for scaling might fix your rotation issue.
Hope this helps.
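A minimal sketch of that suggestion in a pinch handler (assuming the image view outlet is picChosenImageView as in the question and the recognizer is attached to it): scale the view's existing transform so any rotation already applied is preserved, then reset the recognizer's scale so each callback delivers a delta.
- (void)handlePinch:(UIPinchGestureRecognizer *)recognizer
{
    if (recognizer.state == UIGestureRecognizerStateBegan ||
        recognizer.state == UIGestureRecognizerStateChanged) {
        // Build on the existing transform so the current rotation is kept.
        self.picChosenImageView.transform = CGAffineTransformScale(self.picChosenImageView.transform,
                                                                   recognizer.scale, recognizer.scale);
        // Reset so the next callback reports a delta relative to the current scale.
        recognizer.scale = 1.0;
    }
}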
I had the same problem. The UIScrollView takes control of the UIImageView and applies a transform that does not include the rotation.
So I do not give the image view to the scroll view for zooming, and I add a UIPinchGestureRecognizer for scaling instead.
func viewForZoomingInScrollView(scrollView: UIScrollView) -> UIView? {
    return nil
}
Dragging is still working :)
// viewDidLoad
var pinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(pinchRecognized))
scrollView.addGestureRecognizer(pinchGestureRecognizer)

func pinchRecognized(sender: UIPinchGestureRecognizer) {
    if sender.state == .Began || sender.state == .Changed {
        let scale = sender.scale
        imageView.transform = CGAffineTransformScale(imageView.transform, scale, scale)
        sender.scale = 1
    }
}

How to zoom while keeping the previous rotation transform as it is?

I have an imageView to which I have added a UIPinchGestureRecognizer and a UIRotationGestureRecognizer.
In the pinch gesture handler I apply a scale transform to the view, and in the rotation gesture handler I apply a rotation transform to it.
The problem is when I rotate the imageView and then start zooming: zooming always begins from the normal state.
What I want is that when I rotate it to, say, 30 degrees clockwise and then zoom it, it should zoom while keeping that 30-degree clockwise rotation.
Here is the code:
- (void)viewDidLoad
{
    [super viewDidLoad];
    //setting up the image view
    mTotalRotation = 0.0;
    self.imageView.image = self.photo;
    self.imageView.userInteractionEnabled = YES;
    UIRotationGestureRecognizer *twoFingersRotate =
        [[[UIRotationGestureRecognizer alloc] initWithTarget:self action:@selector(twoFingersRotate:)] autorelease];
    [self.imageView addGestureRecognizer:twoFingersRotate];
    UIPinchGestureRecognizer *pinchGesture = [[[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(pinchZoom:)] autorelease];
    [self.imageView addGestureRecognizer:pinchGesture];
    // Do any additional setup after loading the view from its nib.
}

// Rotation gesture handler
- (void)twoFingersRotate:(UIRotationGestureRecognizer *)recognizer
{
    if ([recognizer state] == UIGestureRecognizerStateEnded) {
        mTotalRotation += recognizer.rotation;
        return;
    }
    self.imageView.transform = CGAffineTransformMakeRotation(mTotalRotation + recognizer.rotation);
}

// Pinch gesture handler
- (void)pinchZoom:(UIPinchGestureRecognizer *)recognizer
{
    self.imageView.transform = CGAffineTransformMakeScale(recognizer.scale, recognizer.scale);
}
Change the line:
self.imageView.transform = CGAffineTransformMakeRotation(mTotalRotation + recognizer.rotation);
with:
self.imageView.transform = CGAffineTransformRotate(self.imageView.transform, recognizer.rotation);
And the line:
self.imageView.transform = CGAffineTransformMakeScale(recognizer.scale, recognizer.scale);
with:
self.imageView.transform = CGAffineTransformScale(self.imageView.transform, recognizer.scale, recognizer.scale);
Edit
To limit the scale, you can do the following:
CGAffineTransform transform = self.imageView.transform;
float newScale = recognizer.scale * sqrt(transform.a*transform.a + transform.c*transform.c);
if (newScale > scaleLimit) {
    self.imageView.transform = CGAffineTransformScale(transform, recognizer.scale, recognizer.scale);
}
Maybe you could have your scale and rotate view as a subview of a view that you zoom?
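A rough sketch of that idea (the containerView and imageView outlets and the handleRotate: selector are hypothetical names, ARC assumed): let the scroll view zoom a plain container view while the rotation recognizer lives on the image view nested inside it, so the zoom and rotation transforms never overwrite each other.
// Sketch only: containerView and imageView are assumed outlets,
// and this view controller is the scroll view's delegate.
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self.containerView addSubview:self.imageView];   // image view sits inside the zoomed container
    self.imageView.userInteractionEnabled = YES;

    UIRotationGestureRecognizer *rotate =
        [[UIRotationGestureRecognizer alloc] initWithTarget:self action:@selector(handleRotate:)];
    [self.imageView addGestureRecognizer:rotate];
}

// The scroll view scales the container; the image view's own transform holds only the rotation.
- (UIView *)viewForZoomingInScrollView:(UIScrollView *)scrollView
{
    return self.containerView;
}

- (void)handleRotate:(UIRotationGestureRecognizer *)recognizer
{
    self.imageView.transform = CGAffineTransformRotate(self.imageView.transform, recognizer.rotation);
    recognizer.rotation = 0;
}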

UIPinchGestureRecognizer problem

I am using UIPinchGestureRecognizer. Can I write two actions, one for pinch in and one for pinch out? Is there any specific method (delegate) for this? I have written only one, and it is called when I pinch in...
UIPinchGestureRecognizer *pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(handlePinchGesture:)];
[self.view addGestureRecognizer:pinchGesture];
[pinchGesture release];
You could check the gesture's .scale in -handlePinchGesture:. If it is < 1, it is a pinch-in; otherwise it is a pinch-out.
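A minimal sketch of that check, reusing the handlePinchGesture: selector from the question's code:
- (void)handlePinchGesture:(UIPinchGestureRecognizer *)gesture
{
    if (gesture.state == UIGestureRecognizerStateChanged ||
        gesture.state == UIGestureRecognizerStateEnded) {
        if (gesture.scale < 1.0) {
            // Fingers moved toward each other: pinch in
            NSLog(@"pinch in, scale = %f", gesture.scale);
        } else {
            // Fingers moved apart: pinch out
            NSLog(@"pinch out, scale = %f", gesture.scale);
        }
    }
}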
// Zoom the image by the current scale
[Export("ScaleImage")]
void ScaleImage (UIPinchGestureRecognizer gestureRecognizer)
{
    AdjustAnchorPointForGestureRecognizer (gestureRecognizer);
    if (gestureRecognizer.State == UIGestureRecognizerState.Began || gestureRecognizer.State == UIGestureRecognizerState.Changed)
    {
        gestureRecognizer.View.Transform *= CGAffineTransform.MakeScale (gestureRecognizer.Scale, gestureRecognizer.Scale);
        // Reset the gesture recognizer's scale - the next callback will get a delta from the current scale.
        gestureRecognizer.Scale = 1;
    }
}

void AdjustAnchorPointForGestureRecognizer (UIGestureRecognizer gestureRecognizer)
{
    if (gestureRecognizer.State == UIGestureRecognizerState.Began)
    {
        var image = gestureRecognizer.View;
        var locationInView = gestureRecognizer.LocationInView (image);
        var locationInSuperview = gestureRecognizer.LocationInView (image.Superview);
        image.Layer.AnchorPoint = new PointF (locationInView.X / image.Bounds.Size.Width, locationInView.Y / image.Bounds.Size.Height);
        image.Center = locationInSuperview;
    }
}