I have a function for setting up a timer. How can I perform an action on every 3rd iteration of a for loop (iterations 1, 2, 3 complete one group; then 4 = 1, 5 = 2, 6 = 3, and so on for 6 or more iterations)?
/// Runs a repeating `DispatchSourceTimer` on the main queue, performing one of
/// three actions on each tick depending on the tick count, and cancelling the
/// timer after `rep` ticks.
///
/// Fixes over the original sketch:
/// - `num ++` is not valid Swift (the `++` operator was removed in Swift 3).
/// - The work ran in an immediate `for` loop instead of on timer ticks.
/// - The timer was never scheduled or resumed, so it could never fire.
/// - `rep` was unused (the stop condition was hard-coded to 12).
///
/// - Parameters:
///   - interval: Seconds between ticks.
///   - rep: Total number of ticks before the timer is cancelled.
func duration(interval:Double,rep:Int){
    let queue = DispatchQueue.main
    let timer = DispatchSource.makeTimerSource(queue: queue)
    var num = 0
    timer.schedule(deadline: .now() + interval, repeating: interval)
    timer.setEventHandler {
        num += 1
        if num % 3 == 0 {
            // perform the "every third tick" action here
        } else if num % 2 == 0 {
            // perform the "even tick" action here
        } else {
            // perform the default action here
        }
        if num >= rep {
            timer.cancel()
        }
    }
    // A dispatch source timer is created suspended; it never fires until resumed.
    timer.resume()
}
The SwiftUI way to do this is a Timer publisher.
Example:
/// Counts whole seconds since the view appeared via a Timer publisher and
/// reports whether the elapsed-second count is a multiple of 3 or 2.
struct ContentView: View {
    // `#State` in the original was Markdown-mangled `@State`; restored.
    @State private var number = 0
    @State private var multiple: Int?
    private let startDate = Date()
    // Fires once per second on the main run loop; autoconnect starts it on first subscription.
    private let timer = Timer.publish(every: 1, on: .main, in: .common).autoconnect()
    var body: some View {
        VStack {
            Text("Number: \(number)")
            // `map` avoids the force-unwrap `String(multiple!)`.
            Text("Multiple: \(multiple.map(String.init) ?? "no")")
        }
        .onReceive(timer) { date in
            // Whole seconds elapsed since the view was created.
            let interval = Int(date.timeIntervalSince(startDate))
            print(interval)
            number = interval
            if interval.isMultiple(of: 3) {
                // multiple of 3
                multiple = 3
            } else if interval.isMultiple(of: 2) {
                // multiple of 2 (careful - 6 will be ignored because it is already a multiple of 3)
                multiple = 2
            } else {
                // neither a multiple of 2 nor of 3
                multiple = nil
            }
            if interval == 12 {
                // Cancel the upstream connection so the view stops receiving ticks.
                timer.upstream.connect().cancel()
            }
        }
    }
}
Result:
I'm working on converting to Swift some code that helps me get audio data for visualizations. The Objective-C code I'm working with, which works well, is:
// Drain the AVAssetReader: for each sample buffer, copy the raw PCM bytes out,
// walk the Int16 samples (stepping 100 frames at a time on the first channel),
// convert each picked sample to decibels, clamp it to [noiseFloor, 0], and
// average groups of `downsampleFactor` picked samples into `fullSongData`.
// NOTE(review): reader, bytesPerInputSample, channelCount, downsampleFactor,
// tally, tallyCount, maximum, outSamples and fullSongData are defined by the
// enclosing method/class, not shown here.
while (reader.status == AVAssetReaderStatusReading) {
AVAssetReaderTrackOutput *trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
self.sampleBufferRef = [trackOutput copyNextSampleBuffer];
if (self.sampleBufferRef) {
// Copy the sample buffer's backing bytes into a malloc'd scratch buffer.
CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(self.sampleBufferRef);
size_t bufferLength = CMBlockBufferGetDataLength(blockBufferRef);
void *data = malloc(bufferLength);
CMBlockBufferCopyDataBytes(blockBufferRef, 0, bufferLength, data);
// Interpret the bytes as signed 16-bit samples.
SInt16 *samples = (SInt16 *)data;
int sampleCount = bufferLength / bytesPerInputSample;
for (int i=0; i<sampleCount; i+=100) {
Float32 sample = (Float32) *samples++;
sample = decibel(sample);
sample = minMaxX(sample,noiseFloor,0);
tally += sample;
// Skip the remaining channels of this frame (only channel 0 is sampled).
for (int j=1; j<channelCount; j++)
samples++;
tallyCount++;
if (tallyCount == downsampleFactor) {
// Emit the average of the last `downsampleFactor` picked samples.
sample = tally / tallyCount;
maximum = maximum > sample ? maximum : sample;
[fullSongData appendBytes:&sample length:sizeof(sample)];//tried dividing the sample by 2
tally = 0;
tallyCount = 0;
outSamples++;
}
}
// Release per-buffer resources before pulling the next sample buffer.
CMSampleBufferInvalidate(self.sampleBufferRef);
CFRelease(self.sampleBufferRef);
free(data);
}
}
In Swift, I'm trying to write is this part:
while (reader.status == AVAssetReaderStatus.Reading) {
var trackOutput = reader.outputs[0] as! AVAssetReaderTrackOutput
self.sampleBufferRef = trackOutput.copyNextSampleBuffer()
if (self.sampleBufferRef != nil) {
let blockBufferRef = CMSampleBufferGetDataBuffer(self.sampleBufferRef)
let bufferLength = CMBlockBufferGetDataLength(blockBufferRef)
var data = NSMutableData(length: bufferLength)
CMBlockBufferCopyDataBytes(blockBufferRef, 0, bufferLength, data!.mutableBytes)
var samples = UnsafeMutablePointer<Int16>(data!.mutableBytes)
var sampleCount = Int32(bufferLength)/bytesPerInputSample
for var i = 0; i < Int(sampleCount); i++ {
var sampleValue = CGFloat(samples[i]) etc. etc.
However, when I println() sampleValue, it just comes out as (Opaque Value) in the console. I can't figure out how to actually read the sampleValue.
I'm new at trying to read audio data for visualization purposes. Any help on getting a buffer of audio data to work with would be helpful. Thank you.
Use stride?
// Walk the NSMutableData buffer one sample at a time, wrapping each
// fixed-size chunk in a `Data` value for further processing.
// NOTE(review): `data` is the NSMutableData from the surrounding answer;
// 4 bytes per sample is an explicit assumption, not derived from the asset.
let bytesPerInputSample = 4 // assumption ;)
var samplePtr = data.mutableBytes
for _ in stride(from: 0, to: data.length, by: bytesPerInputSample) {
let currentSample = Data(bytes: samplePtr, count: bytesPerInputSample)
// do whatever is needed with current sample
//...
// increase ptr by size of sample
samplePtr = samplePtr + bytesPerInputSample
}
I can't figure out this problem. I'm deleting a certain SpriteNode, then sometimes re-adding it based on a condition; however, it crashes every time I call addChild(). I know a SpriteNode can only have one parent, so I don't understand this. Here is the relevant code:
// Handles a tap on a memory-game tile: removes the tapped tile, and once two
// tiles are flipped, compares the label nodes at the two flipped positions.
// Matching labels erase the pair; otherwise the tiles wait (the re-add via
// addChild is commented out — the crash under investigation).
// NOTE(review): pre-Swift-2 syntax (C-style for loops, ++, println) — this
// compiles only on Swift 1.x toolchains.
override func touchesBegan(touches: NSSet, withEvent event:UIEvent) {
var touch: UITouch = touches.anyObject() as UITouch
var location = touch.locationInNode(self)
var node = self.nodeAtPoint(location)
for var i=0; i < tileNodeArray.count; i++
{
if (node == tileNodeArray[i]) {
flippedTilesCount++;
flippedTilesArray.append(tileNodeArray[i])
// Tile is detached via an SKAction rather than removeFromParent() directly.
let removeAction = SKAction.removeFromParent()
tileNodeArray[i].runAction(removeAction)
if flippedTilesCount == 2
{
// Placeholder nodes, replaced below by the label nodes found at the
// two flipped-tile positions.
var helperNode1 = newMemoLabelNode("first",x: 0,y: 0,aka: "first")
var helperNode2 = newMemoLabelNode("second",x: 0,y: 0,aka: "second")
for var k = 0; k < labelNodeArray.count ;k++
{
if labelNodeArray[k].position == flippedTilesArray[0].position
{
helperNode1 = labelNodeArray[k]
}
if labelNodeArray[k].position == flippedTilesArray[1].position
{
helperNode2 = labelNodeArray[k]
}
}
// Two tiles match when the label nodes underneath share a name.
if helperNode1.name == helperNode2.name
{
erasedTiles = erasedTiles + 2;
}
else
{
// Mismatch: wait, then (intended) re-add the tiles to hide the labels.
for var j = 0; j < flippedTilesArray.count ;j++
{
let waitAction = SKAction.waitForDuration(1.0)
flippedTilesArray[j].runAction(waitAction)
//self.addChild(flippedTilesArray[j]);
}
}
// Reset per-turn state.
flippedTilesCount = 0;
flippedTilesArray = []
println("erased tiles:")
println(erasedTiles)
}
}
}
}
Appreciate your help!
I would recommend you not to use SKAction.removeFromParent but remove the node itself by calling:
tileNodeArray[i].removeFromParent()
instead of:
let removeAction = SKAction.removeFromParent()
tileNodeArray[i].runAction(removeAction)
The problem might be, that the SKActions don't wait for each other to finish. For example if you call the waitAction, the other actions will keep running.
I want to query the orientation the iPhone is currently in. Using
[UIDevice currentDevice].orientation
works as long as the device isn't orientation-locked. If it is locked, however, it always responds with the locked orientation, not with the actual orientation of the device.
Is there a high-level way to get the actual device orientation?
Also you can use CoreMotion
Orientation detection algorithm:
if abs( y ) < abs( x ) your iPhone is in landscape position, look sign of x to detect right
or left
else your iPhone is in portrait position, look sign of y to detect up or upside-down.
If you are interested in face-up or down, look value of z.
import CoreMotion
var uMM: CMMotionManager!
// Start 5 Hz accelerometer sampling and print a coarse orientation
// ("Right"/"Left"/"Down"/"Up") derived from the dominant acceleration axis.
override func
viewWillAppear( p: Bool ) {
super.viewWillAppear( p )
uMM = CMMotionManager()
uMM.accelerometerUpdateInterval = 0.2
// Using main queue is not recommended. So create new operation queue and pass it to startAccelerometerUpdatesToQueue.
// Dispatch U/I code to main thread using dispatch_async in the handler.
uMM.startAccelerometerUpdatesToQueue( NSOperationQueue() ) { p, _ in
if p != nil {
// |y| < |x| => landscape (sign of x picks right/left);
// otherwise portrait (sign of y picks down/up).
println(
abs( p.acceleration.y ) < abs( p.acceleration.x )
? p.acceleration.x > 0 ? "Right" : "Left"
: p.acceleration.y > 0 ? "Down" : "Up"
)
}
}
}
// Stop accelerometer sampling when the view goes away to save power.
override func
viewDidDisappear( p: Bool ) {
super.viewDidDisappear( p )
uMM.stopAccelerometerUpdates()
}
That functionality is correct. If it always returned the device orientation, even if it was locked, the orientation changed notifications would fire. This would defeat the purpose of the lock.
To answer your question, there is no way to read the raw values from the accelerometer, without using private APIs.
Edit:
After reviewing the documentation, it seems that the UIAccelerometer class provides this data, even when the orientation is locked. This change was applied in iOS 4 and above. Even though you can use this data, you still need to process it to determine the orientation. This is not an easy task as you need to monitor the changes constantly and compare them to older values.
Also, take a look at this guide for handling motion events. This may provide you with another route to determining the orientation.
Set up your view controller or whatever to support the UIAccelerometerProtocol, and start listening for changes (you can set it to 10 hz).
#define kAccelerometerFrequency 10.0 //Hz
// Begin orientation notifications and accelerometer sampling (at
// kAccelerometerFrequency Hz) when the view appears.
// Fix: the original's `DLog(#"…")` was a Markdown-mangled `@"…"` literal.
-(void)viewDidAppear:(BOOL)animated {
    DLog(@"viewDidAppear");
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    UIAccelerometer* a = [UIAccelerometer sharedAccelerometer];
    a.updateInterval = 1 / kAccelerometerFrequency;
    a.delegate = self;
}
// Detach from the accelerometer and stop orientation notifications before
// the view disappears (mirrors viewDidAppear:).
// Fix: the original's `DLog(#"…")` was a Markdown-mangled `@"…"` literal.
-(void)viewWillDisappear:(BOOL)animated {
    DLog(@"viewWillDisappear");
    UIAccelerometer* a = [UIAccelerometer sharedAccelerometer];
    a.delegate = nil;
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
}
#ifdef DEBUG
// Debug helper: human-readable name for an interface orientation.
// Fix: every `#"…"` below was a Markdown-mangled `@"…"` string literal.
+(NSString*)orientationToText:(const UIInterfaceOrientation)ORIENTATION {
    switch (ORIENTATION) {
        case UIInterfaceOrientationPortrait:
            return @"UIInterfaceOrientationPortrait";
        case UIInterfaceOrientationPortraitUpsideDown:
            return @"UIInterfaceOrientationPortraitUpsideDown";
        case UIInterfaceOrientationLandscapeLeft:
            return @"UIInterfaceOrientationLandscapeLeft";
        case UIInterfaceOrientationLandscapeRight:
            return @"UIInterfaceOrientationLandscapeRight";
    }
    return @"Unknown orientation!";
}
#endif
#pragma mark UIAccelerometerDelegate
// UIAccelerometerDelegate callback: derive a coarse interface orientation
// from raw acceleration. A 0.75 g threshold means near-flat readings fall
// through to "same as last time". Only logs on an actual change.
// Fix: `NSLog(#"… %# …")` was Markdown-mangled; restored `@"… %@ …"`.
-(void)accelerometer:(UIAccelerometer *)accelerometer didAccelerate:(UIAcceleration *)acceleration {
    UIInterfaceOrientation orientationNew;
    if (acceleration.x >= 0.75) {
        orientationNew = UIInterfaceOrientationLandscapeLeft;
    }
    else if (acceleration.x <= -0.75) {
        orientationNew = UIInterfaceOrientationLandscapeRight;
    }
    else if (acceleration.y <= -0.75) {
        orientationNew = UIInterfaceOrientationPortrait;
    }
    else if (acceleration.y >= 0.75) {
        orientationNew = UIInterfaceOrientationPortraitUpsideDown;
    }
    else {
        // Consider same as last time
        return;
    }
    if (orientationNew == orientationLast)
        return;
    NSLog(@"Going from %@ to %@!", [[self class] orientationToText:orientationLast], [[self class] orientationToText:orientationNew]);
    orientationLast = orientationNew;
}
#pragma mark -
You need to define UIInterfaceOrientation orientationLast as a member variable and you're set.
Handling all 6 orientations
Though we don't often care about FaceUp / FaceDown orientations, they're still important.
Taking them into account leads to a much more appropriate sensitivity for orientation changes, while leaving them out can lead to metastability & hysteresis.
Here's how I handled it -
// Start accelerometer updates and classify the reading into one of the six
// UIDeviceOrientation values (including FaceUp/FaceDown) using atan2 angles
// between axis pairs. A near-45° comparison (threshold 40°) decides which
// quadrant a reading belongs to; readings matching no case keep the previous
// orientation. Changes are reported on the main queue.
// Fix: `NSLog(#"Accelerometer error: %#", …)` was Markdown-mangled; restored
// the `@` string literal and `%@` format specifier.
- (void)startMonitoring
{
    [self.motionManager startAccelerometerUpdatesToQueue:self.opQueue withHandler:^(CMAccelerometerData * _Nullable accelerometerData, NSError * _Nullable error) {
        if (error != nil)
        {
            NSLog(@"Accelerometer error: %@", error);
        }
        else
        {
            // 40° (rather than 45°) makes switching less sensitive and avoids
            // hysteresis right at the inflection points.
            float const threshold = 40.0;
            BOOL (^isNearValue) (float value1, float value2) = ^BOOL(float value1, float value2)
            {
                return fabsf(value1 - value2) < threshold;
            };
            BOOL (^isNearValueABS) (float value1, float value2) = ^BOOL(float value1, float value2)
            {
                return isNearValue(fabsf(value1), fabsf(value2));
            };
            // Pairwise angles (degrees) between the acceleration components.
            float yxAtan = (atan2(accelerometerData.acceleration.y, accelerometerData.acceleration.x)) * 180 / M_PI;
            float zyAtan = (atan2(accelerometerData.acceleration.z, accelerometerData.acceleration.y)) * 180 / M_PI;
            float zxAtan = (atan2(accelerometerData.acceleration.z, accelerometerData.acceleration.x)) * 180 / M_PI;
            UIDeviceOrientation orientation = self.orientation;
            if (isNearValue(-90.0, yxAtan) && isNearValueABS(180.0, zyAtan))
            {
                orientation = UIDeviceOrientationPortrait;
            }
            else if (isNearValueABS(180.0, yxAtan) && isNearValueABS(180.0, zxAtan))
            {
                orientation = UIDeviceOrientationLandscapeLeft;
            }
            else if (isNearValueABS(0.0, yxAtan) && isNearValueABS(0.0, zxAtan))
            {
                orientation = UIDeviceOrientationLandscapeRight;
            }
            else if (isNearValue(90.0, yxAtan) && isNearValueABS(0.0, zyAtan))
            {
                orientation = UIDeviceOrientationPortraitUpsideDown;
            }
            else if (isNearValue(-90.0, zyAtan) && isNearValue(-90.0, zxAtan))
            {
                orientation = UIDeviceOrientationFaceUp;
            }
            else if (isNearValue(90.0, zyAtan) && isNearValue(90.0, zxAtan))
            {
                orientation = UIDeviceOrientationFaceDown;
            }
            // Notify only on change, and always on the main queue.
            if (self.orientation != orientation)
            {
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self orientationDidChange:orientation];
                });
            }
        }
    }];
}
Additionally, I've added a threshold value of 40.0 (instead of 45.0). This makes changes less sensitive, preventing hysteresis at inflection points.
If you only want to react to changes of the main 4 orientations, just do this
if (UIDeviceOrientationIsPortrait(orientation) || UIDeviceOrientationIsLandscape(orientation))
{
// Do something
}
The UIAccelerometer class continues to function when the device orientation is locked. You'll have to work out your own methods of turning its variables into orientation values, but it shouldn't be especially complicated.
Have a play with Apple's AcceleromoterGraph sample app to see what values the accelerometer outputs in different orientations.
My solution uses Core Motion; it works even when the device has its orientation locked.
let motionManager: CMMotionManager = CMMotionManager()
on the did load method
motionManager.deviceMotionUpdateInterval = 0.01
if motionManager.accelerometerAvailable{
let queue = NSOperationQueue()
motionManager.startAccelerometerUpdatesToQueue(queue, withHandler:
{data, error in
guard let data = data else{
return
}
let angle = (atan2(data.acceleration.y,data.acceleration.x))*180/M_PI;
print(angle)
if(fabs(angle)<=45){
self.orientation = AVCaptureVideoOrientation.LandscapeLeft
print("landscape left")
}else if((fabs(angle)>45)&&(fabs(angle)<135)){
if(angle>0){
self.orientation = AVCaptureVideoOrientation.PortraitUpsideDown
print("portrait upside Down")
}else{
self.orientation = AVCaptureVideoOrientation.Portrait
print("portrait")
}
}else{
self.orientation = AVCaptureVideoOrientation.LandscapeRight
print("landscape right")
}
}
)
} else {
print("Accelerometer is not available")
}
hope it helps.
Most of the answers are using accelerometer, which is the overall acceleration = user + gravity.
But to get a device orientation, it is more accurate to use the gravity acceleration. Using gravity will prevent the edge case when user moves in a particular direction. To access the gravity, we have to use startDeviceMotionUpdates API instead.
// Derive a device orientation from the gravity vector of device-motion
// updates (gravity excludes user acceleration, unlike raw accelerometer data).
// NOTE(review): `newDeviceOrientation` is computed but not used here — this
// is an illustrative snippet; wire it to your own handler.
let motionManager = CMMotionManager()
motionManager.startDeviceMotionUpdates(to: OperationQueue()) { (data, error) in
guard let gravity = data?.gravity else { return }
let newDeviceOrientation: UIDeviceOrientation
// Dominant axis decides landscape vs portrait; the sign picks the side.
if abs(gravity.y) < abs(gravity.x) {
newDeviceOrientation = gravity.x > 0 ? .landscapeRight : .landscapeLeft
} else {
newDeviceOrientation = gravity.y > 0 ? .portraitUpsideDown : .portrait
}
}
Here is an example of detecting device rotation and returning the UIDeviceOrientation.
This solution using CoreMotion and works in all cases.
Example
let orientationManager = APOrientationManager()
orientationManager.delegate = self
/// start detect rotation
orientationManager.startMeasuring()
/// get current interface orientation
let orientation = orientationManager.currentInterfaceOrientation()
print(orientation.rawValue)
/// stop detect rotation
orientationManager.stopMeasuring()
orientationManager.delegate = nil
conform delegate
// Example delegate conformance for APOrientationManager.
extension ViewController: APOrientationManagerDelegate {
func didChange(deviceOrientation: UIDeviceOrientation) {
// Update UI on the main thread — the manager delivers this callback
// from its own OperationQueue.
}
}
APOrientationManager.swift
import Foundation
import CoreMotion
import AVFoundation
import UIKit
/// Receives physical device-orientation changes detected by
/// `APOrientationManager`. Class-bound so the manager can hold it weakly.
protocol APOrientationManagerDelegate: class {
// Called whenever the detected orientation changes; may arrive off the main thread.
func didChange(deviceOrientation: UIDeviceOrientation)
}
/// Detects the physical device orientation from raw accelerometer samples,
/// which keeps working even when the UI orientation is locked. The detector
/// is stateful: thresholds differ depending on whether the device was last
/// known to be flat (faceUp/faceDown) or upright, which damps flapping near
/// the boundaries.
class APOrientationManager {
    private let motionManager = CMMotionManager()
    private let queue = OperationQueue()
    /// Last orientation delivered to the delegate (.unknown until first sample).
    private var deviceOrientation: UIDeviceOrientation = .unknown
    weak var delegate: APOrientationManagerDelegate?

    init() {
        // 1 Hz is enough for orientation tracking and keeps power use low.
        motionManager.accelerometerUpdateInterval = 1.0
        motionManager.deviceMotionUpdateInterval = 1.0
        motionManager.gyroUpdateInterval = 1.0
        motionManager.magnetometerUpdateInterval = 1.0
    }

    /// Starts accelerometer sampling; delegate callbacks fire on `queue`.
    func startMeasuring() {
        // Bug fix: this method starts *accelerometer* updates, so guard on
        // accelerometer availability (the original checked
        // isDeviceMotionAvailable, which gates a different API).
        guard motionManager.isAccelerometerAvailable else {
            return
        }
        motionManager.startAccelerometerUpdates(to: queue) { [weak self] (accelerometerData, error) in
            guard let strongSelf = self else {
                return
            }
            guard let accelerometerData = accelerometerData else {
                return
            }
            let acceleration = accelerometerData.acceleration
            // x is negated so the angle thresholds below read naturally.
            let xx = -acceleration.x
            let yy = acceleration.y
            let z = acceleration.z
            // Angle of gravity in the device's x/y plane, in radians (-pi...pi).
            let angle = atan2(yy, xx)
            var deviceOrientation = strongSelf.deviceOrientation
            let absoluteZ = fabs(z)
            if deviceOrientation == .faceUp || deviceOrientation == .faceDown {
                // Coming from a flat pose: require the device to tip well off
                // flat (|z| < 0.845) before switching to an upright orientation.
                if absoluteZ < 0.845 {
                    if angle < -2.6 {
                        deviceOrientation = .landscapeRight
                    } else if angle > -2.05 && angle < -1.1 {
                        deviceOrientation = .portrait
                    } else if angle > -0.48 && angle < 0.48 {
                        deviceOrientation = .landscapeLeft
                    } else if angle > 1.08 && angle < 2.08 {
                        deviceOrientation = .portraitUpsideDown
                    }
                } else if z < 0 {
                    deviceOrientation = .faceUp
                } else if z > 0 {
                    deviceOrientation = .faceDown
                }
            } else {
                // Upright pose: only go flat past a stricter |z| > 0.875.
                if z > 0.875 {
                    deviceOrientation = .faceDown
                } else if z < -0.875 {
                    deviceOrientation = .faceUp
                } else {
                    // Per-orientation angle windows; switching away from the
                    // current orientation needs a clear crossing.
                    switch deviceOrientation {
                    case .landscapeLeft:
                        if angle < -1.07 {
                            deviceOrientation = .portrait
                        }
                        if angle > 1.08 {
                            deviceOrientation = .portraitUpsideDown
                        }
                    case .landscapeRight:
                        if angle < 0 && angle > -2.05 {
                            deviceOrientation = .portrait
                        }
                        if angle > 0 && angle < 2.05 {
                            deviceOrientation = .portraitUpsideDown
                        }
                    case .portraitUpsideDown:
                        if angle > 2.66 {
                            deviceOrientation = .landscapeRight
                        }
                        if angle < 0.48 {
                            deviceOrientation = .landscapeLeft
                        }
                    case .portrait:
                        if angle > -0.47 {
                            deviceOrientation = .landscapeLeft
                        }
                        if angle < -2.64 {
                            deviceOrientation = .landscapeRight
                        }
                    default:
                        if angle > -0.47 {
                            deviceOrientation = .landscapeLeft
                        }
                        if angle < -2.64 {
                            deviceOrientation = .landscapeRight
                        }
                    }
                }
            }
            // Only notify the delegate on an actual change.
            if strongSelf.deviceOrientation != deviceOrientation {
                strongSelf.deviceOrientation = deviceOrientation
                strongSelf.delegate?.didChange(deviceOrientation: deviceOrientation)
            }
        }
    }

    /// Stops accelerometer sampling.
    func stopMeasuring() {
        motionManager.stopAccelerometerUpdates()
    }

    /// Maps the tracked device orientation to a capture video orientation.
    /// Landscape values are intentionally swapped: the camera's notion of
    /// left/right is the mirror of the device's.
    func currentInterfaceOrientation() -> AVCaptureVideoOrientation {
        switch deviceOrientation {
        case .portrait:
            return .portrait
        case .landscapeRight:
            return .landscapeLeft
        case .landscapeLeft:
            return .landscapeRight
        case .portraitUpsideDown:
            return .portraitUpsideDown
        default:
            return .portrait
        }
    }
}
Use of CMMotionManager may help, but not in the way shown above. The above logic is not stable. I have tested thoroughly and found that looking at the values of acceleration.x/y/z does not help to determine the orientation.
Instead, I got a way to find the orientation WRT the angle i.e.
float angle = (atan2(accelerometerData.acceleration.y,accelerometerData.acceleration.x))*180/M_PI;
And for orientation,-
// Classify orientation from the x/y acceleration angle (degrees).
// Bug fix: the original wrote `fabs(angle<=45)` — the closing parenthesis was
// misplaced, so fabs() was applied to the boolean comparison result instead
// of the angle. Corrected to `fabs(angle) <= 45`.
if (fabs(angle) <= 45) currOrientation = UIDeviceOrientationLandscapeRight;
else if ((fabs(angle) > 45) && (fabs(angle) < 135)) currOrientation = ((angle > 0) ? UIDeviceOrientationPortraitUpsideDown : UIDeviceOrientationPortrait);
else currOrientation = UIDeviceOrientationLandscapeLeft;
This might come handy for someone, though this doesn't help me to find 2 other orientations i.e. UIDeviceOrientationFaceUp & UIDeviceOrientationFaceDown.
Using Satachito's great answer here is code which will also detect if the device is face up or face down
import CoreMotion
var mm: CMMotionManager!
// Create the motion manager with a 5 Hz (0.2 s) accelerometer sample rate.
init() {
self.mm = CMMotionManager()
self.mm.accelerometerUpdateInterval = 0.2
}
// Start accelerometer sampling and print one of six coarse orientations:
// "face up"/"face down" when z dominates (|z| > 0.95 with x near zero),
// otherwise Right/Left/Down/Up from the dominant x/y axis (Satachito's scheme).
public func startOrientationUpdates() {
// Using main queue is not recommended. So create new operation queue and pass it to startAccelerometerUpdatesToQueue.
// Dispatch U/I code to main thread using dispatch_async in the handler.
self.mm.startAccelerometerUpdates( to: OperationQueue() ) { p, _ in
if let p = p {
if(p.acceleration.x > -0.3 && p.acceleration.x < 0.3 && p.acceleration.z < -0.95) {
print("face up")
}
else if(p.acceleration.x > -0.3 && p.acceleration.x < 0.3 && p.acceleration.z > 0.95) {
print("face down")
}
else {
// |y| < |x| => landscape (sign of x picks Right/Left);
// otherwise portrait (sign of y picks Down/Up).
print(
abs( p.acceleration.y ) < abs( p.acceleration.x )
? p.acceleration.x > 0 ? "Right" : "Left"
: p.acceleration.y > 0 ? "Down" : "Up"
)
}
}
}
}
// Stop accelerometer sampling started by startOrientationUpdates().
public func endOrientationUpdates() {
self.mm.stopAccelerometerUpdates()
}
I am using the accelerometer to scroll multiple subViews in a UIScrollVIew. I want the view (portrait orientation) to scroll to the right when the user flicks the iPhone to the right, and scroll to the left when the device is flicked to the left.
I thought I could do that just by noting positive or negative x acceleration values, but I see that the values are usually a mixture of positive and negative values. I have set the floor at 1.5g to eliminate non-shake movement, and am looking at the x values over the duration of .5 seconds.
I'm sure there is a trigonometrical method for determining the overall direction of a flick, and that you have to measure values over the duration of the flick motion. I'm also sure that someone has already figured this one out.
Any ideas out there?
Thanks
I developed a solution which gives me better feedback than the proposed solution (only for left and right shake).
The way I did it here is quite sensitive (recognizes a small shake), but sensitivity can be adapted by changing tresholdFirstMove and tresholdBackMove (increase for lower sensitivity)
In Swift :
(in your viewController. And add "import CoreMotion")
var startedLeftTilt = false
var startedRightTilt = false
var dateLastShake = NSDate(timeIntervalSinceNow: -2)
var dateStartedTilt = NSDate(timeIntervalSinceNow: -2)
var motionManager = CMMotionManager()
let tresholdFirstMove = 3.0
let tresholdBackMove = 0.5
/// Configure the gyroscope sample interval (100 Hz) at load time.
/// Fix: the override was missing the required `super.viewDidLoad()` call.
override func viewDidLoad() {
    super.viewDidLoad()
    // your code
    motionManager.gyroUpdateInterval = 0.01
}
// Begin streaming gyro samples on the current operation queue; each update is
// forwarded to handleGyroData for shake detection.
// NOTE(review): `gyroData` is used unconditionally here — on toolchains where
// the handler parameter is optional this force-unwraps; confirm for your SDK.
override func viewDidAppear(animated: Bool) {
super.viewDidAppear(animated)
motionManager.startGyroUpdatesToQueue(NSOperationQueue.currentQueue(), withHandler: { (gyroData, error) -> Void in
self.handleGyroData(gyroData.rotationRate)
})
}
// Detects a left/right "flick": a strong z-rotation (|z| > tresholdFirstMove)
// opens a 0.3 s window; a return rotation in the opposite direction
// (|z| > tresholdBackMove) inside that window counts as a shake. A 0.3 s
// cool-down after each detected shake prevents double-triggering.
private func handleGyroData(rotation: CMRotationRate) {
// Phase 1: a strong initial tilt (outside the cool-down) arms detection
// and records which direction the tilt started in.
if fabs(rotation.z) > tresholdFirstMove && fabs(dateLastShake.timeIntervalSinceNow) > 0.3
{
if !startedRightTilt && !startedLeftTilt
{
dateStartedTilt = NSDate()
if (rotation.z > 0)
{
startedLeftTilt = true
startedRightTilt = false
}
else
{
startedRightTilt = true
startedLeftTilt = false
}
}
}
// Phase 2: the armed state expires after 0.3 s with no return movement.
if fabs(dateStartedTilt.timeIntervalSinceNow) >= 0.3
{
startedRightTilt = false
startedLeftTilt = false
}
else
{
// A return rotation opposite to the initial tilt completes the shake.
if (fabs(rotation.z) > tresholdBackMove)
{
if startedLeftTilt && rotation.z < 0
{
dateLastShake = NSDate()
startedRightTilt = false
startedLeftTilt = false
println("\\\n Shaked left\n/")
}
else if startedRightTilt && rotation.z > 0
{
dateLastShake = NSDate()
startedRightTilt = false
startedLeftTilt = false
println("\\\n Shaked right\n/")
}
}
}
}
OK, worked out a solution. When I detect a shake motion (acceleration greater than 1.5 on the x axis), I start a timer and set a BOOL to true. While the BOOL is true I add acceleration values. When the timer expires, I stop adding acceleration values and determine direction of the shake by the sign of the total acceleration.
// Accelerometer callback: a spike beyond 1.5 g on the x axis opens a 0.75 s
// sampling window (via a one-shot NSTimer); while the window is open, x
// values accumulate into totalG so endShake can decide the flick direction.
// Fix: `selector:#selector(endShake)` was Markdown-mangled `@selector`.
- (void)accelerometer:(UIAccelerometer *)acel didAccelerate:(UIAcceleration *)aceler {
    if (fabsf(aceler.x) > 1.5)
    {
        shake = YES;
        NSTimeInterval myInterval = .75;
        [NSTimer scheduledTimerWithTimeInterval:myInterval target:self selector:@selector(endShake) userInfo:nil repeats:NO];
        return;
    }
    if (shake)
    {
        totalG += aceler.x;
    }
}
// Close the shake sampling window and page in the direction implied by the
// sign of the accumulated x acceleration (negative total => +1, positive => -1).
// Fix: the author substituted `isLessThan`/`isGreaterThan` for `<`/`>` due to
// formatting issues (see their note below the code); the operators are restored.
- (void) endShake {
    shake = NO;
    int direction;
    if (totalG < 0) direction = 1;
    if (totalG > 0) direction = -1;
    [self changePageByShake:direction];
    totalG = 0;
}
Note: I couldn't get the < and > symbols to format correctly in the codeblock above, so I substituted isLessThan and isGreaterThan for the symbols.