How to simulate a click on the home screen of a (jailbroken) iPhone?

I want to simulate a tap on an app icon on the home screen of a jailbroken iPhone. I used the code below, but it has no effect. Did I do something wrong?
Is my getFrontMostAppPort() method correct? Even when I tried GSSendSystemEvent(), it had no effect.
To be clear, I mean a jailbroken device. Any help is very much appreciated.
// Framework Paths
#define SBSERVPATH "/System/Library/PrivateFrameworks/SpringBoardServices.framework/SpringBoardServices"

static mach_port_t getFrontMostAppPort() {
    bool locked;
    bool passcode;
    mach_port_t *port;

    void *lib = dlopen(SBSERVPATH, RTLD_LAZY);
    int (*SBSSpringBoardServerPort)() = dlsym(lib, "SBSSpringBoardServerPort");
    void *(*SBGetScreenLockStatus)(mach_port_t *port, bool *lockStatus, bool *passcodeEnabled) = dlsym(lib, "SBGetScreenLockStatus");
    void *(*SBFrontmostApplicationDisplayIdentifier)(mach_port_t *port, char *result) = dlsym(lib, "SBFrontmostApplicationDisplayIdentifier");

    port = (mach_port_t *)SBSSpringBoardServerPort();
    SBGetScreenLockStatus(port, &locked, &passcode);

    char appId[256];
    memset(appId, 0, sizeof(appId));
    SBFrontmostApplicationDisplayIdentifier(port, appId);
    dlclose(lib);   // close the library only after the last dlsym'd function has been used

    NSString *frontmostApp = [NSString stringWithFormat:@"%s", appId];
    if ([frontmostApp length] == 0 || locked)
        return GSGetPurpleSystemEventPort();
    else
        return GSCopyPurpleNamedPort(appId);
}
static void sendTouchEvent(GSHandInfoType handInfoType, CGPoint point) {
    uint8_t touchEvent[sizeof(GSEventRecord) + sizeof(GSHandInfo) + sizeof(GSPathInfo)];

    // structure of a touch GSEvent
    struct GSTouchEvent {
        GSEventRecord record;
        GSHandInfo handInfo;
    } *event = (struct GSTouchEvent *) &touchEvent;
    bzero(touchEvent, sizeof(touchEvent));

    // set up the GSEvent
    event->record.type = kGSEventHand;
    event->record.subtype = kGSEventSubTypeUnknown;
    event->record.windowLocation = point;
    event->record.timestamp = GSCurrentEventTimestamp();
    event->record.infoSize = sizeof(GSHandInfo) + sizeof(GSPathInfo);
    event->handInfo.type = handInfoType;
    event->handInfo.x52 = 1;
    bzero(&event->handInfo.pathInfos[0], sizeof(GSPathInfo));
    event->handInfo.pathInfos[0].pathIndex = 1;
    event->handInfo.pathInfos[0].pathIdentity = 2;
    event->handInfo.pathInfos[0].pathProximity =
        (handInfoType == kGSHandInfoTypeTouchDown ||
         handInfoType == kGSHandInfoTypeTouchDragged ||
         handInfoType == kGSHandInfoTypeTouchMoved) ? 0x03 : 0x00;
    event->handInfo.pathInfos[0].pathLocation = point;

    mach_port_t port = (mach_port_t)getFrontMostAppPort();
    GSSendEvent((GSEventRecord *)event, port);
}
// why does nothing happen?
static void clickOnHome() {
    sendTouchEvent(kGSHandInfoTypeTouchDown, CGPointMake(100, 200));
    sleep(1);
    sendTouchEvent(kGSHandInfoTypeTouchUp, CGPointMake(100, 200));
}

Yes, I think your getFrontMostAppPort() method is fine, if you got it from here.
I'm trying to understand what you want:
If you simply want to open up a particular app (by bundleId), then I would recommend using the command-line open utility available on Cydia. You can call this programmatically with system(), or an exec call. Or, if you want to build this "open" capability into your own app, see my answer here.
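For example, a minimal sketch of that approach (the bundle identifier below is a placeholder, and this assumes the Cydia "open" tool is installed and on the PATH):

#include <stdlib.h>

// Hypothetical example: launch an app by bundle ID via the Cydia "open" tool.
// Replace com.example.someapp with a real bundle identifier on the device.
static void launchAppByBundleId(void) {
    system("open com.example.someapp");
}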
Now, maybe you are writing code for an app or tweak that's in the background, not the foreground app. And, maybe you really do need to touch a specific {x,y} coordinate, not open an app by its bundleId. If you really want that, this code works for me (based on this answer ... with corrections):
static void sendTouchEvent(GSHandInfoType handInfoType, CGPoint point) {
    uint8_t gsTouchEvent[sizeof(GSEventRecord) + sizeof(GSHandInfo) + sizeof(GSPathInfo)];

    // structure of a touch GSEvent
    struct GSTouchEvent {
        GSEventRecord record;
        GSHandInfo handInfo;
    } *touchEvent = (struct GSTouchEvent *) &gsTouchEvent;
    bzero(gsTouchEvent, sizeof(gsTouchEvent));   // zero the whole event buffer, not just the pointer

    touchEvent->record.type = kGSEventHand;
    touchEvent->record.subtype = kGSEventSubTypeUnknown;
    touchEvent->record.location = point;
    touchEvent->record.windowLocation = point;
    touchEvent->record.infoSize = sizeof(GSHandInfo) + sizeof(GSPathInfo);
    touchEvent->record.timestamp = GSCurrentEventTimestamp();
    //touchEvent->record.window = winRef;
    //touchEvent->record.senderPID = 919;

    bzero(&touchEvent->handInfo, sizeof(GSHandInfo));
    bzero(&touchEvent->handInfo.pathInfos[0], sizeof(GSPathInfo));

    GSHandInfo touchEventHandInfo = { 0 };
    touchEventHandInfo._0x5C = 0;
    touchEventHandInfo.deltaX = 0;
    touchEventHandInfo.deltaY = 0;
    touchEventHandInfo.height = 0;
    touchEventHandInfo.width = 0;
    touchEvent->handInfo = touchEventHandInfo;

    touchEvent->handInfo.type = (handInfoType == kGSHandInfoTypeTouchDown) ? 2 : 1;
    touchEvent->handInfo.deltaX = 1;
    touchEvent->handInfo.deltaY = 1;
    touchEvent->handInfo.pathInfosCount = 0;
    touchEvent->handInfo.pathInfos[0].pathIndex = 1;
    touchEvent->handInfo.pathInfos[0].pathIdentity = 2;
    touchEvent->handInfo.pathInfos[0].pathProximity =
        (handInfoType == kGSHandInfoTypeTouchDown ||
         handInfoType == kGSHandInfoTypeTouchDragged ||
         handInfoType == kGSHandInfoTypeTouchMoved) ? 0x03 : 0x00;
    touchEvent->handInfo.x52 = 1; // iOS 5+, I believe
    touchEvent->handInfo.pathInfos[0].pathLocation = point;
    //touchEvent->handInfo.pathInfos[0].pathWindow = winRef;

    GSEventRecord *record = (GSEventRecord *) touchEvent;
    record->timestamp = GSCurrentEventTimestamp();
    GSSendEvent(record, getFrontMostAppPort());
}
- (void)simulateHomeScreenTouch {
    CGPoint location = CGPointMake(50, 50);
    sendTouchEvent(kGSHandInfoTypeTouchDown, location);

    double delayInSeconds = 0.1;
    dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC));
    dispatch_after(popTime, dispatch_get_main_queue(), ^(void) {
        sendTouchEvent(kGSHandInfoTypeTouchUp, location);
    });
}
This will send a touch down/up event pair to SpringBoard, as long as SpringBoard is the frontmost app (no other app open). Also, you can see where the handInfoType value is mapped to 2 or 1 for touch down and up events. The code above assumes that those are the only two types of events you're generating.
Note: I first tried with your coordinates of x=100, y=200, and nothing happened. I think that coordinate is between two home screen icons. Using other coordinates (e.g. x=50, y=50) works fine.
Note II: I don't believe this solution actually requires jailbreaking. It uses private APIs, so it can't be submitted to the App Store, but it should work on jailed phones in an enterprise or hobbyist deployment.

Related

ESP32-CAM buffer

When I initialize the camera I use this:
#include "esp_camera.h"
bool initCamera(){
Serial.print("Iniciando Cámara ...");
camera_config_t config;
config.ledc_channel = LEDC_CHANNEL_0;
config.ledc_timer = LEDC_TIMER_0;
config.pin_d0 = Y2_GPIO_NUM;
...
...
config.xclk_freq_hz = 20000000;
config.pixel_format = PIXFORMAT_JPEG;
config.frame_size = FRAMESIZE_XGA;
config.jpeg_quality = 8;
config.fb_count = 1;
config.grab_mode = CAMERA_GRAB_LATEST; <<<<<<<
// camera init
esp_err_t err = esp_camera_init(&config);
if (err != ESP_OK){
Serial.printf("... error 0x%x", err);
return false;
}
sensor_t *s = esp_camera_sensor_get();
Serial.println("... OK");
CAMARA_OK = true;
return true;
}
But when I ask for a capture:
static esp_err_t capture_handler(httpd_req_t *req) {
    cambiosConfiguracion(req);
    camera_fb_t *fb = NULL;
    esp_err_t res = ESP_OK;
    fb = esp_camera_fb_get();
    if (!fb) {
        Serial.println("Camera capture failed");
        httpd_resp_send_500(req);
        return ESP_FAIL;
    }
    ...
the image it gives me is from earlier; the capture is delayed.
When I ask for a capture, I want the frame from that moment, not the one stored in the buffer.
I have the same problem. I've been looking for a solution for days and, although the configuration seems fine, it doesn't do what it's supposed to do.
At the moment I am using this "trick":
fb = esp_camera_fb_get();
esp_camera_fb_return(fb);
fb = esp_camera_fb_get();
I ask for a capture, discard it, and then ask for another one, which is current.
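Wrapped up, the workaround looks roughly like the sketch below (the helper name is mine, not part of esp_camera; it just packages the discard-then-recapture trick):

#include "esp_camera.h"

// Hypothetical helper: drop the possibly stale frame sitting in the driver's
// buffer, then grab a fresh one.
camera_fb_t *captureFreshFrame() {
    camera_fb_t *stale = esp_camera_fb_get();   // may be a frame buffered before this call
    if (stale) {
        esp_camera_fb_return(stale);            // hand the buffer back to the driver
    }
    return esp_camera_fb_get();                 // captured now
}

The caller still has to call esp_camera_fb_return() on the frame it receives once it is done with it.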

Toggle GPIO in Netdevice

Hi, I want to toggle a GPIO pin every time I send a packet, but I see weird behaviour: sometimes my pin gets toggled and sometimes it does not. I checked my kernel logs and there is no message saying the pin could not be toggled.
My ops look like this:
static const struct net_device_ops stmuart_netdev_ops = {
    .ndo_init = stmuart_netdev_init,
    .ndo_uninit = stmuart_netdev_uninit,
    .ndo_open = stmuart_netdev_open,
    .ndo_stop = stmuart_netdev_close,
    .ndo_start_xmit = stmuart_netdev_xmit,
    .ndo_set_mac_address = eth_mac_addr,
    .ndo_tx_timeout = stmuart_netdev_tx_timeout,
    .ndo_validate_addr = eth_validate_addr,
};
As far as I know, the ndo_start_xmit method is called whenever the device wants to send a packet, so I only toggle my pin when this method is called. But when I look at my oscilloscope I can see that the pin is only toggled sometimes.
My send method looks like this:
static netdev_tx_t
stmuart_netdev_xmit(struct sk_buff *skb, struct net_device *dev)
{
    struct net_device_stats *n_stats = &dev->stats;
    struct stmuart *stm = netdev_priv(dev);
    u8 pad_len = 0;
    int written;
    u8 *pos;

    spin_lock(&stm->lock);
    gpiod_set_value(stm->rts_gpio, 1);

    WARN_ON(stm->tx_left);

    if (!netif_running(dev)) {
        spin_unlock(&stm->lock);
        netdev_warn(stm->net_dev, "xmit: iface is down\n");
        goto out;
    }

    pos = stm->tx_buffer;

    if (skb->len < STMFRM_MIN_LEN)
        pad_len = STMFRM_MIN_LEN - skb->len;

    pos += stmfrm_create_header(pos, skb->len + pad_len);

    memcpy(pos, skb->data, skb->len);
    pos += skb->len;

    if (pad_len) {
        memset(pos, 0, pad_len);
        pos += pad_len;
    }

    pos += stmfrm_create_footer(pos);

    netif_stop_queue(stm->net_dev);

    written = serdev_device_write_buf(stm->serdev, stm->tx_buffer,
                                      pos - stm->tx_buffer);
    if (written > 0) {
        stm->tx_left = (pos - stm->tx_buffer) - written;
        stm->tx_head = stm->tx_buffer + written;
        n_stats->tx_bytes += written;
    }
    spin_unlock(&stm->lock);

out:
    gpiod_set_value(stm->rts_gpio, 0);
    netif_trans_update(dev);
    dev_kfree_skb_any(skb);
    return NETDEV_TX_OK;
}
I checked netdevice.h, but I did not see another method that gets called when the net device wants to send a packet.
It seems that whenever the netdevice sends an ARP request, the pin does not get toggled.

Why are audio clicks/pops occurring when the position of a PannerNode is updated rapidly?

I've been experimenting with the PannerNode for quite some time, and in cases where I update the position of the panner repeatedly using either requestAnimationFrame() or setInterval(), I notice clicking/popping in the audio.
Why is this happening and how can it be resolved?
I'd hoped it would be fixed with the introduction of the position as an AudioParam, or that I could circumvent the issue by using a ramp. I've tried setting the position with and without a ramp on Chrome Canary and the issue is still present.
You can hear it for yourself here: https://jsfiddle.net/txLke4fh/
Code snippet:
var ctx = new AudioContext();
var osc = ctx.createOscillator();
var panner = ctx.createPanner();
var temp = true;

osc.frequency.value = 220;
osc.connect(panner);
panner.panningModel = 'HRTF';
panner.setPosition(0, 0, 0);
panner.connect(ctx.destination);
osc.start(0);
osc.stop(ctx.currentTime + 10);

setInterval(function() {
    if (temp) {
        panner.setPosition(50, 0, 0);
        temp = false;
    } else {
        panner.setPosition(-50, 0, 0);
        temp = true;
    }
}, 100);
EDIT: It can be observed on Chrome Canary too, with and without a linear ramp.
var ctx = new AudioContext();
var osc = ctx.createOscillator();
var panner = ctx.createPanner();
var temp = true;

osc.frequency.value = 220;
osc.connect(panner);
panner.panningModel = 'HRTF';
panner.positionX.value = 0;
panner.connect(ctx.destination);
osc.start(0);
osc.stop(ctx.currentTime + 10);

setInterval(function() {
    if (temp) {
        panner.positionX.linearRampToValueAtTime(50, ctx.currentTime + 0.01);
        temp = false;
    } else {
        panner.positionX.linearRampToValueAtTime(-50, ctx.currentTime + 0.01);
        temp = true;
    }
}, 1000);
You're doing an instantaneous move from one position to another. I would certainly expect a click, depending on where you moved to.
If you have Chrome Canary, you should be able to use automation (but don't use setPosition(), which doesn't do automation). If you have an example where it still clicks with automation, please post it; I'd like to see such an example.
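For reference, a minimal sketch of what "use automation" means here: schedule the motion on the audio clock with ramps instead of jumping the position from a timer callback (this assumes a browser where positionX is exposed as an AudioParam, e.g. Chrome Canary at the time of writing):

var ctx = new AudioContext();
var osc = ctx.createOscillator();
var panner = ctx.createPanner();

osc.frequency.value = 220;
osc.connect(panner);
panner.panningModel = 'HRTF';
panner.connect(ctx.destination);
osc.start(0);
osc.stop(ctx.currentTime + 10);

// Sweep from left to right and back over two seconds, entirely on the audio clock.
var now = ctx.currentTime;
panner.positionX.setValueAtTime(-50, now);
panner.positionX.linearRampToValueAtTime(50, now + 1);
panner.positionX.linearRampToValueAtTime(-50, now + 2);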

Stdout redirection not working in iOS without debugger

I am trying to redirect output so I can send it over the network. For some reason, if you run the code with the debugger attached it works perfectly. Once you start the application the normal way, the code freezes on the read() call and never returns. If someone has any pointers, I would highly appreciate it.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^(void) {
    static int pipePair[2];
    if (pipe(pipePair) != 0) {
        return;
    }
    dup2(pipePair[1], STDOUT_FILENO);
    while (true) {
        char *buffer = calloc(sizeof(char), 1024);
        ssize_t readCount = read(pipePair[0], buffer, 1023);
        if (readCount > 0) {
            buffer[readCount] = 0;
            NSString *log = [NSString stringWithCString:buffer encoding:NSUTF8StringEncoding];
            // send it over the network
        }
        free(buffer);   // avoid leaking the buffer on every iteration
        if (readCount == -1) {
            return;
        }
    }
});
Apparently in iOS 5.1 writing to stdout was disallowed. http://spouliot.wordpress.com/2012/03/13/ios-5-1-vs-stdout/

Strange mouse event position problem

I have a function which returns mouse event positions.
// returns the xy point where the mouse event occurred
function getXY(ev) {
    var xypoint = new Point();
    if (ev.layerX || ev.layerY) { // Firefox
        xypoint.x = ev.layerX;
        xypoint.y = ev.layerY;
    } else if (ev.offsetX || ev.offsetX == 0) { // Opera
        xypoint.x = ev.offsetX;
        xypoint.y = ev.offsetY;
    }
    return xypoint;
}
I am capturing mouse events to perform drawings on an HTML5 canvas. Sometimes I get negative values in xypoint. When I debug the application using Firebug I see really strange behavior: for example, if I put a breakpoint at the 4th line of this function with the condition (xypoint.x < 0 || xypoint.y < 0), it stops at the breakpoint and I can see that layerX and layerY are positive and correct, yet xypoint.x or xypoint.y is negative. If I reassign the values using the Firebug console, I get the correct values in xypoint. Can anyone explain what is happening?
The above works fine if I move the mouse at normal speed. If I move the mouse very rapidly, I get this behavior.
Thanks
Handling mouse position was an absolute pain with Canvas. You have to make a ton of adjustments. I use this, which has a few minor errors, but works even with the drag-and-drop divs I use in my app:
getCurrentMousePosition = function(e) {
    // Take mouse position, subtract element position to get relative position.
    if (document.layers) {
        xMousePos = e.pageX;
        yMousePos = e.pageY;
        xMousePosMax = window.innerWidth + window.pageXOffset;
        yMousePosMax = window.innerHeight + window.pageYOffset;
    } else if (document.all) {
        xMousePos = window.event.x + document.body.scrollLeft;
        yMousePos = window.event.y + document.body.scrollTop;
        xMousePosMax = document.body.clientWidth + document.body.scrollLeft;
        yMousePosMax = document.body.clientHeight + document.body.scrollTop;
    } else if (document.getElementById) {
        xMousePos = e.pageX;
        yMousePos = e.pageY;
        xMousePosMax = window.innerWidth + window.pageXOffset;
        yMousePosMax = window.innerHeight + window.pageYOffset;
    }
    elPos = getElementPosition(document.getElementById("cvs"));
    xMousePos = xMousePos - elPos.left;
    yMousePos = yMousePos - elPos.top;
    return {x: xMousePos, y: yMousePos};
}

getElementPosition = function(el) {
    var _x = 0,
        _y = 0;
    if (document.body.style.marginLeft == "" && document.body.style.marginRight == "") {
        _x += (window.innerWidth - document.body.offsetWidth) / 2;
    }
    if (el.offsetParent != null) while (1) {
        _x += el.offsetLeft;
        _y += el.offsetTop;
        if (!el.offsetParent) break;
        el = el.offsetParent;
    } else if (el.x || el.y) {
        if (el.x) _x = el.x;
        if (el.y) _y = el.y;
    }
    return { top: _y, left: _x };
}
Moral of the story? You need to factor in the offset of the canvas to get correct results. You're capturing the XY from the event, which carries an offset that isn't being corrected relative to the window's XY. Make sense?
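For comparison (not part of the original answer), the same offset correction can be sketched with getBoundingClientRect(), which reads the canvas position directly:

// Minimal sketch: translate the event's viewport coordinates into
// canvas-relative coordinates using the canvas's bounding rectangle.
function getCanvasXY(canvas, ev) {
    var rect = canvas.getBoundingClientRect();
    return {
        x: ev.clientX - rect.left,
        y: ev.clientY - rect.top
    };
}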