AWS IoT - Connection and publishing operations using paho-mqtt not working - publish-subscribe

Trying to work with AWS IoT, I have the following code that was working yesterday:
import paho.mqtt.client as mqtt
import ssl
import random
from time import sleep

# AWS IoT custom endpoint and device credential paths.
mqtt_url = "XXXXXXXX.iot.us-east-2.amazonaws.com"
root_ca = './certs/iotRootCA.pem'
public_crt = './certs/deviceCert.crt'
private_key = './certs/deviceCert.key'

# Flipped to True by on_connect once the broker acknowledges (CONNACK).
connflag = False


def on_connect(client, userdata, flags, response_code):
    """Mark the connection as established so the main loop starts publishing."""
    global connflag
    connflag = True
    print("Connected with status: {0}".format(response_code))


def on_publish(client, userdata, mid):
    """Log publish acknowledgements (enable via client.on_publish below)."""
    # BUG FIX: `userdata + " -- " + mid` raised TypeError (mid is an int and
    # userdata may be None); format both values instead of concatenating.
    print("{0} -- {1}".format(userdata, mid))
    # client.disconnect()


if __name__ == "__main__":
    print("Loaded MQTT configuration information.")
    print("Endpoint URL: " + mqtt_url)
    print("Root Cert: " + root_ca)
    print("Device Cert: " + public_crt)
    print("Private Key: " + private_key)

    client = mqtt.Client()
    # Mutual-TLS setup: AWS IoT requires the device certificate/key pair and
    # TLS 1.2 on port 8883.
    client.tls_set(root_ca,
                   certfile=public_crt,
                   keyfile=private_key,
                   cert_reqs=ssl.CERT_REQUIRED,
                   tls_version=ssl.PROTOCOL_TLSv1_2,
                   ciphers=None)
    client.on_connect = on_connect
    # client.on_publish = on_publish

    print("Connecting to AWS IoT Broker...")
    client.connect(mqtt_url, port=8883, keepalive=60)
    # Run the network loop in a background thread; the main thread publishes.
    client.loop_start()
    # client.loop_forever()

    while True:
        sleep(0.5)
        print(connflag)
        if connflag:
            print("Publishing...")
            ap_measurement = random.uniform(25.0, 150.0)
            client.publish("ActivePower", ap_measurement, qos=1)
            print("ActivePower published: " + "%.2f" % ap_measurement)
        else:
            print("waiting for connection...")
As I said, yesterday this code was working. Today, I am getting the following (there is no connection):
python awsiot-publish.py
Loaded MQTT configuration information.
Endpoint URL: XXXXXXX.iot.us-east-2.amazonaws.com
Root Cert: ./certs/iotRootCA.pem
Device Cert: ./certs/deviceCert.crt
Private Key: ./certs/deviceCert.key
Connecting to AWS IoT Broker...
False
waiting for connection...
False
waiting for connection...
False
waiting for connection...
False
I do not know if there is a problem with AWS IoT... I just think the documentation is deficient: it is not clear how we can use our code...

I believe your problem is that your certificate's policy does not have the proper permissions to connect. If it is not specified, paho generates a random client_id. You should set the client_id explicitly, and you also need a policy that allows your certificate to connect using that client ID.
{
"Effect": "Allow",
"Action": "iot:Connect",
"Resource":"arn:aws:iot:us-east-1:123456789012:client/yourClientIdGoesHere"
}
It can be useful to set your client_id to the same as your thingname. (This is not necessary though.) You can also set the resource in your policy to * and then connect with any client_id:
{
"Effect": "Allow",
"Action": "iot:Connect",
"Resource":"*"
}

Related

packer error Failed to send shutdown command: dial tcp 172.29.48.100:22: i/o timeout

I am trying packer builder with provisioner "shell-local". After successful OS installation I am trying to attach second network adapter. But it stuck in this error. Platform Hyper-V. Code looks like:
# Hyper-V ISO builder: unattended Debian 11.5 install driven by a preseed
# file served from Packer's built-in HTTP server.
source "hyperv-iso" "build-debian" {
  # Typed at the installer boot prompt: configures eth0 with a static
  # address (172.29.48.100/24) so SSH can reach the VM after install.
  boot_command = ["<wait><wait><wait><esc><wait><wait><wait>",
    "/install.amd/vmlinuz ",
    "initrd=/install.amd/initrd.gz ", "auto=true ", "interface=eth0 ",
    "netcfg/disable_dhcp=true ",
    "netcfg/confirm_static=true ", "netcfg/get_ipaddress=172.29.48.100 ",
    "netcfg/get_netmask=255.255.255.0 ",
    "netcfg/get_gateway=172.29.48.1 ", "netcfg/get_nameservers=8.8.8.8 8.8.4.4 ",
    "netcfg/get_domain=domain ",
    "netcfg/get_hostname=hostname ", "url=http://{{ .HTTPIP }}:{{ .HTTPPort }}/preseed.cfg ",
    "vga=788 noprompt quiet --<enter> "]
  boot_wait             = "10s"
  configuration_version = "${var.hyperv_version}"
  cpus                  = "${var.cpus}"
  disk_block_size       = "${var.hyperv_disk_block_size}"
  disk_size             = "${var.disk_size}"
  memory                = "${var.memory}"
  generation            = "${var.hyperv_generation}"
  guest_additions_mode  = "disable"
  http_directory        = "${local.http_directory}"
  iso_checksum          = "sha256:e307d0e583b4a8f7e5b436f8413d4707dd4242b70aea61eb08591dc0378522f3"
  iso_url               = "http://debian.mirror.vu.lt/debian-cd/11.5.0/amd64/iso-cd/debian-11.5.0-amd64-netinst.iso"
  output_directory      = "${var.build_directory}/packer-${local.template}-${var.git_sha}"
  shutdown_command      = "echo 'vagrant' | sudo -S /sbin/halt -h -p"
  # SSH always targets the static eth0 address configured above.
  ssh_host                = "${var.ip_address_eth0}"
  ssh_keep_alive_interval = "-1s"
  ssh_password            = "vagrant"
  ssh_port                = 22
  ssh_timeout             = "120m"
  ssh_username            = "vagrant"
  headless                = "false"
  switch_name             = "VmNAT"
  vm_name                 = "${local.template}-${var.git_sha}"
}
build {
  name = "BUILD: Debian v11.5"

  source "hyperv-iso.build-debian" {
  }

  # Runs on the Packer HOST (PowerShell), not inside the guest: stops the
  # freshly built VM, attaches a second network adapter, then boots it again.
  provisioner "shell-local" {
    execute_command    = ["powershell.exe", "{{.Vars}} {{.Script}}"]
    env_var_format     = "$env:%s=\"%s\"; "
    tempfile_extension = ".ps1"
    pause_before       = "60s"
    # BUG FIX: the inline list was never closed — it opened with "[" but was
    # terminated by "}" alone, which is invalid HCL.
    inline = ["Import-Module Hyper-V",
      "Stop-VM -Name ${local.template}-${var.git_sha}",
      "Timeout /T 20",
      "Add-VMNetworkAdapter -VMName ${local.template}-${var.git_sha} -SwitchName ${var.hyperv_switch} -Name Static -DeviceNaming Off",
      "Start-VM -Name ${local.template}-${var.git_sha}"
    ]
  }
}
packer logs
Maybe I'm doing something wrong? And someone know how to fix ? Ty for any help
EDITED:
I made some changes and I think the problem is a timeout. When the provisioned VM is restarted, Packer then tries to reconnect to it, but at that point the VM is still booting, so I get errors like the one above. Is it possible that ssh_timeout applies only to the first boot?

How to fetch collection of Zuora Accounts using REST API

I want to fetch all customer accounts from Zuora. Apart from Exports REST API, Is there any API available to fetch all accounts in a paginated list?
This is the format I used to fetch revenue invoices, use this code and change the endpoint
import pandas as pd
# BUG FIX: requests, json and time were used below but never imported,
# so the script failed with NameError as pasted.
import requests
import json
import time

# Seconds to wait between DataQuery job-status polls.
sleep = 10

# Zuora OAUTH token URL (API sandbox).
token_url = "https://rest.apisandbox.zuora.com/oauth/token"
# URL for the DataQuery job endpoint.
query_url = "https://rest.apisandbox.zuora.com/query/jobs"

# OAUTH client_id & client_secret.
client_id = 'your client id'
client_secret = 'your client secret'

# Request an access token via the client-credentials grant.
token_data = {'grant_type': 'client_credentials'}
access_token_resp = requests.post(token_url, data=token_data,
                                  auth=(client_id, client_secret))
# Parse the tokens as JSON from the response.
tokens = access_token_resp.json()

# Use the access token in all subsequent API calls.
query_job_headers = {'Content-Type': 'application/json',
                     'Authorization': 'Bearer ' + tokens['access_token']}

# JSON body for the DataQuery job; change the query to target other objects
# (e.g. Account) as needed.
json_data = {
    "query": "select * from revenuescheduleiteminvoiceitem",
    "outputFormat": "JSON",
    "compression": "NONE",
    "retries": 3,
    "output": {
        "target": "s3"
    }
}
data = json.dumps(json_data)

# Submit the DataQuery job.
query_job_resp = requests.post(query_url, data=data,
                               headers=query_job_headers)

# Check the job status:
# 1) parse the job-creation response, 2) build the per-job URL from its id,
# 3) poll that URL until the job completes or fails.
query_job = query_job_resp.json()
query_job_url = query_url + '/' + query_job["data"]["id"]
query_status_resp = requests.get(query_job_url, headers=query_job_headers)
query_status = query_status_resp.json()["data"]["queryStatus"]

# Loop until the status == 'completed'; exit on failure.
while query_status != 'completed':
    time.sleep(sleep)
    query_status_resp = requests.get(query_job_url, headers=query_job_headers)
    query_status = query_status_resp.json()["data"]["queryStatus"]
    if query_status == 'failed':
        print("query: " + query_status_resp.json()["data"]["query"] + ' Failed!\n')
        exit(1)

# Job completed: fetch the URL of the produced data file.
file_url = query_status_resp.json()["data"]["dataFile"]
print(file_url)
If you don't want to use Data Query or any queue-based solution like that, use Zoql instead.
Note! You need to know all fields from the Account object you need, the asterisk (select *) doesn't work here:
select Id, ParentId, AccountNumber, Name from Account
You may also add custom fields into your selection. You will get up to 200 records per page.

AS400 RPGLE unable to consume REST API with SSL

I have successfully used IBM AxiscTransport API in AS400 (i 7.2) to consume REST API with HTTP. However, when there is SSL, my program fails with the following error.
TransportFlush() call failed: 55:AxisTransportException: Cannot open a channel
to the remote end. Failed to open connection to server, the operation gsk_secure_soc_init() failed. GSKit Error is 410 - Peer not recognized or badly formatted message received.
My sample source code:
// Target REST endpoint (HTTPS).
uri = 'https://jsonplaceholder.typicode.com/posts/1';

// HTTP method; Axis properties are passed as null-terminated C strings,
// hence the trailing X'00'.
propBuf = 'GET' + X'00';
axiscTransportSetProperty(tHandle: AXISC_PROPERTY_HTTP_METHOD: %addr(propBuf));

// Route the request through a proxy at xx.xx.xx.xx:80.
propBuf = 'xx.xx.xx.xx' + X'00';
propInt = 80;
rc = axiscTransportSetProperty(tHandle:AXISC_PROPERTY_HTTP_PROXY:
                               %addr(propBuf):
                               %addr(propInt));

// Tell the transport the proxied connection uses SSL.
propBuf3 = 'true' + X'00';
rc = axiscTransportSetProperty(tHandle: AXISC_PROPERTY_HTTP_PROXYSSL:
                               %addr(propBuf3));

// SSL setup: point GSKit at the system default key database; cipher
// selections are all 'NONE' (GSKit defaults).
// NOTE(review): no key label or application ID is supplied here — this is
// the configuration that fails with GSKit error 410 (see answer below).
NONE = 'NONE' + X'00';
propBuf = '/QIBM/USERDATA/ICSS/CERT/SERVER/DEFAULT.KDB' + X'00';
propBuf3 = 'true' + X'00';
rc = axiscTransportSetProperty(tHandle: AXISC_PROPERTY_HTTP_SSL:
                               %addr(propBuf):
                               %addr(NULLSTR):%addr(NULLSTR):
                               %addr(NONE):%addr(NONE):
                               %addr(NONE):%addr(NONE):
                               %addr(ENABLED):%addr(propBuf3):*NULL);

// Convert the payload between EBCDIC and ASCII automatically.
propBuf3 = 'true' + X'00';
axiscTransportSetProperty(tHandle: AXISC_PROPERTY_CONVERT_PAYLOAD:
                          %addr(propBuf3));
I am stuck at this for months, and still unable to resolve it. Any help will be appreciated. Thanks.
I got this working by doing this
// Enable SSL by resolving the certificate through an application ID
// registered in DCM (pAppID) rather than a key-database file: every
// key-ring/cipher parameter below is passed as blanks.
// FIX: the pasted call had the identifier "tHandle" split across two lines
// and stray blanks inside the %addr() operands; rejoined and reformatted.
pTolerate = 'true' + X'00';
pAppID = 'API_MAN' + X'00';
axiscTransportSetProperty(tHandle
                          :AXISC_PROPERTY_HTTP_SSL
                          :%addr(pKeyRingFile)
                          :%addr(pKeyRingSorP)
                          :%addr(pKeyRingLabel)
                          :%addr(pV2Cipher)
                          :%addr(pV3Cipher)
                          :%addr(pTLSCipher)
                          :%addr(pTLSv11Cipher)
                          :%addr(pTLSv12Cipher)
                          :%addr(pTolerate)
                          :%addr(pAppID));
The pAppID needs to be setup on the ISeries and the SSL certificate in the store linked to the Application ID.
All other parameters were blanks.

MQTT subscription gets lost in Bluemix container

I am using the Bluemix IoT service. My program consists of the following elements:
Publisher (Local Machine)
Subscribed (Bluemix)
Publisher (Bluemix)
Subscriber (Local Machine)
I am currently following the steps
Publisher (local machine) > Subscriber (Bluemix) > Publisher (Bluemix) > Subscriber (local machine)
The issue I am facing is the moment I try to use both the subscribers together the service unsubscribes from both the ends. If I keep only subscriber the steps work perfect. The topics I am using are as follows:
topic = "iot-2/type/mymqttdevice/id/mynewdev/evt/iotData/fmt/json"
topic2 = "iot-2/type/mymqttdevice/id/mynewdev/evt/iotFile/fmt/json"
Can someone guide what am I doing wrong here?
EDIT: Adding code
Publisher on local machine is a python file consisting of typical connect and publish method. After each publish I disconnect from the IoT service.
Subscriber code on Bluemix:
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import paho.mqtt.client as mqtt
import os, json
import time

# Watson IoT organization and (optional) API credentials.
organization = "xel7"
username = ""
password = ""

# Device identity and the two event topics: incoming data, outgoing file.
devicename = "mynewdev"
deviceType = "mymqttdevice"
topic = "iot-2/type/mymqttdevice/id/mynewdev/evt/iotData/fmt/json"
topic2 = "iot-2/type/mymqttdevice/id/mynewdev/evt/iotFile/fmt/json"

# NOTE(review): any other application connecting with this exact client ID
# will "steal" the connection — make the appId suffix unique per client.
clientID = "a:" + organization + ":appId"
broker = organization + ".messaging.internetofthings.ibmcloud.com"

mqttc = mqtt.Client(clientID)
# BUG FIX: was `if username is not ""` — identity comparison against a
# string literal is not a reliable emptiness test; use != instead.
if username != "":
    mqttc.username_pw_set(username, password=password)


def on_connect(client, userdata, flags, rc):
    print("Connected with result code " + str(rc))
    # Subscribing here (rather than once at start-up) re-establishes the
    # subscription automatically after every reconnect.
    client.subscribe(topic, 0)


def on_subscribe(mosq, obj, mid, granted_qos):
    print("Subscribed: " + str(mid) + " " + str(granted_qos))


def on_message(client, userdata, msg):
    # Persist the payload, then republish it on the file topic.
    # NOTE(review): 'string_escape' is a Python 2-only codec — confirm the
    # runtime, or switch codecs when porting to Python 3.
    with open('indurator.txt', 'w') as fd:
        txt = (msg.payload.decode('string_escape'))
        fd.write(txt)
    # `with` already closed the file; the explicit fd.close() was redundant.
    mqttc.publish(topic2, msg.payload)


# BUG FIX: register callbacks BEFORE connecting so the CONNACK and early
# messages are not delivered before the handlers exist.
mqttc.on_connect = on_connect
mqttc.on_subscribe = on_subscribe
mqttc.on_message = on_message
mqttc.connect(host=broker, port=1883, keepalive=60)
mqttc.loop_forever()
Subscriber code on local machine to receive file published from Bluemix subscriber:
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import paho.mqtt.client as mqtt
import os, json
import time

# Watson IoT organization and (optional) API credentials.
organization = "xel7"
username = ""
password = ""

# Device identity; this side listens for the file events republished by the
# Bluemix subscriber.
devicename = "mynewdev"
deviceType = "mymqttdevice"
topic = "iot-2/type/mymqttdevice/id/mynewdev/evt/iotFile/fmt/json"

# NOTE(review): this is the SAME client ID as the Bluemix subscriber — two
# concurrent connections with one ID steal each other's session, which is
# exactly the disconnect symptom described above. Use a distinct appId here.
clientID = "a:" + organization + ":appId"
broker = organization + ".messaging.internetofthings.ibmcloud.com"

mqttc = mqtt.Client(clientID)
# BUG FIX: was `if username is not ""` — identity comparison against a
# string literal is not a reliable emptiness test; use != instead.
if username != "":
    mqttc.username_pw_set(username, password=password)


def on_connect(client, userdata, flags, rc):
    print("Connected with result code " + str(rc))
    # Subscribing here re-establishes the subscription after reconnects.
    client.subscribe(topic, 0)


def on_subscribe(mosq, obj, mid, granted_qos):
    print("Subscribed: " + str(mid) + " " + str(granted_qos))


def on_message(client, userdata, msg):
    # Persist the received file payload locally.
    # NOTE(review): 'string_escape' is a Python 2-only codec — confirm the
    # runtime, or switch codecs when porting to Python 3.
    with open('receivednew.txt', 'w') as fd:
        txt = (msg.payload.decode('string_escape'))
        fd.write(txt)
    # `with` already closed the file; the explicit fd.close() was redundant.


# BUG FIX: register callbacks BEFORE connecting so the CONNACK and early
# messages are not delivered before the handlers exist.
mqttc.on_connect = on_connect
mqttc.on_subscribe = on_subscribe
mqttc.on_message = on_message
mqttc.connect(host=broker, port=1883, keepalive=60)
mqttc.loop_forever()
Glad you figured out the solution. To summarize as hardillb and amadain mentioned, the same client ID should not be used simultaneously per the Watson IoT Platform documentation.
If a client ID is being re-used, when you attempt to connect to the IoT platform, your device or application receives an error. This may indicate your disconnects are due to the clientID being re-used or “stolen”.
If you have two devices connecting with the same clientId and credentials – that leads to the clientId stealing. Only one unique connection is allowed per clientID; you can not have two concurrent connections using the same ID.
If 2 clients attempt to connect to IoT at the same time using the same client ID, a connection error occurs

usbmuxd for multiple idevices

By using usbmuxd with the help of following command..
tcprelay.py -t 5900
I am able to connect to iphone with VNC viewer by giving hostname as 127.0.0.1 and port as 5900.
By using above command i able to connect to the last connected iphone.
Now how can i connect to multiple devices at same time by using usbmuxd.
i studied in documentation of usbmuxd that we can connect to multiple devices.
I have read the help output but was unable to work it out.
can anyone kindly give suggestions.
Iphone is jail broken with veency, openssh installed
Thanks in advance.
In the latest version of tcprelay.py (http://cgit.sukimashita.com/usbmuxd.git, v1.0.8), multiple device support is not yet implemented. Commands are always sent to the first device:
print "Waiting for devices..."
if not mux.devices:
mux.process(1.0)
if not mux.devices:
print "No device found"
self.request.close()
return
dev = mux.devices[0]
This has to be changed to something like:
dev = None
while dev is None:
for d in mux.devices:
if d.serial == self.server.serial:
dev = d
print "Found the device %s"%str(dev)
break
if dev is None:
mux.process()
Here's the complete tcprelay.py, I'm using:
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# tcprelay.py - TCP connection relay for usbmuxd
#
# Copyright (C) 2009 Hector Martin "marcan" <hector#marcansoft.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 or version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import usbmux
import SocketServer
import select
from optparse import OptionParser
import sys
import threading
class SocketRelay(object):
    """Bidirectionally shuttle bytes between two sockets using select().

    Buffers up to `maxbuf` bytes in each direction; stops reading a side
    whose outgoing buffer is full, providing simple flow control.
    """

    def __init__(self, a, b, maxbuf=65535):
        self.a = a
        self.b = b
        # Pending bytes queued in each direction (a->b and b->a).
        self.atob = ""
        self.btoa = ""
        self.maxbuf = maxbuf

    def handle(self):
        """Relay until a side closes (recv returns empty) or select flags
        an exceptional condition on either socket."""
        while True:
            rlist = []
            wlist = []
            xlist = [self.a, self.b]
            # Only wait for writability on a side that has queued data.
            if self.atob:
                wlist.append(self.b)
            if self.btoa:
                wlist.append(self.a)
            # Only read from a side whose outgoing buffer has room.
            if len(self.atob) < self.maxbuf:
                rlist.append(self.a)
            if len(self.btoa) < self.maxbuf:
                rlist.append(self.b)
            rlo, wlo, xlo = select.select(rlist, wlist, xlist)
            if xlo:
                return
            # Drain as much of each buffer as send() accepts this round.
            if self.a in wlo:
                n = self.a.send(self.btoa)
                self.btoa = self.btoa[n:]
            if self.b in wlo:
                n = self.b.send(self.atob)
                self.atob = self.atob[n:]
            # Read only up to the remaining buffer capacity; an empty read
            # means the peer closed, so the relay ends.
            if self.a in rlo:
                s = self.a.recv(self.maxbuf - len(self.atob))
                if not s:
                    return
                self.atob += s
            if self.b in rlo:
                s = self.b.recv(self.maxbuf - len(self.btoa))
                if not s:
                    return
                self.btoa += s
            #print "Relay iter: %8d atob, %8d btoa, lists: %r %r %r"%(len(self.atob), len(self.btoa), rlo, wlo, xlo)
class TCPRelay(SocketServer.BaseRequestHandler):
    """Per-connection handler: waits for the USB device whose serial matches
    this server's configured serial, opens a usbmuxd channel to it, and
    relays traffic between the local client and the device."""

    def handle(self):
        print "Incoming connection to %d"%self.server.server_address[1]
        mux = usbmux.USBMux(options.sockpath)
        # Original single-device behavior, kept for reference:
        #print "Waiting for devices..."
        #if not mux.devices:
        # mux.process(1.0)
        #if not mux.devices:
        # print "No device found"
        # self.request.close()
        # return
        #dev = mux.devices[0]
        # Multi-device change: block until the device with the requested
        # serial appears; mux.process() pumps usbmuxd events while waiting.
        # NOTE(review): this loops forever if the serial never shows up.
        dev = None
        while dev is None:
            for d in mux.devices:
                if d.serial == self.server.serial:
                    dev = d
                    print "Found the device %s"%str(dev)
                    break
            if dev is None:
                mux.process()
        print "Connecting to device %s"%str(dev)
        # Open the device-side channel, then relay against the client socket.
        dsock = mux.connect(dev, self.server.rport)
        lsock = self.request
        print "Connection established, relaying data"
        try:
            fwd = SocketRelay(dsock, lsock, self.server.bufsize * 1024)
            fwd.handle()
        finally:
            # Close both ends even if the relay raised.
            dsock.close()
            lsock.close()
        print "Connection closed"
class TCPServer(SocketServer.TCPServer):
    # Allow quick restarts: rebind the listening port even if the previous
    # socket is still in TIME_WAIT.
    allow_reuse_address = True
class ThreadedTCPServer(SocketServer.ThreadingMixIn, TCPServer):
    # Thread-per-connection variant, selected by the -t/--threaded option.
    pass
HOST = "localhost"

# Command-line interface: one forwarding spec per positional argument.
parser = OptionParser(usage="usage: %prog [OPTIONS] RemotePort[:LocalPort] [RemotePort[:LocalPort]]...")
parser.add_option("-t", "--threaded", dest='threaded', action='store_true', default=False, help="use threading to handle multiple connections at once")
parser.add_option("-b", "--bufsize", dest='bufsize', action='store', metavar='KILOBYTES', type='int', default=128, help="specify buffer size for socket forwarding")
parser.add_option("-s", "--socket", dest='sockpath', action='store', metavar='PATH', type='str', default=None, help="specify the path of the usbmuxd socket")
options, args = parser.parse_args()

serverclass = TCPServer
if options.threaded:
    serverclass = ThreadedTCPServer

if len(args) == 0:
    parser.print_help()
    sys.exit(1)

# Multi-device change: each argument is RemotePort:LocalPort:Serial, so
# traffic can be routed to one specific device by its serial number.
# (NOTE: the usage string above still shows the old two-field format.)
ports = []
for arg in args:
    try:
        # Original two-field parsing, kept for reference:
        # if ':' in arg:
        # rport, lport = arg.split(":")
        # lport = int(lport)
        # ports.append((rport, lport))
        # else:
        # ports.append((int(arg), int(arg)))
        rport, lport, serial = arg.split(":")
        lport = int(lport)
        ports.append((int(rport), int(lport), serial))
    except:
        # Any malformed argument aborts with the usage text.
        parser.print_help()
        sys.exit(1)

# One listening server per forwarded port; the serial is attached to the
# server object so TCPRelay.handle() can pick the matching device.
servers=[]
for rport, lport, serial in ports:
    print "Forwarding local port %d to remote port %d"%(lport, rport)
    server = serverclass((HOST, lport), TCPRelay)
    server.rport = rport
    server.bufsize = options.bufsize
    server.serial = serial
    servers.append(server)

# Serve until interrupted; the bare except turns Ctrl-C (or any select
# failure) into a clean shutdown of the loop.
alive = True
while alive:
    try:
        rl, wl, xl = select.select(servers, [], [])
        for server in rl:
            server.handle_request()
    except:
        alive = False