Android Enterprise Device Enrollment Stuck with Node.js-Generated QR Code with Service Account Authentication - google-api-nodejs-client

As mentioned in the Google documentation, I have tested the following process:
URL to quick start: https://colab.research.google.com/github/google/android-management-api-samples/blob/master/notebooks/quickstart.ipynb#scrollTo=pjHfDSb8BoBP
Create Enterprise
Create Policy
Enroll the device
Then I used the Node.js client for the Android Management API to build a server-based solution, which works fine as documented for all the functions such as getting, creating, and deleting policies, devices, and enterprises.
The issue I am facing is with the QR code generated from the Node.js application: when I scan it, the device gets stuck at the system update step.
The following is my policy update function:
router.post('/update/:id', async function(req, res) {
  const { title, policy_body, update_mask, enroll_url } = req.body;
  // Call the Android Management API, then save the response to the database
  const amApiBody = {
    name: policy_body.name,
    updateMask: update_mask,
    requestBody: policy_body
  };
  const policy_update_response = await amApi.updatePolicy(amApiBody);
  const p = await policyModel.update(req.params.id, title, policy_update_response, enroll_url);
  res.json(p);
});
AmAPI file
this.updatePolicy = async function (body)
{
  const auth = new google.auth.GoogleAuth({
    scopes: ['https://www.googleapis.com/auth/androidmanagement'],
  });
  const authClient = await auth.getClient();
  google.options({ auth: authClient });
  // Patch the policy identified by body.name using the given updateMask and requestBody
  const res = await androidmanagement.enterprises.policies.patch(body);
  console.log('requestFinalBody=', body);
  return res.data;
}
The following is the policy data returned by running the function above:
policy_create_response = {
  name: 'enterprises/LC019rjnor/policies/policy1',
  version: '14',
  applications: [
    {
      packageName: 'com.google.samples.apps.iosched',
      installType: 'FORCE_INSTALLED',
      autoUpdateMode: 'AUTO_UPDATE_HIGH_PRIORITY'
    },
    {
      packageName: 'com.dekaisheng.courier',
      installType: 'FORCE_INSTALLED',
      autoUpdateMode: 'AUTO_UPDATE_HIGH_PRIORITY'
    }
  ],
  keyguardDisabledFeatures: [ 'KEYGUARD_DISABLED_FEATURE_UNSPECIFIED' ],
  defaultPermissionPolicy: 'GRANT',
  uninstallAppsDisabled: true,
  keyguardDisabled: true,
  tetheringConfigDisabled: true,
  dataRoamingDisabled: true,
  networkEscapeHatchEnabled: true,
  bluetoothDisabled: true,
  debuggingFeaturesAllowed: true,
  funDisabled: true,
  kioskCustomLauncherEnabled: true
}
Note: I exported the following variable in the terminal before running the app; auth.json is the service account credential file.
export GOOGLE_APPLICATION_CREDENTIALS="/Users/Mac/Projects/wajid/mdm/server/env/auth.json"
Thanks for the help in advance

I figured out that in the Node.js API call I was passing the wrong property name for the policy value in the request body.
Code before fix:
parent: this.getParent(policyName),
requestBody: {
  "name": "my_policy"
}
Code after fix:
parent: this.getParent(policyName),
requestBody: {
  "policyName": "my_policy"
}

Related

POST data to Google Sheet web app from AWS Lambda

CURRENTLY
I have a Google Sheets Apps Script 'web app'.
Script in Google Sheets:
function doPost(e) {
  const ss = SpreadsheetApp.getActiveSpreadsheet();
  const sheet = ss.getSheetByName("Sheet1");
  sheet.getRange("A1").setValue("Hello!");
  return "Success!";
}
Google Apps Script Web App Config:
Execute as: Me // or as User. I've tried both.
Who has access: Anyone within MyOrganisation
I want to make a POST request to the above Web App from AWS Lambda.
AWS Lambda .js:
const { GoogleSpreadsheet } = require("google-spreadsheet");
const HTTPS = require("https");
const doc = new GoogleSpreadsheet({spreadsheetId});
await doc.useServiceAccountAuth({
  client_email: process.env.GOOGLE_SERVICE_ACCOUNT_EMAIL,
  private_key: process.env.GOOGLE_PRIVATE_KEY.replace(/\\n/g, "\n"),
});
let token = doc["jwtClient"]["credentials"]["access_token"];
const response = await new Promise((resolve, reject) => {
  const options = {
    host: 'script.google.com',
    path: "/macros/s/{myscriptid}/exec", //<-- my web app path!
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': "Bearer " + token
    }
  };
  // create the request object with the callback that receives the result
  const req = HTTPS.request(options, (res) => {
    resolve(JSON.stringify(res.statusCode));
  });
  // handle the possible errors
  req.on('error', (e) => {
    reject(e.message);
  });
  // do the request (data is the JSON payload, defined elsewhere)
  req.write(JSON.stringify(data));
  // finish the request
  req.end();
});
console.log("response:" + JSON.stringify(response));
GCP Service Account
I have a GCP Service Account, with permission to Google Sheets API, and otherwise unrestricted access.
This Service account has EDIT access to the Google Sheet with the doPost(e) script.
Token Output:
"jwtClient": {
"_events": {},
"_eventsCount": 0,
"transporter": {},
"credentials": {
"access_token": "somelongvalue...............", //<-- what I use
"token_type": "Bearer",
"expiry_date": 1661662492000,
"refresh_token": "jwt-placeholder"
},
"certificateCache": {},
"certificateExpiry": null,
"certificateCacheFormat": "PEM",
"refreshTokenPromises": {},
"eagerRefreshThresholdMillis": 300000,
"forceRefreshOnFailure": false,
"email": "serviceaccount#appspot.gserviceaccount.com",
"key": "-----BEGIN PRIVATE KEY-----\nsomelongvalue=\n-----END PRIVATE KEY-----\n",
"scopes": [
"https://www.googleapis.com/auth/spreadsheets"
],
"subject": null,
"gtoken": {
"key": "-----BEGIN PRIVATE KEY-----\nsomelongvalue=\n-----END PRIVATE KEY-----\n",
"rawToken": {
"access_token": "somelongvalue...............",
"expires_in": 3599,
"token_type": "Bearer"
},
"iss": "serviceaccount#appspot.gserviceaccount.com",
"sub": null,
"scope": "https://www.googleapis.com/auth/spreadsheets",
"expiresAt": 1661662492000
}
}
ISSUE
Current response:
response:"401"
I cannot find any Google documentation on how to set up the headers to authenticate a request (from my service account) to my organisation-restricted web app.
When the web app is open to "Anyone" it runs fine, but as soon as I restrict it to MyOrganisation, I struggle to find a way to authenticate my POST request.
HELP!
How do I set up a POST request to my Google Sheets web app such that it can be protected by authentication? Right now, I'd be happy to find ANY means to authenticate this request (not necessarily a service account) that doesn't leave it completely open to the public.
Should I use this hack?
One idea I had was to put a "secret" into my Lambda function and then make the web app public. The web app would check the secret and, if it matched, would execute the function (a minimal sketch follows).
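A minimal sketch of that shared-secret check on the Apps Script side might look like this; the SHARED_SECRET property name and the payload shape are illustrative assumptions, not part of the original post:
function doPost(e) {
  const payload = JSON.parse(e.postData.contents);
  // SHARED_SECRET is a script property you set yourself (illustrative name)
  const expected = PropertiesService.getScriptProperties().getProperty("SHARED_SECRET");
  if (payload.secret !== expected) {
    return ContentService.createTextOutput("Forbidden");
  }
  const sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("Sheet1");
  sheet.getRange("A1").setValue("Hello!");
  return ContentService.createTextOutput("Success!");
}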
Modification points:
In order to access a Web App using an access token with a script, the token must include one of the Drive API scopes, such as https://www.googleapis.com/auth/drive.readonly or https://www.googleapis.com/auth/drive. Ref
In your script, the access token is retrieved using google-spreadsheet. Looking at the source of google-spreadsheet, it only uses the https://www.googleapis.com/auth/spreadsheets scope. Ref
I think this is the reason for your current issue. If my understanding is correct, how about the following modification? In this modification, the access token is retrieved from the service account using googleapis for Node.js. Ref
Modified script:
Google Apps Script side:
function doPost(e) {
  const ss = SpreadsheetApp.getActiveSpreadsheet();
  const sheet = ss.getSheetByName("Sheet1");
  sheet.getRange("A1").setValue("Hello!");
  return ContentService.createTextOutput("Success!"); // Modified
}
When you modify the Google Apps Script, please deploy it as a new version so that the modified script is reflected in the web app. Please be careful about this.
You can see the details of this in the report "Redeploying Web Apps without Changing URL of Web Apps for new IDE".
Node.js side:
const { google } = require("googleapis");
const HTTPS = require("https");

const auth = new google.auth.JWT(
  "###", // Please set client_email here.
  null,
  "###", // Please set private_key here. When you set the private_key of the service account, please include \n.
  ["https://www.googleapis.com/auth/drive.readonly"],
  null
);

function req(token) {
  return new Promise((resolve, reject) => {
    const data = { key1: "value1" }; // Please set your value.
    const options = {
      host: "script.google.com",
      path: "/macros/s/{myscriptid}/exec", //<-- my web app path!
      method: "POST",
      headers: { Authorization: "Bearer " + token },
    };
    const req = HTTPS.request(options, (res) => {
      if (res.statusCode == 302) {
        // Web Apps respond with a redirect; follow it to retrieve the actual response body.
        HTTPS.get(res.headers.location, (res) => {
          if (res.statusCode == 200) {
            res.setEncoding("utf8");
            res.on("data", (r) => resolve(r));
          }
        });
      } else {
        res.setEncoding("utf8");
        res.on("data", (r) => resolve(r));
      }
    });
    req.on("error", (e) => reject(e.message));
    req.write(JSON.stringify(data));
    req.end();
  });
}

auth.getAccessToken().then(({ token }) => {
  req(token).then((e) => console.log(e)).catch((e) => console.log(e));
});
When this script is run and the web app is correctly deployed, the web app's script executes and Success! is returned.
Note:
If this modified script does not work for your web app setup, please check the following:
Confirm again that your service account can access the spreadsheet.
Share the spreadsheet with the email address of the service account. From the Google Apps Script you showed, it appears to be a container-bound script of the spreadsheet.
Make sure the latest script is reflected in the web app.
When you modify the Google Apps Script, please deploy it as a new version so that the modified script is reflected in the web app. Please be careful about this.
You can see the details of this in the report "Redeploying Web Apps without Changing URL of Web Apps for new IDE".
When you set the private_key of the service account, please include \n.
References:
Web Apps
Taking advantage of Web Apps with Google Apps Script
Added:
If you want to put the value into the spreadsheet directly using the Sheets API via the google-spreadsheet module, you can also use the following script.
const { GoogleSpreadsheet } = require("google-spreadsheet");

const sample = async () => {
  const doc = new GoogleSpreadsheet("###"); // Please set your Spreadsheet ID.
  await doc.useServiceAccountAuth({
    client_email: process.env.GOOGLE_SERVICE_ACCOUNT_EMAIL,
    private_key: process.env.GOOGLE_PRIVATE_KEY,
  });
  await doc.loadInfo();
  const sheet = doc.sheetsByTitle["Sheet1"];
  await sheet.loadCells("A1");
  sheet.getCell(0, 0).value = "Hello!";
  await sheet.saveUpdatedCells();
};
sample();
In this case, your service account must be able to access the spreadsheet. Please be careful about this.

Firestore emulator for testing security rules - running the tests

I have installed the emulator following the instructions in the linked documentation and I can start it, so far so good.
After piecing together some code from here and there, I have written my first test:
import * as firebasetesting from '@firebase/testing';
import * as firebase from 'firebase';
import * as fs from 'fs';

const projectId = 'my-firebase-project';
const rules = fs.readFileSync('firestore.rules', 'utf8');

beforeAll(async () => {
  // Make your test app load your Firestore rules
  await firebasetesting.loadFirestoreRules({ projectId, rules });
});

beforeEach(async () => {
  // Reset the data in the test database
  await firebasetesting.clearFirestoreData({ projectId });
});

after(async () => {
  // Shut down all testing Firestore applications after testing is done.
  await Promise.all(firebasetesting.apps().map(app => app.delete()));
});

describe("TRACKERS AND ALLIES", () => {
  it('TRACKER UP', async () => {
    let user = { username: "Bob", uid: 'bobuid' };
    let target = { username: "Alice", uid: 'aliceuid' };
    const auth = { uid: user.uid, token: { isadmin: false } };
    const app = firebasetesting.initializeTestApp({ projectId, auth }).firestore();
    const ref = app.doc('users/' + user.uid + '/contact/' + target.uid);
    await firebasetesting.assertSucceeds(ref.update({ up: true, username: target.uid, timestamp: firebase.firestore.FieldValue.serverTimestamp() }));
  });
})
And my question is very simple: how do I run it?
EDIT: I may just add that I am new to Firestore and JavaScript in general... The link above simply states:
After running a suite of tests, you can access test coverage reports that show how each of your security rules was evaluated.
So I guess it must be simple, but I cannot find the "run" command anywhere...
If you have a Node.js script, run it with node your-script.js. You must have Node.js installed.
If you want to run the script along with the emulator, and shut the emulator down after the script finishes, the page you linked to says:
In many cases you want to start the emulator, run a test suite, and
then shut down the emulator after the tests run. You can do this
easily using the emulators:exec command:
firebase emulators:exec --only firestore "./my-test-script.sh"
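Note that the test shown above uses describe/it blocks, so in practice it is run through a test runner rather than plain node. A hedged example of wiring that into emulators:exec (the runner choice and the file path are assumptions; also note the sample mixes Jest-style beforeAll with Mocha-style after, so the hooks would need to match whichever runner you pick):
# Assumes Mocha is installed as a dev dependency (npm i -D mocha); the test file path is illustrative
firebase emulators:exec --only firestore "npx mocha ./test/firestore.rules.test.js"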
If you found the documentation confusing or incomplete, you should use the "send feedback" button at the top right of the page.

I'm getting a Web Push Error Code Status 403, which is driving me nuts, because it's telling me to use Firebase. What's going on?

I keep getting a WebPush Error (Status Code 403) from Chrome for a PWA I'm building, and the body says that I need to use the VAPID server key from the 'Firebase console', but I used Node's web-push library to generate the VAPID keys. What's going on? Do I have to use Firebase to build PWAs in Chrome?
Here's the Error Message I'm getting from the browser when I send a push notification:
name: 'WebPushError',
message: 'Received unexpected response code',
statusCode: 403,
headers:
{ 'content-type': 'text/plain; charset=utf-8',
'x-content-type-options': 'nosniff',
'x-frame-options': 'SAMEORIGIN',
'x-xss-protection': '0',
date: 'Thu, 31 Oct 2019 19:59:02 GMT',
'content-length': '194',
'alt-svc':
'quic=":443"; ma=2592000; v="46,43",h3-Q049=":443"; ma=2592000,h3-Q048=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443"; ma=2592000',
connection: 'close' },
body:
'the key in the authorization header does not correspond to the sender ID used to subscribe this user. Please ensure
you are using the correct sender ID and server Key from the Firebase console.\n',
endpoint:
'https://fcm.googleapis.com/fcm/send/exXmW3OFOTY:APA91bEKW_vxnvOZohog34pprDH6XvBsxtfnUpBdYY7z_7q4GZGa4wrmtBBg4kTRwLtgy3lNpCs8SMlvOr4nY-Fu_4zUus6zEJh69581Ier14QZxkEEVXyZHKRaZcmHa3zmbZRB4VD7Z
And here's the code that is running my Node server:
//Handle imports
const express = require('express')
const cors = require('cors')
const bodyParser = require('body-parser')
const webPush = require('web-push')
const vapidKeys = require('./vapid.json')
const path = require('path')
//Setup application
const app = express()
app.use(cors())
app.use(bodyParser.json())
app.use('/static', express.static(path.join(__dirname,'frontend')))
const port = 8080
//Set up webpush
webPush.setVapidDetails(
'mailto: <email>',
vapidKeys.publicKey,
vapidKeys.privateKey
)
const pushOptions = {
  proxy: '<proxy>'
}
//setup Push Notification
const sendNotification = (subscription, dataToSend = '') => {
  webPush.sendNotification(subscription, dataToSend, pushOptions)
    .catch(error => {
      console.log('Damn it: ', error.message, '||', error)
    })
}
//Server Routes Defined
app.get('/', (req, res) => res.sendFile('index.html', { root: './' }))
//Setup Database Methods
const dummyDb = {subscription: null}
const saveToDatabase = async subscription => {
dummyDb.subscription = subscription
}
//Other Server Routes
app.post('/save-subscription', async (req, res) => {
const subscription = req.body
await saveToDatabase(subscription)
console.log('subscribed!')
res.json({message: 'success'})
})
app.get('/send-notification', (req, res) => {
const subscription = dummyDb.subscription
const message = 'hello world'
sendNotification(subscription, message)
res.json({message: dummyDb.subscription})
})
app.listen(port, () => console.log(`Example app listening on port ${port}!`))
I have a Node.js/Express, Postgres, Angular 8 app.
I had the same problem and got it working by adding "gcm_sender_id" to the manifest.webmanifest file (or manifest.json). I also used the Firebase-generated public and private keys.
Your gcm_sender_id is the sender ID of your Google Cloud / Firebase project.
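As a rough illustration, the manifest entry would look something like this (the values are placeholders to replace with your own):
{
  "name": "My PWA",
  "gcm_sender_id": "<your-firebase-sender-id>"
}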
Same situation, and I almost lost my sanity. I tried inserting gcm_sender_id with a Firebase sender ID and it finally worked. I didn't have a Firebase account, but I was able to create a project in seconds and my sender ID was ready to be used in the messaging settings.
One caveat: after modifying the manifest.json (in my case, in the root folder), I needed to unregister the current service worker and restart my React project. Then I went through all the steps again: asking for permission, subscribing the user, and finally triggering a push notification.
During my heated research for a solution, I found that gcm_sender_id is also used to send and validate push messages from other browsers. According to Google Web Updates:
For Chrome prior to version 52, Opera Android and the Samsung Browser,
you're also still required to include a 'gcm_sender_id' in your web
app's manifest.json. The API key and sender ID are used to check
whether the server making the requests is actually allowed to send
messages to the receiving user.

Puppeteer's page.cookies() not retrieving all cookies shown in the Chrome dev tools

Using puppeteer, I am trying to retrieve all cookies for a specific web site (i.e. https://google.com) from Node.js.
My code is:
// Launch browser and open a new page
const browser = await puppeteer.launch({ headless: true, args: ['--disable-dev-shm-usage'] });
const page = await browser.newPage();
await page.goto(url, { waitUntil: 'networkidle2' });
var cookies = await page.cookies();
console.log(cookies);
await browser.close();
It only retrieves 2 cookies, named 1P_JAR and NID. However, when I open the Chrome Dev tools, it shows a lot more.
I tried using the Chrome Dev Tools directly instead of puppeteer but I am getting the same results.
Is there another function I should call? Am I doing it correctly?
The page.cookies() call only gets cookies that are available to JavaScript applications inside the browser, and not the ones marked httpOnly, which you see in the Chrome DevTools. The solution is to ask for all available cookies through the Devtools protocol and then filter for the site you're interested in.
var data = await page._client.send('Network.getAllCookies');
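A short sketch of the filtering step mentioned above (the domain check is just an example):
// data.cookies holds every browser cookie; keep only the ones for the site of interest
const siteCookies = data.cookies.filter(cookie => cookie.domain.includes('google.com'));
console.log(siteCookies);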
You can utilise Chrome DevTools Protocol -> getAllCookies
To get all browser cookies, regardless of any flags.
const client = await page.target().createCDPSession();
const cookies = (await client.send('Network.getAllCookies')).cookies;
This will also play nicely with TypeScript and TSLint, since something like
const cookies = await page._client.send('Network.getAllCookies');
Will raise an error TS2341: Property '_client' is private and only accessible within class 'Page'..
Thanks @try-catch-finally. I got it resolved, and it was a simple rookie mistake.
I was comparing cookies in my own Google Chrome instance with the Puppeteer instance. However, in my instance, I was logged in to my Google account and Puppeteer (obviously) was not.
Google uses 2 cookies when you are NOT logged in and 12 when you are logged in.
If you use Playwright in place of Puppeteer, httponly cookies are readily accessible:
const { chromium } = require('playwright');

(async () => {
  const browser = await chromium.launch()
  const context = await browser.newContext()
  const page = await context.newPage()
  await page.goto('https://google.com', { waitUntil: 'networkidle' })
  let allCookies = await context.cookies()
  console.log(allCookies)
})();
returns:
[
{
sameSite: 'None',
name: '1P_JAR',
value: '2021-01-27-19',
domain: '.google.com',
path: '/',
expires: 1614369040.389115,
httpOnly: false,
secure: true
},
{
sameSite: 'None',
name: 'NID',
value: '208=VXtmbaUL...',
domain: '.google.com',
path: '/',
expires: 1627588239.572781,
httpOnly: true,
secure: false
}
]
Just use await page.goto('https://google.com', { waitUntil: 'networkidle2' }) and you can get all the related cookies.

google-api-nodejs-client: how to call google+ domain api locally? (by the plusDomains.media.insert)

I am going to use the Node.js Google API client (google-api-nodejs-client) to post a photo to my Google+. (I have listed all my code at the end of this post.)
Let me introduce a little bit of background:
I have created a project on console.developers.google.com
I have enabled the Google+ Domains API for this project.
I have created credentials for this project as well (it is an OAuth 2.0 client ID).
I have a little experience with the client (google-api-nodejs-client), and I can post images and files to my Google Drive with it.
However, posting a photo to Google+ is different; the auth is the key difference. I have tried several different ways, but none of them works.
The API always returns this:
{ [Error: Forbidden]
code: 403,
errors: [ { domain: 'global', reason: 'forbidden', message: 'Forbidden' } ] }
I also found this:
Warning: The Google+ Sign-In button and the plus.login scope used by
Google+ Sign-In, are not currently supported for use with the Google+
Domains API. Requests that are made to the Google+ Domains API using
an authentication token granted for the
www.googleapis.com/auth/plus.login scope, or generated by the
Google+ Sign-In button, will fail.
If it doesn't support the sign-in button, what does it support?
This page tells me to add domain delegation (https://developers.google.com/+/domains/authentication/delegation), but I haven't pushed my program to any server; I am just trying to run it locally (a rough sketch of that delegation approach appears after the code below).
I was wondering whether it is possible to use this client to post a photo to Google+ by running a Node.js program locally.
var CLIENT_ID = "xxxxxxx.apps.googleusercontent.com";
var CLIENT_SECRET = "xxxxxxxx";
var REDIRECT_URL = "https://xxxxxxx";
var readline = require('readline');
var async = require('async');
var google = require('googleapis');
var request = require('request');
var OAuth2 = google.auth.OAuth2;
var oauth2Client = new OAuth2(CLIENT_ID, CLIENT_SECRET, REDIRECT_URL);
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
function getAccessToken (oauth2Client, callback) {
  // generate consent page url
  var scopes = [
    'https://www.googleapis.com/auth/plus.me',
    'https://www.googleapis.com/auth/plus.stream.read',
    'https://www.googleapis.com/auth/plus.stream.write',
    'https://www.googleapis.com/auth/plus.circles.read',
    'https://www.googleapis.com/auth/plus.circles.write',
    'https://www.googleapis.com/auth/plus.media.upload'
  ];
  var url = oauth2Client.generateAuthUrl({
    access_type: 'offline', // 'online' (default) or 'offline' (gets refresh_token)
    scope: scopes, // If you only need one scope you can pass it as string,
    key: 'p7UALH460Deqodhvb2zESYya'
  });
  console.log('Visit the url: ', url);
  rl.question('Enter the code here:', function (code) {
    // request access token
    oauth2Client.getToken(code, function (err, tokens) {
      if (err) {
        return callback(err);
      }
      // set tokens to the client
      // TODO: tokens should be set by OAuth2 client.
      oauth2Client.setCredentials(tokens);
      console.dir(tokens);
      callback();
    });
  });
}
getAccessToken(oauth2Client, function () {
  var plusDomains = google.plusDomains({ version: 'v1', auth: oauth2Client });
  var requestObj = request({ url: 'http://asset1.cxnmarksandspencer.com/is/image/mands/2643f540b32fe8c6cccdec95b3a2c5239166232f?$editorial_430x430$' });
  const Readable = require('stream').Readable;
  var imageStream = new Readable().wrap(requestObj);
  plusDomains.media.insert({
    userId: 'me',
    collection: 'cloud',
    resource: {
      name: 'testimage.png',
      mimeType: 'image/png'
    },
    media: {
      mimeType: 'image/png',
      body: imageStream
    },
    access: { domainRestricted: "true" }
  }, callbackFn);
  function callbackFn(argument) {
    console.dir(argument);
  }
});
Thank you very much!
Peter
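As a footnote on the domain delegation mentioned above: with this client it would typically mean authorizing a service account JWT that impersonates a domain user, roughly as sketched below. The key path, account emails, and scope are illustrative assumptions, and delegation still has to be granted by a domain admin.
// Sketch only: service account JWT with domain-wide delegation (illustrative values)
var google = require('googleapis');
var jwtClient = new google.auth.JWT(
  'service-account@example-project.iam.gserviceaccount.com', // illustrative client email
  '/path/to/service-account-key.pem',                        // illustrative key file
  null,
  ['https://www.googleapis.com/auth/plus.media.upload'],
  'user@yourdomain.com'                                      // the domain user to impersonate (illustrative)
);
jwtClient.authorize(function (err, tokens) {
  if (err) { return console.error(err); }
  var plusDomains = google.plusDomains({ version: 'v1', auth: jwtClient });
  // plusDomains.media.insert(...) as in the question above
});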