I'm adding X-Hub verification to my messenger app. I learned about it here: https://developers.facebook.com/docs/messenger-platform/webhook-reference
I've successfully gotten verification to work with simple text messages, but when I send a location instead of a text message, verification fails because the hash I generate and the hash Facebook gives me in the header do not match. Here is my verification code:
module.exports.requestIsValid = function(event) {
  if (event['headers']['X-Hub-Signature']) {
    var sha = event['headers']['X-Hub-Signature'];
    var body = JSON.stringify(event.body);
    return sha == `sha1=${crypto.createHmac('sha1', config.APP_SECRET).update(body).digest('hex')}`;
  }
  return false; // this return is never called; I know the first return is the one returning false when it should be true
};
Here is an example of a regular text message payload:
{"body":{"object":"page","entry":[{"id":"1366222643461024","time":1499114399253,"messaging":[{"sender":{"id":"1582085681843981"},"recipient":{"id":"1366222643461024"},"timestamp":1499114399084,"message":{"mid":"mid.$cAASAZhi0_wRjO3OtbFdCi5lV2qe4","seq":52192,"text":"Test"}}]}]},"method":"POST","principalId":"offlineContext_authorizer_principalId","headers":{"X-Real-Ip":"173.252.88.182","X-Forwarded-For":"173.252.88.182","Host":"test.localtunnel.me","X-Forwarded-Proto":"https","X-Nginx-Proxy":"true","Connection":"close","Content-Length":"270","Accept":"*/*","Accept-Encoding":"deflate, gzip","Content-Type":"application/json","X-Hub-Signature":"sha1=0f51d788fe5f1111846097ad016728cdcd06029f"},"query":{},"path":{},"identity":{"accountId":"offlineContext_accountId","apiKey":"offlineContext_apiKey","caller":"offlineContext_caller","cognitoAuthenticationProvider":"offlineContext_cognitoAuthenticationProvider","cognitoAuthenticationType":"offlineContext_cognitoAuthenticationType","sourceIp":"127.0.0.1","user":"offlineContext_user","userAgent":"","userArn":"offlineContext_userArn"},"stageVariables":{},"isOffline":true}
And here is an example of a payload with a location:
{"body":{"object":"page","entry":[{"id":"1366222643461024","time":1499114451619,"messaging":[{"sender":{"id":"1582085681843981"},"recipient":{"id":"1366222643461024"},"timestamp":1499114451469,"message":{"mid":"mid.$cAASAZhi0_wRjO3R6DVdCi8v9yqk0","seq":52196,"attachments":[{"title":"Brandon's Location","url":"https://l.facebook.com/l.php?u=https%3A%2F%2Fwww.bing.com%2Fmaps%2Fdefault.aspx%3Fv%3D2%26pc%3DFACEBK%26mid%3D8100%26where1%3D35.142236316764%252C%2B-106.53531087607%26FORM%3DFBKPL1%26mkt%3Den-US&h=ATOu8uYrLDiFl6wG8RVfhXvwkMl7uB_l2MHqB_uKLhk8qC9p1ua0EOLpGkznVX7Y8YfxSXP7vDuAR7swPmDCw1esH2bwKhNNsZKxVPC2ViC2AFMO_g&s=1&enc=AZMYxff8btvCZWHtzUR4oFL7K2Mg6nXM_O_tRXXL-L8z508UAOauiSRztoRvWdlGCWU1dNRdNK1ls2CGulM8lvzR","type":"location","payload":{"coordinates":{"lat":35.142236316764,"long":-106.53531087607}}}]}}]}]},"method":"POST","principalId":"offlineContext_authorizer_principalId","headers":{"X-Real-Ip":"173.252.90.239","X-Forwarded-For":"173.252.90.239","Host":"test.localtunnel.me","X-Forwarded-Proto":"https","X-Nginx-Proxy":"true","Connection":"close","Content-Length":"911","Accept":"*/*","Accept-Encoding":"deflate, gzip","Content-Type":"application/json","X-Hub-Signature":"sha1=34f23436b2744b9b0cc8776922e7386c454786db"},"query":{},"path":{},"identity":{"accountId":"offlineContext_accountId","apiKey":"offlineContext_apiKey","caller":"offlineContext_caller","cognitoAuthenticationProvider":"offlineContext_cognitoAuthenticationProvider","cognitoAuthenticationType":"offlineContext_cognitoAuthenticationType","sourceIp":"127.0.0.1","user":"offlineContext_user","userAgent":"","userArn":"offlineContext_userArn"},"stageVariables":{},"isOffline":true}
Happy to provide extra info if needed.
UPDATE:
Upon further inspection, it appears that verification only fails when there is an "attachments" field in the payload. So it also fails when I send a picture or a GIF or something like that.
You should apply the SHA-1 algorithm to the raw request body. If you parse the body and convert it back to JSON, the re-serialized string may not match the exact bytes Facebook signed, so the signatures will not match.
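That also lines up with the attachments observation (this explanation is my assumption, not something confirmed in the thread): attachment URLs arrive with escaped slashes in the raw JSON, and JSON.stringify on the already-parsed body does not reproduce those escapes, so the re-serialized string differs from what was signed. A minimal sketch of the effect:

// Illustration only: re-serializing a parsed body need not reproduce the signed bytes.
const crypto = require('crypto');

const raw = '{"url":"https:\\/\\/l.facebook.com\\/l.php"}';  // raw payload with escaped slashes
const reserialized = JSON.stringify(JSON.parse(raw));        // '{"url":"https://l.facebook.com/l.php"}'

const sign = (s) => crypto.createHmac('sha1', 'APP_SECRET').update(s).digest('hex');

console.log(raw === reserialized);             // false
console.log(sign(raw) === sign(reserialized)); // false: the hashes no longer match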
Your bodyParser.json call should expose the raw body:
bodyParser.json({
  verify(req, res, buf) {
    req.rawBody = buf;
  },
})
Here is a middleware that I've written. It uses the crypto module to generate the SHA-1 HMAC:
fbWebhookAuth: (req, res, next) => {
  // Compute the HMAC over the raw body captured by the verify hook above
  const hmac = crypto.createHmac('sha1', process.env.FB_APP_SECRET);
  hmac.update(req.rawBody, 'utf-8');
  if (req.headers['x-hub-signature'] === `sha1=${hmac.digest('hex')}`) next();
  else res.status(400).send('Invalid signature');
}
Finally, in your route you can use it as:
app.post('/webhook/facebook', middlewares.fbWebhookAuth, facebook.webhook);
If you are using Express.js:
app.use(express.json({
  verify: (req, res, buf) => {
    req.rawBody = buf;
  }
}))
Your middleware:
const fbWebhookAuth = (req, res, next) => {
  const hmac = crypto.createHmac('sha1', process.env.FB_APP_SECRET);
  hmac.update(req.rawBody);
  if (req.headers['x-hub-signature'] === `sha1=${hmac.digest('hex')}`) next();
  else res.status(400).send('Invalid signature');
}
Your endpoint:
app.post('/webhook/Facebook', fbWebhookAuth, facebook.webhook);
Facebook now uses an SHA-256 HMAC, sent in the X-Hub-Signature-256 header. To verify against it, follow these steps:
A. Make your body parser return the raw body as well:
webapi.use(bodyParser.json({
  verify: function (req, res, buf, encoding) {
    req.rawBody = buf;
  }
}));
B. Add a middleware that checks the signature:
fbVerify = (req, res, next) => {
  req.isXHubValid = false;
  try {
    if (req.headers['x-hub-signature-256']) {
      let hmac = crypto.createHmac('sha256', process.env.fbAppSecret);
      hmac.update(req.rawBody, 'utf-8');
      if (req.headers['x-hub-signature-256'] === `sha256=${hmac.digest('hex')}`)
        req.isXHubValid = true;
    } else if (req.headers['x-hub-signature']) { // keep this branch for backward compatibility
      let hmac = crypto.createHmac('sha1', process.env.fbAppSecret);
      hmac.update(req.rawBody, 'utf-8');
      if (req.headers['x-hub-signature'] === `sha1=${hmac.digest('hex')}`)
        req.isXHubValid = true;
    }
  } catch (err) {
    console.error(err);
  }
  next();
}
C. In your webhook handler:
app.post('/webhook/facebook', fbVerify, (req, res) => {
  if (!req.isXHubValid) {
    return res.sendStatus(404);
  }
  // continue here, the request is valid
});
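One optional hardening not covered in the answers above: comparing the signature strings with === is in principle open to timing attacks. Node's crypto.timingSafeEqual gives a constant-time comparison; a small sketch (the helper name is mine):

const crypto = require('crypto');

// Constant-time comparison of the received vs. expected signature strings.
// timingSafeEqual throws if the lengths differ, so check the length first.
function signaturesMatch(received, expected) {
  const a = Buffer.from(received || '', 'utf8');
  const b = Buffer.from(expected, 'utf8');
  return a.length === b.length && crypto.timingSafeEqual(a, b);
}

// e.g. signaturesMatch(req.headers['x-hub-signature-256'], `sha256=${hmac.digest('hex')}`)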
I'm struggling with migrating a HAPI function that verifies a JWT token and then makes a database call using the decoded credentials.
The problem is that jwt.verify uses a callback, but Hapi and Hapi.MySQL2 have both been updated to use async functions.
The main function is as follows:
exports.LoadAuth = (req, h) => {
  let token = req.headers.authorization.split(' ')[1]
  VerifyToken(token, async function (err, decoded) {
    if (!err) {
      let sql = '#SELECT STATEMENT USING decoded.id'
      const [data] = await mfjobs.query(sql, decoded.id)
      let auids = []
      data.forEach(function (ag) {
        auids.push(ag.Name)
      })
      auids = base64(auids.toString())
      return auids
    } else {
      return {message: 'Not Authorised'}
    }
  })
}
The VerifyToken function is as follows:
VerifyToken = (tok, done) => {
  jwt.verify(tok, Buffer.from(secret, 'base64'), function (err, decTok) {
    if (err) {
      done(err)
    } else {
      done(null, decTok)
    }
  })
}
Debugging shows that everything above works up to the point where the data should be returned to the front end, at which point I get an ERROR 500.
I know the issue is with the VerifyToken function, because if I omit it and hard-code the decoded.id into the query, the correct data reaches the front end.
Any pointers?
You can convert your VerifyToken function to return a Promise.
let VerifyToken = (tok) => {
  return new Promise((resolve, reject) => {
    jwt.verify(tok, Buffer.from(secret, 'base64'), function (err, decTok) {
      if (err) {
        reject(err)
      } else {
        resolve(decTok)
      }
    })
  });
}
Now you have a function that you can use with async/await notation and that internally performs the JWT validation via its callback.
Then we can slightly modify your controller as follows.
exports.LoadAuth = async (req, h) => {
  let token = req.headers.authorization.split(' ')[1];
  try {
    let decoded = await VerifyToken(token);
    let sql = '#SELECT STATEMENT USING decoded.id';
    const [data] = await mfjobs.query(sql, decoded.id);
    let auids = [];
    data.forEach(function (ag) {
      auids.push(ag.Name)
    });
    auids = base64(auids.toString());
    return auids
  } catch (e) {
    return {message: 'Not Authorised'}
  }
}
We just converted your handler to an async function, and since we already have a VerifyToken function that returns a promise, we can call it with the await operator.
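As an aside (not part of the original answer), if the jwt object here is the jsonwebtoken package (an assumption, since the question does not name the library), Node's built-in util.promisify can produce the same wrapper without writing the Promise by hand, because the callback is the last argument of jwt.verify:

const util = require('util');
const jwt = require('jsonwebtoken'); // assumption: the jwt in the question is jsonwebtoken

// Promisified version of jwt.verify
const verifyAsync = util.promisify(jwt.verify);

// Inside the async handler:
// const decoded = await verifyAsync(token, Buffer.from(secret, 'base64'));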
I have an upload working with superagent. It involves posting to the Cloudinary API. My question is: how do I do the same thing with axios? I'm not sure what superagent's .attach and .field correspond to in axios.
Basically, when I make the post request I need to attach all of these fields to the request or else I get a bad request, and I want to do this in axios rather than superagent, since I am switching over to axios.
Here are all the params:
const image = files[0];
const cloudName = 'tbaustin';
const url = `https://api.cloudinary.com/v1_1/${cloudName}/image/upload`;
const timestamp = Date.now()/1000;
const uploadPreset = 'cnh7rzwp';
const paramsStr = `timestamp=${timestamp}&upload_preset=${uploadPreset}ORor-6scjYwQGpNBvMW2HGMkc8k`;
const signature = sha1(paramsStr);
const params = {
  'api_key': '177287448318217',
  'timestamp': timestamp,
  'upload_preset': uploadPreset,
  'signature': signature
}
Here is the superagent post request:
let uploadRequest = superagent.post(url)
uploadRequest.attach('file', image);
Object.keys(params).forEach((key) => {
  uploadRequest.field(key, params[key]);
});
uploadRequest.end((err, res) => {
  if (err) {
    alert(err);
    return
  }
  // success handling omitted in the original post
});
You would need to use FormData as follows:
var url = `https://api.cloudinary.com/v1_1/${cloudName}/upload`;
var fd = new FormData();
fd.append("upload_preset", unsignedUploadPreset);
fd.append("tags", "browser_upload"); // Optional - add tag for image admin in Cloudinary
fd.append("signature", signature);
fd.append("file", file);
const config = {
  headers: { "X-Requested-With": "XMLHttpRequest" },
  onUploadProgress: function(progressEvent) {
    // Do something with the native progress event
  }
};
axios.post(url, fd, config)
  .then(function (res) {
    // File uploaded successfully
    console.log(res.data);
  })
  .catch(function (err) {
    console.error('err', err);
  });
See full example here
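To keep the shape of the original superagent code (iterating over the same params object defined in the question; params, image, and url are the names from the question), the translation to axios can also be written like this:

// Direct translation of the superagent calls:
//   .field(key, value)   ->  fd.append(key, value)
//   .attach('file', img) ->  fd.append('file', img)
const fd = new FormData();
Object.keys(params).forEach((key) => {
  fd.append(key, params[key]);
});
fd.append('file', image);

axios.post(url, fd)
  .then((res) => console.log(res.data))
  .catch((err) => alert(err));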
I am having a problem with waterlock-local-auth. I've been playing around with waterlock all day trying to figure out how to create a new user (with hashed password and all) and how to authenticate that user from a form on a server-side Sails.js view, but I have been completely unsuccessful. Below is the code in my LoginController that my login form posts to. Any help will be greatly appreciated. Thanks!
module.exports = {
  login: function(req, res) {
    var isAuthenticated = function(){...} // <-- Authenticated by waterlocks
    if (isAuthenticated) {
      res.view('home');
    } else {
      res.view('login', {errorMessage: "Invalid username or password"});
    }
  }
};
Ok, so basically I went with the solution posted here (Sails.js Waterlock /auth/register causes error 500). ;0)
module.exports = require('waterlock').waterlocked({
  // Endpoint for registering new users. Taken from: https://stackoverflow.com/questions/29944905/sails-js-waterlock-auth-register-causes-error-500/29949255#29949255
  register: function (req, res) {
    var params = req.params.all(),
        def = waterlock.Auth.definition,
        criteria = {},
        scopeKey = def.email !== undefined ? 'email' : 'username'; // Determines whether the credentials use a username or an email address.
    var attr = { password: params.password }
    attr[scopeKey] = params[scopeKey];
    criteria[scopeKey] = attr[scopeKey];
    waterlock.engine.findAuth(criteria, function (err, user) {
      if (user)
        return res.badRequest("User already exists");
      else
        waterlock.engine.findOrCreateAuth(criteria, attr, function (err, user) {
          if (err)
            return res.badRequest(err);
          delete user.password;
          return res.ok(user);
        });
    });
  }
});
When I try to add a review to my product from the front end, I get a 404 error for PUT http://localhost:3000/products. But I am able to add/update data using the following curl command against my routes:
curl --data "name=Product 1&description=Product 1 Description&shine=10&price=29.95&rarity=200&color=blue&faces=3" http://localhost:3000/products
My products router
// This handles retrieving of products
// Includes Express
var express = require('express');
// Initialize the router
var router = express.Router();
var moment = require('moment');
var _ = require('underscore');
var color = require('cli-color');
var mongoose = require('mongoose');
var Product = mongoose.model('Product');
var Review = mongoose.model('Review');
// Route middleware
router.use(function(req, res, next) {
console.log("Something is happening in products!!");
next();
});
// GET route for all Products
router.get('/', function (req, res, next) {
Product.find( function (err, products) {
if (err) {
return next(err);
}
res.json(products);
});
});
// POST route for adding a Product
router.post('/', function (req, res, next) {
var product = new Product (req.body);
product.save( function (err, post) {
if(err) {
return next(err);
}
res.json(product);
});
});
// Pre-loading product object
router.param('product', function (req, res, next, id) {
var query = Product.findById(id);
query.exec( function (err, product) {
if (err) {
return next(err);
}
if(!product) {
return next(new Error('can\'t find product'));
}
req.product = product;
return next();
})
});
// GET route for retrieving a single product
router.get('/:product', function (req, res, next) {
  req.product.populate('reviews', function (err, product) {
    if (err) {
      return next(err);
    }
    res.json(req.product);
  });
});
// POST route for creating a review
router.post('/:product/reviews', function (req, res, next) {
var review = new Review(req.body);
review.product = req.product;
review.save( function (err, review){
if (err) {
return next(err);
}
req.product.reviews.push(review);
req.product.save( function (err, review) {
if (err) {
return next(err);
}
res.json(review);
});
});
});
This code is taken from a tutorial on Thinkster for the MEAN stack.
Original Post
I am having trouble figuring out how to update an existing entry in my MongoDB database using a service I defined with ngResource in my Angular app. So far I have been unable to create a function that updates the back end after a user clicks my submit button. I have been looking for a solution for about two days but have not found one. I know the solution is similar to how I delete users in my Users controller below, but nothing I have tried has worked.
My Product Service
angular.module('gemStoreApp.productService', ['ngResource'])
  .factory('productsService', function($resource) {
    return $resource('/products/:id', {}, {
      'update': { method: 'PUT'}
    });
  });
My Product Detail
angular.module('gemStoreApp')
  .controller("ReviewCtrl", ['$scope', '$resource', 'productsService', function ($scope, $resource, productsService) {
    this.review = {};
    this.addReview = function(product){
      product.reviews.push(this.review);
      productsService.save({id: product._id}, function() {
        // I have tried .update, .$update, and .save methods
      });
      this.review = {};
    };
  }]);
I have verified that the product.reviews variable contains the update. Here is a sample of my JSON output from my console before and after adding the review:
Before the review is added to the front end
{"_id":"product_id","name":"Product 1","description":"Product 1 Description",...,"reviews":[{}]}
After the review is added to the front end
{"_id":"product_id","name":"Product 1","description":"Product 1 Description",...,"reviews":[{"stars":4,"body":"An Awesome review!","author":"user#domain.com","createdOn":1436963056994}]}
And I know that my productsService.save() function is being called as well, as I can put a console log in and see it run when I view in the browser.
My User's Controller
angular.module('gemStoreApp')
  .controller('UsersCtrl', ['$scope', '$http', 'usersService', function ($scope, $http, usersService) {
    $scope.users = {};
    $scope.users = usersService.query();
    $scope.remove = function(id) {
      var user = $scope.users[id];
      usersService.remove({id: user._id}, function() {
        $scope.users.splice(user, 1);
      });
    };
  }]);
My full source code is available on my Github page. Any help will be greatly appreciated.
I actually got it working in this plunker.
I took the same factory:
app.factory('productsService', function($resource) {
  return $resource('product/:id', {id: "@id"}, { // "@id" tells ngResource to read :id from the object being saved
    'update': { method: 'PUT'}
  });
});
Here is my controller:
$scope.products = productsService.query();
$scope.saveProduct = function(product){
  product.$update();
}
And here is how I pass the value in the HTML:
<div ng-repeat="product in products">
  <input type="text" ng-model="product.text">
  <button ng-click="saveProduct(product)">Update</button>
</div>
If you track the network requests in the JavaScript console, you will see a request PUT /product/id with the updated data.
Hope it helped. If you have any more questions, feel free to ask.
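One more thing worth checking against the router in the question (this is my observation, not part of the answer above): that Express router only defines GET and POST handlers, so a PUT will 404 no matter what the client sends. A minimal sketch of a PUT route that reuses the router.param('product') preloader; the field-merging approach is illustrative:

// Hypothetical PUT route for the products router shown in the question.
router.put('/:product', function (req, res, next) {
  // req.product was preloaded by router.param('product')
  req.product.set(req.body);
  req.product.save(function (err, product) {
    if (err) {
      return next(err);
    }
    res.json(product);
  });
});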
When I log in using the local strategy, subsequent requests that have the sessionAuth policy on them fail because req.session.authenticated is undefined. I've excerpted a portion of the login function from api/services/protocols/local.js and inserted a comment where I believe code is missing.
passport.validatePassword(password, function (err, res) {
  if (err) {
    return next(err);
  }
  if (!res) {
    req.flash('error', 'Error.Passport.Password.Wrong');
    return next(null, false);
  } else {
    ///// Shouldn't authenticated get set true here?
    ///// req.session.authenticated = true;
    return next(null, user);
  }
});
As suggested by Alberto Souza, the local strategy works if you change sessionAuth.js from:
module.exports = function(req, res, next) {
  if (req.session.authenticated) {
    return next();
  }
  return res.forbidden('You are not permitted to perform this action.');
};
to:
module.exports = function(req, res, next) {
  if (req.isAuthenticated()) {
    return next();
  }
  return res.forbidden('You are not permitted to perform this action.');
};
So the answer to my question seems to be that sessionAuth.js is part of the default Sails app generation and is not recreated when you run sails generate auth; it is therefore a change you need to make manually, which the documentation neglects to mention.