401 error accessing Cloudant from IBM Cloud Function - ibm-cloud

I have created a Cloudant service and credentials (1), along with a database and a couple of documents. I want to access that database from an IBM Cloud Function, so I created a function to do that. I tested the function by copying and pasting the credentials from (1) into the "Invoke with credentials" box (along with the db name and a valid docid). The function seems to take all that info correctly, but I get a 401 error... any help would be massively appreciated!
{
    "error": {
        "description": "couch returned 401",
        "errid": "non_200",
        "error": "unauthorized",
        "headers": {
            "cache-control": "must-revalidate",
            "content-type": "application/json",
            "date": "Sat, 30 May 2020 16:37:25 GMT",
            "statusCode": 401,
            "strict-transport-security": "max-age=31536000",
            "uri": "xxxxxxx",
            "via": "1.1 lb1.bm-cc-eu-gb-04 (Glum/1.89.6)",
            "www-authenticate": "Basic realm=\"Cloudant Private Database\"",
            "x-cloudant-action": "cloudantnosqldb.any-document.read",
            "x-cloudant-backend": "bm-cc-eu-gb-04",
            "x-cloudant-request-class": "lookup",
            "x-content-type-options": "nosniff",
            "x-couch-request-id": "03e7fe91bb"
        },
        "message": "_reader access is required for this request",
        "name": "Error",
        "reason": "_reader access is required for this request",
        "request": {
            "headers": {
                "accept": "application/json",
                "content-type": "application/json"
            },
            "method": "GET",
            "uri": "xxxxx"
        },
        "scope": "couch",
        "stack": "Error: _reader access is required for this request\n    at Object.clientCallback (/node_modules/@cloudant/cloudant/node_modules/nano/lib/nano.js:151:15)\n    at Request._callback (/node_modules/@cloudant/cloudant/lib/clientutils.js:162:11)\n    at Request.self.callback (/node_modules/request/request.js:185:22)\n    at Request.emit (events.js:198:13)\n    at Request.self._source.emit (/node_modules/@cloudant/cloudant/lib/eventrelay.js:78:21)\n    at Request.<anonymous> (/node_modules/request/request.js:1161:10)\n    at Request.emit (events.js:198:13)\n    at Request.self._source.emit (/node_modules/@cloudant/cloudant/lib/eventrelay.js:78:21)\n    at IncomingMessage.<anonymous> (/node_modules/request/request.js:1083:12)",
        "statusCode": 401
    }
}
The function code is as follows:
/**
 * Read a document in Cloudant database:
 * https://docs.cloudant.com/document.html#read
 **/
function main(message) {
    var cloudantOrError = getCloudantAccount(message);
    if (typeof cloudantOrError !== 'object') {
        return Promise.reject(cloudantOrError);
    }
    var cloudant = cloudantOrError;
    var dbName = message.dbname;
    var docId = message.docid || message.id;
    var params = {};
    if (!dbName) {
        return Promise.reject('dbname is required.');
    }
    if (!docId) {
        return Promise.reject('docid is required.');
    }
    var cloudantDb = cloudant.use(dbName);
    if (typeof message.params === 'object') {
        params = message.params;
    } else if (typeof message.params === 'string') {
        try {
            params = JSON.parse(message.params);
        } catch (e) {
            return Promise.reject('params field cannot be parsed. Ensure it is valid JSON.');
        }
    }
    return readDocument(cloudantDb, docId, params);
}

function readDocument(cloudantDb, docId, params) {
    return new Promise(function (resolve, reject) {
        cloudantDb.get(docId, params, function (error, response) {
            if (!error) {
                resolve(response);
            } else {
                console.error('error', error);
                reject(error);
            }
        });
    });
}

function getCloudantAccount(params) {
    var Cloudant = require('@cloudant/cloudant');
    var cloudant;
    if (!params.iamApiKey && params.url) {
        cloudant = Cloudant(params.url);
    } else {
        checkForBXCreds(params);
        if (!params.host) {
            return 'Cloudant account host is required.';
        }
        if (!params.iamApiKey) {
            if (!params.username || !params.password) {
                return 'You must specify parameter/s of iamApiKey or username/password';
            }
        }
        var protocol = params.protocol || 'https';
        if (params.iamApiKey) {
            var dbURL = `${protocol}://${params.host}`;
            if (params.port) {
                dbURL += ':' + params.port;
            }
            cloudant = new Cloudant({
                url: dbURL,
                plugins: {iamauth: {iamApiKey: params.iamApiKey, iamTokenUrl: params.iamUrl}}
            });
        } else {
            var url = `${protocol}://${params.username}:${params.password}@${params.host}`;
            if (params.port) {
                url += ':' + params.port;
            }
            cloudant = Cloudant(url);
        }
    }
    return cloudant;
}

function checkForBXCreds(params) {
    if (params.__bx_creds && (params.__bx_creds.cloudantnosqldb || params.__bx_creds.cloudantNoSQLDB)) {
        var cloudantCreds = params.__bx_creds.cloudantnosqldb || params.__bx_creds.cloudantNoSQLDB;
        if (!params.host) {
            params.host = cloudantCreds.host || (cloudantCreds.username + '.cloudant.com');
        }
        if (!params.iamApiKey && !cloudantCreds.apikey) {
            if (!params.username) {
                params.username = cloudantCreds.username;
            }
            if (!params.password) {
                params.password = cloudantCreds.password;
            }
        } else if (!params.iamApiKey) {
            params.iamApiKey = cloudantCreds.apikey;
        }
    }
}

Basically, copying and pasting those credentials into the "Invoke with credentials" box did not work, and I'm not sure why. To get a test invocation working I instead added docid, dbname, host, url and iamApiKey values to the parameters section of the function. That worked; a sketch of those parameters is below.
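For reference, a minimal sketch of the parameter values that made the test invocation work (every value here is a placeholder, not a real credential; the host/url shapes are only illustrative):

    {
        "dbname": "mydb",
        "docid": "mydocid",
        "host": "xxxx-bluemix.cloudantnosqldb.appdomain.cloud",
        "url": "https://xxxx-bluemix.cloudantnosqldb.appdomain.cloud",
        "iamApiKey": "xxxx"
    }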

Related

why mongoose populate() request does not work?

I am trying to populate some data from one collection into another collection. I googled around and followed the tutorial step by step, but the population fails. Any help is appreciated, friends. This is the code:
router.get("/", passport.authenticate("jwt", {session: false}), (req, res) => {
    const errors = {};
    Profile.findOne({user: req.user.id})
        .then(profile => {
            if (!profile) {
                errors.noprofile = "there is no profile for this user";
                return res.status(404).json(errors);
            }
            res.json(profile);
        }).catch(err => res.status(404).json(err));
});

// @route  POST api/profile
// @desc   Create or edit user profile
// @access Private
router.get("/", passport.authenticate("jwt", {session: false}), (req, res) => {
    const {errors, isValid} = validateProfileInput(req.body);
    // Check validation
    if (!isValid) {
        return res.status(400).json(errors);
    }
    // Get profile data
    const profileData = {};
    profileData.user = req.user.id;
    if (req.body.handle) {
        profileData.handle = req.body.handle;
    }
    if (req.body.company) {
        profileData.company = req.body.company;
    }
    if (req.body.website) {
        profileData.website = req.body.website;
    }
    if (req.body.location) {
        profileData.location = req.body.location;
    }
    if (req.body.status) {
        profileData.status = req.body.status;
    }
    if (typeof req.body.skills !== 'undefined') {
        profileData.skills = req.body.skills.split(',');
    }
    // Social
    profileData.social = {};
    if (req.body.youtube) {
        profileData.social.youtube = req.body.youtube;
    }
    if (req.body.twitter) {
        profileData.social.twitter = req.body.twitter;
    }
    if (req.body.facebook) {
        profileData.social.facebook = req.body.facebook;
    }
    if (req.body.instagram) {
        profileData.social.instagram = req.body.instagram;
    }
    Profile.findOne({user: req.user.id})
        .populate(
            "user",
            ["name, avatar"]
        )
This is the result that I get from Postman:

    "_id": "62ee1058ceb295ccdfedffce",
    "user": "62e6825958870d3db69d2da5",
    "handle": "pablo",
    "status": "developper",
    "skills": [
        "design web"
    ],

and the correct result should be:

    "_id": "62ee1058ceb295ccdfedffce",
    "user": {
        "_id": "62e6825958870d3db69d2da5",
        "name": "pablo",
        "avatar": "//www.gravatar.com/avatar/1ffsrenbdgeajks-ghsdereys1dkkdhddbc"
    },
    "handle": "pablo",
    "status": "developper",
    "skills": [
        "design web"
    ],
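For reference (not from the original post), a minimal sketch of what a complete populate chain usually looks like: the field selection is a space-delimited string (or separate array entries) rather than one comma-joined string like "name, avatar", and the query is actually executed and its result sent back:

    Profile.findOne({user: req.user.id})
        .populate("user", "name avatar") // select the name and avatar fields
        .then(profile => res.json(profile))
        .catch(err => res.status(404).json(err));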

IOWebSocketChannel Flutter & GraphQL Apollo

I'm having an issue connecting to a GraphQL endpoint using WebSockets.
The issues are noted in the comments. I cannot get this working. It works in the browser (a separate test application), so the server is fine.
IOWebSocketChannel? _channel;
StreamSubscription? _getSubscription;

connectToWebsocket(BuildContext context) {
  // Nothing to listen to. Auth users only.
  final auth = authProviderRead(context);
  if (auth.modelUser == null) {
    return;
  }
  _channel?.sink.close();
  _getSubscription?.cancel();
  final headers = {
    "Authorization": auth.jwt ?? "",
    "Content-Type": "application/json",
  };
  _channel = IOWebSocketChannel.connect(
    Uri.parse(getWebStockUrl()),
    headers: headers,
    protocols: ["graphql-ws"],
  );
  // Fails: Just fires "onDone"
  // _channel?.sink.add(jsonEncode({"data": subscriptionQuery}));
  // Fails with {"type":"connection_error","payload":{"message":"Cannot read properties of undefined (reading 'Authorization')"}}
  // _channel?.sink.add(json.encode({"type": "connection_init"}));
  // Fails with {"type":"error","payload":{"message":"Invalid message type!"}}
  // _channel?.sink.add(jsonEncode(
  //   {
  //     "type": "data",
  //     "query": subscriptionQuery,
  //   },
  // ));
  _getSubscription = _channel!.stream.listen((message) {
    // Is never fired?
    if (kDebugMode) {
      print("Got live message");
      print(message);
    }
    // channel!.sink.add('received!');
    // channel!.sink.close();
  })
    ..onData((data) {
      if (kDebugMode) {
        print("onData - WebSocket");
        print(data);
      }
    })
    ..onDone(() {
      if (kDebugMode) {
        print("onDone - WebSocket");
      }
    })
    ..onError((e) {
      if (kDebugMode) {
        print("onError - WebSocket");
        print(e);
      }
    });
}

const subscriptionQuery = r'''
subscription Subscription {
  gotChatMessage {
    messageResults {
      message {
        markdown
      }
    }
  }
}
''';
I figured it out; there are some additional things it requires.
From https://github.com/apollographql/subscriptions-transport-ws/blob/master/src/message-types.ts:
_channel?.sink.add(jsonEncode({
  "type": "connection_init",
  "payload": {"Authorization": auth.jwt}
}));
_channel?.sink.add(jsonEncode({
  "type": "start",
  "payload": {"query": subscriptionQuery}
}));
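In other words, the two messages above correspond to the GQL_CONNECTION_INIT ("connection_init") and GQL_START ("start") message types defined in that file: the connection is initialized first, carrying the auth payload the server was failing to read, and only then is the subscription started.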

MongoDB GridFSBucket upload stream doesn't return length of base64 image stream

I'm refactoring my team's code to remove deprecation warnings from MongoDB.
I removed the gridfs-stream library and I'm using GridFSBucket instead.
But there is a problem: the upload stream using GridFSBucket doesn't return the length of the base64 image; instead it returns 0. This breaks one of the tests in our code.
This is the code using GridFSBucket:
function getGrid() {
    return new mongoose.mongo.GridFSBucket(conn.db);
}

module.exports.store = function(id, contentType, metadata, stream, options, callback) {
    ... // unrelated things
    var strMongoId = mongoId.toHexString(); // http://stackoverflow.com/a/27176168
    var opts = {
        contentType: contentType,
        metadata: metadata
    };
    options = options || {};
    if (options.filename) {
        opts.filename = options.filename;
    }
    if (options.chunk_size) {
        var size = parseInt(options.chunk_size, 10);
        if (!isNaN(size) && size > 0 && size < 255) {
            opts.chunkSizeBytes = chunk_size * size;
        }
    }
    var gfs = getGrid();
    var writeStream = gfs.openUploadStreamWithId(strMongoId, opts.filename, opts);
    writeStream.on('finish', function(file) {
        console.log(file); // <== Notice the log for this line below
        return callback(null, file);
    });
    writeStream.on('error', function(err) {
        return callback(err);
    });
    stream.pipe(writeStream);
};
The result for console.log(file) is:
{
    "_id": "5ea7a3ffbc7dd36e7df4561e",
    "length": 0, // <== Notice length is 0 here
    "chunkSize": 10240,
    "uploadDate": "2020-04-28T03:33:19.734Z",
    "filename": "default_profile.png",
    "md5": "d41d8cd98f00b204e9800998ecf8427e",
    "contentType": "image/png",
    "metadata": {
        "name": "default_profile.png",
        "creator": {
            "objectType": "user",
            "id": "5ea7a3febc7dd36e7df4560a"
        }
    }
}
This is the old code using gridfs-stream:
var Grid = require('gridfs-stream');
...

function getGrid() {
    return new Grid(mongoose.connection.db, mongoose.mongo);
}

module.exports.store = function(id, contentType, metadata, stream, options, callback) {
    ... // unrelated things
    var strMongoId = mongoId.toHexString(); // http://stackoverflow.com/a/27176168
    var opts = {
        _id: strMongoId,
        mode: 'w',
        content_type: contentType
    };
    options = options || {};
    if (options.filename) {
        opts.filename = options.filename;
    }
    if (options.chunk_size) {
        var size = parseInt(options.chunk_size, 10);
        if (!isNaN(size) && size > 0 && size < 255) {
            opts.chunk_size = chunk_size * size;
        }
    }
    opts.metadata = metadata;
    var gfs = getGrid();
    var writeStream = gfs.createWriteStream(opts);
    writeStream.on('close', function(file) {
        console.log(file); // <== Notice the log for this line below
        return callback(null, file);
    });
    writeStream.on('error', function(err) {
        return callback(err);
    });
    stream.pipe(writeStream);
};
And here is the result of console.log(file):
{
    "_id": "5ea7a43a5d6eea73e0a3c8b1",
    "filename": "default_profile.png",
    "contentType": "image/png",
    "length": 634, // <== We have the length here
    "chunkSize": 10240,
    "uploadDate": "2020-04-28T03:34:18.069Z",
    "metadata": {
        "name": "default_profile.png",
        "creator": {
            "objectType": "user",
            "id": "5ea7a4395d6eea73e0a3c89d"
        }
    },
    "md5": "c37659eb6a9e741656a8d0348765c668"
}
So how can I get the length using GridFSBucket?
Thank you!
UPDATE:
This is the log of variable stream in both cases:
{
    "contentType": "image/png",
    "fileName": "default_profile.png",
    "transferEncoding": "base64",
    "contentDisposition": "attachment",
    "generatedFileName": "default_profile.png",
    "contentId": "216d9aab57544410901f8ba7981e63aa@mailparser",
    "stream": {
        "_events": {},
        "_eventsCount": 0,
        "writable": true,
        "checksum": {
            "_handle": {},
            "writable": true,
            "readable": true
        },
        "length": 0,
        "current": ""
    }
}
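A side observation plus a minimal diagnostic sketch, neither of which is from the original post: the logged md5 d41d8cd98f00b204e9800998ecf8427e is the MD5 of zero bytes, and the source stream itself logs "length": 0, which suggests no data may be flowing into the bucket at all. Independently of that, the stored length can always be re-read from the files collection via GridFSBucket's find() once the upload finishes:

    writeStream.on('finish', function() {
        // Re-read the file document from the files collection; files[0].length
        // is the size GridFS actually stored for this id.
        gfs.find({ _id: strMongoId }).toArray(function(err, files) {
            if (err) return callback(err);
            return callback(null, files[0]);
        });
    });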

Hosting a Forge Autodesk viewer on Github

I have an issue with the Forge viewer I'm developing: I'm trying to host it using GitHub Pages, but it doesn't seem to work correctly.
The issue is with the file tree: when I load the viewer page from GitHub Pages, the file tree seems stuck on "Loading...". However, it loads correctly when I load the page from localhost.
The code of the file tree:
$(document).ready(function () {
    prepareAppBucketTree();
    $('#refreshBuckets').click(function () {
        $('#appBuckets').jstree(true).refresh();
    });
    $('#createNewBucket').click(function () {
        createNewBucket();
    });
    $('#createBucketModal').on('shown.bs.modal', function () {
        $("#newBucketKey").focus();
    });
    $('#hiddenUploadField').change(function () {
        var node = $('#appBuckets').jstree(true).get_selected(true)[0];
        var _this = this;
        if (_this.files.length == 0) return;
        var file = _this.files[0];
        switch (node.type) {
            case 'bucket':
                var formData = new FormData();
                formData.append('fileToUpload', file);
                formData.append('bucketKey', node.id);
                $.ajax({
                    url: '/api/forge/oss/objects',
                    data: formData,
                    processData: false,
                    contentType: false,
                    type: 'POST',
                    success: function (data) {
                        $('#appBuckets').jstree(true).refresh_node(node);
                        _this.value = '';
                    }
                });
                break;
        }
    });
});
function createNewBucket() {
    var bucketKey = $('#newBucketKey').val();
    var policyKey = $('#newBucketPolicyKey').val();
    console.log(bucketKey);
    jQuery.post({
        url: '/api/forge/oss/buckets',
        contentType: 'application/json',
        data: JSON.stringify({ 'bucketKey': bucketKey, 'policyKey': policyKey }),
        success: function (res) {
            $('#appBuckets').jstree(true).refresh();
            $('#createBucketModal').modal('toggle');
        },
        error: function (err) {
            if (err.status == 409)
                alert('Bucket already exists - 409: Duplicated');
            console.log(err);
        }
    });
}
function prepareAppBucketTree() {
    $('#appBuckets').jstree({
        'core': {
            'themes': { "icons": true },
            'data': {
                "url": '/api/forge/oss/buckets',
                "dataType": "json",
                'multiple': false,
                "data": function (node) {
                    return { "id": node.id };
                }
            }
        },
        'types': {
            'default': {
                'icon': 'glyphicon glyphicon-question-sign'
            },
            '#': {
                'icon': 'glyphicon glyphicon-cloud'
            },
            'bucket': {
                'icon': 'glyphicon glyphicon-folder-open'
            },
            'object': {
                'icon': 'glyphicon glyphicon-file'
            }
        },
        "plugins": ["types", "state", "sort", "contextmenu"],
        contextmenu: { items: autodeskCustomMenu }
    }).on('loaded.jstree', function () {
        $('#appBuckets').jstree('open_all');
    }).bind("activate_node.jstree", function (evt, data) {
        if (data != null && data.node != null && data.node.type == 'object') {
            // $("#MyViewerDiv").empty();
            var urn = data.node.id;
            getForgeToken(function (access_token) {
                jQuery.ajax({
                    url: 'https://developer.api.autodesk.com/modelderivative/v2/designdata/' + urn + '/manifest',
                    headers: { 'Authorization': 'Bearer ' + access_token },
                    success: function (res) {
                        if (res.status === 'success') callByUrn('urn:' + urn);
                        else $("#MyViewerDiv").html('The translation job still running: ' + res.progress + '. Please try again in a moment.');
                    },
                    error: function (err) {
                        var msgButton = 'This file is not translated yet! ' +
                            '<button class="btn btn-xs btn-info" onclick="translateObject()"><span class="glyphicon glyphicon-eye-open"></span> ' +
                            'Start translation</button>';
                        $("#MyViewerDiv").html(msgButton);
                    }
                });
            });
        }
    });
}
function autodeskCustomMenu(autodeskNode) {
    var items;
    switch (autodeskNode.type) {
        case "bucket":
            items = {
                uploadFile: {
                    label: "Upload file",
                    action: function () {
                        uploadFile();
                    },
                    icon: 'glyphicon glyphicon-cloud-upload'
                }
            };
            break;
        case "object":
            items = {
                translateFile: {
                    label: "Translate",
                    action: function () {
                        var treeNode = $('#appBuckets').jstree(true).get_selected(true)[0];
                        translateObject(treeNode);
                    },
                    icon: 'glyphicon glyphicon-eye-open'
                }
            };
            break;
    }
    return items;
}

function uploadFile() {
    $('#hiddenUploadField').click();
}

function translateObject(node) {
    $("#MyViewerDiv").empty();
    if (node == null) node = $('#appBuckets').jstree(true).get_selected(true)[0];
    var bucketKey = node.parents[0];
    var objectKey = node.id;
    jQuery.post({
        url: '/api/forge/modelderivative/jobs',
        contentType: 'application/json',
        data: JSON.stringify({ 'bucketKey': bucketKey, 'objectName': objectKey }),
        success: function (res) {
            $("#MyViewerDiv").html('Translation started! Please try again in a moment.');
        },
    });
}
Please note that GitHub Pages only serves static pages, without any server-side logic. Your Forge application also requires a server to talk to, for example, to obtain the list of buckets for the tree view (by making a request to /api/forge/oss/buckets).
You could potentially host your application's server-side logic on something like Heroku, and then have your static HTML/CSS/JavaScript page on GitHub talk to that server (for example, https://my-forge-app.herokuapp.com/api/forge/oss/buckets). Just be careful about CORS; a sketch of that change follows.
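A minimal sketch of what that could look like on the static page (the Heroku URL is a placeholder, and the server would need to allow the GitHub Pages origin via CORS):

    // Prefix every relative API call with the remote server's origin,
    // since GitHub Pages itself cannot answer /api/... requests.
    var API_BASE = 'https://my-forge-app.herokuapp.com';

    $.ajax({
        url: API_BASE + '/api/forge/oss/buckets',
        dataType: 'json',
        // The server must also respond with a header such as
        // Access-Control-Allow-Origin: https://<user>.github.io
        success: function (buckets) { console.log(buckets); }
    });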

Unable to update Data

I am trying to update JSON data through an API call.
I was able to GET the data without any issues, as I am not passing any options in the request.
For UPDATE:
// saga.js
export function* BlurideaTitler(opt) {
    const id = opt.id; // 4
    const updatedTitle = opt.newTitle; // "title changed"
    let options = {
        crossDomain: true,
        method: 'PUT',
        json: true,
        headers: {'Content-Type': 'application/json'},
        body: {
            title: updatedTitle
        }
    };
    const requestURL = `http://localhost:3000/ideas/${id}`;
    try {
        yield call(request, requestURL, options);
    } catch (err) {
        console.log(err);
    }
}

// request.js
export default function request(url, options) {
    return fetch(url, options)
        .then(checkStatus)
        .then(parseJSON);
}
// db.json
The JSON I am trying to update:

{
    "ideas": [
        {
            "id": 4,
            "title": "My fourth Idea",
            "body": "Description of my fourth idea",
            "created_date": "14-Apr-2019"
        }
    ]
}
This is supposed to update the value of title, but it throws a 'Bad Request' error. Can someone please let me know what I am missing here?
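One likely culprit, offered as a hedged sketch rather than a confirmed fix: crossDomain and json: true are options for jQuery and the request library, not for fetch, which the request helper above uses. fetch does not serialize a plain-object body; it coerces it to the string "[object Object]", which a JSON API would reject as a Bad Request. Stringifying the body explicitly should put valid JSON on the wire:

    let options = {
        method: 'PUT',
        headers: {'Content-Type': 'application/json'},
        // fetch needs a string (or Blob/FormData) body, so serialize it here
        body: JSON.stringify({ title: updatedTitle })
    };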