Sending a mail with attachment on failed expect - protractor

I am trying to attach a .png file to the mail that will be sent via nodemailer on an expect/spec failure during a run via Protractor.
Worth mentioning is that I am using protractor-jasmine2-screenshot-reporter for screenshot capture.
What I am doing:
browser.driver.wait(function() {
    return helper.checkURLAddress(browser.params.Test.URL.mojOLX); // will return false
}, 2000)
.then(function() {
    // success code
},
// failure code goes below
function() {
    var htmlFilePath = 'D:/Test/target/screenshots/my-report.html';
    var htmlFileContent = String(fs.readFileSync(htmlFilePath));
    var screenshotDirectory = "D:/Test/target/screenshots/chrome";
    helper.sendHTMLMail(htmlFileContent, helper.getMostRecentFileName(screenshotDirectory));
});
The function for getting the most recent file:
function getMostRecentFileName(dir) {
    var files = fs.readdirSync(dir);
    return _.max(files, function (f) {
        var fullpath = path.join(dir, f);
        return fs.statSync(fullpath).ctime;
    });
}
And the mailOptions with attachments:
var mailOptions = {
    // from, to, subject go here
    attachments: {
        path: htmlFilePath
    }
};
The error I am getting is:
Error: ENOENT: no such file or directory, open 'D:/Test/Screenshotname.png'.
The file path of the screenshot is actually incorrect: it is missing three directories (target, screenshots, chrome) on the way to the PNG.
I presume this is because the directories have not been created yet, as mentioned in this thread. But the solution there is to wait for the PDF creation, which is triggered by the user, which is not the case here.
When exactly is the screenshot saved?
Why does the function not use the file it shows in the error?
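Worth noting: getMostRecentFileName returns only a file name from fs.readdirSync, not a full path, which would explain why the ENOENT path is resolved against D:/Test instead of the chrome screenshot directory. A sketch that joins the name back onto the directory and passes attachments in the array form the nodemailer docs describe (the filename field is only illustrative):
var screenshotDirectory = 'D:/Test/target/screenshots/chrome';
var latestScreenshot = helper.getMostRecentFileName(screenshotDirectory); // just a name, e.g. 'Screenshotname.png'
var mailOptions = {
    // from, to, subject go here
    attachments: [{
        filename: latestScreenshot,
        path: path.join(screenshotDirectory, latestScreenshot) // absolute path to the PNG
    }]
};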
EDITED Question:
How can I call sendHTMLMail after the screenshots have been created?
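One approach that should avoid the race entirely (a sketch, not something the reporter documents for this exact purpose): move the mail step into Protractor's afterLaunch hook in protractor.conf.js, which runs in the launcher process once every spec has finished, so the report and the screenshots are on disk by then. helper.sendHTMLMail, helper.getMostRecentFileName and the paths are taken from the question; the helper require path and the exit-code check are assumptions.
// protractor.conf.js (sketch)
var fs = require('fs');
var helper = require('./helper'); // hypothetical path to the question's helper module

exports.config = {
    // ... specs, capabilities, screenshot reporter registration, etc. ...

    // afterLaunch runs after all test processes have exited, so the
    // screenshot directory and my-report.html should already exist here.
    afterLaunch: function (exitCode) {
        var htmlFilePath = 'D:/Test/target/screenshots/my-report.html';
        var screenshotDirectory = 'D:/Test/target/screenshots/chrome';
        if (exitCode !== 0 && fs.existsSync(htmlFilePath)) {
            var htmlFileContent = String(fs.readFileSync(htmlFilePath));
            helper.sendHTMLMail(htmlFileContent, helper.getMostRecentFileName(screenshotDirectory));
        }
    }
};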

Related

Fromname in mail using sendgrid mail api

I'm trying to send emails using the SendGrid mail API.
Everything works fine. However, I want my emails to carry a specific name,
not the prefix of the sender's address, which comes up by default.
I changed the From value to "MY_email_name <sender#example.com>", but it didn't work.
I also set the From_Name field to "MY_email_name". That didn't work either.
However, it does work when I don't read the HTML content from an external file and instead provide it inline; in that case the email name is sent.
Any idea how I can do this while reading the content from a file?
Thanks.
var sendgrid = require('sendgrid')('MY_APP_SECRET');
var fs = require('fs');
var content;

// First I want to read the file
fs.readFile(__dirname + '/email.html', function read(err, data) {
    if (err) {
        throw err;
    }
    content = data;
    // Invoke the next step here however you like
    //console.log(content); // Put all of the code here (not the best solution)
    processFile(); // Or put the next step in a function and invoke it
});

function processFile() {
    console.log(content);
}

module.exports = function sendMail(mailObject) {
    return new Promise(function (resolve, reject) {
        // create a new email instance
        var email = new sendgrid.Email();
        email.addTo('some1#example.com');
        email.setFrom('sender#example.com');
        email.setSubject('My-Email-body');
        email.setFromName("Email-Name");
        email.setHtml(content);
        email.addHeader('X-Sent-Using', 'SendGrid-API');
        email.addHeader('X-Transport', 'web');
        email.setASMGroupID(835);
        // send mail
        sendgrid.send(email, function(err, json) {
            // if something went wrong
            if (err) {
                reject({
                    error: err,
                    res: json,
                });
            }
            // else
            resolve({
                statusText: 'OK',
                res: json
            });
        });
    });
};
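One thing worth ruling out (an assumption, not something the question confirms): because fs.readFile is asynchronous, sendMail can run while content is still undefined, so setHtml receives no HTML at all. A minimal sketch that reads the file before building the email, reusing the question's own values:
var fs = require('fs');
var sendgrid = require('sendgrid')('MY_APP_SECRET');

module.exports = function sendMail(mailObject) {
    return new Promise(function (resolve, reject) {
        // Read synchronously here so the content is guaranteed to be available.
        var content = fs.readFileSync(__dirname + '/email.html', 'utf8');

        var email = new sendgrid.Email();
        email.addTo('some1#example.com');
        email.setFrom('sender#example.com');
        email.setFromName('Email-Name');
        email.setSubject('My-Email-body');
        email.setHtml(content);

        sendgrid.send(email, function (err, json) {
            if (err) {
                return reject({ error: err, res: json });
            }
            resolve({ statusText: 'OK', res: json });
        });
    });
};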

How do I send a PDF to Watson's Document Conversion service without writing it to disk first?

I am trying to convert this document (http://www.redbooks.ibm.com/redbooks/pdfs/ga195486.pdf) to answer units in Watson's Document Conversion service using the watson-developer-cloud node.js library.
In the actual program (not this test program), I am retrieving the document and converting it on-the-fly, without writing it to disk first. I have done this before with other documents, but the latest version of the library (v 1.7.0) seems to have changed and it no longer works the way I was using it. But even before I started using the latest version, this particular document would not convert.
The annotated test code that I am using is below. I have tried several ways to get this to work, the variations of which are all commented out under var opts={ below. You have to uncomment one of them at a time to see the results.
'use strict';
var bluemix = require('./bluemix');
var extend = require('util')._extend;
var fs = require('fs');
var watson = require('watson-developer-cloud');
var streams = require('memory-streams');

var dcCredentials = extend({
    url: '<url>',
    version: 'v1',
    username: '<username>',
    password: '<password>'
}, bluemix.getServiceCreds('document_conversion')); // VCAP_SERVICES

var document_conversion = watson.document_conversion(dcCredentials);

var bookpdf = getBook('ga195486.pdf');
convert(bookpdf);

function getBook(filename)
{
    var bl = fs.readFileSync(filename, 'utf8');
    return bl;
}

function convert(content)
{
    var opts = { // uncomment ONE of these
        // file: new Buffer(content), // See message #1 below
        // file: {value: new Buffer(content), options: {}}, // see message #2 below
        // file: {value: new Buffer(content), options: {contentType: "application/pdf"}}, // This used to work. See message #2 (again) below
        // file: new streams.ReadableStream(content), // see message #3 below
        conversion_target: "ANSWER_UNITS",
        content_type: 'application/pdf'
    };
    document_conversion.convert(opts,
        function (err, response)
        {
            if (err)
            {
                console.log("Error converting doc: ", err);
            }
            else if (response.answer_units.length == 0)
            {
                var msg = "No answer units";
                console.log(msg, response);
            }
            else
            {
                console.log('Works!');
                console.dir(response);
            }
        }
    );
}
//Message #1: This returns:
// No answer units { source_document_id: '',
// timestamp: '2016-05-23T16:18:23.825Z',
// media_type_detected: 'application/pdf',
// metadata: [],
// answer_units: [],
// warnings:
// [ { phase: 'pdf',
// warning_id: 'empty_input_to_converter',
// description: 'The input provided to the converter phase is empty or doesn\'t contain text that can be converted.' },
// { phase: 'normalized_html',
// warning_id: 'empty_input_to_converter',
// description: 'The input HTML document has no body content.' },
// { phase: 'answer_units',
// warning_id: 'empty_input_to_converter',
// description: 'The input provided to the converter phase is empty or doesn\'t contain text that can be converted.' } ] }
//Message #2: These return:
///home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/node_modules/request/node_modules/combined-stream/node_modules/delayed-stream/lib/delayed_stream.js:33
// source.on('error', function() {});
//
//TypeError: source.on is not a function
// at Function.DelayedStream.create (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/node_modules/request/node_modules/combined-stream/node_modules/delayed-stream/lib/delayed_stream.js:33:10)
// at FormData.CombinedStream.append (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/node_modules/request/node_modules/combined-stream/lib/combined_stream.js:43:37)
// at FormData.append (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/node_modules/request/node_modules/form-data/lib/form_data.js:68:3)
// at appendFormValue (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/node_modules/request/request.js:339:21)
// at Request.init (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/node_modules/request/request.js:352:11)
// at new Request (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/node_modules/request/request.js:142:8)
// at request (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/node_modules/request/index.js:55:10)
// at createRequest (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/lib/requestwrapper.js:134:10)
// at DocumentConversion.convert (/home/david/git/ccb-contentbridge/node_modules/watson-developer-cloud/services/document_conversion/v1.js:134:10)
// at convert (/home/david/git/ccb-contentbridge/testRedbooks.js:35:24)
//Message #3: This returns and then it hangs there:
//Error converting doc: { code: 400, error: 'Error in the web application' }
Can someone please tell me what I am doing wrong?
That particular file is larger than what the Document Conversion service can currently handle. Unfortunately I don't have very good info on exactly what the limits are right now, but the team is aware of this and looking into making improvements.
If you can provide an example that worked previously but broke with the v1.7.0 of the node.js library, I'll take a look at that and hopefully be able to provide better info.
Oh, and specifying 'utf8' on your fs.readFileSync() call may be causing some of the trouble you're experiencing.
Looks like the limit for Doc Con is 50 MB per this and our documents are smaller than that... must be some other problem.
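Following up on the 'utf8' point above: reading a PDF with an encoding turns the binary data into a mangled string, which could explain the empty_input_to_converter warnings. A minimal sketch that reads the file as a raw Buffer and keeps the same opts shape the question already uses (the service's size limit still applies):
// Read the PDF without an encoding so readFileSync returns a Buffer, not a string.
function getBook(filename) {
    return fs.readFileSync(filename);
}

var opts = {
    file: getBook('ga195486.pdf'),
    conversion_target: 'ANSWER_UNITS',
    content_type: 'application/pdf'
};

document_conversion.convert(opts, function (err, response) {
    if (err) {
        console.log('Error converting doc: ', err);
    } else {
        console.dir(response);
    }
});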

Using Sailsjs Skipper file uploading with Flowjs

I'm trying to use Skipper and flowjs with ng-flow together for big file uploads.
Based on the sample for Node.js in the flowjs repository, I've created my Sails controller and service to handle file uploads. When I upload a small file it works fine, but if I try to upload a bigger file (e.g. a 200 MB video) I receive the errors listed below and the array req.file('file')._files is empty. Interestingly, this happens only a few times during an upload: for example, if flowjs cuts the file into 150 chunks, these errors appear in the Sails console only 3-5 times. So almost all chunks are uploaded to the server, but a few are lost, and as a result the file is corrupted.
verbose: Unable to expose body parameter `flowChunkNumber` in streaming upload! Client tried to send a text parameter (flowChunkNumber) after one or more files had already been sent. Make sure you always send text params first, then your files.
These errors appear for all flowjs parameters.
I know that text parameters must be sent first for Skipper to work correctly, and in the Chrome network console I've verified that flowjs sends the data in the correct order.
Any suggestions?
Controller method
upload: function (req, res) {
    flow.post(req, function (status, filename, original_filename, identifier) {
        sails.log.debug('Flow: POST', status, original_filename, identifier);
        res.status(status).send();
    });
}
Service post method
$.post = function(req, callback) {
    var fields = req.body;
    var file = req.file($.fileParameterName);
    if (!file || !file._files.length) {
        console.log('no file', req);
        file.upload(function() {});
    }
    var stream = file._files[0].stream;
    var chunkNumber = fields.flowChunkNumber;
    var chunkSize = fields.flowChunkSize;
    var totalSize = fields.flowTotalSize;
    var identifier = cleanIdentifier(fields.flowIdentifier);
    var filename = fields.flowFilename;
    if (file._files.length === 0 || !stream.byteCount) {
        callback('invalid_flow_request', null, null, null);
        return;
    }
    var original_filename = stream.filename;
    var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, stream.byteCount);
    if (validation == 'valid') {
        var chunkFilename = getChunkFilename(chunkNumber, identifier);
        // Save the chunk via the skipper file upload API
        file.upload({saveAs: chunkFilename}, function(err, uploadedFiles) {
            // Do we have all the chunks?
            var currentTestChunk = 1;
            var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
            var testChunkExists = function() {
                fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists) {
                    if (exists) {
                        currentTestChunk++;
                        if (currentTestChunk > numberOfChunks) {
                            callback('done', filename, original_filename, identifier);
                        } else {
                            // Recursion
                            testChunkExists();
                        }
                    } else {
                        callback('partly_done', filename, original_filename, identifier);
                    }
                });
            };
            testChunkExists();
        });
    } else {
        callback(validation, filename, original_filename, identifier);
    }
};
Edit
I found a solution: set the flowjs property maxChunkRetries: 5, because by default it is 0.
On the server side, if req.file('file')._files is empty, I throw a non-permanent (in the context of flowjs) error.
This solves my problem, but the question of why it behaves like this is still open. The sample code for flowjs and Node.js uses connect-multiparty and has no additional error handling, so it is most likely a Skipper body parser bug.
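For reference, a sketch of the client-side piece of that workaround; the target URL is hypothetical, and this assumes 400 is not in your flow.js permanentErrors list (statuses in that list are never retried):
var flow = new Flow({
    target: '/experiment/upload',   // hypothetical upload route
    maxChunkRetries: 5,             // default is 0, so a single dropped chunk ruins the upload
    chunkRetryInterval: 1000        // wait 1s between retries of a failed chunk
});
On the server side, the "non-permanent error" then just means answering the empty-chunk request with a status code outside permanentErrors (for example res.status(400).send()), so flow.js resends that chunk instead of aborting the upload.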

Make the sails.js server monitor file change using chokidar and then emit socket message

I use chokidar to monitor if files have been changed in a folder. At the moment it is triggered when a user updates an Experiment Model in my ExperimentController.
var chokidar = require('chokidar');
...
var watcher = chokidar.watch('assets/output-model-files', {ignored: /[\/\\]\./, persistent: true});
watcher.on('change', function(path) {
    // ...read my changed file, update the content of my database, and send a socket publishUpdate message...
    // read the file content asynchronously
    fs.readFile(path, "utf-8", function (err, data) {
        // ... update the experiment object with the content of the changed file
        Experiment.update(req.param('id'), ExpObj, function expUpdated(err) {});
        // ... send a message via socket saying that the experiment object has been updated
        Experiment.publishUpdate(req.param('id'), {
            name: exp.name,
            results: JSON.stringify(myres),
            action: ('file has just been updated. nb of trajectories: ' + totalNbTrajectories)
        });
    });
});
But I would like to constantly monitor any change in the target folder and send Experiment.publishUpdate messages from the moment the sails.js server starts, not only when a user updates an experiment object.
Where could I place that chokidar.watch(...) code on the server side so that a file change updates an experiment object? socket.js?
OK, I found that placing the code in bootstrap.js seems to do the job perfectly regarding the event triggering.
My bootstrap.js now looks like:
var chokidar = require('chokidar');
var fs = require('fs');
var os = require('os');
var sys = require('sys');

module.exports.bootstrap = function(cb) {
    // It's very important to trigger this callback method when you are finished
    // with the bootstrap! (otherwise your server will never lift, since it's waiting on the bootstrap)
    User.update({}, {online: false},
        function userUpdated(err, users) {
            if (err) {
                console.log(err);
            } else {
                var watcher = chokidar.watch('assets/output-model-files', {ignored: /[\/\\]\./, persistent: true});
                watcher.on('change', function(path) {
                    console.log('File', path, 'has been changed');
                    // do file reading and presumably Experiment publishUpdate here
                });
            }
            cb();
        }
    );
};
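Inside bootstrap.js there is no req object, so the req.param('id') from the original controller snippet is unavailable; the experiment to update has to be derived from the changed file itself. A sketch for the placeholder comment above, under the hypothetical assumption that each output file is named <experimentId>.json:
watcher.on('change', function (filePath) {
    // derive the experiment id from the file name (assumed naming scheme)
    var experimentId = require('path').basename(filePath, '.json');
    fs.readFile(filePath, 'utf-8', function (err, data) {
        if (err) { return sails.log.error(err); }
        Experiment.update(experimentId, { results: data }, function (err) {
            if (err) { return sails.log.error(err); }
            Experiment.publishUpdate(experimentId, {
                results: data,
                action: 'file has just been updated'
            });
        });
    });
});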

ERROR: the operation couldn't be completed. (com.facebook.sdk error 5.)

I am getting an SDK error; what should I change in the code to remove it?
Please help me, I am new to Titanium.
var f = Ti.Filesystem.getFile('pumpkin.jpg');
var blob = f.read();
var data = {
    message: 'This is a pumpkin',
    picture: blob
};
Titanium.Facebook.requestWithGraphPath('me/photos', data, 'POST', function(e) {
    if (e.success) {
        alert("Success! From FB: " + e.result);
    } else {
        if (e.error) {
            alert(e.error);
        } else {
            alert("Unknown result");
        }
    }
});
You are trying to execute example code from the online docs, so if you are using the traditional project structure, put an image named pumpkin.jpg in the Resources directory, because I think that file is missing.
Also, be sure that you are calling it after login.
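A sketch of the "call it after login" part with the classic Titanium.Facebook module; the permission name and the Resources lookup are assumptions based on that era's docs, not something the answer specifies:
Titanium.Facebook.permissions = ['publish_stream']; // assumed permission for photo uploads
Titanium.Facebook.addEventListener('login', function (e) {
    if (!e.success) {
        alert('Login failed or was cancelled');
        return;
    }
    // pumpkin.jpg must exist in the Resources directory for getFile to find it
    var f = Ti.Filesystem.getFile(Ti.Filesystem.resourcesDirectory, 'pumpkin.jpg');
    var data = { message: 'This is a pumpkin', picture: f.read() };
    Titanium.Facebook.requestWithGraphPath('me/photos', data, 'POST', function (e) {
        alert(e.success ? ('Success! From FB: ' + e.result) : JSON.stringify(e.error));
    });
});
Titanium.Facebook.authorize();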