Using Sails.js Skipper file uploading with Flow.js

I'm trying to use skipper and flowjs (with ng-flow) together for big file uploads.
Based on the sample for Node.js located in the flowjs repository, I've created my sails controller and service to handle file uploads. When I upload a small file it works fine, but if I try to upload a bigger file (e.g. a 200 MB video) I receive the errors listed below and the array req.file('file')._files is empty. Interestingly, this happens only a few times during the upload. For example, if flowjs cuts the file into 150 chunks, these errors appear in the sails console only 3-5 times. So almost all chunks are uploaded to the server, but a few are lost and as a result the file is corrupted.
verbose: Unable to expose body parameter `flowChunkNumber` in streaming upload! Client tried to send a text parameter (flowChunkNumber) after one or more files had already been sent. Make sure you always send text params first, then your files.
These errors appear for all flowjs parameters.
I know that the text parameters must be sent before the files for skipper to work correctly, and in the Chrome network console I've checked that flowjs sends this data in the correct order.
Any suggestions?
Controller method
upload: function (req, res) {
    flow.post(req, function (status, filename, original_filename, identifier) {
        sails.log.debug('Flow: POST', status, original_filename, identifier);
        res.status(status).send();
    });
}
Service post method
$.post = function(req, callback) {
    var fields = req.body;
    var file = req.file($.fileParameterName);
    if (!file || !file._files.length) {
        console.log('no file', req);
        file.upload(function() {});
    }
    var stream = file._files[0].stream;
    var chunkNumber = fields.flowChunkNumber;
    var chunkSize = fields.flowChunkSize;
    var totalSize = fields.flowTotalSize;
    var identifier = cleanIdentifier(fields.flowIdentifier);
    var filename = fields.flowFilename;

    if (file._files.length === 0 || !stream.byteCount) {
        callback('invalid_flow_request', null, null, null);
        return;
    }

    var original_filename = stream.filename;
    var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, stream.byteCount);
    if (validation == 'valid') {
        var chunkFilename = getChunkFilename(chunkNumber, identifier);
        // Save the chunk by skipper file upload api
        file.upload({ saveAs: chunkFilename }, function(err, uploadedFiles) {
            // Do we have all the chunks?
            var currentTestChunk = 1;
            var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
            var testChunkExists = function() {
                fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists) {
                    if (exists) {
                        currentTestChunk++;
                        if (currentTestChunk > numberOfChunks) {
                            callback('done', filename, original_filename, identifier);
                        } else {
                            // Recursion
                            testChunkExists();
                        }
                    } else {
                        callback('partly_done', filename, original_filename, identifier);
                    }
                });
            };
            testChunkExists();
        });
    } else {
        callback(validation, filename, original_filename, identifier);
    }
};
Edit
I found a solution: set the flowjs option maxChunkRetries: 5 (by default it is 0).
On the server side, if req.file('file')._files is empty, I respond with an error that flowjs does not treat as permanent, so the chunk is retried.
This solves my problem, but the question of why it behaves like this is still open. The sample code for flowjs and Node.js uses connect-multiparty and has no additional error-handling code, so this is most likely a skipper body parser bug.
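For illustration, here is roughly how the two halves of that workaround fit together. The option names are from the flow.js docs; the endpoint path and the 409 status are assumptions, the only requirement being that the status returned for a missing chunk is not one flowjs treats as permanent:

// client side: ng-flow / flow.js configuration (sketch)
var flowOptions = {
    target: '/file/upload',      // assumed upload endpoint
    chunkSize: 1024 * 1024,
    maxChunkRetries: 5,          // default is 0, so a single dropped chunk ruins the upload
    chunkRetryInterval: 200      // ms to wait before retrying a failed chunk
};

// server side (sails controller, sketch): answer with a retryable status when skipper lost the chunk
upload: function (req, res) {
    var file = req.file('file');
    if (!file || !file._files.length) {
        // any status NOT listed in flowjs's permanentErrors option makes it retry the chunk
        return res.status(409).send();
    }
    flow.post(req, function (status, filename, original_filename, identifier) {
        sails.log.debug('Flow: POST', status, original_filename, identifier);
        res.status(status).send();
    });
}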

How can I get a continuous stream of samples from the JavaScript Audio API

I'd like to get a continuous stream of samples in JavaScript from the audio API. The only way I've found to get samples is through the MediaRecorder object in the JavaScript Audio API.
I set up my recorder like this:
var options = {
    mimeType: "audio/webm;codec=raw",
}
this.mediaRecorder = new MediaRecorder(stream, options);
this.mediaRecorder.ondataavailable = function (e) {
    this.decodeChunk(e.data);
}.bind(this);
this.mediaRecorder.start(/*timeslice=*/ 100 /*ms*/);
This gives me a callback 10 times a second with new data. All good so far.
The data is encoded, so I use audioCtx.decodeAudioData to process it:
let fileReader = new FileReader();
fileReader.onloadend = () => {
    let encodedData = fileReader.result;
    // console.log("Encoded length: " + encodedData.byteLength);
    this.audioCtx.decodeAudioData(encodedData,
        (decodedSamples) => {
            let newSamples = decodedSamples.getChannelData(0)
                .slice(this.firstChunkSize, decodedSamples.length);
            // The callback which handles the decodedSamples goes here. All good.
            if (this.firstChunkSize == 0) {
                this.firstChunkSize = decodedSamples.length;
            }
        });
};
This all works fine too.
Setting up the data for the file reader is where it gets strange:
let blob;
if (!this.firstChunk) {
    this.firstChunk = chunk;
    blob = new Blob([chunk], { 'type': chunk.type });
} else {
    blob = new Blob([this.firstChunk, chunk], { 'type': chunk.type });
}
fileReader.readAsArrayBuffer(blob);
The first chunk works just fine, but the second and later chunks fail to decode unless I combine them with the first chunk. I'm guessing what is happening here is that the first chunk has a header that is required to decode the data. I remove the samples decoded from the first chunk after decoding them a second time. See this.firstChunkSize above.
This all executes without error, but the audio that I get back has a vibrato-like effect at 10Hz. A few hypotheses:
I have some simple mistake in my "firstChunkSize" and "splice" logic
The first chunk has some header which is causing the remaining data to be interpreted in a strange way.
There is some strange interaction with some option when creating the audio source (noise cancellation?)
You want codecs=, not codec=.
var options = {
    mimeType: "audio/webm;codecs=pcm",
}
Though MediaRecorder.isTypeSupported will return true with codec=, that is only because the unrecognized parameter is being ignored. For example:
MediaRecorder.isTypeSupported("audio/webm;codec=pcm")
true
MediaRecorder.isTypeSupported("audio/webm;codecs=pcm")
true
MediaRecorder.isTypeSupported("audio/webm;codecs=asdfasd")
false
MediaRecorder.isTypeSupported("audio/webm;codec=asdfasd")
true
The garbage codec name asdfasd is "supported" if you specify codec instead of codecs.
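As a related sanity check, you can verify the (correctly spelled) codecs= string before constructing the recorder and fall back if the browser can't record PCM. A minimal sketch; note this would not catch the codec= typo, since that parameter is simply ignored:

var mimeType = "audio/webm;codecs=pcm";
// fall back to the browser's default mime type if PCM recording isn't supported
var options = MediaRecorder.isTypeSupported(mimeType) ? { mimeType: mimeType } : {};
this.mediaRecorder = new MediaRecorder(stream, options);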

Javascript injection goes wrong

In our Android project (a download manager) we need to show a built-in web browser so we can catch downloads there with all their data (headers, cookies, POST data) and handle them properly.
Unfortunately, the WebView control we use does not provide any way to access the POST data of the requests it makes.
So we use a hacky way to get this data: we inject this JavaScript code into each HTML document the browser loads:
<script language="JavaScript">
    HTMLFormElement.prototype._submit = HTMLFormElement.prototype.submit;
    HTMLFormElement.prototype.submit = formSubmitMonitor;
    window.addEventListener('submit', function(e) {
        formSubmitMonitor(e);
    }, true);

    function formSubmitMonitor(e) {
        var frm = e ? e.target : this;
        formSubmitMonitor_onsubmit(frm);
        frm._submit();
    }

    function formSubmitMonitor_onsubmit(f) {
        var data = "";
        for (i = 0; i < f.elements.length; i++) {
            var name = f.elements[i].name;
            var value = f.elements[i].value;
            //var type = f.elements[i].type;
            if (name) {
                if (data !== "")
                    data += '&';
                data += encodeURIComponent(name) + '=' + encodeURIComponent(value);
            }
        }
        postDataMonitor.onBeforeSendPostData(
            f.attributes['method'] === undefined ? null : f.attributes['method'].nodeValue,
            new URL(f.action, document.baseURI).href,
            data,
            f.attributes['enctype'] === undefined ? null : f.attributes['enctype'].nodeValue);
    }

    XMLHttpRequest.prototype.origOpen = XMLHttpRequest.prototype.open;
    XMLHttpRequest.prototype.open = function(method, url, async, user, password) {
        // these will be the key to retrieve the payload
        this.recordedMethod = method;
        this.recordedUrl = url;
        this.origOpen(method, url, async, user, password);
    };

    XMLHttpRequest.prototype.origSend = XMLHttpRequest.prototype.send;
    XMLHttpRequest.prototype.send = function(body) {
        if (body) {
            postDataMonitor.onBeforeSendPostData(
                this.recordedMethod,
                this.recordedUrl,
                body,
                null);
        }
        this.origSend(body);
    };

    const origFetch = window.fetch;
    window.fetch = function() {
        postDataMonitor.onBeforeSendPostData(
            "POST",
            "test",
            "TEST",
            null);
        return origFetch.apply(this, arguments);
    }
</script>
Generally, it works fine.
But in the Google Mail web interface it's not working, for some unknown reason, e.g. when the user enters their login name and presses Next. I thought it was using the Fetch API, so I added interception for that too, but it did not help. Please note that we do not need to intercept the user credentials specifically; we need to be able to intercept everything, or nothing. Unfortunately, this is the way the whole system works there...
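For what it's worth, the fetch override above only reports placeholder values ("POST", "test", "TEST"); a sketch that forwards the real method, URL and body looks something like the following (it still did not change the Gmail result, and Request objects with stream bodies are not covered):

const origFetch = window.fetch;
window.fetch = function (input, init) {
    try {
        // input may be a URL string or a Request object
        var url = (typeof input === 'string') ? input : input.url;
        var method = (init && init.method) || (input && input.method) || 'GET';
        var body = init && init.body;
        if (body) {
            postDataMonitor.onBeforeSendPostData(method, url, String(body), null);
        }
    } catch (err) {
        // never let the monitoring code break the page
    }
    return origFetch.apply(this, arguments);
};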
Addition #1.
I've found another way: don't override shouldInterceptRequest, but override onPageStarted instead and call evaluateJavascript there. That way it works even on the Google Mail web site! But why is the first method not working then? Do we break the HTML code somehow?

Fiddler: Cannot set the Response Code in

We are using the Fiddler CustomRules.js script to handle our API testing (external APIs from other companies, when they do not have test servers for us), where we send a response file back to the requester if one is present and otherwise build the response. This works fine, but I cannot set the HTTP status code.
When we generate the response, in some cases we want to be able to set the HTTP status to what the external API might send.
static function OnBeforeResponse(oSession: Session) {
    if (m_Hide304s && oSession.responseCode == 304) {
        oSession["ui-hide"] = "true";
    }

    // Set header values for later
    var HeaderContentType = 'text/xml;charset=utf-8';
    var HeaderServer = 'Apache-Coyote/1.1';
    var HttpStatus = 200;

    ... // This is the removed code that determines the text or file to return

    // At the end of our process to determine whether to send a file or an error, we try to send
    // an error value in this case. For simplicity, I am just hard-assigning it without using a
    // variable as we normally would.
    oSession.responseCode = 500;
    oSession.oResponse.headers.HTTPResponseCode = 500;
    oSession.oResponse.headers.HTTPResponseStatus = "500 SERVER ERROR";
    oSession.ResponseHeaders.SetStatus(500, 'Server Error'); // This also does not work

    // However, this does work to add the file contents into the response when the file exists.
    var ResponseFile = new ActiveXObject("Scripting.FileSystemObject");
    if (ResponseFile.FileExists(ReturnFileName)) {
        oSession["x-replywithfile"] = ReturnFileName;
    } else {
        // Error message returned, as the ReturnBody was not populated and the response file was not found
        oSession.utilSetResponseBody(ErrorMessage);
    }
    return;
}
I finally tracked it down. The problem is that I am often returning a file when returning an error, using oSession["x-replywithfile"]. However, this always makes the status 200 OK, even if I try to change the status after setting oSession["x-replywithfile"].
oSession["x-replywithfile"] = ReturnFileName;
oSession.responseCode = 500;
This will still always return a 200 OK.
Changing to the following will work.
var FileContents = ReadFile(ReturnFileName);
oSession.utilSetResponseBody(FileContents);
oSession.responseCode = 500;
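ReadFile here is just a small local helper; a minimal version using the same Scripting.FileSystemObject as the existence check above could look like this:

static function ReadFile(sFilename: String): String {
    // read the whole response file as text (sketch; no handling for empty/missing files)
    var fso = new ActiveXObject("Scripting.FileSystemObject");
    var oTextStream = fso.OpenTextFile(sFilename, 1); // 1 = ForReading
    var sContents = oTextStream.ReadAll();
    oTextStream.Close();
    return sContents;
}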

Sending a mail with attachment on failed expect

I am trying to attach a .png file to the mail that will be sent via nodemailer on an expect/spec failure during a Protractor run.
Worth mentioning is that I am using protractor-jasmine2-screenshot-reporter for screenshot capture.
What I am doing:
browser.driver.wait(function() {
    return helper.checkURLAddress(browser.params.Test.URL.mojOLX); // will return false
}, 2000)
.then(function() {
    // success code
},
// failure code goes below
function() {
    var htmlFilePath = 'D:/Test/target/screenshots/my-report.html';
    var htmlFileContent = String(fs.readFileSync(htmlFilePath));
    var screenshotDirectory = "D:/Test/target/screenshots/chrome";
    helper.sendHTMLMail(htmlFileContent, helper.getMostRecentFileName(screenshotDirectory));
});
The function for getting the most recent file:
function getMostRecentFileName(dir) {
    var files = fs.readdirSync(dir);
    return _.max(files, function (f) {
        var fullpath = path.join(dir, f);
        return fs.statSync(fullpath).ctime;
    });
}
And the mailOptions with attachments:
var mailOptions = {
    // from, to, subject go here
    attachments: {
        path: htmlFilePath
    }
};
The error I am getting is:
Error: ENOENT: no such file or directory, open 'D:/Test/Screenshotname.png'.
The file path of the screenshot is actually incorrect and is missing 3 directories in the path to the PNG (target, screenshots, chrome).
I presume that it is due to the directories not being created, as mentioned in this thread. But the solution there is to wait for the PDF creation, which is done by the user, which is not the case here.
When exactly is the screenshot saved?
Why does the function not use the file it shows in the error?
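For reference, getMostRecentFileName returns only the file name (not the full path), and as far as I know nodemailer expects attachments to be an array of objects, so I would expect something along these lines to be needed (a sketch, using the same directory variables as above):

// sketch: return the full path from the helper, and pass attachments as an array
function getMostRecentFilePath(dir) {
    var files = fs.readdirSync(dir);
    var newest = _.max(files, function (f) {
        return fs.statSync(path.join(dir, f)).ctime;
    });
    return path.join(dir, newest); // full path instead of just the file name
}

var mailOptions = {
    // from, to, subject go here
    attachments: [
        { path: htmlFilePath },                              // the HTML report
        { path: getMostRecentFilePath(screenshotDirectory) } // newest screenshot
    ]
};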
EDITED Question:
How to call the sendHTMLMail after the screenshots are created?

Uploading Blobs in Blocks using REST API times out on second chunk

NOTE: Could somebody give me an example SAS string (with the block info appended in the right place) that needs to be sent to Azure blob storage? I think that's the issue I'm having. I need to figure out the order of the URI, key, etc. in the string that is sent to Azure with each block.
What I'm trying to accomplish is to grab a SAS key from a service, modify the string so that Azure knows that I'm sending in blocks, and then send the individual blocks of the file with the SAS key from the web client. I'm chunking each file into 2 MB blocks and sending those 2 MB blocks one at a time with a JavaScript library. So each "file" in the code below is just a 2 MB chunk of a file.
THE PROBLEM: I can successfully grab the SAS key from the service, modify the key so that it has the block chunk info in it, send in the FIRST chunk, and then receive a response back from the blob storage server. When I send in the second chunk, however, the request for a stream to the blob storage hangs and then eventually times out. The time-out seems to happen specifically on the second request for a stream to the blob storage, in this bit of code right here:
SERVER WEB CLIENT CODE:
using (Stream requestStream = request.GetRequestStream())
{
    inputStream.CopyTo(requestStream, file.ContentLength);
}
What could be causing the second chunk to time out? Could it be that the window for the key closes too soon? Below is my code and some screenshots:
private void WriteToBlob(HttpPostedFileBase file, string BlockId, FileProp fp)
{
    var inputStream = file.InputStream;
    Microsoft.WindowsAzure.StorageCredentialsSharedAccessSignature credentials =
        new Microsoft.WindowsAzure.StorageCredentialsSharedAccessSignature(fp.facct);
    string queryString = (new Uri(fp.folderName)).Query;
    string RequestUri = string.Format(System.Globalization.CultureInfo.InvariantCulture,
        "{0}/{1}{2}&comp=block&blockid={3}",
        fp.folderName, fp.fileName, queryString, Convert.ToBase64String(Encoding.UTF8.GetBytes(BlockId)));
    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(RequestUri);
    request.Method = "PUT";
    request.ContentLength = inputStream.Length;
    using (Stream requestStream = request.GetRequestStream())
    {
        inputStream.CopyTo(requestStream, file.ContentLength);
    }
}
JAVASCRIPT CODE SENDING THE CHUNKS TO THE WEB SERVER CLIENT:
var running = 0;
var chunksize = (Modernizr.blobconstructor) ? uploadChunkSize : null; //if browser supports Blob API
window.xhrPool = [];

$('#fileupload').fileupload({
    url: url,
    //formData: [{ name: 'param1', value: 1 }, { name: 'param2', value: 2}],
    singleFileUploads: true, //each file is using an individual XHR request
    //limitMultiFileUploads: 2, //This option is ignored, if singleFileUploads is set to true.
    multipart: true,
    maxChunkSize: chunksize, //server side is in streaming mode
    sequentialUploads: true, //Set this option to true to issue all file upload requests in a sequential order instead of simultaneous requests.
    dataType: 'json',
    autoUpload: true,
    //acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i,
    progressInterval: 100,
    bitrateInterval: 100,
    maxFileSize: uploadFileSizeLimit
}).on('fileuploadadd', function (e, data) {
    var filename = data.files[0].name;
    var filesize = data.files[0].size;
    if (filesize == 0) {
        var zeroSizeErrMsg = sceneLayoutService.format('This file {filename} is empty please select files again without it. ', { filename: filename });
        sceneLayoutService.showErrorDialog(zeroSizeErrMsg);
        return;
    }
    if (window.availableStorageSize != null && window.availableStorageSize != '') {
        if (filesize > window.availableStorageSize) {
            var overSizeErrMsg = sceneLayoutService.format('File size of {filename} exceeds available storage space in your cloud drive. ', { filename: filename });
            sceneLayoutService.showErrorDialog(overSizeErrMsg);
            return;
        }
    } else {
        alert('Unable to retrieve the storage usage.');
    }
    data.jqXHR = data.submit();
    window.xhrPool.push(data.jqXHR);
    sceneLayoutService.addFileToProgressDialog(data, cancelButton);
}).on('fileuploadprocessalways', function (e, data) {
}).on('fileuploadprogressall', function (e, data) {
}).on('fileuploadsubmit', function (e, data) {
    var filesize = data.files[0].size;
    if (filesize == 0) {
        return false;
    }
    if (window.availableStorageSize != null && window.availableStorageSize != '') {
        if (filesize > window.availableStorageSize) {
            return false;
        }
    }
    $('#dlgProgress').parent().show();
    running++;
    sceneLayoutService.showProgressDialog('Uploading files to ' + currentUser + '\'s Cloud Storage ...', abortAllUploads);
    return true;
}).on('fileuploaddone', function (e, data) {
    running--;
    updateStorageQuota(function () {
        var usedStorageSize = (window.usedStorageSize != null) ? bytesToSize(window.usedStorageSize, 2) : 0;
        var totalStorageSize = (window.totalStorageSize != null) ? bytesToSize(window.totalStorageSize, 2) : 0;
        var usageFooterStr = sceneLayoutService.format("Using {used} of {total} (%)", { used: usedStorageSize, total: totalStorageSize });
        $('div.dlgProgressFooter').text(usageFooterStr);
    });
    var docGridUrl = window.baseUrl + '/CloudStorage/ChangePage?page=1&rand=' + sceneLayoutService.getRandomString(4);
    $('#docGridPartial').load(docGridUrl, function () {
        grid.init({
            pageNumber: 1,
            url: window.baseUrl + '/CloudStorage/ChangePage',
            sortColumn: '',
            sortDirection: ''
        });
    });
    sceneLayoutService.updateFileUploadFinalStatus(data, 'done');
    if (!data.result.success) {
        debugger;
        var errMsg = "";
        if (data.result != null) {
            if (data.result.message != null) {
                errMsg += data.result.message;
            }
            if (data.result.error != null)
                errMsg += data.result.error;
        }
        sceneLayoutService.showErrorDialog(errMsg);
    }
    window.removeXHRfromPool(data);
    if (running == 0) {
        $('#dlgProgress').parent().hide();
        $('#progresses').empty();
    }
}).on('fileuploadfail', function (e, data) {
    running--;
    sceneLayoutService.updateFileUploadFinalStatus(data, 'fail');
    window.removeXHRfromPool(data);
    if (running == 0) {
        $('#dlgProgress').parent().hide();
        $('#progresses').empty();
    }
}).on('fileuploadprogress', function (e, data) {
    //XHR upload onProgress event not fired at server-defined intervals/not supported in IE8 and IE9,
    //will be supported in IE10 in terms of XMLHttpRequest Level 2 specification, http://caniuse.com/xhr2
    sceneLayoutService.updateFileUploadProgress(data);
});
Issue resolved. It turns out the format of the SAS URI was incorrect. Here is how a SAS URI (for a container, in my case) should look:
http://container_uri/filename?key
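For example (all names and SAS parameters below are illustrative, not my real key), a Put Block request built from a container SAS keeps the SAS query string intact and appends the block parameters after it:

// sketch: building the Put Block URL from a container SAS
var containerUri = "https://myaccount.blob.core.windows.net/mycontainer"; // the container_uri part
var sasQuery = "?sv=2014-02-14&sr=c&sp=w&se=2015-01-01T00%3A00%3A00Z&sig=..."; // the "?key" part from the service
var fileName = "video.mp4";
var blockId = btoa("block-000001"); // block ids are base64 strings of equal length

var putBlockUrl = containerUri + "/" + fileName + sasQuery +
    "&comp=block&blockid=" + encodeURIComponent(blockId);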