Make the sails.js server monitor file changes using chokidar and then emit a socket message - sockets

I use chokidar to monitor whether files have changed in a folder. At the moment the watcher is set up when a user updates an Experiment model in my ExperimentController.
var chokidar = require('chokidar');
...
var watcher = chokidar.watch('assets/output-model-files', {ignored: /[\/\\]\./, persistent: true});
watcher.on('change', function(path) {
    // ...read my changed file, update the content of my database,
    // and send a socket publishUpdate message...
    // read the file content asynchronously
    fs.readFile(path, "utf-8", function (err, data) {
        // update the experiment object with the content of the changed file
        Experiment.update(req.param('id'), ExpObj, function expUpdated(err) {});
        // send a message via socket saying that the experiment object has been updated
        Experiment.publishUpdate(req.param('id'), {
            name: exp.name,
            results: JSON.stringify(myres),
            action: ('file has just been updated. nb of trajectories: ' + totalNbTrajectories)
        });
But I would like to constantly monitor the target folder and send Experiment.publishUpdate messages whenever a change happens, from the moment the sails.js server starts, not only when a user updates an experiment object.
Where could I place that chokidar.watch(...) code on the server side so as to update an experiment object from a file change? socket.js?

Ok. I found that locating the code in bootstrap.js seems to do the job perfectly regarding the event triggering...
My bootstrap.js now looks like:
var chokidar = require('chokidar');
var fs = require('fs');
var os = require('os');
var sys = require('sys');

module.exports.bootstrap = function(cb) {
    // It's very important to trigger this callback method when you are finished
    // with the bootstrap! (otherwise your server will never lift, since it's waiting on the bootstrap)
    User.update({}, {online: false}, function userUpdated(err, users) {
        if (err) {
            console.log(err);
        } else {
            var watcher = chokidar.watch('assets/output-model-files', {ignored: /[\/\\]\./, persistent: true});
            watcher.on('change', function(path) {
                console.log('File', path, 'has been changed');
                // do file reading and Experiment.publishUpdate here (see the sketch below)
            });
        }
        cb();
    });
};
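For completeness, here is a minimal sketch of what that change handler could look like. The id derivation is an assumption, not the original code: it supposes the experiment id can be recovered from the watched file's name (e.g. assets/output-model-files/42.json belongs to experiment 42) and stores the raw file content in results; adapt both to your real naming scheme and parsing.
var path = require('path');

watcher.on('change', function(filePath) {
    fs.readFile(filePath, 'utf-8', function (err, data) {
        if (err) { return console.log(err); }
        // Hypothetical naming scheme: the file is named after the experiment id,
        // e.g. assets/output-model-files/42.json -> id 42
        var expId = path.basename(filePath, path.extname(filePath));
        Experiment.update(expId, { results: data }, function expUpdated(err, exps) {
            if (err || !exps || !exps.length) { return console.log(err); }
            // Notify every socket subscribed to this experiment record
            Experiment.publishUpdate(expId, {
                name: exps[0].name,
                results: exps[0].results,
                action: 'file has just been updated'
            });
        });
    });
});
Note that inside bootstrap.js there is no req, so anything you previously read from req.param('id') has to come from the file itself (its path or its content).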


how to update service worker when online and reloading the page

Hello all, I have the following code to load the page when offline. I have a requirement where I need to cache a few pages and exclude a few from the cache. The code I wrote is working fine, but whenever I push any updates to the site it loads from the cache. How can I reload from the server instead of the cache when online?
I referred to this blog and summed it up with some modifications:
https://www.charistheo.io/blog/2021/03/cache-handling-with-service-workers-and-the-cache-api/
self.addEventListener("install", function (e) {
    self.skipWaiting();
    e.waitUntil(async function () {
        const cache = await caches.open("app");
        await cache.addAll([
            "/Scripts/jquery-3.6.0.min.js",
            "/ReportableEvent/Index",
            "/NearMissReport/Index",
            "/ReportableEvent/InternalReview?ReportableEventId=0"
        ]);
    }());
});
var uncachedPaths = new Set(["/Reportable/Reports", "/Dashboard/Index"]);
self.addEventListener("fetch", function (e) {
    if (e.request.method !== "GET") {
        return;
    }
    e.respondWith(
        caches.match(e.request, { ignoreSearch: true }).then(async cachedResponse => {
            if (cachedResponse) {
                return cachedResponse;
            } else {
                if (e.request.url.includes("service-worker.js") && !navigator.onLine) {
                    return;
                }
                // Fetch the requested resource from the network if it does not exist in the cache.
                const networkResponse = await fetch(e.request);
                // Response needs to be cloned if going to be used more than once.
                const clonedResponse = networkResponse.clone();
                if (!uncachedPaths.has(new URL(e.request.url).pathname)) {
                    // Save response to runtime cache for later use.
                    const runtimeCache = await caches.open("app");
                    runtimeCache.put(e.request, networkResponse);
                }
                // Respond with the cloned network response.
                return clonedResponse;
            }
        })
    );
});
Also, is it correct to cache a page that has query strings, like "/ReportableEvent/InternalReview?ReportableEventId=0"?
There are various approaches to serving files from the cache and to updating/invalidating it; please refer here.
Also, if you are updating your service worker JS files and want to update the cache as well, try appending a version to the cache name. For instance:
var version = 1.0;
var cachename = 'app' + version;
Every time you update the file, you can increment the version. This way each update creates a new cache, and the files are stored afresh.
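A common companion to this versioning scheme (standard service worker practice, not from the original answer) is deleting outdated caches in the activate event, so old versions stop taking up space and can never serve stale files again:
var version = 1.0;
var cachename = 'app' + version;

self.addEventListener('activate', function (e) {
    e.waitUntil(
        caches.keys().then(function (keys) {
            // Delete every cache whose name does not match the current version.
            return Promise.all(
                keys.filter(function (key) { return key !== cachename; })
                    .map(function (key) { return caches.delete(key); })
            );
        })
    );
});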

Loki.js lost data in Ionic Apps

I am developing an Ionic app using loki.js, but every time I refresh the app (press F5) I lose all the data stored in the loki database.
Why does this happen?
I am using no cache in my Ionic app.
It could be that when you press F5, the data saved in memory has not been written to the target JSON file yet.
You can try to explicitly set a save interval when you instantiate loki:
var _db = new Loki('./database/db.json', {
    autoload: true,
    autosave: true,
    autosaveInterval: 5000 // 5 secs
});

function add(newPatient) {
    return $q(function(resolve, reject) {
        try {
            var _patientsColl = GetCollection(dbCollections.PATIENTS);
            if (!_patientsColl) {
                _patientsColl = _db.addCollection(dbCollections.PATIENTS, {indices: ['firstname', 'lastname']});
            }
            _patientsColl.insert(newPatient);
            console.log('Collection data: ', _patientsColl.data);
            resolve();
        }
        catch (err) {
            reject(err);
        }
    });
}

function GetCollection(collectionName) {
    return _db.getCollection(collectionName);
}
With "autosaveInterval" the data in memory will be written to the JSON file every 5 seconds (you can adjust this value as you prefer).
EDIT
I added the code I use to save a new document into my collection, and even with a page refresh the data is stored correctly. I use "autosave" among the db settings; maybe you can set it as well, in case there is a code path that is not correctly caught when you explicitly trigger saving.
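If you would rather not rely on the autosave timer alone, loki can also persist on demand via its saveDatabase method; a minimal sketch using the names from the code above:
_patientsColl.insert(newPatient);
// Flush the in-memory database to db.json right away,
// instead of waiting for the next autosave tick.
_db.saveDatabase(function (err) {
    if (err) {
        console.error('loki save failed:', err);
    } else {
        console.log('database persisted');
    }
});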
Put the file in the /Users/xxx directory, and then, magically, the program runs normally.

Using Sailsjs Skipper file uploading with Flowjs

I'm trying to use skipper and flowjs with ng-flow together for big file uploads.
Based on the Node.js sample located in the flowjs repository, I've created my sails controller and service to handle file uploads. When I upload a small file it works fine, but if I try to upload a bigger file (e.g. a 200 MB video) I receive the errors listed below and the array req.file('file')._files is empty. Interestingly, this happens only a few times during an upload. For example, if flowjs cuts the file into 150 chunks, these errors appear in the sails console only 3-5 times. So almost all chunks are uploaded to the server, but a few are lost, and as a result the file is corrupted.
verbose: Unable to expose body parameter `flowChunkNumber` in streaming upload! Client tried to send a text parameter (flowChunkNumber) after one or more files had already been sent. Make sure you always send text params first, then your files.
These errors appear for all flowjs parameters.
I know that text parameters must be sent first for skipper to work correctly, and I've checked in the Chrome network console that flowjs sends the data in the correct order.
Any suggestions?
Controller method
upload: function (req, res) {
    flow.post(req, function (status, filename, original_filename, identifier) {
        sails.log.debug('Flow: POST', status, original_filename, identifier);
        res.status(status).send();
    });
}
Service post method
$.post = function(req, callback) {
    var fields = req.body;
    var file = req.file($.fileParameterName);
    if (!file || !file._files.length) {
        console.log('no file', req);
        file.upload(function() {});
    }
    var stream = file._files[0].stream;
    var chunkNumber = fields.flowChunkNumber;
    var chunkSize = fields.flowChunkSize;
    var totalSize = fields.flowTotalSize;
    var identifier = cleanIdentifier(fields.flowIdentifier);
    var filename = fields.flowFilename;
    if (file._files.length === 0 || !stream.byteCount) {
        callback('invalid_flow_request', null, null, null);
        return;
    }
    var original_filename = stream.filename;
    var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, stream.byteCount);
    if (validation == 'valid') {
        var chunkFilename = getChunkFilename(chunkNumber, identifier);
        // Save the chunk via the skipper file upload API
        file.upload({saveAs: chunkFilename}, function(err, uploadedFiles) {
            // Do we have all the chunks?
            var currentTestChunk = 1;
            var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
            var testChunkExists = function() {
                fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists) {
                    if (exists) {
                        currentTestChunk++;
                        if (currentTestChunk > numberOfChunks) {
                            callback('done', filename, original_filename, identifier);
                        } else {
                            // Recursion
                            testChunkExists();
                        }
                    } else {
                        callback('partly_done', filename, original_filename, identifier);
                    }
                });
            };
            testChunkExists();
        });
    } else {
        callback(validation, filename, original_filename, identifier);
    }
};
Edit
Found a solution: set the flowjs property maxChunkRetries: 5, because by default it's 0.
On the server side, if req.file('file')._files is empty, I throw a non-permanent (in the flowjs sense) error.
So this solves my problem, but the question of why it behaves like this is still open. The sample code for flowjs and Node.js uses connect-multiparty and has no additional error handling code, so it's most likely a skipper body parser bug.
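For reference, a sketch of that retry-friendly error path (the concrete wiring is my assumption, not the asker's exact code). flowjs only gives up on statuses listed in its permanentErrors option; any other non-success status makes ng-flow re-send the chunk, up to maxChunkRetries times:
upload: function (req, res) {
    var file = req.file('file');
    if (!file || !file._files.length) {
        // 409 is not a flowjs permanentError, so the client
        // will retry this chunk instead of aborting the upload.
        return res.status(409).send('chunk not received, please retry');
    }
    flow.post(req, function (status, filename, original_filename, identifier) {
        res.status(status).send();
    });
}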

Is there a way to create and run a dynamic script from karma.conf.js

I'm using karma to run tests on an angularjs application.
There are a couple of JavaScript functions that I would like to run at start-up, but they need to be dynamically created based on some system data. When running the app, this is handled with node.
Is there any way to create a script as a var and pass it to files: [] rather than just using a pattern to load an existing file?
I can make this work by creating the file, saving it to disk, and then loading it normally, but that's messy.
You can create your own karma preprocessor script.
As a starting point, use the following example:
var fs = require('fs'),
    path = require('path');

var createCustomPreprocessor = function (config, helper, logger) {
    var log = logger.create('custom'),
        // get here the configuration set in the karma.conf.js file
        customConfig = config.customConfig || {};
    // read more config here in case needed
    ...
    // a preprocessor has to return a function that when completed will call
    // the done callback with the preprocessed content
    return function (content, file, done) {
        log.debug('custom: processing "%s"\n', file.originalPath);
        // your crazy code here
        fs.writeFile(path.join(outputDirectory, name), ..., function (err) {
            if (err) {
                log.error(err);
            }
            done(content);
        });
    };
};

createCustomPreprocessor.$inject = ['config', 'helper', 'logger'];

module.exports = {
    'preprocessor:custom': ['factory', createCustomPreprocessor]
};
Add a package.json with the dependencies and serve it as a module. ;)
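To actually run it, the preprocessor has to be registered in karma.conf.js. A sketch, assuming the module above is installed (or npm-linked) under the hypothetical name karma-custom-preprocessor:
// karma.conf.js
module.exports = function (config) {
    config.set({
        frameworks: ['jasmine'],
        files: ['src/**/*.js', 'test/**/*.spec.js'],
        // run the custom preprocessor over the matching files
        preprocessors: {
            'src/**/*.js': ['custom']
        },
        // settings the preprocessor reads via config.customConfig
        customConfig: {
            // hypothetical option consumed by your preprocessor
            outputDirectory: 'tmp/generated'
        },
        plugins: ['karma-jasmine', 'karma-custom-preprocessor']
    });
};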
For more examples, have a look at other preprocessor modules here: https://www.npmjs.org/search?q=karma%20preprocessor

OpenWhisk stops responding after the first call

I am using a Vagrant VM on Windows 10. I have written a lightweight, experimental OpenWhisk action in Node.js that checks connectivity to MongoDB. Following is the code.
function entryPoint(args) {
    var mongoose = require('mongoose');
    var MongoClient = require('mongodb').MongoClient;
    var Schema = mongoose.Schema;
    mongoose.Promise = global.Promise;
    return new Promise((resolve, reject) => {
        mongoose.connect("mongodb://192.168.16.1:27017/angularcrud").then(
            () => {
                var Coin = new Schema({
                    name: {
                        type: String
                    },
                    price: {
                        type: Number
                    }
                });
                var coinModel = mongoose.model('Coin', Coin);
                coinModel.find(function (err, coins) {
                    if (err) {
                        console.log(err);
                    } else {
                        resolve({coins});
                    }
                });
            },
            err => { return reject(err); }
        );
    });
}

module.exports.main = entryPoint;
I have zipped the Node.js files and created the action with wsk. When I run it for the first time, I get the expected results. But when I run it a second time, the call hangs for a minute or so and then I see the following error.
ok: invoked /_/getcoins, but the request has not yet finished, with id 612da4ebb5f64795ada4ebb5f6e7957c
I do not know where to start investigating, as I am totally new to both OpenWhisk and MongoDB.
To start investigating this, use wsk activation logs 612da4ebb5f64795ada4ebb5f6e7957c (in this case) to get at least the logs of your failed activation.
I think the error handling in your action is not correct. If the find call returns an error, the Promise is never resolved.
I had a brief look at mongoose's documentation. find can apparently return a Promise if you call exec on it.
In any case, make sure to use Promise combinators as much as possible, so you're pretty much guaranteed that every edge case is properly handled and the Promise always completes one way or the other. If you need to fall back to "manual" creation of Promises, scope them as small as possible.
function entryPoint(args) {
    var mongoose = require('mongoose');
    var Schema = mongoose.Schema;
    mongoose.Promise = global.Promise;
    return mongoose.connect("mongodb://192.168.16.1:27017/angularcrud").then(() => {
        var Coin = new Schema({
            name: {
                type: String
            },
            price: {
                type: Number
            }
        });
        var coinModel = mongoose.model('Coin', Coin);
        return coinModel.find().exec();
    });
}

module.exports.main = entryPoint;
A few remarks based on the commands:
1. "The request has not yet finished, with id 612da4ebb5f64795ada4ebb5f6e7957c"
Try "wsk -i activation get 612da4ebb5f64795ada4ebb5f6e7957c" to see the information about this activation. The status and the result should be in there.
2. I suspect that this is an asynchronous call. You can also try "wsk -i action invoke --blocking" to wait for the result to come back; --blocking means the invocation returns with the result.