I am looping through a bunch of records in MongoDB and putting them into kue for some processing. I won't include all of that code here because it does not seem relevant, but I do something like the following:
var record;
for (var x = 0; x < results.length; x++) {
  record = results[x];
  console.log(record);
  queue.create("process_images", record).priority('normal').save();
}
If there is something I shouldn't be doing here, like creating queue jobs in a loop, please point it out. In any case, when I do the console.log I get output like the following:
{ id: 5577fe8a4b795f3c068fbb10,
collection_name: 'Image',
data_source: 'Mongo',
origin_dir: 'path to origin dir',
img_src: '/uploads/images/5577/5577fe8a4b795f3c068fbb10/IMG_0158.JPG' }
{ id: 5577feb84b795f3c068fbb11,
collection_name: 'Image',
data_source: 'Mongo',
origin_dir: 'path to origin dir',
img_src: '/uploads/images/5577/5577feb84b795f3c068fbb11/IMG_0018.JPG' }
which is what I expect.
But in my watcher when I do the following:
var queue = kue.createQueue();

queue.on('job complete', function(id, result) {
  kue.Job.get(id, function(err, job) {
    if (err) return;
    job.remove(function(err) {
      if (err) throw err;
      console.log('Removed completed job #%d', job.id);
    });
  });
});

queue.process("process_images", function(job, done) {
  check_it(job.data, done);
});

function check_it(data, done) {
  console.log("handling data");
  console.log(data);
  // email send stuff...
  done();
}
I get the following:
Removed completed job #669
handling data
{ id: '55cd97a68c9f913608c5101b',
collection_name: 'Image',
data_source: 'Mongo',
origin_dir: 'path to origin dir',
img_src: '/uploads/images/55cd/55cd97a68c9f913608c5101b/11378154_422142334643881_2122676904_n.jpg' }
Removed completed job #670
handling data
{ id: '55cd97a68c9f913608c5101b',
collection_name: 'Image',
data_source: 'Mongo',
origin_dir: 'path to origin dir',
img_src: '/uploads/images/55cd/55cd97a68c9f913608c5101b/11378154_422142334643881_2122676904_n.jpg' }
again and again, always with only that one record, which happens to be the last job I saved. That implies to me that there is some async situation I can't see, or that I have completely misunderstood how kue works. My main problem here is probably that I am used to how Resque does things, and this seems a little off.
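For illustration, one classic async situation that produces exactly this symptom (every callback seeing the last record) is sharing a single var-scoped variable across asynchronous callbacks. A minimal sketch of the pitfall and one fix, assuming the same results array:

var record;
for (var x = 0; x < results.length; x++) {
  record = results[x];
  // If anything in here runs asynchronously, it reads `record` later,
  // after the loop has finished, so every callback sees the last value.
  setImmediate(function() {
    console.log(record.id); // logs the last id N times
  });
}

// Fix: give each iteration its own binding, e.g. with forEach
results.forEach(function(record) {
  setImmediate(function() {
    console.log(record.id); // logs each id once
  });
});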
Related
I am using the Protractor-Cucumber framework with Protractor 5.2.2 and Cucumber 3.2. I have a requirement to post in a number of locations, so I have written a script that does it in a loop, but it randomly fails before completing the loop. When the script ends abnormally, is there something like an exception handling section that gets control before exiting? The script can fail for any number of reasons: a WebDriver issue, NoSuchElementError, ElementIsNotInteractable, ElementIsNotVisible, etc. Whatever the issue is, I have to handle it, and if the script fails I have to send an email notification. I have tried try/catch, as shown below, but it does not work for me.
When(/^I login$/, function () {
  try {
    element(by.css(".signin")).click();
    var count = post_details.length;
    for (var i = 0; i < count; i++) {
      post();
    }
  }
  catch (e) {
    console.log("failed");
  }
});
How can we do this in Protractor-Cucumber? Thanks in advance.
For the exception problem you can try the ignoreUncaughtException option.
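A minimal sketch of where that option would go in the Protractor config, using the flag name given above and assuming the protractor-cucumber-framework setup from the question (verify the exact option name against the Protractor config reference):

// protractor.conf.js (sketch)
exports.config = {
  framework: 'custom',
  frameworkPath: require.resolve('protractor-cucumber-framework'),
  specs: ['features/**/*.feature'],
  // Option name taken from the suggestion above; it tells Protractor
  // not to abort the run on uncaught exceptions.
  ignoreUncaughtException: true
};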
For the email part, create a hooks.js file. There you can set up the After() function to check whether your scenario failed or not. See the Cucumber Docs.
Example:
// hooks.js
const { After, AfterAll, Status } = require('cucumber');

let failed = false;

After(function (scenario) {
  if (scenario.result.status === Status.FAILED) {
    failed = true;
    const attach = this.attach;
    // creates a screenshot for the report
    return browser.takeScreenshot().then(function (png) {
      return attach(new Buffer(png, "base64"), "image/png");
    });
  }
});
Then you can use nodemailer to send the messages (see the Nodemailer docs).
In your AfterAll() function you can handle the send part.
Example:
const nodemailer = require('nodemailer'); // also needed at the top of hooks.js

AfterAll(function (callback) {
  console.log("AfterAll");
  if (failed) {
    var transporter = nodemailer.createTransport({
      host: 'host.com',
      port: xx,
      secure: false,
      //proxy: 'http://10.10.10.6:1098',
      auth: {
        user: userMail,
        pass: pw
      }
    });
    var mailOptions = {
      from: 'xx', // sender address (who sends)
      to: 'xxxxxx@mail.com', // recipient address
      subject: 'your subject', // Subject line
      text: 'Your test failed....', // plaintext body
      /*attachments: [
      {
        filename: 'report.html',
        path: htmlReport,
      }]*/
    };
    transporter.sendMail(mailOptions, function (error, info) {
      if (error) {
        return console.log(error);
      }
      console.log('Email sent: ' + info.response);
      console.log(info);
    });
  } else {
    //do your stuff
  }
  setTimeout(callback, 2000);
});
This is quite a broad question. I currently have a Sails API server and a standalone React front-end.
Note: The React Front-End is NOT part of Sails
I'm trying to get to grips with sockets, so I figured I would start simple. I want to achieve the following:
User visits my website (React)
React opens a socket and connects to Sails
Sails streams the data from within a function/model
React updates when new data is added to the model
I semi-understand how this works using Express and React, but I cannot get my head around how Sails implements its version of WebSockets on top of Socket.io.
What I've done is install socket.io-client within React, and then try to use sails.sockets inside Sails.
This is what I currently have:
React Component NB: I don't think this is correct at all
componentDidMount = () => {
  this.getSessionData();
  UserStore.listen(this.getSessionData);
  Socket.emit('/listSessions', function(data) {
    console.log(data);
  })
}
Sails Function (listSessions)
listSessions: function(req, res) {
  Session.find({ where: { visible: true }, sort: 'createdAt DESC' },
    function(err, sessions) {
      if (req.isSocket) {
        Session.watch(req.socket);
        console.log('User subscribed to ' + req.socket.id);
      }
      if (err) return res.json(500, {
        error: err,
        message: 'Something went wrong when finding trades'
      });
      return res.json(200, {
        sessions: sessions,
      });
    })
},
Sails Function (createSession), trying to use publishCreate in conjunction with Session.watch in the above function:
createSession: function(req, res) {
  var token = jwt.sign({
    expiresIn: 30,
  }, 'overwatch');
  Session.create({
    username: req.body.username,
    platform: req.body.platform,
    lookingFor: req.body.lookingFor,
    microphone: req.body.microphone,
    gameMode: req.body.gameMode,
    comments: req.body.comments,
    avatar: null,
    level: null,
    hash: token,
    competitiveRank: null,
    region: req.body.region,
    visible: true,
  }).exec(function(err, created) {
    Session.publishCreate(created);
    if (err) {
      console.log(err);
      return res.send({
        error: err,
        message: 'Something went wrong when adding a session',
        code: 91
      })
    }
    if (req.isSocket) {
      Session.watch(req.socket);
      console.log('User subscribed to ' + req.socket.id);
    }
    return res.send({
      session: created,
      code: 00,
    })
  });
},
Both of the Sails functions are called using POST/GET.
I'm completely stumped as to where to go with this, and documentation or explanation on how to get it working seems limited. All the Sails documentation on sockets seems to relate to using Sails as both the front-end and the server.
OK so I managed to solve this:
Simply put:
Within React, I had to include https://github.com/balderdashy/sails.io.js/tree/master
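If you pull sails.io.js in from npm instead of a script tag, the wiring is roughly the following sketch (assuming the sails.io.js and socket.io-client packages are installed and the Sails backend runs on port 1337):

// e.g. in a small socket.js module imported by the React app
import socketIOClient from 'socket.io-client';
import sailsIOClient from 'sails.io.js';

// sails.io.js wraps the raw socket.io client and auto-connects
const io = sailsIOClient(socketIOClient);
io.sails.url = 'http://localhost:1337';

export default io;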
Then within my React component I did:
componentDidMount = () => {
  io.socket.get('/listSessions', (resData, jwres) => {
    console.log('test');
    this.setState({
      sessions: resData.sessions,
      loaded: true,
    })
  })
  io.socket.on('session', (event) => {
    if (event.verb == 'created') {
      let sessions = this.state.sessions;
      sessions.push(event.data);
      this.setState({
        sessions: sessions
      })
    } else {
      console.log('nah');
    }
  });
}
This makes a virtual GET request to Sails over Socket.io and puts the response in state. It also listens for updates on the 'session' event and applies them to state, meaning I can update a list in real time.
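As a usage note (a sketch using a hypothetical named handler, handleSessionEvent, rather than the inline arrow function above): the 'session' listener can be unbound when the component unmounts so handlers don't accumulate across mounts.

componentWillUnmount = () => {
  // io.socket.off removes a handler previously bound with io.socket.on
  io.socket.off('session', this.handleSessionEvent);
}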
Within my Sails controller I have:
listSessions: function(req, res) {
  if (req.isSocket) {
    Session.find({ where: { visible: true }, sort: 'createdAt DESC' },
      function(err, sessions) {
        Session.watch(req.socket);
        if (err) return res.json(500, {
          error: err,
          message: 'Something went wrong when finding trades'
        });
        return res.json(200, {
          sessions: sessions,
        });
      })
  }
},
The Session.watch line subscribes the socket to updates sent via publishCreate, which is called in my model as follows:
afterCreate: function(message, next) {
  Session.publishCreate(message);
  next();
},
Adding to the answer by @K20GH: add the following to my index.js in React to help get sails.io.js from the CDN:
const fetchJsFromCDN = (src, externals = []) => {
  return new Promise((resolve, reject) => {
    const script = document.createElement('script');
    script.setAttribute('src', src);
    script.addEventListener('load', () => {
      resolve(
        externals.map(key => {
          const ext = window[key];
          typeof ext === 'undefined' &&
            console.warn(`No external named '${key}' in window`);
          return ext;
        })
      );
    });
    script.addEventListener('error', reject);
    document.body.appendChild(script);
  });
};

fetchJsFromCDN(
  'https://cdnjs.cloudflare.com/ajax/libs/sails.io.js/1.0.1/sails.io.min.js',
  ['io']
).then(([io]) => {
  if (process.env.NODE_ENV === 'development') {
    io.sails.url = 'http://localhost:1337';
  }
});
Once you have this, you'll be able to use the HTTP-style GET, PUT, POST and DELETE methods over the socket. So here you can do:
componentDidMount = () => {
  io.socket.get('/listSessions', (resData, jwres) => {
    console.log('test');
    this.setState({
      sessions: resData.sessions,
      loaded: true,
    })
  })
  io.socket.on('session', (event) => {
    if (event.verb == 'created') {
      let sessions = this.state.sessions;
      sessions.push(event.data);
      this.setState({
        sessions: sessions
      })
    } else {
      console.log('Not created session');
    }
  });
}
And you can do the required setup in Sails for the session model as suggested above.
I have a Mongoose schema with a unique field and I am trying to write a backend (Express) integration test which checks that POSTing the same entity twice results in HTTP 400. When testing manually, the behaviour is as expected. Automatic testing, however, requires a wait:
it('should not accept two projects with the same name', function(done) {
  var project = // ...
  postProjectExpect201(project,
    () => {
      setTimeout(() => {
        postProjectExpect400(project, done);
      }, 100);
    }
  );
});
The two post... methods do what their names say, and the code above works fine, but if the timeout is removed, BOTH requests receive HTTP 200 (though only one entity is created in the database).
I'm new to these technologies and I'm not sure what's going on. Could this be a MongoDB-related concurrency issue, and if so, how should I deal with it?
The database call looks like this:
Project.create(req.body)
  .then(respondWithResult(res, 201))
  .catch(next);
I already tried connecting to MongoDB with the ?w=1 option, btw.
Update:
To be more verbose: Project is a Mongoose model, and next is my Express error handler, which catches the duplicate-key error.
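One plausible explanation (an assumption on my part, not confirmed in the question) is that the unique index has not finished building when the test runs: Mongoose builds indexes in the background at startup, and until the index exists MongoDB will happily accept the duplicate. A minimal sketch of waiting for index creation before the tests run:

before(function() {
  // Project.init() (available in recent Mongoose versions) returns a promise
  // that resolves once the model's index builds have completed.
  return Project.init();
});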
The test functions:
var postProjectExpect201 = function(project, done, validateProject) {
  request(app)
    .post('/api/projects')
    .send(project)
    .expect(201)
    .expect('Content-Type', /json/)
    .end((err, res) => {
      if (err) {
        return done(err);
      }
      validateProject && validateProject(res.body);
      done();
    });
};

var postProjectExpect400 = function(project, done) {
  request(app)
    .post('/api/projects')
    .send(project)
    .expect(400)
    .end((err, res) => {
      if (err) {
        return done(err);
      }
      done();
    });
};
Currently I am capturing an image from the camera in Base64 format and sending it through Ajax.
xhr({
  uri: 'http://localhost:1337/file/upload',
  method: 'post',
  body: 'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAA...'
}
0 file(s) uploaded successfully!
Here is a nice link that will guide you through sending an image from an Ajax client to the server.
http://www.nickdesteffen.com/blog/file-uploading-over-ajax-using-html5
You can read the Sails documentation on receiving files on a Sails server:
http://sailsjs.org/documentation/reference/request-req/req-file
You can do as in the following example:
Client side (Ajax):
var files = [];

$("input[type=file]").change(function(event) {
  $.each(event.target.files, function(index, file) {
    var reader = new FileReader();
    reader.onload = function(event) {
      var object = {};
      object.filename = file.name;
      object.data = event.target.result;
      files.push(object);
    };
    reader.readAsDataURL(file);
  });
});

$("form").submit(function(form) {
  $.each(files, function(index, file) {
    $.ajax({
      url: "/ajax-upload",
      type: 'POST',
      data: {filename: file.filename, data: file.data}, // file.data is your base 64
      success: function(data, status, xhr) {}
    });
  });
  files = [];
  form.preventDefault();
});
Server side (Sails):
[let's say you have a model Picture that takes an ID and a URL]
[here is a sample of the Picture controller, just to give you an idea]
module.exports = {
  uploadPicture: function(req, res) {
    req.file('picture').upload({
      // don't allow the total upload size to exceed ~10MB
      maxBytes: 10000000
    },
    function onDone(err, uploadedFiles) {
      if (err) {
        return res.negotiate(err);
      }
      // If no files were uploaded, respond with an error.
      if (uploadedFiles.length === 0) {
        return res.badRequest('No file was uploaded');
      }
      // Save the "fd" and the url where the avatar for a user can be accessed
      Picture
        .update(777, { // give real ID
          // Generate a unique URL where the avatar can be downloaded.
          pictureURL: require('util').format('%s/user/pictures/%s', sails.getBaseUrl(), 777), // GIVE REAL ID
          // Grab the first file and use its `fd` (file descriptor)
          pictureFD: uploadedFiles[0].fd
        })
        .exec(function (err) {
          if (err) return res.negotiate(err);
          return res.ok();
        });
    });
  }
};
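Note that req.file() expects a multipart upload; if the client keeps POSTing a raw base64 data URL as in the question's xhr snippet, the body has to be decoded by hand. A rough sketch, with hypothetical action and field names:

// Hypothetical controller action for a JSON body like { filename, data },
// where `data` is a data URL such as "data:image/jpeg;base64,...".
uploadBase64: function(req, res) {
  var matches = (req.body.data || '').match(/^data:(.+?);base64,(.+)$/);
  if (!matches) return res.badRequest('Expected a base64 data URL');
  var buffer = new Buffer(matches[2], 'base64'); // Buffer.from() on newer Node
  require('fs').writeFile('/tmp/' + req.body.filename, buffer, function(err) {
    if (err) return res.negotiate(err);
    return res.ok();
  });
}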
Hope this will help in your research.
I also recommend you use Postman to test your API first, then code your client.
I am trying to modify the HTTP status code of create.
POST /api/users
{
  "lastname": "wqe",
  "firstname": "qwe"
}
Returns 200 instead of 201
I can do something like that for errors:
var err = new Error();
err.statusCode = 406;
return callback(err, info);
But I can't find how to change status code for create.
I found the create method:
MySQL.prototype.create = function (model, data, callback) {
  var fields = this.toFields(model, data);
  var sql = 'INSERT INTO ' + this.tableEscaped(model);
  if (fields) {
    sql += ' SET ' + fields;
  } else {
    sql += ' VALUES ()';
  }
  this.query(sql, function (err, info) {
    callback(err, info && info.insertId);
  });
};
In your call to remoteMethod you can add a function to the response directly. This is accomplished with the rest.after option:
function responseStatus(status) {
  return function(context, callback) {
    var result = context.result;
    if (testResult(result)) { // testResult is some method for checking that you have the correct return data
      context.res.statusCode = status;
    }
    return callback();
  }
}

MyModel.remoteMethod('create', {
  description: 'Create a new object and persist it into the data source',
  accepts: {arg: 'data', type: 'object', description: 'Model instance data', http: {source: 'body'}},
  returns: {arg: 'data', type: mname, root: true},
  http: {verb: 'post', path: '/'},
  rest: {after: responseStatus(201)}
});
Note: It appears that strongloop will force a 204 "No Content" if the context.result value is falsey. To get around this I simply pass back an empty object {} with my desired status code.
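A one-line illustration of that workaround inside the remote method's implementation (not from the original answer):

// Return an empty object instead of nothing so the response isn't
// collapsed to 204 No Content and the 201 set above is kept.
return callback(null, {});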
You can specify a default success response code for a remote method in the http parameter.
MyModel.remoteMethod(
  'create',
  {
    http: {path: '/', verb: 'post', status: 201},
    ...
  }
);
For LoopBack version 2 and 3+: you can also use the afterRemote hook to modify the response:
module.exports = function(MyModel) {
  MyModel.afterRemote('create', function(
    context,
    remoteMethodOutput,
    next
  ) {
    context.res.statusCode = 201;
    next();
  });
};
This way, you don't have to modify or touch original method or its signature. You can also customize the output along with the status code from this hook.
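For example, a sketch of reshaping the body in the same hook (the wrapper shape here is purely illustrative):

MyModel.afterRemote('create', function(context, remoteMethodOutput, next) {
  context.res.statusCode = 201;
  // context.result is what gets serialized into the response body
  context.result = { data: remoteMethodOutput, created: true };
  next();
});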