How to check if value already exists in the data received from api before inserting it into db - mongodb

I am having a hard time trying to write data received from an API to the database.
I successfully get the data and then have to write it to the db. The point is to check whether the quote already exists in my collection.
The problem I am dealing with is that every value gets inserted into my collection, regardless of whether it already exists or not.
const { MongoClient } = require('mongodb')

const mongoUrl = 'mongodb://localhost/kanye_quotes'

async function connectToDb() {
  const client = new MongoClient(mongoUrl, { useNewUrlParser: true })
  await client.connect()
  db = client.db()
}

async function addQuote(data) {
  await connectToDb()
  try {
    const collection = db.collection('quotes')
    let quotes = [];
    quotes = await collection.find({}).toArray()
    if (quotes = []) { // I added this piece of code because if not check for [], no values will be inserted
      collection.insertOne(data, (err, result) => {
        if (err) {
          return
        }
        console.log(result.insertedId);
        return
      })
    }
    quotes.forEach(quote => {
      if (quote.quote !== data.quote) { // I compare received data with data in collection, it actually works fine (the comparison works as it is supposed to)
        collection.insertOne(data, (err, result) => {
          if (err) {
            return
          }
          console.log(result.insertedId);
        })
      } else console.log('repeated value found'); // repeated value gets inserted. Why?
    })
  }
  catch (err) {
    console.log(err)
  }
}

Hi, it's probably better to set a unique: true index on your schema. That way you won't have duplicated values.
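For example, with the native driver used in the question, a rough sketch of that approach could look like the following (the index only needs to be created once; inserting a duplicate then fails with a duplicate key error, code 11000):
async function addQuote(data) {
  await connectToDb()
  const collection = db.collection('quotes')
  // Unique index on the quote text; creating it again is a no-op if it already exists.
  await collection.createIndex({ quote: 1 }, { unique: true })
  try {
    const result = await collection.insertOne(data)
    console.log(result.insertedId)
  } catch (err) {
    if (err.code === 11000) {
      console.log('repeated value found') // duplicate key, nothing inserted
    } else {
      throw err
    }
  }
}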

Related

Order of save() and find() in NodeJS with MongoDB

I'm trying to create a new record in my MongoDB ("thisPlayer") and save it to my database, then find all records in my database (including the new one) and render them.
I am having trouble understanding why my save() function actually occurs after my find() function. When this code executes, the find() function does not include my new thisPlayer record. However, after the find() runs, the save occurs -- the record is saved to the database AFTER the find() ran.
Thanks in advance!
const playerNumber = async function countPlayers() {
  return new Promise((resolve, reject) => {
    Player.count(function(err, numOfDocs) {
      err ? reject(err) : resolve(numOfDocs);
      console.log('I have ' + numOfDocs + ' documents in my collection');
    });
  });
}

async function playerProfile() {
  var count = await playerNumber();
  console.log("count already in db: " + count);
  if (count === 0) {
    teamCaptain = 1;
  } else {
    teamCaptain = 0;
  }
  count++;
  const thisPlayer = new Player({
    playerNum: count,
    playerName: Name,
  });
  thisPlayer.save();
  Player.find({}, function(err, playaz) {
    var playerOne;
    if (playaz.length > 0) {
      playerOne = playaz[0].playerName;
    } else {
      playerOne = "";
    }
    res.renderPjax("leavetakings",
      { player1: "1: " + playerOne }
    );
  });
}
playerProfile();
You need to use await, for example:
await Player.find({})
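For instance, a minimal sketch of the same flow with both operations awaited (assuming Mongoose models, leaving out the teamCaptain bookkeeping; Name and res come from the surrounding scope as in the question):
async function playerProfile() {
  const count = await playerNumber();
  const thisPlayer = new Player({
    playerNum: count + 1,
    playerName: Name,
  });
  // Wait for the insert to finish before querying,
  // so the following find() can see the new record.
  await thisPlayer.save();
  const playaz = await Player.find({});
  const playerOne = playaz.length > 0 ? playaz[0].playerName : "";
  res.renderPjax("leavetakings", { player1: "1: " + playerOne });
}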

.find() returning nothing even when data exists

I have a mongo database with 3 collections for 3 different kinds of users: User, Partner, and Admin. Whenever a new user of any type signs up, I search all three collections to check if the username and email already exist. I'm trying to achieve this by calling a function as:
function checkAttribute(attr, val, callback) {
  User.find({ attr: val }, function(err, user) {
    if (err) {
      console.log(err);
    } else {
      if (user.length === 0) {
        Partner.find({ attr: val }, function(err, partner) {
          if (err) {
            console.log(err);
          } else {
            if (partner.length === 0) {
              Admin.find({ attr: val }, function(err, admin) {
                if (err) {
                  console.log(err);
                } else {
                  if (admin.length === 0) {
                    return callback(null, true);
                  } else {
                    return callback(null, false);
                  }
                }
              });
            } else {
              return callback(null, false);
            }
          }
        });
      } else {
        return callback(null, false);
      }
    }
  });
};
The calling code:
checkAttribute("username", newUser.username, function(error, response) {
  .......
});
But this is not working: it always returns true, even when a user with the passed username/email already exists. I am unable to find the problem. Does anyone know why this is happening?
Thanks in advance.
Since you are passing the attribute in as a variable in the function parameters, the query document
{ attr: val } is an object with the literal key "attr", not the dynamic attribute you pass in.
To fix this, you need to use a computed property name in your query object:
{ [attr]: val }
Also, the function can use the async/await pattern to be more readable, and for the purpose of finding out whether a document exists, findOne does the job well, as it returns a document if one exists and null otherwise.
So your function can be refactored as:
async function checkAttribute(attr, val, callback) {
  try {
    const query = { [attr]: val }
    const user = await User.findOne(query).exec()
    const partner = await Partner.findOne(query).exec()
    const admin = await Admin.findOne(query).exec()
    const found = (user || partner || admin) ? true : false
    return callback(null, found)
  } catch (err) {
    console.error(err)
    return callback(err, null)
  }
};
attr: in your queries will search for a db field called attr. If you want to use the function parameter attr, use [attr]: as the key.
Example:
attr = 'username'
User.find({ [attr]: val }, function (err, user) {
  if (err) {
    console.log(err);
  }
})
Computed property names are a feature available since ES6, so this should work fine. See the docs here for more info.

Loopback - How to use bulkUpdate method

I'm using Loopback v3 currently and wanted to upsert many records at once in a collection; I found the bulkUpdate method in the documentation (http://apidocs.loopback.io/loopback/#persistedmodel-bulkupdate) but I couldn't figure out how to make it work.
How can I create the updates array from the createUpdates() method as mentioned in the documentation? Can anyone help me with a simple example of using this method?
There is an alternative to the bulkUpdate method, based on the approach found in the Stack Overflow question MongoDB aggregation on Loopback.
A mixin can easily be created and reused across models. My sample code for a bulkUpsert mixin is below:
Model.bulkUpsert = function(body, cb) {
  try {
    Model.getDataSource().connector.connect(async (err, db) => {
      if (err) {
        return cb(err);
      }
      // Define variable to hold the description of the first set of validation errors found
      let validationErrors = '';
      // Build array of updateOne objects used for MongoDB connector's bulkWrite method
      const updateOneArray = [];
      // Loop through all body content and stop the loop if a validation error is found
      const hasError = body.some(row => {
        // Check if it is a valid model instance
        const instance = new Model(row);
        if (!instance.isValid()) {
          // A validation error has been found
          validationErrors = JSON.stringify(instance.errors);
          // By returning true we stop/break the loop
          return true;
        }
        // Copy the row and remove its ID so it is not duplicated in the $set payload
        const data = JSON.parse(JSON.stringify(row));
        delete data.id;
        // Push into the update array
        updateOneArray.push({
          updateOne: {
            filter: { _id: row.id },
            update: { $set: Object.assign({ _id: row.id }, data) },
            upsert: true
          }
        });
        // No validation error found
        return false;
      });
      // Check if a validation error was found while looping through the body content
      if (hasError) {
        return cb(new Error(validationErrors));
      }
      // No validation error was found
      // Get database collection for model
      const collection = db.collection(Model.name);
      // Execute bulk operation
      return collection.bulkWrite(updateOneArray, {}, (err, res) => {
        // Check if the process failed
        if (err) {
          console.error('The bulk upsert finished unsuccessfully', err);
          return cb(err);
        }
        // Check if there were errors updating any record
        if (res.hasWriteErrors()) {
          console.error(`The bulk upsert had ${res.getWriteErrorCount()} errors`, res.getWriteErrors());
        }
        // Finished successfully, return result
        return cb(null, {
          received: body.length,
          handled: res.upsertedCount + res.insertedCount + res.matchedCount
        });
      });
    });
  }
  catch (err) {
    console.error('A critical error occurred while doing bulk upsert', err);
    return cb(err);
  }
  return null;
};
Ref: Mongodb query documentation
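As a usage sketch (assuming the mixin has been attached to some model, here called Product purely as an example, and that every row carries its id), the call follows the (body, cb) signature defined above:
// Product is a hypothetical model with the bulkUpsert mixin applied.
Product.bulkUpsert(
  [
    { id: 'id-1', name: 'First item' },  // placeholder ids
    { id: 'id-2', name: 'Second item' }
  ],
  (err, result) => {
    if (err) return console.error(err);
    console.log(result); // e.g. { received: 2, handled: 2 }
  }
);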

ES6 promise will not always work with a MongoDB replica set

I followed How to use MongoDB with promises in Node.js?. Answer 4 by pirateapp (https://stackoverflow.com/users/5371505/pirateapp) works well with a regular MongoDB server, but it does not always work with a MongoDB replica set.
const mongodb = require('mongodb');
const MongoClient = mongodb.MongoClient;

// the url talking to the replicaSet does not work, while the url with a regular mongoDB server seems to work for me.
// const url = 'mongodb://alexlai:alex1765#arch16GMongo01.yushei.me:27017,arch16GMongo02.yushei.me:27017,arch16GMongo03:27017/YuShei?replicaSet=odroid00&connectTimeoutMS=300000';
url = 'mongodb://172.16.1.108/YuShei';

let db = {
  open: open,
}

function open() {
  return new Promise((resolve, reject) => {
    MongoClient.connect(url, (err, db) => {
      if (err) {
        reject(err);
      } else {
        resolve(db);
      }
    });
  });
}

function close(db) {
  if (db) {
    db.close();
  }
}

// module.exports = db;
// const db = require('./mongoDBServer.js');
const assert = require('assert');
const collectionName = 'yuTsaiLpr20161021'; // a collection containing 500 docs

// this will hold the final array that will be sent to the browser
// a global variable will be declared with upper camel case
let Array = [];
// this will hold the database object for later use
let Database = '';
// global query string and projection
let Query = {};
let Projection = {};
let Collection = {};

let checkoutCarPromise = new Promise((resolve, reject) => {
  Database = null;
  db.open() // no ';' semicolon: this is a promise; when successful, open resolves with the db object, or rejects
    .then((db) => {
      Database = db; // save it globally
      return db.collection(collectionName);
    })
    .then((collection) => {
      if (collection == 'undefined') reject('collection not found!!');
      Collection = collection; // save it globally
      return (collection);
    })
    .then((collection) => {
      return collection.find(); // returns a cursor
    })
    .then((cursor) => {
      return cursor.toArray();
    })
    .then((array) => {
      console.log('array[499]: ', array[499]);
      Array.push(array[499]);
    })
    .then(() => { // reread to find this car
      return Collection.find({ plateText: { $regex: /8920/ } });
    })
    .then((cursor) => {
      return cursor.toArray();
    })
    .then((array) => {
      Array.push(array);
      resolve(Array);
    })
})
  .catch((err) => {
    return (err);
    console.error('the checkoutCarPromise error is: ', err);
  })

Promise.all([checkoutCarPromise]).then(results => {
  console.log('checkoutCarPromise last resolve value: ', results[0]);
  console.log('Array: ', Array);
  Database.close();
})

// this will get you more info about unhandled rejections
process.on("unhandledRejection", (reason) => {
  console.log(reason)
})

Waiting for meteor cursor in method

I have a large aggregate query that required me to pass "allowDiskUse: true" as an option. This would not work with the aggregate as described here:
https://github.com/meteorhacks/meteor-aggregate/issues/11
My Meteor method is defined below. When I call the method, I need to wait for the on("data") handling to complete before anything is returned to the client, but nothing I try lets me get that data safely up to the front end.
Meteor.methods({
  'getSummary': function (dept, startDate, endDate, filterType) {
    f = myQuery(startdate, enddate, dayFinalGroup);
    f.on("data", Meteor.bindEnvironment(function(row) {
      // load an array or something here to return
    }));
    f.once("end", Meteor.bindEnvironment(function() {
      // tidy up, in my case end the stream
    }));
    // here I'd return the array loaded
  },
});
This is my front end.
Meteor.call(
  'getSummary', 0, Session.get('start_date'), Session.get('end_date'), 1,
  function(error, result) {
    if (error) {
      console.log(error);
    } else {
      Session.set('sumTotals', result);
    }
  }
);
Finally got it. I utilized wrapAsync:
'getSummary': function (dept, startDate, endDate, filterType) {
  console.log(dept);
  console.log(startDate);
  console.log(endDate);
  console.log(filterType);
  var startdate = new Date(startDate);
  var enddate = new Date(endDate);
  var arr = [];
  f = myQuery(startdate, enddate, dayFinalGroup);
  var fetchCursor = Meteor.wrapAsync(function fetchCursor(cursor, cb) {
    cursor.each(function (err, doc) {
      if (err) return cb(err);
      if (!doc) return cb(null, { done: true }); // no more documents
      arr.push(doc);
    });
  });
  var myData = fetchCursor(f);
  return arr;
},