MongoDB GridFSBucket upload stream doesn't return length of base64 image stream - mongodb

I'm refactoring my team's code to remove deprecation warnings from MongoDB.
I removed the gridfs-stream library and I'm using GridFSBucket instead.
But there is a problem: the upload stream created with GridFSBucket doesn't return the length of the base64 image; instead it returns 0. This breaks one of the tests in our code.
This is the code using GridFSBucket:
function getGrid() {
  return new mongoose.mongo.GridFSBucket(conn.db);
}

module.exports.store = function(id, contentType, metadata, stream, options, callback) {
  ... // unrelated things
  var strMongoId = mongoId.toHexString(); // http://stackoverflow.com/a/27176168
  var opts = {
    contentType: contentType,
    metadata: metadata
  };
  options = options || {};
  if (options.filename) {
    opts.filename = options.filename;
  }
  if (options.chunk_size) {
    var size = parseInt(options.chunk_size, 10);
    if (!isNaN(size) && size > 0 && size < 255) {
      opts.chunkSizeBytes = chunk_size * size;
    }
  }
  var gfs = getGrid();
  var writeStream = gfs.openUploadStreamWithId(strMongoId, opts.filename, opts);
  writeStream.on('finish', function(file) {
    console.log(file) // <== Notice the log for this line below
    return callback(null, file);
  });
  writeStream.on('error', function(err) {
    return callback(err);
  });
  stream.pipe(writeStream);
};
The result of console.log(file) is:
{
  "_id": "5ea7a3ffbc7dd36e7df4561e",
  "length": 0, // <== Notice length is 0 here
  "chunkSize": 10240,
  "uploadDate": "2020-04-28T03:33:19.734Z",
  "filename": "default_profile.png",
  "md5": "d41d8cd98f00b204e9800998ecf8427e",
  "contentType": "image/png",
  "metadata": {
    "name": "default_profile.png",
    "creator": {
      "objectType": "user",
      "id": "5ea7a3febc7dd36e7df4560a"
    }
  }
}
This is the old code using gridfs-stream:
var Grid = require('gridfs-stream');
...
function getGrid() {
  return new Grid(mongoose.connection.db, mongoose.mongo);
}

module.exports.store = function(id, contentType, metadata, stream, options, callback) {
  ... // unrelated things
  var strMongoId = mongoId.toHexString(); // http://stackoverflow.com/a/27176168
  var opts = {
    _id: strMongoId,
    mode: 'w',
    content_type: contentType
  };
  options = options || {};
  if (options.filename) {
    opts.filename = options.filename;
  }
  if (options.chunk_size) {
    var size = parseInt(options.chunk_size, 10);
    if (!isNaN(size) && size > 0 && size < 255) {
      opts.chunk_size = chunk_size * size;
    }
  }
  opts.metadata = metadata;
  var gfs = getGrid();
  var writeStream = gfs.createWriteStream(opts);
  writeStream.on('close', function(file) {
    console.log(file) // <== Notice the log for this line below
    return callback(null, file);
  });
  writeStream.on('error', function(err) {
    return callback(err);
  });
  stream.pipe(writeStream);
};
And here is the result of console.log(file):
{
  "_id": "5ea7a43a5d6eea73e0a3c8b1",
  "filename": "default_profile.png",
  "contentType": "image/png",
  "length": 634, // <== We have the length here
  "chunkSize": 10240,
  "uploadDate": "2020-04-28T03:34:18.069Z",
  "metadata": {
    "name": "default_profile.png",
    "creator": {
      "objectType": "user",
      "id": "5ea7a4395d6eea73e0a3c89d"
    }
  },
  "md5": "c37659eb6a9e741656a8d0348765c668"
}
So how can I get the length using GridFSBucket?
Thank you!
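For comparison, here is a minimal self-contained sketch (my assumptions: a local MongoDB, the Node.js driver's GridFSBucket, and a plain file read stream rather than the mail-attachment object used above) in which the files document does come back with a non-zero length; it also shows reading the length back with bucket.find():

// Hedged sketch only: assumes a local MongoDB and an existing PNG on disk.
const fs = require('fs');
const { MongoClient, GridFSBucket } = require('mongodb');

async function uploadExample() {
  const client = await MongoClient.connect('mongodb://localhost:27017');
  const db = client.db('test');
  const bucket = new GridFSBucket(db);

  const uploadStream = bucket.openUploadStream('default_profile.png', {
    contentType: 'image/png'
  });

  await new Promise((resolve, reject) => {
    fs.createReadStream('./default_profile.png') // a real readable stream of bytes
      .pipe(uploadStream)
      .on('error', reject)
      .on('finish', resolve);
  });

  // Read the stored files document back; `length` reflects the bytes actually written.
  const [file] = await bucket.find({ _id: uploadStream.id }).toArray();
  console.log(file.length); // non-zero when the piped stream produced data

  await client.close();
}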
UPDATE:
This is the log of the stream variable in both cases:
{
  "contentType": "image/png",
  "fileName": "default_profile.png",
  "transferEncoding": "base64",
  "contentDisposition": "attachment",
  "generatedFileName": "default_profile.png",
  "contentId": "216d9aab57544410901f8ba7981e63aa#mailparser",
  "stream": {
    "_events": {},
    "_eventsCount": 0,
    "writable": true,
    "checksum": {
      "_handle": {},
      "writable": true,
      "readable": true
    },
    "length": 0,
    "current": ""
  }
}
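The object logged above looks like a parsed mail-attachment wrapper rather than a raw readable stream (note the nested stream property). Purely as a hedged diagnostic sketch, and assuming that shape (it is an assumption, not confirmed here), one way to make sure an actual readable stream gets piped into GridFS would be:

// Hedged sketch: pick whichever object actually behaves like a readable stream.
// `attachment` stands for the object logged above; its shape is an assumption.
var source = (attachment && typeof attachment.pipe === 'function')
  ? attachment             // already a readable stream
  : attachment.stream;     // otherwise try the nested stream property

source.pipe(writeStream);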

Related

Why does my mongoose populate() request not work?

I am trying to populate data from one collection into another collection. I googled this and followed a tutorial step by step, but the population fails. Any help is appreciated, friends. This is the code:
router.get("/", passport.authenticate("jwt", {session: false}), (req, res)=> {
const errors = {};
Profile.findOne({user: req.user.id})
.then(profile => {
if (!profile) {
errors.noprofile = "there is no profile for this user"
return res.status(404).json(errors);
}
res.json(profile);
}).catch(err=> res.status(404).json(err))
});
// #route POST api/profile
//#desc Create or edit user profile
//#access Private
router.get("/", passport.authenticate("jwt", {session: false}), (req, res)=> {
const {errors, isValid} = validateProfileInput(req.body);
//Check validation
if(!isValid) {
return res.status(400).json(errors);
}
// Get profile data
const profileData = {};
profileData.user = req.user.id;
if(req.body.handle) {
profileData.handle = req.body.handle
};
if(req.body.company) {
profileData.company = req.body.company
};
if(req.body.website) {
profileData.website = req.body.website
};
if(req.body.location) {
profileData.location = req.body.location
};
if(req.body.status) {
profileData.status = req.body.status
};
if(typeof req.body.skills !== 'undefined') {
profileData.skills = req.body.skills.split(',');
}
//social
profileData.social = {};
if(req.body.youtube) {
profileData.social.youtube = req.body.youtube
};
if(req.body.twitter) {
profileData.social.twitter = req.body.twitter
};
if(req.body.facebook) {
profileData.social.facebook = req.body.facebook
};
if(req.body.instagram) {
profileData.social.instagram = req.body.instagram
};
Profile.findOne({user: req.user.id})
.populate(
"user",
["name, avatar"]
)
This is the result that I get from Postman:
"_id": "62ee1058ceb295ccdfedffce",
"user": "62e6825958870d3db69d2da5",
"handle": "pablo",
"status": "developper",
"skills": [
"design web"
],
and the correct result should be:
"_id": "62ee1058ceb295ccdfedffce",
"user": {"_id": "62e6825958870d3db69d2da5",
"name": "pablo",
"avatar": "//www.gravatar.com/avatar/1ffsrenbdgeajks-ghsdereys1dkkdhddbc"
}
"handle": "pablo",
"status": "developper",
"skills": [
"design web"
],
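No accepted answer is quoted here, but as a hedged sketch of how populate() is typically chained and executed (assuming a User model is registered under the name used in the user ref, and that name and avatar are separate field names rather than one string):

// Sketch only: populate() takes a space-separated field string (or an array of
// separate field names), and the query still has to be executed with then/await.
Profile.findOne({ user: req.user.id })
  .populate('user', 'name avatar') // or .populate('user', ['name', 'avatar'])
  .then(profile => {
    if (!profile) {
      return res.status(404).json({ noprofile: 'there is no profile for this user' });
    }
    res.json(profile);
  })
  .catch(err => res.status(404).json(err));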

Group by and Get Max Value MongoDb

I would like to get the highest count for each numId and display it on my front end in a table.
Here is an example of my database:
{
  "_id": {
    "$oid": "6294777f677b4c647e28771a"
  },
  "numId": "5",
  "respondee": "0x9d95bcaa5b609fa97a7ec860bec115aa94f85ba9",
  "__v": 0,
  "originalResponse": "test2",
  "submittedAt": {
    "$date": {
      "$numberLong": "1653897087357"
    }
  },
  "addresses": [
    "0x39c878a3df98002ddba477a7aa0609fb5a27e2ff",
    "0xe3342d6522ad72f65d6b23f19b17e3fb12161f90"
  ],
  "count": 2
},
{
  "_id": {
    "$oid": "6294836e677b4c647e287e93"
  },
  "numId": "5",
  "respondee": "0xe3342d6522ad72f65d6b23f19b17e3fb12161f90",
  "__v": 0,
  "originalResponse": "test3",
  "submittedAt": {
    "$date": {
      "$numberLong": "1653900142375"
    }
  },
  "addresses": [],
  "count": 0
}
I have written something like this, but I'm not sure how to group the results according to the numId:
import Response from '../../../models/Response.model';
import db from '../../../utils/config/db';
import nc from 'next-connect';
import { onError } from '../../../utils/error';

const handler = nc({
  onError,
});

// GET all
handler.get(async (req, res) => {
  await db.connect();
  let responses = await Response.find({});
  // To group responses by numId
  // Sort responses by votes in ascending order
  responses = responses.sort((a, b) => {
    return a.count - b.count;
  });
  let topResponses = responses.filter((response) => {
    return response.count === responses[0].count;
  });
  // Check if respondee has the highest count response
  if (
    topResponses.length > 0 &&
    topResponses.find((response) => {
      return response.respondee === respondee;
    })
  ) {
    // Get the response
    let response = topResponses.find((response) => {
      return response.respondee === respondee;
    });
    // Get the response
    let responseString = response.response;
    // Get the count
    let count = response.count;
  }
  await db.disconnect();
});

export default handler;
I figured out the answer by referring to another Stack Overflow question:
Group by and Get Max Value MongoDb
let responses = await Response.aggregate([
  { $sort: { votes: -1 } },
  { $group: { _id: '$baseId', group: { $first: '$$ROOT' } } },
  { $replaceRoot: { newRoot: '$group' } },
]);
res.send(responses);
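Since the sample documents above use count and numId (rather than votes and baseId), a hedged adaptation of the same pipeline for that schema would look like this:

// Sketch: keep the highest-count response per numId.
let responses = await Response.aggregate([
  { $sort: { count: -1 } },                                   // highest count first
  { $group: { _id: '$numId', group: { $first: '$$ROOT' } } }, // first (highest) doc per numId
  { $replaceRoot: { newRoot: '$group' } },                    // unwrap back to the document shape
]);
res.send(responses);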

Invisible chart area when printing and in the exported PDF of a spreadsheet created in a shared folder by a service account with the Google Sheets API (Node.js)

I'm creating a spreadsheet in a Node.js environment, in a shared folder, using a service account with Google Sheets API v4, in a few steps:
1. Create the spreadsheet itself in the shared folder, with the "Can Edit" permission for the service account.
2. Insert some data and apply some text formatting, using the spreadsheet ID received as a callback from the previous step.
3. Insert a chart that uses the data inserted in the previous step as its input.
As a result I get a spreadsheet that looks as expected (text data and a horizontal bar chart on the same sheet). But when I try to print it or download it as a PDF file, the chart area becomes completely invisible. I didn't find any option in the official documentation about chart visibility during printing or anything similar. And when I replace the generated chart with a manually created one, everything is fine: I can print it and export it to PDF.
So what is the problem? Am I missing something? Or is this a bug?
index.js
const fs = require('fs');
const { google } = require('googleapis');
const express = require('express');
const bodyParser = require("body-parser");
const app = express();
const PORT = 3000;
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.listen(PORT, () => {
console.log(`Server started at http://localhost:${PORT}`)
})
const SCOPES = ['https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/spreadsheets'];
const FOLDER_ID = '1xG3xHhrucB4AGLmnd8T2TmCyqhmPux5Q';
var timeStamp = new Date().getTime();
console.log(`timeStamp at startup = ${timeStamp}`);
const auth = new google.auth.GoogleAuth({
keyFile: 'credentials.json',
scopes: SCOPES
});
process.env.HTTPS_PROXY = 'http://10.5.0.20:3128';
const sheets = google.sheets({
version: 'v4',
auth: auth,
proxy: 'http://10.5.0.20:3128'
});
const drive = google.drive({
version: 'v3',
auth: auth,
proxy: 'http://10.5.0.20:3128'
});
function createFileName() {
const now = new Date();
let date = String(now.toISOString().slice(0, 10));
let hours = String(now.getHours()).padStart(2, "0");
let minutes = String(now.getMinutes()).padStart(2, "0");
let seconds = String(now.getSeconds()).padStart(2, "0");
let humanDate = date.replaceAll('-', '.');
humanDate = `${humanDate}_${hours}-${minutes}-${seconds}`;
return humanDate;
}
async function saveFileLocally(filePath, data) {
fs.writeFile(filePath, JSON.stringify(data), error => {
if (error) {
console.error(error);
return;
}
});
return true;
}
app.get("/check", (req, res) => {
res.send('server is online...');
});
app.post("/motivation", async (req, res) => {
if(!req.body) return res.sendStatus(400);
try {
const prizv = req.body.prizv;
const name = req.body.name;
const father = req.body.father;
const sex = req.body.sex;
const age = req.body.age;
const factors = req.body.factors;
const testName = 'motivation';
const clientData = { prizv: prizv, name: name, father: father, sex: sex, age: age, factors: factors, testName: testName };
const fileName = `${createFileName()}_${prizv}`;
const filePath = `files/${testName}/${fileName}.txt`;
const isSaved = await saveFileLocally(filePath, clientData);
if (isSaved) {
console.log(`file is saved locally....`);
const sheetID = await createSheetToGoogleDIsk(clientData, fileName);
res.send(sheetID);
}
} catch (error) {
console.log(error)
}
});
async function createSheetToGoogleDIsk(clientData, fileName) {
const file = fileName;
var sheetsMetadata = {
name: file,
mimeType: 'application/vnd.google-apps.spreadsheet',
parents: [FOLDER_ID]
};
const res2 = drive.files.create({
resource: sheetsMetadata,
fields: 'id'
}, function (err, file) {
if (err) {
console.error(err);
} else {
console.log('SheetID: ', file.data.id);
const gSheetID = file.data.id;
pasteDataToGoogleSheet(clientData, gSheetID);
insertChartToGoogleSheet(gSheetID);
return file.data.id;
}
});
}
async function insertChartToGoogleSheet(spreadsheetId) {
spreadsheetId = spreadsheetId;
let requests = [];
// set font size for whole Sheet as 14
requests.push({
"repeatCell": {
"range": {
"sheetId": 0,
"startRowIndex": 0,
"endRowIndex": 100,
},
"cell": {
"userEnteredFormat": {
"textFormat": {
"fontSize": 13,
},
},
},
"fields": "userEnteredFormat.textFormat.fontSize"
},
});
// set header text format as Bold and 18 pt
requests.push({
"repeatCell": {
"range": {
"sheetId": 0,
"startRowIndex": 0,
"endRowIndex": 2,
},
"cell": {
"userEnteredFormat": {
"textFormat": {
"fontSize": 18,
"bold": true
},
},
},
"fields": "userEnteredFormat(textFormat)"
},
});
// set subheader text format as Bold and 15 pt
requests.push({
"repeatCell": {
"range": {
"sheetId": 0,
"startRowIndex": 3,
"endRowIndex": 4,
},
"cell": {
"userEnteredFormat": {
"textFormat": {
"fontSize": 15,
"bold": true
},
},
},
"fields": "userEnteredFormat(textFormat)"
},
});
// set client data as Bold
requests.push({
"repeatCell": {
"range": {
"sheetId": 0,
"startRowIndex": 5,
"endRowIndex": 10,
"startColumnIndex": 3,
"endColumnIndex": 5
},
"cell": {
"userEnteredFormat": {
"textFormat": {
"fontSize": 13,
"bold": true
},
},
},
"fields": "userEnteredFormat(textFormat)"
},
});
// set 1st column width as 20px
requests.push({
"updateDimensionProperties": {
"range": {
"sheetId": 0,
"dimension": "COLUMNS",
"startIndex": 0,
"endIndex": 1
},
"properties": {
"pixelSize": 20
},
"fields": "pixelSize"
}
});
// set 4st column width as 150px
requests.push({
"updateDimensionProperties": {
"range": {
"sheetId": 0,
"dimension": "COLUMNS",
"startIndex": 3,
"endIndex": 4
},
"properties": {
"pixelSize": 150
},
"fields": "pixelSize"
}
});
// set bold factors Values
requests.push({
"repeatCell": {
"range": {
"sheetId": 0,
"startRowIndex": 33,
"endRowIndex": 45,
"startColumnIndex": 4,
"endColumnIndex": 5
},
"cell": {
"userEnteredFormat": {
"textFormat": {
"fontSize": 13,
"bold": true
},
},
},
"fields": "userEnteredFormat(textFormat)"
},
});
requests.push({
"addChart": {
"chart": {
"chartId": 1,
"spec": {
"titleTextFormat": {
},
"basicChart": {
"chartType": "BAR",
"axis": [
{
"position": "BOTTOM_AXIS",
},
{
"position": "LEFT_AXIS",
}
],
"domains": [
{
"domain": {
"sourceRange": {
"sources": [
{
"sheetId": 0,
"startRowIndex": 33,
"endRowIndex": 45,
"startColumnIndex": 1,
"endColumnIndex": 2
}
]
},
},
}
],
"series": [
{
"series": {
"sourceRange": {
"sources": [
{
"sheetId": 0,
"startRowIndex": 33,
"endRowIndex": 45,
"startColumnIndex": 4,
"endColumnIndex": 5
}
]
}
},
"targetAxis": "BOTTOM_AXIS"
}
],
},
},
"position": {
"overlayPosition": {
"anchorCell": {
"sheetId": 0,
"rowIndex": 11,
"columnIndex": 1
},
"offsetXPixels": 0,
"offsetYPixels": -7,
"widthPixels": 800,
"heightPixels": 450
},
},
"border": {
"color": {
"red": 1,
"green": 1,
"blue": 1,
"alpha": 0
},
}
}
}
});
const batchUpdateRequest = { requests };
sheets.spreadsheets.batchUpdate({
spreadsheetId,
resource: batchUpdateRequest,
}, (err, result) => {
if (err) {
// Handle error
console.log(err);
return false
} else {
console.log(`${result.updatedCells} chart inserted`);
return spreadsheetId
}
});
}
async function pasteDataToGoogleSheet(clientData, sheetId) {
ecxelID = sheetId;
let data1 = [
["Тест «Мотиваційний особистісний профіль»"], [""], ["Результати тестування"], [""], ["Прізвище"], ["Імя"], ["По-батькові"], ["Вік"], ["Стать"]
];
let data2 = [
[clientData.prizv], [clientData.name], [clientData.father], [clientData.age], [clientData.sex]
];
let factorsLabels1_6 = [
["1. Матеріальна винагорода:"], ["2. Комфортні умови:"], ["3. Структурованість роботи:"], ["4. Соціальні контакти:"], ["5. Довірливі стосунки:"], ["6. Визнання:"]
];
let factors1_6 = [
[clientData.factors.factor1], [clientData.factors.factor2], [clientData.factors.factor3], [clientData.factors.factor4], [clientData.factors.factor5], [clientData.factors.factor6]
];
let factorsLabels7_12 = [
["7. Досягнення мети:"], ["8. Влада і вплив:"], ["9. Відсутність рутини:"], ["10. Креативність:"], ["11. Самовдосконалення і розвиток:"], ["12. Цікава і корисна діяльність:"]
];
let factors7_12 = [
[clientData.factors.factor7], [clientData.factors.factor8], [clientData.factors.factor9], [clientData.factors.factor10], [clientData.factors.factor11], [clientData.factors.factor12]
];
const data = [{
range: "B2:B10",
values: data1,
},
{
range: "D6:D10",
values: data2,
},
{
range: "B34:B39",
values: factorsLabels1_6,
},
{
range: "E34:E39",
values: factors1_6,
},
{
range: "B40:B45",
values: factorsLabels7_12,
},
{
range: "E40:E45",
values: factors7_12,
}
];
const resource = {
data,
valueInputOption: 'RAW',
};
sheets.spreadsheets.values.batchUpdate({
spreadsheetId: ecxelID,
resource: resource,
}, (err, result) => {
if (err) {
// Handle error
console.log(err);
return false
} else {
console.log(`${result.updatedCells} cells data inserted`);
return spreadsheetId;
}
});
}
I could confirm your situation. In this case, how about the following modification?
From:
"offsetXPixels": 0,
"offsetYPixels": -7,
"widthPixels": 800,
"heightPixels": 450
To:
"offsetXPixels": 0,
"offsetYPixels": 0, // Modified
"widthPixels": 800,
"heightPixels": 450
or
"widthPixels": 800,
"heightPixels": 450
It turns out that this issue occurs when the values of offsetXPixels and offsetYPixels are negative.
When offsetXPixels and offsetYPixels are 0, they do not need to be included at all, because 0 is the default value.
Note:
When I tested the above modification, I confirmed that the issue was resolved.
Reference:
EmbeddedObjectPosition
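Put together, the position block of the addChart request from the question would then look like this (the zero offsets are simply omitted; everything else is taken unchanged from the question's own request):

// Position block of the addChart request with no negative offsets.
"position": {
  "overlayPosition": {
    "anchorCell": {
      "sheetId": 0,
      "rowIndex": 11,
      "columnIndex": 1
    },
    "widthPixels": 800,
    "heightPixels": 450
  }
}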

Issue with creating multiple Leaflet layers from one GeoJSON file

I have included my code below. While it is sort of working (I'll get to that in a minute), I feel like there is a better, more efficient, more correct way to achieve my goal. I have a map for different weather options, in this case Tornado, Severe Weather and Flash Flood warnings. These are all included in one GeoJSON file. The GeoJSON file has a property called LayerId, which determines the point in the loop at which the layer should show. I have a simple global map loop that constantly runs from 0 to 11. So if I am on loop 5, then only the data that corresponds to LayerId 5 should be visible; all others should be hidden/removed (whichever is preferred). When the loop hits 6, the layer corresponding to LayerId 5 goes away and LayerId 6 is shown, and so on. Once the loop reaches 11, it starts over at 0.
I am not using a Leaflet control due to the site requirements, so I am using my own simple checkbox controls. When a checkbox is clicked, it calls a toggleLayer function to apply filters to my data. If Flash Flood is checked, then only the data corresponding to flash floods shows over the course of the loop, IF there is flash flood data at that interval.
When I said that it is sort of working: in my loop function I have a call to remove a layer. This works, except every now and then it throws a null or undefined error. The problem is that it's never the same layer; each time I start the application, a different layer errors out.
Below I have included a sample of my GeoJSON and the code. The entry point for the code is the toggleLayer function.
Thanks for any and all help.
GEOJSON FILE
{
"name": "WarningsJson",
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {
"coordinates": [
[
[ -86.00, 31.00 ],
[ -86.00, 32.00 ],
[ -87.00, 30.00 ],
[ -86.00, 31.00 ]
]
],
"type": "Polygon"
},
"properties": {
"type": null,
"strokeColor": "#ff9aa3",
"StartDateTime": "09/29/2020 7:30:00 AM",
"EndDateTime": "09/29/2020 9:30:00 AM",
"strokeThickness": 20,
"InfoboxTitle": "SFFW",
"Station": "KMOB",
"Identifier": "FFW",
"LayerId": "0"
}
},
{
"type": "Feature",
"geometry": {
"coordinates": [
[
[ -87.00, 32.00 ],
[ -87.00, 33.00 ],
[ -88.00, 31.00 ],
[ -87.00, 32.00 ]
]
],
"type": "Polygon"
},
"properties": {
"type": null,
"strokeColor": "#c0ffd4",
"StartDateTime": "09/29/2020 7:30:00 AM",
"EndDateTime": "09/29/2020 9:30:00 AM",
"strokeThickness": 2,
"InfoboxTitle": "TOR",
"Station": "KMOB",
"Identifier": "TOR",
"LayerId": "1"
}
},......
APPLICATION CODE
var WarnStormModel = (function () {
var layer0 = new L.LayerGroup();
var layer1 = new L.LayerGroup();
var layer2 = new L.LayerGroup();
var layer3 = new L.LayerGroup();
var layer4 = new L.LayerGroup();
var layer5 = new L.LayerGroup();
var layer6 = new L.LayerGroup();
var layer7 = new L.LayerGroup();
var layer8 = new L.LayerGroup();
var layer9 = new L.LayerGroup();
var layer10 = new L.LayerGroup();
var layer11 = new L.LayerGroup();
var warnConditionsLayersGroup = [layer0, layer1, layer2, layer3, layer4, layer5, layer6, layer7, layer8, layer9, layer10, layer11];
var tornadoActive = false;
var svrActive = false;
var ffwActive = false;
const WarnFilter = {
tornado: null,
svr: null,
flood: null
}
function init() {
$.getJSON('/Data/GeoJsonFiles/WarningJsons/Warnings_0.json', function (data) {
L.geoJSON(data, {
style: function (feature) {
return {
color: feature.properties.strokeColor,
fillOpacity: 0
};
},
pane: "warnPane",
onEachFeature: function (feature, layer) {
var popupText = '<div>'
+ '<span style="float: right; cursor: pointer; cursor: hand"</i></span><br>'
+ '<b>LAYER: </b>' + layer.feature.properties.LayerId + '<br>'
+ '<b>TYPE: </b>' + layer.feature.properties.InfoboxTitle + '<br>'
+ '<b>STATION:</b>' + layer.feature.properties.Station + '<br>'
+ '<b>START: </b>' + layer.feature.properties.StartDateTime + '<br>'
+ '<b>END: </b>' + layer.feature.properties.EndDateTime + '<br>';
layer.bindPopup(popupText);
layer._leaflet_id = feature.properties.LayerId;
if (feature.properties.LayerId == "0") { layer0.addLayer(layer); }
else if (feature.properties.LayerId == "1") { layer1.addLayer(layer); }
else if (feature.properties.LayerId == "2") { layer2.addLayer(layer); }
else if (feature.properties.LayerId == "3") { layer3.addLayer(layer); }
else if (feature.properties.LayerId == "4") { layer4.addLayer(layer); }
else if (feature.properties.LayerId == "5") { layer5.addLayer(layer); }
else if (feature.properties.LayerId == "6") { layer6.addLayer(layer); }
else if (feature.properties.LayerId == "7") { layer7.addLayer(layer); }
else if (feature.properties.LayerId == "8") { layer8.addLayer(layer); }
else if (feature.properties.LayerId == "9") { layer9.addLayer(layer); }
else if (feature.properties.LayerId == "10") { layer10.addLayer(layer); }
else if (feature.properties.LayerId == "11") { layer11.addLayer(layer); }
},
filter: function (feature, layer) {
return (
feature.properties.Identifier === WarnFilter.tornado ||
feature.properties.Identifier === WarnFilter.svr ||
feature.properties.Identifier === WarnFilter.flood
)
},
interactive: true
});
}).fail(function (err) { console.log('createWarningsErr: ', err); })
};
//**********//
function isActive(layer) {
if (layer == "TOR") { return tornadoActive; }
else if (layer == "SVR") { return tstrmActive; }
else if (layer == "FFW") { return ffwActive; }
}
var isAnyActive = function () { return tornadoActive || svrActive || ffwActive; }
var toggleLayer = function (layer, checkState) {
switch (layer) {
case "TOR": (checkState) ? WarnFilter.tornado = 'TOR' : WarnFilter.tornado = null; tornadoActive = !tornadoActive;
break;
case "SVR": (checkState) ? WarnFilter.svr = 'SVR' : WarnFilter.svr = null; svrActive = !svrActive;
break;
case "FFW": (checkState) ? WarnFilter.flood = 'FFW' : WarnFilter.flood = null; ffwActive = !ffwActive;
break;
default:
if (checkState) {
for (key in WarnFilter) {
if (WarnFilter.hasOwnProperty(key)) {
debugger
WarnFilter[key] = (key.toString()).toUpperCase();
}
}
}
//set all values in filter themselves to show
else {
for (key in WarnFilter) {
if (WarnFilter.hasOwnProperty(key)) {
WarnFilter[key] = null;
}
}
}
break;
}
showHide(layer, checkState);
}
//**********//
var showHide = function (layer, checkState) {
rerender();
if (isAnyActive() && checkState) {
warnConditionsLayersGroup[GlobalMapLoop.getLoopIndex()].addTo(getMap());
}
else {
warnConditionsLayersGroup[GlobalMapLoop.getLoopIndex()].removeLayer(getMap());
}
}
var loop = function (currentIndex, pastIndex) {
console.log("got to warn loop", currentIndex, pastIndex, isAnyActive())
if (isAnyActive()) {
getMap().removeLayer(warnConditionsLayersGroup[pastIndex]);
getMap().addLayer(warnConditionsLayersGroup[currentIndex]);
}
}
var rerender = (function () {
init();
})
return {
init: init,
toggleLayer: toggleLayer,
loop: loop,
rerender: rerender
};
})();
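Not an answer from the thread, just a hedged sketch of one way to make the loop handler defensive (assuming getMap(), isAnyActive() and the warnConditionsLayersGroup array from the code above): check hasLayer() before removing a group, which avoids intermittent null/undefined errors when a group has not been added to the map yet.

// Sketch: defensive version of the loop handler from the question.
var loop = function (currentIndex, pastIndex) {
  if (!isAnyActive()) { return; }

  var map = getMap();
  var pastGroup = warnConditionsLayersGroup[pastIndex];
  var currentGroup = warnConditionsLayersGroup[currentIndex];

  // Only remove a group that is actually on the map right now.
  if (pastGroup && map.hasLayer(pastGroup)) {
    map.removeLayer(pastGroup);
  }
  if (currentGroup && !map.hasLayer(currentGroup)) {
    map.addLayer(currentGroup);
  }
};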

Promise.all in TypeScript does not resolve all

In my code I need to update this model:
{
  "customerCode": "CUS15168",
  "customerName": "Adam Jenie",
  "customerType": "Cash",
  "printPackingSlip": "true",
  "contacts": [
    {
      "firstName": "Hunt",
      "lastName": "Barlow",
      "email": "huntbarlow#volax.com",
      "deliveryAddress": "805 Division Place, Waumandee, North Carolina, 537"
    },
    {
      "firstName": "Barlow",
      "lastName": "Hunt",
      "email": "huntbarlow#volax.com",
      "deliveryAddress": "805 Division Place, Waumandee, North Carolina, 537"
    }
  ],
  "deliveryAddress": [
    {
      "addressName": "Postal",
      "addressType": "postal address",
      "addressLine1": "plaza street",
      "addressLine2": "broome street",
      "suburb": "Guilford",
      "city": "Oneida",
      "state": "Colorado",
      "postalCode": "3971",
      "country": "Belarus",
      "deliveryInstruction": "test delivery address"
    },
    {
      "addressName": "Physical",
      "addressType": "physical address",
      "addressLine1": "plaza street",
      "addressLine2": "broome street",
      "suburb": "Guilford",
      "city": "Oneida",
      "state": "Colorado",
      "postalCode": "3971",
      "country": "Belarus",
      "deliveryInstruction": "test delivery address"
    }
  ]
}
I used Promise.all to achieve that. From Postman I send this object, but first it needs to add the customer, then the contacts array, and then the delivery address array. I did it as follows.
public async createCustomer(customer: CustomerDTO): Promise<CustomerDTO> {
  let deliveryAddress = [];
  let contacts = [];
  let customerDto = new CustomerDTO();
  customerDto.customerCode = customer.customerCode;
  customerDto.tenantId = customer.tenantId;
  if (customer.contacts.length > 0) {
    customer.contacts.map((element => {
      contacts.push(element);
    }));
    customer.contacts.length = 0;
  }
  if (customer.deliveryAddress.length > 0) {
    customer.deliveryAddress.map((element => {
      deliveryAddress.push(element);
    }));
    customer.deliveryAddress.length = 0;
  }
  const createdCustomer = await this.customerRepo.updateOrCreateCustomer(customer);
  let updatedAddress = deliveryAddress.map(async (address: CustomerDeliveryAddressDto) => {
    return await this.customerRepo.updateDeliveryAddress(address, customerDto, address._id);
  });
  let updatedContacts = contacts.map(async (contact: CustomerContactsDto) => {
    return await this.customerRepo.createOrUpdateContactList(contact, customerDto, contact._id);
  });
  return Promise.all([updatedAddress, updatedContacts])
    .then((results: [Promise<boolean>[], Promise<boolean>[]]) => {
      console.log(results);
      return this.customerRepo.getLastUpdatedCustomer();
    })
    .then((result) => {
      return result;
    })
    .catch(e => {
      console.error(e);
      return e;
    });
}
In customerRepository
public async updateDeliveryAddress(deliveryAddressDto: CustomerDeliveryAddressDto, customerDto: CustomerDTO, deliveryAddressId: string): Promise<boolean> {
  const customerToBeUpdated = await this.model.findOne({
    customerCode: customerDto.customerCode,
    tenantId: customerDto.tenantId
  });
  if (customerToBeUpdated !== null) {
    if (deliveryAddressId != null || deliveryAddressId != undefined) {
      const result = await this.model.findOneAndUpdate(
        { _id: customerToBeUpdated._id, deliveryAddress: { $elemMatch: { _id: deliveryAddressId } } },
        {
          $set: {
            //code here
          }
        },
        { 'new': true, 'safe': true, 'upsert': true });
      if (result) {
        return true;
      }
    } else {
      const result = await this.model.findOneAndUpdate(
        { _id: customerToBeUpdated._id },
        {
          $push: { deliveryAddress: deliveryAddressDto }
        },
        { 'new': true, 'safe': true, 'upsert': true }
      );
      if (result) {
        return true;
      }
    }
  } else {
    return false;
  }
}
The problem is that not all of the methods are resolved when it gets to the Promise.all call. I need to get the last updated customer, but the result comes back with empty deliveryAddress and contacts arrays, even though the customer document in MongoDB is updated as expected.
You need to pass the promises directly in a flat array.
Promise.all on MDN
If the iterable contains non-promise values, they will be ignored, but still counted in the returned promise array value (if the promise is fulfilled)
You can do this easily using the spread operator.
let updatedAddress = deliveryAddress.map(async (address: CustomerDeliveryAddressDto) => {
  return await this.customerRepo.updateDeliveryAddress(address, customerDto, address._id);
});
let updatedContacts = contacts.map(async (contact: CustomerContactsDto) => {
  return await this.customerRepo.createOrUpdateContactList(contact, customerDto, contact._id);
});
// need to give a flat array to Promise.all, so use the `...` spread operator.
return Promise.all([...updatedAddress, ...updatedContacts]).then(/* ... */
Also, since you are already using async / await, there is no reason you cannot await the Promise.all call.
const results = await Promise.all([...updatedAddress, ...updatedContacts]);
console.log(results);
return this.customerRepo.getLastUpdatedCustomer();
You can also nest Promise.all
let updatedAddress = Promise.all(deliveryAddress.map(async (address: CustomerDeliveryAddressDto) => {
  return await this.customerRepo.updateDeliveryAddress(address, customerDto, address._id);
}));
let updatedContacts = Promise.all(contacts.map(async (contact: CustomerContactsDto) => {
  return await this.customerRepo.createOrUpdateContactList(contact, customerDto, contact._id);
}));
return Promise.all([updatedAddress, updatedContacts])
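As a small standalone illustration of the difference (fakeUpdate and demo are made-up names for the example): mapping over an array produces an array of promises, and Promise.all only awaits values it receives directly, so the nested arrays have to be flattened or wrapped in their own Promise.all.

// Sketch: why the flat array matters.
const fakeUpdate = (x) => Promise.resolve(x * 2);

async function demo() {
  const a = [1, 2].map((x) => fakeUpdate(x)); // array of promises
  const b = [3, 4].map((x) => fakeUpdate(x)); // array of promises

  // Flattened: waits for all four updates.
  const flat = await Promise.all([...a, ...b]);
  console.log(flat); // [2, 4, 6, 8]

  // Nested arrays are not promises, so they are passed through as-is.
  const nested = await Promise.all([a, b]);
  console.log(nested); // [[Promise, Promise], [Promise, Promise]]
}

demo();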