How to consolidate CloudWatch Lambda log groups into a common log group - amazon-cloudwatchlogs

My app has more than 100 Lambdas on AWS, as it follows a microservices pattern.
By default, each Lambda writes to its own log group.
I'd like to get a view of the logs across all my Lambdas. I cannot use Logs Insights, as I have more than 100 groups to search in.
Do you know how I can create a log group that aggregates all my Lambda log groups?

Maybe there is a better way, but I ended up creating a Subscription Filter Lambda that copies all the logs to a single log group.
var aws = require("aws-sdk");
var zlib = require("zlib");
var { v4: uuid } = require("uuid");

const cloudwatch = new aws.CloudWatchLogs();

exports.handler = async function (input, context) {
  const now = new Date();
  const LOG_GROUP_NAME = "/aws/lambda/" + process.env.STAGE + "-main";
  // getUTCMonth() is zero-based, and getUTCDate() (not getUTCDay(), which is
  // the weekday) gives the day of the month.
  const LOG_STREAM_NAME =
    now.getUTCFullYear() + "/" + (now.getUTCMonth() + 1) + "/" + now.getUTCDate() + "/" + uuid();
  try {
    // The subscription payload arrives base64-encoded and gzipped.
    var zippedInput = Buffer.from(input.awslogs.data, "base64");
    const unzippedDataBuffer = await unzipLogs(zippedInput);
    var awslogsData = JSON.parse(unzippedDataBuffer);
    await cloudwatch
      .createLogStream({
        logGroupName: LOG_GROUP_NAME,
        logStreamName: LOG_STREAM_NAME,
      })
      .promise();
    await cloudwatch
      .putLogEvents({
        logGroupName: LOG_GROUP_NAME,
        logStreamName: LOG_STREAM_NAME,
        // Drop the "id" field carried by subscription events so only the
        // timestamp/message fields expected by putLogEvents remain.
        logEvents: awslogsData.logEvents.map((logEvent) => {
          const { id, ...cleanEvent } = logEvent;
          return cleanEvent;
        }),
      })
      .promise();
  } catch (err) {
    console.error(err);
  }
};

async function unzipLogs(zippedInput) {
  return new Promise((resolve, reject) => {
    zlib.gunzip(zippedInput, function (error, buffer) {
      if (error) {
        reject(error);
        return;
      }
      resolve(buffer);
    });
  });
}
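For this to work, the destination log group must already exist and every source log group needs a subscription filter pointing at this function. A minimal sketch of that wiring with the same aws-sdk v2 client (the filter name and ARN below are placeholders, not from the original setup):

var aws = require("aws-sdk");
const cloudwatch = new aws.CloudWatchLogs();

async function forwardLogGroup(sourceLogGroup, consolidatorLambdaArn) {
  // An empty filter pattern matches (and forwards) every log event.
  await cloudwatch
    .putSubscriptionFilter({
      logGroupName: sourceLogGroup,
      filterName: "forward-to-main",
      filterPattern: "",
      destinationArn: consolidatorLambdaArn,
    })
    .promise();
}

CloudWatch Logs also needs permission to invoke the destination function (lambda add-permission with the logs service principal). Note that the handler above creates a fresh log stream per invocation, which conveniently avoids having to track putLogEvents sequence tokens.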

Related

Trying to get uploads saving in MongoDB

I currently have the following code, which saves the temp file to public/files. I have tried to understand the MongoDB GridFS documentation, but with no success.
I am wondering how I get the files to save inside MongoDB GridFS instead of my public/files directory.
I am aware I am missing the part where I need to send the uploaded file to MongoDB - this is the part I don't know how to do.
The MongoDB example says to do something like:
fs.createReadStream('./myFile').pipe(
  bucket.openUploadStream('myFile', {
    chunkSizeBytes: 1048576,
    metadata: { field: 'myField', value: 'myValue' },
  })
);
However, I am not using fs. Or do I need to upload the file to the temp directory first and then use fs?
import formidable from 'formidable';
import { MongoClient, ObjectId } from 'mongodb';
var Grid = require('gridfs-stream');

export const config = {
  api: {
    bodyParser: false,
  },
};

export default async (req, res) => {
  const uri = process.env.MONGODB_URI;
  let client;
  let clientPromise;
  const options = {};
  client = new MongoClient(uri, options);
  clientPromise = client.connect();
  const clients = await clientPromise;
  const database = clients.db('AdStitchr');
  var gfs = Grid(database, client);
  gfs.collection('uploads');

  const form = new formidable.IncomingForm();
  form.uploadDir = 'public/files';
  form.keepExtensions = true;
  form.parse(req, (err, fields, files) => {
    var file = files.file;
    console.log(JSON.stringify(file));
    try {
      const newFile = File.create({
        name: `files/${file.newFilename}.mp3`,
      });
      res.status(200).json({ status: 'success' });
    } catch (error) {
      res.send(error);
    }
  });
};
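One way to bridge the two (a sketch, not from the original post): the current MongoDB driver exposes GridFSBucket directly, so you can stream formidable's parsed temp file into the bucket with fs instead of writing it to public/files. Note that file.filepath and file.originalFilename are the formidable v2+ property names (v1 used file.path and file.name); adjust for your version.

import fs from 'fs';
import formidable from 'formidable';
import { MongoClient, GridFSBucket } from 'mongodb';

export default async (req, res) => {
  const client = await new MongoClient(process.env.MONGODB_URI).connect();
  const db = client.db('AdStitchr');
  // GridFSBucket replaces the legacy gridfs-stream wrapper.
  const bucket = new GridFSBucket(db, { bucketName: 'uploads' });

  const form = new formidable.IncomingForm();
  form.keepExtensions = true;
  form.parse(req, (err, fields, files) => {
    if (err) return res.status(400).json({ error: err.message });
    const file = files.file;
    // Stream the temp file into GridFS, then report the result.
    fs.createReadStream(file.filepath)
      .pipe(bucket.openUploadStream(file.originalFilename))
      .on('error', (e) => res.status(500).json({ error: e.message }))
      .on('finish', () => res.status(200).json({ status: 'success' }));
  });
};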

PWA problem with Vue3+service worker+keep-alive

I have a problem with Vue3 + service worker + keep-alive.
I use keep-alive in the template:
<q-page-container>
  <router-view v-slot="{ Component }">
    <keep-alive :include="['WorkPage']">
      <component :is="Component" :key="$route.fullPath"/>
    </keep-alive>
  </router-view>
</q-page-container>
I create the queue:
createWorkQueue = new Queue('createWorkQueue', {
  onSync: async ({ queue }) => {
    let entry
    while (entry = await queue.shiftRequest()) {
      try {
        await fetch(entry.request);
        const channel = new BroadcastChannel('sw-messages-work');
        channel.postMessage({ msg: 'offline-work-uploaded' });
      } catch (error) {
        await queue.unshiftRequest(entry);
        throw error;
      }
    }
  }
})
and add the fetch event listener:
self.addEventListener('fetch', (event) => {
  if (event.request.url.endsWith('/api/ins_new_work')) {
    const bgSyncLogic = async () => {
      try {
        const response = await fetch(event.request.clone())
        return response
      } catch (error) {
        await createWorkQueue.pushRequest({ request: event.request })
        return error
      }
    }
    event.respondWith(bgSyncLogic())
  }
})
When I am offline and submit the form, createWorkQueue.pushRequest hangs for up to 5 minutes.
If I remove WorkPage from keep-alive, then pushRequest works well,
but I need the page kept alive. How can I solve this?
I found it!
I use the IndexedDB library, and to show the offline message I read the information from the DB:
const db = await openDB('workbox-background-sync')
but the first time, the 'requests' store has not been created yet.
So I use the following instead:
const db = await openDB('workbox-background-sync', undefined, { upgrade(db) { db.createObjectStore('requests') } })
and it works well.
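For context, a sketch of how that read can surface an offline indicator (the count call and the message below are illustrative additions, not from the original post; 'requests' is the store workbox-background-sync uses internally):

import { openDB } from 'idb';

async function countQueuedRequests() {
  // Pre-create the object store so the read does not fail before
  // workbox-background-sync has written anything.
  const db = await openDB('workbox-background-sync', undefined, {
    upgrade(db) {
      db.createObjectStore('requests');
    },
  });
  // Number of requests still waiting to be replayed.
  return db.count('requests');
}

countQueuedRequests().then((n) => {
  if (n > 0) console.log(`${n} request(s) queued offline`);
});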

Puppeteer and Express cannot load new data using REST API

I'm using Puppeteer to scrape a page whose contents change periodically, and Express to present the data in a REST API.
If I turn off headless mode to see what is being shown in the browser, the new data is there, but it is not showing up in get() at http://localhost:3005/api-weather; the API only returns the original data.
const express = require('express');
const server = new express();
const cors = require('cors');
const morgan = require('morgan');
const puppeteer = require('puppeteer');

server.use(morgan('combined'));
server.use(
  cors({
    allowHeaders: ['sessionId', 'Content-Type'],
    exposedHeaders: ['sessionId'],
    origin: '*',
    methods: 'GET, HEAD, PUT, PATCH, POST, DELETE',
    preflightContinue: false
  })
);

const WEATHER_URL = 'https://forecast.weather.gov/MapClick.php?lat=40.793588904953985&lon=-73.95738513173298';
const hazard_url2 = `file://C:/Users/xdevtran/Documents/vshome/wc_api/weather-forecast-nohazard.html`;

(async () => {
  try {
    const browser = await puppeteer.launch({ headless: true });
    const page = await browser.newPage();
    await page.setRequestInterception(true);
    page.on("request", request => {
      console.log(request.url());
      request.continue();
    });
    await page.goto(hazard_url2, { timeout: 0, waitUntil: 'networkidle0' });
    hazard = {
      "HazardTitle": "stub",
      "Hazardhref": "stub"
    }
    let forecast = await page.evaluate(() => {
      try {
        let forecasts = document.querySelectorAll("#detailed-forecast-body.panel-body")[0].children;
        let weather = [];
        for (var i = 0, element; element = forecasts[i]; i++) {
          period = element.querySelector("div.forecast-label").textContent;
          forecast = element.querySelector("div.forecast-text").textContent;
          weather.push(
            {
              period,
              forecast
            }
          )
        }
        return weather;
      } catch (err) {
        console.log('error in evaluate: ', err);
        res.end();
      }
    }).catch(err => {
      console.log('err.message :', err.message);
    });
    weather = forecast;
    server.get('/api-weather', (req, res) => {
      try {
        res.end(JSON.stringify(weather, null, ' '));
        console.log(weather);
      } catch (err) {
        console.log('failure: ', err);
        res.sendStatus(500);
        res.end();
        return;
      }
    });
  } catch (err) {
    console.log('caught error :', err);
  }
  browser.close();
})();

server.listen(3005, () => {
  console.log('http://localhost:3005/api-weather');
});
I've tried several solutions (waitUntil, waitFor, .then, and sleep) but nothing seems to work.
I wonder if it has something to do with Express's get()? I'm using res.end() instead of res.send() because the JSON looks better when I use res.end(). I don't really know the distinction.
I'm also open to using a reload solution, but I received errors and didn't use it.
I also tried waitForNavigation(), but I don't know how it works either.
Maybe I'm using the wrong search term to find the solution. Could anyone point me in the right direction? Thank you.
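One pattern that fits "contents that change periodically" (a sketch of one possible fix, not from the original question): scrape once at startup, then re-scrape on a timer so the route handler always serves the latest snapshot. The selector and the 10-minute interval below are placeholders:

const express = require('express');
const puppeteer = require('puppeteer');

const app = express();
let weather = []; // latest snapshot served by the route

async function scrape(page) {
  // Reload so the page reflects whatever has changed since the last scrape.
  await page.reload({ waitUntil: 'networkidle0' });
  return page.evaluate(() =>
    Array.from(
      document.querySelectorAll('#detailed-forecast-body.panel-body > div')
    ).map((el) => ({
      period: el.querySelector('div.forecast-label').textContent,
      forecast: el.querySelector('div.forecast-text').textContent,
    }))
  );
}

(async () => {
  const browser = await puppeteer.launch({ headless: true });
  const page = await browser.newPage();
  await page.goto('https://forecast.weather.gov/MapClick.php?lat=40.793588904953985&lon=-73.95738513173298', {
    waitUntil: 'networkidle0',
  });
  weather = await scrape(page);
  // Re-scrape every 10 minutes (placeholder interval).
  setInterval(async () => {
    try {
      weather = await scrape(page);
    } catch (err) {
      console.log('re-scrape failed:', err.message);
    }
  }, 10 * 60 * 1000);
})();

app.get('/api-weather', (req, res) => res.json(weather));
app.listen(3005, () => console.log('http://localhost:3005/api-weather'));

As for res.end() vs res.send(): res.json() (and res.send() with an object) sets the Content-Type: application/json header for you, while res.end(JSON.stringify(...)) sends the same bytes without it. The prettier output you saw comes from the null, ' ' pretty-printing arguments to JSON.stringify, not from which method sent it.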

Unable to read from Firestore collection

I have a Firestore collection called Users and am trying to write a document to the collection through fulfillment. I am getting the user's name and location through the Dialogflow agent and trying to insert them into the collection, but the document is not getting inserted.
1) Getting data from the agent:
name = agent.parameters.name;
location = agent.parameters.location;
2) Writing to the Firestore collection Users:
db.collection("Users").doc("101").set({
  name: name,
  location: location
});
The function executed, but the document was not inserted into the Firestore collection. What am I missing?
'use strict';

const functions = require('firebase-functions');
const { WebhookClient } = require('dialogflow-fulfillment');
const { Card, Suggestion } = require('dialogflow-fulfillment');
const admin = require('firebase-admin');

process.env.DEBUG = 'dialogflow:debug'; // enables lib debugging statements

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
  const agent = new WebhookClient({ request, response });
  console.log('Dialogflow Request headers: ' + JSON.stringify(request.headers));
  console.log('Dialogflow Request body: ' + JSON.stringify(request.body));
  var name = '';
  var location = '';
  admin.initializeApp(functions.config().firebase);
  const db = admin.firestore();

  function getUserDetails(agent) {
    name = agent.parameters.name;
    location = agent.parameters.location;
    console.log("buyer name is " + name);
    db.collection("Users").doc("101").set({
      name: name,
      location: location
    });
    agent.add(`User has been inserted`);
  }

  let intentMap = new Map(); // note: this declaration was missing in the original snippet
  intentMap.set('Buy Car', getUserDetails);
  agent.handleRequest(intentMap);
})
'use strict';

const functions = require('firebase-functions');
const admin = require('firebase-admin');
const { WebhookClient } = require('dialogflow-fulfillment');

process.env.DEBUG = 'dialogflow:*'; // enables lib debugging statements

admin.initializeApp(functions.config().firebase);
const db = admin.firestore();

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
  const agent = new WebhookClient({ request, response });

  function saveName(agent) {
    const nameParam = agent.parameters.name;
    const name = nameParam;
    agent.add(`thank you, ` + name + `!`);
    return db.collection('test').add({ name: name }).then((snapshot) => {
      console.log('success');
    });
  }

  let intentMap = new Map();
  intentMap.set('Get_Name', saveName);
  agent.handleRequest(intentMap);
});
It isn't clear what is going wrong, but at least in part that is because you're not checking what the error might be from the write to the collection/document.
This is compounded by the fact that your reply "User has been inserted" is sent without actually confirming that the user document has been set. There isn't even any guarantee that the write has been sent by the time the function completes, because you're not treating the document write as asynchronous.
The normal way to do this is to return the Promise that set() returns, so handleRequest() will wait for the handler to finish before it sends the reply. You should also set the reply in a then() portion of the Promise and catch any errors in a catch() block.
A function like this is more correct, and may log what the error is:
function getUserDetails(agent) {
  name = agent.parameters.name;
  location = agent.parameters.location;
  console.log("buyer name is " + name);
  return db.collection("Users").doc("101").set({
    name: name,
    location: location
  })
  .then(() => {
    agent.add(`User has been inserted`);
  })
  .catch(err => {
    console.log(err);
    agent.add('Something went wrong');
  });
}
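The same handler with async/await, which some find easier to read (a sketch, logically equivalent to the Promise chain above; handleRequest() awaits the promise an async handler returns):

async function getUserDetails(agent) {
  const name = agent.parameters.name;
  const location = agent.parameters.location;
  try {
    // Awaiting the write keeps the function alive until Firestore confirms it.
    await db.collection("Users").doc("101").set({ name, location });
    agent.add(`User has been inserted`);
  } catch (err) {
    console.log(err);
    agent.add('Something went wrong');
  }
}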

AWS Lambda - MongoDB resource optimization

I'm building a Facebook chatbot using AWS Lambda and MongoDB. At the moment my application is pretty simple, but I'm trying to nail down the basics before I move on to the complex stuff.
I understand AWS Lambda is stateless, but I've read that by adding the line below in the handler, along with variables initialized outside the handler, I don't have to establish a DB connection on every request.
context.callbackWaitsForEmptyEventLoop = false;
(I've read this in this article: https://www.mongodb.com/blog/post/optimizing-aws-lambda-performance-with-mongodb-atlas-and-nodejs)
I'm adding my entire code below.
'use strict'

const
  axios = require('axios'),
  mongo = require('mongodb'),
  MongoClient = mongo.MongoClient,
  assert = require('assert');

var VERIFY_TOKEN = process.env.VERIFY_TOKEN;
var PAGE_ACCESS_TOKEN = process.env.PAGE_ACCESS_TOKEN;
var MONGO_DB_URI = process.env.MONGO_DB_URI;

let cachedDb = null;
let test = null;

exports.handler = (event, context, callback) => {
  var method = event.context["http-method"];
  context.callbackWaitsForEmptyEventLoop = false;
  console.log("test :: " + test);
  if (!test) {
    test = "1";
  }
  // process GET request --> verify facebook webhook
  if (method === "GET") {
    var queryParams = event.params.querystring;
    var rVerifyToken = queryParams['hub.verify_token']
    if (rVerifyToken === VERIFY_TOKEN) {
      var challenge = queryParams['hub.challenge'];
      callback(null, parseInt(challenge))
    } else {
      var response = {
        'body': 'Error, wrong validation token',
        'statusCode': 403
      };
      callback(null, response);
    }
  // process POST request --> handle message
  } else if (method === "POST") {
    let body = event['body-json'];
    body.entry.map((entry) => {
      entry.messaging.map((event) => {
        if (event.message) {
          if (!event.message.is_echo && event.message.text) {
            console.log("BODY\n" + JSON.stringify(body));
            console.log("<<MESSAGE EVENT>>");
            // retrieve message
            let response = {
              "text": "This is from webhook response for '" + event.message.text + "'"
            }
            // facebook call
            callSendAPI(event.sender.id, response);
            // store in DB
            console.time("dbsave");
            storeInMongoDB(event, callback);
          }
        } else if (event.postback) {
          console.log("<<POSTBACK EVENT>>");
        } else {
          console.log("UNHANDLED EVENT; " + JSON.stringify(event));
        }
      })
    })
  }
}

function callSendAPI(senderPsid, response) {
  console.log("call to FB");
  let payload = {
    recipient: {
      id: senderPsid
    },
    message: response
  };
  let url = `https://graph.facebook.com/v2.6/me/messages?access_token=${PAGE_ACCESS_TOKEN}`;
  axios.post(url, payload)
    .then((response) => {
      console.log("response ::: " + response);
    }).catch(function(error) {
      console.log(error);
    });
}

function storeInMongoDB(messageEnvelope, callback) {
  console.log("cachedDB :: " + cachedDb);
  if (cachedDb && cachedDb.serverConfig.isConnected()) {
    sendToAtlas(cachedDb.db("test"), messageEnvelope, callback);
  } else {
    console.log(`=> connecting to database ${MONGO_DB_URI}`);
    MongoClient.connect(MONGO_DB_URI, function(err, db) {
      assert.equal(null, err);
      cachedDb = db;
      sendToAtlas(db.db("test"), messageEnvelope, callback);
    });
  }
}

function sendToAtlas(db, message, callback) {
  console.log("send to Mongo");
  db.collection("chat_records").insertOne({
    facebook: {
      messageEnvelope: message
    }
  }, function(err, result) {
    if (err != null) {
      console.error("an error occurred in sendToAtlas", err);
      callback(null, JSON.stringify(err));
    } else {
      console.timeEnd("dbsave");
      var message = `Inserted a message into Atlas with id: ${result.insertedId}`;
      console.log(message);
      callback(null, message);
    }
  });
}
I did everything as instructed and referenced a few more similar cases, but somehow on every request the cachedDb value is not preserved from the previous request and the app establishes the connection all over again.
Then I also read that there is no guarantee the Lambda function uses the same container on multiple requests, so I made another global variable, test. The test variable is logged as "1" from the second request onward, which means it is using the same container; but again, the cachedDb value is not saved.
What am I missing here?
Thanks in advance!
In short, an AWS Lambda function is not a permanently running service of any kind.
As far as I know, AWS Lambda works on the idea that one container processes one request at a time.
That means when a request comes in and there is an available running container for the Lambda function, AWS uses it; otherwise it starts a new container.
If a second request comes in while the first container is still executing the Lambda function for the first request, AWS starts a new container, and so on.
So there is no guarantee which container (an already running one or a new one) the Lambda function will be executed in, and a new container opens a new DB connection.
There is also an inactivity period after which no containers are kept running; everything starts over again with the next request.
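For reference, the usual shape of the caching pattern under those constraints (a minimal sketch, assuming the mongodb v3+ driver where connect() resolves to a client rather than a db): cache the connected client in a module-scope variable so warm invocations of the same container reuse it, and accept that cold starts will always pay the connection cost.

const { MongoClient } = require('mongodb');

let cachedClient = null; // survives across warm invocations of the same container

async function getClient() {
  if (cachedClient) {
    return cachedClient; // warm container: reuse the existing connection
  }
  // cold container: pay the connection cost once
  cachedClient = await MongoClient.connect(process.env.MONGO_DB_URI);
  return cachedClient;
}

exports.handler = async (event, context) => {
  // Do not wait for the open socket before freezing the container.
  context.callbackWaitsForEmptyEventLoop = false;
  const client = await getClient();
  const result = await client.db('test').collection('chat_records').insertOne({
    facebook: { messageEnvelope: event }
  });
  return `Inserted a message into Atlas with id: ${result.insertedId}`;
};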