Here is my service worker:
// This is the "Offline page" service worker

importScripts('https://storage.googleapis.com/workbox-cdn/releases/5.1.2/workbox-sw.js');

const CACHE = "pwabuilder-page";

// TODO: replace the following with the correct offline fallback page i.e.: const offlineFallbackPage = "offline.html";
const offlineFallbackPage = "/error/404.html";

self.addEventListener("message", (event) => {
  if (event.data && event.data.type === "SKIP_WAITING") {
    self.skipWaiting();
  }
});

self.addEventListener('install', async (event) => {
  event.waitUntil(
    caches.open(CACHE)
      .then((cache) => cache.add(offlineFallbackPage))
  );
});

if (workbox.navigationPreload.isSupported()) {
  workbox.navigationPreload.enable();
}

self.addEventListener('fetch', (event) => {
  if (event.request.mode === 'navigate') {
    event.respondWith((async () => {
      try {
        const preloadResp = await event.preloadResponse;
        if (preloadResp) {
          return preloadResp;
        }
        const networkResp = await fetch(event.request);
        return networkResp;
      } catch (error) {
        const cache = await caches.open(CACHE);
        const cachedResp = await cache.match(offlineFallbackPage);
        return cachedResp;
      }
    })());
  }
});
The service worker is stored at /src/pwa/sw.js.
How can I set the scope of the service worker to / instead of /src/pwa/?
I use a dynamic JS loader to load all of my JavaScript files; when I try to load the PWA loader, it reports a scope error for /src/pwa/.
The maximal scope of a service worker is determined by the location of your service worker script file. You mention that your script is being served from /src/pwa/sw.js, so the maximal scope is /src/pwa/.
You need to host and register your service worker from /sw.js (or another path at the site root) if you want its scope to cover every page on your site.
There's more background at Understanding Service Worker scope.
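For illustration, a minimal registration sketch under two assumptions about your setup (the registration call lives in your dynamic JS loader, and you control the response headers for the worker script): either serve the file from the site root, or keep it at /src/pwa/sw.js and have the server send the Service-Worker-Allowed header so the wider scope is permitted.
// Option 1: host the worker at the root so the default scope is "/".
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/sw.js'); // scope defaults to "/"
}

// Option 2 (assumes you can set response headers on /src/pwa/sw.js):
// serve that file with "Service-Worker-Allowed: /" and request the wider
// scope explicitly; without that header this registration is rejected.
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/src/pwa/sw.js', { scope: '/' })
    .catch((err) => console.error('Registration rejected:', err));
}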
I'm quite new to PWAs, service workers, and Workbox. I don't understand why and how the fetch event inside my service worker is supposed to be triggered.
I can see Workbox's log output precaching the files I've provided as an array.
How would I serve from the cache now, and why won't my fetch event handler be fired at all?
My service-worker file:
importScripts('https://storage.googleapis.com/workbox-cdn/releases/6.2.0/workbox-sw.js')
workbox.setConfig({
debug: true,
})
// To avoid async issues, we load strategies before we call it in the event listener
workbox.loadModule('workbox-core')
workbox.loadModule('workbox-routing')
workbox.loadModule('workbox-cacheable-response')
workbox.loadModule('workbox-strategies')
workbox.loadModule('workbox-expiration')
workbox.loadModule('workbox-precaching')
const { precacheAndRoute } = workbox.precaching
const wbManifest = self.__WB_MANIFEST
console.log(wbManifest)
precacheAndRoute(wbManifest);
const cacheNames = workbox.core.cacheNames
const { registerRoute, setCatchHandler, setDefaultHandler } = workbox.routing
const { CacheableResponsePlugin } = workbox.cacheableResponse
const {
NetworkFirst,
StaleWhileRevalidate,
NetworkOnly,
} = workbox.strategies
const { ExpirationPlugin } = workbox.expiration
const cacheName = cacheNames.runtime
const contentToCache = [
'/',
]
for (const entry of wbManifest) {
contentToCache.push(entry.url)
}
self.addEventListener('activate', e => {
e.waitUntil(self.clients.claim()) // Become available to all pages
})
self.addEventListener('install', e => {
e.waitUntil((async () => {
const cache = await caches.open(cacheName)
console.log('[Service Worker] Caching content', contentToCache)
await cache.addAll(contentToCache)
self.skipWaiting()
})())
})
self.addEventListener('push', e => {
console.log(e.data.text());
});
self.addEventListener('fetch', e => {
const { request } = e;
console.log(request)
e.respondWith(caches.match(request).then(cachedResponse => {
// This promise explicitly resolves with "undefined" when there are no matches, all other values are correct
if (cachedResponse !== undefined) {
return cachedResponse
} else {
return fetch(request).then(response => {
// Since we can use the response only once, put the clone into the cache and serve the original response
const responseClone = response.clone()
caches.open('CACHE_KEY_WHATEVER').then(cache => {
cache.put(request, responseClone)
})
return response
}).catch(() => {
// Retry logic
});
}
}))
})
precacheAndRoute(wbManifest) will both precache (during install, add the entries to the cache) and route (respond to fetch events with a cached response) for all entries that are present in wbManifest.
I see that your current code attempts to do both the precaching (in your own install handler) and the routing (in your own fetch handler) for all of those URLs. If you would prefer to handle that yourself, that's fine, but in that case you shouldn't be calling precacheAndRoute() at all. You're attempting to do the same thing as that method, but the event listeners for Workbox's precacheAndRoute() end up executing first and take precedence.
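For contrast, here is a minimal sketch of the first option, letting Workbox do all of it; the runtime route and cache name below are assumptions for illustration, not something your build requires.
importScripts('https://storage.googleapis.com/workbox-cdn/releases/6.2.0/workbox-sw.js')

const { precacheAndRoute } = workbox.precaching
const { registerRoute } = workbox.routing
const { StaleWhileRevalidate } = workbox.strategies

// Precaching and routing for every manifest entry is handled here; no custom
// install or fetch listeners are needed for these URLs.
precacheAndRoute(self.__WB_MANIFEST)

// Runtime caching for requests that are not in the manifest (the matcher and
// cache name are illustrative).
registerRoute(
  ({ request }) => request.destination === 'image',
  new StaleWhileRevalidate({ cacheName: 'runtime-images' })
)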
I've built my Flutter app and have the PWA version of it, hosted on Windows Server IIS 10 with a valid SSL certificate on a subdomain like https://pwa.mydomain.com, but when I visit the page, Chrome doesn't show the Install app button. How can I solve this?
What I've done so far:
Removed the references to files that no longer exist in the project.
Here is my service worker:
'use strict';
const MANIFEST = 'flutter-app-manifest';
const TEMP = 'flutter-temp-cache';
const CACHE_NAME = 'flutter-app-cache';
const RESOURCES = {
".dart_tool/package_config.json": "23791ac6df06edd3cb7ca99ab6949a5a",
".dart_tool/package_config_subset": "8453ab6a2adbaeda61d2d58965841223",
".dart_tool/version": "a13ed25c1389038783693bec771e71f6",
"assets/assets/images/svgIcon/wifiOff.svg": "ca97536b42e250214b667a606455a58f",
"assets/FontManifest.json": "fba83cca6cbe4083e6cc0498e15a1a6f",
"assets/fonts/MaterialIcons-Regular.otf": "4e6447691c9509f7acdbf8a931a85ca1",
"assets/NOTICES": "0e94f133a57e1e05b650363a497d95b0",
"assets/packages/cupertino_icons/assets/CupertinoIcons.ttf": "6d342eb68f170c97609e9da345464e5e",
"favicon.png": "19e188c1100dbd706d3d81309d478996",
"icons/launcher_logo192.png": "ea22e4258d33a9a184a426b38f3e6f46",
"icons/launcher_logo512.png": "19e188c1100dbd706d3d81309d478996",
"index.html": "fde3d71f9f8a4623bd98e956040169d2",
"/": "fde3d71f9f8a4623bd98e956040169d2",
"lib/main.dart": "5515740aeb62c25aa7692184d00b90b1",
"lib/src/qr_code_scanner_web.dart": "50b70d762e80fedd228974e786e8f38a",
"lib/src/qr_code_scanner_web_impl.dart": "a0f12d21cc75aefe84f6fbf9c1e0307e",
"lib/src/qr_code_scanner_web_impl_no_web.dart": "11e3ab6339d41ded6e650c96120e7560",
"main.dart.js": "6161190dc35f953037fb5c0e7db4f0e6",
"manifest.json": "a281fd45bea620b437523746b29b93d2",
"pubspec.lock": "6c06cc012f6524824b77f7286b6e256e",
"pubspec.yaml": "b36c6ef43dcaa5a79fc412a8368306cf",
"version.json": "cbca785ad23c1984a075073af0a61003"
//Removed for brevity
};
// The application shell files that are downloaded before a service worker can
// start.
const CORE = [
"/",
"main.dart.js",
"index.html",
"assets/NOTICES",
"assets/AssetManifest.json",
"assets/FontManifest.json"];
// During install, the TEMP cache is populated with the application shell files.
self.addEventListener("install", (event) => {
self.skipWaiting();
return event.waitUntil(
caches.open(TEMP).then((cache) => {
return cache.addAll(
CORE.map((value) => new Request(value, {'cache': 'reload'})));
})
);
});
// During activate, the cache is populated with the temp files downloaded in
// install. If this service worker is upgrading from one with a saved
// MANIFEST, then use this to retain unchanged resource files.
self.addEventListener("activate", function(event) {
return event.waitUntil(async function() {
try {
var contentCache = await caches.open(CACHE_NAME);
var tempCache = await caches.open(TEMP);
var manifestCache = await caches.open(MANIFEST);
var manifest = await manifestCache.match('manifest');
// When there is no prior manifest, clear the entire cache.
if (!manifest) {
await caches.delete(CACHE_NAME);
contentCache = await caches.open(CACHE_NAME);
for (var request of await tempCache.keys()) {
var response = await tempCache.match(request);
await contentCache.put(request, response);
}
await caches.delete(TEMP);
// Save the manifest to make future upgrades efficient.
await manifestCache.put('manifest', new Response(JSON.stringify(RESOURCES)));
return;
}
var oldManifest = await manifest.json();
var origin = self.location.origin;
for (var request of await contentCache.keys()) {
var key = request.url.substring(origin.length + 1);
if (key == "") {
key = "/";
}
// If a resource from the old manifest is not in the new cache, or if
// the MD5 sum has changed, delete it. Otherwise the resource is left
// in the cache and can be reused by the new service worker.
if (!RESOURCES[key] || RESOURCES[key] != oldManifest[key]) {
await contentCache.delete(request);
}
}
// Populate the cache with the app shell TEMP files, potentially overwriting
// cache files preserved above.
for (var request of await tempCache.keys()) {
var response = await tempCache.match(request);
await contentCache.put(request, response);
}
await caches.delete(TEMP);
// Save the manifest to make future upgrades efficient.
await manifestCache.put('manifest', new Response(JSON.stringify(RESOURCES)));
return;
} catch (err) {
// On an unhandled exception the state of the cache cannot be guaranteed.
console.error('Failed to upgrade service worker: ' + err);
await caches.delete(CACHE_NAME);
await caches.delete(TEMP);
await caches.delete(MANIFEST);
}
}());
});
// The fetch handler redirects requests for RESOURCE files to the service
// worker cache.
self.addEventListener("fetch", (event) => {
if (event.request.method !== 'GET') {
return;
}
var origin = self.location.origin;
var key = event.request.url.substring(origin.length + 1);
// Redirect URLs to the index.html
if (key.indexOf('?v=') != -1) {
key = key.split('?v=')[0];
}
if (event.request.url == origin || event.request.url.startsWith(origin + '/#') || key == '') {
key = '/';
}
// If the URL is not the RESOURCE list then return to signal that the
// browser should take over.
if (!RESOURCES[key]) {
return;
}
// If the URL is the index.html, perform an online-first request.
if (key == '/') {
return onlineFirst(event);
}
event.respondWith(caches.open(CACHE_NAME)
.then((cache) => {
return cache.match(event.request).then((response) => {
// Either respond with the cached resource, or perform a fetch and
// lazily populate the cache.
return response || fetch(event.request).then((response) => {
cache.put(event.request, response.clone());
return response;
});
})
})
);
});
self.addEventListener('message', (event) => {
// SkipWaiting can be used to immediately activate a waiting service worker.
// This will also require a page refresh triggered by the main worker.
if (event.data === 'skipWaiting') {
self.skipWaiting();
return;
}
if (event.data === 'downloadOffline') {
downloadOffline();
return;
}
});
// Download offline will check the RESOURCES for all files not in the cache
// and populate them.
async function downloadOffline() {
var resources = [];
var contentCache = await caches.open(CACHE_NAME);
var currentContent = {};
for (var request of await contentCache.keys()) {
var key = request.url.substring(origin.length + 1);
if (key == "") {
key = "/";
}
currentContent[key] = true;
}
for (var resourceKey of Object.keys(RESOURCES)) {
if (!currentContent[resourceKey]) {
resources.push(resourceKey);
}
}
return contentCache.addAll(resources);
}
// Attempt to download the resource online before falling back to
// the offline cache.
function onlineFirst(event) {
return event.respondWith(
fetch(event.request).then((response) => {
return caches.open(CACHE_NAME).then((cache) => {
cache.put(event.request, response.clone());
return response;
});
}).catch((error) => {
return caches.open(CACHE_NAME).then((cache) => {
return cache.match(event.request).then((response) => {
if (response != null) {
return response;
}
throw error;
});
});
})
);
}
This is the error I'm getting:
Uncaught (in promise) TypeError: Failed to execute 'addAll' on 'Cache': Request failed
I'm curious whether this error has something to do with CORS or not.
Thanks.
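For what it's worth, cache.addAll() rejects as soon as any single request in the list fails (for example a 404, a redirected response, or a response blocked by CORS), so a quick way to narrow this down is to fetch the CORE entries one at a time and log the result. This is only a diagnostic sketch; checkCoreEntries is a made-up helper, not part of the Flutter-generated worker.
// Hypothetical debugging helper: run it from the service worker (or paste the
// loop into the browser console on the PWA's origin) to see which CORE entry
// fails to download.
async function checkCoreEntries() {
  for (const value of CORE) {
    try {
      const response = await fetch(new Request(value, {'cache': 'reload'}));
      console.log(value, '->', response.status, response.type);
    } catch (err) {
      console.error(value, 'failed outright:', err);
    }
  }
}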
So let's say I have a function that takes care of user creation, which was previously available to us. Now we want that function to run as a single transaction: either all users are created together on success, or the whole thing fails when it encounters any error. I am assuming that passing the session from a transaction wrapper into the existing function should take care of this. Any number of such functions could be passed together, and the wrapper should handle them all as a single transaction.
const createUsers = async (users = []) => {
  try {
    return UserModel.create(users).then((res) => {
      logger.info(`[Start-Up] Created ${res.length} users`);
    }, (rej) => {
      logger.error(`[Start-Up] Failed to create users - ${rej}`);
      throw new Error(rej);
    });
  } catch (err) {
    throw new Error('Failed to create Users', err);
  }
};
Use https://www.npmjs.com/package/mongoose-transactions, or Mongoose's built-in transaction support documented at https://mongoosejs.com/docs/transactions.html:
const createUsers = async (users = []) => {
  const session = await UserModel.startSession();
  try {
    await session.withTransaction(() => {
      return UserModel.create(users, { session: session }).then((res) => {
        logger.info(`[Start-Up] Created ${res.length} users`);
      }, (rej) => {
        logger.error(`[Start-Up] Failed to create users - ${rej}`);
        throw new Error(rej);
      });
    });
  } catch (err) {
    throw new Error('Failed to create Users', err);
  } finally {
    session.endSession();
  }
};
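If the goal is the generic wrapper described in the question (any number of functions sharing one transaction), a sketch along these lines should work; runInTransaction and the second step in the usage comment are assumptions for illustration, not an established API.
const mongoose = require('mongoose');

// Hypothetical wrapper: every step receives the same session, and
// withTransaction commits only if all steps resolve; any throw aborts the
// whole transaction.
const runInTransaction = async (...steps) => {
  const session = await mongoose.startSession();
  try {
    await session.withTransaction(async () => {
      for (const step of steps) {
        await step(session);
      }
    });
  } finally {
    session.endSession();
  }
};

// Usage sketch: existing functions only need to accept and forward the session.
// runInTransaction(
//   (session) => UserModel.create(users, { session }),
//   (session) => AccountModel.create(accounts, { session })
// );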
I'm using Puppeteer to scrape a page whose contents change periodically, and Express to present the data in a REST API.
If I run Chrome so that I can see what is being shown in the browser, the new data is there, but it does not show up in get() at http://localhost:3005/api-weather; a normal browser only shows the original data.
const express = require('express');
const server = new express();
const cors = require('cors');
const morgan = require('morgan');
const puppeteer = require('puppeteer');
server.use(morgan('combined'));
server.use(
cors({
allowHeaders: ['sessionId', 'Content-Type'],
exposedHeaders: ['sessionId'],
origin: '*',
methods: 'GET, HEAD, PUT, PATCH, POST, DELETE',
preflightContinue: false
})
);
const WEATHER_URL = 'https://forecast.weather.gov/MapClick.php?lat=40.793588904953985&lon=-73.95738513173298';
const hazard_url2 = `file://C:/Users/xdevtran/Documents/vshome/wc_api/weather-forecast-nohazard.html`;
(async () => {
try {
const browser = await puppeteer.launch({ headless: true });
const page = await browser.newPage();
await page.setRequestInterception(true);
page.on("request", request => {
console.log(request.url());
request.continue();
});
await page.goto(hazard_url2, { timeout: 0, waitUntil: 'networkidle0' });
hazard = {
"HazardTitle": "stub",
"Hazardhref": "stub"
}
let forecast = await page.evaluate(() => {
try {
let forecasts = document.querySelectorAll("#detailed-forecast-body.panel-body")[0].children;
let weather = [];
for (var i = 0, element; element = forecasts[i]; i++) {
period = element.querySelector("div.forecast-label").textContent;
forecast = element.querySelector("div.forecast-text").textContent;
weather.push(
{
period,
forecast
}
)
}
return weather;
} catch (err) {
console.log('error in evaluate: ', err);
res.end();
}
}).catch(err => {
console.log('err.message :', err.message);
});
weather = forecast;
server.get('/api-weather', (req, res) => {
try {
res.end(JSON.stringify(weather, null, ' '));
console.log(weather);
} catch (err) {
console.log('failure: ', err);
res.sendStatus(500);
res.end();
return;
}
});
} catch (err) {
console.log('caught error :', err);
}
browser.close();
})();
server.listen(3005, () => {
console.log('http://localhost:3005/api-weather');
});
I've tried several solutions (waitUntil, waitFor, .then, and sleep), but nothing seems to work.
I wonder if it has something to do with Express's get()? I'm using res.end() instead of res.send() because the JSON looks better when I use res.end(); I don't really know the distinction.
I'm also open to using this reload solution, but I received errors and didn't use it.
I also tried waitForNavigation(), but I don't know how it works, either.
Maybe I'm using the wrong search term to find the solution. Could anyone point me in the right direction? Thank you.
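One way to make the question concrete: if the scrape runs inside the route handler instead of once at startup, every request returns the page's current contents. This is only a sketch (launching a browser per request is expensive); the URL and selectors are copied from the question, everything else is illustrative.
const express = require('express');
const puppeteer = require('puppeteer');

const server = express();
const WEATHER_URL = 'https://forecast.weather.gov/MapClick.php?lat=40.793588904953985&lon=-73.95738513173298';

// Scrape the forecast on demand so the data is never stale.
async function scrapeForecast() {
  const browser = await puppeteer.launch({ headless: true });
  try {
    const page = await browser.newPage();
    await page.goto(WEATHER_URL, { waitUntil: 'networkidle0' });
    return await page.evaluate(() => {
      const rows = document.querySelectorAll('#detailed-forecast-body.panel-body')[0].children;
      return Array.from(rows, (el) => ({
        period: el.querySelector('div.forecast-label').textContent,
        forecast: el.querySelector('div.forecast-text').textContent,
      }));
    });
  } finally {
    await browser.close();
  }
}

server.get('/api-weather', async (req, res) => {
  try {
    res.json(await scrapeForecast());
  } catch (err) {
    console.log('failure: ', err);
    res.sendStatus(500);
  }
});

server.listen(3005, () => console.log('http://localhost:3005/api-weather'));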
I am using Axios and an Express.js API to connect to my MongoDB database. I have a .get() request that works for one collection but doesn't work for any other collection. The app currently connects to the database and can access one of the collections, called users. I have another collection set up under the same database called tasks; both users and tasks are set up the same way and used the same way in the code. The users calls can reach the DB (GET, POST), but the tasks calls fail when calling the GET or POST functions. When viewing the .get() API request in the browser, it just hangs and never returns anything or finishes the request.
Any help would be greatly appreciated!
The project is on GitHub under SCRUM-150.
API connection
MONGO_URI=mongodb://localhost:27017/mydb
Working
methods: {
//load all users from DB, we call this often to make sure the data is up to date
load() {
http
.get("users")
.then(response => {
this.users = response.data.users;
})
.catch(e => {
this.errors.push(e);
});
},
//opens delete dialog
setupDelete(user) {
this.userToDelete = user;
this.deleteDialog = true;
},
//opens edit dialog
setupEdit(user) {
Object.keys(user).forEach(key => {
this.userToEdit[key] = user[key];
});
this.editName = user.name;
this.editDialog = true;
},
//build the alert info for us
//Will emit an alert, followed by a boolean for success, the type of call made, and the name of the
//resource we are working on
alert(success, callName, resource) {
console.log('Page Alerting')
this.$emit('alert', success, callName, resource)
this.load()
}
},
//get those users
mounted() {
this.load();
}
};
Broken
methods: {
//load all tasks from DB, we call this often to make sure the data is up to date
load() {
http
.get("tasks")
.then(response => {
this.tasks = response.data.tasks
})
.catch(e => {
this.errors.push(e);
});
},
//opens delete dialog
setupDelete(tasks) {
this.taskToDelete = tasks;
this.deleteDialog = true;
},
//opens edit dialog
setupEdit(tasks) {
Object.keys(tasks).forEach(key => {
this.taskToEdit[key] = tasks[key];
});
this.editName = tasks.name;
this.editDialog = true;
},
//build the alert info for us
//Will emit an alert, followed by a boolean for success, the type of call made, and the name of the
//resource we are working on
alert(success, callName, resource) {
console.log('Page Alerting')
this.$emit('alert', success, callName, resource)
this.load()
}
},
//get those tasks
mounted() {
this.load();
}
};
Are you setting any access controls in the code?
Also refer to MongoDB's documentation on collection-level access control:
https://docs.mongodb.com/manual/core/collection-level-access-control/
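For reference, collection-level access control from that page comes down to defining a role whose privileges name a specific collection and granting it to the user; here is a mongo shell sketch with placeholder database, collection, and user names.
// Mongo shell sketch: a role limited to the "tasks" collection, granted to the
// application user. All names here are placeholders.
const appDb = db.getSiblingDB('mydb');
appDb.createRole({
  role: 'tasksReadWrite',
  privileges: [
    { resource: { db: 'mydb', collection: 'tasks' }, actions: ['find', 'insert', 'update', 'remove'] }
  ],
  roles: []
});
appDb.grantRolesToUser('appUser', ['tasksReadWrite']);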
Here is my solution:
In your app.js, have this:
let mongoose = require('mongoose');

mongoose.connect('Your/Database/Url', {
  keepAlive: true,
  reconnectTries: 2,
  useMongoClient: true
});
In your route have this:
let mongoose = require('mongoose');
let db = mongoose.connection;

fetchAndSendDatabase('yourCollectionName', db);

function fetchAndSendDatabase(dbName, db) {
  db.collection(dbName).find({}).toArray(function(err, result) {
    if (err) {
      console.log("couldn't get database items. " + err);
    } else {
      console.log('Database received successfully');
    }
  });
}
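As a usage sketch, the same query can be wired into an Express route so the documents are actually sent back to the client rather than only logged; the route path and response shape below are assumptions chosen to match the response.data.tasks access in the question's Vue code.
const express = require('express');
const mongoose = require('mongoose');

const router = express.Router();
const db = mongoose.connection;

// Hypothetical route: same find().toArray() call as above, but the result is
// returned to the client instead of only being logged.
router.get('/tasks', (req, res) => {
  db.collection('tasks').find({}).toArray((err, result) => {
    if (err) {
      console.log("couldn't get database items. " + err);
      return res.sendStatus(500);
    }
    res.json({ tasks: result });
  });
});

module.exports = router;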