My project structure looks something like this:
.tmp
    scripts
        bundle.js
        bundle.map.js
app
    scripts
        main.js
    styles
        buttons.scss
        grid.scss
        ....
        main.scss
    index.html
    sw.js
My service worker registration:
// Register Service Worker
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/sw.js')
    .then(reg => console.log("Registration successful"))
    .catch(() => console.log("Registration failed"));
}
I want to use a Service Worker to make an offline web app. I cached the resources and implemented the code to serve them on fetch events, but they aren't served and I keep getting this error: The FetchEvent for "http://localhost:9000/" resulted in a network error response: the promise was rejected, along with failed to fetch. I'm using localhost with gulp serve for dev. I think the service worker should be placed somewhere else, like .tmp maybe? What do you think?
My sw.js file:
const staticCache = "staticCache-v1";

const staticAssets = [
  //js
  "browser-sync/browser-sync-client.js?v=2.24.5",
  "index.html",
  "scripts/bundle.js",
  //css
  "styles/main.scss",
  //html
  "index.html",
  //fonts
  "https://fonts.gstatic.com/s/sourcesanspro/v11/6xK3dSBYKcSV-LCoeQqfX1RYOo3qNa7lujVj9_mf.woff2",
  "https://free.currencyconverterapi.com/api/v5/currencies"
];

self.addEventListener("install", event => {
  // Cache static resources
  event.waitUntil(
    caches.open(staticCache).then(cache => cache.addAll(staticAssets))
  );
});

self.addEventListener("activate", event => {
  // clean old SW
});

self.addEventListener("fetch", event => {
  // try placing the sw in .tmp
  console.log("fetch request :", event.request);
  event.respondWith(
    caches.match(event.request).then(cacheResponse => {
      return cacheResponse || fetch(event.request);
    })
  );
});
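For comparison, a common way to keep a rejected promise from reaching respondWith is to catch fetch failures and fall back to something already in the cache. This is only a minimal sketch against the staticCache above, not the project's actual code:

self.addEventListener("fetch", event => {
  event.respondWith(
    caches.match(event.request).then(cacheResponse => {
      // Serve from the cache when possible, otherwise go to the network.
      return cacheResponse || fetch(event.request).catch(() => {
        // The network failed: for page navigations, fall back to the cached shell.
        // Other request types still end up without a response here.
        if (event.request.mode === "navigate") {
          return caches.match("index.html");
        }
      });
    })
  );
});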
I have a service worker that is supposed to cache an offline.html page that is displayed if the client has no network connection. However, it sometimes believes the navigator is offline even when it is not. That is, navigator.onLine === false. This means the user may get offline.html instead of the actual content even when online, which is obviously something I'd like to avoid.
This is how I register the service worker in my main.js:
// Install service worker for offline use and caching
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/service-worker.js', {scope: '/'});
}
My current service-worker.js:
const OFFLINE_URL = '/mysite/offline';
const CACHE_NAME = 'mysite-static-v1';

self.addEventListener('install', (event) => {
  event.waitUntil(
    // Cache the offline page when installing the service worker
    fetch(OFFLINE_URL, { credentials: 'include' }).then(response =>
      caches.open(CACHE_NAME).then(cache => cache.put(OFFLINE_URL, response)),
    ),
  );
});

self.addEventListener('fetch', (event) => {
  const requestURL = new URL(event.request.url);

  if (requestURL.origin === location.origin) {
    // Load static assets from cache if network is down
    if (/\.(css|js|woff|woff2|ttf|eot|svg)$/.test(requestURL.pathname)) {
      event.respondWith(
        caches.open(CACHE_NAME).then(cache =>
          caches.match(event.request).then((result) => {
            if (navigator.onLine === false) {
              // We are offline so return the cached version immediately, null or not.
              return result;
            }
            // We are online so let's run the request to make sure our content
            // is up-to-date.
            return fetch(event.request).then((response) => {
              // Save the result to cache for later use.
              cache.put(event.request, response.clone());
              return response;
            });
          }),
        ),
      );
      return;
    }
  }

  if (event.request.mode === 'navigate' && navigator.onLine === false) {
    // Uh-oh, we navigated to a page while offline. Let's show our default page.
    event.respondWith(caches.match(OFFLINE_URL));
    return;
  }

  // Passthrough for everything else
  event.respondWith(fetch(event.request));
});
What am I doing wrong?
navigator.onLine and the related events can be useful when you want to update your UI to indicate that you're offline and, for instance, only show content that exists in a cache.
But I'd avoid writing service worker logic that relies on checking navigator.onLine. Instead, attempt to make a fetch() unconditionally, and if it fails, provide a backup response. This will ensure that your web app behaves as expected regardless of whether the fetch() fails due to being offline, due to lie-fi, or due to your web server experiencing issues.
// Other fetch handler code...
if (event.request.mode === 'navigate') {
  return event.respondWith(
    fetch(event.request).catch(() => caches.match(OFFLINE_URL))
  );
}
// Other fetch handler code...
I am using Puppeteer to take screenshots of a web page for my company. I need to test multiple people's accounts so that means visiting the page multiple times (150 times in this case). This results in our firewall kicking me out for making too many requests.
My solution is to just fetch the contents of the page and save them locally. Then I use puppeteer on that local file, overriding the function used to get data from our servers to instead just use data already loaded into Node from a CSV.
All of this works, but it looks like it's still making requests to our servers.
I tried giving it a userDataDir so it could cache any resources. In theory, if it's loading from file://, the resources are cached, and there are no Ajax requests, it shouldn't be making any further requests, right?
I also tried installing a debugging proxy, but since it's HTTPS I can't see what it's trying to request.
This is how I start it:
puppeteer.launch({
  userDataDir: "temp/"
})
.then(browser => {
  next(browser, links);
})
.catch(error => {
  cb(error, null);
});
next will iterate through any links it needs to visit.
This part saves the page locally:
if (this._linkCache[baseLink] === undefined) {
  fetch(baseLink)
    .then(resp => resp.text())
    .then(contents => {
      fs.writeFile(fullFileName, contents, 'utf8', err => {
        if (err) {
          cb(err, null);
        } else {
          this._linkCache[baseLink] = fileUrl;
          gotoPage(fileUrl);
        }
      });
    })
    .catch(error => {
      cb(error, null);
    });
}
// Go to the cached version
else {
  gotoPage(this._linkCache[baseLink] + queryParams);
}
And this gets the screenshots:
const gotoPage = async(url) => {
  try {
    const page = await browser.newPage();

    // Override 'fetchAccountData' function
    await page.evaluateOnNewDocument(testData => {
      window["fetchAccountData"] = (cb: (err: any, data: any)=>void) => {
        cb(null, testData);
      };
    }, data);

    // Go to page and get screenshot
    await page.goto(url);
    const screenie = `${outputPath}${uuid()}.png`;
    await page.screenshot({ fullPage: true, path: screenie, type: "png" });

    pageHtml.push(`<img src="file://${screenie}" />`);

    next(browser, rest);
  } catch (e) {
    cb(e, null);
  }
};
I was hoping this would be able to only make a few requests at the beginning while it saves the html locally and caches all the resources, but it seems to make a request for every link.
How can I stop it?
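One way to both see and stop those requests from inside Puppeteer is request interception. This is a minimal sketch that could go in gotoPage before page.goto(url); the file:// allow-rule is an assumption about what should still be loaded:

// Intercept every request the page makes so nothing reaches the network
// unless it is explicitly allowed.
await page.setRequestInterception(true);

page.on('request', request => {
  // Log the URL; this works for HTTPS too, since it runs inside the browser.
  console.log('outgoing request:', request.url());

  if (request.url().startsWith('file://')) {
    // Allow the locally saved page and any local assets.
    request.continue();
  } else {
    // Block anything that would hit the real servers.
    request.abort();
  }
});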
I have upgraded my CRA to version 3.10.8 as it has built-in support for PWA.
As a next step I have registered my service worker in index.js, and I think it got registered successfully.
Now my main goal is to have some offline caching for our API calls (backend in Rails), so that when there is no network I can serve the cached response.
Is there anything else that I need to do to serve cached API responses?
When I built my app with Create React App, all it did was create a file called
registerServiceWorker.js, which then gets called from index.js.
Also, the final app we are building is packaged with Cordova, so most of the assets will be local; our main aim is to cache the API calls. Is this the right way to go? We are using Redux for state management, but have not used any persistence as of now.
Any help/tips would be highly appreciated.
registerServiceWorker.js code below...
// In production, we register a service worker to serve assets from local cache.
// This lets the app load faster on subsequent visits in production, and gives
// it offline capabilities. However, it also means that developers (and users)
// will only see deployed updates on the "N+1" visit to a page, since previously
// cached resources are updated in the background.

const isLocalhost = Boolean(
  window.location.hostname === 'localhost' ||
    // [::1] is the IPv6 localhost address.
    window.location.hostname === '[::1]' ||
    // 127.0.0.1/8 is considered localhost for IPv4.
    window.location.hostname.match(
      /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
    )
);

export default function register() {
  if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
    // The URL constructor is available in all browsers that support SW.
    const publicUrl = new URL(process.env.PUBLIC_URL, window.location);
    if (publicUrl.origin !== window.location.origin) {
      // Our service worker won't work if PUBLIC_URL is on a different origin
      // from what our page is served on. This might happen if a CDN is used to
      // serve assets; see https://github.com/facebookincubator/create-react-app/issues/2374
      return;
    }

    window.addEventListener('load', () => {
      const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
      if (!isLocalhost) {
        // Is not localhost. Just register the service worker.
        registerValidSW(swUrl);
      } else {
        // This is running on localhost. Let's check if a service worker still exists or not.
        checkValidServiceWorker(swUrl);
      }
    });
  }
}

function registerValidSW(swUrl) {
  navigator.serviceWorker
    .register(swUrl)
    .then(registration => {
      registration.onupdatefound = () => {
        const installingWorker = registration.installing;
        installingWorker.onstatechange = () => {
          if (installingWorker.state === 'installed') {
            if (navigator.serviceWorker.controller) {
              // At this point, the old content will have been purged and
              // the fresh content will have been added to the cache.
              // It's the perfect time to display a "New content is
              // available; please refresh." message in your web app.
              console.log('New content is available; please refresh.');
            } else {
              // At this point, everything has been precached.
              // It's the perfect time to display a
              // "Content is cached for offline use." message.
              console.log('Content is cached for offline use.');
            }
          }
        };
      };
    })
    .catch(error => {
      console.error('Error during service worker registration:', error);
    });
}

function checkValidServiceWorker(swUrl) {
  // Check if the service worker can be found. If it can't, reload the page.
  fetch(swUrl)
    .then(response => {
      // Ensure the service worker exists, and that we really are getting a JS file.
      if (
        response.status === 404 ||
        response.headers.get('content-type').indexOf('javascript') === -1
      ) {
        // No service worker found. Probably a different app. Reload the page.
        navigator.serviceWorker.ready.then(registration => {
          registration.unregister().then(() => {
            window.location.reload();
          });
        });
      } else {
        // Service worker found. Proceed as normal.
        registerValidSW(swUrl);
      }
    })
    .catch(() => {
      console.log(
        'No internet connection found. App is running in offline mode.'
      );
    });
}

export function unregister() {
  if ('serviceWorker' in navigator) {
    navigator.serviceWorker.ready.then(registration => {
      registration.unregister();
    });
  }
}
I am using Create React App version 3.
Change the condition statement: remove the condition (process.env.NODE_ENV === 'production' &&) so it only has if ('serviceWorker' in navigator).
Create your custom-service-worker.js file in the public folder and rewrite const swUrl = `${process.env.PUBLIC_URL}/service-worker.js` as const swUrl = './custom-service-worker.js' (a small sketch follows).
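A minimal sketch of the adjusted registration (a simplified stand-in for the boilerplate above, not a drop-in replacement):

if ('serviceWorker' in navigator) {
  window.addEventListener('load', () => {
    // Register the hand-written worker from the public folder instead of
    // the CRA-generated service-worker.js.
    const swUrl = './custom-service-worker.js';
    navigator.serviceWorker.register(swUrl);
  });
}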
In the custom-service-worker.js file in the public folder, add the following code; refer to the sample external API calls and place your own API URLs to be cached:
importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");

if (workbox) {
  console.log('workbox loaded', workbox.routing);
}

// to cache the css, html, js and image files
workbox.routing.registerRoute(
  /\.(?:js|html|css|images|svg)$/,
  new workbox.strategies.NetworkFirst()
);

workbox.routing.registerRoute(
  'http://localhost:3000',
  new workbox.strategies.NetworkFirst()
);

// to cache the external api calls
workbox.routing.registerRoute(
  new RegExp('https://jsonplaceholder.typicode.com/users'),
  new workbox.strategies.StaleWhileRevalidate()
);

// to cache the external api calls
workbox.routing.registerRoute(
  new RegExp('http://insight.dev.schoolwires.com/HelpAssets/C2Assets/C2Files/C2ImportUsersSample.csv'),
  new workbox.strategies.StaleWhileRevalidate()
);
This registers and works perfectly fine online. But when the server is turned off and the page is refreshed, the registered service worker no longer shows up in the console and there are no caches in the cache storage.
if ("serviceWorker" in navigator){
navigator.serviceWorker.register("/sw.js").then(function(registration){
console.log("service worker reg", registration.scope)
}).catch(function(error){
console.log("Error:", error);
})
}
in sw.js
var CACHE_NAME = 'cache-v1';
var urlsToCache = [
  '/index.html'
];

self.addEventListener('install', function(event) {
  event.waitUntil(
    caches.open(CACHE_NAME).then(function(cache) {
      return cache.addAll(urlsToCache);
    })
  );
  //event.waitUntil(self.skipWaiting());
});
You're properly adding your file to the cache, but you're not returning the cached file on request.
Your sw.js should also have the following code:
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.match(event.request)
      .then(function(response) {
        // Cache hit - return response
        if (response) {
          return response;
        }
        return fetch(event.request);
      })
  );
});
It's from Introduction to service workers.
Moreover, you should rather cache / instead of /index.html, since you usually don't hit the index.html file directly.
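That could look like this (a sketch based on the urlsToCache above):

var urlsToCache = [
  '/',           // what a navigation to the site root actually requests
  '/index.html'
];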
Your service worker doesn't show any console.log when offline because you don't have any activate code. This article, the offline cookbook, is very useful for understanding the details.
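For illustration, a common activate handler, sketched against the CACHE_NAME above, logs activation and deletes caches left over from older versions:

self.addEventListener('activate', function(event) {
  console.log('service worker activated');
  event.waitUntil(
    caches.keys().then(function(keys) {
      // Delete every cache whose name doesn't match the current version.
      return Promise.all(
        keys
          .filter(function(key) { return key !== CACHE_NAME; })
          .map(function(key) { return caches.delete(key); })
      );
    })
  );
});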
I have an index.html file in the parent dir, and with http://localhost:3000/#/ that's what's being loaded instead of the sidebar.html file. If I try http://localhost:3000/#/home it redirects to todo.
No errors are being thrown.
app.js
'use strict';

angular
  .module('App', [
    'ui.router'
    //'lbServices'
  ])
  .run(['$rootScope', '$state', '$stateParams',
    function ($rootScope, $state, $stateParams) {
      $rootScope.$state = $state;
      $rootScope.$stateParams = $stateParams;
    }])
  .config(['$stateProvider', '$urlRouterProvider', function($stateProvider, $urlRouterProvider) {
    $stateProvider
      .state('home', {
        title: 'Dashboard',
        url: '/',
        templateUrl: '../shared/sidebar/sidebar.html',
        controller: 'sidebarCtrl'
      });

    $urlRouterProvider.otherwise('todo');
  }]);
server.js
var loopback = require('loopback');
var boot = require('loopback-boot');
var app = module.exports = loopback();
// Set up the /favicon.ico
app.use(loopback.favicon());
// request pre-processing middleware
app.use(loopback.compress());
// -- Add your pre-processing middleware here --
// boot scripts mount components like REST API
boot(app, __dirname);
// -- Mount static files here--
// All static middleware should be registered at the end, as all requests
// passing the static middleware are hitting the file system
// Example:
var path = require('path');
app.use(loopback.static(path.resolve(__dirname, '../client')));
app.use(loopback.static(path.resolve(__dirname, '../node_modules')));
// Requests that get this far won't be handled
// by any middleware. Convert them into a 404 error
// that will be handled later down the chain.
app.use(loopback.urlNotFound());
// The ultimate error handler.
app.use(loopback.errorHandler());
app.start = function() {
  // start the web server
  return app.listen(function() {
    app.emit('started');
    console.log('Web server listening at: %s', app.get('url'));
  });
};

// start the server if `$ node server.js`
if (require.main === module) {
  app.start();
}
Not sure if this is related, but initially my server was set to listen on 0.0.0.0:3000, and if I typed that into the URL bar it went to a Google search, although localhost:3000 seemed to work. I have since changed the listening address to localhost:3000.
Given that the URL http://localhost:3000/#/, which should trigger the home state, is loading index.html, that most likely means the path
templateUrl: '../shared/sidebar/sidebar.html',
is not set properly. Use the (e.g. Chrome) developer tools and check whether sidebar.html is being loaded.
The fact that the URL http://localhost:3000/#/home navigates to the todo state is also correct, because '/home' is not mapped to the home state:
.state('home', {
  ...
  url: '/',
});
so the default is triggered
$urlRouterProvider.otherwise('todo');
NOTE: From the question and the described issue, I expect that index.html is set up properly. It contains <div ui-view=""></div>, which will be filled with sidebar.html. That's why I would suspect a wrong path to that partial view template...
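For illustration only, not part of the original answer: a sketch of how the home state could also be reached at /home and pointed at a root-relative template path. The exact templateUrl depends on where sidebar.html is actually served from:

$stateProvider
  .state('home', {
    title: 'Dashboard',
    // Mapping '/home' makes http://localhost:3000/#/home resolve to this state.
    url: '/home',
    // Assumed location under the static root; verify the real path
    // in the browser's network tab.
    templateUrl: 'shared/sidebar/sidebar.html',
    controller: 'sidebarCtrl'
  });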