Service worker should not cache the whole page

Initially I registered the '/' route along with my .css and .js files as the URLs to be cached. After that I realized it cached the whole page, which means I never see any updates in my view, even when there is an update in the database.
So I changed it to cache only my .css and .js files and no longer the '/' route, since I assumed that was the problem.
But after a while the same problem occurred again. Checking the console, I saw that the whole page was cached again, even though my service worker file had already been changed to this:
var CACHE_NAME = 'cache-v2';
var urlsToCache = [
  '/assets/css/app.css',
  '/assets/js/main.js',
  '/assets/js/other.js'
];

self.addEventListener('install', function(event) {
  event.waitUntil(
    caches.open(CACHE_NAME)
      .then(function(cache) {
        console.log('Opened cache');
        return cache.addAll(urlsToCache);
      })
  );
});
self.addEventListener('activate', function(e) {
  console.log('[ServiceWorker] Activate');
  e.waitUntil(
    caches.keys().then(function(keyList) {
      return Promise.all(keyList.map(function(key) {
        if (key !== CACHE_NAME) {
          console.log('[ServiceWorker] Removing old cache', key);
          return caches.delete(key);
        }
      }));
    })
  );
  return self.clients.claim();
});
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.match(event.request)
      .then(function(response) {
        // Cache hit - return response
        if (response) {
          return response;
        }
        var fetchRequest = event.request.clone();
        return fetch(fetchRequest).then(
          function(response) {
            if (!response || response.status !== 200 || response.type !== 'basic') {
              return response;
            }
            var responseToCache = response.clone();
            caches.open(CACHE_NAME)
              .then(function(cache) {
                cache.put(event.request, responseToCache);
              });
            return response;
          }
        );
      })
  );
});

In case anyone has the same problem, here is what I had to change:
self.addEventListener('fetch', function(e) {
  console.log('[ServiceWorker] Fetch', e.request.url);
  e.respondWith(
    caches.match(e.request).then(function(response) {
      return response || fetch(e.request);
    })
  );
});
So my previous fetch handler was the problem: it wrote every successful same-origin response back into the cache, including the HTML pages themselves, and then kept serving those stale copies.
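If you still want some runtime caching, a tighter alternative is to write back only the pre-listed assets, so pages and API responses always come from the network. A minimal sketch, assuming the same CACHE_NAME and urlsToCache as above:

self.addEventListener('fetch', function(event) {
  var url = new URL(event.request.url);
  // Only handle requests for the assets listed in urlsToCache; everything
  // else (HTML pages, API calls) falls through to the network untouched.
  if (url.origin !== self.location.origin || urlsToCache.indexOf(url.pathname) === -1) {
    return;
  }
  event.respondWith(
    caches.match(event.request).then(function(cached) {
      return cached || fetch(event.request).then(function(response) {
        var copy = response.clone();
        caches.open(CACHE_NAME).then(function(cache) {
          cache.put(event.request, copy);
        });
        return response;
      });
    })
  );
});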

Related

Progressive Web Apps laravel cache issue

I am working on a PWA for my Laravel project. The PWA itself works fine and my website runs over HTTPS only, but when I log in, the website shows the previously logged-in user's details. How can I solve this? I am afraid my website is also being served from cache.
Also, on localhost the PWA install icon is not showing in my browser; how can I solve that on localhost?
Here is my service worker js file:
// Cache API
const staticCacheName = 'app-shell-v2.0';
const filesToCache = [
  // Files
  '.',
  // css and js files
];

self.addEventListener('install', event => {
  console.log('Installing worker to cache static assets');
  self.skipWaiting();
  event.waitUntil(
    caches.open(staticCacheName)
      .then(cache => {
        return cache.addAll(filesToCache);
      })
  );
});

self.addEventListener('activate', event => {
  console.log('Activating new worker...');
  const cacheWhitelist = [staticCacheName];
  self.clients.claim();
  event.waitUntil(
    caches.keys().then(cacheNames => {
      return Promise.all(
        cacheNames.map(cacheName => {
          if (cacheWhitelist.indexOf(cacheName) === -1) {
            return caches.delete(cacheName);
          }
        })
      );
    })
  );
});

self.addEventListener('fetch', event => {
  // exclude directories from cache
  /*if (event.request.url.match('^.*(\/admin\/).*$','^.*(\/users\/show\/).*$')) {
    return false;
  }
  if (event.request.url.endsWith('authenticate')) {
    return false;
  }*/
  console.log('Fetch event for ', event.request.url);
  event.respondWith(
    caches.match(event.request)
      .then(response => {
        if (response) {
          console.log('Found ', event.request.url, ' in cache');
          return response;
        }
        console.log('Network request for ', event.request.url);
        return fetch(event.request)
          .then(response => {
            if (response.status === 404) {
              return caches.match('https://site_url/404.html');
            }
            return caches.open(staticCacheName)
              .then(cache => {
                cache.put(event.request.url, response.clone());
                return response;
              });
          });
      }).catch(error => {
        console.log('Error, ', error);
        return caches.match('https://site_url/offline.html');
      })
  );
});
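One common way to avoid serving a stale, previously logged-in page is to handle navigation requests network-first and fall back to the cache only when offline. A minimal sketch reusing the staticCacheName above (the exact strategy is an assumption, not code from the question):

self.addEventListener('fetch', event => {
  // Network-first for page navigations so fresh, user-specific HTML is shown;
  // fall back to the cached copy only when the network is unavailable.
  if (event.request.mode === 'navigate') {
    event.respondWith(
      fetch(event.request)
        .then(response => {
          const copy = response.clone();
          caches.open(staticCacheName).then(cache => cache.put(event.request, copy));
          return response;
        })
        .catch(() => caches.match(event.request))
    );
    return;
  }
  // Static assets can keep the cache-first logic shown above.
});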

Caching only images inside of a service worker

Following is the code for the SW; everything works fine. I was previously caching all dynamic pages, but that was creating some issues: DOM changes made after user interaction were not reflected the next time the page was viewed, and the original DOM was always shown.
So now I need to cache only images dynamically. I have commented out the original code that cached all content.
self.addEventListener('activate', function(event) {
  console.log('[Service Worker] Activating Service Worker ....', event);
  /*event.waitUntil(
    caches.keys()
      .then(function(keyList) {
        return Promise.all(keyList.map(function(key) {
          if (key !== CACHE_STATIC_NAME && key !== CACHE_DYNAMIC_NAME) {
            console.log('[Service Worker] Removing old cache.', key);
            return caches.delete(key);
          }
        }));
      })
  );*/
  return self.clients.claim();
});

self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.match(event.request)
      .then(function(response) {
        if (response) {
          return response;
        } else {
          /*return fetch(event.request)
            .then(function(res) {
              return caches.open(CACHE_DYNAMIC_NAME)
                .then(function(cache) {
                  /!*if ( event.request.url.indexOf( 'maps.google' ) !== -1 ) {
                    return false;
                  }*!/
                  if (!/^https?:$/i.test(new URL(event.request.url).protocol)) {
                    return;
                  }
                  cache.put(event.request.url, res.clone());
                  return res;
                })
            })
            .catch(function(err) {
              console.log('show offline page as cashe and network not available')
              return caches.open(CACHE_STATIC_NAME)
                .then(function (cache) {
                  return cache.match(OFFLINE_URL);
                });
            });*/
          return fetch(event.request);
        }
      })
  );
});
I'd recommend following the approach outlined in the "Service Worker Caching Strategies Based on Request Types" article and using request.destination inside your fetch handler to figure out which requests are going to be used for images.
self.addEventListener('fetch', (event) => {
  if (event.request.destination === 'image') {
    event.respondWith(/* your caching logic here */);
  }
  // If you don't call event.respondWith() for some requests,
  // the normal loading behavior will be used by default.
});
It's possible that a request for an image might be loaded via something like XMLHttpRequest, in which case the request.destination value likely won't be set properly. If that's the case, I'd recommend just checking the portion of the URL you feel is most likely to be unique using string comparisons.
self.addEventListener('fetch', (event) => {
  const url = new URL(event.request.url);
  if (url.origin.includes('maps.google')) {
    event.respondWith(/* your caching logic here */);
  }
  // If you don't call event.respondWith() for some requests,
  // the normal loading behavior will be used by default.
});
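For completeness, one possible way to fill in the /* your caching logic here */ placeholder is a simple cache-first strategy against a dedicated image cache (the cache name here is an assumption, not from the original code):

const IMAGE_CACHE = 'images-v1'; // hypothetical cache name

self.addEventListener('fetch', (event) => {
  if (event.request.destination === 'image') {
    event.respondWith(
      caches.open(IMAGE_CACHE).then((cache) =>
        cache.match(event.request).then((cached) =>
          cached ||
          fetch(event.request).then((response) => {
            // Only store successful (non-opaque) responses.
            if (response.ok) {
              cache.put(event.request, response.clone());
            }
            return response;
          })
        )
      )
    );
  }
});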

Bootstrap Selectpicker not refreshing

When a user clicks the edit icon, the contents of a selectpicker are updated and then refreshed. Then the selectpicker's value is set and it is refreshed again, but for some reason it does not show the selected value.
Everything works fine when I manually enter the same code in the console.
$('#IncWidgetId').val(864)// the value used when breaking in console
$('#IncWidgetId').selectpicker('refresh')
I have ensured that the selectpicker is updated with the new option values, and confirmed that the deferred fires in the proper order.
As a double check, I also separated the .selectpicker('refresh') call to make sure it didn't fire before the option was selected due to async timing, but it still does not update the selectpicker with the selected value.
$(document).on('click', '[id^=EditWidgetId-]', function () {
  var id = $(this).attr('id').split('-')[1];
  var mfg = $(this).data('mfg');
  var widgetid = $(this).data('widgetid ');
  var mfgSelect = $('input[name=mfg][value="' + mfg + '"]');
  mfgSelect.prop('checked', true);
  $.when(LoadWidgets(mfg)).then(function () {
    console.log('then function');
    $('#IncWidgetId').val(widgetid);
  }).done(function () {
    console.log('done function');
    $('#IncWidgetId').selectpicker('refresh');
  });
  $('#modalWidgetNew').modal('show');
});

function LoadWidgets(mfg) {
  var r = $.Deferred();
  console.log('before ajax');
  r.resolve($.ajax({
    url: '/Widgets/FilterWidgetsDropdown',
    type: 'GET',
    cache: false,
    data: { mfg: mfg },
    success: function (partial) {
      $('#IncWidgetDDArea').html(partial);
      $('#IncWidgetId').selectpicker('refresh');
    },
    error: function (x, e) {
      if (x.status == 0) {
        alert('You are offline!!\n Please Check Your Network.');
      } else if (x.status == 404) {
        alert('Requested URL not found.');
      } else if (x.status == 500) {
        alert('Internal Server Error.');
      } else if (e == 'parsererror') {
        alert('Error.\nParsing JSON Request failed.');
      } else if (e == 'timeout') {
        alert('Request Time out.');
      } else {
        alert('Unknown Error.\n' + x.responseText);
      }
    }
  })).done(function () {
    console.log('after ajax');
    return r.promise();
  });
}
What am I missing?
There was such an issue in old versions of this plugin.
Try to destroy it and initialize again. Something like this:
$('#IncWidgetId').selectpicker('destroy');
$('#IncWidgetId').selectpicker();
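If that turns out to be the cause, the destroy/re-initialize step would replace the refresh in the done callback from the question, roughly like this (a sketch based on the code above, not a confirmed fix):

$.when(LoadWidgets(mfg)).then(function () {
  $('#IncWidgetId').val(widgetid);
}).done(function () {
  // Rebuild the selectpicker instead of only refreshing it so the
  // newly set value is rendered.
  $('#IncWidgetId').selectpicker('destroy');
  $('#IncWidgetId').selectpicker();
});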

Using a URL query parameter to version cached responses

I am trying to cache specific URLs. Each URL carries an md5 hash, and when a URL comes in with a new md5 I want to remove the currently cached entry and add the new one.
Cached URL: http://www.mysite.lo/cards/index.php?md5=f51c2ef7795480ef2e0b1bd24c9e07
function shouldFetch(event) {
  if (event.request.url.indexOf('/cards/') == -1) {
    return false;
  }
  return true;
}

self.addEventListener('fetch', function(event) {
  if (shouldFetch(event)) {
    event.respondWith(
      caches.match(event.request).then(function(response) {
        if (response !== undefined) {
          return response;
        } else {
          return fetch(event.request).then(function (response) {
            let responseClone = response.clone();
            caches.open('v1').then(function (cache) {
              cache.put(event.request, responseClone);
            });
            return response;
          }).catch(function (err) {
            return caches.match(event.request);
          });
        }
      })
    );
  }
});
I know we can use caches.delete() and so on, but I want to call it only when the md5 in the new request has changed.
Thanks
You can accomplish roughly what you describe with the following, which makes use of the ignoreSearch option when calling cache.matchAll():
self.addEventListener('fetch', (event) => {
  const CACHE_NAME = '...';
  const url = new URL(event.request.url);
  if (url.searchParams.has('md5')) {
    event.respondWith((async () => {
      const cache = await caches.open(CACHE_NAME);
      const cachedResponses = await cache.matchAll(url.href, {
        // https://developers.google.com/web/updates/2016/09/cache-query-options
        ignoreSearch: true,
      });
      for (const cachedResponse of cachedResponses) {
        // If we already have the incoming URL cached, return it.
        if (cachedResponse.url === url.href) {
          return cachedResponse;
        }
        // Otherwise, delete the out of date response.
        await cache.delete(cachedResponse.url);
      }
      // If we've gotten this far, then there wasn't a cache match,
      // and our old entries have been cleaned up.
      const response = await fetch(event.request);
      await cache.put(event.request, response.clone());
      return response;
    })());
  }
  // Logic for non-md5 use cases goes here.
});
(You could make things slightly more efficient by rearranging some of the cache-manipulation code to bring it out of the critical response path, but that's the basic idea.)
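For illustration only (not part of the original answer), the same idea with the cache writes moved out of the critical response path via event.waitUntil() might look like this:

self.addEventListener('fetch', (event) => {
  const CACHE_NAME = '...';
  const url = new URL(event.request.url);
  if (!url.searchParams.has('md5')) {
    return;
  }
  event.respondWith((async () => {
    const cache = await caches.open(CACHE_NAME);
    const cached = await cache.match(event.request);
    if (cached) {
      return cached;
    }
    const response = await fetch(event.request);
    const copy = response.clone(); // clone before the page consumes the body
    // Defer the cleanup of older md5 variants and the cache write so they
    // don't delay the response.
    event.waitUntil((async () => {
      const older = await cache.matchAll(url.href, { ignoreSearch: true });
      await Promise.all(older.map((r) => cache.delete(r.url)));
      await cache.put(event.request, copy);
    })());
    return response;
  })());
});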

Cannot connect with API, so couldn't retrieve post from DB?

I ran into a problem after moving my project into https://github.com/DaftMonk/generator-angular-fullstack. Before the move, my project was working. Here is the code for the frontend and backend parts related to comments.
I keep getting a 404 error; I don't know why the following path cannot be found:
POST http://localhost:9000/api/providers/554a1dba53d9ca8c2a2a31ff/posts/554b1726f1116e00256e3d82/comments 404 (Not Found)
I have been struggling for a couple of days to find which part of my code has the problem, but I couldn't work it out.
server side
in comment.controller
// Creates a new comment in the DB.
exports.create = function(req, res) {
  console.log('i ma inside api');
  Post.findById(req.originalUrl.split('/')[3], function (err, post) { // here it cannot find the post at all
    if (err) {
      return handleError(res, err);
    }
    if (!post) {
      return res.status(404).send('Post not found');
    }
    Comment.create(req.body, function (err, comment) {
      if (err) {
        return handleError(res, err);
      }
      post.comments.push(comment.id);
      post.save(function (err) {
        if (err) return handleError(res, err);
        return res.status(201).json(comment);
      });
    });
  });
};
route.js
app.use('/api/providers/:providerId/posts/:postId/comments', require('./api/provider/post/comment'));
index.js
var express = require('express');
var controller = require('./comment.controller.js');

var router = express.Router();

router.get('/', controller.index);
router.get('/:id', controller.show);
router.post('/', controller.create);
router.put('/:id', controller.update);
router.patch('/:id', controller.update);
router.delete('/:id', controller.destroy);
router.put('/:id/upvote', controller.upvote);

module.exports = router;
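One thing worth checking (an assumption on my part, not something stated in the question): because this router is mounted at '/api/providers/:providerId/posts/:postId/comments', the parent parameters only show up on req.params when the router is created with mergeParams, which would also remove the need to parse req.originalUrl by hand:

// Hypothetical variant of the router/controller wiring shown above.
var router = express.Router({ mergeParams: true });

// In comment.controller.js the ids from the mount path are then available directly:
exports.create = function (req, res) {
  var postId = req.params.postId; // instead of req.originalUrl.split('/')[3]
  Post.findById(postId, function (err, post) {
    // ... same logic as in the controller above
  });
};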
in client side:
factory:
//create new comment for post
ob.createComment = function(providerId, postId, comment) {
  console.log('i am inside factory');
  return $http.post('/api/providers/' + providerId + '/posts/' + postId + '/comments', comment, {
    headers: { Authorization: 'Bearer ' + Auth.getToken() }
  }).success(function(data) {
    _.forEach(ob.provider.posts, function(value, index) {
      if (value._id === post._id) {
        ob.posts[index].comments.push(data);
      }
    });
    ob.current.comments.push(data);
    // ob.provider1._id.posts.push(data);
  });
};
in my controller
$scope.addComment = function() {
  // if(!$scope.title || $scope.title === '') { return; }
  if (!$scope.body || $scope.body === '') { return; }
  console.log('$stateParams', $stateParams);
  providers.createComment($stateParams.providerId, $stateParams.postId, {
    // title: $scope.title,
    body: $scope.body
  });
  $scope.body = '';
  $scope.title = '';
};
This is the model I use throughout the whole project.
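The model itself is not included above; based only on the controller code, it presumably looks roughly like the following (a hypothetical Mongoose sketch, not the actual schema):

var mongoose = require('mongoose');

// Hypothetical schemas, inferred only from the controller code above.
var CommentSchema = new mongoose.Schema({
  body: String
});

var PostSchema = new mongoose.Schema({
  body: String,
  comments: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Comment' }]
});

module.exports = {
  Comment: mongoose.model('Comment', CommentSchema),
  Post: mongoose.model('Post', PostSchema)
};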
