I am trying to cache specific URLs, and each URL has an md5 hash. If a URL is updated with a new md5, I want to remove the current cache entry and add the new one.
Cached URL: http://www.mysite.lo/cards/index.php?md5=f51c2ef7795480ef2e0b1bd24c9e07
function shouldFetch(event) {
  if (event.request.url.indexOf('/cards/') === -1) {
    return false;
  }
  return true;
}
self.addEventListener('fetch', function(event) {
  if (shouldFetch(event)) {
    event.respondWith(
      caches.match(event.request).then(function(response) {
        if (response !== undefined) {
          return response;
        } else {
          return fetch(event.request).then(function(response) {
            let responseClone = response.clone();
            caches.open('v1').then(function(cache) {
              cache.put(event.request, responseClone);
            });
            return response;
          }).catch(function(err) {
            return caches.match(event.request);
          });
        }
      })
    );
  }
});
I know we can use caches.delete() and so on, but I want to call it only if the md5 in the new request has changed.
Thanks
You can accomplish roughly what you describe with the following, which makes use of the ignoreSearch option when calling cache.matchAll():
self.addEventListener('fetch', (event) => {
  const CACHE_NAME = '...';
  const url = new URL(event.request.url);

  if (url.searchParams.has('md5')) {
    event.respondWith((async () => {
      const cache = await caches.open(CACHE_NAME);
      const cachedResponses = await cache.matchAll(url.href, {
        // https://developers.google.com/web/updates/2016/09/cache-query-options
        ignoreSearch: true,
      });

      for (const cachedResponse of cachedResponses) {
        // If we already have the incoming URL cached, return it.
        if (cachedResponse.url === url.href) {
          return cachedResponse;
        }
        // Otherwise, delete the out-of-date response.
        await cache.delete(cachedResponse.url);
      }

      // If we've gotten this far, then there wasn't a cache match,
      // and our old entries have been cleaned up.
      const response = await fetch(event.request);
      await cache.put(event.request, response.clone());
      return response;
    })());
  }

  // Logic for non-md5 use cases goes here.
});
(You could make things slightly more efficient by rearranging some of the cache-manipulation code to bring it out of the critical response path, but that's the basic idea.)
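For instance, here is a minimal sketch of that rearrangement (reusing the CACHE_NAME placeholder from above); the stale-entry cleanup and the cache.put() move into event.waitUntil(), so the response is no longer blocked on them:

self.addEventListener('fetch', (event) => {
  const CACHE_NAME = '...';
  const url = new URL(event.request.url);

  if (url.searchParams.has('md5')) {
    event.respondWith((async () => {
      const cache = await caches.open(CACHE_NAME);

      // An exact match means the md5 hasn't changed; return it directly.
      const cachedResponse = await cache.match(event.request);
      if (cachedResponse) {
        return cachedResponse;
      }

      const response = await fetch(event.request);
      const responseClone = response.clone();

      // Defer deleting stale entries and writing the new one;
      // the response is returned without waiting for cache maintenance.
      event.waitUntil((async () => {
        const staleResponses = await cache.matchAll(url.href, { ignoreSearch: true });
        await Promise.all(staleResponses.map((stale) => cache.delete(stale.url)));
        await cache.put(event.request, responseClone);
      })());

      return response;
    })());
  }
});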
I want to achieve something like this:
call my website URL https://mywebsite/api/something
then my Next.js website API will call an external API
get the external API data
update the external API data in the MongoDB database one by one
then return a response with its status.
The code below is working: the data does get updated in MongoDB, but when I request my API URL it responds very quickly and only then updates the data in the database.
But I want it to first update the data in the database and then respond, no matter how much time it takes.
Below is my code:
export default async function handler(req, res) {
  async function updateServer() {
    return new Promise(async function (resolve, reject) {
      const statusArray = [];
      const apiUrl = `https://example.com/api`;
      const response = await fetch(apiUrl, { headers: { "Content-Type": "application/json" } });
      const newsResults = await response.json();
      const articles = newsResults["articles"];
      for (let i = 0; i < articles.length; i++) {
        const article = articles[i];
        try {
          const insertionData = {};
          insertionData["title"] = article["title"];
          insertionData["description"] = article["description"];
          MongoClient.connect(mongoUri, async function (error, db) {
            if (error) throw error;
            const articlesCollection = db.db("database").collection("collectionname");
            const customQuery = { url: article["url"] };
            const customUpdate = { $set: insertionData };
            const customOptions = { upsert: true };
            const status = await articlesCollection.updateOne(customQuery, customUpdate, customOptions);
            statusArray.push(status);
            db.close();
          });
        } catch (error) {
          console.log(error);
        }
      }
      if (statusArray) {
        console.log("success", statusArray.length);
        resolve(statusArray);
      } else {
        console.log("error");
        reject("reject because no statusArray");
      }
    });
  }

  updateServer().then(
    function (statusArray) {
      return res.status(200).json({ "response": "success", "statusArray": statusArray }).end();
    }
  ).catch(
    function (error) {
      return res.status(500).json({ "response": "error" }).end();
    }
  );
}
How to achieve that?
Any suggestions are always welcome!
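One way to achieve this is to drop the Promise/callback mixing and await every database write before sending the response. The code above resolves the promise before the MongoClient.connect() callbacks have run, which is why the response arrives first. Here is a minimal sketch, assuming the MongoDB Node driver's promise API and reusing mongoUri plus the database/collection names from the snippet above:

import { MongoClient } from "mongodb";

// mongoUri is assumed to be defined elsewhere, as in the original handler.
export default async function handler(req, res) {
  try {
    const apiResponse = await fetch("https://example.com/api", {
      headers: { "Content-Type": "application/json" },
    });
    const { articles } = await apiResponse.json();

    // Open one connection for the whole batch instead of one per article.
    const client = await MongoClient.connect(mongoUri);
    const collection = client.db("database").collection("collectionname");

    const statusArray = [];
    for (const article of articles) {
      // Awaiting each update keeps the handler from responding early.
      const status = await collection.updateOne(
        { url: article.url },
        { $set: { title: article.title, description: article.description } },
        { upsert: true }
      );
      statusArray.push(status);
    }
    await client.close();

    return res.status(200).json({ response: "success", statusArray });
  } catch (error) {
    console.log(error);
    return res.status(500).json({ response: "error" });
  }
}

Because every updateOne() is awaited before res.status(200) runs, the response is only sent once all documents have been written, no matter how long that takes.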
Following is the code for the service worker, which is all working fine. I was previously caching all the dynamic pages, but this was creating some issues: DOM changes made after user interaction were not reflected on the next page view; it always showed the original DOM.
So I need to cache only images dynamically. I have commented out the original code, which was caching all content.
self.addEventListener('activate', function(event) {
  console.log('[Service Worker] Activating Service Worker ....', event);
  /*event.waitUntil(
    caches.keys()
      .then(function(keyList) {
        return Promise.all(keyList.map(function(key) {
          if (key !== CACHE_STATIC_NAME && key !== CACHE_DYNAMIC_NAME) {
            console.log('[Service Worker] Removing old cache.', key);
            return caches.delete(key);
          }
        }));
      })
  );*/
  return self.clients.claim();
});
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.match(event.request)
      .then(function(response) {
        if (response) {
          return response;
        } else {
          /*return fetch(event.request)
            .then(function(res) {
              return caches.open(CACHE_DYNAMIC_NAME)
                .then(function(cache) {
                  /!*if (event.request.url.indexOf('maps.google') !== -1) {
                    return false;
                  }*!/
                  if (!/^https?:$/i.test(new URL(event.request.url).protocol)) {
                    return;
                  }
                  cache.put(event.request.url, res.clone());
                  return res;
                });
            })
            .catch(function(err) {
              console.log('show offline page, as cache and network are not available');
              return caches.open(CACHE_STATIC_NAME)
                .then(function(cache) {
                  return cache.match(OFFLINE_URL);
                });
            });*/
          return fetch(event.request);
        }
      })
  );
});
I'd recommend following the approach outlined in this "Service Worker Caching Strategies Based on Request Types" article, and use request.destination inside of your fetch handler to figure out which requests are going to be used for images.
self.addEventListener('fetch', (event) => {
  if (event.request.destination === 'image') {
    event.respondWith(/* your caching logic here */);
  }

  // If you don't call event.respondWith() for some requests,
  // the normal loading behavior will be used by default.
});
It's possible that a request for an image might be loaded via something like XMLHttpRequest, in which case the request.destination value likely won't be set properly. If that's the case, I'd recommend just checking the portion of the URL you feel is most likely to be unique using string comparisons.
self.addEventListener('fetch', (event) => {
  const url = new URL(event.request.url);

  if (url.origin.includes('maps.google')) {
    event.respondWith(/* your caching logic here */);
  }

  // If you don't call event.respondWith() for some requests,
  // the normal loading behavior will be used by default.
});
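For completeness, the placeholder caching logic in either handler could be a simple cache-first helper along these lines; note that the 'images' cache name is an assumption, not something from the original question:

async function imageCacheFirst(request) {
  const cache = await caches.open('images'); // hypothetical cache name
  const cached = await cache.match(request);
  if (cached) {
    return cached;
  }

  const response = await fetch(request);
  // Only cache complete, successful responses; clone first, because
  // a Response body can only be consumed once.
  if (response.ok) {
    await cache.put(request, response.clone());
  }
  return response;
}

// Inside the fetch handler:
// event.respondWith(imageCacheFirst(event.request));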
I need the server's response in the right format to allow me to traverse it like a list of MongoDB documents.
The console.log shows the data I want as [{"_id": "0YHYT54", etc.}], so it's all there.
However, it is a string, which makes my attempt to traverse it as a MongoDB document fail.
function clientChanged(selectInput) {
  if (selectInput.selectedIndex === -1) {
    selectInput.selectedIndex = 0;
  }
  selectedClientId = selectInput.options[selectInput.selectedIndex].value;
  getClientMatters(selectedClientId, (matters) => {
    console.log("returned Type: " + typeof matters);
    // <-- I need to process the returned data here, but it is a string.
  });
}
function getClientMatters(clientId, cb) {
  let xhttp = new XMLHttpRequest();
  xhttp.onreadystatechange = function() {
    if (this.readyState == 4 && this.status == 200) {
      cb(this.response);
    }
  };
  xhttp.open("GET", "../matters/getByClientId?clientId=" + clientId, true);
  xhttp.send();
}
Here is my model, which returns the data from the server:
Matters.getOpenMattersByClientId(clientId, function (err, matters) {
  if (err) {
    return res(err);
  } else {
    return res.json(matters);
  }
});
Also, this is my first attempt at using AJAX to get my MongoDB data. If the above code reveals other issues, any tips would be appreciated.
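Assuming the server really does send JSON (which res.json(matters) does), the string just needs to be parsed on the client before use. A minimal sketch of the adjusted helper:

function getClientMatters(clientId, cb) {
  let xhttp = new XMLHttpRequest();
  xhttp.responseType = "json"; // ask the browser to parse the body for you
  xhttp.onreadystatechange = function () {
    if (this.readyState === 4 && this.status === 200) {
      // With responseType = "json", this.response is already an object/array.
      // If you leave responseType unset, call JSON.parse(this.responseText) here instead.
      cb(this.response);
    }
  };
  xhttp.open("GET", "../matters/getByClientId?clientId=" + clientId, true);
  xhttp.send();
}

After that, matters can be traversed like any array of documents, e.g. matters[0]._id.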
I registered the '/' route and my .css and .js files as URLs that should be cached at first.
But after that I realized that it cached the whole page, which means I don't see any updates in my view, even when there is an update in the database.
So I changed it to only cache my .css and .js files, not the '/' route anymore, since I expected that was the problem.
But after a while, the same problem still occurred. I checked my console, and it did cache the whole page again, even though my service worker file had already been changed to this:
var CACHE_NAME = 'cache-v2';
var urlsToCache = [
  '/assets/css/app.css',
  '/assets/js/main.js',
  '/assets/js/other.js'
];

self.addEventListener('install', function(event) {
  event.waitUntil(
    caches.open(CACHE_NAME)
      .then(function(cache) {
        console.log('Opened cache');
        return cache.addAll(urlsToCache);
      })
  );
});
self.addEventListener('activate', function(e) {
  console.log('[ServiceWorker] Activate');
  e.waitUntil(
    caches.keys().then(function(keyList) {
      return Promise.all(keyList.map(function(key) {
        if (key !== CACHE_NAME) {
          console.log('[ServiceWorker] Removing old cache', key);
          return caches.delete(key);
        }
      }));
    })
  );
  return self.clients.claim();
});
self.addEventListener('fetch', function(event) {
  event.respondWith(
    caches.match(event.request)
      .then(function(response) {
        // Cache hit - return response
        if (response) {
          return response;
        }

        var fetchRequest = event.request.clone();
        return fetch(fetchRequest).then(
          function(response) {
            if (!response || response.status !== 200 || response.type !== 'basic') {
              return response;
            }

            var responseToCache = response.clone();
            caches.open(CACHE_NAME)
              .then(function(cache) {
                cache.put(event.request, responseToCache);
              });

            return response;
          }
        );
      })
  );
});
In case anyone has the same problem, here is what I had to change:
self.addEventListener('fetch', function(e) {
  console.log('[ServiceWorker] Fetch', e.request.url);
  e.respondWith(
    caches.match(e.request).then(function(response) {
      return response || fetch(e.request);
    })
  );
});
So my previous fetch handler looks like it was the problem: it called cache.put() for every successful same-origin response, which included full page navigations, not just the assets in urlsToCache.
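If someone in the same situation still wants runtime caching for the static assets only, a hedged variation (reusing CACHE_NAME and urlsToCache from the snippets above) would be to gate the cache.put() on the request path:

self.addEventListener('fetch', function(e) {
  e.respondWith(
    caches.match(e.request).then(function(response) {
      if (response) {
        return response;
      }
      return fetch(e.request).then(function(networkResponse) {
        var path = new URL(e.request.url).pathname;
        // Only write back URLs that were meant to be cached, so
        // navigations to '/' never end up in the cache.
        if (urlsToCache.indexOf(path) !== -1) {
          var clone = networkResponse.clone();
          caches.open(CACHE_NAME).then(function(cache) {
            cache.put(e.request, clone);
          });
        }
        return networkResponse;
      });
    })
  );
});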
Is it possible to execute the same HTTP request more than once in AngularJS? i.e. without re-defining the same request twice?
var retry = false;
var req = $http.get('ajax.php?a=StartSession&ref=' + code);

req.success(function(res) {
  alert(res);
});

req.error(function(res) {
  if (retry == false) {
    // run request again... req.get()?
    retry = true;
  }
});
The other answer is good in terms of reusing the request as a service, but it looks like you really want to abstract out the retry logic as well. Here is how I would do that.
app.service('SessionService', ['$http', '$q', function($http, $q) {
  var _this = this;
  var _maxRetryCount = 5; // Just have a max retry count.

  this.StartSession = function(code, retries) {
    // If you don't pass retries, use the max retry count.
    retries = angular.isUndefined(retries) ? _maxRetryCount : retries;
    return $http.get('ajax.php?a=StartSession&ref=' + code)
      .then(function(result) {
        // Process and return the result.
        return result.data;
      }, function(errorResponse) {
        // If retries are left, decrement the count and make the call again.
        if (retries) {
          return _this.StartSession(code, --retries); // Here we return the promise.
        }
        // All retries are exhausted; fail (or return some fallback data).
        return $q.reject('oops failed after retries');
      });
  };
}]);
And just inject SessionService anywhere, say in your controller:
SessionService.StartSession(code).then(function(result) {
  // handle result
}).catch(function() {
  // handle fail condition
});
It's what services and factories were made for:
app.factory("dataFactory", ["$http", function($http) {
  return {
    call: function(code) {
      return $http.get('ajax.php?a=StartSession&ref=' + code);
    }
  };
}]);
Inject and use:
app.controller("myCtrl", ["dataFactory", function(dataFactory) {
  var code = "myCode";
  dataFactory.call(code).success(function(res) {
    // gotcha
  });
}]);
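One caveat: the .success()/.error() helpers were removed from $http in AngularJS 1.6, so on newer 1.x versions the consuming controller would use .then() instead, with the payload on response.data:

app.controller("myCtrl", ["dataFactory", function(dataFactory) {
  var code = "myCode";
  dataFactory.call(code).then(function(response) {
    var res = response.data; // the payload now lives on response.data
    // gotcha
  });
}]);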