Exception: argument too large: value Google Script - caching

I'm trying to scrape a website and put the value in the cache so I don't hit the daily limit of UrlFetchApp.
Here is my script:
/**
 * Scrape URL, return whatever you choose with jquery-style selectors.
 * Dependency: cheeriogs, see https://github.com/fgborges/cheeriogs
 *
 * @param {string} url - valid start-url
 * @return {Array} result values
 *
 * @customfunction
 */
function scrapercache(url) {
  var result = [];
  var description;
  var options = {
    'muteHttpExceptions': true,
    'followRedirects': false,
  };
  var cache = CacheService.getScriptCache();
  var properties = PropertiesService.getScriptProperties();
  try {
    let res = cache.get(url);
    if (!res) {
      // trim url to prevent (rare) errors; note the result must be assigned back
      url = url.toString().trim();
      var r = UrlFetchApp.fetch(url, options);
      var c = r.getResponseCode();
      // check for meta refresh if 200 ok
      if (c == 200) {
        var html = r.getContentText();
        cache.put(url, "cached", 21600);
        properties.setProperty(url, html);
        var $ = Cheerio.load(html); // make sure this lib is added to your project!
        // meta description
        if ($('meta[name=description]').attr("content")) {
          description = $('meta[name=description]').attr("content").trim();
        }
      }
      result.push([description]);
    }
  }
  catch (error) {
    result.push(error.toString());
  }
  finally {
    return result;
  }
}
But when I call the function like this:
=scrapercache("https://www.gurufocus.com/term/total_freecashflow/nyse:ABBV/Free-Cash-Flow")
I get the error message:
Exception: argument too large: value
Can anyone help me, please?
Thank you :)
Gabriel

As written in the official documentation,
The maximum length of a key is 250 characters. The maximum amount of data that can be stored per key is 100KB.
If the size of the data put in the cache exceeds either of the above limitations, the error
Exception: argument too large
is shown. In your case, the value exceeds 100KB. The solution would be to cache only the necessary data, or not to cache at all, depending on your specific needs.
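For illustration, here is a minimal sketch of the "cache only necessary data" approach: extract the small piece you actually need (here, the meta description) and cache only that string instead of the full HTML. This assumes the same cheeriogs dependency as in the question; the function name is made up:
function getDescription(url) {
  var cache = CacheService.getScriptCache();
  var cached = cache.get(url); // note: cache keys are limited to 250 characters
  if (cached) {
    return cached;
  }
  var response = UrlFetchApp.fetch(url, { 'muteHttpExceptions': true });
  if (response.getResponseCode() !== 200) {
    return '';
  }
  var $ = Cheerio.load(response.getContentText());
  var description = ($('meta[name=description]').attr('content') || '').trim();
  // A description is a few hundred bytes at most, far below the 100KB value limit.
  cache.put(url, description, 21600);
  return description;
}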

Related

Exception: Service invoked too many times for one day: urlfetch

I created a script in Google Sheets which is working well, but after a while I'm getting the following error:
Exception: Service invoked too many times for one day: urlfetch
I think I called the function around 200-300 times in the day; from what I checked, that should be below the limit.
I read that we can use a cache to avoid this issue, but I'm not sure how to use it in my code.
function scrapercache(url) {
  var result = [];
  var description;
  var options = {
    'muteHttpExceptions': true,
    'followRedirects': false,
  };
  var cache = CacheService.getScriptCache();
  var properties = PropertiesService.getScriptProperties();
  try {
    let res = cache.get(url);
    if (!res) {
      // trim url to prevent (rare) errors; note the result must be assigned back
      url = url.toString().trim();
      var r = UrlFetchApp.fetch(url, options);
      var c = r.getResponseCode();
      // check for meta refresh if 200 ok
      if (c == 200) {
        var html = r.getContentText();
        cache.put(url, "cached", 21600);
        properties.setProperty(url, html);
        var $ = Cheerio.load(html); // make sure this lib is added to your project!
        // meta description
        if ($('meta[name=description]').attr("content")) {
          description = $('meta[name=description]').attr("content").trim();
        }
      }
      result.push([description]);
    }
  }
  catch (error) {
    result.push(error.toString());
  }
  finally {
    return result;
  }
}
How can I use a cache like this to enhance my script?
var cache = CacheService.getScriptCache();
var result = cache.get(url);
if (!result) {
  var response = UrlFetchApp.fetch(url);
  result = response.getContentText();
  cache.put(url, result, 21600);
}
Answer:
You can implement CacheService and PropertiesService together and only retrieve the URL again after a specified amount of time.
Code Change:
Be aware that the additional calls to retrieve the cache and properties will slow your function down, especially if you are doing this a few hundred times.
As cache values can be a maximum of 100 KB each, we use CacheService to keep track of which URLs have already been retrieved, and PropertiesService to store the data itself.
You can edit your try block like so:
var cache = CacheService.getScriptCache();
var properties = PropertiesService.getScriptProperties();
try {
  let res = cache.get(url);
  if (!res) {
    // trim url to prevent (rare) errors; note the result must be assigned back
    url = url.toString().trim();
    var r = UrlFetchApp.fetch(url, options);
    var c = r.getResponseCode();
    // check for meta refresh if 200 ok
    if (c == 200) {
      var html = r.getContentText();
      // The cache entry is just a flag that marks the URL as fetched;
      // the (potentially large) HTML goes into script properties.
      cache.put(url, "cached", 21600);
      properties.setProperty(url, html);
      var $ = Cheerio.load(html); // make sure this lib is added to your project!
      // meta description
      if ($('meta[name=description]').attr("content")) {
        description = $('meta[name=description]').attr("content").trim();
      }
    }
    result.push([description]);
  }
}
catch (error) {
  result.push(error.toString());
}
finally {
  return result;
}
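Note that, as shown, the try block leaves result empty when the URL is already cached. A sketch of an else branch (not part of the original answer) that reads the stored HTML back from PropertiesService and parses it the same way could be appended to the if (!res) block:
else {
  // Cache hit: re-use the HTML stored in script properties instead of re-fetching.
  var storedHtml = properties.getProperty(url);
  if (storedHtml) {
    var $ = Cheerio.load(storedHtml);
    if ($('meta[name=description]').attr("content")) {
      description = $('meta[name=description]').attr("content").trim();
    }
  }
  result.push([description]);
}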
References:
Class CacheService | Apps Script | Google Developers
Class Cache | Apps Script | Google Developers
Class PropertiesService | Apps Script | Google Developers
Related Questions:
Service invoked too many times for one day: urlfetch

How to create repeating form section in ServiceNow

I'm new to ServiceNow and about to start creating a new form. One of the requirements is a repeating form section (the image below shows the design of the component).
Is there any default component in ServiceNow for this, or do we need to create a custom widget?
There is no OOB (out-of-box) way to do this.
The way that I've solved this problem in the past was as follows:
1. Create a single variable or set of variables representing the data you want to capture.
2. Create a UI Macro/button "Add".
3. When clicked, that button should trigger a script which adds the data from the fields into a JSON object, which is then used to populate an HTML element with a friendly-looking representation of the data.
Here are some swatches of code I saved to do that, but keep in mind that I did this about a year ago, so it's not super fresh.
"Add" button:
<?xml version="1.0" encoding="utf-8" ?>
<!--
  UI Page
  Name: http_addServerButton_loadBalancer
  Category: General
  Direct: false

  Note that this will change slightly in name, and in the argument being passed, depending on whether the button is for the http, https, or TCP sections.
-->
<j:jelly trim="false"
         xmlns:j="jelly:core"
         xmlns:g="glide"
         xmlns:j2="null"
         xmlns:g2="null">
  <script language="javascript"
          src="addServerButtonClicked.jsdbx" />
  <button name="add"
          onclick="addServerButtonClicked('http')">Add
  </button>
</j:jelly>
UI Script that handled the add actions:
/*
  UI Script
  Name: addServerButtonClicked
  Category: General
  Direct: false
*/
/***********BEGIN***********/
// todo: Create onSubmit validation. Make sure users don't submit a form with existing server name/port data filled out but not added to the JSON.
// todo: Change add new button name to "add server".
/**
 * This function is called when the "Add Server" button is clicked.
 */
function addServerButtonClicked(protocol) {
  g_form.hideFieldMsg('server_name_' + protocol, true);
  g_form.hideFieldMsg('server_port_' + protocol, true);
  //todo: validate the server AND port are filled out for the specific protocol selected, using "'server_name_' + protocol" and "'server_port_' + protocol"
  //todo: If not BOTH populated, then add field message and tell the user that they are a bad user and should feel bad about their life choices.
  var isFormValid = validateForm(protocol);
  if (isFormValid) {
    var fieldName = 'server_name_' + protocol;
    g_form.getReference(fieldName, function(gr) {
      // cheap way to combine relevant objects from differing scopes without writing a gigantic anonymous inline function.
      buildDataObject(gr, protocol);
    });
  } else {
    alert('form invalid'); // todo: remove
    // todo: throw some error or something
  }
}
/**
 * This function is called whenever a new server is added to the request. It parses the existing JSON data into an object, and then goes about adding on to it.
 * @param serverGR {GlideRecord} - The GlideRecord object returned from the asynchronous getReference query that's run when the add button is clicked. This param is auto-populated.
 */
function buildDataObject(serverGR, protocol) {
  if (!serverGR) {
    console.error('No valid server was found.');
  }
  // Grab the value of the JSON Data field
  var existingJsonData = g_form.getValue('json_data');
  // If the JSON Data field already contains some existing JSON data, use that as the starting
  // point for our object. Otherwise (if this is the first entry), declare a new object.
  var dataObject = existingJsonData ? JSON.parse(existingJsonData) : {
    http: {},
    https: {},
    tcp: {}
  };
  //todo: write and call a function to get the protocol header info crap and append it to the object.
  // Set the "serverName" property to either the server's name, or (if no name exists for this server),
  // its IP address, or (if no name OR IP address exists for this server), its sys_id.
  var requestedFor = g_form.getValue('requested_for');
  var serverSysId = serverGR.getValue('sys_id');
  var serverIP = serverGR.getValue('ip_address');
  var serverName = serverGR.getValue('name') ? serverGR.getValue('name') : (serverIP ? serverIP : serverSysId);
  var serverPort;
  var tcpIPPort;
  var lbMethod;
  var persistence;
  var monitorRequest;
  var monitorResponse;
  // Okay, yeah, so I could've used one line to set each of these vars, and used syntax like "'server_port_' + protocol.toLowerCase()".
  // But instead of that, I did it this way. Why? Because I thought for a minute at 2AM that this would help future-me, in the event that
  // we ever had stupid variable names to compete with. Sooo... behold, the pointless switch-case block.
  switch (protocol.toLowerCase()) {
    case 'http':
      serverPort = g_form.getValue('server_port_http');
      tcpIPPort = g_form.getValue('tcp_ip_http');
      lbMethod = g_form.getValue('lb_method_http');
      persistence = g_form.getValue('persistence_http');
      monitorRequest = g_form.getValue('monitor_request_http');
      monitorResponse = g_form.getValue('monitor_response_http');
      // Clear the data from these two fields so it's clear that they need to be re-populated.
      g_form.setValue('server_port_http', '');
      g_form.setValue('server_name_http', '');
      break;
    case 'https':
      serverPort = g_form.getValue('server_port_https');
      tcpIPPort = g_form.getValue('tcp_ip_https');
      lbMethod = g_form.getValue('lb_method_https');
      persistence = g_form.getValue('persistence_https');
      monitorRequest = g_form.getValue('monitor_request_https');
      monitorResponse = g_form.getValue('monitor_response_https');
      // Clear the data from these two fields so it's clear that they need to be re-populated.
      g_form.setValue('server_port_https', '');
      g_form.setValue('server_name_https', '');
      break;
    case 'tcp':
      serverPort = g_form.getValue('server_port_tcp');
      tcpIPPort = g_form.getValue('tcp_ip_tcp');
      lbMethod = g_form.getValue('lb_method_tcp');
      persistence = g_form.getValue('persistence_tcp');
      monitorRequest = g_form.getValue('monitor_request_tcp');
      monitorResponse = g_form.getValue('monitor_response_tcp');
      // Clear the data from these two fields so it's clear that they need to be re-populated.
      g_form.setValue('server_port_tcp', '');
      g_form.setValue('server_name_tcp', '');
      break;
  }
  if (!serverIP || !serverSysId || !serverPort || !serverName) {
    // return; //Halt execution, since we don't have some of the data we need.
    // todo: re-enable the above line after testing. Need to figure out how to handle errors, and what constitutes an error.
    console.error('Not able to get one of these important values: [IP, Sys ID, Port, Server]');
  }
  // Populate the data object.
  // Using bracket-notation here, in order to use a variable name as the object property name.
  dataObject[protocol][serverSysId] = {};
  dataObject[protocol][serverSysId].name = serverName;
  dataObject[protocol][serverSysId].port = serverPort;
  dataObject[protocol][serverSysId].sysid = serverSysId;
  dataObject[protocol][serverSysId].ip = serverIP;
  console.log(dataObject);
  var dataSummary = '';
  /*
    This bit's pretty complex.
    For each "prot" (protocol) in the outermost object,
    check if the object corresponding to that protocol is truthy (not empty).
    If it isn't empty, insert a header (H3) for that protocol/section.
    Then, for each "prop" (server element) in that protocol object, print out some details about it.
  */
  for (var prot in dataObject) {
    if (!isObjEmpty(dataObject[prot]) && dataObject.hasOwnProperty(prot) && prot !== 'requestor' && prot !== 'generalInfo') {
      dataSummary += '<h3>' + prot.toUpperCase() + '</h3>';
      for (var prop in dataObject[prot]) {
        if (dataObject[prot].hasOwnProperty(prop) && prop !== 'protocolDetails') {
          dataSummary += '<b>Server name</b>: ' + dataObject[prot][prop].name + '<br />Server Sys ID: ' + dataObject[prot][prop].sysid + '<br />IP: ' + dataObject[prot][prop].ip + '<br />Port: ' + dataObject[prot][prop].port + '<br /><br />';
        }
      }
    }
  }
  dataObject = populateObjectMeta(dataObject);
  g_form.setValue('request_summary', dataSummary);
  g_form.setValue('json_data', JSON.stringify(dataObject));
  g_form.setVisible('request_summary', true);
}
function populateObjectMeta(dataObject) {
  var i;
  // Get boolean values for the three check-boxes representing whether the user wants HTTP, HTTPS, or TCP.
  var httpSelected = isThisTrueOrWhat(g_form.getValue('http_select'));
  var httpsSelected = isThisTrueOrWhat(g_form.getValue('https_select'));
  var tcpSelected = isThisTrueOrWhat(g_form.getValue('tcp_select')); //todo: use these to populate more metadata in the relevant object
  // Populate requestor details
  dataObject.requestor = {};
  dataObject.requestor.name = g_form.getReference('requested_by').getValue('name');
  dataObject.requestor.sysID = g_form.getValue('requested_by');
  dataObject.requestor.email = g_form.getValue('email');
  // Populate general load balancing details
  dataObject.generalInfo = {};
  dataObject.generalInfo.dnsHost = g_form.getValue('dns_host');
  dataObject.generalInfo.domainName = g_form.getValue('domain_name');
  dataObject.generalInfo.application = isThisTrueOrWhat(g_form.getValue('application_not_found')) ? g_form.getValue('application_name_text') : g_form.getReference('application_name_ref').getValue('name');
  dataObject.generalInfo.lifeCycle = g_form.getValue('lifecycle');
  dataObject.generalInfo.site = g_form.getReference('site').getValue('name');
  // Populate protocol details for HTTP, HTTPS, and TCP.
  var selectedProtocols = getSelectedProtocols();
  console.log(selectedProtocols);
  for (i = 0; i < selectedProtocols.length; i++) {
    console.log('Adding data to dataObj with selected protocol ' + [selectedProtocols[i]]);
    dataObject[selectedProtocols[i]].protocolDetails = {};
    dataObject[selectedProtocols[i]].protocolDetails.tcpIPPort = g_form.getValue('tcp_ip_' + selectedProtocols[i]);
    dataObject[selectedProtocols[i]].protocolDetails.lbMethod = g_form.getValue('lb_method_' + selectedProtocols[i]);
    dataObject[selectedProtocols[i]].protocolDetails.persistence = g_form.getValue('persistence_' + selectedProtocols[i]);
    dataObject[selectedProtocols[i]].protocolDetails.monitorRequest = g_form.getValue('monitor_request_' + selectedProtocols[i]);
    dataObject[selectedProtocols[i]].protocolDetails.monitorResponse = g_form.getValue('monitor_response_' + selectedProtocols[i]);
  }
  return dataObject;
}
function getSelectedProtocols() {
  var selectedProtocols = [];
  var httpSelected = isThisTrueOrWhat(g_form.getValue('http_select'));
  var httpsSelected = isThisTrueOrWhat(g_form.getValue('https_select'));
  var tcpSelected = isThisTrueOrWhat(g_form.getValue('tcp_select'));
  if (httpSelected) {
    selectedProtocols.push('http');
  }
  if (httpsSelected) {
    selectedProtocols.push('https');
  }
  if (tcpSelected) {
    selectedProtocols.push('tcp');
  }
  return selectedProtocols;
}
function validateForm(protocol) {
  var port;
  switch (protocol.toLowerCase()) {
    case 'http':
      port = g_form.getValue('server_port_http');
      break;
    case 'https':
      port = g_form.getValue('server_port_https');
      break;
    case 'tcp':
      port = g_form.getValue('server_port_tcp');
      break;
  }
  //todo: validate port, and a bunch of other stuff.
  return true;
}
/**
 * Adds one object to another, nesting the child object into the parent.
 * This is to get around javascript's immutable handling of objects.
 * @param name {String} - The name of the property of the parent object, in which to nest the child object. <br />For example, if the name parameter is set to "pickles" then "parent.pickles" will return the child object.
 * @param child {Object} - The object that should be nested within the parent object.
 * @param [parent={}] {Object} - The parent object in which to nest the child object. If the parent object is not specified, then the child object is simply nested into an otherwise empty object, which is then returned.
 * @returns {Object} - A new object consisting of the parent (or an otherwise empty object) with the child object nested within it.
 * @example
 * // sets myNewObject to a copy of originalObject, which now also contains the original (yet un-linked) version of itself as a child, under the property name "original"
 * var myNewObject = addObjToObj("original", originalObj, originalObj);
 */
function addObjToObj(name, child, parent) {
  if (!parent) {
    parent = {};
  }
  parent[name] = child;
  return parent;
}
function isObjEmpty(o) {
  for (var p in o) {
    if (o.hasOwnProperty(p)) {
      return false;
    }
  }
  return true;
}
function isThisTrueOrWhat(b) {
  return ((typeof b == 'string') ? (b.toLowerCase() == 'true') : (b == true)); // all this just to properly return a bool in JS. THERE'S GOT TO BE A BETTER WAY!
}
Just create a UI Page with the functionality that you want to have.
E.g., clicking a "New" button opens up the repeated form view.
Get the inputs from the filled form as a list of objects, and send them over to a client-callable script include (GlideAjax) that handles the record creation.
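As a rough illustration of that second approach, here is a minimal sketch of the client-side call and a client-callable script include. All the names here (MyRepeatingRowsAjax, sysparm_rows, the u_repeating_row table and its fields) are made up for the example:
// Client side (e.g. in the UI Page's client script):
// collect the repeated rows as an array of objects, then send them to the server.
var rows = [
  { name: 'server1', port: '8080' },
  { name: 'server2', port: '8443' }
];
var ga = new GlideAjax('MyRepeatingRowsAjax');
ga.addParam('sysparm_name', 'createRows');
ga.addParam('sysparm_rows', JSON.stringify(rows));
ga.getXMLAnswer(function(answer) {
  alert(answer + ' rows created');
});
And the script include (must be marked client callable):
var MyRepeatingRowsAjax = Class.create();
MyRepeatingRowsAjax.prototype = Object.extendsObject(AbstractAjaxProcessor, {
  createRows: function() {
    var rows = JSON.parse(this.getParameter('sysparm_rows'));
    var created = 0;
    for (var i = 0; i < rows.length; i++) {
      var gr = new GlideRecord('u_repeating_row'); // hypothetical custom table
      gr.initialize();
      gr.setValue('u_name', rows[i].name);
      gr.setValue('u_port', rows[i].port);
      gr.insert();
      created++;
    }
    return created.toString();
  },
  type: 'MyRepeatingRowsAjax'
});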

How to retry failures with $q.all

I have some code that saves data using Breeze and reports progress over multiple saves, and it is working reasonably well.
However, sometimes a save will time out, and I'd like to retry it once automatically. (Currently the user is shown an error and has to retry manually.)
I am struggling to find an appropriate way to do this, and I am confused by promises, so I'd appreciate some help.
Here is my code:
//I'm using Breeze, but because the save takes so long, I
//want to break the changes down into chunks and report progress
//as each chunk is saved....
var surveys = EntityQuery
  .from('PropertySurveys')
  .using(manager)
  .executeLocally();

var promises = [];
var fails = [];
var so = new SaveOptions({ allowConcurrentSaves: false });
var count = 0;

//...so I iterate through the surveys, creating a promise for each survey...
for (var i = 0, len = surveys.length; i < len; i++) {
  var query = EntityQuery.from('AnsweredQuestions')
    .where('PropertySurveyID', '==', surveys[i].ID)
    .expand('ActualAnswers');
  var graph = manager.getEntityGraph(query);
  var changes = graph.filter(function (entity) {
    return !entity.entityAspect.entityState.isUnchanged();
  });
  if (changes.length > 0) {
    promises.push(manager
      .saveChanges(changes, so)
      .then(function () {
        //reporting progress
        count++;
        logger.info('Uploaded ' + count + ' of ' + promises.length);
      },
      function () {
        //could I retry the fail here?
        fails.push(changes);
      }
    ));
  }
}

//....then I use $q.all to execute the promises
return $q.all(promises).then(function () {
  if (fails.length > 0) {
    //could I retry the fails here?
    saveFail();
  }
  else {
    saveSuccess();
  }
});
Edit
To clarify why I have been attempting this:
I have an http interceptor that sets a timeout on all http requests. When a request times out, the timeout is adjusted upwards and the user is shown an error message telling them they can retry with a longer wait if they wish.
Sending all the changes in one http request is looking like it could take several minutes, so I decided to break the changes down into several http requests, reporting progress as each request succeeds.
Now, some requests in the batch might timeout and some might not.
Then I had the bright idea of setting a low timeout for the http requests to start with and increasing it automatically. But the requests in a batch are sent asynchronously with the same timeout setting, and the timeout is adjusted on each failure. That is no good.
To solve this I wanted to move the timeout adjustment after the batch completes, then also retry all requests.
To be honest I'm not so sure an automatic timeout adjustment and retry is such a great idea in the first place. And even if it was, it would probably be better in a situation where http requests were made one after another - which I've also been looking at: https://stackoverflow.com/a/25730751/150342
Orchestrating retries downstream of $q.all() is possible, but would be very messy indeed. It's far simpler to perform retries before aggregating the promises.
You could exploit closures and retry-counters, but it's cleaner to build a catch chain:
function retry(fn, n) {
  /*
   * Description: perform an arbitrary asynchronous function,
   * and, on error, retry up to n times.
   * Returns: promise
   */
  var p = fn(); // first try
  for (var i = 0; i < n; i++) {
    p = p.catch(function (error) {
      // possibly log error here to make it observable
      return fn(); // retry
    });
  }
  return p;
}
Now, amend your for loop:
1. Use Function.prototype.bind() to define each save as a function with bound-in parameters.
2. Pass that function to retry().
3. Push the promise returned by retry().then(...) onto the promises array.
var query, graph, changes, saveFn;
for (var i = 0, len = surveys.length; i < len; i++) {
  query = ...; // as before
  graph = ...; // as before
  changes = ...; // as before
  if (changes.length > 0) {
    saveFn = manager.saveChanges.bind(manager, changes, so); // this is what needs to be tried/retried
    promises.push(retry(saveFn, 1).then(function () {
      // as before
    }, function () {
      // as before
    }));
  }
}
return $q.all(promises)... // as before
EDIT
It's not clear why you might want to retry downstream of $q.all(). If it's a matter of introducing some delay before retrying, the simplest way would be to do so within the pattern above.
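For example, a minimal sketch of a delayed retry within the same pattern, assuming AngularJS's $timeout service is injected (the delay value is illustrative):
function retryWithDelay(fn, n, delayMs) {
  var p = fn(); // first try
  for (var i = 0; i < n; i++) {
    p = p.catch(function (error) {
      // wait before retrying; $timeout returns a promise
      return $timeout(angular.noop, delayMs).then(fn);
    });
  }
  return p;
}
// usage: up to 1 retry, waiting 2 seconds before it
promises.push(retryWithDelay(saveFn, 1, 2000).then(/* as before */));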
However, if retrying downstream of $q.all() is a firm requirement, here's a cleanish recursive solution that allows any number of retries, with minimal need for outer vars :
var surveys = //as before
var limit = 2;

function save(changes) {
  return manager.saveChanges(changes, so).then(function () {
    return true; // true signifies success
  }, function (error) {
    logger.error('Save Failed');
    return changes; // retry (subject to limit)
  });
}

function saveChanges(changes_array, tries) {
  tries = tries || 0;
  if (tries >= limit) {
    throw new Error('After ' + tries + ' tries, ' + changes_array.length + ' changes objects were still unsaved.');
  }
  if (changes_array.length > 0) {
    logger.info('Starting try number ' + (tries + 1) + ' comprising ' + changes_array.length + ' changes objects');
    return $q.all(changes_array.map(save)).then(function (results) {
      var successes = results.filter(function (item) { return item === true; });
      var failures = results.filter(function (item) { return item !== true; });
      logger.info('Uploaded ' + successes.length + ' of ' + changes_array.length);
      return saveChanges(failures, tries + 1); // recursive call
    });
  } else {
    return $q.when(); // return a resolved promise
  }
}
//using reduce to populate an array of changes
//the second parameter passed to the reduce method is the initial value
//for memo - in this case an empty array
var changes_array = surveys.reduce(function (memo, survey) {
  //memo is the return value from the previous call to the function
  var query = EntityQuery.from('AnsweredQuestions')
    .where('PropertySurveyID', '==', survey.ID)
    .expand('ActualAnswers');
  var graph = manager.getEntityGraph(query);
  var changes = graph.filter(function (entity) {
    return !entity.entityAspect.entityState.isUnchanged();
  });
  if (changes.length > 0) {
    memo.push(changes);
  }
  return memo;
}, []);

return saveChanges(changes_array).then(saveSuccess, saveFail);
Progress reporting is slightly different here. With a little more thought it could be made more like in your own answer.
This is a very rough idea of how to solve it.
var promises = [];
var LIMIT = 3; // 3 tries per promise.

data.forEach(function (chunk) {
  promises.push(tryOrFail({
    chunk: chunk,
    tries: 0
  }));
});

function tryOrFail(item) {
  if (item.tries === LIMIT) return $q.reject();
  ++item.tries;
  return processChunk(item.chunk)
    .catch(function () {
      //Some error handling here
      return tryOrFail(item);
    });
}

$q.all(promises) //...
Two useful answers here, but having worked through this, I have concluded that immediate retries are not really going to work for me.
I want to wait for the first batch to complete; then, if the failures are because of timeouts, increase the timeout allowance before retrying the failures.
So I took Juan Stiza's example and modified it to do what I want, i.e. retry failures with $q.all.
My code now looks like this:
var surveys = //as before
var successes = 0;
var retries = 0;
var failedChanges = [];

//saveChanges also keeps track of retries, successes and fails
//it resolves the first time through, and rejects the second time
//it might be better written as two functions - a save and a retry
function saveChanges(data) {
  if (data.retrying) {
    retries++;
    logger.info('Retrying ' + retries + ' of ' + failedChanges.length);
  }
  return manager
    .saveChanges(data.changes, so)
    .then(function () {
      successes++;
      logger.info('Uploaded ' + successes + ' of ' + promises.length);
    },
    function (error) {
      if (!data.retrying) {
        //store the changes and resolve the promise
        //so that saveChanges can be called again after the call to $q.all
        failedChanges.push(data.changes);
        return; //resolved
      }
      logger.error('Retry Failed');
      return $q.reject();
    });
}

//using map instead of a for loop to call saveChanges
//and store the returned promises in an array
var promises = surveys.map(function (survey) {
  var changes = //as before
  return saveChanges({ changes: changes, retrying: false });
});

logger.info('Starting data upload');
return $q.all(promises).then(function () {
  if (failedChanges.length > 0) {
    //named retryPromises to avoid shadowing the retry counter above
    var retryPromises = failedChanges.map(function (data) {
      return saveChanges({ changes: data, retrying: true });
    });
    return $q.all(retryPromises).then(saveSuccess, saveFail);
  }
  else {
    saveSuccess();
  }
});

Restrictions in file types in FineUploader 3.7.0

I am using the "allowedExtensions" option without any problem, but there is a situation where I have to permit any type of extension except two.
Is there a simple way to do that? I didn't find an option like 'restrictedExtensions' in the code.
Thanks
From the docs:
The validate and validateBatch events are thrown/called before the default Fine Uploader validators (defined in the options) execute.
Also, if your validation event handler returns false, then Fine Uploader will register that file as invalid and not submit it.
Here's some code you could try in your validate event handler. It has not been tested yet, so YMMV.
var notAllowedExts = ['pptx', 'xlsx', 'docx'];
/* ... */
onValidate: function (fileOrBlobData) {
  var valid = true;
  var fileName = fileOrBlobData.name || '';
  qq.each(notAllowedExts, function (idx, notAllowedExt) {
    var extRegex = new RegExp('\\.' + notAllowedExt + '$', 'i');
    if (fileName.match(extRegex) != null) {
      valid = false;
      return false; // returning false stops the qq.each iteration
    }
  });
  return valid;
}
/* ... */
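For context, this is roughly how such a handler is wired up in Fine Uploader 3.x via the callbacks option; the container id and endpoint below are made up, so check the docs for your exact version:
var uploader = new qq.FineUploader({
  element: document.getElementById('fine-uploader'), // hypothetical container id
  request: {
    endpoint: '/uploads' // hypothetical server endpoint
  },
  callbacks: {
    onValidate: function (fileOrBlobData) {
      // ...handler from above...
      return true;
    }
  }
});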

Less CSS and local storage issue

I'm using LESS CSS (more exactly, less.js), which seems to use localStorage under the hood. I had never seen an error like this before while running my app locally, but now I get "Persistent storage maximum size reached" on every page display, just above the link to the unique .less file of my app.
This only happens with Firefox 12.0 so far.
Is there any way to solve this?
P.S.: mainly inspired by Calculating usage of localStorage space, this is what I ended up doing (it is based on Prototype and depends on a trivial custom Logger class, but should be easily adapted to your context):
"use strict";
var LocalStorageChecker = Class.create({
testDummyKey: "__DUMMY_DATA_KEY__",
maxIterations: 100,
logger: new Logger("LocalStorageChecker"),
analyzeStorage: function() {
var result = false;
if (Modernizr.localstorage && this._isLimitReached()) {
this._clear();
}
return result;
},
_isLimitReached: function() {
var localStorage = window.localStorage;
var count = 0;
var limitIsReached = false;
do {
try {
var previousEntry = localStorage.getItem(this.testDummyKey);
var entry = (previousEntry == null ? "" : previousEntry) + "m";
localStorage.setItem(this.testDummyKey, entry);
}
catch(e) {
this.logger.debug("Limit exceeded after " + count + " iteration(s)");
limitIsReached = true;
}
}
while(!limitIsReached && count++ < this.maxIterations);
localStorage.removeItem(this.testDummyKey);
return limitIsReached;
},
_clear: function() {
try {
var localStorage = window.localStorage;
localStorage.clear();
this.logger.debug("Storage clear successfully performed");
}
catch(e) {
this.logger.error("An error occurred during storage clear: ");
this.logger.error(e);
}
}
});
document.observe("dom:loaded",function() {
var checker = new LocalStorageChecker();
checker.analyzeStorage();
});
P.P.S.: I didn't measure the performance impact on the UI yet, but a decorator could be created to perform the storage test only every X minutes (storing the last execution timestamp in local storage, for instance).
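A minimal sketch of that throttling idea (the key name and interval are illustrative):
var CHECK_INTERVAL_MS = 10 * 60 * 1000; // run the check at most every 10 minutes
var LAST_RUN_KEY = "__STORAGE_CHECK_LAST_RUN__";

function analyzeStorageThrottled(checker) {
  var lastRun = parseInt(window.localStorage.getItem(LAST_RUN_KEY), 10) || 0;
  if (Date.now() - lastRun < CHECK_INTERVAL_MS) {
    return; // checked recently enough, skip this time
  }
  checker.analyzeStorage();
  window.localStorage.setItem(LAST_RUN_KEY, String(Date.now()));
}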
Here is a good resource for the error you are running into:
http://www.sitepoint.com/building-web-pages-with-local-storage/#fbid=5fFWRXrnKjZ
It gives some insight into how localStorage only has so much room and how you can max it out in each browser. Look into removing some data from localStorage to resolve your problem.
Less.js persistently caches content that is @imported. You can use this script to clear that cached content. Using the script below, you can call destroyLessCache('/path/to/css/') and it will clear your localStorage of css files that have been cached.
function destroyLessCache(pathToCss) { // e.g. '/css/' or '/stylesheets/'
  if (!window.localStorage || !less || less.env !== 'development') {
    return;
  }
  var host = window.location.host;
  var protocol = window.location.protocol;
  var keyPrefix = protocol + '//' + host + pathToCss;

  for (var key in window.localStorage) {
    if (key.indexOf(keyPrefix) === 0) {
      delete window.localStorage[key];
    }
  }
}
