dc.js filtered table export using filesaver.js - dc.js

I'm trying to export dc.js filtered table data using FileSaver.js.
I use the code below based on this, which is fine except it exports all fields (filtered OK, though), whereas I just need the table-specific fields, which are only a few of the fields plus 2 calculated ones.
// Export every record passing the current crossfilter filters as a CSV
// download when #download is clicked. Note: this exports ALL fields of each
// record, not just the columns shown in the table.
d3.select('#download')
.on('click', function() {
// dateDim.top(Infinity) = all filtered records; d3.csv.format serializes them to CSV text
var blob = new Blob([d3.csv.format(dateDim.top(Infinity))], {type: "text/csv;charset=utf-8"});
saveAs(blob, DateT + '.csv'); // FileSaver.js triggers the browser download
});
Is there a way I can point to the table rather than the dimension?
Thanks.
EDIT: Working code below
// Download the filtered records, reduced to the data table's columns, as CSV.
d3.select('#download')
.on('click', function () {
  // Re-encode each filtered record through the table's own (undocumented)
  // column formatters so the CSV contains exactly what the table displays.
  var rows = MYTABLEDIM.top(Infinity).map(function (record) {
    var formatted = {};
    MYTABLENAME.columns().forEach(function (column) {
      formatted[MYTABLENAME._doColumnHeaderFormat(column)] =
        MYTABLENAME._doColumnValueFormat(column, record);
    });
    return formatted;
  });
  var csvBlob = new Blob([d3.csv.format(rows)], {type: "text/csv;charset=utf-8"});
  saveAs(csvBlob, 'data.csv');
});

Good question.
It is actually possible to format the data according to the column definitions, by using some undocumented methods of the data table.
I've updated the example with a radio button to choose which data to download.
Here is the code that transforms and download the data as it is encoded in the table:
// Download handler: exports the filtered rows re-encoded through the data
// table's own column definitions (undocumented dc.js table internals).
d3.select('#download')
.on('click', function() {
// all records passing the current filters, in original-data order
// (not the table's sort order -- see the note below)
var data = nameDim.top(Infinity);
data = data.map(function(d) {
var row = {};
table.columns().forEach(function(c, i) {
// if you're using the "original method" for specifying columns,
// use i to index an array of names, instead of table._doColumnHeaderFormat(c)
row[table._doColumnHeaderFormat(c)] = table._doColumnValueFormat(c, d);
});
return row;
});
var blob = new Blob([d3.csv.format(data)], {type: "text/csv;charset=utf-8"});
saveAs(blob, 'data.csv');
});
Basically, when the table radio is selected, we'll transform the data row-by-row using the same functions that the table uses to format its data.
The rows will be in the order of the original data, not sorted like the table. (And strictly speaking, the columns may not be in the same order either). That would be a bigger endeavor, and might require new features in dc.js. But this works without any changes. Hope it helps!

Related

Use full group record within title in dc-js geoChoropleth chart

I have a group for which elements after reduction look like this pseudocode :
{
key:"somevalue",
value: {
sum: the_total,
names:{
a: a_number,
b: b_number,
c:c_number
}
}
}
In my dc-js geoChoropleth graph the valueAccessor is (d) => d.value.sum
In my title, I would like to use the names component of my reduction. But when I use .title((d) => {...}), I can only access the key and the value resulting from the valueAccessor function instead of the original record.
Is that meant to be ?
This is a peculiarity of the geoChoropleth chart.
Most charts bind the group data directly to chart elements, but since the geoChoropleth chart has two sources of data, the map and the group, it binds the map data and hides the group data.
Here is the direct culprit:
// dc.js internals: sets the <title> (tooltip) text on each map region.
// Note it synthesizes its own {key, value} pair -- `value` comes from the
// layered data lookup (already passed through valueAccessor), so the title
// callback never sees the original group record.
_renderTitles (regionG, layerIndex, data) {
if (this.renderTitle()) {
regionG.selectAll('title').text(d => {
const key = this._getKey(layerIndex, d);
const value = data[key]; // valueAccessor output, not the raw group row
return this.title()({key: key, value: value});
});
}
}
It is creating key/value objects itself, and the value, as you deduced, comes from the valueAccessor:
// dc.js internals: flattens the group into a key -> value lookup object,
// applying keyAccessor/valueAccessor up front. This is why only the reduced
// value (e.g. d.value.sum) survives to the title callback above.
_generateLayeredData () {
const data = {};
const groupAll = this.data();
for (let i = 0; i < groupAll.length; ++i) {
data[this.keyAccessor()(groupAll[i])] = this.valueAccessor()(groupAll[i]);
}
return data;
}
Sorry this is not a complete answer, but I would suggest adding a pretransition handler that replaces the titles, or alternately, using the key passed to the title accessor to lookup the data you need.
As I noted in the issue linked above, I think this is a pretty serious design bug.

D3 How to filter data from multiple datasets

I am using data from a CSV that looks like this
,,Name,First names,s,r,Nat,born,starting point,starting date,arrival date,days,km,Assist,Support,Style,note,arrival date 2
1,1,KAGGE,Erling,,,Nor,1/15/1963,Berkner Island,11/18/1992,1/7/1993,50,appr. 1300,n,n,solo,first solo unassisted,
2,2,ARNESEN,Liv,f,,Nor,6/1/1953,Hercules Inlet,11/4/1994,12/24/1994,50,1130,n,n,solo,first woman unassisted,
I'm using this CSV to create a dropdown menu of 'starting point', and this filters all 'Name' entries associated with that 'starting point'.
I want to create an additional dataset that will allow me to add some extra details to each 'starting point' such as 'x', like this: (I want to use this to create some svg lines unique to each 'starting point')
var newdata = [
{'starting point': 'Berkner Island', 'x': '1'},
{'starting point': 'Hercules Inlet', 'x': '2'},
{'starting point': 'Ronne Ice Shelf','x': '3'}
];
I am filtering the data like this
var filter = data.filter(function(d) {
return ("Berkner Island" == d['starting point'])
});
updateCSV(filter); // CSV data function
updateNewdata(filter); // Newdata function
When it loads, only the CSV data is filtered. The second dataset 'newdata' loads all entries with no filter applied to it.
https://plnkr.co/edit/TgZxbaPdyERWeaG5i3HX?p=info
My question is, is it possible to use a second dataset, and filter two different datasets by a shared value?
UPDATE
I managed to work it out, and yes it is possible. I wasn't understanding that the callback in the function could have a unique name, and not just 'data'. Once I did this and created two distinct filters for each of my datasets, it worked.
function updateCSV(CSVdata) {
...
.data(CSVdata)
};
function updateNewdata(newdata) {
...
.data(newdata)
};
// FILTER
var filtera = CSVdata.filter(function(d) {
return ("Berkner Island" == d['starting point'])
});
var filterb = newdata.filter(function(d) {
return ("Berkner Island" == d['starting point'])
});
updateCSV(filtera);
updateNewdata(filterb);
I think the issue is here. You're not actually using the data you pass in.
// should this say 'newdata' not 'data'?
function updateNewdata(data) {
var position = canvas
.selectAll(".position")
.data(newdata)

Data binding in D3 fails when using "cloned" data

D3 data binding seem to be behave differently when using the original data object, vs. using a cloned version of the data object. I have a function updateTable which updates an array of tables based on the passed array of arrays. If an array (representing one new table row) is added to the array of arrays, and passed to the updateFunction, all works as expected (the row is added to the table). If however, we make a shallow copy (clone) of this data structure and pass it to the updateFunction, the data binding fails and no table row is added. Please note that the original data structure and clone are two different objects, however with identical values.
Please see this JSFiddle example. Two tables are generated, one fed the original data, the other the cloned data. The two tables are clearly different, as the second table (built using cloned data) does NOT contain the third row.
'use strict';
d3.select("body").append("h3").text("D3 Data Binding Issue");
// create two divs to hold one table each
var tableDiv1 = d3.select("body").append("div");
d3.select("body").append("hr");
var tableDiv2 = d3.select("body").append("div");
// define data
// here, an array of a single item (which represents a table), containing an array of arrays,
// each destined for a table row
var data = [
{ table: "Table1", rows: [
{ table: "Table1", row: "Row1", data: "DataT1R1" },
{ table: "Table1", row: "Row2", data: "DataT1R2" }
]
}
];
// run update on the initial data
update(data);
// add 3rd array to the data structure (which should add a third row in each table)
// NOTE: this mutates `data` in place -- D3 binds data by reference, so the
// data already bound to the first table's nodes "sees" this change too.
data[0].rows.push({ table: "Table1", row: "Row3", data: "DataT1R3" });
// run update again
// observe that the Lower table (which is using cloned data) does NOT update
update(data);
/*
// remove first array of the data structure
data[0].rows.shift();
// run update again
// observe that the Lower table (which again is using cloned data) does NOT update
update(data);
*/
// function to run the tableUpdate function targeting two different divs, one with the
// original data, and the other with cloned data
// Runs tableUpdate twice: once with the original object (bound by reference)
// and once with a value-equal but distinct clone -- the pairing that exposes
// the binding difference being asked about.
function update(data) {
// the contents of the two data structures are equal
console.log("\nAre object values equal? ", JSON.stringify(data) == JSON.stringify(clone(data)));
tableUpdate(data, tableDiv1, "Using Original Data"); // update first table
tableUpdate(clone(data), tableDiv2, "Using Cloned Data"); // update second table
}
// generic function to manage array of tables (in this simple example only one table is managed)
// Builds/updates one table per element of `data` inside tableDiv.
// WARNING (the bug under discussion): the nested selections below re-select
// existing elements without rebinding their data, so data already bound to
// the DOM can win over the `data` argument on subsequent calls.
function tableUpdate(data, tableDiv, title) {
console.log("data", JSON.stringify(data));
// get all divs in this table div
var divs = tableDiv.selectAll("div")
.data(data, function(d) { return d.table }); // disable default by-index eval
// remove div(s)
divs.exit().remove();
// add new div(s)
var divsEnter = divs.enter().append("div");
// append header(s) in new div(s)
divsEnter.append("h4").text(title);
// append table(s) in new div(s)
var tableEnter = divsEnter.append("table")
.attr("id", function(d) { return d.table });
// append table body in new table(s)
tableEnter.append("tbody");
// select all tr elements in the divs update selection
// NOTE: selectAll() does NOT push the parent's (possibly new) data down to
// the selected elements -- unlike select()/append() -- so on a second pass
// the row join below runs against whatever data is already bound.
var tr = divs.selectAll("table").selectAll("tbody").selectAll("tr")
.data(function(d, i, a) { return d.rows; }, function(d, i, a) { return d.row; }); // disable by-index eval
// remove any row(s) with missing data array(s)
tr.exit().remove();
// add row(s) for new data array(s)
tr.enter().append("tr");
// bind data to table cells
var td = tr.selectAll("td")
.data(function(d, i) { return d3.values(d); });
// add new cells
td.enter().append("td");
// update contents of table cells
td.text(function(d) { return d; });
}
// source: https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
// Deep copy via a JSON round-trip: yields a value-equal object graph that
// shares no references with the source. (JSON semantics apply: functions and
// undefined are dropped, Dates become strings.)
function clone(objectToBeCloned) {
    var serialized = JSON.stringify(objectToBeCloned);
    return JSON.parse(serialized);
}
Can anybody shed some light on this behavior? I believe I'm using the key functions properly, but could be wrong. In my application I need to re-generate the data structure before each table update, and I don't have option of reusing the original object.
The root of the problem is that you have a nested structure and .selectAll() doesn't update the data bound to the elements (but .append() automatically "inherits" the data). So the data that you use to render the table is simply not updated -- you can fix this by using .select() instead of .selectAll() (see the updated example).
The subtle difference between .select() and .selectAll() is that the former (similar to .append()) "inherits" the data bound to the elements in the current selection to the newly selected elements, while .selectAll() does not.
So why does it work for the original data? Well, D3 doesn't copy the data when it binds it to an element, but references it. By modifying the original data, you're also modifying what's bound to the elements. Hence simply running the code without rebinding any data works. The cloned data isn't updated as you're not modifying it directly.
Actually, the problem is due to an anti-pattern that you are using to "muscle" the tr structure.
The problem
During the second pass through tableUpdate, the key function finds a match on d.table for both the original and the cloned data. This is because the key is converted to a string during the binding process so even though
d.table === data.table; // false
it's still a match because
d.table == data.table; // true
Therefore the enter selection is empty in both cases and all of this code
var divsEnter = divs.enter().append("div");
// append header(s) in new div(s)
divsEnter.append("h4").text(title);
// append table(s) in new div(s)
var tableEnter = divsEnter.append("table")
.attr("id", function(d) { return d.table });
// append table body in new table(s)
tableEnter.append("tbody");
does nothing.
So the original data is not re-bound and the new, cloned data is not bound. But...
the data bound to the first table now has three rows because, as Lars pointed out, it is bound by reference. so, for the first table,
divs.datum() === data; // true
and it now has three rows.
In the case of the cloned data, the key function also returns true because you haven't changed it. Even though it has an extra row, data.key is still "Table1". So you are telling the key function that it's the same table. Consequently, the enter selection is also empty so, the new, cloned data is also not bound so, for the second table,
divs.datum() === data; // false
d.table == data.table == "Table1" // um, true true
and it still has two rows.
The problem is you use an an anti-pattern to bind the data and build the tr elements.
Instead of selecting and binding the data following the hierarchy of its structure, you go off piste and go back to the div and just ram it down to the tr element to build the structure. This is dangerous because the returned tr elements are unqualified; none of the important context that you gained from carefully selecting/creating the correct tbody element is used to ensure that these are the correct tr elements. They are, in fact, whatever tr elements happen to be lying around - regardless of which table they belong to - inside the div.
In both cases you simply rebuild the tr elements using the original arrays that are still attached, which is fine for the first table but for the second one... not so much.
My "current theory" of best practice is to build your data structure to model the intended structure of your visualisation first and then construct the DOM elements by walking that data structure, binding at each level and kicking the remaining data ahead of you as you go, until finally, it's all bound.
The solution
You need to be truly "data driven" and strictly follow the data structure when building and binding your elements. I re-built your updateTable function below...
'use strict';
d3.select("body").append("h3").text("D3 Data Binding Issue").style({margin:0});
// create two divs to hold one table each
var tableDiv1 = d3.select("body").append("div");
var tableDiv2 = d3.select("body").append("div");
// define data
// here, an array of a single item (which represents a table), containing an array of arrays,
// each destined for a table row
var data = [{
table: "Table1",
rows: [{
table: "Table1",
row: "Row1",
data: "DataT1R1"
}, {
table: "Table1",
row: "Row2",
data: "DataT1R2"
}]
}];
// run update on the initial data
// (called twice deliberately: the second call exercises the update path with
// identical content, so the sha-based key below reports "same table")
update(data);
update(data);
// add 3rd array to the data structure (which should add a third row in each table)
data[0].rows.push({
table: "Table1",
row: "Row3",
data: "DataT1R3"
});
// run update again
// observe that the Lower table (which is using cloned data) does NOT update
update(data);
/*
// remove first array of the data structure
data[0].rows.shift();
// run update again
// observe that the Lower table (which again is using cloned data) does NOT update
update(data);
*/
// function to run the tableUpdate function targeting two different divs, one with the
// original data, and the other with cloned data
// Drives both tables: the first with the original (reference-bound) data,
// the second with a fresh value-equal clone.
function update(data) {
// the contents of the two data structures are equal
console.log("\nAre object values equal? ", JSON.stringify(data) == JSON.stringify(clone(data)));
tableUpdate(data, tableDiv1, "Using Original Data"); // update first table
tableUpdate(clone(data), tableDiv2, "Using Cloned Data"); // update second table
}
// generic function to manage array of tables (in this simple example only one table is managed)
// Rebuilds the table strictly following the data hierarchy (div -> table ->
// tbody -> tr -> td), rebinding at every level. The table element is keyed
// by a SHA-256 of its stringified rows, so any change in content produces a
// new key and therefore a freshly built table.
function tableUpdate(data, tableDiv, title) {
console.log("data", JSON.stringify(data));
// get all divs in this table div
var divs = tableDiv.selectAll("div")
.data(data, function (d) {
return d.table
}); // disable default by-index eval
// remove div(s)
divs.exit().remove();
// add new div(s)
var divsEnter = divs.enter().append("div");
// append header(s) in new div(s)
divsEnter.append("h4").text(title);
// append or replace table(s) in new div(s)
var table = divs.selectAll("table")
.data(function (d) {
// the 1st dimension determines the number of elements
// this needs to be 1 (one table)
return [d.rows];
}, function (d) {
// need a unique key to differentiate table generations:
// hash the stringified rows so any content change means a new key
var sha256 = new jsSHA("SHA-256", "TEXT");
return (sha256.update(JSON.stringify(d)),
// logs whether this key evaluation is for incoming data or an existing node
console.log([this.length ? "data" : "node", sha256.getHash('HEX')].join("\t")),
sha256.getHash('HEX'));
});
table.exit().remove();
// the table body will have the same data pushed down from the table
// it will also be the array of array of rows
table.enter().append("table").append("tbody");
console.log(table.enter().size() ? "new table" : "same table")
var tBody = table.selectAll("tbody");
// select all tr elements in the divs update selection
var tr = tBody.selectAll("tr")
.data(function (d, i, a) {
// return one element of the rows array
return d;
}, function (d, i, a) {
return d.row;
}); // disable by-index eval
// remove any row(s) with missing data array(s)
tr.exit().remove();
// add row(s) for new data array(s)
tr.enter().append("tr");
// bind data to table cells
var td = tr.selectAll("td")
.data(function (d, i) {
return d3.values(d);
});
// add new cells
td.enter().append("td");
// update contents of table cells
td.text(function (d) {
return d;
});
}
// source: https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
// Copy an object graph by serializing to JSON and parsing it back:
// the result is structurally identical but shares no references.
function clone(objectToBeCloned) {
    var json = JSON.stringify(objectToBeCloned);
    return JSON.parse(json);
}
table, th, td {
border: 1px solid gray;
}
body>div { display: inline-block; margin: 10px;}
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.6/d3.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jsSHA/2.0.1/sha.js"></script>
The interesting thing(s)
The interesting thing is that the table bound to the original data never gets replaced. The reason being that, again, as mentioned by Lars, the data is bound by reference.
As an experiment (and inspired by my love-hate relationship with git) I used a 256 bit sha as a key, feeding the stringified data to it. If you're managing a bunch of tables in the same space then maybe this is the way to go. If you always clone the data and calculate a sha then that feels like a very secure approach.
By way of illustration, here is a redacted log (I added a second update with the same data at the start...)
This is the first pass where there are no nodes yet. The key function is only invoked once on each data element because the update selection is empty.
Are object values equal? true
data [{"table":"Table1","rows":[{"tab...,"data":"DataT1R2"}]}]
data a09a5ef8f6b81669eed13c93f609884...
new table ...
data [{"table":"Table1","rows":[{"tab...,"data":"DataT1R2"}]}]
data a09a5ef8f6b81669eed13c93f609884...
new table ...
...
This is the second call with the same data. You can see that the key function is called twice for each table and that the sha is the same for both, hence the "same table" annotation.
Are object values equal? true ...
data [{"table":"Table1","rows":[{"tab...,"data":"DataT1R2"}]}]
node a09a5ef8f6b81669eed13c93f609884...
data a09a5ef8f6b81669eed13c93f609884...
same table ...
data [{"table":"Table1","rows":[{"tab...,"data":"DataT1R2"}]}]
node a09a5ef8f6b81669eed13c93f60...
data a09a5ef8f6b81669eed13c93f60...
same table
Here is the interesting case where, even though the data has changed, the key function returns the same sha for node and data for the first table. The second table is as expected, with different sha for node and data and a new table generated.
Are object values equal? true
data [{"table":"Table1","rows":[{...,"data":"DataT1R3"}]}]
node 7954982db25aee37483face1602...
data 7954982db25aee37483face1602...
same table ...
data [{"table":"Table1","rows":[{...,"data":"DataT1R3"}]}]
node a09a5ef8f6b81669eed13c93f60...
data 7954982db25aee37483face1602...
new table

jqGrid drag and drop headings for grouping .... Group Name

I have setup drag and drop headings to group by the relevant column from jQgrid Grouping Drag and Drop
It works great however I am trying to display the column name before the value i.e.
Client : Test data data
Client : Test2 data data
I've been going around in circles if any one could help.
if i take the same code used for the dynamic group by which should be the (column Name)
I end up with The Column data not the column name.
// Re-apply grouping using the column names collected from the drop zone.
$('#' + gridId).jqGrid('groupingGroupBy', getheader());
// Collects the `data-column` attribute of every dropped heading <li>
// (skipping the drag placeholder) into a plain array of column names.
function getheader() {
var header = $('#groups ol li:not(.placeholder)').map(function () {
return $(this).attr('data-column');
}).get(); // .get() converts the jQuery map result to a plain array
return header;
}
if i use the same function in group text I get data not the column name.
I've come from C# and I am very new to jQuery.
If any one could help it would be greatly appreciated.
Kind Regards,
Ryan
First of all the updated demo provides the solution of your problem:
Another demo contains a simplified example that demonstrates just how one could display the grouping header in the form Column Header: Column data, instead of the default Column data.
The main idea of the solution is the usage of the formatDisplayField property of groupingView, which I suggested originally in the answer. The current version of jqGrid supports this option. If one uses, for example, the options
grouping: true,
groupingView: {
groupField: ["name", "invdate"],
groupColumnShow: [false, false],
formatDisplayField: [
customFormatDisplayField,
customFormatDisplayField
]
}
where customFormatDisplayField callback function are defined as
// groupingView.formatDisplayField callback: prepend the grouped column's
// colModel name to the displayed group value (e.g. "name: Client1").
var customFormatDisplayField = function (displayValue, value, colModel) {
    var label = colModel.name;
    return label + ": " + displayValue;
};
will display almost exactly the results which you need, but it will use the name property of colModel instead of the corresponding name from colNames. To make the final solution, one can use another implementation of customFormatDisplayField:
// Looks up the display header (colNames entry) corresponding to a colModel
// name by scanning colModel for the matching index. Must be invoked with
// `this` bound to the grid element (see the .call below).
// Implicitly returns undefined if no column matches.
var getColumnHeaderByName = function (colName) {
var $self = $(this),
colNames = $self.jqGrid("getGridParam", "colNames"),
colModel = $self.jqGrid("getGridParam", "colModel"),
cColumns = colModel.length,
iCol;
for (iCol = 0; iCol < cColumns; iCol++) {
if (colModel[iCol].name === colName) {
return colNames[iCol];
}
}
},
// formatDisplayField callback: prefixes the group value with the column's
// human-readable header from colNames instead of the raw colModel name.
customFormatDisplayField = function (displayValue, value, colModel, index, grp) {
return getColumnHeaderByName.call(this, colModel.name) + ": " + displayValue;
};

Loading D3.js data from a simple JSON string

Most of the examples in gallery load data from TSV files.
How can I convert the following to use a local json variable instead of TSV data?
d3.tsv("data.tsv", function(error, data) {
var myEntitiesJson = getEntitiesJson(); // <------ use this instead of "data"
data.forEach(function(d) {
d.frequency = +d.frequency;
});
x.domain(data.map(function(d) { return d.letter; }));
y.domain([0, d3.max(data, function(d) { return d.frequency; })]);
...
svg.selectAll(".bar")
.data(data) // <----- bind to myEntities instead
}
As far as I can tell, I just need to do something to my entitiesJson, in order to data-fy it so that the chart could bind to it.
UPDATE
I am making some progress. I plugged in my entities from JSON and the graph is starting to take new shape.
Currently the following code breaks:
svg.selectAll(".bar")
.data(myEntities) // <-- this is an array of objects
.enter().append("rect")
This is causing:
Error: Invalid value for attribute y="NaN"
Error: Invalid value for attribute height="NaN"
for remote data.json
replace :
d3.tsv("data.tsv", function(error, data) {...}
with :
d3.json("data.json", function(error, data) {
console.log(data); // this is your data
});
for local data:
// Local data: an ARRAY of row objects. (The original snippet wrapped the
// rows in { } braces, which is a syntax error -- an object literal cannot
// contain bare object members; an array literal is required here.)
var myData = [ {date:'2013-05-01', frequency:99},
               {date:'2013-05-02', frequency:24} ];
// Stand-in for the d3.tsv/d3.json callback: receives the data directly.
function draw(data) {
console.log(data); // this is your data
}
draw(myData);
There isn't a simple way to data-fy any given json, because not all json objects are the same shape.
By shape, I mean the way that the data is organized. For example, both '{"foo" : 1, "bar" : 2}' and '{"names" : ["foo", "bar"], "values" : [1, 2]}' could be used to store the same data, but one stores everything in an object in which the object keys correspond to the names of data points, and one uses separate arrays to store names and values, with corresponding entries having a common array index.
There is, however, a general process you can go through to turn json into data. First, you'll need to parse your json. This can be done with the javascript-standard JSON object. Use JSON.parse(myJson) to obtain data from your json object if it's already uploaded to the client. d3.json(my/json/directory, fn(){}) can both load and parse your json, so if you're loading it from elsewhere on your server, this might be a better way to get the json into an object.
Once you have your json packed into a javascript object, you still need to data-fy it, which is the part that will depend on your data. What d3 is going to expect is some form of array: [dataPoint1, dataPoint2, ...]. For the two examples I gave above, the array you would want would look something like this:
[{'name' : 'foo', 'value' : 1}, {'name' : 'bar', 'value' : 2}]
I've got one element in my array for each data point, with two attributes: value and name. (In your example, you would want the attributes letter and frequency)
For each of my examples, I would use a different function to create the array. With this line in common:
var rawData = JSON.parse(myJson);
My first json could be packed with this function:
// Shape 1: turn a {"foo": 1, "bar": 2} map into [{name, value}, ...] form.
var key;
var data = [];
for(key in rawData){
if(rawData.hasOwnProperty(key)){ // own keys only; skip anything inherited
data.push({'name' : key, 'value' : rawData[key]});
}
}
For the second example, I would want to loop through each attribute of my object, names, and values. My code might look like this:
// Shape 2: zip the parallel arrays {names: [...], values: [...]} into
// [{name, value}, ...] form (assumes names and values are index-aligned).
var i;
var data = [];
for(i = 0; i < rawData.names.length; i++){
data.push({'name' : rawData.names[i], 'value' : rawData.values[i]});
}
Both of these will yield a data-fied version of my original JSON that I can then use in d3.
For D3js v2 or v3 (not sure which one).
Declare your dataset
var dataset = {
"first-name": "Stack",
"last-name": "Overflow",
}; // JSON object
var dataset = [ 5, 10, 15, 20, 25 ]; // or array
As stated by the doc, you can use either:
an array of numbers or objects, or a function that returns an array of values
Bind it
d3.select("body").selectAll("p")
.data(dataset)
.enter()
.append("p")
.text("New paragraph!");
More explanation at Scott Murray's D3's tutorial#Binding data.
The data() function apply to a selection, more information can be found in the official documentation: selection.data([values[, key]]).
You can change the json into a javascript file that assigns the data to a global value. Taking https://bl.ocks.org/d3noob/5028304 as an example:
From:
<script>
.....
// load the data
d3.json("sankeygreenhouse.json", function(error, graph) {
var nodeMap = {};
graph.nodes.forEach(function(x) { nodeMap[x.name] = x; });
To:
<script src="graphData.js"></script>
<script>
.....
var nodeMap = {};
graph.nodes.forEach(function(x) { nodeMap[x.name] = x; });
Note that we've removed the need for the callback.
The json file was "sankeygreenhouse.json":
{
"links": [
{"source":"Agricultural Energy Use","target":"Carbon Dioxide","value":"1.4"},
Now, in "graphData.js":
var graph = {
"links": [
{"source":"Agricultural Energy Use","target":"Carbon Dioxide","value":"1.4"},
Just change data to an array of objects like this:
// Inline replacement for the loaded TSV: one object per row, numeric values pre-parsed.
let data = [{"apples":53245,"oranges":200},{"apples":28479,"oranges":200},{"apples":19697,"oranges":200},{"apples":24037,"oranges":200},{"apples":40245,"oranges":200}]
and comment out the d3.tsv("data.tsv", function(error, data) {...
Why not simply transform your json to tsv as described by Rob here?
d3 expects the data or, said another way, needs the data in a particular format: tsv. The easiest way to resolve your problem is simply formatting your data from json to tsv, which can be done easily using Rob's comments.

Resources