[fixes #444,csv][s]: remove csv backend as it now lives in its own repo.

* Moved to https://github.com/okfn/csv.js in Feb and it has been improved since then
* Updated the docs to reflect the removal (plus made some other improvements to the backend docs at the same time)
This commit is contained in:
parent 12c07b45dc
commit 095f64e1f3
@@ -42,6 +42,7 @@ Possible breaking changes
 * Support for row add/delete/reorder for the Recline SlickGrid view - check `_includes/recline-deps.html` for the SlickGrid plugins required #396
 * Upgraded timelinejs lib - #316
+* Removed csv backend (as now in separate repo) #444

### v0.6 - Summer 2013
@@ -1,10 +1,8 @@
-<ul>
-<li><a href="https://github.com/okfn/recline.backend.gdocs">gdocs: Google Docs (Spreadsheet)</a></li>
-<li><a href="{{page.root}}/docs/src/backend.csv.html">csv: CSV files</a></li>
-<li><a href="https://github.com/okfn/recline.backend.solr">solr: SOLR</a> (partial)</li>
-<li><a href="https://github.com/okfn/elasticsearch.js">elasticsearch: ElasticSearch</a></li>
-<li><a href="{{page.root}}/docs/src/backend.dataproxy.html">dataproxy: DataProxy (CSV and XLS on the Web)</a></li>
-<li><a href="https://github.com/okfn/recline.backend.ckan">ckan: CKAN</a> – support for <a href="http://docs.ckan.org/en/latest/datastore.html">CKAN datastore</a></li>
-<li><a href="https://github.com/okfn/recline.backend.couchdb">couchdb: CouchDB</a></li>
-<li><a href="{{page.root}}/docs/src/backend.memory.html">memory: Memory (local data)</a></li>
-</ul>
+* <a href="https://github.com/okfn/recline.backend.gdocs">gdocs: Google Docs (Spreadsheet)</a>
+* <a href="https://github.com/okfn/csv.js">csv: CSV files</a>
+* <a href="https://github.com/okfn/recline.backend.solr">solr: SOLR</a> (partial)
+* <a href="https://github.com/okfn/elasticsearch.js">elasticsearch: ElasticSearch</a>
+* <a href="{{page.root}}/docs/src/backend.dataproxy.html">dataproxy: DataProxy (CSV and XLS on the Web)</a>
+* <a href="https://github.com/okfn/ckan.js">ckan: CKAN</a> – support for <a href="http://docs.ckan.org/en/latest/datastore.html">CKAN datastore</a>
+* <a href="https://github.com/okfn/recline.backend.couchdb">couchdb: CouchDB</a>
+* <a href="{{page.root}}/docs/src/backend.memory.html">memory: Memory (local data)</a>
@@ -1,7 +1,7 @@
 // Create the dataset in the usual way
 // Note the additional options you can specify for parsing the CSV file
 var dataset = new recline.Model.Dataset({
-  url: '{{page.root}}/demos/data/sample.csv',
+  url: '{{page.root}}demos/data/sample.csv',
   backend: 'csv',
   // delimiter: ',',
   // quotechar: '"',
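For context, a minimal runnable sketch of the options shown in the snippet above, assuming recline (or the standalone csv backend) is already loaded; the inline `data` string is a stand-in for the sample.csv file:

{% highlight javascript %}
// Parse an inline CSV string via the csv backend; delimiter and quotechar
// are the same options that are commented out in the snippet above.
var dataset = new recline.Model.Dataset({
  data: 'name,number\n"Jones, Jay",10\n',
  backend: 'csv',
  delimiter: ',',
  quotechar: '"'
});
dataset.fetch().done(function() {
  // One data row: the first CSV row is used as the header
  console.log(dataset.records.length); // => 1
});
{% endhighlight %}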
@@ -67,7 +67,7 @@
 <script type="text/javascript" src="{{page.root}}src/backend.memory.js"></script>
 <script type="text/javascript" src="{{page.root}}src/backend.dataproxy.js"></script>
 <script type="text/javascript" src="http://okfnlabs.org/recline.backend.gdocs/backend.gdocs.js"></script>
-<script type="text/javascript" src="{{page.root}}src/backend.csv.js"></script>
+<script type="text/javascript" src="http://okfnlabs.org/csv.js/csv.js"></script>

 <!-- views -->
 <script type="text/javascript" src="{{page.root}}src/view.grid.js"></script>
dist/recline.js (vendored, 317 lines changed)
@@ -1,312 +1,5 @@
-this.recline = this.recline || {};
-this.recline.Backend = this.recline.Backend || {};
-this.recline.Backend.CSV = this.recline.Backend.CSV || {};
-
-// Note that provision of jQuery is optional (it is **only** needed if you use fetch on a remote file)
-(function(my) {
-  "use strict";
-  my.__type__ = 'csv';
-
-  // use either jQuery or Underscore Deferred depending on what is available
-  var Deferred = (typeof jQuery !== "undefined" && jQuery.Deferred) || _.Deferred;
-
-  // ## fetch
-  //
-  // fetch supports 3 options depending on the attribute provided on the dataset argument
-  //
-  // 1. `dataset.file`: `file` is an HTML5 file object. This is opened and parsed with the CSV parser.
-  // 2. `dataset.data`: `data` is a string in CSV format. This is passed directly to the CSV parser.
-  // 3. `dataset.url`: a url to an online CSV file that is ajax accessible (note this usually requires the file to be either local or on a CORS-enabled server). The file is then loaded using jQuery.ajax and parsed using the CSV parser (NB: this requires jQuery).
-  //
-  // All options generate similar data and use the memory store, that is they return something like:
-  //
-  // <pre>
-  // {
-  //   records: [ [...], [...], ... ],
-  //   metadata: { may be some metadata e.g. file name }
-  //   useMemoryStore: true
-  // }
-  // </pre>
-  my.fetch = function(dataset) {
-    var dfd = new Deferred();
-    if (dataset.file) {
-      var reader = new FileReader();
-      var encoding = dataset.encoding || 'UTF-8';
-      reader.onload = function(e) {
-        var out = my.extractFields(my.parseCSV(e.target.result, dataset), dataset);
-        out.useMemoryStore = true;
-        out.metadata = {
-          filename: dataset.file.name
-        };
-        dfd.resolve(out);
-      };
-      reader.onerror = function (e) {
-        alert('Failed to load file. Code: ' + e.target.error.code);
-      };
-      reader.readAsText(dataset.file, encoding);
-    } else if (dataset.data) {
-      var out = my.extractFields(my.parseCSV(dataset.data, dataset), dataset);
-      out.useMemoryStore = true;
-      dfd.resolve(out);
-    } else if (dataset.url) {
-      jQuery.get(dataset.url).done(function(data) {
-        var out = my.extractFields(my.parseCSV(data, dataset), dataset);
-        out.useMemoryStore = true;
-        dfd.resolve(out);
-      });
-    }
-    return dfd.promise();
-  };
-
-  // Convert an array of rows into { records: [ ... ], fields: [ ... ] }
-  // @param {Boolean} noHeaderRow If true assume that the first row is not a header (i.e. a list of fields) but is data.
-  my.extractFields = function(rows, noFields) {
-    if (noFields.noHeaderRow !== true && rows.length > 0) {
-      return {
-        fields: rows[0],
-        records: rows.slice(1)
-      };
-    } else {
-      return {
-        records: rows
-      };
-    }
-  };
-
-  // ## parseCSV
-  //
-  // Converts a Comma Separated Values string into an array of arrays.
-  // Each line in the CSV becomes an array.
-  //
-  // Empty fields are converted to nulls and non-quoted numbers are converted to integers or floats.
-  //
-  // @return The CSV parsed as an array
-  // @type Array
-  //
-  // @param {String} s The string to convert
-  // @param {Object} options Options for loading CSV including
-  //   @param {Boolean} [trim=false] If set to true leading and trailing
-  //     whitespace is stripped off of each non-quoted field as it is imported
-  //   @param {String} [delimiter=','] A one-character string used to separate
-  //     fields. It defaults to ','
-  //   @param {String} [quotechar='"'] A one-character string used to quote
-  //     fields containing special characters, such as the delimiter or
-  //     quotechar, or which contain new-line characters. It defaults to '"'
-  //
-  // @param {Integer} skipInitialRows An integer number of rows to skip (default 0)
-  //
-  // Heavily based on uselesscode's JS CSV parser (MIT Licensed):
-  // http://www.uselesscode.org/javascript/csv/
-  my.parseCSV = function(s, options) {
-    // Get rid of any trailing \n
-    s = chomp(s);
-
-    var options = options || {};
-    var trm = (options.trim === false) ? false : true;
-    var delimiter = options.delimiter || ',';
-    var quotechar = options.quotechar || '"';
-
-    var cur = '', // The character we are currently processing.
-      inQuote = false,
-      fieldQuoted = false,
-      field = '', // Buffer for building up the current field
-      row = [],
-      out = [],
-      i,
-      processField;
-
-    processField = function (field) {
-      if (fieldQuoted !== true) {
-        // If field is empty set to null
-        if (field === '') {
-          field = null;
-        // If the field was not quoted and we are trimming fields, trim it
-        } else if (trm === true) {
-          field = trim(field);
-        }
-
-        // Convert unquoted numbers to their appropriate types
-        if (rxIsInt.test(field)) {
-          field = parseInt(field, 10);
-        } else if (rxIsFloat.test(field)) {
-          field = parseFloat(field, 10);
-        }
-      }
-      return field;
-    };
-
-    for (i = 0; i < s.length; i += 1) {
-      cur = s.charAt(i);
-
-      // If we are at a EOF or EOR
-      if (inQuote === false && (cur === delimiter || cur === "\n")) {
-        field = processField(field);
-        // Add the current field to the current row
-        row.push(field);
-        // If this is EOR append row to output and flush row
-        if (cur === "\n") {
-          out.push(row);
-          row = [];
-        }
-        // Flush the field buffer
-        field = '';
-        fieldQuoted = false;
-      } else {
-        // If it's not a quotechar, add it to the field buffer
-        if (cur !== quotechar) {
-          field += cur;
-        } else {
-          if (!inQuote) {
-            // We are not in a quote, start a quote
-            inQuote = true;
-            fieldQuoted = true;
-          } else {
-            // Next char is quotechar, this is an escaped quotechar
-            if (s.charAt(i + 1) === quotechar) {
-              field += quotechar;
-              // Skip the next char
-              i += 1;
-            } else {
-              // It's not escaping, so end quote
-              inQuote = false;
-            }
-          }
-        }
-      }
-    }
-
-    // Add the last field
-    field = processField(field);
-    row.push(field);
-    out.push(row);
-
-    // Expose the ability to discard initial rows
-    if (options.skipInitialRows) out = out.slice(options.skipInitialRows);
-
-    return out;
-  };
-
-  // ## serializeCSV
-  //
-  // Convert an Object or a simple array of arrays into a Comma
-  // Separated Values string.
-  //
-  // Nulls are converted to empty fields and integers or floats are converted to non-quoted numbers.
-  //
-  // @return The array serialized as a CSV
-  // @type String
-  //
-  // @param {Object or Array} dataToSerialize The Object or array of arrays to convert. Object structure must be as follows:
-  //
-  //     {
-  //       fields: [ {id: ...}, {id: ...}, ... ],
-  //       records: [ { record }, { record }, ... ]
-  //       ... // more attributes we do not care about
-  //     }
-  //
-  // @param {object} options Options for serializing the CSV file including
-  //   delimiter and quotechar (see parseCSV options parameter above for
-  //   details on these).
-  //
-  // Heavily based on uselesscode's JS CSV serializer (MIT Licensed):
-  // http://www.uselesscode.org/javascript/csv/
-  my.serializeCSV = function(dataToSerialize, options) {
-    var a = null;
-    if (dataToSerialize instanceof Array) {
-      a = dataToSerialize;
-    } else {
-      a = [];
-      var fieldNames = _.pluck(dataToSerialize.fields, 'id');
-      a.push(fieldNames);
-      _.each(dataToSerialize.records, function(record, index) {
-        var tmp = _.map(fieldNames, function(fn) {
-          return record[fn];
-        });
-        a.push(tmp);
-      });
-    }
-    var options = options || {};
-    var delimiter = options.delimiter || ',';
-    var quotechar = options.quotechar || '"';
-
-    var cur = '', // The character we are currently processing.
-      field = '', // Buffer for building up the current field
-      row = '',
-      out = '',
-      i,
-      j,
-      processField;
-
-    processField = function (field) {
-      if (field === null) {
-        // If field is null set to empty string
-        field = '';
-      } else if (typeof field === "string" && rxNeedsQuoting.test(field)) {
-        // Convert string to delimited string
-        field = quotechar + field + quotechar;
-      } else if (typeof field === "number") {
-        // Convert number to string
-        field = field.toString(10);
-      }
-
-      return field;
-    };
-
-    for (i = 0; i < a.length; i += 1) {
-      cur = a[i];
-
-      for (j = 0; j < cur.length; j += 1) {
-        field = processField(cur[j]);
-        // If this is EOR append row to output and flush row
-        if (j === (cur.length - 1)) {
-          row += field;
-          out += row + "\n";
-          row = '';
-        } else {
-          // Add the current field to the current row
-          row += field + delimiter;
-        }
-        // Flush the field buffer
-        field = '';
-      }
-    }
-
-    return out;
-  };
-
-  var rxIsInt = /^\d+$/,
-    rxIsFloat = /^\d*\.\d+$|^\d+\.\d*$/,
-    // If a string has leading or trailing space,
-    // contains a comma double quote or a newline
-    // it needs to be quoted in CSV output
-    rxNeedsQuoting = /^\s|\s$|,|"|\n/,
-    trim = (function () {
-      // Fx 3.1 has a native trim function, it's about 10x faster, use it if it exists
-      if (String.prototype.trim) {
-        return function (s) {
-          return s.trim();
-        };
-      } else {
-        return function (s) {
-          return s.replace(/^\s*/, '').replace(/\s*$/, '');
-        };
-      }
-    }());
-
-  function chomp(s) {
-    if (s.charAt(s.length - 1) !== "\n") {
-      // Does not end with \n, just return string
-      return s;
-    } else {
-      // Remove the \n
-      return s.substring(0, s.length - 1);
-    }
-  }
-
-}(this.recline.Backend.CSV));
 this.recline = this.recline || {};
 this.recline.Backend = this.recline.Backend || {};
 this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {};
 
 (function(my) {
@@ -2813,7 +2506,7 @@ this.recline.View = this.recline.View || {};
 // Manage multiple views together along with query editor etc. Usage:
 //
 // <pre>
-// var myExplorer = new model.recline.MultiView({
+// var myExplorer = new recline.View.MultiView({
 //   model: {{recline.Model.Dataset instance}}
 //   el: {{an existing dom element}}
 //   views: {{dataset views}}
@@ -2863,7 +2556,7 @@ this.recline.View = this.recline.View || {};
 // {
 //   id: 'filterEditor', // used for routing
 //   label: 'Filters', // used for view switcher
-//   view: new recline.View.FielterEditor({
+//   view: new recline.View.FilterEditor({
 //     model: dataset
 //   })
 // },
@@ -2881,10 +2574,10 @@ this.recline.View = this.recline.View || {};
 // special as it includes config of many of the subviews.
 //
 // <pre>
-// state = {
+// var state = {
 //   query: {dataset query state - see dataset.queryState object}
-//   view-{id1}: {view-state for this view}
-//   view-{id2}: {view-state for }
+//   'view-{id1}': {view-state for this view}
+//   'view-{id2}': {view-state for }
 //   ...
 //   // Explorer
 //   currentView: id of current view (defaults to first view if not specified)
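A hedged sketch pulling the corrected doc comments above together; the element id is hypothetical and the `views` option is left to its defaults:

{% highlight javascript %}
// Build a MultiView around an existing recline.Model.Dataset instance
var myExplorer = new recline.View.MultiView({
  model: dataset,
  el: jQuery('#data-explorer'), // an existing DOM element (hypothetical id)
  state: {
    currentView: 'grid',        // defaults to the first view if not specified
    'view-grid': {}             // per-view state is keyed as 'view-{id}'
  }
});
{% endhighlight %}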
@@ -20,17 +20,25 @@ on the backend.
 
 Backends come in 2 flavours:
 
-1. Loader backends - only implement the fetch method. The data is then cached in a Memory.Store on the Dataset and interacted with there. This is best for sources which just allow you to load data or where you want to load the data once and work with it locally.
-2. Store backends - these support fetch, query and, if write-enabled, save. These are suitable where the backend contains a lot of data (infeasible to load locally - for example a million rows) or where the backend has capabilities you want to take advantage of.
+* Loader backends - only implement the fetch method. The data is then cached in a
+  Memory.Store on the Dataset and interacted with there. This is best for
+  sources which just allow you to load data or where you want to load the data
+  once and work with it locally.
+* Store backends - these support fetch, query and, if write-enabled, save.
+  These are suitable where the backend contains a lot of data (infeasible to
+  load locally - for example a million rows) or where the backend has
+  capabilities you want to take advantage of.
 
-# Available Backends
+# List of Backends Shipped with Recline
 
-NB: examples of the 2 types of backends are provided by the Google docs backend (a "Loader" backend) and the ElasticSearch backend (a Store backend).
+Examples of the 2 types of backends are provided by the Google docs backend (a
+"Loader" backend) and the ElasticSearch backend (a Store backend).
 
-{% include backend-list.html %}
+You can find a list of the available Backends along with examples of how to use
+them in the [Backends Tutorial](tutorial-backends.html).
 
-It's easy to write your own backend - you just need to implement the API as described below.
+Note that it's easy to write your own backend - you just need to implement the
+Recline Backend API described below.
 
 # Backend API
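As a sketch of how small that API can be, here is a hypothetical "Loader" backend, modelled on the fetch contract used by the csv backend removed in this commit (module boilerplate, a Deferred promise, and a `useMemoryStore: true` result); the `Example` name and the hardcoded records are illustrative only:

{% highlight javascript %}
this.recline = this.recline || {};
this.recline.Backend = this.recline.Backend || {};
// 'Example' is a hypothetical backend name used for illustration
this.recline.Backend.Example = this.recline.Backend.Example || {};

(function(my) {
  "use strict";
  my.__type__ = 'example';

  // use either jQuery or Underscore Deferred depending on what is available
  var Deferred = (typeof jQuery !== "undefined" && jQuery.Deferred) || _.Deferred;

  // A Loader backend only needs fetch: resolve to records (plus optional
  // fields) and set useMemoryStore so the data is cached in a Memory.Store
  my.fetch = function(dataset) {
    var dfd = new Deferred();
    dfd.resolve({
      fields: ['x', 'y'],
      records: [[1, 2], [3, 4]],
      useMemoryStore: true
    });
    return dfd.promise();
  };
}(this.recline.Backend.Example));
{% endhighlight %}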
@@ -52,10 +52,6 @@ root: ../
 <li><a href="src/model.html">Models</a></li>
 </ul>
 </div>
-<div class="span4">
-<h4>Backends</h4>
-{% include backend-list.html %}
-</div>
 <div class="span4">
 <h4>Dataset Views and Widgets</h4>
 <ul>
@@ -14,26 +14,25 @@ sources such as Google Docs or the DataHub using Recline</small>
 </h1>
 </div>
 
 <div class="alert alert-info">
 <p><strong>Note</strong>: often you are loading data from a given source in
 order to load it into a Recline Dataset and display it in a View. However, you
 can also happily use a Backend to load data on its own without using any other
 part of the Recline library as all the Backends are designed to have no
 dependency on other parts of Recline.</p>
 </div>
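A small sketch of that standalone use, assuming only the csv backend's fetch contract as it appears in the source removed by this commit:

{% highlight javascript %}
// No Dataset or View involved: call the backend's fetch directly
recline.Backend.CSV.fetch({data: 'a,b\n1,2\n'}).done(function(result) {
  console.log(result.fields);  // => ['a', 'b']
  console.log(result.records); // => [[1, 2]] (unquoted numbers become ints)
});
{% endhighlight %}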
 ## Overview
 
-Backends connect Dataset and Documents to data from a specific 'Backend' data
-source. They provide methods for loading and saving Datasets and individuals
+Backends connect Recline Datasets to data from a specific 'Backend' data
+source.
+
+They provide methods for loading and saving Datasets and individual
 Documents as well as for bulk loading via a query API and doing bulk transforms
 on the backend.
 
 Backends come in 2 flavours:
 
-1. Loader backends - only implement the fetch method. The data is then cached in a Memory.Store on the Dataset and interacted with there. This is best for sources which just allow you to load data or where you want to load the data once and work with it locally.
-2. Store backends - these support fetch, query and, if write-enabled, save. These are suitable where the backend contains a lot of data (infeasible to load locally - for example a million rows) or where the backend has capabilities you want to take advantage of.
+* Loader backends - only implement the fetch method. The data is then cached in a
+  Memory.Store on the Dataset and interacted with there. This is best for
+  sources which just allow you to load data or where you want to load the data
+  once and work with it locally.
+* Store backends - these support fetch, query and, if write-enabled, save.
+  These are suited to cases where the source datastore contains a lot of data
+  (infeasible to load locally - for example a million rows) or where the
+  backend has, for example, query capabilities you want to take advantage of.
 
 ### Instantiation and Use
@@ -58,13 +57,15 @@ How do you know the backend identifier for a given Backend? It's just the name
 of the 'class' in the recline.Backend module (but case-insensitive). E.g.
 recline.Backend.ElasticSearch can be identified as 'ElasticSearch' or
 'elasticsearch'.</p>
-<p><strong>What Backends are available from Recline?</strong>
-{% include backend-list.html %}
-</p>
-<p><strong>Backend you'd like to see not available?</strong> It's easy to write your own – see the <a href="backends.html">Backend reference docs</a> for details of the required API.
-</p>
 </div>
 
+## What Backends are available from Recline?
+
+{% include backend-list.html %}
+
+**Backend you'd like to see not available?** It's easy to write your own
+– see the <a href="backends.html">Backend reference docs</a> for details
+of the required API.
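For instance (a hedged illustration; the URL is a placeholder for a real ElasticSearch endpoint):

{% highlight javascript %}
// The backend attribute is just the (case-insensitive) 'class' name
// in the recline.Backend module
var dataset = new recline.Model.Dataset({
  url: 'http://localhost:9200/myindex/mytype', // hypothetical endpoint
  backend: 'elasticsearch'
});
{% endhighlight %}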
## Preparing your app

@@ -76,8 +77,9 @@ much more limited if you are just using a Backend. Specifically:
 <script type="text/javascript" src="vendor/jquery/1.7.1/jquery.js"></script>
 <script type="text/javascript" src="vendor/underscore/1.1.6/underscore.js"></script>
 <script type="text/javascript" src="vendor/backbone/0.5.1/backbone.js"></script>
 
 <!-- include the backend code you need e.g. here for csv -->
-<script type="text/javascript" src="src/backend.csv.js"></script>
+<script type="text/javascript" src="http://okfnlabs.org/csv.js/csv.js"></script>
 
+<!-- Or you can just include all of recline. -->
 <script type="text/javascript" src="dist/recline.js"></script>
@@ -91,13 +93,6 @@ Doc](https://docs.google.com/spreadsheet/ccc?key=0Aon3JiuouxLUdGZPaUZsMjBxeGhfOW
 For Recline to be able to access a Google Spreadsheet it **must** have been
 'Published to the Web' (enabled via File -> Publish to the Web menu).
 
-<div class="alert alert-info">
-<strong>Want a real world example?</strong> This <a
-href="http://dashboard.opengovernmentdata.org/census/">Open Data Census micro-app</a> loads
-data from Google Docs and then displays it on a specialist interface combining
-a bespoke chooser and a Kartograph (svg-only) map.
-</div>
-
 {% highlight javascript %}
 // include the Recline backend for Google Docs
 <script type="text/javascript" src="http://okfnlabs.org/recline.backend.gdocs/backend.gdocs.js"></script>
@@ -131,6 +126,13 @@ For loading data from CSV files there are 3 cases:
 2. CSV is on local disk -- if your browser supports the HTML5 File API we can load the CSV file off disk
 3. CSV is online but not on same domain -- use DataProxy (see below)
 
+In all cases we'll need to have loaded the Recline CSV backend (for your own
+app you'll probably want this locally):
+
+{% highlight html %}
+<script type="text/javascript" src="http://okfnlabs.org/csv.js/csv.js"></script>
+{% endhighlight %}
+
 ### Local online CSV file
 
 Let's start with the first case: loading a "local" online CSV file. We'll be using this [example file]({{page.root}}/demos/data/sample.csv).
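For the second case (local disk), here is a hedged sketch wiring an HTML5 file input to the `file` attribute handled by the backend's fetch; the `#file` element id is hypothetical:

{% highlight javascript %}
// Case 2: load a CSV file off local disk via the HTML5 File API.
document.querySelector('#file').addEventListener('change', function(e) {
  var dataset = new recline.Model.Dataset({
    file: e.target.files[0],
    backend: 'csv',
    encoding: 'UTF-8' // optional; UTF-8 is the backend's default
  });
  dataset.fetch().done(function() {
    console.log(dataset.records.length + ' records loaded');
  });
});
{% endhighlight %}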
@@ -1,307 +0,0 @@
src/backend.csv.js deleted in full; its 307 lines are identical to the backend.csv.js code removed from dist/recline.js above, so they are not repeated here.
@@ -1,112 +0,0 @@
-(function ($) {
-module("Backend Local CSV");
-
-test("parseCSV", function() {
-  var csv = '"Jones, Jay",10\n' +
-    '"Xyz ""ABC"" O\'Brien",11:35\n' +
-    '"Other, AN",12:35\n';
-
-  var array = recline.Backend.CSV.parseCSV(csv);
-  var exp = [
-    ['Jones, Jay', 10],
-    ['Xyz "ABC" O\'Brien', '11:35' ],
-    ['Other, AN', '12:35' ]
-  ];
-  deepEqual(exp, array);
-
-  var csv = '"Jones, Jay", 10\n' +
-    '"Xyz ""ABC"" O\'Brien", 11:35\n' +
-    '"Other, AN", 12:35\n';
-  var array = recline.Backend.CSV.parseCSV(csv, {trim: true});
-  deepEqual(exp, array);
-
-  var csv = 'Name, Value\n' +
-    '"Jones, Jay", 10\n' +
-    '"Xyz ""ABC"" O\'Brien", 11:35\n' +
-    '"Other, AN", 12:35\n';
-  var dataset = new recline.Model.Dataset({
-    data: csv,
-    backend: 'csv'
-  });
-  dataset.fetch();
-  equal(dataset.records.length, 3);
-  var row = dataset.records.models[0].toJSON();
-  deepEqual(row, {Name: 'Jones, Jay', Value: 10});
-});
-
-test("parseCSV - semicolon", function() {
-  var csv = '"Jones; Jay";10\n' +
-    '"Xyz ""ABC"" O\'Brien";11:35\n' +
-    '"Other; AN";12:35\n';
-
-  var array = recline.Backend.CSV.parseCSV(csv, {delimiter: ';'});
-  var exp = [
-    ['Jones; Jay', 10],
-    ['Xyz "ABC" O\'Brien', '11:35' ],
-    ['Other; AN', '12:35' ]
-  ];
-  deepEqual(exp, array);
-});
-
-test("parseCSV - quotechar", function() {
-  var csv = "'Jones, Jay',10\n" +
-    "'Xyz \"ABC\" O''Brien',11:35\n" +
-    "'Other; AN',12:35\n";
-
-  var array = recline.Backend.CSV.parseCSV(csv, {quotechar: "'"});
-  var exp = [
-    ["Jones, Jay", 10],
-    ["Xyz \"ABC\" O'Brien", "11:35" ],
-    ["Other; AN", "12:35" ]
-  ];
-  deepEqual(exp, array);
-});
-
-test("parseCSV skipInitialRows", function() {
-  var csv = '"Jones, Jay",10\n' +
-    '"Xyz ""ABC"" O\'Brien",11:35\n' +
-    '"Other, AN",12:35\n';
-
-  var array = recline.Backend.CSV.parseCSV(csv, {skipInitialRows: 1});
-  var exp = [
-    ['Xyz "ABC" O\'Brien', '11:35' ],
-    ['Other, AN', '12:35' ]
-  ];
-  deepEqual(exp, array);
-});
-
-test("serializeCSV - Array", function() {
-  var csv = [
-    ['Jones, Jay', 10],
-    ['Xyz "ABC" O\'Brien', '11:35' ],
-    ['Other, AN', '12:35' ]
-  ];
-
-  var array = recline.Backend.CSV.serializeCSV(csv);
-  var exp = '"Jones, Jay",10\n' +
-    '"Xyz \"ABC\" O\'Brien",11:35\n' +
-    '"Other, AN",12:35\n';
-  deepEqual(array, exp);
-});
-
-test("serializeCSV - Object", function() {
-  var indata = {
-    fields: [ {id: 'name'}, {id: 'number'} ],
-    records: [
-      {name: 'Jones, Jay', number: 10},
-      {name: 'Xyz "ABC" O\'Brien', number: '11:35' },
-      {name: 'Other, AN', number: '12:35' }
-    ]
-  };
-
-  var array = recline.Backend.CSV.serializeCSV(indata);
-  var exp = 'name,number\n' +
-    '"Jones, Jay",10\n' +
-    '"Xyz \"ABC\" O\'Brien",11:35\n' +
-    '"Other, AN",12:35\n';
-  deepEqual(array, exp);
-});
-
-})(this.jQuery);