Merge branch 'master' into gh-pages

This commit is contained in:
Rufus Pollock 2012-06-24 19:55:24 +01:00
commit cc5bb48512
25 changed files with 1153 additions and 896 deletions

View File

@ -9,7 +9,7 @@
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<link rel="stylesheet" href="vendor/bootstrap/2.0.2/css/bootstrap.css" />
<link rel="stylesheet" href="{{ page.root }}vendor/bootstrap/2.0.2/css/bootstrap.css" />
{% if page.recline-deps %}
{% include recline-deps.html %}
@ -17,8 +17,8 @@
<!-- link rel="stylesheet" href="vendor/bootstrap/2.0.2/css/bootstrap-responsive.css" -->
<link href="css/site/pygments.css" rel="stylesheet" type="text/css" />
<link href="css/site/site.css" rel="stylesheet" type="text/css" />
<link href="{{ page.root }}css/site/pygments.css" rel="stylesheet" type="text/css" />
<link href="{{ page.root }}css/site/site.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div class="navbar navbar-fixed-top">
@ -30,7 +30,7 @@
<a class="brand" href="/"><strong>Recline.js</strong> &ndash; relax with your data</a>
<ul class="nav">
<li>
<a href="library.html">
<a href="/docs/library.html">
<i class="icon-book icon-white"></i>
Documentation
</a>

View File

@ -202,17 +202,17 @@ var ExplorerApp = Backbone.View.extend({
var $form = $(e.target);
$('.modal.js-load-dialog-file').modal('hide');
var $file = $form.find('input[type="file"]')[0];
var file = $file.files[0];
var options = {
separator : $form.find('input[name="separator"]').val(),
delimiter : $form.find('input[name="delimiter"]').val(),
encoding : $form.find('input[name="encoding"]').val()
};
recline.Backend.CSV.load(file, function(dataset) {
self.createExplorer(dataset)
var dataset = new recline.Model.Dataset({
file: $file.files[0],
separator : $form.find('input[name="separator"]').val(),
delimiter : $form.find('input[name="delimiter"]').val(),
encoding : $form.find('input[name="encoding"]').val()
},
options
'csv'
);
dataset.fetch().done(function() {
self.createExplorer(dataset)
});
},
_getSettings: function() {

122 docs/backends.markdown Normal file
View File

@ -0,0 +1,122 @@
---
layout: container
title: Backends
root: ../
---
<div class="page-header">
<h1>
Backends
<small>Connect to data sources</small>
</h1>
</div>
Backends come in two flavours:
1. Loader backends - only implement the fetch method. The data is then cached in a Memory.Store on the Dataset and interacted with there. This is best for sources which only allow you to load data, or where you want to load the data once and work with it locally.
2. Store backends - these support fetch, query and, if write-enabled, save. These are suitable where the backend contains a lot of data (infeasible to load locally - for example a million rows) or where the backend has capabilities you want to take advantage of.
Backend modules must implement the following API:
{% highlight javascript %}
__type__: 'name-of-backend' // e.g. elasticsearch
// Initial load of dataset including initial set of records
fetch: function(dataset)
// Query the backend for records returning them in bulk.
// This method will be used by the Dataset.query method to search the backend
// for records, retrieving the results in bulk.
query: function(queryObj, dataset)
// Save changes to the backend
save: function(changes, dataset)
{% endhighlight %}
Details of each function are given below. Note that:
* Each backend function takes a dataset object. This is not a Dataset object
but a simple JS object representation resulting from calling
Dataset.toJSON().
It is required because the Dataset attributes contain details of the specific
backend (e.g. the url for ElasticSearch).
* Each function returns a promise API object - that is, something conforming to
the jQuery promise API and, in particular, having done and fail functions.
### fetch: function(dataset)
On success, the promise callback must return an object with the following structure:
{% highlight javascript %}
{
// (optional) Set of record data
// Either an array of arrays *or* an array of objects corresponding to the initial set of records for this object
// May not be provided if data is only returned by query
records: [...]
// (optional) Set of field data
// Either an array of strings or an array of objects corresponding to the Field specification (see `Field` above)
fields: [ ... ] // as per recline.Model.Field
// (optional) metadata fields to set on the Dataset object
metadata: { title: ..., id: ... etc }
// boolean indicating whether to use a local memory store for managing this dataset
useMemoryStore:
}
{% endhighlight %}
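For illustration, here is a minimal sketch of a loader-style backend. Everything in it is hypothetical (the `myjson` name, the endpoint and the shape of the JSON it returns); only the structure of the resolved object follows the specification above:
{% highlight javascript %}
this.recline.Backend.MyJSON = this.recline.Backend.MyJSON || {};
(function($, my) {
  my.__type__ = 'myjson';

  // Fetch the whole (hypothetical) JSON endpoint in one go and hand the
  // data over to the memory store by setting useMemoryStore
  my.fetch = function(dataset) {
    var dfd = $.Deferred();
    $.getJSON(dataset.url)
      .done(function(data) {
        dfd.resolve({
          records: data.rows,          // assumed: array of row objects
          fields: data.columns,        // assumed: array of field ids
          metadata: { title: data.title },
          useMemoryStore: true
        });
      })
      .fail(function(args) {
        dfd.reject(args);
      });
    return dfd.promise();
  };
}(jQuery, this.recline.Backend.MyJSON));
{% endhighlight %}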
### query: function(queryObj, dataset)
`queryObj`: JS object following the <a href="models.html#query-structure">Query specification</a> in the Models documentation.
#### Callbacks
On success, the promise must resolve with a 'QueryResult' object which has the following structure:
{% highlight javascript %}
{
// total number of results (can be null)
total: ...
// one entry for each result record
hits: [
{
// JS object that can be used to initialize a Record object
}
],
// (optional)
facets: {
// facet results (as per <http://www.elasticsearch.org/guide/reference/api/search/facets/>)
}
}
{% endhighlight %}
The QueryResult is partially modelled on ElasticSearch - see <a
href="https://github.com/okfn/recline/issues/57">this issue for more
details</a>.
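As a sketch only: a store-style query for a hypothetical search service that already returns results in roughly this shape (the `/search` path and the `total`/`rows` response keys are assumptions, not part of any real backend):
{% highlight javascript %}
// Forward the relevant parts of queryObj to the (hypothetical) service and
// map its response onto the QueryResult structure described above
my.query = function(queryObj, dataset) {
  var dfd = $.Deferred();
  $.getJSON(dataset.url + '/search', {
    q: queryObj.q,
    size: queryObj.size,
    from: queryObj.from
  })
    .done(function(results) {
      dfd.resolve({
        total: results.total,   // may be null if the service does not report it
        hits: results.rows      // one plain JS object per record
      });
    })
    .fail(function(args) {
      dfd.reject(args);
    });
  return dfd.promise();
};
{% endhighlight %}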
### save: function(changes, dataset)
<div class="alert alert-warning">The save function is still being revised and
its API and arguments are subject to change</div>
`changes`: an object with the following structure:
{% highlight javascript %}
{
creates: [ record.toJSON(), record.toJSON(), ... ]
updates: [ ... ]
deletes: [ ... ]
}
{% endhighlight %}
Each key has an array of records (as simple JS objects resulting from a call to
Record.toJSON()) that are in that state.
The backend should take appropriate actions for each case.
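A minimal sketch of a save implementation that just dispatches each change to the relevant operation (the `createRecord`, `updateRecord` and `deleteRecord` helpers are hypothetical stand-ins for whatever API the underlying store exposes):
{% highlight javascript %}
my.save = function(changes, dataset) {
  var dfd = $.Deferred();
  // Each key of changes holds an array of record objects in that state
  _.each(changes.creates, function(record) { createRecord(dataset, record); });
  _.each(changes.updates, function(record) { updateRecord(dataset, record); });
  _.each(changes.deletes, function(record) { deleteRecord(dataset, record); });
  dfd.resolve();
  return dfd.promise();
};
{% endhighlight %}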

View File

@ -1,6 +1,7 @@
---
layout: default
title: Library - Home
root: ../
---
<div class="container library">
@ -24,19 +25,19 @@ title: Library - Home
<div class="span4">
<div class="well">
<h3>Models</h3>
<p>Models help you structure your work with data by providing some standard objects such as Dataset and Record &ndash; a Dataset being a collection of Records. <a href="docs/model.html">More &raquo;</a></p>
<p>Models help you structure your work with data by providing some standard objects such as Dataset and Record &ndash; a Dataset being a collection of Records. <a href="models.html">More &raquo;</a></p>
</div>
</div>
<div class="span4">
<div class="well">
<h3>Backends</h3>
<p>Backends connect your Models to data sources (and stores) &ndash; for example Google Docs spreadsheets, local CSV files, the DataHub, ElasticSearch etc. <a href="example-backends.html">More &raquo;</a></p>
<p>Backends connect your Models to data sources (and stores) &ndash; for example Google Docs spreadsheets, local CSV files, the DataHub, ElasticSearch etc. <a href="backends.html">More &raquo;</a></p>
</div>
</div>
<div class="span4">
<div class="well">
<h3>Views</h3>
<p>Views are user interface components for displaying, editing or interacting with the data. For example, maps, graphs, data grids or a query editor. <a href="library-view.html">More &raquo;</a></p>
<p>Views are user interface components for displaying, editing or interacting with the data. For example, maps, graphs, data grids or a query editor. <a href="views.html">More &raquo;</a></p>
</div>
</div>
</div>
@ -106,27 +107,27 @@ title: Library - Home
<div class="span4">
<h4>Models</h4>
<ul>
<li><a href="docs/model.html">Models</a></li>
<li><a href="model.html">Models</a></li>
</ul>
</div>
<div class="span4">
<h4>Backends</h4>
<ul>
<li><a href="docs/backend/base.html">Base module providing convenience functions</a></li>
<li><a href="docs/backend/memory.html">memory: Memory Backend (local data)</a></li>
<li><a href="docs/backend/elasticsearch.html">elasticsearch: ElasticSearch Backend</a></li>
<li><a href="docs/backend/dataproxy.html">dataproxy: DataProxy Backend (CSV and XLS on the Web)</a></li>
<li><a href="docs/backend/gdocs.html">gdocs: Google Docs (Spreadsheet) Backend</a></li>
<li><a href="docs/backend/csv.html">csv: Local CSV file backend</a></li>
<li><a href="backend/base.html">Base module providing convenience functions</a></li>
<li><a href="backend/memory.html">memory: Memory Backend (local data)</a></li>
<li><a href="backend/elasticsearch.html">elasticsearch: ElasticSearch Backend</a></li>
<li><a href="backend/dataproxy.html">dataproxy: DataProxy Backend (CSV and XLS on the Web)</a></li>
<li><a href="backend/gdocs.html">gdocs: Google Docs (Spreadsheet) Backend</a></li>
<li><a href="backend/csv.html">csv: Local CSV file backend</a></li>
</ul>
</div>
<div class="span4">
<h4>Dataset Views and Widgets</h4>
<ul>
<li><a href="docs/view.multiview.html">MultiView View (plus common view code)</a></li>
<li><a href="docs/view-grid.html">(Data) Grid View</a></li>
<li><a href="docs/view-graph.html">Graph View (based on Flot)</a></li>
<li><a href="docs/view-map.html">Map View (based on Leaflet)</a></li>
<li><a href="view.multiview.html">MultiView View (plus common view code)</a></li>
<li><a href="view-grid.html">(Data) Grid View</a></li>
<li><a href="view-graph.html">Graph View (based on Flot)</a></li>
<li><a href="view-map.html">Map View (based on Leaflet)</a></li>
</ul>
</div>
</div>

250 docs/models.markdown Normal file
View File

@ -0,0 +1,250 @@
---
layout: container
title: Models
root: ../
---
<div class="page-header">
<h1>
Models
</h1>
</div>
Models help you structure your work with data by providing some standard
objects. The key ones are Dataset and Record -- a Dataset being a collection of
Records. Additionally, there is a Field object for describing the columns of
a Dataset, a Query object for describing queries, and a Facet object for
holding summary information about a Field (or multiple Fields).
# Models
All the models are Backbone models, that is, they extend Backbone.Model. Note,
however, that they do not 'sync' (load/save) like normal Backbone models.
## Dataset
A Dataset is *the* central object in Recline. Standard usage is:
{% highlight javascript %}
var dataset = new recline.Model.Dataset({
// general metadata e.g.
id: ...
title: ...
// information about data source e.g.
url: 'http://url.to.my.data.endpoint/',
// backend - either a backend name string or a backend object - see below
backend: ...
});
// initialize dataset with data from the backend.
dataset.fetch();
// we will now have the following (and more) set up - see below for details
dataset.fields // collection of Fields (columns) for this Dataset
dataset.currentRecords // collection of Records resulting from latest query
dataset.docCount // total number of Records in the last query
{% endhighlight %}
### Key Attributes
* currentRecords: a collection of `Record`s currently loaded for viewing
(updated by calling the query method) - note that this need <strong>not</strong>
be all the records in the dataset (for example, you may have connected to a
source where the complete dataset contains a million records but you have
only loaded 1000 records)
* fields: (aka columns) is a Backbone collection of `Field`s listing all the
fields on this Dataset (this can be set explicitly or will be set by
Dataset.fetch())
* docCount: total number of records in this dataset
* backend: the Backend (instance) for this Dataset.
* queryState: a `Query` object which stores the current query state. queryState may
be edited by other components (e.g. a query editor view); changes will trigger
a Dataset query.
* facets: a collection of `Facet`s
### Querying
{% highlight javascript %}
dataset.query(queryObj)
{% endhighlight %}
`queryObj`: an object following the <a href="#query-structure">query
specification below</a>.
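For example (the query values are illustrative only):
{% highlight javascript %}
// free-text search limited to 20 records; the promise resolves with
// dataset.currentRecords once the backend has responded
dataset.query({q: 'quick brown fox', size: 20})
  .done(function(records) {
    console.log(dataset.docCount, records.length);
  })
  .fail(function(error) {
    console.log('query failed', error);
  });
{% endhighlight %}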
<h2 id="record">Record (aka Row)</h2>
A Record is a single entry or row in a dataset. A Record needs little more than
what is provided by the standard Backbone Model object. In general, you will
never create a Record directly -- they will get created for you by Datasets
from query results.
<h2 id="field">Field (aka Column)</h2>
A Field should have the following attributes as standard:
{% highlight javascript %}
var field = new Field({
// a unique identifier for this field - usually this should match the key in the records hash
id: 'my-field-id',
// (optional: defaults to id) the visible label used for this field
label: 'My Field Name',
// (optional: defaults to string) the type of the data in this field.
// Should be a string as per type names defined by ElasticSearch - see
// Types list on <http://www.elasticsearch.org/guide/reference/mapping/>
type: 'string',
// (optional - defaults to null) used to indicate how the data should be
// formatted. See below.
format: null,
// (default: false) attribute indicating this field has no backend data but
// is just derived from other fields (see below).
is_derived: false
});
{% endhighlight %}
#### Rendering, types and formats
One can customize the rendering of fields in the user interface and elsewhere
by setting a renderer function on the field. You do this by setting a field
attribute:
{% highlight javascript %}
myfield.renderer = myRenderFunction;
{% endhighlight %}
Your renderer function should have the following signature:
function(value, field, record)
Where the arguments passed in are as follows:
* `value`: the value of the cell (record value for this field)
* `field`: corresponding `Field` object
* `record`: the `Record` object (as a simple JS object)
Note that implementing functions can ignore arguments (e.g. function(value)
would be a valid formatter function).
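For example, a renderer that formats a float field as a percentage might look like this (illustrative only; `myfield` is assumed to be a `Field` instance of type float):
{% highlight javascript %}
myfield.renderer = function(value, field, record) {
  // e.g. 0.223 -> "22.3%"
  return (parseFloat(value) * 100).toFixed(1) + '%';
};
{% endhighlight %}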
To guide the behaviour of renderers we have type and format information.
Example types and formats are:
* type=date, format=yyyy-mm-dd
* type=float, format=percentage
* type=string, format=markdown (render as markdown if Showdown available)
Default renderers are provided - see the source for details, but a few examples
are:
* type = string
* no format provided: pass through but convert http:// to hyperlinks
* format = plain: do no processing on the source text
* format = markdown: process as markdown (if Showdown library available)
* type = float
* format = percentage: format as a percentage
#### Derived fields
Some fields may be 'derived' from other fields. This allows you to define an
entirely new value for data in this field. This provides support for a)
'derived/computed' fields, i.e. fields whose data are functions of the data in
other fields, and b) transforming the value of this field prior to rendering.
To use derived fields, set a `deriver` function on the Field. This function will
be used to derive/compute the value of data in this field as a function of this
field's value (if any) and the current record. Its signature and behaviour are
the same as for the renderer.
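For example, a deriver that computes a full name from two other fields (the `fullNameField` variable and the `first_name`/`last_name` record keys are hypothetical):
{% highlight javascript %}
fullNameField.deriver = function(value, field, record) {
  // record is the full record as a simple JS object
  return record.first_name + ' ' + record.last_name;
};
{% endhighlight %}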
<h2 id="query">Query</h2>
Query instances encapsulate a query to the backend (see <a
href="backend/base.html">query method on backend</a>). Useful both
for creating queries and for storing and manipulating query state -
e.g. from a query editor.
<h3 id="query-structure">Query Structure and format</h3>
Query structure should follow that of [ElasticSearch query
language](http://www.elasticsearch.org/guide/reference/api/search/).
**NB: It is up to specific backends how to implement and support this query
structure. Different backends might choose to implement things differently
or not support certain features. Please check your backend for details.**
Query object has the following key attributes:
* size (=limit): number of results to return
* from (=offset): offset into result set - http://www.elasticsearch.org/guide/reference/api/search/from-size.html
* sort: sort order - <http://www.elasticsearch.org/guide/reference/api/search/sort.html>
* query: Query in ES Query DSL <http://www.elasticsearch.org/guide/reference/api/search/query.html>
* filter: See filters and <a href="http://www.elasticsearch.org/guide/reference/query-dsl/filtered-query.html">Filtered Query</a>
* fields: set of fields to return - http://www.elasticsearch.org/guide/reference/api/search/fields.html
* facets: specification of facets - see http://www.elasticsearch.org/guide/reference/api/search/facets/
Additions:
* q: either straight text or a hash will map directly onto a [query_string
query](http://www.elasticsearch.org/guide/reference/query-dsl/query-string-query.html)
in backend
* Of course this can be re-interpreted by different backends, e.g. some may
just pass this straight through; for an SQL backend this could be the
full SQL query
* filters: array of ElasticSearch filters. These will be and-ed together for
execution.
#### Examples
<pre>
{
q: 'quick brown fox',
filters: [
{ term: { 'owner': 'jones' } }
]
}
</pre>
<h2>Facet <small>&ndash; Store summary information (e.g. values and counts) about a field obtained by some 'faceting' or 'group by' method</small>
</h2>
Structure of a facet follows that of Facet results in ElasticSearch, see:
<http://www.elasticsearch.org/guide/reference/api/search/facets/>
Specifically the object structure of a facet looks like (there is one
addition compared to ElasticSearch: the "id" field which corresponds to the
key used to specify this facet in the facet query):
{% highlight javascript %}
{
id: "id-of-facet",
// type of this facet (terms, range, histogram etc)
_type: "terms",
// total number of tokens in the facet
total: 5,
// number of records which have no value for the field
missing: 0,
// number of facet values not included in the returned facets
other: 0,
// term object ({term: , count: ...})
terms: [ {
"term" : "foo",
"count" : 2
}, {
"term" : "bar",
"count" : 2
}, {
"term" : "baz",
"count" : 1
}
]
}
{% endhighlight %}
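Facets are requested via the query. As a sketch, using the Query object's addFacet helper on a dataset's queryState (the 'owner' field is illustrative):
{% highlight javascript %}
dataset.bind('query:done', function() {
  // summary results are now available as Facet objects on dataset.facets
  dataset.facets.each(function(facet) {
    console.log(facet.get('id'), facet.get('terms'));
  });
});
// ask for a terms facet on the 'owner' field; the Dataset listens for
// 'facet:add' on its queryState and will re-query the backend
dataset.queryState.addFacet('owner');
{% endhighlight %}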

View File

@ -1,6 +1,7 @@
---
layout: container
title: Library - Views
root: ../
---
<div class="page-header">

View File

@ -1,113 +0,0 @@
// # Recline Backends
//
// Backends are connectors to backend data sources and stores
//
// This is just the base module containing a template Base class and convenience methods.
this.recline = this.recline || {};
this.recline.Backend = this.recline.Backend || {};
// ## recline.Backend.Base
//
// Exemplar 'class' for backends showing what a base class would look like.
this.recline.Backend.Base = function() {
// ### __type__
//
// 'type' of this backend. This should be either the class path for this
// object as a string (e.g. recline.Backend.Memory) or for Backends within
// recline.Backend module it may be their class name.
//
// This value is used as an identifier for this backend when initializing
// backends (see recline.Model.Dataset.initialize).
this.__type__ = 'base';
// ### readonly
//
// Class level attribute indicating that this backend is read-only (that
// is, cannot be written to).
this.readonly = true;
// ### sync
//
// An implementation of Backbone.sync that will be used to override
// Backbone.sync on operations for Datasets and Records which are using this backend.
//
// For read-only implementations you will need only to implement read method
// for Dataset models (and even this can be a null operation). The read method
// should return relevant metadata for the Dataset. We do not require read support
// for Records because they are loaded in bulk by the query method.
//
// For backends supporting write operations you must implement update and delete support for Record objects.
//
// All code paths should return an object conforming to the jquery promise API.
this.sync = function(method, model, options) {
},
// ### query
//
// Query the backend for records returning them in bulk. This method will
// be used by the Dataset.query method to search the backend for records,
// retrieving the results in bulk.
//
// @param {recline.model.Dataset} model: Dataset model.
//
// @param {Object} queryObj: object describing a query (usually produced by
// using recline.Model.Query and calling toJSON on it).
//
// The structure of data in the Query object or
// Hash should follow that defined in <a
// href="http://github.com/okfn/recline/issues/34">issue 34</a>.
// (Of course, if you are writing your own backend, and hence
// have control over the interpretation of the query object, you
// can use whatever structure you like).
//
// @returns {Promise} promise API object. The promise resolve method will
// be called on query completion with a QueryResult object.
//
// A QueryResult has the following structure (modelled closely on
// ElasticSearch - see <a
// href="https://github.com/okfn/recline/issues/57">this issue for more
// details</a>):
//
// <pre>
// {
// total: // (required) total number of results (can be null)
// hits: [ // (required) one entry for each result record
// {
// _score: // (optional) match score for record
// _type: // (optional) record type
// _source: // (required) record/row object
// }
// ],
// facets: { // (optional)
// // facet results (as per <http://www.elasticsearch.org/guide/reference/api/search/facets/>)
// }
// }
// </pre>
this.query = function(model, queryObj) {}
};
// ### makeRequest
//
// Just $.ajax but in any headers in the 'headers' attribute of this
// Backend instance. Example:
//
// <pre>
// var jqxhr = this._makeRequest({
// url: the-url
// });
// </pre>
this.recline.Backend.makeRequest = function(data, headers) {
var extras = {};
if (headers) {
extras = {
beforeSend: function(req) {
_.each(headers, function(value, key) {
req.setRequestHeader(key, value);
});
}
};
}
var data = _.extend(extras, data);
return $.ajax(data);
};

View File

@ -14,9 +14,9 @@ this.recline.Backend.CouchDB = this.recline.Backend.CouchDB || {};
// TODO Add user/password arguments for couchdb authentication support.
my.CouchDBWrapper = function(db_url, view_url, options) {
var self = this;
this.endpoint = db_url;
this.view_url = (view_url) ? view_url : db_url+'/'+'_all_docs';
this.options = _.extend({
self.endpoint = db_url;
self.view_url = (view_url) ? view_url : db_url+'/'+'_all_docs';
self.options = _.extend({
dataType: 'json'
},
options);
@ -45,9 +45,9 @@ this.recline.Backend.CouchDB = this.recline.Backend.CouchDB || {};
// @return promise compatible deferred object.
this.mapping = function() {
var schemaUrl = self.view_url + '?limit=1&include_docs=true';
var jqxhr = this._makeRequest({
var jqxhr = self._makeRequest({
url: schemaUrl,
dataType: this.options.dataType
dataType: self.options.dataType
});
return jqxhr;
};
@ -58,8 +58,8 @@ this.recline.Backend.CouchDB = this.recline.Backend.CouchDB || {};
//
// @return promise compatible deferred object.
this.get = function(_id) {
var base = this.endpoint + '/' + _id;
return this._makeRequest({
var base = self.endpoint + '/' + _id;
return self._makeRequest({
url: base,
dataType: 'json'
});
@ -73,13 +73,13 @@ this.recline.Backend.CouchDB = this.recline.Backend.CouchDB || {};
// @return deferred supporting promise API
this.upsert = function(doc) {
var data = JSON.stringify(doc);
url = this.endpoint;
url = self.endpoint;
if (doc._id) {
url += '/' + doc._id;
}
// use a PUT, not a POST to update the document:
// http://wiki.apache.org/couchdb/HTTP_Document_API#POST
return this._makeRequest({
return self._makeRequest({
url: url,
type: 'PUT',
data: data,
@ -95,9 +95,9 @@ this.recline.Backend.CouchDB = this.recline.Backend.CouchDB || {};
// @param {Object} id id of object to delete
// @return deferred supporting promise API
this.delete = function(_id) {
url = this.endpoint;
url = self.endpoint;
url += '/' + _id;
return this._makeRequest({
return self._makeRequest({
url: url,
type: 'DELETE',
dataType: 'json'
@ -131,14 +131,14 @@ this.recline.Backend.CouchDB = this.recline.Backend.CouchDB || {};
// @param {Object} additional couchdb view query options.
// @return deferred supporting promise API
this.query = function(query_object, query_options) {
var norm_q = this._normalizeQuery(query_object);
var url = this.view_url;
var norm_q = self._normalizeQuery(query_object);
var url = self.view_url;
var q = _.extend(query_options, norm_q);
var jqxhr = this._makeRequest({
var jqxhr = self._makeRequest({
url: url,
data: JSON.stringify(q),
dataType: this.options.dataType,
dataType: self.options.dataType,
});
return jqxhr;
}

View File

@ -3,45 +3,50 @@ this.recline.Backend = this.recline.Backend || {};
this.recline.Backend.CSV = this.recline.Backend.CSV || {};
(function(my) {
// ## load
// ## fetch
//
// Load data from a CSV file referenced in an HTML5 file object returning the
// dataset in the callback
// 3 options
//
// @param options as for parseCSV below
my.load = function(file, callback, options) {
var encoding = options.encoding || 'UTF-8';
var metadata = {
id: file.name,
file: file
};
var reader = new FileReader();
// TODO
reader.onload = function(e) {
var dataset = my.csvToDataset(e.target.result, options);
callback(dataset);
};
reader.onerror = function (e) {
alert('Failed to load file. Code: ' + e.target.error.code);
};
reader.readAsText(file, encoding);
};
my.csvToDataset = function(csvString, options) {
var out = my.parseCSV(csvString, options);
fields = _.map(out[0], function(cell) {
return { id: cell, label: cell };
});
var data = _.map(out.slice(1), function(row) {
var _doc = {};
_.each(out[0], function(fieldId, idx) {
_doc[fieldId] = row[idx];
// 1. CSV local fileobject -> HTML5 file object + CSV parser
// 2. Already have CSV string (in data) attribute -> CSV parser
// 3. online CSV file that is ajax-able -> ajax + csv parser
//
// All options generate similar data and give a memory store outcome
my.fetch = function(dataset) {
var dfd = $.Deferred();
if (dataset.file) {
var reader = new FileReader();
var encoding = dataset.encoding || 'UTF-8';
reader.onload = function(e) {
var rows = my.parseCSV(e.target.result, dataset);
dfd.resolve({
records: rows,
metadata: {
filename: dataset.file.name
},
useMemoryStore: true
});
};
reader.onerror = function (e) {
alert('Failed to load file. Code: ' + e.target.error.code);
};
reader.readAsText(dataset.file, encoding);
} else if (dataset.data) {
var rows = my.parseCSV(dataset.data, dataset);
dfd.resolve({
records: rows,
useMemoryStore: true
});
return _doc;
});
var dataset = recline.Backend.Memory.createDataset(data, fields);
return dataset;
} else if (dataset.url) {
$.get(dataset.url).done(function(data) {
var rows = my.parseCSV(data, dataset);
dfd.resolve({
records: rows,
useMemoryStore: true
});
});
}
return dfd.promise();
};
// Converts a Comma Separated Values string into an array of arrays.
@ -57,17 +62,16 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {};
// @param {Boolean} [trim=false] If set to True leading and trailing whitespace is stripped off of each non-quoted field as it is imported
// @param {String} [separator=','] Separator for CSV file
// Heavily based on uselesscode's JS CSV parser (MIT Licensed):
// thttp://www.uselesscode.org/javascript/csv/
// http://www.uselesscode.org/javascript/csv/
my.parseCSV= function(s, options) {
// Get rid of any trailing \n
s = chomp(s);
var options = options || {};
var trm = options.trim;
var trm = (options.trim === false) ? false : true;
var separator = options.separator || ',';
var delimiter = options.delimiter || '"';
var cur = '', // The character we are currently processing.
inQuote = false,
fieldQuoted = false,

View File

@ -3,95 +3,42 @@ this.recline.Backend = this.recline.Backend || {};
this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {};
(function($, my) {
// ## DataProxy Backend
//
// For connecting to [DataProxy-s](http://github.com/okfn/dataproxy).
//
// When initializing the DataProxy backend you can set the following
// attributes in the options object:
//
// * dataproxy: {url-to-proxy} (optional). Defaults to http://jsonpdataproxy.appspot.com
//
// Datasets using this backend should set the following attributes:
//
// * url: (required) url-of-data-to-proxy
// * format: (optional) csv | xls (defaults to csv if not specified)
//
// Note that this is a **read-only** backend.
my.Backbone = function(options) {
var self = this;
this.__type__ = 'dataproxy';
this.readonly = true;
my.__type__ = 'dataproxy';
// URL for the dataproxy
my.dataproxy_url = 'http://jsonpdataproxy.appspot.com';
this.dataproxy_url = options && options.dataproxy_url ? options.dataproxy_url : 'http://jsonpdataproxy.appspot.com';
this.sync = function(method, model, options) {
if (method === "read") {
if (model.__type__ == 'Dataset') {
// Do nothing as we will get fields in query step (and no metadata to
// retrieve)
var dfd = $.Deferred();
dfd.resolve(model);
return dfd.promise();
}
} else {
alert('This backend only supports read operations');
// ## load
//
// Load data from a URL via the [DataProxy](http://github.com/okfn/dataproxy).
//
// Returns array of field names and array of arrays for records
my.fetch = function(dataset) {
var data = {
url: dataset.url,
'max-results': dataset.size || dataset.rows || 1000,
type: dataset.format || ''
};
var jqxhr = $.ajax({
url: my.dataproxy_url,
data: data,
dataType: 'jsonp'
});
var dfd = $.Deferred();
_wrapInTimeout(jqxhr).done(function(results) {
if (results.error) {
dfd.reject(results.error);
}
};
this.query = function(dataset, queryObj) {
var self = this;
var data = {
url: dataset.get('url'),
'max-results': queryObj.size,
type: dataset.get('format')
};
var jqxhr = $.ajax({
url: this.dataproxy_url,
data: data,
dataType: 'jsonp'
dfd.resolve({
records: results.data,
fields: results.fields,
useMemoryStore: true
});
var dfd = $.Deferred();
_wrapInTimeout(jqxhr).done(function(results) {
if (results.error) {
dfd.reject(results.error);
}
// Rename duplicate fieldIds as each field name needs to be
// unique.
var seen = {};
_.map(results.fields, function(fieldId, index) {
if (fieldId in seen) {
seen[fieldId] += 1;
results.fields[index] = fieldId + "("+seen[fieldId]+")";
} else {
seen[fieldId] = 1;
}
});
dataset.fields.reset(_.map(results.fields, function(fieldId) {
return {id: fieldId};
})
);
var _out = _.map(results.data, function(doc) {
var tmp = {};
_.each(results.fields, function(key, idx) {
tmp[key] = doc[idx];
});
return tmp;
});
dfd.resolve({
total: null,
hits: _.map(_out, function(row) {
return { _source: row };
})
});
})
.fail(function(arguments) {
dfd.reject(arguments);
});
return dfd.promise();
};
})
.fail(function(arguments) {
dfd.reject(arguments);
});
return dfd.promise();
};
// ## _wrapInTimeout

View File

@ -3,11 +3,14 @@ this.recline.Backend = this.recline.Backend || {};
this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
(function($, my) {
my.__type__ = 'elasticsearch';
// ## ElasticSearch Wrapper
//
// Connecting to [ElasticSearch](http://www.elasticsearch.org/) endpoints.
// A simple JS wrapper around an [ElasticSearch](http://www.elasticsearch.org/) endpoint.
//
// @param {String} endpoint: url for ElasticSearch type/table, e.g. for ES running
// on localhost:9200 with index // twitter and type tweet it would be:
// on http://localhost:9200 with index twitter and type tweet it would be:
//
// <pre>http://localhost:9200/twitter/tweet</pre>
//
@ -30,7 +33,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
// @return promise compatible deferred object.
this.mapping = function() {
var schemaUrl = self.endpoint + '/_mapping';
var jqxhr = recline.Backend.makeRequest({
var jqxhr = makeRequest({
url: schemaUrl,
dataType: this.options.dataType
});
@ -44,7 +47,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
// @return promise compatible deferred object.
this.get = function(id) {
var base = this.endpoint + '/' + id;
return recline.Backend.makeRequest({
return makeRequest({
url: base,
dataType: 'json'
});
@ -62,7 +65,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
if (doc.id) {
url += '/' + doc.id;
}
return recline.Backend.makeRequest({
return makeRequest({
url: url,
type: 'POST',
data: data,
@ -79,7 +82,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
this.delete = function(id) {
url = this.endpoint;
url += '/' + id;
return recline.Backend.makeRequest({
return makeRequest({
url: url,
type: 'DELETE',
dataType: 'json'
@ -140,7 +143,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
esQuery.query = queryNormalized;
var data = {source: JSON.stringify(esQuery)};
var url = this.endpoint + '/_search';
var jqxhr = recline.Backend.makeRequest({
var jqxhr = makeRequest({
url: url,
data: data,
dataType: this.options.dataType
@ -149,94 +152,110 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
}
};
// ## ElasticSearch Backbone Backend
//
// Backbone connector for an ES backend.
//
// Usage:
//
// var backend = new recline.Backend.ElasticSearch(options);
//
// `options` are passed through to Wrapper
my.Backbone = function(options) {
var self = this;
var esOptions = options;
this.__type__ = 'elasticsearch';
// ### sync
//
// Backbone sync implementation for this backend.
//
// URL of ElasticSearch endpoint to use must be specified on the dataset
// (and on a Record via its dataset attribute) by the dataset having a
// url attribute.
this.sync = function(method, model, options) {
if (model.__type__ == 'Dataset') {
var endpoint = model.get('url');
} else {
var endpoint = model.dataset.get('url');
}
var es = new my.Wrapper(endpoint, esOptions);
if (method === "read") {
if (model.__type__ == 'Dataset') {
var dfd = $.Deferred();
es.mapping().done(function(schema) {
// only one top level key in ES = the type so we can ignore it
var key = _.keys(schema)[0];
var fieldData = _.map(schema[key].properties, function(dict, fieldName) {
dict.id = fieldName;
return dict;
});
model.fields.reset(fieldData);
dfd.resolve(model);
})
.fail(function(arguments) {
dfd.reject(arguments);
});
return dfd.promise();
} else if (model.__type__ == 'Record') {
return es.get(model.dataset.id);
}
} else if (method === 'update') {
if (model.__type__ == 'Record') {
return es.upsert(model.toJSON());
}
} else if (method === 'delete') {
if (model.__type__ == 'Record') {
return es.delete(model.id);
}
}
};
// ## Recline Connectors
//
// Requires URL of ElasticSearch endpoint to be specified on the dataset
// via the url attribute.
// ### query
//
// query the ES backend
this.query = function(model, queryObj) {
var dfd = $.Deferred();
var url = model.get('url');
var es = new my.Wrapper(url, esOptions);
var jqxhr = es.query(queryObj);
// TODO: fail case
jqxhr.done(function(results) {
_.each(results.hits.hits, function(hit) {
if (!('id' in hit._source) && hit._id) {
hit._source.id = hit._id;
}
});
if (results.facets) {
results.hits.facets = results.facets;
}
dfd.resolve(results.hits);
}).fail(function(errorObj) {
var out = {
title: 'Failed: ' + errorObj.status + ' code',
message: errorObj.responseText
};
dfd.reject(out);
// ES options which are passed through to `options` on Wrapper (see Wrapper for details)
my.esOptions = {};
// ### fetch
my.fetch = function(dataset) {
var es = new my.Wrapper(dataset.url, my.esOptions);
var dfd = $.Deferred();
es.mapping().done(function(schema) {
// only one top level key in ES = the type so we can ignore it
var key = _.keys(schema)[0];
var fieldData = _.map(schema[key].properties, function(dict, fieldName) {
dict.id = fieldName;
return dict;
});
return dfd.promise();
};
dfd.resolve({
fields: fieldData
});
})
.fail(function(arguments) {
dfd.reject(arguments);
});
return dfd.promise();
};
// ### save
my.save = function(changes, dataset) {
var es = new my.Wrapper(dataset.url, my.esOptions);
if (changes.creates.length + changes.updates.length + changes.deletes.length > 1) {
var dfd = $.Deferred();
msg = 'Saving more than one item at a time not yet supported';
alert(msg);
dfd.reject(msg);
return dfd.promise();
}
if (changes.creates.length > 0) {
return es.upsert(changes.creates[0]);
}
else if (changes.updates.length >0) {
return es.upsert(changes.updates[0]);
} else if (changes.deletes.length > 0) {
return es.delete(changes.deletes[0].id);
}
};
// ### query
my.query = function(queryObj, dataset) {
var dfd = $.Deferred();
var es = new my.Wrapper(dataset.url, my.esOptions);
var jqxhr = es.query(queryObj);
jqxhr.done(function(results) {
var out = {
total: results.hits.total,
};
out.hits = _.map(results.hits.hits, function(hit) {
if (!('id' in hit._source) && hit._id) {
hit._source.id = hit._id;
}
return hit._source;
});
if (results.facets) {
out.facets = results.facets;
}
dfd.resolve(out);
}).fail(function(errorObj) {
var out = {
title: 'Failed: ' + errorObj.status + ' code',
message: errorObj.responseText
};
dfd.reject(out);
});
return dfd.promise();
};
// ### makeRequest
//
// Just $.ajax but mixing in any headers given in the 'headers' attribute of this
// Backend instance. Example:
//
// <pre>
// var jqxhr = this._makeRequest({
// url: the-url
// });
// </pre>
var makeRequest = function(data, headers) {
var extras = {};
if (headers) {
extras = {
beforeSend: function(req) {
_.each(headers, function(value, key) {
req.setRequestHeader(key, value);
});
}
};
}
var data = _.extend(extras, data);
return $.ajax(data);
};
}(jQuery, this.recline.Backend.ElasticSearch));

View File

@ -3,92 +3,44 @@ this.recline.Backend = this.recline.Backend || {};
this.recline.Backend.GDocs = this.recline.Backend.GDocs || {};
(function($, my) {
my.__type__ = 'gdocs';
// ## Google spreadsheet backend
//
// Connect to Google Docs spreadsheet.
//
// Dataset must have a url attribute pointing to the Gdocs
// spreadsheet's JSON feed e.g.
// Fetch data from a Google Docs spreadsheet.
//
// Dataset must have a url attribute pointing to the Gdocs spreadsheet or its JSON feed e.g.
// <pre>
// var dataset = new recline.Model.Dataset({
// url: 'https://docs.google.com/spreadsheet/ccc?key=0Aon3JiuouxLUdGlQVDJnbjZRSU1tUUJWOUZXRG53VkE#gid=0'
// },
// 'gdocs'
// );
//
// var dataset = new recline.Model.Dataset({
// url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
// },
// 'gdocs'
// );
// </pre>
my.Backbone = function() {
var self = this;
this.__type__ = 'gdocs';
this.readonly = true;
this.sync = function(method, model, options) {
var self = this;
if (method === "read") {
var dfd = $.Deferred();
dfd.resolve(model);
return dfd.promise();
}
};
this.query = function(dataset, queryObj) {
var dfd = $.Deferred();
if (dataset._dataCache) {
dfd.resolve(dataset._dataCache);
} else {
loadData(dataset.get('url')).done(function(result) {
dataset.fields.reset(result.fields);
// cache data onto dataset (we have loaded whole gdoc it seems!)
dataset._dataCache = self._formatResults(dataset, result.data);
dfd.resolve(dataset._dataCache);
});
}
return dfd.promise();
};
this._formatResults = function(dataset, data) {
var fields = _.pluck(dataset.fields.toJSON(), 'id');
// zip the fields with the data rows to produce js objs
// TODO: factor this out as a common method with other backends
var objs = _.map(data, function (d) {
var obj = {};
_.each(_.zip(fields, d), function (x) {
obj[x[0]] = x[1];
});
return obj;
});
var out = {
total: objs.length,
hits: _.map(objs, function(row) {
return { _source: row }
})
}
return out;
};
};
// ## loadData
//
// loadData from a google docs URL
//
// @return object with two attributes
//
// * fields: array of objects
// * data: array of arrays
var loadData = function(url) {
// * fields: array of Field objects
// * records: array of objects for each row
my.fetch = function(dataset) {
var dfd = $.Deferred();
var url = my.getSpreadsheetAPIUrl(url);
var out = {
fields: [],
data: []
}
var url = my.getSpreadsheetAPIUrl(dataset.url);
$.getJSON(url, function(d) {
result = my.parseData(d);
result.fields = _.map(result.fields, function(fieldId) {
var fields = _.map(result.fields, function(fieldId) {
return {id: fieldId};
});
dfd.resolve(result);
dfd.resolve({
records: result.records,
fields: fields,
useMemoryStore: true
});
});
return dfd.promise();
};
@ -109,8 +61,8 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {};
options = arguments[1];
}
var results = {
'fields': [],
'data': []
fields: [],
records: []
};
// default is no special info on type of columns
var colTypes = {};
@ -128,10 +80,9 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {};
// converts non numerical values that should be numerical (22.3%[string] -> 0.223[float])
var rep = /^([\d\.\-]+)\%$/;
$.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
var row = [];
for (var k in results.fields) {
var col = results.fields[k];
results.records = _.map(gdocsSpreadsheet.feed.entry, function(entry) {
var row = {};
_.each(results.fields, function(col) {
var _keyname = 'gsx$' + col;
var value = entry[_keyname]['$t'];
// if labelled as % and value contains %, convert
@ -142,9 +93,9 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {};
value = value3 / 100;
}
}
row.push(value);
}
results.data.push(row);
row[col] = value;
});
return row;
});
return results;
};

View File

@ -3,26 +3,7 @@ this.recline.Backend = this.recline.Backend || {};
this.recline.Backend.Memory = this.recline.Backend.Memory || {};
(function($, my) {
// ## createDataset
//
// Convenience function to create a simple 'in-memory' dataset in one step.
//
// @param data: list of hashes for each record/row in the data ({key:
// value, key: value})
// @param fields: (optional) list of field hashes (each hash defining a hash
// as per recline.Model.Field). If fields not specified they will be taken
// from the data.
// @param metadata: (optional) dataset metadata - see recline.Model.Dataset.
// If not defined (or id not provided) id will be autogenerated.
my.createDataset = function(data, fields, metadata) {
var wrapper = new my.Store(data, fields);
var backend = new my.Backbone();
var dataset = new recline.Model.Dataset(metadata, backend);
dataset._dataCache = wrapper;
dataset.fetch();
dataset.query();
return dataset;
};
my.__type__ = 'memory';
// ## Data Wrapper
//
@ -63,7 +44,22 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {};
this.data = newdocs;
};
this.save = function(changes, dataset) {
var self = this;
var dfd = $.Deferred();
// TODO _.each(changes.creates) { ... }
_.each(changes.updates, function(record) {
self.update(record);
});
_.each(changes.deletes, function(record) {
self.delete(record);
});
dfd.resolve();
return dfd.promise();
},
this.query = function(queryObj) {
var dfd = $.Deferred();
var numRows = queryObj.size || this.data.length;
var start = queryObj.from || 0;
var results = this.data;
@ -80,14 +76,14 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {};
results.reverse();
}
});
var total = results.length;
var facets = this.computeFacets(results, queryObj);
results = results.slice(start, start+numRows);
return {
total: total,
records: results,
var out = {
total: results.length,
hits: results.slice(start, start+numRows),
facets: facets
};
dfd.resolve(out);
return dfd.promise();
};
// in place filtering
@ -166,53 +162,5 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {};
return facetResults;
};
};
// ## Backbone
//
// Backbone connector for memory store attached to a Dataset object
my.Backbone = function() {
this.__type__ = 'memory';
this.sync = function(method, model, options) {
var self = this;
var dfd = $.Deferred();
if (method === "read") {
if (model.__type__ == 'Dataset') {
model.fields.reset(model._dataCache.fields);
dfd.resolve(model);
}
return dfd.promise();
} else if (method === 'update') {
if (model.__type__ == 'Record') {
model.dataset._dataCache.update(model.toJSON());
dfd.resolve(model);
}
return dfd.promise();
} else if (method === 'delete') {
if (model.__type__ == 'Record') {
model.dataset._dataCache.delete(model.toJSON());
dfd.resolve(model);
}
return dfd.promise();
} else {
alert('Not supported: sync on Memory backend with method ' + method + ' and model ' + model);
}
};
this.query = function(model, queryObj) {
var dfd = $.Deferred();
var results = model._dataCache.query(queryObj);
var hits = _.map(results.records, function(row) {
return { _source: row };
});
var out = {
total: results.total,
hits: hits,
facets: results.facets
};
dfd.resolve(out);
return dfd.promise();
};
};
}(jQuery, this.recline.Backend.Memory));

View File

@ -4,28 +4,7 @@ this.recline.Model = this.recline.Model || {};
(function($, my) {
// ## <a id="dataset">A Dataset model</a>
//
// A model has the following (non-Backbone) attributes:
//
// @property {FieldList} fields: (aka columns) is a `FieldList` listing all the
// fields on this Dataset (this can be set explicitly, or, will be set by
// Dataset.fetch() or Dataset.query()
//
// @property {RecordList} currentRecords: a `RecordList` containing the
// Records we have currently loaded for viewing (updated by calling query
// method)
//
// @property {number} docCount: total number of records in this dataset
//
// @property {Backend} backend: the Backend (instance) for this Dataset.
//
// @property {Query} queryState: `Query` object which stores current
// queryState. queryState may be edited by other components (e.g. a query
// editor view) changes will trigger a Dataset query.
//
// @property {FacetList} facets: FacetList object containing all current
// Facets.
// ## <a id="dataset">Dataset</a>
my.Dataset = Backbone.Model.extend({
__type__: 'Dataset',
@ -44,16 +23,139 @@ my.Dataset = Backbone.Model.extend({
initialize: function(model, backend) {
_.bindAll(this, 'query');
this.backend = backend;
if (typeof backend === 'undefined') {
if (this.get('records')) {
this.backend = recline.Backend.Memory;
}
}
if (typeof(backend) === 'string') {
this.backend = this._backendFromString(backend);
}
this.fields = new my.FieldList();
this.currentRecords = new my.RecordList();
this._changes = {
deletes: [],
updates: [],
creates: []
};
this.facets = new my.FacetList();
this.docCount = null;
this.queryState = new my.Query();
this.queryState.bind('change', this.query);
this.queryState.bind('facet:add', this.query);
this._store = this.backend;
if (this.backend == recline.Backend.Memory) {
this.fetch();
}
},
// ### fetch
//
// Retrieve dataset and (some) records from the backend.
fetch: function() {
var self = this;
var dfd = $.Deferred();
if (this.backend !== recline.Backend.Memory) {
this.backend.fetch(this.toJSON())
.done(handleResults)
.fail(function(arguments) {
dfd.reject(arguments);
});
} else {
// special case where we have been given data directly
handleResults({
records: this.get('records'),
fields: this.get('fields'),
useMemoryStore: true
});
}
function handleResults(results) {
var out = self._normalizeRecordsAndFields(results.records, results.fields);
if (results.useMemoryStore) {
self._store = new recline.Backend.Memory.Store(out.records, out.fields);
}
self.set(results.metadata);
self.fields.reset(out.fields);
self.query()
.done(function() {
dfd.resolve(self);
})
.fail(function(arguments) {
dfd.reject(arguments);
});
}
return dfd.promise();
},
// ### _normalizeRecordsAndFields
//
// Get a proper set of fields and records from incoming set of fields and records either of which may be null or arrays or objects
//
// e.g. fields = ['a', 'b', 'c'] and records = [ [1,2,3] ] =>
// fields = [ {id: a}, {id: b}, {id: c}], records = [ {a: 1, b: 2, c: 3} ]
_normalizeRecordsAndFields: function(records, fields) {
// if no fields get them from records
if (!fields && records && records.length > 0) {
// records is array then fields is first row of records ...
if (records[0] instanceof Array) {
fields = records[0];
records = records.slice(1);
} else {
fields = _.map(_.keys(records[0]), function(key) {
return {id: key};
});
}
}
// fields is an array of strings (i.e. list of field headings/ids)
if (fields && fields.length > 0 && typeof fields[0] === 'string') {
// Rename duplicate fieldIds as each field name needs to be
// unique.
var seen = {};
fields = _.map(fields, function(field, index) {
// cannot use trim as not supported by IE7
var fieldId = field.replace(/^\s+|\s+$/g, '');
if (fieldId === '') {
fieldId = '_noname_';
field = fieldId;
}
while (fieldId in seen) {
seen[field] += 1;
fieldId = field + seen[field];
}
if (!(field in seen)) {
seen[field] = 0;
}
// TODO: decide whether to keep original name as label ...
// return { id: fieldId, label: field || fieldId }
return { id: fieldId };
});
}
// records is provided as arrays so need to zip together with fields
// NB: this requires you to have fields to match arrays
if (records && records.length > 0 && records[0] instanceof Array) {
records = _.map(records, function(doc) {
var tmp = {};
_.each(fields, function(field, idx) {
tmp[field.id] = doc[idx];
});
return tmp;
});
}
return {
fields: fields,
records: records
};
},
save: function() {
var self = this;
// TODO: need to reset the changes ...
return this._store.save(this._changes, this.toJSON());
},
// ### query
@ -67,41 +169,48 @@ my.Dataset = Backbone.Model.extend({
// also returned.
query: function(queryObj) {
var self = this;
this.trigger('query:start');
var actualQuery = self._prepareQuery(queryObj);
var dfd = $.Deferred();
this.backend.query(this, actualQuery).done(function(queryResult) {
self.docCount = queryResult.total;
var docs = _.map(queryResult.hits, function(hit) {
var _doc = new my.Record(hit._source);
_doc.backend = self.backend;
_doc.dataset = self;
return _doc;
this.trigger('query:start');
if (queryObj) {
this.queryState.set(queryObj);
}
var actualQuery = this.queryState.toJSON();
this._store.query(actualQuery, this.toJSON())
.done(function(queryResult) {
self._handleQueryResult(queryResult);
self.trigger('query:done');
dfd.resolve(self.currentRecords);
})
.fail(function(arguments) {
self.trigger('query:fail', arguments);
dfd.reject(arguments);
});
self.currentRecords.reset(docs);
if (queryResult.facets) {
var facets = _.map(queryResult.facets, function(facetResult, facetId) {
facetResult.id = facetId;
return new my.Facet(facetResult);
});
self.facets.reset(facets);
}
self.trigger('query:done');
dfd.resolve(self.currentRecords);
})
.fail(function(arguments) {
self.trigger('query:fail', arguments);
dfd.reject(arguments);
});
return dfd.promise();
},
_prepareQuery: function(newQueryObj) {
if (newQueryObj) {
this.queryState.set(newQueryObj);
_handleQueryResult: function(queryResult) {
var self = this;
self.docCount = queryResult.total;
var docs = _.map(queryResult.hits, function(hit) {
var _doc = new my.Record(hit);
_doc.bind('change', function(doc) {
self._changes.updates.push(doc.toJSON());
});
_doc.bind('destroy', function(doc) {
self._changes.deletes.push(doc.toJSON());
});
return _doc;
});
self.currentRecords.reset(docs);
if (queryResult.facets) {
var facets = _.map(queryResult.facets, function(facetResult, facetId) {
facetResult.id = facetId;
return new my.Facet(facetResult);
});
self.facets.reset(facets);
}
var out = this.queryState.toJSON();
return out;
},
toTemplateJSON: function() {
@ -122,7 +231,7 @@ my.Dataset = Backbone.Model.extend({
query.addFacet(field.id);
});
var dfd = $.Deferred();
this.backend.query(this, query.toJSON()).done(function(queryResult) {
this._store.query(query.toJSON(), this.toJSON()).done(function(queryResult) {
if (queryResult.facets) {
_.each(queryResult.facets, function(facetResult, facetId) {
facetResult.id = facetId;
@ -150,7 +259,7 @@ my.Dataset = Backbone.Model.extend({
current = current[parts[ii]];
}
if (current) {
return new current();
return current;
}
// alternatively we just had a simple string
@ -158,7 +267,7 @@ my.Dataset = Backbone.Model.extend({
if (recline && recline.Backend) {
_.each(_.keys(recline.Backend), function(name) {
if (name.toLowerCase() === backendString.toLowerCase()) {
backend = new recline.Backend[name].Backbone();
backend = recline.Backend[name];
}
});
}
@ -184,20 +293,18 @@ my.Dataset.restore = function(state) {
var dataset = null;
// hack-y - restoring a memory dataset does not mean much ...
if (state.backend === 'memory') {
dataset = recline.Backend.Memory.createDataset(
[{stub: 'this is a stub dataset because we do not restore memory datasets'}],
[],
state.dataset // metadata
);
var datasetInfo = {
records: [{stub: 'this is a stub dataset because we do not restore memory datasets'}]
};
} else {
var datasetInfo = {
url: state.url
};
dataset = new recline.Model.Dataset(
datasetInfo,
state.backend
);
}
dataset = new recline.Model.Dataset(
datasetInfo,
state.backend
);
return dataset;
};
@ -242,7 +349,15 @@ my.Record = Backbone.Model.extend({
}
}
return html;
}
},
// Override Backbone save, fetch and destroy so they do nothing
// Instead, Dataset object that created this Record should take care of
// handling these changes (discovery will occur via event notifications)
// WARNING: these will not persist *unless* you call save on Dataset
fetch: function() {},
save: function() {},
destroy: function() { this.trigger('destroy', this); }
});
// ## A Backbone collection of Records
@ -252,42 +367,6 @@ my.RecordList = Backbone.Collection.extend({
});
// ## <a id="field">A Field (aka Column) on a Dataset</a>
//
// Following (Backbone) attributes as standard:
//
// * id: a unique identifer for this field- usually this should match the key in the records hash
// * label: (optional: defaults to id) the visible label used for this field
// * type: (optional: defaults to string) the type of the data in this field. Should be a string as per type names defined by ElasticSearch - see Types list on <http://www.elasticsearch.org/guide/reference/mapping/>
// * format: (optional) used to indicate how the data should be formatted. For example:
// * type=date, format=yyyy-mm-dd
// * type=float, format=percentage
// * type=string, format=markdown (render as markdown if Showdown available)
// * is_derived: (default: false) attribute indicating this field has no backend data but is just derived from other fields (see below).
//
// Following additional instance properties:
//
// @property {Function} renderer: a function to render the data for this field.
// Signature: function(value, field, record) where value is the value of this
// cell, field is corresponding field object and record is the record
// object (as simple JS object). Note that implementing functions can ignore arguments (e.g.
// function(value) would be a valid formatter function).
//
// @property {Function} deriver: a function to derive/compute the value of data
// in this field as a function of this field's value (if any) and the current
// record, its signature and behaviour is the same as for renderer. Use of
// this function allows you to define an entirely new value for data in this
// field. This provides support for a) 'derived/computed' fields: i.e. fields
// whose data are functions of the data in other fields b) transforming the
// value of this field prior to rendering.
//
// #### Default renderers
//
// * string
// * no format provided: pass through but convert http:// to hyperlinks
// * format = plain: do no processing on the source text
// * format = markdown: process as markdown (if Showdown library available)
// * float
// * format = percentage: format as a percentage
my.Field = Backbone.Model.extend({
// ### defaults - define default values
defaults: {
@ -358,54 +437,6 @@ my.FieldList = Backbone.Collection.extend({
});
// ## <a id="query">Query</a>
//
// Query instances encapsulate a query to the backend (see <a
// href="backend/base.html">query method on backend</a>). Useful both
// for creating queries and for storing and manipulating query state -
// e.g. from a query editor).
//
// **Query Structure and format**
//
// Query structure should follow that of [ElasticSearch query
// language](http://www.elasticsearch.org/guide/reference/api/search/).
//
// **NB: It is up to specific backends how to implement and support this query
// structure. Different backends might choose to implement things differently
// or not support certain features. Please check your backend for details.**
//
// Query object has the following key attributes:
//
// * size (=limit): number of results to return
// * from (=offset): offset into result set - http://www.elasticsearch.org/guide/reference/api/search/from-size.html
// * sort: sort order - <http://www.elasticsearch.org/guide/reference/api/search/sort.html>
// * query: Query in ES Query DSL <http://www.elasticsearch.org/guide/reference/api/search/query.html>
// * filter: See filters and <a href="http://www.elasticsearch.org/guide/reference/query-dsl/filtered-query.html">Filtered Query</a>
// * fields: set of fields to return - http://www.elasticsearch.org/guide/reference/api/search/fields.html
// * facets: specification of facets - see http://www.elasticsearch.org/guide/reference/api/search/facets/
//
// Additions:
//
// * q: either straight text or a hash will map directly onto a [query_string
// query](http://www.elasticsearch.org/guide/reference/query-dsl/query-string-query.html)
// in backend
//
// * Of course this can be re-interpreted by different backends. E.g. some
// may just pass this straight through e.g. for an SQL backend this could be
// the full SQL query
//
// * filters: array of ElasticSearch filters. These will be and-ed together for
// execution.
//
// **Examples**
//
// <pre>
// {
// q: 'quick brown fox',
// filters: [
// { term: { 'owner': 'jones' } }
// ]
// }
// </pre>
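//
// A second, purely illustrative example combining the attributes documented
// above (paging, sorting, free-text search and a filter); the field names
// 'date' and 'owner' are placeholders:
//
// <pre>
// {
//   size: 10,
//   from: 20,
//   sort: [
//     { 'date': { order: 'desc' } }
//   ],
//   q: 'quick brown fox',
//   filters: [
//     { term: { 'owner': 'jones' } }
//   ]
// }
// </pre>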
my.Query = Backbone.Model.extend({
defaults: function() {
return {
@ -525,43 +556,6 @@ my.Query = Backbone.Model.extend({
// ## <a id="facet">A Facet (Result)</a>
//
// Object to store Facet information, that is summary information (e.g. values
// and counts) about a field obtained by some faceting method on the
// backend.
//
// Structure of a facet follows that of Facet results in ElasticSearch, see:
// <http://www.elasticsearch.org/guide/reference/api/search/facets/>
//
// Specifically, the object structure of a facet looks like the following
// (there is one addition compared to ElasticSearch: the "id" field, which
// corresponds to the key used to specify this facet in the facet query):
//
// <pre>
// {
// "id": "id-of-facet",
// // type of this facet (terms, range, histogram etc)
// "_type" : "terms",
// // total number of tokens in the facet
// "total": 5,
// // number of records which have no value for the field
// "missing" : 0,
// // number of facet values not included in the returned facets
// "other": 0,
// // term object ({term: , count: ...})
// "terms" : [ {
// "term" : "foo",
// "count" : 2
// }, {
// "term" : "bar",
// "count" : 2
// }, {
// "term" : "baz",
// "count" : 1
// }
// ]
// }
// </pre>
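//
// For example (an illustrative sketch only), since a Facet is a Backbone
// model whose attributes follow the structure above, the term counts can be
// read with the standard accessors:
//
// <pre>
// _.each(facet.get('terms'), function(t) {
//   console.log(t.term + ': ' + t.count);
// });
// </pre>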
my.Facet = Backbone.Model.extend({
defaults: function() {
return {

View File

@ -182,10 +182,9 @@ my.MultiView = Backbone.View.extend({
// retrieve basic data like fields etc
// note this.model and dataset returned are the same
// TODO: set query state ...?
this.model.queryState.set(self.state.get('query'), {silent: true});
this.model.fetch()
.done(function(dataset) {
self.model.query(self.state.get('query'));
})
.fail(function(error) {
self.notify({message: error.message, category: 'error', persist: true});
});

View File

@ -24,9 +24,15 @@ test("parseCSV", function() {
'"Jones, Jay", 10\n' +
'"Xyz ""ABC"" O\'Brien", 11:35\n' +
'"Other, AN", 12:35\n';
var dataset = recline.Backend.CSV.csvToDataset(csv);
dataset.query();
var dataset = new recline.Model.Dataset({
data: csv
},
'csv'
);
dataset.fetch();
equal(dataset.currentRecords.length, 3);
var row = dataset.currentRecords.models[0].toJSON();
deepEqual(row, {Name: 'Jones, Jay', Value: 10});
});
test("parseCSVsemicolon", function() {

View File

@ -0,0 +1,110 @@
(function ($) {
module("Backend DataProxy");
var dataProxyData = {
"data": [
[
"1",
"1950-01",
"34.73"
],
[
"2",
"1950-02",
"34.73"
],
[
"3",
"1950-03",
"34.73"
],
[
"4",
"1950-04",
"34.73"
],
[
"5",
"1950-05",
"34.73"
],
[
"6",
"1950-06",
"34.73"
],
[
"7",
"1950-07",
"34.73"
],
[
"8",
"1950-08",
"34.73"
],
[
"9",
"1950-09",
"34.73"
],
[
"10",
"1950-10",
"34.73"
]
],
"fields": [
"__id__",
"date",
"price"
],
"length": null,
"max_results": 10,
"url": "http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv"
};
test('DataProxy Backend', function() {
// needed only if not stubbing
// stop();
var backend = recline.Backend.DataProxy;
equal(backend.__type__, 'dataproxy');
var dataset = new recline.Model.Dataset({
url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv'
},
'dataproxy'
);
var stub = sinon.stub($, 'ajax', function(options) {
var partialUrl = 'jsonpdataproxy.appspot.com';
if (options.url.indexOf(partialUrl) != -1) {
return {
done: function(callback) {
callback(dataProxyData);
return this;
},
fail: function() {
return this;
}
}
}
});
expect(6);
dataset.fetch().then(function() {
deepEqual(['__id__', 'date', 'price'], _.pluck(dataset.fields.toJSON(), 'id'));
equal(10, dataset.docCount)
equal(dataset.currentRecords.models[0].get('date'), "1950-01");
// needed only if not stubbing
// start();
});
dataset.query({q: '1950-01'}).then(function() {
equal(dataset.docCount, 1);
equal(dataset.currentRecords.models[0].get('price'), '34.73');
});
$.ajax.restore();
});
})(this.jQuery);

View File

@ -246,14 +246,13 @@ test("write", function() {
// ==================================================
module("Backend ElasticSearch - Backbone");
module("Backend ElasticSearch - Recline");
test("query", function() {
var backend = new recline.Backend.ElasticSearch.Backbone();
var dataset = new recline.Model.Dataset({
url: 'https://localhost:9200/my-es-db/my-es-type'
},
backend
'elasticsearch'
);
var stub = sinon.stub($, 'ajax', function(options) {
@ -292,11 +291,10 @@ test("query", function() {
});
test("write", function() {
var backend = new recline.Backend.ElasticSearch.Backbone();
var dataset = new recline.Model.Dataset({
url: 'http://localhost:9200/recline-test/es-write'
},
backend
'elasticsearch'
);
stop();
@ -306,10 +304,10 @@ test("write", function() {
id: id,
title: 'my title'
});
rec.backend = backend;
rec.dataset = dataset;
dataset.currentRecords.add(rec);
var jqxhr = rec.save();
// have to do this explicitly as we not really supporting adding new items atm
dataset._changes.creates.push(rec.toJSON());
var jqxhr = dataset.save();
jqxhr.done(function(data) {
ok(data.ok);
equal(data._id, id);
@ -318,28 +316,29 @@ test("write", function() {
// update
rec.set({title: 'new title'});
var jqxhr = rec.save();
// again set up by hand ...
dataset._changes.creates = [];
dataset._changes.updates.push(rec.toJSON());
var jqxhr = dataset.save();
jqxhr.done(function(data) {
equal(data._version, 2);
// delete
var jqxhr = rec.destroy();
dataset._changes.updates = [];
dataset._changes.deletes.push(rec.toJSON());
var jqxhr = dataset.save();
jqxhr.done(function(data) {
ok(data.ok);
rec = null;
// try to get ...
var oldrec = new recline.Model.Record({id: id});
equal(oldrec.get('title'), null);
oldrec.dataset = dataset;
oldrec.backend = backend;
var jqxhr = oldrec.fetch();
var es = new recline.Backend.ElasticSearch.Wrapper(dataset.get('url'));
var jqxhr = es.get(id);
jqxhr.done(function(data) {
// should not be here
ok(false, 'Should have got 404');
}).error(function(error) {
equal(error.status, 404);
equal(typeof oldrec.get('title'), 'undefined');
start();
});
});

View File

@ -1,110 +1,5 @@
(function ($) {
module("Backend");
var dataProxyData = {
"data": [
[
"1",
"1950-01",
"34.73"
],
[
"2",
"1950-02",
"34.73"
],
[
"3",
"1950-03",
"34.73"
],
[
"4",
"1950-04",
"34.73"
],
[
"5",
"1950-05",
"34.73"
],
[
"6",
"1950-06",
"34.73"
],
[
"7",
"1950-07",
"34.73"
],
[
"8",
"1950-08",
"34.73"
],
[
"9",
"1950-09",
"34.73"
],
[
"10",
"1950-10",
"34.73"
]
],
"fields": [
"__id__",
"date",
"price"
],
"length": null,
"max_results": 10,
"url": "http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv"
};
test('DataProxy Backend', function() {
// needed only if not stubbing
// stop();
var backend = new recline.Backend.DataProxy.Backbone();
ok(backend.readonly);
equal(backend.__type__, 'dataproxy');
var dataset = new recline.Model.Dataset({
url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv'
},
backend
);
var stub = sinon.stub($, 'ajax', function(options) {
var partialUrl = 'jsonpdataproxy.appspot.com';
if (options.url.indexOf(partialUrl) != -1) {
return {
done: function(callback) {
callback(dataProxyData);
return this;
},
fail: function() {
return this;
}
}
}
});
dataset.fetch().done(function(dataset) {
dataset.query().done(function(docList) {
deepEqual(['__id__', 'date', 'price'], _.pluck(dataset.fields.toJSON(), 'id'));
equal(null, dataset.docCount)
equal(10, docList.length)
equal("1950-01", docList.models[0].get('date'));
// needed only if not stubbing
start();
});
});
$.ajax.restore();
});
module("Backend GDocs");
var sample_gdocs_spreadsheet_data = {
"feed": {
@ -155,13 +50,13 @@ var sample_gdocs_spreadsheet_data = {
],
"xmlns$openSearch": "http://a9.com/-/spec/opensearchrss/1.0/",
"entry": [
{
"category": [
{
"term": "http://schemas.google.com/spreadsheets/2006#list",
"scheme": "http://schemas.google.com/spreadsheets/2006"
}
],
"category": [
{
"term": "http://schemas.google.com/spreadsheets/2006#list",
"scheme": "http://schemas.google.com/spreadsheets/2006"
}
],
"updated": {
"$t": "2010-07-12T18:32:16.200Z"
},
@ -273,11 +168,10 @@ var sample_gdocs_spreadsheet_data = {
}
test("GDocs Backend", function() {
var backend = new recline.Backend.GDocs.Backbone();
var dataset = new recline.Model.Dataset({
url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
},
backend
'gdocs'
);
var stub = sinon.stub($, 'getJSON', function(options, cb) {
@ -287,7 +181,8 @@ test("GDocs Backend", function() {
}
});
dataset.query().then(function(docList) {
dataset.fetch().then(function() {
var docList = dataset.currentRecords;
deepEqual(['column-2', 'column-1'], _.pluck(dataset.fields.toJSON(), 'id'));
equal(3, docList.length);
equal("A", docList.models[0].get('column-1'));
@ -304,3 +199,4 @@ test("GDocs Backend.getUrl", function() {
});
})(this.jQuery);

View File

@ -29,10 +29,11 @@ test('query', function () {
size: 4
, from: 2
};
var out = data.query(queryObj);
deepEqual(out.records[0], memoryData[2]);
equal(out.records.length, 4);
equal(out.total, 6);
data.query(queryObj).then(function(out) {
deepEqual(out.hits[0], memoryData[2]);
equal(out.hits.length, 4);
equal(out.total, 6);
});
});
test('query sort', function () {
@ -42,44 +43,50 @@ test('query sort', function () {
{'y': {order: 'desc'}}
]
};
var out = data.query(queryObj);
equal(out.records[0].x, 6);
data.query(queryObj).then(function(out) {
equal(out.hits[0].x, 6);
});
var queryObj = {
sort: [
{'country': {order: 'desc'}}
]
};
var out = data.query(queryObj);
equal(out.records[0].country, 'US');
data.query(queryObj).then(function(out) {
equal(out.hits[0].country, 'US');
});
var queryObj = {
sort: [
{'country': {order: 'asc'}}
]
};
var out = data.query(queryObj);
equal(out.records[0].country, 'DE');
data.query(queryObj).then(function(out) {
equal(out.hits[0].country, 'DE');
});
});
test('query string', function () {
var data = _wrapData();
var out = data.query({q: 'UK'});
equal(out.total, 3);
deepEqual(_.pluck(out.records, 'country'), ['UK', 'UK', 'UK']);
data.query({q: 'UK'}).then(function(out) {
equal(out.total, 3);
deepEqual(_.pluck(out.hits, 'country'), ['UK', 'UK', 'UK']);
});
var out = data.query({q: 'UK 6'})
equal(out.total, 1);
deepEqual(out.records[0].id, 1);
data.query({q: 'UK 6'}).then(function(out) {
equal(out.total, 1);
deepEqual(out.hits[0].id, 1);
});
});
test('filters', function () {
var data = _wrapData();
var query = new recline.Model.Query();
query.addFilter({type: 'term', field: 'country', term: 'UK'});
var out = data.query(query.toJSON());
equal(out.total, 3);
deepEqual(_.pluck(out.records, 'country'), ['UK', 'UK', 'UK']);
data.query(query.toJSON()).then(function(out) {
equal(out.total, 3);
deepEqual(_.pluck(out.hits, 'country'), ['UK', 'UK', 'UK']);
});
});
test('facet', function () {
@ -125,7 +132,7 @@ test('update and delete', function () {
(function ($) {
module("Backend Memory - Backbone");
module("Backend Memory - Model Integration");
var memoryData = {
metadata: {
@ -145,23 +152,29 @@ var memoryData = {
};
function makeBackendDataset() {
var dataset = new recline.Backend.Memory.createDataset(memoryData.records, null, memoryData.metadata);
var dataset = new recline.Model.Dataset({
id: 'test-dataset',
title: 'My Test Dataset',
name: '1-my-test-dataset',
fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}, {id: 'country'}, {id: 'label'}],
records: [
{id: 0, x: 1, y: 2, z: 3, country: 'DE', label: 'first'}
, {id: 1, x: 2, y: 4, z: 6, country: 'UK', label: 'second'}
, {id: 2, x: 3, y: 6, z: 9, country: 'US', label: 'third'}
, {id: 3, x: 4, y: 8, z: 12, country: 'UK', label: 'fourth'}
, {id: 4, x: 5, y: 10, z: 15, country: 'UK', label: 'fifth'}
, {id: 5, x: 6, y: 12, z: 18, country: 'DE', label: 'sixth'}
]
});
dataset.fetch();
return dataset;
}
test('createDataset', function () {
var dataset = recline.Backend.Memory.createDataset(memoryData.records);
equal(dataset.fields.length, 6);
deepEqual(['id', 'x', 'y', 'z', 'country', 'label'], dataset.fields.pluck('id'));
dataset.query();
equal(memoryData.records.length, dataset.currentRecords.length);
});
test('basics', function () {
var dataset = makeBackendDataset();
expect(3);
// convenience for tests - get the data that should get changed
var data = dataset._dataCache;
var data = dataset._store;
dataset.fetch().then(function(datasetAgain) {
equal(dataset.get('name'), memoryData.metadata.name);
deepEqual(_.pluck(dataset.fields.toJSON(), 'id'), _.pluck(data.fields, 'id'));
@ -172,21 +185,21 @@ test('basics', function () {
test('query', function () {
var dataset = makeBackendDataset();
// convenience for tests - get the data that should get changed
var data = dataset._dataCache.data;
var data = dataset._store.data;
var dataset = makeBackendDataset();
var queryObj = {
size: 4
, from: 2
};
dataset.query(queryObj).then(function(recordList) {
deepEqual(data[2], recordList.models[0].toJSON());
deepEqual(recordList.models[0].toJSON(), data[2]);
});
});
test('query sort', function () {
var dataset = makeBackendDataset();
// convenience for tests - get the data that should get changed
var data = dataset._dataCache.data;
var data = dataset._store.data;
var queryObj = {
sort: [
{'y': {order: 'desc'}}
@ -247,7 +260,7 @@ test('facet', function () {
test('update and delete', function () {
var dataset = makeBackendDataset();
// convenience for tests - get the data that should get changed
var data = dataset._dataCache;
var data = dataset._store;
dataset.query().then(function(docList) {
equal(docList.length, Math.min(100, data.data.length));
var doc1 = docList.models[0];
@ -256,12 +269,13 @@ test('update and delete', function () {
// Test UPDATE
var newVal = 10;
doc1.set({x: newVal});
doc1.save().then(function() {
equal(data.data[0].x, newVal);
})
doc1.save();
equal(dataset._changes.updates[0].x, newVal);
// Test Delete
doc1.destroy().then(function() {
doc1.destroy();
deepEqual(dataset._changes.deletes[0], doc1.toJSON());
dataset.save().then(function() {
equal(data.data.length, 5);
equal(data.data[0].x, memoryData.records[1].x);
});

View File

@ -19,7 +19,7 @@ var Fixture = {
{id: 4, date: '2011-05-04', x: 5, y: 10, z: 15, country: 'UK', title: 'fifth', lat:51.58, lon:0},
{id: 5, date: '2011-06-02', x: 6, y: 12, z: 18, country: 'DE', title: 'sixth', lat:51.04, lon:7.9}
];
var dataset = recline.Backend.Memory.createDataset(documents, fields);
var dataset = new recline.Model.Dataset({records: documents, fields: fields});
return dataset;
}
};

View File

@ -37,8 +37,9 @@
<script type="text/javascript" src="../src/backend/csv.js"></script>
<script type="text/javascript" src="model.test.js"></script>
<script type="text/javascript" src="backend.test.js"></script>
<script type="text/javascript" src="backend/memory.test.js"></script>
<script type="text/javascript" src="backend/dataproxy.test.js"></script>
<script type="text/javascript" src="backend/gdocs.test.js"></script>
<script type="text/javascript" src="backend/elasticsearch.test.js"></script>
<script type="text/javascript" src="backend/csv.test.js"></script>

View File

@ -108,6 +108,8 @@ test('Field: custom deriver and renderer', function () {
// =================================
// Dataset
module("Model Dataset");
test('Dataset', function () {
var meta = {id: 'test', title: 'xyz'};
var dataset = new recline.Model.Dataset(meta);
@ -116,15 +118,6 @@ test('Dataset', function () {
equal(out.fields.length, 2);
});
test('Dataset _prepareQuery', function () {
var meta = {id: 'test', title: 'xyz'};
var dataset = new recline.Model.Dataset(meta);
var out = dataset._prepareQuery();
var exp = new recline.Model.Query().toJSON();
deepEqual(out, exp);
});
test('Dataset getFieldsSummary', function () {
var dataset = Fixture.getDataset();
dataset.getFieldsSummary().done(function() {
@ -140,6 +133,114 @@ test('Dataset getFieldsSummary', function () {
});
});
test('_normalizeRecordsAndFields', function () {
var data = [
// fields but no records
{
in_: {
fields: [ '', 'abc', 'abc', 'xyz', '' ],
records: null
},
exp: {
fields: [
{id: '_noname_'},
{id: 'abc'},
{id: 'abc1'},
{id: 'xyz'},
{id: '_noname_1'}
],
records: null
},
},
// records array but no fields
{
in_: {
fields: undefined,
records: [
['col1', 'col2'],
[1,2],
[3,4]
]
},
exp: {
fields: [
{id: 'col1'},
{id: 'col2'}
],
records: [
{col1: 1, col2: 2},
{col1: 3, col2: 4}
]
}
},
// records objects but no fields
{
in_: {
fields: undefined,
records: [
{col1: 1, col2: 2},
{col1: 3, col2: 4}
]
},
exp: {
fields: [
{id: 'col1'},
{id: 'col2'}
],
records: [
{col1: 1, col2: 2},
{col1: 3, col2: 4}
]
}
},
// fields and records array
{
in_: {
fields: [{id: 'col1'}, {id: 'col2'}],
records: [
[1,2],
[3,4]
]
},
exp: {
fields: [
{id: 'col1'},
{id: 'col2'}
],
records: [
{col1: 1, col2: 2},
{col1: 3, col2: 4}
]
}
},
// everything already correct
{
in_: {
fields: [{id: 'col1'}, {id: 'col2'}],
records: [
{col1: 1, col2: 2},
{col1: 3, col2: 4},
]
},
exp: {
fields: [
{id: 'col1'},
{id: 'col2'}
],
records: [
{col1: 1, col2: 2},
{col1: 3, col2: 4}
]
}
}
];
var dataset = new recline.Model.Dataset();
_.each(data, function(item) {
out = dataset._normalizeRecordsAndFields(item.in_.records, item.in_.fields);
deepEqual(out, item.exp);
});
});
// =================================
// Query

View File

@ -16,7 +16,10 @@ var GeoJSONFixture = {
{id: 1, x: 2, y: 4, z: 6, geom: {type:"Point",coordinates:[13.40,52.35]}},
{id: 2, x: 3, y: 6, z: 9, geom: {type:"LineString",coordinates:[[100.0, 0.0],[101.0, 1.0]]}}
];
var dataset = recline.Backend.Memory.createDataset(records, fields);
var dataset = new recline.Model.Dataset({
records: records,
fields: fields
});
return dataset;
}
};
@ -114,7 +117,9 @@ test('_getGeometryFromRecord non-GeoJSON', function () {
[[53.3,47.32], [53.3, 47.32]]
];
var view = new recline.View.Map({
model: recline.Backend.Memory.createDataset([{a: 1}]),
model: new recline.Model.Dataset({
records: [{a: 1}]
}),
state: {
geomField: 'location'
}

View File

@ -1,10 +1,12 @@
module("View - Timeline");
test('extract dates and timelineJSON', function () {
var dataset = recline.Backend.Memory.createDataset([
{'Date': '2012-03-20', 'title': '1'},
{'Date': '2012-03-25', 'title': '2'},
]);
var dataset = new recline.Model.Dataset({
records: [
{'Date': '2012-03-20', 'title': '1'},
{'Date': '2012-03-25', 'title': '2'}
]
});
var view = new recline.View.Timeline({
model: dataset
});