diff --git a/demo/index.html b/demo/index.html index fdbc8d2b..26a51caf 100644 --- a/demo/index.html +++ b/demo/index.html @@ -26,7 +26,9 @@ - + + + diff --git a/demo/js/app.js b/demo/js/app.js index 29bb3da1..f027e587 100755 --- a/demo/js/app.js +++ b/demo/js/app.js @@ -62,7 +62,7 @@ function demoDataset() { , {id: 5, x: 6, y: 12, z: 18} ] }; - var backend = new recline.Model.BackendMemory(); + var backend = new recline.Backend.BackendMemory(); backend.addDataset(inData); var dataset = new recline.Model.Dataset({id: datasetId}, backend); return dataset; diff --git a/src/backend.js b/src/backend.js deleted file mode 100644 index f723aac8..00000000 --- a/src/backend.js +++ /dev/null @@ -1,386 +0,0 @@ -// # Recline Backends -// -// Backends are connectors to backend data sources and stores -// -// Backends are implemented as Backbone models but this is just a -// convenience (they do not save or load themselves from any remote -// source) -this.recline = this.recline || {}; -this.recline.Model = this.recline.Model || {}; - -(function($, my) { - // ## Backbone.sync - // - // Override Backbone.sync to hand off to sync function in relevant backend - Backbone.sync = function(method, model, options) { - return model.backend.sync(method, model, options); - } - - // ## wrapInTimeout - // - // Crude way to catch backend errors - // Many of backends use JSONP and so will not get error messages and this is - // a crude way to catch those errors. 
- function wrapInTimeout(ourFunction) { - var dfd = $.Deferred(); - var timeout = 5000; - var timer = setTimeout(function() { - dfd.reject({ - message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds' - }); - }, timeout); - ourFunction.done(function(arguments) { - clearTimeout(timer); - dfd.resolve(arguments); - }) - .fail(function(arguments) { - clearTimeout(timer); - dfd.reject(arguments); - }) - ; - return dfd.promise(); - } - - // ## BackendMemory - uses in-memory data - // - // This is very artificial and is really only designed for testing - // purposes. - // - // To use it you should provide in your constructor data: - // - // * metadata (including fields array) - // * documents: list of hashes, each hash being one doc. A doc *must* have an id attribute which is unique. - // - // Example: - // - //
- // // Backend setup
- // var backend = Backend();
- // backend.addDataset({
- // metadata: {
- // id: 'my-id',
- // title: 'My Title'
- // },
- // fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}],
- // documents: [
- // {id: 0, x: 1, y: 2, z: 3},
- // {id: 1, x: 2, y: 4, z: 6}
- // ]
- // });
- // // later ...
- // var dataset = Dataset({id: 'my-id'});
- // dataset.fetch();
- // etc ...
- //
- my.BackendMemory = Backbone.Model.extend({
- initialize: function() {
- this.datasets = {};
- },
- addDataset: function(data) {
- this.datasets[data.metadata.id] = $.extend(true, {}, data);
- },
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- var dfd = $.Deferred();
- if (model.__type__ == 'Dataset') {
- var rawDataset = this.datasets[model.id];
- model.set(rawDataset.metadata);
- model.fields.reset(rawDataset.fields);
- model.docCount = rawDataset.documents.length;
- dfd.resolve(model);
- }
- return dfd.promise();
- } else if (method === 'update') {
- var dfd = $.Deferred();
- if (model.__type__ == 'Document') {
- _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
- if(doc.id === model.id) {
- self.datasets[model.dataset.id].documents[idx] = model.toJSON();
- }
- });
- dfd.resolve(model);
- }
- return dfd.promise();
- } else if (method === 'delete') {
- var dfd = $.Deferred();
- if (model.__type__ == 'Document') {
- var rawDataset = self.datasets[model.dataset.id];
- var newdocs = _.reject(rawDataset.documents, function(doc) {
- return (doc.id === model.id);
- });
- rawDataset.documents = newdocs;
- dfd.resolve(model);
- }
- return dfd.promise();
- } else {
- alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
- }
- },
- query: function(model, queryObj) {
- var numRows = queryObj.size;
- var start = queryObj.offset;
- var dfd = $.Deferred();
- results = this.datasets[model.id].documents;
- // not complete sorting!
- _.each(queryObj.sort, function(item) {
- results = _.sortBy(results, function(doc) {
- var _out = doc[item[0]];
- return (item[1] == 'asc') ? _out : -1*_out;
- });
- });
- var results = results.slice(start, start+numRows);
- dfd.resolve(results);
- return dfd.promise();
- }
- });
- my.backends['memory'] = new my.BackendMemory();
-
- // ## BackendWebstore
- //
- // Connecting to [Webstores](http://github.com/okfn/webstore)
- //
- // To use this backend ensure your Dataset has a webstore_url in its attributes.
- my.BackendWebstore = Backbone.Model.extend({
- sync: function(method, model, options) {
- if (method === "read") {
- if (model.__type__ == 'Dataset') {
- var base = model.get('webstore_url');
- var schemaUrl = base + '/schema.json';
- var jqxhr = $.ajax({
- url: schemaUrl,
- dataType: 'jsonp',
- jsonp: '_callback'
- });
- var dfd = $.Deferred();
- wrapInTimeout(jqxhr).done(function(schema) {
- var fieldData = _.map(schema.data, function(item) {
- item.id = item.name;
- delete item.name;
- return item;
- });
- model.fields.reset(fieldData);
- model.docCount = schema.count;
- dfd.resolve(model, jqxhr);
- })
- .fail(function(arguments) {
- dfd.reject(arguments);
- });
- return dfd.promise();
- }
- }
- },
- query: function(model, queryObj) {
- var base = model.get('webstore_url');
- var data = {
- _limit: queryObj.size
- , _offset: queryObj.offset
- };
- var jqxhr = $.ajax({
- url: base + '.json',
- data: data,
- dataType: 'jsonp',
- jsonp: '_callback',
- cache: true
- });
- var dfd = $.Deferred();
- jqxhr.done(function(results) {
- dfd.resolve(results.data);
- });
- return dfd.promise();
- }
- });
- my.backends['webstore'] = new my.BackendWebstore();
-
- // ## BackendDataProxy
- //
- // For connecting to [DataProxy-s](http://github.com/okfn/dataproxy).
- //
- // When initializing the DataProxy backend you can set the following attributes:
- //
- // * dataproxy: {url-to-proxy} (optional). Defaults to http://jsonpdataproxy.appspot.com
- //
- // Datasets using using this backend should set the following attributes:
- //
- // * url: (required) url-of-data-to-proxy
- // * format: (optional) csv | xls (defaults to csv if not specified)
- //
- // Note that this is a **read-only** backend.
- my.BackendDataProxy = Backbone.Model.extend({
- defaults: {
- dataproxy_url: 'http://jsonpdataproxy.appspot.com'
- },
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- if (model.__type__ == 'Dataset') {
- var base = self.get('dataproxy_url');
- // TODO: should we cache for extra efficiency
- var data = {
- url: model.get('url')
- , 'max-results': 1
- , type: model.get('format') || 'csv'
- };
- var jqxhr = $.ajax({
- url: base
- , data: data
- , dataType: 'jsonp'
- });
- var dfd = $.Deferred();
- wrapInTimeout(jqxhr).done(function(results) {
- model.fields.reset(_.map(results.fields, function(fieldId) {
- return {id: fieldId};
- })
- );
- dfd.resolve(model, jqxhr);
- })
- .fail(function(arguments) {
- dfd.reject(arguments);
- });
- return dfd.promise();
- }
- } else {
- alert('This backend only supports read operations');
- }
- },
- query: function(dataset, queryObj) {
- var base = this.get('dataproxy_url');
- var data = {
- url: dataset.get('url')
- , 'max-results': queryObj.size
- , type: dataset.get('format')
- };
- var jqxhr = $.ajax({
- url: base
- , data: data
- , dataType: 'jsonp'
- });
- var dfd = $.Deferred();
- jqxhr.done(function(results) {
- var _out = _.map(results.data, function(doc) {
- var tmp = {};
- _.each(results.fields, function(key, idx) {
- tmp[key] = doc[idx];
- });
- return tmp;
- });
- dfd.resolve(_out);
- });
- return dfd.promise();
- }
- });
- my.backends['dataproxy'] = new my.BackendDataProxy();
-
-
- // ## Google spreadsheet backend
- //
- // Connect to Google Docs spreadsheet.
- //
- // Dataset must have a url attribute pointing to the Gdocs
- // spreadsheet's JSON feed e.g.
- //
- //
- // var dataset = new recline.Model.Dataset({
- // url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
- // },
- // 'gdocs'
- // );
- //
- my.BackendGDoc = Backbone.Model.extend({
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- var dfd = $.Deferred();
- var dataset = model;
-
- $.getJSON(model.get('url'), function(d) {
- result = self.gdocsToJavascript(d);
- model.fields.reset(_.map(result.field, function(fieldId) {
- return {id: fieldId};
- })
- );
- // cache data onto dataset (we have loaded whole gdoc it seems!)
- model._dataCache = result.data;
- dfd.resolve(model);
- })
- return dfd.promise(); }
- },
-
- query: function(dataset, queryObj) {
- var dfd = $.Deferred();
- var fields = _.pluck(dataset.fields.toJSON(), 'id');
-
- // zip the fields with the data rows to produce js objs
- // TODO: factor this out as a common method with other backends
- var objs = _.map(dataset._dataCache, function (d) {
- var obj = {};
- _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
- return obj;
- });
- dfd.resolve(objs);
- return dfd;
- },
- gdocsToJavascript: function(gdocsSpreadsheet) {
- /*
- :options: (optional) optional argument dictionary:
- columnsToUse: list of columns to use (specified by field names)
- colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
- :return: tabular data object (hash with keys: field and data).
-
- Issues: seems google docs return columns in rows in random order and not even sure whether consistent across rows.
- */
- var options = {};
- if (arguments.length > 1) {
- options = arguments[1];
- }
- var results = {
- 'field': [],
- 'data': []
- };
- // default is no special info on type of columns
- var colTypes = {};
- if (options.colTypes) {
- colTypes = options.colTypes;
- }
- // either extract column headings from spreadsheet directly, or used supplied ones
- if (options.columnsToUse) {
- // columns set to subset supplied
- results.field = options.columnsToUse;
- } else {
- // set columns to use to be all available
- if (gdocsSpreadsheet.feed.entry.length > 0) {
- for (var k in gdocsSpreadsheet.feed.entry[0]) {
- if (k.substr(0, 3) == 'gsx') {
- var col = k.substr(4)
- results.field.push(col);
- }
- }
- }
- }
-
- // converts non numberical values that should be numerical (22.3%[string] -> 0.223[float])
- var rep = /^([\d\.\-]+)\%$/;
- $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
- var row = [];
- for (var k in results.field) {
- var col = results.field[k];
- var _keyname = 'gsx$' + col;
- var value = entry[_keyname]['$t'];
- // if labelled as % and value contains %, convert
- if (colTypes[col] == 'percent') {
- if (rep.test(value)) {
- var value2 = rep.exec(value);
- var value3 = parseFloat(value2);
- value = value3 / 100;
- }
- }
- row.push(value);
- }
- results.data.push(row);
- });
- return results;
- }
- });
- my.backends['gdocs'] = new my.BackendGDoc();
-
-}(jQuery, this.recline.Model));
diff --git a/src/backend/base.js b/src/backend/base.js
new file mode 100644
index 00000000..60b44225
--- /dev/null
+++ b/src/backend/base.js
@@ -0,0 +1,44 @@
+// # Recline Backends
+//
+// Backends are connectors to backend data sources and stores
+//
+// Backends are implemented as Backbone models but this is just a
+// convenience (they do not save or load themselves from any remote
+// source)
+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+ // ## Backbone.sync
+ //
+ // Override Backbone.sync to hand off to sync function in relevant backend
+ Backbone.sync = function(method, model, options) {
+ return model.backend.sync(method, model, options);
+ }
+
+ // ## wrapInTimeout
+ //
+ // Crude way to catch backend errors
+  // Many of the backends use JSONP and so will not get error messages and this is
+ // a crude way to catch those errors.
+ my.wrapInTimeout = function(ourFunction) {
+ var dfd = $.Deferred();
+ var timeout = 5000;
+ var timer = setTimeout(function() {
+ dfd.reject({
+ message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds'
+ });
+ }, timeout);
+ ourFunction.done(function(arguments) {
+ clearTimeout(timer);
+ dfd.resolve(arguments);
+ })
+ .fail(function(arguments) {
+ clearTimeout(timer);
+ dfd.reject(arguments);
+ })
+ ;
+ return dfd.promise();
+ }
+}(jQuery, this.recline.Backend));
+
diff --git a/src/backend/dataproxy.js b/src/backend/dataproxy.js
new file mode 100644
index 00000000..cff758ba
--- /dev/null
+++ b/src/backend/dataproxy.js
@@ -0,0 +1,85 @@
+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+ // ## BackendDataProxy
+ //
+ // For connecting to [DataProxy-s](http://github.com/okfn/dataproxy).
+ //
+ // When initializing the DataProxy backend you can set the following attributes:
+ //
+ // * dataproxy: {url-to-proxy} (optional). Defaults to http://jsonpdataproxy.appspot.com
+ //
+  // Datasets using this backend should set the following attributes:
+ //
+ // * url: (required) url-of-data-to-proxy
+ // * format: (optional) csv | xls (defaults to csv if not specified)
+ //
+ // Note that this is a **read-only** backend.
+ my.BackendDataProxy = Backbone.Model.extend({
+ defaults: {
+ dataproxy_url: 'http://jsonpdataproxy.appspot.com'
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = self.get('dataproxy_url');
+ // TODO: should we cache for extra efficiency
+ var data = {
+ url: model.get('url')
+ , 'max-results': 1
+ , type: model.get('format') || 'csv'
+ };
+ var jqxhr = $.ajax({
+ url: base
+ , data: data
+ , dataType: 'jsonp'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(results) {
+ model.fields.reset(_.map(results.fields, function(fieldId) {
+ return {id: fieldId};
+ })
+ );
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ } else {
+ alert('This backend only supports read operations');
+ }
+ },
+ query: function(dataset, queryObj) {
+ var base = this.get('dataproxy_url');
+ var data = {
+ url: dataset.get('url')
+ , 'max-results': queryObj.size
+ , type: dataset.get('format')
+ };
+ var jqxhr = $.ajax({
+ url: base
+ , data: data
+ , dataType: 'jsonp'
+ });
+ var dfd = $.Deferred();
+ jqxhr.done(function(results) {
+ var _out = _.map(results.data, function(doc) {
+ var tmp = {};
+ _.each(results.fields, function(key, idx) {
+ tmp[key] = doc[idx];
+ });
+ return tmp;
+ });
+ dfd.resolve(_out);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['dataproxy'] = new my.BackendDataProxy();
+
+
+}(jQuery, this.recline.Backend));
diff --git a/src/backend/gdocs.js b/src/backend/gdocs.js
new file mode 100644
index 00000000..e0d41a69
--- /dev/null
+++ b/src/backend/gdocs.js
@@ -0,0 +1,117 @@
+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+ // ## Google spreadsheet backend
+ //
+ // Connect to Google Docs spreadsheet.
+ //
+ // Dataset must have a url attribute pointing to the Gdocs
+ // spreadsheet's JSON feed e.g.
+ //
+ //
+ // var dataset = new recline.Model.Dataset({
+ // url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
+ // },
+ // 'gdocs'
+ // );
+ //
+ my.BackendGDoc = Backbone.Model.extend({
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ var dfd = $.Deferred();
+ var dataset = model;
+
+ $.getJSON(model.get('url'), function(d) {
+ result = self.gdocsToJavascript(d);
+ model.fields.reset(_.map(result.field, function(fieldId) {
+ return {id: fieldId};
+ })
+ );
+ // cache data onto dataset (we have loaded whole gdoc it seems!)
+ model._dataCache = result.data;
+ dfd.resolve(model);
+ })
+ return dfd.promise(); }
+ },
+
+ query: function(dataset, queryObj) {
+ var dfd = $.Deferred();
+ var fields = _.pluck(dataset.fields.toJSON(), 'id');
+
+ // zip the fields with the data rows to produce js objs
+ // TODO: factor this out as a common method with other backends
+ var objs = _.map(dataset._dataCache, function (d) {
+ var obj = {};
+ _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
+ return obj;
+ });
+ dfd.resolve(objs);
+ return dfd;
+ },
+ gdocsToJavascript: function(gdocsSpreadsheet) {
+ /*
+ :options: (optional) optional argument dictionary:
+ columnsToUse: list of columns to use (specified by field names)
+ colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
+ :return: tabular data object (hash with keys: field and data).
+
+      Issue: Google Docs appears to return columns within rows in arbitrary order, and it is unclear whether that order is consistent across rows.
+ */
+ var options = {};
+ if (arguments.length > 1) {
+ options = arguments[1];
+ }
+ var results = {
+ 'field': [],
+ 'data': []
+ };
+ // default is no special info on type of columns
+ var colTypes = {};
+ if (options.colTypes) {
+ colTypes = options.colTypes;
+ }
+      // either extract column headings from the spreadsheet directly, or use supplied ones
+ if (options.columnsToUse) {
+ // columns set to subset supplied
+ results.field = options.columnsToUse;
+ } else {
+ // set columns to use to be all available
+ if (gdocsSpreadsheet.feed.entry.length > 0) {
+ for (var k in gdocsSpreadsheet.feed.entry[0]) {
+ if (k.substr(0, 3) == 'gsx') {
+ var col = k.substr(4)
+ results.field.push(col);
+ }
+ }
+ }
+ }
+
+      // converts non-numerical values that should be numerical (22.3%[string] -> 0.223[float])
+ var rep = /^([\d\.\-]+)\%$/;
+ $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
+ var row = [];
+ for (var k in results.field) {
+ var col = results.field[k];
+ var _keyname = 'gsx$' + col;
+ var value = entry[_keyname]['$t'];
+ // if labelled as % and value contains %, convert
+ if (colTypes[col] == 'percent') {
+ if (rep.test(value)) {
+ var value2 = rep.exec(value);
+ var value3 = parseFloat(value2);
+ value = value3 / 100;
+ }
+ }
+ row.push(value);
+ }
+ results.data.push(row);
+ });
+ return results;
+ }
+ });
+ recline.Model.backends['gdocs'] = new my.BackendGDoc();
+
+}(jQuery, this.recline.Backend));
+
diff --git a/src/backend/memory.js b/src/backend/memory.js
new file mode 100644
index 00000000..625caf04
--- /dev/null
+++ b/src/backend/memory.js
@@ -0,0 +1,139 @@
+// # Recline Backends
+//
+// Backends are connectors to backend data sources and stores
+//
+// Backends are implemented as Backbone models but this is just a
+// convenience (they do not save or load themselves from any remote
+// source)
+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+ // ## Backbone.sync
+ //
+ // Override Backbone.sync to hand off to sync function in relevant backend
+ Backbone.sync = function(method, model, options) {
+ return model.backend.sync(method, model, options);
+ }
+
+ // ## wrapInTimeout
+ //
+ // Crude way to catch backend errors
+  // Many of the backends use JSONP and so will not get error messages and this is
+ // a crude way to catch those errors.
+ function wrapInTimeout(ourFunction) {
+ var dfd = $.Deferred();
+ var timeout = 5000;
+ var timer = setTimeout(function() {
+ dfd.reject({
+ message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds'
+ });
+ }, timeout);
+ ourFunction.done(function(arguments) {
+ clearTimeout(timer);
+ dfd.resolve(arguments);
+ })
+ .fail(function(arguments) {
+ clearTimeout(timer);
+ dfd.reject(arguments);
+ })
+ ;
+ return dfd.promise();
+ }
+
+ // ## BackendMemory - uses in-memory data
+ //
+ // This is very artificial and is really only designed for testing
+ // purposes.
+ //
+ // To use it you should provide in your constructor data:
+ //
+ // * metadata (including fields array)
+ // * documents: list of hashes, each hash being one doc. A doc *must* have an id attribute which is unique.
+ //
+ // Example:
+ //
+ //
+ // // Backend setup
+ // var backend = Backend();
+ // backend.addDataset({
+ // metadata: {
+ // id: 'my-id',
+ // title: 'My Title'
+ // },
+ // fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}],
+ // documents: [
+ // {id: 0, x: 1, y: 2, z: 3},
+ // {id: 1, x: 2, y: 4, z: 6}
+ // ]
+ // });
+ // // later ...
+ // var dataset = Dataset({id: 'my-id'});
+ // dataset.fetch();
+ // etc ...
+ //
+ my.BackendMemory = Backbone.Model.extend({
+ initialize: function() {
+ this.datasets = {};
+ },
+ addDataset: function(data) {
+ this.datasets[data.metadata.id] = $.extend(true, {}, data);
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Dataset') {
+ var rawDataset = this.datasets[model.id];
+ model.set(rawDataset.metadata);
+ model.fields.reset(rawDataset.fields);
+ model.docCount = rawDataset.documents.length;
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else if (method === 'update') {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Document') {
+ _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
+ if(doc.id === model.id) {
+ self.datasets[model.dataset.id].documents[idx] = model.toJSON();
+ }
+ });
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else if (method === 'delete') {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Document') {
+ var rawDataset = self.datasets[model.dataset.id];
+ var newdocs = _.reject(rawDataset.documents, function(doc) {
+ return (doc.id === model.id);
+ });
+ rawDataset.documents = newdocs;
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else {
+ alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
+ }
+ },
+ query: function(model, queryObj) {
+ var numRows = queryObj.size;
+ var start = queryObj.offset;
+ var dfd = $.Deferred();
+ results = this.datasets[model.id].documents;
+ // not complete sorting!
+ _.each(queryObj.sort, function(item) {
+ results = _.sortBy(results, function(doc) {
+ var _out = doc[item[0]];
+ return (item[1] == 'asc') ? _out : -1*_out;
+ });
+ });
+ var results = results.slice(start, start+numRows);
+ dfd.resolve(results);
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['memory'] = new my.BackendMemory();
+
+}(jQuery, this.recline.Backend));
diff --git a/src/backend/webstore.js b/src/backend/webstore.js
new file mode 100644
index 00000000..d89d99a0
--- /dev/null
+++ b/src/backend/webstore.js
@@ -0,0 +1,61 @@
+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+ // ## BackendWebstore
+ //
+ // Connecting to [Webstores](http://github.com/okfn/webstore)
+ //
+ // To use this backend ensure your Dataset has a webstore_url in its attributes.
+ my.BackendWebstore = Backbone.Model.extend({
+ sync: function(method, model, options) {
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = model.get('webstore_url');
+ var schemaUrl = base + '/schema.json';
+ var jqxhr = $.ajax({
+ url: schemaUrl,
+ dataType: 'jsonp',
+ jsonp: '_callback'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(schema) {
+ var fieldData = _.map(schema.data, function(item) {
+ item.id = item.name;
+ delete item.name;
+ return item;
+ });
+ model.fields.reset(fieldData);
+ model.docCount = schema.count;
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ }
+ },
+ query: function(model, queryObj) {
+ var base = model.get('webstore_url');
+ var data = {
+ _limit: queryObj.size
+ , _offset: queryObj.offset
+ };
+ var jqxhr = $.ajax({
+ url: base + '.json',
+ data: data,
+ dataType: 'jsonp',
+ jsonp: '_callback',
+ cache: true
+ });
+ var dfd = $.Deferred();
+ jqxhr.done(function(results) {
+ dfd.resolve(results.data);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['webstore'] = new my.BackendWebstore();
+
+}(jQuery, this.recline.Backend));
diff --git a/test/backend.test.js b/test/backend.test.js
index 1e9767cd..2ff2db4f 100644
--- a/test/backend.test.js
+++ b/test/backend.test.js
@@ -19,7 +19,7 @@ var memoryData = {
};
function makeBackendDataset() {
- var backend = new recline.Model.BackendMemory();
+ var backend = new recline.Backend.BackendMemory();
backend.addDataset(memoryData);
var dataset = new recline.Model.Dataset({id: memoryData.metadata.id}, backend);
return dataset;
diff --git a/test/index.html b/test/index.html
index a00c9f81..0f16894d 100644
--- a/test/index.html
+++ b/test/index.html
@@ -18,7 +18,11 @@
-
+
+
+
+
+