From 5b70170ec7e4fe463074049d5d0b5a8ea8607052 Mon Sep 17 00:00:00 2001
From: Rufus Pollock
There are then various Views (you can easily write your own). Each view holds a pointer to a Dataset: \
\
\
{{/cells}} \
',
events: {
'click .data-table-cell-edit': 'onEditClick',
- // cell editor
'click .data-table-cell-editor .okButton': 'onEditorOK',
'click .data-table-cell-editor .cancelButton': 'onEditorCancel'
},
toTemplateJSON: function() {
+ var self = this;
var doc = this.model;
var cellData = this._fields.map(function(field) {
- return {field: field.id, value: doc.get(field.id)}
+ return {
+ field: field.id,
+ value: self._cellRenderer(doc.get(field.id), field, doc)
+ }
})
return { id: this.id, cells: cellData }
},
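The hunk above routes every cell value through the row's _cellRenderer before it reaches the template. As a usage sketch (not part of the patch; the renderer and the doc, $el and fieldList variables are illustrative), a cellRenderer only needs to accept the raw value and return whatever should be displayed:

    // Sketch: a cellRenderer that formats numeric cell values to two decimal
    // places and passes everything else through unchanged.
    var twoDecimalRenderer = function(value, field, doc) {
      var asNumber = parseFloat(value);
      return isNaN(asNumber) ? value : asNumber.toFixed(2);
    };

    // Supplied via the second (options) argument of the row view constructor.
    var rowView = new recline.View.DataTableRow(
      {model: doc, el: $el, fields: fieldList},
      {cellRenderer: twoDecimalRenderer}
    );
    rowView.render();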
diff --git a/test/view.test.js b/test/view.test.js
index 3b9c62e5..f969160d 100644
--- a/test/view.test.js
+++ b/test/view.test.js
@@ -20,6 +20,20 @@ test('new DataTableRow View', function () {
var tds = $el.find('td');
equal(tds.length, 3);
equal($(tds[1]).attr('data-field'), 'a');
+
+ var view = new recline.View.DataTableRow({
+ model: doc
+ , el: $el
+ , fields: new recline.Model.FieldList([{id: 'a'}, {id: 'b'}])
+ },
+ {
+ cellRenderer: function(value, field) {
+ return '' + value + '';
+ }
+ });
+ view.render();
+ var tds = $el.find('td .data-table-cell-value');
+ equal($(tds[0]).html(), '1', 'Checking cellRenderer works');
});
})(this.jQuery);
From 7cd6b0b2845120f7dd0db6c514addc77d9ac8bf2 Mon Sep 17 00:00:00 2001
From: Rufus Pollock
+ \
+
\
+ ',
+
+ toTemplateJSON: function() {
+ var modelData = this.model.toJSON()
+ modelData.notEmpty = ( this.fields.length > 0 )
+ // TODO: move this sort of thing into a toTemplateJSON method on Dataset?
+ modelData.fields = _.map(this.fields, function(field) { return field.toJSON() });
+ return modelData;
+ },
+ render: function() {
+ var self = this;
+ this.fields = this.model.fields.filter(function(field) {
+ return _.indexOf(self.hiddenFields, field.id) == -1;
+ });
+ var htmls = $.mustache(this.template, this.toTemplateJSON());
+ this.el.html(htmls);
+ this.model.currentDocuments.forEach(function(doc) {
+ var tr = $(' \
+ {{#notEmpty}} \
+ \
+ \
+ \
+ \
+ \
+ \
+ {{/notEmpty}} \
+ {{#fields}} \
+ \
+ \
+ \
+ \
+ {{/fields}} \
+ ');
+ self.el.find('tbody').append(tr);
+ var newView = new my.DataTableRow({
+ model: doc,
+ el: tr,
+ fields: self.fields,
+ },
+ self.options
+ );
+ newView.render();
+ });
+ this.el.toggleClass('no-hidden', (self.hiddenFields.length == 0));
+ return this;
+ }
+});
+
+// ## DataTableRow View for rendering an individual document.
+//
+// Since we want this to update in place it is up to the creator to provide the element to attach to.
+// In addition you must pass in fields via the constructor options. This should be the list of fields for the DataTable.
+//
+// Additional options can be passed in a second hash argument. Options:
+//
+// * cellRenderer: function to render cells. Signature: function(value,
+// field, doc) where value is the value of this cell, field is
+// corresponding field object and document is the document object. Note
+// that implementing functions can ignore arguments (e.g.
+// function(value) would be a valid cellRenderer function).
+my.DataTableRow = Backbone.View.extend({
+ initialize: function(initData, options) {
+ _.bindAll(this, 'render');
+ this._fields = initData.fields;
+ if (options && options.cellRenderer) {
+ this._cellRenderer = options.cellRenderer;
+ } else {
+ this._cellRenderer = function(value) {
+ return value;
+ }
+ }
+ this.el = $(this.el);
+ this.model.bind('change', this.render);
+ },
+
+ template: ' \
+ \
+ {{#cells}} \
+ \
+ \
+ \
+ {{/cells}} \
+ ',
+ events: {
+ 'click .data-table-cell-edit': 'onEditClick',
+ 'click .data-table-cell-editor .okButton': 'onEditorOK',
+ 'click .data-table-cell-editor .cancelButton': 'onEditorCancel'
+ },
+
+ toTemplateJSON: function() {
+ var self = this;
+ var doc = this.model;
+ var cellData = this._fields.map(function(field) {
+ return {
+ field: field.id,
+ value: self._cellRenderer(doc.get(field.id), field, doc)
+ }
+ })
+ return { id: this.id, cells: cellData }
+ },
+
+ render: function() {
+ this.el.attr('data-id', this.model.id);
+ var html = $.mustache(this.template, this.toTemplateJSON());
+ $(this.el).html(html);
+ return this;
+ },
+
+ // Cell Editor
+ // ===========
+
+ onEditClick: function(e) {
+ var editing = this.el.find('.data-table-cell-editor-editor');
+ if (editing.length > 0) {
+ editing.parents('.data-table-cell-value').html(editing.text()).siblings('.data-table-cell-edit').removeClass("hidden");
+ }
+ $(e.target).addClass("hidden");
+ var cell = $(e.target).siblings('.data-table-cell-value');
+ cell.data("previousContents", cell.text());
+ util.render('cellEditor', cell, {value: cell.text()});
+ },
+
+ onEditorOK: function(e) {
+ var cell = $(e.target);
+ var rowId = cell.parents('tr').attr('data-id');
+ var field = cell.parents('td').attr('data-field');
+ var newValue = cell.parents('.data-table-cell-editor').find('.data-table-cell-editor-editor').val();
+ var newData = {};
+ newData[field] = newValue;
+ this.model.set(newData);
+ my.notify("Updating row...", {loader: true});
+ this.model.save().then(function(response) {
+ my.notify("Row updated successfully", {category: 'success'});
+ })
+ .fail(function() {
+ my.notify('Error saving row', {
+ category: 'error',
+ persist: true
+ });
+ });
+ },
+
+ onEditorCancel: function(e) {
+ var cell = $(e.target).parents('.data-table-cell-value');
+ cell.html(cell.data('previousContents')).siblings('.data-table-cell-edit').removeClass("hidden");
+ }
+});
+
+})(jQuery, recline.View);
diff --git a/src/view.js b/src/view.js
index 31418cdc..bb35adc9 100644
--- a/src/view.js
+++ b/src/view.js
@@ -190,326 +190,6 @@ my.DataExplorer = Backbone.View.extend({
}
});
-// ## DataTable
-//
-// Provides a tabular view on a Dataset.
-//
-// Initialize it with a recline.Dataset object.
-//
-// Additional options passed in second arguments. Options:
-//
-// * cellRenderer: function used to render individual cells. See DataTableRow for more.
-my.DataTable = Backbone.View.extend({
- tagName: "div",
- className: "data-table-container",
-
- initialize: function(modelEtc, options) {
- var self = this;
- this.el = $(this.el);
- _.bindAll(this, 'render');
- this.model.currentDocuments.bind('add', this.render);
- this.model.currentDocuments.bind('reset', this.render);
- this.model.currentDocuments.bind('remove', this.render);
- this.state = {};
- this.hiddenFields = [];
- this.options = options;
- },
-
- events: {
- 'click .column-header-menu': 'onColumnHeaderClick'
- , 'click .row-header-menu': 'onRowHeaderClick'
- , 'click .root-header-menu': 'onRootHeaderClick'
- , 'click .data-table-menu li a': 'onMenuClick'
- },
-
- // TODO: delete or re-enable (currently this code is not used from anywhere except deprecated or disabled methods (see above)).
- // showDialog: function(template, data) {
- // if (!data) data = {};
- // util.show('dialog');
- // util.render(template, 'dialog-content', data);
- // util.observeExit($('.dialog-content'), function() {
- // util.hide('dialog');
- // })
- // $('.dialog').draggable({ handle: '.dialog-header', cursor: 'move' });
- // },
-
-
- // ======================================================
- // Column and row menus
-
- onColumnHeaderClick: function(e) {
- this.state.currentColumn = $(e.target).closest('.column-header').attr('data-field');
- util.position('data-table-menu', e);
- util.render('columnActions', 'data-table-menu');
- },
-
- onRowHeaderClick: function(e) {
- this.state.currentRow = $(e.target).parents('tr:first').attr('data-id');
- util.position('data-table-menu', e);
- util.render('rowActions', 'data-table-menu');
- },
-
- onRootHeaderClick: function(e) {
- util.position('data-table-menu', e);
- util.render('rootActions', 'data-table-menu', {'columns': this.hiddenFields});
- },
-
- onMenuClick: function(e) {
- var self = this;
- e.preventDefault();
- var actions = {
- bulkEdit: function() { self.showTransformColumnDialog('bulkEdit', {name: self.state.currentColumn}) },
- transform: function() { self.showTransformDialog('transform') },
- sortAsc: function() { self.setColumnSort('asc') },
- sortDesc: function() { self.setColumnSort('desc') },
- hideColumn: function() { self.hideColumn() },
- showColumn: function() { self.showColumn(e) },
- // TODO: Delete or re-implement ...
- csv: function() { window.location.href = app.csvUrl },
- json: function() { window.location.href = "_rewrite/api/json" },
- urlImport: function() { showDialog('urlImport') },
- pasteImport: function() { showDialog('pasteImport') },
- uploadImport: function() { showDialog('uploadImport') },
- // END TODO
- deleteColumn: function() {
- var msg = "Are you sure? This will delete '" + self.state.currentColumn + "' from all documents.";
- // TODO:
- alert('This function needs to be re-implemented');
- return;
- if (confirm(msg)) costco.deleteColumn(self.state.currentColumn);
- },
- deleteRow: function() {
- var doc = _.find(self.model.currentDocuments.models, function(doc) {
- // important this is == as the currentRow will be string (as comes
- // from DOM) while id may be int
- return doc.id == self.state.currentRow
- });
- doc.destroy().then(function() {
- self.model.currentDocuments.remove(doc);
- my.notify("Row deleted successfully");
- })
- .fail(function(err) {
- my.notify("Errorz! " + err)
- })
- }
- }
- util.hide('data-table-menu');
- actions[$(e.target).attr('data-action')]();
- },
-
- showTransformColumnDialog: function() {
- var $el = $('.dialog-content');
- util.show('dialog');
- var view = new my.ColumnTransform({
- model: this.model
- });
- view.state = this.state;
- view.render();
- $el.empty();
- $el.append(view.el);
- util.observeExit($el, function() {
- util.hide('dialog');
- })
- $('.dialog').draggable({ handle: '.dialog-header', cursor: 'move' });
- },
-
- showTransformDialog: function() {
- var $el = $('.dialog-content');
- util.show('dialog');
- var view = new recline.View.DataTransform({
- });
- view.render();
- $el.empty();
- $el.append(view.el);
- util.observeExit($el, function() {
- util.hide('dialog');
- })
- $('.dialog').draggable({ handle: '.dialog-header', cursor: 'move' });
- },
-
- setColumnSort: function(order) {
- this.model.query({
- sort: [
- [this.state.currentColumn, order]
- ]
- });
- },
-
- hideColumn: function() {
- this.hiddenFields.push(this.state.currentColumn);
- this.render();
- },
-
- showColumn: function(e) {
- this.hiddenFields = _.without(this.hiddenFields, $(e.target).data('column'));
- this.render();
- },
-
- // ======================================================
- // #### Templating
- template: ' \
- \
- \
- \
- \
-
\
- ',
-
- toTemplateJSON: function() {
- var modelData = this.model.toJSON()
- modelData.notEmpty = ( this.fields.length > 0 )
- // TODO: move this sort of thing into a toTemplateJSON method on Dataset?
- modelData.fields = _.map(this.fields, function(field) { return field.toJSON() });
- return modelData;
- },
- render: function() {
- var self = this;
- this.fields = this.model.fields.filter(function(field) {
- return _.indexOf(self.hiddenFields, field.id) == -1;
- });
- var htmls = $.mustache(this.template, this.toTemplateJSON());
- this.el.html(htmls);
- this.model.currentDocuments.forEach(function(doc) {
- var tr = $(' \
- {{#notEmpty}} \
- \
- \
- \
- \
- \
- \
- {{/notEmpty}} \
- {{#fields}} \
- \
- \
- \
- \
- {{/fields}} \
- ');
- self.el.find('tbody').append(tr);
- var newView = new my.DataTableRow({
- model: doc,
- el: tr,
- fields: self.fields,
- },
- self.options
- );
- newView.render();
- });
- this.el.toggleClass('no-hidden', (self.hiddenFields.length == 0));
- return this;
- }
-});
-
-// ## DataTableRow View for rendering an individual document.
-//
-// Since we want this to update in place it is up to creator to provider the element to attach to.
-// In addition you must pass in a fields in the constructor options. This should be list of fields for the DataTable.
-//
-// Additional options can be passed in a second hash argument. Options:
-//
-// * cellRenderer: function to render cells. Signature: function(value,
-// field, doc) where value is the value of this cell, field is
-// corresponding field object and document is the document object. Note
-// that implementing functions can ignore arguments (e.g.
-// function(value) would be a valid cellRenderer function).
-my.DataTableRow = Backbone.View.extend({
- initialize: function(initData, options) {
- _.bindAll(this, 'render');
- this._fields = initData.fields;
- if (options && options.cellRenderer) {
- this._cellRenderer = options.cellRenderer;
- } else {
- this._cellRenderer = function(value) {
- return value;
- }
- }
- this.el = $(this.el);
- this.model.bind('change', this.render);
- },
-
- template: ' \
- \
- {{#cells}} \
- \
- \
- \
- {{/cells}} \
- ',
- events: {
- 'click .data-table-cell-edit': 'onEditClick',
- 'click .data-table-cell-editor .okButton': 'onEditorOK',
- 'click .data-table-cell-editor .cancelButton': 'onEditorCancel'
- },
-
- toTemplateJSON: function() {
- var self = this;
- var doc = this.model;
- var cellData = this._fields.map(function(field) {
- return {
- field: field.id,
- value: self._cellRenderer(doc.get(field.id), field, doc)
- }
- })
- return { id: this.id, cells: cellData }
- },
-
- render: function() {
- this.el.attr('data-id', this.model.id);
- var html = $.mustache(this.template, this.toTemplateJSON());
- $(this.el).html(html);
- return this;
- },
-
- // Cell Editor
- // ===========
-
- onEditClick: function(e) {
- var editing = this.el.find('.data-table-cell-editor-editor');
- if (editing.length > 0) {
- editing.parents('.data-table-cell-value').html(editing.text()).siblings('.data-table-cell-edit').removeClass("hidden");
- }
- $(e.target).addClass("hidden");
- var cell = $(e.target).siblings('.data-table-cell-value');
- cell.data("previousContents", cell.text());
- util.render('cellEditor', cell, {value: cell.text()});
- },
-
- onEditorOK: function(e) {
- var cell = $(e.target);
- var rowId = cell.parents('tr').attr('data-id');
- var field = cell.parents('td').attr('data-field');
- var newValue = cell.parents('.data-table-cell-editor').find('.data-table-cell-editor-editor').val();
- var newData = {};
- newData[field] = newValue;
- this.model.set(newData);
- my.notify("Updating row...", {loader: true});
- this.model.save().then(function(response) {
- my.notify("Row updated successfully", {category: 'success'});
- })
- .fail(function() {
- my.notify('Error saving row', {
- category: 'error',
- persist: true
- });
- });
- },
-
- onEditorCancel: function(e) {
- var cell = $(e.target).parents('.data-table-cell-value');
- cell.html(cell.data('previousContents')).siblings('.data-table-cell-edit').removeClass("hidden");
- }
-});
-
/* ========================================================== */
// ## Miscellaneous Utilities
diff --git a/test/index.html b/test/index.html
index cd87c9af..a00c9f81 100644
--- a/test/index.html
+++ b/test/index.html
@@ -21,6 +21,7 @@
+
From 1fd337e1d4e0bb5877fb9788fabc8c964f0eba39 Mon Sep 17 00:00:00 2001
From: Rufus Pollock
diff --git a/src/view-grid.js b/src/view-grid.js
index 80d9820b..feb02b85 100644
--- a/src/view-grid.js
+++ b/src/view-grid.js
@@ -2,7 +2,7 @@ this.recline = this.recline || {};
this.recline.View = this.recline.View || {};
(function($, my) {
-// ## DataTable
+// ## DataGrid
//
// Provides a tabular view on a Dataset.
//
@@ -10,8 +10,8 @@ this.recline.View = this.recline.View || {};
//
// Additional options passed in second arguments. Options:
//
-// * cellRenderer: function used to render individual cells. See DataTableRow for more.
-my.DataTable = Backbone.View.extend({
+// * cellRenderer: function used to render individual cells. See DataGridRow for more.
+my.DataGrid = Backbone.View.extend({
tagName: "div",
className: "data-table-container",
@@ -205,7 +205,7 @@ my.DataTable = Backbone.View.extend({
this.model.currentDocuments.forEach(function(doc) {
var tr = $('');
self.el.find('tbody').append(tr);
- var newView = new my.DataTableRow({
+ var newView = new my.DataGridRow({
model: doc,
el: tr,
fields: self.fields,
@@ -219,10 +219,10 @@ my.DataTable = Backbone.View.extend({
}
});
-// ## DataTableRow View for rendering an individual document.
+// ## DataGridRow View for rendering an individual document.
//
// Since we want this to update in place it is up to the creator to provide the element to attach to.
-// In addition you must pass in fields via the constructor options. This should be the list of fields for the DataTable.
+// In addition you must pass in fields via the constructor options. This should be the list of fields for the DataGrid.
//
// Additional options can be passed in a second hash argument. Options:
//
@@ -231,7 +231,7 @@ my.DataTable = Backbone.View.extend({
// corresponding field object and document is the document object. Note
// that implementing functions can ignore arguments (e.g.
// function(value) would be a valid cellRenderer function).
-my.DataTableRow = Backbone.View.extend({
+my.DataGridRow = Backbone.View.extend({
initialize: function(initData, options) {
_.bindAll(this, 'render');
this._fields = initData.fields;
diff --git a/src/view.js b/src/view.js
index bb35adc9..503fa47c 100644
--- a/src/view.js
+++ b/src/view.js
@@ -23,14 +23,14 @@ this.recline.View = this.recline.View || {};
//
// **views**: (optional) the views (Grid, Graph etc) for DataExplorer to
// show. This is an array of view hashes. If not provided
-// just initialize a DataTable with id 'grid'. Example:
+// just initialize a DataGrid with id 'grid'. Example:
//
//
// var views = [
// {
// id: 'grid', // used for routing
// label: 'Grid', // used for view switcher
-// view: new recline.View.DataTable({
+// view: new recline.View.DataGrid({
// model: dataset
// })
// },
@@ -101,7 +101,7 @@ my.DataExplorer = Backbone.View.extend({
this.pageViews = [{
id: 'grid',
label: 'Grid',
- view: new my.DataTable({
+ view: new my.DataGrid({
model: this.model
})
}];
diff --git a/test/view.test.js b/test/view.test.js
index f969160d..05adb60e 100644
--- a/test/view.test.js
+++ b/test/view.test.js
@@ -2,7 +2,7 @@
module("View");
-test('new DataTableRow View', function () {
+test('new DataGridRow View', function () {
var $el = $('');
$('.fixtures .test-datatable').append($el);
var doc = new recline.Model.Document({
@@ -10,7 +10,7 @@ test('new DataTableRow View', function () {
'b': '2',
'a': '1'
});
- var view = new recline.View.DataTableRow({
+ var view = new recline.View.DataGridRow({
model: doc
, el: $el
, fields: new recline.Model.FieldList([{id: 'a'}, {id: 'b'}])
@@ -21,7 +21,7 @@ test('new DataTableRow View', function () {
equal(tds.length, 3);
equal($(tds[1]).attr('data-field'), 'a');
- var view = new recline.View.DataTableRow({
+ var view = new recline.View.DataGridRow({
model: doc
, el: $el
, fields: new recline.Model.FieldList([{id: 'a'}, {id: 'b'}])
From 12bb498d52c38157fa5252a17233d8e40652c48c Mon Sep 17 00:00:00 2001
From: Rufus Pollock
-There are then various Views (you can easily write your own). Each view holds a pointer to a Dataset:
+
+Backends (more info below) then connect Datasets and Documents to data
+from a specific 'Backend' data source. They provide methods for loading and
+saving Datasets and individual Documents, as well as for bulk loading via a
+query API and doing bulk transforms on the backend.
+
+Complementing the model are various Views (you can easily write your own). Each view holds a pointer to a Dataset:

Backends are connectors to backend data sources from which data can be retrieved.
+
+Backends are implemented as Backbone models but this is just a convenience
+(they do not save or load themselves from any remote source). You can see
+detailed examples of backend implementation in the source documentation
+below.
+
+A backend must implement two methods:
+
+    sync(method, model, options)
+    query(dataset, queryObj)
+
+This is an implementation of Backbone.sync and is used to override
+Backbone.sync on operations for Datasets and Documents which are using this
+backend.
+
+For read-only implementations you need only implement the read method
+for Dataset models (and even this can be a null operation). The read method
+should return relevant metadata for the Dataset. We do not require read support
+for Documents because they are loaded in bulk by the query method.
+
+For backends supporting write operations you must implement update and
+delete support for Document objects.
+
+All code paths should return an object conforming to the jQuery promise
+API.
+
+Query the backend for documents, returning them in bulk. This method will be
+used by the Dataset.query method to search the backend for documents,
+retrieving the results in bulk. This method should also set the docCount
+attribute on the dataset.
+
+queryObj should be either a recline.Model.Query object or a
+Hash. The structure of data in the Query object or Hash should follow that
+defined in issue 34. (That said, if you are writing your own backend and have
+control over the query object you can obviously use whatever structure you
+like.)
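To make the contract above concrete, here is a minimal sketch of a read-only backend that follows the description in this README hunk. It is illustrative only (not part of the patch): the MyReadOnlyBackend name and the myLoadDocuments helper are hypothetical, and real implementations should follow the backend source files shown below.

    // Sketch: a read-only backend implementing the two required methods.
    // Both return jQuery promises, as the README requires.
    var MyReadOnlyBackend = Backbone.Model.extend({
      sync: function(method, model, options) {
        var dfd = $.Deferred();
        if (method === 'read' && model.__type__ == 'Dataset') {
          // A near-null read: resolve with the dataset unchanged.
          dfd.resolve(model);
        } else {
          dfd.reject({message: 'This backend is read-only'});
        }
        return dfd.promise();
      },
      query: function(dataset, queryObj) {
        var dfd = $.Deferred();
        // myLoadDocuments is a hypothetical helper that fetches document
        // hashes for this dataset; resolve with the docs and set docCount.
        myLoadDocuments(dataset, queryObj).done(function(docs, total) {
          dataset.docCount = total;
          dfd.resolve(docs);
        });
        return dfd.promise();
      }
    });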
- // // Backend setup
- // var backend = Backend();
- // backend.addDataset({
- // metadata: {
- // id: 'my-id',
- // title: 'My Title'
- // },
- // fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}],
- // documents: [
- // {id: 0, x: 1, y: 2, z: 3},
- // {id: 1, x: 2, y: 4, z: 6}
- // ]
- // });
- // // later ...
- // var dataset = Dataset({id: 'my-id'});
- // dataset.fetch();
- // etc ...
- //
- my.BackendMemory = Backbone.Model.extend({
- initialize: function() {
- this.datasets = {};
- },
- addDataset: function(data) {
- this.datasets[data.metadata.id] = $.extend(true, {}, data);
- },
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- var dfd = $.Deferred();
- if (model.__type__ == 'Dataset') {
- var rawDataset = this.datasets[model.id];
- model.set(rawDataset.metadata);
- model.fields.reset(rawDataset.fields);
- model.docCount = rawDataset.documents.length;
- dfd.resolve(model);
- }
- return dfd.promise();
- } else if (method === 'update') {
- var dfd = $.Deferred();
- if (model.__type__ == 'Document') {
- _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
- if(doc.id === model.id) {
- self.datasets[model.dataset.id].documents[idx] = model.toJSON();
- }
- });
- dfd.resolve(model);
- }
- return dfd.promise();
- } else if (method === 'delete') {
- var dfd = $.Deferred();
- if (model.__type__ == 'Document') {
- var rawDataset = self.datasets[model.dataset.id];
- var newdocs = _.reject(rawDataset.documents, function(doc) {
- return (doc.id === model.id);
- });
- rawDataset.documents = newdocs;
- dfd.resolve(model);
- }
- return dfd.promise();
- } else {
- alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
- }
- },
- query: function(model, queryObj) {
- var numRows = queryObj.size;
- var start = queryObj.offset;
- var dfd = $.Deferred();
- results = this.datasets[model.id].documents;
- // not complete sorting!
- _.each(queryObj.sort, function(item) {
- results = _.sortBy(results, function(doc) {
- var _out = doc[item[0]];
- return (item[1] == 'asc') ? _out : -1*_out;
- });
- });
- var results = results.slice(start, start+numRows);
- dfd.resolve(results);
- return dfd.promise();
- }
- });
- my.backends['memory'] = new my.BackendMemory();
-
- // ## BackendWebstore
- //
- // Connecting to [Webstores](http://github.com/okfn/webstore)
- //
- // To use this backend ensure your Dataset has a webstore_url in its attributes.
- my.BackendWebstore = Backbone.Model.extend({
- sync: function(method, model, options) {
- if (method === "read") {
- if (model.__type__ == 'Dataset') {
- var base = model.get('webstore_url');
- var schemaUrl = base + '/schema.json';
- var jqxhr = $.ajax({
- url: schemaUrl,
- dataType: 'jsonp',
- jsonp: '_callback'
- });
- var dfd = $.Deferred();
- wrapInTimeout(jqxhr).done(function(schema) {
- var fieldData = _.map(schema.data, function(item) {
- item.id = item.name;
- delete item.name;
- return item;
- });
- model.fields.reset(fieldData);
- model.docCount = schema.count;
- dfd.resolve(model, jqxhr);
- })
- .fail(function(arguments) {
- dfd.reject(arguments);
- });
- return dfd.promise();
- }
- }
- },
- query: function(model, queryObj) {
- var base = model.get('webstore_url');
- var data = {
- _limit: queryObj.size
- , _offset: queryObj.offset
- };
- var jqxhr = $.ajax({
- url: base + '.json',
- data: data,
- dataType: 'jsonp',
- jsonp: '_callback',
- cache: true
- });
- var dfd = $.Deferred();
- jqxhr.done(function(results) {
- dfd.resolve(results.data);
- });
- return dfd.promise();
- }
- });
- my.backends['webstore'] = new my.BackendWebstore();
-
- // ## BackendDataProxy
- //
- // For connecting to [DataProxy-s](http://github.com/okfn/dataproxy).
- //
- // When initializing the DataProxy backend you can set the following attributes:
- //
- // * dataproxy: {url-to-proxy} (optional). Defaults to http://jsonpdataproxy.appspot.com
- //
- // Datasets using using this backend should set the following attributes:
- //
- // * url: (required) url-of-data-to-proxy
- // * format: (optional) csv | xls (defaults to csv if not specified)
- //
- // Note that this is a **read-only** backend.
- my.BackendDataProxy = Backbone.Model.extend({
- defaults: {
- dataproxy_url: 'http://jsonpdataproxy.appspot.com'
- },
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- if (model.__type__ == 'Dataset') {
- var base = self.get('dataproxy_url');
- // TODO: should we cache for extra efficiency
- var data = {
- url: model.get('url')
- , 'max-results': 1
- , type: model.get('format') || 'csv'
- };
- var jqxhr = $.ajax({
- url: base
- , data: data
- , dataType: 'jsonp'
- });
- var dfd = $.Deferred();
- wrapInTimeout(jqxhr).done(function(results) {
- model.fields.reset(_.map(results.fields, function(fieldId) {
- return {id: fieldId};
- })
- );
- dfd.resolve(model, jqxhr);
- })
- .fail(function(arguments) {
- dfd.reject(arguments);
- });
- return dfd.promise();
- }
- } else {
- alert('This backend only supports read operations');
- }
- },
- query: function(dataset, queryObj) {
- var base = this.get('dataproxy_url');
- var data = {
- url: dataset.get('url')
- , 'max-results': queryObj.size
- , type: dataset.get('format')
- };
- var jqxhr = $.ajax({
- url: base
- , data: data
- , dataType: 'jsonp'
- });
- var dfd = $.Deferred();
- jqxhr.done(function(results) {
- var _out = _.map(results.data, function(doc) {
- var tmp = {};
- _.each(results.fields, function(key, idx) {
- tmp[key] = doc[idx];
- });
- return tmp;
- });
- dfd.resolve(_out);
- });
- return dfd.promise();
- }
- });
- my.backends['dataproxy'] = new my.BackendDataProxy();
-
-
- // ## Google spreadsheet backend
- //
- // Connect to Google Docs spreadsheet.
- //
- // Dataset must have a url attribute pointing to the Gdocs
- // spreadsheet's JSON feed e.g.
- //
- //
- // var dataset = new recline.Model.Dataset({
- // url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
- // },
- // 'gdocs'
- // );
- //
- my.BackendGDoc = Backbone.Model.extend({
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- var dfd = $.Deferred();
- var dataset = model;
-
- $.getJSON(model.get('url'), function(d) {
- result = self.gdocsToJavascript(d);
- model.fields.reset(_.map(result.field, function(fieldId) {
- return {id: fieldId};
- })
- );
- // cache data onto dataset (we have loaded whole gdoc it seems!)
- model._dataCache = result.data;
- dfd.resolve(model);
- })
- return dfd.promise(); }
- },
-
- query: function(dataset, queryObj) {
- var dfd = $.Deferred();
- var fields = _.pluck(dataset.fields.toJSON(), 'id');
-
- // zip the fields with the data rows to produce js objs
- // TODO: factor this out as a common method with other backends
- var objs = _.map(dataset._dataCache, function (d) {
- var obj = {};
- _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
- return obj;
- });
- dfd.resolve(objs);
- return dfd;
- },
- gdocsToJavascript: function(gdocsSpreadsheet) {
- /*
- :options: (optional) optional argument dictionary:
- columnsToUse: list of columns to use (specified by field names)
- colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
- :return: tabular data object (hash with keys: field and data).
-
- Issues: seems google docs return columns in rows in random order and not even sure whether consistent across rows.
- */
- var options = {};
- if (arguments.length > 1) {
- options = arguments[1];
- }
- var results = {
- 'field': [],
- 'data': []
- };
- // default is no special info on type of columns
- var colTypes = {};
- if (options.colTypes) {
- colTypes = options.colTypes;
- }
- // either extract column headings from spreadsheet directly, or used supplied ones
- if (options.columnsToUse) {
- // columns set to subset supplied
- results.field = options.columnsToUse;
- } else {
- // set columns to use to be all available
- if (gdocsSpreadsheet.feed.entry.length > 0) {
- for (var k in gdocsSpreadsheet.feed.entry[0]) {
- if (k.substr(0, 3) == 'gsx') {
- var col = k.substr(4)
- results.field.push(col);
- }
- }
- }
- }
-
- // converts non numberical values that should be numerical (22.3%[string] -> 0.223[float])
- var rep = /^([\d\.\-]+)\%$/;
- $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
- var row = [];
- for (var k in results.field) {
- var col = results.field[k];
- var _keyname = 'gsx$' + col;
- var value = entry[_keyname]['$t'];
- // if labelled as % and value contains %, convert
- if (colTypes[col] == 'percent') {
- if (rep.test(value)) {
- var value2 = rep.exec(value);
- var value3 = parseFloat(value2);
- value = value3 / 100;
- }
- }
- row.push(value);
- }
- results.data.push(row);
- });
- return results;
- }
- });
- my.backends['gdocs'] = new my.BackendGDoc();
-
-}(jQuery, this.recline.Model));
// importScripts('lib/underscore.js');
onmessage = function(message) {
@@ -550,6 +164,7 @@ my.Dataset = Backbone.Model.extend({
// Resulting DocumentList are used to reset this.currentDocuments and are
// also returned.
query: function(queryObj) {
+ this.trigger('query:start');
var self = this;
this.queryState.set(queryObj, {silent: true});
var dfd = $.Deferred();
@@ -561,9 +176,11 @@ my.Dataset = Backbone.Model.extend({
return _doc;
});
self.currentDocuments.reset(docs);
+ self.trigger('query:done');
dfd.resolve(self.currentDocuments);
})
.fail(function(arguments) {
+ self.trigger('query:fail', arguments);
dfd.reject(arguments);
});
return dfd.promise();
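Given the query:start, query:done and query:fail triggers added in this hunk, client code can react to the query lifecycle; a sketch (the spinner selector is illustrative, not part of the patch):

    // Sketch: toggle a hypothetical loading indicator around dataset queries.
    dataset.bind('query:start', function() {
      $('.loading-spinner').show();
    });
    dataset.bind('query:done', function() {
      $('.loading-spinner').hide();
    });
    dataset.bind('query:fail', function(error) {
      $('.loading-spinner').hide();
      console.log('query failed', error);
    });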
@@ -624,7 +241,7 @@ my.FieldList = Backbone.Collection.extend({
my.Query = Backbone.Model.extend({
defaults: {
size: 100
- , offset: 0
+ , from: 0
}
});
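With the default pagination key renamed from offset to from, a caller building a query hash would now write something like the following (a sketch, not taken from the patch):

    // Sketch: request the second page of 100 documents using the new key.
    dataset.query({size: 100, from: 100}).done(function(docs) {
      console.log('got ' + docs.length + ' documents');
    });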
@@ -800,10 +417,9 @@ var util = function() {
this.recline = this.recline || {};
this.recline.View = this.recline.View || {};
-// Views module following classic module pattern
(function($, my) {
-// Graph view for a Dataset using Flot graphing library.
+// ## Graph view for a Dataset using Flot graphing library.
//
// Initialization arguments:
//
@@ -937,7 +553,7 @@ my.FlotGraph = Backbone.View.extend({
// Uncaught Invalid dimensions for plot, width = 0, height = 0
// * There is no data for the plot -- either same error or may have issues later with errors like 'non-existent node-value'
var areWeVisible = !jQuery.expr.filters.hidden(this.el[0]);
- if (!this.plot && (!areWeVisible || this.model.currentDocuments.length == 0)) {
+ if ((!areWeVisible || this.model.currentDocuments.length == 0)) {
return
}
// create this.plot and cache it
@@ -1038,204 +654,20 @@ this.recline = this.recline || {};
this.recline.View = this.recline.View || {};
(function($, my) {
-// ## DataExplorer
-//
-// The primary view for the entire application. Usage:
-//
-//
-// var myExplorer = new model.recline.DataExplorer({
-// model: {{recline.Model.Dataset instance}}
-// el: {{an existing dom element}}
-// views: {{page views}}
-// config: {{config options -- see below}}
-// });
-//
-//
-// ### Parameters
-//
-// **model**: (required) Dataset instance.
-//
-// **el**: (required) DOM element.
-//
-// **views**: (optional) the views (Grid, Graph etc) for DataExplorer to
-// show. This is an array of view hashes. If not provided
-// just initialize a DataTable with id 'grid'. Example:
-//
-//
-// var views = [
-// {
-// id: 'grid', // used for routing
-// label: 'Grid', // used for view switcher
-// view: new recline.View.DataTable({
-// model: dataset
-// })
-// },
-// {
-// id: 'graph',
-// label: 'Graph',
-// view: new recline.View.FlotGraph({
-// model: dataset
-// })
-// }
-// ];
-//
-//
-// **config**: Config options like:
-//
-// * displayCount: how many documents to display initially (default: 10)
-// * readOnly: true/false (default: false) value indicating whether to
-// operate in read-only mode (hiding all editing options).
-//
-// NB: the element already being in the DOM is important for rendering of
-// FlotGraph subview.
-my.DataExplorer = Backbone.View.extend({
- template: ' \
-
+// var myExplorer = new model.recline.DataExplorer({
+// model: {{recline.Model.Dataset instance}}
+// el: {{an existing dom element}}
+// views: {{page views}}
+// config: {{config options -- see below}}
+// });
+//
+//
+// ### Parameters
+//
+// **model**: (required) Dataset instance.
+//
+// **el**: (required) DOM element.
+//
+// **views**: (optional) the views (Grid, Graph etc) for DataExplorer to
+// show. This is an array of view hashes. If not provided
+// just initialize a DataGrid with id 'grid'. Example:
+//
+//
+// var views = [
+// {
+// id: 'grid', // used for routing
+// label: 'Grid', // used for view switcher
+// view: new recline.View.DataGrid({
+// model: dataset
+// })
+// },
+// {
+// id: 'graph',
+// label: 'Graph',
+// view: new recline.View.FlotGraph({
+// model: dataset
+// })
+// }
+// ];
+//
+//
+// **config**: Config options like:
+//
+// * readOnly: true/false (default: false) value indicating whether to
+// operate in read-only mode (hiding all editing options).
+//
+// NB: the element already being in the DOM is important for rendering of
+// FlotGraph subview.
+my.DataExplorer = Backbone.View.extend({
+ template: ' \
+
+ // var dataset = new recline.Model.Dataset({
+ // url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
+ // },
+ // 'gdocs'
+ // );
+ //
+ my.GDoc = Backbone.Model.extend({
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ var dfd = $.Deferred();
+ var dataset = model;
+
+ $.getJSON(model.get('url'), function(d) {
+ var result = self.gdocsToJavascript(d);
+ model.fields.reset(_.map(result.field, function(fieldId) {
+ return {id: fieldId};
+ })
+ );
+ // cache data onto dataset (we have loaded whole gdoc it seems!)
+ model._dataCache = result.data;
+ dfd.resolve(model);
+ })
+ return dfd.promise(); }
+ },
+
+ query: function(dataset, queryObj) {
+ var dfd = $.Deferred();
+ var fields = _.pluck(dataset.fields.toJSON(), 'id');
+
+ // zip the fields with the data rows to produce js objs
+ // TODO: factor this out as a common method with other backends
+ var objs = _.map(dataset._dataCache, function (d) {
+ var obj = {};
+ _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
+ return obj;
+ });
+ dfd.resolve(objs);
+ return dfd;
+ },
+ gdocsToJavascript: function(gdocsSpreadsheet) {
+ /*
+ :options: (optional) optional argument dictionary:
+ columnsToUse: list of columns to use (specified by field names)
+ colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
+ :return: tabular data object (hash with keys: field and data).
+
+ Issues: Google Docs seems to return columns within rows in random order, and it is not clear whether the order is consistent across rows.
+ */
+ var options = {};
+ if (arguments.length > 1) {
+ options = arguments[1];
+ }
+ var results = {
+ 'field': [],
+ 'data': []
+ };
+ // default is no special info on type of columns
+ var colTypes = {};
+ if (options.colTypes) {
+ colTypes = options.colTypes;
+ }
+ // either extract column headings from the spreadsheet directly, or use supplied ones
+ if (options.columnsToUse) {
+ // columns set to subset supplied
+ results.field = options.columnsToUse;
+ } else {
+ // set columns to use to be all available
+ if (gdocsSpreadsheet.feed.entry.length > 0) {
+ for (var k in gdocsSpreadsheet.feed.entry[0]) {
+ if (k.substr(0, 3) == 'gsx') {
+ var col = k.substr(4)
+ results.field.push(col);
+ }
+ }
+ }
+ }
+
+ // convert non-numerical values that should be numerical (e.g. '22.3%' [string] -> 0.223 [float])
+ var rep = /^([\d\.\-]+)\%$/;
+ $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
+ var row = [];
+ for (var k in results.field) {
+ var col = results.field[k];
+ var _keyname = 'gsx$' + col;
+ var value = entry[_keyname]['$t'];
+ // if labelled as % and value contains %, convert
+ if (colTypes[col] == 'percent') {
+ if (rep.test(value)) {
+ var value2 = rep.exec(value);
+ var value3 = parseFloat(value2);
+ value = value3 / 100;
+ }
+ }
+ row.push(value);
+ }
+ results.data.push(row);
+ });
+ return results;
+ }
+ });
+ recline.Model.backends['gdocs'] = new my.GDoc();
+
+}(jQuery, this.recline.Backend));
+
+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+ // ## Memory Backend - uses in-memory data
+ //
+ // This is very artificial and is really only designed for testing
+ // purposes.
+ //
+ // To use it you should provide in your constructor data:
+ //
+ // * metadata (including fields array)
+ // * documents: list of hashes, each hash being one doc. A doc *must* have an id attribute which is unique.
+ //
+ // Example:
+ //
+ //
+ // // Backend setup
+ // var backend = recline.Backend.Memory();
+ // backend.addDataset({
+ // metadata: {
+ // id: 'my-id',
+ // title: 'My Title'
+ // },
+ // fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}],
+ // documents: [
+ // {id: 0, x: 1, y: 2, z: 3},
+ // {id: 1, x: 2, y: 4, z: 6}
+ // ]
+ // });
+ // // later ...
+ // var dataset = Dataset({id: 'my-id'});
+ // dataset.fetch();
+ // etc ...
+ //
+ my.Memory = Backbone.Model.extend({
+ initialize: function() {
+ this.datasets = {};
+ },
+ addDataset: function(data) {
+ this.datasets[data.metadata.id] = $.extend(true, {}, data);
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Dataset') {
+ var rawDataset = this.datasets[model.id];
+ model.set(rawDataset.metadata);
+ model.fields.reset(rawDataset.fields);
+ model.docCount = rawDataset.documents.length;
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else if (method === 'update') {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Document') {
+ _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
+ if(doc.id === model.id) {
+ self.datasets[model.dataset.id].documents[idx] = model.toJSON();
+ }
+ });
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else if (method === 'delete') {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Document') {
+ var rawDataset = self.datasets[model.dataset.id];
+ var newdocs = _.reject(rawDataset.documents, function(doc) {
+ return (doc.id === model.id);
+ });
+ rawDataset.documents = newdocs;
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else {
+ alert('Not supported: sync on Memory backend with method ' + method + ' and model ' + model);
+ }
+ },
+ query: function(model, queryObj) {
+ var numRows = queryObj.size;
+ var start = queryObj.from;
+ var dfd = $.Deferred();
+ results = this.datasets[model.id].documents;
+ // not complete sorting!
+ _.each(queryObj.sort, function(sortObj) {
+ var fieldName = _.keys(sortObj)[0];
+ results = _.sortBy(results, function(doc) {
+ var _out = doc[fieldName];
+ return (sortObj[fieldName].order == 'asc') ? _out : -1*_out;
+ });
+ });
+ var results = results.slice(start, start+numRows);
+ dfd.resolve(results);
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['memory'] = new my.Memory();
+
+}(jQuery, this.recline.Backend));
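As a usage sketch of the Memory backend and of the new sort structure its query method expects (a list of {fieldName: {order: ...}} hashes), assuming the Dataset constructor takes a backend id as its second argument as in the gdocs example earlier in this patch:

    // Sketch: register some in-memory data, then fetch and query it.
    var backend = recline.Model.backends['memory'];
    backend.addDataset({
      metadata: {id: 'my-id', title: 'My Title'},
      fields: [{id: 'x'}, {id: 'y'}],
      documents: [
        {id: 0, x: 3, y: 2},
        {id: 1, x: 1, y: 4},
        {id: 2, x: 2, y: 6}
      ]
    });
    var dataset = new recline.Model.Dataset({id: 'my-id'}, 'memory');
    dataset.fetch().done(function() {
      // Sort descending on x using the [{field: {order: ...}}] shape read
      // by the backend's query method above.
      dataset.query({size: 10, from: 0, sort: [{x: {order: 'desc'}}]})
        .done(function(docs) {
          console.log('first x is ' + docs.at(0).get('x'));
        });
    });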
+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+ // ## Webstore Backend
+ //
+ // Connecting to [Webstores](http://github.com/okfn/webstore)
+ //
+ // To use this backend ensure your Dataset has a webstore_url in its attributes.
+ my.Webstore = Backbone.Model.extend({
+ sync: function(method, model, options) {
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = model.get('webstore_url');
+ var schemaUrl = base + '/schema.json';
+ var jqxhr = $.ajax({
+ url: schemaUrl,
+ dataType: 'jsonp',
+ jsonp: '_callback'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(schema) {
+ var fieldData = _.map(schema.data, function(item) {
+ item.id = item.name;
+ delete item.name;
+ return item;
+ });
+ model.fields.reset(fieldData);
+ model.docCount = schema.count;
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ }
+ },
+ query: function(model, queryObj) {
+ var base = model.get('webstore_url');
+ var data = {
+ _limit: queryObj.size
+ , _offset: queryObj.from
+ };
+ var jqxhr = $.ajax({
+ url: base + '.json',
+ data: data,
+ dataType: 'jsonp',
+ jsonp: '_callback',
+ cache: true
+ });
+ var dfd = $.Deferred();
+ jqxhr.done(function(results) {
+ dfd.resolve(results.data);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['webstore'] = new my.Webstore();
+
+}(jQuery, this.recline.Backend));
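A usage sketch for the Webstore backend, again assuming the Dataset constructor convention from the gdocs example (the webstore URL is a placeholder):

    // Sketch: a dataset backed by a webstore table. The webstore_url
    // attribute is what the backend's sync and query methods read above.
    var dataset = new recline.Model.Dataset(
      {webstore_url: 'http://webstore.example.org/me/mydb/mytable'},
      'webstore'
    );
    dataset.fetch().done(function() {
      console.log('fields: ' + dataset.fields.pluck('id').join(', '));
      console.log('total documents: ' + dataset.docCount);
    });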
From 57978c324b1f84019ecc86f9c1fed66bdb1ebbc6 Mon Sep 17 00:00:00 2001
From: Rufus Pollock

backend.js
Recline Backends- -Backends are connectors to backend data sources and stores - -Backends are implemented as Backbone models but this is just a -convenience (they do not save or load themselves from any remote -source) | this.recline = this.recline || {};
-this.recline.Model = this.recline.Model || {};
-
-(function($, my) { |
Backbone.sync- -Override Backbone.sync to hand off to sync function in relevant backend | Backbone.sync = function(method, model, options) {
- return model.backend.sync(method, model, options);
- } |
wrapInTimeout- -Crude way to catch backend errors -Many of backends use JSONP and so will not get error messages and this is -a crude way to catch those errors. | function wrapInTimeout(ourFunction) {
- var dfd = $.Deferred();
- var timeout = 5000;
- var timer = setTimeout(function() {
- dfd.reject({
- message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds'
- });
- }, timeout);
- ourFunction.done(function(arguments) {
- clearTimeout(timer);
- dfd.resolve(arguments);
- })
- .fail(function(arguments) {
- clearTimeout(timer);
- dfd.reject(arguments);
- })
- ;
- return dfd.promise();
- } |
BackendMemory - uses in-memory data- -This is very artificial and is really only designed for testing -purposes. - -To use it you should provide in your constructor data: - -
| my.BackendMemory = Backbone.Model.extend({
- initialize: function() {
- this.datasets = {};
- },
- addDataset: function(data) {
- this.datasets[data.metadata.id] = $.extend(true, {}, data);
- },
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- var dfd = $.Deferred();
- if (model.__type__ == 'Dataset') {
- var rawDataset = this.datasets[model.id];
- model.set(rawDataset.metadata);
- model.fields.reset(rawDataset.fields);
- model.docCount = rawDataset.documents.length;
- dfd.resolve(model);
- }
- return dfd.promise();
- } else if (method === 'update') {
- var dfd = $.Deferred();
- if (model.__type__ == 'Document') {
- _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
- if(doc.id === model.id) {
- self.datasets[model.dataset.id].documents[idx] = model.toJSON();
- }
- });
- dfd.resolve(model);
- }
- return dfd.promise();
- } else if (method === 'delete') {
- var dfd = $.Deferred();
- if (model.__type__ == 'Document') {
- var rawDataset = self.datasets[model.dataset.id];
- var newdocs = _.reject(rawDataset.documents, function(doc) {
- return (doc.id === model.id);
- });
- rawDataset.documents = newdocs;
- dfd.resolve(model);
- }
- return dfd.promise();
- } else {
- alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
- }
- },
- query: function(model, queryObj) {
- var numRows = queryObj.size;
- var start = queryObj.offset;
- var dfd = $.Deferred();
- results = this.datasets[model.id].documents; |
| not complete sorting! | _.each(queryObj.sort, function(item) {
- results = _.sortBy(results, function(doc) {
- var _out = doc[item[0]];
- return (item[1] == 'asc') ? _out : -1*_out;
- });
- });
- var results = results.slice(start, start+numRows);
- dfd.resolve(results);
- return dfd.promise();
- }
- });
- my.backends['memory'] = new my.BackendMemory(); |
BackendWebstore- -Connecting to Webstores - -To use this backend ensure your Dataset has a webstore_url in its attributes. | my.BackendWebstore = Backbone.Model.extend({
- sync: function(method, model, options) {
- if (method === "read") {
- if (model.__type__ == 'Dataset') {
- var base = model.get('webstore_url');
- var schemaUrl = base + '/schema.json';
- var jqxhr = $.ajax({
- url: schemaUrl,
- dataType: 'jsonp',
- jsonp: '_callback'
- });
- var dfd = $.Deferred();
- wrapInTimeout(jqxhr).done(function(schema) {
- var fieldData = _.map(schema.data, function(item) {
- item.id = item.name;
- delete item.name;
- return item;
- });
- model.fields.reset(fieldData);
- model.docCount = schema.count;
- dfd.resolve(model, jqxhr);
- })
- .fail(function(arguments) {
- dfd.reject(arguments);
- });
- return dfd.promise();
- }
- }
- },
- query: function(model, queryObj) {
- var base = model.get('webstore_url');
- var data = {
- _limit: queryObj.size
- , _offset: queryObj.offset
- };
- var jqxhr = $.ajax({
- url: base + '.json',
- data: data,
- dataType: 'jsonp',
- jsonp: '_callback',
- cache: true
- });
- var dfd = $.Deferred();
- jqxhr.done(function(results) {
- dfd.resolve(results.data);
- });
- return dfd.promise();
- }
- });
- my.backends['webstore'] = new my.BackendWebstore(); |
BackendDataProxy- -For connecting to DataProxy-s. - -When initializing the DataProxy backend you can set the following attributes: - -
Datasets using this backend should set the following attributes: - -
Note that this is a read-only backend. | my.BackendDataProxy = Backbone.Model.extend({
- defaults: {
- dataproxy_url: 'http://jsonpdataproxy.appspot.com'
- },
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- if (model.__type__ == 'Dataset') {
- var base = self.get('dataproxy_url'); |
| TODO: should we cache for extra efficiency | var data = {
- url: model.get('url')
- , 'max-results': 1
- , type: model.get('format') || 'csv'
- };
- var jqxhr = $.ajax({
- url: base
- , data: data
- , dataType: 'jsonp'
- });
- var dfd = $.Deferred();
- wrapInTimeout(jqxhr).done(function(results) {
- model.fields.reset(_.map(results.fields, function(fieldId) {
- return {id: fieldId};
- })
- );
- dfd.resolve(model, jqxhr);
- })
- .fail(function(arguments) {
- dfd.reject(arguments);
- });
- return dfd.promise();
- }
- } else {
- alert('This backend only supports read operations');
- }
- },
- query: function(dataset, queryObj) {
- var base = this.get('dataproxy_url');
- var data = {
- url: dataset.get('url')
- , 'max-results': queryObj.size
- , type: dataset.get('format')
- };
- var jqxhr = $.ajax({
- url: base
- , data: data
- , dataType: 'jsonp'
- });
- var dfd = $.Deferred();
- jqxhr.done(function(results) {
- var _out = _.map(results.data, function(doc) {
- var tmp = {};
- _.each(results.fields, function(key, idx) {
- tmp[key] = doc[idx];
- });
- return tmp;
- });
- dfd.resolve(_out);
- });
- return dfd.promise();
- }
- });
- my.backends['dataproxy'] = new my.BackendDataProxy(); |
Google spreadsheet backend- -Connect to Google Docs spreadsheet. - -Dataset must have a url attribute pointing to the Gdocs -spreadsheet's JSON feed e.g. - -
-var dataset = new recline.Model.Dataset({
- url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
- },
- 'gdocs'
-);
- | my.BackendGDoc = Backbone.Model.extend({
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- var dfd = $.Deferred();
- var dataset = model;
-
- $.getJSON(model.get('url'), function(d) {
- result = self.gdocsToJavascript(d);
- model.fields.reset(_.map(result.field, function(fieldId) {
- return {id: fieldId};
- })
- ); |
| cache data onto dataset (we have loaded whole gdoc it seems!) | model._dataCache = result.data;
- dfd.resolve(model);
- })
- return dfd.promise(); }
- },
-
- query: function(dataset, queryObj) {
- var dfd = $.Deferred();
- var fields = _.pluck(dataset.fields.toJSON(), 'id'); |
| zip the fields with the data rows to produce js objs -TODO: factor this out as a common method with other backends | var objs = _.map(dataset._dataCache, function (d) {
- var obj = {};
- _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
- return obj;
- });
- dfd.resolve(objs);
- return dfd;
- },
- gdocsToJavascript: function(gdocsSpreadsheet) {
- /*
- :options: (optional) optional argument dictionary:
- columnsToUse: list of columns to use (specified by field names)
- colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
- :return: tabular data object (hash with keys: field and data).
-
- Issues: seems google docs return columns in rows in random order and not even sure whether consistent across rows.
- */
- var options = {};
- if (arguments.length > 1) {
- options = arguments[1];
- }
- var results = {
- 'field': [],
- 'data': []
- }; |
| default is no special info on type of columns | var colTypes = {};
- if (options.colTypes) {
- colTypes = options.colTypes;
- } |
| either extract column headings from spreadsheet directly, or used supplied ones | if (options.columnsToUse) { |
| columns set to subset supplied | results.field = options.columnsToUse;
- } else { |
| set columns to use to be all available | if (gdocsSpreadsheet.feed.entry.length > 0) {
- for (var k in gdocsSpreadsheet.feed.entry[0]) {
- if (k.substr(0, 3) == 'gsx') {
- var col = k.substr(4)
- results.field.push(col);
- }
- }
- }
- } |
| converts non numberical values that should be numerical (22.3%[string] -> 0.223[float]) | var rep = /^([\d\.\-]+)\%$/;
- $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
- var row = [];
- for (var k in results.field) {
- var col = results.field[k];
- var _keyname = 'gsx$' + col;
- var value = entry[_keyname]['$t']; |
| if labelled as % and value contains %, convert | if (colTypes[col] == 'percent') {
- if (rep.test(value)) {
- var value2 = rep.exec(value);
- var value3 = parseFloat(value2);
- value = value3 / 100;
- }
- }
- row.push(value);
- }
- results.data.push(row);
- });
- return results;
- }
- });
- my.backends['gdocs'] = new my.BackendGDoc();
-
-}(jQuery, this.recline.Model));
-
- |
base.js

+// # Recline Backends
+//
+// Backends are connectors to backend data sources and stores.
+//
+// This is just the base module containing various convenience methods.
+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+  // ## Backbone.sync
+  //
+  // Override Backbone.sync to hand off to the sync function in the relevant backend.
+  Backbone.sync = function(method, model, options) {
+    return model.backend.sync(method, model, options);
+  }
+
+  // ## wrapInTimeout
+  //
+  // Crude way to catch backend errors. Many of the backends use JSONP and so
+  // will not get error messages; this is a crude way to catch those errors.
+  my.wrapInTimeout = function(ourFunction) {
+    var dfd = $.Deferred();
+    var timeout = 5000;
+    var timer = setTimeout(function() {
+      dfd.reject({
+        message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds'
+      });
+    }, timeout);
+    ourFunction.done(function(arguments) {
+      clearTimeout(timer);
+      dfd.resolve(arguments);
+    })
+    .fail(function(arguments) {
+      clearTimeout(timer);
+      dfd.reject(arguments);
+    });
+    return dfd.promise();
+  }
+}(jQuery, this.recline.Backend));
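Since JSONP requests never surface HTTP errors, wrapInTimeout (above) turns silence into a rejection after five seconds. A sketch of how a backend might guard an arbitrary JSONP call with it (the URL is illustrative):

    // Sketch: guard a JSONP request with the shared timeout helper.
    var jqxhr = $.ajax({
      url: 'http://example.org/data/schema.json',
      dataType: 'jsonp',
      jsonp: '_callback'
    });
    recline.Backend.wrapInTimeout(jqxhr)
      .done(function(schema) {
        console.log('schema arrived', schema);
      })
      .fail(function(error) {
        // On timeout this is the helper's {message: ...} object; on a real
        // failure it is whatever the underlying request rejected with.
        console.log(error);
      });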
dataproxy.js
this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) { | |
DataProxy Backend+ +For connecting to DataProxy-s. + +When initializing the DataProxy backend you can set the following attributes: + +
Datasets using this backend should set the following attributes: + +
Note that this is a read-only backend. | my.DataProxy = Backbone.Model.extend({
+ defaults: {
+ dataproxy_url: 'http://jsonpdataproxy.appspot.com'
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = self.get('dataproxy_url'); |
| TODO: should we cache for extra efficiency | var data = {
+ url: model.get('url')
+ , 'max-results': 1
+ , type: model.get('format') || 'csv'
+ };
+ var jqxhr = $.ajax({
+ url: base
+ , data: data
+ , dataType: 'jsonp'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(results) {
+ model.fields.reset(_.map(results.fields, function(fieldId) {
+ return {id: fieldId};
+ })
+ );
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ } else {
+ alert('This backend only supports read operations');
+ }
+ },
+ query: function(dataset, queryObj) {
+ var base = this.get('dataproxy_url');
+ var data = {
+ url: dataset.get('url')
+ , 'max-results': queryObj.size
+ , type: dataset.get('format')
+ };
+ var jqxhr = $.ajax({
+ url: base
+ , data: data
+ , dataType: 'jsonp'
+ });
+ var dfd = $.Deferred();
+ jqxhr.done(function(results) {
+ var _out = _.map(results.data, function(doc) {
+ var tmp = {};
+ _.each(results.fields, function(key, idx) {
+ tmp[key] = doc[idx];
+ });
+ return tmp;
+ });
+ dfd.resolve(_out);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['dataproxy'] = new my.DataProxy();
+
+
+}(jQuery, this.recline.Backend));
+
+ |
elasticsearch.js
this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) { | |
ElasticSearch Backend+ +Connecting to ElasticSearch. + +To use this backend ensure your Dataset has one of the following +attributes (first one found is used): + ++elasticsearch_url +webstore_url +url ++ + This should point to the ES type url. E.G. for ES running on +localhost:9200 with index twitter and type tweet it would be + +http://localhost:9200/twitter/tweet | my.ElasticSearch = Backbone.Model.extend({
+ _getESUrl: function(dataset) {
+ var out = dataset.get('elasticsearch_url');
+ if (out) return out;
+ out = dataset.get('webstore_url');
+ if (out) return out;
+ out = dataset.get('url');
+ return out;
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = self._getESUrl(model);
+ var schemaUrl = base + '/_mapping';
+ var jqxhr = $.ajax({
+ url: schemaUrl,
+ dataType: 'jsonp'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(schema) { |
| only one top level key in ES = the type so we can ignore it | var key = _.keys(schema)[0];
+ var fieldData = _.map(schema[key].properties, function(dict, fieldName) {
+ dict.id = fieldName;
+ return dict;
+ });
+ model.fields.reset(fieldData);
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ } else {
+ alert('This backend currently only supports read operations');
+ }
+ },
+ _normalizeQuery: function(queryObj) {
+ if (queryObj.toJSON) {
+ var out = queryObj.toJSON();
+ } else {
+ var out = _.extend({}, queryObj);
+ }
+ if (out.q != undefined && out.q.trim() === '') {
+ delete out.q;
+ }
+ if (!out.q) {
+ out.query = {
+ match_all: {}
+ }
+ } else {
+ out.query = {
+ query_string: {
+ query: out.q
+ }
+ }
+ delete out.q;
+ }
+ return out;
+ },
+ query: function(model, queryObj) {
+ var queryNormalized = this._normalizeQuery(queryObj);
+ var data = {source: JSON.stringify(queryNormalized)};
+ var base = this._getESUrl(model);
+ var jqxhr = $.ajax({
+ url: base + '/_search',
+ data: data,
+ dataType: 'jsonp'
+ });
+ var dfd = $.Deferred(); |
| TODO: fail case | jqxhr.done(function(results) {
+ model.docCount = results.hits.total;
+ var docs = _.map(results.hits.hits, function(result) {
+ var _out = result._source;
+ _out.id = result._id;
+ return _out;
+ });
+ dfd.resolve(docs);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['elasticsearch'] = new my.ElasticSearch();
+
+}(jQuery, this.recline.Backend));
+
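A rough usage sketch (the endpoint is the one from the comment above; the query values are illustrative): fetching reads _mapping to build the field list, and a query object is normalized before being sent to _search.

    var dataset = new recline.Model.Dataset({
        elasticsearch_url: 'http://localhost:9200/twitter/tweet'
      },
      'elasticsearch'
    );
    dataset.fetch();
    // _normalizeQuery({q: 'hello', size: 10}) yields
    // {query: {query_string: {query: 'hello'}}, size: 10}
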
gdocs.js

+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+// ## Google spreadsheet backend
+//
+// Connect to Google Docs spreadsheet.
+//
+// Dataset must have a url attribute pointing to the Gdocs
+// spreadsheet's JSON feed e.g.
+//
+//   var dataset = new recline.Model.Dataset({
+//       url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
+//     },
+//     'gdocs'
+//   );
+my.GDoc = Backbone.Model.extend({
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ var dfd = $.Deferred();
+ var dataset = model;
+
+ $.getJSON(model.get('url'), function(d) {
+ var result = self.gdocsToJavascript(d);
+ model.fields.reset(_.map(result.field, function(fieldId) {
+ return {id: fieldId};
+ })
+ );
+ // cache data onto dataset (we have loaded whole gdoc it seems!)
+ model._dataCache = result.data;
+ dfd.resolve(model);
+ })
+ return dfd.promise();
+ }
+ },
+
+ query: function(dataset, queryObj) {
+ var dfd = $.Deferred();
+ var fields = _.pluck(dataset.fields.toJSON(), 'id');
+ // zip the fields with the data rows to produce js objs
+ // TODO: factor this out as a common method with other backends
+ var objs = _.map(dataset._dataCache, function (d) {
+ var obj = {};
+ _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
+ return obj;
+ });
+ dfd.resolve(objs);
+ return dfd.promise();
+ },
+ gdocsToJavascript: function(gdocsSpreadsheet) {
+ /*
+ :options: (optional) optional argument dictionary:
+ columnsToUse: list of columns to use (specified by field names)
+ colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
+ :return: tabular data object (hash with keys: field and data).
+
+ Issues: seems google docs return columns in rows in random order and not even sure whether consistent across rows.
+ */
+ var options = {};
+ if (arguments.length > 1) {
+ options = arguments[1];
+ }
+ var results = {
+ 'field': [],
+ 'data': []
+ };
+ // default is no special info on type of columns
+ var colTypes = {};
+ if (options.colTypes) {
+ colTypes = options.colTypes;
+ }
+ // either extract column headings from spreadsheet directly, or use supplied ones
+ if (options.columnsToUse) {
+ // columns set to subset supplied
+ results.field = options.columnsToUse;
+ } else {
+ // set columns to use to be all available
+ if (gdocsSpreadsheet.feed.entry.length > 0) {
+ for (var k in gdocsSpreadsheet.feed.entry[0]) {
+ if (k.substr(0, 3) == 'gsx') {
+ var col = k.substr(4)
+ results.field.push(col);
+ }
+ }
+ }
+ }
+ // converts non-numerical values that should be numerical (22.3%[string] -> 0.223[float])
+ var rep = /^([\d\.\-]+)\%$/;
+ $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
+ var row = [];
+ for (var k in results.field) {
+ var col = results.field[k];
+ var _keyname = 'gsx$' + col;
+ var value = entry[_keyname]['$t'];
+ // if labelled as % and value contains %, convert
+ if (colTypes[col] == 'percent') {
+ if (rep.test(value)) {
+ var value2 = rep.exec(value);
+ var value3 = parseFloat(value2[1]);
+ value = value3 / 100;
+ }
+ }
+ row.push(value);
+ }
+ results.data.push(row);
+ });
+ return results;
+ }
+ });
+ recline.Model.backends['gdocs'] = new my.GDoc();
+
+}(jQuery, this.recline.Backend));
+
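For illustration (the field names and values here are invented), the tabular object returned by gdocsToJavascript and the zip step in query() combine like this:

    var result = {
      field: ['name', 'size'],
      data: [['a', '1'], ['b', '2']]
    };
    var objs = _.map(result.data, function(d) {
      var obj = {};
      _.each(_.zip(result.field, d), function(x) { obj[x[0]] = x[1]; });
      return obj;
    });
    // objs => [{name: 'a', size: '1'}, {name: 'b', size: '2'}]
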
memory.js

+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+// ## Memory Backend - uses in-memory data
+//
+// To use it you should provide data in your constructor.
+//
+// Example:
+//
+//   // Backend setup
+//   var backend = recline.Backend.Memory();
+//   backend.addDataset({
+//     metadata: {
+//       id: 'my-id',
+//       title: 'My Title'
+//     },
+//     fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}],
+//     documents: [
+//       {id: 0, x: 1, y: 2, z: 3},
+//       {id: 1, x: 2, y: 4, z: 6}
+//     ]
+//   });
+//   // later ...
+//   var dataset = Dataset({id: 'my-id'}, 'memory');
+//   dataset.fetch();
+//   etc ...
+my.Memory = Backbone.Model.extend({
+ initialize: function() {
+ this.datasets = {};
+ },
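+ // Register an in-memory dataset keyed by metadata.id; the data is
+ // deep-copied so later edits do not mutate the object passed in.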
+ addDataset: function(data) {
+ this.datasets[data.metadata.id] = $.extend(true, {}, data);
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Dataset') {
+ var rawDataset = this.datasets[model.id];
+ model.set(rawDataset.metadata);
+ model.fields.reset(rawDataset.fields);
+ model.docCount = rawDataset.documents.length;
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else if (method === 'update') {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Document') {
+ _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
+ if(doc.id === model.id) {
+ self.datasets[model.dataset.id].documents[idx] = model.toJSON();
+ }
+ });
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else if (method === 'delete') {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Document') {
+ var rawDataset = self.datasets[model.dataset.id];
+ var newdocs = _.reject(rawDataset.documents, function(doc) {
+ return (doc.id === model.id);
+ });
+ rawDataset.documents = newdocs;
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else {
+ alert('Not supported: sync on Memory backend with method ' + method + ' and model ' + model);
+ }
+ },
+ query: function(model, queryObj) {
+ var numRows = queryObj.size;
+ var start = queryObj.from;
+ var dfd = $.Deferred();
+ var results = this.datasets[model.id].documents;
+ // not complete sorting!
+ _.each(queryObj.sort, function(sortObj) {
+ var fieldName = _.keys(sortObj)[0];
+ results = _.sortBy(results, function(doc) {
+ var _out = doc[fieldName];
+ return (sortObj[fieldName].order == 'asc') ? _out : -1*_out;
+ });
+ });
+ results = results.slice(start, start+numRows);
+ dfd.resolve(results);
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['memory'] = new my.Memory();
+
+}(jQuery, this.recline.Backend));
+
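Continuing the example in the comment above (sort and paging values are invented), a query can be issued directly against the backend; queryObj.sort is a list of {fieldName: {order: ...}} objects.

    backend.query(dataset, {
      size: 10,
      from: 0,
      sort: [{x: {order: 'desc'}}]
    }).done(function(docs) {
      // at most 10 plain row objects, sorted by field x descending
    });
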
webstore.js

+this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) {
+// ## Webstore Backend
+//
+// Connecting to Webstores.
+//
+// To use this backend ensure your Dataset has a webstore_url in its attributes.
+my.Webstore = Backbone.Model.extend({
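+ // Dataset read: fetch <webstore_url>/schema.json via JSONP to populate the
+ // field list and the document count.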
+ sync: function(method, model, options) {
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = model.get('webstore_url');
+ var schemaUrl = base + '/schema.json';
+ var jqxhr = $.ajax({
+ url: schemaUrl,
+ dataType: 'jsonp',
+ jsonp: '_callback'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(schema) {
+ var fieldData = _.map(schema.data, function(item) {
+ item.id = item.name;
+ delete item.name;
+ return item;
+ });
+ model.fields.reset(fieldData);
+ model.docCount = schema.count;
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ }
+ },
+ query: function(model, queryObj) {
+ var base = model.get('webstore_url');
+ var data = {
+ _limit: queryObj.size
+ , _offset: queryObj.from
+ };
+ var jqxhr = $.ajax({
+ url: base + '.json',
+ data: data,
+ dataType: 'jsonp',
+ jsonp: '_callback',
+ cache: true
+ });
+ var dfd = $.Deferred();
+ jqxhr.done(function(results) {
+ dfd.resolve(results.data);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['webstore'] = new my.Webstore();
+
+}(jQuery, this.recline.Backend));
+
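A rough usage sketch (the webstore_url below is illustrative): fetching reads <webstore_url>/schema.json to populate fields and the document count, and each query requests <webstore_url>.json with _limit and _offset parameters.

    var dataset = new recline.Model.Dataset({
        webstore_url: 'http://webstore.example.org/mydb/mytable'
      },
      'webstore'
    );
    dataset.fetch();
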
model.js

+// # Recline Backbone Models
+this.recline = this.recline || {};
+