backend.js
Recline Backends

Backends are connectors to backend data sources and stores. Backends are implemented as Backbone models, but this is just a convenience (they do not save or load themselves from any remote source).

this.recline = this.recline || {};
this.recline.Model = this.recline.Model || {};
(function($, my) {
my.backends = {};
Backbone.sync = function(method, model, options) {
return my.backends[model.backendConfig.type].sync(method, model, options);
};
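As an illustration, here is a minimal sketch of how the dispatch works, assuming a plain Backbone model standing in for recline's Dataset model (defined elsewhere in the library) and carrying the `__type__` marker that the backends below check:

```javascript
// Hypothetical sketch: fetch()/save() call Backbone.sync, which looks up the
// backend registered under backendConfig.type and delegates to its sync().
var model = new Backbone.Model();
model.__type__ = 'Dataset';   // marker the backends below branch on
model.backendConfig = {type: 'memory', data: {headers: [], rows: []}};
model.fetch();                // routed to the 'memory' backend's sync('read', model, ...)
```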
BackendMemory - uses in-memory data.

To use you should:

A. provide metadata as model data to the Dataset
B. set backendConfig on your dataset with attributes:
   - type: 'memory'
   - data: an object with two keys, headers (a list of column names) and rows (a list of row objects keyed by those column names, each with an id)

A usage sketch follows the backend code below.

my.BackendMemory = Backbone.Model.extend({
sync: function(method, model, options) {
var self = this;
if (method === "read") {
var dfd = $.Deferred();
if (model.__type__ == 'Dataset') {
var dataset = model;
dataset.set({
headers: dataset.backendConfig.data.headers
});
dataset.docCount = dataset.backendConfig.data.rows.length;
dfd.resolve(dataset);
}
return dfd.promise();
} else if (method === 'update') {
var dfd = $.Deferred();
if (model.__type__ == 'Document') {
_.each(model.backendConfig.data.rows, function(row, idx) {
if(row.id === model.id) {
model.backendConfig.data.rows[idx] = model.toJSON();
}
});
dfd.resolve(model);
}
return dfd.promise();
} else if (method === 'delete') {
var dfd = $.Deferred();
if (model.__type__ == 'Document') {
model.backendConfig.data.rows = _.reject(model.backendConfig.data.rows, function(row) {
return (row.id === model.id);
});
dfd.resolve(model);
}
return dfd.promise();
} else {
alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
}
},
getDocuments: function(model, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var dfd = $.Deferred();
var rows = model.backendConfig.data.rows;
var results = rows.slice(start, start+numRows);
dfd.resolve(results);
return dfd.promise();
}
});
my.backends['memory'] = new my.BackendMemory();
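A hedged usage sketch for the in-memory backend. A plain Backbone.Model stands in for recline's Dataset model (defined elsewhere in the library), and the data values are made up:

```javascript
var dataset = new Backbone.Model();
dataset.__type__ = 'Dataset';                 // marker checked by sync()
dataset.backendConfig = {
  type: 'memory',
  data: {
    headers: ['id', 'x'],
    rows: [{id: 0, x: 1}, {id: 1, x: 2}]
  }
};
var memory = recline.Model.backends['memory'];
memory.sync('read', dataset).then(function(d) {
  console.log(d.get('headers'));              // ['id', 'x']
  console.log(d.docCount);                    // 2
});
memory.getDocuments(dataset, 10, 0).then(function(rows) {
  console.log(rows.length);                   // 2
});
```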
BackendWebstore - for connecting to Webstores.

To use this backend set backendConfig on your Dataset as:

{
'type': 'webstore',
'url': url to relevant Webstore table
}

my.BackendWebstore = Backbone.Model.extend({
sync: function(method, model, options) {
if (method === "read") {
if (model.__type__ == 'Dataset') {
var dataset = model;
var base = dataset.backendConfig.url;
var schemaUrl = base + '/schema.json';
var jqxhr = $.ajax({
url: schemaUrl,
dataType: 'jsonp',
jsonp: '_callback'
});
var dfd = $.Deferred();
jqxhr.then(function(schema) {
var headers = _.map(schema.data, function(item) {
return item.name;
});
dataset.set({
headers: headers
});
dataset.docCount = schema.count;
dfd.resolve(dataset, jqxhr);
});
return dfd.promise();
}
}
},
getDocuments: function(model, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var base = model.backendConfig.url;
var jqxhr = $.ajax({
url: base + '.json?_limit=' + numRows,
dataType: 'jsonp',
jsonp: '_callback',
cache: true
});
var dfd = $.Deferred();
jqxhr.then(function(results) {
dfd.resolve(results.data);
});
return dfd.promise();
}
});
my.backends['webstore'] = new my.BackendWebstore();
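A hedged usage sketch for the Webstore backend; the table URL below is a placeholder, not a real endpoint:

```javascript
var dataset = new Backbone.Model();
dataset.__type__ = 'Dataset';
dataset.backendConfig = {
  type: 'webstore',
  url: 'http://example.com/mydatabase/mytable'   // placeholder Webstore table URL
};
var webstore = recline.Model.backends['webstore'];
// headers and docCount come from {url}/schema.json (fetched via JSONP)
webstore.sync('read', dataset).then(function(d) {
  console.log(d.get('headers'), d.docCount);
  // rows come from {url}.json?_limit=N
  webstore.getDocuments(d, 5).then(function(rows) {
    console.log(rows);
  });
});
```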
BackendDataProxy - for connecting to DataProxy-s.

Set a Dataset to use this backend by giving it a backendConfig with:
- type: 'dataproxy'
- url: the URL of the raw data file to be proxied
- format: the format of that file (e.g. csv)

When initializing the DataProxy backend you can set the following attributes:
- dataproxy: the URL of the DataProxy service to use (defaults to http://jsonpdataproxy.appspot.com)

Note that this is a read-only backend. A usage sketch follows the code below.

my.BackendDataProxy = Backbone.Model.extend({
defaults: {
dataproxy: 'http://jsonpdataproxy.appspot.com'
},
sync: function(method, model, options) {
if (method === "read") {
if (model.__type__ == 'Dataset') {
var dataset = model;
var base = my.backends['dataproxy'].get('dataproxy');
// TODO: should we cache for extra efficiency
var data = {
url: dataset.backendConfig.url
, 'max-results': 1
, type: dataset.backendConfig.format
};
var jqxhr = $.ajax({
url: base
, data: data
, dataType: 'jsonp'
});
var dfd = $.Deferred();
jqxhr.then(function(results) {
dataset.set({
headers: results.fields
});
dfd.resolve(dataset, jqxhr);
});
return dfd.promise();
}
} else {
alert('This backend only supports read operations');
}
},
getDocuments: function(dataset, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var base = my.backends['dataproxy'].get('dataproxy');
var data = {
url: dataset.backendConfig.url
, 'max-results': numRows
, type: dataset.backendConfig.format
};
var jqxhr = $.ajax({
url: base
, data: data
, dataType: 'jsonp'
});
var dfd = $.Deferred();
jqxhr.then(function(results) {
var _out = _.map(results.data, function(row) {
var tmp = {};
_.each(results.fields, function(key, idx) {
tmp[key] = row[idx];
});
return tmp;
});
dfd.resolve(_out);
});
return dfd.promise();
}
});
my.backends['dataproxy'] = new my.BackendDataProxy();
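A hedged usage sketch for the DataProxy backend; the CSV URL is a placeholder and the returned fields depend entirely on that file:

```javascript
var dataset = new Backbone.Model();
dataset.__type__ = 'Dataset';
dataset.backendConfig = {
  type: 'dataproxy',
  url: 'http://example.com/data/mydata.csv',   // placeholder raw data URL
  format: 'csv'
};
var dataproxy = recline.Model.backends['dataproxy'];
// the proxy service itself is configurable on the backend model, e.g.
// dataproxy.set({dataproxy: 'http://jsonpdataproxy.appspot.com'});
dataproxy.sync('read', dataset).then(function(d) {
  console.log(d.get('headers'));               // field names reported by the proxy
  dataproxy.getDocuments(dataset, 5).then(function(rows) {
    console.log(rows);                         // array of {field: value} objects
  });
});
```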
Google spreadsheet backend - connect to a Google Docs spreadsheet. Only read operations are implemented by sync below.

my.BackendGDoc = Backbone.Model.extend({
sync: function(method, model, options) {
if (method === "read") {
var dfd = $.Deferred();
var dataset = model;
$.getJSON(model.backendConfig.url, function(d) {
var result = my.backends['gdocs'].gdocsToJavascript(d);
model.set({'headers': result.header});
// cache data onto dataset (we have loaded the whole gdoc, it seems!)
model._dataCache = result.data;
dfd.resolve(model);
});
return dfd.promise();
}
},
getDocuments: function(dataset, start, numRows) {
var dfd = $.Deferred();
var fields = dataset.get('headers');
// zip the field headers with the data rows to produce js objects
// TODO: factor this out as a common method with other backends
var objs = _.map(dataset._dataCache, function (d) {
var obj = {};
_.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
return obj;
});
dfd.resolve(objs);
return dfd.promise();
},
gdocsToJavascript: function(gdocsSpreadsheet) {
/*
:options: (optional) argument dictionary:
columnsToUse: list of columns to use (specified by header names)
colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent) for use in conversion.
:return: tabular data object (hash with keys: header and data).
Issues: Google Docs seems to return columns within rows in random order, and it is not clear whether the ordering is consistent across rows.
*/
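// For illustration (hedged, made-up values): a feed whose first entry looks
// roughly like {"gsx$name": {"$t": "alice"}, "gsx$score": {"$t": "22.3%"}}
// yields header ['name', 'score'] and, when called with
// options.colTypes = {score: 'percent'}, the data row ['alice', 0.223].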
var options = {};
if (arguments.length > 1) {
options = arguments[1];
}
var results = {
'header': [],
'data': []
};
// default is no special info on type of columns
var colTypes = {};
if (options.colTypes) {
colTypes = options.colTypes;
}
// either extract column headings from the spreadsheet directly, or use supplied ones
if (options.columnsToUse) {
// columns set to the supplied subset
results.header = options.columnsToUse;
} else {
// set columns to use to be all available
if (gdocsSpreadsheet.feed.entry.length > 0) {
for (var k in gdocsSpreadsheet.feed.entry[0]) {
// spreadsheet column keys look like 'gsx$columnname'
if (k.substr(0, 3) == 'gsx') {
var col = k.substr(4);
results.header.push(col);
}
}
}
}
// convert non-numerical values that should be numerical (e.g. "22.3%" [string] -> 0.223 [float])
var rep = /^([\d\.\-]+)\%$/;
$.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
var row = [];
for (var k in results.header) {
var col = results.header[k];
var _keyname = 'gsx$' + col;
var value = entry[_keyname]['$t'];
// if labelled as percent and value contains %, convert
if (colTypes[col] == 'percent') {
if (rep.test(value)) {
var value2 = rep.exec(value);
var value3 = parseFloat(value2[1]);  // use the captured numeric part
value = value3 / 100;
}
}
row.push(value);
}
results.data.push(row);
});
return results;
}
});
my.backends['gdocs'] = new my.BackendGDoc();
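A hedged usage sketch for the Google spreadsheet backend. The feed URL shape is indicative only (a public JSON 'list' feed requested via JSONP); substitute your own spreadsheet key:

```javascript
var dataset = new Backbone.Model();
dataset.backendConfig = {
  type: 'gdocs',
  // indicative URL shape only; KEY is a placeholder for a spreadsheet key
  url: 'https://spreadsheets.google.com/feeds/list/KEY/od6/public/values' +
       '?alt=json-in-script&callback=?'
};
var gdocs = recline.Model.backends['gdocs'];
gdocs.sync('read', dataset).then(function(d) {
  console.log(d.get('headers'));               // column names derived from gsx$ keys
  gdocs.getDocuments(d).then(function(docs) {
    console.log(docs);                         // [{column: value, ...}, ...]
  });
});
```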
}(jQuery, this.recline.Model));