Correct indentation in model.js with gg=G (Vim whole-file re-indent)

This commit is contained in:
James Casbon
2012-01-29 16:20:51 +00:00
parent 9eeb4ab97a
commit b3a71e82cc
2 changed files with 736 additions and 736 deletions

View File

@@ -3,420 +3,420 @@ this.recline = this.recline || {};
// Models module following classic module pattern
recline.Model = function($) {
// Module-private namespace holding everything recline.Model exports.
// (The declaration was duplicated by a bad diff merge; one copy suffices.)
var my = {};
// A Dataset model.
//
// Other than standard list of Backbone attributes it has two important attributes:
//
// * currentDocuments: a DocumentList containing the Documents we have currently loaded for viewing (you update currentDocuments by calling getRows)
// * docCount: total number of documents in this dataset (obtained on a fetch for this Dataset)
my.Dataset = Backbone.Model.extend({
__type__: 'Dataset',
// Set up per-instance state: an empty DocumentList for the currently
// loaded documents and docCount = null until a fetch fills it in.
initialize: function() {
this.currentDocuments = new my.DocumentList();
this.docCount = null;
},
// A Dataset model.
//
// Other than standard list of Backbone attributes it has two important attributes:
//
// * currentDocuments: a DocumentList containing the Documents we have currently loaded for viewing (you update currentDocuments by calling getRows)
// * docCount: total number of documents in this dataset (obtained on a fetch for this Dataset)
my.Dataset = Backbone.Model.extend({
__type__: 'Dataset',
initialize: function() {
this.currentDocuments = new my.DocumentList();
this.docCount = null;
},
// AJAX method with promise API to get rows (documents) from the backend.
//
// Resulting DocumentList are used to reset this.currentDocuments and are
// also returned.
//
// :param numRows: passed onto backend getDocuments.
// :param start: passed onto backend getDocuments.
//
// this does not fit very well with Backbone setup. Backbone really expects you to know the ids of objects your are fetching (which you do in classic RESTful ajax-y world). But this paradigm does not fill well with data set up we have here.
// This also illustrates the limitations of separating the Dataset and the Backend
getDocuments: function(numRows, start) {
var self = this;
var dfd = $.Deferred();
this.backend.getDocuments(this.id, numRows, start).then(function(rows) {
var docs = _.map(rows, function(row) {
return new my.Document(row);
// AJAX method with promise API to get rows (documents) from the backend.
//
// Resulting DocumentList are used to reset this.currentDocuments and are
// also returned.
//
// :param numRows: passed onto backend getDocuments.
// :param start: passed onto backend getDocuments.
//
// this does not fit very well with Backbone setup. Backbone really expects you to know the ids of objects your are fetching (which you do in classic RESTful ajax-y world). But this paradigm does not fill well with data set up we have here.
// This also illustrates the limitations of separating the Dataset and the Backend
getDocuments: function(numRows, start) {
var self = this;
var dfd = $.Deferred();
this.backend.getDocuments(this.id, numRows, start).then(function(rows) {
var docs = _.map(rows, function(row) {
return new my.Document(row);
});
self.currentDocuments.reset(docs);
dfd.resolve(self.currentDocuments);
});
self.currentDocuments.reset(docs);
dfd.resolve(self.currentDocuments);
});
return dfd.promise();
},
toTemplateJSON: function() {
var data = this.toJSON();
data.docCount = this.docCount;
return data;
}
});
// A single document (row) belonging to a Dataset.
my.Document = Backbone.Model.extend({
__type__: 'Document'
});
// An ordered collection of Documents (used for Dataset.currentDocuments).
my.DocumentList = Backbone.Collection.extend({
__type__: 'DocumentList',
// webStore: new WebStore(this.url),
model: my.Document
});
// Backends section
// ================
// Install the given backend's sync as the global Backbone.sync handler.
// NOTE: this replaces sync for ALL Backbone models, not just recline ones.
my.setBackend = function(backend) {
Backbone.sync = backend.sync;
};
// Backend which just caches in memory
//
// Does not need to be a backbone model but provides some conveniences
my.BackendMemory = Backbone.Model.extend({
// Initialize a Backend with a local in-memory dataset.
//
// NB: We can handle one and only one dataset at a time.
//
// :param dataset: the data for a dataset on which operations will be
// performed. Its form should be a hash with metadata and data
// attributes.
//
// - metadata: hash of key/value attributes of any kind (but usually with title attribute)
// - data: hash with 2 keys:
// - headers: list of header names/labels
// - rows: list of hashes, each hash being one row. A row *must* have an id attribute which is unique.
//
// Example of data:
//
// {
// headers: ['x', 'y', 'z']
// , rows: [
// {id: 0, x: 1, y: 2, z: 3}
// , {id: 1, x: 2, y: 4, z: 6}
// ]
// };
// :param dataset: hash with metadata and data attributes (see class docs).
initialize: function(dataset) {
// deep copy
// $.extend(true, ...) deep-copies so later edits don't mutate the caller's object
this._datasetAsData = $.extend(true, {}, dataset);
// bind sync so it still resolves `this` to this backend instance when
// installed as the global Backbone.sync (see my.setBackend)
_.bindAll(this, 'sync');
},
getDataset: function() {
var dataset = new my.Dataset({
id: this._datasetAsData.metadata.id
});
// this is a bit weird but problem is in sync this is set to parent model object so need to give dataset a reference to backend explicitly
dataset.backend = this;
return dataset;
},
sync: function(method, model, options) {
var self = this;
if (method === "read") {
var dfd = $.Deferred();
// this switching on object type is rather horrible
// think may make more sense to do work in individual objects rather than in central Backbone.sync
if (model.__type__ == 'Dataset') {
var dataset = model;
var rawDataset = this._datasetAsData;
dataset.set(rawDataset.metadata);
dataset.set({
headers: rawDataset.data.headers
});
dataset.docCount = rawDataset.data.rows.length;
dfd.resolve(dataset);
}
return dfd.promise();
} else if (method === 'update') {
var dfd = $.Deferred();
if (model.__type__ == 'Document') {
_.each(this._datasetAsData.data.rows, function(row, idx) {
if(row.id === model.id) {
self._datasetAsData.data.rows[idx] = model.toJSON();
},
toTemplateJSON: function() {
var data = this.toJSON();
data.docCount = this.docCount;
return data;
}
});
my.Document = Backbone.Model.extend({
__type__: 'Document'
});
my.DocumentList = Backbone.Collection.extend({
__type__: 'DocumentList',
// webStore: new WebStore(this.url),
model: my.Document
});
// Backends section
// ================
my.setBackend = function(backend) {
Backbone.sync = backend.sync;
};
// Backend which just caches in memory
//
// Does not need to be a backbone model but provides some conveniences
my.BackendMemory = Backbone.Model.extend({
// Initialize a Backend with a local in-memory dataset.
//
// NB: We can handle one and only one dataset at a time.
//
// :param dataset: the data for a dataset on which operations will be
// performed. Its form should be a hash with metadata and data
// attributes.
//
// - metadata: hash of key/value attributes of any kind (but usually with title attribute)
// - data: hash with 2 keys:
// - headers: list of header names/labels
// - rows: list of hashes, each hash being one row. A row *must* have an id attribute which is unique.
//
// Example of data:
//
// {
// headers: ['x', 'y', 'z']
// , rows: [
// {id: 0, x: 1, y: 2, z: 3}
// , {id: 1, x: 2, y: 4, z: 6}
// ]
// };
initialize: function(dataset) {
// deep copy
this._datasetAsData = $.extend(true, {}, dataset);
_.bindAll(this, 'sync');
},
getDataset: function() {
var dataset = new my.Dataset({
id: this._datasetAsData.metadata.id
});
// this is a bit weird but problem is in sync this is set to parent model object so need to give dataset a reference to backend explicitly
dataset.backend = this;
return dataset;
},
sync: function(method, model, options) {
var self = this;
if (method === "read") {
var dfd = $.Deferred();
// this switching on object type is rather horrible
// think may make more sense to do work in individual objects rather than in central Backbone.sync
if (model.__type__ == 'Dataset') {
var dataset = model;
var rawDataset = this._datasetAsData;
dataset.set(rawDataset.metadata);
dataset.set({
headers: rawDataset.data.headers
});
dataset.docCount = rawDataset.data.rows.length;
dfd.resolve(dataset);
}
});
dfd.resolve(model);
}
return dfd.promise();
} else if (method === 'delete') {
var dfd = $.Deferred();
if (model.__type__ == 'Document') {
this._datasetAsData.data.rows = _.reject(this._datasetAsData.data.rows, function(row) {
return (row.id === model.id);
});
dfd.resolve(model);
}
return dfd.promise();
} else {
alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
}
},
getDocuments: function(datasetId, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var dfd = $.Deferred();
rows = this._datasetAsData.data.rows;
var results = rows.slice(start, start+numRows);
dfd.resolve(results);
return dfd.promise();
}
});
// Webstore Backend for connecting to the Webstore
//
// Initializing model argument must contain a url attribute pointing to
// relevant Webstore table.
//
// Designed to only attach to one dataset and one dataset only ...
// Could generalize to support attaching to different datasets
my.BackendWebstore = Backbone.Model.extend({
getDataset: function(id) {
var dataset = new my.Dataset({
id: id
});
dataset.backend = this;
return dataset;
},
sync: function(method, model, options) {
if (method === "read") {
// this switching on object type is rather horrible
// think may make more sense to do work in individual objects rather than in central Backbone.sync
if (this.__type__ == 'Dataset') {
var dataset = this;
// get the schema and return
var base = this.backend.get('url');
var schemaUrl = base + '/schema.json';
var jqxhr = $.ajax({
url: schemaUrl,
dataType: 'jsonp',
jsonp: '_callback'
});
return dfd.promise();
} else if (method === 'update') {
var dfd = $.Deferred();
if (model.__type__ == 'Document') {
_.each(this._datasetAsData.data.rows, function(row, idx) {
if(row.id === model.id) {
self._datasetAsData.data.rows[idx] = model.toJSON();
}
});
dfd.resolve(model);
}
return dfd.promise();
} else if (method === 'delete') {
var dfd = $.Deferred();
if (model.__type__ == 'Document') {
this._datasetAsData.data.rows = _.reject(this._datasetAsData.data.rows, function(row) {
return (row.id === model.id);
});
dfd.resolve(model);
}
return dfd.promise();
} else {
alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
}
},
getDocuments: function(datasetId, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var dfd = $.Deferred();
jqxhr.then(function(schema) {
headers = _.map(schema.data, function(item) {
return item.name;
});
dataset.set({
headers: headers
});
dataset.docCount = schema.count;
dfd.resolve(dataset, jqxhr);
});
rows = this._datasetAsData.data.rows;
var results = rows.slice(start, start+numRows);
dfd.resolve(results);
return dfd.promise();
}
}
},
getDocuments: function(datasetId, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var base = this.get('url');
var jqxhr = $.ajax({
url: base + '.json?_limit=' + numRows,
dataType: 'jsonp',
jsonp: '_callback',
cache: true
});
var dfd = $.Deferred();
jqxhr.then(function(results) {
dfd.resolve(results.data);
});
return dfd.promise();
}
});
});
// DataProxy Backend for connecting to the DataProxy
//
// Example initialization:
//
// BackendDataProxy({
// model: {
// url: {url-of-data-to-proxy},
// type: xls || csv,
// format: json || jsonp # return format (defaults to jsonp)
// dataproxy: {url-to-proxy} # defaults to http://jsonpdataproxy.appspot.com
// }
// })
my.BackendDataProxy = Backbone.Model.extend({
defaults: {
dataproxy: 'http://jsonpdataproxy.appspot.com'
// Webstore Backend for connecting to the Webstore
//
// Initializing model argument must contain a url attribute pointing to
// relevant Webstore table.
//
// Designed to only attach to one dataset and one dataset only ...
// Could generalize to support attaching to different datasets
my.BackendWebstore = Backbone.Model.extend({
getDataset: function(id) {
var dataset = new my.Dataset({
id: id
});
dataset.backend = this;
return dataset;
},
sync: function(method, model, options) {
if (method === "read") {
// this switching on object type is rather horrible
// think may make more sense to do work in individual objects rather than in central Backbone.sync
if (this.__type__ == 'Dataset') {
var dataset = this;
// get the schema and return
var base = this.backend.get('url');
var schemaUrl = base + '/schema.json';
var jqxhr = $.ajax({
url: schemaUrl,
dataType: 'jsonp',
jsonp: '_callback'
});
var dfd = $.Deferred();
jqxhr.then(function(schema) {
headers = _.map(schema.data, function(item) {
return item.name;
});
dataset.set({
headers: headers
});
dataset.docCount = schema.count;
dfd.resolve(dataset, jqxhr);
});
return dfd.promise();
}
}
},
// Fetch up to numRows rows from the Webstore table as a promise of
// results.data.
//
// :param datasetId: ignored — the table URL comes from this model's 'url'
//   attribute.
// :param numRows: row limit, sent as _limit (default 10).
// :param start: defaulted but NOT applied to the request (no _offset is
//   sent), so results always begin at the first row — TODO confirm/fix.
getDocuments: function(datasetId, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var base = this.get('url');
var jqxhr = $.ajax({
url: base + '.json?_limit=' + numRows,
dataType: 'jsonp',
jsonp: '_callback',
cache: true
});
var dfd = $.Deferred();
jqxhr.then(function(results) {
dfd.resolve(results.data);
});
return dfd.promise();
}
});
// DataProxy Backend for connecting to the DataProxy
//
// Example initialization:
//
// BackendDataProxy({
// model: {
// url: {url-of-data-to-proxy},
// type: xls || csv,
// format: json || jsonp # return format (defaults to jsonp)
// dataproxy: {url-to-proxy} # defaults to http://jsonpdataproxy.appspot.com
// }
// })
my.BackendDataProxy = Backbone.Model.extend({
defaults: {
dataproxy: 'http://jsonpdataproxy.appspot.com'
, type: 'csv'
, format: 'jsonp'
},
getDataset: function(id) {
var dataset = new my.Dataset({
id: id
});
dataset.backend = this;
return dataset;
},
sync: function(method, model, options) {
if (method === "read") {
// this switching on object type is rather horrible
// think may make more sense to do work in individual objects rather than in central Backbone.sync
if (this.__type__ == 'Dataset') {
var dataset = this;
// get the schema and return
var base = this.backend.get('dataproxy');
var data = this.backend.toJSON();
delete data['dataproxy'];
// TODO: should we cache for extra efficiency
data['max-results'] = 1;
var jqxhr = $.ajax({
url: base
, data: data
, dataType: 'jsonp'
});
var dfd = $.Deferred();
jqxhr.then(function(results) {
dataset.set({
headers: results.fields
},
getDataset: function(id) {
var dataset = new my.Dataset({
id: id
});
dataset.backend = this;
return dataset;
},
sync: function(method, model, options) {
if (method === "read") {
// this switching on object type is rather horrible
// think may make more sense to do work in individual objects rather than in central Backbone.sync
if (this.__type__ == 'Dataset') {
var dataset = this;
// get the schema and return
var base = this.backend.get('dataproxy');
var data = this.backend.toJSON();
delete data['dataproxy'];
// TODO: should we cache for extra efficiency
data['max-results'] = 1;
var jqxhr = $.ajax({
url: base
, data: data
, dataType: 'jsonp'
});
dfd.resolve(dataset, jqxhr);
});
return dfd.promise();
var dfd = $.Deferred();
jqxhr.then(function(results) {
dataset.set({
headers: results.fields
});
dfd.resolve(dataset, jqxhr);
});
return dfd.promise();
}
} else {
alert('This backend only supports read operations');
}
} else {
alert('This backend only supports read operations');
}
},
getDocuments: function(datasetId, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var base = this.get('dataproxy');
var data = this.toJSON();
delete data['dataproxy'];
data['max-results'] = numRows;
var jqxhr = $.ajax({
url: base
, data: data
, dataType: 'jsonp'
// , cache: true
},
getDocuments: function(datasetId, numRows, start) {
if (start === undefined) {
start = 0;
}
if (numRows === undefined) {
numRows = 10;
}
var base = this.get('dataproxy');
var data = this.toJSON();
delete data['dataproxy'];
data['max-results'] = numRows;
var jqxhr = $.ajax({
url: base
, data: data
, dataType: 'jsonp'
// , cache: true
});
var dfd = $.Deferred();
jqxhr.then(function(results) {
var _out = _.map(results.data, function(row) {
var tmp = {};
_.each(results.fields, function(key, idx) {
tmp[key] = row[idx];
var dfd = $.Deferred();
jqxhr.then(function(results) {
var _out = _.map(results.data, function(row) {
var tmp = {};
_.each(results.fields, function(key, idx) {
tmp[key] = row[idx];
});
return tmp;
});
return tmp;
dfd.resolve(_out);
});
dfd.resolve(_out);
});
return dfd.promise();
}
});
return dfd.promise();
}
});
// Google spreadsheet backend
my.BackendGDoc = Backbone.Model.extend({
getDataset: function(id) {
var dataset = new my.Dataset({
id: id
});
dataset.backend = this;
return dataset;
},
sync: function(method, model, options) {
if (method === "read") {
// Google spreadsheet backend
my.BackendGDoc = Backbone.Model.extend({
// Create a Dataset model for the given id and point it at this GDoc backend.
getDataset: function(id) {
var dataset = new my.Dataset({
id: id
});
dataset.backend = this;
return dataset;
},
sync: function(method, model, options) {
if (method === "read") {
console.log('fetching data from url', model.backend.get('url'));
var dfd = $.Deferred();
var dataset = this;
$.getJSON(model.backend.get('url'), function(d) {
result = model.backend.gdocsToJavascript(d);
model.set({'headers': result.header});
model.backend.set({'data': result.data, 'headers': result.header});
dfd.resolve(model);
result = model.backend.gdocsToJavascript(d);
model.set({'headers': result.header});
model.backend.set({'data': result.data, 'headers': result.header});
dfd.resolve(model);
})
return dfd.promise(); }
},
getDocuments: function(datasetId, start, numRows) {
var dfd = $.Deferred();
var fields = this.get('headers');
// zip the field headers with the data rows to produce js objs
// TODO: factor this out as a common method with other backends
var objs = _.map(this.get('data'), function (d) {
var obj = {};
_.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
return obj;
});
dfd.resolve(objs);
return dfd;
return dfd.promise(); }
},
gdocsToJavascript: function(gdocsSpreadsheet) {
/*
:options: (optional) optional argument dictionary:
columnsToUse: list of columns to use (specified by header names)
colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
:return: tabular data object (hash with keys: header and data).
Issues: seems google docs return columns in rows in random order and not even sure whether consistent across rows.
*/
var options = {};
if (arguments.length > 1) {
options = arguments[1];
}
var results = {
'header': [],
'data': []
};
// default is no special info on type of columns
var colTypes = {};
if (options.colTypes) {
colTypes = options.colTypes;
}
// either extract column headings from spreadsheet directly, or used supplied ones
if (options.columnsToUse) {
// columns set to subset supplied
results.header = options.columnsToUse;
} else {
// set columns to use to be all available
if (gdocsSpreadsheet.feed.entry.length > 0) {
for (var k in gdocsSpreadsheet.feed.entry[0]) {
if (k.substr(0, 3) == 'gsx') {
var col = k.substr(4)
results.header.push(col);
}
}
}
}
// converts non numberical values that should be numerical (22.3%[string] -> 0.223[float])
var rep = /^([\d\.\-]+)\%$/;
$.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
var row = [];
for (var k in results.header) {
var col = results.header[k];
var _keyname = 'gsx$' + col;
var value = entry[_keyname]['$t'];
// if labelled as % and value contains %, convert
if (colTypes[col] == 'percent') {
if (rep.test(value)) {
var value2 = rep.exec(value);
var value3 = parseFloat(value2);
value = value3 / 100;
getDocuments: function(datasetId, start, numRows) {
var dfd = $.Deferred();
var fields = this.get('headers');
// zip the field headers with the data rows to produce js objs
// TODO: factor this out as a common method with other backends
var objs = _.map(this.get('data'), function (d) {
var obj = {};
_.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
return obj;
});
dfd.resolve(objs);
return dfd;
},
gdocsToJavascript: function(gdocsSpreadsheet) {
/*
:options: (optional) optional argument dictionary:
columnsToUse: list of columns to use (specified by header names)
colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
:return: tabular data object (hash with keys: header and data).
Issues: seems google docs return columns in rows in random order and not even sure whether consistent across rows.
*/
var options = {};
if (arguments.length > 1) {
options = arguments[1];
}
var results = {
'header': [],
'data': []
};
// default is no special info on type of columns
var colTypes = {};
if (options.colTypes) {
colTypes = options.colTypes;
}
// either extract column headings from spreadsheet directly, or used supplied ones
if (options.columnsToUse) {
// columns set to subset supplied
results.header = options.columnsToUse;
} else {
// set columns to use to be all available
if (gdocsSpreadsheet.feed.entry.length > 0) {
for (var k in gdocsSpreadsheet.feed.entry[0]) {
if (k.substr(0, 3) == 'gsx') {
var col = k.substr(4)
results.header.push(col);
}
}
}
}
row.push(value);
// converts non numberical values that should be numerical (22.3%[string] -> 0.223[float])
var rep = /^([\d\.\-]+)\%$/;
$.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
var row = [];
for (var k in results.header) {
var col = results.header[k];
var _keyname = 'gsx$' + col;
var value = entry[_keyname]['$t'];
// if labelled as % and value contains %, convert
if (colTypes[col] == 'percent') {
if (rep.test(value)) {
var value2 = rep.exec(value);
var value3 = parseFloat(value2);
value = value3 / 100;
}
}
row.push(value);
}
results.data.push(row);
});
return results;
}
results.data.push(row);
});
return results;
}
});
return my;
return my;
}(jQuery);