correct indents on model.js with gg=G

This commit is contained in:
  parent 9eeb4ab97a
  commit b3a71e82cc

src/model.js (766)

@@ -3,420 +3,420 @@ this.recline = this.recline || {};
// Models module following classic module pattern
recline.Model = function($) {

  var my = {};

  // A Dataset model.
  //
  // Other than the standard list of Backbone attributes it has two important attributes:
  //
  // * currentDocuments: a DocumentList containing the Documents we have currently loaded for viewing (you update currentDocuments by calling getDocuments)
  // * docCount: total number of documents in this dataset (obtained on a fetch for this Dataset)
  my.Dataset = Backbone.Model.extend({
    __type__: 'Dataset',
    initialize: function() {
      this.currentDocuments = new my.DocumentList();
      this.docCount = null;
    },

    // AJAX method with promise API to get rows (documents) from the backend.
    //
    // The resulting DocumentList is used to reset this.currentDocuments and is
    // also returned.
    //
    // :param numRows: passed on to backend getDocuments.
    // :param start: passed on to backend getDocuments.
    //
    // This does not fit very well with the Backbone setup. Backbone really expects you to know the ids of the objects you are fetching (which you do in the classic RESTful ajax-y world), but that paradigm does not fit well with the data setup we have here.
    // This also illustrates the limitations of separating the Dataset and the Backend.
    getDocuments: function(numRows, start) {
      var self = this;
      var dfd = $.Deferred();
      this.backend.getDocuments(this.id, numRows, start).then(function(rows) {
        var docs = _.map(rows, function(row) {
          return new my.Document(row);
        });
        self.currentDocuments.reset(docs);
        dfd.resolve(self.currentDocuments);
      });
      return dfd.promise();
    },

    toTemplateJSON: function() {
      var data = this.toJSON();
      data.docCount = this.docCount;
      return data;
    }
  });

  my.Document = Backbone.Model.extend({
    __type__: 'Document'
  });

  my.DocumentList = Backbone.Collection.extend({
    __type__: 'DocumentList',
    // webStore: new WebStore(this.url),
    model: my.Document
  });
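
  // Example (illustrative sketch, not in the original file): consuming the promise
  // returned by Dataset.getDocuments once a dataset has been wired to one of the
  // backends defined below:
  //
  //    dataset.getDocuments(10, 0).then(function(docList) {
  //      // docList is dataset.currentDocuments, freshly reset with Document models
  //      docList.each(function(doc) { console.log(doc.toJSON()); });
  //    });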

  // Backends section
  // ================

  my.setBackend = function(backend) {
    Backbone.sync = backend.sync;
  };

  // Backend which just caches in memory
  //
  // Does not need to be a backbone model but provides some conveniences
  my.BackendMemory = Backbone.Model.extend({
    // Initialize a Backend with a local in-memory dataset.
    //
    // NB: We can handle one and only one dataset at a time.
    //
    // :param dataset: the data for a dataset on which operations will be
    // performed. Its form should be a hash with metadata and data
    // attributes.
    //
    // - metadata: hash of key/value attributes of any kind (but usually with title attribute)
    // - data: hash with 2 keys:
    //   - headers: list of header names/labels
    //   - rows: list of hashes, each hash being one row. A row *must* have an id attribute which is unique.
    //
    // Example of data:
    //
    //   {
    //       headers: ['x', 'y', 'z']
    //     , rows: [
    //         {id: 0, x: 1, y: 2, z: 3}
    //       , {id: 1, x: 2, y: 4, z: 6}
    //       ]
    //   };
    initialize: function(dataset) {
      // deep copy
      this._datasetAsData = $.extend(true, {}, dataset);
      _.bindAll(this, 'sync');
    },
    getDataset: function() {
      var dataset = new my.Dataset({
        id: this._datasetAsData.metadata.id
      });
      // this is a bit weird but the problem is that in sync `this` is set to the model being synced, so we need to give the dataset a reference to the backend explicitly
      dataset.backend = this;
      return dataset;
    },
    sync: function(method, model, options) {
      var self = this;
      if (method === "read") {
        var dfd = $.Deferred();
        // this switching on object type is rather horrible
        // it may make more sense to do the work in individual objects rather than in a central Backbone.sync
        if (model.__type__ == 'Dataset') {
          var dataset = model;
          var rawDataset = this._datasetAsData;
          dataset.set(rawDataset.metadata);
          dataset.set({
            headers: rawDataset.data.headers
          });
          dataset.docCount = rawDataset.data.rows.length;
          dfd.resolve(dataset);
        }
        return dfd.promise();
      } else if (method === 'update') {
        var dfd = $.Deferred();
        if (model.__type__ == 'Document') {
          _.each(this._datasetAsData.data.rows, function(row, idx) {
            if (row.id === model.id) {
              self._datasetAsData.data.rows[idx] = model.toJSON();
            }
          });
          dfd.resolve(model);
        }
        return dfd.promise();
      } else if (method === 'delete') {
        var dfd = $.Deferred();
        if (model.__type__ == 'Document') {
          this._datasetAsData.data.rows = _.reject(this._datasetAsData.data.rows, function(row) {
            return (row.id === model.id);
          });
          dfd.resolve(model);
        }
        return dfd.promise();
      } else {
        alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
      }
    },
    getDocuments: function(datasetId, numRows, start) {
      if (start === undefined) {
        start = 0;
      }
      if (numRows === undefined) {
        numRows = 10;
      }
      var dfd = $.Deferred();
      var rows = this._datasetAsData.data.rows;
      var results = rows.slice(start, start + numRows);
      dfd.resolve(results);
      return dfd.promise();
    }
  });
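
  // Illustrative usage sketch (not in the original file): wiring BackendMemory to a
  // Dataset using the data layout documented above. Application code reaches these as
  // recline.Model.BackendMemory etc.; inside this module they live on `my`. This
  // mirrors the memory-backend unit test included in this commit; the wrapper function
  // only keeps the sketch inert.
  var exampleMemoryUsage = function() {
    var backend = new my.BackendMemory({
      metadata: {id: 'my-dataset', title: 'My Dataset'},
      data: {
        headers: ['x', 'y'],
        rows: [
          {id: 0, x: 1, y: 2}
          , {id: 1, x: 2, y: 4}
        ]
      }
    });
    my.setBackend(backend);            // route Backbone.sync through this backend
    var dataset = backend.getDataset();
    dataset.fetch().then(function(dataset) {
      // sync('read', ...) has copied the metadata onto the model and set headers/docCount
      console.log(dataset.get('headers'), dataset.docCount);
      dataset.getDocuments(2).then(function(docList) {
        console.log(docList.models[0].toJSON());
      });
    });
  };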

  // Webstore Backend for connecting to the Webstore
  //
  // Initializing model argument must contain a url attribute pointing to the
  // relevant Webstore table.
  //
  // Designed to attach to one dataset and one dataset only ...
  // Could generalize to support attaching to different datasets
  my.BackendWebstore = Backbone.Model.extend({
    getDataset: function(id) {
      var dataset = new my.Dataset({
        id: id
      });
      dataset.backend = this;
      return dataset;
    },
    sync: function(method, model, options) {
      if (method === "read") {
        // this switching on object type is rather horrible
        // it may make more sense to do the work in individual objects rather than in a central Backbone.sync
        if (this.__type__ == 'Dataset') {
          var dataset = this;
          // get the schema and return
          var base = this.backend.get('url');
          var schemaUrl = base + '/schema.json';
          var jqxhr = $.ajax({
            url: schemaUrl,
            dataType: 'jsonp',
            jsonp: '_callback'
          });
          var dfd = $.Deferred();
          jqxhr.then(function(schema) {
            var headers = _.map(schema.data, function(item) {
              return item.name;
            });
            dataset.set({
              headers: headers
            });
            dataset.docCount = schema.count;
            dfd.resolve(dataset, jqxhr);
          });
          return dfd.promise();
        }
      }
    },
    getDocuments: function(datasetId, numRows, start) {
      if (start === undefined) {
        start = 0;
      }
      if (numRows === undefined) {
        numRows = 10;
      }
      var base = this.get('url');
      var jqxhr = $.ajax({
        url: base + '.json?_limit=' + numRows,
        dataType: 'jsonp',
        jsonp: '_callback',
        cache: true
      });
      var dfd = $.Deferred();
      jqxhr.then(function(results) {
        dfd.resolve(results.data);
      });
      return dfd.promise();
    }
  });
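
  // Illustrative usage sketch (not in the original file): reading a Webstore table
  // through BackendWebstore, as the webstore unit test in this commit does. The URL
  // is only an example; the wrapper function keeps the sketch inert.
  var exampleWebstoreUsage = function() {
    var backend = new my.BackendWebstore({
      url: 'http://webstore.test.ckan.org/rufuspollock/demo/data'
    });
    my.setBackend(backend);
    var dataset = backend.getDataset('demo');
    dataset.fetch().then(function(dataset) {
      // headers come from <url>/schema.json; docCount from the schema's count
      dataset.getDocuments(5).then(function(docList) {
        console.log(docList.length, 'documents loaded');
      });
    });
  };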

  // DataProxy Backend for connecting to the DataProxy
  //
  // Example initialization:
  //
  //   BackendDataProxy({
  //     model: {
  //       url: {url-of-data-to-proxy},
  //       type: xls || csv,
  //       format: json || jsonp      # return format (defaults to jsonp)
  //       dataproxy: {url-to-proxy}  # defaults to http://jsonpdataproxy.appspot.com
  //     }
  //   })
  my.BackendDataProxy = Backbone.Model.extend({
    defaults: {
      dataproxy: 'http://jsonpdataproxy.appspot.com'
      , type: 'csv'
      , format: 'jsonp'
    },
    getDataset: function(id) {
      var dataset = new my.Dataset({
        id: id
      });
      dataset.backend = this;
      return dataset;
    },
    sync: function(method, model, options) {
      if (method === "read") {
        // this switching on object type is rather horrible
        // it may make more sense to do the work in individual objects rather than in a central Backbone.sync
        if (this.__type__ == 'Dataset') {
          var dataset = this;
          // get the schema and return
          var base = this.backend.get('dataproxy');
          var data = this.backend.toJSON();
          delete data['dataproxy'];
          // TODO: should we cache for extra efficiency
          data['max-results'] = 1;
          var jqxhr = $.ajax({
            url: base
            , data: data
            , dataType: 'jsonp'
          });
          var dfd = $.Deferred();
          jqxhr.then(function(results) {
            dataset.set({
              headers: results.fields
            });
            dfd.resolve(dataset, jqxhr);
          });
          return dfd.promise();
        }
      } else {
        alert('This backend only supports read operations');
      }
    },
    getDocuments: function(datasetId, numRows, start) {
      if (start === undefined) {
        start = 0;
      }
      if (numRows === undefined) {
        numRows = 10;
      }
      var base = this.get('dataproxy');
      var data = this.toJSON();
      delete data['dataproxy'];
      data['max-results'] = numRows;
      var jqxhr = $.ajax({
        url: base
        , data: data
        , dataType: 'jsonp'
        // , cache: true
      });
      var dfd = $.Deferred();
      jqxhr.then(function(results) {
        var _out = _.map(results.data, function(row) {
          var tmp = {};
          _.each(results.fields, function(key, idx) {
            tmp[key] = row[idx];
          });
          return tmp;
        });
        dfd.resolve(_out);
      });
      return dfd.promise();
    }
  });
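
  // Illustrative usage sketch (not in the original file): proxying a CSV file through
  // BackendDataProxy. type, format and dataproxy fall back to the defaults above; the
  // URL is the one used in the dataproxy unit test in this commit, and the wrapper
  // function keeps the sketch inert.
  var exampleDataProxyUsage = function() {
    var backend = new my.BackendDataProxy({
      url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv'
    });
    my.setBackend(backend);
    var dataset = backend.getDataset('gold-prices');
    dataset.fetch().then(function(dataset) {
      // headers come from the proxy response's `fields` attribute
      dataset.getDocuments(10).then(function(docList) {
        console.log(docList.models[0].toJSON());
      });
    });
  };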


  // Google spreadsheet backend
  my.BackendGDoc = Backbone.Model.extend({
    getDataset: function(id) {
      var dataset = new my.Dataset({
        id: id
      });
      dataset.backend = this;
      return dataset;
    },
    sync: function(method, model, options) {
      if (method === "read") {
        console.log('fetching data from url', model.backend.get('url'));
        var dfd = $.Deferred();
        var dataset = this;

        $.getJSON(model.backend.get('url'), function(d) {
          var result = model.backend.gdocsToJavascript(d);
          model.set({'headers': result.header});
          model.backend.set({'data': result.data, 'headers': result.header});
          dfd.resolve(model);
        });

        return dfd.promise();
      }
    },

    getDocuments: function(datasetId, start, numRows) {
      var dfd = $.Deferred();
      var fields = this.get('headers');

      // zip the field headers with the data rows to produce js objs
      // TODO: factor this out as a common method with other backends
      var objs = _.map(this.get('data'), function (d) {
        var obj = {};
        _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; });
        return obj;
      });
      dfd.resolve(objs);
      return dfd;
    },
    gdocsToJavascript: function(gdocsSpreadsheet) {
      /*
        :options: (optional) optional argument dictionary:
          columnsToUse: list of columns to use (specified by header names)
          colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
        :return: tabular data object (hash with keys: header and data).

        Issues: it seems google docs returns columns in rows in random order, and it is not even clear whether this is consistent across rows.
      */
      var options = {};
      if (arguments.length > 1) {
        options = arguments[1];
      }
      var results = {
        'header': [],
        'data': []
      };
      // default is no special info on type of columns
      var colTypes = {};
      if (options.colTypes) {
        colTypes = options.colTypes;
      }
      // either extract column headings from the spreadsheet directly, or use supplied ones
      if (options.columnsToUse) {
        // columns set to the subset supplied
        results.header = options.columnsToUse;
      } else {
        // set columns to use to be all available
        if (gdocsSpreadsheet.feed.entry.length > 0) {
          for (var k in gdocsSpreadsheet.feed.entry[0]) {
            if (k.substr(0, 3) == 'gsx') {
              var col = k.substr(4);
              results.header.push(col);
            }
          }
        }
      }

      // convert non-numerical values that should be numerical (22.3%[string] -> 0.223[float])
      var rep = /^([\d\.\-]+)\%$/;
      $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
        var row = [];
        for (var k in results.header) {
          var col = results.header[k];
          var _keyname = 'gsx$' + col;
          var value = entry[_keyname]['$t'];
          // if labelled as % and value contains %, convert
          if (colTypes[col] == 'percent') {
            if (rep.test(value)) {
              var value2 = rep.exec(value);
              var value3 = parseFloat(value2);
              value = value3 / 100;
            }
          }
          row.push(value);
        }
        results.data.push(row);
      });
      return results;
    }
  });

  return my;

}(jQuery);
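
// Illustrative sketch (not in the original file): the shape BackendGDoc.gdocsToJavascript
// expects and returns, based on the sample spreadsheet feed used in the tests below.
// Header order follows key iteration order of the first entry which, as the method's own
// comment warns, Google does not guarantee. The wrapper function keeps the sketch inert.
var exampleGdocConversion = function() {
  var feed = {
    feed: {
      entry: [
        {'gsx$column-1': {'$t': 'A'}, 'gsx$column-2': {'$t': '1'}},
        {'gsx$column-1': {'$t': 'b'}, 'gsx$column-2': {'$t': '2'}}
      ]
    }
  };
  var out = new recline.Model.BackendGDoc().gdocsToJavascript(feed);
  // out.header -> e.g. ['column-1', 'column-2']
  // out.data   -> [['A', '1'], ['b', '2']], row values ordered to match out.header
  return out;
};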

@@ -1,66 +1,66 @@
(function ($) {

module("Dataset");

test('new Dataset', function () {
  var datasetId = 'test-dataset';
  var metadata = {
    title: 'My Test Dataset'
    , name: '1-my-test-dataset'
    , id: datasetId
  };
  var indata = {
    headers: ['x', 'y', 'z']
    , rows: [
      {id: 0, x: 1, y: 2, z: 3}
      , {id: 1, x: 2, y: 4, z: 6}
      , {id: 2, x: 3, y: 6, z: 9}
      , {id: 3, x: 4, y: 8, z: 12}
      , {id: 4, x: 5, y: 10, z: 15}
      , {id: 5, x: 6, y: 12, z: 18}
    ]
  };
  // this is all rather artificial here but would make more sense with a more complex backend
  var backend = new recline.Model.BackendMemory({
    metadata: metadata,
    data: indata
  });
  recline.Model.setBackend(backend);
  var dataset = backend.getDataset(datasetId);
  expect(9);
  dataset.fetch().then(function(dataset) {
    equal(dataset.get('name'), metadata.name);
    deepEqual(dataset.get('headers'), indata.headers);
    equal(dataset.docCount, 6);
    dataset.getDocuments(4, 2).then(function(documentList) {
      deepEqual(indata.rows[2], documentList.models[0].toJSON());
    });
    dataset.getDocuments().then(function(docList) {
      // Test getDocuments
      equal(docList.length, Math.min(10, indata.rows.length));
      var doc1 = docList.models[0];
      deepEqual(doc1.toJSON(), indata.rows[0]);

      // Test UPDATE
      var newVal = 10;
      doc1.set({x: newVal});
      doc1.save().then(function() {
        equal(backend._datasetAsData.data.rows[0].x, newVal);
      });

      // Test Delete
      doc1.destroy().then(function() {
        equal(backend._datasetAsData.data.rows.length, 5);
        equal(backend._datasetAsData.data.rows[0].x, indata.rows[1].x);
      });
    });
  });
});

// TODO: move to fixtures
var webstoreSchema = {
  "count": 3,
  "data": [
    {
      "name": "__id__",
      "type": "integer",
@@ -81,23 +81,23 @@ var webstoreSchema = {
"type": "text",
|
||||
"values_url": "/rufuspollock/demo/data/distinct/amount"
|
||||
}
|
||||
],
|
||||
"fields": [
|
||||
{
|
||||
"name": "type"
|
||||
},
|
||||
{
|
||||
"name": "name"
|
||||
},
|
||||
{
|
||||
"name": "values_url"
|
||||
}
|
||||
]
|
||||
};
|
||||
],
|
||||
"fields": [
|
||||
{
|
||||
"name": "type"
|
||||
},
|
||||
{
|
||||
"name": "name"
|
||||
},
|
||||
{
|
||||
"name": "values_url"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
webstoreData = {
|
||||
"count": null,
|
||||
"data": [
|
||||
webstoreData = {
|
||||
"count": null,
|
||||
"data": [
|
||||
{
|
||||
"__id__": 1,
|
||||
"amount": "100",
|
||||
@@ -116,357 +116,357 @@ webstoreData = {
"date": "2011-01-01",
|
||||
"geometry": null
|
||||
}
|
||||
],
|
||||
"fields": [
|
||||
{
|
||||
"name": "__id__"
|
||||
},
|
||||
{
|
||||
"name": "date"
|
||||
},
|
||||
{
|
||||
"name": "geometry"
|
||||
},
|
||||
{
|
||||
"name": "amount"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
test('Webstore Backend', function() {
|
||||
var backend = new recline.Model.BackendWebstore({
|
||||
url: 'http://webstore.test.ckan.org/rufuspollock/demo/data'
|
||||
});
|
||||
recline.Model.setBackend(backend);
|
||||
dataset = backend.getDataset();
|
||||
|
||||
var stub = sinon.stub($, 'ajax', function(options) {
|
||||
if (options.url.indexOf('schema.json') != -1) {
|
||||
return {
|
||||
then: function(callback) {
|
||||
callback(webstoreSchema);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
then: function(callback) {
|
||||
callback(webstoreData);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
dataset.fetch().then(function(dataset) {
|
||||
deepEqual(['__id__', 'date', 'geometry', 'amount'], dataset.get('headers'));
|
||||
equal(3, dataset.docCount)
|
||||
dataset.getDocuments().then(function(docList) {
|
||||
equal(3, docList.length)
|
||||
equal("2009-01-01", docList.models[0].get('date'));
|
||||
});
|
||||
});
|
||||
$.ajax.restore();
|
||||
});
|
||||
|
||||
|
||||
var dataProxyData = {
|
||||
"data": [
|
||||
[
|
||||
"1",
|
||||
"1950-01",
|
||||
"34.73"
|
||||
],
|
||||
"fields": [
|
||||
{
|
||||
"name": "__id__"
|
||||
},
|
||||
{
|
||||
"name": "date"
|
||||
},
|
||||
{
|
||||
"name": "geometry"
|
||||
},
|
||||
{
|
||||
"name": "amount"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
test('Webstore Backend', function() {
|
||||
var backend = new recline.Model.BackendWebstore({
|
||||
url: 'http://webstore.test.ckan.org/rufuspollock/demo/data'
|
||||
});
|
||||
recline.Model.setBackend(backend);
|
||||
dataset = backend.getDataset();
|
||||
|
||||
var stub = sinon.stub($, 'ajax', function(options) {
|
||||
if (options.url.indexOf('schema.json') != -1) {
|
||||
return {
|
||||
then: function(callback) {
|
||||
callback(webstoreSchema);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
then: function(callback) {
|
||||
callback(webstoreData);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
dataset.fetch().then(function(dataset) {
|
||||
deepEqual(['__id__', 'date', 'geometry', 'amount'], dataset.get('headers'));
|
||||
equal(3, dataset.docCount)
|
||||
dataset.getDocuments().then(function(docList) {
|
||||
equal(3, docList.length)
|
||||
equal("2009-01-01", docList.models[0].get('date'));
|
||||
});
|
||||
});
|
||||
$.ajax.restore();
|
||||
});
|
||||
|
||||
|
||||
var dataProxyData = {
|
||||
"data": [
|
||||
[
|
||||
"1",
|
||||
"1950-01",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"2",
|
||||
"1950-02",
|
||||
"34.73"
|
||||
],
|
||||
"1950-02",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"3",
|
||||
"1950-03",
|
||||
"34.73"
|
||||
],
|
||||
"1950-03",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"4",
|
||||
"1950-04",
|
||||
"34.73"
|
||||
],
|
||||
"1950-04",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"5",
|
||||
"1950-05",
|
||||
"34.73"
|
||||
],
|
||||
"1950-05",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"6",
|
||||
"1950-06",
|
||||
"34.73"
|
||||
],
|
||||
"1950-06",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"7",
|
||||
"1950-07",
|
||||
"34.73"
|
||||
],
|
||||
"1950-07",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"8",
|
||||
"1950-08",
|
||||
"34.73"
|
||||
],
|
||||
"1950-08",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"9",
|
||||
"1950-09",
|
||||
"34.73"
|
||||
],
|
||||
"1950-09",
|
||||
"34.73"
|
||||
],
|
||||
[
|
||||
"10",
|
||||
"1950-10",
|
||||
"34.73"
|
||||
]
|
||||
],
|
||||
"fields": [
|
||||
"__id__",
|
||||
"1950-10",
|
||||
"34.73"
|
||||
]
|
||||
],
|
||||
"fields": [
|
||||
"__id__",
|
||||
"date",
|
||||
"price"
|
||||
],
|
||||
"length": null,
|
||||
"max_results": 10,
|
||||
"url": "http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv"
|
||||
};
|
||||
],
|
||||
"length": null,
|
||||
"max_results": 10,
|
||||
"url": "http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv"
|
||||
};
|
||||
|
||||
test('DataProxy Backend', function() {
|
||||
// needed only if not stubbing
|
||||
// stop();
|
||||
var backend = new recline.Model.BackendDataProxy({
|
||||
url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv'
|
||||
});
|
||||
recline.Model.setBackend(backend);
|
||||
dataset = backend.getDataset();
|
||||
test('DataProxy Backend', function() {
|
||||
// needed only if not stubbing
|
||||
// stop();
|
||||
var backend = new recline.Model.BackendDataProxy({
|
||||
url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv'
|
||||
});
|
||||
recline.Model.setBackend(backend);
|
||||
dataset = backend.getDataset();
|
||||
|
||||
var stub = sinon.stub($, 'ajax', function(options) {
|
||||
var partialUrl = 'jsonpdataproxy.appspot.com';
|
||||
if (options.url.indexOf(partialUrl) != -1) {
|
||||
return {
|
||||
then: function(callback) {
|
||||
callback(dataProxyData);
|
||||
var stub = sinon.stub($, 'ajax', function(options) {
|
||||
var partialUrl = 'jsonpdataproxy.appspot.com';
|
||||
if (options.url.indexOf(partialUrl) != -1) {
|
||||
return {
|
||||
then: function(callback) {
|
||||
callback(dataProxyData);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
dataset.fetch().then(function(dataset) {
|
||||
deepEqual(['__id__', 'date', 'price'], dataset.get('headers'));
|
||||
equal(null, dataset.docCount)
|
||||
dataset.getDocuments().then(function(docList) {
|
||||
equal(10, docList.length)
|
||||
equal("1950-01", docList.models[0].get('date'));
|
||||
dataset.fetch().then(function(dataset) {
|
||||
deepEqual(['__id__', 'date', 'price'], dataset.get('headers'));
|
||||
equal(null, dataset.docCount)
|
||||
dataset.getDocuments().then(function(docList) {
|
||||
equal(10, docList.length)
|
||||
equal("1950-01", docList.models[0].get('date'));
|
||||
// needed only if not stubbing
|
||||
start();
|
||||
});
|
||||
});
|
||||
$.ajax.restore();
|
||||
});
|
||||
$.ajax.restore();
|
||||
});
|
||||
|
||||
|
||||
var sample_gdocs_spreadsheet_data = {
|
||||
"feed": {
|
||||
"category": [
|
||||
var sample_gdocs_spreadsheet_data = {
|
||||
"feed": {
|
||||
"category": [
|
||||
{
|
||||
"term": "http://schemas.google.com/spreadsheets/2006#list",
|
||||
"scheme": "http://schemas.google.com/spreadsheets/2006"
|
||||
}
|
||||
],
|
||||
"updated": {
|
||||
"$t": "2010-07-12T18:32:16.200Z"
|
||||
},
|
||||
"xmlns": "http://www.w3.org/2005/Atom",
|
||||
"xmlns$gsx": "http://schemas.google.com/spreadsheets/2006/extended",
|
||||
"title": {
|
||||
"$t": "Sheet1",
|
||||
"type": "text"
|
||||
},
|
||||
"author": [
|
||||
{
|
||||
"name": {
|
||||
"$t": "okfn.rufus.pollock"
|
||||
},
|
||||
"email": {
|
||||
"$t": "okfn.rufus.pollock@gmail.com"
|
||||
}
|
||||
}
|
||||
],
|
||||
"openSearch$startIndex": {
|
||||
"$t": "1"
|
||||
},
|
||||
"link": [
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/pub?key=0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc",
|
||||
"type": "text/html",
|
||||
"rel": "alternate"
|
||||
},
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "http://schemas.google.com/g/2005#feed"
|
||||
},
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json-in-script",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "self"
|
||||
}
|
||||
],
|
||||
"xmlns$openSearch": "http://a9.com/-/spec/opensearchrss/1.0/",
|
||||
"entry": [
|
||||
{
|
||||
"category": [
|
||||
{
|
||||
"term": "http://schemas.google.com/spreadsheets/2006#list",
|
||||
"scheme": "http://schemas.google.com/spreadsheets/2006"
|
||||
}
|
||||
],
|
||||
],
|
||||
"updated": {
|
||||
"$t": "2010-07-12T18:32:16.200Z"
|
||||
},
|
||||
"gsx$column-2": {
|
||||
"xmlns": "http://www.w3.org/2005/Atom",
|
||||
"xmlns$gsx": "http://schemas.google.com/spreadsheets/2006/extended",
|
||||
"title": {
|
||||
"$t": "Sheet1",
|
||||
"type": "text"
|
||||
},
|
||||
"author": [
|
||||
{
|
||||
"name": {
|
||||
"$t": "okfn.rufus.pollock"
|
||||
},
|
||||
"email": {
|
||||
"$t": "okfn.rufus.pollock@gmail.com"
|
||||
}
|
||||
}
|
||||
],
|
||||
"openSearch$startIndex": {
|
||||
"$t": "1"
|
||||
},
|
||||
"gsx$column-1": {
|
||||
"$t": "A"
|
||||
},
|
||||
"title": {
|
||||
"$t": "A",
|
||||
"type": "text"
|
||||
},
|
||||
"content": {
|
||||
"$t": "column-2: 1",
|
||||
"type": "text"
|
||||
},
|
||||
"link": [
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cokwr",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "self"
|
||||
}
|
||||
],
|
||||
"id": {
|
||||
"$t": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cokwr"
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/pub?key=0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc",
|
||||
"type": "text/html",
|
||||
"rel": "alternate"
|
||||
},
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "http://schemas.google.com/g/2005#feed"
|
||||
},
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json-in-script",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "self"
|
||||
}
|
||||
},
|
||||
{
|
||||
"category": [
|
||||
],
|
||||
"xmlns$openSearch": "http://a9.com/-/spec/opensearchrss/1.0/",
|
||||
"entry": [
|
||||
{
|
||||
"category": [
|
||||
{
|
||||
"term": "http://schemas.google.com/spreadsheets/2006#list",
|
||||
"scheme": "http://schemas.google.com/spreadsheets/2006"
|
||||
}
|
||||
],
|
||||
"updated": {
|
||||
"$t": "2010-07-12T18:32:16.200Z"
|
||||
],
|
||||
"updated": {
|
||||
"$t": "2010-07-12T18:32:16.200Z"
|
||||
},
|
||||
"gsx$column-2": {
|
||||
"$t": "1"
|
||||
},
|
||||
"gsx$column-1": {
|
||||
"$t": "A"
|
||||
},
|
||||
"title": {
|
||||
"$t": "A",
|
||||
"type": "text"
|
||||
},
|
||||
"content": {
|
||||
"$t": "column-2: 1",
|
||||
"type": "text"
|
||||
},
|
||||
"link": [
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cokwr",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "self"
|
||||
}
|
||||
],
|
||||
"id": {
|
||||
"$t": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cokwr"
|
||||
}
|
||||
},
|
||||
"gsx$column-2": {
|
||||
"$t": "2"
|
||||
},
|
||||
"gsx$column-1": {
|
||||
"$t": "b"
|
||||
},
|
||||
"title": {
|
||||
"$t": "b",
|
||||
"type": "text"
|
||||
},
|
||||
"content": {
|
||||
"$t": "column-2: 2",
|
||||
"type": "text"
|
||||
},
|
||||
"link": [
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cpzh4",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "self"
|
||||
}
|
||||
],
|
||||
"id": {
|
||||
"$t": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cpzh4"
|
||||
}
|
||||
},
|
||||
{
|
||||
"category": [
|
||||
{
|
||||
"category": [
|
||||
{
|
||||
"term": "http://schemas.google.com/spreadsheets/2006#list",
|
||||
"scheme": "http://schemas.google.com/spreadsheets/2006"
|
||||
}
|
||||
],
|
||||
"updated": {
|
||||
"$t": "2010-07-12T18:32:16.200Z"
|
||||
],
|
||||
"updated": {
|
||||
"$t": "2010-07-12T18:32:16.200Z"
|
||||
},
|
||||
"gsx$column-2": {
|
||||
"$t": "2"
|
||||
},
|
||||
"gsx$column-1": {
|
||||
"$t": "b"
|
||||
},
|
||||
"title": {
|
||||
"$t": "b",
|
||||
"type": "text"
|
||||
},
|
||||
"content": {
|
||||
"$t": "column-2: 2",
|
||||
"type": "text"
|
||||
},
|
||||
"link": [
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cpzh4",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "self"
|
||||
}
|
||||
],
|
||||
"id": {
|
||||
"$t": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cpzh4"
|
||||
}
|
||||
},
|
||||
"gsx$column-2": {
|
||||
{
|
||||
"category": [
|
||||
{
|
||||
"term": "http://schemas.google.com/spreadsheets/2006#list",
|
||||
"scheme": "http://schemas.google.com/spreadsheets/2006"
|
||||
}
|
||||
],
|
||||
"updated": {
|
||||
"$t": "2010-07-12T18:32:16.200Z"
|
||||
},
|
||||
"gsx$column-2": {
|
||||
"$t": "3"
|
||||
},
|
||||
"gsx$column-1": {
|
||||
"$t": "c"
|
||||
},
|
||||
"title": {
|
||||
"$t": "c",
|
||||
"type": "text"
|
||||
},
|
||||
"content": {
|
||||
"$t": "column-2: 3",
|
||||
"type": "text"
|
||||
},
|
||||
"link": [
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cre1l",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "self"
|
||||
}
|
||||
],
|
||||
"id": {
|
||||
"$t": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cre1l"
|
||||
}
|
||||
}
|
||||
],
|
||||
"openSearch$totalResults": {
|
||||
"$t": "3"
|
||||
},
|
||||
"gsx$column-1": {
|
||||
"$t": "c"
|
||||
},
|
||||
"title": {
|
||||
"$t": "c",
|
||||
"type": "text"
|
||||
},
|
||||
"content": {
|
||||
"$t": "column-2: 3",
|
||||
"type": "text"
|
||||
},
|
||||
"link": [
|
||||
{
|
||||
"href": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cre1l",
|
||||
"type": "application/atom+xml",
|
||||
"rel": "self"
|
||||
}
|
||||
],
|
||||
"id": {
|
||||
"$t": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values/cre1l"
|
||||
"$t": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values"
|
||||
}
|
||||
}
|
||||
],
|
||||
"openSearch$totalResults": {
|
||||
"$t": "3"
|
||||
},
|
||||
"id": {
|
||||
"$t": "http://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values"
|
||||
}
|
||||
},
|
||||
"version": "1.0",
|
||||
"encoding": "UTF-8"
|
||||
}
|
||||
"version": "1.0",
|
||||
"encoding": "UTF-8"
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
test("GDoc Backend", function() {
|
||||
var backend = new recline.Model.BackendGDoc({url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
|
||||
});
|
||||
recline.Model.setBackend(backend);
|
||||
dataset = backend.getDataset();
|
||||
|
||||
console.log('got gdoc dataset', dataset);
|
||||
|
||||
var stub = sinon.stub($, 'getJSON', function(options, cb) {
|
||||
console.log('options are', options, cb);
|
||||
var partialUrl = 'spreadsheets.google.com';
|
||||
if (options.indexOf(partialUrl) != -1) {
|
||||
cb(sample_gdocs_spreadsheet_data)
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
dataset.fetch().then(function(dataset) {
|
||||
console.log('inside dataset:', dataset, dataset.get('headers'), dataset.get('data'));
|
||||
deepEqual(['column-2', 'column-1'], dataset.get('headers'));
|
||||
//equal(null, dataset.docCount)
|
||||
dataset.getDocuments().then(function(docList) {
|
||||
equal(3, docList.length);
|
||||
console.log(docList.models[0]);
|
||||
equal("A", docList.models[0].get('column-1'));
|
||||
// needed only if not stubbing
|
||||
start();
|
||||
test("GDoc Backend", function() {
|
||||
var backend = new recline.Model.BackendGDoc({url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
|
||||
});
|
||||
});
|
||||
$.getJSON.restore();
|
||||
|
||||
recline.Model.setBackend(backend);
|
||||
dataset = backend.getDataset();
|
||||
|
||||
});
|
||||
console.log('got gdoc dataset', dataset);
|
||||
|
||||
var stub = sinon.stub($, 'getJSON', function(options, cb) {
|
||||
console.log('options are', options, cb);
|
||||
var partialUrl = 'spreadsheets.google.com';
|
||||
if (options.indexOf(partialUrl) != -1) {
|
||||
cb(sample_gdocs_spreadsheet_data)
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
dataset.fetch().then(function(dataset) {
|
||||
console.log('inside dataset:', dataset, dataset.get('headers'), dataset.get('data'));
|
||||
deepEqual(['column-2', 'column-1'], dataset.get('headers'));
|
||||
//equal(null, dataset.docCount)
|
||||
dataset.getDocuments().then(function(docList) {
|
||||
equal(3, docList.length);
|
||||
console.log(docList.models[0]);
|
||||
equal("A", docList.models[0].get('column-1'));
|
||||
// needed only if not stubbing
|
||||
start();
|
||||
});
|
||||
});
|
||||
$.getJSON.restore();
|
||||
|
||||
|
||||
});
|
||||
|
||||
|
||||
})(this.jQuery);