[#43,model][m]: refactor Dataset and BackendMemory to the new setup approach.
* NB: other model tests broken as a result.
Parent: 57effa02aa
Commit: 2d4e6a2ecc
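
For orientation, a minimal sketch of the "new setup approach" this commit moves to, assembled from the doc comment and test in the diff below (nothing here goes beyond what the diff itself shows):

    // The backend now owns the raw data, keyed by dataset id ...
    var backend = new recline.Model.BackendMemory();
    backend.addDataset({
      metadata: {id: 'my-id', title: 'My Title', headers: ['x', 'y', 'z']},
      documents: [
        {id: 0, x: 1, y: 2, z: 3},
        {id: 1, x: 2, y: 4, z: 6}
      ]
    });
    // ... and the Dataset takes the backend as a second constructor argument
    // instead of carrying a backendConfig hash.
    var dataset = new recline.Model.Dataset({id: 'my-id'}, backend);
    dataset.fetch();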
@@ -9,13 +9,11 @@ this.recline = this.recline || {};
 this.recline.Model = this.recline.Model || {};

 (function($, my) {
-my.backends = {};
-
 // ## Backbone.sync
 //
 // Override Backbone.sync to hand off to sync function in relevant backend
 Backbone.sync = function(method, model, options) {
-  return my.backends[model.backendConfig.type].sync(method, model, options);
+  return model.backend.sync(method, model, options);
 }

 // ## wrapInTimeout
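In practice the new dispatch means any Backbone fetch/save/destroy on a Recline model is routed to the sync method of the backend instance attached to that model. A small sketch, assuming a backend set up as in the BackendMemory docs below:

    // fetch() calls Backbone.sync('read', dataset, options), which the
    // override above forwards to dataset.backend.sync('read', ...)
    var dataset = new recline.Model.Dataset({id: 'my-id'}, backend);
    dataset.fetch(); // answered by the in-memory backend, no HTTP request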
@@ -45,49 +43,59 @@ this.recline.Model = this.recline.Model || {};

 // ## BackendMemory - uses in-memory data
 //
-// To use you should:
+// This is very artificial and is really only designed for testing
+// purposes.
 //
-// A. provide metadata as model data to the Dataset
+// To use it you should provide in your constructor data:
+//
+// * metadata (including headers array)
+// * documents: list of hashes, each hash being one doc. A doc *must* have an id attribute which is unique.
 //
-// B. Set backendConfig on your dataset with attributes:
-//
-//   - type: 'memory'
-//   - data: hash with 2 keys:
-//
-//     * headers: list of header names/labels
-//     * rows: list of hashes, each hash being one row. A row *must* have an id attribute which is unique.
-//
-// Example of data:
+// Example:
 //
 // <pre>
-// {
-//     headers: ['x', 'y', 'z']
-//   , rows: [
-//       {id: 0, x: 1, y: 2, z: 3}
-//     , {id: 1, x: 2, y: 4, z: 6}
-//   ]
-// };
+//  // Backend setup
+//  var backend = Backend();
+//  backend.addDataset({
+//    metadata: {
+//      id: 'my-id',
+//      title: 'My Title',
+//      headers: ['x', 'y', 'z'],
+//    },
+//    documents: [
+//      {id: 0, x: 1, y: 2, z: 3},
+//      {id: 1, x: 2, y: 4, z: 6}
+//    ]
+//  });
+//  // later ...
+//  var dataset = Dataset({id: 'my-id'});
+//  dataset.fetch();
+//  etc ...
 // </pre>
 my.BackendMemory = Backbone.Model.extend({
+    initialize: function() {
+      this.datasets = {};
+    },
+    addDataset: function(data) {
+      this.datasets[data.metadata.id] = $.extend(true, {}, data);
+    },
     sync: function(method, model, options) {
+      var self = this;
       if (method === "read") {
         var dfd = $.Deferred();
         if (model.__type__ == 'Dataset') {
-          var dataset = model;
-          dataset.set({
-            headers: dataset.backendConfig.data.headers
-          });
-          dataset.docCount = dataset.backendConfig.data.rows.length;
-          dfd.resolve(dataset);
+          var rawDataset = this.datasets[model.id];
+          model.set(rawDataset.metadata);
+          model.docCount = rawDataset.documents.length;
+          dfd.resolve(model);
         }
         return dfd.promise();
       } else if (method === 'update') {
         var dfd = $.Deferred();
         if (model.__type__ == 'Document') {
-          _.each(model.backendConfig.data.rows, function(row, idx) {
-            if(row.id === model.id) {
-              model.backendConfig.data.rows[idx] = model.toJSON();
+          _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
+            if(doc.id === model.id) {
+              self.datasets[model.dataset.id].documents[idx] = model.toJSON();
             }
           });
           dfd.resolve(model);
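The 'update' branch above rewrites the stored document hash in the owning dataset's documents list, matched by id. A sketch of the path that exercises it, assuming the documents came from dataset.query() so that doc.dataset and doc.backend are already set (the Dataset changes later in this commit do exactly that):

    dataset.query().then(function(docList) {
      var doc = docList.models[0];
      doc.set({x: 42});
      // save() -> Backbone.sync('update', doc, ...) -> the backend replaces
      // backend.datasets[doc.dataset.id].documents[idx] with doc.toJSON()
      doc.save();
    });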
@@ -96,9 +104,11 @@ this.recline.Model = this.recline.Model || {};
       } else if (method === 'delete') {
         var dfd = $.Deferred();
         if (model.__type__ == 'Document') {
-          model.backendConfig.data.rows = _.reject(model.backendConfig.data.rows, function(row) {
-            return (row.id === model.id);
+          var rawDataset = self.datasets[model.dataset.id];
+          var newdocs = _.reject(rawDataset.documents, function(doc) {
+            return (doc.id === model.id);
           });
+          rawDataset.documents = newdocs;
           dfd.resolve(model);
         }
         return dfd.promise();
@@ -110,11 +120,11 @@ this.recline.Model = this.recline.Model || {};
       var numRows = queryObj.size;
       var start = queryObj.offset;
       var dfd = $.Deferred();
-      results = model.backendConfig.data.rows;
+      results = this.datasets[model.id].documents;
       // not complete sorting!
       _.each(queryObj.sort, function(item) {
-        results = _.sortBy(results, function(row) {
-          var _out = row[item[0]];
+        results = _.sortBy(results, function(doc) {
+          var _out = doc[item[0]];
           return (item[1] == 'asc') ? _out : -1*_out;
         });
       });
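As the "not complete sorting!" comment admits, each sort spec is a [field, direction] pair and descending order is faked by negating the value, so it only behaves for numeric fields. A sketch of a query the code above accepts, mirroring the test at the end of this commit:

    dataset.query({
      sort: [['x', 'desc']]  // [field, direction]; 'desc' negates values, numeric fields only
    }).then(function(docList) {
      var doc0 = docList.models[0].toJSON(); // document with the largest x
    });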
@@ -129,14 +139,7 @@ this.recline.Model = this.recline.Model || {};
 //
 // Connecting to [Webstores](http://github.com/okfn/webstore)
 //
-// To use this backend set backendConfig on your Dataset as:
-//
-// <pre>
-// {
-//   'type': 'webstore',
-//   'url': url to relevant Webstore table
-// }
-// </pre>
+// To use this backend ensure your Dataset has a webstore_url in its attributes.
 my.BackendWebstore = Backbone.Model.extend({
     sync: function(method, model, options) {
       if (method === "read") {
@@ -256,10 +259,10 @@ this.recline.Model = this.recline.Model || {};
       });
       var dfd = $.Deferred();
       jqxhr.done(function(results) {
-        var _out = _.map(results.data, function(row) {
+        var _out = _.map(results.data, function(doc) {
           var tmp = {};
           _.each(results.fields, function(key, idx) {
-            tmp[key] = row[idx];
+            tmp[key] = doc[idx];
           });
           return tmp;
         });
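The Webstore read path zips results.fields with each positional row to build keyed document hashes. A stand-alone sketch of that transformation (the field names and values here are made up for illustration; _ is underscore.js, already a dependency):

    var results = {fields: ['x', 'y'], data: [[1, 2], [3, 4]]}; // assumed response shape
    var _out = _.map(results.data, function(doc) {
      var tmp = {};
      _.each(results.fields, function(key, idx) {
        tmp[key] = doc[idx]; // pair the idx-th field name with the idx-th value
      });
      return tmp;
    });
    // _out => [{x: 1, y: 2}, {x: 3, y: 4}]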
src/model.js (21 lines changed)
@@ -11,11 +11,13 @@ this.recline.Model = this.recline.Model || {};
 // * docCount: total number of documents in this dataset (obtained on a fetch for this Dataset)
 my.Dataset = Backbone.Model.extend({
     __type__: 'Dataset',
-    initialize: function(options) {
-      console.log(options);
+    initialize: function(model, backend) {
+      this.backend = backend;
+      if (backend && backend.constructor == String) {
+        this.backend = my.backends[backend];
+      }
       this.currentDocuments = new my.DocumentList();
       this.docCount = null;
-      this.backend = null;
       this.defaultQuery = {
         size: 100
         , offset: 0
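The new initialize signature means a Dataset can take either a backend instance or the string id of a backend registered in my.backends. Both forms, sketched (the 'memory' registry key is an assumption; the diff shows only the lookup, not any registration):

    // 1. pass a backend instance directly
    var dataset = new recline.Model.Dataset({id: 'my-id'}, backend);

    // 2. or pass a string, resolved via the my.backends registry
    var dataset2 = new recline.Model.Dataset({id: 'my-id'}, 'memory');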
@@ -37,15 +39,14 @@ this.recline.Model = this.recline.Model || {};
     // This also illustrates the limitations of separating the Dataset and the Backend
     query: function(queryObj) {
       var self = this;
-      var backend = my.backends[this.backendConfig.type];
       this.queryState = queryObj || this.defaultQuery;
       this.queryState = _.extend({size: 100, offset: 0}, this.queryState);
       var dfd = $.Deferred();
-      backend.query(this, this.queryState).done(function(rows) {
+      this.backend.query(this, this.queryState).done(function(rows) {
         var docs = _.map(rows, function(row) {
           var _doc = new my.Document(row);
-          _doc.backendConfig = self.backendConfig;
-          _doc.backend = backend;
+          _doc.backend = self.backend;
+          _doc.dataset = self;
           return _doc;
         });
         self.currentDocuments.reset(docs);
@@ -76,5 +77,11 @@ this.recline.Model = this.recline.Model || {};
     __type__: 'DocumentList',
     model: my.Document
 });

+// ## Backend registry
+//
+// Backends will register themselves by id into this registry
+my.backends = {};
+
 }(jQuery, this.recline.Model));
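Registration itself is not part of this commit; a plausible sketch of what "backends will register themselves by id" implies (both the key and the call are assumptions, not code from this diff):

    // e.g. at the bottom of the backend module:
    my.backends['memory'] = new my.BackendMemory();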
@@ -1,41 +1,41 @@
 (function ($) {
 module("Dataset");

-test('new Dataset', function () {
+test('Memory Backend', function () {
   var datasetId = 'test-dataset';
-  var metadata = {
-    title: 'My Test Dataset'
-    , name: '1-my-test-dataset'
-    , id: datasetId
-  };
-  var indata = {
-    headers: ['x', 'y', 'z']
-    , rows: [
+  var inData = {
+    metadata: {
+      title: 'My Test Dataset'
+      , name: '1-my-test-dataset'
+      , id: datasetId
+      , headers: ['x', 'y', 'z']
+    },
+    documents: [
       {id: 0, x: 1, y: 2, z: 3}
       , {id: 1, x: 2, y: 4, z: 6}
       , {id: 2, x: 3, y: 6, z: 9}
       , {id: 3, x: 4, y: 8, z: 12}
       , {id: 4, x: 5, y: 10, z: 15}
       , {id: 5, x: 6, y: 12, z: 18}
     ]
   };
-  var dataset = new recline.Model.Dataset(metadata);
-  dataset.backendConfig = {
-    type: 'memory'
-    // deep copy so we do not touch original data ...
-    , data: $.extend(true, {}, indata)
-  };
+  var backend = new recline.Model.BackendMemory();
+  backend.addDataset(inData);
+  var dataset = new recline.Model.Dataset({id: datasetId}, backend);
   // ### Start testing
   expect(10);
-  dataset.fetch().then(function(dataset) {
-    equal(dataset.get('name'), metadata.name);
-    deepEqual(dataset.get('headers'), indata.headers);
+  // convenience for tests
+  var data = backend.datasets[datasetId];
+  dataset.fetch().then(function(datasetAgain) {
+    equal(dataset.get('name'), data.metadata.name);
+    deepEqual(dataset.get('headers'), data.metadata.headers);
     equal(dataset.docCount, 6);
     var queryObj = {
       size: 4
       , offset: 2
     };
     dataset.query(queryObj).then(function(documentList) {
-      deepEqual(indata.rows[2], documentList.models[0].toJSON());
+      deepEqual(data.documents[2], documentList.models[0].toJSON());
     });
     var queryObj = {
       sort: [
@@ -47,21 +47,21 @@ test('new Dataset', function () {
       equal(doc0.x, 6);
     });
     dataset.query().then(function(docList) {
-      equal(docList.length, Math.min(100, indata.rows.length));
+      equal(docList.length, Math.min(100, data.documents.length));
       var doc1 = docList.models[0];
-      deepEqual(doc1.toJSON(), indata.rows[0]);
+      deepEqual(doc1.toJSON(), data.documents[0]);

       // Test UPDATE
       var newVal = 10;
       doc1.set({x: newVal});
       doc1.save().then(function() {
-        equal(dataset.backendConfig.data.rows[0].x, newVal);
+        equal(data.documents[0].x, newVal);
       })

       // Test Delete
       doc1.destroy().then(function() {
-        equal(dataset.backendConfig.data.rows.length, 5);
-        equal(dataset.backendConfig.data.rows[0].x, indata.rows[1].x);
+        equal(data.documents.length, 5);
+        equal(data.documents[0].x, inData.documents[1].x);
       });
     });
   });