[backend.memory][s]: rename Store.data attribute to records to be more consistent (and more meaningful).

Rufus Pollock 2013-01-03 21:50:47 +00:00
parent 4eb68fbf0f
commit e8ad368347
3 changed files with 33 additions and 30 deletions
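
For client code, the visible change is the attribute name on Backend.Memory.Store; the constructor keeps a backwards-compatibility alias (this.data = this.records), as the backend diff below shows. A rough before/after sketch, with illustrative record values and variable names, assuming recline and its jQuery/underscore dependencies are loaded:

// Illustrative records; any array of row objects works.
var records = [
  {id: 0, x: 1, y: 2},
  {id: 1, x: 3, y: 4}
];
var store = new recline.Backend.Memory.Store(records);

// New attribute name introduced by this commit:
console.log(store.records.length);          // 2

// Old name still works via the constructor's compatibility alias:
console.log(store.data === store.records);  // true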


@@ -56,6 +56,7 @@ Possible breaking changes
 * Added marker clustering in map view to handle a large number of markers
 * Dataset.restore method removed (not used internally except from Multiview.restore)
 * Views no longer call render in initialize but render must be called by client code
+* Backend.Memory.Store attribute for holding 'records' renamed to `records` from `data`
 ### v0.5 - July 5th 2012 (first public release)


@@ -11,37 +11,39 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {};
   // functionality like querying, faceting, updating (by ID) and deleting (by
   // ID).
   //
-  // @param data list of hashes for each record/row in the data ({key:
+  // @param records list of hashes for each record/row in the data ({key:
   // value, key: value})
   // @param fields (optional) list of field hashes (each hash defining a field
   // as per recline.Model.Field). If fields not specified they will be taken
   // from the data.
-  my.Store = function(data, fields) {
+  my.Store = function(records, fields) {
     var self = this;
-    this.data = data;
+    this.records = records;
+    // backwards compatibility (in v0.5 records was named data)
+    this.data = this.records;
     if (fields) {
       this.fields = fields;
     } else {
-      if (data) {
-        this.fields = _.map(data[0], function(value, key) {
+      if (records) {
+        this.fields = _.map(records[0], function(value, key) {
           return {id: key, type: 'string'};
         });
       }
     }
     this.update = function(doc) {
-      _.each(self.data, function(internalDoc, idx) {
+      _.each(self.records, function(internalDoc, idx) {
        if(doc.id === internalDoc.id) {
-          self.data[idx] = doc;
+          self.records[idx] = doc;
        }
      });
    };
     this.remove = function(doc) {
-      var newdocs = _.reject(self.data, function(internalDoc) {
+      var newdocs = _.reject(self.records, function(internalDoc) {
        return (doc.id === internalDoc.id);
      });
-      this.data = newdocs;
+      this.records = newdocs;
    };
     this.save = function(changes, dataset) {
@@ -60,9 +62,9 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {};
     this.query = function(queryObj) {
       var dfd = $.Deferred();
-      var numRows = queryObj.size || this.data.length;
+      var numRows = queryObj.size || this.records.length;
       var start = queryObj.from || 0;
-      var results = this.data;
+      var results = this.records;
       results = this._applyFilters(results, queryObj);
       results = this._applyFreeTextQuery(results, queryObj);
@@ -229,9 +231,9 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {};
     this.transform = function(editFunc) {
       var dfd = $.Deferred();
       // TODO: should we clone before mapping? Do not see the point atm.
-      self.data = _.map(self.data, editFunc);
+      self.records = _.map(self.records, editFunc);
       // now deal with deletes (i.e. nulls)
-      self.data = _.filter(self.data, function(record) {
+      self.records = _.filter(self.records, function(record) {
         return record != null;
       });
       dfd.resolve();
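
As a rough usage sketch of the Store methods touched above (update, remove, query, transform), which now read and write self.records. Record values here are illustrative, and nothing is assumed about the query result beyond the total field that the tests below also use:

var store = new recline.Backend.Memory.Store([
  {id: 0, x: 1},
  {id: 1, x: 2},
  {id: 2, x: 3}
]);

// update replaces the record with a matching id; remove filters it out.
store.update({id: 1, x: 20});
store.remove({id: 2});

// query takes a serialized query (here the defaults of recline.Model.Query).
var query = new recline.Model.Query();
store.query(query.toJSON()).then(function(out) {
  console.log(out.total);  // 2 - records left after the remove above
});

// transform maps every record and then drops any mapped to null.
store.transform(function(record) {
  return record.x > 1 ? record : null;
});
console.log(store.records.length);  // 1 - only {id: 1, x: 20} remains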


@@ -22,16 +22,16 @@ var memoryFields = [
 ];
 var _wrapData = function() {
-  var dataCopy = $.extend(true, [], memoryData);
+  var recordsCopy = $.extend(true, [], memoryData);
   // return new recline.Backend.Memory.Store(dataCopy, fields);
-  return new recline.Backend.Memory.Store(dataCopy, memoryFields);
+  return new recline.Backend.Memory.Store(recordsCopy, memoryFields);
 }
 test('basics', function () {
   var data = _wrapData();
   equal(data.fields.length, 7);
   deepEqual(['id', 'date', 'x', 'y', 'z', 'country', 'label'], _.pluck(data.fields, 'id'));
-  equal(memoryData.length, data.data.length);
+  equal(memoryData.length, data.records.length);
 });
 test('query', function () {
@@ -136,7 +136,7 @@ test('filters with nulls', function () {
     equal(out.total, 4);
   });
-  data.data[5].country = '';
+  data.records[5].country = '';
   query = new recline.Model.Query();
   query.addFilter({type: 'range', field: 'country', start: '', stop: 'Z'});
   data.query(query.toJSON()).then(function(out) {
@@ -154,7 +154,7 @@ test('facet', function () {
   var data = _wrapData();
   var query = new recline.Model.Query();
   query.addFacet('country');
-  var out = data.computeFacets(data.data, query.toJSON());
+  var out = data.computeFacets(data.records, query.toJSON());
   var exp = [
     {
       term: 'UK',
@@ -179,12 +179,12 @@ test('update and delete', function () {
   doc1 = $.extend(true, {}, memoryData[0]);
   doc1.x = newVal;
   data.update(doc1);
-  equal(data.data[0].x, newVal);
+  equal(data.records[0].x, newVal);
   // Test Delete
   data.remove(doc1);
-  equal(data.data.length, 5);
-  equal(data.data[0].x, memoryData[1].x);
+  equal(data.records.length, 5);
+  equal(data.records[0].x, memoryData[1].x);
 });
 test('transform', function () {
@@ -198,8 +198,8 @@ test('transform', function () {
     d.a = d.a * 10;
     return d;
   })
-  equal(store.data[0].a, 10);
-  equal(store.data[1].a, 20);
+  equal(store.records[0].a, 10);
+  equal(store.records[1].a, 20);
 });
 test('transform deletes', function () {
@@ -209,7 +209,7 @@ test('transform deletes', function () {
     if (d.a == '1') return null;
     else return d;
   })
-  equal(store.data.length, 2);
+  equal(store.records.length, 2);
 });
 })(this.jQuery);
@@ -281,7 +281,7 @@ test('basics', function () {
 test('query', function () {
   var dataset = makeBackendDataset();
   // convenience for tests - get the data that should get changed
-  var data = dataset._store.data;
+  var data = dataset._store.records;
   var dataset = makeBackendDataset();
   var queryObj = {
     size: 4
@@ -295,7 +295,7 @@ test('query', function () {
 test('query sort', function () {
   var dataset = makeBackendDataset();
   // convenience for tests - get the data that should get changed
-  var data = dataset._store.data;
+  var data = dataset._store.records;
   var queryObj = {
     sort: [
       {field: 'y', order: 'desc'}
@@ -372,9 +372,9 @@ test('update and delete', function () {
   // convenience for tests - get the data that should get changed
   var data = dataset._store;
   dataset.query().then(function(docList) {
-    equal(docList.length, Math.min(100, data.data.length));
+    equal(docList.length, Math.min(100, data.records.length));
     var doc1 = docList.models[0];
-    deepEqual(doc1.toJSON(), data.data[0]);
+    deepEqual(doc1.toJSON(), data.records[0]);
     // Test UPDATE
     var newVal = 10;
@@ -386,8 +386,8 @@ test('update and delete', function () {
     deepEqual(dataset._changes.deletes[0], doc1.toJSON());
     dataset.save().then(function() {
-      equal(data.data.length, 5);
-      equal(data.data[0].x, memoryData.records[1].x);
+      equal(data.records.length, 5);
+      equal(data.records[0].x, memoryData.records[1].x);
     });
   });
 });