[be/dataproxy][m]: switch dataproxy to use Memory data store (fixes #164) and switch to new backend / model setup (#162).
parent 7141b7aafd · commit 1ed3b9f423
@@ -3,95 +3,78 @@ this.recline.Backend = this.recline.Backend || {};
 this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {};
 
 (function($, my) {
   // ## DataProxy Backend
   //
   // For connecting to [DataProxy-s](http://github.com/okfn/dataproxy).
   //
-  // When initializing the DataProxy backend you can set the following
-  // attributes in the options object:
-  //
-  // * dataproxy: {url-to-proxy} (optional). Defaults to http://jsonpdataproxy.appspot.com
-  //
   // Datasets using this backend should set the following attributes:
   //
   // * url: (required) url-of-data-to-proxy
   // * format: (optional) csv | xls (defaults to csv if not specified)
   //
   // Note that this is a **read-only** backend.
-  my.Backbone = function(options) {
-    var self = this;
-    this.__type__ = 'dataproxy';
-    this.readonly = true;
+  my.__type__ = 'dataproxy';
 
-    this.dataproxy_url = options && options.dataproxy_url ? options.dataproxy_url : 'http://jsonpdataproxy.appspot.com';
+  // URL for the dataproxy
+  my.dataproxy_url = 'http://jsonpdataproxy.appspot.com';
 
-    this.sync = function(method, model, options) {
-      if (method === "read") {
-        if (model.__type__ == 'Dataset') {
-          // Do nothing as we will get fields in query step (and no metadata to
-          // retrieve)
-          var dfd = $.Deferred();
-          dfd.resolve(model);
-          return dfd.promise();
-        }
-      } else {
-        alert('This backend only supports read operations');
-      }
-    };
-
-    this.query = function(dataset, queryObj) {
-      var self = this;
-      var data = {
-        url: dataset.get('url'),
-        'max-results': queryObj.size,
-        type: dataset.get('format')
-      };
-      var jqxhr = $.ajax({
-        url: this.dataproxy_url,
-        data: data,
-        dataType: 'jsonp'
-      });
-      var dfd = $.Deferred();
-      _wrapInTimeout(jqxhr).done(function(results) {
-        if (results.error) {
-          dfd.reject(results.error);
-        }
-        // Rename duplicate fieldIds as each field name needs to be
-        // unique.
-        var seen = {};
-        _.map(results.fields, function(fieldId, index) {
-          if (fieldId in seen) {
-            seen[fieldId] += 1;
-            results.fields[index] = fieldId + "("+seen[fieldId]+")";
-          } else {
-            seen[fieldId] = 1;
-          }
-        });
-
-        dataset.fields.reset(_.map(results.fields, function(fieldId) {
-          return {id: fieldId};
-          })
-        );
-        var _out = _.map(results.data, function(doc) {
-          var tmp = {};
-          _.each(results.fields, function(key, idx) {
-            tmp[key] = doc[idx];
-          });
-          return tmp;
-        });
-        dfd.resolve({
-          total: null,
-          hits: _.map(_out, function(row) {
-            return { _source: row };
-          })
-        });
-      })
-      .fail(function(arguments) {
-        dfd.reject(arguments);
-      });
-      return dfd.promise();
-    };
+  // ## load
+  //
+  // Load data from a URL via the [DataProxy](http://github.com/okfn/dataproxy).
+  my.fetch = function(dataset) {
+    var data = {
+      url: dataset.get('url'),
+      'max-results': dataset.get('size') || dataset.get('rows') || 1000,
+      type: dataset.get('format') || ''
+    };
+    var jqxhr = $.ajax({
+      url: my.dataproxy_url,
+      data: data,
+      dataType: 'jsonp'
+    });
+    var dfd = $.Deferred();
+    _wrapInTimeout(jqxhr).done(function(results) {
+      if (results.error) {
+        dfd.reject(results.error);
+      }
+      // Rename duplicate fieldIds as each field name needs to be
+      // unique.
+      var seen = {};
+      var fields = _.map(results.fields, function(field, index) {
+        var fieldId = field;
+        while (fieldId in seen) {
+          seen[field] += 1;
+          fieldId = field + seen[field];
+        }
+        if (!(field in seen)) {
+          seen[field] = 0;
+        }
+        return { id: fieldId, label: field }
+      });
+      // data is provided as arrays so need to zip together with fields
+      var records = _.map(results.data, function(doc) {
+        var tmp = {};
+        _.each(results.fields, function(key, idx) {
+          tmp[key] = doc[idx];
+        });
+        return tmp;
+      });
+      var store = new recline.Backend.Memory.Store(records, fields);
+      dataset._dataCache = store;
+      dataset.fields.reset(fields);
+      dataset.query();
+      dfd.resolve(dataset);
+    })
+    .fail(function(arguments) {
+      dfd.reject(arguments);
+    });
+    return dfd.promise();
+  };
 
+  my.query = function(dataset, queryObj) {
+    var dfd = $.Deferred();
+    var results = dataset._dataCache.query(queryObj);
+    var hits = _.map(results.records, function(row) {
+      return { _source: row };
+    });
+    var out = {
+      total: results.total,
+      hits: hits,
+      facets: results.facets
+    };
+    dfd.resolve(out);
+    return dfd.promise();
+  };
 
   // ## _wrapInTimeout
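In the new setup the backend is a plain module exposing __type__, fetch and query, rather than a constructible Backbone class; the Memory store does the actual querying. A minimal usage sketch, inferred from this hunk and the test changes below (the URL here is illustrative, not from the source):

    // the backend is named by string and resolved from recline.Backend
    var dataset = new recline.Model.Dataset({
        url: 'http://example.com/data.csv',  // illustrative URL
        format: 'csv'
      },
      'dataproxy'
    );
    // fetch() proxies the file through the dataproxy, loads the rows into a
    // recline.Backend.Memory.Store (dataset._dataCache) and runs an initial query()
    dataset.fetch().then(function() {
      console.log(dataset.currentRecords.length);
    });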
@@ -179,7 +179,7 @@ my.Dataset = Backbone.Model.extend({
       current = current[parts[ii]];
     }
     if (current) {
-      return new current();
+      return current;
     }
 
     // alternatively we just had a simple string
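The model-side change matches the backend rewrite: resolving a backend now returns the module itself instead of instantiating it with `new current()`. Any object of the following shape can therefore act as a backend — a sketch inferred from this commit, not a documented interface (MyBackend and 'mybackend' are hypothetical names):

    var MyBackend = {
      __type__: 'mybackend',  // matched against the backend string (hypothetical)
      // both functions return jQuery promises
      fetch: function(dataset) { /* resolve with the dataset once loaded */ },
      query: function(dataset, queryObj) { /* resolve with {total, hits, facets} */ }
    };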
@@ -187,7 +187,7 @@ my.Dataset = Backbone.Model.extend({
     if (recline && recline.Backend) {
       _.each(_.keys(recline.Backend), function(name) {
         if (name.toLowerCase() === backendString.toLowerCase()) {
-          backend = new recline.Backend[name].Backbone();
+          backend = recline.Backend[name];
         }
       });
     }
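Since the lookup lower-cases both sides, the backend string is case-insensitive. For example (a sketch; the URL is illustrative):

    // both resolve to the same module, recline.Backend.DataProxy
    var a = new recline.Model.Dataset({url: 'http://example.com/data.csv'}, 'dataproxy');
    var b = new recline.Model.Dataset({url: 'http://example.com/data.csv'}, 'DataProxy');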
@@ -67,14 +67,13 @@ var dataProxyData = {
 test('DataProxy Backend', function() {
   // needed only if not stubbing
   // stop();
-  var backend = new recline.Backend.DataProxy.Backbone();
-  ok(backend.readonly);
+  var backend = recline.Backend.DataProxy;
   equal(backend.__type__, 'dataproxy');
 
   var dataset = new recline.Model.Dataset({
       url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv'
     },
-    backend
+    'dataproxy'
   );
 
   var stub = sinon.stub($, 'ajax', function(options) {
@@ -92,15 +91,18 @@ test('DataProxy Backend', function() {
     }
   });
 
-  dataset.fetch().done(function(dataset) {
-    dataset.query().done(function(docList) {
-      deepEqual(['__id__', 'date', 'price'], _.pluck(dataset.fields.toJSON(), 'id'));
-      equal(null, dataset.docCount)
-      equal(10, docList.length)
-      equal("1950-01", docList.models[0].get('date'));
-      // needed only if not stubbing
-      start();
-    });
+  expect(6);
+  dataset.fetch().then(function() {
+    deepEqual(['__id__', 'date', 'price'], _.pluck(dataset.fields.toJSON(), 'id'));
+    equal(10, dataset.docCount)
+    equal(dataset.currentRecords.models[0].get('date'), "1950-01");
+    // needed only if not stubbing
+    // start();
+  });
+
+  dataset.query({q: '1950-01'}).then(function() {
+    equal(dataset.docCount, 1);
+    equal(dataset.currentRecords.models[0].get('price'), '34.73');
   });
   $.ajax.restore();
 });
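For anyone reworking the stub: fetch expects the dataproxy's JSONP payload to provide fields as an array of names and data as an array of row arrays, which it zips into keyed records before handing them to the Memory store. A response shape consistent with the assertions above (rows beyond the values named in the test are illustrative):

    var stubbedResponse = {
      fields: ['__id__', 'date', 'price'],
      data: [
        [0, '1950-01', '34.73'],
        [1, '1950-02', '34.73']
        // ...10 rows in total in the fixture
      ]
    };
    // my.fetch zips each row with the field names, e.g.
    // {'__id__': 0, 'date': '1950-01', 'price': '34.73'}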