From 7141b7aafd72496889fc2de8737ce2059c1ab101 Mon Sep 17 00:00:00 2001 From: Rufus Pollock Date: Sat, 23 Jun 2012 13:28:32 +0100 Subject: [PATCH 1/8] [#162,model,be/memory][m]: refactor started with the new API on Dataset, Record and Memory backend. * Tests are passing except for a dataproxy one. --- src/backend/memory.js | 97 +++++++++++++++++-------------------- src/model.js | 39 ++++++++++++++- test/backend/memory.test.js | 37 ++++++++------ 3 files changed, 104 insertions(+), 69 deletions(-) diff --git a/src/backend/memory.js b/src/backend/memory.js index fea90eb9..308557d1 100644 --- a/src/backend/memory.js +++ b/src/backend/memory.js @@ -3,6 +3,8 @@ this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.Memory = this.recline.Backend.Memory || {}; (function($, my) { + my.__type__ = 'memory'; + // ## createDataset // // Convenience function to create a simple 'in-memory' dataset in one step. @@ -15,15 +17,52 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; // @param metadata: (optional) dataset metadata - see recline.Model.Dataset. // If not defined (or id not provided) id will be autogenerated. my.createDataset = function(data, fields, metadata) { - var wrapper = new my.Store(data, fields); - var backend = new my.Backbone(); - var dataset = new recline.Model.Dataset(metadata, backend); - dataset._dataCache = wrapper; + var dataset = new recline.Model.Dataset( + _.extend({}, metadata, {records: data, fields: fields}) + ); dataset.fetch(); - dataset.query(); return dataset; }; + my.fetch = function(dataset) { + var dfd = $.Deferred(); + var store = new my.Store(dataset.get('records'), dataset.get('fields')); + dataset._dataCache = store; + dataset.fields.reset(store.fields); + dataset.query(); + dfd.resolve(dataset); + return dfd.promise(); + }; + + my.save = function(dataset, changes) { + var dfd = $.Deferred(); + // TODO + // _.each(changes.creates) { ... 
} + _.each(changes.updates, function(record) { + dataset._dataCache.update(record); + }); + _.each(changes.deletes, function(record) { + dataset._dataCache.delete(record); + }); + dfd.resolve(dataset); + return dfd.promise(); + }, + + my.query = function(dataset, queryObj) { + var dfd = $.Deferred(); + var results = dataset._dataCache.query(queryObj); + var hits = _.map(results.records, function(row) { + return { _source: row }; + }); + var out = { + total: results.total, + hits: hits, + facets: results.facets + }; + dfd.resolve(out); + return dfd.promise(); + }; + // ## Data Wrapper // // Turn a simple array of JS objects into a mini data-store with @@ -166,53 +205,5 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; return facetResults; }; }; - - - // ## Backbone - // - // Backbone connector for memory store attached to a Dataset object - my.Backbone = function() { - this.__type__ = 'memory'; - this.sync = function(method, model, options) { - var self = this; - var dfd = $.Deferred(); - if (method === "read") { - if (model.__type__ == 'Dataset') { - model.fields.reset(model._dataCache.fields); - dfd.resolve(model); - } - return dfd.promise(); - } else if (method === 'update') { - if (model.__type__ == 'Record') { - model.dataset._dataCache.update(model.toJSON()); - dfd.resolve(model); - } - return dfd.promise(); - } else if (method === 'delete') { - if (model.__type__ == 'Record') { - model.dataset._dataCache.delete(model.toJSON()); - dfd.resolve(model); - } - return dfd.promise(); - } else { - alert('Not supported: sync on Memory backend with method ' + method + ' and model ' + model); - } - }; - - this.query = function(model, queryObj) { - var dfd = $.Deferred(); - var results = model._dataCache.query(queryObj); - var hits = _.map(results.records, function(row) { - return { _source: row }; - }); - var out = { - total: results.total, - hits: hits, - facets: results.facets - }; - dfd.resolve(out); - return dfd.promise(); - }; - }; }(jQuery, 
this.recline.Backend.Memory)); diff --git a/src/model.js b/src/model.js index ea7a9ff1..75f1ee09 100644 --- a/src/model.js +++ b/src/model.js @@ -44,11 +44,22 @@ my.Dataset = Backbone.Model.extend({ initialize: function(model, backend) { _.bindAll(this, 'query'); this.backend = backend; + if (typeof backend === 'undefined') { + // guess backend ... + if (this.get('records')) { + this.backend = recline.Backend.Memory; + } + } if (typeof(backend) === 'string') { this.backend = this._backendFromString(backend); } this.fields = new my.FieldList(); this.currentRecords = new my.RecordList(); + this._changes = { + deletes: [], + updates: [], + creates: [] + }; this.facets = new my.FacetList(); this.docCount = null; this.queryState = new my.Query(); @@ -56,6 +67,17 @@ my.Dataset = Backbone.Model.extend({ this.queryState.bind('facet:add', this.query); }, + // ### fetch + // + // Retrieve dataset and (some) records from the backend. + fetch: function() { + return this.backend.fetch(this); + }, + + save: function() { + return this.backend.save(this, this._changes); + }, + // ### query // // AJAX method with promise API to get records from the backend. 
@@ -76,6 +98,12 @@ my.Dataset = Backbone.Model.extend({ var _doc = new my.Record(hit._source); _doc.backend = self.backend; _doc.dataset = self; + _doc.bind('change', function(doc) { + self._changes.updates.push(doc.toJSON()); + }); + _doc.bind('destroy', function(doc) { + self._changes.deletes.push(doc.toJSON()); + }); return _doc; }); self.currentRecords.reset(docs); @@ -96,6 +124,7 @@ my.Dataset = Backbone.Model.extend({ return dfd.promise(); }, + _prepareQuery: function(newQueryObj) { if (newQueryObj) { this.queryState.set(newQueryObj); @@ -242,7 +271,15 @@ my.Record = Backbone.Model.extend({ } } return html; - } + }, + + // Override Backbone save, fetch and destroy so they do nothing + // Instead, Dataset object that created this Record should take care of + // handling these changes (discovery will occur via event notifications) + // WARNING: these will not persist *unless* you call save on Dataset + fetch: function() {}, + save: function() {}, + destroy: function() { this.trigger('destroy', this); } }); // ## A Backbone collection of Records diff --git a/test/backend/memory.test.js b/test/backend/memory.test.js index 535aeb61..ba14b0e5 100644 --- a/test/backend/memory.test.js +++ b/test/backend/memory.test.js @@ -125,7 +125,7 @@ test('update and delete', function () { (function ($) { -module("Backend Memory - Backbone"); +module("Backend Memory - Model Integration"); var memoryData = { metadata: { @@ -145,18 +145,24 @@ var memoryData = { }; function makeBackendDataset() { - var dataset = new recline.Backend.Memory.createDataset(memoryData.records, null, memoryData.metadata); + var dataset = new recline.Model.Dataset({ + id: 'test-dataset', + title: 'My Test Dataset', + name: '1-my-test-dataset', + fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}, {id: 'country'}, {id: 'label'}], + records: [ + {id: 0, x: 1, y: 2, z: 3, country: 'DE', label: 'first'} + , {id: 1, x: 2, y: 4, z: 6, country: 'UK', label: 'second'} + , {id: 2, x: 3, y: 6, z: 9, country: 'US', label: 
'third'} + , {id: 3, x: 4, y: 8, z: 12, country: 'UK', label: 'fourth'} + , {id: 4, x: 5, y: 10, z: 15, country: 'UK', label: 'fifth'} + , {id: 5, x: 6, y: 12, z: 18, country: 'DE', label: 'sixth'} + ] + }); + dataset.fetch(); return dataset; } -test('createDataset', function () { - var dataset = recline.Backend.Memory.createDataset(memoryData.records); - equal(dataset.fields.length, 6); - deepEqual(['id', 'x', 'y', 'z', 'country', 'label'], dataset.fields.pluck('id')); - dataset.query(); - equal(memoryData.records.length, dataset.currentRecords.length); -}); - test('basics', function () { var dataset = makeBackendDataset(); expect(3); @@ -256,12 +262,13 @@ test('update and delete', function () { // Test UPDATE var newVal = 10; doc1.set({x: newVal}); - doc1.save().then(function() { - equal(data.data[0].x, newVal); - }) + doc1.save(); + equal(dataset._changes.updates[0].x, newVal); - // Test Delete - doc1.destroy().then(function() { + doc1.destroy(); + deepEqual(dataset._changes.deletes[0], doc1.toJSON()); + + dataset.save().then(function() { equal(data.data.length, 5); equal(data.data[0].x, memoryData.records[1].x); }); From 1ed3b9f423a43eac9983725574e4f852ddbfcd9f Mon Sep 17 00:00:00 2001 From: Rufus Pollock Date: Sat, 23 Jun 2012 18:00:30 +0100 Subject: [PATCH 2/8] [be/dataproxy][m]: switch dataproxy to use Memory data store (fixes #164) and switch to new backend / model setup (#162). 
--- src/backend/dataproxy.js | 149 +++++++++++++++++---------------------- src/model.js | 4 +- test/backend.test.js | 26 +++---- 3 files changed, 82 insertions(+), 97 deletions(-) diff --git a/src/backend/dataproxy.js b/src/backend/dataproxy.js index eb305189..df2ed03b 100644 --- a/src/backend/dataproxy.js +++ b/src/backend/dataproxy.js @@ -3,95 +3,78 @@ this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {}; (function($, my) { - // ## DataProxy Backend - // - // For connecting to [DataProxy-s](http://github.com/okfn/dataproxy). - // - // When initializing the DataProxy backend you can set the following - // attributes in the options object: - // - // * dataproxy: {url-to-proxy} (optional). Defaults to http://jsonpdataproxy.appspot.com - // - // Datasets using using this backend should set the following attributes: - // - // * url: (required) url-of-data-to-proxy - // * format: (optional) csv | xls (defaults to csv if not specified) - // - // Note that this is a **read-only** backend. - my.Backbone = function(options) { - var self = this; - this.__type__ = 'dataproxy'; - this.readonly = true; + my.__type__ = 'dataproxy'; + // URL for the dataproxy + my.dataproxy_url = 'http://jsonpdataproxy.appspot.com'; - this.dataproxy_url = options && options.dataproxy_url ? options.dataproxy_url : 'http://jsonpdataproxy.appspot.com'; - - this.sync = function(method, model, options) { - if (method === "read") { - if (model.__type__ == 'Dataset') { - // Do nothing as we will get fields in query step (and no metadata to - // retrieve) - var dfd = $.Deferred(); - dfd.resolve(model); - return dfd.promise(); - } - } else { - alert('This backend only supports read operations'); + // ## load + // + // Load data from a URL via the [DataProxy](http://github.com/okfn/dataproxy). 
+ my.fetch = function(dataset) { + var data = { + url: dataset.get('url'), + 'max-results': dataset.get('size') || dataset.get('rows') || 1000, + type: dataset.get('format') || '' + }; + var jqxhr = $.ajax({ + url: my.dataproxy_url, + data: data, + dataType: 'jsonp' + }); + var dfd = $.Deferred(); + _wrapInTimeout(jqxhr).done(function(results) { + if (results.error) { + dfd.reject(results.error); } - }; - this.query = function(dataset, queryObj) { - var self = this; - var data = { - url: dataset.get('url'), - 'max-results': queryObj.size, - type: dataset.get('format') - }; - var jqxhr = $.ajax({ - url: this.dataproxy_url, - data: data, - dataType: 'jsonp' - }); - var dfd = $.Deferred(); - _wrapInTimeout(jqxhr).done(function(results) { - if (results.error) { - dfd.reject(results.error); + // Rename duplicate fieldIds as each field name needs to be + // unique. + var seen = {}; + var fields = _.map(results.fields, function(field, index) { + var fieldId = field; + while (fieldId in seen) { + seen[field] += 1; + fieldId = field + seen[field]; } - - // Rename duplicate fieldIds as each field name needs to be - // unique. 
- var seen = {}; - _.map(results.fields, function(fieldId, index) { - if (fieldId in seen) { - seen[fieldId] += 1; - results.fields[index] = fieldId + "("+seen[fieldId]+")"; - } else { - seen[fieldId] = 1; - } - }); - - dataset.fields.reset(_.map(results.fields, function(fieldId) { - return {id: fieldId}; - }) - ); - var _out = _.map(results.data, function(doc) { - var tmp = {}; - _.each(results.fields, function(key, idx) { - tmp[key] = doc[idx]; - }); - return tmp; - }); - dfd.resolve({ - total: null, - hits: _.map(_out, function(row) { - return { _source: row }; - }) - }); - }) - .fail(function(arguments) { - dfd.reject(arguments); + if (!(field in seen)) { + seen[field] = 0; + } + return { id: fieldId, label: field } }); - return dfd.promise(); + + // data is provided as arrays so need to zip together with fields + var records = _.map(results.data, function(doc) { + var tmp = {}; + _.each(results.fields, function(key, idx) { + tmp[key] = doc[idx]; + }); + return tmp; + }); + var store = new recline.Backend.Memory.Store(records, fields); + dataset._dataCache = store; + dataset.fields.reset(fields); + dataset.query(); + dfd.resolve(dataset); + }) + .fail(function(arguments) { + dfd.reject(arguments); + }); + return dfd.promise(); + }; + + my.query = function(dataset, queryObj) { + var dfd = $.Deferred(); + var results = dataset._dataCache.query(queryObj); + var hits = _.map(results.records, function(row) { + return { _source: row }; + }); + var out = { + total: results.total, + hits: hits, + facets: results.facets }; + dfd.resolve(out); + return dfd.promise(); }; // ## _wrapInTimeout diff --git a/src/model.js b/src/model.js index 75f1ee09..982111f9 100644 --- a/src/model.js +++ b/src/model.js @@ -179,7 +179,7 @@ my.Dataset = Backbone.Model.extend({ current = current[parts[ii]]; } if (current) { - return new current(); + return current; } // alternatively we just had a simple string @@ -187,7 +187,7 @@ my.Dataset = Backbone.Model.extend({ if (recline && 
recline.Backend) { _.each(_.keys(recline.Backend), function(name) { if (name.toLowerCase() === backendString.toLowerCase()) { - backend = new recline.Backend[name].Backbone(); + backend = recline.Backend[name]; } }); } diff --git a/test/backend.test.js b/test/backend.test.js index 332ab6b9..7d384d98 100644 --- a/test/backend.test.js +++ b/test/backend.test.js @@ -67,14 +67,13 @@ var dataProxyData = { test('DataProxy Backend', function() { // needed only if not stubbing // stop(); - var backend = new recline.Backend.DataProxy.Backbone(); - ok(backend.readonly); + var backend = recline.Backend.DataProxy; equal(backend.__type__, 'dataproxy'); var dataset = new recline.Model.Dataset({ url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv' }, - backend + 'dataproxy' ); var stub = sinon.stub($, 'ajax', function(options) { @@ -92,15 +91,18 @@ test('DataProxy Backend', function() { } }); - dataset.fetch().done(function(dataset) { - dataset.query().done(function(docList) { - deepEqual(['__id__', 'date', 'price'], _.pluck(dataset.fields.toJSON(), 'id')); - equal(null, dataset.docCount) - equal(10, docList.length) - equal("1950-01", docList.models[0].get('date')); - // needed only if not stubbing - start(); - }); + expect(6); + dataset.fetch().then(function() { + deepEqual(['__id__', 'date', 'price'], _.pluck(dataset.fields.toJSON(), 'id')); + equal(10, dataset.docCount) + equal(dataset.currentRecords.models[0].get('date'), "1950-01"); + // needed only if not stubbing + // start(); + }); + + dataset.query({q: '1950-01'}).then(function() { + equal(dataset.docCount, 1); + equal(dataset.currentRecords.models[0].get('price'), '34.73'); }); $.ajax.restore(); }); From bda4797ed8332d1fb8e4fe0d703ea5e5790b40e4 Mon Sep 17 00:00:00 2001 From: Rufus Pollock Date: Sat, 23 Jun 2012 18:07:00 +0100 Subject: [PATCH 3/8] [test/refactor][xs]: dataproxy and gdocs tests now in own files. 
--- test/backend/dataproxy.test.js | 110 ++++++++++++++++++ .../gdocs.test.js} | 110 +----------------- test/index.html | 3 +- 3 files changed, 114 insertions(+), 109 deletions(-) create mode 100644 test/backend/dataproxy.test.js rename test/{backend.test.js => backend/gdocs.test.js} (76%) diff --git a/test/backend/dataproxy.test.js b/test/backend/dataproxy.test.js new file mode 100644 index 00000000..660b867b --- /dev/null +++ b/test/backend/dataproxy.test.js @@ -0,0 +1,110 @@ +(function ($) { +module("Backend DataProxy"); + +var dataProxyData = { + "data": [ + [ + "1", + "1950-01", + "34.73" + ], + [ + "2", + "1950-02", + "34.73" + ], + [ + "3", + "1950-03", + "34.73" + ], + [ + "4", + "1950-04", + "34.73" + ], + [ + "5", + "1950-05", + "34.73" + ], + [ + "6", + "1950-06", + "34.73" + ], + [ + "7", + "1950-07", + "34.73" + ], + [ + "8", + "1950-08", + "34.73" + ], + [ + "9", + "1950-09", + "34.73" + ], + [ + "10", + "1950-10", + "34.73" + ] + ], + "fields": [ + "__id__", + "date", + "price" + ], + "length": null, + "max_results": 10, + "url": "http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv" +}; + +test('DataProxy Backend', function() { + // needed only if not stubbing + // stop(); + var backend = recline.Backend.DataProxy; + equal(backend.__type__, 'dataproxy'); + + var dataset = new recline.Model.Dataset({ + url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv' + }, + 'dataproxy' + ); + + var stub = sinon.stub($, 'ajax', function(options) { + var partialUrl = 'jsonpdataproxy.appspot.com'; + if (options.url.indexOf(partialUrl) != -1) { + return { + done: function(callback) { + callback(dataProxyData); + return this; + }, + fail: function() { + return this; + } + } + } + }); + + expect(6); + dataset.fetch().then(function() { + deepEqual(['__id__', 'date', 'price'], _.pluck(dataset.fields.toJSON(), 'id')); + equal(10, dataset.docCount) + equal(dataset.currentRecords.models[0].get('date'), "1950-01"); + // needed only if not 
stubbing + // start(); + }); + + dataset.query({q: '1950-01'}).then(function() { + equal(dataset.docCount, 1); + equal(dataset.currentRecords.models[0].get('price'), '34.73'); + }); + $.ajax.restore(); +}); + +})(this.jQuery); diff --git a/test/backend.test.js b/test/backend/gdocs.test.js similarity index 76% rename from test/backend.test.js rename to test/backend/gdocs.test.js index 7d384d98..6d50bbd5 100644 --- a/test/backend.test.js +++ b/test/backend/gdocs.test.js @@ -1,112 +1,5 @@ (function ($) { -module("Backend"); - -var dataProxyData = { - "data": [ - [ - "1", - "1950-01", - "34.73" - ], - [ - "2", - "1950-02", - "34.73" - ], - [ - "3", - "1950-03", - "34.73" - ], - [ - "4", - "1950-04", - "34.73" - ], - [ - "5", - "1950-05", - "34.73" - ], - [ - "6", - "1950-06", - "34.73" - ], - [ - "7", - "1950-07", - "34.73" - ], - [ - "8", - "1950-08", - "34.73" - ], - [ - "9", - "1950-09", - "34.73" - ], - [ - "10", - "1950-10", - "34.73" - ] - ], - "fields": [ - "__id__", - "date", - "price" - ], - "length": null, - "max_results": 10, - "url": "http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv" -}; - -test('DataProxy Backend', function() { - // needed only if not stubbing - // stop(); - var backend = recline.Backend.DataProxy; - equal(backend.__type__, 'dataproxy'); - - var dataset = new recline.Model.Dataset({ - url: 'http://webstore.thedatahub.org/rufuspollock/gold_prices/data.csv' - }, - 'dataproxy' - ); - - var stub = sinon.stub($, 'ajax', function(options) { - var partialUrl = 'jsonpdataproxy.appspot.com'; - if (options.url.indexOf(partialUrl) != -1) { - return { - done: function(callback) { - callback(dataProxyData); - return this; - }, - fail: function() { - return this; - } - } - } - }); - - expect(6); - dataset.fetch().then(function() { - deepEqual(['__id__', 'date', 'price'], _.pluck(dataset.fields.toJSON(), 'id')); - equal(10, dataset.docCount) - equal(dataset.currentRecords.models[0].get('date'), "1950-01"); - // needed only if not stubbing 
- // start(); - }); - - dataset.query({q: '1950-01'}).then(function() { - equal(dataset.docCount, 1); - equal(dataset.currentRecords.models[0].get('price'), '34.73'); - }); - $.ajax.restore(); -}); - +module("Backend GDocs"); var sample_gdocs_spreadsheet_data = { "feed": { @@ -306,3 +199,4 @@ test("GDocs Backend.getUrl", function() { }); })(this.jQuery); + diff --git a/test/index.html b/test/index.html index 2e3aa91c..db350c70 100644 --- a/test/index.html +++ b/test/index.html @@ -37,8 +37,9 @@ - + + From 6e5c15a8165c09ee3f9bfb6e7c572e800e25d1b1 Mon Sep 17 00:00:00 2001 From: Rufus Pollock Date: Sat, 23 Jun 2012 20:23:24 +0100 Subject: [PATCH 4/8] [#162,backend,model][l]: major commit addressing several parts of the backend / model refactor in #162. * Now have Dataset setup and manage "memory store" * New fetch API as per issue #162 spec * dataproxy utilizes useMemoryStore attribute and just implements fetch * Switch gdocs to use Memory.Store properly via new useMemoryStore + fetch methodology * Memory backend: query function now follows promise API, remove fetch,upsert,delete and add save function to Store object * Also refactor to remove _source in QueryResult "hits" attribute on all backends but ElasticSearch - cf #159 (note this means ES currently broken) --- src/backend/dataproxy.js | 25 ++------ src/backend/gdocs.js | 98 ++++++++------------------------ src/backend/memory.js | 65 +++++++-------------- src/model.js | 110 +++++++++++++++++++++++------------- test/backend/gdocs.test.js | 6 +- test/backend/memory.test.js | 55 ++++++++++-------- test/base.js | 2 +- test/model.test.js | 9 --- 8 files changed, 153 insertions(+), 217 deletions(-) diff --git a/src/backend/dataproxy.js b/src/backend/dataproxy.js index df2ed03b..c16f2cf4 100644 --- a/src/backend/dataproxy.js +++ b/src/backend/dataproxy.js @@ -50,11 +50,11 @@ this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {}; }); return tmp; }); - var store = new recline.Backend.Memory.Store(records, 
fields); - dataset._dataCache = store; - dataset.fields.reset(fields); - dataset.query(); - dfd.resolve(dataset); + dfd.resolve({ + records: records, + fields: fields, + useMemoryStore: true + }); }) .fail(function(arguments) { dfd.reject(arguments); @@ -62,21 +62,6 @@ this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {}; return dfd.promise(); }; - my.query = function(dataset, queryObj) { - var dfd = $.Deferred(); - var results = dataset._dataCache.query(queryObj); - var hits = _.map(results.records, function(row) { - return { _source: row }; - }); - var out = { - total: results.total, - hits: hits, - facets: results.facets - }; - dfd.resolve(out); - return dfd.promise(); - }; - // ## _wrapInTimeout // // Convenience method providing a crude way to catch backend errors on JSONP calls. diff --git a/src/backend/gdocs.js b/src/backend/gdocs.js index c9449916..bd855cd4 100644 --- a/src/backend/gdocs.js +++ b/src/backend/gdocs.js @@ -3,14 +3,13 @@ this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; (function($, my) { + my.__type__ = 'gdocs'; // ## Google spreadsheet backend // - // Connect to Google Docs spreadsheet. - // - // Dataset must have a url attribute pointing to the Gdocs - // spreadsheet's JSON feed e.g. + // Fetch data from a Google Docs spreadsheet. // + // Dataset must have a url attribute pointing to the Gdocs or its JSON feed e.g. //
   // var dataset = new recline.Model.Dataset({
   //     url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
@@ -18,77 +17,25 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {};
   //   'gdocs'
   // );
   // 
- my.Backbone = function() { - var self = this; - this.__type__ = 'gdocs'; - this.readonly = true; - - this.sync = function(method, model, options) { - var self = this; - if (method === "read") { - var dfd = $.Deferred(); - dfd.resolve(model); - return dfd.promise(); - } - }; - - this.query = function(dataset, queryObj) { - var dfd = $.Deferred(); - if (dataset._dataCache) { - dfd.resolve(dataset._dataCache); - } else { - loadData(dataset.get('url')).done(function(result) { - dataset.fields.reset(result.fields); - // cache data onto dataset (we have loaded whole gdoc it seems!) - dataset._dataCache = self._formatResults(dataset, result.data); - dfd.resolve(dataset._dataCache); - }); - } - return dfd.promise(); - }; - - this._formatResults = function(dataset, data) { - var fields = _.pluck(dataset.fields.toJSON(), 'id'); - // zip the fields with the data rows to produce js objs - // TODO: factor this out as a common method with other backends - var objs = _.map(data, function (d) { - var obj = {}; - _.each(_.zip(fields, d), function (x) { - obj[x[0]] = x[1]; - }); - return obj; - }); - var out = { - total: objs.length, - hits: _.map(objs, function(row) { - return { _source: row } - }) - } - return out; - }; - }; - - // ## loadData - // - // loadData from a google docs URL // // @return object with two attributes // - // * fields: array of objects - // * data: array of arrays - var loadData = function(url) { + // * fields: array of Field objects + // * records: array of objects for each row + // + my.fetch = function(dataset) { var dfd = $.Deferred(); - var url = my.getSpreadsheetAPIUrl(url); - var out = { - fields: [], - data: [] - } + var url = my.getSpreadsheetAPIUrl(dataset.get('url')); $.getJSON(url, function(d) { result = my.parseData(d); - result.fields = _.map(result.fields, function(fieldId) { + var fields = _.map(result.fields, function(fieldId) { return {id: fieldId}; }); - dfd.resolve(result); + dfd.resolve({ + records: result.records, + fields: fields, + 
useMemoryStore: true + }); }); return dfd.promise(); }; @@ -109,8 +56,8 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; options = arguments[1]; } var results = { - 'fields': [], - 'data': [] + fields: [], + records: [] }; // default is no special info on type of columns var colTypes = {}; @@ -128,10 +75,9 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; // converts non numberical values that should be numerical (22.3%[string] -> 0.223[float]) var rep = /^([\d\.\-]+)\%$/; - $.each(gdocsSpreadsheet.feed.entry, function (i, entry) { - var row = []; - for (var k in results.fields) { - var col = results.fields[k]; + results.records = _.map(gdocsSpreadsheet.feed.entry, function(entry) { + var row = {}; + _.each(results.fields, function(col) { var _keyname = 'gsx$' + col; var value = entry[_keyname]['$t']; // if labelled as % and value contains %, convert @@ -142,9 +88,9 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; value = value3 / 100; } } - row.push(value); - } - results.data.push(row); + row[col] = value; + }); + return row; }); return results; }; diff --git a/src/backend/memory.js b/src/backend/memory.js index 308557d1..2692c9e3 100644 --- a/src/backend/memory.js +++ b/src/backend/memory.js @@ -20,49 +20,9 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; var dataset = new recline.Model.Dataset( _.extend({}, metadata, {records: data, fields: fields}) ); - dataset.fetch(); return dataset; }; - my.fetch = function(dataset) { - var dfd = $.Deferred(); - var store = new my.Store(dataset.get('records'), dataset.get('fields')); - dataset._dataCache = store; - dataset.fields.reset(store.fields); - dataset.query(); - dfd.resolve(dataset); - return dfd.promise(); - }; - - my.save = function(dataset, changes) { - var dfd = $.Deferred(); - // TODO - // _.each(changes.creates) { ... 
} - _.each(changes.updates, function(record) { - dataset._dataCache.update(record); - }); - _.each(changes.deletes, function(record) { - dataset._dataCache.delete(record); - }); - dfd.resolve(dataset); - return dfd.promise(); - }, - - my.query = function(dataset, queryObj) { - var dfd = $.Deferred(); - var results = dataset._dataCache.query(queryObj); - var hits = _.map(results.records, function(row) { - return { _source: row }; - }); - var out = { - total: results.total, - hits: hits, - facets: results.facets - }; - dfd.resolve(out); - return dfd.promise(); - }; - // ## Data Wrapper // // Turn a simple array of JS objects into a mini data-store with @@ -102,7 +62,22 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; this.data = newdocs; }; + this.save = function(changes, dataset) { + var self = this; + var dfd = $.Deferred(); + // TODO _.each(changes.creates) { ... } + _.each(changes.updates, function(record) { + self.update(record); + }); + _.each(changes.deletes, function(record) { + self.delete(record); + }); + dfd.resolve(this); + return dfd.promise(); + }, + this.query = function(queryObj) { + var dfd = $.Deferred(); var numRows = queryObj.size || this.data.length; var start = queryObj.from || 0; var results = this.data; @@ -119,14 +94,14 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; results.reverse(); } }); - var total = results.length; var facets = this.computeFacets(results, queryObj); - results = results.slice(start, start+numRows); - return { - total: total, - records: results, + var out = { + total: results.length, + hits: results.slice(start, start+numRows), facets: facets }; + dfd.resolve(out); + return dfd.promise(); }; // in place filtering diff --git a/src/model.js b/src/model.js index 982111f9..72b57040 100644 --- a/src/model.js +++ b/src/model.js @@ -65,17 +65,49 @@ my.Dataset = Backbone.Model.extend({ this.queryState = new my.Query(); this.queryState.bind('change', this.query); 
this.queryState.bind('facet:add', this.query); + this._store = this.backend; + if (this.backend == recline.Backend.Memory) { + this.fetch(); + } }, // ### fetch // // Retrieve dataset and (some) records from the backend. fetch: function() { - return this.backend.fetch(this); + var self = this; + var dfd = $.Deferred(); + // TODO: fail case; + if (this.backend !== recline.Backend.Memory) { + this.backend.fetch(this).then(handleResults) + } else { + // special case where we have been given data directly + handleResults({ + records: this.get('records'), + fields: this.get('fields'), + useMemoryStore: true + }); + } + + function handleResults(results) { + self.set(results.metadata); + if (results.useMemoryStore) { + self._store = new recline.Backend.Memory.Store(results.records, results.fields); + self.query(); + // store will have extracted fields if not provided + self.fields.reset(self._store.fields); + } else { + self.fields.reset(results.fields); + } + // TODO: parsing the processing of fields + dfd.resolve(this); + } + return dfd.promise(); }, save: function() { - return this.backend.save(this, this._changes); + var self = this; + return this._store.save(this._changes, this); }, // ### query @@ -89,48 +121,48 @@ my.Dataset = Backbone.Model.extend({ // also returned. 
query: function(queryObj) { var self = this; - this.trigger('query:start'); - var actualQuery = self._prepareQuery(queryObj); var dfd = $.Deferred(); - this.backend.query(this, actualQuery).done(function(queryResult) { - self.docCount = queryResult.total; - var docs = _.map(queryResult.hits, function(hit) { - var _doc = new my.Record(hit._source); - _doc.backend = self.backend; - _doc.dataset = self; - _doc.bind('change', function(doc) { - self._changes.updates.push(doc.toJSON()); - }); - _doc.bind('destroy', function(doc) { - self._changes.deletes.push(doc.toJSON()); - }); - return _doc; + this.trigger('query:start'); + + if (queryObj) { + this.queryState.set(queryObj); + } + var actualQuery = this.queryState.toJSON(); + + this._store.query(actualQuery, this) + .done(function(queryResult) { + self._handleQueryResult(queryResult); + self.trigger('query:done'); + dfd.resolve(self.currentRecords); + }) + .fail(function(arguments) { + self.trigger('query:fail', arguments); + dfd.reject(arguments); }); - self.currentRecords.reset(docs); - if (queryResult.facets) { - var facets = _.map(queryResult.facets, function(facetResult, facetId) { - facetResult.id = facetId; - return new my.Facet(facetResult); - }); - self.facets.reset(facets); - } - self.trigger('query:done'); - dfd.resolve(self.currentRecords); - }) - .fail(function(arguments) { - self.trigger('query:fail', arguments); - dfd.reject(arguments); - }); return dfd.promise(); }, - - _prepareQuery: function(newQueryObj) { - if (newQueryObj) { - this.queryState.set(newQueryObj); + _handleQueryResult: function(queryResult) { + var self = this; + self.docCount = queryResult.total; + var docs = _.map(queryResult.hits, function(hit) { + var _doc = new my.Record(hit); + _doc.bind('change', function(doc) { + self._changes.updates.push(doc.toJSON()); + }); + _doc.bind('destroy', function(doc) { + self._changes.deletes.push(doc.toJSON()); + }); + return _doc; + }); + self.currentRecords.reset(docs); + if (queryResult.facets) 
{ + var facets = _.map(queryResult.facets, function(facetResult, facetId) { + facetResult.id = facetId; + return new my.Facet(facetResult); + }); + self.facets.reset(facets); } - var out = this.queryState.toJSON(); - return out; }, toTemplateJSON: function() { @@ -151,7 +183,7 @@ my.Dataset = Backbone.Model.extend({ query.addFacet(field.id); }); var dfd = $.Deferred(); - this.backend.query(this, query.toJSON()).done(function(queryResult) { + this._store.query(query.toJSON(), this).done(function(queryResult) { if (queryResult.facets) { _.each(queryResult.facets, function(facetResult, facetId) { facetResult.id = facetId; diff --git a/test/backend/gdocs.test.js b/test/backend/gdocs.test.js index 6d50bbd5..08af2919 100644 --- a/test/backend/gdocs.test.js +++ b/test/backend/gdocs.test.js @@ -168,11 +168,10 @@ var sample_gdocs_spreadsheet_data = { } test("GDocs Backend", function() { - var backend = new recline.Backend.GDocs.Backbone(); var dataset = new recline.Model.Dataset({ url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json' }, - backend + 'gdocs' ); var stub = sinon.stub($, 'getJSON', function(options, cb) { @@ -182,7 +181,8 @@ test("GDocs Backend", function() { } }); - dataset.query().then(function(docList) { + dataset.fetch().then(function() { + var docList = dataset.currentRecords; deepEqual(['column-2', 'column-1'], _.pluck(dataset.fields.toJSON(), 'id')); equal(3, docList.length); equal("A", docList.models[0].get('column-1')); diff --git a/test/backend/memory.test.js b/test/backend/memory.test.js index ba14b0e5..81ec8f80 100644 --- a/test/backend/memory.test.js +++ b/test/backend/memory.test.js @@ -29,10 +29,11 @@ test('query', function () { size: 4 , from: 2 }; - var out = data.query(queryObj); - deepEqual(out.records[0], memoryData[2]); - equal(out.records.length, 4); - equal(out.total, 6); + data.query(queryObj).then(function(out) { + deepEqual(out.hits[0], memoryData[2]); + 
equal(out.hits.length, 4); + equal(out.total, 6); + }); }); test('query sort', function () { @@ -42,44 +43,50 @@ test('query sort', function () { {'y': {order: 'desc'}} ] }; - var out = data.query(queryObj); - equal(out.records[0].x, 6); + data.query(queryObj).then(function(out) { + equal(out.hits[0].x, 6); + }); var queryObj = { sort: [ {'country': {order: 'desc'}} ] }; - var out = data.query(queryObj); - equal(out.records[0].country, 'US'); + data.query(queryObj).then(function(out) { + equal(out.hits[0].country, 'US'); + }); var queryObj = { sort: [ {'country': {order: 'asc'}} ] }; - var out = data.query(queryObj); - equal(out.records[0].country, 'DE'); + data.query(queryObj).then(function(out) { + equal(out.hits[0].country, 'DE'); + }); }); test('query string', function () { var data = _wrapData(); - var out = data.query({q: 'UK'}); - equal(out.total, 3); - deepEqual(_.pluck(out.records, 'country'), ['UK', 'UK', 'UK']); + data.query({q: 'UK'}).then(function(out) { + equal(out.total, 3); + deepEqual(_.pluck(out.hits, 'country'), ['UK', 'UK', 'UK']); + }); - var out = data.query({q: 'UK 6'}) - equal(out.total, 1); - deepEqual(out.records[0].id, 1); + data.query({q: 'UK 6'}).then(function(out) { + equal(out.total, 1); + deepEqual(out.hits[0].id, 1); + }); }); test('filters', function () { var data = _wrapData(); var query = new recline.Model.Query(); query.addFilter({type: 'term', field: 'country', term: 'UK'}); - var out = data.query(query.toJSON()); - equal(out.total, 3); - deepEqual(_.pluck(out.records, 'country'), ['UK', 'UK', 'UK']); + data.query(query.toJSON()).then(function(out) { + equal(out.total, 3); + deepEqual(_.pluck(out.hits, 'country'), ['UK', 'UK', 'UK']); + }); }); test('facet', function () { @@ -167,7 +174,7 @@ test('basics', function () { var dataset = makeBackendDataset(); expect(3); // convenience for tests - get the data that should get changed - var data = dataset._dataCache; + var data = dataset._store; 
dataset.fetch().then(function(datasetAgain) { equal(dataset.get('name'), memoryData.metadata.name); deepEqual(_.pluck(dataset.fields.toJSON(), 'id'), _.pluck(data.fields, 'id')); @@ -178,21 +185,21 @@ test('basics', function () { test('query', function () { var dataset = makeBackendDataset(); // convenience for tests - get the data that should get changed - var data = dataset._dataCache.data; + var data = dataset._store.data; var dataset = makeBackendDataset(); var queryObj = { size: 4 , from: 2 }; dataset.query(queryObj).then(function(recordList) { - deepEqual(data[2], recordList.models[0].toJSON()); + deepEqual(recordList.models[0].toJSON(), data[2]); }); }); test('query sort', function () { var dataset = makeBackendDataset(); // convenience for tests - get the data that should get changed - var data = dataset._dataCache.data; + var data = dataset._store.data; var queryObj = { sort: [ {'y': {order: 'desc'}} @@ -253,7 +260,7 @@ test('facet', function () { test('update and delete', function () { var dataset = makeBackendDataset(); // convenience for tests - get the data that should get changed - var data = dataset._dataCache; + var data = dataset._store; dataset.query().then(function(docList) { equal(docList.length, Math.min(100, data.data.length)); var doc1 = docList.models[0]; diff --git a/test/base.js b/test/base.js index bd279965..eb629e25 100644 --- a/test/base.js +++ b/test/base.js @@ -19,7 +19,7 @@ var Fixture = { {id: 4, date: '2011-05-04', x: 5, y: 10, z: 15, country: 'UK', title: 'fifth', lat:51.58, lon:0}, {id: 5, date: '2011-06-02', x: 6, y: 12, z: 18, country: 'DE', title: 'sixth', lat:51.04, lon:7.9} ]; - var dataset = recline.Backend.Memory.createDataset(documents, fields); + var dataset = new recline.Model.Dataset({records: documents, fields: fields}); return dataset; } }; diff --git a/test/model.test.js b/test/model.test.js index 556eb80a..6d96a365 100644 --- a/test/model.test.js +++ b/test/model.test.js @@ -116,15 +116,6 @@ test('Dataset', 
function () { equal(out.fields.length, 2); }); -test('Dataset _prepareQuery', function () { - var meta = {id: 'test', title: 'xyz'}; - var dataset = new recline.Model.Dataset(meta); - - var out = dataset._prepareQuery(); - var exp = new recline.Model.Query().toJSON(); - deepEqual(out, exp); -}); - test('Dataset getFieldsSummary', function () { var dataset = Fixture.getDataset(); dataset.getFieldsSummary().done(function() { From 3f37da089adf1f7d1f26f1c99593ee65cd38dd06 Mon Sep 17 00:00:00 2001 From: Rufus Pollock Date: Sat, 23 Jun 2012 20:38:47 +0100 Subject: [PATCH 5/8] [#162,refactor][s]: remove recline.Memory.createDataset now that it is obsoleted by recent improvements. --- src/backend/csv.js | 5 ++++- src/backend/memory.js | 18 ------------------ src/model.js | 16 +++++++--------- test/view.map.test.js | 9 +++++++-- test/view.timeline.test.js | 10 ++++++---- 5 files changed, 24 insertions(+), 34 deletions(-) diff --git a/src/backend/csv.js b/src/backend/csv.js index a680ef17..436c9f3d 100644 --- a/src/backend/csv.js +++ b/src/backend/csv.js @@ -40,7 +40,10 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {}; }); return _doc; }); - var dataset = recline.Backend.Memory.createDataset(data, fields); + var dataset = new recline.Model.Dataset({ + records: data, + fields: fields + }); return dataset; }; diff --git a/src/backend/memory.js b/src/backend/memory.js index 2692c9e3..64df2dc5 100644 --- a/src/backend/memory.js +++ b/src/backend/memory.js @@ -5,24 +5,6 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; (function($, my) { my.__type__ = 'memory'; - // ## createDataset - // - // Convenience function to create a simple 'in-memory' dataset in one step. - // - // @param data: list of hashes for each record/row in the data ({key: - // value, key: value}) - // @param fields: (optional) list of field hashes (each hash defining a hash - // as per recline.Model.Field). If fields not specified they will be taken - // from the data. 
- // @param metadata: (optional) dataset metadata - see recline.Model.Dataset. - // If not defined (or id not provided) id will be autogenerated. - my.createDataset = function(data, fields, metadata) { - var dataset = new recline.Model.Dataset( - _.extend({}, metadata, {records: data, fields: fields}) - ); - return dataset; - }; - // ## Data Wrapper // // Turn a simple array of JS objects into a mini data-store with diff --git a/src/model.js b/src/model.js index 72b57040..765c14c1 100644 --- a/src/model.js +++ b/src/model.js @@ -245,20 +245,18 @@ my.Dataset.restore = function(state) { var dataset = null; // hack-y - restoring a memory dataset does not mean much ... if (state.backend === 'memory') { - dataset = recline.Backend.Memory.createDataset( - [{stub: 'this is a stub dataset because we do not restore memory datasets'}], - [], - state.dataset // metadata - ); + var datasetInfo = { + records: [{stub: 'this is a stub dataset because we do not restore memory datasets'}] + }; } else { var datasetInfo = { url: state.url }; - dataset = new recline.Model.Dataset( - datasetInfo, - state.backend - ); } + dataset = new recline.Model.Dataset( + datasetInfo, + state.backend + ); return dataset; }; diff --git a/test/view.map.test.js b/test/view.map.test.js index 638ff2c9..158c3749 100644 --- a/test/view.map.test.js +++ b/test/view.map.test.js @@ -16,7 +16,10 @@ var GeoJSONFixture = { {id: 1, x: 2, y: 4, z: 6, geom: {type:"Point",coordinates:[13.40,52.35]}}, {id: 2, x: 3, y: 6, z: 9, geom: {type:"LineString",coordinates:[[100.0, 0.0],[101.0, 1.0]]}} ]; - var dataset = recline.Backend.Memory.createDataset(records, fields); + var dataset = new recline.Model.Dataset({ + records: records, + fields: fields + }); return dataset; } }; @@ -114,7 +117,9 @@ test('_getGeometryFromRecord non-GeoJSON', function () { [[53.3,47.32], [53.3, 47.32]] ]; var view = new recline.View.Map({ - model: recline.Backend.Memory.createDataset([{a: 1}]), + model: new recline.Model.Dataset({ + records: 
[{a: 1}] + }), state: { geomField: 'location' } diff --git a/test/view.timeline.test.js b/test/view.timeline.test.js index eef8b759..2545340b 100644 --- a/test/view.timeline.test.js +++ b/test/view.timeline.test.js @@ -1,10 +1,12 @@ module("View - Timeline"); test('extract dates and timelineJSON', function () { - var dataset = recline.Backend.Memory.createDataset([ - {'Date': '2012-03-20', 'title': '1'}, - {'Date': '2012-03-25', 'title': '2'}, - ]); + var dataset = new recline.Model.Dataset({ + records: [ + {'Date': '2012-03-20', 'title': '1'}, + {'Date': '2012-03-25', 'title': '2'} + ] + }); var view = new recline.View.Timeline({ model: dataset }); From dd45991321c66db07b5ac52a2af401bf93e355da Mon Sep 17 00:00:00 2001 From: Rufus Pollock Date: Sat, 23 Jun 2012 21:36:31 +0100 Subject: [PATCH 6/8] [#162,refactor][s]: backend fetch method dataset argument is simple JS object not backbone Dataset. * Helps make Backends simpler and more independent of Model --- src/backend/dataproxy.js | 6 +++--- src/backend/elasticsearch.js | 5 ++++- src/backend/gdocs.js | 9 +++++++-- src/model.js | 2 +- 4 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/backend/dataproxy.js b/src/backend/dataproxy.js index c16f2cf4..8ce7c025 100644 --- a/src/backend/dataproxy.js +++ b/src/backend/dataproxy.js @@ -12,9 +12,9 @@ this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {}; // Load data from a URL via the [DataProxy](http://github.com/okfn/dataproxy). 
my.fetch = function(dataset) { var data = { - url: dataset.get('url'), - 'max-results': dataset.get('size') || dataset.get('rows') || 1000, - type: dataset.get('format') || '' + url: dataset.url, + 'max-results': dataset.size || dataset.rows || 1000, + type: dataset.format || '' }; var jqxhr = $.ajax({ url: my.dataproxy_url, diff --git a/src/backend/elasticsearch.js b/src/backend/elasticsearch.js index 1db060d9..17ee688c 100644 --- a/src/backend/elasticsearch.js +++ b/src/backend/elasticsearch.js @@ -3,11 +3,14 @@ this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {}; (function($, my) { + my.fetch = function(dataset) { + }; + // ## ElasticSearch Wrapper // // Connecting to [ElasticSearch](http://www.elasticsearch.org/) endpoints. // @param {String} endpoint: url for ElasticSearch type/table, e.g. for ES running - // on localhost:9200 with index // twitter and type tweet it would be: + // on http://localhost:9200 with index twitter and type tweet it would be: // //
http://localhost:9200/twitter/tweet
// diff --git a/src/backend/gdocs.js b/src/backend/gdocs.js index bd855cd4..085ac385 100644 --- a/src/backend/gdocs.js +++ b/src/backend/gdocs.js @@ -12,6 +12,12 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; // Dataset must have a url attribute pointing to the Gdocs or its JSON feed e.g. //
   // var dataset = new recline.Model.Dataset({
+  //     url: 'https://docs.google.com/spreadsheet/ccc?key=0Aon3JiuouxLUdGlQVDJnbjZRSU1tUUJWOUZXRG53VkE#gid=0'
+  //   },
+  //   'gdocs'
+  // );
+  //
+  // var dataset = new recline.Model.Dataset({
   //     url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
   //   },
   //   'gdocs'
@@ -22,10 +28,9 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {};
   //
   // * fields: array of Field objects
   // * records: array of objects for each row
-  //  
   my.fetch = function(dataset) {
     var dfd = $.Deferred(); 
-    var url = my.getSpreadsheetAPIUrl(dataset.get('url'));
+    var url = my.getSpreadsheetAPIUrl(dataset.url);
     $.getJSON(url, function(d) {
       result = my.parseData(d);
       var fields = _.map(result.fields, function(fieldId) {
diff --git a/src/model.js b/src/model.js
index 765c14c1..ab0d47b5 100644
--- a/src/model.js
+++ b/src/model.js
@@ -79,7 +79,7 @@ my.Dataset = Backbone.Model.extend({
     var dfd = $.Deferred();
     // TODO: fail case;
     if (this.backend !== recline.Backend.Memory) {
-      this.backend.fetch(this).then(handleResults)
+      this.backend.fetch(this.toJSON()).then(handleResults)
     } else {
       // special case where we have been given data directly
       handleResults({

From 31980857a91be84adc6f991d28a8d07da00715c3 Mon Sep 17 00:00:00 2001
From: Rufus Pollock 
Date: Sat, 23 Jun 2012 22:29:51 +0100
Subject: [PATCH 7/8] [#162,be/elasticsearch][s]: convert elasticsearch to the
 new setup (remove all Backbone from the Backend!).

* Also convert to new QueryResult format in which no _source, _type etc - cf #159
---
 src/backend/elasticsearch.js       | 200 ++++++++++++++++-------------
 src/backend/memory.js              |   2 +-
 src/model.js                       |  11 +-
 test/backend/elasticsearch.test.js |  31 +++--
 4 files changed, 132 insertions(+), 112 deletions(-)

diff --git a/src/backend/elasticsearch.js b/src/backend/elasticsearch.js
index 17ee688c..b9333c54 100644
--- a/src/backend/elasticsearch.js
+++ b/src/backend/elasticsearch.js
@@ -3,12 +3,12 @@ this.recline.Backend = this.recline.Backend || {};
 this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
 
 (function($, my) {
-  my.fetch = function(dataset) {
-  };
+  my.__type__ = 'elasticsearch';
 
   // ## ElasticSearch Wrapper
   //
-  // Connecting to [ElasticSearch](http://www.elasticsearch.org/) endpoints.
+  // A simple JS wrapper around an [ElasticSearch](http://www.elasticsearch.org/) endpoint.
+  //
   // @param {String} endpoint: url for ElasticSearch type/table, e.g. for ES running
   // on http://localhost:9200 with index twitter and type tweet it would be:
   // 
@@ -33,7 +33,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
     // @return promise compatible deferred object.
     this.mapping = function() {
       var schemaUrl = self.endpoint + '/_mapping';
-      var jqxhr = recline.Backend.makeRequest({
+      var jqxhr = makeRequest({
         url: schemaUrl,
         dataType: this.options.dataType
       });
@@ -47,7 +47,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
     // @return promise compatible deferred object.
     this.get = function(id) {
       var base = this.endpoint + '/' + id;
-      return recline.Backend.makeRequest({
+      return makeRequest({
         url: base,
         dataType: 'json'
       });
@@ -65,7 +65,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
       if (doc.id) {
         url += '/' + doc.id;
       }
-      return recline.Backend.makeRequest({
+      return makeRequest({
         url: url,
         type: 'POST',
         data: data,
@@ -82,7 +82,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
     this.delete = function(id) {
       url = this.endpoint;
       url += '/' + id;
-      return recline.Backend.makeRequest({
+      return makeRequest({
         url: url,
         type: 'DELETE',
         dataType: 'json'
@@ -143,7 +143,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
       esQuery.query = queryNormalized;
       var data = {source: JSON.stringify(esQuery)};
       var url = this.endpoint + '/_search';
-      var jqxhr = recline.Backend.makeRequest({
+      var jqxhr = makeRequest({
         url: url,
         data: data,
         dataType: this.options.dataType
@@ -152,94 +152,110 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {};
     }
   };
 
-  // ## ElasticSearch Backbone Backend
-  //
-  // Backbone connector for an ES backend.
-  //
-  // Usage:
-  //
-  // var backend = new recline.Backend.ElasticSearch(options);
-  //
-  // `options` are passed through to Wrapper
-  my.Backbone = function(options) {
-    var self = this;
-    var esOptions = options;
-    this.__type__ = 'elasticsearch';
 
-    // ### sync
-    //
-    // Backbone sync implementation for this backend.
-    //
-    // URL of ElasticSearch endpoint to use must be specified on the dataset
-    // (and on a Record via its dataset attribute) by the dataset having a
-    // url attribute.
-    this.sync = function(method, model, options) {
-      if (model.__type__ == 'Dataset') {
-        var endpoint = model.get('url');
-      } else {
-        var endpoint = model.dataset.get('url');
-      }
-      var es = new my.Wrapper(endpoint, esOptions);
-      if (method === "read") {
-        if (model.__type__ == 'Dataset') {
-          var dfd = $.Deferred();
-          es.mapping().done(function(schema) {
-            // only one top level key in ES = the type so we can ignore it
-            var key = _.keys(schema)[0];
-            var fieldData = _.map(schema[key].properties, function(dict, fieldName) {
-              dict.id = fieldName;
-              return dict;
-            });
-            model.fields.reset(fieldData);
-            dfd.resolve(model);
-          })
-          .fail(function(arguments) {
-            dfd.reject(arguments);
-          });
-          return dfd.promise();
-        } else if (model.__type__ == 'Record') {
-          return es.get(model.dataset.id);
-        }
-      } else if (method === 'update') {
-        if (model.__type__ == 'Record') {
-          return es.upsert(model.toJSON());
-        }
-      } else if (method === 'delete') {
-        if (model.__type__ == 'Record') {
-          return es.delete(model.id);
-        }
-      }
-    };
+  // ## Recline Connectors 
+  //
+  // Requires URL of ElasticSearch endpoint to be specified on the dataset
+  // via the url attribute.
 
-    // ### query
-    //
-    // query the ES backend
-    this.query = function(model, queryObj) {
-      var dfd = $.Deferred();
-      var url = model.get('url');
-      var es = new my.Wrapper(url, esOptions);
-      var jqxhr = es.query(queryObj);
-      // TODO: fail case
-      jqxhr.done(function(results) {
-        _.each(results.hits.hits, function(hit) {
-          if (!('id' in hit._source) && hit._id) {
-            hit._source.id = hit._id;
-          }
-        });
-        if (results.facets) {
-          results.hits.facets = results.facets;
-        }
-        dfd.resolve(results.hits);
-      }).fail(function(errorObj) {
-        var out = {
-          title: 'Failed: ' + errorObj.status + ' code',
-          message: errorObj.responseText
-        };
-        dfd.reject(out);
+  // ES options which are passed through to `options` on Wrapper (see Wrapper for details)
+  my.esOptions = {};
+
+  // ### fetch
+  my.fetch = function(dataset) {
+    var es = new my.Wrapper(dataset.url, my.esOptions);
+    var dfd = $.Deferred();
+    es.mapping().done(function(schema) {
+      // only one top level key in ES = the type so we can ignore it
+      var key = _.keys(schema)[0];
+      var fieldData = _.map(schema[key].properties, function(dict, fieldName) {
+        dict.id = fieldName;
+        return dict;
       });
-      return dfd.promise();
-    };
+      dfd.resolve({
+        fields: fieldData
+      });
+    })
+    .fail(function(arguments) {
+      dfd.reject(arguments);
+    });
+    return dfd.promise();
   };
 
+  // ### save
+  my.save = function(changes, dataset) {
+    var es = new my.Wrapper(dataset.url, my.esOptions);
+    if (changes.creates.length + changes.updates.length + changes.deletes.length > 1) {
+      var dfd = $.Deferred();
+      msg = 'Saving more than one item at a time not yet supported';
+      alert(msg);
+      dfd.reject(msg);
+      return dfd.promise();
+    }
+    if (changes.creates.length > 0) {
+      return es.upsert(changes.creates[0]);
+    }
+    else if (changes.updates.length >0) {
+      return es.upsert(changes.updates[0]);
+    } else if (changes.deletes.length > 0) {
+      return es.delete(changes.deletes[0].id);
+    }
+  };
+
+  // ### query
+  my.query = function(queryObj, dataset) {
+    var dfd = $.Deferred();
+    var es = new my.Wrapper(dataset.url, my.esOptions);
+    var jqxhr = es.query(queryObj);
+    jqxhr.done(function(results) {
+      var out = {
+        total: results.hits.total,
+      };
+      out.hits = _.map(results.hits.hits, function(hit) {
+        if (!('id' in hit._source) && hit._id) {
+          hit._source.id = hit._id;
+        }
+        return hit._source;
+      });
+      if (results.facets) {
+        out.facets = results.facets;
+      }
+      dfd.resolve(out);
+    }).fail(function(errorObj) {
+      var out = {
+        title: 'Failed: ' + errorObj.status + ' code',
+        message: errorObj.responseText
+      };
+      dfd.reject(out);
+    });
+    return dfd.promise();
+  };
+
+
+// ### makeRequest
+// 
+// Just $.ajax but adds any headers in the 'headers' attribute of this
+// Backend instance. Example:
+//
+// 
+// var jqxhr = makeRequest({
+//   url: the-url
+// });
+// 
+var makeRequest = function(data, headers) { + var extras = {}; + if (headers) { + extras = { + beforeSend: function(req) { + _.each(headers, function(value, key) { + req.setRequestHeader(key, value); + }); + } + }; + } + var data = _.extend(extras, data); + return $.ajax(data); +}; + }(jQuery, this.recline.Backend.ElasticSearch)); diff --git a/src/backend/memory.js b/src/backend/memory.js index 64df2dc5..89515921 100644 --- a/src/backend/memory.js +++ b/src/backend/memory.js @@ -54,7 +54,7 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; _.each(changes.deletes, function(record) { self.delete(record); }); - dfd.resolve(this); + dfd.resolve(); return dfd.promise(); }, diff --git a/src/model.js b/src/model.js index ab0d47b5..d5c3ee11 100644 --- a/src/model.js +++ b/src/model.js @@ -79,7 +79,11 @@ my.Dataset = Backbone.Model.extend({ var dfd = $.Deferred(); // TODO: fail case; if (this.backend !== recline.Backend.Memory) { - this.backend.fetch(this.toJSON()).then(handleResults) + this.backend.fetch(this.toJSON()) + .done(handleResults) + .fail(function(arguments) { + dfd.reject(arguments); + }); } else { // special case where we have been given data directly handleResults({ @@ -100,14 +104,15 @@ my.Dataset = Backbone.Model.extend({ self.fields.reset(results.fields); } // TODO: parsing the processing of fields - dfd.resolve(this); + dfd.resolve(self); } return dfd.promise(); }, save: function() { var self = this; - return this._store.save(this._changes, this); + // TODO: need to reset the changes ... 
+ return this._store.save(this._changes, this.toJSON()); }, // ### query diff --git a/test/backend/elasticsearch.test.js b/test/backend/elasticsearch.test.js index c63821c8..4ab6aa07 100644 --- a/test/backend/elasticsearch.test.js +++ b/test/backend/elasticsearch.test.js @@ -246,14 +246,13 @@ test("write", function() { // ================================================== -module("Backend ElasticSearch - Backbone"); +module("Backend ElasticSearch - Recline"); test("query", function() { - var backend = new recline.Backend.ElasticSearch.Backbone(); var dataset = new recline.Model.Dataset({ url: 'https://localhost:9200/my-es-db/my-es-type' }, - backend + 'elasticsearch' ); var stub = sinon.stub($, 'ajax', function(options) { @@ -292,11 +291,10 @@ test("query", function() { }); test("write", function() { - var backend = new recline.Backend.ElasticSearch.Backbone(); var dataset = new recline.Model.Dataset({ url: 'http://localhost:9200/recline-test/es-write' }, - backend + 'elasticsearch' ); stop(); @@ -306,10 +304,10 @@ test("write", function() { id: id, title: 'my title' }); - rec.backend = backend; - rec.dataset = dataset; dataset.currentRecords.add(rec); - var jqxhr = rec.save(); + // have to do this explicitly as we not really supporting adding new items atm + dataset._changes.creates.push(rec.toJSON()); + var jqxhr = dataset.save(); jqxhr.done(function(data) { ok(data.ok); equal(data._id, id); @@ -318,28 +316,29 @@ test("write", function() { // update rec.set({title: 'new title'}); - var jqxhr = rec.save(); + // again set up by hand ... + dataset._changes.creates = []; + dataset._changes.updates.push(rec.toJSON()); + var jqxhr = dataset.save(); jqxhr.done(function(data) { equal(data._version, 2); // delete - var jqxhr = rec.destroy(); + dataset._changes.updates = 0; + dataset._changes.deletes.push(rec.toJSON()); + var jqxhr = dataset.save(); jqxhr.done(function(data) { ok(data.ok); rec = null; // try to get ... 
- var oldrec = new recline.Model.Record({id: id}); - equal(oldrec.get('title'), null); - oldrec.dataset = dataset; - oldrec.backend = backend; - var jqxhr = oldrec.fetch(); + var es = new recline.Backend.ElasticSearch.Wrapper(dataset.get('url')); + var jqxhr = es.get(id); jqxhr.done(function(data) { // should not be here ok(false, 'Should have got 404'); }).error(function(error) { equal(error.status, 404); - equal(typeof oldrec.get('title'), 'undefined'); start(); }); }); From bb02f755390be583573834f6bb277f3dda623f4f Mon Sep 17 00:00:00 2001 From: Rufus Pollock Date: Sat, 23 Jun 2012 23:27:11 +0100 Subject: [PATCH 8/8] [doc/model][m]: move all main model item docs in jekyll doc library-model.markdown and start tidying up. * Started this in branch even though general as a pre-cursor to documentation of new backend setup. --- library-model.markdown | 188 +++++++++++++++++++++++++++++++++++++++++ library.html | 2 +- src/model.js | 144 +------------------------------ 3 files changed, 190 insertions(+), 144 deletions(-) create mode 100644 library-model.markdown diff --git a/library-model.markdown b/library-model.markdown new file mode 100644 index 00000000..fc815936 --- /dev/null +++ b/library-model.markdown @@ -0,0 +1,188 @@ +--- +layout: container +title: Models and Backends +--- + + + +Models help you structure your work with data by providing some standard objects. The key ones are Dataset and Record -- a Dataset being a collection of Records. Additionally, there is a a Field object for describing the columns of a Dataset, a Query object for describing queries, and a Facet object for holding summary information about a Field (or multiple Fields). + +# Models + +## Dataset + +A Dataset is *the* central object in Recline. 
It has the following key attributes: + +* currentRecords: a collection of `Record`s currently loaded for viewing (updated by calling query method) - note that this need not be all the records in the dataset (for example, you may have connected to a source where the complete dataset contains a million records but you have only loaded a 1000 records) +* fields: (aka columns) is a Backbone collectoin of `Field`s listing all + the fields on this Dataset (this can be set explicitly, or, will be + set by Dataset.fetch() +* docCount: total number of records in this dataset +* backend: the Backend (instance) for this Dataset. +* queryState: a `Query` object which stores current queryState. + queryState may be edited by other components (e.g. a query editor + view) changes will trigger a Dataset query. +* facets: a collection of `Facet`s + +

Record (aka Row)

+ +A Record represents a single entry or row in a dataset. As Record fits very nicely with the default behaviour of a Backbone Model object it has little additional functionality. + +

Field (aka Column)

+ +A Field should have the following attributes as standard: + +{% highlight javascript %} +var field = new Field({ + // a unique identifer for this field- usually this should match the key in the records hash + id: 'my-field-id' + // (optional: defaults to id) the visible label used for this field + label: 'My Field Name', + // (optional: defaults to string) the type of the data in this field. + // Should be a string as per type names defined by ElasticSearch - see + // Types list on + type: 'string', + // (optional - defaults to null) used to indicate how the data should be formatted. See below. + format: null, + // (default: false) attribute indicating this field has no backend data but is just derived from other fields (see below). + is_derived: false +{% endhighlight %} + +#### Rendering, types and formats + +One can customize the rendering of fields in the user interface and elsewhere by setting a renderer function on the field. You do this by setting a field attribute: + +{% highlight javascript %} +myfield.renderer = myRenderFunction; +{% endhighlight %} + +Your renderer function should have the following signature: + + function(value, field, record) + +where value is the value of this cell, field is corresponding field +object and record is the record object (as simple JS object). Note that +implementing functions can ignore arguments (e.g. function(value) would +be a valid formatter function). + +To guide the behaviour of renderers we have type and format information. 
Example types and formats are: + + * type=date, format=yyyy-mm-dd + * type=float, format=percentage + * type=string, format=markdown (render as markdown if Showdown available) + +Default renderers are provided - see the source for details, but a few examples are: + + * type = string + * no format provided: pass through but convert http:// to hyperlinks + * format = plain: do no processing on the source text + * format = markdown: process as markdown (if Showdown library available) + * type = float + * format = percentage: format as a percentage + +#### Derived fields: + +* deriver: a function to derive/compute the value of data +in this field as a function of this field's value (if any) and the current +record. It's signature and behaviour is the same as for renderer. Use of +this function allows you to define an entirely new value for data in this +field. This provides support for a) 'derived/computed' fields: i.e. fields +whose data are functions of the data in other fields b) transforming the +value of this field prior to rendering. + + +

Query

+ +Query instances encapsulate a query to the backend (see query method on backend). Useful both +for creating queries and for storing and manipulating query state - +e.g. from a query editor). + +**Query Structure and format** + +Query structure should follow that of [ElasticSearch query +language](http://www.elasticsearch.org/guide/reference/api/search/). + +**NB: It is up to specific backends how to implement and support this query +structure. Different backends might choose to implement things differently +or not support certain features. Please check your backend for details.** + +Query object has the following key attributes: + + * size (=limit): number of results to return + * from (=offset): offset into result set - http://www.elasticsearch.org/guide/reference/api/search/from-size.html + * sort: sort order - + * query: Query in ES Query DSL + * filter: See filters and Filtered Query + * fields: set of fields to return - http://www.elasticsearch.org/guide/reference/api/search/fields.html + * facets: specification of facets - see http://www.elasticsearch.org/guide/reference/api/search/facets/ + +Additions: + + * q: either straight text or a hash will map directly onto a [query_string + query](http://www.elasticsearch.org/guide/reference/query-dsl/query-string-query.html) + in backend + + * Of course this can be re-interpreted by different backends. E.g. some + may just pass this straight through e.g. for an SQL backend this could be + the full SQL query + + * filters: array of ElasticSearch filters. These will be and-ed together for + execution. + +**Examples** + +
+{
+   q: 'quick brown fox',
+   filters: [
+     { term: { 'owner': 'jones' } }
+   ]
+}
+
+ +

Facet – Store summary information (e.g. values and counts) about a field obtained by some 'faceting' or 'group by' method +

+ +Structure of a facet follows that of Facet results in ElasticSearch, see: + + +Specifically the object structure of a facet looks like (there is one +addition compared to ElasticSearch: the "id" field which corresponds to the +key used to specify this facet in the facet query): + +
+{
+  "id": "id-of-facet",
+  // type of this facet (terms, range, histogram etc)
+  "_type" : "terms",
+  // total number of tokens in the facet
+  "total": 5,
+  // number of records which have no value for the field
+  "missing" : 0,
+  // number of facet values not included in the returned facets
+  "other": 0,
+  // term object ({term: , count: ...})
+  "terms" : [ {
+      "term" : "foo",
+      "count" : 2
+    }, {
+      "term" : "bar",
+      "count" : 2
+    }, {
+      "term" : "baz",
+      "count" : 1
+    }
+  ]
+}
+
+ +# Backends + +1. Data is held in an in-memory store on the Dataset object. +2. Data is transparently sourced from a backend store. + diff --git a/library.html b/library.html index 4e980bc6..9189dd84 100644 --- a/library.html +++ b/library.html @@ -24,7 +24,7 @@ title: Library - Home

Models

-

Models help you structure your work with data by providing some standard objects such as Dataset and Record – a Dataset being a collection of Records. More »

+

Models help you structure your work with data by providing some standard objects such as Dataset and Record – a Dataset being a collection of Records. More »

diff --git a/src/model.js b/src/model.js index d5c3ee11..6bab5776 100644 --- a/src/model.js +++ b/src/model.js @@ -4,28 +4,7 @@ this.recline.Model = this.recline.Model || {}; (function($, my) { -// ## A Dataset model -// -// A model has the following (non-Backbone) attributes: -// -// @property {FieldList} fields: (aka columns) is a `FieldList` listing all the -// fields on this Dataset (this can be set explicitly, or, will be set by -// Dataset.fetch() or Dataset.query() -// -// @property {RecordList} currentRecords: a `RecordList` containing the -// Records we have currently loaded for viewing (updated by calling query -// method) -// -// @property {number} docCount: total number of records in this dataset -// -// @property {Backend} backend: the Backend (instance) for this Dataset. -// -// @property {Query} queryState: `Query` object which stores current -// queryState. queryState may be edited by other components (e.g. a query -// editor view) changes will trigger a Dataset query. -// -// @property {FacetList} facets: FacetList object containing all current -// Facets. +// ## Dataset my.Dataset = Backbone.Model.extend({ __type__: 'Dataset', @@ -324,42 +303,6 @@ my.RecordList = Backbone.Collection.extend({ }); // ## A Field (aka Column) on a Dataset -// -// Following (Backbone) attributes as standard: -// -// * id: a unique identifer for this field- usually this should match the key in the records hash -// * label: (optional: defaults to id) the visible label used for this field -// * type: (optional: defaults to string) the type of the data in this field. Should be a string as per type names defined by ElasticSearch - see Types list on -// * format: (optional) used to indicate how the data should be formatted. 
For example: -// * type=date, format=yyyy-mm-dd -// * type=float, format=percentage -// * type=string, format=markdown (render as markdown if Showdown available) -// * is_derived: (default: false) attribute indicating this field has no backend data but is just derived from other fields (see below). -// -// Following additional instance properties: -// -// @property {Function} renderer: a function to render the data for this field. -// Signature: function(value, field, record) where value is the value of this -// cell, field is corresponding field object and record is the record -// object (as simple JS object). Note that implementing functions can ignore arguments (e.g. -// function(value) would be a valid formatter function). -// -// @property {Function} deriver: a function to derive/compute the value of data -// in this field as a function of this field's value (if any) and the current -// record, its signature and behaviour is the same as for renderer. Use of -// this function allows you to define an entirely new value for data in this -// field. This provides support for a) 'derived/computed' fields: i.e. fields -// whose data are functions of the data in other fields b) transforming the -// value of this field prior to rendering. -// -// #### Default renderers -// -// * string -// * no format provided: pass through but convert http:// to hyperlinks -// * format = plain: do no processing on the source text -// * format = markdown: process as markdown (if Showdown library available) -// * float -// * format = percentage: format as a percentage my.Field = Backbone.Model.extend({ // ### defaults - define default values defaults: { @@ -430,54 +373,6 @@ my.FieldList = Backbone.Collection.extend({ }); // ## Query -// -// Query instances encapsulate a query to the backend (see query method on backend). Useful both -// for creating queries and for storing and manipulating query state - -// e.g. from a query editor). 
-// -// **Query Structure and format** -// -// Query structure should follow that of [ElasticSearch query -// language](http://www.elasticsearch.org/guide/reference/api/search/). -// -// **NB: It is up to specific backends how to implement and support this query -// structure. Different backends might choose to implement things differently -// or not support certain features. Please check your backend for details.** -// -// Query object has the following key attributes: -// -// * size (=limit): number of results to return -// * from (=offset): offset into result set - http://www.elasticsearch.org/guide/reference/api/search/from-size.html -// * sort: sort order - -// * query: Query in ES Query DSL -// * filter: See filters and Filtered Query -// * fields: set of fields to return - http://www.elasticsearch.org/guide/reference/api/search/fields.html -// * facets: specification of facets - see http://www.elasticsearch.org/guide/reference/api/search/facets/ -// -// Additions: -// -// * q: either straight text or a hash will map directly onto a [query_string -// query](http://www.elasticsearch.org/guide/reference/query-dsl/query-string-query.html) -// in backend -// -// * Of course this can be re-interpreted by different backends. E.g. some -// may just pass this straight through e.g. for an SQL backend this could be -// the full SQL query -// -// * filters: array of ElasticSearch filters. These will be and-ed together for -// execution. -// -// **Examples** -// -//
-// {
-//    q: 'quick brown fox',
-//    filters: [
-//      { term: { 'owner': 'jones' } }
-//    ]
-// }
-// 
my.Query = Backbone.Model.extend({ defaults: function() { return { @@ -597,43 +492,6 @@ my.Query = Backbone.Model.extend({ // ## A Facet (Result) -// -// Object to store Facet information, that is summary information (e.g. values -// and counts) about a field obtained by some faceting method on the -// backend. -// -// Structure of a facet follows that of Facet results in ElasticSearch, see: -// -// -// Specifically the object structure of a facet looks like (there is one -// addition compared to ElasticSearch: the "id" field which corresponds to the -// key used to specify this facet in the facet query): -// -//
-// {
-//   "id": "id-of-facet",
-//   // type of this facet (terms, range, histogram etc)
-//   "_type" : "terms",
-//   // total number of tokens in the facet
-//   "total": 5,
-//   // @property {number} number of records which have no value for the field
-//   "missing" : 0,
-//   // number of facet values not included in the returned facets
-//   "other": 0,
-//   // term object ({term: , count: ...})
-//   "terms" : [ {
-//       "term" : "foo",
-//       "count" : 2
-//     }, {
-//       "term" : "bar",
-//       "count" : 2
-//     }, {
-//       "term" : "baz",
-//       "count" : 1
-//     }
-//   ]
-// }
-// 
my.Facet = Backbone.Model.extend({ defaults: function() { return {