diff --git a/docs/backend.html b/docs/backend.html deleted file mode 100644 index b1f5009e..00000000 --- a/docs/backend.html +++ /dev/null @@ -1,353 +0,0 @@ -
backend.js | |
|---|---|
Recline Backends- -Backends are connectors to backend data sources and stores - -Backends are implemented as Backbone models but this is just a -convenience (they do not save or load themselves from any remote -source) | this.recline = this.recline || {};
-this.recline.Model = this.recline.Model || {};
-
-(function($, my) { |
Backbone.sync- -Override Backbone.sync to hand off to sync function in relevant backend | Backbone.sync = function(method, model, options) {
- return model.backend.sync(method, model, options);
- } |
wrapInTimeout- -Crude way to catch backend errors -Many of the backends use JSONP and so will not get error messages and this is -a crude way to catch those errors. | function wrapInTimeout(ourFunction) {
- var dfd = $.Deferred();
- var timeout = 5000;
- var timer = setTimeout(function() {
- dfd.reject({
- message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds'
- });
- }, timeout);
- ourFunction.done(function(arguments) {
- clearTimeout(timer);
- dfd.resolve(arguments);
- })
- .fail(function(arguments) {
- clearTimeout(timer);
- dfd.reject(arguments);
- })
- ;
- return dfd.promise();
- } |
BackendMemory - uses in-memory data- -This is very artificial and is really only designed for testing -purposes. - -To use it you should provide in your constructor data: - -
| my.BackendMemory = Backbone.Model.extend({
- initialize: function() {
- this.datasets = {};
- },
- addDataset: function(data) {
- this.datasets[data.metadata.id] = $.extend(true, {}, data);
- },
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- var dfd = $.Deferred();
- if (model.__type__ == 'Dataset') {
- var rawDataset = this.datasets[model.id];
- model.set(rawDataset.metadata);
- model.fields.reset(rawDataset.fields);
- model.docCount = rawDataset.documents.length;
- dfd.resolve(model);
- }
- return dfd.promise();
- } else if (method === 'update') {
- var dfd = $.Deferred();
- if (model.__type__ == 'Document') {
- _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
- if(doc.id === model.id) {
- self.datasets[model.dataset.id].documents[idx] = model.toJSON();
- }
- });
- dfd.resolve(model);
- }
- return dfd.promise();
- } else if (method === 'delete') {
- var dfd = $.Deferred();
- if (model.__type__ == 'Document') {
- var rawDataset = self.datasets[model.dataset.id];
- var newdocs = _.reject(rawDataset.documents, function(doc) {
- return (doc.id === model.id);
- });
- rawDataset.documents = newdocs;
- dfd.resolve(model);
- }
- return dfd.promise();
- } else {
- alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
- }
- },
- query: function(model, queryObj) {
- var numRows = queryObj.size;
- var start = queryObj.offset;
- var dfd = $.Deferred();
- results = this.datasets[model.id].documents; |
| not complete sorting! | _.each(queryObj.sort, function(item) {
- results = _.sortBy(results, function(doc) {
- var _out = doc[item[0]];
- return (item[1] == 'asc') ? _out : -1*_out;
- });
- });
- var results = results.slice(start, start+numRows);
- dfd.resolve(results);
- return dfd.promise();
- }
- });
- my.backends['memory'] = new my.BackendMemory(); |
BackendWebstore- -Connecting to Webstores - -To use this backend ensure your Dataset has a webstore_url in its attributes. | my.BackendWebstore = Backbone.Model.extend({
- sync: function(method, model, options) {
- if (method === "read") {
- if (model.__type__ == 'Dataset') {
- var base = model.get('webstore_url');
- var schemaUrl = base + '/schema.json';
- var jqxhr = $.ajax({
- url: schemaUrl,
- dataType: 'jsonp',
- jsonp: '_callback'
- });
- var dfd = $.Deferred();
- wrapInTimeout(jqxhr).done(function(schema) {
- var fieldData = _.map(schema.data, function(item) {
- item.id = item.name;
- delete item.name;
- return item;
- });
- model.fields.reset(fieldData);
- model.docCount = schema.count;
- dfd.resolve(model, jqxhr);
- })
- .fail(function(arguments) {
- dfd.reject(arguments);
- });
- return dfd.promise();
- }
- }
- },
- query: function(model, queryObj) {
- var base = model.get('webstore_url');
- var data = {
- _limit: queryObj.size
- , _offset: queryObj.offset
- };
- var jqxhr = $.ajax({
- url: base + '.json',
- data: data,
- dataType: 'jsonp',
- jsonp: '_callback',
- cache: true
- });
- var dfd = $.Deferred();
- jqxhr.done(function(results) {
- dfd.resolve(results.data);
- });
- return dfd.promise();
- }
- });
- my.backends['webstore'] = new my.BackendWebstore(); |
BackendDataProxy- -For connecting to DataProxy-s. - -When initializing the DataProxy backend you can set the following attributes: - -
Datasets using this backend should set the following attributes: - -
Note that this is a read-only backend. | my.BackendDataProxy = Backbone.Model.extend({
- defaults: {
- dataproxy_url: 'http://jsonpdataproxy.appspot.com'
- },
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- if (model.__type__ == 'Dataset') {
- var base = self.get('dataproxy_url'); |
| TODO: should we cache for extra efficiency | var data = {
- url: model.get('url')
- , 'max-results': 1
- , type: model.get('format') || 'csv'
- };
- var jqxhr = $.ajax({
- url: base
- , data: data
- , dataType: 'jsonp'
- });
- var dfd = $.Deferred();
- wrapInTimeout(jqxhr).done(function(results) {
- model.fields.reset(_.map(results.fields, function(fieldId) {
- return {id: fieldId};
- })
- );
- dfd.resolve(model, jqxhr);
- })
- .fail(function(arguments) {
- dfd.reject(arguments);
- });
- return dfd.promise();
- }
- } else {
- alert('This backend only supports read operations');
- }
- },
- query: function(dataset, queryObj) {
- var base = this.get('dataproxy_url');
- var data = {
- url: dataset.get('url')
- , 'max-results': queryObj.size
- , type: dataset.get('format')
- };
- var jqxhr = $.ajax({
- url: base
- , data: data
- , dataType: 'jsonp'
- });
- var dfd = $.Deferred();
- jqxhr.done(function(results) {
- var _out = _.map(results.data, function(doc) {
- var tmp = {};
- _.each(results.fields, function(key, idx) {
- tmp[key] = doc[idx];
- });
- return tmp;
- });
- dfd.resolve(_out);
- });
- return dfd.promise();
- }
- });
- my.backends['dataproxy'] = new my.BackendDataProxy(); |
Google spreadsheet backend- -Connect to Google Docs spreadsheet. - -Dataset must have a url attribute pointing to the Gdocs -spreadsheet's JSON feed e.g. - -
-var dataset = new recline.Model.Dataset({
- url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
- },
- 'gdocs'
-);
- | my.BackendGDoc = Backbone.Model.extend({
- sync: function(method, model, options) {
- var self = this;
- if (method === "read") {
- var dfd = $.Deferred();
- var dataset = model;
-
- $.getJSON(model.get('url'), function(d) {
- result = self.gdocsToJavascript(d);
- model.fields.reset(_.map(result.field, function(fieldId) {
- return {id: fieldId};
- })
- ); |
| cache data onto dataset (we have loaded whole gdoc it seems!) | model._dataCache = result.data;
- dfd.resolve(model);
- })
- return dfd.promise(); }
- },
-
- query: function(dataset, queryObj) {
- var dfd = $.Deferred();
- var fields = _.pluck(dataset.fields.toJSON(), 'id'); |
| zip the fields with the data rows to produce js objs -TODO: factor this out as a common method with other backends | var objs = _.map(dataset._dataCache, function (d) {
- var obj = {};
- _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
- return obj;
- });
- dfd.resolve(objs);
- return dfd;
- },
- gdocsToJavascript: function(gdocsSpreadsheet) {
- /*
- :options: (optional) optional argument dictionary:
- columnsToUse: list of columns to use (specified by field names)
- colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
- :return: tabular data object (hash with keys: field and data).
-
- Issues: seems google docs return columns in rows in random order and not even sure whether consistent across rows.
- */
- var options = {};
- if (arguments.length > 1) {
- options = arguments[1];
- }
- var results = {
- 'field': [],
- 'data': []
- }; |
| default is no special info on type of columns | var colTypes = {};
- if (options.colTypes) {
- colTypes = options.colTypes;
- } |
| either extract column headings from spreadsheet directly, or use supplied ones | if (options.columnsToUse) { |
| columns set to subset supplied | results.field = options.columnsToUse;
- } else { |
| set columns to use to be all available | if (gdocsSpreadsheet.feed.entry.length > 0) {
- for (var k in gdocsSpreadsheet.feed.entry[0]) {
- if (k.substr(0, 3) == 'gsx') {
- var col = k.substr(4)
- results.field.push(col);
- }
- }
- }
- } |
| converts non-numerical values that should be numerical (22.3%[string] -> 0.223[float]) | var rep = /^([\d\.\-]+)\%$/;
- $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
- var row = [];
- for (var k in results.field) {
- var col = results.field[k];
- var _keyname = 'gsx$' + col;
- var value = entry[_keyname]['$t']; |
| if labelled as % and value contains %, convert | if (colTypes[col] == 'percent') {
- if (rep.test(value)) {
- var value2 = rep.exec(value);
- var value3 = parseFloat(value2);
- value = value3 / 100;
- }
- }
- row.push(value);
- }
- results.data.push(row);
- });
- return results;
- }
- });
- my.backends['gdocs'] = new my.BackendGDoc();
-
-}(jQuery, this.recline.Model));
-
- |
base.js | |
|---|---|
Recline Backends+ +Backends are connectors to backend data sources and stores + +This is just the base module containing various convenience methods. | this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) { |
Backbone.sync+ +Override Backbone.sync to hand off to sync function in relevant backend | Backbone.sync = function(method, model, options) {
+ return model.backend.sync(method, model, options);
+ } |
wrapInTimeout+ +Crude way to catch backend errors +Many of the backends use JSONP and so will not get error messages and this is +a crude way to catch those errors. | my.wrapInTimeout = function(ourFunction) {
+ var dfd = $.Deferred();
+ var timeout = 5000;
+ var timer = setTimeout(function() {
+ dfd.reject({
+ message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds'
+ });
+ }, timeout);
+ ourFunction.done(function(arguments) {
+ clearTimeout(timer);
+ dfd.resolve(arguments);
+ })
+ .fail(function(arguments) {
+ clearTimeout(timer);
+ dfd.reject(arguments);
+ })
+ ;
+ return dfd.promise();
+ }
+}(jQuery, this.recline.Backend));
+
+ |
dataproxy.js | |
|---|---|
this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) { | |
DataProxy Backend+ +For connecting to DataProxy-s. + +When initializing the DataProxy backend you can set the following attributes: + +
Datasets using this backend should set the following attributes: + +
Note that this is a read-only backend. | my.DataProxy = Backbone.Model.extend({
+ defaults: {
+ dataproxy_url: 'http://jsonpdataproxy.appspot.com'
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = self.get('dataproxy_url'); |
| TODO: should we cache for extra efficiency | var data = {
+ url: model.get('url')
+ , 'max-results': 1
+ , type: model.get('format') || 'csv'
+ };
+ var jqxhr = $.ajax({
+ url: base
+ , data: data
+ , dataType: 'jsonp'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(results) {
+ model.fields.reset(_.map(results.fields, function(fieldId) {
+ return {id: fieldId};
+ })
+ );
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ } else {
+ alert('This backend only supports read operations');
+ }
+ },
+ query: function(dataset, queryObj) {
+ var base = this.get('dataproxy_url');
+ var data = {
+ url: dataset.get('url')
+ , 'max-results': queryObj.size
+ , type: dataset.get('format')
+ };
+ var jqxhr = $.ajax({
+ url: base
+ , data: data
+ , dataType: 'jsonp'
+ });
+ var dfd = $.Deferred();
+ jqxhr.done(function(results) {
+ var _out = _.map(results.data, function(doc) {
+ var tmp = {};
+ _.each(results.fields, function(key, idx) {
+ tmp[key] = doc[idx];
+ });
+ return tmp;
+ });
+ dfd.resolve(_out);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['dataproxy'] = new my.DataProxy();
+
+
+}(jQuery, this.recline.Backend));
+
+ |
elasticsearch.js | |
|---|---|
this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) { | |
ElasticSearch Backend+ +Connecting to ElasticSearch. + +To use this backend ensure your Dataset has one of the following +attributes (first one found is used): + ++elasticsearch_url +webstore_url +url ++ + This should point to the ES type url. E.G. for ES running on +localhost:9200 with index twitter and type tweet it would be + +http://localhost:9200/twitter/tweet | my.ElasticSearch = Backbone.Model.extend({
+ _getESUrl: function(dataset) {
+ var out = dataset.get('elasticsearch_url');
+ if (out) return out;
+ out = dataset.get('webstore_url');
+ if (out) return out;
+ out = dataset.get('url');
+ return out;
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = self._getESUrl(model);
+ var schemaUrl = base + '/_mapping';
+ var jqxhr = $.ajax({
+ url: schemaUrl,
+ dataType: 'jsonp'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(schema) { |
| only one top level key in ES = the type so we can ignore it | var key = _.keys(schema)[0];
+ var fieldData = _.map(schema[key].properties, function(dict, fieldName) {
+ dict.id = fieldName;
+ return dict;
+ });
+ model.fields.reset(fieldData);
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ } else {
+ alert('This backend currently only supports read operations');
+ }
+ },
+ _normalizeQuery: function(queryObj) {
+ if (queryObj.toJSON) {
+ var out = queryObj.toJSON();
+ } else {
+ var out = _.extend({}, queryObj);
+ }
+ if (out.q != undefined && out.q.trim() === '') {
+ delete out.q;
+ }
+ if (!out.q) {
+ out.query = {
+ match_all: {}
+ }
+ } else {
+ out.query = {
+ query_string: {
+ query: out.q
+ }
+ }
+ delete out.q;
+ }
+ return out;
+ },
+ query: function(model, queryObj) {
+ var queryNormalized = this._normalizeQuery(queryObj);
+ var data = {source: JSON.stringify(queryNormalized)};
+ var base = this._getESUrl(model);
+ var jqxhr = $.ajax({
+ url: base + '/_search',
+ data: data,
+ dataType: 'jsonp'
+ });
+ var dfd = $.Deferred(); |
| TODO: fail case | jqxhr.done(function(results) {
+ model.docCount = results.hits.total;
+ var docs = _.map(results.hits.hits, function(result) {
+ var _out = result._source;
+ _out.id = result._id;
+ return _out;
+ });
+ dfd.resolve(docs);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['elasticsearch'] = new my.ElasticSearch();
+
+}(jQuery, this.recline.Backend));
+
+ |
gdocs.js | |
|---|---|
this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) { | |
Google spreadsheet backend+ +Connect to Google Docs spreadsheet. + +Dataset must have a url attribute pointing to the Gdocs +spreadsheet's JSON feed e.g. + +
+var dataset = new recline.Model.Dataset({
+ url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
+ },
+ 'gdocs'
+);
+ | my.GDoc = Backbone.Model.extend({
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ var dfd = $.Deferred();
+ var dataset = model;
+
+ $.getJSON(model.get('url'), function(d) {
+ result = self.gdocsToJavascript(d);
+ model.fields.reset(_.map(result.field, function(fieldId) {
+ return {id: fieldId};
+ })
+ ); |
| cache data onto dataset (we have loaded whole gdoc it seems!) | model._dataCache = result.data;
+ dfd.resolve(model);
+ })
+ return dfd.promise(); }
+ },
+
+ query: function(dataset, queryObj) {
+ var dfd = $.Deferred();
+ var fields = _.pluck(dataset.fields.toJSON(), 'id'); |
| zip the fields with the data rows to produce js objs +TODO: factor this out as a common method with other backends | var objs = _.map(dataset._dataCache, function (d) {
+ var obj = {};
+ _.each(_.zip(fields, d), function (x) { obj[x[0]] = x[1]; })
+ return obj;
+ });
+ dfd.resolve(objs);
+ return dfd;
+ },
+ gdocsToJavascript: function(gdocsSpreadsheet) {
+ /*
+ :options: (optional) optional argument dictionary:
+ columnsToUse: list of columns to use (specified by field names)
+ colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
+ :return: tabular data object (hash with keys: field and data).
+
+ Issues: seems google docs return columns in rows in random order and not even sure whether consistent across rows.
+ */
+ var options = {};
+ if (arguments.length > 1) {
+ options = arguments[1];
+ }
+ var results = {
+ 'field': [],
+ 'data': []
+ }; |
| default is no special info on type of columns | var colTypes = {};
+ if (options.colTypes) {
+ colTypes = options.colTypes;
+ } |
| either extract column headings from spreadsheet directly, or use supplied ones | if (options.columnsToUse) { |
| columns set to subset supplied | results.field = options.columnsToUse;
+ } else { |
| set columns to use to be all available | if (gdocsSpreadsheet.feed.entry.length > 0) {
+ for (var k in gdocsSpreadsheet.feed.entry[0]) {
+ if (k.substr(0, 3) == 'gsx') {
+ var col = k.substr(4)
+ results.field.push(col);
+ }
+ }
+ }
+ } |
| converts non-numerical values that should be numerical (22.3%[string] -> 0.223[float]) | var rep = /^([\d\.\-]+)\%$/;
+ $.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
+ var row = [];
+ for (var k in results.field) {
+ var col = results.field[k];
+ var _keyname = 'gsx$' + col;
+ var value = entry[_keyname]['$t']; |
| if labelled as % and value contains %, convert | if (colTypes[col] == 'percent') {
+ if (rep.test(value)) {
+ var value2 = rep.exec(value);
+ var value3 = parseFloat(value2);
+ value = value3 / 100;
+ }
+ }
+ row.push(value);
+ }
+ results.data.push(row);
+ });
+ return results;
+ }
+ });
+ recline.Model.backends['gdocs'] = new my.GDoc();
+
+}(jQuery, this.recline.Backend));
+
+ |
memory.js | |
|---|---|
this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) { | |
Memory Backend - uses in-memory data+ +To use it you should provide in your constructor data: + +
Example: + +
+ // Backend setup
+ var backend = recline.Backend.Memory();
+ backend.addDataset({
+ metadata: {
+ id: 'my-id',
+ title: 'My Title'
+ },
+ fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}],
+ documents: [
+ {id: 0, x: 1, y: 2, z: 3},
+ {id: 1, x: 2, y: 4, z: 6}
+ ]
+ });
+ // later ...
+ var dataset = Dataset({id: 'my-id'}, 'memory');
+ dataset.fetch();
+ etc ...
+ | my.Memory = Backbone.Model.extend({
+ initialize: function() {
+ this.datasets = {};
+ },
+ addDataset: function(data) {
+ this.datasets[data.metadata.id] = $.extend(true, {}, data);
+ },
+ sync: function(method, model, options) {
+ var self = this;
+ if (method === "read") {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Dataset') {
+ var rawDataset = this.datasets[model.id];
+ model.set(rawDataset.metadata);
+ model.fields.reset(rawDataset.fields);
+ model.docCount = rawDataset.documents.length;
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else if (method === 'update') {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Document') {
+ _.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
+ if(doc.id === model.id) {
+ self.datasets[model.dataset.id].documents[idx] = model.toJSON();
+ }
+ });
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else if (method === 'delete') {
+ var dfd = $.Deferred();
+ if (model.__type__ == 'Document') {
+ var rawDataset = self.datasets[model.dataset.id];
+ var newdocs = _.reject(rawDataset.documents, function(doc) {
+ return (doc.id === model.id);
+ });
+ rawDataset.documents = newdocs;
+ dfd.resolve(model);
+ }
+ return dfd.promise();
+ } else {
+ alert('Not supported: sync on Memory backend with method ' + method + ' and model ' + model);
+ }
+ },
+ query: function(model, queryObj) {
+ var numRows = queryObj.size;
+ var start = queryObj.from;
+ var dfd = $.Deferred();
+ results = this.datasets[model.id].documents; |
| not complete sorting! | _.each(queryObj.sort, function(sortObj) {
+ var fieldName = _.keys(sortObj)[0];
+ results = _.sortBy(results, function(doc) {
+ var _out = doc[fieldName];
+ return (sortObj[fieldName].order == 'asc') ? _out : -1*_out;
+ });
+ });
+ var results = results.slice(start, start+numRows);
+ dfd.resolve(results);
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['memory'] = new my.Memory();
+
+}(jQuery, this.recline.Backend));
+
+ |
webstore.js | |
|---|---|
this.recline = this.recline || {};
+this.recline.Backend = this.recline.Backend || {};
+
+(function($, my) { | |
Webstore Backend+ +Connecting to Webstores + +To use this backend ensure your Dataset has a webstore_url in its attributes. | my.Webstore = Backbone.Model.extend({
+ sync: function(method, model, options) {
+ if (method === "read") {
+ if (model.__type__ == 'Dataset') {
+ var base = model.get('webstore_url');
+ var schemaUrl = base + '/schema.json';
+ var jqxhr = $.ajax({
+ url: schemaUrl,
+ dataType: 'jsonp',
+ jsonp: '_callback'
+ });
+ var dfd = $.Deferred();
+ my.wrapInTimeout(jqxhr).done(function(schema) {
+ var fieldData = _.map(schema.data, function(item) {
+ item.id = item.name;
+ delete item.name;
+ return item;
+ });
+ model.fields.reset(fieldData);
+ model.docCount = schema.count;
+ dfd.resolve(model, jqxhr);
+ })
+ .fail(function(arguments) {
+ dfd.reject(arguments);
+ });
+ return dfd.promise();
+ }
+ }
+ },
+ query: function(model, queryObj) {
+ var base = model.get('webstore_url');
+ var data = {
+ _limit: queryObj.size
+ , _offset: queryObj.from
+ };
+ var jqxhr = $.ajax({
+ url: base + '.json',
+ data: data,
+ dataType: 'jsonp',
+ jsonp: '_callback',
+ cache: true
+ });
+ var dfd = $.Deferred();
+ jqxhr.done(function(results) {
+ dfd.resolve(results.data);
+ });
+ return dfd.promise();
+ }
+ });
+ recline.Model.backends['webstore'] = new my.Webstore();
+
+}(jQuery, this.recline.Backend));
+
+ |
model.js | |||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Recline Backbone Models | this.recline = this.recline || {};
+ |