backend.js

Recline Backends

Backends are connectors to external data sources and stores.

Backends are implemented as Backbone models, but this is just a convenience (they do not save or load themselves from any remote source).

this.recline = this.recline || {};
this.recline.Model = this.recline.Model || {};

(function($, my) {
  my.backends = {};

  Backbone.sync = function(method, model, options) {
    return my.backends[model.backendConfig.type].sync(method, model, options);
  };
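
As a sketch of the dispatch (illustrative only; it assumes the 'memory' backend registered below): any model that carries a backendConfig has its fetch, save and destroy calls delegated to the matching backend by the override above.

  // Hypothetical sketch of the routing; names and data are invented.
  var dataset = new Backbone.Model({id: 'demo'});
  dataset.__type__ = 'Dataset';
  dataset.backendConfig = {type: 'memory', data: {headers: ['x'], rows: [{id: 0, x: 1}]}};
  dataset.fetch();  // ends up in my.backends['memory'].sync('read', dataset, ...)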

BackendMemory - uses in-memory data

To use you should:

A. provide metadata as model data to the Dataset

B. Set backendConfig on your dataset with attributes:

  • type: 'memory'
  • data: hash with 2 keys:

    • headers: list of header names/labels
    • rows: list of hashes, each hash being one row. A row must have an id attribute which is unique.

    Example of data:

      {
          headers: ['x', 'y', 'z']
        , rows: [
            {id: 0, x: 1, y: 2, z: 3}
          , {id: 1, x: 2, y: 4, z: 6}
        ]
      };
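
Putting A and B together, a minimal usage sketch (the dataset id is invented; Dataset itself is defined in model.js):

  var dataset = new recline.Model.Dataset({id: 'demo'});
  dataset.backendConfig = {
    type: 'memory',
    data: {
      headers: ['x', 'y', 'z']
      , rows: [
          {id: 0, x: 1, y: 2, z: 3}
        , {id: 1, x: 2, y: 4, z: 6}
      ]
    }
  };
  // fetch routes through the Backbone.sync override to the 'read' branch below
  dataset.fetch().then(function() {
    console.log(dataset.get('headers'));  // ['x', 'y', 'z']
    console.log(dataset.docCount);        // 2
  });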
  my.BackendMemory = Backbone.Model.extend({
    sync: function(method, model, options) {
      var self = this;
      if (method === "read") {
        var dfd = $.Deferred();
        if (model.__type__ == 'Dataset') {
          var dataset = model;
          dataset.set({
            headers: dataset.backendConfig.data.headers
          });
          dataset.docCount = dataset.backendConfig.data.rows.length;
          dfd.resolve(dataset);
        }
        return dfd.promise();
      } else if (method === 'update') {
        var dfd = $.Deferred();
        if (model.__type__ == 'Document') {
          _.each(model.backendConfig.data.rows, function(row, idx) {
            if (row.id === model.id) {
              model.backendConfig.data.rows[idx] = model.toJSON();
            }
          });
          dfd.resolve(model);
        }
        return dfd.promise();
      } else if (method === 'delete') {
        var dfd = $.Deferred();
        if (model.__type__ == 'Document') {
          model.backendConfig.data.rows = _.reject(model.backendConfig.data.rows, function(row) {
            return (row.id === model.id);
          });
          dfd.resolve(model);
        }
        return dfd.promise();
      } else {
        alert('Not supported: sync on BackendMemory with method ' + method + ' and model ' + model);
      }
    },
    getDocuments: function(model, numRows, start) {
      if (start === undefined) {
        start = 0;
      }
      if (numRows === undefined) {
        numRows = 10;
      }
      var dfd = $.Deferred();
      // var was missing here, leaking `rows` into the global scope
      var rows = model.backendConfig.data.rows;
      var results = rows.slice(start, start + numRows);
      dfd.resolve(results);
      return dfd.promise();
    }
  });
  my.backends['memory'] = new my.BackendMemory();
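
The update and delete branches are reached through the same dispatch: a Document carrying the same backendConfig can be saved or destroyed. A hedged sketch (row data invented; Document is defined in model.js):

  var data = {
    headers: ['x', 'y']
    , rows: [{id: 0, x: 1, y: 2}]
  };
  var doc = new recline.Model.Document({id: 0, x: 1, y: 2});
  doc.backendConfig = {type: 'memory', data: data};
  doc.set({x: 100});
  doc.save();     // sync('update', ...) rewrites the matching row in data.rows
  doc.destroy();  // sync('delete', ...) removes the row with this id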

BackendWebstore

Connecting to Webstores.

To use this backend set backendConfig on your Dataset as:

{
  type: 'webstore',
  url: {url to relevant Webstore table}
}

  my.BackendWebstore = Backbone.Model.extend({
    sync: function(method, model, options) {
      if (method === "read") {
        if (model.__type__ == 'Dataset') {
          var dataset = model;
          var base = dataset.backendConfig.url;
          var schemaUrl = base + '/schema.json';
          var jqxhr = $.ajax({
            url: schemaUrl,
            dataType: 'jsonp',
            jsonp: '_callback'
          });
          var dfd = $.Deferred();
          jqxhr.then(function(schema) {
            // var was missing here, leaking `headers` into the global scope
            var headers = _.map(schema.data, function(item) {
              return item.name;
            });
            dataset.set({
              headers: headers
            });
            dataset.docCount = schema.count;
            dfd.resolve(dataset, jqxhr);
          });
          return dfd.promise();
        }
      }
    },
    getDocuments: function(model, numRows, start) {
      if (start === undefined) {
        start = 0;
      }
      if (numRows === undefined) {
        numRows = 10;
      }
      var base = model.backendConfig.url;
      var jqxhr = $.ajax({
        url: base + '.json?_limit=' + numRows,
        dataType: 'jsonp',
        jsonp: '_callback',
        cache: true
      });
      var dfd = $.Deferred();
      jqxhr.then(function(results) {
        dfd.resolve(results.data);
      });
      return dfd.promise();
    }
  });
  my.backends['webstore'] = new my.BackendWebstore();
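
A hedged usage sketch (the Webstore URL is invented for illustration):

  var dataset = new recline.Model.Dataset({id: 'webstore-demo'});
  dataset.backendConfig = {
    type: 'webstore',
    url: 'http://webstore.example.org/mydb/mytable'
  };
  // fetch reads {url}/schema.json for headers and docCount; getDocuments
  // then pages through {url}.json
  dataset.fetch().then(function() {
    console.log(dataset.get('headers'), dataset.docCount);
  });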

BackendDataProxy

For connecting to a DataProxy.

Set a Dataset to use this backend:

dataset.backendConfig = {
  // needed for dispatch in the Backbone.sync override above
  type: 'dataproxy',
  // required
  url: {url-of-data-to-proxy},
  format: csv | xls
}

When initializing the DataProxy backend you can set the following attributes:

  • dataproxy: {url-to-proxy} (optional). Defaults to http://jsonpdataproxy.appspot.com

Note that this is a read-only backend.

  my.BackendDataProxy = Backbone.Model.extend({
    defaults: {
      dataproxy: 'http://jsonpdataproxy.appspot.com'
    },
    sync: function(method, model, options) {
      if (method === "read") {
        if (model.__type__ == 'Dataset') {
          var dataset = model;
          var base = my.backends['dataproxy'].get('dataproxy');
TODO: should we cache this for extra efficiency?

          var data = {
            url: dataset.backendConfig.url
            , 'max-results': 1
            , type: dataset.backendConfig.format
          };
          var jqxhr = $.ajax({
            url: base
            , data: data
            , dataType: 'jsonp'
          });
          var dfd = $.Deferred();
          jqxhr.then(function(results) {
            dataset.set({
              headers: results.fields
            });
            dfd.resolve(dataset, jqxhr);
          });
          return dfd.promise();
        }
      } else {
        alert('This backend only supports read operations');
      }
    },
    getDocuments: function(dataset, numRows, start) {
      if (start === undefined) {
        start = 0;
      }
      if (numRows === undefined) {
        numRows = 10;
      }
      var base = my.backends['dataproxy'].get('dataproxy');
      var data = {
        url: dataset.backendConfig.url
        , 'max-results': numRows
        , type: dataset.backendConfig.format
      };
      var jqxhr = $.ajax({
        url: base
        , data: data
        , dataType: 'jsonp'
      });
      var dfd = $.Deferred();
      jqxhr.then(function(results) {
        // zip field names with the positional row values to build row objects
        var _out = _.map(results.data, function(row) {
          var tmp = {};
          _.each(results.fields, function(key, idx) {
            tmp[key] = row[idx];
          });
          return tmp;
        });
        dfd.resolve(_out);
      });
      return dfd.promise();
    }
  });
  my.backends['dataproxy'] = new my.BackendDataProxy();
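
A hedged usage sketch (the CSV URL is invented): point the backend at a remote CSV file and the proxy converts it to JSONP on the fly.

  var dataset = new recline.Model.Dataset({id: 'csv-demo'});
  dataset.backendConfig = {
    type: 'dataproxy',
    url: 'http://example.com/data/rainfall.csv',
    format: 'csv'
  };
  dataset.fetch().then(function() {
    dataset.getDocuments(5).then(function(docs) {
      console.log(docs.first().toJSON());
    });
  });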

Google spreadsheet backend

Connect to a Google Docs spreadsheet. Currently only read operations are supported.

  my.BackendGDoc = Backbone.Model.extend({
    sync: function(method, model, options) {
      if (method === "read") {
        var dfd = $.Deferred();
        var dataset = model;

        $.getJSON(model.backendConfig.url, function(d) {
          // var was missing here, leaking `result` into the global scope
          var result = my.backends['gdocs'].gdocsToJavascript(d);
          model.set({'headers': result.header});

Cache the data onto the dataset (it seems we have loaded the whole gdoc!)

          model._dataCache = result.data;
          dfd.resolve(model);
        });
        return dfd.promise();
      }
    },

    // note: both arguments are currently ignored (the whole spreadsheet is
    // already cached); the parameter order now matches the other backends
    getDocuments: function(dataset, numRows, start) {
      var dfd = $.Deferred();
      var fields = dataset.get('headers');

Zip the field headers with the data rows to produce JS objects. TODO: factor this out as a common method shared with the other backends.

      var objs = _.map(dataset._dataCache, function(d) {
        var obj = {};
        _.each(_.zip(fields, d), function(x) { obj[x[0]] = x[1]; });
        return obj;
      });
      dfd.resolve(objs);
      // return a promise like the other backends rather than the raw Deferred
      return dfd.promise();
    },
    gdocsToJavascript: function(gdocsSpreadsheet) {
      /*
         :options: (optional) optional argument dictionary:
           columnsToUse: list of columns to use (specified by header names)
           colTypes: dictionary (with column names as keys) specifying types
             (e.g. range, percent) for use in conversion.
         :return: tabular data object (a hash with keys: header and data).

         Issues: Google Docs seems to return columns within rows in random
         order, and it is not clear whether that order is even consistent
         across rows.
       */
      var options = {};
      if (arguments.length > 1) {
        options = arguments[1];
      }
      var results = {
        'header': [],
        'data': []
      };

By default there is no special info on the type of columns.

      var colTypes = {};
      if (options.colTypes) {
        colTypes = options.colTypes;
      }

Either extract the column headings from the spreadsheet directly, or use the supplied ones.

      if (options.columnsToUse) {

Columns are set to the supplied subset.

        results.header = options.columnsToUse;
      } else {

Otherwise, set the columns to use to all those available.

        if (gdocsSpreadsheet.feed.entry.length > 0) {
          for (var k in gdocsSpreadsheet.feed.entry[0]) {
            // keys of the form gsx$columnname hold the cell values
            if (k.substr(0, 3) == 'gsx') {
              var col = k.substr(4);
              results.header.push(col);
            }
          }
        }
      }

Convert non-numerical values that should be numerical (e.g. '22.3%' as a string becomes 0.223 as a float).

      var rep = /^([\d\.\-]+)\%$/;
      $.each(gdocsSpreadsheet.feed.entry, function(i, entry) {
        var row = [];
        for (var k in results.header) {
          var col = results.header[k];
          var _keyname = 'gsx$' + col;
          var value = entry[_keyname]['$t'];

If the column is labelled as a percent and the value contains %, convert it.

          if (colTypes[col] == 'percent') {
            if (rep.test(value)) {
              var value2 = rep.exec(value);
              // use the captured group; parseFloat(value2) on the whole match
              // array only worked by accident via string coercion
              var value3 = parseFloat(value2[1]);
              value = value3 / 100;
            }
          }
          row.push(value);
        }
        results.data.push(row);
      });
      return results;
    }
  });
  my.backends['gdocs'] = new my.BackendGDoc();
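
To make the conversion concrete, a hedged sketch of the feed shape gdocsToJavascript consumes and what it produces (the entry is heavily abbreviated; real feeds carry many more keys, and header order depends on key order in the first entry):

  var feed = {
    feed: {
      entry: [
        {'gsx$name': {'$t': 'alice'}, 'gsx$score': {'$t': '22.3%'}}
      ]
    }
  };
  var out = my.backends['gdocs'].gdocsToJavascript(feed, {colTypes: {score: 'percent'}});
  // out is {header: ['name', 'score'], data: [['alice', 0.223]]}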

}(jQuery, this.recline.Model));

model.js

Recline Backbone Models

this.recline = this.recline || {};
this.recline.Model = this.recline.Model || {};

(function($, my) {

A Dataset model

Other than the standard list of Backbone attributes, it has two important attributes:

  • currentDocuments: a DocumentList containing the Documents we have currently loaded for viewing (you update currentDocuments by calling getDocuments)
  • docCount: total number of documents in this dataset (obtained on a fetch for this Dataset)
  my.Dataset = Backbone.Model.extend({
    __type__: 'Dataset',
    initialize: function(options) {
      this.currentDocuments = new my.DocumentList();
      this.docCount = null;
      this.backend = null;
    },

getDocuments


AJAX method with promise API to get rows (documents) from the backend.

The resulting DocumentList is used to reset this.currentDocuments and is also returned.


:param numRows: passed onto backend getDocuments.

:param start: passed onto backend getDocuments.

This does not fit very well with the Backbone setup. Backbone really expects you to know the ids of the objects you are fetching (which you do in the classic RESTful ajax-y world), but that paradigm does not fit well with the data setup we have here. This also illustrates the limitations of separating the Dataset and the Backend.

    getDocuments: function(numRows, start) {
      var self = this;
      var backend = my.backends[this.backendConfig.type];
      var dfd = $.Deferred();
      backend.getDocuments(this, numRows, start).then(function(rows) {
        var docs = _.map(rows, function(row) {
          var _doc = new my.Document(row);
          _doc.backendConfig = self.backendConfig;
          _doc.backend = backend;
          return _doc;
        });
        self.currentDocuments.reset(docs);
        dfd.resolve(self.currentDocuments);
      });
      return dfd.promise();
    },

    toTemplateJSON: function() {
      var data = this.toJSON();
      data.docCount = this.docCount;
      return data;
    }
  });
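
A short usage sketch tying this together (the backendConfig follows the memory-backend conventions documented in backend.js; the id is invented):

  var dataset = new my.Dataset({id: 'demo'});
  dataset.backendConfig = {
    type: 'memory',
    data: {headers: ['x'], rows: [{id: 0, x: 1}]}
  };
  dataset.fetch().then(function() {
    dataset.getDocuments(10, 0).then(function(docs) {
      // docs === dataset.currentDocuments, a DocumentList of Documents
      console.log(docs.length + ' of ' + dataset.docCount + ' documents loaded');
    });
  });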

A Document (aka Row)


A single entry or row in the dataset

  my.Document = Backbone.Model.extend({
    __type__: 'Document'
  });

A Backbone collection of Documents

  my.DocumentList = Backbone.Collection.extend({
    __type__: 'DocumentList',
    model: my.Document
  });
}(jQuery, this.recline.Model));
 
 