diff --git a/README.md b/README.md index e6260030..d9e1d58f 100755 --- a/README.md +++ b/README.md @@ -15,7 +15,17 @@ A simple but powerful library for building data applications in pure Javascript ## Developer Notes -Running the tests by opening `test/index.html` in your browser. +Run the tests by opening `test/index.html` in your browser. + +Note that the demos and documentation utilize the [jekyll templating +system][jekyll] and to use them *locally* you will need to build them using +jekyll. Once installed, all you need to do from the command line is run jekyll: + + jekyll + +[jekyll]: https://github.com/mojombo/jekyll + +Notes on the architecture can be found in the [documentation online](http://okfnlabs.org/recline). ### Contributing @@ -43,6 +53,16 @@ For larger changes: * Update `/_includes/recline-deps.html` if you change required files (e.g. leaflet libraries) * Try to build the demos in `/demos/` with jekyll and then check out the `/demos/multiview/` which utilizes most aspects of Recline +### Contributors + +* [Rufus Pollock](http://rufuspollock.org/) +* [Max Ogden](http://maxogden.com/) +* [John Glover](https://github.com/johnglover) +* [James Casbon](http://casbon.me/) +* [AdriĆ Mercader](http://amercader.net/) +* [Dominik Moritz](https://github.com/domoritz) +* [Friedrich Lindenberg](http://pudo.org/) +* And [many more](https://github.com/okfn/recline/graphs/contributors) ## Changelog @@ -56,6 +76,8 @@ Possible breaking changes * Added marker clustering in map view to handle a large number of markers * Dataset.restore method removed (not used internally except from Multiview.restore) * Views no longer call render in initialize but must be called client code +* Backend.Memory.Store attribute for holding 'records' renamed to `records` from `data` +* Require new underscore.deferred vendor library for all use (jQuery no longer required if just using recline.dataset.js) ### v0.5 - July 5th 2012 (first public release) diff --git a/_includes/recline-deps.html b/_includes/recline-deps.html index af8baeeb..84d43041 100644 --- a/_includes/recline-deps.html +++ b/_includes/recline-deps.html @@ -14,6 +14,7 @@ + @@ -22,10 +23,15 @@ + + + @@ -61,6 +67,7 @@ + diff --git a/css/flot.css b/css/flot.css new file mode 100644 index 00000000..03f21e56 --- /dev/null +++ b/css/flot.css @@ -0,0 +1,26 @@ +.recline-graph .graph { + height: 500px; + overflow: hidden; +} + +.recline-graph .legend table { + width: auto; + margin-bottom: 0; +} + +.recline-graph .legend td { + padding: 5px; + line-height: 13px; +} + +.recline-graph .graph .alert { + width: 450px; +} + +#recline-graph-tooltip { + position: absolute; + background-color: #FEE !important; + color: #000000 !important; + opacity: 0.8 !important; + border: 1px solid #fdd !important; +} diff --git a/dist/recline.css b/dist/recline.css index 6a7f23d7..610a56a7 100644 --- a/dist/recline.css +++ b/dist/recline.css @@ -1,3 +1,29 @@ +.recline-graph .graph { + height: 500px; + overflow: hidden; +} + +.recline-graph .legend table { + width: auto; + margin-bottom: 0; +} + +.recline-graph .legend td { + padding: 5px; + line-height: 13px; +} + +.recline-graph .graph .alert { + width: 450px; +} + +#recline-graph-tooltip { + position: absolute; + background-color: #FEE !important; + color: #000000 !important; + opacity: 0.8 !important; + border: 1px solid #fdd !important; +} .recline-graph .graph { height: 500px; } diff --git a/dist/recline.dataset.js b/dist/recline.dataset.js index 963cac53..23fce729 100644 --- a/dist/recline.dataset.js +++ 
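The `records` rename listed in the changelog above is the main breaking change for custom code that pokes at the memory store directly. A minimal sketch of the before/after, assuming underscore, underscore.deferred and the Recline dataset bundle are loaded (the sample records are made up):

```javascript
// Build an in-memory store; fields are inferred from the first record.
var store = new recline.Backend.Memory.Store([
  {id: 1, name: 'alice'},
  {id: 2, name: 'bob'}
]);

// v0.6: the canonical attribute is `records` ...
console.log(store.records.length);          // 2

// ... while the old v0.5 name `data` is kept as an alias set at construction
// time, so existing code keeps working until it is migrated.
console.log(store.data === store.records);  // true
```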
b/dist/recline.dataset.js @@ -2,7 +2,9 @@ this.recline = this.recline || {}; this.recline.Model = this.recline.Model || {}; -(function($, my) { +(function(my) { + +var Deferred = _.isUndefined(this.jQuery) ? _.Deferred : jQuery.Deferred; // ## Dataset my.Dataset = Backbone.Model.extend({ @@ -47,7 +49,7 @@ my.Dataset = Backbone.Model.extend({ // Retrieve dataset and (some) records from the backend. fetch: function() { var self = this; - var dfd = $.Deferred(); + var dfd = new Deferred(); if (this.backend !== recline.Backend.Memory) { this.backend.fetch(this.toJSON()) @@ -181,7 +183,7 @@ my.Dataset = Backbone.Model.extend({ // also returned. query: function(queryObj) { var self = this; - var dfd = $.Deferred(); + var dfd = new Deferred(); this.trigger('query:start'); if (queryObj) { @@ -245,7 +247,7 @@ my.Dataset = Backbone.Model.extend({ this.fields.each(function(field) { query.addFacet(field.id); }); - var dfd = $.Deferred(); + var dfd = new Deferred(); this._store.query(query.toJSON(), this.toJSON()).done(function(queryResult) { if (queryResult.facets) { _.each(queryResult.facets, function(facetResult, facetId) { @@ -585,13 +587,13 @@ Backbone.sync = function(method, model, options) { return model.backend.sync(method, model, options); }; -}(jQuery, this.recline.Model)); +}(this.recline.Model)); this.recline = this.recline || {}; this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.Memory = this.recline.Backend.Memory || {}; -(function($, my) { +(function(my) { my.__type__ = 'memory'; // ## Data Wrapper @@ -600,42 +602,44 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; // functionality like querying, faceting, updating (by ID) and deleting (by // ID). // - // @param data list of hashes for each record/row in the data ({key: + // @param records list of hashes for each record/row in the data ({key: // value, key: value}) // @param fields (optional) list of field hashes (each hash defining a field // as per recline.Model.Field). If fields not specified they will be taken // from the data. - my.Store = function(data, fields) { + my.Store = function(records, fields) { var self = this; - this.data = data; + this.records = records; + // backwards compatability (in v0.5 records was named data) + this.data = this.records; if (fields) { this.fields = fields; } else { - if (data) { - this.fields = _.map(data[0], function(value, key) { + if (records) { + this.fields = _.map(records[0], function(value, key) { return {id: key, type: 'string'}; }); } } this.update = function(doc) { - _.each(self.data, function(internalDoc, idx) { + _.each(self.records, function(internalDoc, idx) { if(doc.id === internalDoc.id) { - self.data[idx] = doc; + self.records[idx] = doc; } }); }; this.remove = function(doc) { - var newdocs = _.reject(self.data, function(internalDoc) { + var newdocs = _.reject(self.records, function(internalDoc) { return (doc.id === internalDoc.id); }); - this.data = newdocs; + this.records = newdocs; }; this.save = function(changes, dataset) { var self = this; - var dfd = $.Deferred(); + var dfd = new _.Deferred(); // TODO _.each(changes.creates) { ... 
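With the `Deferred` shim above, recline.dataset.js can be driven without jQuery as long as underscore.deferred is present. A minimal sketch of the promise-based flow (record and field names are illustrative, and the exact result properties may vary by version):

```javascript
// Assumes underscore, underscore.deferred, backbone and recline.dataset.js
// are loaded; jQuery is not needed for this path.
var dataset = new recline.Model.Dataset({
  records: [
    {id: 0, country: 'DE', value: 10},
    {id: 1, country: 'UK', value: 15},
    {id: 2, country: 'UK', value: 3}
  ]
});

// fetch() wires up the in-memory store and returns a deferred/promise.
dataset.fetch().done(function() {
  // Field definitions were inferred from the first record.
  console.log(dataset.fields.pluck('id'));

  // query() is also promise-based; matching records end up in dataset.records.
  dataset.query({q: 'UK', from: 0, size: 10}).done(function() {
    console.log(dataset.recordCount);
  });
});
```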
} _.each(changes.updates, function(record) { self.update(record); @@ -648,10 +652,10 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; }, this.query = function(queryObj) { - var dfd = $.Deferred(); - var numRows = queryObj.size || this.data.length; + var dfd = new _.Deferred(); + var numRows = queryObj.size || this.records.length; var start = queryObj.from || 0; - var results = this.data; + var results = this.records; results = this._applyFilters(results, queryObj); results = this._applyFreeTextQuery(results, queryObj); @@ -816,11 +820,11 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; }; this.transform = function(editFunc) { - var dfd = $.Deferred(); + var dfd = new _.Deferred(); // TODO: should we clone before mapping? Do not see the point atm. - self.data = _.map(self.data, editFunc); + self.records = _.map(self.records, editFunc); // now deal with deletes (i.e. nulls) - self.data = _.filter(self.data, function(record) { + self.records = _.filter(self.records, function(record) { return record != null; }); dfd.resolve(); @@ -828,4 +832,4 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; }; }; -}(jQuery, this.recline.Backend.Memory)); +}(this.recline.Backend.Memory)); diff --git a/dist/recline.js b/dist/recline.js index 3f0591ba..192b861b 100644 --- a/dist/recline.js +++ b/dist/recline.js @@ -2,7 +2,7 @@ this.recline = this.recline || {}; this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.Ckan = this.recline.Backend.Ckan || {}; -(function($, my) { +(function(my) { // ## CKAN Backend // // This provides connection to the CKAN DataStore (v2) @@ -41,7 +41,7 @@ this.recline.Backend.Ckan = this.recline.Backend.Ckan || {}; dataset.id = out.resource_id; var wrapper = my.DataStore(out.endpoint); } - var dfd = $.Deferred(); + var dfd = new _.Deferred(); var jqxhr = wrapper.search({resource_id: dataset.id, limit: 0}); jqxhr.done(function(results) { // map ckan types to our usual types ... @@ -84,7 +84,7 @@ this.recline.Backend.Ckan = this.recline.Backend.Ckan || {}; var wrapper = my.DataStore(out.endpoint); } var actualQuery = my._normalizeQuery(queryObj, dataset); - var dfd = $.Deferred(); + var dfd = new _.Deferred(); var jqxhr = wrapper.search(actualQuery); jqxhr.done(function(results) { var out = { @@ -107,7 +107,7 @@ this.recline.Backend.Ckan = this.recline.Backend.Ckan || {}; }; that.search = function(data) { var searchUrl = that.endpoint + '/3/action/datastore_search'; - var jqxhr = $.ajax({ + var jqxhr = jQuery.ajax({ url: searchUrl, data: data, dataType: 'json' @@ -136,13 +136,14 @@ this.recline.Backend.Ckan = this.recline.Backend.Ckan || {}; 'float8': 'float' }; -}(jQuery, this.recline.Backend.Ckan)); +}(this.recline.Backend.Ckan)); this.recline = this.recline || {}; this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.CSV = this.recline.Backend.CSV || {}; // Note that provision of jQuery is optional (it is **only** needed if you use fetch on a remote file) -(function(my, $) { +(function(my) { + my.__type__ = 'csv'; // ## fetch // @@ -150,7 +151,7 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {}; // // 1. `dataset.file`: `file` is an HTML5 file object. This is opened and parsed with the CSV parser. // 2. `dataset.data`: `data` is a string in CSV format. This is passed directly to the CSV parser - // 3. `dataset.url`: a url to an online CSV file that is ajax accessible (note this usually requires either local or on a server that is CORS enabled). 
The file is then loaded using $.ajax and parsed using the CSV parser (NB: this requires jQuery) + // 3. `dataset.url`: a url to an online CSV file that is ajax accessible (note this usually requires either local or on a server that is CORS enabled). The file is then loaded using jQuery.ajax and parsed using the CSV parser (NB: this requires jQuery) // // All options generates similar data and use the memory store outcome, that is they return something like: // @@ -162,7 +163,7 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {}; // } // my.fetch = function(dataset) { - var dfd = $.Deferred(); + var dfd = new _.Deferred(); if (dataset.file) { var reader = new FileReader(); var encoding = dataset.encoding || 'UTF-8'; @@ -187,7 +188,7 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {}; useMemoryStore: true }); } else if (dataset.url) { - $.get(dataset.url).done(function(data) { + jQuery.get(dataset.url).done(function(data) { var rows = my.parseCSV(data, dataset); dfd.resolve({ records: rows, @@ -424,12 +425,12 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {}; } -}(this.recline.Backend.CSV, jQuery)); +}(this.recline.Backend.CSV)); this.recline = this.recline || {}; this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {}; -(function($, my) { +(function(my) { my.__type__ = 'dataproxy'; // URL for the dataproxy my.dataproxy_url = 'http://jsonpdataproxy.appspot.com'; @@ -448,12 +449,12 @@ this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {}; 'max-results': dataset.size || dataset.rows || 1000, type: dataset.format || '' }; - var jqxhr = $.ajax({ + var jqxhr = jQuery.ajax({ url: my.dataproxy_url, data: data, dataType: 'jsonp' }); - var dfd = $.Deferred(); + var dfd = new _.Deferred(); _wrapInTimeout(jqxhr).done(function(results) { if (results.error) { dfd.reject(results.error); @@ -477,7 +478,7 @@ this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {}; // Many of backends use JSONP and so will not get error messages and this is // a crude way to catch those errors. 
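The `_wrapInTimeout` helper that follows exists because a JSONP request that never answers simply hangs. The same guard can be reused around any promise; a sketch assuming an underscore.deferred-style `_.Deferred` is available (`withTimeout` is an illustrative name, not part of Recline):

```javascript
// Reject if the wrapped request has not settled after `ms` milliseconds;
// otherwise pass its result (or error) straight through.
var withTimeout = function(promise, ms) {
  var dfd = new _.Deferred();
  var timer = setTimeout(function() {
    dfd.reject({
      message: 'Request Error: backend did not respond after ' + (ms / 1000) + ' seconds'
    });
  }, ms);
  promise.done(function(result) {
    clearTimeout(timer);
    dfd.resolve(result);
  });
  promise.fail(function(error) {
    clearTimeout(timer);
    dfd.reject(error);
  });
  return dfd.promise();
};
```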
var _wrapInTimeout = function(ourFunction) { - var dfd = $.Deferred(); + var dfd = new _.Deferred(); var timer = setTimeout(function() { dfd.reject({ message: 'Request Error: Backend did not respond after ' + (my.timeout / 1000) + ' seconds' @@ -495,7 +496,7 @@ this.recline.Backend.DataProxy = this.recline.Backend.DataProxy || {}; return dfd.promise(); } -}(jQuery, this.recline.Backend.DataProxy)); +}(this.recline.Backend.DataProxy)); this.recline = this.recline || {}; this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {}; @@ -677,7 +678,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {}; // ### fetch my.fetch = function(dataset) { var es = new my.Wrapper(dataset.url, my.esOptions); - var dfd = $.Deferred(); + var dfd = new _.Deferred(); es.mapping().done(function(schema) { if (!schema){ @@ -705,7 +706,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {}; my.save = function(changes, dataset) { var es = new my.Wrapper(dataset.url, my.esOptions); if (changes.creates.length + changes.updates.length + changes.deletes.length > 1) { - var dfd = $.Deferred(); + var dfd = new _.Deferred(); msg = 'Saving more than one item at a time not yet supported'; alert(msg); dfd.reject(msg); @@ -723,7 +724,7 @@ this.recline.Backend.ElasticSearch = this.recline.Backend.ElasticSearch || {}; // ### query my.query = function(queryObj, dataset) { - var dfd = $.Deferred(); + var dfd = new _.Deferred(); var es = new my.Wrapper(dataset.url, my.esOptions); var jqxhr = es.query(queryObj); jqxhr.done(function(results) { @@ -782,7 +783,7 @@ this.recline = this.recline || {}; this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; -(function($, my) { +(function(my) { my.__type__ = 'gdocs'; // ## Google spreadsheet backend @@ -809,15 +810,15 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; // * fields: array of Field objects // * records: array of objects for each row my.fetch = function(dataset) { - var dfd = $.Deferred(); + var dfd = new _.Deferred(); var urls = my.getGDocsAPIUrls(dataset.url); // TODO cover it with tests // get the spreadsheet title (function () { - var titleDfd = $.Deferred(); + var titleDfd = new _.Deferred(); - $.getJSON(urls.spreadsheet, function (d) { + jQuery.getJSON(urls.spreadsheet, function (d) { titleDfd.resolve({ spreadsheetTitle: d.feed.title.$t }); @@ -827,7 +828,7 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; }()).then(function (response) { // get the actual worksheet data - $.getJSON(urls.worksheet, function(d) { + jQuery.getJSON(urls.worksheet, function(d) { var result = my.parseData(d); var fields = _.map(result.fields, function(fieldId) { return {id: fieldId}; @@ -941,12 +942,12 @@ this.recline.Backend.GDocs = this.recline.Backend.GDocs || {}; return urls; }; -}(jQuery, this.recline.Backend.GDocs)); +}(this.recline.Backend.GDocs)); this.recline = this.recline || {}; this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.Memory = this.recline.Backend.Memory || {}; -(function($, my) { +(function(my) { my.__type__ = 'memory'; // ## Data Wrapper @@ -955,42 +956,44 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; // functionality like querying, faceting, updating (by ID) and deleting (by // ID). 
// - // @param data list of hashes for each record/row in the data ({key: + // @param records list of hashes for each record/row in the data ({key: // value, key: value}) // @param fields (optional) list of field hashes (each hash defining a field // as per recline.Model.Field). If fields not specified they will be taken // from the data. - my.Store = function(data, fields) { + my.Store = function(records, fields) { var self = this; - this.data = data; + this.records = records; + // backwards compatability (in v0.5 records was named data) + this.data = this.records; if (fields) { this.fields = fields; } else { - if (data) { - this.fields = _.map(data[0], function(value, key) { + if (records) { + this.fields = _.map(records[0], function(value, key) { return {id: key, type: 'string'}; }); } } this.update = function(doc) { - _.each(self.data, function(internalDoc, idx) { + _.each(self.records, function(internalDoc, idx) { if(doc.id === internalDoc.id) { - self.data[idx] = doc; + self.records[idx] = doc; } }); }; this.remove = function(doc) { - var newdocs = _.reject(self.data, function(internalDoc) { + var newdocs = _.reject(self.records, function(internalDoc) { return (doc.id === internalDoc.id); }); - this.data = newdocs; + this.records = newdocs; }; this.save = function(changes, dataset) { var self = this; - var dfd = $.Deferred(); + var dfd = new _.Deferred(); // TODO _.each(changes.creates) { ... } _.each(changes.updates, function(record) { self.update(record); @@ -1003,10 +1006,10 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; }, this.query = function(queryObj) { - var dfd = $.Deferred(); - var numRows = queryObj.size || this.data.length; + var dfd = new _.Deferred(); + var numRows = queryObj.size || this.records.length; var start = queryObj.from || 0; - var results = this.data; + var results = this.records; results = this._applyFilters(results, queryObj); results = this._applyFreeTextQuery(results, queryObj); @@ -1171,11 +1174,11 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; }; this.transform = function(editFunc) { - var dfd = $.Deferred(); + var dfd = new _.Deferred(); // TODO: should we clone before mapping? Do not see the point atm. - self.data = _.map(self.data, editFunc); + self.records = _.map(self.records, editFunc); // now deal with deletes (i.e. 
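The `transform` method shown here maps an edit function over every record and then drops any record the function turned into `null`. A small sketch of how client code can use that to bulk-edit and delete (the edit rule is invented, and underscore.deferred is assumed for the returned promise):

```javascript
var store = new recline.Backend.Memory.Store([
  {id: 1, price: '10'},
  {id: 2, price: 'n/a'},
  {id: 3, price: '25'}
]);

// Coerce prices to numbers; returning null deletes the record, because
// transform filters out nulls after mapping.
store.transform(function(record) {
  var price = parseFloat(record.price);
  if (isNaN(price)) {
    return null;
  }
  return {id: record.id, price: price};
}).done(function() {
  console.log(store.records.length);   // 2
});
```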
nulls) - self.data = _.filter(self.data, function(record) { + self.records = _.filter(self.records, function(record) { return record != null; }); dfd.resolve(); @@ -1183,7 +1186,7 @@ this.recline.Backend.Memory = this.recline.Backend.Memory || {}; }; }; -}(jQuery, this.recline.Backend.Memory)); +}(this.recline.Backend.Memory)); this.recline = this.recline || {}; this.recline.Backend = this.recline.Backend || {}; this.recline.Backend.Solr = this.recline.Backend.Solr || {}; @@ -1204,7 +1207,7 @@ this.recline.Backend.Solr = this.recline.Backend.Solr || {}; dataType: 'jsonp', jsonp: 'json.wrf' }); - var dfd = $.Deferred(); + var dfd = new _.Deferred(); jqxhr.done(function(results) { // if we get 0 results we cannot get fields var fields = [] @@ -1237,7 +1240,7 @@ this.recline.Backend.Solr = this.recline.Backend.Solr || {}; dataType: 'jsonp', jsonp: 'json.wrf' }); - var dfd = $.Deferred(); + var dfd = new _.Deferred(); jqxhr.done(function(results) { var out = { total: results.response.numFound, @@ -1386,7 +1389,9 @@ if (!('some' in Array.prototype)) { this.recline = this.recline || {}; this.recline.Model = this.recline.Model || {}; -(function($, my) { +(function(my) { + +var Deferred = _.isUndefined(this.jQuery) ? _.Deferred : jQuery.Deferred; // ## Dataset my.Dataset = Backbone.Model.extend({ @@ -1431,7 +1436,7 @@ my.Dataset = Backbone.Model.extend({ // Retrieve dataset and (some) records from the backend. fetch: function() { var self = this; - var dfd = $.Deferred(); + var dfd = new Deferred(); if (this.backend !== recline.Backend.Memory) { this.backend.fetch(this.toJSON()) @@ -1565,7 +1570,7 @@ my.Dataset = Backbone.Model.extend({ // also returned. query: function(queryObj) { var self = this; - var dfd = $.Deferred(); + var dfd = new Deferred(); this.trigger('query:start'); if (queryObj) { @@ -1629,7 +1634,7 @@ my.Dataset = Backbone.Model.extend({ this.fields.each(function(field) { query.addFacet(field.id); }); - var dfd = $.Deferred(); + var dfd = new Deferred(); this._store.query(query.toJSON(), this.toJSON()).done(function(queryResult) { if (queryResult.facets) { _.each(queryResult.facets, function(facetResult, facetId) { @@ -1969,7 +1974,7 @@ Backbone.sync = function(method, model, options) { return model.backend.sync(method, model, options); }; -}(jQuery, this.recline.Model)); +}(this.recline.Model)); /*jshint multistr:true */ @@ -1978,6 +1983,508 @@ this.recline.View = this.recline.View || {}; (function($, my) { +// ## Graph view for a Dataset using Flot graphing library. +// +// Initialization arguments (in a hash in first parameter): +// +// * model: recline.Model.Dataset +// * state: (optional) configuration hash of form: +// +// { +// group: {column name for x-axis}, +// series: [{column name for series A}, {column name series B}, ... ], +// graphType: 'line', +// graphOptions: {custom [flot options]} +// } +// +// NB: should *not* provide an el argument to the view but must let the view +// generate the element itself (you can then append view.el to the DOM. +my.Flot = Backbone.View.extend({ + template: ' \ +
There\'s no graph here yet because we don\'t know what fields you\'d like to see plotted.
\ +Please tell us by using the menu on the right and a graph will automatically appear.
\ +
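Putting the new graph view together with a dataset looks roughly like this. This is a sketch only: it assumes jQuery, Flot, the full recline.js build and its CSS are loaded, and the column names are invented.

```javascript
var dataset = new recline.Model.Dataset({
  records: [
    {date: '2012-01-01', price: 12},
    {date: '2012-02-01', price: 17},
    {date: '2012-03-01', price: 9}
  ]
});

dataset.fetch().done(function() {
  var graph = new recline.View.Flot({
    model: dataset,
    state: {
      group: 'date',       // column used for the x-axis
      series: ['price'],   // one plotted series per listed column
      graphType: 'line'
    }
  });
  // Per the note above: do not pass an `el`; let the view build its own
  // element and attach it yourself.
  jQuery('.my-graph-container').append(graph.el);
  // As of v0.6 views no longer render from initialize, so call render here.
  graph.render();
});
```

The optional `graphOptions` key in the same state hash passes custom Flot options straight through, as the view's doc comment above notes.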