diff --git a/demo/built.html b/app/built.html
similarity index 100%
rename from demo/built.html
rename to app/built.html
diff --git a/demo/images/bg_gradient.gif b/app/images/bg_gradient.gif
similarity index 100%
rename from demo/images/bg_gradient.gif
rename to app/images/bg_gradient.gif
diff --git a/demo/images/couch.png b/app/images/couch.png
similarity index 100%
rename from demo/images/couch.png
rename to app/images/couch.png
diff --git a/demo/images/large-spinner.gif b/app/images/large-spinner.gif
similarity index 100%
rename from demo/images/large-spinner.gif
rename to app/images/large-spinner.gif
diff --git a/demo/index.html b/app/index.html
similarity index 94%
rename from demo/index.html
rename to app/index.html
index bfd45a5f..0a11e6cd 100644
--- a/demo/index.html
+++ b/app/index.html
@@ -2,8 +2,8 @@
- Recline Data Explorer Demo
-
+ Recline Data Explorer
+
@@ -49,7 +49,7 @@
diff --git a/demo/js/app.js b/app/js/app.js
similarity index 95%
rename from demo/js/app.js
rename to app/js/app.js
index e99abcae..9f17f845 100755
--- a/demo/js/app.js
+++ b/app/js/app.js
@@ -17,12 +17,6 @@ $(function() {
// setup the loader menu in top bar
setupLoader(createExplorer);
-
- // set up readonly enabling in top bar
- $('a.set-read-only').click(function() {
- window.dataExplorer.setReadOnly();
- alert('Read-only mode set');
- });
});
// make Explorer creation / initialization in a function so we can call it
diff --git a/demo/style/demo.css b/app/style/demo.css
similarity index 100%
rename from demo/style/demo.css
rename to app/style/demo.css
diff --git a/demo b/demo
new file mode 120000
index 00000000..f83b1b63
--- /dev/null
+++ b/demo
@@ -0,0 +1 @@
+app/
\ No newline at end of file
diff --git a/demo/style/images/down-arrow.png b/demo/style/images/down-arrow.png
deleted file mode 100755
index f172a162..00000000
Binary files a/demo/style/images/down-arrow.png and /dev/null differ
diff --git a/demo/style/images/edit-map.png b/demo/style/images/edit-map.png
deleted file mode 100755
index dea0ed1e..00000000
Binary files a/demo/style/images/edit-map.png and /dev/null differ
diff --git a/demo/style/images/loader-blue.gif b/demo/style/images/loader-blue.gif
deleted file mode 100755
index 45b49052..00000000
Binary files a/demo/style/images/loader-blue.gif and /dev/null differ
diff --git a/demo/style/images/loader.gif b/demo/style/images/loader.gif
deleted file mode 100755
index 4a16dea3..00000000
Binary files a/demo/style/images/loader.gif and /dev/null differ
diff --git a/demo/style/images/menu-dropdown.png b/demo/style/images/menu-dropdown.png
deleted file mode 100755
index c733fef7..00000000
Binary files a/demo/style/images/menu-dropdown.png and /dev/null differ
diff --git a/docs/backend/base.html b/docs/backend/base.html
index b4da1482..61eab0d5 100644
--- a/docs/backend/base.html
+++ b/docs/backend/base.html
@@ -1,37 +1,107 @@
- base.js
Crude way to catch backend errors
-Many backends use JSONP and so will not get error messages; this is
-a crude way to catch those errors.
my.wrapInTimeout = function(ourFunction) {
-  var dfd = $.Deferred();
-  var timeout = 5000;
-  var timer = setTimeout(function() {
-    dfd.reject({
-      message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds'
+
Base class for backends providing a template and convenience functions.
+You do not have to inherit from this class, but even if you do not, it provides guidance on the functions you must implement.
+
+
Note also that while this (and other Backends) are implemented as Backbone models, this is just a convenience.
An implementation of Backbone.sync that will be used to override
+Backbone.sync on operations for Datasets and Documents which are using this backend.
+
+
For read-only implementations you need only implement the read method
+for Dataset models (and even this can be a null operation). The read method
+should return relevant metadata for the Dataset. We do not require read support
+for Documents because they are loaded in bulk by the query method.
+
+
For backends supporting write operations you must implement update and delete support for Document objects.
+
+
All code paths should return an object conforming to the jQuery promise API.
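For illustration only, here is a minimal read-only backend sketch along the lines described above; the name ExampleBackend and its behaviour are assumptions, not part of this changeset:

// Hypothetical sketch: a read-only backend overriding sync and returning a
// jQuery promise. ExampleBackend is an illustrative name.
my.ExampleBackend = my.Base.extend({
  sync: function(method, model, options) {
    var dfd = $.Deferred();
    if (method === 'read') {
      // a real backend would fetch the Dataset's metadata/fields here before resolving
      dfd.resolve(model);
    } else {
      dfd.reject({message: 'Write operations are not supported by this backend'});
    }
    return dfd.promise();
  }
});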
Query the backend for documents, returning them in bulk. This method will
+be used by the Dataset.query method to search the backend for documents,
+retrieving the results in bulk.
@param {Object} queryObj: object describing a query (usually produced by
+using recline.Model.Query and calling toJSON on it).
+
+
The structure of data in the Query object or
+Hash should follow that defined in issue 34.
+(Of course, if you are writing your own backend, and hence
+have control over the interpretation of the query object, you
+can use whatever structure you like).
+
+
@returns {Promise} promise API object. The promise resolve method will
+be called on query completion with a QueryResult object.
+
+
A QueryResult has the following structure (modelled closely on
+ElasticSearch - see this issue for more
+details):
+
+
+{
+ total: // (required) total number of results (can be null)
+ hits: [ // (required) one entry for each result document
+ {
+ _score: // (optional) match score for document
+ _type: // (optional) document type
+ _source: // (required) document/row object
+ }
+ ],
+ facets: { // (optional)
+ // facet results (as per )
+ }
+}
+
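Continuing the hypothetical ExampleBackend sketch above, a query implementation resolving with a QueryResult of the shape just described (the hard-coded rows are purely illustrative):

// Hypothetical sketch: resolve the query promise with a QueryResult as above.
my.ExampleBackend.prototype.query = function(dataset, queryObj) {
  var rows = [ {date: '2011-01-01', value: 3}, {date: '2011-02-01', value: 5} ];  // illustrative data
  var hits = _.map(rows, function(row) {
    return { _source: row };  // _source is the required document/row object
  });
  var dfd = $.Deferred();
  dfd.resolve({ total: hits.length, hits: hits });
  return dfd.promise();
};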
Convenience method providing a crude way to catch backend errors on JSONP calls.
+Many backends use JSONP and so will not get error messages; this is
+a crude way to catch those errors.
\ No newline at end of file
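A usage sketch for the timeout wrapper described above; the JSONP request shown and the searchUrl variable are assumptions for illustration:

// Hypothetical usage: wrap a JSONP request so that a non-responding backend
// surfaces as a rejected promise rather than silence.
var jqxhr = $.ajax({ url: searchUrl, dataType: 'jsonp' });  // searchUrl is illustrative
my.wrapInTimeout(jqxhr)
  .done(function(results) { /* use the results */ })
  .fail(function(error) { console.log(error.message); });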
diff --git a/docs/backend/dataproxy.html b/docs/backend/dataproxy.html
index d232ef91..303d2286 100644
--- a/docs/backend/dataproxy.html
+++ b/docs/backend/dataproxy.html
@@ -1,4 +1,4 @@
- dataproxy.js
This should point to the ES type URL, e.g. for ES running on
localhost:9200 with index twitter and type tweet it would be
-
http://localhost:9200/twitter/tweet
my.ElasticSearch = Backbone.Model.extend({
+
http://localhost:9200/twitter/tweet
my.ElasticSearch = my.Base.extend({
  _getESUrl: function(dataset) {
    var out = dataset.get('elasticsearch_url');
    if (out) return out;
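A usage sketch for the URL setting read by _getESUrl above; the backend id string 'elasticsearch' passed to the Dataset constructor is an assumption:

// Hypothetical: point a Dataset at an ES type URL as described above.
var dataset = new recline.Model.Dataset({
    id: 'tweets',
    elasticsearch_url: 'http://localhost:9200/twitter/tweet'
  },
  'elasticsearch'  // backend id assumed for illustration
);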
@@ -37,7 +37,7 @@ localhost:9200 with index twitter and type tweet it would be
dataType: 'jsonp'
});
var dfd = $.Deferred();
- my.wrapInTimeout(jqxhr).done(function(schema){
only one top level key in ES = the type so we can ignore it
var key = _.keys(schema)[0];
var fieldData = _.map(schema[key].properties, function(dict, fieldName) {
  dict.id = fieldName;
  return dict;
@@ -74,6 +74,17 @@ localhost:9200 with index twitter and type tweet it would be
  }
}
delete out.q;
+ }
@@ -14,7 +14,7 @@ var dataset = new recline.Model.Dataset({
},
'gdocs'
);
-
my.GDoc = Backbone.Model.extend({
+
my.GDoc = my.Base.extend({
  getUrl: function(dataset) {
    var url = dataset.get('url');
    if (url.indexOf('feeds/list') != -1) {
@@ -58,7 +58,7 @@ TODO: factor this out as a common method with other backends
_.each(_.zip(fields, d), function(x) { obj[x[0]] = x[1]; })
return obj;
});
- dfd.resolve(objs);
+ dfd.resolve(this._docsToQueryResult(objs));
return dfd;
},
gdocsToJavascript: function(gdocsSpreadsheet) {
diff --git a/docs/backend/localcsv.html b/docs/backend/localcsv.html
new file mode 100644
index 00000000..1abf5bf3
--- /dev/null
+++ b/docs/backend/localcsv.html
@@ -0,0 +1,120 @@
+ localcsv.js
Converts a Comma Separated Values string into an array of arrays.
+Each line in the CSV becomes an array.
+
+
Empty fields are converted to nulls and non-quoted numbers are converted to integers or floats.
+
+
@return The CSV parsed as an array
+@type Array
+
+
@param {String} s The string to convert
+@param {Boolean} [trm=false] If set to true, leading and trailing whitespace is stripped from each non-quoted field as it is imported
+
+
Heavily based on uselesscode's JS CSV parser (MIT Licensed):
+http://www.uselesscode.org/javascript/csv/
s = chomp(s);
+
+ var cur = '', // The character we are currently processing.
+   inQuote = false,
+   fieldQuoted = false,
+   field = '', // Buffer for building up the current field
+   row = [],
+   out = [],
+   i,
+   processField;
+
+ processField = function(field) {
+   if (fieldQuoted !== true) {
\ No newline at end of file
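A usage sketch for the CSV conversion described above; the function name parseCSV and the position of the trm flag are assumptions, since the diff truncates before the function signature:

// Hypothetical usage: convert a CSV string to an array of arrays as documented above.
var csv = 'name,score\n"Smith, Alice",10\nBob,,3.5\n';
var rows = my.parseCSV(csv, true);  // parseCSV and the trm argument are assumed names
// expected shape per the docs: [['name', 'score'], ['Smith, Alice', 10], ['Bob', null, 3.5]]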
diff --git a/docs/backend/memory.html b/docs/backend/memory.html
index 8b9fe834..3f03bd6a 100644
--- a/docs/backend/memory.html
+++ b/docs/backend/memory.html
@@ -1,7 +1,42 @@
- memory.js
Convenience function to create a simple 'in-memory' dataset in one step.
+
+
@param data: list of hashes for each document/row in the data ({key:
+value, key: value})
+@param fields: (optional) list of field hashes (each hash defining a field
+as per recline.Model.Field). If fields are not specified they will be taken
+from the data.
+@param metadata: (optional) dataset metadata - see recline.Model.Dataset.
+If not defined (or an id is not provided) the id will be autogenerated.
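A usage sketch for the convenience function described above; the exact namespace and name recline.Backend.createDataset are assumptions:

// Hypothetical: create an in-memory dataset from a list of row hashes.
var dataset = recline.Backend.createDataset(
  [ {x: 1, y: 2}, {x: 2, y: 4}, {x: 3, y: 8} ],  // data: one hash per document/row
  [ {id: 'x'}, {id: 'y'} ]                        // fields (optional)
);  // function name/namespace assumed for illustration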
http://www.elasticsearch.org/guide/reference/query-dsl/and-filter.html
+, filter: {}
+list of simple filters which will be added to the 'and' filter of the filter
Backends will register themselves by id into this registry
my.backends = {};
diff --git a/docs/view-flot-graph.html b/docs/view-flot-graph.html
index cd3d6080..7ef5b8eb 100644
--- a/docs/view-flot-graph.html
+++ b/docs/view-flot-graph.html
@@ -138,11 +138,9 @@ could be simpler just to have a common template!
create this.plot and cache it
if (!this.plot) {
this.plot = $.plot(this.$graph, series, options);
} else {
@@ -151,42 +149,60 @@ could be simpler just to have a common template!
TODO: we should really use tickFormatter and 1 interval ticks if (and
+only if) x-axis values are non-numeric.
+However, that is non-trivial to work out from a dataset (datasets may
+have no field type info). Thus at present we only do this for bars.
convert back from 'index' value on x-axis (e.g. in cases where x-axis values are non-numeric)
if (self.model.currentDocuments.models[x]) {
+   x = self.model.currentDocuments.models[x].get(self.chartConfig.group);
+ } else {
+   x = x.toFixed(2);
+ }
+ y = y.toFixed(2);
var content = _.template('<%= group %> = <%= x %>, <%= series %> = <%= y %>', {
  group: self.chartConfig.group,
@@ -241,14 +262,17 @@ could be simpler just to have a common template!
TODO: move this sort of thing into a toTemplateJSON method on Dataset?
modelData.fields = _.map(this.fields, function(field) { return field.toJSON() });
return modelData;
},
render: function() {
@@ -192,7 +187,7 @@ from DOM) while id may be int
var newView = new my.DataGridRow({
  model: doc,
  el: tr,
-   fields: self.fields,
+   fields: self.fields
+ }, self.options);
@@ -201,7 +196,7 @@ from DOM) while id may be int
var qs = my.parseHashQueryString();
qs['reclineQuery'] = JSON.stringify(self.model.queryState.toJSON());
- my.setHashQueryString(qs);
+ var out = my.getNewHashForQueryString(qs);
+ self.router.navigate(out);
});
this.model.bind('query:fail', function(error) {
  my.clearNotifications();
@@ -158,11 +159,15 @@ note this.model and dataset returned are the same
this.router.route(/^(\?.*)?$/, this.pageViews[0].id, function(queryString) {
+ self.updateNav(self.pageViews[0].id, queryString);
});
$.each(this.pageViews, function(idx, view) {
  self.router.route(/^([^?]+)(\?.*)?/, 'view', function(viewId, queryString) {
@@ -261,8 +266,62 @@ note this.model and dataset returned are the same
Parse the Hash section of a URL into path and query string
my.parseHashUrl = function(hashUrl) {
  var parsed = urlPathRegex.exec(hashUrl);
  if (parsed == null) {
    return {};
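A usage sketch for the hash parsing described above; the keys of the returned object are an assumption based on the "path and query string" description:

// Hypothetical usage of parseHashUrl; the returned keys are assumed.
var parsed = my.parseHashUrl('#graph?embed=true');
// e.g. { path: 'graph', query: '?embed=true' }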
@@ -272,7 +331,7 @@ note this.model and dataset returned are the same
my.composeQueryString = function(queryParams) {
  var queryString = '?';
  var items = [];
  $.each(queryParams, function(key, value) {
@@ -301,9 +360,17 @@ note this.model and dataset returned are the same