    <a class="close js-hide" href="#">&times;</a> \
    <div class="filters"> \
      <h3>Filters</h3> \
      <form class="form-stacked"> \
        {{#termFilters}} \
        <div class="filter" data-filter-id="{{id}}"> \
          <label>{{label}} <a href="#" class="js-remove-filter" title="Remove this filter">&times;</a></label> \
          <input type="text" value="{{value}}" data-filter-field="{{fieldId}}" data-filter-id="{{id}}" /> \
        </div> \
        {{/termFilters}} \
        <button type="submit" class="btn">Update</button> \
      </form> \
    </div> \
  ',
events: {
'click .js-hide': 'onHide',
'click .js-remove-filter': 'onRemoveFilter',
'submit form': 'onTermFiltersUpdate'
},
initialize: function() {
this.el = $(this.el);
_.bindAll(this, 'render');
this.model.bind('change', this.render);
this.model.bind('change:filters:new-blank', this.render);
this.render();
},
render: function() {
var tmplData = $.extend(true, {}, this.model.toJSON());
      // we use the idx in the list as the filter id
tmplData.filters = _.map(tmplData.filters, function(filter, idx) {
filter.id = idx;
return filter;
});
tmplData.termFilters = _.filter(tmplData.filters, function(filter) {
return filter.term !== undefined;
});
tmplData.termFilters = _.map(tmplData.termFilters, function(filter) {
var fieldId = _.keys(filter.term)[0];
return {
id: filter.id,
fieldId: fieldId,
label: fieldId,
value: filter.term[fieldId]
};
});
var out = $.mustache(this.template, tmplData);
this.el.html(out);
      // are there actually any filters to show?
if (this.model.get('filters').length > 0) {
this.el.show();
} else {
this.el.hide();
}
},
onHide: function(e) {
e.preventDefault();
this.el.hide();
},
onRemoveFilter: function(e) {
e.preventDefault();
var $target = $(e.target);
var filterId = $target.closest('.filter').attr('data-filter-id');
this.model.removeFilter(filterId);
},
onTermFiltersUpdate: function(e) {
var self = this;
e.preventDefault();
var filters = self.model.get('filters');
var $form = $(e.target);
_.each($form.find('input'), function(input) {
var $input = $(input);
        var filterIndex = parseInt($input.attr('data-filter-id'), 10);
var value = $input.val();
var fieldId = $input.attr('data-filter-field');
filters[filterIndex].term[fieldId] = value;
});
self.model.set({filters: filters});
self.model.trigger('change');
}
});
my.FacetViewer = Backbone.View.extend({
className: 'recline-facet-viewer well',
  template: ' \
    <a class="close js-hide" href="#">&times;</a> \
    <div class="facets"> \
      <h3>Facets</h3> \
      {{#facets}} \
      <div class="facet-summary" data-facet="{{id}}"> \
        <h4>{{id}}</h4> \
        <ul class="facet-items"> \
          {{#terms}} \
          <li><a class="js-facet-filter" data-value="{{term}}" href="#">{{term}} ({{count}})</a></li> \
          {{/terms}} \
          {{#entries}} \
          <li><a class="js-facet-filter" data-value="{{time}}" href="#">{{term}} ({{count}})</a></li> \
          {{/entries}} \
        </ul> \
      </div> \
      {{/facets}} \
    </div> \
  ',
events: {
'click .js-hide': 'onHide',
'click .js-facet-filter': 'onFacetFilter'
},
  initialize: function() {
_.bindAll(this, 'render');
this.el = $(this.el);
this.model.facets.bind('all', this.render);
this.model.fields.bind('all', this.render);
this.render();
},
render: function() {
var tmplData = {
facets: this.model.facets.toJSON(),
fields: this.model.fields.toJSON()
};
tmplData.facets = _.map(tmplData.facets, function(facet) {
if (facet._type === 'date_histogram') {
facet.entries = _.map(facet.entries, function(entry) {
entry.term = new Date(entry.time).toDateString();
return entry;
});
}
return facet;
});
var templated = $.mustache(this.template, tmplData);
this.el.html(templated);
// are there actually any facets to show?
if (this.model.facets.length > 0) {
this.el.show();
} else {
this.el.hide();
}
},
onHide: function(e) {
e.preventDefault();
this.el.hide();
},
onFacetFilter: function(e) {
    var $target = $(e.target);
var fieldId = $target.closest('.facet-summary').attr('data-facet');
var value = $target.attr('data-value');
this.model.queryState.addTermFilter(fieldId, value);
}
});
})(jQuery, recline.View);
// # Recline Backends
//
// Backends are connectors to backend data sources and stores
//
// This is just the base module containing a template Base class and convenience methods.
this.recline = this.recline || {};
this.recline.Backend = this.recline.Backend || {};
(function($, my) {
// ## Backbone.sync
//
// Override Backbone.sync to hand off to sync function in relevant backend
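  //
  // For example (illustrative):
  //
  //     // dataset.backend is set at initialization, e.g. to a recline.Backend.Memory instance
  //     dataset.fetch(); // delegates to dataset.backend.sync('read', dataset, options)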
Backbone.sync = function(method, model, options) {
return model.backend.sync(method, model, options);
};
// ## recline.Backend.Base
//
// Base class for backends providing a template and convenience functions.
// You do not have to inherit from this class but even when not it does
// provide guidance on the functions you must implement.
//
// Note also that while this (and other Backends) are implemented as Backbone models this is just a convenience.
my.Base = Backbone.Model.extend({
// ### __type__
//
// 'type' of this backend. This should be either the class path for this
// object as a string (e.g. recline.Backend.Memory) or for Backends within
// recline.Backend module it may be their class name.
//
// This value is used as an identifier for this backend when initializing
// backends (see recline.Model.Dataset.initialize).
__type__: 'base',
// ### readonly
//
// Class level attribute indicating that this backend is read-only (that
// is, cannot be written to).
readonly: true,
// ### sync
//
// An implementation of Backbone.sync that will be used to override
// Backbone.sync on operations for Datasets and Documents which are using this backend.
//
// For read-only implementations you will need only to implement read method
// for Dataset models (and even this can be a null operation). The read method
// should return relevant metadata for the Dataset. We do not require read support
// for Documents because they are loaded in bulk by the query method.
//
// For backends supporting write operations you must implement update and delete support for Document objects.
//
// All code paths should return an object conforming to the jquery promise API.
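  //
  // A minimal read-only implementation might look like the following (a
  // sketch, not a definitive implementation):
  //
  //     sync: function(method, model, options) {
  //       var dfd = $.Deferred();
  //       if (method === 'read' && model.__type__ == 'Dataset') {
  //         // ... set metadata and fields on the model here ...
  //         dfd.resolve(model);
  //       } else {
  //         dfd.reject('unsupported method: ' + method);
  //       }
  //       return dfd.promise();
  //     }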
sync: function(method, model, options) {
},
// ### query
//
// Query the backend for documents returning them in bulk. This method will
// be used by the Dataset.query method to search the backend for documents,
// retrieving the results in bulk.
//
// @param {recline.model.Dataset} model: Dataset model.
//
// @param {Object} queryObj: object describing a query (usually produced by
// using recline.Model.Query and calling toJSON on it).
//
  // The structure of data in the Query object or Hash should follow that
  // defined in [issue 34](https://github.com/okfn/recline/issues/34).
// (Of course, if you are writing your own backend, and hence
// have control over the interpretation of the query object, you
// can use whatever structure you like).
//
// @returns {Promise} promise API object. The promise resolve method will
// be called on query completion with a QueryResult object.
//
  // A QueryResult has the following structure (modelled closely on
  // ElasticSearch - see the corresponding issue in the Recline issue tracker
  // for more details):
//
//
// {
// total: // (required) total number of results (can be null)
// hits: [ // (required) one entry for each result document
// {
// _score: // (optional) match score for document
// _type: // (optional) document type
// _source: // (required) document/row object
// }
// ],
// facets: { // (optional)
  //       // facet results (as per the ElasticSearch facets format)
// }
// }
//
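  // A trivial implementation built on the convenience helpers below might look
  // like this (a sketch; `myRows` is a hypothetical array of row objects):
  //
  //     query: function(model, queryObj) {
  //       var dfd = $.Deferred();
  //       dfd.resolve(this._docsToQueryResult(myRows));
  //       return dfd.promise();
  //     }
  //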
query: function(model, queryObj) {
},
// ### _makeRequest
//
  // Just $.ajax but mixing in any headers in the 'headers' attribute of this
  // Backend instance. Example:
//
//
// var jqxhr = this._makeRequest({
// url: the-url
// });
//
_makeRequest: function(data) {
var headers = this.get('headers');
var extras = {};
if (headers) {
extras = {
beforeSend: function(req) {
_.each(headers, function(value, key) {
req.setRequestHeader(key, value);
});
}
};
}
    data = _.extend(extras, data);
return $.ajax(data);
},
// convenience method to convert simple set of documents / rows to a QueryResult
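  //
  // For example (illustrative):
  //
  //     this._docsToQueryResult([{x: 1}, {x: 2}])
  //     // => {total: null, hits: [{_source: {x: 1}}, {_source: {x: 2}}]}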
_docsToQueryResult: function(rows) {
var hits = _.map(rows, function(row) {
return { _source: row };
});
return {
total: null,
hits: hits
};
},
  // ### _wrapInTimeout
  //
  // Convenience method providing a crude way to catch backend errors on JSONP calls.
  // Many of the backends use JSONP and so do not receive proper error messages; this is
  // a crude way to catch those errors.
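  //
  // Usage (illustrative):
  //
  //     var jqxhr = $.ajax({url: someUrl, dataType: 'jsonp'}); // someUrl: hypothetical
  //     this._wrapInTimeout(jqxhr)
  //       .done(function(result) { /* got a response in time */ })
  //       .fail(function(error) { /* timed out or errored */ });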
_wrapInTimeout: function(ourFunction) {
var dfd = $.Deferred();
var timeout = 5000;
var timer = setTimeout(function() {
dfd.reject({
message: 'Request Error: Backend did not respond after ' + (timeout / 1000) + ' seconds'
});
}, timeout);
    ourFunction
      .done(function(result) {
        clearTimeout(timer);
        dfd.resolve(result);
      })
      .fail(function(error) {
        clearTimeout(timer);
        dfd.reject(error);
      });
return dfd.promise();
}
});
}(jQuery, this.recline.Backend));
this.recline = this.recline || {};
this.recline.Backend = this.recline.Backend || {};
(function($, my) {
// ## DataProxy Backend
//
// For connecting to [DataProxy-s](http://github.com/okfn/dataproxy).
//
// When initializing the DataProxy backend you can set the following attributes:
//
  // * dataproxy_url: {url-to-proxy} (optional). Defaults to http://jsonpdataproxy.appspot.com
//
  // Datasets using this backend should set the following attributes:
//
// * url: (required) url-of-data-to-proxy
// * format: (optional) csv | xls (defaults to csv if not specified)
//
// Note that this is a **read-only** backend.
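  //
  // Example (illustrative; the data url is hypothetical):
  //
  //     var backend = new recline.Backend.DataProxy();
  //     var dataset = new recline.Model.Dataset({
  //         url: 'http://example.com/mydata.csv',
  //         format: 'csv'
  //       },
  //       backend
  //     );
  //     dataset.fetch();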
my.DataProxy = my.Base.extend({
__type__: 'dataproxy',
readonly: true,
defaults: {
dataproxy_url: 'http://jsonpdataproxy.appspot.com'
},
sync: function(method, model, options) {
var self = this;
if (method === "read") {
if (model.__type__ == 'Dataset') {
// Do nothing as we will get fields in query step (and no metadata to
// retrieve)
var dfd = $.Deferred();
dfd.resolve(model);
return dfd.promise();
}
} else {
alert('This backend only supports read operations');
}
},
query: function(dataset, queryObj) {
var self = this;
var base = this.get('dataproxy_url');
var data = {
url: dataset.get('url'),
'max-results': queryObj.size,
type: dataset.get('format')
};
var jqxhr = $.ajax({
url: base,
data: data,
dataType: 'jsonp'
});
var dfd = $.Deferred();
this._wrapInTimeout(jqxhr).done(function(results) {
      if (results.error) {
        dfd.reject(results.error);
        return;
      }
      dataset.fields.reset(_.map(results.fields, function(fieldId) {
        return {id: fieldId};
      }));
var _out = _.map(results.data, function(doc) {
var tmp = {};
_.each(results.fields, function(key, idx) {
tmp[key] = doc[idx];
});
return tmp;
});
dfd.resolve(self._docsToQueryResult(_out));
})
    .fail(function(error) {
      dfd.reject(error);
    });
return dfd.promise();
}
});
}(jQuery, this.recline.Backend));
this.recline = this.recline || {};
this.recline.Backend = this.recline.Backend || {};
(function($, my) {
// ## ElasticSearch Backend
//
// Connecting to [ElasticSearch](http://www.elasticsearch.org/).
//
// Usage:
//
//
// var backend = new recline.Backend.ElasticSearch({
// // optional as can also be provided by Dataset/Document
// url: {url to ElasticSearch endpoint i.e. ES 'type/table' url - more info below}
// // optional
// headers: {dict of headers to add to each request}
// });
//
  // @param {String} url: url for ElasticSearch type/table, e.g. for ES running
  // on localhost:9200 with index twitter and type tweet it would be:
//
// http://localhost:9200/twitter/tweet
//
  // This url is optional since the ES endpoint url may be specified on the
  // dataset (and on a Document via the document's dataset attribute) by
  // having one of the following attributes (see also the `_getESUrl` function):
//
//
// elasticsearch_url
// webstore_url
// url
//
my.ElasticSearch = my.Base.extend({
__type__: 'elasticsearch',
readonly: false,
sync: function(method, model, options) {
var self = this;
if (method === "read") {
if (model.__type__ == 'Dataset') {
var schemaUrl = self._getESUrl(model) + '/_mapping';
var jqxhr = this._makeRequest({
url: schemaUrl,
dataType: 'jsonp'
});
var dfd = $.Deferred();
this._wrapInTimeout(jqxhr).done(function(schema) {
          // the ES mapping has only one top-level key (the type) so we can ignore it
var key = _.keys(schema)[0];
var fieldData = _.map(schema[key].properties, function(dict, fieldName) {
dict.id = fieldName;
return dict;
});
model.fields.reset(fieldData);
dfd.resolve(model, jqxhr);
})
        .fail(function(error) {
          dfd.reject(error);
        });
return dfd.promise();
} else if (model.__type__ == 'Document') {
var base = this._getESUrl(model.dataset) + '/' + model.id;
return this._makeRequest({
url: base,
dataType: 'json'
});
}
} else if (method === 'update') {
if (model.__type__ == 'Document') {
return this.upsert(model.toJSON(), this._getESUrl(model.dataset));
}
} else if (method === 'delete') {
if (model.__type__ == 'Document') {
var url = this._getESUrl(model.dataset);
return this.delete(model.id, url);
}
}
},
// ### upsert
//
// create / update a document to ElasticSearch backend
//
// @param {Object} doc an object to insert to the index.
  // @param {string} url (optional) url for ElasticSearch endpoint (if not
  // provided, this._getESUrl() is called).
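  //
  // Example (illustrative):
  //
  //     backend.upsert({id: '123', title: 'abc'}, 'http://localhost:9200/twitter/tweet');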
upsert: function(doc, url) {
var data = JSON.stringify(doc);
url = url ? url : this._getESUrl();
if (doc.id) {
url += '/' + doc.id;
}
return this._makeRequest({
url: url,
type: 'POST',
data: data,
dataType: 'json'
});
},
// ### delete
//
// Delete a document from the ElasticSearch backend.
//
// @param {Object} id id of object to delete
  // @param {string} url (optional) url for ElasticSearch endpoint (if not
  // provided, this._getESUrl() is called).
delete: function(id, url) {
url = url ? url : this._getESUrl();
url += '/' + id;
return this._makeRequest({
url: url,
type: 'DELETE',
dataType: 'json'
});
},
// ### _getESUrl
//
// get url to ElasticSearch endpoint (see above)
_getESUrl: function(dataset) {
if (dataset) {
var out = dataset.get('elasticsearch_url');
if (out) return out;
out = dataset.get('webstore_url');
if (out) return out;
out = dataset.get('url');
return out;
}
return this.get('url');
},
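  // ### _normalizeQuery
  //
  // Convert a Recline query object into the payload ElasticSearch expects.
  // For example (illustrative):
  //
  //     this._normalizeQuery({q: 'jones', filters: [{term: {name: 'jones'}}]})
  //     // => {
  //     //   query: {query_string: {query: 'jones'}},
  //     //   filter: {and: [{term: {name: 'jones'}}]}
  //     // }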
_normalizeQuery: function(queryObj) {
var out = queryObj.toJSON ? queryObj.toJSON() : _.extend({}, queryObj);
if (out.q !== undefined && out.q.trim() === '') {
delete out.q;
}
if (!out.q) {
out.query = {
match_all: {}
};
} else {
out.query = {
query_string: {
query: out.q
}
};
delete out.q;
}
// now do filters (note the *plural*)
if (out.filters && out.filters.length) {
if (!out.filter) {
out.filter = {};
}
if (!out.filter.and) {
out.filter.and = [];
}
out.filter.and = out.filter.and.concat(out.filters);
}
if (out.filters !== undefined) {
delete out.filters;
}
return out;
},
query: function(model, queryObj) {
var queryNormalized = this._normalizeQuery(queryObj);
var data = {source: JSON.stringify(queryNormalized)};
var base = this._getESUrl(model);
var jqxhr = this._makeRequest({
url: base + '/_search',
data: data,
dataType: 'jsonp'
});
var dfd = $.Deferred();
// TODO: fail case
jqxhr.done(function(results) {
_.each(results.hits.hits, function(hit) {
if (!('id' in hit._source) && hit._id) {
hit._source.id = hit._id;
}
});
if (results.facets) {
results.hits.facets = results.facets;
}
dfd.resolve(results.hits);
});
return dfd.promise();
}
});
}(jQuery, this.recline.Backend));
this.recline = this.recline || {};
this.recline.Backend = this.recline.Backend || {};
(function($, my) {
// ## Google spreadsheet backend
//
// Connect to Google Docs spreadsheet.
//
// Dataset must have a url attribute pointing to the Gdocs
// spreadsheet's JSON feed e.g.
//
//
// var dataset = new recline.Model.Dataset({
// url: 'https://spreadsheets.google.com/feeds/list/0Aon3JiuouxLUdDQwZE1JdV94cUd6NWtuZ0IyWTBjLWc/od6/public/values?alt=json'
// },
// 'gdocs'
// );
//
my.GDoc = my.Base.extend({
__type__: 'gdoc',
readonly: true,
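  // ### getUrl
  //
  // Resolve the dataset's url attribute to a Gdocs JSON feed url, e.g.
  // (illustrative key):
  //
  //     // in:  https://docs.google.com/spreadsheet/ccc?key=MYKEY#gid=0
  //     // out: https://spreadsheets.google.com/feeds/list/MYKEY/1/public/values?alt=json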
getUrl: function(dataset) {
var url = dataset.get('url');
if (url.indexOf('feeds/list') != -1) {
return url;
} else {
// https://docs.google.com/spreadsheet/ccc?key=XXXX#gid=0
var regex = /.*spreadsheet\/ccc?.*key=([^#?&+]+).*/;
var matches = url.match(regex);
if (matches) {
var key = matches[1];
var worksheet = 1;
var out = 'https://spreadsheets.google.com/feeds/list/' + key + '/' + worksheet + '/public/values?alt=json';
return out;
} else {
alert('Failed to extract gdocs key from ' + url);
}
}
},
sync: function(method, model, options) {
var self = this;
if (method === "read") {
var dfd = $.Deferred();
var dataset = model;
var url = this.getUrl(model);
$.getJSON(url, function(d) {
        var result = self.gdocsToJavascript(d);
        model.fields.reset(_.map(result.field, function(fieldId) {
          return {id: fieldId};
        }));
        // cache data onto the dataset (we have loaded the whole gdoc in one go)
model._dataCache = result.data;
dfd.resolve(model);
});
return dfd.promise();
}
},
query: function(dataset, queryObj) {
var dfd = $.Deferred();
var fields = _.pluck(dataset.fields.toJSON(), 'id');
// zip the fields with the data rows to produce js objs
// TODO: factor this out as a common method with other backends
var objs = _.map(dataset._dataCache, function (d) {
var obj = {};
_.each(_.zip(fields, d), function (x) {
obj[x[0]] = x[1];
});
return obj;
});
dfd.resolve(this._docsToQueryResult(objs));
    return dfd.promise();
},
gdocsToJavascript: function(gdocsSpreadsheet) {
/*
:options: (optional) optional argument dictionary:
columnsToUse: list of columns to use (specified by field names)
colTypes: dictionary (with column names as keys) specifying types (e.g. range, percent for use in conversion).
:return: tabular data object (hash with keys: field and data).
      Issues: Google Docs appears to return columns within rows in random order, and it is not clear whether the order is consistent across rows.
*/
var options = {};
if (arguments.length > 1) {
options = arguments[1];
}
var results = {
'field': [],
'data': []
};
// default is no special info on type of columns
var colTypes = {};
if (options.colTypes) {
colTypes = options.colTypes;
}
// either extract column headings from spreadsheet directly, or used supplied ones
if (options.columnsToUse) {
// columns set to subset supplied
results.field = options.columnsToUse;
} else {
// set columns to use to be all available
if (gdocsSpreadsheet.feed.entry.length > 0) {
for (var k in gdocsSpreadsheet.feed.entry[0]) {
if (k.substr(0, 3) == 'gsx') {
var col = k.substr(4);
results.field.push(col);
}
}
}
}
    // converts non-numerical values that should be numerical (e.g. '22.3%' [string] -> 0.223 [float])
var rep = /^([\d\.\-]+)\%$/;
$.each(gdocsSpreadsheet.feed.entry, function (i, entry) {
var row = [];
for (var k in results.field) {
var col = results.field[k];
var _keyname = 'gsx$' + col;
var value = entry[_keyname]['$t'];
// if labelled as % and value contains %, convert
if (colTypes[col] == 'percent') {
if (rep.test(value)) {
            var match = rep.exec(value);
            value = parseFloat(match[1]) / 100;
}
}
row.push(value);
}
results.data.push(row);
});
return results;
}
});
}(jQuery, this.recline.Backend));
this.recline = this.recline || {};
this.recline.Backend = this.recline.Backend || {};
(function($, my) {
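  // ## loadFromCSVFile
  //
  // Load CSV data from an (HTML5) File object and pass the resulting Dataset
  // to the callback. Usage (illustrative, e.g. from a file input's change
  // event):
  //
  //     my.loadFromCSVFile(fileInput.files[0], function(dataset) {
  //       // work with the dataset ...
  //     }, {encoding: 'UTF-8'});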
my.loadFromCSVFile = function(file, callback, options) {
var encoding = options.encoding || 'UTF-8';
var metadata = {
id: file.name,
file: file
};
var reader = new FileReader();
// TODO
reader.onload = function(e) {
var dataset = my.csvToDataset(e.target.result, options);
callback(dataset);
};
reader.onerror = function (e) {
alert('Failed to load file. Code: ' + e.target.error.code);
};
reader.readAsText(file, encoding);
};
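  // ## csvToDataset
  //
  // Convert a CSV string to a Dataset (via recline.Backend.createDataset).
  // For example (illustrative):
  //
  //     var dataset = my.csvToDataset('x,y\n1,2\n3,4');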
my.csvToDataset = function(csvString, options) {
var out = my.parseCSV(csvString, options);
    var fields = _.map(out[0], function(cell) {
return { id: cell, label: cell };
});
var data = _.map(out.slice(1), function(row) {
var _doc = {};
_.each(out[0], function(fieldId, idx) {
_doc[fieldId] = row[idx];
});
return _doc;
});
var dataset = recline.Backend.createDataset(data, fields);
return dataset;
};
// Converts a Comma Separated Values string into an array of arrays.
// Each line in the CSV becomes an array.
//
// Empty fields are converted to nulls and non-quoted numbers are converted to integers or floats.
//
// @return The CSV parsed as an array
// @type Array
//
// @param {String} s The string to convert
// @param {Object} options Options for loading CSV including
  // @param {Boolean} [trim=false] If set to true, leading and trailing whitespace is stripped off each non-quoted field as it is imported
  // @param {String} [separator=','] Separator for CSV file
  // @param {String} [delimiter='"'] Quote character for CSV file
  // Heavily based on uselesscode's JS CSV parser (MIT Licensed):
  // http://www.uselesscode.org/javascript/csv/
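  //
  // Example (illustrative):
  //
  //     my.parseCSV('a,b\n1,2.5\n"x,y",3')
  //     // => [ ['a', 'b'], [1, 2.5], ['x,y', 3] ]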
  my.parseCSV = function(s, options) {
// Get rid of any trailing \n
s = chomp(s);
    options = options || {};
var trm = options.trim;
var separator = options.separator || ',';
var delimiter = options.delimiter || '"';
var cur = '', // The character we are currently processing.
inQuote = false,
fieldQuoted = false,
field = '', // Buffer for building up the current field
row = [],
out = [],
i,
processField;
processField = function (field) {
if (fieldQuoted !== true) {
// If field is empty set to null
if (field === '') {
field = null;
// If the field was not quoted and we are trimming fields, trim it
} else if (trm === true) {
field = trim(field);
}
// Convert unquoted numbers to their appropriate types
if (rxIsInt.test(field)) {
field = parseInt(field, 10);
} else if (rxIsFloat.test(field)) {
          field = parseFloat(field);
}
}
return field;
};
for (i = 0; i < s.length; i += 1) {
cur = s.charAt(i);
// If we are at a EOF or EOR
if (inQuote === false && (cur === separator || cur === "\n")) {
field = processField(field);
// Add the current field to the current row
row.push(field);
// If this is EOR append row to output and flush row
if (cur === "\n") {
out.push(row);
row = [];
}
// Flush the field buffer
field = '';
fieldQuoted = false;
} else {
// If it's not a delimiter, add it to the field buffer
if (cur !== delimiter) {
field += cur;
} else {
if (!inQuote) {
// We are not in a quote, start a quote
inQuote = true;
fieldQuoted = true;
} else {
// Next char is delimiter, this is an escaped delimiter
if (s.charAt(i + 1) === delimiter) {
field += delimiter;
// Skip the next char
i += 1;
} else {
// It's not escaping, so end quote
inQuote = false;
}
}
}
}
}
// Add the last field
field = processField(field);
row.push(field);
out.push(row);
return out;
};
var rxIsInt = /^\d+$/,
rxIsFloat = /^\d*\.\d+$|^\d+\.\d*$/,
// If a string has leading or trailing space,
    // contains a comma, a double quote or a newline
// it needs to be quoted in CSV output
rxNeedsQuoting = /^\s|\s$|,|"|\n/,
trim = (function () {
// Fx 3.1 has a native trim function, it's about 10x faster, use it if it exists
if (String.prototype.trim) {
return function (s) {
return s.trim();
};
} else {
return function (s) {
return s.replace(/^\s*/, '').replace(/\s*$/, '');
};
}
}());
function chomp(s) {
if (s.charAt(s.length - 1) !== "\n") {
// Does not end with \n, just return string
return s;
} else {
// Remove the \n
return s.substring(0, s.length - 1);
}
}
}(jQuery, this.recline.Backend));
this.recline = this.recline || {};
this.recline.Backend = this.recline.Backend || {};
(function($, my) {
// ## createDataset
//
// Convenience function to create a simple 'in-memory' dataset in one step.
//
// @param data: list of hashes for each document/row in the data ({key:
// value, key: value})
// @param fields: (optional) list of field hashes (each hash defining a hash
// as per recline.Model.Field). If fields not specified they will be taken
// from the data.
// @param metadata: (optional) dataset metadata - see recline.Model.Dataset.
// If not defined (or id not provided) id will be autogenerated.
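  //
  // Example (illustrative):
  //
  //     var dataset = recline.Backend.createDataset([
  //       {x: 1, y: 2},
  //       {x: 3, y: 4}
  //     ]);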
my.createDataset = function(data, fields, metadata) {
if (!metadata) {
metadata = {};
}
if (!metadata.id) {
metadata.id = String(Math.floor(Math.random() * 100000000) + 1);
}
var backend = new recline.Backend.Memory();
var datasetInfo = {
documents: data,
metadata: metadata
};
if (fields) {
datasetInfo.fields = fields;
} else {
if (data) {
datasetInfo.fields = _.map(data[0], function(value, key) {
return {id: key};
});
}
}
backend.addDataset(datasetInfo);
var dataset = new recline.Model.Dataset({id: metadata.id}, backend);
dataset.fetch();
dataset.query();
return dataset;
};
// ## Memory Backend - uses in-memory data
//
// To use it you should provide in your constructor data:
//
// * metadata (including fields array)
// * documents: list of hashes, each hash being one doc. A doc *must* have an id attribute which is unique.
//
// Example:
//
//
// // Backend setup
  //     var backend = new recline.Backend.Memory();
// backend.addDataset({
// metadata: {
// id: 'my-id',
// title: 'My Title'
// },
// fields: [{id: 'x'}, {id: 'y'}, {id: 'z'}],
// documents: [
// {id: 0, x: 1, y: 2, z: 3},
// {id: 1, x: 2, y: 4, z: 6}
// ]
// });
// // later ...
  //     var dataset = new recline.Model.Dataset({id: 'my-id'}, 'memory');
// dataset.fetch();
// etc ...
//
my.Memory = my.Base.extend({
__type__: 'memory',
readonly: false,
initialize: function() {
this.datasets = {};
},
addDataset: function(data) {
this.datasets[data.metadata.id] = $.extend(true, {}, data);
},
sync: function(method, model, options) {
var self = this;
var dfd = $.Deferred();
if (method === "read") {
if (model.__type__ == 'Dataset') {
var rawDataset = this.datasets[model.id];
model.set(rawDataset.metadata);
model.fields.reset(rawDataset.fields);
model.docCount = rawDataset.documents.length;
dfd.resolve(model);
}
return dfd.promise();
} else if (method === 'update') {
if (model.__type__ == 'Document') {
_.each(self.datasets[model.dataset.id].documents, function(doc, idx) {
if(doc.id === model.id) {
self.datasets[model.dataset.id].documents[idx] = model.toJSON();
}
});
dfd.resolve(model);
}
return dfd.promise();
} else if (method === 'delete') {
if (model.__type__ == 'Document') {
var rawDataset = self.datasets[model.dataset.id];
var newdocs = _.reject(rawDataset.documents, function(doc) {
return (doc.id === model.id);
});
rawDataset.documents = newdocs;
dfd.resolve(model);
}
return dfd.promise();
} else {
alert('Not supported: sync on Memory backend with method ' + method + ' and model ' + model);
}
},
query: function(model, queryObj) {
var dfd = $.Deferred();
var out = {};
var numRows = queryObj.size;
var start = queryObj.from;
var results = this.datasets[model.id].documents;
results = this._applyFilters(results, queryObj);
results = this._applyFreeTextQuery(model, results, queryObj);
      // note: sorting is incomplete - negating the value below only works for numeric fields
_.each(queryObj.sort, function(sortObj) {
var fieldName = _.keys(sortObj)[0];
results = _.sortBy(results, function(doc) {
var _out = doc[fieldName];
return (sortObj[fieldName].order == 'asc') ? _out : -1*_out;
});
});
out.facets = this._computeFacets(results, queryObj);
var total = results.length;
      var resultsObj = this._docsToQueryResult(results.slice(start, start+numRows));
_.extend(out, resultsObj);
out.total = total;
dfd.resolve(out);
return dfd.promise();
},
    // apply the query's (term) filters to the results
_applyFilters: function(results, queryObj) {
_.each(queryObj.filters, function(filter) {
results = _.filter(results, function(doc) {
var fieldId = _.keys(filter.term)[0];
return (doc[fieldId] == filter.term[fieldId]);
});
});
return results;
},
// we OR across fields but AND across terms in query string
_applyFreeTextQuery: function(dataset, results, queryObj) {
if (queryObj.q) {
var terms = queryObj.q.split(' ');
results = _.filter(results, function(rawdoc) {
var matches = true;
_.each(terms, function(term) {
var foundmatch = false;
dataset.fields.each(function(field) {
var value = rawdoc[field.id];
              if (value !== null && value !== undefined) { value = value.toString(); }
// TODO regexes?
foundmatch = foundmatch || (value === term);
// TODO: early out (once we are true should break to spare unnecessary testing)
// if (foundmatch) return true;
});
matches = matches && foundmatch;
// TODO: early out (once false should break to spare unnecessary testing)
// if (!matches) return false;
});
return matches;
});
}
return results;
},
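    // ### _computeFacets
    //
    // Compute facet results (term counts) over the given documents. The result
    // for a single facet looks roughly like (illustrative):
    //
    //     {id: 'fieldId', terms: [{term: 'a', count: 3}, {term: 'b', count: 1}], ...}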
_computeFacets: function(documents, queryObj) {
var facetResults = {};
if (!queryObj.facets) {
        return facetResults;
}
_.each(queryObj.facets, function(query, facetId) {
facetResults[facetId] = new recline.Model.Facet({id: facetId}).toJSON();
facetResults[facetId].termsall = {};
});
// faceting
_.each(documents, function(doc) {
_.each(queryObj.facets, function(query, facetId) {
var fieldId = query.terms.field;
var val = doc[fieldId];
var tmp = facetResults[facetId];
if (val) {
tmp.termsall[val] = tmp.termsall[val] ? tmp.termsall[val] + 1 : 1;
} else {
tmp.missing = tmp.missing + 1;
}
});
});
_.each(queryObj.facets, function(query, facetId) {
var tmp = facetResults[facetId];
var terms = _.map(tmp.termsall, function(count, term) {
return { term: term, count: count };
});
tmp.terms = _.sortBy(terms, function(item) {
// want descending order
return -item.count;
});
tmp.terms = tmp.terms.slice(0, 10);
});
return facetResults;
}
});
}(jQuery, this.recline.Backend));