Typeahead/Bloodhound dupDetector for Multiple Datasets

I have two Bloodhound datasets, one local and one remote. The local data will always be duplicated in the remote results. I've found the dupDetector option mentioned in the Typeahead documentation, but it seems it only works when both the local and remote data live in the same Bloodhound object.
Here's my code. Is there a way to filter duplicates across multiple Bloodhound datasets?
var local_props = new Bloodhound({
    datumTokenizer: function (p) {
        return Bloodhound.tokenizers.whitespace(p.name);
    },
    queryTokenizer: Bloodhound.tokenizers.whitespace,
    local: portfolio_props,
    limit: 100
});
var remote_props = new Bloodhound({
    datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
    queryTokenizer: Bloodhound.tokenizers.whitespace,
    remote: 'properties/searchPropertiesLike?substr=%QUERY',
    dupDetector: function (remoteMatch, localMatch) {
        return remoteMatch.name === localMatch.name;
    },
    limit: 100
});
local_props.initialize();
remote_props.initialize();
$('#typeahead-property').typeahead({
    hint: true,
    highlight: true,
    minLength: 2
}, {
    name: 'port_properties',
    displayKey: 'name',
    source: local_props.ttAdapter(),
    templates: {
        header: '<h3>Your Portfolio Properties</h3>'
    }
}, {
    name: 'dir_properties',
    displayKey: 'name',
    source: remote_props.ttAdapter(),
    templates: {
        header: '<h3>Properties Directory</h3>'
    }
});
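One workaround, since dupDetector only compares remote suggestions against datums already held by the same engine: merge both sources into a single Bloodhound instance, where dupDetector applies, and attach that one engine to a single dataset. A sketch, assuming the remote results are objects with a name field like the local ones (the original remote tokenized on 'value', so adjust if your payload differs):

var all_props = new Bloodhound({
    datumTokenizer: function (p) {
        return Bloodhound.tokenizers.whitespace(p.name);
    },
    queryTokenizer: Bloodhound.tokenizers.whitespace,
    local: portfolio_props,
    remote: 'properties/searchPropertiesLike?substr=%QUERY',
    dupDetector: function (remoteMatch, localMatch) {
        // Records with the same name are duplicates; the local copy
        // is kept and the remote one dropped.
        return remoteMatch.name === localMatch.name;
    },
    limit: 100
});
all_props.initialize();

$('#typeahead-property').typeahead({
    hint: true,
    highlight: true,
    minLength: 2
}, {
    name: 'properties',
    displayKey: 'name',
    source: all_props.ttAdapter(),
    templates: {
        header: '<h3>Properties</h3>'
    }
});

The trade-off is losing the two separate section headers; if you need two visual sections, the alternative is a custom source function for the remote dataset that drops any datum whose name already appears in portfolio_props.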

MongoDB query with 300k documents takes more than 30 seconds

Ok, as said in the title, I have a "performance issue" where I need to get all documents from a collection, but it takes too long. The players collection contains around 300k small documents, and the query in the service looks like this:
async getAllPlayers() {
    const players = await this.playersCollection
        .find({}, { projection: { playerId: 1, name: 1, surname: 1, shirtNumber: 1, position: 1 } })
        .toArray();
    return players;
}
The overall size is 6.4 MB. I'm using the Fastify adapter, fastify-compress, and the MongoDB native driver. If I remove the projection, it takes almost a minute.
Any idea how to improve this?
The best time I get is 8 seconds, where fast-json-stringify gives me a boost of more than 10 seconds over the 300k records:
'use strict'
// run fresh mongo
// docker run --name temp --rm -p 27017:27017 mongo
const fastify = require('fastify')({ logger: true })
const fjs = require('fast-json-stringify')

// Precompiled serializer: much faster than JSON.stringify for a known shape
const toString = fjs({
    type: 'object',
    properties: {
        playerId: { type: 'integer' },
        name: { type: 'string' },
        surname: { type: 'string' },
        shirtNumber: { type: 'integer' },
    }
})

fastify.register(require('fastify-mongodb'), {
    forceClose: true,
    url: 'mongodb://localhost/mydb'
})

fastify.get('/', (request, reply) => {
    const dataStream = fastify.mongo.db.collection('foo')
        .find({}, {
            limit: 300000,
            projection: { playerId: 1, name: 1, surname: 1, shirtNumber: 1, position: 1 }
        })
        .stream({
            // serialize each document as one JSON line
            transform(doc) {
                return toString(doc) + '\n'
            }
        })
    reply.type('application/jsonl')
    reply.send(dataStream)
})

fastify.get('/insert', async (request, reply) => {
    const collection = fastify.mongo.db.collection('foo')
    const batch = collection.initializeOrderedBulkOp();
    for (let i = 0; i < 300000; i++) {
        const player = {
            playerId: i,
            name: `Name ${i}`,
            surname: `surname ${i}`,
            shirtNumber: i
        }
        batch.insert(player);
    }
    const { result } = await batch.execute()
    return result
})

fastify.listen(8080)
In any case, you should consider either:
- paginating your output (see the sketch after this list), or
- pushing the data into a bucket (like S3) and returning the client a URL to download the file directly; this speeds up the process a lot and spares your Node.js process from streaming all that data.
Note that compression in Node.js is a heavy operation, so it slows the response down considerably. An nginx proxy can add it by default, without the need to implement it in your business-logic server.
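For completeness, here is a minimal sketch of the pagination option, using skip/limit on the same collection (the /players route, the query parameters, and the page-size cap are assumptions for illustration, not part of the original service):

'use strict'
// A sketch of paginated output over the same `foo` collection as above.
const fastify = require('fastify')({ logger: true })

fastify.register(require('fastify-mongodb'), {
    forceClose: true,
    url: 'mongodb://localhost/mydb'
})

// GET /players?page=0&pageSize=1000
fastify.get('/players', async (request) => {
    const page = Number(request.query.page) || 0
    const pageSize = Math.min(Number(request.query.pageSize) || 1000, 5000)
    const players = await fastify.mongo.db.collection('foo')
        .find({}, { projection: { playerId: 1, name: 1, surname: 1, shirtNumber: 1 } })
        .skip(page * pageSize)
        .limit(pageSize)
        .toArray()
    return { page, pageSize, players }
})

fastify.listen(8080)

Keep in mind that skip/limit degrades on deep pages, since the server still walks all the skipped documents; a range query on an indexed field (e.g. playerId greater than the last seen id) scales better.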

Why is my page not rendering EnhancedGrid

Good day to all. While studying Dojo, I ran into a problem: my EnhancedGrid is not drawn on the page. This error appears in the browser console:
dojo.js.uncompressed.js:1321 Uncaught TypeError: Cannot read property 'get' of null
at Object.getFeatures (ObjectStore.js.uncompressed.js:241)
at Object._setStore (DataGrid.js.uncompressed.js:14511)
at Object.advice (dojo.js.uncompressed.js:8428)
at Object.c [as _setStore] (dojo.js.uncompressed.js:8408)
at Object.postCreate (DataGrid.js.uncompressed.js:14351)
at Object.l (dojo.js.uncompressed.js:10753)
at Object.postCreate (EnhancedGrid.js.uncompressed.js:90)
at Object.create (DataGrid.js.uncompressed.js:4330)
at Object.postscript (DataGrid.js.uncompressed.js:4243)
at new <anonymous> (dojo.js.uncompressed.js:10950)
The grid-drawing script looks like this:
var blogStore;
/**
 * Creates Dojo Store.
 */
require(["dojo/store/JsonRest",
    "dojo/data/ObjectStore"
], function (JsonRest, ObjectStore) {
    blogJsonStore = new JsonRest({
        handleAs: 'json',
        target: 'http://localhost:8080/myservice'
    });
    var data = {
        identifier: 'id',
        items: []
    };
    blogJsonStore.query({
        start: 0,
        count: 10
    }).then(function (results) {
        var res = [];
        res = results;
        if (0 === res.length) {
            data.items.push("There are no entries in this blog. Create a post!!!")
        } else {
            data.items.push(results)
        }
    });
    blogStore = new ObjectStore({ data: data });
});
/**
 * Creates Dojo EnhancedGrid.
 */
require(["dojox/grid/EnhancedGrid",
    "dojox/grid/enhanced/plugins/Filter",
    "dojox/grid/enhanced/plugins/NestedSorting",
    "dojox/grid/enhanced/plugins/Pagination",
    "dojo/domReady!"
], function (EnhancedGrid) {
    Grid = new EnhancedGrid({
        id: 'grid',
        store: blogStore,
        structure: [
            { name: 'Message', field: 'text', datatype: 'string',
              width: 'auto', autoComplete: true }
        ],
        rowsPerPage: 5,
        rowSelector: "20px",
        selectionMode: "single",
        plugins: {
            nestedSorting: true,
            pagination: {
                description: true,
                pageStepper: true,
                sizeSwitch: true,
                pageSizes: ["5", "10", "15", "All"],
                maxPageStep: 4,
                position: "bottom"
            }
        }
    });
    Grid.placeAt('resultDiv');
    Grid.startup();
});
If you remove the block under "Creates Dojo Store." it renders normally.
Help me solve the problem. Thank you in advance for any help.
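Two things stand out here, and the stack trace points at the first: dojo/data/ObjectStore expects the wrapped store in its objectStore property, but the code passes {data: data}, so this.objectStore is null when getFeatures runs ("Cannot read property 'get' of null"). Second, the two require blocks execute asynchronously, so the grid may be created before blogStore is even assigned. A minimal sketch that addresses both, carrying over the target URL and grid options from the question (and assuming the service supports the Range headers JsonRest uses for paging):

require([
    "dojo/store/JsonRest",
    "dojo/data/ObjectStore",
    "dojox/grid/EnhancedGrid",
    "dojox/grid/enhanced/plugins/Filter",
    "dojox/grid/enhanced/plugins/NestedSorting",
    "dojox/grid/enhanced/plugins/Pagination",
    "dojo/domReady!"
], function (JsonRest, ObjectStore, EnhancedGrid) {
    // Create the dojo/store first...
    var blogJsonStore = new JsonRest({
        target: 'http://localhost:8080/myservice'
    });
    // ...then hand it to the dojo/data adapter via `objectStore`,
    // which is the property the grid-facing wrapper actually reads.
    var blogStore = new ObjectStore({ objectStore: blogJsonStore });

    var grid = new EnhancedGrid({
        id: 'grid',
        store: blogStore,
        structure: [
            { name: 'Message', field: 'text', width: 'auto' }
        ],
        rowsPerPage: 5,
        rowSelector: "20px",
        selectionMode: "single",
        plugins: {
            nestedSorting: true,
            pagination: {
                description: true,
                pageStepper: true,
                sizeSwitch: true,
                pageSizes: ["5", "10", "15", "All"],
                maxPageStep: 4,
                position: "bottom"
            }
        }
    });
    grid.placeAt('resultDiv');
    grid.startup();
});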

Global Models in SailsJS not accessible

In porting a Sails app to the 1.x framework, I found that my global model names (which are physically defined in the api/models directory) were undefined in one of my hooks' initialize functions and in config/bootstrap.js.
"models": true is set in config/globals.js in this project.
For example, one of my models is Job.js:
const async = require('async');
const _ = require('lodash');
const fetch = require('node-fetch');
module.exports = {
    attributes: {
        id: {
            type: 'integer',
            autoIncrement: false,
            unique: true,
            primaryKey: true
        }
    },
    Init: function (params, cb) {
        sails.log.info('Job Engine Starting');
        ...
    }
}
But when I try to call Job.Init() from hooks/index.js or from config/bootstrap.js, I get a ReferenceError: Job is not defined.
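A likely cause for the hook case: in Sails 1.x, custom hooks can initialize before the ORM hook has loaded and globalized the models, so Job does not exist yet inside initialize. The documented pattern is to wait for the hook:orm:loaded event. A sketch, assuming the stock sails-hook-orm (the hook name and the Init call are carried over from the question):

// api/hooks/job-engine/index.js
module.exports = function jobEngineHook(sails) {
    return {
        initialize: function (done) {
            // Model globals like Job exist only after the ORM hook loads,
            // so defer any model access until then.
            sails.after('hook:orm:loaded', function () {
                Job.Init({}, done);
            });
        }
    };
};

config/bootstrap.js, by contrast, runs after all hooks have loaded, so if Job is undefined there too, check that the model file is valid under 1.x; for instance, the attribute type 'integer' became 'number', and primaryKey is now set at the model level rather than on an attribute.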

Avoiding global leaking when using JayData 1.3.4 local item store

Out of the box, entities defined using $data.Entity.extend are globally accessible; e.g. in the example taken from JayData's home page, Todo will leak.
// Case 1: local item store example from http://jaydata.org/
$data.Entity.extend("Todo", {
Id: { type: "int", key: true, computed: true },
Task: { type: String, required: true, maxLength: 200 },
DueDate: { type: Date },
Completed: { type: Boolean }
});
console.log('Leaks Todo?', typeof window.Todo !== 'undefined');
//Result: true
In a JayData forum post I found a reference to $data.createContainer(), which can be used as the container during entity definition. In this case Todo2 won't leak.
// Case 2: creating Todo2 in a container
var container = $data.createContainer();
$data.Entity.extend("Todo2", container, {
    Id: { type: "int", key: true, computed: true },
    Task: { type: String, required: true, maxLength: 200 },
    DueDate: { type: Date },
    Completed: { type: Boolean }
});
console.log('Leaks Todo2?', typeof window.Todo2 !== 'undefined');
//Result: false
Unfortunately, after accessing stores there will be other variables that leak globally, even if the entity itself is associated with a container.
console.log('Before store access: Leaks Todo2_items?',
    typeof window.Todo2_items !== 'undefined');
//Result: false
$data('Todo2').save({ Task: 'Initialized Todo2' });
console.log('After store access: Leaks Todo2_items?',
    typeof window.Todo2_items !== 'undefined');
//Result: true
The complete fiddle can be found at http://jsfiddle.net/RainerAtSpirit/nXaYn/.
In an ideal world, every variable created for entities that run in a container would be associated with that same container. Is there an option to accomplish that, or is the behavior described in Case 2 the best that can currently be achieved?

Sencha Touch 2 filterBy() not updating records

I have a nested list on one of the pages of a tabbed-panel app that pulls data from "offices.json".
I'd like to filter this list when a user clicks a toolbar button. However, my filterBy() call doesn't update the store or the visible list of offices, even though I can see in the console that it iterates the records and finds a match. What am I doing wrong?
(And yes, I have tried calling s.load() both before and after the filterBy, to no avail!)
toolbar: {
    items: [{
        text: 'Near you',
        id: 'btnNearYou',
        xtype: 'button',
        handler: function () {
            s = Ext.StoreMgr.get('offices');
            s._proxy._url = 'officesFLAT.json';
            console.log("trying to filter");
            s.filterBy(function (record) {
                var search = new RegExp("Altrincham", 'i');
                if (record.get('text').match(search)) {
                    console.log("did Match");
                    return true;
                } else {
                    console.log("didnt Match");
                    return false;
                }
            });
            s.load();
        }
    }]
}
For the record I'm defining my store like so:
store: {
    type: 'tree',
    model: 'ListItem',
    id: 'offices',
    defaultRootProperty: 'items',
    proxy: {
        type: 'ajax',
        root: {},
        url: 'offices.json',
        reader: {
            type: 'json',
            rootProperty: 'items'
        }
    }
}
No need to recreate the regex on every record; cache it outside the filter function.
You can simplify the code a lot (see below).
And why are you calling load() right after filtering? That sends a request to the server and simply reloads the same dataset, wiping out your filter.
toolbar: {
    items: [{
        text: 'Near you',
        id: 'btnNearYou',
        xtype: 'button',
        handler: function () {
            s = Ext.StoreMgr.get('offices');
            s._proxy._url = 'officesFLAT.json';
            var search = /Altrincham/i;
            s.filterBy(function (record) {
                return !!record.get('text').match(search);
            });
        }
    }]
}
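If the button is meant to toggle, one more thing worth knowing (assuming the stock Ext.data.Store API, which Sencha Touch 2 stores share): clearFilter() removes the active filters and restores the full record set. A sketch of a toggling handler; the `filtered` flag is hypothetical, added here only to track state:

handler: function () {
    var s = Ext.StoreMgr.get('offices');
    if (s.filtered) {
        s.clearFilter();      // drop all filters, show every record again
        s.filtered = false;
    } else {
        s.filterBy(function (record) {
            return /Altrincham/i.test(record.get('text'));
        });
        s.filtered = true;    // hypothetical flag, not part of the Ext API
    }
}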