Design a search service using Elasticsearch - REST

I have a requirement: I am building a search service for a social network. The search service should return the names of the users that somebody searches for. For now it will be limited to user-domain search only. I am planning to use Elasticsearch to hold the indexes (user domain details), and my search service (written in Node.js) will call Elasticsearch. I am not able to come up with a design for how to create the indexes in Elasticsearch. Should I use a batch job to create the indexes, or should I index each user at creation time?
Any good pointers or a good design would be appreciated.

You should index each user at the time the user is created.
A simple example may be useful.
Say your user data looks like this:
`
[
  {
    "_id": "1",
    "status": true,
    "username": "mak",
    "userdomain": "mydomain.com",
    "name": "mak doe"
  },
  {
    "_id": "2",
    "status": true,
    "username": "janny",
    "userdomain": "mydomain.com",
    "name": "janny"
  },
  {
    "_id": "3",
    "status": true,
    "username": "mac",
    "userdomain": "newdomain.com",
    "name": "mac peter"
  },
  {
    "_id": "4",
    "status": true,
    "username": "mak",
    "userdomain": "mydomain.com",
    "name": "mak peter"
  },
  {
    "_id": "5",
    "status": true,
    "username": "mak",
    "userdomain": "newdomain.com",
    "name": "mak peter"
  }
]
`
The Elasticsearch index definition would look like this (note that "text" is used instead of the deprecated "string" type, and the custom keyword_lowercase_analyzer has to be defined under settings):
`
PUT socialdata
{
  "settings": {
    "analysis": {
      "analyzer": {
        "keyword_lowercase_analyzer": {
          "type": "custom",
          "tokenizer": "keyword",
          "filter": ["lowercase"]
        }
      }
    }
  },
  "mappings": {
    "users": {
      "properties": {
        "status": {
          "type": "boolean"
        },
        "name": {
          "type": "text"
        },
        "username": {
          "type": "text",
          "fields": {
            "raw": {
              "type": "text",
              "analyzer": "keyword_lowercase_analyzer"
            },
            "english": {
              "type": "text",
              "analyzer": "english"
            }
          }
        },
        "userdomain": {
          "type": "text",
          "fields": {
            "raw": {
              "type": "text",
              "analyzer": "keyword_lowercase_analyzer"
            },
            "english": {
              "type": "text",
              "analyzer": "english"
            }
          }
        }
      }
    }
  }
}
`
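If the Node.js search service owns this index, the same definition can be created once at startup rather than by hand. A minimal sketch, assuming the legacy elasticsearch npm client (which still accepts the users mapping type used above) and a cluster on localhost:9200; the file name and connection details are illustrative:
`
// create-index.js - one-time index setup from the Node.js search service
// Assumption: the legacy "elasticsearch" npm client and a local cluster on localhost:9200.
const elasticsearch = require('elasticsearch');

const client = new elasticsearch.Client({ host: 'localhost:9200' });

// Same settings and mappings as the "PUT socialdata" request above.
const indexDefinition = {
  settings: {
    analysis: {
      analyzer: {
        keyword_lowercase_analyzer: { type: 'custom', tokenizer: 'keyword', filter: ['lowercase'] }
      }
    }
  },
  mappings: {
    users: {
      properties: {
        status: { type: 'boolean' },
        name: { type: 'text' },
        username: {
          type: 'text',
          fields: {
            raw: { type: 'text', analyzer: 'keyword_lowercase_analyzer' },
            english: { type: 'text', analyzer: 'english' }
          }
        },
        userdomain: {
          type: 'text',
          fields: {
            raw: { type: 'text', analyzer: 'keyword_lowercase_analyzer' },
            english: { type: 'text', analyzer: 'english' }
          }
        }
      }
    }
  }
};

// Create the index only if it does not exist yet (e.g. on service startup).
async function createSocialdataIndex() {
  const exists = await client.indices.exists({ index: 'socialdata' });
  if (!exists) {
    await client.indices.create({ index: 'socialdata', body: indexDefinition });
  }
}

createSocialdataIndex().catch(console.error);
`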
For a bulk upload (for example, a one-time backfill of existing users):
`
POST socialdata/users/_bulk
{ "index": { "_index": "socialdata","_type": "users", "_id": 1 }}
{"status":true,"username": "mak","userdomain": "mydomain.com","name": "mak doe"}
{ "index": { "_index": "socialdata","_type": "users", "_id": 2 }}
{"status":true,"username": "janny","userdomain": "mydomain.com","name": "janny"}
{ "index": { "_index": "socialdata","_type": "users", "_id": 3 }}
{"status":true,"username": "mac","userdomain": "newdomain.com","name": "mac peter"}
{ "index": { "_index": "socialdata","_type": "users", "_id": 4 }}
{"status":true,"username": "mak","userdomain": "mydomain.com","name": "mak peter"}
{ "index": { "_index": "socialdata","_type": "users", "_id": 5 }}
{"status":true,"username": "mak","userdomain": "newdomain.com","name": "mak peter"}
`
For indexing a single document (for example, when a new user is created):
`
PUT socialdata/users/1
{"status": true, "username": "mak", "userdomain": "mydomain.com", "name": "mak doe"}
`
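In the Node.js service, this is the call to make in the user-creation flow, right after the user has been saved to your primary data store; the bulk request is then only needed to backfill existing users or rebuild the index. A minimal sketch under the same assumptions as above (the user object shape is illustrative):
`
// index-user.js - index one user at creation time
const elasticsearch = require('elasticsearch');
const client = new elasticsearch.Client({ host: 'localhost:9200' });

// Call this from the "create user" handler once the user is persisted.
async function indexUser(user) {
  await client.index({
    index: 'socialdata',
    type: 'users',
    id: String(user.id),          // keep the ES _id in sync with the primary key
    body: {
      status: user.status,
      username: user.username,
      userdomain: user.userdomain,
      name: user.name
    }
  });
}

// Example usage:
indexUser({ id: 6, status: true, username: 'neo', userdomain: 'mydomain.com', name: 'neo anderson' })
  .catch(console.error);
`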
Elasticsearch queries:
This one will return only two records (users 1 and 4):
`
POST socialdata/users/_search
{
  "query": {
    "bool": {
      "must": [
        {
          "match": {
            "username": "mak"
          }
        },
        {
          "match": {
            "userdomain": "mydomain.com"
          }
        }
      ]
    }
  }
}
`
This one will return only one record (user 5):
`
POST socialdata/users/_search
{
  "query": {
    "bool": {
      "must": [
        {
          "match": {
            "username": "mak"
          }
        },
        {
          "match": {
            "userdomain": "newdomain.com"
          }
        }
      ]
    }
  }
}
`
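From the Node.js search service, the same bool query can be sent with the client's search API; the matching documents come back under hits.hits. Again a minimal sketch under the same assumptions:
`
// search-users.js - username + userdomain search from the Node.js service
const elasticsearch = require('elasticsearch');
const client = new elasticsearch.Client({ host: 'localhost:9200' });

async function searchUsers(username, userdomain) {
  const response = await client.search({
    index: 'socialdata',
    type: 'users',
    body: {
      query: {
        bool: {
          must: [
            { match: { username: username } },
            { match: { userdomain: userdomain } }
          ]
        }
      }
    }
  });
  // Return just the stored user documents.
  return response.hits.hits.map(hit => hit._source);
}

searchUsers('mak', 'newdomain.com').then(console.log).catch(console.error);
`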

Related

Spring Boot MongoDB Lookup not working with ObjectId

I have a users collection in which each user has a role object containing a roleId. I also have a roles collection whose documents have an id.
Now, for each role, I'd like to get the list of users.
Ex:
[
  {
    "name": "Scott",
    "role": {
      "roleId": "123432"
    }
  },
  {
    "name": "John",
    "role": {
      "roleId": "123432"
    }
  },
  {
    "name": "Scott",
    "role": {
      "roleId": "556432"
    }
  }
]
Roles Data:
[
  {
    "id": "123432",
    "name": "admin",
    "type": "internal"
  },
  {
    "id": "556432",
    "name": "owner",
    "type": "external"
  },
  {
    "id": "556432",
    "name": "owner",
    "type": "internal"
  }
]
Now I want to get all the roles of type internal and their related users:
So, the output should be,
[
  {
    "role": "123432",
    "users": [
      {
        "name": "Scott",
        "role": {
          "roleId": "123432"
        }
      },
      {
        "name": "John",
        "role": {
          "roleId": "123432"
        }
      }
    ]
  },
  {
    "role": "556432",
    "users": []
  }
]
This is my aggregation in Spring Boot:
LookupOperation lookupOperation = LookupOperation.newLookup().from("roles").localField("roleId")
.foreignField("_id").as("roles");
AggregationOperation match = Aggregation.match(Criteria.where("type").is("internal"));
Aggregation aggregation = Aggregation.newAggregation(lookupOperation, match);
List<UserDTO> results = mongoTemplate.aggregate(aggregation, "users", UserDTO.class).getMappedResults();
This works great when roleId is stored as an ObjectId (e.g. ObjectId("556432")), but it does not work when it is stored as a String (e.g. "556432").
Can someone help me with this, please?

Querying a map (<String, Object>) in JSON through MongoDB

How to query a map of type Map<String, List> in JSON form, in MongoDB?
Sample JSON:
{
  "WIDTH": 810,
  "HEIGHT": 465,
  "MODULES": {
    "23": {
      "XNAME": "COMP1",
      "PARAMS": {
        "_Klockers": {
          "TYPE": "text",
          "VALUE": "Klocker#3"
        },
        "SUBSYS": {
          "TYPE": "text",
          "VALUE": "2"
        },
        "EP": {
          "TYPE": "integer",
          "VALUE": "2"
        }
      }
    },
    "24": {
      "XNAME": "COMP2",
      "PARAMS": {
        "_Rockers": {
          "TYPE": "text",
          "VALUE": "Rocker#3"
        },
        "Driver": {
          "TYPE": "binary",
          "VALUE": 1
        },
        "EP": {
          "TYPE": "long",
          "VALUE": "233"
        }
      }
    },
    "25": {
      "XNAME": "COMP3",
      "PARAMS": {
        "_Mockers": {
          "TYPE": "text",
          "VALUE": "Mocker#3"
        },
        "SYSMain": {
          "TYPE": "text",
          "VALUE": "2342"
        },
        "TLP": {
          "TYPE": "double",
          "VALUE": "2.3"
        }
      }
    }
  }
}
Basically I want to:
1. List all the "XNAME" field values of all keys in "MODULES".
Expected output: {"COMP1", "COMP2", "COMP3"}
2. List all the "TYPE" values in the "PARAMS" object within each key of "MODULES".
Expected output: {"text", "text", "integer", "text", "binary", "long", "text", "text", "double"}
I am new to MongoDB and any help or redirection is appreciated.
You can use this
db.collection.aggregate([
  {
    $project: { // You require this as your data is dynamic
      "modules": {
        "$objectToArray": "$MODULES"
      }
    }
  },
  { // Destruct the array
    "$unwind": "$modules"
  },
  {
    "$project": { // Again, requires the same as keys are dynamic
      "types": {
        "$objectToArray": "$modules.v.PARAMS"
      },
      xname: "$modules.v.XNAME"
    }
  },
  { // Destruct the types
    $unwind: "$types"
  },
  { // Get the distinct values
    $group: {
      "_id": null,
      "xname": {
        "$addToSet": "$xname"
      },
      "types": {
        "$addToSet": "$types.v.TYPE"
      }
    }
  }
])
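Note that $addToSet returns only distinct values, while the expected output above lists "text" several times. If duplicates should be preserved, one variation (a hedged sketch, not part of the original answer) is to use $push for the types in the final stage:
// Alternative final stage: $push keeps one entry per occurrence (so "text" repeats),
// while $addToSet above returns each value only once.
var groupStage = {
  $group: {
    "_id": null,
    "xname": { "$addToSet": "$xname" },
    "types": { "$push": "$types.v.TYPE" }
  }
};
// Swap groupStage in for the last stage of the pipeline above to get the duplicated list.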

Domino 9.x calendar service create meeting

I have been following this guide to work on Domino 9.0.1
Domino Calendar services
I am using JSON, and the POST command works, but it creates an appointment; what I want to do is create a meeting. I have tried setting other fields like event['x-lotus-appttype'].data or event.AppointmentType = 3, but I still get an appointment.
This is the JSON I am sending:
{
  "events": [
    {
      "summary": "Meeting 1",
      "location": "Location 1",
      "start": {
        "date": "2013-12-01",
        "time": "13:00:00",
        "utc": true
      },
      "end": {
        "date": "2013-12-01",
        "time": "14:00:00",
        "utc": true
      }
    }
  ]
}
What is the correct JSON format to create a meeting?
Take a look at the following documentation: Event with attendees represented in JSON format
EXAMPLE 4. Event with attendees and time zone array:
{
"x-lotus-charset": {
"data": "UTF-8"
},
"timezones": [
{
"tzid": "Eastern",
"standard": {
"start": {
"date": "1950-11-05",
"time": "02:00:00"
},
"offsetFrom": "-0400",
"offsetTo": "-0500",
"recurrenceRule": "FREQ=YEARLY;BYMONTH=11;BYDAY=1SU;BYHOUR=2;BYMINUTE=0"
},
"daylight": {
"start": {
"date": "1950-03-12",
"time": "02:00:00"
},
"offsetFrom": "-0500",
"offsetTo": "-0400",
"recurrenceRule": "FREQ=YEARLY;BYMONTH=3;BYDAY=2SU;BYHOUR=2;BYMINUTE=0"
}
}
],
"events": [
{
"href": "/mail/dlawson.nsf/api/calendar/events/DB7E0BAC21EC322A85257BD200756E26-Lotus_Notes_Generated",
"id": "DB7E0BAC21EC322A85257BD200756E26-Lotus_Notes_Generated",
"summary": "Staff meeting",
"location": "Ray's office",
"description": "Please email your status update 24 hours before the meeting.",
"start": {
"date": "2013-09-12",
"time": "09:00:00",
"tzid": "Eastern"
},
"end": {
"date": "2013-09-12",
"time": "10:00:00",
"tzid": "Eastern"
},
"class": "public",
"transparency": "opaque",
"sequence": 0,
"last-modified": "20130825T212457Z",
"attendees": [
{
"role": "chair",
"status": "accepted",
"rsvp": false,
"displayName": "Duke Lawson/Peaks",
"email": "DukeLawson@swg.usma.ibm.com"
},
{
"role": "req-participant",
"status": "needs-action",
"rsvp": true,
"displayName": "Dean Melnyk/Peaks",
"email": "DeanMelnyk@swg.usma.ibm.com"
},
{
"role": "req-participant",
"status": "needs-action",
"rsvp": true,
"displayName": "Raymond Chan/Peaks",
"email": "RaymondChan@swg.usma.ibm.com"
}
],
"organizer": {
"displayName": "Duke Lawson/Peaks",
"email": "DukeLawson@swg.usma.ibm.com"
},
"x-lotus-broadcast": {
"data": "FALSE"
},
"x-lotus-notesversion": {
"data": "2"
},
"x-lotus-appttype": {
"data": "3"
}
}
]
}
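Comparing that documented example with the JSON in the question, the additions that appear to turn an appointment into a meeting are the attendees array (with a chair), the organizer, and the x-lotus-appttype extension set to "3". A trimmed sketch of the question's payload with those fields added (display names and e-mail addresses are placeholders, not from the original question):
{
  "events": [
    {
      "summary": "Meeting 1",
      "location": "Location 1",
      "start": { "date": "2013-12-01", "time": "13:00:00", "utc": true },
      "end": { "date": "2013-12-01", "time": "14:00:00", "utc": true },
      "attendees": [
        {
          "role": "chair",
          "status": "accepted",
          "rsvp": false,
          "displayName": "Organizer Name",
          "email": "organizer@example.com"
        },
        {
          "role": "req-participant",
          "status": "needs-action",
          "rsvp": true,
          "displayName": "Attendee Name",
          "email": "attendee@example.com"
        }
      ],
      "organizer": {
        "displayName": "Organizer Name",
        "email": "organizer@example.com"
      },
      "x-lotus-appttype": {
        "data": "3"
      }
    }
  ]
}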
I hope this can help :)

How can I use CloudKit web services to query based on a reference field?

I've got two CloudKit data objects that look somewhat like this:
Parent Object:
{
"records": [
{
"recordName": "14102C0A-60F2-4457-AC1C-601BC628BF47-184-000000012D225C57",
"recordType": "ParentObject",
"fields": {
"fsYear": {
"value": "2015",
"type": "STRING"
},
"displayOrder": {
"value": 2015221153856287200,
"type": "INT64"
},
"fjpFSGuidForReference": {
"value": "14102C0A-60F2-4457-AC1C-601BC628BF47-184-000000012D225C57",
"type": "STRING"
},
"fsDateSearch": {
"value": "2015221153856287158",
"type": "STRING"
}
},
"recordChangeTag": "id4w7ivn",
"created": {
"timestamp": 1439149087571,
"userRecordName": "_0d26968032e31bbc72c213037b6cb35d",
"deviceID": "A19CD995FDA3093781096AF5D818033A241D65C1BFC3D32EC6C5D6B3B4A9AA6B"
},
"modified": {
"timestamp": 1439149087571,
"userRecordName": "_0d26968032e31bbc72c213037b6cb35d",
"deviceID": "A19CD995FDA3093781096AF5D818033A241D65C1BFC3D32EC6C5D6B3B4A9AA6B"
}
}
],
"total":
}
Child Object:
{
"records": [
{
"recordName": "2015221153856287168",
"recordType": "ChildObject",
"fields": {
"District": {
"value": "002",
"type": "STRING"
},
"ZipCode": {
"value": "12345",
"type": "STRING"
},
"InspecReference": {
"value": {
"recordName": "14102C0A-60F2-4457-AC1C-601BC628BF47-184-000000012D225C57",
"action": "NONE",
"zoneID": {
"zoneName": "_defaultZone"
}
},
"type": "REFERENCE"
}
},
"recordChangeTag": "id4w7lew",
"created": {
"timestamp": 1439149090856,
"userRecordName": "_0d26968032e31bbc72c213037b6cb35d",
"deviceID": "A19CD995FDA3093781096AF5D818033A241D65C1BFC3D32EC6C5D6B3B4A9AA6B"
},
"modified": {
"timestamp": 1439149090856,
"userRecordName": "_0d26968032e31bbc72c213037b6cb35d",
"deviceID": "A19CD995FDA3093781096AF5D818033A241D65C1BFC3D32EC6C5D6B3B4A9AA6B"
}
}
],
"total": 1
}
I'm trying to write a query to directly access the CloudKit web service and return the Child Object based on the reference of the parent object.
My test JSON looks something like this:
{"query":{"recordType":"ChildObject","filterBy":{"fieldName":"InspecReference","fieldValue":{ "value" : "14102C0A-60F2-4457-AC1C-601BC628BF47-184-000000012D225C57", "type" : "string" },"comparator":"EQUALS"}},"zoneID":{"zoneName":"_defaultZone"}}
However, I'm getting the following error from CloudKit:
{"uuid":"33db91f3-b768-4a68-9056-216ecc033e9e","serverErrorCode":"BAD_REQUEST","reason":"BadRequestException:
Unexpected input"}
I'm guessing I have the Record Field Dictionary in the query wrong. However, the documentation isn't clear on what this should look like on a reference object.
You have to reproduce the actual reference object in the filter. In this particular case, the JSON looks like this:
{
  "query": {
    "recordType": "ChildObject",
    "filterBy": {
      "fieldName": "InspecReference",
      "fieldValue": {
        "value": {
          "recordName": "14102C0A-60F2-4457-AC1C-601BC628BF47-184-000000012D225C57",
          "action": "NONE"
        },
        "type": "REFERENCE"
      },
      "comparator": "EQUALS"
    }
  },
  "zoneID": {
    "zoneName": "_defaultZone"
  }
}
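For reference, here is a sketch of posting that query from Node.js (18+, for the built-in fetch). The container ID, environment, database, and API token are placeholders you have to fill in; the URL follows the records/query route of the CloudKit Web Services API:
// query-child-objects.js - post the records/query payload to CloudKit Web Services
// Placeholders: CONTAINER_ID, environment (development|production), database (public), API_TOKEN.
const CONTAINER_ID = 'iCloud.com.example.MyApp';
const API_TOKEN = 'YOUR_CKAPI_TOKEN';
const url =
  `https://api.apple-cloudkit.com/database/1/${CONTAINER_ID}/development/public/records/query` +
  `?ckAPIToken=${API_TOKEN}`;

// Same query body as the JSON above.
const body = {
  query: {
    recordType: 'ChildObject',
    filterBy: {
      fieldName: 'InspecReference',
      fieldValue: {
        value: {
          recordName: '14102C0A-60F2-4457-AC1C-601BC628BF47-184-000000012D225C57',
          action: 'NONE'
        },
        type: 'REFERENCE'
      },
      comparator: 'EQUALS'
    }
  },
  zoneID: { zoneName: '_defaultZone' }
};

fetch(url, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(body)
})
  .then(res => res.json())
  .then(json => console.log(json.records))
  .catch(console.error);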

Get the first document using $in with mongodb

How can I get the first element using $in in Mongo?
If I have a list like ['car', 'house', 'cat', 'dog'] and a collection which contains many documents with these elements, I'd like to find the first document which contains cat, the first which contains dog, and so on.
I've tried to use limit(), but it gives me only one document in total, which can be the one for car, or dog, or cat, etc.
Is there a way to combine a limit with $in?
Thanks
EDIT:
Example of the data I have:
{
"_id": {
"$oid": "51d53ace9e674607e837d62d"
},
"sensors": [{
"name": "os-hostname",
"value": "yahourt"
}, {
"name": "os-domain-name",
"value": ""
}, {
"name": "os-platform",
"value": "Win32NT"
}, {
"name": "os-fullname",
"value": "Microsoft Windows XP Professional"
}, {
"name": "os-version",
"value": "5.1.2600.131072"
}],
"type": "os",
"serial": "2_os_os-hostname_yahourt"
} {
"_id": {
"$oid": "51d53ace9e674607e837d62e"
},
"sensors": [{
"name": "cpu-id",
"value": "_Total"
}, {
"name": "cpu-usage",
"value": 37.2257042
}],
"type": "cpu",
"serial": "2_cpu_cpu-id_total"
} {
"_id": {
"$oid": "51d53ace9e674607e837d62f"
},
"sensors": [{
"name": "cpu-id",
"value": "0"
}, {
"name": "cpu-usage",
"value": 48.90282
}],
"type": "cpu",
"serial": "2_cpu_cpu-id_0"
} {
"_id": {
"$oid": "51d53ace9e674607e837d630"
},
"sensors": [{
"name": "cpu-id",
"value": "1"
}, {
"name": "cpu-usage",
"value": 25.54859
}],
"type": "cpu",
"serial": "2_cpu_cpu-id_1"
} {
"_id": {
"$oid": "51d53ace9e674607e837d631"
},
"sensors": [{
"name": "volume-name",
"value": "C:"
}, {
"name": "volume-label",
"value": ""
}, {
"name": "volume-total-size",
"value": "52427898880"
}, {
"name": "volume-total-free-space",
"value": "20305170432"
}, {
"name": "volume-percent-free-space",
"value": "38"
}, {
"name": "volume-reads-per-second",
"value": 0.0
}, {
"name": "volume-writes-per-second",
"value": 9.324152
}, {
"name": "volume-read-bytes-per-second",
"value": 0.0
}, {
"name": "volume-write-bytes-per-second",
"value": 194141.6
}, {
"name": "volume-queue-length",
"value": 0.0
}],
"type": "disk",
"serial": "2_disk_volume-name_c"
}
You cannot add a limit to $in but you could cheat by using the aggregation framework:
db.collection.aggregate([
  { $match: { serial: { $in: [list_of_serials] } } },
  { $sort: { _id: -1 } },
  { $group: { _id: '$serial', type: { $first: '$type' }, sensors: { $first: '$sensors' }, id: { $first: '$_id' } } }
]);
This would get the first found document for each serial in the list.
Edit
With $sort: {_id: -1} it actually returns the last inserted document per serial (according to _id); change the sort to {_id: 1} to get the first inserted instead.
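A sketch of running the same pipeline from a Node.js service with the official mongodb driver; the database name, collection name, and connection string are placeholders:
// first-per-serial.js - one document per serial from a list, via the aggregation framework
// Assumes: npm install mongodb, a local mongod, and a "sensors" collection (placeholder names).
const { MongoClient } = require('mongodb');

async function firstPerSerial(serials) {
  const client = await MongoClient.connect('mongodb://localhost:27017');
  try {
    const collection = client.db('test').collection('sensors');
    return await collection.aggregate([
      { $match: { serial: { $in: serials } } },
      { $sort: { _id: -1 } }, // -1: last inserted wins; use 1 for the first inserted
      { $group: { _id: '$serial', type: { $first: '$type' }, sensors: { $first: '$sensors' }, id: { $first: '$_id' } } }
    ]).toArray();
  } finally {
    await client.close();
  }
}

firstPerSerial(['2_os_os-hostname_yahourt', '2_cpu_cpu-id_total'])
  .then(console.log)
  .catch(console.error);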