Merge object fields aggregation - mongodb

I'm using MongoDB 3.6 and I have the following data:
[
  {
    "valoresVencimentos": [1468.12, 85.9, 14.1, 1899.99, 241.92, 869.99, 696.11],
    "classesVencimentos": ["lim_up_1ano", "lim_1ano", "a_venc_180d", "a_venc_180d", "outros", "a_venc_180d", "a_venc_180d"],
    "classesModalidades": ["financiamento", "financiamento", "curto prazo", "cartao", "cartao", "cartao", "cartao"]
  },
  {
    "valoresVencimentos": [627.29, 241.92, 413.47, 229.74, 1687.58, 100],
    "classesVencimentos": ["a_venc_180d", "outros", "a_venc_180d", "a_venc_180d", "lim_up_1ano", "lim_1ano"],
    "classesModalidades": ["cartao", "cartao", "cartao", "cartao", "financiamento", "financiamento"]
  },
  {
    "valoresVencimentos": [268.59, 27.6, 428.51, 173.85, 2301.45, 100, 241.11],
    "classesVencimentos": ["a_venc_180d", "outros", "a_venc_180d", "a_venc_180d", "lim_up_1ano", "lim_1ano", "a_venc_180d"],
    "classesModalidades": ["cartao", "cartao", "cartao", "cartao", "financiamento", "financiamento", "curto prazo"]
  }
]
And I need to merge the fields with the same name from each object in the input array, so the result is:
{
  "valoresVencimentos": [
    1468.12, 85.9, 14.1, 1899.99, 241.92, 869.99, 696.11,
    627.29, 241.92, 413.47, 229.74, 1687.58, 100,
    268.59, 27.6, 428.51, 173.85, 2301.45, 100, 241.11
  ],
  "classesVencimentos": [
    "lim_up_1ano", "lim_1ano", "a_venc_180d", "a_venc_180d", "outros", "a_venc_180d", "a_venc_180d",
    "a_venc_180d", "outros", "a_venc_180d", "a_venc_180d", "lim_up_1ano", "lim_1ano",
    "a_venc_180d", "outros", "a_venc_180d", "a_venc_180d", "lim_up_1ano", "lim_1ano", "a_venc_180d"
  ],
  "classesModalidades": [
    "financiamento", "financiamento", "curto prazo", "cartao", "cartao", "cartao", "cartao",
    "cartao", "cartao", "cartao", "cartao", "financiamento", "financiamento",
    "cartao", "cartao", "cartao", "cartao", "financiamento", "financiamento", "curto prazo"
  ]
}
Currently my aggregation is:
db.collection.aggregate([
  {
    $match: { code: '11122233344' }
  },
  {
    $project: {
      _id: 0,
      valoresVencimentos: '$response.operations.expirations.value',
      classesVencimentos: '$response.operations.expirations.class',
      classesModalidades: '$response.operations.expirations.modality'
    }
  },
  {
    $addFields: {
      valoresVencimentos: {
        $reduce: {
          input: '$valoresVencimentos',
          initialValue: [],
          in: { $concatArrays: ['$$this', '$$value'] }
        }
      },
      classesVencimentos: {
        $reduce: {
          input: '$classesVencimentos',
          initialValue: [],
          in: { $concatArrays: ['$$this', '$$value'] }
        }
      },
      classesModalidades: {
        $reduce: {
          input: '$classesModalidades',
          initialValue: [],
          in: { $concatArrays: ['$$this', '$$value'] }
        }
      }
    }
  }
])
It's important to say that the ordering should be the same as in the input data, so I think using $group may be a problem. I have already tried a lot of alternatives, but my knowledge of the aggregation framework is kind of limited; I'd appreciate any help.

Try this:
db.myCollection.aggregate([
  {
    $facet: {
      valoresVencimentos: [
        { $unwind: "$valoresVencimentos" },
        {
          $group: {
            _id: null,
            valoresVencimentos: { $push: "$valoresVencimentos" }
          }
        }
      ],
      classesVencimentos: [
        { $unwind: "$classesVencimentos" },
        {
          $group: {
            _id: null,
            classesVencimentos: { $push: "$classesVencimentos" }
          }
        }
      ],
      classesModalidades: [
        { $unwind: "$classesModalidades" },
        {
          $group: {
            _id: null,
            classesModalidades: { $push: "$classesModalidades" }
          }
        }
      ]
    }
  },
  { $unwind: "$valoresVencimentos" },
  { $unwind: "$classesVencimentos" },
  { $unwind: "$classesModalidades" },
  {
    $project: {
      valoresVencimentos: "$valoresVencimentos.valoresVencimentos",
      classesVencimentos: "$classesVencimentos.classesVencimentos",
      classesModalidades: "$classesModalidades.classesModalidades"
    }
  }
]);
or you can combine the last three $unwind stages and the $project into a single $project:
...,
{
  $project: {
    valoresVencimentos: { $arrayElemAt: ["$valoresVencimentos.valoresVencimentos", 0] },
    classesVencimentos: { $arrayElemAt: ["$classesVencimentos.classesVencimentos", 0] },
    classesModalidades: { $arrayElemAt: ["$classesModalidades.classesModalidades", 0] }
  }
}
...
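As an aside on the original $reduce attempt: it was close. $reduce walks its input from left to right, so { $concatArrays: ['$$this', '$$value'] } prepends each chunk and reverses the chunk order; swapping the operands preserves the input order. A minimal sketch for one of the three fields:

{
  $addFields: {
    valoresVencimentos: {
      $reduce: {
        input: '$valoresVencimentos',
        initialValue: [],
        // append each sub-array after the accumulator, keeping input order
        in: { $concatArrays: ['$$value', '$$this'] }
      }
    }
  }
}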

Related

access array of object data in flutter

I am getting an array of objects from my REST API and try to access it in Flutter, but I cannot. I want to access posts, comments, likes and status.
The error is: The getter 'data' isn't defined for the type 'List'.
Try importing the library that defines 'data', correcting the name to the name of an existing getter, or defining a getter or field named 'data'.
The error is raised by the lines snapshot.data!.data[index].posts[postPosition].url and snapshot.data!.data[index].status.
The data from API:
[
  {
    "_id": "6304e73ecdc5d350cc33e902",
    "userId": "6304e42231ef2e7a4dec924d",
    "posts": [
      { "postType": "image", "post": "https://www.theskinnybeep.com/wp-content/uploads/2019/01/Versace-Man-2019.jpg", "_id": "6304e73ecdc5d350cc33e903" },
      { "postType": "image", "post": "https://www.theskinnybeep.com/wp-content/uploads/2019/01/Versace-Man-2019.jpg", "_id": "6304e73ecdc5d350cc33e904" }
    ],
    "status": "testing status",
    "comments": [
      {
        "commentText": "Testing comment",
        "commentAt": "2022-08-23T14:41:55.646Z",
        "commentReplys": [
          {
            "userId": "6304e02d481e08d44e618d41",
            "replyText": "Testing comment",
            "replyAt": "2022-08-23T14:41:55.646Z",
            "replyLikes": [
              { "userId": "6304e02d481e08d44e618d41", "_id": "6304e73ecdc5d350cc33e907", "isNotified": true },
              { "userId": "6304e42231ef2e7a4dec924d", "isNotified": true, "_id": "6305f8d07d513ce62b9c099f" }
            ],
            "_id": "6304e73ecdc5d350cc33e906"
          },
          {
            "userId": "6304e02d481e08d44e618d41",
            "replyText": "reply text testing",
            "replyAt": "2022-08-23T15:57:51.259Z",
            "_id": "6304f90191c32e0deac663b8",
            "replyLikes": [
              { "userId": "6304e42231ef2e7a4dec924d", "isNotified": true, "_id": "6305f8d07d513ce62b9c099f" }
            ]
          }
        ],
        "commentLikes": [
          { "userId": "6304e42231ef2e7a4dec924d", "isNotified": true, "_id": "6305f8f67d513ce62b9c09a2" }
        ],
        "commentId": "bc174de0-22f1-11ed-9c5d-23d89a83ff32",
        "_id": "6304e73ecdc5d350cc33e905"
      },
      {
        "commentText": "Testing comment",
        "commentAt": "2022-08-23T15:02:11.123Z",
        "commentId": "90928740-22f4-11ed-b912-e99836187b6d",
        "_id": "6304ec67825b5926f0f074cf",
        "commentReplys": [
          {
            "userId": "6304e02d481e08d44e618d41",
            "replyText": "reply text testing",
            "replyAt": "2022-08-23T15:57:51.259Z",
            "_id": "6304f90191c32e0deac663b8",
            "replyLikes": [
              { "userId": "6304e42231ef2e7a4dec924d", "isNotified": true, "_id": "6305f8d07d513ce62b9c099f" }
            ]
          }
        ],
        "commentLikes": [
          { "userId": "6304e42231ef2e7a4dec924d", "isNotified": true, "_id": "6305f8f67d513ce62b9c09a2" }
        ]
      },
      {
        "commentText": "Testing comment",
        "commentAt": "2022-08-23T15:02:11.123Z",
        "commentId": "90928740-22f4-11ed-b912-e99836187b6d",
        "_id": "6304ec81825b5926f0f074d1",
        "commentReplys": [
          {
            "userId": "6304e02d481e08d44e618d41",
            "replyText": "reply text testing",
            "replyAt": "2022-08-23T15:57:51.259Z",
            "_id": "6304f90191c32e0deac663b8",
            "replyLikes": [
              { "userId": "6304e42231ef2e7a4dec924d", "isNotified": true, "_id": "6305f8d07d513ce62b9c099f" }
            ]
          }
        ],
        "commentLikes": [
          { "userId": "6304e42231ef2e7a4dec924d", "isNotified": true, "_id": "6305f8f67d513ce62b9c09a2" }
        ]
      }
    ],
    "likes": [
      { "userId": "6304e42231ef2e7a4dec924d", "_id": "63052dc7a1728d463769681b" }
    ],
    "__v": 0
  },
  {
    "_id": "63070a03584ed0febe5b5a5f",
    "status": "testing",
    "posts": [],
    "comments": [],
    "likes": []
  }
]
Flutter code:
Widget build(BuildContext context) {
  return SizedBox(
    height: 600,
    child: FutureBuilder<List<Posts>>(
      future: fetchPost(),
      builder: ((context, snapshot) {
        if (snapshot.hasData) {
          return ListView.builder(
            scrollDirection: Axis.vertical,
            shrinkWrap: true,
            itemCount: snapshot.data!.length,
            itemBuilder: (context, index) {
              return ListView.builder(
                scrollDirection: Axis.vertical,
                shrinkWrap: true,
                itemCount: snapshot.data!.data[index].posts!.length,
                itemBuilder: (context, postPosition) {
                  return Column(
                    children: [
                      Text(snapshot.data!.data[index].status),
                      ListView.builder(
                        itemCount: snapshot.data!.data[index].posts.length,
                        itemBuilder: (context, postPosition) {
                          return Column(
                            children: [
                              Image.network(snapshot.data!.data[index].posts[postPosition].url)
                            ],
                          );
                        },
                      )
                    ],
                  );
                },
              );
            },
          );
        } else {
          return CircularProgressIndicator();
        }
      }),
    ),
  );
}
The Posts class:
List<Posts> postsFromJson(String str) => List<Posts>.from(json.decode(str).map((x) => Posts.fromJson(x)));

String postsToJson(List<Posts> data) => json.encode(List<dynamic>.from(data.map((x) => x.toJson())));

class Posts {
  Posts({
    this.id,
    this.userId,
    required this.posts,
    this.status,
    this.comments,
    this.likes,
  });

  String? id;
  String? userId;
  List<Post> posts;
  String? status;
  List<Comment>? comments;
  List<Like>? likes;

  factory Posts.fromJson(Map<String, dynamic> json) => Posts(
        id: json["_id"],
        userId: json["userId"],
        posts: List<Post>.from(json["posts"].map((x) => Post.fromJson(x))),
        status: json["status"],
        comments: List<Comment>.from(json["comments"].map((x) => Comment.fromJson(x))),
        likes: List<Like>.from(json["likes"].map((x) => Like.fromJson(x))),
      );

  get length => null;

  get data => null;

  Map<String, dynamic> toJson() => {
        "_id": id,
        "userId": userId,
        "posts": List<dynamic>.from(posts.map((x) => x.toJson())),
        "status": status,
        "comments": List<dynamic>.from(comments!.map((x) => x.toJson())),
        "likes": List<dynamic>.from(likes!.map((x) => x.toJson())),
      };
}
I am not sure how your IDE lets you compile this code with the type you added to the FutureBuilder.
The problem seems to be here: after snapshot.data you should not access another data member. snapshot.data is already the List<Posts>, so snapshot.data!.data should not be used.
itemCount: snapshot.data![index].posts!.length,
itemBuilder: (context, postPosition) {
  return Column(
    children: [
      Text(snapshot.data![index].status),
      ListView.builder(
        itemCount: snapshot.data![index].posts.length,
        itemBuilder: (context, postPosition) {
          return Column(
            children: [
              Image.network(snapshot.data![index].posts[postPosition].url)
            ],
          );
        },
      )
    ],
  );
From your API response, what you named class Posts is not a list, but Posts.posts is a list. Try removing the list wrapper as shown below and print the result. You may get other errors when wiring it into your widget because the code will be a bit different; you can ask again if you face other difficulties.
example:
PostModel postModelFromJson(String str) =>
    PostModel.fromJson(json.decode(str));

String postModelToJson(PostModel data) =>
    json.encode(data.toJson());

MongoDB: use $sum with multiple $cond

I want to calculate points based on the size of array elements.
Can I use $sum with multiple $cond?
Something like this:
{
  $project: {
    points: {
      $sum: [
        {
          $cond: [
            {
              $and: [
                { $gt: [{ $arrayElemAt: ["$matches", 0] }, 0] },
                { $gt: [{ $arrayElemAt: ["$matches", 1] }, 0] },
                { $gt: [{ $arrayElemAt: ["$matches", 2] }, 0] }
              ]
            },
            10,
            0
          ]
        },
        {
          $cond: [
            {
              $and: [
                { $gt: [{ $arrayElemAt: ["$matches", 3] }, 0] },
                { $gt: [{ $arrayElemAt: ["$matches", 4] }, 0] },
                { $gt: [{ $arrayElemAt: ["$matches", 5] }, 0] }
              ]
            },
            10,
            0
          ]
        }
      ]
    }
  }
}
This script always returns 20 even if not all conditions are true
Example of document:
{
  "_id": NumberInt(5),
  "total_matches": NumberInt(0),
  "matches": [ [], [], [], [], [], [], [], [], [] ]
}
OUTPUT
{
  "_id": NumberInt(5),
  "points": 20.0,
  "matches": [ [], [], [], [], [], [], [], [], [] ]
}
OK, the problem is that I forgot $size in the query. The right query, with $size applied to every element, is this:
{
  $project: {
    points: {
      $sum: [
        {
          $cond: [
            {
              $and: [
                { $gt: [{ $size: { $arrayElemAt: ["$matches", 0] } }, 0] },
                { $gt: [{ $size: { $arrayElemAt: ["$matches", 1] } }, 0] },
                { $gt: [{ $size: { $arrayElemAt: ["$matches", 2] } }, 0] }
              ]
            },
            10,
            0
          ]
        },
        {
          $cond: [
            {
              $and: [
                { $gt: [{ $size: { $arrayElemAt: ["$matches", 3] } }, 0] },
                { $gt: [{ $size: { $arrayElemAt: ["$matches", 4] } }, 0] },
                { $gt: [{ $size: { $arrayElemAt: ["$matches", 5] } }, 0] }
              ]
            },
            10,
            0
          ]
        }
      ]
    }
  }
}
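For what it's worth, the per-triplet check can be generalized so that every slot goes through $size, which avoids the kind of omission above. A minimal sketch, assuming the collection is named games and each fully non-empty triplet of slots is worth 10 points:

db.games.aggregate([
  {
    $project: {
      matches: 1,
      points: {
        $sum: {
          $map: {
            input: [0, 3], // starting index of each triplet
            as: "i",
            in: {
              $cond: [
                {
                  $and: [
                    { $gt: [{ $size: { $arrayElemAt: ["$matches", "$$i"] } }, 0] },
                    { $gt: [{ $size: { $arrayElemAt: ["$matches", { $add: ["$$i", 1] }] } }, 0] },
                    { $gt: [{ $size: { $arrayElemAt: ["$matches", { $add: ["$$i", 2] }] } }, 0] }
                  ]
                },
                10,
                0
              ]
            }
          }
        }
      }
    }
  }
])

For the sample document above, where all nine arrays are empty, this returns points: 0.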

How to modify the execution plan in Spark?

I am getting some execution plans in JSON format.
val df: DataFrame = ???
val jsonPlan = df.queryExecution.optimizedPlan.toJSON
The full jsonPlan output is shown at the end of this question.
You can see that the InMemoryRelation operator has the child key: "child": [..].
Is it possible to add user information to a plan tree node before calling the toJSON method, so that the added information is serialized along with the tree node?
For example, I want to add a label to every child node (label: "it a cute child").
Then the result will be like this:
"child":[
{
"class":"org.apache.spark.sql.execution.WholeStageCodegenExec",
"num-children":1,
"child":0,
"codegenStageId":2
// My Added Information
"label": "it a cute child"
},
...
]
Full json plan:
[
{
"class":"org.apache.spark.sql.execution.columnar.InMemoryRelation",
"num-children":0,
"output":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"criminal_name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":73,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
]
}],
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"punishment",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":78,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
]
}]],
"cacheBuilder":null,
"outputOrdering":[
],
"child":[
{
"class":"org.apache.spark.sql.execution.WholeStageCodegenExec",
"num-children":1,
"child":0,
"codegenStageId":2
},
{
"class":"org.apache.spark.sql.execution.ProjectExec",
"num-children":1,
"projectList":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.Alias",
"num-children":1,
"child":0,
"name":"criminal_name",
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":73,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
],
"explicitMetadata":{
}
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":56,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, staff_dossiers]"
}],
[
{
"class":"org.apache.spark.sql.catalyst.expressions.Alias",
"num-children":1,
"child":0,
"name":"punishment",
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":78,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
],
"explicitMetadata":{
}
},
{
"class":"org.apache.spark.sql.catalyst.expressions.ScalaUDF",
"num-children":1,
"function":null,
"dataType":"string",
"children":[
0],
"inputEncoders":null,
"nullable":true,
"udfDeterministic":true
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"hobby",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":64,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, indicators]"
}]],
"child":0
},
{
"class":"org.apache.spark.sql.execution.joins.BroadcastHashJoinExec",
"num-children":2,
"leftKeys":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":56,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, staff_dossiers]"
}]],
"rightKeys":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":62,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, indicators]"
}]],
"joinType":{
"object":"org.apache.spark.sql.catalyst.plans.Inner$"
},
"buildSide":{
"object":"org.apache.spark.sql.execution.joins.package$BuildLeft$"
},
"left":0,
"right":1
},
{
"class":"org.apache.spark.sql.execution.InputAdapter",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.execution.exchange.BroadcastExchangeExec",
"num-children":1,
"mode":{
"product-class":"org.apache.spark.sql.execution.joins.HashedRelationBroadcastMode",
"key":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.BoundReference",
"num-children":0,
"ordinal":0,
"dataType":"string",
"nullable":true
}]]
},
"child":0
},
{
"class":"org.apache.spark.sql.execution.WholeStageCodegenExec",
"num-children":1,
"child":0,
"codegenStageId":1
},
{
"class":"org.apache.spark.sql.execution.ProjectExec",
"num-children":1,
"projectList":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":56,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
]
}]],
"child":0
},
{
"class":"org.apache.spark.sql.execution.FilterExec",
"num-children":1,
"condition":[
{
"class":"org.apache.spark.sql.catalyst.expressions.And",
"num-children":2,
"left":0,
"right":1
},
{
"class":"org.apache.spark.sql.catalyst.expressions.IsNotNull",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":56,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, staff_dossiers]"
},
{
"class":"org.apache.spark.sql.catalyst.expressions.EqualTo",
"num-children":2,
"left":0,
"right":1
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":56,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, staff_dossiers]"
},
{
"class":"org.apache.spark.sql.catalyst.expressions.Literal",
"num-children":0,
"value":"Nikita",
"dataType":"string"
}],
"child":0
},
{
"class":"org.apache.spark.sql.execution.ColumnarToRowExec",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.execution.InputAdapter",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.execution.FileSourceScanExec",
"num-children":0,
"relation":null,
"output":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":56,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
]
}]],
"requiredSchema":{
"type":"struct",
"fields":[
{
"name":"name",
"type":"string",
"nullable":true,
"metadata":{
}
}]
},
"partitionFilters":[
],
"dataFilters":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.IsNotNull",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":56,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, staff_dossiers]"
}],
[
{
"class":"org.apache.spark.sql.catalyst.expressions.EqualTo",
"num-children":2,
"left":0,
"right":1
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":56,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, staff_dossiers]"
},
{
"class":"org.apache.spark.sql.catalyst.expressions.Literal",
"num-children":0,
"value":"Nikita",
"dataType":"string"
}]],
"tableIdentifier":{
"product-class":"org.apache.spark.sql.catalyst.TableIdentifier",
"table":"staff_dossiers",
"database":"murphy"
}
},
{
"class":"org.apache.spark.sql.execution.ProjectExec",
"num-children":1,
"projectList":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":62,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
]
}],
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"hobby",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":64,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
]
}]],
"child":0
},
{
"class":"org.apache.spark.sql.execution.FilterExec",
"num-children":1,
"condition":[
{
"class":"org.apache.spark.sql.catalyst.expressions.And",
"num-children":2,
"left":0,
"right":1
},
{
"class":"org.apache.spark.sql.catalyst.expressions.EqualTo",
"num-children":2,
"left":0,
"right":1
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":62,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, indicators]"
},
{
"class":"org.apache.spark.sql.catalyst.expressions.Literal",
"num-children":0,
"value":"Nikita",
"dataType":"string"
},
{
"class":"org.apache.spark.sql.catalyst.expressions.IsNotNull",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":62,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, indicators]"
}],
"child":0
},
{
"class":"org.apache.spark.sql.execution.ColumnarToRowExec",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.execution.InputAdapter",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.execution.FileSourceScanExec",
"num-children":0,
"relation":null,
"output":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":62,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
]
}],
[
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"hobby",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":64,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":[
]
}]],
"requiredSchema":{
"type":"struct",
"fields":[
{
"name":"name",
"type":"string",
"nullable":true,
"metadata":{
}
},
{
"name":"hobby",
"type":"string",
"nullable":true,
"metadata":{
}
}]
},
"partitionFilters":[
],
"dataFilters":[
[
{
"class":"org.apache.spark.sql.catalyst.expressions.EqualTo",
"num-children":2,
"left":0,
"right":1
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":62,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, indicators]"
},
{
"class":"org.apache.spark.sql.catalyst.expressions.Literal",
"num-children":0,
"value":"Nikita",
"dataType":"string"
}],
[
{
"class":"org.apache.spark.sql.catalyst.expressions.IsNotNull",
"num-children":1,
"child":0
},
{
"class":"org.apache.spark.sql.catalyst.expressions.AttributeReference",
"num-children":0,
"name":"name",
"dataType":"string",
"nullable":true,
"metadata":{
},
"exprId":{
"product-class":"org.apache.spark.sql.catalyst.expressions.ExprId",
"id":62,
"jvmId":"2996f433-9e44-40c1-9aaf-d74c0768f68b"
},
"qualifier":"[spark_catalog, murphy, indicators]"
}]],
"tableIdentifier":{
"product-class":"org.apache.spark.sql.catalyst.TableIdentifier",
"table":"indicators",
"database":"murphy"
}
}
]
}
]
Sure, you can parse and modify any JSON object in memory, but that has nothing to do with Spark. Related: What JSON library to use in Scala?
Any modifications you make wouldn't be persisted within the execution plan itself.
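To make that post-processing concrete, here is a minimal sketch in plain JavaScript (Node.js); the file name plan.json and the label text are assumptions, and any JSON library in any language would do the same:

const fs = require("fs");

// toJSON emits the plan as nested arrays/objects; walk the parsed value and
// attach a label to every node object (recognizable by its "class" key).
function labelNodes(value) {
  if (Array.isArray(value)) {
    value.forEach(labelNodes);
  } else if (value !== null && typeof value === "object") {
    if ("class" in value) value.label = "it a cute child";
    Object.values(value).forEach(labelNodes);
  }
}

const plan = JSON.parse(fs.readFileSync("plan.json", "utf8"));
labelNodes(plan);
fs.writeFileSync("plan-labeled.json", JSON.stringify(plan, null, 2));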

Issues with geo indexes in mongodb

We have a collection of ~1M items that we query using the $nearSphere selector. It takes between 3 and 20 seconds to return 200 items.
From the explain plan of the request, we can see that it goes through the same index 6 times.
Is this the expected behavior of the MongoDB query planner?
We would like to know if there is a way to force Mongo to filter first by some field like endDate to reduce the set, and then apply $nearSphere.
On our monitoring system we can see some page faults and asserts, but they might be related to the lack of IOPS of our hard drive.
Thank you for your help.
Here is the explain plan (I removed the rejected plans and truncated the BinData lines):
{
"queryPlanner": {
"plannerVersion": 1.0,
"namespace": "myCollection.Post",
"indexFilterSet": false,
"parsedQuery": {
"$and": [
{
"$or": [
{
"availableToUsers": {
"$eq": "M76zJCedq4"
}
},
{
"$nor": [
{
"availableToUsers": {
"$exists": true
}
}
]
}
]
},
{
"startDate": {
"$lt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"availableSubmitNumber": {
"$gt": 0.0
}
},
{
"endDate": {
"$gt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"name": {
"$in": ["Post1", "Post2"]
}
},
{
"$nor": [
{
"acceptedByUserId": {
"$eq": "M76zJCedq4"
}
}
]
},
{
"locationGeoPoint": {
"$nearSphere": [
174.9084055,
-36.9293289
]
}
}
]
},
"winningPlan": {
"stage": "FETCH",
"filter": {
"$and": [
{
"$or": [
{
"availableToUsers": {
"$eq": "M76zJCedq4"
}
},
{
"$nor": [
{
"availableToUsers": {
"$exists": true
}
}
]
}
]
},
{
"$nor": [
{
"acceptedByUserId": {
"$eq": "M76zJCedq4"
}
}
]
}
]
},
"inputStage": {
"stage": "GEO_NEAR_2D",
"keyPattern": {
"locationGeoPoint": "2d",
"endDate": 1.0,
"startDate": 1.0,
"availableSubmitNumber": 1.0,
"name": 1.0
},
"indexName": "locationGeoPoint_2d_endDate_1_startDate_1_availableSubmitNumber_1_name_1",
"indexVersion": 2.0,
"inputStages": [
{
"stage": "FETCH",
"inputStage": {
"stage": "IXSCAN",
"filter": {
"$and": [
{
"endDate": {
"$gt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"startDate": {
"$lt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"availableSubmitNumber": {
"$gt": 0.0
}
},
{
"name": {
"$in": ["Post1", "Post2"]
}
}
]
},
"keyPattern": {
"locationGeoPoint": "2d",
"endDate": 1.0,
"startDate": 1.0,
"availableSubmitNumber": 1.0,
"name": 1.0
},
"indexName": "locationGeoPoint_2d_endDate_1_startDate_1_availableSubmitNumber_1_name_1",
"isMultiKey": false,
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2.0,
"direction": "forward",
"indexBounds": {
"locationGeoPoint": [
"[BinData(128, BEB167B000000000), BinData(128, BEB167BFFFFFFFFF)]"
],
"endDate": [
"[MinKey, MaxKey]"
],
"startDate": [
"[MinKey, MaxKey]"
],
"availableSubmitNumber": [
"[MinKey, MaxKey]"
],
"name": [
"[MinKey, MaxKey]"
]
}
}
},
{
"stage": "FETCH",
"inputStage": {
"stage": "IXSCAN",
"filter": {
"$and": [
{
"endDate": {
"$gt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"startDate": {
"$lt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"availableSubmitNumber": {
"$gt": 0.0
}
},
{
"name": {
"$in": ["Post1", "Post2"]
}
}
]
},
"keyPattern": {
"locationGeoPoint": "2d",
"endDate": 1.0,
"startDate": 1.0,
"availableSubmitNumber": 1.0,
"name": 1.0
},
"indexName": "locationGeoPoint_2d_endDate_1_startDate_1_availableSubmitNumber_1_name_1",
"isMultiKey": false,
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2.0,
"direction": "forward",
"indexBounds": {
"locationGeoPoint": [
"[BinData(128, BEB1658000000000), BinData(128, BEB165BFFFFFFFFF)]",
"[BinData(128, BEB165C000000000), BinData(128, BEB165FFFFFFFFFF)]"
],
"endDate": [
"[MinKey, MaxKey]"
],
"startDate": [
"[MinKey, MaxKey]"
],
"availableSubmitNumber": [
"[MinKey, MaxKey]"
],
"name": [
"[MinKey, MaxKey]"
]
}
}
},
{
"stage": "FETCH",
"inputStage": {
"stage": "IXSCAN",
"filter": {
"$and": [
{
"endDate": {
"$gt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"startDate": {
"$lt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"availableSubmitNumber": {
"$gt": 0.0
}
},
{
"name": {
"$in": ["Post1", "Post2"]
}
}
]
},
"keyPattern": {
"locationGeoPoint": "2d",
"endDate": 1.0,
"startDate": 1.0,
"availableSubmitNumber": 1.0,
"name": 1.0
},
"indexName": "locationGeoPoint_2d_endDate_1_startDate_1_availableSubmitNumber_1_name_1",
"isMultiKey": false,
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2.0,
"direction": "forward",
"indexBounds": {
"locationGeoPoint": [
"[BinData(128, BEB14BC000000000), BinData(128, BEB14BFFFFFFFFFF)]",
"[BinData(128, BEB14C0000000000), BinData(128, BEB14FFFFFFFFFFF)]",
"[BinData(128, BEB1580000000000), BinData(128, BEB15BFFFFFFFFFF)]",
"[BinData(128, BEB1600000000000), BinData(128, BEB163FFFFFFFFFF)]",
"[BinData(128, BEB1640000000000), BinData(128, BEB164FFFFFFFFFF)]",
"[BinData(128, BEB1650000000000), BinData(128, BEB1653FFFFFFFFF)]",
"[BinData(128, BEB1654000000000), BinData(128, BEB1657FFFFFFFFF)]",
"[BinData(128, BEB1680000000000), BinData(128, BEB16BFFFFFFFFFF)]"
],
"endDate": [
"[MinKey, MaxKey]"
],
"startDate": [
"[MinKey, MaxKey]"
],
"availableSubmitNumber": [
"[MinKey, MaxKey]"
],
"name": [
"[MinKey, MaxKey]"
]
}
}
},
{
"stage": "FETCH",
"inputStage": {
"stage": "IXSCAN",
"filter": {
"$and": [
{
"endDate": {
"$gt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"startDate": {
"$lt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"availableSubmitNumber": {
"$gt": 0.0
}
},
{
"name": {
"$in": ["Post1", "Post2"]
}
}
]
},
"keyPattern": {
"locationGeoPoint": "2d",
"endDate": 1.0,
"startDate": 1.0,
"availableSubmitNumber": 1.0,
"name": 1.0
},
"indexName": "locationGeoPoint_2d_endDate_1_startDate_1_availableSubmitNumber_1_name_1",
"isMultiKey": false,
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2.0,
"direction": "forward",
"indexBounds": {
"locationGeoPoint": [
"[BinData(128, BE9BE00000000000), BinData(128, BE9BEFFFFFFFFFFF)]",
"[BinData(128, BE9BF80000000000), BinData(128, BE9BFBFFFFFFFFFF)]",
"[BinData(128, BEB1100000000000), BinData(128, BEB11FFFFFFFFFFF)]",
"[BinData(128, BEB1300000000000), BinData(128, BEB13FFFFFFFFFFF)]",
"[BinData(128, BEB1400000000000), BinData(128, BEB143FFFFFFFFFF)]"
],
"endDate": [
"[MinKey, MaxKey]"
],
"startDate": [
"[MinKey, MaxKey]"
],
"availableSubmitNumber": [
"[MinKey, MaxKey]"
],
"name": [
"[MinKey, MaxKey]"
]
}
}
},
{
"stage": "FETCH",
"inputStage": {
"stage": "IXSCAN",
"filter": {
"$and": [
{
"endDate": {
"$gt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"startDate": {
"$lt": ISODate(
"2019-03-01T01:02:00.000+0000"
)
}
},
{
"availableSubmitNumber": {
"$gt": 0.0
}
},
{
"name": {
"$in": ["Post1", "Post2"]
}
}
]
},
"keyPattern": {
"locationGeoPoint": "2d",
"endDate": 1.0,
"startDate": 1.0,
"availableSubmitNumber": 1.0,
"name": 1.0
},
"indexName": "locationGeoPoint_2d_endDate_1_startDate_1_availableSubmitNumber_1_name_1",
"isMultiKey": false,
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2.0,
"direction": "forward",
"indexBounds": {
"locationGeoPoint": [
"[BinData(128, BE9B800000000000), BinData(128, BE9BBFFFFFFFFFFF)]",
"[BinData(128, BE9BC00000000000), BinData(128, BE9BCFFFFFFFFFFF)]"
],
"endDate": [
"[MinKey, MaxKey]"
],
"startDate": [
"[MinKey, MaxKey]"
],
"availableSubmitNumber": [
"[MinKey, MaxKey]"
],
"name": [
"[MinKey, MaxKey]"
]
}
}
}
]
}
}
},
"serverInfo": {
"port": 27017.0,
"version": "4.0.3",
"gitVersion": "7ea530946fa7880364d88c8d8b6026bbc9ffa48c"
},
"ok": 1.0,
"operationTime": Timestamp(1551940718,
4),
"$clusterTime": {
"clusterTime": Timestamp(1551940718,
4),
"signature": {
"hash": BinData(0,
"AAAAAAAAAAAAAAAAAAAAAAAAAAA="
),
"keyId": NumberLong(0)
}
}
}
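An editorial aside on the filter-first question (not from the original thread): $nearSphere must be driven by a geo index, but $geoWithin has no such requirement, so a compound index that leads with the selective fields lets MongoDB narrow by endDate and startDate before the geo predicate runs. A hedged sketch, with the collection name, index shape, and 10 km radius all assumed (note that $geoWithin, unlike $nearSphere, does not sort results by distance):

db.Post.createIndex({ endDate: 1, startDate: 1, locationGeoPoint: "2dsphere" })

db.Post.find({
  endDate: { $gt: ISODate("2019-03-01T01:02:00Z") },
  startDate: { $lt: ISODate("2019-03-01T01:02:00Z") },
  locationGeoPoint: {
    // $centerSphere takes [lng, lat] and a radius in radians (km / 6378.1)
    $geoWithin: { $centerSphere: [[174.9084055, -36.9293289], 10 / 6378.1] }
  }
}).limit(200)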

How to calculate YTD and MTD in mongodb?

How can I calculate Month-To-Date (MTD) and Year-To-Date (YTD) in MongoDB in a single query? Sample data is below; in this data requestedOn is a date field. I want to calculate MTD and YTD on the assumption that the financial year starts on 1st Jan of the year (for example, the financial year for 2016 starts on "01-Jan-2016"):
{
"_id": {
"$oid": "5808578b33fa6f161c9747f8"
},
"_class": "exceltest.TestBean",
"requestedOn": "2000-03-01",
"bookName": "Test6",
"revenue": 10.0,
"unitsSold": 1,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
},
{
"categoryCode": "Cooking/Beverages/Bartending"
},
{
"categoryCode": "Food Receipe/Taste"
}
]
}{
"_id": {
"$oid": "5808578b33fa6f161c9747f9"
},
"_class": "exceltest.TestBean",
"requestedOn": "2000-03-01",
"bookName": "Test1",
"revenue": 11.0,
"unitsSold": 2,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
},
{
"categoryCode": "Cooking/Beverages/Bartending"
},
{
"categoryCode": "Food Receipe/Taste"
}
]
}{
"_id": {
"$oid": "5808578b33fa6f161c9747fa"
},
"_class": "exceltest.TestBean",
"requestedOn": "2000-06-01",
"bookName": "Test2",
"revenue": 12.0,
"unitsSold": 3,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
},
{
"categoryCode": "Cooking/Beverages/Bartending"
},
{
"categoryCode": "Food Receipe/Taste"
}
]
}{
"_id": {
"$oid": "5808578b33fa6f161c9747fb"
},
"_class": "exceltest.TestBean",
"requestedOn": "2000-07-01",
"bookName": "Test3",
"revenue": 13.0,
"unitsSold": 4,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
},
{
"categoryCode": "Cooking/Beverages/Bartending"
},
{
"categoryCode": "Food Receipe/Taste"
}
]
}{
"_id": {
"$oid": "5808578b33fa6f161c9747fc"
},
"_class": "exceltest.TestBean",
"requestedOn": "2009-09-01",
"bookName": "Test4",
"revenue": 14.0,
"unitsSold": 5,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
},
{
"categoryCode": "Cooking/Beverages/Bartending"
},
{
"categoryCode": "Food Receipe/Taste"
}
]
}{
"_id": {
"$oid": "5808578b33fa6f161c9747fd"
},
"_class": "exceltest.TestBean",
"requestedOn": "2009-06-01",
"bookName": "Test5",
"revenue": 15.0,
"unitsSold": 6,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
},
{
"categoryCode": "Cooking/Beverages/Bartending"
},
{
"categoryCode": "Food Receipe/Taste"
}
]
}{
"_id": {
"$oid": "5808578b33fa6f161c9747fe"
},
"_class": "exceltest.TestBean",
"requestedOn": "2004-06-01",
"bookName": "Test10",
"revenue": 16.0,
"unitsSold": 7,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
},
{
"categoryCode": "Cooking/Beverages/Bartending"
},
{
"categoryCode": "Food Receipe/Taste"
}
]
}{
"_id": {
"$oid": "5808578b33fa6f161c9747ff"
},
"_class": "exceltest.TestBean",
"requestedOn": "2000-01-01",
"bookName": "Test11",
"revenue": 100.0,
"unitsSold": 100,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
},
{
"categoryCode": "Cooking/Beverages/Bartending"
},
{
"categoryCode": "Food Receipe/Taste"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e462"
},
"_class": "exceltest.TestBean",
"requestedOn": "2000-02-01",
"bookName": "Test1",
"revenue": 20.0,
"unitsSold": 10,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e463"
},
"_class": "exceltest.TestBean",
"requestedOn": "2001-02-01",
"bookName": "Test2",
"revenue": 19.0,
"unitsSold": 9,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e464"
},
"_class": "exceltest.TestBean",
"requestedOn": "2001-02-01",
"bookName": "Test3",
"revenue": 18.0,
"unitsSold": 8,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e465"
},
"_class": "exceltest.TestBean",
"requestedOn": "2007-06-01",
"bookName": "Test4",
"revenue": 17.0,
"unitsSold": 7,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e466"
},
"_class": "exceltest.TestBean",
"requestedOn": "2005-06-01",
"bookName": "Test5",
"revenue": 16.0,
"unitsSold": 6,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e467"
},
"_class": "exceltest.TestBean",
"requestedOn": "2004-06-01",
"bookName": "Test1",
"revenue": 15.0,
"unitsSold": 5,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e468"
},
"_class": "exceltest.TestBean",
"requestedOn": "2002-06-01",
"bookName": "Test2",
"revenue": 14.0,
"unitsSold": 4,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e469"
},
"_class": "exceltest.TestBean",
"requestedOn": "2001-06-01",
"bookName": "Test3",
"revenue": 13.0,
"unitsSold": 3,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e46a"
},
"_class": "exceltest.TestBean",
"requestedOn": "2000-06-01",
"bookName": "Test4",
"revenue": 12.0,
"unitsSold": 2,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}{
"_id": {
"$oid": "580857b833fa6f0c3499e46b"
},
"_class": "exceltest.TestBean",
"requestedOn": "2008-06-01",
"bookName": "Test5",
"revenue": 11.0,
"unitsSold": 1,
"bookCategory": [
{
"categoryCode": "Cooking/"
},
{
"categoryCode": "Cooking/Beverages"
},
{
"categoryCode": "Food Receipe/"
},
{
"categoryCode": "Food Receipe/Bartending"
}
]
}
Regards
Kris
It is good practice to keep dates in MongoDB in the native date format, ISODate().
You can then use date operators like $year, $month, $dayOfMonth, $hour etc.
These can be used for grouping; in your case:
db.collectionName.aggregate([
  { $group: { _id: { 'Date': { $year: '$requestedOn' } }, total: { $sum: '$FieldName' } } }
])
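Building on that, here is a minimal sketch of MTD and YTD totals in a single query; the collection name books and the reporting date are assumptions, and requestedOn must already be a real ISODate rather than a string:

// Reporting date and period boundaries (financial year starts 1 Jan, per the question)
var now = new Date("2000-03-15");
var yearStart = new Date(Date.UTC(now.getUTCFullYear(), 0, 1));
var monthStart = new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), 1));

db.books.aggregate([
  // Keep only documents in the current financial year up to the reporting date
  { $match: { requestedOn: { $gte: yearStart, $lte: now } } },
  {
    $group: {
      _id: null,
      ytdRevenue: { $sum: "$revenue" },
      // Count a document toward MTD only if it falls in the current month
      mtdRevenue: {
        $sum: { $cond: [{ $gte: ["$requestedOn", monthStart] }, "$revenue", 0] }
      }
    }
  }
])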
To convert a string to ISODate, answers can be found at:
- http://stackoverflow.com/questions/15473772/how-to-convert-from-string-to-date-data-type