mongodb - Group by day with Multiple Date Fields


I have documents stored in MongoDB like this:

{     "_id" : "xbpnkbdgsggfnc2mj",     "po" : 72134185,     "machine" : 40940,     "location" : "02a01",     "indate" : isodate("2017-07-19t06:10:13.059z"),     "requestdate" : isodate("2017-07-19t06:17:04.901z"),     "outdate" : isodate("2017-07-19t06:30:34z") } 

and I want to get, per day, a count of indate and a count of outdate.

I can already retrieve, on one side, the count of documents per indate day and, on the other side, the count per outdate day, but I would like both counts together in one result.

Currently, I use this pipeline:

    $group: {
        _id: {
            year: { $year: '$indate' },
            month: { $month: '$indate' },
            day: { $dayOfMonth: '$indate' },
        },
        count: { $sum: 1 },
    },

and it gives:

{ "_id" : { "year" : 2017, "month" : 7, "day" : 24 }, "count" : 1 } { "_id" : { "year" : 2017, "month" : 7, "day" : 21 }, "count" : 11 } { "_id" : { "year" : 2017, "month" : 7, "day" : 19 }, "count" : 20 } 

But I would like, if it's possible, something like:

{ "_id" : { "year" : 2017, "month" : 7, "day" : 24 }, "countin" : 1, "countout" : 4 } { "_id" : { "year" : 2017, "month" : 7, "day" : 21 }, "countin" : 11, "countout" : 23 } { "_id" : { "year" : 2017, "month" : 7, "day" : 19 }, "countin" : 20, "countout" : 18 } 

Any idea? Many thanks :-)

You can split the documents at the source, combining each date value into an array of entries with a "type" of "in" or "out". You can do that using $map and $cond to select the fields, then $unwind the array and work out which field to "count" by again inspecting the type with $cond:

collection.aggregate([
  { "$project": {
    "dates": {
      "$filter": {
        "input": {
          "$map": {
            "input": [ "in", "out" ],
            "as": "type",
            "in": {
              "type": "$$type",
              "date": {
                "$cond": {
                  "if": { "$eq": [ "$$type", "in" ] },
                  "then": "$indate",
                  "else": "$outdate"
                }
              }
            }
          }
        },
        "as": "dates",
        "cond": { "$ne": [ "$$dates.date", null ] }
      }
    }
  }},
  { "$unwind": "$dates" },
  { "$group": {
    "_id": {
      "year": { "$year": "$dates.date" },
      "month": { "$month": "$dates.date" },
      "day": { "$dayOfMonth": "$dates.date" }
    },
    "countin": {
      "$sum": {
        "$cond": {
          "if": { "$eq": [ "$dates.type", "in" ] },
          "then": 1,
          "else": 0
        }
      }
    },
    "countout": {
      "$sum": {
        "$cond": {
          "if": { "$eq": [ "$dates.type", "out" ] },
          "then": 1,
          "else": 0
        }
      }
    }
  }}
])
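To make clear what the $group stage is counting, this is roughly what the sample document from the question looks like after the $project stage (both dates become typed entries in a single "dates" array), before $unwind splits it into one document per entry:

{
    "_id" : "xbpnkbdgsggfnc2mj",
    "dates" : [
        { "type" : "in",  "date" : ISODate("2017-07-19T06:10:13.059Z") },
        { "type" : "out", "date" : ISODate("2017-07-19T06:30:34Z") }
    ]
}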

That's the safe way of doing this which does not risk breaking the BSON limit, no matter what size of data you send at it.
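For contrast, one pattern that can hit the 16MB BSON document limit on large collections is accumulating whole documents into a per-group array with $push and measuring the array afterwards. A minimal sketch of that pattern (not what the pipeline above does, and not recommended here):

// Sketch only: each group document grows with every matching source
// document, so a busy day can push a single group past the 16MB BSON limit.
collection.aggregate([
  { "$group": {
    "_id": {
      "year": { "$year": "$indate" },
      "month": { "$month": "$indate" },
      "day": { "$dayOfMonth": "$indate" }
    },
    "docs": { "$push": "$$ROOT" }
  }},
  { "$project": { "countin": { "$size": "$docs" } } }
])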

Personally, I would rather run separate processes and "combine" the aggregated results separately, but that depends on the environment you are running in, which is not mentioned in the question.


for example of "parallel" execution, can structure in meteor somewhere along these lines:

import { Meteor } from 'meteor/meteor';
import { Source } from '../imports/source';
import { Target } from '../imports/target';

Meteor.startup(async () => {
  // code to run on server at startup

  await Source.remove({});
  await Target.remove({});

  console.log('removed');

  Source.insert({
    "_id" : "xbpnkbdgsggfnc2mj",
    "po" : 72134185,
    "machine" : 40940,
    "location" : "02a01",
    "indate" : new Date("2017-07-19T06:10:13.059Z"),
    "requestdate" : new Date("2017-07-19T06:17:04.901Z"),
    "outdate" : new Date("2017-07-19T06:30:34Z")
  });

  console.log('inserted');

  await Promise.all(
    ["in", "out"].map(f => new Promise((resolve, reject) => {
      let cursor = Source.rawCollection().aggregate([
        { "$match": { [`${f.toLowerCase()}date`]: { "$exists": true } } },
        { "$group": {
          "_id": {
            "year": { "$year": `$${f.toLowerCase()}date` },
            "month": { "$month": `$${f.toLowerCase()}date` },
            "day": { "$dayOfYear": `$${f.toLowerCase()}date` }
          },
          [`count${f}`]: { "$sum": 1 }
        }}
      ]);

      cursor.on('data', async (data) => {
        cursor.pause();
        data.date = data._id;
        delete data._id;
        await Target.upsert(
          { date: data.date },
          { "$set": data }
        );
        cursor.resume();
      });

      cursor.on('end', () => resolve('done'));
      cursor.on('error', (err) => reject(err));
    }))
  );

  console.log('mapped');

  let targets = await Target.find().fetch();
  console.log(targets);

});

which is going to output into the target collection something like (as mentioned in the comments):

{         "_id" : "xdpgmky24acvtnkq7",         "date" : {                 "year" : 2017,                 "month" : 7,                 "day" : 200         },         "countin" : 1,         "countout" : 1 } 
