I looked at various answers, but I can't find a way to get this going myself.
I have a function (in Node.js) that selects a winner out of a pool. When it selects the winner, though, I need it to send an event to the client, where it runs a function with data; the data would be the array index of the winner.
Selecting a winner:
var endRound = function() {
  ref.child('currentJackpot').once('value', function(data) {
    var currentJackpot = data.val();
    var winnerArray = [];
    var winnerObj = {};
    winnerObj.items = [];
    // Build a ticket array: each player gets one entry per cent of item value.
    for (var i = 0; i < currentJackpot.players.length; i++) {
      winnerObj.items = winnerObj.items.concat(currentJackpot.players[i].items);
      var playerValue = currentJackpot.players[i].itemsValue * 100;
      currentJackpot.players[i].chance = ((currentJackpot.players[i].itemsValue / currentJackpot.jackpotValue) * 100).toFixed(2);
      for (var j = 0; j < playerValue; j++) {
        winnerArray.push(i);
      }
    }
    var formatted = currentJackpot.roundHash.replace(/[.#$/]/g, "");
    sgRef.child(formatted).once('value', function(data) {
      var sgData = data.val();
      salt = sgData.salt;
      rngStr = sgData.rngStr;
      console.log('ROUND ENDED! hash: ', hash, ' salt: ', salt, ' rngStr: ', rngStr);
      currentJackpot.tickets = currentJackpot.jackpotValue * 100;
      currentJackpot.winningTicket = Math.floor(parseFloat(rngStr) * currentJackpot.tickets);
      currentJackpot.winningNumber = (parseFloat(rngStr) * 100).toFixed(2) + "%";
      currentJackpot.winner = currentJackpot.players[winnerArray[currentJackpot.winningTicket]];
      currentJackpot.salt = salt;
      currentJackpot.rngStr = rngStr;
      winnerObj.jackpotValue = currentJackpot.jackpotValue;
      currentJackpot.jackpotValue = currentJackpot.jackpotValue.toFixed(2);
      winnerObj.winner = currentJackpot.winner;
      winnerObj.tradeToken = currentJackpot.winner.tradeToken;
      ref.child('endedJackpots').push(currentJackpot);
      // Generate salt/hash for the next round.
      bcrypt.genSalt(10, function(err, data) {
        salt = data;
        rngStr = JSON.stringify(rng());
        bcrypt.hash(rngStr, salt, function(err, data) {
          hash = data;
          ref.child('currentJackpot').set({
            itemsCount: 0,
            jackpotValue: 0,
            roundHash: hash,
          }, function() {
            console.log('NEW ROUND! hash: ', hash, 'salt: ', salt, 'rngStr: ', rngStr);
            var formatted = hash.replace(/[.#$/]/g, "");
            var sgJackpotRef = sgRef.child(formatted);
            sgRef.set({}, function() {
              sgJackpotRef.set({
                salt: salt,
                rngStr: rngStr,
              }, function() {
                request.post({
                  url: '*******',
                  body: winnerObj,
                  json: true,
                }, function(error, response, body) {
                  if (error) {
                    console.log(error);
                    setPollTimer(10000);
                  } else {
                    usersRef.child(winnerObj.winner.id).once('value', function(data) {
                      var userData = data.val();
                      if (data.child('won').exists()) {
                        userData.won = (Math.floor(parseFloat(userData.won)) + Math.floor(parseFloat(winnerObj.jackpotValue))).toFixed(2);
                      } else {
                        userData.won = Math.floor(parseFloat(winnerObj.jackpotValue)).toFixed(2);
                      }
                      usersRef.child(winnerObj.winner.id).update({
                        won: userData.won
                      }, function() {
                        console.log('Added winnings to user data');
                      });
                    });
                    console.log('Making a withdraw request now to bot');
                    setPollTimer(10000);
                  }
                });
              });
            });
          });
        });
      });
    });
  });
};
And the function it should run client side:
function slotMachine(winnerIndex) {
  var params = {
    active: 3,
    randomize: function(activeElementIndex) {
      return 1;
    }
  };
  var machine = $('.slot').slotMachine(params);
  $("#slotMachineButton").click(function() {
    machine.shuffle(3, function() {
      $(this).text("Index: " + this.active);
    });
  });
}
I'm completely stuck and stressed, though; this isn't my code, and it's a lot of code, so I can't grasp where to begin. Any help?
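For what it's worth, the usual pattern for pushing such an event to the browser is a websocket layer such as Socket.IO. The names below (io, socket, 'roundWinner') are illustrative assumptions, not part of the existing code: emit the winner's index from endRound once it is known, and have the client listen for it and hand it to slotMachine. A minimal sketch:
// Server side (sketch; assumes a Socket.IO server instance `io` exists):
// place this in endRound() right after the winner has been determined.
var winnerIndex = winnerArray[currentJackpot.winningTicket];
io.emit('roundWinner', { winnerIndex: winnerIndex });

// Client side: listen for the event and run the slot machine with the data.
var socket = io(); // client script served at /socket.io/socket.io.js
socket.on('roundWinner', function(data) {
  slotMachine(data.winnerIndex);
});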
Related
Node newbie here. I have a problem that I have been banging my head against for a few days now; I'm sure I am missing something small, but my Google-fu has failed me so far.
I have an AWS Firehose that is writing CloudWatch logs to S3, which are then picked up by a Lambda function, parsed, and ingested into Elasticsearch. Or at least that is the plan. The reason I am using Firehose is that the data comes from remote AWS accounts, and it sends all the logs to one place (S3). CloudWatch logs are gzipped by default, so the Firehose simply writes the files to S3, no transforms or anything. I am able to pull the logs once they are in S3. Now the issue :). Once I read the file from S3, I pass it to a transform function to parse the data, but it fails when trying to loop through the log, which is JSON. I did a console.log(payload) and it appears to be in JSON format, but running it through JSON.stringify and then back through parsing does nothing. JSON.parse will run, but the JSON will not be valid.
I am attaching the code below. I am not the original author; someone in my company found it on GitHub, and I just added the S3 parts.
I have added one of the log files as an example; it's in a public S3 bucket if anyone wants to grab it. No worries, it's all test data, nothing special.
https://s3.amazonaws.com/node-issue-stackoverflow/cwl-test-11-2018-08-26-00-45-34-84a4c3de-179a-4bf2-9376-895cdc063e6b+(1)
Pastebin Link
// v1.1.2
var https = require('https');
var zlib = require('zlib');
var crypto = require('crypto');
var AWS = require('aws-sdk');
var endpoint = process.env.es_endpoint;
var s3 = new AWS.S3();
var params;

exports.handler = function(input, context) {
  // Get the event from S3 based on input
  params = { Bucket: input.Records[0].s3.bucket.name, Key: input.Records[0].s3.object.key };
  console.log(input.Records[0].s3);
  s3.getObject(params, function(error, event) {
    if (error) { context.fail(error); return; }
    console.log(event);
    console.log(event.Body);
    // decode input from base64
    var zippedInput = new Buffer(event.Body, 'base64');
    // decompress the input
    zlib.gunzip(zippedInput, function(error, buffer) {
      if (error) { context.fail(error); return; }
      // console.log(buffer.toString());
      // parse the input from JSON
      var awslogsData = buffer.toString('utf8');
      // transform the input to Elasticsearch documents
      var elasticsearchBulkData = transform(awslogsData);
      // skip control messages
      if (!elasticsearchBulkData) {
        console.log('Received a control message');
        context.succeed('Control message handled successfully');
        return;
      }
      // post documents to the Amazon Elasticsearch Service
      post(elasticsearchBulkData, function(error, success, statusCode, failedItems) {
        console.log('Response: ' + JSON.stringify({
          "statusCode": statusCode
        }));
        if (error) {
          console.log('Error: ' + JSON.stringify(error, null, 2));
          if (failedItems && failedItems.length > 0) {
            console.log("Failed Items: " + JSON.stringify(failedItems, null, 2));
          }
          context.fail(JSON.stringify(error));
        } else {
          console.log('Success: ' + JSON.stringify(success));
          context.succeed('Success');
        }
      });
    });
  });
};
function transform(payload) {
  if (payload.messageType === 'CONTROL_MESSAGE') {
    return null;
  }
  var bulkRequestBody = '';
  payload.logEvents.forEach(function(logEvent) {
    var timestamp = new Date(1 * logEvent.timestamp);
    // index name format: cwl-YYYY.MM.DD
    var indexName = [
      'cwl-' + timestamp.getUTCFullYear(),             // year
      ('0' + (timestamp.getUTCMonth() + 1)).slice(-2), // month
      ('0' + timestamp.getUTCDate()).slice(-2)         // day
    ].join('.');
    var source = buildSource(logEvent.message, logEvent.extractedFields);
    source['#id'] = logEvent.id;
    source['#timestamp'] = new Date(1 * logEvent.timestamp).toISOString();
    source['#message'] = logEvent.message;
    source['#owner'] = payload.owner;
    source['#log_group'] = payload.logGroup;
    source['#log_stream'] = payload.logStream;
    var action = { "index": {} };
    action.index._index = indexName;
    action.index._type = payload.logGroup;
    action.index._id = logEvent.id;
    bulkRequestBody += [
      JSON.stringify(action),
      JSON.stringify(source),
    ].join('\n') + '\n';
  });
  return bulkRequestBody;
}

function buildSource(message, extractedFields) {
  if (extractedFields) {
    var source = {};
    for (var key in extractedFields) {
      if (extractedFields.hasOwnProperty(key) && extractedFields[key]) {
        var value = extractedFields[key];
        if (isNumeric(value)) {
          source[key] = 1 * value;
          continue;
        }
        jsonSubString = extractJson(value);
        if (jsonSubString !== null) {
          source['$' + key] = JSON.parse(jsonSubString);
        }
        source[key] = value;
      }
    }
    return source;
  }
  jsonSubString = extractJson(message);
  if (jsonSubString !== null) {
    return JSON.parse(jsonSubString);
  }
  return {};
}

function extractJson(message) {
  var jsonStart = message.indexOf('{');
  if (jsonStart < 0) return null;
  var jsonSubString = message.substring(jsonStart);
  return isValidJson(jsonSubString) ? jsonSubString : null;
}

function isValidJson(message) {
  try {
    JSON.parse(message);
  } catch (e) { return false; }
  return true;
}

function isNumeric(n) {
  return !isNaN(parseFloat(n)) && isFinite(n);
}
function post(body, callback) {
  var requestParams = buildRequest(endpoint, body);
  var request = https.request(requestParams, function(response) {
    var responseBody = '';
    response.on('data', function(chunk) {
      responseBody += chunk;
    });
    response.on('end', function() {
      var info = JSON.parse(responseBody);
      var failedItems;
      var success;
      if (response.statusCode >= 200 && response.statusCode < 299) {
        failedItems = info.items.filter(function(x) {
          return x.index.status >= 300;
        });
        success = {
          "attemptedItems": info.items.length,
          "successfulItems": info.items.length - failedItems.length,
          "failedItems": failedItems.length
        };
      }
      var error = response.statusCode !== 200 || info.errors === true ? {
        "statusCode": response.statusCode,
        "responseBody": responseBody
      } : null;
      callback(error, success, response.statusCode, failedItems);
    });
  }).on('error', function(e) {
    callback(e);
  });
  request.end(requestParams.body);
}

function buildRequest(endpoint, body) {
  var endpointParts = endpoint.match(/^([^\.]+)\.?([^\.]*)\.?([^\.]*)\.amazonaws\.com$/);
  var region = endpointParts[2];
  var service = endpointParts[3];
  var datetime = (new Date()).toISOString().replace(/[:\-]|\.\d{3}/g, '');
  var date = datetime.substr(0, 8);
  var kDate = hmac('AWS4' + process.env.AWS_SECRET_ACCESS_KEY, date);
  var kRegion = hmac(kDate, region);
  var kService = hmac(kRegion, service);
  var kSigning = hmac(kService, 'aws4_request');
  var request = {
    host: endpoint,
    method: 'POST',
    path: '/_bulk',
    body: body,
    headers: {
      'Content-Type': 'application/json',
      'Host': endpoint,
      'Content-Length': Buffer.byteLength(body),
      'X-Amz-Security-Token': process.env.AWS_SESSION_TOKEN,
      'X-Amz-Date': datetime
    }
  };
  var canonicalHeaders = Object.keys(request.headers)
    .sort(function(a, b) { return a.toLowerCase() < b.toLowerCase() ? -1 : 1; })
    .map(function(k) { return k.toLowerCase() + ':' + request.headers[k]; })
    .join('\n');
  var signedHeaders = Object.keys(request.headers)
    .map(function(k) { return k.toLowerCase(); })
    .sort()
    .join(';');
  var canonicalString = [
    request.method,
    request.path, '',
    canonicalHeaders, '',
    signedHeaders,
    hash(request.body, 'hex'),
  ].join('\n');
  var credentialString = [date, region, service, 'aws4_request'].join('/');
  var stringToSign = [
    'AWS4-HMAC-SHA256',
    datetime,
    credentialString,
    hash(canonicalString, 'hex')
  ].join('\n');
  request.headers.Authorization = [
    'AWS4-HMAC-SHA256 Credential=' + process.env.AWS_ACCESS_KEY_ID + '/' + credentialString,
    'SignedHeaders=' + signedHeaders,
    'Signature=' + hmac(kSigning, stringToSign, 'hex')
  ].join(', ');
  return request;
}

function hmac(key, str, encoding) {
  return crypto.createHmac('sha256', key).update(str, 'utf8').digest(encoding);
}

function hash(str, encoding) {
  return crypto.createHash('sha256').update(str, 'utf8').digest(encoding);
}
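For what it's worth, one mismatch stands out in the code above: transform() reads payload.messageType and payload.logEvents, i.e. it expects an object, but the handler hands it the raw string from buffer.toString('utf8'). Parsing the string first may be the missing step. A minimal sketch of the change, assuming each S3 object holds a single JSON document:
// Inside the zlib.gunzip callback, parse before transforming (sketch):
var awslogsData = JSON.parse(buffer.toString('utf8'));
var elasticsearchBulkData = transform(awslogsData);
Note also that Firehose writes records back-to-back with no delimiter, so a file containing more than one record is not a single valid JSON document; that could explain the "JSON.parse will run, but the JSON will not be valid" symptom, and such files would need to be split into records before parsing.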
I have made an Azure Function with 3 calls to the db, but as they run async, only the first item gets added.
My code is below. To be exact: context.done should fire only after the calls have finished, and each call has to finish before going on to the next one. I hope someone can help me.
It seems to have to do with the Azure Function context, as promises did not work either.
const rp = require('request-promise');
const azure = require("azure-storage");
var feed = require('feed-read');
const env = require('dotenv').config();
const con = "storageconnection";

function guid() {
  return s4() + s4() + '-' + s4() + '-' + s4() + '-' +
    s4() + '-' + s4() + s4() + s4();
}

function CreateEntitie(item) {
  var link = item.link;
  var team = link.substr(39, link.length);
  team = team.substr(0, team.indexOf('/'));
  var newlink = link.substr(0, link.lastIndexOf('#'));
  var pubdate = item.published;
  var titel = item.title;
  var Entity = {
    PartitionKey: { '_': '' },
    RowKey: { '_': '0' },
    Gelezen: { "_": false },
    Team: { "_": team },
    Titel: { "_": item.title },
    Com_url: { "_": newlink },
    pubdate: { "_": item.published, '$': 'Edm.DateTime' }
  };
  return Entity;
}

function s4() {
  return Math.floor((1 + Math.random()) * 0x10000)
    .toString(16)
    .substring(1);
}

function getrownummer(azure, tableSvc, entitie) {
  for (var x = 0; x < entitie.length; x++) {
    var query = new azure.TableQuery()
      .where('com_url eq ?', entitie[x].Com_url._);
    // check if exists and update
    tableSvc.queryEntities('Posts', query, null, function (error, posts, response) {
      if (!error) {
        // update
        var rownummer = "0";
        for (var a = 0; a < posts.length; a++) {
          rownummer = posts[a].RowKey._;
        }
        if (rownummer === "0") {
          rownummer = guid();
        }
        entitie[x].RowKey._ = rownummer;
      }
    });
  }
  return entitie;
}

function InsertorReplaceItem(tableSvc, entitie) {
  var returnvalue;
  tableSvc.insertOrReplaceEntity('posts', entitie, function (error, inputed, response) {
    if (!error) {
      console.log(entitie.Com_url._ + "rownummer=" + entitie.RowKey._);
      // Entity updated
      return true;
    } else {
      return false;
    }
  });
}

module.exports = function (context, myTimer) {
  var timeStamp = new Date().toISOString();
  if (myTimer.isPastDue) {
    context.log('JavaScript is running late');
  }
  tableSvc = azure.createTableService(con);
  tableSvc.queryEntities('rssfeeds', null, null, function (error, result, response) {
    if (error) {
      context.log('well that didn\'t work: ' + error.stack);
      context.done();
    } else {
      context.log(result);
    }
    if (result) {
      var i = 1;
      var lastupdates = [];
      feed(result.entries[i].rssurl._, function (err, articles) {
        if (err) throw err;
        var newdate = new Date(articles[0].published).getTime();
        for (var x = 0; x < articles.length; x++) {
          //console.log(articles[x].title)
          var from = new Date(articles[x].published).getTime();
          var to = new Date(result.entries[i].Lastupdate._).getTime();
          if (from >= to) {
            var entitie = CreateEntitie(articles[x]);
            lastupdates.push(entitie);
            // context.log(entitie.titel);
          }
          if (newdate <= from) {
            newdate = from;
          }
        }
        context.log(lastupdates);
        var completeArticle = [];
        completeArticle = getrownummer(azure, tableSvc, lastupdates);
        context.log(completeArticle);
        if (completeArticle) {
          for (var x = 0; x < completeArticle.length; x++) {
            var endresult = InsertorReplaceItem(tableSvc, completeArticle[x]);
            // context.log(endresult);
          }
          if (endresult) {
            context.done;
          }
        }
        // update date to latest date
      });
    }
  });
};
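A common way out, sketched below and kept deliberately close to the callback-style azure-storage API used above: wrap each insertOrReplaceEntity call in a Promise, wait for all of them with Promise.all, and only then call context.done(). (getrownummer has the same fire-and-forget problem and would need the same treatment; everything beyond the documented insertOrReplaceEntity(table, entity, callback) signature here is illustrative.)
// Sketch: promisify the insert so its completion can be awaited.
function insertItemAsync(tableSvc, entity) {
  return new Promise(function (resolve, reject) {
    tableSvc.insertOrReplaceEntity('posts', entity, function (error, result) {
      if (error) { reject(error); } else { resolve(result); }
    });
  });
}

// Inside the feed() callback, after completeArticle has been built:
Promise.all(completeArticle.map(function (entity) {
  return insertItemAsync(tableSvc, entity);
})).then(function () {
  context.done(); // signal completion only after every insert has finished
}).catch(function (error) {
  context.log(error);
  context.done(error);
});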
I'm quite new to the topic and I'm still having some issues with my mail parser. Searching for and finding email addresses in the email header (mail.from) works, but it doesn't work for the email body. Does anybody have some experience with that and is willing to help? You can find the function I'm talking about under the "// Check for other addresses in Mail-Body (Doesn't work yet)" comment. I think my regex is correct. Also, if the matchAll function gives back an array and it can't be saved in the subscriber.email object, it should at least be logged to the console. I also checked manually in the inbox whether there are mails with email addresses in the mail body; there are at least two, which should be found.
The part of App.js that does the mail parsing:
const simpleParser = require('mailparser').simpleParser;
//const htmlparser = require("htmlparser2");
var fs = require('fs');
var config = require('./config');
var Imap = require('imap');
var imap = new Imap(config.imap);
var blacklistString = '';

// Returns an array of match arrays (one per match), or null if nothing matched.
String.prototype.matchAll = function(regexp) {
  var matches = [];
  this.replace(regexp, function() {
    var arr = ([]).slice.call(arguments, 0);
    var extras = arr.splice(-2);
    arr.index = extras[0];
    arr.input = extras[1];
    matches.push(arr);
  });
  return matches.length ? matches : null;
};

function openInbox(subbox, cb) {
  imap.openBox('INBOX.' + subbox, true, cb);
}

function getBoxes(cb) {
  imap.getBoxes(cb);
}

function showBoxes(boxes) {
  imap.end();
}

function logArrayElements(element) {
  if (element[1].indexOf('placeholder.de') == -1) {
    addToBlacklistString(element[1]);
  }
}

function addToBlacklistString(str) {
  blacklistString += str + "\n";
}

function writeBlacklistFile() {
  fs.appendFile('data/data.csv', blacklistString, function (err) {
    if (err) throw err;
    console.log('Saved!');
  });
}

function search(searchArray, regex) {
  imap.search(searchArray, function(err, results) {
    if (err) throw err;
    var temp = 0;
    var mailtemp = [];
    var f = imap.fetch(results, { bodies: '' });
    f.on('message', function(msg, seqno) {
      console.log('Message #%d', seqno);
      var prefix = '(#' + seqno + ') ';
      msg.on('body', function(stream, info) {
        simpleParser(stream, (err, mail) => {
          //console.log(temp);
          //console.log(mail.subject);
          /*fs.writeFile('data/'+seqno+'.txt', mail.text, function(err){
            console.log(err);
          });*/
          //var text = mail.text;
          // New Subscriber Object
          var subscr = new Subscriber({nr: '', mailIdent: '', from: '', emails: '', text: '', uLink: '', anwalt: false});
          subscr.nr = seqno;
          // Check for From-Address
          if (!!mail.from) {
            //console.log(mail.from.value);
            for (var i = 0; i < mail.from.value.length; i++) {
              mailtemp = mail.from.value[i].address.matchAll(regex);
              mailtemp.forEach(function(element) {
                /*fs.appendFile('data/data.csv', element[0] + "\n", function(error){
                  console.log(error);
                });*/
                subscr.from = element[0];
              });
              if (!!mailtemp) {
                mailtemp.forEach(logArrayElements);
              }
            }
          } else {
            //console.log(mail.text);
          }
          // Message-ID
          if (!!mail.messageId) {
            subscr.mailIdent = mail.messageId;
          }
          console.log(mail.messageId);
          // Check for other addresses in Mail-Body (Doesn't work yet)
          var regexEmails = new RegExp('/([\w\.\-\_\#\+]+#[\w\.\-\_äüö]+\.[a-zA-Z]+)/g');
          if (!!mail.text) {
            if (mail.text.matchAll(regexEmails) != null) {
              subscr.emails = mail.text.matchAll(regexEmails);
              console.log(subscr.emails);
            }
          }
          /* Split mail.text at substrings in substr-array. Extend if necessary..
           *
           * Also check for 'Anwalt'-Expression in splitted Substring
           *
           * If mail.text doesn't exist -> Check for html body and convert it to text-format
           */
          //var regexLink = new RegExp('\.de\/(unsubscribe|austragen)\/([^\"]+)');
          var regexAnwalt = new RegExp('nwalt|echtsanwalt|rechtlicher');
          if (!!mail.text) {
            var substr = ["schrieb pplaceholder.de", "Von: \"placeholder.de", "Von: pplaceholder.de", "From: placeholder.de", "Ursprüngliche Nachricht"];
            for (var i = 0; i < substr.length; i++) {
              if (mail.text.indexOf(substr[i]) > -1) {
                var textTemp = mail.text;
                var arr = textTemp.split(substr[i]);
                if (arr[0].matchAll(regexAnwalt) != null) {
                  subscr.anwalt = true;
                }
                subscr.text = arr[0];
                break;
              } else {
                subscr.text = mail.text;
              }
            }
            //console.log(arr);
          } else {
            var html = mail.html;
            var text = htmlToText.fromString(html, {
              noLinkBrackets: true,
              ignoreImage: true,
              uppercaseHeadings: false,
              preserveNewlines: false,
              wordwrap: 130,
              format: {
                heading: function (node, fn, options) {
                  var h = fn(node.children, options);
                  return '\n==== ' + h + ' ====\n\n';
                }
              }
            });
            subscr.text = text;
          }
          mail.headers.forEach(function(value, key) {
            //console.log(value);
          });
          subscr.save();
          //console.log(subscr);
          temp++;
        });
      });
      msg.once('end', function() {
        console.log(prefix + 'Finished');
      });
    });
    f.once('error', function(err) {
      console.log('Fetch error: ' + err);
    });
    f.once('end', function() {
      console.log('Done fetching all messages!');
      //writeBlacklistFile();
      imap.end();
    });
  });
}

imap.once('ready', function() {
  openInbox('Test', function(err, box) {
    var searchArray = [['FROM', '#']];
    search(searchArray, /([\w\.\-\_\#\+]+#[\w\.\-\_äüö]+\.[a-zA-Z]+)/g);
  });
});

imap.once('error', function(err) {
  console.log(err);
});

imap.once('end', function() {
  console.log('Connection ended');
});

imap.connect();

app.listen(2700, function() {
  console.log("Listening on Port 2700");
});

module.exports = app;
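One observation on the non-working body check above, which the follow-up below also touches on: new RegExp('/pattern/g') treats the slashes and the trailing g as literal characters of the pattern, so it can never match an address. With the constructor form, flags go in the second argument. A minimal illustration:
var broken = new RegExp('/abc/g');   // matches the literal text "/abc/g"
var fixed = new RegExp('abc', 'g');  // matches "abc", globally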
subscriber.js
const mongoose = require('mongoose');

var subscriberSchema = mongoose.Schema({
  nr: Number,
  mailIdent: String,
  from: String,
  emails: String,
  text: String,
  uLink: String,
  anwalt: Boolean
});

var Subscriber = module.exports = mongoose.model('Subscriber', subscriberSchema);

// get Subscriber
module.exports.getSubscribers = function(callback, limit) {
  Subscriber.find(callback).limit(limit);
};

module.exports.getSubscriberByID = function(_id, callback) {
  Subscriber.findById(_id, callback);
};
The regex for the emails was a little bit wrong.
I also hadn't noticed that the matchAll function gives back a two-dimensional array. Here is the changed part of the code:
var regexEmails = new RegExp("([\\w\\.\\-\\_\\#\\+]+#[\\w\\.\\-\\_äüö]+\\.[a-zA-Z]+)");
var temp1 = mail.text.matchAll(regexEmails);
if (!!temp1) {
  //console.log(temp1);
  for (var i = 0; i < temp1.length; i++) {
    if (temp1[0][i] !== 'info#service.placeholder.de' && temp1[0][i] !== "info#placeholder.de") {
      subscr.emails += temp1[0][i];
    }
  }
}
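A flatter alternative, sketched here for comparison (keeping the '#' separator and the placeholder filters from above, which are specific to this mailbox): String.prototype.match with a global regex returns a plain array of full matches, which avoids indexing into the two-dimensional structure altogether:
var emailRegex = /[\w.\-_#+]+#[\w.\-_äüö]+\.[a-zA-Z]+/g;
var found = (mail.text || '').match(emailRegex) || []; // flat array of full matches
var filtered = found.filter(function (addr) {
  return addr !== 'info#service.placeholder.de' && addr !== 'info#placeholder.de';
});
subscr.emails = filtered.join(', ');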
I'm new to Parse.com cloud jobs and I have an error.
I need to make a cloud job that computes statistics out of all registered games in the database (~500K).
The first draft of my script is:
Parse.Cloud.job("dostats", function(request, status) {
Parse.Cloud.useMasterKey();
var max_score_query = new Parse.Query("ScoreData");
max_score_query.descending('score');
max_score_query.first(
{
success: function(result) {
var stat_size = 40;
var count = 0;
var stats = new Array(stat_size);
for ( i = 0; i < stat_size; ++i) { stats[i] = 0; }
var max_score = result.get('score');
var promise = Parse.Promise.as();
promise = promise.then( function() {
var stats_query = new Parse.Query("ScoreData");
return stats_query.each( function(line) {
var score = line.get('score');
var id = parseInt((stat_size - 1) * ( score / max_score));
stats[id] = stats[id] + 1;
count = count + 1;
}).then( function() { status.success("lol"); },
function(error) { status.error("error"); });
});
return promise;
},
error: function() {
status.error("Unable to get max score");
}
});
});
});
If I console.log(stats.toString()) in the each loop, the data is correct.
However, the job fails with this message:
E2015-02-10T11:16:54.296Z] v137: Ran job dostats with:
Input: {}
Failed with: undefined
Any idea what I am doing wrong?
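One thing worth checking: the rejection handler replaces the real error with the fixed string "error", and "Failed with: undefined" suggests the job is dying without a usable message. A small hedged change to the end of the chain that at least surfaces the underlying failure:
// Same each() loop as above; only the handlers at the end change (sketch):
return stats_query.each(function(line) {
  var score = line.get('score');
  var id = parseInt((stat_size - 1) * (score / max_score));
  stats[id] = stats[id] + 1;
  count = count + 1;
}).then(function() {
  status.success("lol");
}, function(error) {
  // propagate the real message instead of the literal string "error"
  status.error("Query failed: " + (error && error.message ? error.message : JSON.stringify(error)));
});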
I'm currently trying to use sails.js with MongoDB. I need some custom mapReduce functions to group data.
Now, I can achieve what I want by using Waterline's native function, but I have some questions.
These functions actually differ only slightly, but I found myself repeating code like the following:
function getSomeData() {
  // First query
  Log.native(function(err, logCollection) {
    var mapFunction = function() {
      function dateFormatter(date) {
        return date.getFullYear() + "-" + (date.getMonth() + 1);
      }
      //! Generate grouping key
      emit(dateFormatter(this.emb_date), this.bad_qty);
    };
    var reduceFunction = function (key, values) {
      return Array.sum(values);
    };
    var outputControl = {
      out: {inline: 1},
      //! Filters
      query: {order_type: product}
    };
    logCollection.mapReduce(mapFunction, reduceFunction, outputControl, function (err, result) {
      if (err) {
        callback(err);
        return;
      }
      var resultSet = [];
      //! Post-processing
      for (var i = 0; i < result.length; i++) {
        //.....
      }
      callback(err, resultSet);
    });
  });
}
The second query:
function getAnotherData() {
  Log.native(function(err, logCollection) {
    var mapFunction = function() {
      //! Generate grouping key
      emit(dateFormatter(this.product), this.bad_qty);
    };
    var reduceFunction = function (key, values) {
      return Array.sum(values);
    };
    var outputControl = {
      out: {inline: 1},
      //! Filters
      query: {order_type: product}
    };
    logCollection.mapReduce(mapFunction, reduceFunction, outputControl, function (err, result) {
      if (err) {
        callback(err);
        return;
      }
      var resultSet = [];
      //! Post-processing
      for (var i = 0; i < result.length; i++) {
        //......
      }
      callback(err, resultSet);
    });
  });
}
As you can see, these two snippets share lots of common code and differ only in three places (grouping-key generation, filters, post-processing).
So I would really like to extract the common part to make my code cleaner, but I have had no success.
I first tried to have dateFormatter provided by a callback instead of hard-coding it, like the following:
function dateFormatter(data) {
  return data.emb_date.getFullYear() + "-" + (data.emb_date.getMonth() + 1);
}

function getSomeData(groupingKey) {
  // First query
  Log.native(function(err, logCollection) {
    var mapFunction = function() {
      //! Generate grouping key
      emit(groupingKey(this.emb_date), this.bad_qty);
    };
    var reduceFunction = function (key, values) {
      return Array.sum(values);
    };
    var outputControl = {
      out: {inline: 1},
      //! Filters
      query: {order_type: product}
    };
    logCollection.mapReduce(mapFunction, reduceFunction, outputControl, function (err, result) {
      if (err) {
        callback(err);
        return;
      }
      var resultSet = [];
      //! Post-processing
      for (var i = 0; i < result.length; i++) {
        //.....
      }
      callback(err, resultSet);
    });
  });
}
But no luck; I keep getting errors like the following one:
MongoError: exception: ReferenceError: groupingKey is not defined near 'emit(groupingKey(this), this.bad_qty' (line 3)
at Object.toError (/home/brianhsu/zh800/dashboard/node_modules/sails-mongo/node_modules/mongodb/lib/mongodb/utils.js:114:11)
What should I do to reduce this duplicated code?
Finally I found that I needed to pass the option called 'scope' to MongoDB. The map function is serialized and executed inside MongoDB, so it cannot close over variables from the Node process; 'scope' injects them into the server-side context. I came up with the following solution, which works quite well.
exports.defineOn = function(options) {
  var model = options.model;
  var groupingFunction = options.groupingFunction;
  var mongoFilters = options.mongoFilters;
  var customFilter = options.customFilter;
  var converter = options.converter;
  var sorting = options.sorting;
  return function(callback) {
    model.native(function(err, collection) {
      var mapFunction = function() { emit(groupingFunction(this), this.bad_qty); };
      var reduceFunction = function(key, values) { return Array.sum(values); };
      var mapReduceOptions = {
        out: {inline: 1},
        query: mongoFilters,
        // 'scope' injects these values into the server-side JS context,
        // so mapFunction can call groupingFunction even though it runs
        // inside MongoDB.
        scope: {
          groupingFunction: groupingFunction,
          mongoFilters: mongoFilters,
          customFilter: customFilter,
          converter: converter
        }
      };
      var processCallback = function (err, result) {
        if (err) {
          callback(err);
          return;
        }
        if (sorting) {
          result.sort(sorting);
        }
        var resultSet = [];
        for (var i = 0; i < result.length; i++) {
          if (customFilter && customFilter(result[i])) {
            resultSet.push(converter(result[i]));
          } else if (!customFilter) {
            resultSet.push(converter(result[i]));
          }
        }
        callback(err, resultSet);
      };
      collection.mapReduce(mapFunction, reduceFunction, mapReduceOptions, processCallback);
    });
  };
};
Usage:
function machineDetail(year, month, date, machine, callback) {
  var startDate = new Date(+year, +(month - 1), +date);
  var endDate = new Date(+year, +(month - 1), (+date) + 1);
  var mapReducer = MapReducer.defineOn({
    model: Log,
    groupingFunction: function(data) {
      return {date: data.emb_date, error: data.defact_id};
    },
    mongoFilters: {
      mach_id: machine,
      emb_date: {$gte: startDate, $lt: endDate}
    },
    converter: function (data) {
      return {
        name: data._id,
        value: data.value,
      };
    }
  });
  mapReducer(callback);
}
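One caveat with the scope approach, as a closing design note: everything placed in scope is serialized and shipped to MongoDB, so the functions passed this way must themselves be self-contained; they cannot close over other variables from the Node process any more than mapFunction itself can.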