Skip to content
8 changes: 6 additions & 2 deletions exampleConfig.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ Optional Variables:
graphiteProtocol: either 'text' or 'pickle' [default: 'text']
backends: an array of backends to load. Each backend must exist
by name in the directory backends/. If not specified,
the default graphite backend will be loaded.
the default graphite backend will be loaded.
* example for console and graphite:
[ "./backends/console", "./backends/graphite" ]

Expand Down Expand Up @@ -60,7 +60,7 @@ Optional Variables:
log: location of log file for frequent keys [default: STDOUT]
deleteIdleStats: don't send values to graphite for inactive counters, sets, gauges, or timers
as opposed to sending 0. For gauges, this unsets the gauge (instead of sending
the previous value). Can be individually overriden. [default: false]
the previous value). Can be individually overridden. [default: false]
deleteGauges: don't send values to graphite for inactive gauges, as opposed to sending the previous value [default: false]
deleteTimers: don't send values to graphite for inactive timers, as opposed to sending 0 [default: false]
deleteSets: don't send values to graphite for inactive sets, as opposed to sending 0 [default: false]
Expand All @@ -71,6 +71,10 @@ Optional Variables:
If disabled, it is up to the backends to sanitize keynames
as appropriate per their storage requirements.

calculated_timer_metrics: an array of timer metric names to calculate and send. If unset or empty, all timer metrics are sent.

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Doesn't follow casing style. Should be calculatedTimerMetrics

To filter on percents and top percents: append '_percent' to the metric name.
Example: calculated_timer_metrics: ['count', 'median', 'upper_percent', 'histogram']

console:
prettyprint: whether to prettyprint the console backend
output [true or false, default: true]
Expand Down
20 changes: 18 additions & 2 deletions lib/process_metrics.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
/*jshint node:true, laxcomma:true */

var process_metrics = function (metrics, flushInterval, ts, flushCallback) {
var process_metrics = function (metrics, calculated_timer_metrics, flushInterval, ts, flushCallback) {
var starttime = Date.now();
var key;
var counter_rates = {};
Expand Down Expand Up @@ -132,7 +132,7 @@ var process_metrics = function (metrics, flushInterval, ts, flushCallback) {

}

timer_data[key] = current_timer_data;
timer_data[key] = filtered_timer_metrics(current_timer_data, calculated_timer_metrics);
}

statsd_metrics["processing_time"] = (Date.now() - starttime);
Expand All @@ -144,4 +144,20 @@ var process_metrics = function (metrics, flushInterval, ts, flushCallback) {
flushCallback(metrics);
};

/**
 * Filter a timer's calculated stats down to the configured whitelist.
 *
 * @param {Object} timer_metrics - all calculated stats for one timer key.
 * @param {Array} [calculated_timer_metrics] - metric names to keep. Percentile
 *   variants (e.g. `upper_90`, `upper_top90`) are matched by the generic name
 *   `<metric>_percent`. A missing, non-array, or empty list keeps everything.
 * @returns {Object} the stats to flush (the original object when unfiltered).
 */
var filtered_timer_metrics = function (timer_metrics, calculated_timer_metrics = []) {
  // No (or invalid) whitelist means "send everything" — the documented default.
  if (!Array.isArray(calculated_timer_metrics) || calculated_timer_metrics.length === 0) {
    return timer_metrics;
  }
  return Object.keys(timer_metrics)
    .filter((key) => {
      // Generalizes filtering percent metrics by cleaning key from
      // <metric>_<number> / <metric>_top<number> to <metric>_percent
      let cleaned_key = key.replace(/_(top)?\d+$/, "_percent");
      return calculated_timer_metrics.includes(cleaned_key);
    })
    .reduce((obj, key) => {
      obj[key] = timer_metrics[key];
      return obj;
    }, {});
}
exports.process_metrics = process_metrics;
2 changes: 1 addition & 1 deletion stats.js
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ function flushMetrics() {
}
});

pm.process_metrics(metrics_hash, flushInterval, time_stamp, function emitFlush(metrics) {
pm.process_metrics(metrics_hash, conf.calculated_timer_metrics, flushInterval, time_stamp, function emitFlush(metrics) {
backendEvents.emit('flush', time_stamp, metrics);
});

Expand Down
186 changes: 186 additions & 0 deletions test/graphite_tests_filters.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,186 @@
var fs = require('fs'),
net = require('net'),
temp = require('temp'),
spawn = require('child_process').spawn,
util = require('util'),
urlparse = require('url').parse,
_ = require('underscore'),
dgram = require('dgram'),
qsparse = require('querystring').parse,
http = require('http');


// Write `text` into a fresh temp file (suffix "-statsdconf.js"), then hand the
// resulting path to `worker` together with the caller-supplied callback `cb`
// and context object `obj`. Any temp/fs error is thrown.
var writeconfig = function(text,worker,cb,obj){
  temp.open({suffix: '-statsdconf.js'}, function(openErr, info) {
    if (openErr) {
      throw openErr;
    }
    fs.writeSync(info.fd, text);
    fs.close(info.fd, function(closeErr) {
      if (closeErr) {
        throw closeErr;
      }
      worker(info.path, cb, obj);
    });
  });
};

// True when `first` and `second` contain exactly the same set of values and
// `first` has no duplicates — i.e. intersection == union == unique(first).
// Rewritten with native Set (stdlib) instead of underscore's
// _.intersection/_.union, preserving the original semantics.
var array_contents_are_equal = function(first,second){
  var unique_first = new Set(first);
  var unique_second = new Set(second);
  if (unique_first.size !== unique_second.size) {
    return false;
  }
  for (var item of unique_first) {
    if (!unique_second.has(item)) {
      return false;
    }
  }
  // Intersection size must also equal first.length => first has no duplicates.
  return unique_first.size === first.length;
}

// Send one statsd payload over the given UDP socket, invoking cb once the
// datagram has been handed off. Send errors are thrown.
// BUGFIX: `send_data` was assigned without a declaration (implicit global),
// and `new Buffer(...)` is deprecated — Buffer.from is the supported API.
var statsd_send = function(data,sock,host,port,cb){
  var send_data = Buffer.from(data);
  sock.send(send_data,0,send_data.length,port,host,function(err,bytes){
    if (err) {
      throw err;
    }
    cb();
  });
}

// keep collecting data until a specified timeout period has elapsed
// this will let us capture all data chunks so we don't miss one
var collect_for = function(server,timeout,cb){
var received = [];
var in_flight = 0;
var timed_out = false;
var collector = function(req,res){
in_flight += 1;
var body = '';
req.on('data',function(data){ body += data; });
req.on('end',function(){
received = received.concat(body.split("\n"));
in_flight -= 1;
if((in_flight < 1) && timed_out){
server.removeListener('request',collector);
cb(received);
}
});
}

setTimeout(function (){
timed_out = true;
if((in_flight < 1)) {
server.removeListener('connection',collector);
cb(received);
}
},timeout);

server.on('connection',collector);
}
// nodeunit suite: end-to-end check that the `calculated_timer_metrics` config
// option filters which calculated timer stats reach the graphite backend.
// A fake graphite acceptor (plain TCP server) captures the flushed lines.
module.exports = {
  // Boot a statsd child process against a temp config file and a fake
  // graphite acceptor; `callback` fires once the child logs "server is up".
  setUp: function (callback) {
    this.testport = 31337;   // TCP port the fake graphite acceptor listens on
    this.myflush = 200;      // statsd flush interval in ms (short, for test speed)
    // Config under test: note the calculated_timer_metrics whitelist and the
    // single histogram bin for a_test_value.
    var configfile = "{graphService: \"graphite\"\n\
                      , batch: 200 \n\
                      , flushInterval: " + this.myflush + " \n\
                      , percentThreshold: 90\n\
                      , calculated_timer_metrics: ['count_ps', 'count', 'count_percent', 'mean_percent', 'histogram']\n\
                      , histogram: [ { metric: \"a_test_value\", bins: [1000] } ]\n\
                      , port: 8125\n\
                      , dumpMessages: false \n\
                      , debug: false\n\
                      , graphite: { legacyNamespace: false }\n\
                      , graphitePort: " + this.testport + "\n\
                      , graphiteHost: \"127.0.0.1\"}";

    this.acceptor = net.createServer();        // fake graphite endpoint
    this.acceptor.listen(this.testport);
    this.sock = dgram.createSocket('udp4');    // UDP socket for sending test metrics

    this.server_up = true;
    this.ok_to_die = false;
    // Until tearDown re-arms this with the nodeunit callback, an unexpected
    // child exit aborts the whole test run.
    this.exit_callback_callback = process.exit;

    writeconfig(configfile,function(path,cb,obj){
      obj.path = path;
      obj.server = spawn('node',['stats.js', path]);
      obj.exit_callback = function (code) {
        obj.server_up = false;
        if(!obj.ok_to_die){
          console.log('node server unexpectedly quit with code: ' + code);
          process.exit(1);
        }
        else {
          obj.exit_callback_callback();
        }
      };
      obj.server.on('exit', obj.exit_callback);
      obj.server.stderr.on('data', function (data) {
        console.log('stderr: ' + data.toString().replace(/\n$/,''));
      });
      /*
      obj.server.stdout.on('data', function (data) {
        console.log('stdout: ' + data.toString().replace(/\n$/,''));
      });
      */
      obj.server.stdout.on('data', function (data) {
        // wait until server is up before we finish setUp
        if (data.toString().match(/server is up/)) {
          cb();
        }
      });

    },callback,this);
  },
  // Close sockets and kill the statsd child; `callback` runs once the child
  // has exited (via exit_callback) or immediately if it is already down.
  tearDown: function (callback) {
    this.sock.close();
    this.acceptor.close();
    this.ok_to_die = true;
    if(this.server_up){
      this.exit_callback_callback = callback;
      this.server.kill();
    } else {
      callback();
    }
  },

  // Send one timer sample and assert that whitelisted stats (count_ps, count,
  // count_90, mean_90, histogram bin) arrive while non-whitelisted ones
  // (sum, sum_squares, sum_90, sum_squares_90) are absent from the flush.
  timers_are_valid: function (test) {
    test.expect(11);

    var testvalue = 100;
    var me = this;
    this.acceptor.once('connection',function(c){
      statsd_send('a_test_value:' + testvalue + '|ms',me.sock,'127.0.0.1',8125,function(){
        collect_for(me.acceptor,me.myflush*2,function(strings){
          test.ok(strings.length > 0,'should receive some data');
          // Each flushed line is "<key> <value> <timestamp>"; index by key.
          var hashes = _.map(strings, function(x) {
            var chunks = x.split(' ');
            var data = {};
            data[chunks[0]] = chunks[1];
            return data;
          });
          // NOTE(review): 5 presumably reflects the filtered stat count for
          // this config — confirm against the statsd numStats semantics.
          var numstat_test = function(post){
            var mykey = 'stats.statsd.numStats';
            return _.include(_.keys(post),mykey) && (post[mykey] == 5);
          };
          test.ok(_.any(hashes,numstat_test), 'stats.statsd.numStats should be 5');

          var testtimervalue_test = function(post){
            var mykey = 'stats.timers.a_test_value.mean_90';
            return _.include(_.keys(post),mykey) && (post[mykey] == testvalue);
          };
          var testtimerhistogramvalue_test = function(post){
            var mykey = 'stats.timers.a_test_value.histogram.bin_1000';
            return _.include(_.keys(post),mykey) && (post[mykey] == 1);
          };
          test.ok(_.any(hashes,testtimerhistogramvalue_test), 'stats.timers.a_test_value.histogram.bin_1000 should be 1');
          test.ok(_.any(hashes,testtimervalue_test), 'stats.timers.a_test_value.mean_90 should be ' + testvalue);

          // First truthy value seen for the given timer stat, or undefined
          // when the stat was filtered out of the flush entirely.
          var count_test = function(post, metric){
            var mykey = 'stats.timers.a_test_value.' + metric;
            return _.first(_.filter(_.pluck(post, mykey), function (e) { return e; }));
          };
          test.equals(count_test(hashes, 'count_ps'), 5, 'count_ps should be 5');
          test.equals(count_test(hashes, 'count'), 1, 'count should be 1');
          test.equals(count_test(hashes, 'count_90'), 1, 'count_90 should be 1');
          test.equals(count_test(hashes, 'sum'), null, 'sum should be null');
          test.equals(count_test(hashes, 'sum_squares'), null, 'sum_squares should be null');
          test.equals(count_test(hashes, 'sum_90'), null, 'sum_90 should be null');
          test.equals(count_test(hashes, 'sum_squares_90'), null, 'sum_squares_90 should be null');
          test.done();
        });
      });
    });
  },
}
Loading