Added a $unique_max field to Reducers for the SumStats::UNIQUE calculation

- Using the new option in scan.bro and the FTP bruteforce detection.
This commit is contained in:
Seth Hall 2013-08-28 00:57:44 -04:00
parent ea89e30a9c
commit 2b78922af8
4 changed files with 37 additions and 6 deletions

View file

@ -3,6 +3,11 @@
module SumStats; module SumStats;
export { export {
redef record Reducer += {
## Maximum number of unique elements to store.
unique_max: count &optional;
};
redef enum Calculation += { redef enum Calculation += {
## Calculate the number of unique values. ## Calculate the number of unique values.
UNIQUE UNIQUE
@ -16,6 +21,11 @@ export {
} }
redef record ResultVal += { redef record ResultVal += {
# Internal use only. This is used when multiple ResultVals
# are being merged and they need to abide by the unique limit
# set in the reducer.
unique_max: count &optional;
# Internal use only. This is not meant to be publicly available # Internal use only. This is not meant to be publicly available
# because we don't want to trust that we can inspect the values # because we don't want to trust that we can inspect the values
# since we will likely move to a probabilistic data structure in the future. # since we will likely move to a probabilistic data structure in the future.
@ -29,7 +39,12 @@ hook register_observe_plugins()
{ {
if ( ! rv?$unique_vals ) if ( ! rv?$unique_vals )
rv$unique_vals=set(); rv$unique_vals=set();
if ( r?$unique_max )
rv$unique_max=r$unique_max;
if ( ! r?$unique_max || |rv$unique_vals| <= r$unique_max )
add rv$unique_vals[obs]; add rv$unique_vals[obs];
rv$unique = |rv$unique_vals|; rv$unique = |rv$unique_vals|;
}); });
} }
@ -38,15 +53,31 @@ hook compose_resultvals_hook(result: ResultVal, rv1: ResultVal, rv2: ResultVal)
{ {
if ( rv1?$unique_vals || rv2?$unique_vals ) if ( rv1?$unique_vals || rv2?$unique_vals )
{ {
if ( rv1?$unique_max )
result$unique_max = rv1$unique_max;
else if ( rv2?$unique_max )
result$unique_max = rv2$unique_max;
if ( rv1?$unique_vals ) if ( rv1?$unique_vals )
result$unique_vals = copy(rv1$unique_vals); result$unique_vals = copy(rv1$unique_vals);
if ( rv2?$unique_vals ) if ( rv2?$unique_vals )
{
if ( ! result?$unique_vals ) if ( ! result?$unique_vals )
{
result$unique_vals = copy(rv2$unique_vals); result$unique_vals = copy(rv2$unique_vals);
}
else else
{
for ( val2 in rv2$unique_vals ) for ( val2 in rv2$unique_vals )
{
if ( result?$unique_max && |result$unique_vals| >= result$unique_max )
break;
add result$unique_vals[copy(val2)]; add result$unique_vals[copy(val2)];
}
}
}
result$unique = |result$unique_vals|; result$unique = |result$unique_vals|;
} }

View file

@ -52,7 +52,7 @@ export {
event bro_init() &priority=5 event bro_init() &priority=5
{ {
local r1: SumStats::Reducer = [$stream="scan.addr.fail", $apply=set(SumStats::UNIQUE)]; local r1: SumStats::Reducer = [$stream="scan.addr.fail", $apply=set(SumStats::UNIQUE), $unique_max=double_to_count(addr_scan_threshold+2)];
SumStats::create([$name="addr-scan", SumStats::create([$name="addr-scan",
$epoch=addr_scan_interval, $epoch=addr_scan_interval,
$reducers=set(r1), $reducers=set(r1),
@ -77,7 +77,7 @@ event bro_init() &priority=5
}]); }]);
# Note: port scans are tracked similar to: table[src_ip, dst_ip] of set(port); # Note: port scans are tracked similar to: table[src_ip, dst_ip] of set(port);
local r2: SumStats::Reducer = [$stream="scan.port.fail", $apply=set(SumStats::UNIQUE)]; local r2: SumStats::Reducer = [$stream="scan.port.fail", $apply=set(SumStats::UNIQUE), $unique_max=double_to_count(port_scan_threshold+2)];
SumStats::create([$name="port-scan", SumStats::create([$name="port-scan",
$epoch=port_scan_interval, $epoch=port_scan_interval,
$reducers=set(r2), $reducers=set(r2),

View file

@ -27,7 +27,7 @@ export {
event bro_init() event bro_init()
{ {
local r1: SumStats::Reducer = [$stream="ftp.failed_auth", $apply=set(SumStats::UNIQUE)]; local r1: SumStats::Reducer = [$stream="ftp.failed_auth", $apply=set(SumStats::UNIQUE), $unique_max=double_to_count(bruteforce_threshold+2)];
SumStats::create([$name="ftp-detect-bruteforcing", SumStats::create([$name="ftp-detect-bruteforcing",
$epoch=bruteforce_measurement_interval, $epoch=bruteforce_measurement_interval,
$reducers=set(r1), $reducers=set(r1),

View file

@ -1,6 +1,6 @@
THRESHOLD_SERIES: hit a threshold series value at 3 for sumstats_key(host=1.2.3.4) THRESHOLD_SERIES: hit a threshold series value at 3 for sumstats_key(host=1.2.3.4)
THRESHOLD_SERIES: hit a threshold series value at 6 for sumstats_key(host=1.2.3.4)
THRESHOLD: hit a threshold value at 6 for sumstats_key(host=1.2.3.4) THRESHOLD: hit a threshold value at 6 for sumstats_key(host=1.2.3.4)
THRESHOLD_SERIES: hit a threshold series value at 1001 for sumstats_key(host=7.2.1.5) THRESHOLD_SERIES: hit a threshold series value at 6 for sumstats_key(host=1.2.3.4)
THRESHOLD: hit a threshold value at 1001 for sumstats_key(host=7.2.1.5) THRESHOLD: hit a threshold value at 1001 for sumstats_key(host=7.2.1.5)
THRESHOLD_SERIES: hit a threshold series value at 1001 for sumstats_key(host=7.2.1.5)
THRESHOLD WITH RATIO BETWEEN REDUCERS: hit a threshold value at 55x for sumstats_key(host=7.2.1.5) THRESHOLD WITH RATIO BETWEEN REDUCERS: hit a threshold value at 55x for sumstats_key(host=7.2.1.5)