Mirror of https://github.com/zeek/zeek.git (synced 2025-10-13 03:58:20 +00:00)

commit 138438b88e

    Merge branch 'master' into topic/jsiwek/file-reassembly-merge

    Conflicts:
        testing/btest/Baseline/plugins.hooks/output

7 changed files with 106 additions and 34 deletions
CHANGES | 13 +

@@ -1,4 +1,17 @@
+2.3-349 | 2015-01-05 15:21:13 -0600
+
+  * Fix race condition in unified2 file analyzer startup. (Jon Siwek)
+
+2.3-348 | 2014-12-31 09:19:34 -0800
+
+  * Changing Makefile's test-all to run test-all for broctl, which now
+    executes trace-summary tests as well. (Robin Sommer)
+
+2.3-345 | 2014-12-31 09:06:15 -0800
+
+  * Correct a typo in the Notice framework doc. (Daniel Thayer)
+
 2.3-343 | 2014-12-12 12:43:46 -0800
 
   * Fix PIA packet replay to deliver copy of IP header. This prevented
Makefile | 2 +-

@@ -54,7 +54,7 @@ test:
 	@( cd testing && make )
 
 test-all: test
-	test -d aux/broctl && ( cd aux/broctl && make test )
+	test -d aux/broctl && ( cd aux/broctl && make test-all )
 	test -d aux/btest && ( cd aux/btest && make test )
 	test -d aux/bro-aux && ( cd aux/bro-aux && make test )
 	test -d aux/plugins && ( cd aux/plugins && make test-all )
VERSION | 2 +-

@@ -1 +1 @@
-2.3-343
+2.3-349
@@ -1 +1 @@
-Subproject commit 90f9ca0ffa2306f0d1d2ac208cdbb7787199f890
+Subproject commit 8c9b87bc73e1ddaa304e3d89028c1e7b95d37a91
@@ -271,7 +271,7 @@ script that is generating the notice has indicated to the notice framework how
 to identify notices that are intrinsically the same. Identification of these
 "intrinsically duplicate" notices is implemented with an optional field in
 :bro:see:`Notice::Info` records named ``$identifier`` which is a simple string.
-If the ``$identifier`` and ``$type`` fields are the same for two notices, the
+If the ``$identifier`` and ``$note`` fields are the same for two notices, the
 notice framework actually considers them to be the same thing and can use that
 information to suppress duplicates for a configurable period of time.
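For reference, the corrected passage describes suppression keyed on the ``$note`` and ``$identifier`` fields. A minimal, hypothetical Bro script sketch of that usage (the notice type, message text, and connection record ``c`` are illustrative stand-ins, not part of this commit):

    # Raise a notice the framework can deduplicate: later notices carrying the
    # same $note and $identifier are suppressed for the $suppress_for interval.
    NOTICE([$note=Scan::Address_Scan,
            $msg=fmt("%s appears to be scanning", c$id$orig_h),
            $identifier=cat(c$id$orig_h),
            $suppress_for=1hr]);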
@@ -71,11 +71,50 @@ global classification_map: table[count] of string;
 global sid_map: table[count] of string;
 global gen_map: table[count] of string;
 
+global num_classification_map_reads = 0;
+global num_sid_map_reads = 0;
+global num_gen_map_reads = 0;
+global watching = F;
+
 # For reading in config files.
 type OneLine: record {
 	line: string;
 };
 
+function mappings_initialized(): bool
+	{
+	return num_classification_map_reads > 0 &&
+	       num_sid_map_reads > 0 &&
+	       num_gen_map_reads > 0;
+	}
+
+function start_watching()
+	{
+	if ( watching )
+		return;
+
+	watching = T;
+
+	if ( watch_dir != "" )
+		{
+		Dir::monitor(watch_dir, function(fname: string)
+			{
+			Input::add_analysis([$source=fname,
+			                     $reader=Input::READER_BINARY,
+			                     $mode=Input::STREAM,
+			                     $name=fname]);
+			}, 10secs);
+		}
+
+	if ( watch_file != "" )
+		{
+		Input::add_analysis([$source=watch_file,
+		                     $reader=Input::READER_BINARY,
+		                     $mode=Input::STREAM,
+		                     $name=watch_file]);
+		}
+	}
+
 function create_info(ev: IDSEvent): Info
 	{
 	local info = Info($ts=ev$ts,
@@ -136,11 +175,33 @@ event Unified2::read_classification_line(desc: Input::EventDescription, tpe: Inp
 		}
 	}
 
+event Input::end_of_data(name: string, source: string)
+	{
+	if ( name == classification_config )
+		++num_classification_map_reads;
+	else if ( name == sid_msg )
+		++num_sid_map_reads;
+	else if ( name == gen_msg )
+		++num_gen_map_reads;
+	else
+		return;
+
+	if ( watching )
+		return;
+
+	if ( mappings_initialized() )
+		start_watching();
+	}
+
 event bro_init() &priority=5
 	{
 	Log::create_stream(Unified2::LOG, [$columns=Info, $ev=log_unified2]);
 
-	if ( sid_msg != "" )
+	if ( sid_msg == "" )
 		{
+		num_sid_map_reads = 1;
+		}
+	else
+		{
 		Input::add_event([$source=sid_msg,
 		                  $reader=Input::READER_RAW,
@@ -151,7 +212,11 @@ event bro_init() &priority=5
 		                  $ev=Unified2::read_sid_msg_line]);
 		}
 
-	if ( gen_msg != "" )
+	if ( gen_msg == "" )
 		{
+		num_gen_map_reads = 1;
+		}
+	else
+		{
 		Input::add_event([$source=gen_msg,
 		                  $name=gen_msg,
@@ -162,7 +227,11 @@ event bro_init() &priority=5
 		                  $ev=Unified2::read_gen_msg_line]);
 		}
 
-	if ( classification_config != "" )
+	if ( classification_config == "" )
 		{
+		num_classification_map_reads = 1;
+		}
+	else
+		{
 		Input::add_event([$source=classification_config,
 		                  $name=classification_config,
@@ -173,24 +242,8 @@ event bro_init() &priority=5
 		                  $ev=Unified2::read_classification_line]);
 		}
 
-	if ( watch_dir != "" )
-		{
-		Dir::monitor(watch_dir, function(fname: string)
-			{
-			Input::add_analysis([$source=fname,
-			                     $reader=Input::READER_BINARY,
-			                     $mode=Input::STREAM,
-			                     $name=fname]);
-			}, 10secs);
-		}
-
-	if ( watch_file != "" )
-		{
-		Input::add_analysis([$source=watch_file,
-		                     $reader=Input::READER_BINARY,
-		                     $mode=Input::STREAM,
-		                     $name=watch_file]);
-		}
+	if ( mappings_initialized() )
+		start_watching();
 	}
 
 event file_new(f: fa_file)
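The net effect of the hunks above: the map files (``sid_msg``, ``gen_msg``, ``classification_config``) are now read to completion before the script begins watching for unified2 output, closing the startup race. A hypothetical snippet that would exercise this path (the paths are examples only; the option names are the ones referenced in the diffed script, assumed to be redef-able):

    # Example site configuration for the unified2 analyzer; any option left
    # as "" is counted as already read, so watching can still start.
    redef Unified2::watch_dir = "/var/log/snort";
    redef Unified2::sid_msg = "/etc/snort/sid-msg.map";
    redef Unified2::gen_msg = "/etc/snort/gen-msg.map";
    redef Unified2::classification_config = "/etc/snort/classification.config";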
testing/btest/Baseline/plugins.hooks/output

@@ -189,7 +189,7 @@
 0.000000 MetaHookPost CallFunction(Log::__create_stream, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird])) -> <null>
 0.000000 MetaHookPost CallFunction(Log::__create_stream, (X509::LOG, [columns=<no value description>, ev=X509::log_x509])) -> <null>
 0.000000 MetaHookPost CallFunction(Log::__create_stream, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql])) -> <null>
-0.000000 MetaHookPost CallFunction(Log::__write, (PacketFilter::LOG, [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T])) -> <null>
+0.000000 MetaHookPost CallFunction(Log::__write, (PacketFilter::LOG, [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T])) -> <null>
 0.000000 MetaHookPost CallFunction(Log::add_default_filter, (Cluster::LOG)) -> <null>
 0.000000 MetaHookPost CallFunction(Log::add_default_filter, (Communication::LOG)) -> <null>
 0.000000 MetaHookPost CallFunction(Log::add_default_filter, (Conn::LOG)) -> <null>
@@ -283,8 +283,8 @@
 0.000000 MetaHookPost CallFunction(Log::create_stream, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird])) -> <null>
 0.000000 MetaHookPost CallFunction(Log::create_stream, (X509::LOG, [columns=<no value description>, ev=X509::log_x509])) -> <null>
 0.000000 MetaHookPost CallFunction(Log::create_stream, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql])) -> <null>
-0.000000 MetaHookPost CallFunction(Log::default_path_func, (PacketFilter::LOG, , [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T])) -> <null>
-0.000000 MetaHookPost CallFunction(Log::write, (PacketFilter::LOG, [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T])) -> <null>
+0.000000 MetaHookPost CallFunction(Log::default_path_func, (PacketFilter::LOG, , [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T])) -> <null>
+0.000000 MetaHookPost CallFunction(Log::write, (PacketFilter::LOG, [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T])) -> <null>
 0.000000 MetaHookPost CallFunction(Notice::want_pp, ()) -> <null>
 0.000000 MetaHookPost CallFunction(PacketFilter::build, ()) -> <null>
 0.000000 MetaHookPost CallFunction(PacketFilter::combine_filters, (ip or not ip, and, )) -> <null>
@@ -303,6 +303,8 @@
 0.000000 MetaHookPost CallFunction(SumStats::register_observe_plugin, (SumStats::UNIQUE, anonymous-function{ if (!SumStats::rv?$unique_vals) SumStats::rv$unique_vals = (coerce set() to set[SumStats::Observation])if (SumStats::r?$unique_max) SumStats::rv$unique_max = SumStats::r$unique_maxif (!SumStats::r?$unique_max || flattenSumStats::rv$unique_vals <= SumStats::r$unique_max) add SumStats::rv$unique_vals[SumStats::obs]SumStats::rv$unique = flattenSumStats::rv$unique_vals})) -> <null>
 0.000000 MetaHookPost CallFunction(SumStats::register_observe_plugin, (SumStats::VARIANCE, anonymous-function{ if (1 < SumStats::rv$num) SumStats::rv$var_s += ((SumStats::val - SumStats::rv$prev_avg) * (SumStats::val - SumStats::rv$average))SumStats::calc_variance(SumStats::rv)SumStats::rv$prev_avg = SumStats::rv$average})) -> <null>
 0.000000 MetaHookPost CallFunction(SumStats::register_observe_plugins, ()) -> <null>
+0.000000 MetaHookPost CallFunction(Unified2::mappings_initialized, ()) -> <null>
+0.000000 MetaHookPost CallFunction(Unified2::start_watching, ()) -> <null>
 0.000000 MetaHookPost CallFunction(bro_init, ()) -> <null>
 0.000000 MetaHookPost CallFunction(cat, (Packe, t, _, Filter)) -> <null>
 0.000000 MetaHookPost CallFunction(current_time, ()) -> <null>
@@ -724,7 +726,7 @@
 0.000000 MetaHookPre CallFunction(Log::__create_stream, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird]))
 0.000000 MetaHookPre CallFunction(Log::__create_stream, (X509::LOG, [columns=<no value description>, ev=X509::log_x509]))
 0.000000 MetaHookPre CallFunction(Log::__create_stream, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql]))
-0.000000 MetaHookPre CallFunction(Log::__write, (PacketFilter::LOG, [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T]))
+0.000000 MetaHookPre CallFunction(Log::__write, (PacketFilter::LOG, [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T]))
 0.000000 MetaHookPre CallFunction(Log::add_default_filter, (Cluster::LOG))
 0.000000 MetaHookPre CallFunction(Log::add_default_filter, (Communication::LOG))
 0.000000 MetaHookPre CallFunction(Log::add_default_filter, (Conn::LOG))
@@ -818,8 +820,8 @@
 0.000000 MetaHookPre CallFunction(Log::create_stream, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird]))
 0.000000 MetaHookPre CallFunction(Log::create_stream, (X509::LOG, [columns=<no value description>, ev=X509::log_x509]))
 0.000000 MetaHookPre CallFunction(Log::create_stream, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql]))
-0.000000 MetaHookPre CallFunction(Log::default_path_func, (PacketFilter::LOG, , [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T]))
-0.000000 MetaHookPre CallFunction(Log::write, (PacketFilter::LOG, [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T]))
+0.000000 MetaHookPre CallFunction(Log::default_path_func, (PacketFilter::LOG, , [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T]))
+0.000000 MetaHookPre CallFunction(Log::write, (PacketFilter::LOG, [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T]))
 0.000000 MetaHookPre CallFunction(Notice::want_pp, ())
 0.000000 MetaHookPre CallFunction(PacketFilter::build, ())
 0.000000 MetaHookPre CallFunction(PacketFilter::combine_filters, (ip or not ip, and, ))
@@ -838,6 +840,8 @@
 0.000000 MetaHookPre CallFunction(SumStats::register_observe_plugin, (SumStats::UNIQUE, anonymous-function{ if (!SumStats::rv?$unique_vals) SumStats::rv$unique_vals = (coerce set() to set[SumStats::Observation])if (SumStats::r?$unique_max) SumStats::rv$unique_max = SumStats::r$unique_maxif (!SumStats::r?$unique_max || flattenSumStats::rv$unique_vals <= SumStats::r$unique_max) add SumStats::rv$unique_vals[SumStats::obs]SumStats::rv$unique = flattenSumStats::rv$unique_vals}))
 0.000000 MetaHookPre CallFunction(SumStats::register_observe_plugin, (SumStats::VARIANCE, anonymous-function{ if (1 < SumStats::rv$num) SumStats::rv$var_s += ((SumStats::val - SumStats::rv$prev_avg) * (SumStats::val - SumStats::rv$average))SumStats::calc_variance(SumStats::rv)SumStats::rv$prev_avg = SumStats::rv$average}))
 0.000000 MetaHookPre CallFunction(SumStats::register_observe_plugins, ())
+0.000000 MetaHookPre CallFunction(Unified2::mappings_initialized, ())
+0.000000 MetaHookPre CallFunction(Unified2::start_watching, ())
 0.000000 MetaHookPre CallFunction(bro_init, ())
 0.000000 MetaHookPre CallFunction(cat, (Packe, t, _, Filter))
 0.000000 MetaHookPre CallFunction(current_time, ())
@@ -1259,7 +1263,7 @@
 0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=<no value description>, ev=Weird::log_weird])
 0.000000 | HookCallFunction Log::__create_stream(X509::LOG, [columns=<no value description>, ev=X509::log_x509])
 0.000000 | HookCallFunction Log::__create_stream(mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql])
-0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T])
+0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T])
 0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG)
 0.000000 | HookCallFunction Log::add_default_filter(Communication::LOG)
 0.000000 | HookCallFunction Log::add_default_filter(Conn::LOG)
@@ -1353,8 +1357,8 @@
 0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=<no value description>, ev=Weird::log_weird])
 0.000000 | HookCallFunction Log::create_stream(X509::LOG, [columns=<no value description>, ev=X509::log_x509])
 0.000000 | HookCallFunction Log::create_stream(mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql])
-0.000000 | HookCallFunction Log::default_path_func(PacketFilter::LOG, , [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T])
-0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1418743793.447552, node=bro, filter=ip or not ip, init=T, success=T])
+0.000000 | HookCallFunction Log::default_path_func(PacketFilter::LOG, , [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T])
+0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1420494303.113424, node=bro, filter=ip or not ip, init=T, success=T])
 0.000000 | HookCallFunction Notice::want_pp()
 0.000000 | HookCallFunction PacketFilter::build()
 0.000000 | HookCallFunction PacketFilter::combine_filters(ip or not ip, and, )
@@ -1373,6 +1377,8 @@
 0.000000 | HookCallFunction SumStats::register_observe_plugin(SumStats::UNIQUE, anonymous-function{ if (!SumStats::rv?$unique_vals) SumStats::rv$unique_vals = (coerce set() to set[SumStats::Observation])if (SumStats::r?$unique_max) SumStats::rv$unique_max = SumStats::r$unique_maxif (!SumStats::r?$unique_max || flattenSumStats::rv$unique_vals <= SumStats::r$unique_max) add SumStats::rv$unique_vals[SumStats::obs]SumStats::rv$unique = flattenSumStats::rv$unique_vals})
 0.000000 | HookCallFunction SumStats::register_observe_plugin(SumStats::VARIANCE, anonymous-function{ if (1 < SumStats::rv$num) SumStats::rv$var_s += ((SumStats::val - SumStats::rv$prev_avg) * (SumStats::val - SumStats::rv$average))SumStats::calc_variance(SumStats::rv)SumStats::rv$prev_avg = SumStats::rv$average})
 0.000000 | HookCallFunction SumStats::register_observe_plugins()
+0.000000 | HookCallFunction Unified2::mappings_initialized()
+0.000000 | HookCallFunction Unified2::start_watching()
 0.000000 | HookCallFunction bro_init()
 0.000000 | HookCallFunction cat(Packe, t, _, Filter)
 0.000000 | HookCallFunction current_time()