Settle on analyzer.log for the dpd.log replacement

This commit renames analyzer_failed.log to analyzer.log and updates the
corresponding NEWS entry.
Johanna Amann 2025-05-22 10:40:56 +01:00
parent 130c89a0a7
commit 42ba2fcca0
73 changed files with 88 additions and 88 deletions
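
A minimal sketch (Zeek script): sites that post-process the log by its old file
name could keep producing analyzer_failed.log after this rename by repointing the
stream's default filter. This assumes the Analyzer::Logging::LOG stream and its
"default" filter shown in the diff below; it is an illustration, not part of this
commit.

event zeek_init() &priority=-5
	{
	# Runs after the stream and its default filter exist (created at &priority=5),
	# then points the filter's output path back at the old file name.
	local f = Log::get_filter(Analyzer::Logging::LOG, "default");
	f$path = "analyzer_failed";
	Log::add_filter(Analyzer::Logging::LOG, f);
	}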

View file

@ -1,4 +1,4 @@
##! Logging analyzer violations into analyzer_failed.log
##! Logging analyzer violations into analyzer.log
@load base/frameworks/logging
@load ./main
@ -9,7 +9,7 @@ export {
## Add the analyzer logging stream identifier.
redef enum Log::ID += { LOG };
## The record type defining the columns to log in the analyzer-failed logging stream.
## The record type defining the columns to log in the analyzer logging stream.
type Info: record {
## Timestamp of the violation.
ts: time &log;
@ -38,7 +38,7 @@ export {
## An event that can be handled to access the :zeek:type:`Analyzer::Logging::Info`
## record as it is sent on to the logging framework.
global log_analyzer_failed: event(rec: Info);
global log_analyzer: event(rec: Info);
## A default logging policy hook for the stream.
global log_policy: Log::PolicyHook;
@ -46,7 +46,7 @@ export {
event zeek_init() &priority=5
{
Log::create_stream(LOG, [$columns=Info, $path="analyzer_failed", $ev=log_analyzer_failed, $policy=log_policy]);
Log::create_stream(LOG, [$columns=Info, $path="analyzer", $ev=log_analyzer, $policy=log_policy]);
}
function log_analyzer_failure(ts: time, atype: AllAnalyzers::Tag, info: AnalyzerViolationInfo)
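
For consumers of this stream, only the event and path names change; the record
contents stay the same. A minimal sketch of handling the renamed event, assuming
the Info record exposes the columns listed in the baselines below (such as
analyzer_name and failure_reason):

event Analyzer::Logging::log_analyzer(rec: Analyzer::Logging::Info)
	{
	# React to each violation record as it is sent to the logging framework.
	print fmt("analyzer violation: %s (%s)", rec$analyzer_name, rec$failure_reason);
	}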

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -1,6 +1,6 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
analyzer
analyzer_debug
analyzer_failed
broker
capture_loss
cluster

View file

@ -27,7 +27,7 @@
0.000000 MetaHookPost CallFunction(Cluster::register_pool, <frame>, ([topic=zeek<...>/worker, node_type=Cluster::WORKER, max_nodes=<uninitialized>, exclusive=F])) -> <no result>
0.000000 MetaHookPost CallFunction(Config::config_option_changed, <null>, (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>)) -> <no result>
0.000000 MetaHookPost CallFunction(Files::register_protocol, <frame>, (Analyzer::ANALYZER_HTTP, [get_file_handle=HTTP::get_file_handle: function(c:connection, is_orig:bool) : string{ if (!HTTP::c?$http) return ()if (HTTP::c$http$range_request && !HTTP::is_orig) { return (cat(Analyzer::ANALYZER_HTTP, HTTP::is_orig, HTTP::c$id$orig_h, HTTP::build_url(HTTP::c$http)))}else{ HTTP::mime_depth = HTTP::is_orig ? HTTP::c$http$orig_mime_depth : HTTP::c$http$resp_mime_depthreturn (cat(Analyzer::ANALYZER_HTTP, HTTP::c$start_time, HTTP::is_orig, HTTP::c$http$trans_depth, HTTP::mime_depth, id_string(HTTP::c$id)))}}, describe=HTTP::describe_file: function(f:fa_file) : string{ if (HTTP::f$source != HTTP) return ()for ([HTTP::_], HTTP::c in HTTP::f$conns) { if (HTTP::c?$http) return (HTTP::build_url_http(HTTP::c$http))}return ()}])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer_failed, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=broker, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=cluster, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=config, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
@ -38,7 +38,7 @@
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer_failed: event(rec:Analyzer::Logging::Info), path=analyzer_failed, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
@ -82,7 +82,7 @@
0.000000 MetaHookPost CallFunction(Log::add_stream_filters, <frame>, (Notice::LOG, default)) -> <no result>
0.000000 MetaHookPost CallFunction(Log::add_stream_filters, <frame>, (Tunnel::LOG, default)) -> <no result>
0.000000 MetaHookPost CallFunction(Log::add_stream_filters, <frame>, (Weird::LOG, default)) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer_failed: event(rec:Analyzer::Logging::Info), path=analyzer_failed, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
@ -966,7 +966,7 @@
0.000000 MetaHookPre CallFunction(Cluster::register_pool, <frame>, ([topic=zeek<...>/worker, node_type=Cluster::WORKER, max_nodes=<uninitialized>, exclusive=F]))
0.000000 MetaHookPre CallFunction(Config::config_option_changed, <null>, (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>))
0.000000 MetaHookPre CallFunction(Files::register_protocol, <frame>, (Analyzer::ANALYZER_HTTP, [get_file_handle=HTTP::get_file_handle: function(c:connection, is_orig:bool) : string{ if (!HTTP::c?$http) return ()if (HTTP::c$http$range_request && !HTTP::is_orig) { return (cat(Analyzer::ANALYZER_HTTP, HTTP::is_orig, HTTP::c$id$orig_h, HTTP::build_url(HTTP::c$http)))}else{ HTTP::mime_depth = HTTP::is_orig ? HTTP::c$http$orig_mime_depth : HTTP::c$http$resp_mime_depthreturn (cat(Analyzer::ANALYZER_HTTP, HTTP::c$start_time, HTTP::is_orig, HTTP::c$http$trans_depth, HTTP::mime_depth, id_string(HTTP::c$id)))}}, describe=HTTP::describe_file: function(f:fa_file) : string{ if (HTTP::f$source != HTTP) return ()for ([HTTP::_], HTTP::c in HTTP::f$conns) { if (HTTP::c?$http) return (HTTP::build_url_http(HTTP::c$http))}return ()}]))
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer_failed, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=broker, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=cluster, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=config, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
@ -977,7 +977,7 @@
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer_failed: event(rec:Analyzer::Logging::Info), path=analyzer_failed, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
@ -1021,7 +1021,7 @@
0.000000 MetaHookPre CallFunction(Log::add_stream_filters, <frame>, (Notice::LOG, default))
0.000000 MetaHookPre CallFunction(Log::add_stream_filters, <frame>, (Tunnel::LOG, default))
0.000000 MetaHookPre CallFunction(Log::add_stream_filters, <frame>, (Weird::LOG, default))
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer_failed: event(rec:Analyzer::Logging::Info), path=analyzer_failed, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
@ -1904,7 +1904,7 @@
0.000000 | HookCallFunction Cluster::register_pool([topic=zeek<...>/worker, node_type=Cluster::WORKER, max_nodes=<uninitialized>, exclusive=F])
0.000000 | HookCallFunction Config::config_option_changed(Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>)
0.000000 | HookCallFunction Files::register_protocol(Analyzer::ANALYZER_HTTP, [get_file_handle=HTTP::get_file_handle: function(c:connection, is_orig:bool) : string{ if (!HTTP::c?$http) return ()if (HTTP::c$http$range_request && !HTTP::is_orig) { return (cat(Analyzer::ANALYZER_HTTP, HTTP::is_orig, HTTP::c$id$orig_h, HTTP::build_url(HTTP::c$http)))}else{ HTTP::mime_depth = HTTP::is_orig ? HTTP::c$http$orig_mime_depth : HTTP::c$http$resp_mime_depthreturn (cat(Analyzer::ANALYZER_HTTP, HTTP::c$start_time, HTTP::is_orig, HTTP::c$http$trans_depth, HTTP::mime_depth, id_string(HTTP::c$id)))}}, describe=HTTP::describe_file: function(f:fa_file) : string{ if (HTTP::f$source != HTTP) return ()for ([HTTP::_], HTTP::c in HTTP::f$conns) { if (HTTP::c?$http) return (HTTP::build_url_http(HTTP::c$http))}return ()}])
0.000000 | HookCallFunction Log::__add_filter(Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer_failed, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
0.000000 | HookCallFunction Log::__add_filter(Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
0.000000 | HookCallFunction Log::__add_filter(Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=broker, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
0.000000 | HookCallFunction Log::__add_filter(Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=cluster, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
0.000000 | HookCallFunction Log::__add_filter(Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=config, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
@ -1915,7 +1915,7 @@
0.000000 | HookCallFunction Log::__add_filter(Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
0.000000 | HookCallFunction Log::__add_filter(Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
0.000000 | HookCallFunction Log::__add_filter(Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer_failed: event(rec:Analyzer::Logging::Info), path=analyzer_failed, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
@ -1959,7 +1959,7 @@
0.000000 | HookCallFunction Log::add_stream_filters(Notice::LOG, default)
0.000000 | HookCallFunction Log::add_stream_filters(Tunnel::LOG, default)
0.000000 | HookCallFunction Log::add_stream_filters(Weird::LOG, default)
0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer_failed: event(rec:Analyzer::Logging::Info), path=analyzer_failed, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data packet_segment
#types time string string string string addr port addr port string string string

View file

@ -3,7 +3,7 @@
#set_separator ,
#empty_field (empty)
#unset_field -
#path analyzer_failed
#path analyzer
#open XXXX-XX-XX-XX-XX-XX
#fields ts analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data
#types time string string string string addr port addr port string string

View file

@ -1,5 +1,5 @@
# @TEST-EXEC: zeek -C -r $TRACES/tunnels/gtp/gtp9_unknown_or_too_short_payload.pcap %INPUT
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer.log
# @TEST-EXEC: btest-diff tunnel.log
# Packet 11, epoch time 1333458853.075889 is malformed. Only 222 byte are

View file

@ -4,7 +4,7 @@
# @TEST-EXEC: zeek -r $TRACES/tunnels/vxlan-encapsulated-igmp-v2.pcap %INPUT
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
@load base/frameworks/tunnels
@load base/protocols/conn

View file

@ -2,7 +2,7 @@
# @TEST-EXEC: zeek -b -r $TRACES/dce-rpc/ntlm-empty-av-sequence.pcap %INPUT
# @TEST-EXEC: btest-diff ntlm.log
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer.log
@load frameworks/analyzer/debug-logging.zeek
@load base/protocols/dce-rpc

View file

@ -2,7 +2,7 @@
# @TEST-EXEC: zeek -b -r $TRACES/dce-rpc/ntlm-unterminated-av-sequence.pcap %INPUT
# @TEST-EXEC: btest-diff ntlm.log
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer.log
@load base/protocols/dce-rpc
@load base/protocols/ntlm

View file

@ -2,7 +2,7 @@
# @TEST-EXEC: zeek -b -r $TRACES/ftp/ftp-invalid-reply-code.pcap %INPUT
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ftp.log
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer.log
# @TEST-EXEC: test ! -f reporter.log
@load base/protocols/conn

View file

@ -2,7 +2,7 @@
# @TEST-EXEC: zeek -b -r $TRACES/ftp/ftp-missing-reply-code.pcap %INPUT
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ftp.log
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer.log
# @TEST-EXEC: test ! -f reporter.log
@load base/protocols/conn

View file

@ -2,7 +2,7 @@
# @TEST-EXEC: zeek -b -r $TRACES/ftp/ftp-missing-space-after-reply-code.pcap %INPUT
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ftp.log
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer.log
# @TEST-EXEC: test ! -f reporter.log
@load base/protocols/conn

View file

@ -5,7 +5,7 @@
#
# @TEST-EXEC: zeek -r $TRACES/http/websocket.pcap %INPUT
# @TEST-EXEC: test ! -f weird.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: btest-diff http.log
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: btest-diff .stdout
@ -25,7 +25,7 @@ hook WebSocket::configure_analyzer(c: connection, aid: count, config: WebSocket:
# The originator's WebSocket frames match HTTP, so DPD would
# enable HTTP for the frame's payload, but the responder's frames
# contain some ack/status junk just before HTTP response that
# trigger a violation. Disable DPD for to prevent a analyzer_failed.log
# trigger a violation. Disable DPD for to prevent a analyzer.log
# entry.
config$use_dpd = F;
}

View file

@ -1,7 +1,7 @@
# @TEST-EXEC: zeek -b -r $TRACES/http/http-11-request-then-cruft.pcap %INPUT > output
# @TEST-EXEC: btest-diff http.log
# @TEST-EXEC: btest-diff weird.log
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer.log
@load base/protocols/http
@load base/frameworks/notice/weird

View file

@ -5,6 +5,6 @@
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: The addRequest/addResponse operation is not implemented, yet we process it.

View file

@ -7,6 +7,6 @@
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f weird.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: Test LDAP analyzer with SASL encrypted payloads.

View file

@ -6,6 +6,6 @@
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: This broke after #3826 got merged

View file

@ -6,6 +6,6 @@
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: This broke after #3826 got merged

View file

@ -6,6 +6,6 @@
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: Test LDAP analyzer with GSS-API integrity traffic where we can still peak into LDAP wrapped into WRAP tokens.

View file

@ -6,6 +6,6 @@
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: Test LDAP analyzer with GSS-API integrity traffic where we can still peak into LDAP wrapped into WRAP tokens.

View file

@ -5,6 +5,6 @@
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: SASL authentication using SRP (Secure Remote Password)

View file

@ -9,6 +9,6 @@
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: SASL bindRequest with SPNEGO NTLMSSP.

View file

@ -7,7 +7,7 @@
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: LDAP supports StartTLS through extendedRequest 1.3.6.1.4.1.1466.20037

View file

@ -6,7 +6,7 @@
# @TEST-EXEC: btest-diff out
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: Testing OpenLDAP's ldapwhoami utility with simple authentication.

View file

@ -3,7 +3,7 @@
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff out
# @TEST-EXEC: test ! -f weird.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
@load base/frameworks/notice/weird
@load base/protocols/conn

View file

@ -3,7 +3,7 @@
# @TEST-REQUIRES: ${SCRIPTS}/have-spicy
# @TEST-EXEC: zeek -b -Cr ${TRACES}/postgresql/bad-backend-message-1.pcap %INPUT
# @TEST-EXEC: zeek-cut -m ts uid id.orig_h id.orig_p id.resp_h id.resp_p service < conn.log > conn.cut
# @TEST-EXEC: zeek-cut -m < analyzer_failed.log > analyzer.cut
# @TEST-EXEC: zeek-cut -m < analyzer.log > analyzer.cut
#
# @TEST-EXEC: btest-diff conn.cut
# @TEST-EXEC: TEST_DIFF_CANONIFIER="sed -r 's,(.*) \(/[^\)]+\),\1 (...),'" btest-diff analyzer.cut

View file

@ -7,4 +7,4 @@
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: btest-diff quic.log
# @TEST-EXEC: btest-diff .stderr
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log

View file

@ -7,4 +7,4 @@
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: btest-diff quic.log
# @TEST-EXEC: btest-diff .stderr
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log

View file

@ -7,4 +7,4 @@
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: btest-diff quic.log
# @TEST-EXEC: btest-diff .stderr
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log

View file

@ -1,6 +1,6 @@
# Tests a pcap that has a known-invalid length in a RDP_Negotiation_Response
# header, ensuring that it throws a binpac exception and reports a notice to
# analyzer_failed.log. The pcap used is a snippet of a pcap from OSS-Fuzz #57109.
# analyzer.log. The pcap used is a snippet of a pcap from OSS-Fuzz #57109.
# @TEST-EXEC: zeek -C -b -r $TRACES/rdp/rdp-invalid-length.pcap %INPUT
# @TEST-EXEC: btest-diff analyzer_debug.log

View file

@ -3,7 +3,7 @@
# @TEST-EXEC: zeek -b -r $TRACES/rdp/rdp-no-cookie-mstshash.pcap %INPUT
# @TEST-EXEC: btest-diff rdp.log
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
@load base/protocols/rdp
@load base/protocols/ssl

View file

@ -1,7 +1,7 @@
# @TEST-EXEC: zeek -C -r $TRACES/smb/smb2readwrite.pcap %INPUT
# @TEST-EXEC: btest-diff smb_files.log
# @TEST-EXEC: btest-diff files.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
@load base/protocols/smb

View file

@ -1,6 +1,6 @@
# @TEST-DOC: Tests handling of PDUs containing error ioctls with byte lengths of zero
# @TEST-EXEC: zeek -b -r $TRACES/smb/smb2-zero-byte-error-ioctl.pcap %INPUT 2>&1 >out
# @TEST-EXEC: ! test -f analyzer_failed.log
# @TEST-EXEC: ! test -f analyzer.log
# @TEST-EXEC: btest-diff out
@load base/protocols/smb

View file

@ -2,7 +2,7 @@
# @TEST-EXEC: btest-diff smb_files.log
# @TEST-EXEC: btest-diff smb_mapping.log
# @TEST-EXEC: btest-diff files.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
# @TEST-EXEC: btest-diff .stdout

View file

@ -1,6 +1,6 @@
# @TEST-EXEC: zeek -b -r $TRACES/smb/smb3_multichannel.pcap %INPUT
# @TEST-EXEC: btest-diff smb_files.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/smb

View file

@ -1,6 +1,6 @@
# @TEST-EXEC: zeek -r $TRACES/smb/smb3.pcap %INPUT
# @TEST-EXEC: btest-diff smb_mapping.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
# @TEST-EXEC: btest-diff .stdout

View file

@ -1,5 +1,5 @@
# @TEST-EXEC: zeek -b -C -r $TRACES/smb/smb311.pcap %INPUT
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
# @TEST-EXEC: btest-diff .stdout

View file

@ -1,5 +1,5 @@
# Tests processing of half-duplex client-side connections, including no
# analyzer_failed.log output.
# analyzer.log output.
# @TEST-EXEC: zeek -r $TRACES/ssh/ssh.client-side-half-duplex.pcap %INPUT
# @TEST-EXEC: btest-diff analyzer_debug.log

View file

@ -1,5 +1,5 @@
# Tests processing of half-duplex server-side connections, including no
# analyzer_failed.log output.
# analyzer.log output.
# @TEST-EXEC: zeek -r $TRACES/ssh/ssh.server-side-half-duplex.pcap %INPUT
# @TEST-EXEC: btest-diff analyzer_debug.log

View file

@ -3,5 +3,5 @@
# @TEST-EXEC: zeek -r $TRACES/tls/tls-conn-with-extensions.trace %INPUT
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: btest-diff x509.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f files.log

View file

@ -8,7 +8,7 @@
# @TEST-EXEC: cat ssl.log >> ssl-all.log
# @TEST-EXEC: btest-diff ssl-all.log
# @TEST-EXEC: btest-diff .stdout
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
event ssl_client_hello(c: connection, version: count, record_version: count, possible_ts: time, client_random: string, session_id: string, ciphers: index_vec, comp_methods: index_vec)
{

View file

@ -1,8 +1,8 @@
# @TEST-REQUIRES: ! have-spicy-ssl # DTLS is not supported in Spicy SSL yet
# @TEST-EXEC: zeek -b -r $TRACES/tls/webrtc-stun.pcap %INPUT
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: touch analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: touch analyzer.log
# @TEST-EXEC: btest-diff analyzer.log
# @TEST-EXEC: btest-diff .stdout
@load base/protocols/ssl

View file

@ -9,7 +9,7 @@
#
# @TEST-EXEC: zeek -r $TRACES/tls/signed_certificate_timestamp_tls1_0.pcap %INPUT
# @TEST-EXEC: btest-diff .stdout
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
redef SSL::ct_logs += {
["\x68\xf6\x98\xf8\x1f\x64\x82\xbe\x3a\x8c\xee\xb9\x28\x1d\x4c\xfc\x71\x51\x5d\x67\x93\xd4\x44\xd1\x0a\x67\xac\xbb\x4f\x4f\xfb\xc4"] = SSL::CTInfo($description="Google 'Aviator' log", $operator="Google", $url="ct.googleapis.com/aviator/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xd7\xf4\xcc\x69\xb2\xe4\x0e\x90\xa3\x8a\xea\x5a\x70\x09\x4f\xef\x13\x62\xd0\x8d\x49\x60\xff\x1b\x40\x50\x07\x0c\x6d\x71\x86\xda\x25\x49\x8d\x65\xe1\x08\x0d\x47\x34\x6b\xbd\x27\xbc\x96\x21\x3e\x34\xf5\x87\x76\x31\xb1\x7f\x1d\xc9\x85\x3b\x0d\xf7\x1f\x3f\xe9"),

View file

@ -1,5 +1,5 @@
# This tests that no error messages are output when a protocol violation occurs
# @TEST-EXEC: zeek -C -r $TRACES/tls/tls1.2-protocol-violation.pcap %INPUT
# @TEST-EXEC: test -f analyzer_failed.log
# @TEST-EXEC: test -f analyzer.log
# @TEST-EXEC: btest-diff .stderr

View file

@ -3,7 +3,7 @@
# @TEST-EXEC: zeek -b -r $TRACES/tls/tls1_1.pcap %INPUT
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: btest-diff x509.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
@load base/protocols/ssl
@load base/files/x509

View file

@ -6,7 +6,7 @@
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/conn

View file

@ -8,7 +8,7 @@
# @TEST-EXEC: btest-diff out-coalesced
# @TEST-EXEC: btest-diff weird.log
# @TEST-EXEC: diff out-separate out-coalesced
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
@load base/protocols/websocket

View file

@ -12,7 +12,7 @@
# @TEST-EXEC: echo "message-too-big-status.pcap" >>out.spicy
# @TEST-EXEC: zeek -b -r $TRACES//websocket/message-too-big-status.pcap %INPUT WebSocket::use_spicy_analyzer=T >>out.spicy
# @TEST-EXEC: diff -u out.spicy out >&2
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/websocket

View file

@ -11,7 +11,7 @@
# @TEST-EXEC: echo "two-binary-fragments.pcap" >>out
# @TEST-EXEC: zeek -b -r $TRACES//websocket/two-binary-fragments.pcap %INPUT >>out
# @TEST-EXEC: btest-diff out
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/websocket

View file

@ -6,7 +6,7 @@
#
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/conn

View file

@ -8,7 +8,7 @@
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: btest-diff http.log.cut
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/conn

View file

@ -8,7 +8,7 @@
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: btest-diff ssl.log.cut
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/conn

View file

@ -8,7 +8,7 @@
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: test ! -f ssh.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
@load base/protocols/conn
@load base/protocols/http

View file

@ -7,7 +7,7 @@
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: test ! -f ssh.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
@load base/protocols/conn
@load base/protocols/http

View file

@ -8,7 +8,7 @@
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: btest-diff ssh.log.cut
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/conn

View file

@ -7,7 +7,7 @@
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: test ! -f websocket.log
# @TEST-EXEC: test ! -f ssh.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/conn

View file

@ -14,7 +14,7 @@
# @TEST-EXEC: diff -u ssh.log.cut.spicy ssh.log.cut >&2
# @TEST-EXEC: btest-diff conn.log.cut.spicy
# @TEST-EXEC: btest-diff ssh.log.cut.spicy
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/conn

View file

@ -8,7 +8,7 @@
# @TEST-EXEC: btest-diff conn.log.cut
# @TEST-EXEC: btest-diff ssh.log.cut
# @TEST-EXEC: btest-diff websocket.log
# @TEST-EXEC: test ! -f analyzer_failed.log
# @TEST-EXEC: test ! -f analyzer.log
# @TEST-EXEC: test ! -f weird.log
@load base/protocols/conn

View file

@ -1,6 +1,6 @@
# @TEST-DOC: IPv6 connection from external ipv6.pcap triggering FTP analyzer violation. Check analyzer_failed.log contains the right packet_segment
# @TEST-DOC: IPv6 connection from external ipv6.pcap triggering FTP analyzer violation. Check analyzer.log contains the right packet_segment
# @TEST-EXEC: zeek -r $TRACES/ftp/ipv6-violation.trace %INPUT
# @TEST-EXEC: btest-diff analyzer_failed.log
# @TEST-EXEC: btest-diff analyzer.log
@load frameworks/analyzer/packet-segment-logging

View file

@ -2,11 +2,11 @@
#
# @TEST-EXEC: spicyz -d -o test.hlto test.evt test.spicy
# @TEST-EXEC: HILTI_DEBUG=zeek zeek -r ${TRACES}/ssh/single-conn.trace misc/dump-events test.hlto %INPUT
# Zeek versions differ in their quoting of the newline character in analyzer_failed.log (two slashes vs one).
# @TEST-EXEC: cat analyzer_failed.log | sed 's#\\\\#\\#g' >analyzer_failed.log.tmp && mv analyzer_failed.log.tmp analyzer_failed.log.log
# @TEST-EXEC: TEST_DIFF_CANONIFIER=diff-canonifier-spicy btest-diff analyzer_failed.log
# Zeek versions differ in their quoting of the newline character in analyzer.log (two slashes vs one).
# @TEST-EXEC: cat analyzer.log | sed 's#\\\\#\\#g' >analyzer.log.tmp && mv analyzer.log.tmp analyzer.log
# @TEST-EXEC: TEST_DIFF_CANONIFIER=diff-canonifier-spicy btest-diff analyzer.log
#
# @TEST-DOC: Trigger parse error after confirmation, should be recorded in analyzer_failed.log
# @TEST-DOC: Trigger parse error after confirmation, should be recorded in analyzer.log
event zeek_init() {
Analyzer::register_for_port(Analyzer::ANALYZER_SPICY_SSH, 22/tcp);

View file

@ -1 +1 @@
b7089cf5abe4fa7aca548e6c8616ccbed7fbcfaa
d1b0dc34612cdd0a2f6608039ad842eaf8934478

View file

@ -1 +1 @@
c85d69fb00e1c032579282455e2d7ed39b7dab14
d4f52f1ab6eb26d4a88315a43d25e8481665ad3a

View file

@ -23,7 +23,7 @@ if [ "$filename" == "analyzer_debug.log" ]; then
addl="$(dirname $0)/diff-remove-abspath"
fi
if [ "$filename" == "analyzer_failed.log" ]; then
if [ "$filename" == "analyzer.log" ]; then
addl="$(dirname $0)/diff-remove-abspath"
fi