From 0ec2161b0494a7af947b30fa2bba18f1aa10d047 Mon Sep 17 00:00:00 2001
From: Tim Wojtulewicz
Date: Tue, 5 Aug 2025 14:50:15 -0700
Subject: [PATCH] Add options to limit log field sizes at the stream level as well as globally

---
 scripts/base/frameworks/logging/main.zeek     | 20 +++
 scripts/base/init-bare.zeek                   | 8 +-
 src/logging/Manager.cc                        | 75 +++++-----
 src/logging/Manager.h                         | 10 +-
 testing/btest/Baseline/plugins.hooks/output   | 132 +++++++++---------
 .../.stderr                                   | 4 +-
 .../.stderr                                   | 4 +-
 .../.stderr                                   | 4 +-
 .../.stderr                                   | 4 +-
 .../.stdout                                   | 5 +
 .../test.log                                  | 11 ++
 .../weird.log                                 | 11 ++
 .../.stdout                                   | 5 +
 .../test.log                                  | 11 ++
 .../weird.log                                 | 11 ++
 .../.stdout                                   | 5 +
 .../test.log                                  | 11 ++
 .../weird.log                                 | 11 ++
 .../.stdout                                   | 5 +
 .../test.log                                  | 11 ++
 .../weird.log                                 | 11 ++
 .../.stdout                                   | 5 +
 .../test.log                                  | 11 ++
 .../weird.log                                 | 11 ++
 .../.stdout                                   | 5 +
 .../test.log                                  | 11 ++
 .../weird.log                                 | 11 ++
 .../logging/field-length-limiting-stream.zeek | 106 ++++++++++++++
 .../logging/field-length-limiting.zeek        | 12 +-
 .../frameworks/logging/length-checking.zeek   | 8 +-
 30 files changed, 420 insertions(+), 129 deletions(-)
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log
 create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log
 create mode 100644 testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek

diff --git a/scripts/base/frameworks/logging/main.zeek b/scripts/base/frameworks/logging/main.zeek
index 7d236f829e..114b33711e 100644
--- a/scripts/base/frameworks/logging/main.zeek
+++ b/scripts/base/frameworks/logging/main.zeek
@@ -422,6 +422,26 @@ export {
 		## .. :zeek:see:`Log::default_max_delay_queue_size`
 		## .. :zeek:see:`Log::set_max_delay_queue_size`
 		max_delay_queue_size: count &default=default_max_delay_queue_size;
+
+		## Maximum string size for a field in a log record from this stream.
+		##
+		## .. :zeek:see:`Log::default_max_field_string_bytes`
+		max_field_string_bytes: count &default=Log::default_max_field_string_bytes;
+
+		## Maximum total string size in a log record from this stream.
+		##
+		## .. :zeek:see:`Log::default_max_total_string_bytes`
+		max_total_string_bytes: count &default=Log::default_max_total_string_bytes;
+
+		## Maximum number of container elements for a field in a log record from this stream.
+		##
+		## .. :zeek:see:`Log::default_max_field_container_elements`
+		max_field_container_elements: count &default=Log::default_max_field_container_elements;
+
+		## Maximum total number of container elements in a log record from this stream.
+		##
+		## .. :zeek:see:`Log::default_max_total_container_elements`
+		max_total_container_elements: count &default=Log::default_max_total_container_elements;
 	};
 
 	## Sentinel value for indicating that a filter was not found when looked up.
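For illustration, a minimal sketch of how a script could opt a single stream into tighter limits through these new Stream fields; the module name, log path, and limit values below are hypothetical, not taken from this patch:

    module Demo;

    export {
        redef enum Log::ID += { LOG };

        type Info: record {
            ts: time &log;
            msg: string &log;
            tags: vector of string &log;
        };
    }

    event zeek_init()
        {
        # Truncate any single string field at 512 bytes and any single
        # container field at 10 elements, for this stream only.
        Log::create_stream(Demo::LOG, [$columns=Info, $path="demo",
                                       $max_field_string_bytes=512,
                                       $max_field_container_elements=10]);
        }

Fields left unset keep the &default values, which point at the global Log::default_* constants renamed in the next hunk.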
diff --git a/scripts/base/init-bare.zeek b/scripts/base/init-bare.zeek
index 8f300bce4d..c77c96f472 100644
--- a/scripts/base/init-bare.zeek
+++ b/scripts/base/init-bare.zeek
@@ -3747,12 +3747,12 @@ export {
 	## The maximum number of bytes that a single string field can contain when
 	## logging. If a string reaches this limit, the log output for the field will be
 	## truncated. Setting this to zero disables the limiting.
-	const max_field_string_bytes = 4096 &redef;
+	const default_max_field_string_bytes = 4096 &redef;
 
 	## The maximum number of elements a single container field can contain when
 	## logging. If a container reaches this limit, the log output for the field will
 	## be truncated. Setting this to zero disables the limiting.
-	const max_field_container_elements = 100 &redef;
+	const default_max_field_container_elements = 100 &redef;
 
 	## The maximum total bytes a record may log for string fields. This is the sum of
 	## all bytes in string fields logged for the record. If this limit is reached, all
 	## further string fields will be logged as empty strings. If the limit is reached while
 	## processing a container holding string fields, the container will be truncated
 	## in the log output. Setting this to zero disables the limiting.
-	const max_total_string_bytes = 256000 &redef;
+	const default_max_total_string_bytes = 256000 &redef;
 
 	## The maximum total number of container elements a record may log. This is the
 	## sum of all container elements logged for the record. If this limit is reached,
 	## all further containers will be logged as empty containers. If the limit is
 	## reached while processing a container, the container will be truncated in the
 	## output. Setting this to zero disables the limiting.
-	const max_total_container_elements = 500 &redef;
+	const default_max_total_container_elements = 500 &redef;
 }
 
 module POP3;
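Since the renamed constants keep &redef, site policy can still tune the global defaults that every stream falls back on; a small sketch with illustrative values:

    # Raise the per-field string limit globally and disable the total
    # container-element limit (zero turns a limit off).
    redef Log::default_max_field_string_bytes = 8192;
    redef Log::default_max_total_container_elements = 0;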
diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc
index 89c6e8266f..1d832172b9 100644
--- a/src/logging/Manager.cc
+++ b/src/logging/Manager.cc
@@ -243,7 +243,6 @@ struct Manager::WriterInfo {
     bool from_remote = false;
    bool hook_initialized = false;
     string instantiating_filter;
-    string stream_name;
 
     std::shared_ptr total_writes;
     std::shared_ptr total_discarded_writes;
@@ -289,10 +288,13 @@ struct Manager::Stream {
     zeek_uint_t max_delay_queue_size = 1;
     bool evicting = false;
 
+    size_t max_field_string_bytes = 0;
+    size_t max_total_string_bytes = 0;
+    size_t max_field_container_elements = 0;
+    size_t max_total_container_elements = 0;
     ~Stream();
-
     const detail::DelayInfoPtr& GetDelayInfo(const detail::WriteContext& ctx);
 
     void EnqueueWriteForDelay(const detail::WriteContext& ctx);
@@ -520,22 +522,6 @@ void Manager::InitPostScript() {
     rotation_format_func = id::find_func("Log::rotation_format_func");
     log_stream_policy_hook = id::find_func("Log::log_stream_policy");
     max_log_record_size = id::find_val("Log::max_log_record_size")->AsCount();
-
-    max_field_string_bytes = id::find_val("Log::max_field_string_bytes")->AsCount();
-    if ( max_field_string_bytes == 0 )
-        max_field_string_bytes = std::numeric_limits<size_t>::max();
-
-    max_total_string_bytes = id::find_val("Log::max_total_string_bytes")->AsCount();
-    if ( max_total_string_bytes == 0 )
-        max_total_string_bytes = std::numeric_limits<size_t>::max();
-
-    max_field_container_elements = id::find_val("Log::max_field_container_elements")->AsCount();
-    if ( max_field_container_elements == 0 )
-        max_field_container_elements = std::numeric_limits<size_t>::max();
-
-    max_total_container_elements = id::find_val("Log::max_total_container_elements")->AsCount();
-    if ( max_total_container_elements == 0 )
-        max_total_container_elements = std::numeric_limits<size_t>::max();
 }
 
 WriterBackend* Manager::CreateBackend(WriterFrontend* frontend, EnumVal* tag) {
@@ -691,9 +677,24 @@ bool Manager::CreateStream(EnumVal* id, RecordVal* sval) {
     streams[idx]->columns = columns->Ref()->AsRecordType();
     streams[idx]->max_delay_interval = sval->GetField("max_delay_interval")->AsInterval();
     streams[idx]->max_delay_queue_size = sval->GetField("max_delay_queue_size")->AsCount();
-    streams[idx]->enable_remote = id::find_val("Log::enable_remote_logging")->AsBool();
+    streams[idx]->max_field_string_bytes = sval->GetField("max_field_string_bytes")->AsCount();
+    if ( streams[idx]->max_field_string_bytes == 0 )
+        streams[idx]->max_field_string_bytes = std::numeric_limits<size_t>::max();
+
+    streams[idx]->max_total_string_bytes = sval->GetField("max_total_string_bytes")->AsCount();
+    if ( streams[idx]->max_total_string_bytes == 0 )
+        streams[idx]->max_total_string_bytes = std::numeric_limits<size_t>::max();
+
+    streams[idx]->max_field_container_elements = sval->GetField("max_field_container_elements")->AsCount();
+    if ( streams[idx]->max_field_container_elements == 0 )
+        streams[idx]->max_field_container_elements = std::numeric_limits<size_t>::max();
+
+    streams[idx]->max_total_container_elements = sval->GetField("max_total_container_elements")->AsCount();
+    if ( streams[idx]->max_total_container_elements == 0 )
+        streams[idx]->max_total_container_elements = std::numeric_limits<size_t>::max();
+
     DBG_LOG(DBG_LOGGING, "Created new logging stream '%s', raising event %s", streams[idx]->name.c_str(), event ?
                              streams[idx]->event->Name() : "");
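As the CreateStream() change above shows, a per-stream value of zero is mapped to an effectively unlimited bound, so an individual stream can opt out of limiting even while the global defaults stay in place. A self-contained sketch (module name, path, and values are hypothetical):

    module DemoUnlimited;

    export {
        redef enum Log::ID += { LOG };

        type Info: record {
            msg: string &log;
        };
    }

    event zeek_init()
        {
        # Zero disables the corresponding limit for this stream only; other
        # streams keep the global defaults.
        Log::create_stream(LOG, [$columns=Info, $path="demo_unlimited",
                                 $max_field_string_bytes=0,
                                 $max_total_string_bytes=0,
                                 $max_field_container_elements=0,
                                 $max_total_container_elements=0]);
        }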
streams[idx]->event->Name() : ""); @@ -1183,7 +1184,7 @@ bool Manager::WriteToFilters(const Manager::Stream* stream, zeek::RecordValPtr c size_t total_size = 0; total_string_bytes = 0; total_container_elements = 0; - auto rec = RecordToLogRecord(w->second, filter, columns.get(), total_size); + auto rec = RecordToLogRecord(w->second, filter, stream, columns.get(), total_size); if ( total_size > max_log_record_size ) { reporter->Weird("log_record_too_large", util::fmt("%s", stream->name.c_str())); @@ -1422,7 +1423,8 @@ bool Manager::SetMaxDelayQueueSize(const EnumValPtr& id, zeek_uint_t queue_size) return true; } -threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val, Type* ty, size_t& total_size) { +threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty, + size_t& total_size) { if ( ! val ) return {ty->Tag(), false}; @@ -1499,11 +1501,11 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val case TYPE_STRING: { const String* s = val->AsString()->AsString(); - size_t allowed_bytes = std::min( - {static_cast(s->Len()), max_field_string_bytes, max_total_string_bytes - total_string_bytes}); + size_t allowed_bytes = std::min({static_cast(s->Len()), stream->max_field_string_bytes, + stream->max_total_string_bytes - total_string_bytes}); if ( allowed_bytes < static_cast(s->Len()) ) { - reporter->Weird("log_string_field_truncated", util::fmt("%s", info->stream_name.c_str())); + reporter->Weird("log_string_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_string_fields->Inc(); } @@ -1555,11 +1557,12 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val auto& set_t = tbl_t->GetIndexTypes()[0]; bool is_managed = ZVal::IsManagedType(set_t); - size_t allowed_elements = std::min({static_cast(set->Length()), max_field_container_elements, - max_total_container_elements - total_container_elements}); + size_t allowed_elements = + std::min({static_cast(set->Length()), stream->max_field_container_elements, + stream->max_total_container_elements - total_container_elements}); if ( allowed_elements < static_cast(set->Length()) ) { - reporter->Weird("log_container_field_truncated", util::fmt("%s", info->stream_name.c_str())); + reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_containers->Inc(); } @@ -1570,7 +1573,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { std::optional s_i = ZVal(set->Idx(i), set_t); - lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(info, s_i, set_t.get(), total_size)); + lval.val.set_val.vals[i] = + new threading::Value(ValToLogVal(info, stream, s_i, set_t.get(), total_size)); if ( is_managed ) ZVal::DeleteManagedType(*s_i); lval.val.set_val.size++; @@ -1584,11 +1588,11 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val case TYPE_VECTOR: { VectorVal* vec = val->AsVector(); - size_t allowed_elements = std::min({static_cast(vec->Size()), max_field_container_elements, - max_total_container_elements - total_container_elements}); + size_t allowed_elements = std::min({static_cast(vec->Size()), stream->max_field_container_elements, + stream->max_total_container_elements - total_container_elements}); if ( allowed_elements < static_cast(vec->Size()) ) { - reporter->Weird("log_container_field_truncated", util::fmt("%s", 
info->stream_name.c_str())); + reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_containers->Inc(); } @@ -1601,7 +1605,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val auto& vt = vec->GetType()->Yield(); for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { - lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(info, vv[i], vt.get(), total_size)); + lval.val.vector_val.vals[i] = + new threading::Value(ValToLogVal(info, stream, vv[i], vt.get(), total_size)); lval.val.vector_val.size++; } @@ -1616,7 +1621,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val return lval; } -detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, RecordVal* columns, size_t& total_size) { +detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns, + size_t& total_size) { RecordValPtr ext_rec; if ( filter->num_ext_fields > 0 ) { @@ -1666,7 +1672,7 @@ detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, R } if ( val ) - vals.emplace_back(ValToLogVal(info, val, vt, total_size)); + vals.emplace_back(ValToLogVal(info, stream, val, vt, total_size)); if ( total_size > max_log_record_size ) { return {}; @@ -1734,7 +1740,6 @@ WriterFrontend* Manager::CreateWriter(EnumVal* id, EnumVal* writer, WriterBacken winfo->from_remote = from_remote; winfo->hook_initialized = false; winfo->instantiating_filter = instantiating_filter; - winfo->stream_name = stream->name; // Search for a corresponding filter for the writer/path pair and use its // rotation settings. If no matching filter is found, fall back on diff --git a/src/logging/Manager.h b/src/logging/Manager.h index 9db48c366e..e78898bb50 100644 --- a/src/logging/Manager.h +++ b/src/logging/Manager.h @@ -422,8 +422,10 @@ private: bool TraverseRecord(Stream* stream, Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude, const std::string& path, const std::list& indices); - detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, RecordVal* columns, size_t& total_size); - threading::Value ValToLogVal(WriterInfo* info, std::optional& val, Type* ty, size_t& total_size); + detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns, + size_t& total_size); + threading::Value ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty, + size_t& total_size); Stream* FindStream(EnumVal* id); void RemoveDisabledWriters(Stream* stream); @@ -450,10 +452,6 @@ private: FuncPtr log_stream_policy_hook; size_t max_log_record_size = 0; - size_t max_field_string_bytes = 0; - size_t max_total_string_bytes = 0; - size_t max_field_container_elements = 0; - size_t max_total_container_elements = 0; size_t total_string_bytes = 0; size_t total_container_elements = 0; diff --git a/testing/btest/Baseline/plugins.hooks/output b/testing/btest/Baseline/plugins.hooks/output index d944c123ed..b4333b458d 100644 --- a/testing/btest/Baseline/plugins.hooks/output +++ b/testing/btest/Baseline/plugins.hooks/output @@ -38,17 +38,17 @@ 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, 
postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, 
ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, 
policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Analyzer::Logging::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Broker::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Cluster::LOG)) -> @@ -82,17 +82,17 @@ 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Notice::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Tunnel::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Weird::LOG, default)) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::ALARM_LOG, 
[columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: 
Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Option::set, , (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, )) -> 0.000000 MetaHookPost CallFunction(Option::set_change_handler, , (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> 0.000000 MetaHookPost CallFunction(Option::set_change_handler, , (Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> @@ -980,17 +980,17 @@ 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, 
path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 
msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::LOG, 
[columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Analyzer::Logging::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Broker::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Cluster::LOG)) @@ -1024,17 +1024,17 @@ 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Notice::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Tunnel::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Weird::LOG, default)) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , 
(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, 
event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Option::set, , (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, )) 0.000000 MetaHookPre CallFunction(Option::set_change_handler, , (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) 0.000000 MetaHookPre CallFunction(Option::set_change_handler, , (Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) @@ -1921,17 +1921,17 @@ 0.000000 | HookCallFunction Log::__add_filter(Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=, include=, exclude=, log_local=T, log_remote=T, 
field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) -0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, 
policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) +0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, 
max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::add_default_filter(Analyzer::Logging::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Broker::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG) @@ -1965,17 +1965,17 @@ 0.000000 | HookCallFunction Log::add_stream_filters(Notice::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Tunnel::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Weird::LOG, default) -0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, 
max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) +0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, 
max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Option::set(Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, ) 0.000000 | HookCallFunction Option::set_change_handler(Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100) 0.000000 | HookCallFunction Option::set_change_handler(Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr index 57a839051e..a3577fb43e 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-error in <...>/main.zeek, line 642 and <...>/static-errors.zeek, line 12: type clash (Log::PostDelayCallback and post_delay_cb) -error in <...>/static-errors.zeek, line 12 and <...>/main.zeek, line 642: type mismatch (post_delay_cb and Log::PostDelayCallback) +error in <...>/main.zeek, line 662 and <...>/static-errors.zeek, line 12: type clash (Log::PostDelayCallback and post_delay_cb) +error in <...>/static-errors.zeek, line 12 and <...>/main.zeek, line 662: type mismatch (post_delay_cb and Log::PostDelayCallback) error in <...>/static-errors.zeek, line 12: argument type mismatch in function call (Log::delay(id, to_any_coerce rec, post_delay_cb)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr index e39286ce49..597cb83a0b 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -error in <...>/main.zeek, line 647 and <...>/static-errors.zeek, line 8: type clash (Log::DelayToken and 42) -error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 647: type mismatch (42 and Log::DelayToken) +error in <...>/main.zeek, line 667 and <...>/static-errors.zeek, line 8: type clash (Log::DelayToken and 42) +error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 667: type mismatch (42 and Log::DelayToken) error in <...>/static-errors.zeek, line 8: argument type mismatch in function call (Log::delay_finish(id, to_any_coerce rec, 42)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr index 31db1e4af6..4ad3a42493 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -error in <...>/main.zeek, line 647 and <...>/static-errors.zeek, line 8: arithmetic mixed with non-arithmetic (Log::DelayToken and 42) -error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 647: type mismatch (42 and Log::DelayToken) +error in <...>/main.zeek, line 667 and <...>/static-errors.zeek, line 8: arithmetic mixed with non-arithmetic (Log::DelayToken and 42) +error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 667: type mismatch (42 and Log::DelayToken) error in <...>/static-errors.zeek, line 8: argument type mismatch in function call (Log::delay_finish(id, to_any_coerce rec, 42)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr index 098dbf9bee..dcd0d2de37 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-error in <...>/main.zeek, line 642 and <...>/static-errors.zeek, line 16: type clash (Log::PostDelayCallback and post_delay_cb) -error in <...>/static-errors.zeek, line 16 and <...>/main.zeek, line 642: type mismatch (post_delay_cb and Log::PostDelayCallback) +error in <...>/main.zeek, line 662 and <...>/static-errors.zeek, line 16: type clash (Log::PostDelayCallback and post_delay_cb) +error in <...>/static-errors.zeek, line 16 and <...>/main.zeek, line 662: type mismatch (post_delay_cb and Log::PostDelayCallback) error in <...>/static-errors.zeek, line 16: argument type mismatch in function call (Log::delay(id, to_any_coerce rec, post_delay_cb)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout new file mode 100644 index 0000000000..8257d72161 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 9.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log new file mode 100644 index 0000000000..93c73f8a84 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDE,(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout new file mode 100644 index 0000000000..57f1fd5f12 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 12.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log new file mode 100644 index 0000000000..c44d2e2c5c --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDE,(empty) (empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout new file mode 100644 index 0000000000..29978a303b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log new file mode 100644 index 0000000000..542d53cf44 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout new file mode 100644 index 0000000000..f2e2b796bd --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 1.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log new file mode 100644 index 0000000000..2b02e6e655 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout new file mode 100644 index 0000000000..29978a303b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log new file mode 100644 index 0000000000..4f595571fe --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ (empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout new file mode 100644 index 0000000000..ee93469f35 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 20.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log new file mode 100644 index 0000000000..9bcebb3086 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek new file mode 100644 index 0000000000..a67acc521d --- /dev/null +++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek @@ -0,0 +1,106 @@ +# @TEST-DOC: Tests field length limiting at the stream level instead of at the global level. +# +# @TEST-EXEC: zeek -b test.zeek %INPUT +# @TEST-EXEC: btest-diff test.log +# @TEST-EXEC: btest-diff .stdout +# @TEST-EXEC: btest-diff weird.log + +# @TEST-START-FILE test.zeek + +@load base/frameworks/telemetry +@load base/frameworks/notice/weird + +module Test; + +export { + redef enum Log::ID += { LOG }; + + type Info: record { + strings1: vector of string &log; + strings2: vector of string &log; + }; +} + +event log_telemetry() + { + local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*"); + for (i in storage_metrics) + { + local m = storage_metrics[i]; + print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value; + } + } + +event zeek_init() &priority=-5 + { + local rec = Test::Info(); + local i = 0; + + # Create two vectors containing 10 strings with 10 characters each. + # This leaves us with 200 total characters to work with. + while ( ++i <= 10 ) + { + rec$strings1 += "ABCDEFGHIJ"; + rec$strings2 += "ABCDEFGHIJ"; + } + + Log::write(Test::LOG, rec); + + # Do this as a separate event so the weirds get processed before we log the + # telemetry output. See the comment below for the first test as to why. + event log_telemetry(); + } + +# @TEST-END-FILE test.zeek + +# Limit the individual fields to 5 bytes, but keep the total maximum large enough that it +# will write all of the fields. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_field_string_bytes=5]); + } + +# @TEST-START-NEXT + +# Leave the individual field bytes alone, but set the maximum length to where it cuts off +# the second field in the middle of a string. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_string_bytes=115]); + } + +# @TEST-START-NEXT + +# Leave the individual field bytes alone, but set the maximum length to where it cuts off +# the first field in the middle of a string. Second field should log empty strings. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_string_bytes=85]); + } + +# @TEST-START-NEXT + +# Limit the individual containers to 5 items, but keep the total maximum large enough that +# it will write all of the fields. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_field_container_elements=5]); + } + +# @TEST-START-NEXT + +# Leave the individual field items alone, but set the maximum length to where it cuts off +# the second field in the middle.
+event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_container_elements=15]); + } + +# @TEST-START-NEXT + +# Leave the individual field bytes alone, but set the maximum length to where it cuts off +# the first field in the middle. Second field should log empty containers. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_container_elements=5]); + } diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek index 4946c1b230..f1ae881847 100644 --- a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek +++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek @@ -59,34 +59,34 @@ event zeek_init() # will write all of the fields. The weird test for this one will be off since it will # limit the name of the weird. It will pass, but the fields in the log will get truncated # like they're supposed to. -redef Log::max_field_string_bytes = 5; +redef Log::default_max_field_string_bytes = 5; # @TEST-START-NEXT # Leave the individual field bytes alone, but set the maximum length to where it cuts off # the second field in the middle of a string. -redef Log::max_total_string_bytes = 115; +redef Log::default_max_total_string_bytes = 115; # @TEST-START-NEXT # Leave the individual field bytes alone, but set the maximum length to where it cuts off # the first field in the middle of a string. Second field should log empty strings. -redef Log::max_total_string_bytes = 85; +redef Log::default_max_total_string_bytes = 85; # @TEST-START-NEXT # Limit the individual containers to 5 items, but keep the total maximum large enough that # it will write all of the fields. -redef Log::max_field_container_elements = 5; +redef Log::default_max_field_container_elements = 5; # @TEST-START-NEXT # Leave the individual field items alone, but set the maximum length to where it cuts off # the second field in the middle. -redef Log::max_total_container_elements = 15; +redef Log::default_max_total_container_elements = 15; # @TEST-START-NEXT # Leave the individual field bytes alone, but set the maximum length to where it cuts off # the first field in the middle. Second field should log empty containers. -redef Log::max_total_container_elements = 5; +redef Log::default_max_total_container_elements = 5; diff --git a/testing/btest/scripts/base/frameworks/logging/length-checking.zeek b/testing/btest/scripts/base/frameworks/logging/length-checking.zeek index 0e14734012..95e966c3d6 100644 --- a/testing/btest/scripts/base/frameworks/logging/length-checking.zeek +++ b/testing/btest/scripts/base/frameworks/logging/length-checking.zeek @@ -20,10 +20,10 @@ module Test; # Disable the string and container length filtering. -redef Log::max_field_string_bytes = 0; -redef Log::max_total_string_bytes = 0; -redef Log::max_field_container_elements = 0; -redef Log::max_total_container_elements = 0; +redef Log::default_max_field_string_bytes = 0; +redef Log::default_max_total_string_bytes = 0; +redef Log::default_max_field_container_elements = 0; +redef Log::default_max_total_container_elements = 0; export { redef enum Log::ID += { LOG };