From e2e7ab28da117753afa40a3408ebc6133ed016c3 Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Fri, 25 Jul 2025 13:19:47 -0700 Subject: [PATCH 01/11] Implement string- and container-length filtering at the log record level --- scripts/base/init-bare.zeek | 25 +++++++ src/logging/Manager.cc | 61 +++++++++++++--- src/logging/Manager.h | 7 ++ .../test.log | 11 +++ .../test.log | 11 +++ .../test.log | 11 +++ .../test.log | 11 +++ .../test.log | 11 +++ .../test.log | 11 +++ .../logging/field-length-limiting.zeek | 72 +++++++++++++++++++ .../frameworks/logging/length-checking.zeek | 6 ++ 11 files changed, 227 insertions(+), 10 deletions(-) create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/test.log create mode 100644 testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek diff --git a/scripts/base/init-bare.zeek b/scripts/base/init-bare.zeek index 66ce27876c..8f300bce4d 100644 --- a/scripts/base/init-bare.zeek +++ b/scripts/base/init-bare.zeek @@ -3743,6 +3743,31 @@ export { ## higher than this limit, but it prevents runaway-sized log entries from causing ## problems. const max_log_record_size = 1024*1024*64 &redef; + + ## The maximum number of bytes that a single string field can contain when + ## logging. If a string reaches this limit, the log output for the field will be + ## truncated. Setting this to zero disables the limiting. + const max_field_string_bytes = 4096 &redef; + + ## The maximum number of elements a single container field can contain when + ## logging. If a container reaches this limit, the log output for the field will + ## be truncated. Setting this to zero disables the limiting. + const max_field_container_elements = 100 &redef; + + ## The maximum total bytes a record may log for string fields. This is the sum of + ## all bytes in string fields logged for the record. If this limit is reached, all + ## further string fields will be logged as empty strings. Any containers holding + ## string fields will be logged as empty containers. If the limit is reached while + ## processing a container holding string fields, the container will be truncated + ## in the log output. Setting this to zero disables the limiting. + const max_total_string_bytes = 256000 &redef; + + ## The maximum total number of container elements a record may log. This is the + ## sum of all container elements logged for the record. If this limit is reached, + ## all further containers will be logged as empty containers. If the limit is + ## reached while processing a container, the container will be truncated in the + ## output. Setting this to zero disables the limiting. 
+ const max_total_container_elements = 500 &redef; } module POP3; diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index eb22695e05..734a54bbc0 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -504,6 +504,22 @@ void Manager::InitPostScript() { rotation_format_func = id::find_func("Log::rotation_format_func"); log_stream_policy_hook = id::find_func("Log::log_stream_policy"); max_log_record_size = id::find_val("Log::max_log_record_size")->AsCount(); + + max_field_string_bytes = id::find_val("Log::max_field_string_bytes")->AsCount(); + if ( max_field_string_bytes == 0 ) + max_field_string_bytes = std::numeric_limits::max(); + + max_total_string_bytes = id::find_val("Log::max_total_string_bytes")->AsCount(); + if ( max_total_string_bytes == 0 ) + max_total_string_bytes = std::numeric_limits::max(); + + max_field_container_elements = id::find_val("Log::max_field_container_elements")->AsCount(); + if ( max_field_container_elements == 0 ) + max_field_container_elements = std::numeric_limits::max(); + + max_total_container_elements = id::find_val("Log::max_total_container_elements")->AsCount(); + if ( max_total_container_elements == 0 ) + max_total_container_elements = std::numeric_limits::max(); } WriterBackend* Manager::CreateBackend(WriterFrontend* frontend, EnumVal* tag) { @@ -1149,6 +1165,8 @@ bool Manager::WriteToFilters(const Manager::Stream* stream, zeek::RecordValPtr c // Alright, can do the write now. size_t total_size = 0; + total_string_bytes = 0; + total_container_elements = 0; auto rec = RecordToLogRecord(stream, filter, columns.get(), total_size); if ( total_size > max_log_record_size ) { @@ -1464,12 +1482,20 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t case TYPE_STRING: { const String* s = val->AsString()->AsString(); - char* buf = new char[s->Len()]; - memcpy(buf, s->Bytes(), s->Len()); + + size_t allowed_bytes = std::min( + {static_cast(s->Len()), max_field_string_bytes, max_total_string_bytes - total_string_bytes}); + + if ( allowed_bytes == 0 ) + return lval; + + char* buf = new char[allowed_bytes]; + memcpy(buf, s->Bytes(), allowed_bytes); lval.val.string_val.data = buf; - lval.val.string_val.length = s->Len(); - total_size += lval.val.string_val.length; + lval.val.string_val.length = allowed_bytes; + total_size += allowed_bytes; + total_string_bytes += allowed_bytes; break; } @@ -1508,10 +1534,15 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t auto& set_t = tbl_t->GetIndexTypes()[0]; bool is_managed = ZVal::IsManagedType(set_t); - zeek_int_t set_length = set->Length(); - lval.val.set_val.vals = new threading::Value*[set_length]; + size_t allowed_elements = std::min({static_cast(set->Length()), max_field_container_elements, + max_total_container_elements - total_container_elements}); - for ( zeek_int_t i = 0; i < set_length && total_size < max_log_record_size; i++ ) { + if ( allowed_elements == 0 ) + return lval; + + lval.val.set_val.vals = new threading::Value*[allowed_elements]; + + for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { std::optional s_i = ZVal(set->Idx(i), set_t); lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(s_i, set_t.get(), total_size)); if ( is_managed ) @@ -1519,22 +1550,32 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t lval.val.set_val.size++; } + total_container_elements += lval.val.set_val.size; + break; } case TYPE_VECTOR: { VectorVal* vec = val->AsVector(); - zeek_int_t vec_length 
= vec->Size(); - lval.val.vector_val.vals = new threading::Value*[vec_length]; + + size_t allowed_elements = std::min({static_cast(vec->Size()), max_field_container_elements, + max_total_container_elements - total_container_elements}); + + if ( allowed_elements == 0 ) + return lval; + + lval.val.vector_val.vals = new threading::Value*[allowed_elements]; auto& vv = vec->RawVec(); auto& vt = vec->GetType()->Yield(); - for ( zeek_int_t i = 0; i < vec_length && total_size < max_log_record_size; i++ ) { + for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(vv[i], vt.get(), total_size)); lval.val.vector_val.size++; } + total_container_elements += lval.val.vector_val.size; + break; } diff --git a/src/logging/Manager.h b/src/logging/Manager.h index 63336d67cb..dd933e0b50 100644 --- a/src/logging/Manager.h +++ b/src/logging/Manager.h @@ -448,7 +448,14 @@ private: int rotations_pending = 0; // Number of rotations not yet finished. FuncPtr rotation_format_func; FuncPtr log_stream_policy_hook; + size_t max_log_record_size = 0; + size_t max_field_string_bytes = 0; + size_t max_total_string_bytes = 0; + size_t max_field_container_elements = 0; + size_t max_total_container_elements = 0; + size_t total_string_bytes = 0; + size_t total_container_elements = 0; std::shared_ptr total_log_stream_writes_family; std::shared_ptr total_log_writer_writes_family; diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/test.log new file mode 100644 index 0000000000..93c73f8a84 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDE,(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/test.log new file mode 100644 index 0000000000..c44d2e2c5c --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDE,(empty) (empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/test.log new file mode 100644 index 0000000000..542d53cf44 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/test.log new file mode 100644 index 0000000000..2b02e6e655 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/test.log new file mode 100644 index 0000000000..4f595571fe --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ (empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/test.log new file mode 100644 index 0000000000..9bcebb3086 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek new file mode 100644 index 0000000000..d52f625561 --- /dev/null +++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek @@ -0,0 +1,72 @@ +# @TEST-DOC: Test the options that limit string and container lengths when logging +# +# @TEST-EXEC: zeek -b test.zeek %INPUT +# @TEST-EXEC: btest-diff test.log + +# @TEST-START-FILE test.zeek + +module Test; + +export { + redef enum Log::ID += { LOG }; + + type Info: record { + strings1: vector of string &log; + strings2: vector of string &log; + }; +} + +event zeek_init() + { + Log::create_stream(LOG, [$columns=Info, $path="test"]); + + local rec = Test::Info(); + local i = 0; + + # Create two vectors containing 10 strings with 10 characters each. + # This leaves us with 200 total characters to work with. + while ( ++i <= 10 ) + { + rec$strings1 += "ABCDEFGHIJ"; + rec$strings2 += "ABCDEFGHIJ"; + } + + Log::write(Test::LOG, rec); + } + + +# @TEST-END-FILE test.zeek + +# Limit the individual fields to 5 bytes, but keep the total maximum large enough that it +# will write all of the fields. +redef Log::max_field_string_bytes = 5; + +# @TEST-START-NEXT + +# Leave the individual field bytes alone, but set the maximum length to where it cuts off +# the second field in the middle of a string. +redef Log::max_total_string_bytes = 115; + +# @TEST-START-NEXT + +# Leave the individual field bytes alone, but set the maximum length to where it cuts off +# the first field in the middle of a string. Second field should log empty strings. +redef Log::max_total_string_bytes = 85; + +# @TEST-START-NEXT + +# Limit the individual containers to 5 items, but keep the total maximum large enough that +# it will write all of the fields. +redef Log::max_field_container_elements = 5; + +# @TEST-START-NEXT + +# Leave the individual field items alone, but set the maximum length to where it cuts off +# the second field in the middle. +redef Log::max_total_container_elements = 15; + +# @TEST-START-NEXT + +# Leave the individual field items alone, but set the maximum length to where it cuts off +# the first field in the middle. Second field should log empty containers. +redef Log::max_total_container_elements = 5; diff --git a/testing/btest/scripts/base/frameworks/logging/length-checking.zeek b/testing/btest/scripts/base/frameworks/logging/length-checking.zeek index 7316b2c4d0..0e14734012 100644 --- a/testing/btest/scripts/base/frameworks/logging/length-checking.zeek +++ b/testing/btest/scripts/base/frameworks/logging/length-checking.zeek @@ -19,6 +19,12 @@ module Test; +# Disable the string and container length filtering.
+redef Log::max_field_string_bytes = 0; +redef Log::max_total_string_bytes = 0; +redef Log::max_field_container_elements = 0; +redef Log::max_total_container_elements = 0; + export { redef enum Log::ID += { LOG }; From cd74a4e138fefb683f2e6a0e0b8d142797653278 Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Wed, 30 Jul 2025 09:40:24 -0700 Subject: [PATCH 02/11] Replace unused stream argument from RecordToLogRecord with WriterInfo This also adds a WriterInfo argument to ValToLogVal and passes the one from RecordToLogRecord into it. --- src/logging/Manager.cc | 13 ++++++------- src/logging/Manager.h | 4 ++-- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index 734a54bbc0..c0214bdde5 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -1167,7 +1167,7 @@ bool Manager::WriteToFilters(const Manager::Stream* stream, zeek::RecordValPtr c size_t total_size = 0; total_string_bytes = 0; total_container_elements = 0; - auto rec = RecordToLogRecord(stream, filter, columns.get(), total_size); + auto rec = RecordToLogRecord(w->second, filter, columns.get(), total_size); if ( total_size > max_log_record_size ) { reporter->Weird("log_record_too_large", util::fmt("%s", stream->name.c_str())); @@ -1406,7 +1406,7 @@ bool Manager::SetMaxDelayQueueSize(const EnumValPtr& id, zeek_uint_t queue_size) return true; } -threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t& total_size) { +threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val, Type* ty, size_t& total_size) { if ( ! val ) return {ty->Tag(), false}; @@ -1544,7 +1544,7 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { std::optional s_i = ZVal(set->Idx(i), set_t); - lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(s_i, set_t.get(), total_size)); + lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(info, s_i, set_t.get(), total_size)); if ( is_managed ) ZVal::DeleteManagedType(*s_i); lval.val.set_val.size++; @@ -1570,7 +1570,7 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t auto& vt = vec->GetType()->Yield(); for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { - lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(vv[i], vt.get(), total_size)); + lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(info, vv[i], vt.get(), total_size)); lval.val.vector_val.size++; } @@ -1585,8 +1585,7 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t return lval; } -detail::LogRecord Manager::RecordToLogRecord(const Stream* stream, Filter* filter, RecordVal* columns, - size_t& total_size) { +detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, RecordVal* columns, size_t& total_size) { RecordValPtr ext_rec; if ( filter->num_ext_fields > 0 ) { @@ -1636,7 +1635,7 @@ detail::LogRecord Manager::RecordToLogRecord(const Stream* stream, Filter* filte } if ( val ) - vals.emplace_back(ValToLogVal(val, vt, total_size)); + vals.emplace_back(ValToLogVal(info, val, vt, total_size)); if ( total_size > max_log_record_size ) { return {}; diff --git a/src/logging/Manager.h b/src/logging/Manager.h index dd933e0b50..d54c3ac204 100644 --- a/src/logging/Manager.h +++ b/src/logging/Manager.h @@ -422,8 +422,8 @@ private: bool TraverseRecord(Stream* stream, Filter* filter, RecordType* rt, TableVal* include, 
TableVal* exclude, const std::string& path, const std::list& indices); - detail::LogRecord RecordToLogRecord(const Stream* stream, Filter* filter, RecordVal* columns, size_t& total_size); - threading::Value ValToLogVal(std::optional& val, Type* ty, size_t& total_size); + detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, RecordVal* columns, size_t& total_size); + threading::Value ValToLogVal(WriterInfo* info, std::optional& val, Type* ty, size_t& total_size); Stream* FindStream(EnumVal* id); void RemoveDisabledWriters(Stream* stream); From 837fde1a084fe157d635763bab709d77e6e74ef2 Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Wed, 30 Jul 2025 09:43:45 -0700 Subject: [PATCH 03/11] Add metrics to track string and container fields limited by length --- src/logging/Manager.cc | 40 ++++++++++++++++--- src/logging/Manager.h | 2 + .../.stdout | 3 ++ .../.stdout | 3 ++ .../.stdout | 3 ++ .../.stdout | 3 ++ .../.stdout | 3 ++ .../.stdout | 3 ++ .../telemetry.log | 6 +++ .../logging/field-length-limiting.zeek | 10 +++++ 10 files changed, 71 insertions(+), 5 deletions(-) create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index c0214bdde5..b023506661 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -246,10 +246,17 @@ struct Manager::WriterInfo { std::shared_ptr total_writes; std::shared_ptr total_discarded_writes; + std::shared_ptr total_truncated_string_fields; + std::shared_ptr total_truncated_containers; WriterInfo(std::shared_ptr total_writes, - std::shared_ptr total_discarded_writes) - : total_writes(std::move(total_writes)), total_discarded_writes(std::move(total_discarded_writes)) {} + std::shared_ptr total_discarded_writes, + std::shared_ptr total_truncated_string_fields, + std::shared_ptr total_truncated_containers) + : total_writes(std::move(total_writes)), + total_discarded_writes(std::move(total_discarded_writes)), + total_truncated_string_fields(std::move(total_truncated_string_fields)), + total_truncated_containers(std::move(total_truncated_containers)) {} }; struct Manager::Stream { @@ -491,7 +498,15 @@ Manager::Manager() total_log_writer_discarded_writes_family( telemetry_mgr->CounterFamily("zeek", "log-writer-discarded-writes", {"writer", "module", "stream", "filter-name", "path"}, - "Total number of log writes discarded due to size limitations.")) { + "Total number of log writes discarded due to size limitations.")), + total_log_writer_truncated_string_fields_family( + telemetry_mgr->CounterFamily("zeek", "log-writer-truncated-string-fields", + {"writer", "module", "stream", "filter-name", "path"}, + "Total number of logged string fields limited by length")), + total_log_writer_truncated_container_fields_family( + telemetry_mgr->CounterFamily("zeek", "log-writer-truncated-containers", + {"writer", "module", "stream", "filter-name", "path"}, + "Total number of logged container fields limited by length")) { 
rotations_pending = 0; } @@ -1486,6 +1501,10 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val size_t allowed_bytes = std::min( {static_cast(s->Len()), max_field_string_bytes, max_total_string_bytes - total_string_bytes}); + if ( allowed_bytes < static_cast(s->Len()) ) + // TODO: this could also log a reporter warning or a weird or something + info->total_truncated_string_fields->Inc(); + if ( allowed_bytes == 0 ) return lval; @@ -1537,6 +1556,10 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val size_t allowed_elements = std::min({static_cast(set->Length()), max_field_container_elements, max_total_container_elements - total_container_elements}); + if ( allowed_elements < static_cast(set->Length()) ) + // TODO: this could also log a reporter warning or a weird or something + info->total_truncated_containers->Inc(); + if ( allowed_elements == 0 ) return lval; @@ -1561,6 +1584,10 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val size_t allowed_elements = std::min({static_cast(vec->Size()), max_field_container_elements, max_total_container_elements - total_container_elements}); + if ( allowed_elements < static_cast(vec->Size()) ) + // TODO: this could also log a reporter warning or a weird or something + info->total_truncated_containers->Inc(); + if ( allowed_elements == 0 ) return lval; @@ -1688,8 +1715,11 @@ WriterFrontend* Manager::CreateWriter(EnumVal* id, EnumVal* writer, WriterBacken {"filter-name", instantiating_filter}, {"path", info->path}}; - WriterInfo* winfo = new WriterInfo(zeek::log_mgr->total_log_writer_writes_family->GetOrAdd(labels), - zeek::log_mgr->total_log_writer_discarded_writes_family->GetOrAdd(labels)); + WriterInfo* winfo = + new WriterInfo(zeek::log_mgr->total_log_writer_writes_family->GetOrAdd(labels), + zeek::log_mgr->total_log_writer_discarded_writes_family->GetOrAdd(labels), + zeek::log_mgr->total_log_writer_truncated_string_fields_family->GetOrAdd(labels), + zeek::log_mgr->total_log_writer_truncated_container_fields_family->GetOrAdd(labels)); winfo->type = writer->Ref()->AsEnumVal(); winfo->writer = nullptr; winfo->open_time = run_state::network_time; diff --git a/src/logging/Manager.h b/src/logging/Manager.h index d54c3ac204..9db48c366e 100644 --- a/src/logging/Manager.h +++ b/src/logging/Manager.h @@ -460,6 +460,8 @@ private: std::shared_ptr total_log_stream_writes_family; std::shared_ptr total_log_writer_writes_family; std::shared_ptr total_log_writer_discarded_writes_family; + std::shared_ptr total_log_writer_truncated_string_fields_family; + std::shared_ptr total_log_writer_truncated_container_fields_family; zeek_uint_t last_delay_token = 0; std::vector active_writes; diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout new file mode 100644 index 0000000000..e40107ed88 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 9.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout new file mode 100644 index 0000000000..51e81b6b71 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 12.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout new file mode 100644 index 0000000000..7094a3c95e --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout new file mode 100644 index 0000000000..44df4ae86b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 1.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout new file mode 100644 index 0000000000..7094a3c95e --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout new file mode 100644 index 0000000000..354dee21f9 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 20.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.telemetry/telemetry.log b/testing/btest/Baseline/scripts.base.frameworks.logging.telemetry/telemetry.log index f98d15e8f4..6b52728ce8 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.telemetry/telemetry.log +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.telemetry/telemetry.log @@ -16,4 +16,10 @@ XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_writes_total filter_name,module,p XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0 XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0 XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0 +XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0 +XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0 +XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0 +XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0 +XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0 +XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0 #close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek index d52f625561..cb05664065 100644 --- a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek +++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek @@ -2,9 +2,12 @@ # # @TEST-EXEC: zeek -b test.zeek %INPUT # 
@TEST-EXEC: btest-diff test.log +# @TEST-EXEC: btest-diff .stdout # @TEST-START-FILE test.zeek +@load base/frameworks/telemetry + module Test; export { @@ -32,6 +35,13 @@ event zeek_init() } Log::write(Test::LOG, rec); + + local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*"); + for (i in storage_metrics) + { + local m = storage_metrics[i]; + print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value; + } } From 339d46ae26e13ffb8dfc5c45036584913a8e52d7 Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Thu, 31 Jul 2025 13:16:16 -0700 Subject: [PATCH 04/11] Add a weird that gets emitted when strings/containers are over the limits --- src/logging/Manager.cc | 17 +++++++----- .../.stdout | 2 ++ .../weird.log | 11 ++++++++ .../.stdout | 2 ++ .../weird.log | 11 ++++++++ .../.stdout | 2 ++ .../weird.log | 11 ++++++++ .../.stdout | 2 ++ .../weird.log | 11 ++++++++ .../.stdout | 2 ++ .../weird.log | 11 ++++++++ .../.stdout | 2 ++ .../weird.log | 11 ++++++++ .../logging/field-length-limiting.zeek | 26 +++++++++++++------ 14 files changed, 107 insertions(+), 14 deletions(-) create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/weird.log diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index b023506661..89c6e8266f 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -243,6 +243,7 @@ struct Manager::WriterInfo { bool from_remote = false; bool hook_initialized = false; string instantiating_filter; + string stream_name; std::shared_ptr total_writes; std::shared_ptr total_discarded_writes; @@ -1501,9 +1502,10 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val size_t allowed_bytes = std::min( {static_cast(s->Len()), max_field_string_bytes, max_total_string_bytes - total_string_bytes}); - if ( allowed_bytes < static_cast(s->Len()) ) - // TODO: this could also log a reporter warning or a weird or something + if ( allowed_bytes < static_cast(s->Len()) ) { + reporter->Weird("log_string_field_truncated", util::fmt("%s", info->stream_name.c_str())); info->total_truncated_string_fields->Inc(); + } if ( allowed_bytes == 0 ) return lval; @@ -1556,9 +1558,10 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val size_t allowed_elements = std::min({static_cast(set->Length()), max_field_container_elements, max_total_container_elements - total_container_elements}); - if ( allowed_elements < static_cast(set->Length()) ) - // TODO: this could also log a reporter warning or a weird or something + if ( allowed_elements < static_cast(set->Length()) ) { + reporter->Weird("log_container_field_truncated", util::fmt("%s", info->stream_name.c_str())); info->total_truncated_containers->Inc(); + } if ( allowed_elements == 0 ) return lval; @@ -1584,9 +1587,10 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val size_t allowed_elements = std::min({static_cast(vec->Size()), 
max_field_container_elements, max_total_container_elements - total_container_elements}); - if ( allowed_elements < static_cast(vec->Size()) ) - // TODO: this could also log a reporter warning or a weird or something + if ( allowed_elements < static_cast(vec->Size()) ) { + reporter->Weird("log_container_field_truncated", util::fmt("%s", info->stream_name.c_str())); info->total_truncated_containers->Inc(); + } if ( allowed_elements == 0 ) return lval; @@ -1730,6 +1734,7 @@ WriterFrontend* Manager::CreateWriter(EnumVal* id, EnumVal* writer, WriterBacken winfo->from_remote = from_remote; winfo->hook_initialized = false; winfo->instantiating_filter = instantiating_filter; + winfo->stream_name = stream->name; // Search for a corresponding filter for the writer/path pair and use its // rotation settings. If no matching filter is found, fall back on diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout index e40107ed88..8257d72161 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout @@ -1,3 +1,5 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 9.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout index 51e81b6b71..57f1fd5f12 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout @@ -1,3 +1,5 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 12.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout index 7094a3c95e..29978a303b 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout @@ -1,3 +1,5 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout index 44df4ae86b..f2e2b796bd 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout @@ -1,3 +1,5 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 1.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout index 7094a3c95e..29978a303b 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout @@ -1,3 +1,5 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout index 354dee21f9..a9548ba949 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout @@ -1,3 +1,5 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 2.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 20.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/weird.log new file mode 100644 index 0000000000..ef2c56bc6b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_s Test: F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek index cb05664065..4946c1b230 100644 --- a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek +++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek @@ -3,10 +3,12 @@ # @TEST-EXEC: zeek -b test.zeek %INPUT # @TEST-EXEC: btest-diff test.log # @TEST-EXEC: btest-diff .stdout +# @TEST-EXEC: btest-diff weird.log # @TEST-START-FILE test.zeek @load base/frameworks/telemetry +@load base/frameworks/notice/weird module Test; export { redef enum Log::ID += { LOG }; type Info: record { strings1: vector of string &log; strings2: vector of string &log; }; } +event log_telemetry() + { + local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*"); + for (i in storage_metrics) + { + local m = storage_metrics[i]; + print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value; + } + } + event zeek_init() { Log::create_stream(LOG, [$columns=Info, $path="test"]); @@ -36,19 +48,17 @@ event zeek_init() } Log::write(Test::LOG, rec); - local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*"); - for (i in storage_metrics) - { - local m = storage_metrics[i]; - print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value; - } + # Do this as a separate event so the weirds get processed before we log the + # telemetry output. See the comment below for the first test as to why. + event log_telemetry(); } - # @TEST-END-FILE test.zeek # Limit the individual fields to 5 bytes, but keep the total maximum large enough that it -# will write all of the fields. +# will write all of the fields. The weird test for this one will be off since it will +# limit the name of the weird. It will pass, but the fields in the log will get truncated +# like they're supposed to.
redef Log::max_field_string_bytes = 5; # @TEST-START-NEXT From 0ec2161b0494a7af947b30fa2bba18f1aa10d047 Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Tue, 5 Aug 2025 14:50:15 -0700 Subject: [PATCH 05/11] Add options to filter at the stream level as well as globally --- scripts/base/frameworks/logging/main.zeek | 20 +++ scripts/base/init-bare.zeek | 8 +- src/logging/Manager.cc | 75 +++++----- src/logging/Manager.h | 10 +- testing/btest/Baseline/plugins.hooks/output | 132 +++++++++--------- .../.stderr | 4 +- .../.stderr | 4 +- .../.stderr | 4 +- .../.stderr | 4 +- .../.stdout | 5 + .../test.log | 11 ++ .../weird.log | 11 ++ .../.stdout | 5 + .../test.log | 11 ++ .../weird.log | 11 ++ .../.stdout | 5 + .../test.log | 11 ++ .../weird.log | 11 ++ .../.stdout | 5 + .../test.log | 11 ++ .../weird.log | 11 ++ .../.stdout | 5 + .../test.log | 11 ++ .../weird.log | 11 ++ .../.stdout | 5 + .../test.log | 11 ++ .../weird.log | 11 ++ .../logging/field-length-limiting-stream.zeek | 106 ++++++++++++++ .../logging/field-length-limiting.zeek | 12 +- .../frameworks/logging/length-checking.zeek | 8 +- 30 files changed, 420 insertions(+), 129 deletions(-) create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log create mode 100644 testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log create mode 100644 testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek diff --git a/scripts/base/frameworks/logging/main.zeek b/scripts/base/frameworks/logging/main.zeek index 7d236f829e..114b33711e 100644 --- a/scripts/base/frameworks/logging/main.zeek +++ b/scripts/base/frameworks/logging/main.zeek @@ -422,6 
+422,26 @@ export { ## .. :zeek:see:`Log::default_max_delay_queue_size` ## .. :zeek:see:`Log::set_max_delay_queue_size` max_delay_queue_size: count &default=default_max_delay_queue_size; + + ## Maximum string size for a field in a log record from this stream. + ## + ## .. :zeek:see:`Log::default_max_field_string_bytes` + max_field_string_bytes: count &default=Log::default_max_field_string_bytes; + + ## Maximum total string size in a log record from this stream. + ## + ## .. :zeek:see:`Log::default_max_total_string_bytes` + max_total_string_bytes: count &default=Log::default_max_total_string_bytes; + + ## Maximum container elements for a field in a log record from this stream. + ## + ## .. :zeek:see:`Log::default_max_field_container_elements` + max_field_container_elements: count &default=Log::default_max_field_container_elements; + + ## Maximum total container elements in a log record from this stream. + ## + ## .. :zeek:see:`Log::default_max_total_container_elements` + max_total_container_elements: count &default=Log::default_max_total_container_elements; }; ## Sentinel value for indicating that a filter was not found when looked up. diff --git a/scripts/base/init-bare.zeek b/scripts/base/init-bare.zeek index 8f300bce4d..c77c96f472 100644 --- a/scripts/base/init-bare.zeek +++ b/scripts/base/init-bare.zeek @@ -3747,12 +3747,12 @@ export { ## The maximum number of bytes that a single string field can contain when ## logging. If a string reaches this limit, the log output for the field will be ## truncated. Setting this to zero disables the limiting. - const max_field_string_bytes = 4096 &redef; + const default_max_field_string_bytes = 4096 &redef; ## The maximum number of elements a single container field can contain when ## logging. If a container reaches this limit, the log output for the field will ## be truncated. Setting this to zero disables the limiting. - const max_field_container_elements = 100 &redef; + const default_max_field_container_elements = 100 &redef; ## The maximum total bytes a record may log for string fields. This is the sum of ## all bytes in string fields logged for the record. If this limit is reached, all ## further string fields will be logged as empty strings. Any containers holding ## string fields will be logged as empty containers. If the limit is reached while ## processing a container holding string fields, the container will be truncated ## in the log output. Setting this to zero disables the limiting. - const max_total_string_bytes = 256000 &redef; + const default_max_total_string_bytes = 256000 &redef; ## The maximum total number of container elements a record may log. This is the ## sum of all container elements logged for the record. If this limit is reached, ## all further containers will be logged as empty containers. If the limit is ## reached while processing a container, the container will be truncated in the ## output. Setting this to zero disables the limiting.
- const max_total_container_elements = 500 &redef; + const default_max_total_container_elements = 500 &redef; } module POP3; diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index 89c6e8266f..1d832172b9 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -243,7 +243,6 @@ struct Manager::WriterInfo { bool from_remote = false; bool hook_initialized = false; string instantiating_filter; - string stream_name; std::shared_ptr total_writes; std::shared_ptr total_discarded_writes; @@ -289,10 +288,13 @@ struct Manager::Stream { zeek_uint_t max_delay_queue_size = 1; bool evicting = false; + size_t max_field_string_bytes = 0; + size_t max_total_string_bytes = 0; + size_t max_field_container_elements = 0; + size_t max_total_container_elements = 0; ~Stream(); - const detail::DelayInfoPtr& GetDelayInfo(const detail::WriteContext& ctx); void EnqueueWriteForDelay(const detail::WriteContext& ctx); @@ -520,22 +522,6 @@ void Manager::InitPostScript() { rotation_format_func = id::find_func("Log::rotation_format_func"); log_stream_policy_hook = id::find_func("Log::log_stream_policy"); max_log_record_size = id::find_val("Log::max_log_record_size")->AsCount(); - - max_field_string_bytes = id::find_val("Log::max_field_string_bytes")->AsCount(); - if ( max_field_string_bytes == 0 ) - max_field_string_bytes = std::numeric_limits::max(); - - max_total_string_bytes = id::find_val("Log::max_total_string_bytes")->AsCount(); - if ( max_total_string_bytes == 0 ) - max_total_string_bytes = std::numeric_limits::max(); - - max_field_container_elements = id::find_val("Log::max_field_container_elements")->AsCount(); - if ( max_field_container_elements == 0 ) - max_field_container_elements = std::numeric_limits::max(); - - max_total_container_elements = id::find_val("Log::max_total_container_elements")->AsCount(); - if ( max_total_container_elements == 0 ) - max_total_container_elements = std::numeric_limits::max(); } WriterBackend* Manager::CreateBackend(WriterFrontend* frontend, EnumVal* tag) { @@ -691,9 +677,24 @@ bool Manager::CreateStream(EnumVal* id, RecordVal* sval) { streams[idx]->columns = columns->Ref()->AsRecordType(); streams[idx]->max_delay_interval = sval->GetField("max_delay_interval")->AsInterval(); streams[idx]->max_delay_queue_size = sval->GetField("max_delay_queue_size")->AsCount(); - streams[idx]->enable_remote = id::find_val("Log::enable_remote_logging")->AsBool(); + streams[idx]->max_field_string_bytes = sval->GetField("max_field_string_bytes")->AsCount(); + if ( streams[idx]->max_field_string_bytes == 0 ) + streams[idx]->max_field_string_bytes = std::numeric_limits::max(); + + streams[idx]->max_total_string_bytes = sval->GetField("max_total_string_bytes")->AsCount(); + if ( streams[idx]->max_total_string_bytes == 0 ) + streams[idx]->max_total_string_bytes = std::numeric_limits::max(); + + streams[idx]->max_field_container_elements = sval->GetField("max_field_container_elements")->AsCount(); + if ( streams[idx]->max_field_container_elements == 0 ) + streams[idx]->max_field_container_elements = std::numeric_limits::max(); + + streams[idx]->max_total_container_elements = sval->GetField("max_total_container_elements")->AsCount(); + if ( streams[idx]->max_total_container_elements == 0 ) + streams[idx]->max_total_container_elements = std::numeric_limits::max(); + DBG_LOG(DBG_LOGGING, "Created new logging stream '%s', raising event %s", streams[idx]->name.c_str(), event ? 
streams[idx]->event->Name() : ""); @@ -1183,7 +1184,7 @@ bool Manager::WriteToFilters(const Manager::Stream* stream, zeek::RecordValPtr c size_t total_size = 0; total_string_bytes = 0; total_container_elements = 0; - auto rec = RecordToLogRecord(w->second, filter, columns.get(), total_size); + auto rec = RecordToLogRecord(w->second, filter, stream, columns.get(), total_size); if ( total_size > max_log_record_size ) { reporter->Weird("log_record_too_large", util::fmt("%s", stream->name.c_str())); @@ -1422,7 +1423,8 @@ bool Manager::SetMaxDelayQueueSize(const EnumValPtr& id, zeek_uint_t queue_size) return true; } -threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val, Type* ty, size_t& total_size) { +threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty, + size_t& total_size) { if ( ! val ) return {ty->Tag(), false}; @@ -1499,11 +1501,11 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val case TYPE_STRING: { const String* s = val->AsString()->AsString(); - size_t allowed_bytes = std::min( - {static_cast(s->Len()), max_field_string_bytes, max_total_string_bytes - total_string_bytes}); + size_t allowed_bytes = std::min({static_cast(s->Len()), stream->max_field_string_bytes, + stream->max_total_string_bytes - total_string_bytes}); if ( allowed_bytes < static_cast(s->Len()) ) { - reporter->Weird("log_string_field_truncated", util::fmt("%s", info->stream_name.c_str())); + reporter->Weird("log_string_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_string_fields->Inc(); } @@ -1555,11 +1557,12 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val auto& set_t = tbl_t->GetIndexTypes()[0]; bool is_managed = ZVal::IsManagedType(set_t); - size_t allowed_elements = std::min({static_cast(set->Length()), max_field_container_elements, - max_total_container_elements - total_container_elements}); + size_t allowed_elements = + std::min({static_cast(set->Length()), stream->max_field_container_elements, + stream->max_total_container_elements - total_container_elements}); if ( allowed_elements < static_cast(set->Length()) ) { - reporter->Weird("log_container_field_truncated", util::fmt("%s", info->stream_name.c_str())); + reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_containers->Inc(); } @@ -1570,7 +1573,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { std::optional s_i = ZVal(set->Idx(i), set_t); - lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(info, s_i, set_t.get(), total_size)); + lval.val.set_val.vals[i] = + new threading::Value(ValToLogVal(info, stream, s_i, set_t.get(), total_size)); if ( is_managed ) ZVal::DeleteManagedType(*s_i); lval.val.set_val.size++; @@ -1584,11 +1588,11 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val case TYPE_VECTOR: { VectorVal* vec = val->AsVector(); - size_t allowed_elements = std::min({static_cast(vec->Size()), max_field_container_elements, - max_total_container_elements - total_container_elements}); + size_t allowed_elements = std::min({static_cast(vec->Size()), stream->max_field_container_elements, + stream->max_total_container_elements - total_container_elements}); if ( allowed_elements < static_cast(vec->Size()) ) { - reporter->Weird("log_container_field_truncated", util::fmt("%s", 
info->stream_name.c_str())); + reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_containers->Inc(); } @@ -1601,7 +1605,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val auto& vt = vec->GetType()->Yield(); for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { - lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(info, vv[i], vt.get(), total_size)); + lval.val.vector_val.vals[i] = + new threading::Value(ValToLogVal(info, stream, vv[i], vt.get(), total_size)); lval.val.vector_val.size++; } @@ -1616,7 +1621,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, std::optional& val return lval; } -detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, RecordVal* columns, size_t& total_size) { +detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns, + size_t& total_size) { RecordValPtr ext_rec; if ( filter->num_ext_fields > 0 ) { @@ -1666,7 +1672,7 @@ detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, R } if ( val ) - vals.emplace_back(ValToLogVal(info, val, vt, total_size)); + vals.emplace_back(ValToLogVal(info, stream, val, vt, total_size)); if ( total_size > max_log_record_size ) { return {}; @@ -1734,7 +1740,6 @@ WriterFrontend* Manager::CreateWriter(EnumVal* id, EnumVal* writer, WriterBacken winfo->from_remote = from_remote; winfo->hook_initialized = false; winfo->instantiating_filter = instantiating_filter; - winfo->stream_name = stream->name; // Search for a corresponding filter for the writer/path pair and use its // rotation settings. If no matching filter is found, fall back on diff --git a/src/logging/Manager.h b/src/logging/Manager.h index 9db48c366e..e78898bb50 100644 --- a/src/logging/Manager.h +++ b/src/logging/Manager.h @@ -422,8 +422,10 @@ private: bool TraverseRecord(Stream* stream, Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude, const std::string& path, const std::list& indices); - detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, RecordVal* columns, size_t& total_size); - threading::Value ValToLogVal(WriterInfo* info, std::optional& val, Type* ty, size_t& total_size); + detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns, + size_t& total_size); + threading::Value ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty, + size_t& total_size); Stream* FindStream(EnumVal* id); void RemoveDisabledWriters(Stream* stream); @@ -450,10 +452,6 @@ private: FuncPtr log_stream_policy_hook; size_t max_log_record_size = 0; - size_t max_field_string_bytes = 0; - size_t max_total_string_bytes = 0; - size_t max_field_container_elements = 0; - size_t max_total_container_elements = 0; size_t total_string_bytes = 0; size_t total_container_elements = 0; diff --git a/testing/btest/Baseline/plugins.hooks/output b/testing/btest/Baseline/plugins.hooks/output index d944c123ed..b4333b458d 100644 --- a/testing/btest/Baseline/plugins.hooks/output +++ b/testing/btest/Baseline/plugins.hooks/output @@ -38,17 +38,17 @@ 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, 
postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, 
ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, 
policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Analyzer::Logging::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Broker::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Cluster::LOG)) -> @@ -82,17 +82,17 @@ 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Notice::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Tunnel::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Weird::LOG, default)) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::ALARM_LOG, 
[columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: 
Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Option::set, , (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, )) -> 0.000000 MetaHookPost CallFunction(Option::set_change_handler, , (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> 0.000000 MetaHookPost CallFunction(Option::set_change_handler, , (Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> @@ -980,17 +980,17 @@ 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, 
path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 
msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::LOG, 
[columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Analyzer::Logging::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Broker::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Cluster::LOG)) @@ -1024,17 +1024,17 @@ 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Notice::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Tunnel::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Weird::LOG, default)) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , 
(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, 
event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Option::set, , (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, )) 0.000000 MetaHookPre CallFunction(Option::set_change_handler, , (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) 0.000000 MetaHookPre CallFunction(Option::set_change_handler, , (Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) @@ -1921,17 +1921,17 @@ 0.000000 | HookCallFunction Log::__add_filter(Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=, include=, exclude=, log_local=T, log_remote=T, 
field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) -0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, 
policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) +0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, 
max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::add_default_filter(Analyzer::Logging::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Broker::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG) @@ -1965,17 +1965,17 @@ 0.000000 | HookCallFunction Log::add_stream_filters(Notice::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Tunnel::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Weird::LOG, default) -0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, 
max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) +0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, 
max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Option::set(Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, ) 0.000000 | HookCallFunction Option::set_change_handler(Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100) 0.000000 | HookCallFunction Option::set_change_handler(Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr index 57a839051e..a3577fb43e 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-error in <...>/main.zeek, line 642 and <...>/static-errors.zeek, line 12: type clash (Log::PostDelayCallback and post_delay_cb) -error in <...>/static-errors.zeek, line 12 and <...>/main.zeek, line 642: type mismatch (post_delay_cb and Log::PostDelayCallback) +error in <...>/main.zeek, line 662 and <...>/static-errors.zeek, line 12: type clash (Log::PostDelayCallback and post_delay_cb) +error in <...>/static-errors.zeek, line 12 and <...>/main.zeek, line 662: type mismatch (post_delay_cb and Log::PostDelayCallback) error in <...>/static-errors.zeek, line 12: argument type mismatch in function call (Log::delay(id, to_any_coerce rec, post_delay_cb)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr index e39286ce49..597cb83a0b 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -error in <...>/main.zeek, line 647 and <...>/static-errors.zeek, line 8: type clash (Log::DelayToken and 42) -error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 647: type mismatch (42 and Log::DelayToken) +error in <...>/main.zeek, line 667 and <...>/static-errors.zeek, line 8: type clash (Log::DelayToken and 42) +error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 667: type mismatch (42 and Log::DelayToken) error in <...>/static-errors.zeek, line 8: argument type mismatch in function call (Log::delay_finish(id, to_any_coerce rec, 42)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr index 31db1e4af6..4ad3a42493 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -error in <...>/main.zeek, line 647 and <...>/static-errors.zeek, line 8: arithmetic mixed with non-arithmetic (Log::DelayToken and 42) -error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 647: type mismatch (42 and Log::DelayToken) +error in <...>/main.zeek, line 667 and <...>/static-errors.zeek, line 8: arithmetic mixed with non-arithmetic (Log::DelayToken and 42) +error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 667: type mismatch (42 and Log::DelayToken) error in <...>/static-errors.zeek, line 8: argument type mismatch in function call (Log::delay_finish(id, to_any_coerce rec, 42)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr index 098dbf9bee..dcd0d2de37 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-error in <...>/main.zeek, line 642 and <...>/static-errors.zeek, line 16: type clash (Log::PostDelayCallback and post_delay_cb) -error in <...>/static-errors.zeek, line 16 and <...>/main.zeek, line 642: type mismatch (post_delay_cb and Log::PostDelayCallback) +error in <...>/main.zeek, line 662 and <...>/static-errors.zeek, line 16: type clash (Log::PostDelayCallback and post_delay_cb) +error in <...>/static-errors.zeek, line 16 and <...>/main.zeek, line 662: type mismatch (post_delay_cb and Log::PostDelayCallback) error in <...>/static-errors.zeek, line 16: argument type mismatch in function call (Log::delay(id, to_any_coerce rec, post_delay_cb)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout new file mode 100644 index 0000000000..8257d72161 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 9.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log new file mode 100644 index 0000000000..93c73f8a84 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDE,(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout new file mode 100644 index 0000000000..57f1fd5f12 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 12.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log new file mode 100644 index 0000000000..c44d2e2c5c --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDE,(empty) (empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout new file mode 100644 index 0000000000..29978a303b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log new file mode 100644 index 0000000000..542d53cf44 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout new file mode 100644 index 0000000000..f2e2b796bd --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 1.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log new file mode 100644 index 0000000000..2b02e6e655 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout new file mode 100644 index 0000000000..29978a303b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log new file mode 100644 index 0000000000..4f595571fe --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ (empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout new file mode 100644 index 0000000000..ee93469f35 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 20.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log new file mode 100644 index 0000000000..9bcebb3086 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek new file mode 100644 index 0000000000..a67acc521d --- /dev/null +++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek @@ -0,0 +1,106 @@ +# @TEST-DOC: Tests field length limiting at the stream level instead of at the global level. +# +# @TEST-EXEC: zeek -b test.zeek %INPUT +# @TEST-EXEC: btest-diff test.log +# @TEST-EXEC: btest-diff .stdout +# @TEST-EXEC: btest-diff weird.log + +# @TEST-START-FILE test.zeek + +@load base/frameworks/telemetry +@load base/frameworks/notice/weird + +module Test; + +export { + redef enum Log::ID += { LOG }; + + type Info: record { + strings1: vector of string &log; + strings2: vector of string &log; + }; +} + +event log_telemetry() + { + local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*"); + for (i in storage_metrics) + { + local m = storage_metrics[i]; + print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value; + } + } + +event zeek_init() &priority=-5 + { + local rec = Test::Info(); + local i = 0; + + # Create two vectors containing 10 strings with 10 characters each. + # This leaves us with 200 total characters to work with. + while ( ++i <= 10 ) + { + rec$strings1 += "ABCDEFGHIJ"; + rec$strings2 += "ABCDEFGHIJ"; + } + + Log::write(Test::LOG, rec); + + # Do this as a separate event so the weirds get processed before we log the + # telemetry output. See the comment below for the first test as to why. + event log_telemetry(); + } + +# @TEST-END-FILE test.zeek + +# Limit the individual fields to 5 bytes, but keep the total maximum large enough that it +# will write all of the fields. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_field_string_bytes=5]); + } + +# @TEST-START-NEXT + +# Leave the individual field bytes alone, but set the maximum length to where it cuts off +# the second field in the middle of a string. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_string_bytes=115]); + } + +# @TEST-START-NEXT + +# Leave the individual field bytes alone, but set the maximum length to where it cuts off +# the first field in the middle of a string. Second field should log empty strings. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_string_bytes=85]); + } + +# @TEST-START-NEXT + +# Limit the individual containers to 5 items, but keep the total maximum large enough that +# it will write all of the fields. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_field_container_elements=5]); + } + +# @TEST-START-NEXT + +# Leave the individual field items alone, but set the maximum length to where it cuts off +# the second field in the middle.
+event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_container_elements=15]); + } + +# @TEST-START-NEXT + +# Leave the individual field bytes alone, but set the maximum length to where it cuts off +# the first field in the middle. Second field should log empty containers. +event zeek_init() &priority=10 + { + Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_container_elements=5]); + } diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek index 4946c1b230..f1ae881847 100644 --- a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek +++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek @@ -59,34 +59,34 @@ event zeek_init() # will write all of the fields. The weird test for this one will be off since it will # limit the name of the weird. It will pass, but the fields in the log will get truncated # like they're supposed to. -redef Log::max_field_string_bytes = 5; +redef Log::default_max_field_string_bytes = 5; # @TEST-START-NEXT # Leave the individual field bytes alone, but set the maximum length to where it cuts off # the second field in the middle of a string. -redef Log::max_total_string_bytes = 115; +redef Log::default_max_total_string_bytes = 115; # @TEST-START-NEXT # Leave the individual field bytes alone, but set the maximum length to where it cuts off # the first field in the middle of a string. Second field should log empty strings. -redef Log::max_total_string_bytes = 85; +redef Log::default_max_total_string_bytes = 85; # @TEST-START-NEXT # Limit the individual containers to 5 items, but keep the total maximum large enough that # it will write all of the fields. -redef Log::max_field_container_elements = 5; +redef Log::default_max_field_container_elements = 5; # @TEST-START-NEXT # Leave the individual field items alone, but set the maximum length to where it cuts off # the second field in the middle. -redef Log::max_total_container_elements = 15; +redef Log::default_max_total_container_elements = 15; # @TEST-START-NEXT # Leave the individual field bytes alone, but set the maximum length to where it cuts off # the first field in the middle. Second field should log empty containers. -redef Log::max_total_container_elements = 5; +redef Log::default_max_total_container_elements = 5; diff --git a/testing/btest/scripts/base/frameworks/logging/length-checking.zeek b/testing/btest/scripts/base/frameworks/logging/length-checking.zeek index 0e14734012..95e966c3d6 100644 --- a/testing/btest/scripts/base/frameworks/logging/length-checking.zeek +++ b/testing/btest/scripts/base/frameworks/logging/length-checking.zeek @@ -20,10 +20,10 @@ module Test; # Disable the string and container length filtering. 
-redef Log::max_field_string_bytes = 0; -redef Log::max_total_string_bytes = 0; -redef Log::max_field_container_elements = 0; -redef Log::max_total_container_elements = 0; +redef Log::default_max_field_string_bytes = 0; +redef Log::default_max_total_string_bytes = 0; +redef Log::default_max_field_container_elements = 0; +redef Log::default_max_total_container_elements = 0; export { redef enum Log::ID += { LOG }; From 8a4bc084f9a29f8e2588ddd79ad66657c4b9b70a Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Wed, 6 Aug 2025 15:28:21 -0700 Subject: [PATCH 06/11] Expand the size of the log-size filters for x509 --- scripts/base/files/x509/main.zeek | 31 +++++++++++++++++-- .../protocols/ssl/log-certs-base64.zeek | 3 ++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/scripts/base/files/x509/main.zeek b/scripts/base/files/x509/main.zeek index 56355e0675..94c905faee 100644 --- a/scripts/base/files/x509/main.zeek +++ b/scripts/base/files/x509/main.zeek @@ -105,6 +105,29 @@ export { ## Event for accessing logged records. global log_x509: event(rec: Info); + + ## The maximum number of bytes that a single string field can contain when + ## logging. If a string reaches this limit, the log output for the field will be + ## truncated. Setting this to zero disables the limiting. + ## + ## .. zeek:see:: Log::default_max_field_string_bytes + const default_max_field_string_bytes = Log::default_max_field_string_bytes &redef; + + ## The maximum number of elements a single container field can contain when + ## logging. If a container reaches this limit, the log output for the field will + ## be truncated. Setting this to zero disables the limiting. + ## + ## .. zeek:see:: Log::default_max_field_container_elements + const default_max_field_container_elements = 500 &redef; + + ## The maximum total number of container elements a record may log. This is the + ## sum of all container elements logged for the record. If this limit is reached, + ## all further containers will be logged as empty containers. If the limit is + ## reached while processing a container, the container will be truncated in the + ## output. Setting this to zero disables the limiting. + ## + ## .. zeek:see:: Log::default_max_total_container_elements + const default_max_total_container_elements = 1500 &redef; } global known_log_certs_with_broker: set[LogCertHash] &create_expire=relog_known_certificates_after &backend=Broker::MEMORY; @@ -117,7 +140,12 @@ redef record Files::Info += { event zeek_init() &priority=5 { - Log::create_stream(X509::LOG, Log::Stream($columns=Info, $ev=log_x509, $path="x509", $policy=log_policy)); + # x509 can have some very large certificates and very large sets of URIs. Expand the log size filters + # so that we're not truncating those. + Log::create_stream(X509::LOG, Log::Stream($columns=Info, $ev=log_x509, $path="x509", $policy=log_policy, + $max_field_string_bytes=X509::default_max_field_string_bytes, + $max_field_container_elements=X509::default_max_field_container_elements, + $max_total_container_elements=X509::default_max_total_container_elements)); # We use MIME types internally to distinguish between user and CA certificates. 
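For context on how the X509 overrides added above are meant to be used: because the constants are declared &redef, a site policy script can raise or disable the limits, and the values then flow into the Log::Stream fields passed to the Log::create_stream call above. A minimal sketch, not part of this patch; the numbers are hypothetical examples, not defaults taken from it:

# Hypothetical site tuning; values are examples only.
redef X509::default_max_field_string_bytes = 16384;       # allow larger certificate strings
redef X509::default_max_total_container_elements = 0;     # 0 disables the total-element cap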
# The first certificate in a connection always gets tagged as user-cert, all @@ -225,4 +253,3 @@ event file_state_remove(f: fa_file) &priority=5 Log::write(LOG, f$info$x509); } - diff --git a/scripts/policy/protocols/ssl/log-certs-base64.zeek b/scripts/policy/protocols/ssl/log-certs-base64.zeek index 0ec4834065..656281ba3f 100644 --- a/scripts/policy/protocols/ssl/log-certs-base64.zeek +++ b/scripts/policy/protocols/ssl/log-certs-base64.zeek @@ -10,6 +10,9 @@ redef record X509::Info += { cert: string &log &optional; }; +## Certificates can be large and we don't want to risk truncating the output. +redef X509::default_max_field_string_bytes = 0; + event x509_certificate(f: fa_file, cert_ref: opaque of x509, cert: X509::Certificate) &priority=1 { if ( ! f$info?$x509 ) From 98a77b5f2591a815dd1bd8c216c2efc524b92f4b Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Mon, 11 Aug 2025 13:24:09 -0700 Subject: [PATCH 07/11] Remove using numeric_limits and just check for zero instead --- src/logging/Manager.cc | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index 1d832172b9..cda678195d 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -680,20 +680,9 @@ bool Manager::CreateStream(EnumVal* id, RecordVal* sval) { streams[idx]->enable_remote = id::find_val("Log::enable_remote_logging")->AsBool(); streams[idx]->max_field_string_bytes = sval->GetField("max_field_string_bytes")->AsCount(); - if ( streams[idx]->max_field_string_bytes == 0 ) - streams[idx]->max_field_string_bytes = std::numeric_limits::max(); - streams[idx]->max_total_string_bytes = sval->GetField("max_total_string_bytes")->AsCount(); - if ( streams[idx]->max_total_string_bytes == 0 ) - streams[idx]->max_total_string_bytes = std::numeric_limits::max(); - streams[idx]->max_field_container_elements = sval->GetField("max_field_container_elements")->AsCount(); - if ( streams[idx]->max_field_container_elements == 0 ) - streams[idx]->max_field_container_elements = std::numeric_limits::max(); - streams[idx]->max_total_container_elements = sval->GetField("max_total_container_elements")->AsCount(); - if ( streams[idx]->max_total_container_elements == 0 ) - streams[idx]->max_total_container_elements = std::numeric_limits::max(); DBG_LOG(DBG_LOGGING, "Created new logging stream '%s', raising event %s", streams[idx]->name.c_str(), event ? streams[idx]->event->Name() : ""); @@ -1423,6 +1412,20 @@ bool Manager::SetMaxDelayQueueSize(const EnumValPtr& id, zeek_uint_t queue_size) return true; } +static size_t calculate_allowed(size_t field_size, size_t max_field_size, size_t max_total_size, size_t current_total) { + size_t allowed = field_size; + + if ( max_field_size != 0 && max_field_size < allowed ) { + allowed = max_field_size; + } + if ( max_total_size != 0 && (max_total_size - current_total) < allowed ) { + allowed = max_total_size - current_total; + } + + return allowed; +} + + threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty, size_t& total_size) { if ( ! 
val ) @@ -1500,9 +1503,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st case TYPE_STRING: { const String* s = val->AsString()->AsString(); - - size_t allowed_bytes = std::min({static_cast(s->Len()), stream->max_field_string_bytes, - stream->max_total_string_bytes - total_string_bytes}); + size_t allowed_bytes = calculate_allowed(static_cast(s->Len()), stream->max_field_string_bytes, + stream->max_total_string_bytes, total_string_bytes); if ( allowed_bytes < static_cast(s->Len()) ) { reporter->Weird("log_string_field_truncated", util::fmt("%s", stream->name.c_str())); @@ -1558,8 +1560,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st bool is_managed = ZVal::IsManagedType(set_t); size_t allowed_elements = - std::min({static_cast(set->Length()), stream->max_field_container_elements, - stream->max_total_container_elements - total_container_elements}); + calculate_allowed(static_cast(set->Length()), stream->max_field_container_elements, + stream->max_total_container_elements, total_container_elements); if ( allowed_elements < static_cast(set->Length()) ) { reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); @@ -1588,8 +1590,9 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st case TYPE_VECTOR: { VectorVal* vec = val->AsVector(); - size_t allowed_elements = std::min({static_cast(vec->Size()), stream->max_field_container_elements, - stream->max_total_container_elements - total_container_elements}); + size_t allowed_elements = + calculate_allowed(static_cast(vec->Size()), stream->max_field_container_elements, + stream->max_total_container_elements, total_container_elements); if ( allowed_elements < static_cast(vec->Size()) ) { reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); From 29425688da4df7ca8152e80cdabcd667cc5f9411 Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Mon, 11 Aug 2025 14:19:44 -0700 Subject: [PATCH 08/11] Make total_size counter a member in logging::Manager --- src/logging/Manager.cc | 47 ++++++++++++++++++++---------------------- src/logging/Manager.h | 7 +++---- 2 files changed, 25 insertions(+), 29 deletions(-) diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index cda678195d..3699a822d3 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -1170,12 +1170,12 @@ bool Manager::WriteToFilters(const Manager::Stream* stream, zeek::RecordValPtr c assert(info); // Alright, can do the write now. - size_t total_size = 0; + total_record_size = 0; total_string_bytes = 0; total_container_elements = 0; - auto rec = RecordToLogRecord(w->second, filter, stream, columns.get(), total_size); + auto rec = RecordToLogRecord(w->second, filter, stream, columns.get()); - if ( total_size > max_log_record_size ) { + if ( total_record_size > max_log_record_size ) { reporter->Weird("log_record_too_large", util::fmt("%s", stream->name.c_str())); w->second->total_discarded_writes->Inc(); continue; @@ -1426,8 +1426,7 @@ static size_t calculate_allowed(size_t field_size, size_t max_field_size, size_t } -threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty, - size_t& total_size) { +threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty) { if ( ! 
val ) return {ty->Tag(), false}; @@ -1437,7 +1436,7 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st case TYPE_BOOL: case TYPE_INT: lval.val.int_val = val->AsInt(); - total_size += sizeof(lval.val.int_val); + total_record_size += sizeof(lval.val.int_val); break; case TYPE_ENUM: { @@ -1456,14 +1455,14 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st lval.val.string_val.length = 0; } - total_size += lval.val.string_val.length; + total_record_size += lval.val.string_val.length; break; } case TYPE_COUNT: lval.val.uint_val = val->AsCount(); - total_size += sizeof(lval.val.uint_val); + total_record_size += sizeof(lval.val.uint_val); break; case TYPE_PORT: { @@ -1480,25 +1479,25 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st lval.val.port_val.port = p & ~PORT_SPACE_MASK; lval.val.port_val.proto = pt; - total_size += lval.val.port_val.size(); + total_record_size += lval.val.port_val.size(); break; } case TYPE_SUBNET: val->AsSubNet()->Get().ConvertToThreadingValue(&lval.val.subnet_val); - total_size += lval.val.subnet_val.size(); + total_record_size += lval.val.subnet_val.size(); break; case TYPE_ADDR: val->AsAddr()->Get().ConvertToThreadingValue(&lval.val.addr_val); - total_size += lval.val.addr_val.size(); + total_record_size += lval.val.addr_val.size(); break; case TYPE_DOUBLE: case TYPE_TIME: case TYPE_INTERVAL: lval.val.double_val = val->AsDouble(); - total_size += sizeof(lval.val.double_val); + total_record_size += sizeof(lval.val.double_val); break; case TYPE_STRING: { @@ -1519,7 +1518,7 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st lval.val.string_val.data = buf; lval.val.string_val.length = allowed_bytes; - total_size += allowed_bytes; + total_record_size += allowed_bytes; total_string_bytes += allowed_bytes; break; } @@ -1530,7 +1529,7 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st auto len = strlen(s); lval.val.string_val.data = util::copy_string(s, len); lval.val.string_val.length = len; - total_size += lval.val.string_val.length; + total_record_size += lval.val.string_val.length; break; } @@ -1542,7 +1541,7 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st auto len = strlen(s); lval.val.string_val.data = util::copy_string(s, len); lval.val.string_val.length = len; - total_size += lval.val.string_val.length; + total_record_size += lval.val.string_val.length; break; } @@ -1573,10 +1572,9 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st lval.val.set_val.vals = new threading::Value*[allowed_elements]; - for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { + for ( size_t i = 0; i < allowed_elements && total_record_size < max_log_record_size; i++ ) { std::optional s_i = ZVal(set->Idx(i), set_t); - lval.val.set_val.vals[i] = - new threading::Value(ValToLogVal(info, stream, s_i, set_t.get(), total_size)); + lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(info, stream, s_i, set_t.get())); if ( is_managed ) ZVal::DeleteManagedType(*s_i); lval.val.set_val.size++; @@ -1607,9 +1605,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st auto& vv = vec->RawVec(); auto& vt = vec->GetType()->Yield(); - for ( size_t i = 0; i < allowed_elements && total_size < max_log_record_size; i++ ) { - lval.val.vector_val.vals[i] = - new threading::Value(ValToLogVal(info, stream, vv[i], 
vt.get(), total_size)); + for ( size_t i = 0; i < allowed_elements && total_record_size < max_log_record_size; i++ ) { + lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(info, stream, vv[i], vt.get())); lval.val.vector_val.size++; } @@ -1624,8 +1621,8 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st return lval; } -detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns, - size_t& total_size) { +detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, + RecordVal* columns) { RecordValPtr ext_rec; if ( filter->num_ext_fields > 0 ) { @@ -1675,9 +1672,9 @@ detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, c } if ( val ) - vals.emplace_back(ValToLogVal(info, stream, val, vt, total_size)); + vals.emplace_back(ValToLogVal(info, stream, val, vt)); - if ( total_size > max_log_record_size ) { + if ( total_record_size > max_log_record_size ) { return {}; } } diff --git a/src/logging/Manager.h b/src/logging/Manager.h index e78898bb50..ddf149c4e8 100644 --- a/src/logging/Manager.h +++ b/src/logging/Manager.h @@ -422,10 +422,8 @@ private: bool TraverseRecord(Stream* stream, Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude, const std::string& path, const std::list& indices); - detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns, - size_t& total_size); - threading::Value ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty, - size_t& total_size); + detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns); + threading::Value ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty); Stream* FindStream(EnumVal* id); void RemoveDisabledWriters(Stream* stream); @@ -452,6 +450,7 @@ private: FuncPtr log_stream_policy_hook; size_t max_log_record_size = 0; + size_t total_record_size = 0; size_t total_string_bytes = 0; size_t total_container_elements = 0; From c8818d76bd241b19af8abac08bca375f07d49f2d Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Mon, 11 Aug 2025 14:49:23 -0700 Subject: [PATCH 09/11] Remove length limiting on string fields for HTTP --- scripts/base/protocols/http/main.zeek | 12 ++++++++++-- testing/btest/Baseline/plugins.hooks/output | 12 ++++++------ 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/scripts/base/protocols/http/main.zeek b/scripts/base/protocols/http/main.zeek index 983bb37d9a..27046dd642 100644 --- a/scripts/base/protocols/http/main.zeek +++ b/scripts/base/protocols/http/main.zeek @@ -139,6 +139,14 @@ export { ## out and request/response tracking reset to prevent unbounded ## state growth. option max_pending_requests = 100; + + ## The maximum number of bytes that a single string field can contain when + ## logging. If a string reaches this limit, the log output for the field will be + ## truncated. Setting this to zero disables the limiting. HTTP has no maximum + ## length for various fields such as the URI, so this is set to zero by default. + ## + ## .. zeek:see:: Log::default_max_field_string_bytes + const default_max_field_string_bytes = 0 &redef; } # Add the http state tracking fields to the connection record. @@ -156,7 +164,8 @@ redef likely_server_ports += { ports }; # Initialize the HTTP logging stream and ports. 
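Since HTTP::default_max_field_string_bytes is likewise &redef, the zero ("no limit") default can be reversed per deployment, and the value is picked up by the Log::create_stream call that follows. A minimal, hypothetical sketch; the 16384 figure is an example, not a value from this patch:

# Hypothetical site policy: cap HTTP string fields at 16 KiB rather than leaving them unlimited.
redef HTTP::default_max_field_string_bytes = 16384;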
event zeek_init() &priority=5 { - Log::create_stream(HTTP::LOG, Log::Stream($columns=Info, $ev=log_http, $path="http", $policy=log_policy)); + Log::create_stream(HTTP::LOG, Log::Stream($columns=Info, $ev=log_http, $path="http", $policy=log_policy, + $max_field_string_bytes=HTTP::default_max_field_string_bytes)); Analyzer::register_for_ports(Analyzer::ANALYZER_HTTP, ports); } @@ -394,4 +403,3 @@ hook finalize_http(c: connection) } } } - diff --git a/testing/btest/Baseline/plugins.hooks/output b/testing/btest/Baseline/plugins.hooks/output index b4333b458d..8697db08b7 100644 --- a/testing/btest/Baseline/plugins.hooks/output +++ b/testing/btest/Baseline/plugins.hooks/output @@ -44,7 +44,7 @@ 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , 
(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> @@ -88,7 +88,7 @@ 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> @@ -986,7 +986,7 @@ 0.000000 MetaHookPre CallFunction(Log::__create_stream, , 
(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) @@ -1030,7 +1030,7 @@ 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , 
(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) @@ -1927,7 +1927,7 @@ 0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, 
ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) -0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) @@ -1971,7 +1971,7 @@ 0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) -0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, 
max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) From 39814816af14328a27879bd79987bf124a39855e Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Mon, 11 Aug 2025 16:47:11 -0700 Subject: [PATCH 10/11] Tag truncated values with a flag, plus pack threading::Value better --- src/logging/Manager.cc | 3 +++ src/threading/SerialTypes.h | 19 ++++++++----------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index 3699a822d3..ee704a6b9d 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -1506,6 +1506,7 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st stream->max_total_string_bytes, total_string_bytes); if ( allowed_bytes < static_cast(s->Len()) ) { + lval.truncated = true; reporter->Weird("log_string_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_string_fields->Inc(); } @@ -1563,6 +1564,7 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st stream->max_total_container_elements, total_container_elements); if ( allowed_elements < static_cast(set->Length()) ) { + lval.truncated = true; reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_containers->Inc(); } @@ -1593,6 +1595,7 @@ threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, st stream->max_total_container_elements, total_container_elements); if ( allowed_elements < static_cast(vec->Size()) ) { + lval.truncated = true; reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); info->total_truncated_containers->Inc(); } diff --git a/src/threading/SerialTypes.h b/src/threading/SerialTypes.h index fb7336b9d5..e5d8dd4ded 100644 --- a/src/threading/SerialTypes.h +++ b/src/threading/SerialTypes.h @@ -126,8 +126,14 @@ struct Field { * those Vals supported). 
*/ struct Value { - TypeTag type; //! The type of the value. - TypeTag subtype; //! Inner type for sets and vectors. + TypeTag type; //! The type of the value. + TypeTag subtype; //! Inner type for sets and vectors. + bool present = false; //! False for optional record fields that are not set. + bool truncated = false; //! True if the string or container field was truncated. + + // For values read by the input framework, this can represent the line number + // containing this value. Used by the Ascii reader primarily. + int32_t line_number = -1; struct set_t { zeek_int_t size; @@ -186,8 +192,6 @@ struct Value { _val() { memset(this, 0, sizeof(_val)); } } val; - bool present = false; //! False for optional record fields that are not set. - /** * Constructor. * @@ -275,13 +279,6 @@ struct Value { void SetFileLineNumber(int line) { line_number = line; } int GetFileLineNumber() const { return line_number; } - -private: - friend class IPAddr; - - // For values read by the input framework, this can represent the line number - // containing this value. Used by the Ascii reader primarily. - int line_number = -1; }; } // namespace zeek::threading From a1c201fb8f1cef8f5571cf43d86928c5ba6ef1a3 Mon Sep 17 00:00:00 2001 From: Tim Wojtulewicz Date: Tue, 12 Aug 2025 16:03:05 -0700 Subject: [PATCH 11/11] Add NEWS entry for field length limiting --- NEWS | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/NEWS b/NEWS index 7dc2032041..7116f65555 100644 --- a/NEWS +++ b/NEWS @@ -21,6 +21,32 @@ New Functionality been added to allow observing ``Subscribe()`` and ``Unsubscribe()`` calls on backends by Zeek scripts. +- The ability to control the length of strings and containers in log output was added. The + maximum length of individual log fields can be set, as well as the total length of all + string or container fields in a single log record. This feature is controlled via four + new script-level variables: + + Log::default_max_field_string_bytes + Log::default_max_total_string_bytes + Log::default_max_field_container_elements + Log::default_max_total_container_elements + + When one of the ``field`` limits is reached, the individual field is truncated. When one + of the ``total`` limits is reached, all further strings will be returned as empty and all + further container elements will not be output. See the documentation for those variables + for more detail. + + The above variables control the truncation globally, but they can also be overridden for + log streams individually. This is controlled by fields of the same names (without the + ``default_`` prefix) that can be set when the log stream is created. + + Two new weirds were added to report the truncation: ``log_string_field_truncated`` and + ``log_container_field_truncated``. New metrics were added to track how many truncations + have occurred: ``zeek_log_writer_truncated_string_fields_total`` and + ``zeek_log_writer_truncated_containers_total``. The metrics are reported for each log + stream. + + Changed Functionality ---------------------
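For reviewers, a minimal Zeek script sketch of how the limits described in the NEWS entry above can be exercised. It assumes the global ``Log::default_*`` names from the NEWS text and the per-stream ``max_field_string_bytes`` field visible in the ``Log::create_stream`` baselines; the ``Example`` module, log path, and limit values are purely hypothetical:

    module Example;

    export {
        redef enum Log::ID += { LOG };

        type Info: record {
            ts:  time   &log;
            msg: string &log;
        };
    }

    # Tighten the global per-field string limit for all streams (default is 4096 bytes).
    redef Log::default_max_field_string_bytes = 1024;

    event zeek_init()
        {
        # Per-stream override: 0 disables per-field string truncation for this
        # stream only, mirroring the HTTP::LOG override in the baselines above.
        Log::create_stream(Example::LOG,
                           [$columns=Info, $path="example", $max_field_string_bytes=0]);
        }

With settings like these, the ``log_string_field_truncated`` weird and the ``zeek_log_writer_truncated_string_fields_total`` metric should become observable on a per-stream basis whenever a string field exceeds the applicable limit.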