diff --git a/CHANGES b/CHANGES index 48f2d4e494..6b4b045f09 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,30 @@ +8.1.0-dev.50 | 2025-08-12 17:42:46 -0700 + + * Add NEWS entry for field length limiting (Tim Wojtulewicz, Corelight) + + * Tag truncated values with a flag, plus pack threading::Value better (Tim Wojtulewicz, Corelight) + + * Remove length limiting on string fields for HTTP (Tim Wojtulewicz, Corelight) + + * Make total_size counter a member in logging::Manager (Tim Wojtulewicz, Corelight) + + * Remove using numeric_limits and just check for zero instead (Tim Wojtulewicz, Corelight) + + * Expand the size of the log-size filters for x509 (Tim Wojtulewicz, Corelight) + + * Add options to filter at the stream level as well as globally (Tim Wojtulewicz, Corelight) + + * Add a weird that gets emitted when strings/containers are over the limits (Tim Wojtulewicz, Corelight) + + * Add metrics to track string and container fields limited by length (Tim Wojtulewicz, Corelight) + + * Replace unused stream argument from RecordToLogRecord with WriterInfo (Tim Wojtulewicz, Corelight) + + This also adds a WriterInfo argument to ValToLogVal and passes the one from + RecordToLogRecord into it. + + * Implement string- and container-length filtering at the log record level (Tim Wojtulewicz, Corelight) + 8.1.0-dev.38 | 2025-08-12 12:38:24 -0700 * Bump pre-commit hooks (Benjamin Bannier, Corelight) diff --git a/NEWS b/NEWS index 7dc2032041..7116f65555 100644 --- a/NEWS +++ b/NEWS @@ -21,6 +21,32 @@ New Functionality been added to allow observing ``Subscribe()`` and ``Unsubscribe()`` calls on backends by Zeek scripts. +- The ability to control the length of strings and containers in log output was added. The + maximum length of individual log fields can be set, as well as the total length of all + string or container fields in a single log record. This feature is controlled via four + new script-level variables: + + Log::default_max_field_string_bytes + Log::default_max_total_string_bytes + Log::default_max_field_container_elements + Log::default_max_total_container_elements + + When one of the ``field`` limits is reached, the individual field is truncated. When one + of the ``total`` limits is reached, all further strings will be returned as empty and all + further container elements will not be output. See the documentation for those variables + for more detail. + + The above variables control the truncation globally, but they can also be set for log + streams individually. This is controlled by variables with the same names that can be + set when the log stream is created. + + Two new weirds were added to report the truncation: ``log_string_field_truncated`` and + ``log_container_field_truncated``. New metrics were added to track how many truncations + have occurred: ``zeek_log_writer_truncated_string_fields_total`` and + ``zeek_log_writer_truncated_containers_total``. The metrics are reported for each log + stream. + + Changed Functionality --------------------- diff --git a/VERSION b/VERSION index e2c9520277..d863755398 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -8.1.0-dev.38 +8.1.0-dev.50 diff --git a/scripts/base/files/x509/main.zeek b/scripts/base/files/x509/main.zeek index 56355e0675..94c905faee 100644 --- a/scripts/base/files/x509/main.zeek +++ b/scripts/base/files/x509/main.zeek @@ -105,6 +105,29 @@ export { ## Event for accessing logged records. global log_x509: event(rec: Info); + + ## The maximum number of bytes that a single string field can contain when + ## logging. 
If a string reaches this limit, the log output for the field will be + ## truncated. Setting this to zero disables the limiting. + ## + ## .. zeek:see:: Log::default_max_field_string_bytes + const default_max_field_string_bytes = Log::default_max_field_string_bytes &redef; + + ## The maximum number of elements a single container field can contain when + ## logging. If a container reaches this limit, the log output for the field will + ## be truncated. Setting this to zero disables the limiting. + ## + ## .. zeek:see:: Log::default_max_field_container_elements + const default_max_field_container_elements = 500 &redef; + + ## The maximum total number of container elements a record may log. This is the + ## sum of all container elements logged for the record. If this limit is reached, + ## all further containers will be logged as empty containers. If the limit is + ## reached while processing a container, the container will be truncated in the + ## output. Setting this to zero disables the limiting. + ## + ## .. zeek:see:: Log::default_max_total_container_elements + const default_max_total_container_elements = 1500 &redef; } global known_log_certs_with_broker: set[LogCertHash] &create_expire=relog_known_certificates_after &backend=Broker::MEMORY; @@ -117,7 +140,12 @@ redef record Files::Info += { event zeek_init() &priority=5 { - Log::create_stream(X509::LOG, Log::Stream($columns=Info, $ev=log_x509, $path="x509", $policy=log_policy)); + # x509 can have some very large certificates and very large sets of URIs. Expand the log size filters + # so that we're not truncating those. + Log::create_stream(X509::LOG, Log::Stream($columns=Info, $ev=log_x509, $path="x509", $policy=log_policy, + $max_field_string_bytes=X509::default_max_field_string_bytes, + $max_field_container_elements=X509::default_max_field_container_elements, + $max_total_container_elements=X509::default_max_total_container_elements)); # We use MIME types internally to distinguish between user and CA certificates. # The first certificate in a connection always gets tagged as user-cert, all @@ -225,4 +253,3 @@ event file_state_remove(f: fa_file) &priority=5 Log::write(LOG, f$info$x509); } - diff --git a/scripts/base/frameworks/logging/main.zeek b/scripts/base/frameworks/logging/main.zeek index 7d236f829e..114b33711e 100644 --- a/scripts/base/frameworks/logging/main.zeek +++ b/scripts/base/frameworks/logging/main.zeek @@ -422,6 +422,26 @@ export { ## .. :zeek:see:`Log::default_max_delay_queue_size` ## .. :zeek:see:`Log::set_max_delay_queue_size` max_delay_queue_size: count &default=default_max_delay_queue_size; + + ## Maximum string size for a field in a log record from this stream. + ## + ## .. :zeek:see:`Log::default_max_field_string_bytes` + max_field_string_bytes: count &default=Log::default_max_field_string_bytes; + + ## Maximum total string size in a log record from this stream. + ## + ## .. :zeek:see:`Log::default_max_total_string_bytes` + max_total_string_bytes: count &default=Log::default_max_total_string_bytes; + + ## Maximum container elements for a field in a log record from this stream. + ## + ## .. :zeek:see:`Log::default_max_field_container_elements` + max_field_container_elements: count &default=Log::default_max_field_container_elements; + + ## Maximum total container elements in a log record from this stream. + ## + ## .. 
:zeek:see:`Log::default_max_total_container_elements` + max_total_container_elements: count &default=Log::default_max_total_container_elements; }; ## Sentinel value for indicating that a filter was not found when looked up. diff --git a/scripts/base/init-bare.zeek b/scripts/base/init-bare.zeek index 66ce27876c..c77c96f472 100644 --- a/scripts/base/init-bare.zeek +++ b/scripts/base/init-bare.zeek @@ -3743,6 +3743,31 @@ export { ## higher than this limit, but it prevents runaway-sized log entries from causing ## problems. const max_log_record_size = 1024*1024*64 &redef; + + ## The maximum number of bytes that a single string field can contain when + ## logging. If a string reaches this limit, the log output for the field will be + ## truncated. Setting this to zero disables the limiting. + const default_max_field_string_bytes = 4096 &redef; + + ## The maximum number of elements a single container field can contain when + ## logging. If a container reaches this limit, the log output for the field will + ## be truncated. Setting this to zero disables the limiting. + const default_max_field_container_elements = 100 &redef; + + ## The maximum total bytes a record may log for string fields. This is the sum of + ## all bytes in string fields logged for the record. If this limit is reached, all + ## further string fields will be logged as empty strings. Any containers holding + ## string fields will be logged as empty containers. If the limit is reached while + ## processing a container holding string fields, the container will be truncated + ## in the log output. Setting this to zero disables the limiting. + const default_max_total_string_bytes = 256000 &redef; + + ## The maximum total number of container elements a record may log. This is the + ## sum of all container elements logged for the record. If this limit is reached, + ## all further containers will be logged as empty containers. If the limit is + ## reached while processing a container, the container will be truncated in the + ## output. Setting this to zero disables the limiting. + const default_max_total_container_elements = 500 &redef; } module POP3; diff --git a/scripts/base/protocols/http/main.zeek b/scripts/base/protocols/http/main.zeek index 983bb37d9a..27046dd642 100644 --- a/scripts/base/protocols/http/main.zeek +++ b/scripts/base/protocols/http/main.zeek @@ -139,6 +139,14 @@ export { ## out and request/response tracking reset to prevent unbounded ## state growth. option max_pending_requests = 100; + + ## The maximum number of bytes that a single string field can contain when + ## logging. If a string reaches this limit, the log output for the field will be + ## truncated. Setting this to zero disables the limiting. HTTP has no maximum + ## length for various fields such as the URI, so this is set to zero by default. + ## + ## .. zeek:see:: Log::default_max_field_string_bytes + const default_max_field_string_bytes = 0 &redef; } # Add the http state tracking fields to the connection record. @@ -156,7 +164,8 @@ redef likely_server_ports += { ports }; # Initialize the HTTP logging stream and ports. 
event zeek_init() &priority=5 { - Log::create_stream(HTTP::LOG, Log::Stream($columns=Info, $ev=log_http, $path="http", $policy=log_policy)); + Log::create_stream(HTTP::LOG, Log::Stream($columns=Info, $ev=log_http, $path="http", $policy=log_policy, + $max_field_string_bytes=HTTP::default_max_field_string_bytes)); Analyzer::register_for_ports(Analyzer::ANALYZER_HTTP, ports); } @@ -394,4 +403,3 @@ hook finalize_http(c: connection) } } } - diff --git a/scripts/policy/protocols/ssl/log-certs-base64.zeek b/scripts/policy/protocols/ssl/log-certs-base64.zeek index 0ec4834065..656281ba3f 100644 --- a/scripts/policy/protocols/ssl/log-certs-base64.zeek +++ b/scripts/policy/protocols/ssl/log-certs-base64.zeek @@ -10,6 +10,9 @@ redef record X509::Info += { cert: string &log &optional; }; +## Certificates can be large and we don't want to risk truncating the output. +redef X509::default_max_field_string_bytes = 0; + event x509_certificate(f: fa_file, cert_ref: opaque of x509, cert: X509::Certificate) &priority=1 { if ( ! f$info?$x509 ) diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index eb22695e05..ee704a6b9d 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -246,10 +246,17 @@ struct Manager::WriterInfo { std::shared_ptr total_writes; std::shared_ptr total_discarded_writes; + std::shared_ptr total_truncated_string_fields; + std::shared_ptr total_truncated_containers; WriterInfo(std::shared_ptr total_writes, - std::shared_ptr total_discarded_writes) - : total_writes(std::move(total_writes)), total_discarded_writes(std::move(total_discarded_writes)) {} + std::shared_ptr total_discarded_writes, + std::shared_ptr total_truncated_string_fields, + std::shared_ptr total_truncated_containers) + : total_writes(std::move(total_writes)), + total_discarded_writes(std::move(total_discarded_writes)), + total_truncated_string_fields(std::move(total_truncated_string_fields)), + total_truncated_containers(std::move(total_truncated_containers)) {} }; struct Manager::Stream { @@ -281,10 +288,13 @@ struct Manager::Stream { zeek_uint_t max_delay_queue_size = 1; bool evicting = false; + size_t max_field_string_bytes = 0; + size_t max_total_string_bytes = 0; + size_t max_field_container_elements = 0; + size_t max_total_container_elements = 0; ~Stream(); - const detail::DelayInfoPtr& GetDelayInfo(const detail::WriteContext& ctx); void EnqueueWriteForDelay(const detail::WriteContext& ctx); @@ -491,7 +501,15 @@ Manager::Manager() total_log_writer_discarded_writes_family( telemetry_mgr->CounterFamily("zeek", "log-writer-discarded-writes", {"writer", "module", "stream", "filter-name", "path"}, - "Total number of log writes discarded due to size limitations.")) { + "Total number of log writes discarded due to size limitations.")), + total_log_writer_truncated_string_fields_family( + telemetry_mgr->CounterFamily("zeek", "log-writer-truncated-string-fields", + {"writer", "module", "stream", "filter-name", "path"}, + "Total number of logged string fields limited by length")), + total_log_writer_truncated_container_fields_family( + telemetry_mgr->CounterFamily("zeek", "log-writer-truncated-containers", + {"writer", "module", "stream", "filter-name", "path"}, + "Total number of logged container fields limited by length")) { rotations_pending = 0; } @@ -659,9 +677,13 @@ bool Manager::CreateStream(EnumVal* id, RecordVal* sval) { streams[idx]->columns = columns->Ref()->AsRecordType(); streams[idx]->max_delay_interval = sval->GetField("max_delay_interval")->AsInterval(); streams[idx]->max_delay_queue_size = 
sval->GetField("max_delay_queue_size")->AsCount(); - streams[idx]->enable_remote = id::find_val("Log::enable_remote_logging")->AsBool(); + streams[idx]->max_field_string_bytes = sval->GetField("max_field_string_bytes")->AsCount(); + streams[idx]->max_total_string_bytes = sval->GetField("max_total_string_bytes")->AsCount(); + streams[idx]->max_field_container_elements = sval->GetField("max_field_container_elements")->AsCount(); + streams[idx]->max_total_container_elements = sval->GetField("max_total_container_elements")->AsCount(); + DBG_LOG(DBG_LOGGING, "Created new logging stream '%s', raising event %s", streams[idx]->name.c_str(), event ? streams[idx]->event->Name() : ""); @@ -1148,10 +1170,12 @@ bool Manager::WriteToFilters(const Manager::Stream* stream, zeek::RecordValPtr c assert(info); // Alright, can do the write now. - size_t total_size = 0; - auto rec = RecordToLogRecord(stream, filter, columns.get(), total_size); + total_record_size = 0; + total_string_bytes = 0; + total_container_elements = 0; + auto rec = RecordToLogRecord(w->second, filter, stream, columns.get()); - if ( total_size > max_log_record_size ) { + if ( total_record_size > max_log_record_size ) { reporter->Weird("log_record_too_large", util::fmt("%s", stream->name.c_str())); w->second->total_discarded_writes->Inc(); continue; @@ -1388,7 +1412,21 @@ bool Manager::SetMaxDelayQueueSize(const EnumValPtr& id, zeek_uint_t queue_size) return true; } -threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t& total_size) { +static size_t calculate_allowed(size_t field_size, size_t max_field_size, size_t max_total_size, size_t current_total) { + size_t allowed = field_size; + + if ( max_field_size != 0 && max_field_size < allowed ) { + allowed = max_field_size; + } + if ( max_total_size != 0 && (max_total_size - current_total) < allowed ) { + allowed = max_total_size - current_total; + } + + return allowed; +} + + +threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty) { if ( ! 
val ) return {ty->Tag(), false}; @@ -1398,7 +1436,7 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t case TYPE_BOOL: case TYPE_INT: lval.val.int_val = val->AsInt(); - total_size += sizeof(lval.val.int_val); + total_record_size += sizeof(lval.val.int_val); break; case TYPE_ENUM: { @@ -1417,14 +1455,14 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t lval.val.string_val.length = 0; } - total_size += lval.val.string_val.length; + total_record_size += lval.val.string_val.length; break; } case TYPE_COUNT: lval.val.uint_val = val->AsCount(); - total_size += sizeof(lval.val.uint_val); + total_record_size += sizeof(lval.val.uint_val); break; case TYPE_PORT: { @@ -1441,35 +1479,48 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t lval.val.port_val.port = p & ~PORT_SPACE_MASK; lval.val.port_val.proto = pt; - total_size += lval.val.port_val.size(); + total_record_size += lval.val.port_val.size(); break; } case TYPE_SUBNET: val->AsSubNet()->Get().ConvertToThreadingValue(&lval.val.subnet_val); - total_size += lval.val.subnet_val.size(); + total_record_size += lval.val.subnet_val.size(); break; case TYPE_ADDR: val->AsAddr()->Get().ConvertToThreadingValue(&lval.val.addr_val); - total_size += lval.val.addr_val.size(); + total_record_size += lval.val.addr_val.size(); break; case TYPE_DOUBLE: case TYPE_TIME: case TYPE_INTERVAL: lval.val.double_val = val->AsDouble(); - total_size += sizeof(lval.val.double_val); + total_record_size += sizeof(lval.val.double_val); break; case TYPE_STRING: { const String* s = val->AsString()->AsString(); - char* buf = new char[s->Len()]; - memcpy(buf, s->Bytes(), s->Len()); + size_t allowed_bytes = calculate_allowed(static_cast(s->Len()), stream->max_field_string_bytes, + stream->max_total_string_bytes, total_string_bytes); + + if ( allowed_bytes < static_cast(s->Len()) ) { + lval.truncated = true; + reporter->Weird("log_string_field_truncated", util::fmt("%s", stream->name.c_str())); + info->total_truncated_string_fields->Inc(); + } + + if ( allowed_bytes == 0 ) + return lval; + + char* buf = new char[allowed_bytes]; + memcpy(buf, s->Bytes(), allowed_bytes); lval.val.string_val.data = buf; - lval.val.string_val.length = s->Len(); - total_size += lval.val.string_val.length; + lval.val.string_val.length = allowed_bytes; + total_record_size += allowed_bytes; + total_string_bytes += allowed_bytes; break; } @@ -1479,7 +1530,7 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t auto len = strlen(s); lval.val.string_val.data = util::copy_string(s, len); lval.val.string_val.length = len; - total_size += lval.val.string_val.length; + total_record_size += lval.val.string_val.length; break; } @@ -1491,7 +1542,7 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t auto len = strlen(s); lval.val.string_val.data = util::copy_string(s, len); lval.val.string_val.length = len; - total_size += lval.val.string_val.length; + total_record_size += lval.val.string_val.length; break; } @@ -1508,33 +1559,62 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t auto& set_t = tbl_t->GetIndexTypes()[0]; bool is_managed = ZVal::IsManagedType(set_t); - zeek_int_t set_length = set->Length(); - lval.val.set_val.vals = new threading::Value*[set_length]; + size_t allowed_elements = + calculate_allowed(static_cast(set->Length()), stream->max_field_container_elements, + stream->max_total_container_elements, total_container_elements); - for ( zeek_int_t i 
= 0; i < set_length && total_size < max_log_record_size; i++ ) { + if ( allowed_elements < static_cast(set->Length()) ) { + lval.truncated = true; + reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); + info->total_truncated_containers->Inc(); + } + + if ( allowed_elements == 0 ) + return lval; + + lval.val.set_val.vals = new threading::Value*[allowed_elements]; + + for ( size_t i = 0; i < allowed_elements && total_record_size < max_log_record_size; i++ ) { std::optional s_i = ZVal(set->Idx(i), set_t); - lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(s_i, set_t.get(), total_size)); + lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(info, stream, s_i, set_t.get())); if ( is_managed ) ZVal::DeleteManagedType(*s_i); lval.val.set_val.size++; } + total_container_elements += lval.val.set_val.size; + break; } case TYPE_VECTOR: { VectorVal* vec = val->AsVector(); - zeek_int_t vec_length = vec->Size(); - lval.val.vector_val.vals = new threading::Value*[vec_length]; + + size_t allowed_elements = + calculate_allowed(static_cast(vec->Size()), stream->max_field_container_elements, + stream->max_total_container_elements, total_container_elements); + + if ( allowed_elements < static_cast(vec->Size()) ) { + lval.truncated = true; + reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str())); + info->total_truncated_containers->Inc(); + } + + if ( allowed_elements == 0 ) + return lval; + + lval.val.vector_val.vals = new threading::Value*[allowed_elements]; auto& vv = vec->RawVec(); auto& vt = vec->GetType()->Yield(); - for ( zeek_int_t i = 0; i < vec_length && total_size < max_log_record_size; i++ ) { - lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(vv[i], vt.get(), total_size)); + for ( size_t i = 0; i < allowed_elements && total_record_size < max_log_record_size; i++ ) { + lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(info, stream, vv[i], vt.get())); lval.val.vector_val.size++; } + total_container_elements += lval.val.vector_val.size; + break; } @@ -1544,8 +1624,8 @@ threading::Value Manager::ValToLogVal(std::optional& val, Type* ty, size_t return lval; } -detail::LogRecord Manager::RecordToLogRecord(const Stream* stream, Filter* filter, RecordVal* columns, - size_t& total_size) { +detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, + RecordVal* columns) { RecordValPtr ext_rec; if ( filter->num_ext_fields > 0 ) { @@ -1595,9 +1675,9 @@ detail::LogRecord Manager::RecordToLogRecord(const Stream* stream, Filter* filte } if ( val ) - vals.emplace_back(ValToLogVal(val, vt, total_size)); + vals.emplace_back(ValToLogVal(info, stream, val, vt)); - if ( total_size > max_log_record_size ) { + if ( total_record_size > max_log_record_size ) { return {}; } } @@ -1648,8 +1728,11 @@ WriterFrontend* Manager::CreateWriter(EnumVal* id, EnumVal* writer, WriterBacken {"filter-name", instantiating_filter}, {"path", info->path}}; - WriterInfo* winfo = new WriterInfo(zeek::log_mgr->total_log_writer_writes_family->GetOrAdd(labels), - zeek::log_mgr->total_log_writer_discarded_writes_family->GetOrAdd(labels)); + WriterInfo* winfo = + new WriterInfo(zeek::log_mgr->total_log_writer_writes_family->GetOrAdd(labels), + zeek::log_mgr->total_log_writer_discarded_writes_family->GetOrAdd(labels), + zeek::log_mgr->total_log_writer_truncated_string_fields_family->GetOrAdd(labels), + zeek::log_mgr->total_log_writer_truncated_container_fields_family->GetOrAdd(labels)); 
winfo->type = writer->Ref()->AsEnumVal(); winfo->writer = nullptr; winfo->open_time = run_state::network_time; diff --git a/src/logging/Manager.h b/src/logging/Manager.h index 63336d67cb..ddf149c4e8 100644 --- a/src/logging/Manager.h +++ b/src/logging/Manager.h @@ -422,8 +422,8 @@ private: bool TraverseRecord(Stream* stream, Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude, const std::string& path, const std::list& indices); - detail::LogRecord RecordToLogRecord(const Stream* stream, Filter* filter, RecordVal* columns, size_t& total_size); - threading::Value ValToLogVal(std::optional& val, Type* ty, size_t& total_size); + detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns); + threading::Value ValToLogVal(WriterInfo* info, const Stream* stream, std::optional& val, Type* ty); Stream* FindStream(EnumVal* id); void RemoveDisabledWriters(Stream* stream); @@ -448,11 +448,17 @@ private: int rotations_pending = 0; // Number of rotations not yet finished. FuncPtr rotation_format_func; FuncPtr log_stream_policy_hook; + size_t max_log_record_size = 0; + size_t total_record_size = 0; + size_t total_string_bytes = 0; + size_t total_container_elements = 0; std::shared_ptr total_log_stream_writes_family; std::shared_ptr total_log_writer_writes_family; std::shared_ptr total_log_writer_discarded_writes_family; + std::shared_ptr total_log_writer_truncated_string_fields_family; + std::shared_ptr total_log_writer_truncated_container_fields_family; zeek_uint_t last_delay_token = 0; std::vector active_writes; diff --git a/src/threading/SerialTypes.h b/src/threading/SerialTypes.h index fb7336b9d5..e5d8dd4ded 100644 --- a/src/threading/SerialTypes.h +++ b/src/threading/SerialTypes.h @@ -126,8 +126,14 @@ struct Field { * those Vals supported). */ struct Value { - TypeTag type; //! The type of the value. - TypeTag subtype; //! Inner type for sets and vectors. + TypeTag type; //! The type of the value. + TypeTag subtype; //! Inner type for sets and vectors. + bool present = false; //! False for optional record fields that are not set. + bool truncated = false; //! True if the string or container field was truncated. + + // For values read by the input framework, this can represent the line number + // containing this value. Used by the Ascii reader primarily. + int32_t line_number = -1; struct set_t { zeek_int_t size; @@ -186,8 +192,6 @@ struct Value { _val() { memset(this, 0, sizeof(_val)); } } val; - bool present = false; //! False for optional record fields that are not set. - /** * Constructor. * @@ -275,13 +279,6 @@ struct Value { void SetFileLineNumber(int line) { line_number = line; } int GetFileLineNumber() const { return line_number; } - -private: - friend class IPAddr; - - // For values read by the input framework, this can represent the line number - // containing this value. Used by the Ascii reader primarily. 
- int line_number = -1; }; } // namespace zeek::threading diff --git a/testing/btest/Baseline/plugins.hooks/output b/testing/btest/Baseline/plugins.hooks/output index d944c123ed..8697db08b7 100644 --- a/testing/btest/Baseline/plugins.hooks/output +++ b/testing/btest/Baseline/plugins.hooks/output @@ -38,17 +38,17 @@ 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) 
-> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, 
max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Analyzer::Logging::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Broker::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Cluster::LOG)) -> @@ -82,17 +82,17 @@ 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Notice::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Tunnel::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Weird::LOG, default)) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> 
-0.000000 MetaHookPost CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, 
max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> 0.000000 MetaHookPost CallFunction(Option::set, , (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, )) -> 0.000000 MetaHookPost CallFunction(Option::set_change_handler, , (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> 0.000000 MetaHookPost CallFunction(Option::set_change_handler, , 
(Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> @@ -980,17 +980,17 @@ 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, 
[columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, 
event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Analyzer::Logging::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Broker::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Cluster::LOG)) @@ -1024,17 +1024,17 @@ 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Notice::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Tunnel::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Weird::LOG, default)) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, 
event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, 
max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) 0.000000 MetaHookPre CallFunction(Option::set, , (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, )) 0.000000 MetaHookPre CallFunction(Option::set_change_handler, , (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) 0.000000 MetaHookPre CallFunction(Option::set_change_handler, , 
(Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) @@ -1921,17 +1921,17 @@ 0.000000 | HookCallFunction Log::__add_filter(Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=, config={}, policy=]) -0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, 
max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) +0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction 
Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Log::add_default_filter(Analyzer::Logging::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Broker::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG) @@ -1965,17 +1965,17 @@ 0.000000 | HookCallFunction Log::add_stream_filters(Notice::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Tunnel::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Weird::LOG, default) -0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, 
[columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) -0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]) +0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction 
Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) +0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]) 0.000000 | HookCallFunction Option::set(Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, ) 0.000000 | HookCallFunction Option::set_change_handler(Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100) 0.000000 | HookCallFunction Option::set_change_handler(Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if ( == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr 
b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr index 57a839051e..a3577fb43e 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-2/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -error in <...>/main.zeek, line 642 and <...>/static-errors.zeek, line 12: type clash (Log::PostDelayCallback and post_delay_cb) -error in <...>/static-errors.zeek, line 12 and <...>/main.zeek, line 642: type mismatch (post_delay_cb and Log::PostDelayCallback) +error in <...>/main.zeek, line 662 and <...>/static-errors.zeek, line 12: type clash (Log::PostDelayCallback and post_delay_cb) +error in <...>/static-errors.zeek, line 12 and <...>/main.zeek, line 662: type mismatch (post_delay_cb and Log::PostDelayCallback) error in <...>/static-errors.zeek, line 12: argument type mismatch in function call (Log::delay(id, to_any_coerce rec, post_delay_cb)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr index e39286ce49..597cb83a0b 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-3/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -error in <...>/main.zeek, line 647 and <...>/static-errors.zeek, line 8: type clash (Log::DelayToken and 42) -error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 647: type mismatch (42 and Log::DelayToken) +error in <...>/main.zeek, line 667 and <...>/static-errors.zeek, line 8: type clash (Log::DelayToken and 42) +error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 667: type mismatch (42 and Log::DelayToken) error in <...>/static-errors.zeek, line 8: argument type mismatch in function call (Log::delay_finish(id, to_any_coerce rec, 42)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr index 31db1e4af6..4ad3a42493 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors-4/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-error in <...>/main.zeek, line 647 and <...>/static-errors.zeek, line 8: arithmetic mixed with non-arithmetic (Log::DelayToken and 42) -error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 647: type mismatch (42 and Log::DelayToken) +error in <...>/main.zeek, line 667 and <...>/static-errors.zeek, line 8: arithmetic mixed with non-arithmetic (Log::DelayToken and 42) +error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 667: type mismatch (42 and Log::DelayToken) error in <...>/static-errors.zeek, line 8: argument type mismatch in function call (Log::delay_finish(id, to_any_coerce rec, 42)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr index 098dbf9bee..dcd0d2de37 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.delay.static-errors/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -error in <...>/main.zeek, line 642 and <...>/static-errors.zeek, line 16: type clash (Log::PostDelayCallback and post_delay_cb) -error in <...>/static-errors.zeek, line 16 and <...>/main.zeek, line 642: type mismatch (post_delay_cb and Log::PostDelayCallback) +error in <...>/main.zeek, line 662 and <...>/static-errors.zeek, line 16: type clash (Log::PostDelayCallback and post_delay_cb) +error in <...>/static-errors.zeek, line 16 and <...>/main.zeek, line 662: type mismatch (post_delay_cb and Log::PostDelayCallback) error in <...>/static-errors.zeek, line 16: argument type mismatch in function call (Log::delay(id, to_any_coerce rec, post_delay_cb)) diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout new file mode 100644 index 0000000000..8257d72161 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 9.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/test.log new file mode 100644 index 0000000000..93c73f8a84 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDE,(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-2/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout new file mode 100644 index 0000000000..57f1fd5f12 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 12.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/test.log new file mode 100644 index 0000000000..c44d2e2c5c --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDE,(empty) (empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-3/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout new file mode 100644 index 0000000000..29978a303b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/test.log new file mode 100644 index 0000000000..542d53cf44 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-4/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout new file mode 100644 index 0000000000..f2e2b796bd --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 1.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/test.log new file mode 100644 index 0000000000..2b02e6e655 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-5/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout new file mode 100644 index 0000000000..29978a303b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/test.log new file mode 100644 index 0000000000..4f595571fe --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ (empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-6/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout new file mode 100644 index 0000000000..8257d72161 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 9.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log new file mode 100644 index 0000000000..93c73f8a84 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDE,(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-2/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout new file mode 100644 index 0000000000..57f1fd5f12 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 12.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log new file mode 100644 index 0000000000..c44d2e2c5c --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDE,(empty) (empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-3/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout new file mode 100644 index 0000000000..29978a303b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log new file mode 100644 index 0000000000..542d53cf44 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-4/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout new file mode 100644 index 0000000000..f2e2b796bd --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 1.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log new file mode 100644 index 0000000000..2b02e6e655 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-5/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout new file mode 100644 index 0000000000..29978a303b --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log new file mode 100644 index 0000000000..4f595571fe --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ (empty) +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log new file mode 100644 index 0000000000..32aea67226 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream-6/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout new file mode 100644 index 0000000000..ee93469f35 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 20.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log new file mode 100644 index 0000000000..9bcebb3086 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path test +#open XXXX-XX-XX-XX-XX-XX +#fields strings1 strings2 +#types vector[string] vector[string] +ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log new file mode 100644 index 0000000000..fe5cd88cd4 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting-stream/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek - +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout new file mode 100644 index 0000000000..a9548ba949 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/.stdout @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 2.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 20.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0 +Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/test.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/test.log new file mode 100644 index 0000000000..9bcebb3086 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/test.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path test
+#open XXXX-XX-XX-XX-XX-XX
+#fields strings1 strings2
+#types vector[string] vector[string]
+ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE
+#close XXXX-XX-XX-XX-XX-XX
diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/weird.log b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/weird.log
new file mode 100644
index 0000000000..ef2c56bc6b
--- /dev/null
+++ b/testing/btest/Baseline/scripts.base.frameworks.logging.field-length-limiting/weird.log
@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path weird
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source
+#types time string addr port addr port string string bool string string
+XXXXXXXXXX.XXXXXX - - - - - log_s Test: F zeek -
+#close XXXX-XX-XX-XX-XX-XX
diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.telemetry/telemetry.log b/testing/btest/Baseline/scripts.base.frameworks.logging.telemetry/telemetry.log
index f98d15e8f4..6b52728ce8 100644
--- a/testing/btest/Baseline/scripts.base.frameworks.logging.telemetry/telemetry.log
+++ b/testing/btest/Baseline/scripts.base.frameworks.logging.telemetry/telemetry.log
@@ -16,4 +16,10 @@ XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_writes_total filter_name,module,p
 XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0
 XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0
 XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0
+XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0
+XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0
+XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0
+XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0
+XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0
+XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0
 #close XXXX-XX-XX-XX-XX-XX
diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek
new file mode 100644
index 0000000000..a67acc521d
--- /dev/null
+++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting-stream.zeek
@@ -0,0 +1,106 @@
+# @TEST-DOC: Tests field length limiting at the stream level instead of at the global level.
+#
+# @TEST-EXEC: zeek -b test.zeek %INPUT
+# @TEST-EXEC: btest-diff test.log
+# @TEST-EXEC: btest-diff .stdout
+# @TEST-EXEC: btest-diff weird.log
+
+# @TEST-START-FILE test.zeek
+
+@load base/frameworks/telemetry
+@load base/frameworks/notice/weird
+
+module Test;
+
+export {
+	redef enum Log::ID += { LOG };
+
+	type Info: record {
+		strings1: vector of string &log;
+		strings2: vector of string &log;
+	};
+}
+
+event log_telemetry()
+	{
+	local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*");
+	for (i in storage_metrics)
+		{
+		local m = storage_metrics[i];
+		print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value;
+		}
+	}
+
+event zeek_init() &priority=-5
+	{
+	local rec = Test::Info();
+	local i = 0;
+
+	# Create two vectors containing 10 strings with 10 characters each.
+	# This leaves us with 200 total characters to work with.
+	while ( ++i <= 10 )
+		{
+		rec$strings1 += "ABCDEFGHIJ";
+		rec$strings2 += "ABCDEFGHIJ";
+		}
+
+	Log::write(Test::LOG, rec);
+
+	# Do this as a separate event so the weirds get processed before we log the
+	# telemetry output. See the comment below for the first test as to why.
+	event log_telemetry();
+	}
+
+# @TEST-END-FILE test.zeek
+
+# Limit the individual fields to 5 bytes, but keep the total maximum large enough that it
+# will write all of the fields.
+event zeek_init() &priority=10
+	{
+	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_field_string_bytes=5]);
+	}
+
+# @TEST-START-NEXT
+
+# Leave the individual field bytes alone, but set the maximum length to where it cuts off
+# the second field in the middle of a string.
+event zeek_init() &priority=10
+	{
+	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_string_bytes=115]);
+	}
+
+# @TEST-START-NEXT
+
+# Leave the individual field bytes alone, but set the maximum length to where it cuts off
+# the first field in the middle of a string. Second field should log empty strings.
+event zeek_init() &priority=10
+	{
+	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_string_bytes=85]);
+	}
+
+# @TEST-START-NEXT
+
+# Limit the individual containers to 5 items, but keep the total maximum large enough that
+# it will write all of the fields.
+event zeek_init() &priority=10
+	{
+	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_field_container_elements=5]);
+	}
+
+# @TEST-START-NEXT
+
+# Leave the individual field items alone, but set the maximum length to where it cuts off
+# the second field in the middle.
+event zeek_init() &priority=10
+	{
+	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_container_elements=15]);
+	}
+
+# @TEST-START-NEXT
+
+# Leave the individual field bytes alone, but set the maximum length to where it cuts off
+# the first field in the middle. Second field should log empty containers.
+event zeek_init() &priority=10
+	{
+	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_container_elements=5]);
+	}
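The stream-level test above exercises the new per-stream fields of Log::Stream
(max_field_string_bytes, max_total_string_bytes, max_field_container_elements and
max_total_container_elements). As a minimal sketch of how a user script might combine
them when creating its own stream; the MyApp module, its Info record, the "myapp" path
and the limit values here are hypothetical illustrations, not part of this patch:

    module MyApp;

    export {
        redef enum Log::ID += { LOG };

        type Info: record {
            msg: string &log;
            tags: vector of string &log;
        };
    }

    event zeek_init()
        {
        # Hypothetical limits: cap each logged string field at 1024 bytes and each
        # container field at 100 elements for this stream only. Streams that leave
        # these fields unset keep the global Log::default_* values.
        Log::create_stream(MyApp::LOG, Log::Stream($columns=Info, $path="myapp",
            $max_field_string_bytes=1024,
            $max_field_container_elements=100));
        }
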
diff --git a/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek
new file mode 100644
index 0000000000..f1ae881847
--- /dev/null
+++ b/testing/btest/scripts/base/frameworks/logging/field-length-limiting.zeek
@@ -0,0 +1,92 @@
+# @TEST-DOC: Test the options that limit string and container lengths when logging
+#
+# @TEST-EXEC: zeek -b test.zeek %INPUT
+# @TEST-EXEC: btest-diff test.log
+# @TEST-EXEC: btest-diff .stdout
+# @TEST-EXEC: btest-diff weird.log
+
+# @TEST-START-FILE test.zeek
+
+@load base/frameworks/telemetry
+@load base/frameworks/notice/weird
+
+module Test;
+
+export {
+	redef enum Log::ID += { LOG };
+
+	type Info: record {
+		strings1: vector of string &log;
+		strings2: vector of string &log;
+	};
+}
+
+event log_telemetry()
+	{
+	local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*");
+	for (i in storage_metrics)
+		{
+		local m = storage_metrics[i];
+		print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value;
+		}
+	}
+
+event zeek_init()
+	{
+	Log::create_stream(LOG, [$columns=Info, $path="test"]);
+
+	local rec = Test::Info();
+	local i = 0;
+
+	# Create two vectors containing 10 strings with 10 characters each.
+	# This leaves us with 200 total characters to work with.
+	while ( ++i <= 10 )
+		{
+		rec$strings1 += "ABCDEFGHIJ";
+		rec$strings2 += "ABCDEFGHIJ";
+		}
+
+	Log::write(Test::LOG, rec);
+
+	# Do this as a separate event so the weirds get processed before we log the
+	# telemetry output. See the comment below for the first test as to why.
+	event log_telemetry();
+	}
+
+# @TEST-END-FILE test.zeek
+
+# Limit the individual fields to 5 bytes, but keep the total maximum large enough that it
+# will write all of the fields. The weird test for this one will be off since it will
+# limit the name of the weird. It will pass, but the fields in the log will get truncated
+# like they're supposed to.
+redef Log::default_max_field_string_bytes = 5;
+
+# @TEST-START-NEXT
+
+# Leave the individual field bytes alone, but set the maximum length to where it cuts off
+# the second field in the middle of a string.
+redef Log::default_max_total_string_bytes = 115;
+
+# @TEST-START-NEXT
+
+# Leave the individual field bytes alone, but set the maximum length to where it cuts off
+# the first field in the middle of a string. Second field should log empty strings.
+redef Log::default_max_total_string_bytes = 85;
+
+# @TEST-START-NEXT
+
+# Limit the individual containers to 5 items, but keep the total maximum large enough that
+# it will write all of the fields.
+redef Log::default_max_field_container_elements = 5;
+
+# @TEST-START-NEXT
+
+# Leave the individual field items alone, but set the maximum length to where it cuts off
+# the second field in the middle.
+redef Log::default_max_total_container_elements = 15;
+
+# @TEST-START-NEXT
+
+# Leave the individual field bytes alone, but set the maximum length to where it cuts off
+# the first field in the middle. Second field should log empty containers.
+redef Log::default_max_total_container_elements = 5;
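The global test above drives the same truncation through the Log::default_* values,
which apply to every stream that does not override them. A brief sketch of how a site
policy might tune or disable them; the concrete numbers are arbitrary examples for
illustration, not values recommended by this patch:

    # Truncate any single logged string field at 4096 bytes and any single
    # container field at 256 elements, across all streams without their own limits.
    redef Log::default_max_field_string_bytes = 4096;
    redef Log::default_max_field_container_elements = 256;

    # Setting a limit to zero disables that check entirely, which is what the
    # length-checking test below does for all four limits.
    redef Log::default_max_total_string_bytes = 0;
    redef Log::default_max_total_container_elements = 0;
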
diff --git a/testing/btest/scripts/base/frameworks/logging/length-checking.zeek b/testing/btest/scripts/base/frameworks/logging/length-checking.zeek
index 7316b2c4d0..95e966c3d6 100644
--- a/testing/btest/scripts/base/frameworks/logging/length-checking.zeek
+++ b/testing/btest/scripts/base/frameworks/logging/length-checking.zeek
@@ -19,6 +19,12 @@ module Test;
 
+# Disable the string and container length filtering.
+redef Log::default_max_field_string_bytes = 0;
+redef Log::default_max_total_string_bytes = 0;
+redef Log::default_max_field_container_elements = 0;
+redef Log::default_max_total_container_elements = 0;
+
 export {
 	redef enum Log::ID += { LOG };