Mirror of https://github.com/zeek/zeek.git, synced 2025-10-01 22:28:20 +00:00
Merge remote-tracking branch 'origin/topic/timw/limit-string-and-container-lengths-in-logs'
* origin/topic/timw/limit-string-and-container-lengths-in-logs:
  Add NEWS entry for field length limiting
  Tag truncated values with a flag, plus pack threading::Value better
  Remove length limiting on string fields for HTTP
  Make total_size counter a member in logging::Manager
  Remove using numeric_limits and just check for zero instead
  Expand the size of the log-size filters for x509
  Add options to filter at the stream level as well as globally
  Add a weird that gets emitted when strings/containers are over the limits
  Add metrics to track string and container fields limited by length
  Replace unused stream argument from RecordToLogRecord with WriterInfo
  Implement string- and container-length filtering at the log record level
commit 75ba63eb3f
56 changed files with 884 additions and 128 deletions
CHANGES | 27

@@ -1,3 +1,30 @@
8.1.0-dev.50 | 2025-08-12 17:42:46 -0700

* Add NEWS entry for field length limiting (Tim Wojtulewicz, Corelight)

* Tag truncated values with a flag, plus pack threading::Value better (Tim Wojtulewicz, Corelight)

* Remove length limiting on string fields for HTTP (Tim Wojtulewicz, Corelight)

* Make total_size counter a member in logging::Manager (Tim Wojtulewicz, Corelight)

* Remove using numeric_limits and just check for zero instead (Tim Wojtulewicz, Corelight)

* Expand the size of the log-size filters for x509 (Tim Wojtulewicz, Corelight)

* Add options to filter at the stream level as well as globally (Tim Wojtulewicz, Corelight)

* Add a weird that gets emitted when strings/containers are over the limits (Tim Wojtulewicz, Corelight)

* Add metrics to track string and container fields limited by length (Tim Wojtulewicz, Corelight)

* Replace unused stream argument from RecordToLogRecord with WriterInfo (Tim Wojtulewicz, Corelight)

  This also adds a WriterInfo argument to ValToLogVal and passes the one from
  RecordToLogRecord into it.

* Implement string- and container-length filtering at the log record level (Tim Wojtulewicz, Corelight)

8.1.0-dev.38 | 2025-08-12 12:38:24 -0700

* Bump pre-commit hooks (Benjamin Bannier, Corelight)
NEWS | 26

@@ -21,6 +21,32 @@ New Functionality
  been added to allow observing ``Subscribe()`` and ``Unsubscribe()`` calls on
  backends by Zeek scripts.

- The ability to control the length of strings and containers in log output was added. The
  maximum length of individual log fields can be set, as well as the total length of all
  string or container fields in a single log record. This feature is controlled via four
  new script-level variables:

      Log::default_max_field_string_bytes
      Log::default_max_total_string_bytes
      Log::default_max_field_container_elements
      Log::default_max_total_container_elements

  When one of the ``field`` limits is reached, the individual field is truncated. When one
  of the ``total`` limits is reached, all further strings will be returned as empty and all
  further container elements will not be output. See the documentation for those variables
  for more detail.

  The above variables control the truncation globally, but they can also be set for log
  streams individually. This is controlled by variables with the same names that can be
  set when the log stream is created.

  Two new weirds were added to report the truncation: ``log_string_field_truncated`` and
  ``log_container_field_truncated``. New metrics were added to track how many truncations
  have occurred: ``zeek_log_writer_truncated_string_fields_total`` and
  ``zeek_log_writer_truncated_containers_total``. The metrics are reported for each log
  stream.
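As a quick illustration of the tunables described in this entry, the sketch below shows how a site script might adjust them. It is hypothetical: ``MyModule::LOG``, ``MyModule::Info``, and all numeric values are made-up examples that assume the usual stream and record declarations already exist; only the variable and field names come from this change.

      # Hypothetical site tuning; the values here are examples only.
      # Tighten the global per-field string cap (the shipped default is 4096 bytes).
      redef Log::default_max_field_string_bytes = 2048;

      event zeek_init()
          {
          # Give one stream its own, larger limits when creating it.
          Log::create_stream(MyModule::LOG,
                             Log::Stream($columns=MyModule::Info, $path="mymodule",
                                         $max_field_string_bytes=16384,
                                         $max_total_container_elements=2000));
          }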
Changed Functionality
---------------------
VERSION | 2

@@ -1 +1 @@
-8.1.0-dev.38
+8.1.0-dev.50
@@ -105,6 +105,29 @@ export {
	## Event for accessing logged records.
	global log_x509: event(rec: Info);

+	## The maximum number of bytes that a single string field can contain when
+	## logging. If a string reaches this limit, the log output for the field will be
+	## truncated. Setting this to zero disables the limiting.
+	##
+	## .. zeek:see:: Log::default_max_field_string_bytes
+	const default_max_field_string_bytes = Log::default_max_field_string_bytes &redef;
+
+	## The maximum number of elements a single container field can contain when
+	## logging. If a container reaches this limit, the log output for the field will
+	## be truncated. Setting this to zero disables the limiting.
+	##
+	## .. zeek:see:: Log::default_max_field_container_elements
+	const default_max_field_container_elements = 500 &redef;
+
+	## The maximum total number of container elements a record may log. This is the
+	## sum of all container elements logged for the record. If this limit is reached,
+	## all further containers will be logged as empty containers. If the limit is
+	## reached while processing a container, the container will be truncated in the
+	## output. Setting this to zero disables the limiting.
+	##
+	## .. zeek:see:: Log::default_max_total_container_elements
+	const default_max_total_container_elements = 1500 &redef;
}

global known_log_certs_with_broker: set[LogCertHash] &create_expire=relog_known_certificates_after &backend=Broker::MEMORY;

@@ -117,7 +140,12 @@ redef record Files::Info += {
event zeek_init() &priority=5
	{
-	Log::create_stream(X509::LOG, Log::Stream($columns=Info, $ev=log_x509, $path="x509", $policy=log_policy));
+	# x509 can have some very large certificates and very large sets of URIs. Expand the log size filters
+	# so that we're not truncating those.
+	Log::create_stream(X509::LOG, Log::Stream($columns=Info, $ev=log_x509, $path="x509", $policy=log_policy,
+	                                          $max_field_string_bytes=X509::default_max_field_string_bytes,
+	                                          $max_field_container_elements=X509::default_max_field_container_elements,
+	                                          $max_total_container_elements=X509::default_max_total_container_elements));

	# We use MIME types internally to distinguish between user and CA certificates.
	# The first certificate in a connection always gets tagged as user-cert, all

@@ -225,4 +253,3 @@ event file_state_remove(f: fa_file) &priority=5
	Log::write(LOG, f$info$x509);
	}
@@ -422,6 +422,26 @@
		## .. :zeek:see:`Log::default_max_delay_queue_size`
		## .. :zeek:see:`Log::set_max_delay_queue_size`
		max_delay_queue_size: count &default=default_max_delay_queue_size;

+		## Maximum string size for a field in a log record from this stream.
+		##
+		## .. :zeek:see:`Log::default_max_field_string_bytes`
+		max_field_string_bytes: count &default=Log::default_max_field_string_bytes;
+
+		## Maximum total string size in a log record from this stream.
+		##
+		## .. :zeek:see:`Log::default_max_total_string_bytes`
+		max_total_string_bytes: count &default=Log::default_max_total_string_bytes;
+
+		## Maximum container elements for a field in a log record from this stream.
+		##
+		## .. :zeek:see:`Log::default_max_field_container_elements`
+		max_field_container_elements: count &default=Log::default_max_field_container_elements;
+
+		## Maximum total container elements in a log record from this stream.
+		##
+		## .. :zeek:see:`Log::default_max_total_container_elements`
+		max_total_container_elements: count &default=Log::default_max_total_container_elements;
	};

	## Sentinel value for indicating that a filter was not found when looked up.
@@ -3743,6 +3743,31 @@
	## higher than this limit, but it prevents runaway-sized log entries from causing
	## problems.
	const max_log_record_size = 1024*1024*64 &redef;

+	## The maximum number of bytes that a single string field can contain when
+	## logging. If a string reaches this limit, the log output for the field will be
+	## truncated. Setting this to zero disables the limiting.
+	const default_max_field_string_bytes = 4096 &redef;
+
+	## The maximum number of elements a single container field can contain when
+	## logging. If a container reaches this limit, the log output for the field will
+	## be truncated. Setting this to zero disables the limiting.
+	const default_max_field_container_elements = 100 &redef;
+
+	## The maximum total bytes a record may log for string fields. This is the sum of
+	## all bytes in string fields logged for the record. If this limit is reached, all
+	## further string fields will be logged as empty strings. Any containers holding
+	## string fields will be logged as empty containers. If the limit is reached while
+	## processing a container holding string fields, the container will be truncated
+	## in the log output. Setting this to zero disables the limiting.
+	const default_max_total_string_bytes = 256000 &redef;
+
+	## The maximum total number of container elements a record may log. This is the
+	## sum of all container elements logged for the record. If this limit is reached,
+	## all further containers will be logged as empty containers. If the limit is
+	## reached while processing a container, the container will be truncated in the
+	## output. Setting this to zero disables the limiting.
+	const default_max_total_container_elements = 500 &redef;
}

module POP3;
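To make the interplay of the ``field`` and ``total`` defaults added above concrete, consider a hypothetical record whose fields are vectors of 120 elements each: every vector is first capped at ``default_max_field_container_elements`` (100 elements), and once five such capped vectors have pushed the running total to ``default_max_total_container_elements`` (500), any remaining container fields in that record are written as empty containers. Each of these truncations raises the new weird and increments the new per-stream metric.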
@@ -139,6 +139,14 @@ export {
	## out and request/response tracking reset to prevent unbounded
	## state growth.
	option max_pending_requests = 100;

+	## The maximum number of bytes that a single string field can contain when
+	## logging. If a string reaches this limit, the log output for the field will be
+	## truncated. Setting this to zero disables the limiting. HTTP has no maximum
+	## length for various fields such as the URI, so this is set to zero by default.
+	##
+	## .. zeek:see:: Log::default_max_field_string_bytes
+	const default_max_field_string_bytes = 0 &redef;
}

# Add the http state tracking fields to the connection record.

@@ -156,7 +164,8 @@ redef likely_server_ports += { ports };
# Initialize the HTTP logging stream and ports.
event zeek_init() &priority=5
	{
-	Log::create_stream(HTTP::LOG, Log::Stream($columns=Info, $ev=log_http, $path="http", $policy=log_policy));
+	Log::create_stream(HTTP::LOG, Log::Stream($columns=Info, $ev=log_http, $path="http", $policy=log_policy,
+	                                          $max_field_string_bytes=HTTP::default_max_field_string_bytes));
	Analyzer::register_for_ports(Analyzer::ANALYZER_HTTP, ports);
	}

@@ -394,4 +403,3 @@ hook finalize_http(c: connection)
	}
}
}
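Because the new HTTP constant is ``&redef``, operators who do want a cap on HTTP string fields (for example, on very long URIs) can opt back in from a local script. The value below is a hypothetical example, not a shipped default:

      # Hypothetical local override; 8192 bytes is an arbitrary example value.
      redef HTTP::default_max_field_string_bytes = 8192;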
@@ -10,6 +10,9 @@ redef record X509::Info += {
	cert: string &log &optional;
};

+## Certificates can be large and we don't want to risk truncating the output.
+redef X509::default_max_field_string_bytes = 0;

event x509_certificate(f: fa_file, cert_ref: opaque of x509, cert: X509::Certificate) &priority=1
	{
	if ( ! f$info?$x509 )
@@ -246,10 +246,17 @@ struct Manager::WriterInfo {
    std::shared_ptr<telemetry::Counter> total_writes;
    std::shared_ptr<telemetry::Counter> total_discarded_writes;
+   std::shared_ptr<telemetry::Counter> total_truncated_string_fields;
+   std::shared_ptr<telemetry::Counter> total_truncated_containers;

    WriterInfo(std::shared_ptr<telemetry::Counter> total_writes,
-              std::shared_ptr<telemetry::Counter> total_discarded_writes)
-        : total_writes(std::move(total_writes)), total_discarded_writes(std::move(total_discarded_writes)) {}
+              std::shared_ptr<telemetry::Counter> total_discarded_writes,
+              std::shared_ptr<telemetry::Counter> total_truncated_string_fields,
+              std::shared_ptr<telemetry::Counter> total_truncated_containers)
+        : total_writes(std::move(total_writes)),
+          total_discarded_writes(std::move(total_discarded_writes)),
+          total_truncated_string_fields(std::move(total_truncated_string_fields)),
+          total_truncated_containers(std::move(total_truncated_containers)) {}
};

struct Manager::Stream {
@@ -281,10 +288,13 @@ struct Manager::Stream {
    zeek_uint_t max_delay_queue_size = 1;
    bool evicting = false;

+   size_t max_field_string_bytes = 0;
+   size_t max_total_string_bytes = 0;
+   size_t max_field_container_elements = 0;
+   size_t max_total_container_elements = 0;

    ~Stream();

    const detail::DelayInfoPtr& GetDelayInfo(const detail::WriteContext& ctx);

    void EnqueueWriteForDelay(const detail::WriteContext& ctx);
@@ -491,7 +501,15 @@ Manager::Manager()
      total_log_writer_discarded_writes_family(
          telemetry_mgr->CounterFamily("zeek", "log-writer-discarded-writes",
                                       {"writer", "module", "stream", "filter-name", "path"},
-                                      "Total number of log writes discarded due to size limitations.")) {
+                                      "Total number of log writes discarded due to size limitations.")),
+     total_log_writer_truncated_string_fields_family(
+         telemetry_mgr->CounterFamily("zeek", "log-writer-truncated-string-fields",
+                                      {"writer", "module", "stream", "filter-name", "path"},
+                                      "Total number of logged string fields limited by length")),
+     total_log_writer_truncated_container_fields_family(
+         telemetry_mgr->CounterFamily("zeek", "log-writer-truncated-containers",
+                                      {"writer", "module", "stream", "filter-name", "path"},
+                                      "Total number of logged container fields limited by length")) {
    rotations_pending = 0;
}
@@ -659,9 +677,13 @@ bool Manager::CreateStream(EnumVal* id, RecordVal* sval) {
    streams[idx]->columns = columns->Ref()->AsRecordType();
    streams[idx]->max_delay_interval = sval->GetField("max_delay_interval")->AsInterval();
    streams[idx]->max_delay_queue_size = sval->GetField("max_delay_queue_size")->AsCount();

    streams[idx]->enable_remote = id::find_val("Log::enable_remote_logging")->AsBool();

+   streams[idx]->max_field_string_bytes = sval->GetField("max_field_string_bytes")->AsCount();
+   streams[idx]->max_total_string_bytes = sval->GetField("max_total_string_bytes")->AsCount();
+   streams[idx]->max_field_container_elements = sval->GetField("max_field_container_elements")->AsCount();
+   streams[idx]->max_total_container_elements = sval->GetField("max_total_container_elements")->AsCount();

    DBG_LOG(DBG_LOGGING, "Created new logging stream '%s', raising event %s", streams[idx]->name.c_str(),
            event ? streams[idx]->event->Name() : "<none>");
@@ -1148,10 +1170,12 @@ bool Manager::WriteToFilters(const Manager::Stream* stream, zeek::RecordValPtr c
        assert(info);

        // Alright, can do the write now.
-       size_t total_size = 0;
-       auto rec = RecordToLogRecord(stream, filter, columns.get(), total_size);
+       total_record_size = 0;
+       total_string_bytes = 0;
+       total_container_elements = 0;
+       auto rec = RecordToLogRecord(w->second, filter, stream, columns.get());

-       if ( total_size > max_log_record_size ) {
+       if ( total_record_size > max_log_record_size ) {
            reporter->Weird("log_record_too_large", util::fmt("%s", stream->name.c_str()));
            w->second->total_discarded_writes->Inc();
            continue;
@@ -1388,7 +1412,21 @@ bool Manager::SetMaxDelayQueueSize(const EnumValPtr& id, zeek_uint_t queue_size)
    return true;
}

-threading::Value Manager::ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t& total_size) {
+static size_t calculate_allowed(size_t field_size, size_t max_field_size, size_t max_total_size, size_t current_total) {
+    size_t allowed = field_size;
+
+    if ( max_field_size != 0 && max_field_size < allowed ) {
+        allowed = max_field_size;
+    }
+    if ( max_total_size != 0 && (max_total_size - current_total) < allowed ) {
+        allowed = max_total_size - current_total;
+    }
+
+    return allowed;
+}
+
+threading::Value Manager::ValToLogVal(WriterInfo* info, const Stream* stream, std::optional<ZVal>& val, Type* ty) {
    if ( ! val )
        return {ty->Tag(), false};
@@ -1398,7 +1436,7 @@ threading::Value Manager::ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t
        case TYPE_BOOL:
        case TYPE_INT:
            lval.val.int_val = val->AsInt();
-           total_size += sizeof(lval.val.int_val);
+           total_record_size += sizeof(lval.val.int_val);
            break;

        case TYPE_ENUM: {
@@ -1417,14 +1455,14 @@ threading::Value Manager::ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t
                lval.val.string_val.length = 0;
            }

-           total_size += lval.val.string_val.length;
+           total_record_size += lval.val.string_val.length;

            break;
        }

        case TYPE_COUNT:
            lval.val.uint_val = val->AsCount();
-           total_size += sizeof(lval.val.uint_val);
+           total_record_size += sizeof(lval.val.uint_val);
            break;

        case TYPE_PORT: {
@@ -1441,35 +1479,48 @@ threading::Value Manager::ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t

            lval.val.port_val.port = p & ~PORT_SPACE_MASK;
            lval.val.port_val.proto = pt;
-           total_size += lval.val.port_val.size();
+           total_record_size += lval.val.port_val.size();
            break;
        }

        case TYPE_SUBNET:
            val->AsSubNet()->Get().ConvertToThreadingValue(&lval.val.subnet_val);
-           total_size += lval.val.subnet_val.size();
+           total_record_size += lval.val.subnet_val.size();
            break;

        case TYPE_ADDR:
            val->AsAddr()->Get().ConvertToThreadingValue(&lval.val.addr_val);
-           total_size += lval.val.addr_val.size();
+           total_record_size += lval.val.addr_val.size();
            break;

        case TYPE_DOUBLE:
        case TYPE_TIME:
        case TYPE_INTERVAL:
            lval.val.double_val = val->AsDouble();
-           total_size += sizeof(lval.val.double_val);
+           total_record_size += sizeof(lval.val.double_val);
            break;

        case TYPE_STRING: {
            const String* s = val->AsString()->AsString();
-           char* buf = new char[s->Len()];
-           memcpy(buf, s->Bytes(), s->Len());
+           size_t allowed_bytes = calculate_allowed(static_cast<size_t>(s->Len()), stream->max_field_string_bytes,
+                                                    stream->max_total_string_bytes, total_string_bytes);
+
+           if ( allowed_bytes < static_cast<size_t>(s->Len()) ) {
+               lval.truncated = true;
+               reporter->Weird("log_string_field_truncated", util::fmt("%s", stream->name.c_str()));
+               info->total_truncated_string_fields->Inc();
+           }
+
+           if ( allowed_bytes == 0 )
+               return lval;
+
+           char* buf = new char[allowed_bytes];
+           memcpy(buf, s->Bytes(), allowed_bytes);
+
            lval.val.string_val.data = buf;
-           lval.val.string_val.length = s->Len();
-           total_size += lval.val.string_val.length;
+           lval.val.string_val.length = allowed_bytes;
+           total_record_size += allowed_bytes;
+           total_string_bytes += allowed_bytes;
            break;
        }
@@ -1479,7 +1530,7 @@ threading::Value Manager::ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t
            auto len = strlen(s);
            lval.val.string_val.data = util::copy_string(s, len);
            lval.val.string_val.length = len;
-           total_size += lval.val.string_val.length;
+           total_record_size += lval.val.string_val.length;
            break;
        }
@@ -1491,7 +1542,7 @@ threading::Value Manager::ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t
            auto len = strlen(s);
            lval.val.string_val.data = util::copy_string(s, len);
            lval.val.string_val.length = len;
-           total_size += lval.val.string_val.length;
+           total_record_size += lval.val.string_val.length;
            break;
        }
@@ -1508,33 +1559,62 @@ threading::Value Manager::ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t
            auto& set_t = tbl_t->GetIndexTypes()[0];
            bool is_managed = ZVal::IsManagedType(set_t);

-           zeek_int_t set_length = set->Length();
-           lval.val.set_val.vals = new threading::Value*[set_length];
+           size_t allowed_elements =
+               calculate_allowed(static_cast<size_t>(set->Length()), stream->max_field_container_elements,
+                                 stream->max_total_container_elements, total_container_elements);

-           for ( zeek_int_t i = 0; i < set_length && total_size < max_log_record_size; i++ ) {
+           if ( allowed_elements < static_cast<size_t>(set->Length()) ) {
+               lval.truncated = true;
+               reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str()));
+               info->total_truncated_containers->Inc();
+           }
+
+           if ( allowed_elements == 0 )
+               return lval;
+
+           lval.val.set_val.vals = new threading::Value*[allowed_elements];
+
+           for ( size_t i = 0; i < allowed_elements && total_record_size < max_log_record_size; i++ ) {
                std::optional<ZVal> s_i = ZVal(set->Idx(i), set_t);
-               lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(s_i, set_t.get(), total_size));
+               lval.val.set_val.vals[i] = new threading::Value(ValToLogVal(info, stream, s_i, set_t.get()));
                if ( is_managed )
                    ZVal::DeleteManagedType(*s_i);
                lval.val.set_val.size++;
            }

+           total_container_elements += lval.val.set_val.size;
+
            break;
        }

        case TYPE_VECTOR: {
            VectorVal* vec = val->AsVector();
-           zeek_int_t vec_length = vec->Size();
-           lval.val.vector_val.vals = new threading::Value*[vec_length];
+           size_t allowed_elements =
+               calculate_allowed(static_cast<size_t>(vec->Size()), stream->max_field_container_elements,
+                                 stream->max_total_container_elements, total_container_elements);
+
+           if ( allowed_elements < static_cast<size_t>(vec->Size()) ) {
+               lval.truncated = true;
+               reporter->Weird("log_container_field_truncated", util::fmt("%s", stream->name.c_str()));
+               info->total_truncated_containers->Inc();
+           }
+
+           if ( allowed_elements == 0 )
+               return lval;
+
+           lval.val.vector_val.vals = new threading::Value*[allowed_elements];

            auto& vv = vec->RawVec();
            auto& vt = vec->GetType()->Yield();

-           for ( zeek_int_t i = 0; i < vec_length && total_size < max_log_record_size; i++ ) {
-               lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(vv[i], vt.get(), total_size));
+           for ( size_t i = 0; i < allowed_elements && total_record_size < max_log_record_size; i++ ) {
+               lval.val.vector_val.vals[i] = new threading::Value(ValToLogVal(info, stream, vv[i], vt.get()));
                lval.val.vector_val.size++;
            }

+           total_container_elements += lval.val.vector_val.size;
+
            break;
        }
@@ -1544,8 +1624,8 @@ threading::Value Manager::ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t
    return lval;
}

-detail::LogRecord Manager::RecordToLogRecord(const Stream* stream, Filter* filter, RecordVal* columns,
-                                             size_t& total_size) {
+detail::LogRecord Manager::RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream,
+                                             RecordVal* columns) {
    RecordValPtr ext_rec;

    if ( filter->num_ext_fields > 0 ) {
@@ -1595,9 +1675,9 @@ detail::LogRecord Manager::RecordToLogRecord(const Stream* stream, Filter* filte
        }

        if ( val )
-           vals.emplace_back(ValToLogVal(val, vt, total_size));
+           vals.emplace_back(ValToLogVal(info, stream, val, vt));

-       if ( total_size > max_log_record_size ) {
+       if ( total_record_size > max_log_record_size ) {
            return {};
        }
    }
@@ -1648,8 +1728,11 @@ WriterFrontend* Manager::CreateWriter(EnumVal* id, EnumVal* writer, WriterBacken
                                          {"filter-name", instantiating_filter},
                                          {"path", info->path}};

-   WriterInfo* winfo = new WriterInfo(zeek::log_mgr->total_log_writer_writes_family->GetOrAdd(labels),
-                                      zeek::log_mgr->total_log_writer_discarded_writes_family->GetOrAdd(labels));
+   WriterInfo* winfo =
+       new WriterInfo(zeek::log_mgr->total_log_writer_writes_family->GetOrAdd(labels),
+                      zeek::log_mgr->total_log_writer_discarded_writes_family->GetOrAdd(labels),
+                      zeek::log_mgr->total_log_writer_truncated_string_fields_family->GetOrAdd(labels),
+                      zeek::log_mgr->total_log_writer_truncated_container_fields_family->GetOrAdd(labels));
    winfo->type = writer->Ref()->AsEnumVal();
    winfo->writer = nullptr;
    winfo->open_time = run_state::network_time;
@@ -422,8 +422,8 @@ private:
    bool TraverseRecord(Stream* stream, Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude,
                        const std::string& path, const std::list<int>& indices);

-   detail::LogRecord RecordToLogRecord(const Stream* stream, Filter* filter, RecordVal* columns, size_t& total_size);
-   threading::Value ValToLogVal(std::optional<ZVal>& val, Type* ty, size_t& total_size);
+   detail::LogRecord RecordToLogRecord(WriterInfo* info, Filter* filter, const Stream* stream, RecordVal* columns);
+   threading::Value ValToLogVal(WriterInfo* info, const Stream* stream, std::optional<ZVal>& val, Type* ty);

    Stream* FindStream(EnumVal* id);
    void RemoveDisabledWriters(Stream* stream);
@@ -448,11 +448,17 @@ private:
    int rotations_pending = 0; // Number of rotations not yet finished.
    FuncPtr rotation_format_func;
    FuncPtr log_stream_policy_hook;

    size_t max_log_record_size = 0;
+   size_t total_record_size = 0;
+   size_t total_string_bytes = 0;
+   size_t total_container_elements = 0;

    std::shared_ptr<telemetry::CounterFamily> total_log_stream_writes_family;
    std::shared_ptr<telemetry::CounterFamily> total_log_writer_writes_family;
    std::shared_ptr<telemetry::CounterFamily> total_log_writer_discarded_writes_family;
+   std::shared_ptr<telemetry::CounterFamily> total_log_writer_truncated_string_fields_family;
+   std::shared_ptr<telemetry::CounterFamily> total_log_writer_truncated_container_fields_family;

    zeek_uint_t last_delay_token = 0;
    std::vector<detail::WriteContext> active_writes;
@@ -126,8 +126,14 @@ struct Field {
 * those Vals supported).
 */
struct Value {
    TypeTag type;    //! The type of the value.
    TypeTag subtype; //! Inner type for sets and vectors.
+   bool present = false;   //! False for optional record fields that are not set.
+   bool truncated = false; //! True if the string or container field was truncated.
+
+   // For values read by the input framework, this can represent the line number
+   // containing this value. Used by the Ascii reader primarily.
+   int32_t line_number = -1;

    struct set_t {
        zeek_int_t size;
@@ -186,8 +192,6 @@ struct Value {
        _val() { memset(this, 0, sizeof(_val)); }
    } val;

-   bool present = false; //! False for optional record fields that are not set.

    /**
     * Constructor.
     *
@@ -275,13 +279,6 @@ struct Value {

    void SetFileLineNumber(int line) { line_number = line; }
    int GetFileLineNumber() const { return line_number; }

-private:
-   friend class IPAddr;
-
-   // For values read by the input framework, this can represent the line number
-   // containing this value. Used by the Ascii reader primarily.
-   int line_number = -1;
};

} // namespace zeek::threading
@@ -38,17 +38,17 @@
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__add_filter, <frame>, (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Analyzer::Logging::LOG)) -> <no result>
0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Broker::LOG)) -> <no result>
0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Cluster::LOG)) -> <no result>
@@ -82,17 +82,17 @@
0.000000 MetaHookPost CallFunction(Log::add_stream_filters, <frame>, (Notice::LOG, default)) -> <no result>
0.000000 MetaHookPost CallFunction(Log::add_stream_filters, <frame>, (Tunnel::LOG, default)) -> <no result>
0.000000 MetaHookPost CallFunction(Log::add_stream_filters, <frame>, (Weird::LOG, default)) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])) -> <no result>
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])) -> <no result>
0.000000 MetaHookPost CallFunction(Option::set, <frame>, (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>)) -> <no result>
|
0.000000 MetaHookPost CallFunction(Option::set, <frame>, (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>)) -> <no result>
|
||||||
0.000000 MetaHookPost CallFunction(Option::set_change_handler, <frame>, (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> <no result>
|
0.000000 MetaHookPost CallFunction(Option::set_change_handler, <frame>, (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> <no result>
|
||||||
0.000000 MetaHookPost CallFunction(Option::set_change_handler, <frame>, (Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> <no result>
|
0.000000 MetaHookPost CallFunction(Option::set_change_handler, <frame>, (Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)) -> <no result>
|
@@ -980,17 +980,17 @@
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
|
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
|
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
|
0.000000 MetaHookPre CallFunction(Log::__add_filter, <frame>, (Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Analyzer::Logging::LOG))
0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Broker::LOG))
0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Cluster::LOG))
@@ -1024,17 +1024,17 @@
0.000000 MetaHookPre CallFunction(Log::add_stream_filters, <frame>, (Notice::LOG, default))
0.000000 MetaHookPre CallFunction(Log::add_stream_filters, <frame>, (Tunnel::LOG, default))
0.000000 MetaHookPre CallFunction(Log::add_stream_filters, <frame>, (Weird::LOG, default))
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000]))
|
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500]))
|
||||||
0.000000 MetaHookPre CallFunction(Option::set, <frame>, (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>))
|
0.000000 MetaHookPre CallFunction(Option::set, <frame>, (Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>))
|
||||||
0.000000 MetaHookPre CallFunction(Option::set_change_handler, <frame>, (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100))
|
0.000000 MetaHookPre CallFunction(Option::set_change_handler, <frame>, (Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100))
|
||||||
0.000000 MetaHookPre CallFunction(Option::set_change_handler, <frame>, (Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100))
|
0.000000 MetaHookPre CallFunction(Option::set_change_handler, <frame>, (Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100))
|
@@ -1921,17 +1921,17 @@
0.000000 | HookCallFunction Log::__add_filter(Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
|
0.000000 | HookCallFunction Log::__add_filter(Notice::LOG, [name=default, writer=Log::WRITER_ASCII, path=notice, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
|
||||||
0.000000 | HookCallFunction Log::__add_filter(Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
|
0.000000 | HookCallFunction Log::__add_filter(Tunnel::LOG, [name=default, writer=Log::WRITER_ASCII, path=tunnel, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
|
||||||
0.000000 | HookCallFunction Log::__add_filter(Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
|
0.000000 | HookCallFunction Log::__add_filter(Weird::LOG, [name=default, writer=Log::WRITER_ASCII, path=weird, path_func=<uninitialized>, include=<uninitialized>, exclude=<uninitialized>, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<4692973652431675528>: function(path:string) : void, interv=0 secs, postprocessor=<uninitialized>, config={}, policy=<uninitialized>])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::add_default_filter(Analyzer::Logging::LOG)
0.000000 | HookCallFunction Log::add_default_filter(Broker::LOG)
0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG)
@@ -1965,17 +1965,17 @@
0.000000 | HookCallFunction Log::add_stream_filters(Notice::LOG, default)
0.000000 | HookCallFunction Log::add_stream_filters(Tunnel::LOG, default)
0.000000 | HookCallFunction Log::add_stream_filters(Weird::LOG, default)
0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=Analyzer::Logging::log_analyzer: event(rec:Analyzer::Logging::Info), path=analyzer, policy=Analyzer::Logging::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=<uninitialized>, path=broker, policy=Broker::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=<uninitialized>, path=cluster, policy=Cluster::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config: event(rec:Config::Info), path=config, policy=Config::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn: event(rec:Conn::Info), path=conn, policy=Conn::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Files::LOG, [columns=Files::Info, ev=Files::log_files: event(rec:Files::Info), path=files, policy=Files::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(HTTP::LOG, [columns=HTTP::Info, ev=HTTP::log_http: event(rec:HTTP::Info), path=http, policy=HTTP::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=0, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Notice::ALARM_LOG, [columns=Notice::Info, ev=<uninitialized>, path=notice_alarm, policy=Notice::log_policy_alarm: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Notice::LOG, [columns=Notice::Info, ev=Notice::log_notice: event(rec:Notice::Info), path=notice, policy=Notice::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Tunnel::LOG, [columns=Tunnel::Info, ev=<uninitialized>, path=tunnel, policy=Tunnel::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000])
|
0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird: event(rec:Weird::Info), path=weird, policy=Weird::log_policy: Log::PolicyHook, event_groups={}, max_delay_interval=200.0 msecs, max_delay_queue_size=1000, max_field_string_bytes=4096, max_total_string_bytes=256000, max_field_container_elements=100, max_total_container_elements=500])
|
||||||
0.000000 | HookCallFunction Option::set(Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>)
|
0.000000 | HookCallFunction Option::set(Site::local_nets, {64:ff9b:1::<...>/15,fc00::<...>/10,::/128,2002:ffff:ffff::/48,::1/128,fec0::/10,2002:cb00:7100::/40,2002:c633:6400::<...>/4,2002:a00::/24,100::<...>/8,2001:2::<...>/12,2002:c000:200::/40,2002:f000::/20,2002:7f00::/24,2001::/23,2002:6440::/26,2002:c000::<...>/16,2002:ac10::/28,2002:a9fe::<...>/16,2002:c612::/31,2002::/24,fe80::/10,2001:db8::/32,2002:ef00::<...>/24,2002:e000::/40,2002:c0a8::<...>/24}, <skip-config-log>)
|
||||||
0.000000 | HookCallFunction Option::set_change_handler(Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)
|
0.000000 | HookCallFunction Option::set_change_handler(Analyzer::Logging::failure_data_max_size, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)
|
||||||
0.000000 | HookCallFunction Option::set_change_handler(Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)
|
0.000000 | HookCallFunction Option::set_change_handler(Broker::peer_counts_as_iosource, Config::config_option_changed: function(ID:string, new_value:any, location:string) : any{ if (<skip-config-log> == Config::location) return (Config::new_value)Config::log = Config::Info($ts=network_time(), $id=Config::ID, $old_value=Config::format_value(lookup_ID(Config::ID)), $new_value=Config::format_value(Config::new_value))if ( != Config::location) Config::log$location = Config::locationLog::write(Config::LOG, to_any_coerce Config::log)return (Config::new_value)}, -100)
|
@@ -1,4 +1,4 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
error in <...>/main.zeek, line 642 and <...>/static-errors.zeek, line 12: type clash (Log::PostDelayCallback and post_delay_cb)
error in <...>/main.zeek, line 662 and <...>/static-errors.zeek, line 12: type clash (Log::PostDelayCallback and post_delay_cb)
error in <...>/static-errors.zeek, line 12 and <...>/main.zeek, line 642: type mismatch (post_delay_cb and Log::PostDelayCallback)
error in <...>/static-errors.zeek, line 12 and <...>/main.zeek, line 662: type mismatch (post_delay_cb and Log::PostDelayCallback)
error in <...>/static-errors.zeek, line 12: argument type mismatch in function call (Log::delay(id, to_any_coerce rec, post_delay_cb))
@@ -1,4 +1,4 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
error in <...>/main.zeek, line 647 and <...>/static-errors.zeek, line 8: type clash (Log::DelayToken and 42)
error in <...>/main.zeek, line 667 and <...>/static-errors.zeek, line 8: type clash (Log::DelayToken and 42)
error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 647: type mismatch (42 and Log::DelayToken)
error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 667: type mismatch (42 and Log::DelayToken)
error in <...>/static-errors.zeek, line 8: argument type mismatch in function call (Log::delay_finish(id, to_any_coerce rec, 42))
@@ -1,4 +1,4 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
error in <...>/main.zeek, line 647 and <...>/static-errors.zeek, line 8: arithmetic mixed with non-arithmetic (Log::DelayToken and 42)
error in <...>/main.zeek, line 667 and <...>/static-errors.zeek, line 8: arithmetic mixed with non-arithmetic (Log::DelayToken and 42)
error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 647: type mismatch (42 and Log::DelayToken)
error in <...>/static-errors.zeek, line 8 and <...>/main.zeek, line 667: type mismatch (42 and Log::DelayToken)
error in <...>/static-errors.zeek, line 8: argument type mismatch in function call (Log::delay_finish(id, to_any_coerce rec, 42))
@@ -1,4 +1,4 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
error in <...>/main.zeek, line 642 and <...>/static-errors.zeek, line 16: type clash (Log::PostDelayCallback and post_delay_cb)
error in <...>/main.zeek, line 662 and <...>/static-errors.zeek, line 16: type clash (Log::PostDelayCallback and post_delay_cb)
error in <...>/static-errors.zeek, line 16 and <...>/main.zeek, line 642: type mismatch (post_delay_cb and Log::PostDelayCallback)
error in <...>/static-errors.zeek, line 16 and <...>/main.zeek, line 662: type mismatch (post_delay_cb and Log::PostDelayCallback)
error in <...>/static-errors.zeek, line 16: argument type mismatch in function call (Log::delay(id, to_any_coerce rec, post_delay_cb))
@@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 9.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
@@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path test
#open XXXX-XX-XX-XX-XX-XX
#fields strings1 strings2
#types vector[string] vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDE,(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty)
#close XXXX-XX-XX-XX-XX-XX
@@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path weird
#open XXXX-XX-XX-XX-XX-XX
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source
#types time string addr port addr port string string bool string string
XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek -
#close XXXX-XX-XX-XX-XX-XX
@@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 12.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
@@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path test
#open XXXX-XX-XX-XX-XX-XX
#fields strings1 strings2
#types vector[string] vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDE,(empty) (empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty)
#close XXXX-XX-XX-XX-XX-XX
@@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path weird
#open XXXX-XX-XX-XX-XX-XX
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source
#types time string addr port addr port string string bool string string
XXXXXXXXXX.XXXXXX - - - - - log_string_field_truncated Test::LOG F zeek -
#close XXXX-XX-XX-XX-XX-XX
@@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0
@@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path test
#open XXXX-XX-XX-XX-XX-XX
#fields strings1 strings2
#types vector[string] vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ
#close XXXX-XX-XX-XX-XX-XX
@@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path weird
#open XXXX-XX-XX-XX-XX-XX
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source
#types time string addr port addr port string string bool string string
XXXXXXXXXX.XXXXXX - - - - - log_container_field_truncated Test::LOG F zeek -
#close XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 1.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ	ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_container_field_truncated	Test::LOG	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ	(empty)
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_container_field_truncated	Test::LOG	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 9.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ	ABCDEFGHIJ,ABCDE,(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty)
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_string_field_truncated	Test::LOG	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 12.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDE,(empty)	(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty),(empty)
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_string_field_truncated	Test::LOG	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ	ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_container_field_truncated	Test::LOG	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 1.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ	ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_container_field_truncated	Test::LOG	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 2.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ,ABCDEFGHIJ	(empty)
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_container_field_truncated	Test::LOG	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 20.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE	ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_string_field_truncated	Test::LOG	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,5 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 2.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_string_fields_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 20.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Weird, weird, Weird::LOG, Log::WRITER_ASCII], 0.0
Telemetry::COUNTER, zeek, zeek_log_writer_truncated_containers_total, [filter_name, module, path, stream, writer], [default, Test, test, Test::LOG, Log::WRITER_ASCII], 0.0
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	test
#open	XXXX-XX-XX-XX-XX-XX
#fields	strings1	strings2
#types	vector[string]	vector[string]
ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE	ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE,ABCDE
#close	XXXX-XX-XX-XX-XX-XX
@ -0,0 +1,11 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
#separator \x09
#set_separator	,
#empty_field	(empty)
#unset_field	-
#path	weird
#open	XXXX-XX-XX-XX-XX-XX
#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer	source
#types	time	string	addr	port	addr	port	string	string	bool	string	string
XXXXXXXXXX.XXXXXX	-	-	-	-	-	log_s	Test:	F	zeek	-
#close	XXXX-XX-XX-XX-XX-XX
@ -16,4 +16,10 @@ XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_writes_total filter_name,module,p
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_discarded_writes_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_containers_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,Conn,conn,Conn::LOG,Log::WRITER_ASCII 0.0
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,DNS,dns,DNS::LOG,Log::WRITER_ASCII 0.0
XXXXXXXXXX.XXXXXX zeek counter zeek_log_writer_truncated_string_fields_total filter_name,module,path,stream,writer default,HTTP,http,HTTP::LOG,Log::WRITER_ASCII 0.0
#close XXXX-XX-XX-XX-XX-XX
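The truncation counters in the hunk above are ordinary Zeek telemetry, so besides landing in the telemetry log they can be read back from script land or scraped like any other metric. A minimal sketch, assuming nothing beyond the Telemetry::collect_metrics() call and record fields that the tests below already use; the zeek_done handler and the wildcard are illustrative choices, not part of this commit:

@load base/frameworks/telemetry

# Sketch: dump the per-stream truncation counters once at shutdown.
event zeek_done()
	{
	local ms = Telemetry::collect_metrics("zeek", "log_writer_truncated*");
	for ( i in ms )
		print ms[i]$opts$name, ms[i]$label_values, ms[i]$value;
	}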
@ -0,0 +1,106 @@
# @TEST-DOC: Tests field length limiting at the stream level instead of at the global level.
#
# @TEST-EXEC: zeek -b test.zeek %INPUT
# @TEST-EXEC: btest-diff test.log
# @TEST-EXEC: btest-diff .stdout
# @TEST-EXEC: btest-diff weird.log

# @TEST-START-FILE test.zeek

@load base/frameworks/telemetry
@load base/frameworks/notice/weird

module Test;

export {
	redef enum Log::ID += { LOG };

	type Info: record {
		strings1: vector of string &log;
		strings2: vector of string &log;
	};
}

event log_telemetry()
	{
	local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*");
	for (i in storage_metrics)
		{
		local m = storage_metrics[i];
		print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value;
		}
	}

event zeek_init() &priority=-5
	{
	local rec = Test::Info();
	local i = 0;

	# Create two vectors containing 10 strings with 10 characters each.
	# This leaves us with 200 total characters to work with.
	while ( ++i <= 10 )
		{
		rec$strings1 += "ABCDEFGHIJ";
		rec$strings2 += "ABCDEFGHIJ";
		}

	Log::write(Test::LOG, rec);

	# Do this as a separate event so the weirds get processed before we log the
	# telemetry output. See the comment below for the first test as to why.
	event log_telemetry();
	}

# @TEST-END-FILE test.zeek

# Limit the individual fields to 5 bytes, but keep the total maximum large enough that it
# will write all of the fields.
event zeek_init() &priority=10
	{
	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_field_string_bytes=5]);
	}

# @TEST-START-NEXT

# Leave the individual field bytes alone, but set the maximum length to where it cuts off
# the second field in the middle of a string.
event zeek_init() &priority=10
	{
	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_string_bytes=115]);
	}

# @TEST-START-NEXT

# Leave the individual field bytes alone, but set the maximum length to where it cuts off
# the first field in the middle of a string. Second field should log empty strings.
event zeek_init() &priority=10
	{
	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_string_bytes=85]);
	}

# @TEST-START-NEXT

# Limit the individual containers to 5 items, but keep the total maximum large enough that
# it will write all of the fields.
event zeek_init() &priority=10
	{
	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_field_container_elements=5]);
	}

# @TEST-START-NEXT

# Leave the individual field items alone, but set the maximum length to where it cuts off
# the second field in the middle.
event zeek_init() &priority=10
	{
	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_container_elements=15]);
	}

# @TEST-START-NEXT

# Leave the individual field bytes alone, but set the maximum length to where it cuts off
# the first field in the middle. Second field should log empty containers.
event zeek_init() &priority=10
	{
	Log::create_stream(Test::LOG, [$columns=Test::Info, $path="test", $max_total_container_elements=5]);
	}
@ -0,0 +1,92 @@
# @TEST-DOC: Test the options that limit string and container lengths when logging
#
# @TEST-EXEC: zeek -b test.zeek %INPUT
# @TEST-EXEC: btest-diff test.log
# @TEST-EXEC: btest-diff .stdout
# @TEST-EXEC: btest-diff weird.log

# @TEST-START-FILE test.zeek

@load base/frameworks/telemetry
@load base/frameworks/notice/weird

module Test;

export {
	redef enum Log::ID += { LOG };

	type Info: record {
		strings1: vector of string &log;
		strings2: vector of string &log;
	};
}

event log_telemetry()
	{
	local storage_metrics = Telemetry::collect_metrics("zeek", "log_writer_truncated*");
	for (i in storage_metrics)
		{
		local m = storage_metrics[i];
		print m$opts$metric_type, m$opts$prefix, m$opts$name, m$label_names, m$label_values, m$value;
		}
	}

event zeek_init()
	{
	Log::create_stream(LOG, [$columns=Info, $path="test"]);

	local rec = Test::Info();
	local i = 0;

	# Create two vectors containing 10 strings with 10 characters each.
	# This leaves us with 200 total characters to work with.
	while ( ++i <= 10 )
		{
		rec$strings1 += "ABCDEFGHIJ";
		rec$strings2 += "ABCDEFGHIJ";
		}

	Log::write(Test::LOG, rec);

	# Do this as a separate event so the weirds get processed before we log the
	# telemetry output. See the comment below for the first test as to why.
	event log_telemetry();
	}

# @TEST-END-FILE test.zeek

# Limit the individual fields to 5 bytes, but keep the total maximum large enough that it
# will write all of the fields. The weird test for this one will be off since it will
# limit the name of the weird. It will pass, but the fields in the log will get truncated
# like they're supposed to.
redef Log::default_max_field_string_bytes = 5;

# @TEST-START-NEXT

# Leave the individual field bytes alone, but set the maximum length to where it cuts off
# the second field in the middle of a string.
redef Log::default_max_total_string_bytes = 115;

# @TEST-START-NEXT

# Leave the individual field bytes alone, but set the maximum length to where it cuts off
# the first field in the middle of a string. Second field should log empty strings.
redef Log::default_max_total_string_bytes = 85;

# @TEST-START-NEXT

# Limit the individual containers to 5 items, but keep the total maximum large enough that
# it will write all of the fields.
redef Log::default_max_field_container_elements = 5;

# @TEST-START-NEXT

# Leave the individual field items alone, but set the maximum length to where it cuts off
# the second field in the middle.
redef Log::default_max_total_container_elements = 15;

# @TEST-START-NEXT

# Leave the individual field bytes alone, but set the maximum length to where it cuts off
# the first field in the middle. Second field should log empty containers.
redef Log::default_max_total_container_elements = 5;
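Taken together, the two tests above exercise the same four limits at two scopes: per stream at Log::create_stream() time and globally via the Log::default_max_* options. A short site-policy sketch combining both; the module name, record field, and numeric values are illustrative assumptions, and only the option and stream-field names come from the tests:

module Example;

export {
	redef enum Log::ID += { LOG };
	type Info: record {
		msg: string &log;
	};
}

# Global caps applied to every stream (values are illustrative, not from this commit).
redef Log::default_max_field_string_bytes = 4096;
redef Log::default_max_field_container_elements = 500;

event zeek_init()
	{
	# Per-stream override, using the stream-level field shown in the first test.
	Log::create_stream(LOG, [$columns=Info, $path="example", $max_total_string_bytes=65536]);
	}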
@ -19,6 +19,12 @@

module Test;

# Disable the string and container length filtering.
redef Log::default_max_field_string_bytes = 0;
redef Log::default_max_total_string_bytes = 0;
redef Log::default_max_field_container_elements = 0;
redef Log::default_max_total_container_elements = 0;

export {
	redef enum Log::ID += { LOG };