Mirror of https://github.com/zeek/zeek.git

commit 8d9355eb51
Merge branch 'master' of https://github.com/zeek/zeek into topic/zeke/closures

45 changed files with 1201 additions and 532 deletions
.clang-tidy | 40 (new file)
@@ -0,0 +1,40 @@
+Checks: '*,
+  -abseil-string-find-startswith,
+  -bugprone-exception-escape,
+  -bugprone-macro-parentheses,
+  -bugprone-suspicious-semicolon,
+  -cert-err58-cpp,
+  -cppcoreguidelines-avoid-c-arrays,
+  -cppcoreguidelines-avoid-goto,
+  -cppcoreguidelines-avoid-magic-numbers,
+  -cppcoreguidelines-macro-usage,
+  -cppcoreguidelines-non-private-member-variables-in-classes,
+  -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
+  -cppcoreguidelines-pro-bounds-constant-array-index,
+  -cppcoreguidelines-pro-bounds-pointer-arithmetic,
+  -cppcoreguidelines-pro-type-const-cast,
+  -cppcoreguidelines-pro-type-reinterpret-cast,
+  -fuchsia-default-arguments,
+  -fuchsia-multiple-inheritance,
+  -fuchsia-overloaded-operator,
+  -fuchsia-statically-constructed-objects,
+  -fuchsia-trailing-return,
+  -google-build-using-namespace,
+  -google-explicit-constructor,
+  -google-readability-braces-around-statements,
+  -hicpp-avoid-c-arrays,
+  -hicpp-avoid-goto,
+  -hicpp-braces-around-statements,
+  -hicpp-explicit-conversions,
+  -hicpp-no-array-decay,
+  -llvm-header-guard,
+  -misc-macro-parentheses,
+  -misc-non-private-member-variables-in-classes,
+  -misc-suspicious-semicolon,
+  -misc-unused-parameters,
+  -modernize-avoid-c-arrays,
+  -modernize-use-nodiscard,
+  -readability-braces-around-statements,
+  -readability-container-size-empty,
+  -readability-implicit-bool-conversion,
+  -readability-magic-numbers'
CHANGES | 60
@@ -1,4 +1,64 @@
+2.6-591 | 2019-07-11 13:29:28 -0700
+
+  * Fix potential thread safety issue with zeekenv util function
+
+    Observed a segfault accessing the local static std::map of zeekenv() from
+    a logging thread, but only in non-debug builds using the Apple/Clang
+    compiler, not in a debug build or with GCC. This behavior is surprising,
+    since static local variable initialization is supposed to be thread-safe
+    since C++11, but moving to a global static works and is "more efficient"
+    anyway since there's no longer any run-time overhead. (Jon Siwek, Corelight)
+
+2.6-589 | 2019-07-11 13:14:52 -0700
+
+  * GH-421: fix bugs/regressions in DNP3 analyzer (Hui Lin)
+
+2.6-587 | 2019-07-11 12:13:48 -0700
+
+  * Fix a sign-compare compiler warning (Jon Siwek, Corelight)
+
+2.6-586 | 2019-07-11 11:15:40 -0700
+
+  * Convert all JSON output to use an external library for better consistency (Tim Wojtulewicz, Corelight)
+
+    See NEWS for more details; this makes to_json a bif and causes slight changes
+    in its output, as well as in the output of the JSON logger.
+
+2.6-576 | 2019-07-10 18:38:54 -0700
+
+  * Remove unused option: chunked_io_buffer_soft_cap (Jon Siwek, Corelight)
+
+2.6-575 | 2019-07-09 18:28:03 -0700
+
+  * Avoid a null dereference (Coverity-1402816) (Tim Wojtulewicz, Corelight)
+
+  * Avoid resource leaks (Coverity-1402818, Coverity-1402812) (Tim Wojtulewicz, Corelight)
+
+  * Avoid null dereference in broker (Coverity-1402824, Coverity-1402814) (Tim Wojtulewicz, Corelight)
+
+  * Improve stability of a unit test (Jon Siwek, Corelight)
+
+2.6-569 | 2019-07-03 13:03:22 -0700
+
+  * Improve stability of a unit test (Jon Siwek, Corelight)
+
+2.6-568 | 2019-07-03 11:50:56 -0700
+
+  * Add clang-tidy rule to CMake, including a base configuration (Tim Wojtulewicz, Corelight)
+
+2.6-566 | 2019-07-03 11:08:24 -0700
+
+  * Improve Zeekygen output for long attribute expressions (Jon Siwek, Corelight)
+
+2.6-565 | 2019-07-03 09:32:34 -0700
+
+  * GH-446: Deprecate rfb_event. (Johanna Amann, Corelight)
+
+2.6-563 | 2019-07-03 01:57:40 -0700
+
+  * Fix CIF integration, add logging options to intel.log, and add comments to the code (sfinlon)
+
 2.6-558 | 2019-07-01 01:27:50 -0700
 
   * GH-443: fix uses of timestamp 0 in cluster diagnostic logs
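For context on the zeekenv entry above: a minimal sketch of the before/after pattern, assuming a simplified lookup function. The map entries and the signature here are illustrative, not the actual util.cc contents.

#include <cstdlib>
#include <map>
#include <string>

// After the fix: a namespace-scope static, constructed once at program start,
// before any logging threads exist. Before the fix this map lived inside the
// function as a local static; its initialization is guaranteed thread-safe
// since C++11, yet every call still pays an initialization guard check, and
// the observed Apple/Clang segfault motivated avoiding the local entirely.
static const std::map<std::string, std::string> zeekenv_map = {
    { "ZEEK_LOG_SUFFIX", "BRO_LOG_SUFFIX" },  // hypothetical entries,
    { "ZEEK_DNS_FAKE", "BRO_DNS_FAKE" },      // for illustration only
};

// Look up a ZEEK_* environment variable, falling back to a legacy BRO_* name.
const char* zeekenv_sketch(const char* name)
    {
    auto it = zeekenv_map.find(name);

    if ( it == zeekenv_map.end() )
        return getenv(name);

    auto* val = getenv(name);

    if ( ! val )
        val = getenv(it->second.c_str());

    return val;
    }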
NEWS | 11
@@ -347,6 +347,16 @@ Changed Functionality
   of each other on separate cluster nodes to all be logged rather
   than suppressed and de-duplicated into a single notice.
 
+- to_json is now a bif, no longer a script. Loading base/utils/json.zeek is no
+  longer necessary and has been deprecated. to_json should yield much better,
+  always-valid JSON. There are some small differences in output; unnecessary
+  spaces are removed, and port values are rendered differently, now including
+  both the port and the protocol.
+
+- The output of the JSON logger now uses an external library to generate JSON.
+  There are small changes to the output; most visibly, double numbers are now
+  rounded slightly differently. The way in which port values are rendered does
+  _not_ change for JSON logs.
+
 Removed Functionality
 ---------------------
 
@@ -479,6 +489,7 @@ Removed Functionality
 - ``backdoor_stat_period``
 - ``backdoor_stat_backoff``
 - ``backdoor_endp_stats``
+- ``chunked_io_buffer_soft_cap``
 
 - The following constants were used as part of deprecated functionality in version 2.6
   or below and are removed from this release:
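To make the port change above concrete: a minimal sketch using the nlohmann JSON library that this release vendors (the bif's real implementation, in src/Val.cc below, uses an insertion-ordered variant of the same type).

#include <iostream>
#include "3rdparty/json.hpp"  // nlohmann/json, vendored by this release

int main()
    {
    // The old script-land to_json rendered a port as a bare number ("80");
    // the new bif renders it as an object carrying port and protocol.
    nlohmann::json port;
    port["port"] = 80;
    port["proto"] = "tcp";

    // dump() is compact, with none of the old version's extra spaces.
    std::cout << port.dump() << std::endl;  // {"port":80,"proto":"tcp"}
    return 0;
    }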
VERSION | 2
@@ -1 +1 @@
-2.6-558
+2.6-591
@@ -3,7 +3,6 @@
 @load base/frameworks/openflow
 @load base/utils/active-http
 @load base/utils/exec
-@load base/utils/json
 
 module OpenFlow;
 
@@ -4608,11 +4608,6 @@ const sig_max_group_size = 50 &redef;
 ## Description transmitted to remote communication peers for identification.
 const peer_description = "zeek" &redef;
 
-## The number of IO chunks allowed to be buffered between the child
-## and parent process of remote communication before Zeek starts dropping
-## connections to remote peers in an attempt to catch up.
-const chunked_io_buffer_soft_cap = 800000 &redef;
-
 ## Reassemble the beginning of all TCP connections before doing
 ## signature matching. Enabling this provides more accurate matching at the
 ## expense of CPU cycles.
@@ -106,11 +106,6 @@ function set_session(c: connection)
        }
    }
 
-event rfb_event(c: connection) &priority=5
-    {
-    set_session(c);
-    }
-
 event rfb_client_version(c: connection, major_version: string, minor_version: string) &priority=5
    {
    set_session(c);
@@ -1,109 +1,2 @@
-##! Functions to assist with generating JSON data from Zeek data scructures.
-# We might want to implement this in core somtime, this looks... hacky at best.
-
-@load base/utils/strings
-
-## A function to convert arbitrary Zeek data into a JSON string.
-##
-## v: The value to convert to JSON. Typically a record.
-##
-## only_loggable: If the v value is a record this will only cause
-##                fields with the &log attribute to be included in the JSON.
-##
-## returns: a JSON formatted string.
-function to_json(v: any, only_loggable: bool &default=F, field_escape_pattern: pattern &default=/^_/): string
-    {
-    local tn = type_name(v);
-    switch ( tn )
-        {
-        case "type":
-            return "";
-
-        case "string":
-            return cat("\"", gsub(gsub(clean(v), /\\/, "\\\\"), /\"/, "\\\""), "\"");
-
-        case "port":
-            return cat(port_to_count(to_port(cat(v))));
-
-        case "enum":
-            fallthrough;
-        case "interval":
-            fallthrough;
-        case "addr":
-            fallthrough;
-        case "subnet":
-            return cat("\"", v, "\"");
-
-        case "int":
-            fallthrough;
-        case "count":
-            fallthrough;
-        case "time":
-            return cat(v);
-
-        case "double":
-            return fmt("%.16g", v);
-
-        case "bool":
-            local bval: bool = v;
-            return bval ? "true" : "false";
-
-        default:
-            break;
-        }
-
-    if ( /^record/ in tn )
-        {
-        local rec_parts: string_vec = vector();
-
-        local ft = record_fields(v);
-        for ( field, field_desc in ft )
-            {
-            # replace the escape pattern in the field.
-            if( field_escape_pattern in field )
-                field = cat(sub(field, field_escape_pattern, ""));
-            if ( field_desc?$value && (!only_loggable || field_desc$log) )
-                {
-                local onepart = cat("\"", field, "\": ", to_json(field_desc$value, only_loggable));
-                rec_parts += onepart;
-                }
-            }
-        return cat("{", join_string_vec(rec_parts, ", "), "}");
-        }
-
-    # None of the following are supported.
-    else if ( /^set/ in tn )
-        {
-        local set_parts: string_vec = vector();
-        local sa: set[bool] = v;
-        for ( sv in sa )
-            {
-            set_parts += to_json(sv, only_loggable);
-            }
-        return cat("[", join_string_vec(set_parts, ", "), "]");
-        }
-    else if ( /^table/ in tn )
-        {
-        local tab_parts: vector of string = vector();
-        local ta: table[bool] of any = v;
-        for ( ti, tv in ta )
-            {
-            local ts = to_json(ti);
-            local if_quotes = (ts[0] == "\"") ? "" : "\"";
-            tab_parts += cat(if_quotes, ts, if_quotes, ": ", to_json(tv, only_loggable));
-            }
-        return cat("{", join_string_vec(tab_parts, ", "), "}");
-        }
-    else if ( /^vector/ in tn )
-        {
-        local vec_parts: string_vec = vector();
-        local va: vector of any = v;
-        for ( vi in va )
-            {
-            vec_parts += to_json(va[vi], only_loggable);
-            }
-        return cat("[", join_string_vec(vec_parts, ", "), "]");
-        }
-
-    return "\"\"";
-    }
+## This file is deprecated in favor of to_json in zeek.bif
+@deprecated="Remove in 3.1. to_json is now always available as a built-in function."
@@ -1,15 +1,66 @@
 
 @load base/frameworks/intel
 
 module Intel;
 
-## These are some fields to add extended compatibility between Zeek and the
-## Collective Intelligence Framework.
-redef record Intel::MetaData += {
-    ## Maps to the Impact field in the Collective Intelligence Framework.
-    cif_impact: string &optional;
-    ## Maps to the Severity field in the Collective Intelligence Framework.
-    cif_severity: string &optional;
-    ## Maps to the Confidence field in the Collective Intelligence Framework.
-    cif_confidence: double &optional;
-};
+## This file adds a mapping between the Collective Intelligence Framework (CIF) and Zeek.
+
+export {
+    redef record Intel::MetaData += {
+        ## Maps to the 'tags' field in CIF.
+        cif_tags: string &optional;
+        ## Maps to the 'confidence' field in CIF.
+        cif_confidence: double &optional;
+        ## Maps to the 'source' field in CIF.
+        cif_source: string &optional;
+        ## Maps to the 'description' field in CIF.
+        cif_description: string &optional;
+        ## Maps to the 'firstseen' field in CIF.
+        cif_firstseen: string &optional;
+        ## Maps to the 'lastseen' field in CIF.
+        cif_lastseen: string &optional;
+    };
+
+    ## CIF record used for consistent formatting of CIF values.
+    type CIF: record {
+        ## CIF tags observations; example tags are ``botnet`` or ``exploit``.
+        tags: string &optional &log;
+        ## In CIF, confidence details the degree of certainty of a given observation.
+        confidence: double &optional &log;
+        ## Source given in CIF.
+        source: string &optional &log;
+        ## Description given in CIF.
+        description: string &optional &log;
+        ## First time the source observed the behavior.
+        firstseen: string &optional &log;
+        ## Last time the source observed the behavior.
+        lastseen: string &optional &log;
+    };
+
+    redef record Info += {
+        cif: CIF &log &optional;
+    };
+}
+
+hook extend_match(info: Info, s: Seen, items: set[Item]) &priority=5
+    {
+    for ( item in items )
+        {
+        local tmp: CIF;
+
+        if ( item$meta?$cif_tags )
+            tmp$tags = item$meta$cif_tags;
+        if ( item$meta?$cif_confidence )
+            tmp$confidence = item$meta$cif_confidence;
+        if ( item$meta?$cif_source )
+            tmp$source = item$meta$cif_source;
+        if ( item$meta?$cif_description )
+            tmp$description = item$meta$cif_description;
+        if ( item$meta?$cif_firstseen )
+            tmp$firstseen = item$meta$cif_firstseen;
+        if ( item$meta?$cif_lastseen )
+            tmp$lastseen = item$meta$cif_lastseen;
+
+        info$cif = tmp;
+        }
+    }
src/Attr.cc | 44
@@ -45,8 +45,33 @@ void Attr::Describe(ODesc* d) const
        }
    }
 
-void Attr::DescribeReST(ODesc* d) const
+void Attr::DescribeReST(ODesc* d, bool shorten) const
    {
+   auto add_long_expr_string = [](ODesc* d, const std::string& s, bool shorten)
+       {
+       constexpr auto max_expr_chars = 32;
+       constexpr auto shortened_expr = "*...*";
+
+       if ( s.size() > max_expr_chars )
+           {
+           if ( shorten )
+               d->Add(shortened_expr);
+           else
+               {
+               // Long inline-literals likely won't wrap well in HTML render
+               d->Add("*");
+               d->Add(s);
+               d->Add("*");
+               }
+           }
+       else
+           {
+           d->Add("``");
+           d->Add(s);
+           d->Add("``");
+           }
+       };
+
    d->Add(":zeek:attr:`");
    AddTag(d);
    d->Add("`");
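A standalone sketch of the helper's three renderings (std::string in place of ODesc, for brevity; the threshold and markers are taken from the hunk above):

#include <string>

// Mirror of add_long_expr_string's branching, returning the ReST snippet
// instead of writing into an ODesc.
std::string render_expr(const std::string& s, bool shorten)
    {
    constexpr size_t max_expr_chars = 32;

    if ( s.size() > max_expr_chars )
        {
        // Zeekygen's short form elides long expressions entirely...
        if ( shorten )
            return "*...*";

        // ...while the long form italicizes them, since long inline
        // literals wrap poorly in the HTML render.
        return "*" + s + "*";
        }

    // Short expressions keep the usual inline-literal markup.
    return "``" + s + "``";
    }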
@@ -57,7 +82,6 @@ void Attr::DescribeReST(ODesc* d) const
        d->Add("=");
        d->SP();
 
-
    if ( expr->Tag() == EXPR_NAME )
        {
        d->Add(":zeek:see:`");
@@ -74,14 +98,15 @@ void Attr::DescribeReST(ODesc* d) const
 
    else if ( expr->Tag() == EXPR_CONST )
        {
-       d->Add("``");
-       expr->Describe(d);
-       d->Add("``");
+       ODesc dd;
+       dd.SetQuotes(1);
+       expr->Describe(&dd);
+       string s = dd.Description();
+       add_long_expr_string(d, s, shorten);
        }
 
    else
        {
-       d->Add("``");
        Val* v = expr->Eval(0);
        ODesc dd;
        v->Describe(&dd);
@@ -92,8 +117,7 @@ void Attr::DescribeReST(ODesc* d) const
        if ( s[i] == '\n' )
            s[i] = ' ';
 
-       d->Add(s);
-       d->Add("``");
+       add_long_expr_string(d, s, shorten);
        }
    }
 
@@ -211,14 +235,14 @@ void Attributes::Describe(ODesc* d) const
        }
    }
 
-void Attributes::DescribeReST(ODesc* d) const
+void Attributes::DescribeReST(ODesc* d, bool shorten) const
    {
    loop_over_list(*attrs, i)
        {
        if ( i > 0 )
            d->Add(" ");
 
-       (*attrs)[i]->DescribeReST(d);
+       (*attrs)[i]->DescribeReST(d, shorten);
        }
    }
@@ -51,7 +51,7 @@ public:
        { return tag == ATTR_REDEF || tag == ATTR_OPTIONAL; }
 
    void Describe(ODesc* d) const override;
-   void DescribeReST(ODesc* d) const;
+   void DescribeReST(ODesc* d, bool shorten = false) const;
 
    bool operator==(const Attr& other) const
        {
@@ -88,7 +88,7 @@ public:
    void RemoveAttr(attr_tag t);
 
    void Describe(ODesc* d) const override;
-   void DescribeReST(ODesc* d) const;
+   void DescribeReST(ODesc* d, bool shorten = false) const;
 
    attr_list* Attrs()  { return attrs; }
@@ -220,23 +220,7 @@ add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
 set_source_files_properties(nb_dns.c PROPERTIES COMPILE_FLAGS
                             -fno-strict-aliasing)
 
-set(bro_SRCS
-    ${CMAKE_CURRENT_BINARY_DIR}/version.c
-    ${BIF_SRCS}
-    ${BINPAC_AUXSRC}
-    ${BINPAC_OUTPUTS}
-    ${TRANSFORMED_BISON_OUTPUTS}
-    ${FLEX_RuleScanner_OUTPUTS}
-    ${FLEX_RuleScanner_INPUT}
-    ${BISON_RuleParser_INPUT}
-    ${FLEX_REScanner_OUTPUTS}
-    ${FLEX_REScanner_INPUT}
-    ${BISON_REParser_INPUT}
-    ${FLEX_Scanner_OUTPUTS}
-    ${FLEX_Scanner_INPUT}
-    ${BISON_Parser_INPUT}
-    ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
-    ${PLUGIN_INIT}
+set(MAIN_SRCS
     main.cc
     net_util.cc
     util.cc
@@ -332,8 +316,6 @@ set(bro_SRCS
     threading/formatters/Ascii.cc
     threading/formatters/JSON.cc
 
-    3rdparty/sqlite3.c
-
     plugin/Component.cc
     plugin/ComponentManager.h
     plugin/TaggedComponent.h
@@ -344,6 +326,31 @@ set(bro_SRCS
     digest.h
 )
 
+set(THIRD_PARTY_SRCS
+    3rdparty/sqlite3.c
+)
+
+set(bro_SRCS
+    ${CMAKE_CURRENT_BINARY_DIR}/version.c
+    ${BIF_SRCS}
+    ${BINPAC_AUXSRC}
+    ${BINPAC_OUTPUTS}
+    ${TRANSFORMED_BISON_OUTPUTS}
+    ${FLEX_RuleScanner_OUTPUTS}
+    ${FLEX_RuleScanner_INPUT}
+    ${BISON_RuleParser_INPUT}
+    ${FLEX_REScanner_OUTPUTS}
+    ${FLEX_REScanner_INPUT}
+    ${BISON_REParser_INPUT}
+    ${FLEX_Scanner_OUTPUTS}
+    ${FLEX_Scanner_INPUT}
+    ${BISON_Parser_INPUT}
+    ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
+    ${PLUGIN_INIT}
+    ${THIRD_PARTY_SRCS}
+    ${MAIN_SRCS}
+)
+
 collect_headers(bro_HEADERS ${bro_SRCS})
 
 if ( bro_HAVE_OBJECT_LIBRARIES )
@@ -433,3 +440,18 @@ install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/
 install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/sqlite3.h
         DESTINATION include/zeek/3rdparty
 )
+
+find_program(CLANG_TIDY NAMES clang-tidy)
+if (CLANG_TIDY)
+    set(TIDY_SRCS "")
+    foreach(f ${MAIN_SRCS})
+        list(APPEND TIDY_SRCS "src/${f}")
+    endforeach(f)
+    # TODO: this currently doesn't include many of the subdirectories/plugins
+    # that build static libraries for inclusion into the final zeek binary
+    # (analyzers, broker, etc.) or generated code (BIFs, BinPAC, etc.).
+    add_custom_target(clang-tidy
+        COMMAND ${CLANG_TIDY} -p ${CMAKE_BINARY_DIR} ${TIDY_SRCS}
+        WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
+    )
+endif()
@@ -412,7 +412,7 @@ void ID::DescribeReSTShort(ODesc* d) const
    if ( attrs )
        {
        d->SP();
-       attrs->DescribeReST(d);
+       attrs->DescribeReST(d, true);
        }
    }
@@ -141,7 +141,6 @@ RecordType* rotate_info;
 StringVal* log_rotate_base_time;
 
 StringVal* peer_description;
-bro_uint_t chunked_io_buffer_soft_cap;
 
 Val* profiling_file;
 double profiling_interval;
@@ -213,7 +212,6 @@ void init_general_global_var()
 
    peer_description =
        internal_val("peer_description")->AsStringVal();
-   chunked_io_buffer_soft_cap = opt_internal_unsigned("chunked_io_buffer_soft_cap");
 
    packet_filter_default = opt_internal_int("packet_filter_default");
@@ -144,7 +144,6 @@ extern RecordType* rotate_info;
 extern StringVal* log_rotate_base_time;
 
 extern StringVal* peer_description;
-extern bro_uint_t chunked_io_buffer_soft_cap;
 
 extern Val* profiling_file;
 extern double profiling_interval;
src/Val.cc | 409
@@ -27,6 +27,16 @@
 
 #include "broker/Data.h"
 
+#include "3rdparty/json.hpp"
+#include "3rdparty/fifo_map.hpp"
+
+// Define a class for use with the json library that orders the keys in the same order that
+// they were inserted. By default, the json library orders them alphabetically and we don't
+// want it like that.
+template<class K, class V, class compare, class A>
+using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
+using ZeekJson = nlohmann::basic_json<json_fifo_map>;
+
 Val::Val(Func* f)
    {
    val.func_val = f;
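Why the fifo_map alias above matters, as a minimal sketch (assuming the vendored 3rdparty headers are on the include path):

#include <iostream>
#include "3rdparty/json.hpp"
#include "3rdparty/fifo_map.hpp"

// Same aliases as in the hunk above.
template<class K, class V, class compare, class A>
using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
using ZeekJson = nlohmann::basic_json<json_fifo_map>;

int main()
    {
    ZeekJson j;
    j["ts"] = 1562879368.0;
    j["uid"] = "CHhAvVGS1DHFjwGM9";  // illustrative values

    // Insertion order is preserved: {"ts":1562879368.0,"uid":"..."}.
    // A plain nlohmann::json would re-sort the keys alphabetically,
    // which would reorder every log line's fields.
    std::cout << j.dump() << std::endl;
    return 0;
    }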
@@ -378,6 +388,274 @@ bool Val::WouldOverflow(const BroType* from_type, const BroType* to_type, const
    return false;
    }
 
+TableVal* Val::GetRecordFields()
+    {
+    TableVal* fields = new TableVal(internal_type("record_field_table")->AsTableType());
+
+    auto t = Type();
+
+    if ( t->Tag() != TYPE_RECORD && t->Tag() != TYPE_TYPE )
+        {
+        reporter->Error("non-record value/type passed to record_fields");
+        return fields;
+        }
+
+    RecordType* rt = nullptr;
+    RecordVal* rv = nullptr;
+
+    if ( t->Tag() == TYPE_RECORD )
+        {
+        rt = t->AsRecordType();
+        rv = AsRecordVal();
+        }
+    else
+        {
+        t = t->AsTypeType()->Type();
+
+        if ( t->Tag() != TYPE_RECORD )
+            {
+            reporter->Error("non-record value/type passed to record_fields");
+            return fields;
+            }
+
+        rt = t->AsRecordType();
+        }
+
+    for ( int i = 0; i < rt->NumFields(); ++i )
+        {
+        BroType* ft = rt->FieldType(i);
+        TypeDecl* fd = rt->FieldDecl(i);
+        Val* fv = nullptr;
+
+        if ( rv )
+            fv = rv->Lookup(i);
+
+        if ( fv )
+            ::Ref(fv);
+
+        bool logged = (fd->attrs && fd->FindAttr(ATTR_LOG) != 0);
+
+        RecordVal* nr = new RecordVal(internal_type("record_field")->AsRecordType());
+
+        if ( ft->Tag() == TYPE_RECORD )
+            nr->Assign(0, new StringVal("record " + ft->GetName()));
+        else
+            nr->Assign(0, new StringVal(type_name(ft->Tag())));
+
+        nr->Assign(1, val_mgr->GetBool(logged));
+        nr->Assign(2, fv);
+        nr->Assign(3, rt->FieldDefault(i));
+
+        Val* field_name = new StringVal(rt->FieldName(i));
+        fields->Assign(field_name, nr);
+        Unref(field_name);
+        }
+
+    return fields;
+    }
+
+// This is a static method in this file to avoid including json.hpp in Val.h since it's huge.
+static ZeekJson BuildJSON(Val* val, bool only_loggable=false, RE_Matcher* re=new RE_Matcher("^_"))
+    {
+    ZeekJson j;
+    BroType* type = val->Type();
+    switch ( type->Tag() )
+        {
+        case TYPE_BOOL:
+            j = val->AsBool();
+            break;
+
+        case TYPE_INT:
+            j = val->AsInt();
+            break;
+
+        case TYPE_COUNT:
+            j = val->AsCount();
+            break;
+
+        case TYPE_COUNTER:
+            j = val->AsCounter();
+            break;
+
+        case TYPE_TIME:
+            j = val->AsTime();
+            break;
+
+        case TYPE_DOUBLE:
+            j = val->AsDouble();
+            break;
+
+        case TYPE_PORT:
+            {
+            auto* pval = val->AsPortVal();
+            j["port"] = pval->Port();
+            j["proto"] = pval->Protocol();
+            break;
+            }
+
+        case TYPE_PATTERN:
+        case TYPE_INTERVAL:
+        case TYPE_ADDR:
+        case TYPE_SUBNET:
+            {
+            ODesc d;
+            d.SetStyle(RAW_STYLE);
+            val->Describe(&d);
+
+            auto* bs = new BroString(1, d.TakeBytes(), d.Len());
+            j = string((char*)bs->Bytes(), bs->Len());
+
+            delete bs;
+            break;
+            }
+
+        case TYPE_FILE:
+        case TYPE_FUNC:
+        case TYPE_ENUM:
+        case TYPE_STRING:
+            {
+            ODesc d;
+            d.SetStyle(RAW_STYLE);
+            val->Describe(&d);
+
+            auto* bs = new BroString(1, d.TakeBytes(), d.Len());
+            j = json_escape_utf8(string((char*)bs->Bytes(), bs->Len()));
+
+            delete bs;
+            break;
+            }
+
+        case TYPE_TABLE:
+            {
+            auto* table = val->AsTable();
+            auto* tval = val->AsTableVal();
+
+            if ( tval->Type()->IsSet() )
+                j = ZeekJson::array();
+            else
+                j = ZeekJson::object();
+
+            HashKey* k;
+            auto c = table->InitForIteration();
+            while ( table->NextEntry(k, c) )
+                {
+                auto lv = tval->RecoverIndex(k);
+                delete k;
+
+                if ( tval->Type()->IsSet() )
+                    {
+                    auto* value = lv->Index(0)->Ref();
+                    j.push_back(BuildJSON(value, only_loggable, re));
+                    Unref(value);
+                    }
+                else
+                    {
+                    ZeekJson key_json;
+                    Val* entry_value;
+                    if ( lv->Length() == 1 )
+                        {
+                        Val* entry_key = lv->Index(0)->Ref();
+                        entry_value = tval->Lookup(entry_key, true);
+                        key_json = BuildJSON(entry_key, only_loggable, re);
+                        Unref(entry_key);
+                        }
+                    else
+                        {
+                        entry_value = tval->Lookup(lv, true);
+                        key_json = BuildJSON(lv, only_loggable, re);
+                        }
+
+                    string key_string;
+                    if ( key_json.is_string() )
+                        key_string = key_json;
+                    else
+                        key_string = key_json.dump();
+
+                    j[key_string] = BuildJSON(entry_value, only_loggable, re);
+                    }
+
+                Unref(lv);
+                }
+
+            break;
+            }
+
+        case TYPE_RECORD:
+            {
+            j = ZeekJson::object();
+            auto* rval = val->AsRecordVal();
+            TableVal* fields = rval->GetRecordFields();
+            auto* field_indexes = fields->ConvertToPureList();
+            int num_indexes = field_indexes->Length();
+
+            for ( int i = 0; i < num_indexes; ++i )
+                {
+                Val* key = field_indexes->Index(i);
+                auto* key_field = fields->Lookup(key)->AsRecordVal();
+
+                auto* key_val = key->AsStringVal();
+                string key_string;
+                if ( re->MatchAnywhere(key_val->AsString()) != 0 )
+                    {
+                    key_val = key_val->Substitute(re, new StringVal(""), 0)->AsStringVal();
+                    key_string = key_val->ToStdString();
+                    delete key_val;
+                    }
+                else
+                    key_string = key_val->ToStdString();
+
+                Val* value = key_field->Lookup("value", true);
+
+                if ( value && ( ! only_loggable || key_field->Lookup("log")->AsBool() ) )
+                    j[key_string] = BuildJSON(value, only_loggable, re);
+                }
+
+            delete fields;
+            break;
+            }
+
+        case TYPE_LIST:
+            {
+            j = ZeekJson::array();
+            auto* lval = val->AsListVal();
+            size_t size = lval->Length();
+            for (size_t i = 0; i < size; i++)
+                j.push_back(BuildJSON(lval->Index(i), only_loggable, re));
+
+            break;
+            }
+
+        case TYPE_VECTOR:
+            {
+            j = ZeekJson::array();
+            auto* vval = val->AsVectorVal();
+            size_t size = vval->SizeVal()->AsCount();
+            for (size_t i = 0; i < size; i++)
+                j.push_back(BuildJSON(vval->Lookup(i), only_loggable, re));
+
+            break;
+            }
+
+        case TYPE_OPAQUE:
+            {
+            j = ZeekJson::object();
+            auto* oval = val->AsOpaqueVal();
+            j["opaque_type"] = OpaqueMgr::mgr()->TypeID(oval);
+            break;
+            }
+
+        default: break;
+        }
+
+    return j;
+    }
+
+StringVal* Val::ToJSON(bool only_loggable, RE_Matcher* re)
+    {
+    ZeekJson j = BuildJSON(this, only_loggable, re);
+    return new StringVal(j.dump());
+    }
+
 IntervalVal::IntervalVal(double quantity, double units) :
    Val(quantity * units, TYPE_INTERVAL)
    {
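One subtlety in BuildJSON above: JSON object keys must be strings, so non-string table indices are serialized (key_json.dump()) and the serialized text becomes the key. A standalone sketch of that decision, using plain nlohmann::json:

#include <iostream>
#include "3rdparty/json.hpp"  // nlohmann/json, as vendored in 3rdparty/

int main()
    {
    // A composite table index, e.g. table[count, count] of string.
    nlohmann::json key_json = nlohmann::json::array({1, 2});

    // Reuse string keys directly; otherwise fall back to the
    // serialized form of the key, exactly as BuildJSON does.
    std::string key_string;
    if ( key_json.is_string() )
        key_string = key_json;
    else
        key_string = key_json.dump();

    nlohmann::json obj;
    obj[key_string] = "value";

    // Prints {"[1,2]":"value"} - the composite key was stringified.
    std::cout << obj.dump() << std::endl;
    return 0;
    }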
@@ -489,6 +767,18 @@ uint32 PortVal::Port() const
    return p & ~PORT_SPACE_MASK;
    }
 
+string PortVal::Protocol() const
+    {
+    if ( IsUDP() )
+        return "udp";
+    else if ( IsTCP() )
+        return "tcp";
+    else if ( IsICMP() )
+        return "icmp";
+    else
+        return "unknown";
+    }
+
 int PortVal::IsTCP() const
    {
    return (val.uint_val & PORT_SPACE_MASK) == TCP_PORT_MASK;
@@ -508,14 +798,8 @@ void PortVal::ValDescribe(ODesc* d) const
    {
    uint32 p = static_cast<uint32>(val.uint_val);
    d->Add(p & ~PORT_SPACE_MASK);
-   if ( IsUDP() )
-       d->Add("/udp");
-   else if ( IsTCP() )
-       d->Add("/tcp");
-   else if ( IsICMP() )
-       d->Add("/icmp");
-   else
-       d->Add("/unknown");
+   d->Add("/");
+   d->Add(Protocol());
    }
 
 Val* PortVal::DoClone(CloneState* state)
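The refactor above centralizes the mask-to-name mapping: a standalone sketch (illustrative enum, not Zeek's actual PORT_SPACE_MASK bits) of why one Protocol() accessor pays off, feeding both the human-readable form and the JSON form.

#include <iostream>
#include <string>

enum class Proto { TCP, UDP, ICMP, Unknown };

// Single source of truth for protocol names, as PortVal::Protocol() now is.
std::string protocol(Proto p)
    {
    switch ( p )
        {
        case Proto::TCP: return "tcp";
        case Proto::UDP: return "udp";
        case Proto::ICMP: return "icmp";
        default: return "unknown";
        }
    }

int main()
    {
    int port = 80;
    Proto proto = Proto::TCP;

    // ValDescribe-style rendering: 80/tcp
    std::cout << port << "/" << protocol(proto) << "\n";

    // BuildJSON-style rendering (hand-rolled here to stay dependency-free):
    std::cout << "{\"port\":" << port << ",\"proto\":\"" << protocol(proto) << "\"}\n";
    return 0;
    }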
@@ -711,6 +995,12 @@ StringVal::StringVal(const string& s) : Val(TYPE_STRING)
    val.string_val = new BroString(reinterpret_cast<const u_char*>(s.data()), s.length(), 1);
    }
 
+string StringVal::ToStdString() const
+    {
+    auto* bs = AsString();
+    return string((char*)bs->Bytes(), bs->Len());
+    }
+
 StringVal* StringVal::ToUpper()
    {
    val.string_val->ToUpper();
@@ -732,6 +1022,92 @@ unsigned int StringVal::MemoryAllocation() const
    return padded_sizeof(*this) + val.string_val->MemoryAllocation();
    }
 
+Val* StringVal::Substitute(RE_Matcher* re, StringVal* repl, bool do_all)
+    {
+    const u_char* s = Bytes();
+    int offset = 0;
+    int n = Len();
+
+    // cut_points is a set of pairs of indices in str that should
+    // be removed/replaced.  A pair <x,y> means "delete starting
+    // at offset x, up to but not including offset y".
+    List(ptr_compat_int) cut_points;    // where RE matches pieces of str
+
+    int size = 0;   // size of result
+
+    while ( n > 0 )
+        {
+        // Find next match offset.
+        int end_of_match;
+        while ( n > 0 &&
+                (end_of_match = re->MatchPrefix(&s[offset], n)) <= 0 )
+            {
+            // This character is going to be copied to the result.
+            ++size;
+
+            // Move on to next character.
+            ++offset;
+            --n;
+            }
+
+        if ( n <= 0 )
+            break;
+
+        // s[offset .. offset+end_of_match-1] matches re.
+        cut_points.append(offset);
+        cut_points.append(offset + end_of_match);
+
+        offset += end_of_match;
+        n -= end_of_match;
+
+        if ( ! do_all )
+            {
+            // We've now done the first substitution - finished.
+            // Include the remainder of the string in the result.
+            size += n;
+            break;
+            }
+        }
+
+    // size now reflects amount of space copied.  Factor in amount
+    // of space for replacement text.
+    int num_cut_points = cut_points.length() / 2;
+    size += num_cut_points * repl->Len();
+
+    // And a final NUL for good health.
+    ++size;
+
+    byte_vec result = new u_char[size];
+    byte_vec r = result;
+
+    // Copy it all over.
+    int start_offset = 0;
+    for ( int i = 0; i < cut_points.length(); i += 2 /* loop over pairs */ )
+        {
+        int num_to_copy = cut_points[i] - start_offset;
+        memcpy(r, s + start_offset, num_to_copy);
+
+        r += num_to_copy;
+        start_offset = cut_points[i+1];
+
+        // Now add in replacement text.
+        memcpy(r, repl->Bytes(), repl->Len());
+        r += repl->Len();
+        }
+
+    // Copy final trailing characters.
+    int num_to_copy = Len() - start_offset;
+    memcpy(r, s + start_offset, num_to_copy);
+    r += num_to_copy;
+
+    // Final NUL.  No need to increment r, since the length
+    // computed from it in the next statement does not include
+    // the NUL.
+    r[0] = '\0';
+
+    return new StringVal(new BroString(1, result, r - result));
+    }
+
 Val* StringVal::DoClone(CloneState* state)
    {
    // We could likely treat this type as immutable and return a reference
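StringVal::Substitute above is what BuildJSON uses to strip the field-escape pattern (^_) from record field names, preserving the old script's behavior. A rough standalone illustration of the same semantics using std::regex as a stand-in (Zeek uses its own RE_Matcher; the field name is illustrative):

#include <iostream>
#include <regex>
#include <string>

int main()
    {
    // BuildJSON strips the escape pattern "^_" from record field names,
    // so a hypothetical field named "_write_ts" is emitted as "write_ts".
    std::regex re("^_");

    // do_all=false in Substitute means: replace only the first match.
    std::string out = std::regex_replace("_write_ts", re, "",
                                         std::regex_constants::format_first_only);

    std::cout << out << std::endl;  // prints "write_ts"
    return 0;
    }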
@@ -1369,7 +1745,20 @@ Val* TableVal::Default(Val* index)
 
    if ( def_val->Type()->Tag() != TYPE_FUNC ||
         same_type(def_val->Type(), Type()->YieldType()) )
-       return def_attr->AttrExpr()->IsConst() ? def_val->Ref() : def_val->Clone();
+       {
+       if ( def_attr->AttrExpr()->IsConst() )
+           return def_val->Ref();
+
+       try
+           {
+           return def_val->Clone();
+           }
+       catch ( InterpreterException& e )
+           { /* Already reported. */ }
+
+       Error("&default value for table is not clone-able");
+       return 0;
+       }
 
    const Func* f = def_val->AsFunc();
    val_list vl;
@@ -2040,7 +2429,7 @@ vector<RecordVal*> RecordVal::parse_time_records;
 
 RecordVal::RecordVal(RecordType* t, bool init_fields) : Val(t)
    {
-   origin = 0;
+   origin = nullptr;
    int n = t->NumFields();
    val_list* vl = val.val_list_val = new val_list(n);
 
src/Val.h | 11
@@ -20,6 +20,7 @@
 #include "Notifier.h"
 #include "IPAddr.h"
 #include "DebugLogger.h"
+#include "RE.h"
 
 // We have four different port name spaces: TCP, UDP, ICMP, and UNKNOWN.
 // We distinguish between them based on the bits specified in the *_PORT_MASK
@@ -35,7 +36,6 @@ class Val;
 class BroFunc;
 class Func;
 class BroFile;
-class RE_Matcher;
 class PrefixTable;
 
 class PortVal;
@@ -348,6 +348,10 @@ public:
 
    static bool WouldOverflow(const BroType* from_type, const BroType* to_type, const Val* val);
 
+   TableVal* GetRecordFields();
+
+   StringVal* ToJSON(bool only_loggable=false, RE_Matcher* re=new RE_Matcher("^_"));
+
 protected:
 
    friend class EnumType;
@@ -531,6 +535,7 @@ public:
 
    // Returns the port number in host order (not including the mask).
    uint32 Port() const;
+   string Protocol() const;
 
    // Tests for protocol types.
    int IsTCP() const;
@@ -633,10 +638,13 @@ public:
    // char* ExpandedString(int format = BroString::EXPANDED_STRING)
    //  { return AsString()->ExpandedString(format); }
 
+   std::string ToStdString() const;
    StringVal* ToUpper();
 
    unsigned int MemoryAllocation() const override;
 
+   Val* Substitute(RE_Matcher* re, StringVal* repl, bool do_all);
+
 protected:
    friend class Val;
    StringVal() {}
@@ -989,7 +997,6 @@ protected:
 
    Val* DoClone(CloneState* state) override;
 
-   RecordType* record_type;
    BroObj* origin;
 
    static vector<RecordVal*> parse_time_records;
@@ -215,10 +215,10 @@ type Request_Data_Object(function_code: uint8, qualifier_field: uint8, object_ty
        0x2804  -> aos_dp: empty;
 
        # analog ouput g41
-       0x2901  -> ao_32: AnaOut32;
-       0x2902  -> ao_16: AnaOut16;
-       0x2903  -> ao_sp: AnaOutSP;
-       0x2904  -> ao_dp: AnaOutDP;
+       0x2901  -> ao_32: empty;
+       0x2902  -> ao_16: empty;
+       0x2903  -> ao_sp: empty;
+       0x2904  -> ao_dp: empty;
 
        # analog output event g42
        0x2a00  -> aoe_default: empty;
@@ -258,7 +258,6 @@ type Request_Data_Object(function_code: uint8, qualifier_field: uint8, object_ty
 
        # class objects g60
        0x3C01  -> class0data: empty;  # &check(qualifier_field == 0x06);
-       #0x3C02 -> class1data: uint8;  # &check(qualifier_field == 0x06);
        0x3C02  -> class1data: empty;  # &check(qualifier_field == 0x06 || qualifier_field == 0x07 || qualifier_field == 0x08);
        0x3C03  -> class2data: empty;  # &check(qualifier_field == 0x06 || qualifier_field == 0x07 || qualifier_field == 0x08);
        0x3C04  -> class3data: empty;  # &check(qualifier_field == 0x06 || qualifier_field == 0x07 || qualifier_field == 0x08);
@@ -266,11 +265,9 @@ type Request_Data_Object(function_code: uint8, qualifier_field: uint8, object_ty
        0x4601  -> file_control_id: File_Control_ID;
        0x4602  -> file_control_auth: File_Control_Auth_Wrap(function_code);
        0x4603  -> file_control_cmd: File_Control_Cmd;  # &check( file_control_cmd.op_mode == 0 || file_control_cmd.op_mode == 1 || file_control_cmd.op_mode == 2 || file_control_cmd.op_mode == 3 );
-       #0x4604 -> file_control_cmd_status: File_Control_Cmd_Status_Wrap(function_code, prefix.prefix_value); # example shown in P66
        0x4604  -> file_control_cmd_status: File_Control_Cmd_Status(prefix.prefix_value);  # example shown in P66
        0x4605  -> file_trans: File_Transport(prefix.prefix_value);
        0x4606  -> file_trans_status: File_Transport_Status(prefix.prefix_value);
-       #0x4607 -> file_desc: File_Desc_Wrap(function_code);
        0x4607  -> file_desc: File_Desc;
 
        # internal indication g80
@@ -318,13 +315,20 @@ type Request_Data_Object(function_code: uint8, qualifier_field: uint8, object_ty
 
        # authentication challenge g120
        0x7801  -> challenge: AuthChallenge(prefix.prefix_value);
-       0x7802  -> reply: AuthRely(prefix.prefix_value);
+       0x7802  -> reply: AuthReply(prefix.prefix_value);
        0x7803  -> aggrRequest: AuthAggrRequest(prefix.prefix_value);
-       0x7804  -> seesionKeyRequest: uint8;
+       0x7804  -> seesionKeyRequest: uint16;
        0x7805  -> status: AuthSessionKeyStatus(prefix.prefix_value);
        0x7806  -> keyChange: AuthSessionKeyChange(prefix.prefix_value);
        0x7807  -> error: AuthError(prefix.prefix_value);
+       0x7808  -> user_cert: UserCert(prefix.prefix_value);
+       0x7809  -> mac: MAC(prefix.prefix_value);
+       0x780A  -> user_status_change: UserStatusChange(prefix.prefix_value);
+       0x780B  -> update_key_req: UpdateKeyReq(prefix.prefix_value);
+       0x780C  -> update_key_rep: UpdateKeyRep(prefix.prefix_value);
+       0x780D  -> update_key: UpdateKey(prefix.prefix_value);
+       0x780E  -> update_key_sig: UpdateKeySig(prefix.prefix_value);
+       0x780F  -> update_key_con: UpdateKeyCon(prefix.prefix_value);
        default -> unmatched: Default_Wrap(object_type_field);
    };
 };
@@ -468,10 +472,10 @@ type Response_Data_Object(function_code: uint8, qualifier_field: uint8, object_t
        0x1f02  -> f_ai_16_wflag: FrozenAnalogInput16wFlag;
        0x1f03  -> f_ai_32_wtime: FrozenAnalogInput32wTime;
        0x1f04  -> f_ai_16_wtime: FrozenAnalogInput16wTime;
-       0x1f05  -> f_ai_32_woflag: AnalogInput32woFlag;
-       0x1f06  -> f_ai_16_woflag: AnalogInput16woFlag;
-       0x1f07  -> f_ai_sp_wflag: AnalogInputSPwFlag;
-       0x1f08  -> f_ai_dp_wflag: AnalogInputDPwFlag;
+       0x1f05  -> f_ai_32_woflag: FrozenAnalogInput32woFlag;
+       0x1f06  -> f_ai_16_woflag: FrozenAnalogInput16woFlag;
+       0x1f07  -> f_ai_sp_wflag: FrozenAnalogInputSPwFlag;
+       0x1f08  -> f_ai_dp_wflag: FrozenAnalogInputDPwFlag;
 
        # analog input event g32
        0x2001  -> ai32wotime: AnalogInput32woTime;
@@ -592,12 +596,20 @@ type Response_Data_Object(function_code: uint8, qualifier_field: uint8, object_t
 
        # authentication challenge g120
        0x7801  -> challenge: AuthChallenge(prefix.prefix_value);
-       0x7802  -> reply: AuthRely(prefix.prefix_value);
+       0x7802  -> reply: AuthReply(prefix.prefix_value);
        0x7803  -> aggrRequest: AuthAggrRequest(prefix.prefix_value);
-       0x7804  -> seesionKeyRequest: uint8;
+       0x7804  -> seesionKeyRequest: uint16;
        0x7805  -> status: AuthSessionKeyStatus(prefix.prefix_value);
        0x7806  -> keyChange: AuthSessionKeyChange(prefix.prefix_value);
        0x7807  -> error: AuthError(prefix.prefix_value);
+       0x7808  -> user_cert: UserCert(prefix.prefix_value);
+       0x7809  -> mac: MAC(prefix.prefix_value);
+       0x780A  -> user_status_change: UserStatusChange(prefix.prefix_value);
+       0x780B  -> update_key_req: UpdateKeyReq(prefix.prefix_value);
+       0x780C  -> update_key_rep: UpdateKeyRep(prefix.prefix_value);
+       0x780D  -> update_key: UpdateKey(prefix.prefix_value);
+       0x780E  -> update_key_sig: UpdateKeySig(prefix.prefix_value);
+       0x780F  -> update_key_con: UpdateKeyCon(prefix.prefix_value);
 
        #default -> unkonwndata: Debug_Byte;  # &check( T );
        default -> unmatched: Default_Wrap(object_type_field);
@@ -1381,41 +1393,115 @@ type BCD_Large = record {
 
 # g120v1
 type AuthChallenge(prefix: uint16) = record {
-   csqUsr: uint32;
-   hal: uint8;
+   cha_seq_num: uint32;
+   user_num: uint16;
+   mac_alg: uint8;
    reason: uint8;
-   chan_data: bytestring &length = (prefix - 10);
+   chan_data: bytestring &length = (prefix - 8);
 } &byteorder = littleendian;
 
 # g120v2
-type AuthRely(prefix: uint16) = record {
-   csqUsr: uint32;
-   chan_data: bytestring &length = (prefix - 4);
+type AuthReply(prefix: uint16) = record {
+   cha_seq_num: uint32;
+   user_num: uint16;
+   mac: bytestring &length = (prefix - 6);
 } &byteorder = littleendian;
 
 # g120v3
 type AuthAggrRequest(prefix: uint16) = record {
-   csqUsr: uint32;
-   chan_data: bytestring &length = (prefix - 4);
+   cha_seq_num: uint32;
+   user_num: uint16;
 } &byteorder = littleendian;
 
 # g120v5
 type AuthSessionKeyStatus(prefix: uint16) = record {
-   csqUsr: uint32;
+   cha_seq_num: uint32;
+   user_num: uint16;
    key_alg: uint8;
    key_status: uint8;
-   chan_data: bytestring &length = (prefix - 10);
+   mac_alg: uint8;
+   cha_data_len: uint16;
+   chan_data: bytestring &length = cha_data_len;
+   mac: bytestring &length = (prefix - 11 - cha_data_len);
 } &byteorder = littleendian;
 
 # g120v6
 type AuthSessionKeyChange(prefix: uint16) = record {
-   csqUsr: uint32;
-   key_wrap_data: bytestring &length = (prefix - 5);
+   key_change_num: uint32;
+   user_num: uint16;
+   key_wrap_data: bytestring &length = (prefix - 6);
 } &byteorder = littleendian;
 
 # g120v7
 type AuthError(prefix: uint16) = record {
-   csqUsr: uint32;
+   cha_seq_num: uint32;
+   user_num: uint16;
+   id: uint16;
    error_code: uint8;
-   key_wrap_data: bytestring &length = (prefix - 6);
+   time_error: bytestring &length = 6;
+   error_text: bytestring &length = (prefix - 15);
 } &byteorder = littleendian;
 
+# g120v8
+type UserCert(prefix: uint16) = record {
+   method: uint8;
+   cert_type: uint8;
+   cert_text: bytestring &length = (prefix - 2);
+} &byteorder = littleendian;
+
+# g120v9
+type MAC(prefix: uint16) = record {
+   mac_text: bytestring &length = prefix;
+} &byteorder = littleendian;
+
+# g120v10
+type UserStatusChange(prefix: uint16) = record {
+   method: uint8;
+   operation: uint8;
+   seq_num: uint32;
+   user_role: uint16;
+   user_role_exp: uint16;
+   user_name_len: uint16;
+   user_pubkey_len: uint16;
+   cert_data_len: uint16;
+   user_name: bytestring &length = user_name_len;
+   user_pubkey: bytestring &length = user_pubkey_len;
+   cert_data: bytestring &length = cert_data_len;
+} &byteorder = littleendian;
+
+# g120v11
+type UpdateKeyReq(prefix: uint16) = record {
+   method: uint8;
+   user_name_len: uint16;
+   master_cha_data_len: uint16;
+   user_name: bytestring &length = user_name_len;
+   master_cha_data: bytestring &length = master_cha_data_len;
+} &byteorder = littleendian;
+
+# g120v12
+type UpdateKeyRep(prefix: uint16) = record {
+   seq_num: uint32;
+   user_num: uint16;
+   user_name_len: uint16;
+   outstation_cha_data_len: uint16;
+   outstation_cha_data: bytestring &length = outstation_cha_data_len;
+} &byteorder = littleendian;
+
+# g120v13
+type UpdateKey(prefix: uint16) = record {
+   seq_num: uint32;
+   user_num: uint16;
+   update_key_len: uint16;
+   update_key_data: bytestring &length = update_key_len;
+} &byteorder = littleendian;
+
+# g120v14
+type UpdateKeySig(prefix: uint16) = record {
+   digital_sig: bytestring &length = prefix;
+} &byteorder = littleendian;
+
+# g120v15
+type UpdateKeyCon(prefix: uint16) = record {
+   mac: bytestring &length = prefix;
+} &byteorder = littleendian;
@@ -91,10 +91,57 @@ type DNP3_Application_Response_Header = record {
 type Request_Objects(function_code: uint8) = record {
 object_header: Object_Header(function_code);
 data: case (object_header.object_type_field) of {
+# binary output command g12
+0x0c01 -> g12v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x0c02 -> g12v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
 0x0c03 -> bocmd_PM: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ ( object_header.number_of_item / 8 ) + 1*( object_header.number_of_item > ( (object_header.number_of_item / 8)*8 ) ) ];
-0x3202 -> time_interval_ojbects: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+# time data interval data object g50
+0x3201 -> g50v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+#0x3202 -> time_interval_ojbects: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
 # &check( object_header.qualifier_field == 0x0f && object_header.number_of_item == 0x01);
-default -> ojbects: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x3202 -> g50v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x3203 -> g50v3_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+# Time and Date Common Time-of-Occurrence g51
+0x3301 -> g51v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x3302 -> g51v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+# time delay g52
+0x3401 -> g52v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x3402 -> g52v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+# file control g70
+0x4601 -> g70v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x4602 -> g70v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x4603 -> g70v3_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x4604 -> g70v4_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x4605 -> g70v5_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x4606 -> g70v6_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x4607 -> g70v7_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+# internal indication g80
+0x5001 -> g80v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+# authentication challenge g120
+0x7801 -> g120v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x7802 -> g120v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x7803 -> g120v3_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x7804 -> g120v4_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x7805 -> g120v5_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x7806 -> g120v6_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x7807 -> g120v7_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x7808 -> g120v8_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x7809 -> g120v9_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x780A -> g120v10_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x780B -> g120v11_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x780C -> g120v12_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x780D -> g120v13_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x780E -> g120v14_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+0x780F -> g120v15_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+# default -> ojbects: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+default -> objects: empty;
 };
 # dump_data is always empty; I intend to use it for checking some conditions;
 # However, in the current binpac implementation, &check is not implemented
@@ -1,7 +1,7 @@
 ## Generated for RFB event
 ##
 ## c: The connection record for the underlying transport-layer session/flow.
-event rfb_event%(c: connection%);
+event rfb_event%(c: connection%) &deprecated="Remove in v3.1: This event never served a real purpose and will be removed. Please use the other rfb events instead.";

 ## Generated for RFB event authentication mechanism selection
 ##
@@ -47,4 +47,4 @@ event rfb_server_version%(c: connection, major_version: string, minor_version: s
 ## width: width of the shared screen
 ##
 ## height: height of the shared screen
 event rfb_server_parameters%(c: connection, name: string, width: count, height: count%);
@@ -1265,6 +1265,9 @@ bool bro_broker::VectorIterator::DoUnserialize(const broker::data& data)
 auto x = caf::get_if<broker::vector>(&(*v)[0]);
 auto y = caf::get_if<broker::integer>(&(*v)[1]);

+if ( ! (x && y) )
+return false;
+
 dat = *x;
 it = dat.begin() + *y;
 return true;
@@ -1287,6 +1290,9 @@ bool bro_broker::RecordIterator::DoUnserialize(const broker::data& data)
 auto x = caf::get_if<broker::vector>(&(*v)[0]);
 auto y = caf::get_if<broker::integer>(&(*v)[1]);

+if ( ! (x && y) )
+return false;
+
 dat = *x;
 it = dat.begin() + *y;
 return true;
@@ -179,6 +179,9 @@ std::unique_ptr<CounterVector> CounterVector::Unserialize(const broker::data& da
 auto width = caf::get_if<uint64>(&(*v)[0]);
 auto bits = BitVector::Unserialize((*v)[1]);

+if ( ! (width && bits) )
+return nullptr;
+
 auto cv = std::unique_ptr<CounterVector>(new CounterVector());
 cv->width = *width;
 cv->bits = bits.release();
@@ -492,25 +492,25 @@ bool TopkVal::DoUnserialize(const broker::data& data)

 while ( i < numElements )
 {
-Bucket* b = new Bucket();
 auto elements_count = caf::get_if<uint64>(&(*v)[idx++]);
 auto count = caf::get_if<uint64>(&(*v)[idx++]);

 if ( ! (elements_count && count) )
 return false;

+Bucket* b = new Bucket();
 b->count = *count;
 b->bucketPos = buckets.insert(buckets.end(), b);

 for ( uint64_t j = 0; j < *elements_count; j++ )
 {
-Element* e = new Element();
 auto epsilon = caf::get_if<uint64>(&(*v)[idx++]);
 Val* val = bro_broker::data_to_val((*v)[idx++], type);

 if ( ! (epsilon && val) )
 return false;

+Element* e = new Element();
 e->epsilon = *epsilon;
 e->value = val;
 e->parent = b;
@@ -351,91 +351,6 @@ Val* do_split(StringVal* str_val, RE_Matcher* re, int incl_sep, int max_num_sep)
 return a;
 }

-Val* do_sub(StringVal* str_val, RE_Matcher* re, StringVal* repl, int do_all)
-{
-const u_char* s = str_val->Bytes();
-int offset = 0;
-int n = str_val->Len();
-
-// cut_points is a set of pairs of indices in str that should
-// be removed/replaced. A pair <x,y> means "delete starting
-// at offset x, up to but not including offset y".
-List(ptr_compat_int) cut_points; // where RE matches pieces of str
-
-int size = 0; // size of result
-
-while ( n > 0 )
-{
-// Find next match offset.
-int end_of_match;
-while ( n > 0 &&
-(end_of_match = re->MatchPrefix(&s[offset], n)) <= 0 )
-{
-// This character is going to be copied to the result.
-++size;
-
-// Move on to next character.
-++offset;
---n;
-}
-
-if ( n <= 0 )
-break;
-
-// s[offset .. offset+end_of_match-1] matches re.
-cut_points.append(offset);
-cut_points.append(offset + end_of_match);
-
-offset += end_of_match;
-n -= end_of_match;
-
-if ( ! do_all )
-{
-// We've now done the first substitution - finished.
-// Include the remainder of the string in the result.
-size += n;
-break;
-}
-}
-
-// size now reflects amount of space copied. Factor in amount
-// of space for replacement text.
-int num_cut_points = cut_points.length() / 2;
-size += num_cut_points * repl->Len();
-
-// And a final NUL for good health.
-++size;
-
-byte_vec result = new u_char[size];
-byte_vec r = result;
-
-// Copy it all over.
-int start_offset = 0;
-for ( int i = 0; i < cut_points.length(); i += 2 /* loop over pairs */ )
-{
-int num_to_copy = cut_points[i] - start_offset;
-memcpy(r, s + start_offset, num_to_copy);
-
-r += num_to_copy;
-start_offset = cut_points[i+1];
-
-// Now add in replacement text.
-memcpy(r, repl->Bytes(), repl->Len());
-r += repl->Len();
-}
-
-// Copy final trailing characters.
-int num_to_copy = str_val->Len() - start_offset;
-memcpy(r, s + start_offset, num_to_copy);
-r += num_to_copy;
-
-// Final NUL. No need to increment r, since the length
-// computed from it in the next statement does not include
-// the NUL.
-r[0] = '\0';
-
-return new StringVal(new BroString(1, result, r - result));
-}
-
 %%}

 ## Splits a string into an array of strings according to a pattern.
@@ -535,7 +450,7 @@ function split_string_n%(str: string, re: pattern,
 ## .. zeek:see:: gsub subst_string
 function sub%(str: string, re: pattern, repl: string%): string
 %{
-return do_sub(str, re, repl, 0);
+return str->Substitute(re, repl, false);
 %}

 ## Substitutes a given replacement string for all occurrences of a pattern

@@ -552,7 +467,7 @@ function sub%(str: string, re: pattern, repl: string%): string
 ## .. zeek:see:: sub subst_string
 function gsub%(str: string, re: pattern, repl: string%): string
 %{
-return do_sub(str, re, repl, 1);
+return str->Substitute(re, repl, true);
 %}
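The script-visible semantics of sub() and gsub() are unchanged by this refactor; only the implementation moves from the removed do_sub() helper into BroString::Substitute(). A minimal sketch of the (unchanged) behavior:

event zeek_init()
	{
	print sub("foobar", /o/, "0");   # replaces first match only: f0obar
	print gsub("foobar", /o/, "0");  # replaces all matches: f00bar
	}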
@@ -111,3 +111,14 @@ string Formatter::Render(double d)
 return buf;
 }

+string Formatter::Render(TransportProto proto)
+{
+if ( proto == TRANSPORT_UDP )
+return "udp";
+else if ( proto == TRANSPORT_TCP )
+return "tcp";
+else if ( proto == TRANSPORT_ICMP )
+return "icmp";
+else
+return "unknown";
+}
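At the script level, the new Render(TransportProto) helper shows up in JSON output for port values, which now carry the rendered protocol name (see the updated to_json baseline later in this commit). A sketch, assuming the post-change formatter:

event zeek_init()
	{
	print to_json(65535/tcp);  # expected: {"port":65535,"proto":"tcp"}
	print to_json(123/icmp);   # expected: {"port":123,"proto":"icmp"}
	}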
@@ -112,6 +112,17 @@ public:
 */
 static string Render(double d);

+/**
+* Convert a transport protocol into a string.
+*
+* This is a helper function that formatter implementations may use.
+*
+* @param proto The transport protocol.
+*
+* @return An ASCII representation of the protocol.
+*/
+static string Render(TransportProto proto);
+
 /**
 * Convert a string into a TransportProto. The string must be one of
 * \c tcp, \c udp, \c icmp, or \c unknown.
@@ -11,7 +11,7 @@
 #include <math.h>
 #include <stdint.h>

-#include "./JSON.h"
+#include "JSON.h"

 using namespace threading::formatter;
@@ -27,78 +27,83 @@ JSON::~JSON()
 bool JSON::Describe(ODesc* desc, int num_fields, const Field* const * fields,
 Value** vals) const
 {
-if ( surrounding_braces )
-desc->AddRaw("{");
+ZeekJson j = ZeekJson::object();

 for ( int i = 0; i < num_fields; i++ )
 {
-const u_char* bytes = desc->Bytes();
-int len = desc->Len();
-
-if ( i > 0 &&
-len > 0 &&
-bytes[len-1] != ',' &&
-bytes[len-1] != '{' &&
-bytes[len-1] != '[' &&
-vals[i]->present )
-desc->AddRaw(",");
-
-if ( ! Describe(desc, vals[i], fields[i]->name) )
-return false;
+if ( vals[i]->present )
+{
+ZeekJson new_entry = BuildJSON(vals[i]);
+if ( new_entry.is_null() )
+return false;
+
+j[fields[i]->name] = new_entry;
+}
 }

-if ( surrounding_braces )
-desc->AddRaw("}");
+desc->Add(j.dump());

 return true;
 }

 bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 {
+if ( desc->IsBinary() )
+{
+GetThread()->Error("json formatter: binary format not supported");
+return false;
+}
+
 if ( ! val->present )
 return true;

-if ( name.size() )
-{
-desc->AddRaw("\"", 1);
-desc->Add(name);
-desc->AddRaw("\":", 2);
-}
+ZeekJson j = BuildJSON(val, name);
+if ( j.is_null() )
+return false;
+
+desc->Add(j.dump());
+return true;
+}
+
+threading::Value* JSON::ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype) const
+{
+GetThread()->Error("JSON formatter does not support parsing yet.");
+return nullptr;
+}
+
+ZeekJson JSON::BuildJSON(Value* val, const string& name) const
+{
+ZeekJson j;
 switch ( val->type )
 {
 case TYPE_BOOL:
-desc->AddRaw(val->val.int_val == 0 ? "false" : "true");
+j = val->val.int_val != 0;
 break;

 case TYPE_INT:
-desc->Add(val->val.int_val);
+j = val->val.int_val;
 break;

 case TYPE_COUNT:
 case TYPE_COUNTER:
-desc->Add(val->val.uint_val);
+j = val->val.uint_val;
 break;

 case TYPE_PORT:
-desc->Add(val->val.port_val.port);
+j = val->val.port_val.port;
 break;

 case TYPE_SUBNET:
-desc->AddRaw("\"", 1);
-desc->Add(Render(val->val.subnet_val));
-desc->AddRaw("\"", 1);
+j = Formatter::Render(val->val.subnet_val);
 break;

 case TYPE_ADDR:
-desc->AddRaw("\"", 1);
-desc->Add(Render(val->val.addr_val));
-desc->AddRaw("\"", 1);
+j = Formatter::Render(val->val.addr_val);
 break;

 case TYPE_DOUBLE:
 case TYPE_INTERVAL:
-desc->Add(val->val.double_val);
+j = val->val.double_val;
 break;

 case TYPE_TIME:
@@ -110,15 +115,13 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 time_t the_time = time_t(floor(val->val.double_val));
 struct tm t;

-desc->AddRaw("\"", 1);
-
 if ( ! gmtime_r(&the_time, &t) ||
 ! strftime(buffer, sizeof(buffer), "%Y-%m-%dT%H:%M:%S", &t) )
 {
 GetThread()->Error(GetThread()->Fmt("json formatter: failure getting time: (%lf)", val->val.double_val));
 // This was a failure, doesn't really matter what gets put here
 // but it should probably stand out...
-desc->Add("2000-01-01T00:00:00.000000");
+j = "2000-01-01T00:00:00.000000";
 }
 else
 {
@@ -129,20 +132,17 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 frac += 1;

 snprintf(buffer2, sizeof(buffer2), "%s.%06.0fZ", buffer, fabs(frac) * 1000000);
-desc->Add(buffer2);
+j = buffer2;
 }
-
-desc->AddRaw("\"", 1);
 }

 else if ( timestamps == TS_EPOCH )
-desc->Add(val->val.double_val);
+j = val->val.double_val;

 else if ( timestamps == TS_MILLIS )
 {
 // ElasticSearch uses milliseconds for timestamps
-uint64_t ts = (uint64_t) (val->val.double_val * 1000);
-desc->Add(ts);
+j = (uint64_t) (val->val.double_val * 1000);
 }

 break;
@@ -153,74 +153,40 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 case TYPE_FILE:
 case TYPE_FUNC:
 {
-desc->AddRaw("\"", 1);
-
-for ( int i = 0; i < val->val.string_val.length; ++i )
-{
-char c = val->val.string_val.data[i];
-
-// 2byte Unicode escape special characters.
-if ( c < 32 || c > 126 || c == '\n' || c == '"' || c == '\'' || c == '\\' || c == '&' )
-{
-desc->AddRaw("\\u00", 4);
-char hex[2] = {'0', '0'};
-bytetohex(c, hex);
-desc->AddRaw(hex, 1);
-desc->AddRaw(hex + 1, 1);
-}
-else
-desc->AddRaw(&c, 1);
-}
-
-desc->AddRaw("\"", 1);
+j = json_escape_utf8(string(val->val.string_val.data, val->val.string_val.length));
 break;
 }

 case TYPE_TABLE:
 {
-desc->AddRaw("[", 1);
-
-for ( int j = 0; j < val->val.set_val.size; j++ )
-{
-if ( j > 0 )
-desc->AddRaw(",", 1);
-
-Describe(desc, val->val.set_val.vals[j]);
-}
-
-desc->AddRaw("]", 1);
+j = ZeekJson::array();
+for ( int idx = 0; idx < val->val.set_val.size; idx++ )
+j.push_back(BuildJSON(val->val.set_val.vals[idx]));
 break;
 }

 case TYPE_VECTOR:
 {
-desc->AddRaw("[", 1);
-
-for ( int j = 0; j < val->val.vector_val.size; j++ )
-{
-if ( j > 0 )
-desc->AddRaw(",", 1);
-Describe(desc, val->val.vector_val.vals[j]);
-}
-
-desc->AddRaw("]", 1);
+j = ZeekJson::array();
+for ( int idx = 0; idx < val->val.vector_val.size; idx++ )
+j.push_back(BuildJSON(val->val.vector_val.vals[idx]));
 break;
 }

 default:
-return false;
+break;
 }

-return true;
-}
-
-threading::Value* JSON::ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype) const
-{
-GetThread()->Error("JSON formatter does not support parsing yet.");
-return NULL;
-}
-
-void JSON::SurroundingBraces(bool use_braces)
-{
-surrounding_braces = use_braces;
-}
+if ( ! name.empty() && ! j.is_null() )
+{
+ZeekJson j2 = ZeekJson::object();
+j2[name] = j;
+return j2;
+}
+
+return j;
 }
@@ -4,9 +4,19 @@
 #define THREADING_FORMATTERS_JSON_H

 #include "../Formatter.h"
+#include "3rdparty/json.hpp"
+#include "3rdparty/fifo_map.hpp"

 namespace threading { namespace formatter {

+// Define a class for use with the json library that orders the keys in the same order that
+// they were inserted. By default, the json library orders them alphabetically and we don't
+// want it like that.
+template<class K, class V, class compare, class A>
+using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
+using ZeekJson = nlohmann::basic_json<json_fifo_map>;
+
 /**
 * A thread-safe class for converting values into a JSON representation
 * and vice versa.

@@ -27,9 +37,10 @@ public:
 threading::Value** vals) const override;
 threading::Value* ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype = TYPE_ERROR) const override;

-void SurroundingBraces(bool use_braces);
-
 private:

+ZeekJson BuildJSON(Value* val, const string& name = "") const;
+
 TimeFormat timestamps;
 bool surrounding_braces;
 };
src/util.cc
119
src/util.cc
|
@@ -1842,24 +1842,24 @@ void bro_strerror_r(int bro_errno, char* buf, size_t buflen)
 strerror_r_helper(res, buf, buflen);
 }

+static const std::map<const char*, const char*, CompareString> legacy_vars = {
+{ "ZEEKPATH", "BROPATH" },
+{ "ZEEK_PLUGIN_PATH", "BRO_PLUGIN_PATH" },
+{ "ZEEK_PLUGIN_ACTIVATE", "BRO_PLUGIN_ACTIVATE" },
+{ "ZEEK_PREFIXES", "BRO_PREFIXES" },
+{ "ZEEK_DNS_FAKE", "BRO_DNS_FAKE" },
+{ "ZEEK_SEED_FILE", "BRO_SEED_FILE" },
+{ "ZEEK_LOG_SUFFIX", "BRO_LOG_SUFFIX" },
+{ "ZEEK_PROFILER_FILE", "BRO_PROFILER_FILE" },
+{ "ZEEK_DISABLE_ZEEKYGEN", "BRO_DISABLE_BROXYGEN" },
+{ "ZEEK_DEFAULT_CONNECT_RETRY", "BRO_DEFAULT_CONNECT_RETRY" },
+{ "ZEEK_BROKER_MAX_THREADS", "BRO_BROKER_MAX_THREADS" },
+{ "ZEEK_DEFAULT_LISTEN_ADDRESS", "BRO_DEFAULT_LISTEN_ADDRESS" },
+{ "ZEEK_DEFAULT_LISTEN_RETRY", "BRO_DEFAULT_LISTEN_RETRY" },
+};
+
 char* zeekenv(const char* name)
 {
-static std::map<const char*, const char*, CompareString> legacy_vars = {
-{ "ZEEKPATH", "BROPATH" },
-{ "ZEEK_PLUGIN_PATH", "BRO_PLUGIN_PATH" },
-{ "ZEEK_PLUGIN_ACTIVATE", "BRO_PLUGIN_ACTIVATE" },
-{ "ZEEK_PREFIXES", "BRO_PREFIXES" },
-{ "ZEEK_DNS_FAKE", "BRO_DNS_FAKE" },
-{ "ZEEK_SEED_FILE", "BRO_SEED_FILE" },
-{ "ZEEK_LOG_SUFFIX", "BRO_LOG_SUFFIX" },
-{ "ZEEK_PROFILER_FILE", "BRO_PROFILER_FILE" },
-{ "ZEEK_DISABLE_ZEEKYGEN", "BRO_DISABLE_BROXYGEN" },
-{ "ZEEK_DEFAULT_CONNECT_RETRY", "BRO_DEFAULT_CONNECT_RETRY" },
-{ "ZEEK_BROKER_MAX_THREADS", "BRO_BROKER_MAX_THREADS" },
-{ "ZEEK_DEFAULT_LISTEN_ADDRESS", "BRO_DEFAULT_LISTEN_ADDRESS" },
-{ "ZEEK_DEFAULT_LISTEN_RETRY", "BRO_DEFAULT_LISTEN_RETRY" },
-};
-
 auto rval = getenv(name);

 if ( rval )
|
@ -1872,3 +1872,90 @@ char* zeekenv(const char* name)
|
||||||
|
|
||||||
return getenv(it->second);
|
return getenv(it->second);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static string json_escape_byte(char c)
|
||||||
|
{
|
||||||
|
char hex[2] = {'0', '0'};
|
||||||
|
bytetohex(c, hex);
|
||||||
|
|
||||||
|
string result = "\\x";
|
||||||
|
result.append(hex, 2);
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
string json_escape_utf8(const string& val)
|
||||||
|
{
|
||||||
|
string result;
|
||||||
|
result.reserve(val.length());
|
||||||
|
|
||||||
|
size_t char_start = 0;
|
||||||
|
size_t idx;
|
||||||
|
for ( idx = 0; idx < val.length(); )
|
||||||
|
{
|
||||||
|
// Normal ASCII characters plus a few of the control characters can be inserted directly. The rest of
|
||||||
|
// the control characters should be escaped as regular bytes.
|
||||||
|
if ( ( val[idx] >= 32 && val[idx] <= 127 ) ||
|
||||||
|
val[idx] == '\b' || val[idx] == '\f' || val[idx] == '\n' || val[idx] == '\r' || val[idx] == '\t' )
|
||||||
|
{
|
||||||
|
result.push_back(val[idx]);
|
||||||
|
++idx;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
else if ( val[idx] >= 0 && val[idx] < 32 )
|
||||||
|
{
|
||||||
|
result.append(json_escape_byte(val[idx]));
|
||||||
|
++idx;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// The next bit is based on the table at https://en.wikipedia.org/wiki/UTF-8#Description.
|
||||||
|
// If next character is 11110xxx, this is a 4-byte UTF-8
|
||||||
|
unsigned int char_size = 0;
|
||||||
|
if ( (val[idx] & 0xF8) == 0xF0 ) char_size = 4;
|
||||||
|
|
||||||
|
// If next character is 1110xxxx, this is a 3-byte UTF-8
|
||||||
|
else if ( (val[idx] & 0xF0) == 0xE0 ) char_size = 3;
|
||||||
|
|
||||||
|
// If next character is 110xxxxx, this is a 2-byte UTF-8
|
||||||
|
else if ( (val[idx] & 0xE0) == 0xC0 ) char_size = 2;
|
||||||
|
|
||||||
|
// This byte isn't a continuation byte, insert it as a byte and continue.
|
||||||
|
if ( char_size == 0)
|
||||||
|
{
|
||||||
|
result.append(json_escape_byte(val[idx]));
|
||||||
|
++idx;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we don't have enough bytes to get to the end of character, give up and insert all of the rest
|
||||||
|
// of them as escaped values.
|
||||||
|
if ( char_size > (val.length() - idx) )
|
||||||
|
break;
|
||||||
|
|
||||||
|
// Loop through the rest of the supposed character and see if this is a valid character.
|
||||||
|
size_t c_idx = idx + 1;
|
||||||
|
for ( ; c_idx < idx + char_size; c_idx++ )
|
||||||
|
if ( (val[c_idx] & 0xC0) != 0x80 ) break;
|
||||||
|
|
||||||
|
// if we didn't make it to the end of the character without finding an error, insert just this
|
||||||
|
// character and skip ahead. Otherwise insert all of the bytes for this character into the result.
|
||||||
|
if ( c_idx != idx + char_size )
|
||||||
|
{
|
||||||
|
result.append(json_escape_byte(val[idx]));
|
||||||
|
++idx;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
for ( size_t step = 0; step < char_size; step++, idx++ )
|
||||||
|
result.push_back(val[idx]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ( idx != val.length() )
|
||||||
|
for ( ; idx < val.length(); ++idx )
|
||||||
|
result.append(json_escape_byte(val[idx]));
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
|
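The net effect is that well-formed UTF-8 passes through untouched while invalid sequences degrade to per-byte \xYY escapes. A sketch of the observable behavior via the new to_json() BIF, assuming string rendering shares json_escape_utf8() (expected outputs inferred from the log baselines later in this commit):

event zeek_init()
	{
	print to_json([$s="\xc3\xb1"]);  # valid 2-octet sequence, kept as-is: {"s":"ñ"}
	print to_json([$s="\xc3\x28"]);  # invalid sequence, escaped: {"s":"\\xc3("}
	}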
@@ -565,4 +565,12 @@ std::unique_ptr<T> build_unique (Args&&... args) {
 return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
 }

+/**
+* Escapes bytes in a string that are not valid UTF8 characters with \xYY format. Used
+* by the JSON writer and BIF methods.
+* @param val the input string to be escaped
+* @return the escaped string
+*/
+std::string json_escape_utf8(const std::string& val);
+
 #endif
76
src/zeek.bif
@@ -1981,68 +1981,7 @@ function lookup_ID%(id: string%) : any
 ## Returns: A table that describes the fields of a record.
 function record_fields%(rec: any%): record_field_table
 %{
-TableVal* fields = new TableVal(record_field_table);
-
-auto t = rec->Type();
-
-if ( t->Tag() != TYPE_RECORD && t->Tag() != TYPE_TYPE )
-{
-reporter->Error("non-record value/type passed to record_fields");
-return fields;
-}
-
-RecordType* rt = nullptr;
-RecordVal* rv = nullptr;
-
-if ( t->Tag() == TYPE_RECORD )
-{
-rt = t->AsRecordType();
-rv = rec->AsRecordVal();
-}
-else
-{
-t = t->AsTypeType()->Type();
-
-if ( t->Tag() != TYPE_RECORD )
-{
-reporter->Error("non-record value/type passed to record_fields");
-return fields;
-}
-
-rt = t->AsRecordType();
-}
-
-for ( int i = 0; i < rt->NumFields(); ++i )
-{
-BroType* ft = rt->FieldType(i);
-TypeDecl* fd = rt->FieldDecl(i);
-Val* fv = nullptr;
-
-if ( rv )
-fv = rv->Lookup(i);
-
-if ( fv )
-Ref(fv);
-
-bool logged = (fd->attrs && fd->FindAttr(ATTR_LOG) != 0);
-
-RecordVal* nr = new RecordVal(record_field);
-
-if ( ft->Tag() == TYPE_RECORD )
-nr->Assign(0, new StringVal("record " + ft->GetName()));
-else
-nr->Assign(0, new StringVal(type_name(ft->Tag())));
-
-nr->Assign(1, val_mgr->GetBool(logged));
-nr->Assign(2, fv);
-nr->Assign(3, rt->FieldDefault(i));
-
-Val* field_name = new StringVal(rt->FieldName(i));
-fields->Assign(field_name, nr);
-Unref(field_name);
-}
-
-return fields;
+return rec->GetRecordFields();
 %}

 ## Enables detailed collection of profiling statistics. Statistics include
@@ -5100,3 +5039,16 @@ function anonymize_addr%(a: addr, cl: IPAddrAnonymizationClass%): addr
 (enum ip_addr_anonymization_class_t) anon_class));
 }
 %}

+## A function to convert arbitrary Zeek data into a JSON string.
+##
+## v: The value to convert to JSON. Typically a record.
+##
+## only_loggable: If the v value is a record this will only cause
+## fields with the &log attribute to be included in the JSON.
+##
+## returns: a JSON formatted string.
+function to_json%(val: any, only_loggable: bool &default=F, field_escape_pattern: pattern &default=/^_/%): string
+%{
+return val->ToJSON(only_loggable, field_escape_pattern);
+%}
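A minimal usage sketch for the new BIF; the Endpoint record type is hypothetical, and only_loggable behaves as the doc comment above describes:

type Endpoint: record {
	host: addr &log;
	note: string;
};

event zeek_init()
	{
	local ep = Endpoint($host = 1.2.3.4, $note = "internal");
	print to_json(ep);     # expected: {"host":"1.2.3.4","note":"internal"}
	print to_json(ep, T);  # &log fields only: {"host":"1.2.3.4"}
	}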
@@ -0,0 +1 @@
+warning in /Users/tim/Desktop/projects/zeek/testing/btest/../../scripts//base/utils/json.zeek, line 2: deprecated script loaded from command line arguments ="Remove in 3.1. to_json is now always available as a built-in function."
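Scripts that still load the old helper keep working but now emit the warning captured in this baseline; a sketch:

@load base/utils/json  # deprecated by this change; to_json() is now a BIF and always available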
@@ -249,7 +249,6 @@ scripts/base/init-default.zeek
 scripts/base/frameworks/openflow/main.zeek
 scripts/base/frameworks/openflow/plugins/__load__.zeek
 scripts/base/frameworks/openflow/plugins/ryu.zeek
-scripts/base/utils/json.zeek
 scripts/base/frameworks/openflow/plugins/log.zeek
 scripts/base/frameworks/openflow/plugins/broker.zeek
 scripts/base/frameworks/openflow/non-cluster.zeek
@@ -8,3 +8,4 @@
 -./frameworks/openflow/cluster.zeek
 -./frameworks/packet-filter/cluster.zeek
 -./frameworks/sumstats/cluster.zeek
+-./utils/json.zeek
@@ -821,7 +821,6 @@
 0.000000 MetaHookPost LoadFile(0, base<...>/input.bif.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/intel) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/irc) -> -1
-0.000000 MetaHookPost LoadFile(0, base<...>/json.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/krb) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/logging) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/logging.bif.zeek) -> -1

@@ -1712,7 +1711,6 @@
 0.000000 MetaHookPre LoadFile(0, base<...>/input.bif.zeek)
 0.000000 MetaHookPre LoadFile(0, base<...>/intel)
 0.000000 MetaHookPre LoadFile(0, base<...>/irc)
-0.000000 MetaHookPre LoadFile(0, base<...>/json.zeek)
 0.000000 MetaHookPre LoadFile(0, base<...>/krb)
 0.000000 MetaHookPre LoadFile(0, base<...>/logging)
 0.000000 MetaHookPre LoadFile(0, base<...>/logging.bif.zeek)

@@ -2611,7 +2609,6 @@
 0.000000 | HookLoadFile base<...>/input.bif.zeek
 0.000000 | HookLoadFile base<...>/intel
 0.000000 | HookLoadFile base<...>/irc
-0.000000 | HookLoadFile base<...>/json.zeek
 0.000000 | HookLoadFile base<...>/krb
 0.000000 | HookLoadFile base<...>/logging
 0.000000 | HookLoadFile base<...>/logging.bif.zeek
@@ -1,22 +1,22 @@
-{"d":2.153226e+09}
+{"d":2153226000.0}
-{"d":2.153226e+09}
+{"d":2153226000.1}
-{"d":2.153226e+09}
+{"d":2153226000.123457}
 {"d":1.0}
 {"d":1.1}
-{"d":1.123457}
+{"d":1.123456789}
-{"d":-1.123457}
+{"d":-1.123456789}
 {"d":1.1234}
 {"d":0.1234}
 {"d":50000.0}
 {"d":-50000.0}
-{"d":3.140000e+15}
+{"d":3.14e+15}
-{"d":-3.140000e+15}
+{"d":-3.14e+15}
-{"d":1.790000e+308}
+{"d":1.79e+308}
-{"d":-1.790000e+308}
+{"d":-1.79e+308}
-{"d":0.000012}
+{"d":1.23456789e-05}
-{"d":0}
+{"d":2.23e-308}
-{"d":-0}
+{"d":-2.23e-308}
-{"d":inf}
+{"d":null}
-{"d":-inf}
+{"d":null}
-{"d":0.0}
+{"d":-0.0}
-{"d":nan}
+{"d":null}
@@ -3,7 +3,7 @@
 #empty_field (empty)
 #unset_field -
 #path test
-#open 2017-11-06-19-58-08
+#open 2019-07-01-17-40-55
 #fields d
 #types double
 2153226000.0

@@ -28,4 +28,4 @@ inf
 -inf
 0.0
 nan
-#close 2017-11-06-19-58-08
+#close 2019-07-01-17-40-55
@@ -0,0 +1,12 @@
+{"s":"a"}
+{"s":"\b\f\n\r\t\\x00\\x15"}
+{"s":"ñ"}
+{"s":"\\xc3("}
+{"s":"\\xa0\\xa1"}
+{"s":"₡"}
+{"s":"\\xe2(\\xa1"}
+{"s":"\\xe2\\x82("}
+{"s":"𐌼"}
+{"s":"\\xf0(\\x8c\\xbc"}
+{"s":"\\xf0\\x90(\\xbc"}
+{"s":"\\xf0(\\x8c("}
@@ -1 +1 @@
-{"b":true,"i":-42,"e":"SSH::LOG","c":21,"p":123,"sn":"10.0.0.0/24","a":"1.2.3.4","d":3.14,"t":1215620010.54321,"iv":100.0,"s":"hurz","sc":[2,4,1,3],"ss":["BB","AA","CC"],"se":[],"vc":[10,20,30],"ve":[],"f":"SSH::foo\u000a{ \u000aif (0 < SSH::i) \u000a\u0009return (Foo);\u000aelse\u000a\u0009return (Bar);\u000a\u000a}"}
+{"b":true,"i":-42,"e":"SSH::LOG","c":21,"p":123,"sn":"10.0.0.0/24","a":"1.2.3.4","d":3.14,"t":1215620010.54321,"iv":100.0,"s":"hurz","sc":[2,4,1,3],"ss":["BB","AA","CC"],"se":[],"vc":[10,20,30],"ve":[],"f":"SSH::foo\n{ \nif (0 < SSH::i) \n\treturn (Foo);\nelse\n\treturn (Bar);\n\n}"}
@@ -1,22 +1,22 @@
 http://127.0.0.1:8080/stats/flowentry/clear/42
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 0, "idle_timeout": 0, "hard_timeout": 0, "match": {}, "actions": [{"port": 3, "type": "OUTPUT"}, {"port": 7, "type": "OUTPUT"}], "cookie": 4398046511105, "flags": 0, "dpid": 42}
+{"priority":0,"idle_timeout":0,"hard_timeout":0,"match":{},"actions":[{"port":3,"type":"OUTPUT"},{"port":7,"type":"OUTPUT"}],"cookie":4398046511105,"flags":0,"dpid":42}
 Flow_mod_success
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "74.53.140.153/32", "tp_dst": 25, "tp_src": 1470, "nw_proto": 6, "dl_type": 2048, "nw_src": "10.10.1.4/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"74.53.140.153/32","tp_dst":25,"tp_src":1470,"nw_proto":6,"dl_type":2048,"nw_src":"10.10.1.4/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "10.10.1.4/32", "tp_dst": 1470, "tp_src": 25, "nw_proto": 6, "dl_type": 2048, "nw_src": "74.53.140.153/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"10.10.1.4/32","tp_dst":1470,"tp_src":25,"nw_proto":6,"dl_type":2048,"nw_src":"74.53.140.153/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 Flow_mod_success
 Flow_mod_success
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "192.168.133.102/32", "tp_dst": 25, "tp_src": 49648, "nw_proto": 6, "dl_type": 2048, "nw_src": "192.168.133.100/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.102/32","tp_dst":25,"tp_src":49648,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.100/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "192.168.133.100/32", "tp_dst": 49648, "tp_src": 25, "nw_proto": 6, "dl_type": 2048, "nw_src": "192.168.133.102/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.100/32","tp_dst":49648,"tp_src":25,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.102/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 Flow_mod_success
 Flow_mod_success
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "17.167.150.73/32", "tp_dst": 443, "tp_src": 49655, "nw_proto": 6, "dl_type": 2048, "nw_src": "192.168.133.100/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"17.167.150.73/32","tp_dst":443,"tp_src":49655,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.100/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "192.168.133.100/32", "tp_dst": 49655, "tp_src": 443, "nw_proto": 6, "dl_type": 2048, "nw_src": "17.167.150.73/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.100/32","tp_dst":49655,"tp_src":443,"nw_proto":6,"dl_type":2048,"nw_src":"17.167.150.73/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 Flow_mod_success
 Flow_mod_success
@@ -8,32 +8,33 @@ true
 "-12.0 hrs"
 "hello"
 ""
-65535
+{"port":65535,"proto":"tcp"}
-1
+{"port":1,"proto":"udp"}
-123
+{"port":123,"proto":"icmp"}
-0
+{"port":0,"proto":"unknown"}
 "1.2.3.4"
 "ffff:1234::1"
 "123.123.123.123"
 "192.0.0.0/8"
 "fe80::/64"
 "Red"
-{"s": "test", "c": 100}
+"/^?(^abcd)$?/"
-{"s": "test"}
+{"s":"test","c":100}
-{"s": "test"}
+{"s":"test"}
-{"m": {"s": "test"}}
+{"s":"test"}
+{"m":{"s":"test"}}
 []
-[2, 1]
+[2,1]
 ["1.2.3.4"]
-[[true, false]]
+[[true,false]]
-[{"s": "test"}]
+[{"s":"test"}]
 []
-[2, 1]
+[2,1]
 ["1.2.3.4"]
-[{"s": "test"}]
+[{"s":"test"}]
-[{"s": "test"}]
+[{"s":"test"}]
 {}
-{"2": "10.2.2.2", "1": "10.1.1.1"}
+{"2":"10.2.2.2","1":"10.1.1.1"}
-{"10.1.1.1": {"a": 1}, "10.2.2.2": {"b": 2}}
+{"10.1.1.1":{"a":1},"10.2.2.2":{"b":2}}
-{"10.1.1.1": [1, 2], "10.2.2.2": [3, 5]}
+{"10.1.1.1":[1,2],"10.2.2.2":[3,5]}
-{"1": {"s": "test"}}
+{"1":{"s":"test"}}
@@ -0,0 +1,59 @@
+#
+# @TEST-EXEC: zeek -b %INPUT
+# @TEST-EXEC: btest-diff ssh.log
+#
+# Testing all possible types.
+
+redef LogAscii::use_json = T;
+
+module SSH;
+
+export {
+redef enum Log::ID += { LOG };
+
+type Log: record {
+s: string;
+} &log;
+}
+
+event zeek_init()
+{
+Log::create_stream(SSH::LOG, [$columns=Log]);
+
+# Strings taken from https://stackoverflow.com/a/3886015
+
+# Valid ASCII and valid ASCII control characters
+Log::write(SSH::LOG, [$s="a"]);
+Log::write(SSH::LOG, [$s="\b\f\n\r\t\x00\x15"]);
+
+# Valid 2 Octet Sequence
+Log::write(SSH::LOG, [$s="\xc3\xb1"]);
+
+# Invalid 2 Octet Sequence
+Log::write(SSH::LOG, [$s="\xc3\x28"]);
+
+# Invalid Sequence Identifier
+Log::write(SSH::LOG, [$s="\xa0\xa1"]);
+
+# Valid 3 Octet Sequence
+Log::write(SSH::LOG, [$s="\xe2\x82\xa1"]);
+
+# Invalid 3 Octet Sequence (in 2nd Octet)
+Log::write(SSH::LOG, [$s="\xe2\x28\xa1"]);
+
+# Invalid 3 Octet Sequence (in 3rd Octet)
+Log::write(SSH::LOG, [$s="\xe2\x82\x28"]);
+
+# Valid 4 Octet Sequence
+Log::write(SSH::LOG, [$s="\xf0\x90\x8c\xbc"]);
+
+# Invalid 4 Octet Sequence (in 2nd Octet)
+Log::write(SSH::LOG, [$s="\xf0\x28\x8c\xbc"]);
+
+# Invalid 4 Octet Sequence (in 3rd Octet)
+Log::write(SSH::LOG, [$s="\xf0\x90\x28\xbc"]);
+
+# Invalid 4 Octet Sequence (in 4th Octet)
+Log::write(SSH::LOG, [$s="\xf0\x28\x8c\x28"]);
+}
@@ -56,7 +56,7 @@ event slow_death()
 schedule 2sec { die() };
 }

-event kill_worker()
+event ready()
 {
 Reporter::info("qux");
 Broker::publish("death", slow_death);

@@ -69,20 +69,30 @@ event zeek_init()
 Broker::subscribe("death");
 suspend_processing();
 }

+if ( Cluster::node == "manager-1" )
+{
+Broker::subscribe("ready");
+}
+}
+
+global conn_count = 0;
+
+event new_connection(c: connection)
+{
+++conn_count;
+
+if ( conn_count == 30 )
+{
+Reporter::info("qux");
+Broker::publish("ready", ready);
+}
 }

 event Broker::peer_added(endpoint: Broker::EndpointInfo, msg: string)
 {
-if ( Cluster::node == "manager-1" )
-{
-schedule 2sec { kill_worker() };
-}
-
 if ( Cluster::node == "worker-1" )
-{
 continue_processing();
-Reporter::info("qux");
-}
 }

 event Broker::peer_lost(endpoint: Broker::EndpointInfo, msg: string)
@@ -72,6 +72,9 @@ event zeek_init()
 local e: color = Red;
 print to_json(e);

+local p: pattern = /^abcd/;
+print to_json(p);
+
 # #########################
 # Test the container types:
2
testing/external/commit-hash.zeek-testing
vendored
@@ -1 +1 @@
-3db517fc4e1cfb1f0050b65eee4fd1b61ba5a461
+84239d2fdd2f491f436f8597e8b6ca5fb93f7a5f