Merge branch 'master' of https://github.com/zeek/zeek into topic/zeke/README
commit b767d18ee0
50 changed files with 1148 additions and 520 deletions
40  .clang-tidy  (new file)
@@ -0,0 +1,40 @@
Checks: '*,
    -abseil-string-find-startswith,
    -bugprone-exception-escape,
    -bugprone-macro-parentheses,
    -bugprone-suspicious-semicolon,
    -cert-err58-cpp,
    -cppcoreguidelines-avoid-c-arrays,
    -cppcoreguidelines-avoid-goto,
    -cppcoreguidelines-avoid-magic-numbers,
    -cppcoreguidelines-macro-usage,
    -cppcoreguidelines-non-private-member-variables-in-classes,
    -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
    -cppcoreguidelines-pro-bounds-constant-array-index,
    -cppcoreguidelines-pro-bounds-pointer-arithmetic,
    -cppcoreguidelines-pro-type-const-cast,
    -cppcoreguidelines-pro-type-reinterpret-cast,
    -fuchsia-default-arguments,
    -fuchsia-multiple-inheritance,
    -fuchsia-overloaded-operator,
    -fuchsia-statically-constructed-objects,
    -fuchsia-trailing-return,
    -google-build-using-namespace,
    -google-explicit-constructor,
    -google-readability-braces-around-statements,
    -hicpp-avoid-c-arrays,
    -hicpp-avoid-goto,
    -hicpp-braces-around-statements,
    -hicpp-explicit-conversions,
    -hicpp-no-array-decay,
    -llvm-header-guard,
    -misc-macro-parentheses,
    -misc-non-private-member-variables-in-classes,
    -misc-suspicious-semicolon,
    -misc-unused-parameters,
    -modernize-avoid-c-arrays,
    -modernize-use-nodiscard,
    -readability-braces-around-statements,
    -readability-container-size-empty,
    -readability-implicit-bool-conversion,
    -readability-magic-numbers'
64  CHANGES
@@ -1,4 +1,68 @@
2.6-598 | 2019-07-12 18:20:12 -0700

  * Fix canonification of timestamps with a decisecond multiple (Jon Siwek, Corelight)

2.6-597 | 2019-07-12 15:01:56 -0700

  * Fix a couple of resource leaks from JSON changes (Tim Wojtulewicz, Corelight)

2.6-595 | 2019-07-12 13:34:08 -0700

  * GH-157: Mark some attributes as not allowed for global variables (Tim Wojtulewicz, Corelight)

    This disallows &default for global values that are not tables, and &optional for all globals.
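    The GH-157 change above, sketched at the script level — a hedged illustration with
    hypothetical declarations, not taken from this patch:

        global seen: table[addr] of count &default=0;  # still allowed: &default on a global table
        global threshold: count &default=5;            # now an error: &default on a non-table global
        global nickname: string &optional;             # now an error: &optional on any global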
  * Fix uncaught exceptions from Val cloning failures (Jon Siwek, Corelight)

2.6-591 | 2019-07-11 13:29:28 -0700

  * Fix potential thread safety issue with zeekenv util function

    Observed segfault accessing the local static std::map of zeekenv() from
    a logging thread, but only in non-debug builds using Apple/Clang
    compiler, not in a debug build or GCC. Don't quite get this behavior
    since static local variable initialization is supposed to be thread-safe
    since C++11, but moving to a global static works and is "more efficient"
    anyway since there's no longer any run-time overhead. (Jon Siwek, Corelight)

2.6-589 | 2019-07-11 13:14:52 -0700

  * GH-421: fix bugs/regressions in DNP3 analyzer (Hui Lin)

2.6-587 | 2019-07-11 12:13:48 -0700

  * Fix a sign-compare compiler warning (Jon Siwek, Corelight)

2.6-586 | 2019-07-11 11:15:40 -0700

  * Convert all JSON output to use an external library for better consistency (Tim Wojtulewicz, Corelight)

    See NEWS for more details; this makes to_json a bif and causes slight changes in its
    output, as well as the output of the JSON logger.

2.6-576 | 2019-07-10 18:38:54 -0700

  * Remove unused option: chunked_io_buffer_soft_cap (Jon Siwek, Corelight)

2.6-575 | 2019-07-09 18:28:03 -0700

  * Avoid a null dereference (Coverity-1402816) (Tim Wojtulewicz, Corelight)

  * Avoid resource leaks (Coverity-1402818, Coverity-1402812) (Tim Wojtulewicz, Corelight)

  * Avoid null dereference in broker (Coverity-1402824, Coverity-1402814) (Tim Wojtulewicz, Corelight)

  * Improve stability of a unit test (Jon Siwek, Corelight)

2.6-569 | 2019-07-03 13:03:22 -0700

  * Improve stability of a unit test (Jon Siwek, Corelight)

2.6-568 | 2019-07-03 11:50:56 -0700

  * Add clang-tidy rule to CMake including a base configuration (Tim Wojtulewicz, Corelight)

2.6-566 | 2019-07-03 11:08:24 -0700

  * Improve Zeekygen output for long attribute expressions (Jon Siwek, Corelight)
11  NEWS
@@ -347,6 +347,16 @@ Changed Functionality
  of each other on separate cluster nodes to all be logged rather
  than suppressed and de-duplicated into a single notice.

- to_json is now a bif, no longer a script. Loading base/utils/json.zeek is no
  longer necessary and has been deprecated. to_json should yield much better, always
  valid json. There are some small differences in output; unnecessary spaces are removed
  and port values are rendered differently, now including the port and the protocol.

- The output of the JSON logger now uses an external library to generate json. There
  are small changes to the output; most visibly double numbers are now rounded slightly
  differently. The way in which port values are rendered does _not_ change for JSON logs.
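  The two JSON changes above, sketched at the script level. This is a hedged
  illustration; the exact output bytes are an assumption based on the description
  and the new bif's code, not captured output:

      local r = [$a=1, $b="one"];
      print to_json(r);        # now {"a":1,"b":"one"} — separator spaces are gone
      print to_json(80/tcp);   # now {"port":80,"proto":"tcp"} — previously a bare port number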
Removed Functionality
---------------------

@@ -479,6 +489,7 @@ Removed Functionality
  - ``backdoor_stat_period``
  - ``backdoor_stat_backoff``
  - ``backdoor_endp_stats``
  - ``chunked_io_buffer_soft_cap``

- The following constants were used as part of deprecated functionality in version 2.6
  or below and are removed from this release:
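  Scripts that still reference the removed option will now fail to load — a minimal
  sketch (hypothetical script line, not from this patch):

      redef chunked_io_buffer_soft_cap = 400000;  # now a load-time error: unknown identifier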
2  VERSION
@@ -1 +1 @@
-2.6-566
+2.6-598
@@ -1 +1 @@
-Subproject commit 1a8d674d2ccbef06a6e4e6f1a9c8747a2eadf026
+Subproject commit 4b0c213ad64737fd1694216fe136b5665f932e22
2  doc
@@ -1 +1 @@
-Subproject commit 80c0cb3d9db1cea6661b3d3c79d17e3b67533507
+Subproject commit bcfe6ffc88e0a89e7ade664113d458bae9e5e5fc
@@ -3,7 +3,6 @@
@load base/frameworks/openflow
@load base/utils/active-http
@load base/utils/exec
-@load base/utils/json

module OpenFlow;
@@ -4608,11 +4608,6 @@ const sig_max_group_size = 50 &redef;
## Description transmitted to remote communication peers for identification.
const peer_description = "zeek" &redef;

-## The number of IO chunks allowed to be buffered between the child
-## and parent process of remote communication before Zeek starts dropping
-## connections to remote peers in an attempt to catch up.
-const chunked_io_buffer_soft_cap = 800000 &redef;

## Reassemble the beginning of all TCP connections before doing
## signature matching. Enabling this provides more accurate matching at the
## expense of CPU cycles.
@@ -1,109 +1,2 @@
-##! Functions to assist with generating JSON data from Zeek data scructures.
-# We might want to implement this in core somtime, this looks... hacky at best.
-
-@load base/utils/strings
-
-## A function to convert arbitrary Zeek data into a JSON string.
-##
-## v: The value to convert to JSON.  Typically a record.
-##
-## only_loggable: If the v value is a record this will only cause
-##                fields with the &log attribute to be included in the JSON.
-##
-## returns: a JSON formatted string.
-function to_json(v: any, only_loggable: bool &default=F, field_escape_pattern: pattern &default=/^_/): string
-    {
-    local tn = type_name(v);
-    switch ( tn )
-        {
-        case "type":
-            return "";
-
-        case "string":
-            return cat("\"", gsub(gsub(clean(v), /\\/, "\\\\"), /\"/, "\\\""), "\"");
-
-        case "port":
-            return cat(port_to_count(to_port(cat(v))));
-
-        case "enum":
-            fallthrough;
-        case "interval":
-            fallthrough;
-        case "addr":
-            fallthrough;
-        case "subnet":
-            return cat("\"", v, "\"");
-
-        case "int":
-            fallthrough;
-        case "count":
-            fallthrough;
-        case "time":
-            return cat(v);
-
-        case "double":
-            return fmt("%.16g", v);
-
-        case "bool":
-            local bval: bool = v;
-            return bval ? "true" : "false";
-
-        default:
-            break;
-        }
-
-    if ( /^record/ in tn )
-        {
-        local rec_parts: string_vec = vector();
-
-        local ft = record_fields(v);
-        for ( field, field_desc in ft )
-            {
-            # replace the escape pattern in the field.
-            if( field_escape_pattern in field )
-                field = cat(sub(field, field_escape_pattern, ""));
-            if ( field_desc?$value && (!only_loggable || field_desc$log) )
-                {
-                local onepart = cat("\"", field, "\": ", to_json(field_desc$value, only_loggable));
-                rec_parts += onepart;
-                }
-            }
-        return cat("{", join_string_vec(rec_parts, ", "), "}");
-        }
-
-    # None of the following are supported.
-    else if ( /^set/ in tn )
-        {
-        local set_parts: string_vec = vector();
-        local sa: set[bool] = v;
-        for ( sv in sa )
-            {
-            set_parts += to_json(sv, only_loggable);
-            }
-        return cat("[", join_string_vec(set_parts, ", "), "]");
-        }
-    else if ( /^table/ in tn )
-        {
-        local tab_parts: vector of string = vector();
-        local ta: table[bool] of any = v;
-        for ( ti, tv in ta )
-            {
-            local ts = to_json(ti);
-            local if_quotes = (ts[0] == "\"") ? "" : "\"";
-            tab_parts += cat(if_quotes, ts, if_quotes, ": ", to_json(tv, only_loggable));
-            }
-        return cat("{", join_string_vec(tab_parts, ", "), "}");
-        }
-    else if ( /^vector/ in tn )
-        {
-        local vec_parts: string_vec = vector();
-        local va: vector of any = v;
-        for ( vi in va )
-            {
-            vec_parts += to_json(va[vi], only_loggable);
-            }
-        return cat("[", join_string_vec(vec_parts, ", "), "]");
-        }
-
-    return "\"\"";
-    }
+## This file is deprecated in favor of to_json in zeek.bif
+@deprecated="Remove in 3.1. to_json is now always available as a built-in function."
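For most scripts the migration is just dropping the @load — a hedged sketch
(hypothetical record value, not from this patch):

    # @load base/utils/json   # deprecated: to_json is now a built-in function
    print to_json([$ts=network_time(), $msg="hi"]);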
@@ -1 +1 @@
-Subproject commit 785e581f00a1efae3fca7a62fb15d8756c5aedb1
+Subproject commit 1e9d49362d2c3bb2f43abcd8eebe47be045659a5
20  src/Attr.cc
@@ -130,11 +130,12 @@ void Attr::AddTag(ODesc* d) const
    d->Add(attr_name(Tag()));
    }

-Attributes::Attributes(attr_list* a, BroType* t, bool arg_in_record)
+Attributes::Attributes(attr_list* a, BroType* t, bool arg_in_record, bool is_global)
    {
    attrs = new attr_list(a->length());
    type = t->Ref();
    in_record = arg_in_record;
+   global_var = is_global;

    SetLocationInfo(&start_location, &end_location);

@@ -250,10 +251,14 @@ void Attributes::CheckAttr(Attr* a)
    {
    switch ( a->Tag() ) {
    case ATTR_DEPRECATED:
-   case ATTR_OPTIONAL:
    case ATTR_REDEF:
        break;

+   case ATTR_OPTIONAL:
+       if ( global_var )
+           Error("&optional is not valid for global variables");
+       break;

    case ATTR_ADD_FUNC:
    case ATTR_DEL_FUNC:
        {

@@ -283,6 +288,14 @@ void Attributes::CheckAttr(Attr* a)

    case ATTR_DEFAULT:
        {
+       // &default is allowed for global tables, since it's used in initialization
+       // of table fields. it's not allowed otherwise.
+       if ( global_var && ! type->IsSet() && type->Tag() != TYPE_TABLE )
+           {
+           Error("&default is not valid for global variables");
+           break;
+           }

        BroType* atype = a->AttrExpr()->Type();

        if ( type->Tag() != TYPE_TABLE || (type->IsSet() && ! in_record) )

@@ -410,9 +423,10 @@ void Attributes::CheckAttr(Attr* a)

#if 0
    //### not easy to test this w/o knowing the ID.
-   if ( ! IsGlobal() )
+   if ( ! global_var )
        Error("expiration not supported for local variables");
#endif

        break;

    case ATTR_EXPIRE_FUNC:
@@ -77,7 +77,7 @@ protected:
// Manages a collection of attributes.
class Attributes : public BroObj {
public:
-   Attributes(attr_list* a, BroType* t, bool in_record);
+   Attributes(attr_list* a, BroType* t, bool in_record, bool is_global);
    ~Attributes() override;

    void AddAttr(Attr* a);

@@ -101,6 +101,7 @@ protected:
    BroType* type;
    attr_list* attrs;
    bool in_record;
+   bool global_var;
};

#endif
@@ -220,23 +220,7 @@ add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
set_source_files_properties(nb_dns.c PROPERTIES COMPILE_FLAGS
                            -fno-strict-aliasing)

-set(bro_SRCS
-    ${CMAKE_CURRENT_BINARY_DIR}/version.c
-    ${BIF_SRCS}
-    ${BINPAC_AUXSRC}
-    ${BINPAC_OUTPUTS}
-    ${TRANSFORMED_BISON_OUTPUTS}
-    ${FLEX_RuleScanner_OUTPUTS}
-    ${FLEX_RuleScanner_INPUT}
-    ${BISON_RuleParser_INPUT}
-    ${FLEX_REScanner_OUTPUTS}
-    ${FLEX_REScanner_INPUT}
-    ${BISON_REParser_INPUT}
-    ${FLEX_Scanner_OUTPUTS}
-    ${FLEX_Scanner_INPUT}
-    ${BISON_Parser_INPUT}
-    ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
-    ${PLUGIN_INIT}
+set(MAIN_SRCS
    main.cc
    net_util.cc
    util.cc

@@ -332,8 +316,6 @@ set(bro_SRCS
    threading/formatters/Ascii.cc
    threading/formatters/JSON.cc

-    3rdparty/sqlite3.c
-
    plugin/Component.cc
    plugin/ComponentManager.h
    plugin/TaggedComponent.h

@@ -344,6 +326,31 @@ set(bro_SRCS
    digest.h
)

set(THIRD_PARTY_SRCS
    3rdparty/sqlite3.c
)

set(bro_SRCS
    ${CMAKE_CURRENT_BINARY_DIR}/version.c
    ${BIF_SRCS}
    ${BINPAC_AUXSRC}
    ${BINPAC_OUTPUTS}
    ${TRANSFORMED_BISON_OUTPUTS}
    ${FLEX_RuleScanner_OUTPUTS}
    ${FLEX_RuleScanner_INPUT}
    ${BISON_RuleParser_INPUT}
    ${FLEX_REScanner_OUTPUTS}
    ${FLEX_REScanner_INPUT}
    ${BISON_REParser_INPUT}
    ${FLEX_Scanner_OUTPUTS}
    ${FLEX_Scanner_INPUT}
    ${BISON_Parser_INPUT}
    ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
    ${PLUGIN_INIT}
    ${THIRD_PARTY_SRCS}
    ${MAIN_SRCS}
)

collect_headers(bro_HEADERS ${bro_SRCS})

if ( bro_HAVE_OBJECT_LIBRARIES )

@@ -433,3 +440,18 @@ install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/
install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/sqlite3.h
        DESTINATION include/zeek/3rdparty
)

find_program(CLANG_TIDY NAMES clang-tidy)
if (CLANG_TIDY)
    set(TIDY_SRCS "")
    foreach(f ${MAIN_SRCS})
        list(APPEND TIDY_SRCS "src/${f}")
    endforeach(f)
    # TODO: this currently doesn't include many of the subdirectories/plugins
    # that build static libraries for inclusion into the final zeek binary
    # (analyzers, broker, etc.) or generated code (BIFs, BinPAC, etc.).
    add_custom_target(clang-tidy
        COMMAND ${CLANG_TIDY} -p ${CMAKE_BINARY_DIR} ${TIDY_SRCS}
        WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
    )
endif()
@@ -3133,7 +3133,7 @@ TableConstructorExpr::TableConstructorExpr(ListExpr* constructor_list,
            }
        }

-   attrs = arg_attrs ? new Attributes(arg_attrs, type, false) : 0;
+   attrs = arg_attrs ? new Attributes(arg_attrs, type, false, false) : 0;

    type_list* indices = type->AsTableType()->Indices()->Types();
    const expr_list& cle = constructor_list->Exprs();

@@ -3240,7 +3240,7 @@ SetConstructorExpr::SetConstructorExpr(ListExpr* constructor_list,
    else if ( type->Tag() != TYPE_TABLE || ! type->AsTableType()->IsSet() )
        SetError("values in set(...) constructor do not specify a set");

-   attrs = arg_attrs ? new Attributes(arg_attrs, type, false) : 0;
+   attrs = arg_attrs ? new Attributes(arg_attrs, type, false, false) : 0;

    type_list* indices = type->AsTableType()->Indices()->Types();
    expr_list& cle = constructor_list->Exprs();
@@ -181,7 +181,7 @@ void ID::UpdateValAttrs()
    TypeDecl* fd = rt->FieldDecl(i);

    if ( ! fd->attrs )
-       fd->attrs = new Attributes(new attr_list, rt->FieldType(i), true);
+       fd->attrs = new Attributes(new attr_list, rt->FieldType(i), true, IsGlobal());

    fd->attrs->AddAttr(new Attr(ATTR_LOG));
    }

@@ -195,7 +195,7 @@ void ID::MakeDeprecated(Expr* deprecation)
        return;

    attr_list* attr = new attr_list{new Attr(ATTR_DEPRECATED, deprecation)};
-   AddAttrs(new Attributes(attr, Type(), false));
+   AddAttrs(new Attributes(attr, Type(), false, IsGlobal()));
    }

string ID::GetDeprecationWarning() const

@@ -245,7 +245,7 @@ void ID::SetOption()
    if ( ! IsRedefinable() )
        {
        attr_list* attr = new attr_list{new Attr(ATTR_REDEF)};
-       AddAttrs(new Attributes(attr, Type(), false));
+       AddAttrs(new Attributes(attr, Type(), false, IsGlobal()));
        }
    }
@@ -141,7 +141,6 @@ RecordType* rotate_info;
StringVal* log_rotate_base_time;

StringVal* peer_description;
-bro_uint_t chunked_io_buffer_soft_cap;

Val* profiling_file;
double profiling_interval;

@@ -213,7 +212,6 @@ void init_general_global_var()

    peer_description =
        internal_val("peer_description")->AsStringVal();
-   chunked_io_buffer_soft_cap = opt_internal_unsigned("chunked_io_buffer_soft_cap");

    packet_filter_default = opt_internal_int("packet_filter_default");
@@ -144,7 +144,6 @@ extern RecordType* rotate_info;
extern StringVal* log_rotate_base_time;

extern StringVal* peer_description;
-extern bro_uint_t chunked_io_buffer_soft_cap;

extern Val* profiling_file;
extern double profiling_interval;
@@ -650,7 +650,7 @@ void FuncType::DescribeReST(ODesc* d, bool roles_only) const
TypeDecl::TypeDecl(BroType* t, const char* i, attr_list* arg_attrs, bool in_record)
    {
    type = t;
-   attrs = arg_attrs ? new Attributes(arg_attrs, t, in_record) : 0;
+   attrs = arg_attrs ? new Attributes(arg_attrs, t, in_record, false) : 0;
    id = i;
    }

@@ -841,7 +841,7 @@ const char* RecordType::AddFields(type_decl_list* others, attr_list* attr)
    if ( log )
        {
        if ( ! td->attrs )
-           td->attrs = new Attributes(new attr_list, td->type, true);
+           td->attrs = new Attributes(new attr_list, td->type, true, false);

        td->attrs->AddAttr(new Attr(ATTR_LOG));
        }
411  src/Val.cc
@@ -27,6 +27,16 @@

#include "broker/Data.h"

#include "3rdparty/json.hpp"
#include "3rdparty/fifo_map.hpp"

// Define a class for use with the json library that orders the keys in the same order that
// they were inserted. By default, the json library orders them alphabetically and we don't
// want it like that.
template<class K, class V, class compare, class A>
using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
using ZeekJson = nlohmann::basic_json<json_fifo_map>;

Val::Val(Func* f)
    {
    val.func_val = f;

@@ -380,6 +390,276 @@ bool Val::WouldOverflow(const BroType* from_type, const BroType* to_type, const
    return false;
    }

TableVal* Val::GetRecordFields()
    {
    TableVal* fields = new TableVal(internal_type("record_field_table")->AsTableType());

    auto t = Type();

    if ( t->Tag() != TYPE_RECORD && t->Tag() != TYPE_TYPE )
        {
        reporter->Error("non-record value/type passed to record_fields");
        return fields;
        }

    RecordType* rt = nullptr;
    RecordVal* rv = nullptr;

    if ( t->Tag() == TYPE_RECORD )
        {
        rt = t->AsRecordType();
        rv = AsRecordVal();
        }
    else
        {
        t = t->AsTypeType()->Type();

        if ( t->Tag() != TYPE_RECORD )
            {
            reporter->Error("non-record value/type passed to record_fields");
            return fields;
            }

        rt = t->AsRecordType();
        }

    for ( int i = 0; i < rt->NumFields(); ++i )
        {
        BroType* ft = rt->FieldType(i);
        TypeDecl* fd = rt->FieldDecl(i);
        Val* fv = nullptr;

        if ( rv )
            fv = rv->Lookup(i);

        if ( fv )
            ::Ref(fv);

        bool logged = (fd->attrs && fd->FindAttr(ATTR_LOG) != 0);

        RecordVal* nr = new RecordVal(internal_type("record_field")->AsRecordType());

        if ( ft->Tag() == TYPE_RECORD )
            nr->Assign(0, new StringVal("record " + ft->GetName()));
        else
            nr->Assign(0, new StringVal(type_name(ft->Tag())));

        nr->Assign(1, val_mgr->GetBool(logged));
        nr->Assign(2, fv);
        nr->Assign(3, rt->FieldDefault(i));

        Val* field_name = new StringVal(rt->FieldName(i));
        fields->Assign(field_name, nr);
        Unref(field_name);
        }

    return fields;
    }

// This is a static method in this file to avoid including json.hpp in Val.h since it's huge.
static ZeekJson BuildJSON(Val* val, bool only_loggable=false, RE_Matcher* re=new RE_Matcher("^_"))
    {
    ZeekJson j;
    BroType* type = val->Type();
    switch ( type->Tag() )
        {
        case TYPE_BOOL:
            j = val->AsBool();
            break;

        case TYPE_INT:
            j = val->AsInt();
            break;

        case TYPE_COUNT:
            j = val->AsCount();
            break;

        case TYPE_COUNTER:
            j = val->AsCounter();
            break;

        case TYPE_TIME:
            j = val->AsTime();
            break;

        case TYPE_DOUBLE:
            j = val->AsDouble();
            break;

        case TYPE_PORT:
            {
            auto* pval = val->AsPortVal();
            j["port"] = pval->Port();
            j["proto"] = pval->Protocol();
            break;
            }

        case TYPE_PATTERN:
        case TYPE_INTERVAL:
        case TYPE_ADDR:
        case TYPE_SUBNET:
            {
            ODesc d;
            d.SetStyle(RAW_STYLE);
            val->Describe(&d);

            auto* bs = new BroString(1, d.TakeBytes(), d.Len());
            j = string((char*)bs->Bytes(), bs->Len());

            delete bs;
            break;
            }

        case TYPE_FILE:
        case TYPE_FUNC:
        case TYPE_ENUM:
        case TYPE_STRING:
            {
            ODesc d;
            d.SetStyle(RAW_STYLE);
            val->Describe(&d);

            auto* bs = new BroString(1, d.TakeBytes(), d.Len());
            j = json_escape_utf8(string((char*)bs->Bytes(), bs->Len()));

            delete bs;
            break;
            }

        case TYPE_TABLE:
            {
            auto* table = val->AsTable();
            auto* tval = val->AsTableVal();

            if ( tval->Type()->IsSet() )
                j = ZeekJson::array();
            else
                j = ZeekJson::object();

            HashKey* k;
            auto c = table->InitForIteration();
            while ( table->NextEntry(k, c) )
                {
                auto lv = tval->RecoverIndex(k);
                delete k;

                if ( tval->Type()->IsSet() )
                    {
                    auto* value = lv->Index(0)->Ref();
                    j.push_back(BuildJSON(value, only_loggable, re));
                    Unref(value);
                    }
                else
                    {
                    ZeekJson key_json;
                    Val* entry_value;
                    if ( lv->Length() == 1 )
                        {
                        Val* entry_key = lv->Index(0)->Ref();
                        entry_value = tval->Lookup(entry_key, true);
                        key_json = BuildJSON(entry_key, only_loggable, re);
                        Unref(entry_key);
                        }
                    else
                        {
                        entry_value = tval->Lookup(lv, true);
                        key_json = BuildJSON(lv, only_loggable, re);
                        }

                    string key_string;
                    if ( key_json.is_string() )
                        key_string = key_json;
                    else
                        key_string = key_json.dump();

                    j[key_string] = BuildJSON(entry_value, only_loggable, re);
                    }

                Unref(lv);
                }

            break;
            }

        case TYPE_RECORD:
            {
            j = ZeekJson::object();
            auto* rval = val->AsRecordVal();
            TableVal* fields = rval->GetRecordFields();
            auto* field_indexes = fields->ConvertToPureList();
            int num_indexes = field_indexes->Length();

            for ( int i = 0; i < num_indexes; ++i )
                {
                Val* key = field_indexes->Index(i);
                auto* key_field = fields->Lookup(key)->AsRecordVal();

                auto* key_val = key->AsStringVal();
                string key_string;
                if ( re->MatchAnywhere(key_val->AsString()) != 0 )
                    {
                    StringVal blank("");
                    key_val = key_val->Substitute(re, &blank, 0)->AsStringVal();
                    key_string = key_val->ToStdString();
                    delete key_val;
                    }
                else
                    key_string = key_val->ToStdString();

                Val* value = key_field->Lookup("value", true);

                if ( value && ( ! only_loggable || key_field->Lookup("log")->AsBool() ) )
                    j[key_string] = BuildJSON(value, only_loggable, re);
                }

            delete fields;
            delete field_indexes;
            break;
            }

        case TYPE_LIST:
            {
            j = ZeekJson::array();
            auto* lval = val->AsListVal();
            size_t size = lval->Length();
            for (size_t i = 0; i < size; i++)
                j.push_back(BuildJSON(lval->Index(i), only_loggable, re));

            break;
            }

        case TYPE_VECTOR:
            {
            j = ZeekJson::array();
            auto* vval = val->AsVectorVal();
            size_t size = vval->SizeVal()->AsCount();
            for (size_t i = 0; i < size; i++)
                j.push_back(BuildJSON(vval->Lookup(i), only_loggable, re));

            break;
            }

        case TYPE_OPAQUE:
            {
            j = ZeekJson::object();
            auto* oval = val->AsOpaqueVal();
            j["opaque_type"] = OpaqueMgr::mgr()->TypeID(oval);
            break;
            }

        default: break;
        }

    return j;
    }

StringVal* Val::ToJSON(bool only_loggable, RE_Matcher* re)
    {
    ZeekJson j = BuildJSON(this, only_loggable, re);
    return new StringVal(j.dump());
    }

IntervalVal::IntervalVal(double quantity, double units) :
    Val(quantity * units, TYPE_INTERVAL)
    {

@@ -491,6 +771,18 @@ uint32 PortVal::Port() const
    return p & ~PORT_SPACE_MASK;
    }

string PortVal::Protocol() const
    {
    if ( IsUDP() )
        return "udp";
    else if ( IsTCP() )
        return "tcp";
    else if ( IsICMP() )
        return "icmp";
    else
        return "unknown";
    }

int PortVal::IsTCP() const
    {
    return (val.uint_val & PORT_SPACE_MASK) == TCP_PORT_MASK;

@@ -510,14 +802,8 @@ void PortVal::ValDescribe(ODesc* d) const
    {
    uint32 p = static_cast<uint32>(val.uint_val);
    d->Add(p & ~PORT_SPACE_MASK);
-   if ( IsUDP() )
-       d->Add("/udp");
-   else if ( IsTCP() )
-       d->Add("/tcp");
-   else if ( IsICMP() )
-       d->Add("/icmp");
-   else
-       d->Add("/unknown");
+   d->Add("/");
+   d->Add(Protocol());
    }

Val* PortVal::DoClone(CloneState* state)

@@ -713,6 +999,12 @@ StringVal::StringVal(const string& s) : Val(TYPE_STRING)
    val.string_val = new BroString(reinterpret_cast<const u_char*>(s.data()), s.length(), 1);
    }

string StringVal::ToStdString() const
    {
    auto* bs = AsString();
    return string((char*)bs->Bytes(), bs->Len());
    }

StringVal* StringVal::ToUpper()
    {
    val.string_val->ToUpper();

@@ -734,6 +1026,92 @@ unsigned int StringVal::MemoryAllocation() const
    return padded_sizeof(*this) + val.string_val->MemoryAllocation();
    }

Val* StringVal::Substitute(RE_Matcher* re, StringVal* repl, bool do_all)
    {
    const u_char* s = Bytes();
    int offset = 0;
    int n = Len();

    // cut_points is a set of pairs of indices in str that should
    // be removed/replaced.  A pair <x,y> means "delete starting
    // at offset x, up to but not including offset y".
    List(ptr_compat_int) cut_points;    // where RE matches pieces of str

    int size = 0;   // size of result

    while ( n > 0 )
        {
        // Find next match offset.
        int end_of_match;
        while ( n > 0 &&
            (end_of_match = re->MatchPrefix(&s[offset], n)) <= 0 )
            {
            // This character is going to be copied to the result.
            ++size;

            // Move on to next character.
            ++offset;
            --n;
            }

        if ( n <= 0 )
            break;

        // s[offset .. offset+end_of_match-1] matches re.
        cut_points.append(offset);
        cut_points.append(offset + end_of_match);

        offset += end_of_match;
        n -= end_of_match;

        if ( ! do_all )
            {
            // We've now done the first substitution - finished.
            // Include the remainder of the string in the result.
            size += n;
            break;
            }
        }

    // size now reflects amount of space copied.  Factor in amount
    // of space for replacement text.
    int num_cut_points = cut_points.length() / 2;
    size += num_cut_points * repl->Len();

    // And a final NUL for good health.
    ++size;

    byte_vec result = new u_char[size];
    byte_vec r = result;

    // Copy it all over.
    int start_offset = 0;
    for ( int i = 0; i < cut_points.length(); i += 2 /* loop over pairs */ )
        {
        int num_to_copy = cut_points[i] - start_offset;
        memcpy(r, s + start_offset, num_to_copy);

        r += num_to_copy;
        start_offset = cut_points[i+1];

        // Now add in replacement text.
        memcpy(r, repl->Bytes(), repl->Len());
        r += repl->Len();
        }

    // Copy final trailing characters.
    int num_to_copy = Len() - start_offset;
    memcpy(r, s + start_offset, num_to_copy);
    r += num_to_copy;

    // Final NUL.  No need to increment r, since the length
    // computed from it in the next statement does not include
    // the NUL.
    r[0] = '\0';

    return new StringVal(new BroString(1, result, r - result));
    }

Val* StringVal::DoClone(CloneState* state)
    {
    // We could likely treat this type as immutable and return a reference

@@ -1371,7 +1749,20 @@ Val* TableVal::Default(Val* index)

    if ( def_val->Type()->Tag() != TYPE_FUNC ||
         same_type(def_val->Type(), Type()->YieldType()) )
-       return def_attr->AttrExpr()->IsConst() ? def_val->Ref() : def_val->Clone();
+       {
+       if ( def_attr->AttrExpr()->IsConst() )
+           return def_val->Ref();
+
+       try
+           {
+           return def_val->Clone();
+           }
+       catch ( InterpreterException& e )
+           { /* Already reported. */ }
+
+       Error("&default value for table is not clone-able");
+       return 0;
+       }

    const Func* f = def_val->AsFunc();
    val_list vl;

@@ -2020,7 +2411,7 @@ vector<RecordVal*> RecordVal::parse_time_records;

RecordVal::RecordVal(RecordType* t, bool init_fields) : Val(t)
    {
-   origin = 0;
+   origin = nullptr;
    int n = t->NumFields();
    val_list* vl = val.val_list_val = new val_list(n);
11  src/Val.h
@@ -20,6 +20,7 @@
#include "Notifier.h"
#include "IPAddr.h"
#include "DebugLogger.h"
#include "RE.h"

// We have four different port name spaces: TCP, UDP, ICMP, and UNKNOWN.
// We distinguish between them based on the bits specified in the *_PORT_MASK

@@ -34,7 +35,6 @@
class Val;
class Func;
class BroFile;
-class RE_Matcher;
class PrefixTable;

class PortVal;

@@ -347,6 +347,10 @@ public:

    static bool WouldOverflow(const BroType* from_type, const BroType* to_type, const Val* val);

    TableVal* GetRecordFields();

    StringVal* ToJSON(bool only_loggable=false, RE_Matcher* re=new RE_Matcher("^_"));

protected:

    friend class EnumType;

@@ -530,6 +534,7 @@ public:

    // Returns the port number in host order (not including the mask).
    uint32 Port() const;
    string Protocol() const;

    // Tests for protocol types.
    int IsTCP() const;

@@ -632,10 +637,13 @@ public:
    // char* ExpandedString(int format = BroString::EXPANDED_STRING)
    //  { return AsString()->ExpandedString(format); }

    std::string ToStdString() const;
    StringVal* ToUpper();

    unsigned int MemoryAllocation() const override;

    Val* Substitute(RE_Matcher* re, StringVal* repl, bool do_all);

protected:
    friend class Val;
    StringVal() {}

@@ -981,7 +989,6 @@ protected:

    Val* DoClone(CloneState* state) override;

    RecordType* record_type;
    BroObj* origin;

    static vector<RecordVal*> parse_time_records;
@@ -108,7 +108,7 @@ static void make_var(ID* id, BroType* t, init_class c, Expr* init,
    id->SetType(t);

    if ( attr )
-       id->AddAttrs(new Attributes(attr, t, false));
+       id->AddAttrs(new Attributes(attr, t, false, id->IsGlobal()));

    if ( init )
        {

@@ -286,7 +286,7 @@ void add_type(ID* id, BroType* t, attr_list* attr)
    id->MakeType();

    if ( attr )
-       id->SetAttrs(new Attributes(attr, tnew, false));
+       id->SetAttrs(new Attributes(attr, tnew, false, false));
    }

static void transfer_arg_defaults(RecordType* args, RecordType* recv)

@@ -304,7 +304,7 @@ static void transfer_arg_defaults(RecordType* args, RecordType* recv)
    if ( ! recv_i->attrs )
        {
        attr_list* a = new attr_list{def};
-       recv_i->attrs = new Attributes(a, recv_i->type, true);
+       recv_i->attrs = new Attributes(a, recv_i->type, true, false);
        }

    else if ( ! recv_i->attrs->FindAttr(ATTR_DEFAULT) )
@@ -215,10 +215,10 @@ type Request_Data_Object(function_code: uint8, qualifier_field: uint8, object_ty
        0x2804 -> aos_dp: empty;

        # analog ouput g41
-       0x2901 -> ao_32: AnaOut32;
-       0x2902 -> ao_16: AnaOut16;
-       0x2903 -> ao_sp: AnaOutSP;
-       0x2904 -> ao_dp: AnaOutDP;
+       0x2901 -> ao_32: empty;
+       0x2902 -> ao_16: empty;
+       0x2903 -> ao_sp: empty;
+       0x2904 -> ao_dp: empty;

        # analog output event g42
        0x2a00 -> aoe_default: empty;

@@ -258,7 +258,6 @@ type Request_Data_Object(function_code: uint8, qualifier_field: uint8, object_ty

        # class objects g60
        0x3C01 -> class0data: empty; # &check(qualifier_field == 0x06);
-       #0x3C02 -> class1data: uint8; # &check(qualifier_field == 0x06);
        0x3C02 -> class1data: empty; # &check(qualifier_field == 0x06 || qualifier_field == 0x07 || qualifier_field == 0x08);
        0x3C03 -> class2data: empty; # &check(qualifier_field == 0x06 || qualifier_field == 0x07 || qualifier_field == 0x08);
        0x3C04 -> class3data: empty; # &check(qualifier_field == 0x06 || qualifier_field == 0x07 || qualifier_field == 0x08);

@@ -266,11 +265,9 @@ type Request_Data_Object(function_code: uint8, qualifier_field: uint8, object_ty
        0x4601 -> file_control_id: File_Control_ID;
        0x4602 -> file_control_auth: File_Control_Auth_Wrap(function_code);
        0x4603 -> file_control_cmd: File_Control_Cmd; # &check( file_control_cmd.op_mode == 0 || file_control_cmd.op_mode == 1 || file_control_cmd.op_mode == 2 || file_control_cmd.op_mode == 3 );
-       #0x4604 -> file_control_cmd_status: File_Control_Cmd_Status_Wrap(function_code, prefix.prefix_value); # example shown in P66
        0x4604 -> file_control_cmd_status: File_Control_Cmd_Status(prefix.prefix_value); # example shown in P66
        0x4605 -> file_trans: File_Transport(prefix.prefix_value);
        0x4606 -> file_trans_status: File_Transport_Status(prefix.prefix_value);
-       #0x4607 -> file_desc: File_Desc_Wrap(function_code);
        0x4607 -> file_desc: File_Desc;

        # internal indication g80

@@ -318,13 +315,20 @@ type Request_Data_Object(function_code: uint8, qualifier_field: uint8, object_ty

        # authentication challenge g120
        0x7801 -> challenge: AuthChallenge(prefix.prefix_value);
-       0x7802 -> reply: AuthRely(prefix.prefix_value);
+       0x7802 -> reply: AuthReply(prefix.prefix_value);
        0x7803 -> aggrRequest: AuthAggrRequest(prefix.prefix_value);
-       0x7804 -> seesionKeyRequest: uint8;
+       0x7804 -> seesionKeyRequest: uint16;
        0x7805 -> status: AuthSessionKeyStatus(prefix.prefix_value);
        0x7806 -> keyChange: AuthSessionKeyChange(prefix.prefix_value);
        0x7807 -> error: AuthError(prefix.prefix_value);
+       0x7808 -> user_cert: UserCert(prefix.prefix_value);
+       0x7809 -> mac: MAC(prefix.prefix_value);
+       0x780A -> user_status_change: UserStatusChange(prefix.prefix_value);
+       0x780B -> update_key_req: UpdateKeyReq(prefix.prefix_value);
+       0x780C -> update_key_rep: UpdateKeyRep(prefix.prefix_value);
+       0x780D -> update_key: UpdateKey(prefix.prefix_value);
+       0x780E -> update_key_sig: UpdateKeySig(prefix.prefix_value);
+       0x780F -> update_key_con: UpdateKeyCon(prefix.prefix_value);
        default -> unmatched: Default_Wrap(object_type_field);
    };
};

@@ -468,10 +472,10 @@ type Response_Data_Object(function_code: uint8, qualifier_field: uint8, object_t
        0x1f02 -> f_ai_16_wflag: FrozenAnalogInput16wFlag;
        0x1f03 -> f_ai_32_wtime: FrozenAnalogInput32wTime;
        0x1f04 -> f_ai_16_wtime: FrozenAnalogInput16wTime;
-       0x1f05 -> f_ai_32_woflag: AnalogInput32woFlag;
-       0x1f06 -> f_ai_16_woflag: AnalogInput16woFlag;
-       0x1f07 -> f_ai_sp_wflag: AnalogInputSPwFlag;
-       0x1f08 -> f_ai_dp_wflag: AnalogInputDPwFlag;
+       0x1f05 -> f_ai_32_woflag: FrozenAnalogInput32woFlag;
+       0x1f06 -> f_ai_16_woflag: FrozenAnalogInput16woFlag;
+       0x1f07 -> f_ai_sp_wflag: FrozenAnalogInputSPwFlag;
+       0x1f08 -> f_ai_dp_wflag: FrozenAnalogInputDPwFlag;

        # analog input event g32
        0x2001 -> ai32wotime: AnalogInput32woTime;

@@ -592,12 +596,20 @@ type Response_Data_Object(function_code: uint8, qualifier_field: uint8, object_t

        # authentication challenge g120
        0x7801 -> challenge: AuthChallenge(prefix.prefix_value);
-       0x7802 -> reply: AuthRely(prefix.prefix_value);
+       0x7802 -> reply: AuthReply(prefix.prefix_value);
        0x7803 -> aggrRequest: AuthAggrRequest(prefix.prefix_value);
-       0x7804 -> seesionKeyRequest: uint8;
+       0x7804 -> seesionKeyRequest: uint16;
        0x7805 -> status: AuthSessionKeyStatus(prefix.prefix_value);
        0x7806 -> keyChange: AuthSessionKeyChange(prefix.prefix_value);
        0x7807 -> error: AuthError(prefix.prefix_value);
+       0x7808 -> user_cert: UserCert(prefix.prefix_value);
+       0x7809 -> mac: MAC(prefix.prefix_value);
+       0x780A -> user_status_change: UserStatusChange(prefix.prefix_value);
+       0x780B -> update_key_req: UpdateKeyReq(prefix.prefix_value);
+       0x780C -> update_key_rep: UpdateKeyRep(prefix.prefix_value);
+       0x780D -> update_key: UpdateKey(prefix.prefix_value);
+       0x780E -> update_key_sig: UpdateKeySig(prefix.prefix_value);
+       0x780F -> update_key_con: UpdateKeyCon(prefix.prefix_value);

        #default -> unkonwndata: Debug_Byte; # &check( T );
        default -> unmatched: Default_Wrap(object_type_field);

@@ -1381,41 +1393,115 @@ type BCD_Large = record {

# g120v1
type AuthChallenge(prefix: uint16) = record {
-   csqUsr: uint32;
-   hal: uint8;
+   cha_seq_num: uint32;
+   user_num: uint16;
+   mac_alg: uint8;
    reason: uint8;
-   chan_data: bytestring &length = (prefix - 10);
+   chan_data: bytestring &length = (prefix - 8);
} &byteorder = littleendian;

# g120v2
-type AuthRely(prefix: uint16) = record {
-   csqUsr: uint32;
-   chan_data: bytestring &length = (prefix - 4);
+type AuthReply(prefix: uint16) = record {
+   cha_seq_num: uint32;
+   user_num : uint16;
+   mac: bytestring &length = (prefix - 6);
} &byteorder = littleendian;

# g120v3
type AuthAggrRequest(prefix: uint16) = record {
-   csqUsr: uint32;
-   chan_data: bytestring &length = (prefix - 4);
+   cha_seq_num: uint32;
+   user_num: uint16;
} &byteorder = littleendian;

# g120v5
type AuthSessionKeyStatus(prefix: uint16) = record {
-   csqUsr: uint32;
+   cha_seq_num: uint32;
    user_num: uint16;
    key_alg: uint8;
    key_status: uint8;
-   chan_data: bytestring &length = (prefix - 10);
+   mac_alg: uint8;
+   cha_data_len : uint16;
+   chan_data: bytestring &length = cha_data_len;
+   mac: bytestring &length = (prefix - 11 - cha_data_len);
} &byteorder = littleendian;

# g120v6
type AuthSessionKeyChange(prefix: uint16) = record {
-   csqUsr: uint32;
-   key_wrap_data: bytestring &length = (prefix - 5);
+   key_change_num: uint32;
+   user_num: uint16;
+   key_wrap_data: bytestring &length = (prefix - 6);
} &byteorder = littleendian;

# g120v7
type AuthError(prefix: uint16) = record {
-   csqUsr: uint32;
+   cha_seq_num: uint32;
+   user_num: uint16;
    id: uint16;
    error_code: uint8;
-   key_wrap_data: bytestring &length = (prefix - 6);
+   time_error: bytestring &length = 6;
+   error_text: bytestring &length = (prefix - 15);
} &byteorder = littleendian;

+# g120v8
+type UserCert(prefix: uint16) = record {
+   method: uint8;
+   cert_type: uint8;
+   cert_text: bytestring &length = (prefix - 2);
+} &byteorder = littleendian;
+
+# g120v9
+type MAC(prefix: uint16) = record {
+   mac_text: bytestring &length = prefix;
+} &byteorder = littleendian;
+
+# g120v10
+type UserStatusChange(prefix: uint16) = record {
+   method: uint8;
+   operation: uint8;
+   seq_num: uint32;
+   user_role: uint16;
+   user_role_exp: uint16;
+   user_name_len: uint16;
+   user_pubkey_len: uint16;
+   cert_data_len: uint16;
+   user_name: bytestring &length = user_name_len;
+   user_pubkey: bytestring &length = user_pubkey_len;
+   cert_data: bytestring &length = cert_data_len;
+} &byteorder = littleendian;
+
+# g120v11
+type UpdateKeyReq(prefix: uint16) = record {
+   method: uint8;
+   user_name_len: uint16;
+   master_cha_data_len: uint16;
+   user_name: bytestring &length = user_name_len;
+   master_cha_data: bytestring &length = master_cha_data_len;
+} &byteorder = littleendian;
+
+# g120v12
+type UpdateKeyRep(prefix: uint16) = record {
+   seq_num: uint32;
+   user_num: uint16;
+   user_name_len: uint16;
+   outstation_cha_data_len: uint16;
+   outstation_cha_data: bytestring &length = outstation_cha_data_len;
+} &byteorder = littleendian;
+
+# g120v13
+type UpdateKey(prefix: uint16) = record {
+   seq_num: uint32;
+   user_num: uint16;
+   update_key_len: uint16;
+   update_key_data: bytestring &length = update_key_len;
+} &byteorder = littleendian;
+
+# g120v14
+type UpdateKeySig(prefix: uint16) = record {
+   digital_sig: bytestring &length = prefix;
+} &byteorder = littleendian;
+
+# g120v15
+type UpdateKeyCon(prefix: uint16) = record {
+   mac: bytestring &length = prefix;
+} &byteorder = littleendian;
@@ -91,10 +91,57 @@ type DNP3_Application_Response_Header = record {
type Request_Objects(function_code: uint8) = record {
    object_header: Object_Header(function_code);
    data: case (object_header.object_type_field) of {
        # binary output command g12
        0x0c01 -> g12v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
        0x0c02 -> g12v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
        0x0c03 -> bocmd_PM: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ ( object_header.number_of_item / 8 ) + 1*( object_header.number_of_item > ( (object_header.number_of_item / 8)*8 ) ) ];
-       0x3202 -> time_interval_ojbects: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];

        # time data interval data object g50
        0x3201 -> g50v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
-       #0x3202 -> time_interval_ojbects: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
-       # &check( object_header.qualifier_field == 0x0f && object_header.number_of_item == 0x01);
-       default -> ojbects: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x3202 -> g50v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x3203 -> g50v3_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+       # Time and Date Common Time-of-Occurrence g51
+       0x3301 -> g51v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x3302 -> g51v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+       # time delay g52
+       0x3401 -> g52v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x3402 -> g52v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+       # file control g70
+       0x4601 -> g70v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x4602 -> g70v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x4603 -> g70v3_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x4604 -> g70v4_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x4605 -> g70v5_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x4606 -> g70v6_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x4607 -> g70v7_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+       # internal indication g80
+       0x5001 -> g80v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+       # authentication challenge g120
+       0x7801 -> g120v1_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x7802 -> g120v2_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x7803 -> g120v3_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x7804 -> g120v4_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x7805 -> g120v5_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x7806 -> g120v6_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x7807 -> g120v7_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x7808 -> g120v8_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x7809 -> g120v9_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x780A -> g120v10_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x780B -> g120v11_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x780C -> g120v12_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x780D -> g120v13_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x780E -> g120v14_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       0x780F -> g120v15_objs: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+
+       # default -> ojbects: Request_Data_Object(function_code, object_header.qualifier_field, object_header.object_type_field )[ object_header.number_of_item];
+       default -> objects: empty;
    };
    # dump_data is always empty; I intend to use it for checking some conditions;
    # However, in the current binpac implementation, &check is not implemented
@@ -1200,6 +1200,9 @@ bool bro_broker::VectorIterator::DoUnserialize(const broker::data& data)
    auto x = caf::get_if<broker::vector>(&(*v)[0]);
    auto y = caf::get_if<broker::integer>(&(*v)[1]);

    if ( ! (x && y) )
        return false;

    dat = *x;
    it = dat.begin() + *y;
    return true;

@@ -1222,6 +1225,9 @@ bool bro_broker::RecordIterator::DoUnserialize(const broker::data& data)
    auto x = caf::get_if<broker::vector>(&(*v)[0]);
    auto y = caf::get_if<broker::integer>(&(*v)[1]);

    if ( ! (x && y) )
        return false;

    dat = *x;
    it = dat.begin() + *y;
    return true;
@@ -179,6 +179,9 @@ std::unique_ptr<CounterVector> CounterVector::Unserialize(const broker::data& da
    auto width = caf::get_if<uint64>(&(*v)[0]);
    auto bits = BitVector::Unserialize((*v)[1]);

    if ( ! (width && bits) )
        return nullptr;

    auto cv = std::unique_ptr<CounterVector>(new CounterVector());
    cv->width = *width;
    cv->bits = bits.release();
@@ -492,25 +492,25 @@ bool TopkVal::DoUnserialize(const broker::data& data)

    while ( i < numElements )
        {
-       Bucket* b = new Bucket();
        auto elements_count = caf::get_if<uint64>(&(*v)[idx++]);
        auto count = caf::get_if<uint64>(&(*v)[idx++]);

+       if ( ! (elements_count && count) )
+           return false;

+       Bucket* b = new Bucket();
        b->count = *count;
        b->bucketPos = buckets.insert(buckets.end(), b);

        for ( uint64_t j = 0; j < *elements_count; j++ )
            {
-           Element* e = new Element();
            auto epsilon = caf::get_if<uint64>(&(*v)[idx++]);
            Val* val = bro_broker::data_to_val((*v)[idx++], type);

+           if ( ! (epsilon && val) )
+               return false;

+           Element* e = new Element();
            e->epsilon = *epsilon;
            e->value = val;
            e->parent = b;
|
@ -351,91 +351,6 @@ Val* do_split(StringVal* str_val, RE_Matcher* re, int incl_sep, int max_num_sep)
|
|||
return a;
|
||||
}
|
||||
|
||||
Val* do_sub(StringVal* str_val, RE_Matcher* re, StringVal* repl, int do_all)
|
||||
{
|
||||
const u_char* s = str_val->Bytes();
|
||||
int offset = 0;
|
||||
int n = str_val->Len();
|
||||
|
||||
// cut_points is a set of pairs of indices in str that should
|
||||
// be removed/replaced. A pair <x,y> means "delete starting
|
||||
// at offset x, up to but not including offset y".
|
||||
List(ptr_compat_int) cut_points; // where RE matches pieces of str
|
||||
|
||||
int size = 0; // size of result
|
||||
|
||||
while ( n > 0 )
|
||||
{
|
||||
// Find next match offset.
|
||||
int end_of_match;
|
||||
while ( n > 0 &&
|
||||
(end_of_match = re->MatchPrefix(&s[offset], n)) <= 0 )
|
||||
{
|
||||
// This character is going to be copied to the result.
|
||||
++size;
|
||||
|
||||
// Move on to next character.
|
||||
++offset;
|
||||
--n;
|
||||
}
|
||||
|
||||
if ( n <= 0 )
|
||||
break;
|
||||
|
||||
// s[offset .. offset+end_of_match-1] matches re.
|
||||
cut_points.append(offset);
|
||||
cut_points.append(offset + end_of_match);
|
||||
|
||||
offset += end_of_match;
|
||||
n -= end_of_match;
|
||||
|
||||
if ( ! do_all )
|
||||
{
|
||||
// We've now done the first substitution - finished.
|
||||
// Include the remainder of the string in the result.
|
||||
size += n;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// size now reflects amount of space copied. Factor in amount
|
||||
// of space for replacement text.
|
||||
int num_cut_points = cut_points.length() / 2;
|
||||
size += num_cut_points * repl->Len();
|
||||
|
||||
// And a final NUL for good health.
|
||||
++size;
|
||||
|
||||
byte_vec result = new u_char[size];
|
||||
byte_vec r = result;
|
||||
|
||||
// Copy it all over.
|
||||
int start_offset = 0;
|
||||
for ( int i = 0; i < cut_points.length(); i += 2 /* loop over pairs */ )
|
||||
{
|
||||
int num_to_copy = cut_points[i] - start_offset;
|
||||
memcpy(r, s + start_offset, num_to_copy);
|
||||
|
||||
r += num_to_copy;
|
||||
start_offset = cut_points[i+1];
|
||||
|
||||
// Now add in replacement text.
|
||||
memcpy(r, repl->Bytes(), repl->Len());
|
||||
r += repl->Len();
|
||||
}
|
||||
|
||||
// Copy final trailing characters.
|
||||
int num_to_copy = str_val->Len() - start_offset;
|
||||
memcpy(r, s + start_offset, num_to_copy);
|
||||
r += num_to_copy;
|
||||
|
||||
// Final NUL. No need to increment r, since the length
|
||||
// computed from it in the next statement does not include
|
||||
// the NUL.
|
||||
r[0] = '\0';
|
||||
|
||||
return new StringVal(new BroString(1, result, r - result));
|
||||
}
|
||||
%%}
|
||||
|
||||
## Splits a string into an array of strings according to a pattern.
|
||||
|
@ -535,7 +450,7 @@ function split_string_n%(str: string, re: pattern,
 ## .. zeek:see:: gsub subst_string
 function sub%(str: string, re: pattern, repl: string%): string
 	%{
-	return do_sub(str, re, repl, 0);
+	return str->Substitute(re, repl, false);
 	%}
 
 ## Substitutes a given replacement string for all occurrences of a pattern
@ -552,7 +467,7 @@ function sub%(str: string, re: pattern, repl: string%): string
 ## .. zeek:see:: sub subst_string
 function gsub%(str: string, re: pattern, repl: string%): string
 	%{
-	return do_sub(str, re, repl, 1);
+	return str->Substitute(re, repl, true);
 	%}
 
@ -111,3 +111,14 @@ string Formatter::Render(double d)
 	return buf;
 	}
 
+string Formatter::Render(TransportProto proto)
+	{
+	if ( proto == TRANSPORT_UDP )
+		return "udp";
+	else if ( proto == TRANSPORT_TCP )
+		return "tcp";
+	else if ( proto == TRANSPORT_ICMP )
+		return "icmp";
+	else
+		return "unknown";
+	}
@ -112,6 +112,17 @@ public:
 	 */
 	static string Render(double d);
 
+	/**
+	 * Convert a transport protocol into a string.
+	 *
+	 * This is a helper function that formatter implementations may use.
+	 *
+	 * @param proto The transport protocol.
+	 *
+	 * @return An ASCII representation of the protocol.
+	 */
+	static string Render(TransportProto proto);
+
 	/**
 	 * Convert a string into a TransportProto. The string must be one of
 	 * \c tcp, \c udp, \c icmp, or \c unknown.
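A small usage sketch for the new helper. describe_port() is a hypothetical caller, not part of the patch, and it assumes Zeek's TransportProto enum plus the Formatter declaration above (the include path is illustrative):

#include <cstdint>
#include <string>

#include "threading/Formatter.h"  // path illustrative

// Format "443/tcp", "53/udp", etc. for a log field.
std::string describe_port(uint32_t port, TransportProto proto)
	{
	return std::to_string(port) + "/"
	       + threading::formatter::Formatter::Render(proto);
	}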
@ -11,7 +11,7 @@
 #include <math.h>
 #include <stdint.h>
 
-#include "./JSON.h"
+#include "JSON.h"
 
 using namespace threading::formatter;
 
@ -27,78 +27,83 @@ JSON::~JSON()
 bool JSON::Describe(ODesc* desc, int num_fields, const Field* const * fields,
                     Value** vals) const
 	{
-	if ( surrounding_braces )
-		desc->AddRaw("{");
+	ZeekJson j = ZeekJson::object();
 
 	for ( int i = 0; i < num_fields; i++ )
 		{
-		const u_char* bytes = desc->Bytes();
-		int len = desc->Len();
-
-		if ( i > 0 &&
-		     len > 0 &&
-		     bytes[len-1] != ',' &&
-		     bytes[len-1] != '{' &&
-		     bytes[len-1] != '[' &&
-		     vals[i]->present )
-			desc->AddRaw(",");
-
-		if ( ! Describe(desc, vals[i], fields[i]->name) )
+		if ( vals[i]->present )
+			{
+			ZeekJson new_entry = BuildJSON(vals[i]);
+			if ( new_entry.is_null() )
 			return false;
+
+			j[fields[i]->name] = new_entry;
+			}
 		}
 
-	if ( surrounding_braces )
-		desc->AddRaw("}");
+	desc->Add(j.dump());
 
 	return true;
 	}
 
 bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 	{
 	if ( desc->IsBinary() )
 		{
 		GetThread()->Error("json formatter: binary format not supported");
 		return false;
 		}
 
 	if ( ! val->present )
 		return true;
 
-	if ( name.size() )
-		{
-		desc->AddRaw("\"", 1);
-		desc->Add(name);
-		desc->AddRaw("\":", 2);
+	ZeekJson j = BuildJSON(val, name);
+	if ( j.is_null() )
+		return false;
+
+	desc->Add(j.dump());
+	return true;
 	}
 
+threading::Value* JSON::ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype) const
+	{
+	GetThread()->Error("JSON formatter does not support parsing yet.");
+	return nullptr;
+	}
+
+ZeekJson JSON::BuildJSON(Value* val, const string& name) const
+	{
+	ZeekJson j;
 	switch ( val->type )
 		{
 		case TYPE_BOOL:
-			desc->AddRaw(val->val.int_val == 0 ? "false" : "true");
+			j = val->val.int_val != 0;
 			break;
 
 		case TYPE_INT:
-			desc->Add(val->val.int_val);
+			j = val->val.int_val;
 			break;
 
 		case TYPE_COUNT:
 		case TYPE_COUNTER:
-			desc->Add(val->val.uint_val);
+			j = val->val.uint_val;
 			break;
 
 		case TYPE_PORT:
-			desc->Add(val->val.port_val.port);
+			j = val->val.port_val.port;
 			break;
 
 		case TYPE_SUBNET:
-			desc->AddRaw("\"", 1);
-			desc->Add(Render(val->val.subnet_val));
-			desc->AddRaw("\"", 1);
+			j = Formatter::Render(val->val.subnet_val);
 			break;
 
 		case TYPE_ADDR:
-			desc->AddRaw("\"", 1);
-			desc->Add(Render(val->val.addr_val));
-			desc->AddRaw("\"", 1);
+			j = Formatter::Render(val->val.addr_val);
 			break;
 
 		case TYPE_DOUBLE:
 		case TYPE_INTERVAL:
-			desc->Add(val->val.double_val);
+			j = val->val.double_val;
 			break;
 
 		case TYPE_TIME:
@ -110,15 +115,13 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 			time_t the_time = time_t(floor(val->val.double_val));
 			struct tm t;
 
-			desc->AddRaw("\"", 1);
-
 			if ( ! gmtime_r(&the_time, &t) ||
 			     ! strftime(buffer, sizeof(buffer), "%Y-%m-%dT%H:%M:%S", &t) )
 				{
 				GetThread()->Error(GetThread()->Fmt("json formatter: failure getting time: (%lf)", val->val.double_val));
 				// This was a failure, doesn't really matter what gets put here
 				// but it should probably stand out...
-				desc->Add("2000-01-01T00:00:00.000000");
+				j = "2000-01-01T00:00:00.000000";
 				}
 			else
 				{
@ -129,20 +132,17 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 				frac += 1;
 
 				snprintf(buffer2, sizeof(buffer2), "%s.%06.0fZ", buffer, fabs(frac) * 1000000);
-				desc->Add(buffer2);
+				j = buffer2;
 				}
-
-			desc->AddRaw("\"", 1);
 			}
 
 		else if ( timestamps == TS_EPOCH )
-			desc->Add(val->val.double_val);
+			j = val->val.double_val;
 
 		else if ( timestamps == TS_MILLIS )
 			{
 			// ElasticSearch uses milliseconds for timestamps
-			uint64_t ts = (uint64_t) (val->val.double_val * 1000);
-			desc->Add(ts);
+			j = (uint64_t) (val->val.double_val * 1000);
 			}
 
 		break;
@ -153,74 +153,40 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 		case TYPE_FILE:
 		case TYPE_FUNC:
 			{
-			desc->AddRaw("\"", 1);
-
-			for ( int i = 0; i < val->val.string_val.length; ++i )
-				{
-				char c = val->val.string_val.data[i];
-
-				// 2byte Unicode escape special characters.
-				if ( c < 32 || c > 126 || c == '\n' || c == '"' || c == '\'' || c == '\\' || c == '&' )
-					{
-					desc->AddRaw("\\u00", 4);
-					char hex[2] = {'0', '0'};
-					bytetohex(c, hex);
-					desc->AddRaw(hex, 1);
-					desc->AddRaw(hex + 1, 1);
-					}
-				else
-					desc->AddRaw(&c, 1);
-				}
-
-			desc->AddRaw("\"", 1);
+			j = json_escape_utf8(string(val->val.string_val.data, val->val.string_val.length));
 			break;
 			}
 
 		case TYPE_TABLE:
 			{
-			desc->AddRaw("[", 1);
-
-			for ( int j = 0; j < val->val.set_val.size; j++ )
-				{
-				if ( j > 0 )
-					desc->AddRaw(",", 1);
-
-				Describe(desc, val->val.set_val.vals[j]);
-				}
-
-			desc->AddRaw("]", 1);
+			j = ZeekJson::array();
+
+			for ( int idx = 0; idx < val->val.set_val.size; idx++ )
+				j.push_back(BuildJSON(val->val.set_val.vals[idx]));
+
 			break;
 			}
 
 		case TYPE_VECTOR:
 			{
-			desc->AddRaw("[", 1);
-
-			for ( int j = 0; j < val->val.vector_val.size; j++ )
-				{
-				if ( j > 0 )
-					desc->AddRaw(",", 1);
-				Describe(desc, val->val.vector_val.vals[j]);
-				}
-
-			desc->AddRaw("]", 1);
+			j = ZeekJson::array();
+
+			for ( int idx = 0; idx < val->val.vector_val.size; idx++ )
+				j.push_back(BuildJSON(val->val.vector_val.vals[idx]));
+
 			break;
 			}
 
 		default:
-			return false;
 			break;
 		}
 
-	return true;
-	}
-
-threading::Value* JSON::ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype) const
+	if ( ! name.empty() && ! j.is_null() )
 		{
-		GetThread()->Error("JSON formatter does not support parsing yet.");
-		return NULL;
+		ZeekJson j2 = ZeekJson::object();
+		j2[name] = j;
+		return j2;
 		}
 
-void JSON::SurroundingBraces(bool use_braces)
-	{
-	surrounding_braces = use_braces;
+	return j;
 	}
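The conversion boils down to one pattern: build a json value per field, attach it under the field's name, and serialize the whole tree with a single dump() call instead of hand-managing commas, braces, and quoting on the ODesc. A minimal stand-alone sketch of that pattern with plain nlohmann::json (the ordered ZeekJson alias is introduced in JSON.h below; the include path and field names are illustrative):

#include <cstdint>
#include <iostream>

#include "json.hpp"  // nlohmann/json single header; path illustrative

int main()
	{
	nlohmann::json j = nlohmann::json::object();

	j["ts"] = static_cast<uint64_t>(1562969512123);  // a TS_MILLIS-style value
	j["proto"] = "tcp";                              // rendered enums become strings

	nlohmann::json ports = nlohmann::json::array();  // sets/vectors become arrays
	ports.push_back(80);
	ports.push_back(443);
	j["ports"] = ports;

	// One dump() serializes the whole tree, replacing the old
	// hand-written comma/brace bookkeeping.
	std::cout << j.dump() << std::endl;  // {"ports":[80,443],"proto":"tcp","ts":1562969512123}
	}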
@ -4,9 +4,19 @@
 #define THREADING_FORMATTERS_JSON_H
 
 #include "../Formatter.h"
+#include "3rdparty/json.hpp"
+#include "3rdparty/fifo_map.hpp"
+
 
 namespace threading { namespace formatter {
 
+// Define a class for use with the json library that orders the keys in the same order that
+// they were inserted. By default, the json library orders them alphabetically and we don't
+// want it like that.
+template<class K, class V, class compare, class A>
+using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
+using ZeekJson = nlohmann::basic_json<json_fifo_map>;
+
 /**
  * A thread-safe class for converting values into a JSON representation
  * and vice versa.
@ -27,9 +37,10 @@ public:
 	                      threading::Value** vals) const override;
 	threading::Value* ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype = TYPE_ERROR) const override;
 
-	void SurroundingBraces(bool use_braces);
-
 private:
+	ZeekJson BuildJSON(Value* val, const string& name = "") const;
 
 	TimeFormat timestamps;
-	bool surrounding_braces;
 };
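Why the fifo_map plumbing matters: nlohmann::json's default object storage is std::map, which serializes keys alphabetically, while the fifo_map-backed alias keeps keys in insertion order so JSON logs list fields in the order they were added. A stand-alone sketch under those assumptions (header paths and the alias names are illustrative):

#include <iostream>

#include "json.hpp"      // nlohmann/json; path illustrative
#include "fifo_map.hpp"  // nlohmann/fifo_map; path illustrative

template<class K, class V, class compare, class A>
using my_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
using OrderedJson = nlohmann::basic_json<my_fifo_map>;

int main()
	{
	nlohmann::json sorted;   // default: keys come back alphabetically
	OrderedJson ordered;     // fifo_map-backed: keys keep insertion order

	for ( auto key : { "ts", "uid", "id.orig_h" } )
		{
		sorted[key] = 1;
		ordered[key] = 1;
		}

	std::cout << sorted.dump() << std::endl;   // {"id.orig_h":1,"ts":1,"uid":1}
	std::cout << ordered.dump() << std::endl;  // {"ts":1,"uid":1,"id.orig_h":1}
	}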
95
src/util.cc
@ -1842,9 +1842,7 @@ void bro_strerror_r(int bro_errno, char* buf, size_t buflen)
 	strerror_r_helper(res, buf, buflen);
 	}
 
-char* zeekenv(const char* name)
-	{
-	static std::map<const char*, const char*, CompareString> legacy_vars = {
+static const std::map<const char*, const char*, CompareString> legacy_vars = {
 	{ "ZEEKPATH", "BROPATH" },
 	{ "ZEEK_PLUGIN_PATH", "BRO_PLUGIN_PATH" },
 	{ "ZEEK_PLUGIN_ACTIVATE", "BRO_PLUGIN_ACTIVATE" },
@ -1858,8 +1856,10 @@ char* zeekenv(const char* name)
 	{ "ZEEK_BROKER_MAX_THREADS", "BRO_BROKER_MAX_THREADS" },
 	{ "ZEEK_DEFAULT_LISTEN_ADDRESS", "BRO_DEFAULT_LISTEN_ADDRESS" },
 	{ "ZEEK_DEFAULT_LISTEN_RETRY", "BRO_DEFAULT_LISTEN_RETRY" },
-	};
+};
+
+char* zeekenv(const char* name)
+	{
 	auto rval = getenv(name);
 
 	if ( rval )
@ -1872,3 +1872,90 @@ char* zeekenv(const char* name)
 
 	return getenv(it->second);
 	}
+
+static string json_escape_byte(char c)
+	{
+	char hex[2] = {'0', '0'};
+	bytetohex(c, hex);
+
+	string result = "\\x";
+	result.append(hex, 2);
+
+	return result;
+	}
+
+string json_escape_utf8(const string& val)
+	{
+	string result;
+	result.reserve(val.length());
+
+	size_t char_start = 0;
+	size_t idx;
+	for ( idx = 0; idx < val.length(); )
+		{
+		// Normal ASCII characters plus a few of the control characters can be inserted directly. The rest of
+		// the control characters should be escaped as regular bytes.
+		if ( ( val[idx] >= 32 && val[idx] <= 127 ) ||
+		     val[idx] == '\b' || val[idx] == '\f' || val[idx] == '\n' || val[idx] == '\r' || val[idx] == '\t' )
+			{
+			result.push_back(val[idx]);
+			++idx;
+			continue;
+			}
+		else if ( val[idx] >= 0 && val[idx] < 32 )
+			{
+			result.append(json_escape_byte(val[idx]));
+			++idx;
+			continue;
+			}
+
+		// The next bit is based on the table at https://en.wikipedia.org/wiki/UTF-8#Description.
+		// If next character is 11110xxx, this is a 4-byte UTF-8
+		unsigned int char_size = 0;
+		if ( (val[idx] & 0xF8) == 0xF0 ) char_size = 4;
+
+		// If next character is 1110xxxx, this is a 3-byte UTF-8
+		else if ( (val[idx] & 0xF0) == 0xE0 ) char_size = 3;
+
+		// If next character is 110xxxxx, this is a 2-byte UTF-8
+		else if ( (val[idx] & 0xE0) == 0xC0 ) char_size = 2;
+
+		// This byte isn't a continuation byte, insert it as a byte and continue.
+		if ( char_size == 0 )
+			{
+			result.append(json_escape_byte(val[idx]));
+			++idx;
+			continue;
+			}
+
+		// If we don't have enough bytes to get to the end of character, give up and insert all of the rest
+		// of them as escaped values.
+		if ( char_size > (val.length() - idx) )
+			break;
+
+		// Loop through the rest of the supposed character and see if this is a valid character.
+		size_t c_idx = idx + 1;
+		for ( ; c_idx < idx + char_size; c_idx++ )
+			if ( (val[c_idx] & 0xC0) != 0x80 ) break;
+
+		// If we didn't make it to the end of the character without finding an error, insert just this
+		// character and skip ahead. Otherwise insert all of the bytes for this character into the result.
+		if ( c_idx != idx + char_size )
+			{
+			result.append(json_escape_byte(val[idx]));
+			++idx;
+			continue;
+			}
+		else
+			{
+			for ( size_t step = 0; step < char_size; step++, idx++ )
+				result.push_back(val[idx]);
+			}
+		}
+
+	if ( idx != val.length() )
+		for ( ; idx < val.length(); ++idx )
+			result.append(json_escape_byte(val[idx]));
+
+	return result;
+	}
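A worked example of the lead-byte masks used above: a UTF-8 lead byte encodes the sequence length in its top bits, and every continuation byte must match 10xxxxxx. A stand-alone sketch, with utf8_char_size as an illustrative helper rather than Zeek code:

#include <cstdio>

static unsigned utf8_char_size(unsigned char b)
	{
	if ( (b & 0xF8) == 0xF0 ) return 4;  // 11110xxx: 4-byte sequence
	if ( (b & 0xF0) == 0xE0 ) return 3;  // 1110xxxx: 3-byte sequence
	if ( (b & 0xE0) == 0xC0 ) return 2;  // 110xxxxx: 2-byte sequence
	return 0;                            // ASCII or a stray continuation byte
	}

int main()
	{
	// "\xc3\xb1" is U+00F1 (ñ): lead byte 0xC3 announces a 2-byte sequence,
	// and the continuation byte must satisfy (b & 0xC0) == 0x80.
	unsigned char good[] = { 0xC3, 0xB1 };
	printf("size=%u cont_ok=%d\n", utf8_char_size(good[0]),
	       (good[1] & 0xC0) == 0x80);  // size=2 cont_ok=1

	// 0x28 is '(' and fails the continuation test, so json_escape_utf8()
	// falls back to escaping the lead byte as \xc3 and moving on.
	unsigned char bad[] = { 0xC3, 0x28 };
	printf("size=%u cont_ok=%d\n", utf8_char_size(bad[0]),
	       (bad[1] & 0xC0) == 0x80);   // size=2 cont_ok=0
	}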
@ -565,4 +565,12 @@ std::unique_ptr<T> build_unique (Args&&... args) {
 	return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
 }
 
+/**
+ * Escapes bytes in a string that are not valid UTF8 characters with \xYY format. Used
+ * by the JSON writer and BIF methods.
+ * @param val the input string to be escaped
+ * @return the escaped string
+ */
+std::string json_escape_utf8(const std::string& val);
+
 #endif
76
src/zeek.bif
@ -1981,68 +1981,7 @@ function lookup_ID%(id: string%) : any
 ## Returns: A table that describes the fields of a record.
 function record_fields%(rec: any%): record_field_table
 	%{
-	TableVal* fields = new TableVal(record_field_table);
-
-	auto t = rec->Type();
-
-	if ( t->Tag() != TYPE_RECORD && t->Tag() != TYPE_TYPE )
-		{
-		reporter->Error("non-record value/type passed to record_fields");
-		return fields;
-		}
-
-	RecordType* rt = nullptr;
-	RecordVal* rv = nullptr;
-
-	if ( t->Tag() == TYPE_RECORD )
-		{
-		rt = t->AsRecordType();
-		rv = rec->AsRecordVal();
-		}
-	else
-		{
-		t = t->AsTypeType()->Type();
-
-		if ( t->Tag() != TYPE_RECORD )
-			{
-			reporter->Error("non-record value/type passed to record_fields");
-			return fields;
-			}
-
-		rt = t->AsRecordType();
-		}
-
-	for ( int i = 0; i < rt->NumFields(); ++i )
-		{
-		BroType* ft = rt->FieldType(i);
-		TypeDecl* fd = rt->FieldDecl(i);
-		Val* fv = nullptr;
-
-		if ( rv )
-			fv = rv->Lookup(i);
-
-		if ( fv )
-			Ref(fv);
-
-		bool logged = (fd->attrs && fd->FindAttr(ATTR_LOG) != 0);
-
-		RecordVal* nr = new RecordVal(record_field);
-
-		if ( ft->Tag() == TYPE_RECORD )
-			nr->Assign(0, new StringVal("record " + ft->GetName()));
-		else
-			nr->Assign(0, new StringVal(type_name(ft->Tag())));
-
-		nr->Assign(1, val_mgr->GetBool(logged));
-		nr->Assign(2, fv);
-		nr->Assign(3, rt->FieldDefault(i));
-
-		Val* field_name = new StringVal(rt->FieldName(i));
-		fields->Assign(field_name, nr);
-		Unref(field_name);
-		}
-
-	return fields;
+	return rec->GetRecordFields();
 	%}
 
 ## Enables detailed collection of profiling statistics. Statistics include
@ -5100,3 +5039,16 @@ function anonymize_addr%(a: addr, cl: IPAddrAnonymizationClass%): addr
 	                       (enum ip_addr_anonymization_class_t) anon_class));
 	}
 	%}
+
+## A function to convert arbitrary Zeek data into a JSON string.
+##
+## v: The value to convert to JSON. Typically a record.
+##
+## only_loggable: If the v value is a record this will only cause
+##                fields with the &log attribute to be included in the JSON.
+##
+## returns: a JSON formatted string.
+function to_json%(val: any, only_loggable: bool &default=F, field_escape_pattern: pattern &default=/^_/%): string
+	%{
+	return val->ToJSON(only_loggable, field_escape_pattern);
+	%}
@ -0,0 +1 @@
+warning in /Users/tim/Desktop/projects/zeek/testing/btest/../../scripts//base/utils/json.zeek, line 2: deprecated script loaded from command line arguments ="Remove in 3.1. to_json is now always available as a built-in function."
@ -249,7 +249,6 @@ scripts/base/init-default.zeek
 scripts/base/frameworks/openflow/main.zeek
 scripts/base/frameworks/openflow/plugins/__load__.zeek
 scripts/base/frameworks/openflow/plugins/ryu.zeek
-scripts/base/utils/json.zeek
 scripts/base/frameworks/openflow/plugins/log.zeek
 scripts/base/frameworks/openflow/plugins/broker.zeek
 scripts/base/frameworks/openflow/non-cluster.zeek
@ -8,3 +8,4 @@
 -./frameworks/openflow/cluster.zeek
 -./frameworks/packet-filter/cluster.zeek
 -./frameworks/sumstats/cluster.zeek
+-./utils/json.zeek
@ -1,2 +1,5 @@
-error in /home/jon/pro/zeek/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 4: arithmetic mixed with non-arithmetic (set[string] and 0)
-error in /home/jon/pro/zeek/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 4: &default value has inconsistent type (0 and set[string])
+error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 4: arithmetic mixed with non-arithmetic (set[string] and 0)
+error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 4: &default value has inconsistent type (0 and set[string])
+error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &default is not valid for global variables (&default=10)
+error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &default is not valid for global variables (&optional, &default=9)
+error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &optional is not valid for global variables (&optional, &default=9, &optional)
@ -821,7 +821,6 @@
 0.000000 MetaHookPost LoadFile(0, base<...>/input.bif.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/intel) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/irc) -> -1
-0.000000 MetaHookPost LoadFile(0, base<...>/json.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/krb) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/logging) -> -1
 0.000000 MetaHookPost LoadFile(0, base<...>/logging.bif.zeek) -> -1
@ -1712,7 +1711,6 @@
 0.000000 MetaHookPre LoadFile(0, base<...>/input.bif.zeek)
 0.000000 MetaHookPre LoadFile(0, base<...>/intel)
 0.000000 MetaHookPre LoadFile(0, base<...>/irc)
-0.000000 MetaHookPre LoadFile(0, base<...>/json.zeek)
 0.000000 MetaHookPre LoadFile(0, base<...>/krb)
 0.000000 MetaHookPre LoadFile(0, base<...>/logging)
 0.000000 MetaHookPre LoadFile(0, base<...>/logging.bif.zeek)
@ -2611,7 +2609,6 @@
 0.000000 | HookLoadFile base<...>/input.bif.zeek
 0.000000 | HookLoadFile base<...>/intel
 0.000000 | HookLoadFile base<...>/irc
-0.000000 | HookLoadFile base<...>/json.zeek
 0.000000 | HookLoadFile base<...>/krb
 0.000000 | HookLoadFile base<...>/logging
 0.000000 | HookLoadFile base<...>/logging.bif.zeek
@ -1,22 +1,22 @@
-{"d":2.153226e+09}
-{"d":2.153226e+09}
-{"d":2.153226e+09}
+{"d":2153226000.0}
+{"d":2153226000.1}
+{"d":2153226000.123457}
 {"d":1.0}
 {"d":1.1}
 {"d":1.123457}
 {"d":-1.123457}
 {"d":1.123456789}
 {"d":-1.123456789}
 {"d":1.1234}
 {"d":0.1234}
 {"d":50000.0}
 {"d":-50000.0}
-{"d":3.140000e+15}
-{"d":-3.140000e+15}
-{"d":1.790000e+308}
-{"d":-1.790000e+308}
-{"d":0.000012}
-{"d":0}
-{"d":-0}
-{"d":inf}
-{"d":-inf}
-{"d":0.0}
-{"d":nan}
+{"d":3.14e+15}
+{"d":-3.14e+15}
+{"d":1.79e+308}
+{"d":-1.79e+308}
+{"d":1.23456789e-05}
+{"d":2.23e-308}
+{"d":-2.23e-308}
+{"d":null}
+{"d":null}
+{"d":-0.0}
+{"d":null}
@ -3,7 +3,7 @@
 #empty_field (empty)
 #unset_field -
 #path test
-#open 2017-11-06-19-58-08
+#open 2019-07-01-17-40-55
 #fields d
 #types double
 2153226000.0
@ -28,4 +28,4 @@ inf
 -inf
 0.0
 nan
-#close 2017-11-06-19-58-08
+#close 2019-07-01-17-40-55
@ -0,0 +1,12 @@
+{"s":"a"}
+{"s":"\b\f\n\r\t\\x00\\x15"}
+{"s":"ñ"}
+{"s":"\\xc3("}
+{"s":"\\xa0\\xa1"}
+{"s":"₡"}
+{"s":"\\xe2(\\xa1"}
+{"s":"\\xe2\\x82("}
+{"s":"𐌼"}
+{"s":"\\xf0(\\x8c\\xbc"}
+{"s":"\\xf0\\x90(\\xbc"}
+{"s":"\\xf0(\\x8c("}
@ -1 +1 @@
-{"b":true,"i":-42,"e":"SSH::LOG","c":21,"p":123,"sn":"10.0.0.0/24","a":"1.2.3.4","d":3.14,"t":1215620010.54321,"iv":100.0,"s":"hurz","sc":[2,4,1,3],"ss":["BB","AA","CC"],"se":[],"vc":[10,20,30],"ve":[],"f":"SSH::foo\u000a{ \u000aif (0 < SSH::i) \u000a\u0009return (Foo);\u000aelse\u000a\u0009return (Bar);\u000a\u000a}"}
+{"b":true,"i":-42,"e":"SSH::LOG","c":21,"p":123,"sn":"10.0.0.0/24","a":"1.2.3.4","d":3.14,"t":1215620010.54321,"iv":100.0,"s":"hurz","sc":[2,4,1,3],"ss":["BB","AA","CC"],"se":[],"vc":[10,20,30],"ve":[],"f":"SSH::foo\n{ \nif (0 < SSH::i) \n\treturn (Foo);\nelse\n\treturn (Bar);\n\n}"}
@ -1,22 +1,22 @@
 http://127.0.0.1:8080/stats/flowentry/clear/42
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 0, "idle_timeout": 0, "hard_timeout": 0, "match": {}, "actions": [{"port": 3, "type": "OUTPUT"}, {"port": 7, "type": "OUTPUT"}], "cookie": 4398046511105, "flags": 0, "dpid": 42}
+{"priority":0,"idle_timeout":0,"hard_timeout":0,"match":{},"actions":[{"port":3,"type":"OUTPUT"},{"port":7,"type":"OUTPUT"}],"cookie":4398046511105,"flags":0,"dpid":42}
 Flow_mod_success
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "74.53.140.153/32", "tp_dst": 25, "tp_src": 1470, "nw_proto": 6, "dl_type": 2048, "nw_src": "10.10.1.4/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"74.53.140.153/32","tp_dst":25,"tp_src":1470,"nw_proto":6,"dl_type":2048,"nw_src":"10.10.1.4/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "10.10.1.4/32", "tp_dst": 1470, "tp_src": 25, "nw_proto": 6, "dl_type": 2048, "nw_src": "74.53.140.153/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"10.10.1.4/32","tp_dst":1470,"tp_src":25,"nw_proto":6,"dl_type":2048,"nw_src":"74.53.140.153/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 Flow_mod_success
 Flow_mod_success
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "192.168.133.102/32", "tp_dst": 25, "tp_src": 49648, "nw_proto": 6, "dl_type": 2048, "nw_src": "192.168.133.100/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.102/32","tp_dst":25,"tp_src":49648,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.100/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "192.168.133.100/32", "tp_dst": 49648, "tp_src": 25, "nw_proto": 6, "dl_type": 2048, "nw_src": "192.168.133.102/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.100/32","tp_dst":49648,"tp_src":25,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.102/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 Flow_mod_success
 Flow_mod_success
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "17.167.150.73/32", "tp_dst": 443, "tp_src": 49655, "nw_proto": 6, "dl_type": 2048, "nw_src": "192.168.133.100/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"17.167.150.73/32","tp_dst":443,"tp_src":49655,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.100/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 http://127.0.0.1:8080/stats/flowentry/add
-{"priority": 5, "idle_timeout": 30, "hard_timeout": 0, "match": {"nw_dst": "192.168.133.100/32", "tp_dst": 49655, "tp_src": 443, "nw_proto": 6, "dl_type": 2048, "nw_src": "17.167.150.73/32"}, "actions": [], "cookie": 4398046511146, "flags": 0, "dpid": 42}
+{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.100/32","tp_dst":49655,"tp_src":443,"nw_proto":6,"dl_type":2048,"nw_src":"17.167.150.73/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42}
 Flow_mod_success
 Flow_mod_success
@ -8,32 +8,33 @@ true
 "-12.0 hrs"
 "hello"
 ""
-65535
-1
-123
-0
+{"port":65535,"proto":"tcp"}
+{"port":1,"proto":"udp"}
+{"port":123,"proto":"icmp"}
+{"port":0,"proto":"unknown"}
 "1.2.3.4"
 "ffff:1234::1"
 "123.123.123.123"
 "192.0.0.0/8"
 "fe80::/64"
 "Red"
-{"s": "test", "c": 100}
-{"s": "test"}
-{"s": "test"}
-{"m": {"s": "test"}}
+"/^?(^abcd)$?/"
+{"s":"test","c":100}
+{"s":"test"}
+{"s":"test"}
+{"m":{"s":"test"}}
 []
-[2, 1]
+[2,1]
 ["1.2.3.4"]
-[[true, false]]
-[{"s": "test"}]
+[[true,false]]
+[{"s":"test"}]
 []
-[2, 1]
+[2,1]
 ["1.2.3.4"]
-[{"s": "test"}]
-[{"s": "test"}]
+[{"s":"test"}]
+[{"s":"test"}]
 {}
-{"2": "10.2.2.2", "1": "10.1.1.1"}
-{"10.1.1.1": {"a": 1}, "10.2.2.2": {"b": 2}}
-{"10.1.1.1": [1, 2], "10.2.2.2": [3, 5]}
-{"1": {"s": "test"}}
+{"2":"10.2.2.2","1":"10.1.1.1"}
+{"10.1.1.1":{"a":1},"10.2.2.2":{"b":2}}
+{"10.1.1.1":[1,2],"10.2.2.2":[3,5]}
+{"1":{"s":"test"}}
@ -2,3 +2,8 @@
 # @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff out
 
 global ss: set[string] &default=0;
+global d: count &default = 10
+	&default = 9
+	&optional
+	&log
+	&add_func = function(): count { return 3; };
@ -0,0 +1,59 @@
+#
+# @TEST-EXEC: zeek -b %INPUT
+# @TEST-EXEC: btest-diff ssh.log
+#
+# Testing all possible types.
+
+redef LogAscii::use_json = T;
+
+
+module SSH;
+
+export {
+	redef enum Log::ID += { LOG };
+
+	type Log: record {
+		s: string;
+	} &log;
+}
+
+event zeek_init()
+	{
+	Log::create_stream(SSH::LOG, [$columns=Log]);
+
+	# Strings taken from https://stackoverflow.com/a/3886015
+
+	# Valid ASCII and valid ASCII control characters
+	Log::write(SSH::LOG, [$s="a"]);
+	Log::write(SSH::LOG, [$s="\b\f\n\r\t\x00\x15"]);
+
+	# Valid 2 Octet Sequence
+	Log::write(SSH::LOG, [$s="\xc3\xb1"]);
+
+	# Invalid 2 Octet Sequence
+	Log::write(SSH::LOG, [$s="\xc3\x28"]);
+
+	# Invalid Sequence Identifier
+	Log::write(SSH::LOG, [$s="\xa0\xa1"]);
+
+	# Valid 3 Octet Sequence
+	Log::write(SSH::LOG, [$s="\xe2\x82\xa1"]);
+
+	# Invalid 3 Octet Sequence (in 2nd Octet)
+	Log::write(SSH::LOG, [$s="\xe2\x28\xa1"]);
+
+	# Invalid 3 Octet Sequence (in 3rd Octet)
+	Log::write(SSH::LOG, [$s="\xe2\x82\x28"]);
+
+	# Valid 4 Octet Sequence
+	Log::write(SSH::LOG, [$s="\xf0\x90\x8c\xbc"]);
+
+	# Invalid 4 Octet Sequence (in 2nd Octet)
+	Log::write(SSH::LOG, [$s="\xf0\x28\x8c\xbc"]);
+
+	# Invalid 4 Octet Sequence (in 3rd Octet)
+	Log::write(SSH::LOG, [$s="\xf0\x90\x28\xbc"]);
+
+	# Invalid 4 Octet Sequence (in 4th Octet)
+	Log::write(SSH::LOG, [$s="\xf0\x28\x8c\x28"]);
+	}
@ -56,7 +56,7 @@ event slow_death()
 	schedule 2sec { die() };
 	}
 
-event kill_worker()
+event ready()
 	{
 	Reporter::info("qux");
 	Broker::publish("death", slow_death);
@ -69,20 +69,30 @@ event zeek_init()
 		Broker::subscribe("death");
 		suspend_processing();
 		}
+
+	if ( Cluster::node == "manager-1" )
+		{
+		Broker::subscribe("ready");
+		}
 	}
 
+global conn_count = 0;
+
+event new_connection(c: connection)
+	{
+	++conn_count;
+
+	if ( conn_count == 30 )
+		{
+		Reporter::info("qux");
+		Broker::publish("ready", ready);
+		}
+	}
+
 event Broker::peer_added(endpoint: Broker::EndpointInfo, msg: string)
 	{
-	if ( Cluster::node == "manager-1" )
-		{
-		schedule 2sec { kill_worker() };
-		}
-
 	if ( Cluster::node == "worker-1" )
 		{
 		continue_processing();
+		Reporter::info("qux");
 		}
 	}
 
 event Broker::peer_lost(endpoint: Broker::EndpointInfo, msg: string)
@ -72,6 +72,9 @@ event zeek_init()
 	local e: color = Red;
 	print to_json(e);
 
+	local p: pattern = /^abcd/;
+	print to_json(p);
+
 	# #########################
 	# Test the container types:
 
@ -9,5 +9,5 @@ else
 	sed="sed -E"
 fi
 
-$sed 's/(0\.000000)|([0-9]{9,10}\.[0-9]{2,8})/XXXXXXXXXX.XXXXXX/g' | \
+$sed 's/(0\.000000)|([0-9]{9,10}\.[0-9]{1,8})/XXXXXXXXXX.XXXXXX/g' | \
 $sed 's/^ *#(open|close).(19|20)..-..-..-..-..-..$/#\1 XXXX-XX-XX-XX-XX-XX/g'
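The canonifier's lower bound moves from {2,8} to {1,8} because a timestamp that lands on a decisecond multiple prints only one fractional digit (e.g. 1562969512.5), which the old pattern left uncanonified. A quick stand-alone check of the two patterns; std::regex is used here purely for illustration, while the script itself uses sed:

#include <iostream>
#include <regex>
#include <string>

int main()
	{
	std::regex old_re("[0-9]{9,10}\\.[0-9]{2,8}");  // requires >= 2 fractional digits
	std::regex new_re("[0-9]{9,10}\\.[0-9]{1,8}");  // accepts a single digit too
	std::string ts = "1562969512.5";                // decisecond-multiple timestamp

	std::cout << std::regex_search(ts, old_re) << std::endl;  // 0: not matched
	std::cout << std::regex_search(ts, new_re) << std::endl;  // 1: matched
	}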