diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4315bdfbb9..3992307028 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,3 +23,9 @@ repos: hooks: - id: cmake-format exclude: '^auxil/.*$' + +- repo: https://github.com/crate-ci/typos + rev: v1.15.0 + hooks: + - id: typos + exclude: '^(.typos.toml|src/SmithWaterman.cc|testing/.*|auxil/.*|scripts/base/frameworks/files/magic/.*|CHANGES)$' diff --git a/.typos.toml b/.typos.toml new file mode 100644 index 0000000000..cc29d0abb1 --- /dev/null +++ b/.typos.toml @@ -0,0 +1,71 @@ +[default] +extend-ignore-re = [ + "#.*MSDN Ref.*\\[ms-oaut\\]", + # seh too close to she + "registered SEH to support IDL", + # ALLO is a valid FTP command + "\"ALLO\".*200", + "des-ede3-cbc-Env-OID", + "Remove in v6.1.*SupressWeird", + "max_repititions:.*Remove in v6.1", + # On purpose + "\"THE NETBIOS NAM\"", + # NFS stuff. + "commited: stable_how_t &optional", + # script_opt uses "ot" a lot, but it's very close to "to", so + # don't want to add it as identifier. Match some patterns. + "ASSERT\\(ot == OP_.*", + "ot->InternalType", + "ot->Tag\\(\\) == TYPE_.*", + "auto.* ot =", + "ot = OP_.*", + "if \\( ot == OP_.*", + "ot->Yield\\(\\)->InternalType\\(\\)", + "switch \\( ot \\)", + "\\(ZAMOpType ot\\)", + + # News stuff + "SupressWeirds.*deprecated", + "\"BaR\"", + "\"xFoObar\"", + "\"FoO\"", +] + +extend-ignore-identifiers-re = [ + "TLS_.*_EDE.*_.*", + "SSL.*_EDE.*_.*", + "_3DES_EDE_CBC_SHA", + "GOST_R_.*", + "icmp6_nd_.*", + "ND_ROUTER_.*", + "ND_NEIGHBOR_.*", + ".*_ND_option.*", +] + +[default.extend-identifiers] +BuildNDOptionsVal = "BuildNDOptionsVal" +ESC_SER = "ESC_SER" +MCA_OCCURED = "MCA_OCCURED" +MNT3ERR_ACCES = "MNT3ERR_ACCES" +ND_QUEUE_OVERFLOW = "ND_QUEUE_OVERFLOW" +ND_REDIRECT = "ND_REDIRECT" +NED_ACK = "NED_ACK" +NFS3ERR_ACCES = "NFS3ERR_ACCES" +NO_SEH = "NO_SEH" +OP_SWITCHS_VVV = "OP_SWITCHS_VVV" +O_WRONLY = "O_WRONLY" +RPC_NT_CALL_FAILED_DNE = "RPC_NT_CALL_FAILED_DNE" +RpcAddPrintProvidor = "RpcAddPrintProvidor" +RpcDeletePrintProvidor = "RpcDeletePrintProvidor" +THA = "THA" +ar_tha = "ar_tha" +have_2nd = "have_2nd" +ot1 = "ot1" +ot2 = "ot2" +uses_seh = "uses_seh" + +[default.extend-words] +caf = "caf" +helo = "helo" +# Seems we use this in the management framework +requestor = "requestor" diff --git a/CHANGES b/CHANGES index 9f36075a4d..28521c4131 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,308 @@ +6.1.0-dev.115 | 2023-06-21 17:45:35 -0700 + + * formatters/JSON: Make JSON::NullDoubleWriter use zeek::json::detail version (Arne Welzel, Corelight) + + Not using inheritance and preferring composition to avoid including the + detail/json.h header do an indirection via a unique_ptr and then just + re-use the Double() implementation. + + * formatters/JSON: Remove surrounding_braces (Arne Welzel, Corelight) + + This seems to have become unused 4 years ago with 9b76e8faf44e90c41f33f24b18900a50f0840c5a, + remove it. + + * formatters/JSON: Prepare to remove rapidjson from installed Zeek headers (Arne Welzel, Corelight) + + threading/formatters/JSON.h currently includes rapidjson headers for declaring + the NullDoubleWriter. This appears mostly an internal detail, but + results in the situation that 1) we need to ship rapidjson headers with + the Zeek install tree and 2) taking care that external plugins are able + to find these headers should they include formatters/JSON.h. 
+ + There are currently no other Zeek headers that include rapidjson, so this + seems very unfortunate and self-inflicted given it's not actually required. + + Attempt to hide this implementation detail with the goal to remove the + rapidjson includes with v7.1 and then also stop bundling and exposing + the include path to external plugins. + + The NullDoubleWriter implementation moves into a new formatters/detail/json.h + header which is not installed. + + Closes #3128 + +6.1.0-dev.110 | 2023-06-21 15:36:32 -0700 + + * Update zeekjs submodule (Tim Wojtulewicz, Corelight) + + * Update broker submodule (Tim Wojtulewicz, Corelight) + +6.1.0-dev.108 | 2023-06-21 15:33:50 -0700 + + * telemetry: Disable metrics centralization by default (Arne Welzel, Corelight) + + Move the telemetry/cluster.zeek file over into policy/frameworks/telemetry/prometheus.zeek. + + Mention it in local.zeek. + + Relates to zeek/broker#366. + +6.1.0-dev.106 | 2023-06-21 15:33:24 -0700 + + * GSSAPI: basic support for MIC/WRAP tokens (Johanna Amann, Corelight) + + When MIC/WRAP tokens are encountered, we now skip the message, instead + of raising a parse error. The data in the messages is encrypted - so it + does not seem work to raise an event at the moment. + +6.1.0-dev.104 | 2023-06-20 10:15:24 -0700 + + * CI: Force rebuild of tumbleweed VM to pick up newer version of python (Tim Wojtulewicz, Corelight) + + The version of python included in the existing VM doesn't have the sqlite + module included for some reason. Forcing the VM to rebuild installs python311 + which does include it, fixing a build failure. + + * Stmt/Assert: Use std::move() for backtrace element (Arne Welzel, Corelight) + +6.1.0-dev.100 | 2023-06-15 21:13:46 -0700 + + * Update external testing hashes for 6.1 deprecation changes (Tim Wojtulewicz, Corelight) + + * Remove leftover baseline files from 6.1 deprecation removal (Tim Wojtulewicz, Corelight) + +6.1.0-dev.96 | 2023-06-15 16:27:36 -0700 + + * Update dce-rpc constants (Tim Wojtulewicz, Corelight) + + * Update external testing repo hashes to current master commits (Tim Wojtulewicz, Corelight) + +6.1.0-dev.93 | 2023-06-15 16:25:50 -0700 + + * -O gen-C++ refinements for BiF failures, negative vector indices, boolean vector operations (Vern Paxson, Corelight) + +6.1.0-dev.91 | 2023-06-15 14:21:03 -0700 + + * ZAM bug fix for incorrect management of global variables (Vern Paxson, Corelight) + + * ZAM bug fix (simplification) for nested inline functions that don't do anything (Vern Paxson, Corelight) + + * ZAM support for negative vector indices (Vern Paxson, Corelight) + + * ZAM ignores new "assert" statement (Vern Paxson, Corelight) + + * fixes for doing "script validation" to check for ZAM compile-ability (Vern Paxson, Corelight) + + * tweak for Broker BTest that needs a delay for ZAM (Vern Paxson, Corelight) + + * BTest to check for invalid negative vector indices (Vern Paxson, Corelight) + +6.1.0-dev.80 | 2023-06-15 12:15:29 -0700 + + * Force -std=c++17 mode for plugin targets, remove use of RequireCXX17.cmake (Tim Wojtulewicz, Corelight) + + * Add Spicy updates to NEWS for 6.0. (Robin Sommer, Corelight) + +6.1.0-dev.76 | 2023-06-15 18:36:25 +0200 + + * plugin/Manager: Warn when plugin with the same name is found (Arne Welzel, Corelight) + + This was brought up on Slack as a potential source of confusion during + development as it's not visible when plugin directories are ignored outside + of looking into the plugin debug stream. 
I'd actually prefer to just + FatalError() this, but a warning seems reasonably visible for interactive + usage. + + * Options: Do not output full usage on --test error (Arne Welzel, Corelight) + + ...mostly because the usage output is very long and the actual useful + error message scrolls off the screen. + + * GH-3090: CMakeLists: Propagate DOCTEST defines to external plugins (Arne Welzel, Corelight) + + If Zeek has not been built with doctest enabled then it's not easy + to run a plugin's tests (and if they really wanted to they could + again undef the DOCTEST_CONFIG_DISABLE and provide their own doctest + main() implementation. + + * GH-3090: CMakeLists: Add rapidjson/include to zeek_dynamic_plugin_base (Arne Welzel, Corelight) + + threading/formatters/JSON.h has a rapidjson include. Extend the + include directories of external plugins so they are setup to find + these in Zeek's install tree. + + * GH-3090: ZeekPluginBootstrap: Encode Zeek's CMAKE_BUILD_TYPE (Arne Welzel, Corelight) + + ...and bump cmake to have plugin's make use of it. + +6.1.0-dev.66 | 2023-06-14 10:09:46 -0700 + + * Change table initialization deprecation to error (Tim Wojtulewicz, Corelight) + + * Remove 5.2 deprecation we missed (Tim Wojtulewicz, Corelight) + + * Remove Supervisor::NodeConfig (6.1 deprecation) (Tim Wojtulewicz, Corelight) + + * Remove LogAscii::logdir (6.1 deprecation) (Tim Wojtulewicz, Corelight) + + * Make break/next/continue outside loops an error (6.1 deprecation) (Tim Wojtulewicz, Corelight) + + * Remove analyzer_confirmation/analyzer_violation events (6.1 deprecation) (Tim Wojtulewicz, Corelight) + + * Remove full scripts marked as 6.1 deprecations (Tim Wojtulewicz, Corelight) + + * Remove script functions marked as unused (6.1 deprecations) (Tim Wojtulewicz, Corelight) + + * Remove deprecations tagged for v6.1 (Tim Wojtulewicz, Corelight) + +6.1.0-dev.54 | 2023-06-14 18:55:27 +0200 + + * docker: Add libnode to enable JavaScript support (Arne Welzel, Corelight) + + * docker: Bump images to Debian 12 (Arne Welzel, Corelight) + +6.1.0-dev.50 | 2023-06-14 09:25:58 -0700 + + * Fix usage of realpath on macOS, instead preferring grealpath (Tim Wojtulewicz, Corelight) + + * Don't generate minimal tarball anymore (Tim Wojtulewicz, Corelight) + + * NEWS: Fix enumeration in Breaking Changes (Arne Welzel, Corelight) + +6.1.0-dev.46 | 2023-06-14 12:59:41 +0200 + + * all: Fix typos identified by typos pre-commit hook (Arne Welzel, Corelight) + + * NEWS typo fixes (Arne Welzel, Corelight) + + * Start with a typos pre-commit file (Arne Welzel, Corelight) + + Inspired by Spicy and me creating typos everywhere. Some of the + suggestions are also very reasonable. + +6.1.0-dev.42 | 2023-06-14 12:51:08 +0200 + + * NEWS: Small section about assert statement (Arne Welzel, Corelight) + + * Stmt: Rework assertion hooks break semantics (Arne Welzel, Corelight) + + Using break in either of the hooks allows to suppress the default reporter + error message rather than suppressing solely based on the existence of an + assertion_failure() handler. + + * Stmt: Introduce assert statement and related hooks (Arne Welzel, Corelight) + + including two hooks called assertion_failure() and assertion_result() for + customization and tracking of assertion results. 
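    As a rough sketch of how the new statement and hooks fit together (only the
    assert syntax and the hook signatures come from this change; the handler
    bodies below are illustrative assumptions):

        # Count every assert outcome, e.g. for a test summary.
        global n_pass = 0;
        global n_fail = 0;

        hook assertion_result(result: bool, cond: string, msg: string, bt: Backtrace)
            {
            if ( result )
                ++n_pass;
            else
                ++n_fail;
            }

        # Report failures ourselves and suppress the default reporter error.
        hook assertion_failure(cond: string, msg: string, bt: Backtrace)
            {
            print fmt("assertion '%s' failed: %s", cond, msg);
            break;
            }

        event zeek_init()
            {
            assert 1 + 1 == 2;
            assert to_count("42") == 42, "to_count() misparsed 42";
            }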
+ + * ZeekArgs: Helper for empty arguments (Arne Welzel, Corelight) + + * Reporter: Allow AssertStmt to throw InterpreterException (Arne Welzel, Corelight) + + * Lift backtrace() code into Func.{h,cc} (Arne Welzel, Corelight) + + This is to be re-used by the assertion facility. + +6.1.0-dev.32 | 2023-06-13 11:29:36 -0700 + + * Clarify subitem relationship in CMake configure summary. (Benjamin Bannier, Corelight) + +6.1.0-dev.30 | 2023-06-13 11:28:26 -0700 + + * tests: Do not use archiver_rotation_format_func as postprocessor (Arne Welzel, Corelight) + + This test triggered ubsan by putting a function with the wrong type + as a post-processor into the .shadow file. Don't do that. + + Likely Zeek should provide a better error message, but hand-crafting + .shadow files isn't what is normally done and this is to fix the + master build for now. + +6.1.0-dev.28 | 2023-06-13 17:33:31 +0200 + + * GH-3112: cluster/logger: Fix leftover-log-rotation in multi-logger setups (Arne Welzel, Corelight) + + Populating log_metadata during zeek_init() is too late for the + leftover-log-rotation functionality, so do it at script parse time. + + Also, prepend archiver_ to the log_metadata table and encoding function + due to being in the global namespace and to align with the + archiver_rotation_format_func. This hasn't been in a released + version yet, so fine to rename still. + + Closes #3112 + + * cluster/logger: Fix global var reference (Arne Welzel, Corelight) + +6.1.0-dev.25 | 2023-06-12 15:27:20 -0700 + + * Update broker submodule [nomail] (Tim Wojtulewicz, Corelight) + +6.1.0-dev.23 | 2023-06-12 11:06:34 -0700 + + * Bump `auxil/spicy`. (Benjamin Bannier, Corelight) + +6.1.0-dev.19 | 2023-06-12 11:04:42 -0700 + + * test-all-policy: Do not load nodes-experimental/manager.zeek (Arne Welzel, Corelight) + + Turns out loading this script in non-cluster mode uses Cluster::log() + and creates cluster.log in the external baselines saying "cluster + started". Do not load it into test-all-policy.zeek and instead rely + on the new test-all-policy-cluster.test to load it transitively + when running as manager for basic checking. + + * cluster/main: Remove extra @if ( Cluster::is_enabled() ) (Arne Welzel, Corelight) + + These have been discussed in the context of "@if &analyze" [1] and + am much in favor for not disabling/removing ~100 lines (more than + fits on a single terminal) out from the middle of a file. There's no + performance impact for having these handlers enabled unconditionally. + Also, any future work on "@if &analyze" will look at them again which + we could also skip. + + This also reverts back to the behavior where the Cluster::LOG stream + is created even in non cluster setups like in previous Zeek versions. + As long as no one writes to it there's essentially no difference. If + someone does write to Cluster::LOG, I'd argue not black holing these + messages is better. Schema generators using Log::active_streams will + continue to discover Cluster::LOG even if they run in non-cluster + mode. + + https://github.com/zeek/zeek/pull/3062#discussion_r1200498905 + +6.1.0-dev.16 | 2023-06-12 10:56:55 -0700 + + * socks/dpd: Add newer auth methods (Arne Welzel, Corelight) + + The IANA has allocated a few more authentication methods, minimally + extend the signature to catch those. + + * socks/dpd: Fix socks5_server side signature (Arne Welzel, Corelight) + + The server replies with \x05 and identifier for the chosen method. + Not quite sure what the previous signature meant capture. 
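    For reference, the method-selection reply being matched here is just two
    bytes, per the RFC linked below (not taken from the signature itself):

        +------+--------+
        | VER  | METHOD |
        +------+--------+
        | 0x05 | 1 byte |
        +------+--------+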
+ + See also: https://datatracker.ietf.org/doc/html/rfc1928#section-3 + + Closes #3099. + +6.1.0-dev.13 | 2023-06-09 11:03:54 +0200 + + * ci/windows: choco --localonly is gone (Arne Welzel, Corelight) + + choco 2.0 is now used after some caching changes on the Cirrus side [1] + and the --localonly flag is gone from choco [2], remove its usage. + + [1] https://github.com/cirruslabs/cirrus-ci-docs/issues/1174#issuecomment-1580928673 + [2] https://docs.chocolatey.org/en-us/guides/upgrading-to-chocolatey-v2-v6#the-list-command-now-lists-local-packages-only-and-the-local-only-and-lo-options-have-been-removed + +6.1.0-dev.8 | 2023-06-05 14:16:53 +0200 + + * Update zeekctl for pysubnetree/patricia upgrade (Tim Wojtulewicz, Corelight) + + * Update link to slack in README.md (Tim Wojtulewicz, Corelight) + 6.1.0-dev.4 | 2023-05-31 13:48:49 -0700 * Update bifcl, binpac, and broker repos for cmake changes (Tim Wojtulewicz) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5938c37898..76d720c0b4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -188,7 +188,6 @@ add_library(zeek_internal INTERFACE) add_library(Zeek::Internal ALIAS zeek_internal) set_target_properties(zeek_internal PROPERTIES EXPORT_NAME Internal) install(TARGETS zeek_internal EXPORT ZeekTargets) -target_compile_features(zeek_internal INTERFACE ${ZEEK_CXX_STD}) # Skip "link-time version check" in Plugin.h for plugins that we bake in. target_compile_definitions(zeek_internal INTERFACE ZEEK_PLUGIN_SKIP_VERSION_CHECK) @@ -200,6 +199,8 @@ add_custom_target(zeek_autogen_files) # reasons and backwards compatibility). if (ZEEK_STANDALONE) add_executable(zeek_exe) + target_compile_features(zeek_exe PRIVATE ${ZEEK_CXX_STD}) + set_target_properties(zeek_exe PROPERTIES CXX_EXTENSIONS OFF) target_link_libraries(zeek_exe PRIVATE $) add_dependencies(zeek_exe zeek_autogen_files) set_target_properties(zeek_exe PROPERTIES RUNTIME_OUTPUT_NAME zeek) @@ -238,6 +239,8 @@ endif () # bundles all of our object libraries and other dependencies. if (ZEEK_ENABLE_FUZZERS) add_library(zeek_fuzzer_shared SHARED) + target_compile_features(zeek_fuzzer_shared PRIVATE ${ZEEK_CXX_STD}) + set_target_properties(zeek_fuzzer_shared PROPERTIES CXX_EXTENSIONS OFF) target_link_libraries(zeek_fuzzer_shared PUBLIC $) # Tell zeek_target_link_libraries to add library dependencies as PUBLIC. set(zeek_fuzzer_shared_access PUBLIC) @@ -284,7 +287,6 @@ target_include_directories( zeek_dynamic_plugin_base INTERFACE $ $ $) -target_compile_features(zeek_dynamic_plugin_base INTERFACE ${ZEEK_CXX_STD}) add_library(Zeek::DynamicPluginBase ALIAS zeek_dynamic_plugin_base) set_target_properties(zeek_dynamic_plugin_base PROPERTIES EXPORT_NAME DynamicPluginBase) install(TARGETS zeek_dynamic_plugin_base EXPORT ZeekTargets) @@ -313,6 +315,11 @@ add_zeek_dynamic_plugin_build_interface_include_directories( ${CMAKE_BINARY_DIR}/auxil/binpac/lib ${CMAKE_BINARY_DIR}/auxil/broker/include) +# threading/formatters/JSON.h includes rapidjson headers and may be used +# by external plugins, extend the include path. +target_include_directories(zeek_dynamic_plugin_base SYSTEM + INTERFACE $) + # Convenience function for adding an OBJECT library that feeds directly into the # main target(s). # @@ -329,6 +336,8 @@ function (zeek_add_subdir_library name) # Create target and add the sources. 
set(target_name "zeek_${name}_obj") add_library(${target_name} OBJECT ${FN_ARGS_SOURCES}) + target_compile_features(${target_name} PRIVATE ${ZEEK_CXX_STD}) + set_target_properties(${target_name} PROPERTIES CXX_EXTENSIONS OFF) target_compile_definitions(${target_name} PRIVATE ZEEK_CONFIG_SKIP_VERSION_H) add_dependencies(${target_name} zeek_autogen_files) target_link_libraries(${target_name} PRIVATE $) @@ -405,8 +414,10 @@ endif () if (ENABLE_ZEEK_UNIT_TESTS) enable_testing() add_definitions(-DDOCTEST_CONFIG_SUPER_FAST_ASSERTS) + target_compile_definitions(zeek_dynamic_plugin_base INTERFACE DOCTEST_CONFIG_SUPER_FAST_ASSERTS) else () add_definitions(-DDOCTEST_CONFIG_DISABLE) + target_compile_definitions(zeek_dynamic_plugin_base INTERFACE DOCTEST_CONFIG_DISABLE) endif () if (ENABLE_CCACHE) @@ -1049,7 +1060,6 @@ include(PCAPTests) include(OpenSSLTests) include(CheckNameserCompat) include(GetArchitecture) -include(RequireCXX17) include(FindKqueue) include(FindCAres) include_directories(BEFORE "auxil/out_ptr/include") @@ -1411,8 +1421,8 @@ message( "\nlibmaxminddb: ${USE_GEOIP}" "\nKerberos: ${USE_KRB5}" "\ngperftools found: ${HAVE_PERFTOOLS}" - "\n tcmalloc: ${USE_PERFTOOLS_TCMALLOC}" - "\n debugging: ${USE_PERFTOOLS_DEBUG}" + "\n - tcmalloc: ${USE_PERFTOOLS_TCMALLOC}" + "\n - debugging: ${USE_PERFTOOLS_DEBUG}" "\njemalloc: ${ENABLE_JEMALLOC}" "\n" "\nFuzz Targets: ${ZEEK_ENABLE_FUZZERS}" diff --git a/Makefile b/Makefile index 82e261c3fe..5c0f8a4b97 100644 --- a/Makefile +++ b/Makefile @@ -8,8 +8,8 @@ BUILD=build REPO=$$(cd $(CURDIR) && basename $$(git config --get remote.origin.url | sed 's/^[^:]*://g')) VERSION_FULL=$(REPO)-$$(cd $(CURDIR) && cat VERSION) -VERSION_MIN=$(REPO)-$$(cd $(CURDIR) && cat VERSION)-minimal GITDIR=$$(test -f .git && echo $$(cut -d" " -f2 .git) || echo .git) +REALPATH=$$($$(realpath --relative-to=$(pwd) . >/dev/null 2>&1) && echo 'realpath' || echo 'grealpath') all: configured $(MAKE) -C $(BUILD) $@ @@ -39,8 +39,8 @@ livehtml: dist: @test -e ../$(VERSION_FULL) && rm -ri ../$(VERSION_FULL) || true @cp -R . ../$(VERSION_FULL) - @for i in . $$(git submodule foreach -q --recursive realpath --relative-to=$$(pwd) .); do ((cd ../$(VERSION_FULL)/$$i && test -f .git && cp -R $(GITDIR) .gitnew && rm -f .git && mv .gitnew .git && sed -i.bak -e 's#[[:space:]]*worktree[[:space:]]*=[[:space:]]*.*##g' .git/config) || true); done - @for i in . $$(git submodule foreach -q --recursive realpath --relative-to=$$(pwd) .); do (cd ../$(VERSION_FULL)/$$i && git reset -q --hard && git clean -ffdxq); done + @for i in . $$(git submodule foreach -q --recursive $(REALPATH) --relative-to=$$(pwd) .); do ((cd ../$(VERSION_FULL)/$$i && test -f .git && cp -R $(GITDIR) .gitnew && rm -f .git && mv .gitnew .git && sed -i.bak -e 's#[[:space:]]*worktree[[:space:]]*=[[:space:]]*.*##g' .git/config) || true); done + @for i in . $$(git submodule foreach -q --recursive $(REALPATH) --relative-to=$$(pwd) .); do (cd ../$(VERSION_FULL)/$$i && git reset -q --hard && git clean -ffdxq); done @(cd ../$(VERSION_FULL) && find . -name \.git\* | xargs rm -rf) @(cd ../$(VERSION_FULL) && find . -name \.idea -type d | xargs rm -rf) @(cd ../$(VERSION_FULL) && find . -maxdepth 1 -name build\* | xargs rm -rf) @@ -48,11 +48,7 @@ dist: @mv ../$(VERSION_FULL) . 
@COPYFILE_DISABLE=true tar -czf $(VERSION_FULL).tar.gz $(VERSION_FULL) @echo Package: $(VERSION_FULL).tar.gz - @mv $(VERSION_FULL) $(VERSION_MIN) - @(cd $(VERSION_MIN) && for i in auxil/*; do rm -rf $$i/*; done) - @COPYFILE_DISABLE=true tar -czf $(VERSION_MIN).tar.gz $(VERSION_MIN) - @echo Package: $(VERSION_MIN).tar.gz - @rm -rf $(VERSION_MIN) + @rm -rf $(VERSION_FULL) distclean: rm -rf $(BUILD) diff --git a/NEWS b/NEWS index b6ca019137..341bbd4d8e 100644 --- a/NEWS +++ b/NEWS @@ -9,9 +9,28 @@ Zeek 6.1.0 Breaking Changes ---------------- +- ``assert`` is now a reserved keyword for the new ``assert`` statement. + New Functionality ----------------- +- Added a new ``assert`` statement for assertion based testing and asserting + runtime state. + + assert [, ]; + + This statement comes with two hooks. First, ``assertion_failure()`` that + is invoked for every failing assert statement. Second, ``assertion_result()`` + which is invoked for every assert statement and its outcome. The latter allows + to construct a summary of failing and passing assert statements. Both hooks + receive the location and call stack for the ``assert`` statement via a + ``Backtrace`` vector. + + A failing assert will abort execution of the current event handler similar + to scripting errors. By default, a reporter error message is logged. Using + the break statement within ``assertion_failure()`` or ``assertion_result()`` + allows to suppress the default message. + Changed Functionality --------------------- @@ -46,6 +65,22 @@ Breaking Changes come in handy for example when working with tests that compare results against log baselines that have not yet been updated. +- Telemetry centralization and Prometheus exposition is not enabled by default + anymore. Previously, the manager node would open port 9911/tcp by default and + import all metrics from other nodes. For large clusters, the current implementation + introduces significant processing overhead on the manager even if the Prometheus + functionality is not used. While inconvenient, disable this functionality + (assumed to be used by few as of now) by default to preserve resources. + + The script to enable centralization and the Prometheus endpoint is now + located in the ``policy/`` folder. Re-enable the old functionality with: + + @load frameworks/telemetry/prometheus + + You may experiment with increasing ``Broker::metrics_export_interval`` + (default 1s) to reduce the extra overhead and communication at the expense + of stale metrics. + - Custom source tarballs require a ``repo-info.json`` file. Note, should you be using official Zeek release tarballs only, or build @@ -109,6 +144,19 @@ New Functionality To disable this functionality, pass ``--disable-javascript`` to configure. +- Zeek now comes with Spicy support built in, meaning it can now + leverage any analyzers written in Spicy out of the box. While the + interface layer connecting Zeek and Spicy used to be implemented + through an external Zeek plugin, that code has now moved into the + Zeek code base itself. We also added infrastructure to Zeek that + enables its built-in standard analyzers to use Spicy instead of + Binpac. As initial (simple) examples, Zeek's Syslog and Finger + analyzers are now implemented in Spicy. While their legacy versions + remain available as fallbacks for now in case Spicy gets explicitly + disabled at build time, their use is deprecated and their code won't + be maintained any further. 
(Some of these Spicy updates were part of + Zeek 5.2 already, but hadn't been included in its NEWS section.) + - Zeek events now hold network timestamps. For scheduled events, the timestamp represents the network time for which the event was scheduled for, otherwise it is the network time at event creation. A new bif ``current_event_time()`` @@ -260,7 +308,7 @@ New Functionality recognize CCMP-encrypted packets. These encrypted packets are currently dropped to Zeek's inability to do anything with them. -- Add packet analzyers for LLC, SNAP, and Novell 802.3, called from the Ethernet +- Add packet analyzers for LLC, SNAP, and Novell 802.3, called from the Ethernet and VLAN analyzers by default. - Environment variables for the execution of log rotation postprocessors can @@ -290,6 +338,9 @@ New Functionality Changed Functionality --------------------- +- The base distribution of the Zeek container images has been upgraded to + Debian 12 "bookworm" and JavaScript support was enabled. + - When ``get_file_handle()`` is invoked for an analyzer that did not register an appropriate callback function, log a warning and return a generic handle value based on the analyzer and connection information. @@ -2381,7 +2432,7 @@ Changed Functionality - The IOSource API changed fairly wildly. The ``GetFds()`` and ``NextTimestamp`` methods no longer exist. If you had previously implemented a custom IOSource, you will need to look at the new API - and make changes to your code to accomodate it. This does not include + and make changes to your code to accommodate it. This does not include packet sources, which should remain functional with little to no changes, since the entirety of the changes should be in ``PktSrc``. @@ -4033,7 +4084,7 @@ Changed Functionality - HTTP - Removed 'filename' field (which was seldomly used). + Removed 'filename' field (which was seldom used). New 'orig_filenames' and 'resp_filenames' fields which each contain a vector of filenames seen in entities transferred. @@ -5019,7 +5070,7 @@ New Functionality Instead of adding a separate worker entry in node.cfg for each Bro worker process on each worker host, it is now possible to just specify the number of worker processes on each host and BroControl - configures everything correctly (including any necessary enviroment + configures everything correctly (including any necessary environment variables for the balancers). 
This change adds three new keywords to the node.cfg file (to be used diff --git a/VERSION b/VERSION index 982593cc0a..191f154ee3 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -6.1.0-dev.4 +6.1.0-dev.115 diff --git a/auxil/broker b/auxil/broker index 3df48de38e..2a1d3232b7 160000 --- a/auxil/broker +++ b/auxil/broker @@ -1 +1 @@ -Subproject commit 3df48de38ef75a5d274c2fa59ad3f798a62c6bfc +Subproject commit 2a1d3232b75b530a0cd7df4b376ca3608fe8b9df diff --git a/auxil/spicy b/auxil/spicy index ec87b43037..6f43758670 160000 --- a/auxil/spicy +++ b/auxil/spicy @@ -1 +1 @@ -Subproject commit ec87b43037dba50648cb93be8940a4db23658905 +Subproject commit 6f4375867083805513a70feb32a626df40039256 diff --git a/auxil/zeekctl b/auxil/zeekctl index 01c54f8b38..42341843df 160000 --- a/auxil/zeekctl +++ b/auxil/zeekctl @@ -1 +1 @@ -Subproject commit 01c54f8b385c42ac82553fc8e18b28b22f7be62a +Subproject commit 42341843df09dd7867b8b4ce3059ebd3ebd67278 diff --git a/auxil/zeekjs b/auxil/zeekjs index e77634d5f9..c1cb44eb70 160000 --- a/auxil/zeekjs +++ b/auxil/zeekjs @@ -1 +1 @@ -Subproject commit e77634d5f92db96e66de0c36ddc4d44893306fa7 +Subproject commit c1cb44eb709e15ef16844c6a8648ed35017409e1 diff --git a/ci/opensuse-tumbleweed/Dockerfile b/ci/opensuse-tumbleweed/Dockerfile index 635f9af1c5..b5903b72f8 100644 --- a/ci/opensuse-tumbleweed/Dockerfile +++ b/ci/opensuse-tumbleweed/Dockerfile @@ -2,7 +2,7 @@ FROM opensuse/tumbleweed # A version field to invalidate Cirrus's build cache when needed, as suggested in # https://github.com/cirruslabs/cirrus-ci-docs/issues/544#issuecomment-566066822 -ENV DOCKERFILE_VERSION 20230523 +ENV DOCKERFILE_VERSION 20230620 # Remove the repo-openh264 repository, it caused intermittent issues # and we should not be needing any packages from it. diff --git a/ci/ubuntu-22.10/Dockerfile b/ci/ubuntu-22.10/Dockerfile index b34ae527a8..63f9ca2dd5 100644 --- a/ci/ubuntu-22.10/Dockerfile +++ b/ci/ubuntu-22.10/Dockerfile @@ -2,7 +2,7 @@ FROM ubuntu:22.10 ENV DEBIAN_FRONTEND="noninteractive" TZ="America/Los_Angeles" -# A version field to invalide Cirrus's build cache when needed, as suggested in +# A version field to invalidate Cirrus's build cache when needed, as suggested in # https://github.com/cirruslabs/cirrus-ci-docs/issues/544#issuecomment-566066822 ENV DOCKERFILE_VERSION 20230413 diff --git a/ci/windows/prepare.cmd b/ci/windows/prepare.cmd index 162381367e..a44a391317 100644 --- a/ci/windows/prepare.cmd +++ b/ci/windows/prepare.cmd @@ -4,4 +4,4 @@ echo %ZEEK_CI_CPUS% wmic cpu get NumberOfCores, NumberOfLogicalProcessors/Format:List systeminfo dir C: -choco list --localonly +choco list diff --git a/cmake b/cmake index 4e41cdd77f..afa62ecbe3 160000 --- a/cmake +++ b/cmake @@ -1 +1 @@ -Subproject commit 4e41cdd77f0aa617c23f37b4776a1ba5c4ea4ea3 +Subproject commit afa62ecbe399c3dac41f6ebcdb622f409569edd6 diff --git a/doc b/doc index 8a0873c710..e479f28d22 160000 --- a/doc +++ b/doc @@ -1 +1 @@ -Subproject commit 8a0873c71095136ef1f611a01bf936f7a2805aed +Subproject commit e479f28d2263ae3c452567a52ef613f144191f08 diff --git a/docker/builder.Dockerfile b/docker/builder.Dockerfile index 5352c48938..0c1685ac2e 100644 --- a/docker/builder.Dockerfile +++ b/docker/builder.Dockerfile @@ -1,7 +1,7 @@ # See the file "COPYING" in the main distribution directory for copyright. # Layer to build Zeek. 
-FROM debian:bullseye-slim +FROM debian:bookworm-slim RUN echo 'Acquire::Retries "3";' > /etc/apt/apt.conf.d/80-retries @@ -18,15 +18,20 @@ RUN apt-get -q update \ git \ libfl2 \ libfl-dev \ + libnode-dev \ libmaxminddb-dev \ libpcap-dev \ libssl-dev \ + libuv1-dev \ libz-dev \ make \ python3-minimal \ - python3.9-dev \ + python3.11-dev \ swig \ ninja-build \ python3-pip \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* + +# Tell git all the repositories are safe. +RUN git config --global --add safe.directory '*' diff --git a/docker/final.Dockerfile b/docker/final.Dockerfile index ca2db566cb..e13931f93b 100644 --- a/docker/final.Dockerfile +++ b/docker/final.Dockerfile @@ -1,7 +1,7 @@ # See the file "COPYING" in the main distribution directory for copyright. # Final layer containing all artifacts. -FROM debian:bullseye-slim +FROM debian:bookworm-slim RUN echo 'Acquire::Retries "3";' > /etc/apt/apt.conf.d/80-retries @@ -10,9 +10,11 @@ RUN apt-get -q update \ ca-certificates \ git \ libmaxminddb0 \ - libpython3.9 \ + libnode108 \ + libpython3.11 \ libpcap0.8 \ - libssl1.1 \ + libssl3 \ + libuv1 \ libz1 \ python3-minimal \ python3-git \ diff --git a/scripts/base/frameworks/cluster/main.zeek b/scripts/base/frameworks/cluster/main.zeek index 495885c431..af2b96f0d6 100644 --- a/scripts/base/frameworks/cluster/main.zeek +++ b/scripts/base/frameworks/cluster/main.zeek @@ -211,12 +211,6 @@ export { ## Returns: The :zeek:type:`Cluster::NodeType` the calling node acts as. global local_node_type: function(): NodeType; - ## This gives the value for the number of workers currently connected to, - ## and it's maintained internally by the cluster framework. It's - ## primarily intended for use by managers to find out how many workers - ## should be responding to requests. - global worker_count: count = 0 &deprecated="Remove in v6.1. Active worker count can be obtained via get_active_node_count(Cluster::WORKER)"; - ## The cluster layout definition. This should be placed into a filter ## named cluster-layout.zeek somewhere in the ZEEKPATH. It will be ## automatically loaded if the CLUSTER_NODE environment variable is set. 
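A minimal sketch of the replacement for the removed worker_count global
(the function name comes from the deprecation message above; the surrounding
handler is illustrative):

    event zeek_init()
        {
        local workers = Cluster::get_active_node_count(Cluster::WORKER);
        print fmt("%d active worker(s)", workers);
        }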
@@ -347,8 +341,6 @@ function nodeid_topic(id: string): string return nodeid_topic_prefix + id + "/"; } -@if ( Cluster::is_enabled() ) - event Cluster::hello(name: string, id: string) &priority=10 { if ( name !in nodes ) @@ -374,11 +366,6 @@ event Cluster::hello(name: string, id: string) &priority=10 if ( n$node_type !in active_node_ids ) active_node_ids[n$node_type] = set(); add active_node_ids[n$node_type][id]; - -@pragma push ignore-deprecations - if ( n$node_type == WORKER ) - worker_count = get_active_node_count(WORKER); -@pragma pop ignore-deprecations } event Broker::peer_added(endpoint: Broker::EndpointInfo, msg: string) &priority=10 @@ -400,11 +387,6 @@ event Broker::peer_lost(endpoint: Broker::EndpointInfo, msg: string) &priority=1 delete n$id; delete active_node_ids[n$node_type][endpoint$id]; -@pragma push ignore-deprecations - if ( n$node_type == WORKER ) - worker_count = get_active_node_count(WORKER); -@pragma pop ignore-deprecations - event Cluster::node_down(node_name, endpoint$id); break; } @@ -423,8 +405,6 @@ event zeek_init() &priority=5 Log::create_stream(Cluster::LOG, [$columns=Info, $path="cluster", $policy=log_policy]); } -@endif - function create_store(name: string, persistent: bool &default=F): Cluster::StoreInfo { local info = stores[name]; diff --git a/scripts/base/frameworks/cluster/nodes/logger.zeek b/scripts/base/frameworks/cluster/nodes/logger.zeek index 9dedac27a0..4b29d7654f 100644 --- a/scripts/base/frameworks/cluster/nodes/logger.zeek +++ b/scripts/base/frameworks/cluster/nodes/logger.zeek @@ -22,18 +22,26 @@ redef Log::default_rotation_interval = 1 hrs; ## Alarm summary mail interval. redef Log::default_mail_alarms_interval = 24 hrs; -## Generic log metadata rendered into the filename that zeek-archiver may interpret. -## This is populated with a log_suffix entry within zeek_init() when multiple -## logger nodes are defined in cluster-layout.zeek. -global log_metadata: table[string] of string; +## Generic log metadata rendered into filename that zeek-archiver may interpret. +global archiver_log_metadata: table[string] of string &redef; + +# Populate archiver_log_metadata with a "log_suffix" entry when multiple +# loggers are configured in Cluster::nodes. Need to evaluate at script +# loading time as leftover-log-rotation functionality is invoking +# archiver_rotation_format_func early on during InitPostScript(). +@if ( Cluster::get_node_count(Cluster::LOGGER) > 1 ) +redef archiver_log_metadata += { + ["log_suffix"] = Cluster::node, +}; +@endif ## Encode the given table as zeek-archiver understood metadata part. -function encode_log_metadata(tbl: table[string] of string): string +function archiver_encode_log_metadata(tbl: table[string] of string): string { local metadata_vec: vector of string; - for ( k, v in log_metadata ) + for ( k, v in tbl ) { - if ( |v| == 0 ) # Assume concious decision to skip this entry. + if ( |v| == 0 ) # Assume conscious decision to skip this entry. 
next; if ( /[,=]/ in k || /[,=]/ in v ) @@ -57,8 +65,8 @@ function archiver_rotation_format_func(ri: Log::RotationFmtInfo): Log::RotationP local close_str = strftime(Log::default_rotation_date_format, ri$close); local base = fmt("%s__%s__%s__", ri$path, open_str, close_str); - if ( |log_metadata| > 0 ) - base = fmt("%s%s__", base, encode_log_metadata(log_metadata)); + if ( |archiver_log_metadata| > 0 ) + base = fmt("%s%s__", base, archiver_encode_log_metadata(archiver_log_metadata)); local rval = Log::RotationPath($file_basename=base); return rval; @@ -71,15 +79,6 @@ redef Log::default_rotation_dir = "log-queue"; redef Log::rotation_format_func = archiver_rotation_format_func; redef LogAscii::enable_leftover_log_rotation = T; - -event zeek_init() - { - if ( "log_suffix" in log_metadata ) - return; - - if ( Cluster::get_node_count(Cluster::LOGGER) > 1 ) - log_metadata["log_suffix"] = Cluster::node; - } @else ## Use the cluster's archive logging script. diff --git a/scripts/base/frameworks/dpd/__load__.zeek b/scripts/base/frameworks/dpd/__load__.zeek deleted file mode 100644 index 643addd66b..0000000000 --- a/scripts/base/frameworks/dpd/__load__.zeek +++ /dev/null @@ -1,3 +0,0 @@ -@deprecated "Remove in v6.1 - now loaded automatically"; - -@load base/frameworks/analyzer diff --git a/scripts/base/frameworks/logging/writers/ascii.zeek b/scripts/base/frameworks/logging/writers/ascii.zeek index 3b7e87a125..a65ae290cb 100644 --- a/scripts/base/frameworks/logging/writers/ascii.zeek +++ b/scripts/base/frameworks/logging/writers/ascii.zeek @@ -54,14 +54,6 @@ export { ## This option is also available as a per-filter ``$config`` option. const gzip_file_extension = "gz" &redef; - ## Define the default logging directory. If empty, logs are written - ## to the current working directory. - ## - ## This setting is superseded by :zeek:see:`Log::default_logdir`. The - ## latter applies to all file writers and also interacts better with - ## log rotation. - const logdir = "" &redef &deprecated="Remove in v6.1. Use 'Log::default_logdir'."; - ## Format of timestamps when writing out JSON. By default, the JSON ## formatter will use double values for timestamps which represent the ## number of seconds from the UNIX epoch. diff --git a/scripts/base/frameworks/netcontrol/main.zeek b/scripts/base/frameworks/netcontrol/main.zeek index 7d1d6a7976..4687c2d52d 100644 --- a/scripts/base/frameworks/netcontrol/main.zeek +++ b/scripts/base/frameworks/netcontrol/main.zeek @@ -167,7 +167,7 @@ export { ## ## For example, a search for 192.168.17.0/8 will reveal a rule that exists for ## 192.168.0.0/16, since this rule affects the subnet. However, it will not reveal - ## a more specific rule for 192.168.17.1/32, which does not directy affect the whole + ## a more specific rule for 192.168.17.1/32, which does not directly affect the whole ## subnet. ## ## This function works on both the manager and workers of a cluster. Note that on diff --git a/scripts/base/frameworks/notice/main.zeek b/scripts/base/frameworks/notice/main.zeek index 85830ae499..14bbbeb08f 100644 --- a/scripts/base/frameworks/notice/main.zeek +++ b/scripts/base/frameworks/notice/main.zeek @@ -584,15 +584,6 @@ function is_being_suppressed(n: Notice::Info): bool return F; } -# Executes a script with all of the notice fields put into the -# new process' environment as "ZEEK_ARG_" variables. -function execute_with_notice(cmd: string, n: Notice::Info) &deprecated="Remove in v6.1. Usage testing indicates this function is unused." 
- { - # TODO: fix system calls - #local tgs = tags(n); - #system_env(cmd, tags); - } - function create_file_info(f: fa_file): Notice::FileInfo { local fi: Notice::FileInfo = Notice::FileInfo($fuid = f$id, diff --git a/scripts/base/frameworks/signatures/main.zeek b/scripts/base/frameworks/signatures/main.zeek index 51760b5d1f..64d5df915a 100644 --- a/scripts/base/frameworks/signatures/main.zeek +++ b/scripts/base/frameworks/signatures/main.zeek @@ -148,13 +148,6 @@ event zeek_init() &priority=5 Log::create_stream(Signatures::LOG, [$columns=Info, $ev=log_signature, $path="signatures", $policy=log_policy]); } -# Returns true if the given signature has already been triggered for the given -# [orig, resp] pair. -function has_signature_matched(id: string, orig: addr, resp: addr): bool &deprecated="Remove in v6.1. Usage testing indicates this function is unused." - { - return [orig, resp] in vert_table ? id in vert_table[orig, resp] : F; - } - event sig_summary(orig: addr, id: string, msg: string) { NOTICE([$note=Signature_Summary, $src=orig, diff --git a/scripts/base/frameworks/software/main.zeek b/scripts/base/frameworks/software/main.zeek index 7e7631a4f3..70881304c1 100644 --- a/scripts/base/frameworks/software/main.zeek +++ b/scripts/base/frameworks/software/main.zeek @@ -454,11 +454,6 @@ function cmp_versions(v1: Version, v2: Version): int return 0; } -function software_endpoint_name(id: conn_id, host: addr): string &deprecated="Remove in v6.1. Usage testing indicates this function is unused." - { - return fmt("%s %s", host, (host == id$orig_h ? "client" : "server")); - } - # Convert a version into a string "a.b.c-x". Marked "&is_used" because # while the base scripts don't call it, the optional policy/ scripts do. function software_fmt_version(v: Version): string &is_used diff --git a/scripts/base/frameworks/sumstats/non-cluster.zeek b/scripts/base/frameworks/sumstats/non-cluster.zeek index 5bf615fcaf..47d2f29eb4 100644 --- a/scripts/base/frameworks/sumstats/non-cluster.zeek +++ b/scripts/base/frameworks/sumstats/non-cluster.zeek @@ -71,18 +71,6 @@ function data_added(ss: SumStat, key: Key, result: Result) threshold_crossed(ss, key, result); } -function request(ss_name: string): ResultTable &deprecated="Remove in v6.1. Usage testing indicates this function is unused." - { - # This only needs to be implemented this way for cluster compatibility. - return when [ss_name] ( T ) - { - if ( ss_name in result_store ) - return result_store[ss_name]; - else - return table(); - } - } - function request_key(ss_name: string, key: Key): Result { # This only needs to be implemented this way for cluster compatibility. diff --git a/scripts/base/frameworks/sumstats/plugins/hll_unique.zeek b/scripts/base/frameworks/sumstats/plugins/hll_unique.zeek index 43cafcff7f..f1e4d23714 100644 --- a/scripts/base/frameworks/sumstats/plugins/hll_unique.zeek +++ b/scripts/base/frameworks/sumstats/plugins/hll_unique.zeek @@ -26,7 +26,7 @@ export { } redef record ResultVal += { - # Internal use only. This is not meant to be publically available + # Internal use only. This is not meant to be publicly available # because probabilistic data structures have to be examined using # specialized bifs. 
card: opaque of cardinality &optional; diff --git a/scripts/base/frameworks/sumstats/plugins/sample.zeek b/scripts/base/frameworks/sumstats/plugins/sample.zeek index d5d236f43f..4a2c9e20bd 100644 --- a/scripts/base/frameworks/sumstats/plugins/sample.zeek +++ b/scripts/base/frameworks/sumstats/plugins/sample.zeek @@ -26,7 +26,7 @@ export { } redef record ResultVal += { - # Internal use only. This is not meant to be publically available + # Internal use only. This is not meant to be publicly available # and just a copy of num_samples from the Reducer. Needed for # availability in the compose hook. num_samples: count &default=0; diff --git a/scripts/base/frameworks/sumstats/plugins/std-dev.zeek b/scripts/base/frameworks/sumstats/plugins/std-dev.zeek index a181923b10..fb5e5a0b4c 100644 --- a/scripts/base/frameworks/sumstats/plugins/std-dev.zeek +++ b/scripts/base/frameworks/sumstats/plugins/std-dev.zeek @@ -23,11 +23,6 @@ function calc_std_dev(rv: ResultVal) rv$std_dev = sqrt(rv$variance); } -hook std_dev_hook(r: Reducer, val: double, obs: Observation, rv: ResultVal) &deprecated="Remove in v6.1. Usage testing indicates this function is unused." - { - calc_std_dev(rv); - } - hook register_observe_plugins() &priority=-10 { register_observe_plugin(STD_DEV, function(r: Reducer, val: double, obs: Observation, rv: ResultVal) diff --git a/scripts/base/frameworks/sumstats/plugins/unique.zeek b/scripts/base/frameworks/sumstats/plugins/unique.zeek index 069522effb..dbcb4314a3 100644 --- a/scripts/base/frameworks/sumstats/plugins/unique.zeek +++ b/scripts/base/frameworks/sumstats/plugins/unique.zeek @@ -28,7 +28,7 @@ redef record ResultVal += { # set in the reducer. unique_max: count &optional; - # Internal use only. This is not meant to be publically available + # Internal use only. This is not meant to be publicly available # because we don't want to trust that we can inspect the values # since we will likely move to a probabilistic data structure in the future. # TODO: in the future this will optionally be a hyperloglog structure diff --git a/scripts/base/frameworks/supervisor/api.zeek b/scripts/base/frameworks/supervisor/api.zeek index 0475d305cc..97a286f8c9 100644 --- a/scripts/base/frameworks/supervisor/api.zeek +++ b/scripts/base/frameworks/supervisor/api.zeek @@ -56,9 +56,6 @@ export { ## Additional script filenames/paths that the node should load ## after any user-specified scripts. addl_user_scripts: vector of string &default = vector(); - ## The former name of addl_user_scripts. - scripts: vector of string &default = vector() - &deprecated="Remove in 6.1. Use the addl_user_scripts field instead."; ## Environment variables to define in the supervised node. env: table[string] of string &default=table(); ## A cpu/core number to which the node will try to pin itself. diff --git a/scripts/base/frameworks/telemetry/__load__.zeek b/scripts/base/frameworks/telemetry/__load__.zeek index 2aad89db82..88b6dbf672 100644 --- a/scripts/base/frameworks/telemetry/__load__.zeek +++ b/scripts/base/frameworks/telemetry/__load__.zeek @@ -1,7 +1,3 @@ @load ./main @load base/frameworks/cluster - -@if ( Cluster::is_enabled() ) -@load ./cluster -@endif diff --git a/scripts/base/init-bare.zeek b/scripts/base/init-bare.zeek index fb15bea8dd..b891e0792f 100644 --- a/scripts/base/init-bare.zeek +++ b/scripts/base/init-bare.zeek @@ -623,10 +623,6 @@ type fa_metadata: record { inferred: bool &default=T; }; -## Same as :zeek:see:`Analyzer::disabling_analyzer`, but deprecated due -## to living in the global namespace. 
-type disabling_analyzer: hook(c: connection, atype: AllAnalyzers::Tag, aid: count) &redef &deprecated="Remove in v6.1. Use Analyzer::disabling_analyzer() instead."; - module Analyzer; export { ## A hook taking a connection, analyzer tag and analyzer id that can be @@ -942,6 +938,45 @@ type BacktraceElement: record { ## .. zeek:see:: backtrace print_backtrace type Backtrace: vector of BacktraceElement; +## A hook that is invoked when an assert statement fails. +## +## By default, a reporter error message is logged describing the failing +## assert similarly to how scripting errors are reported after invoking +## this hook. Using the :zeek:see:`break` statement in an assertion_failure +## hook handler allows to suppress this message. +## +## cond: The string representation of the condition. +## +## msg: Evaluated message as string given to the assert statement. +## +## bt: Backtrace of the assertion error. The top element will contain +## the location of the assert statement that failed. +## +## .. zeek:see:: assertion_result +type assertion_failure: hook(cond: string, msg: string, bt: Backtrace); + +## A hook that is invoked with the result of every assert statement. +## +## This is a potentially expensive hook meant to be used by testing +## frameworks to summarize assert results. In a production setup, +## this hook is likely detrimental to performance. +## +## Using the :zeek:see:`break` statement within an assertion_failure hook +## handler allows to suppress the reporter error message generated for +## failing assert statements. +## +## result: The result of evaluating **cond**. +## +## cond: The string representation of the condition. +## +## msg: Evaluated message as string given to the assert statement. +## +## bt: Backtrace of the assertion error. The top element will contain +## the location of the assert statement that failed. +## +## .. zeek:see:: assertion_failure +type assertion_result: hook(result: bool, cond: string, msg: string, bt: Backtrace); + # todo:: Do we still need these here? Can they move into the packet filter # framework? # @@ -1097,7 +1132,7 @@ type entropy_test_result: record { ## Return type for from_json BIF. ## ## .. zeek:see:: from_json -type from_json_result: record { +type from_json_result: record { v: any &optional; ##< Parsed value. valid: bool; ##< True if parsing was successful. }; @@ -4741,7 +4776,6 @@ export { type SNMP::BulkPDU: record { request_id: int; non_repeaters: count; - max_repititions: count &deprecated="Remove in v6.1. Use max_repetitions instead"; max_repetitions: count; bindings: SNMP::Bindings; }; @@ -5162,31 +5196,16 @@ export { ## Setting this to zero will disable all types of tunnel decapsulation. const max_depth: count = 2 &redef; - ## Toggle whether to do IPv{4,6}-in-IPv{4,6} decapsulation. - const enable_ip = T &redef &deprecated="Remove in v6.1. Tunnel analyzers can be toggled with enable_analyzer()/disable_analyzer() or disabled through Analyzer::disabled_analyzers"; - - ## Toggle whether to do IPv{4,6}-in-AYIYA decapsulation. - const enable_ayiya = T &redef &deprecated="Remove in v6.1. Tunnel analyzers can be toggled with enable_analyzer()/disable_analyzer() or disabled through Analyzer::disabled_analyzers"; - - ## Toggle whether to do IPv6-in-Teredo decapsulation. - const enable_teredo = T &redef &deprecated="Remove in v6.1. Tunnel analyzers can be toggled with enable_analyzer()/disable_analyzer() or disabled through Analyzer::disabled_analyzers"; - - ## Toggle whether to do GTPv1 decapsulation. 
- const enable_gtpv1 = T &redef &deprecated="Remove in v6.1. Tunnel analyzers can be toggled with enable_analyzer()/disable_analyzer() or disabled through Analyzer::disabled_analyzers"; - - ## Toggle whether to do GRE decapsulation. - const enable_gre = T &redef &deprecated="Remove in v6.1. Tunnel analyzers can be toggled with enable_analyzer()/disable_analyzer() or disabled through Analyzer::disabled_analyzers"; - ## With this set, the Teredo analyzer waits until it sees both sides ## of a connection using a valid Teredo encapsulation before issuing - ## a :zeek:see:`analyzer_confirmation`. If it's false, the first + ## a :zeek:see:`analyzer_confirmation_info`. If it's false, the first ## occurrence of a packet with valid Teredo encapsulation causes a ## confirmation. const delay_teredo_confirmation = T &redef; ## With this set, the GTP analyzer waits until the most-recent upflow ## and downflow packets are a valid GTPv1 encapsulation before - ## issuing :zeek:see:`analyzer_confirmation`. If it's false, the + ## issuing :zeek:see:`analyzer_confirmation_info`. If it's false, the ## first occurrence of a packet with valid GTPv1 encapsulation causes ## confirmation. Since the same inner connection can be carried ## differing outer upflow/downflow connections, setting to false diff --git a/scripts/base/protocols/dce-rpc/consts.zeek b/scripts/base/protocols/dce-rpc/consts.zeek index 3b65df83bb..bd283b032c 100644 --- a/scripts/base/protocols/dce-rpc/consts.zeek +++ b/scripts/base/protocols/dce-rpc/consts.zeek @@ -283,6 +283,8 @@ export { ["86d35949-83c9-4044-b424-db363231fd0c",0x0f] = "SchRpcScheduledRuntimes", ["86d35949-83c9-4044-b424-db363231fd0c",0x10] = "SchRpcGetLastRunInfo", ["86d35949-83c9-4044-b424-db363231fd0c",0x11] = "SchRpcGetTaskInfo", + ["86d35949-83c9-4044-b424-db363231fd0c",0x12] = "SchRpcGetNumberOfMissedRuns", + ["86d35949-83c9-4044-b424-db363231fd0c",0x13] = "SchRpcEnableTask", # IObjectExporter ["99fcfec4-5260-101b-bbcb-00aa0021347a",0x00] = "ResolveOxid", @@ -306,6 +308,16 @@ export { ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x07] = "NspiDNToEph", ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x08] = "NspiGetPropList", ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x09] = "NspiGetProps", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x0a] = "NspiCompareMIds", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x0b] = "NspiModProps", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x0c] = "NspiGetSpecialTable", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x0d] = "NspiGetTemplateInfo", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x0e] = "NspiModLinkAtt", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x10] = "NspiQueryColumns", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x11] = "NspiGetNamesFromIDs", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x12] = "NspiGetIDsFromNames", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x13] = "NspiResolveNames", + ["f5cc5a18-4264-101a-8c59-08002b2f8426",0x14] = "NspiResolveNamesW", # IWbemServices ["9556dc99-828c-11cf-a37e-00aa003240c7",0x03] = "OpenNamespace", @@ -611,6 +623,12 @@ export { ["12345678-1234-abcd-ef00-0123456789ab",0x66] = "RpcCorePrinterDriverInstalled", ["12345678-1234-abcd-ef00-0123456789ab",0x67] = "RpcGetPrinterDriverPackagePath", ["12345678-1234-abcd-ef00-0123456789ab",0x68] = "RpcReportJobProcessingProgress", + ["12345678-1234-abcd-ef00-0123456789ab",0x6e] = "RpcGetJobNamedPropertyValue", + ["12345678-1234-abcd-ef00-0123456789ab",0x6f] = "RpcSetJobNamedProperty", + ["12345678-1234-abcd-ef00-0123456789ab",0x70] = "RpcDeleteJobNamedProperty", + 
["12345678-1234-abcd-ef00-0123456789ab",0x71] = "RpcEnumJobNamedProperties", + ["12345678-1234-abcd-ef00-0123456789ab",0x74] = "RpcLogJobInfoForBranchOffice", + ["4b324fc8-1670-01d3-1278-5a47bf6ee188",0x00] = "NetrCharDevEnum", ["4b324fc8-1670-01d3-1278-5a47bf6ee188",0x01] = "NetrCharDevGetInfo", @@ -740,6 +758,7 @@ export { ["12345778-1234-abcd-ef00-0123456789ac",0x43] = "SamrValidatePassword", ["12345778-1234-abcd-ef00-0123456789ac",0x44] = "SamrQueryLocalizableAccountsInDomain", ["12345778-1234-abcd-ef00-0123456789ac",0x45] = "SamrPerformGenericOperation", + ["12345778-1234-abcd-ef00-0123456789ac",0x49] = "SamrUnicodeChangePasswordUser4", ["338cd001-2244-31f1-aaaa-900038001003",0x00] = "OpenClassesRoot", ["338cd001-2244-31f1-aaaa-900038001003",0x01] = "OpenCurrentUser", @@ -847,6 +866,9 @@ export { ["367abb81-9844-35f1-ad32-98f038001003",0x34] = "ScSendPnPMessage", ["367abb81-9844-35f1-ad32-98f038001003",0x35] = "ScValidatePnPService", ["367abb81-9844-35f1-ad32-98f038001003",0x36] = "ScOpenServiceStatusHandle", + ["367abb81-9844-35f1-ad32-98f038001003",0x38] = "QueryServiceConfigEx", + ["367abb81-9844-35f1-ad32-98f038001003",0x3c] = "CreateWowService", + ["367abb81-9844-35f1-ad32-98f038001003",0x40] = "OpenSCManager2", # browser ["6bffd098-a112-3610-9833-012892020162",0x00] = "BrowserrServerEnum", @@ -1132,6 +1154,8 @@ export { ["82273fdc-e32a-18c3-3f78-827929dc23ea",0x16] = "ElfrGetLogInformation", ["82273fdc-e32a-18c3-3f78-827929dc23ea",0x17] = "ElfrFlushEL", ["82273fdc-e32a-18c3-3f78-827929dc23ea",0x18] = "ElfrReportEventAndSourceW", + ["82273fdc-e32a-18c3-3f78-827929dc23ea",0x19] = "ElfrReportEventExW", + ["82273fdc-e32a-18c3-3f78-827929dc23ea",0x1a] = "ElfrReportEventExA", # ISeclogon ["12b81e99-f207-4a4c-85d3-77b42f76fd14",0x00] = "SeclCreateProcessWithLogonW", @@ -1265,7 +1289,9 @@ export { ["12345778-1234-abcd-ef00-0123456789ab",0x5c] = "CredReadByTokenHandle", ["12345778-1234-abcd-ef00-0123456789ab",0x5d] = "CredrRestoreCredentials", ["12345778-1234-abcd-ef00-0123456789ab",0x5e] = "CredrBackupCredentials", - + ["12345778-1234-abcd-ef00-0123456789ab",0x81] = "LsarCreateTrustedDomainEx3", + ["12345778-1234-abcd-ef00-0123456789ab",0x82] = "LsarOpenPolicy3", + ["12345778-1234-abcd-ef00-0123456789ab",0x85] = "LsarSetForestTrustInformation2", # msgsvc ["17fdd703-1827-4e34-79d4-24a55c53bb37",0x00] = "NetrMessageNameAdd", ["17fdd703-1827-4e34-79d4-24a55c53bb37",0x01] = "NetrMessageNameEnum", @@ -1363,6 +1389,15 @@ export { ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x07] = "DnssrvComplexOperation2", ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x08] = "DnssrvEnumRecords2", ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x09] = "DnssrvUpdateRecord2", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x0a] = "DnssrvUpdateRecord3", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x0b] = "DnssrvEnumRecords3", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x0c] = "DnssrvOperation3", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x0d] = "DnssrvQuery3", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x0e] = "DnssrvComplexOperation3", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x0f] = "DnssrvOperation4", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x10] = "DnssrvQuery4", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x11] = "DnssrvUpdateRecord4", + ["50abc2a4-574d-40b3-9d66-ee4fd5fba076",0x12] = "DnssrvEnumRecords4", # lls_license ["57674cd0-5200-11ce-a897-08002b2e9c6d",0x00] = "LlsrLicenseRequestW", @@ -1487,6 +1522,9 @@ export { ["4fc742e0-4a10-11cf-8273-00aa004ae673",0x14] = "NetrDfsRemove2", ["4fc742e0-4a10-11cf-8273-00aa004ae673",0x15] 
= "NetrDfsEnumEx", ["4fc742e0-4a10-11cf-8273-00aa004ae673",0x16] = "NetrDfsSetInfo2", + ["4fc742e0-4a10-11cf-8273-00aa004ae673",0x17] = "NetrDfsAddRootTarget", + ["4fc742e0-4a10-11cf-8273-00aa004ae673",0x18] = "NetrDfsRemoveRootTarget", + ["4fc742e0-4a10-11cf-8273-00aa004ae673",0x19] = "NetrDfsGetSupportedNamespaceVersion", # sfcapi ["83da7c00-e84f-11d2-9807-00c04f8ec850",0x00] = "SfcSrv_GetNextProtectedFile", @@ -1609,6 +1647,12 @@ export { ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x16] = "DRSReplicaVerifyObjects", ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x17] = "DRSGetObjectExistence", ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x18] = "DRSQuerySitesByCost", + ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x19] = "IDL_DRSInitDemotion", + ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x1a] = "IDL_DRSReplicaDemotion", + ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x1b] = "IDL_DRSFinishDemotion", + ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x1c] = "IDL_DRSAddCloneDC", + ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x1d] = "IDL_DRSWriteNgcKey", + ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x1e] = "IDL_DRSReadNgcKey", # winspipe ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x00] = "R_WinsRecordAction", @@ -1628,9 +1672,12 @@ export { ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x0e] = "R_WinsGetBrowserNames_Old", ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x0f] = "R_WinsDeleteWins", ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x10] = "R_WinsSetFlags", - ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x11] = "R_WinsGetDbRecsByName", - ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x12] = "R_WinsStatusWHdl", - ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x13] = "R_WinsDoScavengingNew", + ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x11] = "R_WinsGetBrowserNames", + ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x12] = "R_WinsGetDbRecsByName", + ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x13] = "R_WinsStatusNew", + ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x14] = "R_WinsStatusWHdl", + ["45f52c28-7f9f-101a-b52b-08002b2efabe",0x15] = "R_WinsDoScavengingNew", + # mgmt ["afa8bd80-7d8a-11c9-bef4-08002b102989",0x00] = "inq_if_ids", @@ -1761,6 +1808,64 @@ export { ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x6A] = "ApiClusterControl", ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x6B] = "ApiUnblockGetNotifyCall", ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x6C] = "ApiSetServiceAccountPassword", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x6D] = "ApiSetResourceDependencyExpression", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x6E] = "ApiGetResourceDependencyExpression", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x70] = "ApiGetResourceNetworkName", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x71] = "ApiExecuteBatch", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x72] = "ApiCreateBatchPort", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x73] = "ApiGetBatchNotification", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x74] = "ApiCloseBatchPort", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x75] = "ApiOpenClusterEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x76] = "ApiOpenNodeEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x77] = "ApiOpenGroupEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x78] = "ApiOpenResourceEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x79] = "ApiOpenNetworkEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x7A] = "ApiOpenNetInterfaceEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x7B] = "ApiChangeCsvState", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x7C] = "ApiCreateNodeEnumEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x7D] = "ApiCreateEnumEx", + 
["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x7E] = "ApiPauseNodeEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x7F] = "ApiPauseNodeWithDrainTarget", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x80] = "ApiResumeNodeEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x81] = "ApiCreateGroupEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x82] = "ApiOnlineGroupEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x83] = "ApiOfflineGroupEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x84] = "ApiMoveGroupEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x85] = "ApiMoveGroupToNodeEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x86] = "ApiCancelClusterGroupOperation", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x87] = "ApiOnlineResourceEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x88] = "ApiOfflineResourceEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x89] = "ApiCreateNotifyV2", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x8A] = "ApiAddNotifyV2", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x8B] = "ApiGetNotifyV2", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x8F] = "ApiCreateGroupEnum", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x90] = "ApiCreateResourceEnum", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x91] = "ApiExecuteReadBatch", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x92] = "ApiRestartResource", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x93] = "ApiGetNotifyAsync", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x9B] = "ApiAddNotifyResourceTypeV2", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0x9D] = "ApiExecuteReadBatchEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xA3] = "ApiCreateGroupSet", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xA4] = "ApiOpenGroupSet", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xA5] = "ApiCloseGroupSet", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xA6] = "ApiDeleteGroupSet", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xA7] = "ApiAddGroupToGroupSet", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xA8] = "ApiRemoveGroupFromGroupSet", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xA9] = "ApiMoveGroupToGroupSet", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xAB] = "ApiAddGroupSetDependency", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xAC] = "ApiAddGroupToGroupSetDependency", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xAD] = "ApiNodeGroupSetControl", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xAE] = "ApiGroupSetControl", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xAF] = "ApiSetGroupDependencyExpression", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB0] = "ApiRemoveClusterGroupDependency", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB1] = "ApiSetGroupSetDependencyExpression", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB2] = "ApiRemoveGroupSetDependency", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB3] = "ApiRemoveClusterGroupToGroupSetDependency", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB4] = "ApiCreateGroupSetEnum", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB5] = "ApiCreateNetInterfaceEnum", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB6] = "ApiChangeCsvStateEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB7] = "ApiAddGroupToGroupSetEx", + ["b97db8b2-4c63-11cf-bff6-08002be23f2f",0xB8] = "ApiChangeResourceGroupEx", # dhcpsrv - MSDN Ref: DHCP Server Mgmt Protocol [ms-dhcpm] ["6bffd098-a112-3610-9833-46c3f874532d",0x00] = "R_DhcpCreateSubnet", @@ -2219,7 +2324,7 @@ export { ["00020401-0000-0000-c000-000000000046",0x12] = "GetContainingTypeLib", # IDMNotify - MSDN Ref: Disk Mgmt Remote Protocol [ms-dmrp] - ["d2d79df7-3400-11d0-b40b-00aa005ff586",0x00] = "ObjectsChanged", + 
["d2d79df7-3400-11d0-b40b-00aa005ff586",0x03] = "ObjectsChanged", # IDMRemoteServer - MSDN Ref: Disk Mgmt Remote Protocol [ms-dmrp] ["3a410f21-553f-11d1-8e5e-00a0c92c9d5d",0x03] = "CreateRemoteObject", diff --git a/scripts/base/protocols/ftp/utils-commands.zeek b/scripts/base/protocols/ftp/utils-commands.zeek index 31466970a5..d14d8da97c 100644 --- a/scripts/base/protocols/ftp/utils-commands.zeek +++ b/scripts/base/protocols/ftp/utils-commands.zeek @@ -135,10 +135,3 @@ function remove_pending_cmd(pc: PendingCmds, ca: CmdArg): bool else return F; } - -function pop_pending_cmd(pc: PendingCmds, reply_code: count, reply_msg: string): CmdArg &deprecated="Remove in v6.1. Usage testing indicates this function is unused." - { - local ca = get_pending_cmd(pc, reply_code, reply_msg); - remove_pending_cmd(pc, ca); - return ca; - } diff --git a/scripts/base/protocols/radius/consts.zeek b/scripts/base/protocols/radius/consts.zeek index 06cb014e31..8b76021947 100644 --- a/scripts/base/protocols/radius/consts.zeek +++ b/scripts/base/protocols/radius/consts.zeek @@ -182,7 +182,7 @@ const attr_types: table[count] of string = { [171] = "Delegated-IPv6-Prefix-Pool", [172] = "Stateful-IPv6-Address-Pool", [173] = "IPv6-6rd-Configuration" -} &default=function(i: count): string { return fmt("unknown-%d", i); } &deprecated="Remove in v6.1. Usage testing indicates this function is unused."; +} &default=function(i: count): string { return fmt("unknown-%d", i); } &is_used; const nas_port_types: table[count] of string = { [0] = "Async", @@ -205,7 +205,7 @@ const nas_port_types: table[count] of string = { [17] = "Cable", [18] = "Wireless - Other", [19] = "Wireless - IEEE 802.11" -} &default=function(i: count): string { return fmt("unknown-%d", i); } &deprecated="Remove in v6.1. Usage testing indicates this function is unused."; +} &default=function(i: count): string { return fmt("unknown-%d", i); } &is_used; const service_types: table[count] of string = { [1] = "Login", @@ -219,7 +219,7 @@ const service_types: table[count] of string = { [9] = "Callback NAS Prompt", [10] = "Call Check", [11] = "Callback Administrative", -} &default=function(i: count): string { return fmt("unknown-%d", i); } &deprecated="Remove in v6.1. Usage testing indicates this function is unused."; +} &default=function(i: count): string { return fmt("unknown-%d", i); } &is_used; const framed_protocol_types: table[count] of string = { [1] = "PPP", @@ -228,4 +228,4 @@ const framed_protocol_types: table[count] of string = { [4] = "Gandalf proprietary SingleLink/MultiLink protocol", [5] = "Xylogics proprietary IPX/SLIP", [6] = "X.75 Synchronous" -} &default=function(i: count): string { return fmt("unknown-%d", i); } &deprecated="Remove in v6.1. Usage testing indicates this function is unused."; +} &default=function(i: count): string { return fmt("unknown-%d", i); } &is_used; diff --git a/scripts/base/protocols/smb/smb1-main.zeek b/scripts/base/protocols/smb/smb1-main.zeek index cb6831d708..68a5ecbaee 100644 --- a/scripts/base/protocols/smb/smb1-main.zeek +++ b/scripts/base/protocols/smb/smb1-main.zeek @@ -274,50 +274,3 @@ event smb1_write_andx_request(c: connection, hdr: SMB1::Header, file_id: count, c$smb_state$pipe_map[file_id] = c$smb_state$current_file$uuid; } - -event smb_pipe_bind_ack_response(c: connection, hdr: SMB1::Header) &deprecated="Remove in v6.1. Usage testing indicates this function is unused." - { - if ( ! c$smb_state?$current_file || ! c$smb_state$current_file?$uuid ) - { - # TODO: figure out why the uuid isn't getting set sometimes. 
- return; - } - - c$smb_state$current_cmd$sub_command = "RPC_BIND_ACK"; - c$smb_state$current_cmd$argument = SMB::rpc_uuids[c$smb_state$current_file$uuid]; - } - -event smb_pipe_bind_request(c: connection, hdr: SMB1::Header, uuid: string, version: string) &deprecated="Remove in v6.1. Usage testing indicates this function is unused." - { - if ( ! c$smb_state?$current_file || ! c$smb_state$current_file?$uuid ) - { - # TODO: figure out why the current_file isn't getting set sometimes. - return; - } - - c$smb_state$current_cmd$sub_command = "RPC_BIND"; - c$smb_state$current_file$uuid = uuid; - c$smb_state$current_cmd$argument = fmt("%s v%s", SMB::rpc_uuids[uuid], version); - } - -event smb_pipe_request(c: connection, hdr: SMB1::Header, op_num: count) &deprecated="Remove in v6.1. Usage testing indicates this function is unused." - { - if ( ! c$smb_state?$current_file ) - { - # TODO: figure out why the current file isn't being set sometimes. - return; - } - - local f = c$smb_state$current_file; - if ( ! f?$uuid ) - { - # TODO: figure out why this is happening. - Reporter::conn_weird("smb_pipe_request_missing_uuid", c, ""); - return; - } - local arg = fmt("%s: %s", - SMB::rpc_uuids[f$uuid], - SMB::rpc_sub_cmds[f$uuid][op_num]); - - c$smb_state$current_cmd$argument = arg; - } diff --git a/scripts/base/protocols/socks/dpd.sig b/scripts/base/protocols/socks/dpd.sig index 3dcd7a945a..8b58574b07 100644 --- a/scripts/base/protocols/socks/dpd.sig +++ b/scripts/base/protocols/socks/dpd.sig @@ -31,7 +31,7 @@ signature dpd_socks4_reverse_server { signature dpd_socks5_client { ip-proto == tcp # Watch for a few authentication methods to reduce false positives. - payload /^\x05.[\x00\x01\x02]/ + payload /^\x05.[\x00\x01\x02\x03\x05\x06\x07\x08\x09]/ tcp-state originator } @@ -40,9 +40,23 @@ signature dpd_socks5_server { requires-reverse-signature dpd_socks5_client # Watch for a single authentication method to be chosen by the server or # the server to indicate the no authentication is required. - payload /^\x05(\x00|\x01[\x00\x01\x02])/ + # From wikipedia: + # 0x00: No authentication + # 0x01: GSSAPI (RFC 1961) + # 0x02: Username/password (RFC 1929) + # 0x03–0x7F: methods assigned by IANA[11] + # 0x03: Challenge-Handshake Authentication Protocol + # 0x04: Unassigned + # 0x05: Challenge-Response Authentication Method + # 0x06: Secure Sockets Layer + # 0x07: NDS Authentication + # 0x08: Multi-Authentication Framework + # 0x09: JSON Parameter Block + # 0x0A–0x7F: Unassigned + # 0x80–0xFE: methods reserved for private use + # + # Keep in sync with dpd_socks5_client, 0xff is "no acceptable methods" + payload /^\x05[\x00\x01\x02\x03\x05\x06\x07\x08\x09\xff]/ tcp-state responder enable "socks" } - - diff --git a/scripts/base/utils/site.zeek b/scripts/base/utils/site.zeek index 8aeb8f8296..e3e309da66 100644 --- a/scripts/base/utils/site.zeek +++ b/scripts/base/utils/site.zeek @@ -167,7 +167,7 @@ export { global get_emails: function(a: addr): string; } -# Please ignore, this is an interally used variable. +# Please ignore, this is an internally used variable. global local_dns_suffix_regex: pattern = /MATCH_NOTHING/; global local_dns_neighbor_suffix_regex: pattern = /MATCH_NOTHING/; diff --git a/scripts/policy/frameworks/files/deprecated-txhosts-rxhosts-connuids.zeek b/scripts/policy/frameworks/files/deprecated-txhosts-rxhosts-connuids.zeek deleted file mode 100644 index 93d81a29f7..0000000000 --- a/scripts/policy/frameworks/files/deprecated-txhosts-rxhosts-connuids.zeek +++ /dev/null @@ -1,64 +0,0 @@ -##! 
This script can be used to add back the fields ``tx_hosts``, ``rx_hosts`` -##! and ``conn_uids`` to the :zeek:see:`Files::Info` record and thereby also -##! back into the ``files.log``. These fields have been removed in Zeek 5.1 -##! and replaced with the more commonly used ``uid`` and ``id`` fields. -##! -##! It's only purpose is to provide an easy way to add back the fields such that -##! existing downstream processes continue to work without the need to adapt them. -##! This script will be removed with Zeek 6.1 at which point downstream processes -##! hopefully have switched over to use ``uid`` and ``id`` instead. - -# Remove in v6.1. - -@load base/frameworks/files - -module Files; - -# Add back the fields to Files::Info. -redef record Info += { - ## If this file was transferred over a network - ## connection this should show the host or hosts that - ## the data sourced from. - tx_hosts: set[addr] &default=addr_set() &log; - - ## If this file was transferred over a network - ## connection this should show the host or hosts that - ## the data traveled to. - rx_hosts: set[addr] &default=addr_set() &log; - - ## Connection UIDs over which the file was transferred. - conn_uids: set[string] &default=string_set() &log; -}; - -event file_over_new_connection(f: fa_file, c: connection, is_orig: bool) &priority=9 - { - local cid = c$id; - add f$info$conn_uids[c$uid]; - add f$info$tx_hosts[f$is_orig ? cid$orig_h : cid$resp_h]; - add f$info$rx_hosts[f$is_orig ? cid$resp_h : cid$orig_h]; - } - -# For every log write to files.log, ensure tx_hosts, rx_hosts and conn_uids -# hold just a single value. Use a high priority for this handler to ensure -# this happens before any user defined hooks. -hook Log::log_stream_policy(rec: any, id: Log::ID) &priority=100 - { - if ( id != Files::LOG ) - return; - - local info = rec as Files::Info; - - # In the common case of a single connection (or the less common case - # of no connection), there's nothing to do in this hook. - if ( |info$conn_uids| == 1 || ! info?$id ) - return; - - # Make singular tx_hosts, rx_hosts and conn_uids fields based on - # the active uid. Note, this currently assumes that Files::Info$is_orig - # is the same for all connections. This seems reasonable given that - # all connections will use the same protocol. - local cid = info$id; - info$conn_uids = set(info$uid); - info$tx_hosts = set(info$is_orig ? cid$orig_h : cid$resp_h); - info$rx_hosts = set(info$is_orig ? cid$resp_h : cid$orig_h); - } diff --git a/scripts/policy/frameworks/spicy/resource-usage.zeek b/scripts/policy/frameworks/spicy/resource-usage.zeek index d5daf8c404..e75176f2a4 100644 --- a/scripts/policy/frameworks/spicy/resource-usage.zeek +++ b/scripts/policy/frameworks/spicy/resource-usage.zeek @@ -1,4 +1,4 @@ -##! Logs Spicy-related resource usage continously for debugging purposes. +##! Logs Spicy-related resource usage continuously for debugging purposes. module Spicy; diff --git a/scripts/base/frameworks/telemetry/cluster.zeek b/scripts/policy/frameworks/telemetry/prometheus.zeek similarity index 76% rename from scripts/base/frameworks/telemetry/cluster.zeek rename to scripts/policy/frameworks/telemetry/prometheus.zeek index a589f0ddc5..abc947670a 100644 --- a/scripts/base/frameworks/telemetry/cluster.zeek +++ b/scripts/policy/frameworks/telemetry/prometheus.zeek @@ -1,13 +1,18 @@ ##! In a cluster configuration, open port 9911 on the manager for -##! Prometheus exposition and import all metrics from +##! Prometheus exposition and import all metrics from the ##! 
`zeek/cluster/metrics/...` topic. ##! ##! For customization or disabling, redef the involved Broker options again. ##! Specifically, to disable listening on port 9911, set ##! :zeek:see:`Broker::metrics_port` to `0/unknown` again. - +##! +##! Note that in large clusters, metrics import may cause significant +##! communication overhead as well as load on the manager. +##! @load base/frameworks/cluster +@if ( Cluster::is_enabled() ) + # Use Cluster::node as "endpoint" label redef Broker::metrics_export_endpoint_name = Cluster::node; @@ -19,3 +24,5 @@ redef Broker::metrics_import_topics = vector("zeek/cluster/metrics/"); @else redef Broker::metrics_export_topic = "zeek/cluster/metrics/"; @endif + +@endif diff --git a/scripts/policy/misc/scan.zeek b/scripts/policy/misc/scan.zeek deleted file mode 100644 index 6e37b99500..0000000000 --- a/scripts/policy/misc/scan.zeek +++ /dev/null @@ -1,184 +0,0 @@ -##! TCP Scan detection. - -# ..Authors: Sheharbano Khattak -# Seth Hall -# All the authors of the old scan.bro - -@deprecated "Remove in v6.1. Use the external github.com/ncsa/bro-simple-scan package instead (e.g., by installing it via `zkg install ncsa/bro-simple-scan`). The misc/scan.zeek script hasn't been maintained since 2013. Further, the external bro-simple-scan package from NCSA (Justin Azoff) has become the recommended alternative for TCP scan detection." - -@load base/frameworks/notice -@load base/frameworks/sumstats - -@load base/utils/time - -module Scan; - -export { - redef enum Notice::Type += { - ## Address scans detect that a host appears to be scanning some - ## number of destinations on a single port. This notice is - ## generated when more than :zeek:id:`Scan::addr_scan_threshold` - ## unique hosts are seen over the previous - ## :zeek:id:`Scan::addr_scan_interval` time range. - Address_Scan, - - ## Port scans detect that an attacking host appears to be - ## scanning a single victim host on several ports. This notice - ## is generated when an attacking host attempts to connect to - ## :zeek:id:`Scan::port_scan_threshold` - ## unique ports on a single host over the previous - ## :zeek:id:`Scan::port_scan_interval` time range. - Port_Scan, - }; - - ## Failed connection attempts are tracked over this time interval for - ## the address scan detection. A higher interval will detect slower - ## scanners, but may also yield more false positives. - const addr_scan_interval = 5min &redef; - - ## Failed connection attempts are tracked over this time interval for - ## the port scan detection. A higher interval will detect slower - ## scanners, but may also yield more false positives. - const port_scan_interval = 5min &redef; - - ## The threshold of the unique number of hosts a scanning host has to - ## have failed connections with on a single port. - const addr_scan_threshold = 25.0 &redef; - - ## The threshold of the number of unique ports a scanning host has to - ## have failed connections with on a single victim host. 
- const port_scan_threshold = 15.0 &redef; - - global Scan::addr_scan_policy: hook(scanner: addr, victim: addr, scanned_port: port); - global Scan::port_scan_policy: hook(scanner: addr, victim: addr, scanned_port: port); -} - -event zeek_init() &priority=5 - { - local r1: SumStats::Reducer = [$stream="scan.addr.fail", $apply=set(SumStats::UNIQUE), $unique_max=double_to_count(addr_scan_threshold+2)]; - SumStats::create([$name="addr-scan", - $epoch=addr_scan_interval, - $reducers=set(r1), - $threshold_val(key: SumStats::Key, result: SumStats::Result) = - { - return result["scan.addr.fail"]$unique+0.0; - }, - #$threshold_func=check_addr_scan_threshold, - $threshold=addr_scan_threshold, - $threshold_crossed(key: SumStats::Key, result: SumStats::Result) = - { - local r = result["scan.addr.fail"]; - local side = Site::is_local_addr(key$host) ? "local" : "remote"; - local dur = duration_to_mins_secs(r$end-r$begin); - local message=fmt("%s scanned at least %d unique hosts on port %s in %s", key$host, r$unique, key$str, dur); - NOTICE([$note=Address_Scan, - $src=key$host, - $p=to_port(key$str), - $sub=side, - $msg=message, - $identifier=cat(key$host)]); - }]); - - # Note: port scans are tracked similar to: table[src_ip, dst_ip] of set(port); - local r2: SumStats::Reducer = [$stream="scan.port.fail", $apply=set(SumStats::UNIQUE), $unique_max=double_to_count(port_scan_threshold+2)]; - SumStats::create([$name="port-scan", - $epoch=port_scan_interval, - $reducers=set(r2), - $threshold_val(key: SumStats::Key, result: SumStats::Result) = - { - return result["scan.port.fail"]$unique+0.0; - }, - $threshold=port_scan_threshold, - $threshold_crossed(key: SumStats::Key, result: SumStats::Result) = - { - local r = result["scan.port.fail"]; - local side = Site::is_local_addr(key$host) ? "local" : "remote"; - local dur = duration_to_mins_secs(r$end-r$begin); - local message = fmt("%s scanned at least %d unique ports of host %s in %s", key$host, r$unique, key$str, dur); - NOTICE([$note=Port_Scan, - $src=key$host, - $dst=to_addr(key$str), - $sub=side, - $msg=message, - $identifier=cat(key$host)]); - }]); - } - -function add_sumstats(id: conn_id, reverse: bool) - { - local scanner = id$orig_h; - local victim = id$resp_h; - local scanned_port = id$resp_p; - - if ( reverse ) - { - scanner = id$resp_h; - victim = id$orig_h; - scanned_port = id$orig_p; - } - - if ( hook Scan::addr_scan_policy(scanner, victim, scanned_port) ) - SumStats::observe("scan.addr.fail", [$host=scanner, $str=cat(scanned_port)], [$str=cat(victim)]); - - if ( hook Scan::port_scan_policy(scanner, victim, scanned_port) ) - SumStats::observe("scan.port.fail", [$host=scanner, $str=cat(victim)], [$str=cat(scanned_port)]); - } - -function is_failed_conn(c: connection): bool - { - # Sr || ( (hR || ShR) && (data not sent in any direction) ) - if ( (c$orig$state == TCP_SYN_SENT && c$resp$state == TCP_RESET) || - (((c$orig$state == TCP_RESET && c$resp$state == TCP_SYN_ACK_SENT) || - (c$orig$state == TCP_RESET && c$resp$state == TCP_ESTABLISHED && "S" in c$history ) - ) && /[Dd]/ !in c$history ) - ) - return T; - return F; - } - -function is_reverse_failed_conn(c: connection): bool - { - # reverse scan i.e. 
conn dest is the scanner - # sR || ( (Hr || sHr) && (data not sent in any direction) ) - if ( (c$resp$state == TCP_SYN_SENT && c$orig$state == TCP_RESET) || - (((c$resp$state == TCP_RESET && c$orig$state == TCP_SYN_ACK_SENT) || - (c$resp$state == TCP_RESET && c$orig$state == TCP_ESTABLISHED && "s" in c$history ) - ) && /[Dd]/ !in c$history ) - ) - return T; - return F; - } - -event connection_attempt(c: connection) - { - local is_reverse_scan = F; - if ( "H" in c$history ) - is_reverse_scan = T; - - add_sumstats(c$id, is_reverse_scan); - } - -event connection_rejected(c: connection) - { - local is_reverse_scan = F; - if ( "s" in c$history ) - is_reverse_scan = T; - - add_sumstats(c$id, is_reverse_scan); - } - -event connection_reset(c: connection) - { - if ( is_failed_conn(c) ) - add_sumstats(c$id, F); - else if ( is_reverse_failed_conn(c) ) - add_sumstats(c$id, T); - } - -event connection_pending(c: connection) - { - if ( is_failed_conn(c) ) - add_sumstats(c$id, F); - else if ( is_reverse_failed_conn(c) ) - add_sumstats(c$id, T); - } diff --git a/scripts/policy/protocols/mqtt/__load__.zeek b/scripts/policy/protocols/mqtt/__load__.zeek deleted file mode 100644 index 1e81f375eb..0000000000 --- a/scripts/policy/protocols/mqtt/__load__.zeek +++ /dev/null @@ -1,4 +0,0 @@ -@deprecated "Remove in v6.1. The MQTT scripts have been moved out of policy/ into base and are loaded by default" - -# For those running bare-mode and loading protocols/mqtt from policy. -@load base/protocols/mqtt diff --git a/scripts/site/local.zeek b/scripts/site/local.zeek index 512b1ea9cc..6935c6c4d0 100644 --- a/scripts/site/local.zeek +++ b/scripts/site/local.zeek @@ -97,6 +97,10 @@ redef digest_salt = "Please change this value."; # telemetry_histogram.log. @load frameworks/telemetry/log +# Enable metrics centralization on the manager. This opens port 9911/tcp +# on the manager node that can be readily scraped by Prometheus. +# @load frameworks/telemetry/prometheus + # Uncomment the following line to enable detection of the heartbleed attack. Enabling # this might impact performance a bit. # @load policy/protocols/ssl/heartbleed diff --git a/scripts/test-all-policy.zeek b/scripts/test-all-policy.zeek index 285d1f1198..efa867c6bf 100644 --- a/scripts/test-all-policy.zeek +++ b/scripts/test-all-policy.zeek @@ -12,7 +12,10 @@ # @load frameworks/control/controllee.zeek # @load frameworks/control/controller.zeek @load frameworks/cluster/experimental.zeek -@load frameworks/cluster/nodes-experimental/manager.zeek +# Loaded via the above through test-all-policy-cluster.test +# when running as a manager, creates cluster.log entries +# even in non-cluster mode if loaded like the below. 
+# @load frameworks/cluster/nodes-experimental/manager.zeek @load frameworks/management/agent/__load__.zeek @load frameworks/management/agent/api.zeek @load frameworks/management/agent/boot.zeek @@ -59,7 +62,6 @@ @load frameworks/intel/seen/where-locations.zeek @load frameworks/intel/seen/x509.zeek @load frameworks/netcontrol/catch-and-release.zeek -@load frameworks/files/deprecated-txhosts-rxhosts-connuids.zeek @load frameworks/files/detect-MHR.zeek @load frameworks/files/entropy-test-all-files.zeek #@load frameworks/files/extract-all-files.zeek @@ -75,6 +77,7 @@ # @load frameworks/spicy/record-spicy-batch.zeek # @load frameworks/spicy/resource-usage.zeek @load frameworks/software/windows-version-detection.zeek +@load frameworks/telemetry/prometheus.zeek @load frameworks/telemetry/log.zeek @load integration/collective-intel/__load__.zeek @load integration/collective-intel/main.zeek @@ -85,7 +88,6 @@ @load misc/load-balancing.zeek @load misc/loaded-scripts.zeek @load misc/profiling.zeek -@load misc/scan.zeek @load misc/stats.zeek @load misc/weird-stats.zeek @load misc/trim-trace-file.zeek @@ -116,7 +118,6 @@ @load protocols/krb/ticket-logging.zeek @load protocols/modbus/known-masters-slaves.zeek @load protocols/modbus/track-memmap.zeek -#@load protocols/mqtt/__load__.zeek @load protocols/mysql/software.zeek @load protocols/rdp/indicate_ssl.zeek @load protocols/smb/log-cmds.zeek diff --git a/scripts/zeekygen/__load__.zeek b/scripts/zeekygen/__load__.zeek index d7280b018c..734ed2fc81 100644 --- a/scripts/zeekygen/__load__.zeek +++ b/scripts/zeekygen/__load__.zeek @@ -1,8 +1,8 @@ @load test-all-policy.zeek # Scripts which are commented out in test-all-policy.zeek. -@load protocols/mqtt/__load__.zeek @load protocols/ssl/decryption.zeek +@load frameworks/cluster/nodes-experimental/manager.zeek @load frameworks/control/controllee.zeek @load frameworks/control/controller.zeek @load frameworks/management/agent/main.zeek diff --git a/src/3rdparty b/src/3rdparty index 410ada8bbe..c2763e952e 160000 --- a/src/3rdparty +++ b/src/3rdparty @@ -1 +1 @@ -Subproject commit 410ada8bbe5839807a459a99c1b77221f790f1be +Subproject commit c2763e952ea899f86bec2b60f840d38861cefd03 diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 627bb17c57..f7fe8931bf 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -422,7 +422,6 @@ set(MAIN_SRCS digest.h) set(THIRD_PARTY_SRCS - 3rdparty/bro_inet_ntop.c # Remove in v6.1. 3rdparty/zeek_inet_ntop.c 3rdparty/bsd-getopt-long.c 3rdparty/ConvertUTF.c @@ -511,6 +510,8 @@ set(zeek_SRCS collect_headers(zeek_HEADERS ${zeek_SRCS}) add_library(zeek_objs OBJECT ${zeek_SRCS}) +target_compile_features(zeek_objs PRIVATE ${ZEEK_CXX_STD}) +set_target_properties(zeek_objs PROPERTIES CXX_EXTENSIONS OFF) target_link_libraries(zeek_objs PRIVATE $) target_compile_definitions(zeek_objs PRIVATE ZEEK_CONFIG_SKIP_VERSION_H) add_dependencies(zeek_objs zeek_autogen_files) @@ -601,6 +602,8 @@ install( PATTERN "*.h" PATTERN "*.pac" PATTERN "3rdparty/*" EXCLUDE + # Headers used only during build + PATTERN "threading/formatters/detail" EXCLUDE # The "zeek -> ." 
symlink isn't needed in the install-tree REGEX "${escaped_include_path}$" EXCLUDE # FILES_MATCHING creates empty directories: @@ -622,7 +625,6 @@ install( install( FILES ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/ConvertUTF.h - ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/bro_inet_ntop.h # Remove in v6.1 ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/zeek_inet_ntop.h ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/bsd-getopt-long.h ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/modp_numtoa.h diff --git a/src/DNS_Mgr.cc b/src/DNS_Mgr.cc index 7093010421..1bbca1659b 100644 --- a/src/DNS_Mgr.cc +++ b/src/DNS_Mgr.cc @@ -1554,7 +1554,7 @@ TableValPtr DNS_Mgr::empty_addr_set() // Unit testing coverage for the DNS_Mgr code, including making actual DNS requests to // test responses and timeouts. Note that all of these tests are marked with the skip -// decorator, since they take some time to run and this slows down local developement. To +// decorator, since they take some time to run and this slows down local development. To // run them manually, pass the --no-skip flag when running tests. These tests are // run automatically as part of CI builds. @@ -1605,7 +1605,7 @@ public: /** * Derived testing version of DNS_Mgr so that the Process() method can be exposed - * publically. If new unit tests are added, this class should be used over using + * publicly. If new unit tests are added, this class should be used over using * DNS_Mgr directly. */ class TestDNS_Mgr final : public DNS_Mgr diff --git a/src/Debug.cc b/src/Debug.cc index 78fb3b990b..ac03aceb35 100644 --- a/src/Debug.cc +++ b/src/Debug.cc @@ -485,7 +485,7 @@ int dbg_shutdown_debugger() // by the operation argument; the additional arguments are put in the // supplied vector. // -// Parse the string into individual tokens, similarily to how shell +// Parse the string into individual tokens, similarly to how shell // would do it. void tokenize(const char* cstr, string& operation, vector& arguments) diff --git a/src/Desc.h b/src/Desc.h index d33b9bf03f..e907693da0 100644 --- a/src/Desc.h +++ b/src/Desc.h @@ -191,7 +191,7 @@ protected: std::pair FirstEscapeLoc(const char* bytes, size_t n); /** - * @param start start of string to check for starting with an espace + * @param start start of string to check for starting with an escape * sequence. * @param end one byte past the last character in the string. * @return The number of bytes in the escape sequence that the string diff --git a/src/Dict.h b/src/Dict.h index d33e6d597b..e61faf5c66 100644 --- a/src/Dict.h +++ b/src/Dict.h @@ -169,13 +169,6 @@ public: 0 == memcmp(GetKey(), arg_key, key_size); } - template - [[deprecated("Remove in v6.1. Access the value in the entry directly.")]] T* GetValue() const - { - static_assert(std::is_same_v, "Type of DictEntry and type requested are different"); - return value; - } - bool operator==(const DictEntry& r) const { return Equal(r.GetKey(), r.key_size, r.hash); } bool operator!=(const DictEntry& r) const { return ! Equal(r.GetKey(), r.key_size, r.hash); } }; diff --git a/src/Expr.h b/src/Expr.h index 73658b6e47..97127629b0 100644 --- a/src/Expr.h +++ b/src/Expr.h @@ -106,8 +106,6 @@ enum ExprTag : int #define NUM_EXPRS (int(EXPR_NOP) + 1) }; -using BroExprTag [[deprecated("Remove in v6.1. 
Use ExprTag.")]] = ExprTag; - extern const char* expr_name(ExprTag t); class AddToExpr; diff --git a/src/Func.cc b/src/Func.cc index f56719ee00..fa08d81709 100644 --- a/src/Func.cc +++ b/src/Func.cc @@ -903,6 +903,59 @@ FunctionIngredients::FunctionIngredients(ScopePtr _scope, StmtPtr _body, } } +zeek::RecordValPtr make_backtrace_element(std::string_view name, const VectorValPtr args, + const zeek::detail::Location* loc) + { + static auto elem_type = id::find_type("BacktraceElement"); + static auto function_name_idx = elem_type->FieldOffset("function_name"); + static auto function_args_idx = elem_type->FieldOffset("function_args"); + static auto file_location_idx = elem_type->FieldOffset("file_location"); + static auto line_location_idx = elem_type->FieldOffset("line_location"); + + auto elem = make_intrusive(elem_type); + elem->Assign(function_name_idx, name.data()); + elem->Assign(function_args_idx, std::move(args)); + + if ( loc ) + { + elem->Assign(file_location_idx, loc->filename); + elem->Assign(line_location_idx, loc->first_line); + } + + return elem; + } + +zeek::VectorValPtr get_current_script_backtrace() + { + static auto backtrace_type = id::find_type("Backtrace"); + + auto rval = make_intrusive(backtrace_type); + + // The body of the following loop can wind up adding items to + // the call stack (because MakeCallArgumentVector() evaluates + // default arguments, which can in turn involve calls to script + // functions), so we work from a copy of the current call stack + // to prevent problems with iterator invalidation. + auto cs_copy = zeek::detail::call_stack; + + for ( auto it = cs_copy.rbegin(); it != cs_copy.rend(); ++it ) + { + const auto& ci = *it; + if ( ! ci.func ) + // This happens for compiled code. + continue; + + const auto& params = ci.func->GetType()->Params(); + auto args = MakeCallArgumentVector(ci.args, params); + + auto elem = make_backtrace_element(ci.func->Name(), std::move(args), + ci.call ? ci.call->GetLocationInfo() : nullptr); + rval->Append(std::move(elem)); + } + + return rval; + } + static void emit_builtin_error_common(const char* msg, Obj* arg, bool unwind) { auto emit = [=](const CallExpr* ce) diff --git a/src/Func.h b/src/Func.h index a27eb892b7..06f2d26306 100644 --- a/src/Func.h +++ b/src/Func.h @@ -364,6 +364,25 @@ private: extern std::vector call_stack; +/** + * Create a single BacktraceElement record val. + * + * @param name the name of the function. + * @param args call argument vector created by MakeCallArgumentVector(). + * @param loc optional location information of the caller. + * + * @return record value representing a BacktraceElement. + */ +zeek::RecordValPtr make_backtrace_element(std::string_view name, const VectorValPtr args, + const zeek::detail::Location* loc); + +/** + * Create a Zeek script Backtrace of the current script call_stack. + * + * @return VectorValPtr containing BacktraceElement entries. + */ +zeek::VectorValPtr get_current_script_backtrace(); + // This is set to true after the built-in functions have been initialized. 
extern bool did_builtin_init; extern std::vector bif_initializers; diff --git a/src/Obj.cc b/src/Obj.cc index 47bbf1462d..1bf384aeb4 100644 --- a/src/Obj.cc +++ b/src/Obj.cc @@ -60,17 +60,7 @@ int Obj::suppress_errors = 0; Obj::~Obj() { if ( notify_plugins ) - { -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - PLUGIN_HOOK_VOID(HOOK_BRO_OBJ_DTOR, HookBroObjDtor(this)); -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif PLUGIN_HOOK_VOID(HOOK_OBJ_DTOR, HookObjDtor(this)); - } delete location; } diff --git a/src/Options.cc b/src/Options.cc index ac4da88099..86e8d01e51 100644 --- a/src/Options.cc +++ b/src/Options.cc @@ -301,7 +301,7 @@ Options parse_cmdline(int argc, char** argv) fprintf(stderr, "ERROR: C++ unit tests are disabled for this build.\n" " Please re-compile with ENABLE_ZEEK_UNIT_TESTS " "to run the C++ unit tests.\n"); - usage(argv[0], 1); + exit(1); #endif auto is_separator = [](const char* cstr) diff --git a/src/RE.h b/src/RE.h index b56660399f..614524754b 100644 --- a/src/RE.h +++ b/src/RE.h @@ -167,7 +167,7 @@ public: const AcceptingMatchSet& AcceptedMatches() const { return accepted_matches; } - // Returns the number of bytes feeded into the matcher so far + // Returns the number of bytes fed into the matcher so far int Length() { return current_pos; } // Returns true if this inputs leads to at least one new match. diff --git a/src/Reporter.cc b/src/Reporter.cc index 6815dbe4b2..1ae0c0335d 100644 --- a/src/Reporter.cc +++ b/src/Reporter.cc @@ -557,7 +557,7 @@ void Reporter::DoLog(const char* prefix, EventHandlerPtr event, FILE* out, Conne int size = sizeof(tmp); char* buffer = tmp; - char* alloced = nullptr; + char* allocated = nullptr; std::string loc_str; @@ -621,7 +621,7 @@ void Reporter::DoLog(const char* prefix, EventHandlerPtr event, FILE* out, Conne // Enlarge buffer; size *= 2; - buffer = alloced = (char*)realloc(alloced, size); + buffer = allocated = (char*)realloc(allocated, size); if ( ! buffer ) FatalError("out of memory in Reporter"); @@ -740,8 +740,8 @@ void Reporter::DoLog(const char* prefix, EventHandlerPtr event, FILE* out, Conne #endif } - if ( alloced ) - free(alloced); + if ( allocated ) + free(allocated); } bool Reporter::EmitToStderr(bool flag) diff --git a/src/Reporter.h b/src/Reporter.h index c243d6e98d..8f8e54e251 100644 --- a/src/Reporter.h +++ b/src/Reporter.h @@ -31,6 +31,7 @@ using StringValPtr = IntrusivePtr; namespace detail { +class AssertStmt; class Location; class Expr; @@ -59,6 +60,7 @@ class InterpreterException : public ReporterException { protected: friend class Reporter; + friend class detail::AssertStmt; InterpreterException() { } }; diff --git a/src/ScriptValidation.cc b/src/ScriptValidation.cc index 3accd8dbe6..d13bc63c48 100644 --- a/src/ScriptValidation.cc +++ b/src/ScriptValidation.cc @@ -23,12 +23,10 @@ public: if ( stmt->Tag() == STMT_BREAK && ! BreakStmtIsValid() ) Report(stmt, "break statement used outside of for, while or " - "switch statement and not within a hook. " - "With v6.1 this will become an error."); + "switch statement and not within a hook."); if ( stmt->Tag() == STMT_NEXT && ! NextStmtIsValid() ) - Report(stmt, "next statement used outside of for or while statement. 
" - "With v6.1 this will become an error."); + Report(stmt, "next statement used outside of for or while statement."); return TC_CONTINUE; } @@ -65,6 +63,8 @@ public: return TC_CONTINUE; } + void SetHookDepth(int hd) { hook_depth = hd; } + bool IsValid() const { return valid_script; } private: @@ -86,11 +86,7 @@ private: void Report(const Stmt* stmt, const char* msg) { if ( report ) - { - zeek::reporter->PushLocation(stmt->GetLocationInfo()); - zeek::reporter->Warning("%s", msg); - zeek::reporter->PopLocation(); - } + Error(stmt, msg); valid_script = false; } @@ -107,10 +103,15 @@ void script_validation() traverse_all(&bn_cb); } -bool script_is_valid(const Stmt* stmt) +bool script_is_valid(const Stmt* stmt, bool is_in_hook) { BreakNextScriptValidation bn_cb(false); + + if ( is_in_hook ) + bn_cb.SetHookDepth(1); + stmt->Traverse(&bn_cb); + return bn_cb.IsValid(); } diff --git a/src/ScriptValidation.h b/src/ScriptValidation.h index 05fc3a3ded..db0d86b820 100644 --- a/src/ScriptValidation.h +++ b/src/ScriptValidation.h @@ -13,8 +13,12 @@ class Stmt; void script_validation(); /** - * Returns true if the given script statement (body) is valid. + * Returns true if the given script statement (body) is valid. The + * second argument indicates whether the statement is the body of a hook. + * + * Unlike script_validation(), does not report any errors, just returns + * whether they are present. */ -bool script_is_valid(const Stmt* s); +bool script_is_valid(const Stmt* s, bool is_in_hook); } diff --git a/src/Stats.cc b/src/Stats.cc index 5ac3c7b427..6ae3548596 100644 --- a/src/Stats.cc +++ b/src/Stats.cc @@ -178,7 +178,7 @@ void ProfileLogger::Log() DNS_Mgr::Stats dstats; dns_mgr->GetStats(&dstats); - file->Write(util::fmt("%.06f DNS_Mgr: requests=%lu succesful=%lu failed=%lu pending=%lu " + file->Write(util::fmt("%.06f DNS_Mgr: requests=%lu successful=%lu failed=%lu pending=%lu " "cached_hosts=%lu cached_addrs=%lu\n", run_state::network_time, dstats.requests, dstats.successful, dstats.failed, dstats.pending, dstats.cached_hosts, diff --git a/src/Stmt.cc b/src/Stmt.cc index 1150740a9d..5cfb7bb914 100644 --- a/src/Stmt.cc +++ b/src/Stmt.cc @@ -54,6 +54,7 @@ const char* stmt_name(StmtTag t) "ZAM", "ZAM-resumption", "null", + "assert", }; return stmt_names[int(t)]; @@ -1864,6 +1865,140 @@ TraversalCode NullStmt::Traverse(TraversalCallback* cb) const HANDLE_TC_STMT_POST(tc); } +AssertStmt::AssertStmt(ExprPtr arg_cond, ExprPtr arg_msg) + : Stmt(STMT_ASSERT), cond(std::move(arg_cond)), msg(std::move(arg_msg)) + { + if ( ! IsBool(cond->GetType()->Tag()) ) + cond->Error("conditional must be boolean"); + + if ( msg && ! IsString(msg->GetType()->Tag()) ) + msg->Error("message must be string"); + } + +ValPtr AssertStmt::Exec(Frame* f, StmtFlowType& flow) + { + RegisterAccess(); + flow = FLOW_NEXT; + + static auto assertion_failure_hook = id::find_func("assertion_failure"); + static auto assertion_result_hook = id::find_func("assertion_result"); + + bool run_result_hook = assertion_result_hook && assertion_result_hook->HasEnabledBodies(); + bool run_failure_hook = assertion_failure_hook && assertion_failure_hook->HasEnabledBodies(); + + auto assert_result = cond->Eval(f)->AsBool(); + + if ( assert_result && ! run_result_hook ) + return Val::nil; + + // Textual representation of cond from the AST. 
+ static zeek::ODesc desc; + desc.Clear(); + desc.SetShort(true); + desc.SetQuotes(true); + cond->Describe(&desc); + auto cond_val = zeek::make_intrusive(desc.Len(), (const char*)desc.Bytes()); + + zeek::StringValPtr msg_val = zeek::val_mgr->EmptyString(); + if ( msg ) + { + // Eval() may fail if expression assumes assert + // condition is F, but we still try to get it for + // the assertion_result hook. + try + { + msg_val = cast_intrusive(msg->Eval(f)); + } + catch ( InterpreterException& e ) + { + desc.Clear(); + desc.Add("<"); + msg->Describe(&desc); + desc.Add(">"); + msg_val = zeek::make_intrusive(desc.Len(), (const char*)desc.Bytes()); + } + } + + VectorValPtr bt = nullptr; + if ( run_result_hook || run_failure_hook ) + { + bt = get_current_script_backtrace(); + auto assert_elem = make_backtrace_element("assert", MakeEmptyCallArgumentVector(), + GetLocationInfo()); + bt->Insert(0, std::move(assert_elem)); + } + + // Breaking from either the assertion_failure() or assertion_result() + // hook can be used to suppress the default log message. + bool report_error = true; + + if ( run_result_hook ) + report_error &= assertion_result_hook + ->Invoke(zeek::val_mgr->Bool(assert_result), cond_val, msg_val, bt) + ->AsBool(); + + if ( assert_result ) + return Val::nil; + + if ( run_failure_hook ) + report_error &= assertion_failure_hook->Invoke(cond_val, msg_val, bt)->AsBool(); + + if ( report_error ) + { + std::string reporter_msg = util::fmt("assertion failure: %s", cond_val->CheckString()); + if ( msg_val->Len() > 0 ) + reporter_msg += util::fmt(" (%s)", msg_val->CheckString()); + + reporter->PushLocation(GetLocationInfo()); + reporter->Error("%s", reporter_msg.c_str()); + reporter->PopLocation(); + } + + throw InterpreterException(); + } + +void AssertStmt::StmtDescribe(ODesc* d) const + { + Stmt::StmtDescribe(d); + + // Quoting strings looks better when describing assert + // statements. So turn it on explicitly. + // + // E.g., md5_hash("") ends up as md5_hash() without quoting. + auto orig_quotes = d->WantQuotes(); + d->SetQuotes(true); + + cond->Describe(d); + + if ( msg ) + { + d->Add(","); + d->SP(); + msg->Describe(d); + } + + DescribeDone(d); + + d->SetQuotes(orig_quotes); + } + +TraversalCode AssertStmt::Traverse(TraversalCallback* cb) const + { + TraversalCode tc = cb->PreStmt(this); + HANDLE_TC_STMT_PRE(tc); + + tc = cond->Traverse(cb); + HANDLE_TC_STMT_PRE(tc); + if ( msg ) + { + tc = msg->Traverse(cb); + HANDLE_TC_STMT_PRE(tc); + } + + tc = cb->PostStmt(this); + HANDLE_TC_STMT_POST(tc); + } + WhenInfo::WhenInfo(ExprPtr arg_cond, FuncType::CaptureList* arg_cl, bool arg_is_return) : cond(std::move(arg_cond)), cl(arg_cl), is_return(arg_is_return) { diff --git a/src/Stmt.h b/src/Stmt.h index 5625b54502..dc1a33a45b 100644 --- a/src/Stmt.h +++ b/src/Stmt.h @@ -544,6 +544,28 @@ private: bool is_directive; }; +class AssertStmt final : public Stmt + { +public: + explicit AssertStmt(ExprPtr cond, ExprPtr msg = nullptr); + + ValPtr Exec(Frame* f, StmtFlowType& flow) override; + + void StmtDescribe(ODesc* d) const override; + + TraversalCode Traverse(TraversalCallback* cb) const override; + + // Optimization-related: + StmtPtr Duplicate() override; + + bool IsReduced(Reducer* c) const override; + StmtPtr DoReduce(Reducer* c) override; + +private: + ExprPtr cond; + ExprPtr msg; + }; + // A helper class for tracking all of the information associated with // a "when" statement, and constructing the necessary components in support // of lambda-style captures.
@@ -617,7 +639,7 @@ private: bool is_return = false; - // The name of parameter passed ot the lambda. + // The name of parameter passed to the lambda. std::string lambda_param_id; // The expression for constructing the lambda, and its type. diff --git a/src/StmtBase.h b/src/StmtBase.h index dc7bb375c3..17188e0ce1 100644 --- a/src/StmtBase.h +++ b/src/StmtBase.h @@ -29,6 +29,7 @@ namespace detail class CompositeHash; class Frame; +class AssertStmt; class CatchReturnStmt; class ExprStmt; class ForStmt; @@ -94,6 +95,7 @@ public: const WhenStmt* AsWhenStmt() const; const SwitchStmt* AsSwitchStmt() const; const NullStmt* AsNullStmt() const; + const AssertStmt* AsAssertStmt() const; void RegisterAccess() const { diff --git a/src/StmtEnums.h b/src/StmtEnums.h index 1252edb43d..75f500f91c 100644 --- a/src/StmtEnums.h +++ b/src/StmtEnums.h @@ -32,8 +32,9 @@ enum StmtTag STMT_CPP, // compiled C++ STMT_ZAM, // a ZAM function body STMT_ZAM_RESUMPTION, // resumes ZAM execution for "when" statements - STMT_NULL -#define NUM_STMTS (int(STMT_NULL) + 1) + STMT_NULL, + STMT_ASSERT, +#define NUM_STMTS (int(STMT_ASSERT) + 1) }; enum StmtFlowType diff --git a/src/Trigger.h b/src/Trigger.h index e1c228fe33..71362ed511 100644 --- a/src/Trigger.h +++ b/src/Trigger.h @@ -92,7 +92,7 @@ public: void Describe(ODesc* d) const override; - // Overidden from Notifier. We queue the trigger and evaluate it + // Overridden from Notifier. We queue the trigger and evaluate it // later to avoid race conditions. void Modified(zeek::notifier::detail::Modifiable* m) override; diff --git a/src/Type.cc b/src/Type.cc index 97542ee008..c07a7b5fec 100644 --- a/src/Type.cc +++ b/src/Type.cc @@ -1398,18 +1398,6 @@ void RecordType::AddFieldsDirectly(const type_decl_list& others, bool add_log_at num_fields = types->length(); } -void RecordType::Create(std::vector>& r) const - { - for ( auto& di : deferred_inits ) - if ( di ) - r.push_back(di->Generate()); - else - r.push_back(std::nullopt); - - for ( auto& ci : creation_inits ) - r[ci.first] = ci.second->Generate(); - } - void RecordType::DescribeFields(ODesc* d) const { if ( d->IsReadable() ) @@ -2692,31 +2680,6 @@ TypePtr merge_types(const TypePtr& arg_t1, const TypePtr& arg_t2) } } -TypePtr merge_type_list(detail::ListExpr* elements) - { - TypeList* tl_type = elements->GetType()->AsTypeList(); - const auto& tl = tl_type->GetTypes(); - - if ( tl.size() < 1 ) - { - reporter->Error("no type can be inferred for empty list"); - return nullptr; - } - - auto t = tl[0]; - - if ( tl.size() == 1 ) - return t; - - for ( size_t i = 1; t && i < tl.size(); ++i ) - t = merge_types(t, tl[i]); - - if ( ! t ) - reporter->Error("inconsistent types in list"); - - return t; - } - TypePtr maximal_type(detail::ListExpr* elements) { TypeList* tl_type = elements->GetType()->AsTypeList(); diff --git a/src/Type.h b/src/Type.h index be7667d8bf..020ab6e358 100644 --- a/src/Type.h +++ b/src/Type.h @@ -688,15 +688,6 @@ public: void AddFieldsDirectly(const type_decl_list& types, bool add_log_attr = false); - /** - * - * Populates a new instance of the record with its initial values. - * @param r The record's underlying value vector. - */ - [[deprecated("Remove in v6.1. 
Construct a corresponding RecordVal and build vector from " - "GetFieldAs() calls.")]] void - Create(std::vector>& r) const; - void DescribeReST(ODesc* d, bool roles_only = false) const override; void DescribeFields(ODesc* d) const; void DescribeFieldsReST(ODesc* d, bool func_args) const; @@ -949,12 +940,6 @@ extern TypeTag max_type(TypeTag t1, TypeTag t2); // an error message) if the types are incompatible. TypePtr merge_types(const TypePtr& t1, const TypePtr& t2); -// Given a list of expressions, returns a (ref'd) type reflecting -// a merged type consistent across all of them, or nil if this -// cannot be done. -[[deprecated("Remove in v6.1. Use maximal_type() if possible. See GH-2604.")]] TypePtr -merge_type_list(detail::ListExpr* elements); - // Given a list of expressions, returns the maximal type consistent across // all of them, or nil if this cannot be done. "Maximal" incorporates // notions of arithmetic coercion, but otherwise requires type-equivalence. diff --git a/src/UID.h b/src/UID.h index 326d568f9f..31ed6efae5 100644 --- a/src/UID.h +++ b/src/UID.h @@ -98,5 +98,3 @@ inline UID& UID::operator=(const UID& other) } } // namespace zeek - -constexpr int BRO_UID_LEN [[deprecated("Remove in v6.1. Use zeek::UID_LEN")]] = zeek::UID_LEN; diff --git a/src/Val.cc b/src/Val.cc index a6d6fbfed5..3cb1d08a63 100644 --- a/src/Val.cc +++ b/src/Val.cc @@ -39,7 +39,7 @@ #include "zeek/broker/Data.h" #include "zeek/broker/Manager.h" #include "zeek/broker/Store.h" -#include "zeek/threading/formatters/JSON.h" +#include "zeek/threading/formatters/detail/json.h" using namespace std; @@ -404,8 +404,8 @@ TableValPtr Val::GetRecordFields() // This is a static method in this file to avoid including rapidjson's headers in Val.h because // they're huge. -static void BuildJSON(threading::formatter::JSON::NullDoubleWriter& writer, Val* val, - bool only_loggable = false, RE_Matcher* re = nullptr, const string& key = "") +static void BuildJSON(json::detail::NullDoubleWriter& writer, Val* val, bool only_loggable = false, + RE_Matcher* re = nullptr, const string& key = "") { if ( ! key.empty() ) writer.Key(key); @@ -509,7 +509,7 @@ static void BuildJSON(threading::formatter::JSON::NullDoubleWriter& writer, Val* else { rapidjson::StringBuffer buffer; - threading::formatter::JSON::NullDoubleWriter key_writer(buffer); + json::detail::NullDoubleWriter key_writer(buffer); BuildJSON(key_writer, entry_key, only_loggable, re); string key_str = buffer.GetString(); @@ -612,7 +612,7 @@ static void BuildJSON(threading::formatter::JSON::NullDoubleWriter& writer, Val* StringValPtr Val::ToJSON(bool only_loggable, RE_Matcher* re) { rapidjson::StringBuffer buffer; - threading::formatter::JSON::NullDoubleWriter writer(buffer); + json::detail::NullDoubleWriter writer(buffer); BuildJSON(writer, this, only_loggable, re, ""); @@ -1375,7 +1375,7 @@ static std::variant BuildVal(const rapidjson::Value& j, con } default: - return util::fmt("type '%s' unsupport", type_name(t->Tag())); + return util::fmt("type '%s' unsupported", type_name(t->Tag())); } } @@ -2425,7 +2425,7 @@ void TableVal::SendToStore(const Val* index, const TableEntryVal* new_entry_val, { if ( attrs->Find(detail::ATTR_EXPIRE_CREATE) ) { - // for create expiry, we have to substract the already elapsed time from + // for create expiry, we have to subtract the already elapsed time from // the expiry. 
auto e = expire_time - (run_state::network_time - new_entry_val->ExpireAccessTime()); @@ -2720,7 +2720,7 @@ void TableVal::Describe(ODesc* d) const void TableVal::InitDefaultFunc(detail::Frame* f) { - // Value aready initialized. + // Value already initialized. if ( def_val ) return; diff --git a/src/Var.cc b/src/Var.cc index d23edc0df8..70540cef48 100644 --- a/src/Var.cc +++ b/src/Var.cc @@ -195,18 +195,7 @@ static void make_var(const IDPtr& id, TypePtr t, InitClass c, ExprPtr init, { // This can happen because the grammar allows any "init_class", // including none, to be followed by an expression. - // Remove in v6.1 (make an error) - reporter->Deprecation( - util::fmt("Remove in v6.1. Initialization not preceded by =/+=/-= is deprecated. (%s)", - obj_desc_short(init.get()).c_str()), - init->GetLocationInfo()); - - // The historical instances of these, such as the - // language/redef-same-prefixtable-idx.zeek btest, treat - // this as += rather than =, and with the initializer - // implicitly inside a list. - init = make_intrusive(init); - c = INIT_EXTRA; + init->Error("Initialization not preceded by =/+=/-= is not allowed."); } if ( init && init->Tag() == EXPR_LIST ) diff --git a/src/ZeekArgs.cc b/src/ZeekArgs.cc index 503cd98ff4..8db4264cf7 100644 --- a/src/ZeekArgs.cc +++ b/src/ZeekArgs.cc @@ -52,4 +52,10 @@ VectorValPtr MakeCallArgumentVector(const Args& vals, const RecordTypePtr& types return rval; } +VectorValPtr MakeEmptyCallArgumentVector() + { + static auto call_argument_vector = id::find_type("call_argument_vector"); + return make_intrusive(call_argument_vector); + } + } // namespace zeek diff --git a/src/ZeekArgs.h b/src/ZeekArgs.h index 27acea28b1..6d4cf15d74 100644 --- a/src/ZeekArgs.h +++ b/src/ZeekArgs.h @@ -39,4 +39,11 @@ Args val_list_to_args(const ValPList& vl); */ VectorValPtr MakeCallArgumentVector(const Args& vals, const RecordTypePtr& types); +/** + * Creates an empty "call_argument_vector" vector. + * + * @return empty vector of script-level type "call_argument_vector" + */ +VectorValPtr MakeEmptyCallArgumentVector(); + } // namespace zeek diff --git a/src/ZeekPluginBootstrap.cmake.in b/src/ZeekPluginBootstrap.cmake.in index 1e1e11713c..4a56007d41 100644 --- a/src/ZeekPluginBootstrap.cmake.in +++ b/src/ZeekPluginBootstrap.cmake.in @@ -17,3 +17,7 @@ set(ZEEK_CMAKE_INSTALL_PREFIX "@CMAKE_INSTALL_PREFIX@" # the package directory. set(ZEEK_PLUGIN_SCRIPTS_PATH "${ZEEK_CMAKE_CONFIG_DIR}" CACHE PATH "Path to utility scripts for building Zeek plugins." FORCE) + +# The CMAKE_BUILD_TYPE type to use for external plugins if not overridden. +set(ZEEK_CMAKE_BUILD_TYPE "@CMAKE_BUILD_TYPE@" + CACHE PATH "Internal Zeek variable: CMAKE_BUILD_TYPE of Zeek." FORCE) diff --git a/src/ZeekString.h b/src/ZeekString.h index 2775bb30d9..c1b56c8e49 100644 --- a/src/ZeekString.h +++ b/src/ZeekString.h @@ -104,9 +104,6 @@ public: static constexpr int ZEEK_STRING_LITERAL = // as in a Zeek string literal ESC_ESC | ESC_QUOT | ESC_HEX; - static constexpr int BRO_STRING_LITERAL - [[deprecated("Remove in v6.1. Use ZEEK_STRING_LITERAL.")]] = ZEEK_STRING_LITERAL; - // Renders a string into a newly allocated character array that // you have to delete[]. You can combine the render styles given // above to achieve the representation you desire. 
If you pass a diff --git a/src/analyzer/Analyzer.cc b/src/analyzer/Analyzer.cc index f214350abb..40a21372e1 100644 --- a/src/analyzer/Analyzer.cc +++ b/src/analyzer/Analyzer.cc @@ -696,11 +696,6 @@ void Analyzer::EnqueueAnalyzerConfirmationInfo(const zeek::Tag& arg_tag) event_mgr.Enqueue(analyzer_confirmation_info, arg_tag.AsVal(), info); } -void Analyzer::EnqueueAnalyzerConfirmation(const zeek::Tag& arg_tag) - { - event_mgr.Enqueue(analyzer_confirmation, ConnVal(), arg_tag.AsVal(), val_mgr->Count(id)); - } - void Analyzer::AnalyzerConfirmation(zeek::Tag arg_tag) { if ( analyzer_confirmed ) @@ -712,9 +707,6 @@ void Analyzer::AnalyzerConfirmation(zeek::Tag arg_tag) if ( analyzer_confirmation_info ) EnqueueAnalyzerConfirmationInfo(effective_tag); - - if ( analyzer_confirmation ) - EnqueueAnalyzerConfirmation(effective_tag); } void Analyzer::EnqueueAnalyzerViolationInfo(const char* reason, const char* data, int len, @@ -736,25 +728,6 @@ void Analyzer::EnqueueAnalyzerViolationInfo(const char* reason, const char* data event_mgr.Enqueue(analyzer_violation_info, arg_tag.AsVal(), info); } -void Analyzer::EnqueueAnalyzerViolation(const char* reason, const char* data, int len, - const zeek::Tag& arg_tag) - { - StringValPtr r; - - if ( data && len ) - { - const char* tmp = util::copy_string(reason); - r = make_intrusive(util::fmt( - "%s [%s%s]", tmp, util::fmt_bytes(data, min(40, len)), len > 40 ? "..." : "")); - delete[] tmp; - } - else - r = make_intrusive(reason); - - event_mgr.Enqueue(analyzer_violation, ConnVal(), arg_tag.AsVal(), val_mgr->Count(id), - std::move(r)); - } - void Analyzer::AnalyzerViolation(const char* reason, const char* data, int len, zeek::Tag arg_tag) { const auto& effective_tag = arg_tag ? arg_tag : tag; @@ -771,9 +744,6 @@ void Analyzer::AnalyzerViolation(const char* reason, const char* data, int len, if ( analyzer_violation_info ) EnqueueAnalyzerViolationInfo(reason, data, len, effective_tag); - - if ( analyzer_violation ) - EnqueueAnalyzerViolation(reason, data, len, effective_tag); } void Analyzer::AddTimer(analyzer_timer_func timer, double t, bool do_expire, diff --git a/src/analyzer/Analyzer.h b/src/analyzer/Analyzer.h index 6533841c95..cebd455ff5 100644 --- a/src/analyzer/Analyzer.h +++ b/src/analyzer/Analyzer.h @@ -466,7 +466,7 @@ public: Analyzer* GetChildAnalyzer(const zeek::Tag& tag) const; /** - * Recursively searches all (direct or indirect) childs of the + * Recursively searches all (direct or indirect) children of the * analyzer for an analyzer with a specific ID. * * @param id The analyzer id to search. This is the ID that GetID() @@ -477,7 +477,7 @@ public: virtual Analyzer* FindChild(ID id); /** - * Recursively searches all (direct or indirect) childs of the + * Recursively searches all (direct or indirect) children of the * analyzer for an analyzer of a given type. * * @param tag The analyzer type to search. @@ -488,7 +488,7 @@ public: virtual Analyzer* FindChild(zeek::Tag tag); /** - * Recursively searches all (direct or indirect) childs of the + * Recursively searches all (direct or indirect) children of the * analyzer for an analyzer of a given type. 
* * @param name The name of the analyzer type to search (e.g., @@ -737,17 +737,10 @@ private: // Internal helper to raise analyzer_confirmation events void EnqueueAnalyzerConfirmationInfo(const zeek::Tag& arg_tag); - // Remove in v6.1 - internal helper to raise analyzer_confirmation - void EnqueueAnalyzerConfirmation(const zeek::Tag& arg_tag); - // Internal helper to raise analyzer_violation_info void EnqueueAnalyzerViolationInfo(const char* reason, const char* data, int len, const zeek::Tag& arg_tag); - // Remove in v6.1 - internal helper to raise analyzer_violation - void EnqueueAnalyzerViolation(const char* reason, const char* data, int len, - const zeek::Tag& arg_tag); - zeek::Tag tag; ID id; diff --git a/src/analyzer/Manager.h b/src/analyzer/Manager.h index 5a1c23235f..599ba69f52 100644 --- a/src/analyzer/Manager.h +++ b/src/analyzer/Manager.h @@ -297,7 +297,7 @@ public: * @param init True if the newly added analyzers should be * immediately initialized. * - * @param root If given, the scheduled analyzers will become childs + * @param root If given, the scheduled analyzers will become children * of this; if not given the connection's root analyzer is used * instead. * diff --git a/src/analyzer/protocol/dnp3/DNP3.cc b/src/analyzer/protocol/dnp3/DNP3.cc index ae52c885e1..3f589aa616 100644 --- a/src/analyzer/protocol/dnp3/DNP3.cc +++ b/src/analyzer/protocol/dnp3/DNP3.cc @@ -66,13 +66,13 @@ // (excluding CRC fields) in the current DNP3 packet. // // Since "Len" is of size one byte, the largest length it can represent is -// 255 bytes. The larget DNP3 Application Layer size is "255 - 5 + size of +// 255 bytes. The largest DNP3 Application Layer size is "255 - 5 + size of // all CRC fields". "minus 5" is coming from the 5 bytes after "Len" field in // the DNP3 Link Layer, i.e. Ctrl Dest_LSB Dest_MSB Src_LSB Src_MSB Hence, // the largest size of a DNP3 Packet (DNP3 Data Link Layer : DNP3 Transport // Layer : DNP3 Application Layer) can only be 292 bytes. // -// The "Len" field indicates the length of of a single chunk of DNP3 Psuedo +// The "Len" field indicates the length of of a single chunk of DNP3 Pseudo // Application Layer data instead of the whole DNP3 Application Layer // Fragment. 
However, we can not know the whole length of the DNP3 // Application Layer Fragment (which Binpac would normally need) until all diff --git a/src/analyzer/protocol/dnp3/dnp3-objects.pac b/src/analyzer/protocol/dnp3/dnp3-objects.pac index e96436341e..be3ac9fd82 100644 --- a/src/analyzer/protocol/dnp3/dnp3-objects.pac +++ b/src/analyzer/protocol/dnp3/dnp3-objects.pac @@ -775,7 +775,7 @@ type FrozenCounter16woFlag = record { # g21v11 and g21v12 are obsolete -# Conter event g22 +# Counter event g22 # g22v1 type CounterEve32wFlag = record { @@ -807,7 +807,7 @@ type CounterEve16wFlagTime = record { # g22v7 g22v8 obsolete -# Conter event g23 +# Counter event g23 # g23v1 type FrozenCounterEve32wFlag = record { diff --git a/src/analyzer/protocol/dns/DNS.h b/src/analyzer/protocol/dns/DNS.h index 2f229785d1..bc67170260 100644 --- a/src/analyzer/protocol/dns/DNS.h +++ b/src/analyzer/protocol/dns/DNS.h @@ -72,7 +72,7 @@ enum RR_Type TYPE_NSEC3 = 50, TYPE_NSEC3PARAM = 51, ///< Contains the NSEC3 parameters (RFC 5155) TYPE_SVCB = - 64, ///< SerViCe Binding (RFC draft: + 64, ///< Service Binding (RFC draft: ///< https://datatracker.ietf.org/doc/html/draft-ietf-dnsop-svcb-https-07#section-1.1) TYPE_HTTPS = 65, ///< HTTPS record (HTTPS specific SVCB resource record) // Obsoleted diff --git a/src/analyzer/protocol/dns/events.bif b/src/analyzer/protocol/dns/events.bif index a951fc94d2..2389162e9c 100644 --- a/src/analyzer/protocol/dns/events.bif +++ b/src/analyzer/protocol/dns/events.bif @@ -335,7 +335,6 @@ event dns_WKS_reply%(c: connection, msg: dns_msg, ans: dns_answer%); ## dns_max_queries dns_session_timeout dns_skip_addl ## dns_skip_all_addl dns_skip_all_auth dns_skip_auth event dns_HINFO_reply%(c: connection, msg: dns_msg, ans: dns_answer, cpu: string, os: string%); -event dns_HINFO_reply%(c: connection, msg: dns_msg, ans: dns_answer%) &deprecated="Remove in v5.2. Use the definition with the extra parameters for cpu and os."; ## Generated for DNS replies of type *MX*. For replies with multiple answers, an ## individual event of the corresponding type is raised for each. @@ -739,7 +738,7 @@ event dns_SVCB%(c: connection, msg: dns_msg, ans: dns_answer, svcb: dns_svcb_rr% ## Generated for DNS replies of type *HTTPS* (HTTPS Specific Service Endpoints). ## See `RFC draft for DNS SVCB/HTTPS `__ ## for more information about DNS SVCB/HTTPS resource records. -## Since SVCB and HTTPS records share the same wire format layout, the argument https is dns_svcb_rr. +## Since SVCB and HTTPS records share the same wire format layout, the argument https is dns_svcb_rr. ## For replies with multiple answers, an individual event of the corresponding type is raised for each. 
## ## c: The connection, which may be UDP or TCP depending on the type of the diff --git a/src/analyzer/protocol/gssapi/gssapi-protocol.pac b/src/analyzer/protocol/gssapi/gssapi-protocol.pac index a2df047ffd..3436f29b06 100644 --- a/src/analyzer/protocol/gssapi/gssapi-protocol.pac +++ b/src/analyzer/protocol/gssapi/gssapi-protocol.pac @@ -1,6 +1,16 @@ - -type GSSAPI_NEG_TOKEN(is_orig: bool) = record { +type GSSAPI_SELECT(is_orig: bool) = record { wrapper : ASN1EncodingMeta; + token: case tok_id of { + 0x0404 -> mic_blob: bytestring &restofdata; + 0x0504 -> wrap_blob: bytestring &restofdata; + default -> neg_token: GSSAPI_NEG_TOKEN(is_orig, is_init); + } &requires(is_init) &requires(tok_id); +} &let { + is_init: bool = wrapper.tag == 0x60; + tok_id: uint32 = (wrapper.tag << 8) | wrapper.len; +} &byteorder=littleendian; + +type GSSAPI_NEG_TOKEN(is_orig: bool, is_init: bool) = record { have_oid : case is_init of { true -> oid : ASN1Encoding; false -> no_oid : empty; @@ -13,8 +23,6 @@ type GSSAPI_NEG_TOKEN(is_orig: bool) = record { true -> init : GSSAPI_NEG_TOKEN_INIT; false -> resp : GSSAPI_NEG_TOKEN_RESP; }; -} &let { - is_init: bool = wrapper.tag == 0x60; } &byteorder=littleendian; type GSSAPI_NEG_TOKEN_INIT = record { diff --git a/src/analyzer/protocol/gssapi/gssapi.pac b/src/analyzer/protocol/gssapi/gssapi.pac index 3e56c7424e..dda39cf337 100644 --- a/src/analyzer/protocol/gssapi/gssapi.pac +++ b/src/analyzer/protocol/gssapi/gssapi.pac @@ -23,7 +23,7 @@ connection GSSAPI_Conn(zeek_analyzer: ZeekAnalyzer) { # Now we define the flow: flow GSSAPI_Flow(is_orig: bool) { - datagram = GSSAPI_NEG_TOKEN(is_orig) withcontext(connection, this); + datagram = GSSAPI_SELECT(is_orig) withcontext(connection, this); }; %include gssapi-analyzer.pac diff --git a/src/analyzer/protocol/http/HTTP.cc b/src/analyzer/protocol/http/HTTP.cc index 3465c796f3..1053c7b14f 100644 --- a/src/analyzer/protocol/http/HTTP.cc +++ b/src/analyzer/protocol/http/HTTP.cc @@ -1293,7 +1293,7 @@ int HTTP_Analyzer::HTTP_RequestLine(const char* line, const char* end_of_line) // If we determined HTTP/0.9 (no HTTP/ in the request line), assert that // minimally we have an URI and a 3 character method (HTTP 0.9 only - // supports GET). If that doesn't hold, probably not HTTP or very stange. + // supports GET). If that doesn't hold, probably not HTTP or very strange. if ( request_version == HTTP_VersionNumber{0, 9} ) { bool maybe_get_method = (end_of_method - line) >= 3; diff --git a/src/analyzer/protocol/irc/IRC.cc b/src/analyzer/protocol/irc/IRC.cc index f944b7e02a..6e721a2f5c 100644 --- a/src/analyzer/protocol/irc/IRC.cc +++ b/src/analyzer/protocol/irc/IRC.cc @@ -1079,7 +1079,7 @@ void IRC_Analyzer::DeliverStream(int length, const u_char* line, bool orig) void IRC_Analyzer::StartTLS() { - // STARTTLS was succesful. Remove support analyzers, add SSL + // STARTTLS was successful. Remove support analyzers, add SSL // analyzer, and throw event signifying the change. starttls = true; diff --git a/src/analyzer/protocol/irc/IRC.h b/src/analyzer/protocol/irc/IRC.h index 0b58eeafd7..4d54c8bafa 100644 --- a/src/analyzer/protocol/irc/IRC.h +++ b/src/analyzer/protocol/irc/IRC.h @@ -72,7 +72,7 @@ private: * \brief Splits a string into its words which are separated by * the split character. 
* - * \param input string which will be splitted + * \param input string which will be split * \param split character which separates the words * \return vector containing words */ diff --git a/src/analyzer/protocol/mysql/mysql-protocol.pac b/src/analyzer/protocol/mysql/mysql-protocol.pac index b19b216610..e8415e3de0 100644 --- a/src/analyzer/protocol/mysql/mysql-protocol.pac +++ b/src/analyzer/protocol/mysql/mysql-protocol.pac @@ -48,7 +48,7 @@ type LengthEncodedStringArg(first_byte: uint8) = record { public: int operator()(uint24le* num) const { - // Convert 24bit little endian int parsed as 3 uint8 into host endianess. + // Convert 24bit little endian int parsed as 3 uint8 into host endianness. return (num->byte1() << 16) | (num->byte2() << 8) | num->byte3(); } diff --git a/src/analyzer/protocol/pop3/POP3.cc b/src/analyzer/protocol/pop3/POP3.cc index 501cecaa56..2890225bb8 100644 --- a/src/analyzer/protocol/pop3/POP3.cc +++ b/src/analyzer/protocol/pop3/POP3.cc @@ -806,7 +806,7 @@ void POP3_Analyzer::ProcessReply(int length, const char* line) void POP3_Analyzer::StartTLS() { - // STARTTLS was succesful. Remove support analyzers, add SSL + // STARTTLS was successful. Remove support analyzers, add SSL // analyzer, and throw event signifying the change. RemoveSupportAnalyzer(cl_orig); RemoveSupportAnalyzer(cl_resp); diff --git a/src/analyzer/protocol/rfb/events.bif b/src/analyzer/protocol/rfb/events.bif index dd790d9a20..f55967ffaa 100644 --- a/src/analyzer/protocol/rfb/events.bif +++ b/src/analyzer/protocol/rfb/events.bif @@ -9,7 +9,7 @@ event rfb_authentication_type%(c: connection, authtype: count%); ## ## c: The connection record for the underlying transport-layer session/flow. ## -## result: whether or not authentication was succesful +## result: whether or not authentication was successful event rfb_auth_result%(c: connection, result: bool%); ## Generated for RFB event share flag messages diff --git a/src/analyzer/protocol/rfb/rfb-protocol.pac b/src/analyzer/protocol/rfb/rfb-protocol.pac index 3852a17e98..3c7a6ad0e8 100644 --- a/src/analyzer/protocol/rfb/rfb-protocol.pac +++ b/src/analyzer/protocol/rfb/rfb-protocol.pac @@ -224,8 +224,8 @@ type PixelData(encoding: int32, x: uint16, y: uint16, w: uint16, h: uint16) = ca 15 -> trle: PD_TRLE; 16 -> zrle: PD_ZRLE; # TODO: binpac is not happy with negative values here - #-239 -> cursor_pseudo: PD_PsuedoCursor; - #-223 -> desktop_size: PD_PsuedoDesktopSize; + #-239 -> cursor_pseudo: PD_PseudoCursor; + #-223 -> desktop_size: PD_PseudoDesktopSize; }; type PD_Raw(w: uint16, h: uint16) = record { @@ -266,12 +266,12 @@ type PD_ZRLE = record { zlib_data: bytestring &length=len &transient; } &length=(4 + len); -type PD_PsuedoCursor(w: uint16, h: uint16) = record { +type PD_PseudoCursor(w: uint16, h: uint16) = record { pixels: bytestring &length=(w * h * $context.connection.get_bytes_per_pixel()) &transient; bitmask: bytestring &length=(h * ((w + 7) / 8)) &transient; } &length=(w * h * $context.connection.get_bytes_per_pixel()) + (h * ((w + 7) / 8)) -type PD_PsuedoDesktopSize = record { +type PD_PseudoDesktopSize = record { # Actually no further data nothing: empty; } &length=0; diff --git a/src/analyzer/protocol/smtp/SMTP.cc b/src/analyzer/protocol/smtp/SMTP.cc index c6f1c2d92f..dca03701cf 100644 --- a/src/analyzer/protocol/smtp/SMTP.cc +++ b/src/analyzer/protocol/smtp/SMTP.cc @@ -384,7 +384,7 @@ void SMTP_Analyzer::NewCmd(int cmd_code) void SMTP_Analyzer::StartTLS() { - // STARTTLS was succesful. 
Remove SMTP support analyzers, add SSL + // STARTTLS was successful. Remove SMTP support analyzers, add SSL // analyzer, and throw event signifying the change. state = detail::SMTP_IN_TLS; expect_sender = expect_recver = true; diff --git a/src/analyzer/protocol/snmp/snmp-analyzer.pac b/src/analyzer/protocol/snmp/snmp-analyzer.pac index 6291f39781..9ba78073c0 100644 --- a/src/analyzer/protocol/snmp/snmp-analyzer.pac +++ b/src/analyzer/protocol/snmp/snmp-analyzer.pac @@ -195,9 +195,7 @@ zeek::RecordValPtr build_bulk_pdu(const GetBulkRequestPDU* pdu) rv->Assign(0, asn1_integer_to_val(pdu->request_id(), zeek::TYPE_INT)); rv->Assign(1, asn1_integer_to_val(pdu->non_repeaters(), zeek::TYPE_COUNT)); rv->Assign(2, asn1_integer_to_val(pdu->max_repetitions(), zeek::TYPE_COUNT)); - // Remove in v6.1: Misspelled repititions/repetitions backwards compat - rv->Assign(3, asn1_integer_to_val(pdu->max_repetitions(), zeek::TYPE_COUNT)); - rv->Assign(4, build_bindings(pdu->var_bindings())); + rv->Assign(3, build_bindings(pdu->var_bindings())); return rv; } %} diff --git a/src/analyzer/protocol/ssl/ssl-analyzer.pac b/src/analyzer/protocol/ssl/ssl-analyzer.pac index 05badf63cc..768e7b2fdc 100644 --- a/src/analyzer/protocol/ssl/ssl-analyzer.pac +++ b/src/analyzer/protocol/ssl/ssl-analyzer.pac @@ -33,8 +33,8 @@ refine connection SSL_Conn += { for ( unsigned int i = 0; i < cipher_suites.size(); ++i ) { - auto ciph = zeek::val_mgr->Count(cipher_suites[i]); - cipher_vec->Assign(i, ciph); + auto cipher = zeek::val_mgr->Count(cipher_suites[i]); + cipher_vec->Assign(i, std::move(cipher)); } auto comp_vec = zeek::make_intrusive(zeek::id::index_vec); diff --git a/src/analyzer/protocol/ssl/ssl-protocol.pac b/src/analyzer/protocol/ssl/ssl-protocol.pac index f291bf5551..5914bcaeda 100644 --- a/src/analyzer/protocol/ssl/ssl-protocol.pac +++ b/src/analyzer/protocol/ssl/ssl-protocol.pac @@ -91,10 +91,10 @@ type V2ServerHello(rec: SSLRecord) = record { #cert_type : uint8; server_version : uint16; cert_len : uint16; - ciph_len : uint16; + ciphers_len : uint16; conn_id_len : uint16; cert_data : bytestring &length = cert_len; - ciphers : uint24[ciph_len/3]; + ciphers : uint24[ciphers_len/3]; conn_id_data : bytestring &length = conn_id_len; } &let { session_id_hit : uint8 = rec.head3; diff --git a/src/analyzer/protocol/ssl/tls-handshake-analyzer.pac b/src/analyzer/protocol/ssl/tls-handshake-analyzer.pac index 251047dec6..825827b15c 100644 --- a/src/analyzer/protocol/ssl/tls-handshake-analyzer.pac +++ b/src/analyzer/protocol/ssl/tls-handshake-analyzer.pac @@ -56,8 +56,8 @@ refine connection Handshake_Conn += { for ( unsigned int i = 0; i < cipher_suites.size(); ++i ) { - auto ciph = zeek::val_mgr->Count(cipher_suites[i]); - cipher_vec->Assign(i, ciph); + auto cipher = zeek::val_mgr->Count(cipher_suites[i]); + cipher_vec->Assign(i, std::move(cipher)); } auto comp_vec = zeek::make_intrusive(zeek::id::index_vec); diff --git a/src/analyzer/protocol/tcp/ContentLine.h b/src/analyzer/protocol/tcp/ContentLine.h index e3bf59516a..92cc4c84c1 100644 --- a/src/analyzer/protocol/tcp/ContentLine.h +++ b/src/analyzer/protocol/tcp/ContentLine.h @@ -22,11 +22,6 @@ public: void SuppressWeirds(bool enable) { suppress_weirds = enable; }; - [[deprecated("Remove in v6.1. Use SuppressWeirds() instead.")]] void SupressWeirds(bool enable) - { - SuppressWeirds(enable); - } - // If enabled, flag (first) line with embedded NUL. Default off. 
void SetIsNULSensitive(bool enable) { flag_NULs = enable; } diff --git a/src/const.bif b/src/const.bif index 55d5222f47..1ae177335d 100644 --- a/src/const.bif +++ b/src/const.bif @@ -23,11 +23,6 @@ const NFS3::return_data_max: count; const NFS3::return_data_first_only: bool; const Tunnel::max_depth: count; -const Tunnel::enable_ip: bool; -const Tunnel::enable_ayiya: bool; -const Tunnel::enable_teredo: bool; -const Tunnel::enable_gtpv1: bool; -const Tunnel::enable_gre: bool; const Tunnel::delay_teredo_confirmation: bool; const Tunnel::delay_gtp_confirmation: bool; const Tunnel::ip_tunnel_timeout: interval; diff --git a/src/event.bif b/src/event.bif index 07d643fa0f..e474c200b4 100644 --- a/src/event.bif +++ b/src/event.bif @@ -384,31 +384,6 @@ event analyzer_confirmation_info%(atype: AllAnalyzers::Tag, info: AnalyzerConfir ## .. zeek:see:: is_protocol_analyzer is_packet_analyzer is_file_analyzer event analyzer_violation_info%(atype: AllAnalyzers::Tag, info: AnalyzerViolationInfo%); -## Generated when a protocol analyzer confirms that a connection is indeed -## using that protocol. Zeek's dynamic protocol detection heuristically activates -## analyzers as soon as it believes a connection *could* be using a particular -## protocol. It is then left to the corresponding analyzer to verify whether -## that is indeed the case; if so, this event will be generated. -## -## c: The connection. -## -## atype: The type of the analyzer confirming that its protocol is in -## use. The value is one of the ``Analyzer::ANALYZER_*`` constants. For example, -## ``Analyzer::ANALYZER_HTTP`` means the HTTP analyzer determined that it's indeed -## parsing an HTTP connection. -## -## aid: A unique integer ID identifying the specific *instance* of the -## analyzer *atype* that is analyzing the connection ``c``. The ID can -## be used to reference the analyzer when using builtin functions like -## :zeek:id:`disable_analyzer`. -## -## .. note:: -## -## Zeek's default scripts use this event to determine the ``service`` column -## of :zeek:type:`Conn::Info`: once confirmed, the protocol will be listed -## there (and thus in ``conn.log``). -event analyzer_confirmation%(c: connection, atype: AllAnalyzers::Tag, aid: count%) &deprecated="Remove in 6.1. Use the generic analyzer_confirmation_info event instead."; - ## Generated if a DPD signature matched but the DPD buffer is already exhausted ## and thus the analyzer could not be attached. While this does not confirm ## that a protocol is actually used, it allows to retain that information. @@ -423,35 +398,6 @@ event analyzer_confirmation%(c: connection, atype: AllAnalyzers::Tag, aid: count ## .. zeek:see:: dpd_buffer_size dpd_max_packets event protocol_late_match%(c: connection, atype: Analyzer::Tag%); -## Generated when a protocol analyzer determines that a connection it is parsing -## is not conforming to the protocol it expects. Zeek's dynamic protocol -## detection heuristically activates analyzers as soon as it believes a -## connection *could* be using a particular protocol. It is then left to the -## corresponding analyzer to verify whether that is indeed the case; if not, -## the analyzer will trigger this event. -## -## c: The connection. -## -## atype: The type of the analyzer confirming that its protocol is in -## use. The value is one of the ``Analyzer::ANALYZER_*`` constants. For example, -## ``Analyzer::ANALYZER_HTTP`` means the HTTP analyzer determined that it's indeed -## parsing an HTTP connection. 
-## -## aid: A unique integer ID identifying the specific *instance* of the -## analyzer *atype* that is analyzing the connection ``c``. The ID can -## be used to reference the analyzer when using builtin functions like -## :zeek:id:`disable_analyzer`. -## -## reason: TODO. -## -## .. note:: -## -## Zeek's default scripts use this event to disable an analyzer via -## :zeek:id:`disable_analyzer` if it's parsing the wrong protocol. That's -## however a script-level decision and not done automatically by the event -## engine. -event analyzer_violation%(c: connection, atype: AllAnalyzers::Tag, aid: count, reason: string%) &deprecated="Remove in 6.1. Use the generic analyzer_violation_info event instead."; - ## Generated when a TCP connection terminated, passing on statistics about the ## two endpoints. This event is always generated when Zeek flushes the internal ## connection state, independent of how a connection terminates. diff --git a/src/file_analysis/File.cc b/src/file_analysis/File.cc index 845bc9a4d8..40e8299d53 100644 --- a/src/file_analysis/File.cc +++ b/src/file_analysis/File.cc @@ -130,9 +130,9 @@ bool File::UpdateConnectionFields(Connection* conn, bool is_orig) if ( ! conns ) { - auto ect = empty_connection_table(); - conns = ect; - val->Assign(conns_idx, std::move(ect)); + auto empty_conn_table = empty_connection_table(); + conns = empty_conn_table; + val->Assign(conns_idx, std::move(empty_conn_table)); } auto idx = get_conn_id_val(conn); diff --git a/src/file_analysis/analyzer/x509/ocsp_events.bif b/src/file_analysis/analyzer/x509/ocsp_events.bif index 0f839c0ee5..b23ea1d6e0 100644 --- a/src/file_analysis/analyzer/x509/ocsp_events.bif +++ b/src/file_analysis/analyzer/x509/ocsp_events.bif @@ -39,7 +39,7 @@ event ocsp_request_certificate%(f: fa_file, hashAlgorithm: string, issuerNameHas ## ## f: The file. ## -## status: The status of the OCSP response (e.g. succesful, malformedRequest, tryLater). +## status: The status of the OCSP response (e.g. successful, malformedRequest, tryLater). ## ## .. zeek:see:: ocsp_request ocsp_request_certificate ## ocsp_response_bytes ocsp_response_certificate ocsp_extension @@ -52,7 +52,7 @@ event ocsp_response_status%(f: fa_file, status: string%); ## ## f: The file. ## -## status: The status of the OCSP response (e.g. succesful, malformedRequest, tryLater). +## status: The status of the OCSP response (e.g. successful, malformedRequest, tryLater). ## ## version: Version of the OCSP response (typically - for version 1). 
## diff --git a/src/fuzzers/CMakeLists.txt b/src/fuzzers/CMakeLists.txt index 5bf6e7b5eb..fec9ab5b4a 100644 --- a/src/fuzzers/CMakeLists.txt +++ b/src/fuzzers/CMakeLists.txt @@ -31,6 +31,8 @@ string(REGEX MATCH ".*\\.a$" _have_static_bind_lib "${BIND_LIBRARY}") macro (SETUP_FUZZ_TARGET _fuzz_target _fuzz_source) add_executable(${_fuzz_target} ${_fuzz_source} ${ARGN}) + target_compile_features(${_fuzz_target} PRIVATE "${ZEEK_CXX_STD}") + set_target_properties(${_fuzz_target} PROPERTIES CXX_EXTENSIONS OFF) target_link_libraries(${_fuzz_target} zeek_fuzzer_shared) if (_have_static_bind_lib) @@ -62,6 +64,8 @@ endmacro () include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR}) add_library(zeek_fuzzer_standalone OBJECT standalone-driver.cc) +target_compile_features(zeek_fuzzer_standalone PRIVATE "${ZEEK_CXX_STD}") +set_target_properties(zeek_fuzzer_standalone PROPERTIES CXX_EXTENSIONS OFF) target_sources(zeek_fuzzer_shared PRIVATE FuzzBuffer.cc) diff --git a/src/input/Manager.cc b/src/input/Manager.cc index a8f1d2d94d..6db091ed53 100644 --- a/src/input/Manager.cc +++ b/src/input/Manager.cc @@ -653,7 +653,7 @@ bool Manager::CreateTableStream(RecordVal* fval) if ( ! CheckErrorEventTypes(stream_name, error_event, true) ) return false; - vector fieldsV; // vector, because we don't know the length beforehands + vector fieldsV; // vector, because we don't know the length beforehand bool status = (! UnrollRecordType(&fieldsV, idx, "", false)); diff --git a/src/iosource/PktDumper.h b/src/iosource/PktDumper.h index 7171d9e239..81ded1cbff 100644 --- a/src/iosource/PktDumper.h +++ b/src/iosource/PktDumper.h @@ -83,7 +83,7 @@ public: * * @param pkt The packet to record. * - * @return True if succesful, false otherwise (in which case \a + * @return True if successful, false otherwise (in which case \a * Error() must have been called.) */ virtual bool Dump(const Packet* pkt) = 0; diff --git a/src/iosource/PktSrc.h b/src/iosource/PktSrc.h index 062ac5032b..c9e4975411 100644 --- a/src/iosource/PktSrc.h +++ b/src/iosource/PktSrc.h @@ -263,7 +263,7 @@ protected: uint32_t netmask; /** - * True if the source is reading live inout, false for + * True if the source is reading live input, false for * working offline. */ bool is_live; diff --git a/src/iosource/pcap/Dumper.h b/src/iosource/pcap/Dumper.h index 58bd169030..ce24ecba06 100644 --- a/src/iosource/pcap/Dumper.h +++ b/src/iosource/pcap/Dumper.h @@ -20,7 +20,7 @@ public: PcapDumper(const std::string& path, bool append); ~PcapDumper() override; - static PktDumper* Instantiate(const std::string& path, bool appen); + static PktDumper* Instantiate(const std::string& path, bool append); protected: // PktDumper interface. diff --git a/src/logging/WriterFrontend.h b/src/logging/WriterFrontend.h index 52b2a1ae58..b2c64a129b 100644 --- a/src/logging/WriterFrontend.h +++ b/src/logging/WriterFrontend.h @@ -198,7 +198,7 @@ protected: bool initialized; // True if initialized. bool buf; // True if buffering is enabled (default). bool local; // True if logging locally. - bool remote; // True if loggin remotely. + bool remote; // True if logging remotely. const char* name; // Descriptive name of the WriterBackend::WriterInfo* info; // The writer information. 
diff --git a/src/logging/writers/ascii/Ascii.cc b/src/logging/writers/ascii/Ascii.cc index fd73b4a9c2..6f6b3435f7 100644 --- a/src/logging/writers/ascii/Ascii.cc +++ b/src/logging/writers/ascii/Ascii.cc @@ -264,13 +264,7 @@ void Ascii::InitConfigOptions() gzip_file_extension.assign((const char*)BifConst::LogAscii::gzip_file_extension->Bytes(), BifConst::LogAscii::gzip_file_extension->Len()); - // Remove in v6.1: LogAscii::logdir should be gone in favor - // of using Log::default_logdir. - logdir.assign((const char*)BifConst::LogAscii::logdir->Bytes(), - BifConst::LogAscii::logdir->Len()); - - if ( logdir.empty() ) - logdir = zeek::id::find_const("Log::default_logdir")->ToStdString(); + logdir = zeek::id::find_const("Log::default_logdir")->ToStdString(); } bool Ascii::InitFilterOptions() @@ -379,15 +373,6 @@ bool Ascii::InitFilterOptions() else if ( strcmp(i->first, "gzip_file_extension") == 0 ) gzip_file_extension.assign(i->second); - - else if ( strcmp(i->first, "logdir") == 0 ) - { - // This doesn't play nice with leftover log rotation - // and log rotation in general. There's no documentation - // or a test for this specifically, so deprecate it. - reporter->Deprecation("Remove in v6.1. Per writer logdir is deprecated."); - logdir.assign(i->second); - } } if ( ! InitFormatter() ) @@ -763,17 +748,13 @@ static std::vector find_leftover_logs() auto prefix_len = strlen(shadow_file_prefix); auto default_logdir = zeek::id::find_const("Log::default_logdir")->ToStdString(); - // Find any .shadow files within LogAscii::logdir, Log::default_logdir - // or otherwise search in the current working directory. + // Find any .shadow files within Log::default_logdir or otherwise search in + // the current working directory. auto logdir = zeek::filesystem::current_path(); if ( ! default_logdir.empty() ) logdir = zeek::filesystem::absolute(default_logdir); - // Remove LogAscii::logdir fallback in v6.1. - if ( BifConst::LogAscii::logdir->Len() > 0 ) - logdir = zeek::filesystem::absolute(BifConst::LogAscii::logdir->ToStdString()); - auto d = opendir(logdir.string().c_str()); struct dirent* dp; diff --git a/src/logging/writers/ascii/ascii.bif b/src/logging/writers/ascii/ascii.bif index 38932ded36..632c89f055 100644 --- a/src/logging/writers/ascii/ascii.bif +++ b/src/logging/writers/ascii/ascii.bif @@ -17,4 +17,3 @@ const json_timestamps: JSON::TimestampFormat; const json_include_unset_fields: bool; const gzip_level: count; const gzip_file_extension: string; -const logdir: string; diff --git a/src/packet_analysis/Analyzer.cc b/src/packet_analysis/Analyzer.cc index afd21ec40f..93dd47213d 100644 --- a/src/packet_analysis/Analyzer.cc +++ b/src/packet_analysis/Analyzer.cc @@ -185,11 +185,6 @@ void Analyzer::EnqueueAnalyzerConfirmationInfo(session::Session* session, const event_mgr.Enqueue(analyzer_confirmation_info, arg_tag.AsVal(), info); } -void Analyzer::EnqueueAnalyzerConfirmation(session::Session* session, const zeek::Tag& arg_tag) - { - event_mgr.Enqueue(analyzer_confirmation, session->GetVal(), arg_tag.AsVal(), val_mgr->Count(0)); - } - void Analyzer::AnalyzerConfirmation(session::Session* session, zeek::Tag arg_tag) { const auto& effective_tag = arg_tag ? 
arg_tag : GetAnalyzerTag(); @@ -208,9 +203,6 @@ void Analyzer::AnalyzerConfirmation(session::Session* session, zeek::Tag arg_tag if ( analyzer_confirmation_info ) EnqueueAnalyzerConfirmationInfo(session, effective_tag); - - if ( analyzer_confirmation ) - EnqueueAnalyzerConfirmation(session, effective_tag); } void Analyzer::EnqueueAnalyzerViolationInfo(session::Session* session, const char* reason, @@ -230,24 +222,6 @@ void Analyzer::EnqueueAnalyzerViolationInfo(session::Session* session, const cha event_mgr.Enqueue(analyzer_violation_info, arg_tag.AsVal(), info); } -void Analyzer::EnqueueAnalyzerViolation(session::Session* session, const char* reason, - const char* data, int len, const zeek::Tag& arg_tag) - { - StringValPtr r; - if ( data && len ) - { - const char* tmp = util::copy_string(reason); - r = make_intrusive(util::fmt( - "%s [%s%s]", tmp, util::fmt_bytes(data, std::min(40, len)), len > 40 ? "..." : "")); - delete[] tmp; - } - else - r = make_intrusive(reason); - - event_mgr.Enqueue(analyzer_violation, session->GetVal(), arg_tag.AsVal(), val_mgr->Count(0), - std::move(r)); - } - void Analyzer::AnalyzerViolation(const char* reason, session::Session* session, const char* data, int len, zeek::Tag arg_tag) { @@ -263,9 +237,6 @@ void Analyzer::AnalyzerViolation(const char* reason, session::Session* session, if ( analyzer_violation_info ) EnqueueAnalyzerViolationInfo(session, reason, data, len, effective_tag); - - if ( analyzer_violation ) - EnqueueAnalyzerViolation(session, reason, data, len, effective_tag); } } // namespace zeek::packet_analysis diff --git a/src/packet_analysis/Analyzer.h b/src/packet_analysis/Analyzer.h index 90f83d10fa..eff3b57ecd 100644 --- a/src/packet_analysis/Analyzer.h +++ b/src/packet_analysis/Analyzer.h @@ -262,17 +262,10 @@ private: // Internal helper to raise analyzer_confirmation events void EnqueueAnalyzerConfirmationInfo(session::Session* session, const zeek::Tag& arg_tag); - // Remove in v6.1 - internal helper to raise analyzer_confirmation - void EnqueueAnalyzerConfirmation(session::Session* session, const zeek::Tag& arg_tag); - // Internal helper to raise analyzer_violation_info void EnqueueAnalyzerViolationInfo(session::Session* session, const char* reason, const char* data, int len, const zeek::Tag& arg_tag); - // Remove in v6.1 - internal helper to raise analyzer_violation - void EnqueueAnalyzerViolation(session::Session* session, const char* reason, const char* data, - int len, const zeek::Tag& arg_tag); - zeek::Tag tag; Dispatcher dispatcher; AnalyzerPtr default_analyzer = nullptr; diff --git a/src/packet_analysis/protocol/ayiya/AYIYA.cc b/src/packet_analysis/protocol/ayiya/AYIYA.cc index f1ff574e56..3ed64df1e4 100644 --- a/src/packet_analysis/protocol/ayiya/AYIYA.cc +++ b/src/packet_analysis/protocol/ayiya/AYIYA.cc @@ -10,9 +10,6 @@ AYIYAAnalyzer::AYIYAAnalyzer() : zeek::packet_analysis::Analyzer("AYIYA") { } bool AYIYAAnalyzer::AnalyzePacket(size_t len, const uint8_t* data, Packet* packet) { - if ( ! BifConst::Tunnel::enable_ayiya ) - return false; - // AYIYA always comes from a TCP or UDP connection, which means that session // should always be valid and always be a connection. Return a weird if we // didn't have a session stored. @@ -83,9 +80,6 @@ bool AYIYAAnalyzer::AnalyzePacket(size_t len, const uint8_t* data, Packet* packe bool AYIYAAnalyzer::DetectProtocol(size_t len, const uint8_t* data, Packet* packet) { - if ( ! 
BifConst::Tunnel::enable_ayiya ) - return false; - // These magic numbers are based on the old DPD entry, which was based on... something? return len >= 3 && data[1] == 0x52 && data[2] == 0x11; } diff --git a/src/packet_analysis/protocol/gre/GRE.cc b/src/packet_analysis/protocol/gre/GRE.cc index 9c1cf311ed..95f77f4dd3 100644 --- a/src/packet_analysis/protocol/gre/GRE.cc +++ b/src/packet_analysis/protocol/gre/GRE.cc @@ -46,12 +46,6 @@ bool GREAnalyzer::AnalyzePacket(size_t len, const uint8_t* data, Packet* packet) return false; } - if ( ! BifConst::Tunnel::enable_gre ) - { - Weird("GRE_tunnel", packet); - return false; - } - if ( len < gre_header_len() ) { Weird("truncated_GRE", packet); diff --git a/src/packet_analysis/protocol/ieee802_11/IEEE802_11.cc b/src/packet_analysis/protocol/ieee802_11/IEEE802_11.cc index bd9d913f78..beb69b2616 100644 --- a/src/packet_analysis/protocol/ieee802_11/IEEE802_11.cc +++ b/src/packet_analysis/protocol/ieee802_11/IEEE802_11.cc @@ -41,7 +41,7 @@ bool IEEE802_11Analyzer::AnalyzePacket(size_t len, const uint8_t* data, Packet* if ( (fc_80211 >> 4) & 0x08 ) { // Store off whether this is an A-MSDU header, which indicates that there are - // mulitple packets following the 802.11 header. + // multiple packets following the 802.11 header. is_amsdu = (data[len_80211] & 0x80) == 0x80; // Check for the protected bit. This means the data is encrypted and we can't diff --git a/src/packet_analysis/protocol/ip/IP.cc b/src/packet_analysis/protocol/ip/IP.cc index 1803122c27..705178883f 100644 --- a/src/packet_analysis/protocol/ip/IP.cc +++ b/src/packet_analysis/protocol/ip/IP.cc @@ -87,7 +87,7 @@ bool IPAnalyzer::AnalyzePacket(size_t len, const uint8_t* data, Packet* packet) total_len = packet->cap_len - hdr_size; else // If this is caused by segmentation offloading, the checksum will - // also be incorrect. If checksum validation is enabled - jus tbail here. + // also be incorrect. If checksum validation is enabled - just bail here. return false; } diff --git a/src/packet_analysis/protocol/iptunnel/IPTunnel.cc b/src/packet_analysis/protocol/iptunnel/IPTunnel.cc index 1bfb69b09d..83032b90c9 100644 --- a/src/packet_analysis/protocol/iptunnel/IPTunnel.cc +++ b/src/packet_analysis/protocol/iptunnel/IPTunnel.cc @@ -28,12 +28,6 @@ bool IPTunnelAnalyzer::AnalyzePacket(size_t len, const uint8_t* data, Packet* pa return false; } - if ( ! BifConst::Tunnel::enable_ip ) - { - Weird("IP_tunnel", packet); - return false; - } - if ( packet->encap && packet->encap->Depth() >= BifConst::Tunnel::max_depth ) { Weird("exceeded_tunnel_max_depth", packet); diff --git a/src/packet_analysis/protocol/tcp/TCPSessionAdapter.cc b/src/packet_analysis/protocol/tcp/TCPSessionAdapter.cc index f8f400ad3e..2a3d671efc 100644 --- a/src/packet_analysis/protocol/tcp/TCPSessionAdapter.cc +++ b/src/packet_analysis/protocol/tcp/TCPSessionAdapter.cc @@ -1356,7 +1356,7 @@ void TCPSessionAdapter::ConnectionClosed(analyzer::tcp::TCP_Endpoint* endpoint, const analyzer::analyzer_list& children(GetChildren()); LOOP_OVER_CONST_CHILDREN(i) // Using this type of cast here is nasty (will crash if - // we inadvertantly have a child analyzer that's not a + // we inadvertently have a child analyzer that's not a // TCP_ApplicationAnalyzer), but we have to ... 
static_cast(*i)->ConnectionClosed(endpoint, peer, gen_event); diff --git a/src/packet_analysis/protocol/teredo/Teredo.cc b/src/packet_analysis/protocol/teredo/Teredo.cc index efab374028..e487441227 100644 --- a/src/packet_analysis/protocol/teredo/Teredo.cc +++ b/src/packet_analysis/protocol/teredo/Teredo.cc @@ -153,9 +153,6 @@ TeredoAnalyzer::TeredoAnalyzer() : zeek::packet_analysis::Analyzer("TEREDO") bool TeredoAnalyzer::AnalyzePacket(size_t len, const uint8_t* data, Packet* packet) { - if ( ! BifConst::Tunnel::enable_teredo ) - return false; - // Teredo always comes from a UDP connection, which means that session should always // be valid and always be a connection. Store this off for the span of the // processing so that it can be used for other things. Return a weird if we didn't @@ -267,9 +264,6 @@ bool TeredoAnalyzer::AnalyzePacket(size_t len, const uint8_t* data, Packet* pack bool TeredoAnalyzer::DetectProtocol(size_t len, const uint8_t* data, Packet* packet) { - if ( ! BifConst::Tunnel::enable_teredo ) - return false; - // Do some fast checks that must be true before moving to more complicated ones. // Mostly this avoids doing the regex below if we can help it. if ( (len < 40) || ((len > 8) && ((data[0] >> 4) != 6) && diff --git a/src/parse.y b/src/parse.y index 4db15571ab..d16fc5042c 100644 --- a/src/parse.y +++ b/src/parse.y @@ -7,7 +7,7 @@ %expect 211 -%token TOK_ADD TOK_ADD_TO TOK_ADDR TOK_ANY +%token TOK_ADD TOK_ADD_TO TOK_ADDR TOK_ANY TOK_ASSERT %token TOK_ATENDIF TOK_ATELSE TOK_ATIF TOK_ATIFDEF TOK_ATIFNDEF %token TOK_BOOL TOK_BREAK TOK_CASE TOK_OPTION TOK_CONST %token TOK_CONSTANT TOK_COPY TOK_COUNT TOK_DEFAULT TOK_DELETE @@ -78,6 +78,7 @@ %type capture_list opt_captures when_captures %type when_head when_start when_clause %type TOK_PATTERN_END +%type opt_assert_msg %{ #include @@ -1802,6 +1803,11 @@ stmt: script_coverage_mgr.DecIgnoreDepth(); } + | TOK_ASSERT expr opt_assert_msg ';' + { + $$ = new AssertStmt(IntrusivePtr{AdoptRef{}, $2}, {AdoptRef{}, $3}); + } + | TOK_PRINT expr_list ';' opt_no_test { set_location(@1, @3); @@ -2228,6 +2234,13 @@ resolve_id: } ; +opt_assert_msg: + ',' expr + { $$ = $2; } + | + { $$ = nullptr; } + ; + opt_no_test: TOK_NO_TEST { $$ = true; } diff --git a/src/plugin/Manager.cc b/src/plugin/Manager.cc index 12884fdc6b..3f0d4a72bc 100644 --- a/src/plugin/Manager.cc +++ b/src/plugin/Manager.cc @@ -112,10 +112,10 @@ void Manager::SearchDynamicPlugins(const std::string& dir) if ( name.empty() ) reporter->FatalError("empty plugin magic file %s", magic.c_str()); - if ( dynamic_plugins.find(lower_name) != dynamic_plugins.end() ) + if ( const auto& other = dynamic_plugins.find(lower_name); other != dynamic_plugins.end() ) { - DBG_LOG(DBG_PLUGINS, "Found already known plugin %s in %s, ignoring", name.c_str(), - dir.c_str()); + reporter->Warning("ignoring dynamic plugin %s from %s, already found in %s", + name.c_str(), dir.c_str(), other->second.c_str()); return; } @@ -699,11 +699,6 @@ void Manager::RequestEvent(EventHandlerPtr handler, Plugin* plugin) handler->SetGenerateAlways(); } -void Manager::RequestBroObjDtor(Obj* obj, Plugin* plugin) - { - obj->NotifyPluginsOnDtor(); - } - void Manager::RequestObjDtor(Obj* obj, Plugin* plugin) { obj->NotifyPluginsOnDtor(); @@ -922,57 +917,6 @@ void Manager::HookUpdateNetworkTime(double network_time) const MetaHookPost(HOOK_UPDATE_NETWORK_TIME, args, HookArgument()); } -void Manager::HookBroObjDtor(void* obj) const - { - HookArgumentList args; - - if ( HavePluginForHook(META_HOOK_PRE) ) - { - 
args.push_back(HookArgument(obj)); -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - MetaHookPre(HOOK_BRO_OBJ_DTOR, args); -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - } - -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - hook_list* l = hooks[HOOK_BRO_OBJ_DTOR]; -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - - if ( l ) - for ( hook_list::iterator i = l->begin(); i != l->end(); ++i ) - { - Plugin* p = (*i).second; -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - p->HookBroObjDtor(obj); -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - } - - if ( HavePluginForHook(META_HOOK_POST) ) -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - MetaHookPost(HOOK_BRO_OBJ_DTOR, args, HookArgument()); -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - } - void Manager::HookObjDtor(void* obj) const { HookArgumentList args; diff --git a/src/plugin/Manager.h b/src/plugin/Manager.h index 7278a1b582..d11cef0a06 100644 --- a/src/plugin/Manager.h +++ b/src/plugin/Manager.h @@ -217,18 +217,6 @@ public: */ void RequestEvent(EventHandlerPtr handler, Plugin* plugin); - /** - * Register interest in the destruction of a Obj instance. When Zeek's - * reference counting triggers the objects destructor to run, the \a - * HookBroObjDtor will be called. - * - * @param handler The object being interested in. - * - * @param plugin The plugin expressing interest. - */ - [[deprecated("Remove in v6.1. Use RequestObjDtor.")]] void RequestBroObjDtor(Obj* obj, - Plugin* plugin); - /** * Register interest in the destruction of a Obj instance. When Zeek's * reference counting triggers the objects destructor to run, the \a @@ -332,12 +320,6 @@ public: */ void HookDrainEvents() const; - /** - * Hook that informs plugins that an Obj is being destroyed. Will be - * called only for objects that a plugin has expressed interest in. - */ - [[deprecated("Remove in v6.1. Use HookObjDtor.")]] void HookBroObjDtor(void* obj) const; - /** * Hook that informs plugins that an Obj is being destroyed. Will be * called only for objects that a plugin has expressed interest in. 
diff --git a/src/plugin/Plugin.cc b/src/plugin/Plugin.cc index af005a40f9..21fbf37341 100644 --- a/src/plugin/Plugin.cc +++ b/src/plugin/Plugin.cc @@ -27,7 +27,6 @@ const char* hook_name(HookType h) "QueueEvent", "DrainEvents", "UpdateNetworkTime", - "BroObjDtor", "SetupAnalyzerTree", "LogInit", "LogWrite", @@ -381,18 +380,6 @@ void Plugin::RequestEvent(EventHandlerPtr handler) plugin_mgr->RequestEvent(handler, this); } -void Plugin::RequestBroObjDtor(Obj* obj) - { -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - plugin_mgr->RequestBroObjDtor(obj, this); -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - } - void Plugin::RequestObjDtor(Obj* obj) { plugin_mgr->RequestObjDtor(obj, this); @@ -427,8 +414,6 @@ void Plugin::HookUpdateNetworkTime(double network_time) { } void Plugin::HookSetupAnalyzerTree(Connection* conn) { } -void Plugin::HookBroObjDtor(void* obj) { } - void Plugin::HookObjDtor(void* obj) { } void Plugin::HookLogInit(const std::string& writer, const std::string& instantiating_filter, diff --git a/src/plugin/Plugin.h b/src/plugin/Plugin.h index d4f34f28ff..89c0d3515a 100644 --- a/src/plugin/Plugin.h +++ b/src/plugin/Plugin.h @@ -18,8 +18,6 @@ // building Zeek itself. #ifndef ZEEK_PLUGIN_SKIP_VERSION_CHECK #include "zeek/zeek-version.h" -// Remove the BRO define in v6.1. -#define BRO_PLUGIN_BRO_VERSION ZEEK_VERSION_FUNCTION #define ZEEK_PLUGIN_ZEEK_VERSION ZEEK_VERSION_FUNCTION #endif @@ -77,7 +75,6 @@ enum HookType HOOK_QUEUE_EVENT, //< Activates Plugin::HookQueueEvent(). HOOK_DRAIN_EVENTS, //< Activates Plugin::HookDrainEvents(). HOOK_UPDATE_NETWORK_TIME, //< Activates Plugin::HookUpdateNetworkTime(). - HOOK_BRO_OBJ_DTOR [[deprecated("Remove in v6.1. Use HOOK_OBJ_DTOR.")]], HOOK_SETUP_ANALYZER_TREE, //< Activates Plugin::HookAddToAnalyzerTree(). HOOK_LOG_INIT, //< Activates Plugin::HookLogInit(). HOOK_LOG_WRITE, //< Activates Plugin::HookLogWrite(). @@ -126,10 +123,6 @@ public: // We force this to inline so that the API version gets hardcoded // into the external plugin. (Technically, it's not a "force", just a // strong hint.). The attribute seems generally available. -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif inline Configuration() __attribute__((always_inline)) { // Only bake in a ZEEK_PLUGIN_ZEEK_VERSION reference into external plugins. The @@ -138,55 +131,30 @@ public: // CMakeLists.txt file for the Zeek::Internal target, which only exists when // building Zeek itself. 
#ifndef ZEEK_PLUGIN_SKIP_VERSION_CHECK - bro_version = ZEEK_PLUGIN_ZEEK_VERSION; zeek_version = ZEEK_PLUGIN_ZEEK_VERSION; #endif } -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif Configuration(Configuration&& c) { - bro_version = std::move(c.bro_version); zeek_version = std::move(c.zeek_version); name = std::move(c.name); description = std::move(c.description); version = std::move(c.version); } -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif Configuration(const Configuration& c) { - bro_version = c.bro_version; zeek_version = c.zeek_version; name = c.name; description = c.description; version = c.version; } -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif Configuration& operator=(Configuration&& c) { - bro_version = std::move(c.bro_version); zeek_version = std::move(c.zeek_version); name = std::move(c.name); @@ -195,17 +163,9 @@ public: return *this; } -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif Configuration& operator=(const Configuration& c) { - bro_version = c.bro_version; zeek_version = c.zeek_version; name = c.name; @@ -214,24 +174,8 @@ public: return *this; } -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif ~Configuration() { } -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - - /** - * One can assign ZEEK_PLUGIN_ZEEK_VERSION to this to catch - * version mismatches at link(!) time. - */ - [[deprecated("Remove in v6.1. Use zeek_version.")]] std::function bro_version; /** * One can assign ZEEK_PLUGIN_ZEEK_VERSION to this to catch @@ -847,11 +791,6 @@ public: */ bool LoadZeekFile(const std::string& file); - [[deprecated("Remove in v6.1. Use LoadZeekFile.")]] bool LoadBroFile(const std::string& file) - { - return LoadZeekFile(file); - } - protected: friend class Manager; @@ -936,17 +875,6 @@ protected: */ void RequestEvent(EventHandlerPtr handler); - /** - * Registers interest in the destruction of a Obj instance. When - * Zeek's reference counting triggers the objects destructor to run, - * \a HookBroObjDtor will be called. - * - * Note that this can get expensive if triggered for many objects. - * - * @param obj The object being interested in. - */ - [[deprecated("Remove in v6.1. Use RequestObjDtor.")]] void RequestBroObjDtor(Obj* obj); - /** * Registers interest in the destruction of a Obj instance. When * Zeek's reference counting triggers the objects destructor to run, @@ -1092,18 +1020,6 @@ protected: */ virtual void HookSetupAnalyzerTree(Connection* conn); - /** - * Hook for destruction of objects registered with - * RequestBroObjDtor(). When Zeek's reference counting triggers the - * objects destructor to run, this method will be run. It may also - * run for other objects that this plugin has not registered for. - * - * @param obj A pointer to the object being destroyed. Note that the - * object is already considered invalid and the pointer must not be - * dereferenced. - */ - [[deprecated("Remove in v6.1. 
Use HookObjDtor.")]] virtual void HookBroObjDtor(void* obj); - /** * Hook for destruction of objects registered with * RequestObjDtor(). When Zeek's reference counting triggers the @@ -1276,8 +1192,3 @@ private: } // namespace plugin } // namespace zeek - -// Increase this when making incompatible changes to the plugin API. Note -// that the constant is never used in C code. It's picked up on by CMake. -constexpr int BRO_PLUGIN_API_VERSION - [[deprecated("Remove in v6.1. Use zeek::PLUGIN_API_VERSION")]] = zeek::PLUGIN_API_VERSION; diff --git a/src/probabilistic/CardinalityCounter.h b/src/probabilistic/CardinalityCounter.h index e3f97b7b33..9650e207fb 100644 --- a/src/probabilistic/CardinalityCounter.h +++ b/src/probabilistic/CardinalityCounter.h @@ -136,7 +136,7 @@ private: * Define x = 2*(log(1.04*k/error)/log(2)). Then b is the ceiling of x. * * After that initial estimate, the value of b is increased until the - * standard deviation falls within the specified valud. + * standard deviation falls within the specified value. * * @param error error margin * diff --git a/src/scan.l b/src/scan.l index c961b928e2..f1a8ecd201 100644 --- a/src/scan.l +++ b/src/scan.l @@ -321,6 +321,7 @@ add return TOK_ADD; addr return TOK_ADDR; any return TOK_ANY; as return TOK_AS; +assert return TOK_ASSERT; bool return TOK_BOOL; break return TOK_BREAK; case return TOK_CASE; diff --git a/src/script_opt/CPP/Compile.h b/src/script_opt/CPP/Compile.h index 6b110a00de..d0809fbb6a 100644 --- a/src/script_opt/CPP/Compile.h +++ b/src/script_opt/CPP/Compile.h @@ -776,7 +776,7 @@ private: std::string GenConstExpr(const ConstExpr* c, GenType gt); std::string GenIncrExpr(const Expr* e, GenType gt, bool is_incr, bool top_level); std::string GenCondExpr(const Expr* e, GenType gt); - std::string GenCallExpr(const CallExpr* c, GenType gt); + std::string GenCallExpr(const CallExpr* c, GenType gt, bool top_level); std::string GenInExpr(const Expr* e, GenType gt); std::string GenFieldExpr(const FieldExpr* fe, GenType gt); std::string GenHasFieldExpr(const HasFieldExpr* hfe, GenType gt); diff --git a/src/script_opt/CPP/DeclFunc.cc b/src/script_opt/CPP/DeclFunc.cc index f46fe6b76c..12bedf82b2 100644 --- a/src/script_opt/CPP/DeclFunc.cc +++ b/src/script_opt/CPP/DeclFunc.cc @@ -146,7 +146,7 @@ void CPPCompile::DeclareSubclass(const FuncTypePtr& ft, const ProfileFunc* pf, c Emit("%s_cl(const char* name%s) : CPPStmt(name, %s)%s { }", fname, addl_args, loc_info, inits); // An additional constructor just used to generate place-holder - // instances, due to the mis-design that lambdas are identified + // instances, due to the misdesign that lambdas are identified // by their Func objects rather than their FuncVal objects. if ( lambda_ids && lambda_ids->length() > 0 ) Emit("%s_cl(const char* name) : CPPStmt(name, %s) { }", fname, loc_info); diff --git a/src/script_opt/CPP/Exprs.cc b/src/script_opt/CPP/Exprs.cc index b0ec6cd8e4..c9cde5ab0c 100644 --- a/src/script_opt/CPP/Exprs.cc +++ b/src/script_opt/CPP/Exprs.cc @@ -114,7 +114,7 @@ string CPPCompile::GenExpr(const Expr* e, GenType gt, bool top_level) case EXPR_COND: return GenCondExpr(e, gt); case EXPR_CALL: - return GenCallExpr(e->AsCallExpr(), gt); + return GenCallExpr(e->AsCallExpr(), gt, top_level); case EXPR_LIST: return GenListExpr(e, gt, false); case EXPR_IN: @@ -291,7 +291,7 @@ string CPPCompile::GenCondExpr(const Expr* e, GenType gt) return string("(") + gen1 + ") ? 
(" + gen2 + ") : (" + gen3 + ")"; } -string CPPCompile::GenCallExpr(const CallExpr* c, GenType gt) +string CPPCompile::GenCallExpr(const CallExpr* c, GenType gt, bool top_level) { const auto& t = c->GetType(); auto f = c->Func(); @@ -347,7 +347,18 @@ string CPPCompile::GenCallExpr(const CallExpr* c, GenType gt) // Indirect call. gen = string("(") + gen + ")->AsFunc()"; - string invoke_func = is_async ? "when_invoke__CPP" : "invoke__CPP"; + string invoke_func; + + if ( is_async ) + invoke_func = "when_invoke__CPP"; + else if ( t->Tag() == TYPE_VOID ) + { + ASSERT(top_level); + invoke_func = "invoke_void__CPP"; + } + else + invoke_func = "invoke__CPP"; + auto args_list = string(", {") + GenExpr(args_l, GEN_VAL_PTR) + "}"; auto invoker = invoke_func + "(" + gen + args_list + ", f__CPP"; @@ -356,6 +367,10 @@ string CPPCompile::GenCallExpr(const CallExpr* c, GenType gt) invoker += ")"; + if ( top_level ) + // No need to use accessor. + return invoker; + if ( IsNativeType(t) && gt != GEN_VAL_PTR ) return invoker + NativeAccessor(t); diff --git a/src/script_opt/CPP/RuntimeOps.cc b/src/script_opt/CPP/RuntimeOps.cc index 61732c6f21..b74ac496e7 100644 --- a/src/script_opt/CPP/RuntimeOps.cc +++ b/src/script_opt/CPP/RuntimeOps.cc @@ -51,9 +51,13 @@ ValPtr index_table__CPP(const TableValPtr& t, vector indices) ValPtr index_vec__CPP(const VectorValPtr& vec, int index) { + if ( index < 0 ) + index += vec->Size(); + auto v = vec->ValAt(index); if ( ! v ) reporter->CPPRuntimeError("no such index"); + return v; } diff --git a/src/script_opt/CPP/RuntimeOps.h b/src/script_opt/CPP/RuntimeOps.h index 4318e7c7ff..8ed8622941 100644 --- a/src/script_opt/CPP/RuntimeOps.h +++ b/src/script_opt/CPP/RuntimeOps.h @@ -48,12 +48,28 @@ extern ValPtr when_index_vec__CPP(const VectorValPtr& vec, int index); // custom one for those occurring inside a "when" clause. extern ValPtr when_index_slice__CPP(VectorVal* vec, const ListVal* lv); +// Calls out to the given script or BiF function, which does not return +// a value. +inline ValPtr invoke_void__CPP(Func* f, std::vector args, Frame* frame) + { + return f->Invoke(&args, frame); + } + +// Used for error propagation by failed calls. +class CPPInterpreterException : public InterpreterException + { + }; + // Calls out to the given script or BiF function. A separate function because // of the need to (1) construct the "args" vector using {} initializers, // but (2) needing to have the address of that vector. inline ValPtr invoke__CPP(Func* f, std::vector args, Frame* frame) { - return f->Invoke(&args, frame); + auto v = f->Invoke(&args, frame); + if ( ! v ) + throw CPPInterpreterException(); + + return v; } // The same, but raises an interpreter exception if the function does diff --git a/src/script_opt/CPP/RuntimeVec.cc b/src/script_opt/CPP/RuntimeVec.cc index bc0be1182e..6208a6f7e0 100644 --- a/src/script_opt/CPP/RuntimeVec.cc +++ b/src/script_opt/CPP/RuntimeVec.cc @@ -26,12 +26,17 @@ static bool check_vec_sizes__CPP(const VectorValPtr& v1, const VectorValPtr& v2) // (for example, adding one vector of "interval" to another), which // we want to do using the low-level representations. We'll later // convert the vector to the high-level representation if needed. -static VectorTypePtr base_vector_type__CPP(const VectorTypePtr& vt) +// +// One exception: for booleans ("is_bool" is true), we use those directly. 
+static VectorTypePtr base_vector_type__CPP(const VectorTypePtr& vt, bool is_bool = false) { switch ( vt->Yield()->InternalType() ) { case TYPE_INTERNAL_INT: - return make_intrusive(base_type(TYPE_INT)); + { + auto base_tag = is_bool ? TYPE_BOOL : TYPE_INT; + return make_intrusive(base_type(base_tag)); + } case TYPE_INTERNAL_UNSIGNED: return make_intrusive(base_type(TYPE_COUNT)); @@ -119,36 +124,27 @@ VEC_OP1(comp, ~, ) } // Analogous to VEC_OP1, instantiates a function for a given binary operation, -// which might-or-might-not be supported for low-level "double" types. +// with customimzable kernels for "int" and "double" operations. // This version is for operations whose result type is the same as the // operand type. -#define VEC_OP2(name, op, double_kernel, zero_check) \ +#define VEC_OP2(name, op, int_kernel, double_kernel, zero_check, is_bool) \ VectorValPtr vec_op_##name##__CPP(const VectorValPtr& v1, const VectorValPtr& v2) \ { \ if ( ! check_vec_sizes__CPP(v1, v2) ) \ return nullptr; \ \ - auto vt = base_vector_type__CPP(v1->GetType()); \ + auto vt = base_vector_type__CPP(v1->GetType(), is_bool); \ auto v_result = make_intrusive(vt); \ \ switch ( vt->Yield()->InternalType() ) \ { \ - case TYPE_INTERNAL_INT: \ - { \ - if ( vt->Yield()->Tag() == TYPE_BOOL ) \ - VEC_OP2_KERNEL(AsBool, BoolVal, op, zero_check) \ - else \ - VEC_OP2_KERNEL(AsInt, IntVal, op, zero_check) \ - break; \ - } \ - \ case TYPE_INTERNAL_UNSIGNED: \ { \ VEC_OP2_KERNEL(AsCount, CountVal, op, zero_check) \ break; \ } \ \ - double_kernel \ + int_kernel double_kernel \ \ default : break; \ } \ @@ -156,9 +152,29 @@ VEC_OP1(comp, ~, ) return v_result; \ } +// Instantiates a regular int_kernel for a binary operation. +#define VEC_OP2_WITH_INT(name, op, double_kernel, zero_check) \ + VEC_OP2( \ + name, op, case TYPE_INTERNAL_INT \ + : { \ + VEC_OP2_KERNEL(AsInt, IntVal, op, zero_check) \ + break; \ + }, \ + double_kernel, zero_check, false) + +// Instantiates an int_kernel for boolean operations. +#define VEC_OP2_WITH_BOOL(name, op, zero_check) \ + VEC_OP2( \ + name, op, case TYPE_INTERNAL_INT \ + : { \ + VEC_OP2_KERNEL(AsBool, BoolVal, op, zero_check) \ + break; \ + }, \ + , zero_check, true) + // Instantiates a double_kernel for a binary operation. #define VEC_OP2_WITH_DOUBLE(name, op, zero_check) \ - VEC_OP2( \ + VEC_OP2_WITH_INT( \ name, op, case TYPE_INTERNAL_DOUBLE \ : { \ VEC_OP2_KERNEL(AsDouble, DoubleVal, op, zero_check) \ @@ -171,14 +187,14 @@ VEC_OP2_WITH_DOUBLE(add, +, 0) VEC_OP2_WITH_DOUBLE(sub, -, 0) VEC_OP2_WITH_DOUBLE(mul, *, 0) VEC_OP2_WITH_DOUBLE(div, /, 1) -VEC_OP2(mod, %, , 1) -VEC_OP2(and, &, , 0) -VEC_OP2(or, |, , 0) -VEC_OP2(xor, ^, , 0) -VEC_OP2(andand, &&, , 0) -VEC_OP2(oror, ||, , 0) -VEC_OP2(lshift, <<, , 0) -VEC_OP2(rshift, >>, , 0) +VEC_OP2_WITH_INT(mod, %, , 1) +VEC_OP2_WITH_INT(and, &, , 0) +VEC_OP2_WITH_INT(or, |, , 0) +VEC_OP2_WITH_INT(xor, ^, , 0) +VEC_OP2_WITH_BOOL(andand, &&, 0) +VEC_OP2_WITH_BOOL(oror, ||, 0) +VEC_OP2_WITH_INT(lshift, <<, , 0) +VEC_OP2_WITH_INT(rshift, >>, , 0) // A version of VEC_OP2 that instead supports relational operations, so // the result type is always vector-of-bool. diff --git a/src/script_opt/CPP/maint/README b/src/script_opt/CPP/maint/README index 072b9dd222..d48ba575b6 100644 --- a/src/script_opt/CPP/maint/README +++ b/src/script_opt/CPP/maint/README @@ -34,7 +34,9 @@ The maintenance workflow: 5. Run "check-CPP-gen.sh" for each Zeek file that passed "check-zeek.sh". 
This will generate a corresponding file in CPP-test/out* indicating whether "-O gen-C++" can successfully run on the input. Presently, it should - be able to do so for all of them. + be able to do so for all of them except a few that have conditional code, + which I've left active (no @TEST-REQUIRES to prune) given hopes of + soon being able to support (most) conditional code for C++ compilation. This step is parallelizable, say using xargs -P 10 -n 1. diff --git a/src/script_opt/Expr.cc b/src/script_opt/Expr.cc index c8ef874880..b2e9880056 100644 --- a/src/script_opt/Expr.cc +++ b/src/script_opt/Expr.cc @@ -414,7 +414,7 @@ ExprPtr Expr::AssignToTemporary(ExprPtr e, Reducer* c, StmtPtr& red_stmt) // Important: our result is not result_tmp, but a duplicate of it. // This is important because subsequent passes that associate - // information with Expr's need to not mis-associate that + // information with Expr's need to not misassociate that // information with both the assignment creating the temporary, // and the subsequent use of the temporary. return result_tmp->Duplicate(); diff --git a/src/script_opt/IDOptInfo.h b/src/script_opt/IDOptInfo.h index a55f5791a8..df076f1b36 100644 --- a/src/script_opt/IDOptInfo.h +++ b/src/script_opt/IDOptInfo.h @@ -59,10 +59,10 @@ public: // (= more inner) block level. int BlockLevel() const { return block_level; } - // True if in the region the identifer could be defined. + // True if in the region the identifier could be defined. bool MaybeDefined() const { return maybe_defined; } - // Returns (or sets) the statement after which the identifer is + // Returns (or sets) the statement after which the identifier is // (definitely) defined, or NO_DEF if it doesn't have a definite // point of definition. int DefinedAfter() const { return defined; } @@ -198,7 +198,7 @@ public: // the block. void ConfluenceBlockEndsAfter(const Stmt* s, bool no_orig_flow); - // All of these regard the identifer's state just *prior* to + // All of these regard the identifier's state just *prior* to // executing the given statement. bool IsPossiblyDefinedBefore(const Stmt* s); bool IsDefinedBefore(const Stmt* s); diff --git a/src/script_opt/ProfileFunc.h b/src/script_opt/ProfileFunc.h index cc2605d1ec..07d56ef39e 100644 --- a/src/script_opt/ProfileFunc.h +++ b/src/script_opt/ProfileFunc.h @@ -3,7 +3,7 @@ // Classes for traversing functions and their body ASTs to build up profiles // of the various elements (types, globals, locals, lambdas, etc.) that appear. // These profiles enable script optimization to make decisions regarding -// compilability and how to efficiently provide run-time components. +// compatibility and how to efficiently provide run-time components. // For all of the following, we use the term "function" to refer to a single // ScriptFunc/body pair, so an event handler or hook with multiple bodies // is treated as multiple distinct "function"'s. diff --git a/src/script_opt/ScriptOpt.h b/src/script_opt/ScriptOpt.h index 13fb7966c3..f262149a37 100644 --- a/src/script_opt/ScriptOpt.h +++ b/src/script_opt/ScriptOpt.h @@ -43,7 +43,7 @@ struct AnalyOpt // Whether to analyze scripts. bool activate = false; - // If true, compile all compileable functions, even those that + // If true, compile all compilable functions, even those that // are inlined. Mainly useful for ensuring compatibility for // some tests in the test suite. 
bool compile_all = false; diff --git a/src/script_opt/Stmt.cc b/src/script_opt/Stmt.cc index bb3e0c551b..5ffdda0f28 100644 --- a/src/script_opt/Stmt.cc +++ b/src/script_opt/Stmt.cc @@ -919,6 +919,21 @@ StmtPtr InitStmt::DoReduce(Reducer* c) return ThisPtr(); } +StmtPtr AssertStmt::Duplicate() + { + return SetSucc(new AssertStmt(cond->Duplicate(), msg ? msg->Duplicate() : nullptr)); + } + +bool AssertStmt::IsReduced(Reducer* c) const + { + return false; + } + +StmtPtr AssertStmt::DoReduce(Reducer* c) + { + return make_intrusive(); + } + StmtPtr WhenStmt::Duplicate() { FuncType::CaptureList* cl_dup = nullptr; diff --git a/src/script_opt/ZAM/Compile.h b/src/script_opt/ZAM/Compile.h index e1cd35bf2b..1287dcdc6a 100644 --- a/src/script_opt/ZAM/Compile.h +++ b/src/script_opt/ZAM/Compile.h @@ -333,7 +333,10 @@ private: // Returns how many values were added. int InternalAddVal(ZInstAux* zi, int i, Expr* e); - const ZAMStmt AddInst(const ZInstI& inst); + // Adds the given instruction to the ZAM program. The second + // argument, if true, suppresses generation of any pending + // global/capture store for this instruction. + const ZAMStmt AddInst(const ZInstI& inst, bool suppress_non_local = false); // Returns the statement just before the given one. ZAMStmt PrevStmt(const ZAMStmt s); diff --git a/src/script_opt/ZAM/Low-Level.cc b/src/script_opt/ZAM/Low-Level.cc index 6aefbea1f0..40fd4c30df 100644 --- a/src/script_opt/ZAM/Low-Level.cc +++ b/src/script_opt/ZAM/Low-Level.cc @@ -128,7 +128,7 @@ int ZAMCompiler::InternalAddVal(ZInstAux* zi, int i, Expr* e) return 1; } -const ZAMStmt ZAMCompiler::AddInst(const ZInstI& inst) +const ZAMStmt ZAMCompiler::AddInst(const ZInstI& inst, bool suppress_non_local) { ZInstI* i; @@ -146,17 +146,22 @@ const ZAMStmt ZAMCompiler::AddInst(const ZInstI& inst) top_main_inst = insts1.size() - 1; - if ( pending_global_store < 0 ) + if ( suppress_non_local ) return ZAMStmt(top_main_inst); - auto global_slot = pending_global_store; - pending_global_store = -1; + if ( pending_global_store >= 0 ) + { + auto gs = pending_global_store; + pending_global_store = -1; - auto store_inst = ZInstI(OP_STORE_GLOBAL_V, global_slot); - store_inst.op_type = OP_V_I1; - store_inst.t = globalsI[global_slot].id->GetType(); + auto store_inst = ZInstI(OP_STORE_GLOBAL_V, gs); + store_inst.op_type = OP_V_I1; + store_inst.t = globalsI[gs].id->GetType(); - return AddInst(store_inst); + return AddInst(store_inst); + } + + return ZAMStmt(top_main_inst); } const Stmt* ZAMCompiler::LastStmt(const Stmt* s) const diff --git a/src/script_opt/ZAM/Ops.in b/src/script_opt/ZAM/Ops.in index b6b09c7645..89bfccdc35 100644 --- a/src/script_opt/ZAM/Ops.in +++ b/src/script_opt/ZAM/Ops.in @@ -438,7 +438,7 @@ macro EvalCast(rhs) else ZAM_run_time_error(z.loc, error.c_str()); -# Cast an "any" type to the given type. Only needed for type-based swith +# Cast an "any" type to the given type. Only needed for type-based switch # statements. 
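The AddInst() signature change above (Compile.h and Low-Level.cc) turns the global-store handling into an explicit two-step: an instruction is appended, and any pending global store is then flushed as a separate STORE_GLOBAL instruction unless the caller suppresses it. Below is a minimal, self-contained sketch of that pattern; the MiniCompiler/Instr types, the string opcodes, and the -1 "nothing pending" sentinel are illustrative assumptions, not Zeek's actual ZAM classes.

    // Minimal sketch of the "pending global store" pattern; hypothetical
    // types, not Zeek's real ZAMCompiler API.
    #include <cstdio>
    #include <string>
    #include <vector>

    struct Instr
        {
        std::string op;
        int slot = -1;
        };

    class MiniCompiler
        {
    public:
        // Mirrors the shape of AddInst(inst, suppress_non_local): append the
        // instruction and, unless suppressed, flush any pending global store.
        int AddInst(const Instr& inst, bool suppress_non_local = false)
            {
            prog.push_back(inst);
            int top = static_cast<int>(prog.size()) - 1;

            if ( suppress_non_local )
                return top;

            if ( pending_global_store >= 0 )
                {
                int gs = pending_global_store;
                pending_global_store = -1; // clear before recursing
                return AddInst({"STORE_GLOBAL", gs});
                }

            return top;
            }

        // A load must not trigger a store of the value it is about to read,
        // hence the suppression flag (analogous to LoadGlobal() in Vars.cc).
        int LoadGlobal(int slot) { return AddInst({"LOAD_GLOBAL", slot}, true); }

        // Marks a global as written; the store is emitted lazily by the next
        // AddInst() call that is not suppressed.
        void MarkGlobalWrite(int slot) { pending_global_store = slot; }

        void Dump() const
            {
            for ( const auto& i : prog )
                std::printf("%s %d\n", i.op.c_str(), i.slot);
            }

    private:
        std::vector<Instr> prog;
        int pending_global_store = -1; // -1 means "nothing pending"
        };

    int main()
        {
        MiniCompiler c;
        c.LoadGlobal(3);       // no store generated
        c.MarkGlobalWrite(3);
        c.AddInst({"ADD", 3}); // automatically followed by STORE_GLOBAL 3
        c.Dump();
        }
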
internal-op Cast-Any type VV @@ -960,8 +960,10 @@ eval EvalIndexVec(frame[z.v3].uint_val) macro EvalIndexVec(index) auto vv = frame[z.v2].vector_val->RawVec(); const auto& vec = *vv; - zeek_uint_t ind = index; - if ( ind >= vv->size() ) + zeek_int_t ind = index; + if ( ind < 0 ) + ind += vv->size(); + if ( ind < 0 || ind >= vv->size() ) ZAM_run_time_error(z.loc, "no such index"); AssignV1(CopyVal(*vec[ind])) @@ -975,8 +977,10 @@ eval EvalIndexAnyVec(frame[z.v3].uint_val) macro EvalIndexAnyVec(index) auto vv = frame[z.v2].vector_val; - zeek_uint_t ind = index; - if ( ind >= vv->Size() ) + zeek_int_t ind = index; + if ( ind < 0 ) + ind += vv->Size(); + if ( ind < 0 || ind >= vv->Size() ) ZAM_run_time_error(z.loc, "no such index"); AssignV1(ZVal(vv->ValAt(ind).release())) diff --git a/src/script_opt/ZAM/README.md b/src/script_opt/ZAM/README.md index 3ba84b67b1..9d2ac86c72 100644 --- a/src/script_opt/ZAM/README.md +++ b/src/script_opt/ZAM/README.md @@ -61,7 +61,7 @@ of course this varies for different users. ### Deficiencies to eventually fix: -* Error messages in compiled scripts often lack important identifying +* Error messages in compiled scripts have diminished identifying information. * The optimizer assumes you have ensured initialization of your variables. @@ -75,13 +75,12 @@ of the expression are modified by compiled scripts. ### Incompatibilities: +* ZAM ignores `assert` statements. + * When printing scripts (such as in some error messages), the names of variables often reflect internal temporaries rather than the original variables. -* The deprecated feature of intermixing vectors and scalars in operations -(e.g., `v2 = v1 * 3`) is not supported. - * The `same_object()` BiF will always deem two non-container values as different. diff --git a/src/script_opt/ZAM/Stmt.cc b/src/script_opt/ZAM/Stmt.cc index d35c279c38..4d8e385c14 100644 --- a/src/script_opt/ZAM/Stmt.cc +++ b/src/script_opt/ZAM/Stmt.cc @@ -1047,14 +1047,7 @@ const ZAMStmt ZAMCompiler::CompileCatchReturn(const CatchReturnStmt* cr) ResolveCatchReturns(GoToTargetBeyond(block_end)); - // If control flow runs off the end of the block, then we need - // to consider sync'ing globals at that point. - auto block_last = LastStmt(block.get()); - - if ( block_last->Tag() == STMT_RETURN ) - return block_end; - - return top_main_inst; + return block_end; } const ZAMStmt ZAMCompiler::CompileStmts(const StmtList* ws) diff --git a/src/script_opt/ZAM/Support.cc b/src/script_opt/ZAM/Support.cc index 83d8828351..f813f86758 100644 --- a/src/script_opt/ZAM/Support.cc +++ b/src/script_opt/ZAM/Support.cc @@ -35,7 +35,8 @@ bool is_ZAM_compilable(const ProfileFunc* pf, const char** reason) } auto b = pf->ProfiledBody(); - if ( b && ! script_is_valid(b) ) + auto is_hook = pf->ProfiledFunc()->Flavor() == FUNC_FLAVOR_HOOK; + if ( b && ! 
script_is_valid(b, is_hook) ) { if ( reason ) *reason = "invalid script body"; diff --git a/src/script_opt/ZAM/Vars.cc b/src/script_opt/ZAM/Vars.cc index 543b32e32d..4bd1653c80 100644 --- a/src/script_opt/ZAM/Vars.cc +++ b/src/script_opt/ZAM/Vars.cc @@ -66,7 +66,7 @@ const ZAMStmt ZAMCompiler::LoadGlobal(const ID* id) z.aux = new ZInstAux(0); z.aux->id_val = id; - return AddInst(z); + return AddInst(z, true); } int ZAMCompiler::AddToFrame(const ID* id) @@ -81,38 +81,27 @@ int ZAMCompiler::FrameSlot(const ID* id) auto slot = RawSlot(id); if ( id->IsGlobal() ) - (void)LoadGlobal(frame_denizens[slot]); + (void)LoadGlobal(id); return slot; } int ZAMCompiler::Frame1Slot(const ID* id, ZAMOp1Flavor fl) { - auto slot = RawSlot(id); + if ( fl == OP1_READ ) + return FrameSlot(id); - switch ( fl ) - { - case OP1_READ: - if ( id->IsGlobal() ) - (void)LoadGlobal(frame_denizens[slot]); - break; + if ( fl == OP1_INTERNAL ) + return RawSlot(id); - case OP1_WRITE: - if ( id->IsGlobal() ) - pending_global_store = global_id_to_info[id]; - break; + ASSERT(fl == OP1_WRITE || fl == OP1_READ_WRITE); - case OP1_READ_WRITE: - if ( id->IsGlobal() ) - { - (void)LoadGlobal(frame_denizens[slot]); - pending_global_store = global_id_to_info[id]; - } - break; + // Important: get the slot *before* tracking non-locals, so we don't + // prematurely generate a Store for the read/write case. + auto slot = fl == OP1_READ_WRITE ? FrameSlot(id) : RawSlot(id); - case OP1_INTERNAL: - break; - } + if ( id->IsGlobal() ) + pending_global_store = global_id_to_info[id]; return slot; } diff --git a/src/spicy/manager.cc b/src/spicy/manager.cc index 4c36a8c256..37300e75bf 100644 --- a/src/spicy/manager.cc +++ b/src/spicy/manager.cc @@ -201,7 +201,7 @@ void Manager::registerPacketAnalyzer(const std::string& name, const std::string& ::zeek::detail::zeekygen_mgr->Script(info.name_zeekygen); ::zeek::detail::set_location(makeLocation(info.name_zeekygen)); - // TODO: Should Zeek do this? It has run component intialization at + // TODO: Should Zeek do this? It has run component initialization at // this point already, so ours won't get initialized anymore. 
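The EvalIndexVec/EvalIndexAnyVec changes in Ops.in above add support for negative vector indices: the index is now treated as signed, negative values count back from the end of the vector, and anything still out of range raises the "no such index" runtime error (exercised by the new language.vector-neg-index baseline further below). A standalone sketch of that normalization, using plain std::vector rather than ZAM frames and simplified by assumption:

    // Standalone sketch of negative-index normalization as done by the new
    // EvalIndexVec logic; types and error handling are simplified.
    #include <cstdint>
    #include <cstdio>
    #include <optional>
    #include <vector>

    using zeek_int_t = int64_t;

    // Returns the element at 'index', where -1 is the last element, -2 the
    // second-to-last, and so on. Out-of-range indices (including ones that
    // stay negative after adjustment) are reported as "no such index".
    std::optional<int> index_vec(const std::vector<int>& vec, zeek_int_t index)
        {
        zeek_int_t ind = index;

        if ( ind < 0 )
            ind += static_cast<zeek_int_t>(vec.size());

        if ( ind < 0 || ind >= static_cast<zeek_int_t>(vec.size()) )
            return std::nullopt; // ZAM raises "no such index" here

        return vec[static_cast<size_t>(ind)];
        }

    int main()
        {
        std::vector<int> v = {10, 20, 30};

        for ( zeek_int_t i : {0, -1, -3, -4, 3} )
            {
            auto r = index_vec(v, i);
            if ( r )
                std::printf("v[%lld] = %d\n", static_cast<long long>(i), *r);
            else
                std::printf("v[%lld]: no such index\n", static_cast<long long>(i));
            }
        }
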
c->Initialize(); diff --git a/src/spicy/spicyz/CMakeLists.txt b/src/spicy/spicyz/CMakeLists.txt index 003564af21..f49d8a309f 100644 --- a/src/spicy/spicyz/CMakeLists.txt +++ b/src/spicy/spicyz/CMakeLists.txt @@ -5,6 +5,8 @@ install(FILES ${CMAKE_CURRENT_BINARY_DIR}/config.h DESTINATION include/zeek/spic add_executable(spicyz driver.cc glue-compiler.cc main.cc) target_compile_options(spicyz PRIVATE "-Wall") +target_compile_features(spicyz PRIVATE "${ZEEK_CXX_STD}") +set_target_properties(spicyz PROPERTIES CXX_EXTENSIONS OFF) target_include_directories(spicyz PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) target_link_libraries(spicyz PRIVATE hilti spicy) diff --git a/src/spicy/spicyz/main.cc b/src/spicy/spicyz/main.cc index 3945dfe7d8..015e37e2a3 100644 --- a/src/spicy/spicyz/main.cc +++ b/src/spicy/spicyz/main.cc @@ -30,7 +30,7 @@ static struct option long_driver_options[] = {{"abort-on-exceptions", required_a {"output-c++-files", no_argument, nullptr, 'x'}, {"print-module-path", no_argument, nullptr, 'M'}, {"print-plugin-path", no_argument, nullptr, - 'P'}, // for backwards compatiblity + 'P'}, // for backwards compatibility {"print-prefix-path", no_argument, nullptr, 'p'}, {"print-zeek-config", no_argument, nullptr, 'z'}, {"report-times", required_argument, nullptr, 'R'}, diff --git a/src/strings.bif b/src/strings.bif index 1144bae14f..0c280c58ac 100644 --- a/src/strings.bif +++ b/src/strings.bif @@ -409,7 +409,7 @@ function split_string%(str: string, re: pattern%): string_vec ## re: The pattern describing the separator to split *str* in two pieces. ## ## Returns: An array of strings with two elements in which the first represents -## the substring in *str* up to the first occurence of *re*, and the +## the substring in *str* up to the first occurrence of *re*, and the ## second everything after *re*. An array of one string is returned ## when *s* cannot be split. ## @@ -474,7 +474,7 @@ function split_string_n%(str: string, re: pattern, ## ## repl: The string that replaces *re*. ## -## Returns: A copy of *str* with the first occurence of *re* replaced with +## Returns: A copy of *str* with the first occurrence of *re* replaced with ## *repl*. ## ## .. 
zeek:see:: gsub subst_string diff --git a/src/supervisor/Supervisor.cc b/src/supervisor/Supervisor.cc index 9ee98e0b1b..e913eb343a 100644 --- a/src/supervisor/Supervisor.cc +++ b/src/supervisor/Supervisor.cc @@ -1279,21 +1279,6 @@ Supervisor::NodeConfig Supervisor::NodeConfig::FromRecord(const RecordVal* node) rval.addl_user_scripts.emplace_back(std::move(script)); } - auto scripts_val = node->GetField("scripts")->AsVectorVal(); - - for ( auto i = 0u; i < scripts_val->Size(); ++i ) - { - auto script = scripts_val->StringValAt(i)->ToStdString(); -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - rval.scripts.emplace_back(std::move(script)); -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - } - auto env_table_val = node->GetField("env")->AsTableVal(); auto env_table = env_table_val->AsTable(); @@ -1380,18 +1365,6 @@ Supervisor::NodeConfig Supervisor::NodeConfig::FromJSON(std::string_view json) for ( auto it = addl_user_scripts.Begin(); it != addl_user_scripts.End(); ++it ) rval.addl_user_scripts.emplace_back(it->GetString()); - auto& scripts = j["scripts"]; - - for ( auto it = scripts.Begin(); it != scripts.End(); ++it ) -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - rval.scripts.emplace_back(it->GetString()); -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - auto& env = j["env"]; for ( auto it = env.MemberBegin(); it != env.MemberEnd(); ++it ) @@ -1473,21 +1446,6 @@ RecordValPtr Supervisor::NodeConfig::ToRecord() const rval->AssignField("addl_user_scripts", std::move(addl_user_scripts_val)); - auto st = rt->GetFieldType("scripts"); - auto scripts_val = make_intrusive(std::move(st)); - -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - for ( const auto& s : scripts ) -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - scripts_val->Assign(scripts_val->Size(), make_intrusive(s)); - - rval->AssignField("scripts", std::move(scripts_val)); - auto et = rt->GetFieldType("env"); auto env_val = make_intrusive(std::move(et)); rval->AssignField("env", env_val); @@ -1695,14 +1653,6 @@ void SupervisedNode::Init(Options* options) const stl.insert(stl.begin(), config.addl_base_scripts.begin(), config.addl_base_scripts.end()); stl.insert(stl.end(), config.addl_user_scripts.begin(), config.addl_user_scripts.end()); -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - stl.insert(stl.end(), config.scripts.begin(), config.scripts.end()); -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif } RecordValPtr Supervisor::Status(std::string_view node_name) diff --git a/src/supervisor/Supervisor.h b/src/supervisor/Supervisor.h index 5147ecbea1..bcab530398 100644 --- a/src/supervisor/Supervisor.h +++ b/src/supervisor/Supervisor.h @@ -159,7 +159,7 @@ public: // "scripts" field. It can go when we remove that deprecation. NodeConfig() = default; #ifndef _MSC_VER - // MSVC throws this error when specifing this constructor: + // MSVC throws this error when specifying this constructor: // error C2580: multiple versions of a defaulted special member functions are not allowed NodeConfig(NodeConfig&) = default; #endif @@ -239,11 +239,6 @@ public: * after any user-specified scripts. */ std::vector addl_user_scripts; - /** - * The former name for addl_user_scripts, now deprecated. - */ - std::vector scripts - [[deprecated("Remove in v6.1. 
Use NodeConfig::addl_user_scripts.")]]; /** * Environment variables and values to define in the node. */ diff --git a/src/threading/MsgThread.h b/src/threading/MsgThread.h index c92a935b56..1d25d50d66 100644 --- a/src/threading/MsgThread.h +++ b/src/threading/MsgThread.h @@ -232,7 +232,7 @@ protected: * * This is method is called regularly by the threading::Manager. * - * @return The message, wth ownership passed to caller. Returns null + * @return The message, with ownership passed to caller. Returns null * if the queue is empty. */ BasicOutputMessage* RetrieveOut(); @@ -298,7 +298,7 @@ private: * * Must only be called by the child thread. * - * @return The message, wth ownership passed to caller. Returns null + * @return The message, with ownership passed to caller. Returns null * if the queue is empty. */ BasicInputMessage* RetrieveIn(); diff --git a/src/threading/SerialTypes.h b/src/threading/SerialTypes.h index c463e7cecd..1b29f44761 100644 --- a/src/threading/SerialTypes.h +++ b/src/threading/SerialTypes.h @@ -138,7 +138,7 @@ struct Value // A small note for handling subnet values: Subnet values emitted from // the logging framework will always have a length that is based on the - // internal IPv6 representation (so you have to substract 96 from it to + // internal IPv6 representation (so you have to subtract 96 from it to // get the correct value for IPv4). // However, the Input framework expects the "normal" length for an IPv4 // address (so do not add 96 to it), because the underlying constructors diff --git a/src/threading/formatters/JSON.cc b/src/threading/formatters/JSON.cc index 9c5825bdfd..0a89db329d 100644 --- a/src/threading/formatters/JSON.cc +++ b/src/threading/formatters/JSON.cc @@ -16,22 +16,25 @@ #include "zeek/Desc.h" #include "zeek/threading/MsgThread.h" +#include "zeek/threading/formatters/detail/json.h" namespace zeek::threading::formatter { +// For deprecated NullDoubleWriter +JSON::NullDoubleWriter::NullDoubleWriter(rapidjson::StringBuffer& stream) + : writer(std::make_unique(stream)) + { + } + bool JSON::NullDoubleWriter::Double(double d) { - if ( rapidjson::internal::Double(d).IsNanOrInf() ) - return rapidjson::Writer::Null(); - - return rapidjson::Writer::Double(d); + return writer->Double(d); } JSON::JSON(MsgThread* t, TimeFormat tf, bool arg_include_unset_fields) - : Formatter(t), surrounding_braces(true), include_unset_fields(arg_include_unset_fields) + : Formatter(t), timestamps(tf), include_unset_fields(arg_include_unset_fields) { - timestamps = tf; } JSON::~JSON() { } @@ -39,7 +42,7 @@ JSON::~JSON() { } bool JSON::Describe(ODesc* desc, int num_fields, const Field* const* fields, Value** vals) const { rapidjson::StringBuffer buffer; - NullDoubleWriter writer(buffer); + zeek::json::detail::NullDoubleWriter writer(buffer); writer.StartObject(); @@ -68,7 +71,7 @@ bool JSON::Describe(ODesc* desc, Value* val, const std::string& name) const rapidjson::Document doc; rapidjson::StringBuffer buffer; - NullDoubleWriter writer(buffer); + zeek::json::detail::NullDoubleWriter writer(buffer); writer.StartObject(); BuildJSON(writer, val, name); @@ -85,7 +88,8 @@ Value* JSON::ParseValue(const std::string& s, const std::string& name, TypeTag t return nullptr; } -void JSON::BuildJSON(NullDoubleWriter& writer, Value* val, const std::string& name) const +void JSON::BuildJSON(zeek::json::detail::NullDoubleWriter& writer, Value* val, + const std::string& name) const { if ( ! 
name.empty() ) writer.Key(name); diff --git a/src/threading/formatters/JSON.h b/src/threading/formatters/JSON.h index c946ab7bf2..74bc89c8ae 100644 --- a/src/threading/formatters/JSON.h +++ b/src/threading/formatters/JSON.h @@ -2,12 +2,21 @@ #pragma once +#include + #define RAPIDJSON_HAS_STDSTRING 1 +// Remove in v7.1 when removing NullDoubleWriter below and also remove +// rapidjson include tweaks from CMake's dynamic_plugin_base target. #include #include #include "zeek/threading/Formatter.h" +namespace zeek::json::detail + { +class NullDoubleWriter; + } + namespace zeek::threading::formatter { @@ -38,18 +47,19 @@ public: class NullDoubleWriter : public rapidjson::Writer { public: - NullDoubleWriter(rapidjson::StringBuffer& stream) - : rapidjson::Writer(stream) - { - } + [[deprecated("Remove in v7.1 - This is an implementation detail.")]] NullDoubleWriter( + rapidjson::StringBuffer& stream); bool Double(double d); + + private: + std::unique_ptr writer; }; private: - void BuildJSON(NullDoubleWriter& writer, Value* val, const std::string& name = "") const; + void BuildJSON(zeek::json::detail::NullDoubleWriter& writer, Value* val, + const std::string& name = "") const; TimeFormat timestamps; - bool surrounding_braces; bool include_unset_fields; }; diff --git a/src/threading/formatters/detail/json.h b/src/threading/formatters/detail/json.h new file mode 100644 index 0000000000..7ae9f05c70 --- /dev/null +++ b/src/threading/formatters/detail/json.h @@ -0,0 +1,27 @@ +// Not installed - used by Val.cc and formatters/JSON.cc only. +#pragma once + +#include +#include +#include + +namespace zeek::json::detail + { +// A rapidjson Writer that writes null for inf or nan numbers. +class NullDoubleWriter : public rapidjson::Writer + { +public: + explicit NullDoubleWriter(rapidjson::StringBuffer& stream) + : rapidjson::Writer(stream) + { + } + bool Double(double d) + { + if ( rapidjson::internal::Double(d).IsNanOrInf() ) + return rapidjson::Writer::Null(); + + return rapidjson::Writer::Double(d); + } + }; + + } diff --git a/src/util.h b/src/util.h index d08a819488..9376f008d4 100644 --- a/src/util.h +++ b/src/util.h @@ -102,8 +102,6 @@ inline constexpr std::string_view path_list_separator = ":"; using zeek_int_t = int64_t; using zeek_uint_t = uint64_t; -using bro_int_t [[deprecated("Remove in v6.1. Use zeek_int_t.")]] = zeek_int_t; -using bro_uint_t [[deprecated("Remove in v6.1. Use zeek_uint_t.")]] = zeek_uint_t; #ifndef HAVE_STRCASESTR extern char* strcasestr(const char* s, const char* find); @@ -363,7 +361,7 @@ extern const char* strpbrk_n(size_t len, const char* s, const char* charset); int strstr_n(const int big_len, const unsigned char* big, const int little_len, const unsigned char* little); -// Replaces all occurences of *o* in *s* with *n*. +// Replaces all occurrences of *o* in *s* with *n*. extern std::string strreplace(const std::string& s, const std::string& o, const std::string& n); // Remove all leading and trailing white space from string. diff --git a/src/zeek.bif b/src/zeek.bif index c97e6e7d9c..f1ddc87274 100644 --- a/src/zeek.bif +++ b/src/zeek.bif @@ -2368,6 +2368,10 @@ function is_v6_subnet%(s: subnet%): bool return zeek::val_mgr->False(); %} +%%{ +#include "zeek/Func.h" +%%} + ## Returns a representation of the call stack as a vector of call stack ## elements, each containing call location information. ## @@ -2375,49 +2379,7 @@ function is_v6_subnet%(s: subnet%): bool ## location information. 
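The NullDoubleWriter that moves above into the non-installed zeek::json::detail header exists so that NaN and infinite doubles are serialized as JSON null instead of producing unparseable output. The following is a self-contained rapidjson sketch of that behavior; the field names are made up, std::isnan/std::isinf stand in for rapidjson's internal IsNanOrInf() check, and since rapidjson's Writer methods are not virtual, the replacement Double() only takes effect when the writer is used through the derived type, as Zeek's formatter does.

    // Sketch of a rapidjson writer that emits null for NaN/Inf doubles,
    // mirroring the idea behind zeek::json::detail::NullDoubleWriter.
    #include <cmath>
    #include <cstdio>

    #include <rapidjson/stringbuffer.h>
    #include <rapidjson/writer.h>

    class NullDoubleWriter : public rapidjson::Writer<rapidjson::StringBuffer>
        {
    public:
        explicit NullDoubleWriter(rapidjson::StringBuffer& stream)
            : rapidjson::Writer<rapidjson::StringBuffer>(stream)
            {
            }

        // Shadows (does not virtually override) Writer::Double(): non-finite
        // values are written as null so the resulting JSON stays parseable.
        bool Double(double d)
            {
            if ( std::isnan(d) || std::isinf(d) )
                return rapidjson::Writer<rapidjson::StringBuffer>::Null();

            return rapidjson::Writer<rapidjson::StringBuffer>::Double(d);
            }
        };

    int main()
        {
        rapidjson::StringBuffer buffer;
        NullDoubleWriter writer(buffer);

        writer.StartObject();
        writer.Key("ok");
        writer.Double(3.14);
        writer.Key("bad");
        writer.Double(NAN); // serialized as null rather than failing
        writer.EndObject();

        std::printf("%s\n", buffer.GetString());
        // Prints: {"ok":3.14,"bad":null}
        }
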
function backtrace%(%): Backtrace %{ - using zeek::detail::call_stack; - static auto backtrace_type = id::find_type("Backtrace"); - static auto elem_type = id::find_type("BacktraceElement"); - static auto function_name_idx = elem_type->FieldOffset("function_name"); - static auto function_args_idx = elem_type->FieldOffset("function_args"); - static auto file_location_idx = elem_type->FieldOffset("file_location"); - static auto line_location_idx = elem_type->FieldOffset("line_location"); - - auto rval = make_intrusive(backtrace_type); - - // The body of the following loop can wind up adding items to - // the call stack (because MakeCallArgumentVector() evaluates - // default arguments, which can in turn involve calls to script - // functions), so we work from a copy of the current call stack - // to prevent problems with iterator invalidation. - auto cs_copy = call_stack; - - for ( auto it = cs_copy.rbegin(); it != cs_copy.rend(); ++it ) - { - const auto& ci = *it; - if ( ! ci.func ) - // This happens for compiled code. - continue; - - auto elem = make_intrusive(elem_type); - - const auto& params = ci.func->GetType()->Params(); - auto args = MakeCallArgumentVector(ci.args, params); - - elem->Assign(function_name_idx, ci.func->Name()); - elem->Assign(function_args_idx, std::move(args)); - - if ( ci.call ) - { - auto loc = ci.call->GetLocationInfo(); - elem->Assign(file_location_idx, loc->filename); - elem->Assign(line_location_idx, loc->first_line); - } - - rval->Assign(rval->Size(), std::move(elem)); - } - - return rval; + return zeek::detail::get_current_script_backtrace(); %} # =========================================================================== @@ -4308,7 +4270,7 @@ static bool mmdb_try_open_asn () ## ## Returns: A boolean indicating whether the db was successfully opened. ## -## .. zeek:see:: lookup_asn lookup_autonomous_system +## .. zeek:see:: lookup_autonomous_system function mmdb_open_location_db%(f: string%) : bool %{ #ifdef USE_GEOIP @@ -4318,14 +4280,14 @@ function mmdb_open_location_db%(f: string%) : bool #endif %} -## Initializes MMDB for later use of lookup_asn or lookup_autonomous_system. +## Initializes MMDB for later use of lookup_autonomous_system. ## Requires Zeek to be built with ``libmaxminddb``. ## ## f: The filename of the MaxMind ASN DB. ## ## Returns: A boolean indicating whether the db was successfully opened. ## -## .. zeek:see:: lookup_asn lookup_autonomous_system +## .. zeek:see:: lookup_autonomous_system function mmdb_open_asn_db%(f: string%) : bool %{ #ifdef USE_GEOIP @@ -4342,7 +4304,7 @@ function mmdb_open_asn_db%(f: string%) : bool ## ## Returns: A record with country, region, city, latitude, and longitude. ## -## .. zeek:see:: lookup_asn lookup_autonomous_system +## .. zeek:see:: lookup_autonomous_system function lookup_location%(a: addr%) : geo_location %{ static auto geo_location = zeek::id::find_type("geo_location"); @@ -4421,62 +4383,6 @@ function lookup_location%(a: addr%) : geo_location return location; %} -## Performs an ASN lookup of an IP address. -## Requires Zeek to be built with ``libmaxminddb``. -## -## a: The IP address to lookup. -## -## Returns: The number of the ASN that contains the IP address. -## -## .. zeek:see:: lookup_location lookup_autonomous_system -function lookup_asn%(a: addr%) : count &deprecated="Remove in v6.1. Functionality is now handled by lookup_autonomous_system()." - %{ -#ifdef USE_GEOIP - mmdb_check_asn(); - if ( ! mmdb_asn ) - { - if ( ! mmdb_try_open_asn() ) - { - if ( ! 
did_mmdb_asn_db_error ) - { - did_mmdb_asn_db_error = true; - zeek::emit_builtin_error("Failed to open GeoIP ASN database"); - } - - return zeek::val_mgr->Count(0); - } - } - - MMDB_lookup_result_s result; - - if ( mmdb_lookup_asn(a->AsAddr(), result) ) - { - MMDB_entry_data_s entry_data; - int status; - - // Get Autonomous System Number - status = MMDB_get_value(&result.entry, &entry_data, - "autonomous_system_number", nullptr); - auto asn = mmdb_getvalue(&entry_data, status, MMDB_DATA_TYPE_UINT32); - return asn == nullptr ? zeek::val_mgr->Count(0) : asn; - } - -#else // not USE_GEOIP - static int missing_geoip_reported = 0; - - if ( ! missing_geoip_reported ) - { - zeek::emit_builtin_error("Zeek was not configured for GeoIP ASN support"); - missing_geoip_reported = 1; - } -#endif - - // We can get here even if we have GeoIP support, if we weren't - // able to initialize it or it didn't return any information for - // the address. - return zeek::val_mgr->Count(0); - %} - ## Performs an lookup of AS number & organization of an IP address. ## Requires Zeek to be built with ``libmaxminddb``. ## @@ -4484,7 +4390,7 @@ function lookup_asn%(a: addr%) : count &deprecated="Remove in v6.1. Functionali ## ## Returns: A record with autonomous system number and organization that contains *a*. ## -## .. zeek:see:: lookup_location lookup_asn +## .. zeek:see:: lookup_location function lookup_autonomous_system%(a: addr%) : geo_autonomous_system %{ static auto geo_autonomous_system = zeek::id::find_type("geo_autonomous_system"); @@ -4720,9 +4626,7 @@ function disable_analyzer%(cid: conn_id, aid: count, err_if_no_conn: bool &defau return zeek::val_mgr->False(); } - // Remove in v6.1: Global disabling_analyzer is to be removed. - static auto disabling_analyzer_hook = id::find_func("disabling_analyzer"); - static auto analyzer_disabling_analyzer_hook = id::find_func("Analyzer::disabling_analyzer"); + static auto disabling_analyzer_hook = id::find_func("Analyzer::disabling_analyzer"); if ( disabling_analyzer_hook ) { @@ -4732,14 +4636,6 @@ function disable_analyzer%(cid: conn_id, aid: count, err_if_no_conn: bool &defau return zeek::val_mgr->False(); } - if ( analyzer_disabling_analyzer_hook ) - { - auto hook_rval = analyzer_disabling_analyzer_hook->Invoke(c->GetVal(), a->GetAnalyzerTag().AsVal(), - zeek::val_mgr->Count(aid)); - if ( hook_rval && ! hook_rval->AsBool() ) - return zeek::val_mgr->False(); - } - if ( prevent ) a->Parent()->PreventChildren(a->GetAnalyzerTag()); diff --git a/testing/btest/Baseline.cpp/scripts.base.frameworks.cluster.publish-hrw-type-check/.stderr b/testing/btest/Baseline.cpp/scripts.base.frameworks.cluster.publish-hrw-type-check/.stderr new file mode 100644 index 0000000000..928f4eecec --- /dev/null +++ b/testing/btest/Baseline.cpp/scripts.base.frameworks.cluster.publish-hrw-type-check/.stderr @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+error in <...>/publish-hrw-type-check.zeek (C++), line 13: expected type Cluster::Pool for pool (<___>testing_btest__tmp_scripts_base_frameworks_cluster_publish_hrw_type_check_publish_hrw_type_check_zeek__zeek_init__34__zf()) +error in <...>/publish-hrw-type-check.zeek (C++), line 13: expected type Cluster::Pool for pool (<___>testing_btest__tmp_scripts_base_frameworks_cluster_publish_hrw_type_check_publish_hrw_type_check_zeek__zeek_init__34__zf()) +error in <...>/publish-hrw-type-check.zeek (C++), line 13: expected type Cluster::Pool for pool (<___>testing_btest__tmp_scripts_base_frameworks_cluster_publish_hrw_type_check_publish_hrw_type_check_zeek__zeek_init__34__zf()) +error in <...>/publish-hrw-type-check.zeek (C++), line 13: expected type string for key, got port (<___>testing_btest__tmp_scripts_base_frameworks_cluster_publish_hrw_type_check_publish_hrw_type_check_zeek__zeek_init__34__zf()) diff --git a/testing/btest/Baseline.zam/bifs.disable_analyzer-early/out b/testing/btest/Baseline.zam/bifs.disable_analyzer-early/out new file mode 100644 index 0000000000..b72c958aef --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.disable_analyzer-early/out @@ -0,0 +1,7 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +proto confirm, AllAnalyzers::ANALYZER_ANALYZER_HTTP +T +http_request, GET, /style/enhanced.css +total http messages, { +[[orig_h=192.168.1.104, orig_p=1673/tcp, resp_h=63.245.209.11, resp_p=80/tcp]] = 1 +} diff --git a/testing/btest/Baseline.zam/bifs.disable_analyzer-hook/out b/testing/btest/Baseline.zam/bifs.disable_analyzer-hook/out index bbce22f58e..c72bc161e8 100644 --- a/testing/btest/Baseline.zam/bifs.disable_analyzer-hook/out +++ b/testing/btest/Baseline.zam/bifs.disable_analyzer-hook/out @@ -1,15 +1,15 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
proto confirm, AllAnalyzers::ANALYZER_ANALYZER_HTTP http_request, GET, /style/enhanced.css -preventing disable_analyzer, [orig_h=192.168.1.104, orig_p=1673/tcp, resp_h=63.245.209.11, resp_p=80/tcp], Analyzer::ANALYZER_HTTP, 3, 1 +preventing disable_analyzer, [orig_h=192.168.1.104, orig_p=1673/tcp, resp_h=63.245.209.11, resp_p=80/tcp], AllAnalyzers::ANALYZER_ANALYZER_HTTP, 3, 1 F http_reply, 200 http_request, GET, /script/urchin.js -preventing disable_analyzer, [orig_h=192.168.1.104, orig_p=1673/tcp, resp_h=63.245.209.11, resp_p=80/tcp], Analyzer::ANALYZER_HTTP, 3, 3 +preventing disable_analyzer, [orig_h=192.168.1.104, orig_p=1673/tcp, resp_h=63.245.209.11, resp_p=80/tcp], AllAnalyzers::ANALYZER_ANALYZER_HTTP, 3, 3 F http_reply, 200 http_request, GET, /images/template/screen/bullet_utility.png -allowing disable_analyzer, [orig_h=192.168.1.104, orig_p=1673/tcp, resp_h=63.245.209.11, resp_p=80/tcp], Analyzer::ANALYZER_HTTP, 3, 5 +allowing disable_analyzer, [orig_h=192.168.1.104, orig_p=1673/tcp, resp_h=63.245.209.11, resp_p=80/tcp], AllAnalyzers::ANALYZER_ANALYZER_HTTP, 3, 5 T total http messages, { [[orig_h=192.168.1.104, orig_p=1673/tcp, resp_h=63.245.209.11, resp_p=80/tcp]] = 5 diff --git a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/out.deprecated b/testing/btest/Baseline.zam/bifs.from_json-10/.stderr similarity index 100% rename from testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/out.deprecated rename to testing/btest/Baseline.zam/bifs.from_json-10/.stderr diff --git a/testing/btest/Baseline.zam/bifs.from_json-10/.stdout b/testing/btest/Baseline.zam/bifs.from_json-10/.stdout new file mode 100644 index 0000000000..c7202a240c --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-10/.stdout @@ -0,0 +1,6 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v={ +fe80::/64, +192.168.0.0/16 +}, valid=T] +[v=[1, 3, 4], valid=T] diff --git a/testing/btest/Baseline.zam/bifs.from_json-11/.stderr b/testing/btest/Baseline.zam/bifs.from_json-11/.stderr new file mode 100644 index 0000000000..83cccb763e --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-11/.stderr @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/from_json.zeek, line 8: required field Foo$hello is missing in JSON (from_json({"t":null}, ::#0)) +error in <...>/from_json.zeek, line 9: required field Foo$hello is null in JSON (from_json({"hello": null, "t": true}, ::#2)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-11/.stdout b/testing/btest/Baseline.zam/bifs.from_json-11/.stdout new file mode 100644 index 0000000000..d288024480 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-11/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+[v=, valid=F] +[v=, valid=F] diff --git a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/out.new b/testing/btest/Baseline.zam/bifs.from_json-12/.stderr similarity index 100% rename from testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/out.new rename to testing/btest/Baseline.zam/bifs.from_json-12/.stderr diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-shadow-files/logs.cat b/testing/btest/Baseline.zam/bifs.from_json-12/.stdout similarity index 76% rename from testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-shadow-files/logs.cat rename to testing/btest/Baseline.zam/bifs.from_json-12/.stdout index 05dd1c9670..7673a47ea5 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-shadow-files/logs.cat +++ b/testing/btest/Baseline.zam/bifs.from_json-12/.stdout @@ -1,3 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -leftover conn log -leftover dns log +[v=[hello=Hello!], valid=T] diff --git a/testing/btest/Baseline.zam/bifs.from_json-2/.stderr b/testing/btest/Baseline.zam/bifs.from_json-2/.stderr new file mode 100644 index 0000000000..5fe8977244 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-2/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/from_json.zeek, line 4: from_json() requires a type argument (from_json([], 10)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-2/.stdout b/testing/btest/Baseline.zam/bifs.from_json-2/.stdout new file mode 100644 index 0000000000..aee95c8a8e --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-2/.stdout @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v=, valid=F] diff --git a/testing/btest/Baseline.zam/bifs.from_json-3/.stderr b/testing/btest/Baseline.zam/bifs.from_json-3/.stderr new file mode 100644 index 0000000000..e8e76fd280 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-3/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/from_json.zeek, line 4: JSON parse error: Missing a closing quotation mark in string. Offset: 5 (from_json({"hel, ::#0)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-3/.stdout b/testing/btest/Baseline.zam/bifs.from_json-3/.stdout new file mode 100644 index 0000000000..aee95c8a8e --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-3/.stdout @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v=, valid=F] diff --git a/testing/btest/Baseline.zam/bifs.from_json-4/.stderr b/testing/btest/Baseline.zam/bifs.from_json-4/.stderr new file mode 100644 index 0000000000..ed567bc817 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-4/.stderr @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+error in <...>/from_json.zeek, line 9: cannot convert JSON type 'array' to Zeek type 'bool' (from_json([], ::#0)) +error in <...>/from_json.zeek, line 10: cannot convert JSON type 'string' to Zeek type 'bool' (from_json({"a": "hello"}, ::#2)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-4/.stdout b/testing/btest/Baseline.zam/bifs.from_json-4/.stdout new file mode 100644 index 0000000000..d288024480 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-4/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v=, valid=F] +[v=, valid=F] diff --git a/testing/btest/Baseline.zam/bifs.from_json-5/.stderr b/testing/btest/Baseline.zam/bifs.from_json-5/.stderr new file mode 100644 index 0000000000..a8d80a29c7 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-5/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/from_json.zeek, line 4: tables are not supported (from_json([], ::#0)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-5/.stdout b/testing/btest/Baseline.zam/bifs.from_json-5/.stdout new file mode 100644 index 0000000000..aee95c8a8e --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-5/.stdout @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v=, valid=F] diff --git a/testing/btest/Baseline.zam/bifs.from_json-6/.stderr b/testing/btest/Baseline.zam/bifs.from_json-6/.stderr new file mode 100644 index 0000000000..2ac321f4e0 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-6/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/from_json.zeek, line 5: wrong port format, must be <...>/(tcp|udp|icmp|unknown)/ (from_json("80", ::#0)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-6/.stdout b/testing/btest/Baseline.zam/bifs.from_json-6/.stdout new file mode 100644 index 0000000000..aee95c8a8e --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-6/.stdout @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v=, valid=F] diff --git a/testing/btest/Baseline.zam/bifs.from_json-7/.stderr b/testing/btest/Baseline.zam/bifs.from_json-7/.stderr new file mode 100644 index 0000000000..fd5ec83642 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-7/.stderr @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/from_json.zeek, line 5: index type doesn't match (from_json([[1, false], [2]], ::#0)) +error in <...>/from_json.zeek, line 6: cannot convert JSON type 'number' to Zeek type 'bool' (from_json([[1, false], [2, 1]], ::#2)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-7/.stdout b/testing/btest/Baseline.zam/bifs.from_json-7/.stdout new file mode 100644 index 0000000000..d288024480 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-7/.stdout @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+[v=, valid=F] +[v=, valid=F] diff --git a/testing/btest/Baseline.zam/bifs.from_json-8/.stderr b/testing/btest/Baseline.zam/bifs.from_json-8/.stderr new file mode 100644 index 0000000000..ba565788a5 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-8/.stderr @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error: error compiling pattern /^?(.|\n)*(([[:print:]]{-}[[:alnum:]]foo))/ +error in <...>/from_json.zeek, line 5: error compiling pattern (from_json("/([[:print:]]{-}[[:alnum:]]foo)/", ::#0)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-8/.stdout b/testing/btest/Baseline.zam/bifs.from_json-8/.stdout new file mode 100644 index 0000000000..aee95c8a8e --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-8/.stdout @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v=, valid=F] diff --git a/testing/btest/Baseline.zam/bifs.from_json-9/.stderr b/testing/btest/Baseline.zam/bifs.from_json-9/.stderr new file mode 100644 index 0000000000..14894c2146 --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-9/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/from_json.zeek, line 7: 'Yellow' is not a valid enum for 'Color'. (from_json("Yellow", ::#0)) diff --git a/testing/btest/Baseline.zam/bifs.from_json-9/.stdout b/testing/btest/Baseline.zam/bifs.from_json-9/.stdout new file mode 100644 index 0000000000..aee95c8a8e --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json-9/.stdout @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v=, valid=F] diff --git a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/out.deprecated b/testing/btest/Baseline.zam/bifs.from_json/.stderr similarity index 100% rename from testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/out.deprecated rename to testing/btest/Baseline.zam/bifs.from_json/.stderr diff --git a/testing/btest/Baseline.zam/bifs.from_json/.stdout b/testing/btest/Baseline.zam/bifs.from_json/.stdout new file mode 100644 index 0000000000..24f35f7b9b --- /dev/null +++ b/testing/btest/Baseline.zam/bifs.from_json/.stdout @@ -0,0 +1,8 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +[v=[hello=world, t=T, f=F, n=, m=, def=123, i=123, pi=3.1416, a=[1, 2, 3, 4], c1=A::Blue, p=1500/tcp, ti=1681652265.042767, it=1.0 hr 23.0 mins 20.0 secs, ad=127.0.0.1, s=::1/128, re=/^?(a)$?/, su={ +aa:bb::/32, +192.168.0.0/16 +}, se={ +[192.168.0.1, 80/tcp] , +[2001:db8::1, 8080/udp] +}], valid=T] diff --git a/testing/btest/Baseline.zam/core.analyzer-confirmation-violation-info-ftp/.stdout b/testing/btest/Baseline.zam/core.analyzer-confirmation-violation-info-ftp/.stdout index 315fb7c87f..1e4f9aa98a 100644 --- a/testing/btest/Baseline.zam/core.analyzer-confirmation-violation-info-ftp/.stdout +++ b/testing/btest/Baseline.zam/core.analyzer-confirmation-violation-info-ftp/.stdout @@ -1,5 +1,3 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
analyzer_confirmation_info, AllAnalyzers::ANALYZER_ANALYZER_FTP, [orig_h=2001:470:1f05:17a6:d69a:20ff:fefd:6b88, orig_p=24316/tcp, resp_h=2001:6a8:a40::21, resp_p=21/tcp], 3 -analyzer_confirmation, AllAnalyzers::ANALYZER_ANALYZER_FTP, [orig_h=2001:470:1f05:17a6:d69a:20ff:fefd:6b88, orig_p=24316/tcp, resp_h=2001:6a8:a40::21, resp_p=21/tcp], 3 analyzer_violation_info, AllAnalyzers::ANALYZER_ANALYZER_FTP, non-numeric reply code, [orig_h=2001:470:1f05:17a6:d69a:20ff:fefd:6b88, orig_p=24316/tcp, resp_h=2001:6a8:a40::21, resp_p=21/tcp], 3, SSH-2.0-mod_sftp/0.9.7 -analyzer_violation, AllAnalyzers::ANALYZER_ANALYZER_FTP, non-numeric reply code [SSH-2.0-mod_sftp/0.9.7], [orig_h=2001:470:1f05:17a6:d69a:20ff:fefd:6b88, orig_p=24316/tcp, resp_h=2001:6a8:a40::21, resp_p=21/tcp], 3 diff --git a/testing/btest/Baseline.zam/core.analyzer-confirmation-violation-info/.stdout b/testing/btest/Baseline.zam/core.analyzer-confirmation-violation-info/.stdout index 286fc6c502..af8800750d 100644 --- a/testing/btest/Baseline.zam/core.analyzer-confirmation-violation-info/.stdout +++ b/testing/btest/Baseline.zam/core.analyzer-confirmation-violation-info/.stdout @@ -1,5 +1,3 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. analyzer_confirmation_info, AllAnalyzers::ANALYZER_ANALYZER_SSL, [orig_h=1.1.1.1, orig_p=20394/tcp, resp_h=2.2.2.2, resp_p=443/tcp], 3 -analyzer_confirmation, AllAnalyzers::ANALYZER_ANALYZER_SSL, [orig_h=1.1.1.1, orig_p=20394/tcp, resp_h=2.2.2.2, resp_p=443/tcp], 3 analyzer_violation_info, AllAnalyzers::ANALYZER_ANALYZER_SSL, Invalid version late in TLS connection. Packet reported version: 0, [orig_h=1.1.1.1, orig_p=20394/tcp, resp_h=2.2.2.2, resp_p=443/tcp], 3 -analyzer_violation, AllAnalyzers::ANALYZER_ANALYZER_SSL, Invalid version late in TLS connection. Packet reported version: 0, [orig_h=1.1.1.1, orig_p=20394/tcp, resp_h=2.2.2.2, resp_p=443/tcp], 3 diff --git a/testing/btest/Baseline.zam/language.vector-neg-index/out b/testing/btest/Baseline.zam/language.vector-neg-index/out new file mode 100644 index 0000000000..7059727219 --- /dev/null +++ b/testing/btest/Baseline.zam/language.vector-neg-index/out @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +runtime error in <...>/vector-neg-index.zeek, line 8: no such index diff --git a/testing/btest/Baseline.zam/scripts.base.frameworks.input.raw.rereadraw/out b/testing/btest/Baseline.zam/scripts.base.frameworks.input.raw.rereadraw/out deleted file mode 100644 index 2700d05e77..0000000000 --- a/testing/btest/Baseline.zam/scripts.base.frameworks.input.raw.rereadraw/out +++ /dev/null @@ -1,97 +0,0 @@ -### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -sdfkh:KH;fdkncv;ISEUp34:Fkdj;YVpIODhfDF -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -DSF"DFKJ"SDFKLh304yrsdkfj@#(*U$34jfDJup3UF -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -q3r3057fdf -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -sdfs\d -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW - -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -dfsdf -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -sdf -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -3rw43wRRERLlL#RWERERERE. -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -sdfkh:KH;fdkncv;ISEUp34:Fkdj;YVpIODhfDF -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -DSF"DFKJ"SDFKLh304yrsdkfj@#(*U$34jfDJup3UF -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -q3r3057fdf -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -sdfs\d -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW - -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -dfsdf -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -sdf -[source=../input.log, reader=Input::READER_RAW, mode=Input::REREAD, name=input, fields=A::Val, want_record=F, ev=line -ZAM-code line , error_ev=, config={ - -}] -Input::EVENT_NEW -3rw43wRRERLlL#RWERERERE. 
diff --git a/testing/btest/Baseline.zam/scripts.base.protocols.ssl.prevent-disable-analyzer/.stdout b/testing/btest/Baseline.zam/scripts.base.protocols.ssl.prevent-disable-analyzer/.stdout index d4fcef9236..229c675729 100644 --- a/testing/btest/Baseline.zam/scripts.base.protocols.ssl.prevent-disable-analyzer/.stdout +++ b/testing/btest/Baseline.zam/scripts.base.protocols.ssl.prevent-disable-analyzer/.stdout @@ -2,10 +2,10 @@ analyzer_confirmation, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], AllAnalyzers::ANALYZER_ANALYZER_SSL, 3 encrypted_data, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], T, 22, 32, 1 established, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp] -disabling_analyzer, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], Analyzer::ANALYZER_SSL, 3 -preventing disabling_analyzer, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], Analyzer::ANALYZER_SSL, 3 +disabling_analyzer, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], AllAnalyzers::ANALYZER_ANALYZER_SSL, 3 +preventing disabling_analyzer, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], AllAnalyzers::ANALYZER_ANALYZER_SSL, 3 encrypted_data, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], F, 22, 32, 2 encrypted_data, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], T, 23, 31, 3 encrypted_data, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], T, 23, 17, 4 -disabling_analyzer, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], Analyzer::ANALYZER_SSL, 3 -allowing disabling_analyzer, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], Analyzer::ANALYZER_SSL, 3 +disabling_analyzer, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], AllAnalyzers::ANALYZER_ANALYZER_SSL, 3 +allowing disabling_analyzer, [orig_h=10.0.0.80, orig_p=56637/tcp, resp_h=68.233.76.12, resp_p=443/tcp], AllAnalyzers::ANALYZER_ANALYZER_SSL, 3 diff --git a/testing/btest/Baseline.zam/spicy.analyzer-tag/output b/testing/btest/Baseline.zam/spicy.analyzer-tag/output new file mode 100644 index 0000000000..d999b52c17 --- /dev/null +++ b/testing/btest/Baseline.zam/spicy.analyzer-tag/output @@ -0,0 +1,6 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +Have analyzer! +tag: AllAnalyzers::ANALYZER_ANALYZER_SPICY_SSH +name: SPICY_SSH + +Do not have analyzer! diff --git a/testing/btest/Baseline.zam/spicy.replaces/output b/testing/btest/Baseline.zam/spicy.replaces/output new file mode 100644 index 0000000000..0a68502166 --- /dev/null +++ b/testing/btest/Baseline.zam/spicy.replaces/output @@ -0,0 +1,4 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+AllAnalyzers::ANALYZER_ANALYZER_SSH, 3 +SSH banner, [orig_h=192.150.186.169, orig_p=49244/tcp, resp_h=131.159.14.23, resp_p=22/tcp], F, 1.99, OpenSSH_3.9p1 +SSH banner, [orig_h=192.150.186.169, orig_p=49244/tcp, resp_h=131.159.14.23, resp_p=22/tcp], T, 2.0, OpenSSH_3.8.1p1 diff --git a/testing/btest/Baseline.zam/spicy.ssh-banner/analyzer.log b/testing/btest/Baseline.zam/spicy.ssh-banner/analyzer.log new file mode 100644 index 0000000000..b60c24d0f9 --- /dev/null +++ b/testing/btest/Baseline.zam/spicy.ssh-banner/analyzer.log @@ -0,0 +1,12 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path analyzer +#open XXXX-XX-XX-XX-XX-XX +#fields ts cause analyzer_kind analyzer_name uid fuid id.orig_h id.orig_p id.resp_h id.resp_p failure_reason failure_data +#types time string string string string string addr port addr port string string +XXXXXXXXXX.XXXXXX violation protocol SPICY_SSH CHhAvVGS1DHFjwGM9 - 141.142.228.5 53595 54.243.55.129 80 protocol rejected - +XXXXXXXXXX.XXXXXX violation protocol SPICY_SSH CHhAvVGS1DHFjwGM9 - 141.142.228.5 53595 54.243.55.129 80 failed to match regular expression (<...>/ssh.spicy:7:15) POST /post HTTP/1.1\x0d\x0aUser-Agent: curl/7. +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline.zam/spicy.ssh-banner/output b/testing/btest/Baseline.zam/spicy.ssh-banner/output new file mode 100644 index 0000000000..b9c4a75921 --- /dev/null +++ b/testing/btest/Baseline.zam/spicy.ssh-banner/output @@ -0,0 +1,10 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +=== confirmation +SSH banner in Foo, [orig_h=192.150.186.169, orig_p=49244/tcp, resp_h=131.159.14.23, resp_p=22/tcp], F, 1.99, OpenSSH_3.9p1 +SSH banner in Foo, [orig_h=192.150.186.169, orig_p=49244/tcp, resp_h=131.159.14.23, resp_p=22/tcp], T, 2.0, OpenSSH_3.8.1p1 +SSH banner, [orig_h=192.150.186.169, orig_p=49244/tcp, resp_h=131.159.14.23, resp_p=22/tcp], F, 1.99, OpenSSH_3.9p1 +SSH banner, [orig_h=192.150.186.169, orig_p=49244/tcp, resp_h=131.159.14.23, resp_p=22/tcp], T, 2.0, OpenSSH_3.8.1p1 +confirm, AllAnalyzers::ANALYZER_ANALYZER_SPICY_SSH +=== violation +violation, AllAnalyzers::ANALYZER_ANALYZER_SPICY_SSH, failed to match regular expression (<...>/ssh.spicy:7:15) +violation, AllAnalyzers::ANALYZER_ANALYZER_SPICY_SSH, protocol rejected diff --git a/testing/btest/Baseline.zam/spicy.ssh-banner/weird.log b/testing/btest/Baseline.zam/spicy.ssh-banner/weird.log new file mode 100644 index 0000000000..7dcdd71aef --- /dev/null +++ b/testing/btest/Baseline.zam/spicy.ssh-banner/weird.log @@ -0,0 +1,11 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path weird +#open XXXX-XX-XX-XX-XX-XX +#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer source +#types time string addr port addr port string string bool string string +XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 192.150.186.169 49244 131.159.14.23 22 my_weird OpenSSH_3.9p1 F zeek SPICY_SSH +#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/core.analyzer-confirmation-violation-info-ftp/.stdout b/testing/btest/Baseline/core.analyzer-confirmation-violation-info-ftp/.stdout index d6bd041b4c..6656beff58 100644 --- a/testing/btest/Baseline/core.analyzer-confirmation-violation-info-ftp/.stdout +++ b/testing/btest/Baseline/core.analyzer-confirmation-violation-info-ftp/.stdout @@ -1,5 +1,3 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. analyzer_confirmation_info, Analyzer::ANALYZER_FTP, [orig_h=2001:470:1f05:17a6:d69a:20ff:fefd:6b88, orig_p=24316/tcp, resp_h=2001:6a8:a40::21, resp_p=21/tcp], 3 -analyzer_confirmation, Analyzer::ANALYZER_FTP, [orig_h=2001:470:1f05:17a6:d69a:20ff:fefd:6b88, orig_p=24316/tcp, resp_h=2001:6a8:a40::21, resp_p=21/tcp], 3 analyzer_violation_info, Analyzer::ANALYZER_FTP, non-numeric reply code, [orig_h=2001:470:1f05:17a6:d69a:20ff:fefd:6b88, orig_p=24316/tcp, resp_h=2001:6a8:a40::21, resp_p=21/tcp], 3, SSH-2.0-mod_sftp/0.9.7 -analyzer_violation, Analyzer::ANALYZER_FTP, non-numeric reply code [SSH-2.0-mod_sftp/0.9.7], [orig_h=2001:470:1f05:17a6:d69a:20ff:fefd:6b88, orig_p=24316/tcp, resp_h=2001:6a8:a40::21, resp_p=21/tcp], 3 diff --git a/testing/btest/Baseline/core.analyzer-confirmation-violation-info/.stdout b/testing/btest/Baseline/core.analyzer-confirmation-violation-info/.stdout index 658a058478..074e41b435 100644 --- a/testing/btest/Baseline/core.analyzer-confirmation-violation-info/.stdout +++ b/testing/btest/Baseline/core.analyzer-confirmation-violation-info/.stdout @@ -1,5 +1,3 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. analyzer_confirmation_info, Analyzer::ANALYZER_SSL, [orig_h=1.1.1.1, orig_p=20394/tcp, resp_h=2.2.2.2, resp_p=443/tcp], 3 -analyzer_confirmation, Analyzer::ANALYZER_SSL, [orig_h=1.1.1.1, orig_p=20394/tcp, resp_h=2.2.2.2, resp_p=443/tcp], 3 analyzer_violation_info, Analyzer::ANALYZER_SSL, Invalid version late in TLS connection. Packet reported version: 0, [orig_h=1.1.1.1, orig_p=20394/tcp, resp_h=2.2.2.2, resp_p=443/tcp], 3 -analyzer_violation, Analyzer::ANALYZER_SSL, Invalid version late in TLS connection. Packet reported version: 0, [orig_h=1.1.1.1, orig_p=20394/tcp, resp_h=2.2.2.2, resp_p=443/tcp], 3 diff --git a/testing/btest/Baseline/core.check-unused-event-handlers/.stderr b/testing/btest/Baseline/core.check-unused-event-handlers/.stderr index 3d69e621ce..d3db972878 100644 --- a/testing/btest/Baseline/core.check-unused-event-handlers/.stderr +++ b/testing/btest/Baseline/core.check-unused-event-handlers/.stderr @@ -1,9 +1,6 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. ### NOTE: This file has been sorted with diff-sort. 
warning in <...>/check-unused-event-handlers.test, line 7: handler for non-existing event cannot be invoked (this_is_never_used) -warning in , line 1: event handler never invoked: Cluster::hello -warning in , line 1: event handler never invoked: Cluster::node_down -warning in , line 1: event handler never invoked: Cluster::node_up warning in , line 1: event handler never invoked: Control::configuration_update warning in , line 1: event handler never invoked: Control::configuration_update_request warning in , line 1: event handler never invoked: Control::configuration_update_response diff --git a/testing/btest/Baseline/core.tunnels.analyzer-confirmation/out b/testing/btest/Baseline/core.tunnels.analyzer-confirmation/out index 6f82ec81b8..b580a37dff 100644 --- a/testing/btest/Baseline/core.tunnels.analyzer-confirmation/out +++ b/testing/btest/Baseline/core.tunnels.analyzer-confirmation/out @@ -1,3 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -analyzer_confirmation, CHhAvVGS1DHFjwGM9, [orig_h=10.1.200.131, orig_p=50000/udp, resp_h=10.1.1.172, resp_p=4789/udp], 0 analyzer_confirmation, ClEkJM2Vm5giqnMf4h, [orig_h=172.16.11.201, orig_p=40354/tcp, resp_h=54.86.237.188, resp_p=80/tcp], 6 diff --git a/testing/btest/Baseline/coverage.bare-mode-errors/errors b/testing/btest/Baseline/coverage.bare-mode-errors/errors index 56081d837b..b1bb951e92 100644 --- a/testing/btest/Baseline/coverage.bare-mode-errors/errors +++ b/testing/btest/Baseline/coverage.bare-mode-errors/errors @@ -1,3 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. ### NOTE: This file has been sorted with diff-sort. -warning in <...>/__load__.zeek, line 1: deprecated script loaded from command line arguments "Remove in v6.1 - now loaded automatically"; diff --git a/testing/btest/Baseline/coverage.init-default/missing_loads b/testing/btest/Baseline/coverage.init-default/missing_loads index 33a5c60cfb..fe23c7a04a 100644 --- a/testing/btest/Baseline/coverage.init-default/missing_loads +++ b/testing/btest/Baseline/coverage.init-default/missing_loads @@ -5,11 +5,9 @@ -./frameworks/cluster/nodes/proxy.zeek -./frameworks/cluster/nodes/worker.zeek -./frameworks/cluster/setup-connections.zeek --./frameworks/dpd/__load__.zeek -./frameworks/intel/cluster.zeek -./frameworks/netcontrol/cluster.zeek -./frameworks/openflow/cluster.zeek -./frameworks/packet-filter/cluster.zeek -./frameworks/sumstats/cluster.zeek --./frameworks/telemetry/cluster.zeek -./init-supervisor.zeek diff --git a/testing/btest/Baseline/language.assert-2/out b/testing/btest/Baseline/language.assert-2/out new file mode 100644 index 0000000000..40ed4ca014 --- /dev/null +++ b/testing/btest/Baseline/language.assert-2/out @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert.zeek, line 3: assertion failure: fmt("%s", 1) == "2" ("1" != "2") +fatal error: errors occurred while initializing diff --git a/testing/btest/Baseline/language.assert-3/out b/testing/btest/Baseline/language.assert-3/out new file mode 100644 index 0000000000..5a58147fa5 --- /dev/null +++ b/testing/btest/Baseline/language.assert-3/out @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+error in <...>/assert.zeek, line 3: assertion failure: (coerce to_count("42") to double) == 42.5 (always failing) +fatal error: errors occurred while initializing diff --git a/testing/btest/Baseline/language.assert-4/out b/testing/btest/Baseline/language.assert-4/out new file mode 100644 index 0000000000..3146002d53 --- /dev/null +++ b/testing/btest/Baseline/language.assert-4/out @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert.zeek, line 4: assertion failure: 1 == x (Expected x to be 1, have 2) +fatal error: errors occurred while initializing diff --git a/testing/btest/Baseline/language.assert-5/out b/testing/btest/Baseline/language.assert-5/out new file mode 100644 index 0000000000..166362e0c3 --- /dev/null +++ b/testing/btest/Baseline/language.assert-5/out @@ -0,0 +1,6 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert.zeek, line 9: assertion failure: "ghi" in tbl ({ + [abc] = 123, + [def] = 456 +}) +fatal error: errors occurred while initializing diff --git a/testing/btest/Baseline/language.assert-6/out b/testing/btest/Baseline/language.assert-6/out new file mode 100644 index 0000000000..4bcf7b428e --- /dev/null +++ b/testing/btest/Baseline/language.assert-6/out @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert.zeek, line 10: assertion failure: r?$b (r$b is not set in [a=1234, b=]) +fatal error: errors occurred while initializing diff --git a/testing/btest/Baseline/language.assert-7/out b/testing/btest/Baseline/language.assert-7/out new file mode 100644 index 0000000000..c28e697ecb --- /dev/null +++ b/testing/btest/Baseline/language.assert-7/out @@ -0,0 +1,4 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +expression error in <...>/assert.zeek, line 10: field value missing (r$b) +error in <...>/assert.zeek, line 10: assertion failure: r?$b () +fatal error: errors occurred while initializing diff --git a/testing/btest/Baseline/language.assert-8/out b/testing/btest/Baseline/language.assert-8/out new file mode 100644 index 0000000000..7d877b65b5 --- /dev/null +++ b/testing/btest/Baseline/language.assert-8/out @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert.zeek, line 2: assertion failure: 1 == 2 (always false) +fatal error: failed to execute script statements at top-level scope diff --git a/testing/btest/Baseline/language.assert-error-2/.stderr b/testing/btest/Baseline/language.assert-error-2/.stderr new file mode 100644 index 0000000000..ffba84808c --- /dev/null +++ b/testing/btest/Baseline/language.assert-error-2/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert-error.zeek, line 3: message must be string (1234) diff --git a/testing/btest/Baseline/language.assert-error-3/.stderr b/testing/btest/Baseline/language.assert-error-3/.stderr new file mode 100644 index 0000000000..8bbc43e570 --- /dev/null +++ b/testing/btest/Baseline/language.assert-error-3/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+error in <...>/assert-error.zeek, line 3: syntax error, at or near ";" diff --git a/testing/btest/Baseline/language.assert-error-4/.stderr b/testing/btest/Baseline/language.assert-error-4/.stderr new file mode 100644 index 0000000000..90ff2d507a --- /dev/null +++ b/testing/btest/Baseline/language.assert-error-4/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert-error.zeek, line 3: syntax error, at or near "," diff --git a/testing/btest/Baseline/language.assert-error/.stderr b/testing/btest/Baseline/language.assert-error/.stderr new file mode 100644 index 0000000000..1f636a5228 --- /dev/null +++ b/testing/btest/Baseline/language.assert-error/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert-error.zeek, line 8: conditional must be boolean (1) diff --git a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/out.new b/testing/btest/Baseline/language.assert-hook-2/.stderr similarity index 100% rename from testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/out.new rename to testing/btest/Baseline/language.assert-hook-2/.stderr diff --git a/testing/btest/Baseline/language.assert-hook-2/out b/testing/btest/Baseline/language.assert-hook-2/out new file mode 100644 index 0000000000..f898a52732 --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-2/out @@ -0,0 +1,7 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +assertion_failure, to_count("5") == 4, 5 is not 4 +assert <...>/assert-hook.zeek:21 + f <...>/assert-hook.zeek:25 + g <...>/assert-hook.zeek:26 + h <...>/assert-hook.zeek:30 + zeek_init :0 diff --git a/testing/btest/Baseline/language.assert-hook-3/.stderr b/testing/btest/Baseline/language.assert-hook-3/.stderr new file mode 100644 index 0000000000..a1707e3408 --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-3/.stderr @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert-hook.zeek, line 12: assertion failure: F (terminate me!) +received termination signal diff --git a/testing/btest/Baseline/language.assert-hook-3/out b/testing/btest/Baseline/language.assert-hook-3/out new file mode 100644 index 0000000000..a09aae6630 --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-3/out @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +assertion_failure, terminate me! +zeek_done() diff --git a/testing/btest/Baseline/language.assert-hook-4/.stderr b/testing/btest/Baseline/language.assert-hook-4/.stderr new file mode 100644 index 0000000000..49d861c74c --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-4/.stderr @@ -0,0 +1 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. diff --git a/testing/btest/Baseline/language.assert-hook-4/out b/testing/btest/Baseline/language.assert-hook-4/out new file mode 100644 index 0000000000..13dabea91e --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-4/out @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +assertion_failure, calling exit! 
diff --git a/testing/btest/Baseline/language.assert-hook-5/.stderr b/testing/btest/Baseline/language.assert-hook-5/.stderr new file mode 100644 index 0000000000..49d861c74c --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-5/.stderr @@ -0,0 +1 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. diff --git a/testing/btest/Baseline/language.assert-hook-5/out b/testing/btest/Baseline/language.assert-hook-5/out new file mode 100644 index 0000000000..cfd12f766e --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-5/out @@ -0,0 +1,8 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +assertion_result T at <...>/assert-hook.zeek:25: md5_hash("") == "d41d8cd98f00b204e9800998ecf8427e" +assertion_result T at <...>/assert-hook.zeek:30: sha1_hash("") == "da39a3ee5e6b4b0d3255bfef95601890afd80709" +assertion_result F at <...>/assert-hook.zeek:35: sha1_hash("") == "wrong" +assertion_failure at <...>/assert-hook.zeek:35: sha1_hash("") == "wrong" +assertion_result F at <...>/assert-hook.zeek:40: md5_hash("") == "wrong" +assertion_failure at <...>/assert-hook.zeek:40: md5_hash("") == "wrong" +2 of 4 assertions failed diff --git a/testing/btest/Baseline/language.assert-hook-6/.stderr b/testing/btest/Baseline/language.assert-hook-6/.stderr new file mode 100644 index 0000000000..13dbb41dae --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-6/.stderr @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +expression error in <...>/assert-hook.zeek, line 15: field value missing (get_current_packet_header()$ip) +expression error in <...>/assert-hook.zeek, line 17: field value missing (get_current_packet_header()$ip) +error in <...>/assert-hook.zeek, line 17: assertion failure: 2 + 2 == 5 () +error in <...>/assert-hook.zeek, line 22: assertion failure: 2 + 2 == 5 ({"msg":"false and works"}) diff --git a/testing/btest/Baseline/language.assert-hook-6/out b/testing/btest/Baseline/language.assert-hook-6/out new file mode 100644 index 0000000000..37d07df4b9 --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-6/out @@ -0,0 +1,7 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +assertion_result, T, 2 + 2 == 4, , <...>/assert-hook.zeek, 15 +assertion_result, T, 2 + 2 == 4, {"msg":"true and works"}, <...>/assert-hook.zeek, 16 +assertion_result, F, 2 + 2 == 5, , <...>/assert-hook.zeek, 17 +assertion_failure, 2 + 2 == 5, , <...>/assert-hook.zeek, 17 +assertion_result, F, 2 + 2 == 5, {"msg":"false and works"}, <...>/assert-hook.zeek, 22 +assertion_failure, 2 + 2 == 5, {"msg":"false and works"}, <...>/assert-hook.zeek, 22 diff --git a/testing/btest/Baseline/language.assert-hook-7/.stderr b/testing/btest/Baseline/language.assert-hook-7/.stderr new file mode 100644 index 0000000000..49d861c74c --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-7/.stderr @@ -0,0 +1 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. diff --git a/testing/btest/Baseline/language.assert-hook-7/out b/testing/btest/Baseline/language.assert-hook-7/out new file mode 100644 index 0000000000..820b6a66a1 --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook-7/out @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. 
Use "btest -U/-u" to update. Requires BTest >= 0.63. +assertion_result, T, 2 + 2 == 4, this is true, <...>/assert-hook.zeek, 10 +assertion_result, T, 2 + 2 == 4, {"msg":"this is also true"}, <...>/assert-hook.zeek, 11 +assertion_result, F, 2 + 2 == 5, this is false, <...>/assert-hook.zeek, 12 +assertion_result, F, 2 + 2 == 5, this is false, <...>/assert-hook.zeek, 18 diff --git a/testing/btest/Baseline/language.assert-hook/.stderr b/testing/btest/Baseline/language.assert-hook/.stderr new file mode 100644 index 0000000000..c04263e182 --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook/.stderr @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert-hook.zeek, line 18: assertion failure: 1 != 1 diff --git a/testing/btest/Baseline/language.assert-hook/out b/testing/btest/Baseline/language.assert-hook/out new file mode 100644 index 0000000000..f4efbf3732 --- /dev/null +++ b/testing/btest/Baseline/language.assert-hook/out @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +assertion_failure, 1 != 1, , <...>/assert-hook.zeek, 18 diff --git a/testing/btest/Baseline/language.assert-misc/out b/testing/btest/Baseline/language.assert-misc/out new file mode 100644 index 0000000000..1298f95384 --- /dev/null +++ b/testing/btest/Baseline/language.assert-misc/out @@ -0,0 +1,13 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +f, lambda_<505728364269398358> +{ +assert 0 < getpid(), fmt("my pid is funny: %s", getpid()); +} +g, lambda_<8496146571423528161> +{ +assert to_count("42") == 42; +} +test_function, test_function +{ +assert 0 < getpid(); +} diff --git a/testing/btest/Baseline/language.assert-top-level/.stderr b/testing/btest/Baseline/language.assert-top-level/.stderr new file mode 100644 index 0000000000..5322f47419 --- /dev/null +++ b/testing/btest/Baseline/language.assert-top-level/.stderr @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert-top-level.zeek, line 7: assertion failure: getpid() == 0 (my pid greater 0? T) +fatal error: failed to execute script statements at top-level scope diff --git a/testing/btest/Baseline/language.assert/out b/testing/btest/Baseline/language.assert/out new file mode 100644 index 0000000000..886324ae99 --- /dev/null +++ b/testing/btest/Baseline/language.assert/out @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +error in <...>/assert.zeek, line 11: assertion failure: fmt("%s", 1) == "2" +fatal error: errors occurred while initializing diff --git a/testing/btest/Baseline/language.init-mismatch/.stderr b/testing/btest/Baseline/language.init-mismatch/.stderr index 27c9a908dc..294a1efe24 100644 --- a/testing/btest/Baseline/language.init-mismatch/.stderr +++ b/testing/btest/Baseline/language.init-mismatch/.stderr @@ -1,7 +1,7 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. error in <...>/init-mismatch.zeek, line 6: invalid constructor list on RHS of assignment (a = 3, 5) error in <...>/init-mismatch.zeek, line 6: assignment of non-arithmetic value to arithmetic (count/types) (a = 3, 5) -warning in <...>/init-mismatch.zeek, line 7: Remove in v6.1. 
Initialization not preceded by =<...>/-= is deprecated. (4, 6) +error in <...>/init-mismatch.zeek, line 7: Initialization not preceded by =<...>/-= is not allowed. (4, 6) error in <...>/init-mismatch.zeek, line 13: different number of indices (list of count,count and list of count,count,count) error in <...>/init-mismatch.zeek, line 14: table constructor element lacks '=' structure (bar) error in <...>/init-mismatch.zeek, line 17: empty list in untyped initialization () diff --git a/testing/btest/Baseline/language.next-break-context-errors-2/.stderr b/testing/btest/Baseline/language.next-break-context-errors-2/.stderr index c7cfd07cb0..07626ebb94 100644 --- a/testing/btest/Baseline/language.next-break-context-errors-2/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors-2/.stderr @@ -1,2 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 3: break statement used outside of for, while or switch statement and not within a hook. With v6.1 this will become an error. +error in <...>/next-break-context-errors.zeek, line 3: break statement used outside of for, while or switch statement and not within a hook. (break ) diff --git a/testing/btest/Baseline/language.next-break-context-errors-3/.stderr b/testing/btest/Baseline/language.next-break-context-errors-3/.stderr index 71b305068b..ec489d6d93 100644 --- a/testing/btest/Baseline/language.next-break-context-errors-3/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors-3/.stderr @@ -1,2 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 3: next statement used outside of for or while statement. With v6.1 this will become an error. +error in <...>/next-break-context-errors.zeek, line 3: next statement used outside of for or while statement. (next ) diff --git a/testing/btest/Baseline/language.next-break-context-errors-4/.stderr b/testing/btest/Baseline/language.next-break-context-errors-4/.stderr index c7cfd07cb0..07626ebb94 100644 --- a/testing/btest/Baseline/language.next-break-context-errors-4/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors-4/.stderr @@ -1,2 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 3: break statement used outside of for, while or switch statement and not within a hook. With v6.1 this will become an error. +error in <...>/next-break-context-errors.zeek, line 3: break statement used outside of for, while or switch statement and not within a hook. (break ) diff --git a/testing/btest/Baseline/language.next-break-context-errors-5/.stderr b/testing/btest/Baseline/language.next-break-context-errors-5/.stderr index bf2d6b9806..043304ed3e 100644 --- a/testing/btest/Baseline/language.next-break-context-errors-5/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors-5/.stderr @@ -1,2 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 4: break statement used outside of for, while or switch statement and not within a hook. With v6.1 this will become an error. 
+error in <...>/next-break-context-errors.zeek, line 4: break statement used outside of for, while or switch statement and not within a hook. (break ) diff --git a/testing/btest/Baseline/language.next-break-context-errors-6/.stderr b/testing/btest/Baseline/language.next-break-context-errors-6/.stderr index 496ca6d854..ba071edc26 100644 --- a/testing/btest/Baseline/language.next-break-context-errors-6/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors-6/.stderr @@ -1,2 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 7: next statement used outside of for or while statement. With v6.1 this will become an error. +error in <...>/next-break-context-errors.zeek, line 7: next statement used outside of for or while statement. (next ) diff --git a/testing/btest/Baseline/language.next-break-context-errors-7/.stderr b/testing/btest/Baseline/language.next-break-context-errors-7/.stderr index fd485b8587..18110d7e9a 100644 --- a/testing/btest/Baseline/language.next-break-context-errors-7/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors-7/.stderr @@ -1,2 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 5: next statement used outside of for or while statement. With v6.1 this will become an error. +error in <...>/next-break-context-errors.zeek, line 5: next statement used outside of for or while statement. (next ) diff --git a/testing/btest/Baseline/language.next-break-context-errors-8/.stderr b/testing/btest/Baseline/language.next-break-context-errors-8/.stderr index 5583094a27..60db706125 100644 --- a/testing/btest/Baseline/language.next-break-context-errors-8/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors-8/.stderr @@ -1,2 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 6: next statement used outside of for or while statement. With v6.1 this will become an error. +error in <...>/next-break-context-errors.zeek, line 6: next statement used outside of for or while statement. (next ) diff --git a/testing/btest/Baseline/language.next-break-context-errors-9/.stderr b/testing/btest/Baseline/language.next-break-context-errors-9/.stderr index 39fe9ba574..3b5b394fb0 100644 --- a/testing/btest/Baseline/language.next-break-context-errors-9/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors-9/.stderr @@ -1,4 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 6: next statement used outside of for or while statement. With v6.1 this will become an error. -warning in <...>/next-break-context-errors.zeek, line 11: break statement used outside of for, while or switch statement and not within a hook. With v6.1 this will become an error. -warning in <...>/next-break-context-errors.zeek, line 16: next statement used outside of for or while statement. With v6.1 this will become an error. +error in <...>/next-break-context-errors.zeek, line 6: next statement used outside of for or while statement. 
(next ) +error in <...>/next-break-context-errors.zeek, line 11: break statement used outside of for, while or switch statement and not within a hook. (break ) +error in <...>/next-break-context-errors.zeek, line 16: next statement used outside of for or while statement. (next ) diff --git a/testing/btest/Baseline/language.next-break-context-errors/.stderr b/testing/btest/Baseline/language.next-break-context-errors/.stderr index 496ca6d854..ba071edc26 100644 --- a/testing/btest/Baseline/language.next-break-context-errors/.stderr +++ b/testing/btest/Baseline/language.next-break-context-errors/.stderr @@ -1,2 +1,2 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/next-break-context-errors.zeek, line 7: next statement used outside of for or while statement. With v6.1 this will become an error. +error in <...>/next-break-context-errors.zeek, line 7: next statement used outside of for or while statement. (next ) diff --git a/testing/btest/Baseline/language.redef-same-prefixtable-idx/.stderr b/testing/btest/Baseline/language.redef-same-prefixtable-idx/.stderr index b8c2b8c70e..49d861c74c 100644 --- a/testing/btest/Baseline/language.redef-same-prefixtable-idx/.stderr +++ b/testing/btest/Baseline/language.redef-same-prefixtable-idx/.stderr @@ -1,3 +1 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/redef-same-prefixtable-idx.zeek, line 7: Remove in v6.1. Initialization not preceded by =<...>/-= is deprecated. (3.0.0.0/8 = 1.0.0.0/8) -warning in <...>/redef-same-prefixtable-idx.zeek, line 8: Remove in v6.1. Initialization not preceded by =<...>/-= is deprecated. (3.0.0.0/8 = 2.0.0.0/8) diff --git a/testing/btest/Baseline/language.table-redef/out b/testing/btest/Baseline/language.table-redef/out index d950ce010e..aed170ff09 100644 --- a/testing/btest/Baseline/language.table-redef/out +++ b/testing/btest/Baseline/language.table-redef/out @@ -1,7 +1,7 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. { -[abc] = 8.0, +[abc] = 42.0, [neat] = 1.0, -[cool] = 28.0, +[cool] = 5.0, [def] = 99.0 } diff --git a/testing/btest/Baseline/language.vector-neg-index/out b/testing/btest/Baseline/language.vector-neg-index/out new file mode 100644 index 0000000000..18f3dfc89a --- /dev/null +++ b/testing/btest/Baseline/language.vector-neg-index/out @@ -0,0 +1,2 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +expression error in <...>/vector-neg-index.zeek, line 8: no such index (v[-7]) diff --git a/testing/btest/Baseline/plugins.duplicate-pktsrc/output b/testing/btest/Baseline/plugins.duplicate-pktsrc/output new file mode 100644 index 0000000000..19f5d4978e --- /dev/null +++ b/testing/btest/Baseline/plugins.duplicate-pktsrc/output @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
+warning in <...>/init-bare.zeek, line 1: ignoring dynamic plugin Demo::Foo from <...>/build, already found in <...>/build_backup +Demo::Foo - A Foo packet source (dynamic, version 1.0.0) + [Packet Source] FooPktSrc (interface prefix "foo"; supports live and trace input) + diff --git a/testing/btest/Baseline/plugins.hooks/output b/testing/btest/Baseline/plugins.hooks/output index 04f66804c6..e520223373 100644 --- a/testing/btest/Baseline/plugins.hooks/output +++ b/testing/btest/Baseline/plugins.hooks/output @@ -198,6 +198,7 @@ 0.000000 MetaHookPost CallFunction(FilteredTraceDetection::should_detect, , ()) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=broker, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> +0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=cluster, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=config, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Conn::LOG, [name=default, writer=Log::WRITER_ASCII, path=conn, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (DCE_RPC::LOG, [name=default, writer=Log::WRITER_ASCII, path=dce_rpc, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> @@ -248,6 +249,7 @@ 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (mysql::LOG, [name=default, writer=Log::WRITER_ASCII, path=mysql, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=, path=analyzer, policy=Analyzer::Logging::log_policy, event_groups={Analyzer::Logging}])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy, event_groups={}])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy, event_groups={}])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Config::LOG, 
[columns=Config::Info, ev=Config::log_config, path=config, policy=Config::log_policy, event_groups={}])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn, path=conn, policy=Conn::log_policy, event_groups={}])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (DCE_RPC::LOG, [columns=DCE_RPC::Info, ev=, path=dce_rpc, policy=DCE_RPC::log_policy, event_groups={}])) -> @@ -299,6 +301,7 @@ 0.000000 MetaHookPost CallFunction(Log::__write, , (PacketFilter::LOG, [ts=XXXXXXXXXX.XXXXXX, node=zeek, filter=ip or not ip, init=T, success=T, failure_reason=])) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Analyzer::Logging::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Broker::LOG)) -> +0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Cluster::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Config::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Conn::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (DCE_RPC::LOG)) -> @@ -348,6 +351,7 @@ 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (mysql::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_filter, , (Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::add_filter, , (Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> +0.000000 MetaHookPost CallFunction(Log::add_filter, , (Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::add_filter, , (Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::add_filter, , (Conn::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::add_filter, , (DCE_RPC::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> @@ -398,6 +402,7 @@ 0.000000 MetaHookPost CallFunction(Log::add_filter, , (mysql::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Analyzer::Logging::LOG, 
default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Broker::LOG, default)) -> +0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Cluster::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Config::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (Conn::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (DCE_RPC::LOG, default)) -> @@ -447,6 +452,7 @@ 0.000000 MetaHookPost CallFunction(Log::add_stream_filters, , (mysql::LOG, default)) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=, path=analyzer, policy=Analyzer::Logging::log_policy, event_groups={Analyzer::Logging}])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy, event_groups={}])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy, event_groups={}])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config, path=config, policy=Config::log_policy, event_groups={}])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn, path=conn, policy=Conn::log_policy, event_groups={}])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (DCE_RPC::LOG, [columns=DCE_RPC::Info, ev=, path=dce_rpc, policy=DCE_RPC::log_policy, event_groups={}])) -> @@ -1812,6 +1818,7 @@ 0.000000 MetaHookPre CallFunction(FilteredTraceDetection::should_detect, , ()) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=broker, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) +0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=cluster, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=config, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Conn::LOG, [name=default, writer=Log::WRITER_ASCII, path=conn, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (DCE_RPC::LOG, [name=default, writer=Log::WRITER_ASCII, path=dce_rpc, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., 
ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) @@ -1862,6 +1869,7 @@ 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (mysql::LOG, [name=default, writer=Log::WRITER_ASCII, path=mysql, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=, path=analyzer, policy=Analyzer::Logging::log_policy, event_groups={Analyzer::Logging}])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy, event_groups={}])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy, event_groups={}])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config, path=config, policy=Config::log_policy, event_groups={}])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn, path=conn, policy=Conn::log_policy, event_groups={}])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (DCE_RPC::LOG, [columns=DCE_RPC::Info, ev=, path=dce_rpc, policy=DCE_RPC::log_policy, event_groups={}])) @@ -1913,6 +1921,7 @@ 0.000000 MetaHookPre CallFunction(Log::__write, , (PacketFilter::LOG, [ts=XXXXXXXXXX.XXXXXX, node=zeek, filter=ip or not ip, init=T, success=T, failure_reason=])) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Analyzer::Logging::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Broker::LOG)) +0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Cluster::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Config::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Conn::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (DCE_RPC::LOG)) @@ -1962,6 +1971,7 @@ 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (mysql::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_filter, , (Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::add_filter, , (Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) +0.000000 MetaHookPre CallFunction(Log::add_filter, , (Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::add_filter, , (Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::add_filter, , 
(Conn::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::add_filter, , (DCE_RPC::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) @@ -2012,6 +2022,7 @@ 0.000000 MetaHookPre CallFunction(Log::add_filter, , (mysql::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=])) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Analyzer::Logging::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Broker::LOG, default)) +0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Cluster::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Config::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (Conn::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (DCE_RPC::LOG, default)) @@ -2061,6 +2072,7 @@ 0.000000 MetaHookPre CallFunction(Log::add_stream_filters, , (mysql::LOG, default)) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=, path=analyzer, policy=Analyzer::Logging::log_policy, event_groups={Analyzer::Logging}])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy, event_groups={}])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy, event_groups={}])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Config::LOG, [columns=Config::Info, ev=Config::log_config, path=config, policy=Config::log_policy, event_groups={}])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn, path=conn, policy=Conn::log_policy, event_groups={}])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (DCE_RPC::LOG, [columns=DCE_RPC::Info, ev=, path=dce_rpc, policy=DCE_RPC::log_policy, event_groups={}])) @@ -3425,6 +3437,7 @@ 0.000000 | HookCallFunction FilteredTraceDetection::should_detect() 0.000000 | HookCallFunction Log::__add_filter(Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=analyzer, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=broker, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) +0.000000 | HookCallFunction Log::__add_filter(Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=cluster, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, 
postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=config, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(Conn::LOG, [name=default, writer=Log::WRITER_ASCII, path=conn, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__add_filter(DCE_RPC::LOG, [name=default, writer=Log::WRITER_ASCII, path=dce_rpc, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) @@ -3475,6 +3488,7 @@ 0.000000 | HookCallFunction Log::__add_filter(mysql::LOG, [name=default, writer=Log::WRITER_ASCII, path=mysql, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::__create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=, path=analyzer, policy=Analyzer::Logging::log_policy, event_groups={Analyzer::Logging}]) 0.000000 | HookCallFunction Log::__create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy, event_groups={}]) +0.000000 | HookCallFunction Log::__create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy, event_groups={}]) 0.000000 | HookCallFunction Log::__create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config, path=config, policy=Config::log_policy, event_groups={}]) 0.000000 | HookCallFunction Log::__create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn, path=conn, policy=Conn::log_policy, event_groups={}]) 0.000000 | HookCallFunction Log::__create_stream(DCE_RPC::LOG, [columns=DCE_RPC::Info, ev=, path=dce_rpc, policy=DCE_RPC::log_policy, event_groups={}]) @@ -3526,6 +3540,7 @@ 0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=XXXXXXXXXX.XXXXXX, node=zeek, filter=ip or not ip, init=T, success=T, failure_reason=]) 0.000000 | HookCallFunction Log::add_default_filter(Analyzer::Logging::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Broker::LOG) +0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Config::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Conn::LOG) 0.000000 | HookCallFunction Log::add_default_filter(DCE_RPC::LOG) @@ -3575,6 +3590,7 @@ 0.000000 | HookCallFunction Log::add_default_filter(mysql::LOG) 0.000000 | HookCallFunction Log::add_filter(Analyzer::Logging::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::add_filter(Broker::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) +0.000000 | 
HookCallFunction Log::add_filter(Cluster::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::add_filter(Config::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::add_filter(Conn::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::add_filter(DCE_RPC::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) @@ -3625,6 +3641,7 @@ 0.000000 | HookCallFunction Log::add_filter(mysql::LOG, [name=default, writer=Log::WRITER_ASCII, path=, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2528247166937952945>, interv=0 secs, postprocessor=, config={}, policy=]) 0.000000 | HookCallFunction Log::add_stream_filters(Analyzer::Logging::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Broker::LOG, default) +0.000000 | HookCallFunction Log::add_stream_filters(Cluster::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Config::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(Conn::LOG, default) 0.000000 | HookCallFunction Log::add_stream_filters(DCE_RPC::LOG, default) @@ -3674,6 +3691,7 @@ 0.000000 | HookCallFunction Log::add_stream_filters(mysql::LOG, default) 0.000000 | HookCallFunction Log::create_stream(Analyzer::Logging::LOG, [columns=Analyzer::Logging::Info, ev=, path=analyzer, policy=Analyzer::Logging::log_policy, event_groups={Analyzer::Logging}]) 0.000000 | HookCallFunction Log::create_stream(Broker::LOG, [columns=Broker::Info, ev=, path=broker, policy=Broker::log_policy, event_groups={}]) +0.000000 | HookCallFunction Log::create_stream(Cluster::LOG, [columns=Cluster::Info, ev=, path=cluster, policy=Cluster::log_policy, event_groups={}]) 0.000000 | HookCallFunction Log::create_stream(Config::LOG, [columns=Config::Info, ev=Config::log_config, path=config, policy=Config::log_policy, event_groups={}]) 0.000000 | HookCallFunction Log::create_stream(Conn::LOG, [columns=Conn::Info, ev=Conn::log_conn, path=conn, policy=Conn::log_policy, event_groups={}]) 0.000000 | HookCallFunction Log::create_stream(DCE_RPC::LOG, [columns=DCE_RPC::Info, ev=, path=dce_rpc, policy=DCE_RPC::log_policy, event_groups={}]) @@ -4840,7 +4858,6 @@ 0.000000 | HookQueueEvent NetControl::init() 0.000000 | HookQueueEvent filter_change_tracking() 0.000000 | HookQueueEvent zeek_init() -XXXXXXXXXX.XXXXXX MetaHookPost BroObjDtor() -> XXXXXXXXXX.XXXXXX MetaHookPost CallFunction(Broker::__flush_logs, , ()) -> XXXXXXXXXX.XXXXXX MetaHookPost CallFunction(Broker::flush_logs, , ()) -> XXXXXXXXXX.XXXXXX MetaHookPost CallFunction(Broker::log_flush, , ()) -> @@ -4859,7 +4876,6 @@ XXXXXXXXXX.XXXXXX MetaHookPost QueueEvent(new_connection([id=[orig_h=141.142. 
XXXXXXXXXX.XXXXXX MetaHookPost QueueEvent(run_sync_hook()) -> false XXXXXXXXXX.XXXXXX MetaHookPost SetupAnalyzerTree(XXXXXXXXXX.XXXXXX(XXXXXXXXXX.XXXXXX) TCP 141.142.228.5:59856 -> 192.150.187.43:80) -> XXXXXXXXXX.XXXXXX MetaHookPost UpdateNetworkTime(XXXXXXXXXX.XXXXXX) -> -XXXXXXXXXX.XXXXXX MetaHookPre BroObjDtor() XXXXXXXXXX.XXXXXX MetaHookPre CallFunction(Broker::__flush_logs, , ()) XXXXXXXXXX.XXXXXX MetaHookPre CallFunction(Broker::flush_logs, , ()) XXXXXXXXXX.XXXXXX MetaHookPre CallFunction(Broker::log_flush, , ()) @@ -4878,7 +4894,6 @@ XXXXXXXXXX.XXXXXX MetaHookPre QueueEvent(new_connection([id=[orig_h=141.142. XXXXXXXXXX.XXXXXX MetaHookPre QueueEvent(run_sync_hook()) XXXXXXXXXX.XXXXXX MetaHookPre SetupAnalyzerTree(XXXXXXXXXX.XXXXXX(XXXXXXXXXX.XXXXXX) TCP 141.142.228.5:59856 -> 192.150.187.43:80) XXXXXXXXXX.XXXXXX MetaHookPre UpdateNetworkTime(XXXXXXXXXX.XXXXXX) -XXXXXXXXXX.XXXXXX | HookBroObjDtor XXXXXXXXXX.XXXXXX | HookObjDtor XXXXXXXXXX.XXXXXX | HookUpdateNetworkTime XXXXXXXXXX.XXXXXX XXXXXXXXXX.XXXXXX | HookCallFunction Broker::__flush_logs() diff --git a/testing/btest/Baseline/scripts.base.frameworks.cluster.leftover-log-rotation-multi-logger/out b/testing/btest/Baseline/scripts.base.frameworks.cluster.leftover-log-rotation-multi-logger/out new file mode 100644 index 0000000000..946ade5fb2 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.cluster.leftover-log-rotation-multi-logger/out @@ -0,0 +1,5 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +./log-queue/conn__XXXX-XX-XX-XX-XX-XX__XXXX-XX-XX-XX-XX-XX__log_suffix=logger-2__.log +./log-queue/dns__XXXX-XX-XX-XX-XX-XX__XXXX-XX-XX-XX-XX-XX__log_suffix=logger-2__.log +leftover conn log +leftover dns log diff --git a/testing/btest/Baseline/scripts.base.frameworks.file-analysis.log-files-event-flattening/out.new b/testing/btest/Baseline/scripts.base.frameworks.file-analysis.log-files-event-flattening/out similarity index 100% rename from testing/btest/Baseline/scripts.base.frameworks.file-analysis.log-files-event-flattening/out.new rename to testing/btest/Baseline/scripts.base.frameworks.file-analysis.log-files-event-flattening/out diff --git a/testing/btest/Baseline/scripts.base.frameworks.file-analysis.log-files-event-flattening/out.deprecated b/testing/btest/Baseline/scripts.base.frameworks.file-analysis.log-files-event-flattening/out.deprecated deleted file mode 100644 index 8b6e2d4062..0000000000 --- a/testing/btest/Baseline/scripts.base.frameworks.file-analysis.log-files-event-flattening/out.deprecated +++ /dev/null @@ -1,4 +0,0 @@ -### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-C4J4Th3PJpwUYZZ6gc, [orig_h=192.168.0.107, orig_p=58720/tcp, resp_h=88.198.248.254, resp_p=80/tcp], {\x0a\x0988.198.248.254\x0a}, {\x0a\x09192.168.0.107\x0a}, {\x0aC4J4Th3PJpwUYZZ6gc\x0a} -CHhAvVGS1DHFjwGM9, [orig_h=192.168.0.107, orig_p=58716/tcp, resp_h=88.198.248.254, resp_p=80/tcp], {\x0a\x0988.198.248.254\x0a}, {\x0a\x09192.168.0.107\x0a}, {\x0aCHhAvVGS1DHFjwGM9\x0a} -ClEkJM2Vm5giqnMf4h, [orig_h=192.168.0.107, orig_p=58718/tcp, resp_h=88.198.248.254, resp_p=80/tcp], {\x0a\x0988.198.248.254\x0a}, {\x0a\x09192.168.0.107\x0a}, {\x0aClEkJM2Vm5giqnMf4h\x0a} diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-shadow-files/out b/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-shadow-files/out deleted file mode 100644 index 823cec2fc1..0000000000 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-shadow-files/out +++ /dev/null @@ -1,4 +0,0 @@ -### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -### NOTE: This file has been sorted with diff-sort. -running my rotation postprocessor for path 'conn' -running my rotation postprocessor for path 'dns' diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-stale-shadow-files/out b/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-stale-shadow-files/out deleted file mode 100644 index 0f3aae8c56..0000000000 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation-stale-shadow-files/out +++ /dev/null @@ -1,2 +0,0 @@ -### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -warning in <...>/rotate-ascii-logdir-leftover-log-rotation-stale-shadow-files.zeek, line 20: deprecated (LogAscii::logdir): Remove in v6.1. Use 'Log::default_logdir'. diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation/out b/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation/out index 21ba63d2f4..94e748a4e2 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation/out +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.rotate-ascii-logdir-leftover-log-rotation/out @@ -1,16 +1,15 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -test.2011-03-07-03-00-05.log test 11-03-07_03.00.05 11-03-07_04.00.05 0 ascii -test.2011-03-07-04-00-05.log test 11-03-07_04.00.05 11-03-07_05.00.05 0 ascii -test.2011-03-07-05-00-05.log test 11-03-07_05.00.05 11-03-07_06.00.05 0 ascii -test.2011-03-07-06-00-05.log test 11-03-07_06.00.05 11-03-07_07.00.05 0 ascii -test.2011-03-07-07-00-05.log test 11-03-07_07.00.05 11-03-07_08.00.05 0 ascii -test.2011-03-07-08-00-05.log test 11-03-07_08.00.05 11-03-07_09.00.05 0 ascii -test.2011-03-07-09-00-05.log test 11-03-07_09.00.05 11-03-07_10.00.05 0 ascii -test.2011-03-07-10-00-05.log test 11-03-07_10.00.05 11-03-07_11.00.05 0 ascii -test.2011-03-07-11-00-05.log test 11-03-07_11.00.05 11-03-07_12.00.05 0 ascii -test.2011-03-07-12-00-05.log test 11-03-07_12.00.05 11-03-07_12.59.55 1 ascii -warning in <...>/rotate-ascii-logdir-leftover-log-rotation.zeek, line 22: deprecated (LogAscii::logdir): Remove in v6.1. 
Use 'Log::default_logdir'. -> test.2011-03-07-03-00-05.log +.<...>/test.2011-03-07-03-00-05.log test 11-03-07_03.00.05 11-03-07_04.00.05 0 ascii +.<...>/test.2011-03-07-04-00-05.log test 11-03-07_04.00.05 11-03-07_05.00.05 0 ascii +.<...>/test.2011-03-07-05-00-05.log test 11-03-07_05.00.05 11-03-07_06.00.05 0 ascii +.<...>/test.2011-03-07-06-00-05.log test 11-03-07_06.00.05 11-03-07_07.00.05 0 ascii +.<...>/test.2011-03-07-07-00-05.log test 11-03-07_07.00.05 11-03-07_08.00.05 0 ascii +.<...>/test.2011-03-07-08-00-05.log test 11-03-07_08.00.05 11-03-07_09.00.05 0 ascii +.<...>/test.2011-03-07-09-00-05.log test 11-03-07_09.00.05 11-03-07_10.00.05 0 ascii +.<...>/test.2011-03-07-10-00-05.log test 11-03-07_10.00.05 11-03-07_11.00.05 0 ascii +.<...>/test.2011-03-07-11-00-05.log test 11-03-07_11.00.05 11-03-07_12.00.05 0 ascii +.<...>/test.2011-03-07-12-00-05.log test 11-03-07_12.00.05 11-03-07_12.59.55 1 ascii +> logs/test.2011-03-07-03-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -22,7 +21,7 @@ warning in <...>/rotate-ascii-logdir-leftover-log-rotation.zeek, line 22: deprec XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1024 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 0 #close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-04-00-05.log +> logs/test.2011-03-07-04-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -34,7 +33,7 @@ XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 0 XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1025 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 1 #close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-05-00-05.log +> logs/test.2011-03-07-05-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -46,7 +45,7 @@ XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 1 XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1026 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 2 #close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-06-00-05.log +> logs/test.2011-03-07-06-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -58,7 +57,7 @@ XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 2 XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1027 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 3 #close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-07-00-05.log +> logs/test.2011-03-07-07-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -70,7 +69,7 @@ XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 3 XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1028 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 4 #close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-08-00-05.log +> logs/test.2011-03-07-08-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -82,7 +81,7 @@ XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 4 XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1029 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 5 #close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-09-00-05.log +> logs/test.2011-03-07-09-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -94,7 +93,7 @@ XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 5 XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1030 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 6 #close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-10-00-05.log +> logs/test.2011-03-07-10-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -106,7 +105,7 @@ XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 6 XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1031 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 7 #close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-11-00-05.log +> logs/test.2011-03-07-11-00-05.log #separator \x09 #set_separator , #empty_field (empty) @@ -118,7 +117,7 @@ XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 7 XXXXXXXXXX.XXXXXX 10.0.0.1 20 10.0.0.2 1032 XXXXXXXXXX.XXXXXX 10.0.0.2 20 10.0.0.3 8 
#close XXXX-XX-XX-XX-XX-XX -> test.2011-03-07-12-00-05.log +> logs/test.2011-03-07-12-00-05.log #separator \x09 #set_separator , #empty_field (empty) diff --git a/testing/btest/Baseline/scripts.base.protocols.socks.socks-auth-10080/conn.log.cut b/testing/btest/Baseline/scripts.base.protocols.socks.socks-auth-10080/conn.log.cut new file mode 100644 index 0000000000..640346b6bc --- /dev/null +++ b/testing/btest/Baseline/scripts.base.protocols.socks.socks-auth-10080/conn.log.cut @@ -0,0 +1,4 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +id.orig_h id.orig_p id.resp_h id.resp_p service history +192.168.0.2 55951 192.168.0.1 10080 socks ShADad +192.168.0.1 55951 192.168.0.2 22 - ShA diff --git a/testing/btest/Baseline/scripts.base.protocols.socks.socks-auth-10080/socks.log.cut b/testing/btest/Baseline/scripts.base.protocols.socks.socks-auth-10080/socks.log.cut new file mode 100644 index 0000000000..22eed48205 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.protocols.socks.socks-auth-10080/socks.log.cut @@ -0,0 +1,3 @@ +### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. +id.orig_h id.orig_p id.resp_h id.resp_p version status bound.host bound.name bound_p +192.168.0.2 55951 192.168.0.1 10080 5 succeeded 192.168.0.1 - 55951 diff --git a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/files.log.deprecated b/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/files.log.deprecated deleted file mode 100644 index 9908333d39..0000000000 --- a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/files.log.deprecated +++ /dev/null @@ -1,13 +0,0 @@ -### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-#separator \x09 -#set_separator , -#empty_field (empty) -#unset_field - -#path files -#open XXXX-XX-XX-XX-XX-XX -#fields ts fuid uid id.orig_h id.orig_p id.resp_h id.resp_p source depth analyzers mime_type filename duration local_orig is_orig seen_bytes total_bytes missing_bytes overflow_bytes timedout parent_fuid tx_hosts rx_hosts conn_uids -#types time string string addr port addr port string count set[string] string string interval bool bool count count count count bool string set[addr] set[addr] set[string] -XXXXXXXXXX.XXXXXX FaGjhv1ozACeoEnwg5 C4J4Th3PJpwUYZZ6gc 192.168.0.107 58720 88.198.248.254 80 HTTP 0 (empty) - - 0.076646 F F 30003 104857600 179998 0 T - 88.198.248.254 192.168.0.107 C4J4Th3PJpwUYZZ6gc -XXXXXXXXXX.XXXXXX FaGjhv1ozACeoEnwg5 CHhAvVGS1DHFjwGM9 192.168.0.107 58716 88.198.248.254 80 HTTP 0 (empty) - - 0.076646 F F 30003 104857600 179998 0 T - 88.198.248.254 192.168.0.107 CHhAvVGS1DHFjwGM9 -XXXXXXXXXX.XXXXXX FaGjhv1ozACeoEnwg5 ClEkJM2Vm5giqnMf4h 192.168.0.107 58718 88.198.248.254 80 HTTP 0 (empty) - - 0.076646 F F 30003 104857600 179998 0 T - 88.198.248.254 192.168.0.107 ClEkJM2Vm5giqnMf4h -#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/files.log.new b/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/files.log.new deleted file mode 100644 index 6b5f32fd1b..0000000000 --- a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields-concurrent-range-requests/files.log.new +++ /dev/null @@ -1,13 +0,0 @@ -### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -#separator \x09 -#set_separator , -#empty_field (empty) -#unset_field - -#path files -#open XXXX-XX-XX-XX-XX-XX -#fields ts fuid uid id.orig_h id.orig_p id.resp_h id.resp_p source depth analyzers mime_type filename duration local_orig is_orig seen_bytes total_bytes missing_bytes overflow_bytes timedout parent_fuid -#types time string string addr port addr port string count set[string] string string interval bool bool count count count count bool string -XXXXXXXXXX.XXXXXX FaGjhv1ozACeoEnwg5 C4J4Th3PJpwUYZZ6gc 192.168.0.107 58720 88.198.248.254 80 HTTP 0 (empty) - - 0.076646 F F 30003 104857600 179998 0 T - -XXXXXXXXXX.XXXXXX FaGjhv1ozACeoEnwg5 CHhAvVGS1DHFjwGM9 192.168.0.107 58716 88.198.248.254 80 HTTP 0 (empty) - - 0.076646 F F 30003 104857600 179998 0 T - -XXXXXXXXXX.XXXXXX FaGjhv1ozACeoEnwg5 ClEkJM2Vm5giqnMf4h 192.168.0.107 58718 88.198.248.254 80 HTTP 0 (empty) - - 0.076646 F F 30003 104857600 179998 0 T - -#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/files.log.deprecated b/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/files.log.deprecated deleted file mode 100644 index 4664ffb864..0000000000 --- a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/files.log.deprecated +++ /dev/null @@ -1,11 +0,0 @@ -### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-#separator \x09 -#set_separator , -#empty_field (empty) -#unset_field - -#path files -#open XXXX-XX-XX-XX-XX-XX -#fields ts fuid uid id.orig_h id.orig_p id.resp_h id.resp_p source depth analyzers mime_type filename duration local_orig is_orig seen_bytes total_bytes missing_bytes overflow_bytes timedout parent_fuid tx_hosts rx_hosts conn_uids -#types time string string addr port addr port string count set[string] string string interval bool bool count count count count bool string set[addr] set[addr] set[string] -XXXXXXXXXX.XXXXXX FMnxxt3xjVcWNS2141 CHhAvVGS1DHFjwGM9 141.142.228.5 59856 192.150.187.43 80 HTTP 0 (empty) text/plain - 0.000263 F F 4705 4705 0 0 F - 192.150.187.43 141.142.228.5 CHhAvVGS1DHFjwGM9 -#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/files.log.new b/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/files.log.new deleted file mode 100644 index 1e6ed8b6c8..0000000000 --- a/testing/btest/Baseline/scripts.policy.frameworks.files.deprecated-fields/files.log.new +++ /dev/null @@ -1,11 +0,0 @@ -### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. -#separator \x09 -#set_separator , -#empty_field (empty) -#unset_field - -#path files -#open XXXX-XX-XX-XX-XX-XX -#fields ts fuid uid id.orig_h id.orig_p id.resp_h id.resp_p source depth analyzers mime_type filename duration local_orig is_orig seen_bytes total_bytes missing_bytes overflow_bytes timedout parent_fuid -#types time string string addr port addr port string count set[string] string string interval bool bool count count count count bool string -XXXXXXXXXX.XXXXXX FMnxxt3xjVcWNS2141 CHhAvVGS1DHFjwGM9 141.142.228.5 59856 192.150.187.43 80 HTTP 0 (empty) text/plain - 0.000263 F F 4705 4705 0 0 F - -#close XXXX-XX-XX-XX-XX-XX diff --git a/testing/btest/Baseline/scripts.base.frameworks.telemetry.cluster/manager-1..stdout b/testing/btest/Baseline/scripts.policy.frameworks.telemetry.prometheus/manager-1..stdout similarity index 100% rename from testing/btest/Baseline/scripts.base.frameworks.telemetry.cluster/manager-1..stdout rename to testing/btest/Baseline/scripts.policy.frameworks.telemetry.prometheus/manager-1..stdout diff --git a/testing/btest/Baseline/scripts.policy.misc.dump-events/really-all-events.log b/testing/btest/Baseline/scripts.policy.misc.dump-events/really-all-events.log index 06276d7a7a..9d3eca4e48 100644 --- a/testing/btest/Baseline/scripts.policy.misc.dump-events/really-all-events.log +++ b/testing/btest/Baseline/scripts.policy.misc.dump-events/really-all-events.log @@ -41,11 +41,6 @@ XXXXXXXXXX.XXXXXX analyzer_confirmation_info [0] atype: AllAnalyzers::Tag = Analyzer::ANALYZER_DNS [1] info: AnalyzerConfirmationInfo = [c=[id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], orig=[size=34, state=1, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=0, state=0, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=XXXXXXXXXX.XXXXXX, duration=0 secs, service={\x0a\x0a}, history=D, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09DNS::finalize_dns\x0a\x09{ \x0a\x09if (!DNS::c?$dns_state) \x0a\x09\x09return ;\x0a\x0a\x09if (DNS::c$dns_state?$pending_query) \x0a\x09\x09Log::write(DNS::LOG, to_any_coerceDNS::c$dns_state$pending_query);\x0a\x0a\x09if (DNS::c$dns_state?$pending_queries) 
\x0a\x09\x09DNS::log_unmatched_msgs(DNS::c$dns_state$pending_queries);\x0a\x0a\x09if (DNS::c$dns_state?$pending_replies) \x0a\x09\x09DNS::log_unmatched_msgs(DNS::c$dns_state$pending_replies);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=[ts=XXXXXXXXXX.XXXXXX, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=, rcode_name=, AA=F, TC=F, RD=T, RA=F, Z=0, answers=, TTLs=, rejected=F, total_answers=, total_replies=, saw_query=F, saw_reply=F], dns_state=[pending_query=[ts=XXXXXXXXXX.XXXXXX, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=, rcode_name=, AA=F, TC=F, RD=T, RA=F, Z=0, answers=, TTLs=, rejected=F, total_answers=, total_replies=, saw_query=F, saw_reply=F], pending_queries=, pending_replies=], ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=], f=, aid=3] -XXXXXXXXXX.XXXXXX analyzer_confirmation - [0] c: connection = [id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], orig=[size=34, state=1, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=0, state=0, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=XXXXXXXXXX.XXXXXX, duration=0 secs, service={\x0aDNS\x0a}, history=D, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09DNS::finalize_dns\x0a\x09{ \x0a\x09if (!DNS::c?$dns_state) \x0a\x09\x09return ;\x0a\x0a\x09if (DNS::c$dns_state?$pending_query) \x0a\x09\x09Log::write(DNS::LOG, to_any_coerceDNS::c$dns_state$pending_query);\x0a\x0a\x09if (DNS::c$dns_state?$pending_queries) \x0a\x09\x09DNS::log_unmatched_msgs(DNS::c$dns_state$pending_queries);\x0a\x0a\x09if (DNS::c$dns_state?$pending_replies) \x0a\x09\x09DNS::log_unmatched_msgs(DNS::c$dns_state$pending_replies);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=[ts=XXXXXXXXXX.XXXXXX, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=, rcode_name=, AA=F, TC=F, RD=T, RA=F, Z=0, answers=, TTLs=, rejected=F, total_answers=, total_replies=, saw_query=F, saw_reply=F], dns_state=[pending_query=[ts=XXXXXXXXXX.XXXXXX, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=, rcode_name=, AA=F, TC=F, RD=T, RA=F, Z=0, answers=, TTLs=, rejected=F, total_answers=, total_replies=, saw_query=F, saw_reply=F], pending_queries=, pending_replies=], ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=] - [1] atype: AllAnalyzers::Tag = Analyzer::ANALYZER_DNS - [2] aid: 
count = 3 - XXXXXXXXXX.XXXXXX dns_end [0] c: connection = [id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], orig=[size=34, state=1, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=0, state=0, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=XXXXXXXXXX.XXXXXX, duration=0 secs, service={\x0aDNS\x0a}, history=D, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09DNS::finalize_dns\x0a\x09{ \x0a\x09if (!DNS::c?$dns_state) \x0a\x09\x09return ;\x0a\x0a\x09if (DNS::c$dns_state?$pending_query) \x0a\x09\x09Log::write(DNS::LOG, to_any_coerceDNS::c$dns_state$pending_query);\x0a\x0a\x09if (DNS::c$dns_state?$pending_queries) \x0a\x09\x09DNS::log_unmatched_msgs(DNS::c$dns_state$pending_queries);\x0a\x0a\x09if (DNS::c$dns_state?$pending_replies) \x0a\x09\x09DNS::log_unmatched_msgs(DNS::c$dns_state$pending_replies);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=[ts=XXXXXXXXXX.XXXXXX, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=, rcode_name=, AA=F, TC=F, RD=T, RA=F, Z=0, answers=, TTLs=, rejected=F, total_answers=, total_replies=, saw_query=F, saw_reply=F], dns_state=[pending_query=[ts=XXXXXXXXXX.XXXXXX, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=, rcode_name=, AA=F, TC=F, RD=T, RA=F, Z=0, answers=, TTLs=, rejected=F, total_answers=, total_replies=, saw_query=F, saw_reply=F], pending_queries=, pending_replies=], ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=] [1] msg: dns_msg = [id=31062, opcode=0, rcode=0, QR=F, AA=F, TC=F, RD=T, RA=F, Z=0, AD=F, CD=F, num_queries=1, num_answers=0, num_auth=0, num_addl=0] @@ -306,11 +301,6 @@ XXXXXXXXXX.XXXXXX analyzer_confirmation_info [0] atype: AllAnalyzers::Tag = Analyzer::ANALYZER_SMTP [1] info: AnalyzerConfirmationInfo = [c=[id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], orig=[size=9, state=4, num_pkts=2, num_bytes_ip=88, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=181, state=4, num_pkts=2, num_bytes_ip=269, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=XXXXXXXXXX.XXXXXX, duration=695.0 msecs 762.872696 usecs, service={\x0a\x0a}, history=ShAdD, uid=ClEkJM2Vm5giqnMf4h, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SMTP::finalize_smtp\x0a\x09{ \x0a\x09if (SMTP::c?$smtp) \x0a\x09\x09SMTP::smtp_message(SMTP::c);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=XXXXXXXXXX.XXXXXX, uid=ClEkJM2Vm5giqnMf4h, id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], trans_depth=1, helo=, mailfrom=, rcptto=, date=, from=, to=, cc=, reply_to=, 
msg_id=, in_reply_to=, subject=, x_originating_ip=, first_received=, second_received=, last_reply=220 and/or bulk e-mail., path=[74.53.140.153, 10.10.1.4], user_agent=, tls=F, process_received_from=T, has_client_activity=F, process_smtp_headers=T, entity_count=0, entity=, fuids=[]], smtp_state=[helo=, messages_transferred=0, pending_messages=, trans_mail_from_seen=F, trans_rcpt_to_seen=F, invalid_transactions=0, analyzer_id=, mime_depth=0], socks=, ssh=, syslog=], f=, aid=7] -XXXXXXXXXX.XXXXXX analyzer_confirmation - [0] c: connection = [id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], orig=[size=9, state=4, num_pkts=2, num_bytes_ip=88, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=181, state=4, num_pkts=2, num_bytes_ip=269, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=XXXXXXXXXX.XXXXXX, duration=695.0 msecs 762.872696 usecs, service={\x0aSMTP\x0a}, history=ShAdD, uid=ClEkJM2Vm5giqnMf4h, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SMTP::finalize_smtp\x0a\x09{ \x0a\x09if (SMTP::c?$smtp) \x0a\x09\x09SMTP::smtp_message(SMTP::c);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=XXXXXXXXXX.XXXXXX, uid=ClEkJM2Vm5giqnMf4h, id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], trans_depth=1, helo=, mailfrom=, rcptto=, date=, from=, to=, cc=, reply_to=, msg_id=, in_reply_to=, subject=, x_originating_ip=, first_received=, second_received=, last_reply=220 and/or bulk e-mail., path=[74.53.140.153, 10.10.1.4], user_agent=, tls=F, process_received_from=T, has_client_activity=F, process_smtp_headers=T, entity_count=0, entity=, fuids=[]], smtp_state=[helo=, messages_transferred=0, pending_messages=, trans_mail_from_seen=F, trans_rcpt_to_seen=F, invalid_transactions=0, analyzer_id=7, mime_depth=0], socks=, ssh=, syslog=] - [1] atype: AllAnalyzers::Tag = Analyzer::ANALYZER_SMTP - [2] aid: count = 7 - XXXXXXXXXX.XXXXXX smtp_request [0] c: connection = [id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], orig=[size=9, state=4, num_pkts=2, num_bytes_ip=88, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=181, state=4, num_pkts=2, num_bytes_ip=269, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=XXXXXXXXXX.XXXXXX, duration=695.0 msecs 762.872696 usecs, service={\x0aSMTP\x0a}, history=ShAdD, uid=ClEkJM2Vm5giqnMf4h, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SMTP::finalize_smtp\x0a\x09{ \x0a\x09if (SMTP::c?$smtp) \x0a\x09\x09SMTP::smtp_message(SMTP::c);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=XXXXXXXXXX.XXXXXX, uid=ClEkJM2Vm5giqnMf4h, id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], trans_depth=1, helo=, mailfrom=, rcptto=, date=, from=, to=, cc=, reply_to=, msg_id=, in_reply_to=, subject=, x_originating_ip=, first_received=, second_received=, last_reply=220 and/or bulk e-mail., path=[74.53.140.153, 10.10.1.4], user_agent=, 
tls=F, process_received_from=T, has_client_activity=F, process_smtp_headers=T, entity_count=0, entity=, fuids=[]], smtp_state=[helo=, messages_transferred=0, pending_messages=, trans_mail_from_seen=F, trans_rcpt_to_seen=F, invalid_transactions=0, analyzer_id=7, mime_depth=0], socks=, ssh=, syslog=] [1] is_orig: bool = T @@ -6699,11 +6689,6 @@ XXXXXXXXXX.XXXXXX analyzer_confirmation_info [0] atype: AllAnalyzers::Tag = Analyzer::ANALYZER_SMTP [1] info: AnalyzerConfirmationInfo = [c=[id=[orig_h=192.168.133.100, orig_p=49648/tcp, resp_h=192.168.133.102, resp_p=25/tcp], orig=[size=24, state=4, num_pkts=3, num_bytes_ip=168, flow_label=0, l2_addr=58:b0:35:86:54:8d], resp=[size=35, state=4, num_pkts=2, num_bytes_ip=147, flow_label=0, l2_addr=00:08:ca:cc:ad:4c], start_time=XXXXXXXXXX.XXXXXX, duration=26.0 msecs 411.056519 usecs, service={\x0a\x0a}, history=ShAdD, uid=CmES5u32sYpV7JYN, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SMTP::finalize_smtp\x0a\x09{ \x0a\x09if (SMTP::c?$smtp) \x0a\x09\x09SMTP::smtp_message(SMTP::c);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=XXXXXXXXXX.XXXXXX, uid=CmES5u32sYpV7JYN, id=[orig_h=192.168.133.100, orig_p=49648/tcp, resp_h=192.168.133.102, resp_p=25/tcp], trans_depth=1, helo=, mailfrom=, rcptto=, date=, from=, to=, cc=, reply_to=, msg_id=, in_reply_to=, subject=, x_originating_ip=, first_received=, second_received=, last_reply=220 uprise ESMTP SubEthaSMTP null, path=[192.168.133.102, 192.168.133.100], user_agent=, tls=F, process_received_from=T, has_client_activity=F, process_smtp_headers=T, entity_count=0, entity=, fuids=[]], smtp_state=[helo=, messages_transferred=0, pending_messages=, trans_mail_from_seen=F, trans_rcpt_to_seen=F, invalid_transactions=0, analyzer_id=, mime_depth=0], socks=, ssh=, syslog=], f=, aid=21] -XXXXXXXXXX.XXXXXX analyzer_confirmation - [0] c: connection = [id=[orig_h=192.168.133.100, orig_p=49648/tcp, resp_h=192.168.133.102, resp_p=25/tcp], orig=[size=24, state=4, num_pkts=3, num_bytes_ip=168, flow_label=0, l2_addr=58:b0:35:86:54:8d], resp=[size=35, state=4, num_pkts=2, num_bytes_ip=147, flow_label=0, l2_addr=00:08:ca:cc:ad:4c], start_time=XXXXXXXXXX.XXXXXX, duration=26.0 msecs 411.056519 usecs, service={\x0aSMTP\x0a}, history=ShAdD, uid=CmES5u32sYpV7JYN, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SMTP::finalize_smtp\x0a\x09{ \x0a\x09if (SMTP::c?$smtp) \x0a\x09\x09SMTP::smtp_message(SMTP::c);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=XXXXXXXXXX.XXXXXX, uid=CmES5u32sYpV7JYN, id=[orig_h=192.168.133.100, orig_p=49648/tcp, resp_h=192.168.133.102, resp_p=25/tcp], trans_depth=1, helo=, mailfrom=, rcptto=, date=, from=, to=, cc=, reply_to=, msg_id=, in_reply_to=, subject=, x_originating_ip=, first_received=, second_received=, last_reply=220 uprise ESMTP SubEthaSMTP null, path=[192.168.133.102, 192.168.133.100], user_agent=, tls=F, process_received_from=T, has_client_activity=F, 
process_smtp_headers=T, entity_count=0, entity=, fuids=[]], smtp_state=[helo=, messages_transferred=0, pending_messages=, trans_mail_from_seen=F, trans_rcpt_to_seen=F, invalid_transactions=0, analyzer_id=21, mime_depth=0], socks=, ssh=, syslog=] - [1] atype: AllAnalyzers::Tag = Analyzer::ANALYZER_SMTP - [2] aid: count = 21 - XXXXXXXXXX.XXXXXX smtp_request [0] c: connection = [id=[orig_h=192.168.133.100, orig_p=49648/tcp, resp_h=192.168.133.102, resp_p=25/tcp], orig=[size=24, state=4, num_pkts=3, num_bytes_ip=168, flow_label=0, l2_addr=58:b0:35:86:54:8d], resp=[size=35, state=4, num_pkts=2, num_bytes_ip=147, flow_label=0, l2_addr=00:08:ca:cc:ad:4c], start_time=XXXXXXXXXX.XXXXXX, duration=26.0 msecs 411.056519 usecs, service={\x0aSMTP\x0a}, history=ShAdD, uid=CmES5u32sYpV7JYN, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SMTP::finalize_smtp\x0a\x09{ \x0a\x09if (SMTP::c?$smtp) \x0a\x09\x09SMTP::smtp_message(SMTP::c);\x0a\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=XXXXXXXXXX.XXXXXX, uid=CmES5u32sYpV7JYN, id=[orig_h=192.168.133.100, orig_p=49648/tcp, resp_h=192.168.133.102, resp_p=25/tcp], trans_depth=1, helo=, mailfrom=, rcptto=, date=, from=, to=, cc=, reply_to=, msg_id=, in_reply_to=, subject=, x_originating_ip=, first_received=, second_received=, last_reply=220 uprise ESMTP SubEthaSMTP null, path=[192.168.133.102, 192.168.133.100], user_agent=, tls=F, process_received_from=T, has_client_activity=F, process_smtp_headers=T, entity_count=0, entity=, fuids=[]], smtp_state=[helo=, messages_transferred=0, pending_messages=, trans_mail_from_seen=F, trans_rcpt_to_seen=F, invalid_transactions=0, analyzer_id=21, mime_depth=0], socks=, ssh=, syslog=] [1] is_orig: bool = T @@ -8637,11 +8622,6 @@ XXXXXXXXXX.XXXXXX analyzer_confirmation_info [0] atype: AllAnalyzers::Tag = Analyzer::ANALYZER_SSL [1] info: AnalyzerConfirmationInfo = [c=[id=[orig_h=192.168.133.100, orig_p=49655/tcp, resp_h=17.167.150.73, resp_p=443/tcp], orig=[size=201, state=4, num_pkts=2, num_bytes_ip=104, flow_label=0, l2_addr=58:b0:35:86:54:8d], resp=[size=0, state=4, num_pkts=1, num_bytes_ip=52, flow_label=0, l2_addr=cc:b2:55:f4:62:92], start_time=XXXXXXXXXX.XXXXXX, duration=150.0 msecs 611.877441 usecs, service={\x0a\x0a}, history=ShAD, uid=C3eiCBGOLw3VtHfOj, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SSL::finalize_ssl\x0a\x09{ \x0a\x09if (!SSL::c?$ssl) \x0a\x09\x09return ;\x0a\x0a\x09if (!SSL::c$ssl$logged) \x0a\x09\x09SSL::ssl_finishing(SSL::c);\x0a\x0a\x09SSL::finish(SSL::c, F);\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=[ts=XXXXXXXXXX.XXXXXX, uid=C3eiCBGOLw3VtHfOj, id=[orig_h=192.168.133.100, orig_p=49655/tcp, resp_h=17.167.150.73, resp_p=443/tcp], version_num=, version=, cipher=, curve=, server_name=p31-keyvalueservice.icloud.com, session_id=, resumed=F, client_ticket_empty_session_seen=F, client_key_exchange_seen=F, client_psk_seen=F, last_alert=, next_protocol=, analyzer_id=, established=F, logged=F, hrr_seen=F, ssl_history=, delay_tokens=, cert_chain=, cert_chain_fps=, client_cert_chain=, client_cert_chain_fps=, 
subject=, issuer=, client_subject=, client_issuer=, sni_matches_cert=, server_depth=0, client_depth=0], http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=], f=, aid=35] -XXXXXXXXXX.XXXXXX analyzer_confirmation - [0] c: connection = [id=[orig_h=192.168.133.100, orig_p=49655/tcp, resp_h=17.167.150.73, resp_p=443/tcp], orig=[size=201, state=4, num_pkts=2, num_bytes_ip=104, flow_label=0, l2_addr=58:b0:35:86:54:8d], resp=[size=0, state=4, num_pkts=1, num_bytes_ip=52, flow_label=0, l2_addr=cc:b2:55:f4:62:92], start_time=XXXXXXXXXX.XXXXXX, duration=150.0 msecs 611.877441 usecs, service={\x0aSSL\x0a}, history=ShAD, uid=C3eiCBGOLw3VtHfOj, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SSL::finalize_ssl\x0a\x09{ \x0a\x09if (!SSL::c?$ssl) \x0a\x09\x09return ;\x0a\x0a\x09if (!SSL::c$ssl$logged) \x0a\x09\x09SSL::ssl_finishing(SSL::c);\x0a\x0a\x09SSL::finish(SSL::c, F);\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=[ts=XXXXXXXXXX.XXXXXX, uid=C3eiCBGOLw3VtHfOj, id=[orig_h=192.168.133.100, orig_p=49655/tcp, resp_h=17.167.150.73, resp_p=443/tcp], version_num=, version=, cipher=, curve=, server_name=p31-keyvalueservice.icloud.com, session_id=, resumed=F, client_ticket_empty_session_seen=F, client_key_exchange_seen=F, client_psk_seen=F, last_alert=, next_protocol=, analyzer_id=35, established=F, logged=F, hrr_seen=F, ssl_history=, delay_tokens=, cert_chain=, cert_chain_fps=, client_cert_chain=, client_cert_chain_fps=, subject=, issuer=, client_subject=, client_issuer=, sni_matches_cert=, server_depth=0, client_depth=0], http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=] - [1] atype: AllAnalyzers::Tag = Analyzer::ANALYZER_SSL - [2] aid: count = 35 - XXXXXXXXXX.XXXXXX ssl_client_hello [0] c: connection = [id=[orig_h=192.168.133.100, orig_p=49655/tcp, resp_h=17.167.150.73, resp_p=443/tcp], orig=[size=201, state=4, num_pkts=2, num_bytes_ip=104, flow_label=0, l2_addr=58:b0:35:86:54:8d], resp=[size=0, state=4, num_pkts=1, num_bytes_ip=52, flow_label=0, l2_addr=cc:b2:55:f4:62:92], start_time=XXXXXXXXXX.XXXXXX, duration=150.0 msecs 611.877441 usecs, service={\x0aSSL\x0a}, history=ShAD, uid=C3eiCBGOLw3VtHfOj, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, service_violation={\x0a\x0a}, removal_hooks={\x0a\x09SSL::finalize_ssl\x0a\x09{ \x0a\x09if (!SSL::c?$ssl) \x0a\x09\x09return ;\x0a\x0a\x09if (!SSL::c$ssl$logged) \x0a\x09\x09SSL::ssl_finishing(SSL::c);\x0a\x0a\x09SSL::finish(SSL::c, F);\x0a\x09}\x0a}, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=[ts=XXXXXXXXXX.XXXXXX, uid=C3eiCBGOLw3VtHfOj, id=[orig_h=192.168.133.100, orig_p=49655/tcp, resp_h=17.167.150.73, resp_p=443/tcp], version_num=, version=, cipher=, curve=, server_name=p31-keyvalueservice.icloud.com, session_id=, resumed=F, client_ticket_empty_session_seen=F, client_key_exchange_seen=F, client_psk_seen=F, last_alert=, next_protocol=, analyzer_id=35, established=F, logged=F, hrr_seen=F, ssl_history=, delay_tokens=, cert_chain=, cert_chain_fps=, client_cert_chain=, client_cert_chain_fps=, subject=, issuer=, 
client_subject=, client_issuer=, sni_matches_cert=, server_depth=0, client_depth=0], http=, http_state=, irc=, krb=, modbus=, mqtt=, mqtt_state=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=] [1] version: count = 771 diff --git a/testing/btest/Baseline/supervisor.config-scripts/zeek.node.out b/testing/btest/Baseline/supervisor.config-scripts/zeek.node.out index cb24a6aeff..f51b22da7a 100644 --- a/testing/btest/Baseline/supervisor.config-scripts/zeek.node.out +++ b/testing/btest/Baseline/supervisor.config-scripts/zeek.node.out @@ -1,5 +1,4 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. supervised node zeek_init(), counter at 1 supervised node loaded addl_user_script.zeek, counter at 2 -supervised node loaded script.zeek, counter at 3 supervised node zeek_done() diff --git a/testing/btest/Traces/socks-auth-10080.pcap b/testing/btest/Traces/socks-auth-10080.pcap new file mode 100644 index 0000000000..8720135223 Binary files /dev/null and b/testing/btest/Traces/socks-auth-10080.pcap differ diff --git a/testing/btest/bifs/disable_analyzer-early.zeek b/testing/btest/bifs/disable_analyzer-early.zeek index 96e2fc8914..c59fecf6d7 100644 --- a/testing/btest/bifs/disable_analyzer-early.zeek +++ b/testing/btest/bifs/disable_analyzer-early.zeek @@ -5,13 +5,13 @@ global msg_count: table[conn_id] of count &default=0; -event analyzer_confirmation(c: connection, atype: Analyzer::Tag, aid: count) &priority=10 +event analyzer_confirmation_info(atype: AllAnalyzers::Tag, info: AnalyzerConfirmationInfo) &priority=10 { if ( atype != Analyzer::ANALYZER_HTTP ) return; print "proto confirm", atype; - print disable_analyzer(c$id, aid, T, T); + print disable_analyzer(info$c$id, info$aid, T, T); } event http_request(c: connection, method: string, original_URI: string, unescaped_URI: string, version: string) diff --git a/testing/btest/bifs/disable_analyzer.zeek b/testing/btest/bifs/disable_analyzer.zeek index 03475a8dbc..26f45efdad 100644 --- a/testing/btest/bifs/disable_analyzer.zeek +++ b/testing/btest/bifs/disable_analyzer.zeek @@ -5,7 +5,7 @@ global msg_count: table[conn_id] of count &default=0; -event analyzer_confirmation(c: connection, atype: AllAnalyzers::Tag, aid: count) &priority=10 +event analyzer_confirmation_info(atype: AllAnalyzers::Tag, info: AnalyzerConfirmationInfo) &priority=10 { if ( atype != Analyzer::ANALYZER_HTTP ) return; diff --git a/testing/btest/broker/remote_event_auto_ts.zeek b/testing/btest/broker/remote_event_auto_ts.zeek index 31bc2da59f..fe2b24f870 100644 --- a/testing/btest/broker/remote_event_auto_ts.zeek +++ b/testing/btest/broker/remote_event_auto_ts.zeek @@ -1,3 +1,6 @@ +# Not compatible with -O C++ testing since includes two distinct scripts. +# @TEST-REQUIRES: test "${ZEEK_USE_CPP}" != "1" +# # @TEST-GROUP: broker # # @TEST-PORT: BROKER_PORT diff --git a/testing/btest/broker/remote_event_ts.zeek b/testing/btest/broker/remote_event_ts.zeek index e4f2f508d1..f822fde332 100644 --- a/testing/btest/broker/remote_event_ts.zeek +++ b/testing/btest/broker/remote_event_ts.zeek @@ -1,3 +1,6 @@ +# Not compatible with -O C++ testing since includes two distinct scripts. 
+# @TEST-REQUIRES: test "${ZEEK_USE_CPP}" != "1" +# # @TEST-GROUP: broker # # @TEST-PORT: BROKER_PORT diff --git a/testing/btest/broker/remote_event_ts_compat.zeek b/testing/btest/broker/remote_event_ts_compat.zeek index 8e73b1078c..d18b0579ca 100644 --- a/testing/btest/broker/remote_event_ts_compat.zeek +++ b/testing/btest/broker/remote_event_ts_compat.zeek @@ -7,6 +7,8 @@ # @TEST-REQUIRES: TOPIC=/zeek/my_topic python3 client.py check # # @TEST-EXEC: TOPIC=/zeek/my_topic btest-bg-run server "zeek %INPUT >output" +# Leave room for Zeek to start up, which can be slow when using -O ZAM +# @TEST-EXEC: sleep 5 # @TEST-EXEC: TOPIC=/zeek/my_topic btest-bg-run client "python3 ../client.py >output" # # @TEST-EXEC: btest-bg-wait 45 diff --git a/testing/btest/core/analyzer-confirmation-violation-info-ftp.zeek b/testing/btest/core/analyzer-confirmation-violation-info-ftp.zeek index 36ea69fd3d..4d4e5396e8 100644 --- a/testing/btest/core/analyzer-confirmation-violation-info-ftp.zeek +++ b/testing/btest/core/analyzer-confirmation-violation-info-ftp.zeek @@ -7,17 +7,7 @@ event analyzer_confirmation_info(tag: AllAnalyzers::Tag, info: AnalyzerConfirmat print "analyzer_confirmation_info", tag, info$c$id, info$aid; } -event analyzer_confirmation(c: connection, tag: AllAnalyzers::Tag, aid: count) - { - print "analyzer_confirmation", tag, c$id, aid; - } - event analyzer_violation_info(tag: AllAnalyzers::Tag, info: AnalyzerViolationInfo) { print "analyzer_violation_info", tag, info$reason, info$c$id, info$aid, fmt("%s", info$data); } - -event analyzer_violation(c: connection, tag: AllAnalyzers::Tag, aid: count, reason: string) - { - print "analyzer_violation", tag, reason, c$id, aid; - } diff --git a/testing/btest/core/analyzer-confirmation-violation-info.zeek b/testing/btest/core/analyzer-confirmation-violation-info.zeek index 268d0c8c6a..7f59276ad3 100644 --- a/testing/btest/core/analyzer-confirmation-violation-info.zeek +++ b/testing/btest/core/analyzer-confirmation-violation-info.zeek @@ -7,17 +7,7 @@ event analyzer_confirmation_info(tag: AllAnalyzers::Tag, info: AnalyzerConfirmat print "analyzer_confirmation_info", tag, info$c$id, info$aid; } -event analyzer_confirmation(c: connection, tag: AllAnalyzers::Tag, aid: count) - { - print "analyzer_confirmation", tag, c$id, aid; - } - event analyzer_violation_info(tag: AllAnalyzers::Tag, info: AnalyzerViolationInfo) { print "analyzer_violation_info", tag, info$reason, info$c$id, info$aid; } - -event analyzer_violation(c: connection, tag: AllAnalyzers::Tag, aid: count, reason: string) - { - print "analyzer_violation", tag, reason, c$id, aid; - } diff --git a/testing/btest/core/tunnels/analyzer-confirmation.zeek b/testing/btest/core/tunnels/analyzer-confirmation.zeek index 22ec0d3d2c..c121fe00d6 100644 --- a/testing/btest/core/tunnels/analyzer-confirmation.zeek +++ b/testing/btest/core/tunnels/analyzer-confirmation.zeek @@ -8,12 +8,12 @@ @load base/protocols/conn @load base/protocols/http -event analyzer_confirmation(c: connection, atype: AllAnalyzers::Tag, aid: count) +event analyzer_confirmation_info(tag: AllAnalyzers::Tag, info: AnalyzerConfirmationInfo) { - print "analyzer_confirmation", c$uid, c$id, aid; + print "analyzer_confirmation", info$c$uid, info$c$id, info$aid; } -event analyzer_violation(c: connection, atype: AllAnalyzers::Tag, aid: count, reason: string) +event analyzer_violation_info(tag: AllAnalyzers::Tag, info: AnalyzerViolationInfo) { - print "analyzer_violation", c$uid, c$id, aid, reason; + print "analyzer_violation", info$c$uid, info$c$id, 
info$aid, info$reason; } diff --git a/testing/btest/core/tunnels/gtp/non_recursive.test b/testing/btest/core/tunnels/gtp/non_recursive.test index ebeaae52a8..9dd4b62304 100644 --- a/testing/btest/core/tunnels/gtp/non_recursive.test +++ b/testing/btest/core/tunnels/gtp/non_recursive.test @@ -5,7 +5,7 @@ # So if we find inside a GTP tunnel another IP/UDP packet with port 2152, # it is just a UDP packet, but not another GTP tunnel. -event analyzer_violation(c: connection, atype: AllAnalyzers::Tag, aid: count, reason: string) +event analyzer_violation_info(tag: AllAnalyzers::Tag, info: AnalyzerViolationInfo) { - print "protocol_violation", c$id, reason; + print "protocol_violation", info$c$id, info$reason; } diff --git a/testing/btest/core/tunnels/teredo-known-services.test b/testing/btest/core/tunnels/teredo-known-services.test index c5a687527c..07455a5932 100644 --- a/testing/btest/core/tunnels/teredo-known-services.test +++ b/testing/btest/core/tunnels/teredo-known-services.test @@ -1,4 +1,4 @@ -# @TEST-EXEC: zeek -b -r $TRACES/tunnels/Teredo.pcap base/frameworks/dpd base/protocols/tunnels base/protocols/dns protocols/conn/known-services Tunnel::delay_teredo_confirmation=T "Site::local_nets+={192.168.2.0/24}" +# @TEST-EXEC: zeek -b -r $TRACES/tunnels/Teredo.pcap base/protocols/tunnels base/protocols/dns protocols/conn/known-services Tunnel::delay_teredo_confirmation=T "Site::local_nets+={192.168.2.0/24}" # @TEST-EXEC: btest-diff known_services.log # Expect known_services.log to NOT indicate any service using teredo. diff --git a/testing/btest/language/assert-error.zeek b/testing/btest/language/assert-error.zeek new file mode 100644 index 0000000000..3ae8384e3d --- /dev/null +++ b/testing/btest/language/assert-error.zeek @@ -0,0 +1,27 @@ +# @TEST-DOC: Assert statement wrong usage +# +# @TEST-EXEC-FAIL: zeek -b %INPUT +# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff .stderr + +event zeek_init() + { + assert 1; + } + +@TEST-START-NEXT +event zeek_init() + { + assert T, 1234; + } + +@TEST-START-NEXT +event zeek_init() + { + assert; + } + +@TEST-START-NEXT +event zeek_init() + { + assert T, "extra", "something"; + } diff --git a/testing/btest/language/assert-hook.zeek b/testing/btest/language/assert-hook.zeek new file mode 100644 index 0000000000..36909dec80 --- /dev/null +++ b/testing/btest/language/assert-hook.zeek @@ -0,0 +1,198 @@ +# @TEST-DOC: Assert statement testing with assertion_failure and assertion_result implementation. +# +# Doesn't make sense for ZAM as it ignores assert's. +# @TEST-REQUIRES: test "${ZEEK_ZAM}" != "1" +# +# @TEST-EXEC: zeek -b %INPUT >out +# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff out +# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff .stderr + +# Hook is not calling break: Reporter log is produced. +hook assertion_failure(cond: string, msg: string, bt: Backtrace) + { + print "assertion_failure", cond, msg, bt[0]$file_location, bt[0]$line_location; + } + +event zeek_init() + { + assert 1 != 1; + print "not reached"; + } + +@TEST-START-NEXT +# Test the backtrace location, also calling break to suppress reporter log. +hook assertion_failure(cond: string, msg: string, bt: Backtrace) + { + print "assertion_failure", cond, msg; + local indent = ""; + for ( _, e in bt ) + { + local file_name = e?$file_location ? e$file_location : ""; + local line_number = e?$line_location ? 
e$line_location : 0; + print fmt("%s%s %s:%s", indent, e$function_name, file_name, line_number); + indent = fmt("%s ", indent); + } + + break; + } + + +function f() + { + assert md5_hash("") == "d41d8cd98f00b204e9800998ecf8427e"; + assert to_count("5") == 4, fmt("5 is not 4"); + assert sha1_hash("") == "da39a3ee5e6b4b0d3255bfef95601890afd80709"; + } + +function g() { f(); } +function h() { g(); } + +event zeek_init() + { + h(); + print "not reached"; + } + +@TEST-START-NEXT +# Calling terminate() from the assertion hook. +redef exit_only_after_terminate = T; + +hook assertion_failure(cond: string, msg: string, bt: Backtrace) + { + print "assertion_failure", msg; + terminate(); + } + +event zeek_init() + { + assert F, "terminate me!"; + print "not reached"; + } + +event zeek_done() + { + print "zeek_done()"; + assert zeek_is_terminating(), "zeek_done() should have zeek terminating"; + } + +@TEST-START-NEXT +# Calling exit() from the assertion hook. +redef exit_only_after_terminate = T; + +hook assertion_failure(cond: string, msg: string, bt: Backtrace) + { + print "assertion_failure", msg; + exit(0); # in real tests use exit(1), this is to please btest. + } + +event zeek_init() + { + assert F, "calling exit!"; + print "not reached"; + } + +event zeek_done() + { + assert F, "zeek_done() not executed with exit()"; + } + +@TEST-START-NEXT +global assertion_failures = 0; +global assertions_total = 0; + +hook assertion_failure(cond: string, msg: string, bt: Backtrace) + { + print fmt("assertion_failure at %s:%s: %s%s%s", + bt[0]$file_location, bt[0]$line_location, + cond, |msg| > 0 ? " - " : "", msg); + + ++assertion_failures; + break; + } + +hook assertion_result(result: bool, cond: string, msg: string, bt: Backtrace) + { + print fmt("assertion_result %s at %s:%s: %s%s%s", + result, bt[0]$file_location, bt[0]$line_location, + cond, |msg| > 0 ? " - " : "", msg); + + ++assertions_total; + } + +event zeek_test() + { + assert md5_hash("") == "d41d8cd98f00b204e9800998ecf8427e"; + } + +event zeek_test() + { + assert sha1_hash("") == "da39a3ee5e6b4b0d3255bfef95601890afd80709"; + } + +event zeek_test() + { + assert sha1_hash("") == "wrong"; + } + +event zeek_test() + { + assert md5_hash("") == "wrong"; + } + +event zeek_init() + { + event zeek_test(); + } + +event zeek_done() + { + print fmt("%d of %d assertions failed", assertion_failures, assertions_total); + } + +@TEST-START-NEXT +# Evaluating the msg expression can cause errors, see if we deal +# with that gracefully. +hook assertion_failure(cond: string, msg: string, bt: Backtrace) + { + print "assertion_failure", cond, msg, bt[0]$file_location, bt[0]$line_location; + } + +hook assertion_result(result: bool, cond: string, msg: string, bt: Backtrace) + { + print "assertion_result", result, cond, msg, bt[0]$file_location, bt[0]$line_location; + } + +event zeek_init() + { + assert 2 + 2 == 4, cat(get_current_packet_header()$ip); + assert 2 + 2 == 4, to_json([$msg="true and works"]); + assert 2 + 2 == 5, cat(get_current_packet_header()$ip); + } + +event zeek_done() + { + assert 2 + 2 == 5, to_json([$msg="false and works"]); + assert 2 + 2 == 5, cat(get_current_packet_header()$ip); + } + +@TEST-START-NEXT +# Breaking in assertion_result() also suppresses the reporter errors. 
+hook assertion_result(result: bool, cond: string, msg: string, bt: Backtrace) + { + print "assertion_result", result, cond, msg, bt[0]$file_location, bt[0]$line_location; + break; + } + +event zeek_init() + { + assert 2 + 2 == 4, "this is true"; + assert 2 + 2 == 4, to_json([$msg="this is also true"]); + assert 2 + 2 == 5, "this is false"; + print "not reached"; + } + +event zeek_done() + { + assert 2 + 2 == 5, "this is false"; + print "not reached"; + } diff --git a/testing/btest/language/assert-misc.zeek b/testing/btest/language/assert-misc.zeek new file mode 100644 index 0000000000..6c9de58432 --- /dev/null +++ b/testing/btest/language/assert-misc.zeek @@ -0,0 +1,31 @@ +# @TEST-DOC: Test Describe() of assert statement. Expressions may be canonicalized. +# +# Doesn't make sense for ZAM as it ignores assert's. +# @TEST-REQUIRES: test "${ZEEK_ZAM}" != "1" +# +# @TEST-EXEC: zeek -b %INPUT >out 2>&1 +# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff out + +function test_function() + { + assert getpid() > 0; + } + +event zeek_init() + { + local f = function() { + assert getpid() > 0, fmt("my pid is funny: %s", getpid()); + }; + local g = function() { + assert to_count("42") == 42; + }; + + print "f", f; + f(); + + print "g", g; + g(); + + print "test_function", test_function; + test_function(); + } diff --git a/testing/btest/language/assert-top-level.zeek b/testing/btest/language/assert-top-level.zeek new file mode 100644 index 0000000000..0bde1ad926 --- /dev/null +++ b/testing/btest/language/assert-top-level.zeek @@ -0,0 +1,7 @@ +# Doesn't make sense for ZAM as it ignores assert's. +# @TEST-REQUIRES: test "${ZEEK_ZAM}" != "1" +# @TEST-EXEC-FAIL: zeek -b %INPUT >out +# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff .stderr + +assert getpid() > 0; +assert getpid() == 0, fmt("my pid greater 0? %s", getpid() > 0); diff --git a/testing/btest/language/assert.zeek b/testing/btest/language/assert.zeek new file mode 100644 index 0000000000..79ea8a9096 --- /dev/null +++ b/testing/btest/language/assert.zeek @@ -0,0 +1,78 @@ +# @TEST-DOC: Assert statement behavior testing without an assertion_failure() hook. +# +# Doesn't make sense for ZAM as it ignores assert's. +# @TEST-REQUIRES: test "${ZEEK_ZAM}" != "1" +# +# @TEST-EXEC-FAIL: unset ZEEK_ALLOW_INIT_ERRORS; zeek -b %INPUT >out 2>&1 +# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff out + +event zeek_init() + { + assert fmt("%s", 1) == "2"; + print "not reached"; + } + +@TEST-START-NEXT +event zeek_init() + { + assert fmt("%s", 1) == "2", fmt("\"%s\" != \"2\"", 1); + print "not reached"; + } + +@TEST-START-NEXT +event zeek_init() + { + assert to_count("42") == 42.5, "always failing"; + print "not reached"; + } + +@TEST-START-NEXT +event zeek_init() + { + local x = 2; + assert x == 1, fmt("Expected x to be 1, have %s", x); + print "not reached"; + } + +@TEST-START-NEXT +event zeek_init() + { + local tbl: table[string] of string = [ + ["abc"] = "123", + ["def"] = "456", + ]; + assert "abc" in tbl, cat(tbl); + assert "def" in tbl, cat(tbl); + assert "ghi" in tbl, cat(tbl); + } + +@TEST-START-NEXT +type MyRecord: record { + a: count; + b: count &optional; +}; + +event zeek_init() + { + local r: MyRecord = [$a=1234]; + assert ! r?$b, fmt("Unexpected r$b is set to %s", r$b); + assert r?$b, fmt("r$b is not set in %s", r); + } + +@TEST-START-NEXT +type MyRecord: record { + a: count; + b: count &optional; +}; + +event zeek_init() + { + local r: MyRecord = [$a=1234]; + assert ! 
r?$b, fmt("Unexpected r$b is set to %s", r$b); + assert r?$b, fmt("r$b is not set trying anyway: %s", r$b); + } + +@TEST-START-NEXT +assert 1 == 1, "always true"; +assert 1 == 2, "always false"; +print "not reached"; diff --git a/testing/btest/language/next-break-context-errors.zeek b/testing/btest/language/next-break-context-errors.zeek index 5c5d52d060..122f96d2d6 100644 --- a/testing/btest/language/next-break-context-errors.zeek +++ b/testing/btest/language/next-break-context-errors.zeek @@ -1,6 +1,6 @@ # @TEST-DOC: Check break and next usage within for, while, switch and hooks. -# @TEST-EXEC: zeek -b %INPUT +# @TEST-EXEC-FAIL: zeek -b %INPUT # @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff .stderr function f() { diff --git a/testing/btest/language/redef-same-prefixtable-idx.zeek b/testing/btest/language/redef-same-prefixtable-idx.zeek index 838f3fc0ba..194c49746b 100644 --- a/testing/btest/language/redef-same-prefixtable-idx.zeek +++ b/testing/btest/language/redef-same-prefixtable-idx.zeek @@ -4,12 +4,8 @@ const my_table: table[subnet] of subnet &redef; -redef my_table[3.0.0.0/8] = 1.0.0.0/8; -redef my_table[3.0.0.0/8] = 2.0.0.0/8; - -# The above is basically a shorthand for: -# redef my_table += { [3.0.0.0/8] = 1.0.0.0/8 }; -# redef my_table += { [3.0.0.0/8] = 2.0.0.0/8 }; +redef my_table += { [3.0.0.0/8] = 1.0.0.0/8 }; +redef my_table += { [3.0.0.0/8] = 2.0.0.0/8 }; event zeek_init() { diff --git a/testing/btest/language/table-redef.zeek b/testing/btest/language/table-redef.zeek index 51c4360044..87a12580b2 100644 --- a/testing/btest/language/table-redef.zeek +++ b/testing/btest/language/table-redef.zeek @@ -18,9 +18,7 @@ redef foo -= { ["ghi"] = 0.0 }; # RHS can be a table value redef foo += table(["cool"] = 5.0, ["neat"] = 1.0); -# Redef at a single index is allowed, same as += when RHS has overlapping index -redef foo["cool"] = 28.0; -redef foo["abc"] = 8.0; +# redef a single element using += redef foo += { ["def"] = 99.0 }; print foo; diff --git a/testing/btest/language/vector-neg-index.zeek b/testing/btest/language/vector-neg-index.zeek new file mode 100644 index 0000000000..330a13a033 --- /dev/null +++ b/testing/btest/language/vector-neg-index.zeek @@ -0,0 +1,9 @@ +# @TEST-DOC: check for errors for negative vector indexes that are too small +# @TEST-EXEC: zeek -b %INPUT >out 2>&1 +# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff out + +event zeek_init() +{ + local v = vector( 1, 2, 3, 4, 5 ); + print v[-1], v[-3], v[-5], v[-7]; +} diff --git a/testing/btest/plugins/duplicate-pktsrc.zeek b/testing/btest/plugins/duplicate-pktsrc.zeek new file mode 100644 index 0000000000..aecc6190c6 --- /dev/null +++ b/testing/btest/plugins/duplicate-pktsrc.zeek @@ -0,0 +1,8 @@ +# @TEST-DOC: Loading two plugins with the same name triggers a warning. + +# @TEST-EXEC: ${DIST}/auxil/zeek-aux/plugin-support/init-plugin -u . Demo Foo +# @TEST-EXEC: cp -r %DIR/pktsrc-plugin/* . 
+# @TEST-EXEC: ./configure --zeek-dist=${DIST} && make +# @TEST-EXEC: cp -R build build_backup +# @TEST-EXEC: ZEEK_PLUGIN_PATH=`pwd`/build_backup:`pwd`/build zeek -NN Demo::Foo >output 2>&1 +# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff output diff --git a/testing/btest/plugins/hooks-plugin/src/Plugin.cc b/testing/btest/plugins/hooks-plugin/src/Plugin.cc index 99cf4eafd9..b8a4d812be 100644 --- a/testing/btest/plugins/hooks-plugin/src/Plugin.cc +++ b/testing/btest/plugins/hooks-plugin/src/Plugin.cc @@ -58,7 +58,6 @@ zeek::plugin::Configuration Plugin::Configure() EnableHook(zeek::plugin::HOOK_UPDATE_NETWORK_TIME); EnableHook(zeek::plugin::META_HOOK_PRE); EnableHook(zeek::plugin::META_HOOK_POST); - EnableHook(zeek::plugin::HOOK_BRO_OBJ_DTOR); EnableHook(zeek::plugin::HOOK_SETUP_ANALYZER_TREE); EnableHook(zeek::plugin::HOOK_LOG_INIT); EnableHook(zeek::plugin::HOOK_LOG_WRITE); @@ -166,7 +165,7 @@ bool Plugin::HookQueueEvent(zeek::Event* event) fprintf(stderr, "%.6f %-15s %s\n", zeek::run_state::network_time, "| RequestObjDtor", d.Description()); - RequestBroObjDtor(event); + RequestObjDtor(event); i = 1; } @@ -184,11 +183,6 @@ void Plugin::HookUpdateNetworkTime(double network_time) zeek::run_state::network_time); } -void Plugin::HookBroObjDtor(void* obj) - { - fprintf(stderr, "%.6f %-15s\n", zeek::run_state::network_time, "| HookBroObjDtor"); - } - void Plugin::HookObjDtor(void* obj) { fprintf(stderr, "%.6f %-15s\n", zeek::run_state::network_time, "| HookObjDtor"); diff --git a/testing/btest/plugins/hooks-plugin/src/Plugin.h b/testing/btest/plugins/hooks-plugin/src/Plugin.h index 21727786dd..30f42a11ec 100644 --- a/testing/btest/plugins/hooks-plugin/src/Plugin.h +++ b/testing/btest/plugins/hooks-plugin/src/Plugin.h @@ -20,7 +20,6 @@ protected: bool HookQueueEvent(zeek::Event* event) override; void HookDrainEvents() override; void HookUpdateNetworkTime(double network_time) override; - void HookBroObjDtor(void* obj) override; void HookObjDtor(void* obj) override; void HookLogInit(const std::string& writer, const std::string& instantiating_filter, bool local, bool remote, const zeek::logging::WriterBackend::WriterInfo& info, diff --git a/testing/btest/scripts/base/frameworks/analyzer/enable-analyzer.zeek b/testing/btest/scripts/base/frameworks/analyzer/enable-analyzer.zeek index 148d9b4846..a925783b8a 100644 --- a/testing/btest/scripts/base/frameworks/analyzer/enable-analyzer.zeek +++ b/testing/btest/scripts/base/frameworks/analyzer/enable-analyzer.zeek @@ -1,5 +1,5 @@ # -# @TEST-EXEC: zeek -b -r ${TRACES}/var-services-std-ports.trace %INPUT base/protocols/dns base/protocols/conn base/frameworks/dpd +# @TEST-EXEC: zeek -b -r ${TRACES}/var-services-std-ports.trace %INPUT base/protocols/dns base/protocols/conn # @TEST-EXEC: cat conn.log | zeek-cut service | grep -q dns # @@ -9,5 +9,3 @@ event zeek_init() { Analyzer::enable_analyzer(Analyzer::ANALYZER_DNS); } - - diff --git a/testing/btest/scripts/base/frameworks/analyzer/register-for-port.zeek b/testing/btest/scripts/base/frameworks/analyzer/register-for-port.zeek index d37212a2d2..63ac9f701b 100644 --- a/testing/btest/scripts/base/frameworks/analyzer/register-for-port.zeek +++ b/testing/btest/scripts/base/frameworks/analyzer/register-for-port.zeek @@ -2,15 +2,13 @@ # some runs having complaints that there are no scripts. 
 # @TEST-REQUIRES: test "${ZEEK_USE_CPP}" != "1"
 
-# @TEST-EXEC: zeek -b -r ${TRACES}/ssh/ssh-on-port-80.trace %INPUT dpd_buffer_size=0 base/protocols/conn base/protocols/ssh base/frameworks/dpd
+# @TEST-EXEC: zeek -b -r ${TRACES}/ssh/ssh-on-port-80.trace %INPUT dpd_buffer_size=0 base/protocols/conn base/protocols/ssh
 # @TEST-EXEC: cat conn.log | zeek-cut service | grep -q ssh
 #
-# @TEST-EXEC: zeek -b -r ${TRACES}/ssh/ssh-on-port-80.trace dpd_buffer_size=0 base/protocols/conn base/protocols/ssh base/frameworks/dpd
+# @TEST-EXEC: zeek -b -r ${TRACES}/ssh/ssh-on-port-80.trace dpd_buffer_size=0 base/protocols/conn base/protocols/ssh
 # @TEST-EXEC: cat conn.log | zeek-cut service | grep -vq ssh
 
 event zeek_init()
 {
 Analyzer::register_for_port(Analyzer::ANALYZER_SSH, 80/tcp);
 }
-
-
diff --git a/testing/btest/scripts/base/frameworks/cluster/leftover-log-rotation-multi-logger.zeek b/testing/btest/scripts/base/frameworks/cluster/leftover-log-rotation-multi-logger.zeek
new file mode 100644
index 0000000000..93247ce6ce
--- /dev/null
+++ b/testing/btest/scripts/base/frameworks/cluster/leftover-log-rotation-multi-logger.zeek
@@ -0,0 +1,44 @@
+# @TEST-DOC: Ensure that left-over log rotation tags the logger name on as well.
+
+# @TEST-EXEC: echo ".log" >> .shadow.conn.log
+# @TEST-EXEC: echo "" >> .shadow.conn.log
+# @TEST-EXEC: echo "leftover conn log" > conn.log
+
+# @TEST-EXEC: echo ".log" >> .shadow.dns.log
+# @TEST-EXEC: echo "" >> .shadow.dns.log
+# @TEST-EXEC: echo "leftover dns log" > dns.log
+
+# Start Zeek as cluster node logger-2.
+# @TEST-EXEC: CLUSTER_NODE=logger-2 zeek -b %INPUT > out
+
+# Ensure leftover files were removed.
+# @TEST-EXEC: ! test -f .shadow.conn.log
+# @TEST-EXEC: ! test -f conn.log
+# @TEST-EXEC: ! test -f .shadow.dns.log
+# @TEST-EXEC: ! test -f dns.log
+
+# Ensure the rotated files end-up in the default log-queue directory and have
+# the logger-2 name encoded into them.
+# @TEST-EXEC: ls ./log-queue/conn__*.log >>out
+# @TEST-EXEC: ls ./log-queue/dns__*.log >>out
+# @TEST-EXEC: cat ./log-queue/conn__*logger-2__.log ./log-queue/dns__*logger-2__.log >>out
+
+# @TEST-EXEC: TEST_DIFF_CANONIFIER='sed -r "s/[0-9]{2}/XX/g"' btest-diff out
+
+@TEST-START-FILE cluster-layout.zeek
+redef Cluster::nodes = {
+ ["logger-1"] = [$node_type=Cluster::LOGGER, $ip=127.0.0.1, $p=1234/tcp],
+ ["logger-2"] = [$node_type=Cluster::LOGGER, $ip=127.0.0.1, $p=1235/tcp],
+};
+@TEST-END-FILE
+
+# Switch settings into a supervisor/non-zeekctl setup
+redef Log::default_rotation_dir = "log-queue";
+redef Log::rotation_format_func = archiver_rotation_format_func;
+redef LogAscii::enable_leftover_log_rotation = T;
+redef Log::default_rotation_postprocessor_cmd = "";
+
+event zeek_init()
+ {
+ terminate();
+ }
diff --git a/testing/btest/scripts/base/frameworks/file-analysis/log-files-event-flattening.zeek b/testing/btest/scripts/base/frameworks/file-analysis/log-files-event-flattening.zeek
index 56d15b9f4b..ca8c7dc27a 100644
--- a/testing/btest/scripts/base/frameworks/file-analysis/log-files-event-flattening.zeek
+++ b/testing/btest/scripts/base/frameworks/file-analysis/log-files-event-flattening.zeek
@@ -1,10 +1,7 @@
 # @TEST-DOC: Implement Files::log_files and verify it is seeing unique File::Info records.
-# @TEST-EXEC: zeek -b -r $TRACES/http/concurrent-range-requests.pcap uid-id.zeek >out.new
-# @TEST-EXEC: zeek -b -r $TRACES/http/concurrent-range-requests.pcap frameworks/files/deprecated-txhosts-rxhosts-connuids uid-id-deprecated.zeek >out.deprecated
-# @TEST-EXEC: btest-diff out.new
-# @TEST-EXEC: btest-diff out.deprecated
+# @TEST-EXEC: zeek -b -r $TRACES/http/concurrent-range-requests.pcap %INPUT >out
+# @TEST-EXEC: btest-diff out
 
-@TEST-START-FILE uid-id.zeek
 @load base/frameworks/files
 @load base/protocols/http
 
@@ -12,15 +9,3 @@ event Files::log_files(rec: Files::Info)
 {
 print rec$uid, rec$id;
 }
-@TEST-END-FILE
-
-
-@TEST-START-FILE uid-id-deprecated.zeek
-@load base/frameworks/files
-@load base/protocols/http
-
-event Files::log_files(rec: Files::Info)
- {
- print rec$uid, rec$id, cat(rec$tx_hosts), cat(rec$rx_hosts), cat(rec$conn_uids);
- }
-@TEST-END-FILE
diff --git a/testing/btest/scripts/base/frameworks/logging/ascii-logdir.zeek b/testing/btest/scripts/base/frameworks/logging/ascii-logdir.zeek
index 104c073971..51b2a9606c 100644
--- a/testing/btest/scripts/base/frameworks/logging/ascii-logdir.zeek
+++ b/testing/btest/scripts/base/frameworks/logging/ascii-logdir.zeek
@@ -1,6 +1,6 @@
 #
 # @TEST-EXEC: mkdir logdir
-# @TEST-EXEC: zeek -b %INPUT LogAscii::logdir=logdir
+# @TEST-EXEC: zeek -b %INPUT Log::default_logdir=logdir
 # @TEST-EXEC: cat logdir/ssh.log | grep -v PREFIX.*20..- >ssh-filtered.log
 # @TEST-EXEC: btest-diff ssh-filtered.log
 
diff --git a/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation-shadow-files.zeek b/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation-shadow-files.zeek
deleted file mode 100644
index 9c15dba571..0000000000
--- a/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation-shadow-files.zeek
+++ /dev/null
@@ -1,34 +0,0 @@
-# @TEST-DOC: Test that .shadow files are picked up from LogAscii::logdir.
-# @TEST-EXEC: mkdir logs
-# @TEST-EXEC: echo ".log" >> logs/.shadow.conn.log
-# @TEST-EXEC: echo "my_rotation_postprocessor" >> logs/.shadow.conn.log
-# @TEST-EXEC: echo "leftover conn log" > logs/conn.log
-# @TEST-EXEC: echo ".log" >> logs/.shadow.dns.log
-# @TEST-EXEC: echo "my_rotation_postprocessor" >> logs/.shadow.dns.log
-# @TEST-EXEC: echo "leftover dns log" > logs/dns.log
-
-# @TEST-EXEC: zeek -b %INPUT > out
-
-# @TEST-EXEC: ! test -f logs/.shadow.conn.log
-# @TEST-EXEC: ! test -f logs/conn.log
-# @TEST-EXEC: ! test -f logs/.shadow.dns.log
-# @TEST-EXEC: ! test -f logs/dns.log
-
-# Ensure rotated logs ends-up in the current working directory: This may change in the future.
-# @TEST-EXEC: cat ./conn-*.log ./dns-*.log > logs.cat
-
-# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-sort btest-diff out
-# @TEST-EXEC: btest-diff logs.cat
-
-module GLOBAL;
-
-function my_rotation_postprocessor(info: Log::RotationInfo) : bool
- {
- print fmt("running my rotation postprocessor for path '%s'", info$path);
- return T;
- }
-
-redef LogAscii::logdir = "./logs";
-redef LogAscii::enable_leftover_log_rotation = T;
-redef Log::default_rotation_interval = 1hr;
-redef Log::default_rotation_postprocessor_cmd = "echo";
diff --git a/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation-stale-shadow-files.zeek b/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation-stale-shadow-files.zeek
deleted file mode 100644
index af1f5a6d37..0000000000
--- a/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation-stale-shadow-files.zeek
+++ /dev/null
@@ -1,23 +0,0 @@
-# @TEST-DOC: Test that stale .shadow files are removed from LogAscii::logdir.
-# @TEST-EXEC: mkdir logs
-# @TEST-EXEC: echo ".log" >> logs/.shadow.conn.log
-# @TEST-EXEC: echo "my_rotation_postprocessor" >> logs/.shadow.conn.log
-
-# @TEST-EXEC: zeek -b %INPUT > out 2>&1
-
-# @TEST-EXEC: ! test -f logs/.shadow.conn.log
-
-# @TEST-EXEC: TEST_DIFF_CANONIFIER='$SCRIPTS/diff-remove-abspath | $SCRIPTS/diff-remove-timestamps' btest-diff out
-
-module GLOBAL;
-
-function my_rotation_postprocessor(info: Log::RotationInfo) : bool
- {
- print fmt("running my rotation postprocessor for path '%s'", info$path);
- return T;
- }
-
-redef LogAscii::logdir = "./logs";
-redef LogAscii::enable_leftover_log_rotation = T;
-redef Log::default_rotation_interval = 1hr;
-redef Log::default_rotation_postprocessor_cmd = "echo";
diff --git a/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation.zeek b/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation.zeek
index 7fdba2c9fa..c6349448e0 100644
--- a/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation.zeek
+++ b/testing/btest/scripts/base/frameworks/logging/rotate-ascii-logdir-leftover-log-rotation.zeek
@@ -2,7 +2,7 @@
 # @TEST-EXEC: mkdir logs
 # @TEST-EXEC: zeek -b -r ${TRACES}/rotation.trace %INPUT >zeek.out 2>&1
 # @TEST-EXEC: grep "test" zeek.out | sort >out
-# @TEST-EXEC: for i in `ls test.*.log | sort`; do printf '> %s\n' $i; cat $i; done >>out
+# @TEST-EXEC: for i in `ls logs/test.*.log | sort`; do printf '> %s\n' $i; cat $i; done >>out
 # @TEST-EXEC: TEST_DIFF_CANONIFIER='$SCRIPTS/diff-remove-abspath | $SCRIPTS/diff-remove-timestamps' btest-diff out
 
 module Test;
@@ -19,8 +19,8 @@ export {
 } &log;
 }
 
-redef LogAscii::logdir = "./logs";
 redef LogAscii::enable_leftover_log_rotation = T;
+redef Log::default_logdir = "./logs";
 redef Log::default_rotation_interval = 1hr;
 redef Log::default_rotation_postprocessor_cmd = "echo";
 
diff --git a/testing/btest/scripts/base/protocols/mqtt/mqtt-payload-cap-dynamic.test b/testing/btest/scripts/base/protocols/mqtt/mqtt-payload-cap-dynamic.test
index 09dd2cdc91..c987e0b889 100644
--- a/testing/btest/scripts/base/protocols/mqtt/mqtt-payload-cap-dynamic.test
+++ b/testing/btest/scripts/base/protocols/mqtt/mqtt-payload-cap-dynamic.test
@@ -1,7 +1,7 @@
 # @TEST-EXEC: zeek -b -r $TRACES/mqtt.pcap %INPUT > out
 # @TEST-EXEC: btest-diff out
 
-@load policy/protocols/mqtt
+@load base/protocols/mqtt
 @load base/frameworks/config
 
 event mqtt_publish(c: connection, is_orig: bool, msg_id: count, msg: MQTT::PublishMsg)
diff --git a/testing/btest/scripts/base/protocols/mqtt/mqtt-payload-cap.test b/testing/btest/scripts/base/protocols/mqtt/mqtt-payload-cap.test
index 5304cbc914..76d2b20435 100644
--- a/testing/btest/scripts/base/protocols/mqtt/mqtt-payload-cap.test
+++ b/testing/btest/scripts/base/protocols/mqtt/mqtt-payload-cap.test
@@ -3,4 +3,4 @@
 
 redef MQTT::max_payload_size = 8;
 
-@load policy/protocols/mqtt
+@load base/protocols/mqtt
diff --git a/testing/btest/scripts/base/protocols/mqtt/mqtt.test b/testing/btest/scripts/base/protocols/mqtt/mqtt.test
index 553bc8bb3e..25f3046a4c 100644
--- a/testing/btest/scripts/base/protocols/mqtt/mqtt.test
+++ b/testing/btest/scripts/base/protocols/mqtt/mqtt.test
@@ -3,4 +3,4 @@
 # @TEST-EXEC: btest-diff mqtt_subscribe.log
 # @TEST-EXEC: btest-diff mqtt_publish.log
 
-@load policy/protocols/mqtt
+@load base/protocols/mqtt
diff --git a/testing/btest/scripts/base/protocols/socks/socks-auth-10080.zeek b/testing/btest/scripts/base/protocols/socks/socks-auth-10080.zeek
new file mode 100644
index 0000000000..d272aed38e
--- /dev/null
+++ b/testing/btest/scripts/base/protocols/socks/socks-auth-10080.zeek
@@ -0,0 +1,11 @@
+# @TEST-DOC: Socks V5 over a non-standard port.
+
+# @TEST-EXEC: zeek -r $TRACES/socks-auth-10080.pcap %INPUT
+# @TEST-EXEC: zeek-cut -m id.orig_h id.orig_p id.resp_h id.resp_p service history < conn.log > conn.log.cut
+# @TEST-EXEC: zeek-cut -m id.orig_h id.orig_p id.resp_h id.resp_p version status bound.host bound.name bound_p < socks.log > socks.log.cut
+# @TEST-EXEC: btest-diff conn.log.cut
+# @TEST-EXEC: btest-diff socks.log.cut
+
+@load base/protocols/socks
+
+redef SOCKS::default_capture_password = T;
diff --git a/testing/btest/scripts/policy/frameworks/files/deprecated-fields-concurrent-range-requests.zeek b/testing/btest/scripts/policy/frameworks/files/deprecated-fields-concurrent-range-requests.zeek
deleted file mode 100644
index 681114acaa..0000000000
--- a/testing/btest/scripts/policy/frameworks/files/deprecated-fields-concurrent-range-requests.zeek
+++ /dev/null
@@ -1,15 +0,0 @@
-# @TEST-DOC: The pcap contains 3 connections with range requests for the same file. We expect 3 files.log entries all with the same fuid, but different uids. With the deprecated fields, we expect tx_hosts, rx_hosts and conn_uuids to agree with the uid and id fields.
-# @TEST-EXEC: zeek -b -r $TRACES/http/concurrent-range-requests.pcap %INPUT 2>&1 > out
-# @TEST-EXEC: mv files.log files.log.new
-# @TEST-EXEC: mv out out.new
-# @TEST-EXEC: btest-diff out.new
-# @TEST-EXEC: btest-diff files.log.new
-
-# @TEST-EXEC: zeek -b -r $TRACES/http/concurrent-range-requests.pcap %INPUT frameworks/files/deprecated-txhosts-rxhosts-connuids 2>&1 > out
-# @TEST-EXEC: mv files.log files.log.deprecated
-# @TEST-EXEC: mv out out.deprecated
-# @TEST-EXEC: btest-diff out.deprecated
-# @TEST-EXEC: btest-diff files.log.deprecated
-
-@load base/frameworks/files
-@load base/protocols/http
diff --git a/testing/btest/scripts/policy/frameworks/files/deprecated-fields.zeek b/testing/btest/scripts/policy/frameworks/files/deprecated-fields.zeek
deleted file mode 100644
index 3e791a15aa..0000000000
--- a/testing/btest/scripts/policy/frameworks/files/deprecated-fields.zeek
+++ /dev/null
@@ -1,15 +0,0 @@
-# @TEST-DOC: Verify the files.log with and without the tx_hosts, rx_hosts and conn_uids fields
-# @TEST-EXEC: zeek -b -r $TRACES/http/get.trace %INPUT 2>&1 > out
-# @TEST-EXEC: mv files.log files.log.new
-# @TEST-EXEC: mv out out.new
-# @TEST-EXEC: btest-diff out.new
-# @TEST-EXEC: btest-diff files.log.new
-
-# @TEST-EXEC: zeek -b -r $TRACES/http/get.trace %INPUT frameworks/files/deprecated-txhosts-rxhosts-connuids 2>&1 > out
-# @TEST-EXEC: mv files.log files.log.deprecated
-# @TEST-EXEC: mv out out.deprecated
-# @TEST-EXEC: btest-diff out.deprecated
-# @TEST-EXEC: btest-diff files.log.deprecated
-
-@load base/frameworks/files
-@load base/protocols/http
diff --git a/testing/btest/scripts/base/frameworks/telemetry/cluster.zeek b/testing/btest/scripts/policy/frameworks/telemetry/prometheus.zeek
similarity index 98%
rename from testing/btest/scripts/base/frameworks/telemetry/cluster.zeek
rename to testing/btest/scripts/policy/frameworks/telemetry/prometheus.zeek
index 56ea828b36..83fb95f06d 100644
--- a/testing/btest/scripts/base/frameworks/telemetry/cluster.zeek
+++ b/testing/btest/scripts/policy/frameworks/telemetry/prometheus.zeek
@@ -29,6 +29,7 @@ redef Cluster::nodes = {
 @TEST-END-FILE
 
 @load policy/frameworks/cluster/experimental
+@load policy/frameworks/telemetry/prometheus
 @load base/frameworks/telemetry
 @load base/utils/active-http
 
diff --git a/testing/btest/spicy/replaces.zeek b/testing/btest/spicy/replaces.zeek
index eabdcbcb6e..a1b155c718 100644
--- a/testing/btest/spicy/replaces.zeek
+++ b/testing/btest/spicy/replaces.zeek
@@ -14,9 +14,9 @@ event ssh::banner(c: connection, is_orig: bool, version: string, software: strin
 print "SSH banner", c$id, is_orig, version, software;
 }
 
-event analyzer_confirmation(c: connection, atype: AllAnalyzers::Tag, aid: count)
+event analyzer_confirmation_info(atype: AllAnalyzers::Tag, info: AnalyzerConfirmationInfo)
 {
- print atype, aid;
+ print atype, info$aid;
 }
 
 # @TEST-START-FILE ssh.spicy
diff --git a/testing/btest/spicy/ssh-banner.zeek b/testing/btest/spicy/ssh-banner.zeek
index e932517ec8..ef608be08f 100644
--- a/testing/btest/spicy/ssh-banner.zeek
+++ b/testing/btest/spicy/ssh-banner.zeek
@@ -17,16 +17,16 @@ event ssh::banner(c: connection, is_orig: bool, version: string, software: strin
 print "SSH banner", c$id, is_orig, version, software;
 }
 
-event analyzer_confirmation(c: connection, atype: AllAnalyzers::Tag, aid: count)
+event analyzer_confirmation_info(atype: AllAnalyzers::Tag, info: AnalyzerConfirmationInfo)
 {
 if ( atype == Analyzer::ANALYZER_SPICY_SSH )
 print "confirm", atype;
 }
 
-event analyzer_violation(c: connection, atype: AllAnalyzers::Tag, aid: count, reason: string)
+event analyzer_violation_info(atype: AllAnalyzers::Tag, info: AnalyzerViolationInfo)
 {
 if ( atype == Analyzer::ANALYZER_SPICY_SSH )
- print "violation", atype, reason;
+ print "violation", atype, info$reason;
 }
 
 # @TEST-START-FILE extern.zeek
diff --git a/testing/btest/supervisor/config-scripts.zeek b/testing/btest/supervisor/config-scripts.zeek
index d48281b9a4..3a56922ac8 100644
--- a/testing/btest/supervisor/config-scripts.zeek
+++ b/testing/btest/supervisor/config-scripts.zeek
@@ -27,8 +27,7 @@ event zeek_init()
 print supervisor_output_file, "supervisor zeek_init()";
 local sn = Supervisor::NodeConfig($name="grault",
 $addl_base_scripts=vector("../addl_base_script.zeek"),
- $addl_user_scripts=vector("../addl_user_script.zeek"),
- $scripts=vector("../script.zeek"));
+ $addl_user_scripts=vector("../addl_user_script.zeek"));
 local res = Supervisor::create(sn);
 
 if ( res != "" )
diff --git a/testing/external/commit-hash.zeek-testing b/testing/external/commit-hash.zeek-testing
index d57e1cb4df..8a79b0e32b 100644
--- a/testing/external/commit-hash.zeek-testing
+++ b/testing/external/commit-hash.zeek-testing
@@ -1 +1 @@
-828845c99306c6d5d6811fa42987de5b16f530b9
+d59caff708b41db11fa0cbfe0b1f95b46c3e700e
diff --git a/testing/external/commit-hash.zeek-testing-private b/testing/external/commit-hash.zeek-testing-private
index c5dd200880..72b4b0a144 100644
--- a/testing/external/commit-hash.zeek-testing-private
+++ b/testing/external/commit-hash.zeek-testing-private
@@ -1 +1 @@
-b121bfe4d869f1f5e334505b970cd456558ef6a1
+7162c907aa25e155ea841710ef30b65afb578c3f