diff --git a/.travis.yml b/.travis.yml index e10a8d5ffc..12b21ed5cb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,5 @@ +dist: trusty + language: cpp services: @@ -24,7 +26,7 @@ notifications: env: - distro: centos_7 - distro: debian_9 - - distro: fedora_28 + - distro: fedora_30 - distro: ubuntu_16.04 - distro: ubuntu_18.04 - distro: ubuntu_18.04_leaktest diff --git a/CHANGES b/CHANGES index 22bd59e630..0a39bce71a 100644 --- a/CHANGES +++ b/CHANGES @@ -1,4 +1,244 @@ +3.1.0-dev.253 | 2019-11-05 09:54:01 -0800 + + * Add and use new IntrusivePtr type (Dominik Charousset, Corelight) + + Manual memory management via Ref/Unref is verbose and prone to error. An + intrusive smart pointer automates the reference counting, makes code + more robust (in particular w.r.t. to exceptions) and reduces boilerplate + code. A big benefit of the intrusive smart pointers for Zeek is that + they can co-exist with the manual memory management. Rather than having + to port the entire code base at once, we can migrate components + one-by-one. In this first step, we add the new template + `IntrusivePtr` and start using it in the Broker Manager. This makes + the previous `unref_guard` obsolete. + +3.1.0-dev.247 | 2019-11-04 17:26:32 -0800 + + * Cluster-ize FTP data channel analysis (Jeff Barber) + + The ftp_data_expected table is now synchronized across Zeek + worker nodes. Note there's still a possible race condition where + the traffic for the ftp-data connection proceeds before the Zeek + cluster has a change to inform workers to expect it. + +3.1.0-dev.245 | 2019-11-04 16:57:11 -0800 + + * Use CMake object libraries for subdir libs and plugins (Dominik Charousset, Corelight) + +3.1.0-dev.242 | 2019-11-04 09:58:53 -0800 + + * Fix error handling in decode_base64() (Jon Siwek, Corelight) + +3.1.0-dev.241 | 2019-11-04 09:44:20 -0800 + + * Revert recent changes to Base64.h API. (Jon Siwek, Corelight) + + This reverts commit 705210a035d4128ecdf249c7db98607c570bfcd2. + The original changes broke the Base64.h API which may be used by + external plugins. + +3.1.0-dev.240 | 2019-11-01 15:58:55 -0700 + + * Use #pragma once in Base64.h (Jon Siwek, Corelight) + +3.1.0-dev.239 | 2019-10-31 12:18:16 -0700 + + * Fix Travis CI script to run Coverity build in container (Jon Siwek, Corelight) + +3.1.0-dev.237 | 2019-10-31 06:37:23 +0100 + + * Remove "using namespace std" from header files (Jon Siwek, Corelight) + +3.1.0-dev.235 | 2019-10-30 15:04:13 -0700 + + * GH-649: define missing DLT_NFLOG to support OpenBSD (Jon Siwek, Corelight) + +3.1.0-dev.234 | 2019-10-30 13:33:08 -0700 + + * Add --libdir convenience flag to configure. (Johanna Amann, Corelight) + +3.1.0-dev.231 | 2019-10-30 11:02:28 -0700 + + * Add missing ipv6 support in patricia.c local_inet_pton() (Tim Shelton) + +3.1.0-dev.229 | 2019-10-30 07:57:19 +0000 + + * GH-664: fix signature matching for cases where the first TCP + packet seen contained payload data, regardless of its TCP flags. + (Jon Siwek, Corelight) + +3.1.0-dev.227 | 2019-10-29 09:39:10 -0700 + + * Replace build_unique with make_unique (Johanna Amann, Corelight) + + This was a rarely used convenience function from when we did not yet + have c++17 support. + + * GH-626: Revert "Fix compilation on OS-X." (Johanna Amann, Corelight) + + Reverts workaround in cde28074a169212aa8f38fdac225ecbeac4e642d + which depended on C++14 features at a time when we used only C++11. 
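As a brief aside on the 3.1.0-dev.227 entry above: a minimal sketch of what the build_unique-to-std::make_unique switch looks like in practice. It uses a stand-in Widget type rather than Zeek's actual classes; build_unique was the project-local helper, while std::make_unique comes from the standard library once C++17 is required.

#include <memory>
#include <utility>
#include <vector>

struct Widget
	{
	explicit Widget(std::vector<int> data) : data(std::move(data)) { }
	std::vector<int> data;
	};

int main()
	{
	std::vector<int> v = {1, 2, 3};

	// Previously: std::unique_ptr<Widget> w = build_unique<Widget>(std::move(v));
	// With C++17 required, the standard facility is used directly:
	std::unique_ptr<Widget> w = std::make_unique<Widget>(std::move(v));

	return w->data.size() == 3 ? 0 : 1;
	}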
+ +3.1.0-dev.222 | 2019-10-28 20:18:15 -0700 + + * Install cmake3 from EPEL on CentOS CI system (Jon Siwek, Corelight) + +3.1.0-dev.221 | 2019-10-28 18:30:24 -0700 + + * Require CMake 3.0+ (Dominik Charousset, Corelight) + + * Move CMake project() after cmake_minimum_required() (Dominik Charousset, Corelight) + + The call to `project` must come after `cmake_minimum_required` in CMake + in order to get the correct policy settings. + +3.1.0-dev.219 | 2019-10-28 20:59:12 +0100 + + * GH-654: allow table() in function &default expressions + + Table parameters of functions previously did not coerce unspecified + tables used in their &default attribute to the correct type. (Jon Siwek, Corelight) + + * GH-654: allow table() in record &default expressions + + Table fields of records previously did not coerce unspecified tables + used in their &default attribute to the correct type. (Jon Siwek, Corelight) + + * Fix up some documentation. (Keith J. Jones) + +3.1.0-dev.214 | 2019-10-28 10:58:43 +0000 + + * GH-585: Require C++17 (Jon Siwek, Corelight) + + * Update Travis CI config for C++17 (Jon Siwek, Corelight) + +3.1.0-dev.209 | 2019-10-28 09:17:15 +0000 + + * Fix HTTP::build_url. (Alexey Mokeev) + + * Free memory allocated by glob() in plugin manager. (Arne Welzel) + + * Remove always-false condition in plugin manager. (Arne Welzel) + +3.1.0-dev.203 | 2019-10-25 10:41:10 -0700 + + * Change usage of old bro:see directive to zeek:see (Jon Siwek, Corelight) + + * Use explicit path name for NTP log stream (Jon Siwek, Corelight) + + For consistency (we do this for all other logs) and just to avoid + the extra path function calls. + +3.1.0-dev.201 | 2019-10-25 10:12:49 -0700 + + * Fixed typo. (Keith J. Jones) + +3.1.0-dev.197 | 2019-10-21 09:34:18 -0700 + + * Update embedded CAF in Broker to 0.17.2 (Jon Siwek, Corelight) + +3.1.0-dev.195 | 2019-10-21 11:42:18 +0200 + + * Prefer cmake3 command, add --cmake=PATH option (Dominik Charousset, Corelight) + +3.1.0-dev.192 | 2019-10-17 16:32:16 -0700 + + * Zeekctl: change gzip compression level from 9 to default #614 (JC Connell) + +3.1.0-dev.191 | 2019-10-16 10:02:45 -0700 + + * Update broker submodule to use pybind11 v2.4.3 (Jon Siwek, Corelight) + + * Improve &default validation for global vars: exclude sets (Jon Siwek, Corelight) + + * Remove unnecessary &optional from a Config::set_value() parameter (Jon Siwek, Corelight) + + * Remove check for redundant attributes (Jon Siwek, Corelight) + +3.1.0-dev.185 | 2019-10-15 16:46:22 -0700 + + * Add weirds for invalid TCP option lengths (Jon Siwek, Corelight) + + * GH-618: add "tcp_options" event containing TCP option values (Jon Siwek, Corelight) + + * Fix tcp_option event + + It was not being raised in all the cases it should have been due to + an incorrect/unnecessary truncation check. 
(Jon Siwek, Corelight) + +3.1.0-dev.181 | 2019-10-14 22:05:01 -0700 + + * Fix redef'ing a table with a new &default attribute (Jon Siwek, Corelight) + +3.1.0-dev.179 | 2019-10-14 21:33:15 -0700 + + * Support whitespace at end of line for config reader (Seth Hall, Corelight) + +3.1.0-dev.177 | 2019-10-14 20:13:24 -0700 + + * Proof of Concept on Bugprone Narrowing Conversions: Base64 (Dev Bali, Corelight) + +3.1.0-dev.175 | 2019-10-12 10:39:49 -0700 + + * Use Ubuntu 14.04 for Travis CI environment (Jon Siwek, Corelight) + +3.1.0-dev.172 | 2019-10-08 15:14:35 -0700 + + * Add new BIF: print_raw() + + This is a convenience function to make it easier to print literal byte + sequences to stdout without additional escaping like what may be added + by the default `print` statement behavior. + + For example, related to GH-596, `print` currently escapes even valid + UTF-8 byte sequences and makes it difficult to output valid JSON strings + containing such. (Jon Siwek, Corelight) + + * Fix compilation on OS-X. (Johanna Amann) + +3.1.0-dev.169 | 2019-10-07 10:50:35 -0700 + + * GH-589: improve printing of sub-microsecond intervals (Jon Siwek, Corelight) + +3.1.0-dev.167 | 2019-10-07 10:42:15 -0700 + + * GH-591: allow Config::set_value() to use empty/unspecified table/sets (Jon Siwek, Corelight) + + * GH-591: fix reading set[enum] values from input files (Jon Siwek, Corelight) + +3.1.0-dev.164 | 2019-10-02 11:04:13 -0700 + + * Fix build with external CAF but bundled Broker (Dominik Charousset) + +3.1.0-dev.161 | 2019-10-01 16:41:35 -0700 + + * Update COPYING.3rdparty (Tim Wojtulewicz, Corelight) + + * Use json::emplace to avoid some extra calls to operator[] (Tim Wojtulewicz, Corelight) + + * Use tessil/unordered-map instead of nlohmann/fifo-map to improve JSON + logging performance (Tim Wojtulewicz, Corelight) + +3.1.0-dev.156 | 2019-10-01 09:05:49 +0000 + + * Improve RecordVal JSON formatting to no longer create a record + introspection table each time when all the required information + can be obtained directly in the RecordVal and RecordType objects. 
+ (Jon Siwek, Corelight) + +3.1.0-dev.153 | 2019-09-30 21:09:55 -0700 + + * GH-602: fix paraglob header inclusion, installation, and search paths (Jon Siwek, Corelight) + +3.1.0-dev.151 | 2019-09-30 17:39:50 -0700 + + * GH-606: Output nulls into json data if a field isn't set (Tim Wojtulewicz, Corelight) + + * GH-598: handle multi-key sets correctly when outputting json (Tim Wojtulewicz, Corelight) + + * Fix Xcode deprecation warning for std::ptr_fun (Jon Siwek, Corelight) + 3.1.0-dev.143 | 2019-09-24 10:23:24 -0700 * Simplify reassembly code using DataBlockList::LastBlock() (Jon Siwek, Corelight) diff --git a/CMakeLists.txt b/CMakeLists.txt index a1587bf529..c786f5ee59 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,8 +1,8 @@ -project(Zeek C CXX) - # When changing the minimum version here, also adapt # aux/zeek-aux/plugin-support/skeleton/CMakeLists.txt -cmake_minimum_required(VERSION 2.8.12 FATAL_ERROR) +cmake_minimum_required(VERSION 3.0 FATAL_ERROR) + +project(Zeek C CXX) if ( NOT CMAKE_INSTALL_LIBDIR ) # Currently, some sub-projects may use GNUInstallDirs.cmake to choose the @@ -281,7 +281,7 @@ include(PCAPTests) include(OpenSSLTests) include(CheckNameserCompat) include(GetArchitecture) -include(RequireCXX11) +include(RequireCXX17) if ( (OPENSSL_VERSION VERSION_EQUAL "1.1.0") OR (OPENSSL_VERSION VERSION_GREATER "1.1.0") ) set(ZEEK_HAVE_OPENSSL_1_1 true CACHE INTERNAL "" FORCE) @@ -339,11 +339,17 @@ InstallSymlink("${CMAKE_INSTALL_PREFIX}/bin/zeek-wrapper" "${CMAKE_INSTALL_PREFI ######################################################################## ## Recurse on sub-directories -if ( BROKER_ROOT_DIR ) - find_package(Broker REQUIRED) +if ( CAF_ROOT_DIR ) find_package(CAF COMPONENTS core io openssl REQUIRED) +endif () - set(zeekdeps ${zeekdeps} ${BROKER_LIBRARY} ${CAF_LIBRARIES}) +if ( BROKER_ROOT_DIR ) + # Avoid calling find_package(CAF) twice. + if ( NOT CAF_ROOT_DIR ) + find_package(CAF COMPONENTS core io openssl REQUIRED) + endif () + find_package(Broker REQUIRED) + set(zeekdeps ${zeekdeps} ${BROKER_LIBRARY}) include_directories(BEFORE ${BROKER_INCLUDE_DIR}) else () set(ENABLE_STATIC_ONLY_SAVED ${ENABLE_STATIC_ONLY}) @@ -364,13 +370,16 @@ else () ${CMAKE_CURRENT_BINARY_DIR}/aux/broker/include) endif () -# CAF headers aren't necessarily in same location as Broker headers and -# inclusion of a Broker header may pull in CAF headers. +# CAF_LIBRARIES and CAF_INCLUDE_DIRS are defined either by calling +# find_package(CAF) or by calling add_subdirectory(aux/broker). In either case, +# we have to care about CAF here because Broker headers can pull in CAF +# headers. +set(zeekdeps ${zeekdeps} ${CAF_LIBRARIES}) include_directories(BEFORE ${CAF_INCLUDE_DIRS}) add_subdirectory(aux/paraglob) set(zeekdeps ${zeekdeps} paraglob) -include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR}/aux/paraglob) +include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR}/aux/paraglob/include) add_subdirectory(src) add_subdirectory(scripts) diff --git a/COPYING.3rdparty b/COPYING.3rdparty index 8c66c27e5d..52b69deecb 100644 --- a/COPYING.3rdparty +++ b/COPYING.3rdparty @@ -115,18 +115,18 @@ DEALINGS IN THE SOFTWARE. ============================================================================== -%%% fifo_map.hpp +%%% tsl-ordered-map ============================================================================== -Copyright (c) 2015-2017 Niels Lohmann. 
+Copyright (c) 2017 Tessil -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. diff --git a/NEWS b/NEWS index ddc682850e..0378fb74d3 100644 --- a/NEWS +++ b/NEWS @@ -19,6 +19,9 @@ New Functionality example of how to perform this tuning and add a "speculative_service" field to conn.log, but it's not loaded by default. +- There is now a new ``tcp_options`` event that is raised for each TCP header + that contains options. + Changed Functionality --------------------- @@ -32,6 +35,11 @@ Changed Functionality script logic that inspects the ``qclass`` field of ``DNS::Info`` after a ``dns_request`` event. +- The configuration input reader now ignores trailing spaces at the end of + configuration lines. + +- The tcp_option event is now correctly raised. + Removed Functionality --------------------- diff --git a/VERSION b/VERSION index 7565ce859e..aee649527f 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.1.0-dev.143 +3.1.0-dev.253 diff --git a/aux/bifcl b/aux/bifcl index 15faaf3274..e5b1940850 160000 --- a/aux/bifcl +++ b/aux/bifcl @@ -1 +1 @@ -Subproject commit 15faaf32741475b9fe07db79c8bbddc0506488d0 +Subproject commit e5b1940850d486e3989f6a55615140a16bd1b9d2 diff --git a/aux/binpac b/aux/binpac index 08fa3a237f..9d7bdd82df 160000 --- a/aux/binpac +++ b/aux/binpac @@ -1 +1 @@ -Subproject commit 08fa3a237f11d817981e5a8ec0c95c2866481db9 +Subproject commit 9d7bdd82dfabe9ca2522c43ef1dd22e1044882ee diff --git a/aux/broker b/aux/broker index 509dd83009..6c0d6c1d43 160000 --- a/aux/broker +++ b/aux/broker @@ -1 +1 @@ -Subproject commit 509dd8300978f1de94a56d8aac8b0a31a72ec178 +Subproject commit 6c0d6c1d43e787c0110d5ad610281e5fb3f48725 diff --git a/aux/paraglob b/aux/paraglob index b9b834c8d1..6c2b36193e 160000 --- a/aux/paraglob +++ b/aux/paraglob @@ -1 +1 @@ -Subproject commit b9b834c8d1ec3f2621ca504eaf60e0361fd607a2 +Subproject commit 6c2b36193e47490e61f22ce6de233af7ed3101b1 diff --git a/aux/zeek-aux b/aux/zeek-aux index 4ab17ec01f..6ecf78b664 160000 --- a/aux/zeek-aux +++ b/aux/zeek-aux @@ -1 +1 @@ -Subproject commit 4ab17ec01f2824df8b0be34505114eba2fcf7b04 +Subproject commit 6ecf78b664653736eb74243f93baf6f2b6aa8747 diff --git a/aux/zeekctl b/aux/zeekctl index 6b2927052f..1bb662776b 160000 --- a/aux/zeekctl +++ b/aux/zeekctl @@ -1 +1 @@ -Subproject commit 6b2927052fead7b2c1eb0e084e81041ef3f5a9d1 +Subproject commit 1bb662776b84420142ca746e3a7b0b486c2ad15d diff --git a/cmake b/cmake index 13401de204..d3e1884a87 160000 --- a/cmake +++ b/cmake @@ -1 +1 @@ -Subproject commit 13401de20449d459501de9461635fe93f4b4ac15 +Subproject commit 
d3e1884a876781dedac716d102e8a06e1cc54380 diff --git a/configure b/configure index b40fb9fd3d..e2670eed5d 100755 --- a/configure +++ b/configure @@ -4,19 +4,11 @@ set -e command="$0 $*" -# check for `cmake` command -type cmake > /dev/null 2>&1 || { - echo "\ -This package requires CMake, please install it first, then you may -use this configure script to access CMake equivalent functionality.\ -" >&2; - exit 1; -} - usage="\ Usage: $0 [OPTION]... [VAR=VALUE]... Build Options: + --cmake=PATH custom path to a CMake binary --builddir=DIR place build files in directory [build] --build-dir=DIR alias for --builddir --build-type=TYPE set CMake build type [RelWithDebInfo]: @@ -42,6 +34,7 @@ Usage: $0 [OPTION]... [VAR=VALUE]... [PREFIX/spool] --logdir=PATH when using ZeekControl, path to store log file [PREFIX/logs] + --libdir=PATH installation directory for library files [PREFIX/lib] --conf-files-dir=PATH config files installation directory [PREFIX/etc] --mandir=PATH installation path for man pages [PREFIX/share/man] @@ -169,6 +162,9 @@ while [ $# -ne 0 ]; do echo "${usage}" 1>&2 exit 1 ;; + --cmake=*) + CMakeCommand=$optarg + ;; --builddir=*) builddir=$optarg ;; @@ -197,6 +193,9 @@ while [ $# -ne 0 ]; do append_cache_entry ZEEK_ROOT_DIR PATH $optarg append_cache_entry PY_MOD_INSTALL_DIR PATH $optarg/lib/zeekctl ;; + --libdir=*) + append_cache_entry CMAKE_INSTALL_LIBDIR PATH $optarg + ;; --scriptdir=*) append_cache_entry ZEEK_SCRIPT_INSTALL_PATH STRING $optarg user_set_scriptdir="true" @@ -338,6 +337,20 @@ while [ $# -ne 0 ]; do shift done +if [ -z "$CMakeCommand" ]; then + # prefer cmake3 over "regular" cmake (cmake == cmake2 on RHEL) + if command -v cmake3 >/dev/null 2>&1 ; then + CMakeCommand="cmake3" + elif command -v cmake >/dev/null 2>&1 ; then + CMakeCommand="cmake" + else + echo "This package requires CMake, please install it first." + echo "Then you may use this script to configure the CMake build." + echo "Note: pass --cmake=PATH to use cmake in non-standard locations." + exit 1; + fi +fi + if [ "$user_set_scriptdir" != "true" ]; then append_cache_entry ZEEK_SCRIPT_INSTALL_PATH STRING $prefix/share/zeek fi @@ -363,9 +376,9 @@ echo "Source Directory: $sourcedir" cd $builddir if [ -n "$CMakeGenerator" ]; then - cmake -G "$CMakeGenerator" $CMakeCacheEntries $sourcedir + "$CMakeCommand" -G "$CMakeGenerator" $CMakeCacheEntries $sourcedir else - cmake $CMakeCacheEntries $sourcedir + "$CMakeCommand" $CMakeCacheEntries $sourcedir fi echo "# This is the command used to configure this build" > config.status diff --git a/doc b/doc index 218907b650..b481bc908e 160000 --- a/doc +++ b/doc @@ -1 +1 @@ -Subproject commit 218907b6500d627904f6ba0767f7ba362bf825f7 +Subproject commit b481bc908ed21a33fab215037e54bba0ab30822e diff --git a/scripts/base/frameworks/config/main.zeek b/scripts/base/frameworks/config/main.zeek index 3b188732b9..08c0905545 100644 --- a/scripts/base/frameworks/config/main.zeek +++ b/scripts/base/frameworks/config/main.zeek @@ -41,7 +41,7 @@ export { ## location: Optional parameter detailing where this change originated from. ## ## Returns: true on success, false when an error occurs. 
- global set_value: function(ID: string, val: any, location: string &default = "" &optional): bool; + global set_value: function(ID: string, val: any, location: string &default = ""): bool; } @if ( Cluster::is_enabled() ) @@ -76,7 +76,7 @@ event Config::cluster_set_option(ID: string, val: any, location: string) Option::set(ID, val, location); } -function set_value(ID: string, val: any, location: string &default = "" &optional): bool +function set_value(ID: string, val: any, location: string &default = ""): bool { # Always copy the value to break references -- if caller mutates their # value afterwards, we still guarantee the option has not changed. If @@ -99,7 +99,7 @@ function set_value(ID: string, val: any, location: string &default = "" &optiona return T; } @else # Standalone implementation -function set_value(ID: string, val: any, location: string &default = "" &optional): bool +function set_value(ID: string, val: any, location: string &default = ""): bool { return Option::set(ID, val, location); } diff --git a/scripts/base/init-bare.zeek b/scripts/base/init-bare.zeek index 4667fb1fb2..4bb8597efe 100644 --- a/scripts/base/init-bare.zeek +++ b/scripts/base/init-bare.zeek @@ -296,6 +296,39 @@ type endpoint_stats: record { endian_type: count; }; +module TCP; +export { + ## A TCP Option field parsed from a TCP header. + type Option: record { + ## The kind number associated with the option. Other optional fields + ## of this record may be set depending on this value. + kind: count; + ## The total length of the option in bytes, including the kind byte and + ## length byte (if present). + length: count; + ## This field is set to the raw option bytes if the kind is not + ## otherwise known/parsed. It's also set for known kinds whose length + ## was invalid. + data: string &optional; + ## Kind 2: Maximum Segment Size. + mss: count &optional; + ## Kind 3: Window scale. + window_scale: count &optional; + ## Kind 5: Selective ACKnowledgement (SACK). This is a list of 2, 4, + ## 6, or 8 numbers with each consecutive pair being a 32-bit + ## begin-pointer and 32-bit end pointer. + sack: index_vec &optional; + ## Kind 8: 4-byte sender timestamp value. + send_timestamp: count &optional; + ## Kind 8: 4-byte echo reply timestamp value. + echo_timestamp: count &optional; + }; + + ## The full list of TCP Option fields parsed from a TCP header. + type OptionList: vector of Option; +} +module GLOBAL; + module Tunnel; export { ## Records the identity of an encapsulating parent of a tunneled connection. diff --git a/scripts/base/protocols/ftp/main.zeek b/scripts/base/protocols/ftp/main.zeek index 1c2dce17f8..30ffc145bf 100644 --- a/scripts/base/protocols/ftp/main.zeek +++ b/scripts/base/protocols/ftp/main.zeek @@ -9,6 +9,7 @@ @load base/utils/paths @load base/utils/numbers @load base/utils/addrs +@load base/frameworks/cluster module FTP; @@ -76,6 +77,17 @@ const directory_cmds = { ["XPWD", 257], }; +function ftp_relay_topic(): string + { + local rval = Cluster::rr_topic(Cluster::proxy_pool, "ftp_transfer_rr_key"); + + if ( rval == "" ) + # No proxy is alive, so relay via manager instead. 
+ return Cluster::manager_topic; + + return rval; + } + function parse_ftp_reply_code(code: count): ReplyCode { local a: ReplyCode; @@ -137,6 +149,29 @@ function ftp_message(s: Info) delete s$data_channel; } +event sync_add_expected_data(s: Info, chan: ExpectedDataChannel) + { +@if ( Cluster::local_node_type() == Cluster::PROXY || + Cluster::local_node_type() == Cluster::MANAGER ) + Broker::publish(Cluster::worker_topic, sync_add_expected_data, s, chan); +@else + ftp_data_expected[chan$resp_h, chan$resp_p] = s; + Analyzer::schedule_analyzer(chan$orig_h, chan$resp_h, chan$resp_p, + Analyzer::ANALYZER_FTP_DATA, + 5mins); +@endif + } + +event sync_remove_expected_data(resp_h: addr, resp_p: port) + { +@if ( Cluster::local_node_type() == Cluster::PROXY || + Cluster::local_node_type() == Cluster::MANAGER ) + Broker::publish(Cluster::worker_topic, sync_remove_expected_data, resp_h, resp_p); +@else + delete ftp_data_expected[resp_h, resp_p]; +@endif + } + function add_expected_data_channel(s: Info, chan: ExpectedDataChannel) { s$passive = chan$passive; @@ -145,6 +180,9 @@ function add_expected_data_channel(s: Info, chan: ExpectedDataChannel) Analyzer::schedule_analyzer(chan$orig_h, chan$resp_h, chan$resp_p, Analyzer::ANALYZER_FTP_DATA, 5mins); +@if ( Cluster::is_enabled() ) + Broker::publish(ftp_relay_topic(), sync_add_expected_data, s, chan); +@endif } event ftp_request(c: connection, command: string, arg: string) &priority=5 @@ -287,7 +325,13 @@ event connection_reused(c: connection) &priority=5 event connection_state_remove(c: connection) &priority=-5 { if ( c$ftp_data_reuse ) return; - delete ftp_data_expected[c$id$resp_h, c$id$resp_p]; + if ( [c$id$resp_h, c$id$resp_p] in ftp_data_expected ) + { + delete ftp_data_expected[c$id$resp_h, c$id$resp_p]; +@if ( Cluster::is_enabled() ) + Broker::publish(ftp_relay_topic(), sync_remove_expected_data, c$id$resp_h, c$id$resp_p); +@endif + } } # Use state remove event to cover connections terminated by RST. diff --git a/scripts/base/protocols/http/utils.zeek b/scripts/base/protocols/http/utils.zeek index a48841cef5..dcdbe4bc8e 100644 --- a/scripts/base/protocols/http/utils.zeek +++ b/scripts/base/protocols/http/utils.zeek @@ -55,9 +55,13 @@ function extract_keys(data: string, kv_splitter: pattern): string_vec function build_url(rec: Info): string { local uri = rec?$uri ? rec$uri : "/"; + if ( strstr(uri, "://") != 0 ) + return uri; + local host = rec?$host ? 
rec$host : addr_to_uri(rec$id$resp_h); - if ( rec$id$resp_p != 80/tcp ) - host = fmt("%s:%s", host, rec$id$resp_p); + local resp_p = port_to_count(rec$id$resp_p); + if ( resp_p != 80 ) + host = fmt("%s:%d", host, resp_p); return fmt("%s%s", host, uri); } diff --git a/scripts/base/protocols/ntp/main.zeek b/scripts/base/protocols/ntp/main.zeek index 7aad2c8dce..7f4a0ffd10 100644 --- a/scripts/base/protocols/ntp/main.zeek +++ b/scripts/base/protocols/ntp/main.zeek @@ -59,7 +59,7 @@ redef likely_server_ports += { ports }; event zeek_init() &priority=5 { Analyzer::register_for_ports(Analyzer::ANALYZER_NTP, ports); - Log::create_stream(NTP::LOG, [$columns = Info, $ev = log_ntp]); + Log::create_stream(NTP::LOG, [$columns = Info, $ev = log_ntp, $path="ntp"]); } event ntp_message(c: connection, is_orig: bool, msg: NTP::Message) &priority=5 diff --git a/src/3rdparty b/src/3rdparty index c1eab215ae..0e1f951b0b 160000 --- a/src/3rdparty +++ b/src/3rdparty @@ -1 +1 @@ -Subproject commit c1eab215ae34b2bc03fcb8c787b386a25e00bf3e +Subproject commit 0e1f951b0bcafea63e503957ae005220c24e4b20 diff --git a/src/Anon.h b/src/Anon.h index 3f19cbd801..f7cbaa24f3 100644 --- a/src/Anon.h +++ b/src/Anon.h @@ -13,7 +13,6 @@ #include #include #include -using namespace std; #include "Reporter.h" #include "net_util.h" diff --git a/src/Attr.cc b/src/Attr.cc index d8cd73ee6f..d63f72a3fd 100644 --- a/src/Attr.cc +++ b/src/Attr.cc @@ -164,10 +164,8 @@ void Attributes::AddAttr(Attr* attr) if ( ! attrs ) attrs = new attr_list(1); - if ( ! attr->RedundantAttrOkay() ) - // We overwrite old attributes by deleting them first. - RemoveAttr(attr->Tag()); - + // We overwrite old attributes by deleting them first. + RemoveAttr(attr->Tag()); attrs->push_back(attr); Ref(attr); @@ -181,8 +179,9 @@ void Attributes::AddAttr(Attr* attr) ! FindAttr(ATTR_REDEF) ) attrs->push_back(new Attr(ATTR_REDEF)); - // For DEFAULT, add an implicit OPTIONAL. - if ( attr->Tag() == ATTR_DEFAULT && ! FindAttr(ATTR_OPTIONAL) ) + // For DEFAULT, add an implicit OPTIONAL if it's not a global. + if ( ! global_var && attr->Tag() == ATTR_DEFAULT && + ! FindAttr(ATTR_OPTIONAL) ) attrs->push_back(new Attr(ATTR_OPTIONAL)); } @@ -289,9 +288,9 @@ void Attributes::CheckAttr(Attr* a) { // &default is allowed for global tables, since it's used in initialization // of table fields. it's not allowed otherwise. - if ( global_var && ! type->IsSet() && type->Tag() != TYPE_TABLE ) + if ( global_var && ! type->IsTable() ) { - Error("&default is not valid for global variables"); + Error("&default is not valid for global variables except for tables"); break; } @@ -373,11 +372,21 @@ void Attributes::CheckAttr(Attr* a) { // &default applies to record field. - if ( same_type(atype, type) || - (atype->Tag() == TYPE_TABLE && atype->AsTableType()->IsUnspecifiedTable()) ) + if ( same_type(atype, type) ) // Ok. break; + if ( (atype->Tag() == TYPE_TABLE && atype->AsTableType()->IsUnspecifiedTable()) ) + { + Expr* e = a->AttrExpr(); + + if ( check_and_promote_expr(e, type) ) + { + a->SetAttrExpr(e); + break; + } + } + // Table defaults may be promotable. if ( ytype && ytype->Tag() == TYPE_RECORD && atype->Tag() == TYPE_RECORD && diff --git a/src/Attr.h b/src/Attr.h index fc33445618..53d60eda27 100644 --- a/src/Attr.h +++ b/src/Attr.h @@ -46,9 +46,6 @@ public: // previous expr as the new expr depends on it. 
void SetAttrExpr(Expr* e) { expr = e; } - int RedundantAttrOkay() const - { return tag == ATTR_REDEF || tag == ATTR_OPTIONAL; } - void Describe(ODesc* d) const override; void DescribeReST(ODesc* d, bool shorten = false) const; diff --git a/src/Base64.cc b/src/Base64.cc index ed45c513ed..53ddfecef6 100644 --- a/src/Base64.cc +++ b/src/Base64.cc @@ -230,7 +230,9 @@ BroString* decode_base64(const BroString* s, const BroString* a, Connection* con char* rbuf2, *rbuf = new char[rlen]; Base64Converter dec(conn, a ? a->CheckString() : ""); - if ( dec.Decode(s->Len(), (const char*) s->Bytes(), &rlen, &rbuf) == -1 ) + dec.Decode(s->Len(), (const char*) s->Bytes(), &rlen, &rbuf); + + if ( dec.Errored() ) goto err; rlen2 = buf_len - rlen; diff --git a/src/BroString.h b/src/BroString.h index 35102a7209..f8bcbbc7dd 100644 --- a/src/BroString.h +++ b/src/BroString.h @@ -7,7 +7,6 @@ #include #include #include -using namespace std; #include "util.h" @@ -20,23 +19,23 @@ class VectorVal; class BroString { public: - typedef vector Vec; + typedef std::vector Vec; typedef Vec::iterator VecIt; typedef Vec::const_iterator VecCIt; - typedef vector CVec; + typedef std::vector CVec; typedef Vec::iterator CVecIt; typedef Vec::const_iterator CVecCIt; // IdxVecs are vectors of indices of characters in a string. - typedef vector IdxVec; + typedef std::vector IdxVec; typedef IdxVec::iterator IdxVecIt; typedef IdxVec::const_iterator IdxVecCIt; // Constructors creating internal copies of the data passed in. BroString(const u_char* str, int arg_n, int add_NUL); explicit BroString(const char* str); - explicit BroString(const string& str); + explicit BroString(const std::string& str); BroString(const BroString& bs); // Constructor that takes owernship of the vector passed in. @@ -64,7 +63,7 @@ public: // void Set(const u_char* str, int len, int add_NUL=1); void Set(const char* str); - void Set(const string& str); + void Set(const std::string& str); void Set(const BroString &str); void SetUseFreeToDelete(int use_it) @@ -103,13 +102,13 @@ public: // Also more useful for debugging purposes since no deallocation // is required on your part here. // - ostream& Render(ostream& os, int format = ESC_SER) const; + std::ostream& Render(std::ostream& os, int format = ESC_SER) const; // Reads a string from an input stream. Unless you use a render // style combination that uses ESC_SER, note that the streams // will consider whitespace as a field delimiter. // - istream& Read(istream& is, int format = ESC_SER); + std::istream& Read(std::istream& is, int format = ESC_SER); // XXX Fix redundancy: strings.bif implements both to_lower // XXX and to_upper; the latter doesn't use BroString::ToUpper(). @@ -165,7 +164,7 @@ public: }; // Default output stream operator, using rendering mode EXPANDED_STRING. 
-ostream& operator<<(ostream& os, const BroString& bs); +std::ostream& operator<<(std::ostream& os, const BroString& bs); extern int Bstr_eq(const BroString* s1, const BroString* s2); extern int Bstr_cmp(const BroString* s1, const BroString* s2); diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 6ccb7c9124..d7b2532fd5 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -13,10 +13,6 @@ set(bro_REGISTER_BIFS CACHE INTERNAL "BIFs for automatic registering" FORCE) set(bro_BASE_BIF_SCRIPTS CACHE INTERNAL "Zeek script stubs for BIFs in base distribution of Zeek" FORCE) set(bro_PLUGIN_BIF_SCRIPTS CACHE INTERNAL "Zeek script stubs for BIFs in Zeek plugins" FORCE) -# If TRUE, use CMake's object libraries for sub-directories instead of -# static libraries. This requires CMake >= 2.8.8. -set(bro_HAVE_OBJECT_LIBRARIES FALSE) - configure_file(version.c.in ${CMAKE_CURRENT_BINARY_DIR}/version.c) configure_file(util-config.h.in ${CMAKE_CURRENT_BINARY_DIR}/util-config.h) @@ -140,7 +136,9 @@ list(APPEND BINPAC_OUTPUTS "${BINPAC_OUTPUT_CC}") ######################################################################## set(bro_SUBDIR_LIBS CACHE INTERNAL "subdir libraries" FORCE) +set(bro_SUBDIR_DEPS CACHE INTERNAL "subdir dependencies" FORCE) set(bro_PLUGIN_LIBS CACHE INTERNAL "plugin libraries" FORCE) +set(bro_PLUGIN_DEPS CACHE INTERNAL "plugin dependencies" FORCE) add_subdirectory(analyzer) add_subdirectory(broker) @@ -151,26 +149,6 @@ add_subdirectory(iosource) add_subdirectory(logging) add_subdirectory(probabilistic) -set(bro_SUBDIRS - # Order is important here. - ${bro_PLUGIN_LIBS} - ${bro_SUBDIR_LIBS} -) - -if ( NOT bro_HAVE_OBJECT_LIBRARIES ) - foreach (_plugin ${bro_PLUGIN_LIBS}) - string(REGEX REPLACE "plugin-" "" _plugin "${_plugin}") - string(REGEX REPLACE "-" "_" _plugin "${_plugin}") - set(_decl "namespace plugin { namespace ${_plugin} { class Plugin; extern Plugin plugin; } };") - set(_use "i += (size_t)(&(plugin::${_plugin}::plugin));") - set(__BRO_DECL_PLUGINS "${__BRO_DECL_PLUGINS}${_decl}\n") - set(__BRO_USE_PLUGINS "${__BRO_USE_PLUGINS}${_use}\n") - endforeach() - - configure_file(plugins.cc.in ${CMAKE_CURRENT_BINARY_DIR}/plugins.cc) - set(PLUGIN_INIT ${CMAKE_CURRENT_BINARY_DIR}/plugins.cc) -endif() - ######################################################################## ## bro target @@ -346,20 +324,14 @@ set(bro_SRCS ${FLEX_Scanner_INPUT} ${BISON_Parser_INPUT} ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h - ${PLUGIN_INIT} ${THIRD_PARTY_SRCS} ${MAIN_SRCS} ) collect_headers(bro_HEADERS ${bro_SRCS}) -if ( bro_HAVE_OBJECT_LIBRARIES ) - add_executable(zeek ${bro_SRCS} ${bro_HEADERS} ${bro_SUBDIRS}) - target_link_libraries(zeek ${zeekdeps} ${CMAKE_THREAD_LIBS_INIT} ${CMAKE_DL_LIBS}) -else () - add_executable(zeek ${bro_SRCS} ${bro_HEADERS}) - target_link_libraries(zeek ${bro_SUBDIRS} ${zeekdeps} ${CMAKE_THREAD_LIBS_INIT} ${CMAKE_DL_LIBS}) -endif () +add_executable(zeek ${bro_SRCS} ${bro_HEADERS} ${bro_SUBDIR_LIBS} ${bro_PLUGIN_LIBS}) +target_link_libraries(zeek ${zeekdeps} ${CMAKE_THREAD_LIBS_INIT} ${CMAKE_DL_LIBS}) if ( NOT "${bro_LINKER_FLAGS}" STREQUAL "" ) set_target_properties(zeek PROPERTIES LINK_FLAGS "${bro_LINKER_FLAGS}") @@ -398,12 +370,12 @@ add_dependencies(generate_outputs generate_outputs_stage2a generate_outputs_stag # Build __load__.zeek files for standard *.bif.zeek. 
bro_bif_create_loader(bif_loader "${bro_BASE_BIF_SCRIPTS}") -add_dependencies(bif_loader ${bro_SUBDIRS}) +add_dependencies(bif_loader ${bro_PLUGIN_DEPS} ${bro_SUBDIR_DEPS}) add_dependencies(zeek bif_loader) # Build __load__.zeek files for plugins/*.bif.zeek. bro_bif_create_loader(bif_loader_plugins "${bro_PLUGIN_BIF_SCRIPTS}") -add_dependencies(bif_loader_plugins ${bro_SUBDIRS}) +add_dependencies(bif_loader_plugins ${bro_PLUGIN_DEPS} ${bro_SUBDIR_DEPS}) add_dependencies(zeek bif_loader_plugins) # Install *.bif.zeek. @@ -443,12 +415,17 @@ install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/ ) install(FILES - ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/fifo_map.hpp ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/json.hpp ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/sqlite3.h DESTINATION include/zeek/3rdparty ) +install(FILES + ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/tsl-ordered-map/ordered_map.h + ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/tsl-ordered-map/ordered_hash.h + DESTINATION include/zeek/3rdparty/tsl-ordered-map +) + ######################################################################## ## Clang-tidy target now that we have all of the sources diff --git a/src/DFA.h b/src/DFA.h index 12620568ae..a3b79920f6 100644 --- a/src/DFA.h +++ b/src/DFA.h @@ -4,6 +4,7 @@ #pragma once #include +#include class DFA_State; @@ -66,7 +67,7 @@ protected: static unsigned int transition_counter; // see Xtion() }; -using DigestStr = basic_string; +using DigestStr = std::basic_string; class DFA_State_Cache { public: diff --git a/src/Debug.h b/src/Debug.h index a036dd264e..42fd3bd42a 100644 --- a/src/Debug.h +++ b/src/Debug.h @@ -5,8 +5,6 @@ #include #include #include -using namespace std; - class Stmt; @@ -35,10 +33,10 @@ class DbgWatch; class DbgDisplay; class StmtHashFn; -typedef map BPIDMapType; -typedef multimap BPMapType; +typedef std::map BPIDMapType; +typedef std::multimap BPMapType; -extern string current_module; +extern std::string current_module; class TraceState { public: @@ -88,8 +86,8 @@ public: Location last_loc; // used by 'list'; the last location listed BPIDMapType breakpoints; // BPID -> Breakpoint - vector watches; - vector displays; + std::vector watches; + std::vector displays; BPMapType breakpoint_map; // maps Stmt -> Breakpoints on it protected: @@ -135,7 +133,7 @@ extern DebuggerState g_debugger_state; // Multiple results can be returned depending on the input, but always // at least 1. -vector parse_location_string(const string& s); +std::vector parse_location_string(const std::string& s); // ### TODO: Add a bunch of hook functions for various events // e.g. variable changed, breakpoint hit, etc. @@ -171,11 +169,11 @@ Val* dbg_eval_expr(const char* expr); int dbg_read_internal_state(); // Get line that looks like "In FnFoo(arg = val) at File:Line". -string get_context_description(const Stmt* stmt, const Frame* frame); +std::string get_context_description(const Stmt* stmt, const Frame* frame); extern Frame* g_dbg_locals; // variables created within debugger context -extern std::map g_dbgfilemaps; // filename => filemap +extern std::map g_dbgfilemaps; // filename => filemap // Perhaps add a code/priority argument to do selective output. int debug_msg(const char* fmt, ...) 
__attribute__ ((format (printf, 1, 2))); diff --git a/src/DebugCmds.h b/src/DebugCmds.h index 7eb0f025a7..470e4ae993 100644 --- a/src/DebugCmds.h +++ b/src/DebugCmds.h @@ -6,7 +6,6 @@ #include #include #include -using namespace std; #include "Queue.h" #include "DebugCmdConstants.h" @@ -23,7 +22,7 @@ public: int Cmd() const { return cmd; } int NumNames() const { return num_names; } - const vector& Names() const { return names; } + const std::vector& Names() const { return names; } bool ResumeExecution() const { return resume_execution; } const char* Helpstring() const { return helpstring; } bool Repeatable() const { return repeatable; } @@ -32,7 +31,7 @@ protected: DebugCmd cmd; int num_names; - vector names; + std::vector names; // Whether executing this should restart execution of the script. bool resume_execution; @@ -57,14 +56,14 @@ const DebugCmdInfo* get_debug_cmd_info(DebugCmd cmd); // on whether or not the prefix supplied matches a name (DebugCmdString) // of the corresponding DebugCmd. The size of the array should be at // least NUM_DEBUG_CMDS. The total number of matches is returned. -int find_all_matching_cmds(const string& prefix, const char* array_of_matches[]); +int find_all_matching_cmds(const std::string& prefix, const char* array_of_matches[]); // Implementation of debugging commands. // // These functions return <= 0 if failure, > 0 for success. // More particular return values are command-specific: see comments w/function. -typedef int DbgCmdFn(DebugCmd cmd, const vector& args); +typedef int DbgCmdFn(DebugCmd cmd, const std::vector& args); DbgCmdFn dbg_cmd_backtrace; DbgCmdFn dbg_cmd_frame; diff --git a/src/Desc.h b/src/Desc.h index fadf61eac7..ff59aba43c 100644 --- a/src/Desc.h +++ b/src/Desc.h @@ -5,6 +5,7 @@ #include #include #include +#include #include "BroString.h" @@ -56,13 +57,13 @@ public: void EnableUTF8(); void AddEscapeSequence(const char* s) { escape_sequences.insert(s); } void AddEscapeSequence(const char* s, size_t n) - { escape_sequences.insert(string(s, n)); } - void AddEscapeSequence(const string & s) + { escape_sequences.insert(std::string(s, n)); } + void AddEscapeSequence(const std::string & s) { escape_sequences.insert(s); } void RemoveEscapeSequence(const char* s) { escape_sequences.erase(s); } void RemoveEscapeSequence(const char* s, size_t n) - { escape_sequences.erase(string(s, n)); } - void RemoveEscapeSequence(const string & s) + { escape_sequences.erase(std::string(s, n)); } + void RemoveEscapeSequence(const std::string & s) { escape_sequences.erase(s); } void PushIndent(); @@ -76,7 +77,7 @@ public: void Add(const char* s, int do_indent=1); void AddN(const char* s, int len) { AddBytes(s, len); } - void Add(const string& s) { AddBytes(s.data(), s.size()); } + void Add(const std::string& s) { AddBytes(s.data(), s.size()); } void Add(int i); void Add(uint32_t u); void Add(int64_t i); @@ -119,7 +120,7 @@ public: // Bypasses the escaping enabled via SetEscape(). void AddRaw(const char* s, int len) { AddBytesRaw(s, len); } - void AddRaw(const string &s) { AddBytesRaw(s.data(), s.size()); } + void AddRaw(const std::string &s) { AddBytesRaw(s.data(), s.size()); } // Returns the description as a string. const char* Description() const { return (const char*) base; } @@ -167,7 +168,7 @@ protected: * second element. The first element may be 0 if nothing is * to be escaped. 
*/ - pair FirstEscapeLoc(const char* bytes, size_t n); + std::pair FirstEscapeLoc(const char* bytes, size_t n); /** * @param start start of string to check for starting with an espace @@ -187,7 +188,7 @@ protected: bool utf8; // whether valid utf-8 sequences may pass through unescaped bool escape; // escape unprintable characters in output? - typedef set escape_set; + typedef std::set escape_set; escape_set escape_sequences; // additional sequences of chars to escape BroFile* f; // or the file we're using. diff --git a/src/Frame.cc b/src/Frame.cc index 2ecafd5089..f4eb49cbac 100644 --- a/src/Frame.cc +++ b/src/Frame.cc @@ -403,14 +403,14 @@ std::pair Frame::Unserialize(const broker::vector& data) broker::integer g = *has_type; BroType t( static_cast(g) ); - Val* val = bro_broker::data_to_val(std::move(val_tuple[0]), &t); + auto val = bro_broker::data_to_val(std::move(val_tuple[0]), &t); if ( ! val ) { Unref(rf); return std::make_pair(false, nullptr); } - rf->frame[i] = val; + rf->frame[i] = val.detach(); } return std::make_pair(true, rf); diff --git a/src/ID.h b/src/ID.h index 4ebcc9769d..b3e36ebd4f 100644 --- a/src/ID.h +++ b/src/ID.h @@ -27,7 +27,7 @@ public: bool IsExport() const { return is_export; } void SetExport() { is_export = true; } - string ModuleName() const; + std::string ModuleName() const; void SetType(BroType* t) { Unref(type); type = t; } BroType* Type() { return type; } @@ -83,7 +83,7 @@ public: void MakeDeprecated(Expr* deprecation); - string GetDeprecationWarning() const; + std::string GetDeprecationWarning() const; void Error(const char* msg, const BroObj* o2 = 0); @@ -106,7 +106,7 @@ public: // Takes ownership of callback. void AddOptionHandler(Func* callback, int priority); - vector GetOptionHandlers() const; + std::vector GetOptionHandlers() const; protected: ID() { name = 0; type = 0; val = 0; attrs = 0; } diff --git a/src/IntrusivePtr.h b/src/IntrusivePtr.h new file mode 100644 index 0000000000..99a19f80ff --- /dev/null +++ b/src/IntrusivePtr.h @@ -0,0 +1,301 @@ +// See the file "COPYING" in the main distribution directory for copyright. + +#pragma once + +#include +#include + +/** + * An intrusive, reference counting smart pointer implementation. Much like + * @c std::shared_ptr, this smart pointer models shared ownership of an object + * through a pointer. Several @c IntrusivePtr instances may point to the same + * object. + * + * The @c IntrusivePtr requires two free functions associated to @c T that must + * be available via argument-dependent lookup: @c Ref and @c Unref. The former + * increments the reference by one whenever a new owner participates in the + * lifetime of the shared object and the latter decrements the reference count + * by one. Once the reference count reaches zero, @c Unref also is responsible + * for destroying the shared object. + * + * The @c IntrusivePtr works with any type that offers the two free functions, + * but most notably is designed to work with @c BroObj and its subtypes. + * + * The same object may get managed via @c IntrusivePtr in one part of the + * code base while another part of the program manages it manually by passing + * raw pointers and calling @c Ref and @c Unref explicitly. However, new code + * should use a smart pointer whenever possible to reduce boilerplate code and + * increase robustness of the code (in particular w.r.t. exceptions). 
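 *
 * A minimal usage sketch (illustrative only, assuming some type T that
 * provides the required free functions @c Ref and @c Unref):
 *
 * @code
 * void example(T* raw)
 *   {
 *   // Adopt a shared reference, bumping the reference count.
 *   IntrusivePtr<T> a{raw, true};
 *
 *   // Copies bump the count again; each destructor releases one reference.
 *   IntrusivePtr<T> b = a;
 *
 *   // Hand a counted reference back to code that still manages it manually.
 *   T* legacy = b.detach();
 *   Unref(legacy);
 *   }
 * @endcode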
+ */ +template +class IntrusivePtr { +public: + // -- member types + + using pointer = T*; + + using const_pointer = const T*; + + using element_type = T; + + using reference = T&; + + using const_reference = const T&; + + // -- constructors, destructors, and assignment operators + + constexpr IntrusivePtr() noexcept : ptr_(nullptr) + { + // nop + } + + constexpr IntrusivePtr(std::nullptr_t) noexcept : IntrusivePtr() + { + // nop + } + + /** + * Constructs a new intrusive pointer for managing the lifetime of the object + * pointed to by @c raw_ptr. + * @param raw_ptr Pointer to the shared object. + * @param add_ref Denotes whether the reference count of the object shall be + * increased during construction. + */ + IntrusivePtr(pointer raw_ptr, bool add_ref) noexcept + { + setPtr(raw_ptr, add_ref); + } + + IntrusivePtr(IntrusivePtr&& other) noexcept : ptr_(other.detach()) + { + // nop + } + + IntrusivePtr(const IntrusivePtr& other) noexcept + { + setPtr(other.get(), true); + } + + template >> + IntrusivePtr(IntrusivePtr other) noexcept : ptr_(other.detach()) + { + // nop + } + + ~IntrusivePtr() + { + if ( ptr_ ) + Unref(ptr_); + } + + void swap(IntrusivePtr& other) noexcept + { + std::swap(ptr_, other.ptr_); + } + + /** + * Detaches an object from the automated lifetime management and sets this + * intrusive pointer to @c nullptr. + * @returns the raw pointer without modifying the reference count. + */ + pointer detach() noexcept + { + auto result = ptr_; + if ( result ) + ptr_ = nullptr; + return result; + } + + /** + * Convenience function for assigning a new raw pointer. Equivalent to calling + * @c operator= with an @c IntrusivePtr constructed from the arguments. + * @param new_value Pointer to the new shared object. + * @param add_ref Denotes whether the reference count of the new shared object + * shall be increased. + */ + void reset(pointer new_value = nullptr, bool add_ref = true) noexcept + { + auto old = ptr_; + setPtr(new_value, add_ref); + if ( old ) + Unref(old); + } + + IntrusivePtr& operator=(IntrusivePtr other) noexcept + { + swap(other); + return *this; + } + + pointer get() const noexcept + { + return ptr_; + } + + pointer operator->() const noexcept + { + return ptr_; + } + + reference operator*() const noexcept + { + return *ptr_; + } + + bool operator!() const noexcept + { + return !ptr_; + } + + explicit operator bool() const noexcept + { + return ptr_ != nullptr; + } + +private: + void setPtr(pointer raw_ptr, bool add_ref) noexcept + { + ptr_ = raw_ptr; + if ( raw_ptr && add_ref ) + Ref(raw_ptr); + } + + pointer ptr_; +}; + +/** + * Convenience function for creating a reference counted object and wrapping it + * into an intrusive pointers. + * @param args Arguments for constructing the shared object of type @c T. + * @returns an @c IntrusivePtr pointing to the new object. + * @note This function assumes that any @c T starts with a reference count of 1. + * @relates IntrusivePtr + */ +template +IntrusivePtr make_intrusive(Ts&&... args) + { + // Assumes that objects start with a reference count of 1! 
+ return {new T(std::forward(args)...), false}; + } + +// -- comparison to nullptr ---------------------------------------------------- + +/** + * @relates IntrusivePtr + */ +template +bool operator==(const IntrusivePtr& x, std::nullptr_t) { + return !x; +} + +/** + * @relates IntrusivePtr + */ +template +bool operator==(std::nullptr_t, const IntrusivePtr& x) { + return !x; +} + +/** + * @relates IntrusivePtr + */ +template +bool operator!=(const IntrusivePtr& x, std::nullptr_t) { + return static_cast(x); +} + +/** + * @relates IntrusivePtr + */ +template +bool operator!=(std::nullptr_t, const IntrusivePtr& x) { + return static_cast(x); +} + +// -- comparison to raw pointer ------------------------------------------------ + +/** + * @relates IntrusivePtr + */ +template +bool operator==(const IntrusivePtr& x, const T* y) { + return x.get() == y; +} + +/** + * @relates IntrusivePtr + */ +template +bool operator==(const T* x, const IntrusivePtr& y) { + return x == y.get(); +} + +/** + * @relates IntrusivePtr + */ +template +bool operator!=(const IntrusivePtr& x, const T* y) { + return x.get() != y; +} + +/** + * @relates IntrusivePtr + */ +template +bool operator!=(const T* x, const IntrusivePtr& y) { + return x != y.get(); +} + +/** + * @relates IntrusivePtr + */ +template +bool operator<(const IntrusivePtr& x, const T* y) + { + return x.get() < y; + } + +/** + * @relates IntrusivePtr + */ +template +bool operator<(const T* x, const IntrusivePtr& y) + { + return x < y.get(); + } + +// -- comparison to intrusive pointer ------------------------------------------ + +// Using trailing return type and decltype() here removes this function from +// overload resolution if the two pointers types are not comparable (SFINAE). + +/** + * @relates IntrusivePtr + */ +template +auto operator==(const IntrusivePtr& x, const IntrusivePtr& y) +-> decltype(x.get() == y.get()) + { + return x.get() == y.get(); + } + +/** + * @relates IntrusivePtr + */ +template +auto operator!=(const IntrusivePtr& x, const IntrusivePtr& y) +-> decltype(x.get() != y.get()) + { + return x.get() != y.get(); + } + +/** + * @relates IntrusivePtr + */ +template +auto operator<(const IntrusivePtr& x, const IntrusivePtr& y) +-> decltype(x.get() < y.get()) + { + return x.get() < y.get(); + } + diff --git a/src/OpaqueVal.cc b/src/OpaqueVal.cc index cddc230f64..033c4e6fe6 100644 --- a/src/OpaqueVal.cc +++ b/src/OpaqueVal.cc @@ -1,5 +1,7 @@ // See the file "COPYING" in the main distribution directory for copyright. 
+#include + #include "OpaqueVal.h" #include "NetVar.h" #include "Reporter.h" @@ -1073,7 +1075,7 @@ bool ParaglobVal::DoUnserialize(const broker::data& data) try { - this->internal_paraglob = build_unique(std::move(iv)); + this->internal_paraglob = std::make_unique(std::move(iv)); } catch (const paraglob::underflow_error& e) { @@ -1093,7 +1095,7 @@ Val* ParaglobVal::DoClone(CloneState* state) { try { return new ParaglobVal - (build_unique(this->internal_paraglob->serialize())); + (std::make_unique(this->internal_paraglob->serialize())); } catch (const paraglob::underflow_error& e) { diff --git a/src/OpaqueVal.h b/src/OpaqueVal.h index c95b046c10..faff969cbf 100644 --- a/src/OpaqueVal.h +++ b/src/OpaqueVal.h @@ -8,7 +8,7 @@ #include "RandTest.h" #include "Val.h" #include "digest.h" -#include "src/paraglob.h" +#include "paraglob/paraglob.h" class OpaqueVal; diff --git a/src/PacketDumper.h b/src/PacketDumper.h index e45adeb41f..78469b3d7c 100644 --- a/src/PacketDumper.h +++ b/src/PacketDumper.h @@ -2,8 +2,6 @@ #pragma once -using namespace std; - #include #include @@ -35,5 +33,5 @@ struct ltipid { } }; -typedef set IP_IDSet; +typedef std::set IP_IDSet; uint16_t NextIP_ID(const uint32_t src_addr, const uint16_t id); diff --git a/src/RE.h b/src/RE.h index 17fa36c0cf..ace38f2767 100644 --- a/src/RE.h +++ b/src/RE.h @@ -10,6 +10,7 @@ #include #include +#include #include typedef int (*cce_func)(int); @@ -60,7 +61,7 @@ public: // functionality. std::string LookupDef(const std::string& def); - void InsertCCL(const char* txt, CCL* ccl) { ccl_dict[string(txt)] = ccl; } + void InsertCCL(const char* txt, CCL* ccl) { ccl_dict[std::string(txt)] = ccl; } int InsertCCL(CCL* ccl) { ccl_list.push_back(ccl); @@ -68,7 +69,7 @@ public: } CCL* LookupCCL(const char* txt) { - const auto& iter = ccl_dict.find(string(txt)); + const auto& iter = ccl_dict.find(std::string(txt)); if ( iter != ccl_dict.end() ) return iter->second; @@ -125,8 +126,8 @@ protected: int multiline; char* pattern_text; - std::map defs; - std::map ccl_dict; + std::map defs; + std::map ccl_dict; PList ccl_list; EquivClass equiv_class; int* ecs; diff --git a/src/Rule.h b/src/Rule.h index bc478f18f3..663174f28f 100644 --- a/src/Rule.h +++ b/src/Rule.h @@ -15,7 +15,7 @@ class RuleHdrTest; class Rule; typedef PList rule_list; -typedef std::map rule_dict; +typedef std::map rule_dict; class Rule { public: diff --git a/src/Scope.h b/src/Scope.h index 2c25863e23..9b046fafa1 100644 --- a/src/Scope.h +++ b/src/Scope.h @@ -47,7 +47,7 @@ public: BroType* ReturnType() const { return return_type; } size_t Length() const { return local.size(); } - std::map& Vars() { return local; } + std::map& Vars() { return local; } ID* GenerateTemporary(const char* name); @@ -66,7 +66,7 @@ protected: ID* scope_id; attr_list* attrs; BroType* return_type; - std::map local; + std::map local; id_list* inits; }; @@ -90,4 +90,4 @@ extern Scope* current_scope(); extern Scope* global_scope(); // Current module (identified by its name). 
-extern string current_module; +extern std::string current_module; diff --git a/src/SerializationFormat.h b/src/SerializationFormat.h index 03bf758c31..772837ed91 100644 --- a/src/SerializationFormat.h +++ b/src/SerializationFormat.h @@ -4,8 +4,6 @@ #include -using namespace std; - #include "util.h" class IPAddr; @@ -29,7 +27,7 @@ public: virtual bool Read(char* v, const char* tag) = 0; virtual bool Read(bool* v, const char* tag) = 0; virtual bool Read(double* d, const char* tag) = 0; - virtual bool Read(string* s, const char* tag) = 0; + virtual bool Read(std::string* s, const char* tag) = 0; virtual bool Read(IPAddr* addr, const char* tag) = 0; virtual bool Read(IPPrefix* prefix, const char* tag) = 0; virtual bool Read(struct in_addr* addr, const char* tag) = 0; @@ -63,7 +61,7 @@ public: virtual bool Write(double d, const char* tag) = 0; virtual bool Write(const char* s, const char* tag) = 0; virtual bool Write(const char* buf, int len, const char* tag) = 0; - virtual bool Write(const string& s, const char* tag) = 0; + virtual bool Write(const std::string& s, const char* tag) = 0; virtual bool Write(const IPAddr& addr, const char* tag) = 0; virtual bool Write(const IPPrefix& prefix, const char* tag) = 0; virtual bool Write(const struct in_addr& addr, const char* tag) = 0; @@ -108,7 +106,7 @@ public: bool Read(bool* v, const char* tag) override; bool Read(double* d, const char* tag) override; bool Read(char** str, int* len, const char* tag) override; - bool Read(string* s, const char* tag) override; + bool Read(std::string* s, const char* tag) override; bool Read(IPAddr* addr, const char* tag) override; bool Read(IPPrefix* prefix, const char* tag) override; bool Read(struct in_addr* addr, const char* tag) override; @@ -123,7 +121,7 @@ public: bool Write(double d, const char* tag) override; bool Write(const char* s, const char* tag) override; bool Write(const char* buf, int len, const char* tag) override; - bool Write(const string& s, const char* tag) override; + bool Write(const std::string& s, const char* tag) override; bool Write(const IPAddr& addr, const char* tag) override; bool Write(const IPPrefix& prefix, const char* tag) override; bool Write(const struct in_addr& addr, const char* tag) override; diff --git a/src/SmithWaterman.h b/src/SmithWaterman.h index 541f777b89..3d29eb15a3 100644 --- a/src/SmithWaterman.h +++ b/src/SmithWaterman.h @@ -4,7 +4,6 @@ #include "BroString.h" #include -using namespace std; // BroSubstrings are essentially BroStrings, augmented with indexing // information required for the Smith-Waterman algorithm. 
Each substring @@ -15,7 +14,7 @@ using namespace std; class BroSubstring : public BroString { public: - typedef vector Vec; + typedef std::vector Vec; typedef Vec::iterator VecIt; typedef Vec::const_iterator VecCIt; @@ -36,11 +35,11 @@ public: int index; }; - typedef vector BSSAlignVec; + typedef std::vector BSSAlignVec; typedef BSSAlignVec::iterator BSSAlignVecIt; typedef BSSAlignVec::const_iterator BSSAlignVecCIt; - explicit BroSubstring(const string& string) + explicit BroSubstring(const std::string& string) : BroString(string), _num(), _new(false) { } explicit BroSubstring(const BroString& string) @@ -76,7 +75,7 @@ public: unsigned int index); private: - typedef map DataMap; + typedef std::map DataMap; typedef DataMap::iterator DataMapIt; BroSubstring(); diff --git a/src/Type.h b/src/Type.h index e0729cbf5f..a4261e453a 100644 --- a/src/Type.h +++ b/src/Type.h @@ -262,8 +262,8 @@ public: virtual unsigned MemoryAllocation() const; - void SetName(const string& arg_name) { name = arg_name; } - const string& GetName() const { return name; } + void SetName(const std::string& arg_name) { name = arg_name; } + const std::string& GetName() const { return name; } typedef std::map > TypeAliasMap; @@ -283,7 +283,7 @@ private: InternalTypeTag internal_tag; bool is_network_order; bool base_type; - string name; + std::string name; static TypeAliasMap type_aliases; }; @@ -402,7 +402,7 @@ public: const BroType* YieldType() const override; void SetYieldType(BroType* arg_yield) { yield = arg_yield; } function_flavor Flavor() const { return flavor; } - string FlavorString() const; + std::string FlavorString() const; // Used to convert a function type to an event or hook type. void ClearYieldType(function_flavor arg_flav) @@ -498,7 +498,13 @@ public: return decl && decl->FindAttr(ATTR_DEPRECATED) != 0; } - string GetFieldDeprecationWarning(int field, bool has_check) const; + bool FieldHasAttr(int field, attr_tag at) const + { + const TypeDecl* decl = FieldDecl(field); + return decl && decl->FindAttr(at) != 0; + } + + std::string GetFieldDeprecationWarning(int field, bool has_check) const; protected: RecordType() { types = 0; } @@ -531,11 +537,11 @@ protected: class OpaqueType : public BroType { public: - explicit OpaqueType(const string& name); + explicit OpaqueType(const std::string& name); OpaqueType* ShallowClone() override { return new OpaqueType(name); } ~OpaqueType() override { }; - const string& Name() const { return name; } + const std::string& Name() const { return name; } void Describe(ODesc* d) const override; void DescribeReST(ODesc* d, bool roles_only = false) const override; @@ -543,29 +549,29 @@ public: protected: OpaqueType() { } - string name; + std::string name; }; class EnumType : public BroType { public: - typedef std::list > enum_name_list; + typedef std::list > enum_name_list; explicit EnumType(const EnumType* e); - explicit EnumType(const string& arg_name); + explicit EnumType(const std::string& arg_name); EnumType* ShallowClone() override; ~EnumType() override; // The value of this name is next internal counter value, starting // with zero. The internal counter is incremented. - void AddName(const string& module_name, const char* name, bool is_export, Expr* deprecation = nullptr); + void AddName(const std::string& module_name, const char* name, bool is_export, Expr* deprecation = nullptr); // The value of this name is set to val. Once a value has been // explicitly assigned using this method, no further names can be // added that aren't likewise explicitly initalized. 
- void AddName(const string& module_name, const char* name, bro_int_t val, bool is_export, Expr* deprecation = nullptr); + void AddName(const std::string& module_name, const char* name, bro_int_t val, bool is_export, Expr* deprecation = nullptr); // -1 indicates not found. - bro_int_t Lookup(const string& module_name, const char* name) const; + bro_int_t Lookup(const std::string& module_name, const char* name) const; const char* Lookup(bro_int_t value) const; // Returns 0 if not found // Returns the list of defined names with their values. The names @@ -579,10 +585,10 @@ public: protected: EnumType() { counter = 0; } - void AddNameInternal(const string& module_name, + void AddNameInternal(const std::string& module_name, const char* name, bro_int_t val, bool is_export); - void CheckAndAddName(const string& module_name, + void CheckAndAddName(const std::string& module_name, const char* name, bro_int_t val, bool is_export, Expr* deprecation = nullptr); diff --git a/src/Val.cc b/src/Val.cc index 6a27dbfbc7..29959ff0f7 100644 --- a/src/Val.cc +++ b/src/Val.cc @@ -28,14 +28,19 @@ #include "broker/Data.h" #include "3rdparty/json.hpp" -#include "3rdparty/fifo_map.hpp" +#include "3rdparty/tsl-ordered-map/ordered_map.h" + // Define a class for use with the json library that orders the keys in the same order that // they were inserted. By default, the json library orders them alphabetically and we don't // want it like that. -template -using json_fifo_map = nlohmann::fifo_map, A>; -using ZeekJson = nlohmann::basic_json; +template, class KeyEqual = std::equal_to, + class AllocatorPair = typename std::allocator_traits::template rebind_alloc>, + class ValueTypeContainer = std::vector, AllocatorPair>> +using ordered_map = tsl::ordered_map; + +using ZeekJson = nlohmann::basic_json; Val::Val(Func* f) { @@ -458,6 +463,10 @@ TableVal* Val::GetRecordFields() // This is a static method in this file to avoid including json.hpp in Val.h since it's huge. static ZeekJson BuildJSON(Val* val, bool only_loggable=false, RE_Matcher* re=new RE_Matcher("^_")) { + // If the value wasn't set, return a nullptr. This will get turned into a 'null' in the json output. + if ( ! 
val ) + return nullptr; + ZeekJson j; BroType* type = val->Type(); switch ( type->Tag() ) @@ -489,8 +498,8 @@ static ZeekJson BuildJSON(Val* val, bool only_loggable=false, RE_Matcher* re=new case TYPE_PORT: { auto* pval = val->AsPortVal(); - j["port"] = pval->Port(); - j["proto"] = pval->Protocol(); + j.emplace("port", pval->Port()); + j.emplace("proto", pval->Protocol()); break; } @@ -537,34 +546,26 @@ static ZeekJson BuildJSON(Val* val, bool only_loggable=false, RE_Matcher* re=new j = ZeekJson::object(); HashKey* k; + TableEntryVal* entry; auto c = table->InitForIteration(); - while ( table->NextEntry(k, c) ) + while ( (entry = table->NextEntry(k, c)) ) { auto lv = tval->RecoverIndex(k); delete k; + Val* entry_key; + if ( lv->Length() == 1 ) + entry_key = lv->Index(0)->Ref(); + else + entry_key = lv->Ref(); + + ZeekJson key_json = BuildJSON(entry_key, only_loggable, re); + if ( tval->Type()->IsSet() ) - { - auto* value = lv->Index(0)->Ref(); - j.push_back(BuildJSON(value, only_loggable, re)); - Unref(value); - } + j.emplace_back(std::move(key_json)); else { - ZeekJson key_json; - Val* entry_value; - if ( lv->Length() == 1 ) - { - Val* entry_key = lv->Index(0)->Ref(); - entry_value = tval->Lookup(entry_key, true); - key_json = BuildJSON(entry_key, only_loggable, re); - Unref(entry_key); - } - else - { - entry_value = tval->Lookup(lv, true); - key_json = BuildJSON(lv, only_loggable, re); - } + Val* entry_value = entry->Value(); string key_string; if ( key_json.is_string() ) @@ -572,9 +573,10 @@ static ZeekJson BuildJSON(Val* val, bool only_loggable=false, RE_Matcher* re=new else key_string = key_json.dump(); - j[key_string] = BuildJSON(entry_value, only_loggable, re); + j.emplace(key_string, BuildJSON(entry_value, only_loggable, re)); } + Unref(entry_key); Unref(lv); } @@ -585,35 +587,32 @@ static ZeekJson BuildJSON(Val* val, bool only_loggable=false, RE_Matcher* re=new { j = ZeekJson::object(); auto* rval = val->AsRecordVal(); - TableVal* fields = rval->GetRecordFields(); - auto* field_indexes = fields->ConvertToPureList(); - int num_indexes = field_indexes->Length(); + auto rt = rval->Type()->AsRecordType(); - for ( int i = 0; i < num_indexes; ++i ) + for ( auto i = 0; i < rt->NumFields(); ++i ) { - Val* key = field_indexes->Index(i); - auto* key_field = fields->Lookup(key)->AsRecordVal(); + auto field_name = rt->FieldName(i); + std::string key_string; - auto* key_val = key->AsStringVal(); - string key_string; - if ( re->MatchAnywhere(key_val->AsString()) != 0 ) + if ( re->MatchAnywhere(field_name) != 0 ) { StringVal blank(""); - key_val = key_val->Substitute(re, &blank, 0)->AsStringVal(); + StringVal fn_val(field_name); + auto key_val = fn_val.Substitute(re, &blank, 0)->AsStringVal(); key_string = key_val->ToStdString(); - delete key_val; + Unref(key_val); } else - key_string = key_val->ToStdString(); + key_string = field_name; - Val* value = key_field->Lookup("value", true); + Val* value = rval->LookupWithDefault(i); - if ( value && ( ! only_loggable || key_field->Lookup("log")->AsBool() ) ) - j[key_string] = BuildJSON(value, only_loggable, re); + if ( value && ( ! 
only_loggable || rt->FieldHasAttr(i, ATTR_LOG) ) ) + j.emplace(key_string, BuildJSON(value, only_loggable, re)); + + Unref(value); } - delete fields; - delete field_indexes; break; } @@ -641,9 +640,8 @@ static ZeekJson BuildJSON(Val* val, bool only_loggable=false, RE_Matcher* re=new case TYPE_OPAQUE: { - j = ZeekJson::object(); auto* oval = val->AsOpaqueVal(); - j["opaque_type"] = OpaqueMgr::mgr()->TypeID(oval); + j = { { "opaque_type", OpaqueMgr::mgr()->TypeID(oval) } }; break; } @@ -666,6 +664,17 @@ IntervalVal::IntervalVal(double quantity, double units) : void IntervalVal::ValDescribe(ODesc* d) const { + using unit_word = std::pair; + + constexpr std::array units = { + unit_word{ Days, "day" }, + unit_word{ Hours, "hr" }, + unit_word{ Minutes, "min" }, + unit_word{ Seconds, "sec" }, + unit_word{ Milliseconds, "msec" }, + unit_word{ Microseconds, "usec" }, + }; + double v = val.double_val; if ( v == 0.0 ) @@ -674,31 +683,55 @@ void IntervalVal::ValDescribe(ODesc* d) const return; } - int did_one = 0; + bool did_one = false; + constexpr auto last_idx = units.size() - 1; -#define DO_UNIT(unit, name) \ - if ( v >= unit || v <= -unit ) \ - { \ - double num = double(int(v / unit)); \ - if ( num != 0.0 ) \ - { \ - if ( did_one++ ) \ - d->SP(); \ - d->Add(num); \ - d->SP(); \ - d->Add(name); \ - if ( num != 1.0 && num != -1.0 ) \ - d->Add("s"); \ - v -= num * unit; \ - } \ + auto approx_equal = [](double a, double b, double tolerance = 1e-6) -> bool + { + auto v = a - b; + return v < 0 ? -v < tolerance : v < tolerance; + }; + + for ( size_t i = 0; i < units.size(); ++i ) + { + auto unit = units[i].first; + auto word = units[i].second; + double to_print = 0; + + if ( i == last_idx ) + { + to_print = v / unit; + + if ( approx_equal(to_print, 0) ) + { + if ( ! did_one ) + d->Add("0 secs"); + + break; + } + } + else + { + if ( ! (v >= unit || v <= -unit) ) + continue; + + double num = static_cast(static_cast(v / unit)); + v -= num * unit; + to_print = num; + } + + if ( did_one ) + d->SP(); + + d->Add(to_print); + d->SP(); + d->Add(word); + + if ( ! approx_equal(to_print, 1) && ! approx_equal(to_print, -1) ) + d->Add("s"); + + did_one = true; } - - DO_UNIT(Days, "day") - DO_UNIT(Hours, "hr") - DO_UNIT(Minutes, "min") - DO_UNIT(Seconds, "sec") - DO_UNIT(Milliseconds, "msec") - DO_UNIT(Microseconds, "usec") } PortVal* PortManager::Get(uint32_t port_num) const @@ -2869,7 +2902,7 @@ int VectorVal::AddTo(Val* val, int /* is_first_init */) const Val* VectorVal::Lookup(unsigned int index) const { if ( index >= val.vector_val->size() ) - return 0; + return nullptr; return (*val.vector_val)[index]; } @@ -3126,7 +3159,7 @@ Val* cast_value_to_type(Val* v, BroType* t) if ( ! dv ) return 0; - return static_cast(dv)->castTo(t); + return static_cast(dv)->castTo(t).detach(); } return 0; diff --git a/src/Var.cc b/src/Var.cc index 35162484ed..f7e15041e8 100644 --- a/src/Var.cc +++ b/src/Var.cc @@ -1,5 +1,7 @@ // See the file "COPYING" in the main distribution directory for copyright. 
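The rewritten IntervalVal::ValDescribe() above replaces the DO_UNIT macro with a data-driven loop plus an approx_equal lambda, so a negligible microsecond residue is treated as zero and a value that is 1 only up to rounding error is still printed without a plural "s". A minimal standalone sketch of that tolerance check (names and sample values here are illustrative, not part of the patch):

    #include <cstdio>

    // Same comparison as the lambda in IntervalVal::ValDescribe(), with the
    // same default tolerance of 1e-6.
    static bool approx_equal(double a, double b, double tolerance = 1e-6)
        {
        auto v = a - b;
        return v < 0 ? -v < tolerance : v < tolerance;
        }

    int main()
        {
        // 0.1 + 0.2 is not exactly 0.3 in binary floating point...
        std::printf("%d\n", 0.1 + 0.2 == 0.3);              // prints 0
        // ...but it is equal within the rendering tolerance.
        std::printf("%d\n", approx_equal(0.1 + 0.2, 0.3));  // prints 1

        // Pluralization check: a value that equals 1 up to rounding error is
        // still rendered as a singular unit.
        double num = 3.0 - 2.0000001;
        std::printf("%s\n", approx_equal(num, 1) ? "1 sec" : "secs");
        }
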
+#include + #include "zeek-config.h" #include "Var.h" @@ -510,7 +512,7 @@ void end_func(Stmt* body) std::unique_ptr gather_function_ingredients(Scope* scope, Stmt* body) { - auto ingredients = build_unique(); + auto ingredients = std::make_unique(); ingredients->frame_size = scope->Length(); ingredients->inits = scope->GetInits(); diff --git a/src/analyzer/CMakeLists.txt b/src/analyzer/CMakeLists.txt index 4dc2830737..e0d2351c93 100644 --- a/src/analyzer/CMakeLists.txt +++ b/src/analyzer/CMakeLists.txt @@ -17,6 +17,6 @@ set(analyzer_SRCS bif_target(analyzer.bif) -bro_add_subdir_library(analyzer ${analyzer_SRCS} ${BIF_OUTPUT_CC}) +bro_add_subdir_library(analyzer ${analyzer_SRCS}) add_dependencies(bro_analyzer generate_outputs) diff --git a/src/analyzer/protocol/tcp/CMakeLists.txt b/src/analyzer/protocol/tcp/CMakeLists.txt index c00f3e5379..270beb36d8 100644 --- a/src/analyzer/protocol/tcp/CMakeLists.txt +++ b/src/analyzer/protocol/tcp/CMakeLists.txt @@ -6,5 +6,6 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DI zeek_plugin_begin(Zeek TCP) zeek_plugin_cc(TCP.cc TCP_Endpoint.cc TCP_Reassembler.cc ContentLine.cc Stats.cc Plugin.cc) zeek_plugin_bif(events.bif) +zeek_plugin_bif(types.bif) zeek_plugin_bif(functions.bif) zeek_plugin_end() diff --git a/src/analyzer/protocol/tcp/TCP.cc b/src/analyzer/protocol/tcp/TCP.cc index 54d1563efc..873f5d6689 100644 --- a/src/analyzer/protocol/tcp/TCP.cc +++ b/src/analyzer/protocol/tcp/TCP.cc @@ -11,6 +11,7 @@ #include "analyzer/protocol/tcp/TCP_Reassembler.h" #include "events.bif.h" +#include "types.bif.h" using namespace analyzer::tcp; @@ -1186,9 +1187,15 @@ void TCP_Analyzer::DeliverPacket(int len, const u_char* data, bool is_orig, GeneratePacketEvent(rel_seq, rel_ack, data, len, caplen, is_orig, flags); - if ( tcp_option && tcp_hdr_len > sizeof(*tp) && - tcp_hdr_len <= uint32_t(caplen) ) - ParseTCPOptions(tp, TCPOptionEvent, this, is_orig, 0); + if ( (tcp_option || tcp_options) && tcp_hdr_len > sizeof(*tp) ) + ParseTCPOptions(tp, is_orig); + + // PIA/signature matching state needs to be initialized before + // processing/reassembling any TCP data, since that processing may + // itself try to perform signature matching. Also note that a SYN + // packet may technically carry data (see RFC793 Section 3.4 and also + // TCP Fast Open). + CheckPIA_FirstPacket(is_orig, ip); if ( DEBUG_tcp_data_sent ) { @@ -1243,8 +1250,6 @@ void TCP_Analyzer::DeliverPacket(int len, const u_char* data, bool is_orig, if ( ! reassembling ) ForwardPacket(len, data, is_orig, rel_data_seq, ip, caplen); - - CheckPIA_FirstPacket(is_orig, ip); } void TCP_Analyzer::DeliverStream(int len, const u_char* data, bool orig) @@ -1287,14 +1292,12 @@ void TCP_Analyzer::UpdateConnVal(RecordVal *conn_val) (*i)->UpdateConnVal(conn_val); } -int TCP_Analyzer::ParseTCPOptions(const struct tcphdr* tcp, - proc_tcp_option_t proc, - TCP_Analyzer* analyzer, - bool is_orig, void* cookie) +int TCP_Analyzer::ParseTCPOptions(const struct tcphdr* tcp, bool is_orig) { // Parse TCP options. const u_char* options = (const u_char*) tcp + sizeof(struct tcphdr); const u_char* opt_end = (const u_char*) tcp + tcp->th_off * 4; + std::vector opts; while ( options < opt_end ) { @@ -1307,21 +1310,19 @@ int TCP_Analyzer::ParseTCPOptions(const struct tcphdr* tcp, else if ( options + 1 >= opt_end ) // We've run off the end, no room for the length. 
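For context on the loop being reworked here: TCP options on the wire consist of a kind byte, where kinds 0 (end-of-option-list) and 1 (no-op) are single bytes and every other kind is followed by a length byte covering kind, length and data. The change collects the parsed options and breaks out of the loop on a trashed or truncated length instead of returning -1 immediately, so events can still be raised for whatever parsed cleanly. A small standalone sketch of that walk (the sample bytes are made up for illustration):

    #include <cstdint>
    #include <cstdio>

    int main()
        {
        const uint8_t opts[] = {
            2, 4, 0x05, 0xB4,   // MSS = 1460
            1, 1,               // two NOP padding bytes
            3, 3, 7,            // window scale, shift count 7
            0                   // end-of-option-list
            };

        const uint8_t* p = opts;
        const uint8_t* end = opts + sizeof(opts);

        while ( p < end )
            {
            uint8_t kind = p[0];
            // Single-byte kinds have an implicit length of 1; otherwise the
            // length byte must be present, non-zero, and fit in the buffer.
            uint8_t len = kind < 2 ? 1 : (p + 1 < end ? p[1] : 0);

            if ( len == 0 || p + len > end )
                break;   // trashed or truncated length: keep what we have

            std::printf("kind=%d length=%d\n", kind, len);
            p += len;

            if ( kind == 0 )   // EOL terminates the list
                break;
            }
        }
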
- return -1; + break; else opt_len = options[1]; if ( opt_len == 0 ) - return -1; // trashed length field + break; // trashed length field if ( options + opt_len > opt_end ) // No room for rest of option. - return -1; - - if ( (*proc)(opt, opt_len, options, analyzer, is_orig, cookie) == -1 ) - return -1; + break; + opts.emplace_back(options); options += opt_len; if ( opt == TCPOPT_EOL ) @@ -1329,25 +1330,134 @@ int TCP_Analyzer::ParseTCPOptions(const struct tcphdr* tcp, break; } - return 0; - } - -int TCP_Analyzer::TCPOptionEvent(unsigned int opt, - unsigned int optlen, - const u_char* /* option */, - TCP_Analyzer* analyzer, - bool is_orig, void* cookie) - { if ( tcp_option ) + for ( const auto& o : opts ) + { + auto kind = o[0]; + auto length = kind < 2 ? 1 : o[1]; + ConnectionEventFast(tcp_option, { + BuildConnVal(), + val_mgr->GetBool(is_orig), + val_mgr->GetCount(kind), + val_mgr->GetCount(length), + }); + } + + if ( tcp_options ) { - analyzer->ConnectionEventFast(tcp_option, { - analyzer->BuildConnVal(), + auto option_list = new VectorVal(BifType::Vector::TCP::OptionList); + + auto add_option_data = [](RecordVal* rv, const u_char* odata, int olen) + { + if ( olen <= 2 ) + return; + + auto data_len = olen - 2; + auto data = reinterpret_cast(odata + 2); + rv->Assign(2, new StringVal(data_len, data)); + }; + + for ( const auto& o : opts ) + { + auto kind = o[0]; + auto length = kind < 2 ? 1 : o[1]; + auto option_record = new RecordVal(BifType::Record::TCP::Option); + option_list->Assign(option_list->Size(), option_record); + option_record->Assign(0, val_mgr->GetCount(kind)); + option_record->Assign(1, val_mgr->GetCount(length)); + + switch ( kind ) { + case 2: + // MSS + if ( length == 4 ) + { + auto mss = ntohs(*reinterpret_cast(o + 2)); + option_record->Assign(3, val_mgr->GetCount(mss)); + } + else + { + add_option_data(option_record, o, length); + Weird("tcp_option_mss_invalid_len", fmt("%d", length)); + } + break; + + case 3: + // window scale + if ( length == 3 ) + { + auto scale = o[2]; + option_record->Assign(4, val_mgr->GetCount(scale)); + } + else + { + add_option_data(option_record, o, length); + Weird("tcp_option_window_scale_invalid_len", fmt("%d", length)); + } + break; + + case 4: + // sack permitted (implicit boolean) + if ( length != 2 ) + { + add_option_data(option_record, o, length); + Weird("tcp_option_sack_invalid_len", fmt("%d", length)); + } + break; + + case 5: + // SACK blocks (1-4 pairs of 32-bit begin+end pointers) + if ( length == 10 || length == 18 || + length == 26 || length == 34 ) + { + auto p = reinterpret_cast(o + 2); + auto num_pointers = (length - 2) / 4; + auto vt = internal_type("index_vec")->AsVectorType(); + auto sack = new VectorVal(vt); + + for ( auto i = 0; i < num_pointers; ++i ) + sack->Assign(sack->Size(), val_mgr->GetCount(ntohl(p[i]))); + + option_record->Assign(5, sack); + } + else + { + add_option_data(option_record, o, length); + Weird("tcp_option_sack_blocks_invalid_len", fmt("%d", length)); + } + break; + + case 8: + // timestamps + if ( length == 10 ) + { + auto send = ntohl(*reinterpret_cast(o + 2)); + auto echo = ntohl(*reinterpret_cast(o + 6)); + option_record->Assign(6, val_mgr->GetCount(send)); + option_record->Assign(7, val_mgr->GetCount(echo)); + } + else + { + add_option_data(option_record, o, length); + Weird("tcp_option_timestamps_invalid_len", fmt("%d", length)); + } + break; + + default: + add_option_data(option_record, o, length); + break; + } + } + + ConnectionEventFast(tcp_options, { + BuildConnVal(), 
val_mgr->GetBool(is_orig), - val_mgr->GetCount(opt), - val_mgr->GetCount(optlen), - }); + option_list, + }); } + if ( options < opt_end ) + return -1; + return 0; } diff --git a/src/analyzer/protocol/tcp/TCP.h b/src/analyzer/protocol/tcp/TCP.h index 7eeff0bd49..1204f56916 100644 --- a/src/analyzer/protocol/tcp/TCP.h +++ b/src/analyzer/protocol/tcp/TCP.h @@ -64,19 +64,10 @@ public: void SetContentsFile(unsigned int direction, BroFile* f) override; BroFile* GetContentsFile(unsigned int direction) const override; - // Callback to process a TCP option. - typedef int (*proc_tcp_option_t)(unsigned int opt, unsigned int optlen, - const u_char* option, TCP_Analyzer* analyzer, - bool is_orig, void* cookie); - // From Analyzer.h void UpdateConnVal(RecordVal *conn_val) override; - // Needs to be static because it's passed as a pointer-to-function - // rather than pointer-to-member-function. - static int ParseTCPOptions(const struct tcphdr* tcp, - proc_tcp_option_t proc, TCP_Analyzer* analyzer, - bool is_orig, void* cookie); + int ParseTCPOptions(const struct tcphdr* tcp, bool is_orig); static analyzer::Analyzer* Instantiate(Connection* conn) { return new TCP_Analyzer(conn); } @@ -168,12 +159,6 @@ protected: void SetReassembler(tcp::TCP_Reassembler* rorig, tcp::TCP_Reassembler* rresp); - // Needs to be static because it's passed as a pointer-to-function - // rather than pointer-to-member-function. - static int TCPOptionEvent(unsigned int opt, unsigned int optlen, - const u_char* option, TCP_Analyzer* analyzer, - bool is_orig, void* cookie); - // A couple utility functions that may also be useful to derived analyzers. static uint64_t get_relative_seq(const TCP_Endpoint* endpoint, uint32_t cur_base, uint32_t last, diff --git a/src/analyzer/protocol/tcp/events.bif b/src/analyzer/protocol/tcp/events.bif index 032e8f614f..2450df1828 100644 --- a/src/analyzer/protocol/tcp/events.bif +++ b/src/analyzer/protocol/tcp/events.bif @@ -250,11 +250,23 @@ event tcp_packet%(c: connection, is_orig: bool, flags: string, seq: count, ack: ## ## optlen: The length of the options value. ## -## .. zeek:see:: tcp_packet tcp_contents tcp_rexmit +## .. zeek:see:: tcp_packet tcp_contents tcp_rexmit tcp_options ## -## .. note:: There is currently no way to get the actual option value, if any. +## .. note:: To inspect the actual option values, if any, use :zeek:see:`tcp_options`. event tcp_option%(c: connection, is_orig: bool, opt: count, optlen: count%); +## Generated for each TCP header that contains TCP options. This is a very +## low-level event and potentially expensive as it may be raised very often. +## +## c: The connection the packet is part of. +## +## is_orig: True if the packet was sent by the connection's originator. +## +## options: The list of options parsed out of the TCP header. +## +## .. zeek:see:: tcp_packet tcp_contents tcp_rexmit tcp_option +event tcp_options%(c: connection, is_orig: bool, options: TCP::OptionList%); + ## Generated for each chunk of reassembled TCP payload. 
When content delivery is ## enabled for a TCP connection (via :zeek:id:`tcp_content_delivery_ports_orig`, ## :zeek:id:`tcp_content_delivery_ports_resp`, diff --git a/src/analyzer/protocol/tcp/types.bif b/src/analyzer/protocol/tcp/types.bif new file mode 100644 index 0000000000..8da29b900a --- /dev/null +++ b/src/analyzer/protocol/tcp/types.bif @@ -0,0 +1,2 @@ +type TCP::Option: record; +type TCP::OptionList: vector; diff --git a/src/broker/CMakeLists.txt b/src/broker/CMakeLists.txt index 08e02597e7..da0d1ca819 100644 --- a/src/broker/CMakeLists.txt +++ b/src/broker/CMakeLists.txt @@ -21,5 +21,5 @@ bif_target(data.bif) bif_target(messaging.bif) bif_target(store.bif) -bro_add_subdir_library(brokercomm ${comm_SRCS} ${BIF_OUTPUT_CC}) +bro_add_subdir_library(brokercomm ${comm_SRCS}) add_dependencies(bro_brokercomm generate_outputs) diff --git a/src/broker/Data.cc b/src/broker/Data.cc index 2efbe7f277..6bba3b6bb2 100644 --- a/src/broker/Data.cc +++ b/src/broker/Data.cc @@ -181,7 +181,7 @@ struct val_converter { return nullptr; auto tt = type->AsTableType(); - auto rval = new TableVal(tt); + auto rval = make_intrusive(tt); for ( auto& item : a ) { @@ -213,12 +213,9 @@ struct val_converter { if ( static_cast(expected_index_types->length()) != indices->size() ) - { - Unref(rval); return nullptr; - } - auto list_val = new ListVal(TYPE_ANY); + auto list_val = make_intrusive(TYPE_ANY); for ( auto i = 0u; i < indices->size(); ++i ) { @@ -226,21 +223,16 @@ struct val_converter { (*expected_index_types)[i]); if ( ! index_val ) - { - Unref(rval); - Unref(list_val); return nullptr; - } - list_val->Append(index_val); + list_val->Append(index_val.detach()); } - rval->Assign(list_val, nullptr); - Unref(list_val); + rval->Assign(list_val.get(), nullptr); } - return rval; + return rval.detach(); } result_type operator()(broker::table& a) @@ -249,7 +241,7 @@ struct val_converter { return nullptr; auto tt = type->AsTableType(); - auto rval = new TableVal(tt); + auto rval = make_intrusive(tt); for ( auto& item : a ) { @@ -281,12 +273,9 @@ struct val_converter { if ( static_cast(expected_index_types->length()) != indices->size() ) - { - Unref(rval); return nullptr; - } - auto list_val = new ListVal(TYPE_ANY); + auto list_val = make_intrusive(TYPE_ANY); for ( auto i = 0u; i < indices->size(); ++i ) { @@ -294,30 +283,21 @@ struct val_converter { (*expected_index_types)[i]); if ( ! index_val ) - { - Unref(rval); - Unref(list_val); return nullptr; - } - list_val->Append(index_val); + list_val->Append(index_val.detach()); } auto value_val = bro_broker::data_to_val(move(item.second), tt->YieldType()); if ( ! value_val ) - { - Unref(rval); - Unref(list_val); return nullptr; - } - rval->Assign(list_val, value_val); - Unref(list_val); + rval->Assign(list_val.get(), value_val.detach()); } - return rval; + return rval.detach(); } result_type operator()(broker::vector& a) @@ -325,22 +305,19 @@ struct val_converter { if ( type->Tag() == TYPE_VECTOR ) { auto vt = type->AsVectorType(); - auto rval = new VectorVal(vt); + auto rval = make_intrusive(vt); for ( auto& item : a ) { auto item_val = bro_broker::data_to_val(move(item), vt->YieldType()); if ( ! 
item_val ) - { - Unref(rval); return nullptr; - } - rval->Assign(rval->Size(), item_val); + rval->Assign(rval->Size(), item_val.detach()); } - return rval; + return rval.detach(); } else if ( type->Tag() == TYPE_FUNC ) { @@ -385,16 +362,13 @@ struct val_converter { else if ( type->Tag() == TYPE_RECORD ) { auto rt = type->AsRecordType(); - auto rval = new RecordVal(rt); + auto rval = make_intrusive(rt); auto idx = 0u; for ( auto i = 0u; i < static_cast(rt->NumFields()); ++i ) { if ( idx >= a.size() ) - { - Unref(rval); return nullptr; - } if ( caf::get_if(&a[idx]) != nullptr ) { @@ -404,19 +378,16 @@ struct val_converter { } auto item_val = bro_broker::data_to_val(move(a[idx]), - rt->FieldType(i)); + rt->FieldType(i)); if ( ! item_val ) - { - Unref(rval); return nullptr; - } - rval->Assign(i, item_val); + rval->Assign(i, item_val.detach()); ++idx; } - return rval; + return rval.detach(); } else if ( type->Tag() == TYPE_PATTERN ) { @@ -791,12 +762,12 @@ static bool data_type_check(const broker::data& d, BroType* t) return caf::visit(type_checker{t}, d); } -Val* bro_broker::data_to_val(broker::data d, BroType* type) +IntrusivePtr bro_broker::data_to_val(broker::data d, BroType* type) { if ( type->Tag() == TYPE_ANY ) - return bro_broker::make_data_val(move(d)); + return {bro_broker::make_data_val(move(d)), false}; - return caf::visit(val_converter{type}, std::move(d)); + return {caf::visit(val_converter{type}, std::move(d)), false}; } broker::expected bro_broker::val_to_data(Val* v) @@ -1161,7 +1132,7 @@ bool bro_broker::DataVal::canCastTo(BroType* t) const return data_type_check(data, t); } -Val* bro_broker::DataVal::castTo(BroType* t) +IntrusivePtr bro_broker::DataVal::castTo(BroType* t) { return data_to_val(data, t); } diff --git a/src/broker/Data.h b/src/broker/Data.h index 2d286352e4..fa7ca89400 100644 --- a/src/broker/Data.h +++ b/src/broker/Data.h @@ -7,6 +7,7 @@ #include "Reporter.h" #include "Frame.h" #include "Expr.h" +#include "IntrusivePtr.h" namespace bro_broker { @@ -58,7 +59,7 @@ broker::expected val_to_data(Val* v); * @return a pointer to a new Bro value or a nullptr if the conversion was not * possible. */ -Val* data_to_val(broker::data d, BroType* type); +IntrusivePtr data_to_val(broker::data d, BroType* type); /** * Convert a Bro threading::Value to a Broker data value. @@ -107,7 +108,7 @@ public: d->Add("}"); } - Val* castTo(BroType* t); + IntrusivePtr castTo(BroType* t); bool canCastTo(BroType* t) const; // Returns the Bro type that scripts use to represent a Broker data @@ -181,9 +182,9 @@ struct type_name_getter { { return "table"; } result_type operator()(const broker::vector&) - { + { assert(tag == TYPE_VECTOR || tag == TYPE_RECORD); - return tag == TYPE_VECTOR ? "vector" : "record"; + return tag == TYPE_VECTOR ? 
"vector" : "record"; } TypeTag tag; diff --git a/src/broker/Manager.cc b/src/broker/Manager.cc index 4c33d45d2e..392057e0f9 100644 --- a/src/broker/Manager.cc +++ b/src/broker/Manager.cc @@ -65,12 +65,6 @@ const broker::endpoint_info Manager::NoPeer{{}, {}}; int Manager::script_scope = 0; -struct unref_guard { - unref_guard(Val* v) : val(v) {} - ~unref_guard() { Unref(val); } - Val* val; -}; - struct scoped_reporter_location { scoped_reporter_location(Frame* frame) { @@ -1039,7 +1033,7 @@ void Manager::ProcessEvent(const broker::topic& topic, broker::zeek::Event ev) auto val = data_to_val(std::move(args[i]), expected_type); if ( val ) - vl.push_back(val); + vl.push_back(val.detach()); else { auto expected_name = type_name(expected_type->Tag()); @@ -1086,8 +1080,6 @@ bool bro_broker::Manager::ProcessLogCreate(broker::zeek::LogCreate lc) return false; } - unref_guard stream_id_unreffer{stream_id}; - auto writer_id = data_to_val(std::move(lc.writer_id()), writer_id_type); if ( ! writer_id ) { @@ -1095,8 +1087,6 @@ bool bro_broker::Manager::ProcessLogCreate(broker::zeek::LogCreate lc) return false; } - unref_guard writer_id_unreffer{writer_id}; - auto writer_info = std::unique_ptr(new logging::WriterBackend::WriterInfo); if ( ! writer_info->FromBroker(std::move(lc.writer_info())) ) { @@ -1163,8 +1153,6 @@ bool bro_broker::Manager::ProcessLogWrite(broker::zeek::LogWrite lw) return false; } - unref_guard stream_id_unreffer{stream_id}; - // Get writer ID. auto writer_id = data_to_val(std::move(lw.writer_id()), writer_id_type); if ( ! writer_id ) @@ -1173,7 +1161,6 @@ bool bro_broker::Manager::ProcessLogWrite(broker::zeek::LogWrite lw) return false; } - unref_guard writer_id_unreffer{writer_id}; auto path = caf::get_if(&lw.path()); if ( ! path ) @@ -1258,7 +1245,7 @@ bool Manager::ProcessIdentifierUpdate(broker::zeek::IdentifierUpdate iu) return false; } - id->SetVal(val); + id->SetVal(val.detach()); return true; } diff --git a/src/event.bif b/src/event.bif index 3b70b7d64c..4618931d28 100644 --- a/src/event.bif +++ b/src/event.bif @@ -380,7 +380,7 @@ event protocol_confirmation%(c: connection, atype: Analyzer::Tag, aid: count%); ## ``Analyzer::ANALYZER_HTTP`` means the HTTP analyzer determined that it's indeed ## parsing an HTTP connection. ## -## .. bro:see:: dpd_buffer_size +## .. zeek:see:: dpd_buffer_size event protocol_late_match%(c: connection, atype: Analyzer::Tag%); ## Generated when a protocol analyzer determines that a connection it is parsing diff --git a/src/file_analysis/Analyzer.h b/src/file_analysis/Analyzer.h index e60003d492..85fb37faa1 100644 --- a/src/file_analysis/Analyzer.h +++ b/src/file_analysis/Analyzer.h @@ -39,7 +39,7 @@ public: { } /** - * Subclasses may override this metod to receive file data non-sequentially. + * Subclasses may override this method to receive file data non-sequentially. * @param data points to start of a chunk of file data. * @param len length in bytes of the chunk of data pointed to by \a data. * @param offset the byte offset within full file that data chunk starts. 
diff --git a/src/file_analysis/CMakeLists.txt b/src/file_analysis/CMakeLists.txt index f9a2758920..8facf86c32 100644 --- a/src/file_analysis/CMakeLists.txt +++ b/src/file_analysis/CMakeLists.txt @@ -20,5 +20,5 @@ set(file_analysis_SRCS bif_target(file_analysis.bif) -bro_add_subdir_library(file_analysis ${file_analysis_SRCS} ${BIF_OUTPUT_CC}) +bro_add_subdir_library(file_analysis ${file_analysis_SRCS}) add_dependencies(bro_file_analysis generate_outputs) diff --git a/src/file_analysis/analyzer/data_event/CMakeLists.txt b/src/file_analysis/analyzer/data_event/CMakeLists.txt index 0a62b1d666..df8aada1f4 100644 --- a/src/file_analysis/analyzer/data_event/CMakeLists.txt +++ b/src/file_analysis/analyzer/data_event/CMakeLists.txt @@ -4,5 +4,5 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) zeek_plugin_begin(Zeek FileDataEvent) -zeek_plugin_cc(DataEvent.cc Plugin.cc ../../Analyzer.cc) +zeek_plugin_cc(DataEvent.cc Plugin.cc) zeek_plugin_end() diff --git a/src/file_analysis/analyzer/entropy/CMakeLists.txt b/src/file_analysis/analyzer/entropy/CMakeLists.txt index 7841f27f94..e6233a743b 100644 --- a/src/file_analysis/analyzer/entropy/CMakeLists.txt +++ b/src/file_analysis/analyzer/entropy/CMakeLists.txt @@ -4,6 +4,6 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) zeek_plugin_begin(Zeek FileEntropy) -zeek_plugin_cc(Entropy.cc Plugin.cc ../../Analyzer.cc) +zeek_plugin_cc(Entropy.cc Plugin.cc) zeek_plugin_bif(events.bif) zeek_plugin_end() diff --git a/src/file_analysis/analyzer/entropy/Entropy.h b/src/file_analysis/analyzer/entropy/Entropy.h index 86be06358d..d316291a5f 100644 --- a/src/file_analysis/analyzer/entropy/Entropy.h +++ b/src/file_analysis/analyzer/entropy/Entropy.h @@ -14,7 +14,7 @@ namespace file_analysis { /** - * An analyzer to produce a hash of file contents. + * An analyzer to produce entropy of file contents. */ class Entropy : public file_analysis::Analyzer { public: @@ -25,16 +25,16 @@ public: ~Entropy() override; /** - * Create a new instance of an Extract analyzer. + * Create a new instance of an Entropy analyzer. * @param args the \c AnalyzerArgs value which represents the analyzer. * @param file the file to which the analyzer will be attached. - * @return the new Extract analyzer instance or a null pointer if the + * @return the new Entropy analyzer instance or a null pointer if the * the "extraction_file" field of \a args wasn't set. */ static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file); /** - * Incrementally hash next chunk of file contents. + * Calculate entropy of next chunk of file contents. * @param data pointer to start of a chunk of a file data. * @param len number of bytes in the data chunk. * @return false if the digest is in an invalid state, else true. @@ -42,14 +42,14 @@ public: bool DeliverStream(const u_char* data, uint64_t len) override; /** - * Finalizes the hash and raises a "file_entropy_test" event. - * @return always false so analyze will be deteched from file. + * Finalizes the calculation and raises a "file_entropy_test" event. + * @return always false so analyze will be detached from file. */ bool EndOfFile() override; /** * Missing data can't be handled, so just indicate the this analyzer should - * be removed from receiving further data. The hash will not be finalized. + * be removed from receiving further data. The entropy will not be finalized. * @param offset byte offset in file at which missing chunk starts. * @param len number of missing bytes. 
* @return always false so analyzer will detach from file. @@ -68,8 +68,8 @@ protected: Entropy(RecordVal* args, File* file); /** - * If some file contents have been seen, finalizes the hash of them and - * raises the "file_hash" event with the results. + * If some file contents have been seen, finalizes the entropy of them and + * raises the "file_entropy" event with the results. */ void Finalize(); diff --git a/src/file_analysis/analyzer/extract/CMakeLists.txt b/src/file_analysis/analyzer/extract/CMakeLists.txt index 7df895af38..f24691ddff 100644 --- a/src/file_analysis/analyzer/extract/CMakeLists.txt +++ b/src/file_analysis/analyzer/extract/CMakeLists.txt @@ -4,7 +4,7 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) zeek_plugin_begin(Zeek FileExtract) -zeek_plugin_cc(Extract.cc Plugin.cc ../../Analyzer.cc) +zeek_plugin_cc(Extract.cc Plugin.cc) zeek_plugin_bif(events.bif) zeek_plugin_bif(functions.bif) zeek_plugin_end() diff --git a/src/file_analysis/analyzer/hash/CMakeLists.txt b/src/file_analysis/analyzer/hash/CMakeLists.txt index 46d557fd4b..66d6d7e6e8 100644 --- a/src/file_analysis/analyzer/hash/CMakeLists.txt +++ b/src/file_analysis/analyzer/hash/CMakeLists.txt @@ -4,6 +4,6 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) zeek_plugin_begin(Zeek FileHash) -zeek_plugin_cc(Hash.cc Plugin.cc ../../Analyzer.cc) +zeek_plugin_cc(Hash.cc Plugin.cc) zeek_plugin_bif(events.bif) zeek_plugin_end() diff --git a/src/file_analysis/analyzer/unified2/CMakeLists.txt b/src/file_analysis/analyzer/unified2/CMakeLists.txt index bd1537c8ef..68c3d2712c 100644 --- a/src/file_analysis/analyzer/unified2/CMakeLists.txt +++ b/src/file_analysis/analyzer/unified2/CMakeLists.txt @@ -5,7 +5,7 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) zeek_plugin_begin(Zeek Unified2) -zeek_plugin_cc(Unified2.cc Plugin.cc ../../Analyzer.cc) +zeek_plugin_cc(Unified2.cc Plugin.cc) zeek_plugin_bif(events.bif types.bif) zeek_plugin_pac(unified2.pac unified2-file.pac unified2-analyzer.pac) zeek_plugin_end() diff --git a/src/input.h b/src/input.h index 2fa815ccb0..70aa75ae28 100644 --- a/src/input.h +++ b/src/input.h @@ -4,7 +4,6 @@ #include #include -using namespace std; #include "BroList.h" @@ -40,7 +39,7 @@ extern const char* prog; extern std::vector zeek_script_prefixes; // -p flag extern const char* command_line_policy; // -e flag -extern vector params; +extern std::vector params; class Stmt; extern Stmt* stmts; // global statements diff --git a/src/input/CMakeLists.txt b/src/input/CMakeLists.txt index c928451cb3..2b41487021 100644 --- a/src/input/CMakeLists.txt +++ b/src/input/CMakeLists.txt @@ -18,6 +18,6 @@ set(input_SRCS bif_target(input.bif) -bro_add_subdir_library(input ${input_SRCS} ${BIF_OUTPUT_CC}) +bro_add_subdir_library(input ${input_SRCS}) add_dependencies(bro_input generate_outputs) diff --git a/src/input/Manager.cc b/src/input/Manager.cc index ad52edfa06..68964dd121 100644 --- a/src/input/Manager.cc +++ b/src/input/Manager.cc @@ -2536,8 +2536,31 @@ Val* Manager::ValueToVal(const Stream* i, const Value* val, bool& have_error) co if ( stag == TYPE_VOID ) TypeTag stag = val->val.set_val.vals[0]->type; - set_index = new TypeList(base_type(stag)->Ref()); - set_index->Append(base_type(stag)->Ref()); + BroType* index_type; + + if ( stag == TYPE_ENUM ) + { + // Enums are not a base-type, so need to look it up. 
+ const auto& sv = val->val.set_val.vals[0]->val.string_val; + std::string enum_name(sv.data, sv.length); + auto enum_id = global_scope()->Lookup(enum_name); + + if ( ! enum_id ) + { + Warning(i, "Value '%s' for stream '%s' is not a valid enum.", + enum_name.data(), i->name.c_str()); + + have_error = true; + return nullptr; + } + + index_type = enum_id->Type()->AsEnumType(); + } + else + index_type = base_type_no_ref(stag); + + set_index = new TypeList(index_type); + set_index->Append(index_type->Ref()); } SetType* s = new SetType(set_index, 0); diff --git a/src/input/readers/config/Config.cc b/src/input/readers/config/Config.cc index d90020e1bf..307ce3cddc 100644 --- a/src/input/readers/config/Config.cc +++ b/src/input/readers/config/Config.cc @@ -199,7 +199,7 @@ bool Config::DoUpdate() } regex_t re; - if ( regcomp(&re, "^([^[:blank:]]+)[[:blank:]]+(.*)$", REG_EXTENDED) ) + if ( regcomp(&re, "^([^[:blank:]]+)[[:blank:]]+(.*[^[:blank:]])?[[:blank:]]*$", REG_EXTENDED) ) { Error(Fmt("Failed to compile regex.")); return true; @@ -215,7 +215,9 @@ bool Config::DoUpdate() } string key = line.substr(match[1].rm_so, match[1].rm_eo - match[1].rm_so); - string value = line.substr(match[2].rm_so, match[2].rm_eo - match[2].rm_so); + string value; + if ( match[2].rm_so > 0 ) + value = line.substr(match[2].rm_so, match[2].rm_eo - match[2].rm_so); auto typeit = option_types.find(key); if ( typeit == option_types.end() ) diff --git a/src/logging/CMakeLists.txt b/src/logging/CMakeLists.txt index b37f6f95cb..849444b647 100644 --- a/src/logging/CMakeLists.txt +++ b/src/logging/CMakeLists.txt @@ -20,6 +20,6 @@ set(logging_SRCS bif_target(logging.bif) -bro_add_subdir_library(logging ${logging_SRCS} ${BIF_OUTPUT_CC}) +bro_add_subdir_library(logging ${logging_SRCS}) add_dependencies(bro_logging generate_outputs) diff --git a/src/module_util.cc b/src/module_util.cc index d5817a7d7a..500d7f4d08 100644 --- a/src/module_util.cc +++ b/src/module_util.cc @@ -5,6 +5,8 @@ #include #include "module_util.h" +using namespace std; + static int streq(const char* s1, const char* s2) { return ! strcmp(s1, s2); diff --git a/src/module_util.h b/src/module_util.h index 4e865380d2..5e1597a987 100644 --- a/src/module_util.h +++ b/src/module_util.h @@ -6,14 +6,12 @@ #include -using namespace std; - static const char* GLOBAL_MODULE_NAME = "GLOBAL"; -extern string extract_module_name(const char* name); -extern string extract_var_name(const char* name); -extern string normalized_module_name(const char* module_name); // w/o :: +extern std::string extract_module_name(const char* name); +extern std::string extract_var_name(const char* name); +extern std::string normalized_module_name(const char* module_name); // w/o :: // Concatenates module_name::var_name unless var_name is already fully // qualified, in which case it is returned unmodified. -extern string make_full_var_name(const char* module_name, const char* var_name); +extern std::string make_full_var_name(const char* module_name, const char* var_name); diff --git a/src/option.bif b/src/option.bif index 02893a5a24..55e3c4a6cf 100644 --- a/src/option.bif +++ b/src/option.bif @@ -90,13 +90,24 @@ function Option::set%(ID: string, val: any, location: string &default=""%): bool return val_mgr->GetBool(0); } - auto rval = call_option_handlers_and_set_value(ID, i, val_from_data, location); - Unref(val_from_data); + auto rval = call_option_handlers_and_set_value(ID, i, val_from_data.get(), location); return val_mgr->GetBool(rval); } if ( ! 
same_type(i->Type(), val->Type()) ) { + if ( i->Type()->Tag() == TYPE_TABLE && + val->Type()->Tag() == TYPE_TABLE && + val->Type()->AsTableType()->IsUnspecifiedTable() ) + { + // Just coerce an empty/unspecified table to the right type. + auto tv = new TableVal(i->Type()->AsTableType(), + i->ID_Val()->AsTableVal()->Attrs()); + auto rval = call_option_handlers_and_set_value(ID, i, tv, location); + Unref(tv); + return val_mgr->GetBool(rval); + } + builtin_error(fmt("Incompatible type for set of ID '%s': got '%s', need '%s'", ID->CheckString(), type_name(val->Type()->Tag()), type_name(i->Type()->Tag()))); return val_mgr->GetBool(0); diff --git a/src/parse.y b/src/parse.y index e5e0627e06..6e8b4dc327 100644 --- a/src/parse.y +++ b/src/parse.y @@ -1058,7 +1058,7 @@ formal_args_decl: TOK_ID ':' type opt_attr { set_location(@1, @4); - $$ = new TypeDecl($3, $1, $4); + $$ = new TypeDecl($3, $1, $4, true); } ; diff --git a/src/patricia.c b/src/patricia.c index 9d18adf14c..1928d8ed0e 100644 --- a/src/patricia.c +++ b/src/patricia.c @@ -124,17 +124,18 @@ local_inet_pton (int af, const char *src, void *dst) return 1; } } -#ifdef NT else if (af == AF_INET6) { +#ifdef NT struct in6_addr Address; return (inet6_addr(src, &Address)); - } #else - else { + return inet_pton(AF_INET6, src, dst); +#endif /* NT */ + } + else { errno = EAFNOSUPPORT; return -1; } -#endif /* NT */ } /* this allows imcomplete prefix */ diff --git a/src/plugin/Manager.cc b/src/plugin/Manager.cc index ce13397046..58dc52e033 100644 --- a/src/plugin/Manager.cc +++ b/src/plugin/Manager.cc @@ -159,19 +159,15 @@ bool Manager::ActivateDynamicPluginInternal(const std::string& name, bool ok_if_ return false; } - if ( m->second == "" ) - // Already activated. - return true; - - std::string dir = m->second + "/"; - - if ( dir.empty() ) + if ( m->second.empty() ) { // That's our marker that we have already activated this // plugin. Silently ignore the new request. return true; } + std::string dir = m->second + "/"; + DBG_LOG(DBG_PLUGINS, "Activating plugin %s", name.c_str()); // Add the "scripts" and "bif" directories to ZEEKPATH. @@ -275,6 +271,8 @@ bool Manager::ActivateDynamicPluginInternal(const std::string& name, bool ok_if_ DBG_LOG(DBG_PLUGINS, " Loaded %s", path); } + + globfree(&gl); } else diff --git a/src/plugins.cc.in b/src/plugins.cc.in deleted file mode 100644 index 4527d5d1a9..0000000000 --- a/src/plugins.cc.in +++ /dev/null @@ -1,22 +0,0 @@ - -// A work-around the problem that for static libraries unused globals -// aren't linked into the final binary. CMake automatically inserts -// code here to reference the globals that initializes each of the -// statically compiled plugins. -// -// Note: This won't be necessary anymore once we can assume CMake >2.8.8 -// as a required depencendy. If so, switch bro_HAVE_OBJECT_LIBRARIES -// in src/CMakeLists.txt to TRUE and remove this. - -#include - -${__BRO_DECL_PLUGINS} - -size_t __make_sure_to_use_plugin_globals() -{ - // This function is never actually called. - - size_t i = 0; - ${__BRO_USE_PLUGINS} - return i; -} diff --git a/src/probabilistic/BloomFilter.h b/src/probabilistic/BloomFilter.h index 629605c6b6..950235e5f5 100644 --- a/src/probabilistic/BloomFilter.h +++ b/src/probabilistic/BloomFilter.h @@ -3,6 +3,7 @@ #pragma once #include +#include #include #include @@ -75,7 +76,7 @@ public: * Returns a string with a representation of the Bloom filter's * internal state. This is for debugging/testing purposes only. 
*/ - virtual string InternalState() const = 0; + virtual std::string InternalState() const = 0; broker::expected Serialize() const; static std::unique_ptr Unserialize(const broker::data& data); @@ -154,7 +155,7 @@ public: void Clear() override; bool Merge(const BloomFilter* other) override; BasicBloomFilter* Clone() const override; - string InternalState() const override; + std::string InternalState() const override; protected: friend class BloomFilter; @@ -203,7 +204,7 @@ public: void Clear() override; bool Merge(const BloomFilter* other) override; CountingBloomFilter* Clone() const override; - string InternalState() const override; + std::string InternalState() const override; protected: friend class BloomFilter; diff --git a/src/probabilistic/Topk.cc b/src/probabilistic/Topk.cc index 8923292df2..131f490fae 100644 --- a/src/probabilistic/Topk.cc +++ b/src/probabilistic/Topk.cc @@ -505,14 +505,14 @@ bool TopkVal::DoUnserialize(const broker::data& data) for ( uint64_t j = 0; j < *elements_count; j++ ) { auto epsilon = caf::get_if(&(*v)[idx++]); - Val* val = bro_broker::data_to_val((*v)[idx++], type); + auto val = bro_broker::data_to_val((*v)[idx++], type); if ( ! (epsilon && val) ) return false; Element* e = new Element(); e->epsilon = *epsilon; - e->value = val; + e->value = val.detach(); e->parent = b; b->elements.insert(b->elements.end(), e); diff --git a/src/threading/formatters/JSON.cc b/src/threading/formatters/JSON.cc index 58b1ffd779..919ecd4a2b 100644 --- a/src/threading/formatters/JSON.cc +++ b/src/threading/formatters/JSON.cc @@ -37,7 +37,7 @@ bool JSON::Describe(ODesc* desc, int num_fields, const Field* const * fields, if ( new_entry.is_null() ) return false; - j[fields[i]->name] = new_entry; + j.emplace(fields[i]->name, new_entry); } } @@ -73,6 +73,10 @@ threading::Value* JSON::ParseValue(const string& s, const string& name, TypeTag ZeekJson JSON::BuildJSON(Value* val, const string& name) const { + // If the value wasn't set, return a nullptr. This will get turned into a 'null' in the json output. + if ( ! val->present ) + return nullptr; + ZeekJson j; switch ( val->type ) { @@ -182,11 +186,7 @@ ZeekJson JSON::BuildJSON(Value* val, const string& name) const } if ( ! name.empty() && ! j.is_null() ) - { - ZeekJson j2 = ZeekJson::object(); - j2[name] = j; - return j2; - } + return { { name, j } }; return j; } diff --git a/src/threading/formatters/JSON.h b/src/threading/formatters/JSON.h index 1015eb54c6..71edadc61d 100644 --- a/src/threading/formatters/JSON.h +++ b/src/threading/formatters/JSON.h @@ -4,7 +4,7 @@ #include "../Formatter.h" #include "3rdparty/json.hpp" -#include "3rdparty/fifo_map.hpp" +#include "3rdparty/tsl-ordered-map/ordered_map.h" namespace threading { namespace formatter { @@ -12,9 +12,13 @@ namespace threading { namespace formatter { // Define a class for use with the json library that orders the keys in the same order that // they were inserted. By default, the json library orders them alphabetically and we don't // want it like that. 
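The comment above states the motivation for swapping nlohmann's fifo_map out for tsl::ordered_map: JSON object keys should come back out in the order they were inserted rather than sorted alphabetically. A minimal standalone demonstration of that property (this uses the library's upstream tsl/ordered_map.h header path and made-up field names; the patch itself includes the copy vendored under 3rdparty/):

    #include <cstdio>
    #include <string>
    #include <tsl/ordered_map.h>

    int main()
        {
        tsl::ordered_map<std::string, int> m;
        m["ts"] = 1;
        m["uid"] = 2;
        m["id.orig_h"] = 3;

        // Iteration yields keys in insertion order, not lexicographic order,
        // which is the behavior the ZeekJson alias relies on.
        for ( const auto& kv : m )
            std::printf("%s=%d\n", kv.first.c_str(), kv.second);
        // prints: ts=1, uid=2, id.orig_h=3
        }
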
-template -using json_fifo_map = nlohmann::fifo_map, A>; -using ZeekJson = nlohmann::basic_json; +template, class KeyEqual = std::equal_to, + class AllocatorPair = typename std::allocator_traits::template rebind_alloc>, + class ValueTypeContainer = std::vector, AllocatorPair>> +using ordered_map = tsl::ordered_map; + +using ZeekJson = nlohmann::basic_json; /** * A thread-safe class for converting values into a JSON representation diff --git a/src/util.h b/src/util.h index bf4204d585..d4228990d4 100644 --- a/src/util.h +++ b/src/util.h @@ -570,15 +570,6 @@ void bro_strerror_r(int bro_errno, char* buf, size_t buflen); */ char* zeekenv(const char* name); -/** - * Small convenience function. Does what std::make_unique does in C++14. Will not - * work on arrays. - */ -template -std::unique_ptr build_unique (Args&&... args) { - return std::unique_ptr(new T(std::forward(args)...)); -} - /** * Escapes bytes in a string that are not valid UTF8 characters with \xYY format. Used * by the JSON writer and BIF methods. diff --git a/src/zeek.bif b/src/zeek.bif index e8474b83eb..970ad7c1d0 100644 --- a/src/zeek.bif +++ b/src/zeek.bif @@ -1689,6 +1689,22 @@ function fmt%(...%): string return new StringVal(s); %} +## Renders a sequence of values to a string of bytes and outputs them directly +## to ``stdout`` with no additional escape sequences added. No additional +## newline is added to the end either. +## +## Returns: Always true. +## +## .. zeek:see:: fmt cat cat_sep string_cat to_json +function print_raw%(...%): bool + %{ + ODesc d(DESC_READABLE); + d.SetStyle(RAW_STYLE); + describe_vals(&@ARG@, &d, 0); + printf("%.*s", d.Len(), d.Description()); + return val_mgr->GetBool(true); + %} + # =========================================================================== # # Math @@ -5065,6 +5081,8 @@ function anonymize_addr%(a: addr, cl: IPAddrAnonymizationClass%): addr ## fields with the &log attribute to be included in the JSON. ## ## returns: a JSON formatted string. +## +## .. 
zeek:see:: fmt cat cat_sep string_cat print_raw function to_json%(val: any, only_loggable: bool &default=F, field_escape_pattern: pattern &default=/^_/%): string %{ return val->ToJSON(only_loggable, field_escape_pattern); diff --git a/testing/btest/Baseline/bifs.print_raw/out b/testing/btest/Baseline/bifs.print_raw/out new file mode 100644 index 0000000000..6982e39727 --- /dev/null +++ b/testing/btest/Baseline/bifs.print_raw/out @@ -0,0 +1,3 @@ +"\\x07Էo" +start "\\x07Էo"137T[9, 10] finish +é diff --git a/testing/btest/Baseline/core.print-interval/out b/testing/btest/Baseline/core.print-interval/out new file mode 100644 index 0000000000..701e5f9cdb --- /dev/null +++ b/testing/btest/Baseline/core.print-interval/out @@ -0,0 +1,19 @@ +0 secs +0.123457 usecs +-0.123457 usecs +1.0 msec 234.567891 usecs +-1.0 msec -234.567891 usecs +12.0 secs 345.0 msecs 678.912345 usecs +-12.0 secs -345.0 msecs -678.912345 usecs +1.0 day 10.0 hrs 17.0 mins 36.0 secs 789.0 msecs 123.449984 usecs +-1.0 day -10.0 hrs -17.0 mins -36.0 secs -789.0 msecs -123.449984 usecs +1.001 usecs +1.0 msec 1 usec +11.0 msecs +8.0 days 12.0 hrs +7.0 hrs 30.0 mins +6.0 mins 30.0 secs +5.0 secs 500.0 msecs +4.0 msecs 500 usecs +3.5 usecs +2.0 days 2.0 secs diff --git a/testing/btest/Baseline/core.tcp.options/out b/testing/btest/Baseline/core.tcp.options/out new file mode 100644 index 0000000000..d85b2c9bf7 --- /dev/null +++ b/testing/btest/Baseline/core.tcp.options/out @@ -0,0 +1,50 @@ +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 2, 4 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 4, 2 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 8, 10 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 1, 1 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 3, 3 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T + kind: 2, length: 4 + mss: 1460 + kind: 4, length: 2 + sack permitted + kind: 8, length: 10 + send ts: 4294923497 + echo ts: 0 + kind: 1, length: 1 + kind: 3, length: 3 + window scale: 6 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], F, 2, 4 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], F, 4, 2 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], F, 8, 10 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], F, 1, 1 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], F, 3, 3 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], F + kind: 2, length: 4 + mss: 1380 + kind: 4, length: 2 + sack permitted + kind: 8, length: 10 + send ts: 419445911 + echo ts: 4294923497 + kind: 1, length: 1 + kind: 3, length: 3 + window scale: 7 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 1, 1 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 1, 1 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 8, 10 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T + kind: 1, length: 1 + kind: 1, length: 1 + kind: 8, length: 10 + send ts: 4294923545 + echo ts: 419445911 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 1, 1 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T, 1, 1 +[orig_h=192.168.1.102, orig_p=36861/tcp, 
resp_h=193.1.193.64, resp_p=80/tcp], T, 8, 10 +[orig_h=192.168.1.102, orig_p=36861/tcp, resp_h=193.1.193.64, resp_p=80/tcp], T + kind: 1, length: 1 + kind: 1, length: 1 + kind: 8, length: 10 + send ts: 4294923545 + echo ts: 419445911 diff --git a/testing/btest/Baseline/core.tcp.options/out-sack b/testing/btest/Baseline/core.tcp.options/out-sack new file mode 100644 index 0000000000..625b68ed29 --- /dev/null +++ b/testing/btest/Baseline/core.tcp.options/out-sack @@ -0,0 +1,24 @@ +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T, 5, 10 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T, 0, 1 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T + kind: 5, length: 10 + sack: [1, 16] + kind: 0, length: 1 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T, 5, 18 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T, 0, 1 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T + kind: 5, length: 18 + sack: [1, 16, 256, 4096] + kind: 0, length: 1 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T, 5, 26 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T, 0, 1 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T + kind: 5, length: 26 + sack: [1, 16, 256, 4096, 65536, 1048576] + kind: 0, length: 1 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T, 5, 34 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T, 0, 1 +[orig_h=127.0.0.1, orig_p=20/tcp, resp_h=127.0.0.1, resp_p=80/tcp], T + kind: 5, length: 34 + sack: [1, 16, 256, 4096, 65536, 1048576, 16777216, 268435456] + kind: 0, length: 1 diff --git a/testing/btest/Baseline/coverage.bare-load-baseline/canonified_loaded_scripts.log b/testing/btest/Baseline/coverage.bare-load-baseline/canonified_loaded_scripts.log index 76114aac6f..583e4f8def 100644 --- a/testing/btest/Baseline/coverage.bare-load-baseline/canonified_loaded_scripts.log +++ b/testing/btest/Baseline/coverage.bare-load-baseline/canonified_loaded_scripts.log @@ -158,6 +158,7 @@ scripts/base/init-frameworks-and-bifs.zeek build/scripts/base/bif/plugins/Zeek_SteppingStone.events.bif.zeek build/scripts/base/bif/plugins/Zeek_Syslog.events.bif.zeek build/scripts/base/bif/plugins/Zeek_TCP.events.bif.zeek + build/scripts/base/bif/plugins/Zeek_TCP.types.bif.zeek build/scripts/base/bif/plugins/Zeek_TCP.functions.bif.zeek build/scripts/base/bif/plugins/Zeek_Teredo.events.bif.zeek build/scripts/base/bif/plugins/Zeek_UDP.events.bif.zeek diff --git a/testing/btest/Baseline/coverage.default-load-baseline/canonified_loaded_scripts.log b/testing/btest/Baseline/coverage.default-load-baseline/canonified_loaded_scripts.log index a95b7505f3..82b307c9d2 100644 --- a/testing/btest/Baseline/coverage.default-load-baseline/canonified_loaded_scripts.log +++ b/testing/btest/Baseline/coverage.default-load-baseline/canonified_loaded_scripts.log @@ -158,6 +158,7 @@ scripts/base/init-frameworks-and-bifs.zeek build/scripts/base/bif/plugins/Zeek_SteppingStone.events.bif.zeek build/scripts/base/bif/plugins/Zeek_Syslog.events.bif.zeek build/scripts/base/bif/plugins/Zeek_TCP.events.bif.zeek + build/scripts/base/bif/plugins/Zeek_TCP.types.bif.zeek build/scripts/base/bif/plugins/Zeek_TCP.functions.bif.zeek build/scripts/base/bif/plugins/Zeek_Teredo.events.bif.zeek build/scripts/base/bif/plugins/Zeek_UDP.events.bif.zeek diff --git a/testing/btest/Baseline/coverage.find-bro-logs/out 
b/testing/btest/Baseline/coverage.find-bro-logs/out index 109fcf8a36..55ef0c557e 100644 --- a/testing/btest/Baseline/coverage.find-bro-logs/out +++ b/testing/btest/Baseline/coverage.find-bro-logs/out @@ -33,6 +33,7 @@ netcontrol_shunt notice notice_alarm ntlm +ntp ocsp openflow packet_filter diff --git a/testing/btest/Baseline/language.attr-default-global-set-error/out b/testing/btest/Baseline/language.attr-default-global-set-error/out index 55c70a62ff..ddcc6a8345 100644 --- a/testing/btest/Baseline/language.attr-default-global-set-error/out +++ b/testing/btest/Baseline/language.attr-default-global-set-error/out @@ -1,5 +1,5 @@ -error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 4: arithmetic mixed with non-arithmetic (set[string] and 0) -error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 4: &default value has inconsistent type (0 and set[string]) -error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &default is not valid for global variables (&default=10) -error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &default is not valid for global variables (&optional, &default=9) -error in /Users/tim/Desktop/projects/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &optional is not valid for global variables (&optional, &default=9, &optional) +error in /home/jon/pro/zeek/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 4: &default is not valid for global variables except for tables (&default=0) +error in /home/jon/pro/zeek/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &default is not valid for global variables except for tables (&default=10) +error in /home/jon/pro/zeek/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &default is not valid for global variables except for tables (&default=9) +error in /home/jon/pro/zeek/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 9: &optional is not valid for global variables (&default=9, &optional) +error in /home/jon/pro/zeek/zeek/testing/btest/.tmp/language.attr-default-global-set-error/attr-default-global-set-error.zeek, line 10: &default is not valid for global variables except for tables (&default=set()) diff --git a/testing/btest/Baseline/language.default-params/out b/testing/btest/Baseline/language.default-params/out index 0ae804cc6b..f874ca3fe3 100644 --- a/testing/btest/Baseline/language.default-params/out +++ b/testing/btest/Baseline/language.default-params/out @@ -3,6 +3,19 @@ foo_func, hello bar_func, hmm, hi, 5 bar_func, cool, beans, 5 bar_func, cool, beans, 13 +begin table_func, { + +} +end table_func, { +[the test] = works +} +begin table_func, { +[initial] = conditions +} +end table_func, { +[initial] = conditions, +[the test] = works +} foo_hook, test foo_hook, hello bar_hook, hmm, hi, 5 diff --git a/testing/btest/Baseline/language.expire_subnet/output b/testing/btest/Baseline/language.expire_subnet/output index 76fb3cd8d3..9bb289af0c 100644 --- a/testing/btest/Baseline/language.expire_subnet/output +++ 
b/testing/btest/Baseline/language.expire_subnet/output @@ -13,15 +13,15 @@ Time: 0 secs Accessed table nums: two; three Accessed table nets: two; zero, three -Time: 7.0 secs 518.0 msecs 828.0 usecs +Time: 7.0 secs 518.0 msecs 828.15361 usecs -Expired Num: 4 --> four at 8.0 secs 835.0 msecs 30.0 usecs -Expired Num: 1 --> one at 8.0 secs 835.0 msecs 30.0 usecs -Expired Num: 0 --> zero at 8.0 secs 835.0 msecs 30.0 usecs -Expired Subnet: 192.168.4.0/24 --> four at 8.0 secs 835.0 msecs 30.0 usecs -Expired Subnet: 192.168.1.0/24 --> one at 8.0 secs 835.0 msecs 30.0 usecs -Expired Subnet: 192.168.0.0/16 --> zero at 15.0 secs 150.0 msecs 681.0 usecs -Expired Subnet: 192.168.3.0/24 --> three at 15.0 secs 150.0 msecs 681.0 usecs -Expired Subnet: 192.168.2.0/24 --> two at 15.0 secs 150.0 msecs 681.0 usecs -Expired Num: 2 --> two at 15.0 secs 150.0 msecs 681.0 usecs -Expired Num: 3 --> three at 15.0 secs 150.0 msecs 681.0 usecs +Expired Num: 4 --> four at 8.0 secs 835.0 msecs 30.078888 usecs +Expired Num: 1 --> one at 8.0 secs 835.0 msecs 30.078888 usecs +Expired Num: 0 --> zero at 8.0 secs 835.0 msecs 30.078888 usecs +Expired Subnet: 192.168.4.0/24 --> four at 8.0 secs 835.0 msecs 30.078888 usecs +Expired Subnet: 192.168.1.0/24 --> one at 8.0 secs 835.0 msecs 30.078888 usecs +Expired Subnet: 192.168.0.0/16 --> zero at 15.0 secs 150.0 msecs 681.018829 usecs +Expired Subnet: 192.168.3.0/24 --> three at 15.0 secs 150.0 msecs 681.018829 usecs +Expired Subnet: 192.168.2.0/24 --> two at 15.0 secs 150.0 msecs 681.018829 usecs +Expired Num: 2 --> two at 15.0 secs 150.0 msecs 681.018829 usecs +Expired Num: 3 --> three at 15.0 secs 150.0 msecs 681.018829 usecs diff --git a/testing/btest/Baseline/language.record-default-coercion/out b/testing/btest/Baseline/language.record-default-coercion/out index bf76ba5033..0e9619bbd4 100644 --- a/testing/btest/Baseline/language.record-default-coercion/out +++ b/testing/btest/Baseline/language.record-default-coercion/out @@ -20,3 +20,6 @@ 0 [a=13, c=13, v=[test]] 1 +[a={ +[one] = 1 +}] diff --git a/testing/btest/Baseline/language.table-redef-default/out b/testing/btest/Baseline/language.table-redef-default/out new file mode 100644 index 0000000000..38dfcd606b --- /dev/null +++ b/testing/btest/Baseline/language.table-redef-default/out @@ -0,0 +1,2 @@ +uno +some number diff --git a/testing/btest/Baseline/plugins.hooks/output b/testing/btest/Baseline/plugins.hooks/output index de6971d4d5..cf551d9b4c 100644 --- a/testing/btest/Baseline/plugins.hooks/output +++ b/testing/btest/Baseline/plugins.hooks/output @@ -203,7 +203,7 @@ 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (KRB::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=kerberos, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (Modbus::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=modbus, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (NTLM::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=ntlm, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -> -0.000000 
MetaHookPost CallFunction(Log::__add_filter, , (NTP::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=, path_func=Log::default_path_func{ if ( != Log::path) return (Log::path)Log::id_str = fmt(%s, Log::id)Log::parts = split_string1(Log::id_str, <...>/, )return (cat(to_lower(Log::parts[0]), _, to_lower(Log::parts[1])))}elsereturn (to_lower(Log::id_str))}, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -> +0.000000 MetaHookPost CallFunction(Log::__add_filter, , (NTP::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=ntp, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (NetControl::DROP, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol_drop, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (NetControl::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -> 0.000000 MetaHookPost CallFunction(Log::__add_filter, , (NetControl::SHUNT, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol_shunt, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -> @@ -248,7 +248,7 @@ 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (KRB::LOG, [columns=KRB::Info, ev=KRB::log_krb, path=kerberos])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Modbus::LOG, [columns=Modbus::Info, ev=Modbus::log_modbus, path=modbus])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (NTLM::LOG, [columns=NTLM::Info, ev=, path=ntlm])) -> -0.000000 MetaHookPost CallFunction(Log::__create_stream, , (NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=])) -> +0.000000 MetaHookPost CallFunction(Log::__create_stream, , (NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=ntp])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (NetControl::DROP, [columns=NetControl::DropInfo, ev=NetControl::log_netcontrol_drop, path=netcontrol_drop])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (NetControl::LOG, [columns=NetControl::Info, ev=NetControl::log_netcontrol, path=netcontrol])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (NetControl::SHUNT, [columns=NetControl::ShuntInfo, ev=NetControl::log_netcontrol_shunt, path=netcontrol_shunt])) -> @@ -276,7 +276,7 @@ 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird, path=weird])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (X509::LOG, [columns=X509::Info, ev=X509::log_x509, path=x509])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (mysql::LOG, [columns=MySQL::Info, ev=MySQL::log_mysql, path=mysql])) -> -0.000000 MetaHookPost CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1571104127.525167, node=zeek, filter=ip or not ip, 
init=T, success=T])) -> +0.000000 MetaHookPost CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1572977305.329795, node=zeek, filter=ip or not ip, init=T, success=T])) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Broker::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Cluster::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Config::LOG)) -> @@ -429,7 +429,7 @@ 0.000000 MetaHookPost CallFunction(Log::create_stream, , (KRB::LOG, [columns=KRB::Info, ev=KRB::log_krb, path=kerberos])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Modbus::LOG, [columns=Modbus::Info, ev=Modbus::log_modbus, path=modbus])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (NTLM::LOG, [columns=NTLM::Info, ev=, path=ntlm])) -> -0.000000 MetaHookPost CallFunction(Log::create_stream, , (NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=])) -> +0.000000 MetaHookPost CallFunction(Log::create_stream, , (NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=ntp])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (NetControl::DROP, [columns=NetControl::DropInfo, ev=NetControl::log_netcontrol_drop, path=netcontrol_drop])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (NetControl::LOG, [columns=NetControl::Info, ev=NetControl::log_netcontrol, path=netcontrol])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (NetControl::SHUNT, [columns=NetControl::ShuntInfo, ev=NetControl::log_netcontrol_shunt, path=netcontrol_shunt])) -> @@ -457,7 +457,7 @@ 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird, path=weird])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (X509::LOG, [columns=X509::Info, ev=X509::log_x509, path=x509])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (mysql::LOG, [columns=MySQL::Info, ev=MySQL::log_mysql, path=mysql])) -> -0.000000 MetaHookPost CallFunction(Log::write, , (PacketFilter::LOG, [ts=1571104127.525167, node=zeek, filter=ip or not ip, init=T, success=T])) -> +0.000000 MetaHookPost CallFunction(Log::write, , (PacketFilter::LOG, [ts=1572977305.329795, node=zeek, filter=ip or not ip, init=T, success=T])) -> 0.000000 MetaHookPost CallFunction(NetControl::check_plugins, , ()) -> 0.000000 MetaHookPost CallFunction(NetControl::init, , ()) -> 0.000000 MetaHookPost CallFunction(Notice::want_pp, , ()) -> @@ -681,6 +681,7 @@ 0.000000 MetaHookPost LoadFile(0, .<...>/Zeek_Syslog.events.bif.zeek) -> -1 0.000000 MetaHookPost LoadFile(0, .<...>/Zeek_TCP.events.bif.zeek) -> -1 0.000000 MetaHookPost LoadFile(0, .<...>/Zeek_TCP.functions.bif.zeek) -> -1 +0.000000 MetaHookPost LoadFile(0, .<...>/Zeek_TCP.types.bif.zeek) -> -1 0.000000 MetaHookPost LoadFile(0, .<...>/Zeek_Teredo.events.bif.zeek) -> -1 0.000000 MetaHookPost LoadFile(0, .<...>/Zeek_UDP.events.bif.zeek) -> -1 0.000000 MetaHookPost LoadFile(0, .<...>/Zeek_Unified2.events.bif.zeek) -> -1 @@ -1105,7 +1106,7 @@ 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (KRB::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=kerberos, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (Modbus::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=modbus, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., 
ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (NTLM::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=ntlm, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) -0.000000 MetaHookPre CallFunction(Log::__add_filter, , (NTP::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=, path_func=Log::default_path_func{ if ( != Log::path) return (Log::path)Log::id_str = fmt(%s, Log::id)Log::parts = split_string1(Log::id_str, <...>/, )return (cat(to_lower(Log::parts[0]), _, to_lower(Log::parts[1])))}elsereturn (to_lower(Log::id_str))}, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) +0.000000 MetaHookPre CallFunction(Log::__add_filter, , (NTP::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=ntp, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (NetControl::DROP, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol_drop, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (NetControl::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) 0.000000 MetaHookPre CallFunction(Log::__add_filter, , (NetControl::SHUNT, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol_shunt, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}])) @@ -1150,7 +1151,7 @@ 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (KRB::LOG, [columns=KRB::Info, ev=KRB::log_krb, path=kerberos])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Modbus::LOG, [columns=Modbus::Info, ev=Modbus::log_modbus, path=modbus])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (NTLM::LOG, [columns=NTLM::Info, ev=, path=ntlm])) -0.000000 MetaHookPre CallFunction(Log::__create_stream, , (NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=])) +0.000000 MetaHookPre CallFunction(Log::__create_stream, , (NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=ntp])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (NetControl::DROP, [columns=NetControl::DropInfo, ev=NetControl::log_netcontrol_drop, path=netcontrol_drop])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (NetControl::LOG, [columns=NetControl::Info, ev=NetControl::log_netcontrol, path=netcontrol])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (NetControl::SHUNT, [columns=NetControl::ShuntInfo, ev=NetControl::log_netcontrol_shunt, path=netcontrol_shunt])) @@ -1178,7 +1179,7 @@ 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird, path=weird])) 0.000000 MetaHookPre 
CallFunction(Log::__create_stream, , (X509::LOG, [columns=X509::Info, ev=X509::log_x509, path=x509])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (mysql::LOG, [columns=MySQL::Info, ev=MySQL::log_mysql, path=mysql])) -0.000000 MetaHookPre CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1571104127.525167, node=zeek, filter=ip or not ip, init=T, success=T])) +0.000000 MetaHookPre CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1572977305.329795, node=zeek, filter=ip or not ip, init=T, success=T])) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Broker::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Cluster::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Config::LOG)) @@ -1331,7 +1332,7 @@ 0.000000 MetaHookPre CallFunction(Log::create_stream, , (KRB::LOG, [columns=KRB::Info, ev=KRB::log_krb, path=kerberos])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Modbus::LOG, [columns=Modbus::Info, ev=Modbus::log_modbus, path=modbus])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (NTLM::LOG, [columns=NTLM::Info, ev=, path=ntlm])) -0.000000 MetaHookPre CallFunction(Log::create_stream, , (NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=])) +0.000000 MetaHookPre CallFunction(Log::create_stream, , (NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=ntp])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (NetControl::DROP, [columns=NetControl::DropInfo, ev=NetControl::log_netcontrol_drop, path=netcontrol_drop])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (NetControl::LOG, [columns=NetControl::Info, ev=NetControl::log_netcontrol, path=netcontrol])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (NetControl::SHUNT, [columns=NetControl::ShuntInfo, ev=NetControl::log_netcontrol_shunt, path=netcontrol_shunt])) @@ -1359,7 +1360,7 @@ 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird, path=weird])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (X509::LOG, [columns=X509::Info, ev=X509::log_x509, path=x509])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (mysql::LOG, [columns=MySQL::Info, ev=MySQL::log_mysql, path=mysql])) -0.000000 MetaHookPre CallFunction(Log::write, , (PacketFilter::LOG, [ts=1571104127.525167, node=zeek, filter=ip or not ip, init=T, success=T])) +0.000000 MetaHookPre CallFunction(Log::write, , (PacketFilter::LOG, [ts=1572977305.329795, node=zeek, filter=ip or not ip, init=T, success=T])) 0.000000 MetaHookPre CallFunction(NetControl::check_plugins, , ()) 0.000000 MetaHookPre CallFunction(NetControl::init, , ()) 0.000000 MetaHookPre CallFunction(Notice::want_pp, , ()) @@ -1583,6 +1584,7 @@ 0.000000 MetaHookPre LoadFile(0, .<...>/Zeek_Syslog.events.bif.zeek) 0.000000 MetaHookPre LoadFile(0, .<...>/Zeek_TCP.events.bif.zeek) 0.000000 MetaHookPre LoadFile(0, .<...>/Zeek_TCP.functions.bif.zeek) +0.000000 MetaHookPre LoadFile(0, .<...>/Zeek_TCP.types.bif.zeek) 0.000000 MetaHookPre LoadFile(0, .<...>/Zeek_Teredo.events.bif.zeek) 0.000000 MetaHookPre LoadFile(0, .<...>/Zeek_UDP.events.bif.zeek) 0.000000 MetaHookPre LoadFile(0, .<...>/Zeek_Unified2.events.bif.zeek) @@ -2006,7 +2008,7 @@ 0.000000 | HookCallFunction Log::__add_filter(KRB::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=kerberos, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}]) 0.000000 
| HookCallFunction Log::__add_filter(Modbus::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=modbus, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}]) 0.000000 | HookCallFunction Log::__add_filter(NTLM::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=ntlm, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}]) -0.000000 | HookCallFunction Log::__add_filter(NTP::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=, path_func=Log::default_path_func{ if ( != Log::path) return (Log::path)Log::id_str = fmt(%s, Log::id)Log::parts = split_string1(Log::id_str, <...>/, )return (cat(to_lower(Log::parts[0]), _, to_lower(Log::parts[1])))}elsereturn (to_lower(Log::id_str))}, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}]) +0.000000 | HookCallFunction Log::__add_filter(NTP::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=ntp, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}]) 0.000000 | HookCallFunction Log::__add_filter(NetControl::DROP, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol_drop, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}]) 0.000000 | HookCallFunction Log::__add_filter(NetControl::LOG, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}]) 0.000000 | HookCallFunction Log::__add_filter(NetControl::SHUNT, [name=default, writer=Log::WRITER_ASCII, pred=, path=netcontrol_shunt, path_func=, include=, exclude=, log_local=T, log_remote=T, field_name_map={}, scope_sep=., ext_prefix=_, ext_func=lambda_<2899364261409031434>, interv=0 secs, postprocessor=, config={}]) @@ -2051,7 +2053,7 @@ 0.000000 | HookCallFunction Log::__create_stream(KRB::LOG, [columns=KRB::Info, ev=KRB::log_krb, path=kerberos]) 0.000000 | HookCallFunction Log::__create_stream(Modbus::LOG, [columns=Modbus::Info, ev=Modbus::log_modbus, path=modbus]) 0.000000 | HookCallFunction Log::__create_stream(NTLM::LOG, [columns=NTLM::Info, ev=, path=ntlm]) -0.000000 | HookCallFunction Log::__create_stream(NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=]) +0.000000 | HookCallFunction Log::__create_stream(NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=ntp]) 0.000000 | HookCallFunction Log::__create_stream(NetControl::DROP, [columns=NetControl::DropInfo, ev=NetControl::log_netcontrol_drop, path=netcontrol_drop]) 0.000000 | HookCallFunction Log::__create_stream(NetControl::LOG, [columns=NetControl::Info, ev=NetControl::log_netcontrol, path=netcontrol]) 0.000000 | HookCallFunction Log::__create_stream(NetControl::SHUNT, [columns=NetControl::ShuntInfo, ev=NetControl::log_netcontrol_shunt, path=netcontrol_shunt]) @@ -2079,7 +2081,7 @@ 0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird, 
path=weird]) 0.000000 | HookCallFunction Log::__create_stream(X509::LOG, [columns=X509::Info, ev=X509::log_x509, path=x509]) 0.000000 | HookCallFunction Log::__create_stream(mysql::LOG, [columns=MySQL::Info, ev=MySQL::log_mysql, path=mysql]) -0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1571104127.525167, node=zeek, filter=ip or not ip, init=T, success=T]) +0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1572977305.329795, node=zeek, filter=ip or not ip, init=T, success=T]) 0.000000 | HookCallFunction Log::add_default_filter(Broker::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Config::LOG) @@ -2232,7 +2234,7 @@ 0.000000 | HookCallFunction Log::create_stream(KRB::LOG, [columns=KRB::Info, ev=KRB::log_krb, path=kerberos]) 0.000000 | HookCallFunction Log::create_stream(Modbus::LOG, [columns=Modbus::Info, ev=Modbus::log_modbus, path=modbus]) 0.000000 | HookCallFunction Log::create_stream(NTLM::LOG, [columns=NTLM::Info, ev=, path=ntlm]) -0.000000 | HookCallFunction Log::create_stream(NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=]) +0.000000 | HookCallFunction Log::create_stream(NTP::LOG, [columns=NTP::Info, ev=NTP::log_ntp, path=ntp]) 0.000000 | HookCallFunction Log::create_stream(NetControl::DROP, [columns=NetControl::DropInfo, ev=NetControl::log_netcontrol_drop, path=netcontrol_drop]) 0.000000 | HookCallFunction Log::create_stream(NetControl::LOG, [columns=NetControl::Info, ev=NetControl::log_netcontrol, path=netcontrol]) 0.000000 | HookCallFunction Log::create_stream(NetControl::SHUNT, [columns=NetControl::ShuntInfo, ev=NetControl::log_netcontrol_shunt, path=netcontrol_shunt]) @@ -2260,7 +2262,7 @@ 0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=Weird::Info, ev=Weird::log_weird, path=weird]) 0.000000 | HookCallFunction Log::create_stream(X509::LOG, [columns=X509::Info, ev=X509::log_x509, path=x509]) 0.000000 | HookCallFunction Log::create_stream(mysql::LOG, [columns=MySQL::Info, ev=MySQL::log_mysql, path=mysql]) -0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1571104127.525167, node=zeek, filter=ip or not ip, init=T, success=T]) +0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1572977305.329795, node=zeek, filter=ip or not ip, init=T, success=T]) 0.000000 | HookCallFunction NetControl::check_plugins() 0.000000 | HookCallFunction NetControl::init() 0.000000 | HookCallFunction Notice::want_pp() @@ -2484,6 +2486,7 @@ 0.000000 | HookLoadFile .<...>/Zeek_Syslog.events.bif.zeek 0.000000 | HookLoadFile .<...>/Zeek_TCP.events.bif.zeek 0.000000 | HookLoadFile .<...>/Zeek_TCP.functions.bif.zeek +0.000000 | HookLoadFile .<...>/Zeek_TCP.types.bif.zeek 0.000000 | HookLoadFile .<...>/Zeek_Teredo.events.bif.zeek 0.000000 | HookLoadFile .<...>/Zeek_UDP.events.bif.zeek 0.000000 | HookLoadFile .<...>/Zeek_Unified2.events.bif.zeek @@ -2699,7 +2702,7 @@ 0.000000 | HookLoadFile base<...>/xmpp 0.000000 | HookLoadFile base<...>/zeek.bif.zeek 0.000000 | HookLogInit packet_filter 1/1 {ts (time), node (string), filter (string), init (bool), success (bool)} -0.000000 | HookLogWrite packet_filter [ts=1571104127.525167, node=zeek, filter=ip or not ip, init=T, success=T] +0.000000 | HookLogWrite packet_filter [ts=1572977305.329795, node=zeek, filter=ip or not ip, init=T, success=T] 0.000000 | HookQueueEvent NetControl::init() 0.000000 | HookQueueEvent filter_change_tracking() 0.000000 | HookQueueEvent zeek_init() @@ -3027,9 +3030,9 @@ 1362692527.009775 
MetaHookPost CallFunction(HTTP::code_in_range, , (200, 100, 199)) -> 1362692527.009775 MetaHookPost CallFunction(HTTP::get_file_handle, , ([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=], F)) -> 1362692527.009775 MetaHookPost CallFunction(HTTP::set_state, , ([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=], F)) -> -1362692527.009775 MetaHookPost CallFunction(Log::__write, , (Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.0 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=])) -> +1362692527.009775 MetaHookPost CallFunction(Log::__write, , (Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.975693 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=])) -> 1362692527.009775 MetaHookPost CallFunction(Log::__write, , (HTTP::LOG, [ts=1362692526.939527, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1])) -> -1362692527.009775 MetaHookPost CallFunction(Log::write, , (Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.0 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=])) -> +1362692527.009775 MetaHookPost CallFunction(Log::write, , (Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.975693 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=])) -> 1362692527.009775 MetaHookPost CallFunction(Log::write, , (HTTP::LOG, [ts=1362692526.939527, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1])) -> 1362692527.009775 MetaHookPost CallFunction(cat, , (Analyzer::ANALYZER_HTTP, 1362692526.869344, F, 1, 1, 141.142.228.5:59856 > 192.150.187.43:80)) -> 1362692527.009775 MetaHookPost CallFunction(file_sniff, , ([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, 
is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain]], inferred=T])) -> @@ -3056,9 +3059,9 @@ 1362692527.009775 MetaHookPre CallFunction(HTTP::code_in_range, , (200, 100, 199)) 1362692527.009775 MetaHookPre CallFunction(HTTP::get_file_handle, , ([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=], F)) 1362692527.009775 MetaHookPre CallFunction(HTTP::set_state, , ([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=], F)) -1362692527.009775 MetaHookPre CallFunction(Log::__write, , (Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.0 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=])) +1362692527.009775 MetaHookPre CallFunction(Log::__write, , (Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.975693 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=])) 1362692527.009775 MetaHookPre CallFunction(Log::__write, , (HTTP::LOG, [ts=1362692526.939527, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1])) -1362692527.009775 MetaHookPre CallFunction(Log::write, , (Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.0 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=])) +1362692527.009775 MetaHookPre CallFunction(Log::write, , (Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.975693 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=])) 1362692527.009775 MetaHookPre CallFunction(Log::write, , (HTTP::LOG, [ts=1362692526.939527, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1])) 1362692527.009775 MetaHookPre CallFunction(cat, , (Analyzer::ANALYZER_HTTP, 1362692526.869344, F, 1, 1, 141.142.228.5:59856 > 192.150.187.43:80)) 1362692527.009775 
MetaHookPre CallFunction(file_sniff, , ([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain]], inferred=T])) @@ -3086,9 +3089,9 @@ 1362692527.009775 | HookCallFunction HTTP::code_in_range(200, 100, 199) 1362692527.009775 | HookCallFunction HTTP::get_file_handle([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=], F) 1362692527.009775 | HookCallFunction HTTP::set_state([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=], F) -1362692527.009775 | HookCallFunction Log::__write(Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.0 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=]) +1362692527.009775 | HookCallFunction Log::__write(Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.975693 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=]) 1362692527.009775 | HookCallFunction Log::__write(HTTP::LOG, [ts=1362692526.939527, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1]) -1362692527.009775 | HookCallFunction Log::write(Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.0 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=]) +1362692527.009775 | HookCallFunction Log::write(Files::LOG, [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts={192.150.187.43}, rx_hosts={141.142.228.5}, conn_uids={CHhAvVGS1DHFjwGM9}, source=HTTP, depth=0, analyzers={}, mime_type=text/plain, filename=, duration=262.975693 usecs, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=]) 1362692527.009775 | HookCallFunction Log::write(HTTP::LOG, [ts=1362692526.939527, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1]) 1362692527.009775 | HookCallFunction cat(Analyzer::ANALYZER_HTTP, 1362692526.869344, F, 1, 1, 141.142.228.5:59856 > 192.150.187.43:80) 1362692527.009775 | HookCallFunction 
file_sniff([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain]], inferred=T]) diff --git a/testing/btest/Baseline/scripts.base.frameworks.config.container-options/out b/testing/btest/Baseline/scripts.base.frameworks.config.container-options/out new file mode 100644 index 0000000000..c001c07916 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.config.container-options/out @@ -0,0 +1,41 @@ +{ +RED +} +{ + +} +{ +BLUE +} +{ +BLUE, +GREEN, +RED +} +{ + +} +--- +[RED] +[] +[BLUE] +[RED, GREEN, BLUE] +[] +--- +{ +[RED] = red +} +{ + +} +{ +[BLUE] = blue +} +{ +[BLUE] = blue, +[GREEN] = green, +[RED] = red +} +{ + +} diff --git a/testing/btest/Baseline/scripts.base.frameworks.config.read_config/zeek.config.log b/testing/btest/Baseline/scripts.base.frameworks.config.read_config/zeek.config.log index fa56b8455e..d8e337f668 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.config.read_config/zeek.config.log +++ b/testing/btest/Baseline/scripts.base.frameworks.config.read_config/zeek.config.log @@ -3,21 +3,22 @@ #empty_field (empty) #unset_field - #path config -#open 2017-10-11-20-47-09 +#open 2019-10-14-15-40-21 #fields ts id old_value new_value location #types time string string string string -1507754829.092788 testbool T F ../configfile -1507754829.092788 testcount 0 1 ../configfile -1507754829.092788 testcount 1 2 ../configfile -1507754829.092788 testint 0 -1 ../configfile -1507754829.092788 testenum SSH::LOG Conn::LOG ../configfile -1507754829.092788 testport 42/tcp 45/unknown ../configfile -1507754829.092788 testaddr 127.0.0.1 127.0.0.1 ../configfile -1507754829.092788 testaddr 127.0.0.1 2607:f8b0:4005:801::200e ../configfile -1507754829.092788 testinterval 1.0 sec 60.0 ../configfile -1507754829.092788 testtime 0.0 1507321987.0 ../configfile -1507754829.092788 test_set (empty) b,c,a,d,erdbeerschnitzel ../configfile -1507754829.092788 test_vector (empty) 1,2,3,4,5,6 ../configfile -1507754829.092788 test_set b,c,a,d,erdbeerschnitzel (empty) ../configfile -1507754829.092788 test_set (empty) \x2d ../configfile -#close 2017-10-11-20-47-09 +1571067621.558501 testbool T F ../configfile +1571067621.558501 testcount 0 1 ../configfile +1571067621.558501 testcount 1 2 ../configfile +1571067621.558501 testint 0 -1 ../configfile +1571067621.558501 testenum SSH::LOG Conn::LOG ../configfile +1571067621.558501 testport 42/tcp 45/unknown ../configfile +1571067621.558501 testaddr 127.0.0.1 127.0.0.1 ../configfile +1571067621.558501 testaddr 127.0.0.1 2607:f8b0:4005:801::200e ../configfile +1571067621.558501 testinterval 1.0 sec 60.0 ../configfile +1571067621.558501 teststring a abc ../configfile +1571067621.558501 testtime 0.0 1507321987.0 ../configfile +1571067621.558501 test_set (empty) b,c,a,d,erdbeerschnitzel ../configfile +1571067621.558501 test_vector (empty) 1,2,3,4,5,6 ../configfile +1571067621.558501 test_set b,c,a,d,erdbeerschnitzel (empty) ../configfile +1571067621.558501 test_set (empty) \x2d ../configfile +#close 2019-10-14-15-40-21 diff --git a/testing/btest/Baseline/scripts.base.frameworks.input.config.enum-set/zeek..stdout b/testing/btest/Baseline/scripts.base.frameworks.input.config.enum-set/zeek..stdout new file mode 100644 index 0000000000..c38913d6aa --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.input.config.enum-set/zeek..stdout @@ -0,0 +1,12 @@ +DPD::ignore_violations, { + +} +--- +{ +Analyzer::ANALYZER_SYSLOG +} +--- +DPD::ignore_violations, { +Analyzer::ANALYZER_SYSLOG +} +--- 
diff --git a/testing/btest/Baseline/scripts.base.frameworks.input.config.enum-set/zeek.config.log b/testing/btest/Baseline/scripts.base.frameworks.input.config.enum-set/zeek.config.log new file mode 100644 index 0000000000..732d185efa --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.input.config.enum-set/zeek.config.log @@ -0,0 +1,10 @@ +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path config +#open 2019-10-03-04-02-02 +#fields ts id old_value new_value location +#types time string string string string +1570075321.966826 DPD::ignore_violations (empty) Analyzer::ANALYZER_SYSLOG - +#close 2019-10-03-04-02-02 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-json/ssh.log b/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-json/ssh.log index 52c5e4856c..99f3925b2e 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-json/ssh.log +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-json/ssh.log @@ -1 +1 @@ -{"b":true,"i":-42,"e":"SSH::LOG","c":21,"p":123,"sn":"10.0.0.0/24","a":"1.2.3.4","d":3.14,"t":1215620010.54321,"iv":100.0,"s":"hurz","sc":[2,4,1,3],"ss":["BB","AA","CC"],"se":[],"vc":[10,20,30],"ve":[],"f":"SSH::foo\n{ \nif (0 < SSH::i) \n\treturn (Foo);\nelse\n\treturn (Bar);\n\n}"} +{"b":true,"i":-42,"e":"SSH::LOG","c":21,"p":123,"sn":"10.0.0.0/24","a":"1.2.3.4","d":3.14,"t":1215620010.54321,"iv":100.0,"s":"hurz","sc":[2,4,1,3],"ss":["BB","AA","CC"],"se":[],"vc":[10,20,30],"ve":[],"vn":[0,null,2],"f":"SSH::foo\n{ \nif (0 < SSH::i) \n\treturn (Foo);\nelse\n\treturn (Bar);\n\n}"} diff --git a/testing/btest/Baseline/scripts.base.frameworks.openflow.ryu-basic/.stdout b/testing/btest/Baseline/scripts.base.frameworks.openflow.ryu-basic/.stdout index 20db16d9ff..90659d1f70 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.openflow.ryu-basic/.stdout +++ b/testing/btest/Baseline/scripts.base.frameworks.openflow.ryu-basic/.stdout @@ -1,22 +1,22 @@ http://127.0.0.1:8080/stats/flowentry/clear/42 http://127.0.0.1:8080/stats/flowentry/add -{"priority":0,"idle_timeout":0,"hard_timeout":0,"match":{},"actions":[{"port":3,"type":"OUTPUT"},{"port":7,"type":"OUTPUT"}],"cookie":4398046511105,"flags":0,"dpid":42} +{"dpid":42,"cookie":4398046511105,"idle_timeout":0,"hard_timeout":0,"priority":0,"flags":0,"match":{},"actions":[{"type":"OUTPUT","port":3},{"type":"OUTPUT","port":7}]} Flow_mod_success http://127.0.0.1:8080/stats/flowentry/add -{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"74.53.140.153/32","tp_dst":25,"tp_src":1470,"nw_proto":6,"dl_type":2048,"nw_src":"10.10.1.4/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42} +{"dpid":42,"cookie":4398046511146,"idle_timeout":30,"hard_timeout":0,"priority":5,"flags":0,"match":{"dl_type":2048,"nw_proto":6,"nw_src":"10.10.1.4/32","nw_dst":"74.53.140.153/32","tp_src":1470,"tp_dst":25},"actions":[]} http://127.0.0.1:8080/stats/flowentry/add -{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"10.10.1.4/32","tp_dst":1470,"tp_src":25,"nw_proto":6,"dl_type":2048,"nw_src":"74.53.140.153/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42} +{"dpid":42,"cookie":4398046511146,"idle_timeout":30,"hard_timeout":0,"priority":5,"flags":0,"match":{"dl_type":2048,"nw_proto":6,"nw_src":"74.53.140.153/32","nw_dst":"10.10.1.4/32","tp_src":25,"tp_dst":1470},"actions":[]} Flow_mod_success Flow_mod_success http://127.0.0.1:8080/stats/flowentry/add 
-{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.102/32","tp_dst":25,"tp_src":49648,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.100/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42} +{"dpid":42,"cookie":4398046511146,"idle_timeout":30,"hard_timeout":0,"priority":5,"flags":0,"match":{"dl_type":2048,"nw_proto":6,"nw_src":"192.168.133.100/32","nw_dst":"192.168.133.102/32","tp_src":49648,"tp_dst":25},"actions":[]} http://127.0.0.1:8080/stats/flowentry/add -{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.100/32","tp_dst":49648,"tp_src":25,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.102/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42} +{"dpid":42,"cookie":4398046511146,"idle_timeout":30,"hard_timeout":0,"priority":5,"flags":0,"match":{"dl_type":2048,"nw_proto":6,"nw_src":"192.168.133.102/32","nw_dst":"192.168.133.100/32","tp_src":25,"tp_dst":49648},"actions":[]} Flow_mod_success Flow_mod_success http://127.0.0.1:8080/stats/flowentry/add -{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"17.167.150.73/32","tp_dst":443,"tp_src":49655,"nw_proto":6,"dl_type":2048,"nw_src":"192.168.133.100/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42} +{"dpid":42,"cookie":4398046511146,"idle_timeout":30,"hard_timeout":0,"priority":5,"flags":0,"match":{"dl_type":2048,"nw_proto":6,"nw_src":"192.168.133.100/32","nw_dst":"17.167.150.73/32","tp_src":49655,"tp_dst":443},"actions":[]} http://127.0.0.1:8080/stats/flowentry/add -{"priority":5,"idle_timeout":30,"hard_timeout":0,"match":{"nw_dst":"192.168.133.100/32","tp_dst":49655,"tp_src":443,"nw_proto":6,"dl_type":2048,"nw_src":"17.167.150.73/32"},"actions":[],"cookie":4398046511146,"flags":0,"dpid":42} +{"dpid":42,"cookie":4398046511146,"idle_timeout":30,"hard_timeout":0,"priority":5,"flags":0,"match":{"dl_type":2048,"nw_proto":6,"nw_src":"17.167.150.73/32","nw_dst":"192.168.133.100/32","tp_src":443,"tp_dst":49655},"actions":[]} Flow_mod_success Flow_mod_success diff --git a/testing/btest/Baseline/scripts.base.protocols.http.build-url/output b/testing/btest/Baseline/scripts.base.protocols.http.build-url/output new file mode 100644 index 0000000000..90e04842a8 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.protocols.http.build-url/output @@ -0,0 +1,4 @@ +Have: 192.150.187.43/download/CHANGES.bro-aux.txt Expected: 192.150.187.43/download/CHANGES.bro-aux.txt -> SUCCESS +Have: 192.150.187.43:123/download/CHANGES.bro-aux.txt Expected: 192.150.187.43:123/download/CHANGES.bro-aux.txt -> SUCCESS +Have: 192.150.187.43:123/ Expected: 192.150.187.43:123/ -> SUCCESS +Have: http://proxied.host/some/document Expected: http://proxied.host/some/document -> SUCCESS diff --git a/testing/btest/Baseline/scripts.base.utils.json/output b/testing/btest/Baseline/scripts.base.utils.json/output index 2d2e56253f..8757a51433 100644 --- a/testing/btest/Baseline/scripts.base.utils.json/output +++ b/testing/btest/Baseline/scripts.base.utils.json/output @@ -19,7 +19,7 @@ true "fe80::/64" "Red" "/^?(^abcd)$?/" -{"s":"test","c":100} +{"c":100,"s":"test"} {"s":"test"} {"s":"test"} {"m":{"s":"test"}} @@ -28,13 +28,16 @@ true ["1.2.3.4"] [[true,false]] [{"s":"test"}] +[0,null,2] [] [2,1] ["1.2.3.4"] [{"s":"test"}] [{"s":"test"}] +[["three",3],["one",1],["two",2]] {} {"2":"10.2.2.2","1":"10.1.1.1"} {"10.1.1.1":{"a":1},"10.2.2.2":{"b":2}} {"10.1.1.1":[1,2],"10.2.2.2":[3,5]} {"1":{"s":"test"}} +{"opaque_type":"TopkVal"} diff --git 
a/testing/btest/Baseline/scripts.policy.misc.dump-events/all-events.log b/testing/btest/Baseline/scripts.policy.misc.dump-events/all-events.log index cc3dfb6fe6..c939a079c0 100644 --- a/testing/btest/Baseline/scripts.policy.misc.dump-events/all-events.log +++ b/testing/btest/Baseline/scripts.policy.misc.dump-events/all-events.log @@ -41,13 +41,13 @@ [3] name: string = patriots.in 1254722767.526085 dns_A_reply - [0] c: connection = [id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], orig=[size=34, state=1, num_pkts=1, num_bytes_ip=62, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=100, state=1, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=1254722767.49206, duration=0.034025, service={\x0aDNS\x0a}, history=Dd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=[ts=1254722767.49206, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=34.0 msecs 24.0 usecs, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=0, rcode_name=NOERROR, AA=F, TC=F, RD=T, RA=T, Z=0, answers=[patriots.in], TTLs=[3.0 hrs 27.0 secs], rejected=F, total_answers=2, total_replies=4, saw_query=T, saw_reply=F], dns_state=[pending_query=, pending_queries=, pending_replies=], ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=] + [0] c: connection = [id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], orig=[size=34, state=1, num_pkts=1, num_bytes_ip=62, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=100, state=1, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=1254722767.49206, duration=0.034025, service={\x0aDNS\x0a}, history=Dd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=[ts=1254722767.49206, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=34.0 msecs 24.953842 usecs, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=0, rcode_name=NOERROR, AA=F, TC=F, RD=T, RA=T, Z=0, answers=[patriots.in], TTLs=[3.0 hrs 27.0 secs], rejected=F, total_answers=2, total_replies=4, saw_query=T, saw_reply=F], dns_state=[pending_query=, pending_queries=, pending_replies=], ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=] [1] msg: dns_msg = [id=31062, opcode=0, rcode=0, QR=T, AA=F, TC=F, RD=T, RA=T, Z=0, num_queries=1, num_answers=2, num_auth=2, num_addl=0] [2] ans: dns_answer = [answer_type=1, query=patriots.in, qtype=1, qclass=1, TTL=3.0 hrs 28.0 secs] [3] a: addr = 74.53.140.153 1254722767.526085 dns_end - [0] c: connection = [id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], orig=[size=34, state=1, num_pkts=1, num_bytes_ip=62, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=100, state=1, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=1254722767.49206, duration=0.034025, 
service={\x0aDNS\x0a}, history=Dd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=[ts=1254722767.49206, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=34.0 msecs 24.0 usecs, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=0, rcode_name=NOERROR, AA=F, TC=F, RD=T, RA=T, Z=0, answers=[patriots.in, 74.53.140.153], TTLs=[3.0 hrs 27.0 secs, 3.0 hrs 28.0 secs], rejected=F, total_answers=2, total_replies=4, saw_query=T, saw_reply=F], dns_state=[pending_query=, pending_queries=, pending_replies=], ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=] + [0] c: connection = [id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], orig=[size=34, state=1, num_pkts=1, num_bytes_ip=62, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=100, state=1, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=1254722767.49206, duration=0.034025, service={\x0aDNS\x0a}, history=Dd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=[ts=1254722767.49206, uid=CHhAvVGS1DHFjwGM9, id=[orig_h=10.10.1.4, orig_p=56166/udp, resp_h=10.10.1.1, resp_p=53/udp], proto=udp, trans_id=31062, rtt=34.0 msecs 24.953842 usecs, query=mail.patriots.in, qclass=1, qclass_name=C_INTERNET, qtype=1, qtype_name=A, rcode=0, rcode_name=NOERROR, AA=F, TC=F, RD=T, RA=T, Z=0, answers=[patriots.in, 74.53.140.153], TTLs=[3.0 hrs 27.0 secs, 3.0 hrs 28.0 secs], rejected=F, total_answers=2, total_replies=4, saw_query=T, saw_reply=F], dns_state=[pending_query=, pending_queries=, pending_replies=], ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=, smtp_state=, socks=, ssh=, syslog=] [1] msg: dns_msg = [id=31062, opcode=0, rcode=0, QR=T, AA=F, TC=F, RD=T, RA=T, Z=0, num_queries=1, num_answers=2, num_auth=2, num_addl=0] 1254722767.529046 new_connection @@ -362,7 +362,7 @@ [1] meta: fa_metadata = [mime_type=text/html, mime_types=[[strength=100, mime=text/html], [strength=20, mime=text/html], [strength=-20, mime=text/plain]], inferred=T] 1254722770.692804 file_state_remove - [0] f: fa_file = [id=Ft4M3f2yMvLlmwtbq9, parent_id=, source=SMTP, is_orig=T, conns={\x0a\x09[[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp]] = [id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], orig=[size=4530, state=4, num_pkts=11, num_bytes_ip=3518, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=462, state=4, num_pkts=10, num_bytes_ip=870, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=1254722767.529046, duration=3.163758, service={\x0aSMTP\x0a\x09}, history=ShAdDa, uid=ClEkJM2Vm5giqnMf4h, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=1254722768.219663, 
uid=ClEkJM2Vm5giqnMf4h, id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], trans_depth=1, helo=GP, mailfrom=gurpartap@patriots.in, rcptto={\x0araj_deol2002in@yahoo.co.in\x0a\x09}, date=Mon, 5 Oct 2009 11:36:07 +0530, from="Gurpartap Singh" , to={\x0a\x0a\x09}, cc=, reply_to=, msg_id=<000301ca4581$ef9e57f0$cedb07d0$@in>, in_reply_to=, subject=SMTP, x_originating_ip=, first_received=, second_received=, last_reply=354 Enter message, ending with "." on a line by itself, path=[74.53.140.153, 10.10.1.4], user_agent=Microsoft Office Outlook 12.0, tls=F, process_received_from=T, has_client_activity=T, entity=, fuids=[Fel9gs4OtNEV6gUJZ5, Ft4M3f2yMvLlmwtbq9]], smtp_state=[helo=GP, messages_transferred=0, pending_messages=, mime_depth=4], socks=, ssh=, syslog=]\x0a}, last_active=1254722770.692804, seen_bytes=1868, total_bytes=, missing_bytes=0, overflow_bytes=0, timeout_interval=2.0 mins, bof_buffer_size=4096, bof_buffer=\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a
\x0d\x0a\x0d\x0a

Hello

\x0d\x0a\x0d\x0a

 

\x0d\x0a\x0d\x0a

I send u smtp pcap file

\x0d\x0a\x0d\x0a

Find the attachment

\x0d\x0a\x0d\x0a

 

\x0d\x0a\x0d\x0a

GPS

\x0d\x0a\x0d\x0a
\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a, info=[ts=1254722770.692743, fuid=Ft4M3f2yMvLlmwtbq9, tx_hosts={\x0a\x0910.10.1.4\x0a}, rx_hosts={\x0a\x0974.53.140.153\x0a}, conn_uids={\x0aClEkJM2Vm5giqnMf4h\x0a}, source=SMTP, depth=4, analyzers={\x0a\x0a}, mime_type=text/html, filename=, duration=61.0 usecs, local_orig=, is_orig=T, seen_bytes=1868, total_bytes=, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=], ftp=, http=, irc=, pe=] + [0] f: fa_file = [id=Ft4M3f2yMvLlmwtbq9, parent_id=, source=SMTP, is_orig=T, conns={\x0a\x09[[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp]] = [id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], orig=[size=4530, state=4, num_pkts=11, num_bytes_ip=3518, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=462, state=4, num_pkts=10, num_bytes_ip=870, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=1254722767.529046, duration=3.163758, service={\x0aSMTP\x0a\x09}, history=ShAdDa, uid=ClEkJM2Vm5giqnMf4h, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=1254722768.219663, uid=ClEkJM2Vm5giqnMf4h, id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], trans_depth=1, helo=GP, mailfrom=gurpartap@patriots.in, rcptto={\x0araj_deol2002in@yahoo.co.in\x0a\x09}, date=Mon, 5 Oct 2009 11:36:07 +0530, from="Gurpartap Singh" , to={\x0a\x0a\x09}, cc=, reply_to=, msg_id=<000301ca4581$ef9e57f0$cedb07d0$@in>, in_reply_to=, subject=SMTP, x_originating_ip=, first_received=, second_received=, last_reply=354 Enter message, ending with "." on a line by itself, path=[74.53.140.153, 10.10.1.4], user_agent=Microsoft Office Outlook 12.0, tls=F, process_received_from=T, has_client_activity=T, entity=, fuids=[Fel9gs4OtNEV6gUJZ5, Ft4M3f2yMvLlmwtbq9]], smtp_state=[helo=GP, messages_transferred=0, pending_messages=, mime_depth=4], socks=, ssh=, syslog=]\x0a}, last_active=1254722770.692804, seen_bytes=1868, total_bytes=, missing_bytes=0, overflow_bytes=0, timeout_interval=2.0 mins, bof_buffer_size=4096, bof_buffer=\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a
\x0d\x0a\x0d\x0a

Hello

\x0d\x0a\x0d\x0a

 

\x0d\x0a\x0d\x0a

I send u smtp pcap file

\x0d\x0a\x0d\x0a

Find the attachment

\x0d\x0a\x0d\x0a

 

\x0d\x0a\x0d\x0a

GPS

\x0d\x0a\x0d\x0a
\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a, info=[ts=1254722770.692743, fuid=Ft4M3f2yMvLlmwtbq9, tx_hosts={\x0a\x0910.10.1.4\x0a}, rx_hosts={\x0a\x0974.53.140.153\x0a}, conn_uids={\x0aClEkJM2Vm5giqnMf4h\x0a}, source=SMTP, depth=4, analyzers={\x0a\x0a}, mime_type=text/html, filename=, duration=61.035156 usecs, local_orig=, is_orig=T, seen_bytes=1868, total_bytes=, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=], ftp=, http=, irc=, pe=] 1254722770.692804 get_file_handle [0] tag: enum = Analyzer::ANALYZER_SMTP @@ -426,7 +426,7 @@ [2] is_orig: bool = T 1254722771.858334 file_state_remove - [0] f: fa_file = [id=FL9Y0d45OI4LpS6fmh, parent_id=, source=SMTP, is_orig=T, conns={\x0a\x09[[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp]] = [id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], orig=[size=14699, state=4, num_pkts=23, num_bytes_ip=21438, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=462, state=4, num_pkts=15, num_bytes_ip=1070, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=1254722767.529046, duration=4.329288, service={\x0aSMTP\x0a\x09}, history=ShAdDaT, uid=ClEkJM2Vm5giqnMf4h, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=1254722768.219663, uid=ClEkJM2Vm5giqnMf4h, id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], trans_depth=1, helo=GP, mailfrom=gurpartap@patriots.in, rcptto={\x0araj_deol2002in@yahoo.co.in\x0a\x09}, date=Mon, 5 Oct 2009 11:36:07 +0530, from="Gurpartap Singh" , to={\x0a\x0a\x09}, cc=, reply_to=, msg_id=<000301ca4581$ef9e57f0$cedb07d0$@in>, in_reply_to=, subject=SMTP, x_originating_ip=, first_received=, second_received=, last_reply=354 Enter message, ending with "." 
on a line by itself, path=[74.53.140.153, 10.10.1.4], user_agent=Microsoft Office Outlook 12.0, tls=F, process_received_from=T, has_client_activity=T, entity=, fuids=[Fel9gs4OtNEV6gUJZ5, Ft4M3f2yMvLlmwtbq9, FL9Y0d45OI4LpS6fmh]], smtp_state=[helo=GP, messages_transferred=0, pending_messages=, mime_depth=5], socks=, ssh=, syslog=]\x0a}, last_active=1254722771.858316, seen_bytes=10809, total_bytes=, missing_bytes=0, overflow_bytes=0, timeout_interval=2.0 mins, bof_buffer_size=4096, bof_buffer=Version 4.9.9.1\x0d\x0a* Many bug fixes\x0d\x0a* Improved editor\x0d\x0a\x0d\x0aVersion 4.9.9.0\x0d\x0a* Support for latest Mingw compiler system builds\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.9\x0d\x0a* New code tooltip display\x0d\x0a* Improved Indent/Unindent and Remove Comment\x0d\x0a* Improved automatic indent\x0d\x0a* Added support for the "interface" keyword\x0d\x0a* WebUpdate should now report installation problems from PackMan\x0d\x0a* New splash screen and association icons\x0d\x0a* Improved installer\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.7\x0d\x0a* Added support for GCC > 3.2\x0d\x0a* Debug variables are now resent during next debug session\x0d\x0a* Watched Variables not in correct context are now kept and updated when it is needed\x0d\x0a* Added new compiler/linker options: \x0d\x0a - Strip executable\x0d\x0a - Generate instructions for a specific machine (i386, i486, i586, i686, pentium, pentium-mmx, pentiumpro, pentium2, pentium3, pentium4, \x0d\x0a k6, k6-2, k6-3, athlon, athlon-tbird, athlon-4, athlon-xp, athlon-mp, winchip-c6, winchip2, k8, c3 and c3-2)\x0d\x0a - Enable use of processor specific built-in functions (mmmx, sse, sse2, pni, 3dnow)\x0d\x0a* "Default" button in Compiler Options is back\x0d\x0a* Error messages parsing improved\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.5\x0d\x0a* Added the possibility to modify the value of a variable during debugging (right click on a watch variable and select "Modify value")\x0d\x0a* During Dev-C++ First Time COnfiguration window, users can now choose between using or not class browser and code completion features.\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.4\x0d\x0a* Added the possibility to specify an include directory for the code completion cache to be created at Dev-C++ first startup\x0d\x0a* Improved code completion cache\x0d\x0a* WebUpdate will now backup downloaded DevPaks in Dev-C++\Packages directory, and Dev-C++ executable in devcpp.exe.BACKUP\x0d\x0a* Big speed up in function parameters listing while editing\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.3\x0d\x0a* On Dev-C++ first time configuration dialog, a code completion cache of all the standard \x0d\x0a include files can now be generated.\x0d\x0a* Improved WebUpdate module\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.2\x0d\x0a* New debug feature for DLLs: attach to a running process\x0d\x0a* New project option: Use custom Makefile. 
\x0d\x0a* New WebUpdater module.\x0d\x0a* Allow user to specify an alternate configuration file in Environment Options \x0d\x0a (still can be overriden by using "-c" command line parameter).\x0d\x0a* Lots of bug fixes.\x0d\x0a\x0d\x0aVersion 4.9.8.1\x0d\x0a* When creating a DLL, the created static lib respects now the project-defined output directory\x0d\x0a\x0d\x0aVersion 4.9.8.0\x0d\x0a* Changed position of compiler/linker parameters in Project Options.\x0d\x0a* Improved help file\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.9\x0d\x0a* Resource errors are now reported in the Resource sheet\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.8\x0d\x0a* Made whole bottom report control floating instead of only debug output.\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.7\x0d\x0a* Printing settings are now saved\x0d\x0a* New environment options : "watch variable under mouse" and "Report watch errors"\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.6\x0d\x0a* Debug variable browser\x0d\x0a* Added possibility to include in a Template the Project's directories (include, libs and ressources)\x0d\x0a* Changed tint of Class browser pictures colors to match the New Look style\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.5\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.4\x0d\x0a* When compiling with debugging symbols, an extra definition is passed to the\x0d\x0a compiler: -D__DEBUG__\x0d\x0a* Each project creates a _private.h file containing version\x0d\x0a information definitions\x0d\x0a* When compiling the current file only, no dependency checks are performed\x0d\x0a* ~300% Speed-up in class parser\x0d\x0a* Added "External programs" in Tools/Environment Options (for units "Open with")\x0d\x0a* Added "Open with" in project units context menu\x0d\x0a* Added "Classes" toolbar\x0d\x0a* Fixed pre-compilation dependency checks to work correctly\x0d\x0a* Added new file menu entry: Save Project As\x0d\x0a* Bug-fix for double quotes in devcpp.cfg file read by vUpdate\x0d\x0a* Other bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.3\x0d\x0a* When adding debugging symbols on request, remove "-s" option from linker\x0d\x0a* Compiling progress window\x0d\x0a* Environment options : "Show progress window" and "Auto-close progress , info=[ts=1254722770.692804, fuid=FL9Y0d45OI4LpS6fmh, tx_hosts={\x0a\x0910.10.1.4\x0a}, rx_hosts={\x0a\x0974.53.140.153\x0a}, conn_uids={\x0aClEkJM2Vm5giqnMf4h\x0a}, source=SMTP, depth=5, analyzers={\x0a\x0a}, mime_type=text/plain, filename=NEWS.txt, duration=801.0 msecs 376.0 usecs, local_orig=, is_orig=T, seen_bytes=4027, total_bytes=, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=], ftp=, http=, irc=, pe=] + [0] f: fa_file = [id=FL9Y0d45OI4LpS6fmh, parent_id=, source=SMTP, is_orig=T, conns={\x0a\x09[[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp]] = [id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], orig=[size=14699, state=4, num_pkts=23, num_bytes_ip=21438, flow_label=0, l2_addr=00:e0:1c:3c:17:c2], resp=[size=462, state=4, num_pkts=15, num_bytes_ip=1070, flow_label=0, l2_addr=00:1f:33:d9:81:60], start_time=1254722767.529046, duration=4.329288, service={\x0aSMTP\x0a\x09}, history=ShAdDaT, uid=ClEkJM2Vm5giqnMf4h, tunnel=, vlan=, inner_vlan=, dpd=, dpd_state=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, 
irc=, krb=, modbus=, mysql=, ntlm=, ntp=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smb_state=, smtp=[ts=1254722768.219663, uid=ClEkJM2Vm5giqnMf4h, id=[orig_h=10.10.1.4, orig_p=1470/tcp, resp_h=74.53.140.153, resp_p=25/tcp], trans_depth=1, helo=GP, mailfrom=gurpartap@patriots.in, rcptto={\x0araj_deol2002in@yahoo.co.in\x0a\x09}, date=Mon, 5 Oct 2009 11:36:07 +0530, from="Gurpartap Singh" , to={\x0a\x0a\x09}, cc=, reply_to=, msg_id=<000301ca4581$ef9e57f0$cedb07d0$@in>, in_reply_to=, subject=SMTP, x_originating_ip=, first_received=, second_received=, last_reply=354 Enter message, ending with "." on a line by itself, path=[74.53.140.153, 10.10.1.4], user_agent=Microsoft Office Outlook 12.0, tls=F, process_received_from=T, has_client_activity=T, entity=, fuids=[Fel9gs4OtNEV6gUJZ5, Ft4M3f2yMvLlmwtbq9, FL9Y0d45OI4LpS6fmh]], smtp_state=[helo=GP, messages_transferred=0, pending_messages=, mime_depth=5], socks=, ssh=, syslog=]\x0a}, last_active=1254722771.858316, seen_bytes=10809, total_bytes=, missing_bytes=0, overflow_bytes=0, timeout_interval=2.0 mins, bof_buffer_size=4096, bof_buffer=Version 4.9.9.1\x0d\x0a* Many bug fixes\x0d\x0a* Improved editor\x0d\x0a\x0d\x0aVersion 4.9.9.0\x0d\x0a* Support for latest Mingw compiler system builds\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.9\x0d\x0a* New code tooltip display\x0d\x0a* Improved Indent/Unindent and Remove Comment\x0d\x0a* Improved automatic indent\x0d\x0a* Added support for the "interface" keyword\x0d\x0a* WebUpdate should now report installation problems from PackMan\x0d\x0a* New splash screen and association icons\x0d\x0a* Improved installer\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.7\x0d\x0a* Added support for GCC > 3.2\x0d\x0a* Debug variables are now resent during next debug session\x0d\x0a* Watched Variables not in correct context are now kept and updated when it is needed\x0d\x0a* Added new compiler/linker options: \x0d\x0a - Strip executable\x0d\x0a - Generate instructions for a specific machine (i386, i486, i586, i686, pentium, pentium-mmx, pentiumpro, pentium2, pentium3, pentium4, \x0d\x0a k6, k6-2, k6-3, athlon, athlon-tbird, athlon-4, athlon-xp, athlon-mp, winchip-c6, winchip2, k8, c3 and c3-2)\x0d\x0a - Enable use of processor specific built-in functions (mmmx, sse, sse2, pni, 3dnow)\x0d\x0a* "Default" button in Compiler Options is back\x0d\x0a* Error messages parsing improved\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.5\x0d\x0a* Added the possibility to modify the value of a variable during debugging (right click on a watch variable and select "Modify value")\x0d\x0a* During Dev-C++ First Time COnfiguration window, users can now choose between using or not class browser and code completion features.\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.4\x0d\x0a* Added the possibility to specify an include directory for the code completion cache to be created at Dev-C++ first startup\x0d\x0a* Improved code completion cache\x0d\x0a* WebUpdate will now backup downloaded DevPaks in Dev-C++\Packages directory, and Dev-C++ executable in devcpp.exe.BACKUP\x0d\x0a* Big speed up in function parameters listing while editing\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.3\x0d\x0a* On Dev-C++ first time configuration dialog, a code completion cache of all the standard \x0d\x0a include files can now be generated.\x0d\x0a* Improved WebUpdate module\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.8.2\x0d\x0a* New debug feature for DLLs: attach to a running process\x0d\x0a* New project option: Use custom Makefile. 
\x0d\x0a* New WebUpdater module.\x0d\x0a* Allow user to specify an alternate configuration file in Environment Options \x0d\x0a (still can be overriden by using "-c" command line parameter).\x0d\x0a* Lots of bug fixes.\x0d\x0a\x0d\x0aVersion 4.9.8.1\x0d\x0a* When creating a DLL, the created static lib respects now the project-defined output directory\x0d\x0a\x0d\x0aVersion 4.9.8.0\x0d\x0a* Changed position of compiler/linker parameters in Project Options.\x0d\x0a* Improved help file\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.9\x0d\x0a* Resource errors are now reported in the Resource sheet\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.8\x0d\x0a* Made whole bottom report control floating instead of only debug output.\x0d\x0a* Many bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.7\x0d\x0a* Printing settings are now saved\x0d\x0a* New environment options : "watch variable under mouse" and "Report watch errors"\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.6\x0d\x0a* Debug variable browser\x0d\x0a* Added possibility to include in a Template the Project's directories (include, libs and ressources)\x0d\x0a* Changed tint of Class browser pictures colors to match the New Look style\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.5\x0d\x0a* Bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.4\x0d\x0a* When compiling with debugging symbols, an extra definition is passed to the\x0d\x0a compiler: -D__DEBUG__\x0d\x0a* Each project creates a _private.h file containing version\x0d\x0a information definitions\x0d\x0a* When compiling the current file only, no dependency checks are performed\x0d\x0a* ~300% Speed-up in class parser\x0d\x0a* Added "External programs" in Tools/Environment Options (for units "Open with")\x0d\x0a* Added "Open with" in project units context menu\x0d\x0a* Added "Classes" toolbar\x0d\x0a* Fixed pre-compilation dependency checks to work correctly\x0d\x0a* Added new file menu entry: Save Project As\x0d\x0a* Bug-fix for double quotes in devcpp.cfg file read by vUpdate\x0d\x0a* Other bug fixes\x0d\x0a\x0d\x0aVersion 4.9.7.3\x0d\x0a* When adding debugging symbols on request, remove "-s" option from linker\x0d\x0a* Compiling progress window\x0d\x0a* Environment options : "Show progress window" and "Auto-close progress , info=[ts=1254722770.692804, fuid=FL9Y0d45OI4LpS6fmh, tx_hosts={\x0a\x0910.10.1.4\x0a}, rx_hosts={\x0a\x0974.53.140.153\x0a}, conn_uids={\x0aClEkJM2Vm5giqnMf4h\x0a}, source=SMTP, depth=5, analyzers={\x0a\x0a}, mime_type=text/plain, filename=NEWS.txt, duration=801.0 msecs 376.819611 usecs, local_orig=, is_orig=T, seen_bytes=4027, total_bytes=, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, x509=, extracted=, extracted_cutoff=, extracted_size=], ftp=, http=, irc=, pe=] 1254722771.858334 get_file_handle [0] tag: enum = Analyzer::ANALYZER_SMTP diff --git a/testing/btest/Baseline/signatures.tcp-syn-with-payload/payload-syn.out b/testing/btest/Baseline/signatures.tcp-syn-with-payload/payload-syn.out new file mode 100644 index 0000000000..4c5110213b --- /dev/null +++ b/testing/btest/Baseline/signatures.tcp-syn-with-payload/payload-syn.out @@ -0,0 +1 @@ +signature_match [orig_h=192.168.0.1, orig_p=80/tcp, resp_h=192.168.0.2, resp_p=80/tcp] - payload of dst-port=80/tcp contains 'passwd' diff --git a/testing/btest/Baseline/signatures.tcp-syn-with-payload/payload-synack.out b/testing/btest/Baseline/signatures.tcp-syn-with-payload/payload-synack.out new file mode 100644 index 0000000000..4c5110213b --- /dev/null +++ 
b/testing/btest/Baseline/signatures.tcp-syn-with-payload/payload-synack.out
@@ -0,0 +1 @@
+signature_match [orig_h=192.168.0.1, orig_p=80/tcp, resp_h=192.168.0.2, resp_p=80/tcp] - payload of dst-port=80/tcp contains 'passwd'
diff --git a/testing/btest/Baseline/signatures.tcp-syn-with-payload/tcp-fast-open.out b/testing/btest/Baseline/signatures.tcp-syn-with-payload/tcp-fast-open.out
new file mode 100644
index 0000000000..cb13fa5a47
--- /dev/null
+++ b/testing/btest/Baseline/signatures.tcp-syn-with-payload/tcp-fast-open.out
@@ -0,0 +1 @@
+signature_match [orig_h=10.99.99.1, orig_p=55534/tcp, resp_h=10.99.99.45, resp_p=80/tcp] - payload of dst-port=80/tcp contains 'passwd'
diff --git a/testing/btest/Traces/tcp/option-sack.pcap b/testing/btest/Traces/tcp/option-sack.pcap
new file mode 100644
index 0000000000..22d7118fc8
Binary files /dev/null and b/testing/btest/Traces/tcp/option-sack.pcap differ
diff --git a/testing/btest/Traces/tcp/options.pcap b/testing/btest/Traces/tcp/options.pcap
new file mode 100644
index 0000000000..d1ebd885b0
Binary files /dev/null and b/testing/btest/Traces/tcp/options.pcap differ
diff --git a/testing/btest/Traces/tcp/payload-syn.pcap b/testing/btest/Traces/tcp/payload-syn.pcap
new file mode 100644
index 0000000000..4082590877
Binary files /dev/null and b/testing/btest/Traces/tcp/payload-syn.pcap differ
diff --git a/testing/btest/Traces/tcp/payload-synack.pcap b/testing/btest/Traces/tcp/payload-synack.pcap
new file mode 100644
index 0000000000..2a2e58c9cb
Binary files /dev/null and b/testing/btest/Traces/tcp/payload-synack.pcap differ
diff --git a/testing/btest/Traces/tcp/tcp-fast-open.pcap b/testing/btest/Traces/tcp/tcp-fast-open.pcap
new file mode 100644
index 0000000000..88e9a99b0d
Binary files /dev/null and b/testing/btest/Traces/tcp/tcp-fast-open.pcap differ
diff --git a/testing/btest/bifs/print_raw.zeek b/testing/btest/bifs/print_raw.zeek
new file mode 100644
index 0000000000..bb8460dc3b
--- /dev/null
+++ b/testing/btest/bifs/print_raw.zeek
@@ -0,0 +1,12 @@
+# @TEST-EXEC: zeek -b %INPUT >out
+# @TEST-EXEC: btest-diff out
+
+local j = to_json("\x07\xd4\xb7o");
+
+local v: vector of count = vector();
+v += 9;
+v += 10;
+
+print_raw(j, "\n");
+print_raw("start ", j, 137, T, v, " finish", "\n");
+print_raw("\xc3\xa9", "\n");
diff --git a/testing/btest/core/leaks/to_json.zeek b/testing/btest/core/leaks/to_json.zeek
new file mode 100644
index 0000000000..170de3c2eb
--- /dev/null
+++ b/testing/btest/core/leaks/to_json.zeek
@@ -0,0 +1,140 @@
+# Needs perftools support.
+#
+# @TEST-GROUP: leaks
+#
+# @TEST-REQUIRES: zeek --help 2>&1 | grep -q mem-leaks
+#
+# @TEST-EXEC: HEAP_CHECK_DUMP_DIRECTORY=.
HEAPCHECK=local btest-bg-run zeek zeek -m -b -r $TRACES/wikipedia.trace %INPUT +# @TEST-EXEC: btest-bg-wait 60 + +type color: enum { Red, White, Blue }; + +type myrec1: record { + c: count &optional; + s: string &log; +}; + +type myrec2: record { + m: myrec1 &log; +}; + +global did_it = F; + +event new_connection(myconn: connection) + { + if ( did_it ) + return; + + did_it = T; + # ##################################### + # Test the basic (non-container) types: + + local b: bool = T; + print to_json(b); + + local c: count = 123; + print to_json(c); + + local i: int = -999; + print to_json(i); + + local d1: double = 3.14; + local d2: double = -1.23456789e308; + local d3: double = 9e-308; + print to_json(d1); + print to_json(d2); + print to_json(d3); + + local t: time = double_to_time(1480788576.868945); + print to_json(t); + + local ti: interval = -12hr; + print to_json(ti); + + local s1: string = "hello"; + local s2: string = ""; + print to_json(s1); + print to_json(s2); + + local p1: port = 65535/tcp; + local p2: port = 1/udp; + local p3: port = 123/icmp; + local p4: port = 0/unknown; + print to_json(p1); + print to_json(p2); + print to_json(p3); + print to_json(p4); + + local a1: addr = 1.2.3.4; + local a2: addr = [ffff:1234::1]; + local a3: addr = [::ffff:123.123.123.123]; + print to_json(a1); + print to_json(a2); + print to_json(a3); + + local su1: subnet = 192.0.0.0/8; + local su2: subnet = [fe80::]/64; + print to_json(su1); + print to_json(su2); + + local e: color = Red; + print to_json(e); + + local p: pattern = /^abcd/; + print to_json(p); + + # ######################### + # Test the container types: + + # Records + local re1 = myrec1($c=100, $s="test"); + local re2 = myrec1($s="test"); + local re3 = myrec2($m=myrec1($c=15, $s="test")); + print to_json(re1); + print to_json(re1, T); + print to_json(re2); + print to_json(re3, T); + + # Vectors + local ve1: vector of count = vector(); + local ve2: vector of count = vector(2, 1); + local ve3: vector of addr = vector(1.2.3.4); + local ve4: vector of set[bool] = vector(set(T, F)); + local ve5: vector of myrec1 = vector(myrec1($s="test", $c=2)); + local ve6: vector of count; + ve6[0] = 0; + ve6[2] = 2; + print to_json(ve1); + print to_json(ve2); + print to_json(ve3); + print to_json(ve4); + print to_json(ve5, T); + print to_json(ve6); + + # Sets + local st1: set[count] = set(); + local st2: set[count] = set(2, 1); + local st3: set[addr] = set(1.2.3.4); + local st4: set[myrec1] = set(myrec1($s="test")); + local st5: set[myrec1] = set(myrec1($s="test", $c=2)); + local st6: set[string, count] = { ["one", 1], ["two", 2], ["three", 3] }; + print to_json(st1); + print to_json(st2); + print to_json(st3); + print to_json(st4); + print to_json(st5, T); + print to_json(st6); + + # Tables + local ta1: table[count] of addr = table(); + local ta2: table[count] of addr = {[1] = 10.1.1.1, [2] = 10.2.2.2}; + local ta3: table[addr] of table[string] of count = {[10.1.1.1] = table(["a"] = 1), [10.2.2.2] = table(["b"] = 2)}; + local ta4: table[addr] of vector of count = {[10.1.1.1] = vector(1, 2), [10.2.2.2] = vector(3, 5)}; + local ta5: table[count] of myrec1 = {[1] = myrec1($s="test", $c=2)}; + print to_json(ta1); + print to_json(ta2); + print to_json(ta3); + print to_json(ta4); + print to_json(ta5, T); + } + diff --git a/testing/btest/core/print-interval.zeek b/testing/btest/core/print-interval.zeek new file mode 100644 index 0000000000..0ccdde9372 --- /dev/null +++ b/testing/btest/core/print-interval.zeek @@ -0,0 +1,32 @@ +# @TEST-EXEC: zeek -b %INPUT 
>out +# @TEST-EXEC: btest-diff out + +function mult10k(p: interval, n: interval, i: count &default=0) + { + if ( i == 4 ) + return; + + print p; + print n; + mult10k(p * 10000, n * 10000, i + 1); + } + +local d = 0.12345678912345 usecs; +local nd = -d; +print 0sec; +mult10k(d, nd); + +d = 1.001 usec; +print d; +print d * 1000; + +print 1.1usec * 10000; + +print 8.5 days; +print 7.5 hrs; +print 6.5 mins; +print 5.5 secs; +print 4.5 msecs; +print 3.5 usecs; + +print 2 days + 2 secs; diff --git a/testing/btest/core/tcp/options.zeek b/testing/btest/core/tcp/options.zeek new file mode 100644 index 0000000000..4a561c988d --- /dev/null +++ b/testing/btest/core/tcp/options.zeek @@ -0,0 +1,44 @@ +# @TEST-EXEC: zeek -b -r $TRACES/tcp/options.pcap %INPUT > out +# @TEST-EXEC: zeek -b -r $TRACES/tcp/option-sack.pcap %INPUT > out-sack +# @TEST-EXEC: btest-diff out +# @TEST-EXEC: btest-diff out-sack + +event tcp_option(c: connection, is_orig: bool, opt: count, optlen: count) + { + print c$id, is_orig, opt, optlen; + } + +event tcp_options(c: connection, is_orig: bool, options: TCP::OptionList) + { + print c$id, is_orig; + + for ( i in options ) + { + local o = options[i]; + print fmt(" kind: %s, length: %s", o$kind, o$length); + + if ( o?$data ) + print fmt(" data (%s): %s", |o$data|, o$data); + else + { + switch ( o$kind ) { + case 2: + print fmt(" mss: %s", o$mss); + break; + case 3: + print fmt(" window scale: %s", o$window_scale); + break; + case 4: + print fmt(" sack permitted"); + break; + case 5: + print fmt(" sack: %s", o$sack); + break; + case 8: + print fmt(" send ts: %s", o$send_timestamp); + print fmt(" echo ts: %s", o$echo_timestamp); + break; + } + } + } + } diff --git a/testing/btest/language/attr-default-global-set-error.zeek b/testing/btest/language/attr-default-global-set-error.zeek index 05ae6b1722..41827f78a1 100644 --- a/testing/btest/language/attr-default-global-set-error.zeek +++ b/testing/btest/language/attr-default-global-set-error.zeek @@ -7,3 +7,4 @@ global d: count &default = 10 &optional &log &add_func = function(): count { return 3; }; +global myset: set[count] &default=set(); diff --git a/testing/btest/language/default-params.zeek b/testing/btest/language/default-params.zeek index c07bdee207..6543ec85d1 100644 --- a/testing/btest/language/default-params.zeek +++ b/testing/btest/language/default-params.zeek @@ -16,6 +16,13 @@ function bar_func(a: string, b: string &default="hi", c: count &default=5) print "bar_func", a, b, c; } +function table_func(a: table[string] of string &default=table()) + { + print "begin table_func", a; + a["the test"] = "works"; + print "end table_func", a; + } + ### events global foo_event: event(a: string &default="hello"); @@ -51,6 +58,8 @@ foo_func(); bar_func("hmm"); bar_func("cool", "beans"); bar_func("cool", "beans", 13); +table_func(); +table_func(table(["initial"] = "conditions")); event foo_event("test"); event foo_event(); diff --git a/testing/btest/language/record-default-coercion.zeek b/testing/btest/language/record-default-coercion.zeek index 83e48044a3..231edc7b60 100644 --- a/testing/btest/language/record-default-coercion.zeek +++ b/testing/btest/language/record-default-coercion.zeek @@ -17,6 +17,10 @@ type Bar: record { foo: Foo &default=[$foo=1234]; }; +type Qux: record { + a: table[string] of string &default=table(); +}; + function print_bar(b: Bar) { print b; @@ -46,3 +50,7 @@ print |r$v|; r$v += "test"; print r; print |r$v|; + +local q = Qux(); +q$a["one"] = "1"; +print q; diff --git 
a/testing/btest/language/table-redef-default.zeek b/testing/btest/language/table-redef-default.zeek new file mode 100644 index 0000000000..39e308c653 --- /dev/null +++ b/testing/btest/language/table-redef-default.zeek @@ -0,0 +1,14 @@ +# @TEST-EXEC: zeek -b %INPUT >out +# @TEST-EXEC: btest-diff out + +const mymap: table[count] of string = { + [1] = "one", + [2] = "two", +} &default="original default" &redef; + +redef mymap = { + [1] = "uno", +} &default="some number"; + +print mymap[1]; +print mymap[2]; diff --git a/testing/btest/scripts/base/frameworks/config/container-options.zeek b/testing/btest/scripts/base/frameworks/config/container-options.zeek new file mode 100644 index 0000000000..cf81b5dfa7 --- /dev/null +++ b/testing/btest/scripts/base/frameworks/config/container-options.zeek @@ -0,0 +1,49 @@ +# @TEST-EXEC: zeek -b %INPUT >out +# @TEST-EXEC: btest-diff out + +@load base/frameworks/config +@load base/frameworks/dpd + +type Color: enum { RED, GREEN, BLUE }; + +option my_set: set[Color] = set(RED); +option my_vector: vector of Color = vector(RED); +option my_table: table[Color] of string = table([RED] = "red"); + +event zeek_init() + { + print my_set; + Config::set_value("my_set", set()); + print my_set; + Config::set_value("my_set", set(BLUE)); + print my_set; + Config::set_value("my_set", set(RED, GREEN, BLUE)); + print my_set; + Config::set_value("my_set", set()); + print my_set; + + print "---"; + + print my_vector; + Config::set_value("my_vector", vector()); + print my_vector; + Config::set_value("my_vector", vector(BLUE)); + print my_vector; + Config::set_value("my_vector", vector(RED, GREEN, BLUE)); + print my_vector; + Config::set_value("my_vector", vector()); + print my_vector; + + print "---"; + + print my_table; + Config::set_value("my_table", table()); + print my_table; + Config::set_value("my_table", table([BLUE] = "blue")); + print my_table; + Config::set_value("my_table", table([RED] = "red", [GREEN] = "green", [BLUE] = "blue")); + print my_table; + Config::set_value("my_table", table()); + print my_table; + } + diff --git a/testing/btest/scripts/base/frameworks/config/read_config.zeek b/testing/btest/scripts/base/frameworks/config/read_config.zeek index 8ea2e4690e..caf24e6ffa 100644 --- a/testing/btest/scripts/base/frameworks/config/read_config.zeek +++ b/testing/btest/scripts/base/frameworks/config/read_config.zeek @@ -19,6 +19,7 @@ testport 45 testaddr 127.0.0.1 testaddr 2607:f8b0:4005:801::200e testinterval 60 +teststring abc testtime 1507321987 test_set a,b,c,d,erdbeerschnitzel test_vector 1,2,3,4,5,6 diff --git a/testing/btest/scripts/base/frameworks/input/config/basic.zeek b/testing/btest/scripts/base/frameworks/input/config/basic.zeek index b6f7c2a78a..3ceb0cea9a 100644 --- a/testing/btest/scripts/base/frameworks/input/config/basic.zeek +++ b/testing/btest/scripts/base/frameworks/input/config/basic.zeek @@ -12,11 +12,11 @@ testcount 1 testcount 2 testcount 2 testint -1 -testenum Conn::LOG +testenum Conn::LOG testport 45 testportandproto 45/udp testaddr 127.0.0.1 -testaddr 2607:f8b0:4005:801::200e +testaddr 2607:f8b0:4005:801::200e testinterval 60 testtime 1507321987 test_set a b c d erdbeerschnitzel diff --git a/testing/btest/scripts/base/frameworks/input/config/enum-set.zeek b/testing/btest/scripts/base/frameworks/input/config/enum-set.zeek new file mode 100644 index 0000000000..1bfd964202 --- /dev/null +++ b/testing/btest/scripts/base/frameworks/input/config/enum-set.zeek @@ -0,0 +1,46 @@ +# @TEST-EXEC: btest-bg-run zeek zeek -b %INPUT +# @TEST-EXEC: 
btest-bg-wait 10 +# @TEST-EXEC: btest-diff zeek/.stdout +# @TEST-EXEC: btest-diff zeek/config.log + +@TEST-START-FILE configfile4 +DPD::ignore_violations Analyzer::ANALYZER_SYSLOG +@TEST-END-FILE + +@load base/frameworks/config +@load base/frameworks/dpd + +redef exit_only_after_terminate = T; +redef InputConfig::empty_field = "EMPTY"; +redef InputConfig::set_separator = "\t"; + +type Idx: record { + option_name: string; +}; + +type Val: record { + option_val: string; +}; + +global currconfig: table[string] of string = table(); + +event InputConfig::new_value(name: string, source: string, id: string, value: any) + { + print id, lookup_ID(id); + print "---"; + print value; + print "---"; + Config::set_value(id, value); + print id, lookup_ID(id); + print "---"; + } + +event Input::end_of_data(name: string, source:string) + { + terminate(); + } + +event zeek_init() + { + Input::add_table([$reader=Input::READER_CONFIG, $source="../configfile4", $name="configuration", $idx=Idx, $val=Val, $destination=currconfig, $want_record=F]); + } diff --git a/testing/btest/scripts/base/frameworks/logging/ascii-json.zeek b/testing/btest/scripts/base/frameworks/logging/ascii-json.zeek index ab88225d97..6991e1336a 100644 --- a/testing/btest/scripts/base/frameworks/logging/ascii-json.zeek +++ b/testing/btest/scripts/base/frameworks/logging/ascii-json.zeek @@ -28,6 +28,7 @@ export { se: set[string]; vc: vector of count; ve: vector of string; + vn: vector of count; f: function(i: count) : string; } &log; } @@ -47,6 +48,10 @@ event zeek_init() local empty_set: set[string]; local empty_vector: vector of string; + local vector_with_null: vector of count; + vector_with_null[0] = 0; + vector_with_null[2] = 2; + Log::write(SSH::LOG, [ $b=T, $i=-42, @@ -64,6 +69,7 @@ event zeek_init() $se=empty_set, $vc=vector(10, 20, 30), $ve=empty_vector, + $vn=vector_with_null, $f=foo ]); } diff --git a/testing/btest/scripts/base/protocols/http/build-url.zeek b/testing/btest/scripts/base/protocols/http/build-url.zeek new file mode 100644 index 0000000000..0a1b5b53cf --- /dev/null +++ b/testing/btest/scripts/base/protocols/http/build-url.zeek @@ -0,0 +1,23 @@ +# @TEST-EXEC: zeek -r $TRACES/http/get.trace %INPUT >output +# @TEST-EXEC: btest-diff output + +function test(rec: HTTP::Info, expect: string) + { + local result = HTTP::build_url(rec); + print fmt("Have: %s Expected: %s -> %s", result, expect, (result == expect ? "SUCCESS" : "FAIL")); + } + +event http_request(c: connection, method: string, original_URI: string, unescaped_URI: string, version: string) &priority=5 + { + test(c$http, "192.150.187.43/download/CHANGES.bro-aux.txt"); + + # We fake some request instances for testing. 
+ c$http$id$resp_p = 123/tcp; + test(c$http, "192.150.187.43:123/download/CHANGES.bro-aux.txt"); + + c$http$uri = "/"; + test(c$http, "192.150.187.43:123/"); + + c$http$uri = "http://proxied.host/some/document"; + test(c$http, "http://proxied.host/some/document"); + } diff --git a/testing/btest/scripts/base/utils/json.test b/testing/btest/scripts/base/utils/json.test index 6e7854b744..3572bd3e07 100644 --- a/testing/btest/scripts/base/utils/json.test +++ b/testing/btest/scripts/base/utils/json.test @@ -93,11 +93,15 @@ event zeek_init() local ve3: vector of addr = vector(1.2.3.4); local ve4: vector of set[bool] = vector(set(T, F)); local ve5: vector of myrec1 = vector(myrec1($s="test", $c=2)); + local ve6: vector of count; + ve6[0] = 0; + ve6[2] = 2; print to_json(ve1); print to_json(ve2); print to_json(ve3); print to_json(ve4); print to_json(ve5, T); + print to_json(ve6); # Sets local st1: set[count] = set(); @@ -105,11 +109,13 @@ event zeek_init() local st3: set[addr] = set(1.2.3.4); local st4: set[myrec1] = set(myrec1($s="test")); local st5: set[myrec1] = set(myrec1($s="test", $c=2)); + local st6: set[string, count] = { ["one", 1], ["two", 2], ["three", 3] }; print to_json(st1); print to_json(st2); print to_json(st3); print to_json(st4); print to_json(st5, T); + print to_json(st6); # Tables local ta1: table[count] of addr = table(); @@ -122,4 +128,8 @@ event zeek_init() print to_json(ta3); print to_json(ta4); print to_json(ta5, T); + + # Opaque + local o1: opaque of topk = topk_init(5); + print to_json(o1); } diff --git a/testing/btest/signatures/tcp-syn-with-payload.zeek b/testing/btest/signatures/tcp-syn-with-payload.zeek new file mode 100644 index 0000000000..0b20c23353 --- /dev/null +++ b/testing/btest/signatures/tcp-syn-with-payload.zeek @@ -0,0 +1,20 @@ +# @TEST-EXEC: zeek -b -s payload-http.sig -r $TRACES/tcp/payload-syn.pcap %INPUT >payload-syn.out +# @TEST-EXEC: zeek -b -s payload-http.sig -r $TRACES/tcp/payload-synack.pcap %INPUT >payload-synack.out +# @TEST-EXEC: zeek -b -s payload-http.sig -r $TRACES/tcp/tcp-fast-open.pcap %INPUT >tcp-fast-open.out +# @TEST-EXEC: btest-diff payload-syn.out +# @TEST-EXEC: btest-diff payload-synack.out +# @TEST-EXEC: btest-diff tcp-fast-open.out + +@TEST-START-FILE payload-http.sig +signature test-signature { + ip-proto == tcp + dst-port = 80 + payload /.*passwd/ + event "payload of dst-port=80/tcp contains 'passwd'" +} +@TEST-END-FILE + +event signature_match(state: signature_state, msg: string, data: string) + { + print fmt("signature_match %s - %s", state$conn$id, msg); + } diff --git a/testing/scripts/travis-job b/testing/scripts/travis-job index 4c87925f33..e6274bfff1 100644 --- a/testing/scripts/travis-job +++ b/testing/scripts/travis-job @@ -9,6 +9,7 @@ # (if you don't, then the private tests will be skipped). 
LEAK_TEST_DISTRO="ubuntu_18.04_leaktest" +COVERITY_DISTRO="ubuntu_18.04" usage() { echo "usage: $0 CMD DISTRO" @@ -84,19 +85,19 @@ install_in_docker() { local_distro=$distro case $distro in centos_7) - distro_cmds="yum -y install gdb cmake make gcc gcc-c++ flex bison libpcap-devel openssl-devel git openssl which" + distro_cmds="yum -y install gdb make gcc gcc-c++ flex bison libpcap-devel openssl-devel git openssl which centos-release-scl && yum -y install devtoolset-7 && yum -y install epel-release && yum -y install cmake3" ;; debian_9) - distro_cmds="apt-get update; apt-get -y install gdb cmake make gcc g++ flex bison python libpcap-dev libssl-dev zlib1g-dev libkrb5-dev git sqlite3 curl bsdmainutils" + distro_cmds="apt-get update; apt-get -y install gdb cmake make flex bison python libpcap-dev libssl-dev zlib1g-dev libkrb5-dev git sqlite3 curl bsdmainutils clang-7 libc++-7-dev libc++abi-7-dev; update-alternatives --install /usr/bin/cc cc /usr/bin/clang-7 100; update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-7 100" ;; - fedora_28) - distro_cmds="yum -y install gdb cmake make gcc gcc-c++ flex bison libpcap-devel openssl-devel git sqlite findutils which; ln -s /usr/bin/python3 /usr/local/bin/python" + fedora_30) + distro_cmds="yum -y install gdb cmake make gcc gcc-c++ flex bison libpcap-devel openssl-devel git sqlite findutils which zlib-devel; ln -s /usr/bin/python3 /usr/local/bin/python" ;; ubuntu_16.04) - distro_cmds="apt-get update; apt-get -y install gdb cmake make gcc g++ flex bison python libpcap-dev libssl-dev zlib1g-dev libkrb5-dev git sqlite3 curl bsdmainutils" + distro_cmds="apt-get update; apt-get -y install gdb cmake make flex bison python libpcap-dev libssl-dev zlib1g-dev libkrb5-dev git sqlite3 curl bsdmainutils wget xz-utils; wget -q https://releases.llvm.org/9.0.0/clang+llvm-9.0.0-x86_64-linux-gnu-ubuntu-16.04.tar.xz; mkdir /clang-9; tar --strip-components=1 -C /clang-9 -xvf clang+llvm-9.0.0-x86_64-linux-gnu-ubuntu-16.04.tar.xz; update-alternatives --install /usr/bin/cc cc /clang-9/bin/clang 100; update-alternatives --install /usr/bin/c++ c++ /clang-9/bin/clang++ 100" ;; ubuntu_18.04) - distro_cmds="apt-get update; apt-get -y install gdb cmake make gcc g++ flex bison python3 libpcap-dev libssl-dev zlib1g-dev libkrb5-dev git sqlite3 curl bsdmainutils; ln -s /usr/bin/python3 /usr/local/bin/python" + distro_cmds="apt-get update; apt-get -y install wget xz-utils gdb cmake make gcc g++ flex bison python3 libpcap-dev libssl-dev zlib1g-dev libkrb5-dev git sqlite3 curl bsdmainutils; ln -s /usr/bin/python3 /usr/local/bin/python" ;; ${LEAK_TEST_DISTRO}) distro_cmds="apt-get update; apt-get -y install gdb cmake make gcc g++ flex bison python3 libpcap-dev libssl-dev zlib1g-dev libkrb5-dev git sqlite3 curl bsdmainutils google-perftools libgoogle-perftools4 libgoogle-perftools-dev; ln -s /usr/bin/python3 /usr/local/bin/python" @@ -116,19 +117,41 @@ install_in_docker() { # Build Zeek in a docker container. build_in_docker() { + recursed_distro=travis + + if [ "${TRAVIS_EVENT_TYPE}" = "cron" ]; then + recursed_distro=coverity + fi + # Pass the distro as a different environment variable name to docker since # the script will set $distro to "travis" as part of the invocation. - docker exec -e BUILD_DISTRO=${distro} zeektest sh testing/scripts/travis-job build travis + docker exec -e COV_TOKEN -e BUILD_DISTRO=${distro} zeektest sh testing/scripts/travis-job build ${recursed_distro} } # Run Zeek tests in a docker container. 
run_in_docker() { + recursed_distro=travis + + if [ "${TRAVIS_EVENT_TYPE}" = "cron" ]; then + recursed_distro=coverity + fi + # Pass the distro as a different environment variable name to docker since # the script will set $distro to "travis" as part of the invocation. - docker exec -t -e TRAVIS -e TRAVIS_PULL_REQUEST -e TESTING_PRIVATE_DEPLOYKEY -e BUILD_DISTRO=${distro} zeektest sh testing/scripts/travis-job run travis + docker exec -t -e TRAVIS -e TRAVIS_PULL_REQUEST -e TESTING_PRIVATE_DEPLOYKEY -e COV_TOKEN -e BUILD_DISTRO=${distro} zeektest sh testing/scripts/travis-job run ${recursed_distro} } +update_env() { + if [ "${BUILD_DISTRO}" = "centos_7" ]; then + source /opt/rh/devtoolset-7/enable + elif [ "${BUILD_DISTRO}" = "debian_9" ]; then + export CXXFLAGS="-stdlib=libc++" + elif [ "${BUILD_DISTRO}" = "ubuntu_16.04" ]; then + export CXXFLAGS="-stdlib=libc++" + export LD_LIBRARY_PATH=/clang-9/lib + fi +} # Build Zeek. build() { @@ -136,6 +159,8 @@ build() { # outside of Travis). make distclean > /dev/null + update_env + # Skip building Broker tests, python bindings, and zeekctl, as these are # not needed by the Zeek tests. If the distro is set for leak tests, enable # those options as well. @@ -178,6 +203,8 @@ get_private_tests() { # Run Zeek tests. run() { + update_env + ulimit -c unlimited ulimit -a echo @@ -312,29 +339,7 @@ fi set -e -if [ "${TRAVIS_EVENT_TYPE}" = "cron" ]; then - # This is a Travis CI cron job, so check the job number. - - # Extract second component of the job number. - if [ -z "${TRAVIS_JOB_NUMBER}" ]; then - echo "Error: TRAVIS_JOB_NUMBER is not defined (it should be defined by Travis CI)" - exit 1 - fi - job=`echo ${TRAVIS_JOB_NUMBER} | cut -d . -f 2` - - # If this isn't the first job in a Travis CI build, then just output a - # message and exit (this is not an error). - if [ "$job" != "1" ]; then - echo "Coverity scan is performed only in the first job of this build" - exit 0 - fi -fi - - if [ "${TRAVIS_EVENT_TYPE}" = "cron" ] || [ "$distro" = "coverity" ]; then - # Run coverity scan when this script is run from a Travis cron job, or - # if the user specifies the "coverity" distro. - # Check if the project token is available (this is a secret value and # should not be hard-coded in this script). This value can be found by # logging into the coverity scan web site and looking in the project @@ -343,13 +348,26 @@ if [ "${TRAVIS_EVENT_TYPE}" = "cron" ] || [ "$distro" = "coverity" ]; then echo "Error: COV_TOKEN is not defined (should be defined in environment variables section of Travis settings for this repo)" exit 1 fi +fi +if [ "${TRAVIS_EVENT_TYPE}" = "cron" ]; then + # This is a Travis CI cron job which only runs the Coverity build inside + # a single container. + + if [ "$distro" != "${COVERITY_DISTRO}" ]; then + echo "Coverity scan is performed on ${COVERITY_DISTRO}" + exit 0 + fi +fi + +if [ "$distro" = "coverity" ]; then # The "build" and "run" steps are split up into separate steps because the # build outputs thousands of lines (which are conveniently collapsed into # a single line when viewing the "Job log" on the Travis CI web site). 
 if [ "$step" = "install" ]; then
-    install_coverity
+    echo "No 'install' step needed for Coverity build"
 elif [ "$step" = "build" ]; then
+    install_coverity
     build_coverity
 elif [ "$step" = "run" ]; then
     run_coverity
diff --git a/zeek-config.h.in b/zeek-config.h.in
index d21a7bdc7c..343ab49363 100644
--- a/zeek-config.h.in
+++ b/zeek-config.h.in
@@ -163,13 +163,18 @@
 /* Define u_int8_t */
 #cmakedefine u_int8_t @u_int8_t@
 
-/* OpenBSD's bpf.h may not declare this data link type, but it's supposed to be
+/* OpenBSD's bpf.h may not declare some data link types, but they're used
    consistently for the same purpose on all platforms. */
 #cmakedefine HAVE_DLT_PPP_SERIAL
 #ifndef HAVE_DLT_PPP_SERIAL
 #define DLT_PPP_SERIAL @DLT_PPP_SERIAL@
 #endif
 
+#cmakedefine HAVE_DLT_NFLOG
+#ifndef HAVE_DLT_NFLOG
+#define DLT_NFLOG @DLT_NFLOG@
+#endif
+
 /* IPv6 Next Header values defined by RFC 3542 */
 #cmakedefine HAVE_IPPROTO_HOPOPTS
 #ifndef HAVE_IPPROTO_HOPOPTS