diff --git a/CHANGES b/CHANGES index 40afce7378..5fffea3198 100644 --- a/CHANGES +++ b/CHANGES @@ -1,4 +1,88 @@ +2.5-156 | 2017-06-13 11:01:56 -0700 + + * Add 2.5.1 news file to master. (Johanna Amann) + + * Remove link to no longer existing myricom plugin. (Johanna Amann) + +2.5-152 | 2017-06-05 15:16:49 -0700 + + * Remove non-existing links; this broke documentation build. (Johanna Amann) + + * Fix at_least in Version.bro - it did exactly the oposite of the documented + behavior. (Johanna Amann) + +2.5-147 | 2017-05-22 20:32:32 -0500 + + * Add nfs unittest. (Julien Wallior) + + * Added nfs_proc_rename event to rpc/nfs protocol analyzer. + (Roberto Del Valle Rodriguez) + + * Expand parsing of RPC Call packets to add Uid, Gid, Stamp, MachineName + and AuxGIDs (Julien Wallior) + + * Fix NFS protocol parser. (Julien Wallior) + +2.5-142 | 2017-05-22 00:08:52 -0500 + + * Add gzip log writing to the ascii writer. + + This feature can be enabled globally for all logs by setting + LogAscii::gzip_level to a value greater than 0. + + This feature can be enabled on a per-log basis by setting gzip-level in + $config to a value greater than 0. (Corelight) + +2.5-140 | 2017-05-12 15:31:32 -0400 + + * Lessen cluster load due to notice suppression. + (Johanna Amann, Justin Azoff) + +2.5-137 | 2017-05-04 11:37:48 -0500 + + * Add plugin hooks for log init and writing: HookLogInit and HookLogWrite. + (Corelight) + + * TLS: Fix compile warning (comparison between signed/unsigned). + + This was introduced with the addition of new TLS1.3 extensions. (Johanna Amann) + + +2.5-134 | 2017-05-01 10:34:34 -0500 + + * Add rename, unlink, and rmdir bifs. (Corelight) + +2.5-131 | 2017-04-21 14:27:16 -0700 + + * Guard more format strings with __attribute__((format)). (Johanna Amann) + + * Add support for two TLS 1.3 extensions. + + New events: + - event ssl_extension_supported_versions(c: connection, is_orig: bool, versions: index_vec) + - event ssl_extension_psk_key_exchange_modes(c: connection, is_orig: bool, modes: index_vec) (Johanna Amann) + +2.5-125 | 2017-04-17 22:02:39 +0200 + + * Documentation updates for loading Bro scripts. (Seth Hall) + +2.5-123 | 2017-04-10 13:30:14 -0700 + + * Fix some failing tests by increasing delay times. (Daniel Thayer) + + * Threading Types: add a bit of documentation to subnet type. (Johanna Amann) + + * Fixing couple issues reported by Coverity. (Robin Sommer) + +2.5-119 | 2017-04-07 10:30:09 -0700 + + * Fix the test group name in some broker test files. (Daniel Thayer) + + * NetControl: small rule_error changes (test, call fix). (Johanna Amann) + + * SSL: update dpd signature for TLS1.3. (Johanna Amann) + 2.5-115 | 2017-03-23 07:25:41 -0700 * Fix a test that was failing on some platforms. (Daniel Thayer) @@ -38,7 +122,7 @@ 2.5-84 | 2017-02-27 15:08:55 -0500 - * Change semantics of Broker's remote logging to match old communication + * Change semantics of Broker's remote logging to match old communication framework. (Robin Sommer) * Add and fix documentation for HookSetupAnalyzerTree (Johanna Amann) diff --git a/NEWS b/NEWS index 7fbc7cfd4f..8333ccf7f8 100644 --- a/NEWS +++ b/NEWS @@ -4,8 +4,35 @@ release. For an exhaustive list of changes, see the ``CHANGES`` file (note that submodules, such as BroControl and Broccoli, come with their own ``CHANGES``.) -Bro 2.6 -======= +Bro 2.5.1 +========= + +New Functionality +----------------- + +- Bro now includes bifs for rename, unlink, and rmdir. 
+ +- Bro now includes events for two extensions used by TLS 1.3: + ssl_extension_supported_versions and ssl_extension_psk_key_exchange_modes + +- Bro now includes hooks that can be used to interact with log processing + on the C++ level. + +- Bro now supports ERSPAN. Currently this ignores the ethernet header that is + carried over the tunnel; if a MAC is logged currently only the outer MAC + is returned. + +- Added a new BroControl option CrashExpireInterval to enable + "broctl cron" to remove crash directories that are older than the + specified number of days (the default value is 0, which means crash + directories never expire). + +- Added a new BroControl option MailReceivingPackets to control + whether or not "broctl cron" will mail a warning when it notices + that no packets were seen on an interface. + +- There is a new broctl command-line option "--version" which outputs + the BroControl version. Changed Functionality --------------------- @@ -13,10 +40,20 @@ Changed Functionality - The input framework's Ascii reader is now more resilient. If an input is marked to reread a file when it changes and the file didn't exist during a check Bro would stop watching the file in previous versions. - The same could happen with bad data in a line of a file. These - situations do not cause Bro to stop watching input files anymore. The + The same could happen with bad data in a line of a file. These + situations do not cause Bro to stop watching input files anymore. The old behavior is available through settings in the Ascii reader. +- The RADIUS scripts have been reworked. Requests are now logged even if + there is no response. The new framed_addr field in the log indicates + if the radius server is hinting at an address for the client. The ttl + field indicates how quickly the server is replying to the network access + server. + +- With the introduction of the Bro package manager, the Bro plugin repository + is considered deprecated. The af_packet, postgresql, and tcprs plugins have + already been removed and are available via bro-pkg. 
+ Bro 2.5 ======= diff --git a/VERSION b/VERSION index fc82bb53e0..6d510ac694 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.5-115 +2.5-156 diff --git a/aux/binpac b/aux/binpac index 0f1ecfa972..27356ae52f 160000 --- a/aux/binpac +++ b/aux/binpac @@ -1 +1 @@ -Subproject commit 0f1ecfa97236635fb93e013404e6b30d6c506ddd +Subproject commit 27356ae52ff9ff639b53a7325ea3262e1a13b704 diff --git a/aux/bro-aux b/aux/bro-aux index 51bf79d3fc..43f4b90bba 160000 --- a/aux/bro-aux +++ b/aux/bro-aux @@ -1 +1 @@ -Subproject commit 51bf79d3fc78b5e86c554afe7c24c44b025aa67f +Subproject commit 43f4b90bbaf87dae1a1073e7bf13301e58866011 diff --git a/aux/broccoli b/aux/broccoli index ed52e3414b..25907f6b0a 160000 --- a/aux/broccoli +++ b/aux/broccoli @@ -1 +1 @@ -Subproject commit ed52e3414b31b05ec9abed627b4153c8e2243441 +Subproject commit 25907f6b0a5347304d1ec8213bfad3d114260ca0 diff --git a/aux/broctl b/aux/broctl index cf7ea4e1ad..d3e6cdfba4 160000 --- a/aux/broctl +++ b/aux/broctl @@ -1 +1 @@ -Subproject commit cf7ea4e1ad18920058f32e95bbea3bdd765b6094 +Subproject commit d3e6cdfba496879bd55542c668ea959f524bd723 diff --git a/aux/broker b/aux/broker index 23def70c44..862c982f35 160000 --- a/aux/broker +++ b/aux/broker @@ -1 +1 @@ -Subproject commit 23def70c44128d19138029615dd154359286e111 +Subproject commit 862c982f35e342fb10fa281120135cf61eca66bb diff --git a/aux/btest b/aux/btest index dceda16935..e638fc65aa 160000 --- a/aux/btest +++ b/aux/btest @@ -1 +1 @@ -Subproject commit dceda169351ddd0c7fe7a5ae5496be1d7af2367b +Subproject commit e638fc65aa12bd136594451b8c185a7a01ef3e9a diff --git a/aux/plugins b/aux/plugins index c4b5df3aa8..00d039442b 160000 --- a/aux/plugins +++ b/aux/plugins @@ -1 +1 @@ -Subproject commit c4b5df3aa8e5c58a2dc5e5040c7da8369894f24d +Subproject commit 00d039442b97ba545e6020200d96a3cba9d9181b diff --git a/cmake b/cmake index d29fbf6152..79f2b2e944 160000 --- a/cmake +++ b/cmake @@ -1 +1 @@ -Subproject commit d29fbf6152e54fbb536910af02a80874b1917311 +Subproject commit 79f2b2e944da77774675be4d5254156451967371 diff --git a/doc/components/bro-plugins/af_packet/README.rst b/doc/components/bro-plugins/af_packet/README.rst deleted file mode 120000 index b8f745bed2..0000000000 --- a/doc/components/bro-plugins/af_packet/README.rst +++ /dev/null @@ -1 +0,0 @@ -../../../../aux/plugins/af_packet/README \ No newline at end of file diff --git a/doc/components/bro-plugins/myricom/README.rst b/doc/components/bro-plugins/myricom/README.rst deleted file mode 120000 index 3bfabcdae3..0000000000 --- a/doc/components/bro-plugins/myricom/README.rst +++ /dev/null @@ -1 +0,0 @@ -../../../../aux/plugins/myricom/README \ No newline at end of file diff --git a/doc/components/bro-plugins/postgresql/README.rst b/doc/components/bro-plugins/postgresql/README.rst deleted file mode 120000 index b8c815c45b..0000000000 --- a/doc/components/bro-plugins/postgresql/README.rst +++ /dev/null @@ -1 +0,0 @@ -../../../../aux/plugins/postgresql/README \ No newline at end of file diff --git a/doc/components/bro-plugins/tcprs/README.rst b/doc/components/bro-plugins/tcprs/README.rst deleted file mode 120000 index c0e84fd579..0000000000 --- a/doc/components/bro-plugins/tcprs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../../../../aux/plugins/tcprs/README \ No newline at end of file diff --git a/doc/quickstart/index.rst b/doc/quickstart/index.rst index 811fad53e1..d7e0491501 100644 --- a/doc/quickstart/index.rst +++ b/doc/quickstart/index.rst @@ -316,9 +316,8 @@ Analyzing live traffic from an interface is simple: bro -i en0 -``en0`` can 
be replaced by the interface of your choice and for the list of -scripts, you can just use "all" for now to perform all the default analysis -that's available. +``en0`` can be replaced by the interface of your choice. A selection +of common base scripts will be loaded by default. Bro will output log files into the working directory. @@ -326,22 +325,6 @@ Bro will output log files into the working directory. capturing as an unprivileged user and checksum offloading are particularly relevant at this point. -To use the site-specific ``local.bro`` script, just add it to the -command-line: - -.. console:: - - bro -i en0 local - -This will cause Bro to print a warning about lacking the -``Site::local_nets`` variable being configured. You can supply this -information at the command line like this (supply your "local" subnets -in place of the example subnets): - -.. console:: - - bro -r mypackets.trace local "Site::local_nets += { 1.2.3.0/24, 5.6.7.0/24 }" - Reading Packet Capture (pcap) Files ----------------------------------- @@ -373,7 +356,6 @@ script that we include as a suggested configuration: bro -r mypackets.trace local - Telling Bro Which Scripts to Load --------------------------------- A command-line invocation of Bro typically looks like: .. console:: - bro + bro Where the last arguments are the specific policy scripts that this Bro instance will load. These arguments don't have to include the ``.bro`` -file extension, and if the corresponding script resides under the default -installation path, ``$PREFIX/share/bro``, then it requires no path -qualification. Further, a directory of scripts can be specified as -an argument to be loaded as a "package" if it contains a ``__load__.bro`` -script that defines the scripts that are part of the package. +file extension, and if the corresponding script resides in the default +search path, then it requires no path qualification. The following +directories are included in the default search path for Bro scripts:: + + ./ + <prefix>/share/bro/ + <prefix>/share/bro/policy/ + <prefix>/share/bro/site/ -This example does all of the base analysis (primarily protocol -logging) and adds SSL certificate validation. +These prefix paths can be used to load scripts like this: .. console:: - bro -r mypackets.trace protocols/ssl/validate-certs + bro -r mypackets.trace frameworks/files/extract-all + +This will load the +``<prefix>/share/bro/policy/frameworks/files/extract-all.bro`` script which will +cause Bro to extract all of the files it discovers in the PCAP. + +.. note:: If one wants Bro to be able to load scripts that live outside the + default directories in Bro's installation root, the full path to the file(s) + must be provided. See the default search path by running ``bro --help``. You might notice that a script you load from the command line uses the ``@load`` directive in the Bro language to declare dependence on other scripts. This directive is similar to the ``#include`` of C/C++, except the semantics are, "load this script if it hasn't already been loaded." -.. note:: If one wants Bro to be able to load scripts that live outside the - default directories in Bro's installation root, the ``BROPATH`` environment - variable will need to be extended to include all the directories that need - to be searched for scripts. See the default search path by doing - ``bro --help``. +Further, a directory of scripts can be specified as +an argument to be loaded as a "package" if it contains a ``__load__.bro`` +script that defines the scripts that are part of the package.
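As an illustration of the "package" mechanism described just above, a hypothetical directory (the name ``my-analysis`` and its member scripts are invented for this sketch; only the ``__load__.bro`` convention comes from the documentation text) could be laid out and loaded like this:

    # my-analysis/__load__.bro
    @load ./counts
    @load ./alerts

    # my-analysis/counts.bro and my-analysis/alerts.bro hold the actual logic.
    # The whole package is then loaded by naming the directory on the
    # command line:
    #
    #   bro -r mypackets.trace my-analysis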
+ +Local site customization +------------------------ + +There is one script that is installed which is considered "local site +customization" and is not overwritten when upgrades take place. To use +the site-specific ``local.bro`` script, just add it to the command-line (can +also be loaded through scripts with @load): + +.. console:: + + bro -i en0 local + +This causes Bro to load a script that prints a warning about lacking the +``Site::local_nets`` variable being configured. You can supply this +information at the command line like this (supply your "local" subnets +in place of the example subnets): + +.. console:: + + bro -r mypackets.trace local "Site::local_nets += { 1.2.3.0/24, 5.6.7.0/24 }" + +When running with Broctl, this value is set by configuring the ``networks.cfg`` +file. Running Bro Without Installing ------------------------------ diff --git a/scripts/base/frameworks/logging/writers/ascii.bro b/scripts/base/frameworks/logging/writers/ascii.bro index c10c86145e..bbf11c26e7 100644 --- a/scripts/base/frameworks/logging/writers/ascii.bro +++ b/scripts/base/frameworks/logging/writers/ascii.bro @@ -26,6 +26,13 @@ export { ## This option is also available as a per-filter ``$config`` option. const use_json = F &redef; + ## Define the gzip level to compress the logs. If 0, then no gzip + ## compression is performed. Enabling compression also changes + ## the log file name extension to include ".gz". + ## + ## This option is also available as a per-filter ``$config`` option. + const gzip_level = 0 &redef; + ## Format of timestamps when writing out JSON. By default, the JSON ## formatter will use double values for timestamps which represent the ## number of seconds from the UNIX epoch. diff --git a/scripts/base/frameworks/netcontrol/plugins/openflow.bro b/scripts/base/frameworks/netcontrol/plugins/openflow.bro index 07be594b57..c528a1ba3e 100644 --- a/scripts/base/frameworks/netcontrol/plugins/openflow.bro +++ b/scripts/base/frameworks/netcontrol/plugins/openflow.bro @@ -318,7 +318,7 @@ function openflow_add_rule(p: PluginState, r: Rule) : bool ++flow_mod$cookie; } else - event rule_error(r, p, "Error while executing OpenFlow::flow_mod"); + event NetControl::rule_error(r, p, "Error while executing OpenFlow::flow_mod"); } return T; @@ -338,7 +338,7 @@ function openflow_remove_rule(p: PluginState, r: Rule, reason: string) : bool of_messages[r$cid, flow_mod$command] = OfTable($p=p, $r=r); else { - event rule_error(r, p, "Error while executing OpenFlow::flow_mod"); + event NetControl::rule_error(r, p, "Error while executing OpenFlow::flow_mod"); return F; } diff --git a/scripts/base/frameworks/notice/cluster.bro b/scripts/base/frameworks/notice/cluster.bro index 3c3fbc6d36..2a45f957a8 100644 --- a/scripts/base/frameworks/notice/cluster.bro +++ b/scripts/base/frameworks/notice/cluster.bro @@ -21,10 +21,10 @@ redef Cluster::manager2worker_events += /Notice::begin_suppression/; redef Cluster::worker2manager_events += /Notice::cluster_notice/; @if ( Cluster::local_node_type() != Cluster::MANAGER ) -event Notice::begin_suppression(n: Notice::Info) +event Notice::begin_suppression(ts: time, suppress_for: interval, note: Type, identifier: string) { - local suppress_until = n$ts + n$suppress_for; - suppressing[n$note, n$identifier] = suppress_until; + local suppress_until = ts + suppress_for; + suppressing[note, identifier] = suppress_until; } @endif diff --git a/scripts/base/frameworks/notice/main.bro b/scripts/base/frameworks/notice/main.bro index a203f6a772..aa88c26174 100644 --- 
a/scripts/base/frameworks/notice/main.bro +++ b/scripts/base/frameworks/notice/main.bro @@ -261,9 +261,14 @@ export { ## This event is generated when a notice begins to be suppressed. ## - ## n: The record containing notice data regarding the notice type - ## about to be suppressed. - global begin_suppression: event(n: Notice::Info); + ## ts: time indicating then when the notice to be suppressed occured. + ## + ## suppress_for: length of time that this notice should be suppressed. + ## + ## note: The :bro:type:`Notice::Type` of the notice. + ## + ## identifier: The identifier string of the notice that should be suppressed. + global begin_suppression: event(ts: time, suppress_for: interval, note: Type, identifier: string); ## A function to determine if an event is supposed to be suppressed. ## @@ -504,7 +509,7 @@ hook Notice::notice(n: Notice::Info) &priority=-5 { local suppress_until = n$ts + n$suppress_for; suppressing[n$note, n$identifier] = suppress_until; - event Notice::begin_suppression(n); + event Notice::begin_suppression(n$ts, n$suppress_for, n$note, n$identifier); } } diff --git a/scripts/base/init-bare.bro b/scripts/base/init-bare.bro index d9691e63aa..1df5d747c9 100644 --- a/scripts/base/init-bare.bro +++ b/scripts/base/init-bare.bro @@ -2145,6 +2145,16 @@ export { rep_dur: interval; ## The length in bytes of the reply. rep_len: count; + ## The user id of the reply. + rpc_uid: count; + ## The group id of the reply. + rpc_gid: count; + ## The stamp of the reply. + rpc_stamp: count; + ## The machine name of the reply. + rpc_machine_name: string; + ## The auxiliary ids of the reply. + rpc_auxgids: index_vec; }; ## NFS file attributes. Field names are based on RFC 1813. @@ -2175,6 +2185,16 @@ export { fname: string; ##< The name of the file we are interested in. }; + ## NFS *rename* arguments. + ## + ## .. bro:see:: nfs_proc_rename + type renameopargs_t : record { + src_dirfh : string; + src_fname : string; + dst_dirfh : string; + dst_fname : string; + }; + ## NFS lookup reply. If the lookup failed, *dir_attr* may be set. If the ## lookup succeeded, *fh* is always set and *obj_attr* and *dir_attr* ## may be set. @@ -2267,6 +2287,16 @@ export { dir_post_attr: fattr_t &optional; ##< Optional attributes associated w/ dir. }; + ## NFS reply for *rename*. Corresponds to *wcc_data* in the spec. + ## + ## .. bro:see:: nfs_proc_rename + type renameobj_reply_t: record { + src_dir_pre_attr: wcc_attr_t; + src_dir_post_attr: fattr_t; + dst_dir_pre_attr: wcc_attr_t; + dst_dir_post_attr: fattr_t; + }; + ## NFS *readdir* arguments. Used for both *readdir* and *readdirplus*. ## ## .. bro:see:: nfs_proc_readdir diff --git a/scripts/base/misc/version.bro b/scripts/base/misc/version.bro index 259b7b1127..1dce1310df 100644 --- a/scripts/base/misc/version.bro +++ b/scripts/base/misc/version.bro @@ -86,5 +86,5 @@ export { function at_least(version_string: string): bool { - return Version::parse(version_string)$version_number >= Version::number; + return Version::number >= Version::parse(version_string)$version_number; } diff --git a/src/3rdparty b/src/3rdparty index 5d03436d9d..1dc8599df2 160000 --- a/src/3rdparty +++ b/src/3rdparty @@ -1 +1 @@ -Subproject commit 5d03436d9db8a6cbaee1f459d654f977ce722467 +Subproject commit 1dc8599df24112504aac5e1256b478eec5054848 diff --git a/src/Reporter.h b/src/Reporter.h index 19cdbb7e82..85b9a483a2 100644 --- a/src/Reporter.h +++ b/src/Reporter.h @@ -66,11 +66,11 @@ public: // Report a runtime error in evaluating a Bro script expression. 
This // function will not return but raise an InterpreterException. - void ExprRuntimeError(const Expr* expr, const char* fmt, ...); + void ExprRuntimeError(const Expr* expr, const char* fmt, ...) __attribute__((format(printf, 3, 4))); // Report a runtime error in evaluating a Bro script expression. This // function will not return but raise an InterpreterException. - void RuntimeError(const Location* location, const char* fmt, ...); + void RuntimeError(const Location* location, const char* fmt, ...) __attribute__((format(printf, 3, 4))); // Report a traffic weirdness, i.e., an unexpected protocol situation // that may lead to incorrectly processing a connnection. @@ -123,7 +123,7 @@ public: private: void DoLog(const char* prefix, EventHandlerPtr event, FILE* out, Connection* conn, val_list* addl, bool location, bool time, - const char* postfix, const char* fmt, va_list ap); + const char* postfix, const char* fmt, va_list ap) __attribute__((format(printf, 10, 0))); // The order if addl, name needs to be like that since fmt_name can // contain format specifiers diff --git a/src/analyzer/protocol/rpc/NFS.cc b/src/analyzer/protocol/rpc/NFS.cc index 8a2620e2e5..03cd91e573 100644 --- a/src/analyzer/protocol/rpc/NFS.cc +++ b/src/analyzer/protocol/rpc/NFS.cc @@ -1,6 +1,7 @@ // See the file "COPYING" in the main distribution directory for copyright. #include +#include #include "bro-config.h" @@ -68,6 +69,10 @@ int NFS_Interp::RPC_BuildCall(RPC_CallInfo* c, const u_char*& buf, int& n) callarg = nfs3_diropargs(buf, n); break; + case BifEnum::NFS3::PROC_RENAME: + callarg = nfs3_renameopargs(buf, n); + break; + case BifEnum::NFS3::PROC_READDIR: callarg = nfs3_readdirargs(false, buf, n); break; @@ -196,6 +201,11 @@ int NFS_Interp::RPC_BuildReply(RPC_CallInfo* c, BifEnum::rpc_status rpc_status, event = nfs_proc_rmdir; break; + case BifEnum::NFS3::PROC_RENAME: + reply = nfs3_renameobj_reply(buf, n); + event = nfs_proc_rename; + break; + case BifEnum::NFS3::PROC_READDIR: reply = nfs3_readdir_reply(false, buf, n, nfs_status); event = nfs_proc_readdir; @@ -250,8 +260,9 @@ int NFS_Interp::RPC_BuildReply(RPC_CallInfo* c, BifEnum::rpc_status rpc_status, analyzer->ConnectionEvent(event, vl); } + else + Unref(reply); - Unref(reply); return 1; } @@ -288,6 +299,10 @@ val_list* NFS_Interp::event_common_vl(RPC_CallInfo *c, BifEnum::rpc_status rpc_s // These are the first parameters for each nfs_* event ... 
val_list *vl = new val_list; vl->append(analyzer->BuildConnVal()); + VectorVal* auxgids = new VectorVal(internal_type("index_vec")->AsVectorType()); + + for ( size_t i = 0; i < c->AuxGIDs().size(); ++i ) + auxgids->Assign(i, new Val(c->AuxGIDs()[i], TYPE_COUNT)); RecordVal *info = new RecordVal(BifType::Record::NFS3::info_t); info->Assign(0, new EnumVal(rpc_status, BifType::Enum::rpc_status)); @@ -298,6 +313,11 @@ val_list* NFS_Interp::event_common_vl(RPC_CallInfo *c, BifEnum::rpc_status rpc_s info->Assign(5, new Val(rep_start_time, TYPE_TIME)); info->Assign(6, new Val(rep_last_time-rep_start_time, TYPE_INTERVAL)); info->Assign(7, new Val(reply_len, TYPE_COUNT)); + info->Assign(8, new Val(c->Uid(), TYPE_COUNT)); + info->Assign(9, new Val(c->Gid(), TYPE_COUNT)); + info->Assign(10, new Val(c->Stamp(), TYPE_COUNT)); + info->Assign(11, new StringVal(c->MachineName())); + info->Assign(12, auxgids); vl->append(info); return vl; @@ -374,6 +394,17 @@ RecordVal *NFS_Interp::nfs3_diropargs(const u_char*& buf, int& n) return diropargs; } +RecordVal *NFS_Interp::nfs3_renameopargs(const u_char*& buf, int& n) + { + RecordVal *renameopargs = new RecordVal(BifType::Record::NFS3::renameopargs_t); + + renameopargs->Assign(0, nfs3_fh(buf, n)); + renameopargs->Assign(1, nfs3_filename(buf, n)); + renameopargs->Assign(2, nfs3_fh(buf, n)); + renameopargs->Assign(3, nfs3_filename(buf, n)); + + return renameopargs; + } RecordVal* NFS_Interp::nfs3_post_op_attr(const u_char*& buf, int& n) { @@ -558,6 +589,19 @@ RecordVal* NFS_Interp::nfs3_delobj_reply(const u_char*& buf, int& n) return rep; } +RecordVal* NFS_Interp::nfs3_renameobj_reply(const u_char*& buf, int& n) + { + RecordVal *rep = new RecordVal(BifType::Record::NFS3::renameobj_reply_t); + + // wcc_data + rep->Assign(0, nfs3_pre_op_attr(buf, n)); + rep->Assign(1, nfs3_post_op_attr(buf, n)); + rep->Assign(2, nfs3_pre_op_attr(buf, n)); + rep->Assign(3, nfs3_post_op_attr(buf, n)); + + return rep; + } + RecordVal* NFS_Interp::nfs3_readdirargs(bool isplus, const u_char*& buf, int&n) { RecordVal *args = new RecordVal(BifType::Record::NFS3::readdirargs_t); @@ -646,7 +690,7 @@ Val* NFS_Interp::ExtractBool(const u_char*& buf, int& n) NFS_Analyzer::NFS_Analyzer(Connection* conn) - : RPC_Analyzer("RPC", conn, new NFS_Interp(this)) + : RPC_Analyzer("NFS", conn, new NFS_Interp(this)) { orig_rpc = resp_rpc = 0; } diff --git a/src/analyzer/protocol/rpc/NFS.h b/src/analyzer/protocol/rpc/NFS.h index ba6cab38e2..85fb10ab49 100644 --- a/src/analyzer/protocol/rpc/NFS.h +++ b/src/analyzer/protocol/rpc/NFS.h @@ -37,6 +37,7 @@ protected: EnumVal* nfs3_ftype(const u_char*& buf, int& n); RecordVal* nfs3_wcc_attr(const u_char*& buf, int& n); RecordVal* nfs3_diropargs(const u_char*&buf, int &n); + RecordVal* nfs3_renameopargs(const u_char*&buf, int &n); StringVal* nfs3_filename(const u_char*& buf, int& n); StringVal* nfs3_nfspath(const u_char*& buf, int& n) { @@ -54,6 +55,7 @@ protected: RecordVal* nfs3_write_reply(const u_char*& buf, int& n, BifEnum::NFS3::status_t status); RecordVal* nfs3_newobj_reply(const u_char*& buf, int&n, BifEnum::NFS3::status_t status); RecordVal* nfs3_delobj_reply(const u_char*& buf, int& n); + RecordVal* nfs3_renameobj_reply(const u_char*& buf, int& n); StringVal* nfs3_post_op_fh(const u_char*& buf, int& n); RecordVal* nfs3_readdirargs(bool isplus, const u_char*& buf, int&n); RecordVal* nfs3_readdir_reply(bool isplus, const u_char*& buf, int&n, BifEnum::NFS3::status_t status); diff --git a/src/analyzer/protocol/rpc/RPC.cc b/src/analyzer/protocol/rpc/RPC.cc 
index aff6bfefc0..6ccd8008b8 100644 --- a/src/analyzer/protocol/rpc/RPC.cc +++ b/src/analyzer/protocol/rpc/RPC.cc @@ -40,7 +40,19 @@ RPC_CallInfo::RPC_CallInfo(uint32 arg_xid, const u_char*& buf, int& n, double ar prog = extract_XDR_uint32(buf, n); vers = extract_XDR_uint32(buf, n); proc = extract_XDR_uint32(buf, n); - cred_flavor = skip_XDR_opaque_auth(buf, n); + cred_flavor = extract_XDR_uint32(buf, n); + int cred_opaque_n, machinename_n; + const u_char* cred_opaque = extract_XDR_opaque(buf, n, cred_opaque_n); + stamp = extract_XDR_uint32(cred_opaque, cred_opaque_n); + const u_char* tmp = extract_XDR_opaque(cred_opaque, cred_opaque_n, machinename_n); + machinename = std::string(reinterpret_cast<const char*>(tmp), machinename_n); + uid = extract_XDR_uint32(cred_opaque, cred_opaque_n); + gid = extract_XDR_uint32(cred_opaque, cred_opaque_n); + size_t number_of_gids = extract_XDR_uint32(cred_opaque, cred_opaque_n); + + for ( auto i = 0u; i < number_of_gids; ++i ) + auxgids.push_back(extract_XDR_uint32(cred_opaque, cred_opaque_n)); + verf_flavor = skip_XDR_opaque_auth(buf, n); header_len = call_n - n; diff --git a/src/analyzer/protocol/rpc/RPC.h b/src/analyzer/protocol/rpc/RPC.h index e87f8afa95..ab7b3968c7 100644 --- a/src/analyzer/protocol/rpc/RPC.h +++ b/src/analyzer/protocol/rpc/RPC.h @@ -62,6 +62,11 @@ public: uint32 Program() const { return prog; } uint32 Version() const { return vers; } uint32 Proc() const { return proc; } + uint32 Uid() const { return uid; } + uint32 Gid() const { return gid; } + uint32 Stamp() const { return stamp; } + const std::string& MachineName() const { return machinename; } + const std::vector<uint32>& AuxGIDs() const { return auxgids; } double StartTime() const { return start_time; } void SetStartTime(double t) { start_time = t; } @@ -78,8 +83,12 @@ public: protected: uint32 xid, rpc_version, prog, vers, proc; - uint32 cred_flavor, verf_flavor; + uint32 cred_flavor, stamp; + uint32 uid, gid; + std::vector<uint32> auxgids; + uint32 verf_flavor; u_char* call_buf; // copy of original call buffer + std::string machinename; double start_time; double last_time; int rpc_len; // size of the full RPC call, incl. xid and msg_type diff --git a/src/analyzer/protocol/rpc/events.bif b/src/analyzer/protocol/rpc/events.bif index fc42aac6a6..881faface1 100644 --- a/src/analyzer/protocol/rpc/events.bif +++ b/src/analyzer/protocol/rpc/events.bif @@ -274,6 +274,34 @@ event nfs_proc_remove%(c: connection, info: NFS3::info_t, req: NFS3::diropargs_t ## register a port for it or add a DPD payload signature. event nfs_proc_rmdir%(c: connection, info: NFS3::info_t, req: NFS3::diropargs_t, rep: NFS3::delobj_reply_t%); +## Generated for NFSv3 request/reply dialogues of type *rename*. The event is +## generated once we have either seen both the request and its corresponding +## reply, or an unanswered request has timed out. +## +## NFS is a service running on top of RPC. See `Wikipedia +## `__ for more +## information about the service. +## +## c: The RPC connection. +## +## info: Reports the status of the dialogue, along with some meta information. +## +## req: The arguments passed in the request. +## +## rep: The response returned in the reply. The values may not be valid if the +## request was unsuccessful. +## +## .. bro:see:: nfs_proc_create nfs_proc_getattr nfs_proc_lookup nfs_proc_mkdir +## nfs_proc_not_implemented nfs_proc_null nfs_proc_read nfs_proc_readdir +## nfs_proc_readlink nfs_proc_remove nfs_proc_rename nfs_proc_write +## nfs_reply_status rpc_call rpc_dialogue rpc_reply +## +## ..
todo:: Bro's current default configuration does not activate the protocol +## analyzer that generates this event; the corresponding script has not yet +## been ported to Bro 2.x. To still enable this event, one needs to +## register a port for it or add a DPD payload signature. +event nfs_proc_rename%(c: connection, info: NFS3::info_t, req: NFS3::renameopargs_t, rep: NFS3::renameobj_reply_t%); + ## Generated for NFSv3 request/reply dialogues of type *readdir*. The event is ## generated once we have either seen both the request and its corresponding ## reply, or an unanswered request has timed out. diff --git a/src/analyzer/protocol/smb/smb-time.pac b/src/analyzer/protocol/smb/smb-time.pac index 0ad97d20a3..52654c7a2b 100644 --- a/src/analyzer/protocol/smb/smb-time.pac +++ b/src/analyzer/protocol/smb/smb-time.pac @@ -30,6 +30,7 @@ function time_from_lanman(t: SMB_time, d: SMB_date, tz: uint16): Val lTime.tm_mday = ${d.day}; lTime.tm_mon = ${d.month}; lTime.tm_year = 1980 + ${d.year}; + lTime.tm_isdst = -1; double lResult = mktime(&lTime); return new Val(lResult + tz, TYPE_TIME); %} diff --git a/src/analyzer/protocol/ssl/tls-handshake-analyzer.pac b/src/analyzer/protocol/ssl/tls-handshake-analyzer.pac index a0d5f233b4..60d03f3dce 100644 --- a/src/analyzer/protocol/ssl/tls-handshake-analyzer.pac +++ b/src/analyzer/protocol/ssl/tls-handshake-analyzer.pac @@ -195,7 +195,7 @@ refine connection Handshake_Conn += { if ( versions_list ) { - for ( int i = 0; i < versions_list->size(); ++i ) + for ( unsigned int i = 0; i < versions_list->size(); ++i ) versions->Assign(i, new Val((*versions_list)[i], TYPE_COUNT)); } @@ -211,7 +211,7 @@ refine connection Handshake_Conn += { if ( mode_list ) { - for ( int i = 0; i < mode_list->size(); ++i ) + for ( unsigned int i = 0; i < mode_list->size(); ++i ) modes->Assign(i, new Val((*mode_list)[i], TYPE_COUNT)); } diff --git a/src/bro.bif b/src/bro.bif index e168016f5e..852f806230 100644 --- a/src/bro.bif +++ b/src/bro.bif @@ -4039,6 +4039,7 @@ function set_inactivity_timeout%(cid: conn_id, t: interval%): interval ## ## .. bro:see:: active_file open_for_append close write_file ## get_file_name set_buf flush_all mkdir enable_raw_output +## rmdir unlink rename function open%(f: string%): file %{ const char* file = f->CheckString(); @@ -4058,6 +4059,7 @@ function open%(f: string%): file ## ## .. bro:see:: active_file open close write_file ## get_file_name set_buf flush_all mkdir enable_raw_output +## rmdir unlink rename function open_for_append%(f: string%): file %{ return new Val(new BroFile(f->CheckString(), "a")); @@ -4071,6 +4073,7 @@ function open_for_append%(f: string%): file ## ## .. bro:see:: active_file open open_for_append write_file ## get_file_name set_buf flush_all mkdir enable_raw_output +## rmdir unlink rename function close%(f: file%): bool %{ return new Val(f->Close(), TYPE_BOOL); @@ -4086,6 +4089,7 @@ function close%(f: file%): bool ## ## .. bro:see:: active_file open open_for_append close ## get_file_name set_buf flush_all mkdir enable_raw_output +## rmdir unlink rename function write_file%(f: file, data: string%): bool %{ if ( ! f ) @@ -4106,6 +4110,7 @@ function write_file%(f: file, data: string%): bool ## ## .. bro:see:: active_file open open_for_append close ## get_file_name write_file flush_all mkdir enable_raw_output +## rmdir unlink rename function set_buf%(f: file, buffered: bool%): any %{ f->SetBuf(buffered); @@ -4118,6 +4123,7 @@ function set_buf%(f: file, buffered: bool%): any ## ## .. 
bro:see:: active_file open open_for_append close ## get_file_name write_file set_buf mkdir enable_raw_output +## rmdir unlink rename function flush_all%(%): bool %{ return new Val(fflush(0) == 0, TYPE_BOOL); @@ -4127,17 +4133,102 @@ function flush_all%(%): bool ## ## f: The directory name. ## -## Returns: Returns true if the operation succeeds or if *f* already exists, +## Returns: True if the operation succeeds or if *f* already exists, ## and false if the file creation fails. ## ## .. bro:see:: active_file open_for_append close write_file ## get_file_name set_buf flush_all enable_raw_output +## rmdir unlink rename function mkdir%(f: string%): bool %{ const char* filename = f->CheckString(); - if ( mkdir(filename, 0777) < 0 && errno != EEXIST ) + + if ( mkdir(filename, 0777) < 0 ) { - builtin_error("cannot create directory", @ARG@[0]); + int error = errno; + struct stat filestat; + // check if already exists and is directory. + if ( errno == EEXIST && stat(filename, &filestat) == 0 + && S_ISDIR(filestat.st_mode) ) + return new Val(1, TYPE_BOOL); + + builtin_error(fmt("cannot create directory '%s': %s", filename, + strerror(error))); + return new Val(0, TYPE_BOOL); + } + else + return new Val(1, TYPE_BOOL); + %} + + +## Removes a directory. +## +## d: The directory name. +## +## Returns: True if the operation succeeds, and false if the +## directory delete operation fails. +## +## .. bro:see:: active_file open_for_append close write_file +## get_file_name set_buf flush_all enable_raw_output +## mkdir unlink rename +function rmdir%(d: string%): bool + %{ + const char* dirname = d->CheckString(); + + if ( rmdir(dirname) < 0 ) + { + builtin_error(fmt("cannot remove directory '%s': %s", dirname, + strerror(errno))); + return new Val(0, TYPE_BOOL); + } + else + return new Val(1, TYPE_BOOL); + %} + +## Removes a file from a directory. +## +## f: the file to delete. +## +## Returns: True if the operation succeeds and the file was deleted, +## and false if the deletion fails. +## +## .. bro:see:: active_file open_for_append close write_file +## get_file_name set_buf flush_all enable_raw_output +## mkdir rmdir rename +function unlink%(f: string%): bool + %{ + const char* filename = f->CheckString(); + + if ( unlink(filename) < 0 ) + { + builtin_error(fmt("cannot unlink file '%s': %s", filename, + strerror(errno))); + return new Val(0, TYPE_BOOL); + } + else + return new Val(1, TYPE_BOOL); + %} + +## Renames a file from src_f to dst_f. +## +## src_f: the name of the file to rename. +## +## dest_f: the name of the file after the rename operation. +## +## Returns: True if the rename succeeds and false otherwise. +## +## .. bro:see:: active_file open_for_append close write_file +## get_file_name set_buf flush_all enable_raw_output +## mkdir rmdir unlink +function rename%(src_f: string, dst_f: string%): bool + %{ + const char* src_filename = src_f->CheckString(); + const char* dst_filename = dst_f->CheckString(); + + if ( rename(src_filename, dst_filename) < 0 ) + { + builtin_error(fmt("cannot rename file '%s' to '%s': %s", src_filename, + dst_filename, strerror(errno))); return new Val(0, TYPE_BOOL); } else diff --git a/src/input/Manager.h b/src/input/Manager.h index e7a1ebe2d6..8296ce9f8b 100644 --- a/src/input/Manager.h +++ b/src/input/Manager.h @@ -240,7 +240,7 @@ private: enum class ErrorType { INFO, WARNING, ERROR }; void ErrorHandler(const Stream* i, ErrorType et, bool reporter_send, const char* fmt, ...) 
__attribute__((format(printf, 5, 6))); - void ErrorHandler(const Stream* i, ErrorType et, bool reporter_send, const char* fmt, va_list ap); + void ErrorHandler(const Stream* i, ErrorType et, bool reporter_send, const char* fmt, va_list ap) __attribute__((format(printf, 5, 0))); Stream* FindStream(const string &name); Stream* FindStream(ReaderFrontend* reader); diff --git a/src/input/readers/ascii/Ascii.cc b/src/input/readers/ascii/Ascii.cc index 3440d9565d..d9120b91ae 100644 --- a/src/input/readers/ascii/Ascii.cc +++ b/src/input/readers/ascii/Ascii.cc @@ -50,6 +50,8 @@ Ascii::Ascii(ReaderFrontend *frontend) : ReaderBackend(frontend) { mtime = 0; suppress_warnings = false; + fail_on_file_problem = false; + fail_on_invalid_lines = false; } Ascii::~Ascii() diff --git a/src/logging/Manager.cc b/src/logging/Manager.cc index 8c720137e8..66a275736a 100644 --- a/src/logging/Manager.cc +++ b/src/logging/Manager.cc @@ -15,6 +15,8 @@ #include "WriterFrontend.h" #include "WriterBackend.h" #include "logging.bif.h" +#include "../plugin/Plugin.h" +#include "../plugin/Manager.h" #ifdef ENABLE_BROKER #include "broker/Manager.h" @@ -62,6 +64,7 @@ struct Manager::WriterInfo { WriterFrontend* writer; WriterBackend::WriterInfo* info; bool from_remote; + bool hook_initialized; string instantiating_filter; }; @@ -840,12 +843,26 @@ bool Manager::Write(EnumVal* id, RecordVal* columns) path = filter->path = filter->path_val->AsString()->CheckString(); } + WriterBackend::WriterInfo* info = 0; WriterFrontend* writer = 0; if ( w != stream->writers.end() ) { // We know this writer already. writer = w->second->writer; + info = w->second->info; + + if ( ! w->second->hook_initialized ) + { + auto wi = w->second; + wi->hook_initialized = true; + PLUGIN_HOOK_VOID(HOOK_LOG_INIT, + HookLogInit(filter->writer->Type()->AsEnumType()->Lookup(filter->writer->InternalInt()), + wi->instantiating_filter, filter->local, + filter->remote, *wi->info, + filter->num_fields, + filter->fields)); + } } else @@ -874,7 +891,7 @@ bool Manager::Write(EnumVal* id, RecordVal* columns) arg_fields[j] = new threading::Field(*filter->fields[j]); } - WriterBackend::WriterInfo* info = new WriterBackend::WriterInfo; + info = new WriterBackend::WriterInfo; info->path = copy_string(path.c_str()); info->network_time = network_time; @@ -909,6 +926,22 @@ bool Manager::Write(EnumVal* id, RecordVal* columns) threading::Value** vals = RecordToFilterVals(stream, filter, columns); + if ( ! PLUGIN_HOOK_WITH_RESULT(HOOK_LOG_WRITE, + HookLogWrite(filter->writer->Type()->AsEnumType()->Lookup(filter->writer->InternalInt()), + filter->name, *info, + filter->num_fields, + filter->fields, vals), + true) ) + { + DeleteVals(filter->num_fields, vals); + +#ifdef DEBUG + DBG_LOG(DBG_LOGGING, "Hook prevented writing to filter '%s' on stream '%s'", + filter->name.c_str(), stream->name.c_str()); +#endif + return true; + } + // Write takes ownership of vals. assert(writer); writer->Write(filter->num_fields, vals); @@ -1165,6 +1198,7 @@ WriterFrontend* Manager::CreateWriter(EnumVal* id, EnumVal* writer, WriterBacken winfo->postprocessor = 0; winfo->info = info; winfo->from_remote = from_remote; + winfo->hook_initialized = false; winfo->instantiating_filter = instantiating_filter; // Search for a corresponding filter for the writer/path pair and use its @@ -1214,6 +1248,15 @@ WriterFrontend* Manager::CreateWriter(EnumVal* id, EnumVal* writer, WriterBacken #endif winfo->writer->Init(num_fields, fields); + if ( ! 
from_remote ) + { + winfo->hook_initialized = true; + PLUGIN_HOOK_VOID(HOOK_LOG_INIT, + HookLogInit(writer->Type()->AsEnumType()->Lookup(writer->InternalInt()), + instantiating_filter, local, remote, + *winfo->info, num_fields, fields)); + } + InstallRotationTimer(winfo); return winfo->writer; diff --git a/src/logging/writers/ascii/Ascii.cc b/src/logging/writers/ascii/Ascii.cc index d6f5daa7e7..dec1689df4 100644 --- a/src/logging/writers/ascii/Ascii.cc +++ b/src/logging/writers/ascii/Ascii.cc @@ -24,6 +24,8 @@ Ascii::Ascii(WriterFrontend* frontend) : WriterBackend(frontend) tsv = false; use_json = false; formatter = 0; + gzip_level = 0; + gzfile = nullptr; InitConfigOptions(); init_options = InitFilterOptions(); @@ -34,6 +36,7 @@ void Ascii::InitConfigOptions() output_to_stdout = BifConst::LogAscii::output_to_stdout; include_meta = BifConst::LogAscii::include_meta; use_json = BifConst::LogAscii::use_json; + gzip_level = BifConst::LogAscii::gzip_level; separator.assign( (const char*) BifConst::LogAscii::separator->Bytes(), @@ -89,6 +92,16 @@ bool Ascii::InitFilterOptions() } } + else if ( strcmp(i->first, "gzip_level" ) == 0 ) + { + gzip_level = atoi(i->second); + + if ( gzip_level < 0 || gzip_level > 9 ) + { + Error("invalid value for 'gzip_level', must be a number between 0 and 9."); + return false; + } + } else if ( strcmp(i->first, "use_json") == 0 ) { if ( strcmp(i->second, "T") == 0 ) @@ -192,7 +205,7 @@ bool Ascii::WriteHeaderField(const string& key, const string& val) { string str = meta_prefix + key + separator + val + "\n"; - return safe_write(fd, str.c_str(), str.length()); + return InternalWrite(fd, str.c_str(), str.length()); } void Ascii::CloseFile(double t) @@ -203,8 +216,9 @@ void Ascii::CloseFile(double t) if ( include_meta && ! tsv ) WriteHeaderField("close", Timestamp(0)); - safe_close(fd); + InternalClose(fd); fd = 0; + gzfile = nullptr; } bool Ascii::DoInit(const WriterInfo& info, int num_fields, const Field* const * fields) @@ -219,7 +233,8 @@ bool Ascii::DoInit(const WriterInfo& info, int num_fields, const Field* const * if ( output_to_stdout ) path = "/dev/stdout"; - fname = IsSpecial(path) ? path : path + "." + LogExt(); + fname = IsSpecial(path) ? path : path + "." + LogExt() + + (gzip_level > 0 ? ".gz" : ""); fd = open(fname.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666); @@ -231,6 +246,31 @@ bool Ascii::DoInit(const WriterInfo& info, int num_fields, const Field* const * return false; } + if ( gzip_level > 0 ) + { + if ( gzip_level < 0 || gzip_level > 9 ) + { + Error("invalid value for 'gzip_level', must be a number between 0 and 9."); + return false; + } + + char mode[4]; + snprintf(mode, sizeof(mode), "wb%d", gzip_level); + errno = 0; // errno will only be set under certain circumstances by gzdopen. + gzfile = gzdopen(fd, mode); + + if ( gzfile == nullptr ) + { + Error(Fmt("cannot gzip %s: %s", fname.c_str(), + Strerror(errno))); + return false; + } + } + else + { + gzfile = nullptr; + } + if ( ! WriteHeader(path) ) { Error(Fmt("error writing to %s: %s", fname.c_str(), Strerror(errno))); @@ -264,7 +304,7 @@ bool Ascii::WriteHeader(const string& path) { // A single TSV-style line is all we need. string str = names + "\n"; - if ( ! safe_write(fd, str.c_str(), str.length()) ) + if ( ! InternalWrite(fd, str.c_str(), str.length()) ) return false; return true; @@ -275,7 +315,7 @@ bool Ascii::WriteHeader(const string& path) + get_escaped_string(separator, false) + "\n"; - if ( ! safe_write(fd, str.c_str(), str.length()) ) + if ( ! 
InternalWrite(fd, str.c_str(), str.length()) ) return false; if ( ! (WriteHeaderField("set_separator", get_escaped_string(set_separator, false)) && @@ -337,14 +377,14 @@ bool Ascii::DoWrite(int num_fields, const Field* const * fields, char hex[4] = {'\\', 'x', '0', '0'}; bytetohex(bytes[0], hex + 2); - if ( ! safe_write(fd, hex, 4) ) + if ( ! InternalWrite(fd, hex, 4) ) goto write_error; ++bytes; --len; } - if ( ! safe_write(fd, bytes, len) ) + if ( ! InternalWrite(fd, bytes, len) ) goto write_error; if ( ! IsBuf() ) @@ -368,7 +408,8 @@ bool Ascii::DoRotate(const char* rotated_path, double open, double close, bool t CloseFile(close); - string nname = string(rotated_path) + "." + LogExt(); + string nname = string(rotated_path) + "." + LogExt() + + (gzip_level > 0 ? ".gz" : ""); if ( rename(fname.c_str(), nname.c_str()) != 0 ) { @@ -434,4 +475,58 @@ string Ascii::Timestamp(double t) return tmp; } +bool Ascii::InternalWrite(int fd, const char* data, int len) + { + if ( ! gzfile ) + return safe_write(fd, data, len); + + while ( len > 0 ) + { + int n = gzwrite(gzfile, data, len); + + if ( n <= 0 ) + { + const char* err = gzerror(gzfile, &n); + Error(Fmt("Ascii::InternalWrite error: %s\n", err)); + return false; + } + + data += n; + len -= n; + } + + return true; + } + +bool Ascii::InternalClose(int fd) + { + if ( ! gzfile ) + { + safe_close(fd); + return true; + } + + int res = gzclose(gzfile); + + if ( res == Z_OK ) + return true; + + switch ( res ) { + case Z_STREAM_ERROR: + Error("Ascii::InternalClose gzclose error: invalid file stream"); + break; + case Z_BUF_ERROR: + Error("Ascii::InternalClose gzclose error: " + "no compression progress possible during buffer flush"); + break; + case Z_ERRNO: + Error(Fmt("Ascii::InternalClose gzclose error: %s\n", Strerror(errno))); + break; + default: + Error("Ascii::InternalClose invalid gzclose result"); + break; + } + + return false; + } diff --git a/src/logging/writers/ascii/Ascii.h b/src/logging/writers/ascii/Ascii.h index 8648070111..eabeda4242 100644 --- a/src/logging/writers/ascii/Ascii.h +++ b/src/logging/writers/ascii/Ascii.h @@ -8,6 +8,7 @@ #include "logging/WriterBackend.h" #include "threading/formatters/Ascii.h" #include "threading/formatters/JSON.h" +#include "zlib.h" namespace logging { namespace writer { @@ -42,8 +43,11 @@ private: void InitConfigOptions(); bool InitFilterOptions(); bool InitFormatter(); + bool InternalWrite(int fd, const char* data, int len); + bool InternalClose(int fd); int fd; + gzFile gzfile; string fname; ODesc desc; bool ascii_done; @@ -59,6 +63,7 @@ private: string unset_field; string meta_prefix; + int gzip_level; // level > 0 enables gzip compression bool use_json; string json_timestamps; diff --git a/src/logging/writers/ascii/ascii.bif b/src/logging/writers/ascii/ascii.bif index 2817511152..b12b14f1a0 100644 --- a/src/logging/writers/ascii/ascii.bif +++ b/src/logging/writers/ascii/ascii.bif @@ -12,3 +12,4 @@ const empty_field: string; const unset_field: string; const use_json: bool; const json_timestamps: JSON::TimestampFormat; +const gzip_level: count; diff --git a/src/plugin/Manager.cc b/src/plugin/Manager.cc index c672614957..104bcfbdd5 100644 --- a/src/plugin/Manager.cc +++ b/src/plugin/Manager.cc @@ -712,7 +712,7 @@ void Manager::HookSetupAnalyzerTree(Connection *conn) const if ( HavePluginForHook(META_HOOK_PRE) ) { - args.push_back(conn); + args.push_back(HookArgument(conn)); MetaHookPre(HOOK_SETUP_ANALYZER_TREE, args); } @@ -739,7 +739,7 @@ void Manager::HookUpdateNetworkTime(double network_time) const 
if ( HavePluginForHook(META_HOOK_PRE) ) { - args.push_back(network_time); + args.push_back(HookArgument(network_time)); MetaHookPre(HOOK_UPDATE_NETWORK_TIME, args); } @@ -762,7 +762,7 @@ void Manager::HookBroObjDtor(void* obj) const if ( HavePluginForHook(META_HOOK_PRE) ) { - args.push_back(obj); + args.push_back(HookArgument(obj)); MetaHookPre(HOOK_BRO_OBJ_DTOR, args); } @@ -779,6 +779,84 @@ void Manager::HookBroObjDtor(void* obj) const MetaHookPost(HOOK_BRO_OBJ_DTOR, args, HookArgument()); } +void Manager::HookLogInit(const std::string& writer, + const std::string& instantiating_filter, + bool local, bool remote, + const logging::WriterBackend::WriterInfo& info, + int num_fields, + const threading::Field* const* fields) const + { + HookArgumentList args; + + if ( HavePluginForHook(META_HOOK_PRE) ) + { + args.push_back(HookArgument(writer)); + args.push_back(HookArgument(instantiating_filter)); + args.push_back(HookArgument(local)); + args.push_back(HookArgument(remote)); + args.push_back(HookArgument(&info)); + args.push_back(HookArgument(num_fields)); + args.push_back(HookArgument(std::make_pair(num_fields, fields))); + MetaHookPre(HOOK_LOG_INIT, args); + } + + hook_list* l = hooks[HOOK_LOG_INIT]; + + if ( l ) + for ( hook_list::iterator i = l->begin(); i != l->end(); ++i ) + { + Plugin* p = (*i).second; + p->HookLogInit(writer, instantiating_filter, local, remote, info, + num_fields, fields); + } + + if ( HavePluginForHook(META_HOOK_POST) ) + MetaHookPost(HOOK_LOG_INIT, args, HookArgument()); + } + +bool Manager::HookLogWrite(const std::string& writer, + const std::string& filter, + const logging::WriterBackend::WriterInfo& info, + int num_fields, + const threading::Field* const* fields, + threading::Value** vals) const + { + HookArgumentList args; + + if ( HavePluginForHook(META_HOOK_PRE) ) + { + args.push_back(HookArgument(writer)); + args.push_back(HookArgument(filter)); + args.push_back(HookArgument(&info)); + args.push_back(HookArgument(num_fields)); + args.push_back(HookArgument(std::make_pair(num_fields, fields))); + args.push_back(HookArgument(vals)); + MetaHookPre(HOOK_LOG_WRITE, args); + } + + hook_list* l = hooks[HOOK_LOG_WRITE]; + + bool result = true; + + if ( l ) + for ( hook_list::iterator i = l->begin(); i != l->end(); ++i ) + { + Plugin* p = (*i).second; + + if ( ! p->HookLogWrite(writer, filter, info, num_fields, fields, + vals) ) + { + result = false; + break; + } + } + + if ( HavePluginForHook(META_HOOK_POST) ) + MetaHookPost(HOOK_LOG_WRITE, args, HookArgument(result)); + + return result; + } + void Manager::MetaHookPre(HookType hook, const HookArgumentList& args) const { hook_list* l = hooks[HOOK_CALL_FUNCTION]; diff --git a/src/plugin/Manager.h b/src/plugin/Manager.h index 2a394b39ee..9ece86bfed 100644 --- a/src/plugin/Manager.h +++ b/src/plugin/Manager.h @@ -291,6 +291,70 @@ public: */ void HookBroObjDtor(void* obj) const; + /** + * Hook into log initialization. This method will be called when a + * logging writer is created. A writer represents a single logging + * filter. The method is called in the main thread, on the node that + * causes a log line to be written. It will _not_ be called on the logger + * node. The function will be called once for every instantiated writer. + * + * @param writer The name of the writer being instantiated. + * + * @param instantiating_filter Name of the filter causing the + * writer instantiation. + * + * @param local True if the filter is logging locally (writer + * thread will be located in same process). 
+ * + * @param remote True if filter is logging remotely (writer thread + * will be located in different thread, typically + * in manager or logger node). + * + * @param info WriterBackend::WriterInfo with information about the writer. + * + * @param num_fields number of fields in the record being written. + * + * @param fields threading::Field description of the fields being logged. + */ + void HookLogInit(const std::string& writer, + const std::string& instantiating_filter, + bool local, bool remote, + const logging::WriterBackend::WriterInfo& info, + int num_fields, + const threading::Field* const* fields) const; + + /** + * Hook into log writing. This method will be called for each log line + * being written by each writer. Each writer represents a single logging + * filter. The method is called in the main thread, on the node that + * causes a log line to be written. It will _not_ be called on the logger + * node. + * This function allows plugins to modify or skip logging of information. + * Note - once a log line is skipped (by returning false), it will not + * passed on to hooks that have not yet been called. + * + * @param writer The name of the writer. + * + * @param filter Name of the filter being written to. + * + * @param info WriterBackend::WriterInfo with information about the writer. + * + * @param num_fields number of fields in the record being written. + * + * @param fields threading::Field description of the fields being logged. + * + * @param vals threading::Values containing the values being written. Values + * can be modified in the Hook. + * + * @return true if log line should be written, false if log line should be + * skipped and not passed on to the writer. + */ + bool HookLogWrite(const std::string& writer, + const std::string& filter, + const logging::WriterBackend::WriterInfo& info, + int num_fields, const threading::Field* const* fields, + threading::Value** vals) const; + /** * Internal method that registers a freshly instantiated plugin with * the manager. diff --git a/src/plugin/Plugin.cc b/src/plugin/Plugin.cc index b0eb19a628..7af9b9dfee 100644 --- a/src/plugin/Plugin.cc +++ b/src/plugin/Plugin.cc @@ -10,6 +10,8 @@ #include "../Desc.h" #include "../Event.h" +#include "../Conn.h" +#include "threading/SerialTypes.h" using namespace plugin; @@ -24,6 +26,8 @@ const char* plugin::hook_name(HookType h) "UpdateNetworkTime", "BroObjDtor", "SetupAnalyzerTree", + "LogInit", + "LogWrite", // MetaHooks "MetaHookPre", "MetaHookPost", @@ -84,6 +88,11 @@ void HookArgument::Describe(ODesc* d) const d->Add(""); break; + case CONN: + if ( arg.conn ) + arg.conn->Describe(d); + break; + case FUNC_RESULT: if ( func_result.first ) { @@ -145,6 +154,60 @@ void HookArgument::Describe(ODesc* d) const case VOIDP: d->Add(""); break; + + case WRITER_INFO: + { + d->Add(arg.winfo->path); + d->Add("("); + d->Add(arg.winfo->network_time); + d->Add(","); + d->Add(arg.winfo->rotation_interval); + d->Add(","); + d->Add(arg.winfo->rotation_base); + + if ( arg.winfo->config.size() > 0 ) + { + bool first = true; + d->Add("config: {"); + + for ( auto& v: arg.winfo->config ) + { + if ( ! 
first ) + d->Add(", "); + + d->Add(v.first); + d->Add(": "); + d->Add(v.second); + first = false; + } + + d->Add("}"); + } + + d->Add(")"); + } + break; + + case THREAD_FIELDS: + { + d->Add("{"); + + for ( int i=0; i < tfields.first; i++ ) + { + const threading::Field* f = tfields.second[i]; + + if ( i > 0 ) + d->Add(", "); + + d->Add(f->name); + d->Add(" ("); + d->Add(f->TypeName()); + d->Add(")"); + } + + d->Add("}"); + } + break; } } @@ -319,6 +382,22 @@ void Plugin::HookBroObjDtor(void* obj) { } +void Plugin::HookLogInit(const std::string& writer, + const std::string& instantiating_filter, + bool local, bool remote, + const logging::WriterBackend::WriterInfo& info, + int num_fields, const threading::Field* const* fields) + { + } + +bool Plugin::HookLogWrite(const std::string& writer, const std::string& filter, + const logging::WriterBackend::WriterInfo& info, + int num_fields, const threading::Field* const* fields, + threading::Value** vals) + { + return true; + } + void Plugin::MetaHookPre(HookType hook, const HookArgumentList& args) { } diff --git a/src/plugin/Plugin.h b/src/plugin/Plugin.h index 49fa7cdd84..aabec22bc4 100644 --- a/src/plugin/Plugin.h +++ b/src/plugin/Plugin.h @@ -11,16 +11,21 @@ #include "analyzer/Component.h" #include "file_analysis/Component.h" #include "iosource/Component.h" +#include "logging/WriterBackend.h" // We allow to override this externally for testing purposes. #ifndef BRO_PLUGIN_API_VERSION -#define BRO_PLUGIN_API_VERSION 4 +#define BRO_PLUGIN_API_VERSION 5 #endif class ODesc; class Func; class Event; +namespace threading { +struct Field; +} + namespace plugin { class Manager; @@ -39,7 +44,9 @@ enum HookType { HOOK_DRAIN_EVENTS, //< Activates Plugin::HookDrainEvents() HOOK_UPDATE_NETWORK_TIME, //< Activates Plugin::HookUpdateNetworkTime. HOOK_BRO_OBJ_DTOR, //< Activates Plugin::HookBroObjDtor. - HOOK_SETUP_ANALYZER_TREE, //< Activates Plugin::HookSetupAnalyzerTree + HOOK_SETUP_ANALYZER_TREE, //< Activates Plugin::HookAddToAnalyzerTree + HOOK_LOG_INIT, //< Activates Plugin::HookLogInit + HOOK_LOG_WRITE, //< Activates Plugin::HookLogWrite // Meta hooks. META_HOOK_PRE, //< Activates Plugin::MetaHookPre(). @@ -158,7 +165,8 @@ public: * Type of the argument. */ enum Type { - BOOL, DOUBLE, EVENT, FRAME, FUNC, FUNC_RESULT, INT, STRING, VAL, VAL_LIST, VOID, VOIDP + BOOL, DOUBLE, EVENT, FRAME, FUNC, FUNC_RESULT, INT, STRING, VAL, + VAL_LIST, VOID, VOIDP, WRITER_INFO, CONN, THREAD_FIELDS }; /** @@ -169,57 +177,72 @@ public: /** * Constructor with a boolean argument. */ - HookArgument(bool a) { type = BOOL; arg.bool_ = a; } + explicit HookArgument(bool a) { type = BOOL; arg.bool_ = a; } /** * Constructor with a double argument. */ - HookArgument(double a) { type = DOUBLE; arg.double_ = a; } + explicit HookArgument(double a) { type = DOUBLE; arg.double_ = a; } /** * Constructor with an event argument. */ - HookArgument(const Event* a) { type = EVENT; arg.event = a; } + explicit HookArgument(const Event* a) { type = EVENT; arg.event = a; } + + /** + * Constructor with an connection argument. + */ + explicit HookArgument(const Connection* c) { type = CONN; arg.conn = c; } /** * Constructor with a function argument. */ - HookArgument(const Func* a) { type = FUNC; arg.func = a; } + explicit HookArgument(const Func* a) { type = FUNC; arg.func = a; } /** * Constructor with an integer argument. */ - HookArgument(int a) { type = INT; arg.int_ = a; } + explicit HookArgument(int a) { type = INT; arg.int_ = a; } /** * Constructor with a string argument. 
*/
- HookArgument(const std::string& a) { type = STRING; arg_string = a; }
+ explicit HookArgument(const std::string& a) { type = STRING; arg_string = a; }
/**
* Constructor with a Bro value argument.
*/
- HookArgument(const Val* a) { type = VAL; arg.val = a; }
+ explicit HookArgument(const Val* a) { type = VAL; arg.val = a; }
/**
* Constructor with a list of Bro values argument.
*/
- HookArgument(const val_list* a) { type = VAL_LIST; arg.vals = a; }
+ explicit HookArgument(const val_list* a) { type = VAL_LIST; arg.vals = a; }
/**
* Constructor with a void pointer argument.
*/
- HookArgument(void* p) { type = VOIDP; arg.voidp = p; }
+ explicit HookArgument(void* p) { type = VOIDP; arg.voidp = p; }
/**
* Constructor with a function result argument.
*/
- HookArgument(std::pair fresult) { type = FUNC_RESULT; func_result = fresult; }
+ explicit HookArgument(std::pair fresult) { type = FUNC_RESULT; func_result = fresult; }
/**
* Constructor with a Frame argument.
*/
- HookArgument(Frame* f) { type = FRAME; arg.frame = f; }
+ explicit HookArgument(Frame* f) { type = FRAME; arg.frame = f; }
+
+ /**
+ * Constructor with a WriterInfo argument.
+ */
+ explicit HookArgument(const logging::WriterBackend::WriterInfo* i) { type = WRITER_INFO; arg.winfo = i; }
+
+ /**
+ * Constructor with a threading field argument.
+ */
+ explicit HookArgument(const std::pair fpair) { type = THREAD_FIELDS; tfields = fpair; }
/**
* Returns the value for a boolen argument. The argument's type must
@@ -239,6 +262,12 @@ public:
*/
const Event* AsEvent() const { assert(type == EVENT); return arg.event; }
+ /**
+ * Returns the value for a connection argument. The argument's type must
+ * match accordingly.
+ */
+ const Connection* AsConnection() const { assert(type == CONN); return arg.conn; }
+
/**
* Returns the value for a function argument. The argument's type must
* match accordingly.
@@ -275,6 +304,18 @@ public:
*/
const Frame* AsFrame() const { assert(type == FRAME); return arg.frame; }
+ /**
+ * Returns the value for a logging WriterInfo argument. The argument's type must
+ * match accordingly.
+ */
+ const logging::WriterBackend::WriterInfo* AsWriterInfo() const { assert(type == WRITER_INFO); return arg.winfo; }
+
+ /**
+ * Returns the value for a threading fields argument. The argument's type must
+ * match accordingly.
+ */
+ const std::pair AsThreadFields() const { assert(type == THREAD_FIELDS); return tfields; }
+
/**
* Returns the value for a list of Bro values argument. The argument's type must
* match accordingly.
@@ -305,16 +346,19 @@ private:
bool bool_;
double double_;
const Event* event;
+ const Connection* conn;
const Func* func;
const Frame* frame;
int int_;
const Val* val;
const val_list* vals;
const void* voidp;
+ const logging::WriterBackend::WriterInfo* winfo;
} arg;
// Outside union because these have dtors.
std::pair func_result;
+ std::pair tfields;
std::string arg_string;
};
@@ -663,6 +707,71 @@ protected:
*/
virtual void HookBroObjDtor(void* obj);
+ /**
+ * Hook into log initialization. This method will be called when a
+ * logging writer is created. A writer represents a single logging
+ * filter. The method is called in the main thread, on the node that
+ * causes a log line to be written. It will _not_ be called on the logger
+ * node. The function will be called once for every instantiated writer.
+ *
+ * @param writer The name of the writer being instantiated.
+ *
+ * @param instantiating_filter Name of the filter causing the
+ * writer instantiation.
+ *
+ * @param local True if the filter is logging locally (writer
+ * thread will be located in same process).
+ *
+ * @param remote True if filter is logging remotely (writer thread
+ * will be located in different thread, typically
+ * in manager or logger node).
+ *
+ * @param info WriterBackend::WriterInfo with information about the writer.
+ *
+ * @param num_fields number of fields in the record being written.
+ *
+ * @param fields threading::Field description of the fields being logged.
+ */
+ virtual void HookLogInit(const std::string& writer,
+ const std::string& instantiating_filter,
+ bool local, bool remote,
+ const logging::WriterBackend::WriterInfo& info,
+ int num_fields,
+ const threading::Field* const* fields);
+
+ /**
+ * Hook into log writing. This method will be called for each log line
+ * being written by each writer. Each writer represents a single logging
+ * filter. The method is called in the main thread, on the node that
+ * causes a log line to be written. It will _not_ be called on the logger
+ * node.
+ * This function allows plugins to modify or skip logging of information.
+ * Note - once a log line is skipped (by returning false), it will not be
+ * passed on to hooks that have not yet been called.
+ *
+ * @param writer The name of the writer.
+ *
+ * @param filter Name of the filter being written to.
+ *
+ * @param info WriterBackend::WriterInfo with information about the writer.
+ *
+ * @param num_fields number of fields in the record being written.
+ *
+ * @param fields threading::Field description of the fields being logged.
+ *
+ * @param vals threading::Values containing the values being written. Values
+ * can be modified in the Hook.
+ *
+ * @return true if log line should be written, false if log line should be
+ * skipped and not passed on to the writer.
+ */
+ virtual bool HookLogWrite(const std::string& writer,
+ const std::string& filter,
+ const logging::WriterBackend::WriterInfo& info,
+ int num_fields,
+ const threading::Field* const* fields,
+ threading::Value** vals);
+
// Meta hooks.
/**
diff --git a/src/threading/Formatter.cc b/src/threading/Formatter.cc
index 3f366de90a..b881962732 100644
--- a/src/threading/Formatter.cc
+++ b/src/threading/Formatter.cc
@@ -22,7 +22,7 @@ Formatter::~Formatter()
{
}
-string Formatter::Render(const threading::Value::addr_t& addr) const
+string Formatter::Render(const threading::Value::addr_t& addr)
{
if ( addr.family == IPv4 )
{
@@ -90,7 +90,7 @@ threading::Value::addr_t Formatter::ParseAddr(const string &s) const
return val;
}
-string Formatter::Render(const threading::Value::subnet_t& subnet) const
+string Formatter::Render(const threading::Value::subnet_t& subnet)
{
char l[16];
@@ -104,7 +104,7 @@ string Formatter::Render(const threading::Value::subnet_t& subnet) const
return s;
}
-string Formatter::Render(double d) const
+string Formatter::Render(double d)
{
char buf[256];
modp_dtoa(d, buf, 6);
diff --git a/src/threading/Formatter.h b/src/threading/Formatter.h
index c8337959bf..c564f3c945 100644
--- a/src/threading/Formatter.h
+++ b/src/threading/Formatter.h
@@ -87,7 +87,7 @@ public:
*
* @return An ASCII representation of the address.
*/
- string Render(const threading::Value::addr_t& addr) const;
+ static string Render(const threading::Value::addr_t& addr);
/**
* Convert an subnet value into a string.
@@ -98,7 +98,7 @@
*
* @return An ASCII representation of the subnet.
*/ - string Render(const threading::Value::subnet_t& subnet) const; + static string Render(const threading::Value::subnet_t& subnet); /** * Convert a double into a string. This renders the double with Bro's @@ -110,7 +110,7 @@ public: * * @return An ASCII representation of the double. */ - string Render(double d) const; + static string Render(double d); /** * Convert a string into a TransportProto. The string must be one of diff --git a/src/threading/SerialTypes.h b/src/threading/SerialTypes.h index e056f6c170..af3f92d416 100644 --- a/src/threading/SerialTypes.h +++ b/src/threading/SerialTypes.h @@ -98,7 +98,7 @@ struct Value { typedef set_t vec_t; struct port_t { bro_uint_t port; TransportProto proto; }; - struct addr_t { + struct addr_t { IPFamily family; union { struct in_addr in4; @@ -106,6 +106,13 @@ struct Value { } in; }; + // A small note for handling subnet values: Subnet values emitted from + // the logging framework will always have a length that is based on the + // internal IPv6 representation (so you have to substract 96 from it to + // get the correct value for IPv4). + // However, the Input framework expects the "normal" length for an IPv4 + // address (so do not add 96 to it), because the underlying constructors + // for the SubNet type want it like this. struct subnet_t { addr_t prefix; uint8_t length; }; /** diff --git a/src/types.bif b/src/types.bif index 500c8c9851..20995ef105 100644 --- a/src/types.bif +++ b/src/types.bif @@ -30,7 +30,7 @@ enum proc_t %{ # NFSv3 procedures PROC_MKNOD = 11, # not implemented PROC_REMOVE = 12, # done PROC_RMDIR = 13, # done - PROC_RENAME = 14, # not implemented + PROC_RENAME = 14, # done PROC_LINK = 15, # not implemented PROC_READDIR = 16, # done PROC_READDIRPLUS = 17, # done @@ -101,6 +101,7 @@ enum createmode_t %{ type info_t: record; type fattr_t: record; type diropargs_t: record; +type renameopargs_t: record; type lookup_reply_t: record; type readargs_t: record; type read_reply_t: record; @@ -110,6 +111,7 @@ type wcc_attr_t: record; type write_reply_t: record; type newobj_reply_t: record; type delobj_reply_t: record; +type renameobj_reply_t: record; type readdirargs_t: record; type direntry_t: record; type direntry_vec_t: vector; diff --git a/testing/btest/Baseline/bifs.directory_operations/out b/testing/btest/Baseline/bifs.directory_operations/out new file mode 100644 index 0000000000..8465596a92 --- /dev/null +++ b/testing/btest/Baseline/bifs.directory_operations/out @@ -0,0 +1,10 @@ +T +T +T +T +T +T +F +F +F +F diff --git a/testing/btest/Baseline/language.expire-expr-error/output b/testing/btest/Baseline/language.expire-expr-error/output index 544527fe23..cf43dd4c80 100644 --- a/testing/btest/Baseline/language.expire-expr-error/output +++ b/testing/btest/Baseline/language.expire-expr-error/output @@ -1,2 +1,2 @@ -error in /home/robin/bro/master/testing/btest/.tmp/language.expire-expr-error/expire-expr-error.bro, line 7: no such index (x[kaputt]) +error in /home/robin/bro/master/testing/btest/.tmp/language.expire-expr-error/expire-expr-error.bro, line 8: no such index (x[kaputt]) received termination signal diff --git a/testing/btest/Baseline/plugins.hooks/output b/testing/btest/Baseline/plugins.hooks/output index a4250da2ca..9f231d821f 100644 --- a/testing/btest/Baseline/plugins.hooks/output +++ b/testing/btest/Baseline/plugins.hooks/output @@ -256,7 +256,7 @@ 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=, ev=Weird::log_weird, path=weird])) -> 0.000000 MetaHookPost 
CallFunction(Log::__create_stream, , (X509::LOG, [columns=, ev=X509::log_x509, path=x509])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql])) -> -0.000000 MetaHookPost CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1493757624.27038, node=bro, filter=ip or not ip, init=T, success=T])) -> +0.000000 MetaHookPost CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1498500921.18004, node=bro, filter=ip or not ip, init=T, success=T])) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Cluster::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Communication::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Conn::LOG)) -> @@ -386,7 +386,7 @@ 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=, ev=Weird::log_weird, path=weird])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (X509::LOG, [columns=, ev=X509::log_x509, path=x509])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql])) -> -0.000000 MetaHookPost CallFunction(Log::write, , (PacketFilter::LOG, [ts=1493757624.27038, node=bro, filter=ip or not ip, init=T, success=T])) -> +0.000000 MetaHookPost CallFunction(Log::write, , (PacketFilter::LOG, [ts=1498500921.18004, node=bro, filter=ip or not ip, init=T, success=T])) -> 0.000000 MetaHookPost CallFunction(NetControl::check_plugins, , ()) -> 0.000000 MetaHookPost CallFunction(NetControl::init, , ()) -> 0.000000 MetaHookPost CallFunction(Notice::want_pp, , ()) -> @@ -723,6 +723,8 @@ 0.000000 MetaHookPost LoadFile(base<...>/weird) -> -1 0.000000 MetaHookPost LoadFile(base<...>/x509) -> -1 0.000000 MetaHookPost LoadFile(base<...>/xmpp) -> -1 +0.000000 MetaHookPost LogInit(Log::WRITER_ASCII, default, true, true, packet_filter(0.0,0.0,0.0), 5, {ts (time), node (string), filter (string), init (bool), success (bool)}) -> +0.000000 MetaHookPost LogWrite(Log::WRITER_ASCII, default, packet_filter(0.0,0.0,0.0), 5, {ts (time), node (string), filter (string), init (bool), success (bool)}, ) -> true 0.000000 MetaHookPost QueueEvent(NetControl::init()) -> false 0.000000 MetaHookPost QueueEvent(bro_init()) -> false 0.000000 MetaHookPost QueueEvent(filter_change_tracking()) -> false @@ -984,7 +986,7 @@ 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=, ev=Weird::log_weird, path=weird])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (X509::LOG, [columns=, ev=X509::log_x509, path=x509])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql])) -0.000000 MetaHookPre CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1493757624.27038, node=bro, filter=ip or not ip, init=T, success=T])) +0.000000 MetaHookPre CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1498500921.18004, node=bro, filter=ip or not ip, init=T, success=T])) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Cluster::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Communication::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Conn::LOG)) @@ -1114,7 +1116,7 @@ 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=, ev=Weird::log_weird, path=weird])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (X509::LOG, [columns=, ev=X509::log_x509, path=x509])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (mysql::LOG, 
[columns=, ev=MySQL::log_mysql, path=mysql])) -0.000000 MetaHookPre CallFunction(Log::write, , (PacketFilter::LOG, [ts=1493757624.27038, node=bro, filter=ip or not ip, init=T, success=T])) +0.000000 MetaHookPre CallFunction(Log::write, , (PacketFilter::LOG, [ts=1498500921.18004, node=bro, filter=ip or not ip, init=T, success=T])) 0.000000 MetaHookPre CallFunction(NetControl::check_plugins, , ()) 0.000000 MetaHookPre CallFunction(NetControl::init, , ()) 0.000000 MetaHookPre CallFunction(Notice::want_pp, , ()) @@ -1451,6 +1453,8 @@ 0.000000 MetaHookPre LoadFile(base<...>/weird) 0.000000 MetaHookPre LoadFile(base<...>/x509) 0.000000 MetaHookPre LoadFile(base<...>/xmpp) +0.000000 MetaHookPre LogInit(Log::WRITER_ASCII, default, true, true, packet_filter(0.0,0.0,0.0), 5, {ts (time), node (string), filter (string), init (bool), success (bool)}) +0.000000 MetaHookPre LogWrite(Log::WRITER_ASCII, default, packet_filter(0.0,0.0,0.0), 5, {ts (time), node (string), filter (string), init (bool), success (bool)}, ) 0.000000 MetaHookPre QueueEvent(NetControl::init()) 0.000000 MetaHookPre QueueEvent(bro_init()) 0.000000 MetaHookPre QueueEvent(filter_change_tracking()) @@ -1711,7 +1715,7 @@ 0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=, ev=Weird::log_weird, path=weird]) 0.000000 | HookCallFunction Log::__create_stream(X509::LOG, [columns=, ev=X509::log_x509, path=x509]) 0.000000 | HookCallFunction Log::__create_stream(mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql]) -0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1493757624.27038, node=bro, filter=ip or not ip, init=T, success=T]) +0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1498500921.18004, node=bro, filter=ip or not ip, init=T, success=T]) 0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Communication::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Conn::LOG) @@ -1841,7 +1845,7 @@ 0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=, ev=Weird::log_weird, path=weird]) 0.000000 | HookCallFunction Log::create_stream(X509::LOG, [columns=, ev=X509::log_x509, path=x509]) 0.000000 | HookCallFunction Log::create_stream(mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql]) -0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1493757624.27038, node=bro, filter=ip or not ip, init=T, success=T]) +0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1498500921.18004, node=bro, filter=ip or not ip, init=T, success=T]) 0.000000 | HookCallFunction NetControl::check_plugins() 0.000000 | HookCallFunction NetControl::init() 0.000000 | HookCallFunction Notice::want_pp() @@ -1882,6 +1886,8 @@ 0.000000 | HookLoadFile <...>/bro 0.000000 | HookLoadFile base<...>/bif 0.000000 | HookLoadFile base<...>/bro +0.000000 | HookLogInit packet_filter 1/1 {ts (time), node (string), filter (string), init (bool), success (bool)} +0.000000 | HookLogWrite packet_filter [ts=1498500921.180040, node=bro, filter=ip or not ip, init=T, success=T] 0.000000 | HookQueueEvent NetControl::init() 0.000000 | HookQueueEvent bro_init() 0.000000 | HookQueueEvent filter_change_tracking() @@ -1896,6 +1902,7 @@ 1362692526.869344 MetaHookPost QueueEvent(ChecksumOffloading::check()) -> false 1362692526.869344 MetaHookPost QueueEvent(filter_change_tracking()) -> false 1362692526.869344 MetaHookPost QueueEvent(new_connection([id=[orig_h=141.142.228.5, orig_p=59856<...>/tcp], orig=[size=0, state=0, num_pkts=0, num_bytes_ip=0, 
flow_label=0, l2_addr=c8:bc:c8:96:d2:a0], resp=[size=0, state=0, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:10:db:88:d2:ef], start_time=1362692526.869344, duration=0.0, service={}, history=, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=])) -> false +1362692526.869344 MetaHookPost SetupAnalyzerTree(1362692526.869344(1362692526.869344) TCP 141.142.228.5:59856 -> 192.150.187.43:80) -> 1362692526.869344 MetaHookPost UpdateNetworkTime(1362692526.869344) -> 1362692526.869344 MetaHookPre BroObjDtor() 1362692526.869344 MetaHookPre CallFunction(ChecksumOffloading::check, , ()) @@ -1908,6 +1915,7 @@ 1362692526.869344 MetaHookPre QueueEvent(ChecksumOffloading::check()) 1362692526.869344 MetaHookPre QueueEvent(filter_change_tracking()) 1362692526.869344 MetaHookPre QueueEvent(new_connection([id=[orig_h=141.142.228.5, orig_p=59856<...>/tcp], orig=[size=0, state=0, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=c8:bc:c8:96:d2:a0], resp=[size=0, state=0, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:10:db:88:d2:ef], start_time=1362692526.869344, duration=0.0, service={}, history=, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=])) +1362692526.869344 MetaHookPre SetupAnalyzerTree(1362692526.869344(1362692526.869344) TCP 141.142.228.5:59856 -> 192.150.187.43:80) 1362692526.869344 MetaHookPre UpdateNetworkTime(1362692526.869344) 1362692526.869344 | HookBroObjDtor 1362692526.869344 | HookUpdateNetworkTime 1362692526.869344 @@ -1921,6 +1929,7 @@ 1362692526.869344 | HookQueueEvent ChecksumOffloading::check() 1362692526.869344 | HookQueueEvent filter_change_tracking() 1362692526.869344 | HookQueueEvent new_connection([id=[orig_h=141.142.228.5, orig_p=59856<...>/tcp], orig=[size=0, state=0, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=c8:bc:c8:96:d2:a0], resp=[size=0, state=0, num_pkts=0, num_bytes_ip=0, flow_label=0, l2_addr=00:10:db:88:d2:ef], start_time=1362692526.869344, duration=0.0, service={}, history=, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=, dpd=, conn=, extract_orig=F, extract_resp=F, thresholds=, dce_rpc=, dce_rpc_state=, dce_rpc_backing=, dhcp=, dnp3=, dns=, dns_state=, ftp=, ftp_data_reuse=F, ssl=, http=, http_state=, irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=]) +1362692526.869344 | HookSetupAnalyzerTree 1362692526.869344(1362692526.869344) TCP 141.142.228.5:59856 -> 192.150.187.43:80 1362692526.869344 | RequestObjDtor ChecksumOffloading::check() 1362692526.939084 MetaHookPost CallFunction(NetControl::catch_release_seen, , (141.142.228.5)) -> 1362692526.939084 MetaHookPost CallFunction(addr_to_subnet, , (141.142.228.5)) -> @@ -2235,6 +2244,10 @@ 1362692527.009775 MetaHookPost CallFunction(id_string, , ([orig_h=141.142.228.5, orig_p=59856<...>/tcp])) -> 1362692527.009775 MetaHookPost CallFunction(set_file_handle, , 
(Analyzer::ANALYZER_HTTP1362692526.869344F11141.142.228.5:59856 > 192.150.187.43:80)) -> 1362692527.009775 MetaHookPost DrainEvents() -> +1362692527.009775 MetaHookPost LogInit(Log::WRITER_ASCII, default, true, true, files(1362692527.009775,0.0,0.0), 25, {ts (time), fuid (string), tx_hosts (set[addr]), rx_hosts (set[addr]), conn_uids (set[string]), source (string), depth (count), analyzers (set[string]), mime_type (string), filename (string), duration (interval), local_orig (bool), is_orig (bool), seen_bytes (count), total_bytes (count), missing_bytes (count), overflow_bytes (count), timedout (bool), parent_fuid (string), md5 (string), sha1 (string), sha256 (string), extracted (string), extracted_cutoff (bool), extracted_size (count)}) -> +1362692527.009775 MetaHookPost LogInit(Log::WRITER_ASCII, default, true, true, http(1362692527.009775,0.0,0.0), 29, {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), trans_depth (count), method (string), host (string), uri (string), referrer (string), version (string), user_agent (string), request_body_len (count), response_body_len (count), status_code (count), status_msg (string), info_code (count), info_msg (string), tags (set[enum]), username (string), password (string), proxied (set[string]), orig_fuids (vector[string]), orig_filenames (vector[string]), orig_mime_types (vector[string]), resp_fuids (vector[string]), resp_filenames (vector[string]), resp_mime_types (vector[string])}) -> +1362692527.009775 MetaHookPost LogWrite(Log::WRITER_ASCII, default, files(1362692527.009775,0.0,0.0), 25, {ts (time), fuid (string), tx_hosts (set[addr]), rx_hosts (set[addr]), conn_uids (set[string]), source (string), depth (count), analyzers (set[string]), mime_type (string), filename (string), duration (interval), local_orig (bool), is_orig (bool), seen_bytes (count), total_bytes (count), missing_bytes (count), overflow_bytes (count), timedout (bool), parent_fuid (string), md5 (string), sha1 (string), sha256 (string), extracted (string), extracted_cutoff (bool), extracted_size (count)}, ) -> true +1362692527.009775 MetaHookPost LogWrite(Log::WRITER_ASCII, default, http(1362692527.009775,0.0,0.0), 29, {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), trans_depth (count), method (string), host (string), uri (string), referrer (string), version (string), user_agent (string), request_body_len (count), response_body_len (count), status_code (count), status_msg (string), info_code (count), info_msg (string), tags (set[enum]), username (string), password (string), proxied (set[string]), orig_fuids (vector[string]), orig_filenames (vector[string]), orig_mime_types (vector[string]), resp_fuids (vector[string]), resp_filenames (vector[string]), resp_mime_types (vector[string])}, ) -> true 1362692527.009775 MetaHookPost QueueEvent(file_sniff([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain]], inferred=T])) -> false 1362692527.009775 MetaHookPost QueueEvent(file_state_remove([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=[filename=], orig_mime_depth=1, resp_mime_depth=1], irc=, pe=, u2_events=])) -> false 1362692527.009775 MetaHookPost QueueEvent(get_file_handle(Analyzer::ANALYZER_HTTP, [id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=[filename=], orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, 
current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=], F)) -> false @@ -2260,6 +2273,10 @@ 1362692527.009775 MetaHookPre CallFunction(id_string, , ([orig_h=141.142.228.5, orig_p=59856<...>/tcp])) 1362692527.009775 MetaHookPre CallFunction(set_file_handle, , (Analyzer::ANALYZER_HTTP1362692526.869344F11141.142.228.5:59856 > 192.150.187.43:80)) 1362692527.009775 MetaHookPre DrainEvents() +1362692527.009775 MetaHookPre LogInit(Log::WRITER_ASCII, default, true, true, files(1362692527.009775,0.0,0.0), 25, {ts (time), fuid (string), tx_hosts (set[addr]), rx_hosts (set[addr]), conn_uids (set[string]), source (string), depth (count), analyzers (set[string]), mime_type (string), filename (string), duration (interval), local_orig (bool), is_orig (bool), seen_bytes (count), total_bytes (count), missing_bytes (count), overflow_bytes (count), timedout (bool), parent_fuid (string), md5 (string), sha1 (string), sha256 (string), extracted (string), extracted_cutoff (bool), extracted_size (count)}) +1362692527.009775 MetaHookPre LogInit(Log::WRITER_ASCII, default, true, true, http(1362692527.009775,0.0,0.0), 29, {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), trans_depth (count), method (string), host (string), uri (string), referrer (string), version (string), user_agent (string), request_body_len (count), response_body_len (count), status_code (count), status_msg (string), info_code (count), info_msg (string), tags (set[enum]), username (string), password (string), proxied (set[string]), orig_fuids (vector[string]), orig_filenames (vector[string]), orig_mime_types (vector[string]), resp_fuids (vector[string]), resp_filenames (vector[string]), resp_mime_types (vector[string])}) +1362692527.009775 MetaHookPre LogWrite(Log::WRITER_ASCII, default, files(1362692527.009775,0.0,0.0), 25, {ts (time), fuid (string), tx_hosts (set[addr]), rx_hosts (set[addr]), conn_uids (set[string]), source (string), depth (count), analyzers (set[string]), mime_type (string), filename (string), duration (interval), local_orig (bool), is_orig (bool), seen_bytes (count), total_bytes (count), missing_bytes (count), overflow_bytes (count), timedout (bool), parent_fuid (string), md5 (string), sha1 (string), sha256 (string), extracted (string), extracted_cutoff (bool), extracted_size (count)}, ) +1362692527.009775 MetaHookPre LogWrite(Log::WRITER_ASCII, default, http(1362692527.009775,0.0,0.0), 29, {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), trans_depth (count), method (string), host (string), uri (string), referrer (string), version (string), user_agent (string), request_body_len (count), response_body_len (count), status_code (count), status_msg (string), info_code (count), info_msg (string), tags (set[enum]), username (string), password (string), proxied (set[string]), orig_fuids (vector[string]), orig_filenames (vector[string]), orig_mime_types (vector[string]), resp_fuids (vector[string]), resp_filenames (vector[string]), resp_mime_types (vector[string])}, ) 1362692527.009775 MetaHookPre QueueEvent(file_sniff([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain]], inferred=T])) 1362692527.009775 MetaHookPre QueueEvent(file_state_remove([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain], 
current_entity=[filename=], orig_mime_depth=1, resp_mime_depth=1], irc=, pe=, u2_events=])) 1362692527.009775 MetaHookPre QueueEvent(get_file_handle(Analyzer::ANALYZER_HTTP, [id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=[filename=], orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=], F)) @@ -2286,6 +2303,10 @@ 1362692527.009775 | HookCallFunction id_string([orig_h=141.142.228.5, orig_p=59856<...>/tcp]) 1362692527.009775 | HookCallFunction set_file_handle(Analyzer::ANALYZER_HTTP1362692526.869344F11141.142.228.5:59856 > 192.150.187.43:80) 1362692527.009775 | HookDrainEvents +1362692527.009775 | HookLogInit files 1/1 {ts (time), fuid (string), tx_hosts (set[addr]), rx_hosts (set[addr]), conn_uids (set[string]), source (string), depth (count), analyzers (set[string]), mime_type (string), filename (string), duration (interval), local_orig (bool), is_orig (bool), seen_bytes (count), total_bytes (count), missing_bytes (count), overflow_bytes (count), timedout (bool), parent_fuid (string), md5 (string), sha1 (string), sha256 (string), extracted (string), extracted_cutoff (bool), extracted_size (count)} +1362692527.009775 | HookLogInit http 1/1 {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), trans_depth (count), method (string), host (string), uri (string), referrer (string), version (string), user_agent (string), request_body_len (count), response_body_len (count), status_code (count), status_msg (string), info_code (count), info_msg (string), tags (set[enum]), username (string), password (string), proxied (set[string]), orig_fuids (vector[string]), orig_filenames (vector[string]), orig_mime_types (vector[string]), resp_fuids (vector[string]), resp_filenames (vector[string]), resp_mime_types (vector[string])} +1362692527.009775 | HookLogWrite files [ts=1362692527.009512, fuid=FakNcS1Jfe01uljb3, tx_hosts=192.150.187.43, rx_hosts=141.142.228.5, conn_uids=CHhAvVGS1DHFjwGM9, source=HTTP, depth=0, analyzers=, mime_type=text/plain, filename=, duration=0.000263, local_orig=, is_orig=F, seen_bytes=4705, total_bytes=4705, missing_bytes=0, overflow_bytes=0, timedout=F, parent_fuid=, md5=, sha1=, sha256=, extracted=, extracted_cutoff=, extracted_size=] +1362692527.009775 | HookLogWrite http [ts=1362692526.939527, uid=CHhAvVGS1DHFjwGM9, id.orig_h=141.142.228.5, id.orig_p=59856, id.resp_h=192.150.187.43, id.resp_p=80, trans_depth=1, method=GET, host=bro.org, uri=<...>/plain] 1362692527.009775 | HookQueueEvent file_sniff([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain]], inferred=T]) 1362692527.009775 | HookQueueEvent file_state_remove([id=FakNcS1Jfe01uljb3, parent_id=, source=HTTP, is_orig=F, conns={[[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=[filename=], orig_mime_depth=1, resp_mime_depth=1], irc=, pe=, u2_events=]) 1362692527.009775 | HookQueueEvent get_file_handle(Analyzer::ANALYZER_HTTP, [id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=[filename=], orig_mime_depth=1, resp_mime_depth=1]}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=], F) @@ -2340,6 +2361,8 @@ 1362692527.080972 MetaHookPost CallFunction(sub_bytes, , (HTTP, 0, 1)) -> 
1362692527.080972 MetaHookPost CallFunction(to_lower, , (HTTP)) -> 1362692527.080972 MetaHookPost DrainEvents() -> +1362692527.080972 MetaHookPost LogInit(Log::WRITER_ASCII, default, true, true, conn(1362692527.080972,0.0,0.0), 21, {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), proto (enum), service (string), duration (interval), orig_bytes (count), resp_bytes (count), conn_state (string), local_orig (bool), local_resp (bool), missed_bytes (count), history (string), orig_pkts (count), orig_ip_bytes (count), resp_pkts (count), resp_ip_bytes (count), tunnel_parents (set[string])}) -> +1362692527.080972 MetaHookPost LogWrite(Log::WRITER_ASCII, default, conn(1362692527.080972,0.0,0.0), 21, {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), proto (enum), service (string), duration (interval), orig_bytes (count), resp_bytes (count), conn_state (string), local_orig (bool), local_resp (bool), missed_bytes (count), history (string), orig_pkts (count), orig_ip_bytes (count), resp_pkts (count), resp_ip_bytes (count), tunnel_parents (set[string])}, ) -> true 1362692527.080972 MetaHookPost QueueEvent(ChecksumOffloading::check()) -> false 1362692527.080972 MetaHookPost QueueEvent(bro_done()) -> false 1362692527.080972 MetaHookPost QueueEvent(connection_state_remove([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1], http_state=[pending={}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=])) -> false @@ -2371,6 +2394,8 @@ 1362692527.080972 MetaHookPre CallFunction(sub_bytes, , (HTTP, 0, 1)) 1362692527.080972 MetaHookPre CallFunction(to_lower, , (HTTP)) 1362692527.080972 MetaHookPre DrainEvents() +1362692527.080972 MetaHookPre LogInit(Log::WRITER_ASCII, default, true, true, conn(1362692527.080972,0.0,0.0), 21, {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), proto (enum), service (string), duration (interval), orig_bytes (count), resp_bytes (count), conn_state (string), local_orig (bool), local_resp (bool), missed_bytes (count), history (string), orig_pkts (count), orig_ip_bytes (count), resp_pkts (count), resp_ip_bytes (count), tunnel_parents (set[string])}) +1362692527.080972 MetaHookPre LogWrite(Log::WRITER_ASCII, default, conn(1362692527.080972,0.0,0.0), 21, {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), proto (enum), service (string), duration (interval), orig_bytes (count), resp_bytes (count), conn_state (string), local_orig (bool), local_resp (bool), missed_bytes (count), history (string), orig_pkts (count), orig_ip_bytes (count), resp_pkts (count), resp_ip_bytes (count), tunnel_parents (set[string])}, ) 1362692527.080972 MetaHookPre QueueEvent(ChecksumOffloading::check()) 1362692527.080972 MetaHookPre QueueEvent(bro_done()) 1362692527.080972 MetaHookPre QueueEvent(connection_state_remove([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1], http_state=[pending={}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=])) @@ -2403,6 +2428,8 @@ 1362692527.080972 | HookCallFunction sub_bytes(HTTP, 0, 1) 1362692527.080972 | HookCallFunction 
to_lower(HTTP) 1362692527.080972 | HookDrainEvents +1362692527.080972 | HookLogInit conn 1/1 {ts (time), uid (string), id.orig_h (addr), id.orig_p (port), id.resp_h (addr), id.resp_p (port), proto (enum), service (string), duration (interval), orig_bytes (count), resp_bytes (count), conn_state (string), local_orig (bool), local_resp (bool), missed_bytes (count), history (string), orig_pkts (count), orig_ip_bytes (count), resp_pkts (count), resp_ip_bytes (count), tunnel_parents (set[string])} +1362692527.080972 | HookLogWrite conn [ts=1362692526.869344, uid=CHhAvVGS1DHFjwGM9, id.orig_h=141.142.228.5, id.orig_p=59856, id.resp_h=192.150.187.43, id.resp_p=80, proto=tcp, service=http, duration=0.211484, orig_bytes=136, resp_bytes=5007, conn_state=SF, local_orig=, local_resp=, missed_bytes=0, history=ShADadFf, orig_pkts=7, orig_ip_bytes=512, resp_pkts=7, resp_ip_bytes=5379, tunnel_parents=] 1362692527.080972 | HookQueueEvent ChecksumOffloading::check() 1362692527.080972 | HookQueueEvent bro_done() 1362692527.080972 | HookQueueEvent connection_state_remove([id=[orig_h=141.142.228.5, orig_p=59856<...>/plain], current_entity=, orig_mime_depth=1, resp_mime_depth=1], http_state=[pending={}, current_request=1, current_response=1, trans_depth=1], irc=, krb=, modbus=, mysql=, ntlm=, radius=, rdp=, rfb=, sip=, sip_state=, snmp=, smtp=, smtp_state=, socks=, ssh=, syslog=]) diff --git a/testing/btest/Baseline/plugins.logging-hooks/output b/testing/btest/Baseline/plugins.logging-hooks/output new file mode 100644 index 0000000000..54330845bc --- /dev/null +++ b/testing/btest/Baseline/plugins.logging-hooks/output @@ -0,0 +1 @@ +1488216470.960453 | HookLogInit ssh 1/1 {b (bool), i (int), e (enum), c (count), p (port), sn (subnet), a (addr), d (double), t (time), iv (interval), s (string), sc (set[count]), ss (set[string]), se (set[string]), vc (vector[count]), ve (vector[string]), f (func)} diff --git a/testing/btest/Baseline/plugins.logging-hooks/ssh.log b/testing/btest/Baseline/plugins.logging-hooks/ssh.log new file mode 100644 index 0000000000..4b62eb8aca --- /dev/null +++ b/testing/btest/Baseline/plugins.logging-hooks/ssh.log @@ -0,0 +1,11 @@ +#separator \x09 +#set_separator , +#empty_field EMPTY +#unset_field - +#path ssh +#open 2017-02-27-17-27-50 +#fields b i e c p sn a d t iv s sc ss se vc ve f +#types bool int enum count port subnet addr double time interval string set[count] set[string] set[string] vector[count] vector[string] func +F -2 SSH::LOG 21 123 10.0.0.0/24 1.2.3.4 3.14 1488216470.960453 100.000000 hurz 2,4,1,3 BB,AA,CC EMPTY 10,20,30 EMPTY SSH::foo\x0a{ \x0aif (0 < SSH::i) \x0a\x09return (Foo);\x0aelse\x0a\x09return (Bar);\x0a\x0a} +T - SSH::LOG 21 123 10.0.0.0/24 1.2.3.4 3.14 1488216470.960453 100.000000 hurz 2,4,1,3 BB,AA,CC EMPTY 10,20,30 EMPTY SSH::foo\x0a{ \x0aif (0 < SSH::i) \x0a\x09return (Foo);\x0aelse\x0a\x09return (Bar);\x0a\x0a} +#close 2017-02-27-17-27-50 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-gz/ssh-uncompressed.log b/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-gz/ssh-uncompressed.log new file mode 100644 index 0000000000..c6979d60b9 --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-gz/ssh-uncompressed.log @@ -0,0 +1,10 @@ +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path ssh-uncompressed +#open 2017-04-18-16-16-16 +#fields b i e c p sn a d t iv s sc ss se vc ve f +#types bool int enum count port subnet addr double time interval string set[count] set[string] 
set[string] vector[count] vector[string] func +T -42 SSH::LOG 21 123 10.0.0.0/24 1.2.3.4 3.14 1215620010.543210 100.000000 hurz 2,4,1,3 BB,AA,CC (empty) 10,20,30 (empty) SSH::foo\x0a{ \x0aif (0 < SSH::i) \x0a\x09return (Foo);\x0aelse\x0a\x09return (Bar);\x0a\x0a} +#close 2017-04-18-16-16-16 diff --git a/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-gz/ssh.log b/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-gz/ssh.log new file mode 100644 index 0000000000..22bac43cef --- /dev/null +++ b/testing/btest/Baseline/scripts.base.frameworks.logging.ascii-gz/ssh.log @@ -0,0 +1,10 @@ +#separator \x09 +#set_separator , +#empty_field (empty) +#unset_field - +#path ssh +#open 2017-04-18-16-15-17 +#fields b i e c p sn a d t iv s sc ss se vc ve f +#types bool int enum count port subnet addr double time interval string set[count] set[string] set[string] vector[count] vector[string] func +T -42 SSH::LOG 21 123 10.0.0.0/24 1.2.3.4 3.14 1215620010.543210 100.000000 hurz 2,4,1,3 BB,AA,CC (empty) 10,20,30 (empty) SSH::foo\x0a{ \x0aif (0 < SSH::i) \x0a\x09return (Foo);\x0aelse\x0a\x09return (Bar);\x0a\x0a} +#close 2017-04-18-16-15-17 diff --git a/testing/btest/Baseline/scripts.base.frameworks.netcontrol.acld/send.netcontrol.log b/testing/btest/Baseline/scripts.base.frameworks.netcontrol.acld/send.netcontrol.log index 70c1a28ee2..6170cb6ce0 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.netcontrol.acld/send.netcontrol.log +++ b/testing/btest/Baseline/scripts.base.frameworks.netcontrol.acld/send.netcontrol.log @@ -3,23 +3,23 @@ #empty_field (empty) #unset_field - #path netcontrol -#open 2016-03-24-22-04-41 +#open 2017-04-07-17-26-05 #fields ts rule_id category cmd state action target entity_type entity mod msg priority expire location plugin #types time string enum string enum string enum string string string string int interval string string 0.000000 - NetControl::MESSAGE - - - - - - - activating plugin with priority 0 - - - Acld-bro/event/netcontroltest 0.000000 - NetControl::MESSAGE - - - - - - - waiting for plugins to initialize - - - - -1458857080.863419 - NetControl::MESSAGE - - - - - - - activation finished - - - Acld-bro/event/netcontroltest -1458857080.863419 - NetControl::MESSAGE - - - - - - - plugin initialization done - - - - -1458857080.887618 2 NetControl::RULE ADD NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::FLOW 192.168.18.50/32/*->74.125.239.97/32/* - - 0 36000.000000 here Acld-bro/event/netcontroltest -1458857080.887618 3 NetControl::RULE ADD NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::FLOW */*->*/443 - - 0 36000.000000 there Acld-bro/event/netcontroltest -1458857080.887618 4 NetControl::RULE ADD NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::ADDRESS 192.168.18.50/32 - - 0 36000.000000 - Acld-bro/event/netcontroltest -1458857080.888169 2 NetControl::RULE ADD NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::FLOW 192.168.18.50/32/*->74.125.239.97/32/* - blockhosthost 0 36000.000000 here Acld-bro/event/netcontroltest -1458857080.888169 2 NetControl::RULE REMOVE NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::FLOW 192.168.18.50/32/*->74.125.239.97/32/* - - 0 36000.000000 here Acld-bro/event/netcontroltest -1458857080.888169 3 NetControl::RULE ADD NetControl::EXISTS NetControl::DROP NetControl::FORWARD NetControl::FLOW */*->*/443 - droptcpport 0 36000.000000 there Acld-bro/event/netcontroltest -1458857080.888169 3 NetControl::RULE REMOVE 
NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::FLOW */*->*/443 - - 0 36000.000000 there Acld-bro/event/netcontroltest -1458857080.888169 4 NetControl::RULE ADD NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::ADDRESS 192.168.18.50/32 - drop 0 36000.000000 - Acld-bro/event/netcontroltest -1458857080.888169 4 NetControl::RULE REMOVE NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::ADDRESS 192.168.18.50/32 - - 0 36000.000000 - Acld-bro/event/netcontroltest -1458857080.888169 2 NetControl::RULE REMOVE NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::FLOW 192.168.18.50/32/*->74.125.239.97/32/* - restorehosthost 0 36000.000000 here Acld-bro/event/netcontroltest -1458857080.888169 3 NetControl::RULE REMOVE NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::FLOW */*->*/443 - restoretcpport 0 36000.000000 there Acld-bro/event/netcontroltest -1458857080.888169 4 NetControl::RULE REMOVE NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::ADDRESS 192.168.18.50/32 - restore 0 36000.000000 - Acld-bro/event/netcontroltest -#close 2016-03-24-22-04-41 +1491585965.002956 - NetControl::MESSAGE - - - - - - - activation finished - - - Acld-bro/event/netcontroltest +1491585965.002956 - NetControl::MESSAGE - - - - - - - plugin initialization done - - - - +1491585965.027155 2 NetControl::RULE ADD NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::FLOW 192.168.18.50/32/*->74.125.239.97/32/* - - 0 36000.000000 here Acld-bro/event/netcontroltest +1491585965.027155 3 NetControl::RULE ADD NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::FLOW */*->*/443 - - 0 36000.000000 there Acld-bro/event/netcontroltest +1491585965.027155 4 NetControl::RULE ADD NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::ADDRESS 192.168.18.50/32 - - 0 36000.000000 - Acld-bro/event/netcontroltest +1491585965.027706 2 NetControl::RULE ADD NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::FLOW 192.168.18.50/32/*->74.125.239.97/32/* - blockhosthost 0 36000.000000 here Acld-bro/event/netcontroltest +1491585965.027706 2 NetControl::RULE REMOVE NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::FLOW 192.168.18.50/32/*->74.125.239.97/32/* - - 0 36000.000000 here Acld-bro/event/netcontroltest +1491585965.027706 3 NetControl::RULE ADD NetControl::EXISTS NetControl::DROP NetControl::FORWARD NetControl::FLOW */*->*/443 - droptcpport 0 36000.000000 there Acld-bro/event/netcontroltest +1491585965.027706 3 NetControl::RULE REMOVE NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::FLOW */*->*/443 - - 0 36000.000000 there Acld-bro/event/netcontroltest +1491585965.027706 4 NetControl::RULE ADD NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::ADDRESS 192.168.18.50/32 - drop 0 36000.000000 - Acld-bro/event/netcontroltest +1491585965.027706 4 NetControl::RULE REMOVE NetControl::REQUESTED NetControl::DROP NetControl::FORWARD NetControl::ADDRESS 192.168.18.50/32 - - 0 36000.000000 - Acld-bro/event/netcontroltest +1491585965.027706 2 NetControl::ERROR - - NetControl::DROP NetControl::FORWARD NetControl::FLOW 192.168.18.50/32/*->74.125.239.97/32/* - restorehosthost 0 36000.000000 here Acld-bro/event/netcontroltest +1491585965.027706 3 NetControl::RULE REMOVE NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::FLOW */*->*/443 - restoretcpport 0 36000.000000 there 
Acld-bro/event/netcontroltest +1491585965.027706 4 NetControl::RULE REMOVE NetControl::SUCCEEDED NetControl::DROP NetControl::FORWARD NetControl::ADDRESS 192.168.18.50/32 - restore 0 36000.000000 - Acld-bro/event/netcontroltest +#close 2017-04-07-17-26-05 diff --git a/testing/btest/Baseline/scripts.base.frameworks.netcontrol.acld/send.send.out b/testing/btest/Baseline/scripts.base.frameworks.netcontrol.acld/send.send.out index 0d0ce0fccd..a0a9354726 100644 --- a/testing/btest/Baseline/scripts.base.frameworks.netcontrol.acld/send.send.out +++ b/testing/btest/Baseline/scripts.base.frameworks.netcontrol.acld/send.send.out @@ -2,6 +2,6 @@ Broker::outgoing_connection_established, 127.0.0.1, 9999/tcp rule added, [ty=NetControl::FLOW, conn=, flow=[src_h=192.168.18.50/32, src_p=, dst_h=74.125.239.97/32, dst_p=, src_m=, dst_m=], ip=, mac=], NetControl::DROP rule exists, [ty=NetControl::FLOW, conn=, flow=[src_h=, src_p=, dst_h=, dst_p=443/tcp, src_m=, dst_m=], ip=, mac=], NetControl::DROP rule added, [ty=NetControl::ADDRESS, conn=, flow=, ip=192.168.18.50/32, mac=], NetControl::DROP -rule removed, [ty=NetControl::FLOW, conn=, flow=[src_h=192.168.18.50/32, src_p=, dst_h=74.125.239.97/32, dst_p=, src_m=, dst_m=], ip=, mac=], NetControl::DROP +rule error, [ty=NetControl::FLOW, conn=, flow=[src_h=192.168.18.50/32, src_p=, dst_h=74.125.239.97/32, dst_p=, src_m=, dst_m=], ip=, mac=], NetControl::DROP rule removed, [ty=NetControl::FLOW, conn=, flow=[src_h=, src_p=, dst_h=, dst_p=443/tcp, src_m=, dst_m=], ip=, mac=], NetControl::DROP rule removed, [ty=NetControl::ADDRESS, conn=, flow=, ip=192.168.18.50/32, mac=], NetControl::DROP diff --git a/testing/btest/Baseline/scripts.base.protocols.nfs.basic/.stdout b/testing/btest/Baseline/scripts.base.protocols.nfs.basic/.stdout new file mode 100644 index 0000000000..58d51a773a --- /dev/null +++ b/testing/btest/Baseline/scripts.base.protocols.nfs.basic/.stdout @@ -0,0 +1,24 @@ +nfs_proc_not_implemented, [id=[orig_h=10.111.131.132, orig_p=972/tcp, resp_h=10.111.131.14, resp_p=2049/tcp], orig=[size=425, state=3, num_pkts=5, num_bytes_ip=624, flow_label=0, l2_addr=00:50:56:b2:78:69], resp=[size=520, state=3, num_pkts=3, num_bytes_ip=516, flow_label=0, l2_addr=00:50:56:b2:47:b9], start_time=1495059599.592055, duration=8.972795, service={ + +}, history=AaDd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=], [rpc_stat=RPC_SUCCESS, nfs_stat=NFS3::NFS3ERR_OK, req_start=1495059608.564809, req_dur=0.0, req_len=124, rep_start=1495059608.56485, rep_dur=0.0, rep_len=112, rpc_uid=1628, rpc_gid=200, rpc_stamp=47704458, rpc_machine_name=pddevbal801, rpc_auxgids=[10, 24, 200, 320, 5189, 5196]], NFS3::PROC_ACCESS +nfs_proc_create, [id=[orig_h=10.111.131.132, orig_p=972/tcp, resp_h=10.111.131.14, resp_p=2049/tcp], orig=[size=581, state=3, num_pkts=6, num_bytes_ip=820, flow_label=0, l2_addr=00:50:56:b2:78:69], resp=[size=792, state=3, num_pkts=4, num_bytes_ip=680, flow_label=0, l2_addr=00:50:56:b2:47:b9], start_time=1495059599.592055, duration=8.97641, service={ + +}, history=AaDd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=], [rpc_stat=RPC_SUCCESS, nfs_stat=NFS3::NFS3ERR_OK, req_start=1495059608.565064, req_dur=0.0, req_len=144, rep_start=1495059608.568465, rep_dur=0.0, rep_len=260, rpc_uid=1628, rpc_gid=200, rpc_stamp=47704458, rpc_machine_name=pddevbal801, rpc_auxgids=[10, 24, 200, 320, 5189, 5196]], [dirfh=\x01\x00\x06\x01\x9e\xb5K+3\xeeB+\x82\xa7d\xc9%z\x82\s\xc4\xfa\x00\x09\x8c\xbc\xd8, fname=testfile], 
[fh=\x01\x00\x06\x01\x9e\xb5K+3\xeeB+\x82\xa7d\xc9%z\x82\w\x1ew\x01]\xb6\x00=, obj_attr=[ftype=NFS3::FTYPE_REG, mode=32768, nlink=1, uid=1628, gid=200, size=0, used=0, rdev1=0, rdev2=0, fsid=8629059712046797340, fileid=24583799, atime=2044592128.0, mtime=51501766.0, ctime=1495059608.558778], dir_pre_attr=[size=0, atime=1495059608.558778, mtime=1495059608.558778], dir_post_attr=[ftype=NFS3::FTYPE_DIR, mode=16877, nlink=2, uid=1628, gid=200, size=21, used=0, rdev1=0, rdev2=0, fsid=8629059712046797340, fileid=16434291, atime=1495059608.558778, mtime=1495059608.558778, ctime=1495059608.558778]] +nfs_proc_not_implemented, [id=[orig_h=10.111.131.132, orig_p=972/tcp, resp_h=10.111.131.14, resp_p=2049/tcp], orig=[size=745, state=3, num_pkts=7, num_bytes_ip=1024, flow_label=0, l2_addr=00:50:56:b2:78:69], resp=[size=940, state=3, num_pkts=5, num_bytes_ip=992, flow_label=0, l2_addr=00:50:56:b2:47:b9], start_time=1495059599.592055, duration=8.982349, service={ + +}, history=AaDd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=], [rpc_stat=RPC_SUCCESS, nfs_stat=NFS3::NFS3ERR_OK, req_start=1495059608.568646, req_dur=0.0, req_len=152, rep_start=1495059608.574404, rep_dur=0.0, rep_len=136, rpc_uid=1628, rpc_gid=200, rpc_stamp=47704458, rpc_machine_name=pddevbal801, rpc_auxgids=[10, 24, 200, 320, 5189, 5196]], NFS3::PROC_SETATTR +nfs_proc_lookup, [id=[orig_h=10.111.131.132, orig_p=972/tcp, resp_h=10.111.131.14, resp_p=2049/tcp], orig=[size=1185, state=3, num_pkts=10, num_bytes_ip=1584, flow_label=0, l2_addr=00:50:56:b2:78:69], resp=[size=1388, state=3, num_pkts=8, num_bytes_ip=1588, flow_label=0, l2_addr=00:50:56:b2:47:b9], start_time=1495059599.592055, duration=8.989157, service={ + +}, history=AaDd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=], [rpc_stat=RPC_SUCCESS, nfs_stat=NFS3::NFS3ERR_NOENT, req_start=1495059608.581163, req_dur=0.0, req_len=136, rep_start=1495059608.581212, rep_dur=0.0, rep_len=108, rpc_uid=1628, rpc_gid=200, rpc_stamp=47704458, rpc_machine_name=pddevbal801, rpc_auxgids=[10, 24, 200, 320, 5189, 5196]], [dirfh=\x01\x00\x06\x01\x9e\xb5K+3\xeeB+\x82\xa7d\xc9%z\x82\s\xc4\xfa\x00\x09\x8c\xbc\xd8, fname=testfile2], [fh=, obj_attr=, dir_attr=[ftype=NFS3::FTYPE_DIR, mode=16877, nlink=2, uid=1628, gid=200, size=21, used=0, rdev1=0, rdev2=0, fsid=8629059712046797340, fileid=16434291, atime=1495059608.558778, mtime=1495059608.558778, ctime=1495059608.558778]] +nfs_proc_rename, [id=[orig_h=10.111.131.132, orig_p=972/tcp, resp_h=10.111.131.14, resp_p=2049/tcp], orig=[size=1377, state=3, num_pkts=11, num_bytes_ip=1816, flow_label=0, l2_addr=00:50:56:b2:78:69], resp=[size=1652, state=3, num_pkts=9, num_bytes_ip=1748, flow_label=0, l2_addr=00:50:56:b2:47:b9], start_time=1495059599.592055, duration=8.991291, service={ + +}, history=AaDd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=], [rpc_stat=RPC_SUCCESS, nfs_stat=NFS3::NFS3ERR_OK, req_start=1495059608.581412, req_dur=0.0, req_len=180, rep_start=1495059608.583346, rep_dur=0.0, rep_len=252, rpc_uid=1628, rpc_gid=200, rpc_stamp=47704458, rpc_machine_name=pddevbal801, rpc_auxgids=[10, 24, 200, 320, 5189, 5196]], [src_dirfh=\x01\x00\x06\x01\x9e\xb5K+3\xeeB+\x82\xa7d\xc9%z\x82\s\xc4\xfa\x00\x09\x8c\xbc\xd8, src_fname=testfile, dst_dirfh=\x01\x00\x06\x01\x9e\xb5K+3\xeeB+\x82\xa7d\xc9%z\x82\s\xc4\xfa\x00\x09\x8c\xbc\xd8, dst_fname=testfile2], [src_dir_pre_attr=[size=0, atime=1495059608.558778, mtime=1495059608.558778], src_dir_post_attr=[ftype=NFS3::FTYPE_DIR, mode=16877, nlink=2, uid=1628, gid=200, size=22, used=0, rdev1=0, rdev2=0, 
fsid=8629059712046797340, fileid=16434291, atime=1495059608.558778, mtime=1495059608.574778, ctime=1495059608.574778], dst_dir_pre_attr=[size=0, atime=1495059608.558778, mtime=1495059608.558778], dst_dir_post_attr=[ftype=NFS3::FTYPE_DIR, mode=16877, nlink=2, uid=1628, gid=200, size=22, used=0, rdev1=0, rdev2=0, fsid=8629059712046797340, fileid=16434291, atime=1495059608.558778, mtime=1495059608.574778, ctime=1495059608.574778]] +nfs_proc_not_implemented, [id=[orig_h=10.111.131.132, orig_p=972/tcp, resp_h=10.111.131.14, resp_p=2049/tcp], orig=[size=1777, state=3, num_pkts=14, num_bytes_ip=2336, flow_label=0, l2_addr=00:50:56:b2:78:69], resp=[size=2008, state=3, num_pkts=12, num_bytes_ip=2364, flow_label=0, l2_addr=00:50:56:b2:47:b9], start_time=1495059599.592055, duration=8.993098, service={ + +}, history=AaDd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=], [rpc_stat=RPC_SUCCESS, nfs_stat=NFS3::NFS3ERR_OK, req_start=1495059608.585126, req_dur=0.0, req_len=124, rep_start=1495059608.585153, rep_dur=0.0, rep_len=112, rpc_uid=1628, rpc_gid=200, rpc_stamp=47704458, rpc_machine_name=pddevbal801, rpc_auxgids=[10, 24, 200, 320, 5189, 5196]], NFS3::PROC_ACCESS +nfs_proc_remove, [id=[orig_h=10.111.131.132, orig_p=972/tcp, resp_h=10.111.131.14, resp_p=2049/tcp], orig=[size=1925, state=3, num_pkts=16, num_bytes_ip=2564, flow_label=0, l2_addr=00:50:56:b2:78:69], resp=[size=2156, state=3, num_pkts=13, num_bytes_ip=2528, flow_label=0, l2_addr=00:50:56:b2:47:b9], start_time=1495059599.592055, duration=9.813823, service={ + +}, history=AaDd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=], [rpc_stat=RPC_SUCCESS, nfs_stat=NFS3::NFS3ERR_OK, req_start=1495059609.400145, req_dur=0.0, req_len=136, rep_start=1495059609.405878, rep_dur=0.0, rep_len=136, rpc_uid=1628, rpc_gid=200, rpc_stamp=47704459, rpc_machine_name=pddevbal801, rpc_auxgids=[10, 24, 200, 320, 5189, 5196]], [dirfh=\x01\x00\x06\x01\x9e\xb5K+3\xeeB+\x82\xa7d\xc9%z\x82\s\xc4\xfa\x00\x09\x8c\xbc\xd8, fname=testfile2], [dir_pre_attr=[size=0, atime=1495059608.574778, mtime=1495059608.574778], dir_post_attr=[ftype=NFS3::FTYPE_DIR, mode=16877, nlink=2, uid=1628, gid=200, size=6, used=0, rdev1=0, rdev2=0, fsid=8629059712046797340, fileid=16434291, atime=1495059608.558778, mtime=1495059609.398797, ctime=1495059609.398797]] +nfs_proc_rmdir, [id=[orig_h=10.111.131.132, orig_p=972/tcp, resp_h=10.111.131.14, resp_p=2049/tcp], orig=[size=2057, state=3, num_pkts=18, num_bytes_ip=2776, flow_label=0, l2_addr=00:50:56:b2:78:69], resp=[size=2304, state=3, num_pkts=14, num_bytes_ip=2716, flow_label=0, l2_addr=00:50:56:b2:47:b9], start_time=1495059599.592055, duration=9.818272, service={ + +}, history=AaDd, uid=CHhAvVGS1DHFjwGM9, tunnel=, vlan=, inner_vlan=], [rpc_stat=RPC_SUCCESS, nfs_stat=NFS3::NFS3ERR_OK, req_start=1495059609.407676, req_dur=0.0, req_len=120, rep_start=1495059609.410327, rep_dur=0.0, rep_len=136, rpc_uid=1628, rpc_gid=200, rpc_stamp=47704459, rpc_machine_name=pddevbal801, rpc_auxgids=[10, 24, 200, 320, 5189, 5196]], [dirfh=\x01\x00\x06\x00\x9e\xb5K+3\xeeB+\x82\xa7d\xc9%z\x82\, fname=test], [dir_pre_attr=[size=4096, atime=1495059608.558778, mtime=1495059608.558778], dir_post_attr=[ftype=NFS3::FTYPE_DIR, mode=17407, nlink=44, uid=0, gid=0, size=4096, used=4096, rdev1=0, rdev2=0, fsid=8629059712046797340, fileid=128, atime=0.0, mtime=1495059609.402797, ctime=1495059609.402797]] diff --git a/testing/btest/Traces/nfs/nfs_base.pcap b/testing/btest/Traces/nfs/nfs_base.pcap new file mode 100644 index 0000000000..1707107c83 Binary files /dev/null 
and b/testing/btest/Traces/nfs/nfs_base.pcap differ
diff --git a/testing/btest/bifs/directory_operations.bro b/testing/btest/bifs/directory_operations.bro
new file mode 100644
index 0000000000..9db34511b2
--- /dev/null
+++ b/testing/btest/bifs/directory_operations.bro
@@ -0,0 +1,24 @@
+#
+# @TEST-EXEC: bro -b %INPUT >out
+# @TEST-EXEC: btest-diff out
+
+event bro_init()
+ {
+ # Test successful operations...
+ print mkdir("testdir");
+ print mkdir("testdir");
+ local a = open("testdir/testfile");
+ close(a);
+ print rename("testdir/testfile", "testdir/testfile2");
+ print rename("testdir", "testdir2");
+ print unlink("testdir2/testfile2");
+ print rmdir("testdir2");
+
+
+ print unlink("nonexisting");
+ print rename("a", "b");
+ print rmdir("nonexisting");
+ a = open("testfile");
+ close(a);
+ print mkdir("testfile");
+ }
diff --git a/testing/btest/core/leaks/broker/clone_store.bro b/testing/btest/core/leaks/broker/clone_store.bro
index c3b11a7a0d..4996d05bd2 100644
--- a/testing/btest/core/leaks/broker/clone_store.bro
+++ b/testing/btest/core/leaks/broker/clone_store.bro
@@ -1,7 +1,7 @@
# @TEST-SERIALIZE: brokercomm
# @TEST-REQUIRES: grep -q ENABLE_BROKER:BOOL=true $BUILD/CMakeCache.txt
# @TEST-REQUIRES: bro --help 2>&1 | grep -q mem-leaks
-# @TEST-GROUP: leak
+# @TEST-GROUP: leaks
# @TEST-EXEC: HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local btest-bg-run clone "bro -m -b ../clone.bro broker_port=$BROKER_PORT >clone.out"
# @TEST-EXEC: btest-bg-run master "bro -b ../master.bro broker_port=$BROKER_PORT >master.out"
diff --git a/testing/btest/core/leaks/broker/remote_event.test b/testing/btest/core/leaks/broker/remote_event.test
index 3f63fcba76..a7f174f7fd 100644
--- a/testing/btest/core/leaks/broker/remote_event.test
+++ b/testing/btest/core/leaks/broker/remote_event.test
@@ -1,7 +1,7 @@
# @TEST-SERIALIZE: brokercomm
# @TEST-REQUIRES: grep -q ENABLE_BROKER:BOOL=true $BUILD/CMakeCache.txt
# @TEST-REQUIRES: bro --help 2>&1 | grep -q mem-leaks
-# @TEST-GROUP: leak
+# @TEST-GROUP: leaks
# @TEST-EXEC: HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local btest-bg-run recv "bro -m -b ../recv.bro broker_port=$BROKER_PORT >recv.out"
# @TEST-EXEC: HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local btest-bg-run send "bro -m -b ../send.bro broker_port=$BROKER_PORT >send.out"
diff --git a/testing/btest/core/leaks/broker/remote_log.test b/testing/btest/core/leaks/broker/remote_log.test
index baeab906f1..0093e9db2e 100644
--- a/testing/btest/core/leaks/broker/remote_log.test
+++ b/testing/btest/core/leaks/broker/remote_log.test
@@ -1,7 +1,7 @@
# @TEST-SERIALIZE: brokercomm
# @TEST-REQUIRES: grep -q ENABLE_BROKER:BOOL=true $BUILD/CMakeCache.txt
# @TEST-REQUIRES: bro --help 2>&1 | grep -q mem-leaks
-# @TEST-GROUP: leak
+# @TEST-GROUP: leaks
# @TEST-EXEC: HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local btest-bg-run recv "bro -m -b ../common.bro ../recv.bro broker_port=$BROKER_PORT >recv.out"
# @TEST-EXEC: HEAP_CHECK_DUMP_DIRECTORY=.
HEAPCHECK=local btest-bg-run send "bro -m -b ../common.bro ../send.bro broker_port=$BROKER_PORT >send.out" diff --git a/testing/btest/core/leaks/broker/remote_print.test b/testing/btest/core/leaks/broker/remote_print.test index 26e6317034..9ecc913f34 100644 --- a/testing/btest/core/leaks/broker/remote_print.test +++ b/testing/btest/core/leaks/broker/remote_print.test @@ -1,7 +1,7 @@ # @TEST-SERIALIZE: brokercomm # @TEST-REQUIRES: grep -q ENABLE_BROKER:BOOL=true $BUILD/CMakeCache.txt # @TEST-REQUIRES: bro --help 2>&1 | grep -q mem-leaks -# @TEST-GROUP: leak +# @TEST-GROUP: leaks # @TEST-EXEC: HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local btest-bg-run recv "bro -m -b ../recv.bro broker_port=$BROKER_PORT >recv.out" # @TEST-EXEC: HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local btest-bg-run send "bro -m -b ../send.bro broker_port=$BROKER_PORT >send.out" diff --git a/testing/btest/doc/broxygen/identifier.bro b/testing/btest/doc/broxygen/identifier.bro index 3768b0c0c6..db5c2528ee 100644 --- a/testing/btest/doc/broxygen/identifier.bro +++ b/testing/btest/doc/broxygen/identifier.bro @@ -1,3 +1,4 @@ +# @TEST-SERIALIZE: comm # @TEST-EXEC: bro -b -X broxygen.config %INPUT # @TEST-EXEC: btest-diff test.rst diff --git a/testing/btest/doc/broxygen/package.bro b/testing/btest/doc/broxygen/package.bro index 6857d5e646..fd75a1ce21 100644 --- a/testing/btest/doc/broxygen/package.bro +++ b/testing/btest/doc/broxygen/package.bro @@ -1,3 +1,4 @@ +# @TEST-SERIALIZE: comm # @TEST-EXEC: bro -b -X broxygen.config %INPUT # @TEST-EXEC: btest-diff test.rst diff --git a/testing/btest/doc/broxygen/package_index.bro b/testing/btest/doc/broxygen/package_index.bro index e29479d49f..ef6cc4ab29 100644 --- a/testing/btest/doc/broxygen/package_index.bro +++ b/testing/btest/doc/broxygen/package_index.bro @@ -1,3 +1,4 @@ +# @TEST-SERIALIZE: comm # @TEST-EXEC: bro -b -X broxygen.config %INPUT # @TEST-EXEC: btest-diff test.rst diff --git a/testing/btest/doc/broxygen/script_index.bro b/testing/btest/doc/broxygen/script_index.bro index 91bb4b756f..86e1909863 100644 --- a/testing/btest/doc/broxygen/script_index.bro +++ b/testing/btest/doc/broxygen/script_index.bro @@ -1,3 +1,4 @@ +# @TEST-SERIALIZE: comm # @TEST-EXEC: bro -b -X broxygen.config %INPUT # @TEST-EXEC: btest-diff test.rst diff --git a/testing/btest/doc/broxygen/script_summary.bro b/testing/btest/doc/broxygen/script_summary.bro index 9d3cda012b..a7aafc65a0 100644 --- a/testing/btest/doc/broxygen/script_summary.bro +++ b/testing/btest/doc/broxygen/script_summary.bro @@ -1,3 +1,4 @@ +# @TEST-SERIALIZE: comm # @TEST-EXEC: bro -b -X broxygen.config %INPUT # @TEST-EXEC: btest-diff test.rst diff --git a/testing/btest/istate/broccoli-ipv6-socket.bro b/testing/btest/istate/broccoli-ipv6-socket.bro index be6266fdec..27df984471 100644 --- a/testing/btest/istate/broccoli-ipv6-socket.bro +++ b/testing/btest/istate/broccoli-ipv6-socket.bro @@ -4,8 +4,7 @@ # @TEST-REQUIRES: ifconfig | grep -q -E "inet6 ::1|inet6 addr: ::1" # # @TEST-EXEC: btest-bg-run bro bro $DIST/aux/broccoli/test/broccoli-v6addrs.bro "Communication::listen_ipv6=T" -# @TEST-EXEC: sleep 1 -# @TEST-EXEC: btest-bg-run broccoli $BUILD/aux/broccoli/test/broccoli-v6addrs -6 ::1 +# @TEST-EXEC: btest-bg-run broccoli $BUILD/aux/broccoli/test/broccoli-v6addrs -R -6 ::1 # @TEST-EXEC: btest-bg-wait 20 # @TEST-EXEC: btest-diff bro/.stdout # @TEST-EXEC: btest-diff broccoli/.stdout diff --git a/testing/btest/istate/broccoli-ipv6.bro b/testing/btest/istate/broccoli-ipv6.bro index b4fdfb5fcf..0e360df713 100644 --- 
a/testing/btest/istate/broccoli-ipv6.bro +++ b/testing/btest/istate/broccoli-ipv6.bro @@ -3,8 +3,7 @@ # @TEST-REQUIRES: test -e $BUILD/aux/broccoli/src/libbroccoli.so || test -e $BUILD/aux/broccoli/src/libbroccoli.dylib # # @TEST-EXEC: btest-bg-run bro bro $DIST/aux/broccoli/test/broccoli-v6addrs.bro -# @TEST-EXEC: sleep 1 -# @TEST-EXEC: btest-bg-run broccoli $BUILD/aux/broccoli/test/broccoli-v6addrs +# @TEST-EXEC: btest-bg-run broccoli $BUILD/aux/broccoli/test/broccoli-v6addrs -R # @TEST-EXEC: btest-bg-wait 20 # @TEST-EXEC: btest-diff bro/.stdout # @TEST-EXEC: btest-diff broccoli/.stdout diff --git a/testing/btest/istate/broccoli-ssl.bro b/testing/btest/istate/broccoli-ssl.bro index dcbea93150..fce5ed8535 100644 --- a/testing/btest/istate/broccoli-ssl.bro +++ b/testing/btest/istate/broccoli-ssl.bro @@ -4,8 +4,7 @@ # # @TEST-EXEC: chmod 600 broccoli.conf # @TEST-EXEC: btest-bg-run bro bro $DIST/aux/broccoli/test/broccoli-v6addrs.bro "Communication::listen_ssl=T" "ssl_ca_certificate=../ca_cert.pem" "ssl_private_key=../bro.pem" -# @TEST-EXEC: sleep 1 -# @TEST-EXEC: btest-bg-run broccoli BROCCOLI_CONFIG_FILE=../broccoli.conf $BUILD/aux/broccoli/test/broccoli-v6addrs +# @TEST-EXEC: btest-bg-run broccoli BROCCOLI_CONFIG_FILE=../broccoli.conf $BUILD/aux/broccoli/test/broccoli-v6addrs -R # @TEST-EXEC: btest-bg-wait 20 # @TEST-EXEC: btest-diff bro/.stdout # @TEST-EXEC: btest-diff broccoli/.stdout diff --git a/testing/btest/istate/broccoli-vector.bro b/testing/btest/istate/broccoli-vector.bro index ce107f45d3..cf0b0c8642 100644 --- a/testing/btest/istate/broccoli-vector.bro +++ b/testing/btest/istate/broccoli-vector.bro @@ -3,8 +3,7 @@ # @TEST-REQUIRES: test -e $BUILD/aux/broccoli/src/libbroccoli.so || test -e $BUILD/aux/broccoli/src/libbroccoli.dylib # # @TEST-EXEC: btest-bg-run bro bro $DIST/aux/broccoli/test/broccoli-vectors.bro -# @TEST-EXEC: sleep 1 -# @TEST-EXEC: btest-bg-run broccoli $BUILD/aux/broccoli/test/broccoli-vectors +# @TEST-EXEC: btest-bg-run broccoli $BUILD/aux/broccoli/test/broccoli-vectors -R # @TEST-EXEC: btest-bg-wait 20 # @TEST-EXEC: btest-diff bro/.stdout # @TEST-EXEC: btest-diff broccoli/.stdout diff --git a/testing/btest/istate/broccoli.bro b/testing/btest/istate/broccoli.bro index 2fdd4cbda4..a6427412bf 100644 --- a/testing/btest/istate/broccoli.bro +++ b/testing/btest/istate/broccoli.bro @@ -3,8 +3,7 @@ # @TEST-REQUIRES: test -e $BUILD/aux/broccoli/src/libbroccoli.so || test -e $BUILD/aux/broccoli/src/libbroccoli.dylib # # @TEST-EXEC: btest-bg-run bro bro %INPUT $DIST/aux/broccoli/test/broping-record.bro -# @TEST-EXEC: sleep 1 -# @TEST-EXEC: btest-bg-run broccoli $BUILD/aux/broccoli/test/broping -r -c 3 127.0.0.1 +# @TEST-EXEC: btest-bg-run broccoli $BUILD/aux/broccoli/test/broping -R -r -c 3 127.0.0.1 # @TEST-EXEC: btest-bg-wait 20 # @TEST-EXEC: cat bro/ping.log | sed 's/one-way.*//g' >bro.log # @TEST-EXEC: cat broccoli/.stdout | sed 's/time=.*//g' >broccoli.log diff --git a/testing/btest/language/expire-expr-error.bro b/testing/btest/language/expire-expr-error.bro index c355bd58ed..7c9a3aa318 100644 --- a/testing/btest/language/expire-expr-error.bro +++ b/testing/btest/language/expire-expr-error.bro @@ -1,13 +1,12 @@ -# @TEST-EXEC: btest-bg-run broproc bro %INPUT -# @TEST-EXEC: btest-bg-wait -k 5 -# @TEST-EXEC: cat broproc/.stderr > output +# @TEST-EXEC: bro -b %INPUT +# @TEST-EXEC: cp .stderr output # @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff output +redef exit_only_after_terminate = T; + global x: table[string] of interval; global data: 
table[int] of string &create_expire=x["kaputt"]; -@load frameworks/communication/listen - global runs = 0; event do_it() { @@ -16,6 +15,8 @@ event do_it() ++runs; if ( runs < 4 ) schedule 1sec { do_it() }; + else + terminate(); } @@ -24,6 +25,3 @@ event bro_init() &priority=-10 data[0] = "some data"; schedule 1sec { do_it() }; } - - - diff --git a/testing/btest/language/expire-redef.bro b/testing/btest/language/expire-redef.bro index f08ac8d3f2..6bf43ae98a 100644 --- a/testing/btest/language/expire-redef.bro +++ b/testing/btest/language/expire-redef.bro @@ -3,8 +3,6 @@ redef exit_only_after_terminate = T; -@load frameworks/communication/listen - const exp_val = -1sec &redef; global expired: function(tbl: table[int] of string, idx: int): interval; diff --git a/testing/btest/plugins/hooks-plugin/src/Plugin.cc b/testing/btest/plugins/hooks-plugin/src/Plugin.cc index 407ad1c242..d636e12b7c 100644 --- a/testing/btest/plugins/hooks-plugin/src/Plugin.cc +++ b/testing/btest/plugins/hooks-plugin/src/Plugin.cc @@ -3,6 +3,8 @@ #include #include +#include +#include namespace plugin { namespace Demo_Hooks { Plugin plugin; } } @@ -18,6 +20,9 @@ plugin::Configuration Plugin::Configure() EnableHook(META_HOOK_PRE); EnableHook(META_HOOK_POST); EnableHook(HOOK_BRO_OBJ_DTOR); + EnableHook(HOOK_SETUP_ANALYZER_TREE); + EnableHook(HOOK_LOG_INIT); + EnableHook(HOOK_LOG_WRITE); plugin::Configuration config; config.name = "Demo::Hooks"; @@ -121,3 +126,134 @@ void Plugin::MetaHookPost(HookType hook, const HookArgumentList& args, HookArgum hook_name(hook), d1.Description(), d2.Description()); } + +void Plugin::HookSetupAnalyzerTree(Connection *conn) + { + ODesc d; + d.SetShort(); + conn->Describe(&d); + + fprintf(stderr, "%.6f %-15s %s\n", network_time, "| HookSetupAnalyzerTree", d.Description()); + } + +void Plugin::HookLogInit(const std::string& writer, const std::string& instantiating_filter, bool local, bool remote, const logging::WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields) + { + ODesc d; + + d.Add("{"); + for ( int i=0; i < num_fields; i++ ) + { + const threading::Field* f = fields[i]; + + if ( i > 0 ) + d.Add(", "); + + d.Add(f->name); + d.Add(" ("); + d.Add(f->TypeName()); + d.Add(")"); + } + d.Add("}"); + + fprintf(stderr, "%.6f %-15s %s %d/%d %s\n", network_time, "| HookLogInit", info.path, local, remote, d.Description()); + } + +void Plugin::RenderVal(const threading::Value* val, ODesc &d) const + { + if ( ! val->present ) + { + d.Add(""); + return; + } + + switch ( val->type ) { + + case TYPE_BOOL: + d.Add(val->val.int_val ? 
"T" : "F"); + break; + + case TYPE_INT: + d.Add(val->val.int_val); + break; + + case TYPE_COUNT: + case TYPE_COUNTER: + d.Add(val->val.uint_val); + break; + + case TYPE_PORT: + d.Add(val->val.port_val.port); + break; + + case TYPE_SUBNET: + d.Add(threading::formatter::Formatter::Render(val->val.subnet_val)); + break; + + case TYPE_ADDR: + d.Add(threading::formatter::Formatter::Render(val->val.addr_val)); + break; + + case TYPE_DOUBLE: + d.Add(val->val.double_val, true); + break; + + case TYPE_INTERVAL: + case TYPE_TIME: + d.Add(threading::formatter::Formatter::Render(val->val.double_val)); + break; + + case TYPE_ENUM: + case TYPE_STRING: + case TYPE_FILE: + case TYPE_FUNC: + d.AddN(val->val.string_val.data, val->val.string_val.length); + break; + + case TYPE_TABLE: + for ( int j = 0; j < val->val.set_val.size; j++ ) + { + if ( j > 0 ) + d.Add(","); + + RenderVal(val->val.set_val.vals[j], d); + } + break; + + case TYPE_VECTOR: + for ( int j = 0; j < val->val.vector_val.size; j++ ) + { + if ( j > 0 ) + d.Add(","); + + RenderVal(val->val.vector_val.vals[j], d); + } + break; + + default: + assert(false); + } + } + +bool Plugin::HookLogWrite(const std::string& writer, const std::string& filter, const logging::WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields, threading::Value** vals) + { + ODesc d; + + d.Add("["); + for ( int i=0; i < num_fields; i++ ) + { + const threading::Field* f = fields[i]; + const threading::Value* val = vals[i]; + + if ( i > 0 ) + d.Add(", "); + + d.Add(f->name); + d.Add("="); + + RenderVal(val, d); + } + d.Add("]"); + + fprintf(stderr, "%.6f %-15s %s %s\n", network_time, "| HookLogWrite", info.path, d.Description()); + return true; + } diff --git a/testing/btest/plugins/hooks-plugin/src/Plugin.h b/testing/btest/plugins/hooks-plugin/src/Plugin.h index efbd25bc2d..64227c0660 100644 --- a/testing/btest/plugins/hooks-plugin/src/Plugin.h +++ b/testing/btest/plugins/hooks-plugin/src/Plugin.h @@ -10,17 +10,22 @@ namespace Demo_Hooks { class Plugin : public ::plugin::Plugin { protected: - virtual int HookLoadFile(const std::string& file, const std::string& ext); - virtual std::pair HookCallFunction(const Func* func, Frame* frame, val_list* args); - virtual bool HookQueueEvent(Event* event); - virtual void HookDrainEvents(); - virtual void HookUpdateNetworkTime(double network_time); - virtual void HookBroObjDtor(void* obj); - virtual void MetaHookPre(HookType hook, const HookArgumentList& args); - virtual void MetaHookPost(HookType hook, const HookArgumentList& args, HookArgument result); + int HookLoadFile(const std::string& file, const std::string& ext) override; + std::pair HookCallFunction(const Func* func, Frame* frame, val_list* args) override; + bool HookQueueEvent(Event* event) override; + void HookDrainEvents() override; + void HookUpdateNetworkTime(double network_time) override; + void HookBroObjDtor(void* obj) override; + void HookLogInit(const std::string& writer, const std::string& instantiating_filter, bool local, bool remote, const logging::WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields) override; + bool HookLogWrite(const std::string& writer, const std::string& filter, const logging::WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields, threading::Value** vals) override; + void HookSetupAnalyzerTree(Connection *conn) override; + void MetaHookPre(HookType hook, const HookArgumentList& args) override; + void MetaHookPost(HookType hook, const 
HookArgumentList& args, HookArgument result) override; + + void RenderVal(const threading::Value* val, ODesc &d) const; // Overridden from plugin::Plugin. - virtual plugin::Configuration Configure(); + plugin::Configuration Configure() override; }; extern Plugin plugin; diff --git a/testing/btest/plugins/logging-hooks-plugin/.btest-ignore b/testing/btest/plugins/logging-hooks-plugin/.btest-ignore new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testing/btest/plugins/logging-hooks-plugin/src/Plugin.cc b/testing/btest/plugins/logging-hooks-plugin/src/Plugin.cc new file mode 100644 index 0000000000..32dd2b17b3 --- /dev/null +++ b/testing/btest/plugins/logging-hooks-plugin/src/Plugin.cc @@ -0,0 +1,60 @@ + +#include "Plugin.h" + +#include +#include +#include +#include + +namespace plugin { namespace Log_Hooks { Plugin plugin; } } + +using namespace plugin::Log_Hooks; + +plugin::Configuration Plugin::Configure() + { + round = 0; + EnableHook(HOOK_LOG_INIT); + EnableHook(HOOK_LOG_WRITE); + + plugin::Configuration config; + config.name = "Log::Hooks"; + config.description = "Exercises Log hooks"; + config.version.major = 1; + config.version.minor = 0; + return config; + } + +void Plugin::HookLogInit(const std::string& writer, const std::string& instantiating_filter, bool local, bool remote, const logging::WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields) + { + ODesc d; + + d.Add("{"); + for ( int i=0; i < num_fields; i++ ) + { + const threading::Field* f = fields[i]; + + if ( i > 0 ) + d.Add(", "); + + d.Add(f->name); + d.Add(" ("); + d.Add(f->TypeName()); + d.Add(")"); + } + d.Add("}"); + + fprintf(stderr, "%.6f %-15s %s %d/%d %s\n", network_time, "| HookLogInit", info.path, local, remote, d.Description()); + } + +bool Plugin::HookLogWrite(const std::string& writer, const std::string& filter, const logging::WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields, threading::Value** vals) + { + round++; + if ( round == 1 ) // do not output line + return false; + else if ( round == 2 ) + vals[0]->val.int_val = 0; + else if ( round == 3 ) + vals[1]->present = false; + + return true; + } diff --git a/testing/btest/plugins/logging-hooks-plugin/src/Plugin.h b/testing/btest/plugins/logging-hooks-plugin/src/Plugin.h new file mode 100644 index 0000000000..12b821c22c --- /dev/null +++ b/testing/btest/plugins/logging-hooks-plugin/src/Plugin.h @@ -0,0 +1,28 @@ + +#ifndef BRO_PLUGIN_Log_Hooks +#define BRO_PLUGIN_Log_Hooks + +#include + +namespace plugin { +namespace Log_Hooks { + +class Plugin : public ::plugin::Plugin +{ +protected: + void HookLogInit(const std::string& writer, const std::string& instantiating_filter, bool local, bool remote, const logging::WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields) override; + bool HookLogWrite(const std::string& writer, const std::string& filter, const logging::WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields, threading::Value** vals) override; + + // Overridden from plugin::Plugin. + plugin::Configuration Configure() override; + +private: + int round; +}; + +extern Plugin plugin; + +} +} + +#endif diff --git a/testing/btest/plugins/logging-hooks.bro b/testing/btest/plugins/logging-hooks.bro new file mode 100644 index 0000000000..f2ca926c06 --- /dev/null +++ b/testing/btest/plugins/logging-hooks.bro @@ -0,0 +1,72 @@ +# @TEST-EXEC: ${DIST}/aux/bro-aux/plugin-support/init-plugin -u . 
Log Hooks +# @TEST-EXEC: cp -r %DIR/logging-hooks-plugin/* . +# @TEST-EXEC: ./configure --bro-dist=${DIST} && make +# @TEST-EXEC: BRO_PLUGIN_ACTIVATE="Log::Hooks" BRO_PLUGIN_PATH=`pwd` bro -b %INPUT 2>&1 | $SCRIPTS/diff-remove-abspath | sort | uniq >output +# @TEST-EXEC: btest-diff output +# @TEST-EXEC: btest-diff ssh.log + +redef LogAscii::empty_field = "EMPTY"; + +module SSH; + +export { + redef enum Log::ID += { LOG }; + + type Log: record { + b: bool; + i: int &optional; + e: Log::ID; + c: count; + p: port; + sn: subnet; + a: addr; + d: double; + t: time; + iv: interval; + s: string; + sc: set[count]; + ss: set[string]; + se: set[string]; + vc: vector of count; + ve: vector of string; + f: function(i: count) : string; + } &log; +} + +function foo(i : count) : string + { + if ( i > 0 ) + return "Foo"; + else + return "Bar"; + } + +event bro_init() +{ + Log::create_stream(SSH::LOG, [$columns=Log]); + + local empty_set: set[string]; + local empty_vector: vector of string; + + local i = 0; + while ( ++i < 4 ) + Log::write(SSH::LOG, [ + $b=T, + $i=-i, + $e=SSH::LOG, + $c=21, + $p=123/tcp, + $sn=10.0.0.1/24, + $a=1.2.3.4, + $d=3.14, + $t=network_time(), + $iv=100secs, + $s="hurz", + $sc=set(1,2,3,4), + $ss=set("AA", "BB", "CC"), + $se=empty_set, + $vc=vector(10, 20, 30), + $ve=empty_vector, + $f=foo + ]); +} diff --git a/testing/btest/scripts/base/frameworks/intel/expire-item.bro b/testing/btest/scripts/base/frameworks/intel/expire-item.bro index dd915b3a03..690a461ea4 100644 --- a/testing/btest/scripts/base/frameworks/intel/expire-item.bro +++ b/testing/btest/scripts/base/frameworks/intel/expire-item.bro @@ -9,9 +9,10 @@ 1.2.3.4 Intel::ADDR source1 this host is bad http://some-data-distributor.com/1 # @TEST-END-FILE -@load frameworks/communication/listen @load frameworks/intel/do_expire +redef exit_only_after_terminate = T; + redef Intel::read_files += { "../intel.dat" }; redef enum Intel::Where += { SOMEWHERE }; redef Intel::item_expiration = 9sec; @@ -44,6 +45,8 @@ event do_it() if ( runs < 6 ) schedule 3sec { do_it() }; + else + terminate(); } event Intel::match(s: Intel::Seen, items: set[Intel::Item]) diff --git a/testing/btest/scripts/base/frameworks/intel/match-subnet.bro b/testing/btest/scripts/base/frameworks/intel/match-subnet.bro index 1e25868de1..8e3fe74116 100644 --- a/testing/btest/scripts/base/frameworks/intel/match-subnet.bro +++ b/testing/btest/scripts/base/frameworks/intel/match-subnet.bro @@ -14,7 +14,7 @@ 192.168.128.0/18 Intel::SUBNET source1 this subnetwork might be baaad http://some-data-distributor.com/5 # @TEST-END-FILE -@load frameworks/communication/listen +redef exit_only_after_terminate = T; redef Intel::read_files += { "../intel.dat" }; redef enum Intel::Where += { SOMEWHERE }; diff --git a/testing/btest/scripts/base/frameworks/intel/remove-non-existing.bro b/testing/btest/scripts/base/frameworks/intel/remove-non-existing.bro index 379d132834..1885f5bcf8 100644 --- a/testing/btest/scripts/base/frameworks/intel/remove-non-existing.bro +++ b/testing/btest/scripts/base/frameworks/intel/remove-non-existing.bro @@ -9,7 +9,7 @@ 192.168.1.1 Intel::ADDR source1 this host is just plain baaad http://some-data-distributor.com/1 # @TEST-END-FILE -@load frameworks/communication/listen +redef exit_only_after_terminate = T; redef Intel::read_files += { "../intel.dat" }; redef enum Intel::Where += { SOMEWHERE }; diff --git a/testing/btest/scripts/base/frameworks/logging/ascii-gz.bro b/testing/btest/scripts/base/frameworks/logging/ascii-gz.bro new file mode 100644 index 
0000000000..9563f42c40 --- /dev/null +++ b/testing/btest/scripts/base/frameworks/logging/ascii-gz.bro @@ -0,0 +1,75 @@ +# +# @TEST-EXEC: bro -b %INPUT +# @TEST-EXEC: gunzip ssh.log.gz +# @TEST-EXEC: btest-diff ssh.log +# @TEST-EXEC: btest-diff ssh-uncompressed.log +# +# Testing all possible types. + +redef LogAscii::gzip_level = 9; + +module SSH; + +export { + redef enum Log::ID += { LOG }; + + type Log: record { + b: bool; + i: int; + e: Log::ID; + c: count; + p: port; + sn: subnet; + a: addr; + d: double; + t: time; + iv: interval; + s: string; + sc: set[count]; + ss: set[string]; + se: set[string]; + vc: vector of count; + ve: vector of string; + f: function(i: count) : string; + } &log; +} + +function foo(i : count) : string + { + if ( i > 0 ) + return "Foo"; + else + return "Bar"; + } + +event bro_init() +{ + Log::create_stream(SSH::LOG, [$columns=Log]); + local filter = Log::Filter($name="ssh-uncompressed", $path="ssh-uncompressed", + $config = table(["gzip_level"] = "0")); + Log::add_filter(SSH::LOG, filter); + + local empty_set: set[string]; + local empty_vector: vector of string; + + Log::write(SSH::LOG, [ + $b=T, + $i=-42, + $e=SSH::LOG, + $c=21, + $p=123/tcp, + $sn=10.0.0.1/24, + $a=1.2.3.4, + $d=3.14, + $t=(strptime("%Y-%m-%dT%H:%M:%SZ", "2008-07-09T16:13:30Z") + 0.543210 secs), + $iv=100secs, + $s="hurz", + $sc=set(1,2,3,4), + $ss=set("AA", "BB", "CC"), + $se=empty_set, + $vc=vector(10, 20, 30), + $ve=empty_vector, + $f=foo + ]); +} + diff --git a/testing/btest/scripts/base/frameworks/netcontrol/acld.bro b/testing/btest/scripts/base/frameworks/netcontrol/acld.bro index 364624e90e..e13b1d340e 100644 --- a/testing/btest/scripts/base/frameworks/netcontrol/acld.bro +++ b/testing/btest/scripts/base/frameworks/netcontrol/acld.bro @@ -79,6 +79,11 @@ event NetControl::rule_removed(r: NetControl::Rule, p: NetControl::PluginState, print "rule removed", r$entity, r$ty; } +event NetControl::rule_error(r: NetControl::Rule, p: NetControl::PluginState, msg: string) + { + print "rule error", r$entity, r$ty; + } + @TEST-END-FILE @TEST-START-FILE recv.bro @@ -115,7 +120,10 @@ event NetControl::acld_remove_rule(id: count, r: NetControl::Rule, ar: NetContro { print "remove_rule", id, r$entity, r$ty, ar; - Broker::send_event("bro/event/netcontroltest", Broker::event_args(NetControl::acld_rule_removed, id, r, ar$command)); + if ( r$cid != 2 ) + Broker::send_event("bro/event/netcontroltest", Broker::event_args(NetControl::acld_rule_removed, id, r, ar$command)); + else + Broker::send_event("bro/event/netcontroltest", Broker::event_args(NetControl::acld_rule_error, id, r, ar$command)); if ( r$cid == 4 ) terminate(); diff --git a/testing/btest/scripts/base/misc/version.bro b/testing/btest/scripts/base/misc/version.bro index cd19f0ee30..238003f10d 100644 --- a/testing/btest/scripts/base/misc/version.bro +++ b/testing/btest/scripts/base/misc/version.bro @@ -36,6 +36,6 @@ print "yup"; print "yup"; @endif -@if ( Version::at_least("2.4") ) +@if ( Version::at_least("2.9") ) print "no"; @endif diff --git a/testing/btest/scripts/base/protocols/nfs/basic.test b/testing/btest/scripts/base/protocols/nfs/basic.test new file mode 100755 index 0000000000..4940c095a5 --- /dev/null +++ b/testing/btest/scripts/base/protocols/nfs/basic.test @@ -0,0 +1,66 @@ +# @TEST-EXEC: bro -b -r $TRACES/nfs/nfs_base.pcap %INPUT +# @TEST-EXEC: btest-diff .stdout + +global nfs_ports: set[port] = { 2049/tcp, 2049/udp } &redef; +redef ignore_checksums = T; + +event bro_init() + { + Analyzer::register_for_ports(Analyzer::ANALYZER_NFS, 
nfs_ports); + } + +event nfs_proc_lookup(c: connection , info: NFS3::info_t , req: NFS3::diropargs_t , rep: NFS3::lookup_reply_t ) + { + print "nfs_proc_lookup", c, info, req, rep; + } + +event nfs_proc_read(c: connection , info: NFS3::info_t , req: NFS3::readargs_t , rep: NFS3::read_reply_t ) + { + print "nfs_proc_read", c, info, req, rep; + } + +event nfs_proc_readlink(c: connection , info: NFS3::info_t , fh: string , rep: NFS3::readlink_reply_t ) + { + print "nfs_proc_readlink", c, info, fh, rep; + } + +event nfs_proc_write(c: connection , info: NFS3::info_t , req: NFS3::writeargs_t , rep: NFS3::write_reply_t ) + { + print "nfs_proc_write", c, info, req, rep; + } + +event nfs_proc_create(c: connection , info: NFS3::info_t , req: NFS3::diropargs_t , rep: NFS3::newobj_reply_t ) + { + print "nfs_proc_create", c, info, req, rep; + } + +event nfs_proc_mkdir(c: connection , info: NFS3::info_t , req: NFS3::diropargs_t , rep: NFS3::newobj_reply_t ) + { + print "nfs_proc_mkdir", c, info, req, rep; + } + +event nfs_proc_remove(c: connection , info: NFS3::info_t , req: NFS3::diropargs_t , rep: NFS3::delobj_reply_t ) + { + print "nfs_proc_remove", c, info, req, rep; + } + +event nfs_proc_rmdir(c: connection , info: NFS3::info_t , req: NFS3::diropargs_t , rep: NFS3::delobj_reply_t ) + { + print "nfs_proc_rmdir", c, info, req, rep; + } + +event nfs_proc_readdir(c: connection , info: NFS3::info_t , req: NFS3::readdirargs_t , rep: NFS3::readdir_reply_t ) + { + print "nfs_proc_readdir", c, info, req, rep; + } + +event nfs_proc_rename(c: connection , info: NFS3::info_t , req: NFS3::renameopargs_t , rep: NFS3::renameobj_reply_t ) + { + print "nfs_proc_rename", c, info, req, rep; + } + +event nfs_proc_not_implemented(c: connection , info: NFS3::info_t , proc: NFS3::proc_t ) + { + print "nfs_proc_not_implemented", c, info, proc; + } +