diff --git a/CHANGES b/CHANGES
index 3ce0107e15..96a6152d05 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,4 +1,27 @@
+2.5-beta-114 | 2016-10-27 09:00:24 -0700
+
+  * Fix for Sphinx >= 1.4 compatibility. (Robin Sommer)
+
+2.5-beta-113 | 2016-10-27 07:44:25 -0700
+
+  * XMPP: Fix detection of StartTLS when using namespaces. (Johanna
+    Amann)
+
+2.5-beta-110 | 2016-10-26 09:42:11 -0400
+
+  * Improve the DCE_RPC analyzer to perform fragment handling correctly
+    and to be generally more resistant to unexpected traffic. (Seth Hall)
+
+2.5-beta-102 | 2016-10-25 09:43:45 -0700
+
+  * Update the number of reserved bytes in the request/response of smb1-com-open-andx.pac. (balintm)
+
+  * Fix IPv4 CIDR specifications and the payload-size condition in
+    signature matching. (Robin Sommer)
+
+  * Python 3 compatibility fix for the coverage-calc script. (Daniel Thayer)
+
 2.5-beta-93 | 2016-10-24 11:11:07 -0700
 
   * Fix alignment issue of ones_complement_checksum. This error
@@ -515,7 +538,7 @@
 2.4-623 | 2016-06-15 17:31:12 -0700
 
   * &default values are no longer overwritten with uninitialized
-    by the input framework. (Jan Grashoefer)
+    by the input framework. (Jan Grashoefer)
 
 2.4-621 | 2016-06-15 09:18:02 -0700
 
diff --git a/NEWS b/NEWS
index 34d942a7db..c4aeb6133a 100644
--- a/NEWS
+++ b/NEWS
@@ -41,6 +41,9 @@ New Functionality
   New log files: net_control.log, netcontrol_catch_release.log,
   netcontrol_drop.log, and netcontrol_shunt.log.
 
+- Bro now includes the OpenFlow framework, which exposes the data structures
+  necessary to interface with OpenFlow-capable hardware.
+
 - Bro's Intelligence Framework was refactored and new functionality
   has been added:
 
@@ -154,8 +157,10 @@ New Functionality
 
 - The pcap buffer size can be set through the new option Pcap::bufsize.
 
-- Input framework readers Table and Event can now define a custom
-  event to receive logging messages.
+- The input framework's Table and Event stream types can now define a custom
+  event (specified by the new "error_ev" field) that receives error messages
+  emitted by the input stream. This can, e.g., be used to raise notices when
+  errors occur while reading an important input source; see the sketch below.
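  A minimal sketch (not part of this patch) of how the new "error_ev" field
  might be used. The handler signature shown follows the input framework's
  error-event convention (description record, message, reporter level) and
  should be checked against the current documentation; the names
  "blacklist_error", "blacklist" and "blacklist.txt" are placeholders.

	@load base/frameworks/input

	type Idx: record {
		host: addr;
	};

	global blacklist: set[addr] = set();

	# Receives error messages emitted by the "blacklist" stream below.
	event blacklist_error(desc: Input::TableDescription, message: string, level: Reporter::Level)
		{
		print fmt("problem reading %s: %s", desc$source, message);
		}

	event bro_init()
		{
		Input::add_table([$source="blacklist.txt", $name="blacklist",
		                  $idx=Idx, $destination=blacklist,
		                  $error_ev=blacklist_error]);
		}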
- The logging framework now supports user-defined record separators, renaming of column names, as well as extension data columns that can diff --git a/VERSION b/VERSION index a221981979..7375177fa9 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.5-beta-93 +2.5-beta-114 diff --git a/aux/binpac b/aux/binpac index 097c1dde17..3f7b38c293 160000 --- a/aux/binpac +++ b/aux/binpac @@ -1 +1 @@ -Subproject commit 097c1dde17c218973a9adad9ba39f8cfd639d9c1 +Subproject commit 3f7b38c293e94143a757590918aac82281e46500 diff --git a/aux/bro-aux b/aux/bro-aux index 0191254451..a9c2717232 160000 --- a/aux/bro-aux +++ b/aux/bro-aux @@ -1 +1 @@ -Subproject commit 0191254451d1aa9a5c985d493ad51f4f1c5f7d85 +Subproject commit a9c2717232764808ca6029f8e727812b58424839 diff --git a/aux/broccoli b/aux/broccoli index 0743c4f516..3f036d36d1 160000 --- a/aux/broccoli +++ b/aux/broccoli @@ -1 +1 @@ -Subproject commit 0743c4f51600cc90aceccaee72ca879b271712d2 +Subproject commit 3f036d36d1e4a42dd672f8a03caf81e38f318f2d diff --git a/aux/broctl b/aux/broctl index 741f6aefce..895fae348a 160000 --- a/aux/broctl +++ b/aux/broctl @@ -1 +1 @@ -Subproject commit 741f6aefce5758d7a62ac5be05f4c750afb5e463 +Subproject commit 895fae348aa03032d198350d03bfc09eb46ed4b4 diff --git a/aux/broker b/aux/broker index 497924cdcc..35d292cbae 160000 --- a/aux/broker +++ b/aux/broker @@ -1 +1 @@ -Subproject commit 497924cdcc23d26221bc39b24bcddcb62ec13ca7 +Subproject commit 35d292cbaef2fafdaede5975d097c27d810382ab diff --git a/aux/btest b/aux/btest index 625dbecfd6..17d1c15476 160000 --- a/aux/btest +++ b/aux/btest @@ -1 +1 @@ -Subproject commit 625dbecfd63022d79a144b9651085e68cdf99ce4 +Subproject commit 17d1c1547678bfd54ef1202db5415bc85c7ae794 diff --git a/cmake b/cmake index 39510b5fb2..71932e91ab 160000 --- a/cmake +++ b/cmake @@ -1 +1 @@ -Subproject commit 39510b5fb2351d7aac85da0d335a128402db3bbc +Subproject commit 71932e91ab329158950d41d630f96509ffe7a217 diff --git a/doc/components/bro-plugins/elasticsearch/README b/doc/components/bro-plugins/elasticsearch/README.rst similarity index 100% rename from doc/components/bro-plugins/elasticsearch/README rename to doc/components/bro-plugins/elasticsearch/README.rst diff --git a/doc/ext/bro.py b/doc/ext/bro.py index 1df4a518c2..c78615813f 100644 --- a/doc/ext/bro.py +++ b/doc/ext/bro.py @@ -14,6 +14,7 @@ from sphinx.locale import l_, _ from sphinx.directives import ObjectDescription from sphinx.roles import XRefRole from sphinx.util.nodes import make_refnode +from sphinx import version_info import string from docutils import nodes @@ -32,6 +33,14 @@ class SeeDirective(Directive): n.refs = string.split(string.join(self.content)) return [n] +# Wrapper for creating a tuple for index nodes, staying backwards +# compatible to Sphinx < 1.4: +def make_index_tuple(indextype, indexentry, targetname, targetname2): + if version_info >= (1, 4, 0, '', 0): + return (indextype, indexentry, targetname, targetname2, None) + else: + return (indextype, indexentry, targetname, targetname2) + def process_see_nodes(app, doctree, fromdocname): for node in doctree.traverse(see): content = [] @@ -95,8 +104,9 @@ class BroGeneric(ObjectDescription): indextext = self.get_index_text(self.objtype, name) if indextext: - self.indexnode['entries'].append(('single', indextext, - targetname, targetname)) + self.indexnode['entries'].append(make_index_tuple('single', + indextext, targetname, + targetname)) def get_index_text(self, objectname, name): return _('%s (%s)') % (name, self.objtype) @@ -120,9 +130,9 @@ class 
BroNamespace(BroGeneric):
         self.update_type_map(name)

         indextext = self.get_index_text(self.objtype, name)
-        self.indexnode['entries'].append(('single', indextext,
+        self.indexnode['entries'].append(make_index_tuple('single', indextext,
                                           targetname, targetname))
-        self.indexnode['entries'].append(('single',
+        self.indexnode['entries'].append(make_index_tuple('single',
                                           "namespaces; %s" % (sig),
                                           targetname, targetname))

@@ -148,7 +158,7 @@ class BroEnum(BroGeneric):
         self.update_type_map(name)

         indextext = self.get_index_text(self.objtype, name)
-        #self.indexnode['entries'].append(('single', indextext,
+        #self.indexnode['entries'].append(make_index_tuple('single', indextext,
         #                                  targetname, targetname))
         m = sig.split()

@@ -162,7 +172,7 @@ class BroEnum(BroGeneric):
                 self.env.domaindata['bro']['notices'] = []
             self.env.domaindata['bro']['notices'].append(
                 (m[0], self.env.docname, targetname))
-        self.indexnode['entries'].append(('single',
+        self.indexnode['entries'].append(make_index_tuple('single',
                                           "%s (enum values); %s" % (m[1], m[0]),
                                           targetname, targetname))

diff --git a/scripts/base/init-bare.bro b/scripts/base/init-bare.bro
index b8ebb9677e..a6d7c2d179 100644
--- a/scripts/base/init-bare.bro
+++ b/scripts/base/init-bare.bro
@@ -4390,6 +4390,19 @@ export {
 	const bufsize = 128 &redef;
 } # end export

+module DCE_RPC;
+export {
+	## The maximum number of simultaneous fragmented commands that
+	## the DCE_RPC analyzer will tolerate before it generates
+	## a weird and skips further input.
+	const max_cmd_reassembly = 20 &redef;
+
+	## The maximum number of fragmented bytes that the DCE_RPC analyzer
+	## will tolerate on a command before the analyzer generates a weird
+	## and skips further input.
+	const max_frag_data = 30000 &redef;
+}
+
 module GLOBAL;

 ## Seed for hashes computed internally for probabilistic data structures. Using
diff --git a/scripts/base/protocols/dce-rpc/consts.bro b/scripts/base/protocols/dce-rpc/consts.bro
index 0cbb8a30b6..d9e0993b03 100644
--- a/scripts/base/protocols/dce-rpc/consts.bro
+++ b/scripts/base/protocols/dce-rpc/consts.bro
@@ -90,15 +90,15 @@ export {
 		["2f5f3220-c126-1076-b549-074d078619da"] = "nddeapi",
 	} &redef &default=function(uuid: string): string { return fmt("unknown-%s", uuid); };

-	## This table is to map pipe names to the most common
-	## service used over that pipe. It helps in cases
+	## This table is to map pipe names to the most common
+	## service used over that pipe. It helps in cases
 	## where the pipe binding wasn't seen.
const pipe_name_to_common_uuid: table[string] of string = { ["winreg"] = "338cd001-2244-31f1-aaaa-900038001003", ["spoolss"] = "12345678-1234-abcd-ef00-0123456789ab", ["srvsvc"] = "4b324fc8-1670-01d3-1278-5a47bf6ee188", } &redef; - + const operations: table[string,count] of string = { # atsvc ["1ff70682-0a51-30e8-076d-740be8cee98b",0] = "NetrJobAdd", @@ -1460,7 +1460,7 @@ export { ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x14] = "DRSAddSidHistory", ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x15] = "DRSGetMemberships2", ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x16] = "DRSReplicaVerifyObjects", - ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x17] = "DRSGetObjectExistence", + ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x17] = "DRSGetObjectExistence", ["e3514235-4b06-11d1-ab04-00c04fc2dcd2",0x18] = "DRSQuerySitesByCost", # winspipe diff --git a/src/RuleCondition.cc b/src/RuleCondition.cc index 40ef5f0ad1..9df70f118b 100644 --- a/src/RuleCondition.cc +++ b/src/RuleCondition.cc @@ -89,6 +89,10 @@ bool RuleConditionPayloadSize::DoMatch(Rule* rule, RuleEndpointState* state, // on the pure rules now. return false; + if ( state->PayloadSize() == 0 ) + // We are interested in the first non-empty chunk. + return false; + uint32 payload_size = uint32(state->PayloadSize()); switch ( comp ) { diff --git a/src/RuleMatcher.cc b/src/RuleMatcher.cc index c88bb77a4f..3ee7306fb5 100644 --- a/src/RuleMatcher.cc +++ b/src/RuleMatcher.cc @@ -144,7 +144,7 @@ bool RuleHdrTest::operator==(const RuleHdrTest& h) void RuleHdrTest::PrintDebug() { static const char* str_comp[] = { "<=", ">=", "<", ">", "==", "!=" }; - static const char* str_prot[] = { "", "ip", "icmp", "tcp", "udp" }; + static const char* str_prot[] = { "", "ip", "ipv6", "icmp", "icmpv6", "tcp", "udp", "next", "ipsrc", "ipdst" }; fprintf(stderr, " RuleHdrTest %s[%d:%d] %s", str_prot[prot], offset, size, str_comp[comp]); @@ -1095,10 +1095,10 @@ void RuleMatcher::ExecRule(Rule* rule, RuleEndpointState* state, bool eos) void RuleMatcher::ClearEndpointState(RuleEndpointState* state) { - state->payload_size = -1; - ExecPureRules(state, 1); + state->payload_size = -1; + loop_over_list(state->matchers, j) state->matchers[j]->state->Clear(); } diff --git a/src/RuleMatcher.h b/src/RuleMatcher.h index b16a1556f9..23b7e6d731 100644 --- a/src/RuleMatcher.h +++ b/src/RuleMatcher.h @@ -72,6 +72,7 @@ extern uint32 id_to_uint(const char* id); class RuleHdrTest { public: + // Note: Adapt RuleHdrTest::PrintDebug() when changing these enums. 
enum Comp { LE, GE, LT, GT, EQ, NE }; enum Prot { NOPROT, IP, IPv6, ICMP, ICMPv6, TCP, UDP, NEXT, IPSrc, IPDst }; diff --git a/src/analyzer/protocol/dce-rpc/CMakeLists.txt b/src/analyzer/protocol/dce-rpc/CMakeLists.txt index 79ec16ada6..959e6ac87c 100644 --- a/src/analyzer/protocol/dce-rpc/CMakeLists.txt +++ b/src/analyzer/protocol/dce-rpc/CMakeLists.txt @@ -5,9 +5,9 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DI bro_plugin_begin(Bro DCE_RPC) bro_plugin_cc(DCE_RPC.cc Plugin.cc) -bro_plugin_bif(types.bif events.bif) +bro_plugin_bif(consts.bif types.bif events.bif) bro_plugin_pac( - dce_rpc.pac + dce_rpc.pac dce_rpc-protocol.pac dce_rpc-analyzer.pac dce_rpc-auth.pac diff --git a/src/analyzer/protocol/dce-rpc/DCE_RPC.cc b/src/analyzer/protocol/dce-rpc/DCE_RPC.cc index e93a2541f7..f7a96fbb6e 100644 --- a/src/analyzer/protocol/dce-rpc/DCE_RPC.cc +++ b/src/analyzer/protocol/dce-rpc/DCE_RPC.cc @@ -16,6 +16,7 @@ using namespace analyzer::dce_rpc; DCE_RPC_Analyzer::DCE_RPC_Analyzer(Connection *conn) : tcp::TCP_ApplicationAnalyzer("DCE_RPC", conn) { + had_gap = false; interp = new binpac::DCE_RPC::DCE_RPC_Conn(this); } @@ -41,6 +42,7 @@ void DCE_RPC_Analyzer::EndpointEOF(bool is_orig) void DCE_RPC_Analyzer::Undelivered(uint64 seq, int len, bool orig) { TCP_ApplicationAnalyzer::Undelivered(seq, len, orig); + had_gap = true; interp->NewGap(orig, len); } @@ -49,6 +51,12 @@ void DCE_RPC_Analyzer::DeliverStream(int len, const u_char* data, bool orig) TCP_ApplicationAnalyzer::DeliverStream(len, data, orig); assert(TCP()); + + if ( had_gap ) + // If only one side had a content gap, we could still try to + // deliver data to the other side if the script layer can handle this. + return; + try { interp->NewData(orig, data, data + len); diff --git a/src/analyzer/protocol/dce-rpc/DCE_RPC.h b/src/analyzer/protocol/dce-rpc/DCE_RPC.h index 714607f5e2..498e055e0a 100644 --- a/src/analyzer/protocol/dce-rpc/DCE_RPC.h +++ b/src/analyzer/protocol/dce-rpc/DCE_RPC.h @@ -29,6 +29,7 @@ public: { return new DCE_RPC_Analyzer(conn); } protected: + bool had_gap; binpac::DCE_RPC::DCE_RPC_Conn* interp; }; diff --git a/src/analyzer/protocol/dce-rpc/consts.bif b/src/analyzer/protocol/dce-rpc/consts.bif new file mode 100644 index 0000000000..68b052d84b --- /dev/null +++ b/src/analyzer/protocol/dce-rpc/consts.bif @@ -0,0 +1,2 @@ +const DCE_RPC::max_cmd_reassembly: count; +const DCE_RPC::max_frag_data: count; \ No newline at end of file diff --git a/src/analyzer/protocol/dce-rpc/dce_rpc-protocol.pac b/src/analyzer/protocol/dce-rpc/dce_rpc-protocol.pac index 5fe9380422..921b4ba51f 100644 --- a/src/analyzer/protocol/dce-rpc/dce_rpc-protocol.pac +++ b/src/analyzer/protocol/dce-rpc/dce_rpc-protocol.pac @@ -37,7 +37,7 @@ type DCE_RPC_PDU(is_orig: bool) = record { # Subtract an extra 8 when there is an auth section because we have some "auth header" fields in that structure. body_length : int = header.frag_length - sizeof(header) - header.auth_length - (header.auth_length > 0 ? 
8 : 0);
	frag_reassembled : bool = $context.flow.reassemble_fragment(header, frag);
-	body : DCE_RPC_Body(header) withinput $context.flow.reassembled_body(header, frag) &if(header.lastfrag);
+	body : DCE_RPC_Body(header) withinput $context.flow.reassembled_body(header, frag) &if(frag_reassembled);
 } &byteorder = header.byteorder, &length = header.frag_length;

 type NDR_Format = record {
@@ -174,23 +174,74 @@ flow DCE_RPC_Flow(is_orig: bool) {
 	flowunit = DCE_RPC_PDU(is_orig) withcontext(connection, this);

 	%member{
-		std::map<uint32, FlowBuffer*> fb;
+		std::map<uint32, std::unique_ptr<FlowBuffer>> fb;
 	%}

 	# Fragment reassembly.
 	function reassemble_fragment(header: DCE_RPC_Header, frag: bytestring): bool
 		%{
-		if ( ${header.firstfrag} && !${header.lastfrag} &&
-		     fb.count(${header.call_id}) == 0 )
-			fb[${header.call_id}] = new FlowBuffer();
+		if ( ${header.firstfrag} )
+			{
+			if ( fb.count(${header.call_id}) > 0 )
+				{
+				// We already had a first frag earlier.
+				reporter->Weird(connection()->bro_analyzer()->Conn(),
+				                "multiple_first_fragments_in_dce_rpc_reassembly");
+				connection()->bro_analyzer()->SetSkip(true);
+				return false;
+				}

-		if ( fb.count(${header.call_id}) == 0 )
+			if ( ${header.lastfrag} )
+				{
+				// all-in-one packet
+				return true;
+				}
+			else
+				{
+				// first frag, but not last, so we start a flowbuffer
+				fb[${header.call_id}] = std::unique_ptr<FlowBuffer>(new FlowBuffer());
+				fb[${header.call_id}]->NewFrame(0, true);
+				fb[${header.call_id}]->BufferData(frag.begin(), frag.end());
+
+				if ( fb.size() > BifConst::DCE_RPC::max_cmd_reassembly )
+					{
+					reporter->Weird(connection()->bro_analyzer()->Conn(),
+					                "too_many_dce_rpc_msgs_in_reassembly");
+					connection()->bro_analyzer()->SetSkip(true);
+					}
+
+				if ( fb[${header.call_id}]->data_length() > (int)BifConst::DCE_RPC::max_frag_data )
+					{
+					reporter->Weird(connection()->bro_analyzer()->Conn(),
+					                "too_much_dce_rpc_fragment_data");
+					connection()->bro_analyzer()->SetSkip(true);
+					}
+
+				return false;
+				}
+			}
+		else if ( fb.count(${header.call_id}) > 0 )
+			{
+			// not the first frag, but we have a flow buffer, so add to it
+			fb[${header.call_id}]->BufferData(frag.begin(), frag.end());
+
+			if ( fb[${header.call_id}]->data_length() > (int)BifConst::DCE_RPC::max_frag_data )
+				{
+				reporter->Weird(connection()->bro_analyzer()->Conn(),
+				                "too_much_dce_rpc_fragment_data");
+				connection()->bro_analyzer()->SetSkip(true);
+				}
+
+			return ${header.lastfrag};
+			}
+		else
+			{
+			// no flow buffer and not a first frag, ignore it.
 			return false;
+			}

-		auto frag_reassembler_ = fb[${header.call_id}];
-		frag_reassembler_->BufferData(frag.begin(), frag.end());
-
-		return (!${header.firstfrag} && ${header.lastfrag});
+		// can't reach here.
+ return false; %} function reassembled_body(h: DCE_RPC_Header, body: bytestring): const_bytestring @@ -200,7 +251,6 @@ flow DCE_RPC_Flow(is_orig: bool) { if ( fb.count(${h.call_id}) > 0 ) { bd = const_bytestring(fb[${h.call_id}]->begin(), fb[${h.call_id}]->end()); - delete fb[${h.call_id}]; fb.erase(${h.call_id}); } diff --git a/src/analyzer/protocol/dce-rpc/dce_rpc.pac b/src/analyzer/protocol/dce-rpc/dce_rpc.pac index f4a54a1e62..87070e6216 100644 --- a/src/analyzer/protocol/dce-rpc/dce_rpc.pac +++ b/src/analyzer/protocol/dce-rpc/dce_rpc.pac @@ -2,6 +2,7 @@ %include bro.pac %extern{ +#include "consts.bif.h" #include "types.bif.h" #include "events.bif.h" %} diff --git a/src/analyzer/protocol/smb/smb1-com-open-andx.pac b/src/analyzer/protocol/smb/smb1-com-open-andx.pac index 8f19f6f3a4..c7e7bea03a 100644 --- a/src/analyzer/protocol/smb/smb1-com-open-andx.pac +++ b/src/analyzer/protocol/smb/smb1-com-open-andx.pac @@ -52,7 +52,7 @@ type SMB1_open_andx_request(header: SMB_Header, offset: uint16) = record { open_mode : uint16; allocation_size : uint32; timeout : uint32; - reserved : padding[2]; + reserved : padding[4]; byte_count : uint16; filename : SMB_string(header.unicode, offsetof(filename); @@ -74,7 +74,7 @@ type SMB1_open_andx_response(header: SMB_Header, offset: uint16) = record { resource_type : uint16; nm_pipe_status : uint16; open_results : uint16; - reserved : padding[3]; + reserved : padding[6]; byte_count : uint16; extra_byte_parameters : bytestring &transient &length=(andx.offset == 0 || andx.offset >= (offset+offsetof(extra_byte_parameters))+2) ? 0 : (andx.offset-(offset+offsetof(extra_byte_parameters))); diff --git a/src/analyzer/protocol/xmpp/xmpp-analyzer.pac b/src/analyzer/protocol/xmpp/xmpp-analyzer.pac index 3240b57bb3..5253ce050b 100644 --- a/src/analyzer/protocol/xmpp/xmpp-analyzer.pac +++ b/src/analyzer/protocol/xmpp/xmpp-analyzer.pac @@ -11,6 +11,11 @@ refine connection XMPP_Conn += { function proc_xmpp_token(is_orig: bool, name: bytestring, rest: bytestring): bool %{ string token = std_str(name); + // Result will either be text after ":" or original string; this discards the namespace + string token_no_ns = std_str(name); + auto offset = token_no_ns.find(":"); + if ( offset != std::string::npos && token_no_ns.length() > offset + 1 ) + token_no_ns = token_no_ns.substr(offset + 1); if ( is_orig && token == "stream:stream" ) // Yup, looks like xmpp... @@ -21,10 +26,10 @@ refine connection XMPP_Conn += { // Handshake has passed the phase where we should see StartTLS. Simply skip from hereon... 
bro_analyzer()->SetSkip(true); - if ( is_orig && token == "starttls" ) + if ( is_orig && ( token == "starttls" || token_no_ns == "starttls" ) ) client_starttls = true; - if ( !is_orig && token == "proceed" && client_starttls ) + if ( !is_orig && ( token == "proceed" || token_no_ns == "proceed" ) && client_starttls ) { bro_analyzer()->StartTLS(); BifEvent::generate_xmpp_starttls(bro_analyzer(), bro_analyzer()->Conn()); @@ -32,7 +37,7 @@ refine connection XMPP_Conn += { else if ( !is_orig && token == "proceed" ) reporter->Weird(bro_analyzer()->Conn(), "XMPP: proceed without starttls"); - //printf("Processed: %d %s %s \n", is_orig, c_str(name), c_str(rest)); + // printf("Processed: %d %s %s %s \n", is_orig, c_str(name), c_str(rest), token_no_ns.c_str()); return true; %} diff --git a/src/rule-parse.y b/src/rule-parse.y index 32ada02cb3..3e9c8d7ddf 100644 --- a/src/rule-parse.y +++ b/src/rule-parse.y @@ -14,7 +14,7 @@ extern void end_PS(); Rule* current_rule = 0; const char* current_rule_file = 0; -static uint8_t mask_to_len(uint32_t mask) +static uint8_t ip4_mask_to_len(uint32_t mask) { if ( mask == 0xffffffff ) return 32; @@ -23,7 +23,7 @@ static uint8_t mask_to_len(uint32_t mask) uint8_t len; for ( len = 0; len < 32 && (! (x & (1 << len))); ++len ); - return len; + return 32 - len; } %} @@ -315,7 +315,7 @@ prefix_value: TOK_IP { $$ = new IPPrefix(IPAddr(IPv4, &($1.val), IPAddr::Host), - mask_to_len($1.mask)); + ip4_mask_to_len($1.mask)); } | TOK_IP6 ; diff --git a/testing/btest/Baseline/coverage.bare-load-baseline/canonified_loaded_scripts.log b/testing/btest/Baseline/coverage.bare-load-baseline/canonified_loaded_scripts.log index fc4caff34e..71c1743860 100644 --- a/testing/btest/Baseline/coverage.bare-load-baseline/canonified_loaded_scripts.log +++ b/testing/btest/Baseline/coverage.bare-load-baseline/canonified_loaded_scripts.log @@ -3,7 +3,7 @@ #empty_field (empty) #unset_field - #path loaded_scripts -#open 2016-10-07-19-25-03 +#open 2016-10-26-00-05-53 #fields name #types string scripts/base/init-bare.bro @@ -63,6 +63,7 @@ scripts/base/init-bare.bro build/scripts/base/bif/plugins/Bro_BitTorrent.events.bif.bro build/scripts/base/bif/plugins/Bro_ConnSize.events.bif.bro build/scripts/base/bif/plugins/Bro_ConnSize.functions.bif.bro + build/scripts/base/bif/plugins/Bro_DCE_RPC.consts.bif.bro build/scripts/base/bif/plugins/Bro_DCE_RPC.types.bif.bro build/scripts/base/bif/plugins/Bro_DCE_RPC.events.bif.bro build/scripts/base/bif/plugins/Bro_DHCP.events.bif.bro @@ -167,4 +168,4 @@ scripts/base/init-bare.bro build/scripts/base/bif/plugins/Bro_SQLiteWriter.sqlite.bif.bro scripts/policy/misc/loaded-scripts.bro scripts/base/utils/paths.bro -#close 2016-10-07-19-25-03 +#close 2016-10-26-00-05-53 diff --git a/testing/btest/Baseline/coverage.default-load-baseline/canonified_loaded_scripts.log b/testing/btest/Baseline/coverage.default-load-baseline/canonified_loaded_scripts.log index 603e9d7007..3e2a83dfd2 100644 --- a/testing/btest/Baseline/coverage.default-load-baseline/canonified_loaded_scripts.log +++ b/testing/btest/Baseline/coverage.default-load-baseline/canonified_loaded_scripts.log @@ -3,7 +3,7 @@ #empty_field (empty) #unset_field - #path loaded_scripts -#open 2016-10-07-19-25-14 +#open 2016-10-26-00-05-59 #fields name #types string scripts/base/init-bare.bro @@ -63,6 +63,7 @@ scripts/base/init-bare.bro build/scripts/base/bif/plugins/Bro_BitTorrent.events.bif.bro build/scripts/base/bif/plugins/Bro_ConnSize.events.bif.bro build/scripts/base/bif/plugins/Bro_ConnSize.functions.bif.bro + 
build/scripts/base/bif/plugins/Bro_DCE_RPC.consts.bif.bro build/scripts/base/bif/plugins/Bro_DCE_RPC.types.bif.bro build/scripts/base/bif/plugins/Bro_DCE_RPC.events.bif.bro build/scripts/base/bif/plugins/Bro_DHCP.events.bif.bro @@ -355,4 +356,4 @@ scripts/base/init-default.bro scripts/base/misc/find-filtered-trace.bro scripts/base/misc/version.bro scripts/policy/misc/loaded-scripts.bro -#close 2016-10-07-19-25-14 +#close 2016-10-26-00-05-59 diff --git a/testing/btest/Baseline/plugins.hooks/output b/testing/btest/Baseline/plugins.hooks/output index 177cdfb0d7..76a47699a4 100644 --- a/testing/btest/Baseline/plugins.hooks/output +++ b/testing/btest/Baseline/plugins.hooks/output @@ -247,7 +247,7 @@ 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (Weird::LOG, [columns=, ev=Weird::log_weird, path=weird])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (X509::LOG, [columns=, ev=X509::log_x509, path=x509])) -> 0.000000 MetaHookPost CallFunction(Log::__create_stream, , (mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql])) -> -0.000000 MetaHookPost CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T])) -> +0.000000 MetaHookPost CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T])) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Cluster::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Communication::LOG)) -> 0.000000 MetaHookPost CallFunction(Log::add_default_filter, , (Conn::LOG)) -> @@ -377,7 +377,7 @@ 0.000000 MetaHookPost CallFunction(Log::create_stream, , (Weird::LOG, [columns=, ev=Weird::log_weird, path=weird])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (X509::LOG, [columns=, ev=X509::log_x509, path=x509])) -> 0.000000 MetaHookPost CallFunction(Log::create_stream, , (mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql])) -> -0.000000 MetaHookPost CallFunction(Log::write, , (PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T])) -> +0.000000 MetaHookPost CallFunction(Log::write, , (PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T])) -> 0.000000 MetaHookPost CallFunction(NetControl::check_plugins, , ()) -> 0.000000 MetaHookPost CallFunction(NetControl::init, , ()) -> 0.000000 MetaHookPost CallFunction(Notice::want_pp, , ()) -> @@ -410,7 +410,7 @@ 0.000000 MetaHookPost CallFunction(reading_live_traffic, , ()) -> 0.000000 MetaHookPost CallFunction(reading_traces, , ()) -> 0.000000 MetaHookPost CallFunction(set_to_regex, , ({}, (^\.?|\.)(~~)$)) -> -0.000000 MetaHookPost CallFunction(strftime, , (%Y, 1475869873.545611)) -> +0.000000 MetaHookPost CallFunction(strftime, , (%Y, 1477440372.839693)) -> 0.000000 MetaHookPost CallFunction(string_to_pattern, , ((^\.?|\.)()$, F)) -> 0.000000 MetaHookPost CallFunction(sub, , ((^\.?|\.)(~~)$, <...>/, )) -> 0.000000 MetaHookPost CallFunction(to_count, , (2016)) -> @@ -427,6 +427,7 @@ 0.000000 MetaHookPost LoadFile(./Bro_BitTorrent.events.bif.bro) -> -1 0.000000 MetaHookPost LoadFile(./Bro_ConnSize.events.bif.bro) -> -1 0.000000 MetaHookPost LoadFile(./Bro_ConnSize.functions.bif.bro) -> -1 +0.000000 MetaHookPost LoadFile(./Bro_DCE_RPC.consts.bif.bro) -> -1 0.000000 MetaHookPost LoadFile(./Bro_DCE_RPC.events.bif.bro) -> -1 0.000000 MetaHookPost LoadFile(./Bro_DCE_RPC.types.bif.bro) -> -1 0.000000 MetaHookPost LoadFile(./Bro_DHCP.events.bif.bro) -> -1 @@ 
-966,7 +967,7 @@ 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (Weird::LOG, [columns=, ev=Weird::log_weird, path=weird])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (X509::LOG, [columns=, ev=X509::log_x509, path=x509])) 0.000000 MetaHookPre CallFunction(Log::__create_stream, , (mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql])) -0.000000 MetaHookPre CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T])) +0.000000 MetaHookPre CallFunction(Log::__write, , (PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T])) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Cluster::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Communication::LOG)) 0.000000 MetaHookPre CallFunction(Log::add_default_filter, , (Conn::LOG)) @@ -1096,7 +1097,7 @@ 0.000000 MetaHookPre CallFunction(Log::create_stream, , (Weird::LOG, [columns=, ev=Weird::log_weird, path=weird])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (X509::LOG, [columns=, ev=X509::log_x509, path=x509])) 0.000000 MetaHookPre CallFunction(Log::create_stream, , (mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql])) -0.000000 MetaHookPre CallFunction(Log::write, , (PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T])) +0.000000 MetaHookPre CallFunction(Log::write, , (PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T])) 0.000000 MetaHookPre CallFunction(NetControl::check_plugins, , ()) 0.000000 MetaHookPre CallFunction(NetControl::init, , ()) 0.000000 MetaHookPre CallFunction(Notice::want_pp, , ()) @@ -1129,7 +1130,7 @@ 0.000000 MetaHookPre CallFunction(reading_live_traffic, , ()) 0.000000 MetaHookPre CallFunction(reading_traces, , ()) 0.000000 MetaHookPre CallFunction(set_to_regex, , ({}, (^\.?|\.)(~~)$)) -0.000000 MetaHookPre CallFunction(strftime, , (%Y, 1475869873.545611)) +0.000000 MetaHookPre CallFunction(strftime, , (%Y, 1477440372.839693)) 0.000000 MetaHookPre CallFunction(string_to_pattern, , ((^\.?|\.)()$, F)) 0.000000 MetaHookPre CallFunction(sub, , ((^\.?|\.)(~~)$, <...>/, )) 0.000000 MetaHookPre CallFunction(to_count, , (2016)) @@ -1146,6 +1147,7 @@ 0.000000 MetaHookPre LoadFile(./Bro_BitTorrent.events.bif.bro) 0.000000 MetaHookPre LoadFile(./Bro_ConnSize.events.bif.bro) 0.000000 MetaHookPre LoadFile(./Bro_ConnSize.functions.bif.bro) +0.000000 MetaHookPre LoadFile(./Bro_DCE_RPC.consts.bif.bro) 0.000000 MetaHookPre LoadFile(./Bro_DCE_RPC.events.bif.bro) 0.000000 MetaHookPre LoadFile(./Bro_DCE_RPC.types.bif.bro) 0.000000 MetaHookPre LoadFile(./Bro_DHCP.events.bif.bro) @@ -1684,7 +1686,7 @@ 0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=, ev=Weird::log_weird, path=weird]) 0.000000 | HookCallFunction Log::__create_stream(X509::LOG, [columns=, ev=X509::log_x509, path=x509]) 0.000000 | HookCallFunction Log::__create_stream(mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql]) -0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T]) +0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T]) 0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Communication::LOG) 0.000000 | HookCallFunction Log::add_default_filter(Conn::LOG) @@ -1814,7 +1816,7 @@ 
0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=, ev=Weird::log_weird, path=weird]) 0.000000 | HookCallFunction Log::create_stream(X509::LOG, [columns=, ev=X509::log_x509, path=x509]) 0.000000 | HookCallFunction Log::create_stream(mysql::LOG, [columns=, ev=MySQL::log_mysql, path=mysql]) -0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T]) +0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T]) 0.000000 | HookCallFunction NetControl::check_plugins() 0.000000 | HookCallFunction NetControl::init() 0.000000 | HookCallFunction Notice::want_pp() @@ -1847,7 +1849,7 @@ 0.000000 | HookCallFunction reading_live_traffic() 0.000000 | HookCallFunction reading_traces() 0.000000 | HookCallFunction set_to_regex({}, (^\.?|\.)(~~)$) -0.000000 | HookCallFunction strftime(%Y, 1475869873.545611) +0.000000 | HookCallFunction strftime(%Y, 1477440372.839693) 0.000000 | HookCallFunction string_to_pattern((^\.?|\.)()$, F) 0.000000 | HookCallFunction sub((^\.?|\.)(~~)$, <...>/, ) 0.000000 | HookCallFunction to_count(2016) diff --git a/testing/btest/Baseline/signatures.dst-ip-cidr-v4/output b/testing/btest/Baseline/signatures.dst-ip-cidr-v4/output new file mode 100644 index 0000000000..eb07f77921 --- /dev/null +++ b/testing/btest/Baseline/signatures.dst-ip-cidr-v4/output @@ -0,0 +1,6 @@ +match, foo +match, foo +match, foo +match, foo +match, foo +match, foo diff --git a/testing/btest/Baseline/signatures.udp-payload-size/output b/testing/btest/Baseline/signatures.udp-payload-size/output new file mode 100644 index 0000000000..2ae3bbde9f --- /dev/null +++ b/testing/btest/Baseline/signatures.udp-payload-size/output @@ -0,0 +1,6 @@ +match, foo2 +match, foo2 +match, foo2 +match, foo2 +match, foo2 +match, foo2 diff --git a/testing/btest/Traces/ntp.pcap b/testing/btest/Traces/ntp.pcap new file mode 100644 index 0000000000..cc80d04afd Binary files /dev/null and b/testing/btest/Traces/ntp.pcap differ diff --git a/testing/btest/signatures/dst-ip-cidr-v4.bro b/testing/btest/signatures/dst-ip-cidr-v4.bro new file mode 100644 index 0000000000..e86a746e54 --- /dev/null +++ b/testing/btest/signatures/dst-ip-cidr-v4.bro @@ -0,0 +1,17 @@ +# @TEST-EXEC: bro -r $TRACES/ntp.pcap %INPUT >output +# @TEST-EXEC: btest-diff output + +@TEST-START-FILE a.sig +signature foo { + dst-ip == 17.0.0.0/8 + ip-proto == udp + event "match" +} +@TEST-END-FILE + +event signature_match(state: signature_state, msg: string, data: string) + { + print "match", state$sig_id; + } + +@load-sigs ./a.sig diff --git a/testing/btest/signatures/udp-payload-size.bro b/testing/btest/signatures/udp-payload-size.bro new file mode 100644 index 0000000000..efc5411feb --- /dev/null +++ b/testing/btest/signatures/udp-payload-size.bro @@ -0,0 +1,23 @@ +# @TEST-EXEC: bro -r $TRACES/ntp.pcap %INPUT >output +# @TEST-EXEC: btest-diff output + +@TEST-START-FILE a.sig +signature foo1 { + ip-proto == udp + payload-size < 1 + event "match" +} + +signature foo2 { + ip-proto == udp + payload-size > 0 + event "match" +} +@TEST-END-FILE + +event signature_match(state: signature_state, msg: string, data: string) + { + print "match", state$sig_id; + } + +@load-sigs ./a.sig diff --git a/testing/scripts/coverage-calc b/testing/scripts/coverage-calc index cc5253c75c..df12e0c86f 100755 --- a/testing/scripts/coverage-calc +++ b/testing/scripts/coverage-calc @@ -56,4 +56,4 @@ for k in stats: num_covered += 1 if len(stats) 
> 0: - print "%s/%s (%.1f%%) Bro script statements covered." % (num_covered, len(stats), float(num_covered)/len(stats)*100) + print("%s/%s (%.1f%%) Bro script statements covered." % (num_covered, len(stats), float(num_covered)/len(stats)*100))
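
The new DCE_RPC reassembly limits added to init-bare.bro in this patch are
&redef-able, so a site can tune them from its local configuration. A minimal
sketch follows; the values shown are illustrative only, not recommendations,
and "local.bro" is just the conventional site script name.

	# local.bro -- adjust the DCE_RPC reassembly limits introduced in this patch.
	@load base/protocols/dce-rpc

	# Tolerate more simultaneously fragmented commands before the analyzer
	# reports a weird and skips further input (default in this patch: 20).
	redef DCE_RPC::max_cmd_reassembly = 40;

	# Tolerate more fragmented bytes per command before giving up
	# (default in this patch: 30000).
	redef DCE_RPC::max_frag_data = 65535;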