Mirror of https://github.com/zeek/zeek.git (synced 2025-10-02 14:48:21 +00:00)
Merge remote-tracking branch 'origin/master' into topic/seth/smb-auth-fixes
This commit is contained in commit ff682b8c2c.
35 changed files with 238 additions and 59 deletions
CHANGES | 23

@@ -1,4 +1,27 @@
+2.5-beta-114 | 2016-10-27 09:00:24 -0700
+
+  * Fix for Sphinx >= 1.4 compatibility. (Robin Sommer)
+
+2.5-beta-113 | 2016-10-27 07:44:25 -0700
+
+  * XMPP: Fix detection of StartTLS when using namespaces. (Johanna
+    Amann)
+
+2.5-beta-110 | 2016-10-26 09:42:11 -0400
+
+  * Improvements to the DCE_RPC analyzer to make it perform fragment handling
+    correctly and generally be more resistant to unexpected traffic. (Seth Hall)
+
+2.5-beta-102 | 2016-10-25 09:43:45 -0700
+
+  * Update number of bytes in request/response of smb1-com-open-andx.pac. (balintm)
+
+  * Fix IPv4 CIDR specifications and the payload-size condition of signature
+    matching. (Robin Sommer)
+
+  * Python 3 compatibility fix for the coverage-calc script. (Daniel Thayer)
+
 2.5-beta-93 | 2016-10-24 11:11:07 -0700
 
   * Fix alignment issue of ones_complement_checksum. This error
NEWS | 9

@@ -41,6 +41,9 @@ New Functionality
   New log files: net_control.log, netcontrol_catch_release.log,
   netcontrol_drop.log, and netcontrol_shunt.log.
 
+- Bro now includes the OpenFlow framework which exposes the data structures
+  necessary to interface to OpenFlow capable hardware.
+
 - Bro's Intelligence Framework was refactored and new functionality
   has been added:
 

@@ -154,8 +157,10 @@ New Functionality
 
 - The pcap buffer size can be set through the new option Pcap::bufsize.
 
-- Input framework readers Table and Event can now define a custom
-  event to receive logging messages.
+- Input framework reader streams of types Table and Event can now define a custom
+  event (specified by the new "error_ev" field) to receive error messages
+  emitted by the input stream. This can, e.g., be used to raise notices in
+  case errors occur when reading an important input source.
 
 - The logging framework now supports user-defined record separators,
   renaming of column names, as well as extension data columns that can
VERSION | 2

@@ -1 +1 @@
-2.5-beta-93
+2.5-beta-114
Six submodule pointer updates (paths not shown in this view):

@@ -1 +1 @@
-Subproject commit 097c1dde17c218973a9adad9ba39f8cfd639d9c1
+Subproject commit 3f7b38c293e94143a757590918aac82281e46500

@@ -1 +1 @@
-Subproject commit 0191254451d1aa9a5c985d493ad51f4f1c5f7d85
+Subproject commit a9c2717232764808ca6029f8e727812b58424839

@@ -1 +1 @@
-Subproject commit 0743c4f51600cc90aceccaee72ca879b271712d2
+Subproject commit 3f036d36d1e4a42dd672f8a03caf81e38f318f2d

@@ -1 +1 @@
-Subproject commit 741f6aefce5758d7a62ac5be05f4c750afb5e463
+Subproject commit 895fae348aa03032d198350d03bfc09eb46ed4b4

@@ -1 +1 @@
-Subproject commit 497924cdcc23d26221bc39b24bcddcb62ec13ca7
+Subproject commit 35d292cbaef2fafdaede5975d097c27d810382ab

@@ -1 +1 @@
-Subproject commit 625dbecfd63022d79a144b9651085e68cdf99ce4
+Subproject commit 17d1c1547678bfd54ef1202db5415bc85c7ae794
cmake | 2

@@ -1 +1 @@
-Subproject commit 39510b5fb2351d7aac85da0d335a128402db3bbc
+Subproject commit 71932e91ab329158950d41d630f96509ffe7a217
(The hunks below modify the Sphinx domain extension used to build Bro's documentation; the file path is not shown in this view.)

@@ -14,6 +14,7 @@ from sphinx.locale import l_, _
 from sphinx.directives import ObjectDescription
 from sphinx.roles import XRefRole
 from sphinx.util.nodes import make_refnode
+from sphinx import version_info
 import string
 
 from docutils import nodes

@@ -32,6 +33,14 @@ class SeeDirective(Directive):
         n.refs = string.split(string.join(self.content))
         return [n]
 
+# Wrapper for creating a tuple for index nodes, staying backwards
+# compatible to Sphinx < 1.4:
+def make_index_tuple(indextype, indexentry, targetname, targetname2):
+    if version_info >= (1, 4, 0, '', 0):
+        return (indextype, indexentry, targetname, targetname2, None)
+    else:
+        return (indextype, indexentry, targetname, targetname2)
+
 def process_see_nodes(app, doctree, fromdocname):
     for node in doctree.traverse(see):
         content = []

@@ -95,8 +104,9 @@ class BroGeneric(ObjectDescription):
 
         indextext = self.get_index_text(self.objtype, name)
         if indextext:
-            self.indexnode['entries'].append(('single', indextext,
-                                              targetname, targetname))
+            self.indexnode['entries'].append(make_index_tuple('single',
+                                             indextext, targetname,
+                                             targetname))
 
     def get_index_text(self, objectname, name):
         return _('%s (%s)') % (name, self.objtype)

@@ -120,9 +130,9 @@ class BroNamespace(BroGeneric):
         self.update_type_map(name)
 
         indextext = self.get_index_text(self.objtype, name)
-        self.indexnode['entries'].append(('single', indextext,
+        self.indexnode['entries'].append(make_index_tuple('single', indextext,
                                           targetname, targetname))
-        self.indexnode['entries'].append(('single',
+        self.indexnode['entries'].append(make_index_tuple('single',
                                           "namespaces; %s" % (sig),
                                           targetname, targetname))
 

@@ -148,7 +158,7 @@ class BroEnum(BroGeneric):
         self.update_type_map(name)
 
         indextext = self.get_index_text(self.objtype, name)
-        #self.indexnode['entries'].append(('single', indextext,
+        #self.indexnode['entries'].append(make_index_tuple('single', indextext,
         #                                  targetname, targetname))
         m = sig.split()
 

@@ -162,7 +172,7 @@ class BroEnum(BroGeneric):
                 self.env.domaindata['bro']['notices'] = []
             self.env.domaindata['bro']['notices'].append(
                 (m[0], self.env.docname, targetname))
-        self.indexnode['entries'].append(('single',
+        self.indexnode['entries'].append(make_index_tuple('single',
             "%s (enum values); %s" % (m[1], m[0]),
             targetname, targetname))
 
@@ -4390,6 +4390,19 @@ export {
 	const bufsize = 128 &redef;
 } # end export
 
+module DCE_RPC;
+export {
+	## The maximum number of simultaneous fragmented commands that
+	## the DCE_RPC analyzer will tolerate before it will generate
+	## a weird and skip further input.
+	const max_cmd_reassembly = 20 &redef;
+
+	## The maximum number of fragmented bytes that the DCE_RPC analyzer
+	## will tolerate on a command before the analyzer will generate a weird
+	## and skip further input.
+	const max_frag_data = 30000 &redef;
+}
+
 module GLOBAL;
 
 ## Seed for hashes computed internally for probabilistic data structures. Using
@@ -89,6 +89,10 @@ bool RuleConditionPayloadSize::DoMatch(Rule* rule, RuleEndpointState* state,
 		// on the pure rules now.
 		return false;
 
+	if ( state->PayloadSize() == 0 )
+		// We are interested in the first non-empty chunk.
+		return false;
+
 	uint32 payload_size = uint32(state->PayloadSize());
 
 	switch ( comp ) {
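The hunk above makes a payload-size condition wait for the first non-empty chunk instead of evaluating against an empty one. The comparison itself is cut off right after `switch ( comp )`; it is a plain relational check of the observed size against the rule's constant. A minimal self-contained C++ sketch of that dispatch — the names `RuleCompare`, `payload_size_matches`, and `val` are hypothetical, not the committed code:

    #include <cassert>
    #include <cstdint>

    // Comparison operators a payload-size condition can use, mirroring the
    // Comp enum shown later in this commit (LE, GE, LT, GT, EQ, NE).
    enum class RuleCompare { LE, GE, LT, GT, EQ, NE };

    // Return true if 'payload_size' satisfies 'comp' against the rule's constant 'val'.
    static bool payload_size_matches(RuleCompare comp, uint32_t payload_size, uint32_t val)
        {
        switch ( comp )
            {
            case RuleCompare::LE: return payload_size <= val;
            case RuleCompare::GE: return payload_size >= val;
            case RuleCompare::LT: return payload_size <  val;
            case RuleCompare::GT: return payload_size >  val;
            case RuleCompare::EQ: return payload_size == val;
            case RuleCompare::NE: return payload_size != val;
            }
        return false;
        }

    int main()
        {
        // The new udp-payload-size.bro test below exercises exactly these two shapes.
        assert(payload_size_matches(RuleCompare::GT, 48, 0));   // "payload-size > 0"
        assert(! payload_size_matches(RuleCompare::LT, 48, 1)); // "payload-size < 1"
        return 0;
        }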
@@ -144,7 +144,7 @@ bool RuleHdrTest::operator==(const RuleHdrTest& h)
 void RuleHdrTest::PrintDebug()
 	{
 	static const char* str_comp[] = { "<=", ">=", "<", ">", "==", "!=" };
-	static const char* str_prot[] = { "", "ip", "icmp", "tcp", "udp" };
+	static const char* str_prot[] = { "", "ip", "ipv6", "icmp", "icmpv6", "tcp", "udp", "next", "ipsrc", "ipdst" };
 
 	fprintf(stderr, "    RuleHdrTest %s[%d:%d] %s",
 		str_prot[prot], offset, size, str_comp[comp]);
@@ -1095,10 +1095,10 @@ void RuleMatcher::ExecRule(Rule* rule, RuleEndpointState* state, bool eos)
 
 void RuleMatcher::ClearEndpointState(RuleEndpointState* state)
 	{
-	state->payload_size = -1;
-
 	ExecPureRules(state, 1);
 
+	state->payload_size = -1;
+
 	loop_over_list(state->matchers, j)
 		state->matchers[j]->state->Clear();
 	}
@@ -72,6 +72,7 @@ extern uint32 id_to_uint(const char* id);
 
 class RuleHdrTest {
 public:
+	// Note: Adapt RuleHdrTest::PrintDebug() when changing these enums.
 	enum Comp { LE, GE, LT, GT, EQ, NE };
 	enum Prot { NOPROT, IP, IPv6, ICMP, ICMPv6, TCP, UDP, NEXT, IPSrc, IPDst };
 
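The new comment is the only change here: the PrintDebug() fix earlier in this commit was needed because the str_prot table had drifted out of sync with this Prot enum. One way to catch that drift at compile time — an optional hardening idea, not something this commit adds — is to tie the table size to a sentinel enumerator with a static_assert:

    // Sketch of keeping a debug-string table in lock-step with an enum.
    // NUM_PROT is a hypothetical sentinel; the committed code does not have one.
    enum Prot { NOPROT, IP, IPv6, ICMP, ICMPv6, TCP, UDP, NEXT, IPSrc, IPDst, NUM_PROT };

    static const char* str_prot[] =
        { "", "ip", "ipv6", "icmp", "icmpv6", "tcp", "udp", "next", "ipsrc", "ipdst" };

    static_assert(sizeof(str_prot) / sizeof(str_prot[0]) == NUM_PROT,
                  "str_prot[] must have one entry per Prot enumerator");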
@@ -5,7 +5,7 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR})
 
 bro_plugin_begin(Bro DCE_RPC)
 bro_plugin_cc(DCE_RPC.cc Plugin.cc)
-bro_plugin_bif(types.bif events.bif)
+bro_plugin_bif(consts.bif types.bif events.bif)
 bro_plugin_pac(
 	dce_rpc.pac
 	dce_rpc-protocol.pac
@@ -16,6 +16,7 @@ using namespace analyzer::dce_rpc;
 DCE_RPC_Analyzer::DCE_RPC_Analyzer(Connection *conn)
 	: tcp::TCP_ApplicationAnalyzer("DCE_RPC", conn)
 	{
+	had_gap = false;
 	interp = new binpac::DCE_RPC::DCE_RPC_Conn(this);
 	}
 

@@ -41,6 +42,7 @@ void DCE_RPC_Analyzer::EndpointEOF(bool is_orig)
 void DCE_RPC_Analyzer::Undelivered(uint64 seq, int len, bool orig)
 	{
 	TCP_ApplicationAnalyzer::Undelivered(seq, len, orig);
+	had_gap = true;
 	interp->NewGap(orig, len);
 	}
 

@@ -49,6 +51,12 @@ void DCE_RPC_Analyzer::DeliverStream(int len, const u_char* data, bool orig)
 	TCP_ApplicationAnalyzer::DeliverStream(len, data, orig);
 
 	assert(TCP());
 
+	if ( had_gap )
+		// If only one side had a content gap, we could still try to
+		// deliver data to the other side if the script layer can handle this.
+		return;
+
 	try
 		{
 		interp->NewData(orig, data, data + len);
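The DeliverStream() hunk is cut off right after the interp->NewData() call. The idea it adds is simple: once bytes have gone missing on the wire, stop feeding the binpac parser, because its state can no longer be trusted. A minimal self-contained C++ sketch of that pattern, with parse errors reported rather than propagated — all names here are illustrative stand-ins for the analyzer/binpac machinery, not the committed code:

    #include <cstdint>
    #include <cstdio>
    #include <stdexcept>
    #include <vector>

    // Remember that a gap occurred and skip further delivery; turn parser
    // exceptions into a "protocol violation" report instead of letting them escape.
    struct SketchAnalyzer {
        bool had_gap = false;

        void Undelivered(int /*seq*/, int /*len*/) { had_gap = true; }

        void DeliverStream(const std::vector<uint8_t>& chunk)
            {
            if ( had_gap )
                return; // parser state is unreliable once bytes went missing

            try
                {
                if ( chunk.empty() )
                    throw std::runtime_error("empty chunk"); // stand-in for a parse error
                // ... hand chunk to the generated parser here ...
                }
            catch ( const std::exception& e )
                {
                std::printf("protocol violation: %s\n", e.what());
                }
            }
    };

    int main()
        {
        SketchAnalyzer a;
        a.DeliverStream({0x05, 0x00}); // parsed normally
        a.Undelivered(0, 100);         // content gap observed
        a.DeliverStream({0x05, 0x00}); // silently skipped from now on
        return 0;
        }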
@@ -29,6 +29,7 @@ public:
 		{ return new DCE_RPC_Analyzer(conn); }
 
 protected:
+	bool had_gap;
 	binpac::DCE_RPC::DCE_RPC_Conn* interp;
 };
 
src/analyzer/protocol/dce-rpc/consts.bif | 2 (new file)

@@ -0,0 +1,2 @@
+const DCE_RPC::max_cmd_reassembly: count;
+const DCE_RPC::max_frag_data: count;
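Declaring these constants in a .bif file is what makes the script-level limits visible to the C++/binpac side: the generated header exposes them under a BifConst namespace, which is how the analyzer code later in this commit compares against BifConst::DCE_RPC::max_cmd_reassembly and max_frag_data. A toy self-contained sketch of that shape — the namespace below is stubbed by hand purely for illustration; in the real build it comes from the generated consts.bif.h:

    #include <cstddef>
    #include <cstdint>
    #include <iostream>

    // Hand-written stand-in for what the bif compiler generates from consts.bif.
    namespace BifConst { namespace DCE_RPC {
        uint64_t max_cmd_reassembly = 20;
        uint64_t max_frag_data = 30000;
    } }

    int main()
        {
        size_t pending_cmds = 25;
        size_t buffered_bytes = 1024;

        if ( pending_cmds > BifConst::DCE_RPC::max_cmd_reassembly )
            std::cout << "would report weird: too_many_dce_rpc_msgs_in_reassembly\n";

        if ( buffered_bytes > BifConst::DCE_RPC::max_frag_data )
            std::cout << "would report weird: too_much_dce_rpc_fragment_data\n";

        return 0;
        }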
@@ -37,7 +37,7 @@ type DCE_RPC_PDU(is_orig: bool) = record {
 	# Subtract an extra 8 when there is an auth section because we have some "auth header" fields in that structure.
 	body_length      : int  = header.frag_length - sizeof(header) - header.auth_length - (header.auth_length > 0 ? 8 : 0);
 	frag_reassembled : bool = $context.flow.reassemble_fragment(header, frag);
-	body             : DCE_RPC_Body(header) withinput $context.flow.reassembled_body(header, frag) &if(header.lastfrag);
+	body             : DCE_RPC_Body(header) withinput $context.flow.reassembled_body(header, frag) &if(frag_reassembled);
 } &byteorder = header.byteorder, &length = header.frag_length;
 
 type NDR_Format = record {
@@ -174,23 +174,74 @@ flow DCE_RPC_Flow(is_orig: bool) {
 	flowunit = DCE_RPC_PDU(is_orig) withcontext(connection, this);
 
 	%member{
-		std::map<uint32, FlowBuffer*> fb;
+		std::map<uint32, std::unique_ptr<FlowBuffer>> fb;
 	%}
 
 	# Fragment reassembly.
 	function reassemble_fragment(header: DCE_RPC_Header, frag: bytestring): bool
 		%{
-		if ( ${header.firstfrag} && !${header.lastfrag} &&
-		     fb.count(${header.call_id}) == 0 )
-			fb[${header.call_id}] = new FlowBuffer();
-
-		if ( fb.count(${header.call_id}) == 0 )
+		if ( ${header.firstfrag} )
+			{
+			if ( fb.count(${header.call_id}) > 0 )
+				{
+				// We already had a first frag earlier.
+				reporter->Weird(connection()->bro_analyzer()->Conn(),
+				                "multiple_first_fragments_in_dce_rpc_reassembly");
+				connection()->bro_analyzer()->SetSkip(true);
+				return false;
+				}
 
-		auto frag_reassembler_ = fb[${header.call_id}];
-		frag_reassembler_->BufferData(frag.begin(), frag.end());
+			if ( ${header.lastfrag} )
+				{
+				// all-in-one packet
+				return true;
+				}
+			else
+				{
+				// first frag, but not last so we start a flowbuffer
+				fb[${header.call_id}] = std::unique_ptr<FlowBuffer>(new FlowBuffer());
+				fb[${header.call_id}]->NewFrame(0, true);
+				fb[${header.call_id}]->BufferData(frag.begin(), frag.end());
 
-		return (!${header.firstfrag} && ${header.lastfrag});
+				if ( fb.size() > BifConst::DCE_RPC::max_cmd_reassembly )
+					{
+					reporter->Weird(connection()->bro_analyzer()->Conn(),
+					                "too_many_dce_rpc_msgs_in_reassembly");
+					connection()->bro_analyzer()->SetSkip(true);
+					}
+
+				if ( fb[${header.call_id}]->data_length() > (int)BifConst::DCE_RPC::max_frag_data )
+					{
+					reporter->Weird(connection()->bro_analyzer()->Conn(),
+					                "too_much_dce_rpc_fragment_data");
+					connection()->bro_analyzer()->SetSkip(true);
+					}
+
+				return false;
+				}
+			}
+		else if ( fb.count(${header.call_id}) > 0 )
+			{
+			// not the first frag, but we have a flow buffer so add to it
+			fb[${header.call_id}]->BufferData(frag.begin(), frag.end());
+
+			if ( fb[${header.call_id}]->data_length() > (int)BifConst::DCE_RPC::max_frag_data )
+				{
+				reporter->Weird(connection()->bro_analyzer()->Conn(),
+				                "too_much_dce_rpc_fragment_data");
+				connection()->bro_analyzer()->SetSkip(true);
+				}
+
+			return ${header.lastfrag};
+			}
+		else
+			{
+			// no flow buffer and not a first frag, ignore it.
+			return false;
+			}
+
+		// can't reach here.
+		return false;
 		%}
 
 	function reassembled_body(h: DCE_RPC_Header, body: bytestring): const_bytestring

@@ -200,7 +251,6 @@ flow DCE_RPC_Flow(is_orig: bool) {
 		if ( fb.count(${h.call_id}) > 0 )
 			{
 			bd = const_bytestring(fb[${h.call_id}]->begin(), fb[${h.call_id}]->end());
-			delete fb[${h.call_id}];
 			fb.erase(${h.call_id});
 			}
 
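The rewritten reassemble_fragment() keeps one bounded buffer per DCE_RPC call_id, reports a weird and stops when it sees a duplicate first fragment, too many interleaved fragmented commands, or too many buffered bytes, and only signals "reassembled" once the last fragment arrives. Below is a minimal self-contained C++ sketch of that bookkeeping pattern (per-key buffers held by unique_ptr, capped by a message count and a per-message byte limit). The class and constant names are made up for illustration and stand in for the FlowBuffer/BifConst machinery above rather than reproducing it:

    #include <cstddef>
    #include <cstdint>
    #include <map>
    #include <memory>
    #include <vector>

    class FragmentStore {
    public:
        static constexpr size_t kMaxCommands = 20;     // cf. DCE_RPC::max_cmd_reassembly
        static constexpr size_t kMaxFragBytes = 30000; // cf. DCE_RPC::max_frag_data

        // Returns true when the PDU for 'call_id' is complete and ready to parse.
        bool AddFragment(uint32_t call_id, bool first, bool last,
                         const uint8_t* data, size_t len)
            {
            if ( first )
                {
                if ( buffers.count(call_id) )
                    return Fail(); // duplicate first fragment: give up on this stream

                if ( last )
                    return true;   // single-fragment PDU, caller already has the body

                if ( buffers.size() >= kMaxCommands )
                    return Fail(); // too many interleaved fragmented commands

                buffers[call_id] = std::make_unique<std::vector<uint8_t>>();
                }

            auto it = buffers.find(call_id);
            if ( it == buffers.end() )
                return false;      // middle fragment without a first: ignore it

            it->second->insert(it->second->end(), data, data + len);

            if ( it->second->size() > kMaxFragBytes )
                return Fail();     // a single command grew past the byte cap

            return last;           // complete once the last fragment arrives
            }

        // Hand back and forget the reassembled body for a completed call_id.
        std::vector<uint8_t> TakeBody(uint32_t call_id)
            {
            std::vector<uint8_t> body;
            auto it = buffers.find(call_id);
            if ( it != buffers.end() )
                {
                body = std::move(*it->second);
                buffers.erase(it);
                }
            return body;
            }

    private:
        bool Fail()
            {
            skipping = true; // a real analyzer would also report a weird here
            return false;
            }

        std::map<uint32_t, std::unique_ptr<std::vector<uint8_t>>> buffers;
        bool skipping = false;
    };

    int main()
        {
        FragmentStore store;
        const uint8_t part1[] = { 'a', 'b' };
        const uint8_t part2[] = { 'c' };

        store.AddFragment(7, /*first=*/true,  /*last=*/false, part1, sizeof(part1));
        bool done = store.AddFragment(7, /*first=*/false, /*last=*/true, part2, sizeof(part2));

        return (done && store.TakeBody(7).size() == 3) ? 0 : 1;
        }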
@@ -2,6 +2,7 @@
 %include bro.pac
 
 %extern{
+#include "consts.bif.h"
 #include "types.bif.h"
 #include "events.bif.h"
 %}
@@ -52,7 +52,7 @@ type SMB1_open_andx_request(header: SMB_Header, offset: uint16) = record {
 	open_mode       : uint16;
 	allocation_size : uint32;
 	timeout         : uint32;
-	reserved        : padding[2];
+	reserved        : padding[4];
 	byte_count      : uint16;
 	filename        : SMB_string(header.unicode, offsetof(filename));
 

@@ -74,7 +74,7 @@ type SMB1_open_andx_response(header: SMB_Header, offset: uint16) = record {
 	resource_type   : uint16;
 	nm_pipe_status  : uint16;
 	open_results    : uint16;
-	reserved        : padding[3];
+	reserved        : padding[6];
 	byte_count      : uint16;
 
 	extra_byte_parameters : bytestring &transient &length=(andx.offset == 0 || andx.offset >= (offset+offsetof(extra_byte_parameters))+2) ? 0 : (andx.offset-(offset+offsetof(extra_byte_parameters)));
@@ -11,6 +11,11 @@ refine connection XMPP_Conn += {
 	function proc_xmpp_token(is_orig: bool, name: bytestring, rest: bytestring): bool
 		%{
 		string token = std_str(name);
+		// Result will either be text after ":" or original string; this discards the namespace
+		string token_no_ns = std_str(name);
+		auto offset = token_no_ns.find(":");
+		if ( offset != std::string::npos && token_no_ns.length() > offset + 1 )
+			token_no_ns = token_no_ns.substr(offset + 1);
 
 		if ( is_orig && token == "stream:stream" )
 			// Yup, looks like xmpp...

@@ -21,10 +26,10 @@ refine connection XMPP_Conn += {
 			// Handshake has passed the phase where we should see StartTLS. Simply skip from hereon...
 			bro_analyzer()->SetSkip(true);
 
-		if ( is_orig && token == "starttls" )
+		if ( is_orig && ( token == "starttls" || token_no_ns == "starttls" ) )
 			client_starttls = true;
 
-		if ( !is_orig && token == "proceed" && client_starttls )
+		if ( !is_orig && ( token == "proceed" || token_no_ns == "proceed" ) && client_starttls )
 			{
 			bro_analyzer()->StartTLS();
 			BifEvent::generate_xmpp_starttls(bro_analyzer(), bro_analyzer()->Conn());

@@ -32,7 +37,7 @@ refine connection XMPP_Conn += {
 		else if ( !is_orig && token == "proceed" )
 			reporter->Weird(bro_analyzer()->Conn(), "XMPP: proceed without starttls");
 
-		//printf("Processed: %d %s %s \n", is_orig, c_str(name), c_str(rest));
+		// printf("Processed: %d %s %s %s \n", is_orig, c_str(name), c_str(rest), token_no_ns.c_str());
 
 		return true;
 		%}
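The XMPP fix compares both the raw token and a namespace-stripped copy, so an element written with a namespace prefix (for example a starttls element carrying a "tls:" prefix) still triggers StartTLS detection. A small standalone C++ sketch of just the prefix-stripping step — the helper name is made up; the committed code does this inline as shown above:

    #include <iostream>
    #include <string>

    // Drop an XML namespace prefix ("ns:name" -> "name"); leave other input as-is.
    static std::string strip_ns(const std::string& token)
        {
        auto offset = token.find(':');
        if ( offset != std::string::npos && token.length() > offset + 1 )
            return token.substr(offset + 1);
        return token;
        }

    int main()
        {
        std::cout << strip_ns("starttls") << "\n";     // "starttls"
        std::cout << strip_ns("tls:starttls") << "\n"; // "starttls"
        std::cout << strip_ns("proceed:") << "\n";     // "proceed:" (nothing after ':')
        return 0;
        }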
@@ -14,7 +14,7 @@ extern void end_PS();
 Rule* current_rule = 0;
 const char* current_rule_file = 0;
 
-static uint8_t mask_to_len(uint32_t mask)
+static uint8_t ip4_mask_to_len(uint32_t mask)
 	{
 	if ( mask == 0xffffffff )
 		return 32;

@@ -23,7 +23,7 @@ static uint8_t mask_to_len(uint32_t mask)
 	uint8_t len;
 	for ( len = 0; len < 32 && (! (x & (1 << len))); ++len );
 
-	return len;
+	return 32 - len;
 	}
 %}
 

@@ -315,7 +315,7 @@ prefix_value:
 	TOK_IP
 		{
 		$$ = new IPPrefix(IPAddr(IPv4, &($1.val), IPAddr::Host),
-		                  mask_to_len($1.mask));
+		                  ip4_mask_to_len($1.mask));
 		}
 	| TOK_IP6
 	;
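The renamed ip4_mask_to_len() counts the mask's trailing zero bits but previously returned that count directly, so a /24 mask (0xffffff00, 8 trailing zero bits) came out as prefix length 8. Returning 32 minus the count yields the intended CIDR prefix length, which is what makes IPv4 CIDR specifications in signatures match again. A self-contained C++ sketch of the same computation, assuming the input is a contiguous IPv4 netmask in host byte order (the function name and the asserts are illustrative, not the parser's exact code):

    #include <cassert>
    #include <cstdint>

    // Convert a contiguous IPv4 netmask to its CIDR prefix length, e.g. 0xffffff00 -> 24.
    static uint8_t ipv4_mask_to_prefix(uint32_t mask)
        {
        if ( mask == 0xffffffff )
            return 32;

        uint8_t trailing_zeros = 0;
        while ( trailing_zeros < 32 && ! (mask & (1u << trailing_zeros)) )
            ++trailing_zeros;

        // The prefix is however many of the 32 bits are *not* trailing zeros.
        return 32 - trailing_zeros;
        }

    int main()
        {
        assert(ipv4_mask_to_prefix(0xffffffff) == 32);
        assert(ipv4_mask_to_prefix(0xffffff00) == 24); // the old code returned 8 here
        assert(ipv4_mask_to_prefix(0xff000000) == 8);
        assert(ipv4_mask_to_prefix(0x00000000) == 0);
        return 0;
        }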
@@ -3,7 +3,7 @@
 #empty_field (empty)
 #unset_field -
 #path loaded_scripts
-#open 2016-10-07-19-25-03
+#open 2016-10-26-00-05-53
 #fields name
 #types string
 scripts/base/init-bare.bro

@@ -63,6 +63,7 @@ scripts/base/init-bare.bro
   build/scripts/base/bif/plugins/Bro_BitTorrent.events.bif.bro
   build/scripts/base/bif/plugins/Bro_ConnSize.events.bif.bro
   build/scripts/base/bif/plugins/Bro_ConnSize.functions.bif.bro
+  build/scripts/base/bif/plugins/Bro_DCE_RPC.consts.bif.bro
   build/scripts/base/bif/plugins/Bro_DCE_RPC.types.bif.bro
   build/scripts/base/bif/plugins/Bro_DCE_RPC.events.bif.bro
   build/scripts/base/bif/plugins/Bro_DHCP.events.bif.bro

@@ -167,4 +168,4 @@ scripts/base/init-bare.bro
   build/scripts/base/bif/plugins/Bro_SQLiteWriter.sqlite.bif.bro
 scripts/policy/misc/loaded-scripts.bro
 scripts/base/utils/paths.bro
-#close 2016-10-07-19-25-03
+#close 2016-10-26-00-05-53
@@ -3,7 +3,7 @@
 #empty_field (empty)
 #unset_field -
 #path loaded_scripts
-#open 2016-10-07-19-25-14
+#open 2016-10-26-00-05-59
 #fields name
 #types string
 scripts/base/init-bare.bro

@@ -63,6 +63,7 @@ scripts/base/init-bare.bro
   build/scripts/base/bif/plugins/Bro_BitTorrent.events.bif.bro
   build/scripts/base/bif/plugins/Bro_ConnSize.events.bif.bro
   build/scripts/base/bif/plugins/Bro_ConnSize.functions.bif.bro
+  build/scripts/base/bif/plugins/Bro_DCE_RPC.consts.bif.bro
   build/scripts/base/bif/plugins/Bro_DCE_RPC.types.bif.bro
   build/scripts/base/bif/plugins/Bro_DCE_RPC.events.bif.bro
   build/scripts/base/bif/plugins/Bro_DHCP.events.bif.bro

@@ -355,4 +356,4 @@ scripts/base/init-default.bro
 scripts/base/misc/find-filtered-trace.bro
 scripts/base/misc/version.bro
 scripts/policy/misc/loaded-scripts.bro
-#close 2016-10-07-19-25-14
+#close 2016-10-26-00-05-59
(btest baseline of plugin hook output; the file path is not shown in this view. The changes are refreshed timestamps plus the newly loaded Bro_DCE_RPC.consts.bif.bro.)

@@ -247,7 +247,7 @@
 0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird])) -> <no result>
 0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509])) -> <no result>
 0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql])) -> <no result>
-0.000000 MetaHookPost CallFunction(Log::__write, <frame>, (PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T])) -> <no result>
+0.000000 MetaHookPost CallFunction(Log::__write, <frame>, (PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T])) -> <no result>
 0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Cluster::LOG)) -> <no result>
 0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Communication::LOG)) -> <no result>
 0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Conn::LOG)) -> <no result>

@@ -377,7 +377,7 @@
 0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird])) -> <no result>
 0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509])) -> <no result>
 0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql])) -> <no result>
-0.000000 MetaHookPost CallFunction(Log::write, <frame>, (PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T])) -> <no result>
+0.000000 MetaHookPost CallFunction(Log::write, <frame>, (PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T])) -> <no result>
 0.000000 MetaHookPost CallFunction(NetControl::check_plugins, <frame>, ()) -> <no result>
 0.000000 MetaHookPost CallFunction(NetControl::init, <null>, ()) -> <no result>
 0.000000 MetaHookPost CallFunction(Notice::want_pp, <frame>, ()) -> <no result>

@@ -410,7 +410,7 @@
 0.000000 MetaHookPost CallFunction(reading_live_traffic, <frame>, ()) -> <no result>
 0.000000 MetaHookPost CallFunction(reading_traces, <frame>, ()) -> <no result>
 0.000000 MetaHookPost CallFunction(set_to_regex, <frame>, ({}, (^\.?|\.)(~~)$)) -> <no result>
-0.000000 MetaHookPost CallFunction(strftime, <frame>, (%Y, 1475869873.545611)) -> <no result>
+0.000000 MetaHookPost CallFunction(strftime, <frame>, (%Y, 1477440372.839693)) -> <no result>
 0.000000 MetaHookPost CallFunction(string_to_pattern, <frame>, ((^\.?|\.)()$, F)) -> <no result>
 0.000000 MetaHookPost CallFunction(sub, <frame>, ((^\.?|\.)(~~)$, <...>/, )) -> <no result>
 0.000000 MetaHookPost CallFunction(to_count, <frame>, (2016)) -> <no result>

@@ -427,6 +427,7 @@
 0.000000 MetaHookPost LoadFile(./Bro_BitTorrent.events.bif.bro) -> -1
 0.000000 MetaHookPost LoadFile(./Bro_ConnSize.events.bif.bro) -> -1
 0.000000 MetaHookPost LoadFile(./Bro_ConnSize.functions.bif.bro) -> -1
+0.000000 MetaHookPost LoadFile(./Bro_DCE_RPC.consts.bif.bro) -> -1
 0.000000 MetaHookPost LoadFile(./Bro_DCE_RPC.events.bif.bro) -> -1
 0.000000 MetaHookPost LoadFile(./Bro_DCE_RPC.types.bif.bro) -> -1
 0.000000 MetaHookPost LoadFile(./Bro_DHCP.events.bif.bro) -> -1

@@ -966,7 +967,7 @@
 0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird]))
 0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509]))
 0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql]))
-0.000000 MetaHookPre CallFunction(Log::__write, <frame>, (PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T]))
+0.000000 MetaHookPre CallFunction(Log::__write, <frame>, (PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T]))
 0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Cluster::LOG))
 0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Communication::LOG))
 0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Conn::LOG))

@@ -1096,7 +1097,7 @@
 0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird]))
 0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509]))
 0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql]))
-0.000000 MetaHookPre CallFunction(Log::write, <frame>, (PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T]))
+0.000000 MetaHookPre CallFunction(Log::write, <frame>, (PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T]))
 0.000000 MetaHookPre CallFunction(NetControl::check_plugins, <frame>, ())
 0.000000 MetaHookPre CallFunction(NetControl::init, <null>, ())
 0.000000 MetaHookPre CallFunction(Notice::want_pp, <frame>, ())

@@ -1129,7 +1130,7 @@
 0.000000 MetaHookPre CallFunction(reading_live_traffic, <frame>, ())
 0.000000 MetaHookPre CallFunction(reading_traces, <frame>, ())
 0.000000 MetaHookPre CallFunction(set_to_regex, <frame>, ({}, (^\.?|\.)(~~)$))
-0.000000 MetaHookPre CallFunction(strftime, <frame>, (%Y, 1475869873.545611))
+0.000000 MetaHookPre CallFunction(strftime, <frame>, (%Y, 1477440372.839693))
 0.000000 MetaHookPre CallFunction(string_to_pattern, <frame>, ((^\.?|\.)()$, F))
 0.000000 MetaHookPre CallFunction(sub, <frame>, ((^\.?|\.)(~~)$, <...>/, ))
 0.000000 MetaHookPre CallFunction(to_count, <frame>, (2016))

@@ -1146,6 +1147,7 @@
 0.000000 MetaHookPre LoadFile(./Bro_BitTorrent.events.bif.bro)
 0.000000 MetaHookPre LoadFile(./Bro_ConnSize.events.bif.bro)
 0.000000 MetaHookPre LoadFile(./Bro_ConnSize.functions.bif.bro)
+0.000000 MetaHookPre LoadFile(./Bro_DCE_RPC.consts.bif.bro)
 0.000000 MetaHookPre LoadFile(./Bro_DCE_RPC.events.bif.bro)
 0.000000 MetaHookPre LoadFile(./Bro_DCE_RPC.types.bif.bro)
 0.000000 MetaHookPre LoadFile(./Bro_DHCP.events.bif.bro)

@@ -1684,7 +1686,7 @@
 0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird])
 0.000000 | HookCallFunction Log::__create_stream(X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509])
 0.000000 | HookCallFunction Log::__create_stream(mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql])
-0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T])
+0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T])
 0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG)
 0.000000 | HookCallFunction Log::add_default_filter(Communication::LOG)
 0.000000 | HookCallFunction Log::add_default_filter(Conn::LOG)

@@ -1814,7 +1816,7 @@
 0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird])
 0.000000 | HookCallFunction Log::create_stream(X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509])
 0.000000 | HookCallFunction Log::create_stream(mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql])
-0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1475869873.545999, node=bro, filter=ip or not ip, init=T, success=T])
+0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1477440372.840195, node=bro, filter=ip or not ip, init=T, success=T])
 0.000000 | HookCallFunction NetControl::check_plugins()
 0.000000 | HookCallFunction NetControl::init()
 0.000000 | HookCallFunction Notice::want_pp()

@@ -1847,7 +1849,7 @@
 0.000000 | HookCallFunction reading_live_traffic()
 0.000000 | HookCallFunction reading_traces()
 0.000000 | HookCallFunction set_to_regex({}, (^\.?|\.)(~~)$)
-0.000000 | HookCallFunction strftime(%Y, 1475869873.545611)
+0.000000 | HookCallFunction strftime(%Y, 1477440372.839693)
 0.000000 | HookCallFunction string_to_pattern((^\.?|\.)()$, F)
 0.000000 | HookCallFunction sub((^\.?|\.)(~~)$, <...>/, )
 0.000000 | HookCallFunction to_count(2016)
testing/btest/Baseline/signatures.dst-ip-cidr-v4/output | 6 (new file)

@@ -0,0 +1,6 @@
+match, foo
+match, foo
+match, foo
+match, foo
+match, foo
+match, foo

(A second new 6-line baseline file; its path is not shown in this view.)

@@ -0,0 +1,6 @@
+match, foo2
+match, foo2
+match, foo2
+match, foo2
+match, foo2
+match, foo2
testing/btest/Traces/ntp.pcap | BIN (new binary file, not shown)
testing/btest/signatures/dst-ip-cidr-v4.bro | 17 (new file)

@@ -0,0 +1,17 @@
+# @TEST-EXEC: bro -r $TRACES/ntp.pcap %INPUT >output
+# @TEST-EXEC: btest-diff output
+
+@TEST-START-FILE a.sig
+signature foo {
+  dst-ip == 17.0.0.0/8
+  ip-proto == udp
+  event "match"
+}
+@TEST-END-FILE
+
+event signature_match(state: signature_state, msg: string, data: string)
+	{
+	print "match", state$sig_id;
+	}
+
+@load-sigs ./a.sig
testing/btest/signatures/udp-payload-size.bro | 23 (new file)

@@ -0,0 +1,23 @@
+# @TEST-EXEC: bro -r $TRACES/ntp.pcap %INPUT >output
+# @TEST-EXEC: btest-diff output
+
+@TEST-START-FILE a.sig
+signature foo1 {
+  ip-proto == udp
+  payload-size < 1
+  event "match"
+}
+
+signature foo2 {
+  ip-proto == udp
+  payload-size > 0
+  event "match"
+}
+@TEST-END-FILE
+
+event signature_match(state: signature_state, msg: string, data: string)
+	{
+	print "match", state$sig_id;
+	}
+
+@load-sigs ./a.sig
@@ -56,4 +56,4 @@ for k in stats:
 		num_covered += 1
 
 if len(stats) > 0:
-	print "%s/%s (%.1f%%) Bro script statements covered." % (num_covered, len(stats), float(num_covered)/len(stats)*100)
+	print("%s/%s (%.1f%%) Bro script statements covered." % (num_covered, len(stats), float(num_covered)/len(stats)*100))