Merge remote-tracking branch 'origin/master' into topic/seth/intel-framework

Seth Hall 2012-08-30 11:36:34 -04:00
commit 78401262d0
117 changed files with 2004 additions and 295 deletions

148
CHANGES
View file

@ -1,4 +1,152 @@
2.1 | 2012-08-28 16:46:42 -0700
* Make bif.identify_magic robust against FreeBSD's libmagic config.
(Robin Sommer)
* Remove automatic use of gperftools on non-Linux systems.
--enable-perftools must now explicitly be supplied to ./configure
on non-Linux systems to link against the tcmalloc library.
* Fix uninitialized value for 'is_partial' in TCP analyzer. (Jon
Siwek)
* Parse 64-bit consts in Bro scripts correctly. (Bernhard Amann)
* Output 64-bit counts correctly on 32-bit machines (Bernhard Amann)
* Input framework fixes, including: (Bernhard Amann)
- One of the change events got the wrong parameters.
- Escape commas in sets and vectors that were unescaped before
tokenization.
- Handling of zero-length-strings as last element in a set was
broken (sets ending with a ,).
- Hashing of lines just containing zero-length-strings was broken.
- Make set_separator values other than "," work for the input framework.
- Input framework was not handling counts and ints out of
32-bit-range correctly.
- Errors in single lines no longer kill processing: the offending line
is simply logged and skipped, and processing continues.
* Update documentation for builtin types. (Daniel Thayer)
- Add missing description of interval "msec" unit.
- Improved description of pattern by clarifying the issue of
operand order and the difference between exact and embedded
matching.
* Documentation fixes for signature 'eval' conditions. (Jon Siwek)
* Remove orphaned 1.5 unit tests. (Jon Siwek)
* Add type checking for signature 'eval' condition functions. (Jon
Siwek)
* Adding an identifier to the SMTP blocklist notices for duplicate
suppression. (Seth Hall)
2.1-beta-45 | 2012-08-22 16:11:10 -0700
* Add an option to the input framework that allows the user to
choose not to abort upon encountering file/function types. (Bernhard Amann)
2.1-beta-41 | 2012-08-22 16:05:21 -0700
* Add test serialization to "leak" unit tests that use
communication. (Jon Siwek)
* Change to metrics/basic-cluster unit test for reliability. (Jon
Siwek)
* Fixed ack tracking which could overflow quickly in some
situations. (Seth Hall)
* Minor tweak to coverage.bare-mode-errors unit test to work with a
symlinked 'scripts' dir. (Jon Siwek)
2.1-beta-35 | 2012-08-22 08:44:52 -0700
* Add testcase for input framework reading sets (rather than
tables). (Bernhard Amann)
2.1-beta-31 | 2012-08-21 15:46:05 -0700
* Tweak to rotate-custom.bro unit test. (Jon Siwek)
* Ignore small mem leak every rotation interval for dataseries logs.
(Jon Siwek)
2.1-beta-28 | 2012-08-21 08:32:42 -0700
* Linking ES docs into logging document. (Robin Sommer)
2.1-beta-27 | 2012-08-20 20:06:20 -0700
* Add the Stream record to Log::active_streams to make more dynamic
logging possible. (Seth Hall)
* Fix portability of printing to files returned by
open("/dev/stderr"). (Jon Siwek)
* Fix mime type diff canonifier to also skip mime_desc columns. (Jon
Siwek)
* Unit test tweaks/fixes. (Jon Siwek)
- Some baselines for tests in "leaks" group were outdated.
- Changed a few of the cluster/communication tests to terminate
more explicitly instead of relying on btest-bg-wait to kill
processes. This makes the tests finish faster in the success case
and makes the reason for failing clearer in that case.
* Fix memory leak of serialized IDs when compiled with
--enable-debug. (Jon Siwek)
2.1-beta-21 | 2012-08-16 11:48:56 -0700
* Installing a handler for running out of memory in "new". Bro will
now print an error message in that case rather than abort with an
uncaught exception. (Robin Sommer)
2.1-beta-20 | 2012-08-16 11:43:31 -0700
* Fixed potential problems with ElasticSearch output plugin. (Seth
Hall)
2.1-beta-13 | 2012-08-10 12:28:04 -0700
* Reporter warnings and errors now print to stderr by default. New
options Reporter::warnings_to_stderr and
Reporter::errors_to_stderr to disable. (Seth Hall)
2.1-beta-9 | 2012-08-10 12:24:29 -0700
* Add more BIF tests. (Daniel Thayer)
2.1-beta-6 | 2012-08-10 12:22:52 -0700
* Fix bug in input framework with an edge case. (Bernhard Amann)
* Fix small bug in input framework test script. (Bernhard Amann)
2.1-beta-3 | 2012-08-03 10:46:49 -0700
* Merge branch 'master' of ssh://git.bro-ids.org/bro (Robin Sommer)
* Fix configure script to exit with non-zero status on error (Jon
Siwek)
* Improve ASCII output performance. (Robin Sommer)
2.1-beta | 2012-07-30 11:59:53 -0700
* Improve log filter compatibility with remote logging. Addresses

View file

@ -88,24 +88,30 @@ if (LIBGEOIP_FOUND)
list(APPEND OPTLIBS ${LibGeoIP_LIBRARY})
endif ()
set(USE_PERFTOOLS false)
set(HAVE_PERFTOOLS false)
set(USE_PERFTOOLS_DEBUG false)
set(USE_PERFTOOLS_TCMALLOC false)
if (NOT DISABLE_PERFTOOLS)
find_package(GooglePerftools)
endif ()
if (GOOGLEPERFTOOLS_FOUND)
include_directories(BEFORE ${GooglePerftools_INCLUDE_DIR})
set(USE_PERFTOOLS true)
set(HAVE_PERFTOOLS true)
# Non-Linux systems may not be well-supported by gperftools, so
# require explicit request from user to enable it in that case.
if (${CMAKE_SYSTEM_NAME} MATCHES "Linux" OR ENABLE_PERFTOOLS)
set(USE_PERFTOOLS_TCMALLOC true)
if (ENABLE_PERFTOOLS_DEBUG)
# Enable heap debugging with perftools.
set(USE_PERFTOOLS_DEBUG true)
list(APPEND OPTLIBS ${GooglePerftools_LIBRARIES_DEBUG})
else ()
# Link in tcmalloc for better performance.
list(APPEND OPTLIBS ${GooglePerftools_LIBRARIES})
if (ENABLE_PERFTOOLS_DEBUG)
# Enable heap debugging with perftools.
set(USE_PERFTOOLS_DEBUG true)
include_directories(BEFORE ${GooglePerftools_INCLUDE_DIR})
list(APPEND OPTLIBS ${GooglePerftools_LIBRARIES_DEBUG})
else ()
# Link in tcmalloc for better performance.
list(APPEND OPTLIBS ${GooglePerftools_LIBRARIES})
endif ()
endif ()
endif ()
@ -224,7 +230,8 @@ message(
"\nAux. Tools: ${INSTALL_AUX_TOOLS}"
"\n"
"\nGeoIP: ${USE_GEOIP}"
"\nGoogle perftools: ${USE_PERFTOOLS}"
"\ngperftools found: ${HAVE_PERFTOOLS}"
"\n tcmalloc: ${USE_PERFTOOLS_TCMALLOC}"
"\n debugging: ${USE_PERFTOOLS_DEBUG}"
"\ncURL: ${USE_CURL}"
"\n"

16
NEWS
View file

@ -7,8 +7,8 @@ release. For a complete list of changes, see the ``CHANGES`` file
(note that submodules, such as BroControl and Broccoli, come with
their own CHANGES.)
Bro 2.1 Beta
------------
Bro 2.1
-------
New Functionality
~~~~~~~~~~~~~~~~~
@ -102,9 +102,14 @@ the full set.
* Bro now requires CMake >= 2.6.3.
* Bro now links in tcmalloc (part of Google perftools) if found at
configure time. Doing so can significantly improve memory and
CPU use.
* On Linux, Bro now links in tcmalloc (part of Google perftools)
if found at configure time. Doing so can significantly improve
memory and CPU use.
On other platforms, the new configure option
--enable-perftools can be used to enable linking to tcmalloc.
(Note that perftools's support for non-Linux platforms may be
less reliable).
- The configure switch --enable-brov6 is gone.
@ -161,6 +166,7 @@ the full set.
- The ASCII writers "header_*" options have been renamed to "meta_*"
(because there's now also a footer).
Bro 2.0
-------

View file

@ -1 +1 @@
2.1-beta
2.1

@ -1 +1 @@
Subproject commit 22120825f8ad70e051ef4ca42f2199aa195dff40
Subproject commit a93ef1373512c661ffcd0d0a61bd19b96667e0d5

@ -1 +1 @@
Subproject commit 941ee753f7c71ec08fc29de04f09a8a83aebb69d
Subproject commit 6748ec3a96d582a977cd9114ef19c76fe75c57ff

@ -1 +1 @@
Subproject commit 5ff3e6a8e8535ed91e1f70d355b815ae8eeacb71
Subproject commit ebfa4de45a839e58aec200e7e4bad33eaab4f1ed

@ -1 +1 @@
Subproject commit 903108f6b43ad228309713da880026d50add41f4
Subproject commit 2fb9ff62bf08f78071753016863640022fbfe338

7
configure vendored
View file

@ -29,6 +29,8 @@ Usage: $0 [OPTION]... [VAR=VALUE]...
Optional Features:
--enable-debug compile in debugging mode
--enable-mobile-ipv6 analyze mobile IPv6 features defined by RFC 6275
--enable-perftools force use of Google perftools on non-Linux systems
(automatically on when perftools is present on Linux)
--enable-perftools-debug use Google's perftools for debugging
--disable-broccoli don't build or install the Broccoli library
--disable-broctl don't install Broctl
@ -98,6 +100,7 @@ append_cache_entry PY_MOD_INSTALL_DIR PATH $prefix/lib/broctl
append_cache_entry BRO_SCRIPT_INSTALL_PATH STRING $prefix/share/bro
append_cache_entry BRO_ETC_INSTALL_DIR PATH $prefix/etc
append_cache_entry ENABLE_DEBUG BOOL false
append_cache_entry ENABLE_PERFTOOLS BOOL false
append_cache_entry ENABLE_PERFTOOLS_DEBUG BOOL false
append_cache_entry BinPAC_SKIP_INSTALL BOOL true
append_cache_entry BUILD_SHARED_LIBS BOOL true
@ -146,7 +149,11 @@ while [ $# -ne 0 ]; do
--enable-mobile-ipv6)
append_cache_entry ENABLE_MOBILE_IPV6 BOOL true
;;
--enable-perftools)
append_cache_entry ENABLE_PERFTOOLS BOOL true
;;
--enable-perftools-debug)
append_cache_entry ENABLE_PERFTOOLS BOOL true
append_cache_entry ENABLE_PERFTOOLS_DEBUG BOOL true
;;
--disable-broccoli)

View file

@ -383,3 +383,4 @@ Bro supports the following output formats other than ASCII:
:maxdepth: 1
logging-dataseries
logging-elasticsearch

View file

@ -55,8 +55,8 @@ The Bro scripting language supports the following built-in types.
A temporal type representing a relative time. An ``interval``
constant can be written as a numeric constant followed by a time
unit where the time unit is one of ``usec``, ``sec``, ``min``,
``hr``, or ``day`` which respectively represent microseconds,
unit where the time unit is one of ``usec``, ``msec``, ``sec``, ``min``,
``hr``, or ``day`` which respectively represent microseconds, milliseconds,
seconds, minutes, hours, and days. Whitespace between the numeric
constant and time unit is optional. Appending the letter "s" to the
time unit in order to pluralize it is also optional (to no semantic
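For illustration, the following are all valid ``interval`` constants
under these rules (``250 msec`` and ``0.25 secs`` denote the same
value; ``msec`` is the unit documented by this change)::

    250 msec
    0.25 secs
    1.5hrs
    2 days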
@ -95,14 +95,14 @@ The Bro scripting language supports the following built-in types.
and embedded.
In exact matching the ``==`` equality relational operator is used
with one :bro:type:`string` operand and one :bro:type:`pattern`
operand to check whether the full string exactly matches the
pattern. In this case, the ``^`` beginning-of-line and ``$``
end-of-line anchors are redundant since pattern is implicitly
anchored to the beginning and end of the line to facilitate an exact
match. For example::
with one :bro:type:`pattern` operand and one :bro:type:`string`
operand (order of operands does not matter) to check whether the full
string exactly matches the pattern. In exact matching, the ``^``
beginning-of-line and ``$`` end-of-line anchors are redundant since
the pattern is implicitly anchored to the beginning and end of the
line to facilitate an exact match. For example::
"foo" == /foo|bar/
/foo|bar/ == "foo"
yields true, while::
@ -110,9 +110,9 @@ The Bro scripting language supports the following built-in types.
yields false. The ``!=`` operator would yield the negation of ``==``.
In embedded matching the ``in`` operator is again used with one
:bro:type:`string` operand and one :bro:type:`pattern` operand
(which must be on the left-hand side), but tests whether the pattern
In embedded matching the ``in`` operator is used with one
:bro:type:`pattern` operand (which must be on the left-hand side) and
one :bro:type:`string` operand, but tests whether the pattern
appears anywhere within the given string. For example::
/foo|bar/ in "foobar"

View file

@ -229,20 +229,10 @@ matched. The following context conditions are defined:
confirming the match. If false is returned, no signature match is
going to be triggered. The function has to be of type ``function
cond(state: signature_state, data: string): bool``. Here,
``content`` may contain the most recent content chunk available at
``data`` may contain the most recent content chunk available at
the time the signature was matched. If no such chunk is available,
``content`` will be the empty string. ``signature_state`` is
defined as follows:
.. code:: bro
type signature_state: record {
id: string; # ID of the signature
conn: connection; # Current connection
is_orig: bool; # True if current endpoint is originator
payload_size: count; # Payload size of the first packet
};
``data`` will be the empty string. See :bro:type:`signature_state`
for its definition.
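A minimal sketch of such a condition function (the function name is
hypothetical), using only fields available from
:bro:type:`signature_state`::

    function my_http_cond(state: signature_state, data: string): bool
        {
        # Only confirm the match when the responder port is 80/tcp.
        return state$conn$id$resp_p == 80/tcp;
        }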
``payload-size <cmp> <integer>``
Compares the integer to the size of the payload of a packet. For

View file

@ -8,8 +8,16 @@ export {
## The default input reader used. Defaults to `READER_ASCII`.
const default_reader = READER_ASCII &redef;
## The default reader mode used. Defaults to `MANUAL`.
const default_mode = MANUAL &redef;
## Flag that controls whether the input framework accepts records
## that contain types that are not supported (at the moment
## file and function). If true, the input framework will
## warn in these cases, but continue. If false, it will
## abort. Defaults to false (abort).
const accept_unsupported_types = F &redef;
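A minimal usage sketch (the stream name, source file, and record types
below are hypothetical): with the flag redefined to true, a table read
whose value record declares an optional file field proceeds instead of
aborting, and the unsupported field is simply left unset:

    redef Input::accept_unsupported_types = T;

    type Idx: record { i: int; };
    type Val: record { s: string; f: file &optional; };

    global results: table[int] of Val = table();

    event bro_init()
        {
        Input::add_table([$source="unsupported.log", $name="unsupported",
                          $idx=Idx, $val=Val, $destination=results]);
        }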
## TableFilter description type used for the `table` method.
type TableDescription: record {
## Common definitions for tables and events

View file

@ -329,9 +329,9 @@ export {
global run_rotation_postprocessor_cmd: function(info: RotationInfo, npath: string) : bool;
## The streams which are currently active and not disabled.
## This set is not meant to be modified by users! Only use it for
## This table is not meant to be modified by users! Only use it for
## examining which streams are active.
global active_streams: set[ID] = set();
global active_streams: table[ID] of Stream = table();
}
# We keep a script-level copy of all filters so that we can manipulate them.
@ -417,7 +417,7 @@ function create_stream(id: ID, stream: Stream) : bool
if ( ! __create_stream(id, stream) )
return F;
add active_streams[id];
active_streams[id] = stream;
return add_default_filter(id);
}
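Since active_streams now maps each stream ID to its Stream record,
scripts can enumerate the active streams directly; a minimal sketch:

    event bro_init() &priority=-5
        {
        for ( id in Log::active_streams )
            print fmt("active log stream: %s", id);
        }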

View file

@ -1,5 +1,5 @@
##! This framework is intended to create an output and filtering path for
##! internal messages/warnings/errors. It should typically be loaded to
##! This framework is intended to create an output and filtering path for
##! internal messages/warnings/errors. It should typically be loaded to
##! avoid Bro spewing internal messages to standard error and instead log
##! them to a file in a standard way. Note that this framework deals with
##! the handling of internally-generated reporter messages, for the
@ -13,11 +13,11 @@ export {
redef enum Log::ID += { LOG };
## An indicator of reporter message severity.
type Level: enum {
type Level: enum {
## Informational, not needing specific attention.
INFO,
INFO,
## Warning of a potential problem.
WARNING,
WARNING,
## A non-fatal error that should be addressed, but doesn't
## terminate program execution.
ERROR
@ -36,24 +36,55 @@ export {
## Not all reporter messages will have locations in them though.
location: string &log &optional;
};
## Tunable for sending reporter warning messages to STDERR. The option to
## turn it off is presented here in case Bro is being run by some
## external harness and shouldn't output anything to the console.
const warnings_to_stderr = T &redef;
## Tunable for sending reporter error messages to STDERR. The option to
## turn it off is presented here in case Bro is being run by some
## external harness and shouldn't output anything to the console.
const errors_to_stderr = T &redef;
}
global stderr: file;
event bro_init() &priority=5
{
Log::create_stream(Reporter::LOG, [$columns=Info]);
if ( errors_to_stderr || warnings_to_stderr )
stderr = open("/dev/stderr");
}
event reporter_info(t: time, msg: string, location: string)
event reporter_info(t: time, msg: string, location: string) &priority=-5
{
Log::write(Reporter::LOG, [$ts=t, $level=INFO, $message=msg, $location=location]);
}
event reporter_warning(t: time, msg: string, location: string)
event reporter_warning(t: time, msg: string, location: string) &priority=-5
{
if ( warnings_to_stderr )
{
if ( t > double_to_time(0.0) )
print stderr, fmt("WARNING: %.6f %s (%s)", t, msg, location);
else
print stderr, fmt("WARNING: %s (%s)", msg, location);
}
Log::write(Reporter::LOG, [$ts=t, $level=WARNING, $message=msg, $location=location]);
}
event reporter_error(t: time, msg: string, location: string)
event reporter_error(t: time, msg: string, location: string) &priority=-5
{
if ( errors_to_stderr )
{
if ( t > double_to_time(0.0) )
print stderr, fmt("ERROR: %.6f %s (%s)", t, msg, location);
else
print stderr, fmt("ERROR: %s (%s)", msg, location);
}
Log::write(Reporter::LOG, [$ts=t, $level=ERROR, $message=msg, $location=location]);
}
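When Bro runs under an external harness that should not receive console
output, both tunables can be switched off; a minimal sketch:

    redef Reporter::warnings_to_stderr = F;
    redef Reporter::errors_to_stderr = F;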

View file

@ -1,3 +1,4 @@
##! Watch for various SPAM blocklist URLs in SMTP error messages.
@load base/protocols/smtp
@ -5,9 +6,11 @@ module SMTP;
export {
redef enum Notice::Type += {
## Indicates that the server sent a reply mentioning an SMTP block list.
## An SMTP server sent a reply mentioning an SMTP block list.
Blocklist_Error_Message,
## Indicates the client's address is seen in the block list error message.
## The originator's address is seen in the block list error message.
## This is useful to detect local hosts sending SPAM with a high
## positive rate.
Blocklist_Blocked_Host,
};
@ -52,7 +55,8 @@ event smtp_reply(c: connection, is_orig: bool, code: count, cmd: string,
message = fmt("%s is on an SMTP block list", c$id$orig_h);
}
NOTICE([$note=note, $conn=c, $msg=message, $sub=msg]);
NOTICE([$note=note, $conn=c, $msg=message, $sub=msg,
$identifier=cat(c$id$orig_h)]);
}
}
}

View file

@ -8,13 +8,13 @@ export {
## Optionally ignore any :bro:type:`Log::ID` from being sent to
## ElasticSearch with this script.
const excluded_log_ids: set[string] = set("Communication::LOG") &redef;
const excluded_log_ids: set[Log::ID] &redef;
## If you want to explicitly only send certain :bro:type:`Log::ID`
## streams, add them to this set. If the set remains empty, all will
## be sent. The :bro:id:`LogElasticSearch::excluded_log_ids` option will remain in
## effect as well.
const send_logs: set[string] = set() &redef;
const send_logs: set[Log::ID] &redef;
}
event bro_init() &priority=-5
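With both options now typed as set[Log::ID], selections are expressed
with enum values rather than formatted strings; a sketch with
hypothetically chosen streams:

    redef LogElasticSearch::excluded_log_ids += { DNS::LOG };
    redef LogElasticSearch::send_logs += { HTTP::LOG, Conn::LOG };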
@ -24,8 +24,8 @@ event bro_init() &priority=-5
for ( stream_id in Log::active_streams )
{
if ( fmt("%s", stream_id) in excluded_log_ids ||
(|send_logs| > 0 && fmt("%s", stream_id) !in send_logs) )
if ( stream_id in excluded_log_ids ||
(|send_logs| > 0 && stream_id !in send_logs) )
next;
local filter: Log::Filter = [$name = "default-es",

View file

@ -138,11 +138,22 @@ BroFile::BroFile(FILE* arg_f, const char* arg_name, const char* arg_access)
BroFile::BroFile(const char* arg_name, const char* arg_access, BroType* arg_t)
{
Init();
f = 0;
name = copy_string(arg_name);
access = copy_string(arg_access);
t = arg_t ? arg_t : base_type(TYPE_STRING);
if ( ! Open() )
if ( streq(name, "/dev/stdin") )
f = stdin;
else if ( streq(name, "/dev/stdout") )
f = stdout;
else if ( streq(name, "/dev/stderr") )
f = stderr;
if ( f )
is_open = 1;
else if ( ! Open() )
{
reporter->Error("cannot open %s: %s", name, strerror(errno));
is_open = 0;
@ -342,8 +353,8 @@ int BroFile::Close()
FinishEncrypt();
// Do not close stdout/stderr.
if ( f == stdout || f == stderr )
// Do not close stdin/stdout/stderr.
if ( f == stdin || f == stdout || f == stderr )
return 0;
if ( is_in_cache )
@ -508,7 +519,7 @@ void BroFile::SetAttrs(Attributes* arg_attrs)
if ( attrs->FindAttr(ATTR_RAW_OUTPUT) )
EnableRawOutput();
InstallRotateTimer();
}
@ -523,6 +534,10 @@ RecordVal* BroFile::Rotate()
if ( ! is_open )
return 0;
// Do not rotate stdin/stdout/stderr.
if ( f == stdin || f == stdout || f == stderr )
return 0;
if ( okay_to_manage && ! is_in_cache )
BringIntoCache();
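At the script level, the effect of the change above is that printing to
the standard stream pseudo-files behaves consistently across platforms;
a minimal sketch:

    global err: file;

    event bro_init()
        {
        err = open("/dev/stderr");
        print err, "a diagnostic line written to standard error";
        }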

View file

@ -2897,11 +2897,6 @@ void RemoteSerializer::GotID(ID* id, Val* val)
(desc && *desc) ? desc : "not set"),
current_peer);
#ifdef USE_PERFTOOLS_DEBUG
// May still be cached, but we don't care.
heap_checker->IgnoreObject(id);
#endif
Unref(id);
return;
}

View file

@ -126,6 +126,23 @@ RuleConditionEval::RuleConditionEval(const char* func)
rules_error("unknown identifier", func);
return;
}
if ( id->Type()->Tag() == TYPE_FUNC )
{
// Validate argument quantity and type.
FuncType* f = id->Type()->AsFuncType();
if ( f->YieldType()->Tag() != TYPE_BOOL )
rules_error("eval function type must yield a 'bool'", func);
TypeList tl;
tl.Append(internal_type("signature_state")->Ref());
tl.Append(base_type(TYPE_STRING));
if ( ! f->CheckArgs(tl.Types()) )
rules_error("eval function parameters must be a 'signature_state' "
"and a 'string' type", func);
}
}
bool RuleConditionEval::DoMatch(Rule* rule, RuleEndpointState* state,

View file

@ -12,10 +12,10 @@
int killed_by_inactivity = 0;
uint32 tot_ack_events = 0;
uint32 tot_ack_bytes = 0;
uint32 tot_gap_events = 0;
uint32 tot_gap_bytes = 0;
uint64 tot_ack_events = 0;
uint64 tot_ack_bytes = 0;
uint64 tot_gap_events = 0;
uint64 tot_gap_bytes = 0;
class ProfileTimer : public Timer {

View file

@ -116,10 +116,10 @@ extern SampleLogger* sample_logger;
extern int killed_by_inactivity;
// Content gap statistics.
extern uint32 tot_ack_events;
extern uint32 tot_ack_bytes;
extern uint32 tot_gap_events;
extern uint32 tot_gap_bytes;
extern uint64 tot_ack_events;
extern uint64 tot_ack_bytes;
extern uint64 tot_gap_events;
extern uint64 tot_gap_bytes;
// A TCPStateStats object tracks the distribution of TCP states for

View file

@ -46,6 +46,7 @@ TCP_Analyzer::TCP_Analyzer(Connection* conn)
finished = 0;
reassembling = 0;
first_packet_seen = 0;
is_partial = 0;
orig = new TCP_Endpoint(this, 1);
resp = new TCP_Endpoint(this, 0);

View file

@ -20,10 +20,10 @@ const bool DEBUG_tcp_connection_close = false;
const bool DEBUG_tcp_match_undelivered = false;
static double last_gap_report = 0.0;
static uint32 last_ack_events = 0;
static uint32 last_ack_bytes = 0;
static uint32 last_gap_events = 0;
static uint32 last_gap_bytes = 0;
static uint64 last_ack_events = 0;
static uint64 last_ack_bytes = 0;
static uint64 last_gap_events = 0;
static uint64 last_gap_bytes = 0;
TCP_Reassembler::TCP_Reassembler(Analyzer* arg_dst_analyzer,
TCP_Analyzer* arg_tcp_analyzer,
@ -513,10 +513,10 @@ void TCP_Reassembler::AckReceived(int seq)
if ( gap_report && gap_report_freq > 0.0 &&
dt >= gap_report_freq )
{
int devents = tot_ack_events - last_ack_events;
int dbytes = tot_ack_bytes - last_ack_bytes;
int dgaps = tot_gap_events - last_gap_events;
int dgap_bytes = tot_gap_bytes - last_gap_bytes;
uint64 devents = tot_ack_events - last_ack_events;
uint64 dbytes = tot_ack_bytes - last_ack_bytes;
uint64 dgaps = tot_gap_events - last_gap_events;
uint64 dgap_bytes = tot_gap_bytes - last_gap_bytes;
RecordVal* r = new RecordVal(gap_info);
r->Assign(0, new Val(devents, TYPE_COUNT));

View file

@ -64,7 +64,7 @@ Val::~Val()
Unref(type);
#ifdef DEBUG
Unref(bound_id);
delete [] bound_id;
#endif
}

View file

@ -347,13 +347,15 @@ public:
#ifdef DEBUG
// For debugging, we keep a reference to the global ID to which a
// value has been bound *last*.
ID* GetID() const { return bound_id; }
ID* GetID() const
{
return bound_id ? global_scope()->Lookup(bound_id) : 0;
}
void SetID(ID* id)
{
if ( bound_id )
::Unref(bound_id);
bound_id = id;
::Ref(bound_id);
delete [] bound_id;
bound_id = id ? copy_string(id->Name()) : 0;
}
#endif
@ -401,8 +403,8 @@ protected:
RecordVal* attribs;
#ifdef DEBUG
// For debugging, we keep the ID to which a Val is bound.
ID* bound_id;
// For debugging, we keep the name of the ID to which a Val is bound.
const char* bound_id;
#endif
};

View file

@ -3787,7 +3787,7 @@ static GeoIP* open_geoip_db(GeoIPDBTypes type)
geoip = GeoIP_open_type(type, GEOIP_MEMORY_CACHE);
if ( ! geoip )
reporter->Warning("Failed to open GeoIP database: %s",
reporter->Info("Failed to open GeoIP database: %s",
GeoIPDBFileName[type]);
return geoip;
}
@ -3827,7 +3827,7 @@ function lookup_location%(a: addr%) : geo_location
if ( ! geoip )
builtin_error("Can't initialize GeoIP City/Country database");
else
reporter->Warning("Fell back to GeoIP Country database");
reporter->Info("Fell back to GeoIP Country database");
}
else
have_city_db = true;

View file

@ -34,6 +34,10 @@ function Input::__force_update%(id: string%) : bool
return new Val(res, TYPE_BOOL);
%}
# Options for the input framework
const accept_unsupported_types: bool;
# Options for Ascii Reader
module InputAscii;

View file

@ -388,6 +388,8 @@ bool Manager::CreateEventStream(RecordVal* fval)
FuncType* etype = event->FType()->AsFuncType();
bool allow_file_func = false;
if ( ! etype->IsEvent() )
{
reporter->Error("stream event is a function, not an event");
@ -453,6 +455,8 @@ bool Manager::CreateEventStream(RecordVal* fval)
return false;
}
allow_file_func = BifConst::Input::accept_unsupported_types;
}
else
@ -461,7 +465,7 @@ bool Manager::CreateEventStream(RecordVal* fval)
vector<Field*> fieldsV; // vector, because UnrollRecordType needs it
bool status = !UnrollRecordType(&fieldsV, fields, "");
bool status = (! UnrollRecordType(&fieldsV, fields, "", allow_file_func));
if ( status )
{
@ -609,12 +613,12 @@ bool Manager::CreateTableStream(RecordVal* fval)
vector<Field*> fieldsV; // vector, because we don't know the length beforehands
bool status = !UnrollRecordType(&fieldsV, idx, "");
bool status = (! UnrollRecordType(&fieldsV, idx, "", false));
int idxfields = fieldsV.size();
if ( val ) // if we are not a set
status = status || !UnrollRecordType(&fieldsV, val, "");
status = status || ! UnrollRecordType(&fieldsV, val, "", BifConst::Input::accept_unsupported_types);
int valfields = fieldsV.size() - idxfields;
@ -772,15 +776,29 @@ bool Manager::RemoveStreamContinuation(ReaderFrontend* reader)
return true;
}
bool Manager::UnrollRecordType(vector<Field*> *fields,
const RecordType *rec, const string& nameprepend)
bool Manager::UnrollRecordType(vector<Field*> *fields, const RecordType *rec,
const string& nameprepend, bool allow_file_func)
{
for ( int i = 0; i < rec->NumFields(); i++ )
{
if ( ! IsCompatibleType(rec->FieldType(i)) )
{
{
// If the field is a file or a function type
// and it is optional, we accept it nevertheless.
// This allows importing logfiles containing this
// stuff that we actually cannot read :)
if ( allow_file_func )
{
if ( ( rec->FieldType(i)->Tag() == TYPE_FILE ||
rec->FieldType(i)->Tag() == TYPE_FUNC ) &&
rec->FieldDecl(i)->FindAttr(ATTR_OPTIONAL) )
{
reporter->Info("Encountered incompatible type \"%s\" in table definition for ReaderFrontend. Ignoring field.", type_name(rec->FieldType(i)->Tag()));
continue;
}
}
reporter->Error("Incompatible type \"%s\" in table definition for ReaderFrontend", type_name(rec->FieldType(i)->Tag()));
return false;
}
@ -789,7 +807,7 @@ bool Manager::UnrollRecordType(vector<Field*> *fields,
{
string prep = nameprepend + rec->FieldName(i) + ".";
if ( !UnrollRecordType(fields, rec->FieldType(i)->AsRecordType(), prep) )
if ( !UnrollRecordType(fields, rec->FieldType(i)->AsRecordType(), prep, allow_file_func) )
{
return false;
}
@ -1044,9 +1062,7 @@ int Manager::SendEntryTable(Stream* i, const Value* const *vals)
if ( ! updated )
{
// throw away. Hence - we quit. And remove the entry from the current dictionary...
// (but why should it be in there? assert this).
assert ( stream->currDict->RemoveEntry(idxhash) == 0 );
// just quit and delete everything we created.
delete idxhash;
delete h;
return stream->num_val_fields + stream->num_idx_fields;
@ -1212,7 +1228,7 @@ void Manager::EndCurrentSend(ReaderFrontend* reader)
Ref(predidx);
Ref(val);
Ref(ev);
SendEvent(stream->event, 3, ev, predidx, val);
SendEvent(stream->event, 4, stream->description->Ref(), ev, predidx, val);
}
if ( predidx ) // if we have a stream or an event...
@ -1677,6 +1693,18 @@ RecordVal* Manager::ValueToRecordVal(const Value* const *vals,
Val* fieldVal = 0;
if ( request_type->FieldType(i)->Tag() == TYPE_RECORD )
fieldVal = ValueToRecordVal(vals, request_type->FieldType(i)->AsRecordType(), position);
else if ( request_type->FieldType(i)->Tag() == TYPE_FILE ||
request_type->FieldType(i)->Tag() == TYPE_FUNC )
{
// If those two unsupported types are encountered here, they have
// been let through by the type checking.
// That means that they are optional & the user agreed to ignore
// them and has been warned by reporter.
// Hence -> assign null to the field, done.
// Better check that it really is optional. You never know.
assert(request_type->FieldDecl(i)->FindAttr(ATTR_OPTIONAL));
}
else
{
fieldVal = ValueToVal(vals[*position], request_type->FieldType(i));
@ -1720,7 +1748,7 @@ int Manager::GetValueLength(const Value* val) {
case TYPE_STRING:
case TYPE_ENUM:
{
length += val->val.string_val.length;
length += val->val.string_val.length + 1;
break;
}
@ -1820,7 +1848,10 @@ int Manager::CopyValue(char *data, const int startpos, const Value* val)
case TYPE_ENUM:
{
memcpy(data+startpos, val->val.string_val.data, val->val.string_val.length);
return val->val.string_val.length;
// Add a \0 to the end. To be able to hash zero-length
// strings and differentiate from !present.
memset(data + startpos + val->val.string_val.length, 0, 1);
return val->val.string_val.length + 1;
}
case TYPE_ADDR:
@ -1911,13 +1942,15 @@ HashKey* Manager::HashValues(const int num_elements, const Value* const *vals)
const Value* val = vals[i];
if ( val->present )
length += GetValueLength(val);
// And in any case add 1 for the end-of-field-identifier.
length++;
}
if ( length == 0 )
{
reporter->Error("Input reader sent line where all elements are null values. Ignoring line");
assert ( length >= num_elements );
if ( length == num_elements )
return NULL;
}
int position = 0;
char *data = (char*) malloc(length);
@ -1929,6 +1962,12 @@ HashKey* Manager::HashValues(const int num_elements, const Value* const *vals)
const Value* val = vals[i];
if ( val->present )
position += CopyValue(data, position, val);
memset(data + position, 1, 1); // Add end-of-field-marker. Does not really matter which value it is,
// it just has to be... something.
position++;
}
HashKey *key = new HashKey(data, length);
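On the script side, the SendEvent change above means the event of a
table stream now receives the stream's description record as its first
argument; a sketch with hypothetical Idx/Val record types, where "line"
is the event passed via $ev to Input::add_table:

    type Idx: record { i: int; };
    type Val: record { s: string; };

    event line(description: Input::TableDescription, tpe: Input::Event,
               left: Idx, right: Val)
        {
        print fmt("%s: %s", description$name, tpe);
        }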

View file

@ -158,7 +158,7 @@ private:
// Check if a record is made up of compatible types and return a list
// of all fields that are in the record in order. Recursively unrolls
// records
bool UnrollRecordType(vector<threading::Field*> *fields, const RecordType *rec, const string& nameprepend);
bool UnrollRecordType(vector<threading::Field*> *fields, const RecordType *rec, const string& nameprepend, bool allow_file_func);
// Send events
void SendEvent(EventHandlerPtr ev, const int numvals, ...);

View file

@ -11,6 +11,7 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
using namespace input::reader;
using threading::Value;
@ -209,6 +210,42 @@ bool Ascii::GetLine(string& str)
return false;
}
bool Ascii::CheckNumberError(const string& s, const char * end)
{
// Do this check first, before executing s.c_str() or similar;
// otherwise the value that *end points to at the moment might
// already be gone ...
bool endnotnull = (*end != '\0');
if ( s.length() == 0 )
{
Error("Got empty string for number field");
return true;
}
if ( end == s.c_str() ) {
Error(Fmt("String '%s' contained no parseable number", s.c_str()));
return true;
}
if ( endnotnull )
Warning(Fmt("Number '%s' contained non-numeric trailing characters. Ignored trailing characters '%s'", s.c_str(), end));
if ( errno == EINVAL )
{
Error(Fmt("String '%s' could not be converted to a number", s.c_str()));
return true;
}
else if ( errno == ERANGE )
{
Error(Fmt("Number '%s' out of supported range.", s.c_str()));
return true;
}
return false;
}
Value* Ascii::EntryToVal(string s, FieldMapping field)
{
@ -216,10 +253,13 @@ Value* Ascii::EntryToVal(string s, FieldMapping field)
return new Value(field.type, false);
Value* val = new Value(field.type, true);
char* end = 0;
errno = 0;
switch ( field.type ) {
case TYPE_ENUM:
case TYPE_STRING:
s = get_unescaped_string(s);
val->val.string_val.length = s.size();
val->val.string_val.data = copy_string(s.c_str());
break;
@ -238,27 +278,37 @@ Value* Ascii::EntryToVal(string s, FieldMapping field)
break;
case TYPE_INT:
val->val.int_val = atoi(s.c_str());
val->val.int_val = strtoll(s.c_str(), &end, 10);
if ( CheckNumberError(s, end) )
return 0;
break;
case TYPE_DOUBLE:
case TYPE_TIME:
case TYPE_INTERVAL:
val->val.double_val = atof(s.c_str());
val->val.double_val = strtod(s.c_str(), &end);
if ( CheckNumberError(s, end) )
return 0;
break;
case TYPE_COUNT:
case TYPE_COUNTER:
val->val.uint_val = atoi(s.c_str());
val->val.uint_val = strtoull(s.c_str(), &end, 10);
if ( CheckNumberError(s, end) )
return 0;
break;
case TYPE_PORT:
val->val.port_val.port = atoi(s.c_str());
val->val.port_val.port = strtoull(s.c_str(), &end, 10);
if ( CheckNumberError(s, end) )
return 0;
val->val.port_val.proto = TRANSPORT_UNKNOWN;
break;
case TYPE_SUBNET:
{
s = get_unescaped_string(s);
size_t pos = s.find("/");
if ( pos == s.npos )
{
@ -266,7 +316,11 @@ Value* Ascii::EntryToVal(string s, FieldMapping field)
return 0;
}
int width = atoi(s.substr(pos+1).c_str());
uint8_t width = (uint8_t) strtol(s.substr(pos+1).c_str(), &end, 10);
if ( CheckNumberError(s, end) )
return 0;
string addr = s.substr(0, pos);
val->val.subnet_val.prefix = StringToAddr(addr);
@ -275,6 +329,7 @@ Value* Ascii::EntryToVal(string s, FieldMapping field)
}
case TYPE_ADDR:
s = get_unescaped_string(s);
val->val.addr_val = StringToAddr(s);
break;
@ -288,7 +343,10 @@ Value* Ascii::EntryToVal(string s, FieldMapping field)
// how many entries do we have...
unsigned int length = 1;
for ( unsigned int i = 0; i < s.size(); i++ )
if ( s[i] == ',' ) length++;
{
if ( s[i] == set_separator[0] )
length++;
}
unsigned int pos = 0;
@ -342,9 +400,24 @@ Value* Ascii::EntryToVal(string s, FieldMapping field)
pos++;
}
// Test if the string ends with a set_separator... or if the
// complete string is empty. In either of these cases we have
// to push an empty val on top of it.
if ( s.empty() || *s.rbegin() == set_separator[0] )
{
lvals[pos] = EntryToVal("", field.subType());
if ( lvals[pos] == 0 )
{
Error("Error while trying to add empty set element");
return 0;
}
pos++;
}
if ( pos != length )
{
Error("Internal error while parsing set: did not find all elements");
Error(Fmt("Internal error while parsing set: did not find all elements: %s", s.c_str()));
return 0;
}
@ -428,6 +501,7 @@ bool Ascii::DoUpdate()
while ( GetLine(line ) )
{
// split on tabs
bool error = false;
istringstream splitstream(line);
map<int, string> stringfields;
@ -438,8 +512,6 @@ bool Ascii::DoUpdate()
if ( ! getline(splitstream, s, separator[0]) )
break;
s = get_unescaped_string(s);
stringfields[pos] = s;
pos++;
}
@ -474,8 +546,9 @@ bool Ascii::DoUpdate()
Value* val = EntryToVal(stringfields[(*fit).position], *fit);
if ( val == 0 )
{
Error("Could not convert String value to Val");
return false;
Error(Fmt("Could not convert line '%s' to Val. Ignoring line.", line.c_str()));
error = true;
break;
}
if ( (*fit).secondary_position != -1 )
@ -492,6 +565,19 @@ bool Ascii::DoUpdate()
fpos++;
}
if ( error )
{
// Encountered non-fatal error, ignoring line. But
// first, delete all successfully read fields and the
// array structure.
for ( int i = 0; i < fpos; i++ )
delete fields[i];
delete [] fields;
continue;
}
//printf("fpos: %d, second.num_fields: %d\n", fpos, (*it).second.num_fields);
assert ( fpos == NumFields() );
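The set parsing above honors the reader's configured separator, so
ASCII input sources whose set and vector fields use something other
than a comma can be read by redefining the tunable (assuming Bro 2.1's
InputAscii::set_separator option); a minimal sketch:

    # Applies to all ASCII input streams read by this Bro instance.
    redef InputAscii::set_separator = "|";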

View file

@ -48,6 +48,7 @@ private:
bool ReadHeader(bool useCached);
bool GetLine(string& str);
threading::Value* EntryToVal(string s, FieldMapping type);
bool CheckNumberError(const string& s, const char * end);
ifstream* file;
time_t mtime;

View file

@ -359,7 +359,7 @@ bool Ascii::DoWrite(int num_fields, const Field* const * fields,
if ( ! safe_write(fd, bytes, len) )
goto write_error;
if ( IsBuf() )
if ( ! IsBuf() )
fsync(fd);
return true;

View file

@ -243,8 +243,25 @@ bool DataSeries::OpenLog(string path)
log_file->writeExtentLibrary(log_types);
for( size_t i = 0; i < schema_list.size(); ++i )
extents.insert(std::make_pair(schema_list[i].field_name,
GeneralField::create(log_series, schema_list[i].field_name)));
{
string fn = schema_list[i].field_name;
GeneralField* gf = 0;
#ifdef USE_PERFTOOLS_DEBUG
{
// GeneralField isn't cleaning up some results of xml parsing, reported
// here: https://github.com/dataseries/DataSeries/issues/1
// Ignore for now to make leak tests pass. There's confidence that
// we do clean up the GeneralField* since the ExtentSeries dtor for
// member log_series would trigger an assert if dynamically allocated
// fields aren't deleted beforehand.
HeapLeakChecker::Disabler disabler;
#endif
gf = GeneralField::create(log_series, fn);
#ifdef USE_PERFTOOLS_DEBUG
}
#endif
extents.insert(std::make_pair(fn, gf));
}
if ( ds_extent_size < ROW_MIN )
{

View file

@ -371,7 +371,11 @@ bool ElasticSearch::HTTPSend(CURL *handle)
// The best (only?) way to disable that is to just use HTTP 1.0
curl_easy_setopt(handle, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_0);
//curl_easy_setopt(handle, CURLOPT_TIMEOUT_MS, transfer_timeout);
// Some timeout options. These will need more attention later.
curl_easy_setopt(handle, CURLOPT_NOSIGNAL, 1);
curl_easy_setopt(handle, CURLOPT_CONNECTTIMEOUT_MS, transfer_timeout);
curl_easy_setopt(handle, CURLOPT_TIMEOUT_MS, transfer_timeout*2);
curl_easy_setopt(handle, CURLOPT_DNS_CACHE_TIMEOUT, 60*60);
CURLcode return_code = curl_easy_perform(handle);

View file

@ -337,6 +337,8 @@ void terminate_bro()
delete log_mgr;
delete thread_mgr;
delete reporter;
reporter = 0;
}
void termination_signal()
@ -380,6 +382,8 @@ static void bro_new_handler()
int main(int argc, char** argv)
{
std::set_new_handler(bro_new_handler);
brofiler.ReadStats();
bro_argc = argc;

View file

@ -56,7 +56,7 @@ void modp_uitoa10(uint32_t value, char* str)
void modp_litoa10(int64_t value, char* str)
{
char* wstr=str;
unsigned long uvalue = (value < 0) ? -value : value;
uint64_t uvalue = (value < 0) ? -value : value;
// Conversion. Number is reversed.
do *wstr++ = (char)(48 + (uvalue % 10)); while(uvalue /= 10);

View file

@ -437,9 +437,7 @@ F RET_CONST(new Val(false, TYPE_BOOL))
}
{D} {
// TODO: check if we can use strtoull instead of atol,
// and similarly for {HEX}.
RET_CONST(new Val(static_cast<unsigned int>(atol(yytext)),
RET_CONST(new Val(static_cast<bro_uint_t>(strtoul(yytext, (char**) NULL, 10)),
TYPE_COUNT))
}
{FLOAT} RET_CONST(new Val(atof(yytext), TYPE_DOUBLE))
@ -483,7 +481,7 @@ F RET_CONST(new Val(false, TYPE_BOOL))
({D}"."){3}{D} RET_CONST(new AddrVal(yytext))
"0x"{HEX}+ RET_CONST(new Val(static_cast<bro_uint_t>(strtol(yytext, 0, 16)), TYPE_COUNT))
"0x"{HEX}+ RET_CONST(new Val(static_cast<bro_uint_t>(strtoull(yytext, 0, 16)), TYPE_COUNT))
{H}("."{H})+ RET_CONST(dns_mgr->LookupHost(yytext))

View file

@ -1383,7 +1383,13 @@ void safe_close(int fd)
void out_of_memory(const char* where)
{
reporter->FatalError("out of memory in %s.\n", where);
fprintf(stderr, "out of memory in %s.\n", where);
if ( reporter )
// Guess that might fail here if memory is really tight ...
reporter->FatalError("out of memory in %s.\n", where);
abort();
}
void get_memory_usage(unsigned int* total, unsigned int* malloced)

View file

@ -1,5 +0,0 @@
1128727430.350788 ? 141.42.64.125 125.190.109.199 other 56729 12345 tcp ? ? S0 X 1 60 0 0 cc=1
1144876538.705610 5.921003 169.229.147.203 239.255.255.253 other 49370 427 udp 147 ? S0 X 3 231 0 0
1144876599.397603 0.815763 192.150.186.169 194.64.249.244 http 53063 80 tcp 377 445 SF X 6 677 5 713
1144876709.032670 9.000191 169.229.147.43 239.255.255.253 other 49370 427 udp 196 ? S0 X 4 308 0 0
1144876697.068273 0.000650 192.150.186.169 192.150.186.15 icmp-unreach 3 3 icmp 56 ? OTH X 2 112 0 0

View file

@ -1,5 +0,0 @@
1128727430.350788 ? 141.42.64.125 125.190.109.199 other 56729 12345 tcp ? ? S0 X 1 60 0 0
1144876538.705610 5.921003 169.229.147.203 239.255.255.253 other 49370 427 udp 147 ? S0 X 3 231 0 0
1144876599.397603 0.815763 192.150.186.169 194.64.249.244 http 53063 80 tcp 377 445 SF X 6 697 5 713
1144876709.032670 9.000191 169.229.147.43 239.255.255.253 other 49370 427 udp 196 ? S0 X 4 308 0 0
1144876697.068273 0.000650 192.150.186.169 192.150.186.15 icmp-unreach 3 3 icmp 56 ? OTH X 2 112 0 0

View file

@ -0,0 +1 @@
PIA_TCP

View file

@ -0,0 +1 @@
T

View file

@ -0,0 +1,2 @@
[entropy=4.715374, chi_square=591.981818, mean=75.472727, monte_carlo_pi=4.0, serial_correlation=-0.11027]
[entropy=2.083189, chi_square=3906.018182, mean=69.054545, monte_carlo_pi=4.0, serial_correlation=0.849402]

View file

@ -0,0 +1 @@
found bro_init

View file

@ -0,0 +1,4 @@
ASCII text, with no line terminators
text/plain; charset=us-ascii
PNG image
image/png; charset=binary

View file

@ -0,0 +1,4 @@
T
F
F
T

View file

@ -0,0 +1 @@
F

View file

@ -0,0 +1 @@
T

View file

@ -0,0 +1,4 @@
1970-01-01 00:00:00
000000 19700101
1973-11-29 21:33:09
213309 19731129

View file

@ -3,8 +3,10 @@
#empty_field (empty)
#unset_field -
#path metrics
#open 2012-07-20-01-50-41
#fields ts metric_id filter_name index.host index.str index.network value
#types time enum string addr string subnet count
1331256494.591966 TEST_METRIC foo-bar 6.5.4.3 - - 4
1331256494.591966 TEST_METRIC foo-bar 7.2.1.5 - - 2
1331256494.591966 TEST_METRIC foo-bar 1.2.3.4 - - 6
1342749041.601712 TEST_METRIC foo-bar 6.5.4.3 - - 4
1342749041.601712 TEST_METRIC foo-bar 7.2.1.5 - - 2
1342749041.601712 TEST_METRIC foo-bar 1.2.3.4 - - 6
#close 2012-07-20-01-50-49

View file

@ -3,8 +3,10 @@
#empty_field (empty)
#unset_field -
#path test.failure
#open 2012-07-20-01-50-18
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 failure US
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 failure UK
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 failure MX
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 failure US
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 failure UK
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 failure MX
#close 2012-07-20-01-50-18

View file

@ -3,10 +3,12 @@
#empty_field (empty)
#unset_field -
#path test
#open 2012-07-20-01-50-18
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 success unknown
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 failure US
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 failure UK
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 success BR
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 failure MX
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 success unknown
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 failure US
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 failure UK
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 success BR
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 failure MX
#close 2012-07-20-01-50-18

View file

@ -3,7 +3,9 @@
#empty_field (empty)
#unset_field -
#path test.success
#open 2012-07-20-01-50-18
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 success unknown
1331256472.375609 1.2.3.4 1234 2.3.4.5 80 success BR
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 success unknown
1342749018.970682 1.2.3.4 1234 2.3.4.5 80 success BR
#close 2012-07-20-01-50-18

View file

@ -1,2 +1,3 @@
error in /da/home/robin/bro/master/testing/btest/.tmp/core.reporter-error-in-handler/reporter-error-in-handler.bro, line 22: no such index (a[2])
error in /home/jsiwek/bro/testing/btest/.tmp/core.reporter-error-in-handler/reporter-error-in-handler.bro, line 22: no such index (a[2])
ERROR: no such index (a[1]) (/home/jsiwek/bro/testing/btest/.tmp/core.reporter-error-in-handler/reporter-error-in-handler.bro, line 28)
1st error printed on script level

View file

@ -1 +1,2 @@
error in /da/home/robin/bro/master/testing/btest/.tmp/core.reporter-runtime-error/reporter-runtime-error.bro, line 12: no such index (a[1])
error in /home/jsiwek/bro/testing/btest/.tmp/core.reporter-runtime-error/reporter-runtime-error.bro, line 12: no such index (a[1])
ERROR: no such index (a[2]) (/home/jsiwek/bro/testing/btest/.tmp/core.reporter-runtime-error/reporter-runtime-error.bro, line 9)

View file

@ -1,6 +1,6 @@
reporter_info|init test-info|/da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 8|0.000000
reporter_warning|init test-warning|/da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 9|0.000000
reporter_error|init test-error|/da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 10|0.000000
reporter_info|done test-info|/da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 15|0.000000
reporter_warning|done test-warning|/da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 16|0.000000
reporter_error|done test-error|/da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 17|0.000000
reporter_info|init test-info|/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 8|0.000000
reporter_warning|init test-warning|/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 9|0.000000
reporter_error|init test-error|/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 10|0.000000
reporter_info|done test-info|/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 15|0.000000
reporter_warning|done test-warning|/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 16|0.000000
reporter_error|done test-error|/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 17|0.000000

View file

@ -1,3 +1,7 @@
/da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 52: pre test-info
warning in /da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 53: pre test-warning
error in /da/home/robin/bro/master/testing/btest/.tmp/core.reporter/reporter.bro, line 54: pre test-error
/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 52: pre test-info
warning in /home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 53: pre test-warning
error in /home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 54: pre test-error
WARNING: init test-warning (/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 9)
ERROR: init test-error (/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 10)
WARNING: done test-warning (/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 16)
ERROR: done test-error (/home/jsiwek/bro/testing/btest/.tmp/core.reporter/reporter.bro, line 17)

View file

@ -0,0 +1 @@
error: unknown writer type requested

View file

@ -0,0 +1,4 @@
{
[9223372036854775800] = [c=18446744073709551612],
[-9223372036854775800] = [c=18446744073709551612]
}

View file

@ -0,0 +1,155 @@
============PREDICATE============
Input::EVENT_NEW
[i=1]
[s=<uninitialized>, ss=TEST]
============PREDICATE============
Input::EVENT_NEW
[i=2]
[s=<uninitialized>, ss=<uninitialized>]
============EVENT============
Description
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[2] = [s=<uninitialized>, ss=<uninitialized>],
[1] = [s=<uninitialized>, ss=TEST]
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_NEW
Left
[i=1]
Right
[s=<uninitialized>, ss=TEST]
============EVENT============
Description
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[2] = [s=<uninitialized>, ss=<uninitialized>],
[1] = [s=<uninitialized>, ss=TEST]
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_NEW
Left
[i=2]
Right
[s=<uninitialized>, ss=<uninitialized>]
==========SERVERS============
{
[2] = [s=<uninitialized>, ss=<uninitialized>],
[1] = [s=<uninitialized>, ss=TEST]
}
============PREDICATE============
Input::EVENT_CHANGED
[i=1]
[s=TEST, ss=<uninitialized>]
============PREDICATE============
Input::EVENT_CHANGED
[i=2]
[s=TEST, ss=TEST]
============EVENT============
Description
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[2] = [s=TEST, ss=TEST],
[1] = [s=TEST, ss=<uninitialized>]
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_CHANGED
Left
[i=1]
Right
[s=<uninitialized>, ss=TEST]
============EVENT============
Description
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[2] = [s=TEST, ss=TEST],
[1] = [s=TEST, ss=<uninitialized>]
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_CHANGED
Left
[i=2]
Right
[s=<uninitialized>, ss=<uninitialized>]
==========SERVERS============
{
[2] = [s=TEST, ss=TEST],
[1] = [s=TEST, ss=<uninitialized>]
}
done

View file

@ -0,0 +1,8 @@
error: ../input.log/Input::READER_ASCII: Number '12129223372036854775800' out of supported range.
error: ../input.log/Input::READER_ASCII: Could not convert line '12129223372036854775800 121218446744073709551612' to Val. Ignoring line.
warning: ../input.log/Input::READER_ASCII: Number '9223372036854775801TEXTHERE' contained non-numeric trailing characters. Ignored trailing characters 'TEXTHERE'
warning: ../input.log/Input::READER_ASCII: Number '1Justtext' contained non-numeric trailing characters. Ignored trailing characters 'Justtext'
error: ../input.log/Input::READER_ASCII: String 'Justtext' contained no parseable number
error: ../input.log/Input::READER_ASCII: Could not convert line 'Justtext 1' to Val. Ignoring line.
received termination signal
>>>

View file

@ -0,0 +1,4 @@
{
[9223372036854775800] = [c=4],
[9223372036854775801] = [c=1]
}

View file

@ -0,0 +1,3 @@
{
[1.228.83.33] = [asn=9318 HANARO-AS Hanaro Telecom Inc., severity=medium, confidence=95, detecttime=1342569600.0]
}

View file

@ -1084,7 +1084,7 @@ BB
}
============PREDICATE============
Input::EVENT_REMOVED
[i=-43]
[i=-44]
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
@ -1096,6 +1096,21 @@ AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============PREDICATE============
Input::EVENT_REMOVED
[i=-42]
[b=T, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============PREDICATE============
Input::EVENT_REMOVED
@ -1111,21 +1126,6 @@ AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============PREDICATE============
Input::EVENT_REMOVED
[i=-44]
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============PREDICATE============
Input::EVENT_REMOVED
@ -1159,25 +1159,7 @@ BB
}, vc=[10, 20, 30], ve=[]]
============PREDICATE============
Input::EVENT_REMOVED
[i=-42]
[b=T, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============EVENT============
Description
Input::EVENT_REMOVED
Type
[i=-43]
Left
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
@ -1190,14 +1172,10 @@ BB
}, se={
}, vc=[10, 20, 30], ve=[]]
Right
============EVENT============
Description
Input::EVENT_REMOVED
Type
[i=-46]
Left
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[-48] = [b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
@ -1209,13 +1187,32 @@ BB
}, se={
}, vc=[10, 20, 30], ve=[]]
Right
============EVENT============
Description
Input::EVENT_REMOVED
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_REMOVED
Left
[i=-44]
Left
Right
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
@ -1228,14 +1225,10 @@ BB
}, se={
}, vc=[10, 20, 30], ve=[]]
Right
============EVENT============
Description
Input::EVENT_REMOVED
Type
[i=-47]
Left
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[-48] = [b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
@ -1247,32 +1240,32 @@ BB
}, se={
}, vc=[10, 20, 30], ve=[]]
Right
============EVENT============
Description
Input::EVENT_REMOVED
Type
[i=-45]
Left
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}, vc=[10, 20, 30], ve=[]]
Right
============EVENT============
Description
Input::EVENT_REMOVED
}]
Type
Input::EVENT_REMOVED
Left
[i=-42]
Left
Right
[b=T, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
@ -1285,7 +1278,218 @@ BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============EVENT============
Description
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[-48] = [b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_REMOVED
Left
[i=-46]
Right
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============EVENT============
Description
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[-48] = [b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_REMOVED
Left
[i=-47]
Right
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============EVENT============
Description
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[-48] = [b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_REMOVED
Left
[i=-45]
Right
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
============EVENT============
Description
[source=../input.log, reader=Input::READER_ASCII, mode=Input::REREAD, name=ssh, destination={
[-48] = [b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
}, idx=<no value description>, val=<no value description>, want_record=T, ev=line
{
print A::outfile, ============EVENT============;
print A::outfile, Description;
print A::outfile, A::description;
print A::outfile, Type;
print A::outfile, A::tpe;
print A::outfile, Left;
print A::outfile, A::left;
print A::outfile, Right;
print A::outfile, A::right;
}, pred=anonymous-function
{
print A::outfile, ============PREDICATE============;
print A::outfile, A::typ;
print A::outfile, A::left;
print A::outfile, A::right;
return (T);
}, config={
}]
Type
Input::EVENT_REMOVED
Left
[i=-43]
Right
[b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
==========SERVERS============
{
[-48] = [b=F, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
View file
@ -0,0 +1,7 @@
{
192.168.17.7,
192.168.17.42,
192.168.17.14,
192.168.17.1,
192.168.17.2
}
View file
@ -0,0 +1,10 @@
{
[1] = [s={
b,
e,
d,
c,
f,
a
}, ss=[1, 2, 3, 4, 5, 6]]
}
View file
@ -0,0 +1,23 @@
{
[2] = [s={
,
testing
}, s=[testing, , testing]],
[4] = [s={
,
testing
}, s=[testing, ]],
[6] = [s={
}, s=[]],
[1] = [s={
testing,testing,testing,
}, s=[testing,testing,testing,]],
[5] = [s={
}, s=[, , , ]],
[3] = [s={
,
testing
}, s=[, testing]]
}
View file
@ -0,0 +1,14 @@
{
[-42] = [fi=<uninitialized>, b=T, e=SSH::LOG, c=21, p=123/unknown, sn=10.0.0.0/24, a=1.2.3.4, d=3.14, t=1315801931.273616, iv=100.0, s=hurz, sc={
2,
4,
1,
3
}, ss={
CC,
AA,
BB
}, se={
}, vc=[10, 20, 30], ve=[]]
}
View file
@ -0,0 +1,10 @@
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path reporter
#open 2012-08-10-20-09-16
#fields ts level message location
#types time enum string string
0.000000 Reporter::ERROR no such index (test[3]) /da/home/robin/bro/master/testing/btest/.tmp/scripts.base.frameworks.reporter.disable-stderr/disable-stderr.bro, line 12
#close 2012-08-10-20-09-16
View file
@ -0,0 +1 @@
ERROR: no such index (test[3]) (/blah/testing/btest/.tmp/scripts.base.frameworks.reporter.stderr/stderr.bro, line 9)
View file
@ -0,0 +1,10 @@
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path reporter
#open 2012-08-10-20-09-23
#fields ts level message location
#types time enum string string
0.000000 Reporter::ERROR no such index (test[3]) /da/home/robin/bro/master/testing/btest/.tmp/scripts.base.frameworks.reporter.stderr/stderr.bro, line 9
#close 2012-08-10-20-09-23
View file
@ -0,0 +1,2 @@
error: Error in signature (./blah.sig:6): eval function parameters must be a 'signature_state' and a 'string' type (mark_conn)
View file
@ -0,0 +1,14 @@
#separator \x09
#set_separator ,
#empty_field (empty)
#unset_field -
#path conn
#open 2012-08-23-16-41-23
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state local_orig missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
#types time string addr port addr port enum string interval count count string bool count string count count count count table[string]
1329843175.736107 arKYeMETxOg 141.142.220.235 37604 199.233.217.249 56666 tcp ftp-data 0.112432 0 342 SF - 0 ShAdfFa 4 216 4 562 (empty)
1329843179.871641 k6kgXLOoSKl 141.142.220.235 59378 199.233.217.249 56667 tcp ftp-data 0.111218 0 77 SF - 0 ShAdfFa 4 216 4 297 (empty)
1329843194.151526 nQcgTWjvg4c 199.233.217.249 61920 141.142.220.235 33582 tcp ftp-data 0.056211 342 0 SF - 0 ShADaFf 5 614 3 164 (empty)
1329843197.783443 j4u32Pc5bif 199.233.217.249 61918 141.142.220.235 37835 tcp ftp-data 0.056005 77 0 SF - 0 ShADaFf 5 349 3 164 (empty)
1329843161.968492 UWkUyAuUGXf 141.142.220.235 50003 199.233.217.249 21 tcp ftp,blah 38.055625 180 3146 SF - 0 ShAdDfFa 38 2164 25 4458 (empty)
#close 2012-08-23-16-41-23
View file
@ -1,2 +0,0 @@
# @TEST-EXEC: bro -C -r ${TRACES}/conn-size.trace tcp udp icmp report_conn_size_analyzer=T
# @TEST-EXEC: btest-diff conn.log
View file
@ -1,2 +0,0 @@
# @TEST-EXEC: bro -C -r ${TRACES}/conn-size.trace tcp udp icmp report_conn_size_analyzer=T
# @TEST-EXEC: btest-diff conn.log
View file
@ -0,0 +1,9 @@
#
# @TEST-EXEC: bro %INPUT >out
# @TEST-EXEC: btest-diff out
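# analyzer_name() maps a numeric analyzer ID to its textual name; ID 1 is used as a representative value here.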
event bro_init()
{
local a = 1;
print analyzer_name(a);
}
View file
@ -0,0 +1,9 @@
#
# @TEST-EXEC: bro %INPUT
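# bro_version() should return a non-empty version string.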
event bro_init()
{
local a = bro_version();
if ( |a| == 0 )
exit(1);
}
View file
@ -0,0 +1,9 @@
#
# @TEST-EXEC: bro %INPUT >out
# @TEST-EXEC: btest-diff out
# @TEST-EXEC: test -f testfile
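# capture_state_updates() should create the given file; the TEST-EXEC above checks that it exists.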
event bro_init()
{
print capture_state_updates("testfile");
}
View file
@ -0,0 +1,10 @@
#
# @TEST-EXEC: bro %INPUT
# @TEST-EXEC: test -f .state/state.bst
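# checkpoint_state() should write a checkpoint to .state/state.bst and return T.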
event bro_init()
{
local a = checkpoint_state();
if ( a != T )
exit(1);
}
View file
@ -0,0 +1,11 @@
#
# @TEST-EXEC: bro %INPUT
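# With no analyzer active at bro_init() time, current_analyzer() should return 0.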
event bro_init()
{
local a = current_analyzer();
if ( a != 0 )
exit(1);
# TODO: add a test for non-zero return value
}

View file
#
# @TEST-EXEC: bro %INPUT
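# current_time() should return a wall-clock time after the epoch.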
event bro_init()
{
local a = current_time();
if ( a <= double_to_time(0) )
exit(1);
}
View file
@ -0,0 +1,24 @@
#
# @TEST-EXEC: bro %INPUT >out
# @TEST-EXEC: btest-diff out
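# Run the entropy tester on a high-entropy string and on a repetitive, low-entropy one and print both results.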
event bro_init()
{
local a = "dh3Hie02uh^s#Sdf9L3frd243h$d78r2G4cM6*Q05d(7rh46f!0|4-f";
if ( entropy_test_init(1) != T )
exit(1);
if ( entropy_test_add(1, a) != T )
exit(1);
print entropy_test_finish(1);
local b = "0011000aaabbbbcccc000011111000000000aaaabbbbcccc0000000";
if ( entropy_test_init(2) != T )
exit(1);
if ( entropy_test_add(2, b) != T )
exit(1);
print entropy_test_finish(2);
}
View file
@ -0,0 +1,9 @@
#
# @TEST-EXEC: bro %INPUT
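# get_matcher_stats() should report a non-zero number of matchers.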
event bro_init()
{
local a = get_matcher_stats();
if ( a$matchers == 0 )
exit(1);
}
View file
@ -0,0 +1,9 @@
#
# @TEST-EXEC: bro %INPUT
event bro_init()
{
local a = gethostname();
if ( |a| == 0 )
exit(1);
}
View file
@ -0,0 +1,9 @@
#
# @TEST-EXEC: bro %INPUT
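# getpid() should return a non-zero process ID.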
event bro_init()
{
local a = getpid();
if ( a == 0 )
exit(1);
}
View file
@ -0,0 +1,16 @@
#
# @TEST-EXEC: bro %INPUT >out
# @TEST-EXEC: btest-diff out
event bro_init()
{
local a = global_sizes();
for ( i in a )
{
# the table is quite large, so just look for one item we expect
if ( i == "bro_init" )
print "found bro_init";
}
}
View file
@ -0,0 +1,16 @@
#
# @TEST-EXEC: bro %INPUT | sed 's/PNG image data/PNG image/g' >out
# @TEST-EXEC: btest-diff out
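# identify_data() is backed by libmagic; the sed in the TEST-EXEC above normalizes descriptions that can differ between libmagic versions.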
event bro_init()
{
# plain text
local a = "This is a test";
print identify_data(a, F);
print identify_data(a, T);
# PNG image
local b = "\x89\x50\x4e\x47\x0d\x0a\x1a\x0a";
print identify_data(b, F);
print identify_data(b, T);
}
View file
@ -0,0 +1,11 @@
#
# @TEST-EXEC: bro %INPUT >out
# @TEST-EXEC: btest-diff out
event bro_init()
{
print is_local_interface(127.0.0.1);
print is_local_interface(1.2.3.4);
print is_local_interface([2607::a:b:c:d]);
print is_local_interface([::1]);
}
View file
@ -0,0 +1,10 @@
# @TEST-EXEC: bro %INPUT >out1
# @TEST-EXEC: btest-diff out1
# @TEST-EXEC: bro -r $TRACES/web.trace %INPUT >out2
# @TEST-EXEC: btest-diff out2
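# reading_traces() should be F when no trace is given (out1) and T when replaying web.trace (out2).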
event bro_init()
{
print reading_traces();
}
View file
@ -0,0 +1,9 @@
#
# @TEST-EXEC: bro %INPUT
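# resource_usage()$version should match bro_version().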
event bro_init()
{
local a = resource_usage();
if ( a$version != bro_version() )
exit(1);
}
View file
@ -0,0 +1,17 @@
#
# @TEST-EXEC: bro %INPUT >out
# @TEST-EXEC: btest-diff out
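# Format two fixed timestamps (0.0 and 123456789.0) with two different format strings.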
event bro_init()
{
local f1 = "%Y-%m-%d %H:%M:%S";
local f2 = "%H%M%S %Y%m%d";
local a = double_to_time(0);
print strftime(f1, a);
print strftime(f2, a);
a = double_to_time(123456789);
print strftime(f1, a);
print strftime(f2, a);
}
View file
@ -1,5 +1,5 @@
[btest]
TestDirs = doc bifs language core scripts istate coverage
TestDirs = doc bifs language core scripts istate coverage signatures
TmpDir = %(testbase)s/.tmp
BaselineDir = %(testbase)s/Baseline
IgnoreDirs = .svn CVS .tmp
View file
@ -1,21 +1,22 @@
# Needs perftools support.
#
# @TEST-SERIALIZE: comm
# @TEST-GROUP: leaks
#
# @TEST-REQUIRES: bro --help 2>&1 | grep -q mem-leaks
#
# @TEST-EXEC: btest-bg-run manager-1 HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local BROPATH=$BROPATH:.. CLUSTER_NODE=manager-1 bro -m %INPUT
# @TEST-EXEC: btest-bg-run proxy-1 HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local BROPATH=$BROPATH:.. CLUSTER_NODE=proxy-1 bro -m %INPUT
# @TEST-EXEC: sleep 1
# @TEST-EXEC: btest-bg-run worker-1 HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local BROPATH=$BROPATH:.. CLUSTER_NODE=worker-1 bro -m -r $TRACES/web.trace --pseudo-realtime %INPUT
# @TEST-EXEC: btest-bg-run worker-2 HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local BROPATH=$BROPATH:.. CLUSTER_NODE=worker-2 bro -m -r $TRACES/web.trace --pseudo-realtime %INPUT
# @TEST-EXEC: btest-bg-wait -k 30
# @TEST-EXEC: btest-bg-wait 60
# @TEST-EXEC: btest-diff manager-1/metrics.log
@TEST-START-FILE cluster-layout.bro
redef Cluster::nodes = {
["manager-1"] = [$node_type=Cluster::MANAGER, $ip=127.0.0.1, $p=37757/tcp, $workers=set("worker-1")],
["proxy-1"] = [$node_type=Cluster::PROXY, $ip=127.0.0.1, $p=37758/tcp, $manager="manager-1", $workers=set("worker-1")],
["manager-1"] = [$node_type=Cluster::MANAGER, $ip=127.0.0.1, $p=37757/tcp, $workers=set("worker-1", "worker-2")],
["proxy-1"] = [$node_type=Cluster::PROXY, $ip=127.0.0.1, $p=37758/tcp, $manager="manager-1", $workers=set("worker-1", "worker-2")],
["worker-1"] = [$node_type=Cluster::WORKER, $ip=127.0.0.1, $p=37760/tcp, $manager="manager-1", $proxy="proxy-1", $interface="eth0"],
["worker-2"] = [$node_type=Cluster::WORKER, $ip=127.0.0.1, $p=37761/tcp, $manager="manager-1", $proxy="proxy-1", $interface="eth1"],
};
@ -32,11 +33,51 @@ event bro_init() &priority=5
Metrics::add_filter(TEST_METRIC,
[$name="foo-bar",
$break_interval=3secs]);
if ( Cluster::local_node_type() == Cluster::WORKER )
}
event remote_connection_closed(p: event_peer)
{
terminate();
}
global ready_for_data: event();
redef Cluster::manager2worker_events += /ready_for_data/;
@if ( Cluster::local_node_type() == Cluster::WORKER )
event ready_for_data()
{
Metrics::add_data(TEST_METRIC, [$host=1.2.3.4], 3);
Metrics::add_data(TEST_METRIC, [$host=6.5.4.3], 2);
Metrics::add_data(TEST_METRIC, [$host=7.2.1.5], 1);
}
@endif
@if ( Cluster::local_node_type() == Cluster::MANAGER )
global n = 0;
global peer_count = 0;
event Metrics::log_metrics(rec: Metrics::Info)
{
n = n + 1;
if ( n == 3 )
{
Metrics::add_data(TEST_METRIC, [$host=1.2.3.4], 3);
Metrics::add_data(TEST_METRIC, [$host=6.5.4.3], 2);
Metrics::add_data(TEST_METRIC, [$host=7.2.1.5], 1);
terminate_communication();
terminate();
}
}
event remote_connection_handshake_done(p: event_peer)
{
print p;
peer_count = peer_count + 1;
if ( peer_count == 3 )
{
event ready_for_data();
}
}
@endif
View file
@ -1,20 +1,23 @@
# Needs perftools support.
#
# @TEST-SERIALIZE: comm
# @TEST-GROUP: leaks
#
# @TEST-REQUIRES: bro --help 2>&1 | grep -q mem-leaks
#
# @TEST-EXEC: btest-bg-run sender HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local bro -m --pseudo-realtime %INPUT ../sender.bro
# @TEST-EXEC: btest-bg-run sender HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local bro -b -m --pseudo-realtime %INPUT ../sender.bro
# @TEST-EXEC: sleep 1
# @TEST-EXEC: btest-bg-run receiver HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local bro -m --pseudo-realtime %INPUT ../receiver.bro
# @TEST-EXEC: btest-bg-run receiver HEAP_CHECK_DUMP_DIRECTORY=. HEAPCHECK=local bro -b -m --pseudo-realtime %INPUT ../receiver.bro
# @TEST-EXEC: sleep 1
# @TEST-EXEC: btest-bg-wait -k 10
# @TEST-EXEC: btest-bg-wait 30
# @TEST-EXEC: btest-diff sender/test.log
# @TEST-EXEC: btest-diff sender/test.failure.log
# @TEST-EXEC: btest-diff sender/test.success.log
# @TEST-EXEC: cmp receiver/test.log sender/test.log
# @TEST-EXEC: cmp receiver/test.failure.log sender/test.failure.log
# @TEST-EXEC: cmp receiver/test.success.log sender/test.success.log
# @TEST-EXEC: ( cd sender && for i in *.log; do cat $i | $SCRIPTS/diff-remove-timestamps >c.$i; done )
# @TEST-EXEC: ( cd receiver && for i in *.log; do cat $i | $SCRIPTS/diff-remove-timestamps >c.$i; done )
# @TEST-EXEC: cmp receiver/c.test.log sender/c.test.log
# @TEST-EXEC: cmp receiver/c.test.failure.log sender/c.test.failure.log
# @TEST-EXEC: cmp receiver/c.test.success.log sender/c.test.success.log
# This is the common part loaded by both sender and receiver.
module Test;
@ -43,10 +46,10 @@ event bro_init()
@TEST-START-FILE sender.bro
module Test;
@load frameworks/communication/listen
module Test;
function fail(rec: Log): bool
{
return rec$status != "success";
@ -68,14 +71,27 @@ event remote_connection_handshake_done(p: event_peer)
Log::write(Test::LOG, [$t=network_time(), $id=cid, $status="failure", $country="MX"]);
disconnect(p);
}
event remote_connection_closed(p: event_peer)
{
terminate();
}
@TEST-END-FILE
@TEST-START-FILE receiver.bro
#####
@load base/frameworks/communication
redef Communication::nodes += {
["foo"] = [$host = 127.0.0.1, $connect=T, $request_logs=T]
};
event remote_connection_closed(p: event_peer)
{
terminate();
}
@TEST-END-FILE
View file
@ -8,6 +8,7 @@
# @TEST-SERIALIZE: comm
#
# @TEST-EXEC: test -d $DIST/scripts
# @TEST-EXEC: for script in `find $DIST/scripts -name \*\.bro -not -path '*/site/*'`; do echo $script; if echo "$script" | egrep -q 'communication/listen|controllee'; then rm -rf load_attempt .bgprocs; btest-bg-run load_attempt bro -b $script; btest-bg-wait -k 2; cat load_attempt/.stderr >>allerrors; else bro -b $script 2>>allerrors; fi done || exit 0
# @TEST-EXEC: for script in `find $DIST/scripts/ -name \*\.bro -not -path '*/site/*'`; do echo $script; if echo "$script" | egrep -q 'communication/listen|controllee'; then rm -rf load_attempt .bgprocs; btest-bg-run load_attempt bro -b $script; btest-bg-wait -k 2; cat load_attempt/.stderr >>allerrors; else bro -b $script 2>>allerrors; fi done || exit 0
# @TEST-EXEC: cat allerrors | grep -v "received termination signal" | sort | uniq > unique_errors
# @TEST-EXEC: btest-diff unique_errors
# @TEST-EXEC: if [ $(grep -c CURL_INCLUDE_DIR-NOTFOUND $BUILD/CMakeCache.txt) -ne 0 ]; then cp unique_errors unique_errors_no_elasticsearch; fi
# @TEST-EXEC: if [ $(grep -c CURL_INCLUDE_DIR-NOTFOUND $BUILD/CMakeCache.txt) -ne 0 ]; then btest-diff unique_errors_no_elasticsearch; else btest-diff unique_errors; fi
View file
@ -0,0 +1,45 @@
# (uses listen.bro just to ensure input sources are more reliably fully-read).
# @TEST-SERIALIZE: comm
#
# @TEST-EXEC: btest-bg-run bro bro -b %INPUT
# @TEST-EXEC: btest-bg-wait -k 5
# @TEST-EXEC: btest-diff out
@TEST-START-FILE input.log
#separator \x09
#fields i c
#types int count
9223372036854775800 18446744073709551612
-9223372036854775800 18446744073709551612
@TEST-END-FILE
@load frameworks/communication/listen
global outfile: file;
module A;
type Idx: record {
i: int;
};
type Val: record {
c: count;
};
global servers: table[int] of Val = table();
event bro_init()
{
outfile = open("../out");
# first read in the old stuff into the table...
Input::add_table([$source="../input.log", $name="ssh", $idx=Idx, $val=Val, $destination=servers]);
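# ... and immediately remove the stream again; the read that is already queued should still complete and raise Input::update_finished.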
Input::remove("ssh");
}
event Input::update_finished(name: string, source:string)
{
print outfile, servers;
close(outfile);
terminate();
}
View file
@ -0,0 +1,89 @@
# (uses listen.bro just to ensure input sources are more reliably fully-read).
# @TEST-SERIALIZE: comm
#
# @TEST-EXEC: cp input1.log input.log
# @TEST-EXEC: btest-bg-run bro bro -b %INPUT
# @TEST-EXEC: sleep 2
# @TEST-EXEC: cp input2.log input.log
# @TEST-EXEC: btest-bg-wait -k 5
# @TEST-EXEC: btest-diff out
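# REREAD mode: input.log is swapped from input1.log to input2.log while Bro runs;
# the predicate and the line event fire for the changed entries, and update_finished runs after each full read.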
@TEST-START-FILE input1.log
#separator \x09
#fields i s ss
#types int string string
1 - TEST
2 - -
@TEST-END-FILE
@TEST-START-FILE input2.log
#separator \x09
#fields i s ss
#types int string string
1 TEST -
2 TEST TEST
@TEST-END-FILE
@load frameworks/communication/listen
module A;
type Idx: record {
i: int;
};
type Val: record {
s: string;
ss: string;
};
global servers: table[int] of Val = table();
global outfile: file;
global try: count;
event line(description: Input::TableDescription, tpe: Input::Event, left: Idx, right: Val)
{
print outfile, "============EVENT============";
print outfile, "Description";
print outfile, description;
print outfile, "Type";
print outfile, tpe;
print outfile, "Left";
print outfile, left;
print outfile, "Right";
print outfile, right;
}
event bro_init()
{
outfile = open("../out");
try = 0;
# first read in the old stuff into the table...
Input::add_table([$source="../input.log", $mode=Input::REREAD, $name="ssh", $idx=Idx, $val=Val, $destination=servers, $ev=line,
$pred(typ: Input::Event, left: Idx, right: Val) = {
print outfile, "============PREDICATE============";
print outfile, typ;
print outfile, left;
print outfile, right;
return T;
}
]);
}
event Input::update_finished(name: string, source: string)
{
print outfile, "==========SERVERS============";
print outfile, servers;
try = try + 1;
if ( try == 2 )
{
print outfile, "done";
close(outfile);
Input::remove("input");
terminate();
}
}
Some files were not shown because too many files have changed in this diff