Merge branch 'master' of ssh://git.bro-ids.org/bro

Conflicts:
	scripts/base/frameworks/control/main.bro
Seth Hall, 2011-08-05 23:11:40 -04:00
commit d6a67f7c1f
17 changed files with 134 additions and 50 deletions

CHANGES
View file

@@ -1,4 +1,32 @@
+1.6-dev-1044 | 2011-08-05 19:07:32 -0700
+
+  * Fixing memory (and CPU) leak in log writer.
+
+  * Fixing crash in memory profiling. (Robin Sommer)
+
+  * Fix compiler warning. (Robin Sommer)
+
+  * Fixing missing sync in cluster setup. (Robin Sommer)
+
+1.6-dev-1038 | 2011-08-05 18:25:44 -0700
+
+  * Smaller updates to script docs and their generation. (Jon Siwek)
+
+  * When using a `print` statement to write to a file that has raw output
+    enabled, NUL characters in strings are no longer escaped as "\0",
+    no newline is appended afterwards, and each argument to `print` is
+    written to the file without any additional separation. (Jon Siwek)
+
+  * Test portability tweaks. (Jon Siwek)
+
+  * Fixing PktSrc::Statistics(), which returned bogus information in
+    offline mode. Closes #500. (Jon Siwek)
+
+  * The --with-perftools configure option now implies --enable-perftools.
+    Closes #527. (Jon Siwek)
+
 1.6-dev-1018 | 2011-07-31 21:30:31 -0700
 
   * Updating CHANGES. (Robin Sommer)

View file

@@ -1 +1 @@
-1.6-dev-1018
+1.6-dev-1044

@@ -1 +1 @@
-Subproject commit 2ad87692db1cb5b104937bdde8cd6e0447f8ad94
+Subproject commit 870ee782bfeb3a60bac40fce4273436e5f2d280b

View file

@@ -13,6 +13,7 @@
 set(psd ${PROJECT_SOURCE_DIR}/policy)
 
 rest_target(${CMAKE_CURRENT_SOURCE_DIR} example.bro internal)
+rest_target(${psd} bro.init internal)
 rest_target(${CMAKE_BINARY_DIR}/src bro.bif.bro)
 rest_target(${CMAKE_BINARY_DIR}/src const.bif.bro)

View file

@@ -38,20 +38,26 @@ by CMake:
 This target removes Sphinx inputs and outputs from the CMake ``build/`` dir.
 
-To schedule a script to be documented, edit ``DocSourcesList.cmake`` inside
-this directory and add a call to the ``rest_target()`` macro. Calling that
-macro with a group name for the script is optional. If the group is omitted,
-the only links to the script will be in the master TOC tree for all policy
-scripts as well as the master TOC tree for script packages (derived from the
-path component of the second argument to ``rest_target()``), with the
-exception of ``.bif`` files, which are grouped automatically.
-
-When adding a new logical grouping, e.g. "my/group" (groups are allowed
-to contain slashes specifying a path) for generated scripts,
-create a new reST document in ``source/my/group.rst`` and add some default
-documentation for the group. References to (and summaries of) documents
-associated with the group get appended to this pre-created file during the
-``make doc`` process.
+The ``genDocSourcesList.sh`` script can be run to automatically generate
+``DocSourcesList.cmake``, which is the file CMake uses to define the list
+of documentation targets. This script should be run after adding new
+Bro script source files, and the changes committed to git.
+
+If a script shouldn't have documentation generated for it, there's also a
+blacklist variable that can be maintained in the ``genDocSourcesList.sh``
+script.
+
+The blacklist can also be used if you want to define a certain grouping for
+the script's generated docs to belong to (as opposed to the automatic grouping
+that happens for script packages/directories). To do that, add the
+script's name to the blacklist, then append a ``rest_target()`` to the
+``statictext`` variable, where the first argument is the source directory
+containing the policy script to document, the second argument is the file
+name of the policy script, and the third argument is the path/name of a
+pre-created reST document in the ``source/`` directory to which the
+``make doc`` process can append script documentation references. This
+pre-created reST document should also then be linked to from the TOC tree
+in ``source/index.rst``.
 
 The Sphinx source tree template in ``source/`` can be modified to add more
 common/general documentation, style sheets, JavaScript, etc. The Sphinx

View file

@@ -30,6 +30,7 @@ statictext="\
 set(psd \${PROJECT_SOURCE_DIR}/policy)
 
 rest_target(\${CMAKE_CURRENT_SOURCE_DIR} example.bro internal)
+rest_target(\${psd} bro.init internal)
 "
 
 if [[ $# -ge 1 ]]; then

View file

@@ -14,7 +14,6 @@ Contents:
    internal
    bifs
    packages
-   collections
    policy/index
 
 Indices and tables

View file

@@ -67,7 +67,7 @@ event bro_init() &priority=9
 		if ( n$node_type == PROXY && me$proxy == i )
 			Communication::nodes["proxy"] = [$host=nodes[i]$ip, $p=nodes[i]$p,
-			                                 $connect=T, $retry=1mins,
+			                                 $connect=T, $retry=1mins, $sync=T,
 			                                 $class=node];
 
 		if ( n$node_type == TIME_MACHINE && me?$time_machine && me$time_machine == i )

View file

@@ -2,12 +2,14 @@
 ##! consts to a remote Bro then sends the :bro:id:`configuration_update` event
 ##! and terminates processing.
 ##!
-##! Intended to be used from the command line like this when starting a controller:
+##! Intended to be used from the command line like this when starting a controller::
+##!
 ##!     bro <scripts> frameworks/control/controller Control::host=<host_addr> Control::port=<host_port> Control::cmd=<command> [Control::arg=<arg>]
 ##!
 ##! A controllee only needs to load the controllee script in addition
 ##! to the specific analysis scripts desired. It may also need a node
-##! configured as a controller node in the communications nodes configuration.
+##! configured as a controller node in the communications nodes configuration::
+##!
 ##!     bro <scripts> frameworks/control/controllee
 ##!
 ##! To use the framework as a controllee, it only needs to be loaded and

View file

@@ -101,7 +101,7 @@ export {
 	## This is the record that defines the items that make up the notice policy.
 	type PolicyItem: record {
-		## This is the exact positional order in which the :id:type:`PolicyItem`
+		## This is the exact positional order in which the :bro:type:`PolicyItem`
 		## records are checked. This is set internally by the notice framework.
 		position: count &log &optional;
 		## Define the priority for this check. Items are checked in ordered

View file

@@ -3,6 +3,7 @@
 ##! currently detected.
 ##!
 ##! TODO:
+##!
 ##! * Find some heuristic to determine if email was sent through
 ##!   a MS Exchange webmail interface as opposed to a desktop client.

View file

@@ -398,13 +398,15 @@ LogMgr::Stream::~Stream()
 		{
 		WriterInfo* winfo = i->second;
 
+		if ( ! winfo )
+			continue;
+
 		if ( winfo->rotation_timer )
 			timer_mgr->Cancel(winfo->rotation_timer);
 
 		Unref(winfo->type);
 		delete winfo->writer;
-		delete i->second;
+		delete winfo;
 		}
 
 	for ( list<Filter*>::iterator f = filters.begin(); f != filters.end(); ++f )

@@ -437,7 +439,7 @@ void LogMgr::RemoveDisabledWriters(Stream* stream)
 	for ( Stream::WriterMap::iterator j = stream->writers.begin(); j != stream->writers.end(); j++ )
 		{
-		if ( j->second->writer->Disabled() )
+		if ( j->second && j->second->writer->Disabled() )
 			{
 			delete j->second;
 			disabled.push_back(j->first);

@@ -900,8 +902,8 @@ bool LogMgr::Write(EnumVal* id, RecordVal* columns)
 		LogWriter* writer = 0;
 
 		if ( w != stream->writers.end() )
-			// We have a writer already.
-			writer = w->second->writer;
+			// We know this writer already.
+			writer = w->second ? w->second->writer : 0;
 
 		else
 			{

@@ -926,6 +928,11 @@ bool LogMgr::Write(EnumVal* id, RecordVal* columns)
 				return false;
 				}
 			}
+
+		else
+			// Insert a null pointer into the map to make
+			// sure we don't try creating it again.
+			stream->writers.insert(Stream::WriterMap::value_type(
+				Stream::WriterPathPair(filter->writer->AsEnum(), path), 0));
 
 		if ( filter->remote )
 			remote_serializer->SendLogCreateWriter(stream->id,

@@ -937,24 +944,36 @@ bool LogMgr::Write(EnumVal* id, RecordVal* columns)
 		// Alright, can do the write now.
 
-		LogVal** vals = RecordToFilterVals(stream, filter, columns);
-
-		if ( filter->remote )
-			remote_serializer->SendLogWrite(stream->id,
-							filter->writer,
-							path,
-							filter->num_fields,
-							vals);
-
-		if ( filter->local && ! writer->Write(filter->num_fields, vals) )
-			error = true;
+		if ( filter->local || filter->remote )
+			{
+			LogVal** vals = RecordToFilterVals(stream, filter, columns);
+
+			if ( filter->remote )
+				remote_serializer->SendLogWrite(stream->id,
+								filter->writer,
+								path,
+								filter->num_fields,
+								vals);
+
+			if ( filter->local )
+				{
+				assert(writer);
+
+				// Write takes ownership of vals.
+				if ( ! writer->Write(filter->num_fields, vals) )
+					error = true;
+				}
+
+			else
+				DeleteVals(filter->num_fields, vals);
+			}
 
 #ifdef DEBUG
 		DBG_LOG(DBG_LOGGING, "Wrote record to filter '%s' on stream '%s'",
 			filter->name.c_str(), stream->name.c_str());
 #endif
-
-		delete [] vals;
 		}

@@ -1124,7 +1143,7 @@ LogWriter* LogMgr::CreateWriter(EnumVal* id, EnumVal* writer, string path,
 	Stream::WriterMap::iterator w =
 		stream->writers.find(Stream::WriterPathPair(writer->AsEnum(), path));
 
-	if ( w != stream->writers.end() )
+	if ( w != stream->writers.end() && w->second )
 		// If we already have a writer for this. That's fine, we just
 		// return it.
 		return w->second->writer;

@@ -1194,6 +1213,14 @@ LogWriter* LogMgr::CreateWriter(EnumVal* id, EnumVal* writer, string path,
 	return writer_obj;
 	}
 
+void LogMgr::DeleteVals(int num_fields, LogVal** vals)
+	{
+	for ( int i = 0; i < num_fields; i++ )
+		delete vals[i];
+
+	delete [] vals;
+	}
+
 bool LogMgr::Write(EnumVal* id, EnumVal* writer, string path, int num_fields,
 		   LogVal** vals)
 	{

@@ -1208,11 +1235,15 @@ bool LogMgr::Write(EnumVal* id, EnumVal* writer, string path, int num_fields,
 		DBG_LOG(DBG_LOGGING, "unknown stream %s in LogMgr::Write()",
 			desc.Description());
 #endif
+		DeleteVals(num_fields, vals);
 		return false;
 		}
 
 	if ( ! stream->enabled )
+		{
+		DeleteVals(num_fields, vals);
 		return true;
+		}
 
 	Stream::WriterMap::iterator w =
 		stream->writers.find(Stream::WriterPathPair(writer->AsEnum(), path));

@@ -1226,10 +1257,11 @@ bool LogMgr::Write(EnumVal* id, EnumVal* writer, string path, int num_fields,
 		DBG_LOG(DBG_LOGGING, "unknown writer %s in LogMgr::Write()",
 			desc.Description());
 #endif
+		DeleteVals(num_fields, vals);
 		return false;
 		}
 
-	bool success = w->second->writer->Write(num_fields, vals);
+	bool success = (w->second ? w->second->writer->Write(num_fields, vals) : true);
 
 	DBG_LOG(DBG_LOGGING,
 		"Wrote pre-filtered record to path '%s' on stream '%s' [%s]",

@@ -1250,7 +1282,11 @@ void LogMgr::SendAllWritersTo(RemoteSerializer::PeerID peer)
 		for ( Stream::WriterMap::iterator i = stream->writers.begin();
 		      i != stream->writers.end(); i++ )
 			{
+			if ( ! i->second )
+				continue;
+
 			LogWriter* writer = i->second->writer;
+
 			EnumVal writer_val(i->first.first, BifType::Enum::Log::Writer);
 			remote_serializer->SendLogCreateWriter(peer, (*s)->id,
 							       &writer_val,

@@ -1269,7 +1305,10 @@ bool LogMgr::SetBuf(EnumVal* id, bool enabled)
 	for ( Stream::WriterMap::iterator i = stream->writers.begin();
 	      i != stream->writers.end(); i++ )
-		i->second->writer->SetBuf(enabled);
+		{
+		if ( i->second )
+			i->second->writer->SetBuf(enabled);
+		}
 
 	RemoveDisabledWriters(stream);

@@ -1287,7 +1326,10 @@ bool LogMgr::Flush(EnumVal* id)
 	for ( Stream::WriterMap::iterator i = stream->writers.begin();
 	      i != stream->writers.end(); i++ )
-		i->second->writer->Flush();
+		{
+		if ( i->second )
+			i->second->writer->Flush();
+		}
 
 	RemoveDisabledWriters(stream);

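The recurring pattern in these LogMgr hunks is a map whose values may legitimately be null: a failed writer instantiation is recorded as a null entry so the manager never retries it, and every later lookup or iteration has to treat that sentinel as "known, but unusable". The following is a minimal self-contained sketch of that idea in isolation; the names `Writer`, `WriterMap`, `make_writer` and `lookup_or_create` are invented for illustration and are not Bro's actual API.

#include <cassert>
#include <cstdio>
#include <map>
#include <string>

// Stand-in for a log writer; real Bro writers are far richer.
struct Writer
	{
	bool Write(const std::string& msg)
		{ std::printf("%s\n", msg.c_str()); return true; }
	};

// Map values are pointers so that a null entry can mean
// "creation was attempted and failed -- do not retry".
typedef std::map<std::string, Writer*> WriterMap;

Writer* make_writer(const std::string& path)
	{
	// Pretend creation fails for one particular path.
	if ( path == "bad" )
		return 0;

	return new Writer();
	}

Writer* lookup_or_create(WriterMap& writers, const std::string& path)
	{
	WriterMap::iterator w = writers.find(path);

	if ( w != writers.end() )
		// Either a live writer or a cached failure (null).
		return w->second;

	Writer* writer = make_writer(path);

	// Insert the result unconditionally: a null pointer caches
	// the failure so the next call takes the fast path above.
	writers.insert(WriterMap::value_type(path, writer));
	return writer;
	}

int main()
	{
	WriterMap writers;

	lookup_or_create(writers, "bad");              // fails, cached as null
	assert(lookup_or_create(writers, "bad") == 0); // no second attempt

	Writer* ok = lookup_or_create(writers, "conn");
	if ( ok )
		ok->Write("sample record");

	// Iterations must skip the null sentinels, just as the patched
	// SetBuf()/Flush()/destructor loops above do.
	for ( WriterMap::iterator i = writers.begin(); i != writers.end(); ++i )
		{
		if ( ! i->second )
			continue;

		delete i->second;
		}

	return 0;
	}

The design choice is to pay one extra null check per access in exchange for never re-running an expensive (and already failed) creation, without needing a separate "failed paths" set.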
View file

@@ -106,6 +106,9 @@ protected:
 	// Reports an error for the given writer.
 	void Error(LogWriter* writer, const char* msg);
 
+	// Deletes the values as passed into Write().
+	void DeleteVals(int num_fields, LogVal** vals);
+
 private:
 	struct Filter;
 	struct Stream;

View file

@@ -47,6 +47,7 @@ bool LogWriter::Write(int arg_num_fields, LogVal** vals)
 		DBG_LOG(DBG_LOGGING, "Number of fields don't match in LogWriter::Write() (%d vs. %d)",
 			arg_num_fields, num_fields);
 
+		DeleteVals(vals);
 		return false;
 		}
 
@@ -56,6 +57,7 @@ bool LogWriter::Write(int arg_num_fields, LogVal** vals)
 			{
 			DBG_LOG(DBG_LOGGING, "Field type doesn't match in LogWriter::Write() (%d vs. %d)",
 				vals[i]->type, fields[i]->type);
 
+			DeleteVals(vals);
 			return false;
 			}
 		}
 
@@ -146,8 +148,7 @@ void LogWriter::Error(const char *msg)
 
 void LogWriter::DeleteVals(LogVal** vals)
 	{
-	for ( int i = 0; i < num_fields; i++ )
-		delete vals[i];
+	log_mgr->DeleteVals(num_fields, vals);
 	}
 
 bool LogWriter::RunPostProcessor(string fname, string postprocessor,

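The leak these hunks close comes down to an ownership contract: Write() takes ownership of the value array and must release it on every path, including the early error returns, and the old DeleteVals() freed the elements but never the array itself. Below is a hedged sketch of that contract under invented names (`Val`, `delete_vals`, `write_record` are stand-ins, not the real LogWriter interface).

#include <cstdio>

struct Val { int x; };

// Frees both the elements and the array. Forgetting the
// trailing `delete []` is exactly the kind of slow leak this
// commit fixes by centralizing deletion in one helper.
static void delete_vals(int num_fields, Val** vals)
	{
	for ( int i = 0; i < num_fields; i++ )
		delete vals[i];

	delete [] vals;
	}

// Takes ownership of vals: every return path releases them.
static bool write_record(int expected_fields, int num_fields, Val** vals)
	{
	if ( num_fields != expected_fields )
		{
		std::fprintf(stderr, "field count mismatch (%d vs. %d)\n",
			     num_fields, expected_fields);
		delete_vals(num_fields, vals); // early return still frees
		return false;
		}

	for ( int i = 0; i < num_fields; i++ )
		std::printf("field %d = %d\n", i, vals[i]->x);

	delete_vals(num_fields, vals);
	return true;
	}

int main()
	{
	Val** vals = new Val*[2];
	vals[0] = new Val(); vals[0]->x = 1;
	vals[1] = new Val(); vals[1]->x = 2;

	// Ownership passes to write_record(); the caller must not
	// touch vals afterwards.
	write_record(2, 2, vals);
	return 0;
	}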
View file

@@ -12,7 +12,7 @@
 Contents_Rlogin_Analyzer::Contents_Rlogin_Analyzer(Connection* conn, bool orig, Rlogin_Analyzer* arg_analyzer)
 : ContentLine_Analyzer(AnalyzerTag::Contents_Rlogin, conn, orig)
 	{
-	num_bytes_to_scan = num_bytes_to_scan = 0;
+	num_bytes_to_scan = 0;
 	analyzer = arg_analyzer;
 	peer = 0;

View file

@@ -3163,14 +3163,13 @@ unsigned int RecordVal::MemoryAllocation() const
 	{
 	unsigned int size = 0;
 
-	for ( int i = 0; i < type->AsRecordType()->NumFields(); ++i )
-		{
-		Val* v = (*val.val_list_val)[i];
-
-		// v might be nil for records that don't wind
-		// up being set to a value.
+	const val_list* vl = AsRecord();
+
+	loop_over_list(*vl, i)
+		{
+		Val* v = (*vl)[i];
+
 		if ( v )
 			size += v->MemoryAllocation();
 		}
 
 	return size + padded_sizeof(*this) + val.val_list_val->MemoryAllocation();

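The memory-profiling crash fixed in RecordVal::MemoryAllocation() came from bounding the loop by the record *type's* field count while indexing the *value* list; when the two disagree, the loop walks off the end. Iterating the value list itself keeps the bounds honest, and null slots (fields never assigned a value) are simply skipped. A rough equivalent with standard containers, where `Field` and `MemSize` are made-up stand-ins:

#include <cstddef>
#include <vector>

struct Field { size_t MemSize() const { return sizeof(*this); } };

// Sum memory over a record's fields, where unassigned fields
// are stored as null pointers. Bounding the loop by the actual
// value list (not a separately tracked count) avoids overruns.
static size_t record_mem_size(const std::vector<Field*>& fields)
	{
	size_t size = 0;

	for ( size_t i = 0; i < fields.size(); ++i )
		{
		if ( fields[i] )	// skip never-assigned slots
			size += fields[i]->MemSize();
		}

	return size;
	}

int main()
	{
	std::vector<Field*> fields;
	fields.push_back(new Field());
	fields.push_back(0);		// unassigned field
	fields.push_back(new Field());

	size_t total = record_mem_size(fields);
	(void)total;

	for ( size_t i = 0; i < fields.size(); ++i )
		delete fields[i];	// delete on null is a no-op

	return 0;
	}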
View file

@@ -799,6 +799,7 @@ EnumVal* map_conn_type(TransportProto tp)
 
 	// Cannot be reached;
 	assert(false);
+	return 0; // Make compiler happy.
 	}
 
 %%}
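This final hunk is the advertised compiler-warning fix: even when a switch over an enum handles every case and ends in assert(false), the compiler cannot prove the function returns on all paths (asserts compile away under NDEBUG), so a dummy return is added. A standalone illustration of the pattern; the enum values and mapping here are invented, not Bro's real TransportProto handling.

#include <cassert>

// Invented enum for illustration only.
enum TransportProto { TRANSPORT_TCP, TRANSPORT_UDP, TRANSPORT_ICMP };

int proto_number(TransportProto tp)
	{
	switch ( tp ) {
	case TRANSPORT_TCP:
		return 6;
	case TRANSPORT_UDP:
		return 17;
	case TRANSPORT_ICMP:
		return 1;
	}

	// Cannot be reached, but with NDEBUG the assert vanishes and
	// the compiler would warn about falling off the end of a
	// non-void function without the dummy return below.
	assert(false);
	return 0; // Make compiler happy.
	}

int main()
	{
	return proto_number(TRANSPORT_TCP) == 6 ? 0 : 1;
	}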