diff --git a/CHANGES b/CHANGES
index 3b5b46c659..e40dc49085 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,4 +1,32 @@
+1.6-dev-1044 | 2011-08-05 19:07:32 -0700
+
+  * Fixing memory (and CPU) leak in log writer.
+
+  * Fixing crash in memory profiling. (Robin Sommer)
+
+  * Fix compiler warning. (Robin Sommer)
+
+  * Fixing missing sync in cluster setup. (Robin Sommer)
+
+
+1.6-dev-1038 | 2011-08-05 18:25:44 -0700
+
+  * Smaller updates to script docs and their generation. (Jon Siwek)
+
+  * When using a `print` statement to write to a file that has raw output
+    enabled, NUL characters in strings are no longer written out as "\0",
+    no newline is appended afterwards, and each argument to `print` is
+    written to the file without any additional separation. (Jon Siwek)
+
+  * Test portability tweaks. (Jon Siwek)
+
+  * Fixing PktSrc::Statistics(), which returned bogus information in
+    offline mode. Closes #500. (Jon Siwek)
+
+  * The --with-perftools configure option now implies --enable-perftools.
+    Closes #527. (Jon Siwek)
+
 1.6-dev-1018 | 2011-07-31 21:30:31 -0700
 
   * Updating CHANGES. (Robin Sommer)
diff --git a/VERSION b/VERSION
index b8218c5f8e..59b89dec9a 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.6-dev-1018
+1.6-dev-1044
diff --git a/aux/broctl b/aux/broctl
index 2ad87692db..870ee782bf 160000
--- a/aux/broctl
+++ b/aux/broctl
@@ -1 +1 @@
-Subproject commit 2ad87692db1cb5b104937bdde8cd6e0447f8ad94
+Subproject commit 870ee782bfeb3a60bac40fce4273436e5f2d280b
diff --git a/doc/scripts/DocSourcesList.cmake b/doc/scripts/DocSourcesList.cmake
index a26b9dcedd..c5c3851f67 100644
--- a/doc/scripts/DocSourcesList.cmake
+++ b/doc/scripts/DocSourcesList.cmake
@@ -13,6 +13,7 @@
 set(psd ${PROJECT_SOURCE_DIR}/policy)
 
 rest_target(${CMAKE_CURRENT_SOURCE_DIR} example.bro internal)
+rest_target(${psd} bro.init internal)
 
 rest_target(${CMAKE_BINARY_DIR}/src bro.bif.bro)
 rest_target(${CMAKE_BINARY_DIR}/src const.bif.bro)
diff --git a/doc/scripts/README b/doc/scripts/README
index 93700ab012..bd7ec5c065 100644
--- a/doc/scripts/README
+++ b/doc/scripts/README
@@ -38,20 +38,26 @@ by CMake:
 
     This target removes Sphinx inputs and outputs from the CMake ``build/`` dir.
 
-To schedule a script to be documented, edit ``DocSourcesList.cmake`` inside
-this directory add a call to the ``rest_target()`` macro. Calling that macro
-with a group name for the script is optional. If the group is omitted, the
-only links to the script will be in the master TOC tree for all policy scripts
-as well as the master TOC tree for script packages (derived from the path
-component of the second argument to ``rest_target()``), with the exception
-of ``.bif`` files which are grouped automatically.
+The ``genDocSourcesList.sh`` script can be run to automatically generate
+``DocSourcesList.cmake``, which is the file CMake uses to define the list
+of documentation targets. This script should be run after adding new
+Bro script source files, and the resulting changes committed to git.
 
-When adding a new logical grouping e.g. "my/group" (groups are allowed
-to contain slashes specifying a path) for generated scripts,
-create a new reST document in ``source/my/group.rst`` and add some default
-documentation for the group. References to (and summaries of) documents
-associated with the group get appended to this pre-created file during the
-``make doc`` process.
+If a script shouldn't have documentation generated for it, there's also a
+blacklist variable that can be maintained in the ``genDocSourcesList.sh``
+script.
+
+The blacklist can also be used if you want to define a certain grouping for
+the script's generated docs to belong to (as opposed to the automatic grouping
+that happens for script packages/directories). To do that, add the
+script's name to the blacklist, then append a ``rest_target()`` to the
+``statictext`` variable where the first argument is the source directory
+containing the policy script to document, the second argument is the file
+name of the policy script, and the third argument is the path/name of a
+pre-created reST document in the ``source/`` directory to which the
+``make doc`` process can append script documentation references. This
+pre-created reST document should also then be linked to from the TOC tree
+in ``source/index.rst``.
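+
+As an illustrative sketch only (the script and group names are
+hypothetical), blacklisting ``my/script.bro`` and then appending
+
+    rest_target(\${psd} my/script.bro my/group.rst)
+
+to ``statictext`` would have the ``make doc`` process append that script's
+documentation references to a pre-created ``source/my/group.rst``.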
 
 The Sphinx source tree template in ``source/`` can be modified to add more
 common/general documentation, style sheets, JavaScript, etc. The Sphinx
diff --git a/doc/scripts/genDocSourcesList.sh b/doc/scripts/genDocSourcesList.sh
index 9cbf8a11db..23d2355c91 100755
--- a/doc/scripts/genDocSourcesList.sh
+++ b/doc/scripts/genDocSourcesList.sh
@@ -30,6 +30,7 @@ statictext="\
 set(psd \${PROJECT_SOURCE_DIR}/policy)
 
 rest_target(\${CMAKE_CURRENT_SOURCE_DIR} example.bro internal)
+rest_target(\${psd} bro.init internal)
 "
 
 if [[ $# -ge 1 ]]; then
diff --git a/doc/scripts/source/index.rst b/doc/scripts/source/index.rst
index 29e2fa396a..83bceebb0c 100644
--- a/doc/scripts/source/index.rst
+++ b/doc/scripts/source/index.rst
@@ -14,7 +14,6 @@ Contents:
 
    internal
    bifs
    packages
-   collections
    policy/index
 
 Indices and tables
diff --git a/scripts/base/frameworks/cluster/setup-connections.bro b/scripts/base/frameworks/cluster/setup-connections.bro
index c8a187fb6b..04d474e604 100644
--- a/scripts/base/frameworks/cluster/setup-connections.bro
+++ b/scripts/base/frameworks/cluster/setup-connections.bro
@@ -67,7 +67,7 @@ event bro_init() &priority=9
 		if ( n$node_type == PROXY && me$proxy == i )
 			Communication::nodes["proxy"] = [$host=nodes[i]$ip, $p=nodes[i]$p,
-			                                 $connect=T, $retry=1mins,
+			                                 $connect=T, $retry=1mins, $sync=T,
 			                                 $class=node];
 
 		if ( n$node_type == TIME_MACHINE && me?$time_machine && me$time_machine == i )
diff --git a/scripts/base/frameworks/control/main.bro b/scripts/base/frameworks/control/main.bro
index ebb8b08002..22422eb51d 100644
--- a/scripts/base/frameworks/control/main.bro
+++ b/scripts/base/frameworks/control/main.bro
@@ -2,12 +2,14 @@
 ##! consts to a remote Bro then sends the :bro:id:`configuration_update` event
 ##! and terminates processing.
 ##!
-##! Intended to be used from the command line like this when starting a controller:
+##! Intended to be used from the command line like this when starting a controller::
+##!
 ##!     bro frameworks/control/controller Control::host= Control::port= Control::cmd= [Control::arg=]
 ##!
 ##! A controllee only needs to load the controllee script in addition
-##! to the specific analysis scripts desired. It may also need a node
-##! configured as a controller node in the communications nodes configuration.
+##! to the specific analysis scripts desired. It may also need a node
+##! configured as a controller node in the communications nodes configuration::
+##!
 ##!     bro frameworks/control/controllee
 ##!
 ##! To use the framework as a controllee, it only needs to be loaded and
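For concreteness, a controller invocation matching the usage line above
might look like this; the host, port, and command values here are
placeholders, and the available commands depend on what the controllee
supports:

    bro frameworks/control/controller Control::host=127.0.0.1 Control::port=47760/tcp Control::cmd=shutdown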
diff --git a/scripts/base/frameworks/notice/main.bro b/scripts/base/frameworks/notice/main.bro
index 2f39343a42..595851b7c5 100644
--- a/scripts/base/frameworks/notice/main.bro
+++ b/scripts/base/frameworks/notice/main.bro
@@ -101,7 +101,7 @@ export {
 	## This is the record that defines the items that make up the notice policy.
 	type PolicyItem: record {
-		## This is the exact positional order in which the :id:type:`PolicyItem`
+		## This is the exact positional order in which the :bro:type:`PolicyItem`
 		## records are checked. This is set internally by the notice framework.
 		position: count &log &optional;
 		## Define the priority for this check. Items are checked in ordered
diff --git a/scripts/policy/protocols/smtp/software.bro b/scripts/policy/protocols/smtp/software.bro
index 36f9621b86..09bc59c636 100644
--- a/scripts/policy/protocols/smtp/software.bro
+++ b/scripts/policy/protocols/smtp/software.bro
@@ -3,6 +3,7 @@
 ##! currently detected.
 ##!
 ##! TODO:
+##!
 ##! * Find some heuristic to determine if email was sent through
 ##!   a MS Exhange webmail interface as opposed to a desktop client.
 
diff --git a/src/LogMgr.cc b/src/LogMgr.cc
index b194d0da2a..461bf25e02 100644
--- a/src/LogMgr.cc
+++ b/src/LogMgr.cc
@@ -398,13 +398,15 @@ LogMgr::Stream::~Stream()
 		{
 		WriterInfo* winfo = i->second;
 
+		if ( ! winfo )
+			continue;
+
 		if ( winfo->rotation_timer )
 			timer_mgr->Cancel(winfo->rotation_timer);
 
 		Unref(winfo->type);
 
-		delete winfo->writer;
-		delete i->second;
+		delete winfo;
 		}
 
 	for ( list<Filter*>::iterator f = filters.begin(); f != filters.end(); ++f )
@@ -437,7 +439,7 @@ void LogMgr::RemoveDisabledWriters(Stream* stream)
 	for ( Stream::WriterMap::iterator j = stream->writers.begin();
 	      j != stream->writers.end(); j++ )
 		{
-		if ( j->second->writer->Disabled() )
+		if ( j->second && j->second->writer->Disabled() )
 			{
 			delete j->second;
 			disabled.push_back(j->first);
@@ -900,8 +902,8 @@ bool LogMgr::Write(EnumVal* id, RecordVal* columns)
 	LogWriter* writer = 0;
 
 	if ( w != stream->writers.end() )
-		// We have a writer already.
-		writer = w->second->writer;
+		// We know this writer already.
+		writer = w->second ? w->second->writer : 0;
 
 	else
 		{
@@ -926,6 +928,11 @@ bool LogMgr::Write(EnumVal* id, RecordVal* columns)
 				return false;
 				}
 			}
+		else
+			// Insert a null pointer into the map to make
+			// sure we don't try creating it again.
+			stream->writers.insert(Stream::WriterMap::value_type(
+				Stream::WriterPathPair(filter->writer->AsEnum(), path), 0));
 
 		if ( filter->remote )
 			remote_serializer->SendLogCreateWriter(stream->id,
@@ -937,24 +944,36 @@ bool LogMgr::Write(EnumVal* id, RecordVal* columns)
 		// Alright, can do the write now.
 
-		LogVal** vals = RecordToFilterVals(stream, filter, columns);
+		if ( filter->local || filter->remote )
+			{
+			LogVal** vals = RecordToFilterVals(stream, filter, columns);
 
-		if ( filter->remote )
-			remote_serializer->SendLogWrite(stream->id,
-							filter->writer,
-							path,
-							filter->num_fields,
-							vals);
+			if ( filter->remote )
+				remote_serializer->SendLogWrite(stream->id,
+								filter->writer,
+								path,
+								filter->num_fields,
+								vals);
+
+			if ( filter->local )
+				{
+				assert(writer);
+
+				// Write takes ownership of vals.
+				if ( ! writer->Write(filter->num_fields, vals) )
+					error = true;
+				}
+
+			else
+				DeleteVals(filter->num_fields, vals);
+
+			}
 
-		if ( filter->local && ! writer->Write(filter->num_fields, vals) )
-			error = true;
 
 #ifdef DEBUG
 		DBG_LOG(DBG_LOGGING, "Wrote record to filter '%s' on stream '%s'",
 			filter->name.c_str(), stream->name.c_str());
 #endif
-
-		delete [] vals;
 		}
 
 	Unref(columns);
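The LogMgr::Write() changes above hinge on one pattern: when creating a
writer fails, a null pointer is cached in the writer map so later writes
neither retry the creation nor dereference a missing writer. A minimal
self-contained C++ sketch of that pattern (stand-in types and names, not
Bro's actual classes):

    #include <iostream>
    #include <map>
    #include <string>

    struct Writer { };  // Stand-in for LogWriter.

    static std::map<std::string, Writer*> writers;

    static Writer* CreateWriter(const std::string& path)
        {
        // Pretend creation always fails, e.g. the output file can't be opened.
        return 0;
        }

    static bool Write(const std::string& path)
        {
        std::map<std::string, Writer*>::iterator w = writers.find(path);

        if ( w == writers.end() )
            // Cache the result either way: a null entry marks a writer
            // that already failed, so we don't try creating it again.
            w = writers.insert(std::make_pair(path, CreateWriter(path))).first;

        if ( ! w->second )
            return true;  // Known-broken writer: skip the write.

        // ... hand the record to w->second here ...
        return true;
        }

    int main()
        {
        Write("conn");  // Attempts creation once and caches the failure.
        Write("conn");  // Hits the null entry; no second creation attempt.
        std::cout << "cached writer entries: " << writers.size() << "\n";
        return 0;
        }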
@@ -1124,7 +1143,7 @@ LogWriter* LogMgr::CreateWriter(EnumVal* id, EnumVal* writer, string path,
 	Stream::WriterMap::iterator w =
 		stream->writers.find(Stream::WriterPathPair(writer->AsEnum(), path));
 
-	if ( w != stream->writers.end() )
+	if ( w != stream->writers.end() && w->second )
 		// If we already have a writer for this. That's fine, we just
 		// return it.
 		return w->second->writer;
@@ -1194,6 +1213,14 @@ LogWriter* LogMgr::CreateWriter(EnumVal* id, EnumVal* writer, string path,
 	return writer_obj;
 	}
 
+void LogMgr::DeleteVals(int num_fields, LogVal** vals)
+	{
+	for ( int i = 0; i < num_fields; i++ )
+		delete vals[i];
+
+	delete [] vals;
+	}
+
 bool LogMgr::Write(EnumVal* id, EnumVal* writer, string path, int num_fields,
 		   LogVal** vals)
 	{
@@ -1208,11 +1235,15 @@ bool LogMgr::Write(EnumVal* id, EnumVal* writer, string path, int num_fields,
 		DBG_LOG(DBG_LOGGING, "unknown stream %s in LogMgr::Write()",
 			desc.Description());
 #endif
+		DeleteVals(num_fields, vals);
 		return false;
 		}
 
 	if ( ! stream->enabled )
+		{
+		DeleteVals(num_fields, vals);
 		return true;
+		}
 
 	Stream::WriterMap::iterator w =
 		stream->writers.find(Stream::WriterPathPair(writer->AsEnum(), path));
@@ -1226,10 +1257,11 @@ bool LogMgr::Write(EnumVal* id, EnumVal* writer, string path, int num_fields,
 		DBG_LOG(DBG_LOGGING, "unknown writer %s in LogMgr::Write()",
 			desc.Description());
 #endif
+		DeleteVals(num_fields, vals);
 		return false;
 		}
 
-	bool success = w->second->writer->Write(num_fields, vals);
+	bool success = (w->second ? w->second->writer->Write(num_fields, vals) : true);
 
 	DBG_LOG(DBG_LOGGING,
 		"Wrote pre-filtered record to path '%s' on stream '%s' [%s]",
@@ -1250,7 +1282,11 @@ void LogMgr::SendAllWritersTo(RemoteSerializer::PeerID peer)
 		for ( Stream::WriterMap::iterator i = stream->writers.begin();
 		      i != stream->writers.end(); i++ )
 			{
+			if ( ! i->second )
+				continue;
+
 			LogWriter* writer = i->second->writer;
+
 			EnumVal writer_val(i->first.first, BifType::Enum::Log::Writer);
 			remote_serializer->SendLogCreateWriter(peer, (*s)->id,
 							       &writer_val,
@@ -1269,7 +1305,10 @@ bool LogMgr::SetBuf(EnumVal* id, bool enabled)
 	for ( Stream::WriterMap::iterator i = stream->writers.begin();
 	      i != stream->writers.end(); i++ )
-		i->second->writer->SetBuf(enabled);
+		{
+		if ( i->second )
+			i->second->writer->SetBuf(enabled);
+		}
 
 	RemoveDisabledWriters(stream);
@@ -1287,7 +1326,10 @@ bool LogMgr::Flush(EnumVal* id)
 	for ( Stream::WriterMap::iterator i = stream->writers.begin();
 	      i != stream->writers.end(); i++ )
-		i->second->writer->Flush();
+		{
+		if ( i->second )
+			i->second->writer->Flush();
+		}
 
 	RemoveDisabledWriters(stream);
diff --git a/src/LogMgr.h b/src/LogMgr.h
index ce4de1ab5d..cc593374c5 100644
--- a/src/LogMgr.h
+++ b/src/LogMgr.h
@@ -106,6 +106,9 @@ protected:
 	// Reports an error for the given writer.
 	void Error(LogWriter* writer, const char* msg);
 
+	// Deletes the values as passed into Write().
+	void DeleteVals(int num_fields, LogVal** vals);
+
 private:
 	struct Filter;
 	struct Stream;
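The new LogMgr::DeleteVals() centralizes the ownership rule these hunks
implement: Write() takes ownership of the vals array and must free it on
every path, including the early failure returns. A simplified sketch of
that contract (hypothetical types, not the real API):

    #include <cstdio>

    struct LogVal { int type; };

    static void DeleteVals(int num_fields, LogVal** vals)
        {
        for ( int i = 0; i < num_fields; i++ )
            delete vals[i];

        delete [] vals;
        }

    static bool Write(bool stream_enabled, int num_fields, LogVal** vals)
        {
        if ( ! stream_enabled )
            {
            // Early exit must still free what the caller handed over.
            DeleteVals(num_fields, vals);
            return true;
            }

        // ... deliver vals to a writer here; whoever consumes them last
        // calls DeleteVals() exactly once ...
        DeleteVals(num_fields, vals);
        return true;
        }

    int main()
        {
        LogVal** vals = new LogVal*[2];
        vals[0] = new LogVal();
        vals[1] = new LogVal();

        Write(false, 2, vals);  // No leak even though the stream is disabled.
        std::printf("done\n");
        return 0;
        }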
%d)", arg_num_fields, num_fields); + DeleteVals(vals); return false; } @@ -56,6 +57,7 @@ bool LogWriter::Write(int arg_num_fields, LogVal** vals) { DBG_LOG(DBG_LOGGING, "Field type doesn't match in LogWriter::Write() (%d vs. %d)", vals[i]->type, fields[i]->type); + DeleteVals(vals); return false; } } @@ -146,8 +148,7 @@ void LogWriter::Error(const char *msg) void LogWriter::DeleteVals(LogVal** vals) { - for ( int i = 0; i < num_fields; i++ ) - delete vals[i]; + log_mgr->DeleteVals(num_fields, vals); } bool LogWriter::RunPostProcessor(string fname, string postprocessor, diff --git a/src/Rlogin.cc b/src/Rlogin.cc index 8404eacca7..6dd1cc362d 100644 --- a/src/Rlogin.cc +++ b/src/Rlogin.cc @@ -12,7 +12,7 @@ Contents_Rlogin_Analyzer::Contents_Rlogin_Analyzer(Connection* conn, bool orig, Rlogin_Analyzer* arg_analyzer) : ContentLine_Analyzer(AnalyzerTag::Contents_Rlogin, conn, orig) { - num_bytes_to_scan = num_bytes_to_scan = 0; + num_bytes_to_scan = 0; analyzer = arg_analyzer; peer = 0; diff --git a/src/Val.cc b/src/Val.cc index ddec9b616d..7db45cf648 100644 --- a/src/Val.cc +++ b/src/Val.cc @@ -3163,14 +3163,13 @@ unsigned int RecordVal::MemoryAllocation() const { unsigned int size = 0; - for ( int i = 0; i < type->AsRecordType()->NumFields(); ++i ) - { - Val* v = (*val.val_list_val)[i]; + const val_list* vl = AsRecord(); - // v might be nil for records that don't wind - // up being set to a value. + loop_over_list(*vl, i) + { + Val* v = (*vl)[i]; if ( v ) - size += v->MemoryAllocation(); + size += v->MemoryAllocation(); } return size + padded_sizeof(*this) + val.val_list_val->MemoryAllocation(); diff --git a/src/bro.bif b/src/bro.bif index 66e6a51e17..240eeed9dd 100644 --- a/src/bro.bif +++ b/src/bro.bif @@ -799,6 +799,7 @@ EnumVal* map_conn_type(TransportProto tp) // Cannot be reached; assert(false); + return 0; // Make compiler happy. } %%}