Mirror of https://github.com/zeek/zeek.git, synced 2025-10-13 20:18:20 +00:00

Commit 69a7dd03bc: Merge remote-tracking branch 'origin/master' into topic/matthias/bloom-filter

229 changed files with 7840 additions and 2802 deletions

src/3rdparty/sqlite3.c (vendored, 3175 changes)
File diff suppressed because it is too large.

src/3rdparty/sqlite3.h (vendored, 109 changes)
@@ -107,9 +107,9 @@ extern "C" {
** [sqlite3_libversion_number()], [sqlite3_sourceid()],
** [sqlite_version()] and [sqlite_source_id()].
*/
#define SQLITE_VERSION "3.7.16.2"
#define SQLITE_VERSION_NUMBER 3007016
#define SQLITE_SOURCE_ID "2013-04-12 11:52:43 cbea02d93865ce0e06789db95fd9168ebac970c7"
#define SQLITE_VERSION "3.7.17"
#define SQLITE_VERSION_NUMBER 3007017
#define SQLITE_SOURCE_ID "2013-05-20 00:56:22 118a3b35693b134d56ebd780123b7fd6f1497668"

/*
** CAPI3REF: Run-Time Library Version Numbers
@@ -425,6 +425,8 @@ SQLITE_API int sqlite3_exec(
#define SQLITE_FORMAT 24 /* Auxiliary database format error */
#define SQLITE_RANGE 25 /* 2nd parameter to sqlite3_bind out of range */
#define SQLITE_NOTADB 26 /* File opened that is not a database file */
#define SQLITE_NOTICE 27 /* Notifications from sqlite3_log() */
#define SQLITE_WARNING 28 /* Warnings from sqlite3_log() */
#define SQLITE_ROW 100 /* sqlite3_step() has another row ready */
#define SQLITE_DONE 101 /* sqlite3_step() has finished executing */
/* end-of-error-codes */

@@ -475,6 +477,7 @@ SQLITE_API int sqlite3_exec(
#define SQLITE_IOERR_SHMMAP (SQLITE_IOERR | (21<<8))
#define SQLITE_IOERR_SEEK (SQLITE_IOERR | (22<<8))
#define SQLITE_IOERR_DELETE_NOENT (SQLITE_IOERR | (23<<8))
#define SQLITE_IOERR_MMAP (SQLITE_IOERR | (24<<8))
#define SQLITE_LOCKED_SHAREDCACHE (SQLITE_LOCKED | (1<<8))
#define SQLITE_BUSY_RECOVERY (SQLITE_BUSY | (1<<8))
#define SQLITE_CANTOPEN_NOTEMPDIR (SQLITE_CANTOPEN | (1<<8))

@@ -494,6 +497,8 @@ SQLITE_API int sqlite3_exec(
#define SQLITE_CONSTRAINT_TRIGGER (SQLITE_CONSTRAINT | (7<<8))
#define SQLITE_CONSTRAINT_UNIQUE (SQLITE_CONSTRAINT | (8<<8))
#define SQLITE_CONSTRAINT_VTAB (SQLITE_CONSTRAINT | (9<<8))
#define SQLITE_NOTICE_RECOVER_WAL (SQLITE_NOTICE | (1<<8))
#define SQLITE_NOTICE_RECOVER_ROLLBACK (SQLITE_NOTICE | (2<<8))

/*
** CAPI3REF: Flags For File Open Operations

@@ -733,6 +738,9 @@ struct sqlite3_io_methods {
void (*xShmBarrier)(sqlite3_file*);
int (*xShmUnmap)(sqlite3_file*, int deleteFlag);
/* Methods above are valid for version 2 */
int (*xFetch)(sqlite3_file*, sqlite3_int64 iOfst, int iAmt, void **pp);
int (*xUnfetch)(sqlite3_file*, sqlite3_int64 iOfst, void *p);
/* Methods above are valid for version 3 */
/* Additional methods may be added in future releases */
};

@@ -869,7 +877,8 @@ struct sqlite3_io_methods {
** it is able to override built-in [PRAGMA] statements.
**
** <li>[[SQLITE_FCNTL_BUSYHANDLER]]
** ^This file-control may be invoked by SQLite on the database file handle
** ^The [SQLITE_FCNTL_BUSYHANDLER]
** file-control may be invoked by SQLite on the database file handle
** shortly after it is opened in order to provide a custom VFS with access
** to the connections busy-handler callback. The argument is of type (void **)
** - an array of two (void *) values. The first (void *) actually points
@@ -880,13 +889,24 @@ struct sqlite3_io_methods {
** current operation.
**
** <li>[[SQLITE_FCNTL_TEMPFILENAME]]
** ^Application can invoke this file-control to have SQLite generate a
** ^Application can invoke the [SQLITE_FCNTL_TEMPFILENAME] file-control
** to have SQLite generate a
** temporary filename using the same algorithm that is followed to generate
** temporary filenames for TEMP tables and other internal uses. The
** argument should be a char** which will be filled with the filename
** written into memory obtained from [sqlite3_malloc()]. The caller should
** invoke [sqlite3_free()] on the result to avoid a memory leak.
**
** <li>[[SQLITE_FCNTL_MMAP_SIZE]]
** The [SQLITE_FCNTL_MMAP_SIZE] file control is used to query or set the
** maximum number of bytes that will be used for memory-mapped I/O.
** The argument is a pointer to a value of type sqlite3_int64 that
** is an advisory maximum number of bytes in the file to memory map. The
** pointer is overwritten with the old value. The limit is not changed if
** the value originally pointed to is negative, and so the current limit
** can be queried by passing in a pointer to a negative number. This
** file-control is used internally to implement [PRAGMA mmap_size].
**
** </ul>
*/
#define SQLITE_FCNTL_LOCKSTATE 1
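A minimal usage sketch of the two file controls documented above (illustrative only, not part of this change; it assumes sqlite3.h and <stdio.h> are included and db is an open sqlite3* using the default "main" schema):

/* Query the current mmap limit: a negative input means "do not change,
** just report back the current value", per SQLITE_FCNTL_MMAP_SIZE above. */
sqlite3_int64 limit = -1;
int rc = sqlite3_file_control(db, "main", SQLITE_FCNTL_MMAP_SIZE, &limit);

/* Ask SQLite for a temporary filename via SQLITE_FCNTL_TEMPFILENAME;
** the caller owns the result and must release it with sqlite3_free(). */
char *zTemp = 0;
rc = sqlite3_file_control(db, "main", SQLITE_FCNTL_TEMPFILENAME, &zTemp);
if ( rc == SQLITE_OK && zTemp )
    sqlite3_free(zTemp);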
@@ -905,6 +925,7 @@ struct sqlite3_io_methods {
#define SQLITE_FCNTL_PRAGMA 14
#define SQLITE_FCNTL_BUSYHANDLER 15
#define SQLITE_FCNTL_TEMPFILENAME 16
#define SQLITE_FCNTL_MMAP_SIZE 18

/*
** CAPI3REF: Mutex Handle

@@ -1571,7 +1592,9 @@ struct sqlite3_mem_methods {
** page cache implementation into that object.)^ </dd>
**
** [[SQLITE_CONFIG_LOG]] <dt>SQLITE_CONFIG_LOG</dt>
** <dd> ^The SQLITE_CONFIG_LOG option takes two arguments: a pointer to a
** <dd> The SQLITE_CONFIG_LOG option is used to configure the SQLite
** global [error log].
** (^The SQLITE_CONFIG_LOG option takes two arguments: a pointer to a
** function with a call signature of void(*)(void*,int,const char*),
** and a pointer to void. ^If the function pointer is not NULL, it is
** invoked by [sqlite3_log()] to process each logging event. ^If the
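A brief sketch of registering an error-log callback with SQLITE_CONFIG_LOG as described above (illustrative; sqlite3_config() must run before the library is initialized, and the callback signature is the one given in the comment):

static void log_callback(void *unused, int errcode, const char *msg)
    {
    (void)unused;
    fprintf(stderr, "sqlite(%d): %s\n", errcode, msg);
    }

/* during startup, before sqlite3_initialize(): */
sqlite3_config(SQLITE_CONFIG_LOG, log_callback, (void*)0);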
@@ -1617,12 +1640,12 @@ struct sqlite3_mem_methods {
** <dt>SQLITE_CONFIG_PCACHE and SQLITE_CONFIG_GETPCACHE
** <dd> These options are obsolete and should not be used by new code.
** They are retained for backwards compatibility but are now no-ops.
** </dl>
** </dd>
**
** [[SQLITE_CONFIG_SQLLOG]]
** <dt>SQLITE_CONFIG_SQLLOG
** <dd>This option is only available if sqlite is compiled with the
** SQLITE_ENABLE_SQLLOG pre-processor macro defined. The first argument should
** [SQLITE_ENABLE_SQLLOG] pre-processor macro defined. The first argument should
** be a pointer to a function of type void(*)(void*,sqlite3*,const char*, int).
** The second should be of type (void*). The callback is invoked by the library
** in three separate circumstances, identified by the value passed as the

@@ -1632,7 +1655,23 @@ struct sqlite3_mem_methods {
** fourth parameter is 1, then the SQL statement that the third parameter
** points to has just been executed. Or, if the fourth parameter is 2, then
** the connection being passed as the second parameter is being closed. The
** third parameter is passed NULL In this case.
** third parameter is passed NULL In this case. An example of using this
** configuration option can be seen in the "test_sqllog.c" source file in
** the canonical SQLite source tree.</dd>
**
** [[SQLITE_CONFIG_MMAP_SIZE]]
** <dt>SQLITE_CONFIG_MMAP_SIZE
** <dd>SQLITE_CONFIG_MMAP_SIZE takes two 64-bit integer (sqlite3_int64) values
** that are the default mmap size limit (the default setting for
** [PRAGMA mmap_size]) and the maximum allowed mmap size limit.
** The default setting can be overridden by each database connection using
** either the [PRAGMA mmap_size] command, or by using the
** [SQLITE_FCNTL_MMAP_SIZE] file control. The maximum allowed mmap size
** cannot be changed at run-time. Nor may the maximum allowed mmap size
** exceed the compile-time maximum mmap size set by the
** [SQLITE_MAX_MMAP_SIZE] compile-time option.
** If either argument to this option is negative, then that argument is
** changed to its compile-time default.
** </dl>
*/
#define SQLITE_CONFIG_SINGLETHREAD 1 /* nil */
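For illustration, the new SQLITE_CONFIG_MMAP_SIZE option could be applied process-wide like this before sqlite3_initialize() (a sketch only; the 64 MiB default and 256 MiB cap are made-up values):

static int set_mmap_limits(void)
    {
    sqlite3_int64 def_limit = 64 * 1024 * 1024;   /* default for PRAGMA mmap_size */
    sqlite3_int64 max_limit = 256 * 1024 * 1024;  /* hard upper bound */
    return sqlite3_config(SQLITE_CONFIG_MMAP_SIZE, def_limit, max_limit);
    }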
@@ -1656,6 +1695,7 @@ struct sqlite3_mem_methods {
#define SQLITE_CONFIG_GETPCACHE2 19 /* sqlite3_pcache_methods2* */
#define SQLITE_CONFIG_COVERING_INDEX_SCAN 20 /* int */
#define SQLITE_CONFIG_SQLLOG 21 /* xSqllog, void* */
#define SQLITE_CONFIG_MMAP_SIZE 22 /* sqlite3_int64, sqlite3_int64 */

/*
** CAPI3REF: Database Connection Configuration Options

@@ -2489,6 +2529,9 @@ SQLITE_API int sqlite3_set_authorizer(
** as each triggered subprogram is entered. The callbacks for triggers
** contain a UTF-8 SQL comment that identifies the trigger.)^
**
** The [SQLITE_TRACE_SIZE_LIMIT] compile-time option can be used to limit
** the length of [bound parameter] expansion in the output of sqlite3_trace().
**
** ^The callback function registered by sqlite3_profile() is invoked
** as each SQL statement finishes. ^The profile callback contains
** the original statement text and an estimate of wall-clock time
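A hedged sketch of the two callbacks referred to above (not part of this change; db is assumed to be an open sqlite3*):

static void trace_cb(void *unused, const char *sql)
    {
    (void)unused;
    fprintf(stderr, "SQL: %s\n", sql);   /* fired as each statement starts */
    }

static void profile_cb(void *unused, const char *sql, sqlite3_uint64 ns)
    {
    (void)unused;
    fprintf(stderr, "%llu ns: %s\n", (unsigned long long)ns, sql);   /* fired as it finishes */
    }

static void install_tracing(sqlite3 *db)
    {
    sqlite3_trace(db, trace_cb, 0);
    sqlite3_profile(db, profile_cb, 0);
    }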
@@ -3027,7 +3070,8 @@ SQLITE_API int sqlite3_limit(sqlite3*, int id, int newVal);
** <li>
** ^If the database schema changes, instead of returning [SQLITE_SCHEMA] as it
** always used to do, [sqlite3_step()] will automatically recompile the SQL
** statement and try to run it again.
** statement and try to run it again. As many as [SQLITE_MAX_SCHEMA_RETRY]
** retries will occur before sqlite3_step() gives up and returns an error.
** </li>
**
** <li>

@@ -3231,6 +3275,9 @@ typedef struct sqlite3_context sqlite3_context;
** parameter [SQLITE_LIMIT_VARIABLE_NUMBER] (default value: 999).
**
** ^The third argument is the value to bind to the parameter.
** ^If the third parameter to sqlite3_bind_text() or sqlite3_bind_text16()
** or sqlite3_bind_blob() is a NULL pointer then the fourth parameter
** is ignored and the end result is the same as sqlite3_bind_null().
**
** ^(In those routines that have a fourth argument, its value is the
** number of bytes in the parameter. To be clear: the value is the
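To make the binding rules above concrete, a small sketch (the table and column names are invented; db is an open sqlite3*):

sqlite3_stmt *stmt = 0;
if ( sqlite3_prepare_v2(db, "SELECT * FROM t WHERE name = ?1", -1, &stmt, 0) == SQLITE_OK )
    {
    /* third argument: the value; fourth: its length in bytes (-1 = up to the
    ** first NUL); SQLITE_TRANSIENT makes SQLite take its own private copy. */
    sqlite3_bind_text(stmt, 1, "example", -1, SQLITE_TRANSIENT);
    while ( sqlite3_step(stmt) == SQLITE_ROW )
        ;   /* read columns here */
    sqlite3_finalize(stmt);
    }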
@@ -4187,7 +4234,7 @@ SQLITE_API void sqlite3_set_auxdata(sqlite3_context*, int N, void*, void (*)(voi
** the content before returning.
**
** The typedef is necessary to work around problems in certain
** C++ compilers. See ticket #2191.
** C++ compilers.
*/
typedef void (*sqlite3_destructor_type)(void*);
#define SQLITE_STATIC ((sqlite3_destructor_type)0)

@@ -4986,11 +5033,20 @@ SQLITE_API int sqlite3_table_column_metadata(
** ^This interface loads an SQLite extension library from the named file.
**
** ^The sqlite3_load_extension() interface attempts to load an
** SQLite extension library contained in the file zFile.
** [SQLite extension] library contained in the file zFile. If
** the file cannot be loaded directly, attempts are made to load
** with various operating-system specific extensions added.
** So for example, if "samplelib" cannot be loaded, then names like
** "samplelib.so" or "samplelib.dylib" or "samplelib.dll" might
** be tried also.
**
** ^The entry point is zProc.
** ^zProc may be 0, in which case the name of the entry point
** defaults to "sqlite3_extension_init".
** ^(zProc may be 0, in which case SQLite will try to come up with an
** entry point name on its own. It first tries "sqlite3_extension_init".
** If that does not work, it constructs a name "sqlite3_X_init" where the
** X is consists of the lower-case equivalent of all ASCII alphabetic
** characters in the filename from the last "/" to the first following
** "." and omitting any initial "lib".)^
** ^The sqlite3_load_extension() interface returns
** [SQLITE_OK] on success and [SQLITE_ERROR] if something goes wrong.
** ^If an error occurs and pzErrMsg is not 0, then the
@@ -5016,11 +5072,11 @@ SQLITE_API int sqlite3_load_extension(
** CAPI3REF: Enable Or Disable Extension Loading
**
** ^So as not to open security holes in older applications that are
** unprepared to deal with extension loading, and as a means of disabling
** extension loading while evaluating user-entered SQL, the following API
** unprepared to deal with [extension loading], and as a means of disabling
** [extension loading] while evaluating user-entered SQL, the following API
** is provided to turn the [sqlite3_load_extension()] mechanism on and off.
**
** ^Extension loading is off by default. See ticket #1863.
** ^Extension loading is off by default.
** ^Call the sqlite3_enable_load_extension() routine with onoff==1
** to turn extension loading on and call it with onoff==0 to turn
** it back off again.
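A sketch of the enable/load sequence described in the two sections above (illustrative; "samplelib" is the placeholder name used in the comments, and db is an open sqlite3*):

char *errmsg = 0;
sqlite3_enable_load_extension(db, 1);              /* loading is off by default */
if ( sqlite3_load_extension(db, "samplelib", 0, &errmsg) != SQLITE_OK )
    {
    fprintf(stderr, "load failed: %s\n", errmsg ? errmsg : "(no message)");
    sqlite3_free(errmsg);
    }
sqlite3_enable_load_extension(db, 0);              /* turn it back off afterwards */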
@@ -5032,7 +5088,7 @@ SQLITE_API int sqlite3_enable_load_extension(sqlite3 *db, int onoff);
**
** ^This interface causes the xEntryPoint() function to be invoked for
** each new [database connection] that is created. The idea here is that
** xEntryPoint() is the entry point for a statically linked SQLite extension
** xEntryPoint() is the entry point for a statically linked [SQLite extension]
** that is to be automatically loaded into all new database connections.
**
** ^(Even though the function prototype shows that xEntryPoint() takes

@@ -6812,10 +6868,25 @@ SQLITE_API int sqlite3_unlock_notify(
SQLITE_API int sqlite3_stricmp(const char *, const char *);
SQLITE_API int sqlite3_strnicmp(const char *, const char *, int);

/*
** CAPI3REF: String Globbing
*
** ^The [sqlite3_strglob(P,X)] interface returns zero if string X matches
** the glob pattern P, and it returns non-zero if string X does not match
** the glob pattern P. ^The definition of glob pattern matching used in
** [sqlite3_strglob(P,X)] is the same as for the "X GLOB P" operator in the
** SQL dialect used by SQLite. ^The sqlite3_strglob(P,X) function is case
** sensitive.
**
** Note that this routine returns zero on a match and non-zero if the strings
** do not match, the same as [sqlite3_stricmp()] and [sqlite3_strnicmp()].
*/
SQLITE_API int sqlite3_strglob(const char *zGlob, const char *zStr);
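For example (a sketch; note the zero-on-match convention called out above):

if ( sqlite3_strglob("*.bro", "bloom-filter.bro") == 0 )
    printf("pattern matched\n");   /* non-zero would mean no match */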
/*
** CAPI3REF: Error Logging Interface
**
** ^The [sqlite3_log()] interface writes a message into the error log
** ^The [sqlite3_log()] interface writes a message into the [error log]
** established by the [SQLITE_CONFIG_LOG] option to [sqlite3_config()].
** ^If logging is enabled, the zFormat string and subsequent arguments are
** used with [sqlite3_snprintf()] to generate the final output string.
src/BroDoc.cc (310 changes)

@@ -8,6 +8,9 @@
#include "BroDoc.h"
#include "BroDocObj.h"
#include "util.h"
#include "plugin/Manager.h"
#include "analyzer/Manager.h"
#include "analyzer/Component.h"

BroDoc::BroDoc(const std::string& rel, const std::string& abs)
{
@@ -164,84 +167,77 @@ void BroDoc::SetPacketFilter(const std::string& s)
packet_filter.clear();
}

void BroDoc::AddPortAnalysis(const std::string& analyzer,
const std::string& ports)
{
std::string reST_string = analyzer + "::\n" + ports + "\n\n";
port_analysis.push_back(reST_string);
}

void BroDoc::WriteDocFile() const
{
WriteToDoc(".. Automatically generated. Do not edit.\n\n");
WriteToDoc(reST_file, ".. Automatically generated. Do not edit.\n\n");

WriteToDoc(":tocdepth: 3\n\n");
WriteToDoc(reST_file, ":tocdepth: 3\n\n");

WriteSectionHeading(doc_title.c_str(), '=');
WriteSectionHeading(reST_file, doc_title.c_str(), '=');

WriteStringList(".. bro:namespace:: %s\n", modules);
WriteStringList(reST_file, ".. bro:namespace:: %s\n", modules);

WriteToDoc("\n");
WriteToDoc(reST_file, "\n");

// WriteSectionHeading("Overview", '-');
WriteStringList("%s\n", summary);
// WriteSectionHeading(reST_file, "Overview", '-');
WriteStringList(reST_file, "%s\n", summary);

WriteToDoc("\n");
WriteToDoc(reST_file, "\n");

if ( ! modules.empty() )
{
WriteToDoc(":Namespace%s: ", (modules.size() > 1 ? "s" : ""));
// WriteStringList(":bro:namespace:`%s`", modules);
WriteStringList("``%s``, ", "``%s``", modules);
WriteToDoc("\n");
WriteToDoc(reST_file, ":Namespace%s: ", (modules.size() > 1 ? "s" : ""));
// WriteStringList(reST_file, ":bro:namespace:`%s`", modules);
WriteStringList(reST_file, "``%s``, ", "``%s``", modules);
WriteToDoc(reST_file, "\n");
}

if ( ! imports.empty() )
{
WriteToDoc(":Imports: ");
WriteToDoc(reST_file, ":Imports: ");
std::list<std::string>::const_iterator it;
for ( it = imports.begin(); it != imports.end(); ++it )
{
if ( it != imports.begin() )
WriteToDoc(", ");
WriteToDoc(reST_file, ", ");

string pretty(*it);
size_t pos = pretty.find("/index");
if ( pos != std::string::npos && pos + 6 == pretty.size() )
pretty = pretty.substr(0, pos);
WriteToDoc(":doc:`%s </scripts/%s>`", pretty.c_str(), it->c_str());
WriteToDoc(reST_file, ":doc:`%s </scripts/%s>`", pretty.c_str(), it->c_str());
}
WriteToDoc("\n");
WriteToDoc(reST_file, "\n");
}

WriteToDoc(":Source File: :download:`%s`\n",
WriteToDoc(reST_file, ":Source File: :download:`%s`\n",
downloadable_filename.c_str());

WriteToDoc("\n");
WriteToDoc(reST_file, "\n");

WriteInterface("Summary", '~', '#', true, true);

if ( ! notices.empty() )
WriteBroDocObjList(notices, "Notices", '#');
WriteBroDocObjList(reST_file, notices, "Notices", '#');

if ( port_analysis.size() || packet_filter.size() )
WriteSectionHeading("Configuration Changes", '#');
WriteSectionHeading(reST_file, "Configuration Changes", '#');

if ( ! port_analysis.empty() )
{
WriteSectionHeading("Port Analysis", '^');
WriteToDoc("Loading this script makes the following changes to "
WriteSectionHeading(reST_file, "Port Analysis", '^');
WriteToDoc(reST_file, "Loading this script makes the following changes to "
":bro:see:`dpd_config`.\n\n");
WriteStringList("%s, ", "%s", port_analysis);
WriteStringList(reST_file, "%s, ", "%s", port_analysis);
}

if ( ! packet_filter.empty() )
{
WriteSectionHeading("Packet Filter", '^');
WriteToDoc("Loading this script makes the following changes to "
WriteSectionHeading(reST_file, "Packet Filter", '^');
WriteToDoc(reST_file, "Loading this script makes the following changes to "
":bro:see:`capture_filters`.\n\n");
WriteToDoc("Filters added::\n\n");
WriteToDoc("%s\n", packet_filter.c_str());
WriteToDoc(reST_file, "Filters added::\n\n");
WriteToDoc(reST_file, "%s\n", packet_filter.c_str());
}

WriteInterface("Detailed Interface", '~', '#', true, false);
@@ -267,23 +263,23 @@ void BroDoc::WriteDocFile() const
void BroDoc::WriteInterface(const char* heading, char underline,
char sub, bool isPublic, bool isShort) const
{
WriteSectionHeading(heading, underline);
WriteBroDocObjList(options, isPublic, "Options", sub, isShort);
WriteBroDocObjList(constants, isPublic, "Constants", sub, isShort);
WriteBroDocObjList(state_vars, isPublic, "State Variables", sub, isShort);
WriteBroDocObjList(types, isPublic, "Types", sub, isShort);
WriteBroDocObjList(events, isPublic, "Events", sub, isShort);
WriteBroDocObjList(hooks, isPublic, "Hooks", sub, isShort);
WriteBroDocObjList(functions, isPublic, "Functions", sub, isShort);
WriteBroDocObjList(redefs, isPublic, "Redefinitions", sub, isShort);
WriteSectionHeading(reST_file, heading, underline);
WriteBroDocObjList(reST_file, options, isPublic, "Options", sub, isShort);
WriteBroDocObjList(reST_file, constants, isPublic, "Constants", sub, isShort);
WriteBroDocObjList(reST_file, state_vars, isPublic, "State Variables", sub, isShort);
WriteBroDocObjList(reST_file, types, isPublic, "Types", sub, isShort);
WriteBroDocObjList(reST_file, events, isPublic, "Events", sub, isShort);
WriteBroDocObjList(reST_file, hooks, isPublic, "Hooks", sub, isShort);
WriteBroDocObjList(reST_file, functions, isPublic, "Functions", sub, isShort);
WriteBroDocObjList(reST_file, redefs, isPublic, "Redefinitions", sub, isShort);
}

void BroDoc::WriteStringList(const char* format, const char* last_format,
const std::list<std::string>& l) const
void BroDoc::WriteStringList(FILE* f, const char* format, const char* last_format,
const std::list<std::string>& l)
{
if ( l.empty() )
{
WriteToDoc("\n");
WriteToDoc(f, "\n");
return;
}

@@ -292,12 +288,12 @@ void BroDoc::WriteStringList(const char* format, const char* last_format,
last--;

for ( it = l.begin(); it != last; ++it )
WriteToDoc(format, it->c_str());
WriteToDoc(f, format, it->c_str());

WriteToDoc(last_format, last->c_str());
WriteToDoc(f, last_format, last->c_str());
}

void BroDoc::WriteBroDocObjTable(const BroDocObjList& l) const
void BroDoc::WriteBroDocObjTable(FILE* f, const BroDocObjList& l)
{
int max_id_col = 0;
int max_com_col = 0;

@@ -317,38 +313,38 @@ void BroDoc::WriteBroDocObjTable(const BroDocObjList& l) const
}

// Start table.
WriteRepeatedChar('=', max_id_col);
WriteToDoc(" ");
WriteRepeatedChar(f, '=', max_id_col);
WriteToDoc(f, " ");

if ( max_com_col == 0 )
WriteToDoc("=");
WriteToDoc(f, "=");
else
WriteRepeatedChar('=', max_com_col);
WriteRepeatedChar(f, '=', max_com_col);

WriteToDoc("\n");
WriteToDoc(f, "\n");

for ( it = l.begin(); it != l.end(); ++it )
{
if ( it != l.begin() )
WriteToDoc("\n\n");
(*it)->WriteReSTCompact(reST_file, max_id_col);
WriteToDoc(f, "\n\n");
(*it)->WriteReSTCompact(f, max_id_col);
}

// End table.
WriteToDoc("\n");
WriteRepeatedChar('=', max_id_col);
WriteToDoc(" ");
WriteToDoc(f, "\n");
WriteRepeatedChar(f, '=', max_id_col);
WriteToDoc(f, " ");

if ( max_com_col == 0 )
WriteToDoc("=");
WriteToDoc(f, "=");
else
WriteRepeatedChar('=', max_com_col);
WriteRepeatedChar(f, '=', max_com_col);

WriteToDoc("\n\n");
WriteToDoc(f, "\n\n");
}

void BroDoc::WriteBroDocObjList(const BroDocObjList& l, bool wantPublic,
const char* heading, char underline, bool isShort) const
void BroDoc::WriteBroDocObjList(FILE* f, const BroDocObjList& l, bool wantPublic,
const char* heading, char underline, bool isShort)
{
if ( l.empty() )
return;

@@ -366,7 +362,7 @@ void BroDoc::WriteBroDocObjList(const BroDocObjList& l, bool wantPublic,
if ( it == l.end() )
return;

WriteSectionHeading(heading, underline);
WriteSectionHeading(f, heading, underline);

BroDocObjList filtered_list;

@@ -377,13 +373,13 @@ void BroDoc::WriteBroDocObjList(const BroDocObjList& l, bool wantPublic,
}

if ( isShort )
WriteBroDocObjTable(filtered_list);
WriteBroDocObjTable(f, filtered_list);
else
WriteBroDocObjList(filtered_list);
WriteBroDocObjList(f, filtered_list);
}

void BroDoc::WriteBroDocObjList(const BroDocObjMap& m, bool wantPublic,
const char* heading, char underline, bool isShort) const
void BroDoc::WriteBroDocObjList(FILE* f, const BroDocObjMap& m, bool wantPublic,
const char* heading, char underline, bool isShort)
{
BroDocObjMap::const_iterator it;
BroDocObjList l;

@@ -391,24 +387,24 @@ void BroDoc::WriteBroDocObjList(const BroDocObjMap& m, bool wantPublic,
for ( it = m.begin(); it != m.end(); ++it )
l.push_back(it->second);

WriteBroDocObjList(l, wantPublic, heading, underline, isShort);
WriteBroDocObjList(f, l, wantPublic, heading, underline, isShort);
}

void BroDoc::WriteBroDocObjList(const BroDocObjList& l, const char* heading,
char underline) const
void BroDoc::WriteBroDocObjList(FILE* f, const BroDocObjList& l, const char* heading,
char underline)
{
WriteSectionHeading(heading, underline);
WriteBroDocObjList(l);
WriteSectionHeading(f, heading, underline);
WriteBroDocObjList(f, l);
}

void BroDoc::WriteBroDocObjList(const BroDocObjList& l) const
void BroDoc::WriteBroDocObjList(FILE* f, const BroDocObjList& l)
{
for ( BroDocObjList::const_iterator it = l.begin(); it != l.end(); ++it )
(*it)->WriteReST(reST_file);
(*it)->WriteReST(f);
}

void BroDoc::WriteBroDocObjList(const BroDocObjMap& m, const char* heading,
char underline) const
void BroDoc::WriteBroDocObjList(FILE* f, const BroDocObjMap& m, const char* heading,
char underline)
{
BroDocObjMap::const_iterator it;
BroDocObjList l;

@@ -416,28 +412,28 @@ void BroDoc::WriteBroDocObjList(const BroDocObjMap& m, const char* heading,
for ( it = m.begin(); it != m.end(); ++it )
l.push_back(it->second);

WriteBroDocObjList(l, heading, underline);
WriteBroDocObjList(f, l, heading, underline);
}

void BroDoc::WriteToDoc(const char* format, ...) const
void BroDoc::WriteToDoc(FILE* f, const char* format, ...)
{
va_list argp;
va_start(argp, format);
vfprintf(reST_file, format, argp);
vfprintf(f, format, argp);
va_end(argp);
}

void BroDoc::WriteSectionHeading(const char* heading, char underline) const
void BroDoc::WriteSectionHeading(FILE* f, const char* heading, char underline)
{
WriteToDoc("%s\n", heading);
WriteRepeatedChar(underline, strlen(heading));
WriteToDoc("\n");
WriteToDoc(f, "%s\n", heading);
WriteRepeatedChar(f, underline, strlen(heading));
WriteToDoc(f, "\n");
}

void BroDoc::WriteRepeatedChar(char c, size_t n) const
void BroDoc::WriteRepeatedChar(FILE* f, char c, size_t n)
{
for ( size_t i = 0; i < n; ++i )
WriteToDoc("%c", c);
WriteToDoc(f, "%c", c);
}

void BroDoc::FreeBroDocObjPtrList(BroDocObjList& l)
@@ -459,3 +455,143 @@ void BroDoc::AddFunction(BroDocObj* o)
else
functions[o->Name()]->Combine(o);
}

static void WritePluginSectionHeading(FILE* f, const plugin::Plugin* p)
{
string name = p->Name();

fprintf(f, "%s\n", name.c_str());
for ( size_t i = 0; i < name.size(); ++i )
fprintf(f, "-");
fprintf(f, "\n\n");

fprintf(f, "%s\n\n", p->Description());
}

static void WriteAnalyzerComponent(FILE* f, const analyzer::Component* c)
{
EnumType* atag = analyzer_mgr->GetTagEnumType();
string tag = fmt("ANALYZER_%s", c->CanonicalName());

if ( atag->Lookup("Analyzer", tag.c_str()) < 0 )
reporter->InternalError("missing analyzer tag for %s", tag.c_str());

fprintf(f, ":bro:enum:`Analyzer::%s`\n\n", tag.c_str());
}

static void WritePluginComponents(FILE* f, const plugin::Plugin* p)
{
plugin::Plugin::component_list components = p->Components();
plugin::Plugin::component_list::const_iterator it;

fprintf(f, "Components\n");
fprintf(f, "++++++++++\n\n");

for ( it = components.begin(); it != components.end(); ++it )
{
switch ( (*it)->Type() ) {
case plugin::component::ANALYZER:
WriteAnalyzerComponent(f,
dynamic_cast<const analyzer::Component*>(*it));
break;
case plugin::component::READER:
reporter->InternalError("docs for READER component unimplemented");
case plugin::component::WRITER:
reporter->InternalError("docs for WRITER component unimplemented");
default:
reporter->InternalError("docs for unknown component unimplemented");
}
}
}

static void WritePluginBifItems(FILE* f, const plugin::Plugin* p,
plugin::BifItem::Type t, const string& heading)
{
plugin::Plugin::bif_item_list bifitems = p->BifItems();
plugin::Plugin::bif_item_list::iterator it = bifitems.begin();

while ( it != bifitems.end() )
{
if ( it->GetType() != t )
it = bifitems.erase(it);
else
++it;
}

if ( bifitems.empty() )
return;

fprintf(f, "%s\n", heading.c_str());
for ( size_t i = 0; i < heading.size(); ++i )
fprintf(f, "+");
fprintf(f, "\n\n");

for ( it = bifitems.begin(); it != bifitems.end(); ++it )
{
BroDocObj* o = doc_ids[it->GetID()];

if ( o )
o->WriteReST(f);
else
reporter->Warning("No docs for ID: %s\n", it->GetID());
}
}

static void WriteAnalyzerTagDefn(FILE* f, EnumType* e)
{
e = new CommentedEnumType(e);
e->SetTypeID(copy_string("Analyzer::Tag"));

ID* dummy_id = new ID(copy_string("Analyzer::Tag"), SCOPE_GLOBAL, true);
dummy_id->SetType(e);
dummy_id->MakeType();

list<string>* r = new list<string>();
r->push_back("Unique identifiers for protocol analyzers.");

BroDocObj bdo(dummy_id, r, true);

bdo.WriteReST(f);
}

static bool IsAnalyzerPlugin(const plugin::Plugin* p)
{
plugin::Plugin::component_list components = p->Components();
plugin::Plugin::component_list::const_iterator it;

for ( it = components.begin(); it != components.end(); ++it )
if ( (*it)->Type() != plugin::component::ANALYZER )
return false;

return true;
}

void CreateProtoAnalyzerDoc(const char* filename)
{
FILE* f = fopen(filename, "w");

fprintf(f, "Protocol Analyzer Reference\n");
fprintf(f, "===========================\n\n");

WriteAnalyzerTagDefn(f, analyzer_mgr->GetTagEnumType());

plugin::Manager::plugin_list plugins = plugin_mgr->Plugins();
plugin::Manager::plugin_list::const_iterator it;

for ( it = plugins.begin(); it != plugins.end(); ++it )
{
if ( ! IsAnalyzerPlugin(*it) )
continue;

WritePluginSectionHeading(f, *it);
WritePluginComponents(f, *it);
WritePluginBifItems(f, *it, plugin::BifItem::CONSTANT,
"Options/Constants");
WritePluginBifItems(f, *it, plugin::BifItem::GLOBAL, "Globals");
WritePluginBifItems(f, *it, plugin::BifItem::TYPE, "Types");
WritePluginBifItems(f, *it, plugin::BifItem::EVENT, "Events");
WritePluginBifItems(f, *it, plugin::BifItem::FUNCTION, "Functions");
}

fclose(f);
}
src/BroDoc.h (230 changes)

@@ -81,15 +81,6 @@ public:
*/
void SetPacketFilter(const std::string& s);

/**
* Schedules documentation of a given set of ports being associated
* with a particular analyzer as a result of the current script
* being loaded -- the way the "dpd_config" table is changed.
* @param analyzer An analyzer that changed the "dpd_config" table.
* @param ports The set of ports assigned to the analyzer in table.
*/
void AddPortAnalysis(const std::string& analyzer, const std::string& ports);

/**
* Schedules documentation of a script option. An option is
* defined as any variable in the script that is declared 'const'

@@ -242,7 +233,115 @@ public:
return reST_filename.c_str();
}

protected:
typedef std::list<const BroDocObj*> BroDocObjList;
typedef std::map<std::string, BroDocObj*> BroDocObjMap;

/**
* Writes out a table of BroDocObj's to the reST document
* @param f The file to write to.
* @param l A list of BroDocObj pointers
*/
static void WriteBroDocObjTable(FILE* f, const BroDocObjList& l);

/**
* Writes out given number of characters to reST document
* @param f The file to write to.
* @param c the character to write
* @param n the number of characters to write
*/
static void WriteRepeatedChar(FILE* f, char c, size_t n);

/**
* A wrapper to fprintf() that always uses the reST document
* for the FILE* argument.
* @param f The file to write to.
* @param format A printf style format string.
*/
static void WriteToDoc(FILE* f, const char* format, ...);

/**
* Writes out a list of strings to the reST document.
* If the list is empty, prints a newline character.
* @param f The file to write to.
* @param format A printf style format string for elements of the list
* except for the last one in the list
* @param last_format A printf style format string to use for the last
* element of the list
* @param l A reference to a list of strings
*/
static void WriteStringList(FILE* f, const char* format, const char* last_format,
const std::list<std::string>& l);

/**
* @see WriteStringList(FILE* f, const char*, const char*,
* const std::list<std::string>&>)
*/
static void WriteStringList(FILE* f, const char* format,
const std::list<std::string>& l){
WriteStringList(f, format, format, l);
}

/**
* Writes out a list of BroDocObj objects to the reST document
* @param f The file to write to.
* @param l A list of BroDocObj pointers
* @param wantPublic If true, filter out objects that are not declared
* in the global scope. If false, filter out those that are in
* the global scope.
* @param heading The title of the section to create in the reST doc.
* @param underline The character to use to underline the reST
* section heading.
* @param isShort Whether to write the full documentation or a "short"
* version (a single sentence)
*/
static void WriteBroDocObjList(FILE* f, const BroDocObjList& l, bool wantPublic,
const char* heading, char underline,
bool isShort);

/**
* Wraps the BroDocObjMap into a BroDocObjList and the writes that list
* to the reST document
* @see WriteBroDocObjList(FILE* f, const BroDocObjList&, bool, const char*, char,
bool)
*/
static void WriteBroDocObjList(FILE* f, const BroDocObjMap& m, bool wantPublic,
const char* heading, char underline,
bool isShort);

/**
* Writes out a list of BroDocObj objects to the reST document
* @param l A list of BroDocObj pointers
* @param heading The title of the section to create in the reST doc.
* @param underline The character to use to underline the reST
* section heading.
*/
static void WriteBroDocObjList(FILE* f, const BroDocObjList& l, const char* heading,
char underline);

/**
* Writes out a list of BroDocObj objects to the reST document
* @param l A list of BroDocObj pointers
*/
static void WriteBroDocObjList(FILE* f, const BroDocObjList& l);

/**
* Wraps the BroDocObjMap into a BroDocObjList and the writes that list
* to the reST document
* @see WriteBroDocObjList(FILE* f, const BroDocObjList&, const char*, char)
*/
static void WriteBroDocObjList(FILE* f, const BroDocObjMap& m, const char* heading,
char underline);

/**
* Writes out a reST section heading
* @param f The file to write to.
* @param heading The title of the heading to create
* @param underline The character to use to underline the section title
* within the reST document
*/
static void WriteSectionHeading(FILE* f, const char* heading, char underline);

private:
FILE* reST_file;
std::string reST_filename;
std::string source_filename; // points to the basename of source file
@@ -255,9 +354,6 @@ protected:
std::list<std::string> imports;
std::list<std::string> port_analysis;

typedef std::list<const BroDocObj*> BroDocObjList;
typedef std::map<std::string, BroDocObj*> BroDocObjMap;

BroDocObjList options;
BroDocObjList constants;
BroDocObjList state_vars;

@@ -272,107 +368,6 @@ protected:
BroDocObjList all;

/**
* Writes out a list of strings to the reST document.
* If the list is empty, prints a newline character.
* @param format A printf style format string for elements of the list
* except for the last one in the list
* @param last_format A printf style format string to use for the last
* element of the list
* @param l A reference to a list of strings
*/
void WriteStringList(const char* format, const char* last_format,
const std::list<std::string>& l) const;

/**
* @see WriteStringList(const char*, const char*,
* const std::list<std::string>&>)
*/
void WriteStringList(const char* format,
const std::list<std::string>& l) const
{
WriteStringList(format, format, l);
}

/**
* Writes out a table of BroDocObj's to the reST document
* @param l A list of BroDocObj pointers
*/
void WriteBroDocObjTable(const BroDocObjList& l) const;

/**
* Writes out a list of BroDocObj objects to the reST document
* @param l A list of BroDocObj pointers
* @param wantPublic If true, filter out objects that are not declared
* in the global scope. If false, filter out those that are in
* the global scope.
* @param heading The title of the section to create in the reST doc.
* @param underline The character to use to underline the reST
* section heading.
* @param isShort Whether to write the full documentation or a "short"
* version (a single sentence)
*/
void WriteBroDocObjList(const BroDocObjList& l, bool wantPublic,
const char* heading, char underline,
bool isShort) const;

/**
* Wraps the BroDocObjMap into a BroDocObjList and the writes that list
* to the reST document
* @see WriteBroDocObjList(const BroDocObjList&, bool, const char*, char,
bool)
*/
void WriteBroDocObjList(const BroDocObjMap& m, bool wantPublic,
const char* heading, char underline,
bool isShort) const;

/**
* Writes out a list of BroDocObj objects to the reST document
* @param l A list of BroDocObj pointers
* @param heading The title of the section to create in the reST doc.
* @param underline The character to use to underline the reST
* section heading.
*/
void WriteBroDocObjList(const BroDocObjList& l, const char* heading,
char underline) const;

/**
* Writes out a list of BroDocObj objects to the reST document
* @param l A list of BroDocObj pointers
*/
void WriteBroDocObjList(const BroDocObjList& l) const;

/**
* Wraps the BroDocObjMap into a BroDocObjList and the writes that list
* to the reST document
* @see WriteBroDocObjList(const BroDocObjList&, const char*, char)
*/
void WriteBroDocObjList(const BroDocObjMap& m, const char* heading,
char underline) const;

/**
* A wrapper to fprintf() that always uses the reST document
* for the FILE* argument.
* @param format A printf style format string.
*/
void WriteToDoc(const char* format, ...) const;

/**
* Writes out a reST section heading
* @param heading The title of the heading to create
* @param underline The character to use to underline the section title
* within the reST document
*/
void WriteSectionHeading(const char* heading, char underline) const;

/**
* Writes out given number of characters to reST document
* @param c the character to write
* @param n the number of characters to write
*/
void WriteRepeatedChar(char c, size_t n) const;

/**
* Writes out the reST for either the script's public or private interface
* @param heading The title of the interfaces section heading

@@ -387,7 +382,6 @@ protected:
*/
void WriteInterface(const char* heading, char underline, char subunderline,
bool isPublic, bool isShort) const;
private:

/**
* Frees memory allocated to BroDocObj's objects in a given list.

@@ -413,4 +407,10 @@ private:
};
};

/**
* Writes out plugin index documentation for all analyzer plugins.
* @param filename the name of the file to write.
*/
void CreateProtoAnalyzerDoc(const char* filename);

#endif
@@ -4,6 +4,8 @@
#include "ID.h"
#include "BroDocObj.h"

map<string, BroDocObj*> doc_ids = map<string, BroDocObj*>();

BroDocObj* BroDocObj::last = 0;

BroDocObj::BroDocObj(const ID* id, std::list<std::string>*& reST,

@@ -16,6 +18,7 @@ BroDocObj::BroDocObj(const ID* id, std::list<std::string>*& reST,
is_fake_id = is_fake;
use_role = 0;
FormulateShortDesc();
doc_ids[id->Name()] = this;
}

BroDocObj::~BroDocObj()
@@ -4,6 +4,7 @@
#include <cstdio>
#include <string>
#include <list>
#include <map>

#include "ID.h"

@@ -134,4 +135,9 @@ protected:
private:
};

/**
* Map identifiers to their broxygen documentation objects.
*/
extern map<string, BroDocObj*> doc_ids;

#endif
@@ -114,7 +114,6 @@ set(BIF_SRCS
logging.bif
input.bif
event.bif
file_analysis.bif
const.bif
types.bif
strings.bif

@@ -150,6 +149,7 @@ set(bro_SUBDIR_LIBS CACHE INTERNAL "subdir libraries" FORCE)
set(bro_PLUGIN_LIBS CACHE INTERNAL "plugin libraries" FORCE)

add_subdirectory(analyzer)
add_subdirectory(file_analysis)

set(bro_SUBDIRS
${bro_SUBDIR_LIBS}

@@ -359,21 +359,12 @@ set(bro_SRCS
input/readers/Binary.cc
input/readers/SQLite.cc

file_analysis/Manager.cc
file_analysis/File.cc
file_analysis/FileTimer.cc
file_analysis/FileID.h
file_analysis/Analyzer.h
file_analysis/AnalyzerSet.cc
file_analysis/Extract.cc
file_analysis/Hash.cc
file_analysis/DataEvent.cc

3rdparty/sqlite3.c

plugin/Component.cc
plugin/Manager.cc
plugin/Plugin.cc
plugin/Macros.h

nb_dns.c
digest.h

@@ -395,7 +386,8 @@ set(BRO_EXE bro
CACHE STRING "Bro executable binary" FORCE)

# Target to create all the autogenerated files.
add_custom_target(generate_outputs DEPENDS ${bro_ALL_GENERATED_OUTPUTS})
add_custom_target(generate_outputs)
add_dependencies(generate_outputs ${bro_ALL_GENERATED_OUTPUTS})

# Build __load__.bro files for plugins/*.bif.bro.
bro_bif_create_loader(bif_loader_plugins ${CMAKE_BINARY_DIR}/scripts/base/bif/plugins)
@@ -553,14 +553,12 @@ void builtin_error(const char* msg, BroObj* arg)
#include "input.bif.func_h"
#include "reporter.bif.func_h"
#include "strings.bif.func_h"
#include "file_analysis.bif.func_h"

#include "bro.bif.func_def"
#include "logging.bif.func_def"
#include "input.bif.func_def"
#include "reporter.bif.func_def"
#include "strings.bif.func_def"
#include "file_analysis.bif.func_def"

void init_builtin_funcs()
{

@@ -575,7 +573,6 @@ void init_builtin_funcs()
#include "input.bif.func_init"
#include "reporter.bif.func_init"
#include "strings.bif.func_init"
#include "file_analysis.bif.func_init"

did_builtin_init = true;
}
@@ -250,7 +250,6 @@ OpaqueType* bloomfilter_type;
#include "logging.bif.netvar_def"
#include "input.bif.netvar_def"
#include "reporter.bif.netvar_def"
#include "file_analysis.bif.netvar_def"

void init_event_handlers()
{

@@ -319,7 +318,6 @@ void init_net_var()
#include "logging.bif.netvar_init"
#include "input.bif.netvar_init"
#include "reporter.bif.netvar_init"
#include "file_analysis.bif.netvar_init"

conn_id = internal_type("conn_id")->AsRecordType();
endpoint = internal_type("endpoint")->AsRecordType();
@@ -261,6 +261,5 @@ extern void init_net_var();
#include "logging.bif.netvar_h"
#include "input.bif.netvar_h"
#include "reporter.bif.netvar_h"
#include "file_analysis.bif.netvar_h"

#endif
src/Type.cc (10 changes)

@@ -1334,6 +1334,16 @@ EnumType::EnumType(const string& arg_name)
counter = 0;
}

EnumType::EnumType(EnumType* e)
: BroType(TYPE_ENUM)
{
name = e->name;
counter = e->counter;

for ( NameMap::iterator it = e->names.begin(); it != e->names.end(); ++it )
names[copy_string(it->first)] = it->second;
}

EnumType::~EnumType()
{
for ( NameMap::iterator iter = names.begin(); iter != names.end(); ++iter )
@@ -523,6 +523,7 @@ protected:
class EnumType : public BroType {
public:
EnumType(const string& arg_name);
EnumType(EnumType* e);
~EnumType();

// The value of this name is next internal counter value, starting

@@ -567,6 +568,7 @@ protected:
class CommentedEnumType: public EnumType {
public:
CommentedEnumType(const string& arg_name) : EnumType(arg_name) {}
CommentedEnumType(EnumType* e) : EnumType(e) {}
~CommentedEnumType();

void DescribeReST(ODesc* d) const;
@@ -156,6 +156,12 @@ static void make_var(ID* id, BroType* t, init_class c, Expr* init,

if ( do_init )
{
if ( c == INIT_NONE && dt == VAR_REDEF && t->IsTable() &&
init && init->Tag() == EXPR_ASSIGN )
// e.g. 'redef foo["x"] = 1' is missing an init class, but the
// intention clearly isn't to overwrite entire existing table val.
c = INIT_EXTRA;

if ( (c == INIT_EXTRA && id->FindAttr(ATTR_ADD_FUNC)) ||
(c == INIT_REMOVE && id->FindAttr(ATTR_DEL_FUNC)) )
// Just apply the function.
@@ -4,26 +4,12 @@
#include "Manager.h"

#include "../Desc.h"
#include "../util.h"

using namespace analyzer;

Tag::type_t Component::type_counter = 0;

static const char* canonify_name(const char* name)
{
unsigned int len = strlen(name);
char* nname = new char[len + 1];

for ( unsigned int i = 0; i < len; i++ )
{
char c = isalnum(name[i]) ? name[i] : '_';
nname[i] = toupper(c);
}

nname[len] = '\0';
return nname;
}

Component::Component(const char* arg_name, factory_callback arg_factory, Tag::subtype_t arg_subtype, bool arg_enabled, bool arg_partial)
: plugin::Component(plugin::component::ANALYZER)
{

@@ -58,7 +44,7 @@ analyzer::Tag Component::Tag() const
return tag;
}

void Component::Describe(ODesc* d)
void Component::Describe(ODesc* d) const
{
plugin::Component::Describe(d);
d->Add(name);
@@ -23,7 +23,6 @@ class Analyzer;
*/
class Component : public plugin::Component {
public:
typedef bool (*available_callback)();
typedef Analyzer* (*factory_callback)(Connection* conn);

/**

@@ -73,7 +72,7 @@ public:
* from what's passed to the constructor but upper-cased and
* canonified to allow being part of a script-level ID.
*/
const char* Name() const { return name; }
virtual const char* Name() const { return name; }

/**
* Returns a canonocalized version of the analyzer's name. The

@@ -120,7 +119,7 @@ public:
* Generates a human-readable description of the component's main
* parameters. This goes into the output of \c "bro -NN".
*/
virtual void Describe(ODesc* d);
virtual void Describe(ODesc* d) const;

Component& operator=(const Component& other);
@@ -24,7 +24,7 @@ Tag::Tag(type_t arg_type, subtype_t arg_subtype)

Tag::Tag(EnumVal* arg_val)
{
assert(val);
assert(arg_val);

val = arg_val;
Ref(val);
@@ -8,6 +8,11 @@

class EnumVal;

namespace file_analysis {
class Manager;
class Component;
}

namespace analyzer {

class Manager;

@@ -24,7 +29,7 @@ class Component;
* subtype form an analyzer "tag". Each unique tag corresponds to a single
* "analyzer" from the user's perspective. At the script layer, these tags
* are mapped into enums of type \c Analyzer::Tag. Internally, the
* analyzer::Mangager maintains the mapping of tag to analyzer (and it also
* analyzer::Manager maintains the mapping of tag to analyzer (and it also
* assigns them their main types), and analyzer::Component creates new
* tags.
*

@@ -121,9 +126,11 @@ public:
protected:
friend class analyzer::Manager;
friend class analyzer::Component;
friend class file_analysis::Manager;
friend class file_analysis::Component;

/**
* Constructor. Note
* Constructor.
*
* @param type The main type. Note that the \a analyzer::Manager
* manages the value space internally, so noone else should assign
@@ -22,7 +22,7 @@ static RecordType* bittorrent_benc_value;
static TableType* bittorrent_benc_dir;

BitTorrentTracker_Analyzer::BitTorrentTracker_Analyzer(Connection* c)
: tcp::TCP_ApplicationAnalyzer("BITTORRENT", c)
: tcp::TCP_ApplicationAnalyzer("BITTORRENTTRACKER", c)
{
if ( ! bt_tracker_headers )
{
@@ -7,6 +7,6 @@
BRO_PLUGIN_BEGIN(Bro, BitTorrent)
BRO_PLUGIN_DESCRIPTION("BitTorrent Analyzer");
BRO_PLUGIN_ANALYZER("BitTorrent", bittorrent::BitTorrent_Analyzer);
BRO_PLUGIN_ANALYZER("BitTorrentTracker", bittorrent::BitTorrent_Analyzer);
BRO_PLUGIN_ANALYZER("BitTorrentTracker", bittorrent::BitTorrentTracker_Analyzer);
BRO_PLUGIN_BIF_FILE(events);
BRO_PLUGIN_END
@@ -693,7 +693,7 @@ refine connection SSL_Conn += {
head2 : uint8) : int
%{
if ( head0 >= 20 && head0 <= 23 &&
head1 == 0x03 && head2 < 0x03 )
head1 == 0x03 && head2 <= 0x03 )
// This is most probably SSL version 3.
return (head1 << 8) | head2;
@@ -23,5 +23,3 @@ const Tunnel::delay_gtp_confirmation: bool;
const Tunnel::ip_tunnel_timeout: interval;

const Threading::heartbeat_interval: interval;

const FileAnalysis::salt: string;
@@ -920,7 +920,7 @@ event file_over_new_connection%(f: fa_file, c: connection%);
## f: The file.
##
## .. bro:see:: file_new file_over_new_connection file_gap file_state_remove
## default_file_timeout_interval FileAnalysis::postpone_timeout
## default_file_timeout_interval FileAnalysis::set_timeout_interval
## FileAnalysis::set_timeout_interval
event file_timeout%(f: fa_file%);

@@ -942,19 +942,6 @@ event file_gap%(f: fa_file, offset: count, len: count%);
## .. bro:see:: file_new file_over_new_connection file_timeout file_gap
event file_state_remove%(f: fa_file%);

## This event is generated each time file analysis generates a digest of the
## file contents.
##
## f: The file.
##
## kind: The type of digest algorithm.
##
## hash: The result of the hashing.
##
## .. bro:see:: FileAnalysis::add_analyzer FileAnalysis::ANALYZER_MD5
## FileAnalysis::ANALYZER_SHA1 FileAnalysis::ANALYZER_SHA256
event file_hash%(f: fa_file, kind: string, hash: string%);

## Generated when an internal DNS lookup produces the same result as last time.
## Bro keeps an internal DNS cache for host names and IP addresses it has
## already resolved. This event is generated when a subsequent lookup returns
@@ -1,127 +0,0 @@
##! Internal functions and types used by the logging framework.

module FileAnalysis;

%%{
#include "file_analysis/Manager.h"
%%}

type AnalyzerArgs: record;

## An enumeration of various file analysis actions that can be taken.
enum Analyzer %{

## Extract a file to local filesystem
ANALYZER_EXTRACT,

## Calculate an MD5 digest of the file's contents.
ANALYZER_MD5,

## Calculate an SHA1 digest of the file's contents.
ANALYZER_SHA1,

## Calculate an SHA256 digest of the file's contents.
ANALYZER_SHA256,

## Deliver the file contents to the script-layer in an event.
ANALYZER_DATA_EVENT,
%}

## :bro:see:`FileAnalysis::postpone_timeout`.
function FileAnalysis::__postpone_timeout%(file_id: string%): bool
%{
using file_analysis::FileID;
bool result = file_mgr->PostponeTimeout(FileID(file_id->CheckString()));
return new Val(result, TYPE_BOOL);
%}

## :bro:see:`FileAnalysis::set_timeout_interval`.
function FileAnalysis::__set_timeout_interval%(file_id: string, t: interval%): bool
%{
using file_analysis::FileID;
bool result = file_mgr->SetTimeoutInterval(FileID(file_id->CheckString()),
t);
return new Val(result, TYPE_BOOL);
%}

## :bro:see:`FileAnalysis::add_analyzer`.
function FileAnalysis::__add_analyzer%(file_id: string, args: any%): bool
%{
using file_analysis::FileID;
using BifType::Record::FileAnalysis::AnalyzerArgs;
RecordVal* rv = args->AsRecordVal()->CoerceTo(AnalyzerArgs);
bool result = file_mgr->AddAnalyzer(FileID(file_id->CheckString()), rv);
Unref(rv);
return new Val(result, TYPE_BOOL);
%}

## :bro:see:`FileAnalysis::remove_analyzer`.
function FileAnalysis::__remove_analyzer%(file_id: string, args: any%): bool
%{
using file_analysis::FileID;
using BifType::Record::FileAnalysis::AnalyzerArgs;
RecordVal* rv = args->AsRecordVal()->CoerceTo(AnalyzerArgs);
bool result = file_mgr->RemoveAnalyzer(FileID(file_id->CheckString()), rv);
Unref(rv);
return new Val(result, TYPE_BOOL);
%}

## :bro:see:`FileAnalysis::stop`.
function FileAnalysis::__stop%(file_id: string%): bool
%{
using file_analysis::FileID;
bool result = file_mgr->IgnoreFile(FileID(file_id->CheckString()));
return new Val(result, TYPE_BOOL);
%}

## :bro:see:`FileAnalysis::data_stream`.
function FileAnalysis::__data_stream%(source: string, data: string%): any
%{
file_mgr->DataIn(data->Bytes(), data->Len(), source->CheckString());
return 0;
%}

## :bro:see:`FileAnalysis::data_chunk`.
function FileAnalysis::__data_chunk%(source: string, data: string,
offset: count%): any
%{
file_mgr->DataIn(data->Bytes(), data->Len(), offset, source->CheckString());
return 0;
%}

## :bro:see:`FileAnalysis::gap`.
function FileAnalysis::__gap%(source: string, offset: count, len: count%): any
%{
file_mgr->Gap(offset, len, source->CheckString());
return 0;
%}

## :bro:see:`FileAnalysis::set_size`.
function FileAnalysis::__set_size%(source: string, size: count%): any
%{
file_mgr->SetSize(size, source->CheckString());
return 0;
%}

## :bro:see:`FileAnalysis::eof`.
function FileAnalysis::__eof%(source: string%): any
%{
file_mgr->EndOfFile(source->CheckString());
return 0;
%}

module GLOBAL;

## For use within a :bro:see:`get_file_handle` handler to set a unique
## identifier to associate with the current input to the file analysis
## framework. Using an empty string for the handle signifies that the
## input will be ignored/discarded.
##
## handle: A string that uniquely identifies a file.
##
## .. bro:see:: get_file_handle
function set_file_handle%(handle: string%): any
%{
file_mgr->SetHandle(handle->CheckString());
return 0;
%}
@ -5,10 +5,13 @@
#include "Val.h"
#include "NetVar.h"
#include "analyzer/Tag.h"

#include "file_analysis/file_analysis.bif.h"

namespace file_analysis {

typedef BifEnum::FileAnalysis::Analyzer FA_Tag;
typedef int FA_Tag;

class File;

@ -17,6 +20,11 @@ class File;
|
|||
*/
|
||||
class Analyzer {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Destructor. Nothing special about it. Virtual since we definitely expect
|
||||
* to delete instances of derived classes via pointers to this class.
|
||||
*/
|
||||
virtual ~Analyzer()
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Destroy file analyzer %d", tag);
|
||||
|
@ -24,7 +32,10 @@ public:
|
|||
}
|
||||
|
||||
/**
|
||||
* Subclasses may override this to receive file data non-sequentially.
|
||||
* Subclasses may override this method to receive file data non-sequentially.
|
||||
* @param data points to start of a chunk of file data.
|
||||
* @param len length in bytes of the chunk of data pointed to by \a data.
|
||||
* @param offset the byte offset within full file that data chunk starts.
|
||||
* @return true if the analyzer is still in a valid state to continue
|
||||
* receiving data/events or false if it's essentially "done".
|
||||
*/
|
||||
|
@ -32,7 +43,9 @@ public:
|
|||
{ return true; }
|
||||
|
||||
/**
|
||||
* Subclasses may override this to receive file sequentially.
|
||||
* Subclasses may override this method to receive file sequentially.
|
||||
* @param data points to start of the next chunk of file data.
|
||||
* @param len length in bytes of the chunk of data pointed to by \a data.
|
||||
* @return true if the analyzer is still in a valid state to continue
|
||||
* receiving data/events or false if it's essentially "done".
|
||||
*/
|
||||
|
@ -40,7 +53,7 @@ public:
|
|||
{ return true; }
|
||||
|
||||
/**
|
||||
* Subclasses may override this to specifically handle an EOF signal,
|
||||
* Subclasses may override this method to specifically handle an EOF signal,
|
||||
* which means no more data is going to be incoming and the analyzer
|
||||
* may be deleted/cleaned up soon.
|
||||
* @return true if the analyzer is still in a valid state to continue
|
||||
|
@ -50,7 +63,10 @@ public:
|
|||
{ return true; }
|
||||
|
||||
/**
|
||||
* Subclasses may override this to handle missing data in a file stream.
|
||||
* Subclasses may override this method to handle missing data in a file.
|
||||
* @param offset the byte offset within full file at which the missing
|
||||
* data chunk occurs.
|
||||
* @param len the number of missing bytes.
|
||||
* @return true if the analyzer is still in a valid state to continue
|
||||
* receiving data/events or false if it's essentially "done".
|
||||
*/
|
||||
|
@ -73,17 +89,25 @@ public:
|
|||
File* GetFile() const { return file; }
|
||||
|
||||
/**
|
||||
* Retrieves an analyzer tag field from full analyzer argument record.
|
||||
* @param args an \c AnalyzerArgs (script-layer type) value.
|
||||
* @return the analyzer tag equivalent of the 'tag' field from the
|
||||
* AnalyzerArgs value \a args.
|
||||
* \c AnalyzerArgs value \a args.
|
||||
*/
|
||||
static FA_Tag ArgsTag(const RecordVal* args)
|
||||
{
|
||||
using BifType::Record::FileAnalysis::AnalyzerArgs;
|
||||
return static_cast<FA_Tag>(
|
||||
args->Lookup(AnalyzerArgs->FieldOffset("tag"))->AsEnum());
|
||||
return args->Lookup(AnalyzerArgs->FieldOffset("tag"))->AsEnum();
|
||||
}
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Constructor. Only derived classes are meant to be instantiated.
|
||||
* @param arg_args an \c AnalyzerArgs (script-layer type) value specifying
|
||||
* tunable options, if any, related to a particular analyzer type.
|
||||
* @param arg_file the file to which the analyzer is being attached.
|
||||
*/
|
||||
Analyzer(RecordVal* arg_args, File* arg_file)
|
||||
: tag(file_analysis::Analyzer::ArgsTag(arg_args)),
|
||||
args(arg_args->Ref()->AsRecordVal()),
|
||||
|
@ -91,13 +115,11 @@ protected:
|
|||
{}
|
||||
|
||||
private:
|
||||
FA_Tag tag;
|
||||
RecordVal* args;
|
||||
File* file;
|
||||
};
|
||||
|
||||
typedef file_analysis::Analyzer* (*AnalyzerInstantiator)(RecordVal* args,
|
||||
File* file);
|
||||
FA_Tag tag; /**< The particular analyzer type of the analyzer instance. */
|
||||
RecordVal* args; /**< \c AnalyzerArgs val gives tunable analyzer params. */
|
||||
File* file; /**< The file to which the analyzer is attached. */
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
|
|
|
@ -3,21 +3,10 @@
|
|||
#include "AnalyzerSet.h"
|
||||
#include "File.h"
|
||||
#include "Analyzer.h"
|
||||
#include "Extract.h"
|
||||
#include "DataEvent.h"
|
||||
#include "Hash.h"
|
||||
#include "Manager.h"
|
||||
|
||||
using namespace file_analysis;
|
||||
|
||||
// keep in order w/ declared enum values in file_analysis.bif
|
||||
static AnalyzerInstantiator analyzer_factory[] = {
|
||||
file_analysis::Extract::Instantiate,
|
||||
file_analysis::MD5::Instantiate,
|
||||
file_analysis::SHA1::Instantiate,
|
||||
file_analysis::SHA256::Instantiate,
|
||||
file_analysis::DataEvent::Instantiate,
|
||||
};
|
||||
|
||||
static void analyzer_del_func(void* v)
|
||||
{
|
||||
delete (file_analysis::Analyzer*) v;
|
||||
|
@ -154,14 +143,13 @@ HashKey* AnalyzerSet::GetKey(const RecordVal* args) const
|
|||
|
||||
file_analysis::Analyzer* AnalyzerSet::InstantiateAnalyzer(RecordVal* args) const
|
||||
{
|
||||
file_analysis::Analyzer* a =
|
||||
analyzer_factory[file_analysis::Analyzer::ArgsTag(args)](args, file);
|
||||
FA_Tag tag = file_analysis::Analyzer::ArgsTag(args);
|
||||
file_analysis::Analyzer* a = file_mgr->InstantiateAnalyzer(tag, args, file);
|
||||
|
||||
if ( ! a )
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Instantiate analyzer %d failed for file id",
|
||||
" %s", file_analysis::Analyzer::ArgsTag(args),
|
||||
file->GetID().c_str());
|
||||
reporter->Error("Failed file analyzer %s instantiation for file id %s",
|
||||
file_mgr->GetAnalyzerName(tag), file->GetID().c_str());
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
|
|
@ -16,67 +16,144 @@ class File;
|
|||
declare(PDict,Analyzer);
|
||||
|
||||
/**
|
||||
* A set of file analysis analyzers indexed by AnalyzerArgs. Allows queueing
|
||||
* of addition/removals so that those modifications can happen at well-defined
|
||||
* times (e.g. to make sure a loop iterator isn't invalidated).
|
||||
* A set of file analysis analyzers indexed by an \c AnalyzerArgs (script-layer
|
||||
* type) value. Allows queueing of addition/removals so that those
|
||||
* modifications can happen at well-defined times (e.g. to make sure a loop
|
||||
* iterator isn't invalidated).
|
||||
*/
|
||||
class AnalyzerSet {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Constructor. Nothing special.
|
||||
* @param arg_file the file to which all analyzers in the set are attached.
|
||||
*/
|
||||
AnalyzerSet(File* arg_file);
|
||||
|
||||
/**
|
||||
* Destructor. Any queued analyzer additions/removals are aborted and
|
||||
* will not occur.
|
||||
*/
|
||||
~AnalyzerSet();
|
||||
|
||||
/**
|
||||
* Attach an analyzer to #file immediately.
|
||||
* @param args an \c AnalyzerArgs value which specifies an analyzer.
|
||||
* @return true if analyzer was instantiated/attached, else false.
|
||||
*/
|
||||
bool Add(RecordVal* args);
|
||||
|
||||
/**
|
||||
* Queue the attachment of an analyzer to #file.
|
||||
* @param args an \c AnalyzerArgs value which specifies an analyzer.
|
||||
* @return true if analyzer was able to be instantiated, else false.
|
||||
*/
|
||||
bool QueueAdd(RecordVal* args);
|
||||
|
||||
/**
|
||||
* Remove an analyzer from #file immediately.
|
||||
* @param args an \c AnalyzerArgs value which specifies an analyzer.
|
||||
* @return false if analyzer didn't exist and so wasn't removed, else true.
|
||||
*/
|
||||
bool Remove(const RecordVal* args);
|
||||
|
||||
/**
|
||||
* Queue the removal of an analyzer from #file.
|
||||
* @param args an \c AnalyzerArgs value which specifies an analyzer.
|
||||
* @return true if analyzer exists at time of call, else false.
|
||||
*/
|
||||
bool QueueRemove(const RecordVal* args);
|
||||
|
||||
/**
|
||||
* Perform all queued modifications to the currently active analyzers.
|
||||
* Perform all queued modifications to the current analyzer set.
|
||||
*/
|
||||
void DrainModifications();
|
||||
|
||||
/**
|
||||
* Prepare the analyzer set to be iterated over.
|
||||
* @see Dictionary#InitForIteration
|
||||
* @return an iterator that may be used to loop over analyzers in the set.
|
||||
*/
|
||||
IterCookie* InitForIteration() const
|
||||
{ return analyzer_map.InitForIteration(); }
|
||||
|
||||
/**
|
||||
* Get next entry in the analyzer set.
|
||||
* @see Dictionary#NextEntry
|
||||
* @param c a set iterator.
|
||||
* @return the next analyzer in the set or a null pointer if there is no
|
||||
* more left (in that case the cookie is also deleted).
|
||||
*/
|
||||
file_analysis::Analyzer* NextEntry(IterCookie* c)
|
||||
{ return analyzer_map.NextEntry(c); }
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Get a hash key which represents an analyzer instance.
|
||||
* @param args an \c AnalyzerArgs value which specifies an analyzer.
|
||||
* @return the hash key calculated from \a args
|
||||
*/
|
||||
HashKey* GetKey(const RecordVal* args) const;
|
||||
|
||||
/**
|
||||
* Create an instance of a file analyzer.
|
||||
* @param args an \c AnalyzerArgs value which specifies an analyzer.
|
||||
* @return a new file analyzer instance.
|
||||
*/
|
||||
file_analysis::Analyzer* InstantiateAnalyzer(RecordVal* args) const;
|
||||
|
||||
/**
|
||||
* Insert an analyzer instance in to the set.
|
||||
* @param a an analyzer instance.
|
||||
* @param key the hash key which represents the analyzer's \c AnalyzerArgs.
|
||||
*/
|
||||
void Insert(file_analysis::Analyzer* a, HashKey* key);
|
||||
|
||||
/**
|
||||
* Remove an analyzer instance from the set.
|
||||
* @param tag enumerator which specifies type of the analyzer to remove,
|
||||
* just used for debugging messages.
|
||||
* @param key the hash key which represents the analyzer's \c AnalyzerArgs.
|
||||
*/
|
||||
bool Remove(FA_Tag tag, HashKey* key);
|
||||
|
||||
private:
|
||||
File* file;
|
||||
|
||||
File* file; /**< File which owns the set */
|
||||
CompositeHash* analyzer_hash; /**< AnalyzerArgs hashes. */
|
||||
PDict(file_analysis::Analyzer) analyzer_map; /**< Indexed by AnalyzerArgs. */
|
||||
|
||||
/**
|
||||
* Abstract base class for analyzer set modifications.
|
||||
*/
|
||||
class Modification {
|
||||
public:
|
||||
virtual ~Modification() {}
|
||||
|
||||
/**
|
||||
* Perform the modification on an analyzer set.
|
||||
* @param set the analyzer set on which the modification will happen.
|
||||
* @return true if the modification altered \a set.
|
||||
*/
|
||||
virtual bool Perform(AnalyzerSet* set) = 0;
|
||||
|
||||
/**
|
||||
* Don't perform the modification on the analyzer set and clean up.
|
||||
*/
|
||||
virtual void Abort() = 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents a request to add an analyzer to an analyzer set.
|
||||
*/
|
||||
class AddMod : public Modification {
|
||||
public:
|
||||
/**
|
||||
* Construct request which can add an analyzer to an analyzer set.
|
||||
* @param arg_a an analyzer instance to add to an analyzer set.
|
||||
* @param arg_key hash key representing the analyzer's \c AnalyzerArgs.
|
||||
*/
|
||||
AddMod(file_analysis::Analyzer* arg_a, HashKey* arg_key)
|
||||
: Modification(), a(arg_a), key(arg_key) {}
|
||||
virtual ~AddMod() {}
|
||||
|
@ -88,8 +165,16 @@ private:
|
|||
HashKey* key;
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents a request to remove an analyzer from an analyzer set.
|
||||
*/
|
||||
class RemoveMod : public Modification {
|
||||
public:
|
||||
/**
|
||||
* Construct request which can remove an analyzer from an analyzer set.
|
||||
* @param arg_tag the tag of the analyzer to remove from an analyzer set.
|
||||
* @param arg_key hash key representing the analyzer's \c AnalyzerArgs.
|
||||
*/
|
||||
RemoveMod(FA_Tag arg_tag, HashKey* arg_key)
|
||||
: Modification(), tag(arg_tag), key(arg_key) {}
|
||||
virtual ~RemoveMod() {}
|
||||
|
@ -102,7 +187,7 @@ private:
|
|||
};
|
||||
|
||||
typedef queue<Modification*> ModQueue;
|
||||
ModQueue mod_queue;
|
||||
ModQueue mod_queue; /**< A queue of analyzer additions/removals requests. */
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
|
22
src/file_analysis/CMakeLists.txt
Normal file
|
@ -0,0 +1,22 @@
|
|||
include(BroSubdir)
|
||||
|
||||
include_directories(BEFORE
|
||||
${CMAKE_CURRENT_SOURCE_DIR}
|
||||
${CMAKE_CURRENT_BINARY_DIR}
|
||||
)
|
||||
|
||||
add_subdirectory(analyzer)
|
||||
|
||||
set(file_analysis_SRCS
|
||||
Manager.cc
|
||||
File.cc
|
||||
FileTimer.cc
|
||||
Analyzer.h
|
||||
AnalyzerSet.cc
|
||||
Component.cc
|
||||
)
|
||||
|
||||
bif_target(file_analysis.bif)
|
||||
|
||||
bro_add_subdir_library(file_analysis ${file_analysis_SRCS} ${BIF_OUTPUT_CC})
|
||||
add_dependencies(bro_file_analysis generate_outputs)
|
69
src/file_analysis/Component.cc
Normal file
|
@ -0,0 +1,69 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#include "Component.h"
|
||||
#include "Manager.h"
|
||||
|
||||
#include "../Desc.h"
|
||||
#include "../util.h"
|
||||
|
||||
using namespace file_analysis;
|
||||
|
||||
analyzer::Tag::type_t Component::type_counter = 0;
|
||||
|
||||
Component::Component(const char* arg_name, factory_callback arg_factory,
|
||||
analyzer::Tag::subtype_t arg_subtype)
|
||||
: plugin::Component(plugin::component::FILE_ANALYZER)
|
||||
{
|
||||
name = copy_string(arg_name);
|
||||
canon_name = canonify_name(arg_name);
|
||||
factory = arg_factory;
|
||||
|
||||
tag = analyzer::Tag(++type_counter, arg_subtype);
|
||||
}
|
||||
|
||||
Component::Component(const Component& other)
|
||||
: plugin::Component(Type())
|
||||
{
|
||||
name = copy_string(other.name);
|
||||
canon_name = copy_string(other.canon_name);
|
||||
factory = other.factory;
|
||||
tag = other.tag;
|
||||
}
|
||||
|
||||
Component::~Component()
|
||||
{
|
||||
delete [] name;
|
||||
delete [] canon_name;
|
||||
}
|
||||
|
||||
analyzer::Tag Component::Tag() const
|
||||
{
|
||||
return tag;
|
||||
}
|
||||
|
||||
void Component::Describe(ODesc* d) const
|
||||
{
|
||||
plugin::Component::Describe(d);
|
||||
d->Add(name);
|
||||
d->Add(" (");
|
||||
|
||||
if ( factory )
|
||||
{
|
||||
d->Add("ANALYZER_");
|
||||
d->Add(canon_name);
|
||||
}
|
||||
|
||||
d->Add(")");
|
||||
}
|
||||
|
||||
Component& Component::operator=(const Component& other)
|
||||
{
|
||||
if ( &other != this )
|
||||
{
|
||||
name = copy_string(other.name);
|
||||
factory = other.factory;
|
||||
tag = other.tag;
|
||||
}
|
||||
|
||||
return *this;
|
||||
}
|
109
src/file_analysis/Component.h
Normal file
|
@ -0,0 +1,109 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYZER_PLUGIN_COMPONENT_H
|
||||
#define FILE_ANALYZER_PLUGIN_COMPONENT_H
|
||||
|
||||
#include "analyzer/Tag.h"
|
||||
#include "plugin/Component.h"
|
||||
|
||||
#include "Val.h"
|
||||
|
||||
#include "../config.h"
|
||||
#include "../util.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
class File;
|
||||
class Analyzer;
|
||||
|
||||
/**
|
||||
* Component description for plugins providing file analyzers.
|
||||
*
|
||||
* A plugin can provide a specific file analyzer by registering this
|
||||
* analyzer component, describing the analyzer.
|
||||
*/
|
||||
class Component : public plugin::Component {
|
||||
public:
|
||||
typedef Analyzer* (*factory_callback)(RecordVal* args, File* file);
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param name The name of the provided analyzer. This name is used
|
||||
* across the system to identify the analyzer, e.g., when calling
|
||||
* file_analysis::Manager::InstantiateAnalyzer with a name.
|
||||
*
|
||||
* @param factory A factory function to instantiate instances of the
|
||||
* analyzer's class, which must be derived directly or indirectly
|
||||
* from file_analysis::Analyzer. This is typically a static \c
|
||||
* Instantiate() method inside the class that just allocates and
|
||||
* returns a new instance.
|
||||
*
|
||||
* @param subtype A subtype associated with this component that
|
||||
* further distinguishes it. The subtype will be integrated into
|
||||
* the analyzer::Tag that the manager associates with this analyzer,
|
||||
* and analyzer instances can accordingly access it via analyzer::Tag().
|
||||
* If not used, leave at zero.
|
||||
*/
|
||||
Component(const char* name, factory_callback factory,
|
||||
analyzer::Tag::subtype_t subtype = 0);
|
||||
|
||||
/**
|
||||
* Copy constructor.
|
||||
*/
|
||||
Component(const Component& other);
|
||||
|
||||
/**
|
||||
* Destructor.
|
||||
*/
|
||||
~Component();
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer. This name is unique across all
|
||||
* analyzers and used to identify it. The returned name is derived
|
||||
* from what's passed to the constructor but upper-cased and
|
||||
* canonified to allow being part of a script-level ID.
|
||||
*/
|
||||
virtual const char* Name() const { return name; }
|
||||
|
||||
/**
|
||||
* Returns a canonicalized version of the analyzer's name. The
|
||||
* returned name is derived from what's passed to the constructor but
|
||||
* upper-cased and transformed to allow being part of a script-level
|
||||
* ID.
|
||||
*/
|
||||
const char* CanonicalName() const { return canon_name; }
|
||||
|
||||
/**
|
||||
* Returns the analyzer's factory function.
|
||||
*/
|
||||
factory_callback Factory() const { return factory; }
|
||||
|
||||
/**
|
||||
* Returns the analyzer's tag. Note that this is automatically
|
||||
* generated for each new Component, and hence unique across all of
|
||||
* them.
|
||||
*/
|
||||
analyzer::Tag Tag() const;
|
||||
|
||||
/**
|
||||
* Generates a human-readable description of the component's main
|
||||
* parameters. This goes into the output of \c "bro -NN".
|
||||
*/
|
||||
virtual void Describe(ODesc* d) const;
|
||||
|
||||
Component& operator=(const Component& other);
|
||||
|
||||
private:
|
||||
const char* name; // The analyzer's name.
|
||||
const char* canon_name; // The analyzer's canonical name.
|
||||
factory_callback factory; // The analyzer's factory callback.
|
||||
analyzer::Tag tag; // The automatically assigned analyzer tag.
|
||||
|
||||
// Global counter used to generate unique tags.
|
||||
static analyzer::Tag::type_t type_counter;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
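The Component/factory pattern declared above is how a plugin hands a file analyzer to the manager: the manager invokes the registered factory_callback whenever that analyzer is attached to a file. A minimal sketch of a provider, assuming a hypothetical do-nothing analyzer named Null; the include paths and the AddComponent() registration in the trailing comment are illustrative only, while the factory and constructor signatures follow the headers in this diff.

// Sketch only: the class name "Null" and the registration call are hypothetical.
#include "file_analysis/Analyzer.h"
#include "file_analysis/Component.h"

namespace file_analysis {

class Null : public Analyzer {
public:
	// Factory passed to Component; matches Component::factory_callback.
	static Analyzer* Instantiate(RecordVal* args, File* file)
		{ return new Null(args, file); }

	// Accept sequential data; returning true keeps the analyzer active.
	virtual bool DeliverStream(const u_char* data, uint64 len)
		{ return true; }

protected:
	Null(RecordVal* args, File* file) : Analyzer(args, file) {}
};

} // namespace file_analysis

// A plugin would then register the component along the lines of:
//   AddComponent(new file_analysis::Component("NULL", file_analysis::Null::Instantiate));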
|
|
@ -1,36 +0,0 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYSIS_DATAEVENT_H
|
||||
#define FILE_ANALYSIS_DATAEVENT_H
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "Val.h"
|
||||
#include "File.h"
|
||||
#include "Analyzer.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
/**
|
||||
* An analyzer to send file data to script-layer events.
|
||||
*/
|
||||
class DataEvent : public file_analysis::Analyzer {
|
||||
public:
|
||||
virtual bool DeliverChunk(const u_char* data, uint64 len, uint64 offset);
|
||||
|
||||
virtual bool DeliverStream(const u_char* data, uint64 len);
|
||||
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file);
|
||||
|
||||
protected:
|
||||
DataEvent(RecordVal* args, File* file,
|
||||
EventHandlerPtr ce, EventHandlerPtr se);
|
||||
|
||||
private:
|
||||
EventHandlerPtr chunk_event;
|
||||
EventHandlerPtr stream_event;
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
#endif
|
|
@ -1,35 +0,0 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYSIS_EXTRACT_H
|
||||
#define FILE_ANALYSIS_EXTRACT_H
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "Val.h"
|
||||
#include "File.h"
|
||||
#include "Analyzer.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
/**
|
||||
* An analyzer to extract files to disk.
|
||||
*/
|
||||
class Extract : public file_analysis::Analyzer {
|
||||
public:
|
||||
virtual ~Extract();
|
||||
|
||||
virtual bool DeliverChunk(const u_char* data, uint64 len, uint64 offset);
|
||||
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file);
|
||||
|
||||
protected:
|
||||
Extract(RecordVal* args, File* file, const string& arg_filename);
|
||||
|
||||
private:
|
||||
string filename;
|
||||
int fd;
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
#endif
|
|
@ -1,11 +1,9 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#include <string>
|
||||
#include <openssl/md5.h>
|
||||
|
||||
#include "File.h"
|
||||
#include "FileTimer.h"
|
||||
#include "FileID.h"
|
||||
#include "Analyzer.h"
|
||||
#include "Manager.h"
|
||||
#include "Reporter.h"
|
||||
|
@ -53,8 +51,6 @@ int File::bof_buffer_size_idx = -1;
|
|||
int File::bof_buffer_idx = -1;
|
||||
int File::mime_type_idx = -1;
|
||||
|
||||
string File::salt;
|
||||
|
||||
void File::StaticInit()
|
||||
{
|
||||
if ( id_idx != -1 )
|
||||
|
@ -74,42 +70,27 @@ void File::StaticInit()
|
|||
bof_buffer_size_idx = Idx("bof_buffer_size");
|
||||
bof_buffer_idx = Idx("bof_buffer");
|
||||
mime_type_idx = Idx("mime_type");
|
||||
|
||||
salt = BifConst::FileAnalysis::salt->CheckString();
|
||||
}
|
||||
|
||||
File::File(const string& unique, Connection* conn, analyzer::Tag tag,
|
||||
File::File(const string& file_id, Connection* conn, analyzer::Tag tag,
|
||||
bool is_orig)
|
||||
: id(""), unique(unique), val(0), postpone_timeout(false),
|
||||
first_chunk(true), missed_bof(false), need_reassembly(false), done(false),
|
||||
analyzers(this)
|
||||
: id(file_id), val(0), postpone_timeout(false), first_chunk(true),
|
||||
missed_bof(false), need_reassembly(false), done(false), analyzers(this)
|
||||
{
|
||||
StaticInit();
|
||||
|
||||
char tmp[20];
|
||||
uint64 hash[2];
|
||||
string msg(unique + salt);
|
||||
MD5(reinterpret_cast<const u_char*>(msg.data()), msg.size(),
|
||||
reinterpret_cast<u_char*>(hash));
|
||||
uitoa_n(hash[0], tmp, sizeof(tmp), 62);
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Creating new File object %s (%s)", tmp,
|
||||
unique.c_str());
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Creating new File object %s", file_id.c_str());
|
||||
|
||||
val = new RecordVal(fa_file_type);
|
||||
val->Assign(id_idx, new StringVal(tmp));
|
||||
id = FileID(tmp);
|
||||
val->Assign(id_idx, new StringVal(file_id.c_str()));
|
||||
|
||||
if ( conn )
|
||||
{
|
||||
// add source, connection, is_orig fields
|
||||
val->Assign(source_idx, new StringVal(analyzer_mgr->GetAnalyzerName(tag)));
|
||||
SetSource(analyzer_mgr->GetAnalyzerName(tag));
|
||||
val->Assign(is_orig_idx, new Val(is_orig, TYPE_BOOL));
|
||||
UpdateConnectionFields(conn);
|
||||
}
|
||||
else
|
||||
// use the unique file handle as source
|
||||
val->Assign(source_idx, new StringVal(unique.c_str()));
|
||||
|
||||
UpdateLastActivityTime();
|
||||
}
|
||||
|
@ -189,6 +170,18 @@ int File::Idx(const string& field)
|
|||
return rval;
|
||||
}
|
||||
|
||||
string File::GetSource() const
|
||||
{
|
||||
Val* v = val->Lookup(source_idx);
|
||||
|
||||
return v ? v->AsString()->CheckString() : string();
|
||||
}
|
||||
|
||||
void File::SetSource(const string& source)
|
||||
{
|
||||
val->Assign(source_idx, new StringVal(source.c_str()));
|
||||
}
|
||||
|
||||
double File::GetTimeoutInterval() const
|
||||
{
|
||||
return LookupFieldDefaultInterval(timeout_interval_idx);
|
||||
|
@ -425,7 +418,7 @@ void File::Gap(uint64 offset, uint64 len)
|
|||
|
||||
bool File::FileEventAvailable(EventHandlerPtr h)
|
||||
{
|
||||
return h && ! file_mgr->IsIgnored(unique);
|
||||
return h && ! file_mgr->IsIgnored(id);
|
||||
}
|
||||
|
||||
void File::FileEvent(EventHandlerPtr h)
|
||||
|
|
|
@ -9,7 +9,6 @@
|
|||
#include "Conn.h"
|
||||
#include "Val.h"
|
||||
#include "AnalyzerSet.h"
|
||||
#include "FileID.h"
|
||||
#include "BroString.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
@ -19,13 +18,30 @@ namespace file_analysis {
|
|||
*/
|
||||
class File {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Destructor. Nothing fancy, releases a reference to the wrapped
|
||||
* \c fa_file value.
|
||||
*/
|
||||
~File();
|
||||
|
||||
/**
|
||||
* @return the #val record.
|
||||
* @return the wrapped \c fa_file record value, #val.
|
||||
*/
|
||||
RecordVal* GetVal() const { return val; }
|
||||
|
||||
/**
|
||||
* @return the value of the "source" field from #val record or an empty
|
||||
* string if it's not initialized.
|
||||
*/
|
||||
string GetSource() const;
|
||||
|
||||
/**
|
||||
* Set the "source" field from #val record to \a source.
|
||||
* @param source the new value of the "source" field.
|
||||
*/
|
||||
void SetSource(const string& source);
|
||||
|
||||
/**
|
||||
* @return value (seconds) of the "timeout_interval" field from #val record.
|
||||
*/
|
||||
|
@ -33,18 +49,14 @@ public:
|
|||
|
||||
/**
|
||||
* Set the "timeout_interval" field from #val record to \a interval seconds.
|
||||
* @param interval the new value of the "timeout_interval" field.
|
||||
*/
|
||||
void SetTimeoutInterval(double interval);
|
||||
|
||||
/**
|
||||
* @return value of the "id" field from #val record.
|
||||
*/
|
||||
FileID GetID() const { return id; }
|
||||
|
||||
/**
|
||||
* @return the string which uniquely identifies the file.
|
||||
*/
|
||||
string GetUnique() const { return unique; }
|
||||
string GetID() const { return id; }
|
||||
|
||||
/**
|
||||
* @return value of "last_active" field in #val record;
|
||||
|
@ -58,13 +70,15 @@ public:
|
|||
|
||||
/**
|
||||
* Set "total_bytes" field of #val record to \a size.
|
||||
* @param size the new value of the "total_bytes" field.
|
||||
*/
|
||||
void SetTotalBytes(uint64 size);
|
||||
|
||||
/**
|
||||
* Compares "seen_bytes" field to "total_bytes" field of #val record
|
||||
* and returns true if the comparison indicates the full file was seen.
|
||||
* If "total_bytes" hasn't been set yet, it returns false.
|
||||
* Compares "seen_bytes" field to "total_bytes" field of #val record to
|
||||
* determine if the full file has been seen.
|
||||
* @return false if "total_bytes" hasn't been set yet or "seen_bytes" is
|
||||
* less than it, else true.
|
||||
*/
|
||||
bool IsComplete() const;
|
||||
|
||||
|
@ -78,23 +92,30 @@ public:
|
|||
/**
|
||||
* Queues attaching an analyzer. Only one analyzer per type can be attached
|
||||
* at a time unless the arguments differ.
|
||||
* @param args an \c AnalyzerArgs value representing a file analyzer.
|
||||
* @return false if analyzer can't be instantiated, else true.
|
||||
*/
|
||||
bool AddAnalyzer(RecordVal* args);
|
||||
|
||||
/**
|
||||
* Queues removal of an analyzer.
|
||||
* @param args an \c AnalyzerArgs value representing a file analyzer.
|
||||
* @return true if analyzer was active at time of call, else false.
|
||||
*/
|
||||
bool RemoveAnalyzer(const RecordVal* args);
|
||||
|
||||
/**
|
||||
* Pass in non-sequential data and deliver to attached analyzers.
|
||||
* @param data pointer to start of a chunk of file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @param offset number of bytes from start of file at which chunk occurs.
|
||||
*/
|
||||
void DataIn(const u_char* data, uint64 len, uint64 offset);
|
||||
|
||||
/**
|
||||
* Pass in sequential data and deliver to attached analyzers.
|
||||
* @param data pointer to start of a chunk of file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
*/
|
||||
void DataIn(const u_char* data, uint64 len);
|
||||
|
||||
|
@ -105,10 +126,13 @@ public:
|
|||
|
||||
/**
|
||||
* Inform attached analyzers about a gap in file stream.
|
||||
* @param offset number of bytes in to file at which missing chunk starts.
|
||||
* @param len length in bytes of the missing chunk of file data.
|
||||
*/
|
||||
void Gap(uint64 offset, uint64 len);
|
||||
|
||||
/**
|
||||
* @param h pointer to an event handler.
|
||||
* @return true if event has a handler and the file isn't ignored.
|
||||
*/
|
||||
bool FileEventAvailable(EventHandlerPtr h);
|
||||
|
@ -116,11 +140,14 @@ public:
|
|||
/**
|
||||
* Raises an event related to the file's life-cycle, the only parameter
|
||||
* to that event is the \c fa_file record.
|
||||
* @param h pointer to an event handler.
|
||||
*/
|
||||
void FileEvent(EventHandlerPtr h);
|
||||
|
||||
/**
|
||||
* Raises an event related to the file's life-cycle.
|
||||
* @param h pointer to an event handler.
|
||||
* @param vl list of argument values to pass to event call.
|
||||
*/
|
||||
void FileEvent(EventHandlerPtr h, val_list* vl);
|
||||
|
||||
|
@ -129,35 +156,51 @@ protected:
|
|||
|
||||
/**
|
||||
* Constructor; only file_analysis::Manager should be creating these.
|
||||
* @param file_id an identifier string for the file in pretty hash form
|
||||
* (similar to connection uids).
|
||||
* @param conn a network connection over which the file is transferred.
|
||||
* @param tag the network protocol over which the file is transferred.
|
||||
* @param is_orig true if the file is being transferred from the originator
|
||||
* of the connection to the responder. False indicates the other
|
||||
* direction.
|
||||
*/
|
||||
File(const string& unique, Connection* conn = 0,
|
||||
File(const string& file_id, Connection* conn = 0,
|
||||
analyzer::Tag tag = analyzer::Tag::Error, bool is_orig = false);
|
||||
|
||||
/**
|
||||
* Updates the "conn_ids" and "conn_uids" fields in #val record with the
|
||||
* \c conn_id and UID taken from \a conn.
|
||||
* @param conn the connection over which a part of the file has been seen.
|
||||
*/
|
||||
void UpdateConnectionFields(Connection* conn);
|
||||
|
||||
/**
|
||||
* Increment a byte count field of #val record by \a size.
|
||||
* @param size number of bytes by which to increment.
|
||||
* @param field_idx the index of the field in \c fa_file to increment.
|
||||
*/
|
||||
void IncrementByteCount(uint64 size, int field_idx);
|
||||
|
||||
/**
|
||||
* Wrapper to RecordVal::LookupWithDefault for the field in #val at index
|
||||
* \a idx which automatically unrefs the Val and returns a converted value.
|
||||
* @param idx the index of a field of type "count" in \c fa_file.
|
||||
* @return the value of the field, which may be its &default.
|
||||
*/
|
||||
uint64 LookupFieldDefaultCount(int idx) const;
|
||||
|
||||
/**
|
||||
* Wrapper to RecordVal::LookupWithDefault for the field in #val at index
|
||||
* \a idx which automatically unrefs the Val and returns a converted value.
|
||||
* @param idx the index of a field of type "interval" in \c fa_file.
|
||||
* @return the value of the field, which may be its &default.
|
||||
*/
|
||||
double LookupFieldDefaultInterval(int idx) const;
|
||||
|
||||
/**
|
||||
* Buffers incoming data at the beginning of a file.
|
||||
* @param data pointer to a data chunk to buffer.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @return true if buffering is still required, else false
|
||||
*/
|
||||
bool BufferBOF(const u_char* data, uint64 len);
|
||||
|
@ -170,11 +213,15 @@ protected:
|
|||
/**
|
||||
* Does mime type detection and assigns type (if available) to \c mime_type
|
||||
* field in #val.
|
||||
* @param data pointer to a chunk of file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @return whether mime type was available.
|
||||
*/
|
||||
bool DetectMIME(const u_char* data, uint64 len);
|
||||
|
||||
/**
|
||||
* Lookup a record field index/offset by name.
|
||||
* @param field_name the name of the \c fa_file record field.
|
||||
* @return the field offset in #val record corresponding to \a field_name.
|
||||
*/
|
||||
static int Idx(const string& field_name);
|
||||
|
@ -185,15 +232,14 @@ protected:
|
|||
static void StaticInit();
|
||||
|
||||
private:
|
||||
FileID id; /**< A pretty hash that likely identifies file */
|
||||
string unique; /**< A string that uniquely identifies file */
|
||||
string id; /**< A pretty hash that likely identifies file */
|
||||
RecordVal* val; /**< \c fa_file from script layer. */
|
||||
bool postpone_timeout; /**< Whether postponing timeout is requested. */
|
||||
bool first_chunk; /**< Track first non-linear chunk. */
|
||||
bool missed_bof; /**< Flags that we missed start of file. */
|
||||
bool need_reassembly; /**< Whether file stream reassembly is needed. */
|
||||
bool done; /**< If this object is about to be deleted. */
|
||||
AnalyzerSet analyzers;
|
||||
AnalyzerSet analyzers; /**< The set of attached file analyzers. */
|
||||
|
||||
struct BOF_Buffer {
|
||||
BOF_Buffer() : full(false), replayed(false), size(0) {}
|
||||
|
@ -206,8 +252,6 @@ private:
|
|||
BroString::CVec chunks;
|
||||
} bof_buffer; /**< Beginning of file buffer. */
|
||||
|
||||
static string salt;
|
||||
|
||||
static int id_idx;
|
||||
static int parent_id_idx;
|
||||
static int source_idx;
|
||||
|
|
|
@ -1,34 +0,0 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYSIS_FILEID_H
|
||||
#define FILE_ANALYSIS_FILEID_H
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
/**
|
||||
* A simple string wrapper class to help enforce some type safety between
|
||||
* methods of FileAnalysis::Manager, some of which use a unique string to
|
||||
* identify files, and others which use a pretty hash (the FileID) to identify
|
||||
* files. A FileID is primarily used in methods which interface with the
|
||||
* script-layer, while the unique strings are used for methods which interface
|
||||
* with protocol analyzers or anything that sends data to the file analysis
|
||||
* framework.
|
||||
*/
|
||||
struct FileID {
|
||||
string id;
|
||||
|
||||
explicit FileID(const string arg_id) : id(arg_id) {}
|
||||
FileID(const FileID& other) : id(other.id) {}
|
||||
|
||||
const char* c_str() const { return id.c_str(); }
|
||||
|
||||
bool operator==(const FileID& rhs) const { return id == rhs.id; }
|
||||
bool operator<(const FileID& rhs) const { return id < rhs.id; }
|
||||
|
||||
FileID& operator=(const FileID& rhs) { id = rhs.id; return *this; }
|
||||
FileID& operator=(const string& rhs) { id = rhs; return *this; }
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
#endif
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
using namespace file_analysis;
|
||||
|
||||
FileTimer::FileTimer(double t, const FileID& id, double interval)
|
||||
FileTimer::FileTimer(double t, const string& id, double interval)
|
||||
: Timer(t + interval, TIMER_FILE_ANALYSIS_INACTIVITY), file_id(id)
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "New %f second timeout timer for %s",
|
||||
|
|
|
@ -5,7 +5,6 @@
|
|||
|
||||
#include <string>
|
||||
#include "Timer.h"
|
||||
#include "FileID.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
|
@ -14,16 +13,25 @@ namespace file_analysis {
|
|||
*/
|
||||
class FileTimer : public Timer {
|
||||
public:
|
||||
FileTimer(double t, const FileID& id, double interval);
|
||||
|
||||
/**
|
||||
* Constructor, nothing interesting about it.
|
||||
* @param t unix time at which the timer should start ticking.
|
||||
* @param id the file identifier which will be checked for inactivity.
|
||||
* @param interval amount of time after \a t to check for inactivity.
|
||||
*/
|
||||
FileTimer(double t, const string& id, double interval);
|
||||
|
||||
/**
|
||||
* Check inactivity of file_analysis::File corresponding to #file_id,
|
||||
* reschedule if active, else call file_analysis::Manager::Timeout.
|
||||
* @param t current unix time
|
||||
* @param is_expire true if all pending timers are being expired.
|
||||
*/
|
||||
void Dispatch(double t, int is_expire);
|
||||
|
||||
private:
|
||||
FileID file_id;
|
||||
string file_id;
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
|
|
@ -1,74 +0,0 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYSIS_HASH_H
|
||||
#define FILE_ANALYSIS_HASH_H
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "Val.h"
|
||||
#include "OpaqueVal.h"
|
||||
#include "File.h"
|
||||
#include "Analyzer.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
/**
|
||||
* An analyzer to produce a hash of file contents.
|
||||
*/
|
||||
class Hash : public file_analysis::Analyzer {
|
||||
public:
|
||||
virtual ~Hash();
|
||||
|
||||
virtual bool DeliverStream(const u_char* data, uint64 len);
|
||||
|
||||
virtual bool EndOfFile();
|
||||
|
||||
virtual bool Undelivered(uint64 offset, uint64 len);
|
||||
|
||||
protected:
|
||||
Hash(RecordVal* args, File* file, HashVal* hv, const char* kind);
|
||||
|
||||
void Finalize();
|
||||
|
||||
private:
|
||||
HashVal* hash;
|
||||
bool fed;
|
||||
const char* kind;
|
||||
};
|
||||
|
||||
class MD5 : public Hash {
|
||||
public:
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file)
|
||||
{ return file_hash ? new MD5(args, file) : 0; }
|
||||
|
||||
protected:
|
||||
MD5(RecordVal* args, File* file)
|
||||
: Hash(args, file, new MD5Val(), "md5")
|
||||
{}
|
||||
};
|
||||
|
||||
class SHA1 : public Hash {
|
||||
public:
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file)
|
||||
{ return file_hash ? new SHA1(args, file) : 0; }
|
||||
|
||||
protected:
|
||||
SHA1(RecordVal* args, File* file)
|
||||
: Hash(args, file, new SHA1Val(), "sha1")
|
||||
{}
|
||||
};
|
||||
|
||||
class SHA256 : public Hash {
|
||||
public:
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file)
|
||||
{ return file_hash ? new SHA256(args, file) : 0; }
|
||||
|
||||
protected:
|
||||
SHA256(RecordVal* args, File* file)
|
||||
: Hash(args, file, new SHA256Val(), "sha256")
|
||||
{}
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
#endif
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <openssl/md5.h>
|
||||
|
||||
#include "Manager.h"
|
||||
#include "File.h"
|
||||
|
@ -9,12 +10,18 @@
|
|||
#include "Var.h"
|
||||
#include "Event.h"
|
||||
|
||||
#include "plugin/Manager.h"
|
||||
|
||||
using namespace file_analysis;
|
||||
|
||||
TableVal* Manager::disabled = 0;
|
||||
string Manager::salt;
|
||||
|
||||
Manager::Manager()
|
||||
{
|
||||
tag_enum_type = new EnumType("FileAnalysis::Tag");
|
||||
::ID* id = install_ID("Tag", "FileAnalysis", true, true);
|
||||
add_type(id, tag_enum_type, 0, 0);
|
||||
}
|
||||
|
||||
Manager::~Manager()
|
||||
|
@ -22,9 +29,44 @@ Manager::~Manager()
|
|||
Terminate();
|
||||
}
|
||||
|
||||
void Manager::InitPreScript()
|
||||
{
|
||||
std::list<Component*> analyzers = plugin_mgr->Components<Component>();
|
||||
|
||||
for ( std::list<Component*>::const_iterator i = analyzers.begin();
|
||||
i != analyzers.end(); ++i )
|
||||
RegisterAnalyzerComponent(*i);
|
||||
}
|
||||
|
||||
void Manager::RegisterAnalyzerComponent(Component* component)
|
||||
{
|
||||
const char* cname = component->CanonicalName();
|
||||
|
||||
if ( tag_enum_type->Lookup("FileAnalysis", cname) != -1 )
|
||||
reporter->FatalError("File Analyzer %s defined more than once", cname);
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Registering analyzer %s (tag %s)",
|
||||
component->Name(), component->Tag().AsString().c_str());
|
||||
|
||||
analyzers_by_name.insert(std::make_pair(cname, component));
|
||||
analyzers_by_tag.insert(std::make_pair(component->Tag(), component));
|
||||
analyzers_by_val.insert(std::make_pair(
|
||||
component->Tag().AsEnumVal()->InternalInt(), component));
|
||||
|
||||
string id = fmt("ANALYZER_%s", cname);
|
||||
tag_enum_type->AddName("FileAnalysis", id.c_str(),
|
||||
component->Tag().AsEnumVal()->InternalInt(), true);
|
||||
}
|
||||
|
||||
void Manager::InitPostScript()
|
||||
{
|
||||
#include "file_analysis.bif.init.cc"
|
||||
}
|
||||
|
||||
void Manager::Terminate()
|
||||
{
|
||||
vector<FileID> keys;
|
||||
vector<string> keys;
|
||||
|
||||
for ( IDMap::iterator it = id_map.begin(); it != id_map.end(); ++it )
|
||||
keys.push_back(it->first);
|
||||
|
||||
|
@ -32,66 +74,77 @@ void Manager::Terminate()
|
|||
Timeout(keys[i], true);
|
||||
}
|
||||
|
||||
string Manager::HashHandle(const string& handle) const
|
||||
{
|
||||
if ( salt.empty() )
|
||||
salt = BifConst::FileAnalysis::salt->CheckString();
|
||||
|
||||
char tmp[20];
|
||||
uint64 hash[2];
|
||||
string msg(handle + salt);
|
||||
|
||||
MD5(reinterpret_cast<const u_char*>(msg.data()), msg.size(),
|
||||
reinterpret_cast<u_char*>(hash));
|
||||
uitoa_n(hash[0], tmp, sizeof(tmp), 62);
|
||||
|
||||
return tmp;
|
||||
}
|
||||
|
||||
void Manager::SetHandle(const string& handle)
|
||||
{
|
||||
current_handle = handle;
|
||||
if ( handle.empty() )
|
||||
return;
|
||||
|
||||
current_file_id = HashHandle(handle);
|
||||
}
|
||||
|
||||
void Manager::DataIn(const u_char* data, uint64 len, uint64 offset,
|
||||
analyzer::Tag tag, Connection* conn, bool is_orig)
|
||||
{
|
||||
if ( IsDisabled(tag) )
|
||||
return;
|
||||
|
||||
GetFileHandle(tag, conn, is_orig);
|
||||
DataIn(data, len, offset, GetFile(current_handle, conn, tag, is_orig));
|
||||
}
|
||||
File* file = GetFile(current_file_id, conn, tag, is_orig);
|
||||
|
||||
void Manager::DataIn(const u_char* data, uint64 len, uint64 offset,
|
||||
const string& unique)
|
||||
{
|
||||
DataIn(data, len, offset, GetFile(unique));
|
||||
}
|
||||
|
||||
void Manager::DataIn(const u_char* data, uint64 len, uint64 offset,
|
||||
File* file)
|
||||
{
|
||||
if ( ! file )
|
||||
return;
|
||||
|
||||
file->DataIn(data, len, offset);
|
||||
|
||||
if ( file->IsComplete() )
|
||||
RemoveFile(file->GetUnique());
|
||||
RemoveFile(file->GetID());
|
||||
}
|
||||
|
||||
void Manager::DataIn(const u_char* data, uint64 len, analyzer::Tag tag,
|
||||
Connection* conn, bool is_orig)
|
||||
{
|
||||
if ( IsDisabled(tag) )
|
||||
return;
|
||||
|
||||
GetFileHandle(tag, conn, is_orig);
|
||||
|
||||
// Sequential data input shouldn't be going over multiple conns, so don't
|
||||
// do the check to update connection set.
|
||||
DataIn(data, len, GetFile(current_handle, conn, tag, is_orig, false));
|
||||
}
|
||||
File* file = GetFile(current_file_id, conn, tag, is_orig, false);
|
||||
|
||||
void Manager::DataIn(const u_char* data, uint64 len, const string& unique)
|
||||
{
|
||||
DataIn(data, len, GetFile(unique));
|
||||
}
|
||||
|
||||
void Manager::DataIn(const u_char* data, uint64 len, File* file)
|
||||
{
|
||||
if ( ! file )
|
||||
return;
|
||||
|
||||
file->DataIn(data, len);
|
||||
|
||||
if ( file->IsComplete() )
|
||||
RemoveFile(file->GetUnique());
|
||||
RemoveFile(file->GetID());
|
||||
}
|
||||
|
||||
void Manager::DataIn(const u_char* data, uint64 len, const string& file_id,
|
||||
const string& source)
|
||||
{
|
||||
File* file = GetFile(file_id);
|
||||
|
||||
if ( ! file )
|
||||
return;
|
||||
|
||||
if ( file->GetSource().empty() )
|
||||
file->SetSource(source);
|
||||
|
||||
file->DataIn(data, len);
|
||||
|
||||
if ( file->IsComplete() )
|
||||
RemoveFile(file->GetID());
|
||||
}
|
||||
|
||||
void Manager::EndOfFile(analyzer::Tag tag, Connection* conn)
|
||||
|
@ -102,35 +155,22 @@ void Manager::EndOfFile(analyzer::Tag tag, Connection* conn)
|
|||
|
||||
void Manager::EndOfFile(analyzer::Tag tag, Connection* conn, bool is_orig)
|
||||
{
|
||||
if ( IsDisabled(tag) )
|
||||
return;
|
||||
|
||||
// Don't need to create a file if we're just going to remove it right away.
|
||||
GetFileHandle(tag, conn, is_orig);
|
||||
EndOfFile(current_handle);
|
||||
RemoveFile(current_file_id);
|
||||
}
|
||||
|
||||
void Manager::EndOfFile(const string& unique)
|
||||
void Manager::EndOfFile(const string& file_id)
|
||||
{
|
||||
RemoveFile(unique);
|
||||
RemoveFile(file_id);
|
||||
}
|
||||
|
||||
void Manager::Gap(uint64 offset, uint64 len, analyzer::Tag tag,
|
||||
Connection* conn, bool is_orig)
|
||||
{
|
||||
if ( IsDisabled(tag) )
|
||||
return;
|
||||
|
||||
GetFileHandle(tag, conn, is_orig);
|
||||
Gap(offset, len, GetFile(current_handle, conn, tag, is_orig));
|
||||
}
|
||||
File* file = GetFile(current_file_id, conn, tag, is_orig);
|
||||
|
||||
void Manager::Gap(uint64 offset, uint64 len, const string& unique)
|
||||
{
|
||||
Gap(offset, len, GetFile(unique));
|
||||
}
|
||||
|
||||
void Manager::Gap(uint64 offset, uint64 len, File* file)
|
||||
{
|
||||
if ( ! file )
|
||||
return;
|
||||
|
||||
|
@ -140,52 +180,33 @@ void Manager::Gap(uint64 offset, uint64 len, File* file)
|
|||
void Manager::SetSize(uint64 size, analyzer::Tag tag, Connection* conn,
|
||||
bool is_orig)
|
||||
{
|
||||
if ( IsDisabled(tag) )
|
||||
return;
|
||||
|
||||
GetFileHandle(tag, conn, is_orig);
|
||||
SetSize(size, GetFile(current_handle, conn, tag, is_orig));
|
||||
}
|
||||
File* file = GetFile(current_file_id, conn, tag, is_orig);
|
||||
|
||||
void Manager::SetSize(uint64 size, const string& unique)
|
||||
{
|
||||
SetSize(size, GetFile(unique));
|
||||
}
|
||||
|
||||
void Manager::SetSize(uint64 size, File* file)
|
||||
{
|
||||
if ( ! file )
|
||||
return;
|
||||
|
||||
file->SetTotalBytes(size);
|
||||
|
||||
if ( file->IsComplete() )
|
||||
RemoveFile(file->GetUnique());
|
||||
RemoveFile(file->GetID());
|
||||
}
|
||||
|
||||
bool Manager::PostponeTimeout(const FileID& file_id) const
|
||||
bool Manager::SetTimeoutInterval(const string& file_id, double interval) const
|
||||
{
|
||||
File* file = Lookup(file_id);
|
||||
|
||||
if ( ! file )
|
||||
return false;
|
||||
|
||||
file->postpone_timeout = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Manager::SetTimeoutInterval(const FileID& file_id, double interval) const
|
||||
{
|
||||
File* file = Lookup(file_id);
|
||||
|
||||
if ( ! file )
|
||||
return false;
|
||||
if ( interval > 0 )
|
||||
file->postpone_timeout = true;
|
||||
|
||||
file->SetTimeoutInterval(interval);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Manager::AddAnalyzer(const FileID& file_id, RecordVal* args) const
|
||||
bool Manager::AddAnalyzer(const string& file_id, RecordVal* args) const
|
||||
{
|
||||
File* file = Lookup(file_id);
|
||||
|
||||
|
@ -195,7 +216,7 @@ bool Manager::AddAnalyzer(const FileID& file_id, RecordVal* args) const
|
|||
return file->AddAnalyzer(args);
|
||||
}
|
||||
|
||||
bool Manager::RemoveAnalyzer(const FileID& file_id, const RecordVal* args) const
|
||||
bool Manager::RemoveAnalyzer(const string& file_id, const RecordVal* args) const
|
||||
{
|
||||
File* file = Lookup(file_id);
|
||||
|
||||
|
@ -205,32 +226,23 @@ bool Manager::RemoveAnalyzer(const FileID& file_id, const RecordVal* args) const
|
|||
return file->RemoveAnalyzer(args);
|
||||
}
|
||||
|
||||
File* Manager::GetFile(const string& unique, Connection* conn,
|
||||
File* Manager::GetFile(const string& file_id, Connection* conn,
|
||||
analyzer::Tag tag, bool is_orig, bool update_conn)
|
||||
{
|
||||
if ( unique.empty() )
|
||||
if ( file_id.empty() )
|
||||
return 0;
|
||||
|
||||
if ( IsIgnored(unique) )
|
||||
if ( IsIgnored(file_id) )
|
||||
return 0;
|
||||
|
||||
File* rval = str_map[unique];
|
||||
File* rval = id_map[file_id];
|
||||
|
||||
if ( ! rval )
|
||||
{
|
||||
rval = str_map[unique] = new File(unique, conn, tag, is_orig);
|
||||
FileID id = rval->GetID();
|
||||
|
||||
if ( id_map[id] )
|
||||
{
|
||||
reporter->Error("Evicted duplicate file ID: %s", id.c_str());
|
||||
RemoveFile(unique);
|
||||
}
|
||||
|
||||
id_map[id] = rval;
|
||||
rval = id_map[file_id] = new File(file_id, conn, tag, is_orig);
|
||||
rval->ScheduleInactivityTimer();
|
||||
|
||||
if ( IsIgnored(unique) )
|
||||
if ( IsIgnored(file_id) )
|
||||
return 0;
|
||||
}
|
||||
else
|
||||
|
@ -244,7 +256,7 @@ File* Manager::GetFile(const string& unique, Connection* conn,
|
|||
return rval;
|
||||
}
|
||||
|
||||
File* Manager::Lookup(const FileID& file_id) const
|
||||
File* Manager::Lookup(const string& file_id) const
|
||||
{
|
||||
IDMap::const_iterator it = id_map.find(file_id);
|
||||
|
||||
|
@ -254,7 +266,7 @@ File* Manager::Lookup(const FileID& file_id) const
|
|||
return it->second;
|
||||
}
|
||||
|
||||
void Manager::Timeout(const FileID& file_id, bool is_terminating)
|
||||
void Manager::Timeout(const string& file_id, bool is_terminating)
|
||||
{
|
||||
File* file = Lookup(file_id);
|
||||
|
||||
|
@ -277,53 +289,50 @@ void Manager::Timeout(const FileID& file_id, bool is_terminating)
|
|||
DBG_LOG(DBG_FILE_ANALYSIS, "File analysis timeout for %s",
|
||||
file->GetID().c_str());
|
||||
|
||||
RemoveFile(file->GetUnique());
|
||||
RemoveFile(file->GetID());
|
||||
}
|
||||
|
||||
bool Manager::IgnoreFile(const FileID& file_id)
|
||||
bool Manager::IgnoreFile(const string& file_id)
|
||||
{
|
||||
if ( id_map.find(file_id) == id_map.end() )
|
||||
return false;
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Ignore FileID %s", file_id.c_str());
|
||||
|
||||
ignored.insert(file_id);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Manager::RemoveFile(const string& file_id)
|
||||
{
|
||||
IDMap::iterator it = id_map.find(file_id);
|
||||
|
||||
if ( it == id_map.end() )
|
||||
return false;
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Ignore FileID %s", file_id.c_str());
|
||||
|
||||
ignored.insert(it->second->GetUnique());
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Manager::RemoveFile(const string& unique)
|
||||
{
|
||||
StrMap::iterator it = str_map.find(unique);
|
||||
|
||||
if ( it == str_map.end() )
|
||||
return false;
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Remove FileID %s", file_id.c_str());
|
||||
|
||||
it->second->EndOfFile();
|
||||
|
||||
FileID id = it->second->GetID();
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Remove FileID %s", id.c_str());
|
||||
|
||||
if ( ! id_map.erase(id) )
|
||||
reporter->Error("No mapping for fileID %s", id.c_str());
|
||||
|
||||
ignored.erase(unique);
|
||||
delete it->second;
|
||||
str_map.erase(unique);
|
||||
id_map.erase(file_id);
|
||||
ignored.erase(file_id);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Manager::IsIgnored(const string& unique)
|
||||
bool Manager::IsIgnored(const string& file_id)
|
||||
{
|
||||
return ignored.find(unique) != ignored.end();
|
||||
return ignored.find(file_id) != ignored.end();
|
||||
}
|
||||
|
||||
void Manager::GetFileHandle(analyzer::Tag tag, Connection* c, bool is_orig)
|
||||
{
|
||||
current_handle.clear();
|
||||
current_file_id.clear();
|
||||
|
||||
if ( IsDisabled(tag) )
|
||||
return;
|
||||
|
||||
if ( ! get_file_handle )
|
||||
return;
|
||||
|
@ -357,3 +366,31 @@ bool Manager::IsDisabled(analyzer::Tag tag)
|
|||
|
||||
return rval;
|
||||
}
|
||||
|
||||
Analyzer* Manager::InstantiateAnalyzer(int tag, RecordVal* args, File* f) const
|
||||
{
|
||||
analyzer_map_by_val::const_iterator it = analyzers_by_val.find(tag);
|
||||
|
||||
if ( it == analyzers_by_val.end() )
|
||||
reporter->InternalError("cannot instantiate unknown file analyzer: %d",
|
||||
tag);
|
||||
|
||||
Component* c = it->second;
|
||||
|
||||
if ( ! c->Factory() )
|
||||
reporter->InternalError("file analyzer %s cannot be instantiated "
|
||||
"dynamically", c->CanonicalName());
|
||||
|
||||
return c->Factory()(args, f);
|
||||
}
|
||||
|
||||
const char* Manager::GetAnalyzerName(int tag) const
|
||||
{
|
||||
analyzer_map_by_val::const_iterator it = analyzers_by_val.find(tag);
|
||||
|
||||
if ( it == analyzers_by_val.end() )
|
||||
reporter->InternalError("cannot get name of unknown file analyzer: %d",
|
||||
tag);
|
||||
|
||||
return it->second->CanonicalName();
|
||||
}
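These two tag-based lookups are what the AnalyzerSet hunk earlier in this diff now calls instead of indexing a static factory table. A condensed sketch of that call pattern, with the surrounding queueing logic trimmed; file_mgr is the global manager instance used throughout the diff:

// How a caller is expected to drive the new manager entry points.
FA_Tag tag = file_analysis::Analyzer::ArgsTag(args);
file_analysis::Analyzer* a = file_mgr->InstantiateAnalyzer(tag, args, file);

if ( ! a )
	reporter->Error("Failed file analyzer %s instantiation for file id %s",
	                file_mgr->GetAnalyzerName(tag), file->GetID().c_str());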
|
||||
|
|
|
@ -17,10 +17,12 @@
|
|||
|
||||
#include "File.h"
|
||||
#include "FileTimer.h"
|
||||
#include "FileID.h"
|
||||
#include "Component.h"
|
||||
|
||||
#include "analyzer/Tag.h"
|
||||
|
||||
#include "file_analysis/file_analysis.bif.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
/**
|
||||
|
@ -28,152 +30,280 @@ namespace file_analysis {
|
|||
*/
|
||||
class Manager {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*/
|
||||
Manager();
|
||||
|
||||
/**
|
||||
* Destructor. Times out any currently active file analyses.
|
||||
*/
|
||||
~Manager();
|
||||
|
||||
/**
|
||||
* First-stage initialization of the manager. This is called early on
|
||||
* during Bro's initialization, before any scripts are processed.
|
||||
*/
|
||||
void InitPreScript();
|
||||
|
||||
/**
|
||||
* Second-stage initialization of the manager. This is called late
|
||||
* during Bro's initialization after any scripts are processed.
|
||||
*/
|
||||
void InitPostScript();
|
||||
|
||||
/**
|
||||
* Times out any active file analysis to prepare for shutdown.
|
||||
*/
|
||||
void Terminate();
|
||||
|
||||
/**
|
||||
* Take in a unique file handle string to identify incoming file data.
|
||||
* Creates a file identifier from a unique file handle string.
|
||||
* @param handle a unique string which identifies a single file.
|
||||
* @return a prettified MD5 hash of \a handle, truncated to 64-bits.
|
||||
*/
|
||||
string HashHandle(const string& handle) const;
|
||||
|
||||
/**
|
||||
* Take in a unique file handle string to identify next piece of
|
||||
* incoming file data/information.
|
||||
* @param handle a unique string which identifies a single file.
|
||||
*/
|
||||
void SetHandle(const string& handle);
|
||||
|
||||
/**
|
||||
* Pass in non-sequential file data.
|
||||
* @param data pointer to start of a chunk of file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @param offset number of bytes from start of file that data chunk occurs.
|
||||
* @param tag network protocol over which the file data is transferred.
|
||||
* @param conn network connection over which the file data is transferred.
|
||||
* @param is_orig true if the file is being sent from connection originator
|
||||
* or false if it is being sent in the opposite direction.
|
||||
*/
|
||||
void DataIn(const u_char* data, uint64 len, uint64 offset,
|
||||
analyzer::Tag tag, Connection* conn, bool is_orig);
|
||||
void DataIn(const u_char* data, uint64 len, uint64 offset,
|
||||
const string& unique);
|
||||
void DataIn(const u_char* data, uint64 len, uint64 offset,
|
||||
File* file);
|
||||
|
||||
/**
|
||||
* Pass in sequential file data.
|
||||
* @param data pointer to start of a chunk of file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @param tag network protocol over which the file data is transferred.
|
||||
* @param conn network connection over which the file data is transferred.
|
||||
* @param is_orig true if the file is being sent from connection originator
|
||||
* or false if it is being sent in the opposite direction.
|
||||
*/
|
||||
void DataIn(const u_char* data, uint64 len, analyzer::Tag tag,
|
||||
Connection* conn, bool is_orig);
|
||||
void DataIn(const u_char* data, uint64 len, const string& unique);
|
||||
void DataIn(const u_char* data, uint64 len, File* file);
|
||||
|
||||
/**
|
||||
* Signal the end of file data.
|
||||
* Pass in sequential file data from an external source (e.g. the input framework).
|
||||
* @param data pointer to start of a chunk of file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @param file_id an identifier for the file (usually a hash of \a source).
|
||||
* @param source uniquely identifies the file and should also describe
|
||||
* in human-readable form where the file input is coming from (e.g.
|
||||
* a local file path).
|
||||
*/
|
||||
void DataIn(const u_char* data, uint64 len, const string& file_id,
|
||||
const string& source);
|
||||
|
||||
/**
|
||||
* Signal the end of file data regardless of which direction it is being
|
||||
* sent over the connection.
|
||||
* @param tag network protocol over which the file data is transferred.
|
||||
* @param conn network connection over which the file data is transferred.
|
||||
*/
|
||||
void EndOfFile(analyzer::Tag tag, Connection* conn);
|
||||
|
||||
/**
|
||||
* Signal the end of file data being transferred over a connection in
|
||||
* a particular direction.
|
||||
* @param tag network protocol over which the file data is transferred.
|
||||
* @param conn network connection over which the file data is transferred.
|
||||
*/
|
||||
void EndOfFile(analyzer::Tag tag, Connection* conn, bool is_orig);
|
||||
void EndOfFile(const string& unique);
|
||||
|
||||
/**
|
||||
* Signal the end of file data being transferred using the file identifier.
|
||||
* @param file_id the file identifier/hash.
|
||||
*/
|
||||
void EndOfFile(const string& file_id);
|
||||
|
||||
/**
|
||||
* Signal a gap in the file data stream.
|
||||
* @param offset number of bytes into the file at which the missing chunk starts.
|
||||
* @param len length in bytes of the missing chunk of file data.
|
||||
* @param tag network protocol over which the file data is transferred.
|
||||
* @param conn network connection over which the file data is transferred.
|
||||
* @param is_orig true if the file is being sent from connection originator
|
||||
* or false if it is being sent in the opposite direction.
|
||||
*/
|
||||
void Gap(uint64 offset, uint64 len, analyzer::Tag tag, Connection* conn,
|
||||
bool is_orig);
|
||||
void Gap(uint64 offset, uint64 len, const string& unique);
|
||||
void Gap(uint64 offset, uint64 len, File* file);
|
||||
|
||||
/**
|
||||
* Provide the expected number of bytes that comprise a file.
|
||||
* @param size the number of bytes in the full file.
|
||||
* @param tag network protocol over which the file data is transferred.
|
||||
* @param conn network connection over which the file data is transferred.
|
||||
* @param is_orig true if the file is being sent from connection originator
|
||||
* or false if it is being sent in the opposite direction.
|
||||
*/
|
||||
void SetSize(uint64 size, analyzer::Tag tag, Connection* conn,
|
||||
bool is_orig);
|
||||
void SetSize(uint64 size, const string& unique);
|
||||
void SetSize(uint64 size, File* file);
|
||||
|
||||
/**
|
||||
* Starts ignoring a file, which will finally be removed from internal
|
||||
* mappings on EOF or TIMEOUT.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @return false if file identifier did not map to anything, else true.
|
||||
*/
|
||||
bool IgnoreFile(const FileID& file_id);
|
||||
|
||||
/**
|
||||
* If called during a \c file_timeout event handler, requests deferral of
|
||||
* analysis timeout.
|
||||
*/
|
||||
bool PostponeTimeout(const FileID& file_id) const;
|
||||
bool IgnoreFile(const string& file_id);
|
||||
|
||||
/**
|
||||
* Sets an inactivity threshold for the file.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @param interval the amount of time in which no activity is seen for
|
||||
* the file identified by \a file_id that will cause the file
|
||||
* to be considered stale, timed out, and then have its resources reclaimed.
|
||||
* @return false if file identifier did not map to anything, else true.
|
||||
*/
|
||||
bool SetTimeoutInterval(const FileID& file_id, double interval) const;
|
||||
bool SetTimeoutInterval(const string& file_id, double interval) const;
|
||||
|
||||
/**
|
||||
* Queue attachment of an analyzer to the file identifier. Multiple
|
||||
* analyzers of a given type can be attached per file identifier at a time
|
||||
* as long as the arguments differ.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @param args a \c AnalyzerArgs value which describes a file analyzer.
|
||||
* @return false if the analyzer failed to be instantiated, else true.
|
||||
*/
|
||||
bool AddAnalyzer(const FileID& file_id, RecordVal* args) const;
|
||||
bool AddAnalyzer(const string& file_id, RecordVal* args) const;
|
||||
|
||||
/**
|
||||
* Queue removal of an analyzer for a given file identifier.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @param args a \c AnalyzerArgs value which describes a file analyzer.
|
||||
* @return true if the analyzer is active at the time of call, else false.
|
||||
*/
|
||||
bool RemoveAnalyzer(const FileID& file_id, const RecordVal* args) const;
|
||||
bool RemoveAnalyzer(const string& file_id, const RecordVal* args) const;
|
||||
|
||||
/**
|
||||
* @return whether the file mapped to \a unique is being ignored.
|
||||
* Tells whether analysis for a file is active or ignored.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @return whether the file mapped to \a file_id is being ignored.
|
||||
*/
|
||||
bool IsIgnored(const string& unique);
|
||||
bool IsIgnored(const string& file_id);
|
||||
|
||||
/**
|
||||
* Instantiates a new file analyzer instance for the file.
|
||||
* @param tag The file analyzer's tag.
|
||||
* @param args The file analyzer argument/option values.
|
||||
* @param f The file the analyzer is to be associated with.
|
||||
* @return The new analyzer instance or null if tag is invalid.
|
||||
*/
|
||||
Analyzer* InstantiateAnalyzer(int tag, RecordVal* args, File* f) const;
|
||||
|
||||
/**
|
||||
* Translates a script-level file analyzer tag into the corresponding file
|
||||
* analyzer name.
|
||||
* @param tag The enum val of a file analyzer.
|
||||
* @return The human-readable name of the file analyzer.
|
||||
*/
|
||||
const char* GetAnalyzerName(int tag) const;
|
||||
|
||||
protected:
|
||||
friend class FileTimer;
|
||||
|
||||
typedef map<string, File*> StrMap;
|
||||
typedef set<string> StrSet;
|
||||
typedef map<FileID, File*> IDMap;
|
||||
typedef set<string> IDSet;
|
||||
typedef map<string, File*> IDMap;
|
||||
|
||||
/**
|
||||
* @return the File object mapped to \a unique or a null pointer if analysis
|
||||
* is being ignored for the associated file. A File object may be
|
||||
* created if a mapping doesn't exist, and if it did exist, the
|
||||
* activity time is refreshed along with any connection-related
|
||||
* fields.
|
||||
* Create a new file to be analyzed or retrieve an existing one.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @param conn network connection, if any, over which the file is
|
||||
* transferred.
|
||||
* @param tag network protocol, if any, over which the file is transferred.
|
||||
* @param is_orig true if the file is being sent from connection originator
|
||||
* or false if it is being sent in the opposite direction (or if
|
||||
* this file isn't related to a connection).
|
||||
* @param update_conn whether we need to update connection-related fields
|
||||
* in the \c fa_file record value associated with the file.
|
||||
* @return the File object mapped to \a file_id or a null pointer if
|
||||
* analysis is being ignored for the associated file. A File
|
||||
* object may be created if a mapping doesn't exist, and if it did
|
||||
* exist, the activity time is refreshed along with any
|
||||
* connection-related fields.
|
||||
*/
|
||||
File* GetFile(const string& unique, Connection* conn = 0,
|
||||
File* GetFile(const string& file_id, Connection* conn = 0,
|
||||
analyzer::Tag tag = analyzer::Tag::Error,
|
||||
bool is_orig = false, bool update_conn = true);
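The lookup-or-create behaviour described above is easy to see in a stripped-down form. The following self-contained sketch shows only the control flow, using a placeholder FileState and a hypothetical LookupOrCreate helper rather than the real Manager internals:

#include <map>
#include <set>
#include <string>

struct FileState { double last_activity; };

static FileState* LookupOrCreate(std::map<std::string, FileState*>& id_map,
                                 const std::set<std::string>& ignored,
                                 const std::string& file_id, double now)
	{
	if ( ignored.count(file_id) )
		return 0;                          // analysis for this id was stopped

	std::map<std::string, FileState*>::iterator it = id_map.find(file_id);

	if ( it != id_map.end() )
		{
		it->second->last_activity = now;   // refresh activity time
		return it->second;
		}

	FileState* f = new FileState();        // first sight of this id
	f->last_activity = now;
	id_map[file_id] = f;
	return f;
	}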
|
||||
|
||||
/**
|
||||
* Try to retrieve a file that's being analyzed, using its identifier/hash.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @return the File object mapped to \a file_id, or a null pointer if no
|
||||
* mapping exists.
|
||||
*/
|
||||
File* Lookup(const FileID& file_id) const;
|
||||
File* Lookup(const string& file_id) const;
|
||||
|
||||
/**
|
||||
* Evaluate timeout policy for a file and remove the File object mapped to
|
||||
* \a file_id if needed.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @param is_terminating whether the Manager (and probably Bro) is in a
|
||||
* terminating state. If true, then the timeout cannot be postponed.
|
||||
*/
|
||||
void Timeout(const FileID& file_id, bool is_terminating = ::terminating);
|
||||
void Timeout(const string& file_id, bool is_terminating = ::terminating);
|
||||
|
||||
/**
|
||||
* Immediately remove file_analysis::File object associated with \a unique.
|
||||
* @return false if file string did not map to anything, else true.
|
||||
* Immediately remove file_analysis::File object associated with \a file_id.
|
||||
* @param file_id the file identifier/hash.
|
||||
* @return false if file id string did not map to anything, else true.
|
||||
*/
|
||||
bool RemoveFile(const string& unique);
|
||||
bool RemoveFile(const string& file_id);
|
||||
|
||||
/**
|
||||
* Sets #current_handle to a unique file handle string based on what the
|
||||
* \c get_file_handle event derives from the connection params. The
|
||||
* event queue is flushed so that we can get the handle value immediately.
|
||||
* Sets #current_file_id to a hash of a unique file handle string based on
|
||||
* what the \c get_file_handle event derives from the connection params.
|
||||
* Event queue is flushed so that we can get the handle value immediately.
|
||||
* @param tag network protocol over which the file is transferred.
|
||||
* @param conn network connection over which the file is transferred.
|
||||
* @param is_orig true if the file is being sent from connection originator
|
||||
* or false if it is being sent in the opposite direction.
|
||||
*/
|
||||
void GetFileHandle(analyzer::Tag tag, Connection* c, bool is_orig);
|
||||
|
||||
/**
|
||||
* @return whether file analysis is disabled for the given analyzer.
|
||||
* Check if analysis is available for files transferred over a given
|
||||
* network protocol.
|
||||
* @param tag the network protocol over which files can be transferred and
|
||||
* analyzed by the file analysis framework.
|
||||
* @return whether file analysis is disabled for the analyzer given by
|
||||
* \a tag.
|
||||
*/
|
||||
static bool IsDisabled(analyzer::Tag tag);
|
||||
|
||||
private:
|
||||
StrMap str_map; /**< Map unique string to file_analysis::File. */
|
||||
typedef map<string, Component*> analyzer_map_by_name;
|
||||
typedef map<analyzer::Tag, Component*> analyzer_map_by_tag;
|
||||
typedef map<int, Component*> analyzer_map_by_val;
|
||||
|
||||
void RegisterAnalyzerComponent(Component* component);
|
||||
|
||||
IDMap id_map; /**< Map file ID to file_analysis::File records. */
|
||||
StrSet ignored; /**< Ignored files. Will be finally removed on EOF. */
|
||||
string current_handle; /**< Last file handle set by get_file_handle event.*/
|
||||
IDSet ignored; /**< Ignored files. Will be finally removed on EOF. */
|
||||
string current_file_id; /**< Hash of what get_file_handle event sets. */
|
||||
EnumType* tag_enum_type; /**< File analyzer tag type. */
|
||||
|
||||
analyzer_map_by_name analyzers_by_name;
|
||||
analyzer_map_by_tag analyzers_by_tag;
|
||||
analyzer_map_by_val analyzers_by_val;
|
||||
|
||||
static TableVal* disabled; /**< Table of disabled analyzers. */
|
||||
static string salt; /**< A salt added to file handles before hashing. */
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
|
3
src/file_analysis/analyzer/CMakeLists.txt
Normal file
3
src/file_analysis/analyzer/CMakeLists.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
add_subdirectory(data_event)
|
||||
add_subdirectory(extract)
|
||||
add_subdirectory(hash)
|
8
src/file_analysis/analyzer/data_event/CMakeLists.txt
Normal file
8
src/file_analysis/analyzer/data_event/CMakeLists.txt
Normal file
|
@ -0,0 +1,8 @@
|
|||
include(BroPlugin)
|
||||
|
||||
include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
${CMAKE_CURRENT_BINARY_DIR})
|
||||
|
||||
bro_plugin_begin(Bro FileDataEvent)
|
||||
bro_plugin_cc(DataEvent.cc Plugin.cc)
|
||||
bro_plugin_end()
|
69
src/file_analysis/analyzer/data_event/DataEvent.h
Normal file
69
src/file_analysis/analyzer/data_event/DataEvent.h
Normal file
|
@ -0,0 +1,69 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYSIS_DATAEVENT_H
|
||||
#define FILE_ANALYSIS_DATAEVENT_H
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "Val.h"
|
||||
#include "File.h"
|
||||
#include "Analyzer.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
/**
|
||||
* An analyzer to send file data to script-layer via events.
|
||||
*/
|
||||
class DataEvent : public file_analysis::Analyzer {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Generates the event, if any, specified by the "chunk_event" field of this
|
||||
* analyzer's \c AnalyzerArgs. This is for non-sequential file data input.
|
||||
* @param data pointer to start of file data chunk.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @param offset number of bytes from start of file at which chunk occurs.
|
||||
* @return always true
|
||||
*/
|
||||
virtual bool DeliverChunk(const u_char* data, uint64 len, uint64 offset);
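Raising such an event boils down to queuing the handler with the file record, the data chunk, and the offset as arguments. A hedged sketch of that step follows; RaiseChunkEvent is a hypothetical helper, its file_val and chunk_event parameters are assumptions, and the real DataEvent.cc may build the value list differently. It reuses only facilities that appear elsewhere in this diff (val_list, StringVal, mgr.QueueEvent):

#include "Event.h"
#include "EventHandler.h"
#include "Val.h"

// "file_val" is assumed to be the file's fa_file record value and
// "chunk_event" the handler taken from the analyzer's AnalyzerArgs.
static void RaiseChunkEvent(EventHandlerPtr chunk_event, Val* file_val,
                            const u_char* data, uint64 len, uint64 offset)
	{
	if ( ! chunk_event )
		return;

	val_list* vl = new val_list;
	vl->append(file_val->Ref());
	vl->append(new StringVal(new BroString(data, len, 0)));
	vl->append(new Val(offset, TYPE_COUNT));

	mgr.QueueEvent(chunk_event, vl, SOURCE_LOCAL);
	}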
|
||||
|
||||
/**
|
||||
* Generates the event, if any, specified by the "stream_event" field of
|
||||
* this analyzer's \c AnalyzerArgs. This is for sequential file data input.
|
||||
* @param data pointer to start of file data chunk.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @return always true
|
||||
*/
|
||||
virtual bool DeliverStream(const u_char* data, uint64 len);
|
||||
|
||||
/**
|
||||
* Create a new instance of a DataEvent analyzer.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
* @return the new DataEvent analyzer instance or a null pointer if
|
||||
* no "chunk_event" or "stream_event" field was specfied in \a args.
|
||||
*/
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file);
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
* @param ce pointer to event handler which will be called to receive
|
||||
* non-sequential file data.
|
||||
* @param se pointer to event handler which will be called to receive
|
||||
* sequential file data.
|
||||
*/
|
||||
DataEvent(RecordVal* args, File* file,
|
||||
EventHandlerPtr ce, EventHandlerPtr se);
|
||||
|
||||
private:
|
||||
EventHandlerPtr chunk_event;
|
||||
EventHandlerPtr stream_event;
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
#endif
|
26
src/file_analysis/analyzer/data_event/Plugin.cc
Normal file
26
src/file_analysis/analyzer/data_event/Plugin.cc
Normal file
|
@ -0,0 +1,26 @@
|
|||
#include "plugin/Plugin.h"
|
||||
#include "file_analysis/Component.h"
|
||||
|
||||
#include "DataEvent.h"
|
||||
|
||||
namespace plugin { namespace Bro_FileDataEvent {
|
||||
|
||||
class Plugin : public plugin::Plugin {
|
||||
protected:
|
||||
void InitPreScript()
|
||||
{
|
||||
SetName("Bro::FileDataEvent");
|
||||
SetVersion(-1);
|
||||
SetAPIVersion(BRO_PLUGIN_API_VERSION);
|
||||
SetDynamicPlugin(false);
|
||||
|
||||
SetDescription("Delivers file content via events");
|
||||
|
||||
AddComponent(new ::file_analysis::Component("DATA_EVENT",
|
||||
::file_analysis::DataEvent::Instantiate));
|
||||
}
|
||||
};
|
||||
|
||||
Plugin __plugin;
|
||||
|
||||
} }
|
8
src/file_analysis/analyzer/extract/CMakeLists.txt
Normal file
8
src/file_analysis/analyzer/extract/CMakeLists.txt
Normal file
|
@ -0,0 +1,8 @@
|
|||
include(BroPlugin)
|
||||
|
||||
include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
${CMAKE_CURRENT_BINARY_DIR})
|
||||
|
||||
bro_plugin_begin(Bro FileExtract)
|
||||
bro_plugin_cc(Extract.cc Plugin.cc)
|
||||
bro_plugin_end()
|
62
src/file_analysis/analyzer/extract/Extract.h
Normal file
62
src/file_analysis/analyzer/extract/Extract.h
Normal file
|
@ -0,0 +1,62 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYSIS_EXTRACT_H
|
||||
#define FILE_ANALYSIS_EXTRACT_H
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "Val.h"
|
||||
#include "File.h"
|
||||
#include "Analyzer.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
/**
|
||||
* An analyzer to extract content of files to local disk.
|
||||
*/
|
||||
class Extract : public file_analysis::Analyzer {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Destructor. Will close the file that was used for data extraction.
|
||||
*/
|
||||
virtual ~Extract();
|
||||
|
||||
/**
|
||||
* Write a chunk of file data to the local extraction file.
|
||||
* @param data pointer to a chunk of file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @param offset number of bytes from start of file at which chunk starts.
|
||||
* @return false if there was no extraction file open and the data couldn't
|
||||
* be written, else true.
|
||||
*/
|
||||
virtual bool DeliverChunk(const u_char* data, uint64 len, uint64 offset);
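Under the assumption that the constructor opened a plain POSIX file descriptor, writing a non-sequential chunk is essentially a positional write. A minimal sketch of that step (WriteChunkAt is a hypothetical helper; the actual Extract.cc error handling may differ):

#include <sys/types.h>
#include <unistd.h>
#include <cstdint>

static bool WriteChunkAt(int fd, const unsigned char* data, uint64_t len, uint64_t offset)
	{
	if ( fd < 0 )
		return false;                      // no extraction file is open

	// pwrite() puts the chunk at its file offset without moving the file
	// position, which suits out-of-order delivery.
	ssize_t n = pwrite(fd, data, len, static_cast<off_t>(offset));

	return n == static_cast<ssize_t>(len);
	}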
|
||||
|
||||
/**
|
||||
* Create a new instance of an Extract analyzer.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
* @return the new Extract analyzer instance or a null pointer if the
|
||||
* the "extraction_file" field of \a args wasn't set.
|
||||
*/
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file);
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
* @param arg_filename a file system path which specifies the local file
|
||||
* to which the contents of the file will be extracted/written.
|
||||
*/
|
||||
Extract(RecordVal* args, File* file, const string& arg_filename);
|
||||
|
||||
private:
|
||||
string filename;
|
||||
int fd;
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
#endif
|
26
src/file_analysis/analyzer/extract/Plugin.cc
Normal file
26
src/file_analysis/analyzer/extract/Plugin.cc
Normal file
|
@ -0,0 +1,26 @@
|
|||
#include "plugin/Plugin.h"
|
||||
#include "file_analysis/Component.h"
|
||||
|
||||
#include "Extract.h"
|
||||
|
||||
namespace plugin { namespace Bro_FileExtract {
|
||||
|
||||
class Plugin : public plugin::Plugin {
|
||||
protected:
|
||||
void InitPreScript()
|
||||
{
|
||||
SetName("Bro::FileExtract");
|
||||
SetVersion(-1);
|
||||
SetAPIVersion(BRO_PLUGIN_API_VERSION);
|
||||
SetDynamicPlugin(false);
|
||||
|
||||
SetDescription("Extract file content to local file system");
|
||||
|
||||
AddComponent(new ::file_analysis::Component("EXTRACT",
|
||||
::file_analysis::Extract::Instantiate));
|
||||
}
|
||||
};
|
||||
|
||||
Plugin __plugin;
|
||||
|
||||
} }
|
9
src/file_analysis/analyzer/hash/CMakeLists.txt
Normal file
9
src/file_analysis/analyzer/hash/CMakeLists.txt
Normal file
|
@ -0,0 +1,9 @@
|
|||
include(BroPlugin)
|
||||
|
||||
include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
${CMAKE_CURRENT_BINARY_DIR})
|
||||
|
||||
bro_plugin_begin(Bro FileHash)
|
||||
bro_plugin_cc(Hash.cc Plugin.cc)
|
||||
bro_plugin_bif(events.bif)
|
||||
bro_plugin_end()
|
160
src/file_analysis/analyzer/hash/Hash.h
Normal file
160
src/file_analysis/analyzer/hash/Hash.h
Normal file
|
@ -0,0 +1,160 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYSIS_HASH_H
|
||||
#define FILE_ANALYSIS_HASH_H
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "Val.h"
|
||||
#include "OpaqueVal.h"
|
||||
#include "File.h"
|
||||
#include "Analyzer.h"
|
||||
|
||||
#include "events.bif.h"
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
/**
|
||||
* An analyzer to produce a hash of file contents.
|
||||
*/
|
||||
class Hash : public file_analysis::Analyzer {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Destructor.
|
||||
*/
|
||||
virtual ~Hash();
|
||||
|
||||
/**
|
||||
* Incrementally hash next chunk of file contents.
|
||||
* @param data pointer to start of a chunk of a file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* @return false if the digest is in an invalid state, else true.
|
||||
*/
|
||||
virtual bool DeliverStream(const u_char* data, uint64 len);
|
||||
|
||||
/**
|
||||
* Finalizes the hash and raises a "file_hash" event.
|
||||
* @return always false so the analyzer will be detached from the file.
|
||||
*/
|
||||
virtual bool EndOfFile();
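The incremental-hash-then-finalize pattern the class follows can be shown in isolation. The sketch below uses OpenSSL's MD5_* primitives directly instead of Bro's HashVal wrapper, and Md5StreamSketch is a hypothetical type introduced purely to keep the example self-contained:

#include <openssl/md5.h>
#include <cstddef>
#include <string>

struct Md5StreamSketch
	{
	MD5_CTX ctx;
	bool fed;   // becomes true once any content has been seen

	Md5StreamSketch() : fed(false) { MD5_Init(&ctx); }

	void Feed(const unsigned char* data, size_t len)
		{
		MD5_Update(&ctx, data, len);
		fed = true;
		}

	// Produce the final digest as lowercase hex; meaningful only if fed is true.
	std::string Finalize()
		{
		unsigned char digest[MD5_DIGEST_LENGTH];
		MD5_Final(digest, &ctx);

		static const char hex[] = "0123456789abcdef";
		std::string out;

		for ( int i = 0; i < MD5_DIGEST_LENGTH; ++i )
			{
			out += hex[digest[i] >> 4];
			out += hex[digest[i] & 0xf];
			}

		return out;
		}
	};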
|
||||
|
||||
/**
|
||||
* Missing data can't be handled, so just indicate that this analyzer should
|
||||
* be removed from receiving further data. The hash will not be finalized.
|
||||
* @param offset byte offset in file at which missing chunk starts.
|
||||
* @param len number of missing bytes.
|
||||
* @return always false so analyzer will detach from file.
|
||||
*/
|
||||
virtual bool Undelivered(uint64 offset, uint64 len);
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
* @param hv specific hash calculator object.
|
||||
* @param kind human readable name of the hash algorithm to use.
|
||||
*/
|
||||
Hash(RecordVal* args, File* file, HashVal* hv, const char* kind);
|
||||
|
||||
/**
|
||||
* If some file contents have been seen, finalizes the hash of them and
|
||||
* raises the "file_hash" event with the results.
|
||||
*/
|
||||
void Finalize();
|
||||
|
||||
private:
|
||||
HashVal* hash;
|
||||
bool fed;
|
||||
const char* kind;
|
||||
};
|
||||
|
||||
/**
|
||||
* An analyzer to produce an MD5 hash of file contents.
|
||||
*/
|
||||
class MD5 : public Hash {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Create a new instance of the MD5 hashing file analyzer.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
* @return the new MD5 analyzer instance or a null pointer if there's no
|
||||
* handler for the "file_hash" event.
|
||||
*/
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file)
|
||||
{ return file_hash ? new MD5(args, file) : 0; }
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
*/
|
||||
MD5(RecordVal* args, File* file)
|
||||
: Hash(args, file, new MD5Val(), "md5")
|
||||
{}
|
||||
};
|
||||
|
||||
/**
|
||||
* An analyzer to produce a SHA1 hash of file contents.
|
||||
*/
|
||||
class SHA1 : public Hash {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Create a new instance of the SHA1 hashing file analyzer.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
* @return the new SHA1 analyzer instance or a null pointer if there's no
|
||||
* handler for the "file_hash" event.
|
||||
*/
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file)
|
||||
{ return file_hash ? new SHA1(args, file) : 0; }
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
*/
|
||||
SHA1(RecordVal* args, File* file)
|
||||
: Hash(args, file, new SHA1Val(), "sha1")
|
||||
{}
|
||||
};
|
||||
|
||||
/**
|
||||
* An analyzer to produce a SHA256 hash of file contents.
|
||||
*/
|
||||
class SHA256 : public Hash {
|
||||
public:
|
||||
|
||||
/**
|
||||
* Create a new instance of the SHA256 hashing file analyzer.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
* @return the new SHA256 analyzer instance or a null pointer if there's no
|
||||
* handler for the "file_hash" event.
|
||||
*/
|
||||
static file_analysis::Analyzer* Instantiate(RecordVal* args, File* file)
|
||||
{ return file_hash ? new SHA256(args, file) : 0; }
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
* @param args the \c AnalyzerArgs value which represents the analyzer.
|
||||
* @param file the file to which the analyzer will be attached.
|
||||
*/
|
||||
SHA256(RecordVal* args, File* file)
|
||||
: Hash(args, file, new SHA256Val(), "sha256")
|
||||
{}
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
#endif
|
33
src/file_analysis/analyzer/hash/Plugin.cc
Normal file
33
src/file_analysis/analyzer/hash/Plugin.cc
Normal file
|
@ -0,0 +1,33 @@
|
|||
#include "plugin/Plugin.h"
|
||||
#include "file_analysis/Component.h"
|
||||
|
||||
#include "Hash.h"
|
||||
|
||||
namespace plugin { namespace Bro_FileHash {
|
||||
|
||||
class Plugin : public plugin::Plugin {
|
||||
protected:
|
||||
void InitPreScript()
|
||||
{
|
||||
SetName("Bro::FileHash");
|
||||
SetVersion(-1);
|
||||
SetAPIVersion(BRO_PLUGIN_API_VERSION);
|
||||
SetDynamicPlugin(false);
|
||||
|
||||
SetDescription("Hash file content");
|
||||
|
||||
AddComponent(new ::file_analysis::Component("MD5",
|
||||
::file_analysis::MD5::Instantiate));
|
||||
AddComponent(new ::file_analysis::Component("SHA1",
|
||||
::file_analysis::SHA1::Instantiate));
|
||||
AddComponent(new ::file_analysis::Component("SHA256",
|
||||
::file_analysis::SHA256::Instantiate));
|
||||
|
||||
extern std::list<std::pair<const char*, int> > __bif_events_init();
|
||||
AddBifInitFunction(&__bif_events_init);
|
||||
}
|
||||
};
|
||||
|
||||
Plugin __plugin;
|
||||
|
||||
} }
|
12
src/file_analysis/analyzer/hash/events.bif
Normal file
12
src/file_analysis/analyzer/hash/events.bif
Normal file
|
@ -0,0 +1,12 @@
|
|||
## This event is generated each time file analysis generates a digest of the
|
||||
## file contents.
|
||||
##
|
||||
## f: The file.
|
||||
##
|
||||
## kind: The type of digest algorithm.
|
||||
##
|
||||
## hash: The result of the hashing.
|
||||
##
|
||||
## .. bro:see:: FileAnalysis::add_analyzer FileAnalysis::ANALYZER_MD5
|
||||
## FileAnalysis::ANALYZER_SHA1 FileAnalysis::ANALYZER_SHA256
|
||||
event file_hash%(f: fa_file, kind: string, hash: string%);
|
61
src/file_analysis/file_analysis.bif
Normal file
61
src/file_analysis/file_analysis.bif
Normal file
|
@ -0,0 +1,61 @@
|
|||
##! Internal functions and types used by the file analysis framework.
|
||||
|
||||
module FileAnalysis;
|
||||
|
||||
%%{
|
||||
#include "file_analysis/Manager.h"
|
||||
%%}
|
||||
|
||||
type AnalyzerArgs: record;
|
||||
|
||||
## :bro:see:`FileAnalysis::set_timeout_interval`.
|
||||
function FileAnalysis::__set_timeout_interval%(file_id: string, t: interval%): bool
|
||||
%{
|
||||
bool result = file_mgr->SetTimeoutInterval(file_id->CheckString(), t);
|
||||
return new Val(result, TYPE_BOOL);
|
||||
%}
|
||||
|
||||
## :bro:see:`FileAnalysis::add_analyzer`.
|
||||
function FileAnalysis::__add_analyzer%(file_id: string, args: any%): bool
|
||||
%{
|
||||
using BifType::Record::FileAnalysis::AnalyzerArgs;
|
||||
RecordVal* rv = args->AsRecordVal()->CoerceTo(AnalyzerArgs);
|
||||
bool result = file_mgr->AddAnalyzer(file_id->CheckString(), rv);
|
||||
Unref(rv);
|
||||
return new Val(result, TYPE_BOOL);
|
||||
%}
|
||||
|
||||
## :bro:see:`FileAnalysis::remove_analyzer`.
|
||||
function FileAnalysis::__remove_analyzer%(file_id: string, args: any%): bool
|
||||
%{
|
||||
using BifType::Record::FileAnalysis::AnalyzerArgs;
|
||||
RecordVal* rv = args->AsRecordVal()->CoerceTo(AnalyzerArgs);
|
||||
bool result = file_mgr->RemoveAnalyzer(file_id->CheckString(), rv);
|
||||
Unref(rv);
|
||||
return new Val(result, TYPE_BOOL);
|
||||
%}
|
||||
|
||||
## :bro:see:`FileAnalysis::stop`.
|
||||
function FileAnalysis::__stop%(file_id: string%): bool
|
||||
%{
|
||||
bool result = file_mgr->IgnoreFile(file_id->CheckString());
|
||||
return new Val(result, TYPE_BOOL);
|
||||
%}
|
||||
|
||||
module GLOBAL;
|
||||
|
||||
## For use within a :bro:see:`get_file_handle` handler to set a unique
|
||||
## identifier to associate with the current input to the file analysis
|
||||
## framework. Using an empty string for the handle signifies that the
|
||||
## input will be ignored/discarded.
|
||||
##
|
||||
## handle: A string that uniquely identifies a file.
|
||||
##
|
||||
## .. bro:see:: get_file_handle
|
||||
function set_file_handle%(handle: string%): any
|
||||
%{
|
||||
file_mgr->SetHandle(handle->CheckString());
|
||||
return 0;
|
||||
%}
|
||||
|
||||
const FileAnalysis::salt: string;
|
|
@ -9,6 +9,7 @@ module Input;
|
|||
|
||||
type TableDescription: record;
|
||||
type EventDescription: record;
|
||||
type AnalysisDescription: record;
|
||||
|
||||
function Input::__create_table_stream%(description: Input::TableDescription%) : bool
|
||||
%{
|
||||
|
@ -22,6 +23,12 @@ function Input::__create_event_stream%(description: Input::EventDescription%) :
|
|||
return new Val(res, TYPE_BOOL);
|
||||
%}
|
||||
|
||||
function Input::__create_analysis_stream%(description: Input::AnalysisDescription%) : bool
|
||||
%{
|
||||
bool res = input_mgr->CreateAnalysisStream(description->AsRecordVal());
|
||||
return new Val(res, TYPE_BOOL);
|
||||
%}
|
||||
|
||||
function Input::__remove_stream%(id: string%) : bool
|
||||
%{
|
||||
bool res = input_mgr->RemoveStream(id->AsString()->CheckString());
|
||||
|
|
|
@ -15,10 +15,9 @@
|
|||
#include "EventHandler.h"
|
||||
#include "NetVar.h"
|
||||
#include "Net.h"
|
||||
|
||||
|
||||
#include "CompHash.h"
|
||||
|
||||
#include "../file_analysis/Manager.h"
|
||||
#include "../threading/SerialTypes.h"
|
||||
|
||||
using namespace input;
|
||||
|
@ -148,6 +147,14 @@ public:
|
|||
~EventStream();
|
||||
};
|
||||
|
||||
class Manager::AnalysisStream: public Manager::Stream {
|
||||
public:
|
||||
string file_id;
|
||||
|
||||
AnalysisStream();
|
||||
~AnalysisStream();
|
||||
};
|
||||
|
||||
Manager::TableStream::TableStream() : Manager::Stream::Stream()
|
||||
{
|
||||
stream_type = TABLE_STREAM;
|
||||
|
@ -198,6 +205,15 @@ Manager::TableStream::~TableStream()
|
|||
}
|
||||
}
|
||||
|
||||
Manager::AnalysisStream::AnalysisStream() : Manager::Stream::Stream()
|
||||
{
|
||||
stream_type = ANALYSIS_STREAM;
|
||||
}
|
||||
|
||||
Manager::AnalysisStream::~AnalysisStream()
|
||||
{
|
||||
}
|
||||
|
||||
Manager::Manager()
|
||||
{
|
||||
end_of_data = internal_handler("Input::end_of_data");
|
||||
|
@ -274,7 +290,8 @@ bool Manager::CreateStream(Stream* info, RecordVal* description)
|
|||
|
||||
RecordType* rtype = description->Type()->AsRecordType();
|
||||
if ( ! ( same_type(rtype, BifType::Record::Input::TableDescription, 0)
|
||||
|| same_type(rtype, BifType::Record::Input::EventDescription, 0) ) )
|
||||
|| same_type(rtype, BifType::Record::Input::EventDescription, 0)
|
||||
|| same_type(rtype, BifType::Record::Input::AnalysisDescription, 0) ) )
|
||||
{
|
||||
reporter->Error("Streamdescription argument not of right type for new input stream");
|
||||
return false;
|
||||
|
@ -303,6 +320,7 @@ bool Manager::CreateStream(Stream* info, RecordVal* description)
|
|||
|
||||
ReaderBackend::ReaderInfo* rinfo = new ReaderBackend::ReaderInfo();
|
||||
rinfo->source = copy_string(source.c_str());
|
||||
rinfo->name = copy_string(name.c_str());
|
||||
|
||||
EnumVal* mode = description->LookupWithDefault(rtype->FieldOffset("mode"))->AsEnumVal();
|
||||
switch ( mode->InternalInt() )
|
||||
|
@ -680,6 +698,40 @@ bool Manager::CreateTableStream(RecordVal* fval)
|
|||
return true;
|
||||
}
|
||||
|
||||
bool Manager::CreateAnalysisStream(RecordVal* fval)
|
||||
{
|
||||
RecordType* rtype = fval->Type()->AsRecordType();
|
||||
|
||||
if ( ! same_type(rtype, BifType::Record::Input::AnalysisDescription, 0) )
|
||||
{
|
||||
reporter->Error("AnalysisDescription argument not of right type");
|
||||
return false;
|
||||
}
|
||||
|
||||
AnalysisStream* stream = new AnalysisStream();
|
||||
|
||||
if ( ! CreateStream(stream, fval) )
|
||||
{
|
||||
delete stream;
|
||||
return false;
|
||||
}
|
||||
|
||||
stream->file_id = file_mgr->HashHandle(stream->name);
|
||||
|
||||
assert(stream->reader);
|
||||
|
||||
// reader takes in a byte stream as the only field
|
||||
Field** fields = new Field*[1];
|
||||
fields[0] = new Field("bytestream", 0, TYPE_STRING, TYPE_VOID, false);
|
||||
stream->reader->Init(1, fields);
|
||||
|
||||
readers[stream->reader] = stream;
|
||||
|
||||
DBG_LOG(DBG_INPUT, "Successfully created analysis stream %s",
|
||||
stream->name.c_str());
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Manager::IsCompatibleType(BroType* t, bool atomic_only)
|
||||
{
|
||||
|
@ -966,6 +1018,15 @@ void Manager::SendEntry(ReaderFrontend* reader, Value* *vals)
|
|||
readFields = SendEventStreamEvent(i, type, vals);
|
||||
}
|
||||
|
||||
else if ( i->stream_type == ANALYSIS_STREAM )
|
||||
{
|
||||
readFields = 1;
|
||||
assert(vals[0]->type == TYPE_STRING);
|
||||
file_mgr->DataIn(reinterpret_cast<u_char*>(vals[0]->val.string_val.data),
|
||||
vals[0]->val.string_val.length,
|
||||
static_cast<AnalysisStream*>(i)->file_id, i->name);
|
||||
}
|
||||
|
||||
else
|
||||
assert(false);
|
||||
|
||||
|
@ -1179,8 +1240,11 @@ void Manager::EndCurrentSend(ReaderFrontend* reader)
|
|||
DBG_LOG(DBG_INPUT, "Got EndCurrentSend stream %s", i->name.c_str());
|
||||
#endif
|
||||
|
||||
if ( i->stream_type == EVENT_STREAM )
|
||||
if ( i->stream_type != TABLE_STREAM )
|
||||
{
|
||||
#ifdef DEBUG
|
||||
DBG_LOG(DBG_INPUT, "%s is event, sending end of data", i->name.c_str());
|
||||
#endif
|
||||
// just signal the end of the data source
|
||||
SendEndOfData(i);
|
||||
return;
|
||||
|
@ -1285,9 +1349,17 @@ void Manager::SendEndOfData(ReaderFrontend* reader)
|
|||
SendEndOfData(i);
|
||||
}
|
||||
|
||||
|
||||
void Manager::SendEndOfData(const Stream *i)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
DBG_LOG(DBG_INPUT, "SendEndOfData for stream %s",
|
||||
i->name.c_str());
|
||||
#endif
|
||||
SendEvent(end_of_data, 2, new StringVal(i->name.c_str()), new StringVal(i->info->source));
|
||||
|
||||
if ( i->stream_type == ANALYSIS_STREAM )
|
||||
file_mgr->EndOfFile(static_cast<const AnalysisStream*>(i)->file_id);
|
||||
}
|
||||
|
||||
void Manager::Put(ReaderFrontend* reader, Value* *vals)
|
||||
|
@ -1299,6 +1371,11 @@ void Manager::Put(ReaderFrontend* reader, Value* *vals)
|
|||
return;
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
DBG_LOG(DBG_INPUT, "Put for stream %s",
|
||||
i->name.c_str());
|
||||
#endif
|
||||
|
||||
int readFields = 0;
|
||||
|
||||
if ( i->stream_type == TABLE_STREAM )
|
||||
|
@ -1310,6 +1387,15 @@ void Manager::Put(ReaderFrontend* reader, Value* *vals)
|
|||
readFields = SendEventStreamEvent(i, type, vals);
|
||||
}
|
||||
|
||||
else if ( i->stream_type == ANALYSIS_STREAM )
|
||||
{
|
||||
readFields = 1;
|
||||
assert(vals[0]->type == TYPE_STRING);
|
||||
file_mgr->DataIn(reinterpret_cast<u_char*>(vals[0]->val.string_val.data),
|
||||
vals[0]->val.string_val.length,
|
||||
static_cast<AnalysisStream*>(i)->file_id, i->name);
|
||||
}
|
||||
|
||||
else
|
||||
assert(false);
|
||||
|
||||
|
@ -1577,6 +1663,12 @@ bool Manager::Delete(ReaderFrontend* reader, Value* *vals)
|
|||
success = true;
|
||||
}
|
||||
|
||||
else if ( i->stream_type == ANALYSIS_STREAM )
|
||||
{
|
||||
// can't do anything
|
||||
success = true;
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
assert(false);
|
||||
|
@ -1622,6 +1714,11 @@ bool Manager::SendEvent(const string& name, const int num_vals, Value* *vals)
|
|||
return false;
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
DBG_LOG(DBG_INPUT, "SendEvent for event %s with num_vals vals",
|
||||
name.c_str(), num_vals);
|
||||
#endif
|
||||
|
||||
RecordType *type = handler->FType()->Args();
|
||||
int num_event_vals = type->NumFields();
|
||||
if ( num_vals != num_event_vals )
|
||||
|
@ -1634,7 +1731,7 @@ bool Manager::SendEvent(const string& name, const int num_vals, Value* *vals)
|
|||
for ( int i = 0; i < num_vals; i++)
|
||||
vl->append(ValueToVal(vals[i], type->FieldType(i)));
|
||||
|
||||
mgr.Dispatch(new Event(handler, vl));
|
||||
mgr.QueueEvent(handler, vl, SOURCE_LOCAL);
|
||||
|
||||
for ( int i = 0; i < num_vals; i++ )
|
||||
delete vals[i];
|
||||
|
@ -1648,6 +1745,11 @@ void Manager::SendEvent(EventHandlerPtr ev, const int numvals, ...)
|
|||
{
|
||||
val_list* vl = new val_list;
|
||||
|
||||
#ifdef DEBUG
|
||||
DBG_LOG(DBG_INPUT, "SendEvent with %d vals",
|
||||
numvals);
|
||||
#endif
|
||||
|
||||
va_list lP;
|
||||
va_start(lP, numvals);
|
||||
for ( int i = 0; i < numvals; i++ )
|
||||
|
@ -1662,6 +1764,11 @@ void Manager::SendEvent(EventHandlerPtr ev, list<Val*> events)
|
|||
{
|
||||
val_list* vl = new val_list;
|
||||
|
||||
#ifdef DEBUG
|
||||
DBG_LOG(DBG_INPUT, "SendEvent with %d vals (list)",
|
||||
events.size());
|
||||
#endif
|
||||
|
||||
for ( list<Val*>::iterator i = events.begin(); i != events.end(); i++ )
|
||||
{
|
||||
vl->append( *i );
|
||||
|
@ -2166,3 +2273,18 @@ Manager::Stream* Manager::FindStream(ReaderFrontend* reader)
|
|||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Function is called on Bro shutdown.
|
||||
// Signal all frontends that they will cease operation.
|
||||
void Manager::Terminate()
|
||||
{
|
||||
for ( map<ReaderFrontend*, Stream*>::iterator i = readers.begin(); i != readers.end(); ++i )
|
||||
{
|
||||
if ( i->second->removed )
|
||||
continue;
|
||||
|
||||
i->second->removed = true;
|
||||
i->second->reader->Stop();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -55,6 +55,18 @@ public:
|
|||
*/
|
||||
bool CreateEventStream(RecordVal* description);
|
||||
|
||||
/**
|
||||
* Creates a new input stream which will forward the data from the data
|
||||
* source on to the file analysis framework. The internal BiF defined
|
||||
* in input.bif just forwards here. For an input reader to be compatible
|
||||
* with this method, it must be able to accept a filter of a single string
|
||||
* type (i.e. it reads a byte stream).
|
||||
*
|
||||
* @param description A record of the script type \c
|
||||
* Input::AnalysisDescription
|
||||
*/
|
||||
bool CreateAnalysisStream(RecordVal* description);
|
||||
|
||||
/**
|
||||
* Force update on a input stream. Forces a re-read of the whole
|
||||
* input source. Usually used when an input stream is opened in
|
||||
|
@ -79,6 +91,11 @@ public:
|
|||
*/
|
||||
bool RemoveStream(const string &id);
|
||||
|
||||
/**
|
||||
* Signals the manager to shutdown at Bro's termination.
|
||||
*/
|
||||
void Terminate();
|
||||
|
||||
protected:
|
||||
friend class ReaderFrontend;
|
||||
friend class PutMessage;
|
||||
|
@ -138,6 +155,7 @@ private:
|
|||
class Stream;
|
||||
class TableStream;
|
||||
class EventStream;
|
||||
class AnalysisStream;
|
||||
|
||||
// Actual RemoveStream implementation -- the function's public and
|
||||
// protected definitions are wrappers around this function.
|
||||
|
@ -202,7 +220,7 @@ private:
|
|||
Stream* FindStream(const string &name);
|
||||
Stream* FindStream(ReaderFrontend* reader);
|
||||
|
||||
enum StreamType { TABLE_STREAM, EVENT_STREAM };
|
||||
enum StreamType { TABLE_STREAM, EVENT_STREAM, ANALYSIS_STREAM };
|
||||
|
||||
map<ReaderFrontend*, Stream*> readers;
|
||||
|
||||
|
|
|
@ -85,6 +85,11 @@ public:
|
|||
*/
|
||||
const char* source;
|
||||
|
||||
/**
|
||||
* The name of the input stream.
|
||||
*/
|
||||
const char* name;
|
||||
|
||||
/**
|
||||
* A map of key/value pairs corresponding to the relevant
|
||||
* filter's "config" table.
|
||||
|
@ -99,12 +104,14 @@ public:
|
|||
ReaderInfo()
|
||||
{
|
||||
source = 0;
|
||||
name = 0;
|
||||
mode = MODE_NONE;
|
||||
}
|
||||
|
||||
ReaderInfo(const ReaderInfo& other)
|
||||
{
|
||||
source = other.source ? copy_string(other.source) : 0;
|
||||
name = other.name ? copy_string(other.name) : 0;
|
||||
mode = other.mode;
|
||||
|
||||
for ( config_map::const_iterator i = other.config.begin(); i != other.config.end(); i++ )
|
||||
|
|
|
@ -1,189 +0,0 @@
|
|||
/* The following code declares classes to read from and write to
|
||||
* file descriptors or file handles.
|
||||
*
|
||||
* See
|
||||
* http://www.josuttis.com/cppcode
|
||||
* for details and the latest version.
|
||||
*
|
||||
* - open:
|
||||
* - integrating BUFSIZ on some systems?
|
||||
* - optimized reading of multiple characters
|
||||
* - stream for reading AND writing
|
||||
* - i18n
|
||||
*
|
||||
* (C) Copyright Nicolai M. Josuttis 2001.
|
||||
* Permission to copy, use, modify, sell and distribute this software
|
||||
* is granted provided this copyright notice appears in all copies.
|
||||
* This software is provided "as is" without express or implied
|
||||
* warranty, and with no claim as to its suitability for any purpose.
|
||||
*
|
||||
* Version: Jul 28, 2002
|
||||
* History:
|
||||
* Jul 28, 2002: bugfix memcpy() => memmove()
|
||||
* fdinbuf::underflow(): cast for return statements
|
||||
* Aug 05, 2001: first public version
|
||||
*/
|
||||
#ifndef BOOST_FDSTREAM_HPP
|
||||
#define BOOST_FDSTREAM_HPP
|
||||
|
||||
#include <istream>
|
||||
#include <ostream>
|
||||
#include <streambuf>
|
||||
// for EOF:
|
||||
#include <cstdio>
|
||||
// for memmove():
|
||||
#include <cstring>
|
||||
|
||||
|
||||
|
||||
// low-level read and write functions
|
||||
#ifdef _MSC_VER
|
||||
# include <io.h>
|
||||
#else
|
||||
# include <sys/errno.h>
|
||||
# include <unistd.h>
|
||||
//extern "C" {
|
||||
// int write (int fd, const char* buf, int num);
|
||||
// int read (int fd, char* buf, int num);
|
||||
//}
|
||||
#endif
|
||||
|
||||
|
||||
// BEGIN namespace BOOST
|
||||
namespace boost {
|
||||
|
||||
|
||||
/************************************************************
|
||||
* fdostream
|
||||
* - a stream that writes on a file descriptor
|
||||
************************************************************/
|
||||
|
||||
|
||||
class fdoutbuf : public std::streambuf {
|
||||
protected:
|
||||
int fd; // file descriptor
|
||||
public:
|
||||
// constructor
|
||||
fdoutbuf (int _fd) : fd(_fd) {
|
||||
}
|
||||
protected:
|
||||
// write one character
|
||||
virtual int_type overflow (int_type c) {
|
||||
if (c != EOF) {
|
||||
char z = c;
|
||||
if (write (fd, &z, 1) != 1) {
|
||||
return EOF;
|
||||
}
|
||||
}
|
||||
return c;
|
||||
}
|
||||
// write multiple characters
|
||||
virtual
|
||||
std::streamsize xsputn (const char* s,
|
||||
std::streamsize num) {
|
||||
return write(fd,s,num);
|
||||
}
|
||||
};
|
||||
|
||||
class fdostream : public std::ostream {
|
||||
protected:
|
||||
fdoutbuf buf;
|
||||
public:
|
||||
fdostream (int fd) : std::ostream(0), buf(fd) {
|
||||
rdbuf(&buf);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/************************************************************
|
||||
* fdistream
|
||||
* - a stream that reads on a file descriptor
|
||||
************************************************************/
|
||||
|
||||
class fdinbuf : public std::streambuf {
|
||||
protected:
|
||||
int fd; // file descriptor
|
||||
protected:
|
||||
/* data buffer:
|
||||
* - at most, pbSize characters in putback area plus
|
||||
* - at most, bufSize characters in ordinary read buffer
|
||||
*/
|
||||
static const int pbSize = 4; // size of putback area
|
||||
static const int bufSize = 1024; // size of the data buffer
|
||||
char buffer[bufSize+pbSize]; // data buffer
|
||||
|
||||
public:
|
||||
/* constructor
|
||||
* - initialize file descriptor
|
||||
* - initialize empty data buffer
|
||||
* - no putback area
|
||||
* => force underflow()
|
||||
*/
|
||||
fdinbuf (int _fd) : fd(_fd) {
|
||||
setg (buffer+pbSize, // beginning of putback area
|
||||
buffer+pbSize, // read position
|
||||
buffer+pbSize); // end position
|
||||
}
|
||||
|
||||
protected:
|
||||
// insert new characters into the buffer
|
||||
virtual int_type underflow () {
|
||||
#ifndef _MSC_VER
|
||||
using std::memmove;
|
||||
#endif
|
||||
|
||||
// is read position before end of buffer?
|
||||
if (gptr() < egptr()) {
|
||||
return traits_type::to_int_type(*gptr());
|
||||
}
|
||||
|
||||
/* process size of putback area
|
||||
* - use number of characters read
|
||||
* - but at most size of putback area
|
||||
*/
|
||||
int numPutback;
|
||||
numPutback = gptr() - eback();
|
||||
if (numPutback > pbSize) {
|
||||
numPutback = pbSize;
|
||||
}
|
||||
|
||||
/* copy up to pbSize characters previously read into
|
||||
* the putback area
|
||||
*/
|
||||
memmove (buffer+(pbSize-numPutback), gptr()-numPutback,
|
||||
numPutback);
|
||||
|
||||
// read at most bufSize new characters
|
||||
int num;
|
||||
num = read (fd, buffer+pbSize, bufSize);
|
||||
if ( num == EAGAIN ) {
|
||||
return 0;
|
||||
}
|
||||
if (num <= 0) {
|
||||
// ERROR or EOF
|
||||
return EOF;
|
||||
}
|
||||
|
||||
// reset buffer pointers
|
||||
setg (buffer+(pbSize-numPutback), // beginning of putback area
|
||||
buffer+pbSize, // read position
|
||||
buffer+pbSize+num); // end of buffer
|
||||
|
||||
// return next character
|
||||
return traits_type::to_int_type(*gptr());
|
||||
}
|
||||
};
|
||||
|
||||
class fdistream : public std::istream {
|
||||
protected:
|
||||
fdinbuf buf;
|
||||
public:
|
||||
fdistream (int fd) : std::istream(0), buf(fd) {
|
||||
rdbuf(&buf);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
} // END namespace boost
|
||||
|
||||
#endif /*BOOST_FDSTREAM_HPP*/
|
|
@ -3,32 +3,46 @@
|
|||
#include "Raw.h"
|
||||
#include "NetVar.h"
|
||||
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
|
||||
#include "../../threading/SerialTypes.h"
|
||||
#include "../fdstream.h"
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/wait.h>
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
#include <stdio.h>
|
||||
#include <fcntl.h>
|
||||
#include <errno.h>
|
||||
#include <signal.h>
|
||||
|
||||
using namespace input::reader;
|
||||
using threading::Value;
|
||||
using threading::Field;
|
||||
|
||||
const int Raw::block_size = 4096; // how big do we expect our chunks of data to be.
|
||||
|
||||
|
||||
Raw::Raw(ReaderFrontend *frontend) : ReaderBackend(frontend)
|
||||
{
|
||||
file = 0;
|
||||
in = 0;
|
||||
|
||||
stderrfile = 0;
|
||||
forcekill = false;
|
||||
separator.assign( (const char*) BifConst::InputRaw::record_separator->Bytes(),
|
||||
BifConst::InputRaw::record_separator->Len());
|
||||
|
||||
if ( separator.size() != 1 )
|
||||
Error("separator length has to be 1. Separator will be truncated.");
|
||||
sep_length = BifConst::InputRaw::record_separator->Len();
|
||||
|
||||
buf = 0;
|
||||
outbuf = 0;
|
||||
bufpos = 0;
|
||||
|
||||
stdin_fileno = fileno(stdin);
|
||||
stdout_fileno = fileno(stdout);
|
||||
stderr_fileno = fileno(stderr);
|
||||
|
||||
childpid = -1;
|
||||
|
||||
stdin_towrite = 0; // by default do not open stdin
|
||||
use_stderr = false;
|
||||
}
|
||||
|
||||
Raw::~Raw()
|
||||
|
@ -40,41 +54,130 @@ void Raw::DoClose()
|
|||
{
|
||||
if ( file != 0 )
|
||||
CloseInput();
|
||||
|
||||
if ( buf != 0 )
|
||||
{
|
||||
// we still have output that has not been flushed. Throw away.
|
||||
delete [] buf;
|
||||
buf = 0;
|
||||
}
|
||||
|
||||
if ( execute && childpid > 0 && kill(childpid, 0) == 0 )
|
||||
{
|
||||
// kill child process
|
||||
kill(childpid, 15); // sigterm
|
||||
|
||||
if ( forcekill )
|
||||
{
|
||||
usleep(200000); // 200 msecs should be enough for anyone ;)
|
||||
|
||||
if ( kill(childpid, 0) == 0 ) // perhaps it is already gone
|
||||
kill(childpid, 9); // SIGKILL
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool Raw::Execute()
|
||||
{
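// Pipe layout set up below (indexed via the *_in/*_out constants): the first
// pipe carries the child's stdout back to the parent, the second feeds the
// child's stdin, and the third carries its stderr when read_stderr is set.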
|
||||
if ( pipe(pipes) != 0 || pipe(pipes+2) || pipe(pipes+4) )
|
||||
{
|
||||
Error(Fmt("Could not open pipe: %d", errno));
|
||||
return false;
|
||||
}
|
||||
|
||||
childpid = fork();
|
||||
if ( childpid < 0 )
|
||||
{
|
||||
Error(Fmt("Could not create child process: %d", errno));
|
||||
return false;
|
||||
}
|
||||
|
||||
else if ( childpid == 0 )
|
||||
{
|
||||
// we are the child.
|
||||
close(pipes[stdout_in]);
|
||||
dup2(pipes[stdout_out], stdout_fileno);
|
||||
|
||||
if ( stdin_towrite )
|
||||
{
|
||||
close(pipes[stdin_out]);
|
||||
dup2(pipes[stdin_in], stdin_fileno);
|
||||
}
|
||||
|
||||
if ( use_stderr )
|
||||
{
|
||||
close(pipes[stderr_in]);
|
||||
dup2(pipes[stderr_out], stderr_fileno);
|
||||
}
|
||||
|
||||
execl("/bin/sh", "sh", "-c", fname.c_str(), NULL);
|
||||
fprintf(stderr, "Exec failed :(......\n");
|
||||
exit(255);
|
||||
}
|
||||
else
|
||||
{
|
||||
// we are the parent
|
||||
close(pipes[stdout_out]);
|
||||
pipes[stdout_out] = -1;
|
||||
|
||||
if ( Info().mode == MODE_STREAM )
|
||||
fcntl(pipes[stdout_in], F_SETFL, O_NONBLOCK);
|
||||
|
||||
if ( stdin_towrite )
|
||||
{
|
||||
close(pipes[stdin_in]);
|
||||
pipes[stdin_in] = -1;
|
||||
fcntl(pipes[stdin_out], F_SETFL, O_NONBLOCK); // ya, just always set this to nonblocking. we do not want to block on a program receiving data.
|
||||
// Note that there is a small gotcha with this: more stdin data is only written out once more data is read from the program's output. Hence, with
|
||||
// a program in mode_manual where the first write cannot write everything, the rest will be stuck in a queue that is never emptied.
|
||||
}
|
||||
|
||||
if ( use_stderr )
|
||||
{
|
||||
close(pipes[stderr_out]);
|
||||
pipes[stderr_out] = -1;
|
||||
fcntl(pipes[stderr_in], F_SETFL, O_NONBLOCK); // true for this too.
|
||||
}
|
||||
|
||||
file = fdopen(pipes[stdout_in], "r");
|
||||
pipes[stdout_in] = -1; // will be closed by fclose
|
||||
|
||||
if ( use_stderr )
|
||||
{
stderrfile = fdopen(pipes[stderr_in], "r");
pipes[stderr_in] = -1; // will be closed by fclose
}
|
||||
if ( file == 0 || (stderrfile == 0 && use_stderr) )
|
||||
{
|
||||
Error("Could not convert fileno to file");
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
bool Raw::OpenInput()
|
||||
{
|
||||
if ( execute )
|
||||
{
|
||||
file = popen(fname.c_str(), "r");
|
||||
if ( file == NULL )
|
||||
{
|
||||
Error(Fmt("Could not execute command %s", fname.c_str()));
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return Execute();
|
||||
|
||||
else
|
||||
{
|
||||
file = fopen(fname.c_str(), "r");
|
||||
if ( file == NULL )
|
||||
if ( ! file )
|
||||
{
|
||||
Error(Fmt("Init: cannot open %s", fname.c_str()));
|
||||
return false;
|
||||
}
|
||||
fcntl(fileno(file), F_SETFD, FD_CLOEXEC);
|
||||
}
|
||||
|
||||
// This is defined in input/fdstream.h
|
||||
in = new boost::fdistream(fileno(file));
|
||||
|
||||
if ( execute && Info().mode == MODE_STREAM )
|
||||
fcntl(fileno(file), F_SETFL, O_NONBLOCK);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Raw::CloseInput()
|
||||
{
|
||||
if ( file == NULL )
|
||||
if ( file == 0 )
|
||||
{
|
||||
InternalError(Fmt("Trying to close closed file for stream %s", fname.c_str()));
|
||||
return false;
|
||||
|
@ -83,15 +186,20 @@ bool Raw::CloseInput()
|
|||
Debug(DBG_INPUT, "Raw reader starting close");
|
||||
#endif
|
||||
|
||||
delete in;
|
||||
fclose(file);
|
||||
|
||||
if ( execute )
|
||||
pclose(file);
|
||||
else
|
||||
fclose(file);
|
||||
if ( use_stderr )
|
||||
fclose(stderrfile);
|
||||
|
||||
in = NULL;
|
||||
file = NULL;
|
||||
if ( execute ) // we do not care if any of those fails. They should all be defined.
|
||||
{
|
||||
for ( int i = 0; i < 6; i ++ )
|
||||
if ( pipes[i] != -1 )
|
||||
close(pipes[i]);
|
||||
}
|
||||
|
||||
file = 0;
|
||||
stderrfile = 0;
|
||||
|
||||
#ifdef DEBUG
|
||||
Debug(DBG_INPUT, "Raw reader finished close");
|
||||
|
@ -106,28 +214,9 @@ bool Raw::DoInit(const ReaderInfo& info, int num_fields, const Field* const* fie
|
|||
mtime = 0;
|
||||
execute = false;
|
||||
firstrun = true;
|
||||
int want_fields = 1;
|
||||
bool result;
|
||||
|
||||
if ( ! info.source || strlen(info.source) == 0 )
|
||||
{
|
||||
Error("No source path provided");
|
||||
return false;
|
||||
}
|
||||
|
||||
if ( num_fields != 1 )
|
||||
{
|
||||
Error("Filter for raw reader contains more than one field. "
|
||||
"Filters for the raw reader may only contain exactly one string field. "
|
||||
"Filter ignored.");
|
||||
return false;
|
||||
}
|
||||
|
||||
if ( fields[0]->type != TYPE_STRING )
|
||||
{
|
||||
Error("Filter for raw reader contains a field that is not of type string.");
|
||||
return false;
|
||||
}
|
||||
|
||||
// do Initialization
|
||||
string source = string(info.source);
|
||||
char last = info.source[source.length() - 1];
|
||||
|
@ -135,23 +224,63 @@ bool Raw::DoInit(const ReaderInfo& info, int num_fields, const Field* const* fie
|
|||
{
|
||||
execute = true;
|
||||
fname = source.substr(0, fname.length() - 1);
|
||||
|
||||
if ( (info.mode != MODE_MANUAL) )
|
||||
{
|
||||
Error(Fmt("Unsupported read mode %d for source %s in execution mode",
|
||||
info.mode, fname.c_str()));
|
||||
return false;
|
||||
}
|
||||
|
||||
result = OpenInput();
|
||||
|
||||
}
|
||||
else
|
||||
|
||||
if ( ! info.source || strlen(info.source) == 0 )
|
||||
{
|
||||
execute = false;
|
||||
result = OpenInput();
|
||||
Error("No source path provided");
|
||||
return false;
|
||||
}
|
||||
|
||||
map<const char*, const char*>::const_iterator it = info.config.find("stdin"); // data that is sent to the child process
|
||||
if ( it != info.config.end() )
|
||||
{
|
||||
stdin_string = it->second;
|
||||
stdin_towrite = stdin_string.length();
|
||||
}
|
||||
|
||||
it = info.config.find("read_stderr"); // we want to read stderr
|
||||
if ( it != info.config.end() && execute )
|
||||
{
|
||||
use_stderr = true;
|
||||
want_fields = 2;
|
||||
}
|
||||
|
||||
it = info.config.find("force_kill"); // we want to be sure that our child is dead when we exit
|
||||
if ( it != info.config.end() && execute )
|
||||
{
|
||||
forcekill = true;
|
||||
}
|
||||
|
||||
if ( num_fields != want_fields )
|
||||
{
|
||||
Error(Fmt("Filter for raw reader contains wrong number of fields -- got %d, expected %d. "
|
||||
"Filters for the raw reader contain one string field when used in normal mode and one string and one bool fields when using execute mode with stderr capuring. "
|
||||
"Filter ignored.", num_fields, want_fields));
|
||||
return false;
|
||||
}

	if ( fields[0]->type != TYPE_STRING )
		{
		Error("First field for raw reader always has to be of type string.");
		return false;
		}
	if ( use_stderr && fields[1]->type != TYPE_BOOL )
		{
		Error("Second field for raw reader always has to be of type bool.");
		return false;
		}

	if ( execute && Info().mode == MODE_REREAD )
		{
		// for execs this makes no sense - would have to execute each heartbeat?
		Error("Rereading only supported for files, not for executables.");
		return false;
		}


	result = OpenInput();

	if ( result == false )
		return result;


@@ -168,18 +297,115 @@ bool Raw::DoInit(const ReaderInfo& info, int num_fields, const Field* const* fie
	return true;
	}


bool Raw::GetLine(string& str)
int64_t Raw::GetLine(FILE* arg_file)
	{
	if ( in->peek() == std::iostream::traits_type::eof() )
		return false;
	errno = 0;
	int pos = 0; // strstr_n only works on ints - so no use to use something different here
	int offset = 0;

	if ( in->eofbit == true || in->failbit == true )
		return false;
	if ( buf == 0 )
		buf = new char[block_size];

	return getline(*in, str, separator[0]);
	int repeats = 1;

	for (;;)
		{
		size_t readbytes = fread(buf+bufpos+offset, 1, block_size-bufpos, arg_file);
		pos += bufpos + readbytes;
		//printf("Pos: %d\n", pos);
		bufpos = offset = 0; // read full block size in next read...

		if ( pos == 0 && errno != 0 )
			break;

		// researching everything each time is a bit... cpu-intensive. But otherwise we have
		// to deal with situations where the separator is multi-character and split over multiple
		// reads...
		int found = strstr_n(pos, (unsigned char*) buf, separator.size(), (unsigned char*) separator.c_str());

		if ( found == -1 )
			{
			// we did not find it and have to search again in the next try. resize buffer....
			// but first check if we encountered the file end - because if we did this was it.
			if ( feof(arg_file) != 0 )
				{
				if ( pos == 0 )
					return -1; // signal EOF - and that we had no more data.
				else
					{
					outbuf = buf;
					buf = 0;
					return pos;
					}
				}

			repeats++;
			// bah, we cannot use realloc because we would have to change the delete in the manager to a free.
			char * newbuf = new char[block_size*repeats];
			memcpy(newbuf, buf, block_size*(repeats-1));
			delete [] buf;
			buf = newbuf;
			offset = block_size*(repeats-1);
			}
		else
			{
			outbuf = buf;
			buf = 0;

			if ( found < pos )
				{
				// we have leftovers. copy them into the buffer for the next line
				buf = new char[block_size];
				memcpy(buf, outbuf + found + sep_length, pos - found - sep_length);
				bufpos = pos - found - sep_length;
				}

			return found;
			}

		}

	if ( errno == EAGAIN || errno == EWOULDBLOCK || errno == EINTR )
		return -2;

	else
		{
		// an error code we did not expect. This probably is bad.
		Error(Fmt("Reader encountered unexpected error code %d", errno));
		return -3;
		}

	InternalError("Internal control flow execution error in raw reader");
	assert(false);
	}
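For reference, GetLine() communicates its outcome entirely through its return value, and DoUpdate() further below is its real consumer. The following is only a minimal sketch of a caller honoring that contract; ReadAllLines is a hypothetical helper name, not part of this commit:

// Hypothetical caller, illustrating the GetLine() return convention:
//   >= 0 : number of bytes of one separator-delimited chunk, handed over via outbuf
//     -1 : EOF, no more data
//     -2 : no data available right now (EAGAIN/EWOULDBLOCK/EINTR), retry later
//     -3 : unexpected error, give up
bool Raw::ReadAllLines(FILE* f)
	{
	for ( ;; )
		{
		int64_t length = GetLine(f);

		if ( length == -3 )
			return false; // hard error

		if ( length == -2 || length == -1 )
			return true; // nothing (more) to deliver right now

		// hand the 'length' bytes stored in outbuf to the input manager here ...
		outbuf = 0;
		}
	}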

// write to the stdin of the child process
void Raw::WriteToStdin()
	{
	assert(stdin_towrite <= stdin_string.length());
	uint64_t pos = stdin_string.length() - stdin_towrite;

	errno = 0;
	ssize_t written = write(pipes[stdin_out], stdin_string.c_str() + pos, stdin_towrite);
	stdin_towrite -= written;

	if ( errno != 0 && errno != EAGAIN && errno != EWOULDBLOCK )
		{
		Error(Fmt("Writing to child process stdin failed: %d. Stopping writing at position %d", errno, pos));
		stdin_towrite = 0;
		close(pipes[stdin_out]);
		}

	if ( stdin_towrite == 0 ) // send EOF when we are done.
		close(pipes[stdin_out]);

	if ( Info().mode == MODE_MANUAL && stdin_towrite != 0 )
		{
		Error(Fmt("Could not write whole string to stdin of child process in one go. Please use STREAM mode to pass more data to child."));
		}
	}
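The EAGAIN/EWOULDBLOCK handling in WriteToStdin() (and the -2 path in GetLine()) suggests that the parent-side pipe descriptors are put into non-blocking mode when the child is set up; that setup lives in Execute(), which is not part of this excerpt. A minimal sketch under that assumption; make_nonblocking is a made-up helper name:

#include <fcntl.h>

// Hypothetical helper, not part of the diff: switch a pipe fd to O_NONBLOCK so
// that write()/read() report EAGAIN/EWOULDBLOCK instead of stalling the reader thread.
static bool make_nonblocking(int fd)
	{
	int flags = fcntl(fd, F_GETFL, 0);
	if ( flags == -1 )
		return false;

	return fcntl(fd, F_SETFL, flags | O_NONBLOCK) == 0;
	}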


// read the entire file and send appropriate thingies back to InputMgr
bool Raw::DoUpdate()
	{

@@ -191,6 +417,7 @@ bool Raw::DoUpdate()
	switch ( Info().mode ) {
		case MODE_REREAD:
			{
			assert(childpid == -1); // mode may not be used to execute child programs
			// check if the file has changed
			struct stat sb;
			if ( stat(fname.c_str(), &sb) == -1 )

@@ -211,10 +438,9 @@ bool Raw::DoUpdate()

		case MODE_MANUAL:
		case MODE_STREAM:
			if ( Info().mode == MODE_STREAM && file != NULL && in != NULL )
			if ( Info().mode == MODE_STREAM && file != 0 )
				{
				//fpurge(file);
				in->clear(); // remove end of file evil bits
				clearerr(file); // remove end of file evil bits
				break;
				}


@@ -230,21 +456,118 @@ bool Raw::DoUpdate()
		}

	string line;
	while ( GetLine(line) )
	assert ( (NumFields() == 1 && !use_stderr) || (NumFields() == 2 && use_stderr));
	for ( ;; )
		{
		assert (NumFields() == 1);
		if ( stdin_towrite > 0 )
			WriteToStdin();

		Value** fields = new Value*[1];
		int64_t length = GetLine(file);
		//printf("Read %lld bytes\n", length);

		if ( length == -3 )
			return false;

		else if ( length == -2 || length == -1 )
			// no data ready or eof
			break;

		Value** fields = new Value*[2]; // just always reserve 2. This means that our [] is too long by a count of 1 if not using stderr. But who cares...

		// filter has exactly one text field. convert to it.
		Value* val = new Value(TYPE_STRING, true);
		val->val.string_val.data = copy_string(line.c_str());
		val->val.string_val.length = line.size();
		val->val.string_val.data = outbuf;
		val->val.string_val.length = length;
		fields[0] = val;

		if ( use_stderr )
			{
			Value* bval = new Value(TYPE_BOOL, true);
			bval->val.int_val = 0;
			fields[1] = bval;
			}

		Put(fields);

		outbuf = 0;
		}

	if ( use_stderr )
		{
		for ( ;; )
			{
			int64_t length = GetLine(stderrfile);
			//printf("Read stderr %lld bytes\n", length);
			if ( length == -3 )
				return false;

			else if ( length == -2 || length == -1 )
				break;

			Value** fields = new Value*[2];
			Value* val = new Value(TYPE_STRING, true);
			val->val.string_val.data = outbuf;
			val->val.string_val.length = length;
			fields[0] = val;
			Value* bval = new Value(TYPE_BOOL, true);
			bval->val.int_val = 1; // yes, we are stderr
			fields[1] = bval;

			Put(fields);

			outbuf = 0;
			}
		}

	if ( ( Info().mode == MODE_MANUAL ) || ( Info().mode == MODE_REREAD ) )
		// done with the current data source
		EndCurrentSend();

	// and let's check if the child process is still alive
	int return_code;
	if ( childpid != -1 && waitpid(childpid, &return_code, WNOHANG) != 0 )
		{
		// child died
		bool signal = false;
		int code = 0;
		if ( WIFEXITED(return_code) )
			{
			code = WEXITSTATUS(return_code);
			if ( code != 0 )
				Error(Fmt("Child process exited with non-zero return code %d", code));
			}

		else if ( WIFSIGNALED(return_code) )
			{
			signal = true;
			code = WTERMSIG(return_code);
			Error(Fmt("Child process exited due to signal %d", code));
			}

		else
			assert(false);

		Value** vals = new Value*[4];
		vals[0] = new Value(TYPE_STRING, true);
		vals[0]->val.string_val.data = copy_string(Info().name);
		vals[0]->val.string_val.length = strlen(Info().name);
		vals[1] = new Value(TYPE_STRING, true);
		vals[1]->val.string_val.data = copy_string(Info().source);
		vals[1]->val.string_val.length = strlen(Info().source);
		vals[2] = new Value(TYPE_COUNT, true);
		vals[2]->val.int_val = code;
		vals[3] = new Value(TYPE_BOOL, true);
		vals[3]->val.int_val = signal;

		// and in this case we can signal end_of_data even for the streaming reader
		if ( Info().mode == MODE_STREAM )
			EndCurrentSend();

		SendEvent("InputRaw::process_finished", 4, vals);
		}



#ifdef DEBUG
	Debug(DBG_INPUT, "DoUpdate finished successfully");
#endif


src/input/readers/Raw.h

@@ -3,7 +3,6 @@
#ifndef INPUT_READERS_RAW_H
#define INPUT_READERS_RAW_H

#include <iostream>
#include <vector>

#include "../ReaderBackend.h"

@@ -30,17 +29,49 @@ protected:
private:
	bool OpenInput();
	bool CloseInput();
	bool GetLine(string& str);
	int64_t GetLine(FILE* file);
	bool Execute();
	void WriteToStdin();

	string fname; // Source with a potential "|" removed.
	istream* in;
	FILE* file;
	FILE* stderrfile;
	bool execute;
	bool firstrun;
	time_t mtime;

	// options set from the script-level.
	string separator;
	unsigned int sep_length; // length of the separator

	static const int block_size;
	int bufpos;
	char* buf;
	char* outbuf;

	int stdin_fileno;
	int stdout_fileno;
	int stderr_fileno;

	string stdin_string;
	uint64_t stdin_towrite;

	bool use_stderr;

	bool forcekill;

	int pipes[6];
	pid_t childpid;

	enum IoChannels {
		stdout_in = 0,
		stdout_out = 1,
		stdin_in = 2,
		stdin_out = 3,
		stderr_in = 4,
		stderr_out = 5
	};

};

}

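The pipes[6] array together with the IoChannels enum above encodes three pipe() pairs, one each for the child's stdout, stdin and stderr; WriteToStdin() writes to pipes[stdin_out], and in execute mode file/stderrfile presumably wrap the parent-side read ends. Execute() itself is not part of this excerpt, so the following is only an illustrative sketch of how the slots line up with three pipe() calls; setup_pipes is a made-up helper name:

#include <unistd.h>

// Illustrative only (the real setup lives in Raw::Execute(), not shown here).
// pipe() fills fd[0] with the read end and fd[1] with the write end, so each
// pair of consecutive slots corresponds to one pipe.
static bool setup_pipes(int pipes[6])
	{
	if ( pipe(pipes + 0) != 0 )   // pipes[stdout_in] / pipes[stdout_out]
		return false;
	if ( pipe(pipes + 2) != 0 )   // pipes[stdin_in]  / pipes[stdin_out]
		return false;
	if ( pipe(pipes + 4) != 0 )   // pipes[stderr_in] / pipes[stderr_out]
		return false;

	return true;
	}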
src/main.cc

@@ -357,6 +357,7 @@ void terminate_bro()

	file_mgr->Terminate();
	log_mgr->Terminate();
	input_mgr->Terminate();
	thread_mgr->Terminate();

	mgr.Drain();

@@ -834,6 +835,7 @@ int main(int argc, char** argv)

	plugin_mgr->InitPreScript();
	analyzer_mgr->InitPreScript();
	file_mgr->InitPreScript();

	if ( events_file )
		event_player = new EventPlayer(events_file);

@@ -855,6 +857,7 @@ int main(int argc, char** argv)

	plugin_mgr->InitPostScript();
	analyzer_mgr->InitPostScript();
	file_mgr->InitPostScript();

	if ( print_plugins )
		{

@@ -868,6 +871,8 @@ int main(int argc, char** argv)

	if ( generate_documentation )
		{
		CreateProtoAnalyzerDoc("proto-analyzers.rst");

		std::list<BroDoc*>::iterator it;

		for ( it = docs_generated.begin(); it != docs_generated.end(); ++it )

@@ -1109,6 +1114,7 @@ int main(int argc, char** argv)

	reporter->ReportViaEvents(true);

	// Drain the event queue here to support the protocols framework configuring DPM
	mgr.Drain();

	analyzer_mgr->DumpDebug();

src/plugin/Component.cc

@@ -21,7 +21,7 @@ component::Type Component::Type() const
	return type;
	}

void Component::Describe(ODesc* d)
void Component::Describe(ODesc* d) const
	{
	d->Add(" ");
	d->Add("[");

@@ -39,6 +39,10 @@ void Component::Describe(ODesc* d)
		d->Add("Analyzer");
		break;

	case component::FILE_ANALYZER:
		d->Add("File Analyzer");
		break;

	default:
		reporter->InternalError("unknown component type in plugin::Component::Describe");
	}

src/plugin/Component.h

@@ -15,16 +15,11 @@ namespace component {
enum Type {
	READER, /// An input reader (not currently used).
	WRITER, /// A logging writer (not currently used).
	ANALYZER /// A protocol analyzer.
	ANALYZER, /// A protocol analyzer.
	FILE_ANALYZER /// A file analyzer.
};
}

#if 0
namespace input { class PluginComponent; }
namespace logging { class PluginComponent; }
namespace analyzer { class PluginComponent; }
#endif

/**
 * Base class for plugin components. A component is a specific piece of
 * functionality that a plugin provides, such as a protocol analyzer or a log

@@ -50,6 +45,12 @@ public:
	 */
	component::Type Type() const;

	/**
	 * Returns a descriptive name for the analyzer. This name must be
	 * unique across all components of the same type.
	 */
	virtual const char* Name() const = 0;

	/**
	 * Returns a textual representation of the component. The default
	 * version just outputs the type. Derived versions should call the

@@ -57,7 +58,7 @@ public:
	 *
	 * @param d The description object to use.
	 */
	virtual void Describe(ODesc* d);
	virtual void Describe(ODesc* d) const;

private:
	component::Type type;

src/plugin/Manager.cc

@@ -30,9 +30,18 @@ bool Manager::LoadPluginsFrom(const std::string& dir)
	return false;
	}

static bool plugin_cmp(const Plugin* a, const Plugin* b)
	{
	return strcmp(a->Name(), b->Name()) < 0;
	}

bool Manager::RegisterPlugin(Plugin *plugin)
	{
	Manager::PluginsInternal()->push_back(plugin);

	// Sort plugins by name to make sure we have a deterministic order.
	PluginsInternal()->sort(plugin_cmp);

	return true;
	}


src/plugin/Plugin.cc

@@ -59,7 +59,7 @@ Plugin::~Plugin()
	delete [] description;
	}

const char* Plugin::Name()
const char* Plugin::Name() const
	{
	return name;
	}

@@ -69,7 +69,7 @@ void Plugin::SetName(const char* arg_name)
	name = copy_string(arg_name);
	}

const char* Plugin::Description()
const char* Plugin::Description() const
	{
	return description;
	}

@@ -79,7 +79,7 @@ void Plugin::SetDescription(const char* arg_description)
	description = copy_string(arg_description);
	}

int Plugin::Version()
int Plugin::Version() const
	{
	return dynamic ? version : 0;
	}

@@ -89,12 +89,12 @@ void Plugin::SetVersion(int arg_version)
	version = arg_version;
	}

int Plugin::APIVersion()
int Plugin::APIVersion() const
	{
	return api_version;
	}

bool Plugin::DynamicPlugin()
bool Plugin::DynamicPlugin() const
	{
	return dynamic;
	}

@@ -127,7 +127,7 @@ void Plugin::InitPostScript()
	}
	}

Plugin::bif_item_list Plugin::BifItems()
Plugin::bif_item_list Plugin::BifItems() const
	{
	bif_item_list l1 = bif_items;
	bif_item_list l2 = CustomBifItems();

@@ -138,7 +138,7 @@ Plugin::bif_item_list Plugin::BifItems()
	return l1;
	}

Plugin::bif_item_list Plugin::CustomBifItems()
Plugin::bif_item_list Plugin::CustomBifItems() const
	{
	return bif_item_list();
	}

@@ -151,14 +151,23 @@ void Plugin::Done()
	components.clear();
	}

Plugin::component_list Plugin::Components()
Plugin::component_list Plugin::Components() const
	{
	return components;
	}

|
||||
{
|
||||
return a->Name() < b->Name();
|
||||
}
|

void Plugin::AddComponent(Component* c)
	{
	components.push_back(c);

	// Sort components by name to make sure we have a deterministic
	// order.
	components.sort(component_cmp);
	}

void Plugin::AddBifInitFunction(bif_init_func c)

@@ -166,7 +175,7 @@ void Plugin::AddBifInitFunction(bif_init_func c)
	bif_inits.push_back(c);
	}

void Plugin::Describe(ODesc* d)
void Plugin::Describe(ODesc* d) const
	{
	d->Add("Plugin: ");
	d->Add(name);

src/plugin/Plugin.h

@@ -102,24 +102,24 @@ public:
	/**
	 * Returns the name of the plugin.
	 */
	const char* Name();
	const char* Name() const;

	/**
	 * Returns a short textual description of the plugin, if provided.
	 */
	const char* Description();
	const char* Description() const;

	/**
	 * Returns the version of the plugin. Versions are only meaningful for
	 * dynamically compiled plugins; for statically compiled ones, this
	 * will always return 0.
	 */
	int Version();
	int Version() const;

	/**
	 * Returns true if this is a dynamically linked in plugin.
	 */
	bool DynamicPlugin();
	bool DynamicPlugin() const;

	/**
	 * Returns the internal API version that this plugin relies on. Only

@@ -128,18 +128,18 @@ public:
	 * dynamically loaded plugins may cause a mismatch if they were
	 * compiled for a different Bro version.
	 */
	int APIVersion();
	int APIVersion() const;

	/**
	 * Returns a list of all components the plugin provides.
	 */
	component_list Components();
	component_list Components() const;

	/**
	 * Returns a list of all BiF items that the plugin provides. This
	 * must be called only after InitBif() has been executed.
	 */
	bif_item_list BifItems();
	bif_item_list BifItems() const;

	/**
	 * First-stage initialization of the plugin called early during Bro's

@@ -171,7 +171,7 @@ public:
	 * is disabled, the rendering will include a list of all components
	 * and BiF items.
	 */
	void Describe(ODesc* d);
	void Describe(ODesc* d) const;

protected:
	typedef std::list<std::pair<const char*, int> > bif_init_func_result;

@@ -225,7 +225,7 @@ protected:
	 * for informational purposes only and will show up in the result of
	 * BifItems() as well as in the Describe() output.
	 */
	virtual bif_item_list CustomBifItems() ;
	virtual bif_item_list CustomBifItems() const;

	/**
	 * Internal function adding an entry point for registering

15 src/util.cc

@@ -1637,3 +1637,18 @@ const char* bro_magic_buffer(magic_t cookie, const void* buffer, size_t length)

	return rval;
	}

const char* canonify_name(const char* name)
	{
	unsigned int len = strlen(name);
	char* nname = new char[len + 1];

	for ( unsigned int i = 0; i < len; i++ )
		{
		char c = isalnum(name[i]) ? name[i] : '_';
		nname[i] = toupper(c);
		}

	nname[len] = '\0';
	return nname;
	}

src/util.h

@@ -391,4 +391,12 @@ extern magic_t magic_mime_cookie;
void bro_init_magic(magic_t* cookie_ptr, int flags);
const char* bro_magic_buffer(magic_t cookie, const void* buffer, size_t length);

/**
 * Canonicalizes a name by converting it to uppercase letters and replacing
 * all non-alphanumeric characters with an underscore.
 * @param name The string to canonicalize.
 * @return The canonicalized version of \a name which caller may later delete[].
 */
const char* canonify_name(const char* name);

#endif
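Given the implementation in util.cc above, a short usage illustration (the returned buffer is owned by the caller, as the doc comment notes); the demo function name is made up:

#include <cstdio>
// assumes util.h, which declares canonify_name(), is included

void canonify_name_demo()
	{
	// Non-alphanumeric characters become '_', everything else is uppercased.
	const char* n = canonify_name("my-plugin.name");
	printf("%s\n", n);   // prints: MY_PLUGIN_NAME
	delete [] n;
	}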