commit b90c8cb8ec

Merge remote-tracking branch 'origin/master' into topic/vladg/file-analysis-exe-analyzer

Conflicts:
	src/types.bif

709 changed files with 119094 additions and 88762 deletions
@@ -3,9 +3,17 @@
 #include "Analyzer.h"
+#include "Manager.h"

+file_analysis::ID file_analysis::Analyzer::id_counter = 0;
+
 file_analysis::Analyzer::~Analyzer()
 	{
 	DBG_LOG(DBG_FILE_ANALYSIS, "Destroy file analyzer %s",
-	        file_mgr->GetComponentName(tag));
+	        file_mgr->GetComponentName(tag).c_str());
 	Unref(args);
 	}
+
+void file_analysis::Analyzer::SetAnalyzerTag(const file_analysis::Tag& arg_tag)
+	{
+	assert(! tag || tag == arg_tag);
+	tag = arg_tag;
+	}
|
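For orientation, the new code gives every analyzer instance a unique ID drawn from a static counter and lets the tag be assigned once after construction (SetAnalyzerTag() asserts it never changes). A minimal standalone model of that pattern, using invented names rather than the Bro classes themselves:

#include <cassert>
#include <cstdint>
#include <iostream>

using ID = uint64_t;   // stand-in for file_analysis::ID

class ModelAnalyzer {
public:
	ModelAnalyzer() : id(++id_counter), tag(0) { }

	ID GetID() const { return id; }

	// Mirrors SetAnalyzerTag(): only legal while no different tag is set.
	void SetTag(int arg_tag)
		{
		assert(tag == 0 || tag == arg_tag);
		tag = arg_tag;
		}

private:
	ID id;        // unique per instance
	int tag;      // 0 means "not assigned yet"
	static ID id_counter;
};

ID ModelAnalyzer::id_counter = 0;

int main()
	{
	ModelAnalyzer a, b;
	std::cout << a.GetID() << " " << b.GetID() << "\n"; // prints "1 2"
	b.SetTag(42);   // first assignment: fine
	b.SetTag(42);   // same value again: fine
	// b.SetTag(7); // would trip the assert, just like SetAnalyzerTag()
	}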
|
|
@ -13,6 +13,8 @@ namespace file_analysis {
|
|||
|
||||
class File;
|
||||
|
||||
typedef uint32 ID;
|
||||
|
||||
/**
|
||||
* Base class for analyzers that can be attached to file_analysis::File objects.
|
||||
*/
|
||||
|
@ -25,6 +27,18 @@ public:
|
|||
*/
|
||||
virtual ~Analyzer();
|
||||
|
||||
/**
|
||||
* Initializes the analyzer before input processing starts.
|
||||
*/
|
||||
virtual void Init()
|
||||
{ }
|
||||
|
||||
/**
|
||||
* Finishes the analyzer's operation after all input has been parsed.
|
||||
*/
|
||||
virtual void Done()
|
||||
{ }
|
||||
|
||||
/**
|
||||
* Subclasses may override this method to receive file data non-sequentially.
|
||||
* @param data points to start of a chunk of file data.
|
||||
|
@ -72,6 +86,13 @@ public:
|
|||
*/
|
||||
file_analysis::Tag Tag() const { return tag; }
|
||||
|
||||
/**
|
||||
* Returns the analyzer instance's internal ID. These IDs are unique
|
||||
* across all analyzers instantiated and can thus be used to
|
||||
* identify a specific instance.
|
||||
*/
|
||||
ID GetID() const { return id; }
|
||||
|
||||
/**
|
||||
* @return the AnalyzerArgs associated with the analyzer.
|
||||
*/
|
||||
|
@ -82,10 +103,31 @@ public:
|
|||
*/
|
||||
File* GetFile() const { return file; }
|
||||
|
||||
/**
|
||||
* Sets the tag associated with the analyzer's type. Note that this
|
||||
* can be called only right after construction, if the constructor
|
||||
* did not receive a name or tag. The method cannot be used to change
|
||||
* an existing tag.
|
||||
*/
|
||||
void SetAnalyzerTag(const file_analysis::Tag& tag);
|
||||
|
||||
/**
|
||||
* @return true if the analyzer has ever seen a stream-wise delivery.
|
||||
*/
|
||||
bool GotStreamDelivery() const
|
||||
{ return got_stream_delivery; }
|
||||
|
||||
/**
|
||||
* Flag the analyzer as having seen a stream-wise delivery.
|
||||
*/
|
||||
void SetGotStreamDelivery()
|
||||
{ got_stream_delivery = true; }
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Constructor. Only derived classes are meant to be instantiated.
|
||||
* @param arg_tag the tag defining the analyzer's type.
|
||||
* @param arg_args an \c AnalyzerArgs (script-layer type) value specifying
|
||||
* tunable options, if any, related to a particular analyzer type.
|
||||
* @param arg_file the file to which the analyzer is being attached.
|
||||
|
@ -93,14 +135,39 @@ protected:
|
|||
Analyzer(file_analysis::Tag arg_tag, RecordVal* arg_args, File* arg_file)
|
||||
: tag(arg_tag),
|
||||
args(arg_args->Ref()->AsRecordVal()),
|
||||
file(arg_file)
|
||||
{}
|
||||
file(arg_file),
|
||||
got_stream_delivery(false)
|
||||
{
|
||||
id = ++id_counter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor. Only derived classes are meant to be instantiated.
|
||||
* As this version of the constructor does not receive a name or tag,
|
||||
* SetAnalyzerTag() must be called before the instance can be used.
|
||||
*
|
||||
* @param arg_args an \c AnalyzerArgs (script-layer type) value specifying
|
||||
* tunable options, if any, related to a particular analyzer type.
|
||||
* @param arg_file the file to which the analyzer is being attached.
|
||||
*/
|
||||
Analyzer(RecordVal* arg_args, File* arg_file)
|
||||
: tag(),
|
||||
args(arg_args->Ref()->AsRecordVal()),
|
||||
file(arg_file),
|
||||
got_stream_delivery(false)
|
||||
{
|
||||
id = ++id_counter;
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
ID id; /**< Unique instance ID. */
|
||||
file_analysis::Tag tag; /**< The particular type of the analyzer instance. */
|
||||
RecordVal* args; /**< \c AnalyzerArgs val gives tunable analyzer params. */
|
||||
File* file; /**< The file to which the analyzer is attached. */
|
||||
bool got_stream_delivery;
|
||||
|
||||
static ID id_counter;
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
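The header now exposes per-instance IDs, the Init()/Done() lifecycle hooks, and the stream-delivery flag. A hypothetical subclass sketch, assuming the surrounding Bro source tree; ByteCounter, its component name "BYTE_COUNTER", and the include paths are invented for illustration, while the hooks, the protected constructor, and file_mgr->GetComponentTag() are the ones shown in this diff:

#include "Analyzer.h"
#include "Manager.h"

namespace file_analysis {

// Hypothetical analyzer that just counts the bytes it is handed.
class ByteCounter : public Analyzer {
public:
	static Analyzer* Instantiate(RecordVal* args, File* file)
		{ return new ByteCounter(args, file); }

	virtual bool DeliverStream(const u_char* data, uint64 len)
		{
		total += len;
		return true;   // returning false detaches the analyzer from the file
		}

	virtual void Done()
		{ DBG_LOG(DBG_FILE_ANALYSIS, "ByteCounter saw %" PRIu64 " bytes", total); }

protected:
	ByteCounter(RecordVal* args, File* file)
		: Analyzer(file_mgr->GetComponentTag("BYTE_COUNTER"), args, file), total(0)
		{ }

	uint64 total;
};

} // namespace file_analysis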
|
|
@ -9,7 +9,10 @@ using namespace file_analysis;
|
|||
|
||||
static void analyzer_del_func(void* v)
|
||||
{
|
||||
delete (file_analysis::Analyzer*) v;
|
||||
file_analysis::Analyzer* a = (file_analysis::Analyzer*)v;
|
||||
|
||||
a->Done();
|
||||
delete a;
|
||||
}
|
||||
|
||||
AnalyzerSet::AnalyzerSet(File* arg_file) : file(arg_file)
|
||||
|
@ -50,7 +53,7 @@ bool AnalyzerSet::Add(file_analysis::Tag tag, RecordVal* args)
|
|||
if ( analyzer_map.Lookup(key) )
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Instantiate analyzer %s skipped for file id"
|
||||
" %s: already exists", file_mgr->GetComponentName(tag),
|
||||
" %s: already exists", file_mgr->GetComponentName(tag).c_str(),
|
||||
file->GetID().c_str());
|
||||
delete key;
|
||||
return true;
|
||||
|
@ -69,7 +72,7 @@ bool AnalyzerSet::Add(file_analysis::Tag tag, RecordVal* args)
|
|||
return true;
|
||||
}
|
||||
|
||||
bool AnalyzerSet::QueueAdd(file_analysis::Tag tag, RecordVal* args)
|
||||
Analyzer* AnalyzerSet::QueueAdd(file_analysis::Tag tag, RecordVal* args)
|
||||
{
|
||||
HashKey* key = GetKey(tag, args);
|
||||
file_analysis::Analyzer* a = InstantiateAnalyzer(tag, args);
|
||||
|
@ -77,12 +80,12 @@ bool AnalyzerSet::QueueAdd(file_analysis::Tag tag, RecordVal* args)
|
|||
if ( ! a )
|
||||
{
|
||||
delete key;
|
||||
return false;
|
||||
return 0;
|
||||
}
|
||||
|
||||
mod_queue.push(new AddMod(a, key));
|
||||
|
||||
return true;
|
||||
return a;
|
||||
}
|
||||
|
||||
bool AnalyzerSet::AddMod::Perform(AnalyzerSet* set)
|
||||
|
@ -90,7 +93,7 @@ bool AnalyzerSet::AddMod::Perform(AnalyzerSet* set)
|
|||
if ( set->analyzer_map.Lookup(key) )
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Add analyzer %s skipped for file id"
|
||||
" %s: already exists", file_mgr->GetComponentName(a->Tag()),
|
||||
" %s: already exists", file_mgr->GetComponentName(a->Tag()).c_str(),
|
||||
a->GetFile()->GetID().c_str());
|
||||
|
||||
Abort();
|
||||
|
@ -98,6 +101,7 @@ bool AnalyzerSet::AddMod::Perform(AnalyzerSet* set)
|
|||
}
|
||||
|
||||
set->Insert(a, key);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -116,15 +120,17 @@ bool AnalyzerSet::Remove(file_analysis::Tag tag, HashKey* key)
|
|||
if ( ! a )
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Skip remove analyzer %s for file id %s",
|
||||
file_mgr->GetComponentName(tag), file->GetID().c_str());
|
||||
file_mgr->GetComponentName(tag).c_str(), file->GetID().c_str());
|
||||
return false;
|
||||
}
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Remove analyzer %s for file id %s",
|
||||
file_mgr->GetComponentName(tag),
|
||||
file_mgr->GetComponentName(tag).c_str(),
|
||||
file->GetID().c_str());
|
||||
|
||||
a->Done();
|
||||
delete a;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -163,7 +169,7 @@ file_analysis::Analyzer* AnalyzerSet::InstantiateAnalyzer(Tag tag,
|
|||
if ( ! a )
|
||||
{
|
||||
reporter->Error("Failed file analyzer %s instantiation for file id %s",
|
||||
file_mgr->GetComponentName(tag), file->GetID().c_str());
|
||||
file_mgr->GetComponentName(tag).c_str(), file->GetID().c_str());
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
@ -173,9 +179,11 @@ file_analysis::Analyzer* AnalyzerSet::InstantiateAnalyzer(Tag tag,
|
|||
void AnalyzerSet::Insert(file_analysis::Analyzer* a, HashKey* key)
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Add analyzer %s for file id %s",
|
||||
file_mgr->GetComponentName(a->Tag()), file->GetID().c_str());
|
||||
file_mgr->GetComponentName(a->Tag()).c_str(), file->GetID().c_str());
|
||||
analyzer_map.Insert(key, a);
|
||||
delete key;
|
||||
|
||||
a->Init();
|
||||
}
|
||||
|
||||
void AnalyzerSet::DrainModifications()
|
||||
|
|
|
@@ -57,9 +57,10 @@ public:
	 * Queue the attachment of an analyzer to #file.
	 * @param tag the analyzer tag of the file analyzer to add.
	 * @param args an \c AnalyzerArgs value which specifies an analyzer.
-	 * @return true if analyzer was able to be instantiated, else false.
+	 * @return if successful, a pointer to a newly instantiated analyzer else
+	 *         a null pointer. The caller does *not* take ownership of the memory.
	 */
-	bool QueueAdd(file_analysis::Tag tag, RecordVal* args);
+	file_analysis::Analyzer* QueueAdd(file_analysis::Tag tag, RecordVal* args);

	/**
	 * Remove an analyzer from #file immediately.
|
||||
|
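Since QueueAdd() now hands back the queued analyzer instead of a bool, callers can keep a reference to the instance while the AnalyzerSet retains ownership. A hedged in-tree sketch of the calling pattern, mirroring what File::AddAnalyzer() does later in this commit:

	file_analysis::Analyzer* a = analyzers.QueueAdd(tag, args);

	if ( ! a )
		return false;   // instantiation failed, nothing was queued

	// 'a' is still owned by the AnalyzerSet; it may be inspected or configured
	// here, but must not be deleted by the caller.
	return true;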
|
|
@@ -11,6 +11,7 @@ set(file_analysis_SRCS
 	Manager.cc
 	File.cc
 	FileTimer.cc
+	FileReassembler.cc
 	Analyzer.cc
 	AnalyzerSet.cc
 	Component.cc
|
||||
|
|
|
@ -8,54 +8,24 @@
|
|||
|
||||
using namespace file_analysis;
|
||||
|
||||
Component::Component(const char* arg_name, factory_callback arg_factory)
|
||||
: plugin::Component(plugin::component::FILE_ANALYZER),
|
||||
plugin::TaggedComponent<file_analysis::Tag>()
|
||||
Component::Component(const std::string& name, factory_callback arg_factory, Tag::subtype_t subtype)
|
||||
: plugin::Component(plugin::component::FILE_ANALYZER, name),
|
||||
plugin::TaggedComponent<file_analysis::Tag>(subtype)
|
||||
{
|
||||
name = copy_string(arg_name);
|
||||
canon_name = canonify_name(arg_name);
|
||||
factory = arg_factory;
|
||||
}
|
||||
|
||||
Component::Component(const Component& other)
|
||||
: plugin::Component(Type()),
|
||||
plugin::TaggedComponent<file_analysis::Tag>(other)
|
||||
{
|
||||
name = copy_string(other.name);
|
||||
canon_name = copy_string(other.canon_name);
|
||||
factory = other.factory;
|
||||
file_mgr->RegisterComponent(this, "ANALYZER_");
|
||||
}
|
||||
|
||||
Component::~Component()
|
||||
{
|
||||
delete [] name;
|
||||
delete [] canon_name;
|
||||
}
|
||||
|
||||
void Component::Describe(ODesc* d) const
|
||||
void Component::DoDescribe(ODesc* d) const
|
||||
{
|
||||
plugin::Component::Describe(d);
|
||||
d->Add(name);
|
||||
d->Add(" (");
|
||||
|
||||
if ( factory )
|
||||
{
|
||||
d->Add("ANALYZER_");
|
||||
d->Add(canon_name);
|
||||
d->Add(CanonicalName());
|
||||
}
|
||||
|
||||
d->Add(")");
|
||||
}
|
||||
|
||||
Component& Component::operator=(const Component& other)
|
||||
{
|
||||
plugin::TaggedComponent<file_analysis::Tag>::operator=(other);
|
||||
|
||||
if ( &other != this )
|
||||
{
|
||||
name = copy_string(other.name);
|
||||
factory = other.factory;
|
||||
}
|
||||
|
||||
return *this;
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#ifndef FILE_ANALYZER_PLUGIN_COMPONENT_H
|
||||
#define FILE_ANALYZER_PLUGIN_COMPONENT_H
|
||||
#ifndef FILE_ANALYZER_COMPONENT_H
|
||||
#define FILE_ANALYZER_COMPONENT_H
|
||||
|
||||
#include "Tag.h"
|
||||
#include "plugin/Component.h"
|
||||
|
@ -40,51 +40,32 @@ public:
|
|||
* from file_analysis::Analyzer. This is typically a static \c
|
||||
* Instantiate() method inside the class that just allocates and
|
||||
* returns a new instance.
|
||||
*
|
||||
* @param subtype A subtype associated with this component that
|
||||
* further distinguishes it. The subtype will be integrated into the
|
||||
* analyzer::Tag that the manager associates with this analyzer, and
|
||||
* analyzer instances can accordingly access it via analyzer::Tag().
|
||||
* If not used, leave at zero.
|
||||
*/
|
||||
Component(const char* name, factory_callback factory);
|
||||
|
||||
/**
|
||||
* Copy constructor.
|
||||
*/
|
||||
Component(const Component& other);
|
||||
Component(const std::string& name, factory_callback factory, Tag::subtype_t subtype = 0);
|
||||
|
||||
/**
|
||||
* Destructor.
|
||||
*/
|
||||
~Component();
|
||||
|
||||
/**
|
||||
* Returns the name of the analyzer. This name is unique across all
|
||||
* analyzers and used to identify it. The returned name is derived
|
||||
* from what's passed to the constructor but upper-cased and
|
||||
* canonified to allow being part of a script-level ID.
|
||||
*/
|
||||
virtual const char* Name() const { return name; }
|
||||
|
||||
/**
|
||||
* Returns a canonicalized version of the analyzer's name. The
|
||||
* returned name is derived from what's passed to the constructor but
|
||||
* upper-cased and transformed to allow being part of a script-level
|
||||
* ID.
|
||||
*/
|
||||
const char* CanonicalName() const { return canon_name; }
|
||||
|
||||
/**
|
||||
* Returns the analyzer's factory function.
|
||||
*/
|
||||
factory_callback Factory() const { return factory; }
|
||||
|
||||
protected:
|
||||
/**
|
||||
* Generates a human-readable description of the component's main
|
||||
* parameters. This goes into the output of \c "bro -NN".
|
||||
-	 */
-	virtual void Describe(ODesc* d) const;
-
-	Component& operator=(const Component& other);
+	 *
+	 * Overridden from plugin::Component.
+	 */
+	virtual void DoDescribe(ODesc* d) const;
|
||||
|
||||
private:
|
||||
const char* name; // The analyzer's name.
|
||||
const char* canon_name; // The analyzer's canonical name.
|
||||
factory_callback factory; // The analyzer's factory callback.
|
||||
};
|
||||
|
||||
|
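With the new constructor taking a std::string name, a factory callback, and an optional Tag subtype, components are now created directly inside a plugin's Configure() method and register themselves with file_mgr from the Component constructor. A hedged one-line sketch; ByteCounter is a hypothetical analyzer, and the default subtype of 0 means "no subtype":

	AddComponent(new ::file_analysis::Component("BYTE_COUNTER",
	                 ::file_analysis::ByteCounter::Instantiate, /* subtype = */ 0));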
|
|
@ -1,6 +1,7 @@
|
|||
// See the file "COPYING" in the main distribution directory for copyright.
|
||||
|
||||
#include <string>
|
||||
#include <algorithm>
|
||||
|
||||
#include "File.h"
|
||||
#include "FileTimer.h"
|
||||
|
@ -52,8 +53,6 @@ int File::overflow_bytes_idx = -1;
|
|||
int File::timeout_interval_idx = -1;
|
||||
int File::bof_buffer_size_idx = -1;
|
||||
int File::bof_buffer_idx = -1;
|
||||
int File::mime_type_idx = -1;
|
||||
int File::mime_types_idx = -1;
|
||||
|
||||
void File::StaticInit()
|
||||
{
|
||||
|
@ -73,27 +72,25 @@ void File::StaticInit()
|
|||
timeout_interval_idx = Idx("timeout_interval");
|
||||
bof_buffer_size_idx = Idx("bof_buffer_size");
|
||||
bof_buffer_idx = Idx("bof_buffer");
|
||||
mime_type_idx = Idx("mime_type");
|
||||
mime_types_idx = Idx("mime_types");
|
||||
}
|
||||
|
||||
File::File(const string& file_id, Connection* conn, analyzer::Tag tag,
|
||||
bool is_orig)
|
||||
: id(file_id), val(0), postpone_timeout(false), first_chunk(true),
|
||||
missed_bof(false), need_reassembly(false), done(false),
|
||||
did_file_new_event(false), analyzers(this)
|
||||
File::File(const string& file_id, const string& source_name, Connection* conn,
|
||||
analyzer::Tag tag, bool is_orig)
|
||||
: id(file_id), val(0), file_reassembler(0), stream_offset(0),
|
||||
reassembly_max_buffer(0), did_mime_type(false),
|
||||
reassembly_enabled(false), postpone_timeout(false), done(false),
|
||||
analyzers(this)
|
||||
{
|
||||
StaticInit();
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Creating new File object %s", file_id.c_str());
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "[%s] Creating new File object", file_id.c_str());
|
||||
|
||||
val = new RecordVal(fa_file_type);
|
||||
val->Assign(id_idx, new StringVal(file_id.c_str()));
|
||||
SetSource(source_name);
|
||||
|
||||
if ( conn )
|
||||
{
|
||||
// add source, connection, is_orig fields
|
||||
SetSource(analyzer_mgr->GetComponentName(tag));
|
||||
val->Assign(is_orig_idx, new Val(is_orig, TYPE_BOOL));
|
||||
UpdateConnectionFields(conn, is_orig);
|
||||
}
|
||||
|
@ -103,14 +100,9 @@ File::File(const string& file_id, Connection* conn, analyzer::Tag tag,
|
|||
|
||||
File::~File()
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Destroying File object %s", id.c_str());
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "[%s] Destroying File object", id.c_str());
|
||||
Unref(val);
|
||||
|
||||
while ( ! fonc_queue.empty() )
|
||||
{
|
||||
delete_vals(fonc_queue.front().second);
|
||||
fonc_queue.pop();
|
||||
}
|
||||
delete file_reassembler;
|
||||
}
|
||||
|
||||
void File::UpdateLastActivityTime()
|
||||
|
@ -123,10 +115,10 @@ double File::GetLastActivityTime() const
|
|||
return val->Lookup(last_active_idx)->AsTime();
|
||||
}
|
||||
|
||||
void File::UpdateConnectionFields(Connection* conn, bool is_orig)
|
||||
bool File::UpdateConnectionFields(Connection* conn, bool is_orig)
|
||||
{
|
||||
if ( ! conn )
|
||||
return;
|
||||
return false;
|
||||
|
||||
Val* conns = val->Lookup(conns_idx);
|
||||
|
||||
|
@ -137,27 +129,28 @@ void File::UpdateConnectionFields(Connection* conn, bool is_orig)
|
|||
}
|
||||
|
||||
Val* idx = get_conn_id_val(conn);
|
||||
if ( ! conns->AsTableVal()->Lookup(idx) )
|
||||
|
||||
if ( conns->AsTableVal()->Lookup(idx) )
|
||||
{
|
||||
Val* conn_val = conn->BuildConnVal();
|
||||
conns->AsTableVal()->Assign(idx, conn_val);
|
||||
|
||||
if ( FileEventAvailable(file_over_new_connection) )
|
||||
{
|
||||
val_list* vl = new val_list();
|
||||
vl->append(val->Ref());
|
||||
vl->append(conn_val->Ref());
|
||||
vl->append(new Val(is_orig, TYPE_BOOL));
|
||||
|
||||
if ( did_file_new_event )
|
||||
FileEvent(file_over_new_connection, vl);
|
||||
else
|
||||
fonc_queue.push(pair<EventHandlerPtr, val_list*>(
|
||||
file_over_new_connection, vl));
|
||||
}
|
||||
Unref(idx);
|
||||
return false;
|
||||
}
|
||||
|
||||
conns->AsTableVal()->Assign(idx, conn->BuildConnVal());
|
||||
Unref(idx);
|
||||
return true;
|
||||
}
|
||||
|
||||
void File::RaiseFileOverNewConnection(Connection* conn, bool is_orig)
|
||||
{
|
||||
if ( conn && FileEventAvailable(file_over_new_connection) )
|
||||
{
|
||||
val_list* vl = new val_list();
|
||||
vl->append(val->Ref());
|
||||
vl->append(conn->BuildConnVal());
|
||||
vl->append(new Val(is_orig, TYPE_BOOL));
|
||||
FileEvent(file_over_new_connection, vl);
|
||||
}
|
||||
}
|
||||
|
||||
uint64 File::LookupFieldDefaultCount(int idx) const
|
||||
|
@ -231,6 +224,7 @@ void File::IncrementByteCount(uint64 size, int field_idx)
|
|||
|
||||
void File::SetTotalBytes(uint64 size)
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "[%s] Total bytes %" PRIu64, id.c_str(), size);
|
||||
val->Assign(total_bytes_idx, new Val(size, TYPE_COUNT));
|
||||
}
|
||||
|
||||
|
@ -240,7 +234,7 @@ bool File::IsComplete() const
|
|||
if ( ! total )
|
||||
return false;
|
||||
|
||||
if ( LookupFieldDefaultCount(seen_bytes_idx) >= total->AsCount() )
|
||||
if ( stream_offset >= total->AsCount() )
|
||||
return true;
|
||||
|
||||
return false;
|
||||
|
@ -253,17 +247,87 @@ void File::ScheduleInactivityTimer() const
|
|||
|
||||
bool File::AddAnalyzer(file_analysis::Tag tag, RecordVal* args)
|
||||
{
|
||||
return done ? false : analyzers.QueueAdd(tag, args);
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "[%s] Queuing addition of %s analyzer",
|
||||
id.c_str(), file_mgr->GetComponentName(tag).c_str());
|
||||
|
||||
if ( done )
|
||||
return false;
|
||||
|
||||
return analyzers.QueueAdd(tag, args) != 0;
|
||||
}
|
||||
|
||||
bool File::RemoveAnalyzer(file_analysis::Tag tag, RecordVal* args)
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "[%s] Queuing remove of %s analyzer",
|
||||
id.c_str(), file_mgr->GetComponentName(tag).c_str());
|
||||
|
||||
return done ? false : analyzers.QueueRemove(tag, args);
|
||||
}
|
||||
|
||||
void File::EnableReassembly()
|
||||
{
|
||||
reassembly_enabled = true;
|
||||
}
|
||||
|
||||
void File::DisableReassembly()
|
||||
{
|
||||
reassembly_enabled = false;
|
||||
delete file_reassembler;
|
||||
file_reassembler = 0;
|
||||
}
|
||||
|
||||
void File::SetReassemblyBuffer(uint64 max)
|
||||
{
|
||||
reassembly_max_buffer = max;
|
||||
}
|
||||
|
||||
bool File::DetectMIME()
|
||||
{
|
||||
did_mime_type = true;
|
||||
|
||||
Val* bof_buffer_val = val->Lookup(bof_buffer_idx);
|
||||
|
||||
if ( ! bof_buffer_val )
|
||||
{
|
||||
if ( bof_buffer.size == 0 )
|
||||
return false;
|
||||
|
||||
BroString* bs = concatenate(bof_buffer.chunks);
|
||||
bof_buffer_val = new StringVal(bs);
|
||||
val->Assign(bof_buffer_idx, bof_buffer_val);
|
||||
}
|
||||
|
||||
RuleMatcher::MIME_Matches matches;
|
||||
const u_char* data = bof_buffer_val->AsString()->Bytes();
|
||||
uint64 len = bof_buffer_val->AsString()->Len();
|
||||
len = min(len, LookupFieldDefaultCount(bof_buffer_size_idx));
|
||||
file_mgr->DetectMIME(data, len, &matches);
|
||||
|
||||
if ( matches.empty() )
|
||||
return false;
|
||||
|
||||
if ( FileEventAvailable(file_mime_type) )
|
||||
{
|
||||
val_list* vl = new val_list();
|
||||
vl->append(val->Ref());
|
||||
vl->append(new StringVal(*(matches.begin()->second.begin())));
|
||||
FileEvent(file_mime_type, vl);
|
||||
}
|
||||
|
||||
if ( FileEventAvailable(file_mime_types) )
|
||||
{
|
||||
val_list* vl = new val_list();
|
||||
vl->append(val->Ref());
|
||||
vl->append(file_analysis::GenMIMEMatchesVal(matches));
|
||||
FileEvent(file_mime_types, vl);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool File::BufferBOF(const u_char* data, uint64 len)
|
||||
{
|
||||
if ( bof_buffer.full || bof_buffer.replayed )
|
||||
if ( bof_buffer.full )
|
||||
return false;
|
||||
|
||||
uint64 desired_size = LookupFieldDefaultCount(bof_buffer_size_idx);
|
||||
|
@ -271,133 +335,174 @@ bool File::BufferBOF(const u_char* data, uint64 len)
|
|||
bof_buffer.chunks.push_back(new BroString(data, len, 0));
|
||||
bof_buffer.size += len;
|
||||
|
||||
if ( bof_buffer.size >= desired_size )
|
||||
if ( bof_buffer.size < desired_size )
|
||||
return true;
|
||||
|
||||
bof_buffer.full = true;
|
||||
|
||||
if ( bof_buffer.size > 0 )
|
||||
{
|
||||
bof_buffer.full = true;
|
||||
ReplayBOF();
|
||||
BroString* bs = concatenate(bof_buffer.chunks);
|
||||
val->Assign(bof_buffer_idx, new StringVal(bs));
|
||||
}
|
||||
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
bool File::DetectMIME(const u_char* data, uint64 len)
|
||||
void File::DeliverStream(const u_char* data, uint64 len)
|
||||
{
|
||||
RuleMatcher::MIME_Matches matches;
|
||||
len = min(len, LookupFieldDefaultCount(bof_buffer_size_idx));
|
||||
file_mgr->DetectMIME(data, len, &matches);
|
||||
bool bof_was_full = bof_buffer.full;
|
||||
// Buffer enough data for the BOF buffer
|
||||
BufferBOF(data, len);
|
||||
|
||||
if ( matches.empty() )
|
||||
return false;
|
||||
if ( ! did_mime_type && bof_buffer.full &&
|
||||
LookupFieldDefaultCount(missing_bytes_idx) == 0 )
|
||||
DetectMIME();
|
||||
|
||||
val->Assign(mime_type_idx,
|
||||
new StringVal(*(matches.begin()->second.begin())));
|
||||
val->Assign(mime_types_idx, file_analysis::GenMIMEMatchesVal(matches));
|
||||
DBG_LOG(DBG_FILE_ANALYSIS,
|
||||
"[%s] %" PRIu64 " stream bytes in at offset %" PRIu64 "; %s [%s%s]",
|
||||
id.c_str(), len, stream_offset,
|
||||
IsComplete() ? "complete" : "incomplete",
|
||||
fmt_bytes((const char*) data, min((uint64)40, len)),
|
||||
len > 40 ? "..." : "");
|
||||
|
||||
return true;
|
||||
}
|
||||
file_analysis::Analyzer* a = 0;
|
||||
IterCookie* c = analyzers.InitForIteration();
|
||||
|
||||
void File::ReplayBOF()
|
||||
{
|
||||
if ( bof_buffer.replayed )
|
||||
return;
|
||||
|
||||
bof_buffer.replayed = true;
|
||||
|
||||
if ( bof_buffer.chunks.empty() )
|
||||
while ( (a = analyzers.NextEntry(c)) )
|
||||
{
|
||||
// Since we missed the beginning, try file type detect on next data in.
|
||||
missed_bof = true;
|
||||
return;
|
||||
if ( ! a->GotStreamDelivery() )
|
||||
{
|
||||
int num_bof_chunks_behind = bof_buffer.chunks.size();
|
||||
|
||||
if ( ! bof_was_full )
|
||||
// We just added a chunk to the BOF buffer, don't count it
|
||||
// as it will get delivered on its own.
|
||||
num_bof_chunks_behind -= 1;
|
||||
|
||||
uint64 bytes_delivered = 0;
|
||||
|
||||
// Catch this analyzer up with the BOF buffer.
|
||||
for ( int i = 0; i < num_bof_chunks_behind; ++i )
|
||||
{
|
||||
if ( ! a->DeliverStream(bof_buffer.chunks[i]->Bytes(),
|
||||
bof_buffer.chunks[i]->Len()) )
|
||||
analyzers.QueueRemove(a->Tag(), a->Args());
|
||||
|
||||
bytes_delivered += bof_buffer.chunks[i]->Len();
|
||||
}
|
||||
|
||||
a->SetGotStreamDelivery();
|
||||
// May need to catch analyzer up on missed gap?
|
||||
// Analyzer should be fully caught up to stream_offset now.
|
||||
}
|
||||
|
||||
if ( ! a->DeliverStream(data, len) )
|
||||
analyzers.QueueRemove(a->Tag(), a->Args());
|
||||
}
|
||||
|
||||
BroString* bs = concatenate(bof_buffer.chunks);
|
||||
val->Assign(bof_buffer_idx, new StringVal(bs));
|
||||
stream_offset += len;
|
||||
IncrementByteCount(len, seen_bytes_idx);
|
||||
}
|
||||
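The catch-up loop above replays the buffered beginning-of-file chunks to any analyzer that has not yet seen a stream delivery, skipping the chunk that was appended by the current call (it is delivered separately just below). A standalone model of only that bookkeeping, with illustrative names:

#include <cstdio>

// How many buffered BOF chunks must be replayed to a late analyzer. Assumes at
// least one chunk is buffered when bof_was_full is false (the one just added).
static size_t chunks_to_replay(size_t chunks_buffered, bool bof_was_full)
	{
	size_t n = chunks_buffered;

	if ( ! bof_was_full )
		n -= 1;   // newest chunk is delivered by the normal path, not replayed

	return n;
	}

int main()
	{
	printf("%zu\n", chunks_to_replay(3, false)); // buffer filled on this call: replay 2
	printf("%zu\n", chunks_to_replay(3, true));  // buffer was already full: replay all 3
	}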
|
||||
DetectMIME(bs->Bytes(), bs->Len());
|
||||
void File::DeliverChunk(const u_char* data, uint64 len, uint64 offset)
|
||||
{
|
||||
// Potentially handle reassembly and deliver to the stream analyzers.
|
||||
if ( file_reassembler )
|
||||
{
|
||||
if ( reassembly_max_buffer > 0 &&
|
||||
reassembly_max_buffer < file_reassembler->TotalSize() )
|
||||
{
|
||||
uint64 current_offset = stream_offset;
|
||||
uint64 gap_bytes = file_reassembler->Flush();
|
||||
IncrementByteCount(gap_bytes, overflow_bytes_idx);
|
||||
|
||||
FileEvent(file_new);
|
||||
if ( FileEventAvailable(file_reassembly_overflow) )
|
||||
{
|
||||
val_list* vl = new val_list();
|
||||
vl->append(val->Ref());
|
||||
vl->append(new Val(current_offset, TYPE_COUNT));
|
||||
vl->append(new Val(gap_bytes, TYPE_COUNT));
|
||||
FileEvent(file_reassembly_overflow, vl);
|
||||
}
|
||||
}
|
||||
|
||||
for ( size_t i = 0; i < bof_buffer.chunks.size(); ++i )
|
||||
DataIn(bof_buffer.chunks[i]->Bytes(), bof_buffer.chunks[i]->Len());
|
||||
// Forward data to the reassembler.
|
||||
file_reassembler->NewBlock(network_time, offset, len, data);
|
||||
}
|
||||
else if ( stream_offset == offset )
|
||||
{
|
||||
// This is the normal case where a file is transferred linearly.
|
||||
// Nothing special should be done here.
|
||||
DeliverStream(data, len);
|
||||
}
|
||||
else if ( reassembly_enabled )
|
||||
{
|
||||
// This is data that doesn't match the offset and the reassembler
|
||||
// needs to be enabled.
|
||||
file_reassembler = new FileReassembler(this, stream_offset);
|
||||
file_reassembler->NewBlock(network_time, offset, len, data);
|
||||
}
|
||||
else
|
||||
{
|
||||
// We can't reassemble so we throw out the data for streaming.
|
||||
IncrementByteCount(len, overflow_bytes_idx);
|
||||
}
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS,
|
||||
"[%s] %" PRIu64 " chunk bytes in at offset %" PRIu64 "; %s [%s%s]",
|
||||
id.c_str(), len, offset,
|
||||
IsComplete() ? "complete" : "incomplete",
|
||||
fmt_bytes((const char*) data, min((uint64)40, len)),
|
||||
len > 40 ? "..." : "");
|
||||
|
||||
file_analysis::Analyzer* a = 0;
|
||||
IterCookie* c = analyzers.InitForIteration();
|
||||
|
||||
while ( (a = analyzers.NextEntry(c)) )
|
||||
{
|
||||
if ( ! a->DeliverChunk(data, len, offset) )
|
||||
{
|
||||
analyzers.QueueRemove(a->Tag(), a->Args());
|
||||
}
|
||||
}
|
||||
|
||||
if ( IsComplete() )
|
||||
EndOfFile();
|
||||
}
|
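The branches above decide what happens to each incoming chunk: hand it to an existing reassembler, deliver it in order, create a reassembler for out-of-order data, or drop it as overflow. A standalone model of just that decision, with invented names (the real method acts on the FileReassembler and the attached analyzers rather than returning a value):

#include <cstdint>
#include <iostream>

enum class ChunkAction { Reassemble, DeliverInOrder, StartReassembly, DropAsOverflow };

ChunkAction classify_chunk(bool have_reassembler, bool reassembly_enabled,
                           uint64_t stream_offset, uint64_t chunk_offset)
	{
	if ( have_reassembler )
		return ChunkAction::Reassemble;        // forward the block to the reassembler

	if ( chunk_offset == stream_offset )
		return ChunkAction::DeliverInOrder;    // common linear-transfer case

	if ( reassembly_enabled )
		return ChunkAction::StartReassembly;   // out of order: create a reassembler

	return ChunkAction::DropAsOverflow;        // can't reorder: count as overflow bytes
	}

int main()
	{
	std::cout << static_cast<int>(classify_chunk(false, true, 0, 0)) << "\n";     // DeliverInOrder
	std::cout << static_cast<int>(classify_chunk(false, true, 0, 4096)) << "\n";  // StartReassembly
	std::cout << static_cast<int>(classify_chunk(false, false, 0, 4096)) << "\n"; // DropAsOverflow
	}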
||||
|
||||
void File::DataIn(const u_char* data, uint64 len, uint64 offset)
|
||||
{
|
||||
analyzers.DrainModifications();
|
||||
|
||||
if ( first_chunk )
|
||||
{
|
||||
// TODO: this should all really be delayed until we attempt reassembly
|
||||
DetectMIME(data, len);
|
||||
FileEvent(file_new);
|
||||
first_chunk = false;
|
||||
}
|
||||
|
||||
file_analysis::Analyzer* a = 0;
|
||||
IterCookie* c = analyzers.InitForIteration();
|
||||
|
||||
while ( (a = analyzers.NextEntry(c)) )
|
||||
{
|
||||
if ( ! a->DeliverChunk(data, len, offset) )
|
||||
analyzers.QueueRemove(a->Tag(), a->Args());
|
||||
}
|
||||
|
||||
DeliverChunk(data, len, offset);
|
||||
analyzers.DrainModifications();
|
||||
|
||||
// TODO: check reassembly requirement based on buffer size in record
|
||||
if ( need_reassembly )
|
||||
reporter->InternalError("file_analyzer::File TODO: reassembly not yet supported");
|
||||
|
||||
// TODO: reassembly overflow stuff, increment overflow count, eval trigger
|
||||
|
||||
IncrementByteCount(len, seen_bytes_idx);
|
||||
}
|
||||
|
||||
void File::DataIn(const u_char* data, uint64 len)
|
||||
{
|
||||
analyzers.DrainModifications();
|
||||
|
||||
if ( BufferBOF(data, len) )
|
||||
return;
|
||||
|
||||
if ( missed_bof )
|
||||
{
|
||||
DetectMIME(data, len);
|
||||
FileEvent(file_new);
|
||||
missed_bof = false;
|
||||
}
|
||||
|
||||
file_analysis::Analyzer* a = 0;
|
||||
IterCookie* c = analyzers.InitForIteration();
|
||||
|
||||
while ( (a = analyzers.NextEntry(c)) )
|
||||
{
|
||||
if ( ! a->DeliverStream(data, len) )
|
||||
{
|
||||
analyzers.QueueRemove(a->Tag(), a->Args());
|
||||
continue;
|
||||
}
|
||||
|
||||
uint64 offset = LookupFieldDefaultCount(seen_bytes_idx) +
|
||||
LookupFieldDefaultCount(missing_bytes_idx);
|
||||
|
||||
if ( ! a->DeliverChunk(data, len, offset) )
|
||||
analyzers.QueueRemove(a->Tag(), a->Args());
|
||||
}
|
||||
|
||||
DeliverChunk(data, len, stream_offset);
|
||||
analyzers.DrainModifications();
|
||||
IncrementByteCount(len, seen_bytes_idx);
|
||||
}
|
||||
|
||||
void File::EndOfFile()
|
||||
{
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "[%s] End of file", id.c_str());
|
||||
|
||||
if ( done )
|
||||
return;
|
||||
|
||||
if ( ! did_mime_type &&
|
||||
LookupFieldDefaultCount(missing_bytes_idx) == 0 )
|
||||
DetectMIME();
|
||||
|
||||
analyzers.DrainModifications();
|
||||
|
||||
// Send along anything that's been buffered, but never flushed.
|
||||
ReplayBOF();
|
||||
if ( file_reassembler )
|
||||
{
|
||||
file_reassembler->Flush();
|
||||
analyzers.DrainModifications();
|
||||
}
|
||||
|
||||
done = true;
|
||||
|
||||
|
@ -417,11 +522,17 @@ void File::EndOfFile()
|
|||
|
||||
void File::Gap(uint64 offset, uint64 len)
|
||||
{
|
||||
analyzers.DrainModifications();
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "[%s] Gap of size %" PRIu64 " at offset %," PRIu64,
|
||||
id.c_str(), len, offset);
|
||||
|
||||
// If we were buffering the beginning of the file, a gap means we've got
|
||||
// as much contiguous stuff at the beginning as possible, so work with that.
|
||||
ReplayBOF();
|
||||
if ( file_reassembler && ! file_reassembler->IsCurrentlyFlushing() )
|
||||
{
|
||||
file_reassembler->FlushTo(offset + len);
|
||||
// The reassembler will call us back with all the gaps we need to know.
|
||||
return;
|
||||
}
|
||||
|
||||
analyzers.DrainModifications();
|
||||
|
||||
file_analysis::Analyzer* a = 0;
|
||||
IterCookie* c = analyzers.InitForIteration();
|
||||
|
@ -442,6 +553,8 @@ void File::Gap(uint64 offset, uint64 len)
|
|||
}
|
||||
|
||||
analyzers.DrainModifications();
|
||||
|
||||
stream_offset += len;
|
||||
IncrementByteCount(len, missing_bytes_idx);
|
||||
}
|
||||
|
||||
|
@ -460,30 +573,13 @@ void File::FileEvent(EventHandlerPtr h)
|
|||
FileEvent(h, vl);
|
||||
}
|
||||
|
||||
static void flush_file_event_queue(queue<pair<EventHandlerPtr, val_list*> >& q)
|
||||
{
|
||||
while ( ! q.empty() )
|
||||
{
|
||||
pair<EventHandlerPtr, val_list*> p = q.front();
|
||||
mgr.QueueEvent(p.first, p.second);
|
||||
q.pop();
|
||||
}
|
||||
}
|
||||
|
||||
void File::FileEvent(EventHandlerPtr h, val_list* vl)
|
||||
{
|
||||
if ( h == file_state_remove )
|
||||
flush_file_event_queue(fonc_queue);
|
||||
|
||||
mgr.QueueEvent(h, vl);
|
||||
|
||||
if ( h == file_new )
|
||||
{
|
||||
did_file_new_event = true;
|
||||
flush_file_event_queue(fonc_queue);
|
||||
}
|
||||
|
||||
if ( h == file_new || h == file_timeout || h == file_extraction_limit )
|
||||
if ( h == file_new || h == file_over_new_connection ||
|
||||
h == file_mime_type ||
|
||||
h == file_timeout || h == file_extraction_limit )
|
||||
{
|
||||
// immediate feedback is required for these events.
|
||||
mgr.Drain();
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
#ifndef FILE_ANALYSIS_FILE_H
|
||||
#define FILE_ANALYSIS_FILE_H
|
||||
|
||||
#include <queue>
|
||||
#include <string>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "FileReassembler.h"
|
||||
#include "Conn.h"
|
||||
#include "Val.h"
|
||||
#include "Tag.h"
|
||||
|
@ -16,6 +16,8 @@
|
|||
|
||||
namespace file_analysis {
|
||||
|
||||
class FileReassembler;
|
||||
|
||||
/**
|
||||
* Wrapper class around \c fa_file record values from script layer.
|
||||
*/
|
||||
|
@ -86,10 +88,10 @@ public:
|
|||
void SetTotalBytes(uint64 size);
|
||||
|
||||
/**
|
||||
* Compares "seen_bytes" field to "total_bytes" field of #val record to
|
||||
* determine if the full file has been seen.
|
||||
* @return false if "total_bytes" hasn't been set yet or "seen_bytes" is
|
||||
* less than it, else true.
|
||||
* @return true if file analysis is complete for the file, else false.
|
||||
* It is incomplete if the total size is unknown or if the number of bytes
* streamed to analyzers (either as data deliveries or gap information)
* doesn't yet match the known total size.
|
||||
*/
|
||||
bool IsComplete() const;
|
||||
|
||||
|
@ -166,18 +168,20 @@ public:
|
|||
|
||||
protected:
|
||||
friend class Manager;
|
||||
friend class FileReassembler;
|
||||
|
||||
/**
|
||||
* Constructor; only file_analysis::Manager should be creating these.
|
||||
* @param file_id an identifier string for the file in pretty hash form
|
||||
* (similar to connection uids).
|
||||
* @param source_name the value for the source field to fill in.
|
||||
* @param conn a network connection over which the file is transferred.
|
||||
* @param tag the network protocol over which the file is transferred.
|
||||
* @param is_orig true if the file is being transferred from the originator
|
||||
* of the connection to the responder. False indicates the other
|
||||
* direction.
|
||||
*/
|
||||
File(const string& file_id, Connection* conn = 0,
|
||||
File(const string& file_id, const string& source_name, Connection* conn = 0,
|
||||
analyzer::Tag tag = analyzer::Tag::Error, bool is_orig = false);
|
||||
|
||||
/**
|
||||
|
@ -185,8 +189,14 @@ protected:
|
|||
* \c conn_id and UID taken from \a conn.
|
||||
* @param conn the connection over which a part of the file has been seen.
|
||||
* @param is_orig true if the connection originator is sending the file.
|
||||
* @return true if the connection was previously unknown.
|
||||
*/
|
||||
void UpdateConnectionFields(Connection* conn, bool is_orig);
|
||||
bool UpdateConnectionFields(Connection* conn, bool is_orig);
|
||||
|
||||
/**
|
||||
* Raise the file_over_new_connection event with given arguments.
|
||||
*/
|
||||
void RaiseFileOverNewConnection(Connection* conn, bool is_orig);
|
||||
|
||||
/**
|
||||
* Increment a byte count field of #val record by \a size.
|
||||
|
@ -219,20 +229,40 @@ protected:
|
|||
*/
|
||||
bool BufferBOF(const u_char* data, uint64 len);
|
||||
|
||||
/**
|
||||
* Forward any beginning-of-file buffered data on to DataIn stream.
|
||||
*/
|
||||
void ReplayBOF();
|
||||
|
||||
/**
|
||||
* Does mime type detection via file magic signatures and assigns
|
||||
* strongest matching mime type (if available) to \c mime_type
|
||||
* field in #val.
|
||||
* @param data pointer to a chunk of file data.
|
||||
* @param len number of bytes in the data chunk.
|
||||
* field in #val. It uses the data in the BOF buffer.
|
||||
* @return whether a mime type match was found.
|
||||
*/
|
||||
bool DetectMIME(const u_char* data, uint64 len);
|
||||
bool DetectMIME();
|
||||
|
||||
/**
|
||||
* Enables reassembly on the file.
|
||||
*/
|
||||
void EnableReassembly();
|
||||
|
||||
/**
|
||||
* Disables reassembly on the file. If there is an existing reassembler
|
||||
* for the file, this will cause it to be deleted and won't allow a new
|
||||
* one to be created until reassembly is reenabled.
|
||||
*/
|
||||
void DisableReassembly();
|
||||
|
||||
/**
|
||||
* Set a maximum allowed bytes of memory for file reassembly for this file.
|
||||
*/
|
||||
void SetReassemblyBuffer(uint64 max);
|
||||
|
||||
/**
|
||||
* Perform stream-wise delivery for analyzers that need it.
|
||||
*/
|
||||
void DeliverStream(const u_char* data, uint64 len);
|
||||
|
||||
/**
|
||||
* Perform chunk-wise delivery for analyzers that need it.
|
||||
*/
|
||||
void DeliverChunk(const u_char* data, uint64 len, uint64 offset);
|
||||
|
||||
/**
|
||||
* Lookup a record field index/offset by name.
|
||||
|
@ -246,25 +276,24 @@ protected:
|
|||
*/
|
||||
static void StaticInit();
|
||||
|
||||
private:
|
||||
protected:
|
||||
string id; /**< A pretty hash that likely identifies file */
|
||||
RecordVal* val; /**< \c fa_file from script layer. */
|
||||
FileReassembler* file_reassembler; /**< A reassembler for the file if it's needed. */
|
||||
uint64 stream_offset; /**< The offset of the file which has been forwarded. */
|
||||
uint64 reassembly_max_buffer; /**< Maximum allowed buffer for reassembly. */
|
||||
bool did_mime_type; /**< Whether the mime type ident has already been attempted. */
|
||||
bool reassembly_enabled; /**< Whether file stream reassembly is needed. */
|
||||
bool postpone_timeout; /**< Whether postponing timeout is requested. */
|
||||
bool first_chunk; /**< Track first non-linear chunk. */
|
||||
bool missed_bof; /**< Flags that we missed start of file. */
|
||||
bool need_reassembly; /**< Whether file stream reassembly is needed. */
|
||||
bool done; /**< If this object is about to be deleted. */
|
||||
bool did_file_new_event; /**< Whether the file_new event has been done. */
|
||||
AnalyzerSet analyzers; /**< A set of attached file analyzer. */
|
||||
queue<pair<EventHandlerPtr, val_list*> > fonc_queue;
|
||||
AnalyzerSet analyzers; /**< A set of attached file analyzers. */
|
||||
|
||||
struct BOF_Buffer {
|
||||
BOF_Buffer() : full(false), replayed(false), size(0) {}
|
||||
BOF_Buffer() : full(false), size(0) {}
|
||||
~BOF_Buffer()
|
||||
{ for ( size_t i = 0; i < chunks.size(); ++i ) delete chunks[i]; }
|
||||
|
||||
bool full;
|
||||
bool replayed;
|
||||
uint64 size;
|
||||
BroString::CVec chunks;
|
||||
} bof_buffer; /**< Beginning of file buffer. */
|
||||
|
|
src/file_analysis/FileReassembler.cc (new file, 128 lines)
|
@ -0,0 +1,128 @@
|
|||
|
||||
#include "FileReassembler.h"
|
||||
#include "File.h"
|
||||
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
class File;
|
||||
|
||||
FileReassembler::FileReassembler(File *f, uint64 starting_offset)
|
||||
: Reassembler(starting_offset), the_file(f), flushing(false)
|
||||
{
|
||||
}
|
||||
|
||||
FileReassembler::FileReassembler()
|
||||
: Reassembler(), the_file(0), flushing(false)
|
||||
{
|
||||
}
|
||||
|
||||
FileReassembler::~FileReassembler()
|
||||
{
|
||||
}
|
||||
|
||||
uint64 FileReassembler::Flush()
|
||||
{
|
||||
if ( flushing )
|
||||
return 0;
|
||||
|
||||
if ( last_block )
|
||||
{
|
||||
// This is expected to call back into FileReassembler::Undelivered().
|
||||
flushing = true;
|
||||
uint64 rval = TrimToSeq(last_block->upper);
|
||||
flushing = false;
|
||||
return rval;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
uint64 FileReassembler::FlushTo(uint64 sequence)
|
||||
{
|
||||
if ( flushing )
|
||||
return 0;
|
||||
|
||||
flushing = true;
|
||||
uint64 rval = TrimToSeq(sequence);
|
||||
flushing = false;
|
||||
last_reassem_seq = sequence;
|
||||
return rval;
|
||||
}
|
||||
|
||||
void FileReassembler::BlockInserted(DataBlock* start_block)
|
||||
{
|
||||
if ( start_block->seq > last_reassem_seq ||
|
||||
start_block->upper <= last_reassem_seq )
|
||||
return;
|
||||
|
||||
for ( DataBlock* b = start_block;
|
||||
b && b->seq <= last_reassem_seq; b = b->next )
|
||||
{
|
||||
if ( b->seq == last_reassem_seq )
|
||||
{ // New stuff.
|
||||
uint64 len = b->Size();
|
||||
last_reassem_seq += len;
|
||||
the_file->DeliverStream(b->block, len);
|
||||
}
|
||||
}
|
||||
|
||||
// Throw out forwarded data
|
||||
TrimToSeq(last_reassem_seq);
|
||||
}
|
||||
|
||||
void FileReassembler::Undelivered(uint64 up_to_seq)
|
||||
{
|
||||
// If we have blocks that begin below up_to_seq, deliver them.
|
||||
DataBlock* b = blocks;
|
||||
|
||||
while ( b )
|
||||
{
|
||||
if ( b->seq < last_reassem_seq )
|
||||
{
|
||||
// Already delivered this block.
|
||||
b = b->next;
|
||||
continue;
|
||||
}
|
||||
|
||||
if ( b->seq >= up_to_seq )
|
||||
// Block is beyond what we need to process at this point.
|
||||
break;
|
||||
|
||||
uint64 gap_at_seq = last_reassem_seq;
|
||||
uint64 gap_len = b->seq - last_reassem_seq;
|
||||
the_file->Gap(gap_at_seq, gap_len);
|
||||
last_reassem_seq += gap_len;
|
||||
BlockInserted(b);
|
||||
// Inserting a block may cause trimming of what's buffered,
|
||||
// so have to assume 'b' is invalid, hence re-assign to start.
|
||||
b = blocks;
|
||||
}
|
||||
|
||||
if ( up_to_seq > last_reassem_seq )
|
||||
{
|
||||
the_file->Gap(last_reassem_seq, up_to_seq - last_reassem_seq);
|
||||
last_reassem_seq = up_to_seq;
|
||||
}
|
||||
}
|
||||
|
||||
void FileReassembler::Overlap(const u_char* b1, const u_char* b2, uint64 n)
|
||||
{
|
||||
// Not doing anything here yet.
|
||||
}
|
||||
|
||||
IMPLEMENT_SERIAL(FileReassembler, SER_FILE_REASSEMBLER);
|
||||
|
||||
bool FileReassembler::DoSerialize(SerialInfo* info) const
|
||||
{
|
||||
reporter->InternalError("FileReassembler::DoSerialize not implemented");
|
||||
return false; // Cannot be reached.
|
||||
}
|
||||
|
||||
bool FileReassembler::DoUnserialize(UnserialInfo* info)
|
||||
{
|
||||
reporter->InternalError("FileReassembler::DoUnserialize not implemented");
|
||||
return false; // Cannot be reached.
|
||||
}
|
||||
|
||||
} // end file_analysis
|
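Undelivered() walks the buffered blocks in sequence order, reporting each hole as a gap and delivering the blocks in between, then reports any trailing hole up to the target sequence number. A simplified standalone model of that walk; Block, the sorted vector, and the printf calls stand in for the real DataBlock list and the File::Gap()/DeliverStream() callbacks:

#include <cstdint>
#include <cstdio>
#include <vector>

struct Block { uint64_t seq, upper; };   // buffered data covering [seq, upper)

// Flush blocks (sorted by seq) up to 'up_to', reporting gaps between them.
static void flush_to(const std::vector<Block>& blocks, uint64_t& next_seq, uint64_t up_to)
	{
	for ( const Block& b : blocks )
		{
		if ( b.upper <= next_seq )
			continue;            // already delivered
		if ( b.seq >= up_to )
			break;               // beyond the flush target

		if ( b.seq > next_seq )
			printf("gap [%llu, %llu)\n",
			       (unsigned long long)next_seq, (unsigned long long)b.seq);

		printf("deliver [%llu, %llu)\n",
		       (unsigned long long)b.seq, (unsigned long long)b.upper);
		next_seq = b.upper;
		}

	if ( up_to > next_seq )
		{
		printf("gap [%llu, %llu)\n",
		       (unsigned long long)next_seq, (unsigned long long)up_to);
		next_seq = up_to;
		}
	}

int main()
	{
	std::vector<Block> blocks = { {100, 200}, {300, 400} };
	uint64_t next_seq = 0;
	flush_to(blocks, next_seq, 500);
	// gap [0,100) deliver [100,200) gap [200,300) deliver [300,400) gap [400,500)
	}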
src/file_analysis/FileReassembler.h (new file, 65 lines)
|
@ -0,0 +1,65 @@
|
|||
#ifndef FILE_ANALYSIS_FILEREASSEMBLER_H
|
||||
#define FILE_ANALYSIS_FILEREASSEMBLER_H
|
||||
|
||||
#include "Reassem.h"
|
||||
#include "File.h"
|
||||
|
||||
class BroFile;
|
||||
class Connection;
|
||||
|
||||
namespace file_analysis {
|
||||
|
||||
class File;
|
||||
|
||||
class FileReassembler : public Reassembler {
|
||||
public:
|
||||
|
||||
FileReassembler(File* f, uint64 starting_offset);
|
||||
virtual ~FileReassembler();
|
||||
|
||||
void Done();
|
||||
|
||||
// Checks if we have delivered all contents that we can possibly
|
||||
// deliver for this endpoint.
|
||||
void CheckEOF();
|
||||
|
||||
/**
|
||||
* Discards all contents of the reassembly buffer. This will spin through
|
||||
* the buffer and call File::DeliverStream() and File::Gap() wherever
|
||||
* appropriate.
|
||||
* @return the number of new bytes now detected as gaps in the file.
|
||||
*/
|
||||
uint64 Flush();
|
||||
|
||||
/**
|
||||
* Discards all contents of the reassembly buffer up to a given sequence
|
||||
* number. This will spin through the buffer and call
|
||||
* File::DeliverStream() and File::Gap() wherever appropriate.
|
||||
* @param sequence the sequence number to flush until.
|
||||
* @return the number of new bytes now detected as gaps in the file.
|
||||
*/
|
||||
uint64 FlushTo(uint64 sequence);
|
||||
|
||||
/**
|
||||
* @return whether the reassembler is currently in the process of flushing
|
||||
* out the contents of its buffer.
|
||||
*/
|
||||
bool IsCurrentlyFlushing() const
|
||||
{ return flushing; }
|
||||
|
||||
protected:
|
||||
FileReassembler();
|
||||
|
||||
DECLARE_SERIAL(FileReassembler);
|
||||
|
||||
void Undelivered(uint64 up_to_seq);
|
||||
void BlockInserted(DataBlock* b);
|
||||
void Overlap(const u_char* b1, const u_char* b2, uint64 n);
|
||||
|
||||
File* the_file;
|
||||
bool flushing;
|
||||
};
|
||||
|
||||
} // namespace file_analysis
|
||||
|
||||
#endif
|
|
@ -12,21 +12,26 @@
|
|||
#include "UID.h"
|
||||
|
||||
#include "plugin/Manager.h"
|
||||
#include "analyzer/Manager.h"
|
||||
|
||||
using namespace file_analysis;
|
||||
|
||||
TableVal* Manager::disabled = 0;
|
||||
TableType* Manager::tag_set_type = 0;
|
||||
string Manager::salt;
|
||||
|
||||
Manager::Manager()
|
||||
: plugin::ComponentManager<file_analysis::Tag,
|
||||
file_analysis::Component>("Files"),
|
||||
file_analysis::Component>("Files", "Tag"),
|
||||
id_map(), ignored(), current_file_id(), magic_state()
|
||||
{
|
||||
}
|
||||
|
||||
Manager::~Manager()
|
||||
{
|
||||
for ( MIMEMap::iterator i = mime_types.begin(); i != mime_types.end(); i++ )
|
||||
delete i->second;
|
||||
|
||||
// Have to assume that too much of Bro has been shutdown by this point
|
||||
// to do anything more than reclaim memory.
|
||||
|
||||
|
@ -48,11 +53,6 @@ Manager::~Manager()
|
|||
|
||||
void Manager::InitPreScript()
|
||||
{
|
||||
std::list<Component*> analyzers = plugin_mgr->Components<Component>();
|
||||
|
||||
for ( std::list<Component*>::const_iterator i = analyzers.begin();
|
||||
i != analyzers.end(); ++i )
|
||||
RegisterComponent(*i, "ANALYZER_");
|
||||
}
|
||||
|
||||
void Manager::InitPostScript()
|
||||
|
@ -104,6 +104,7 @@ void Manager::SetHandle(const string& handle)
|
|||
if ( handle.empty() )
|
||||
return;
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Set current handle to %s", handle.c_str());
|
||||
current_file_id = HashHandle(handle);
|
||||
}
|
||||
|
||||
|
@ -153,14 +154,12 @@ string Manager::DataIn(const u_char* data, uint64 len, analyzer::Tag tag,
|
|||
void Manager::DataIn(const u_char* data, uint64 len, const string& file_id,
|
||||
const string& source)
|
||||
{
|
||||
File* file = GetFile(file_id);
|
||||
File* file = GetFile(file_id, 0, analyzer::Tag::Error, false, false,
|
||||
source.c_str());
|
||||
|
||||
if ( ! file )
|
||||
return;
|
||||
|
||||
if ( file->GetSource().empty() )
|
||||
file->SetSource(source);
|
||||
|
||||
file->DataIn(data, len);
|
||||
|
||||
if ( file->IsComplete() )
|
||||
|
@ -231,6 +230,39 @@ bool Manager::SetTimeoutInterval(const string& file_id, double interval) const
|
|||
return true;
|
||||
}
|
||||
|
||||
bool Manager::EnableReassembly(const string& file_id)
|
||||
{
|
||||
File* file = LookupFile(file_id);
|
||||
|
||||
if ( ! file )
|
||||
return false;
|
||||
|
||||
file->EnableReassembly();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Manager::DisableReassembly(const string& file_id)
|
||||
{
|
||||
File* file = LookupFile(file_id);
|
||||
|
||||
if ( ! file )
|
||||
return false;
|
||||
|
||||
file->DisableReassembly();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Manager::SetReassemblyBuffer(const string& file_id, uint64 max)
|
||||
{
|
||||
File* file = LookupFile(file_id);
|
||||
|
||||
if ( ! file )
|
||||
return false;
|
||||
|
||||
file->SetReassemblyBuffer(max);
|
||||
return true;
|
||||
}
|
||||
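These three manager methods are the C++ entry points for the new per-file reassembly controls. A hedged in-tree sketch of driving them through the global manager; fid is assumed to be a std::string file ID obtained elsewhere:

	// Hypothetical caller inside the Bro tree.
	if ( file_mgr->EnableReassembly(fid) )
		// Cap the reassembly buffer at 1 MiB for this file; once the buffer
		// exceeds the cap it is flushed and the skipped range is accounted as
		// overflow/gap bytes (see File::DeliverChunk in this commit).
		file_mgr->SetReassemblyBuffer(fid, 1024 * 1024);
	else
		// Unknown file ID: nothing to configure.
		DBG_LOG(DBG_FILE_ANALYSIS, "no such file: %s", fid.c_str());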
|
||||
bool Manager::SetExtractionLimit(const string& file_id, RecordVal* args,
|
||||
uint64 n) const
|
||||
{
|
||||
|
@ -265,7 +297,8 @@ bool Manager::RemoveAnalyzer(const string& file_id, file_analysis::Tag tag,
|
|||
}
|
||||
|
||||
File* Manager::GetFile(const string& file_id, Connection* conn,
|
||||
analyzer::Tag tag, bool is_orig, bool update_conn)
|
||||
analyzer::Tag tag, bool is_orig, bool update_conn,
|
||||
const char* source_name)
|
||||
{
|
||||
if ( file_id.empty() )
|
||||
return 0;
|
||||
|
@ -277,10 +310,19 @@ File* Manager::GetFile(const string& file_id, Connection* conn,
|
|||
|
||||
if ( ! rval )
|
||||
{
|
||||
rval = new File(file_id, conn, tag, is_orig);
|
||||
rval = new File(file_id,
|
||||
source_name ? source_name
|
||||
: analyzer_mgr->GetComponentName(tag),
|
||||
conn, tag, is_orig);
|
||||
id_map.Insert(file_id.c_str(), rval);
|
||||
rval->ScheduleInactivityTimer();
|
||||
|
||||
// Generate file_new after inserting it into manager's mapping
|
||||
// in case script-layer calls back in to core from the event.
|
||||
rval->FileEvent(file_new);
|
||||
// Same for file_over_new_connection.
|
||||
rval->RaiseFileOverNewConnection(conn, is_orig);
|
||||
|
||||
if ( IsIgnored(file_id) )
|
||||
return 0;
|
||||
}
|
||||
|
@ -288,8 +330,8 @@ File* Manager::GetFile(const string& file_id, Connection* conn,
|
|||
{
|
||||
rval->UpdateLastActivityTime();
|
||||
|
||||
if ( update_conn )
|
||||
rval->UpdateConnectionFields(conn, is_orig);
|
||||
if ( update_conn && rval->UpdateConnectionFields(conn, is_orig) )
|
||||
rval->RaiseFileOverNewConnection(conn, is_orig);
|
||||
}
|
||||
|
||||
return rval;
|
||||
|
@ -372,6 +414,9 @@ string Manager::GetFileID(analyzer::Tag tag, Connection* c, bool is_orig)
|
|||
if ( ! get_file_handle )
|
||||
return "";
|
||||
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Raise get_file_handle() for protocol analyzer %s",
|
||||
analyzer_mgr->GetComponentName(tag).c_str());
|
||||
|
||||
EnumVal* tagval = tag.AsEnumVal();
|
||||
Ref(tagval);
|
||||
|
||||
|
@ -418,11 +463,21 @@ Analyzer* Manager::InstantiateAnalyzer(Tag tag, RecordVal* args, File* f) const
|
|||
if ( ! c->Factory() )
|
||||
{
|
||||
reporter->InternalWarning("file analyzer %s cannot be instantiated "
|
||||
"dynamically", c->CanonicalName());
|
||||
"dynamically", c->CanonicalName().c_str());
|
||||
return 0;
|
||||
}
|
||||
|
||||
return c->Factory()(args, f);
|
||||
DBG_LOG(DBG_FILE_ANALYSIS, "Instantiate analyzer %s for file %s",
|
||||
GetComponentName(tag).c_str(), f->id.c_str());
|
||||
|
||||
Analyzer* a = c->Factory()(args, f);
|
||||
|
||||
if ( ! a )
|
||||
reporter->InternalError("file analyzer instantiation failed");
|
||||
|
||||
a->SetAnalyzerTag(tag);
|
||||
|
||||
return a;
|
||||
}
|
||||
|
||||
RuleMatcher::MIME_Matches* Manager::DetectMIME(const u_char* data, uint64 len,
|
||||
|
|
|
@ -213,6 +213,21 @@ public:
|
|||
*/
|
||||
bool SetTimeoutInterval(const string& file_id, double interval) const;
|
||||
|
||||
/**
|
||||
* Enable the reassembler for a file.
|
||||
*/
|
||||
bool EnableReassembly(const string& file_id);
|
||||
|
||||
/**
|
||||
* Disable the reassembler for a file.
|
||||
*/
|
||||
bool DisableReassembly(const string& file_id);
|
||||
|
||||
/**
|
||||
* Set the maximum reassembly buffer size for a file, in bytes.
|
||||
*/
|
||||
bool SetReassemblyBuffer(const string& file_id, uint64 max);
|
||||
|
||||
/**
|
||||
* Sets a limit on the maximum size allowed for extracting the file
|
||||
* to local disk;
|
||||
|
@ -304,6 +319,7 @@ protected:
|
|||
* this file isn't related to a connection).
|
||||
* @param update_conn whether we need to update connection-related field
|
||||
* in the \c fa_file record value associated with the file.
|
||||
* @param source_name an optional value of the source field to fill in.
|
||||
* @return the File object mapped to \a file_id or a null pointer if
|
||||
* analysis is being ignored for the associated file. An File
|
||||
* object may be created if a mapping doesn't exist, and if it did
|
||||
|
@ -312,7 +328,8 @@ protected:
|
|||
*/
|
||||
File* GetFile(const string& file_id, Connection* conn = 0,
|
||||
analyzer::Tag tag = analyzer::Tag::Error,
|
||||
bool is_orig = false, bool update_conn = true);
|
||||
bool is_orig = false, bool update_conn = true,
|
||||
const char* source_name = 0);
|
||||
|
||||
/**
|
||||
* Try to retrieve a file that's being analyzed, using its identifier/hash.
|
||||
|
@ -362,13 +379,19 @@ protected:
|
|||
static bool IsDisabled(analyzer::Tag tag);
|
||||
|
||||
private:
|
||||
typedef set<Tag> TagSet;
|
||||
typedef map<string, TagSet*> MIMEMap;
|
||||
|
||||
TagSet* LookupMIMEType(const string& mtype, bool add_if_not_found);
|
||||
|
||||
PDict(File) id_map; /**< Map file ID to file_analysis::File records. */
|
||||
PDict(bool) ignored; /**< Ignored files. Will be finally removed on EOF. */
|
||||
string current_file_id; /**< Hash of what get_file_handle event sets. */
|
||||
RuleFileMagicState* magic_state; /**< File magic signature match state. */
|
||||
MIMEMap mime_types;/**< Mapping of MIME types to analyzers. */
|
||||
|
||||
static TableVal* disabled; /**< Table of disabled analyzers. */
|
||||
static TableType* tag_set_type; /**< Type for set[tag]. */
|
||||
static string salt; /**< A salt added to file handles before hashing. */
|
||||
};
|
||||
|
||||
|
|
|
@ -1,8 +1,24 @@
|
|||
// See the file in the main distribution directory for copyright.
|
||||
|
||||
#include "plugin/Plugin.h"
|
||||
|
||||
#include "DataEvent.h"
|
||||
|
||||
BRO_PLUGIN_BEGIN(Bro, FileDataEvent)
|
||||
BRO_PLUGIN_DESCRIPTION("Delivers file content via events");
|
||||
BRO_PLUGIN_FILE_ANALYZER("DATA_EVENT", DataEvent);
|
||||
BRO_PLUGIN_END
|
||||
namespace plugin {
|
||||
namespace Bro_FileDataEvent {
|
||||
|
||||
class Plugin : public plugin::Plugin {
|
||||
public:
|
||||
plugin::Configuration Configure()
|
||||
{
|
||||
AddComponent(new ::file_analysis::Component("DATA_EVENT", ::file_analysis::DataEvent::Instantiate));
|
||||
|
||||
plugin::Configuration config;
|
||||
config.name = "Bro::FileDataEvent";
|
||||
config.description = "Delivers file content";
|
||||
return config;
|
||||
}
|
||||
} plugin;
|
||||
|
||||
}
|
||||
}
|
||||
|
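The same registration pattern generalizes to other file analyzers. A hedged sketch for a hypothetical plugin (ByteCounter, its header, and the plugin name are invented), mirroring the structure shown above:

#include "plugin/Plugin.h"

#include "ByteCounter.h"   // hypothetical analyzer header

namespace plugin {
namespace Bro_FileByteCounter {

class Plugin : public plugin::Plugin {
public:
	plugin::Configuration Configure()
		{
		AddComponent(new ::file_analysis::Component("BYTE_COUNTER",
		                 ::file_analysis::ByteCounter::Instantiate));

		plugin::Configuration config;
		config.name = "Bro::FileByteCounter";
		config.description = "Counts file bytes (illustrative only)";
		return config;
		}
} plugin;

}
}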
|
|
@ -12,9 +12,9 @@ using namespace file_analysis;
|
|||
Extract::Extract(RecordVal* args, File* file, const string& arg_filename,
|
||||
uint64 arg_limit)
|
||||
: file_analysis::Analyzer(file_mgr->GetComponentTag("EXTRACT"), args, file),
|
||||
filename(arg_filename), limit(arg_limit)
|
||||
filename(arg_filename), limit(arg_limit), depth(0)
|
||||
{
|
||||
fd = open(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666);
|
||||
fd = open(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC | O_APPEND, 0666);
|
||||
|
||||
if ( fd < 0 )
|
||||
{
|
||||
|
@ -53,7 +53,7 @@ file_analysis::Analyzer* Extract::Instantiate(RecordVal* args, File* file)
|
|||
limit->AsCount());
|
||||
}
|
||||
|
||||
static bool check_limit_exceeded(uint64 lim, uint64 off, uint64 len, uint64* n)
|
||||
static bool check_limit_exceeded(uint64 lim, uint64 depth, uint64 len, uint64* n)
|
||||
{
|
||||
if ( lim == 0 )
|
||||
{
|
||||
|
@ -61,29 +61,31 @@ static bool check_limit_exceeded(uint64 lim, uint64 off, uint64 len, uint64* n)
|
|||
return false;
|
||||
}
|
||||
|
||||
if ( off >= lim )
|
||||
if ( depth >= lim )
|
||||
{
|
||||
*n = 0;
|
||||
return true;
|
||||
}
|
||||
|
||||
*n = lim - off;
|
||||
|
||||
if ( len > *n )
|
||||
else if ( depth + len > lim )
|
||||
{
|
||||
*n = lim - depth;
|
||||
return true;
|
||||
}
|
||||
else
|
||||
{
|
||||
*n = len;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
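The rewritten check now measures against the number of bytes already written (depth) rather than the chunk's file offset, which matters once gaps are zero-filled. A standalone, runnable model of exactly that logic, with a few worked cases; names are illustrative:

#include <cassert>
#include <cstdint>

// 'depth' is how many bytes have already been written; '*n' comes back as how
// many of the next 'len' bytes may still be written.
static bool limit_exceeded(uint64_t lim, uint64_t depth, uint64_t len, uint64_t* n)
	{
	if ( lim == 0 )
		{
		*n = len;          // no limit configured
		return false;
		}

	if ( depth >= lim )
		{
		*n = 0;            // already at or past the limit
		return true;
		}

	if ( depth + len > lim )
		{
		*n = lim - depth;  // partial write up to the limit
		return true;
		}

	*n = len;
	return false;
	}

int main()
	{
	uint64_t n;
	assert( ! limit_exceeded(100, 0, 60, &n)   && n == 60 ); // fits entirely
	assert(   limit_exceeded(100, 60, 60, &n)  && n == 40 ); // crosses the limit
	assert(   limit_exceeded(100, 100, 10, &n) && n == 0 );  // nothing left to write
	}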
|
||||
bool Extract::DeliverChunk(const u_char* data, uint64 len, uint64 offset)
|
||||
bool Extract::DeliverStream(const u_char* data, uint64 len)
|
||||
{
|
||||
if ( ! fd )
|
||||
return false;
|
||||
|
||||
uint64 towrite = 0;
|
||||
bool limit_exceeded = check_limit_exceeded(limit, offset, len, &towrite);
|
||||
bool limit_exceeded = check_limit_exceeded(limit, depth, len, &towrite);
|
||||
|
||||
if ( limit_exceeded && file_extraction_limit )
|
||||
{
|
||||
|
@ -92,16 +94,31 @@ bool Extract::DeliverChunk(const u_char* data, uint64 len, uint64 offset)
|
|||
vl->append(f->GetVal()->Ref());
|
||||
vl->append(Args()->Ref());
|
||||
vl->append(new Val(limit, TYPE_COUNT));
|
||||
vl->append(new Val(offset, TYPE_COUNT));
|
||||
vl->append(new Val(len, TYPE_COUNT));
|
||||
f->FileEvent(file_extraction_limit, vl);
|
||||
|
||||
// Limit may have been modified by BIF, re-check it.
|
||||
limit_exceeded = check_limit_exceeded(limit, offset, len, &towrite);
|
||||
// Limit may have been modified by a BIF, re-check it.
|
||||
limit_exceeded = check_limit_exceeded(limit, depth, len, &towrite);
|
||||
}
|
||||
|
||||
if ( towrite > 0 )
|
||||
safe_pwrite(fd, data, towrite, offset);
|
||||
{
|
||||
safe_write(fd, reinterpret_cast<const char*>(data), towrite);
|
||||
depth += towrite;
|
||||
}
|
||||
|
||||
return ( ! limit_exceeded );
|
||||
}
|
||||
|
||||
bool Extract::Undelivered(uint64 offset, uint64 len)
|
||||
{
|
||||
if ( depth == offset )
|
||||
{
|
||||
char* tmp = new char[len]();
|
||||
safe_write(fd, tmp, len);
|
||||
delete [] tmp;
|
||||
depth += len;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
|
|
@ -28,11 +28,18 @@ public:
     * Write a chunk of file data to the local extraction file.
     * @param data pointer to a chunk of file data.
     * @param len number of bytes in the data chunk.
     * @param offset number of bytes from start of file at which chunk starts.
     * @return false if there was no extraction file open and the data couldn't
     * be written, else true.
     */
    virtual bool DeliverChunk(const u_char* data, uint64 len, uint64 offset);
    virtual bool DeliverStream(const u_char* data, uint64 len);

    /**
     * Report undelivered bytes.
     * @param offset distance into the file where the gap occurred.
     * @param len number of bytes undelivered.
     * @return true
     */
    virtual bool Undelivered(uint64 offset, uint64 len);

    /**
     * Create a new instance of an Extract analyzer.

@ -67,6 +74,7 @@ private:
    string filename;
    int fd;
    uint64 limit;
    uint64 depth;
};

} // namespace file_analysis

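The header change spells out the analyzer's new input contract: in-order data arrives through DeliverStream() and holes are reported through Undelivered(). The toy sketch below (all names invented, not Bro API) shows that calling pattern from the driver's side, with a sink that advances its depth only for bytes it has seen or been told are missing.

#include <cstdint>
#include <cstdio>

typedef uint64_t uint64;

// Hypothetical stand-in for the interface above.
struct StreamSink {
    virtual bool DeliverStream(const unsigned char* data, uint64 len) = 0;
    virtual bool Undelivered(uint64 offset, uint64 len) = 0;
    virtual ~StreamSink() { }
};

// Toy sink that only tracks how far into the file it has gotten, similar to
// the way the Extract analyzer advances its write depth.
struct CountingSink : StreamSink {
    uint64 depth = 0;

    bool DeliverStream(const unsigned char* data, uint64 len) override
        {
        (void) data;             // a real sink would write the bytes out
        depth += len;
        return true;
        }

    bool Undelivered(uint64 offset, uint64 len) override
        {
        if ( depth == offset )   // pad only if the gap starts where we left off
            depth += len;
        return true;
        }
};

int main()
    {
    CountingSink sink;
    const unsigned char chunk[4] = { 'd', 'a', 't', 'a' };

    sink.DeliverStream(chunk, sizeof(chunk));   // bytes 0..3
    sink.Undelivered(4, 6);                     // bytes 4..9 never arrived
    sink.DeliverStream(chunk, sizeof(chunk));   // bytes 10..13

    printf("depth: %llu\n", (unsigned long long) sink.depth);
    return 0;
    }
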
@ -1,10 +1,24 @@
// See the file in the main distribution directory for copyright.

#include "plugin/Plugin.h"

#include "Extract.h"

BRO_PLUGIN_BEGIN(Bro, FileExtract)
    BRO_PLUGIN_DESCRIPTION("Extract file content to local file system");
    BRO_PLUGIN_FILE_ANALYZER("EXTRACT", Extract);
    BRO_PLUGIN_BIF_FILE(events);
    BRO_PLUGIN_BIF_FILE(functions);
BRO_PLUGIN_END
namespace plugin {
namespace Bro_FileExtract {

class Plugin : public plugin::Plugin {
public:
    plugin::Configuration Configure()
        {
        AddComponent(new ::file_analysis::Component("EXTRACT", ::file_analysis::Extract::Instantiate));

        plugin::Configuration config;
        config.name = "Bro::FileExtract";
        config.description = "Extract file content";
        return config;
        }
} plugin;

}
}

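The same mechanical conversion repeats for the FileHash, Unified2 and X509 plugins below: the BRO_PLUGIN_* macro block becomes a plugin::Plugin subclass whose Configure() registers the file-analyzer components and fills in the plugin's name and description. As a rough template, with a placeholder analyzer called Demo (this shape only builds inside the Bro source tree):

#include "plugin/Plugin.h"

#include "Demo.h"   // placeholder analyzer header

namespace plugin {
namespace Bro_Demo {

class Plugin : public plugin::Plugin {
public:
    plugin::Configuration Configure()
        {
        // One AddComponent() call per analyzer tag the plugin provides.
        AddComponent(new ::file_analysis::Component("DEMO", ::file_analysis::Demo::Instantiate));

        plugin::Configuration config;
        config.name = "Bro::Demo";
        config.description = "Placeholder description";
        return config;
        }
} plugin;

}
}
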
@ -11,9 +11,7 @@
##
## limit: The limit, in bytes, the extracted file is about to breach.
##
## offset: The offset at which a file chunk is about to be written.
##
## len: The length of the file chunk about to be written.
##
## .. bro:see:: Files::add_analyzer Files::ANALYZER_EXTRACT
event file_extraction_limit%(f: fa_file, args: any, limit: count, offset: count, len: count%);
event file_extraction_limit%(f: fa_file, args: any, limit: count, len: count%);

@ -1,11 +1,26 @@
// See the file in the main distribution directory for copyright.

#include "plugin/Plugin.h"

#include "Hash.h"

BRO_PLUGIN_BEGIN(Bro, FileHash)
    BRO_PLUGIN_DESCRIPTION("Hash file content");
    BRO_PLUGIN_FILE_ANALYZER("MD5", MD5);
    BRO_PLUGIN_FILE_ANALYZER("SHA1", SHA1);
    BRO_PLUGIN_FILE_ANALYZER("SHA256", SHA256);
    BRO_PLUGIN_BIF_FILE(events);
BRO_PLUGIN_END
namespace plugin {
namespace Bro_FileHash {

class Plugin : public plugin::Plugin {
public:
    plugin::Configuration Configure()
        {
        AddComponent(new ::file_analysis::Component("MD5", ::file_analysis::MD5::Instantiate));
        AddComponent(new ::file_analysis::Component("SHA1", ::file_analysis::SHA1::Instantiate));
        AddComponent(new ::file_analysis::Component("SHA256", ::file_analysis::SHA256::Instantiate));

        plugin::Configuration config;
        config.name = "Bro::FileHash";
        config.description = "Hash file content";
        return config;
        }
} plugin;

}
}

@ -1,12 +1,26 @@
// See the file in the main distribution directory for copyright.

// See the file "COPYING" in the main distribution directory for copyright.

#include "plugin/Plugin.h"

#include "Unified2.h"

BRO_PLUGIN_BEGIN(Bro, Unified2)
    BRO_PLUGIN_DESCRIPTION("Analyze Unified2 alert files.");
    BRO_PLUGIN_FILE_ANALYZER("UNIFIED2", Unified2);
    BRO_PLUGIN_BIF_FILE(events);
    BRO_PLUGIN_BIF_FILE(types);
BRO_PLUGIN_END
namespace plugin {
namespace Bro_Unified2 {

class Plugin : public plugin::Plugin {
public:
    plugin::Configuration Configure()
        {
        AddComponent(new ::file_analysis::Component("UNIFIED2", ::file_analysis::Unified2::Instantiate));

        plugin::Configuration config;
        config.name = "Bro::Unified2";
        config.description = "Analyze Unified2 alert files.";
        return config;
        }
} plugin;

}
}

@ -1,11 +1,25 @@
// See the file in the main distribution directory for copyright.

#include "plugin/Plugin.h"

#include "X509.h"

BRO_PLUGIN_BEGIN(Bro, X509)
    BRO_PLUGIN_DESCRIPTION("X509 certificate parser");
    BRO_PLUGIN_FILE_ANALYZER("X509", X509);
    BRO_PLUGIN_BIF_FILE(events);
    BRO_PLUGIN_BIF_FILE(types);
    BRO_PLUGIN_BIF_FILE(functions);
BRO_PLUGIN_END
namespace plugin {
namespace Bro_X509 {

class Plugin : public plugin::Plugin {
public:
    plugin::Configuration Configure()
        {
        AddComponent(new ::file_analysis::Component("X509", ::file_analysis::X509::Instantiate));

        plugin::Configuration config;
        config.name = "Bro::X509";
        config.description = "X509 analyzer";
        return config;
        }
} plugin;

}
}

@ -147,7 +147,7 @@ RecordVal* file_analysis::X509::ParseCertificate(X509Val* cert_val)
#ifndef OPENSSL_NO_EC
    else if ( pkey->type == EVP_PKEY_EC )
        {
        pX509Cert->Assign(8, new StringVal("dsa"));
        pX509Cert->Assign(8, new StringVal("ecdsa"));
        pX509Cert->Assign(11, KeyCurve(pkey));
        }
#endif

@ -104,6 +104,39 @@ STACK_OF(X509)* x509_get_untrusted_stack(VectorVal* certs_vec)
    return untrusted_certs;
    }

// We need this function to be able to identify the signer certificate of an
// OCSP request out of a list of possible certificates.
X509* x509_get_ocsp_signer(STACK_OF(X509) *certs, OCSP_RESPID *rid)
    {
    // We support two lookup types - either by response id or by key.
    if ( rid->type == V_OCSP_RESPID_NAME )
        return X509_find_by_subject(certs, rid->value.byName);

    // There should only be name and key - but let's be sure...
    if ( rid->type != V_OCSP_RESPID_KEY )
        return 0;

    // Just like OpenSSL, we only support SHA-1 lookups and bail out otherwise.
    if ( rid->value.byKey->length != SHA_DIGEST_LENGTH )
        return 0;

    unsigned char* key_hash = rid->value.byKey->data;
    for ( int i = 0; i < sk_X509_num(certs); ++i )
        {
        unsigned char digest[SHA_DIGEST_LENGTH];
        X509* cert = sk_X509_value(certs, i);
        if ( ! X509_pubkey_digest(cert, EVP_sha1(), digest, NULL) )
            // digest failed for this certificate, try the next one
            continue;

        if ( memcmp(digest, key_hash, SHA_DIGEST_LENGTH) == 0 )
            // keys match, return certificate
            return cert;
        }

    return 0;
    }

%%}

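The by-key branch above works because an OCSP responder that identifies itself by key does so with the SHA-1 hash of its public key (the KeyHash of RFC 6960), so the lookup reduces to hashing each candidate certificate's public key and comparing digests. A small sketch of just that hashing step, with a hypothetical helper name:

#include <openssl/evp.h>
#include <openssl/sha.h>
#include <openssl/x509.h>

// Hypothetical helper: compute the SHA-1 digest of a certificate's public key,
// i.e. the value an OCSP ResponderID given "by key" carries.
static bool public_key_sha1(X509* cert, unsigned char out[SHA_DIGEST_LENGTH])
    {
    // Same call the lookup above performs for each candidate certificate.
    return X509_pubkey_digest(cert, EVP_sha1(), out, NULL) != 0;
    }
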
## Parses a certificate into an X509::Certificate structure.

@ -221,6 +254,7 @@ function x509_ocsp_verify%(certs: x509_opaque_vector, ocsp_reply: string, root_c
    int out = -1;
    int result = -1;
    X509* issuer_certificate = 0;
    X509* signer = 0;
    OCSP_RESPONSE *resp = d2i_OCSP_RESPONSE(NULL, &start, ocsp_reply->Len());
    if ( ! resp )
        {

@ -250,19 +284,47 @@ function x509_ocsp_verify%(certs: x509_opaque_vector, ocsp_reply: string, root_c
    // inject the certificates in the certificate list of the OCSP reply, they actually are used during
    // the lookup.
    // Yay.

    if ( ! basic->certs )
        {
        basic->certs = sk_X509_new_null();
        if ( ! basic->certs )
            {
            rval = x509_result_record(-1, "Could not allocate basic x509 stack");
            goto x509_ocsp_cleanup;
            }
        }

    issuer_certificate = 0;
    for ( int i = 0; i < sk_X509_num(untrusted_certs); i++)
        {
        sk_X509_push(basic->certs, X509_dup(sk_X509_value(untrusted_certs, i)));

        if ( X509_NAME_cmp(X509_get_issuer_name(cert), X509_get_subject_name(sk_X509_value(untrusted_certs, i))) )
        if ( X509_NAME_cmp(X509_get_issuer_name(cert), X509_get_subject_name(sk_X509_value(untrusted_certs, i))) == 0 )
            issuer_certificate = sk_X509_value(untrusted_certs, i);
        }

    // Because we actually want to be able to give nice error messages that show why we were
    // not able to verify the OCSP response - do our own verification logic first.
    signer = x509_get_ocsp_signer(basic->certs, basic->tbsResponseData->responderId);

    /*
    Do this perhaps - OpenSSL also cannot do it, so I do not really feel bad about it.
    Needs a different lookup because the root store is not a stack of X509 certs.

    if ( ! signer )
        // if we did not find it in the certificates that were sent, search in the root store
        signer = x509_get_ocsp_signer(basic->certs, basic->tbsResponseData->responderId);
    */

    if ( ! signer )
        {
        rval = x509_result_record(-1, "Could not find OCSP responder certificate");
        goto x509_ocsp_cleanup;
        }

    csc = X509_STORE_CTX_new();
    X509_STORE_CTX_init(csc, ctx, sk_X509_value(basic->certs, 0), basic->certs);
    X509_STORE_CTX_init(csc, ctx, signer, basic->certs);
    X509_STORE_CTX_set_time(csc, 0, (time_t) verify_time);
    X509_STORE_CTX_set_purpose(csc, X509_PURPOSE_OCSP_HELPER);

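A subtle fix in this hunk: X509_NAME_cmp(), like memcmp(), returns 0 when the two names are equal, so the issuer match has to test for == 0 instead of treating the return value as a boolean; the OCSP_id_cmp() check in a later hunk gets the same correction. A minimal sketch of the corrected test, using a hypothetical helper:

#include <openssl/x509.h>

// Hypothetical helper: does 'candidate' look like the issuer of 'cert'?
// X509_NAME_cmp() returns 0 on equality, so equality is tested explicitly.
static bool is_issuer_of(X509* cert, X509* candidate)
    {
    return X509_NAME_cmp(X509_get_issuer_name(cert),
                         X509_get_subject_name(candidate)) == 0;
    }
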
@ -281,7 +343,6 @@ function x509_ocsp_verify%(certs: x509_opaque_vector, ocsp_reply: string, root_c
        goto x509_ocsp_cleanup;
        }


    // ok, now we verified the OCSP response. This means that we have a valid chain tying it
    // to a root that we trust and that the signature also hopefully is valid. This does not yet
    // mean that the ocsp response actually matches the certificate the server sent us or that

@ -322,7 +383,7 @@ function x509_ocsp_verify%(certs: x509_opaque_vector, ocsp_reply: string, root_c
        goto x509_ocsp_cleanup;
        }

    if ( ! OCSP_id_cmp(certid, single->certId) )
    if ( OCSP_id_cmp(certid, single->certId) != 0 )
        return x509_result_record(-1, "OCSP reply is not for host certificate");

    // next - check freshness of proof...

@ -15,6 +15,27 @@ function Files::__set_timeout_interval%(file_id: string, t: interval%): bool
    return new Val(result, TYPE_BOOL);
    %}

## :bro:see:`Files::enable_reassembly`.
function Files::__enable_reassembly%(file_id: string%): bool
    %{
    bool result = file_mgr->EnableReassembly(file_id->CheckString());
    return new Val(result, TYPE_BOOL);
    %}

## :bro:see:`Files::disable_reassembly`.
function Files::__disable_reassembly%(file_id: string%): bool
    %{
    bool result = file_mgr->DisableReassembly(file_id->CheckString());
    return new Val(result, TYPE_BOOL);
    %}

## :bro:see:`Files::set_reassembly_buffer`.
function Files::__set_reassembly_buffer%(file_id: string, max: count%): bool
    %{
    bool result = file_mgr->SetReassemblyBuffer(file_id->CheckString(), max);
    return new Val(result, TYPE_BOOL);
    %}

## :bro:see:`Files::add_analyzer`.
function Files::__add_analyzer%(file_id: string, tag: Files::Tag, args: any%): bool
    %{