Merge remote-tracking branch 'origin/topic/jsiwek/faf-cleanup'
Closes #1002.

* origin/topic/jsiwek/faf-cleanup:
  Move file analyzers to new plugin infrastructure.
  Add a general file analysis overview/how-to document.
  Improve file analysis doxygen comments.
  Improve tracking of HTTP file extraction (addresses #988).
  Fix HTTP multipart body file analysis.
  Remove logging of analyzers field of FileAnalysis::Info.
  Remove extraction counter in default file extraction scripts.
  Remove FileAnalysis::postpone_timeout.
  Make default get_file_handle handlers &priority=5.
  Add input interface to forward data for file analysis.
  File analysis framework interface simplifications.
commit d8b05af7e5
127 changed files with 2458 additions and 1412 deletions
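Among the changes above, "Add input interface to forward data for file analysis" is the one visible in the input Manager diff that follows: a new ANALYSIS_STREAM treats every value delivered by an input reader as a chunk of a raw byte stream and forwards it to the file analysis framework under a file id derived once from the stream name. The following is a minimal, self-contained sketch of that flow under assumed stand-in names (FileAnalysisSink, the toy AnalysisStream struct, the std::hash-based id); it is not Zeek's actual API, only an illustration of create stream, then DataIn per chunk, then EndOfFile.

// Toy model of the analysis-stream flow shown in the diff below (not Zeek's
// real classes): an input reader delivers raw byte chunks for a named stream,
// the manager derives a stable file id from the stream name and forwards each
// chunk to a file-analysis sink, then signals end-of-file when the source is
// exhausted. All names here are illustrative.
#include <cstddef>
#include <cstdio>
#include <functional>
#include <string>
#include <vector>

// Stand-in for the file analysis manager: receives data keyed by a file id.
struct FileAnalysisSink {
	void DataIn(const unsigned char* data, size_t len,
	            const std::string& file_id, const std::string& source)
		{
		printf("DataIn: %zu bytes for file %s (source %s)\n",
		       len, file_id.c_str(), source.c_str());
		}

	void EndOfFile(const std::string& file_id)
		{ printf("EndOfFile: %s\n", file_id.c_str()); }
};

// Stand-in for an analysis input stream: one string field ("bytestream").
struct AnalysisStream {
	std::string name;     // stream name from the script-level description
	std::string file_id;  // stable id derived once from the name
};

int main()
	{
	FileAnalysisSink file_mgr;

	// Analogous to CreateAnalysisStream(): derive the file id up front.
	AnalysisStream stream;
	stream.name = "example-input";
	stream.file_id = std::to_string(std::hash<std::string>{}(stream.name));

	// Analogous to SendEntry()/Put(): each delivered value is one chunk.
	std::vector<std::string> chunks = { "GET / HT", "TP/1.1\r\n\r\n" };
	for ( const auto& c : chunks )
		file_mgr.DataIn(reinterpret_cast<const unsigned char*>(c.data()),
		                c.size(), stream.file_id, stream.name);

	// Analogous to SendEndOfData(): the reader hit EOF on its source.
	file_mgr.EndOfFile(stream.file_id);
	return 0;
	}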
src/input/Manager.cc

@@ -15,10 +15,9 @@
#include "EventHandler.h"
#include "NetVar.h"
#include "Net.h"

#include "CompHash.h"

#include "../file_analysis/Manager.h"
#include "../threading/SerialTypes.h"

using namespace input;
@@ -148,6 +147,14 @@ public:
 	~EventStream();
 };
 
+class Manager::AnalysisStream: public Manager::Stream {
+public:
+	string file_id;
+
+	AnalysisStream();
+	~AnalysisStream();
+};
+
 Manager::TableStream::TableStream() : Manager::Stream::Stream()
 	{
 	stream_type = TABLE_STREAM;
@@ -198,6 +205,15 @@ Manager::TableStream::~TableStream()
 		}
 	}
 
+Manager::AnalysisStream::AnalysisStream() : Manager::Stream::Stream()
+	{
+	stream_type = ANALYSIS_STREAM;
+	}
+
+Manager::AnalysisStream::~AnalysisStream()
+	{
+	}
+
 Manager::Manager()
 	{
 	end_of_data = internal_handler("Input::end_of_data");
@@ -274,7 +290,8 @@ bool Manager::CreateStream(Stream* info, RecordVal* description)
 
 	RecordType* rtype = description->Type()->AsRecordType();
 	if ( ! ( same_type(rtype, BifType::Record::Input::TableDescription, 0)
-		|| same_type(rtype, BifType::Record::Input::EventDescription, 0) ) )
+		|| same_type(rtype, BifType::Record::Input::EventDescription, 0)
+		|| same_type(rtype, BifType::Record::Input::AnalysisDescription, 0) ) )
 		{
 		reporter->Error("Streamdescription argument not of right type for new input stream");
 		return false;
@@ -680,6 +697,40 @@ bool Manager::CreateTableStream(RecordVal* fval)
 	return true;
 	}
 
+bool Manager::CreateAnalysisStream(RecordVal* fval)
+	{
+	RecordType* rtype = fval->Type()->AsRecordType();
+
+	if ( ! same_type(rtype, BifType::Record::Input::AnalysisDescription, 0) )
+		{
+		reporter->Error("AnalysisDescription argument not of right type");
+		return false;
+		}
+
+	AnalysisStream* stream = new AnalysisStream();
+
+	if ( ! CreateStream(stream, fval) )
+		{
+		delete stream;
+		return false;
+		}
+
+	stream->file_id = file_mgr->HashHandle(stream->name);
+
+	assert(stream->reader);
+
+	// reader takes in a byte stream as the only field
+	Field** fields = new Field*[1];
+	fields[0] = new Field("bytestream", 0, TYPE_STRING, TYPE_VOID, false);
+	stream->reader->Init(1, fields);
+
+	readers[stream->reader] = stream;
+
+	DBG_LOG(DBG_INPUT, "Successfully created analysis stream %s",
+	        stream->name.c_str());
+
+	return true;
+	}
+
 bool Manager::IsCompatibleType(BroType* t, bool atomic_only)
 	{
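CreateAnalysisStream() above hashes the stream name once (file_mgr->HashHandle(stream->name)) and reuses that id for every chunk, so all data delivered on one input stream accumulates in a single file analysis object. The sketch below is illustrative only: the hypothetical ToyHashHandle() uses FNV-1a for brevity and does not reproduce the real file_analysis::Manager::HashHandle derivation.

// Illustrative only: a stable string -> id mapping in the spirit of the
// HashHandle call above. The real derivation lives in file_analysis::Manager
// and is not reproduced here; this just shows why hashing the stream name
// once gives every chunk the same file id.
#include <cstdint>
#include <cstdio>
#include <string>

static std::string ToyHashHandle(const std::string& handle)
	{
	// FNV-1a, chosen only for brevity; any stable hash would do.
	uint64_t h = 1469598103934665603ULL;
	for ( unsigned char c : handle )
		{
		h ^= c;
		h *= 1099511628211ULL;
		}

	char buf[32];
	snprintf(buf, sizeof(buf), "F%016llx", static_cast<unsigned long long>(h));
	return buf;
	}

int main()
	{
	// Same stream name -> same file id, so successive DataIn() calls
	// accumulate into one file across SendEntry()/Put() deliveries.
	printf("%s\n", ToyHashHandle("example-input").c_str());
	printf("%s\n", ToyHashHandle("example-input").c_str());
	return 0;
	}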
@@ -966,6 +1017,15 @@ void Manager::SendEntry(ReaderFrontend* reader, Value* *vals)
 		readFields = SendEventStreamEvent(i, type, vals);
 		}
 
+	else if ( i->stream_type == ANALYSIS_STREAM )
+		{
+		readFields = 1;
+		assert(vals[0]->type == TYPE_STRING);
+		file_mgr->DataIn(reinterpret_cast<u_char*>(vals[0]->val.string_val.data),
+		                 vals[0]->val.string_val.length,
+		                 static_cast<AnalysisStream*>(i)->file_id, i->name);
+		}
+
 	else
 		assert(false);
 
@@ -1179,7 +1239,7 @@ void Manager::EndCurrentSend(ReaderFrontend* reader)
 	DBG_LOG(DBG_INPUT, "Got EndCurrentSend stream %s", i->name.c_str());
 #endif
 
-	if ( i->stream_type == EVENT_STREAM )
+	if ( i->stream_type != TABLE_STREAM )
 		{
 		// just signal the end of the data source
 		SendEndOfData(i);
@@ -1288,6 +1348,9 @@ void Manager::SendEndOfData(ReaderFrontend* reader)
 void Manager::SendEndOfData(const Stream *i)
 	{
 	SendEvent(end_of_data, 2, new StringVal(i->name.c_str()), new StringVal(i->info->source));
+
+	if ( i->stream_type == ANALYSIS_STREAM )
+		file_mgr->EndOfFile(static_cast<const AnalysisStream*>(i)->file_id);
 	}
 
 void Manager::Put(ReaderFrontend* reader, Value* *vals)
@@ -1310,6 +1373,15 @@ void Manager::Put(ReaderFrontend* reader, Value* *vals)
 		readFields = SendEventStreamEvent(i, type, vals);
 		}
 
+	else if ( i->stream_type == ANALYSIS_STREAM )
+		{
+		readFields = 1;
+		assert(vals[0]->type == TYPE_STRING);
+		file_mgr->DataIn(reinterpret_cast<u_char*>(vals[0]->val.string_val.data),
+		                 vals[0]->val.string_val.length,
+		                 static_cast<AnalysisStream*>(i)->file_id, i->name);
+		}
+
 	else
 		assert(false);
 
@@ -1577,6 +1649,12 @@ bool Manager::Delete(ReaderFrontend* reader, Value* *vals)
 		success = true;
 		}
 
+	else if ( i->stream_type == ANALYSIS_STREAM )
+		{
+		// can't do anything
+		success = true;
+		}
+
 	else
 		{
 		assert(false);