Logging framework core functionality now implemented.

Seth Hall 2011-01-26 15:29:20 -05:00
parent 4df961aa60
commit d2628d30fa
3 changed files with 112 additions and 94 deletions


@@ -3,66 +3,67 @@ module Logging;
export {
# The set of writers Bro provides.
type Writer: enum {
WRITER_DEFAULT, # See default_writer below.
WRITER_CSV,
WRITER_DATA_SERIES,
WRITER_SYSLOG
WRITER_DEFAULT, # See default_writer below.
WRITER_CSV,
WRITER_DATA_SERIES,
WRITER_SYSLOG
};
# Each stream gets a unique ID. This type will be extended by
# other scripts.
type ID: enum {
Unknown
};
Unknown
};
# The default writer to use if a filter does not specify
# anything else.
const default_writer = WRITER_CSV &redef;
# Type defining a stream.
#type Stream: record {
# id : string; # The ID of the stream.
# columns : string_vec; # A record type defining the stream's output columns.
#};
type Stream: record {
name: string;
columns: string_vec;
};
# A filter defining what to record.
type Filter: record {
# A name to reference this filter.
name: string;
# A predicate returning True if the filter wants a log entry
# to be recorded. If not given, an implicit True is assumed
# for all entries. The predicate receives one parameter:
# an instance of the log's record type with the fields to be
# logged.
pred: function(log: any) &optional;
# A path for outputting everything matching this
# filter. The path is either a string, or a function
# called with a single ``ID`` argument and returning a string.
#
# The specific interpretation of the string is left to the
# Writer, but if it's referring to a file, it's assumed that no
# extension is given; the writer will add whatever is
# appropriate.
path: any &optional;
# A subset of column names to record. If not given, all
# columns are recorded.
select: set[string] &optional;
# An event that is raised whenever the filter is applied
# to an entry. The event receives the same parameter
# as the predicate. It will always be generated,
# independent of what the predicate returns.
ev: event(c: connection, log: any) &optional;
# The writer to use.
writer: Writer &default=default_writer;
};
# A name to reference this filter.
name: string;
# A predicate returning True if the filter wants a log entry
# to be recorded. If not given, an implicit True is assumed
# for all entries. The predicate receives one parameter:
# an instance of the log's record type with the fields to be
# logged.
pred: function(log: any) &optional;
# A path for outputting everything matching this
# filter. The path is either a string, or a function
# called with a single ``ID`` argument and returning a string.
#
# The specific interpretation of the string is left to the
# Writer, but if it's referring to a file, it's assumed that no
# extension is given; the writer will add whatever is
# appropriate.
path: any &optional;
# A subset of column names to record. If not given, all
# columns are recorded.
select: set[string] &optional;
# An event that is raised whenever the filter is applied
# to an entry. The event receives the same parameter
# as the predicate. It will always be generated,
# independent of what the predicate returns.
ev: event(l: any) &optional;
# The writer to use.
writer: Writer &default=default_writer;
};
global filters: table[string] of set[Filter];
global streams: table[string] of string_vec;
global streams: table[string] of Stream;
# Logs the record "rec" to the stream "id". The type of
# "rec" must match the stream's "columns" field.
@@ -74,7 +75,7 @@ export {
# the record "NoSuchFilter" is returned.
global get_filter: function(id: string, name: string) : Filter;
global create_stream: function(id: string, columns: string);
global create_stream: function(id: string, log_record_type: string);
global add_filter: function(id: string, filter: Filter);
global open_log_files: function(id: string);
@@ -84,20 +85,22 @@ export {
# Sentinel representing an unknown filter.
const NoSuchFilter: Filter = [$name="<unknown filter>"];
function create_stream(id: string, columns: string)
function create_stream(id: string, log_record_type: string)
{
if ( id in streams )
print fmt("Stream %s already exists!", id);
streams[id] = record_type_to_vector(columns);
streams[id] = [$name=log_record_type, $columns=record_type_to_vector(log_record_type)];
}
function add_filter(id: string, filter: Filter)
{
#if ( id !in filters )
# filters[id] = set();
#
#add filters[id][filter];
if ( id !in filters )
filters[id] = set();
# TODO: This is broken and waiting on a bug fix for &optional fields
# in records being used as indexes.
#add filters[id][filter];
}
function log(id: string, rec: any)
@@ -105,14 +108,3 @@ function log(id: string, rec: any)
logging_log(id, rec);
}
# THIS IS ONLY FOR THE PROTOTYPE.
# It will be implemented in the core later
function open_log_files(id: string)
{
# Open default log
#open_log_file(id);
# Find all second names from filters
# Open log for each secondary name
}
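The exported API above is enough to sketch how a script would drive the prototype: register a stream with its record type, attach a filter, and write entries. The following is a minimal, hypothetical usage sketch (the Foo module, its Info record, and all field names are invented for illustration; filtering itself is still stubbed out, as noted in add_filter):

module Foo;

export {
	# Hypothetical log record type for this sketch.
	type Info: record {
		ts: time;
		msg: string;
	};
}

event bro_init()
	{
	# Register the stream; the second argument names the log record type.
	Logging::create_stream("foo", "Foo::Info");

	# Attach a filter. &optional fields such as $pred, $select and $ev can be
	# supplied once filtering lands; $writer defaults to Logging::default_writer.
	Logging::add_filter("foo", [$name="default", $path="foo"]);

	# Write one entry; the record must match the stream's columns.
	Logging::log("foo", [$ts=network_time(), $msg="hello"]);
	}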


@@ -15,22 +15,37 @@ export {
country: string &default="unknown";
};
global ssh_log: event(rec: Log);
# This is the prototype for the event that the logging framework tries
# to generate if there is a handler for it.
global log: event(rec: Log);
}
event bro_init()
{
# Create the stream.
# First argument is the ID for the stream.
# Second argument is the log record type.
Logging::create_stream("ssh", "SSH::Log");
# Add a default filter that simply logs everything to "ssh.log" using the default writer.
#Logging::add_filter("SSH", [$name="default", $path="ssh"]);
# Filtering is not implemented yet. Waiting on ticket #366
# Log line event generation is autogenerated for now by checking for
# handlers for MODULE_NAME::log
#Logging::add_filter("ssh", [$name="default", $path="ssh", $ev=log]);
# Log something.
Logging::log("ssh", [$t=network_time(), $country="US", $status="ok"]);
}
event ssh_log(rec: Log)
event log(rec: Log)
{
print "Ran the ssh_log handler! kick ass";
print fmt("Ran the log handler from the same module. Extracting time: %0.6f", rec$t);
}
module WHATEVER;
event SSH::log(rec: SSH::Log)
{
print fmt("Ran the SSH::log handler from a different module. Extracting time: %0.6f", rec$t);
}


@@ -360,17 +360,22 @@ function cat%(...%): string
return new StringVal(s);
%}
function logging_log%(index: string, rec: any%): bool
function logging_log%(index: string, rec: any%): any
%{
// Verify that rec is a record
// Lookup the stream
TableVal *streams = opt_internal_table("Logging::streams");
VectorVal *columns;
RecordVal *stream_record;
if ( streams )
{
Val *lookup_v = streams->Lookup(index);
if ( lookup_v )
columns = lookup_v->AsVectorVal();
stream_record = streams->Lookup(index)->AsRecordVal();
if ( stream_record )
{
int columns_field = stream_record->Type()->AsRecordType()->FieldOffset("columns");
columns = stream_record->Lookup(columns_field)->AsVectorVal();
}
}
else
{
@@ -378,31 +383,40 @@ function logging_log%(index: string, rec: any%): bool
return false;
}
// Generate the event for the log stream
// TODO: make it actually figure out the right handler name.
EventHandlerPtr ev_ptr = internal_handler("SSH::ssh_log");
// Generate the event for the log stream
// This happens regardless of all filters.
int name_field = stream_record->Type()->AsRecordType()->FieldOffset("name");
StringVal *log_type = stream_record->AsRecordVal()->Lookup(name_field)->AsStringVal();
string ID_module = extract_module_name(log_type->CheckString());
// The log event that is generated by default is MODULE_NAME::log
string log_event_name = make_full_var_name(ID_module.c_str(), "log");
EventHandlerPtr ev_ptr = internal_handler(log_event_name.c_str());
if ( ev_ptr )
{
val_list* vl = new val_list;
vl->append(rec->Ref());
mgr.QueueEvent(, vl, SOURCE_LOCAL);
mgr.QueueEvent(ev_ptr, vl, SOURCE_LOCAL);
}
// Lookup all filters for stream
TableVal *filters = opt_internal_table("Logging::filters");
RecordVal *stream_filters;
if ( filters )
{
Val *lookup_v = filters->Lookup(index);
if ( lookup_v )
stream_filters = lookup_v->AsRecordVal();
}
else
{
printf("Logging framework is dead (Logging::filters not found).\n");
return false;
}
// (ignore this code, it will probably be done in the logging.bro script
// with the "match" statement)
//TableVal *filters = opt_internal_table("Logging::filters");
//RecordVal *stream_filters;
//if ( filters )
// {
// Val *lookup_v = filters->Lookup(index);
// if ( lookup_v )
// stream_filters = lookup_v->AsRecordVal();
// }
//else
// {
// printf("Logging framework is dead (Logging::filters not found).\n");
// return false;
// }
// Print the line
// (send line onward to the filter's WRITER in the future)
ODesc d;
const char *field_name;
int field = 0;
@@ -416,13 +430,10 @@ function logging_log%(index: string, rec: any%): bool
rec->AsRecordVal()->Lookup(field)->Describe(&d);
d.Add("\t",0);
}
//printf("Test: %s\n", field_name);
}
printf("Full line: %s\n", d.TakeBytes());
// For each filter on 'id'
// Format the output (iterate through columns and grab fields from rec as found)
// Print the line (send line onward to WRITER)
return false;
printf("%s: %s\n", ID_module.c_str(), d.TakeBytes());
return 0;
%}
function record_type_to_vector%(rt: string%): string_vec
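record_type_to_vector takes the name of a record type and returns its field names as a string_vec; create_stream above uses it to populate a stream's columns. A quick script-level sketch of what that would yield for the SSH example (the expected output is an assumption based on the SSH::Log fields used in that script):

event bro_init()
	{
	# Expected to print the column names derived from SSH::Log,
	# i.e. t, status and country.
	print record_type_to_vector("SSH::Log");
	}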