Merge branch 'topic/logging-framework' into topic/policy-scripts
Commit 2900f6bcee
12 changed files with 584 additions and 35 deletions
policy/logging.bro (new file, 177 lines)
@@ -0,0 +1,177 @@
module Log;

export {
	# The set of writers Bro provides.
	type Writer: enum {
		WRITER_DEFAULT,	# See default_writer below.
		WRITER_CSV,
		WRITER_DATA_SERIES,
		WRITER_SYSLOG
	};

	# Each stream gets a unique ID. This type will be extended by
	# other scripts.
	type ID: enum {
		Unknown
	};

	# The default writer to use if a filter does not specify
	# anything else.
	const default_writer = WRITER_CSV &redef;

	# Type defining a stream.
	type Stream: record {
		name: string;
		columns: string_vec;
	};

	# A filter defining what to record.
	type Filter: record {
		# A name to reference this filter.
		name: string;

		# A predicate returning True if the filter wants a log entry
		# to be recorded. If not given, an implicit True is assumed
		# for all entries. The predicate receives one parameter:
		# an instance of the log's record type with the fields to be
		# logged.
		pred: function(rec: any): bool &optional;

		# A path for outputting everything matching this
		# filter. The path is either a string, or a function
		# called with a single ``ID`` argument and returning a string.
		#
		# The specific interpretation of the string is left to the
		# Writer, but if it's referring to a file, it's assumed that no
		# extension is given; the writer will add whatever is
		# appropriate.
		path: string &optional;
		dynamic_path: function(id: string): string &optional;

		# A subset of column names to record. If not given, all
		# columns are recorded.
		#select: set[string] &optional;

		# An event that is raised whenever the filter is applied
		# to an entry. The event receives the same parameter
		# as the predicate. It will always be generated,
		# independent of what the predicate returns.
		#ev: event(rec: any) &optional;

		# The writer to use.
		writer: Writer &default=default_writer;

		# Internal tracking of header names and order for this filter.
		#columns: string_vec &optional;
	};

	# Logs the record "rec" to the stream "id". The type of
	# "rec" must match the stream's "columns" field.
	global write: function(id: string, rec: any);
	#global log_ev: event(id: string, rec: any);

	# Returns an existing filter previously installed for stream
	# "id" under the given "name". If no such filter exists,
	# the record "NoSuchFilter" is returned.
	global get_filter: function(id: string, name: string) : Filter;

	global create_stream: function(id: string, log_record_type: string);
	global add_filter: function(id: string, filter: Filter);
	global remove_filter: function(id: string, filter: string): bool;

	global add_default_filter: function(id: string);
	global remove_default_filter: function(id: string): bool;

	global open_log_files: function(id: string);

	# This is the internal filter store. The outer table is indexed with a string
	# representing the stream name that the set of Logging::Filters is applied to.
	global filters: table[string] of set[Filter];

	# This is the internal stream store. The table is indexed by the stream name.
	global streams: table[string] of Stream;

	global files: table[string] of file;
}


# Sentinel representing an unknown filter.
const NoSuchFilter: Filter = [$name="<unknown filter>", $path="unknown"];

function create_stream(id: string, log_record_type: string)
	{
	if ( id in streams )
		print fmt("Stream %s already exists!", id);

	streams[id] = [$name=log_record_type, $columns=record_type_to_vector(log_record_type)];
	# Insert this as a separate step because the file_opened event needs
	# the stream id to already exist.
	#streams[id]$_file = open_log_file(id);
	}

function add_filter(id: string, filter: Filter)
	{
	if ( id !in filters )
		filters[id] = set();

	add filters[id][filter];
	}

function remove_filter(id: string, filter: string): bool
	{
	for ( filt in filters[id] )
		{
		if ( filt$name == "default" )
			{
			delete filters[id][filt];
			return T;
			}
		}
	return F;
	}

function add_default_filter(id: string)
	{
	add_filter(id, [$name="default", $path=id]);
	}

function remove_default_filter(id: string): bool
	{
	return remove_filter("ssh", "default");
	}

event file_opened(f: file) &priority=10
	{
	# Only do any of this for files opened locally.
	if ( is_remote_event() ) return;

	# TODO: this shouldn't rely on .log being the extension
	local filename = gsub(get_file_name(f), /\.log$/, "");
	if ( filename in streams )
		{
		enable_raw_output(f);

		if ( peer_description == "" ||
		     peer_description == "manager" ||
		     peer_description == "standalone" )
			{
			print f, join_string_vec(streams[filename]$columns, "\t");
			}
		}
	else
		{
		print "no raw output", filename;
		}
	}

function write(id: string, rec: any)
	{
	logging_log(id, rec);
	}


event bro_init() &priority=-10
	{
	# TODO: Check for logging streams without filters.
	}
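As a usage sketch (not part of this commit), a script could register its own filter against a stream; the "conn" stream name, the Conn::Log record type, and the predicate body below are hypothetical placeholders.

# Hypothetical example; "conn" and Conn::Log are placeholders, not defined by this commit.
function only_successes(rec: any): bool
	{
	return T;	# a real predicate would inspect rec's fields
	}

event bro_init()
	{
	Log::create_stream("conn", "Conn::Log");
	Log::add_filter("conn", [$name="successes",
	                         $pred=only_successes,
	                         $path="conn-successes",
	                         $writer=Log::WRITER_CSV]);
	}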
policy/test-logging.bro (new file, 71 lines)
@@ -0,0 +1,71 @@
module TEST_LOGGING;

@load logging

export {
	# Create a new ID for our log stream
	redef enum Log::ID += { TEST_LOGGING };

	# Define a record with all the columns the log file can have.
	# (I'm using a subset of fields from ssh-ext for demonstration.)
	type Log: record {
		t: time;
		id: conn_id;	# Will be rolled out into individual columns.
		status: string &optional;
		country: string &default="unknown";
	};

	# This is the prototype for the event that the logging framework tries
	# to generate if there is a handler for it.
	global log: event(rec: Log);
}

event bro_init()
	{
	# Create the stream.
	# First argument is the ID for the stream.
	# Second argument is the log record type.
	Log::create_stream("TEST_LOGGING", "TEST_LOGGING::Log");

	# Add a default filter that simply logs everything to "ssh.log" using the default writer.
	# The log event is autogenerated for now by checking for handlers for
	# MODULE_NAME::log (which isn't the right thing to do, but it will be dealt with later).
	Log::add_default_filter("TEST_LOGGING");

	# There is currently some problem with &optional values in the records
	# passed into the predicate. Maybe it's because I'm not really coercing
	# the record to the correct record type before passing it as an argument
	# to the Call method?

	# There is also a problem with using &optional sets in the filter records.
	# It was found when trying to include the "select" variable.

	# Printing headers for the filters doesn't work yet either and needs to
	# be considered in the final design (based on the "select" set).
	#Log::add_filter("ssh", [$name="successful logins",
	#                        #$pred(rec: Log) = { print rec$status; return T; },
	#                        $path="ssh-logins",
	#                        #$select=set("t"),
	#                        $writer=Log::WRITER_CSV]);

	# Log something.
	Log::write("TEST_LOGGING", [$t=network_time(), $status="success"]);
	Log::write("TEST_LOGGING", [$t=network_time(), $status="failure", $country="US"]);
	Log::write("TEST_LOGGING", [$t=network_time(), $status="failure", $country="UK"]);
	Log::write("TEST_LOGGING", [$t=network_time(), $status="success", $country="BR"]);
	Log::write("TEST_LOGGING", [$t=network_time(), $status="failure", $country="MX"]);
	}

#event log(rec: Log)
#	{
#	print fmt("Ran the log handler from the same module. Extracting time: %0.6f", rec$t);
#	}
#
#
#module WHATEVER;
#
#event SSH::log(rec: SSH::Log)
#	{
#	print fmt("Ran the SSH::log handler from a different module. Extracting time: %0.6f", rec$t);
#	}
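The commented-out handlers above hint at the event interface the framework looks for. A minimal sketch of an enabled handler, assuming the MODULE_NAME::log lookup in logging_log() finds it:

# Sketch only: an uncommented form of the handler the framework looks for.
event TEST_LOGGING::log(rec: TEST_LOGGING::Log)
	{
	# Invoked for every Log::write() on the stream, before any filtering.
	if ( rec?$status )
		print fmt("log entry at %0.6f with status %s", rec$t, rec$status);
	}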
src/Attr.cc (35 changed lines)
@@ -327,6 +327,41 @@ void Attributes::CheckAttr(Attr* a)
		}
	}

bool Attributes::operator==(const Attributes& other) const
	{
	if ( ! attrs )
		return other.attrs;

	if ( ! other.attrs )
		return false;

	loop_over_list(*attrs, i)
		{
		Attr* a = (*attrs)[i];
		Attr* o = other.FindAttr(a->Tag());

		if ( ! o )
			return false;

		if ( ! (*a == *o) )
			return false;
		}

	loop_over_list(*other.attrs, j)
		{
		Attr* o = (*other.attrs)[j];
		Attr* a = FindAttr(o->Tag());

		if ( ! a )
			return false;

		if ( ! (*a == *o) )
			return false;
		}

	return true;
	}

bool Attributes::Serialize(SerialInfo* info) const
	{
	return SerialObj::Serialize(info);
src/Attr.h (16 changed lines)
@@ -52,6 +52,20 @@ public:
	void Describe(ODesc* d) const;

	bool operator==(const Attr& other) const
		{
		if ( tag != other.tag )
			return false;

		if ( expr || other.expr )
			// If either has an expression and they aren't the same object, we
			// declare them unequal, as we can't really find out if the two
			// expressions are equivalent.
			return (expr == other.expr);

		return true;
		}

protected:
	void AddTag(ODesc* d) const;

@@ -79,6 +93,8 @@ public:
	bool Serialize(SerialInfo* info) const;
	static Attributes* Unserialize(UnserialInfo* info);

	bool operator==(const Attributes& other) const;

protected:
	Attributes()	{ type = 0; attrs = 0; }
	void CheckAttr(Attr* attr);
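In script terms, the new attribute comparison (used by the Expr.cc changes further down) roughly means the following; the record types here are invented for illustration, and the exact coercion behavior is an assumption based on the code above.

# Illustration only: structurally identical record types whose fields carry
# different attributes.
type A: record { x: count; };
type B: record { x: count &default=0; };

event bro_init()
	{
	local a: A = [$x=0];
	local b: B = [$x=5];
	# same_type(A, B) holds (same field names and types), but the &default
	# on B$x makes same_attrs() fail, so the assignment is routed through a
	# RecordCoerceExpr instead of being copied directly.
	a = b;
	print a$x;	# 5
	}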
src/CompositeHash.cc
@@ -65,11 +65,22 @@ CompositeHash::~CompositeHash()

// Computes the piece of the hash for Val*, returning the new kp.
char* CompositeHash::SingleValHash(int type_check, char* kp0,
				BroType* bt, Val* v) const
				BroType* bt, Val* v, bool optional) const
	{
	char* kp1 = 0;
	InternalTypeTag t = bt->InternalType();

	if ( optional )
		{
		// Add a marker saying whether the optional field is set.
		char* kp = AlignAndPadType<char>(kp0);
		*kp = ( v ? 1 : 0);
		kp0 = reinterpret_cast<char*>(kp+1);

		if ( ! v )
			return kp0;
		}

	if ( type_check )
		{
		InternalTypeTag vt = v->Type()->InternalType();

@@ -163,12 +174,16 @@ char* CompositeHash::SingleValHash(int type_check, char* kp0,
	for ( int i = 0; i < num_fields; ++i )
		{
		Val* rv_i = rv->Lookup(i);
		if ( ! rv_i )

		Attributes* a = rt->FieldDecl(i)->attrs;
		bool optional = (a && a->FindAttr(ATTR_OPTIONAL));

		if ( ! (rv_i || optional) )
			return 0;

		if ( ! (kp = SingleValHash(type_check, kp,
					rt->FieldType(i),
					rv_i)) )
					rv_i, optional)) )
			return 0;
		}

@@ -248,7 +263,7 @@ HashKey* CompositeHash::ComputeHash(const Val* v, int type_check) const
	char* kp = k;
	loop_over_list(*tl, i)
		{
		kp = SingleValHash(type_check, kp, (*tl)[i], (*vl)[i]);
		kp = SingleValHash(type_check, kp, (*tl)[i], (*vl)[i], false);
		if ( ! kp )
			return 0;
		}

@@ -315,10 +330,13 @@ HashKey* CompositeHash::ComputeSingletonHash(const Val* v, int type_check) const
	}

int CompositeHash::SingleTypeKeySize(BroType* bt, const Val* v,
				int type_check, int sz) const
				int type_check, int sz, bool optional) const
	{
	InternalTypeTag t = bt->InternalType();

	if ( optional )
		sz = SizeAlign(sz, sizeof(char));

	if ( type_check && v )
		{
		InternalTypeTag vt = v->Type()->InternalType();

@@ -369,9 +387,12 @@ int CompositeHash::SingleTypeKeySize(BroType* bt, const Val* v,
	for ( int i = 0; i < num_fields; ++i )
		{
		Attributes* a = rt->FieldDecl(i)->attrs;
		bool optional = (a && a->FindAttr(ATTR_OPTIONAL));

		sz = SingleTypeKeySize(rt->FieldType(i),
					rv ? rv->Lookup(i) : 0,
					type_check, sz);
					type_check, sz, optional);
		if ( ! sz )
			return 0;
		}

@@ -386,7 +407,7 @@ int CompositeHash::SingleTypeKeySize(BroType* bt, const Val* v,
	case TYPE_INTERNAL_STRING:
		if ( ! v )
			return 0;
			return optional ? sz : 0;

		// Factor in length field.
		sz = SizeAlign(sz, sizeof(int));

@@ -418,7 +439,7 @@ int CompositeHash::ComputeKeySize(const Val* v, int type_check) const
	loop_over_list(*tl, i)
		{
		sz = SingleTypeKeySize((*tl)[i], v ? v->AsListVal()->Index(i) : 0,
					type_check, sz);
					type_check, sz, false);
		if ( ! sz )
			return 0;
		}

@@ -495,20 +516,20 @@ ListVal* CompositeHash::RecoverVals(const HashKey* k) const
	loop_over_list(*tl, i)
		{
		Val* v;
		kp = RecoverOneVal(k, kp, k_end, (*tl)[i], v);
		kp = RecoverOneVal(k, kp, k_end, (*tl)[i], v, false);
		ASSERT(v);
		l->Append(v);
		}

	if ( kp != k_end )
		internal_error("under-ran key in CompositeHash::DescribeKey");
		internal_error("under-ran key in CompositeHash::DescribeKey %ld", k_end - kp);

	return l;
	}

const char* CompositeHash::RecoverOneVal(const HashKey* k, const char* kp0,
				const char* const k_end, BroType* t,
				Val*& pval) const
				Val*& pval, bool optional) const
	{
	// k->Size() == 0 for a single empty string.
	if ( kp0 >= k_end && k->Size() > 0 )

@@ -516,9 +537,20 @@ const char* CompositeHash::RecoverOneVal(const HashKey* k, const char* kp0,
	TypeTag tag = t->Tag();
	InternalTypeTag it = t->InternalType();

	const char* kp1 = 0;

	if ( optional )
		{
		const char* kp = AlignType<char>(kp0);
		kp0 = kp1 = reinterpret_cast<const char*>(kp+1);

		if ( ! *kp )
			{
			pval = 0;
			return kp0;
			}
		}

	switch ( it ) {
	case TYPE_INTERNAL_INT:
		{

@@ -647,9 +679,13 @@ const char* CompositeHash::RecoverOneVal(const HashKey* k, const char* kp0,
	for ( i = 0; i < num_fields; ++i )
		{
		Val* v;

		Attributes* a = rt->FieldDecl(i)->attrs;
		bool optional = (a && a->FindAttr(ATTR_OPTIONAL));

		kp = RecoverOneVal(k, kp, k_end,
				rt->FieldType(i), v);
		if ( ! v )
				rt->FieldType(i), v, optional);
		if ( ! (v || optional) )
			{
			internal_error("didn't recover expected number of fields from HashKey");
			pval = 0;
src/CompositeHash.h
@@ -30,7 +30,7 @@ protected:
	// Computes the piece of the hash for Val*, returning the new kp.
	// Used as a helper for ComputeHash in the non-singleton case.
	char* SingleValHash(int type_check, char* kp,
			BroType* bt, Val* v) const;
			BroType* bt, Val* v, bool optional) const;

	// Recovers just one Val of possibly many; called from RecoverVals.
	// Upon return, pval will point to the recovered Val of type t.

@@ -38,7 +38,7 @@ protected:
	// upon errors, so there is no return value for invalid input.
	const char* RecoverOneVal(const HashKey* k,
			const char* kp, const char* const k_end,
			BroType* t, Val*& pval) const;
			BroType* t, Val*& pval, bool optional) const;

	// Rounds the given pointer up to the nearest multiple of the
	// given size, if not already a multiple.

@@ -77,7 +77,7 @@ protected:
	int ComputeKeySize(const Val* v = 0, int type_check = 1) const;

	int SingleTypeKeySize(BroType*, const Val*,
			int type_check, int sz) const;
			int type_check, int sz, bool optional) const;

	TypeList* type;
	char* key;	// space for composite key
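The point of threading the optional flag through the hashing code is to let composite indexes contain records with &optional fields that may be unset. A small illustration; the Key type and values are invented, and the claim about distinct entries is an assumption based on the marker byte added above.

# Illustration only: a set indexed by a record that has an &optional field.
type Key: record {
	host: addr;
	label: string &optional;
};

global seen: set[Key];

event bro_init()
	{
	# The hash now records whether "label" is set, so a key with the field
	# unset and a key with it set hash to different entries.
	local k1: Key = [$host=127.0.0.1];
	local k2: Key = [$host=127.0.0.1, $label="loopback"];
	add seen[k1];
	add seen[k2];
	if ( k1 in seen )
		print "key with unset label is present";
	}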
src/Expr.cc (71 changed lines)
@@ -2531,16 +2531,35 @@ bool AssignExpr::TypeCheck()
		return true;
		}

	if ( op1->Type()->Tag() == TYPE_RECORD &&
	     op2->Type()->Tag() == TYPE_RECORD )
		{
		if ( same_type(op1->Type(), op2->Type()) )
			{
			RecordType* rt1 = op1->Type()->AsRecordType();
			RecordType* rt2 = op2->Type()->AsRecordType();

			// Make sure the attributes match as well.
			for ( int i = 0; i < rt1->NumFields(); ++i )
				{
				const TypeDecl* td1 = rt1->FieldDecl(i);
				const TypeDecl* td2 = rt2->FieldDecl(i);

				if ( same_attrs(td1->attrs, td2->attrs) )
					// Everything matches.
					return true;
				}
			}

		// Need to coerce.
		op2 = new RecordCoerceExpr(op2, op1->Type()->AsRecordType());
		return true;
		}

	if ( ! same_type(op1->Type(), op2->Type()) )
		{
		if ( op1->Type()->Tag() == TYPE_RECORD &&
		     op2->Type()->Tag() == TYPE_RECORD )
			op2 = new RecordCoerceExpr(op2, op1->Type()->AsRecordType());
		else
			{
			ExprError("type clash in assignment");
			return false;
			}
		ExprError("type clash in assignment");
		return false;
		}

	return true;

@@ -5308,21 +5327,39 @@ int check_and_promote_expr(Expr*& e, BroType* t)
		return 1;
		}

	else if ( ! same_type(t, et) )
	if ( t->Tag() == TYPE_RECORD && et->Tag() == TYPE_RECORD )
		{
		if ( t->Tag() == TYPE_RECORD && et->Tag() == TYPE_RECORD )
			{
			RecordType* t_r = t->AsRecordType();
			RecordType* et_r = et->AsRecordType();
		RecordType* t_r = t->AsRecordType();
		RecordType* et_r = et->AsRecordType();

			if ( record_promotion_compatible(t_r, et_r) )
		if ( same_type(t, et) )
			{
			// Make sure the attributes match as well.
			for ( int i = 0; i < t_r->NumFields(); ++i )
				{
				e = new RecordCoerceExpr(e, t_r);
				return 1;
				const TypeDecl* td1 = t_r->FieldDecl(i);
				const TypeDecl* td2 = et_r->FieldDecl(i);

				if ( same_attrs(td1->attrs, td2->attrs) )
					// Everything matches perfectly.
					return 1;
				}
			}

		else if ( t->Tag() == TYPE_TABLE && et->Tag() == TYPE_TABLE &&
		if ( record_promotion_compatible(t_r, et_r) )	// Note: This is always true currently.
			{
			e = new RecordCoerceExpr(e, t_r);
			return 1;
			}

		t->Error("incompatible record types", e);
		return 0;
		}


	if ( ! same_type(t, et) )
		{
		if ( t->Tag() == TYPE_TABLE && et->Tag() == TYPE_TABLE &&
		     et->AsTableType()->IsUnspecifiedTable() )
			{
			e = new TableCoerceExpr(e, t->AsTableType());
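The rewritten check_and_promote_expr() path is what lets the anonymous records passed to Log::write() in test-logging.bro pick up &default values and leave &optional fields unset. A rough sketch; the Entry type and show() function are invented for illustration.

# Illustration only, mirroring the shape of TEST_LOGGING::Log.
type Entry: record {
	t: time;
	status: string &optional;
	country: string &default="unknown";
};

function show(e: Entry)
	{
	print e$country;
	}

event bro_init()
	{
	# The constructor sets neither "status" nor "country"; the coercion
	# inserted here leaves status unset and fills country from its &default.
	show([$t=network_time()]);	# prints "unknown"
	}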
src/Type.cc (15 changed lines)
@@ -1320,7 +1320,9 @@ static int is_init_compat(const BroType* t1, const BroType* t2)

int same_type(const BroType* t1, const BroType* t2, int is_init)
	{
	if ( t1 == t2 )
	if ( t1 == t2 ||
	     t1->Tag() == TYPE_ANY ||
	     t2->Tag() == TYPE_ANY )
		return 1;

	t1 = flatten_type(t1);

@@ -1453,6 +1455,17 @@ int same_type(const BroType* t1, const BroType* t2, int is_init)
	return 0;
	}

int same_attrs(const Attributes* a1, const Attributes* a2)
	{
	if ( ! a1 )
		return (a2 != 0);

	if ( ! a2 )
		return 0;

	return (*a1 == *a2);
	}

int record_promotion_compatible(const RecordType* /* super_rec */,
				const RecordType* /* sub_rec */)
	{
src/Type.h
@@ -515,6 +515,9 @@ inline BroType* error_type()	{ return base_type(TYPE_ERROR); }
// test is done in the context of an initialization.
extern int same_type(const BroType* t1, const BroType* t2, int is_init=0);

// True if the two attribute lists are equivalent.
extern int same_attrs(const Attributes* a1, const Attributes* a2);

// Returns true if the record sub_rec can be promoted to the record
// super_rec.
extern int record_promotion_compatible(const RecordType* super_rec,
src/Val.cc
@@ -3292,6 +3292,10 @@ Val* check_and_promote(Val* v, const BroType* t, int is_init)
	TypeTag t_tag = t->Tag();
	TypeTag v_tag = vt->Tag();

	// More thought definitely needs to go into this.
	if ( t_tag == TYPE_ANY || v_tag == TYPE_ANY )
		return v;

	if ( ! EitherArithmetic(t_tag, v_tag) ||
	     /* allow sets as initializers */
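Short-circuiting same_type() and check_and_promote() on TYPE_ANY is what lets a concrete record value flow through the any-typed parameters used by Log::write() and Filter$pred. A minimal sketch with invented types:

# Illustration only: a concrete record value passed through an "any" parameter.
type Ping: record { seq: count; };

function accept(rec: any): bool
	{
	# With the TYPE_ANY short-circuit, this call no longer trips the
	# type-clash and promotion checks.
	return T;
	}

event bro_init()
	{
	local p: Ping = [$seq=1];
	print accept(p);
	}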
src/bro.bif (141 changed lines)
@@ -359,7 +359,148 @@ function cat%(...%): string
	return new StringVal(s);
	%}

function logging_log%(index: string, rec: any%): any
	%{
	// TODO: Verify that rec is a record.
	// TODO: Coerce to the correct record type to fill in defaults.
	RecordVal *recval = rec->AsRecordVal();

	// Look up the stream.
	TableVal *streams = opt_internal_table("Log::streams");
	VectorVal *columns;
	RecordVal *stream_record;
	if ( streams )
		{
		stream_record = streams->Lookup(index)->AsRecordVal();
		if ( stream_record )
			{
			int columns_field = stream_record->Type()->AsRecordType()->FieldOffset("columns");
			columns = stream_record->Lookup(columns_field)->AsVectorVal();
			}
		}
	else
		{
		printf("Logging framework is dead (Log::streams not found).\n");
		return false;
		}

	// Look up all filters for the stream.
	TableVal *filters = opt_internal_table("Log::filters");
	TableVal *stream_filters;
	if ( filters )
		{
		Val *lookup_v = filters->Lookup(index);
		if ( lookup_v )
			stream_filters = lookup_v->AsTableVal();
		}
	else
		{
		printf("Logging framework is dead (Log::filters not found).\n");
		return false;
		}

	// Generate the event for the log stream.
	// This happens regardless of all filters.
	int name_field = stream_record->Type()->AsRecordType()->FieldOffset("name");
	StringVal *log_type = stream_record->AsRecordVal()->Lookup(name_field)->AsStringVal();
	string ID_module = extract_module_name(log_type->CheckString());
	// The log event that is generated by default is MODULE_NAME::log
	string log_event_name = make_full_var_name(ID_module.c_str(), "log");
	EventHandlerPtr ev_ptr = internal_handler(log_event_name.c_str());
	if ( ev_ptr )
		{
		val_list* vl = new val_list;
		vl->append(recval->Ref());
		mgr.QueueEvent(ev_ptr, vl, SOURCE_LOCAL);
		}

	// Iterate over the stream_filters.
	ListVal* filter_recs = stream_filters->ConvertToList(TYPE_ANY);
	for ( int i = 0; i < filter_recs->Length(); ++i )
		{
		RecordVal* rv = filter_recs->Index(i)->AsListVal()->Index(0)->AsRecordVal();
		int pred_field_index = rv->Type()->AsRecordType()->FieldOffset("pred");
		Val *pred_func = rv->Lookup(pred_field_index);
		if ( pred_func )
			{
			val_list args(1);
			args.append(recval->Ref());
			int pred_val = pred_func->AsFunc()->Call(&args)->AsBool();
			//printf("predicate val: %d\n", pred_val);
			// If the predicate returns false, don't send this record on to a writer.
			if ( ! pred_val )
				continue;
			}

		// Format and print the line
		// (send line onward to the filter's WRITER in the future).

		// Get a path name for this filter.
		int path_field = rv->Type()->AsRecordType()->FieldOffset("path");
		Val *path_val = rv->Lookup(path_field);
		StringVal *path;
		// If no path is given, use the Stream ID as the path.
		if ( path_val )
			path = path_val->AsStringVal();
		else
			path = index;

		// Get the file with the "path" name found above for this filter.
		// Open a new file if one does not exist yet.
		TableVal *logging_files = opt_internal_table("Log::files");
		Val *ff = logging_files->Lookup(path);
		if ( ! ff )
			{
			logging_files->Assign(path, new Val(new BroFile(path->CheckString(), "w")));
			ff = logging_files->Lookup(path);
			}
		BroFile *f = ff->AsFile();
		if ( ! f->IsOpen() )
			continue;

		// Actually format the log line and print it to the file found above.
		ODesc d;
		RecordType *rt = rec->Type()->AsRecordType();
		for ( unsigned i = 1; i <= columns->Size(); ++i )
			{
			const char *field_name = columns->Lookup(i)->AsStringVal()->CheckString();
			int field = rt->FieldOffset(field_name);
			if ( field >= 0 )	// or if there is a default value
				{
				rec->AsRecordVal()->Lookup(field)->Describe(&d);
				d.Add("\t");
				}
			}
		f->Write(d.Description(), 0);
		f->Write("\n", 0);
		}

	Unref(filter_recs);

	return 0;
	%}

function record_type_to_vector%(rt: string%): string_vec
	%{
	VectorVal* result =
		new VectorVal(internal_type("string_vec")->AsVectorType());

	RecordType *type = internal_type(rt->CheckString())->AsRecordType();
	if ( type )
		{
		for ( int i = 0; i < type->NumFields(); ++i )
			{
			StringVal* val = new StringVal(type->FieldName(i));
			result->Assign(i+1, val, 0);
			}
		}
	return result;
	%}


function cat_sep%(sep: string, def: string, ...%): string
	%{
	ODesc d;
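Both new BIFs are callable directly from script land. A quick sketch, reusing the TEST_LOGGING::Log type from the test script above:

# Sketch: inspect the column layout the framework derives for a stream.
event bro_init()
	{
	local cols = record_type_to_vector("TEST_LOGGING::Log");
	# Expected columns: t, id, status, country (the vector is 1-based).
	print join_string_vec(cols, ", ");
	}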
src/strings.bif
@@ -135,6 +135,22 @@ function sort_string_array%(a: string_array%): string_array
	vs_to_string_array(vs, b, 1, n);
	return b;
	%}

function join_string_vec%(vec: string_vec, sep: string%): string
	%{
	ODesc d;
	VectorVal *v = vec->AsVectorVal();
	for ( unsigned i = 0; i < v->Size(); ++i )
		{
		if ( i > 0 )
			d.Add(sep->CheckString(), 0);
		v->Lookup(i+1)->Describe(&d);
		}
	BroString* s = new BroString(1, d.TakeBytes(), d.Len());
	s->SetUseFreeToDelete(true);
	return new StringVal(s);
	%}


function edit%(arg_s: string, arg_edit_char: string%): string
	%{
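join_string_vec() is what logging.bro uses to emit the tab-separated header line in file_opened(). A standalone sketch, assuming the vector() constructor expression is available in this version of Bro:

# Sketch: joining a string_vec, as done for the log header line.
event bro_init()
	{
	local names: string_vec = vector("t", "id", "status", "country");	# vector() constructor assumed available
	print join_string_vec(names, "\t");
	}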