Mirror of https://github.com/zeek/zeek.git

Merge remote branch 'origin/topic/robin/logging-internals' into topic/policy-scripts-new

commit 4943a87205
41 changed files with 413 additions and 51 deletions

@@ -21,7 +21,7 @@ type conn_id: record {
    orig_p: port;
    resp_h: addr;
    resp_p: port;
};
} &log;

type icmp_conn: record {
    orig_h: addr;

@@ -7,6 +7,7 @@
#include "Attr.h"
#include "Expr.h"
#include "Serializer.h"
#include "LogMgr.h"

const char* attr_name(attr_tag t)
    {

@@ -332,6 +333,11 @@ void Attributes::CheckAttr(Attr* a)
            }
        break;

    case ATTR_LOG:
        if ( ! LogVal::IsCompatibleType(type) )
            Error("&log applied to a type that cannot be logged");
        break;

    default:
        BadTag("Attributes::CheckAttr", attr_name(a->Tag()));
    }

@@ -35,6 +35,7 @@ typedef enum {
    ATTR_MERGEABLE,
    ATTR_PRIORITY,
    ATTR_GROUP,
    ATTR_LOG,
    ATTR_TRACKED,   // hidden attribute, tracked by NotifierRegistry
#define NUM_ATTRS (int(ATTR_TRACKED) + 1)
} attr_tag;

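For orientation, the script-level syntax that the new ATTR_LOG attribute enables looks like the following minimal sketch, adapted from the attr.bro test added later in this commit (the record and field names are taken from that test); per that test's baseline, only the fields carrying &log become log columns:

    type Log: record {
        t: time;
        status: string &optional &log;
        country: string &default="unknown" &log;
    };
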
src/ID.cc (19 changed lines)

@@ -235,6 +235,25 @@ void ID::UpdateValAttrs()
                }
            }
        }

    if ( Type()->Tag() == TYPE_RECORD )
        {
        Attr* attr = attrs->FindAttr(ATTR_LOG);
        if ( attr )
            {
            // Apply &log to all record fields.
            RecordType* rt = Type()->AsRecordType();
            for ( int i = 0; i < rt->NumFields(); ++i )
                {
                TypeDecl* fd = rt->FieldDecl(i);

                if ( ! fd->attrs )
                    fd->attrs = new Attributes(new attr_list, rt->FieldType(i));

                fd->attrs->AddAttr(new Attr(ATTR_LOG));
                }
            }
        }
    }

void ID::AddAttrs(Attributes* a)

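The ID::UpdateValAttrs() change above is what makes a record-level &log shorthand work: when an identifier's record type carries &log, the attribute is pushed down onto every field. A minimal sketch (the Info record and its fields are illustrative names, not from this commit):

    type Info: record {
        ts: time;
        msg: string;
    } &log;    # equivalent to tagging ts and msg individually with &log
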
src/LogMgr.cc (114 changed lines)

@@ -75,6 +75,14 @@ LogVal::~LogVal()
        delete [] val.set_val.vals;
        }

    if ( type == TYPE_VECTOR && present )
        {
        for ( int i = 0; i < val.vector_val.size; i++ )
            delete val.vector_val.vals[i];

        delete [] val.vector_val.vals;
        }
    }

bool LogVal::IsCompatibleType(BroType* t, bool atomic_only)

@@ -112,6 +120,14 @@ bool LogVal::IsCompatibleType(BroType* t, bool atomic_only)
        return IsCompatibleType(t->AsSetType()->Indices()->PureType());
        }

    case TYPE_VECTOR:
        {
        if ( atomic_only )
            return false;

        return IsCompatibleType(t->AsVectorType()->YieldType());
        }

    default:
        return false;
    }

@@ -149,7 +165,7 @@ bool LogVal::Read(SerializationFormat* fmt)
             && fmt->Read(&net[2], "net2")
             && fmt->Read(&net[3], "net3")
             && fmt->Read(&val.subnet_val.width, "width")) )
            return false;

#ifdef BROv6
        val.subnet_val.net[0] = net[0];

@@ -212,8 +228,25 @@ bool LogVal::Read(SerializationFormat* fmt)
        return true;
        }

    case TYPE_VECTOR:
        {
        if ( ! fmt->Read(&val.vector_val.size, "vector_size") )
            return false;

        val.vector_val.vals = new LogVal* [val.vector_val.size];

        for ( int i = 0; i < val.vector_val.size; ++i )
            {
            val.vector_val.vals[i] = new LogVal;
            if ( ! val.vector_val.vals[i]->Read(fmt) )
                return false;
            }

        return true;
        }

    default:
        internal_error(::fmt("unsupported type %s in LogVal::Write", type_name(type)));
        internal_error("unsupported type %s in LogVal::Write", type_name(type));
    }

    return false;

@@ -292,8 +325,22 @@ bool LogVal::Write(SerializationFormat* fmt) const
        return true;
        }

    case TYPE_VECTOR:
        {
        if ( ! fmt->Write(val.vector_val.size, "vector_size") )
            return false;

        for ( int i = 0; i < val.vector_val.size; ++i )
            {
            if ( ! val.vector_val.vals[i]->Write(fmt) )
                return false;
            }

        return true;
        }

    default:
        internal_error(::fmt("unsupported type %s in LogVal::REad", type_name(type)));
        internal_error("unsupported type %s in LogVal::REad", type_name(type));
    }

    return false;

@@ -379,13 +426,26 @@ bool LogMgr::CreateStream(EnumVal* id, RecordVal* sval)

    RecordType* columns = sval->Lookup(rtype->FieldOffset("columns"))->AsType()->AsTypeType()->Type()->AsRecordType();

    bool log_attr_present = false;

    for ( int i = 0; i < columns->NumFields(); i++ )
        {
        if ( ! (columns->FieldDecl(i)->FindAttr(ATTR_LOG)) )
            continue;

        if ( ! LogVal::IsCompatibleType(columns->FieldType(i)) )
            {
            run_time("type of field '%s' is not support for logging output", columns->FieldName(i));
            return false;
            }

        log_attr_present = true;
        }

    if ( ! log_attr_present )
        {
        run_time("logged record type does not have any &log attributes");
        return false;
        }

    Val* event_val = sval->Lookup(rtype->FieldOffset("ev"));

@@ -473,12 +533,16 @@ bool LogMgr::DisableStream(EnumVal* id)
    }

// Helper for recursive record field unrolling.
bool LogMgr::TraverseRecord(Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude, string path, list<int> indices)
bool LogMgr::TraverseRecord(Stream* stream, Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude, string path, list<int> indices)
    {
    for ( int i = 0; i < rt->NumFields(); ++i )
        {
        BroType* t = rt->FieldType(i);

        // Ignore if &log not specified.
        if ( ! rt->FieldDecl(i)->FindAttr(ATTR_LOG) )
            continue;

        list<int> new_indices = indices;
        new_indices.push_back(i);

@@ -496,16 +560,22 @@ bool LogMgr::TraverseRecord(Filter* filter, RecordType* rt, TableVal* include, T
        if ( t->Tag() == TYPE_RECORD )
            {
            // Recurse.
            if ( ! TraverseRecord(filter, t->AsRecordType(), include, exclude, new_path, new_indices) )
            if ( ! TraverseRecord(stream, filter, t->AsRecordType(), include, exclude, new_path, new_indices) )
                return false;

            continue;
            }

        else if ( t->Tag() == TYPE_TABLE && t->AsTableType()->IsSet() )
            {
            // That's ok, handle it with all the other types below.
            }

        else if ( t->Tag() == TYPE_VECTOR )
            {
            // That's ok, handle it with all the other types below.
            }

        else {
            run_time("unsupported field type for log column");
            return false;

@@ -595,7 +665,7 @@ bool LogMgr::AddFilter(EnumVal* id, RecordVal* fval)

    filter->num_fields = 0;
    filter->fields = 0;
    if ( ! TraverseRecord(filter, stream->columns, include ? include->AsTableVal() : 0, exclude ? exclude->AsTableVal() : 0, "", list<int>()) )
    if ( ! TraverseRecord(stream, filter, stream->columns, include ? include->AsTableVal() : 0, exclude ? exclude->AsTableVal() : 0, "", list<int>()) )
        return false;

    // Get the path for the filter.

@@ -780,7 +850,7 @@ bool LogMgr::Write(EnumVal* id, RecordVal* columns)

    // Alright, can do the write now.

    LogVal** vals = RecordToFilterVals(filter, columns);
    LogVal** vals = RecordToFilterVals(stream, filter, columns);

    if ( filter->remote )
        remote_serializer->SendLogWrite(stream->id, filter->writer, path, filter->num_fields, vals);

@@ -801,9 +871,15 @@ bool LogMgr::Write(EnumVal* id, RecordVal* columns)
    return true;
    }

LogVal* LogMgr::ValToLogVal(Val* val)
LogVal* LogMgr::ValToLogVal(Val* val, BroType* ty)
    {
    LogVal* lval = new LogVal(val->Type()->Tag());
    if ( ! ty )
        ty = val->Type();

    if ( ! val )
        return new LogVal(ty->Tag(), false);

    LogVal* lval = new LogVal(ty->Tag());

    switch ( lval->type ) {
    case TYPE_BOOL:

@@ -864,14 +940,26 @@ LogVal* LogMgr::ValToLogVal(Val* val)
        break;
        }

    default:
        internal_error("unsupported type for log_write");
    case TYPE_VECTOR:
        {
        VectorVal* vec = val->AsVectorVal();
        lval->val.vector_val.size = vec->Size();
        lval->val.vector_val.vals = new LogVal* [lval->val.vector_val.size];

        for ( int i = 0; i < lval->val.vector_val.size; i++ )
            lval->val.vector_val.vals[i] = ValToLogVal(vec->Lookup(VECTOR_MIN + i), vec->Type()->YieldType());

        break;
        }

    default:
        internal_error("unsupported type for log_write");
    }

    return lval;
    }

LogVal** LogMgr::RecordToFilterVals(Filter* filter, RecordVal* columns)
LogVal** LogMgr::RecordToFilterVals(Stream* stream, Filter* filter, RecordVal* columns)
    {
    LogVal** vals = new LogVal*[filter->num_fields];

@@ -921,7 +1009,7 @@ LogWriter* LogMgr::CreateWriter(EnumVal* id, EnumVal* writer, string path, int n

    // Need to instantiate a new writer.

    LogWriterDefinition* ld = log_writers;

    while ( true )
        {

src/LogMgr.h (16 changed lines)

@@ -37,15 +37,17 @@ struct LogVal {
    // The following union is a subset of BroValUnion, including only the
    // atomic types.
    struct set_t { bro_int_t size; LogVal** vals; };
    typedef set_t vec_t;

    union _val {
        bro_int_t int_val;
        bro_uint_t uint_val;
        addr_type addr_val;
        subnet_type subnet_val;
        double double_val;
        string* string_val;
        set_t set_val;
        vec_t vector_val;
    } val;

    LogVal(TypeTag arg_type = TYPE_ERROR, bool arg_present = true) : type(arg_type), present(arg_present) {}

@@ -75,11 +77,11 @@ public:
    bool EnableStream(EnumVal* id);
    bool DisableStream(EnumVal* id);
    bool AddFilter(EnumVal* id, RecordVal* filter);
    bool RemoveFilter(EnumVal* id, StringVal* name);
    bool RemoveFilter(EnumVal* id, string name);
    bool Write(EnumVal* id, RecordVal* columns);
    bool SetBuf(EnumVal* id, bool enabled);   // Changes the state for all writers for that stream.
    bool Flush(EnumVal* id);                  // Flushes all writers for the stream.

protected:
    friend class LogWriter;

@@ -101,9 +103,9 @@ private:
    struct Stream;
    struct WriterInfo;

    bool TraverseRecord(Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude, string path, list<int> indices);
    LogVal* ValToLogVal(Val* val);
    LogVal** RecordToFilterVals(Filter* filter, RecordVal* columns);
    bool TraverseRecord(Stream* stream, Filter* filter, RecordType* rt, TableVal* include, TableVal* exclude, string path, list<int> indices);
    LogVal* ValToLogVal(Val* val, BroType* ty = 0);
    LogVal** RecordToFilterVals(Stream* stream, Filter* filter, RecordVal* columns);

    Stream* FindStream(EnumVal* id);
    void RemoveDisabledWriters(Stream* stream);
    void InstallRotationTimer(WriterInfo* winfo);

@@ -167,6 +167,26 @@ bool LogWriterAscii::DoWriteOne(ODesc* desc, LogVal* val, const LogField* field)
        break;
        }

    case TYPE_VECTOR:
        {
        if ( ! val->val.vector_val.size )
            {
            desc->AddN(empty_field, empty_field_len);
            break;
            }

        for ( int j = 0; j < val->val.vector_val.size; j++ )
            {
            if ( j > 0 )
                desc->AddN(set_separator, set_separator_len);

            if ( ! DoWriteOne(desc, val->val.vector_val.vals[j], field) )
                return false;
            }

        break;
        }

    default:
        Error(Fmt("unsupported field format %d for %s", val->type, field->name.c_str()));
        return false;

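In the ASCII writer, a vector column is thus rendered element by element, joined with the set separator, with an empty vector falling back to the empty-field placeholder. The logging.vec test and its baseline later in this commit show the resulting output for a sparsely assigned vector:

    local v: vector of string;
    v[2] = "2";
    v[5] = "5";
    Log::write(SSH, [$vec=v]);
    # resulting ssh.log column (per the logging.vec baseline): -,2,-,-,5
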
src/Type.cc (39 changed lines)

@@ -900,6 +900,45 @@ void RecordType::Describe(ODesc* d) const
        }
    }

const char* RecordType::AddFields(type_decl_list* others, attr_list* attr)
    {
    assert(types);

    bool log = false;

    if ( attr )
        {
        loop_over_list(*attr, j)
            {
            if ( (*attr)[j]->Tag() == ATTR_LOG )
                log = true;
            }
        }

    loop_over_list(*others, i)
        {
        TypeDecl* td = (*others)[i];

        if ( ! td->FindAttr(ATTR_DEFAULT) && ! td->FindAttr(ATTR_OPTIONAL) )
            return "extension field must be &optional or have &default";

        if ( log )
            {
            if ( ! td->attrs )
                td->attrs = new Attributes(new attr_list, td->type);

            td->attrs->AddAttr(new Attr(ATTR_LOG));
            }

        types->append(td);
        }

    delete others;

    num_fields = types->length();
    return 0;
    }

void RecordType::DescribeFields(ODesc* d) const
    {
    if ( d->IsReadable() )

@@ -437,6 +437,10 @@ public:

    int NumFields() const { return num_fields; }

    // Returns 0 if all is ok, otherwise a pointer to an error message. Takes
    // ownership of list.
    const char* AddFields(type_decl_list* types, attr_list* attr);

    void Describe(ODesc* d) const;
    void DescribeFields(ODesc* d) const;

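RecordType::AddFields() is the backend for the new "redef record ... +=" declaration; note the constraint it enforces. A sketch mirroring the rec.bro and wrong-rec.bro tests added below (Foo and its fields come from those tests):

    redef record Foo += {
        c: count &default=42;   # accepted: has &default
        d: string &optional;    # accepted: &optional
    };

    # A field with neither attribute is rejected with
    # "extension field must be &optional or have &default".
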
src/parse.y (26 changed lines)

@@ -3,7 +3,7 @@
// See the file "COPYING" in the main distribution directory for copyright.
%}

%expect 71
%expect 74

%token TOK_ADD TOK_ADD_TO TOK_ADDR TOK_ALARM TOK_ANY
%token TOK_ATENDIF TOK_ATELSE TOK_ATIF TOK_ATIFDEF TOK_ATIFNDEF

@@ -24,7 +24,7 @@
%token TOK_ATTR_EXPIRE_CREATE TOK_ATTR_EXPIRE_READ TOK_ATTR_EXPIRE_WRITE
%token TOK_ATTR_PERSISTENT TOK_ATTR_SYNCHRONIZED
%token TOK_ATTR_DISABLE_PRINT_HOOK TOK_ATTR_RAW_OUTPUT TOK_ATTR_MERGEABLE
%token TOK_ATTR_PRIORITY TOK_ATTR_GROUP
%token TOK_ATTR_PRIORITY TOK_ATTR_GROUP TOK_ATTR_LOG

%token TOK_DEBUG

@@ -862,7 +862,25 @@ decl:
            '{' { parser_redef_enum($3); } enum_body '}' ';'
                { /* no action */ }

    |   TOK_TYPE def_global_id ':' refined_type opt_attr ';'
    |   TOK_REDEF TOK_RECORD global_id TOK_ADD_TO
            '{' type_decl_list '}' opt_attr ';'
            {
            if ( ! $3->Type() )
                $3->Error("unknown identifier");
            else
                {
                RecordType* add_to = $3->Type()->AsRecordType();
                if ( ! add_to )
                    $3->Error("not a record type");
                else {
                    const char* error = add_to->AddFields($6, $8);
                    if ( error )
                        $3->Error(error);
                    }
                }
            }

    |   TOK_TYPE global_id ':' refined_type opt_attr ';'
            {
            add_type($2, $4, $5, 0);
            }

@@ -1034,6 +1052,8 @@ attr:
            { $$ = new Attr(ATTR_PRIORITY, $3); }
    |   TOK_ATTR_GROUP '=' expr
            { $$ = new Attr(ATTR_GROUP, $3); }
    |   TOK_ATTR_LOG
            { $$ = new Attr(ATTR_LOG); }
    ;

stmt:

@@ -196,6 +196,7 @@ when return TOK_WHEN;
&encrypt        return TOK_ATTR_ENCRYPT;
&expire_func    return TOK_ATTR_EXPIRE_FUNC;
&group          return TOK_ATTR_GROUP;
&log            return TOK_ATTR_LOG;
&mergeable      return TOK_ATTR_MERGEABLE;
&optional       return TOK_ATTR_OPTIONAL;
&persistent     return TOK_ATTR_PERSISTENT;

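Together, the parser and scanner changes accept &log both on individual fields and on a whole record-extension block; in the latter case AddFields() applies it to every added field. A sketch mirroring the attr-extend.bro test below:

    redef record Log += {
        b1: count &optional;
        b2: count &optional;
    } &log;    # &log on the block marks both new fields as log columns
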
testing/btest/Baseline/logging.attr-extend/ssh.log (new file, 2 lines)

@@ -0,0 +1,2 @@
# status country a1 b1 b2
success unknown 1 3 4

testing/btest/Baseline/logging.attr/ssh.log (new file, 6 lines)

@@ -0,0 +1,6 @@
# status country
success unknown
failure US
failure UK
success BR
failure MX

@@ -1,2 +1,2 @@
# b i e c p sn n a d t iv s sc ss se
T -42 SSH::SSH 21 123 10.0.0.0/24 10.0.0.0 1.2.3.4 3.14 1299727902.65854 100.0 hurz 4,1,3,2 CC,BB,AA EMPTY
# b i e c p sn n a d t iv s sc ss se vc ve
T -42 SSH::SSH 21 123 10.0.0.0/24 10.0.0.0 1.2.3.4 3.14 1301360085.98852 100.0 hurz 4,1,3,2 CC,BB,AA EMPTY 10,20,30 EMPTY

@@ -68,8 +68,6 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p
1299499205.0 10.0.0.1 20 10.0.0.2 1033
1299502795.0 10.0.0.2 20 10.0.0.3 9
> test.log
# t id.orig_h id.orig_p id.resp_h id.resp_p
> test2-11-03-06_19.00.05.log
# t id.orig_h id.orig_p id.resp_h id.resp_p
1299466805.0 10.0.0.1 20 10.0.0.2 1024

@@ -132,3 +130,5 @@
1299502795.0 10.0.0.2 20 10.0.0.3 9
> test2.log
# t id.orig_h id.orig_p id.resp_h id.resp_p
> test.log
# t id.orig_h id.orig_p id.resp_h id.resp_p

@@ -1,2 +1,2 @@
# b i e c p sn n a d t iv s sc ss se
T -42 SSH::SSH 21 123 10.0.0.0/24 10.0.0.0 1.2.3.4 3.14 1299727493.47095 100.0 hurz 4,1,3,2 CC,BB,AA EMPTY
# b i e c p sn n a d t iv s sc ss se vc ve
T -42 SSH::SSH 21 123 10.0.0.0/24 10.0.0.0 1.2.3.4 3.14 1301359781.8203 100.0 hurz 4,1,3,2 CC,BB,AA EMPTY 10,20,30 EMPTY

testing/btest/Baseline/logging.vec/ssh.log (new file, 2 lines)

@@ -0,0 +1,2 @@
# vec
-,2,-,-,5

@@ -18,7 +18,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

event bro_init()

@@ -19,7 +19,7 @@ export {
        status: string &optional;
        country: string &default="unknown";
        b: bool &optional;
    };
    } &log;
}

event bro_init()

@@ -14,7 +14,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

event bro_init()

@@ -16,7 +16,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

event bro_init()

testing/btest/logging/attr-extend.bro (new file, 37 lines)

@@ -0,0 +1,37 @@
#
# @TEST-EXEC: bro %INPUT
# @TEST-EXEC: btest-diff ssh.log

module SSH;

export {
    redef enum Log::ID += { SSH };

    type Log: record {
        t: time;
        id: conn_id;
        status: string &optional &log;
        country: string &default="unknown" &log;
    };
}

redef record Log += {
    a1: count &log &optional;
    a2: count &optional;
};

redef record Log += {
    b1: count &optional;
    b2: count &optional;
} &log;


event bro_init()
    {
    Log::create_stream(SSH, [$columns=Log]);

    local cid = [$orig_h=1.2.3.4, $orig_p=1234/tcp, $resp_h=2.3.4.5, $resp_p=80/tcp];

    Log::write(SSH, [$t=network_time(), $id=cid, $status="success", $a1=1, $a2=2, $b1=3, $b2=4]);
    }

testing/btest/logging/attr.bro (new file, 31 lines)

@@ -0,0 +1,31 @@
#
# @TEST-EXEC: bro %INPUT
# @TEST-EXEC: btest-diff ssh.log

module SSH;

export {
    redef enum Log::ID += { SSH };

    type Log: record {
        t: time;
        id: conn_id;
        status: string &optional &log;
        country: string &default="unknown" &log;
    };
}

event bro_init()
    {
    Log::create_stream(SSH, [$columns=Log]);

    local cid = [$orig_h=1.2.3.4, $orig_p=1234/tcp, $resp_h=2.3.4.5, $resp_p=80/tcp];

    Log::write(SSH, [$t=network_time(), $id=cid, $status="success"]);
    Log::write(SSH, [$t=network_time(), $id=cid, $status="failure", $country="US"]);
    Log::write(SSH, [$t=network_time(), $id=cid, $status="failure", $country="UK"]);
    Log::write(SSH, [$t=network_time(), $id=cid, $status="success", $country="BR"]);
    Log::write(SSH, [$t=network_time(), $id=cid, $status="failure", $country="MX"]);

    }

@@ -12,7 +12,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

event bro_init()

@@ -12,7 +12,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

global log_ssh: event(rec: Log);

@@ -17,7 +17,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

global ssh_log: event(rec: Log);

@@ -12,7 +12,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

redef Log::enable_local_logging = F;

@@ -18,7 +18,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

global c = -1;

@@ -18,7 +18,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

function fail(rec: Log): bool

@@ -35,7 +35,9 @@ export {
        sc: set[count];
        ss: set[string];
        se: set[string];
    };
        vc: vector of count;
        ve: vector of string;
    } &log;
}

event bro_init()

@@ -54,6 +56,7 @@ module SSH;
event remote_connection_handshake_done(p: event_peer)
    {
    local empty_set: set[string];
    local empty_vector: vector of string;

    Log::write(SSH, [
        $b=T,

@@ -70,7 +73,9 @@ event remote_connection_handshake_done(p: event_peer)
        $s="hurz",
        $sc=set(1,2,3,4),
        $ss=set("AA", "BB", "CC"),
        $se=empty_set
        $se=empty_set,
        $vc=vector(10, 20, 30),
        $ve=empty_vector
        ]);
    }
@TEST-END-FILE

@@ -25,7 +25,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

event bro_init()

@@ -16,7 +16,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

event bro_init()

@@ -1,6 +1,6 @@
#
# @TEST-EXEC: bro -r %DIR/rotation.trace %INPUT >out
# @TEST-EXEC: for i in test*.log; do printf '> %s\n' $i; cat $i; done >>out
# @TEST-EXEC: for i in `ls test*.log | sort`; do printf '> %s\n' $i; cat $i; done >>out
# @TEST-EXEC: btest-diff out

module Test;

@@ -14,7 +14,7 @@ export {
    type Log: record {
        t: time;
        id: conn_id;    # Will be rolled out into individual columns.
    };
    } &log;
}

redef Log::default_rotation_interval = 1hr;

@@ -14,7 +14,7 @@ export {
    type Log: record {
        t: time;
        id: conn_id;    # Will be rolled out into individual columns.
    };
    } &log;
}

redef Log::default_rotation_interval = 1hr;

@@ -13,7 +13,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

event bro_init()

@@ -12,7 +12,7 @@ export {
        id: conn_id;    # Will be rolled out into individual columns.
        status: string &optional;
        country: string &default="unknown";
    };
    } &log;
}

event bro_init()

@@ -27,7 +27,9 @@ export {
        sc: set[count];
        ss: set[string];
        se: set[string];
    };
        vc: vector of count;
        ve: vector of string;
    } &log;
}

event bro_init()

@@ -35,6 +37,7 @@ event bro_init()
    Log::create_stream(SSH, [$columns=Log]);

    local empty_set: set[string];
    local empty_vector: vector of string;

    Log::write(SSH, [
        $b=T,

@@ -51,7 +54,9 @@ event bro_init()
        $s="hurz",
        $sc=set(1,2,3,4),
        $ss=set("AA", "BB", "CC"),
        $se=empty_set
        $se=empty_set,
        $vc=vector(10, 20, 30),
        $ve=empty_vector
        ]);
    }

testing/btest/logging/vec.bro (new file, 27 lines)

@@ -0,0 +1,27 @@
#
# @TEST-EXEC: bro %INPUT
# @TEST-EXEC: btest-diff ssh.log

module SSH;

export {
    redef enum Log::ID += { SSH };

    type Log: record {
        vec: vector of string &log;
    };
}

event bro_init()
    {
    Log::create_stream(SSH, [$columns=Log]);

    local v: vector of string;

    v[2] = "2";
    v[5] = "5";

    Log::write(SSH, [$vec=v]);
    }

testing/rec.bro (new file, 17 lines)

@@ -0,0 +1,17 @@
# @TEST-EXEC: bro %INPUT >output
# @TEST-EXEC: btest-diff output

type Foo: record {
    a: count;
    b: count &optional;
};

redef record Foo += {
    c: count &default=42;
    d: count &optional;
};

global f: Foo = [$a=21];

print f;

testing/rec2.bro (new file, 17 lines)

@@ -0,0 +1,17 @@
# @TEST-EXEC: bro %INPUT >output
# @TEST-EXEC: btest-diff output

type Foo: record {
    a: count;
    b: count &optional;
};

redef record Foo += {
    c: count &default=42;
    d: string &optional;
};

global f: Foo = [$a=21, $d="XXX"];

print f;

testing/wrong-rec.bro (new file, 13 lines)

@@ -0,0 +1,13 @@
# @TEST-EXEC-FAIL: bro %INPUT >output 2>&1
# @TEST-EXEC: btest-diff output

type Foo: record {
    a: count;
    b: count &optional;
};

redef record Foo += {
    c: count;
    d: string &optional;
};