Mirror of https://github.com/zeek/zeek.git, synced 2025-10-05 08:08:19 +00:00
Checkpoint commit. This is all a huge mess right now. :)
parent 78401262d0
commit 50e319a417
9 changed files with 495 additions and 314 deletions
@ -1,11 +1,11 @@
##! The intelligence framework provides a way to store and query IP addresses,
##! and strings (with a subtype). Metadata can
##! and strings (with a str_type). Metadata can
##! also be associated with the intelligence, like for making more informed
##! decisions about matching and handling of intelligence.
#
# TODO:
# Comments
# Better Intel::Item comparison (same_meta)
# Better Intel::Item comparison (has_meta)
# Generate a notice when malformed data is discovered.
# Complete "net" support as an intelligence type.

@ -22,195 +22,352 @@ export {
Detection,
};

type Classification: enum {
## String data needs to be further categorized since it could represent
## any number of types of data.
type SubType: enum {
## A complete URL.
URL,
## User-Agent string, typically HTTP or mail message body.
USER_AGENT,
## Email address.
EMAIL,
## DNS domain name (DNS Zones are implemented in an intelligence plugin).
DOMAIN,
## A user name.
USER_NAME,
## File hash which is not hash-type specific. It's up to the user to query
## for any relevant hash types.
FILE_HASH,
## Certificate hash. Normally for X.509 certificates from the SSL analyzer.
CERT_HASH,
};

## Why a piece of intelligence is being added or looked up. The intent a human
## placed upon the data when it was decided to be worthwhile as intelligence.
type Intent: enum {
## Data is to be considered malicious.
MALICIOUS,
INFRASTRUCTURE,
## Data is to be considered sensitive. In many cases this may be
## hosts containing contractually or legally restricted data such
## as HIPAA, PCI, Sarbanes-Oxley, etc.
SENSITIVE,
FRIEND,
## Data that is never to be seen. This acts like the "canary in
## the coal mine". A possibility could be file hashes for
## critically important files.
CANARY,
## Data that is whitelisted. The primary use for this intent is to
## locally whitelist false positive data from external feeds.
WHITELIST,
};

type SubType: enum {
URL,
EMAIL,
DOMAIN,
USER_NAME,
FILE_HASH, # (non hash type specific, md5, sha1, sha256)
CERT_HASH,
ASN,
## Enum to represent where data came from when it was discovered.
type Where: enum {
## A catchall value to represent data of unknown provenance.
ANYWHERE,
};

## Data about an :bro:type:`Intel::Item`.
type MetaData: record {
## An arbitrary string value representing the data source. Typically,
## the convention for this field will be the source name and feed name
## separated by a hyphen. For example: "source1-c&c".
source: string;
## The intent of the data.
intent: Intent;
## A freeform description for the data.
desc: string &optional;
## A URL for more information about the data.
url: string &optional;
};

type Item: record {
host: addr &optional;
net: subnet &optional;
str: string &optional;
str_type: SubType &optional;

meta: MetaData;
};
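# A minimal usage sketch (not part of this commit): assuming the new Item and
# MetaData records above, a feed script might insert a host indicator like this;
# the source name, description, and address are illustrative only.
event bro_init()
	{
	local meta: Intel::MetaData = [$source="source1-c&c",
	                               $intent=Intel::MALICIOUS,
	                               $desc="known C&C host"];
	Intel::insert([$host=192.0.2.1, $meta=meta]);
	}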

type Found: record {
host: addr &optional;
str: string &optional;
str_type: SubType &optional;

where: Where;
};

type Info: record {
ts: time &log;
## This value should be one of: "info", "warn", "error"
level: string &log;
message: string &log;
};

type MetaData: record {
source: string;
class: Classification;
desc: string &optional;
url: string &optional;
tags: set[string] &optional;
};

type Item: record {
ip: addr &optional;
net: subnet &optional;

str: string &optional;
subtype: SubType &optional;

meta: MetaData;
item: Item &log;
};

type Query: record {
ip: addr &optional;

str: string &optional;
subtype: SubType &optional;

class: Classification &optional;

or_tags: set[string] &optional;
and_tags: set[string] &optional;

## The predicate can be given when searching for a match. It will
## be tested against every :bro:type:`MetaData` item associated with
## the data being matched on. If it returns T a single time, the
## matcher will consider that the item has matched.
pred: function(meta: Intel::Item): bool &optional;
};
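# A hypothetical sketch (not part of this commit) of the Query-based matching
# this record drives: a predicate receives the full Intel::Item and can apply
# arbitrary logic. The source name tested here is illustrative only.
function only_source1(item: Intel::Item): bool
	{
	return item$meta$source == "source1";
	}

# e.g.:  Intel::matcher([$ip=192.0.2.1, $pred=only_source1]);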

type Importer: enum {
NULL_IMPORTER
type Plugin: record {
index: function() &optional;
match: function(found: Found): bool &optional;
lookup: function(found: Found): set[Item] &optional;
};
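# A hypothetical sketch (not part of this commit): the Plugin record above lets
# a plugin supply its own match/lookup callbacks. How plugins are registered
# (the "plugins" container used later in find()) is not shown in this diff.
function never_matches(found: Intel::Found): bool
	{
	return F;
	}

# e.g.:  local p: Intel::Plugin = [$match=never_matches];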

global insert: function(item: Item): bool;
global insert_event: event(item: Item);
## Manipulation and query API functions.
global insert: function(item: Item);
global delete_item: function(item: Item): bool;
global unique_data: function(): count;

global matcher: function(query: Query): bool;
global lookup: function(query: Query): set[Item];
## Function to declare discovery of a piece of data in order to check
## it against known intelligence for matches.
global found_in_conn: function(c: connection, found: Found);
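# A hypothetical usage sketch (not part of this commit): report a value observed
# on the wire so it gets checked against the intelligence store. The choice of
# the http_request event and the ANYWHERE provenance are illustrative only.
event http_request(c: connection, method: string, original_URI: string,
                   unescaped_URI: string, version: string)
	{
	Intel::found_in_conn(c, [$str=unescaped_URI,
	                         $str_type=Intel::URL,
	                         $where=Intel::ANYWHERE]);
	}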

global register_custom_matcher: function(subtype: SubType,
func: function(query: Query): bool);
global register_custom_lookup: function(subtype: SubType,
func: function(query: Query): set[Item]);
## Event to represent a match happening in a connection. On clusters there
## is no assurance as to where this event will be generated so don't
## assume that arbitrary global state beyond the given data
## will be available.
global match_in_conn: event(c: connection, found: Found, items: set[Item]);
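# A minimal sketch (not part of this commit) of consuming the match event, here
# just printing the matching sources for the connection's originator.
event Intel::match_in_conn(c: connection, found: Intel::Found, items: set[Intel::Item])
	{
	for ( item in items )
		print fmt("intel match on %s from source %s", c$id$orig_h, item$meta$source);
	}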

global find: function(found: Found): bool;
global lookup: function(found: Found): set[Item];

## Plugin API functions
global register_custom_matcher: function(str_type: SubType,
func: function(found: Found): bool);
global register_custom_lookup: function(str_type: SubType,
func: function(found: Found): set[Item]);
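# A hypothetical sketch (not part of this commit) of the plugin hooks above: a
# DNS-zone style plugin could register its own matcher for DOMAIN strings. The
# function body here is a placeholder only.
function zone_matcher(found: Intel::Found): bool
	{
	# e.g. strip leading labels from found$str and check parent zones
	# against the plugin's own data.
	return F;
	}

# e.g.:  Intel::register_custom_matcher(Intel::DOMAIN, zone_matcher);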

## API Events
global new_item: event(item: Item);
global updated_item: event(item: Item);
global insert_event: event(item: Item);

## Optionally store metadata. This is primarily used internally depending on
## whether this is a cluster deployment or not. On clusters, workers probably
## shouldn't be storing the full metadata.
const store_metadata = T &redef;
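# A usage sketch (not part of this commit): a cluster worker node could disable
# local metadata storage like this.
redef Intel::store_metadata = F;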
}

## Store collections of :bro:type:`MetaData` records indexed by a source name.
type IndexedItems: table[string, Classification] of MetaData;
# Internal handler for conn-oriented matches with no metadata, based on the store_metadata setting.
global match_in_conn_no_items: event(c: connection, found: Found);

type DataStore: record {
ip_data: table[addr] of IndexedItems;
string_data: table[string, SubType] of IndexedItems;
host_data: table[addr] of set[MetaData];
string_data: table[string, SubType] of set[MetaData];
};
global data_store: DataStore;

global custom_matchers: table[SubType] of set[function(query: Query): bool];
global custom_lookup: table[SubType] of set[function(query: Query): set[Item]];
global custom_matchers: table[SubType] of set[function(found: Found): bool];
global custom_lookup: table[SubType] of set[function(found: Found): set[Item]];

event bro_init() &priority=5
{
Log::create_stream(Intel::LOG, [$columns=Info]);
}

function register_custom_matcher(subtype: SubType, func: function(query: Query): bool)

function find(found: Found): bool
{
if ( subtype !in custom_matchers )
custom_matchers[subtype] = set();
add custom_matchers[subtype][func];
}

function register_custom_lookup(subtype: SubType, func: function(query: Query): set[Item])
{
if ( subtype !in custom_lookup )
custom_lookup[subtype] = set();
add custom_lookup[subtype][func];
}

function same_meta(meta1: MetaData, meta2: MetaData): bool
{
# "any" type values can't be compared so this generic implementation doesn't work.
#local rf1 = record_fields(item1);
#local rf2 = record_fields(item2);
#for ( field in rf1 )
# {
# if ( ((rf1[field]?$value && rf1[field]?$value) &&
# rf1[field]$value != rf2[field]$value) ||
# ! (rf1[field]?$value && rf1[field]?$value) )
# return F;
# }

if ( meta1$source == meta2$source &&
meta1$class == meta2$class &&
((!meta1?$desc && !meta2?$desc) || (meta1?$desc && meta2?$desc && meta1$desc == meta2$desc)) &&
((!meta1?$url && !meta2?$url) || (meta1?$url && meta2?$url && meta1$url == meta2$url)) &&
((!meta1?$tags && !meta2?$tags) || (meta1?$tags && meta2?$tags && |meta1$tags| == |meta2$tags|)) )
if ( found?$host && found$host in data_store$host_data )
{
# TODO: match on all of the tag values
return T;
}
else if ( found?$str && found?$str_type &&
[found$str, found$str_type] in data_store$string_data )
{
return T;
}

# Finder plugins!
for ( plugin in plugins )
{
if ( plugin?$match && plugin$match(found) )
return T;
}

return F;
}

function lookup(found: Found): set[Item]
{
local item: Item;
local return_data: set[Item] = set();

if ( found?$host )
{
# See if the host is known about and it has meta values
if ( found$host in data_store$host_data )
{
for ( m in data_store$host_data[found$host] )
{
item = [$host=found$host, $meta=m];
add return_data[item];
}
}
}
else if ( found?$str && found?$str_type )
{
# See if the string is known about and it has meta values
if ( [found$str, found$str_type] in data_store$string_data )
{
for ( m in data_store$string_data[found$str, found$str_type] )
{
item = [$str=found$str, $str_type=found$str_type, $meta=m];
add return_data[item];
}
}

# Check if there are any custom str_type lookup functions and add the values to
# the result set.
if ( found$str_type in custom_lookup )
{
for ( lookup_func in custom_lookup[found$str_type] )
{
# Iterating here because there is no way to merge sets generically.
for ( custom_lookup_item in lookup_func(found) )
add return_data[custom_lookup_item];
}
}
}

# TODO: Later we should probably track whitelist matches.
# TODO: base this on a set instead of iterating the items.
for ( item in return_data )
{
if ( item$meta$intent == WHITELIST )
{
return set();
}
}

return return_data;
}

function Intel::found_in_conn(c: connection, found: Found)
{
if ( find(found) )
{
if ( store_metadata )
{
local items = lookup(found);
event Intel::match_in_conn(c, found, items);
}
else
{
event Intel::match_in_conn_no_items(c, found);
}
}
}

function register_custom_matcher(str_type: SubType, func: function(found: Found): bool)
{
if ( str_type !in custom_matchers )
custom_matchers[str_type] = set(func);
else
add custom_matchers[str_type][func];
}

function register_custom_lookup(str_type: SubType, func: function(found: Found): set[Item])
{
if ( str_type !in custom_lookup )
custom_lookup[str_type] = set(func);
else
add custom_lookup[str_type][func];
}

function unique_data(): count
{
return |data_store$host_data| + |data_store$string_data|;
}

#function get_meta(check: MetaData, metas: set[MetaData]): MetaData
# {
# local check_hash = md5_hash(check);
# for ( m in metas )
# {
# if ( check_hash == md5_hash(m) )
# return m;
# }
#
# return [$source=""];
# }

function has_meta(check: MetaData, metas: set[MetaData]): bool
{
local check_hash = md5_hash(check);
for ( m in metas )
{
if ( check_hash == md5_hash(m) )
return T;
}

# The records must not be equivalent if we made it this far.
return F;
}

function insert(item: Item): bool
function insert(item: Item)
{
local err_msg = "";
if ( item?$str && ! item?$subtype )
err_msg = "You must provide a subtype for strings or this item doesn't make sense.";
if ( item?$str && ! item?$str_type )
err_msg = "You must provide a str_type for strings or this item doesn't make sense.";

if ( err_msg == "" )
{
# Create and fill out the meta data item.
local meta = item$meta;
local metas: set[MetaData];

if ( item?$ip )
if ( item?$host )
{
if ( item$ip !in data_store$ip_data )
data_store$ip_data[item$ip] = table();
if ( item$host !in data_store$host_data )
data_store$host_data[item$host] = set();

if ( [meta$source, meta$class] !in data_store$ip_data[item$ip] )
event Intel::new_item(item);
else if ( ! same_meta(data_store$ip_data[item$ip][meta$source, meta$class], meta) )
event Intel::updated_item(item);
else
return F;

data_store$ip_data[item$ip][meta$source, meta$class] = item$meta;
return T;
metas = data_store$host_data[item$host];
}
else if ( item?$str )
{
if ( [item$str, item$subtype] !in data_store$string_data )
data_store$string_data[item$str, item$subtype] = table();

if ( [meta$source, meta$class] !in data_store$string_data[item$str, item$subtype] )
event Intel::new_item(item);
else if ( ! same_meta(data_store$string_data[item$str, item$subtype][meta$source, meta$class], meta) )
event Intel::updated_item(item);
else
return F;
if ( [item$str, item$str_type] !in data_store$string_data )
data_store$string_data[item$str, item$str_type] = set();

data_store$string_data[item$str, item$subtype][meta$source, meta$class] = item$meta;
return T;
metas = data_store$string_data[item$str, item$str_type];
}
else
err_msg = "Failed to insert intelligence item for some unknown reason.";
{
err_msg = "Malformed intelligence item";
}

for ( m in metas )
{
if ( meta$source == m$source )
{
if ( has_meta(meta, metas) )
{
# It's the same item being inserted again.
return;
}
else
{
event Intel::updated_item(item);
break;
}
}
else
{
event Intel::new_item(item);
break;
}
}

add metas[item$meta];
return;
}

if ( err_msg != "" )
Log::write(Intel::LOG, [$ts=network_time(), $level="warn", $message=fmt(err_msg)]);
return F;
Log::write(Intel::LOG, [$ts=network_time(), $level="warn", $message=err_msg, $item=item]);

return;
}

event insert_event(item: Item)

@ -218,160 +375,3 @@ event insert_event(item: Item)
insert(item);
}

function match_item_with_query(item: Item, query: Query): bool
{
if ( ! query?$and_tags && ! query?$or_tags && ! query?$pred )
return T;

if ( query?$and_tags )
{
local matched = T;
# Every tag given has to match in a single MetaData entry.
for ( tag in query$and_tags )
{
if ( item$meta?$tags && tag !in item$meta$tags )
matched = F;
}
if ( matched )
return T;
}
else if ( query?$or_tags )
{
# For OR tags, only a single tag has to match.
for ( tag in query$or_tags )
{
if ( item$meta?$tags && tag in item$meta$tags )
return T;
}
}
else if ( query?$pred )
return query$pred(item);

# This indicates some sort of failure in the query
return F;
}

function lookup(query: Query): set[Item]
{
local meta: MetaData;
local item: Item;
local return_data: set[Item] = set();

if ( query?$ip )
{
if ( query$ip in data_store$ip_data )
{
for ( [source, class] in data_store$ip_data[query$ip] )
{
meta = data_store$ip_data[query$ip][source, class];
item = [$ip=query$ip, $meta=meta];
if ( match_item_with_query(item, query) )
add return_data[item];
}
}
}

else if ( query?$str )
{
if ( [query$str, query$subtype] in data_store$string_data )
{
for ( [source, class] in data_store$string_data[query$str, query$subtype] )
{
meta = data_store$string_data[query$str, query$subtype][source, class];
item = [$str=query$str, $subtype=query$subtype, $meta=meta];
if ( match_item_with_query(item, query) )
add return_data[item];
}
}

# Check if there are any custom subtype lookup functions and add the values to
# the result set.
if ( query$subtype in custom_lookup )
{
for ( lookup_func in custom_lookup[query$subtype] )
{
# Iterating here because there is no way to merge sets generically.
for ( custom_lookup_item in lookup_func(query) )
add return_data[custom_lookup_item];
}
}
}

return return_data;
}


function matcher(query: Query): bool
{
local err_msg = "";
if ( (query?$or_tags || query?$and_tags) && query?$pred )
err_msg = "You can't match with both tags and a predicate.";
else if ( query?$or_tags && query?$and_tags )
err_msg = "You can't match with both OR'd together tags and AND'd together tags.";
else if ( query?$str && ! query?$subtype )
err_msg = "You must provide a subtype to matcher or this query doesn't make sense.";

local item: Item;
local meta: MetaData;

if ( err_msg == "" )
{
if ( query?$ip )
{
if ( query$ip in data_store$ip_data )
{
if ( ! query?$and_tags && ! query?$or_tags && ! query?$pred )
return T;

for ( [source, class] in data_store$ip_data[query$ip] )
{
meta = data_store$ip_data[query$ip][source, class];
item = [$ip=query$ip, $meta=meta];
if ( match_item_with_query(item, query) )
return T;
}
}
}

else if ( query?$str )
{
if ( [query$str, query$subtype] in data_store$string_data )
{
if ( ! query?$and_tags && ! query?$or_tags && ! query?$pred )
return T;

for ( [source, class] in data_store$string_data[query$str, query$subtype] )
{
meta = data_store$string_data[query$str, query$subtype][source, class];
item = [$str=query$str, $subtype=query$subtype, $meta=meta];
if ( match_item_with_query(item, query) )
return T;
}
}

# Check if there are any custom subtype matchers in case we haven't matched yet.
if ( query$subtype in custom_matchers )
{
for ( match_func in custom_matchers[query$subtype] )
{
if ( match_func(query) )
return T;
}
}
}

else
err_msg = "You must supply one of the $ip or $str fields to search on";
}

if ( err_msg != "" )
Log::write(Intel::LOG, [$ts=network_time(), $level="error", $message=fmt(err_msg)]);
return F;
}

module GLOBAL;

function INTEL(item: Intel::Query): bool
{
return Intel::matcher(item);
}
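# A hypothetical usage sketch (not part of this commit): the GLOBAL-scope INTEL
# shortcut lets any script test a value directly; the chosen event is illustrative.
event connection_established(c: connection)
	{
	if ( INTEL([$ip=c$id$resp_h]) )
		print fmt("connection to known intel host %s", c$id$resp_h);
	}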