mirror of
https://github.com/zeek/zeek.git
synced 2025-10-11 11:08:20 +00:00
Merge remote-tracking branch 'origin/master' into topic/seth/smb
This commit is contained in:
commit
7b3ec047d0
73 changed files with 1141 additions and 424 deletions
52
CHANGES
52
CHANGES
|
@ -1,4 +1,56 @@
|
|||
|
||||
2.4-759 | 2016-08-05 09:32:42 -0400
|
||||
|
||||
* Intel framework improvements (Jan Grashoefer)
|
||||
* Added expiration for intelligence items.
|
||||
* Improved intel notices.
|
||||
* Added hook to allow extending the intel log.
|
||||
* Added support for subnets to intel-framework.
|
||||
|
||||
2.4-742 | 2016-08-02 15:28:31 -0700
|
||||
|
||||
* Fix duplicate SSH authentication failure events. Addresses BIT-1641.
|
||||
(Robin Sommer)
|
||||
|
||||
* Remove OpenSSL dependency for plugins. (Robin Sommer)
|
||||
|
||||
2.4-737 | 2016-08-02 11:38:07 -0700
|
||||
|
||||
* Fix some Coverity warnings. (Robin Sommer)
|
||||
|
||||
2.4-735 | 2016-08-02 11:05:36 -0700
|
||||
|
||||
* Added string slicing examples to documentation. (Moshe Kaplan)
|
||||
|
||||
2.4-733 | 2016-08-01 09:09:29 -0700
|
||||
|
||||
* Fixing a CMake dependency issue for the pcap bifs. (Robin Sommer)
|
||||
|
||||
2.4-732 | 2016-08-01 08:33:00 -0700
|
||||
|
||||
* Removing pkg/make-*-packages scripts. BIT-1509 #closed (Robin
|
||||
Sommer)
|
||||
|
||||
2.4-731 | 2016-08-01 08:14:06 -0700
|
||||
|
||||
* Correct endianness of IP addresses in SNMP. Addresses BIT-1644.
|
||||
(Anony Mous)
|
||||
|
||||
2.4-729 | 2016-08-01 08:00:54 -0700
|
||||
|
||||
* Fix behavior of connection_pending event. It is now really only
|
||||
raised when Bro is terminating. Also adds a test-case that raises
|
||||
the event. (Johanna Amann)
|
||||
|
||||
* Retired remove -J/-K options (set md5/hash key) from the manpage.
|
||||
They had already been removed from the code. (Johanna Amann)
|
||||
|
||||
* NetControl: Add catch-and-release event when IPs are forgotten.
|
||||
This adds an event catch_release_forgotten() that is raised once
|
||||
Catch & Release ceases block management for an IP address because
|
||||
the IP has not been seen in traffic during the watch interval.
|
||||
(Johanna Amann)
|
||||
|
||||
2.4-723 | 2016-07-26 15:04:26 -0700
|
||||
|
||||
* Add error events to input framework. (Johanna Amann)
|
||||
|
|
5
NEWS
5
NEWS
|
@ -191,6 +191,11 @@ Removed Functionality
|
|||
- The command line options --set-seed and --md5-hashkey have been
|
||||
removed.
|
||||
|
||||
- The packaging scripts pkg/make-*-packages are gone. They aren't
|
||||
used anymore for the binary Bro packages that the projects
|
||||
distributes; haven't been supported in a while; and have
|
||||
problems.
|
||||
|
||||
Deprecated Functionality
|
||||
------------------------
|
||||
|
||||
|
|
2
VERSION
2
VERSION
|
@ -1 +1 @@
|
|||
2.4-723
|
||||
2.4-759
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit 6cffeec0d5d2e61be93d4f52ddb7f9b60842ad86
|
||||
Subproject commit 3664242a218c21100d62917866d6b8cb0d6f0fa1
|
|
@ -1 +1 @@
|
|||
Subproject commit dcc64f4ab9f5d80f808aaaf39979525e22817019
|
||||
Subproject commit 3568621c9bd5836956f2a6401039fdd7d0886c9e
|
|
@ -1 +1 @@
|
|||
Subproject commit 3aef01c600b29e936348798bc27bdfb57ddf4053
|
||||
Subproject commit c46bc9077c4e4ee28f8778d9cbf58a708c2b998c
|
|
@ -181,11 +181,14 @@ Here is a more detailed description of each type:
|
|||
second-to-last character, etc. Here are a few examples::
|
||||
|
||||
local orig = "0123456789";
|
||||
local second_char = orig[1];
|
||||
local last_char = orig[-1];
|
||||
local first_two_chars = orig[:2];
|
||||
local last_two_chars = orig[8:];
|
||||
local no_first_and_last = orig[1:9];
|
||||
local second_char = orig[1]; # "1"
|
||||
local last_char = orig[-1]; # "9"
|
||||
local first_two_chars = orig[:2]; # "01"
|
||||
local last_two_chars = orig[8:]; # "89"
|
||||
local no_first_and_last = orig[1:9]; # "12345678"
|
||||
local no_first = orig[1:]; # "123456789"
|
||||
local no_last = orig[:-1]; # "012345678"
|
||||
local copy_orig = orig[:]; # "0123456789"
|
||||
|
||||
Note that the subscript operator cannot be used to modify a string (i.e.,
|
||||
it cannot be on the left side of an assignment operator).
|
||||
|
|
|
@ -78,12 +78,6 @@ force DNS
|
|||
\fB\-I\fR,\ \-\-print\-id <ID name>
|
||||
print out given ID
|
||||
.TP
|
||||
\fB\-J\fR,\ \-\-set\-seed <seed>
|
||||
set the random number seed
|
||||
.TP
|
||||
\fB\-K\fR,\ \-\-md5\-hashkey <hashkey>
|
||||
set key for MD5\-keyed hashing
|
||||
.TP
|
||||
\fB\-N\fR,\ \-\-print\-plugins
|
||||
print available plugins and exit (\fB\-NN\fR for verbose)
|
||||
.TP
|
||||
|
|
|
@ -1,46 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
# This script generates binary DEB packages.
|
||||
# They can be found in ../build/ after running.
|
||||
|
||||
# The DEB CPack generator depends on `dpkg-shlibdeps` to automatically
|
||||
# determine what dependencies to set for the packages
|
||||
type dpkg-shlibdeps > /dev/null 2>&1 || {
|
||||
echo "\
|
||||
Creating DEB packages requires the "dpkg-shlibs" command, usually provided by
|
||||
the 'dpkg-dev' package, please install it first.
|
||||
" >&2;
|
||||
exit 1;
|
||||
}
|
||||
|
||||
prefix=/opt/bro
|
||||
localstatedir=/var/opt/bro
|
||||
|
||||
# During the packaging process, `dpkg-shlibs` will fail if used on a library
|
||||
# that links to other internal/project libraries unless an RPATH is used or
|
||||
# we set LD_LIBRARY_PATH such that it can find the internal/project library
|
||||
# in the temporary packaging tree.
|
||||
export LD_LIBRARY_PATH=./${prefix}/lib
|
||||
|
||||
cd ..
|
||||
|
||||
# Minimum Bro
|
||||
./configure --prefix=${prefix} --disable-broccoli --disable-broctl \
|
||||
--pkg-name-prefix=Bro-minimal --binary-package
|
||||
( cd build && make package )
|
||||
|
||||
# Full Bro package
|
||||
./configure --prefix=${prefix} --localstatedir=${localstatedir} --pkg-name-prefix=Bro --binary-package
|
||||
( cd build && make package )
|
||||
|
||||
# Broccoli
|
||||
cd aux/broccoli
|
||||
./configure --prefix=${prefix} --binary-package
|
||||
( cd build && make package && mv *.deb ../../../build/ )
|
||||
cd ../..
|
||||
|
||||
# Broctl
|
||||
cd aux/broctl
|
||||
./configure --prefix=${prefix} --localstatedir=${localstatedir} --binary-package
|
||||
( cd build && make package && mv *.deb ../../../build/ )
|
||||
cd ../..
|
|
@ -1,57 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
# This script creates binary packages for Mac OS X.
|
||||
# They can be found in ../build/ after running.
|
||||
|
||||
type sw_vers > /dev/null 2>&1 || {
|
||||
echo "Unable to get Mac OS X version" >&2;
|
||||
exit 1;
|
||||
}
|
||||
|
||||
# Get the OS X minor version
|
||||
# 5 = Leopard, 6 = Snow Leopard, 7 = Lion ...
|
||||
osx_ver=`sw_vers | sed -n 's/ProductVersion://p' | cut -d . -f 2`
|
||||
|
||||
if [ ${osx_ver} -lt 5 ]; then
|
||||
echo "Packages for OS X < 10.5 are not supported" >&2
|
||||
exit 1
|
||||
elif [ ${osx_ver} -eq 5 ]; then
|
||||
# On OS X 10.5, the x86_64 version of libresolv is broken,
|
||||
# so we build for i386 as the easiest solution
|
||||
arch=i386
|
||||
else
|
||||
# Currently it's just easiest to build the 10.5 package on
|
||||
# on 10.5, but if it weren't for the libresolv issue, we could
|
||||
# potentially build packages for older OS X version by using the
|
||||
# --osx-sysroot and --osx-min-version options
|
||||
arch=x86_64
|
||||
fi
|
||||
|
||||
prefix=/opt/bro
|
||||
|
||||
cd ..
|
||||
|
||||
# Minimum Bro
|
||||
CMAKE_PREFIX_PATH=/usr CMAKE_OSX_ARCHITECTURES=${arch} ./configure --prefix=${prefix} \
|
||||
--disable-broccoli --disable-broctl --pkg-name-prefix=Bro-minimal \
|
||||
--binary-package
|
||||
( cd build && make package )
|
||||
|
||||
# Full Bro package
|
||||
CMAKE_PREFIX_PATH=/usr CMAKE_OSX_ARCHITECTURES=${arch} ./configure --prefix=${prefix} \
|
||||
--pkg-name-prefix=Bro --binary-package
|
||||
( cd build && make package )
|
||||
|
||||
# Broccoli
|
||||
cd aux/broccoli
|
||||
CMAKE_PREFIX_PATH=/usr CMAKE_OSX_ARCHITECTURES=${arch} ./configure --prefix=${prefix} \
|
||||
--binary-package
|
||||
( cd build && make package && mv *.dmg ../../../build/ )
|
||||
cd ../..
|
||||
|
||||
# Broctl
|
||||
cd aux/broctl
|
||||
CMAKE_PREFIX_PATH=/usr CMAKE_OSX_ARCHITECTURES=${arch} ./configure --prefix=${prefix} \
|
||||
--binary-package
|
||||
( cd build && make package && mv *.dmg ../../../build/ )
|
||||
cd ../..
|
|
@ -1,39 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
# This script generates binary RPM packages.
|
||||
# They can be found in ../build/ after running.
|
||||
|
||||
# The RPM CPack generator depends on `rpmbuild` to create packages
|
||||
type rpmbuild > /dev/null 2>&1 || {
|
||||
echo "\
|
||||
Creating RPM packages requires the "rpmbuild" command, usually provided by
|
||||
the 'rpm-build' package, please install it first.
|
||||
" >&2;
|
||||
exit 1;
|
||||
}
|
||||
|
||||
prefix=/opt/bro
|
||||
localstatedir=/var/opt/bro
|
||||
|
||||
cd ..
|
||||
|
||||
# Minimum Bro
|
||||
./configure --prefix=${prefix} --disable-broccoli --disable-broctl \
|
||||
--pkg-name-prefix=Bro-minimal --binary-package
|
||||
( cd build && make package )
|
||||
|
||||
# Full Bro package
|
||||
./configure --prefix=${prefix} --localstatedir=${localstatedir} --pkg-name-prefix=Bro --binary-package
|
||||
( cd build && make package )
|
||||
|
||||
# Broccoli
|
||||
cd aux/broccoli
|
||||
./configure --prefix=${prefix} --binary-package
|
||||
( cd build && make package && mv *.rpm ../../../build/ )
|
||||
cd ../..
|
||||
|
||||
# Broctl
|
||||
cd aux/broctl
|
||||
./configure --prefix=${prefix} --localstatedir=${localstatedir} --binary-package
|
||||
( cd build && make package && mv *.rpm ../../../build/ )
|
||||
cd ../..
|
|
@ -1,5 +1,8 @@
|
|||
@load ./main
|
||||
|
||||
# File analysis framework integration.
|
||||
@load ./files
|
||||
|
||||
# The cluster framework must be loaded first.
|
||||
@load base/frameworks/cluster
|
||||
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
##! Cluster transparency support for the intelligence framework. This is mostly
|
||||
##! oriented toward distributing intelligence information across clusters.
|
||||
|
||||
@load ./main
|
||||
@load base/frameworks/cluster
|
||||
@load ./input
|
||||
|
||||
module Intel;
|
||||
|
||||
|
@ -17,19 +17,17 @@ redef record Item += {
|
|||
redef have_full_data = F;
|
||||
@endif
|
||||
|
||||
# Internal event for cluster data distribution.
|
||||
global cluster_new_item: event(item: Item);
|
||||
|
||||
# Primary intelligence distribution comes from manager.
|
||||
redef Cluster::manager2worker_events += /^Intel::(cluster_new_item)$/;
|
||||
# If a worker finds intelligence and adds it, it should share it back to the manager.
|
||||
redef Cluster::worker2manager_events += /^Intel::(cluster_new_item|match_no_items)$/;
|
||||
# Primary intelligence management is done by the manager:
|
||||
# The manager informs the workers about new items and item removal.
|
||||
redef Cluster::manager2worker_events += /^Intel::(cluster_new_item|purge_item)$/;
|
||||
# A worker queries the manager to insert, remove or indicate the match of an item.
|
||||
redef Cluster::worker2manager_events += /^Intel::(cluster_new_item|remove_item|match_no_items)$/;
|
||||
|
||||
@if ( Cluster::local_node_type() == Cluster::MANAGER )
|
||||
event Intel::match_no_items(s: Seen) &priority=5
|
||||
{
|
||||
event Intel::match(s, Intel::get_items(s));
|
||||
}
|
||||
|
||||
# Handling of new worker nodes.
|
||||
event remote_connection_handshake_done(p: event_peer)
|
||||
{
|
||||
# When a worker connects, send it the complete minimal data store.
|
||||
|
@ -39,15 +37,22 @@ event remote_connection_handshake_done(p: event_peer)
|
|||
send_id(p, "Intel::min_data_store");
|
||||
}
|
||||
}
|
||||
@endif
|
||||
|
||||
event Intel::cluster_new_item(item: Intel::Item) &priority=5
|
||||
# Handling of matches triggered by worker nodes.
|
||||
event Intel::match_no_items(s: Seen) &priority=5
|
||||
{
|
||||
# Ignore locally generated events to avoid event storms.
|
||||
if ( is_remote_event() )
|
||||
Intel::insert(item);
|
||||
if ( Intel::find(s) )
|
||||
event Intel::match(s, Intel::get_items(s));
|
||||
}
|
||||
|
||||
# Handling of item removal triggered by worker nodes.
|
||||
event Intel::remove_item(item: Item, purge_indicator: bool)
|
||||
{
|
||||
remove(item, purge_indicator);
|
||||
}
|
||||
@endif
|
||||
|
||||
# Handling of item insertion.
|
||||
event Intel::new_item(item: Intel::Item) &priority=5
|
||||
{
|
||||
# The cluster manager always rebroadcasts intelligence.
|
||||
|
@ -59,3 +64,11 @@ event Intel::new_item(item: Intel::Item) &priority=5
|
|||
event Intel::cluster_new_item(item);
|
||||
}
|
||||
}
|
||||
|
||||
# Handling of item insertion by remote node.
|
||||
event Intel::cluster_new_item(item: Intel::Item) &priority=5
|
||||
{
|
||||
# Ignore locally generated events to avoid event storms.
|
||||
if ( is_remote_event() )
|
||||
Intel::insert(item);
|
||||
}
|
||||
|
|
84
scripts/base/frameworks/intel/files.bro
Normal file
84
scripts/base/frameworks/intel/files.bro
Normal file
|
@ -0,0 +1,84 @@
|
|||
##! File analysis framework integration for the intelligence framework. This
|
||||
##! script manages file information in intelligence framework datastructures.
|
||||
|
||||
@load ./main
|
||||
|
||||
module Intel;
|
||||
|
||||
export {
|
||||
## Enum type to represent various types of intelligence data.
|
||||
redef enum Type += {
|
||||
## File hash which is non-hash type specific. It's up to the
|
||||
## user to query for any relevant hash types.
|
||||
FILE_HASH,
|
||||
## File name. Typically with protocols with definite
|
||||
## indications of a file name.
|
||||
FILE_NAME,
|
||||
};
|
||||
|
||||
## Information about a piece of "seen" data.
|
||||
redef record Seen += {
|
||||
## If the data was discovered within a file, the file record
|
||||
## should go here to provide context to the data.
|
||||
f: fa_file &optional;
|
||||
## If the data was discovered within a file, the file uid should
|
||||
## go here to provide context to the data. If the file record *f*
|
||||
## is provided, this will be automatically filled out.
|
||||
fuid: string &optional;
|
||||
};
|
||||
|
||||
## Record used for the logging framework representing a positive
|
||||
## hit within the intelligence framework.
|
||||
redef record Info += {
|
||||
## If a file was associated with this intelligence hit,
|
||||
## this is the uid for the file.
|
||||
fuid: string &log &optional;
|
||||
## A mime type if the intelligence hit is related to a file.
|
||||
## If the $f field is provided this will be automatically filled
|
||||
## out.
|
||||
file_mime_type: string &log &optional;
|
||||
## Frequently files can be "described" to give a bit more context.
|
||||
## If the $f field is provided this field will be automatically
|
||||
## filled out.
|
||||
file_desc: string &log &optional;
|
||||
};
|
||||
}
|
||||
|
||||
# Add file information to matches if available.
|
||||
hook extend_match(info: Info, s: Seen, items: set[Item]) &priority=5
|
||||
{
|
||||
if ( s?$f )
|
||||
{
|
||||
s$fuid = s$f$id;
|
||||
|
||||
if ( s$f?$conns && |s$f$conns| == 1 )
|
||||
{
|
||||
for ( cid in s$f$conns )
|
||||
s$conn = s$f$conns[cid];
|
||||
}
|
||||
|
||||
if ( ! info?$file_mime_type && s$f?$info && s$f$info?$mime_type )
|
||||
info$file_mime_type = s$f$info$mime_type;
|
||||
|
||||
if ( ! info?$file_desc )
|
||||
info$file_desc = Files::describe(s$f);
|
||||
}
|
||||
|
||||
if ( s?$fuid )
|
||||
info$fuid = s$fuid;
|
||||
|
||||
if ( s?$conn )
|
||||
{
|
||||
s$uid = s$conn$uid;
|
||||
info$id = s$conn$id;
|
||||
}
|
||||
|
||||
if ( s?$uid )
|
||||
info$uid = s$uid;
|
||||
|
||||
for ( item in items )
|
||||
{
|
||||
add info$sources[item$meta$source];
|
||||
add info$matched[item$indicator_type];
|
||||
}
|
||||
}
|
|
@ -1,11 +1,14 @@
|
|||
##! Input handling for the intelligence framework. This script implements the
|
||||
##! import of intelligence data from files using the input framework.
|
||||
|
||||
@load ./main
|
||||
|
||||
module Intel;
|
||||
|
||||
export {
|
||||
## Intelligence files that will be read off disk. The files are
|
||||
## reread every time they are updated so updates must be atomic with
|
||||
## "mv" instead of writing the file in place.
|
||||
## Intelligence files that will be read off disk. The files are
|
||||
## reread every time they are updated so updates must be atomic
|
||||
## with "mv" instead of writing the file in place.
|
||||
const read_files: set[string] = {} &redef;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
##! The intelligence framework provides a way to store and query IP addresses,
|
||||
##! and strings (with a str_type). Metadata can
|
||||
##! also be associated with the intelligence, like for making more informed
|
||||
##! decisions about matching and handling of intelligence.
|
||||
##! The intelligence framework provides a way to store and query intelligence data
|
||||
##! (e.g. IP addresses, URLs and hashes). The intelligence items can be associated
|
||||
##! with metadata to allow informed decisions about matching and handling.
|
||||
|
||||
@load base/frameworks/notice
|
||||
|
||||
|
@ -14,6 +13,8 @@ export {
|
|||
type Type: enum {
|
||||
## An IP address.
|
||||
ADDR,
|
||||
## A subnet in CIDR notation.
|
||||
SUBNET,
|
||||
## A complete URL without the prefix ``"http://"``.
|
||||
URL,
|
||||
## Software name.
|
||||
|
@ -24,24 +25,20 @@ export {
|
|||
DOMAIN,
|
||||
## A user name.
|
||||
USER_NAME,
|
||||
## File hash which is non-hash type specific. It's up to the
|
||||
## user to query for any relevant hash types.
|
||||
FILE_HASH,
|
||||
## File name. Typically with protocols with definite
|
||||
## indications of a file name.
|
||||
FILE_NAME,
|
||||
## Certificate SHA-1 hash.
|
||||
CERT_HASH,
|
||||
## Public key MD5 hash. (SSH server host keys are a good example.)
|
||||
PUBKEY_HASH,
|
||||
};
|
||||
|
||||
|
||||
## Set of intelligence data types.
|
||||
type TypeSet: set[Type];
|
||||
|
||||
## Data about an :bro:type:`Intel::Item`.
|
||||
type MetaData: record {
|
||||
## An arbitrary string value representing the data source.
|
||||
## Typically, the convention for this field will be the source
|
||||
## name and feed name separated by a hyphen.
|
||||
## For example: "source1-c&c".
|
||||
## An arbitrary string value representing the data source. This
|
||||
## value is used as unique key to identify a metadata record in
|
||||
## the scope of a single intelligence item.
|
||||
source: string;
|
||||
## A freeform description for the data.
|
||||
desc: string &optional;
|
||||
|
@ -57,7 +54,7 @@ export {
|
|||
## The type of data that the indicator field represents.
|
||||
indicator_type: Type;
|
||||
|
||||
## Metadata for the item. Typically represents more deeply
|
||||
## Metadata for the item. Typically represents more deeply
|
||||
## descriptive data for a piece of intelligence.
|
||||
meta: MetaData;
|
||||
};
|
||||
|
@ -96,15 +93,6 @@ export {
|
|||
## If the *conn* field is provided, this will be automatically
|
||||
## filled out.
|
||||
uid: string &optional;
|
||||
|
||||
## If the data was discovered within a file, the file record
|
||||
## should go here to provide context to the data.
|
||||
f: fa_file &optional;
|
||||
|
||||
## If the data was discovered within a file, the file uid should
|
||||
## go here to provide context to the data. If the *f* field is
|
||||
## provided, this will be automatically filled out.
|
||||
fuid: string &optional;
|
||||
};
|
||||
|
||||
## Record used for the logging framework representing a positive
|
||||
|
@ -120,41 +108,70 @@ export {
|
|||
## this is the conn_id for the connection.
|
||||
id: conn_id &log &optional;
|
||||
|
||||
## If a file was associated with this intelligence hit,
|
||||
## this is the uid for the file.
|
||||
fuid: string &log &optional;
|
||||
|
||||
## A mime type if the intelligence hit is related to a file.
|
||||
## If the $f field is provided this will be automatically filled
|
||||
## out.
|
||||
file_mime_type: string &log &optional;
|
||||
## Frequently files can be "described" to give a bit more context.
|
||||
## If the $f field is provided this field will be automatically
|
||||
## filled out.
|
||||
file_desc: string &log &optional;
|
||||
|
||||
## Where the data was seen.
|
||||
seen: Seen &log;
|
||||
## Which indicator types matched.
|
||||
matched: TypeSet &log;
|
||||
## Sources which supplied data that resulted in this match.
|
||||
sources: set[string] &log &default=string_set();
|
||||
};
|
||||
|
||||
## Intelligence data manipulation function.
|
||||
## Function to insert intelligence data. If the indicator is already
|
||||
## present, the associated metadata will be added to the indicator. If
|
||||
## the indicator already contains a metadata record from the same source,
|
||||
## the existing metadata record will be updated.
|
||||
global insert: function(item: Item);
|
||||
|
||||
## Function to remove intelligence data. If purge_indicator is set, the
|
||||
## given metadata is ignored and the indicator is removed completely.
|
||||
global remove: function(item: Item, purge_indicator: bool &default = F);
|
||||
|
||||
## Function to declare discovery of a piece of data in order to check
|
||||
## it against known intelligence for matches.
|
||||
global seen: function(s: Seen);
|
||||
|
||||
## Event to represent a match in the intelligence data from data that
|
||||
## was seen. On clusters there is no assurance as to where this event
|
||||
## was seen. On clusters there is no assurance as to when this event
|
||||
## will be generated so do not assume that arbitrary global state beyond
|
||||
## the given data will be available.
|
||||
##
|
||||
## This is the primary mechanism where a user will take actions based on
|
||||
## data within the intelligence framework.
|
||||
## This is the primary mechanism where a user may take actions based on
|
||||
## data provided by the intelligence framework.
|
||||
global match: event(s: Seen, items: set[Item]);
|
||||
|
||||
## This hook can be used to influence the logging of intelligence hits
|
||||
## (e.g. by adding data to the Info record). The default information is
|
||||
## added with a priority of 5.
|
||||
##
|
||||
## info: The Info record that will be logged.
|
||||
##
|
||||
## s: Information about the data seen.
|
||||
##
|
||||
## items: The intel items that match the seen data.
|
||||
##
|
||||
## In case the hook execution is terminated using break, the match will
|
||||
## not be logged.
|
||||
global extend_match: hook(info: Info, s: Seen, items: set[Item]);
|
||||
|
||||
## The expiration timeout for intelligence items. Once an item expires, the
|
||||
## :bro:id:`Intel::item_expired` hook is called. Reinsertion of an item
|
||||
## resets the timeout. A negative value disables expiration of intelligence
|
||||
## items.
|
||||
const item_expiration = -1 min &redef;
|
||||
|
||||
## This hook can be used to handle expiration of intelligence items.
|
||||
##
|
||||
## indicator: The indicator of the expired item.
|
||||
##
|
||||
## indicator_type: The indicator type of the expired item.
|
||||
##
|
||||
## metas: The set of metadata describing the expired item.
|
||||
##
|
||||
## If all hook handlers are executed, the expiration timeout will be reset.
|
||||
## Otherwise, if one of the handlers terminates using break, the item will
|
||||
## be removed.
|
||||
global item_expired: hook(indicator: string, indicator_type: Type, metas: set[MetaData]);
|
||||
|
||||
global log_intel: event(rec: Info);
|
||||
}
|
||||
|
||||
|
@ -163,16 +180,26 @@ global match_no_items: event(s: Seen);
|
|||
|
||||
# Internal events for cluster data distribution.
|
||||
global new_item: event(item: Item);
|
||||
global updated_item: event(item: Item);
|
||||
global remove_item: event(item: Item, purge_indicator: bool);
|
||||
global purge_item: event(item: Item);
|
||||
|
||||
# Optionally store metadata. This is used internally depending on
|
||||
# if this is a cluster deployment or not.
|
||||
const have_full_data = T &redef;
|
||||
|
||||
# Table of metadata, indexed by source string.
|
||||
type MetaDataTable: table[string] of MetaData;
|
||||
|
||||
# Expiration handlers.
|
||||
global expire_host_data: function(data: table[addr] of MetaDataTable, idx: addr): interval;
|
||||
global expire_subnet_data: function(data: table[subnet] of MetaDataTable, idx: subnet): interval;
|
||||
global expire_string_data: function(data: table[string, Type] of MetaDataTable, idx: any): interval;
|
||||
|
||||
# The in memory data structure for holding intelligence.
|
||||
type DataStore: record {
|
||||
host_data: table[addr] of set[MetaData];
|
||||
string_data: table[string, Type] of set[MetaData];
|
||||
host_data: table[addr] of MetaDataTable &write_expire=item_expiration &expire_func=expire_host_data;
|
||||
subnet_data: table[subnet] of MetaDataTable &write_expire=item_expiration &expire_func=expire_subnet_data;
|
||||
string_data: table[string, Type] of MetaDataTable &write_expire=item_expiration &expire_func=expire_string_data;
|
||||
};
|
||||
global data_store: DataStore &redef;
|
||||
|
||||
|
@ -181,6 +208,7 @@ global data_store: DataStore &redef;
|
|||
# a minimal amount of data for the full match to happen on the manager.
|
||||
type MinDataStore: record {
|
||||
host_data: set[addr];
|
||||
subnet_data: set[subnet];
|
||||
string_data: set[string, Type];
|
||||
};
|
||||
global min_data_store: MinDataStore &redef;
|
||||
|
@ -191,33 +219,78 @@ event bro_init() &priority=5
|
|||
Log::create_stream(LOG, [$columns=Info, $ev=log_intel, $path="intel"]);
|
||||
}
|
||||
|
||||
# Function that abstracts expiration of different types.
|
||||
function expire_item(indicator: string, indicator_type: Type, metas: set[MetaData]): interval
|
||||
{
|
||||
if ( hook item_expired(indicator, indicator_type, metas) )
|
||||
return item_expiration;
|
||||
else
|
||||
remove([$indicator=indicator, $indicator_type=indicator_type, $meta=[$source=""]], T);
|
||||
return 0 sec;
|
||||
}
|
||||
|
||||
# Expiration handler definitions.
|
||||
function expire_host_data(data: table[addr] of MetaDataTable, idx: addr): interval
|
||||
{
|
||||
local meta_tbl: MetaDataTable = data[idx];
|
||||
local metas: set[MetaData];
|
||||
for ( src in meta_tbl )
|
||||
add metas[meta_tbl[src]];
|
||||
|
||||
return expire_item(cat(idx), ADDR, metas);
|
||||
}
|
||||
|
||||
function expire_subnet_data(data: table[subnet] of MetaDataTable, idx: subnet): interval
|
||||
{
|
||||
local meta_tbl: MetaDataTable = data[idx];
|
||||
local metas: set[MetaData];
|
||||
for ( src in meta_tbl )
|
||||
add metas[meta_tbl[src]];
|
||||
|
||||
return expire_item(cat(idx), ADDR, metas);
|
||||
}
|
||||
|
||||
function expire_string_data(data: table[string, Type] of MetaDataTable, idx: any): interval
|
||||
{
|
||||
local indicator: string;
|
||||
local indicator_type: Type;
|
||||
[indicator, indicator_type] = idx;
|
||||
|
||||
local meta_tbl: MetaDataTable = data[indicator, indicator_type];
|
||||
local metas: set[MetaData];
|
||||
for ( src in meta_tbl )
|
||||
add metas[meta_tbl[src]];
|
||||
|
||||
return expire_item(indicator, indicator_type, metas);
|
||||
}
|
||||
|
||||
# Function to check for intelligence hits.
|
||||
function find(s: Seen): bool
|
||||
{
|
||||
local ds = have_full_data ? data_store : min_data_store;
|
||||
|
||||
if ( s?$host )
|
||||
{
|
||||
return ((s$host in min_data_store$host_data) ||
|
||||
(have_full_data && s$host in data_store$host_data));
|
||||
}
|
||||
else if ( ([to_lower(s$indicator), s$indicator_type] in min_data_store$string_data) ||
|
||||
(have_full_data && [to_lower(s$indicator), s$indicator_type] in data_store$string_data) )
|
||||
{
|
||||
return T;
|
||||
return ((s$host in ds$host_data) ||
|
||||
(|matching_subnets(addr_to_subnet(s$host), ds$subnet_data)| > 0));
|
||||
}
|
||||
else
|
||||
{
|
||||
return F;
|
||||
return ([to_lower(s$indicator), s$indicator_type] in ds$string_data);
|
||||
}
|
||||
}
|
||||
|
||||
# Function to retrieve intelligence items while abstracting from different
|
||||
# data stores for different indicator types.
|
||||
function get_items(s: Seen): set[Item]
|
||||
{
|
||||
local return_data: set[Item];
|
||||
local mt: MetaDataTable;
|
||||
|
||||
if ( ! have_full_data )
|
||||
{
|
||||
# A reporter warning should be generated here because this function
|
||||
# should never be called from a host that doesn't have the full data.
|
||||
# TODO: do a reporter warning.
|
||||
Reporter::warning(fmt("Intel::get_items was called from a host (%s) that doesn't have the full data.",
|
||||
peer_description));
|
||||
return return_data;
|
||||
}
|
||||
|
||||
|
@ -226,11 +299,23 @@ function get_items(s: Seen): set[Item]
|
|||
# See if the host is known about and it has meta values
|
||||
if ( s$host in data_store$host_data )
|
||||
{
|
||||
for ( m in data_store$host_data[s$host] )
|
||||
mt = data_store$host_data[s$host];
|
||||
for ( m in mt )
|
||||
{
|
||||
add return_data[Item($indicator=cat(s$host), $indicator_type=ADDR, $meta=m)];
|
||||
add return_data[Item($indicator=cat(s$host), $indicator_type=ADDR, $meta=mt[m])];
|
||||
}
|
||||
}
|
||||
# See if the host is part of a known subnet, which has meta values
|
||||
local nets: table[subnet] of MetaDataTable;
|
||||
nets = filter_subnet_table(addr_to_subnet(s$host), data_store$subnet_data);
|
||||
for ( n in nets )
|
||||
{
|
||||
mt = nets[n];
|
||||
for ( m in mt )
|
||||
{
|
||||
add return_data[Item($indicator=cat(n), $indicator_type=SUBNET, $meta=mt[m])];
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -238,9 +323,10 @@ function get_items(s: Seen): set[Item]
|
|||
# See if the string is known about and it has meta values
|
||||
if ( [lower_indicator, s$indicator_type] in data_store$string_data )
|
||||
{
|
||||
for ( m in data_store$string_data[lower_indicator, s$indicator_type] )
|
||||
mt = data_store$string_data[lower_indicator, s$indicator_type];
|
||||
for ( m in mt )
|
||||
{
|
||||
add return_data[Item($indicator=s$indicator, $indicator_type=s$indicator_type, $meta=m)];
|
||||
add return_data[Item($indicator=s$indicator, $indicator_type=s$indicator_type, $meta=mt[m])];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -275,64 +361,20 @@ function Intel::seen(s: Seen)
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
function has_meta(check: MetaData, metas: set[MetaData]): bool
|
||||
{
|
||||
local check_hash = md5_hash(check);
|
||||
for ( m in metas )
|
||||
{
|
||||
if ( check_hash == md5_hash(m) )
|
||||
return T;
|
||||
}
|
||||
|
||||
# The records must not be equivalent if we made it this far.
|
||||
return F;
|
||||
}
|
||||
|
||||
event Intel::match(s: Seen, items: set[Item]) &priority=5
|
||||
{
|
||||
local info = Info($ts=network_time(), $seen=s);
|
||||
local info = Info($ts=network_time(), $seen=s, $matched=TypeSet());
|
||||
|
||||
if ( s?$f )
|
||||
{
|
||||
s$fuid = s$f$id;
|
||||
|
||||
if ( s$f?$conns && |s$f$conns| == 1 )
|
||||
{
|
||||
for ( cid in s$f$conns )
|
||||
s$conn = s$f$conns[cid];
|
||||
}
|
||||
|
||||
if ( ! info?$file_mime_type && s$f?$info && s$f$info?$mime_type )
|
||||
info$file_mime_type = s$f$info$mime_type;
|
||||
|
||||
if ( ! info?$file_desc )
|
||||
info$file_desc = Files::describe(s$f);
|
||||
}
|
||||
|
||||
if ( s?$fuid )
|
||||
info$fuid = s$fuid;
|
||||
|
||||
if ( s?$conn )
|
||||
{
|
||||
s$uid = s$conn$uid;
|
||||
info$id = s$conn$id;
|
||||
}
|
||||
|
||||
if ( s?$uid )
|
||||
info$uid = s$uid;
|
||||
|
||||
for ( item in items )
|
||||
add info$sources[item$meta$source];
|
||||
|
||||
Log::write(Intel::LOG, info);
|
||||
if ( hook extend_match(info, s, items) )
|
||||
Log::write(Intel::LOG, info);
|
||||
}
|
||||
|
||||
function insert(item: Item)
|
||||
{
|
||||
# Create and fill out the meta data item.
|
||||
# Create and fill out the metadata item.
|
||||
local meta = item$meta;
|
||||
local metas: set[MetaData];
|
||||
local meta_tbl: table [string] of MetaData;
|
||||
local is_new: bool = T;
|
||||
|
||||
# All intelligence is case insensitive at the moment.
|
||||
local lower_indicator = to_lower(item$indicator);
|
||||
|
@ -343,51 +385,133 @@ function insert(item: Item)
|
|||
if ( have_full_data )
|
||||
{
|
||||
if ( host !in data_store$host_data )
|
||||
data_store$host_data[host] = set();
|
||||
data_store$host_data[host] = table();
|
||||
else
|
||||
is_new = F;
|
||||
|
||||
metas = data_store$host_data[host];
|
||||
meta_tbl = data_store$host_data[host];
|
||||
}
|
||||
|
||||
add min_data_store$host_data[host];
|
||||
}
|
||||
else if ( item$indicator_type == SUBNET )
|
||||
{
|
||||
local net = to_subnet(item$indicator);
|
||||
if ( have_full_data )
|
||||
{
|
||||
if ( !check_subnet(net, data_store$subnet_data) )
|
||||
data_store$subnet_data[net] = table();
|
||||
else
|
||||
is_new = F;
|
||||
|
||||
meta_tbl = data_store$subnet_data[net];
|
||||
}
|
||||
|
||||
add min_data_store$subnet_data[net];
|
||||
}
|
||||
else
|
||||
{
|
||||
if ( have_full_data )
|
||||
{
|
||||
if ( [lower_indicator, item$indicator_type] !in data_store$string_data )
|
||||
data_store$string_data[lower_indicator, item$indicator_type] = set();
|
||||
data_store$string_data[lower_indicator, item$indicator_type] = table();
|
||||
else
|
||||
is_new = F;
|
||||
|
||||
metas = data_store$string_data[lower_indicator, item$indicator_type];
|
||||
meta_tbl = data_store$string_data[lower_indicator, item$indicator_type];
|
||||
}
|
||||
|
||||
add min_data_store$string_data[lower_indicator, item$indicator_type];
|
||||
}
|
||||
|
||||
local updated = F;
|
||||
if ( have_full_data )
|
||||
{
|
||||
for ( m in metas )
|
||||
{
|
||||
if ( meta$source == m$source )
|
||||
{
|
||||
if ( has_meta(meta, metas) )
|
||||
{
|
||||
# It's the same item being inserted again.
|
||||
return;
|
||||
}
|
||||
else
|
||||
{
|
||||
# Same source, different metadata means updated item.
|
||||
updated = T;
|
||||
}
|
||||
}
|
||||
}
|
||||
add metas[item$meta];
|
||||
# Insert new metadata or update if already present
|
||||
meta_tbl[meta$source] = meta;
|
||||
}
|
||||
|
||||
if ( updated )
|
||||
event Intel::updated_item(item);
|
||||
else
|
||||
|
||||
if ( is_new )
|
||||
# Trigger insert for cluster in case the item is new
|
||||
# or insert was called on a worker
|
||||
event Intel::new_item(item);
|
||||
}
|
||||
|
||||
|
||||
# Function to remove metadata of an item. The function returns T
|
||||
# if there is no metadata left for the given indicator.
|
||||
function remove_meta_data(item: Item): bool
|
||||
{
|
||||
if ( ! have_full_data )
|
||||
{
|
||||
Reporter::warning(fmt("Intel::remove_meta_data was called from a host (%s) that doesn't have the full data.",
|
||||
peer_description));
|
||||
return F;
|
||||
}
|
||||
|
||||
switch ( item$indicator_type )
|
||||
{
|
||||
case ADDR:
|
||||
local host = to_addr(item$indicator);
|
||||
delete data_store$host_data[host][item$meta$source];
|
||||
return (|data_store$host_data[host]| == 0);
|
||||
case SUBNET:
|
||||
local net = to_subnet(item$indicator);
|
||||
delete data_store$subnet_data[net][item$meta$source];
|
||||
return (|data_store$subnet_data[net]| == 0);
|
||||
default:
|
||||
delete data_store$string_data[item$indicator, item$indicator_type][item$meta$source];
|
||||
return (|data_store$string_data[item$indicator, item$indicator_type]| == 0);
|
||||
}
|
||||
}
|
||||
|
||||
function remove(item: Item, purge_indicator: bool)
|
||||
{
|
||||
# Delegate removal if we are on a worker
|
||||
if ( !have_full_data )
|
||||
{
|
||||
event Intel::remove_item(item, purge_indicator);
|
||||
return;
|
||||
}
|
||||
|
||||
# Remove metadata from manager's data store
|
||||
local no_meta_data = remove_meta_data(item);
|
||||
# Remove whole indicator if necessary
|
||||
if ( no_meta_data || purge_indicator )
|
||||
{
|
||||
switch ( item$indicator_type )
|
||||
{
|
||||
case ADDR:
|
||||
local host = to_addr(item$indicator);
|
||||
delete data_store$host_data[host];
|
||||
break;
|
||||
case SUBNET:
|
||||
local net = to_subnet(item$indicator);
|
||||
delete data_store$subnet_data[net];
|
||||
break;
|
||||
default:
|
||||
delete data_store$string_data[item$indicator, item$indicator_type];
|
||||
break;
|
||||
}
|
||||
# Trigger deletion in minimal data stores
|
||||
event Intel::purge_item(item);
|
||||
}
|
||||
}
|
||||
|
||||
# Handling of indicator removal in minimal data stores.
|
||||
event purge_item(item: Item)
|
||||
{
|
||||
switch ( item$indicator_type )
|
||||
{
|
||||
case ADDR:
|
||||
local host = to_addr(item$indicator);
|
||||
delete min_data_store$host_data[host];
|
||||
break;
|
||||
case SUBNET:
|
||||
local net = to_subnet(item$indicator);
|
||||
delete min_data_store$subnet_data[net];
|
||||
break;
|
||||
default:
|
||||
delete min_data_store$string_data[item$indicator, item$indicator_type];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
14
scripts/policy/frameworks/intel/do_expire.bro
Normal file
14
scripts/policy/frameworks/intel/do_expire.bro
Normal file
|
@ -0,0 +1,14 @@
|
|||
##! This script enables expiration for intelligence items.
|
||||
|
||||
@load base/frameworks/intel
|
||||
|
||||
module Intel;
|
||||
|
||||
redef Intel::item_expiration = 10min;
|
||||
|
||||
hook item_expired(indicator: string, indicator_type: Type,
|
||||
metas: set[MetaData]) &priority=-10
|
||||
{
|
||||
# Trigger removal of the expired item.
|
||||
break;
|
||||
}
|
|
@ -1,3 +1,4 @@
|
|||
##! This script enables notice generation for intelligence matches.
|
||||
|
||||
@load base/frameworks/intel
|
||||
@load base/frameworks/notice
|
||||
|
@ -6,14 +7,14 @@ module Intel;
|
|||
|
||||
export {
|
||||
redef enum Notice::Type += {
|
||||
## Intel::Notice is a notice that happens when an intelligence
|
||||
## Intel::Notice is a notice that happens when an intelligence
|
||||
## indicator is denoted to be notice-worthy.
|
||||
Intel::Notice
|
||||
};
|
||||
|
||||
redef record Intel::MetaData += {
|
||||
## A boolean value to allow the data itself to represent
|
||||
## if the indicator that this metadata is attached to
|
||||
## if the indicator that this metadata is attached to
|
||||
## is notice worthy.
|
||||
do_notice: bool &default=F;
|
||||
|
||||
|
@ -29,15 +30,42 @@ event Intel::match(s: Seen, items: set[Item])
|
|||
for ( item in items )
|
||||
{
|
||||
if ( item$meta$do_notice &&
|
||||
(! item$meta?$if_in || s$where == item$meta$if_in) )
|
||||
(! item$meta?$if_in || s$where == item$meta$if_in) )
|
||||
{
|
||||
local n = Notice::Info($note=Intel::Notice,
|
||||
$msg=fmt("Intel hit on %s at %s", s$indicator, s$where),
|
||||
$sub=s$indicator);
|
||||
$msg = fmt("Intel hit on %s at %s", s$indicator, s$where),
|
||||
$sub = s$indicator);
|
||||
local service_str = "";
|
||||
|
||||
if ( s?$conn )
|
||||
{
|
||||
n$conn = s$conn;
|
||||
|
||||
# Add identifier composed of indicator, originator's and responder's IP,
|
||||
# without considering the direction of the flow.
|
||||
local intel_id = s$indicator;
|
||||
if( s$conn?$id )
|
||||
{
|
||||
if( s$conn$id$orig_h < s$conn$id$resp_h)
|
||||
intel_id = cat(intel_id, s$conn$id$orig_h, s$conn$id$resp_h);
|
||||
else
|
||||
intel_id = cat(intel_id, s$conn$id$resp_h, s$conn$id$orig_h);
|
||||
}
|
||||
n$identifier = intel_id;
|
||||
|
||||
if ( s$conn?$service )
|
||||
{
|
||||
for ( service in s$conn$service )
|
||||
service_str = cat(service_str, service, " ");
|
||||
}
|
||||
}
|
||||
|
||||
# Add additional information to the generated mail
|
||||
local mail_ext = vector(
|
||||
fmt("Service: %s\n", service_str),
|
||||
fmt("Intel source: %s\n", item$meta$source));
|
||||
n$email_body_sections = mail_ext;
|
||||
|
||||
NOTICE(n);
|
||||
}
|
||||
}
|
||||
|
|
30
scripts/policy/frameworks/intel/whitelist.bro
Normal file
30
scripts/policy/frameworks/intel/whitelist.bro
Normal file
|
@ -0,0 +1,30 @@
|
|||
##! This script enables whitelisting for intelligence items.
|
||||
|
||||
@load base/frameworks/intel
|
||||
|
||||
module Intel;
|
||||
|
||||
export {
|
||||
redef record Intel::MetaData += {
|
||||
## A boolean value to indicate whether the item is whitelisted.
|
||||
whitelist: bool &default=F;
|
||||
};
|
||||
}
|
||||
|
||||
hook Intel::extend_match(info: Info, s: Seen, items: set[Item]) &priority=9
|
||||
{
|
||||
local whitelisted = F;
|
||||
for ( item in items )
|
||||
{
|
||||
if ( item$meta$whitelist )
|
||||
{
|
||||
whitelisted = T;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ( whitelisted )
|
||||
# Prevent logging
|
||||
break;
|
||||
}
|
||||
|
|
@ -15,6 +15,8 @@
|
|||
@load frameworks/dpd/detect-protocols.bro
|
||||
@load frameworks/dpd/packet-segment-logging.bro
|
||||
@load frameworks/intel/do_notice.bro
|
||||
@load frameworks/intel/do_expire.bro
|
||||
@load frameworks/intel/whitelist.bro
|
||||
@load frameworks/intel/seen/__load__.bro
|
||||
@load frameworks/intel/seen/conn-established.bro
|
||||
@load frameworks/intel/seen/dns.bro
|
||||
|
|
|
@ -5,7 +5,9 @@
|
|||
#include <sys/time.h>
|
||||
#include <netinet/in.h>
|
||||
#include <assert.h>
|
||||
|
||||
#include <openssl/ssl.h>
|
||||
#include <openssl/err.h>
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
|
|
|
@ -14,9 +14,6 @@
|
|||
# include <krb5.h>
|
||||
#endif
|
||||
|
||||
#include <openssl/ssl.h>
|
||||
#include <openssl/err.h>
|
||||
|
||||
class CompressedChunkedIO;
|
||||
|
||||
// #define DEBUG_COMMUNICATION 10
|
||||
|
@ -244,6 +241,11 @@ private:
|
|||
bro::Flare read_flare;
|
||||
};
|
||||
|
||||
// From OpenSSL. We forward-declare these here to avoid introducing a
|
||||
// dependency on OpenSSL headers just for this header file.
|
||||
typedef struct ssl_ctx_st SSL_CTX;
|
||||
typedef struct ssl_st SSL;
|
||||
|
||||
// Chunked I/O using an SSL connection.
|
||||
class ChunkedIOSSL : public ChunkedIO {
|
||||
public:
|
||||
|
|
|
@ -18,6 +18,10 @@
|
|||
#include <errno.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include <openssl/evp.h>
|
||||
#include <openssl/pem.h>
|
||||
#include <openssl/err.h>
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include "File.h"
|
||||
|
|
10
src/File.h
10
src/File.h
|
@ -11,11 +11,11 @@
|
|||
# ifdef NEED_KRB5_H
|
||||
# include <krb5.h>
|
||||
# endif // NEED_KRB5_H
|
||||
extern "C" {
|
||||
# include "openssl/evp.h"
|
||||
# include "openssl/pem.h"
|
||||
# include "openssl/err.h"
|
||||
}
|
||||
|
||||
// From OpenSSL. We forward-declare these here to avoid introducing a
|
||||
// dependency on OpenSSL headers just for this header file.
|
||||
typedef struct evp_pkey_st EVP_PKEY;
|
||||
typedef struct evp_cipher_ctx_st EVP_CIPHER_CTX;
|
||||
|
||||
class BroType;
|
||||
class RotateTimer;
|
||||
|
|
14
src/Tag.cc
14
src/Tag.cc
|
@ -65,6 +65,20 @@ Tag& Tag::operator=(const Tag& other)
|
|||
return *this;
|
||||
}
|
||||
|
||||
Tag& Tag::operator=(const Tag&& other)
|
||||
{
|
||||
if ( this != &other )
|
||||
{
|
||||
type = other.type;
|
||||
subtype = other.subtype;
|
||||
Unref(val);
|
||||
val = other.val;
|
||||
other.val = nullptr;
|
||||
}
|
||||
|
||||
return *this;
|
||||
}
|
||||
|
||||
EnumVal* Tag::AsEnumVal(EnumType* etype) const
|
||||
{
|
||||
if ( ! val )
|
||||
|
|
|
@ -77,6 +77,11 @@ protected:
|
|||
*/
|
||||
Tag& operator=(const Tag& other);
|
||||
|
||||
/**
|
||||
* Move assignment operator.
|
||||
*/
|
||||
Tag& operator=(const Tag&& other);
|
||||
|
||||
/**
|
||||
* Compares two tags for equality.
|
||||
*/
|
||||
|
|
|
@ -2278,7 +2278,7 @@ double TableVal::GetExpireTime()
|
|||
Unref(timeout);
|
||||
|
||||
if ( interval >= 0 )
|
||||
return timeout->AsInterval();
|
||||
return interval;
|
||||
|
||||
expire_time = 0;
|
||||
|
||||
|
@ -2327,6 +2327,7 @@ double TableVal::CallExpireFunc(Val* idx)
|
|||
if ( vf->Type()->Tag() != TYPE_FUNC )
|
||||
{
|
||||
Unref(vf);
|
||||
delete_vals(vl);
|
||||
vf->Error("not a function");
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@ -39,7 +39,7 @@ AddrVal* network_address_to_val(const ASN1Encoding* na)
|
|||
|
||||
const u_char* data = reinterpret_cast<const u_char*>(bs.data());
|
||||
uint32 network_order = extract_uint32(data);
|
||||
return new AddrVal(network_order);
|
||||
return new AddrVal(ntohl(network_order));
|
||||
}
|
||||
|
||||
Val* asn1_obj_to_val(const ASN1Encoding* obj)
|
||||
|
|
|
@ -16,7 +16,7 @@ SSH_Analyzer::SSH_Analyzer(Connection* c)
|
|||
{
|
||||
interp = new binpac::SSH::SSH_Conn(this);
|
||||
had_gap = false;
|
||||
auth_decision_made = false;
|
||||
auth_decision = AUTH_UNKNOWN;
|
||||
skipped_banner = false;
|
||||
service_accept_size = 0;
|
||||
userauth_failure_size = 0;
|
||||
|
@ -60,7 +60,7 @@ void SSH_Analyzer::DeliverStream(int len, const u_char* data, bool orig)
|
|||
BifEvent::generate_ssh_encrypted_packet(interp->bro_analyzer(), interp->bro_analyzer()->Conn(),
|
||||
orig, len);
|
||||
|
||||
if ( ! auth_decision_made )
|
||||
if ( auth_decision != AUTH_SUCCESS )
|
||||
ProcessEncrypted(len, orig);
|
||||
|
||||
return;
|
||||
|
@ -105,9 +105,10 @@ void SSH_Analyzer::ProcessEncrypted(int len, bool orig)
|
|||
// -16.
|
||||
if ( ! userauth_failure_size && (len + 16 == service_accept_size) )
|
||||
{
|
||||
auth_decision_made = true;
|
||||
if ( ssh_auth_successful )
|
||||
BifEvent::generate_ssh_auth_successful(interp->bro_analyzer(), interp->bro_analyzer()->Conn(), true);
|
||||
|
||||
auth_decision = AUTH_SUCCESS;
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -131,17 +132,20 @@ void SSH_Analyzer::ProcessEncrypted(int len, bool orig)
|
|||
// another packet of the same size.
|
||||
if ( len == userauth_failure_size )
|
||||
{
|
||||
if ( ssh_auth_failed )
|
||||
if ( ssh_auth_failed && auth_decision != AUTH_FAILURE )
|
||||
BifEvent::generate_ssh_auth_failed(interp->bro_analyzer(), interp->bro_analyzer()->Conn());
|
||||
|
||||
auth_decision = AUTH_FAILURE;
|
||||
return;
|
||||
}
|
||||
|
||||
// ...or a success packet.
|
||||
if ( len - service_accept_size == -16 )
|
||||
{
|
||||
auth_decision_made = true;
|
||||
if ( ssh_auth_successful )
|
||||
BifEvent::generate_ssh_auth_successful(interp->bro_analyzer(), interp->bro_analyzer()->Conn(), false);
|
||||
|
||||
auth_decision = AUTH_SUCCESS;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,12 +35,14 @@ namespace analyzer {
|
|||
bool had_gap;
|
||||
|
||||
// Packet analysis stuff
|
||||
bool auth_decision_made;
|
||||
bool skipped_banner;
|
||||
|
||||
int service_accept_size;
|
||||
int userauth_failure_size;
|
||||
|
||||
enum AuthDecision {
|
||||
AUTH_UNKNOWN, AUTH_FAILURE, AUTH_SUCCESS
|
||||
} auth_decision;
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
@ -355,7 +355,7 @@ void TCP_Analyzer::Done()
|
|||
{
|
||||
Analyzer::Done();
|
||||
|
||||
if ( connection_pending && is_active && ! BothClosed() )
|
||||
if ( terminating && connection_pending && is_active && ! BothClosed() )
|
||||
Event(connection_pending);
|
||||
|
||||
LOOP_OVER_GIVEN_CHILDREN(i, packet_children)
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
#include <openssl/x509v3.h>
|
||||
#include <openssl/asn1.h>
|
||||
#include <openssl/opensslconf.h>
|
||||
#include <openssl/err.h>
|
||||
|
||||
using namespace file_analysis;
|
||||
|
||||
|
|
|
@ -6,6 +6,8 @@
|
|||
#include <openssl/asn1.h>
|
||||
#include <openssl/x509_vfy.h>
|
||||
#include <openssl/ocsp.h>
|
||||
#include <openssl/pem.h>
|
||||
#include <openssl/err.h>
|
||||
|
||||
// This is the indexed map of X509 certificate stores.
|
||||
static map<Val*, X509_STORE*> x509_stores;
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
#include "Net.h"
|
||||
#include "Sessions.h"
|
||||
|
||||
#include "pcap/const.bif.h"
|
||||
#include "pcap/pcap.bif.h"
|
||||
|
||||
using namespace iosource;
|
||||
|
||||
|
|
|
@ -5,6 +5,5 @@ include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DI
|
|||
|
||||
bro_plugin_begin(Bro Pcap)
|
||||
bro_plugin_cc(Source.cc Dumper.cc Plugin.cc)
|
||||
bif_target(functions.bif)
|
||||
bif_target(const.bif)
|
||||
bif_target(pcap.bif)
|
||||
bro_plugin_end()
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
#include "../PktSrc.h"
|
||||
#include "../../Net.h"
|
||||
|
||||
#include "const.bif.h"
|
||||
#include "pcap.bif.h"
|
||||
|
||||
using namespace iosource::pcap;
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
#include "Source.h"
|
||||
#include "iosource/Packet.h"
|
||||
|
||||
#include "const.bif.h"
|
||||
#include "pcap.bif.h"
|
||||
|
||||
#ifdef HAVE_PCAP_INT_H
|
||||
#include <pcap-int.h>
|
||||
|
|
|
@ -1,4 +0,0 @@
|
|||
|
||||
|
||||
const Pcap::snaplen: count;
|
||||
const Pcap::bufsize: count;
|
|
@ -1,6 +1,9 @@
|
|||
|
||||
module Pcap;
|
||||
|
||||
const snaplen: count;
|
||||
const bufsize: count;
|
||||
|
||||
## Precompiles a PCAP filter and binds it to a given identifier.
|
||||
##
|
||||
## id: The PCAP identifier to reference the filter *s* later on.
|
|
@ -640,8 +640,6 @@ bool Manager::RemoveFilter(EnumVal* id, string name)
|
|||
|
||||
bool Manager::Write(EnumVal* id, RecordVal* columns)
|
||||
{
|
||||
bool error = false;
|
||||
|
||||
Stream* stream = FindStream(id);
|
||||
if ( ! stream )
|
||||
return false;
|
||||
|
@ -850,9 +848,6 @@ bool Manager::Write(EnumVal* id, RecordVal* columns)
|
|||
|
||||
Unref(columns);
|
||||
|
||||
if ( error )
|
||||
RemoveDisabledWriters(stream);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
|
|
@ -18,7 +18,8 @@ extern "C" {
|
|||
}
|
||||
#endif
|
||||
|
||||
#include <openssl/md5.h>
|
||||
#include <openssl/ssl.h>
|
||||
#include <openssl/err.h>
|
||||
|
||||
extern "C" void OPENSSL_add_all_algorithms_conf(void);
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path loaded_scripts
|
||||
#open 2016-07-12-17-20-37
|
||||
#open 2016-08-01-16-08-40
|
||||
#fields name
|
||||
#types string
|
||||
scripts/base/init-bare.bro
|
||||
|
@ -52,7 +52,7 @@ scripts/base/init-bare.bro
|
|||
build/scripts/base/bif/__load__.bro
|
||||
build/scripts/base/bif/stats.bif.bro
|
||||
build/scripts/base/bif/broxygen.bif.bro
|
||||
build/scripts/base/bif/functions.bif.bro
|
||||
build/scripts/base/bif/pcap.bif.bro
|
||||
build/scripts/base/bif/bloom-filter.bif.bro
|
||||
build/scripts/base/bif/cardinality-counter.bif.bro
|
||||
build/scripts/base/bif/top-k.bif.bro
|
||||
|
@ -166,4 +166,4 @@ scripts/base/init-bare.bro
|
|||
build/scripts/base/bif/plugins/Bro_SQLiteWriter.sqlite.bif.bro
|
||||
scripts/policy/misc/loaded-scripts.bro
|
||||
scripts/base/utils/paths.bro
|
||||
#close 2016-07-12-17-20-37
|
||||
#close 2016-08-01-16-08-40
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path loaded_scripts
|
||||
#open 2016-07-12-17-20-44
|
||||
#open 2016-06-15-19-16-09
|
||||
#fields name
|
||||
#types string
|
||||
scripts/base/init-bare.bro
|
||||
|
@ -52,7 +52,7 @@ scripts/base/init-bare.bro
|
|||
build/scripts/base/bif/__load__.bro
|
||||
build/scripts/base/bif/stats.bif.bro
|
||||
build/scripts/base/bif/broxygen.bif.bro
|
||||
build/scripts/base/bif/functions.bif.bro
|
||||
build/scripts/base/bif/pcap.bif.bro
|
||||
build/scripts/base/bif/bloom-filter.bif.bro
|
||||
build/scripts/base/bif/cardinality-counter.bif.bro
|
||||
build/scripts/base/bif/top-k.bif.bro
|
||||
|
@ -234,6 +234,7 @@ scripts/base/init-default.bro
|
|||
scripts/base/frameworks/communication/main.bro
|
||||
scripts/base/frameworks/intel/__load__.bro
|
||||
scripts/base/frameworks/intel/main.bro
|
||||
scripts/base/frameworks/intel/files.bro
|
||||
scripts/base/frameworks/intel/input.bro
|
||||
scripts/base/frameworks/sumstats/__load__.bro
|
||||
scripts/base/frameworks/sumstats/main.bro
|
||||
|
@ -352,4 +353,4 @@ scripts/base/init-default.bro
|
|||
scripts/base/misc/find-checksum-offloading.bro
|
||||
scripts/base/misc/find-filtered-trace.bro
|
||||
scripts/policy/misc/loaded-scripts.bro
|
||||
#close 2016-07-12-17-20-44
|
||||
#close 2016-06-15-19-16-09
|
||||
|
|
|
@ -15,13 +15,13 @@ Accessed table nums: two; three
|
|||
Accessed table nets: two; zero, three
|
||||
Time: 7.0 secs 518.0 msecs 828.0 usecs
|
||||
|
||||
Expired Subnet: 192.168.4.0/24 --> four at 8.0 secs 835.0 msecs 30.0 usecs
|
||||
Expired Subnet: 192.168.1.0/24 --> one at 8.0 secs 835.0 msecs 30.0 usecs
|
||||
Expired Num: 4 --> four at 8.0 secs 835.0 msecs 30.0 usecs
|
||||
Expired Num: 1 --> one at 8.0 secs 835.0 msecs 30.0 usecs
|
||||
Expired Num: 0 --> zero at 8.0 secs 835.0 msecs 30.0 usecs
|
||||
Expired Subnet: 192.168.4.0/24 --> four at 8.0 secs 835.0 msecs 30.0 usecs
|
||||
Expired Subnet: 192.168.1.0/24 --> one at 8.0 secs 835.0 msecs 30.0 usecs
|
||||
Expired Num: 2 --> two at 15.0 secs 150.0 msecs 681.0 usecs
|
||||
Expired Num: 3 --> three at 15.0 secs 150.0 msecs 681.0 usecs
|
||||
Expired Subnet: 192.168.0.0/16 --> zero at 15.0 secs 150.0 msecs 681.0 usecs
|
||||
Expired Subnet: 192.168.3.0/24 --> three at 15.0 secs 150.0 msecs 681.0 usecs
|
||||
Expired Subnet: 192.168.2.0/24 --> two at 15.0 secs 150.0 msecs 681.0 usecs
|
||||
Expired Num: 2 --> two at 15.0 secs 150.0 msecs 681.0 usecs
|
||||
Expired Num: 3 --> three at 15.0 secs 150.0 msecs 681.0 usecs
|
||||
|
|
|
@ -247,7 +247,7 @@
|
|||
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::__create_stream, <frame>, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::__write, <frame>, (PacketFilter::LOG, [ts=1468432721.269887, node=bro, filter=ip or not ip, init=T, success=T])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::__write, <frame>, (PacketFilter::LOG, [ts=1470067740.18502, node=bro, filter=ip or not ip, init=T, success=T])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Cluster::LOG)) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Communication::LOG)) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::add_default_filter, <frame>, (Conn::LOG)) -> <no result>
|
||||
|
@ -377,7 +377,7 @@
|
|||
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::create_stream, <frame>, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::write, <frame>, (PacketFilter::LOG, [ts=1468432721.269887, node=bro, filter=ip or not ip, init=T, success=T])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Log::write, <frame>, (PacketFilter::LOG, [ts=1470067740.18502, node=bro, filter=ip or not ip, init=T, success=T])) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(NetControl::check_plugins, <frame>, ()) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(NetControl::init, <null>, ()) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(Notice::want_pp, <frame>, ()) -> <no result>
|
||||
|
@ -410,7 +410,7 @@
|
|||
0.000000 MetaHookPost CallFunction(reading_live_traffic, <frame>, ()) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(reading_traces, <frame>, ()) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(set_to_regex, <frame>, ({}, (^\.?|\.)(~~)$)) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(strftime, <frame>, (%Y, 1468432721.269431)) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(strftime, <frame>, (%Y, 1470067740.184492)) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(string_to_pattern, <frame>, ((^\.?|\.)()$, F)) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(sub, <frame>, ((^\.?|\.)(~~)$, <...>/, )) -> <no result>
|
||||
0.000000 MetaHookPost CallFunction(to_count, <frame>, (2016)) -> <no result>
|
||||
|
@ -552,7 +552,6 @@
|
|||
0.000000 MetaHookPost LoadFile(./exec) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./file_analysis.bif.bro) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./files) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./functions.bif.bro) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./gridftp) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./hll_unique) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./hooks.bif.bro) -> -1
|
||||
|
@ -576,6 +575,7 @@
|
|||
0.000000 MetaHookPost LoadFile(./openflow) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./packetfilter) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./patterns) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./pcap.bif.bro) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./plugin) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./plugins) -> -1
|
||||
0.000000 MetaHookPost LoadFile(./polling) -> -1
|
||||
|
@ -964,7 +964,7 @@
|
|||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird]))
|
||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509]))
|
||||
0.000000 MetaHookPre CallFunction(Log::__create_stream, <frame>, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql]))
|
||||
0.000000 MetaHookPre CallFunction(Log::__write, <frame>, (PacketFilter::LOG, [ts=1468432721.269887, node=bro, filter=ip or not ip, init=T, success=T]))
|
||||
0.000000 MetaHookPre CallFunction(Log::__write, <frame>, (PacketFilter::LOG, [ts=1470067740.18502, node=bro, filter=ip or not ip, init=T, success=T]))
|
||||
0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Cluster::LOG))
|
||||
0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Communication::LOG))
|
||||
0.000000 MetaHookPre CallFunction(Log::add_default_filter, <frame>, (Conn::LOG))
|
||||
|
@ -1094,7 +1094,7 @@
|
|||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird]))
|
||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509]))
|
||||
0.000000 MetaHookPre CallFunction(Log::create_stream, <frame>, (mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql]))
|
||||
0.000000 MetaHookPre CallFunction(Log::write, <frame>, (PacketFilter::LOG, [ts=1468432721.269887, node=bro, filter=ip or not ip, init=T, success=T]))
|
||||
0.000000 MetaHookPre CallFunction(Log::write, <frame>, (PacketFilter::LOG, [ts=1470067740.18502, node=bro, filter=ip or not ip, init=T, success=T]))
|
||||
0.000000 MetaHookPre CallFunction(NetControl::check_plugins, <frame>, ())
|
||||
0.000000 MetaHookPre CallFunction(NetControl::init, <null>, ())
|
||||
0.000000 MetaHookPre CallFunction(Notice::want_pp, <frame>, ())
|
||||
|
@ -1127,7 +1127,7 @@
|
|||
0.000000 MetaHookPre CallFunction(reading_live_traffic, <frame>, ())
|
||||
0.000000 MetaHookPre CallFunction(reading_traces, <frame>, ())
|
||||
0.000000 MetaHookPre CallFunction(set_to_regex, <frame>, ({}, (^\.?|\.)(~~)$))
|
||||
0.000000 MetaHookPre CallFunction(strftime, <frame>, (%Y, 1468432721.269431))
|
||||
0.000000 MetaHookPre CallFunction(strftime, <frame>, (%Y, 1470067740.184492))
|
||||
0.000000 MetaHookPre CallFunction(string_to_pattern, <frame>, ((^\.?|\.)()$, F))
|
||||
0.000000 MetaHookPre CallFunction(sub, <frame>, ((^\.?|\.)(~~)$, <...>/, ))
|
||||
0.000000 MetaHookPre CallFunction(to_count, <frame>, (2016))
|
||||
|
@ -1269,7 +1269,6 @@
|
|||
0.000000 MetaHookPre LoadFile(./exec)
|
||||
0.000000 MetaHookPre LoadFile(./file_analysis.bif.bro)
|
||||
0.000000 MetaHookPre LoadFile(./files)
|
||||
0.000000 MetaHookPre LoadFile(./functions.bif.bro)
|
||||
0.000000 MetaHookPre LoadFile(./gridftp)
|
||||
0.000000 MetaHookPre LoadFile(./hll_unique)
|
||||
0.000000 MetaHookPre LoadFile(./hooks.bif.bro)
|
||||
|
@ -1293,6 +1292,7 @@
|
|||
0.000000 MetaHookPre LoadFile(./openflow)
|
||||
0.000000 MetaHookPre LoadFile(./packetfilter)
|
||||
0.000000 MetaHookPre LoadFile(./patterns)
|
||||
0.000000 MetaHookPre LoadFile(./pcap.bif.bro)
|
||||
0.000000 MetaHookPre LoadFile(./plugin)
|
||||
0.000000 MetaHookPre LoadFile(./plugins)
|
||||
0.000000 MetaHookPre LoadFile(./polling)
|
||||
|
@ -1680,7 +1680,7 @@
|
|||
0.000000 | HookCallFunction Log::__create_stream(Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird])
|
||||
0.000000 | HookCallFunction Log::__create_stream(X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509])
|
||||
0.000000 | HookCallFunction Log::__create_stream(mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql])
|
||||
0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1468432721.269887, node=bro, filter=ip or not ip, init=T, success=T])
|
||||
0.000000 | HookCallFunction Log::__write(PacketFilter::LOG, [ts=1470067740.18502, node=bro, filter=ip or not ip, init=T, success=T])
|
||||
0.000000 | HookCallFunction Log::add_default_filter(Cluster::LOG)
|
||||
0.000000 | HookCallFunction Log::add_default_filter(Communication::LOG)
|
||||
0.000000 | HookCallFunction Log::add_default_filter(Conn::LOG)
|
||||
|
@ -1810,7 +1810,7 @@
|
|||
0.000000 | HookCallFunction Log::create_stream(Weird::LOG, [columns=<no value description>, ev=Weird::log_weird, path=weird])
|
||||
0.000000 | HookCallFunction Log::create_stream(X509::LOG, [columns=<no value description>, ev=X509::log_x509, path=x509])
|
||||
0.000000 | HookCallFunction Log::create_stream(mysql::LOG, [columns=<no value description>, ev=MySQL::log_mysql, path=mysql])
|
||||
0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1468432721.269887, node=bro, filter=ip or not ip, init=T, success=T])
|
||||
0.000000 | HookCallFunction Log::write(PacketFilter::LOG, [ts=1470067740.18502, node=bro, filter=ip or not ip, init=T, success=T])
|
||||
0.000000 | HookCallFunction NetControl::check_plugins()
|
||||
0.000000 | HookCallFunction NetControl::init()
|
||||
0.000000 | HookCallFunction Notice::want_pp()
|
||||
|
@ -1843,7 +1843,7 @@
|
|||
0.000000 | HookCallFunction reading_live_traffic()
|
||||
0.000000 | HookCallFunction reading_traces()
|
||||
0.000000 | HookCallFunction set_to_regex({}, (^\.?|\.)(~~)$)
|
||||
0.000000 | HookCallFunction strftime(%Y, 1468432721.269431)
|
||||
0.000000 | HookCallFunction strftime(%Y, 1470067740.184492)
|
||||
0.000000 | HookCallFunction string_to_pattern((^\.?|\.)()$, F)
|
||||
0.000000 | HookCallFunction sub((^\.?|\.)(~~)$, <...>/, )
|
||||
0.000000 | HookCallFunction to_count(2016)
|
||||
|
|
|
@ -3,8 +3,8 @@
|
|||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2014-09-23-16-13-39
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc seen.indicator seen.indicator_type seen.where seen.node sources
|
||||
#types time string addr port addr port string string string string enum enum string set[string]
|
||||
1411488819.555114 - - - - - - - - 123.123.123.123 Intel::ADDR Intel::IN_ANYWHERE worker-2 worker-1
|
||||
#close 2014-09-23-16-13-49
|
||||
#open 2016-06-15-19-11-27
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1466017887.060652 - - - - - 123.123.123.123 Intel::ADDR Intel::IN_ANYWHERE worker-2 Intel::ADDR worker-1 - - -
|
||||
#close 2016-06-15-19-11-36
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
#separator \x09
|
||||
#set_separator ,
|
||||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2016-06-15-19-11-06
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1466017866.348490 - - - - - 1.2.3.4 Intel::ADDR SOMEWHERE bro Intel::ADDR source1 - - -
|
||||
1466017867.349583 - - - - - 1.2.3.4 Intel::ADDR SOMEWHERE bro Intel::ADDR source1 - - -
|
||||
1466017868.349656 - - - - - 1.2.3.4 Intel::ADDR SOMEWHERE bro Intel::ADDR source1 - - -
|
||||
#close 2016-06-15-19-11-12
|
||||
Trigger: 1.2.3.4
|
||||
Seen: 1.2.3.4
|
||||
Trigger: 1.2.3.4
|
||||
Seen: 1.2.3.4
|
||||
Trigger: 1.2.3.4
|
||||
Seen: 1.2.3.4
|
||||
Expired: 1.2.3.4
|
||||
Trigger: 1.2.3.4
|
||||
Trigger: 1.2.3.4
|
||||
Trigger: 1.2.3.4
|
|
@ -3,9 +3,9 @@
|
|||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2014-09-23-16-14-49
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc seen.indicator seen.indicator_type seen.where seen.node sources
|
||||
#types time string addr port addr port string string string string enum enum string set[string]
|
||||
1411488889.571819 - - - - - - - - e@mail.com Intel::EMAIL SOMEWHERE bro source1
|
||||
1411488889.571819 - - - - - - - - 1.2.3.4 Intel::ADDR SOMEWHERE bro source1
|
||||
#close 2014-09-23-16-14-49
|
||||
#open 2016-06-15-19-12-26
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1466017946.413077 - - - - - e@mail.com Intel::EMAIL SOMEWHERE bro Intel::EMAIL source1 - - -
|
||||
1466017946.413077 - - - - - 1.2.3.4 Intel::ADDR SOMEWHERE bro Intel::ADDR source1 - - -
|
||||
#close 2016-06-15-19-12-26
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
#separator \x09
|
||||
#set_separator ,
|
||||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2016-08-05-13-13-14
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1470402794.307931 - - - - - 192.168.1.1 Intel::ADDR SOMEWHERE bro Intel::ADDR source1 - - -
|
||||
1470402794.307931 - - - - - 192.168.2.1 Intel::ADDR SOMEWHERE bro Intel::SUBNET source1 - - -
|
||||
1470402794.307931 - - - - - 192.168.142.1 Intel::ADDR SOMEWHERE bro Intel::SUBNET,Intel::ADDR source1 - - -
|
||||
#close 2016-08-05-13-13-14
|
||||
|
||||
Seen: [indicator=192.168.1.1, indicator_type=Intel::ADDR, host=192.168.1.1, where=SOMEWHERE, node=bro, conn=<uninitialized>, uid=<uninitialized>, f=<uninitialized>, fuid=<uninitialized>]
|
||||
Item: [indicator=192.168.1.1, indicator_type=Intel::ADDR, meta=[source=source1, desc=this host is just plain baaad, url=http://some-data-distributor.com/1]]
|
||||
|
||||
Seen: [indicator=192.168.2.1, indicator_type=Intel::ADDR, host=192.168.2.1, where=SOMEWHERE, node=bro, conn=<uninitialized>, uid=<uninitialized>, f=<uninitialized>, fuid=<uninitialized>]
|
||||
Item: [indicator=192.168.2.0/24, indicator_type=Intel::SUBNET, meta=[source=source1, desc=this subnetwork is just plain baaad, url=http://some-data-distributor.com/2]]
|
||||
|
||||
Seen: [indicator=192.168.142.1, indicator_type=Intel::ADDR, host=192.168.142.1, where=SOMEWHERE, node=bro, conn=<uninitialized>, uid=<uninitialized>, f=<uninitialized>, fuid=<uninitialized>]
|
||||
Item: [indicator=192.168.142.1, indicator_type=Intel::ADDR, meta=[source=source1, desc=this host is just plain baaad, url=http://some-data-distributor.com/3]]
|
||||
Item: [indicator=192.168.128.0/18, indicator_type=Intel::SUBNET, meta=[source=source1, desc=this subnetwork might be baaad, url=http://some-data-distributor.com/5]]
|
||||
Item: [indicator=192.168.142.0/26, indicator_type=Intel::SUBNET, meta=[source=source1, desc=this subnetwork is inside, url=http://some-data-distributor.com/4]]
|
||||
Item: [indicator=192.168.142.0/24, indicator_type=Intel::SUBNET, meta=[source=source1, desc=this subnetwork is baaad, url=http://some-data-distributor.com/4]]
|
|
@ -3,11 +3,11 @@
|
|||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2014-09-23-16-15-00
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc seen.indicator seen.indicator_type seen.where seen.node sources
|
||||
#types time string addr port addr port string string string string enum enum string set[string]
|
||||
1411488900.900403 - - - - - - - - 1.2.3.4 Intel::ADDR Intel::IN_A_TEST worker-1 source1
|
||||
1411488900.900403 - - - - - - - - e@mail.com Intel::EMAIL Intel::IN_A_TEST worker-1 source1
|
||||
1411488901.923543 - - - - - - - - 1.2.3.4 Intel::ADDR Intel::IN_A_TEST worker-2 source1
|
||||
1411488901.923543 - - - - - - - - e@mail.com Intel::EMAIL Intel::IN_A_TEST worker-2 source1
|
||||
#close 2014-09-23-16-15-09
|
||||
#open 2016-06-15-19-14-30
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1466018070.494693 - - - - - 1.2.3.4 Intel::ADDR Intel::IN_A_TEST worker-1 Intel::ADDR source1 - - -
|
||||
1466018070.494693 - - - - - e@mail.com Intel::EMAIL Intel::IN_A_TEST worker-1 Intel::EMAIL source1 - - -
|
||||
1466018071.505800 - - - - - 1.2.3.4 Intel::ADDR Intel::IN_A_TEST worker-2 Intel::ADDR source1 - - -
|
||||
1466018071.505800 - - - - - e@mail.com Intel::EMAIL Intel::IN_A_TEST worker-2 Intel::EMAIL source1 - - -
|
||||
#close 2016-06-15-19-14-39
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
Purging 192.168.0.1.
|
||||
Purging 192.168.0.2.
|
||||
Removing 192.168.1.2 (source: source1).
|
||||
Removing 192.168.1.2 (source: source2).
|
||||
Purging 192.168.1.2.
|
||||
Logging intel hit!
|
|
@ -0,0 +1,10 @@
|
|||
#separator \x09
|
||||
#set_separator ,
|
||||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2016-06-15-19-10-09
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1466017809.810005 - - - - - 10.10.10.10 Intel::ADDR Intel::IN_ANYWHERE worker-1 Intel::ADDR end - - -
|
||||
#close 2016-06-15-19-10-19
|
|
@ -0,0 +1,5 @@
|
|||
Removing 192.168.1.2 (source: source1).
|
||||
Removing 192.168.1.2 (source: source2).
|
||||
Purging 192.168.0.1.
|
||||
Purging 192.168.0.2.
|
||||
Purging 192.168.1.2.
|
|
@ -0,0 +1,25 @@
|
|||
#separator \x09
|
||||
#set_separator ,
|
||||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2016-08-05-13-14-12
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1470402852.531769 - - - - - 1.2.3.4 Intel::ADDR SOMEWHERE bro Intel::ADDR source1 - - -
|
||||
1470402855.546089 - - - - - 1.2.3.4 Intel::ADDR SOMEWHERE bro Intel::ADDR source1,source2 - - -
|
||||
1470402855.546089 - - - - - 4.3.2.1 Intel::ADDR SOMEWHERE bro Intel::ADDR source2 - - -
|
||||
1470402858.547977 - - - - - 1.2.3.4 Intel::ADDR SOMEWHERE bro Intel::ADDR source1,source2 - - -
|
||||
1470402858.547977 - - - - - 4.3.2.1 Intel::ADDR SOMEWHERE bro Intel::ADDR source2 - - -
|
||||
#close 2016-08-05-13-14-18
|
||||
#separator \x09
|
||||
#set_separator ,
|
||||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path notice
|
||||
#open 2016-08-05-13-14-18
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc proto note msg sub src dst p n peer_descr actions suppress_for dropped remote_location.country_code remote_location.region remote_location.city remote_location.latitude remote_location.longitude
|
||||
#types time string addr port addr port string string string enum enum string string addr addr port count string set[enum] interval bool string string string double double
|
||||
1470402858.547977 - - - - - - - - - Intel::Notice Intel hit on 1.2.3.4 at SOMEWHERE 1.2.3.4 - - - - bro Notice::ACTION_LOG 3600.000000 F - - - - -
|
||||
1470402858.547977 - - - - - - - - - Intel::Notice Intel hit on 4.3.2.1 at SOMEWHERE 4.3.2.1 - - - - bro Notice::ACTION_LOG 3600.000000 F - - - - -
|
||||
#close 2016-08-05-13-14-18
|
|
@ -0,0 +1,30 @@
|
|||
138.68.0.1
|
||||
138.68.14.240
|
||||
169.254.169.254
|
||||
10.46.0.5
|
||||
127.0.0.1
|
||||
138.68.10.203
|
||||
255.255.0.0
|
||||
255.0.0.0
|
||||
255.255.240.0
|
||||
0.0.0.0
|
||||
10.46.0.0
|
||||
138.68.0.0
|
||||
138.68.0.1
|
||||
0.0.0.0
|
||||
0.0.0.0
|
||||
0.0.0.0
|
||||
255.255.0.0
|
||||
255.255.240.0
|
||||
138.68.0.1
|
||||
138.68.14.240
|
||||
169.254.169.254
|
||||
0.0.0.0
|
||||
10.46.0.0
|
||||
138.68.0.0
|
||||
0.0.0.0
|
||||
255.255.0.0
|
||||
255.255.240.0
|
||||
138.68.0.1
|
||||
0.0.0.0
|
||||
0.0.0.0
|
|
@ -3,7 +3,7 @@ snmp_trap
|
|||
is_orig: T
|
||||
[community=public]
|
||||
enterprise: 1.3.6.1.4.1.31337.0
|
||||
agent: 1.0.0.127
|
||||
agent: 127.0.0.1
|
||||
generic_trap: 0
|
||||
specific_trap: 0
|
||||
time_stamp: 0
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
C0LAHyvtKSQHyJxIl
|
||||
C37jN32gN3y3AZzyf6
|
||||
C3eiCBGOLw3VtHfOj
|
||||
C4J4Th3PJpwUYZZ6gc
|
||||
CHhAvVGS1DHFjwGM9
|
||||
CP5puj4I8PtEU4qzYg
|
||||
CUM0KZ3MLUfNB0cl11
|
||||
ClEkJM2Vm5giqnMf4h
|
||||
CmES5u32sYpV7JYN
|
||||
CtPZjS20MLrsMUOJi2
|
||||
CwjjYJ2WqgTbAqiHl6
|
|
@ -0,0 +1 @@
|
|||
1469573308.013636, Connection pending, [orig_h=192.168.4.149, orig_p=55881/tcp, resp_h=74.125.239.152, resp_p=443/tcp], ShADad
|
|
@ -3,23 +3,23 @@
|
|||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2016-07-13-16-17-18
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc seen.indicator seen.indicator_type seen.where seen.node sources
|
||||
#types time string addr port addr port string string string string enum enum string set[string]
|
||||
1416942644.593119 CHhAvVGS1DHFjwGM9 192.168.4.149 49422 23.92.19.75 443 F0txuw2pvrkZOn04a8 application/pkix-cert 23.92.19.75:443/tcp www.pantz.org Intel::DOMAIN X509::IN_CERT bro source1
|
||||
#close 2016-07-13-16-17-18
|
||||
#open 2016-08-05-13-22-37
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1416942644.593119 CHhAvVGS1DHFjwGM9 192.168.4.149 49422 23.92.19.75 443 www.pantz.org Intel::DOMAIN X509::IN_CERT bro Intel::DOMAIN source1 Fi6J8q3lDJpbQWAnvi application/pkix-cert 23.92.19.75:443/tcp
|
||||
#close 2016-08-05-13-22-37
|
||||
#separator \x09
|
||||
#set_separator ,
|
||||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2016-07-13-16-17-19
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc seen.indicator seen.indicator_type seen.where seen.node sources
|
||||
#types time string addr port addr port string string string string enum enum string set[string]
|
||||
1170717505.735416 CHhAvVGS1DHFjwGM9 192.150.187.164 58868 194.127.84.106 443 FeCwNK3rzqPnZ7eBQ5 application/pkix-cert 194.127.84.106:443/tcp 2c322ae2b7fe91391345e070b63668978bb1c9da Intel::CERT_HASH X509::IN_CERT bro source1
|
||||
1170717505.934612 CHhAvVGS1DHFjwGM9 192.150.187.164 58868 194.127.84.106 443 FeCwNK3rzqPnZ7eBQ5 - - www.dresdner-privat.de Intel::DOMAIN X509::IN_CERT bro source1
|
||||
1170717508.883051 ClEkJM2Vm5giqnMf4h 192.150.187.164 58869 194.127.84.106 443 FjkLnG4s34DVZlaBNc application/pkix-cert 194.127.84.106:443/tcp 2c322ae2b7fe91391345e070b63668978bb1c9da Intel::CERT_HASH X509::IN_CERT bro source1
|
||||
1170717509.082241 ClEkJM2Vm5giqnMf4h 192.150.187.164 58869 194.127.84.106 443 FjkLnG4s34DVZlaBNc - - www.dresdner-privat.de Intel::DOMAIN X509::IN_CERT bro source1
|
||||
1170717511.909717 C4J4Th3PJpwUYZZ6gc 192.150.187.164 58870 194.127.84.106 443 FQXAWgI2FB5STbrff application/pkix-cert 194.127.84.106:443/tcp 2c322ae2b7fe91391345e070b63668978bb1c9da Intel::CERT_HASH X509::IN_CERT bro source1
|
||||
1170717512.108799 C4J4Th3PJpwUYZZ6gc 192.150.187.164 58870 194.127.84.106 443 FQXAWgI2FB5STbrff - - www.dresdner-privat.de Intel::DOMAIN X509::IN_CERT bro source1
|
||||
#close 2016-07-13-16-17-19
|
||||
#open 2016-08-05-13-22-37
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1170717505.735416 CHhAvVGS1DHFjwGM9 192.150.187.164 58868 194.127.84.106 443 2c322ae2b7fe91391345e070b63668978bb1c9da Intel::CERT_HASH X509::IN_CERT bro Intel::CERT_HASH source1 FeCwNK3rzqPnZ7eBQ5 application/pkix-cert 194.127.84.106:443/tcp
|
||||
1170717505.934612 CHhAvVGS1DHFjwGM9 192.150.187.164 58868 194.127.84.106 443 www.dresdner-privat.de Intel::DOMAIN X509::IN_CERT bro Intel::DOMAIN source1 FeCwNK3rzqPnZ7eBQ5 - -
|
||||
1170717508.883051 ClEkJM2Vm5giqnMf4h 192.150.187.164 58869 194.127.84.106 443 2c322ae2b7fe91391345e070b63668978bb1c9da Intel::CERT_HASH X509::IN_CERT bro Intel::CERT_HASH source1 FjkLnG4s34DVZlaBNc application/pkix-cert 194.127.84.106:443/tcp
|
||||
1170717509.082241 ClEkJM2Vm5giqnMf4h 192.150.187.164 58869 194.127.84.106 443 www.dresdner-privat.de Intel::DOMAIN X509::IN_CERT bro Intel::DOMAIN source1 FjkLnG4s34DVZlaBNc - -
|
||||
1170717511.909717 C4J4Th3PJpwUYZZ6gc 192.150.187.164 58870 194.127.84.106 443 2c322ae2b7fe91391345e070b63668978bb1c9da Intel::CERT_HASH X509::IN_CERT bro Intel::CERT_HASH source1 FQXAWgI2FB5STbrff application/pkix-cert 194.127.84.106:443/tcp
|
||||
1170717512.108799 C4J4Th3PJpwUYZZ6gc 192.150.187.164 58870 194.127.84.106 443 www.dresdner-privat.de Intel::DOMAIN X509::IN_CERT bro Intel::DOMAIN source1 FQXAWgI2FB5STbrff - -
|
||||
#close 2016-08-05-13-22-38
|
||||
|
|
|
@ -3,14 +3,14 @@
|
|||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2016-07-13-16-17-20
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc seen.indicator seen.indicator_type seen.where seen.node sources
|
||||
#types time string addr port addr port string string string string enum enum string set[string]
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 - - - jan.grashofer@cern.ch Intel::EMAIL SMTP::IN_RCPT_TO bro source1
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 - - - jan.grashoefer@cern.ch Intel::EMAIL SMTP::IN_FROM bro source1
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 - - - jan.grashoefer@gmail.com Intel::EMAIL SMTP::IN_TO bro source1
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 - - - jan.grashofer@cern.ch Intel::EMAIL SMTP::IN_TO bro source1
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 - - - addr-spec@example.com Intel::EMAIL SMTP::IN_TO bro source1
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 - - - name-addr@example.com Intel::EMAIL SMTP::IN_TO bro source1
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 - - - angle-addr@example.com Intel::EMAIL SMTP::IN_TO bro source1
|
||||
#close 2016-07-13-16-17-20
|
||||
#open 2016-08-05-13-22-00
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 jan.grashofer@cern.ch Intel::EMAIL SMTP::IN_RCPT_TO bro Intel::EMAIL source1 - - -
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 jan.grashoefer@cern.ch Intel::EMAIL SMTP::IN_FROM bro Intel::EMAIL source1 - - -
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 jan.grashoefer@gmail.com Intel::EMAIL SMTP::IN_TO bro Intel::EMAIL source1 - - -
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 jan.grashofer@cern.ch Intel::EMAIL SMTP::IN_TO bro Intel::EMAIL source1 - - -
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 addr-spec@example.com Intel::EMAIL SMTP::IN_TO bro Intel::EMAIL source1 - - -
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 name-addr@example.com Intel::EMAIL SMTP::IN_TO bro Intel::EMAIL source1 - - -
|
||||
1449610263.071201 CHhAvVGS1DHFjwGM9 188.184.129.157 35119 188.184.36.24 25 angle-addr@example.com Intel::EMAIL SMTP::IN_TO bro Intel::EMAIL source1 - - -
|
||||
#close 2016-08-05-13-22-00
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
#separator \x09
|
||||
#set_separator ,
|
||||
#empty_field (empty)
|
||||
#unset_field -
|
||||
#path intel
|
||||
#open 2016-08-05-13-24-29
|
||||
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
|
||||
#types time string addr port addr port string enum enum string set[enum] set[string] string string string
|
||||
1300475168.853899 CmES5u32sYpV7JYN 141.142.220.118 43927 141.142.2.2 53 upload.wikimedia.org Intel::DOMAIN DNS::IN_REQUEST bro Intel::DOMAIN source1 - - -
|
||||
1300475168.854837 C37jN32gN3y3AZzyf6 141.142.220.118 40526 141.142.2.2 53 upload.wikimedia.org Intel::DOMAIN DNS::IN_REQUEST bro Intel::DOMAIN source1 - - -
|
||||
1300475168.857956 C0LAHyvtKSQHyJxIl 141.142.220.118 32902 141.142.2.2 53 upload.wikimedia.org Intel::DOMAIN DNS::IN_REQUEST bro Intel::DOMAIN source1 - - -
|
||||
1300475168.858713 C9rXSW3KSpTYvPrlI1 141.142.220.118 59714 141.142.2.2 53 upload.wikimedia.org Intel::DOMAIN DNS::IN_REQUEST bro Intel::DOMAIN source1 - - -
|
||||
1300475168.891644 C9mvWx3ezztgzcexV7 141.142.220.118 58206 141.142.2.2 53 upload.wikimedia.org Intel::DOMAIN DNS::IN_REQUEST bro Intel::DOMAIN source1 - - -
|
||||
1300475168.892414 C7fIlMZDuRiqjpYbb 141.142.220.118 59746 141.142.2.2 53 upload.wikimedia.org Intel::DOMAIN DNS::IN_REQUEST bro Intel::DOMAIN source1 - - -
|
||||
1300475168.893988 CpmdRlaUoJLN3uIRa 141.142.220.118 45000 141.142.2.2 53 upload.wikimedia.org Intel::DOMAIN DNS::IN_REQUEST bro Intel::DOMAIN source1 - - -
|
||||
1300475168.894787 CqlVyW1YwZ15RhTBc4 141.142.220.118 48128 141.142.2.2 53 upload.wikimedia.org Intel::DOMAIN DNS::IN_REQUEST bro Intel::DOMAIN source1 - - -
|
||||
1300475168.916018 CwjjYJ2WqgTbAqiHl6 141.142.220.118 49997 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475168.916183 C3eiCBGOLw3VtHfOj 141.142.220.118 49996 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475168.918358 Ck51lg1bScffFj34Ri 141.142.220.118 49998 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475168.952296 CykQaM33ztNt0csB9a 141.142.220.118 49999 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475168.952307 CtxTCR2Yer0FR1tIBg 141.142.220.118 50000 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475168.954820 CLNN1k2QMum1aexUK7 141.142.220.118 50001 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475168.975934 CwjjYJ2WqgTbAqiHl6 141.142.220.118 49997 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475168.976436 C3eiCBGOLw3VtHfOj 141.142.220.118 49996 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475168.979264 Ck51lg1bScffFj34Ri 141.142.220.118 49998 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475169.014593 CykQaM33ztNt0csB9a 141.142.220.118 49999 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475169.014619 CtxTCR2Yer0FR1tIBg 141.142.220.118 50000 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
1300475169.014927 CLNN1k2QMum1aexUK7 141.142.220.118 50001 208.80.152.3 80 upload.wikimedia.org Intel::DOMAIN HTTP::IN_HOST_HEADER bro Intel::DOMAIN source1 - - -
|
||||
#close 2016-08-05-13-24-29
|
BIN
testing/btest/Traces/snmp/snmpwalk-short.pcap
Normal file
Binary file not shown.
|
@ -9,9 +9,6 @@ endif ()
|
|||
|
||||
set(CMAKE_MODULE_PATH ${BRO_DIST}/cmake)
|
||||
|
||||
find_package(OpenSSL)
|
||||
include_directories(${OPENSSL_INCLUDE_DIR})
|
||||
|
||||
include(BroPlugin)
|
||||
|
||||
bro_plugin_begin(Demo Foo)
|
||||
|
|
|
@ -9,9 +9,6 @@ endif ()
|
|||
|
||||
set(CMAKE_MODULE_PATH ${BRO_DIST}/cmake)
|
||||
|
||||
find_package(OpenSSL)
|
||||
include_directories(${OPENSSL_INCLUDE_DIR})
|
||||
|
||||
include(BroPlugin)
|
||||
|
||||
bro_plugin_begin(Demo Foo)
|
||||
|
|
46
testing/btest/scripts/base/frameworks/intel/expire-item.bro
Normal file
|
@ -0,0 +1,46 @@
|
|||
# @TEST-EXEC: btest-bg-run broproc bro %INPUT
|
||||
# @TEST-EXEC: btest-bg-wait -k 7
|
||||
# @TEST-EXEC: cat broproc/intel.log > output
|
||||
# @TEST-EXEC: cat broproc/.stdout >> output
|
||||
# @TEST-EXEC: btest-diff output
|
||||
|
||||
# @TEST-START-FILE intel.dat
|
||||
#fields indicator indicator_type meta.source meta.desc meta.url
|
||||
1.2.3.4 Intel::ADDR source1 this host is bad http://some-data-distributor.com/1
|
||||
# @TEST-END-FILE
|
||||
|
||||
@load frameworks/communication/listen
|
||||
@load frameworks/intel/do_expire
|
||||
|
||||
redef Intel::read_files += { "../intel.dat" };
|
||||
redef enum Intel::Where += { SOMEWHERE };
|
||||
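# Use a short item expiration and a frequent table expiration check so the item expires within the test's 7-second window.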
redef Intel::item_expiration = 3sec;
|
||||
redef table_expire_interval = 1sec;
|
||||
|
||||
global runs = 0;
|
||||
event do_it()
|
||||
{
|
||||
print "Trigger: 1.2.3.4";
|
||||
Intel::seen([$host=1.2.3.4,
|
||||
$where=SOMEWHERE]);
|
||||
|
||||
++runs;
|
||||
if ( runs < 6 )
|
||||
schedule 1sec { do_it() };
|
||||
}
|
||||
|
||||
event Intel::match(s: Intel::Seen, items: set[Intel::Item])
|
||||
{
|
||||
print fmt("Seen: %s", s$indicator);
|
||||
}
|
||||
|
||||
hook Intel::item_expired(indicator: string, indicator_type: Intel::Type,
|
||||
metas: set[Intel::MetaData])
|
||||
{
|
||||
print fmt("Expired: %s", indicator);
|
||||
}
|
||||
|
||||
event bro_init() &priority=-10
|
||||
{
|
||||
schedule 1sec { do_it() };
|
||||
}
|
51
testing/btest/scripts/base/frameworks/intel/match-subnet.bro
Normal file
|
@ -0,0 +1,51 @@
|
|||
# @TEST-EXEC: btest-bg-run broproc bro %INPUT
|
||||
# @TEST-EXEC: btest-bg-wait -k 5
|
||||
# @TEST-EXEC: cat broproc/intel.log > output
|
||||
# @TEST-EXEC: cat broproc/.stdout >> output
|
||||
# @TEST-EXEC: btest-diff output
|
||||
|
||||
# @TEST-START-FILE intel.dat
|
||||
#fields indicator indicator_type meta.source meta.desc meta.url
|
||||
192.168.1.1 Intel::ADDR source1 this host is just plain baaad http://some-data-distributor.com/1
|
||||
192.168.2.0/24 Intel::SUBNET source1 this subnetwork is just plain baaad http://some-data-distributor.com/2
|
||||
192.168.142.1 Intel::ADDR source1 this host is just plain baaad http://some-data-distributor.com/3
|
||||
192.168.142.0/24 Intel::SUBNET source1 this subnetwork is baaad http://some-data-distributor.com/4
|
||||
192.168.142.0/26 Intel::SUBNET source1 this subnetwork is inside http://some-data-distributor.com/4
|
||||
192.168.128.0/18 Intel::SUBNET source1 this subnetwork might be baaad http://some-data-distributor.com/5
|
||||
# @TEST-END-FILE
|
||||
|
||||
@load frameworks/communication/listen
|
||||
|
||||
redef Intel::read_files += { "../intel.dat" };
|
||||
redef enum Intel::Where += { SOMEWHERE };
|
||||
|
||||
event do_it()
|
||||
{
|
||||
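# 192.168.1.1 should match only its ADDR indicator, 192.168.2.1 only the covering SUBNET, and 192.168.142.1 both its ADDR indicator and all enclosing SUBNET indicators.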
Intel::seen([$host=192.168.1.1,
|
||||
$where=SOMEWHERE]);
|
||||
Intel::seen([$host=192.168.2.1,
|
||||
$where=SOMEWHERE]);
|
||||
Intel::seen([$host=192.168.142.1,
|
||||
$where=SOMEWHERE]);
|
||||
}
|
||||
|
||||
event bro_init() &priority=-10
|
||||
{
|
||||
schedule 1sec { do_it() };
|
||||
}
|
||||
|
||||
global log_lines = 0;
|
||||
event Intel::log_intel(rec: Intel::Info)
|
||||
{
|
||||
++log_lines;
|
||||
if ( log_lines == 2 )
|
||||
terminate();
|
||||
}
|
||||
|
||||
event Intel::match(s: Intel::Seen, items: set[Intel::Item])
|
||||
{
|
||||
print "";
|
||||
print fmt("Seen: %s", s);
|
||||
for ( item in items )
|
||||
print fmt("Item: %s", item);
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
# @TEST-SERIALIZE: comm
|
||||
#
|
||||
# @TEST-EXEC: btest-bg-run manager-1 BROPATH=$BROPATH:.. CLUSTER_NODE=manager-1 bro %INPUT
|
||||
# @TEST-EXEC: btest-bg-run worker-1 BROPATH=$BROPATH:.. CLUSTER_NODE=worker-1 bro %INPUT
|
||||
# @TEST-EXEC: btest-bg-wait -k 10
|
||||
# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-sort btest-diff manager-1/.stdout
|
||||
# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-sort btest-diff worker-1/.stdout
|
||||
# @TEST-EXEC: btest-diff manager-1/intel.log
|
||||
|
||||
# @TEST-START-FILE cluster-layout.bro
|
||||
redef Cluster::nodes = {
|
||||
["manager-1"] = [$node_type=Cluster::MANAGER, $ip=127.0.0.1, $p=37757/tcp, $workers=set("worker-1")],
|
||||
["worker-1"] = [$node_type=Cluster::WORKER, $ip=127.0.0.1, $p=37760/tcp, $manager="manager-1"],
|
||||
};
|
||||
# @TEST-END-FILE
|
||||
|
||||
@load base/frameworks/control
|
||||
|
||||
module Intel;
|
||||
|
||||
redef Log::default_rotation_interval=0sec;
|
||||
|
||||
event test_manager()
|
||||
{
|
||||
Intel::remove([$indicator="192.168.0.1", $indicator_type=Intel::ADDR, $meta=[$source="source1"]]);
|
||||
Intel::seen([$host=192.168.0.1, $where=Intel::IN_ANYWHERE]);
|
||||
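# The T argument requests purging the whole indicator, even though source2 still references it (see the purge_item handler below).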
Intel::remove([$indicator="192.168.0.2", $indicator_type=Intel::ADDR, $meta=[$source="source1"]], T);
|
||||
Intel::seen([$host=192.168.0.2, $where=Intel::IN_ANYWHERE]);
|
||||
}
|
||||
|
||||
event test_worker()
|
||||
{
|
||||
Intel::remove([$indicator="192.168.1.2", $indicator_type=Intel::ADDR, $meta=[$source="source1"]]);
|
||||
Intel::remove([$indicator="192.168.1.2", $indicator_type=Intel::ADDR, $meta=[$source="source2"]]);
|
||||
Intel::seen([$host=192.168.1.2, $where=Intel::IN_ANYWHERE]);
|
||||
# Trigger shutdown by matching data that should be present
|
||||
Intel::seen([$host=10.10.10.10, $where=Intel::IN_ANYWHERE]);
|
||||
}
|
||||
|
||||
event remote_connection_handshake_done(p: event_peer)
|
||||
{
|
||||
# Insert the data once all workers are connected.
|
||||
if ( Cluster::local_node_type() == Cluster::MANAGER && Cluster::worker_count == 1 )
|
||||
{
|
||||
Intel::insert([$indicator="192.168.0.1", $indicator_type=Intel::ADDR, $meta=[$source="source1"]]);
|
||||
Intel::insert([$indicator="192.168.0.2", $indicator_type=Intel::ADDR, $meta=[$source="source1"]]);
|
||||
Intel::insert([$indicator="192.168.0.2", $indicator_type=Intel::ADDR, $meta=[$source="source2"]]);
|
||||
Intel::insert([$indicator="192.168.1.2", $indicator_type=Intel::ADDR, $meta=[$source="source1"]]);
|
||||
Intel::insert([$indicator="192.168.1.2", $indicator_type=Intel::ADDR, $meta=[$source="source2"]]);
|
||||
Intel::insert([$indicator="10.10.10.10", $indicator_type=Intel::ADDR, $meta=[$source="end"]]);
|
||||
|
||||
event test_manager();
|
||||
}
|
||||
}
|
||||
|
||||
global worker_data = 0;
|
||||
event Intel::cluster_new_item(item: Intel::Item)
|
||||
{
|
||||
# Run test on worker-1 when all items have been inserted
|
||||
if ( Cluster::node == "worker-1" )
|
||||
{
|
||||
++worker_data;
|
||||
if ( worker_data == 4 )
|
||||
event test_worker();
|
||||
}
|
||||
}
|
||||
|
||||
event Intel::remove_item(item: Item, purge_indicator: bool)
|
||||
{
|
||||
print fmt("Removing %s (source: %s).", item$indicator, item$meta$source);
|
||||
}
|
||||
|
||||
event purge_item(item: Item)
|
||||
{
|
||||
print fmt("Purging %s.", item$indicator);
|
||||
}
|
||||
|
||||
event Intel::log_intel(rec: Intel::Info)
|
||||
{
|
||||
print "Logging intel hit!";
|
||||
event Control::shutdown_request();
|
||||
}
|
||||
|
||||
event remote_connection_closed(p: event_peer)
|
||||
{
|
||||
# Cascading termination
|
||||
terminate_communication();
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
# @TEST-SERIALIZE: comm
|
||||
|
||||
# @TEST-EXEC: cp intel1.dat intel.dat
|
||||
# @TEST-EXEC: btest-bg-run broproc bro %INPUT
|
||||
# @TEST-EXEC: sleep 2
|
||||
# @TEST-EXEC: cp intel2.dat intel.dat
|
||||
# @TEST-EXEC: sleep 2
|
||||
# @TEST-EXEC: cp intel3.dat intel.dat
|
||||
# @TEST-EXEC: btest-bg-wait 6
|
||||
# @TEST-EXEC: cat broproc/intel.log > output
|
||||
# @TEST-EXEC: cat broproc/notice.log >> output
|
||||
# @TEST-EXEC: btest-diff output
|
||||
|
||||
# @TEST-START-FILE intel1.dat
|
||||
#fields indicator indicator_type meta.source meta.desc meta.url meta.do_notice
|
||||
1.2.3.4 Intel::ADDR source1 this host is just plain baaad http://some-data-distributor.com/1234 F
|
||||
# @TEST-END-FILE
|
||||
|
||||
# @TEST-START-FILE intel2.dat
|
||||
#fields indicator indicator_type meta.source meta.desc meta.url meta.do_notice
|
||||
1.2.3.4 Intel::ADDR source2 this host is just plain baaad http://some-data-distributor.com/1234 F
|
||||
4.3.2.1 Intel::ADDR source2 this host might also be baaad http://some-data-distributor.com/4321 F
|
||||
# @TEST-END-FILE
|
||||
|
||||
# @TEST-START-FILE intel3.dat
|
||||
#fields indicator indicator_type meta.source meta.desc meta.url meta.do_notice
|
||||
1.2.3.4 Intel::ADDR source2 this host is just plain baaad http://some-data-distributor.com/1234 T
|
||||
4.3.2.1 Intel::ADDR source2 this host might also be baaad http://some-data-distributor.com/4321 T
|
||||
# @TEST-END-FILE
|
||||
|
||||
@load base/frameworks/communication # let network-time run
|
||||
@load frameworks/intel/do_notice
|
||||
|
||||
redef exit_only_after_terminate = T;
|
||||
redef Intel::read_files += { "../intel.dat" };
|
||||
redef enum Intel::Where += { SOMEWHERE };
|
||||
|
||||
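# intel.dat is replaced twice while Bro is running (see the cp commands above); the third version sets meta.do_notice, which should add entries to notice.log.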
global runs = 0;
|
||||
event do_it()
|
||||
{
|
||||
Intel::seen([$host=1.2.3.4,
|
||||
$where=SOMEWHERE]);
|
||||
Intel::seen([$host=4.3.2.1,
|
||||
$where=SOMEWHERE]);
|
||||
|
||||
++runs;
|
||||
if ( runs < 3 )
|
||||
schedule 3sec { do_it() };
|
||||
}
|
||||
|
||||
global log_lines = 0;
|
||||
event Intel::log_intel(rec: Intel::Info)
|
||||
{
|
||||
++log_lines;
|
||||
if ( log_lines == 5 )
|
||||
terminate();
|
||||
}
|
||||
|
||||
event bro_init() &priority=-10
|
||||
{
|
||||
schedule 1sec { do_it() };
|
||||
}
|
15
testing/btest/scripts/base/protocols/snmp/snmp-addr.bro
Normal file
|
@ -0,0 +1,15 @@
|
|||
# @TEST-EXEC: bro -C -b -r $TRACES/snmp/snmpwalk-short.pcap %INPUT
|
||||
# @TEST-EXEC: btest-diff .stdout
|
||||
|
||||
@load base/protocols/snmp
|
||||
|
||||
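# Print every IP address value seen in SNMP variable bindings; the baseline checks that they come out in the right byte order.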
event snmp_response(c: connection, is_orig: bool, header: SNMP::Header, pdu: SNMP::PDU)
{
|
||||
|
||||
for ( i in pdu$bindings )
{
|
||||
local binding = pdu$bindings[i];
|
||||
|
||||
if ( binding$value?$address )
|
||||
print binding$value$address;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
# @TEST-EXEC: bro -C -r $TRACES/ssh/sshguess.pcap %INPUT | sort >output
|
||||
# @TEST-EXEC: btest-diff output
|
||||
|
||||
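# Each failed authentication should be reported once per connection; the sorted uid list in the baseline contains no duplicates.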
event ssh_auth_failed(c: connection)
|
||||
{
|
||||
print c$uid;
|
||||
}
|
7
testing/btest/scripts/base/protocols/tcp/pending.bro
Normal file
|
@ -0,0 +1,7 @@
|
|||
# @TEST-EXEC: bro -C -r $TRACES/tls/chrome-34-google.trace %INPUT
|
||||
# @TEST-EXEC: btest-diff .stdout
|
||||
|
||||
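# connection_pending is now raised only for connections still pending when Bro terminates, so the baseline contains a single entry.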
event connection_pending(c: connection)
|
||||
{
|
||||
print current_time(), "Connection pending", c$id, c$history;
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
# @TEST-EXEC: bro -r $TRACES/tls/ecdsa-cert.pcap %INPUT
|
||||
# @TEST-EXEC: bro -Cr $TRACES/tls/ecdsa-cert.pcap %INPUT
|
||||
# @TEST-EXEC: cat intel.log > intel-all.log
|
||||
# @TEST-EXEC: bro -r $TRACES/tls/ssl.v3.trace %INPUT
|
||||
# @TEST-EXEC: cat intel.log >> intel-all.log
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
# @TEST-EXEC: bro -Cr $TRACES/wikipedia.trace %INPUT
|
||||
# @TEST-EXEC: btest-diff intel.log
|
||||
|
||||
#@TEST-START-FILE intel.dat
|
||||
#fields indicator indicator_type meta.source meta.desc meta.url
|
||||
upload.wikimedia.org Intel::DOMAIN source1 somehow bad http://some-data-distributor.com/1
|
||||
meta.wikimedia.org Intel::DOMAIN source1 also bad http://some-data-distributor.com/1
|
||||
#@TEST-END-FILE
|
||||
|
||||
#@TEST-START-FILE whitelist.dat
|
||||
#fields indicator indicator_type meta.source meta.desc meta.whitelist meta.url
|
||||
meta.wikimedia.org Intel::DOMAIN source2 also bad T http://some-data-distributor.com/1
|
||||
#@TEST-END-FILE
|
||||
|
||||
@load base/frameworks/intel
|
||||
@load frameworks/intel/whitelist
|
||||
@load frameworks/intel/seen
|
||||
|
||||
redef Intel::read_files += {
|
||||
"intel.dat",
|
||||
"whitelist.dat",
|
||||
};
|
||||
|
||||
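# meta.wikimedia.org is marked as whitelisted in whitelist.dat, so it should not produce an intel hit; only upload.wikimedia.org is expected in intel.log.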
global total_files_read = 0;
|
||||
|
||||
event bro_init()
|
||||
{
|
||||
suspend_processing();
|
||||
}
|
||||
|
||||
event Input::end_of_data(name: string, source: string)
|
||||
{
|
||||
# Wait until both intel files are read.
|
||||
if ( /^intel-/ in name && (++total_files_read == 2) )
|
||||
{
|
||||
continue_processing();
|
||||
}
|
||||
}
|
||||
|