Mirror of https://github.com/zeek/zeek.git (synced 2025-10-05 16:18:19 +00:00)
commit f821a13cce

    Merge remote-tracking branch 'origin/master' into topic/bernhard/file-analysis-x509

    Conflicts:
        src/analyzer/protocol/ssl/events.bif

    Still broken.

736 changed files with 16014 additions and 17843 deletions
.gitmodules (16 changed lines)

@@ -1,24 +1,24 @@
 [submodule "aux/bro-aux"]
 	path = aux/bro-aux
-	url = ../bro-aux
+	url = git://git.bro.org/bro-aux
 [submodule "aux/binpac"]
 	path = aux/binpac
-	url = ../binpac
+	url = git://git.bro.org/binpac
 [submodule "aux/broccoli"]
 	path = aux/broccoli
-	url = ../broccoli
+	url = git://git.bro.org/broccoli
 [submodule "aux/broctl"]
 	path = aux/broctl
-	url = ../broctl
+	url = git://git.bro.org/broctl
 [submodule "aux/btest"]
 	path = aux/btest
-	url = ../btest
+	url = git://git.bro.org/btest
 [submodule "cmake"]
 	path = cmake
-	url = ../cmake
+	url = git://git.bro.org/cmake
 [submodule "magic"]
 	path = magic
-	url = ../bromagic
+	url = git://git.bro.org/bromagic
 [submodule "src/3rdparty"]
 	path = src/3rdparty
-	url = ../bro-3rdparty
+	url = git://git.bro.org/bro-3rdparty
CMakeLists.txt

@@ -1,5 +1,5 @@
 project(Bro C CXX)
-cmake_minimum_required(VERSION 2.6.3 FATAL_ERROR)
+cmake_minimum_required(VERSION 2.8.0 FATAL_ERROR)
 include(cmake/CommonCMakeConfig.cmake)
 
 ########################################################################

@@ -10,7 +10,6 @@ if (NOT BRO_SCRIPT_INSTALL_PATH)
     # set the default Bro script installation path (user did not specify one)
     set(BRO_SCRIPT_INSTALL_PATH ${BRO_ROOT_DIR}/share/bro)
 endif ()
-set(BRO_SCRIPT_SOURCE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/scripts)
 
 # sanitize the Bro script install directory into an absolute path
 # (CMake is confused by ~ as a representation of home directory)

@@ -40,6 +39,32 @@ set(VERSION_MAJ_MIN "${VERSION_MAJOR}.${VERSION_MINOR}")
 ########################################################################
 ## Dependency Configuration
 
+include(ExternalProject)
+
+# LOG_* options to ExternalProject_Add appear in CMake 2.8.3. If
+# available, using them hides external project configure/build output.
+if("${CMAKE_VERSION}" VERSION_GREATER 2.8.2)
+    set(EXTERNAL_PROJECT_LOG_OPTIONS
+        LOG_DOWNLOAD 1 LOG_UPDATE 1 LOG_CONFIGURE 1 LOG_BUILD 1 LOG_INSTALL 1)
+else()
+    set(EXTERNAL_PROJECT_LOG_OPTIONS)
+endif()
+
+set(LIBMAGIC_PREFIX ${CMAKE_CURRENT_BINARY_DIR}/libmagic-prefix)
+set(LIBMAGIC_INCLUDE_DIR ${LIBMAGIC_PREFIX}/include)
+set(LIBMAGIC_LIB_DIR ${LIBMAGIC_PREFIX}/lib)
+set(LIBMAGIC_LIBRARY ${LIBMAGIC_LIB_DIR}/libmagic.a)
+ExternalProject_Add(libmagic
+    PREFIX ${LIBMAGIC_PREFIX}
+    URL ${CMAKE_CURRENT_SOURCE_DIR}/src/3rdparty/file-5.16.tar.gz
+    CONFIGURE_COMMAND ./configure --enable-static --disable-shared
+                      --prefix=${LIBMAGIC_PREFIX}
+                      --includedir=${LIBMAGIC_INCLUDE_DIR}
+                      --libdir=${LIBMAGIC_LIB_DIR}
+    BUILD_IN_SOURCE 1
+    ${EXTERNAL_PROJECT_LOG_OPTIONS}
+)
+
 include(FindRequiredPackage)
 
 # Check cache value first to avoid displaying "Found sed" messages everytime

@@ -58,7 +83,6 @@ FindRequiredPackage(BISON)
 FindRequiredPackage(PCAP)
 FindRequiredPackage(OpenSSL)
 FindRequiredPackage(BIND)
-FindRequiredPackage(LibMagic)
 FindRequiredPackage(ZLIB)
 
 if (NOT BinPAC_ROOT_DIR AND

@@ -74,18 +98,12 @@ if (MISSING_PREREQS)
     message(FATAL_ERROR "Configuration aborted due to missing prerequisites")
 endif ()
 
-set(libmagic_req 5.04)
-if ( LibMagic_VERSION VERSION_LESS ${libmagic_req} )
-    message(FATAL_ERROR "libmagic of at least version ${libmagic_req} required "
-                        "(found ${LibMagic_VERSION})")
-endif ()
-
 include_directories(BEFORE
     ${PCAP_INCLUDE_DIR}
     ${OpenSSL_INCLUDE_DIR}
     ${BIND_INCLUDE_DIR}
     ${BinPAC_INCLUDE_DIR}
-    ${LibMagic_INCLUDE_DIR}
+    ${LIBMAGIC_INCLUDE_DIR}
     ${ZLIB_INCLUDE_DIR}
 )
 

@@ -153,9 +171,10 @@ if (NOT DISABLE_ELASTICSEARCH AND LIBCURL_FOUND)
     list(APPEND OPTLIBS ${LibCURL_LIBRARIES})
 endif()
 
-if (ENABLE_PERFTOOLS_DEBUG)
+if (ENABLE_PERFTOOLS_DEBUG OR ENABLE_PERFTOOLS)
     # Just a no op to prevent CMake from complaining about manually-specified
-    # ENABLE_PERFTOOLS_DEBUG not being used if google perftools weren't found
+    # ENABLE_PERFTOOLS_DEBUG or ENABLE_PERFTOOLS not being used if google
+    # perftools weren't found
 endif ()
 
 set(brodeps

@@ -163,7 +182,7 @@ set(brodeps
     ${PCAP_LIBRARY}
     ${OpenSSL_LIBRARIES}
     ${BIND_LIBRARY}
-    ${LibMagic_LIBRARY}
+    ${LIBMAGIC_LIBRARY}
     ${ZLIB_LIBRARY}
     ${OPTLIBS}
 )
COPYING (2 changed lines)

@@ -1,4 +1,4 @@
-Copyright (c) 1995-2012, The Regents of the University of California
+Copyright (c) 1995-2013, The Regents of the University of California
 through the Lawrence Berkeley National Laboratory and the
 International Computer Science Institute. All rights reserved.
 
Makefile (28 changed lines)

@@ -6,7 +6,7 @@
 #
 
 BUILD=build
-REPO=`basename \`git config --get remote.origin.url\``
+REPO=`basename \`git config --get remote.origin.url | sed 's/^[^:]*://g'\``
 VERSION_FULL=$(REPO)-`cat VERSION`
 VERSION_MIN=$(REPO)-`cat VERSION`-minimal
 HAVE_MODULES=git submodule | grep -v cmake >/dev/null

@@ -29,28 +29,17 @@ doc: configured
 docclean: configured
 	$(MAKE) -C $(BUILD) $@
 
-restdoc: configured
-	$(MAKE) -C $(BUILD) $@
-
-restclean: configured
-	$(MAKE) -C $(BUILD) $@
-
-broxygen: configured
-	$(MAKE) -C $(BUILD) $@
-
-broxygenclean: configured
-	$(MAKE) -C $(BUILD) $@
-
 dist:
 	@rm -rf $(VERSION_FULL) $(VERSION_FULL).tgz
 	@rm -rf $(VERSION_MIN) $(VERSION_MIN).tgz
-	@mkdir $(VERSION_FULL)
-	@tar --exclude=$(VERSION_FULL)* --exclude=$(VERSION_MIN)* --exclude=.git -cf - . | ( cd $(VERSION_FULL) && tar -xpf - )
-	@( cd $(VERSION_FULL) && cp -R ../.git . && git reset -q --hard HEAD && git clean -xdfq && rm -rf .git )
+	@git clone --recursive . $(VERSION_FULL) >/dev/null 2>&1
+	@find $(VERSION_FULL) -name .git\* | xargs rm -rf
 	@tar -czf $(VERSION_FULL).tgz $(VERSION_FULL) && echo Package: $(VERSION_FULL).tgz && rm -rf $(VERSION_FULL)
-	@$(HAVE_MODULES) && mkdir $(VERSION_MIN) || exit 0
-	@$(HAVE_MODULES) && tar --exclude=$(VERSION_FULL)* --exclude=$(VERSION_MIN)* --exclude=.git `git submodule | awk '{print "--exclude="$$2}' | grep -v cmake | tr '\n' ' '` -cf - . | ( cd $(VERSION_MIN) && tar -xpf - ) || exit 0
-	@$(HAVE_MODULES) && ( cd $(VERSION_MIN) && cp -R ../.git . && git reset -q --hard HEAD && git clean -xdfq && rm -rf .git ) || exit 0
+	@$(HAVE_MODULES) && git clone . $(VERSION_MIN) >/dev/null 2>&1 || exit 0
+	@$(HAVE_MODULES) && (cd $(VERSION_MIN) && git submodule update --init cmake >/dev/null 2>&1) || exit 0
+	@$(HAVE_MODULES) && (cd $(VERSION_MIN) && git submodule update --init src/3rdparty >/dev/null 2>&1) || exit 0
+	@$(HAVE_MODULES) && (cd $(VERSION_MIN) && git submodule update --init magic >/dev/null 2>&1) || exit 0
+	@$(HAVE_MODULES) && find $(VERSION_MIN) -name .git\* | xargs rm -rf || exit 0
 	@$(HAVE_MODULES) && tar -czf $(VERSION_MIN).tgz $(VERSION_MIN) && echo Package: $(VERSION_MIN).tgz && rm -rf $(VERSION_MIN) || exit 0
 
 bindist:

@@ -65,6 +54,7 @@ test:
 
 test-all: test
 	test -d aux/broctl && ( cd aux/broctl && make test )
+	test -d aux/btest && ( cd aux/btest && make test )
 
 configured:
 	@test -d $(BUILD) || ( echo "Error: No build/ directory found. Did you run configure?" && exit 1 )
NEWS (468 changed lines)

@@ -1,53 +1,204 @@
 
 This document summarizes the most important changes in the current Bro
-release. For a complete list of changes, see the ``CHANGES`` file
+release. For an exhaustive list of changes, see the ``CHANGES`` file
 (note that submodules, such as BroControl and Broccoli, come with
-their own CHANGES.)
+their own ``CHANGES``.)
 
-Bro 2.2 (Work In Progress)
-==========================
+Bro 2.3
+=======
 
+[In progress]
+
+Dependencies
+------------
+
+- Bro no longer requires a pre-installed libmagic (because it now
+  ships its own).
+
+- Compiling from source now needs a CMake version >= 2.8.0.
+
 New Functionality
 -----------------
+
+- Support for GRE tunnel decapsulation, including enhanced GRE
+  headers. GRE tunnels are treated just like IP-in-IP tunnels by
+  parsing past the GRE header in between the delivery and payload IP
+  packets.
+
+Changed Functionality
+---------------------
+
+- string slices now exclude the end index (e.g., "123"[1:2] returns
+  "2"). Generally, Bro's string slices now behave similar to Python.
+
+- ssl_client_hello() now receives a vector of ciphers, instead of a
+  set, to preserve their order.
+
+- Notice::end_suppression() has been removed.
+
+Bro 2.2
+=======
+
+New Functionality
+-----------------
+
+- A completely overhauled intelligence framework for consuming
+  external intelligence data. It provides an abstracted mechanism
+  for feeding data into the framework to be matched against the
+  data available. It also provides a function named ``Intel::match``
+  which makes any hits on intelligence data available to the
+  scripting language.
+
+  Using the input framework, the intel framework can load data from
+  text files. It can also update and add data if changes are
+  made to the file being monitored. Files to monitor for
+  intelligence can be provided by redef-ing the
+  ``Intel::read_files`` variable.
+
+  The intel framework is cluster-ready. On a cluster, the
+  manager is the only node that needs to load in data from disk,
+  the cluster support will distribute the data across a cluster
+  automatically.
+
+  Scripts are provided at ``policy/frameworks/intel/seen`` that
+  provide a broad set of sources of data to feed into the intel
+  framework to be matched.
+
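For illustration, pointing the intel framework at a feed is a one-line redef of the variable mentioned above; the file path here is made up::

    redef Intel::read_files += {
        "/usr/local/bro/share/intel/example.dat",  # hypothetical feed file
    };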
+- A new file analysis framework moves most of the processing of file
+  content from script-land into the core, where it belongs. See
+  ``doc/file-analysis.rst``, or the online documentation, for more
+  information.
+
+  Much of this is an internal change, but the framework also comes
+  with the following user-visible functionality (some of that was
+  already available before but is done differently, and more
+  efficiently, now):
+
+  - HTTP:
+
+    * Identify MIME type of messages.
+    * Extract messages to disk.
+    * Compute MD5 for messages.
+
+  - SMTP:
+
+    * Identify MIME type of messages.
+    * Extract messages to disk.
+    * Compute MD5 for messages.
+    * Provide access to start of entity data.
+
+  - FTP data transfers:
+
+    * Identify MIME types of data.
+    * Record to disk.
+
+  - IRC DCC transfers: Record to disk.
+
+  - Support for analyzing data transferred via HTTP range requests.
+
+- A binary input reader interfaces the input framework with the
+  file analysis, allowing to inject files on disk into Bro's
+  content processing.
+
+- A new framework for computing a wide array of summary statistics,
+  such as counters and thresholds checks, standard deviation and mean,
+  set cardinality, top K, and more. The framework operates in
+  real-time, independent of the underlying data, and can aggregate
+  information from many independent monitoring points (including
+  clusters). It provides a transparent, easy-to-use user interface,
+  and can optionally deploy a set of probabilistic data structures for
+  memory-efficient operation. The framework is located in
+  ``scripts/base/frameworks/sumstats``.
+
+  A number of new applications now ship with Bro that are built on top
+  of the summary statistics framework:
+
+  * Scan detection: Detectors for port and address scans. See
+    ``policy/misc/scan.bro`` (these scan detectors used to exist in
+    Bro versions <2.0; it's now back, but quite different).
+
+  * Tracerouter detector: ``policy/misc/detect-traceroute.bro``
+
+  * Web application detection/measurement:
+    ``policy/misc/app-stats/*``
+
+  * FTP and SSH brute-forcing detector:
+    ``policy/protocols/ftp/detect-bruteforcing.bro``,
+    ``policy/protocols/ssh/detect-bruteforcing.bro``
+
+  * HTTP-based SQL injection detector:
+    ``policy/protocols/http/detect-sqli.bro`` (existed before, but
+    now ported to the new framework)
+
+- GridFTP support. This is an extension to the standard FTP analyzer
+  and includes:
+
+  - An analyzer for the GSI mechanism of GSSAPI FTP AUTH method.
+    GSI authentication involves an encoded TLS/SSL handshake over
+    the FTP control session. For FTP sessions that attempt GSI
+    authentication, the ``service`` field of the connection log
+    will include ``gridftp`` (as well as also ``ftp`` and
+    ``ssl``).
+
+  - An example of a GridFTP data channel detection script. It
+    relies on the heuristics of GridFTP data channels commonly
+    default to SSL mutual authentication with a NULL bulk cipher
+    and that they usually transfer large datasets (default
+    threshold of script is 1 GB). For identified GridFTP data
+    channels, the ``services`` fields of the connection log will
+    include ``gridftp-data``.
+
+- Modbus and DNP3 support. Script-level support is only basic at this
+  point but see ``src/analyzer/protocol/{modbus,dnp3}/events.bif``, or
+  the online documentation, for the events Bro generates. For Modbus,
+  there are also some example policies in
+  ``policy/protocols/modbus/*``.
+
+- The documentation now includes a new introduction to writing Bro
+  scripts. See ``doc/scripting/index.rst`` or, much better, the online
+  version. There's also the beginning of a chapter on "Using Bro" in
+  ``doc/using/index.rst``.
+
 - GPRS Tunnelling Protocol (GTPv1) decapsulation.
 
-- GridFTP support. TODO: Extend.
+- The scripting language now provides "hooks", a new flavor of
+  functions that share characteristics of both standard functions and
+  events. They are like events in that multiple bodies can be defined
+  for the same hook identifier. They are more like functions in the
+  way they are invoked/called, because, unlike events, their execution
+  is immediate and they do not get scheduled through an event queue.
+  Also, a unique feature of a hook is that a given hook handler body
+  can short-circuit the execution of remaining hook handlers simply by
+  exiting from the body as a result of a ``break`` statement (as
+  opposed to a ``return`` or just reaching the end of the body). See
+  ``doc/scripts/builtins.rst``, or the online documentation, for more
+  information.
+
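As a rough sketch of the hook mechanics described above (the hook name and values are invented, and ``bro_init`` is only used as a convenient place to invoke it)::

    global my_hook: hook(i: count);

    hook my_hook(i: count)
        {
        if ( i == 0 )
            break;    # short-circuits any remaining handler bodies
        print "first handler", i;
        }

    hook my_hook(i: count)
        {
        print "second handler", i;
        }

    event bro_init()
        {
        hook my_hook(1);    # runs handler bodies immediately, no event queue
        }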
-- Modbus support. TODO: Extend.
+- Bro's language now has a working ``switch`` statement that generally
+  behaves like C-style switches (except that case labels can be
+  comprised of multiple literal constants delimited by commas). Only
+  atomic types are allowed for now. Case label bodies that don't
+  execute a ``return`` or ``break`` statement will fall through to
+  subsequent cases. A ``default`` case label is supported.
 
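A minimal sketch of the new ``switch`` statement (the variable and the strings are made up)::

    event bro_init()
        {
        local proto = "udp";

        switch ( proto )
            {
            case "tcp", "udp":
                print "transport-layer protocol";
                break;
            case "icmp":
                print "icmp";
                break;
            default:
                print "something else";
                break;
            }
        }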
-- DNP3 support. TODO: Extend.
-
-- ssl.log now also records the subject client and issuer certificates.
-
-- Hooks: TODO: Briefly summarize the documention from
-  doc/scripts/builtins.rst here.
-
-- The ASCII writer can now output CSV files on a per filter basis.
-
-- Bro's language now has a working "switch" statement that generally
-  behaves like C-style switches except case labels can be comprised of
-  multiple literal constants delimited by commas. Only atomic types
-  are allowed for now. Case label bodies that don't execute a
-  "return" or "break" statement will fall through to subsequent cases.
-  A default case label is allowed.
-
-- Bro's language now has a new set of types "opaque of X". Opaque
+- Bro's language now has a new set of types ``opaque of X``. Opaque
   values can be passed around like other values but they can only be
   manipulated with BiF functions, not with other operators. Currently,
-  the following opaque types are supported:
+  the following opaque types are supported::
 
-  - opaque of md5
-  - opaque of sha1
-  - opaque of sha256
-  - opaque of entropy.
+      opaque of md5
+      opaque of sha1
+      opaque of sha256
+      opaque of cardinality
+      opaque of topk
+      opaque of bloomfilter
 
-  They go along with the corresponding BiF functions md5_*, sha1_*,
-  sha256_*, and entropy_*, respectively. Note that these functions
-  have changed their signatures to work with opaque types rather
-  than global state as it was before.
+  These go along with the corresponding BiF functions ``md5_*``,
+  ``sha1_*``, ``sha256_*``, ``entropy_*``, etc. Note that where
+  these functions existed before, they have changed their signatures
+  to work with opaque types rather than global state.
 
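For illustration, hashing with an ``opaque of md5`` handle might look as follows; the exact BiF names ``md5_hash_init``/``md5_hash_update``/``md5_hash_finish`` are assumed from the ``md5_*`` family, not quoted from this entry::

    event bro_init()
        {
        local h: opaque of md5 = md5_hash_init();
        md5_hash_update(h, "foo");
        md5_hash_update(h, "bar");
        print md5_hash_finish(h);    # digest of the concatenated input
        }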
-- The scripting language now supports a constructing sets, tables,
+- The scripting language now supports constructing sets, tables,
   vectors, and records by name::
 
       type MyRecordType: record {
@@ -61,57 +212,33 @@ New Functionality
       global s = MySet([$c=1], [$c=2]);
 
 - Strings now support the subscript operator to extract individual
-  characters and substrings (e.g., s[4], s[1,5]). The index expression
-  can take up to two indices for the start and end index of the
-  substring to return (e.g. "mystring[1,3]").
+  characters and substrings (e.g., ``s[4]``, ``s[1:5]``). The index
+  expression can take up to two indices for the start and end index of
+  the substring to return (e.g. ``mystring[1:3]``).
 
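A small sketch of the subscript operator (the variable and the exact output are illustrative only)::

    event bro_init()
        {
        local s = "mystring";
        print s[4];      # a single character
        print s[1:3];    # a substring between the two indices
        }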
-- Functions now support default parameters, e.g.:
+- Functions now support default parameters, e.g.::
 
       global foo: function(s: string, t: string &default="abc", u: count &default=0);
 
-- Scripts can now use two new "magic constants" @DIR and @FILENAME
-  that expand to the directory path of the current script and just the
-  script file name without path, respectively. (Jon Siwek)
+- Scripts can now use two new "magic constants" ``@DIR`` and
+  ``@FILENAME`` that expand to the directory path of the current
+  script and just the script file name without path, respectively.
 
-- The new file analysis framework moves most of the processing of file
-  content from script-land into the core, where it belongs. See
-  doc/file-analysis.rst for more information.
-
-  Much of this is an internal change, but the framework also comes
-  with the following user-visible functionality (some of that was
-  already available before, but done differently):
-
-  [TODO: Update with changes from 984e9793db56.]
-
-  - A binary input reader interfaces the input framework with file
-    analysis, allowing to inject files on disk into Bro's
-    processing.
-
-  - Supports for analyzing data transferred via HTTP range
-    requests.
-
-  - HTTP:
-    * Identify MIME type of message.
-    * Extract message to disk.
-    * Compute MD5 for messages.
-
-  - SMTP:
-    * Identify MIME type of message.
-    * Extract message to disk.
-    * Compute MD5 for messages.
-    * Provide access to start of entity data.
-
-  - FTP data transfers: Identify MIME type; record to disk.
-
-  - IRC DCC transfers: Record to disk.
-
-  - New packet filter framework supports BPF-based load-balancing,
+- ``ssl.log`` now also records the subject client and issuer
+  certificates.
+
+- The ASCII writer can now output CSV files on a per filter basis.
+
+- New SQLite reader and writer plugins for the logging framework allow
+  to read/write persistent data from on disk SQLite databases.
+
+- A new packet filter framework supports BPF-based load-balancing,
   shunting, and sampling; plus plugin support to customize filters
   dynamically.
 
 - Bro now provides Bloom filters of two kinds: basic Bloom filters
   supporting membership tests, and counting Bloom filters that track
-  the frequency of elements. The corresponding functions are:
+  the frequency of elements. The corresponding functions are::
 
       bloomfilter_basic_init(fp: double, capacity: count, name: string &default=""): opaque of bloomfilter
       bloomfilter_basic_init2(k: count, cells: count, name: string &default=""): opaque of bloomfilter
@@ -121,10 +248,11 @@ New Functionality
       bloomfilter_merge(bf1: opaque of bloomfilter, bf2: opaque of bloomfilter): opaque of bloomfilter
       bloomfilter_clear(bf: opaque of bloomfilter)
 
-  See <INSERT LINK> for full documentation.
+  See ``src/probabilistic/bloom-filter.bif``, or the online
+  documentation, for full documentation.
 
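A usage sketch of the basic Bloom filter; ``bloomfilter_add`` and ``bloomfilter_lookup`` are assumed from the same BiF family (they are not listed in the excerpt above), and the values are invented::

    event bro_init()
        {
        local bf = bloomfilter_basic_init(0.001, 100000);
        bloomfilter_add(bf, 1.2.3.4);
        print bloomfilter_lookup(bf, 1.2.3.4);    # non-zero for (probable) members
        print bloomfilter_lookup(bf, 5.6.7.8);    # usually 0; false positives are possible
        }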
 - Bro now provides a probabilistic data structure for computing
-  "top k" elements. The corresponding functions are:
+  "top k" elements. The corresponding functions are::
 
       topk_init(size: count): opaque of topk
       topk_add(handle: opaque of topk, value: any)
@@ -136,73 +264,142 @@ New Functionality
       topk_merge(handle1: opaque of topk, handle2: opaque of topk)
       topk_merge_prune(handle1: opaque of topk, handle2: opaque of topk)
 
-  See <INSERT LINK> for full documentation.
+  See ``src/probabilistic/top-k.bif``, or the online documentation,
+  for full documentation.
 
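A usage sketch of the top-k structure; ``topk_get_top`` is assumed from the same BiF family, and the values are invented::

    event bro_init()
        {
        local t = topk_init(100);
        topk_add(t, "a");
        topk_add(t, "a");
        topk_add(t, "b");
        print topk_get_top(t, 2);    # the (approximately) two most frequent values
        }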
-- base/utils/exec.bro provides a module to start external processes
-  asynchronously and retrieve their output on termination.
-  base/utils/dir.bro uses it to monitor a directory for changes, and
-  base/utils/active-http.bro for providing an interface for querying
-  remote web servers.
-
-- Summary statistics framework. [Extend]
-
-- A number of new applications build on top of the summary statistics
-  framework:
+- Bro now provides a probabilistic data structure for computing set
+  cardinality, using the HyperLogLog algorithm. The corresponding
+  functions are::
+
+      hll_cardinality_init(err: double, confidence: double): opaque of cardinality
+      hll_cardinality_add(handle: opaque of cardinality, elem: any): bool
+      hll_cardinality_merge_into(handle1: opaque of cardinality, handle2: opaque of cardinality): bool
+      hll_cardinality_estimate(handle: opaque of cardinality): double
+      hll_cardinality_copy(handle: opaque of cardinality): opaque of cardinality
+
+  See ``src/probabilistic/cardinality-counter.bif``, or the online
+  documentation, for full documentation.
 
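A usage sketch built from the signatures above; the error/confidence arguments and addresses are illustrative values only::

    event bro_init()
        {
        local c = hll_cardinality_init(0.01, 0.95);
        hll_cardinality_add(c, 1.2.3.4);
        hll_cardinality_add(c, 5.6.7.8);
        hll_cardinality_add(c, 1.2.3.4);
        print hll_cardinality_estimate(c);    # approximately 2
        }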
-  * Scan detection: Detectors for port and address scans return. See
-    policy/misc/scan.bro.
-
-  * Tracerouter detector: policy/misc/detect-traceroute
-
-  * Web application detection/measurement: policy/misc/app-metrics.bro
-
-  * FTP brute-forcing detector: policy/protocols/ftp/detect-bruteforcing.bro
-
-  * HTTP-based SQL injection detector: policy/protocols/http/detect-sqli.bro
-    (existed before, but now ported to the new framework)
-
-  * SSH brute-forcing detector feeding the intelligence framework:
-    policy/protocols/ssh/detect-bruteforcing.bro
+- ``base/utils/exec.bro`` provides a module to start external
+  processes asynchronously and retrieve their output on termination.
+  ``base/utils/dir.bro`` uses it to monitor a directory for changes,
+  and ``base/utils/active-http.bro`` for providing an interface for
+  querying remote web servers.
+
+- BroControl can now pin Bro processes to CPUs on supported platforms:
+  To use CPU pinning, a new per-node option ``pin_cpus`` can be
+  specified in node.cfg if the OS is either Linux or FreeBSD.
+
+- BroControl now returns useful exit codes. Most BroControl commands
+  return 0 if everything was OK, and 1 otherwise. However, there are
+  a few exceptions. The "status" and "top" commands return 0 if all Bro
+  nodes are running, and 1 if not all nodes are running. The "cron"
+  command always returns 0 (but it still sends email if there were any
+  problems). Any command provided by a plugin always returns 0.
+
+- BroControl now has an option "env_vars" to set Bro environment variables.
+  The value of this option is a comma-separated list of environment variable
+  assignments (e.g., "VAR1=value, VAR2=another"). The "env_vars" option
+  can apply to all Bro nodes (by setting it in broctl.cfg), or can be
+  node-specific (by setting it in node.cfg). Environment variables in
+  node.cfg have priority over any specified in broctl.cfg.
+
+- BroControl now supports load balancing with PF_RING while sniffing
+  multiple interfaces. Rather than assigning the same PF_RING cluster ID
+  to all workers on a host, cluster ID assignment is now based on which
+  interface a worker is sniffing (i.e., all workers on a host that sniff
+  the same interface will share a cluster ID). This is handled by
+  BroControl automatically.
+
+- BroControl has several new options: MailConnectionSummary (for
+  disabling the sending of connection summary report emails),
+  MailAlarmsInterval (for specifying a different interval to send alarm
+  summary emails), CompressCmd (if archived log files will be compressed,
+  this specifies the command that will be used to compress them),
+  CompressExtension (if archived log files will be compressed, this
+  specifies the file extension to use).
+
+- BroControl comes with its own test-suite now. ``make test`` in
+  ``aux/broctl`` will run it.
+
+In addition to these, Bro 2.2 comes with a large set of smaller
+extensions, tweaks, and fixes across the whole code base, including
+most submodules.
 
 Changed Functionality
 ---------------------
 
-- We removed the following, already deprecated, functionality:
+- Previous versions of ``$prefix/share/bro/site/local.bro`` (where
+  "$prefix" indicates the installation prefix of Bro), aren't compatible
+  with Bro 2.2. This file won't be overwritten when installing over a
+  previous Bro installation to prevent clobbering users' modifications,
+  but an example of the new version is located in
+  ``$prefix/share/bro/site/local.bro.example``. So if no modification
+  has been done to the previous local.bro, just copy the new example
+  version over it, else merge in the differences. For reference,
+  a common error message when attempting to use an outdated local.bro
+  looks like::
+
+      fatal error in /usr/local/bro/share/bro/policy/frameworks/software/vulnerable.bro, line 41: BroType::AsRecordType (table/record) (set[record { min:record { major:count; minor:count; minor2:count; minor3:count; addl:string; }; max:record { major:count; minor:count; minor2:count; minor3:count; addl:string; }; }])
+
+- The type of ``Software::vulnerable_versions`` changed to allow
+  more flexibility and range specifications. An example usage:
+
+  .. code:: bro
+
+      const java_1_6_vuln = Software::VulnerableVersionRange(
+          $max = Software::Version($major = 1, $minor = 6, $minor2 = 0, $minor3 = 44)
+      );
+
+      const java_1_7_vuln = Software::VulnerableVersionRange(
+          $min = Software::Version($major = 1, $minor = 7),
+          $max = Software::Version($major = 1, $minor = 7, $minor2 = 0, $minor3 = 20)
+      );
+
+      redef Software::vulnerable_versions += {
+          ["Java"] = set(java_1_6_vuln, java_1_7_vuln)
+      };
+
+- The interface to extracting content from application-layer protocols
+  (including HTTP, SMTP, FTP) has changed significantly due to the
+  introduction of the new file analysis framework (see above).
+
+- Removed the following, already deprecated, functionality:
 
   * Scripting language:
-    - &disable_print_hook attribute.
+    - ``&disable_print_hook`` attribute.
 
   * BiF functions:
-    - parse_dotted_addr(), dump_config(),
-      make_connection_persistent(), generate_idmef(),
-      split_complete()
+    - ``parse_dotted_addr()``, ``dump_config()``,
+      ``make_connection_persistent()``, ``generate_idmef()``,
+      ``split_complete()``
 
-    - md5_*, sha1_*, sha256_*, and entropy_* have all changed
-      their signatures to work with opaque types (see above).
+    - ``md5_*``, ``sha1_*``, ``sha256_*``, and ``entropy_*`` have
+      all changed their signatures to work with opaque types (see
+      above).
 
-- Removed a now unused argument from "do_split" helper function.
+- Removed a now unused argument from ``do_split`` helper function.
 
-- "this" is no longer a reserved keyword.
+- ``this`` is no longer a reserved keyword.
 
-- The Input Framework's update_finished event has been renamed to
-  end_of_data. It will now not only fire after table-reads have been
-  completed, but also after the last event of a whole-file-read (or
-  whole-db-read, etc.).
+- The Input Framework's ``update_finished`` event has been renamed to
+  ``end_of_data``. It will now not only fire after table-reads have
+  been completed, but also after the last event of a whole-file-read
+  (or whole-db-read, etc.).
 
 - Renamed the option defining the frequency of alarm summary mails to
-  'Logging::default_alarm_mail_interval'. When using BroControl, the
+  ``Logging::default_alarm_mail_interval``. When using BroControl, the
   value can now be set with the new broctl.cfg option
-  "MailAlarmsInterval".
+  ``MailAlarmsInterval``.
 
-- We have completely reworded the "notice_policy" mechanism. It now no
-  longer uses a record of policy items but a "hook", a new language
-  element that's roughly equivalent to a function with multiple
-  bodies. The documentation [TODO: insert link] describes how to use
-  the new notice policy. For existing code, the two main changes are:
+- We have completely rewritten the ``notice_policy`` mechanism. It now
+  no longer uses a record of policy items but a ``hook``, a new
+  language element that's roughly equivalent to a function with
+  multiple bodies (see above). For existing code, the two main changes
+  are:
 
-  - What used to be a "redef" of "Notice::policy" now becomes a hook
-    implementation. Example:
+  - What used to be a ``redef`` of ``Notice::policy`` now becomes a
+    hook implementation. Example:
 
     Old::
 
@@ -221,9 +418,9 @@ Changed Functionality
             add n$actions[Notice::ACTION_EMAIL];
         }
 
-- notice() is now likewise a hook, no longer an event. If you have
-  handlers for that event, you'll likely just need to change the
-  type accordingly. Example:
+- notice() is now likewise a hook, no longer an event. If you
+  have handlers for that event, you'll likely just need to change
+  the type accordingly. Example:
 
     Old::
 

@@ -233,21 +430,34 @@ Changed Functionality
 
     hook notice(n: Notice::Info) { ... }
 
-- The notice_policy.log is gone. That's a result of the new notice
+- The ``notice_policy.log`` is gone. That's a result of the new notice
   policy setup.
 
-- Removed the byte_len() and length() bif functions. Use the ``|...|``
-  operator instead.
+- Removed the ``byte_len()`` and ``length()`` bif functions. Use the
+  ``|...|`` operator instead.
 
-- The SSH::Login notice has been superseded by a corresponding
-  intelligence framework observation (SSH::SUCCESSFUL_LOGIN).
+- The ``SSH::Login`` notice has been superseded by a corresponding
+  intelligence framework observation (``SSH::SUCCESSFUL_LOGIN``).
 
-- PacketFilter::all_packets has been replaced with
-  PacketFilter::enable_auto_protocol_capture_filters.
+- ``PacketFilter::all_packets`` has been replaced with
+  ``PacketFilter::enable_auto_protocol_capture_filters``.
 
 - We removed the BitTorrent DPD signatures pending further updates to
   that analyzer.
 
+- In previous versions of BroControl, running "broctl cron" would create
+  a file ``$prefix/logs/stats/www`` (where "$prefix" indicates the
+  installation prefix of Bro). Now, it is created as a directory.
+  Therefore, if you perform an upgrade install and you're using BroControl,
+  then you may see an email (generated by "broctl cron") containing an
+  error message: "error running update-stats". To fix this problem,
+  either remove that file (it is not needed) or rename it.
+
+- Due to lack of maintenance the Ruby bindings for Broccoli are now
+  deprecated, and the build process no longer includes them by
+  default. For the time being, they can still be enabled by
+  configuring with ``--enable-ruby``, however we plan to remove
+  Broccoli's Ruby support with the next Bro release.
+
 Bro 2.1
 =======
 

@@ -479,8 +689,8 @@ with the new version. The two rules of thumb are:
   if you need help.
 
 Below we summarize changes from 1.x to 2.x in more detail. This list
-isn't complete, see the ``CHANGES`` file in the distribution or
-:doc:`here <changes>` for the full story.
+isn't complete, see the ``CHANGES`` file in the distribution
+for the full story.
 
 Script Organization
 -------------------
 

@@ -568,8 +778,7 @@ Logging Framework
   endpoint.
 
 - The new logging framework makes it possible to extend, customize,
-  and filter logs very easily. See the :doc:`logging framework
-  </frameworks/logging>` for more information on usage.
+  and filter logs very easily.
 
 - A common pattern found in the new scripts is to store logging stream
   records for protocols inside the ``connection`` records so that
 

@@ -592,8 +801,7 @@ Notice Framework
 
 The way users interact with "notices" has changed significantly in order
 to make it easier to define a site policy and more extensible for adding
-customized actions. See the :doc:`notice framework
-</frameworks/notice>`.
+customized actions.
 
 
 New Default Settings
README (10 changed lines)

@@ -8,11 +8,21 @@ and pointers for getting started. NEWS contains release notes for the
 current version, and CHANGES has the complete history of changes.
 Please see COPYING for licensing information.
 
+You can download source and binary releases on:
+
+    http://www.bro.org/download
+
+To get the current development version, clone our master git
+repository:
+
+    git clone --recursive git://git.bro.org/bro
+
 For more documentation, research publications, and community contact
 information, please see Bro's home page:
 
     http://www.bro.org
 
 
 On behalf of the Bro Development Team,
 
 Vern Paxson & Robin Sommer,
VERSION (2 changed lines)

@@ -1 +1 @@
-2.1-1335
+2.2-117
Submodule pointer updates:

@@ -1 +1 @@
-Subproject commit 75e6906efb23036ea5a11936487498761a5f7eab
+Subproject commit 54b321009b750268526419bdbd841f421c839313
@@ -1 +1 @@
-Subproject commit cfe77b184c2362fe85d36a597a1cda776aac0a80
+Subproject commit ebf9c0d88ae8230845b91f15755156f93ff21aa8
@@ -1 +1 @@
-Subproject commit 5bcee430700f714b19a9e794de75cb42408c9ecf
+Subproject commit 52ba12128e0673a09cbc7a68b8485f5d19030633
@@ -1 +1 @@
-Subproject commit 4c87f14d1797b254934dac34e739c08ede89c052
+Subproject commit c4b5fb7336f2b598cf69777a7ec91b4aa16cacd1
@@ -1 +1 @@
-Subproject commit 13a7718b475d670ae22c1914ac1c26bd11be46b6
+Subproject commit 23ff11bf0edbad2c6f1acbeb3f9a029ff4b61785
@@ -10,10 +10,4 @@
 # BROPATH=`./bro-path-dev` ./src/bro
 #
 
-broPolicies=${BRO_SCRIPT_SOURCE_PATH}:${BRO_SCRIPT_SOURCE_PATH}/policy:${BRO_SCRIPT_SOURCE_PATH}/site
-
-broGenPolicies=${CMAKE_BINARY_DIR}/scripts
-
-installedPolicies=${BRO_SCRIPT_INSTALL_PATH}:${BRO_SCRIPT_INSTALL_PATH}/site
-
-echo .:$broPolicies:$broGenPolicies
+echo .:${CMAKE_SOURCE_DIR}/scripts:${CMAKE_SOURCE_DIR}/scripts/policy:${CMAKE_SOURCE_DIR}/scripts/site:${CMAKE_BINARY_DIR}/scripts
cmake (2 changed lines)

@@ -1 +1 @@
-Subproject commit c966aecf2bc83f7bbfdd1ac716c6627dd95cb2ec
+Subproject commit e7a46cb82ee10aa522c4d88115baf10181277d20
configure (9 changed lines)

@@ -32,12 +32,12 @@ Usage: $0 [OPTION]... [VAR=VALUE]...
     --enable-perftools       force use of Google perftools on non-Linux systems
                              (automatically on when perftools is present on Linux)
     --enable-perftools-debug use Google's perftools for debugging
+    --enable-ruby            build ruby bindings for broccoli (deprecated)
     --disable-broccoli       don't build or install the Broccoli library
     --disable-broctl         don't install Broctl
     --disable-auxtools       don't build or install auxiliary tools
     --disable-perftools      don't try to build with Google Perftools
     --disable-python         don't try to build python bindings for broccoli
-    --disable-ruby           don't try to build ruby bindings for broccoli
     --disable-dataseries     don't use the optional DataSeries log writer
     --disable-elasticsearch  don't use the optional ElasticSearch log writer
 

@@ -49,9 +49,9 @@ Usage: $0 [OPTION]... [VAR=VALUE]...
     --with-flex=PATH         path to flex executable
     --with-bison=PATH        path to bison executable
     --with-perl=PATH         path to perl executable
+    --with-libmagic=PATH     path to libmagic install root
 
   Optional Packages in Non-Standard Locations:
-    --with-libmagic=PATH     path to libmagic install root
     --with-geoip=PATH        path to the libGeoIP install root
     --with-perftools=PATH    path to Google Perftools install root
     --with-python=PATH       path to Python interpreter
 

@@ -113,6 +113,7 @@ append_cache_entry INSTALL_BROCTL BOOL true
 append_cache_entry CPACK_SOURCE_IGNORE_FILES STRING
 append_cache_entry ENABLE_MOBILE_IPV6 BOOL false
 append_cache_entry DISABLE_PERFTOOLS BOOL false
+append_cache_entry DISABLE_RUBY_BINDINGS BOOL true
 
 # parse arguments
 while [ $# -ne 0 ]; do
 

@@ -174,8 +175,8 @@ while [ $# -ne 0 ]; do
         --disable-python)
             append_cache_entry DISABLE_PYTHON_BINDINGS BOOL true
             ;;
-        --disable-ruby)
-            append_cache_entry DISABLE_RUBY_BINDINGS BOOL true
+        --enable-ruby)
+            append_cache_entry DISABLE_RUBY_BINDINGS BOOL false
             ;;
         --disable-dataseries)
             append_cache_entry DISABLE_DATASERIES BOOL true
@@ -1,75 +1,86 @@
-set(BIF_SRC_DIR ${PROJECT_SOURCE_DIR}/src)
-set(RST_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/rest_output)
-set(DOC_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/out)
-set(DOC_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
-set(DOC_SOURCE_WORKDIR ${CMAKE_CURRENT_BINARY_DIR}/sphinx-sources)
-
-set(MASTER_POLICY_INDEX ${CMAKE_CURRENT_BINARY_DIR}/scripts/policy_index)
-set(MASTER_PACKAGE_INDEX ${CMAKE_CURRENT_BINARY_DIR}/scripts/pkg_index)
-
-file(GLOB_RECURSE DOC_SOURCES FOLLOW_SYMLINKS "*")
-
-# configure the Sphinx config file (expand variables CMake might know about)
-configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in
-               ${CMAKE_CURRENT_BINARY_DIR}/conf.py
-               @ONLY)
-
-add_subdirectory(scripts)
-
-# The "broxygen" target generates reST documentation for any outdated bro
-# scripts and then uses Sphinx to generate HTML documentation from the reST
-add_custom_target(broxygen
-                  # copy the template documentation to the build directory
-                  # to give as input for sphinx
-                  COMMAND rsync -r --copy-links --times
-                          ${DOC_SOURCE_DIR}/
-                          ${DOC_SOURCE_WORKDIR}
-                  # copy generated policy script documentation into the
-                  # working copy of the template documentation
-                  COMMAND rsync -r --copy-links --times
-                          ${RST_OUTPUT_DIR}/
-                          ${DOC_SOURCE_WORKDIR}/scripts
-                  # append to the master index of all policy scripts
-                  COMMAND cat ${MASTER_POLICY_INDEX} >>
-                          ${DOC_SOURCE_WORKDIR}/scripts/scripts.rst
-                  # append to the master index of all policy packages
-                  COMMAND cat ${MASTER_PACKAGE_INDEX} >>
-                          ${DOC_SOURCE_WORKDIR}/scripts/packages.rst
-                  # construct a reST file for each group
-                  COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bin/group_index_generator.py
-                          ${CMAKE_CURRENT_BINARY_DIR}/scripts/group_list
-                          ${CMAKE_CURRENT_BINARY_DIR}/scripts
-                          ${DOC_SOURCE_WORKDIR}/scripts
-                  # tell sphinx to generate html
-                  COMMAND sphinx-build
-                          -b html
-                          -c ${CMAKE_CURRENT_BINARY_DIR}
-                          -d ${DOC_OUTPUT_DIR}/doctrees
-                          ${DOC_SOURCE_WORKDIR}
-                          ${DOC_OUTPUT_DIR}/html
-                  # create symlink to the html output directory for convenience
-                  COMMAND "${CMAKE_COMMAND}" -E create_symlink
-                          ${DOC_OUTPUT_DIR}/html
-                          ${CMAKE_BINARY_DIR}/html
-                  # copy Broccoli API reference into output dir if it exists
-                  COMMAND test -d ${CMAKE_BINARY_DIR}/aux/broccoli/doc/html && ( rm -rf ${CMAKE_BINARY_DIR}/html/broccoli-api && cp -r ${CMAKE_BINARY_DIR}/aux/broccoli/doc/html ${CMAKE_BINARY_DIR}/html/broccoli-api ) || true
-                  WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
-                  COMMENT "[Sphinx] Generating HTML policy script docs"
-                  # SOURCES just adds stuff to IDE projects as a convenience
-                  SOURCES ${DOC_SOURCES})
-
-# The "broxygenclean" target removes just the Sphinx input/output directories
-# from the build directory.
-add_custom_target(broxygenclean
-                  COMMAND "${CMAKE_COMMAND}" -E remove_directory
-                          ${DOC_SOURCE_WORKDIR}
-                  COMMAND "${CMAKE_COMMAND}" -E remove_directory
-                          ${DOC_OUTPUT_DIR}
-                  VERBATIM)
-
-add_dependencies(broxygen restdoc)
-
-add_custom_target(doc)
-add_custom_target(docclean)
-add_dependencies(doc broxygen)
-add_dependencies(docclean broxygenclean restclean)
+set(BROCCOLI_DOCS_SRC ${CMAKE_BINARY_DIR}/aux/broccoli/doc/html)
+set(BROCCOLI_DOCS_DST ${CMAKE_BINARY_DIR}/html/broccoli-api)
+set(SPHINX_INPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/sphinx_input)
+set(SPHINX_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/sphinx_output)
+set(BROXYGEN_SCRIPT_OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/broxygen_script_output)
+set(BROXYGEN_CACHE_DIR ${CMAKE_CURRENT_BINARY_DIR}/broxygen_cache)
+
+# Find out what BROPATH to use when executing bro.
+execute_process(COMMAND ${CMAKE_BINARY_DIR}/bro-path-dev
+                OUTPUT_VARIABLE BROPATH
+                RESULT_VARIABLE retval
+                OUTPUT_STRIP_TRAILING_WHITESPACE)
+if (NOT ${retval} EQUAL 0)
+    message(FATAL_ERROR "Problem setting BROPATH")
+endif ()
+
+set(BROMAGIC ${BRO_MAGIC_SOURCE_PATH})
+
+# Configure the Sphinx config file (expand variables CMake might know about).
+configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in
+               ${CMAKE_CURRENT_BINARY_DIR}/conf.py
+               @ONLY)
+
+configure_file(${CMAKE_CURRENT_SOURCE_DIR}/broxygen.conf.in
+               ${CMAKE_CURRENT_BINARY_DIR}/broxygen.conf
+               @ONLY)
+
+add_custom_target(sphinxdoc
+                  # Copy the template documentation to build directory to use as input tree
+                  # for Sphinx. This is needed because some parts are dynamically generated
+                  # in to that tree by Bro/Broxygen.
+                  COMMAND rsync -q -r --copy-links --times --delete
+                          --filter='protect scripts/*'
+                          ${CMAKE_CURRENT_SOURCE_DIR}/ ${SPHINX_INPUT_DIR}
+                  # Use Bro/Broxygen to dynamically generate reST for all Bro scripts.
+                  COMMAND BROPATH=${BROPATH}
+                          BROMAGIC=${BROMAGIC}
+                          ${CMAKE_BINARY_DIR}/src/bro
+                          -X ${CMAKE_CURRENT_BINARY_DIR}/broxygen.conf
+                          broxygen >/dev/null
+                  # Rsync over the generated reST to the Sphinx source tree in the build dir.
+                  COMMAND rsync -q -r --copy-links --times --delete --filter='protect *.bro'
+                          ${BROXYGEN_SCRIPT_OUTPUT}/ ${SPHINX_INPUT_DIR}/scripts
+                  # Rsync over Bro scripts to the Sphinx source tree in the build dir.
+                  # These are used by :download: references in the generated script docs.
+                  COMMAND rsync -q -r --copy-links --times --delete
+                          --filter='protect /base/bif/*' --filter='protect *.rst'
+                          --filter='include */' --filter='include *.bro' --filter='exclude *'
+                          ${CMAKE_SOURCE_DIR}/scripts/ ${SPHINX_INPUT_DIR}/scripts
+                  # Rsync over Bro scripts created by BIF compiler to the Sphinx source tree.
+                  COMMAND rsync -q -r --copy-links --times --delete
+                          --filter='protect *.rst' --filter='include */'
+                          --filter='include *.bro' --filter='exclude *'
+                          ${CMAKE_BINARY_DIR}/scripts/base/bif/
+                          ${SPHINX_INPUT_DIR}/scripts/base/bif
+                  # Use Sphinx to build HTML.
+                  COMMAND sphinx-build
+                          -b html
+                          -c ${CMAKE_CURRENT_BINARY_DIR}
+                          -d ${SPHINX_OUTPUT_DIR}/doctrees
+                          ${SPHINX_INPUT_DIR}
+                          ${SPHINX_OUTPUT_DIR}/html
+                  # Create symlink to the html output directory for convenience.
+                  COMMAND "${CMAKE_COMMAND}" -E create_symlink
+                          ${SPHINX_OUTPUT_DIR}/html
+                          ${CMAKE_BINARY_DIR}/html
+                  # Copy Broccoli API reference into output dir if it exists.
+                  COMMAND test -d ${BROCCOLI_DOCS_SRC} &&
+                          ( rm -rf ${BROCCOLI_DOCS_DST} &&
+                            cp -r ${BROCCOLI_DOCS_SRC} ${BROCCOLI_DOCS_DST} ) || true
+                  WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+                  COMMENT "[Sphinx] Generate HTML documentation in ${CMAKE_BINARY_DIR}/html")
+
+add_dependencies(sphinxdoc bro)
+
+add_custom_target(sphinxdoc_clean
+                  COMMAND "${CMAKE_COMMAND}" -E remove_directory ${SPHINX_INPUT_DIR}
+                  COMMAND "${CMAKE_COMMAND}" -E remove_directory ${SPHINX_OUTPUT_DIR}
+                  COMMAND "${CMAKE_COMMAND}" -E remove_directory ${BROXYGEN_SCRIPT_OUTPUT}
+                  COMMAND "${CMAKE_COMMAND}" -E remove_directory ${BROXYGEN_CACHE_DIR}
+                  VERBATIM)
+
+add_custom_target(doc)
+add_custom_target(docclean)
+add_dependencies(doc sphinxdoc)
+add_dependencies(docclean sphinxdoc_clean)
@@ -15,17 +15,16 @@ which adds some reST directives and roles that aid in generating useful
 index entries and cross-references. Other extensions can be added in
 a similar fashion.
 
-Either the ``make doc`` or ``make broxygen`` targets in the top-level
-Makefile can be used to locally render the reST files into HTML.
-Those targets depend on:
+The ``make doc`` target in the top-level Makefile can be used to locally
+render the reST files into HTML. That target depends on:
 
 * Python interpreter >= 2.5
 * `Sphinx <http://sphinx.pocoo.org/>`_ >= 1.0.1
 
 After completion, HTML documentation is symlinked in ``build/html``.
 
-There's also ``make docclean`` and ``make broxygenclean`` targets to
-clean the resulting documentation.
+There's also a ``make docclean`` target which deletes any files
+created during the documentation build process.
 
 Notes for Writing Documentation
 -------------------------------
9
doc/_static/basic.css
vendored
|
@ -439,8 +439,17 @@ td.linenos pre {
|
||||||
color: #aaa;
|
color: #aaa;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.highlight-guess {
|
||||||
|
overflow:auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.highlight-none {
|
||||||
|
overflow:auto;
|
||||||
|
}
|
||||||
|
|
||||||
table.highlighttable {
|
table.highlighttable {
|
||||||
margin-left: 0.5em;
|
margin-left: 0.5em;
|
||||||
|
overflow:scroll;
|
||||||
}
|
}
|
||||||
|
|
||||||
table.highlighttable td {
|
table.highlighttable td {
|
||||||
|
|
52
doc/_static/broxygen.css
vendored
|
@ -150,8 +150,14 @@ sup, sub {
|
||||||
line-height:0;
|
line-height:0;
|
||||||
}
|
}
|
||||||
|
|
||||||
pre {
|
pre, code {
|
||||||
white-space:pre;
|
white-space: pre;
|
||||||
|
overflow: auto;
|
||||||
|
margin-left: 2em;
|
||||||
|
margin-right: 2em;
|
||||||
|
margin-top: .5em;
|
||||||
|
margin-bottom: 1.5em;
|
||||||
|
word-wrap: normal;
|
||||||
}
|
}
|
||||||
|
|
||||||
pre, code, tt {
|
pre, code, tt {
|
||||||
|
@ -166,6 +172,10 @@ dl dt {
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
li dl dt {
|
||||||
|
font-weight: normal;
|
||||||
|
}
|
||||||
|
|
||||||
dd {
|
dd {
|
||||||
margin:0 0 20px 20px;
|
margin:0 0 20px 20px;
|
||||||
}
|
}
|
||||||
|
@ -174,6 +184,16 @@ small {
|
||||||
font-size:75%;
|
font-size:75%;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.small-text {
|
||||||
|
font-size:75%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.align-center {
|
||||||
|
display: block;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
a:link,
|
a:link,
|
||||||
a:visited,
|
a:visited,
|
||||||
a:active
|
a:active
|
||||||
|
@ -435,3 +455,31 @@ li {
|
||||||
margin-bottom: .5em;
|
margin-bottom: .5em;
|
||||||
margin-top: 0em;
|
margin-top: 0em;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.btest-cmd .hll {
|
||||||
|
font-weight: bold;
|
||||||
|
background: #FFFAE2;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btest-include .hll {
|
||||||
|
display: block;
|
||||||
|
text-align: center;
|
||||||
|
font-family: Palatino;
|
||||||
|
background: #FFFAE2;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btest-include .hll * {
|
||||||
|
color: #aaa;
|
||||||
|
}
|
||||||
|
|
||||||
|
.linenodiv pre {
|
||||||
|
margin-left: 0px;
|
||||||
|
margin-right: 0px;
|
||||||
|
width: 1.5em;
|
||||||
|
text-align: right;
|
||||||
|
background: #000;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btest-cmd .code pre, .btest-include .code pre {
|
||||||
|
margin-left: 0px;
|
||||||
|
}
|
|
@ -1,72 +0,0 @@
|
||||||
#! /usr/bin/env python
|
|
||||||
|
|
||||||
# This script automatically generates a reST documents that lists
|
|
||||||
# a collection of Bro scripts that are "grouped" together.
|
|
||||||
# The summary text (##! comments) of the script is embedded in the list
|
|
||||||
#
|
|
||||||
# 1st argument is the file containing list of groups
|
|
||||||
# 2nd argument is the directory containing ${group}_files lists of
|
|
||||||
# scripts that belong to the group and ${group}_doc_names lists of
|
|
||||||
# document names that can be supplied to a reST :doc: role
|
|
||||||
# 3rd argument is a directory in which write a ${group}.rst file that contains
|
|
||||||
# reST style references to script docs along with summary text contained
|
|
||||||
# in original script. If ${group} ends with "index", then the file
|
|
||||||
# is always clobbered by this script, but for other unique group names,
|
|
||||||
# this script will append to existing files.
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import string
|
|
||||||
|
|
||||||
group_list = sys.argv[1]
|
|
||||||
file_manifest_dir = sys.argv[2]
|
|
||||||
output_dir = sys.argv[3]
|
|
||||||
|
|
||||||
def make_group_file_index(dir_name, group_name):
|
|
||||||
group_file = os.path.join(dir_name, group_name + ".rst")
|
|
||||||
|
|
||||||
if not os.path.exists(group_file):
|
|
||||||
if not os.path.exists(os.path.dirname(group_file)):
|
|
||||||
os.makedirs(os.path.dirname(group_file))
|
|
||||||
|
|
||||||
if group_name.endswith("index"):
|
|
||||||
with open(group_file, 'w') as f_group_file:
|
|
||||||
f_group_file.write(":orphan:\n\n")
|
|
||||||
title = "Package Index: %s\n" % os.path.dirname(group_name)
|
|
||||||
f_group_file.write(title);
|
|
||||||
for n in range(len(title)):
|
|
||||||
f_group_file.write("=")
|
|
||||||
f_group_file.write("\n");
|
|
||||||
|
|
||||||
return group_file
|
|
||||||
|
|
||||||
with open(group_list, 'r') as f_group_list:
|
|
||||||
for group in f_group_list.read().splitlines():
|
|
||||||
#print group
|
|
||||||
group_file = make_group_file_index(output_dir, group)
|
|
||||||
file_manifest = os.path.join(file_manifest_dir, group + "_files")
|
|
||||||
doc_manifest = os.path.join(file_manifest_dir, group + "_doc_names")
|
|
||||||
src_files = []
|
|
||||||
doc_names = []
|
|
||||||
|
|
||||||
with open(file_manifest, 'r') as f_file_manifest:
|
|
||||||
src_files = f_file_manifest.read().splitlines()
|
|
||||||
|
|
||||||
with open(doc_manifest, 'r') as f_doc_manifest:
|
|
||||||
doc_names = f_doc_manifest.read().splitlines()
|
|
||||||
|
|
||||||
for i in range(len(src_files)):
|
|
||||||
src_file = src_files[i]
|
|
||||||
#print "\t" + src_file
|
|
||||||
summary_comments = []
|
|
||||||
with open(src_file, 'r') as f_src_file:
|
|
||||||
for line in f_src_file:
|
|
||||||
sum_pos = string.find(line, "##!")
|
|
||||||
if sum_pos != -1:
|
|
||||||
summary_comments.append(line[(sum_pos+3):])
|
|
||||||
#print summary_comments
|
|
||||||
|
|
||||||
with open(group_file, 'a') as f_group_file:
|
|
||||||
f_group_file.write("\n:doc:`/scripts/%s`\n" % doc_names[i])
|
|
||||||
for line in summary_comments:
|
|
||||||
f_group_file.write(" " + line)
|
|
80
doc/broids/index.rst
Normal file
|
@ -0,0 +1,80 @@
|
||||||
|
|
||||||
|
.. _bro-ids:
|
||||||
|
|
||||||
|
=======
|
||||||
|
Bro IDS
|
||||||
|
=======
|
||||||
|
|
||||||
|
An Intrusion Detection System (IDS) allows you to detect suspicious
|
||||||
|
activities happening on your network as a result of a past or active
|
||||||
|
attack. Because of its programming capabilities, Bro can easily be
|
||||||
|
configured to behave like traditional IDSs and detect common attacks
|
||||||
|
with well known patterns, or you can create your own scripts to detect
|
||||||
|
conditions specific to your particular case.
|
||||||
|
|
||||||
|
In the following sections, we present a few examples of common uses of
|
||||||
|
Bro as an IDS.
|
||||||
|
|
||||||
|
------------------------------------------------
|
||||||
|
Detecting an FTP Bruteforce attack and notifying
|
||||||
|
------------------------------------------------
|
||||||
|
|
||||||
|
For the purpose of this exercise, we define FTP bruteforcing as too many
|
||||||
|
rejected usernames and passwords occurring from a single address. We
|
||||||
|
start by defining a threshold for the number of attempts and a
|
||||||
|
monitoring interval in minutes as well as a new notice type.
|
||||||
|
|
||||||
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/protocols/ftp/detect-bruteforcing.bro
|
||||||
|
:lines: 9-25
|
||||||
|
|
||||||
|
Now, using the ftp_reply event, we check for error codes from the `500
|
||||||
|
series <http://en.wikipedia.org/wiki/List_of_FTP_server_return_codes>`_
|
||||||
|
for the "USER" and "PASS" commands, representing rejected usernames or
|
||||||
|
passwords. For this, we can use the :bro:see:`FTP::parse_ftp_reply_code`
|
||||||
|
function to break down the reply code and check if the first digit is a
|
||||||
|
"5" or not. If true, we then use the :ref:`Summary Statistics Framework
|
||||||
|
<sumstats-framework>` to keep track of the number of failed attempts.
|
||||||
|
|
||||||
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/protocols/ftp/detect-bruteforcing.bro
|
||||||
|
:lines: 52-60
|
||||||
|
|
||||||
|
Next, we use the SumStats framework to raise a notice of
|
||||||
|
the attack when the number of failed attempts exceeds the specified
|
||||||
|
threshold during the measuring interval.
|
||||||
|
|
||||||
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/protocols/ftp/detect-bruteforcing.bro
|
||||||
|
:lines: 28-50
|
||||||
|
|
||||||
|
Below is the final code for our script.
|
||||||
|
|
||||||
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/protocols/ftp/detect-bruteforcing.bro
|
||||||
|
|
||||||
|
.. btest:: ftp-bruteforce
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/ftp/bruteforce.pcap protocols/ftp/detect-bruteforcing.bro
|
||||||
|
@TEST-EXEC: btest-rst-include notice.log
|
||||||
|
|
||||||
|
As a final note, the :doc:`detect-bruteforcing.bro
|
||||||
|
</scripts/policy/protocols/ftp/detect-bruteforcing.bro>` script above is
|
||||||
|
included with Bro out of the box, so you only need to load it at startup
|
||||||
|
to instruct Bro to detect and notify of FTP bruteforce attacks.
|
||||||
|
|
||||||
|
-------------
|
||||||
|
Other Attacks
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Detecting SQL Injection attacks
|
||||||
|
-------------------------------
|
||||||
|
|
||||||
|
Checking files against known malware hashes
|
||||||
|
-------------------------------------------
|
||||||
|
|
||||||
|
Files transmitted on your network could either be completely harmless or
|
||||||
|
contain viruses and other threats. One possible action against this
|
||||||
|
threat is to compute the hashes of the files and compare them against a
|
||||||
|
list of known malware hashes. Bro simplifies this task by offering a
|
||||||
|
:doc:`detect-MHR.bro </scripts/policy/frameworks/files/detect-MHR.bro>`
|
||||||
|
script that creates and compares hashes against the `Malware Hash
|
||||||
|
Registry <https://www.team-cymru.org/Services/MHR/>`_ maintained by Team
|
||||||
|
Cymru. You only need to load this script along with your other scripts
|
||||||
|
at startup time.
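For instance, a minimal sketch of enabling it from ``local.bro`` might look as
follows (the exact load path can differ between installations, so treat it as
an assumption rather than a prescription):

.. code:: bro

    # Hash transferred files and compare the hashes against Team Cymru's
    # Malware Hash Registry; a notice is raised on a match.
    @load frameworks/files/detect-MHR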
|
1
doc/broxygen.conf.in
Normal file
|
@ -0,0 +1 @@
|
||||||
|
script * @BROXYGEN_SCRIPT_OUTPUT@/
|
|
@ -17,7 +17,7 @@ extensions = []
|
||||||
# If extensions (or modules to document with autodoc) are in another directory,
|
# If extensions (or modules to document with autodoc) are in another directory,
|
||||||
# add these directories to sys.path here. If the directory is relative to the
|
# add these directories to sys.path here. If the directory is relative to the
|
||||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||||
sys.path.insert(0, os.path.abspath('sphinx-sources/ext'))
|
sys.path.insert(0, os.path.abspath('sphinx_input/ext'))
|
||||||
|
|
||||||
# ----- Begin of BTest configuration. -----
|
# ----- Begin of BTest configuration. -----
|
||||||
btest = os.path.abspath("@CMAKE_SOURCE_DIR@/aux/btest")
|
btest = os.path.abspath("@CMAKE_SOURCE_DIR@/aux/btest")
|
||||||
|
@ -33,6 +33,13 @@ btest_base="@CMAKE_SOURCE_DIR@/testing/btest"
|
||||||
btest_tests="doc/sphinx"
|
btest_tests="doc/sphinx"
|
||||||
# ----- End of BTest configuration. -----
|
# ----- End of BTest configuration. -----
|
||||||
|
|
||||||
|
# ----- Begin of Broxygen configuration. -----
|
||||||
|
extensions += ["broxygen"]
|
||||||
|
bro_binary = os.path.abspath("@CMAKE_SOURCE_DIR@/build/src/bro")
|
||||||
|
broxygen_cache="@BROXYGEN_CACHE_DIR@"
|
||||||
|
os.environ["BROPATH"] = "@BROPATH@"
|
||||||
|
os.environ["BROMAGIC"] = "@BROMAGIC@"
|
||||||
|
# ----- End of Broxygen configuration. -----
|
||||||
|
|
||||||
# -- General configuration -----------------------------------------------------
|
# -- General configuration -----------------------------------------------------
|
||||||
|
|
||||||
|
@ -47,7 +54,7 @@ os.environ["BRO_SRC_ROOT"] = "@CMAKE_SOURCE_DIR@"
|
||||||
os.environ["DOC_ROOT"] = "@CMAKE_SOURCE_DIR@/doc"
|
os.environ["DOC_ROOT"] = "@CMAKE_SOURCE_DIR@/doc"
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
templates_path = ['sphinx-sources/_templates', 'sphinx-sources/_static']
|
templates_path = ['sphinx_input/_templates', 'sphinx_input/_static']
|
||||||
|
|
||||||
# The suffix of source filenames.
|
# The suffix of source filenames.
|
||||||
source_suffix = '.rst'
|
source_suffix = '.rst'
|
||||||
|
@ -141,7 +148,7 @@ html_theme_options = { }
|
||||||
# Add any paths that contain custom static files (such as style sheets) here,
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
# relative to this directory. They are copied after the builtin static files,
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
html_static_path = ['sphinx-sources/_static']
|
html_static_path = ['sphinx_input/_static']
|
||||||
|
|
||||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||||
# using the given strftime format.
|
# using the given strftime format.
|
||||||
|
|
|
@ -191,6 +191,10 @@ class BroNotices(Index):
|
||||||
|
|
||||||
def generate(self, docnames=None):
|
def generate(self, docnames=None):
|
||||||
content = {}
|
content = {}
|
||||||
|
|
||||||
|
if 'notices' not in self.domain.env.domaindata['bro']:
|
||||||
|
return content, False
|
||||||
|
|
||||||
for n in self.domain.env.domaindata['bro']['notices']:
|
for n in self.domain.env.domaindata['bro']['notices']:
|
||||||
modname = n[0].split("::")[0]
|
modname = n[0].split("::")[0]
|
||||||
entries = content.setdefault(modname, [])
|
entries = content.setdefault(modname, [])
|
||||||
|
|
317
doc/ext/broxygen.py
Normal file
|
@ -0,0 +1,317 @@
|
||||||
|
"""
|
||||||
|
Broxygen domain for Sphinx.
|
||||||
|
|
||||||
|
Adds directives that allow Sphinx to invoke Bro in order to generate script
|
||||||
|
reference documentation on the fly. The directives are:
|
||||||
|
|
||||||
|
broxygen:package
|
||||||
|
- Shows links to all scripts contained within matching package(s).
|
||||||
|
broxygen:package_index
|
||||||
|
- An index with links to matching package document(s).
|
||||||
|
broxygen:script
|
||||||
|
- Reference for matching script(s) (i.e. everything declared by the script).
|
||||||
|
broxygen:script_summary
|
||||||
|
- Shows a link to matching script(s) with their summary-section comments.
|
||||||
|
broxygen:script_index
|
||||||
|
- An index with links to all matching scripts.
|
||||||
|
broxygen:proto_analyzer
|
||||||
|
- All protocol analyzers and their components (events/bifs, etc.)
|
||||||
|
broxygen:file_analyzer
|
||||||
|
- All file analyzers and their components (events/bifs, etc.)
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from sphinx.domains import Domain, ObjType
|
||||||
|
from sphinx.locale import l_
|
||||||
|
from docutils.parsers.rst.directives.misc import Include
|
||||||
|
|
||||||
|
|
||||||
|
App = None
|
||||||
|
|
||||||
|
|
||||||
|
def info(msg):
|
||||||
|
"""Use Sphinx builder to output a console message."""
|
||||||
|
global App
|
||||||
|
from sphinx.util.console import blue
|
||||||
|
App.builder.info(blue(msg))
|
||||||
|
|
||||||
|
|
||||||
|
def pattern_to_filename_component(pattern):
|
||||||
|
"""Replace certain characters in Broxygen config file target pattern.
|
||||||
|
|
||||||
|
Such that it can be used as part of a (sane) filename.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return pattern.replace("/", ".").replace("*", "star")
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_dir(path):
|
||||||
|
"""Should act like ``mkdir -p``."""
|
||||||
|
import os
|
||||||
|
import errno
|
||||||
|
|
||||||
|
try:
|
||||||
|
os.makedirs(path)
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno != errno.EEXIST:
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
def generate_config(env, type, pattern):
|
||||||
|
"""Create a Broxygen config file for a particular target.
|
||||||
|
|
||||||
|
It can be used by Bro to generate reST docs for that target.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
from sphinx.errors import SphinxError
|
||||||
|
|
||||||
|
work_dir = env.config.broxygen_cache
|
||||||
|
|
||||||
|
if not work_dir:
|
||||||
|
raise SphinxError("broxygen_cache not set in sphinx config file")
|
||||||
|
|
||||||
|
ensure_dir(work_dir)
|
||||||
|
prefix = "{0}-{1}-".format(type, pattern_to_filename_component(pattern))
|
||||||
|
(fd, cfg) = tempfile.mkstemp(suffix=".cfg", prefix=prefix, dir=work_dir)
|
||||||
|
generated_file = "{0}.rst".format(cfg)
|
||||||
|
config = "{0}\t{1}\t{2}".format(type, pattern, generated_file)
|
||||||
|
f = os.fdopen(fd, "w")
|
||||||
|
f.write(config)
|
||||||
|
f.close()
|
||||||
|
return (cfg, generated_file)
|
||||||
|
|
||||||
|
|
||||||
|
def generate_target(env, type, pattern):
|
||||||
|
"""Create a Broxygen target and build it.
|
||||||
|
|
||||||
|
For a target which hasn't been referenced by any other script, this function
|
||||||
|
creates an associated config file then uses Bro w/ it to build the target
|
||||||
|
and stores the target information in the build environment.
|
||||||
|
|
||||||
|
If a script references a target that's already found in the build
|
||||||
|
environment, the results of the previous build are re-used.
|
||||||
|
|
||||||
|
"""
|
||||||
|
app_data = env.domaindata["broxygen"]
|
||||||
|
|
||||||
|
if (type, pattern) in app_data["targets"]:
|
||||||
|
info("Broxygen has cached doc for target '{0} {1}'".format(
|
||||||
|
type, pattern))
|
||||||
|
return app_data["targets"][(type, pattern)]
|
||||||
|
|
||||||
|
(cfg, gend_file) = generate_config(env, type, pattern)
|
||||||
|
target = BroxygenTarget(type, pattern, cfg, gend_file)
|
||||||
|
app_data["targets"][(type, pattern)] = target
|
||||||
|
build_target(env, target)
|
||||||
|
info("Broxygen built target '{0} {1}'".format(type, pattern))
|
||||||
|
return target
|
||||||
|
|
||||||
|
|
||||||
|
def build_target(env, target):
|
||||||
|
"""Invoke a Bro process to build a Broxygen target."""
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
path_to_bro = env.config.bro_binary
|
||||||
|
|
||||||
|
if not path_to_bro:
|
||||||
|
raise SphinxError("'bro' not set in sphinx config file (path to bro)")
|
||||||
|
|
||||||
|
bro_cmd = "{0} -X {1} broxygen".format(path_to_bro, target.config_file)
|
||||||
|
cwd = os.getcwd()
|
||||||
|
os.chdir(os.path.dirname(target.config_file))
|
||||||
|
|
||||||
|
try:
|
||||||
|
subprocess.check_output(bro_cmd, stderr=subprocess.STDOUT, shell=True)
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
from sphinx.errors import SphinxError
|
||||||
|
raise SphinxError(
|
||||||
|
"Command '{0}' returned non-zero exit status {1}: {2}".format(
|
||||||
|
e.cmd, e.returncode, e.output))
|
||||||
|
finally:
|
||||||
|
os.chdir(cwd)
|
||||||
|
|
||||||
|
|
||||||
|
class BroxygenTarget(object):
|
||||||
|
|
||||||
|
"""Some portion of reST documentation that Bro knows how to generate.
|
||||||
|
|
||||||
|
A target is identified by its type and pattern. E.g. type "script" and
|
||||||
|
pattern "broxygen/example.bro".
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, type, pattern, config_file, generated_file):
|
||||||
|
self.type = type
|
||||||
|
self.pattern = pattern
|
||||||
|
self.config_file = config_file
|
||||||
|
self.generated_file = generated_file
|
||||||
|
self.used_in_docs = set()
|
||||||
|
|
||||||
|
|
||||||
|
class BroxygenDirective(Include):
|
||||||
|
|
||||||
|
"""Base class for Broxygen directives.
|
||||||
|
|
||||||
|
It can use Bro to generate reST documentation on the fly and embed it in
|
||||||
|
the document at the location of the directive just like the ``.. include::``
|
||||||
|
directive. The only argument is a pattern to identify to Bro which
|
||||||
|
pieces of documentation it needs to create.
|
||||||
|
"""
|
||||||
|
|
||||||
|
required_arguments = 1
|
||||||
|
has_content = False
|
||||||
|
|
||||||
|
target_type = None
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
env = self.state.document.settings.env
|
||||||
|
info("Broxygen running .. {0}:: {1} in {2}".format(
|
||||||
|
self.name, self.arguments[0], env.docname))
|
||||||
|
target = generate_target(env, self.target_type, self.arguments[0])
|
||||||
|
target.used_in_docs.add(env.docname)
|
||||||
|
self.arguments = [target.generated_file]
|
||||||
|
return super(BroxygenDirective, self).run()
|
||||||
|
|
||||||
|
|
||||||
|
class PackageDirective(BroxygenDirective):
|
||||||
|
|
||||||
|
target_type = "package"
|
||||||
|
|
||||||
|
|
||||||
|
class PackageIndexDirective(BroxygenDirective):
|
||||||
|
|
||||||
|
target_type = "package_index"
|
||||||
|
|
||||||
|
|
||||||
|
class ScriptDirective(BroxygenDirective):
|
||||||
|
|
||||||
|
target_type = "script"
|
||||||
|
|
||||||
|
|
||||||
|
class ScriptSummaryDirective(BroxygenDirective):
|
||||||
|
|
||||||
|
target_type = "script_summary"
|
||||||
|
|
||||||
|
|
||||||
|
class ScriptIndexDirective(BroxygenDirective):
|
||||||
|
|
||||||
|
target_type = "script_index"
|
||||||
|
|
||||||
|
|
||||||
|
class ProtoAnalyzerDirective(BroxygenDirective):
|
||||||
|
|
||||||
|
target_type = "proto_analyzer"
|
||||||
|
|
||||||
|
|
||||||
|
class FileAnalyzerDirective(BroxygenDirective):
|
||||||
|
|
||||||
|
target_type = "file_analyzer"
|
||||||
|
|
||||||
|
|
||||||
|
class IdentifierDirective(BroxygenDirective):
|
||||||
|
|
||||||
|
target_type = "identifier"
|
||||||
|
|
||||||
|
|
||||||
|
class BroxygenDomain(Domain):
|
||||||
|
|
||||||
|
name = "broxygen"
|
||||||
|
label = "Broxygen"
|
||||||
|
|
||||||
|
object_types = {
|
||||||
|
"package": ObjType(l_("package")),
|
||||||
|
"package_index": ObjType(l_("package_index")),
|
||||||
|
"script": ObjType(l_("script")),
|
||||||
|
"script_summary": ObjType(l_("script_summary")),
|
||||||
|
"script_index": ObjType(l_("script_index")),
|
||||||
|
"proto_analyzer": ObjType(l_("proto_analyzer")),
|
||||||
|
"file_analyzer": ObjType(l_("file_analyzer")),
|
||||||
|
"identifier": ObjType(l_("identifier")),
|
||||||
|
}
|
||||||
|
|
||||||
|
directives = {
|
||||||
|
"package": PackageDirective,
|
||||||
|
"package_index": PackageIndexDirective,
|
||||||
|
"script": ScriptDirective,
|
||||||
|
"script_summary": ScriptSummaryDirective,
|
||||||
|
"script_index": ScriptIndexDirective,
|
||||||
|
"proto_analyzer": ProtoAnalyzerDirective,
|
||||||
|
"file_analyzer": FileAnalyzerDirective,
|
||||||
|
"identifier": IdentifierDirective,
|
||||||
|
}
|
||||||
|
|
||||||
|
roles = {}
|
||||||
|
|
||||||
|
initial_data = {
|
||||||
|
"targets": {}
|
||||||
|
}
|
||||||
|
|
||||||
|
def clear_doc(self, docname):
|
||||||
|
"""Update Broxygen targets referenced in docname.
|
||||||
|
|
||||||
|
If it's the last place the target was referenced, remove it from
|
||||||
|
the build environment and delete any generated config/reST files
|
||||||
|
associated with it from the cache.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
|
||||||
|
stale_targets = []
|
||||||
|
|
||||||
|
for (type, pattern), target in self.data["targets"].items():
|
||||||
|
if docname in target.used_in_docs:
|
||||||
|
target.used_in_docs.remove(docname)
|
||||||
|
|
||||||
|
if not target.used_in_docs:
|
||||||
|
stale_targets.append(target)
|
||||||
|
|
||||||
|
for target in stale_targets:
|
||||||
|
del self.data["targets"][(target.type, target.pattern)]
|
||||||
|
os.remove(target.config_file)
|
||||||
|
os.remove(target.generated_file)
|
||||||
|
|
||||||
|
def get_objects(self):
|
||||||
|
"""No Broxygen-generated content is itself linkable/searchable."""
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
def env_get_outdated_hook(app, env, added, changed, removed):
|
||||||
|
"""Check whether to re-read any documents referencing Broxygen targets.
|
||||||
|
|
||||||
|
To do that we have to ask Bro to rebuild each target and compare the
|
||||||
|
before and after modification times of the generated reST output file.
|
||||||
|
If Bro changed it, then the document containing the Broxygen directive
|
||||||
|
needs to be re-read.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
|
||||||
|
reread = set()
|
||||||
|
|
||||||
|
for target in app.env.domaindata["broxygen"]["targets"].values():
|
||||||
|
before_mtime = os.stat(target.generated_file)
|
||||||
|
build_target(env, target)
|
||||||
|
after_mtime = os.stat(target.generated_file)
|
||||||
|
|
||||||
|
if after_mtime > before_mtime:
|
||||||
|
info("Broxygen target '{0} {1}' outdated".format(
|
||||||
|
target.type, target.pattern))
|
||||||
|
|
||||||
|
for docname in target.used_in_docs:
|
||||||
|
if docname not in removed:
|
||||||
|
info(" in document: {0}".format(docname))
|
||||||
|
reread.add(docname)
|
||||||
|
|
||||||
|
return list(reread)
|
||||||
|
|
||||||
|
|
||||||
|
def setup(app):
|
||||||
|
global App
|
||||||
|
App = app
|
||||||
|
app.add_domain(BroxygenDomain)
|
||||||
|
app.add_config_value("bro_binary", None, "env")
|
||||||
|
app.add_config_value("broxygen_cache", None, "env")
|
||||||
|
app.connect("env-get-outdated", env_get_outdated_hook)
|
|
@ -1,3 +1,6 @@
|
||||||
|
|
||||||
|
.. _file-analysis-framework:
|
||||||
|
|
||||||
=============
|
=============
|
||||||
File Analysis
|
File Analysis
|
||||||
=============
|
=============
|
||||||
|
@ -31,40 +34,13 @@ some information about the file such as which network
|
||||||
:bro:see:`connection` and protocol are transporting the file, how many
|
:bro:see:`connection` and protocol are transporting the file, how many
|
||||||
bytes have been transferred so far, and its MIME type.
|
bytes have been transferred so far, and its MIME type.
|
||||||
|
|
||||||
.. code:: bro
|
Here's a simple example:
|
||||||
|
|
||||||
event connection_state_remove(c: connection)
|
.. btest-include:: ${DOC_ROOT}/frameworks/file_analysis_01.bro
|
||||||
{
|
|
||||||
print "connection_state_remove";
|
|
||||||
print c$uid;
|
|
||||||
print c$id;
|
|
||||||
for ( s in c$service )
|
|
||||||
print s;
|
|
||||||
}
|
|
||||||
|
|
||||||
event file_state_remove(f: fa_file)
|
.. btest:: file-analysis-01
|
||||||
{
|
|
||||||
print "file_state_remove";
|
|
||||||
print f$id;
|
|
||||||
for ( cid in f$conns )
|
|
||||||
{
|
|
||||||
print f$conns[cid]$uid;
|
|
||||||
print cid;
|
|
||||||
}
|
|
||||||
print f$source;
|
|
||||||
}
|
|
||||||
|
|
||||||
might give output like::
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/http/get.trace ${DOC_ROOT}/frameworks/file_analysis_01.bro
|
||||||
|
|
||||||
file_state_remove
|
|
||||||
Cx92a0ym5R8
|
|
||||||
REs2LQfVW2j
|
|
||||||
[orig_h=10.0.0.7, orig_p=59856/tcp, resp_h=192.150.187.43, resp_p=80/tcp]
|
|
||||||
HTTP
|
|
||||||
connection_state_remove
|
|
||||||
REs2LQfVW2j
|
|
||||||
[orig_h=10.0.0.7, orig_p=59856/tcp, resp_h=192.150.187.43, resp_p=80/tcp]
|
|
||||||
HTTP
|
|
||||||
|
|
||||||
This doesn't perform any interesting analysis yet, but does highlight
|
This doesn't perform any interesting analysis yet, but does highlight
|
||||||
the similarity between analysis of connections and files. Connections
|
the similarity between analysis of connections and files. Connections
|
||||||
|
@ -90,27 +66,16 @@ will write the contents of the file out to the local file system).
|
||||||
In the future there may be file analyzers that automatically attach to
|
In the future there may be file analyzers that automatically attach to
|
||||||
files based on heuristics, similar to the Dynamic Protocol Detection
|
files based on heuristics, similar to the Dynamic Protocol Detection
|
||||||
(DPD) framework for connections, but many will always require an
|
(DPD) framework for connections, but many will always require an
|
||||||
explicit attachment decision:
|
explicit attachment decision.
|
||||||
|
|
||||||
.. code:: bro
|
Here's a simple example of how to use the MD5 file analyzer to
|
||||||
|
calculate the MD5 of plain text files:
|
||||||
|
|
||||||
event file_new(f: fa_file)
|
.. btest-include:: ${DOC_ROOT}/frameworks/file_analysis_02.bro
|
||||||
{
|
|
||||||
print "new file", f$id;
|
|
||||||
if ( f?$mime_type && f$mime_type == "text/plain" )
|
|
||||||
Files::add_analyzer(f, Files::ANALYZER_MD5);
|
|
||||||
}
|
|
||||||
|
|
||||||
event file_hash(f: fa_file, kind: string, hash: string)
|
.. btest:: file-analysis-02
|
||||||
{
|
|
||||||
print "file_hash", f$id, kind, hash;
|
|
||||||
}
|
|
||||||
|
|
||||||
this script calculates MD5s for all plain text files and might give
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/http/get.trace ${DOC_ROOT}/frameworks/file_analysis_02.bro
|
||||||
output::
|
|
||||||
|
|
||||||
new file, Cx92a0ym5R8
|
|
||||||
file_hash, Cx92a0ym5R8, md5, 397168fd09991a0e712254df7bc639ac
|
|
||||||
|
|
||||||
Some file analyzers might have tunable parameters that need to be
|
Some file analyzers might have tunable parameters that need to be
|
||||||
specified in the call to :bro:see:`Files::add_analyzer`:
|
specified in the call to :bro:see:`Files::add_analyzer`:
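One commonly cited case is the extract analyzer, which takes the name of the
output file as a parameter. A hedged sketch, with a made-up file name:

.. code:: bro

    event file_new(f: fa_file)
        {
        # Illustrative only: write the reassembled file contents to disk.
        Files::add_analyzer(f, Files::ANALYZER_EXTRACT,
                            [$extract_filename="extracted_file"]);
        }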
|
||||||
|
@ -144,41 +109,19 @@ in the same way it analyzes files that it sees coming over traffic from
|
||||||
a network interface it's monitoring. It only requires a call to
|
a network interface it's monitoring. It only requires a call to
|
||||||
:bro:see:`Input::add_analysis`:
|
:bro:see:`Input::add_analysis`:
|
||||||
|
|
||||||
.. code:: bro
|
.. btest-include:: ${DOC_ROOT}/frameworks/file_analysis_03.bro
|
||||||
|
|
||||||
redef exit_only_after_terminate = T;
|
|
||||||
|
|
||||||
event file_new(f: fa_file)
|
|
||||||
{
|
|
||||||
print "new file", f$id;
|
|
||||||
Files::add_analyzer(f, Files::ANALYZER_MD5);
|
|
||||||
}
|
|
||||||
|
|
||||||
event file_state_remove(f: fa_file)
|
|
||||||
{
|
|
||||||
Input::remove(f$source);
|
|
||||||
terminate();
|
|
||||||
}
|
|
||||||
|
|
||||||
event file_hash(f: fa_file, kind: string, hash: string)
|
|
||||||
{
|
|
||||||
print "file_hash", f$id, kind, hash;
|
|
||||||
}
|
|
||||||
|
|
||||||
event bro_init()
|
|
||||||
{
|
|
||||||
local source: string = "./myfile";
|
|
||||||
Input::add_analysis([$source=source, $name=source]);
|
|
||||||
}
|
|
||||||
|
|
||||||
Note that the "source" field of :bro:see:`fa_file` corresponds to the
|
Note that the "source" field of :bro:see:`fa_file` corresponds to the
|
||||||
"name" field of :bro:see:`Input::AnalysisDescription` since that is what
|
"name" field of :bro:see:`Input::AnalysisDescription` since that is what
|
||||||
the input framework uses to uniquely identify an input stream.
|
the input framework uses to uniquely identify an input stream.
|
||||||
|
|
||||||
The output of the above script may be::
|
The output of the above script may be (assuming a file called "myfile"
|
||||||
|
exists):
|
||||||
|
|
||||||
new file, G1fS2xthS4l
|
.. btest:: file-analysis-03
|
||||||
file_hash, G1fS2xthS4l, md5, 54098b367d2e87b078671fad4afb9dbb
|
|
||||||
|
@TEST-EXEC: echo "Hello world" > myfile
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro ${DOC_ROOT}/frameworks/file_analysis_03.bro
|
||||||
|
|
||||||
Nothing that special, but it at least verifies the MD5 file analyzer
|
Nothing that special, but it at least verifies the MD5 file analyzer
|
||||||
saw all the bytes of the input file and calculated the checksum
|
saw all the bytes of the input file and calculated the checksum
|
||||||
|
|
20
doc/frameworks/file_analysis_01.bro
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
event connection_state_remove(c: connection)
|
||||||
|
{
|
||||||
|
print "connection_state_remove";
|
||||||
|
print c$uid;
|
||||||
|
print c$id;
|
||||||
|
for ( s in c$service )
|
||||||
|
print s;
|
||||||
|
}
|
||||||
|
|
||||||
|
event file_state_remove(f: fa_file)
|
||||||
|
{
|
||||||
|
print "file_state_remove";
|
||||||
|
print f$id;
|
||||||
|
for ( cid in f$conns )
|
||||||
|
{
|
||||||
|
print f$conns[cid]$uid;
|
||||||
|
print cid;
|
||||||
|
}
|
||||||
|
print f$source;
|
||||||
|
}
|
11
doc/frameworks/file_analysis_02.bro
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
event file_new(f: fa_file)
|
||||||
|
{
|
||||||
|
print "new file", f$id;
|
||||||
|
if ( f?$mime_type && f$mime_type == "text/plain" )
|
||||||
|
Files::add_analyzer(f, Files::ANALYZER_MD5);
|
||||||
|
}
|
||||||
|
|
||||||
|
event file_hash(f: fa_file, kind: string, hash: string)
|
||||||
|
{
|
||||||
|
print "file_hash", f$id, kind, hash;
|
||||||
|
}
|
25
doc/frameworks/file_analysis_03.bro
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
redef exit_only_after_terminate = T;
|
||||||
|
|
||||||
|
event file_new(f: fa_file)
|
||||||
|
{
|
||||||
|
print "new file", f$id;
|
||||||
|
Files::add_analyzer(f, Files::ANALYZER_MD5);
|
||||||
|
}
|
||||||
|
|
||||||
|
event file_state_remove(f: fa_file)
|
||||||
|
{
|
||||||
|
print "file_state_remove";
|
||||||
|
Input::remove(f$source);
|
||||||
|
terminate();
|
||||||
|
}
|
||||||
|
|
||||||
|
event file_hash(f: fa_file, kind: string, hash: string)
|
||||||
|
{
|
||||||
|
print "file_hash", f$id, kind, hash;
|
||||||
|
}
|
||||||
|
|
||||||
|
event bro_init()
|
||||||
|
{
|
||||||
|
local source: string = "./myfile";
|
||||||
|
Input::add_analysis([$source=source, $name=source]);
|
||||||
|
}
|
|
@ -11,10 +11,41 @@ GeoLocation
|
||||||
to find the geographic location for an IP address. Bro has support
|
to find the geographic location for an IP address. Bro has support
|
||||||
for the `GeoIP library <http://www.maxmind.com/app/c>`__ at the
|
for the `GeoIP library <http://www.maxmind.com/app/c>`__ at the
|
||||||
policy script level beginning with release 1.3 to account for this
|
policy script level beginning with release 1.3 to account for this
|
||||||
need.
|
need. To use this functionality, you need to first install the libGeoIP
|
||||||
|
software, and then install the GeoLite city database before building
|
||||||
|
Bro.
|
||||||
|
|
||||||
.. contents::
|
.. contents::
|
||||||
|
|
||||||
|
Install libGeoIP
|
||||||
|
----------------
|
||||||
|
|
||||||
|
* FreeBSD:
|
||||||
|
|
||||||
|
.. console::
|
||||||
|
|
||||||
|
sudo pkg_add -r GeoIP
|
||||||
|
|
||||||
|
* RPM/RedHat-based Linux:
|
||||||
|
|
||||||
|
.. console::
|
||||||
|
|
||||||
|
sudo yum install GeoIP-devel
|
||||||
|
|
||||||
|
* DEB/Debian-based Linux:
|
||||||
|
|
||||||
|
.. console::
|
||||||
|
|
||||||
|
sudo apt-get install libgeoip-dev
|
||||||
|
|
||||||
|
* Mac OS X:
|
||||||
|
|
||||||
|
Vanilla OS X installations don't ship with libGeoIP, but if
|
||||||
|
installed from your preferred package management system (e.g.
|
||||||
|
MacPorts, Fink, or Homebrew), they should be automatically detected
|
||||||
|
and Bro will compile against them.
|
||||||
|
|
||||||
|
|
||||||
GeoIPLite Database Installation
|
GeoIPLite Database Installation
|
||||||
------------------------------------
|
------------------------------------
|
||||||
|
|
||||||
|
@ -22,39 +53,23 @@ A country database for GeoIPLite is included when you do the C API
|
||||||
install, but for Bro, we are using the city database which includes
|
install, but for Bro, we are using the city database which includes
|
||||||
cities and regions in addition to countries.
|
cities and regions in addition to countries.
|
||||||
|
|
||||||
`Download <http://www.maxmind.com/app/geolitecity>`__ the geolitecity
|
`Download <http://www.maxmind.com/app/geolitecity>`__ the GeoLite city
|
||||||
binary database and follow the directions to install it.
|
binary database.
|
||||||
|
|
||||||
FreeBSD Quick Install
|
.. console::
|
||||||
---------------------
|
|
||||||
|
|
||||||
.. console::
|
|
||||||
|
|
||||||
pkg_add -r GeoIP
|
|
||||||
wget http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
|
|
||||||
gunzip GeoLiteCity.dat.gz
|
|
||||||
mv GeoLiteCity.dat /usr/local/share/GeoIP/GeoIPCity.dat
|
|
||||||
|
|
||||||
# Set your environment correctly before running Bro's configure script
|
|
||||||
export CFLAGS=-I/usr/local/include
|
|
||||||
export LDFLAGS=-L/usr/local/lib
|
|
||||||
|
|
||||||
|
|
||||||
CentOS Quick Install
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
.. console::
|
|
||||||
|
|
||||||
yum install GeoIP-devel
|
|
||||||
|
|
||||||
wget http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
|
wget http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
|
||||||
gunzip GeoLiteCity.dat.gz
|
gunzip GeoLiteCity.dat.gz
|
||||||
mkdir -p /var/lib/GeoIP/
|
|
||||||
mv GeoLiteCity.dat /var/lib/GeoIP/GeoIPCity.dat
|
|
||||||
|
|
||||||
# Set your environment correctly before running Bro's configure script
|
Next, the file needs to be put in the database directory. This directory
|
||||||
export CFLAGS=-I/usr/local/include
|
should already exist and will vary depending on which platform and package
|
||||||
export LDFLAGS=-L/usr/local/lib
|
you are using. For FreeBSD, use ``/usr/local/share/GeoIP``. For Linux,
|
||||||
|
use ``/usr/share/GeoIP`` or ``/var/lib/GeoIP`` (choose whichever one
|
||||||
|
already exists).
|
||||||
|
|
||||||
|
.. console::
|
||||||
|
|
||||||
|
mv GeoLiteCity.dat <path_to_database_dir>/GeoIPCity.dat
|
||||||
|
|
||||||
|
|
||||||
Usage
|
Usage
|
||||||
|
@ -67,8 +82,8 @@ functionality:
|
||||||
|
|
||||||
function lookup_location(a:addr): geo_location
|
function lookup_location(a:addr): geo_location
|
||||||
|
|
||||||
There is also the ``geo_location`` data structure that is returned
|
There is also the :bro:see:`geo_location` data structure that is returned
|
||||||
from the ``lookup_location`` function:
|
from the :bro:see:`lookup_location` function:
|
||||||
|
|
||||||
.. code:: bro
|
.. code:: bro
|
||||||
|
|
||||||
|
|
|
@ -13,4 +13,5 @@ Frameworks
|
||||||
logging
|
logging
|
||||||
notice
|
notice
|
||||||
signatures
|
signatures
|
||||||
|
sumstats
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
|
|
||||||
|
.. _framework-input:
|
||||||
|
|
||||||
===============
|
===============
|
||||||
Input Framework
|
Input Framework
|
||||||
===============
|
===============
|
||||||
|
@ -260,8 +262,13 @@ to optimize the speed of the input framework. It can generate arbitrary
|
||||||
amounts of semi-random data in all Bro data types supported by the input
|
amounts of semi-random data in all Bro data types supported by the input
|
||||||
framework.
|
framework.
|
||||||
|
|
||||||
In the future, the input framework will get support for new data sources
|
Currently, Bro supports the following readers in addition to the
|
||||||
like, for example, different databases.
|
aforementioned ones:
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
logging-input-sqlite
|
||||||
|
|
||||||
Add_table options
|
Add_table options
|
||||||
-----------------
|
-----------------
|
||||||
|
|
|
@ -59,15 +59,15 @@ intelligence framework has distribution mechanisms which will push
|
||||||
data out to all of the nodes that need it.
|
data out to all of the nodes that need it.
|
||||||
|
|
||||||
Here is an example of the intelligence data format. Note that all
|
Here is an example of the intelligence data format. Note that all
|
||||||
whitespace separators are literal tabs and fields containing only a
|
whitespace field separators are literal tabs and fields containing only a
|
||||||
hyphen are considered to be null values.::
|
hyphen are considered to be null values. ::
|
||||||
|
|
||||||
#fields indicator indicator_type meta.source meta.desc meta.url
|
#fields indicator indicator_type meta.source meta.desc meta.url
|
||||||
1.2.3.4 Intel::ADDR source1 Sending phishing email http://source1.com/badhosts/1.2.3.4
|
1.2.3.4 Intel::ADDR source1 Sending phishing email http://source1.com/badhosts/1.2.3.4
|
||||||
a.b.com Intel::DOMAIN source2 Name used for data exfiltration -
|
a.b.com Intel::DOMAIN source2 Name used for data exfiltration -
|
||||||
|
|
||||||
For more examples of built in `indicator_type` values, please refer to the
|
For a list of all built-in `indicator_type` values, please refer to the
|
||||||
autogenerated documentation for the intelligence framework.
|
documentation of :bro:see:`Intel::Type`.
|
||||||
|
|
||||||
To load the data once files are created, use the following example
|
To load the data once files are created, use the following example
|
||||||
code to define files to load with your own file names of course::
|
code to define files to load with your own file names of course::
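A minimal sketch of such a configuration (the paths are illustrative, and the
``@load`` line assumes you also want the shipped "seen" scripts):

.. code:: bro

    @load frameworks/intel/seen

    redef Intel::read_files += {
        "/somewhere/feed1.txt",
        "/somewhere/feed2.txt"
    };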
|
||||||
|
@ -87,7 +87,7 @@ When some bit of data is extracted (such as an email address in the
|
||||||
"From" header in a message over SMTP), the Intelligence Framework
|
"From" header in a message over SMTP), the Intelligence Framework
|
||||||
needs to be informed that this data was discovered and its presence
|
needs to be informed that this data was discovered and its presence
|
||||||
should be checked within the intelligence data set. This is
|
should be checked within the intelligence data set. This is
|
||||||
accomplished through the Intel::seen function.
|
accomplished through the :bro:see:`Intel::seen` function.
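As an illustration only (the hook scripts described below already do this for
common protocols), such a call could look like the following sketch; the
``$where`` value assumes the location enums defined by the shipped scripts:

.. code:: bro

    event connection_established(c: connection)
        {
        # Report the responder address as "seen" so that it can be matched
        # against the loaded intelligence items.
        Intel::seen([$host=c$id$resp_h,
                     $indicator_type=Intel::ADDR,
                     $conn=c,
                     $where=Conn::IN_RESP]);
        }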
|
||||||
|
|
||||||
Typically users won't need to work with this function due to built-in
|
Typically users won't need to work with this function due to built-in
|
||||||
hook scripts that Bro ships with that will "see" data and send it into
|
hook scripts that Bro ships with that will "see" data and send it into
|
||||||
|
@ -95,8 +95,8 @@ the intelligence framework. A user may only need to load the entire
|
||||||
package of hook scripts as a module or pick and choose specific
|
package of hook scripts as a module or pick and choose specific
|
||||||
scripts to load. Keep in mind that as more data is sent into the
|
scripts to load. Keep in mind that as more data is sent into the
|
||||||
intelligence framework, the CPU load consumed by Bro will increase
|
intelligence framework, the CPU load consumed by Bro will increase
|
||||||
depending on how many times the Intel::seen function is being called
|
depending on how many times the :bro:see:`Intel::seen` function is
|
||||||
which is heavily traffic dependent.
|
being called which is heavily traffic dependent.
|
||||||
|
|
||||||
The full package of hook scripts that Bro ships with for sending this
|
The full package of hook scripts that Bro ships with for sending this
|
||||||
"seen" data into the intelligence framework can be loaded by adding
|
"seen" data into the intelligence framework can be loaded by adding
|
||||||
|
@ -110,12 +110,12 @@ Intelligence Matches
|
||||||
Against all hopes, most networks will eventually have a hit on
|
Against all hopes, most networks will eventually have a hit on
|
||||||
intelligence data which could indicate a possible compromise or other
|
intelligence data which could indicate a possible compromise or other
|
||||||
unwanted activity. The Intelligence Framework provides an event that
|
unwanted activity. The Intelligence Framework provides an event that
|
||||||
is generated whenever a match is discovered named Intel::match (TODO:
|
is generated whenever a match is discovered named :bro:see:`Intel::match`.
|
||||||
make a link to inline docs). Due to design restrictions placed upon
|
Due to design restrictions placed upon
|
||||||
the intelligence framework, there is no assurance as to where this
|
the intelligence framework, there is no assurance as to where this
|
||||||
event will be generated. It could be generated on the worker where
|
event will be generated. It could be generated on the worker where
|
||||||
the data was seen or on the manager. When the Intel::match event is
|
the data was seen or on the manager. When the ``Intel::match`` event is
|
||||||
handled, only the data given as event arguments to the event can be
|
handled, only the data given as event arguments to the event can be
|
||||||
assured since the host where the data was seen may not be where
|
assured since the host where the data was seen may not be where
|
||||||
Intel::match is handled.
|
``Intel::match`` is handled.
|
||||||
|
|
||||||
|
|
|
@ -104,7 +104,7 @@ code like this to your ``local.bro``:
|
||||||
}
|
}
|
||||||
|
|
||||||
Bro's DataSeries writer comes with a few tuning options, see
|
Bro's DataSeries writer comes with a few tuning options, see
|
||||||
:doc:`/scripts/base/frameworks/logging/writers/dataseries`.
|
:doc:`/scripts/base/frameworks/logging/writers/dataseries.bro`.
|
||||||
|
|
||||||
Working with DataSeries
|
Working with DataSeries
|
||||||
=======================
|
=======================
|
||||||
|
|
|
@ -31,12 +31,12 @@ Once extracted, start ElasticSearch with::
|
||||||
# ./bin/elasticsearch
|
# ./bin/elasticsearch
|
||||||
|
|
||||||
For more detailed information, refer to the ElasticSearch installation
|
For more detailed information, refer to the ElasticSearch installation
|
||||||
documentation: http://www.elasticsearch.org/guide/reference/setup/installation.html
|
documentation: http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/setup.html
|
||||||
|
|
||||||
Compiling Bro with ElasticSearch Support
|
Compiling Bro with ElasticSearch Support
|
||||||
----------------------------------------
|
----------------------------------------
|
||||||
|
|
||||||
First, ensure that you have libcurl installed the run configure.::
|
First, ensure that you have libcurl installed then run configure::
|
||||||
|
|
||||||
# ./configure
|
# ./configure
|
||||||
[...]
|
[...]
|
||||||
|
@ -51,9 +51,9 @@ First, ensure that you have libcurl installed the run configure.::
|
||||||
Activating ElasticSearch
|
Activating ElasticSearch
|
||||||
------------------------
|
------------------------
|
||||||
|
|
||||||
The easiest way to enable ElasticSearch output is to load the tuning/logs-to-
|
The easiest way to enable ElasticSearch output is to load the
|
||||||
elasticsearch.bro script. If you are using BroControl, the following line in
|
tuning/logs-to-elasticsearch.bro script. If you are using BroControl,
|
||||||
local.bro will enable it.
|
the following line in local.bro will enable it:
|
||||||
|
|
||||||
.. console::
|
.. console::
|
||||||
|
|
||||||
|
@ -76,7 +76,7 @@ A common problem encountered with ElasticSearch is too many files being held
|
||||||
open. The ElasticSearch website has some suggestions on how to increase the
|
open. The ElasticSearch website has some suggestions on how to increase the
|
||||||
open file limit.
|
open file limit.
|
||||||
|
|
||||||
- http://www.elasticsearch.org/tutorials/2011/04/06/too-many-open-files.html
|
- http://www.elasticsearch.org/tutorials/too-many-open-files/
|
||||||
|
|
||||||
TODO
|
TODO
|
||||||
----
|
----
|
||||||
|
|
166
doc/frameworks/logging-input-sqlite.rst
Normal file
|
@ -0,0 +1,166 @@
|
||||||
|
|
||||||
|
============================================
|
||||||
|
Logging To and Reading From SQLite Databases
|
||||||
|
============================================
|
||||||
|
|
||||||
|
.. rst-class:: opening
|
||||||
|
|
||||||
|
Starting with version 2.2, Bro features a SQLite logging writer
|
||||||
|
as well as a SQLite input reader. SQLite is a simple, file-based,
|
||||||
|
widely used SQL database system. Using SQLite allows Bro to write
|
||||||
|
and access data in a format that is easy to use in interchange with
|
||||||
|
other applications. Due to the transactional nature of SQLite,
|
||||||
|
databases can be used by several applications simultaneously. Hence,
|
||||||
|
they can, for example, be used to make data that changes regularly available
|
||||||
|
to Bro on a continuing basis.
|
||||||
|
|
||||||
|
.. contents::
|
||||||
|
|
||||||
|
Warning
|
||||||
|
=======
|
||||||
|
|
||||||
|
In contrast to the ASCII reader and writer, the SQLite plugins have not yet
|
||||||
|
seen extensive use in production environments. While we are not aware
|
||||||
|
of any issues with them, we urge caution when using them
|
||||||
|
in production environments. There could be lingering issues which only occur
|
||||||
|
when the plugins are used with high amounts of data or in high-load environments.
|
||||||
|
|
||||||
|
Logging Data into SQLite Databases
|
||||||
|
==================================
|
||||||
|
|
||||||
|
Logging support for SQLite is available in all Bro installations starting with
|
||||||
|
version 2.2. There is no need to load any additional scripts or for any compile-time
|
||||||
|
configurations.
|
||||||
|
|
||||||
|
Sending data from existing logging streams to SQLite is rather straightforward. You
|
||||||
|
have to define a filter which specifies SQLite as the writer.
|
||||||
|
|
||||||
|
The following example code adds SQLite as a filter for the connection log:
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/frameworks/sqlite-conn-filter.bro
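In case that file is not at hand, a minimal sketch of such a filter follows;
the filter name and database path are illustrative:

.. code:: bro

    event bro_init()
        {
        # Send the connection log to /var/db/conn.sqlite in addition to
        # (or instead of) the default ASCII output.
        local filter: Log::Filter = [$name="sqlite", $path="/var/db/conn",
                                     $writer=Log::WRITER_SQLITE];
        Log::add_filter(Conn::LOG, filter);
        }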
|
||||||
|
|
||||||
|
.. btest:: sqlite-conn-filter-check
|
||||||
|
|
||||||
|
# Make sure this parses correctly at least.
|
||||||
|
@TEST-EXEC: bro ${DOC_ROOT}/frameworks/sqlite-conn-filter.bro
|
||||||
|
|
||||||
|
Bro will create the database file ``/var/db/conn.sqlite``, if it does not already exist.
|
||||||
|
It will also create a table with the name ``conn`` (if it does not exist) and start
|
||||||
|
appending connection information to the table.
|
||||||
|
|
||||||
|
At the moment, SQLite databases are not rotated the same way ASCII log-files are. You
|
||||||
|
have to take care to create them in an adequate location.
|
||||||
|
|
||||||
|
If you examine the resulting SQLite database, the schema will contain the same fields
|
||||||
|
that are present in the ASCII log files::
|
||||||
|
|
||||||
|
# sqlite3 /var/db/conn.sqlite
|
||||||
|
|
||||||
|
SQLite version 3.8.0.2 2013-09-03 17:11:13
|
||||||
|
Enter ".help" for instructions
|
||||||
|
Enter SQL statements terminated with a ";"
|
||||||
|
sqlite> .schema
|
||||||
|
CREATE TABLE conn (
|
||||||
|
'ts' double precision,
|
||||||
|
'uid' text,
|
||||||
|
'id.orig_h' text,
|
||||||
|
'id.orig_p' integer,
|
||||||
|
...
|
||||||
|
|
||||||
|
Note that the ASCII ``conn.log`` will still be created. To disable the ASCII writer for a
|
||||||
|
log stream, you can remove the default filter:
|
||||||
|
|
||||||
|
.. code:: bro
|
||||||
|
|
||||||
|
Log::remove_filter(Conn::LOG, "default");
|
||||||
|
|
||||||
|
|
||||||
|
To create a custom SQLite log file, you have to create a new log stream that contains
|
||||||
|
just the information you want to commit to the database. Please refer to the
|
||||||
|
:ref:`framework-logging` documentation on how to create custom log streams.
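As a rough sketch (module, stream, and field names below are made up), such a
stream could be declared like this and then be given a SQLite filter exactly as
shown above:

.. code:: bro

    module Example;

    export {
        redef enum Log::ID += { LOG };

        # Only the columns that should end up in the database.
        type Info: record {
            ts:   time   &log;
            host: addr   &log;
            msg:  string &log;
        };
    }

    event bro_init()
        {
        Log::create_stream(Example::LOG, [$columns=Info]);
        }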
|
||||||
|
|
||||||
|
Reading Data from SQLite Databases
|
||||||
|
==================================
|
||||||
|
|
||||||
|
Like logging support, support for reading data from SQLite databases is built into Bro starting
|
||||||
|
with version 2.2.
|
||||||
|
|
||||||
|
Just as with the text-based input readers (please refer to the :ref:`framework-input`
|
||||||
|
documentation for them and for basic information on how to use the input-framework), the SQLite reader
|
||||||
|
can be used to read data - in this case the result of SQL queries - into tables or into events.
|
||||||
|
|
||||||
|
Reading Data into Tables
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
To read data from a SQLite database, we first have to provide Bro with information about how
|
||||||
|
the resulting data will be structured. For this example, we expect that we have a SQLite database,
|
||||||
|
which contains host IP addresses and the user accounts that are allowed to log into a specific
|
||||||
|
machine.
|
||||||
|
|
||||||
|
The SQLite commands to create the schema are as follows::
|
||||||
|
|
||||||
|
create table machines_to_users (
|
||||||
|
host text unique not null,
|
||||||
|
users text not null);
|
||||||
|
|
||||||
|
insert into machines_to_users values ('192.168.17.1', 'bernhard,matthias,seth');
|
||||||
|
insert into machines_to_users values ('192.168.17.2', 'bernhard');
|
||||||
|
insert into machines_to_users values ('192.168.17.3', 'seth,matthias');
|
||||||
|
|
||||||
|
After creating a file called ``hosts.sqlite`` with this content, we can read the resulting table
|
||||||
|
into Bro:
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/frameworks/sqlite-read-table.bro
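The gist of that script is an :bro:see:`Input::add_table` call that selects the
SQLite reader and the query to run. The sketch below makes several assumptions
(record layout, stream name, and the way the database file is referenced), so
check it against the included file before relying on it:

.. code:: bro

    type Idx: record {
        host: addr;
    };

    type Val: record {
        users: string;
    };

    global machines_to_users: table[addr] of Val = table();

    event bro_init()
        {
        Input::add_table([$source="hosts", $name="machines",
                          $idx=Idx, $val=Val, $destination=machines_to_users,
                          $reader=Input::READER_SQLITE,
                          $config=table(["query"] = "select * from machines_to_users;")]);
        Input::remove("machines");
        }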
|
||||||
|
|
||||||
|
.. btest:: sqlite-read-table-check
|
||||||
|
|
||||||
|
# Make sure this parses correctly at least.
|
||||||
|
@TEST-EXEC: bro ${DOC_ROOT}/frameworks/sqlite-read-table.bro
|
||||||
|
|
||||||
|
Afterwards, that table can be used to check logins into hosts against the available
|
||||||
|
userlist.
|
||||||
|
|
||||||
|
Turning Data into Events
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
The second mode is to use the SQLite reader to output the input data as events. Typically there
|
||||||
|
are two reasons to do this. First, when the structure of the input data is too complicated
|
||||||
|
for a direct table import. In this case, the data can be read into an event which can then
|
||||||
|
create the necessary data structures in Bro in scriptland.
|
||||||
|
|
||||||
|
The second reason is that the dataset is too big to hold in memory. In this case, the checks
|
||||||
|
can be performed on-demand, when Bro encounters a situation where it needs additional information.
|
||||||
|
|
||||||
|
An example of this would be a huge internal database of malware hashes. Live database queries
|
||||||
|
could then be used to check occasional downloads against the database.
|
||||||
|
|
||||||
|
The SQLite commands to create the schema are as follows::
|
||||||
|
|
||||||
|
create table malware_hashes (
|
||||||
|
hash text unique not null,
|
||||||
|
description text not null);
|
||||||
|
|
||||||
|
insert into malware_hashes values ('86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', 'malware a');
|
||||||
|
insert into malware_hashes values ('e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98', 'malware b');
|
||||||
|
insert into malware_hashes values ('84a516841ba77a5b4648de2cd0dfcb30ea46dbb4', 'malware c');
|
||||||
|
insert into malware_hashes values ('3c363836cf4e16666669a25da280a1865c2d2874', 'malware d');
|
||||||
|
insert into malware_hashes values ('58e6b3a414a1e090dfc6029add0f3555ccba127f', 'malware e');
|
||||||
|
insert into malware_hashes values ('4a0a19218e082a343a1b17e5333409af9d98f0f5', 'malware f');
|
||||||
|
insert into malware_hashes values ('54fd1711209fb1c0781092374132c66e79e2241b', 'malware g');
|
||||||
|
insert into malware_hashes values ('27d5482eebd075de44389774fce28c69f45c8a75', 'malware h');
|
||||||
|
insert into malware_hashes values ('73f45106968ff8dc51fba105fa91306af1ff6666', 'ftp-trace');
|
||||||
|
|
||||||
|
|
||||||
|
The following code uses the file-analysis framework to get the sha1 hashes of files that are
|
||||||
|
transmitted over the network. For each hash, a SQL-query is run against SQLite. If the query
|
||||||
|
returns with a result, we had a hit against our malware-database and output the matching hash.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/frameworks/sqlite-read-events.bro
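Sketched out, the idea is to hash each file with the SHA1 analyzer and then run
one short-lived :bro:see:`Input::add_event` stream per hash against the
database. Event, stream, and path names below are assumptions for illustration:

.. code:: bro

    type Val: record {
        hash: string;
        description: string;
    };

    event malware_hit(desc: Input::EventDescription, tpe: Input::Event,
                      hash: string, description: string)
        {
        print fmt("malware hit: %s is %s", hash, description);
        }

    event file_new(f: fa_file)
        {
        Files::add_analyzer(f, Files::ANALYZER_SHA1);
        }

    event file_hash(f: fa_file, kind: string, hash: string)
        {
        if ( kind != "sha1" )
            return;

        Input::add_event([$source="/var/db/malware", $name=hash,
                          $fields=Val, $ev=malware_hit, $want_record=F,
                          $reader=Input::READER_SQLITE,
                          $config=table(["query"] =
                              fmt("select * from malware_hashes where hash='%s';", hash))]);
        }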
|
||||||
|
|
||||||
|
.. btest:: sqlite-read-events-check
|
||||||
|
|
||||||
|
# Make sure this parses correctly at least.
|
||||||
|
@TEST-EXEC: bro ${DOC_ROOT}/frameworks/sqlite-read-events.bro
|
||||||
|
|
||||||
|
If you run this script against the trace in ``testing/btest/Traces/ftp/ipv4.trace``, you
|
||||||
|
will get one hit.
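
In a real deployment one would probably raise a notice instead of just printing to the
console. A hedged sketch of such an extension of the ``line`` event handler above (it
assumes the ``Val`` record and ``line`` event from the script shown earlier; the
``Malware_Hash_Hit`` notice type is hypothetical and introduced only for illustration):

.. code:: bro

    @load base/frameworks/notice

    redef enum Notice::Type += {
        ## Hypothetical notice type for hits against the malware database.
        Malware_Hash_Hit
    };

    # An additional handler body for the same event; it runs alongside the
    # print statement from the original script.
    event line(description: Input::EventDescription, tpe: Input::Event, r: Val)
        {
        NOTICE([$note=Malware_Hash_Hit,
                $msg=fmt("malware hit with hash %s, description %s", r$hash, r$description)]);
        }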
|
|
@ -48,7 +48,7 @@ Basics
|
||||||
The data fields that a stream records are defined by a record type
|
The data fields that a stream records are defined by a record type
|
||||||
specified when it is created. Let's look at the script generating Bro's
|
specified when it is created. Let's look at the script generating Bro's
|
||||||
connection summaries as an example,
|
connection summaries as an example,
|
||||||
:doc:`/scripts/base/protocols/conn/main`. It defines a record
|
:doc:`/scripts/base/protocols/conn/main.bro`. It defines a record
|
||||||
:bro:type:`Conn::Info` that lists all the fields that go into
|
:bro:type:`Conn::Info` that lists all the fields that go into
|
||||||
``conn.log``, each marked with a ``&log`` attribute indicating that it
|
``conn.log``, each marked with a ``&log`` attribute indicating that it
|
||||||
is part of the information written out. To write a log record, the
|
is part of the information written out. To write a log record, the
|
||||||
|
@ -309,7 +309,7 @@ ASCII Writer Configuration
|
||||||
--------------------------
|
--------------------------
|
||||||
|
|
||||||
The ASCII writer has a number of options for customizing the format of
|
The ASCII writer has a number of options for customizing the format of
|
||||||
its output, see :doc:`/scripts/base/frameworks/logging/writers/ascii`.
|
its output, see :doc:`/scripts/base/frameworks/logging/writers/ascii.bro`.
|
||||||
|
|
||||||
Adding Streams
|
Adding Streams
|
||||||
==============
|
==============
|
||||||
|
@ -369,7 +369,7 @@ save the logged ``Foo::Info`` record into the connection record:
|
||||||
}
|
}
|
||||||
|
|
||||||
See the existing scripts for how to work with such a new connection
|
See the existing scripts for how to work with such a new connection
|
||||||
field. A simple example is :doc:`/scripts/base/protocols/syslog/main`.
|
field. A simple example is :doc:`/scripts/base/protocols/syslog/main.bro`.
|
||||||
|
|
||||||
When you are developing scripts that add data to the :bro:type:`connection`
|
When you are developing scripts that add data to the :bro:type:`connection`
|
||||||
record, care must be given to when and how long data is stored.
|
record, care must be given to when and how long data is stored.
|
||||||
|
@ -387,3 +387,4 @@ Bro supports the following output formats other than ASCII:
|
||||||
|
|
||||||
logging-dataseries
|
logging-dataseries
|
||||||
logging-elasticsearch
|
logging-elasticsearch
|
||||||
|
logging-input-sqlite
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
|
|
||||||
|
.. _notice-framework:
|
||||||
|
|
||||||
Notice Framework
|
Notice Framework
|
||||||
================
|
================
|
||||||
|
|
||||||
|
@ -283,7 +285,7 @@ information to suppress duplicates for a configurable period of time.
|
||||||
The ``$identifier`` field is typically comprised of several pieces of
|
The ``$identifier`` field is typically comprised of several pieces of
|
||||||
data related to the notice that when combined represent a unique
|
data related to the notice that when combined represent a unique
|
||||||
instance of that notice. Here is an example of the script
|
instance of that notice. Here is an example of the script
|
||||||
:doc:`/scripts/policy/protocols/ssl/validate-certs` raising a notice
|
:doc:`/scripts/policy/protocols/ssl/validate-certs.bro` raising a notice
|
||||||
for session negotiations where the certificate or certificate chain did
|
for session negotiations where the certificate or certificate chain did
|
||||||
not validate successfully against the available certificate authority
|
not validate successfully against the available certificate authority
|
||||||
certificates.
|
certificates.
|
||||||
|
|
|
@ -46,7 +46,7 @@ signature's event statement (``Found root!``), and data is the last
|
||||||
piece of payload which triggered the pattern match.
|
piece of payload which triggered the pattern match.
|
||||||
|
|
||||||
To turn such :bro:id:`signature_match` events into actual alarms, you can
|
To turn such :bro:id:`signature_match` events into actual alarms, you can
|
||||||
load Bro's :doc:`/scripts/base/frameworks/signatures/main` script.
|
load Bro's :doc:`/scripts/base/frameworks/signatures/main.bro` script.
|
||||||
This script contains a default event handler that raises
|
This script contains a default event handler that raises
|
||||||
:bro:enum:`Signatures::Sensitive_Signature` :doc:`Notices <notice>`
|
:bro:enum:`Signatures::Sensitive_Signature` :doc:`Notices <notice>`
|
||||||
(as well as others; see the beginning of the script).
|
(as well as others; see the beginning of the script).
|
||||||
|
|
12
doc/frameworks/sqlite-conn-filter.bro
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
event bro_init()
|
||||||
|
{
|
||||||
|
local filter: Log::Filter =
|
||||||
|
[
|
||||||
|
$name="sqlite",
|
||||||
|
$path="/var/db/conn",
|
||||||
|
$config=table(["tablename"] = "conn"),
|
||||||
|
$writer=Log::WRITER_SQLITE
|
||||||
|
];
|
||||||
|
|
||||||
|
Log::add_filter(Conn::LOG, filter);
|
||||||
|
}
|
40
doc/frameworks/sqlite-read-events.bro
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
@load frameworks/files/hash-all-files
|
||||||
|
|
||||||
|
type Val: record {
|
||||||
|
hash: string;
|
||||||
|
description: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
event line(description: Input::EventDescription, tpe: Input::Event, r: Val)
|
||||||
|
{
|
||||||
|
print fmt("malware-hit with hash %s, description %s", r$hash, r$description);
|
||||||
|
}
|
||||||
|
|
||||||
|
global malware_source = "/var/db/malware";
|
||||||
|
|
||||||
|
event file_hash(f: fa_file, kind: string, hash: string)
|
||||||
|
{
|
||||||
|
|
||||||
|
# check all sha1 hashes
|
||||||
|
if ( kind=="sha1" )
|
||||||
|
{
|
||||||
|
Input::add_event(
|
||||||
|
[
|
||||||
|
$source=malware_source,
|
||||||
|
$name=hash,
|
||||||
|
$fields=Val,
|
||||||
|
$ev=line,
|
||||||
|
$want_record=T,
|
||||||
|
$config=table(
|
||||||
|
["query"] = fmt("select * from malware_hashes where hash='%s';", hash)
|
||||||
|
),
|
||||||
|
$reader=Input::READER_SQLITE
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
event Input::end_of_data(name: string, source:string)
|
||||||
|
{
|
||||||
|
if ( source == malware_source )
|
||||||
|
Input::remove(name);
|
||||||
|
}
|
35
doc/frameworks/sqlite-read-table.bro
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
type Idx: record {
|
||||||
|
host: addr;
|
||||||
|
};
|
||||||
|
|
||||||
|
type Val: record {
|
||||||
|
users: set[string];
|
||||||
|
};
|
||||||
|
|
||||||
|
global hostslist: table[addr] of Val = table();
|
||||||
|
|
||||||
|
event bro_init()
|
||||||
|
{
|
||||||
|
Input::add_table([$source="/var/db/hosts",
|
||||||
|
$name="hosts",
|
||||||
|
$idx=Idx,
|
||||||
|
$val=Val,
|
||||||
|
$destination=hostslist,
|
||||||
|
$reader=Input::READER_SQLITE,
|
||||||
|
$config=table(["query"] = "select * from machines_to_users;")
|
||||||
|
]);
|
||||||
|
|
||||||
|
Input::remove("hosts");
|
||||||
|
}
|
||||||
|
|
||||||
|
event Input::end_of_data(name: string, source: string)
|
||||||
|
{
|
||||||
|
if ( name != "hosts" )
|
||||||
|
return;
|
||||||
|
|
||||||
|
# now all data is in the table
|
||||||
|
print "Hosts list has been successfully imported";
|
||||||
|
|
||||||
|
# List the users of one host.
|
||||||
|
print hostslist[192.168.17.1]$users;
|
||||||
|
}
|
36
doc/frameworks/sumstats-countconns.bro
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
@load base/frameworks/sumstats
|
||||||
|
|
||||||
|
event connection_established(c: connection)
|
||||||
|
{
|
||||||
|
# Make an observation!
|
||||||
|
# This observation is global so the key is empty.
|
||||||
|
# Each established connection counts as one so the observation is always 1.
|
||||||
|
SumStats::observe("conn established",
|
||||||
|
SumStats::Key(),
|
||||||
|
SumStats::Observation($num=1));
|
||||||
|
}
|
||||||
|
|
||||||
|
event bro_init()
|
||||||
|
{
|
||||||
|
# Create the reducer.
|
||||||
|
# The reducer attaches to the "conn established" observation stream
|
||||||
|
# and uses the summing calculation on the observations.
|
||||||
|
local r1 = SumStats::Reducer($stream="conn established",
|
||||||
|
$apply=set(SumStats::SUM));
|
||||||
|
|
||||||
|
# Create the final sumstat.
|
||||||
|
# We give it an arbitrary name and make it collect data every minute.
|
||||||
|
# The reducer is then attached and a $epoch_result callback is given
|
||||||
|
# to finally do something with the data collected.
|
||||||
|
SumStats::create([$name = "counting connections",
|
||||||
|
$epoch = 1min,
|
||||||
|
$reducers = set(r1),
|
||||||
|
$epoch_result(ts: time, key: SumStats::Key, result: SumStats::Result) =
|
||||||
|
{
|
||||||
|
# This is the body of the callback that is called when a single
|
||||||
|
# result has been collected. We are just printing the total number
|
||||||
|
# of connections that were seen. The $sum field is provided as a
|
||||||
|
# double type value so we need to use %f as the format specifier.
|
||||||
|
print fmt("Number of connections established: %.0f", result["conn established"]$sum);
|
||||||
|
}]);
|
||||||
|
}
|
45
doc/frameworks/sumstats-toy-scan.bro
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
@load base/frameworks/sumstats
|
||||||
|
|
||||||
|
# We use the connection_attempt event to limit our observations to those
|
||||||
|
# which were attempted and not successful.
|
||||||
|
event connection_attempt(c: connection)
|
||||||
|
{
|
||||||
|
# Make an observation!
|
||||||
|
# This observation is about the host attempting the connection.
|
||||||
|
# Each established connection counts as one so the observation is always 1.
|
||||||
|
SumStats::observe("conn attempted",
|
||||||
|
SumStats::Key($host=c$id$orig_h),
|
||||||
|
SumStats::Observation($num=1));
|
||||||
|
}
|
||||||
|
|
||||||
|
event bro_init()
|
||||||
|
{
|
||||||
|
# Create the reducer.
|
||||||
|
# The reducer attaches to the "conn attempted" observation stream
|
||||||
|
# and uses the summing calculation on the observations. Keep
|
||||||
|
# in mind that there will be one result per key (connection originator).
|
||||||
|
local r1 = SumStats::Reducer($stream="conn attempted",
|
||||||
|
$apply=set(SumStats::SUM));
|
||||||
|
|
||||||
|
# Create the final sumstat.
|
||||||
|
# This is slightly different from the last example since we're providing
|
||||||
|
# a callback to calculate a value to check against the threshold with
|
||||||
|
# $threshold_val. The actual threshold itself is provided with $threshold.
|
||||||
|
# Another callback is provided for when a key crosses the threshold.
|
||||||
|
SumStats::create([$name = "finding scanners",
|
||||||
|
$epoch = 5min,
|
||||||
|
$reducers = set(r1),
|
||||||
|
# Provide a threshold.
|
||||||
|
$threshold = 5.0,
|
||||||
|
# Provide a callback to calculate a value from the result
|
||||||
|
# to check against the threshold field.
|
||||||
|
$threshold_val(key: SumStats::Key, result: SumStats::Result) =
|
||||||
|
{
|
||||||
|
return result["conn attempted"]$sum;
|
||||||
|
},
|
||||||
|
# Provide a callback for when a key crosses the threshold.
|
||||||
|
$threshold_crossed(key: SumStats::Key, result: SumStats::Result) =
|
||||||
|
{
|
||||||
|
print fmt("%s attempted %.0f or more connections", key$host, result["conn attempted"]$sum);
|
||||||
|
}]);
|
||||||
|
}
|
105
doc/frameworks/sumstats.rst
Normal file
|
@ -0,0 +1,105 @@
|
||||||
|
|
||||||
|
.. _sumstats-framework:
|
||||||
|
|
||||||
|
==================
|
||||||
|
Summary Statistics
|
||||||
|
==================
|
||||||
|
|
||||||
|
.. rst-class:: opening
|
||||||
|
|
||||||
|
Measuring aspects of network traffic is an extremely common task in Bro.
Bro provides data structures which make this very easy in simple cases,
such as processing a size-limited trace file. In real-world deployments,
though, difficulties arise from clusterization (many processes sniffing
traffic) and unbounded data sets (traffic never stops). The Summary
Statistics (otherwise referred to as SumStats) framework aims to define a
mechanism for consuming unbounded data sets and making them measurable in
practice on large clustered and non-clustered Bro deployments.
|
||||||
|
|
||||||
|
.. contents::
|
||||||
|
|
||||||
|
Overview
|
||||||
|
========
|
||||||
|
|
||||||
|
The SumStats processing flow is broken into three pieces: observations, where
some aspect of an event is observed and fed into the SumStats framework;
reducers, where observations are collected and measured, typically by taking
some sort of summary statistic measurement like average or variance (among
others); and sumstats, where reducers are given an epoch (time interval) over
which their measurements are performed, along with callbacks for monitoring
thresholds or viewing the collected and measured data.
|
||||||
|
|
||||||
|
Terminology
|
||||||
|
===========
|
||||||
|
|
||||||
|
Observation
|
||||||
|
|
||||||
|
A single point of data. Observations have a few components of their
|
||||||
|
own. They are part of an arbitrarily named observation stream, they
|
||||||
|
have a key that is something the observation is about, and the actual
|
||||||
|
observation itself.
|
||||||
|
|
||||||
|
Reducer
|
||||||
|
|
||||||
|
Calculations are applied to an observation stream here to reduce the
|
||||||
|
full unbounded set of observations down to a smaller representation.
|
||||||
|
Results are collected within each reducer per-key so care must be
|
||||||
|
taken to keep the total number of keys tracked down to a reasonable
|
||||||
|
level.
|
||||||
|
|
||||||
|
Sumstat
|
||||||
|
|
||||||
|
The final definition of a Sumstat where one or more reducers is
|
||||||
|
collected over an interval, also known as an epoch. Thresholding can
|
||||||
|
be applied here along with a callback in the event that a threshold is
|
||||||
|
crossed. Additionally, a callback can be provided to access each
|
||||||
|
result (per-key) at the end of each epoch.
|
||||||
|
|
||||||
|
Examples
|
||||||
|
========
|
||||||
|
|
||||||
|
These examples may seem very simple to an experienced Bro script developer and
|
||||||
|
they're intended to look that way. Keep in mind that these scripts will work
|
||||||
|
on small single process Bro instances as well as large many-worker clusters.
|
||||||
|
The complications from dealing with flow based load balancing can be ignored
|
||||||
|
by developers writing scripts that use Sumstats due to its built-in cluster
|
||||||
|
transparency.
|
||||||
|
|
||||||
|
Printing the number of connections
|
||||||
|
----------------------------------
|
||||||
|
|
||||||
|
Sumstats provides a simple way of approaching the problem of trying to count
|
||||||
|
the number of connections over a given time interval. Here is a script with
|
||||||
|
inline documentation that does this with the Sumstats framework:
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/frameworks/sumstats-countconns.bro
|
||||||
|
|
||||||
|
When run on a sample PCAP file from the Bro test suite, the following output
|
||||||
|
is created:
|
||||||
|
|
||||||
|
.. btest:: sumstats-countconns
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/workshop_2011_browse.trace ${DOC_ROOT}/frameworks/sumstats-countconns.bro
|
||||||
|
|
||||||
|
|
||||||
|
Toy scan detection
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Taking the previous example even further, we can implement a simple detection
to demonstrate the thresholding functionality. This example is a toy to
demonstrate how thresholding works in SumStats and is not meant to be a
real-world functional example; that is left to the
:doc:`/scripts/policy/misc/scan.bro` script that is included with Bro.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/frameworks/sumstats-toy-scan.bro
|
||||||
|
|
||||||
|
Let's see if there are any hosts that crossed the threshold in a PCAP file
|
||||||
|
containing a host running nmap:
|
||||||
|
|
||||||
|
.. btest:: sumstats-toy-scan
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/nmap-vsn.trace ${DOC_ROOT}/frameworks/sumstats-toy-scan.bro
|
||||||
|
|
||||||
|
It seems the host running nmap was detected!
|
||||||
|
|
24
doc/httpmonitor/file_extraction.bro
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
|
||||||
|
global mime_to_ext: table[string] of string = {
|
||||||
|
["application/x-dosexec"] = "exe",
|
||||||
|
["text/plain"] = "txt",
|
||||||
|
["image/jpeg"] = "jpg",
|
||||||
|
["image/png"] = "png",
|
||||||
|
["text/html"] = "html",
|
||||||
|
};
|
||||||
|
|
||||||
|
event file_new(f: fa_file)
|
||||||
|
{
|
||||||
|
if ( f$source != "HTTP" )
|
||||||
|
return;
|
||||||
|
|
||||||
|
if ( ! f?$mime_type )
|
||||||
|
return;
|
||||||
|
|
||||||
|
if ( f$mime_type !in mime_to_ext )
|
||||||
|
return;
|
||||||
|
|
||||||
|
local fname = fmt("%s-%s.%s", f$source, f$id, mime_to_ext[f$mime_type]);
|
||||||
|
print fmt("Extracting file %s", fname);
|
||||||
|
Files::add_analyzer(f, Files::ANALYZER_EXTRACT, [$extract_filename=fname]);
|
||||||
|
}
|
5
doc/httpmonitor/http_proxy_01.bro
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
event http_reply(c: connection, version: string, code: count, reason: string)
|
||||||
|
{
|
||||||
|
if ( /^[hH][tT][tT][pP]:/ in c$http$uri && c$http$status_code == 200 )
|
||||||
|
print fmt("A local server is acting as an open proxy: %s", c$id$resp_h);
|
||||||
|
}
|
26
doc/httpmonitor/http_proxy_02.bro
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
|
||||||
|
module HTTP;
|
||||||
|
|
||||||
|
export {
|
||||||
|
|
||||||
|
global success_status_codes: set[count] = {
|
||||||
|
200,
|
||||||
|
201,
|
||||||
|
202,
|
||||||
|
203,
|
||||||
|
204,
|
||||||
|
205,
|
||||||
|
206,
|
||||||
|
207,
|
||||||
|
208,
|
||||||
|
226,
|
||||||
|
304
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
event http_reply(c: connection, version: string, code: count, reason: string)
|
||||||
|
{
|
||||||
|
if ( /^[hH][tT][tT][pP]:/ in c$http$uri &&
|
||||||
|
c$http$status_code in HTTP::success_status_codes )
|
||||||
|
print fmt("A local server is acting as an open proxy: %s", c$id$resp_h);
|
||||||
|
}
|
31
doc/httpmonitor/http_proxy_03.bro
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
|
||||||
|
@load base/utils/site
|
||||||
|
|
||||||
|
redef Site::local_nets += { 192.168.0.0/16 };
|
||||||
|
|
||||||
|
module HTTP;
|
||||||
|
|
||||||
|
export {
|
||||||
|
|
||||||
|
global success_status_codes: set[count] = {
|
||||||
|
200,
|
||||||
|
201,
|
||||||
|
202,
|
||||||
|
203,
|
||||||
|
204,
|
||||||
|
205,
|
||||||
|
206,
|
||||||
|
207,
|
||||||
|
208,
|
||||||
|
226,
|
||||||
|
304
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
event http_reply(c: connection, version: string, code: count, reason: string)
|
||||||
|
{
|
||||||
|
if ( Site::is_local_addr(c$id$resp_h) &&
|
||||||
|
/^[hH][tT][tT][pP]:/ in c$http$uri &&
|
||||||
|
c$http$status_code in HTTP::success_status_codes )
|
||||||
|
print fmt("A local server is acting as an open proxy: %s", c$id$resp_h);
|
||||||
|
}
|
40
doc/httpmonitor/http_proxy_04.bro
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
@load base/utils/site
|
||||||
|
@load base/frameworks/notice
|
||||||
|
|
||||||
|
redef Site::local_nets += { 192.168.0.0/16 };
|
||||||
|
|
||||||
|
module HTTP;
|
||||||
|
|
||||||
|
export {
|
||||||
|
|
||||||
|
redef enum Notice::Type += {
|
||||||
|
Open_Proxy
|
||||||
|
};
|
||||||
|
|
||||||
|
global success_status_codes: set[count] = {
|
||||||
|
200,
|
||||||
|
201,
|
||||||
|
202,
|
||||||
|
203,
|
||||||
|
204,
|
||||||
|
205,
|
||||||
|
206,
|
||||||
|
207,
|
||||||
|
208,
|
||||||
|
226,
|
||||||
|
304
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
event http_reply(c: connection, version: string, code: count, reason: string)
|
||||||
|
{
|
||||||
|
if ( Site::is_local_addr(c$id$resp_h) &&
|
||||||
|
/^[hH][tT][tT][pP]:/ in c$http$uri &&
|
||||||
|
c$http$status_code in HTTP::success_status_codes )
|
||||||
|
NOTICE([$note=HTTP::Open_Proxy,
|
||||||
|
$msg=fmt("A local server is acting as an open proxy: %s",
|
||||||
|
c$id$resp_h),
|
||||||
|
$conn=c,
|
||||||
|
$identifier=cat(c$id$resp_h),
|
||||||
|
$suppress_for=1day]);
|
||||||
|
}
|
163
doc/httpmonitor/index.rst
Normal file
|
@ -0,0 +1,163 @@
|
||||||
|
|
||||||
|
.. _http-monitor:
|
||||||
|
|
||||||
|
================================
|
||||||
|
Monitoring HTTP Traffic with Bro
|
||||||
|
================================
|
||||||
|
|
||||||
|
Bro can be used to log the entire HTTP traffic from your network to the
|
||||||
|
http.log file. This file can then be used for analysis and auditing
|
||||||
|
purposes.
|
||||||
|
|
||||||
|
In the sections below we briefly explain the structure of the http.log
|
||||||
|
file. Then, we show you how to perform basic HTTP traffic monitoring and
|
||||||
|
analysis tasks with Bro. Some of these ideas and techniques can later be
|
||||||
|
applied to monitor different protocols in a similar way.
|
||||||
|
|
||||||
|
----------------------------
|
||||||
|
Introduction to the HTTP log
|
||||||
|
----------------------------
|
||||||
|
|
||||||
|
The http.log file contains a summary of all HTTP requests and responses
|
||||||
|
sent over a Bro-monitored network. Here are the first few columns of
|
||||||
|
``http.log``::
|
||||||
|
|
||||||
|
# ts uid orig_h orig_p resp_h resp_p
|
||||||
|
1311627961.8 HSH4uV8KVJg 192.168.1.100 52303 192.150.187.43 80
|
||||||
|
|
||||||
|
Every single line in this log starts with a timestamp, a unique
|
||||||
|
connection identifier (UID), and a connection 4-tuple (originator
|
||||||
|
host/port and responder host/port). The UID can be used to identify all
|
||||||
|
logged activity (possibly across multiple log files) associated with a
|
||||||
|
given connection 4-tuple over its lifetime.
|
||||||
|
|
||||||
|
The remaining columns detail the activity that's occurring. For
example, the columns on the line below (shortened for brevity) show a
request to the root of the Bro website::
|
||||||
|
|
||||||
|
# method host uri referrer user_agent
|
||||||
|
GET bro.org / - <...>Chrome/12.0.742.122<...>
|
||||||
|
|
||||||
|
Network administrators and security engineers, for instance, can use the
information in this log to understand the HTTP activity on the network
and troubleshoot network problems or search for anomalous activities. At
this point, we would like to stress that there is no single right way to
perform analysis; it will depend on the expertise of the person doing the
analysis and the specific details of the task at hand.
|
||||||
|
|
||||||
|
For more information about how to handle the HTTP protocol in Bro,
|
||||||
|
including a complete list of the fields available in http.log, go to
|
||||||
|
Bro's :doc:`HTTP script reference
|
||||||
|
</scripts/base/protocols/http/main.bro>`.
|
||||||
|
|
||||||
|
------------------------
|
||||||
|
Detecting a Proxy Server
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
A proxy server is a device on your network configured to request a
|
||||||
|
service on behalf of a third system; one of the most common examples is
|
||||||
|
a Web proxy server. A client without Internet access connects to the
|
||||||
|
proxy and requests a Web page; the proxy then sends the request to the
|
||||||
|
actual Web server, receives the response and passes it to the original
|
||||||
|
client.
|
||||||
|
|
||||||
|
Proxies were conceived to help manage a network and provide better
|
||||||
|
encapsulation. By themselves, proxies are not a security threat, but a
|
||||||
|
misconfigured or unauthorized proxy can allow others, either inside or
|
||||||
|
outside the network, to access any Web site and even conduct malicious
|
||||||
|
activities anonymously using the network resources.
|
||||||
|
|
||||||
|
What Proxy Server traffic looks like
|
||||||
|
-------------------------------------
|
||||||
|
|
||||||
|
In general, when a client starts talking with a proxy server, the
|
||||||
|
traffic consists of two parts: (i) a GET request, and (ii) an HTTP/
|
||||||
|
reply::
|
||||||
|
|
||||||
|
Request: GET http://www.bro.org/ HTTP/1.1
|
||||||
|
Reply: HTTP/1.0 200 OK
|
||||||
|
|
||||||
|
This will differ from traffic between a client and a normal Web server,
because a GET request sent directly to a Web server should not include
"http:" in the requested URI. We can use this to identify a proxy server.
|
||||||
|
|
||||||
|
We can write a basic script in Bro to handle the http_reply event and
|
||||||
|
detect a reply for a ``GET http://`` request.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/httpmonitor/http_proxy_01.bro
|
||||||
|
|
||||||
|
.. btest:: http_proxy_01
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/http/proxy.pcap ${DOC_ROOT}/httpmonitor/http_proxy_01.bro
|
||||||
|
|
||||||
|
Basically, the script is checking for a "200 OK" status code on a reply
|
||||||
|
for a request that includes "http:" (case insensitive). In reality, the
|
||||||
|
HTTP protocol defines several success status codes other than 200, so we
|
||||||
|
will extend our basic script to also consider the additional codes.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/httpmonitor/http_proxy_02.bro
|
||||||
|
|
||||||
|
.. btest:: http_proxy_02
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/http/proxy.pcap ${DOC_ROOT}/httpmonitor/http_proxy_02.bro
|
||||||
|
|
||||||
|
Next, we will make sure that the responding proxy is part of our local
|
||||||
|
network.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/httpmonitor/http_proxy_03.bro
|
||||||
|
|
||||||
|
.. btest:: http_proxy_03
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/http/proxy.pcap ${DOC_ROOT}/httpmonitor/http_proxy_03.bro
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
The redefinition of :bro:see:`Site::local_nets` is only done inside
|
||||||
|
this script to make it a self-contained example. It's typically
|
||||||
|
redefined somewhere else.
|
||||||
|
|
||||||
|
Finally, our goal should be to generate an alert when a proxy has been
detected, instead of just printing a message to the console. For that,
we will tag the traffic accordingly and define a new ``Open_Proxy``
``Notice`` type to alert on all tagged communications. Once a
notice has been fired, we will further suppress it for one day.
Below is the complete script.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/httpmonitor/http_proxy_04.bro
|
||||||
|
|
||||||
|
.. btest:: http_proxy_04
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/http/proxy.pcap ${DOC_ROOT}/httpmonitor/http_proxy_04.bro
|
||||||
|
@TEST-EXEC: btest-rst-include notice.log
|
||||||
|
|
||||||
|
Note that this script only logs the presence of the proxy to
``notice.log``, but if an additional email is desired (and email
functionality is enabled), then that's done simply by redefining
:bro:see:`Notice::emailed_types` to add the ``Open_Proxy`` notice type
to it.
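
A minimal sketch of that redefinition, assuming the ``HTTP::Open_Proxy`` notice type
from the script above is loaded and that mail settings are configured:

.. code:: bro

    # Send email for open-proxy notices in addition to logging them.
    redef Notice::emailed_types += { HTTP::Open_Proxy };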
|
||||||
|
|
||||||
|
----------------
|
||||||
|
Inspecting Files
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Files are often transmitted over regular HTTP conversations between a
client and a server. Most of the time these files are harmless, just
images and other multimedia content, but there are also types of
files, especially executable files, that can damage your system. We can
instruct Bro to create a copy of all files of certain types that it sees
using the :ref:`File Analysis Framework <file-analysis-framework>`
(introduced with Bro 2.2):
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/httpmonitor/file_extraction.bro
|
||||||
|
|
||||||
|
.. btest:: file_extraction
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd -n 5 bro -r ${TRACES}/http/bro.org.pcap ${DOC_ROOT}/httpmonitor/file_extraction.bro
|
||||||
|
|
||||||
|
Here, the ``mime_to_ext`` table serves two purposes. It defines which
|
||||||
|
mime types to extract and also the file suffix of the extracted files.
|
||||||
|
Extracted files are written to a new ``extract_files`` subdirectory.
|
||||||
|
Also note that the first conditional in the :bro:see:`file_new` event
|
||||||
|
handler can be removed to make this behavior generic to other protocols
|
||||||
|
besides HTTP.
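
Extracting an additional file type only requires one more entry in that table. As a
hedged sketch (the ``application/pdf`` entry is a hypothetical addition, shown as a
``bro_init`` handler so it could live in a separate script loaded alongside the one
above):

.. code:: bro

    event bro_init()
        {
        # Hypothetical addition: also extract PDF documents, using a "pdf" suffix.
        mime_to_ext["application/pdf"] = "pdf";
        }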
|
|
@ -1,9 +1,12 @@
|
||||||
|
|
||||||
.. Bro documentation master file
|
.. Bro documentation master file
|
||||||
|
|
||||||
=================
|
==========
|
||||||
Bro Documentation
|
Bro Manual
|
||||||
=================
|
==========
|
||||||
|
|
||||||
|
Introduction Section
|
||||||
|
====================
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
@ -11,15 +14,36 @@ Bro Documentation
|
||||||
intro/index.rst
|
intro/index.rst
|
||||||
install/index.rst
|
install/index.rst
|
||||||
quickstart/index.rst
|
quickstart/index.rst
|
||||||
using/index.rst
|
|
||||||
|
..
|
||||||
|
|
||||||
|
Using Bro Section
|
||||||
|
=================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
|
logs/index.rst
|
||||||
|
httpmonitor/index.rst
|
||||||
|
broids/index.rst
|
||||||
|
mimestats/index.rst
|
||||||
|
cluster/index.rst
|
||||||
|
|
||||||
|
..
|
||||||
|
|
||||||
|
Reference Section
|
||||||
|
=================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
scripting/index.rst
|
scripting/index.rst
|
||||||
frameworks/index.rst
|
frameworks/index.rst
|
||||||
cluster/index.rst
|
script-reference/index.rst
|
||||||
scripts/index.rst
|
|
||||||
components/index.rst
|
components/index.rst
|
||||||
|
|
||||||
* `Notice Index <bro-noticeindex.html>`_ (TODO: Move to reference
|
..
|
||||||
section, but can't figure out how to include it into toctree)
|
|
||||||
* :ref:`General Index <genindex>`
|
* :ref:`General Index <genindex>`
|
||||||
* :ref:`search`
|
* :ref:`search`
|
||||||
|
|
||||||
|
|
|
@ -29,35 +29,37 @@ before you begin:
|
||||||
* Libpcap (http://www.tcpdump.org)
|
* Libpcap (http://www.tcpdump.org)
|
||||||
* OpenSSL libraries (http://www.openssl.org)
|
* OpenSSL libraries (http://www.openssl.org)
|
||||||
* BIND8 library
|
* BIND8 library
|
||||||
* Libmagic
|
|
||||||
* Libz
|
* Libz
|
||||||
* Bash (for BroControl)
|
* Bash (for BroControl)
|
||||||
|
* Python (for BroControl)
|
||||||
|
|
||||||
To build Bro from source, the following additional dependencies are required:
|
To build Bro from source, the following additional dependencies are required:
|
||||||
|
|
||||||
* CMake 2.6.3 or greater (http://www.cmake.org)
|
* CMake 2.8.0 or greater (http://www.cmake.org)
|
||||||
|
* Make
|
||||||
|
* C/C++ compiler
|
||||||
* SWIG (http://www.swig.org)
|
* SWIG (http://www.swig.org)
|
||||||
* Bison (GNU Parser Generator)
|
* Bison (GNU Parser Generator)
|
||||||
* Flex (Fast Lexical Analyzer)
|
* Flex (Fast Lexical Analyzer)
|
||||||
* Libpcap headers (http://www.tcpdump.org)
|
* Libpcap headers (http://www.tcpdump.org)
|
||||||
* OpenSSL headers (http://www.openssl.org)
|
* OpenSSL headers (http://www.openssl.org)
|
||||||
* libmagic headers
|
|
||||||
* zlib headers
|
* zlib headers
|
||||||
* Perl
|
* Perl
|
||||||
|
|
||||||
To install the required dependencies, you can use:
|
To install the required dependencies, you can use (when done, make sure
|
||||||
|
that ``bash`` and ``python`` are in your ``PATH``):
|
||||||
|
|
||||||
* RPM/RedHat-based Linux:
|
* RPM/RedHat-based Linux:
|
||||||
|
|
||||||
.. console::
|
.. console::
|
||||||
|
|
||||||
sudo yum install cmake make gcc gcc-c++ flex bison libpcap-devel openssl-devel python-devel swig zlib-devel file-devel
|
sudo yum install cmake make gcc gcc-c++ flex bison libpcap-devel openssl-devel python-devel swig zlib-devel
|
||||||
|
|
||||||
* DEB/Debian-based Linux:
|
* DEB/Debian-based Linux:
|
||||||
|
|
||||||
.. console::
|
.. console::
|
||||||
|
|
||||||
sudo apt-get install cmake make gcc g++ flex bison libpcap-dev libssl-dev python-dev swig zlib1g-dev libmagic-dev
|
sudo apt-get install cmake make gcc g++ flex bison libpcap-dev libssl-dev python-dev swig zlib1g-dev
|
||||||
|
|
||||||
* FreeBSD:
|
* FreeBSD:
|
||||||
|
|
||||||
|
@ -66,11 +68,7 @@ To install the required dependencies, you can use:
|
||||||
|
|
||||||
.. console::
|
.. console::
|
||||||
|
|
||||||
sudo pkg_add -r bash cmake swig bison python
|
sudo pkg_add -r bash cmake swig bison python perl
|
||||||
|
|
||||||
Note that ``bash`` needs to be in ``PATH``, which by default it is
|
|
||||||
not. The FreeBSD package installs the binary into
|
|
||||||
``/usr/local/bin``.
|
|
||||||
|
|
||||||
* Mac OS X:
|
* Mac OS X:
|
||||||
|
|
||||||
|
@ -78,15 +76,11 @@ To install the required dependencies, you can use:
|
||||||
then going through its "Preferences..." -> "Downloads" menus to
|
then going through its "Preferences..." -> "Downloads" menus to
|
||||||
install the "Command Line Tools" component.
|
install the "Command Line Tools" component.
|
||||||
|
|
||||||
Lion (10.7) and Mountain Lion (10.8) come with all required
|
OS X comes with all required dependencies except for CMake_ and SWIG_.
|
||||||
dependencies except for CMake_, SWIG_, and ``libmagic``.
|
|
||||||
|
|
||||||
Distributions of these dependencies can likely be obtained from your
|
Distributions of these dependencies can likely be obtained from your
|
||||||
preferred Mac OS X package management system (e.g. MacPorts_, Fink_,
|
preferred Mac OS X package management system (e.g. MacPorts_, Fink_,
|
||||||
or Homebrew_).
|
or Homebrew_). Specifically for MacPorts, the ``cmake``, ``swig``,
|
||||||
|
``swig-python`` and packages provide the required dependencies.
|
||||||
Specifically for MacPorts, the ``swig``, ``swig-ruby``, ``swig-python``
|
|
||||||
and ``file`` packages provide the required dependencies.
|
|
||||||
|
|
||||||
|
|
||||||
Optional Dependencies
|
Optional Dependencies
|
||||||
|
@ -96,45 +90,16 @@ Bro can make use of some optional libraries and tools if they are found at
|
||||||
build time:
|
build time:
|
||||||
|
|
||||||
* LibGeoIP (for geo-locating IP addresses)
|
* LibGeoIP (for geo-locating IP addresses)
|
||||||
|
* sendmail (enables Bro and BroControl to send mail)
|
||||||
|
* gawk (enables all features of bro-cut)
|
||||||
|
* curl (used by a Bro script that implements active HTTP)
|
||||||
* gperftools (tcmalloc is used to improve memory and CPU usage)
|
* gperftools (tcmalloc is used to improve memory and CPU usage)
|
||||||
* ipsumdump (for trace-summary; http://www.cs.ucla.edu/~kohler/ipsumdump)
|
* ipsumdump (for trace-summary; http://www.cs.ucla.edu/~kohler/ipsumdump)
|
||||||
* Ruby executable, library, and headers (for Broccoli Ruby bindings)
|
* Ruby executable, library, and headers (for Broccoli Ruby bindings)
|
||||||
|
|
||||||
LibGeoIP is probably the most interesting and can be easily installed
|
LibGeoIP is probably the most interesting and can be installed
|
||||||
on most platforms:
|
on most platforms by following the instructions for :ref:`installing
|
||||||
|
libGeoIP and the GeoIP database
|
||||||
* RedHat Enterprise Linux:
|
|
||||||
|
|
||||||
.. console::
|
|
||||||
|
|
||||||
sudo yum install geoip-devel sendmail
|
|
||||||
|
|
||||||
* CentOS Linux:
|
|
||||||
|
|
||||||
.. console::
|
|
||||||
|
|
||||||
sudo yum install GeoIP-devel sendmail
|
|
||||||
|
|
||||||
* DEB/Debian-based Linux:
|
|
||||||
|
|
||||||
.. console::
|
|
||||||
|
|
||||||
sudo apt-get install libgeoip-dev sendmail
|
|
||||||
|
|
||||||
* FreeBSD using ports:
|
|
||||||
|
|
||||||
.. console::
|
|
||||||
|
|
||||||
sudo pkg_add -r GeoIP
|
|
||||||
|
|
||||||
* Mac OS X:
|
|
||||||
|
|
||||||
Vanilla OS X installations don't ship with libGeoIP, but if
|
|
||||||
installed from your preferred package management system (e.g.
|
|
||||||
MacPorts, Fink, or Homebrew), they should be automatically detected
|
|
||||||
and Bro will compile against them.
|
|
||||||
|
|
||||||
Additional steps may be needed to :ref:`get the right GeoIP database
|
|
||||||
<geolocation>`.
|
<geolocation>`.
|
||||||
|
|
||||||
|
|
||||||
|
@ -215,7 +180,7 @@ turn off unwanted auxiliary projects that would otherwise be installed
|
||||||
automatically. Finally, use ``make install-aux`` to install some of
|
automatically. Finally, use ``make install-aux`` to install some of
|
||||||
the other programs that are in the ``aux/bro-aux`` directory.
|
the other programs that are in the ``aux/bro-aux`` directory.
|
||||||
|
|
||||||
OpenBSD users, please see our at `FAQ
|
OpenBSD users, please see our `FAQ
|
||||||
<http://www.bro.org/documentation/faq.html>`_ if you are having
|
<http://www.bro.org/documentation/faq.html>`_ if you are having
|
||||||
problems installing Bro.
|
problems installing Bro.
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
|
|
||||||
.. _using-bro:
|
.. _bro-logging:
|
||||||
|
|
||||||
=========
|
===========
|
||||||
Using Bro
|
Bro Logging
|
||||||
=========
|
===========
|
||||||
|
|
||||||
.. contents::
|
.. contents::
|
||||||
|
|
||||||
|
@ -251,3 +251,42 @@ stream and Bro is able to extract and track that information for you,
|
||||||
giving you an in-depth and structured view into HTTP traffic on your
|
giving you an in-depth and structured view into HTTP traffic on your
|
||||||
network.
|
network.
|
||||||
|
|
||||||
|
-----------------------
|
||||||
|
Common Log Files
|
||||||
|
-----------------------
|
||||||
|
As a monitoring tool, Bro records a detailed view of the traffic inspected and the events generated in
|
||||||
|
a series of relevant log files. These files can later be reviewed for monitoring, auditing and troubleshooting
|
||||||
|
purposes.
|
||||||
|
|
||||||
|
In this section we present a brief explanation of the most commonly used log files generated by Bro including links
|
||||||
|
to descriptions of some of the fields for each log type.
|
||||||
|
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| Log File | Description | Field Descriptions |
|
||||||
|
+=================+=======================================+==============================+
|
||||||
|
| http.log | Shows all HTTP requests and replies | :bro:type:`HTTP::Info` |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| ftp.log | Records FTP activity | :bro:type:`FTP::Info` |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| ssl.log | Records SSL sessions including | :bro:type:`SSL::Info` |
|
||||||
|
| | certificates used | |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| known_certs.log | Includes SSL certificates used | :bro:type:`Known::CertsInfo` |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| smtp.log | Summarizes SMTP traffic on a network | :bro:type:`SMTP::Info` |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| dns.log | Shows all DNS activity on a network | :bro:type:`DNS::Info` |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| conn.log | Records all connections seen by Bro | :bro:type:`Conn::Info` |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| dpd.log | Shows network activity on | :bro:type:`DPD::Info` |
|
||||||
|
| | non-standard ports | |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| files.log | Records information about all files | :bro:type:`Files::Info` |
|
||||||
|
| | transmitted over the network | |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
| weird.log | Records unexpected protocol-level | :bro:type:`Weird::Info` |
|
||||||
|
| | activity | |
|
||||||
|
+-----------------+---------------------------------------+------------------------------+
|
||||||
|
|
||||||
|
|
71
doc/mimestats/index.rst
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
|
||||||
|
.. _mime-stats:
|
||||||
|
|
||||||
|
====================
|
||||||
|
MIME Type Statistics
|
||||||
|
====================
|
||||||
|
|
||||||
|
Files are constantly transmitted over HTTP on regular networks. These
files belong to a specific category (e.g., executable, text, image)
identified by a `Multipurpose Internet Mail Extension (MIME)
<http://en.wikipedia.org/wiki/MIME>`_ type. Although MIME was originally
developed to identify the type of non-text attachments in email, it is
also used by Web browsers to identify the type of files transmitted and
present them accordingly.
|
||||||
|
|
||||||
|
In this tutorial, we will show how to use the SumStats Framework to
collect statistics based on MIME types, specifically the total number of
occurrences, the size in bytes, and the number of unique hosts
transmitting files over HTTP for each type. For instructions on
extracting and creating a local copy of these files, see :ref:`this
<http-monitor>` tutorial instead.
|
||||||
|
|
||||||
|
------------------------------------------------
|
||||||
|
MIME Statistics with Sumstats
|
||||||
|
------------------------------------------------
|
||||||
|
|
||||||
|
When working with the :ref:`Summary Statistics Framework
|
||||||
|
<sumstats-framework>`, you need to define three different pieces: (i)
|
||||||
|
Observations, where the event is observed and fed into the framework.
|
||||||
|
(ii) Reducers, where observations are collected and measured. (iii)
|
||||||
|
Sumstats, where the main functionality is implemented.
|
||||||
|
|
||||||
|
So, we start by defining our observation along with a record to store
all statistics values and an observation interval. We are conducting our
observation on the :bro:see:`HTTP::log_http` event and we are interested
in the MIME type, the size of the file ("response_body_len") and the
originator host ("orig_h"). We use the MIME type as our key and create
observations for the other two values.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/mimestats/mimestats.bro
|
||||||
|
:lines: 6-29, 54-64
|
||||||
|
|
||||||
|
Next, we create the reducers. The first one will accumulate file sizes
|
||||||
|
and the second one will make sure we only store a host ID once. Below is
|
||||||
|
the partial code from a :bro:see:`bro_init` handler.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/mimestats/mimestats.bro
|
||||||
|
:lines: 34-37
|
||||||
|
|
||||||
|
In our final step, we create the SumStats where we check for the
|
||||||
|
observation interval and once it expires, we populate the record
|
||||||
|
(defined above) with all the relevant data and write it to a log.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/mimestats/mimestats.bro
|
||||||
|
:lines: 38-51
|
||||||
|
|
||||||
|
Putting everything together we end up with the following final code for
|
||||||
|
our script.
|
||||||
|
|
||||||
|
.. btest-include:: ${DOC_ROOT}/mimestats/mimestats.bro
|
||||||
|
|
||||||
|
.. btest:: mimestats
|
||||||
|
|
||||||
|
@TEST-EXEC: btest-rst-cmd bro -r ${TRACES}/http/bro.org.pcap ${DOC_ROOT}/mimestats/mimestats.bro
|
||||||
|
@TEST-EXEC: btest-rst-include mime_metrics.log
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
The redefinition of :bro:see:`Site::local_nets` is only done inside
|
||||||
|
this script to make it a self-contained example. It's typically
|
||||||
|
redefined somewhere else.
|
64
doc/mimestats/mimestats.bro
Normal file
|
@ -0,0 +1,64 @@
|
||||||
|
@load base/utils/site
|
||||||
|
@load base/frameworks/sumstats
|
||||||
|
|
||||||
|
redef Site::local_nets += { 10.0.0.0/8 };
|
||||||
|
|
||||||
|
module MimeMetrics;
|
||||||
|
|
||||||
|
export {
|
||||||
|
|
||||||
|
redef enum Log::ID += { LOG };
|
||||||
|
|
||||||
|
type Info: record {
|
||||||
|
## Timestamp when the log line was finished and written.
|
||||||
|
ts: time &log;
|
||||||
|
## Time interval that the log line covers.
|
||||||
|
ts_delta: interval &log;
|
||||||
|
## The mime type
|
||||||
|
mtype: string &log;
|
||||||
|
## The number of unique local hosts that fetched this mime type
|
||||||
|
uniq_hosts: count &log;
|
||||||
|
## The number of hits to the mime type
|
||||||
|
hits: count &log;
|
||||||
|
## The total number of bytes received by this mime type
|
||||||
|
bytes: count &log;
|
||||||
|
};
|
||||||
|
|
||||||
|
## The frequency of logging the stats collected by this script.
|
||||||
|
const break_interval = 5mins &redef;
|
||||||
|
}
|
||||||
|
|
||||||
|
event bro_init() &priority=3
|
||||||
|
{
|
||||||
|
Log::create_stream(MimeMetrics::LOG, [$columns=Info]);
|
||||||
|
local r1: SumStats::Reducer = [$stream="mime.bytes",
|
||||||
|
$apply=set(SumStats::SUM)];
|
||||||
|
local r2: SumStats::Reducer = [$stream="mime.hits",
|
||||||
|
$apply=set(SumStats::UNIQUE)];
|
||||||
|
SumStats::create([$name="mime-metrics",
|
||||||
|
$epoch=break_interval,
|
||||||
|
$reducers=set(r1, r2),
|
||||||
|
$epoch_result(ts: time, key: SumStats::Key, result: SumStats::Result) =
|
||||||
|
{
|
||||||
|
local l: Info;
|
||||||
|
l$ts = network_time();
|
||||||
|
l$ts_delta = break_interval;
|
||||||
|
l$mtype = key$str;
|
||||||
|
l$bytes = double_to_count(floor(result["mime.bytes"]$sum));
|
||||||
|
l$hits = result["mime.hits"]$num;
|
||||||
|
l$uniq_hosts = result["mime.hits"]$unique;
|
||||||
|
Log::write(MimeMetrics::LOG, l);
|
||||||
|
}]);
|
||||||
|
}
|
||||||
|
|
||||||
|
event HTTP::log_http(rec: HTTP::Info)
|
||||||
|
{
|
||||||
|
if ( Site::is_local_addr(rec$id$orig_h) && rec?$resp_mime_types )
|
||||||
|
{
|
||||||
|
local mime_type = rec$resp_mime_types[0];
|
||||||
|
SumStats::observe("mime.bytes", [$str=mime_type],
|
||||||
|
[$num=rec$response_body_len]);
|
||||||
|
SumStats::observe("mime.hits", [$str=mime_type],
|
||||||
|
[$str=cat(rec$id$orig_h)]);
|
||||||
|
}
|
||||||
|
}
|
|
@ -13,7 +13,7 @@ Bro works on most modern, Unix-based systems and requires no custom
|
||||||
hardware. It can be downloaded in either pre-built binary package or
|
hardware. It can be downloaded in either pre-built binary package or
|
||||||
source code forms. See :ref:`installing-bro` for instructions on how to
|
source code forms. See :ref:`installing-bro` for instructions on how to
|
||||||
install Bro. Below, ``$PREFIX`` is used to reference the Bro
|
install Bro. Below, ``$PREFIX`` is used to reference the Bro
|
||||||
installation root directory, which by default is ``/usr/local/`` if
|
installation root directory, which by default is ``/usr/local/bro/`` if
|
||||||
you install from source.
|
you install from source.
|
||||||
|
|
||||||
Managing Bro with BroControl
|
Managing Bro with BroControl
|
||||||
|
@ -26,8 +26,8 @@ traffic-monitoring cluster.
|
||||||
A Minimal Starting Configuration
|
A Minimal Starting Configuration
|
||||||
--------------------------------
|
--------------------------------
|
||||||
|
|
||||||
These are the basic configuration changes to make for a minimal BroControl installation
|
These are the basic configuration changes to make for a minimal BroControl
|
||||||
that will manage a single Bro instance on the ``localhost``:
|
installation that will manage a single Bro instance on the ``localhost``:
|
||||||
|
|
||||||
1) In ``$PREFIX/etc/node.cfg``, set the right interface to monitor.
|
1) In ``$PREFIX/etc/node.cfg``, set the right interface to monitor.
|
||||||
2) In ``$PREFIX/etc/networks.cfg``, comment out the default settings and add
|
2) In ``$PREFIX/etc/networks.cfg``, comment out the default settings and add
|
||||||
|
@ -72,7 +72,8 @@ You can leave it running for now, but to stop this Bro instance you would do:
|
||||||
|
|
||||||
[BroControl] > stop
|
[BroControl] > stop
|
||||||
|
|
||||||
We also recommend to insert the following entry into `crontab`::
|
We also recommend to insert the following entry into the crontab of the user
|
||||||
|
running BroControl::
|
||||||
|
|
||||||
0-59/5 * * * * $PREFIX/bin/broctl cron
|
0-59/5 * * * * $PREFIX/bin/broctl cron
|
||||||
|
|
||||||
|
@ -197,7 +198,7 @@ the variable's value may not change at run-time, but whose initial value can be
|
||||||
modified via the ``redef`` operator at parse-time.
|
modified via the ``redef`` operator at parse-time.
|
||||||
|
|
||||||
So let's continue on our path to modify the behavior for the two SSL
|
So let's continue on our path to modify the behavior for the two SSL
|
||||||
and SSH notices. Looking at :doc:`/scripts/base/frameworks/notice/main`,
|
and SSH notices. Looking at :doc:`/scripts/base/frameworks/notice/main.bro`,
|
||||||
we see that it advertises:
|
we see that it advertises:
|
||||||
|
|
||||||
.. code:: bro
|
.. code:: bro
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
Built-in Types and Attributes
|
Types and Attributes
|
||||||
=============================
|
====================
|
||||||
|
|
||||||
Types
|
Types
|
||||||
-----
|
-----
|
||||||
|
@ -23,7 +23,8 @@ The Bro scripting language supports the following built-in types.
|
||||||
|
|
||||||
.. bro:type:: void
|
.. bro:type:: void
|
||||||
|
|
||||||
An internal Bro type representing the absence of a return type for a
|
An internal Bro type (i.e., "void" is not a reserved keyword in the Bro
|
||||||
|
scripting language) representing the absence of a return type for a
|
||||||
function.
|
function.
|
||||||
|
|
||||||
.. bro:type:: bool
|
.. bro:type:: bool
|
||||||
|
@ -132,10 +133,23 @@ The Bro scripting language supports the following built-in types.
|
||||||
|
|
||||||
Strings support concatenation (``+``), and assignment (``=``, ``+=``).
|
Strings support concatenation (``+``), and assignment (``=``, ``+=``).
|
||||||
Strings also support the comparison operators (``==``, ``!=``, ``<``,
|
Strings also support the comparison operators (``==``, ``!=``, ``<``,
|
||||||
``<=``, ``>``, ``>=``). Substring searching can be performed using
|
``<=``, ``>``, ``>=``). The number of characters in a string can be
|
||||||
the "in" or "!in" operators (e.g., "bar" in "foobar" yields true).
|
found by enclosing the string within pipe characters (e.g., ``|"abc"|``
|
||||||
The number of characters in a string can be found by enclosing the
|
is 3).
|
||||||
string within pipe characters (e.g., ``|"abc"|`` is 3).
|
|
||||||
|
The subscript operator can extract an individual character or a substring
|
||||||
|
of a string (string indexing is zero-based, but an index of
|
||||||
|
-1 refers to the last character in the string, and -2 refers to the
|
||||||
|
second-to-last character, etc.). When extracting a substring, the
|
||||||
|
starting and ending index values are separated by a colon. For example::
|
||||||
|
|
||||||
|
local orig = "0123456789";
|
||||||
|
local third_char = orig[2];
|
||||||
|
local last_char = orig[-1];
|
||||||
|
local first_three_chars = orig[0:2];
|
||||||
|
|
||||||
|
Substring searching can be performed using the "in" or "!in"
|
||||||
|
operators (e.g., "bar" in "foobar" yields true).
|
||||||
|
|
||||||
Note that Bro represents strings internally as a count and vector of
|
Note that Bro represents strings internally as a count and vector of
|
||||||
bytes rather than a NUL-terminated byte string (although string
|
bytes rather than a NUL-terminated byte string (although string
|
||||||
|
@ -767,7 +781,7 @@ The Bro scripting language supports the following built-in types.
|
||||||
.. bro:type:: hook
|
.. bro:type:: hook
|
||||||
|
|
||||||
A hook is another flavor of function that shares characteristics of
|
A hook is another flavor of function that shares characteristics of
|
||||||
both a :bro:type:`function` and a :bro:type:`event`. They are like
|
both a :bro:type:`function` and an :bro:type:`event`. They are like
|
||||||
events in that many handler bodies can be defined for the same hook
|
events in that many handler bodies can be defined for the same hook
|
||||||
identifier and the order of execution can be enforced with
|
identifier and the order of execution can be enforced with
|
||||||
:bro:attr:`&priority`. They are more like functions in the way they
|
:bro:attr:`&priority`. They are more like functions in the way they
|
||||||
|
@ -856,14 +870,14 @@ scripting language supports the following built-in attributes.
|
||||||
.. bro:attr:: &optional
|
.. bro:attr:: &optional
|
||||||
|
|
||||||
Allows a record field to be missing. For example the type ``record {
|
Allows a record field to be missing. For example the type ``record {
|
||||||
a: int, b: port &optional }`` could be instantiated both as
|
a: addr; b: port &optional; }`` could be instantiated both as
|
||||||
singleton ``[$a=127.0.0.1]`` or pair ``[$a=127.0.0.1, $b=80/tcp]``.
|
singleton ``[$a=127.0.0.1]`` or pair ``[$a=127.0.0.1, $b=80/tcp]``.
|
||||||
|
|
||||||
.. bro:attr:: &default
|
.. bro:attr:: &default
|
||||||
|
|
||||||
Uses a default value for a record field, a function/hook/event
|
Uses a default value for a record field, a function/hook/event
|
||||||
parameter, or container elements. For example, ``table[int] of
|
parameter, or container elements. For example, ``table[int] of
|
||||||
string &default="foo" }`` would create a table that returns the
|
string &default="foo"`` would create a table that returns the
|
||||||
:bro:type:`string` ``"foo"`` for any non-existing index.
|
:bro:type:`string` ``"foo"`` for any non-existing index.
|
||||||
|
|
||||||
.. bro:attr:: &redef
|
.. bro:attr:: &redef
|
||||||
|
@ -901,7 +915,7 @@ scripting language supports the following built-in attributes.
|
||||||
Called right before a container element expires. The function's
|
Called right before a container element expires. The function's
|
||||||
first parameter is of the same type of the container and the second
|
first parameter is of the same type of the container and the second
|
||||||
parameter the same type of the container's index. The return
|
parameter the same type of the container's index. The return
|
||||||
value is a :bro:type:`interval` indicating the amount of additional
|
value is an :bro:type:`interval` indicating the amount of additional
|
||||||
time to wait before expiring the container element at the given
|
time to wait before expiring the container element at the given
|
||||||
index (which will trigger another execution of this function).
|
index (which will trigger another execution of this function).
|
||||||
|
|
||||||
|
@ -925,7 +939,7 @@ scripting language supports the following built-in attributes.
|
||||||
|
|
||||||
.. bro:attr:: &persistent
|
.. bro:attr:: &persistent
|
||||||
|
|
||||||
Makes a variable persistent, i.e., its value is writen to disk (per
|
Makes a variable persistent, i.e., its value is written to disk (per
|
||||||
default at shutdown time).
|
default at shutdown time).
|
||||||
|
|
||||||
.. bro:attr:: &synchronized
|
.. bro:attr:: &synchronized
|
||||||
|
@ -957,8 +971,9 @@ scripting language supports the following built-in attributes.
|
||||||
|
|
||||||
.. bro:attr:: &priority
|
.. bro:attr:: &priority
|
||||||
|
|
||||||
Specifies the execution priority of an event handler. Higher values
|
Specifies the execution priority (as a signed integer) of a hook or
|
||||||
are executed before lower ones. The default value is 0.
|
event handler. Higher values are executed before lower ones. The
|
||||||
|
default value is 0.
|
||||||
|
|
||||||
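For example, with two handlers for the same event (a trivial sketch):

.. code:: bro

    event bro_init() &priority=10
        {
        print "runs first";
        }

    event bro_init() &priority=-5
        {
        print "runs last";
        }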
.. bro:attr:: &group
|
.. bro:attr:: &group
|
||||||
|
|
1
doc/script-reference/file-analyzers.rst
Normal file
1
doc/script-reference/file-analyzers.rst
Normal file
|
@ -0,0 +1 @@
|
||||||
|
.. broxygen:file_analyzer:: *
|
|
@ -1,5 +1,3 @@
|
||||||
.. This is a stub doc to which broxygen appends during the build process
|
|
||||||
|
|
||||||
================
|
================
|
||||||
Script Reference
|
Script Reference
|
||||||
================
|
================
|
||||||
|
@ -7,14 +5,10 @@ Script Reference
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
builtins
|
notices
|
||||||
Built-In Functions (BIFs) <base/bif/index>
|
|
||||||
scripts
|
|
||||||
packages
|
|
||||||
internal
|
|
||||||
proto-analyzers
|
proto-analyzers
|
||||||
file-analyzers
|
file-analyzers
|
||||||
|
builtins
|
||||||
|
packages
|
||||||
|
scripts
|
||||||
|
Broxygen Example Script </scripts/broxygen/example.bro>
|
8
doc/script-reference/notices.rst
Normal file
8
doc/script-reference/notices.rst
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
|
||||||
|
.. Not nice but I don't find a way to link to the notice index
|
||||||
|
.. directly from the upper level TOC tree.
|
||||||
|
|
||||||
|
Notices
|
||||||
|
=======
|
||||||
|
|
||||||
|
See the `Bro Notice Index <../bro-noticeindex.html>`_.
|
|
@ -1,9 +1,7 @@
|
||||||
.. This is a stub doc to which broxygen appends during the build process
|
|
||||||
|
|
||||||
.. _script-packages:
|
.. _script-packages:
|
||||||
|
|
||||||
Bro Script Packages
|
Bro Package Index
|
||||||
===================
|
=================
|
||||||
|
|
||||||
Bro has the following script packages (i.e., collections of related scripts in
|
Bro has the following script packages (i.e., collections of related scripts in

|
||||||
a common directory). If the package directory contains a ``__load__.bro``
|
a common directory). If the package directory contains a ``__load__.bro``
|
||||||
|
@ -12,3 +10,5 @@ script, it supports being loaded in mass as a whole directory for convenience.
|
||||||
Packages/scripts in the ``base/`` directory are all loaded by default, while
|
Packages/scripts in the ``base/`` directory are all loaded by default, while
|
||||||
ones in ``policy/`` provide functionality and customization options that are
|
ones in ``policy/`` provide functionality and customization options that are
|
||||||
more appropriate for users to decide whether they'd like to load them or not.
|
more appropriate for users to decide whether they'd like to load them or not.
|
||||||
|
|
||||||
|
.. broxygen:package_index:: *
|
1
doc/script-reference/proto-analyzers.rst
Normal file
1
doc/script-reference/proto-analyzers.rst
Normal file
|
@ -0,0 +1 @@
|
||||||
|
.. broxygen:proto_analyzer:: *
|
5
doc/script-reference/scripts.rst
Normal file
5
doc/script-reference/scripts.rst
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
================
|
||||||
|
Bro Script Index
|
||||||
|
================
|
||||||
|
|
||||||
|
.. broxygen:script_index:: *
|
|
@ -10,13 +10,6 @@ Writing Bro Scripts
|
||||||
Understanding Bro Scripts
|
Understanding Bro Scripts
|
||||||
=========================
|
=========================
|
||||||
|
|
||||||
.. todo::
|
|
||||||
|
|
||||||
The MHR integration has changed significantly since the text was
|
|
||||||
written. We need to update it, however I'm actually not sure this
|
|
||||||
script is a good introductory example anymore unfortunately.
|
|
||||||
-Robin
|
|
||||||
|
|
||||||
Bro includes an event-driven scripting language that provides
|
Bro includes an event-driven scripting language that provides
|
||||||
the primary means for an organization to extend and customize Bro's
|
the primary means for an organization to extend and customize Bro's
|
||||||
functionality. Virtually all of the output generated by Bro
|
functionality. Virtually all of the output generated by Bro
|
||||||
|
@ -33,96 +26,108 @@ are invalid. This entire process is setup by telling Bro that should
|
||||||
it see a server or client issue an SSL ``HELLO`` message, we want to know
|
it see a server or client issue an SSL ``HELLO`` message, we want to know
|
||||||
the details of that connection.
|
the details of that connection.
|
||||||
|
|
||||||
It's often the easiest to understand Bro's scripting language by
|
It's often easiest to understand Bro's scripting language by
|
||||||
looking at a complete script and breaking it down into its
|
looking at a complete script and breaking it down into its
|
||||||
identifiable components. In this example, we'll take a look at how
|
identifiable components. In this example, we'll take a look at how
|
||||||
Bro queries the `Team Cymru Malware hash registry
|
Bro checks the SHA1 hash of various files extracted from network traffic
|
||||||
<http://www.team-cymru.org/Services/MHR/>`_ for downloads via
|
against the `Team Cymru Malware hash registry
|
||||||
HTTP. Part of the Team Cymru Malware Hash registry includes the
|
<http://www.team-cymru.org/Services/MHR/>`_. Part of the Team Cymru Malware
|
||||||
ability to do a host lookup on a domain with the format
|
Hash registry includes the ability to do a host lookup on a domain with the format
|
||||||
``MALWARE_HASH.malware.hash.cymru.com`` where ``MALWARE_HASH`` is the MD5 or
|
``<MALWARE_HASH>.malware.hash.cymru.com`` where ``<MALWARE_HASH>`` is the SHA1 hash of a file.
|
||||||
SHA1 hash of a file. Team Cymru also populates the TXT record of
|
Team Cymru also populates the TXT record of their DNS responses with both a "first seen"
|
||||||
their DNS responses with both a "last seen" timestamp and a numerical
|
timestamp and a numerical "detection rate". The important aspect to understand is that Bro is already
|
||||||
"detection rate". The important aspect to understand is Bro already
|
generating hashes for files via the Files framework, but it is the
|
||||||
generates hashes for files it can parse from HTTP streams, but the
|
script ``detect-MHR.bro`` that is responsible for generating the
|
||||||
script ``detect-MHR.bro`` is responsible for generating the
|
appropriate DNS lookup, parsing the response, and generating a notice if appropriate.
|
||||||
appropriate DNS lookup and parsing the response.
|
|
||||||
|
|
||||||
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/frameworks/files/detect-MHR.bro
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/frameworks/files/detect-MHR.bro
|
||||||
|
|
||||||
Visually, there are three distinct sections of the script. A base
|
Visually, there are three distinct sections of the script. First, there is a base
|
||||||
level with no indentation followed by an indented and formatted
|
level with no indentation where libraries are included in the script through ``@load``
|
||||||
section explaining the custom variables being provided (``export``) and another
|
and a namespace is defined with ``module``. This is followed by an indented and formatted
|
||||||
indented and formatted section describing the instructions for a
|
section explaining the custom variables being provided (``export``) as part of the script's namespace.
|
||||||
specific event (``event log_http``). Don't get discouraged if you don't
|
Finally, there is a second indented and formatted section describing the actions to take for a
|
||||||
|
specific event (``event file_hash``). Don't get discouraged if you don't
|
||||||
understand every section of the script; we'll cover the basics of the
|
understand every section of the script; we'll cover the basics of the
|
||||||
script and much more in following sections.
|
script and much more in following sections.
|
||||||
|
|
||||||
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/frameworks/files/detect-MHR.bro
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/frameworks/files/detect-MHR.bro
|
||||||
:lines: 7-11
|
:lines: 4-6
|
||||||
|
|
||||||
Lines 7 and 8 of the script process the ``__load__.bro`` script in the
|
Lines 3 to 5 of the script process the ``__load__.bro`` scripts in the
|
||||||
respective directories being loaded. The ``@load`` directives are
|
respective directories being loaded. The ``@load`` directives are
|
||||||
often considered good practice or even just good manners when writing
|
often considered good practice or even just good manners when writing
|
||||||
Bro scripts to make sure they can be
|
Bro scripts to make sure they can be used on their own. While it's unlikely that in a
|
||||||
used on their own. While it's unlikely that in a
|
|
||||||
full production deployment of Bro these additional resources wouldn't
|
full production deployment of Bro these additional resources wouldn't
|
||||||
already be loaded, it's not a bad habit to try to get into as you get
|
already be loaded, it's not a bad habit to try to get into as you get
|
||||||
more experienced with Bro scripting. If you're just starting out,
|
more experienced with Bro scripting. If you're just starting out,
|
||||||
this level of granularity might not be entirely necessary though.
|
this level of granularity might not be entirely necessary. The ``@load`` directives
|
||||||
|
ensure that the Files framework, the Notice framework, and the script to hash all files have
|
||||||
|
been loaded by Bro.
|
||||||
|
|
||||||
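Those directives presumably look something like the following; the exact paths belong to the script itself and may differ slightly:

.. code:: bro

    @load base/frameworks/files
    @load base/frameworks/notice
    @load frameworks/files/hash-all-files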
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/frameworks/files/detect-MHR.bro
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/frameworks/files/detect-MHR.bro
|
||||||
:lines: 12-24
|
:lines: 10-36
|
||||||
|
|
||||||
The export section redefines an enumerable constant that describes the
|
The export section redefines an enumerable constant that describes the
|
||||||
type of notice we will generate with the logging framework. Bro
|
type of notice we will generate with the Notice framework. Bro
|
||||||
allows for redefinable constants, which at first, might seem
|
allows for re-definable constants, which at first, might seem
|
||||||
counter-intuitive. We'll get more in-depth with constants in a later
|
counter-intuitive. We'll get more in-depth with constants in a later
|
||||||
chapter, for now, think of them as variables that can only be altered
|
chapter, for now, think of them as variables that can only be altered
|
||||||
before Bro starts running. The notice type listed allows for the use
|
before Bro starts running. The notice type listed allows for the use
|
||||||
of the :bro:id:`NOTICE` function to generate notices of type
|
of the :bro:id:`NOTICE` function to generate notices of type
|
||||||
``Malware_Hash_Registry_Match`` as done in the next section. Notices
|
``TeamCymruMalwareHashRegistry::Match`` as done in the next section. Notices
|
||||||
allow Bro to generate some kind of extra notification beyond its
|
allow Bro to generate some kind of extra notification beyond its
|
||||||
default log types. Often times, this extra notification comes in the
|
default log types. Often times, this extra notification comes in the
|
||||||
form of an email generated and sent to a pre-configured address.
|
form of an email generated and sent to a preconfigured address, but this can be altered
|
||||||
|
depending on the needs of the deployment. The export section is finished off with
|
||||||
|
the definition of two constants that list the kind of files we want to match against and
|
||||||
|
the minimum detection rate, as a percentage, in which we are interested.
|
||||||
|
|
||||||
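In outline, that export block has roughly the following shape (the default values shown here are placeholders, not the script's actual settings):

.. code:: bro

    module TeamCymruMalwareHashRegistry;

    export {
        redef enum Notice::Type += {
            ## The hash of a file seen on the network matched the registry.
            Match
        };

        ## MIME types of files to check against the registry (placeholder).
        const match_file_types = /application\/x-dosexec/ &redef;

        ## Minimum detection rate, in percent, required to raise a notice
        ## (placeholder).
        const notice_threshold = 10 &redef;
    }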
|
Up until this point, the script has merely done some basic setup. With the next section,
|
||||||
|
the script starts to define instructions to take in a given event.
|
||||||
|
|
||||||
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/frameworks/files/detect-MHR.bro
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/frameworks/files/detect-MHR.bro
|
||||||
:lines: 26-44
|
:lines: 38-62
|
||||||
|
|
||||||
The workhorse of the script is contained in the event handler for
|
The workhorse of the script is contained in the event handler for
|
||||||
``log_http``. The ``log_http`` event is defined as an event-hook in
|
``file_hash``. The :bro:see:`file_hash` event allows scripts to access
|
||||||
the :doc:`/scripts/base/protocols/http/main` script and allows scripts
|
the information associated with a file for which Bro's file analysis framework has
|
||||||
to handle a connection as it is being passed to the logging framework.
|
generated a hash. The event handler is passed the file itself as ``f``, the type of digest
|
||||||
The event handler is passed an :bro:type:`HTTP::Info` data structure
|
algorithm used as ``kind`` and the hash generated as ``hash``.
|
||||||
which will be referred to as ``rec`` in body of the event handler.
|
|
||||||
|
|
||||||
An ``if`` statement is used to check for the existence of a data structure
|
On line 3, an ``if`` statement is used to check for the correct type of hash, in this case
|
||||||
named ``md5`` nested within the ``rec`` data structure. Bro uses the ``$`` as
|
a SHA1 hash. It also checks for a mime type we've defined as being of interest as defined in the
|
||||||
a deference operator and as such, and it is employed in this script to
|
constant ``match_file_types``. The comparison is made against the expression ``f$mime_type``, which uses
|
||||||
check if ``rec$md5`` is present by including the ``?`` operator within the
|
the ``$`` dereference operator to check the value ``mime_type`` inside the variable ``f``. Once both
|
||||||
path. If the ``rec`` data structure includes a nested data structure
|
checks resolve to true, a local variable is defined to hold a string composed of the SHA1 hash concatenated
|
||||||
named ``md5``, the statement is processed as true and a local variable
|
with ``.malware.hash.cymru.com``; this value will be the domain queried in the malware hash registry.
|
||||||
named ``hash_domain`` is provisioned and given a format string based on
|
|
||||||
the contents of ``rec$md5`` to produce a valid DNS lookup.
|
|
||||||
|
|
||||||
The rest of the script is contained within a ``when`` block. In
|
The rest of the script is contained within a ``when`` block. In
|
||||||
short, a ``when`` block is used when Bro needs to perform asynchronous
|
short, a ``when`` block is used when Bro needs to perform asynchronous
|
||||||
actions, such a DNS lookup, to ensure that performance isn't effected.
|
actions, such as a DNS lookup, to ensure that performance isn't affected.
|
||||||
The ``when`` block performs a DNS TXT lookup and stores the result
|
The ``when`` block performs a DNS TXT lookup and stores the result
|
||||||
in the local variable ``MHR_result``. Effectively, processing for
|
in the local variable ``MHR_result``. Effectively, processing for
|
||||||
this event continues and upon receipt of the values returned by
|
this event continues and upon receipt of the values returned by
|
||||||
:bro:id:`lookup_hostname_txt`, the ``when`` block is executed. The
|
:bro:id:`lookup_hostname_txt`, the ``when`` block is executed. The
|
||||||
``when`` block splits the string returned into two seperate values and
|
``when`` block splits the string returned into a portion for the date on which
|
||||||
checks to ensure an expected format. If the format is invalid, the
|
the malware was first detected and a portion for the detection rate, using a space as the separator
|
||||||
script assumes that the hash wasn't found in the respository and
|
and storing the values returned in a local table variable. On line 12, if the table
|
||||||
processing is concluded. If the format is as expected and the
|
returned by ``split1`` has two entries, indicating a successful split, we store the detection
|
||||||
detection rate is above the threshold set by ``MHR_threshold``, two
|
date in ``mhr_first_detected`` and the rate in ``mhr_detect_rate`` on lines 14 and 15 respectively
|
||||||
new local variables are created and used in the notice issued by
|
using the appropriate conversion functions. From this point on, Bro knows it has seen a file
|
||||||
:bro:id:`NOTICE`.
|
transmitted whose hash has been seen by the Team Cymru Malware Hash Registry; the rest
|
||||||
|
of the script is dedicated to producing a notice.
|
||||||
|
|
||||||
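Stripped of the notice logic, the asynchronous pattern described above boils down to something like this sketch (identifiers are illustrative):

.. code:: bro

    event file_hash(f: fa_file, kind: string, hash: string)
        {
        local hash_domain = fmt("%s.malware.hash.cymru.com", hash);

        when ( local MHR_result = lookup_hostname_txt(hash_domain) )
            {
            # Only runs once the DNS TXT answer has arrived.
            local MHR_answer = split1(MHR_result, / /);
            if ( |MHR_answer| == 2 )
                print MHR_answer;
            }
        timeout 5secs
            {
            # The lookup did not complete in time; give up quietly.
            }
        }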
In approximately 15 lines of actual code, Bro provides an amazing
|
On line 17, the detection time is processed into a string representation and stored in
|
||||||
|
``readable_first_detected``. The script then compares the detection rate against the
|
||||||
|
``notice_threshold`` that was defined earlier. If the detection rate is high enough, the script
|
||||||
|
creates a concise description of the notice on line 22, a possible URL to check the sample against
|
||||||
|
``virustotal.com``'s database, and makes the call to :bro:id:`NOTICE` to hand the relevant information
|
||||||
|
off to the Notice framework.
|
||||||
|
|
||||||
|
In approximately 25 lines of code, Bro provides an amazing
|
||||||
utility that would be incredibly difficult to implement and deploy
|
utility that would be incredibly difficult to implement and deploy
|
||||||
with other products. In truth, claiming that Bro does this in 15
|
with other products. In truth, claiming that Bro does this in 25
|
||||||
lines is a misdirection; there is a truly massive number of things
|
lines is a misdirection; there is a truly massive number of things
|
||||||
going on behind-the-scenes in Bro, but it is the inclusion of the
|
going on behind-the-scenes in Bro, but it is the inclusion of the
|
||||||
scripting language that gives analysts access to those underlying
|
scripting language that gives analysts access to those underlying
|
||||||
|
@ -168,7 +173,7 @@ the event, and a concise explanation of the functions use.
|
||||||
:lines: 29-54
|
:lines: 29-54
|
||||||
|
|
||||||
Above is a segment of the documentation for the event
|
Above is a segment of the documentation for the event
|
||||||
:bro:id:`dns_request` (and the preceeding link points to the
|
:bro:id:`dns_request` (and the preceding link points to the
|
||||||
documentation generated out of that). It's organized such that the
|
documentation generated out of that). It's organized such that the
|
||||||
documentation, commentary, and list of arguments precede the actual
|
documentation, commentary, and list of arguments precede the actual
|
||||||
event definition used by Bro. As Bro detects DNS requests being
|
event definition used by Bro. As Bro detects DNS requests being
|
||||||
|
@ -197,13 +202,8 @@ such, there are events defined for the primary parts of the connection
|
||||||
life-cycle as you'll see from the small selection of
|
life-cycle as you'll see from the small selection of
|
||||||
connection-related events below.
|
connection-related events below.
|
||||||
|
|
||||||
.. todo::
|
|
||||||
|
|
||||||
Update the line numbers, this isn't pulling in the right events
|
|
||||||
anymore but I don't know which ones it were.
|
|
||||||
|
|
||||||
.. btest-include:: ${BRO_SRC_ROOT}/build/scripts/base/bif/event.bif.bro
|
.. btest-include:: ${BRO_SRC_ROOT}/build/scripts/base/bif/event.bif.bro
|
||||||
:lines: 135-138,154,204-208,218,255-256,266,335-340,351
|
:lines: 69-72,88,106-109,129,132-137,148
|
||||||
|
|
||||||
Of the events listed, the event that will give us the best insight
|
Of the events listed, the event that will give us the best insight
|
||||||
into the connection record data type will be
|
into the connection record data type will be
|
||||||
|
@ -214,7 +214,7 @@ take a look at a simple script, stored as
|
||||||
``connection_record_01.bro``, that will output the connection record
|
``connection_record_01.bro``, that will output the connection record
|
||||||
for a single connection.
|
for a single connection.
|
||||||
|
|
||||||
.. btest-include:: ${DOC_ROOT}/scripting/connection_record_02.bro
|
.. btest-include:: ${DOC_ROOT}/scripting/connection_record_01.bro
|
||||||
|
|
||||||
Again, we start with ``@load``, this time importing the
|
Again, we start with ``@load``, this time importing the
|
||||||
:doc:`/scripts/base/protocols/conn/index` scripts which supply the tracking
|
:doc:`/scripts/base/protocols/conn/index` scripts which supply the tracking
|
||||||
|
@ -245,7 +245,7 @@ information gleaned from the analysis of a connection as a complete
|
||||||
unit. To break down this collection of information, you will have to
|
unit. To break down this collection of information, you will have to
|
||||||
make use of use Bro's field delimiter ``$``. For example, the
|
make use of use Bro's field delimiter ``$``. For example, the
|
||||||
originating host is referenced by ``c$id$orig_h`` which if given a
|
originating host is referenced by ``c$id$orig_h`` which if given a
|
||||||
narritive relates to ``orig_h`` which is a member of ``id`` which is
|
narrative relates to ``orig_h`` which is a member of ``id`` which is
|
||||||
a member of the data structure referred to as ``c`` that was passed
|
a member of the data structure referred to as ``c`` that was passed
|
||||||
into the event handler." Given that the responder port
|
into the event handler." Given that the responder port
|
||||||
(``c$id$resp_p``) is ``53/tcp``, it's likely that Bro's base DNS scripts
|
(``c$id$resp_p``) is ``53/tcp``, it's likely that Bro's base DNS scripts
|
||||||
|
@ -316,7 +316,7 @@ block that variable is available to any other script through the
|
||||||
naming convention of ``MODULE::variable_name``.
|
naming convention of ``MODULE::variable_name``.
|
||||||
|
|
||||||
The declaration below is taken from the
|
The declaration below is taken from the
|
||||||
:doc:`/scripts/policy/protocols/conn/known-hosts` script and
|
:doc:`/scripts/policy/protocols/conn/known-hosts.bro` script and
|
||||||
declares a variable called ``known_hosts`` as a global set of unique
|
declares a variable called ``known_hosts`` as a global set of unique
|
||||||
IP addresses within the ``Known`` namespace and exports it for use
|
IP addresses within the ``Known`` namespace and exports it for use
|
||||||
outside of the ``Known`` namespace. Were we to want to use the
|
outside of the ``Known`` namespace. Were we to want to use the
|
||||||
|
@ -343,15 +343,15 @@ Constants
|
||||||
Bro also makes use of constants, which are denoted by the ``const``
|
Bro also makes use of constants, which are denoted by the ``const``
|
||||||
keyword. Unlike globals, constants can only be set or altered at
|
keyword. Unlike globals, constants can only be set or altered at
|
||||||
parse time if the ``&redef`` attribute has been used. Afterwards (in
|
parse time if the ``&redef`` attribute has been used. Afterwards (in
|
||||||
runtime) the constants are unalterable. In most cases, redefinable
|
runtime) the constants are unalterable. In most cases, re-definable
|
||||||
constants are used in Bro scripts as containers for configuration
|
constants are used in Bro scripts as containers for configuration
|
||||||
options. For example, the configuration option to log password
|
options. For example, the configuration option to log password
|
||||||
decrypted from HTTP streams is stored in
|
decrypted from HTTP streams is stored in
|
||||||
``HTTP::default_capture_password`` as shown in the stripped down
|
``HTTP::default_capture_password`` as shown in the stripped down
|
||||||
excerpt from :doc:`/scripts/base/protocols/http/main` below.
|
excerpt from :doc:`/scripts/base/protocols/http/main.bro` below.
|
||||||
|
|
||||||
.. btest-include:: ${BRO_SRC_ROOT}/scripts/base/protocols/http/main.bro
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/base/protocols/http/main.bro
|
||||||
:lines: 8-10,19,20,118
|
:lines: 8-10,19-21,120
|
||||||
|
|
||||||
Because the constant was declared with the ``&redef`` attribute, if we
|
Because the constant was declared with the ``&redef`` attribute, if we
|
||||||
needed to turn this option on globally, we could do so by adding the
|
needed to turn this option on globally, we could do so by adding the
|
||||||
|
@ -359,7 +359,7 @@ following line to our ``site/local.bro`` file before firing up Bro.
|
||||||
|
|
||||||
.. btest-include:: ${DOC_ROOT}/scripting/data_type_const_simple.bro
|
.. btest-include:: ${DOC_ROOT}/scripting/data_type_const_simple.bro
|
||||||
|
|
||||||
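In other words, the whole customization amounts to a single redef, presumably along the lines of:

.. code:: bro

    redef HTTP::default_capture_password = T;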
While the idea of a redefinable constant might be odd, the constraint
|
While the idea of a re-definable constant might be odd, the constraint
|
||||||
that constants can only be altered at parse-time remains even with the
|
that constants can only be altered at parse-time remains even with the
|
||||||
``&redef`` attribute. In the code snippet below, a table of strings
|
``&redef`` attribute. In the code snippet below, a table of strings
|
||||||
indexed by ports is declared as a constant before two values are added
|
indexed by ports is declared as a constant before two values are added
|
||||||
|
@ -417,7 +417,7 @@ The table below shows the atomic types used in Bro, of which the
|
||||||
first four should seem familiar if you have some scripting experience,
|
first four should seem familiar if you have some scripting experience,
|
||||||
while the remaining six are less common in other languages. It should
|
while the remaining six are less common in other languages. It should
|
||||||
come as no surprise that a scripting language for a Network Security
|
come as no surprise that a scripting language for a Network Security
|
||||||
Monitoring platform has a fairly robust set of network centric data
|
Monitoring platform has a fairly robust set of network-centric data
|
||||||
types and taking note of them here may well save you a late night of
|
types and taking note of them here may well save you a late night of
|
||||||
reinventing the wheel.
|
reinventing the wheel.
|
||||||
|
|
||||||
|
@ -479,7 +479,7 @@ the ``for`` loop, the next element is chosen. Since sets are not an
|
||||||
ordered data type, you cannot guarantee the order of the elements as
|
ordered data type, you cannot guarantee the order of the elements as
|
||||||
the ``for`` loop processes.
|
the ``for`` loop processes.
|
||||||
|
|
||||||
To test for membership in a set the ``in`` statment can be combined
|
To test for membership in a set the ``in`` statement can be combined
|
||||||
with an ``if`` statement to return a true or false value. If the
|
with an ``if`` statement to return a true or false value. If the
|
||||||
exact element in the condition is already in the set, the condition
|
exact element in the condition is already in the set, the condition
|
||||||
returns true and the body executes. The ``in`` statement can also be
|
returns true and the body executes. The ``in`` statement can also be
|
||||||
|
@ -509,16 +509,16 @@ values don't have to be unique, each key in the table must be unique
|
||||||
to preserve a one-to-one mapping of keys to values. In the example
|
to preserve a one-to-one mapping of keys to values. In the example
|
||||||
below, we've compiled a table of SSL-enabled services and their common
|
below, we've compiled a table of SSL-enabled services and their common
|
||||||
ports. The explicit declaration and constructor for the table on
|
ports. The explicit declaration and constructor for the table on
|
||||||
lines 3 and 4 lay out the data types of the keys (strings) and the
|
lines 5 and 7 lay out the data types of the keys (strings) and the
|
||||||
data types of the yields (ports) and then fill in some sample key and
|
data types of the yields (ports) and then fill in some sample key and
|
||||||
yield pairs. Line 5 shows how to use a table accessor to insert one
|
yield pairs. Line 8 shows how to use a table accessor to insert one
|
||||||
key-yield pair into the table. When using the ``in`` operator on a table,
|
key-yield pair into the table. When using the ``in`` operator on a table,
|
||||||
you are effectively working with the keys of the table. In the case
|
you are effectively working with the keys of the table. In the case
|
||||||
of an ``if`` statement, the ``in`` operator will check for membership among
|
of an ``if`` statement, the ``in`` operator will check for membership among
|
||||||
the set of keys and return a true or false value. As seen on line 7,
|
the set of keys and return a true or false value. As seen on line 10,
|
||||||
we are checking if ``SMTPS`` is not in the set of keys for the
|
we are checking if ``SMTPS`` is not in the set of keys for the
|
||||||
ssl_services table and if the condition holds true, we add the
|
ssl_services table and if the condition holds true, we add the
|
||||||
key-yield pair to the table. Line 12 shows the use of a ``for`` statement
|
key-yield pair to the table. Line 13 shows the use of a ``for`` statement
|
||||||
to iterate over each key currently in the table.
|
to iterate over each key currently in the table.
|
||||||
|
|
||||||
.. btest-include:: ${DOC_ROOT}/scripting/data_struct_table_declaration.bro
|
.. btest-include:: ${DOC_ROOT}/scripting/data_struct_table_declaration.bro
|
||||||
|
@ -546,7 +546,7 @@ iterate over, say, the directors; we have to iterate with the exact
|
||||||
format as the keys themselves. In this case, we need squared brackets
|
format as the keys themselves. In this case, we need squared brackets
|
||||||
surrounding four temporary variables to act as a collection for our
|
surrounding four temporary variables to act as a collection for our
|
||||||
iteration. While this is a contrived example, we could easily have
|
iteration. While this is a contrived example, we could easily have
|
||||||
had keys containin IP addresses (``addr``), ports (``port``) and even a ``string``
|
had keys containing IP addresses (``addr``), ports (``port``) and even a ``string``
|
||||||
calculated as the result of a reverse hostname lookup.
|
calculated as the result of a reverse hostname lookup.
|
||||||
|
|
||||||
.. btest-include:: ${DOC_ROOT}/scripting/data_struct_table_complex.bro
|
.. btest-include:: ${DOC_ROOT}/scripting/data_struct_table_complex.bro
|
||||||
|
@ -647,7 +647,7 @@ subnet
|
||||||
~~~~~~
|
~~~~~~
|
||||||
|
|
||||||
Bro has full support for CIDR notation subnets as a base data type.
|
Bro has full support for CIDR notation subnets as a base data type.
|
||||||
There is no need to manage the IP and the subnet mask as two seperate
|
There is no need to manage the IP and the subnet mask as two separate
|
||||||
entities when you can provide the same information in CIDR notation in
|
entities when you can provide the same information in CIDR notation in
|
||||||
your scripts. The following example below uses a Bro script to
|
your scripts. The following example below uses a Bro script to
|
||||||
determine if a series of IP addresses are within a set of subnets
|
determine if a series of IP addresses are within a set of subnets
|
||||||
|
@ -780,7 +780,7 @@ inequality operators through the ``==`` and ``!=`` operators
|
||||||
respectively. When used in this manner however, the string must match
|
respectively. When used in this manner however, the string must match
|
||||||
entirely to resolve to true. For example, the script below uses two
|
entirely to resolve to true. For example, the script below uses two
|
||||||
ternary conditional statements to illustrate the use of the ``==``
|
ternary conditional statements to illustrate the use of the ``==``
|
||||||
operators with patterns. On lines 5 and 8 the output is altered based
|
operators with patterns. On lines 8 and 11 the output is altered based
|
||||||
on the result of the comparison between the pattern and the string.
|
on the result of the comparison between the pattern and the string.
|
||||||
|
|
||||||
.. btest-include:: ${DOC_ROOT}/scripting/data_type_pattern_02.bro
|
.. btest-include:: ${DOC_ROOT}/scripting/data_type_pattern_02.bro
|
||||||
|
@ -807,10 +807,10 @@ composite type. We have, in fact, already encountered a a complex
|
||||||
example of the ``record`` data type in the earlier sections, the
|
example of the ``record`` data type in the earlier sections, the
|
||||||
:bro:type:`connection` record passed to many events. Another one,
|
:bro:type:`connection` record passed to many events. Another one,
|
||||||
:bro:type:`Conn::Info`, which corresponds to the fields logged into
|
:bro:type:`Conn::Info`, which corresponds to the fields logged into
|
||||||
``conn.log``, is shown by the exerpt below.
|
``conn.log``, is shown by the excerpt below.
|
||||||
|
|
||||||
.. btest-include:: ${BRO_SRC_ROOT}/scripts/base/protocols/conn/main.bro
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/base/protocols/conn/main.bro
|
||||||
:lines: 10-12,16,17,19,21,23,25,28,31,35,37,56,62,68,90,93,97,100,104,108,109,114
|
:lines: 10-12,16-17,19,21,23,25,28,31,35,38,57,63,69,92,95,99,102,106,110-111,116
|
||||||
|
|
||||||
Looking at the structure of the definition, a new collection of data
|
Looking at the structure of the definition, a new collection of data
|
||||||
types is being defined as a type called ``Info``. Since this type
|
types is being defined as a type called ``Info``. Since this type
|
||||||
|
@ -818,7 +818,7 @@ definition is within the confines of an export block, what is defined
|
||||||
is, in fact, ``Conn::Info``.
|
is, in fact, ``Conn::Info``.
|
||||||
|
|
||||||
The formatting for a declaration of a record type in Bro includes the
|
The formatting for a declaration of a record type in Bro includes the
|
||||||
descriptive name of the type being defined and the seperate fields
|
descriptive name of the type being defined and the separate fields
|
||||||
that make up the record. The individual fields that make up the new
|
that make up the record. The individual fields that make up the new
|
||||||
record are not limited in type or number as long as the name for each
|
record are not limited in type or number as long as the name for each
|
||||||
field is unique.
|
field is unique.
|
||||||
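As a quick sketch of that shape, using the service-style record described in the example that follows (field names here are illustrative):

.. code:: bro

    type Service: record {
        name: string;
        ports: set[port];
        rfc: count;
    };

    event bro_init()
        {
        local dns: Service = [$name="dns", $ports=set(53/udp, 53/tcp), $rfc=1035];
        print dns$name;
        }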
|
@ -834,7 +834,7 @@ string, a set of ports, and a count to define a service type. Also
|
||||||
included is a function to print each field of a record in a formatted
|
included is a function to print each field of a record in a formatted
|
||||||
fashion and a :bro:id:`bro_init` event handler to show some
|
fashion and a :bro:id:`bro_init` event handler to show some
|
||||||
functionality of working with records. The definitions of the DNS and
|
functionality of working with records. The definitions of the DNS and
|
||||||
HTTP services are both done inline using squared brackets before being
|
HTTP services are both done in-line using squared brackets before being
|
||||||
passed to the ``print_service`` function. The ``print_service``
|
passed to the ``print_service`` function. The ``print_service``
|
||||||
function makes use of the ``$`` dereference operator to access the
|
function makes use of the ``$`` dereference operator to access the
|
||||||
fields within the newly defined Service record type.
|
fields within the newly defined Service record type.
|
||||||
|
@ -851,7 +851,7 @@ record.
|
||||||
@TEST-EXEC: btest-rst-cmd bro ${DOC_ROOT}/scripting/data_struct_record_02.bro
|
@TEST-EXEC: btest-rst-cmd bro ${DOC_ROOT}/scripting/data_struct_record_02.bro
|
||||||
|
|
||||||
The example above includes a second record type in which a field is
|
The example above includes a second record type in which a field is
|
||||||
used as the data type for a set. Records can be reapeatedly nested
|
used as the data type for a set. Records can be repeatedly nested
|
||||||
within other records, their fields reachable through repeated chains
|
within other records, their fields reachable through repeated chains
|
||||||
of the ``$`` dereference operator.
|
of the ``$`` dereference operator.
|
||||||
|
|
||||||
|
@ -934,12 +934,12 @@ method and produce a logfile. As we are working within a namespace
|
||||||
and informing an outside entity of workings and data internal to the
|
and informing an outside entity of workings and data internal to the
|
||||||
namespace, we use an ``export`` block. First we need to inform Bro
|
namespace, we use an ``export`` block. First we need to inform Bro
|
||||||
that we are going to be adding another Log Stream by adding a value to
|
that we are going to be adding another Log Stream by adding a value to
|
||||||
the :bro:type:`Log::ID` enumerable. In line 3 of the script, we append the
|
the :bro:type:`Log::ID` enumerable. On line 6 of the script, we append the
|
||||||
value ``LOG`` to the ``Log::ID`` enumerable, however due to this being in
|
value ``LOG`` to the ``Log::ID`` enumerable, however due to this being in
|
||||||
an export block the value appended to ``Log::ID`` is actually
|
an export block the value appended to ``Log::ID`` is actually
|
||||||
``Factor::Log``. Next, we need to define the name and value pairs
|
``Factor::Log``. Next, we need to define the name and value pairs
|
||||||
that make up the data of our logs and dictate its format. Lines 5
|
that make up the data of our logs and dictate its format. Lines 8
|
||||||
through 9 define a new datatype called an ``Info`` record (actually,
|
through 11 define a new datatype called an ``Info`` record (actually,
|
||||||
``Factor::Info``) with two fields, both unsigned integers. Each of the
|
``Factor::Info``) with two fields, both unsigned integers. Each of the
|
||||||
fields in the ``Factor::Info`` record type includes the ``&log``
|
fields in the ``Factor::Info`` record type includes the ``&log``
|
||||||
attribute, indicating that these fields should be passed to the
|
attribute, indicating that these fields should be passed to the
|
||||||
|
@ -948,7 +948,7 @@ any name value pairs without the ``&log`` attribute, those fields
|
||||||
would simply be ignored during logging but remain available for the
|
would simply be ignored during logging but remain available for the
|
||||||
lifespan of the variable. The next step is to create the logging
|
lifespan of the variable. The next step is to create the logging
|
||||||
stream with :bro:id:`Log::create_stream` which takes a Log::ID and a
|
stream with :bro:id:`Log::create_stream` which takes a Log::ID and a
|
||||||
record as its arguments. In this example, on line 28, we call the
|
record as its arguments. In this example, on line 25, we call the
|
||||||
``Log::create_stream`` method and pass ``Factor::LOG`` and the
|
``Log::create_stream`` method and pass ``Factor::LOG`` and the
|
||||||
``Factor::Info`` record as arguments. From here on out, if we issue
|
``Factor::Info`` record as arguments. From here on out, if we issue
|
||||||
the ``Log::write`` command with the correct ``Log::ID`` and a properly
|
the ``Log::write`` command with the correct ``Log::ID`` and a properly
|
||||||
|
@ -1128,7 +1128,7 @@ which we will cover shortly.
|
||||||
+---------------------+------------------------------------------------------------------+----------------+----------------------------------------+
|
+---------------------+------------------------------------------------------------------+----------------+----------------------------------------+
|
||||||
| policy_items | set[count] | &log &optional | Policy items that have been applied |
|
| policy_items | set[count] | &log &optional | Policy items that have been applied |
|
||||||
+---------------------+------------------------------------------------------------------+----------------+----------------------------------------+
|
+---------------------+------------------------------------------------------------------+----------------+----------------------------------------+
|
||||||
| email_body_sections | vector | &optinal | Body of the email for email notices. |
|
| email_body_sections | vector | &optional | Body of the email for email notices. |
|
||||||
+---------------------+------------------------------------------------------------------+----------------+----------------------------------------+
|
+---------------------+------------------------------------------------------------------+----------------+----------------------------------------+
|
||||||
| email_delay_tokens | set[string] | &optional | Delay functionality for email notices. |
|
| email_delay_tokens | set[string] | &optional | Delay functionality for email notices. |
|
||||||
+---------------------+------------------------------------------------------------------+----------------+----------------------------------------+
|
+---------------------+------------------------------------------------------------------+----------------+----------------------------------------+
|
||||||
|
@ -1142,7 +1142,7 @@ has been heuristically detected and the originating hostname is one
|
||||||
that would raise suspicion. Effectively, the script attempts to
|
that would raise suspicion. Effectively, the script attempts to
|
||||||
define a list of hosts from which you would never want to see SSH
|
define a list of hosts from which you would never want to see SSH
|
||||||
traffic originating, like DNS servers, mail servers, etc. To
|
traffic originating, like DNS servers, mail servers, etc. To
|
||||||
accomplish this, the script adhere's to the seperation of detection
|
accomplish this, the script adheres to the separation of detection
|
||||||
and reporting by detecting a behavior and raising a notice. Whether
|
and reporting by detecting a behavior and raising a notice. Whether
|
||||||
or not that notice is acted upon is decided by the local Notice
|
or not that notice is acted upon is decided by the local Notice
|
||||||
Policy, but the script attempts to supply as much information as
|
Policy, but the script attempts to supply as much information as
|
||||||
|
@ -1153,12 +1153,12 @@ possible while staying concise.
|
||||||
|
|
||||||
While much of the script relates to the actual detection, the parts
|
While much of the script relates to the actual detection, the parts
|
||||||
specific to the Notice Framework are actually quite interesting in
|
specific to the Notice Framework are actually quite interesting in
|
||||||
themselves. On line 12 the script's ``export`` block adds the value
|
themselves. On line 18 the script's ``export`` block adds the value
|
||||||
``SSH::Interesting_Hostname_Login`` to the enumerable constant
|
``SSH::Interesting_Hostname_Login`` to the enumerable constant
|
||||||
``Notice::Type`` to indicate to the Bro core that a new type of notice
|
``Notice::Type`` to indicate to the Bro core that a new type of notice
|
||||||
is being defined. The script then calls ``NOTICE`` and defines the
|
is being defined. The script then calls ``NOTICE`` and defines the
|
||||||
``$note``, ``$msg``, ``$sub`` and ``$conn`` fields of the
|
``$note``, ``$msg``, ``$sub`` and ``$conn`` fields of the
|
||||||
:bro:type:`Notice::Info` record. Line 39 also includes a ternary if
|
:bro:type:`Notice::Info` record. Line 42 also includes a ternary if
|
||||||
statement that modifies the ``$msg`` text depending on whether the
|
statement that modifies the ``$msg`` text depending on whether the
|
||||||
host is a local address and whether it is the client or the server.
|
host is a local address and whether it is the client or the server.
|
||||||
This use of :bro:id:`fmt` and a ternary operator is a concise way to
|
This use of :bro:id:`fmt` and a ternary operator is a concise way to
|
||||||
|
@ -1182,7 +1182,7 @@ passing in the ``Notice::Info`` record. The simplest kind of
|
||||||
action based on the answer. The hook below adds the
|
action based on the answer. The hook below adds the
|
||||||
:bro:enum:`Notice::ACTION_EMAIL` action for the
|
:bro:enum:`Notice::ACTION_EMAIL` action for the
|
||||||
``SSH::Interesting_Hostname_Login`` notice raised in the
|
``SSH::Interesting_Hostname_Login`` notice raised in the
|
||||||
:doc:`/scripts/policy/protocols/ssh/interesting-hostnames` script.
|
:doc:`/scripts/policy/protocols/ssh/interesting-hostnames.bro` script.
|
||||||
|
|
||||||
.. btest-include:: ${DOC_ROOT}/scripting/framework_notice_hook_01.bro
|
.. btest-include:: ${DOC_ROOT}/scripting/framework_notice_hook_01.bro
|
||||||
|
|
||||||
|
@ -1222,11 +1222,11 @@ from the connection relative to the behavior that has been observed by
|
||||||
Bro.
|
Bro.
|
||||||
|
|
||||||
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/protocols/ssl/expiring-certs.bro
|
.. btest-include:: ${BRO_SRC_ROOT}/scripts/policy/protocols/ssl/expiring-certs.bro
|
||||||
:lines: 59-62
|
:lines: 60-63
|
||||||
|
|
||||||
In the :doc:`/scripts/policy/protocols/ssl/expiring-certs` script
|
In the :doc:`/scripts/policy/protocols/ssl/expiring-certs.bro` script
|
||||||
which identifies when SSL certificates are set to expire and raises
|
which identifies when SSL certificates are set to expire and raises
|
||||||
notices when it crosses a pre-defined threshold, the call to
|
notices when it crosses a predefined threshold, the call to
|
||||||
``NOTICE`` above also sets the ``$identifier`` entry by concatenating
|
``NOTICE`` above also sets the ``$identifier`` entry by concatenating
|
||||||
the responder IP, port, and the hash of the certificate. The
|
the responder IP, port, and the hash of the certificate. The
|
||||||
selection of responder IP, port and certificate hash fits perfectly
|
selection of responder IP, port and certificate hash fits perfectly
|
||||||
|
@ -1262,7 +1262,7 @@ In short, there will be notice policy considerations where a broad
|
||||||
decision can be made based on the ``Notice::Type`` alone. To
|
decision can be made based on the ``Notice::Type`` alone. To
|
||||||
facilitate these types of decisions, the Notice Framework supports
|
facilitate these types of decisions, the Notice Framework supports
|
||||||
Notice Policy shortcuts. These shortcuts are implemented through the
|
Notice Policy shortcuts. These shortcuts are implemented through the
|
||||||
means of a group of data structures that map specific, pre-defined
|
means of a group of data structures that map specific, predefined
|
||||||
details and actions to the effective name of a notice. Primarily
|
details and actions to the effective name of a notice. Primarily
|
||||||
implemented as a set or table of enumerables of :bro:type:`Notice::Type`,
|
implemented as a set or table of enumerables of :bro:type:`Notice::Type`,
|
||||||
Notice Policy shortcuts can be placed as a single directive in your
|
Notice Policy shortcuts can be placed as a single directive in your
|
||||||
|
@ -1308,5 +1308,3 @@ Notice::emailed_types set while the shortcut below alters the length
|
||||||
of time for which those notices will be suppressed.
|
of time for which those notices will be suppressed.
|
||||||
|
|
||||||
.. btest-include:: ${DOC_ROOT}/scripting/framework_notice_shortcuts_02.bro
|
.. btest-include:: ${DOC_ROOT}/scripting/framework_notice_shortcuts_02.bro
|
||||||
|
|
||||||
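Concretely, the two shortcuts discussed above amount to redefs along these lines (a sketch; the notice type is the one used earlier in this chapter):

.. code:: bro

    # Always email this kind of notice.
    redef Notice::emailed_types += {
        SSH::Interesting_Hostname_Login
    };

    # Suppress repeated notices of this kind for a day at a time.
    redef Notice::type_suppression_intervals += {
        [SSH::Interesting_Hostname_Login] = 1day
    };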
|
|
||||||
|
|
|
@ -1,226 +0,0 @@
|
||||||
# find out what BROPATH to use when executing bro
|
|
||||||
execute_process(COMMAND ${CMAKE_BINARY_DIR}/bro-path-dev
|
|
||||||
OUTPUT_VARIABLE BROPATH
|
|
||||||
RESULT_VARIABLE retval
|
|
||||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
|
||||||
if (NOT ${retval} EQUAL 0)
|
|
||||||
message(FATAL_ERROR "Problem setting BROPATH")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
# This macro is used to add a new makefile target for reST policy script
|
|
||||||
# documentation that can be generated using Bro itself to parse policy scripts.
|
|
||||||
# It's called like:
|
|
||||||
#
|
|
||||||
# rest_target(srcDir broInput [group])
|
|
||||||
#
|
|
||||||
# srcDir: the directory which contains broInput
|
|
||||||
# broInput: the file name of a bro policy script, any path prefix of this
|
|
||||||
# argument will be used to derive what path under scripts/ the generated
|
|
||||||
# documentation will be placed.
|
|
||||||
# group: optional name of group that the script documentation will belong to.
|
|
||||||
# If this is not given, the group is automatically set to any path portion
|
|
||||||
# of the broInput argument.
|
|
||||||
#
|
|
||||||
# In addition to adding the makefile target, several CMake variables are set:
|
|
||||||
#
|
|
||||||
# MASTER_POLICY_INDEX_TEXT: a running list of policy scripts docs that have
|
|
||||||
# been generated so far, formatted such that it can be appended to a file
|
|
||||||
# that ends in a Sphinx toctree directive
|
|
||||||
# ALL_REST_OUTPUTS: a running list (the CMake list type) of all reST docs
|
|
||||||
# that are to be generated
|
|
||||||
# MASTER_GROUP_LIST: a running list (the CMake list type) of all script groups
|
|
||||||
# MASTER_PKG_LIST: a running list (the CMake list type) of all script groups
|
|
||||||
# that were derived from the path portion of the broInput argument
|
|
||||||
# ${group}_files: a running list of files belonging to a given group, from
|
|
||||||
# which summary text can be extracted at build time
|
|
||||||
# ${group}_doc_names: a running list of reST style document names that can be
|
|
||||||
# given to a :doc: role, shared indices with ${group}_files
|
|
||||||
|
|
||||||
macro(REST_TARGET srcDir broInput)
|
|
||||||
set(absSrcPath ${srcDir}/${broInput})
|
|
||||||
get_filename_component(basename ${broInput} NAME)
|
|
||||||
string(REPLACE .bro "" basename ${basename})
|
|
||||||
get_filename_component(extension ${broInput} EXT)
|
|
||||||
get_filename_component(relDstDir ${broInput} PATH)
|
|
||||||
|
|
||||||
set(sumTextSrc ${absSrcPath})
|
|
||||||
set(ogSourceFile ${absSrcPath})
|
|
||||||
|
|
||||||
if (NOT relDstDir)
|
|
||||||
set(docName "${basename}")
|
|
||||||
set(dstDir "${RST_OUTPUT_DIR}")
|
|
||||||
else ()
|
|
||||||
set(docName "${relDstDir}/${basename}")
|
|
||||||
set(dstDir "${RST_OUTPUT_DIR}/${relDstDir}")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
set(restFile "${docName}.rst")
|
|
||||||
string(REPLACE "/" "^" restFile ${restFile})
|
|
||||||
set(restOutput "${dstDir}/${basename}.rst")
|
|
||||||
|
|
||||||
set(MASTER_POLICY_INDEX_TEXT
|
|
||||||
"${MASTER_POLICY_INDEX_TEXT}\n ${docName} <${docName}>")
|
|
||||||
list(APPEND ALL_REST_OUTPUTS ${restOutput})
|
|
||||||
|
|
||||||
if (NOT "${ARGN}" STREQUAL "")
|
|
||||||
set(group ${ARGN})
|
|
||||||
elseif (relDstDir)
|
|
||||||
set(group ${relDstDir}/index)
|
|
||||||
# add package index to master package list if not already in it
|
|
||||||
# and if a __load__.bro exists in the original script directory
|
|
||||||
list(FIND MASTER_PKG_LIST ${relDstDir} _found)
|
|
||||||
if (_found EQUAL -1)
|
|
||||||
if (EXISTS ${CMAKE_SOURCE_DIR}/scripts/${relDstDir}/__load__.bro)
|
|
||||||
list(APPEND MASTER_PKG_LIST ${relDstDir})
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
else ()
|
|
||||||
set(group "")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (NOT "${group}" STREQUAL "")
|
|
||||||
# add group to master group list if not already in it
|
|
||||||
list(FIND MASTER_GROUP_LIST ${group} _found)
|
|
||||||
if (_found EQUAL -1)
|
|
||||||
list(APPEND MASTER_GROUP_LIST ${group})
|
|
||||||
if (MASTER_GROUP_LIST_TEXT)
|
|
||||||
set(MASTER_GROUP_LIST_TEXT "${MASTER_GROUP_LIST_TEXT}\n${group}")
|
|
||||||
else ()
|
|
||||||
set(MASTER_GROUP_LIST_TEXT "${group}")
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
list(APPEND ${group}_files ${sumTextSrc})
|
|
||||||
list(APPEND ${group}_doc_names ${docName})
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
add_custom_command(OUTPUT ${restOutput}
|
|
||||||
# delete any leftover state from previous bro runs
|
|
||||||
COMMAND "${CMAKE_COMMAND}"
|
|
||||||
ARGS -E remove_directory .state
|
|
||||||
# generate the reST documentation using bro
|
|
||||||
COMMAND BROPATH=${BROPATH}:${srcDir} BROMAGIC=${CMAKE_SOURCE_DIR}/magic/database ${CMAKE_BINARY_DIR}/src/bro
|
|
||||||
ARGS -b -Z ${broInput} || (rm -rf .state *.log *.rst && exit 1)
|
|
||||||
# move generated doc into a new directory tree that
|
|
||||||
# defines the final structure of documents
|
|
||||||
COMMAND "${CMAKE_COMMAND}"
|
|
||||||
ARGS -E make_directory ${dstDir}
|
|
||||||
COMMAND "${CMAKE_COMMAND}"
|
|
||||||
ARGS -E copy ${restFile} ${restOutput}
|
|
||||||
# copy the bro or bif script, too
|
|
||||||
COMMAND "${CMAKE_COMMAND}"
|
|
||||||
ARGS -E copy ${ogSourceFile} ${dstDir}
|
|
||||||
# clean up the build directory
|
|
||||||
COMMAND rm
|
|
||||||
ARGS -rf .state *.log *.rst
|
|
||||||
DEPENDS bro
|
|
||||||
DEPENDS ${absSrcPath}
|
|
||||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
|
||||||
COMMENT "[Bro] Generating reST docs for ${broInput}"
|
|
||||||
)
|
|
||||||
|
|
||||||
endmacro(REST_TARGET)
|
|
||||||
|
|
||||||
# Schedule Bro scripts for which to generate documentation.
|
|
||||||
include(DocSourcesList.cmake)
|
|
||||||
|
|
||||||
# Macro for generating reST docs that are independent of any particular Bro
|
|
||||||
# script.
|
|
||||||
macro(INDEPENDENT_REST_TARGET reST_file)
|
|
||||||
add_custom_command(OUTPUT ${reST_file}
|
|
||||||
# delete any leftover state from previous bro runs
|
|
||||||
COMMAND "${CMAKE_COMMAND}"
|
|
||||||
ARGS -E remove_directory .state
|
|
||||||
# generate the reST documentation using bro
|
|
||||||
COMMAND BROPATH=${BROPATH}:${srcDir} BROMAGIC=${CMAKE_SOURCE_DIR}/magic/database ${CMAKE_BINARY_DIR}/src/bro
|
|
||||||
ARGS -b -Z base/init-bare.bro || (rm -rf .state *.log *.rst && exit 1)
|
|
||||||
# move generated doc into a new directory tree that
|
|
||||||
# defines the final structure of documents
|
|
||||||
COMMAND "${CMAKE_COMMAND}"
|
|
||||||
ARGS -E make_directory ${dstDir}
|
|
||||||
COMMAND "${CMAKE_COMMAND}"
|
|
||||||
ARGS -E copy ${reST_file} ${dstDir}
|
|
||||||
# clean up the build directory
|
|
||||||
COMMAND rm
|
|
||||||
ARGS -rf .state *.log *.rst
|
|
||||||
DEPENDS bro
|
|
||||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
|
||||||
COMMENT "[Bro] Generating reST docs for ${reST_file}"
|
|
||||||
)
|
|
||||||
list(APPEND ALL_REST_OUTPUTS ${reST_file})
|
|
||||||
endmacro(INDEPENDENT_REST_TARGET)
|
|
||||||
|
|
||||||
independent_rest_target(proto-analyzers.rst)
|
|
||||||
independent_rest_target(file-analyzers.rst)
|
|
||||||
|
|
||||||
# create temporary list of all docs to include in the master policy/index file
|
|
||||||
file(WRITE ${MASTER_POLICY_INDEX} "${MASTER_POLICY_INDEX_TEXT}")
|
|
||||||
|
|
||||||
# create the temporary list of all packages to include in the master
|
|
||||||
# policy/packages.rst file
|
|
||||||
set(MASTER_PKG_INDEX_TEXT "")
|
|
||||||
foreach (pkg ${MASTER_PKG_LIST})
|
|
||||||
set(MASTER_PKG_INDEX_TEXT
|
|
||||||
"${MASTER_PKG_INDEX_TEXT}\n:doc:`${pkg} <${pkg}/index>`\n")
|
|
||||||
if (EXISTS ${CMAKE_SOURCE_DIR}/scripts/${pkg}/README)
|
|
||||||
file(STRINGS ${CMAKE_SOURCE_DIR}/scripts/${pkg}/README pkgreadme)
|
|
||||||
foreach (line ${pkgreadme})
|
|
||||||
set(MASTER_PKG_INDEX_TEXT "${MASTER_PKG_INDEX_TEXT}\n ${line}")
|
|
||||||
endforeach ()
|
|
||||||
set(MASTER_PKG_INDEX_TEXT "${MASTER_PKG_INDEX_TEXT}\n")
|
|
||||||
endif ()
|
|
||||||
endforeach ()
|
|
||||||
file(WRITE ${MASTER_PACKAGE_INDEX} "${MASTER_PKG_INDEX_TEXT}")
|
|
||||||
|
|
||||||
# create temporary file containing list of all groups
|
|
||||||
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/group_list
|
|
||||||
"${MASTER_GROUP_LIST_TEXT}")
|
|
||||||
|
|
||||||
# create temporary files containing list of each source file in a given group
|
|
||||||
foreach (group ${MASTER_GROUP_LIST})
|
|
||||||
if (EXISTS ${CMAKE_CURRENT_BINARY_DIR}/${group}_files)
|
|
||||||
file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/${group}_files)
|
|
||||||
endif ()
|
|
||||||
if (EXISTS ${CMAKE_CURRENT_BINARY_DIR}/${group}_doc_names)
|
|
||||||
file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/${group}_doc_names)
|
|
||||||
endif ()
|
|
||||||
foreach (src ${${group}_files})
|
|
||||||
file(APPEND ${CMAKE_CURRENT_BINARY_DIR}/${group}_files "${src}\n")
|
|
||||||
endforeach ()
|
|
||||||
foreach (dname ${${group}_doc_names})
|
|
||||||
file(APPEND ${CMAKE_CURRENT_BINARY_DIR}/${group}_doc_names "${dname}\n")
|
|
||||||
endforeach ()
|
|
||||||
endforeach ()
|
|
||||||
|
|
||||||
# remove previously generated docs no longer scheduled for generation
|
|
||||||
if (EXISTS ${RST_OUTPUT_DIR})
|
|
||||||
file(GLOB_RECURSE EXISTING_REST_DOCS "${RST_OUTPUT_DIR}/*.rst")
|
|
||||||
foreach (_doc ${EXISTING_REST_DOCS})
|
|
||||||
list(FIND ALL_REST_OUTPUTS ${_doc} _found)
|
|
||||||
if (_found EQUAL -1)
|
|
||||||
file(REMOVE ${_doc})
|
|
||||||
message(STATUS "Broxygen: remove stale reST doc: ${_doc}")
|
|
||||||
string(REPLACE .rst .bro _brofile ${_doc})
|
|
||||||
if (EXISTS ${_brofile})
|
|
||||||
file(REMOVE ${_brofile})
|
|
||||||
message(STATUS "Broxygen: remove stale bro source: ${_brofile}")
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
endforeach ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
# The "restdoc" target uses Bro to parse policy scripts in order to
|
|
||||||
# generate reST documentation from them.
|
|
||||||
add_custom_target(restdoc
|
|
||||||
# create symlink to the reST output directory for convenience
|
|
||||||
COMMAND "${CMAKE_COMMAND}" -E create_symlink
|
|
||||||
${RST_OUTPUT_DIR}
|
|
||||||
${CMAKE_BINARY_DIR}/reST
|
|
||||||
DEPENDS ${ALL_REST_OUTPUTS})
|
|
||||||
|
|
||||||
# The "restclean" target removes all generated reST documentation from the
|
|
||||||
# build directory.
|
|
||||||
add_custom_target(restclean
|
|
||||||
COMMAND "${CMAKE_COMMAND}" -E remove_directory
|
|
||||||
${RST_OUTPUT_DIR}
|
|
||||||
VERBATIM)
|
|
|
@ -1,280 +0,0 @@
|
||||||
# DO NOT EDIT
|
|
||||||
# This file is auto-generated from the genDocSourcesList.sh script.
|
|
||||||
#
|
|
||||||
# This is a list of Bro script sources for which to generate reST documentation.
|
|
||||||
# It will be included inline in the CMakeLists.txt found in the same directory
|
|
||||||
# in order to create Makefile targets that define how to generate reST from
|
|
||||||
# a given Bro script.
|
|
||||||
#
|
|
||||||
# Note: any path prefix of the script (2nd argument of rest_target macro)
|
|
||||||
# will be used to derive what path under scripts/ the generated documentation
|
|
||||||
# will be placed.
|
|
||||||
|
|
||||||
set(psd ${PROJECT_SOURCE_DIR}/scripts)
|
|
||||||
|
|
||||||
rest_target(${CMAKE_CURRENT_SOURCE_DIR} example.bro internal)
|
|
||||||
rest_target(${psd} base/init-default.bro internal)
|
|
||||||
rest_target(${psd} base/init-bare.bro internal)
|
|
||||||
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/analyzer.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/bloom-filter.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/bro.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/cardinality-counter.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/const.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/event.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/file_analysis.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/input.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/logging.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_ARP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_AYIYA.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_BackDoor.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_BitTorrent.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_ConnSize.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_DCE_RPC.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_DHCP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_DNP3.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_DNS.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_FTP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_FTP.functions.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_File.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_FileExtract.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_FileExtract.functions.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_FileHash.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Finger.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_GTPv1.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Gnutella.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_HTTP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_HTTP.functions.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_ICMP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_IRC.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Ident.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_InterConn.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Login.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Login.functions.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_MIME.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Modbus.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_NCP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_NTP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_NetBIOS.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_NetBIOS.functions.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_NetFlow.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_PIA.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_POP3.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_RPC.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_SMB.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_SMTP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_SMTP.functions.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_SOCKS.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_SSH.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_SSL.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_SSL.functions.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_SteppingStone.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Syslog.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_TCP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_TCP.functions.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Teredo.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_UDP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Unified2.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_Unified2.types.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/plugins/Bro_ZIP.events.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/reporter.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/strings.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/top-k.bif.bro)
|
|
||||||
rest_target(${CMAKE_BINARY_DIR}/scripts base/bif/types.bif.bro)
|
|
||||||
rest_target(${psd} base/files/extract/main.bro)
|
|
||||||
rest_target(${psd} base/files/hash/main.bro)
|
|
||||||
rest_target(${psd} base/files/unified2/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/analyzer/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/cluster/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/cluster/nodes/manager.bro)
|
|
||||||
rest_target(${psd} base/frameworks/cluster/nodes/proxy.bro)
|
|
||||||
rest_target(${psd} base/frameworks/cluster/nodes/worker.bro)
|
|
||||||
rest_target(${psd} base/frameworks/cluster/setup-connections.bro)
|
|
||||||
rest_target(${psd} base/frameworks/communication/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/control/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/dpd/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/files/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/input/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/input/readers/ascii.bro)
|
|
||||||
rest_target(${psd} base/frameworks/input/readers/benchmark.bro)
|
|
||||||
rest_target(${psd} base/frameworks/input/readers/binary.bro)
|
|
||||||
rest_target(${psd} base/frameworks/input/readers/raw.bro)
|
|
||||||
rest_target(${psd} base/frameworks/input/readers/sqlite.bro)
|
|
||||||
rest_target(${psd} base/frameworks/intel/cluster.bro)
|
|
||||||
rest_target(${psd} base/frameworks/intel/input.bro)
|
|
||||||
rest_target(${psd} base/frameworks/intel/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/logging/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/logging/postprocessors/scp.bro)
|
|
||||||
rest_target(${psd} base/frameworks/logging/postprocessors/sftp.bro)
|
|
||||||
rest_target(${psd} base/frameworks/logging/writers/ascii.bro)
|
|
||||||
rest_target(${psd} base/frameworks/logging/writers/dataseries.bro)
|
|
||||||
rest_target(${psd} base/frameworks/logging/writers/elasticsearch.bro)
|
|
||||||
rest_target(${psd} base/frameworks/logging/writers/none.bro)
|
|
||||||
rest_target(${psd} base/frameworks/logging/writers/sqlite.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/actions/add-geodata.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/actions/drop.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/actions/email_admin.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/actions/page.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/actions/pp-alarms.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/cluster.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/extend-email/hostnames.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/non-cluster.bro)
|
|
||||||
rest_target(${psd} base/frameworks/notice/weird.bro)
|
|
||||||
rest_target(${psd} base/frameworks/packet-filter/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/packet-filter/netstats.bro)
|
|
||||||
rest_target(${psd} base/frameworks/packet-filter/utils.bro)
|
|
||||||
rest_target(${psd} base/frameworks/reporter/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/signatures/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/software/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/cluster.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/main.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/non-cluster.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/average.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/hll_unique.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/last.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/max.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/min.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/sample.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/std-dev.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/sum.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/topk.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/unique.bro)
|
|
||||||
rest_target(${psd} base/frameworks/sumstats/plugins/variance.bro)
|
|
||||||
rest_target(${psd} base/frameworks/tunnels/main.bro)
|
|
||||||
rest_target(${psd} base/misc/find-checksum-offloading.bro)
|
|
||||||
rest_target(${psd} base/protocols/conn/contents.bro)
|
|
||||||
rest_target(${psd} base/protocols/conn/inactivity.bro)
|
|
||||||
rest_target(${psd} base/protocols/conn/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/conn/polling.bro)
|
|
||||||
rest_target(${psd} base/protocols/dhcp/consts.bro)
|
|
||||||
rest_target(${psd} base/protocols/dhcp/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/dhcp/utils.bro)
|
|
||||||
rest_target(${psd} base/protocols/dnp3/consts.bro)
|
|
||||||
rest_target(${psd} base/protocols/dnp3/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/dns/consts.bro)
|
|
||||||
rest_target(${psd} base/protocols/dns/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/ftp/files.bro)
|
|
||||||
rest_target(${psd} base/protocols/ftp/gridftp.bro)
|
|
||||||
rest_target(${psd} base/protocols/ftp/info.bro)
|
|
||||||
rest_target(${psd} base/protocols/ftp/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/ftp/utils-commands.bro)
|
|
||||||
rest_target(${psd} base/protocols/ftp/utils.bro)
|
|
||||||
rest_target(${psd} base/protocols/http/entities.bro)
|
|
||||||
rest_target(${psd} base/protocols/http/files.bro)
|
|
||||||
rest_target(${psd} base/protocols/http/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/http/utils.bro)
|
|
||||||
rest_target(${psd} base/protocols/irc/dcc-send.bro)
|
|
||||||
rest_target(${psd} base/protocols/irc/files.bro)
|
|
||||||
rest_target(${psd} base/protocols/irc/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/modbus/consts.bro)
|
|
||||||
rest_target(${psd} base/protocols/modbus/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/smtp/entities.bro)
|
|
||||||
rest_target(${psd} base/protocols/smtp/files.bro)
|
|
||||||
rest_target(${psd} base/protocols/smtp/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/socks/consts.bro)
|
|
||||||
rest_target(${psd} base/protocols/socks/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/ssh/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/ssl/consts.bro)
|
|
||||||
rest_target(${psd} base/protocols/ssl/main.bro)
|
|
||||||
rest_target(${psd} base/protocols/ssl/mozilla-ca-list.bro)
|
|
||||||
rest_target(${psd} base/protocols/syslog/consts.bro)
|
|
||||||
rest_target(${psd} base/protocols/syslog/main.bro)
|
|
||||||
rest_target(${psd} base/utils/active-http.bro)
|
|
||||||
rest_target(${psd} base/utils/addrs.bro)
|
|
||||||
rest_target(${psd} base/utils/conn-ids.bro)
|
|
||||||
rest_target(${psd} base/utils/dir.bro)
|
|
||||||
rest_target(${psd} base/utils/directions-and-hosts.bro)
|
|
||||||
rest_target(${psd} base/utils/exec.bro)
|
|
||||||
rest_target(${psd} base/utils/files.bro)
|
|
||||||
rest_target(${psd} base/utils/numbers.bro)
|
|
||||||
rest_target(${psd} base/utils/paths.bro)
|
|
||||||
rest_target(${psd} base/utils/patterns.bro)
|
|
||||||
rest_target(${psd} base/utils/queue.bro)
|
|
||||||
rest_target(${psd} base/utils/site.bro)
|
|
||||||
rest_target(${psd} base/utils/strings.bro)
|
|
||||||
rest_target(${psd} base/utils/thresholds.bro)
|
|
||||||
rest_target(${psd} base/utils/time.bro)
|
|
||||||
rest_target(${psd} base/utils/urls.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/communication/listen.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/control/controllee.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/control/controller.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/dpd/detect-protocols.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/dpd/packet-segment-logging.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/files/detect-MHR.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/files/hash-all-files.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/do_notice.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/conn-established.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/dns.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/file-hashes.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/file-names.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/http-headers.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/http-url.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/smtp-url-extraction.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/smtp.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/ssl.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/intel/seen/where-locations.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/packet-filter/shunt.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/software/version-changes.bro)
|
|
||||||
rest_target(${psd} policy/frameworks/software/vulnerable.bro)
|
|
||||||
rest_target(${psd} policy/integration/barnyard2/main.bro)
|
|
||||||
rest_target(${psd} policy/integration/barnyard2/types.bro)
|
|
||||||
rest_target(${psd} policy/integration/collective-intel/main.bro)
|
|
||||||
rest_target(${psd} policy/misc/app-stats/main.bro)
|
|
||||||
rest_target(${psd} policy/misc/app-stats/plugins/facebook.bro)
|
|
||||||
rest_target(${psd} policy/misc/app-stats/plugins/gmail.bro)
|
|
||||||
rest_target(${psd} policy/misc/app-stats/plugins/google.bro)
|
|
||||||
rest_target(${psd} policy/misc/app-stats/plugins/netflix.bro)
|
|
||||||
rest_target(${psd} policy/misc/app-stats/plugins/pandora.bro)
|
|
||||||
rest_target(${psd} policy/misc/app-stats/plugins/youtube.bro)
|
|
||||||
rest_target(${psd} policy/misc/capture-loss.bro)
|
|
||||||
rest_target(${psd} policy/misc/detect-traceroute/main.bro)
|
|
||||||
rest_target(${psd} policy/misc/known-devices.bro)
|
|
||||||
rest_target(${psd} policy/misc/load-balancing.bro)
|
|
||||||
rest_target(${psd} policy/misc/loaded-scripts.bro)
|
|
||||||
rest_target(${psd} policy/misc/profiling.bro)
|
|
||||||
rest_target(${psd} policy/misc/scan.bro)
|
|
||||||
rest_target(${psd} policy/misc/stats.bro)
|
|
||||||
rest_target(${psd} policy/misc/trim-trace-file.bro)
|
|
||||||
rest_target(${psd} policy/protocols/conn/known-hosts.bro)
|
|
||||||
rest_target(${psd} policy/protocols/conn/known-services.bro)
|
|
||||||
rest_target(${psd} policy/protocols/conn/weirds.bro)
|
|
||||||
rest_target(${psd} policy/protocols/dhcp/known-devices-and-hostnames.bro)
|
|
||||||
rest_target(${psd} policy/protocols/dns/auth-addl.bro)
|
|
||||||
rest_target(${psd} policy/protocols/dns/detect-external-names.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ftp/detect-bruteforcing.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ftp/detect.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ftp/software.bro)
|
|
||||||
rest_target(${psd} policy/protocols/http/detect-sqli.bro)
|
|
||||||
rest_target(${psd} policy/protocols/http/detect-webapps.bro)
|
|
||||||
rest_target(${psd} policy/protocols/http/header-names.bro)
|
|
||||||
rest_target(${psd} policy/protocols/http/software-browser-plugins.bro)
|
|
||||||
rest_target(${psd} policy/protocols/http/software.bro)
|
|
||||||
rest_target(${psd} policy/protocols/http/var-extraction-cookies.bro)
|
|
||||||
rest_target(${psd} policy/protocols/http/var-extraction-uri.bro)
|
|
||||||
rest_target(${psd} policy/protocols/modbus/known-masters-slaves.bro)
|
|
||||||
rest_target(${psd} policy/protocols/modbus/track-memmap.bro)
|
|
||||||
rest_target(${psd} policy/protocols/smtp/blocklists.bro)
|
|
||||||
rest_target(${psd} policy/protocols/smtp/detect-suspicious-orig.bro)
|
|
||||||
rest_target(${psd} policy/protocols/smtp/entities-excerpt.bro)
|
|
||||||
rest_target(${psd} policy/protocols/smtp/software.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssh/detect-bruteforcing.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssh/geo-data.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssh/interesting-hostnames.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssh/software.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssl/cert-hash.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssl/expiring-certs.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssl/extract-certs-pem.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssl/known-certs.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssl/notary.bro)
|
|
||||||
rest_target(${psd} policy/protocols/ssl/validate-certs.bro)
|
|
||||||
rest_target(${psd} policy/tuning/defaults/extracted_file_limits.bro)
|
|
||||||
rest_target(${psd} policy/tuning/defaults/packet-fragments.bro)
|
|
||||||
rest_target(${psd} policy/tuning/defaults/warnings.bro)
|
|
||||||
rest_target(${psd} policy/tuning/logs-to-elasticsearch.bro)
|
|
||||||
rest_target(${psd} policy/tuning/track-all-assets.bro)
|
|
||||||
rest_target(${psd} site/local-manager.bro)
|
|
||||||
rest_target(${psd} site/local-proxy.bro)
|
|
||||||
rest_target(${psd} site/local-worker.bro)
|
|
||||||
rest_target(${psd} site/local.bro)
|
|
||||||
rest_target(${psd} test-all-policy.bro)
|
|
|
@ -1,44 +0,0 @@
|
||||||
This directory contains scripts and templates that can be used to automate
|
|
||||||
the generation of Bro script documentation. Several build targets are defined
|
|
||||||
by CMake and available in the top-level Makefile:
|
|
||||||
|
|
||||||
``restdoc``
|
|
||||||
|
|
||||||
This target uses Bro to parse policy scripts in order to generate
|
|
||||||
reStructuredText (reST) documentation from them. The list of scripts
|
|
||||||
for which to generate reST documentation is defined in the
|
|
||||||
``CMakeLists.txt`` file in this directory. Script documentation is
|
|
||||||
rebuilt automatically if the policy script from which it is derived
|
|
||||||
or the Bro binary becomes out of date.
|
|
||||||
|
|
||||||
The resulting output from this target can be found in the CMake
|
|
||||||
``build/`` directory inside ``reST`` (a symlink to
|
|
||||||
``doc/scripts/rest_output``).
|
|
||||||
|
|
||||||
``restclean``
|
|
||||||
|
|
||||||
This target removes any reST documentation that has been generated so far.
|
|
||||||
|
|
||||||
The ``genDocSourcesList.sh`` script can be run to automatically generate
|
|
||||||
``DocSourcesList.cmake``, which is the file CMake uses to define the list
|
|
||||||
of documentation targets. This script should be run after adding new
|
|
||||||
Bro script source files, and the changes committed to git.
|
|
||||||
|
|
||||||
If a script shouldn't have documentation generated for it, there's also a
|
|
||||||
blacklist manifest that can be maintained in the ``genDocSourcesList.sh``
|
|
||||||
script.
|
|
||||||
|
|
||||||
The blacklist can also be used if you want to define a certain grouping for
|
|
||||||
the script's generated docs to belong to (as opposed to the automatic grouping
|
|
||||||
that happens for script packages/directories). To do that, add the
|
|
||||||
script's name to the blacklist, then append a ``rest_target()`` to the
|
|
||||||
``statictext`` variable where the first argument is the source directory
|
|
||||||
containing the policy script to document, the second argument is the file
|
|
||||||
name of the policy script, and the third argument is the path/name of a
|
|
||||||
pre-created reST document in the ``../`` source directory to which the
|
|
||||||
``make doc`` process can append script documentation references. This
|
|
||||||
pre-created reST document should also then be linked to from the TOC tree
|
|
||||||
in ``../index.rst``.
|
|
||||||
|
|
||||||
See ``example.bro`` for an example of how to document a Bro script such that
|
|
||||||
``make doc`` will be able to produce reST/HTML documentation for it.
|
|
|
@ -1,225 +0,0 @@
|
||||||
##! This is an example script that demonstrates documentation features.
|
|
||||||
##! Comments of the form ``##!`` are for the script summary. The contents of
|
|
||||||
##! these comments are transferred directly into the auto-generated
|
|
||||||
##! `reStructuredText <http://docutils.sourceforge.net/rst.html>`_
|
|
||||||
##! (reST) document's summary section.
|
|
||||||
##!
|
|
||||||
##! .. tip:: You can embed directives and roles within ``##``-stylized comments.
|
|
||||||
##!
|
|
||||||
##! There's also a custom role to reference any identifier node in
|
|
||||||
##! the Bro Sphinx domain that's good for "see alsos", e.g.
|
|
||||||
##!
|
|
||||||
##! See also: :bro:see:`Example::a_var`, :bro:see:`Example::ONE`,
|
|
||||||
##! :bro:see:`SSH::Info`
|
|
||||||
##!
|
|
||||||
##! And a custom directive does the equivalent references:
|
|
||||||
##!
|
|
||||||
##! .. bro:see:: Example::a_var Example::ONE SSH::Info
|
|
||||||
|
|
||||||
# Comments that use a single pound sign (#) are not significant to
|
|
||||||
# a script's auto-generated documentation, but ones that use a
|
|
||||||
# double pound sign (##) do matter. In some cases, like record
|
|
||||||
# field comments, it's necessary to disambiguate the field with
|
|
||||||
# which a comment associates: e.g. "##<" can be used on the same line
|
|
||||||
# as a field to signify the comment relates to it and not the
|
|
||||||
# following field. "##<" can also be used more generally in any
|
|
||||||
# variable declarations to associate with the last-declared identifier.
|
|
||||||
#
|
|
||||||
# Generally, the auto-doc comments (##) are associated with the
|
|
||||||
# next declaration/identifier found in the script, but the doc framework
|
|
||||||
# will track/render identifiers regardless of whether they have any
|
|
||||||
# of these special comments associated with them.
|
|
||||||
#
|
|
||||||
# The first sentence contained within the "##"-stylized comments for
|
|
||||||
# a given identifier is special in that it will be used as summary
|
|
||||||
# text in a table containing all such identifiers and short summaries.
|
|
||||||
# If there are no sentences (text terminated with '.'), then everything
|
|
||||||
# in the "##"-stylized comments up until the first empty comment
|
|
||||||
# is taken as the summary text for a given identifier.
|
|
||||||
|
|
||||||
# @load directives are self-documenting
|
|
||||||
@load frameworks/software/vulnerable
|
|
||||||
|
|
||||||
# "module" statements are self-documenting
|
|
||||||
module Example;
|
|
||||||
|
|
||||||
# redefinitions of "capture_filters" are self-documenting and
|
|
||||||
# go into the generated documentation's "Packet Filter" section
|
|
||||||
redef capture_filters += {
|
|
||||||
["ssl"] = "tcp port 443",
|
|
||||||
["nntps"] = "tcp port 562",
|
|
||||||
};
|
|
||||||
|
|
||||||
global example_ports = {
|
|
||||||
443/tcp, 562/tcp,
|
|
||||||
} &redef;
|
|
||||||
|
|
||||||
|
|
||||||
event bro_init()
|
|
||||||
{
|
|
||||||
Analyzer::register_for_ports(Analyzer::ANALYZER_SSL, example_ports);
|
|
||||||
}
|
|
||||||
|
|
||||||
# redefinitions of "Notice::Type" are self-documenting, but
|
|
||||||
# more information can be supplied in two different ways
|
|
||||||
redef enum Notice::Type += {
|
|
||||||
## any number of this type of comment
|
|
||||||
## will document "Notice_One"
|
|
||||||
Notice_One,
|
|
||||||
Notice_Two, ##< any number of this type of comment
|
|
||||||
##< will document "Notice_Two"
|
|
||||||
Notice_Three,
|
|
||||||
Notice_Four,
|
|
||||||
};
|
|
||||||
|
|
||||||
# Redef'ing the ID enumeration for logging streams is automatically tracked.
|
|
||||||
# Comments of the "##" form can be used to further document it, but it's
|
|
||||||
# better to do all documentation related to logging in the summary section
|
|
||||||
# as is shown above.
|
|
||||||
redef enum Log::ID += { LOG };
|
|
||||||
|
|
||||||
# Anything declared in the export section will show up in the rendered
|
|
||||||
# documentation's "public interface" section
|
|
||||||
|
|
||||||
export {
|
|
||||||
|
|
||||||
# these headings don't mean anything special to the
|
|
||||||
# doc framework right now, I'm just including them
|
|
||||||
# to make it more clear to the reader how the doc
|
|
||||||
# framework will actually categorize a script's identifiers
|
|
||||||
|
|
||||||
############## types ################
|
|
||||||
|
|
||||||
# Note that I'm just mixing the "##" and "##<"
|
|
||||||
# types of comments in the following declarations
|
|
||||||
# as a demonstration. Normally, it would be good style
|
|
||||||
# to pick one and be consistent.
|
|
||||||
|
|
||||||
## documentation for "SimpleEnum"
|
|
||||||
## goes here.
|
|
||||||
type SimpleEnum: enum {
|
|
||||||
## and more specific info for "ONE"
|
|
||||||
## can span multiple lines
|
|
||||||
ONE,
|
|
||||||
TWO, ##< or more info like this for "TWO"
|
|
||||||
##< can span multiple lines
|
|
||||||
THREE,
|
|
||||||
};
|
|
||||||
|
|
||||||
## document the "SimpleEnum" redef here
|
|
||||||
redef enum SimpleEnum += {
|
|
||||||
FOUR, ##< and some documentation for "FOUR"
|
|
||||||
## also "FIVE" for good measure
|
|
||||||
FIVE
|
|
||||||
};
|
|
||||||
|
|
||||||
## general documentation for a type "SimpleRecord"
|
|
||||||
## goes here.
|
|
||||||
type SimpleRecord: record {
|
|
||||||
## counts something
|
|
||||||
field1: count;
|
|
||||||
field2: bool; ##< toggles something
|
|
||||||
};
|
|
||||||
|
|
||||||
## document the record extension redef here
|
|
||||||
redef record SimpleRecord += {
|
|
||||||
## document the extending field here
|
|
||||||
field_ext: string &optional; ##< (or here)
|
|
||||||
};
|
|
||||||
|
|
||||||
## general documentation for a type "ComplexRecord" goes here
|
|
||||||
type ComplexRecord: record {
|
|
||||||
field1: count; ##< counts something
|
|
||||||
field2: bool; ##< toggles something
|
|
||||||
field3: SimpleRecord;
|
|
||||||
msg: string &default="blah"; ##< attributes are self-documenting
|
|
||||||
} &redef;
|
|
||||||
|
|
||||||
## An example record to be used with a logging stream.
|
|
||||||
type Info: record {
|
|
||||||
ts: time &log;
|
|
||||||
uid: string &log;
|
|
||||||
status: count &log &optional;
|
|
||||||
};
|
|
||||||
|
|
||||||
############## options ################
|
|
||||||
# right now, I'm just defining an option as
|
|
||||||
# any const with &redef (something that can
|
|
||||||
# change at parse time, but not at run time).
|
|
||||||
|
|
||||||
## add documentation for "an_option" here
|
|
||||||
const an_option: set[addr, addr, string] &redef;
|
|
||||||
|
|
||||||
# default initialization will be self-documenting
|
|
||||||
const option_with_init = 0.01 secs &redef; ##< More docs can be added here.
|
|
||||||
|
|
||||||
############## state variables ############
|
|
||||||
# right now, I'm defining this as any global
|
|
||||||
# that's not a function/event. doesn't matter
|
|
||||||
# if &redef attribute is present
|
|
||||||
|
|
||||||
## put some documentation for "a_var" here
|
|
||||||
global a_var: bool;
|
|
||||||
|
|
||||||
# attributes are self-documenting
|
|
||||||
global var_with_attr: count &persistent;
|
|
||||||
|
|
||||||
# it's fine if the type is inferred, that information is self-documenting
|
|
||||||
global var_without_explicit_type = "this works";
|
|
||||||
|
|
||||||
############## functions/events ############
|
|
||||||
|
|
||||||
## Summarize purpose of "a_function" here.
|
|
||||||
## Give more details about "a_function" here.
|
|
||||||
## Separating the documentation of the params/return values with
|
|
||||||
## empty comments is optional, but improves readability of script.
|
|
||||||
##
|
|
||||||
## tag: function arguments can be described
|
|
||||||
## like this
|
|
||||||
## msg: another param
|
|
||||||
##
|
|
||||||
## Returns: describe the return type here
|
|
||||||
global a_function: function(tag: string, msg: string): string;
|
|
||||||
|
|
||||||
## Summarize "an_event" here.
|
|
||||||
## Give more details about "an_event" here.
|
|
||||||
## Example::an_event should not be confused as a parameter.
|
|
||||||
## name: describe the argument here
|
|
||||||
global an_event: event(name: string);
|
|
||||||
|
|
||||||
## This is a declaration of an example event that can be used in
|
|
||||||
## logging streams and is raised once for each log entry.
|
|
||||||
global log_example: event(rec: Info);
|
|
||||||
}
|
|
||||||
|
|
||||||
function filter_func(rec: Info): bool
|
|
||||||
{
|
|
||||||
return T;
|
|
||||||
}
|
|
||||||
|
|
||||||
# this function is documented in the "private interface" section
|
|
||||||
# of generated documentation and any "##"-stylized comments would also
|
|
||||||
# be rendered there
|
|
||||||
function function_without_proto(tag: string): string
|
|
||||||
{
|
|
||||||
return "blah";
|
|
||||||
}
|
|
||||||
|
|
||||||
# this record type is documented in the "private interface" section
|
|
||||||
# of generated documentation and any "##"-stylized comments would also
|
|
||||||
# be rendered there
|
|
||||||
type PrivateRecord: record {
|
|
||||||
field1: bool;
|
|
||||||
field2: count;
|
|
||||||
};
|
|
||||||
|
|
||||||
event bro_init()
|
|
||||||
{
|
|
||||||
Log::create_stream(Example::LOG, [$columns=Info, $ev=log_example]);
|
|
||||||
Log::add_filter(Example::LOG, [
|
|
||||||
$name="example-filter",
|
|
||||||
$path="example-filter",
|
|
||||||
$pred=filter_func,
|
|
||||||
$exclude=set("ts")
|
|
||||||
]);
|
|
||||||
}
|
|
|
@ -1,291 +0,0 @@
|
||||||
.. Automatically generated. Do not edit.
|
|
||||||
|
|
||||||
example.bro
|
|
||||||
===========
|
|
||||||
|
|
||||||
:download:`Original Source File <example.bro>`
|
|
||||||
|
|
||||||
Overview
|
|
||||||
--------
|
|
||||||
This is an example script that demonstrates how to document. Comments
|
|
||||||
of the form ``##!`` are for the script summary. The contents of
|
|
||||||
these comments are transferred directly into the auto-generated
|
|
||||||
`reStructuredText <http://docutils.sourceforge.net/rst.html>`_
|
|
||||||
(reST) document's summary section.
|
|
||||||
|
|
||||||
.. tip:: You can embed directives and roles within ``##``-stylized comments.
|
|
||||||
|
|
||||||
:Imports: :doc:`policy/frameworks/software/vulnerable </scripts/policy/frameworks/software/vulnerable>`
|
|
||||||
|
|
||||||
Summary
|
|
||||||
~~~~~~~
|
|
||||||
Options
|
|
||||||
#######
|
|
||||||
============================================================================ ======================================
|
|
||||||
:bro:id:`Example::an_option`: :bro:type:`set` :bro:attr:`&redef` add documentation for "an_option" here
|
|
||||||
|
|
||||||
:bro:id:`Example::option_with_init`: :bro:type:`interval` :bro:attr:`&redef`
|
|
||||||
============================================================================ ======================================
|
|
||||||
|
|
||||||
State Variables
|
|
||||||
###############
|
|
||||||
=========================================================================== =======================================
|
|
||||||
:bro:id:`Example::a_var`: :bro:type:`bool` put some documentation for "a_var" here
|
|
||||||
|
|
||||||
:bro:id:`Example::var_with_attr`: :bro:type:`count` :bro:attr:`&persistent`
|
|
||||||
|
|
||||||
:bro:id:`Example::var_without_explicit_type`: :bro:type:`string`
|
|
||||||
=========================================================================== =======================================
|
|
||||||
|
|
||||||
Types
|
|
||||||
#####
|
|
||||||
====================================================== ==========================================================
|
|
||||||
:bro:type:`Example::SimpleEnum`: :bro:type:`enum` documentation for "SimpleEnum"
|
|
||||||
goes here.
|
|
||||||
|
|
||||||
:bro:type:`Example::SimpleRecord`: :bro:type:`record` general documentation for a type "SimpleRecord"
|
|
||||||
goes here.
|
|
||||||
|
|
||||||
:bro:type:`Example::ComplexRecord`: :bro:type:`record` general documentation for a type "ComplexRecord" goes here
|
|
||||||
|
|
||||||
:bro:type:`Example::Info`: :bro:type:`record` An example record to be used with a logging stream.
|
|
||||||
====================================================== ==========================================================
|
|
||||||
|
|
||||||
Events
|
|
||||||
######
|
|
||||||
================================================= =============================================================
|
|
||||||
:bro:id:`Example::an_event`: :bro:type:`event` Summarize "an_event" here.
|
|
||||||
|
|
||||||
:bro:id:`Example::log_example`: :bro:type:`event` This is a declaration of an example event that can be used in
|
|
||||||
logging streams and is raised once for each log entry.
|
|
||||||
================================================= =============================================================
|
|
||||||
|
|
||||||
Functions
|
|
||||||
#########
|
|
||||||
=============================================== =======================================
|
|
||||||
:bro:id:`Example::a_function`: :bro:type:`func` Summarize purpose of "a_function" here.
|
|
||||||
=============================================== =======================================
|
|
||||||
|
|
||||||
Redefinitions
|
|
||||||
#############
|
|
||||||
===================================================== ========================================
|
|
||||||
:bro:type:`Log::ID`: :bro:type:`enum`
|
|
||||||
|
|
||||||
:bro:type:`Example::SimpleEnum`: :bro:type:`enum` document the "SimpleEnum" redef here
|
|
||||||
|
|
||||||
:bro:type:`Example::SimpleRecord`: :bro:type:`record` document the record extension redef here
|
|
||||||
===================================================== ========================================
|
|
||||||
|
|
||||||
Namespaces
|
|
||||||
~~~~~~~~~~
|
|
||||||
.. bro:namespace:: Example
|
|
||||||
|
|
||||||
Notices
|
|
||||||
~~~~~~~
|
|
||||||
:bro:type:`Notice::Type`
|
|
||||||
|
|
||||||
:Type: :bro:type:`enum`
|
|
||||||
|
|
||||||
.. bro:enum:: Example::Notice_One Notice::Type
|
|
||||||
|
|
||||||
any number of this type of comment
|
|
||||||
will document "Notice_One"
|
|
||||||
|
|
||||||
.. bro:enum:: Example::Notice_Two Notice::Type
|
|
||||||
|
|
||||||
any number of this type of comment
|
|
||||||
will document "Notice_Two"
|
|
||||||
|
|
||||||
.. bro:enum:: Example::Notice_Three Notice::Type
|
|
||||||
|
|
||||||
.. bro:enum:: Example::Notice_Four Notice::Type
|
|
||||||
|
|
||||||
Public Interface
|
|
||||||
----------------
|
|
||||||
Options
|
|
||||||
~~~~~~~
|
|
||||||
.. bro:id:: Example::an_option
|
|
||||||
|
|
||||||
:Type: :bro:type:`set` [:bro:type:`addr`, :bro:type:`addr`, :bro:type:`string`]
|
|
||||||
:Attributes: :bro:attr:`&redef`
|
|
||||||
:Default: ``{}``
|
|
||||||
|
|
||||||
add documentation for "an_option" here
|
|
||||||
|
|
||||||
.. bro:id:: Example::option_with_init
|
|
||||||
|
|
||||||
:Type: :bro:type:`interval`
|
|
||||||
:Attributes: :bro:attr:`&redef`
|
|
||||||
:Default: ``10.0 msecs``
|
|
||||||
|
|
||||||
State Variables
|
|
||||||
~~~~~~~~~~~~~~~
|
|
||||||
.. bro:id:: Example::a_var
|
|
||||||
|
|
||||||
:Type: :bro:type:`bool`
|
|
||||||
|
|
||||||
put some documentation for "a_var" here
|
|
||||||
|
|
||||||
.. bro:id:: Example::var_with_attr
|
|
||||||
|
|
||||||
:Type: :bro:type:`count`
|
|
||||||
:Attributes: :bro:attr:`&persistent`
|
|
||||||
|
|
||||||
.. bro:id:: Example::var_without_explicit_type
|
|
||||||
|
|
||||||
:Type: :bro:type:`string`
|
|
||||||
:Default: ``"this works"``
|
|
||||||
|
|
||||||
Types
|
|
||||||
~~~~~
|
|
||||||
.. bro:type:: Example::SimpleEnum
|
|
||||||
|
|
||||||
:Type: :bro:type:`enum`
|
|
||||||
|
|
||||||
.. bro:enum:: Example::ONE Example::SimpleEnum
|
|
||||||
|
|
||||||
and more specific info for "ONE"
|
|
||||||
can span multiple lines
|
|
||||||
|
|
||||||
.. bro:enum:: Example::TWO Example::SimpleEnum
|
|
||||||
|
|
||||||
or more info like this for "TWO"
|
|
||||||
can span multiple lines
|
|
||||||
|
|
||||||
.. bro:enum:: Example::THREE Example::SimpleEnum
|
|
||||||
|
|
||||||
documentation for "SimpleEnum"
|
|
||||||
goes here.
|
|
||||||
|
|
||||||
.. bro:type:: Example::SimpleRecord
|
|
||||||
|
|
||||||
:Type: :bro:type:`record`
|
|
||||||
|
|
||||||
field1: :bro:type:`count`
|
|
||||||
counts something
|
|
||||||
|
|
||||||
field2: :bro:type:`bool`
|
|
||||||
toggles something
|
|
||||||
|
|
||||||
general documentation for a type "SimpleRecord"
|
|
||||||
goes here.
|
|
||||||
|
|
||||||
.. bro:type:: Example::ComplexRecord
|
|
||||||
|
|
||||||
:Type: :bro:type:`record`
|
|
||||||
|
|
||||||
field1: :bro:type:`count`
|
|
||||||
counts something
|
|
||||||
|
|
||||||
field2: :bro:type:`bool`
|
|
||||||
toggles something
|
|
||||||
|
|
||||||
field3: :bro:type:`Example::SimpleRecord`
|
|
||||||
|
|
||||||
msg: :bro:type:`string` :bro:attr:`&default` = ``"blah"`` :bro:attr:`&optional`
|
|
||||||
attributes are self-documenting
|
|
||||||
|
|
||||||
general documentation for a type "ComplexRecord" goes here
|
|
||||||
|
|
||||||
.. bro:type:: Example::Info
|
|
||||||
|
|
||||||
:Type: :bro:type:`record`
|
|
||||||
|
|
||||||
ts: :bro:type:`time` :bro:attr:`&log`
|
|
||||||
|
|
||||||
uid: :bro:type:`string` :bro:attr:`&log`
|
|
||||||
|
|
||||||
status: :bro:type:`count` :bro:attr:`&log` :bro:attr:`&optional`
|
|
||||||
|
|
||||||
An example record to be used with a logging stream.
|
|
||||||
|
|
||||||
Events
|
|
||||||
~~~~~~
|
|
||||||
.. bro:id:: Example::an_event
|
|
||||||
|
|
||||||
:Type: :bro:type:`event` (name: :bro:type:`string`)
|
|
||||||
|
|
||||||
Summarize "an_event" here.
|
|
||||||
Give more details about "an_event" here.
|
|
||||||
|
|
||||||
:param name: describe the argument here
|
|
||||||
|
|
||||||
.. bro:id:: Example::log_example
|
|
||||||
|
|
||||||
:Type: :bro:type:`event` (rec: :bro:type:`Example::Info`)
|
|
||||||
|
|
||||||
This is a declaration of an example event that can be used in
|
|
||||||
logging streams and is raised once for each log entry.
|
|
||||||
|
|
||||||
Functions
|
|
||||||
~~~~~~~~~
|
|
||||||
.. bro:id:: Example::a_function
|
|
||||||
|
|
||||||
:Type: :bro:type:`function` (tag: :bro:type:`string`, msg: :bro:type:`string`) : :bro:type:`string`
|
|
||||||
|
|
||||||
Summarize purpose of "a_function" here.
|
|
||||||
Give more details about "a_function" here.
|
|
||||||
Separating the documentation of the params/return values with
|
|
||||||
empty comments is optional, but improves readability of script.
|
|
||||||
|
|
||||||
|
|
||||||
:param tag: function arguments can be described
|
|
||||||
like this
|
|
||||||
|
|
||||||
:param msg: another param
|
|
||||||
|
|
||||||
|
|
||||||
:returns: describe the return type here
|
|
||||||
|
|
||||||
Redefinitions
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
:bro:type:`Log::ID`
|
|
||||||
|
|
||||||
:Type: :bro:type:`enum`
|
|
||||||
|
|
||||||
.. bro:enum:: Example::LOG Log::ID
|
|
||||||
|
|
||||||
:bro:type:`Example::SimpleEnum`
|
|
||||||
|
|
||||||
:Type: :bro:type:`enum`
|
|
||||||
|
|
||||||
.. bro:enum:: Example::FOUR Example::SimpleEnum
|
|
||||||
|
|
||||||
and some documentation for "FOUR"
|
|
||||||
|
|
||||||
.. bro:enum:: Example::FIVE Example::SimpleEnum
|
|
||||||
|
|
||||||
also "FIVE" for good measure
|
|
||||||
|
|
||||||
document the "SimpleEnum" redef here
|
|
||||||
|
|
||||||
:bro:type:`Example::SimpleRecord`
|
|
||||||
|
|
||||||
:Type: :bro:type:`record`
|
|
||||||
|
|
||||||
field_ext: :bro:type:`string` :bro:attr:`&optional`
|
|
||||||
document the extending field here
|
|
||||||
(or here)
|
|
||||||
|
|
||||||
document the record extension redef here
|
|
||||||
|
|
||||||
Port Analysis
|
|
||||||
-------------
|
|
||||||
:ref:`More Information <common_port_analysis_doc>`
|
|
||||||
|
|
||||||
SSL::
|
|
||||||
|
|
||||||
[ports={
|
|
||||||
443/tcp,
|
|
||||||
562/tcp
|
|
||||||
}]
|
|
||||||
|
|
||||||
Packet Filter
|
|
||||||
-------------
|
|
||||||
:ref:`More Information <common_packet_filter_doc>`
|
|
||||||
|
|
||||||
Filters added::
|
|
||||||
|
|
||||||
[ssl] = tcp port 443,
|
|
||||||
[nntps] = tcp port 562
|
|
||||||
|
|
|
@ -1,86 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# ./genDocSourcesList.sh [output file]
|
|
||||||
#
|
|
||||||
# Run this script to generate a file that's used to tell CMake about all the
|
|
||||||
# possible scripts for which reST documentation can be created.
|
|
||||||
#
|
|
||||||
# The optional argument can be used to avoid overwriting the file CMake uses
|
|
||||||
# by default.
|
|
||||||
#
|
|
||||||
# Specific scripts can be blacklisted below when e.g. they currently aren't
|
|
||||||
# parseable or they just aren't meant to be documented.
|
|
||||||
|
|
||||||
export LC_ALL=C # Make sorting stable.
|
|
||||||
|
|
||||||
blacklist ()
|
|
||||||
{
|
|
||||||
if [[ "$blacklist" == "" ]]; then
|
|
||||||
blacklist="$1"
|
|
||||||
else
|
|
||||||
blacklist="$blacklist|$1"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# files passed into this function are meant to be temporary workarounds
|
|
||||||
# because they're not finished or otherwise can't be loaded for some reason
|
|
||||||
tmp_blacklist ()
|
|
||||||
{
|
|
||||||
echo "Warning: temporarily blacklisted files named '$1'" 1>&2
|
|
||||||
blacklist $1
|
|
||||||
}
|
|
||||||
|
|
||||||
blacklist __load__.bro
|
|
||||||
blacklist test-all.bro
|
|
||||||
blacklist all.bro
|
|
||||||
blacklist init-default.bro
|
|
||||||
blacklist init-bare.bro
|
|
||||||
|
|
||||||
statictext="\
|
|
||||||
# DO NOT EDIT
|
|
||||||
# This file is auto-generated from the "genDocSourcesList.sh" script.
|
|
||||||
#
|
|
||||||
# This is a list of Bro script sources for which to generate reST documentation.
|
|
||||||
# It will be included inline in the CMakeLists.txt found in the same directory
|
|
||||||
# in order to create Makefile targets that define how to generate reST from
|
|
||||||
# a given Bro script.
|
|
||||||
#
|
|
||||||
# Note: any path prefix of the script (2nd argument of rest_target macro)
|
|
||||||
# will be used to derive what path under scripts/ the generated documentation
|
|
||||||
# will be placed.
|
|
||||||
|
|
||||||
set(psd \${PROJECT_SOURCE_DIR}/scripts)
|
|
||||||
|
|
||||||
rest_target(\${CMAKE_CURRENT_SOURCE_DIR} example.bro internal)
|
|
||||||
rest_target(\${psd} base/init-default.bro internal)
|
|
||||||
rest_target(\${psd} base/init-bare.bro internal)
|
|
||||||
"
|
|
||||||
|
|
||||||
if [[ $# -ge 1 ]]; then
|
|
||||||
outfile=$1
|
|
||||||
else
|
|
||||||
outfile=DocSourcesList.cmake
|
|
||||||
fi
|
|
||||||
|
|
||||||
thisdir="$( cd "$( dirname "$0" )" && pwd )"
|
|
||||||
sourcedir=${thisdir}/../..
|
|
||||||
|
|
||||||
echo "$statictext" > $outfile
|
|
||||||
|
|
||||||
bifs=`( cd ${sourcedir}/build/scripts/base && find . -name \*\.bif.bro | sort )`
|
|
||||||
|
|
||||||
for file in $bifs
|
|
||||||
do
|
|
||||||
f=${file:2}
|
|
||||||
echo "rest_target(\${CMAKE_BINARY_DIR}/scripts base/$f)" >> $outfile
|
|
||||||
done
|
|
||||||
|
|
||||||
scriptfiles=`( cd ${sourcedir}/scripts && find . -name \*\.bro | sort )`
|
|
||||||
|
|
||||||
for file in $scriptfiles
|
|
||||||
do
|
|
||||||
f=${file:2}
|
|
||||||
if [[ ! $f =~ $blacklist ]]; then
|
|
||||||
echo "rest_target(\${psd} $f)" >> $outfile
|
|
||||||
fi
|
|
||||||
done
|
|
|
@ -1,5 +0,0 @@
|
||||||
.. This is a stub doc to which broxygen appends during the build process
|
|
||||||
|
|
||||||
Internal Scripts
|
|
||||||
================
|
|
||||||
|
|
|
@ -1,8 +0,0 @@
|
||||||
.. This is a stub doc to which broxygen appends during the build process
|
|
||||||
|
|
||||||
========================
|
|
||||||
Index of All Bro Scripts
|
|
||||||
========================
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
2
magic
|
@ -1 +1 @@
|
||||||
Subproject commit e87fe13a7b776182ffc8c75076d42702f5c28fed
|
Subproject commit 99c6b89230e2b9b0e781c42b0b9412d2ab4e14b2
|
1
scripts/base/files/extract/README
Normal file
|
@ -0,0 +1 @@
|
||||||
|
Support for extracting files with the file analysis framework.
|
|
@ -8,18 +8,21 @@ export {
|
||||||
const prefix = "./extract_files/" &redef;
|
const prefix = "./extract_files/" &redef;
|
||||||
|
|
||||||
## The default max size for extracted files (they won't exceed this
|
## The default max size for extracted files (they won't exceed this
|
||||||
## number of bytes), unlimited.
|
## number of bytes). A value of zero means unlimited.
|
||||||
const default_limit = 0 &redef;
|
const default_limit = 0 &redef;
|
||||||
|
|
||||||
redef record Files::Info += {
|
redef record Files::Info += {
|
||||||
## Local filenames of extracted file.
|
## Local filename of extracted file.
|
||||||
extracted: string &optional &log;
|
extracted: string &optional &log;
|
||||||
};
|
};
|
||||||
|
|
||||||
redef record Files::AnalyzerArgs += {
|
redef record Files::AnalyzerArgs += {
|
||||||
## The local filename to which to write an extracted file.
|
## The local filename to which to write an extracted file.
|
||||||
## This field is used in the core by the extraction plugin
|
## This field is used in the core by the extraction plugin
|
||||||
## to know where to write the file to. It's also optional
|
## to know where to write the file to. If not specified, then
|
||||||
|
## a filename in the format "extract-<source>-<id>" is
|
||||||
|
## automatically assigned (using the *source* and *id*
|
||||||
|
## fields of :bro:see:`fa_file`).
|
||||||
extract_filename: string &optional;
|
extract_filename: string &optional;
|
||||||
## The maximum allowed file size in bytes of *extract_filename*.
|
## The maximum allowed file size in bytes of *extract_filename*.
|
||||||
## Once reached, a :bro:see:`file_extraction_limit` event is
|
## Once reached, a :bro:see:`file_extraction_limit` event is
|
||||||
|
|
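A minimal sketch of attaching the extraction analyzer from a script, for illustration only (the trigger event and the explicit filename are assumptions; the extract_filename field and the "extract-<source>-<id>" default come from the documentation in the hunk above):

    event file_new(f: fa_file)
        {
        # Extract every file Bro sees; omitting extract_filename would fall
        # back to the automatic "extract-<source>-<id>" name.
        Files::add_analyzer(f, Files::ANALYZER_EXTRACT,
                            [$extract_filename=fmt("extract-%s-%s", f$source, f$id)]);
        }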
1
scripts/base/files/hash/README
Normal file
|
@ -0,0 +1 @@
|
||||||
|
Support for file hashes with the file analysis framework.
|
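For illustration, a hedged sketch of enabling hashing for all files (the file_new trigger and the MD5 analyzer tag are assumptions based on the file analysis framework, not something stated in this README):

    event file_new(f: fa_file)
        {
        # Compute an MD5 digest for each file; results are reported through
        # the file analysis framework's hash events and logs.
        Files::add_analyzer(f, Files::ANALYZER_MD5);
        }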
1
scripts/base/files/unified2/README
Normal file
|
@ -0,0 +1 @@
|
||||||
|
Support for Unified2 files in the file analysis framework.
|
|
@ -42,17 +42,17 @@ export {
|
||||||
sensor_id: count &log;
|
sensor_id: count &log;
|
||||||
## Sig id for this generator.
|
## Sig id for this generator.
|
||||||
signature_id: count &log;
|
signature_id: count &log;
|
||||||
## A string representation of the "signature_id" field if a sid_msg.map file was loaded.
|
## A string representation of the *signature_id* field if a sid_msg.map file was loaded.
|
||||||
signature: string &log &optional;
|
signature: string &log &optional;
|
||||||
## Which generator generated the alert?
|
## Which generator generated the alert?
|
||||||
generator_id: count &log;
|
generator_id: count &log;
|
||||||
## A string representation of the "generator_id" field if a gen_msg.map file was loaded.
|
## A string representation of the *generator_id* field if a gen_msg.map file was loaded.
|
||||||
generator: string &log &optional;
|
generator: string &log &optional;
|
||||||
## Sig revision for this id.
|
## Sig revision for this id.
|
||||||
signature_revision: count &log;
|
signature_revision: count &log;
|
||||||
## Event classification.
|
## Event classification.
|
||||||
classification_id: count &log;
|
classification_id: count &log;
|
||||||
## Descriptive classification string,
|
## Descriptive classification string.
|
||||||
classification: string &log &optional;
|
classification: string &log &optional;
|
||||||
## Event priority.
|
## Event priority.
|
||||||
priority_id: count &log;
|
priority_id: count &log;
|
||||||
|
|
3
scripts/base/frameworks/analyzer/README
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
The analyzer framework allows one to dynamically enable or disable Bro's
|
||||||
|
protocol analyzers, as well as to manage the well-known ports which
|
||||||
|
automatically activate a particular analyzer for new connections.
|
|
@ -5,8 +5,8 @@
|
||||||
##! particular analyzer for new connections.
|
##! particular analyzer for new connections.
|
||||||
##!
|
##!
|
||||||
##! Protocol analyzers are identified by unique tags of type
|
##! Protocol analyzers are identified by unique tags of type
|
||||||
##! :bro:type:`Analyzer::Tag`, such as :bro:enum:`Analyzer::ANALYZER_HTTP` and
|
##! :bro:type:`Analyzer::Tag`, such as :bro:enum:`Analyzer::ANALYZER_HTTP`.
|
||||||
##! :bro:enum:`Analyzer::ANALYZER_HTTP`. These tags are defined internally by
|
##! These tags are defined internally by
|
||||||
##! the analyzers themselves, and documented in their analyzer-specific
|
##! the analyzers themselves, and documented in their analyzer-specific
|
||||||
##! description along with the events that they generate.
|
##! description along with the events that they generate.
|
||||||
|
|
||||||
|
@ -15,8 +15,8 @@
|
||||||
module Analyzer;
|
module Analyzer;
|
||||||
|
|
||||||
export {
|
export {
|
||||||
## If true, all available analyzers are initially disabled at startup. One
|
## If true, all available analyzers are initially disabled at startup.
|
||||||
## can then selectively enable them with
|
## One can then selectively enable them with
|
||||||
## :bro:id:`Analyzer::enable_analyzer`.
|
## :bro:id:`Analyzer::enable_analyzer`.
|
||||||
global disable_all = F &redef;
|
global disable_all = F &redef;
|
||||||
|
|
||||||
|
@ -45,7 +45,7 @@ export {
|
||||||
##
|
##
|
||||||
## ports: The set of well-known ports to associate with the analyzer.
|
## ports: The set of well-known ports to associate with the analyzer.
|
||||||
##
|
##
|
||||||
## Returns: True if the ports were sucessfully registered.
|
## Returns: True if the ports were successfully registered.
|
||||||
global register_for_ports: function(tag: Analyzer::Tag, ports: set[port]) : bool;
|
global register_for_ports: function(tag: Analyzer::Tag, ports: set[port]) : bool;
|
||||||
|
|
||||||
## Registers an individual well-known port for an analyzer. If a future
|
## Registers an individual well-known port for an analyzer. If a future
|
||||||
|
@ -57,7 +57,7 @@ export {
|
||||||
##
|
##
|
||||||
## p: The well-known port to associate with the analyzer.
|
## p: The well-known port to associate with the analyzer.
|
||||||
##
|
##
|
||||||
## Returns: True if the port was sucessfully registered.
|
## Returns: True if the port was successfully registered.
|
||||||
global register_for_port: function(tag: Analyzer::Tag, p: port) : bool;
|
global register_for_port: function(tag: Analyzer::Tag, p: port) : bool;
|
||||||
|
|
||||||
## Returns a set of all well-known ports currently registered for a
|
## Returns a set of all well-known ports currently registered for a
|
||||||
|
@ -88,8 +88,8 @@ export {
|
||||||
## Returns: The analyzer tag corresponding to the name.
|
## Returns: The analyzer tag corresponding to the name.
|
||||||
global get_tag: function(name: string): Analyzer::Tag;
|
global get_tag: function(name: string): Analyzer::Tag;
|
||||||
|
|
||||||
## Schedules an analyzer for a future connection originating from a given IP
|
## Schedules an analyzer for a future connection originating from a
|
||||||
## address and port.
|
## given IP address and port.
|
||||||
##
|
##
|
||||||
## orig: The IP address originating a connection in the future.
|
## orig: The IP address originating a connection in the future.
|
||||||
## 0.0.0.0 can be used as a wildcard to match any originator address.
|
## 0.0.0.0 can be used as a wildcard to match any originator address.
|
||||||
|
@ -103,7 +103,7 @@ export {
|
||||||
## tout: A timeout interval after which the scheduling request will be
|
## tout: A timeout interval after which the scheduling request will be
|
||||||
## discarded if the connection has not yet been seen.
|
## discarded if the connection has not yet been seen.
|
||||||
##
|
##
|
||||||
## Returns: True if succesful.
|
## Returns: True if successful.
|
||||||
global schedule_analyzer: function(orig: addr, resp: addr, resp_p: port,
|
global schedule_analyzer: function(orig: addr, resp: addr, resp_p: port,
|
||||||
analyzer: Analyzer::Tag, tout: interval) : bool;
|
analyzer: Analyzer::Tag, tout: interval) : bool;
|
||||||
|
|
||||||
|
|
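To make the registration and scheduling API above concrete, a short sketch (the port numbers and addresses are made up; the function signatures are taken from the declarations in the hunks above):

    event bro_init()
        {
        # Also treat a non-standard port as SSL.
        Analyzer::register_for_port(Analyzer::ANALYZER_SSL, 8443/tcp);

        # Expect a future connection to 10.0.0.1:4443/tcp, analyze it as SSL,
        # and drop the request if no such connection is seen within an hour.
        Analyzer::schedule_analyzer(0.0.0.0, 10.0.0.1, 4443/tcp,
                                    Analyzer::ANALYZER_SSL, 1hr);
        }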
2
scripts/base/frameworks/cluster/README
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
The cluster framework provides for establishing and controlling a cluster
|
||||||
|
of Bro instances.
|
|
@@ -39,7 +39,8 @@ export {
 ## The node type doing all the actual traffic analysis.
 WORKER,
 ## A node acting as a traffic recorder using the
-## `Time Machine <http://tracker.bro.org/time-machine>`_ software.
+## `Time Machine <http://bro.org/community/time-machine.html>`_
+## software.
 TIME_MACHINE,
 };
 
@@ -58,7 +59,7 @@ export {
 ## Events raised by workers and handled by a manager.
 const worker2manager_events = /(TimeMachine::command|Drop::.*)/ &redef;
 
-## Events raised by workers and handled by proxies..
+## Events raised by workers and handled by proxies.
 const worker2proxy_events = /EMPTY/ &redef;
 
 ## Events raised by TimeMachine instances and handled by a manager.
@@ -80,7 +81,7 @@ export {
 ## If the *ip* field is a non-global IPv6 address, this field
 ## can specify a particular :rfc:`4007` ``zone_id``.
 zone_id: string &default="";
-## The port to which the this local node can connect when
+## The port to which this local node can connect when
 ## establishing communication.
 p: port;
 ## Identifier for the interface a worker is sniffing.
@@ -119,6 +120,7 @@ export {
 ## The cluster layout definition. This should be placed into a filter
 ## named cluster-layout.bro somewhere in the BROPATH. It will be
 ## automatically loaded if the CLUSTER_NODE environment variable is set.
+## Note that BroControl handles all of this automatically.
 const nodes: table[string] of Node = {} &redef;
 
 ## This is usually supplied on the command line for each instance
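To make the ``Cluster::nodes`` table above concrete, a hypothetical cluster-layout.bro might look roughly like the following (node names, addresses, ports, and interface are made-up placeholders; BroControl normally generates this file automatically):

redef Cluster::nodes = {
	["manager"]  = [$node_type=Cluster::MANAGER, $ip=10.0.0.1, $p=47761/tcp,
	                $workers=set("worker-1")],
	["proxy-1"]  = [$node_type=Cluster::PROXY,   $ip=10.0.0.1, $p=47771/tcp,
	                $manager="manager", $workers=set("worker-1")],
	["worker-1"] = [$node_type=Cluster::WORKER,  $ip=10.0.0.2, $p=47781/tcp,
	                $manager="manager", $proxy="proxy-1", $interface="eth0"],
};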
@@ -19,6 +19,6 @@ redef Log::default_rotation_postprocessor_cmd = "delete-log";
 ## Record all packets into trace file.
 ##
 ## Note that this only indicates that *if* we are recording packets, we want all
-## of them (rather than just those the core deems sufficiently important). Setting
-## this does not turn recording on. Use '-w <trace>' for that.
+## of them (rather than just those the core deems sufficiently important).
+## Setting this does not turn recording on. Use '-w <trace>' for that.
 redef record_all_packets = T;
scripts/base/frameworks/communication/README (new file)
@@ -0,0 +1,2 @@
+The communication framework facilitates connecting to remote Bro or
+Broccoli instances to share state and transfer events.
@@ -15,13 +15,16 @@ export {
 ## are wildcards.
 const listen_interface = 0.0.0.0 &redef;
 
-## Which port to listen on.
+## Which port to listen on. Note that BroControl sets this
+## automatically.
 const listen_port = 47757/tcp &redef;
 
 ## This defines if a listening socket should use SSL.
 const listen_ssl = F &redef;
 
 ## Defines if a listening socket can bind to IPv6 addresses.
+##
+## Note that this is overridden by the BroControl IPv6Comm option.
 const listen_ipv6 = F &redef;
 
 ## If :bro:id:`Communication::listen_interface` is a non-global
@@ -42,10 +45,11 @@ export {
 type Info: record {
 ## The network time at which a communication event occurred.
 ts: time &log;
-## The peer name (if any) with which a communication event is concerned.
+## The peer name (if any) with which a communication event is
+## concerned.
 peer: string &log &optional;
-## Where the communication event message originated from, that is,
-## either from the scripting layer or inside the Bro process.
+## Where the communication event message originated from, that
+## is, either from the scripting layer or inside the Bro process.
 src_name: string &log &optional;
 ## .. todo:: currently unused.
 connected_peer_desc: string &log &optional;
@@ -71,8 +75,8 @@ export {
 ## can specify a particular :rfc:`4007` ``zone_id``.
 zone_id: string &optional;
 
-## Port of the remote Bro communication endpoint if we are initiating
-## the connection based on the :bro:id:`connect` field.
+## Port of the remote Bro communication endpoint if we are
+## initiating the connection (based on the *connect* field).
 p: port &optional;
 
 ## When accepting a connection, the configuration only
@@ -87,7 +91,7 @@ export {
 events: pattern &optional;
 
 ## Whether we are going to connect (rather than waiting
-## for the other sie to connect to us).
+## for the other side to connect to us).
 connect: bool &default = F;
 
 ## If disconnected, reconnect after this many seconds.
@@ -103,13 +107,14 @@ export {
 request_logs: bool &default = F;
 
 ## When performing state synchronization, whether we consider
-## our state to be authoritative. If so, we will send the peer
-## our current set when the connection is set up.
-## (Only one side can be authoritative)
+## our state to be authoritative (only one side can be
+## authoritative). If so, we will send the peer our current
+## set when the connection is set up.
 auth: bool &default = F;
 
 ## If not set, no capture filter is sent.
-## If set to "", the default capture filter is sent.
+## If set to an empty string, then the default capture filter
+## is sent.
 capture_filter: string &optional;
 
 ## Whether to use SSL-based communication.
@@ -126,7 +131,8 @@ export {
 };
 
 ## The table of Bro or Broccoli nodes that Bro will initiate connections
-## to or respond to connections from.
+## to or respond to connections from. Note that BroControl sets this
+## automatically.
 global nodes: table[string] of Node &redef;
 
 ## A table of peer nodes for which this node issued a
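As a rough illustration of the ``Communication::nodes`` table documented above, a manual peering entry might look like the following sketch (the node name, address, and field values are assumptions for the example; BroControl normally manages this table):

redef Communication::nodes += {
	["remote-bro"] = [$host=192.0.2.20, $p=47757/tcp,
	                  $connect=T, $retry=1min,
	                  $events=/Notice::.*/],
};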
scripts/base/frameworks/control/README (new file)
@@ -0,0 +1,3 @@
+The control framework provides the foundation for providing "commands"
+that can be taken remotely at runtime to modify a running Bro instance
+or collect information from the running instance.
@@ -57,7 +57,8 @@ export {
 ## Returns the current net_stats.
 global net_stats_response: event(s: string);
 
-## Inform the remote Bro instance that it's configuration may have been updated.
+## Inform the remote Bro instance that it's configuration may have been
+## updated.
 global configuration_update_request: event();
 ## This event is a wrapper and alias for the
 ## :bro:id:`Control::configuration_update_request` event.
scripts/base/frameworks/dpd/README (new file)
@@ -0,0 +1,2 @@
+The DPD (dynamic protocol detection) activates port-independent protocol
+detection and selectively disables analyzers if protocol violations occur.
scripts/base/frameworks/files/README (new file)
@@ -0,0 +1,3 @@
+The file analysis framework provides an interface for driving the analysis
+of files, possibly independent of any network protocol over which they're
+transported.
@@ -14,10 +14,11 @@ export {
 LOG
 };
 
-## A structure which represents a desired type of file analysis.
+## A structure which parameterizes a type of file analysis.
 type AnalyzerArgs: record {
 ## An event which will be generated for all new file contents,
-## chunk-wise. Used when *tag* is
+## chunk-wise. Used when *tag* (in the
+## :bro:see:`Files::add_analyzer` function) is
 ## :bro:see:`Files::ANALYZER_DATA_EVENT`.
 chunk_event: event(f: fa_file, data: string, off: count) &optional;
 
@@ -47,12 +48,12 @@ export {
 ## the data traveled to.
 rx_hosts: set[addr] &log;
 
-## Connection UIDS over which the file was transferred.
+## Connection UIDs over which the file was transferred.
 conn_uids: set[string] &log;
 
-## An identification of the source of the file data. E.g. it may be
-## a network protocol over which it was transferred, or a local file
-## path which was read, or some other input source.
+## An identification of the source of the file data. E.g. it
+## may be a network protocol over which it was transferred, or a
+## local file path which was read, or some other input source.
 source: string &log &optional;
 
 ## A value to represent the depth of this file in relation
@@ -64,9 +65,10 @@ export {
 ## A set of analysis types done during the file analysis.
 analyzers: set[string] &log;
 
-## A mime type provided by libmagic against the *bof_buffer*, or
-## in the cases where no buffering of the beginning of file occurs,
-## an initial guess of the mime type based on the first data seen.
+## A mime type provided by libmagic against the *bof_buffer*
+## field of :bro:see:`fa_file`, or in the cases where no
+## buffering of the beginning of file occurs, an initial
+## guess of the mime type based on the first data seen.
 mime_type: string &log &optional;
 
 ## A filename for the file if one is available from the source
@@ -79,12 +81,12 @@ export {
 
 ## If the source of this file is a network connection, this field
 ## indicates if the data originated from the local network or not as
-## determined by the configured bro:see:`Site::local_nets`.
+## determined by the configured :bro:see:`Site::local_nets`.
 local_orig: bool &log &optional;
 
 ## If the source of this file is a network connection, this field
-## indicates if the file is being sent by the originator of the connection
-## or the responder.
+## indicates if the file is being sent by the originator of the
+## connection or the responder.
 is_orig: bool &log &optional;
 
 ## Number of bytes provided to the file analysis engine for the file.
@@ -116,15 +118,15 @@ export {
 ## The salt concatenated to unique file handle strings generated by
 ## :bro:see:`get_file_handle` before hashing them in to a file id
 ## (the *id* field of :bro:see:`fa_file`).
-## Provided to help mitigate the possiblility of manipulating parts of
+## Provided to help mitigate the possibility of manipulating parts of
 ## network connections that factor in to the file handle in order to
 ## generate two handles that would hash to the same file id.
 const salt = "I recommend changing this." &redef;
 
 ## Sets the *timeout_interval* field of :bro:see:`fa_file`, which is
 ## used to determine the length of inactivity that is allowed for a file
-## before internal state related to it is cleaned up. When used within a
-## :bro:see:`file_timeout` handler, the analysis will delay timing out
+## before internal state related to it is cleaned up. When used within
+## a :bro:see:`file_timeout` handler, the analysis will delay timing out
 ## again for the period specified by *t*.
 ##
 ## f: the file.
@@ -132,7 +134,7 @@ export {
 ## t: the amount of time the file can remain inactive before discarding.
 ##
 ## Returns: true if the timeout interval was set, or false if analysis
-## for the *id* isn't currently active.
+## for the file isn't currently active.
 global set_timeout_interval: function(f: fa_file, t: interval): bool;
 
 ## Adds an analyzer to the analysis of a given file.
@@ -144,7 +146,7 @@ export {
 ## args: any parameters the analyzer takes.
 ##
 ## Returns: true if the analyzer will be added, or false if analysis
-## for the *id* isn't currently active or the *args*
+## for the file isn't currently active or the *args*
 ## were invalid for the analyzer type.
 global add_analyzer: function(f: fa_file,
                               tag: Files::Tag,
@@ -154,10 +156,12 @@ export {
 ##
 ## f: the file.
 ##
+## tag: the analyzer type.
+##
 ## args: the analyzer (type and args) to remove.
 ##
 ## Returns: true if the analyzer will be removed, or false if analysis
-## for the *id* isn't currently active.
+## for the file isn't currently active.
 global remove_analyzer: function(f: fa_file,
                                  tag: Files::Tag,
                                  args: AnalyzerArgs &default=AnalyzerArgs()): bool;
@@ -167,11 +171,12 @@ export {
 ## f: the file.
 ##
 ## Returns: true if analysis for the given file will be ignored for the
-## rest of it's contents, or false if analysis for the *id*
+## rest of its contents, or false if analysis for the file
 ## isn't currently active.
 global stop: function(f: fa_file): bool;
 
-## Translates an file analyzer enum value to a string with the analyzer's name.
+## Translates a file analyzer enum value to a string with the
+## analyzer's name.
 ##
 ## tag: The analyzer tag.
 ##
@@ -183,7 +188,7 @@ export {
 ##
 ## f: The file to be described.
 ##
-## Returns a text description regarding metadata of the file.
+## Returns: a text description regarding metadata of the file.
 global describe: function(f: fa_file): string;
 
 type ProtoRegistration: record {
@@ -209,10 +214,10 @@ export {
 ## Returns: true if the protocol being registered was not previously registered.
 global register_protocol: function(tag: Analyzer::Tag, reg: ProtoRegistration): bool;
 
-## Register a callback for file analyzers to use if they need to do some manipulation
-## when they are being added to a file before the core code takes over. This is
-## unlikely to be interesting for users and should only be called by file analyzer
-## authors but it *not required*.
+## Register a callback for file analyzers to use if they need to do some
+## manipulation when they are being added to a file before the core code
+## takes over. This is unlikely to be interesting for users and should
+## only be called by file analyzer authors but is *not required*.
 ##
 ## tag: Tag for the file analyzer.
 ##
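A small sketch of how the ``Files::add_analyzer`` and ``Files::set_timeout_interval`` calls documented above might be used from a policy script (the choice of the SHA1 hash analyzer and the two-minute timeout are illustrative assumptions):

event file_new(f: fa_file)
	{
	# Compute a SHA1 digest for every file the engine sees.
	Files::add_analyzer(f, Files::ANALYZER_SHA1);

	# Allow the file to stay inactive a bit longer before timing out.
	Files::set_timeout_interval(f, 2min);
	}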
scripts/base/frameworks/input/README (new file)
@@ -0,0 +1,2 @@
+The input framework provides a way to read previously stored data either as
+an event stream or into a Bro table.
@@ -33,45 +33,45 @@ export {
 ## that contain types that are not supported (at the moment
 ## file and function). If true, the input framework will
 ## warn in these cases, but continue. If false, it will
-## abort. Defaults to false (abort)
+## abort. Defaults to false (abort).
 const accept_unsupported_types = F &redef;
 
 ## TableFilter description type used for the `table` method.
 type TableDescription: record {
-## Common definitions for tables and events
+# Common definitions for tables and events
 
 ## String that allows the reader to find the source.
 ## For `READER_ASCII`, this is the filename.
 source: string;
 
-## Reader to use for this stream
+## Reader to use for this stream.
 reader: Reader &default=default_reader;
 
-## Read mode to use for this stream
+## Read mode to use for this stream.
 mode: Mode &default=default_mode;
 
-## Descriptive name. Used to remove a stream at a later time
+## Descriptive name. Used to remove a stream at a later time.
 name: string;
 
 # Special definitions for tables
 
-## Table which will receive the data read by the input framework
+## Table which will receive the data read by the input framework.
 destination: any;
 
-## Record that defines the values used as the index of the table
+## Record that defines the values used as the index of the table.
 idx: any;
 
-## Record that defines the values used as the elements of the table
-## If val is undefined, destination has to be a set.
+## Record that defines the values used as the elements of the table.
+## If this is undefined, then *destination* has to be a set.
 val: any &optional;
 
-## Defines if the value of the table is a record (default), or a single value. Val
-## can only contain one element when this is set to false.
+## Defines if the value of the table is a record (default), or a single value.
+## When this is set to false, then *val* can only contain one element.
 want_record: bool &default=T;
 
 ## The event that is raised each time a value is added to, changed in or removed
 ## from the table. The event will receive an Input::Event enum as the first
-## argument, the idx record as the second argument and the value (record) as the
+## argument, the *idx* record as the second argument and the value (record) as the
 ## third argument.
 ev: any &optional; # event containing idx, val as values.
 
@@ -88,19 +88,19 @@ export {
 
 ## EventFilter description type used for the `event` method.
 type EventDescription: record {
-## Common definitions for tables and events
+# Common definitions for tables and events
 
 ## String that allows the reader to find the source.
 ## For `READER_ASCII`, this is the filename.
 source: string;
 
-## Reader to use for this steam
+## Reader to use for this stream.
 reader: Reader &default=default_reader;
 
-## Read mode to use for this stream
+## Read mode to use for this stream.
 mode: Mode &default=default_mode;
 
-## Descriptive name. Used to remove a stream at a later time
+## Descriptive name. Used to remove a stream at a later time.
 name: string;
 
 # Special definitions for events
@@ -108,8 +108,8 @@ export {
 ## Record describing the fields to be retrieved from the source input.
 fields: any;
 
-## If want_record if false, the event receives each value in fields as a separate argument.
-## If it is set to true (default), the event receives all fields in a single record value.
+## If this is false, the event receives each value in fields as a separate argument.
+## If this is set to true (default), the event receives all fields in a single record value.
 want_record: bool &default=T;
 
 ## The event that is raised each time a new line is received from the reader.
@@ -122,23 +122,23 @@ export {
 config: table[string] of string &default=table();
 };
 
-## A file analyis input stream type used to forward input data to the
+## A file analysis input stream type used to forward input data to the
 ## file analysis framework.
 type AnalysisDescription: record {
 ## String that allows the reader to find the source.
 ## For `READER_ASCII`, this is the filename.
 source: string;
 
-## Reader to use for this steam. Compatible readers must be
+## Reader to use for this stream. Compatible readers must be
 ## able to accept a filter of a single string type (i.e.
 ## they read a byte stream).
 reader: Reader &default=Input::READER_BINARY;
 
-## Read mode to use for this stream
+## Read mode to use for this stream.
 mode: Mode &default=default_mode;
 
 ## Descriptive name that uniquely identifies the input source.
-## Can be used used to remove a stream at a later time.
+## Can be used to remove a stream at a later time.
 ## This will also be used for the unique *source* field of
 ## :bro:see:`fa_file`. Most of the time, the best choice for this
 ## field will be the same value as the *source* field.
@@ -150,38 +150,44 @@ export {
 config: table[string] of string &default=table();
 };
 
-## Create a new table input from a given source. Returns true on success.
+## Create a new table input from a given source.
 ##
 ## description: `TableDescription` record describing the source.
+##
+## Returns: true on success.
 global add_table: function(description: Input::TableDescription) : bool;
 
-## Create a new event input from a given source. Returns true on success.
+## Create a new event input from a given source.
 ##
-## description: `TableDescription` record describing the source.
+## description: `EventDescription` record describing the source.
+##
+## Returns: true on success.
 global add_event: function(description: Input::EventDescription) : bool;
 
 ## Create a new file analysis input from a given source. Data read from
 ## the source is automatically forwarded to the file analysis framework.
 ##
-## description: A record describing the source
+## description: A record describing the source.
 ##
-## Returns: true on sucess.
+## Returns: true on success.
 global add_analysis: function(description: Input::AnalysisDescription) : bool;
 
-## Remove a input stream. Returns true on success and false if the named stream was
-## not found.
+## Remove an input stream.
 ##
-## id: string value identifying the stream to be removed
+## id: string value identifying the stream to be removed.
+##
+## Returns: true on success and false if the named stream was not found.
 global remove: function(id: string) : bool;
 
 ## Forces the current input to be checked for changes.
-## Returns true on success and false if the named stream was not found
 ##
-## id: string value identifying the stream
+## id: string value identifying the stream.
+##
+## Returns: true on success and false if the named stream was not found.
 global force_update: function(id: string) : bool;
 
-## Event that is called, when the end of a data source has been reached, including
-## after an update.
+## Event that is called when the end of a data source has been reached,
+## including after an update.
 global end_of_data: event(name: string, source:string);
 }
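To ground the ``TableDescription`` fields documented above, a minimal sketch of reading a file into a table via ``Input::add_table`` (the record types, file name, and stream name are assumptions for the example):

type Idx: record {
	ip: addr;
};

type Val: record {
	reason: string;
};

global blocklist: table[addr] of Val = table();

event bro_init()
	{
	# Read the file into the table; idx/val describe its columns.
	Input::add_table([$source="blocklist.file", $name="blocklist",
	                  $idx=Idx, $val=Val, $destination=blocklist]);
	}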
@@ -6,11 +6,11 @@ module InputAscii;
 
 export {
 ## Separator between fields.
-## Please note that the separator has to be exactly one character long
+## Please note that the separator has to be exactly one character long.
 const separator = Input::separator &redef;
 
 ## Separator between set elements.
-## Please note that the separator has to be exactly one character long
+## Please note that the separator has to be exactly one character long.
 const set_separator = Input::set_separator &redef;
 
 ## String to use for empty fields.
@@ -1,23 +1,23 @@
-##! Interface for the ascii input reader.
+##! Interface for the benchmark input reader.
 
 module InputBenchmark;
 
 export {
-## multiplication factor for each second
+## Multiplication factor for each second.
 const factor = 1.0 &redef;
 
-## spread factor between lines
+## Spread factor between lines.
 const spread = 0 &redef;
 
-## spreading where usleep = 1000000 / autospread * num_lines
+## Spreading where usleep = 1000000 / autospread * num_lines
 const autospread = 0.0 &redef;
 
-## addition factor for each heartbeat
+## Addition factor for each heartbeat.
 const addfactor = 0 &redef;
 
-## stop spreading at x lines per heartbeat
+## Stop spreading at x lines per heartbeat.
 const stopspreadat = 0 &redef;
 
-## 1 -> enable timed spreading
+## 1 -> enable timed spreading.
 const timedspread = 0.0 &redef;
 }
@@ -4,14 +4,14 @@ module InputRaw;
 
 export {
 ## Separator between input records.
-## Please note that the separator has to be exactly one character long
+## Please note that the separator has to be exactly one character long.
 const record_separator = "\n" &redef;
 
 ## Event that is called when a process created by the raw reader exits.
 ##
-## name: name of the input stream
-## source: source of the input stream
-## exit_code: exit code of the program, or number of the signal that forced the program to exit
-## signal_exit: false when program exitted normally, true when program was forced to exit by a signal
+## name: name of the input stream.
+## source: source of the input stream.
+## exit_code: exit code of the program, or number of the signal that forced the program to exit.
+## signal_exit: false when program exited normally, true when program was forced to exit by a signal.
 global process_finished: event(name: string, source:string, exit_code:count, signal_exit:bool);
 }
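As a sketch of how the ``process_finished`` event declared above might be consumed from a policy script (the handler body is purely illustrative):

event InputRaw::process_finished(name: string, source: string,
                                 exit_code: count, signal_exit: bool)
	{
	# Report input processes that did not exit cleanly.
	if ( exit_code != 0 || signal_exit )
		print fmt("raw input %s (%s) ended abnormally (code %d)",
		          name, source, exit_code);
	}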
@@ -1,6 +1,12 @@
-##! Interface for the SQLite input reader.
+##! Interface for the SQLite input reader. Redefinable options are available
+##! to tweak the input format of the SQLite reader.
 ##!
-##! The defaults are set to match Bro's ASCII output.
+##! See :doc:`/frameworks/logging-input-sqlite` for an introduction on how to
+##! use the SQLite reader.
+##!
+##! When using the SQLite reader, you have to specify the SQL query that returns
+##! the desired data by setting ``query`` in the ``config`` table. See the
+##! introduction mentioned above for an example.
 
 module InputSQLite;
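A rough sketch of the ``query``/``config`` usage the new comment describes, reusing the hypothetical Idx/Val/blocklist definitions from the table-input sketch earlier (the database path and SQL statement are assumptions):

event bro_init()
	{
	# Pull rows from an SQLite database instead of an ASCII file.
	Input::add_table([$source="/var/db/blocklist", $name="blocklist-db",
	                  $idx=Idx, $val=Val, $destination=blocklist,
	                  $reader=Input::READER_SQLITE,
	                  $config=table(["query"] = "select ip, reason from blocklist;")]);
	}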
scripts/base/frameworks/intel/README (new file)
@@ -0,0 +1,3 @@
+The intelligence framework provides a way to store and query intelligence
+data (such as IP addresses or strings). Metadata can also be associated
+with the intelligence.
@@ -1,5 +1,5 @@
-##! Cluster transparency support for the intelligence framework. This is mostly oriented
-##! toward distributing intelligence information across clusters.
+##! Cluster transparency support for the intelligence framework. This is mostly
+##! oriented toward distributing intelligence information across clusters.
 
 @load base/frameworks/cluster
 @load ./input
Some files were not shown because too many files have changed in this diff.