Merge branch 'master' into topic/tunnels

Conflicts:
	doc/scripts/DocSourcesList.cmake
	scripts/base/init-bare.bro
	src/ConnCompressor.cc
	src/Sessions.cc

Just trying to bring topic/gregor/tunnel up to date in this new branch.
Compiles, but untested.
Jon Siwek 2012-04-18 16:59:49 -05:00
commit 4062fc1776
1252 changed files with 39817 additions and 45839 deletions

1
.gitignore vendored

@@ -1 +1,2 @@
 build
+tmp

3
.gitmodules vendored

@@ -13,3 +13,6 @@
 [submodule "aux/btest"]
 	path = aux/btest
 	url = git://git.bro-ids.org/btest
+[submodule "cmake"]
+	path = cmake
+	url = git://git.bro-ids.org/cmake

1261
CHANGES

File diff suppressed because it is too large


@@ -1,35 +1,6 @@
 project(Bro C CXX)
-if (NOT CMAKE_C_COMPILER)
-    message(FATAL_ERROR "Could not find prerequisite C compiler")
-endif ()
-if (NOT CMAKE_CXX_COMPILER)
-    message(FATAL_ERROR "Could not find prerequisite C++ compiler")
-endif ()
-########################################################################
-## CMake Configuration
-cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
-# Prohibit in-source builds.
-if ("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
-    message(FATAL_ERROR "In-source builds are not allowed. Please use "
-                        "./configure to choose a build directory and "
-                        "initialize the build configuration.")
-endif ()
-set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake)
-if ("${PROJECT_SOURCE_DIR}" STREQUAL "${CMAKE_SOURCE_DIR}")
-    # uninstall target
-    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in"
-                   "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake"
-                   @ONLY)
-    add_custom_target(uninstall COMMAND
-        ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
-endif ()
+cmake_minimum_required(VERSION 2.6.3 FATAL_ERROR)
+include(cmake/CommonCMakeConfig.cmake)
 ########################################################################
 ## Project/Build Configuration
@@ -61,25 +32,9 @@ list(GET version_numbers 0 VERSION_MAJOR)
 list(GET version_numbers 1 VERSION_MINOR)
 set(VERSION_MAJ_MIN "${VERSION_MAJOR}.${VERSION_MINOR}")
-set(EXTRA_COMPILE_FLAGS "-Wall -Wno-unused")
-if (ENABLE_DEBUG)
-    set(CMAKE_BUILD_TYPE Debug)
-    # manual add of -g works around its omission in FreeBSD's CMake port
-    set(EXTRA_COMPILE_FLAGS "${EXTRA_COMPILE_FLAGS} -g -DDEBUG")
-else ()
-    set(CMAKE_BUILD_TYPE RelWithDebInfo)
-endif ()
-# Compiler flags may already exist in CMake cache (e.g. when specifying
-# CFLAGS environment variable before running cmake for the the first time)
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_COMPILE_FLAGS}")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${EXTRA_COMPILE_FLAGS}")
 ########################################################################
 ## Dependency Configuration
-include(MacDependencyPaths)
 include(FindRequiredPackage)
 # Check cache value first to avoid displaying "Found sed" messages everytime
@@ -98,6 +53,8 @@ FindRequiredPackage(BISON)
 FindRequiredPackage(PCAP)
 FindRequiredPackage(OpenSSL)
 FindRequiredPackage(BIND)
+FindRequiredPackage(LibMagic)
+FindRequiredPackage(ZLIB)
 if (NOT BinPAC_ROOT_DIR AND
     EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/aux/binpac/CMakeLists.txt)
@@ -117,26 +74,12 @@ include_directories(BEFORE
     ${OpenSSL_INCLUDE_DIR}
     ${BIND_INCLUDE_DIR}
     ${BinPAC_INCLUDE_DIR}
+    ${LibMagic_INCLUDE_DIR}
+    ${ZLIB_INCLUDE_DIR}
 )
 # Optional Dependencies
-set(HAVE_LIBMAGIC false)
-find_package(LibMagic)
-if (LIBMAGIC_FOUND)
-    set(HAVE_LIBMAGIC true)
-    include_directories(BEFORE ${LibMagic_INCLUDE_DIR})
-    list(APPEND OPTLIBS ${LibMagic_LIBRARY})
-endif ()
-set(HAVE_LIBZ false)
-find_package(ZLIB)
-if (ZLIB_FOUND)
-    set(HAVE_LIBZ true)
-    include_directories(BEFORE ${ZLIB_INCLUDE_DIR})
-    list(APPEND OPTLIBS ${ZLIB_LIBRARY})
-endif ()
 set(USE_GEOIP false)
 find_package(LibGeoIP)
 if (LIBGEOIP_FOUND)
@@ -146,15 +89,39 @@ if (LIBGEOIP_FOUND)
 endif ()
 set(USE_PERFTOOLS false)
-if (ENABLE_PERFTOOLS)
-    find_package(GooglePerftools)
-    if (GOOGLEPERFTOOLS_FOUND)
-        set(USE_PERFTOOLS true)
-        include_directories(BEFORE ${GooglePerftools_INCLUDE_DIR})
+set(USE_PERFTOOLS_DEBUG false)
+find_package(GooglePerftools)
+if (GOOGLEPERFTOOLS_FOUND)
+    include_directories(BEFORE ${GooglePerftools_INCLUDE_DIR})
+    set(USE_PERFTOOLS true)
+    if (ENABLE_PERFTOOLS_DEBUG)
+        # Enable heap debugging with perftools.
+        set(USE_PERFTOOLS_DEBUG true)
+        list(APPEND OPTLIBS ${GooglePerftools_LIBRARIES_DEBUG})
+    else ()
+        # Link in tcmalloc for better performance.
         list(APPEND OPTLIBS ${GooglePerftools_LIBRARIES})
     endif ()
 endif ()
+if (ENABLE_PERFTOOLS_DEBUG)
+    # Just a no op to prevent CMake from complaining about manually-specified
+    # ENABLE_PERFTOOLS_DEBUG not being used if google perftools weren't found
+endif ()
+set(brodeps
+    ${BinPAC_LIBRARY}
+    ${PCAP_LIBRARY}
+    ${OpenSSL_LIBRARIES}
+    ${BIND_LIBRARY}
+    ${LibMagic_LIBRARY}
+    ${ZLIB_LIBRARY}
+    ${OPTLIBS}
+)
 ########################################################################
 ## System Introspection
@@ -229,23 +196,10 @@ message(
     "\nAux. Tools: ${INSTALL_AUX_TOOLS}"
     "\n"
     "\nGeoIP: ${USE_GEOIP}"
-    "\nlibz: ${HAVE_LIBZ}"
-    "\nlibmagic: ${HAVE_LIBMAGIC}"
     "\nGoogle perftools: ${USE_PERFTOOLS}"
+    "\n debugging: ${USE_PERFTOOLS_DEBUG}"
     "\n"
     "\n================================================================\n"
 )
-########################################################################
-## Show warning when installing user is different from the one that configured
-install(CODE "
-    if (NOT $ENV{USER} STREQUAL \$ENV{USER})
-        message(STATUS \"ATTENTION: Install is being performed by user \"
-                \"'\$ENV{USER}', but the build directory was configured by \"
-                \"user '$ENV{USER}'. This may result in a permissions error \"
-                \"when writing the install manifest, but you can ignore it \"
-                \"and consider the installation as successful if you don't \"
-                \"care about the install manifest.\")
-    endif ()
-")
+include(UserChangedWarning)

28
COPYING

@@ -1,11 +1,12 @@
-Copyright (c) 1995-2010, The Regents of the University of California,
-through Lawrence Berkeley National Laboratory. All rights reserved.
+Copyright (c) 1995-2012, The Regents of the University of California
+through the Lawrence Berkeley National Laboratory and the
+International Computer Science Institute. All rights reserved.
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:
-(1) Redistributions of source code must retain the above copyright notice,
-this list of conditions and the following disclaimer.
+(1) Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
 (2) Redistributions in binary form must reproduce the above copyright
 notice, this list of conditions and the following disclaimer in the
@@ -29,20 +30,5 @@ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 POSSIBILITY OF SUCH DAMAGE.
-Note that some files in the Bro distribution carry their own copyright
-notices. The above applies to the Bro scripts in policy/ (other than as
-noted below) and the source files in src/, other than:
-    policy/sigs/p0fsyn.osf
-    src/H3.h
-    src/OSFinger.cc
-    src/OSFinger.h
-    src/bsd-getopt-long.c
-    src/bsd-getopt-long.h
-    src/md5.c
-    src/md5.h
-    src/patricia.c
-    src/patricia.h
-In addition, other components, such as the build system, may have
-separate copyrights.
+Note that some files in the distribution may carry their own copyright
+notices.

129
INSTALL

@@ -8,98 +8,85 @@ Prerequisites
 Bro relies on the following libraries and tools, which need to be installed
 before you begin:
-* A C/C++ compiler
-* Libpcap headers and libraries
-  Network traffic capture library
-* Flex (Fast Lexical Analyzer)
-  Flex is already installed on most systems, so with luck you can
-  skip having to install it yourself.
-* Bison (GNU Parser Generator)
-  This comes with many systems, but if you get errors compiling
-  parse.y, you will need to install it.
-* Perl
-  Used only during the Bro build process
-* sed
-  Used only during the Bro build process
-* BIND8 headers and libraries
-  These are usually already installed as well.
-* OpenSSL headers and libraries
-  For analysis of SSL certificates by the HTTP analyzer, and
-  for encrypted Bro-to-Bro communication. These are likely installed,
-  though some platforms may require installation of a 'devel' package
-  for the headers.
-* CMake 2.6 or greater
-  CMake is a cross-platform, open-source build system, typically
-  not installed by default. See http://www.cmake.org for more
-  information regarding CMake and the installation steps below for
-  how to use it to build this distribution. CMake generates native
-  Makefiles that depend on GNU Make by default.
-Bro can also make uses of some optional libraries if they are found at
-installation time:
+    * CMake 2.6.3 or greater            http://www.cmake.org
+    * Libpcap (headers and libraries)   http://www.tcpdump.org
+    * OpenSSL (headers and libraries)   http://www.openssl.org
+    * SWIG                              http://www.swig.org
 * Libmagic
-  For identifying file types (e.g., in FTP transfers).
-* LibGeoIP
-  For geo-locating IP addresses.
 * Libz
-  For decompressing HTTP bodies by the HTTP analyzer, and for
-  compressed Bro-to-Bro communication.
+Bro can make uses of some optional libraries if they are found at
+installation time:
+    * LibGeoIP                          For geo-locating IP addresses.
+Bro also needs the following tools, but on most systems they will
+already come preinstalled:
+    * Bash (For Bro Control).
+    * BIND8 (headers and libraries)
+    * Bison (GNU Parser Generator)
+    * Flex (Fast Lexical Analyzer)
+    * Perl (Used only during the Bro build process)
 Installation
 ============
-To build and install into /usr/local/bro:
-    > ./configure
-    > cd build
-    > make
-    > make install
+To build and install into ``/usr/local/bro``::
+    ./configure
+    make
+    make install
-This will perform an out-of-source build into a directory called
-build/, using default build options. It then installs the Bro binary
-into /usr/local/bro/bin. Depending on the Bro package you
-downloaded, there may be auxiliary tools and libraries available in
-the aux/ directory. If so, they will be installed by default as well
-if not explicitly disabled via configure options and may also have
-additional installation/configuration instructions that you can
-find in their source directories.
+This will first build Bro into a directory inside the distribution
+called ``build/``, using default build options. It then installs all
+required files into ``/usr/local/bro``, including the Bro binary in
+``/usr/local/bro/bin/bro``.
-You can specify a different installation directory with
-    > ./configure --prefix=<dir>
+You can specify a different installation directory with::
+    ./configure --prefix=<dir>
-Note that "/usr" and "/opt/bro" are standard prefixes for binary
-packages to be installed, so those are typically not good choices
-unless you are creating such a package.
+Note that ``/usr`` and ``/opt/bro`` are the standard prefixes for
+binary Bro packages to be installed, so those are typically not good
+choices unless you are creating such a package.
-Run "./configure --help" for more options.
+Run ``./configure --help`` for more options.
+Depending on the Bro package you downloaded, there may be auxiliary
+tools and libraries available in the ``aux/`` directory. All of them
+except for ``aux/bro-aux`` will also be built and installed by doing
+``make install``. To install the programs that come in the
+``aux/bro-aux`` directory, use ``make install-aux``. There are
+``--disable-*`` options that can be given to the configure script to
+turn off unwanted auxiliary projects.
+OpenBSD users, please see our `FAQ
+<http://www.bro-ids.org/documentation/faq.html>` if you are having
+problems installing Bro.
 Running Bro
 ===========
 Bro is a complex program and it takes a bit of time to get familiar
-with it. In the following we give a few simple examples. See the
-quickstart guide at http://www.bro-ids.org for more information; you
-can the source that in doc/quick-start.
+with it. A good place for newcomers to start is the Quickstart Guide
+at http://www.bro-ids.org/documentation/quickstart.html.
-For developers that wish to run Bro from the the build/ directory
-after performing "make", but without performing "make install", they
-will have to first set BROPATH to look for scripts inside the build
-directory. Sourcing either build/bro-path-dev.sh or build/bro-path-dev.csh
-as appropriate for the current shell accomplishes this. e.g.:
+For developers that wish to run Bro directly from the ``build/``
+directory (i.e., without performing ``make install``), they will have
+to first adjust ``BROPATH`` to look for scripts inside the build
+directory. Sourcing either ``build/bro-path-dev.sh`` or
+``build/bro-path-dev.csh`` as appropriate for the current shell
+accomplishes this and also augments your ``PATH`` so you can use the
+Bro binary directly::
-    > ./configure
-    > make
-    > source build/bro-path-dev.sh
-    > ./build/src/bro
+    ./configure
+    make
+    source build/bro-path-dev.sh
+    bro <options>


@@ -2,47 +2,56 @@
 # A simple static wrapper for a number of standard Makefile targets,
 # mostly just forwarding to build/Makefile. This is provided only for
 # convenience and supports only a subset of what CMake's Makefile
-# to offer. For more, execute that one directly.
+# offers. For more, execute that one directly.
 #
-SOURCE=$(PWD)
-BUILD=$(SOURCE)/build
-TMP=/tmp/bro-dist.$(UID)
-BRO_V=`cat $(SOURCE)/VERSION`
-BROCCOLI_V=`cat $(SOURCE)/aux/broccoli/VERSION`
-BROCTL_V=`cat $(SOURCE)/aux/broctl/VERSION`
+BUILD=build
+REPO=`basename \`git config --get remote.origin.url\``
+VERSION_FULL=$(REPO)-`cat VERSION`
+VERSION_MIN=$(REPO)-`cat VERSION`-minimal
+HAVE_MODULES=git submodule | grep -v cmake >/dev/null
 all: configured
-	( cd $(BUILD) && make )
+	$(MAKE) -C $(BUILD) $@
-install: configured
-	( cd $(BUILD) && make install )
+install: configured all
+	$(MAKE) -C $(BUILD) $@
-clean: configured
-	( cd $(BUILD) && make clean )
-	( cd $(BUILD) && make docclean && make restclean )
+install-aux: configured
+	$(MAKE) -C $(BUILD) $@
+clean: configured docclean
+	$(MAKE) -C $(BUILD) $@
 doc: configured
-	( cd $(BUILD) && make doc )
+	$(MAKE) -C $(BUILD) $@
 docclean: configured
-	( cd $(BUILD) && make docclean && make restclean )
+	$(MAKE) -C $(BUILD) $@
+restdoc: configured
+	$(MAKE) -C $(BUILD) $@
+restclean: configured
+	$(MAKE) -C $(BUILD) $@
+broxygen: configured
+	$(MAKE) -C $(BUILD) $@
+broxygenclean: configured
+	$(MAKE) -C $(BUILD) $@
 dist:
-	@( mkdir -p $(BUILD) && rm -rf $(TMP) && mkdir $(TMP) )
-	@cp -R $(SOURCE) $(TMP)/Bro-$(BRO_V)
-	@( cd $(TMP) && find . -name .git\* | xargs rm -rf )
-	@( cd $(TMP) && find . -name \*.swp | xargs rm -rf )
-	@( cd $(TMP) && find . -type d -name build | xargs rm -rf )
-	@( cd $(TMP) && tar -czf $(BUILD)/Bro-all-$(BRO_V).tar.gz Bro-$(BRO_V) )
-	@( cd $(TMP)/Bro-$(BRO_V)/aux && mv broccoli Broccoli-$(BROCCOLI_V) && \
-	   tar -czf $(BUILD)/Broccoli-$(BROCCOLI_V).tar.gz Broccoli-$(BROCCOLI_V) )
-	@( cd $(TMP)/Bro-$(BRO_V)/aux && mv broctl Broctl-$(BROCTL_V) && \
-	   tar -czf $(BUILD)/Broctl-$(BROCTL_V).tar.gz Broctl-$(BROCTL_V) )
-	@( cd $(TMP)/Bro-$(BRO_V)/aux && rm -rf Broctl* Broccoli* )
-	@( cd $(TMP) && tar -czf $(BUILD)/Bro-$(BRO_V).tar.gz Bro-$(BRO_V) )
-	@rm -rf $(TMP)
-	@echo "Distribution source tarballs have been compiled in $(BUILD)"
+	@rm -rf $(VERSION_FULL) $(VERSION_FULL).tgz
+	@rm -rf $(VERSION_MIN) $(VERSION_MIN).tgz
+	@mkdir $(VERSION_FULL)
+	@tar --exclude=$(VERSION_FULL)* --exclude=$(VERSION_MIN)* --exclude=.git -cf - . | ( cd $(VERSION_FULL) && tar -xpf - )
+	@( cd $(VERSION_FULL) && cp -R ../.git . && git reset -q --hard HEAD && git clean -xdfq && rm -rf .git )
+	@tar -czf $(VERSION_FULL).tgz $(VERSION_FULL) && echo Package: $(VERSION_FULL).tgz && rm -rf $(VERSION_FULL)
+	@$(HAVE_MODULES) && mkdir $(VERSION_MIN) || exit 0
+	@$(HAVE_MODULES) && tar --exclude=$(VERSION_FULL)* --exclude=$(VERSION_MIN)* --exclude=.git `git submodule | awk '{print "--exclude="$$2}' | grep -v cmake | tr '\n' ' '` -cf - . | ( cd $(VERSION_MIN) && tar -xpf - ) || exit 0
+	@$(HAVE_MODULES) && ( cd $(VERSION_MIN) && cp -R ../.git . && git reset -q --hard HEAD && git clean -xdfq && rm -rf .git ) || exit 0
+	@$(HAVE_MODULES) && tar -czf $(VERSION_MIN).tgz $(VERSION_MIN) && echo Package: $(VERSION_MIN).tgz && rm -rf $(VERSION_MIN) || exit 0
 bindist:
 	@( cd pkg && ( ./make-deb-packages || ./make-mac-packages || \
@@ -51,6 +60,9 @@ bindist:
 distclean:
 	rm -rf $(BUILD)
+test:
+	@(cd testing && make )
 configured:
 	@test -d $(BUILD) || ( echo "Error: No build/ directory found. Did you run configure?" && exit 1 )
 	@test -e $(BUILD)/Makefile || ( echo "Error: No build/Makefile found. Did you run configure?" && exit 1 )

108
NEWS Normal file

@@ -0,0 +1,108 @@
Release Notes
=============
This document summarizes the most important changes in the current Bro
release. For a complete list of changes, see the ``CHANGES`` file.
Bro 2.1
-------
- Dependencies:
* Bro now requires CMake >= 2.6.3.
* Bro now links in tcmalloc (part of Google perftools) if found at
configure time. Doing so can significantly improve memory and
CPU use.
- Bro now supports IPv6 out of the box; the configure switch
--enable-brov6 is gone.
- DNS name lookups performed by Bro now also query AAAA records. The
results of the A and AAAA queries for a given hostname are combined
such that at the scripting layer, the name resolution can yield a
set with both IPv4 and IPv6 addresses.
- The connection compressor was already deprecated in 2.0 and has now
been removed from the code base.
- We removed the "match" statement, which was no longer used by any of
the default scripts, nor was it likely to be used by anybody anytime
soon. With that, "match" and "using" are no longer reserved keywords.
- The syntax for IPv6 literals changed from "2607:f8b0:4009:802::1012"
to "[2607:f8b0:4009:802::1012]".
- Bro now spawns threads for doing its logging. From a user's
perspective not much should change, except that the OS may now show
a bunch of Bro threads.
- We renamed the configure option --enable-perftools to
--enable-perftools-debug to indicate that the switch is only relevant
for debugging the heap.
- Bro's ICMP analyzer now handles both IPv4 and IPv6 messages with a
joint set of events. The `icmp_conn` record got a new boolean field
'v6' that indicates whether the ICMP message is v4 or v6.
TODO: Extend.
Bro 2.0
-------
As the version number jump suggests, Bro 2.0 is a major upgrade and
lots of things have changed. We have assembled a separate upgrade
guide with the most important changes compared to Bro 1.5 at
http://www.bro-ids.org/documentation/upgrade.html. You can find
the offline version of that document in ``doc/upgrade.rst``.
Compared to the earlier 2.0 Beta version, the major changes in the
final release are:
* The default scripts now come with complete reference
documentation. See
http://www.bro-ids.org/documentation/index.html.
* libz and libmagic are now required dependencies.
* Reduced the snaplen default from 65535 to the old default of 8192. The
large value was introducing performance problems on many
systems.
* Replaced the --snaplen/-l command line option with a
scripting-layer option called "snaplen". The new option can also
be redefined on the command line, e.g. ``bro -i eth0
snaplen=65535``.
* Reintroduced the BRO_LOG_SUFFIX environment variable that the
ASCII logger now respects to add a suffix to the log files it
creates.
* The ASCII logs now include further header information, and
fields set to an empty value are now logged as ``(empty)`` by
default (instead of ``-``, which is already used for fields that
are not set at all).
* Some NOTICES were renamed, and the signatures of some SSL events
have changed.
* bro-cut got some new capabilities:
- If no field names are given on the command line, we now pass
through all fields.
- New options -u/-U for time output in UTC.
- New option -F to give output field separator.
* Broccoli supports more types internally, allowing to send
complex records.
* Many smaller bug fixes, portability improvements, and general
polishing across all modules.

38
README

@@ -1,31 +1,21 @@
-This is release 1.6 of Bro, a system for detecting network intruders in
-real-time using passive network monitoring.
-Please see the file INSTALL for installation instructions and
-pointers for getting started. For more documentation, see the
-documentation on Bro's home page:
-    http://www.bro-ids.org/docs
-The main parts of Bro's documentation are also available in the doc/
-directory of the distribution. (Please note that the documentation
-is still a work in progress; there will be more in future releases.)
-Numerous other Bro-related publications, including a paper describing the
-system, can be found at
-    http://www.bro-ids.org/publications.html
-Send comments, etc., to the Bro mailing list, bro@bro-ids.org.
-However, please note that you must first subscribe to the list in
-order to be able to post to it.
-- Vern Paxson & Robin Sommer, on behalf of the Bro development team
+============================
+Bro Network Security Monitor
+============================
+Bro is a powerful framework for network analysis and security
+monitoring. Please see the INSTALL file for installation instructions
+and pointers for getting started. NEWS contains release notes for the
+current version, and CHANGES has the complete history of changes.
+Please see COPYING for licensing information.
+For more documentation, research publications, and community contact
+information, please see Bro's home page:
+    http://www.bro-ids.org
+On behalf of the Bro Development Team,
+    Vern Paxson & Robin Sommer,
+    International Computer Science Institute &
     Lawrence Berkeley National Laboratory
-    University of California, Berkeley USA
-    ICSI Center for Internet Research (ICIR)
-    International Computer Science Institute
-    Berkeley, CA USA
     vern@icir.org / robin@icir.org


@@ -1 +1 @@
-1.6-dev-1120
+2.0-301

@@ -1 +1 @@
-Subproject commit a3a9410dedc842f6bb9859642f334ed354633b57
+Subproject commit 71c37019bc371eb7863fb6aa47a7daa4540f4f1f

@@ -1 +1 @@
-Subproject commit d68b98bb995a105b257f805ec4ff22c4929c7476
+Subproject commit d885987e7968669e34504b0403ac89bd13928e9a

@@ -1 +1 @@
-Subproject commit 743f10dda8cd5655ea3dc6eb705ff5414ed4f535
+Subproject commit bead1168ae9c2d2ae216dd58522fbc05498ff2c8

@@ -1 +1 @@
-Subproject commit cf4ce9dfc5d6dc4e6d311955eeaec2d679e8669b
+Subproject commit 44cc3de5f6f98a86b2516bdc48dd168e6a6a28fd

@@ -1 +1 @@
-Subproject commit d1c620d98ce9d9c0b203314108b413784965d2ed
+Subproject commit 1897d224ce295e91d20e458851759c99734a0a74

1
cmake Submodule

@@ -0,0 +1 @@
+Subproject commit 49278736c1404cb8c077272b80312c947e68bf52


@@ -1,87 +0,0 @@
# Calling this macro with the name of a list variable will modify that
# list such that any third party libraries that do not come with a
# vanilla Mac OS X system will be replaced by an adjusted library that
# has an install_name relative to the location of any executable that
# links to it.
#
# Also, it will schedule the modified libraries for installation in a
# 'support_libs' subdirectory of the CMAKE_INSTALL_PREFIX.
#
# The case of third party libraries depending on other third party
# libraries is currently not handled by this macro.
#
# Ex.
#
# set(libs /usr/lib/libz.dylib
# /usr/lib/libssl.dylib
# /usr/local/lib/libmagic.dylib
# /usr/local/lib/libGeoIP.dylib
# /usr/local/lib/somestaticlib.a)
#
# include(ChangeMacInstallNames)
# ChangeMacInstallNames(libs)
#
# Should result in ${libs} containing:
# /usr/lib/libz.dylib
# /usr/lib/libssl.dylib
# ${CMAKE_BINARY_DIR}/darwin_support_libs/libmagic.dylib
# ${CMAKE_BINARY_DIR}/darwin_support_libs/libGeoIP.dylib
# /usr/local/lib/somestaticlib.a
#
# such that we can now do:
#
# add_executable(some_exe ${srcs})
# target_link_libraries(some_exe ${libs})
#
# Any binary packages created from such a build should be self-contained
# and provide working installs on vanilla OS X systems.
macro(ChangeMacInstallNames libListVar)
if (APPLE)
find_program(INSTALL_NAME_TOOL install_name_tool)
set(MAC_INSTALL_NAME_DEPS)
set(SUPPORT_BIN_DIR ${CMAKE_BINARY_DIR}/darwin_support_libs)
set(SUPPORT_INSTALL_DIR support_libs)
file(MAKE_DIRECTORY ${SUPPORT_BIN_DIR})
foreach (_lib ${${libListVar}})
# only care about install_name for shared libraries that are
# not shipped in Apple's vanilla OS X installs
string(REGEX MATCH ^/usr/lib/* apple_provided_lib ${_lib})
string(REGEX MATCH dylib$ is_shared_lib ${_lib})
if (NOT apple_provided_lib AND is_shared_lib)
get_filename_component(_libname ${_lib} NAME)
set(_adjustedLib ${SUPPORT_BIN_DIR}/${_libname})
set(_tmpLib
${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${_libname})
# make a tempory copy so we can adjust permissions
configure_file(${_lib} ${_tmpLib} COPYONLY)
# copy to build directory with correct write permissions
file(COPY ${_tmpLib}
DESTINATION ${SUPPORT_BIN_DIR}
FILE_PERMISSIONS OWNER_READ OWNER_WRITE
GROUP_READ WORLD_READ)
# remove the old library from the list provided as macro
# argument and add the new library with modified install_name
list(REMOVE_ITEM ${libListVar} ${_lib})
list(APPEND ${libListVar} ${_adjustedLib})
# update the install target to install the third party libs
# with modified install_name
install(FILES ${_adjustedLib}
DESTINATION ${SUPPORT_INSTALL_DIR})
# perform the install_name change
execute_process(COMMAND install_name_tool -id
@executable_path/../${SUPPORT_INSTALL_DIR}/${_libname}
${_adjustedLib})
endif ()
endforeach ()
endif ()
endmacro()


@@ -1,12 +0,0 @@
include(CheckFunctionExists)
check_function_exists(getopt_long HAVE_GETOPT_LONG)
check_function_exists(mallinfo HAVE_MALLINFO)
check_function_exists(strcasestr HAVE_STRCASESTR)
check_function_exists(strerror HAVE_STRERROR)
check_function_exists(strsep HAVE_STRSEP)
check_function_exists(sigset HAVE_SIGSET)
if (NOT HAVE_SIGSET)
check_function_exists(sigaction HAVE_SIGACTION)
endif ()
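These checks only record HAVE_* results in the CMake cache; a minimal sketch of how such results typically reach the compiler, assuming a config.h.in template with matching #cmakedefine entries (the template itself is not part of this listing):

    # config.h.in is assumed to contain lines such as:
    #   #cmakedefine HAVE_MALLINFO
    #   #cmakedefine HAVE_STRSEP
    configure_file(${CMAKE_CURRENT_SOURCE_DIR}/config.h.in
                   ${CMAKE_CURRENT_BINARY_DIR}/config.h)
    include_directories(${CMAKE_CURRENT_BINARY_DIR})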


@@ -1,28 +0,0 @@
include(CheckIncludeFiles)
include(CheckStructHasMember)
check_include_files(getopt.h HAVE_GETOPT_H)
check_include_files(magic.h HAVE_MAGIC_H)
check_include_files(memory.h HAVE_MEMORY_H)
check_include_files("sys/socket.h;netinet/in.h;net/if.h;netinet/if_ether.h"
HAVE_NETINET_IF_ETHER_H)
check_include_files("sys/socket.h;netinet/in.h;net/if.h;netinet/ip6.h"
HAVE_NETINET_IP6_H)
check_include_files("sys/socket.h;net/if.h;net/ethernet.h" HAVE_NET_ETHERNET_H)
check_include_files(sys/ethernet.h HAVE_SYS_ETHERNET_H)
check_include_files(sys/time.h HAVE_SYS_TIME_H)
check_include_files("time.h;sys/time.h" TIME_WITH_SYS_TIME)
check_include_files(os-proto.h HAVE_OS_PROTO_H)
check_struct_has_member(HISTORY_STATE entries "stdio.h;readline/readline.h"
HAVE_READLINE_HISTORY_ENTRIES)
check_include_files("stdio.h;readline/readline.h" HAVE_READLINE_READLINE_H)
check_include_files("stdio.h;readline/history.h" HAVE_READLINE_HISTORY_H)
if (HAVE_READLINE_READLINE_H AND
HAVE_READLINE_HISTORY_H AND
HAVE_READLINE_HISTORY_ENTRIES)
set(HAVE_READLINE true)
endif ()
check_struct_has_member("struct sockaddr_in" sin_len "netinet/in.h" SIN_LEN)
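The readline result above is a typical optional-feature gate; a hedged sketch of how HAVE_READLINE might be consumed (the OPTLIBS list and the compile definition are illustrative assumptions, not taken from this commit):

    if (HAVE_READLINE)
        # Example: enable line editing in components that can use it.
        add_definitions(-DHAVE_READLINE)
        list(APPEND OPTLIBS readline)
    endif ()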


@@ -1,21 +0,0 @@
include(CheckCSourceCompiles)
# Check whether the namser compatibility header is required
# This can be the case on the Darwin platform
check_c_source_compiles("
#include <arpa/nameser.h>
int main() { HEADER *hdr; int d = NS_IN6ADDRSZ; return 0; }"
have_nameser_header)
if (NOT have_nameser_header)
check_c_source_compiles("
#include <arpa/nameser.h>
#include <arpa/nameser_compat.h>
int main() { HEADER *hdr; int d = NS_IN6ADDRSZ; return 0; }"
NEED_NAMESER_COMPAT_H)
if (NOT NEED_NAMESER_COMPAT_H)
message(FATAL_ERROR
"Asynchronous DNS support compatibility check failed.")
endif ()
endif ()


@@ -1,21 +0,0 @@
# A macro that checks whether optional sources exist and if they do, they
# are added to the build/install process, else a warning is issued
#
# _dir: the subdir of the current source dir in which the optional
# sources are located
# _packageName: a string that identifies the package
# _varName: name of the variable indicating whether package is scheduled
# to be installed
macro(CheckOptionalBuildSources _dir _packageName _varName)
if (${_varName})
if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${_dir}/CMakeLists.txt)
add_subdirectory(${_dir})
else ()
message(WARNING "${_packageName} source code does not exist in "
"${CMAKE_CURRENT_SOURCE_DIR}/${_dir} "
"so it will not be built or installed")
set(${_varName} false)
endif ()
endif ()
endmacro(CheckOptionalBuildSources)
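A hypothetical invocation, assuming an optional auxiliary project under aux/ and an INSTALL_* switch set elsewhere by the build configuration:

    # Descend into aux/broctl only when its CMakeLists.txt is present
    # and installation of that component was requested.
    CheckOptionalBuildSources(aux/broctl Broctl INSTALL_BROCTL)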


@@ -1,31 +0,0 @@
include(CheckTypeSize)
check_type_size("long int" SIZEOF_LONG_INT)
check_type_size("long long" SIZEOF_LONG_LONG)
check_type_size("void *" SIZEOF_VOID_P)
# checks existence of ${_type}, and if it does not, sets CMake variable ${_var}
# to alternative type, ${_alt_type}
macro(CheckType _type _alt_type _var)
# don't perform check if we have a result from a previous CMake run
if (NOT HAVE_${_var})
check_type_size(${_type} ${_var})
if (NOT ${_var})
set(${_var} ${_alt_type})
else ()
unset(${_var})
unset(${_var} CACHE)
endif ()
endif ()
endmacro(CheckType _type _alt_type _var)
set(CMAKE_EXTRA_INCLUDE_FILES sys/types.h)
CheckType(int32_t int int32_t)
CheckType(u_int32_t u_int u_int32_t)
CheckType(u_int16_t u_short u_int16_t)
CheckType(u_int8_t u_char u_int8_t)
set(CMAKE_EXTRA_INCLUDE_FILES)
set(CMAKE_EXTRA_INCLUDE_FILES sys/socket.h)
CheckType(socklen_t int socklen_t)
set(CMAKE_EXTRA_INCLUDE_FILES)


@@ -1,238 +0,0 @@
# A collection of macros to assist in configuring CMake/Cpack
# source and binary packaging
# Sets CPack version variables by splitting the first macro argument
# using "." as a delimiter. If the length of the split list is
# greater than 2, all remaining elements are tacked on to the patch
# level version. Not that the version set by the macro is internal
# to binary packaging, the file name of our package will reflect the
# exact version number.
macro(SetPackageVersion _version)
string(REPLACE "." " " version_numbers ${_version})
separate_arguments(version_numbers)
list(GET version_numbers 0 CPACK_PACKAGE_VERSION_MAJOR)
list(REMOVE_AT version_numbers 0)
list(GET version_numbers 0 CPACK_PACKAGE_VERSION_MINOR)
list(REMOVE_AT version_numbers 0)
list(LENGTH version_numbers version_length)
while (version_length GREATER 0)
list(GET version_numbers 0 patch_level)
if (CPACK_PACKAGE_VERSION_PATCH)
set(CPACK_PACKAGE_VERSION_PATCH
"${CPACK_PACKAGE_VERSION_PATCH}.${patch_level}")
else ()
set(CPACK_PACKAGE_VERSION_PATCH ${patch_level})
endif ()
list(REMOVE_AT version_numbers 0)
list(LENGTH version_numbers version_length)
endwhile ()
if (APPLE)
# Mac PackageMaker package requires only numbers in the versioning
string(REGEX REPLACE "[_a-zA-Z-]" "" CPACK_PACKAGE_VERSION_MAJOR
${CPACK_PACKAGE_VERSION_MAJOR})
string(REGEX REPLACE "[_a-zA-Z-]" "" CPACK_PACKAGE_VERSION_MINOR
${CPACK_PACKAGE_VERSION_MINOR})
if (CPACK_PACKAGE_VERSION_PATCH)
string(REGEX REPLACE "[_a-zA-Z-]" "" CPACK_PACKAGE_VERSION_PATCH
${CPACK_PACKAGE_VERSION_PATCH})
endif ()
endif ()
if (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
# RPM version accepts letters, but not dashes.
string(REGEX REPLACE "[-]" "" CPACK_PACKAGE_VERSION_MAJOR
${CPACK_PACKAGE_VERSION_MAJOR})
string(REGEX REPLACE "[-]" "" CPACK_PACKAGE_VERSION_MINOR
${CPACK_PACKAGE_VERSION_MINOR})
if (CPACK_PACKAGE_VERSION_PATCH)
string(REGEX REPLACE "[-]" "" CPACK_PACKAGE_VERSION_PATCH
${CPACK_PACKAGE_VERSION_PATCH})
endif ()
endif ()
# Minimum supported OS X version
set(CPACK_OSX_PACKAGE_VERSION 10.5)
endmacro(SetPackageVersion)
# Sets the list of desired package types to be created by the make
# package target. A .tar.gz is only made for source packages, and
# binary pacakage format depends on the operating system:
#
# Darwin - PackageMaker
# Linux - RPM if the platform has rpmbuild installed
# DEB if the platform has dpkg-shlibdeps installed
#
# CPACK_GENERATOR is set by this macro
# CPACK_SOURCE_GENERATOR is set by this macro
macro(SetPackageGenerators)
set(CPACK_SOURCE_GENERATOR TGZ)
#set(CPACK_GENERATOR TGZ)
if (APPLE)
list(APPEND CPACK_GENERATOR PackageMaker)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
find_program(RPMBUILD_EXE rpmbuild)
find_program(DPKGSHLIB_EXE dpkg-shlibdeps)
if (RPMBUILD_EXE)
set(CPACK_GENERATOR ${CPACK_GENERATOR} RPM)
endif ()
if (DPKGSHLIB_EXE)
set(CPACK_GENERATOR ${CPACK_GENERATOR} DEB)
set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS true)
endif ()
endif ()
endmacro(SetPackageGenerators)
# Sets CPACK_PACKAGE_FILE_NAME in the following format:
#
# <project_name>-<version>-<OS/platform>-<arch>
#
# and CPACK_SOURCE_PACKAGE_FILE_NAME as:
#
# <project_name>-<version>
macro(SetPackageFileName _version)
if (PACKAGE_NAME_PREFIX)
set(CPACK_PACKAGE_FILE_NAME "${PACKAGE_NAME_PREFIX}-${_version}")
set(CPACK_SOURCE_PACKAGE_FILE_NAME "${PACKAGE_NAME_PREFIX}-${_version}")
else ()
set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}-${_version}")
set(CPACK_SOURCE_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}-${_version}")
endif ()
set(CPACK_PACKAGE_FILE_NAME
"${CPACK_PACKAGE_FILE_NAME}-${CMAKE_SYSTEM_NAME}")
if (APPLE)
# Only Intel-based Macs are supported. CMAKE_SYSTEM_PROCESSOR may
# return the confusing 'i386' if running a 32-bit kernel, but chances
# are the binary is x86_64 (or more generally 'Intel') compatible.
set(arch "Intel")
else ()
set (arch ${CMAKE_SYSTEM_PROCESSOR})
endif ()
set(CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_FILE_NAME}-${arch}")
endmacro(SetPackageFileName)
# Sets up binary package metadata
macro(SetPackageMetadata)
set(CPACK_PACKAGE_VENDOR "Lawrence Berkeley National Laboratory")
set(CPACK_PACKAGE_CONTACT "info@bro-ids.org")
set(CPACK_PACKAGE_DESCRIPTION_SUMMARY
"The Bro Network Intrusion Detection System")
# CPack may enforce file name extensions for certain package generators
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/README
${CMAKE_CURRENT_BINARY_DIR}/README.txt
COPYONLY)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/COPYING
${CMAKE_CURRENT_BINARY_DIR}/COPYING.txt
COPYONLY)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/MAC_PACKAGE_INTRO
${CMAKE_CURRENT_BINARY_DIR}/MAC_PACKAGE_INTRO.txt)
set(CPACK_PACKAGE_DESCRIPTION_FILE ${CMAKE_CURRENT_BINARY_DIR}/README.txt)
set(CPACK_RESOURCE_FILE_LICENSE ${CMAKE_CURRENT_BINARY_DIR}/COPYING.txt)
set(CPACK_RESOURCE_FILE_README ${CMAKE_CURRENT_BINARY_DIR}/README.txt)
set(CPACK_RESOURCE_FILE_WELCOME
${CMAKE_CURRENT_BINARY_DIR}/MAC_PACKAGE_INTRO.txt)
set(CPACK_RPM_PACKAGE_LICENSE "BSD")
endmacro(SetPackageMetadata)
# Sets pre and post install scripts for PackageMaker packages.
# The main functionality that such scripts offer is a way to make backups
# of "configuration" files that a user may have modified.
# Note that RPMs already have a robust mechanism for dealing with
# user-modified files, so we do not need this additional functionality
macro(SetPackageInstallScripts VERSION)
if (INSTALLED_CONFIG_FILES)
# Remove duplicates from the list of installed config files
separate_arguments(INSTALLED_CONFIG_FILES)
list(REMOVE_DUPLICATES INSTALLED_CONFIG_FILES)
# Space delimit the list again
foreach (_file ${INSTALLED_CONFIG_FILES})
set(_tmp "${_tmp} ${_file}")
endforeach ()
set(INSTALLED_CONFIG_FILES "${_tmp}" CACHE STRING "" FORCE)
endif ()
if (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
# DEB packages can automatically handle configuration files
# if provided in a "conffiles" file in the packaging
set(conffiles_file ${CMAKE_CURRENT_BINARY_DIR}/conffiles)
if (INSTALLED_CONFIG_FILES)
string(REPLACE " " ";" conffiles ${INSTALLED_CONFIG_FILES})
endif ()
file(WRITE ${conffiles_file} "")
foreach (_file ${conffiles})
file(APPEND ${conffiles_file} "${_file}\n")
endforeach ()
list(APPEND CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA
${CMAKE_CURRENT_BINARY_DIR}/conffiles)
# RPMs don't need any explicit direction regarding config files.
# Leaving the set of installed config files empty will just
# bypass the logic in the default pre/post install scripts and let
# the RPMs/DEBs do their own thing (regarding backups, etc.)
# when upgrading packages.
set(INSTALLED_CONFIG_FILES "")
endif ()
if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/cmake/package_preinstall.sh.in)
configure_file(
${CMAKE_CURRENT_SOURCE_DIR}/cmake/package_preinstall.sh.in
${CMAKE_CURRENT_BINARY_DIR}/package_preinstall.sh
@ONLY)
configure_file(
${CMAKE_CURRENT_SOURCE_DIR}/cmake/package_preinstall.sh.in
${CMAKE_CURRENT_BINARY_DIR}/preinst
@ONLY)
set(CPACK_PREFLIGHT_SCRIPT
${CMAKE_CURRENT_BINARY_DIR}/package_preinstall.sh)
set(CPACK_RPM_PRE_INSTALL_SCRIPT_FILE
${CMAKE_CURRENT_BINARY_DIR}/package_preinstall.sh)
list(APPEND CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA
${CMAKE_CURRENT_BINARY_DIR}/preinst)
endif ()
if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/cmake/package_postupgrade.sh.in)
configure_file(
${CMAKE_CURRENT_SOURCE_DIR}/cmake/package_postupgrade.sh.in
${CMAKE_CURRENT_BINARY_DIR}/package_postupgrade.sh
@ONLY)
configure_file(
${CMAKE_CURRENT_SOURCE_DIR}/cmake/package_postupgrade.sh.in
${CMAKE_CURRENT_BINARY_DIR}/postinst
@ONLY)
set(CPACK_POSTUPGRADE_SCRIPT
${CMAKE_CURRENT_BINARY_DIR}/package_postupgrade.sh)
set(CPACK_RPM_POST_INSTALL_SCRIPT_FILE
${CMAKE_CURRENT_BINARY_DIR}/package_postupgrade.sh)
list(APPEND CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA
${CMAKE_CURRENT_BINARY_DIR}/postinst)
endif ()
endmacro(SetPackageInstallScripts)
# Main macro to configure all the packaging options
macro(ConfigurePackaging _version)
SetPackageVersion(${_version})
SetPackageGenerators()
SetPackageFileName(${_version})
SetPackageMetadata()
SetPackageInstallScripts(${_version})
set(CPACK_SET_DESTDIR true)
set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX})
# add default files/directories to ignore for source package
# user may specify others via configure script
list(APPEND CPACK_SOURCE_IGNORE_FILES ${CMAKE_BINARY_DIR} ".git")
include(CPack)
endmacro(ConfigurePackaging)
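Usage is a single call from a top-level CMakeLists.txt, e.g. (assuming VERSION already holds the version string read from the VERSION file):

    # Wires up CPack so that "make package" and "make package_source" work.
    ConfigurePackaging(${VERSION})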


@@ -1,101 +0,0 @@
# - Try to find libpcap include dirs and libraries
#
# Usage of this module as follows:
#
# find_package(BIND)
#
# Variables used by this module, they can change the default behaviour and need
# to be set before calling find_package:
#
# BIND_ROOT_DIR Set this variable to the root installation of BIND
# if the module has problems finding the proper
# installation path.
#
# Variables defined by this module:
#
# BIND_FOUND System has BIND, include and library dirs found
# BIND_INCLUDE_DIR The BIND include directories.
# BIND_LIBRARY The BIND library (if any) required for
# ns_inittab and res_mkquery symbols
find_path(BIND_ROOT_DIR
NAMES include/resolv.h
)
find_path(BIND_INCLUDE_DIR
NAMES resolv.h
HINTS ${BIND_ROOT_DIR}/include
)
if (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
# the static resolv library is preferred because
# on some systems, the ns_initparse symbol is not
# exported in the shared library (strangely)
# see http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=291609
set(bind_libs none libresolv.a resolv bind)
else ()
set(bind_libs none resolv bind)
endif ()
include(CheckCSourceCompiles)
# Find which library has the res_mkquery and ns_initparse symbols
set(CMAKE_REQUIRED_INCLUDES ${BIND_INCLUDE_DIR})
foreach (bindlib ${bind_libs})
if (NOT ${bindlib} MATCHES "none")
find_library(BIND_LIBRARY
NAMES ${bindlib}
HINTS ${BIND_ROOT_DIR}/lib
)
endif ()
set(CMAKE_REQUIRED_LIBRARIES ${BIND_LIBRARY})
check_c_source_compiles("
#include <arpa/nameser.h>
int main() {
ns_initparse(0, 0, 0);
return 0;
}
" ns_initparse_works_${bindlib})
check_c_source_compiles("
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/nameser.h>
#include <resolv.h>
int main() {
int (*p)() = res_mkquery;
}
" res_mkquery_works_${bindlib})
set(CMAKE_REQUIRED_LIBRARIES)
if (ns_initparse_works_${bindlib} AND res_mkquery_works_${bindlib})
break ()
else ()
set(BIND_LIBRARY BIND_LIBRARY-NOTFOUND)
endif ()
endforeach ()
set(CMAKE_REQUIRED_INCLUDES)
include(FindPackageHandleStandardArgs)
if (ns_initparse_works_none AND res_mkquery_works_none)
# system does not require linking to a BIND library
find_package_handle_standard_args(BIND DEFAULT_MSG
BIND_INCLUDE_DIR
)
else ()
find_package_handle_standard_args(BIND DEFAULT_MSG
BIND_LIBRARY
BIND_INCLUDE_DIR
)
endif ()
mark_as_advanced(
BIND_ROOT_DIR
BIND_LIBRARY
BIND_INCLUDE_DIR
)
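A short consumption sketch (the brodeps list is an assumption; on platforms where libc already provides the resolver symbols, BIND_LIBRARY stays unset and nothing extra is linked):

    find_package(BIND REQUIRED)
    include_directories(BEFORE ${BIND_INCLUDE_DIR})
    if (BIND_LIBRARY)
        # Only needed where ns_initparse/res_mkquery live in a separate library.
        list(APPEND brodeps ${BIND_LIBRARY})
    endif ()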


@@ -1,221 +0,0 @@
# - Find bison executable and provides macros to generate custom build rules
# The module defines the following variables:
#
# BISON_EXECUTABLE - path to the bison program
# BISON_VERSION - version of bison
# BISON_FOUND - true if the program was found
#
# If bison is found, the module defines the macros:
# BISON_TARGET(<Name> <YaccInput> <CodeOutput> [VERBOSE <file>]
# [COMPILE_FLAGS <string>] [HEADER <FILE>])
# which will create a custom rule to generate a parser. <YaccInput> is
# the path to a yacc file. <CodeOutput> is the name of the source file
# generated by bison. A header file containing the token list is also
# generated according to bison's -d option by default or if the HEADER
# option is used, the argument is passed to bison's --defines option to
# specify output file. If COMPILE_FLAGS option is specified, the next
# parameter is added in the bison command line. if VERBOSE option is
# specified, <file> is created and contains verbose descriptions of the
# grammar and parser. The macro defines a set of variables:
# BISON_${Name}_DEFINED - true is the macro ran successfully
# BISON_${Name}_INPUT - The input source file, an alias for <YaccInput>
# BISON_${Name}_OUTPUT_SOURCE - The source file generated by bison
# BISON_${Name}_OUTPUT_HEADER - The header file generated by bison
# BISON_${Name}_OUTPUTS - The sources files generated by bison
# BISON_${Name}_COMPILE_FLAGS - Options used in the bison command line
#
# ====================================================================
# Example:
#
# find_package(BISON)
# BISON_TARGET(MyParser parser.y ${CMAKE_CURRENT_BINARY_DIR}/parser.cpp)
# add_executable(Foo main.cpp ${BISON_MyParser_OUTPUTS})
# ====================================================================
#=============================================================================
# Copyright 2009 Kitware, Inc.
# Copyright 2006 Tristan Carel
# Modified 2010 by Jon Siwek, adding HEADER option
#
# Distributed under the OSI-approved BSD License (the "License"):
# CMake - Cross Platform Makefile Generator
# Copyright 2000-2009 Kitware, Inc., Insight Software Consortium
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the names of Kitware, Inc., the Insight Software Consortium,
# nor the names of their contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
FIND_PROGRAM(BISON_EXECUTABLE bison DOC "path to the bison executable")
MARK_AS_ADVANCED(BISON_EXECUTABLE)
IF(BISON_EXECUTABLE)
EXECUTE_PROCESS(COMMAND ${BISON_EXECUTABLE} --version
OUTPUT_VARIABLE BISON_version_output
ERROR_VARIABLE BISON_version_error
RESULT_VARIABLE BISON_version_result
OUTPUT_STRIP_TRAILING_WHITESPACE)
IF(NOT ${BISON_version_result} EQUAL 0)
MESSAGE(SEND_ERROR "Command \"${BISON_EXECUTABLE} --version\" failed with output:\n${BISON_version_error}")
ELSE()
STRING(REGEX REPLACE "^bison \\(GNU Bison\\) ([^\n]+)\n.*" "\\1"
BISON_VERSION "${BISON_version_output}")
ENDIF()
# internal macro
MACRO(BISON_TARGET_option_verbose Name BisonOutput filename)
LIST(APPEND BISON_TARGET_cmdopt "--verbose")
GET_FILENAME_COMPONENT(BISON_TARGET_output_path "${BisonOutput}" PATH)
GET_FILENAME_COMPONENT(BISON_TARGET_output_name "${BisonOutput}" NAME_WE)
ADD_CUSTOM_COMMAND(OUTPUT ${filename}
COMMAND ${CMAKE_COMMAND}
ARGS -E copy
"${BISON_TARGET_output_path}/${BISON_TARGET_output_name}.output"
"${filename}"
DEPENDS
"${BISON_TARGET_output_path}/${BISON_TARGET_output_name}.output"
COMMENT "[BISON][${Name}] Copying bison verbose table to ${filename}"
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR})
SET(BISON_${Name}_VERBOSE_FILE ${filename})
LIST(APPEND BISON_TARGET_extraoutputs
"${BISON_TARGET_output_path}/${BISON_TARGET_output_name}.output")
ENDMACRO(BISON_TARGET_option_verbose)
# internal macro
MACRO(BISON_TARGET_option_extraopts Options)
SET(BISON_TARGET_extraopts "${Options}")
SEPARATE_ARGUMENTS(BISON_TARGET_extraopts)
LIST(APPEND BISON_TARGET_cmdopt ${BISON_TARGET_extraopts})
ENDMACRO(BISON_TARGET_option_extraopts)
#============================================================
# BISON_TARGET (public macro)
#============================================================
#
MACRO(BISON_TARGET Name BisonInput BisonOutput)
SET(BISON_TARGET_output_header "")
#SET(BISON_TARGET_command_opt "")
SET(BISON_TARGET_cmdopt "")
SET(BISON_TARGET_outputs "${BisonOutput}")
IF(NOT ${ARGC} EQUAL 3 AND
NOT ${ARGC} EQUAL 5 AND
NOT ${ARGC} EQUAL 7 AND
NOT ${ARGC} EQUAL 9)
MESSAGE(SEND_ERROR "Usage")
ELSE()
# Parsing parameters
IF(${ARGC} GREATER 5 OR ${ARGC} EQUAL 5)
IF("${ARGV3}" STREQUAL "VERBOSE")
BISON_TARGET_option_verbose(${Name} ${BisonOutput} "${ARGV4}")
ENDIF()
IF("${ARGV3}" STREQUAL "COMPILE_FLAGS")
BISON_TARGET_option_extraopts("${ARGV4}")
ENDIF()
IF("${ARGV3}" STREQUAL "HEADER")
set(BISON_TARGET_output_header "${ARGV4}")
ENDIF()
ENDIF()
IF(${ARGC} GREATER 7 OR ${ARGC} EQUAL 7)
IF("${ARGV5}" STREQUAL "VERBOSE")
BISON_TARGET_option_verbose(${Name} ${BisonOutput} "${ARGV6}")
ENDIF()
IF("${ARGV5}" STREQUAL "COMPILE_FLAGS")
BISON_TARGET_option_extraopts("${ARGV6}")
ENDIF()
IF("${ARGV5}" STREQUAL "HEADER")
set(BISON_TARGET_output_header "${ARGV6}")
ENDIF()
ENDIF()
IF(${ARGC} EQUAL 9)
IF("${ARGV7}" STREQUAL "VERBOSE")
BISON_TARGET_option_verbose(${Name} ${BisonOutput} "${ARGV8}")
ENDIF()
IF("${ARGV7}" STREQUAL "COMPILE_FLAGS")
BISON_TARGET_option_extraopts("${ARGV8}")
ENDIF()
IF("${ARGV7}" STREQUAL "HEADER")
set(BISON_TARGET_output_header "${ARGV8}")
ENDIF()
ENDIF()
IF(BISON_TARGET_output_header)
# Header's name passed in as argument to be used in --defines option
LIST(APPEND BISON_TARGET_cmdopt
"--defines=${BISON_TARGET_output_header}")
set(BISON_${Name}_OUTPUT_HEADER ${BISON_TARGET_output_header})
ELSE()
# Header's name generated by bison (see option -d)
LIST(APPEND BISON_TARGET_cmdopt "-d")
STRING(REGEX REPLACE "^(.*)(\\.[^.]*)$" "\\2" _fileext "${ARGV2}")
STRING(REPLACE "c" "h" _fileext ${_fileext})
STRING(REGEX REPLACE "^(.*)(\\.[^.]*)$" "\\1${_fileext}"
BISON_${Name}_OUTPUT_HEADER "${ARGV2}")
ENDIF()
LIST(APPEND BISON_TARGET_outputs "${BISON_${Name}_OUTPUT_HEADER}")
ADD_CUSTOM_COMMAND(OUTPUT ${BISON_TARGET_outputs}
${BISON_TARGET_extraoutputs}
COMMAND ${BISON_EXECUTABLE}
ARGS ${BISON_TARGET_cmdopt} -o ${ARGV2} ${ARGV1}
DEPENDS ${ARGV1}
COMMENT "[BISON][${Name}] Building parser with bison ${BISON_VERSION}"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
# define target variables
SET(BISON_${Name}_DEFINED TRUE)
SET(BISON_${Name}_INPUT ${ARGV1})
SET(BISON_${Name}_OUTPUTS ${BISON_TARGET_outputs})
SET(BISON_${Name}_COMPILE_FLAGS ${BISON_TARGET_cmdopt})
SET(BISON_${Name}_OUTPUT_SOURCE "${BisonOutput}")
ENDIF(NOT ${ARGC} EQUAL 3 AND
NOT ${ARGC} EQUAL 5 AND
NOT ${ARGC} EQUAL 7 AND
NOT ${ARGC} EQUAL 9)
ENDMACRO(BISON_TARGET)
#
#============================================================
ENDIF(BISON_EXECUTABLE)
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(BISON DEFAULT_MSG BISON_EXECUTABLE)
# FindBISON.cmake ends here


@@ -1,53 +0,0 @@
# - Try to find BinPAC binary and library
#
# Usage of this module as follows:
#
# find_package(BinPAC)
#
# Variables used by this module, they can change the default behaviour and need
# to be set before calling find_package:
#
# BinPAC_ROOT_DIR Set this variable to the root installation of
# BinPAC if the module has problems finding the
# proper installation path.
#
# Variables defined by this module:
#
# BINPAC_FOUND System has BinPAC binary and library
# BinPAC_EXE The binpac executable
# BinPAC_LIBRARY The libbinpac.a library
# BinPAC_INCLUDE_DIR The binpac headers
# look for BinPAC in standard locations or user-provided root
find_path(BinPAC_ROOT_DIR
NAMES include/binpac.h
)
find_file(BinPAC_EXE
NAMES binpac
HINTS ${BinPAC_ROOT_DIR}/bin
)
find_library(BinPAC_LIBRARY
NAMES libbinpac.a
HINTS ${BinPAC_ROOT_DIR}/lib
)
find_path(BinPAC_INCLUDE_DIR
NAMES binpac.h
HINTS ${BinPAC_ROOT_DIR}/include
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(BinPAC DEFAULT_MSG
BinPAC_EXE
BinPAC_LIBRARY
BinPAC_INCLUDE_DIR
)
mark_as_advanced(
BinPAC_ROOT_DIR
BinPAC_EXE
BinPAC_LIBRARY
BinPAC_INCLUDE_DIR
)
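A sketch of how these variables might drive code generation; the analyzer file names, the output-directory flag, and the surrounding rule are illustrative assumptions rather than code from this repository:

    find_package(BinPAC REQUIRED)
    include_directories(BEFORE ${BinPAC_INCLUDE_DIR})

    # Hypothetical rule turning a .pac spec into C++ with the binpac compiler.
    add_custom_command(
        OUTPUT  ${CMAKE_CURRENT_BINARY_DIR}/http_pac.cc
                ${CMAKE_CURRENT_BINARY_DIR}/http_pac.h
        COMMAND ${BinPAC_EXE} -d ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/http.pac
        DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/http.pac
        COMMENT "Generating analyzer sources with binpac")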


@@ -1,179 +0,0 @@
# - Find flex executable and provides a macro to generate custom build rules
#
# The module defines the following variables:
# FLEX_FOUND - true is flex executable is found
# FLEX_EXECUTABLE - the path to the flex executable
# FLEX_VERSION - the version of flex
# FLEX_LIBRARIES - The flex libraries
#
# The minimum required version of flex can be specified using the
# standard syntax, e.g. FIND_PACKAGE(FLEX 2.5.13)
#
#
# If flex is found on the system, the module provides the macro:
# FLEX_TARGET(Name FlexInput FlexOutput [COMPILE_FLAGS <string>])
# which creates a custom command to generate the <FlexOutput> file from
# the <FlexInput> file. If COMPILE_FLAGS option is specified, the next
# parameter is added to the flex command line. Name is an alias used to
# get details of this custom command. Indeed the macro defines the
# following variables:
# FLEX_${Name}_DEFINED - true is the macro ran successfully
# FLEX_${Name}_OUTPUTS - the source file generated by the custom rule, an
# alias for FlexOutput
# FLEX_${Name}_INPUT - the flex source file, an alias for ${FlexInput}
#
# Flex scanners oftenly use tokens defined by Bison: the code generated
# by Flex depends of the header generated by Bison. This module also
# defines a macro:
# ADD_FLEX_BISON_DEPENDENCY(FlexTarget BisonTarget)
# which adds the required dependency between a scanner and a parser
# where <FlexTarget> and <BisonTarget> are the first parameters of
# respectively FLEX_TARGET and BISON_TARGET macros.
#
# ====================================================================
# Example:
#
# find_package(BISON)
# find_package(FLEX)
#
# BISON_TARGET(MyParser parser.y ${CMAKE_CURRENT_BINARY_DIR}/parser.cpp
# FLEX_TARGET(MyScanner lexer.l ${CMAKE_CURRENT_BIANRY_DIR}/lexer.cpp)
# ADD_FLEX_BISON_DEPENDENCY(MyScanner MyParser)
#
# include_directories(${CMAKE_CURRENT_BINARY_DIR})
# add_executable(Foo
# Foo.cc
# ${BISON_MyParser_OUTPUTS}
# ${FLEX_MyScanner_OUTPUTS}
# )
# ====================================================================
#=============================================================================
# Copyright 2009 Kitware, Inc.
# Copyright 2006 Tristan Carel
# Modified 2010 by Jon Siwek, backporting for CMake 2.6 compat
#
# Distributed under the OSI-approved BSD License (the "License"):
# CMake - Cross Platform Makefile Generator
# Copyright 2000-2009 Kitware, Inc., Insight Software Consortium
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the names of Kitware, Inc., the Insight Software Consortium,
# nor the names of their contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
FIND_PROGRAM(FLEX_EXECUTABLE flex DOC "path to the flex executable")
MARK_AS_ADVANCED(FLEX_EXECUTABLE)
FIND_LIBRARY(FL_LIBRARY NAMES fl
DOC "path to the fl library")
MARK_AS_ADVANCED(FL_LIBRARY)
SET(FLEX_LIBRARIES ${FL_LIBRARY})
IF(FLEX_EXECUTABLE)
EXECUTE_PROCESS(COMMAND ${FLEX_EXECUTABLE} --version
OUTPUT_VARIABLE FLEX_version_output
ERROR_VARIABLE FLEX_version_error
RESULT_VARIABLE FLEX_version_result
OUTPUT_STRIP_TRAILING_WHITESPACE)
IF(NOT ${FLEX_version_result} EQUAL 0)
IF(FLEX_FIND_REQUIRED)
MESSAGE(SEND_ERROR "Command \"${FLEX_EXECUTABLE} --version\" failed with output:\n${FLEX_version_output}\n${FLEX_version_error}")
ELSE()
MESSAGE("Command \"${FLEX_EXECUTABLE} --version\" failed with output:\n${FLEX_version_output}\n${FLEX_version_error}\nFLEX_VERSION will not be available")
ENDIF()
ELSE()
STRING(REGEX REPLACE "^flex (.*)$" "\\1"
FLEX_VERSION "${FLEX_version_output}")
ENDIF()
#============================================================
# FLEX_TARGET (public macro)
#============================================================
#
MACRO(FLEX_TARGET Name Input Output)
SET(FLEX_TARGET_usage "FLEX_TARGET(<Name> <Input> <Output> [COMPILE_FLAGS <string>])")
IF(${ARGC} GREATER 3)
IF(${ARGC} EQUAL 5)
IF("${ARGV3}" STREQUAL "COMPILE_FLAGS")
SET(FLEX_EXECUTABLE_opts "${ARGV4}")
SEPARATE_ARGUMENTS(FLEX_EXECUTABLE_opts)
ELSE()
MESSAGE(SEND_ERROR ${FLEX_TARGET_usage})
ENDIF()
ELSE()
MESSAGE(SEND_ERROR ${FLEX_TARGET_usage})
ENDIF()
ENDIF()
ADD_CUSTOM_COMMAND(OUTPUT ${Output}
COMMAND ${FLEX_EXECUTABLE}
ARGS ${FLEX_EXECUTABLE_opts} -o${Output} ${Input}
DEPENDS ${Input}
COMMENT "[FLEX][${Name}] Building scanner with flex ${FLEX_VERSION}"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
SET(FLEX_${Name}_DEFINED TRUE)
SET(FLEX_${Name}_OUTPUTS ${Output})
SET(FLEX_${Name}_INPUT ${Input})
SET(FLEX_${Name}_COMPILE_FLAGS ${FLEX_EXECUTABLE_opts})
ENDMACRO(FLEX_TARGET)
#============================================================
#============================================================
# ADD_FLEX_BISON_DEPENDENCY (public macro)
#============================================================
#
MACRO(ADD_FLEX_BISON_DEPENDENCY FlexTarget BisonTarget)
IF(NOT FLEX_${FlexTarget}_OUTPUTS)
MESSAGE(SEND_ERROR "Flex target `${FlexTarget}' does not exist.")
ENDIF()
IF(NOT BISON_${BisonTarget}_OUTPUT_HEADER)
MESSAGE(SEND_ERROR "Bison target `${BisonTarget}' does not exist.")
ENDIF()
SET_SOURCE_FILES_PROPERTIES(${FLEX_${FlexTarget}_OUTPUTS}
PROPERTIES OBJECT_DEPENDS ${BISON_${BisonTarget}_OUTPUT_HEADER})
ENDMACRO(ADD_FLEX_BISON_DEPENDENCY)
#============================================================
ENDIF(FLEX_EXECUTABLE)
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(FLEX FLEX_EXECUTABLE
FLEX_VERSION)
# FindFLEX.cmake ends here

View file

@ -1,44 +0,0 @@
# - Try to find GooglePerftools headers and libraries
#
# Usage of this module as follows:
#
# find_package(GooglePerftools)
#
# Variables used by this module; they can change the default behaviour and need
# to be set before calling find_package:
#
# GooglePerftools_ROOT_DIR Set this variable to the root installation of
# GooglePerftools if the module has problems finding
# the proper installation path.
#
# Variables defined by this module:
#
# GOOGLEPERFTOOLS_FOUND System has GooglePerftools libs/headers
# GooglePerftools_LIBRARIES The GooglePerftools libraries
# GooglePerftools_INCLUDE_DIR The location of GooglePerftools headers
find_path(GooglePerftools_ROOT_DIR
NAMES include/google/heap-profiler.h
)
find_library(GooglePerftools_LIBRARIES
NAMES tcmalloc
HINTS ${GooglePerftools_ROOT_DIR}/lib
)
find_path(GooglePerftools_INCLUDE_DIR
NAMES google/heap-profiler.h
HINTS ${GooglePerftools_ROOT_DIR}/include
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GooglePerftools DEFAULT_MSG
GooglePerftools_LIBRARIES
GooglePerftools_INCLUDE_DIR
)
mark_as_advanced(
GooglePerftools_ROOT_DIR
GooglePerftools_LIBRARIES
GooglePerftools_INCLUDE_DIR
)

View file

@ -1,52 +0,0 @@
# - Try to find GeoIP headers and libraries
#
# Usage of this module as follows:
#
# find_package(LibGeoIP)
#
# Variables used by this module; they can change the default behaviour and need
# to be set before calling find_package:
#
# LibGeoIP_ROOT_DIR Set this variable to the root installation of
# libGeoIP if the module has problems finding the
# proper installation path.
#
# Variables defined by this module:
#
# LIBGEOIP_FOUND System has GeoIP libraries and headers
# LibGeoIP_LIBRARY The GeoIP library
# LibGeoIP_INCLUDE_DIR The location of GeoIP headers
find_path(LibGeoIP_ROOT_DIR
NAMES include/GeoIPCity.h
)
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
# the static version of the library is preferred on OS X for the
# purposes of making packages (libGeoIP doesn't ship w/ OS X)
set(libgeoip_names libGeoIP.a GeoIP)
else ()
set(libgeoip_names GeoIP)
endif ()
find_library(LibGeoIP_LIBRARY
NAMES ${libgeoip_names}
HINTS ${LibGeoIP_ROOT_DIR}/lib
)
find_path(LibGeoIP_INCLUDE_DIR
NAMES GeoIPCity.h
HINTS ${LibGeoIP_ROOT_DIR}/include
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LibGeoIP DEFAULT_MSG
LibGeoIP_LIBRARY
LibGeoIP_INCLUDE_DIR
)
mark_as_advanced(
LibGeoIP_ROOT_DIR
LibGeoIP_LIBRARY
LibGeoIP_INCLUDE_DIR
)

View file

@ -1,52 +0,0 @@
# - Try to find libmagic header and library
#
# Usage of this module as follows:
#
# find_package(LibMagic)
#
# Variables used by this module; they can change the default behaviour and need
# to be set before calling find_package:
#
# LibMagic_ROOT_DIR Set this variable to the root installation of
# libmagic if the module has problems finding the
# proper installation path.
#
# Variables defined by this module:
#
# LIBMAGIC_FOUND System has libmagic and magic.h
# LibMagic_LIBRARY The libmagic library
# LibMagic_INCLUDE_DIR The location of magic.h
find_path(LibMagic_ROOT_DIR
NAMES include/magic.h
)
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
# the static version of the library is preferred on OS X for the
# purposes of making packages (libmagic doesn't ship w/ OS X)
set(libmagic_names libmagic.a magic)
else ()
set(libmagic_names magic)
endif ()
find_library(LibMagic_LIBRARY
NAMES ${libmagic_names}
HINTS ${LibMagic_ROOT_DIR}/lib
)
find_path(LibMagic_INCLUDE_DIR
NAMES magic.h
HINTS ${LibMagic_ROOT_DIR}/include
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LibMagic DEFAULT_MSG
LibMagic_LIBRARY
LibMagic_INCLUDE_DIR
)
mark_as_advanced(
LibMagic_ROOT_DIR
LibMagic_LIBRARY
LibMagic_INCLUDE_DIR
)

View file

@ -1,56 +0,0 @@
# - Try to find openssl include dirs and libraries
#
# Usage of this module as follows:
#
# find_package(OpenSSL)
#
# Variables used by this module; they can change the default behaviour and need
# to be set before calling find_package:
#
# OpenSSL_ROOT_DIR Set this variable to the root installation of
# openssl if the module has problems finding the
# proper installation path.
#
# Variables defined by this module:
#
# OPENSSL_FOUND System has openssl, include and library dirs found
# OpenSSL_INCLUDE_DIR The openssl include directories.
# OpenSSL_LIBRARIES The openssl libraries.
#  OpenSSL_CRYPTO_LIBRARY     The openssl crypto library.
# OpenSSL_SSL_LIBRARY The openssl ssl library.
find_path(OpenSSL_ROOT_DIR
NAMES include/openssl/ssl.h
)
find_path(OpenSSL_INCLUDE_DIR
NAMES openssl/ssl.h
HINTS ${OpenSSL_ROOT_DIR}/include
)
find_library(OpenSSL_SSL_LIBRARY
NAMES ssl ssleay32 ssleay32MD
HINTS ${OpenSSL_ROOT_DIR}/lib
)
find_library(OpenSSL_CRYPTO_LIBRARY
NAMES crypto
HINTS ${OpenSSL_ROOT_DIR}/lib
)
set(OpenSSL_LIBRARIES ${OpenSSL_SSL_LIBRARY} ${OpenSSL_CRYPTO_LIBRARY}
CACHE STRING "OpenSSL SSL and crypto libraries" FORCE)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(OpenSSL DEFAULT_MSG
OpenSSL_LIBRARIES
OpenSSL_INCLUDE_DIR
)
mark_as_advanced(
OpenSSL_ROOT_DIR
OpenSSL_INCLUDE_DIR
OpenSSL_LIBRARIES
OpenSSL_CRYPTO_LIBRARY
OpenSSL_SSL_LIBRARY
)
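
For context, here is how a consuming CMakeLists.txt might use the variables this module defines. This is only a minimal sketch; the target name ssl_demo and the source file main.cc are placeholders, not anything from the Bro build itself.

# Hypothetical consumer of the find module above -- illustration only.
set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake)   # directory holding this module

find_package(OpenSSL)

if (OPENSSL_FOUND)
    include_directories(${OpenSSL_INCLUDE_DIR})
    add_executable(ssl_demo main.cc)
    # OpenSSL_LIBRARIES bundles the ssl and crypto libraries located above.
    target_link_libraries(ssl_demo ${OpenSSL_LIBRARIES})
endif ()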

View file

@ -1,44 +0,0 @@
# - Try to find libpcap include dirs and libraries
#
# Usage of this module as follows:
#
# find_package(PCAP)
#
# Variables used by this module; they can change the default behaviour and need
# to be set before calling find_package:
#
# PCAP_ROOT_DIR Set this variable to the root installation of
# libpcap if the module has problems finding the
# proper installation path.
#
# Variables defined by this module:
#
# PCAP_FOUND System has libpcap, include and library dirs found
# PCAP_INCLUDE_DIR The libpcap include directories.
# PCAP_LIBRARY The libpcap library.
find_path(PCAP_ROOT_DIR
NAMES include/pcap.h
)
find_path(PCAP_INCLUDE_DIR
NAMES pcap.h
HINTS ${PCAP_ROOT_DIR}/include
)
find_library(PCAP_LIBRARY
NAMES pcap
HINTS ${PCAP_ROOT_DIR}/lib
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(PCAP DEFAULT_MSG
PCAP_LIBRARY
PCAP_INCLUDE_DIR
)
mark_as_advanced(
PCAP_ROOT_DIR
PCAP_INCLUDE_DIR
PCAP_LIBRARY
)

View file

@ -1,44 +0,0 @@
# A wrapper macro around the standard CMake find_package macro that
# facilitates displaying better error messages by default, or even
# accepting custom error messages on a per package basis.
#
# If a package is not found, then the MISSING_PREREQS variable gets
# set to true and either a default or custom error message is appended
# to MISSING_PREREQ_DESCS.
#
# The caller can use these variables to display a list of any missing
# packages and abort the build/configuration if there were any.
#
# Use as follows:
#
# include(FindRequiredPackage)
# FindRequiredPackage(Perl)
# FindRequiredPackage(FLEX "You need to install flex (Fast Lexical Analyzer)")
#
# if (MISSING_PREREQS)
# foreach (prereq ${MISSING_PREREQ_DESCS})
# message(SEND_ERROR ${prereq})
# endforeach ()
# message(FATAL_ERROR "Configuration aborted due to missing prerequisites")
# endif ()
macro(FindRequiredPackage packageName)
find_package(${packageName})
string(TOUPPER ${packageName} canonPackageName)
if (NOT ${canonPackageName}_FOUND)
set(MISSING_PREREQS true)
set(customDesc)
foreach (descArg ${ARGN})
set(customDesc "${customDesc} ${descArg}")
endforeach ()
if (customDesc)
# append the custom error message that was provided as an argument
list(APPEND MISSING_PREREQ_DESCS ${customDesc})
else ()
list(APPEND MISSING_PREREQ_DESCS
" Could not find prerequisite package '${packageName}'")
endif ()
endif ()
endmacro(FindRequiredPackage)

View file

@ -1,26 +0,0 @@
# Determines at `make install` time whether a file, typically a configuration
# file placed in $PREFIX/etc, should be skipped so that an existing
# file is not overwritten.
#
# _srcfile: the file to install
# _dstfile: the absolute file name after installation
macro(InstallClobberImmune _srcfile _dstfile)
install(CODE "
if (EXISTS ${_dstfile})
message(STATUS \"Skipping: ${_dstfile} (already exists)\")
execute_process(COMMAND \"${CMAKE_COMMAND}\" -E compare_files
${_srcfile} ${_dstfile} RESULT_VARIABLE _diff)
if (NOT \"\${_diff}\" STREQUAL \"0\")
message(STATUS \"Installing: ${_dstfile}.example\")
configure_file(${_srcfile} ${_dstfile}.example COPYONLY)
endif ()
else ()
message(STATUS \"Installing: ${_dstfile}\")
# install() is not scriptable within install(), and
# configure_file() is the next best thing
configure_file(${_srcfile} ${_dstfile} COPYONLY)
# TODO: create additional install_manifest files?
endif ()
")
endmacro(InstallClobberImmune)
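
As an illustration of how this macro might be invoked from a CMakeLists.txt (the paths and file name below are placeholders, not taken from the Bro build):

include(InstallClobberImmune)

# Install a default config file, but skip it at `make install` time if the
# administrator already has a copy under the install prefix.
InstallClobberImmune(
    ${CMAKE_CURRENT_SOURCE_DIR}/etc/example.cfg
    ${CMAKE_INSTALL_PREFIX}/etc/example.cfg)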

View file

@ -1,42 +0,0 @@
include(InstallClobberImmune)
# This macro can be used to install configuration files which
# users are expected to modify after installation. It will:
#
# - If binary packaging is enabled:
# Install the file in the typical CMake fashion, but append to the
# INSTALLED_CONFIG_FILES cache variable for use with the Mac package's
# pre/post install scripts
# - If binary packaging is not enabled:
#   Install the file in a way such that `make install` will first check
#   whether it already exists and skip it if so. See InstallClobberImmune.cmake
#
# _srcfile: the absolute path to the file to install
# _dstdir: absolute path to the directory in which to install the file
# _dstfilename: how to (re)name the file inside _dstdir
macro(InstallPackageConfigFile _srcfile _dstdir _dstfilename)
set(_dstfile ${_dstdir}/${_dstfilename})
if (BINARY_PACKAGING_MODE)
# If packaging mode is enabled, always install the distribution's
# version of the file. The Mac package's pre/post install scripts
# or native functionality of RPMs will take care of not clobbering it.
install(FILES ${_srcfile} DESTINATION ${_dstdir} RENAME ${_dstfilename})
# This cache variable is what the Mac package pre/post install scripts
# use to avoid clobbering user-modified config files
set(INSTALLED_CONFIG_FILES
"${INSTALLED_CONFIG_FILES} ${_dstfile}" CACHE STRING "" FORCE)
# Additionally, the Mac PackageMaker packages don't have any automatic
# handling of configuration file conflicts, so install an example file
# that the post-install script will clean up if it turns out to be extraneous
if (APPLE)
install(FILES ${_srcfile} DESTINATION ${_dstdir}
RENAME ${_dstfilename}.example)
endif ()
else ()
# Have `make install` check at run time whether the file already exists and skip it if so
InstallClobberImmune(${_srcfile} ${_dstfile})
endif ()
endmacro(InstallPackageConfigFile)
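
A hedged usage sketch, again with placeholder names rather than anything from the actual build:

include(InstallPackageConfigFile)

# Install a user-editable config file while honoring the packaging and
# clobber-immune behavior described above. All names are hypothetical.
InstallPackageConfigFile(
    ${CMAKE_CURRENT_BINARY_DIR}/example.cfg    # absolute path of the file to install
    ${CMAKE_INSTALL_PREFIX}/etc                # destination directory
    example.cfg)                               # file name inside the destination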

View file

@ -1,20 +0,0 @@
This package will install @CMAKE_PROJECT_NAME@ into the following location:
@CMAKE_INSTALL_PREFIX@
You may choose to update your PATH environment variable:
# For Bash
export PATH=@CMAKE_INSTALL_PREFIX@/bin:$PATH
# For CSH
setenv PATH @CMAKE_INSTALL_PREFIX@/bin:$PATH
If you have more than one volume, please choose the volume that
contains the root filesystem as the install destination.
If you have existing configuration files that are modified or
otherwise different from the versions included in the package,
this installer will attempt to avoid overwriting them,
but it's also advisable to make your own backups of important
files before proceeding.

View file

@ -1,10 +0,0 @@
if (NOT _MAC_DEPENDENCY_PATHS)
set(_MAC_DEPENDENCY_PATHS true)
# As of CMake 2.8.3, Fink and MacPorts search paths are appended to the
# default search prefix paths; it would be nicer to have them prepended
# to the defaults instead, so that is fixed here.
if (APPLE)
list(INSERT CMAKE_SYSTEM_PREFIX_PATH 0 /opt/local) # MacPorts
list(INSERT CMAKE_SYSTEM_PREFIX_PATH 0 /sw) # Fink
endif ()
endif ()

View file

@ -1,34 +0,0 @@
include(CheckCXXSourceCompiles)
include(CheckCSourceCompiles)
# This autoconf variable is obsolete; it's portable to assume C89 and signal
# handlers returning void
set(RETSIGTYPE "void")
set(RETSIGVAL "")
check_c_source_compiles("
#include <sys/types.h>
#include <sys/socket.h>
extern int socket(int, int, int);
extern int connect(int, const struct sockaddr *, int);
extern int send(int, const void *, int, int);
extern int recvfrom(int, void *, int, int, struct sockaddr *, int *);
int main() { return 0; }
" DO_SOCK_DECL)
if (DO_SOCK_DECL)
message(STATUS "socket() and friends need explicit declaration")
endif ()
check_cxx_source_compiles("
#include <stdlib.h>
#include <syslog.h>
extern \"C\" {
int openlog(const char* ident, int logopt, int facility);
int syslog(int priority, const char* message_fmt, ...);
int closelog();
}
int main() { return 0; }
" SYSLOG_INT)
if (SYSLOG_INT)
message(STATUS "syslog prototypes need declaration")
endif ()

View file

@ -1,66 +0,0 @@
if (${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
# The alternate malloc is faster on FreeBSD, but needs more testing;
# we still need to add a way to set this from the command line
set(USE_NMALLOC true)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "OpenBSD")
set(USE_NMALLOC true)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
set(HAVE_LINUX true)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Solaris")
set(SOCKET_LIBS nsl socket)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "osf")
# Workaround ip_hl vs. ip_vhl problem in netinet/ip.h
add_definitions(-D__STDC__=2)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "irix")
list(APPEND CMAKE_C_FLAGS -xansi -signed -g3)
list(APPEND CMAKE_CXX_FLAGS -xansi -signed -g3)
elseif (${CMAKE_SYSTEM_NAME} MATCHES "ultrix")
list(APPEND CMAKE_C_FLAGS -std1 -g3)
list(APPEND CMAKE_CXX_FLAGS -std1 -g3)
include(CheckCSourceCompiles)
check_c_source_compiles("
#include <sys/types.h>
int main() {
void c(const struct a *);
return 0;
}
" have_ultrix_const)
if (NOT have_ultrix_const)
set(NEED_ULTRIX_CONST_HACK true)
endif ()
elseif (${CMAKE_SYSTEM_NAME} MATCHES "hpux" OR
${CMAKE_SYSTEM_NAME} MATCHES "HP-UX")
include(CheckCSourceCompiles)
set(CMAKE_REQUIRED_FLAGS -Aa)
set(CMAKE_REQUIRED_DEFINITIONS -D_HPUX_SOURCE)
check_c_source_compiles("
#include <sys/types.h>
int main() {
int frob(int, char *);
return 0;
}
" have_ansi_prototypes)
set(CMAKE_REQUIRED_FLAGS)
set(CMAKE_REQUIRED_DEFINITIONS)
if (have_ansi_prototypes)
add_definitions(-D_HPUX_SOURCE)
list(APPEND CMAKE_C_FLAGS -Aa)
list(APPEND CMAKE_CXX_FLAGS -Aa)
endif ()
if (NOT have_ansi_prototypes)
message(FATAL_ERROR "Can't get HPUX compiler to handle ANSI prototypes")
endif ()
endif ()

View file

@ -1,72 +0,0 @@
include(CheckCSourceCompiles)
include(CheckCXXSourceCompiles)
set(CMAKE_REQUIRED_LIBRARIES ${OpenSSL_LIBRARIES})
set(CMAKE_REQUIRED_INCLUDES ${OpenSSL_INCLUDE_DIR})
check_c_source_compiles("
#include <openssl/ssl.h>
int main() { return 0; }
" including_ssl_h_works)
if (NOT including_ssl_h_works)
# On Red Hat we may need to include Kerberos header.
set(CMAKE_REQUIRED_INCLUDES ${OpenSSL_INCLUDE_DIR} /usr/kerberos/include)
check_c_source_compiles("
#include <krb5.h>
#include <openssl/ssl.h>
int main() { return 0; }
" NEED_KRB5_H)
set(CMAKE_REQUIRED_INCLUDES ${OpenSSL_INCLUDE_DIR})
if (NOT NEED_KRB5_H)
message(FATAL_ERROR
"OpenSSL test failure. See CmakeError.log for details.")
else ()
message(STATUS "OpenSSL requires Kerberos header")
include_directories("/usr/kerberos/include")
endif ()
endif ()
# check for OPENSSL_add_all_algorithms_conf function
# and thus OpenSSL >= v0.9.7
check_c_source_compiles("
#include <openssl/evp.h>
int main() {
OPENSSL_add_all_algorithms_conf();
return 0;
}
" openssl_greater_than_0_9_7)
if (NOT openssl_greater_than_0_9_7)
message(FATAL_ERROR "OpenSSL >= v0.9.7 required")
endif ()
check_cxx_source_compiles("
#include <openssl/x509.h>
int main() {
const unsigned char** cpp = 0;
X509** x =0;
d2i_X509(x, cpp, 0);
return 0;
}
" OPENSSL_D2I_X509_USES_CONST_CHAR)
if (NOT OPENSSL_D2I_X509_USES_CONST_CHAR)
# double check that it compiles without const
check_cxx_source_compiles("
#include <openssl/x509.h>
int main() {
unsigned char** cpp = 0;
X509** x =0;
d2i_X509(x, cpp, 0);
return 0;
}
" OPENSSL_D2I_X509_USES_CHAR)
if (NOT OPENSSL_D2I_X509_USES_CHAR)
message(FATAL_ERROR
"Can't determine if openssl_d2i_x509() takes const char parameter")
endif ()
endif ()
set(CMAKE_REQUIRED_INCLUDES)
set(CMAKE_REQUIRED_LIBRARIES)

View file

@ -1,63 +0,0 @@
include(CheckFunctionExists)
include(CheckCSourceCompiles)
include(CheckIncludeFiles)
set(CMAKE_REQUIRED_INCLUDES ${PCAP_INCLUDE_DIR})
set(CMAKE_REQUIRED_LIBRARIES ${PCAP_LIBRARY})
check_include_files(pcap-int.h HAVE_PCAP_INT_H)
check_function_exists(pcap_freecode HAVE_LIBPCAP_PCAP_FREECODE)
if (NOT HAVE_LIBPCAP_PCAP_FREECODE)
set(DONT_HAVE_LIBPCAP_PCAP_FREECODE true)
message(STATUS "No implementation for pcap_freecode()")
endif ()
check_c_source_compiles("
#include <pcap.h>
int main () {
int snaplen;
int linktype;
struct bpf_program fp;
int optimize;
bpf_u_int32 netmask;
char str[10];
char error[1024];
snaplen = 50;
linktype = DLT_EN10MB;
optimize = 1;
netmask = 0L;
str[0] = 'i'; str[1] = 'p'; str[2] = '\\\\0';
(void)pcap_compile_nopcap(
snaplen, linktype, &fp, str, optimize, netmask, &error);
return 0;
}
" LIBPCAP_PCAP_COMPILE_NOPCAP_HAS_ERROR_PARAMETER)
if (NOT LIBPCAP_PCAP_COMPILE_NOPCAP_HAS_ERROR_PARAMETER)
# double check
check_c_source_compiles("
#include <pcap.h>
int main () {
int snaplen;
int linktype;
struct bpf_program fp;
int optimize;
bpf_u_int32 netmask;
char str[10];
snaplen = 50;
linktype = DLT_EN10MB;
optimize = 1;
netmask = 0L;
str[0] = 'i'; str[1] = 'p'; str[2] = '\\\\0';
(void)pcap_compile_nopcap(snaplen, linktype, &fp, str, optimize, netmask);
return 0;
}
" LIBPCAP_PCAP_COMPILE_NOPCAP_NO_ERROR_PARAMETER)
if (NOT LIBPCAP_PCAP_COMPILE_NOPCAP_NO_ERROR_PARAMETER)
message(FATAL_ERROR
"Can't determine if pcap_compile_nopcap takes an error parameter")
endif ()
endif ()
set(CMAKE_REQUIRED_INCLUDES)
set(CMAKE_REQUIRED_LIBRARIES)

View file

@ -1,35 +0,0 @@
function(uninstall_manifest manifestPath)
file(READ "${manifestPath}" files)
string(REGEX REPLACE "\n" ";" files "${files}")
foreach (file ${files})
set(fileName $ENV{DESTDIR}${file})
if (EXISTS "${fileName}" OR IS_SYMLINK "${fileName}")
message(STATUS "Uninstalling: ${fileName}")
execute_process(
COMMAND "@CMAKE_COMMAND@" -E remove "${fileName}"
OUTPUT_VARIABLE rm_out
RESULT_VARIABLE rm_retval
)
if (NOT ${rm_retval} EQUAL 0)
message(FATAL_ERROR "Problem when removing: ${fileName}")
endif ()
else ()
message(STATUS "Does not exist: ${fileName}")
endif ()
endforeach ()
endfunction(uninstall_manifest)
file(GLOB install_manifests @CMAKE_CURRENT_BINARY_DIR@/install_manifest*.txt)
if (install_manifests)
foreach (manifest ${install_manifests})
uninstall_manifest(${manifest})
endforeach ()
else ()
message(FATAL_ERROR "Cannot find any install manifests in: "
"\"@CMAKE_CURRENT_BINARY_DIR@/install_manifest*.txt\"")
endif ()

View file

@ -1,61 +0,0 @@
#!/bin/sh
# This script is meant to be used by binary packages post-installation.
# Variables between @ symbols are replaced by CMake at configure time.
backupNamesFile=/tmp/bro_install_backups
version=@VERSION@
sampleFiles=""
# check whether it's safe to remove backup configuration files that
# the most recent package install created
if [ -e ${backupNamesFile} ]; then
backupFileList=`cat ${backupNamesFile}`
for backupFile in ${backupFileList}; do
origFileName=`echo ${backupFile} | sed 's/\(.*\)\..*/\1/'`
diff ${origFileName} ${backupFile} > /dev/null 2>&1
if [ $? -eq 0 ]; then
# if the installed version and the backup version don't differ
# then we can remove the backup version and the example file
rm ${backupFile}
rm ${origFileName}.example
else
# The backup file differs from the newly installed version.
# Since we can't tell whether the backup version has been modified
# by the user, we should restore it to its original location
# and rename the new version appropriately.
sampleFiles="${sampleFiles}\n${origFileName}.example"
mv ${backupFile} ${origFileName}
fi
done
rm ${backupNamesFile}
fi
if [ -n "${sampleFiles}" ]; then
# Use some AppleScript to display a message to the user
/usr/bin/osascript << EOF
tell application "System Events"
activate
display alert "Existing configuration files differ from the ones that would be installed by this package. To avoid overwriting configuration which you may have modified, the following new config files have been installed:\n${sampleFiles}\n\nIf you have previously modified configuration files, please make sure that they are still compatible, else you should update your config files to the new versions."
end tell
EOF
fi
# Set up world writeable spool and logs directory for broctl, making sure
# to set the sticky bit so that unprivileged users can't rename/remove files.
# (CMake/CPack is supposed to install them, but has problems with empty dirs)
if [ -n "@EMPTY_WORLD_DIRS@" ]; then
for dir in "@EMPTY_WORLD_DIRS@"; do
mkdir -p ${dir}
chmod 777 ${dir}
chmod +t ${dir}
done
fi

View file

@ -1,34 +0,0 @@
#!/bin/sh
# This script is meant to be used by binary packages pre-installation.
# Variables between @ symbols are replaced by CMake at configure time.
configFiles="@INSTALLED_CONFIG_FILES@"
backupNamesFile=/tmp/bro_install_backups
# Checks if a config file exists in a default location and makes a backup
# so that a modified version is not clobbered
backupFile () {
origFile="$1"
if [ -e ${origFile} ]; then
# choose a file suffix that doesn't already exist
ver=1
while [ -e ${origFile}.${ver} ]; do
ver=$(( ver + 1 ))
done
backupFile=${origFile}.${ver}
cp -p ${origFile} ${backupFile}
# the post upgrade script will check whether the installed
# config file actually differs from existing version
# and delete unnecessary backups
echo "${backupFile}" >> ${backupNamesFile}
fi
}
for file in ${configFiles}; do
backupFile "${file}"
done

View file

@ -1,6 +1,3 @@
/* enable IPV6 processing */
#cmakedefine BROv6
/* Old libpcap versions (< 0.6.1) need defining pcap_freecode and
   pcap_compile_nopcap */
#cmakedefine DONT_HAVE_LIBPCAP_PCAP_FREECODE
@ -14,18 +11,9 @@
/* Define if you have the `getopt_long' function. */
#cmakedefine HAVE_GETOPT_LONG
/* Define if you have the `magic' library (-lmagic). */
#cmakedefine HAVE_LIBMAGIC
/* Define if you have the `z' library (-lz). */
#cmakedefine HAVE_LIBZ
/* We are on a Linux system */
#cmakedefine HAVE_LINUX
/* Define if you have the <magic.h> header file. */
#cmakedefine HAVE_MAGIC_H
/* Define if you have the `mallinfo' function. */
#cmakedefine HAVE_MALLINFO
@ -41,8 +29,8 @@
/* Define if you have the <net/ethernet.h> header file. */
#cmakedefine HAVE_NET_ETHERNET_H
/* We are on a OpenBSD system */
#cmakedefine HAVE_OPENBSD
/* Define if you have the <net/ethertypes.h> header file. */
#cmakedefine HAVE_NET_ETHERTYPES_H
/* have os-proto.h */
#cmakedefine HAVE_OS_PROTO_H
@ -114,8 +102,17 @@
/* GeoIP geographic lookup functionality */
#cmakedefine USE_GEOIP
/* Whether the found GeoIP API supports IPv6 Country Edition */
#cmakedefine HAVE_GEOIP_COUNTRY_EDITION_V6
/* Whether the found GeoIP API supports IPv6 City Edition */
#cmakedefine HAVE_GEOIP_CITY_EDITION_REV0_V6
/* Use Google's perftools */
#cmakedefine USE_PERFTOOLS
#cmakedefine USE_PERFTOOLS_DEBUG
/* Analyze Mobile IPv6 traffic */
#cmakedefine ENABLE_MOBILE_IPV6
/* Version number of package */
#define VERSION "@VERSION@"
@ -148,3 +145,54 @@
/* Define u_int8_t */
#cmakedefine u_int8_t @u_int8_t@
/* OpenBSD's bpf.h may not declare this data link type, but it's supposed to be
used consistently for the same purpose on all platforms. */
#cmakedefine HAVE_DLT_PPP_SERIAL
#ifndef HAVE_DLT_PPP_SERIAL
#define DLT_PPP_SERIAL @DLT_PPP_SERIAL@
#endif
/* IPv6 Next Header values defined by RFC 3542 */
#cmakedefine HAVE_IPPROTO_HOPOPTS
#ifndef HAVE_IPPROTO_HOPOPTS
#define IPPROTO_HOPOPTS 0
#endif
#cmakedefine HAVE_IPPROTO_IPV6
#ifndef HAVE_IPPROTO_IPV6
#define IPPROTO_IPV6 41
#endif
#cmakedefine HAVE_IPPROTO_ROUTING
#ifndef HAVE_IPPROTO_ROUTING
#define IPPROTO_ROUTING 43
#endif
#cmakedefine HAVE_IPPROTO_FRAGMENT
#ifndef HAVE_IPPROTO_FRAGMENT
#define IPPROTO_FRAGMENT 44
#endif
#cmakedefine HAVE_IPPROTO_ESP
#ifndef HAVE_IPPROTO_ESP
#define IPPROTO_ESP 50
#endif
#cmakedefine HAVE_IPPROTO_AH
#ifndef HAVE_IPPROTO_AH
#define IPPROTO_AH 51
#endif
#cmakedefine HAVE_IPPROTO_ICMPV6
#ifndef HAVE_IPPROTO_ICMPV6
#define IPPROTO_ICMPV6 58
#endif
#cmakedefine HAVE_IPPROTO_NONE
#ifndef HAVE_IPPROTO_NONE
#define IPPROTO_NONE 59
#endif
#cmakedefine HAVE_IPPROTO_DSTOPTS
#ifndef HAVE_IPPROTO_DSTOPTS
#define IPPROTO_DSTOPTS 60
#endif
/* IPv6 options structure defined by RFC 3542 */
#cmakedefine HAVE_IP6_OPT
/* Common IPv6 extension structure */
#cmakedefine HAVE_IP6_EXT

38
configure vendored
View file

@ -27,11 +27,13 @@ Usage: $0 [OPTION]... [VAR=VALUE]...
Optional Features:
--enable-debug compile in debugging mode
--enable-brov6 enable IPv6 processing
--enable-mobile-ipv6 analyze mobile IPv6 features defined by RFC 6275
--enable-perftools use Google's perftools
--enable-perftools-debug use Google's perftools for debugging
--disable-broccoli don't build or install the Broccoli library
--disable-broctl don't install Broctl
--disable-auxtools don't build or install auxiliary tools
--disable-python don't try to build python bindings for broccoli
--disable-ruby don't try to build ruby bindings for broccoli
Required Packages in Non-Standard Locations:
--with-openssl=PATH path to OpenSSL install root
@ -49,6 +51,9 @@ Usage: $0 [OPTION]... [VAR=VALUE]...
--with-python=PATH path to Python interpreter
--with-python-lib=PATH path to libpython
--with-python-inc=PATH path to Python headers
--with-ruby=PATH path to ruby interpreter
--with-ruby-lib=PATH path to ruby library
--with-ruby-inc=PATH path to ruby headers
--with-swig=PATH path to SWIG executable
Packaging Options (for developers):
@ -87,14 +92,14 @@ append_cache_entry BRO_ROOT_DIR PATH /usr/local/bro
append_cache_entry PY_MOD_INSTALL_DIR PATH /usr/local/bro/lib/broctl
append_cache_entry BRO_SCRIPT_INSTALL_PATH STRING /usr/local/bro/share/bro
append_cache_entry ENABLE_DEBUG BOOL false
append_cache_entry BROv6 BOOL false
append_cache_entry ENABLE_PERFTOOLS_DEBUG BOOL false
append_cache_entry ENABLE_PERFTOOLS BOOL false
append_cache_entry BinPAC_SKIP_INSTALL BOOL true
append_cache_entry BUILD_SHARED_LIBS BOOL true
append_cache_entry INSTALL_AUX_TOOLS BOOL true
append_cache_entry INSTALL_BROCCOLI BOOL true
append_cache_entry INSTALL_BROCTL BOOL true
append_cache_entry CPACK_SOURCE_IGNORE_FILES STRING
append_cache_entry ENABLE_MOBILE_IPV6 BOOL false
# parse arguments
while [ $# -ne 0 ]; do
@ -129,11 +134,11 @@ while [ $# -ne 0 ]; do
--enable-debug)
append_cache_entry ENABLE_DEBUG BOOL true
;;
--enable-brov6)
append_cache_entry BROv6 BOOL true
--enable-mobile-ipv6)
append_cache_entry ENABLE_MOBILE_IPV6 BOOL true
;;
--enable-perftools)
append_cache_entry ENABLE_PERFTOOLS BOOL true
--enable-perftools-debug)
append_cache_entry ENABLE_PERFTOOLS_DEBUG BOOL true
;;
--disable-broccoli)
append_cache_entry INSTALL_BROCCOLI BOOL false
@ -144,6 +149,12 @@ while [ $# -ne 0 ]; do
--disable-auxtools)
append_cache_entry INSTALL_AUX_TOOLS BOOL false
;;
--disable-python)
append_cache_entry DISABLE_PYTHON_BINDINGS BOOL true
;;
--disable-ruby)
append_cache_entry DISABLE_RUBY_BINDINGS BOOL true
;;
--with-openssl=*)
append_cache_entry OpenSSL_ROOT_DIR PATH $optarg
;;
@ -172,7 +183,6 @@ while [ $# -ne 0 ]; do
append_cache_entry LibGeoIP_ROOT_DIR PATH $optarg
;;
--with-perftools=*)
append_cache_entry ENABLE_PERFTOOLS BOOL true
append_cache_entry GooglePerftools_ROOT_DIR PATH $optarg
;; ;;
--with-python=*) --with-python=*)
@ -185,6 +195,16 @@ while [ $# -ne 0 ]; do
append_cache_entry PYTHON_INCLUDE_DIR PATH $optarg append_cache_entry PYTHON_INCLUDE_DIR PATH $optarg
append_cache_entry PYTHON_INCLUDE_PATH PATH $optarg append_cache_entry PYTHON_INCLUDE_PATH PATH $optarg
;; ;;
--with-ruby=*)
append_cache_entry RUBY_EXECUTABLE PATH $optarg
;;
--with-ruby-lib=*)
append_cache_entry RUBY_LIBRARY PATH $optarg
;;
--with-ruby-inc=*)
append_cache_entry RUBY_INCLUDE_DIRS PATH $optarg
append_cache_entry RUBY_INCLUDE_PATH PATH $optarg
;;
--with-swig=*)
append_cache_entry SWIG_EXECUTABLE PATH $optarg
;;

2
doc/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
html
*.pyc

1
doc/CHANGES Symbolic link
View file

@ -0,0 +1 @@
../CHANGES

View file

@ -1 +1,75 @@
set(BIF_SRC_DIR ${PROJECT_SOURCE_DIR}/src)
set(RST_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/rest_output)
set(DOC_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/out)
set(DOC_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR})
set(DOC_SOURCE_WORKDIR ${CMAKE_CURRENT_BINARY_DIR}/sphinx-sources)
set(MASTER_POLICY_INDEX ${CMAKE_CURRENT_BINARY_DIR}/scripts/policy_index)
set(MASTER_PACKAGE_INDEX ${CMAKE_CURRENT_BINARY_DIR}/scripts/pkg_index)
file(GLOB_RECURSE DOC_SOURCES FOLLOW_SYMLINKS "*")
# configure the Sphinx config file (expand variables CMake might know about)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in
${CMAKE_CURRENT_BINARY_DIR}/conf.py
@ONLY)
add_subdirectory(scripts)
# The "broxygen" target generates reST documentation for any outdated bro
# scripts and then uses Sphinx to generate HTML documentation from the reST
add_custom_target(broxygen
# copy the template documentation to the build directory
# to give as input for sphinx
COMMAND "${CMAKE_COMMAND}" -E copy_directory
${DOC_SOURCE_DIR}
${DOC_SOURCE_WORKDIR}
# copy generated policy script documentation into the
# working copy of the template documentation
COMMAND "${CMAKE_COMMAND}" -E copy_directory
${RST_OUTPUT_DIR}
${DOC_SOURCE_WORKDIR}/scripts
# append to the master index of all policy scripts
COMMAND cat ${MASTER_POLICY_INDEX} >>
${DOC_SOURCE_WORKDIR}/scripts/index.rst
# append to the master index of all policy packages
COMMAND cat ${MASTER_PACKAGE_INDEX} >>
${DOC_SOURCE_WORKDIR}/scripts/packages.rst
# construct a reST file for each group
COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bin/group_index_generator.py
${CMAKE_CURRENT_BINARY_DIR}/scripts/group_list
${CMAKE_CURRENT_BINARY_DIR}/scripts
${DOC_SOURCE_WORKDIR}/scripts
# tell sphinx to generate html
COMMAND sphinx-build
-b html
-c ${CMAKE_CURRENT_BINARY_DIR}
-d ${DOC_OUTPUT_DIR}/doctrees
${DOC_SOURCE_WORKDIR}
${DOC_OUTPUT_DIR}/html
# create symlink to the html output directory for convenience
COMMAND "${CMAKE_COMMAND}" -E create_symlink
${DOC_OUTPUT_DIR}/html
${CMAKE_BINARY_DIR}/html
# copy Broccoli API reference into output dir if it exists
COMMAND test -d ${CMAKE_BINARY_DIR}/aux/broccoli/doc/html && ( rm -rf ${CMAKE_BINARY_DIR}/html/broccoli-api && cp -r ${CMAKE_BINARY_DIR}/aux/broccoli/doc/html ${CMAKE_BINARY_DIR}/html/broccoli-api ) || true
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "[Sphinx] Generating HTML policy script docs"
# SOURCES just adds stuff to IDE projects as a convenience
SOURCES ${DOC_SOURCES})
# The "broxygenclean" target removes just the Sphinx input/output directories
# from the build directory.
add_custom_target(broxygenclean
COMMAND "${CMAKE_COMMAND}" -E remove_directory
${DOC_SOURCE_WORKDIR}
COMMAND "${CMAKE_COMMAND}" -E remove_directory
${DOC_OUTPUT_DIR}
VERBATIM)
add_dependencies(broxygen broxygenclean restdoc)
add_custom_target(doc)
add_custom_target(docclean)
add_dependencies(doc broxygen)
add_dependencies(docclean broxygenclean restclean)

1
doc/INSTALL.rst Symbolic link
View file

@ -0,0 +1 @@
../INSTALL

View file

@ -1 +1,47 @@
TODO
Documentation
=============
This directory contains Bro documentation in reStructuredText format
(see http://docutils.sourceforge.net/rst.html).
It is the root of a Sphinx source tree and can be modified to add more
common/general documentation, style sheets, JavaScript, etc. The Sphinx
config file is produced from ``conf.py.in``, and can be edited to change
various Sphinx options.
There is also a custom Sphinx domain implemented in ``source/ext/bro.py``
which adds some reST directives and roles that aid in generating useful
index entries and cross-references. Other extensions can be added in
a similar fashion.
Either the ``make doc`` or the ``make broxygen`` target in the top-level
Makefile can be used to locally render the reST files into HTML.
Those targets depend on:
* Python interpreter >= 2.5
* `Sphinx <http://sphinx.pocoo.org/>`_ >= 1.0.1
After completion, HTML documentation is symlinked in ``build/html``.
There are also ``make docclean`` and ``make broxygenclean`` targets to
clean the resulting documentation.
Notes for Writing Documentation
-------------------------------
* If you want to refer to a document that's part of the
distribution, it currently needs to be copied or otherwise symlinked
somewhere into this Sphinx source tree. Then, it can be referenced
in a toc tree or with the :doc: role. Use the :download: role to
refer to static files that will not undergo sphinx rendering.
* If you want to refer to a page on the Bro web site, use an HTTP URL.
Guidelines
----------
TODO.

1
doc/_static/960.css vendored Normal file

File diff suppressed because one or more lines are too long

513
doc/_static/basic.css vendored Normal file
View file

@ -0,0 +1,513 @@
/*
* basic.css
* ~~~~~~~~~
*
* Sphinx stylesheet -- basic theme.
*
* :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
* :license: BSD, see LICENSE for details.
*
*/
/* -- main layout ----------------------------------------------------------- */
div.clearer {
clear: both;
}
/* -- relbar ---------------------------------------------------------------- */
div.related {
width: 100%;
font-size: 90%;
}
div.related h3 {
display: none;
}
div.related ul {
margin: 0;
padding: 0 0 0 10px;
list-style: none;
}
div.related li {
display: inline;
}
div.related li.right {
float: right;
margin-right: 5px;
}
/* -- sidebar --------------------------------------------------------------- */
div.sphinxsidebarwrapper {
padding: 10px 5px 0 10px;
}
div.sphinxsidebar {
float: left;
width: 230px;
margin-left: -100%;
font-size: 90%;
}
div.sphinxsidebar ul {
list-style: none;
}
div.sphinxsidebar ul ul,
div.sphinxsidebar ul.want-points {
margin-left: 20px;
list-style: square;
}
div.sphinxsidebar ul ul {
margin-top: 0;
margin-bottom: 0;
}
div.sphinxsidebar form {
margin-top: 10px;
}
div.sphinxsidebar input {
border: 1px solid #98dbcc;
font-family: sans-serif;
font-size: 1em;
}
div.sphinxsidebar input[type="text"] {
width: 170px;
}
div.sphinxsidebar input[type="submit"] {
width: 30px;
}
img {
border: 0;
}
/* -- search page ----------------------------------------------------------- */
ul.search {
margin: 10px 0 0 20px;
padding: 0;
}
ul.search li {
padding: 5px 0 5px 20px;
background-image: url(file.png);
background-repeat: no-repeat;
background-position: 0 7px;
}
ul.search li a {
font-weight: bold;
}
ul.search li div.context {
color: #888;
margin: 2px 0 0 30px;
text-align: left;
}
ul.keywordmatches li.goodmatch a {
font-weight: bold;
}
/* -- index page ------------------------------------------------------------ */
table.contentstable {
width: 90%;
}
table.contentstable p.biglink {
line-height: 150%;
}
a.biglink {
font-size: 1.3em;
}
span.linkdescr {
font-style: italic;
padding-top: 5px;
font-size: 90%;
}
/* -- general index --------------------------------------------------------- */
table.indextable {
width: 100%;
}
table.indextable td {
text-align: left;
vertical-align: top;
}
table.indextable dl, table.indextable dd {
margin-top: 0;
margin-bottom: 0;
}
table.indextable tr.pcap {
height: 10px;
}
table.indextable tr.cap {
margin-top: 10px;
background-color: #f2f2f2;
}
img.toggler {
margin-right: 3px;
margin-top: 3px;
cursor: pointer;
}
div.modindex-jumpbox {
border-top: 1px solid #ddd;
border-bottom: 1px solid #ddd;
margin: 1em 0 1em 0;
padding: 0.4em;
}
div.genindex-jumpbox {
border-top: 1px solid #ddd;
border-bottom: 1px solid #ddd;
margin: 1em 0 1em 0;
padding: 0.4em;
}
/* -- general body styles --------------------------------------------------- */
a.headerlink {
visibility: hidden;
}
div.body p.caption {
text-align: inherit;
}
div.body td {
text-align: left;
}
.field-list ul {
padding-left: 1em;
}
.first {
margin-top: 0 !important;
}
p.rubric {
margin-top: 30px;
font-weight: bold;
}
img.align-left, .figure.align-left, object.align-left {
clear: left;
float: left;
margin-right: 1em;
}
img.align-right, .figure.align-right, object.align-right {
clear: right;
float: right;
margin-left: 1em;
}
img.align-center, .figure.align-center, object.align-center {
display: block;
margin-left: auto;
margin-right: auto;
}
.align-left {
text-align: left;
}
.align-center {
text-align: center;
}
.align-right {
text-align: right;
}
/* -- sidebars -------------------------------------------------------------- */
div.sidebar {
margin: 0 0 0.5em 1em;
border: 1px solid #ddb;
padding: 7px 7px 0 7px;
background-color: #ffe;
width: 40%;
float: right;
}
p.sidebar-title {
font-weight: bold;
}
/* -- topics ---------------------------------------------------------------- */
div.topic {
border: 1px solid #ccc;
padding: 7px 7px 0 7px;
margin: 10px 0 10px 0;
}
p.topic-title {
font-size: 1.1em;
font-weight: bold;
margin-top: 10px;
}
/* -- admonitions ----------------------------------------------------------- */
div.admonition {
margin-top: 10px;
margin-bottom: 10px;
padding: 7px;
}
div.admonition dt {
font-weight: bold;
}
div.admonition dl {
margin-bottom: 0;
}
p.admonition-title {
margin: 0px 10px 5px 0px;
font-weight: bold;
}
div.body p.centered {
text-align: center;
margin-top: 25px;
}
/* -- tables ---------------------------------------------------------------- */
table.field-list td, table.field-list th {
border: 0 !important;
}
table.footnote td, table.footnote th {
border: 0 !important;
}
th {
text-align: left;
padding-right: 5px;
}
table.citation {
border-left: solid 1px gray;
margin-left: 1px;
}
table.citation td {
border-bottom: none;
}
/* -- other body styles ----------------------------------------------------- */
ol.arabic {
list-style: decimal;
}
ol.loweralpha {
list-style: lower-alpha;
}
ol.upperalpha {
list-style: upper-alpha;
}
ol.lowerroman {
list-style: lower-roman;
}
ol.upperroman {
list-style: upper-roman;
}
dd p {
margin-top: 0px;
}
dd ul, dd table {
margin-bottom: 10px;
}
dd {
margin-top: 3px;
margin-bottom: 10px;
margin-left: 30px;
}
dt:target, .highlighted {
background-color: #fbe54e;
}
dl.glossary dt {
font-weight: bold;
font-size: 1.1em;
}
.field-list ul {
margin: 0;
padding-left: 1em;
}
.field-list p {
margin: 0;
}
.refcount {
color: #060;
}
.optional {
font-size: 1.3em;
}
.versionmodified {
font-style: italic;
}
.system-message {
background-color: #fda;
padding: 5px;
border: 3px solid red;
}
.footnote:target {
background-color: #ffa;
}
.line-block {
display: block;
margin-top: 1em;
margin-bottom: 1em;
}
.line-block .line-block {
margin-top: 0;
margin-bottom: 0;
margin-left: 1.5em;
}
.guilabel, .menuselection {
font-family: sans-serif;
}
.accelerator {
text-decoration: underline;
}
.classifier {
font-style: oblique;
}
abbr, acronym {
border-bottom: dotted 1px;
cursor: help;
}
/* -- code displays --------------------------------------------------------- */
pre {
overflow: auto;
overflow-y: hidden; /* fixes display issues on Chrome browsers */
}
td.linenos pre {
padding: 5px 0px;
border: 0;
background-color: transparent;
color: #aaa;
}
table.highlighttable {
margin-left: 0.5em;
}
table.highlighttable td {
padding: 0 0.5em 0 0.5em;
}
tt.descname {
background-color: transparent;
font-weight: bold;
# font-size: 1.2em;
}
tt.descclassname {
background-color: transparent;
}
tt.xref, a tt {
background-color: transparent;
# font-weight: bold;
}
h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
background-color: transparent;
}
.viewcode-link {
float: right;
}
.viewcode-back {
float: right;
font-family: sans-serif;
}
div.viewcode-block:target {
margin: -1px -10px;
padding: 0 10px;
}
/* -- math display ---------------------------------------------------------- */
img.math {
vertical-align: middle;
}
div.body div.math p {
text-align: center;
}
span.eqno {
float: right;
}
/* -- printout stylesheet --------------------------------------------------- */
@media print {
div.document,
div.documentwrapper,
div.bodywrapper {
margin: 0 !important;
width: 100%;
}
div.sphinxsidebar,
div.related,
div.footer,
#top-link {
display: none;
}
}

160
doc/_static/broxygen-extra.css vendored Normal file
View file

@ -0,0 +1,160 @@
a.toc-backref {
color: #333;
}
h1, h2, h3, h4, h5, h6,
h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {
padding:0 0 0px 0;
}
ul {
padding-bottom: 0px;
}
h1 {
font-weight: bold;
font-size: 32px;
line-height:32px;
text-align: center;
padding-top: 3px;
margin-bottom: 30px;
font-family: Palatino,'Palatino Linotype',Georgia,serif;;
color: #000;
border-bottom: 0px;
}
th.field-name
{
white-space:nowrap;
}
h2 {
margin-top: 50px;
padding-bottom: 5px;
margin-bottom: 30px;
border-bottom: 1px solid;
border-color: #aaa;
font-style: normal;
}
div.section h3 {
font-style: normal;
}
h3 {
font-size: 20px;
margin-top: 40px;
margin-bottom: 0px;
font-weight: bold;
font-style: normal;
}
h3.widgettitle {
font-style: normal;
}
h4 {
font-size:18px;
font-style: normal;
margin-bottom: 0em;
margin-top: 40px;
font-style: italic;
}
h5 {
font-size:16px;
}
h6 {
font-size:15px;
}
.toc-backref {
color: #333;
}
.contents ul {
padding-bottom: 1em;
}
dl.namespace {
display: none;
}
dl dt {
font-weight: normal;
}
table.docutils tbody {
margin: 1em 1em 1em 1em;
}
table.docutils td {
padding: 5pt 5pt 5pt 5pt;
font-size: 14px;
border-left: 0;
border-right: 0;
}
dl pre {
font-size: 14px;
}
table.docutils th {
padding: 5pt 5pt 5pt 5pt;
font-size: 14px;
font-style: normal;
border-left: 0;
border-right: 0;
}
table.docutils tr:first-child td {
#border-top: 1px solid #aaa;
}
.download {
font-family:"Courier New", Courier, mono;
font-weight: normal;
}
dt:target, .highlighted {
background-color: #ccc;
}
p {
padding-bottom: 0px;
}
p.last {
margin-bottom: 0px;
}
dl {
padding: 1em 1em 1em 1em;
background: #fffff0;
border: 1px solid #aaa;
}
dl {
margin-bottom: 10px;
}
table.docutils {
background: #fffff0;
border-collapse: collapse;
border: 1px solid #ddd;
}
dl table.docutils {
border: 0;
}
table.docutils dl {
border: 1px dashed #666;
}

437
doc/_static/broxygen.css vendored Normal file
View file

@ -0,0 +1,437 @@
/* Automatically generated. Do not edit. */
#bro-main, #bro-standalone-main {
padding: 0 0 0 0;
position:relative;
z-index:1;
}
#bro-main {
margin-bottom: 2em;
}
#bro-standalone-main {
margin-bottom: 0em;
padding-left: 50px;
padding-right: 50px;
}
#bro-outer {
color: #333;
background: #ffffff;
}
#bro-title {
font-weight: bold;
font-size: 32px;
line-height:32px;
text-align: center;
padding-top: 3px;
margin-bottom: 30px;
font-family: Palatino,'Palatino Linotype',Georgia,serif;;
color: #000;
}
.opening:first-letter {
font-size: 24px;
font-weight: bold;
letter-spacing: 0.05em;
}
.opening {
font-size: 17px;
}
.version {
text-align: right;
font-size: 12px;
color: #aaa;
line-height: 0;
height: 0;
}
.git-info-version {
position: relative;
height: 2em;
top: -1em;
color: #ccc;
float: left;
font-size: 12px;
}
.git-info-date {
position: relative;
height: 2em;
top: -1em;
color: #ccc;
float: right;
font-size: 12px;
}
body {
font-family:Arial, Helvetica, sans-serif;
font-size:15px;
line-height:22px;
color: #333;
margin: 0px;
}
h1, h2, h3, h4, h5, h6,
h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {
padding:0 0 20px 0;
font-weight:bold;
text-decoration:none;
}
div.section h3, div.section h4, div.section h5, div.section h6 {
font-style: italic;
}
h1, h2 {
font-size:27px;
letter-spacing:-1px;
}
h3 {
margin-top: 1em;
font-size:18px;
}
h4 {
font-size:16px;
}
h5 {
font-size:15px;
}
h6 {
font-size:12px;
}
p {
padding:0 0 20px 0;
}
hr {
background:none;
height:1px;
line-height:1px;
border:0;
margin:0 0 20px 0;
}
ul, ol {
margin:0 20px 20px 0;
padding-left:40px;
}
ul.simple, ol.simple {
margin:0 0px 0px 0;
}
blockquote {
margin:0 0 0 40px;
}
strong, dfn {
font-weight:bold;
}
em, dfn {
font-style:italic;
}
sup, sub {
line-height:0;
}
pre {
white-space:pre;
}
pre, code, tt {
font-family:"Courier New", Courier, mono;
}
dl {
margin: 0 0 20px 0;
}
dl dt {
font-weight: bold;
}
dd {
margin:0 0 20px 20px;
}
small {
font-size:75%;
}
a:link,
a:visited,
a:active
{
color: #2a85a7;
}
a:hover
{
color:#c24444;
}
h1, h2, h3, h4, h5, h6,
h1 a, h2 a, h3 a, h4 a, h5 a, h6 a
{
color: #333;
}
hr {
border-bottom:1px solid #ddd;
}
pre {
color: #333;
background: #FFFAE2;
padding: 7px 5px 3px 5px;
margin-bottom: 25px;
margin-top: 0px;
}
ul {
padding-bottom: 5px;
}
h1, h2 {
margin-top: 30px;
}
h1 {
margin-bottom: 50px;
margin-bottom: 20px;
padding-bottom: 5px;
border-bottom: 1px solid;
border-color: #aaa;
}
h2 {
font-size: 24px;
}
pre {
-moz-box-shadow:0 0 6px #ddd;
-webkit-box-shadow:0 0 6px #ddd;
box-shadow:0 0 6px #ddd;
}
a {
text-decoration:none;
}
p {
padding-bottom: 15px;
}
p, dd, li {
text-align: justify;
}
li {
margin-bottom: 5px;
}
#footer .widget_links ul a,
#footer .widget_links ol a
{
color: #ddd;
}
#footer .widget_links ul a:hover,
#footer .widget_links ol a:hover
{
color:#c24444;
}
#footer .widget li {
padding-bottom:10px;
}
#footer .widget_links li {
padding-bottom:1px;
}
#footer .widget li:last-child {
padding-bottom:0;
}
#footer .widgettitle {
color: #ddd;
}
.widget {
margin:0 0 40px 0;
}
.widget, .widgettitle {
font-size:12px;
line-height:18px;
}
.widgettitle {
font-weight:bold;
text-transform:uppercase;
padding:0 0 10px 0;
margin:0 0 20px 0;
line-height:100%;
}
.widget UL, .widget OL {
list-style-type:none;
margin:0;
padding:0;
}
.widget p {
padding:0;
}
.widget li {
padding-bottom:10px;
}
.widget a {
text-decoration:none;
}
#bro-main .widgettitle,
{
color: #333;
}
.widget img.left {
padding:5px 10px 10px 0;
}
.widget img.right {
padding:5px 0 10px 10px;
}
.ads .widgettitle {
margin-right:16px;
}
.widget {
margin-left: 1em;
}
.widgettitle {
color: #333;
}
.widgettitle {
border-bottom:1px solid #ddd;
}
.sidebar-toc ul li {
padding-bottom: 0px;
text-align: left;
list-style-type: square;
list-style-position: inside;
padding-left: 1em;
text-indent: -1em;
}
.sidebar-toc ul li li {
margin-left: 1em;
margin-bottom: 0px;
list-style-type: square;
}
.sidebar-toc ul li li a {
font-size: 8pt;
}
.contents {
padding: 10px;
background: #FFFAE2;
margin: 20px;
}
.topic-title {
font-size: 20px;
font-weight: bold;
padding: 0px 0px 5px 0px;
text-align: center;
padding-top: .5em;
}
.contents li {
margin-bottom: 0px;
list-style-type: square;
}
.contents ul ul li {
margin-left: 0px;
padding-left: 0px;
padding-top: 0em;
font-size: 90%;
list-style-type: square;
font-weight: normal;
}
.contents ul ul ul li {
list-style-type: none;
}
.contents ul ul ul ul li {
display:none;
}
.contents ul li {
padding-top: 1em;
list-style-type: none;
font-weight: bold;
}
.contents ul {
margin-left: 0px;
padding-left: 2em;
margin: 0px 0px 0px 0px;
}
.note, .warning, .error {
margin-left: 2em;
margin-right: 2em;
margin-top: 1.5em;
margin-bottom: 1.5em;
padding: 0.5em 1em 0.5em 1em;
overflow: auto;
border-left: solid 3px #aaa;
font-size: 15px;
color: #333;
}
.admonition p {
margin-left: 1em;
}
.admonition-title {
font-size: 16px;
font-weight: bold;
color: #000;
padding-bottom: 0em;
margin-bottom: .5em;
margin-top: 0em;
}

BIN
doc/_static/logo-bro.png vendored Normal file

Binary file not shown.


58
doc/_static/pygments.css vendored Normal file
View file

@ -0,0 +1,58 @@
.hll { background-color: #ffffcc }
.c { color: #aaaaaa; font-style: italic } /* Comment */
.err { color: #F00000; background-color: #F0A0A0 } /* Error */
.k { color: #0000aa } /* Keyword */
.cm { color: #aaaaaa; font-style: italic } /* Comment.Multiline */
.cp { color: #4c8317 } /* Comment.Preproc */
.c1 { color: #aaaaaa; font-style: italic } /* Comment.Single */
.cs { color: #0000aa; font-style: italic } /* Comment.Special */
.gd { color: #aa0000 } /* Generic.Deleted */
.ge { font-style: italic } /* Generic.Emph */
.gr { color: #aa0000 } /* Generic.Error */
.gh { color: #000080; font-weight: bold } /* Generic.Heading */
.gi { color: #00aa00 } /* Generic.Inserted */
.go { color: #888888 } /* Generic.Output */
.gp { color: #555555 } /* Generic.Prompt */
.gs { font-weight: bold } /* Generic.Strong */
.gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.gt { color: #aa0000 } /* Generic.Traceback */
.kc { color: #0000aa } /* Keyword.Constant */
.kd { color: #0000aa } /* Keyword.Declaration */
.kn { color: #0000aa } /* Keyword.Namespace */
.kp { color: #0000aa } /* Keyword.Pseudo */
.kr { color: #0000aa } /* Keyword.Reserved */
.kt { color: #00aaaa } /* Keyword.Type */
.m { color: #009999 } /* Literal.Number */
.s { color: #aa5500 } /* Literal.String */
.na { color: #1e90ff } /* Name.Attribute */
.nb { color: #00aaaa } /* Name.Builtin */
.nc { color: #00aa00; text-decoration: underline } /* Name.Class */
.no { color: #aa0000 } /* Name.Constant */
.nd { color: #888888 } /* Name.Decorator */
.ni { color: #800000; font-weight: bold } /* Name.Entity */
.nf { color: #00aa00 } /* Name.Function */
.nn { color: #00aaaa; text-decoration: underline } /* Name.Namespace */
.nt { color: #1e90ff; font-weight: bold } /* Name.Tag */
.nv { color: #aa0000 } /* Name.Variable */
.ow { color: #0000aa } /* Operator.Word */
.w { color: #bbbbbb } /* Text.Whitespace */
.mf { color: #009999 } /* Literal.Number.Float */
.mh { color: #009999 } /* Literal.Number.Hex */
.mi { color: #009999 } /* Literal.Number.Integer */
.mo { color: #009999 } /* Literal.Number.Oct */
.sb { color: #aa5500 } /* Literal.String.Backtick */
.sc { color: #aa5500 } /* Literal.String.Char */
.sd { color: #aa5500 } /* Literal.String.Doc */
.s2 { color: #aa5500 } /* Literal.String.Double */
.se { color: #aa5500 } /* Literal.String.Escape */
.sh { color: #aa5500 } /* Literal.String.Heredoc */
.si { color: #aa5500 } /* Literal.String.Interpol */
.sx { color: #aa5500 } /* Literal.String.Other */
.sr { color: #009999 } /* Literal.String.Regex */
.s1 { color: #aa5500 } /* Literal.String.Single */
.ss { color: #0000aa } /* Literal.String.Symbol */
.bp { color: #00aaaa } /* Name.Builtin.Pseudo */
.vc { color: #aa0000 } /* Name.Variable.Class */
.vg { color: #aa0000 } /* Name.Variable.Global */
.vi { color: #aa0000 } /* Name.Variable.Instance */
.il { color: #009999 } /* Literal.Number.Integer.Long */

113
doc/_templates/layout.html vendored Normal file
View file

@ -0,0 +1,113 @@
{% extends "!layout.html" %}
{% block extrahead %}
<link rel="stylesheet" type="text/css" href="{{ pathto('_static/broxygen.css', 1) }}" />
<link rel="stylesheet" type="text/css" href="{{ pathto('_static/960.css', 1) }}" />
<link rel="stylesheet" type="text/css" href="{{ pathto('_static/pygments.css', 1) }}" />
<link rel="stylesheet" type="text/css" href="{{ pathto('_static/broxygen-extra.css', 1) }}" />
<script type="text/javascript" src="{{ pathto('_static/broxygen-extra.js', 1) }}"></script>
{% endblock %}
{% block header %}
<iframe src="http://www.bro-ids.org/frames/header-no-logo.html" width="100%" height="100px" frameborder="0" marginheight="0" scrolling="no" marginwidth="0">
</iframe>
{% endblock %}
{% block relbar2 %}{% endblock %}
{% block relbar1 %}{% endblock %}
{% block content %}
<div id="bro-main" class="clearfix">
<div class="container_12">
<div class="grid_9">
<div>
{{ relbar() }}
</div>
<div class="body">
{% block body %}
{% endblock %}
</div>
</div>
<!-- Sidebar -->
<div class="grid_3 omega">
<div>
<img id="logo" src="{{pathto('_static/logo-bro.png', 1)}}" alt="Logo" />
</div>
<br />
<div class="widget sidebar-toc">
<h3 class="widgettitle">
Table of Contents
</h3>
<p>
<!-- <ul id="sidebar-toc"></ul> -->
<ul>{{toc}}</ul>
</p>
</div>
{% if next %}
<div class="widget">
<h3 class="widgettitle">
Next Page
</h3>
<p>
<a href="{{ next.link|e }}">{{ next.title }}</a>
</p>
</div>
{% endif %}
{% if prev %}
<div class="widget">
<h3 class="widgettitle">
Previous Page
</h3>
<p>
<a href="{{ prev.link|e }}">{{ prev.title }}</a>
</p>
</div>
{% endif %}
{%- if pagename != "search" %}
<div id="searchbox" style="display: none" class="widget">
<h3 class="widgettitle">{{ _('Search') }}</h3>
<form class="search" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" />
<input type="submit" value="{{ _('Search') }}" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
{%- endif %}
</div>
</div>
<div class="container_12">
<div class="grid_12 alpha omega">
<div class="center">
<small>
Copyright {{ copyright }}.
Last updated on {{ last_updated }}.
Created using <a href="http://sphinx.pocoo.org/">Sphinx</a> {{ sphinx_version }}.
</small>
</div>
</div>
</div>
</div>
{% endblock %}
{% block footer %}
<iframe src="http://www.bro-ids.org/frames/footer.html" width="100%" height="420px" frameborder="0" marginheight="0" scrolling="no" marginwidth="0">
</iframe>
{% endblock %}

View file

@ -49,6 +49,7 @@ with open(group_list, 'r') as f_group_list:
if not os.path.exists(os.path.dirname(group_file)): if not os.path.exists(os.path.dirname(group_file)):
os.makedirs(os.path.dirname(group_file)) os.makedirs(os.path.dirname(group_file))
with open(group_file, 'w') as f_group_file: with open(group_file, 'w') as f_group_file:
f_group_file.write(":orphan:\n\n")
title = "Package Index: %s\n" % os.path.dirname(group) title = "Package Index: %s\n" % os.path.dirname(group)
f_group_file.write(title); f_group_file.write(title);
for n in range(len(title)): for n in range(len(title)):

84
doc/cluster.rst Normal file
View file

@ -0,0 +1,84 @@
Bro Cluster
===========
Intro
------
Bro is not multithreaded, so once the limitations of a single processor core are reached, the only option currently is to spread the workload across many cores or even many physical computers. The cluster deployment scenario for Bro is the current solution for building these larger systems. The accompanying tools and scripts provide the structure to easily manage many Bro processes that examine packets and do correlation activities while still acting as a singular, cohesive entity.
Architecture
---------------
The figure below illustrates the main components of a Bro cluster.
.. image:: images/deployment.png
Tap
***
This is a mechanism that splits the packet stream in order to make a copy
available for inspection. Examples include the monitoring port on a switch and
an optical splitter for fiber networks.
Frontend
********
This is a discrete hardware device or on-host technique that will split your traffic into many streams or flows. The Bro binary does not do this job. There are numerous ways to accomplish this task, some of which are described below in `Frontend Options`_.
Manager
*******
This is a Bro process which has two primary jobs. It receives log messages and notices from the rest of the nodes in the cluster using the Bro communications protocol. The result is that you end up with a single log for each log stream instead of many discrete logs that you would later have to combine in some manner with post-processing. The manager also de-duplicates notices, and it is able to do so because it acts as the choke point for notices and for how notices are processed into actions such as emailing, paging, or blocking.
The manager process is started first by BroControl; it only opens its designated port and waits for connections, and it doesn't initiate any connections to the rest of the cluster. Once the workers are started and connect to the manager, logs and notices will start arriving at the manager process from the workers.
Proxy
*****
This is a Bro process which manages synchronized state. Variables can be synchronized across connected Bro processes automatically, and proxies help the workers by alleviating the need for all of the workers to connect directly to each other.
Examples of synchronized state from the scripts that ship with Bro are the full lists of “known” hosts and services, i.e. hosts or services that have been detected performing full TCP handshakes or on which an analyzed protocol has been found. If worker A detects host 1.2.3.4 as an active host, it would be beneficial for worker B to know that as well, so worker A shares that information as an insertion to a set <link to set documentation would be good here> which travels to the cluster's proxy, and the proxy then sends that same set insertion on to worker B. The result is that worker A and worker B have shared knowledge about the hosts and services that are active on the network being monitored.
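As a rough sketch of what such synchronized state looks like at the script level (the set name and event handler here are made up purely for illustration; the scripts that ship with Bro manage their known-hosts state with more care), a script can declare a set with the ``&synchronized`` attribute so that insertions made on one node propagate through the proxies to the others:

.. code:: bro

    # Illustrative only: a set whose contents are shared across cluster nodes.
    global active_hosts: set[addr] &synchronized;

    event connection_established(c: connection)
        {
        # An insertion made on one worker travels to its proxy and from
        # there on to the other workers.
        add active_hosts[c$id$orig_h];
        }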
The proxy model extends to multiple proxies as well if necessary for performance reasons; it only adds one additional step for the Bro processes. Each proxy connects to another proxy in a ring and the workers are shared between them as evenly as possible. When a proxy receives some new bit of state, it shares that with its neighboring proxy, and the update is then passed around the ring of proxies and down to all of the workers. From a practical standpoint, there are no established rules of thumb yet for the number of proxies needed for a given number of workers. It is best to start with a single proxy and add more if communication performance problems are found.
Bro processes acting as proxies don't tend to be particularly demanding of CPU or memory, and users frequently run proxy processes on the same physical host as the manager.
Worker
******
This is the Bro process that sniffs network traffic and does protocol analysis on the reassembled traffic streams. Most of the work of an active cluster takes place on the workers, and as such the workers typically represent the bulk of the Bro processes that are running in a cluster. The fastest memory and CPU core speed you can afford is best here since all of the protocol parsing and most of the analysis happens on the workers. There are no particular requirements for the disks in workers since almost all logging is done remotely to the manager, and very little is normally written to disk.
The rule of thumb we have followed recently is to allocate approximately 1 core for every 80Mbps of traffic that is being analyzed; however, this estimate can be very specific to the traffic mix. It has generally worked for mixed traffic with many users and servers. For example, if your traffic peaks around 2Gbps (combined) and you want to handle traffic at peak load, you may want to have 26 cores available (2048 / 80 == 25.6). If the 80Mbps estimate works for your traffic, this could be handled by 3 physical hosts dedicated to being workers, with each one containing dual 6-core processors.
Once a flow-based load balancer is put into place, this model is also very easy to scale, so it's reasonable to simply estimate the amount of hardware you will need to fully analyze your traffic. If it turns out that you need more, it's relatively easy to increase the size of the cluster in most cases.
Frontend Options
----------------
There are many options for setting up a frontend flow distributor and in many cases it may even be beneficial to do multiple stages of flow distribution on the network and on the host.
Discrete hardware flow balancers
********************************
cPacket
^^^^^^^
If you are monitoring one or more 10G physical interfaces, the recommended solution is to use either a cFlow or cVu device from cPacket because they are currently being used very successfully at a number of sites. These devices will perform layer-2 load balancing by rewriting the destination ethernet MAC address to cause each packet associated with a particular flow to have the same destination MAC. The packets can then be passed directly to a monitoring host where each worker has a BPF filter to limit its visibility to only that stream of flows or onward to a commodity switch to split the traffic out to multiple 1G interfaces for the workers. This can ultimately greatly reduce costs since workers can use relatively inexpensive 1G interfaces.
OpenFlow Switches
^^^^^^^^^^^^^^^^^
We are currently exploring the use of OpenFlow based switches to do flow based load balancing directly on the switch which can greatly reduce frontend costs for many users. This document will be updated when we have more information.
On host flow balancing
**********************
PF_RING
^^^^^^^
The PF_RING software for Linux has a “clustering” feature which will do flow based load balancing across a number of processes that are sniffing the same interface. This will allow you to easily take advantage of multiple cores in a single physical host because Bro's main event loop is single threaded and can't natively utilize all of the cores. More information about Bro with PF_RING can be found here: (someone want to write a quick Bro/PF_RING tutorial to link to here? document installing kernel module, libpcap wrapper, building Bro with the --with-pcap configure option)
Netmap
^^^^^^
FreeBSD has an in-progress project named Netmap which will enable flow based load balancing as well. When it becomes viable for real world use, this document will be updated.
Click! Software Router
^^^^^^^^^^^^^^^^^^^^^^
Click! can be used for flow based load balancing with a simple configuration. (link to an example for the config). This solution is not recommended on Linux due to Bro's PF_RING support and only as a last resort on other operating systems since it causes a lot of overhead due to context switching back and forth between kernel and userland several times per packet.

View file

@ -0,0 +1 @@
../../../aux/binpac/README

View file

@ -0,0 +1 @@
../../../aux/bro-aux/README

View file

@ -0,0 +1 @@
../../../aux/broccoli/bindings/broccoli-python/README

View file

@ -0,0 +1 @@
../../../aux/broccoli/bindings/broccoli-ruby/README

View file

@ -0,0 +1 @@
../../../aux/broccoli/README

View file

@ -0,0 +1 @@
../../../aux/broccoli/doc/broccoli-manual.rst

View file

@ -0,0 +1 @@
../../../aux/broctl/doc/broctl.rst

View file

@ -0,0 +1 @@
../../../aux/btest/README

View file

@ -0,0 +1 @@
../../../aux/broctl/aux/capstats/README

View file

@ -0,0 +1 @@
../../../aux/broctl/aux/pysubnettree/README

View file

@ -0,0 +1 @@
../../../aux/broctl/aux/trace-summary/README

View file

@ -15,7 +15,7 @@ import sys, os
# If extensions (or modules to document with autodoc) are in another directory, # If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the # add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here. # documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('source/ext')) sys.path.insert(0, os.path.abspath('sphinx-sources/ext'))
# -- General configuration ----------------------------------------------------- # -- General configuration -----------------------------------------------------
@ -24,10 +24,10 @@ sys.path.insert(0, os.path.abspath('source/ext'))
# Add any Sphinx extension module names here, as strings. They can be extensions # Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['bro'] extensions = ['bro', 'rst_directive', 'sphinx.ext.todo', 'adapt-toc']
# Add any paths that contain templates here, relative to this directory. # Add any paths that contain templates here, relative to this directory.
templates_path = ['source/_templates'] templates_path = ['sphinx-sources/_templates', 'sphinx-sources/_static']
# The suffix of source filenames. # The suffix of source filenames.
source_suffix = '.rst' source_suffix = '.rst'
@ -40,7 +40,7 @@ master_doc = 'index'
# General information about the project. # General information about the project.
project = u'Bro' project = u'Bro'
copyright = u'2011, The Bro Project' copyright = u'2012, The Bro Project'
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the # |version| and |release|, also used in various other places throughout the
@ -59,7 +59,7 @@ release = '@VERSION@'
# non-false value, then it is used: # non-false value, then it is used:
#today = '' #today = ''
# Else, today_fmt is used as the format for a strftime call. # Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y' today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and # List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files. # directories to ignore when looking for source files.
@ -90,18 +90,20 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. See the documentation for # The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes. # a list of builtin themes.
html_theme = 'default' html_theme = 'basic'
html_last_updated_fmt = '%B %d, %Y'
# Theme options are theme-specific and customize the look and feel of a theme # Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the # further. For a list of options available for each theme, see the
# documentation. # documentation.
#html_theme_options = {} html_theme_options = { }
# Add any paths that contain custom themes here, relative to this directory. # Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = [] #html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to # The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation". # "<project> v<release> Documentation".
#html_title = None #html_title = None
# A shorter title for the navigation bar. Default is the same as html_title. # A shorter title for the navigation bar. Default is the same as html_title.
@ -119,7 +121,7 @@ html_theme = 'default'
# Add any paths that contain custom static files (such as style sheets) here, # Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files, # relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css". # so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['source/_static'] html_static_path = ['sphinx-sources/_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format. # using the given strftime format.
@ -130,7 +132,9 @@ html_static_path = ['source/_static']
#html_use_smartypants = True #html_use_smartypants = True
# Custom sidebar templates, maps document names to template names. # Custom sidebar templates, maps document names to template names.
#html_sidebars = {} html_sidebars = {
'**': ['localtoc.html', 'sourcelink.html', 'searchbox.html'],
}
# Additional templates that should be rendered to pages, maps page names to # Additional templates that should be rendered to pages, maps page names to
# template names. # template names.
@ -163,8 +167,9 @@ html_static_path = ['source/_static']
#html_file_suffix = None #html_file_suffix = None
# Output file base name for HTML help builder. # Output file base name for HTML help builder.
htmlhelp_basename = 'Brodoc' htmlhelp_basename = 'Broxygen'
html_add_permalinks = None
# -- Options for LaTeX output -------------------------------------------------- # -- Options for LaTeX output --------------------------------------------------
@ -204,7 +209,6 @@ latex_documents = [
# If false, no module index is generated. # If false, no module index is generated.
#latex_domain_indices = True #latex_domain_indices = True
# -- Options for manual page output -------------------------------------------- # -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples # One entry per manual page. List of tuples
@ -213,3 +217,6 @@ man_pages = [
('index', 'bro', u'Bro Documentation', ('index', 'bro', u'Bro Documentation',
[u'The Bro Project'], 1) [u'The Bro Project'], 1)
] ]
# -- Options for todo plugin --------------------------------------------
todo_include_todos=True

29
doc/ext/adapt-toc.py Normal file
View file

@ -0,0 +1,29 @@
import sys
import re
# Removes the first TOC level, which is just the page title.
def process_html_toc(app, pagename, templatename, context, doctree):
if not "toc" in context:
return
toc = context["toc"]
lines = toc.strip().split("\n")
lines = lines[2:-2]
toc = "\n".join(lines)
toc = "<ul>" + toc
context["toc"] = toc
# print >>sys.stderr, pagename
# print >>sys.stderr, context["toc"]
# print >>sys.stderr, "-----"
# print >>sys.stderr, toc
# print >>sys.stderr, "===="
def setup(app):
app.connect('html-page-context', process_html_toc)

View file

@ -4,6 +4,9 @@
def setup(Sphinx): def setup(Sphinx):
Sphinx.add_domain(BroDomain) Sphinx.add_domain(BroDomain)
Sphinx.add_node(see)
Sphinx.add_directive_to_domain('bro', 'see', SeeDirective)
Sphinx.connect('doctree-resolved', process_see_nodes)
from sphinx import addnodes from sphinx import addnodes
from sphinx.domains import Domain, ObjType, Index from sphinx.domains import Domain, ObjType, Index
@ -18,7 +21,57 @@ from docutils.parsers.rst import Directive
from docutils.parsers.rst import directives from docutils.parsers.rst import directives
from docutils.parsers.rst.roles import set_classes from docutils.parsers.rst.roles import set_classes
class see(nodes.General, nodes.Element):
refs = []
class SeeDirective(Directive):
has_content = True
def run(self):
n = see('')
n.refs = string.split(string.join(self.content))
return [n]
def process_see_nodes(app, doctree, fromdocname):
for node in doctree.traverse(see):
content = []
para = nodes.paragraph()
para += nodes.Text("See also:", "See also:")
for name in node.refs:
join_str = " "
if name != node.refs[0]:
join_str = ", "
link_txt = join_str + name;
if name not in app.env.domaindata['bro']['idtypes']:
# Just create the text and issue warning
app.env.warn(fromdocname,
'unknown target for ".. bro:see:: %s"' % (name))
para += nodes.Text(link_txt, link_txt)
else:
# Create a reference
typ = app.env.domaindata['bro']['idtypes'][name]
todocname = app.env.domaindata['bro']['objects'][(typ, name)]
newnode = nodes.reference('', '')
innernode = nodes.literal(_(name), _(name))
newnode['refdocname'] = todocname
newnode['refuri'] = app.builder.get_relative_uri(
fromdocname, todocname)
newnode['refuri'] += '#' + typ + '-' + name
newnode.append(innernode)
para += nodes.Text(join_str, join_str)
para += newnode
content.append(para)
node.replace_self(content)
class BroGeneric(ObjectDescription): class BroGeneric(ObjectDescription):
def update_type_map(self, idname):
if 'idtypes' not in self.env.domaindata['bro']:
self.env.domaindata['bro']['idtypes'] = {}
self.env.domaindata['bro']['idtypes'][idname] = self.objtype
def add_target_and_index(self, name, sig, signode): def add_target_and_index(self, name, sig, signode):
targetname = self.objtype + '-' + name targetname = self.objtype + '-' + name
if targetname not in self.state.document.ids: if targetname not in self.state.document.ids:
@ -29,9 +82,6 @@ class BroGeneric(ObjectDescription):
objects = self.env.domaindata['bro']['objects'] objects = self.env.domaindata['bro']['objects']
key = (self.objtype, name) key = (self.objtype, name)
# this is commented out mostly just to avoid having a special directive
# for events in order to avoid the duplicate warnings in that case
"""
if key in objects: if key in objects:
self.env.warn(self.env.docname, self.env.warn(self.env.docname,
'duplicate description of %s %s, ' % 'duplicate description of %s %s, ' %
@ -39,8 +89,9 @@ class BroGeneric(ObjectDescription):
'other instance in ' + 'other instance in ' +
self.env.doc2path(objects[key]), self.env.doc2path(objects[key]),
self.lineno) self.lineno)
"""
objects[key] = self.env.docname objects[key] = self.env.docname
self.update_type_map(name)
indextext = self.get_index_text(self.objtype, name) indextext = self.get_index_text(self.objtype, name)
if indextext: if indextext:
self.indexnode['entries'].append(('single', indextext, self.indexnode['entries'].append(('single', indextext,
@ -65,6 +116,8 @@ class BroNamespace(BroGeneric):
objects = self.env.domaindata['bro']['objects'] objects = self.env.domaindata['bro']['objects']
key = (self.objtype, name) key = (self.objtype, name)
objects[key] = self.env.docname objects[key] = self.env.docname
self.update_type_map(name)
indextext = self.get_index_text(self.objtype, name) indextext = self.get_index_text(self.objtype, name)
self.indexnode['entries'].append(('single', indextext, self.indexnode['entries'].append(('single', indextext,
targetname, targetname)) targetname, targetname))
@ -91,10 +144,17 @@ class BroEnum(BroGeneric):
objects = self.env.domaindata['bro']['objects'] objects = self.env.domaindata['bro']['objects']
key = (self.objtype, name) key = (self.objtype, name)
objects[key] = self.env.docname objects[key] = self.env.docname
self.update_type_map(name)
indextext = self.get_index_text(self.objtype, name) indextext = self.get_index_text(self.objtype, name)
#self.indexnode['entries'].append(('single', indextext, #self.indexnode['entries'].append(('single', indextext,
# targetname, targetname)) # targetname, targetname))
m = sig.split() m = sig.split()
if m[1] == "Notice::Type":
if 'notices' not in self.env.domaindata['bro']:
self.env.domaindata['bro']['notices'] = []
self.env.domaindata['bro']['notices'].append(
(m[0], self.env.docname, targetname))
self.indexnode['entries'].append(('single', self.indexnode['entries'].append(('single',
"%s (enum values); %s" % (m[1], m[0]), "%s (enum values); %s" % (m[1], m[0]),
targetname, targetname)) targetname, targetname))
@ -113,6 +173,26 @@ class BroAttribute(BroGeneric):
def get_index_text(self, objectname, name): def get_index_text(self, objectname, name):
return _('%s (attribute)') % (name) return _('%s (attribute)') % (name)
class BroNotices(Index):
"""
Index subclass to provide the Bro notices index.
"""
name = 'noticeindex'
localname = l_('Bro Notice Index')
shortname = l_('notices')
def generate(self, docnames=None):
content = {}
for n in self.domain.env.domaindata['bro']['notices']:
modname = n[0].split("::")[0]
entries = content.setdefault(modname, [])
entries.append([n[0], 0, n[1], n[2], '', '', ''])
content = sorted(content.iteritems())
return content, False
class BroDomain(Domain): class BroDomain(Domain):
"""Bro domain.""" """Bro domain."""
name = 'bro' name = 'bro'
@ -140,8 +220,13 @@ class BroDomain(Domain):
'id': XRefRole(), 'id': XRefRole(),
'enum': XRefRole(), 'enum': XRefRole(),
'attr': XRefRole(), 'attr': XRefRole(),
'see': XRefRole(),
} }
indices = [
BroNotices,
]
initial_data = { initial_data = {
'objects': {}, # fullname -> docname, objtype 'objects': {}, # fullname -> docname, objtype
} }
@ -154,13 +239,27 @@ class BroDomain(Domain):
def resolve_xref(self, env, fromdocname, builder, typ, target, node, def resolve_xref(self, env, fromdocname, builder, typ, target, node,
contnode): contnode):
objects = self.data['objects'] objects = self.data['objects']
objtypes = self.objtypes_for_role(typ) if typ == "see":
for objtype in objtypes: if target not in self.data['idtypes']:
if (objtype, target) in objects: self.env.warn(fromdocname,
return make_refnode(builder, fromdocname, 'unknown target for ":bro:see:`%s`"' % (target))
objects[objtype, target], return []
objtype + '-' + target, objtype = self.data['idtypes'][target]
contnode, target + ' ' + objtype) return make_refnode(builder, fromdocname,
objects[objtype, target],
objtype + '-' + target,
contnode, target + ' ' + objtype)
else:
objtypes = self.objtypes_for_role(typ)
for objtype in objtypes:
if (objtype, target) in objects:
return make_refnode(builder, fromdocname,
objects[objtype, target],
objtype + '-' + target,
contnode, target + ' ' + objtype)
else:
self.env.warn(fromdocname,
'unknown target for ":bro:%s:`%s`"' % (typ, target))
def get_objects(self): def get_objects(self):
for (typ, name), docname in self.data['objects'].iteritems(): for (typ, name), docname in self.data['objects'].iteritems():

Binary file not shown.

76
doc/ext/bro_lexer/bro.py Normal file
View file

@ -0,0 +1,76 @@
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import *
__all__ = ["BroLexer"]
class BroLexer(RegexLexer):
name = 'Bro'
aliases = ['bro']
filenames = ['*.bro']
_hex = r'[0-9a-fA-F_]+'
_float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
_h = r'[A-Za-z0-9][-A-Za-z0-9]*'
tokens = {
'root': [
# Whitespace
('^@.*?\n', Comment.Preproc),
(r'#.*?\n', Comment.Single),
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text),
# Keywords
(r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
r'|export|for|function|if|global|local|module|next'
r'|of|print|redef|return|schedule|when|while)\b', Keyword),
(r'(addr|any|bool|count|counter|double|file|int|interval|net'
r'|pattern|port|record|set|string|subnet|table|time|timer'
r'|vector)\b', Keyword.Type),
(r'(T|F)\b', Keyword.Constant),
(r'(&)((?:add|delete|expire)_func|attr|(create|read|write)_expire'
r'|default|disable_print_hook|raw_output|encrypt|group|log'
r'|mergeable|optional|persistent|priority|redef'
r'|rotate_(?:interval|size)|synchronized)\b', bygroups(Punctuation,
Keyword)),
(r'\s+module\b', Keyword.Namespace),
# Addresses, ports and networks
(r'\d+/(tcp|udp|icmp|unknown)\b', Number),
(r'(\d+\.){3}\d+', Number),
(r'(' + _hex + r'){7}' + _hex, Number),
(r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number),
(r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number),
(r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number),
# Hostnames
(_h + r'(\.' + _h + r')+', String),
# Numeric
(_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date),
(r'0[xX]' + _hex, Number.Hex),
(_float, Number.Float),
(r'\d+', Number.Integer),
(r'/', String.Regex, 'regex'),
(r'"', String, 'string'),
# Operators
(r'[!%*/+-:<=>?~|]', Operator),
(r'([-+=&|]{2}|[+-=!><]=)', Operator),
(r'(in|match)\b', Operator.Word),
(r'[{}()\[\]$.,;]', Punctuation),
# Identifier
(r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)),
(r'[a-zA-Z_][a-zA-Z_0-9]*', Name)
],
'string': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
(r'[^\\"\n]+', String),
(r'\\\n', String),
(r'\\', String)
],
'regex': [
(r'/', String.Regex, '#pop'),
(r'\\[\\nt/]', String.Regex), # String.Escape is too intense.
(r'[^\\/\n]+', String.Regex),
(r'\\\n', String.Regex),
(r'\\', String.Regex)
]
}

BIN
doc/ext/bro_lexer/bro.pyc Normal file

Binary file not shown.

180
doc/ext/rst_directive.py Normal file
View file

@ -0,0 +1,180 @@
def setup(app):
pass
# -*- coding: utf-8 -*-
"""
Modified version of the Pygments reStructuredText directive. -Robin
This provides two new directives:
- .. code:: [<format>]
Highlights the following code block according to <format> if
given (e.g., "c", "python", etc.).
- .. console::
Highlights the following code block as a shell session.
For compatibility with the original version, "sourcecode" is
equivalent to "code".
Original comment:
The Pygments reStructuredText directive
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This fragment is a Docutils_ 0.5 directive that renders source code
(to HTML only, currently) via Pygments.
To use it, adjust the options below and copy the code into a module
that you import on initialization. The code then automatically
registers a ``sourcecode`` directive that you can use instead of
normal code blocks like this::
.. sourcecode:: python
My code goes here.
If you want to have different code styles, e.g. one with line numbers
and one without, add formatters with their names in the VARIANTS dict
below. You can invoke them instead of the DEFAULT one by using a
directive option::
.. sourcecode:: python
:linenos:
My code goes here.
Look at the `directive documentation`_ to get all the gory details.
.. _Docutils: http://docutils.sf.net/
.. _directive documentation:
http://docutils.sourceforge.net/docs/howto/rst-directives.html
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# Options
# ~~~~~~~
# Set to True if you want inline CSS styles instead of classes
INLINESTYLES = False
from pygments.formatters import HtmlFormatter
class MyHtmlFormatter(HtmlFormatter):
def format_unencoded(self, tokensource, outfile):
# A NOP currently.
new_tokens = []
for (i, piece) in tokensource:
new_tokens += [(i, piece)]
return super(MyHtmlFormatter, self).format_unencoded(new_tokens, outfile)
# The default formatter
DEFAULT = MyHtmlFormatter(noclasses=INLINESTYLES, cssclass="pygments")
# Add name -> formatter pairs for every variant you want to use
VARIANTS = {
# 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True),
}
import textwrap
from docutils import nodes
from docutils.parsers.rst import directives, Directive
from pygments import highlight
from pygments.lexers import get_lexer_by_name, guess_lexer, TextLexer
from pygments.token import Text, Keyword, Error, Operator, Name
from pygments.filter import Filter
# Ugly hack to register the Bro lexer. I'm sure there's a better way to do it,
# but it's not obvious ...
from bro_lexer.bro import BroLexer
from pygments.lexers._mapping import LEXERS
LEXERS['BroLexer'] = ('bro_lexer.bro', BroLexer.name, BroLexer.aliases, BroLexer.filenames, ())
class Pygments(Directive):
""" Source code syntax hightlighting.
"""
#max_line_length = 68
max_line_length = 0
required_arguments = 0
optional_arguments = 1
final_argument_whitespace = True
option_spec = dict([(key, directives.flag) for key in VARIANTS])
has_content = True
def wrapped_content(self):
content = []
if Console.max_line_length:
for line in self.content:
content += textwrap.wrap(line, Console.max_line_length, subsequent_indent=" ")
else:
content = self.content
return u'\n'.join(content)
def run(self):
self.assert_has_content()
content = self.wrapped_content()
if len(self.arguments) > 0:
try:
lexer = get_lexer_by_name(self.arguments[0])
except (ValueError, IndexError):
# lexer not found, use default.
lexer = TextLexer()
else:
lexer = guess_lexer(content)
# import sys
# print >>sys.stderr, self.arguments, lexer.__class__
# take an arbitrary option if more than one is given
formatter = self.options and VARIANTS[self.options.keys()[0]] or DEFAULT
parsed = highlight(content, lexer, formatter)
return [nodes.raw('', parsed, format='html')]
class MyFilter(Filter):
def filter(self, lexer, stream):
bol = True
for (ttype, value) in stream:
# Color the '>' prompt sign.
if bol and ttype is Text and value == ">":
ttype = Name.Variable.Class # This gives us a nice red.
# Discolor builtin, that can look funny.
if ttype is Name.Builtin:
ttype = Text
bol = value.endswith("\n")
yield (ttype, value)
class Console(Pygments):
required_arguments = 0
optional_arguments = 0
def run(self):
self.assert_has_content()
content = self.wrapped_content()
lexer = get_lexer_by_name("sh")
lexer.add_filter(MyFilter())
parsed = highlight(content, lexer, DEFAULT)
return [nodes.raw('', parsed, format='html')]
directives.register_directive('sourcecode', Pygments)
directives.register_directive('code', Pygments)
directives.register_directive('console', Console)

171
doc/faq.rst Normal file
View file

@ -0,0 +1,171 @@
==========================
Frequently Asked Questions
==========================
.. raw:: html
<div class="faq">
.. contents::
Installation and Configuration
==============================
How can I tune my operating system for best capture performance?
----------------------------------------------------------------
Here are some pointers to more information:
* Fabian Schneider's research on `high performance packet capture
<http://www.net.t-labs.tu-berlin.de/research/hppc>`_
* `NSMWiki <http://nsmwiki.org/Main_Page>`_ has a page on
*Collecting Data*.
* An `IMC 2010 paper
<http://conferences.sigcomm.org/imc/2010/papers/p206.pdf>`_ by
Lothar Braun et al. evaluates packet capture performance on
commodity hardware.
Are there any gotchas regarding interface configuration for live capture? Or why might I be seeing abnormally large packets much greater than interface MTU?
-------------------------------------------------------------------------------------------------------------------------------------------------------------
Some NICs offload the reassembly of traffic into "superpackets" so that
fewer packets are then passed up the stack (e.g. "TCP segmentation
offload", or "generic segmentation offload"). The result is that the
capturing application will observe packets much larger than the MTU size
of the interface they were captured from, which may also interfere with the
maximum packet capture length, ``snaplen``, so it's a good idea to disable
an interface's offloading features.
You can use the ``ethtool`` program on Linux to view and disable
offloading features of an interface. See this page for more explicit
directions:
http://securityonion.blogspot.com/2011/10/when-is-full-packet-capture-not-full.html
What does an error message like ``internal error: NB-DNS error`` mean?
---------------------------------------------------------------------------------------------------------------------------------
That often means that DNS is not set up correctly on the system
running Bro. Try verifying from the command line that DNS lookups
work, e.g., ``host www.google.com``.
I am using OpenBSD and having problems installing Bro?
------------------------------------------------------
One potential issue is that the top-level Makefile may not work with
OpenBSD's default make program, in which case you can either install
the ``gmake`` package and use it instead or first change into the
``build/`` directory before doing either ``make`` or ``make install``
such that the CMake-generated Makefiles are used directly.
Generally, please note that we do not regularly test OpenBSD builds.
We appreciate any patches that improve Bro's support for this
platform.
Usage
=====
How can I identify backscatter?
-------------------------------
Identifying backscatter via connections labeled as ``OTH`` is not a reliable
means to detect backscatter. Backscatter is however visible by interpreting
the contents of the ``history`` field in the ``conn.log`` file. The basic idea
is to watch for connections that never had an initial ``SYN`` but started
instead with a ``SYN-ACK`` or ``RST`` (though this latter generally is just
discarded). Here are some history fields which provide backscatter examples:
``hAFf``, ``r``. Refer to the conn protocol analysis scripts to interpret the
individual character meanings in the history field.
Is there help for understanding Bro's resource consumption?
-----------------------------------------------------------
There are two scripts that collect statistics on resource usage:
``misc/stats.bro`` and ``misc/profiling.bro``. The former is quite
lightweight, while the latter should only be used for debugging.
How can I capture packets as an unprivileged user?
--------------------------------------------------
Normally, unprivileged users cannot capture packets from a network interface,
which means they would not be able to use Bro to read/analyze live traffic.
However, there are operating system specific ways to enable packet capture
permission for non-root users, which is worth doing in the context of using
Bro to monitor live traffic.
With Linux Capabilities
^^^^^^^^^^^^^^^^^^^^^^^
Fully implemented since Linux kernel 2.6.24, capabilities are a way of
parceling superuser privileges into distinct units. Attach capabilities
required to capture packets to the ``bro`` executable file like this:
.. console::
sudo setcap cap_net_raw,cap_net_admin=eip /path/to/bro
Now any unprivileged user should have the capability to capture packets
using Bro provided that they have the traditional file permissions to
read/execute the ``bro`` binary.
With BPF Devices
^^^^^^^^^^^^^^^^
Systems using Berkeley Packet Filter (BPF) (e.g. FreeBSD & Mac OS X)
can allow users with read access to a BPF device to capture packets from
it using libpcap.
* Example of manually changing BPF device permissions to allow users in
the ``admin`` group to capture packets:
.. console::
sudo chgrp admin /dev/bpf*
sudo chmod g+r /dev/bpf*
* Example of configuring devfs to set permissions of BPF devices, adding
entries to ``/etc/devfs.conf`` to grant ``admin`` group permission to
capture packets:
.. console::
sudo sh -c 'echo "own bpf root:admin" >> /etc/devfs.conf'
sudo sh -c 'echo "perm bpf 0640" >> /etc/devfs.conf'
sudo service devfs restart
.. note:: As of Mac OS X 10.6, the BPF device is on devfs, but the version of
devfs used isn't capable of setting the device permissions. The permissions
can be changed manually, but they will not survive a reboot.
Why isn't Bro producing the logs I expect? (A Note About Checksums)
-------------------------------------------------------------------
Normally, Bro's event engine will discard packets which don't have valid
checksums. This can be a problem if one wants to analyze locally
generated/captured traffic on a system that offloads checksumming to the
network adapter. In that case, all transmitted/captured packets will have
bad checksums because they haven't yet been calculated by the NIC, thus
such packets will not undergo analysis defined in Bro policy scripts as they
normally would. Bad checksums in traces may also be a result of some packet
alteration tools.
Bro has two options to work around such situations and ignore bad checksums:
1) The ``-C`` command line option to ``bro``.
2) An option called ``ignore_checksums`` that can be redefined at the
policy script layer (e.g. in your ``$PREFIX/share/bro/site/local.bro``):
.. code:: bro
redef ignore_checksums = T;
The other alternative is to disable checksum offloading for your
network adapter, but this is not always possible or desirable.
.. raw:: html
</div>

102
doc/geoip.rst Normal file
View file

@ -0,0 +1,102 @@
===========
GeoLocation
===========
.. rst-class:: opening
During the process of creating policy scripts the need may arise
to find the geographic location for an IP address. Bro has support
for the `GeoIP library <http://www.maxmind.com/app/c>`__ at the
policy script level beginning with release 1.3 to account for this
need.
.. contents::
GeoIPLite Database Installation
------------------------------------
A country database for GeoIPLite is included when you do the C API
install, but for Bro, we are using the city database which includes
cities and regions in addition to countries.
`Download <http://www.maxmind.com/app/geolitecity>`__ the geolitecity
binary database and follow the directions to install it.
FreeBSD Quick Install
---------------------
.. console::
pkg_add -r GeoIP
wget http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
gunzip GeoLiteCity.dat.gz
mv GeoLiteCity.dat /usr/local/share/GeoIP/GeoIPCity.dat
# Set your environment correctly before running Bro's configure script
export CFLAGS=-I/usr/local/include
export LDFLAGS=-L/usr/local/lib
CentOS Quick Install
--------------------
.. console::
yum install GeoIP-devel
wget http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
gunzip GeoLiteCity.dat.gz
mkdir -p /var/lib/GeoIP/
mv GeoLiteCity.dat /var/lib/GeoIP/GeoIPCity.dat
# Set your environment correctly before running Bro's configure script
export CFLAGS=-I/usr/local/include
export LDFLAGS=-L/usr/local/lib
Usage
-----
There is a single built in function that provides the GeoIP
functionality:
.. code:: bro
function lookup_location(a:addr): geo_location
There is also the ``geo_location`` data structure that is returned
from the ``lookup_location`` function:
.. code:: bro
type geo_location: record {
country_code: string;
region: string;
city: string;
latitude: double;
longitude: double;
};
Example
-------
To write a line in a log file for every ftp connection from hosts in
Ohio, this is now very easy:
.. code:: bro
global ftp_location_log: file = open_log_file("ftp-location");
event ftp_reply(c: connection, code: count, msg: string, cont_resp: bool)
{
local client = c$id$orig_h;
local loc = lookup_location(client);
if (loc$region == "OH" && loc$country_code == "US")
{
print ftp_location_log, fmt("FTP Connection from:%s (%s,%s,%s)", client, loc$city, loc$region, loc$country_code);
}
}

BIN
doc/images/deployment.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

89
doc/index.rst Normal file
View file

@ -0,0 +1,89 @@
.. Bro documentation master file
=================
Bro Documentation
=================
Guides
------
.. toctree::
:maxdepth: 1
INSTALL
quickstart
upgrade
faq
reporting-problems
Frameworks
----------
.. toctree::
:maxdepth: 1
notice
logging
cluster
signatures
How-Tos
-------
.. toctree::
:maxdepth: 1
geoip
Script Reference
----------------
.. toctree::
:maxdepth: 1
scripts/packages
scripts/index
scripts/builtins
scripts/bifs
Other Bro Components
--------------------
The following are snapshots of documentation for components that come
with this version of Bro (|version|). Since they can also be used
independently, see the `download page
<http://bro-ids.org/download/index.html>`_ for documentation of any
current, independent component releases.
.. toctree::
:maxdepth: 1
BinPAC - A protocol parser generator <components/binpac/README>
Broccoli - The Bro Client Communication Library (README) <components/broccoli/README>
Broccoli - User Manual <components/broccoli/broccoli-manual>
Broccoli Python Bindings <components/broccoli-python/README>
Broccoli Ruby Bindings <components/broccoli-ruby/README>
BroControl - Interactive Bro management shell <components/broctl/README>
Bro-Aux - Small auxiliary tools for Bro <components/bro-aux/README>
BTest - A unit testing framework <components/btest/README>
Capstats - Command-line packet statistic tool <components/capstats/README>
PySubnetTree - Python module for CIDR lookups <components/pysubnettree/README>
trace-summary - Script for generating break-downs of network traffic <components/trace-summary/README>
The `Broccoli API Reference <broccoli-api/index.html>`_ may also be of
interest.
Other Indices and References
----------------------------
* :ref:`General Index <genindex>`
* `Notice Index <bro-noticeindex.html>`_
* :ref:`search`
Internal References
-------------------
.. toctree::
:maxdepth: 1
scripts/internal

375
doc/logging.rst Normal file
View file

@ -0,0 +1,375 @@
==========================
Customizing Bro's Logging
==========================
.. rst-class:: opening
Bro comes with a flexible key-value based logging interface that
allows fine-grained control of what gets logged and how it is
logged. This document describes how logging can be customized and
extended.
.. contents::
Terminology
===========
Bro's logging interface is built around three main abstractions:
Log streams
A stream corresponds to a single log. It defines the set of
fields that a log consists of with their names and fields.
Examples are the ``conn`` for recording connection summaries,
and the ``http`` stream for recording HTTP activity.
Filters
Each stream has a set of filters attached to it that determine
what information gets written out. By default, each stream has
one default filter that just logs everything directly to disk
with an automatically generated file name. However, further
filters can be added to record only a subset, split a stream
into different outputs, or to even duplicate the log to
multiple outputs. If all filters are removed from a stream,
all output is disabled.
Writers
A writer defines the actual output format for the information
being logged. At the moment, Bro comes with only one type of
writer, which produces tab separated ASCII files. In the
future we will add further writers, like for binary output and
direct logging into a database.
Basics
======
The data fields that a stream records are defined by a record type
specified when it is created. Let's look at the script generating Bro's
connection summaries as an example,
:doc:`scripts/base/protocols/conn/main`. It defines a record
:bro:type:`Conn::Info` that lists all the fields that go into
``conn.log``, each marked with a ``&log`` attribute indicating that it
is part of the information written out. To write a log record, the
script then passes an instance of :bro:type:`Conn::Info` to the logging
framework's :bro:id:`Log::write` function.
By default, each stream automatically gets a filter named ``default``
that generates the normal output by recording all record fields into a
single output file.
In the following, we summarize ways in which the logging can be
customized. We continue using the connection summaries as our example
to work with.
Filtering
---------
To create a new output file for an existing stream, you can add a
new filter. A filter can, e.g., restrict the set of fields being
logged:
.. code:: bro
event bro_init()
{
# Add a new filter to the Conn::LOG stream that logs only
# timestamp and originator address.
local filter: Log::Filter = [$name="orig-only", $path="origs", $include=set("ts", "id.orig_h")];
Log::add_filter(Conn::LOG, filter);
}
Note the fields that are set for the filter:
``name``
A mandatory name for the filter that can later be used
to manipulate it further.
``path``
The filename for the output file, without any extension (which
may be automatically added by the writer). Default path values
are generated by taking the stream's ID and munging it slightly.
:bro:enum:`Conn::LOG` is converted into ``conn``,
:bro:enum:`PacketFilter::LOG` is converted into
``packet_filter``, and :bro:enum:`Notice::POLICY_LOG` is
converted into ``notice_policy``.
``include``
A set limiting the fields to the ones given. The names
correspond to those in the :bro:type:`Conn::Info` record, with
sub-records unrolled by concatenating fields (separated with
dots).
Using the code above, you will now get a new log file ``origs.log``
that looks like this::
#separator \x09
#path origs
#fields ts id.orig_h
#types time addr
1128727430.350788 141.42.64.125
1128727435.450898 141.42.64.125
If you want to make this the only log file for the stream, you can
remove the default filter (which, conveniently, has the name
``default``):
.. code:: bro
event bro_init()
{
# Remove the filter called "default".
Log::remove_filter(Conn::LOG, "default");
}
An alternate approach to "turning off" a log is to completely disable
the stream:
.. code:: bro
event bro_init()
{
Log::disable_stream(Conn::LOG);
}
If you want to skip only some fields but keep the rest, there is a
corresponding ``exclude`` filter attribute that you can use instead of
``include`` to list only the ones you are not interested in.
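For example, a minimal sketch (the filter name and path are made up for illustration, and ``history`` is simply one example of a ``conn.log`` column) that keeps every column except the connection's ``history`` field:

.. code:: bro

    event bro_init()
        {
        # Log all Conn::Info fields except "history".
        local filter: Log::Filter = [$name="no-history", $path="conn-slim",
                                     $exclude=set("history")];
        Log::add_filter(Conn::LOG, filter);
        }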
A filter can also determine output paths *dynamically* based on the
record being logged. That allows, e.g., to record local and remote
connections into separate files. To do this, you define a function
that returns the desired path:
.. code:: bro
function split_log(id: Log::ID, path: string, rec: Conn::Info) : string
{
# Return "conn-local" if originator is a local IP, otherwise "conn-remote".
local lr = Site::is_local_addr(rec$id$orig_h) ? "local" : "remote";
return fmt("%s-%s", path, lr);
}
event bro_init()
{
local filter: Log::Filter = [$name="conn-split", $path_func=split_log, $include=set("ts", "id.orig_h")];
Log::add_filter(Conn::LOG, filter);
}
Running this will now produce two files, ``local.log`` and
``remote.log``, with the corresponding entries. One could extend this
further for example to log information by subnets or even by IP
address. Be careful, however, as it is easy to create many files very
quickly ...
.. sidebar:: A More Generic Path Function
The ``split_log`` method has one drawback: it can be used
only with the :bro:enum:`Conn::LOG` stream as the record type is hardcoded
into its argument list. However, Bro allows a more generic
variant:
.. code:: bro
function split_log(id: Log::ID, path: string, rec: record { id: conn_id; } ) : string
{
return Site::is_local_addr(rec$id$orig_h) ? "local" : "remote";
}
This function can be used with all log streams that have records
containing an ``id: conn_id`` field.
While so far we have seen how to customize the columns being logged,
you can also control which records are written out by providing a
predicate that will be called for each log record:
.. code:: bro
function http_only(rec: Conn::Info) : bool
{
# Record only connections with successfully analyzed HTTP traffic
return rec$service == "http";
}
event bro_init()
{
local filter: Log::Filter = [$name="http-only", $path="conn-http", $pred=http_only];
Log::add_filter(Conn::LOG, filter);
}
This will result in a log file ``conn-http.log`` that contains only
traffic detected and analyzed as HTTP traffic.
Extending
---------
You can add further fields to a log stream by extending the record
type that defines its content. Let's say we want to add a boolean
field ``is_private`` to :bro:type:`Conn::Info` that indicates whether the
originator IP address is part of the :rfc:`1918` space:
.. code:: bro
# Add a field to the connection log record.
redef record Conn::Info += {
## Indicate if the originator of the connection is part of the
## "private" address space defined in RFC1918.
is_private: bool &default=F &log;
};
Now we need to set the field. A connection's summary is generated at
the time its state is removed from memory. We can add another handler
at that time that sets our field correctly:
.. code:: bro
event connection_state_remove(c: connection)
{
if ( c$id$orig_h in Site::private_address_space )
c$conn$is_private = T;
}
Now ``conn.log`` will show a new field ``is_private`` of type
``bool``.
Notes:
- For extending logs this way, one needs a bit of knowledge about how
the script that creates the log stream is organizing its state
keeping. Most of the standard Bro scripts attach their log state to
the :bro:type:`connection` record where it can then be accessed, just
as the ``c$conn`` above. For example, the HTTP analysis adds a field
``http`` of type :bro:type:`HTTP::Info` to the :bro:type:`connection`
record. See the script reference for more information.
- When extending records as shown above, the new fields must always be
declared either with a ``&default`` value or as ``&optional``.
Furthermore, you need to add the ``&log`` attribute or otherwise the
field won't appear in the output.
Hooking into the Logging
------------------------
Sometimes it is helpful to do additional analysis of the information
being logged. For these cases, a stream can specify an event that will
be generated every time a log record is written to it. All of Bro's
default log streams define such an event. For example, the connection
log stream raises the event :bro:id:`Conn::log_conn`. You
could use that for example for flagging when a connection to a
specific destination exceeds a certain duration:
.. code:: bro
redef enum Notice::Type += {
## Indicates that a connection remained established longer
## than 5 minutes.
Long_Conn_Found
};
event Conn::log_conn(rec: Conn::Info)
{
if ( rec$duration > 5mins )
NOTICE([$note=Long_Conn_Found,
$msg=fmt("unusually long conn to %s", rec$id$resp_h),
$id=rec$id]);
}
Often, these events can be an alternative to post-processing Bro logs
externally with Perl scripts. Much of what such an external script
would do later offline, one may instead do directly inside of Bro in
real-time.
Rotation
--------
By default, no log rotation occurs, but it's globally controllable for all
filters by redefining the :bro:id:`Log::default_rotation_interval` option:
.. code:: bro
redef Log::default_rotation_interval = 1 hr;
Or specifically for certain :bro:type:`Log::Filter` instances by setting
their ``interv`` field. Here's an example of changing just the
:bro:enum:`Conn::LOG` stream's default filter rotation.
.. code:: bro
event bro_init()
{
local f = Log::get_filter(Conn::LOG, "default");
f$interv = 1 min;
Log::remove_filter(Conn::LOG, "default");
Log::add_filter(Conn::LOG, f);
}
ASCII Writer Configuration
--------------------------
The ASCII writer has a number of options for customizing the format of
its output, see :doc:`scripts/base/frameworks/logging/writers/ascii`.
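For example, a minimal sketch (the option names below are assumptions about the ``LogAscii`` module and should be checked against the script reference linked above) that changes the column separator and the placeholder written for empty fields:

.. code:: bro

    # Assumed LogAscii option names; see the ASCII writer documentation
    # linked above for the authoritative list.
    redef LogAscii::separator = "|";
    redef LogAscii::empty_field = "(empty)";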
Adding Streams
==============
It's easy to create a new log stream for custom scripts. Here's an
example for the ``Foo`` module:
.. code:: bro
module Foo;
export {
# Create an ID for our new stream. By convention, this is
# called "LOG".
redef enum Log::ID += { LOG };
# Define the fields. By convention, the type is called "Info".
type Info: record {
ts: time &log;
id: conn_id &log;
};
# Define a hook event. By convention, this is called
# "log_<stream>".
global log_foo: event(rec: Info);
}
# This event should be handled at a higher priority so that when
# users modify your stream later and they do it at priority 0,
# their code runs after this.
event bro_init() &priority=5
{
# Create the stream. This also adds a default filter automatically.
Log::create_stream(Foo::LOG, [$columns=Info, $ev=log_foo]);
}
You can also add the state to the :bro:type:`connection` record to make
it easily accessible across event handlers:
.. code:: bro
redef record connection += {
foo: Info &optional;
};
Now you can use the :bro:id:`Log::write` method to output log records and
save the logged ``Foo::Info`` record into the connection record:
.. code:: bro
event connection_established(c: connection)
{
local rec: Foo::Info = [$ts=network_time(), $id=c$id];
c$foo = rec;
Log::write(Foo::LOG, rec);
}
See the existing scripts for how to work with such a new connection
field. A simple example is :doc:`scripts/base/protocols/syslog/main`.
When you are developing scripts that add data to the :bro:type:`connection`
record, care must be given to when and how long data is stored.
Normally data saved to the connection record will remain there for the
duration of the connection and from a practical perspective it's not
uncommon to need to delete that data before the end of the connection.

395
doc/notice.rst Normal file
View file

@ -0,0 +1,395 @@
Notice Framework
================
.. rst-class:: opening
One of the easiest ways to customize Bro is writing a local notice
policy. Bro can detect a large number of potentially interesting
situations, and the notice policy tells which of them the user wants to be
acted upon in some manner. In particular, the notice policy can specify
actions to be taken, such as sending an email or compiling regular
alarm emails. This page gives an introduction into writing such a notice
policy.
.. contents::
Overview
--------
Let's start with a little bit of background on Bro's philosophy on reporting
things. Bro ships with a large number of policy scripts which perform a wide
variety of analyses. Most of these scripts monitor for activity which might be
of interest for the user. However, none of these scripts determines the
importance of what it finds itself. Instead, the scripts only flag situations
as *potentially* interesting, leaving it to the local configuration to define
which of them are in fact actionable. This decoupling of detection and
reporting allows Bro to address the different needs that sites have:
definitions of what constitutes an attack or even a compromise differ quite a
bit between environments, and activity deemed malicious at one site might be
fully acceptable at another.
Whenever one of Bro's analysis scripts sees something potentially
interesting it flags the situation by calling the :bro:see:`NOTICE`
function and giving it a single :bro:see:`Notice::Info` record. A Notice
has a :bro:see:`Notice::Type`, which reflects the kind of activity that
has been seen, and it is usually also augmented with further context
about the situation.
More information about raising notices can be found in the `Raising Notices`_
section.
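As a brief illustration of what such a call looks like (the notice type, event handler, and message below are placeholders chosen only for this example), a script registers its own :bro:see:`Notice::Type` and then hands a :bro:see:`Notice::Info` record to :bro:see:`NOTICE`:

.. code:: bro

    redef enum Notice::Type += {
        ## Placeholder notice type used only for this example.
        Example_Notice
    };

    event connection_established(c: connection)
        {
        NOTICE([$note=Example_Notice,
                $msg=fmt("example activity involving %s", c$id$resp_h),
                $id=c$id]);
        }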
Once a notice is raised, it can have any number of actions applied to it by
the :bro:see:`Notice::policy` set which is described in the `Notice Policy`_
section below. Such actions can be to send a mail to the configured
address(es) or to simply ignore the notice. Currently, the following actions
are defined:
.. list-table::
:widths: 20 80
:header-rows: 1
* - Action
- Description
* - Notice::ACTION_LOG
- Write the notice to the :bro:see:`Notice::LOG` logging stream.
* - Notice::ACTION_ALARM
- Log into the :bro:see:`Notice::ALARM_LOG` stream which will rotate
hourly and email the contents to the email address or addresses
defined in the :bro:see:`Notice::mail_dest` variable.
* - Notice::ACTION_EMAIL
- Send the notice in an email to the email address or addresses given in
the :bro:see:`Notice::mail_dest` variable.
* - Notice::ACTION_PAGE
- Send an email to the email address or addresses given in the
:bro:see:`Notice::mail_page_dest` variable.
* - Notice::ACTION_NO_SUPPRESS
- This action will disable the built in notice suppression for the
notice. Keep in mind that this action will need to be applied to
every notice that shouldn't be suppressed including each of the future
notices that would have normally been suppressed.
How these notice actions are applied to notices is discussed in the
`Notice Policy`_ and `Notice Policy Shortcuts`_ sections.
Processing Notices
------------------
Notice Policy
*************
The predefined set :bro:see:`Notice::policy` provides the mechanism for
applying actions and other behavior modifications to notices. Each entry
of :bro:see:`Notice::policy` is a record of the type
:bro:see:`Notice::PolicyItem` which defines a condition to be matched
against all raised notices and one or more of a variety of behavior
modifiers. The notice policy is defined by adding any number of
:bro:see:`Notice::PolicyItem` records to the :bro:see:`Notice::policy`
set.
Here's a simple example which tells Bro to send an email for all notices of
type :bro:see:`SSH::Login` if the server is 10.0.0.1:
.. code:: bro
redef Notice::policy += {
[$pred(n: Notice::Info) = {
return n$note == SSH::Login && n$id$resp_h == 10.0.0.1;
},
$action = Notice::ACTION_EMAIL]
};
.. note::
Keep in mind that the semantics of the SSH::Login notice are
such that it is only raised when Bro heuristically detects a successful
login. Apparently failed logins will not raise this notice.
While the syntax might look a bit convoluted at first, it provides a lot of
flexibility due to having access to Bro's full programming language.
Predicate Field
^^^^^^^^^^^^^^^
The :bro:see:`Notice::PolicyItem` record type has a field named ``$pred``
which defines the entry's condition in the form of a predicate written
as a Bro function. The function is passed the notice as a
:bro:see:`Notice::Info` record and it returns a boolean value indicating
if the entry is applicable to that particular notice.
.. note::
A ``Notice::PolicyItem`` without a predicate is treated as implicitly true
(``T``), since an implicitly false (``F``) entry would never match any notice.
Bro evaluates the predicates of each entry in the order defined by the
``$priority`` field in :bro:see:`Notice::PolicyItem` records. The valid
values are 0-10 with 10 being earliest evaluated. If ``$priority`` is
omitted, the default priority is 5.
Behavior Modification Fields
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
There are a set of fields in the :bro:see:`Notice::PolicyItem` record type that
indicate ways that either the notice or notice processing should be modified
if the predicate field (``$pred``) evaluated to true (``T``). Those fields are
explained in more detail in the following table.
.. list-table::
:widths: 20 30 20
:header-rows: 1
* - Field
- Description
- Example
* - ``$action=<Notice::Action>``
- Each :bro:see:`Notice::PolicyItem` can have a single action
applied to the notice with this field.
- ``$action = Notice::ACTION_EMAIL``
* - ``$suppress_for=<interval>``
- This field makes it possible for a user to modify the behavior of the
notice framework's automated suppression of intrinsically similar
notices. More information about the notice framework's automated
suppression can be found in the `Automated Suppression`_ section of
this document.
- ``$suppress_for = 10mins``
* - ``$halt=<bool>``
- This field can be used to modify the notice policy
evaluation. To stop processing of notice policy items before
evaluating all of them, set this field to ``T`` and make the ``$pred``
field return ``T``. :bro:see:`Notice::PolicyItem` records with a
higher ``$priority`` will already have been evaluated at that point,
but those at a lower priority won't be.
- ``$halt = T``
For example, here is the earlier policy entry again, this time with an
explicit ``$priority``:
.. code:: bro
redef Notice::policy += {
[$pred(n: Notice::Info) = {
return n$note == SSH::Login && n$id$resp_h == 10.0.0.1;
},
$action = Notice::ACTION_EMAIL,
$priority=5]
};
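The ``$halt`` field can be combined with ``$priority`` to short-circuit policy
evaluation. The following is a minimal sketch (the notice type and address are
purely illustrative): the first entry is evaluated earlier because of its
higher priority and, since its predicate returns ``T`` and ``$halt`` is set,
the lower-priority entry is never evaluated for those notices.
.. code:: bro
redef Notice::policy += {
# Evaluated first due to the higher priority; $halt=T stops evaluation
# of any lower-priority entries for notices matching the predicate.
[$pred(n: Notice::Info) = {
return n$note == SSH::Login && n$id$resp_h == 192.168.1.1;
},
$halt = T,
$priority = 10],
# Never reached for SSH logins to 192.168.1.1 because of the entry above.
[$pred(n: Notice::Info) = {
return n$note == SSH::Login;
},
$action = Notice::ACTION_EMAIL,
$priority = 5]
};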
Notice Policy Shortcuts
***********************
Although the notice framework provides a great deal of flexibility and
configurability, there are many times when the full expressiveness isn't needed
and actually becomes a hindrance to achieving results. The framework provides
a default :bro:see:`Notice::policy` suite as a way of giving users
shortcuts to easily apply many common actions to notices.
These are implemented as sets and tables indexed with a
:bro:see:`Notice::Type` enum value. The following table shows and describes
all of the variables available for shortcut configuration of the notice
framework.
.. list-table::
:widths: 32 40
:header-rows: 1
* - Variable name
- Description
* - :bro:see:`Notice::ignored_types`
- Adding a :bro:see:`Notice::Type` to this set results in the notice
being ignored. It won't have any other action applied to it, not even
:bro:see:`Notice::ACTION_LOG`.
* - :bro:see:`Notice::emailed_types`
- Adding a :bro:see:`Notice::Type` to this set results in
:bro:see:`Notice::ACTION_EMAIL` being applied to the notices of
that type.
* - :bro:see:`Notice::alarmed_types`
- Adding a :bro:see:`Notice::Type` to this set results in
:bro:see:`Notice::ACTION_ALARM` being applied to the notices of
that type.
* - :bro:see:`Notice::not_suppressed_types`
- Adding a :bro:see:`Notice::Type` to this set results in that notice
no longer undergoing the normal notice suppression that would
take place. Be careful when using this in production; it could
result in a dramatic increase in the number of notices being
processed.
* - :bro:see:`Notice::type_suppression_intervals`
- This is a table indexed on :bro:see:`Notice::Type` and yielding an
interval. It can be used as an easy way to extend the default
suppression interval for an entire :bro:see:`Notice::Type`
without having to create a whole :bro:see:`Notice::policy` entry
and setting the ``$suppress_for`` field.
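As a sketch of how these shortcuts are typically used (the notice types below
are the ones discussed elsewhere in this document, and the interval is
arbitrary), a site's ``local.bro`` might contain:
.. code:: bro
# Don't do anything at all with this notice type.
redef Notice::ignored_types += { SSL::Invalid_Server_Cert };
# Send an email whenever this notice type is raised.
redef Notice::emailed_types += { SSH::Login };
# Extend the suppression interval for this notice type to one hour.
redef Notice::type_suppression_intervals += { [SSH::Login] = 1hr };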
Raising Notices
---------------
A script should raise a notice for any occurrence that a user may want
to be notified about or take action on. For example, whenever the base
SSH analysis scripts sees an SSH session where it is heuristically
guessed to be a successful login, it raises a Notice of the type
:bro:see:`SSH::Login`. The code in the base SSH analysis script looks
like this:
.. code:: bro
NOTICE([$note=SSH::Login,
$msg="Heuristically detected successful SSH login.",
$conn=c]);
:bro:see:`NOTICE` is a normal function in the global namespace which
wraps a function within the ``Notice`` namespace. It takes a single
argument of the :bro:see:`Notice::Info` record type. The most common
fields used when raising notices are described in the following table:
.. list-table::
:widths: 32 40
:header-rows: 1
* - Field name
- Description
* - ``$note``
- This field is required and is an enum value which represents the
notice type.
* - ``$msg``
- This is a human readable message which is meant to provide more
information about this particular instance of the notice type.
* - ``$sub``
- This is a sub-message meant for human readability but will
frequently also be used to contain data meant to be matched with the
``Notice::policy``.
* - ``$conn``
- If a connection record is available when the notice is being raised
and the notice represents some attribute of the connection, then the
connection record can be given here. Other fields such as ``$id`` and
``$src`` will automatically be populated from this value.
* - ``$id``
- If a conn_id record is available when the notice is being raised and
the notice represents some attribute of the connection, then the
conn_id record can be given here. Other fields such as ``$src`` will
automatically be populated from this value.
* - ``$src``
- If the notice represents an attribute of a single host then it's
possible that only this field should be filled out to represent the
host that is being "noticed".
* - ``$n``
- This normally represents a number if the notice has to do with some
number. It's most frequently used for numeric tests in the
``Notice::policy`` for making policy decisions.
* - ``$identifier``
- This represents a unique identifier for this notice. This field is
described in more detail in the `Automated Suppression`_ section.
* - ``$suppress_for``
- This field can be set if there is a natural suppression interval for
the notice that may be different than the default value. The
value set to this field can also be modified by a user's
:bro:see:`Notice::policy` so the value is not set permanently
and unchangeably.
When writing Bro scripts which raise notices, some thought should be given to
what the notice represents and what data should be provided to give a consumer
of the notice the best information about the notice. If the notice is
representative of many connections and is an attribute of a host (e.g. a
scanning host) it probably makes most sense to fill out the ``$src`` field and
not give a connection or conn_id. If a notice is representative of a
connection attribute (e.g. an apparent SSH login) then it makes sense to fill
out either ``$conn`` or ``$id`` based on the data that is available when the
notice is raised. Using care when inserting data into a notice makes later
analysis easier, because the notice then carries only the data needed to fully
represent the occurrence that raised it. If, for example, complete connection
information were attached to a notice about an expiring SSL server certificate,
the logs would be confusing because the connection the certificate was detected
on is a side topic to the fact that an expired certificate was detected. In
many cases two or more separate notices may need to be generated: as an
example, one for the detection of the expired SSL certificate and another for
the client deciding to go ahead with the connection despite the expired
certificate.
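As a hypothetical sketch (the ``Scan::Address_Scan`` type and the
``scanner``/``num_hosts`` variables are made up for illustration), a notice
that describes a single scanning host would fill in ``$src`` rather than
``$conn`` or ``$id``:
.. code:: bro
NOTICE([$note=Scan::Address_Scan,
$msg=fmt("%s scanned %d hosts", scanner, num_hosts),
$src=scanner,
$n=num_hosts,
$identifier=cat(scanner)]);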
Automated Suppression
---------------------
The notice framework supports suppression for notices if the author of the
script that is generating the notice has indicated to the notice framework how
to identify notices that are intrinsically the same. Identification of these
"intrinsically duplicate" notices is implemented with an optional field in
:bro:see:`Notice::Info` records named ``$identifier`` which is a simple string.
If the ``$identifier`` and ``$note`` fields are the same for two notices, the
notice framework actually considers them to be the same thing and can use that
information to suppress duplicates for a configurable period of time.
.. note::
If the ``$identifier`` is left out of a notice, no notice suppression
takes place due to the framework's inability to identify duplicates. This
could be completely legitimate usage if no notices could ever be
considered to be duplicates.
The ``$identifier`` field is typically comprised of several pieces of
data related to the notice that when combined represent a unique
instance of that notice. Here is an example of the script
:doc:`scripts/policy/protocols/ssl/validate-certs` raising a notice
for session negotiations where the certificate or certificate chain did
not validate successfully against the available certificate authority
certificates.
.. code:: bro
NOTICE([$note=SSL::Invalid_Server_Cert,
$msg=fmt("SSL certificate validation failed with (%s)", c$ssl$validation_status),
$sub=c$ssl$subject,
$conn=c,
$identifier=cat(c$id$resp_h,c$id$resp_p,c$ssl$validation_status,c$ssl$cert_hash)]);
In the above example you can see that the ``$identifier`` field contains a
string that is built from the responder IP address and port, the validation
status message, and the MD5 sum of the server certificate. Those fields in
particular are chosen because different SSL certificates could be seen on any
port of a host, certificates could fail validation for different reasons, and
multiple server certificates could be used on that combination of IP address
and port with the ``server_name`` SSL extension (explaining the addition of
the MD5 sum of the certificate). The result is that if a certificate fails
validation and all four pieces of data match (IP address, port, validation
status, and certificate hash) that particular notice won't be raised again for
the default suppression period.
Setting the ``$identifier`` field is left to those raising notices because
it's assumed that the script author who is raising the notice understands the
full problem set and edge cases of the notice which may not be readily
apparent to users. If users don't want the suppression to take place or simply
want a different interval, they can always modify it with the
:bro:see:`Notice::policy`.
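For example, a user who only wants to hear about a given server's failed
certificate validation once per day could add a sketch like the following to
their site policy (the interval is arbitrary):
.. code:: bro
redef Notice::policy += {
[$pred(n: Notice::Info) = {
return n$note == SSL::Invalid_Server_Cert;
},
$suppress_for = 1day]
};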
Extending Notice Framework
--------------------------
Adding Custom Notice Actions
****************************
Extending Notice Emails
***********************
Cluster Considerations
----------------------

598
doc/quickstart.rst Normal file
View file

@ -0,0 +1,598 @@
.. _CMake: http://www.cmake.org
.. _SWIG: http://www.swig.org
.. _MacPorts: http://www.macports.org
.. _Fink: http://www.finkproject.org
.. _Homebrew: http://mxcl.github.com/homebrew
.. _bro downloads page: http://bro-ids.org/download/index.html
=================
Quick Start Guide
=================
.. rst-class:: opening
The short story for getting Bro up and running in a simple configuration
for analysis of either live traffic from a network interface or a packet
capture trace file.
.. contents::
Installation
============
Bro works on most modern, Unix-based systems and requires no custom
hardware. It can be downloaded in either pre-built binary package or
source code forms.
Pre-Built Binary Release Packages
---------------------------------
See the `bro downloads page`_ for currently supported/targeted platforms.
* RPM
.. console::
sudo yum localinstall Bro-*.rpm
* DEB
.. console::
sudo gdebi Bro-*.deb
* MacOS Disk Image with Installer
Just open the ``Bro-*.dmg`` and then run the ``.pkg`` installer.
Everything installed by the package will go into ``/opt/bro``.
The primary install prefix for binary packages is ``/opt/bro``.
Non-MacOS packages that include BroControl also put variable/runtime
data (e.g. Bro logs) in ``/var/opt/bro``.
Building From Source
--------------------
Required Dependencies
~~~~~~~~~~~~~~~~~~~~~
The following dependencies are required to build Bro:
* RPM/RedHat-based Linux:
.. console::
sudo yum install cmake make gcc gcc-c++ flex bison libpcap-devel openssl-devel python-devel swig zlib-devel file-devel
* DEB/Debian-based Linux:
.. console::
sudo apt-get install cmake make gcc g++ flex bison libpcap-dev libssl-dev python-dev swig zlib1g-dev libmagic-dev
* FreeBSD
Most required dependencies should come with a minimal FreeBSD install
except for the following.
.. console::
sudo pkg_add -r bash cmake swig bison python
Note that ``bash`` needs to be in ``PATH``, which by default it is
not. The FreeBSD package installs the binary into
``/usr/local/bin``.
* Mac OS X
Snow Leopard (10.6) comes with all required dependencies except for CMake_.
Lion (10.7) comes with all required dependencies except for CMake_ and SWIG_.
Distributions of these dependencies can be obtained from the project websites
linked above, but they're also likely available from your preferred Mac OS X
package management system (e.g. MacPorts_, Fink_, or Homebrew_).
Note that the MacPorts ``swig`` package may not include any specific
language support so you may need to also install ``swig-ruby`` and
``swig-python``.
Optional Dependencies
~~~~~~~~~~~~~~~~~~~~~
Bro can use libGeoIP for geo-locating IP addresses, and sendmail for
sending emails.
* RedHat Enterprise Linux:
.. console::
sudo yum install geoip-devel sendmail
* CentOS Linux:
.. console::
sudo yum install GeoIP-devel sendmail
* DEB/Debian-based Linux:
.. console::
sudo apt-get install libgeoip-dev sendmail
* Ports-based FreeBSD
.. console::
sudo pkg_add -r GeoIP
sendmail is typically already available.
* Mac OS X
Vanilla OS X installations don't ship with libmagic or libGeoIP, but
if installed from your preferred package management system (e.g. MacPorts,
Fink, or Homebrew), they should be automatically detected and Bro will compile
against them.
Additional steps may be needed to :doc:`get the right GeoIP database <geoip>`.
Compiling Bro Source Code
~~~~~~~~~~~~~~~~~~~~~~~~~
Bro releases are bundled into source packages for convenience and
available from the `bro downloads page`_.
The latest Bro development versions are obtainable through git
repositories hosted at `git.bro-ids.org <http://git.bro-ids.org>`_. See
our `git development documentation
<http://bro-ids.org/development/process.html>`_ for comprehensive
information on Bro's use of git revision control, but the short story
for downloading the full source code experience for Bro via git is:
.. console::
git clone --recursive git://git.bro-ids.org/bro
.. note:: If you choose to clone the ``bro`` repository non-recursively for
a "minimal Bro experience", be aware that compiling it depends on
BinPAC, which has its own ``binpac`` repository. Either install it
first or initialize/update the cloned ``bro`` repository's
``aux/binpac`` submodule.
See the ``INSTALL`` file included with the source code for more information
on compiling, but this is the typical way to build and install from source
(of course, changing the value of the ``--prefix`` option to point to the
desired root install path):
.. console::
./configure --prefix=/desired/install/path
make
make install
The default installation prefix is ``/usr/local/bro``, which would typically
require root privileges when doing the ``make install``.
Configure the Run-Time Environment
----------------------------------
Just remember that you may need to adjust your ``PATH`` environment variable
according to the platform/shell/package you're using. For example:
Bourne-Shell Syntax:
.. console::
export PATH=/usr/local/bro/bin:$PATH
C-Shell Syntax:
.. console::
setenv PATH /usr/local/bro/bin:$PATH
Or substitute ``/opt/bro/bin`` instead if you installed from a binary package.
Using BroControl
================
BroControl is an interactive shell for easily operating/managing Bro
installations on a single system or even across multiple systems in a
traffic-monitoring cluster.
.. note:: Below, ``$PREFIX`` is used to reference the Bro installation
root directory.
A Minimal Starting Configuration
--------------------------------
These are the basic configuration changes to make for a minimal BroControl installation
that will manage a single Bro instance on the ``localhost``:
1) In ``$PREFIX/etc/node.cfg``, set the right interface to monitor.
2) In ``$PREFIX/etc/networks.cfg``, comment out the default settings and add
the networks that Bro will consider local to the monitored environment.
3) In ``$PREFIX/etc/broctl.cfg``, change the ``MailTo`` email address to a
desired recipient and the ``LogRotationInterval`` to a desired log
archival frequency.
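For reference, a standalone ``node.cfg`` and ``networks.cfg`` might end up
looking roughly like this (the interface name and subnets are placeholders
for your own values)::
# $PREFIX/etc/node.cfg
[bro]
type=standalone
host=localhost
interface=eth0
# $PREFIX/etc/networks.cfg
10.0.0.0/8        Private IP space
192.168.0.0/16    Private IP space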
Now start the BroControl shell like:
.. console::
broctl
Since this is the first-time use of the shell, perform an initial installation
of the BroControl configuration:
.. console::
[BroControl] > install
Then start up a Bro instance:
.. console::
[BroControl] > start
If there are errors while trying to start the Bro instance, you can
view the details with the ``diag`` command. If started successfully,
the Bro instance will begin analyzing traffic according to a default
policy and output the results in ``$PREFIX/logs``.
.. note:: The user starting BroControl needs permission to capture
network traffic. If you are not root, you may need to grant further
privileges to the account you're using; see the :doc:`FAQ <faq>`.
Also, if it looks like Bro is not seeing any traffic, check out
the FAQ entry on checksum offloading.
You can leave it running for now, but to stop this Bro instance you would do:
.. console::
[BroControl] > stop
We also recommend inserting the following entry into the `crontab`::
0-59/5 * * * * $PREFIX/bin/broctl cron
This will perform a number of regular housekeeping tasks, including
verifying that the process is still running (and restarting it in
case of any abnormal termination).
Browsing Log Files
------------------
By default, logs are written out in human-readable (ASCII) format and
data is organized into columns (tab-delimited). Logs that are part of
the current rotation interval are accumulated in
``$PREFIX/logs/current/`` (if Bro is not running, the directory will
be empty). For example, the ``http.log`` contains the results of Bro
HTTP protocol analysis. Here are the first few columns of
``http.log``::
# ts uid orig_h orig_p resp_h resp_p
1311627961.8 HSH4uV8KVJg 192.168.1.100 52303 192.150.187.43 80
Logs that deal with analysis of a network protocol will often start like this:
a timestamp, a unique connection identifier (UID), and a connection 4-tuple
(originator host/port and responder host/port). The UID can be used to
identify all logged activity (possibly across multiple log files) associated
with a given connection 4-tuple over its lifetime.
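For example, one quick way to pull together everything that was logged about
the connection shown above is to search the current logs for its UID (this
assumes a standard ``grep`` and the ``$PREFIX`` placeholder used elsewhere in
this guide):
.. console::
grep HSH4uV8KVJg $PREFIX/logs/current/*.log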
The remaining columns of protocol-specific logs then detail the
protocol-dependent activity that's occurring. E.g. ``http.log``'s next few
columns (shortened for brevity) show a request to the root of the Bro website::
# method host uri referrer user_agent
GET bro-ids.org / - <...>Chrome/12.0.742.122<...>
Some logs are worth explicit mention:
``weird.log``
Contains unusual/exceptional activity that can indicate
malformed connections, traffic that doesn't conform to a particular
protocol, malfunctioning/misconfigured hardware, or even an attacker
attempting to avoid/confuse a sensor. Without context, it's hard to
judge whether this category of activity is interesting, so that
judgment is left up to the user to configure.
``notice.log``
Identifies specific activity that Bro recognizes as
potentially interesting, odd, or bad. In Bro-speak, such
activity is called a "notice".
By default, ``BroControl`` regularly takes all the logs from
``$PREFIX/logs/current`` and archives/compresses them to a directory
named by date, e.g. ``$PREFIX/logs/2011-10-06``. The frequency at
which this is done can be configured via the ``LogRotationInterval``
option in ``$PREFIX/etc/broctl.cfg``.
Deployment Customization
------------------------
The goal of most Bro *deployments* may be to send email alarms when a network
event requires human intervention/investigation, but sometimes that conflicts
with Bro's goal as a *distribution* to remain policy and site neutral -- the
events on one network may be less noteworthy than the same events on another.
As a result, deploying Bro can be an iterative process of
updating its policy to take different actions for events that are noticed, and
using its scripting language to programmatically extend traffic analysis
in a precise way.
One of the first steps to take in customizing Bro might be to get familiar
with the notices it can generate by default and either tone down or escalate
the action that's taken when specific ones occur.
Let's say that we've been looking at the ``notice.log`` for a bit and see two
changes we want to make:
1) ``SSL::Invalid_Server_Cert`` (found in the ``note`` column) is one type of
notice that means an SSL connection was established and the server's
certificate couldn't be validated using Bro's default trust roots, but
we want to ignore it.
2) ``SSH::Login`` is a notice type that is triggered when an SSH connection
attempt looks like it may have been successful, and we want email when
that happens, but only for certain servers.
So we've defined *what* we want to do, but need to know *where* to do it.
The answer is to use a script written in the Bro programming language, so
let's do a quick intro to Bro scripting.
Bro Scripts
~~~~~~~~~~~
Bro ships with many pre-written scripts that are highly customizable
to support traffic analysis for your specific environment. By
default, these will be installed into ``$PREFIX/share/bro`` and can be
identified by the use of a ``.bro`` file name extension. These files
should **never** be edited directly as changes will be lost when
upgrading to newer versions of Bro. The exception to this rule is the
directory ``$PREFIX/share/bro/site`` where local site-specific files
can be put without fear of being clobbered later. The other main
script directories under ``$PREFIX/share/bro`` are ``base`` and
``policy``. By default, Bro automatically loads all scripts under
``base`` (unless the ``-b`` command line option is supplied), which
deal either with collecting basic/useful state about network
activities or providing frameworks/utilities that extend Bro's
functionality without any performance cost. Scripts under the
``policy`` directory may be more situational or costly, and so users
must explicitly choose if they want to load them.
The main entry point for the default analysis configuration of a standalone
Bro instance managed by BroControl is the ``$PREFIX/share/bro/site/local.bro``
script. So we'll be adding to that in the following sections, but first
we have to figure out what to add.
Redefining Script Option Variables
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Many simple customizations just require you to redefine a variable
from a standard Bro script with your own value, using Bro's ``redef``
operator.
The typical way a standard Bro script advertises tweak-able options to users
is by defining variables with the ``&redef`` attribute and ``const`` qualifier.
A redefinable constant might seem strange, but what it really means is that
the variable's value cannot change at run-time, while its initial value can
still be modified via the ``redef`` operator at parse-time.
So let's continue on our path to modify the behavior for the two SSL
and SSH notices. Looking at :doc:`scripts/base/frameworks/notice/main`,
we see that it advertises:
.. code:: bro
module Notice;
export {
...
## Ignored notice types.
const ignored_types: set[Notice::Type] = {} &redef;
}
That's exactly what we want to do for the SSL notice. So add to ``local.bro``:
.. code:: bro
redef Notice::ignored_types += { SSL::Invalid_Server_Cert };
.. note:: The ``Notice`` namespace scoping is necessary here because the
variable was declared and exported inside the ``Notice`` module, but is
being referenced from outside of it. Variables declared and exported
inside a module do not have to be scoped if referring to them while still
inside the module.
Then go into the BroControl shell to check whether the configuration change
is valid before installing it and then restarting the Bro instance:
.. console::
[BroControl] > check
bro is ok.
[BroControl] > install
removing old policies in /usr/local/bro/spool/policy/site ... done.
removing old policies in /usr/local/bro/spool/policy/auto ... done.
creating policy directories ... done.
installing site policies ... done.
generating standalone-layout.bro ... done.
generating local-networks.bro ... done.
generating broctl-config.bro ... done.
updating nodes ... done.
[BroControl] > restart
stopping bro ...
starting bro ...
Now that the SSL notice is ignored, let's look at how to send an email on
the SSH notice. The notice framework has a similar option called
``emailed_types``, but that can't differentiate between SSH servers and we
only want email for logins to certain ones. Then we come to the ``PolicyItem``
record and ``policy`` set and realize that those are actually what get used
to implement the simple functionality of ``ignored_types`` and
``emailed_types``, but it's extensible such that the condition and action taken
on notices can be user-defined.
In ``local.bro``, let's add a new ``PolicyItem`` record to the ``policy`` set
that only takes the email action for SSH logins to a defined set of servers:
.. code:: bro
const watched_servers: set[addr] = {
192.168.1.100,
192.168.1.101,
192.168.1.102,
} &redef;
redef Notice::policy += {
[$action = Notice::ACTION_EMAIL,
$pred(n: Notice::Info) =
{
return n$note == SSH::Login && n$id$resp_h in watched_servers;
}
]
};
You'll just have to trust the syntax for now, but what we've done is
first declare our own variable to hold a set of watched addresses,
``watched_servers``, and then add a record to the policy that will generate
an email on the condition that the predicate function evaluates to true, which
is whenever the notice type is an SSH login and the responding host stored
inside the ``Info`` record's connection field is in the set of watched servers.
.. note:: Record field member access is done with the ``$`` character
instead of a ``.`` as might be expected from other languages, in
order to avoid ambiguity with the built-in address type's use of ``.``
in IPv4 dotted-decimal representations.
Remember, to finalize that configuration change perform the ``check``,
``install``, ``restart`` commands in that order inside the BroControl shell.
Next Steps
----------
By this point, we've learned how to set up the most basic Bro instance and
tweak the most basic options. Here's some suggestions on what to explore next:
* We only looked at how to change options declared in the notice framework;
there are many more options to look at in other script packages.
* Look at the scripts in ``$PREFIX/share/bro/policy`` for further ones
you may want to load.
* Reading the code of scripts that ship with Bro is also a great way to gain
understanding of the language and how you can start writing your own custom
analysis.
* Review the :doc:`FAQ <faq>`.
* Continue reading below for another mini-tutorial on using Bro as a standalone
command-line utility.
Bro, the Command-Line Utility
=============================
If you prefer not to use BroControl (e.g. don't need its automation and
management features), here's how to directly control Bro for your analysis
activities.
Monitoring Live Traffic
-----------------------
Analyzing live traffic from an interface is simple:
.. console::
bro -i en0 <list of scripts to load>
``en0`` can be replaced by the interface of your choice, and for the list of
scripts, you can just use "all" for now to perform all the default analysis
that's available.
Bro will output log files into the working directory.
.. note:: The :doc:`FAQ <faq>` entries about
capturing as an unprivileged user and checksum offloading are particularly
relevant at this point.
To use the site-specific ``local.bro`` script, just add it to the
command-line:
.. console::
bro -i en0 local
This will cause Bro to print a warning about the
``Site::local_nets`` variable not being configured. You can supply this
information at the command line like this (supply your "local" subnets
in place of the example subnets):
.. console::
bro -i en0 local "Site::local_nets += { 1.2.3.0/24, 5.6.7.0/24 }"
Reading Packet Capture (pcap) Files
-----------------------------------
Capturing packets from an interface and writing them to a file can be done
like this:
.. console::
sudo tcpdump -i en0 -s 0 -w mypackets.trace
Where ``en0`` can be replaced by the correct interface for your system as
shown by e.g. ``ifconfig``. (The ``-s 0`` argument tells it to capture
whole packets; in cases where it's not supported use ``-s 65535`` instead).
After a while of capturing traffic, kill the ``tcpdump`` (with ctrl-c)
and tell Bro to perform all the default analysis on the capture:
.. console::
bro -r mypackets.trace
Bro will output log files into the working directory.
If you are interested in more detection, you can again load the ``local``
script that we include as a suggested configuration:
.. console::
bro -r mypackets.trace local
Telling Bro Which Scripts to Load
---------------------------------
A command-line invocation of Bro typically looks like:
.. console::
bro <options> <policies...>
Where the last arguments are the specific policy scripts that this Bro
instance will load. These arguments don't have to include the ``.bro``
file extension, and if the corresponding script resides under the default
installation path, ``$PREFIX/share/bro``, then it requires no path
qualification. Further, a directory of scripts can be specified as
an argument to be loaded as a "package" if it contains a ``__load__.bro``
script that defines the scripts that are part of the package.
This example does all of the base analysis (primarily protocol
logging) and adds SSL certificate validation.
.. console::
bro -r mypackets.trace protocols/ssl/validate-certs
You might notice that a script you load from the command line uses the
``@load`` directive in the Bro language to declare dependence on other scripts.
This directive is similar to the ``#include`` of C/C++, except the semantics
are "load this script if it hasn't already been loaded".
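As a small sketch, a custom script passed on the command line might start by
declaring its dependencies this way (both scripts below ship with Bro; the
rest of the custom script is up to you):
.. code:: bro
@load base/frameworks/notice
@load protocols/ssl/validate-certs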
.. note:: If one wants Bro to be able to load scripts that live outside the
default directories in Bro's installation root, the ``BROPATH`` environment
variable will need to be extended to include all the directories that need
to be searched for scripts. See the default search path by doing
``bro --help``.

194
doc/reporting-problems.rst Normal file
View file

@ -0,0 +1,194 @@
Reporting Problems
==================
.. rst-class:: opening
Here we summarize some steps to follow when you see Bro doing
something it shouldn't. To provide help, it is often crucial for
us to have a way of reliably reproducing the effect you're seeing.
Unfortunately, reproducing problems can be rather tricky with Bro
because more often than not, they occur only in either very rare
situations or only after Bro has been running for some time. In
particular, getting a small trace showing a specific effect can be
a real problem. In the following, we'll summarize some strategies
to this end.
Reporting Problems
------------------
Generally, when you encounter a problem with Bro, the best thing to do
is to open a new ticket in `Bro's issue tracker
<http://tracker.bro-ids.org/>`__ and include information on how to
reproduce the issue. Ideally, your ticket should come with the
following:
* The Bro version you're using (if working directly from the git
repository, the branch and revision number).
* The output you're seeing along with a description of what you'd expect
Bro to do instead.
* A *small* trace in `libpcap format <http://www.tcpdump.org>`__
demonstrating the effect (assuming the problem doesn't happen right
at startup already).
* The exact command-line you're using to run Bro with that trace. If
you can, please try to run the Bro binary directly from the command
line rather than using BroControl.
* Any non-standard scripts you're using (but please only those really
necessary; just a small code snippet triggering the problem would
be perfect).
* If you encounter a crash, information from the core dump, such as
the stack backtrace, can be very helpful. See below for more on
this.
How Do I Get a Trace File?
--------------------------
As Bro is usually running live, coming up with a small trace file that
reproduces a problem can turn out to be quite a challenge. Often it
works best to start with a large trace that triggers the problem,
and then successively thin it out as much as possible.
To get to the initial large trace, here are a few things you can try:
* Capture a trace with `tcpdump <http://www.tcpdump.org/>`__, either
on the same interface Bro is running on, or on another host where
you can generate traffic of the kind likely triggering the problem
(e.g., if you're seeing problems with the HTTP analyzer, record some
of your Web browsing on your desktop). When using tcpdump, don't
forget to record *complete* packets (``tcpdump -s 0 ...``). You can
reduce the amount of traffic captured by using a suitable BPF filter
(e.g., for HTTP only, try ``port 80``).
* Bro's command-line option ``-w <trace>`` records all packets it
processes into the given file. You can then later run Bro
offline on this trace and it will process the packets in the same
way as it did live. This is particularly helpful with problems that
only occur after Bro has already been running for some time. For
example, sometimes a crash may be triggered by a particular kind of
traffic only occurring rarely. Running Bro live with ``-w`` and
then, after the crash, offline on the recorded trace might, with a
little bit of luck, reproduce the problem reliably (see the example
after this list). However, be
careful with ``-w``: it can result in huge trace files, quickly
filling up your disk. (One way to mitigate the space issues is to
periodically delete the trace file by configuring
``rotate-logs.bro`` accordingly. BroControl does that for you if you
set its ``SaveTraces`` option.)
* Finally, you can try running Bro on a publicly available trace
file, such as `anonymized FTP traffic <http://www-nrg.ee.lbl.gov
/anonymized-traces.html>`__, `headers-only enterprise traffic
<http://www.icir.org/enterprise-tracing/Overview.html>`__, or
`Defcon traffic <http://cctf.shmoo.com/>`__. Some of these
particularly stress certain components of Bro (e.g., the Defcon
traces contain tons of scans).
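Here is a sketch of the ``-w`` approach mentioned above, recording everything
Bro processes from a live interface into a trace file (the interface and file
names are placeholders):
.. console::
> bro -i en0 -w bro-recorded.trace local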
Once you have a trace that demonstrates the effect, you will often
notice that it's pretty big, in particular if recorded from the link
you're monitoring. Therefore, the next step is to shrink its size as
much as possible. Here are a few things you can try to this end:
* Very often, a single connection is able to demonstrate the problem.
If you can identify which one it is (e.g., from one of Bro's
``*.log`` files) you can extract the connection's packets from the
trace using tcpdump by filtering for the corresponding 4-tuple of
addresses and ports:
.. console::
> tcpdump -r large.trace -w small.trace host <ip1> and port <port1> and host <ip2> and port <port2>
* If you can't reduce the problem to a connection, try to identify
either a host pair or a single host triggering it, and filter down
the trace accordingly.
* You can try to extract a smaller time slice from the trace using
`TCPslice <http://www.tcpdump.org/related.html>`__. For example, to
extract the first 100 seconds from the trace:
.. console::
> tcpslice +100 <in >out
Alternatively, tcpdump extracts the first ``n`` packets with its
option ``-c <n>``.
Getting More Information After a Crash
--------------------------------------
If Bro crashes, a *core dump* can be very helpful to nail down the
problem. Examining a core is not for the faint of heart but can reveal
extremely useful information.
First, you should configure Bro with the option ``--enable-debug`` and
recompile; this will disable all compiler optimizations and thus make
the core dump more useful (don't expect great performance with this
version though; compiling Bro without optimization has a noticeable
impact on its CPU usage). Then enable core dumps if you haven't
already (e.g., ``ulimit -c unlimited`` if you're using bash).
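For example (assuming a bash shell and a build from a source checkout):
.. console::
> ./configure --enable-debug
> make && make install
> ulimit -c unlimited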
Once Bro has crashed, start gdb with the Bro binary and the file
containing the core dump. (Alternatively, you can also run Bro
directly inside gdb instead of working from a core file.) The first
helpful information to include with your tracker ticket is a stack
backtrace, which you get with gdb's ``bt`` command:
.. console::
> gdb bro core
[...]
> bt
If the crash occurs inside Bro's script interpreter, the next thing to
do is to identify the line of script code processed just before the
abnormal termination. Look for methods in the stack backtrace which
belong to any of the script interpreter's classes. Roughly speaking,
these are all classes with names ending in ``Expr``, ``Stmt``, or
``Val``. Then climb up the stack with ``up`` until you reach the first
of these methods. The object to which ``this`` is pointing will have a
``Location`` object, which in turn contains the file name and line
number of the corresponding piece of script code. Continuing the
example from above, here's how to get that information:
.. console::
[in gdb]
> up
> ...
> up
> print this->location->filename
> print this->location->first_line
If the crash occurs while processing input packets but you cannot
directly tell which connection is responsible (and thus not extract
its packets from the trace as suggested above), try getting the
4-tuple of the connection currently being processed from the core dump
by again examining the stack backtrace, this time looking for methods
belonging to the ``Connection`` class. That class has members
``orig_addr``/``resp_addr`` and ``orig_port``/``resp_port`` storing
(pointers to) the IP addresses and ports respectively:
.. console::
[in gdb]
> up
> ...
> up
> printf "%08x:%04x %08x:%04x\n", *this->orig_addr, this->orig_port, *this->resp_addr, this->resp_port
Note that these values are stored in `network byte order
<http://en.wikipedia.org/wiki/Endianness#Endianness_in_networking>`__
so you will need to flip the bytes around if you are on a little-endian
machine (which is why the above example prints them in hex). For
example, if an IP address prints as ``0100007f``, that's 127.0.0.1.

View file

@ -1,16 +1,3 @@
set(BIF_SRC_DIR ${PROJECT_SOURCE_DIR}/src)
set(RST_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/rest_output)
set(DOC_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/out)
set(DOC_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/source)
set(DOC_SOURCE_WORKDIR ${CMAKE_CURRENT_BINARY_DIR}/source)
file(GLOB_RECURSE DOC_SOURCES FOLLOW_SYMLINKS "*")
# configure the Sphinx config file (expand variables CMake might know about)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in
${CMAKE_CURRENT_BINARY_DIR}/conf.py
@ONLY)
# find out what BROPATH to use when executing bro # find out what BROPATH to use when executing bro
execute_process(COMMAND ${CMAKE_BINARY_DIR}/bro-path-dev execute_process(COMMAND ${CMAKE_BINARY_DIR}/bro-path-dev
OUTPUT_VARIABLE BROPATH OUTPUT_VARIABLE BROPATH
@ -48,7 +35,7 @@ endif ()
# which summary text can be extracted at build time # which summary text can be extracted at build time
# ${group}_doc_names: a running list of reST style document names that can be # ${group}_doc_names: a running list of reST style document names that can be
# given to a :doc: role, shared indices with ${group}_files # given to a :doc: role, shared indices with ${group}_files
#
macro(REST_TARGET srcDir broInput) macro(REST_TARGET srcDir broInput)
set(absSrcPath ${srcDir}/${broInput}) set(absSrcPath ${srcDir}/${broInput})
get_filename_component(basename ${broInput} NAME) get_filename_component(basename ${broInput} NAME)
@ -86,12 +73,14 @@ macro(REST_TARGET srcDir broInput)
elseif (${extension} STREQUAL ".bif.bro") elseif (${extension} STREQUAL ".bif.bro")
set(group bifs) set(group bifs)
elseif (relDstDir) elseif (relDstDir)
set(pkgIndex scripts/${relDstDir}/index) set(group ${relDstDir}/index)
set(group ${pkgIndex})
# add package index to master package list if not already in it # add package index to master package list if not already in it
list(FIND MASTER_PKG_LIST ${pkgIndex} _found) # and if a __load__.bro exists in the original script directory
list(FIND MASTER_PKG_LIST ${relDstDir} _found)
if (_found EQUAL -1) if (_found EQUAL -1)
list(APPEND MASTER_PKG_LIST ${pkgIndex}) if (EXISTS ${CMAKE_SOURCE_DIR}/scripts/${relDstDir}/__load__.bro)
list(APPEND MASTER_PKG_LIST ${relDstDir})
endif ()
endif () endif ()
else () else ()
set(group "") set(group "")
@ -119,7 +108,7 @@ macro(REST_TARGET srcDir broInput)
ARGS -E remove_directory .state ARGS -E remove_directory .state
# generate the reST documentation using bro # generate the reST documentation using bro
COMMAND BROPATH=${BROPATH}:${srcDir} ${CMAKE_BINARY_DIR}/src/bro COMMAND BROPATH=${BROPATH}:${srcDir} ${CMAKE_BINARY_DIR}/src/bro
ARGS --doc-scripts ${broInput} || (rm -rf .state *.log *.rst && exit 1) ARGS -b -Z ${broInput} || (rm -rf .state *.log *.rst && exit 1)
# move generated doc into a new directory tree that # move generated doc into a new directory tree that
# defines the final structure of documents # defines the final structure of documents
COMMAND "${CMAKE_COMMAND}" COMMAND "${CMAKE_COMMAND}"
@ -134,6 +123,7 @@ macro(REST_TARGET srcDir broInput)
ARGS -rf .state *.log *.rst ARGS -rf .state *.log *.rst
DEPENDS bro DEPENDS bro
DEPENDS ${absSrcPath} DEPENDS ${absSrcPath}
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
COMMENT "[Bro] Generating reST docs for ${broInput}" COMMENT "[Bro] Generating reST docs for ${broInput}"
) )
@ -143,19 +133,21 @@ endmacro(REST_TARGET)
include(DocSourcesList.cmake) include(DocSourcesList.cmake)
# create temporary list of all docs to include in the master policy/index file # create temporary list of all docs to include in the master policy/index file
set(MASTER_POLICY_INDEX ${CMAKE_CURRENT_BINARY_DIR}/policy_index)
file(WRITE ${MASTER_POLICY_INDEX} "${MASTER_POLICY_INDEX_TEXT}") file(WRITE ${MASTER_POLICY_INDEX} "${MASTER_POLICY_INDEX_TEXT}")
# create the temporary list of all packages to include in the master # create the temporary list of all packages to include in the master
# policy/packages.rst file # policy/packages.rst file
set(MASTER_PACKAGE_INDEX ${CMAKE_CURRENT_BINARY_DIR}/pkg_index)
set(MASTER_PKG_INDEX_TEXT "") set(MASTER_PKG_INDEX_TEXT "")
foreach (pkg ${MASTER_PKG_LIST}) foreach (pkg ${MASTER_PKG_LIST})
# strip of the trailing /index for the link name set(MASTER_PKG_INDEX_TEXT
get_filename_component(lnktxt ${pkg} PATH) "${MASTER_PKG_INDEX_TEXT}\n:doc:`${pkg} <${pkg}/index>`\n")
# pretty-up the link name by removing common scripts/ prefix if (EXISTS ${CMAKE_SOURCE_DIR}/scripts/${pkg}/README)
string(REPLACE "scripts/" "" lnktxt "${lnktxt}") file(STRINGS ${CMAKE_SOURCE_DIR}/scripts/${pkg}/README pkgreadme)
set(MASTER_PKG_INDEX_TEXT "${MASTER_PKG_INDEX_TEXT}\n ${lnktxt} <${pkg}>") foreach (line ${pkgreadme})
set(MASTER_PKG_INDEX_TEXT "${MASTER_PKG_INDEX_TEXT}\n ${line}")
endforeach ()
set(MASTER_PKG_INDEX_TEXT "${MASTER_PKG_INDEX_TEXT}\n")
endif ()
endforeach () endforeach ()
file(WRITE ${MASTER_PACKAGE_INDEX} "${MASTER_PKG_INDEX_TEXT}") file(WRITE ${MASTER_PACKAGE_INDEX} "${MASTER_PKG_INDEX_TEXT}")
@ -186,11 +178,11 @@ if (EXISTS ${RST_OUTPUT_DIR})
list(FIND ALL_REST_OUTPUTS ${_doc} _found) list(FIND ALL_REST_OUTPUTS ${_doc} _found)
if (_found EQUAL -1) if (_found EQUAL -1)
file(REMOVE ${_doc}) file(REMOVE ${_doc})
message(STATUS "AutoDoc: remove stale reST doc: ${_doc}") message(STATUS "Broxygen: remove stale reST doc: ${_doc}")
string(REPLACE .rst .bro _brofile ${_doc}) string(REPLACE .rst .bro _brofile ${_doc})
if (EXISTS ${_brofile}) if (EXISTS ${_brofile})
file(REMOVE ${_brofile}) file(REMOVE ${_brofile})
message(STATUS "AutoDoc: remove stale bro source: ${_brofile}") message(STATUS "Broxygen: remove stale bro source: ${_brofile}")
endif () endif ()
endif () endif ()
endforeach () endforeach ()
@ -211,54 +203,3 @@ add_custom_target(restclean
COMMAND "${CMAKE_COMMAND}" -E remove_directory COMMAND "${CMAKE_COMMAND}" -E remove_directory
${RST_OUTPUT_DIR} ${RST_OUTPUT_DIR}
VERBATIM) VERBATIM)
# The "doc" target generates reST documentation for any outdated bro scripts
# and then uses Sphinx to generate HTML documentation from the reST
add_custom_target(doc
# copy the template documentation to the build directory
# to give as input for sphinx
COMMAND "${CMAKE_COMMAND}" -E copy_directory
${DOC_SOURCE_DIR}
${DOC_SOURCE_WORKDIR}
# copy generated policy script documentation into the
# working copy of the template documentation
COMMAND "${CMAKE_COMMAND}" -E copy_directory
${RST_OUTPUT_DIR}
${DOC_SOURCE_WORKDIR}/scripts
# append to the master index of all policy scripts
COMMAND cat ${MASTER_POLICY_INDEX} >>
${DOC_SOURCE_WORKDIR}/scripts/index.rst
# append to the master index of all policy packages
COMMAND cat ${MASTER_PACKAGE_INDEX} >>
${DOC_SOURCE_WORKDIR}/packages.rst
# construct a reST file for each group
COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/group_index_generator.py
${CMAKE_CURRENT_BINARY_DIR}/group_list
${CMAKE_CURRENT_BINARY_DIR}
${DOC_SOURCE_WORKDIR}
# tell sphinx to generate html
COMMAND sphinx-build
-b html
-c ${CMAKE_CURRENT_BINARY_DIR}
-d ${DOC_OUTPUT_DIR}/doctrees
${DOC_SOURCE_WORKDIR}
${DOC_OUTPUT_DIR}/html
# create symlink to the html output directory for convenience
COMMAND "${CMAKE_COMMAND}" -E create_symlink
${DOC_OUTPUT_DIR}/html
${CMAKE_BINARY_DIR}/html
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "[Sphinx] Generating HTML policy script docs"
# SOURCES just adds stuff to IDE projects as a convenience
SOURCES ${DOC_SOURCES})
# The "docclean" target removes just the Sphinx input/output directories
# from the build directory.
add_custom_target(docclean
COMMAND "${CMAKE_COMMAND}" -E remove_directory
${DOC_SOURCE_WORKDIR}
COMMAND "${CMAKE_COMMAND}" -E remove_directory
${DOC_OUTPUT_DIR}
VERBATIM)
add_dependencies(doc docclean restdoc)

View file

@ -16,13 +16,13 @@ rest_target(${CMAKE_CURRENT_SOURCE_DIR} example.bro internal)
rest_target(${psd} base/init-default.bro internal) rest_target(${psd} base/init-default.bro internal)
rest_target(${psd} base/init-bare.bro internal) rest_target(${psd} base/init-bare.bro internal)
rest_target(${CMAKE_BINARY_DIR}/src bro.bif.bro) rest_target(${CMAKE_BINARY_DIR}/src base/bro.bif.bro)
rest_target(${CMAKE_BINARY_DIR}/src const.bif.bro) rest_target(${CMAKE_BINARY_DIR}/src base/const.bif.bro)
rest_target(${CMAKE_BINARY_DIR}/src event.bif.bro) rest_target(${CMAKE_BINARY_DIR}/src base/event.bif.bro)
rest_target(${CMAKE_BINARY_DIR}/src logging.bif.bro) rest_target(${CMAKE_BINARY_DIR}/src base/logging.bif.bro)
rest_target(${CMAKE_BINARY_DIR}/src reporter.bif.bro) rest_target(${CMAKE_BINARY_DIR}/src base/reporter.bif.bro)
rest_target(${CMAKE_BINARY_DIR}/src strings.bif.bro) rest_target(${CMAKE_BINARY_DIR}/src base/strings.bif.bro)
rest_target(${CMAKE_BINARY_DIR}/src types.bif.bro) rest_target(${CMAKE_BINARY_DIR}/src base/types.bif.bro)
rest_target(${psd} base/frameworks/cluster/main.bro) rest_target(${psd} base/frameworks/cluster/main.bro)
rest_target(${psd} base/frameworks/cluster/nodes/manager.bro) rest_target(${psd} base/frameworks/cluster/nodes/manager.bro)
rest_target(${psd} base/frameworks/cluster/nodes/proxy.bro) rest_target(${psd} base/frameworks/cluster/nodes/proxy.bro)
@ -33,12 +33,18 @@ rest_target(${psd} base/frameworks/control/main.bro)
rest_target(${psd} base/frameworks/dpd/main.bro) rest_target(${psd} base/frameworks/dpd/main.bro)
rest_target(${psd} base/frameworks/intel/main.bro) rest_target(${psd} base/frameworks/intel/main.bro)
rest_target(${psd} base/frameworks/logging/main.bro) rest_target(${psd} base/frameworks/logging/main.bro)
rest_target(${psd} base/frameworks/logging/postprocessors/scp.bro)
rest_target(${psd} base/frameworks/logging/postprocessors/sftp.bro)
rest_target(${psd} base/frameworks/logging/writers/ascii.bro) rest_target(${psd} base/frameworks/logging/writers/ascii.bro)
rest_target(${psd} base/frameworks/metrics/cluster.bro)
rest_target(${psd} base/frameworks/metrics/main.bro) rest_target(${psd} base/frameworks/metrics/main.bro)
rest_target(${psd} base/frameworks/metrics/non-cluster.bro)
rest_target(${psd} base/frameworks/notice/actions/add-geodata.bro) rest_target(${psd} base/frameworks/notice/actions/add-geodata.bro)
rest_target(${psd} base/frameworks/notice/actions/drop.bro) rest_target(${psd} base/frameworks/notice/actions/drop.bro)
rest_target(${psd} base/frameworks/notice/actions/email_admin.bro) rest_target(${psd} base/frameworks/notice/actions/email_admin.bro)
rest_target(${psd} base/frameworks/notice/actions/page.bro) rest_target(${psd} base/frameworks/notice/actions/page.bro)
rest_target(${psd} base/frameworks/notice/actions/pp-alarms.bro)
rest_target(${psd} base/frameworks/notice/cluster.bro)
rest_target(${psd} base/frameworks/notice/extend-email/hostnames.bro) rest_target(${psd} base/frameworks/notice/extend-email/hostnames.bro)
rest_target(${psd} base/frameworks/notice/main.bro) rest_target(${psd} base/frameworks/notice/main.bro)
rest_target(${psd} base/frameworks/notice/weird.bro) rest_target(${psd} base/frameworks/notice/weird.bro)
@ -47,7 +53,6 @@ rest_target(${psd} base/frameworks/packet-filter/netstats.bro)
rest_target(${psd} base/frameworks/reporter/main.bro) rest_target(${psd} base/frameworks/reporter/main.bro)
rest_target(${psd} base/frameworks/signatures/main.bro) rest_target(${psd} base/frameworks/signatures/main.bro)
rest_target(${psd} base/frameworks/software/main.bro) rest_target(${psd} base/frameworks/software/main.bro)
rest_target(${psd} base/frameworks/time-machine/notice.bro)
rest_target(${psd} base/protocols/conn/contents.bro) rest_target(${psd} base/protocols/conn/contents.bro)
rest_target(${psd} base/protocols/conn/inactivity.bro) rest_target(${psd} base/protocols/conn/inactivity.bro)
rest_target(${psd} base/protocols/conn/main.bro) rest_target(${psd} base/protocols/conn/main.bro)
@ -60,15 +65,9 @@ rest_target(${psd} base/protocols/http/file-extract.bro)
rest_target(${psd} base/protocols/http/file-hash.bro) rest_target(${psd} base/protocols/http/file-hash.bro)
rest_target(${psd} base/protocols/http/file-ident.bro) rest_target(${psd} base/protocols/http/file-ident.bro)
rest_target(${psd} base/protocols/http/main.bro) rest_target(${psd} base/protocols/http/main.bro)
rest_target(${psd} base/protocols/http/partial-content.bro)
rest_target(${psd} base/protocols/http/utils.bro) rest_target(${psd} base/protocols/http/utils.bro)
rest_target(${psd} base/protocols/irc/dcc-send.bro) rest_target(${psd} base/protocols/irc/dcc-send.bro)
rest_target(${psd} base/protocols/irc/main.bro) rest_target(${psd} base/protocols/irc/main.bro)
rest_target(${psd} base/protocols/mime/base.bro)
rest_target(${psd} base/protocols/mime/file-extract.bro)
rest_target(${psd} base/protocols/mime/file-hash.bro)
rest_target(${psd} base/protocols/mime/file-ident.bro)
rest_target(${psd} base/protocols/rpc/base.bro)
rest_target(${psd} base/protocols/smtp/entities-excerpt.bro) rest_target(${psd} base/protocols/smtp/entities-excerpt.bro)
rest_target(${psd} base/protocols/smtp/entities.bro) rest_target(${psd} base/protocols/smtp/entities.bro)
rest_target(${psd} base/protocols/smtp/main.bro) rest_target(${psd} base/protocols/smtp/main.bro)
@ -88,8 +87,7 @@ rest_target(${psd} base/utils/patterns.bro)
rest_target(${psd} base/utils/site.bro) rest_target(${psd} base/utils/site.bro)
rest_target(${psd} base/utils/strings.bro) rest_target(${psd} base/utils/strings.bro)
rest_target(${psd} base/utils/thresholds.bro) rest_target(${psd} base/utils/thresholds.bro)
rest_target(${psd} policy/frameworks/communication/listen-clear.bro) rest_target(${psd} policy/frameworks/communication/listen.bro)
rest_target(${psd} policy/frameworks/communication/listen-ssl.bro)
rest_target(${psd} policy/frameworks/control/controllee.bro) rest_target(${psd} policy/frameworks/control/controllee.bro)
rest_target(${psd} policy/frameworks/control/controller.bro) rest_target(${psd} policy/frameworks/control/controller.bro)
rest_target(${psd} policy/frameworks/dpd/detect-protocols.bro) rest_target(${psd} policy/frameworks/dpd/detect-protocols.bro)
@ -100,15 +98,17 @@ rest_target(${psd} policy/frameworks/metrics/ssl-example.bro)
rest_target(${psd} policy/frameworks/software/version-changes.bro) rest_target(${psd} policy/frameworks/software/version-changes.bro)
rest_target(${psd} policy/frameworks/software/vulnerable.bro) rest_target(${psd} policy/frameworks/software/vulnerable.bro)
rest_target(${psd} policy/frameworks/tunnel.bro) rest_target(${psd} policy/frameworks/tunnel.bro)
rest_target(${psd} policy/integration/barnyard2/base.bro) rest_target(${psd} policy/integration/barnyard2/main.bro)
rest_target(${psd} policy/integration/barnyard2/event.bro)
rest_target(${psd} policy/integration/barnyard2/types.bro) rest_target(${psd} policy/integration/barnyard2/types.bro)
rest_target(${psd} policy/misc/analysis-groups.bro) rest_target(${psd} policy/misc/analysis-groups.bro)
rest_target(${psd} policy/misc/capture-loss.bro)
rest_target(${psd} policy/misc/loaded-scripts.bro) rest_target(${psd} policy/misc/loaded-scripts.bro)
rest_target(${psd} policy/misc/profiling.bro) rest_target(${psd} policy/misc/profiling.bro)
rest_target(${psd} policy/misc/stats.bro)
rest_target(${psd} policy/misc/trim-trace-file.bro) rest_target(${psd} policy/misc/trim-trace-file.bro)
rest_target(${psd} policy/protocols/conn/known-hosts.bro) rest_target(${psd} policy/protocols/conn/known-hosts.bro)
rest_target(${psd} policy/protocols/conn/known-services.bro) rest_target(${psd} policy/protocols/conn/known-services.bro)
rest_target(${psd} policy/protocols/conn/weirds.bro)
rest_target(${psd} policy/protocols/dns/auth-addl.bro) rest_target(${psd} policy/protocols/dns/auth-addl.bro)
rest_target(${psd} policy/protocols/dns/detect-external-names.bro) rest_target(${psd} policy/protocols/dns/detect-external-names.bro)
rest_target(${psd} policy/protocols/ftp/detect.bro) rest_target(${psd} policy/protocols/ftp/detect.bro)
@ -117,7 +117,8 @@ rest_target(${psd} policy/protocols/http/detect-MHR.bro)
rest_target(${psd} policy/protocols/http/detect-intel.bro) rest_target(${psd} policy/protocols/http/detect-intel.bro)
rest_target(${psd} policy/protocols/http/detect-sqli.bro) rest_target(${psd} policy/protocols/http/detect-sqli.bro)
rest_target(${psd} policy/protocols/http/detect-webapps.bro) rest_target(${psd} policy/protocols/http/detect-webapps.bro)
rest_target(${psd} policy/protocols/http/headers.bro) rest_target(${psd} policy/protocols/http/header-names.bro)
rest_target(${psd} policy/protocols/http/software-browser-plugins.bro)
rest_target(${psd} policy/protocols/http/software.bro) rest_target(${psd} policy/protocols/http/software.bro)
rest_target(${psd} policy/protocols/http/var-extraction-cookies.bro) rest_target(${psd} policy/protocols/http/var-extraction-cookies.bro)
rest_target(${psd} policy/protocols/http/var-extraction-uri.bro) rest_target(${psd} policy/protocols/http/var-extraction-uri.bro)
@ -128,10 +129,12 @@ rest_target(${psd} policy/protocols/ssh/detect-bruteforcing.bro)
rest_target(${psd} policy/protocols/ssh/geo-data.bro) rest_target(${psd} policy/protocols/ssh/geo-data.bro)
rest_target(${psd} policy/protocols/ssh/interesting-hostnames.bro) rest_target(${psd} policy/protocols/ssh/interesting-hostnames.bro)
rest_target(${psd} policy/protocols/ssh/software.bro) rest_target(${psd} policy/protocols/ssh/software.bro)
rest_target(${psd} policy/protocols/ssl/cert-hash.bro)
rest_target(${psd} policy/protocols/ssl/expiring-certs.bro)
rest_target(${psd} policy/protocols/ssl/extract-certs-pem.bro)
rest_target(${psd} policy/protocols/ssl/known-certs.bro) rest_target(${psd} policy/protocols/ssl/known-certs.bro)
rest_target(${psd} policy/protocols/ssl/validate-certs.bro) rest_target(${psd} policy/protocols/ssl/validate-certs.bro)
rest_target(${psd} policy/tuning/defaults/packet-fragments.bro) rest_target(${psd} policy/tuning/defaults/packet-fragments.bro)
rest_target(${psd} policy/tuning/defaults/remove-high-volume-notices.bro)
rest_target(${psd} policy/tuning/defaults/warnings.bro) rest_target(${psd} policy/tuning/defaults/warnings.bro)
rest_target(${psd} policy/tuning/track-all-assets.bro) rest_target(${psd} policy/tuning/track-all-assets.bro)
rest_target(${psd} site/local-manager.bro) rest_target(${psd} site/local-manager.bro)


@ -1,6 +1,6 @@
This directory contains scripts and templates that can be used to automate This directory contains scripts and templates that can be used to automate
the generation of Bro script documentation. Several build targets are defined the generation of Bro script documentation. Several build targets are defined
by CMake: by CMake and available in the top-level Makefile:
``restdoc`` ``restdoc``
@ -15,29 +15,10 @@ by CMake:
``build/`` directory inside ``reST`` (a symlink to ``build/`` directory inside ``reST`` (a symlink to
``doc/scripts/rest_output``). ``doc/scripts/rest_output``).
``doc``
This target depends on a Python interpreter (>=2.5) and
`Sphinx <http://sphinx.pocoo.org/>`_ being installed. Sphinx can be
installed like::
> sudo easy_install sphinx
This target will first build ``restdoc`` target and then copy the
resulting reST files as an input directory to Sphinx.
After completion, HTML documentation can be located in the CMake
``build/`` directory inside ``html`` (a symlink to
``doc/scripts/out/html``)
``restclean`` ``restclean``
This target removes any reST documentation that has been generated so far. This target removes any reST documentation that has been generated so far.
``docclean``
This target removes Sphinx inputs and outputs from the CMake ``build/`` dir.
The ``genDocSourcesList.sh`` script can be run to automatically generate The ``genDocSourcesList.sh`` script can be run to automatically generate
``DocSourcesList.cmake``, which is the file CMake uses to define the list ``DocSourcesList.cmake``, which is the file CMake uses to define the list
of documentation targets. This script should be run after adding new of documentation targets. This script should be run after adding new
@ -54,18 +35,10 @@ script's name to the blacklist, then append a ``rest_target()`` to the
``statictext`` variable where the first argument is the source directory ``statictext`` variable where the first argument is the source directory
containing the policy script to document, the second argument is the file containing the policy script to document, the second argument is the file
name of the policy script, and the third argument is the path/name of a name of the policy script, and the third argument is the path/name of a
pre-created reST document in the ``source/`` directory to which the pre-created reST document in the ``../`` source directory to which the
``make doc`` process can append script documentation references. This ``make doc`` process can append script documentation references. This
pre-created reST document should also then be linked to from the TOC tree pre-created reST document should also then be linked to from the TOC tree
in ``source/index.rst``. in ``../index.rst``.
The Sphinx source tree template in ``source/`` can be modified to add more
common/general documentation, style sheets, JavaScript, etc. The Sphinx
config file is produced from ``conf.py.in``, so that can be edited to change
various Sphinx options, like setting the default HTML rendering theme.
There is also a custom Sphinx domain implemented in ``source/ext/bro.py``
which adds some reST directives and roles that aid in generating useful
index entries and cross-references.
See ``example.bro`` for an example of how to document a Bro script such that See ``example.bro`` for an example of how to document a Bro script such that
``make doc`` will be able to produce reST/HTML documentation for it. ``make doc`` will be able to produce reST/HTML documentation for it.

5
doc/scripts/bifs.rst Normal file

@ -0,0 +1,5 @@
.. This is a stub doc to which broxygen appends during the build process
Built-In Functions (BIFs)
=========================

642
doc/scripts/builtins.rst Normal file

@ -0,0 +1,642 @@
Builtin Types and Attributes
============================
Types
-----
The Bro scripting language supports the following built-in types.
.. bro:type:: void
An internal Bro type representing the absence of a type. It is most
often seen as a possible function return type.
.. bro:type:: bool
Reflects a value with one of two meanings: true or false. The two
``bool`` constants are ``T`` and ``F``.
.. bro:type:: int
A numeric type representing a signed integer. An ``int`` constant
is a string of digits preceded by a ``+`` or ``-`` sign, e.g.
``-42`` or ``+5``. When relying on type inference, take care that the
intended type is the one inferred, e.g. ``local size_difference = 0`` will
infer :bro:type:`count`, while ``local size_difference = +0``
will infer :bro:type:`int`.
.. bro:type:: count
A numeric type representing an unsigned integer. A ``count``
constant is a string of digits, e.g. ``1234`` or ``0``.
.. bro:type:: counter
An alias to :bro:type:`count`.
.. TODO: is there anything special about this type?
.. bro:type:: double
A numeric type representing a double-precision floating-point
number. Floating-point constants are written as a string of digits
with an optional decimal point, optional scale-factor in scientific
notation, and optional ``+`` or ``-`` sign. Examples are ``-1234``,
``-1234e0``, ``3.14159``, and ``.003e-23``.
.. bro:type:: time
A temporal type representing an absolute time. There is currently
no way to specify a ``time`` constant, but one can use the
:bro:id:`current_time` or :bro:id:`network_time` built-in functions
to assign a value to a ``time``-typed variable.
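For illustration only (the handler and variable names below are arbitrary),
a ``time`` value can be obtained and used like:

.. code:: bro

    event bro_init()
        {
        local started: time = current_time();
        print started;
        print current_time() - started;   # subtracting two times yields an interval
        }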
.. bro:type:: interval
A temporal type representing a relative time. An ``interval``
constant can be written as a numeric constant followed by a time
unit where the time unit is one of ``usec``, ``sec``, ``min``,
``hr``, or ``day`` which respectively represent microseconds,
seconds, minutes, hours, and days. Whitespace between the numeric
constant and time unit is optional. Appending the letter "s" to the
time unit in order to pluralize it is also optional (to no semantic
effect). Examples of ``interval`` constants are ``3.5 min`` and
``3.5mins``. An ``interval`` can also be negated, for example ``-
12 hr`` represents "twelve hours in the past". Intervals also
support addition, subtraction, multiplication, division, and
comparison operations.
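As a brief sketch (the variable name is arbitrary):

.. code:: bro

    event bro_init()
        {
        local timeout: interval = 3.5 min;
        print timeout + 30 secs;   # 4.0 mins
        print timeout > 1 hr;      # F
        }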
.. bro:type:: string
A type used to hold character-string values which represent text.
String constants are created by enclosing text in double quotes (")
and the backslash character (\\) introduces escape sequences.
Note that Bro represents strings internally as a count and vector of
bytes rather than a NUL-terminated byte string (although string
constants are also automatically NUL-terminated). This is because
network traffic can easily introduce NULs into strings either by
nature of an application, inadvertently, or maliciously. And while
NULs are allowed in Bro strings, when present in strings passed as
arguments to many functions, a run-time error can occur as their
presence likely indicates some sort of problem. In that case, the
string is also only shown to the user as the literal
"<string-with-NUL>" string.
.. bro:type:: pattern
A type representing regular-expression patterns which can be used
for fast text-searching operations. Pattern constants are created
by enclosing text within forward slashes (/) and use the same syntax
as the patterns supported by the `flex lexical analyzer
<http://flex.sourceforge.net/manual/Patterns.html>`_. The speed of
regular expression matching does not depend on the complexity or
size of the patterns. Patterns support two types of matching, exact
and embedded.
In exact matching the ``==`` equality relational operator is used
with one :bro:type:`string` operand and one :bro:type:`pattern`
operand to check whether the full string exactly matches the
pattern. In this case, the ``^`` beginning-of-line and ``$``
end-of-line anchors are redundant since the pattern is implicitly
anchored to the beginning and end of the line to facilitate an exact
match. For example::
"foo" == /foo|bar/
yields true, while::
/foo|bar/ == "foobar"
yields false. The ``!=`` operator would yield the negation of ``==``.
In embedded matching the ``in`` operator is again used with one
:bro:type:`string` operand and one :bro:type:`pattern` operand
(which must be on the left-hand side), but tests whether the pattern
appears anywhere within the given string. For example::
/foo|bar/ in "foobar"
yields true, while::
/^oob/ in "foobar"
is false since "oob" does not appear at the start of "foobar". The
``!in`` operator would yield the negation of ``in``.
.. bro:type:: enum
A type allowing the specification of a set of related values that
have no further structure. The only operations allowed on
enumerations are equality comparisons and they do not have
associated values or ordering. An example declaration:
.. code:: bro
type color: enum { Red, White, Blue, };
The last comma after ``Blue`` is optional.
.. bro:type:: timer
.. TODO: is this a type that's exposed to users?
.. bro:type:: port
A type representing transport-level port numbers. Besides TCP and
UDP ports, there is a concept of an ICMP "port" where the source
port is the ICMP message type and the destination port the ICMP
message code. A ``port`` constant is written as an unsigned integer
followed by one of ``/tcp``, ``/udp``, ``/icmp``, or ``/unknown``.
Ports can be compared for equality and also for ordering. When
comparing order across transport-level protocols, ``unknown`` <
``tcp`` < ``udp`` < ``icmp``, for example ``65535/tcp`` is smaller
than ``0/udp``.
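A short sketch of declaring and comparing ``port`` values (names are arbitrary):

.. code:: bro

    event bro_init()
        {
        local p: port = 80/tcp;
        if ( p < 1024/tcp )
            print "privileged TCP port";
        print 65535/tcp < 0/udp;   # T: the protocol is compared before the number
        }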
.. bro:type:: addr
A type representing an IP address.
IPv4 address constants are written in "dotted quad" format,
``A1.A2.A3.A4``, where Ai all lie between 0 and 255.
IPv6 address constants are written as colon-separated hexadecimal form
as described by :rfc:`2373`, but additionally encased in square brackets.
The mixed notation with embedded IPv4 addresses as dotted-quads in the
lower 32 bits is also allowed.
Some examples: ``[2001:db8::1]``, ``[::ffff:192.168.1.100]``, or
``[aaaa:bbbb:cccc:dddd:eeee:ffff:1111:2222]``.
Hostname constants can also be used, but since a hostname can
correspond to multiple IP addresses, the type of such a variable is a
:bro:type:`set` of :bro:type:`addr` elements. For example:
.. code:: bro
local a = www.google.com;
Addresses can be compared for (in)equality using ``==`` and ``!=``.
They can also be masked with ``/`` to produce a :bro:type:`subnet`:
.. code:: bro
local a: addr = 192.168.1.100;
local s: subnet = 192.168.0.0/16;
if ( a/16 == s )
print "true";
And checked for inclusion within a :bro:type:`subnet` using ``in`` :
.. code:: bro
local a: addr = 192.168.1.100;
local s: subnet = 192.168.0.0/16;
if ( a in s )
print "true";
.. bro:type:: subnet
A type representing a block of IP addresses in CIDR notation. A
``subnet`` constant is written as an :bro:type:`addr` followed by a
slash (/) and then the network prefix size specified as a decimal
number. For example, ``192.168.0.0/16`` or ``[fe80::]/64``.
.. bro:type:: any
Used to bypass strong typing. For example, a function can take an
argument of type ``any`` when it may be of different types.
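As a minimal sketch (``describe`` is a hypothetical helper, not a built-in):

.. code:: bro

    # Hypothetical helper that accepts an argument of any type.
    function describe(x: any): string
        {
        return fmt("value: %s", x);
        }

    event bro_init()
        {
        print describe(42);
        print describe(1.2.3.4);
        }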
.. bro:type:: table
An associative array that maps from one set of values to another. The
values being mapped are termed the *index* or *indices* and the
result of the mapping is called the *yield*. Indexing into tables
is very efficient, and internally it is just a single hash table
lookup.
The table declaration syntax is::
table [ type^+ ] of type
where *type^+* is one or more types, separated by commas. For example:
.. code:: bro
global a: table[count] of string;
declares a table indexed by :bro:type:`count` values and yielding
:bro:type:`string` values. The yield type can also be more complex:
.. code:: bro
global a: table[count] of table[addr, port] of string;
which declares a table indexed by :bro:type:`count` and yielding
another :bro:type:`table` which is indexed by an :bro:type:`addr`
and :bro:type:`port` to yield a :bro:type:`string`.
Initialization of tables occurs by enclosing a set of initializers within
braces, for example:
.. code:: bro
global t: table[count] of string = {
[11] = "eleven",
[5] = "five",
};
Accessing table elements is done by enclosing values within square
brackets (``[]``), for example:
.. code:: bro
t[13] = "thirteen";
And membership can be tested with ``in``:
.. code:: bro
if ( 13 in t )
...
Iterate over tables with a ``for`` loop:
.. code:: bro
local t: table[count] of string;
for ( n in t )
...
local services: table[addr, port] of string;
for ( [a, p] in services )
...
Remove individual table elements with ``delete``:
.. code:: bro
delete t[13];
Nothing happens if the element with value ``13`` isn't present in
the table.
Table size can be obtained by placing the table identifier between
vertical pipe (|) characters:
.. code:: bro
|t|
.. bro:type:: set
A set is like a :bro:type:`table`, but it is a collection of indices
that do not map to any yield value. They are declared with the
syntax::
set [ type^+ ]
where *type^+* is one or more types separated by commas.
Sets are initialized by listing elements enclosed by curly braces:
.. code:: bro
global s: set[port] = { 21/tcp, 23/tcp, 80/tcp, 443/tcp };
global s2: set[port, string] = { [21/tcp, "ftp"], [23/tcp, "telnet"] };
The types are explicitly shown in the example above, but they could
have been left to type inference.
Set membership is tested with ``in``:
.. code:: bro
if ( 21/tcp in s )
...
Elements are added with ``add``:
.. code:: bro
add s[22/tcp];
And removed with ``delete``:
.. code:: bro
delete s[21/tcp];
Set size can be obtained by placing the set identifier between
vertical pipe (|) characters:
.. code:: bro
|s|
.. bro:type:: vector
A vector is like a :bro:type:`table`, except it's always indexed by a
:bro:type:`count`. A vector is declared like:
.. code:: bro
global v: vector of string;
And can be initialized with the vector constructor:
.. code:: bro
global v: vector of string = vector("one", "two", "three");
Adding an element to a vector involves accessing/assigning it:
.. code:: bro
v[3] = "four";
Note how the vector indexing is 0-based.
Vector size can be obtained by placing the vector identifier between
vertical pipe (|) characters:
.. code:: bro
|v|
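Iterating over a vector visits its indices, similar to table iteration
(continuing the example above):

.. code:: bro

    for ( i in v )
        print v[i];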
.. bro:type:: record
A ``record`` is a collection of values. Each value has a field name
and a type. Values do not need to have the same type and the types
have no restrictions. An example record type definition:
.. code:: bro
type MyRecordType: record {
c: count;
s: string &optional;
};
Access to a record field uses the dollar sign (``$``) operator:
.. code:: bro
global r: MyRecordType;
r$c = 13;
Record assignment can be done field by field or as a whole like:
.. code:: bro
r = [$c = 13, $s = "thirteen"];
When assigning a whole record value, all fields that are not
:bro:attr:`&optional` and do not have a :bro:attr:`&default` attribute
must be specified.
To test for existence of a field that is :bro:attr:`&optional`, use the
``?$`` operator:
.. code:: bro
if ( r?$s )
...
.. bro:type:: file
Bro supports writing to files, but not reading from them. For
example, declare, open, and write to a file and finally close it
like:
.. code:: bro
global f: file = open("myfile");
print f, "hello, world";
close(f);
Writing to files like this for logging usually isn't recommended; for better
logging support, see :doc:`/logging`.
.. bro:type:: func
See :bro:type:`function`.
.. bro:type:: function
Function types in Bro are declared using::
function( argument* ): type
where *argument* is a (possibly empty) comma-separated list of
arguments, and *type* is an optional return type. For example:
.. code:: bro
global greeting: function(name: string): string;
Here ``greeting`` is an identifier with a certain function type.
The function body is not defined yet and ``greeting`` could even
have different function body values at different times. To define
a function including a body value, the syntax is like:
.. code:: bro
function greeting(name: string): string
{
return "Hello, " + name;
}
Note that in the definition above, it's not necessary to have first
(forward) declared ``greeting`` as a function type, but when such a
declaration exists, the argument list and return type must match it
exactly.
Function types don't need to have a name and can be assigned anonymously:
.. code:: bro
greeting = function(name: string): string { return "Hi, " + name; };
And finally, the function can be called like:
.. code:: bro
print greeting("Dave");
.. bro:type:: event
Event handlers are nearly identical in both syntax and semantics to
a :bro:type:`function`, with the two differences being that event
handlers have no return type since they never return a value, and
you cannot call an event handler. Instead of directly calling an
event handler from a script, event handler bodies are executed when
they are invoked by one of three different methods:
- From the event engine
When the event engine detects an event for which you have
defined a corresponding event handler, it queues an event for
that handler. The handler is invoked as soon as the event
engine finishes processing the current packet and flushing the
invocation of other event handlers that were queued first.
- With the ``event`` statement from a script
Immediately queuing invocation of an event handler occurs like:
.. code:: bro
event password_exposed(user, password);
This assumes that ``password_exposed`` was previously declared
as an event handler type with compatible arguments.
- Via the ``schedule`` expression in a script
This delays the invocation of event handlers until some time in
the future. For example:
.. code:: bro
schedule 5 secs { password_exposed(user, password) };
Multiple event handler bodies can be defined for the same event handler
identifier and the body of each will be executed in turn. Ordering
of execution can be influenced with :bro:attr:`&priority`.
Attributes
----------
Attributes occur at the end of type/event declarations and change their
behavior. The syntax is ``&key`` or ``&key=val``, e.g., ``type T:
set[count] &read_expire=5min`` or ``event foo() &priority=-3``. The Bro
scripting language supports the following built-in attributes.
.. bro:attr:: &optional
Allows a record field to be missing. For example the type ``record {
a: addr; b: port &optional; }`` could be instantiated both as
singleton ``[$a=127.0.0.1]`` or pair ``[$a=127.0.0.1, $b=80/tcp]``.
.. bro:attr:: &default
Uses a default value for a record field or container elements. For
example, ``table[int] of string &default="foo"`` would create a
table that returns the :bro:type:`string` ``"foo"`` for any
non-existing index.
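A brief sketch of the container case (the table name is arbitrary):

.. code:: bro

    global nicknames: table[addr] of string = {
        [127.0.0.1] = "localhost",
    } &default="unknown";

    event bro_init()
        {
        print nicknames[127.0.0.1];   # localhost
        print nicknames[10.0.0.1];    # unknown (missing index yields the default)
        }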
.. bro:attr:: &redef
Allows for redefinition of initial object values. This is typically
used with constants, for example, ``const clever = T &redef;`` would
allow the constant's value to be changed later via a ``redef``
statement, e.g. by another script loaded afterwards.
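For illustration (``max_attempts`` is just an example name):

.. code:: bro

    # In the script providing the option:
    const max_attempts = 5 &redef;

    # In a script loaded later, the value can be changed without
    # editing the original declaration:
    redef max_attempts = 10;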
.. bro:attr:: &rotate_interval
Rotates a file after a specified interval.
.. bro:attr:: &rotate_size
Rotates a file after it has reached a given size in bytes.
.. bro:attr:: &add_func
.. TODO: needs to be documented.
.. bro:attr:: &delete_func
.. TODO: needs to be documented.
.. bro:attr:: &expire_func
Called right before a container element expires. The function's
first parameter is of the same type of the container and the second
parameter the same type of the container's index. The return
value is a :bro:type:`interval` indicating the amount of additional
time to wait before expiring the container element at the given
index (which will trigger another execution of this function).
.. bro:attr:: &read_expire
Specifies a read expiration timeout for container elements. That is,
the element expires after the given amount of time since the last
time it has been read. Note that a write also counts as a read.
.. bro:attr:: &write_expire
Specifies a write expiration timeout for container elements. That
is, the element expires after the given amount of time since the
last time it has been written.
.. bro:attr:: &create_expire
Specifies a creation expiration timeout for container elements. That
is, the element expires after the given amount of time since it has
been inserted into the container, regardless of any reads or writes.
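Putting a few of these together in one sketch (the identifiers are arbitrary):

.. code:: bro

    function host_expired(t: table[addr] of count, idx: addr): interval
        {
        print fmt("expiring entry for %s", idx);
        return 0 secs;   # a positive interval would postpone the expiration
        }

    global hosts_seen: table[addr] of count
        &create_expire=1hr &expire_func=host_expired;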
.. bro:attr:: &persistent
Makes a variable persistent, i.e., its value is written to disk (by
default at shutdown time).
.. bro:attr:: &synchronized
Synchronizes variable accesses across nodes. The value of a
``&synchronized`` variable is automatically propagated to all peers
when it changes.
.. bro:attr:: &postprocessor
.. TODO: needs to be documented.
.. bro:attr:: &encrypt
Encrypts files right before writing them to disk.
.. TODO: needs to be documented in more detail.
.. bro:attr:: &match
.. TODO: needs to be documented.
.. bro:attr:: &disable_print_hook
Deprecated. Will be removed.
.. bro:attr:: &raw_output
Opens a file in raw mode, i.e., non-ASCII characters are not
escaped.
.. bro:attr:: &mergeable
Prefers set union to assignment for synchronized state. This
attribute is used in conjunction with :bro:attr:`&synchronized`
container types: when the same container is updated at two peers
with different values, the propagation of the state causes a race
condition, where the last update succeeds. This can cause
inconsistencies and can be avoided by unifying the two sets, rather
than merely overwriting the old value.
.. bro:attr:: &priority
Specifies the execution priority of an event handler. Higher values
are executed before lower ones. The default value is 0.
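For example, two bodies for the same event can be ordered like:

.. code:: bro

    event bro_init() &priority=10
        {
        print "runs first";
        }

    event bro_init() &priority=-10
        {
        print "runs last";
        }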
.. bro:attr:: &group
Groups event handlers such that those in the same group can be
jointly activated or deactivated.
.. bro:attr:: &log
Writes a record field to the associated log stream.
.. bro:attr:: &error_handler
.. TODO: needs to be documented.
.. bro:attr:: (&tracked)
.. TODO: needs to be documented or removed if it's not used anywhere.


@ -1,24 +1,20 @@
##! This is an example script that demonstrates how to document. Comments ##! This is an example script that demonstrates documentation features.
##! of the form ``##!`` are for the script summary. The contents of ##! Comments of the form ``##!`` are for the script summary. The contents of
##! these comments are transferred directly into the auto-generated ##! these comments are transferred directly into the auto-generated
##! `reStructuredText <http://docutils.sourceforge.net/rst.html>`_ ##! `reStructuredText <http://docutils.sourceforge.net/rst.html>`_
##! (reST) document's summary section. ##! (reST) document's summary section.
##! ##!
##! .. tip:: You can embed directives and roles within ``##``-stylized comments. ##! .. tip:: You can embed directives and roles within ``##``-stylized comments.
##! ##!
##! A script's logging information has to be documented manually as minimally ##! There's also a custom role to reference any identifier node in
##! shown below. Note that references may not always be possible (e.g. ##! the Bro Sphinx domain that's good for "see alsos", e.g.
##! anonymous filter functions) and a script may not need to document
##! each of "columns", "event", "filter" depending on exactly what it's doing.
##! ##!
##! **Logging Stream ID:** :bro:enum:`Example::EXAMPLE` ##! See also: :bro:see:`Example::a_var`, :bro:see:`Example::ONE`,
##! :Columns: :bro:type:`Example::Info` ##! :bro:see:`SSH::Info`
##! :Event: :bro:id:`Example::log_example`
##! :Filter: ``example-filter``
##! uses :bro:id:`Example::filter_func` to determine whether to
##! exclude the ``ts`` field
##! ##!
##! :Author: Jon Siwek <jsiwek@ncsa.illinois.edu> ##! And a custom directive does the equivalent references:
##!
##! .. bro:see:: Example::a_var Example::ONE SSH::Info
# Comments that use a single pound sign (#) are not significant to # Comments that use a single pound sign (#) are not significant to
# a script's auto-generated documentation, but ones that use a # a script's auto-generated documentation, but ones that use a
@ -26,8 +22,8 @@
# field comments, it's necessary to disambiguate the field with # field comments, it's necessary to disambiguate the field with
# which a comment associates: e.g. "##<" can be used on the same line # which a comment associates: e.g. "##<" can be used on the same line
# as a field to signify the comment relates to it and not the # as a field to signify the comment relates to it and not the
# following field. "##<" is not meant for general use, just # following field. "##<" can also be used more generally in any
# record/enum fields. # variable declarations to associate with the last-declared identifier.
# #
# Generally, the auto-doc comments (##) are associated with the # Generally, the auto-doc comments (##) are associated with the
# next declaration/identifier found in the script, but the doc framework # next declaration/identifier found in the script, but the doc framework
@ -80,7 +76,7 @@ redef enum Notice::Type += {
# Comments of the "##" form can be use to further document it, but it's # Comments of the "##" form can be use to further document it, but it's
# better to do all documentation related to logging in the summary section # better to do all documentation related to logging in the summary section
# as is shown above. # as is shown above.
redef enum Log::ID += { EXAMPLE }; redef enum Log::ID += { LOG };
# Anything declared in the export section will show up in the rendered # Anything declared in the export section will show up in the rendered
# documentation's "public interface" section # documentation's "public interface" section
@ -155,7 +151,7 @@ export {
const an_option: set[addr, addr, string] &redef; const an_option: set[addr, addr, string] &redef;
# default initialization will be self-documenting # default initialization will be self-documenting
const option_with_init = 0.01 secs &redef; const option_with_init = 0.01 secs &redef; ##< More docs can be added here.
############## state variables ############ ############## state variables ############
# right now, I'm defining this as any global # right now, I'm defining this as any global
@ -187,6 +183,7 @@ export {
## Summarize "an_event" here. ## Summarize "an_event" here.
## Give more details about "an_event" here. ## Give more details about "an_event" here.
## Example::an_event should not be confused as a parameter.
## name: describe the argument here ## name: describe the argument here
global an_event: event(name: string); global an_event: event(name: string);
@ -218,8 +215,8 @@ type PrivateRecord: record {
event bro_init() event bro_init()
{ {
Log::create_stream(EXAMPLE, [$columns=Info, $ev=log_example]); Log::create_stream(Example::LOG, [$columns=Info, $ev=log_example]);
Log::add_filter(EXAMPLE, [ Log::add_filter(Example::LOG, [
$name="example-filter", $name="example-filter",
$path="example-filter", $path="example-filter",
$pred=filter_func, $pred=filter_func,

291
doc/scripts/example.rst Normal file

@ -0,0 +1,291 @@
.. Automatically generated. Do not edit.
example.bro
===========
:download:`Original Source File <example.bro>`
Overview
--------
This is an example script that demonstrates how to document. Comments
of the form ``##!`` are for the script summary. The contents of
these comments are transferred directly into the auto-generated
`reStructuredText <http://docutils.sourceforge.net/rst.html>`_
(reST) document's summary section.
.. tip:: You can embed directives and roles within ``##``-stylized comments.
:Imports: :doc:`policy/frameworks/software/vulnerable </scripts/policy/frameworks/software/vulnerable>`
Summary
~~~~~~~
Options
#######
============================================================================ ======================================
:bro:id:`Example::an_option`: :bro:type:`set` :bro:attr:`&redef` add documentation for "an_option" here
:bro:id:`Example::option_with_init`: :bro:type:`interval` :bro:attr:`&redef`
============================================================================ ======================================
State Variables
###############
=========================================================================== =======================================
:bro:id:`Example::a_var`: :bro:type:`bool` put some documentation for "a_var" here
:bro:id:`Example::var_with_attr`: :bro:type:`count` :bro:attr:`&persistent`
:bro:id:`Example::var_without_explicit_type`: :bro:type:`string`
=========================================================================== =======================================
Types
#####
====================================================== ==========================================================
:bro:type:`Example::SimpleEnum`: :bro:type:`enum` documentation for "SimpleEnum"
goes here.
:bro:type:`Example::SimpleRecord`: :bro:type:`record` general documentation for a type "SimpleRecord"
goes here.
:bro:type:`Example::ComplexRecord`: :bro:type:`record` general documentation for a type "ComplexRecord" goes here
:bro:type:`Example::Info`: :bro:type:`record` An example record to be used with a logging stream.
====================================================== ==========================================================
Events
######
================================================= =============================================================
:bro:id:`Example::an_event`: :bro:type:`event` Summarize "an_event" here.
:bro:id:`Example::log_example`: :bro:type:`event` This is a declaration of an example event that can be used in
logging streams and is raised once for each log entry.
================================================= =============================================================
Functions
#########
=============================================== =======================================
:bro:id:`Example::a_function`: :bro:type:`func` Summarize purpose of "a_function" here.
=============================================== =======================================
Redefinitions
#############
===================================================== ========================================
:bro:type:`Log::ID`: :bro:type:`enum`
:bro:type:`Example::SimpleEnum`: :bro:type:`enum` document the "SimpleEnum" redef here
:bro:type:`Example::SimpleRecord`: :bro:type:`record` document the record extension redef here
===================================================== ========================================
Namespaces
~~~~~~~~~~
.. bro:namespace:: Example
Notices
~~~~~~~
:bro:type:`Notice::Type`
:Type: :bro:type:`enum`
.. bro:enum:: Example::Notice_One Notice::Type
any number of this type of comment
will document "Notice_One"
.. bro:enum:: Example::Notice_Two Notice::Type
any number of this type of comment
will document "Notice_Two"
.. bro:enum:: Example::Notice_Three Notice::Type
.. bro:enum:: Example::Notice_Four Notice::Type
Public Interface
----------------
Options
~~~~~~~
.. bro:id:: Example::an_option
:Type: :bro:type:`set` [:bro:type:`addr`, :bro:type:`addr`, :bro:type:`string`]
:Attributes: :bro:attr:`&redef`
:Default: ``{}``
add documentation for "an_option" here
.. bro:id:: Example::option_with_init
:Type: :bro:type:`interval`
:Attributes: :bro:attr:`&redef`
:Default: ``10.0 msecs``
State Variables
~~~~~~~~~~~~~~~
.. bro:id:: Example::a_var
:Type: :bro:type:`bool`
put some documentation for "a_var" here
.. bro:id:: Example::var_with_attr
:Type: :bro:type:`count`
:Attributes: :bro:attr:`&persistent`
.. bro:id:: Example::var_without_explicit_type
:Type: :bro:type:`string`
:Default: ``"this works"``
Types
~~~~~
.. bro:type:: Example::SimpleEnum
:Type: :bro:type:`enum`
.. bro:enum:: Example::ONE Example::SimpleEnum
and more specific info for "ONE"
can span multiple lines
.. bro:enum:: Example::TWO Example::SimpleEnum
or more info like this for "TWO"
can span multiple lines
.. bro:enum:: Example::THREE Example::SimpleEnum
documentation for "SimpleEnum"
goes here.
.. bro:type:: Example::SimpleRecord
:Type: :bro:type:`record`
field1: :bro:type:`count`
counts something
field2: :bro:type:`bool`
toggles something
general documentation for a type "SimpleRecord"
goes here.
.. bro:type:: Example::ComplexRecord
:Type: :bro:type:`record`
field1: :bro:type:`count`
counts something
field2: :bro:type:`bool`
toggles something
field3: :bro:type:`Example::SimpleRecord`
msg: :bro:type:`string` :bro:attr:`&default` = ``"blah"`` :bro:attr:`&optional`
attributes are self-documenting
general documentation for a type "ComplexRecord" goes here
.. bro:type:: Example::Info
:Type: :bro:type:`record`
ts: :bro:type:`time` :bro:attr:`&log`
uid: :bro:type:`string` :bro:attr:`&log`
status: :bro:type:`count` :bro:attr:`&log` :bro:attr:`&optional`
An example record to be used with a logging stream.
Events
~~~~~~
.. bro:id:: Example::an_event
:Type: :bro:type:`event` (name: :bro:type:`string`)
Summarize "an_event" here.
Give more details about "an_event" here.
:param name: describe the argument here
.. bro:id:: Example::log_example
:Type: :bro:type:`event` (rec: :bro:type:`Example::Info`)
This is a declaration of an example event that can be used in
logging streams and is raised once for each log entry.
Functions
~~~~~~~~~
.. bro:id:: Example::a_function
:Type: :bro:type:`function` (tag: :bro:type:`string`, msg: :bro:type:`string`) : :bro:type:`string`
Summarize purpose of "a_function" here.
Give more details about "a_function" here.
Separating the documentation of the params/return values with
empty comments is optional, but improves readability of script.
:param tag: function arguments can be described
like this
:param msg: another param
:returns: describe the return type here
Redefinitions
~~~~~~~~~~~~~
:bro:type:`Log::ID`
:Type: :bro:type:`enum`
.. bro:enum:: Example::LOG Log::ID
:bro:type:`Example::SimpleEnum`
:Type: :bro:type:`enum`
.. bro:enum:: Example::FOUR Example::SimpleEnum
and some documentation for "FOUR"
.. bro:enum:: Example::FIVE Example::SimpleEnum
also "FIVE" for good measure
document the "SimpleEnum" redef here
:bro:type:`Example::SimpleRecord`
:Type: :bro:type:`record`
field_ext: :bro:type:`string` :bro:attr:`&optional`
document the extending field here
(or here)
document the record extension redef here
Port Analysis
-------------
:ref:`More Information <common_port_analysis_doc>`
SSL::
[ports={
443/tcp,
562/tcp
}]
Packet Filter
-------------
:ref:`More Information <common_packet_filter_doc>`
Filters added::
[ssl] = tcp port 443,
[nntps] = tcp port 562


@ -11,6 +11,8 @@
# Specific scripts can be blacklisted below when e.g. they currently aren't # Specific scripts can be blacklisted below when e.g. they currently aren't
# parseable or they just aren't meant to be documented. # parseable or they just aren't meant to be documented.
export LC_ALL=C # Make sorting stable.
blacklist () blacklist ()
{ {
if [[ "$blacklist" == "" ]]; then if [[ "$blacklist" == "" ]]; then
@ -34,9 +36,6 @@ blacklist all.bro
blacklist init-default.bro blacklist init-default.bro
blacklist init-bare.bro blacklist init-bare.bro
tmp_blacklist hot.conn.bro
tmp_blacklist scan.bro
statictext="\ statictext="\
# DO NOT EDIT # DO NOT EDIT
# This file is auto-generated from the "genDocSourcesList.sh" script. # This file is auto-generated from the "genDocSourcesList.sh" script.
@ -68,12 +67,12 @@ sourcedir=${thisdir}/../..
echo "$statictext" > $outfile echo "$statictext" > $outfile
bifs=`( cd ${sourcedir}/build/src && find . -name \*\.bro | sort )` bifs=`( cd ${sourcedir}/src && find . -name \*\.bif | sort )`
for file in $bifs for file in $bifs
do do
f=${file:2} f=${file:2}.bro
echo "rest_target(\${CMAKE_BINARY_DIR}/src $f)" >> $outfile echo "rest_target(\${CMAKE_BINARY_DIR}/src base/$f)" >> $outfile
done done
scriptfiles=`( cd ${sourcedir}/scripts && find . -name \*\.bro | sort )` scriptfiles=`( cd ${sourcedir}/scripts && find . -name \*\.bro | sort )`

8
doc/scripts/index.rst Normal file

@ -0,0 +1,8 @@
.. This is a stub doc to which broxygen appends during the build process
Index of All Individual Bro Scripts
===================================
.. toctree::
:maxdepth: 1

5
doc/scripts/internal.rst Normal file
View file

@ -0,0 +1,5 @@
.. This is a stub doc to which broxygen appends during the build process
Internal Scripts
================


@ -1,7 +1,7 @@
.. This is a stub doc to which the build process can append. .. This is a stub doc to which broxygen appends during the build process
Bro Script Packages Index of All Bro Script Packages
=================== ================================
Bro has the following script packages (e.g. collections of related scripts in Bro has the following script packages (e.g. collections of related scripts in
a common directory). If the package directory contains a ``__load__.bro`` a common directory). If the package directory contains a ``__load__.bro``
@ -10,8 +10,3 @@ script, it supports being loaded in mass as a whole directory for convenience.
Packages/scripts in the ``base/`` directory are all loaded by default, while Packages/scripts in the ``base/`` directory are all loaded by default, while
ones in ``policy/`` provide functionality and customization options that are ones in ``policy/`` provide functionality and customization options that are
more appropriate for users to decide whether they'd like to load it or not. more appropriate for users to decide whether they'd like to load it or not.
.. toctree::
:maxdepth: 1


@ -1,64 +0,0 @@
// make literal blocks corresponding to identifier initial values
// hidden by default
$(document).ready(function() {
var showText='(Show Value)';
var hideText='(Hide Value)';
var is_visible = false;
// select field-list tables that come before a literal block
tables = $('.highlight-python').prev('table.docutils.field-list');
tables.find('th.field-name').filter(function(index) {
return $(this).html() == "Default :";
}).next().append('<a href="#" class="toggleLink">'+showText+'</a>');
// hide all literal blocks that follow a field-list table
tables.next('.highlight-python').hide();
// register handler for clicking a "toggle" link
$('a.toggleLink').click(function() {
is_visible = !is_visible;
$(this).html( (!is_visible) ? showText : hideText);
// the link is inside a <table><tbody><tr><td> and the next
// literal block after the table is the literal block that we want
// to show/hide
$(this).parent().parent().parent().parent().next('.highlight-python').slideToggle('fast');
// override default link behavior
return false;
});
});
// make "Private Interface" sections hidden by default
$(document).ready(function() {
var showText='Show Private Interface (for internal use)';
var hideText='Hide Private Interface';
var is_visible = false;
// insert show/hide links
$('#private-interface').children(":first-child").after('<a href="#" class="privateToggle">'+showText+'</a>');
// wrap all sub-sections in a new div that can be hidden/shown
$('#private-interface').children(".section").wrapAll('<div class="private" />');
// hide the given class
$('.private').hide();
// register handler for clicking a "toggle" link
$('a.privateToggle').click(function() {
is_visible = !is_visible;
$(this).html( (!is_visible) ? showText : hideText);
$('.private').slideToggle('fast');
// override default link behavior
return false;
});
});
