Merge branch 'master' into topic/jsiwek/filter-rotation

Jon Siwek 2011-09-07 12:30:47 -05:00
commit d2bf33ee19
161 changed files with 1345 additions and 946 deletions

72
CHANGES
View file

@ -1,4 +1,76 @@
1.6-dev-1184 | 2011-09-04 09:34:50 -0700
* FindPCAP now links against thread library when necessary (e.g.
PF_RING's libpcap). (Jon Siwek)
* Install binaries with an RPATH. (Jon Siwek)
* Fix for a case where nested records weren't coerced even though
possible. (Jon Siwek)
* Changed ASCII writer to delay creation of the log file after rotation until
the next write.
* Changed default snaplen to 65535 and added a -l/--snaplen command
line option to set it explicitly. Addresses #447. (Jon Siwek)
* Various updates to logging framework. (Seth Hall)
* Changed presentation of enum labels to include namespace. (Jon
Siwek)
* HTTP analyzer is now enabled with any of the HTTP events. (Seth
Hall)
* Fixed missing format string that caused some segfaults. (Gregor
Maier)
* ASCII writer now prints time intervals with 6 decimal places.
(Gregor Maier)
* Added a Reporter::fatal BIF. (Jon Siwek)
* Fixes for GeoIP support. Addresses #538. (Jon Siwek)
* Fixed excessive memory usage of SSL analyzer on connections with
gaps. (Gregor Maier)
* Added a log postprocessing function that can SCP rotated logs to
remote hosts. (Jon Siwek)
* Added a BiF for getting the current Bro version string. (Jon
Siwek)
* Misc. doc/script/test cleanup. (Jon Siwek)
* Fixed bare-mode @load dependency problems. (Jon Siwek)
* Fixed check_for_unused_event_handlers option. (Jon Siwek)
* Fixing some more bare-mode @load dependency issues. (Jon Siwek)
* Reorganizing btest/policy directory to match new scripts/
organization. Addresses #545. (Jon Siwek)
* Bro scripts generated from BiFs now install to
$prefix/share/bro/base. Addresses #545. (Jon Siwek)
* Changed/fixed some cluster script error reporting. (Jon Siwek)
* Various script normalization. (Jon Siwek)
* Add a test that checks each individual script can be loaded in
bare-mode. Addresses #545. (Jon Siwek)
* Tune when c$conn is set. Addresses #554. (Gregor Maier)
* Add ConnSize_Analyzer's fields to conn.log. (Gregor Maier)
* Fixing bug in "interesting hostnames" detection. (Seth Hall)
* Adding metrics framework intermediate updates. (Seth Hall)
1.6-dev-1120 | 2011-08-19 19:00:15 -0700 1.6-dev-1120 | 2011-08-19 19:00:15 -0700
* Fix for the CompHash fix. (Robin Sommer) * Fix for the CompHash fix. (Robin Sommer)

View file

@ -31,6 +31,14 @@ if ("${PROJECT_SOURCE_DIR}" STREQUAL "${CMAKE_SOURCE_DIR}")
${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake) ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
endif () endif ()
# Keep RPATH upon installing so that user doesn't have to ensure the linker
# can find internal/private libraries or libraries external to the build
# directory that were explicitly linked against
if (NOT BINARY_PACKAGING_MODE)
SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib")
endif ()
######################################################################## ########################################################################
## Project/Build Configuration ## Project/Build Configuration

View file

@ -5,12 +5,7 @@
# to offer. For more, execute that one directly. # to offer. For more, execute that one directly.
# #
SOURCE=$(PWD) BUILD=build
BUILD=$(SOURCE)/build
TMP=/tmp/bro-dist.$(UID)
BRO_V=`cat $(SOURCE)/VERSION`
BROCCOLI_V=`cat $(SOURCE)/aux/broccoli/VERSION`
BROCTL_V=`cat $(SOURCE)/aux/broctl/VERSION`
all: configured all: configured
( cd $(BUILD) && make ) ( cd $(BUILD) && make )
@ -29,20 +24,7 @@ docclean: configured
( cd $(BUILD) && make docclean && make restclean ) ( cd $(BUILD) && make docclean && make restclean )
dist: dist:
@( mkdir -p $(BUILD) && rm -rf $(TMP) && mkdir $(TMP) ) @./pkg/make-src-packages
@cp -R $(SOURCE) $(TMP)/Bro-$(BRO_V)
@( cd $(TMP) && find . -name .git\* | xargs rm -rf )
@( cd $(TMP) && find . -name \*.swp | xargs rm -rf )
@( cd $(TMP) && find . -type d -name build | xargs rm -rf )
@( cd $(TMP) && tar -czf $(BUILD)/Bro-all-$(BRO_V).tar.gz Bro-$(BRO_V) )
@( cd $(TMP)/Bro-$(BRO_V)/aux && mv broccoli Broccoli-$(BROCCOLI_V) && \
tar -czf $(BUILD)/Broccoli-$(BROCCOLI_V).tar.gz Broccoli-$(BROCCOLI_V) )
@( cd $(TMP)/Bro-$(BRO_V)/aux && mv broctl Broctl-$(BROCTL_V) && \
tar -czf $(BUILD)/Broctl-$(BROCTL_V).tar.gz Broctl-$(BROCTL_V) )
@( cd $(TMP)/Bro-$(BRO_V)/aux && rm -rf Broctl* Broccoli* )
@( cd $(TMP) && tar -czf $(BUILD)/Bro-$(BRO_V).tar.gz Bro-$(BRO_V) )
@rm -rf $(TMP)
@echo "Distribution source tarballs have been compiled in $(BUILD)"
bindist: bindist:
@( cd pkg && ( ./make-deb-packages || ./make-mac-packages || \ @( cd pkg && ( ./make-deb-packages || ./make-mac-packages || \

View file

@ -1 +1 @@
1.6-dev-1120 1.6-dev-1184

@ -1 +1 @@
Subproject commit a3a9410dedc842f6bb9859642f334ed354633b57 Subproject commit 032b4e0f028a08257be0c703b27a7559e57bd40a

@ -1 +1 @@
Subproject commit d68b98bb995a105b257f805ec4ff22c4929c7476 Subproject commit 04d149a194e06ed5410ea3af924ff48b9129cd3b

@ -1 +1 @@
Subproject commit 743f10dda8cd5655ea3dc6eb705ff5414ed4f535 Subproject commit 89c20c7f063afe5f39aa72bfec02d6996b291c13

@ -1 +1 @@
Subproject commit cf4ce9dfc5d6dc4e6d311955eeaec2d679e8669b Subproject commit 2b9053d40d7ef497c8cef6357b59f43129976d65

View file

@ -16,6 +16,8 @@
# LIBGEOIP_FOUND System has GeoIP libraries and headers # LIBGEOIP_FOUND System has GeoIP libraries and headers
# LibGeoIP_LIBRARY The GeoIP library # LibGeoIP_LIBRARY The GeoIP library
# LibGeoIP_INCLUDE_DIR The location of GeoIP headers # LibGeoIP_INCLUDE_DIR The location of GeoIP headers
# HAVE_GEOIP_COUNTRY_EDITION_V6 Whether the API supports IPv6 country edition
# HAVE_GEOIP_CITY_EDITION_REV0_V6 Whether the API supports IPv6 city edition
find_path(LibGeoIP_ROOT_DIR find_path(LibGeoIP_ROOT_DIR
NAMES include/GeoIPCity.h NAMES include/GeoIPCity.h
@ -45,6 +47,20 @@ find_package_handle_standard_args(LibGeoIP DEFAULT_MSG
LibGeoIP_INCLUDE_DIR LibGeoIP_INCLUDE_DIR
) )
if (LIBGEOIP_FOUND)
include(CheckCXXSourceCompiles)
set(CMAKE_REQUIRED_INCLUDES ${LibGeoIP_INCLUDE_DIR})
check_cxx_source_compiles("
#include <GeoIPCity.h>
int main() { GEOIP_COUNTRY_EDITION_V6; return 0; }
" HAVE_GEOIP_COUNTRY_EDITION_V6)
check_cxx_source_compiles("
#include <GeoIPCity.h>
int main() { GEOIP_CITY_EDITION_REV0_V6; return 0; }
" HAVE_GEOIP_CITY_EDITION_REV0_V6)
set(CMAKE_REQUIRED_INCLUDES)
endif ()
mark_as_advanced( mark_as_advanced(
LibGeoIP_ROOT_DIR LibGeoIP_ROOT_DIR
LibGeoIP_LIBRARY LibGeoIP_LIBRARY

View file

@ -15,7 +15,8 @@
# #
# PCAP_FOUND System has libpcap, include and library dirs found # PCAP_FOUND System has libpcap, include and library dirs found
# PCAP_INCLUDE_DIR The libpcap include directories. # PCAP_INCLUDE_DIR The libpcap include directories.
# PCAP_LIBRARY The libpcap library. # PCAP_LIBRARY The libpcap library (possibly includes a thread
# library e.g. required by pf_ring's libpcap)
find_path(PCAP_ROOT_DIR find_path(PCAP_ROOT_DIR
NAMES include/pcap.h NAMES include/pcap.h
@ -37,6 +38,29 @@ find_package_handle_standard_args(PCAP DEFAULT_MSG
PCAP_INCLUDE_DIR PCAP_INCLUDE_DIR
) )
include(CheckCSourceCompiles)
set(CMAKE_REQUIRED_LIBRARIES ${PCAP_LIBRARY})
check_c_source_compiles("int main() { return 0; }" PCAP_LINKS_SOLO)
set(CMAKE_REQUIRED_LIBRARIES)
# check if linking against libpcap also needs to link against a thread library
if (NOT PCAP_LINKS_SOLO)
find_package(Threads)
if (THREADS_FOUND)
set(CMAKE_REQUIRED_LIBRARIES ${PCAP_LIBRARY} ${CMAKE_THREAD_LIBS_INIT})
check_c_source_compiles("int main() { return 0; }" PCAP_NEEDS_THREADS)
set(CMAKE_REQUIRED_LIBRARIES)
endif ()
if (THREADS_FOUND AND PCAP_NEEDS_THREADS)
set(_tmp ${PCAP_LIBRARY} ${CMAKE_THREAD_LIBS_INIT})
list(REMOVE_DUPLICATES _tmp)
set(PCAP_LIBRARY ${_tmp}
CACHE STRING "Libraries needed to link against libpcap" FORCE)
else ()
message(FATAL_ERROR "Couldn't determine how to link against libpcap")
endif ()
endif ()
mark_as_advanced( mark_as_advanced(
PCAP_ROOT_DIR PCAP_ROOT_DIR
PCAP_INCLUDE_DIR PCAP_INCLUDE_DIR

View file

@ -114,6 +114,12 @@
/* GeoIP geographic lookup functionality */ /* GeoIP geographic lookup functionality */
#cmakedefine USE_GEOIP #cmakedefine USE_GEOIP
/* Whether the found GeoIP API supports IPv6 Country Edition */
#cmakedefine HAVE_GEOIP_COUNTRY_EDITION_V6
/* Whether the found GeoIP API supports IPv6 City Edition */
#cmakedefine HAVE_GEOIP_CITY_EDITION_REV0_V6
/* Use Google's perftools */ /* Use Google's perftools */
#cmakedefine USE_PERFTOOLS #cmakedefine USE_PERFTOOLS

View file

@ -66,10 +66,6 @@ rest_target(${psd} base/protocols/http/partial-content.bro)
rest_target(${psd} base/protocols/http/utils.bro) rest_target(${psd} base/protocols/http/utils.bro)
rest_target(${psd} base/protocols/irc/dcc-send.bro) rest_target(${psd} base/protocols/irc/dcc-send.bro)
rest_target(${psd} base/protocols/irc/main.bro) rest_target(${psd} base/protocols/irc/main.bro)
rest_target(${psd} base/protocols/mime/file-extract.bro)
rest_target(${psd} base/protocols/mime/file-hash.bro)
rest_target(${psd} base/protocols/mime/file-ident.bro)
rest_target(${psd} base/protocols/mime/main.bro)
rest_target(${psd} base/protocols/rpc/main.bro) rest_target(${psd} base/protocols/rpc/main.bro)
rest_target(${psd} base/protocols/smtp/entities-excerpt.bro) rest_target(${psd} base/protocols/smtp/entities-excerpt.bro)
rest_target(${psd} base/protocols/smtp/entities.bro) rest_target(${psd} base/protocols/smtp/entities.bro)

View file

@ -80,7 +80,7 @@ redef enum Notice::Type += {
# Comments of the "##" form can be used to further document it, but it's # Comments of the "##" form can be used to further document it, but it's
# better to do all documentation related to logging in the summary section # better to do all documentation related to logging in the summary section
# as is shown above. # as is shown above.
redef enum Log::ID += { EXAMPLE }; redef enum Log::ID += { LOG };
# Anything declared in the export section will show up in the rendered # Anything declared in the export section will show up in the rendered
# documentation's "public interface" section # documentation's "public interface" section
@ -218,8 +218,8 @@ type PrivateRecord: record {
event bro_init() event bro_init()
{ {
Log::create_stream(EXAMPLE, [$columns=Info, $ev=log_example]); Log::create_stream(Example::LOG, [$columns=Info, $ev=log_example]);
Log::add_filter(EXAMPLE, [ Log::add_filter(Example::LOG, [
$name="example-filter", $name="example-filter",
$path="example-filter", $path="example-filter",
$pred=filter_func, $pred=filter_func,

23
pkg/make-src-packages Executable file
View file

@ -0,0 +1,23 @@
#!/bin/sh
SOURCE="$( cd "$( dirname "$0" )" && cd .. && pwd )"
BUILD=${SOURCE}/build
TMP=/tmp/bro-dist.${UID}
BRO_V=`cat ${SOURCE}/VERSION`
BROCCOLI_V=`cat ${SOURCE}/aux/broccoli/VERSION`
BROCTL_V=`cat ${SOURCE}/aux/broctl/VERSION`
( mkdir -p ${BUILD} && rm -rf ${TMP} && mkdir ${TMP} )
cp -R ${SOURCE} ${TMP}/Bro-${BRO_V}
( cd ${TMP} && find . -name .git\* | xargs rm -rf )
( cd ${TMP} && find . -name \*.swp | xargs rm -rf )
( cd ${TMP} && find . -type d -name build | xargs rm -rf )
( cd ${TMP} && tar -czf ${BUILD}/Bro-all-${BRO_V}.tar.gz Bro-${BRO_V} )
( cd ${TMP}/Bro-${BRO_V}/aux && mv broccoli Broccoli-${BROCCOLI_V} && \
tar -czf ${BUILD}/Broccoli-${BROCCOLI_V}.tar.gz Broccoli-${BROCCOLI_V} )
( cd ${TMP}/Bro-${BRO_V}/aux && mv broctl Broctl-${BROCTL_V} && \
tar -czf ${BUILD}/Broctl-${BROCTL_V}.tar.gz Broctl-${BROCTL_V} )
( cd ${TMP}/Bro-${BRO_V}/aux && rm -rf Broctl* Broccoli* )
( cd ${TMP} && tar -czf ${BUILD}/Bro-${BRO_V}.tar.gz Bro-${BRO_V} )
rm -rf ${TMP}
echo "Distribution source tarballs have been compiled in ${BUILD}"

View file

@ -3,7 +3,8 @@
module Cluster; module Cluster;
export { export {
redef enum Log::ID += { CLUSTER }; redef enum Log::ID += { LOG };
type Info: record { type Info: record {
ts: time; ts: time;
message: string; message: string;
@ -106,5 +107,5 @@ event bro_init() &priority=5
terminate(); terminate();
} }
Log::create_stream(CLUSTER, [$columns=Info]); Log::create_stream(Cluster::LOG, [$columns=Info]);
} }
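This renaming is applied consistently across the commit: each stream-specific Log::ID value (CLUSTER, DPD, CONN, and so on) becomes a generic LOG constant declared inside its module and is referenced with the module namespace everywhere else. A minimal sketch of the convention, using a hypothetical MyModule:

    module MyModule;

    export {
        # Each module now contributes a generically named stream ID.
        redef enum Log::ID += { LOG };

        type Info: record {
            ts:  time   &log;
            msg: string &log;
        };
    }

    event bro_init() &priority=5
        {
        # Outside of the enum declaration the value carries its namespace.
        Log::create_stream(MyModule::LOG, [$columns=Info]);
        }

    function log_message(msg: string)
        {
        Log::write(MyModule::LOG, [$ts=network_time(), $msg=msg]);
        }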

View file

@ -22,10 +22,5 @@ redef record_all_packets = T;
# do remote logging since we forward the notice event directly. # do remote logging since we forward the notice event directly.
event bro_init() event bro_init()
{ {
Log::add_filter(Notice::NOTICE, Log::disable_stream(Notice::LOG);
[
$name="cluster-worker",
$pred=function(rec: Notice::Info): bool { return F; }
]
);
} }

View file

@ -6,7 +6,7 @@
module Communication; module Communication;
export { export {
redef enum Log::ID += { COMMUNICATION }; redef enum Log::ID += { LOG };
const default_port_ssl = 47756/tcp &redef; const default_port_ssl = 47756/tcp &redef;
const default_port_clear = 47757/tcp &redef; const default_port_clear = 47757/tcp &redef;
@ -107,17 +107,14 @@ const src_names = {
[REMOTE_SRC_SCRIPT] = "script", [REMOTE_SRC_SCRIPT] = "script",
}; };
event bro_init() event bro_init() &priority=5
{ {
Log::create_stream(COMMUNICATION, [$columns=Info]); Log::create_stream(Communication::LOG, [$columns=Info]);
if ( |nodes| > 0 )
enable_communication();
} }
function do_script_log_common(level: count, src: count, msg: string) function do_script_log_common(level: count, src: count, msg: string)
{ {
Log::write(COMMUNICATION, [$ts = network_time(), Log::write(Communication::LOG, [$ts = network_time(),
$level = (level == REMOTE_LOG_INFO ? "info" : "error"), $level = (level == REMOTE_LOG_INFO ? "info" : "error"),
$src_name = src_names[src], $src_name = src_names[src],
$peer = get_event_peer()$descr, $peer = get_event_peer()$descr,
@ -147,7 +144,7 @@ function connect_peer(peer: string)
local id = connect(node$host, p, class, node$retry, node$ssl); local id = connect(node$host, p, class, node$retry, node$ssl);
if ( id == PEER_ID_NONE ) if ( id == PEER_ID_NONE )
Log::write(COMMUNICATION, [$ts = network_time(), Log::write(Communication::LOG, [$ts = network_time(),
$peer = get_event_peer()$descr, $peer = get_event_peer()$descr,
$message = "can't trigger connect"]); $message = "can't trigger connect"]);
pending_peers[id] = node; pending_peers[id] = node;
@ -275,7 +272,7 @@ event remote_state_inconsistency(operation: string, id: string,
local msg = fmt("state inconsistency: %s should be %s but is %s before %s", local msg = fmt("state inconsistency: %s should be %s but is %s before %s",
id, expected_old, real_old, operation); id, expected_old, real_old, operation);
Log::write(COMMUNICATION, [$ts = network_time(), Log::write(Communication::LOG, [$ts = network_time(),
$peer = get_event_peer()$descr, $peer = get_event_peer()$descr,
$message = msg]); $message = msg]);
} }
@ -284,6 +281,9 @@ event remote_state_inconsistency(operation: string, id: string,
# Actually initiate the connections that need to be established. # Actually initiate the connections that need to be established.
event bro_init() &priority = -10 # let others modify nodes event bro_init() &priority = -10 # let others modify nodes
{ {
if ( |nodes| > 0 )
enable_communication();
for ( tag in nodes ) for ( tag in nodes )
{ {
if ( ! nodes[tag]$connect ) if ( ! nodes[tag]$connect )

View file

@ -7,7 +7,7 @@ module DPD;
redef signature_files += "base/frameworks/dpd/dpd.sig"; redef signature_files += "base/frameworks/dpd/dpd.sig";
export { export {
redef enum Log::ID += { DPD }; redef enum Log::ID += { LOG };
type Info: record { type Info: record {
## Timestamp for when protocol analysis failed. ## Timestamp for when protocol analysis failed.
@ -38,9 +38,9 @@ redef record connection += {
dpd: Info &optional; dpd: Info &optional;
}; };
event bro_init() event bro_init() &priority=5
{ {
Log::create_stream(DPD, [$columns=Info]); Log::create_stream(DPD::LOG, [$columns=Info]);
# Populate the internal DPD analysis variable. # Populate the internal DPD analysis variable.
for ( a in dpd_config ) for ( a in dpd_config )
@ -104,5 +104,5 @@ event protocol_violation(c: connection, atype: count, aid: count,
reason: string) &priority=-5 reason: string) &priority=-5
{ {
if ( c?$dpd ) if ( c?$dpd )
Log::write(DPD, c$dpd); Log::write(DPD::LOG, c$dpd);
} }

View file

@ -25,7 +25,7 @@
module Intel; module Intel;
export { export {
redef enum Log::ID += { INTEL }; redef enum Log::ID += { LOG };
redef enum Notice::Type += { redef enum Notice::Type += {
## This notice should be used in all detector scripts to indicate ## This notice should be used in all detector scripts to indicate
@ -101,7 +101,7 @@ export {
event bro_init() event bro_init()
{ {
Log::create_stream(INTEL, [$columns=Info]); Log::create_stream(Intel::LOG, [$columns=Info]);
} }
@ -163,7 +163,7 @@ function insert(item: Item): bool
} }
if ( err_msg != "" ) if ( err_msg != "" )
Log::write(INTEL, [$ts=network_time(), $level="warn", $message=fmt(err_msg)]); Log::write(Intel::LOG, [$ts=network_time(), $level="warn", $message=fmt(err_msg)]);
return F; return F;
} }
@ -272,6 +272,6 @@ function matcher(item: QueryItem): bool
} }
if ( err_msg != "" ) if ( err_msg != "" )
Log::write(INTEL, [$ts=network_time(), $level="error", $message=fmt(err_msg)]); Log::write(Intel::LOG, [$ts=network_time(), $level="error", $message=fmt(err_msg)]);
return F; return F;
} }

View file

@ -177,8 +177,47 @@ function __default_rotation_postprocessor(info: RotationInfo) : bool
function default_path_func(id: ID, path: string, rec: any) : string function default_path_func(id: ID, path: string, rec: any) : string
{ {
# TODO for Seth: Do what you want. :) local id_str = fmt("%s", id);
local parts = split1(id_str, /::/);
if ( |parts| == 2 )
{
# TODO: the core shouldn't be suggesting paths anymore. Only
# statically defined paths should be sent into here. This
# is only to cope with the core generated paths.
if ( to_lower(parts[2]) != path )
return path; return path;
# Example: Notice::LOG -> "notice"
if ( parts[2] == "LOG" )
{
local module_parts = split_n(parts[1], /[^A-Z][A-Z][a-z]*/, T, 4);
local output = "";
if ( 1 in module_parts )
output = module_parts[1];
if ( 2 in module_parts && module_parts[2] != "" )
output = cat(output, sub_bytes(module_parts[2],1,1), "_", sub_bytes(module_parts[2], 2, |module_parts[2]|));
if ( 3 in module_parts && module_parts[3] != "" )
output = cat(output, "_", module_parts[3]);
if ( 4 in module_parts && module_parts[4] != "" )
output = cat(output, sub_bytes(module_parts[4],1,1), "_", sub_bytes(module_parts[4], 2, |module_parts[4]|));
# TODO: There seems to be some problem with the split function
# not putting \0 at the end of the string. fmt will make
# a better internal string.
return fmt("%s", to_lower(output));
}
# Example: Notice::POLICY_LOG -> "notice_policy"
if ( /_LOG$/ in parts[2] )
parts[2] = sub(parts[2], /_LOG$/, "");
return cat(to_lower(parts[1]),"_",to_lower(parts[2]));
}
else
{
# In case there is a logging stream in the global namespace.
return to_lower(id_str);
}
} }
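When the core-suggested default path (the lower-cased enum label) is in effect, the fallback above derives a filename-friendly path from the stream ID. Some mappings implied by the logic, shown as a sketch rather than an exhaustive list:

    # Module::LOG values reduce to the lower-cased module name, with an
    # underscore inserted at case transitions:
    #   Conn::LOG           -> "conn"
    #   PacketFilter::LOG   -> "packet_filter"
    #   LoadedScripts::LOG  -> "loaded_scripts"
    #
    # Other labels are prefixed with the lower-cased module name, minus any
    # trailing "_LOG":
    #   Notice::POLICY_LOG  -> "notice_policy"
    #   Notice::ALARM_LOG   -> "notice_alarm"
    #   SMTP::ENTITIES_LOG  -> "smtp_entities"
    #   Known::HOSTS_LOG    -> "known_hosts"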
# Run post-processor on file. If there isn't any postprocessor defined, # Run post-processor on file. If there isn't any postprocessor defined,

View file

@ -0,0 +1,42 @@
##! This script defines a postprocessing function that can be applied
##! to a logging filter in order to automatically SCP (secure copy)
##! a log stream (or a subset of it) to a remote host at configurable
##! rotation time intervals.
module Log;
export {
## This postprocessor SCPs the rotated log to all the remote hosts
## defined in :bro:id:`Log::scp_destinations` and then deletes
## the local copy of the rotated log. It's not active when
## reading from trace files.
global scp_postprocessor: function(info: Log::RotationInfo): bool;
## A container that describes the remote destination for the SCP command
## argument as ``user@host:path``.
type SCPDestination: record {
user: string;
host: string;
path: string;
};
## A table indexed by a particular log writer and filter path that yields
## a set of remote destinations. The :bro:id:`Log::scp_postprocessor`
## function queries this table upon log rotation and performs a secure
## copy of the rotated log to each destination in the set.
global scp_destinations: table[Writer, string] of set[SCPDestination];
}
function scp_postprocessor(info: Log::RotationInfo): bool
{
if ( reading_traces() || [info$writer, info$path] !in scp_destinations )
return T;
local command = "";
for ( d in scp_destinations[info$writer, info$path] )
command += fmt("scp %s %s@%s:%s;", info$fname, d$user, d$host, d$path);
command += fmt("/bin/rm %s", info$fname);
system(command);
return T;
}
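A sketch of how this might be wired up for the connection log, assuming the ASCII writer and a hypothetical archive host; the field names come from the SCPDestination record above and the Log::rotation_control usage elsewhere in this commit:

    # Hypothetical destination; host and path are placeholders.
    event bro_init()
        {
        local dest: Log::SCPDestination =
            [$user="bro", $host="archive.example.com", $path="/data/logs"];
        Log::scp_destinations[Log::WRITER_ASCII, "conn"] = set(dest);
        }

    # Rotate hourly and run the SCP postprocessor on each rotated conn log.
    redef Log::rotation_control += {
        [Log::WRITER_ASCII, "conn"] =
            [$interv=1hr, $postprocessor=Log::scp_postprocessor]
    };

The path "conn" assumes the default path derivation shown earlier in this commit.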

View file

@ -5,7 +5,7 @@
module Metrics; module Metrics;
export { export {
redef enum Log::ID += { METRICS }; redef enum Log::ID += { LOG };
type ID: enum { type ID: enum {
NOTHING, NOTHING,
@ -124,7 +124,7 @@ global thresholds: table[ID, string, Index] of count = {} &create_expire=renotic
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(METRICS, [$columns=Info, $ev=log_metrics]); Log::create_stream(Metrics::LOG, [$columns=Info, $ev=log_metrics]);
} }
function index2str(index: Index): string function index2str(index: Index): string
@ -151,7 +151,7 @@ function write_log(ts: time, filter: Filter, data: MetricTable)
$value=val]; $value=val];
if ( filter$log ) if ( filter$log )
Log::write(METRICS, m); Log::write(Metrics::LOG, m);
} }
} }

View file

@ -9,15 +9,13 @@ module Notice;
export { export {
redef enum Log::ID += { redef enum Log::ID += {
## This is the primary logging stream for notices. It must always be ## This is the primary logging stream for notices.
## referenced with the module name included because the name is LOG,
## also used by the global function :bro:id:`NOTICE`.
NOTICE,
## This is the notice policy auditing log. It records what the current ## This is the notice policy auditing log. It records what the current
## notice policy is at Bro init time. ## notice policy is at Bro init time.
NOTICE_POLICY, POLICY_LOG,
## This is the alarm stream. ## This is the alarm stream.
ALARM, ALARM_LOG,
}; };
## Scripts creating new notices need to redef this enum to add their own ## Scripts creating new notices need to redef this enum to add their own
@ -41,7 +39,9 @@ export {
## Indicates that the notice should be sent to the email address(es) ## Indicates that the notice should be sent to the email address(es)
## configured in the :bro:id:`Notice::mail_dest` variable. ## configured in the :bro:id:`Notice::mail_dest` variable.
ACTION_EMAIL, ACTION_EMAIL,
## Indicates that the notice should be alarmed. ## Indicates that the notice should be alarmed. A readable ASCII
## version of the alarm log is emailed in bulk to the address(es)
## configured in :bro:id:`Notice::mail_dest`.
ACTION_ALARM, ACTION_ALARM,
}; };
@ -136,7 +136,8 @@ export {
## Local system sendmail program. ## Local system sendmail program.
const sendmail = "/usr/sbin/sendmail" &redef; const sendmail = "/usr/sbin/sendmail" &redef;
## Email address to send notices with the :bro:enum:`ACTION_EMAIL` action. ## Email address to send notices with the :bro:enum:`ACTION_EMAIL` action
## or to send bulk alarm logs on rotation with :bro:enum:`ACTION_ALARM`.
const mail_dest = "" &redef; const mail_dest = "" &redef;
## Address that emails will be from. ## Address that emails will be from.
@ -146,6 +147,11 @@ export {
## Text string prefixed to the subject of all emails sent out. ## Text string prefixed to the subject of all emails sent out.
const mail_subject_prefix = "[Bro]" &redef; const mail_subject_prefix = "[Bro]" &redef;
## A log postprocessing function that implements emailing the contents
## of a log upon rotation to any configured :bro:id:`Notice::mail_dest`.
## The rotated log is removed upon being sent.
global log_mailing_postprocessor: function(info: Log::RotationInfo): bool;
## This is the event that is called as the entry point to the ## This is the event that is called as the entry point to the
## notice framework by the global :bro:id:`NOTICE` function. By the time ## notice framework by the global :bro:id:`NOTICE` function. By the time
## this event is generated, default values have already been filled out in ## this event is generated, default values have already been filled out in
@ -172,6 +178,13 @@ export {
## :bro:enum:`ACTION_PAGE` actions. ## :bro:enum:`ACTION_PAGE` actions.
global email_notice_to: function(n: Info, dest: string, extend: bool); global email_notice_to: function(n: Info, dest: string, extend: bool);
## Constructs mail headers to which an email body can be appended for
## sending with sendmail.
## subject_desc: a subject string to use for the mail
## dest: recipient string to use for the mail
## Returns: a string of mail headers to which an email body can be appended
global email_headers: function(subject_desc: string, dest: string): string;
## This is an internally used function, please ignore it. It's only used ## This is an internally used function, please ignore it. It's only used
## for filling out missing details of :bro:type:`Notice:Info` records ## for filling out missing details of :bro:type:`Notice:Info` records
## before the synchronous and asynchronous event pathways have begun. ## before the synchronous and asynchronous event pathways have begun.
@ -186,21 +199,48 @@ export {
# priority. # priority.
global ordered_policy: vector of PolicyItem = vector(); global ordered_policy: vector of PolicyItem = vector();
event bro_init() function log_mailing_postprocessor(info: Log::RotationInfo): bool
{ {
Log::create_stream(NOTICE_POLICY, [$columns=PolicyItem]); if ( ! reading_traces() && mail_dest != "" )
Log::create_stream(Notice::NOTICE, [$columns=Info, $ev=log_notice]); {
local headers = email_headers(fmt("Log Contents: %s", info$fname),
Log::create_stream(ALARM, [$columns=Notice::Info]); mail_dest);
# Make sure that this log is output as text so that it can be packaged local tmpfilename = fmt("%s.mailheaders.tmp", info$fname);
# up and emailed later. local tmpfile = open(tmpfilename);
Log::add_filter(ALARM, [$name="default", $writer=Log::WRITER_ASCII]); write_file(tmpfile, headers);
close(tmpfile);
system(fmt("/bin/cat %s %s | %s -t -oi && /bin/rm %s %s",
tmpfilename, info$fname, sendmail, tmpfilename, info$fname));
}
return T;
}
# This extra export section here is just because this redefinition should
# be documented as part of the "public API" of this script, but the redef
# needs to occur after the postprocessor function implementation.
export {
## By default, an ASCII version of the alarm log is emailed daily to any
## configured :bro:id:`Notice::mail_dest` if not operating on trace files.
redef Log::rotation_control += {
[Log::WRITER_ASCII, "alarm-mail"] =
[$interv=24hrs, $postprocessor=log_mailing_postprocessor]
};
}
event bro_init() &priority=5
{
Log::create_stream(Notice::LOG, [$columns=Info, $ev=log_notice]);
Log::create_stream(Notice::POLICY_LOG, [$columns=PolicyItem]);
Log::create_stream(Notice::ALARM_LOG, [$columns=Notice::Info]);
# If Bro is configured for mailing notices, set up mailing for alarms.
# Make sure that this alarm log is also output as text so that it can
# be packaged up and emailed later.
if ( ! reading_traces() && mail_dest != "" )
Log::add_filter(Notice::ALARM_LOG, [$name="alarm-mail",
$path="alarm-mail",
$writer=Log::WRITER_ASCII]);
} }
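With these changes, bulk alarm mailing is driven purely by configuration: if a mail destination is set and Bro is not reading a trace, the bro_init() above attaches the "alarm-mail" ASCII filter and the rotation entry mails its contents daily. A sketch with hypothetical addresses:

    # Hypothetical recipient; both options are declared &redef above.
    redef Notice::mail_dest = "security-team@example.com";
    redef Notice::mail_subject_prefix = "[Bro Alarms]";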
# TODO: need a way to call a Bro script level callback during file rotation.
# we need more than a just $postprocessor.
#redef Log::rotation_control += {
# [Log::WRITER_ASCII, "alarm"] = [$postprocessor="mail-alarms"];
#};
# TODO: fix this. # TODO: fix this.
#function notice_tags(n: Notice::Info) : table[string] of string #function notice_tags(n: Notice::Info) : table[string] of string
@ -220,20 +260,24 @@ event bro_init()
# return tgs; # return tgs;
# } # }
function email_headers(subject_desc: string, dest: string): string
{
local header_text = string_cat(
"From: ", mail_from, "\n",
"Subject: ", mail_subject_prefix, " ", subject_desc, "\n",
"To: ", dest, "\n",
"User-Agent: Bro-IDS/", bro_version(), "\n");
if ( reply_to != "" )
header_text = string_cat(header_text, "Reply-To: ", reply_to, "\n");
return header_text;
}
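The new helper can also be used on its own wherever a sendmail-ready header block is needed; a small sketch with a hypothetical recipient:

    event bro_init()
        {
        # Hypothetical use: build headers for an arbitrary message.
        local hdrs = Notice::email_headers("Disk nearly full", "ops@example.com");
        print hdrs;
        }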
function email_notice_to(n: Notice::Info, dest: string, extend: bool) function email_notice_to(n: Notice::Info, dest: string, extend: bool)
{ {
if ( reading_traces() || dest == "" ) if ( reading_traces() || dest == "" )
return; return;
local email_text = string_cat( local email_text = email_headers(fmt("%s", n$note), dest);
"From: ", mail_from, "\n",
"Subject: ", mail_subject_prefix, " ", fmt("%s", n$note), "\n",
"To: ", dest, "\n",
# TODO: BiF to get version (the resource_usage Bif seems like overkill).
"User-Agent: Bro-IDS/?.?.?\n");
if ( reply_to != "" )
email_text = string_cat(email_text, "Reply-To: ", reply_to, "\n");
# The notice emails always start off with the human readable message. # The notice emails always start off with the human readable message.
email_text = string_cat(email_text, "\n", n$msg, "\n"); email_text = string_cat(email_text, "\n", n$msg, "\n");
@ -257,9 +301,9 @@ event notice(n: Notice::Info) &priority=-5
if ( ACTION_EMAIL in n$actions ) if ( ACTION_EMAIL in n$actions )
email_notice_to(n, mail_dest, T); email_notice_to(n, mail_dest, T);
if ( ACTION_LOG in n$actions ) if ( ACTION_LOG in n$actions )
Log::write(Notice::NOTICE, n); Log::write(Notice::LOG, n);
if ( ACTION_ALARM in n$actions ) if ( ACTION_ALARM in n$actions )
Log::write(ALARM, n); Log::write(Notice::ALARM_LOG, n);
} }
# Executes a script with all of the notice fields put into the # Executes a script with all of the notice fields put into the
@ -348,10 +392,7 @@ event bro_init() &priority=10
for ( pi in policy ) for ( pi in policy )
{ {
if ( pi$priority < 0 || pi$priority > 10 ) if ( pi$priority < 0 || pi$priority > 10 )
{ Reporter::fatal("All Notice::PolicyItem priorities must be within 0 and 10");
print "All Notice::PolicyItem priorities must be within 0 and 10";
exit();
}
if ( pi$priority !in tmp ) if ( pi$priority !in tmp )
tmp[pi$priority] = set(); tmp[pi$priority] = set();
@ -368,7 +409,7 @@ event bro_init() &priority=10
{ {
pi$position = |ordered_policy|; pi$position = |ordered_policy|;
ordered_policy[|ordered_policy|] = pi; ordered_policy[|ordered_policy|] = pi;
Log::write(NOTICE_POLICY, pi); Log::write(Notice::POLICY_LOG, pi);
} }
} }
} }

View file

@ -5,7 +5,7 @@
module Weird; module Weird;
export { export {
redef enum Log::ID += { WEIRD }; redef enum Log::ID += { LOG };
redef enum Notice::Type += { redef enum Notice::Type += {
## Generic unusual but alarm-worthy activity. ## Generic unusual but alarm-worthy activity.
@ -259,9 +259,9 @@ global did_inconsistency_msg: set[conn_id];
# Used to pass the optional connection into report_weird(). # Used to pass the optional connection into report_weird().
global current_conn: connection; global current_conn: connection;
event bro_init() event bro_init() &priority=5
{ {
Log::create_stream(WEIRD, [$columns=Info, $ev=log_weird]); Log::create_stream(Weird::LOG, [$columns=Info, $ev=log_weird]);
} }
function report_weird(t: time, name: string, id: string, have_conn: bool, function report_weird(t: time, name: string, id: string, have_conn: bool,
@ -311,7 +311,7 @@ function report_weird(t: time, name: string, id: string, have_conn: bool,
add weird_ignore[id][name]; add weird_ignore[id][name];
} }
Log::write(WEIRD, info); Log::write(Weird::LOG, info);
} }
function report_weird_conn(t: time, name: string, id: string, addl: string, function report_weird_conn(t: time, name: string, id: string, addl: string,

View file

@ -9,7 +9,7 @@
module PacketFilter; module PacketFilter;
export { export {
redef enum Log::ID += { PACKET_FILTER }; redef enum Log::ID += { LOG };
redef enum Notice::Type += { redef enum Notice::Type += {
## This notice is generated if a packet filter is unable to be compiled. ## This notice is generated if a packet filter is unable to be compiled.
@ -121,7 +121,7 @@ function install()
NOTICE([$note=Compile_Failure, NOTICE([$note=Compile_Failure,
$msg=fmt("Compiling packet filter failed"), $msg=fmt("Compiling packet filter failed"),
$sub=default_filter]); $sub=default_filter]);
exit(); Reporter::fatal(fmt("Bad pcap filter '%s'", default_filter));
} }
# Do an audit log for the packet filter. # Do an audit log for the packet filter.
@ -144,11 +144,11 @@ function install()
$sub=default_filter]); $sub=default_filter]);
} }
Log::write(PACKET_FILTER, info); Log::write(PacketFilter::LOG, info);
} }
event bro_init() &priority=10 event bro_init() &priority=10
{ {
Log::create_stream(PACKET_FILTER, [$columns=Info]); Log::create_stream(PacketFilter::LOG, [$columns=Info]);
PacketFilter::install(); PacketFilter::install();
} }

View file

@ -5,9 +5,13 @@
module Reporter; module Reporter;
export { export {
redef enum Log::ID += { REPORTER }; redef enum Log::ID += { LOG };
type Level: enum { INFO, WARNING, ERROR }; type Level: enum {
INFO,
WARNING,
ERROR
};
type Info: record { type Info: record {
ts: time &log; ts: time &log;
@ -19,22 +23,22 @@ export {
}; };
} }
event bro_init() event bro_init() &priority=5
{ {
Log::create_stream(REPORTER, [$columns=Info]); Log::create_stream(Reporter::LOG, [$columns=Info]);
} }
event reporter_info(t: time, msg: string, location: string) event reporter_info(t: time, msg: string, location: string)
{ {
Log::write(REPORTER, [$ts=t, $level=INFO, $message=msg, $location=location]); Log::write(Reporter::LOG, [$ts=t, $level=INFO, $message=msg, $location=location]);
} }
event reporter_warning(t: time, msg: string, location: string) event reporter_warning(t: time, msg: string, location: string)
{ {
Log::write(REPORTER, [$ts=t, $level=WARNING, $message=msg, $location=location]); Log::write(Reporter::LOG, [$ts=t, $level=WARNING, $message=msg, $location=location]);
} }
event reporter_error(t: time, msg: string, location: string) event reporter_error(t: time, msg: string, location: string)
{ {
Log::write(REPORTER, [$ts=t, $level=ERROR, $message=msg, $location=location]); Log::write(Reporter::LOG, [$ts=t, $level=ERROR, $message=msg, $location=location]);
} }

View file

@ -25,7 +25,7 @@ export {
Signature_Summary, Signature_Summary,
}; };
redef enum Log::ID += { SIGNATURES }; redef enum Log::ID += { LOG };
## These are the default actions you can apply to signature matches. ## These are the default actions you can apply to signature matches.
## All of them write the signature record to the logging stream unless ## All of them write the signature record to the logging stream unless
@ -114,7 +114,7 @@ global did_sig_log: set[string] &read_expire = 1 hr;
event bro_init() event bro_init()
{ {
Log::create_stream(SIGNATURES, [$columns=Info, $ev=log_signature]); Log::create_stream(Signatures::LOG, [$columns=Info, $ev=log_signature]);
} }
# Returns true if the given signature has already been triggered for the given # Returns true if the given signature has already been triggered for the given
@ -174,7 +174,7 @@ event signature_match(state: signature_state, msg: string, data: string)
$event_msg=fmt("%s: %s", src_addr, msg), $event_msg=fmt("%s: %s", src_addr, msg),
$sig_id=sig_id, $sig_id=sig_id,
$sub_msg=data]; $sub_msg=data];
Log::write(SIGNATURES, info); Log::write(Signatures::LOG, info);
} }
local notice = F; local notice = F;
@ -248,7 +248,7 @@ event signature_match(state: signature_state, msg: string, data: string)
fmt("%s has triggered signature %s on %d hosts", fmt("%s has triggered signature %s on %d hosts",
orig, sig_id, hcount); orig, sig_id, hcount);
Log::write(SIGNATURES, Log::write(Signatures::LOG,
[$note=Multiple_Sig_Responders, [$note=Multiple_Sig_Responders,
$src_addr=orig, $sig_id=sig_id, $event_msg=msg, $src_addr=orig, $sig_id=sig_id, $event_msg=msg,
$host_count=hcount, $sub_msg=horz_scan_msg]); $host_count=hcount, $sub_msg=horz_scan_msg]);
@ -265,7 +265,7 @@ event signature_match(state: signature_state, msg: string, data: string)
fmt("%s has triggered %d different signatures on host %s", fmt("%s has triggered %d different signatures on host %s",
orig, vcount, resp); orig, vcount, resp);
Log::write(SIGNATURES, Log::write(Signatures::LOG,
[$ts=network_time(), [$ts=network_time(),
$note=Multiple_Signatures, $note=Multiple_Signatures,
$src_addr=orig, $src_addr=orig,

View file

@ -11,7 +11,7 @@ module Software;
export { export {
redef enum Log::ID += { SOFTWARE }; redef enum Log::ID += { LOG };
type Type: enum { type Type: enum {
UNKNOWN, UNKNOWN,
@ -103,7 +103,7 @@ export {
event bro_init() event bro_init()
{ {
Log::create_stream(SOFTWARE, [$columns=Info, $ev=log_software]); Log::create_stream(Software::LOG, [$columns=Info, $ev=log_software]);
} }
function parse_mozilla(unparsed_version: string, function parse_mozilla(unparsed_version: string,
@ -379,7 +379,7 @@ event software_register(id: conn_id, info: Info)
return; return;
} }
Log::write(SOFTWARE, info); Log::write(Software::LOG, info);
ts[info$name] = info; ts[info$name] = info;
} }

View file

@ -3,7 +3,7 @@
module Conn; module Conn;
export { export {
redef enum Log::ID += { CONN }; redef enum Log::ID += { LOG };
type Info: record { type Info: record {
## This is the time of the first packet. ## This is the time of the first packet.
@ -95,7 +95,7 @@ redef record connection += {
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(CONN, [$columns=Info, $ev=log_conn]); Log::create_stream(Conn::LOG, [$columns=Info, $ev=log_conn]);
} }
function conn_state(c: connection, trans: transport_proto): string function conn_state(c: connection, trans: transport_proto): string
@ -216,6 +216,6 @@ event connection_state_remove(c: connection) &priority=5
event connection_state_remove(c: connection) &priority=-5 event connection_state_remove(c: connection) &priority=-5
{ {
Log::write(CONN, c$conn); Log::write(Conn::LOG, c$conn);
} }

View file

@ -3,7 +3,7 @@
module DNS; module DNS;
export { export {
redef enum Log::ID += { DNS }; redef enum Log::ID += { LOG };
type Info: record { type Info: record {
ts: time &log; ts: time &log;
@ -77,7 +77,7 @@ redef dpd_config += { [ANALYZER_DNS_TCP_BINPAC] = [$ports = dns_tcp_ports] };
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(DNS, [$columns=Info, $ev=log_dns]); Log::create_stream(DNS::LOG, [$columns=Info, $ev=log_dns]);
} }
function new_session(c: connection, trans_id: count): Info function new_session(c: connection, trans_id: count): Info
@ -163,7 +163,7 @@ event do_reply(c: connection, msg: dns_msg, ans: dns_answer, reply: string) &pri
{ {
if ( c$dns$ready ) if ( c$dns$ready )
{ {
Log::write(DNS, c$dns); Log::write(DNS::LOG, c$dns);
add c$dns_state$finished_answers[c$dns$trans_id]; add c$dns_state$finished_answers[c$dns$trans_id];
# This record is logged and no longer pending. # This record is logged and no longer pending.
delete c$dns_state$pending[c$dns$trans_id]; delete c$dns_state$pending[c$dns$trans_id];
@ -275,6 +275,6 @@ event connection_state_remove(c: connection) &priority=-5
# If Bro is expiring state, we should go ahead and log all unlogged # If Bro is expiring state, we should go ahead and log all unlogged
# request/response pairs now. # request/response pairs now.
for ( trans_id in c$dns_state$pending ) for ( trans_id in c$dns_state$pending )
Log::write(DNS, c$dns_state$pending[trans_id]); Log::write(DNS::LOG, c$dns_state$pending[trans_id]);
} }

View file

@ -14,7 +14,7 @@
module FTP; module FTP;
export { export {
redef enum Log::ID += { FTP }; redef enum Log::ID += { LOG };
## This setting changes if passwords used in FTP sessions are captured or not. ## This setting changes if passwords used in FTP sessions are captured or not.
const default_capture_password = F &redef; const default_capture_password = F &redef;
@ -95,7 +95,7 @@ global ftp_data_expected: table[addr, port] of ExpectedConn &create_expire=5mins
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(FTP, [$columns=Info, $ev=log_ftp]); Log::create_stream(FTP::LOG, [$columns=Info, $ev=log_ftp]);
} }
## A set of commands where the argument can be expected to refer ## A set of commands where the argument can be expected to refer
@ -165,7 +165,7 @@ function ftp_message(s: Info)
else else
s$arg=arg; s$arg=arg;
Log::write(FTP, s); Log::write(FTP::LOG, s);
} }
# The MIME and file_size fields are specific to file transfer commands # The MIME and file_size fields are specific to file transfer commands

View file

@ -4,7 +4,7 @@
module HTTP; module HTTP;
export { export {
redef enum Log::ID += { HTTP }; redef enum Log::ID += { LOG };
## Indicate a type of attack or compromise in the record to be logged. ## Indicate a type of attack or compromise in the record to be logged.
type Tags: enum { type Tags: enum {
@ -86,7 +86,7 @@ redef record connection += {
# Initialize the HTTP logging stream. # Initialize the HTTP logging stream.
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(HTTP, [$columns=Info, $ev=log_http]); Log::create_stream(HTTP::LOG, [$columns=Info, $ev=log_http]);
} }
# DPD configuration. # DPD configuration.
@ -230,7 +230,7 @@ event http_message_done(c: connection, is_orig: bool, stat: http_message_stat) &
# The reply body is done so we're ready to log. # The reply body is done so we're ready to log.
if ( ! is_orig ) if ( ! is_orig )
{ {
Log::write(HTTP, c$http); Log::write(HTTP::LOG, c$http);
delete c$http_state$pending[c$http_state$current_response]; delete c$http_state$pending[c$http_state$current_response];
} }
} }
@ -242,7 +242,7 @@ event connection_state_remove(c: connection)
{ {
for ( r in c$http_state$pending ) for ( r in c$http_state$pending )
{ {
Log::write(HTTP, c$http_state$pending[r]); Log::write(HTTP::LOG, c$http_state$pending[r]);
} }
} }
} }

View file

@ -73,7 +73,7 @@ event file_transferred(c: connection, prefix: string, descr: string,
local tmp = irc$command; local tmp = irc$command;
irc$command = "DCC"; irc$command = "DCC";
Log::write(IRC, irc); Log::write(IRC::LOG, irc);
irc$command = tmp; irc$command = tmp;
if ( irc$extract_file && irc?$extraction_file ) if ( irc$extract_file && irc?$extraction_file )

View file

@ -7,7 +7,7 @@
module IRC; module IRC;
export { export {
redef enum Log::ID += { IRC }; redef enum Log::ID += { LOG };
type Tag: enum { type Tag: enum {
EMPTY EMPTY
@ -44,7 +44,7 @@ redef dpd_config += { [ANALYZER_IRC] = [$ports = irc_ports] };
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(IRC, [$columns=Info, $ev=irc_log]); Log::create_stream(IRC::LOG, [$columns=Info, $ev=irc_log]);
} }
function new_session(c: connection): Info function new_session(c: connection): Info
@ -78,7 +78,7 @@ event irc_nick_message(c: connection, is_orig: bool, who: string, newnick: strin
{ {
if ( is_orig ) if ( is_orig )
{ {
Log::write(IRC, c$irc); Log::write(IRC::LOG, c$irc);
c$irc$nick = newnick; c$irc$nick = newnick;
} }
} }
@ -98,7 +98,7 @@ event irc_user_message(c: connection, is_orig: bool, user: string, host: string,
{ {
if ( is_orig ) if ( is_orig )
{ {
Log::write(IRC, c$irc); Log::write(IRC::LOG, c$irc);
c$irc$user = user; c$irc$user = user;
} }
} }
@ -118,7 +118,7 @@ event irc_join_message(c: connection, is_orig: bool, info_list: irc_join_list) &
{ {
c$irc$value = l$channel; c$irc$value = l$channel;
c$irc$addl = (l$password != "" ? fmt(" with channel key: '%s'", l$password) : ""); c$irc$addl = (l$password != "" ? fmt(" with channel key: '%s'", l$password) : "");
Log::write(IRC, c$irc); Log::write(IRC::LOG, c$irc);
} }
} }
} }

View file

@ -1,4 +0,0 @@
@load ./main
@load ./file-ident
@load ./file-extract
@load ./file-hash

View file

@ -1,62 +0,0 @@
@load ./file-ident
@load base/utils/files
module MIME;
export {
## Pattern of file mime types to extract from MIME bodies.
const extract_file_types = /NO_DEFAULT/ &redef;
## The on-disk prefix for files to be extracted from MIME entity bodies.
const extraction_prefix = "mime-item" &redef;
redef record Info += {
## Optionally write the file to disk. Must be set prior to first
## data chunk being seen in an event.
extract_file: bool &default=F;
## Store the file handle here for the file currently being extracted.
extraction_file: file &log &optional;
};
redef record State += {
## Store a count of the number of files that have been transferred in
## this conversation to create unique file names on disk.
num_extracted_files: count &default=0;
};
}
event mime_segment_data(c: connection, length: count, data: string) &priority=5
{
if ( extract_file_types in c$mime$mime_type )
c$mime$extract_file = T;
}
event mime_segment_data(c: connection, length: count, data: string) &priority=3
{
if ( c$mime$extract_file && c$mime$content_len == 0 )
{
local suffix = fmt("%d.dat", ++c$mime_state$num_extracted_files);
local fname = generate_extraction_filename(extraction_prefix, c, suffix);
c$mime$extraction_file = open(fname);
enable_raw_output(c$mime$extraction_file);
}
}
event mime_segment_data(c: connection, length: count, data: string) &priority=-5
{
if ( c$mime$extract_file && c$mime?$extraction_file )
print c$mime$extraction_file, data;
}
event mime_end_entity(c: connection) &priority=-3
{
# TODO: this check is only due to a bug in mime_end_entity that
# causes the event to be generated twice for the same real event.
if ( ! c?$mime )
return;
if ( c$mime?$extraction_file )
close(c$mime$extraction_file);
}

View file

@ -1,79 +0,0 @@
@load ./file-ident
@load base/frameworks/notice
module MIME;
export {
redef enum Notice::Type += {
## Indicates that an MD5 sum was calculated for a MIME message.
MD5,
};
redef record Info += {
## The calculated MD5 sum for the MIME entity.
md5: string &log &optional;
## Optionally calculate the file's MD5 sum. Must be set prior to the
## first data chunk being seen in an event.
calc_md5: bool &default=F;
## This boolean value indicates if an MD5 sum is being calculated
## for the current file transfer.
calculating_md5: bool &default=F;
};
## Generate MD5 sums for these filetypes.
const generate_md5 = /application\/x-dosexec/ # Windows and DOS executables
| /application\/x-executable/ # *NIX executable binary
&redef;
}
event mime_segment_data(c: connection, length: count, data: string) &priority=-5
{
if ( ! c?$mime ) return;
if ( c$mime$content_len == 0 )
{
if ( generate_md5 in c$mime$mime_type )
c$mime$calc_md5 = T;
if ( c$mime$calc_md5 )
{
c$mime$calculating_md5 = T;
md5_hash_init(c$id);
}
}
if ( c$mime$calculating_md5 )
md5_hash_update(c$id, data);
}
## In the event of a content gap during the MIME transfer, detect the state for
## the MD5 sum calculation and stop calculating the MD5 since it would be
## incorrect anyway.
event content_gap(c: connection, is_orig: bool, seq: count, length: count) &priority=5
{
if ( is_orig || ! c?$mime ) return;
if ( c$mime$calculating_md5 )
{
c$mime$calculating_md5 = F;
md5_hash_finish(c$id);
}
}
event mime_end_entity(c: connection) &priority=-3
{
# TODO: this check is only due to a bug in mime_end_entity that
# causes the event to be generated twice for the same real event.
if ( ! c?$mime )
return;
if ( c$mime$calculating_md5 )
{
c$mime$md5 = md5_hash_finish(c$id);
NOTICE([$note=MD5, $msg=fmt("Calculated a hash for a MIME entity from %s", c$id$orig_h),
$sub=c$mime$md5, $conn=c]);
}
}

View file

@ -1,16 +0,0 @@
@load ./main
module MIME;
export {
redef record Info += {
## Sniffed MIME type for the transfer.
mime_type: string &log &optional;
};
}
event mime_segment_data(c: connection, length: count, data: string) &priority=7
{
if ( c$mime$content_len == 0 )
c$mime$mime_type = split1(identify_data(data, T), /;/)[1];
}

View file

@ -1,101 +0,0 @@
##! The mime script does analysis of MIME encoded messages seen in certain
##! protocols (only SMTP and POP3 at the moment).
@load base/utils/strings
module MIME;
export {
redef enum Log::ID += { MIME };
# Let's assume for now that nothing transferring files using
# MIME attachments is multiplexing for simplicity's sake.
# We can make the assumption that one connection == one file (at a time)
type Info: record {
## This is the timestamp of when the MIME content transfer began.
ts: time &log;
uid: string &log;
id: conn_id &log;
## The application layer protocol over which the transfer was seen.
app_protocol: string &log &optional;
## The filename seen in the Content-Disposition header.
filename: string &log &optional;
## Track how many bytes of the MIME encoded file have been seen.
content_len: count &log &default=0;
};
type State: record {
## Track the number of MIME encoded files transferred during this session.
level: count &default=0;
};
global log_mime: event(rec: Info);
}
redef record connection += {
mime: Info &optional;
mime_state: State &optional;
};
event bro_init()
{
Log::create_stream(MIME, [$columns=Info, $ev=log_mime]);
}
function new_mime_session(c: connection): Info
{
local info: Info;
info$ts=network_time();
info$uid=c$uid;
info$id=c$id;
return info;
}
function set_session(c: connection, new_entity: bool)
{
if ( ! c?$mime_state )
c$mime_state = [];
if ( ! c?$mime || new_entity )
c$mime = new_mime_session(c);
}
event mime_begin_entity(c: connection) &priority=10
{
set_session(c, T);
++c$mime_state$level;
if ( |c$service| > 0 )
c$mime$app_protocol = join_string_set(c$service, ",");
}
# This has priority -10 because other handlers need to know the current
# content_len before it's updated by this handler.
event mime_segment_data(c: connection, length: count, data: string) &priority=-10
{
c$mime$content_len = c$mime$content_len + length;
}
event mime_one_header(c: connection, h: mime_header_rec)
{
if ( h$name == "CONTENT-DISPOSITION" &&
/[fF][iI][lL][eE][nN][aA][mM][eE]/ in h$value )
c$mime$filename = sub(h$value, /^.*[fF][iI][lL][eE][nN][aA][mM][eE]=/, "");
}
event mime_end_entity(c: connection) &priority=-5
{
# This check and the delete below are just to cope with a bug where
# mime_end_entity can be generated multiple times for the same event.
if ( ! c?$mime )
return;
# Don't log anything if there wasn't any content.
if ( c$mime$content_len > 0 )
Log::write(MIME, c$mime);
delete c$mime;
}

View file

@ -12,7 +12,7 @@ export {
MD5, MD5,
}; };
redef enum Log::ID += { SMTP_ENTITIES }; redef enum Log::ID += { ENTITIES_LOG };
type EntityInfo: record { type EntityInfo: record {
## This is the timestamp of when the MIME content transfer began. ## This is the timestamp of when the MIME content transfer began.
@ -74,7 +74,7 @@ export {
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(SMTP_ENTITIES, [$columns=EntityInfo, $ev=log_mime]); Log::create_stream(SMTP::ENTITIES_LOG, [$columns=EntityInfo, $ev=log_mime]);
} }
function set_session(c: connection, new_entity: bool) function set_session(c: connection, new_entity: bool)
@ -185,7 +185,7 @@ event mime_end_entity(c: connection) &priority=-5
# Only log is there was some content. # Only log is there was some content.
if ( c$smtp$current_entity$content_len > 0 ) if ( c$smtp$current_entity$content_len > 0 )
Log::write(SMTP_ENTITIES, c$smtp$current_entity); Log::write(SMTP::ENTITIES_LOG, c$smtp$current_entity);
delete c$smtp$current_entity; delete c$smtp$current_entity;
} }

View file

@ -5,7 +5,7 @@
module SMTP; module SMTP;
export { export {
redef enum Log::ID += { SMTP }; redef enum Log::ID += { LOG };
type Info: record { type Info: record {
ts: time &log; ts: time &log;
@ -73,7 +73,7 @@ redef dpd_config += { [ANALYZER_SMTP] = [$ports = ports] };
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(SMTP, [$columns=SMTP::Info, $ev=log_smtp]); Log::create_stream(SMTP::LOG, [$columns=SMTP::Info, $ev=log_smtp]);
} }
function find_address_in_smtp_header(header: string): string function find_address_in_smtp_header(header: string): string
@ -119,7 +119,7 @@ function set_smtp_session(c: connection)
function smtp_message(c: connection) function smtp_message(c: connection)
{ {
if ( c$smtp$has_client_activity ) if ( c$smtp$has_client_activity )
Log::write(SMTP, c$smtp); Log::write(SMTP::LOG, c$smtp);
} }
event smtp_request(c: connection, is_orig: bool, command: string, arg: string) &priority=5 event smtp_request(c: connection, is_orig: bool, command: string, arg: string) &priority=5

View file

@ -14,7 +14,7 @@
module SSH; module SSH;
export { export {
redef enum Log::ID += { SSH }; redef enum Log::ID += { LOG };
redef enum Notice::Type += { redef enum Notice::Type += {
## This indicates that a heuristically detected "successful" SSH ## This indicates that a heuristically detected "successful" SSH
@ -79,7 +79,7 @@ redef record connection += {
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(SSH, [$columns=Info, $ev=log_ssh]); Log::create_stream(SSH::LOG, [$columns=Info, $ev=log_ssh]);
} }
function set_session(c: connection) function set_session(c: connection)
@ -149,11 +149,11 @@ event SSH::heuristic_successful_login(c: connection) &priority=-5
$msg="Heuristically detected successful SSH login.", $msg="Heuristically detected successful SSH login.",
$conn=c]); $conn=c]);
Log::write(SSH, c$ssh); Log::write(SSH::LOG, c$ssh);
} }
event SSH::heuristic_failed_login(c: connection) &priority=-5 event SSH::heuristic_failed_login(c: connection) &priority=-5
{ {
Log::write(SSH, c$ssh); Log::write(SSH::LOG, c$ssh);
} }
event connection_state_remove(c: connection) &priority=-5 event connection_state_remove(c: connection) &priority=-5

View file

@ -4,7 +4,7 @@
module SSL; module SSL;
export { export {
redef enum Log::ID += { SSL }; redef enum Log::ID += { LOG };
redef enum Notice::Type += { redef enum Notice::Type += {
Self_Signed_Cert Self_Signed_Cert
@ -43,7 +43,7 @@ redef record connection += {
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(SSL, [$columns=Info, $ev=log_ssl]); Log::create_stream(SSL::LOG, [$columns=Info, $ev=log_ssl]);
} }
redef capture_filters += { redef capture_filters += {
@ -117,6 +117,6 @@ event ssl_established(c: connection) &priority=-5
{ {
set_session(c); set_session(c);
Log::write(SSL, c$ssl); Log::write(SSL::LOG, c$ssl);
} }

View file

@ -5,7 +5,7 @@
module Syslog; module Syslog;
export { export {
redef enum Log::ID += { SYSLOG }; redef enum Log::ID += { LOG };
type Info: record { type Info: record {
ts: time &log; ts: time &log;
@ -29,7 +29,7 @@ redef record connection += {
event bro_init() &priority=5 event bro_init() &priority=5
{ {
Log::create_stream(SYSLOG, [$columns=Info]); Log::create_stream(Syslog::LOG, [$columns=Info]);
} }
event syslog_message(c: connection, facility: count, severity: count, msg: string) &priority=5 event syslog_message(c: connection, facility: count, severity: count, msg: string) &priority=5
@ -48,5 +48,5 @@ event syslog_message(c: connection, facility: count, severity: count, msg: strin
event syslog_message(c: connection, facility: count, severity: count, msg: string) &priority=-5 event syslog_message(c: connection, facility: count, severity: count, msg: string) &priority=-5
{ {
Log::write(SYSLOG, c$syslog); Log::write(Syslog::LOG, c$syslog);
} }

View file

@ -7,7 +7,7 @@
module Barnyard2; module Barnyard2;
export { export {
redef enum Log::ID += { BARNYARD2 }; redef enum Log::ID += { LOG };
type Info: record { type Info: record {
ts: time &log; ts: time &log;
@ -21,9 +21,9 @@ export {
global pid2cid: function(p: PacketID): conn_id; global pid2cid: function(p: PacketID): conn_id;
} }
event bro_init() event bro_init() &priority=5
{ {
Log::create_stream(BARNYARD2, [$columns=Info]); Log::create_stream(Barnyard2::LOG, [$columns=Info]);
} }
@ -34,7 +34,7 @@ function pid2cid(p: PacketID): conn_id
event barnyard_alert(id: PacketID, alert: AlertData, msg: string, data: string) event barnyard_alert(id: PacketID, alert: AlertData, msg: string, data: string)
{ {
Log::write(BARNYARD2, [$ts=network_time(), $pid=id, $alert=alert]); Log::write(Barnyard2::LOG, [$ts=network_time(), $pid=id, $alert=alert]);
#local proto_connection_string: string; #local proto_connection_string: string;
#if ( id$src_p == 0/tcp ) #if ( id$src_p == 0/tcp )

View file

@ -1,7 +1,7 @@
module LoadedScripts; module LoadedScripts;
export { export {
redef enum Log::ID += { LOADED_SCRIPTS }; redef enum Log::ID += { LOG };
type Info: record { type Info: record {
depth: count &log; depth: count &log;
@ -11,10 +11,10 @@ export {
event bro_init() event bro_init()
{ {
Log::create_stream(LOADED_SCRIPTS, [$columns=Info]); Log::create_stream(LoadedScripts::LOG, [$columns=Info]);
} }
event bro_script_loaded(path: string, level: count) event bro_script_loaded(path: string, level: count)
{ {
Log::write(LOADED_SCRIPTS, [$depth=level, $name=path]); Log::write(LoadedScripts::LOG, [$depth=level, $name=path]);
} }

View file

@ -0,0 +1,22 @@
##! This script adds the necessary environment variables for Bro to make use
##! of PF_RING's clustering (and load balancing) support through the libpcap
##! wrapper.
module PFRing;
export {
## Define the pf_ring cluster ID that you would like this instance
## of Bro to use. Please set a value from 0 to 255.
const cluster_id = 150 &redef;
}
event bro_init() &priority=10
{
if ( cluster_id > 255 || cluster_id < 0 )
Reporter::fatal(fmt("%d is an invalid value for PFRing::cluster_id", cluster_id));
if ( ! setenv("PCAP_PF_RING_USE_CLUSTER_PER_FLOW", "1") ||
! setenv("PCAP_PF_RING_CLUSTER_ID", fmt("%d", cluster_id)) )
Reporter::fatal("Unable to set one or both of the PF_RING environment variables.");
}
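
Usage sketch (not part of this change set): a site can pick its own PF_RING cluster ID by redefining the exported constant; the @load path and the value below are illustrative only.

    @load misc/pf-ring          # hypothetical path for the script above
    redef PFRing::cluster_id = 21;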


@ -5,10 +5,10 @@
@load base/utils/directions-and-hosts @load base/utils/directions-and-hosts
module KnownHosts; module Known;
export { export {
redef enum Log::ID += { KNOWN_HOSTS }; redef enum Log::ID += { HOSTS_LOG };
type Info: record { type Info: record {
## The timestamp at which the host was detected. ## The timestamp at which the host was detected.
@ -20,7 +20,7 @@ export {
## The hosts whose existence should be logged and tracked. ## The hosts whose existence should be logged and tracked.
## Choices are: LOCAL_HOSTS, REMOTE_HOSTS, ALL_HOSTS, NO_HOSTS ## Choices are: LOCAL_HOSTS, REMOTE_HOSTS, ALL_HOSTS, NO_HOSTS
const asset_tracking = LOCAL_HOSTS &redef; const host_tracking = LOCAL_HOSTS &redef;
## The set of all known addresses to store for preventing duplicate ## The set of all known addresses to store for preventing duplicate
## logging of addresses. It can also be used from other scripts to ## logging of addresses. It can also be used from other scripts to
@ -34,7 +34,7 @@ export {
event bro_init() event bro_init()
{ {
Log::create_stream(KNOWN_HOSTS, [$columns=Info, $ev=log_known_hosts]); Log::create_stream(Known::HOSTS_LOG, [$columns=Info, $ev=log_known_hosts]);
} }
event connection_established(c: connection) &priority=5 event connection_established(c: connection) &priority=5
@ -43,10 +43,10 @@ event connection_established(c: connection) &priority=5
for ( host in set(id$orig_h, id$resp_h) ) for ( host in set(id$orig_h, id$resp_h) )
{ {
if ( host !in known_hosts && addr_matches_host(host, asset_tracking) ) if ( host !in known_hosts && addr_matches_host(host, host_tracking) )
{ {
add known_hosts[host]; add known_hosts[host];
Log::write(KNOWN_HOSTS, [$ts=network_time(), $host=host]); Log::write(Known::HOSTS_LOG, [$ts=network_time(), $host=host]);
} }
} }
} }


@ -5,9 +5,9 @@
@load base/utils/directions-and-hosts @load base/utils/directions-and-hosts
module KnownServices; module Known;
redef enum Log::ID += { KNOWN_SERVICES }; redef enum Log::ID += { SERVICES_LOG };
export { export {
type Info: record { type Info: record {
@ -21,7 +21,7 @@ export {
}; };
## The hosts whose services should be tracked and logged. ## The hosts whose services should be tracked and logged.
const asset_tracking = LOCAL_HOSTS &redef; const service_tracking = LOCAL_HOSTS &redef;
global known_services: set[addr, port] &create_expire=1day &synchronized; global known_services: set[addr, port] &create_expire=1day &synchronized;
@ -35,7 +35,7 @@ redef record connection += {
event bro_init() event bro_init()
{ {
Log::create_stream(KNOWN_SERVICES, [$columns=Info, Log::create_stream(Known::SERVICES_LOG, [$columns=Info,
$ev=log_known_services]); $ev=log_known_services]);
} }
@ -44,7 +44,7 @@ function known_services_done(c: connection)
local id = c$id; local id = c$id;
if ( ! c$known_services_done && if ( ! c$known_services_done &&
get_port_transport_proto(id$resp_p) == tcp && get_port_transport_proto(id$resp_p) == tcp &&
addr_matches_host(id$resp_h, asset_tracking) && addr_matches_host(id$resp_h, service_tracking) &&
[id$resp_h, id$resp_p] !in known_services && [id$resp_h, id$resp_p] !in known_services &&
"ftp-data" !in c$service ) # don't include ftp data sessions "ftp-data" !in c$service ) # don't include ftp data sessions
{ {
@ -56,7 +56,7 @@ function known_services_done(c: connection)
i$service=c$service; i$service=c$service;
add known_services[id$resp_h, id$resp_p]; add known_services[id$resp_h, id$resp_p];
Log::write(KNOWN_SERVICES, i); Log::write(Known::SERVICES_LOG, i);
c$known_services_done = T; c$known_services_done = T;
} }
} }


@ -25,11 +25,13 @@ event log_smtp(rec: Info)
ip = rec$x_originating_ip; ip = rec$x_originating_ip;
loc = lookup_location(ip); loc = lookup_location(ip);
if ( loc$country_code in suspicious_origination_countries || if ( (loc?$country_code &&
loc$country_code in suspicious_origination_countries) ||
ip in suspicious_origination_networks ) ip in suspicious_origination_networks )
{ {
NOTICE([$note=Suspicious_Origination, NOTICE([$note=Suspicious_Origination,
$msg=fmt("An email originated from %s (%s).", loc$country_code, ip), $msg=fmt("An email originated from %s (%s).",
loc?$country_code ? loc$country_code : "", ip),
$id=rec$id]); $id=rec$id]);
} }
} }
@ -38,11 +40,12 @@ event log_smtp(rec: Info)
ip = rec$path[|rec$path|-1]; ip = rec$path[|rec$path|-1];
loc = lookup_location(ip); loc = lookup_location(ip);
if ( loc$country_code in suspicious_origination_countries || if ( (loc?$country_code &&
loc$country_code in suspicious_origination_countries) ||
ip in suspicious_origination_networks ) ip in suspicious_origination_networks )
{ {
NOTICE([$note=Suspicious_Origination, NOTICE([$note=Suspicious_Origination,
$msg=fmt("Based up Received headers, email originated from %s (%s).", loc$country_code, ip), $msg=fmt("Based up Received headers, email originated from %s (%s).", loc?$country_code ? loc$country_code : "", ip),
$id=rec$id]); $id=rec$id]);
} }
} }


@ -33,7 +33,7 @@ event SSH::heuristic_successful_login(c: connection) &priority=5
# Add the location data to the SSH record. # Add the location data to the SSH record.
c$ssh$remote_location = location; c$ssh$remote_location = location;
if ( location$country_code in watched_countries ) if ( location?$country_code && location$country_code in watched_countries )
{ {
NOTICE([$note=Login_From_Watched_Country, NOTICE([$note=Login_From_Watched_Country,
$conn=c, $conn=c,


@ -1,9 +1,9 @@
@load base/utils/directions-and-hosts @load base/utils/directions-and-hosts
module KnownCerts; module Known;
export { export {
redef enum Log::ID += { KNOWN_CERTS }; redef enum Log::ID += { CERTS_LOG };
type Info: record { type Info: record {
## The timestamp when the certificate was detected. ## The timestamp when the certificate was detected.
@ -23,7 +23,7 @@ export {
## The certificates whose existence should be logged and tracked. ## The certificates whose existence should be logged and tracked.
## Choices are: LOCAL_HOSTS, REMOTE_HOSTS, ALL_HOSTS, NO_HOSTS ## Choices are: LOCAL_HOSTS, REMOTE_HOSTS, ALL_HOSTS, NO_HOSTS
const asset_tracking = LOCAL_HOSTS &redef; const cert_tracking = LOCAL_HOSTS &redef;
## The set of all known certificates to store for preventing duplicate ## The set of all known certificates to store for preventing duplicate
## logging. It can also be used from other scripts to ## logging. It can also be used from other scripts to
@ -36,7 +36,7 @@ export {
event bro_init() event bro_init()
{ {
Log::create_stream(KNOWN_CERTS, [$columns=Info, $ev=log_known_certs]); Log::create_stream(Known::CERTS_LOG, [$columns=Info, $ev=log_known_certs]);
} }
event x509_certificate(c: connection, cert: X509, is_server: bool, chain_idx: count, chain_len: count, der_cert: string) event x509_certificate(c: connection, cert: X509, is_server: bool, chain_idx: count, chain_len: count, der_cert: string)
@ -47,10 +47,10 @@ event x509_certificate(c: connection, cert: X509, is_server: bool, chain_idx: co
if ( chain_idx != 0 ) return; if ( chain_idx != 0 ) return;
local host = c$id$resp_h; local host = c$id$resp_h;
if ( [host, cert$serial] !in known_certs && addr_matches_host(host, asset_tracking) ) if ( [host, cert$serial] !in known_certs && addr_matches_host(host, cert_tracking) )
{ {
add known_certs[host, cert$serial]; add known_certs[host, cert$serial];
Log::write(KNOWN_CERTS, [$ts=network_time(), $host=host, Log::write(Known::CERTS_LOG, [$ts=network_time(), $host=host,
$port_num=c$id$resp_p, $subject=cert$subject, $port_num=c$id$resp_p, $subject=cert$subject,
$issuer_subject=cert$issuer, $issuer_subject=cert$issuer,
$serial=cert$serial]); $serial=cert$serial]);


@ -4,6 +4,6 @@
@load protocols/ssl/known-certs @load protocols/ssl/known-certs
redef Software::asset_tracking = ALL_HOSTS; redef Software::asset_tracking = ALL_HOSTS;
redef KnownHosts::asset_tracking = ALL_HOSTS; redef Known::host_tracking = ALL_HOSTS;
redef KnownServices::asset_tracking = ALL_HOSTS; redef Known::service_tracking = ALL_HOSTS;
redef KnownCerts::asset_tracking = ALL_HOSTS; redef Known::cert_tracking = ALL_HOSTS;


@ -107,10 +107,10 @@ macro(BIF_TARGET bifInput)
add_custom_command(OUTPUT ${bifOutputs} add_custom_command(OUTPUT ${bifOutputs}
COMMAND bifcl COMMAND bifcl
ARGS ${CMAKE_CURRENT_SOURCE_DIR}/${bifInput} || (rm -f ${bifOutputs} && exit 1) ARGS ${CMAKE_CURRENT_SOURCE_DIR}/${bifInput} || (rm -f ${bifOutputs} && exit 1)
# in order be able to run bro from the build directory, # In order to be able to run bro from the build directory,
# the generated bro script needs to be inside a # the generated bro script needs to be inside
# a directory tree named the same way it will be # a directory tree named the same way it will be
# referenced from an @load # referenced from an @load.
COMMAND "${CMAKE_COMMAND}" COMMAND "${CMAKE_COMMAND}"
ARGS -E copy ${bifInput}.bro base/${bifInput}.bro ARGS -E copy ${bifInput}.bro base/${bifInput}.bro
COMMAND "${CMAKE_COMMAND}" COMMAND "${CMAKE_COMMAND}"


@ -3971,6 +3971,11 @@ RecordCoerceExpr::RecordCoerceExpr(Expr* op, RecordType* r)
if ( ! same_type(sup_t_i, sub_t_i) ) if ( ! same_type(sup_t_i, sub_t_i) )
{ {
if ( sup_t_i->Tag() != TYPE_RECORD ||
sub_t_i->Tag() != TYPE_RECORD ||
! record_promotion_compatible(sup_t_i->AsRecordType(),
sub_t_i->AsRecordType()) )
{
char buf[512]; char buf[512];
safe_snprintf(buf, sizeof(buf), safe_snprintf(buf, sizeof(buf),
"type clash for field \"%s\"", sub_r->FieldName(i)); "type clash for field \"%s\"", sub_r->FieldName(i));
@ -3978,6 +3983,7 @@ RecordCoerceExpr::RecordCoerceExpr(Expr* op, RecordType* r)
SetError(); SetError();
break; break;
} }
}
map[t_i] = i; map[t_i] = i;
} }
@ -4024,6 +4030,24 @@ Val* RecordCoerceExpr::Fold(Val* v) const
rhs = rhs->Ref(); rhs = rhs->Ref();
assert(rhs || Type()->AsRecordType()->FieldDecl(i)->FindAttr(ATTR_OPTIONAL)); assert(rhs || Type()->AsRecordType()->FieldDecl(i)->FindAttr(ATTR_OPTIONAL));
BroType* rhs_type = rhs->Type();
RecordType* val_type = val->Type()->AsRecordType();
BroType* field_type = val_type->FieldType(i);
if ( rhs_type->Tag() == TYPE_RECORD &&
field_type->Tag() == TYPE_RECORD &&
! same_type(rhs_type, field_type) )
{
Val* new_val = rhs->AsRecordVal()->CoerceTo(
field_type->AsRecordType());
if ( new_val )
{
Unref(rhs);
rhs = new_val;
}
}
val->Assign(i, rhs); val->Assign(i, rhs);
} }
else else


@ -180,7 +180,10 @@ public:
{ return new HTTP_Analyzer(conn); } { return new HTTP_Analyzer(conn); }
static bool Available() static bool Available()
{ return (http_request || http_reply) && !FLAGS_use_binpac; } { return (http_request || http_reply || http_header ||
http_all_headers || http_begin_entity || http_end_entity ||
http_content_type || http_entity_data || http_message_done ||
http_event || http_stats) && !FLAGS_use_binpac; }
int IsConnectionClose() { return connection_close; } int IsConnectionClose() { return connection_close; }
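
With Available() now testing the full set of HTTP events, handling any one of them is enough to enable the analyzer even when the default http scripts are not loaded; a minimal script-side sketch (illustrative only):

    event http_header(c: connection, is_orig: bool, name: string, value: string)
        {
        print fmt("%s: %s", name, value);
        }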


@ -6,6 +6,27 @@
#include "LogWriterAscii.h" #include "LogWriterAscii.h"
#include "NetVar.h" #include "NetVar.h"
/**
* Takes a string, escapes each character into its equivalent hex code (\x##), and
* returns a string containing all escaped values.
*
* @param str string to escape
* @return A std::string containing a list of escaped hex values of the form \x##
*/
static string get_escaped_string(const std::string& str)
{
char tbuf[16];
string esc = "";
for ( size_t i = 0; i < str.length(); ++i )
{
snprintf(tbuf, sizeof(tbuf), "\\x%02x", str[i]);
esc += tbuf;
}
return esc;
}
LogWriterAscii::LogWriterAscii() LogWriterAscii::LogWriterAscii()
{ {
file = 0; file = 0;
@ -52,6 +73,14 @@ LogWriterAscii::~LogWriterAscii()
delete [] header_prefix; delete [] header_prefix;
} }
bool LogWriterAscii::WriteHeaderField(const string& key, const string& val)
{
string str = string(header_prefix, header_prefix_len) +
key + string(separator, separator_len) + val + "\n";
return (fwrite(str.c_str(), str.length(), 1, file) == 1);
}
bool LogWriterAscii::DoInit(string path, int num_fields, bool LogWriterAscii::DoInit(string path, int num_fields,
const LogField* const * fields) const LogField* const * fields)
{ {
@ -70,22 +99,35 @@ bool LogWriterAscii::DoInit(string path, int num_fields,
if ( include_header ) if ( include_header )
{ {
if ( fwrite(header_prefix, header_prefix_len, 1, file) != 1 ) string str = string(header_prefix, header_prefix_len)
+ "separator " // Always use space as separator here.
+ get_escaped_string(string(separator, separator_len))
+ "\n";
if( fwrite(str.c_str(), str.length(), 1, file) != 1 )
goto write_error; goto write_error;
for ( int i = 0; i < num_fields; i++ ) if ( ! WriteHeaderField("path", path) )
goto write_error;
string names;
string types;
for ( int i = 0; i < num_fields; ++i )
{ {
if ( i > 0 && if ( i > 0 )
fwrite(separator, separator_len, 1, file) != 1 ) {
goto write_error; names += string(separator, separator_len);
types += string(separator, separator_len);
const LogField* field = fields[i];
if ( fputs(field->name.c_str(), file) == EOF )
goto write_error;
} }
if ( fputc('\n', file) == EOF ) const LogField* field = fields[i];
names += field->name;
types += type_name(field->type);
}
if ( ! (WriteHeaderField("fields", names)
&& WriteHeaderField("types", types)) )
goto write_error; goto write_error;
} }
@ -141,13 +183,13 @@ bool LogWriterAscii::DoWriteOne(ODesc* desc, LogVal* val, const LogField* field)
break; break;
case TYPE_TIME: case TYPE_TIME:
case TYPE_INTERVAL:
char buf[32]; char buf[32];
snprintf(buf, sizeof(buf), "%.6f", val->val.double_val); snprintf(buf, sizeof(buf), "%.6f", val->val.double_val);
desc->Add(buf); desc->Add(buf);
break; break;
case TYPE_DOUBLE: case TYPE_DOUBLE:
case TYPE_INTERVAL:
desc->Add(val->val.double_val); desc->Add(val->val.double_val);
break; break;
@ -216,6 +258,9 @@ bool LogWriterAscii::DoWriteOne(ODesc* desc, LogVal* val, const LogField* field)
bool LogWriterAscii::DoWrite(int num_fields, const LogField* const * fields, bool LogWriterAscii::DoWrite(int num_fields, const LogField* const * fields,
LogVal** vals) LogVal** vals)
{ {
if ( ! file )
DoInit(Path(), NumFields(), Fields());
ODesc desc(DESC_READABLE); ODesc desc(DESC_READABLE);
desc.SetEscape(separator, separator_len); desc.SetEscape(separator, separator_len);
@ -245,19 +290,23 @@ bool LogWriterAscii::DoWrite(int num_fields, const LogField* const * fields,
bool LogWriterAscii::DoRotate(string rotated_path, double open, bool LogWriterAscii::DoRotate(string rotated_path, double open,
double close, bool terminating) double close, bool terminating)
{ {
if ( IsSpecial(Path()) ) // Don't rotate special files or if there's not one currently open.
// Don't rotate special files. if ( ! file || IsSpecial(Path()) )
return true; return true;
fclose(file); fclose(file);
file = 0;
string nname = rotated_path + ".log"; string nname = rotated_path + ".log";
rename(fname.c_str(), nname.c_str()); rename(fname.c_str(), nname.c_str());
if ( ! FinishedRotation(nname, fname, open, close, terminating) ) if ( ! FinishedRotation(nname, fname, open, close, terminating) )
{
Error(Fmt("error rotating %s to %s", fname.c_str(), nname.c_str())); Error(Fmt("error rotating %s to %s", fname.c_str(), nname.c_str()));
return false;
}
return DoInit(Path(), NumFields(), Fields()); return true;
} }
bool LogWriterAscii::DoSetBuf(bool enabled) bool LogWriterAscii::DoSetBuf(bool enabled)


@ -28,6 +28,7 @@ protected:
private: private:
bool IsSpecial(string path) { return path.find("/dev/") == 0; } bool IsSpecial(string path) { return path.find("/dev/") == 0; }
bool DoWriteOne(ODesc* desc, LogVal* val, const LogField* field); bool DoWriteOne(ODesc* desc, LogVal* val, const LogField* field);
bool WriteHeaderField(const string& key, const string& value);
FILE* file; FILE* file;
string fname; string fname;


@ -75,6 +75,9 @@ extern bool terminating;
// True if the remote serializer is to be activated. // True if the remote serializer is to be activated.
extern bool using_communication; extern bool using_communication;
// Snaplen passed to libpcap.
extern int snaplen;
extern const struct pcap_pkthdr* current_hdr; extern const struct pcap_pkthdr* current_hdr;
extern const u_char* current_pkt; extern const u_char* current_pkt;
extern int current_dispatched; extern int current_dispatched;


@ -19,9 +19,6 @@
#include <pcap-int.h> #include <pcap-int.h>
#endif #endif
int snaplen = 8192; // really want "capture entire packet"
PktSrc::PktSrc() PktSrc::PktSrc()
{ {
interface = readfile = 0; interface = readfile = 0;
@ -492,7 +489,7 @@ PktInterfaceSrc::PktInterfaceSrc(const char* arg_interface, const char* filter,
// Couldn't get header size. // Couldn't get header size.
return; return;
reporter->Info("listening on %s\n", interface); reporter->Info("listening on %s, capture length %d bytes\n", interface, snaplen);
} }
else else
closed = true; closed = true;


@ -121,7 +121,7 @@ void Reporter::Syslog(const char* fmt, ...)
va_end(ap); va_end(ap);
} }
void Reporter::WeirdHelper(EventHandlerPtr event, Val* conn_val, const char* name, const char* addl, ...) void Reporter::WeirdHelper(EventHandlerPtr event, Val* conn_val, const char* addl, const char* fmt_name, ...)
{ {
val_list* vl = new val_list(1); val_list* vl = new val_list(1);
@ -132,22 +132,22 @@ void Reporter::WeirdHelper(EventHandlerPtr event, Val* conn_val, const char* nam
vl->append(new StringVal(addl)); vl->append(new StringVal(addl));
va_list ap; va_list ap;
va_start(ap, addl); va_start(ap, fmt_name);
DoLog("weird", event, stderr, 0, vl, false, false, name, ap); DoLog("weird", event, stderr, 0, vl, false, false, fmt_name, ap);
va_end(ap); va_end(ap);
delete vl; delete vl;
} }
void Reporter::WeirdFlowHelper(const uint32* orig, const uint32* resp, const char* name, ...) void Reporter::WeirdFlowHelper(const uint32* orig, const uint32* resp, const char* fmt_name, ...)
{ {
val_list* vl = new val_list(2); val_list* vl = new val_list(2);
vl->append(new AddrVal(orig)); vl->append(new AddrVal(orig));
vl->append(new AddrVal(resp)); vl->append(new AddrVal(resp));
va_list ap; va_list ap;
va_start(ap, name); va_start(ap, fmt_name);
DoLog("weird", flow_weird, stderr, 0, vl, false, false, name, ap); DoLog("weird", flow_weird, stderr, 0, vl, false, false, fmt_name, ap);
va_end(ap); va_end(ap);
delete vl; delete vl;
@ -155,22 +155,22 @@ void Reporter::WeirdFlowHelper(const uint32* orig, const uint32* resp, const cha
void Reporter::Weird(const char* name) void Reporter::Weird(const char* name)
{ {
WeirdHelper(net_weird, 0, name, 0); WeirdHelper(net_weird, 0, 0, name);
} }
void Reporter::Weird(Connection* conn, const char* name, const char* addl) void Reporter::Weird(Connection* conn, const char* name, const char* addl)
{ {
WeirdHelper(conn_weird, conn->BuildConnVal(), name, addl); WeirdHelper(conn_weird, conn->BuildConnVal(), addl, "%s", name);
} }
void Reporter::Weird(Val* conn_val, const char* name, const char* addl) void Reporter::Weird(Val* conn_val, const char* name, const char* addl)
{ {
WeirdHelper(conn_weird, conn_val, name, addl); WeirdHelper(conn_weird, conn_val, addl, "%s", name);
} }
void Reporter::Weird(const uint32* orig, const uint32* resp, const char* name) void Reporter::Weird(const uint32* orig, const uint32* resp, const char* name)
{ {
WeirdFlowHelper(orig, resp, name); WeirdFlowHelper(orig, resp, "%s", name);
} }
void Reporter::DoLog(const char* prefix, EventHandlerPtr event, FILE* out, Connection* conn, val_list* addl, bool location, bool time, const char* fmt, va_list ap) void Reporter::DoLog(const char* prefix, EventHandlerPtr event, FILE* out, Connection* conn, val_list* addl, bool location, bool time, const char* fmt, va_list ap)


@ -89,8 +89,10 @@ public:
private: private:
void DoLog(const char* prefix, EventHandlerPtr event, FILE* out, Connection* conn, val_list* addl, bool location, bool time, const char* fmt, va_list ap); void DoLog(const char* prefix, EventHandlerPtr event, FILE* out, Connection* conn, val_list* addl, bool location, bool time, const char* fmt, va_list ap);
void WeirdHelper(EventHandlerPtr event, Val* conn_val, const char* name, const char* addl, ...); // The order of addl and fmt_name needs to be like this since fmt_name can
void WeirdFlowHelper(const uint32* orig, const uint32* resp, const char* name, ...); // contain format specifiers.
void WeirdHelper(EventHandlerPtr event, Val* conn_val, const char* addl, const char* fmt_name, ...);
void WeirdFlowHelper(const uint32* orig, const uint32* resp, const char* fmt_name, ...);
int errors; int errors;
bool via_events; bool via_events;


@ -7,6 +7,7 @@ SSL_Analyzer_binpac::SSL_Analyzer_binpac(Connection* c)
: TCP_ApplicationAnalyzer(AnalyzerTag::SSL, c) : TCP_ApplicationAnalyzer(AnalyzerTag::SSL, c)
{ {
interp = new binpac::SSL::SSL_Conn(this); interp = new binpac::SSL::SSL_Conn(this);
had_gap = false;
} }
SSL_Analyzer_binpac::~SSL_Analyzer_binpac() SSL_Analyzer_binpac::~SSL_Analyzer_binpac()
@ -37,11 +38,24 @@ void SSL_Analyzer_binpac::DeliverStream(int len, const u_char* data, bool orig)
if ( TCP()->IsPartial() ) if ( TCP()->IsPartial() )
return; return;
if ( had_gap )
// XXX: If only one side had a content gap, we could still try to
// deliver data to the other side if the script layer can handle this.
return;
try
{
interp->NewData(orig, data, data + len); interp->NewData(orig, data, data + len);
} }
catch ( const binpac::Exception& e )
{
ProtocolViolation(fmt("Binpac exception: %s", e.c_msg()));
}
}
void SSL_Analyzer_binpac::Undelivered(int seq, int len, bool orig) void SSL_Analyzer_binpac::Undelivered(int seq, int len, bool orig)
{ {
TCP_ApplicationAnalyzer::Undelivered(seq, len, orig); TCP_ApplicationAnalyzer::Undelivered(seq, len, orig);
had_gap = true;
interp->NewGap(orig, len); interp->NewGap(orig, len);
} }


@ -30,6 +30,7 @@ public:
protected: protected:
binpac::SSL::SSL_Conn* interp; binpac::SSL::SSL_Conn* interp;
bool had_gap;
}; };


@ -3104,10 +3104,6 @@ void EnumVal::ValDescribe(ODesc* d) const
if ( ! ename ) if ( ! ename )
ename = "<undefined>"; ename = "<undefined>";
const char* module_offset = strstr(ename, "::");
if ( module_offset )
ename = module_offset + 2;
d->Add(ename); d->Add(ename);
} }
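
Script-visible effect of the change above: printing an enum value now keeps its module namespace instead of stripping it. A small sketch (module and labels are hypothetical):

    module Test;

    export {
        type Color: enum { RED, BLUE };
    }

    event bro_init()
        {
        print Test::RED;    # formerly printed "RED", now prints "Test::RED"
        }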


@ -791,6 +791,16 @@ function getenv%(var: string%): string
return new StringVal(env_val); return new StringVal(env_val);
%} %}
function setenv%(var: string, val: string%): bool
%{
int result = setenv(var->AsString()->CheckString(),
val->AsString()->CheckString(), 1);
if ( result < 0 )
return new Val(0, TYPE_BOOL);
return new Val(1, TYPE_BOOL);
%}
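
A script-level sketch of the new setenv() BIF (the variable name is illustrative); it returns F when the underlying call fails:

    event bro_init()
        {
        if ( ! setenv("MY_ANALYSIS_TAG", "experiment-1") )
            Reporter::error("setenv failed");
        }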
function sqrt%(x: double%): double function sqrt%(x: double%): double
%{ %{
if ( x < 0 ) if ( x < 0 )
@ -3019,6 +3029,20 @@ function syslog%(s: string%): any
extern "C" { extern "C" {
#include <GeoIPCity.h> #include <GeoIPCity.h>
} }
static GeoIP* open_geoip_db(GeoIPDBTypes type)
{
GeoIP* geoip = 0;
if ( GeoIP_db_avail(type) )
geoip = GeoIP_open_type(type, GEOIP_MEMORY_CACHE);
if ( ! geoip )
reporter->Warning("Failed to open GeoIP database: %s",
GeoIPDBFileName[type]);
return geoip;
}
#endif #endif
%%} %%}
@ -3028,86 +3052,112 @@ function lookup_location%(a: addr%) : geo_location
RecordVal* location = new RecordVal(geo_location); RecordVal* location = new RecordVal(geo_location);
#ifdef USE_GEOIP #ifdef USE_GEOIP
static bool geoip_initialized = false;
static GeoIP* geoip = 0; static GeoIP* geoip = 0;
static GeoIP* geoip_v6 = 0; static GeoIP* geoip_v6 = 0;
static bool geoip_initialized = false; static bool have_city_db = false;
static bool have_cityv6_db = false;
GeoIPRecord* gir = 0; GeoIPRecord* gir = 0;
const char* cc = 0;
if ( ! geoip_initialized ) if ( ! geoip_initialized )
{ {
geoip_initialized = true; geoip_initialized = true;
geoip = GeoIP_open_type(GEOIP_CITY_EDITION_REV0, geoip = open_geoip_db(GEOIP_CITY_EDITION_REV0);
GEOIP_MEMORY_CACHE);
if ( ! geoip ) if ( ! geoip )
{ {
builtin_error("can't initialize GeoIP City database.. trying Country version"); geoip = open_geoip_db(GEOIP_COUNTRY_EDITION);
geoip = GeoIP_open_type(GEOIP_COUNTRY_EDITION,
GEOIP_MEMORY_CACHE);
if ( ! geoip ) if ( ! geoip )
builtin_error("can't initialize GeoIP Country database"); builtin_error("Can't initialize GeoIP City/Country database");
else
reporter->Warning("Fell back to GeoIP Country database");
} }
else
have_city_db = true;
#ifdef BROv6 #ifdef BROv6
#ifdef GEOIP_COUNTRY_EDITION_V6
geoip_v6 = GeoIP_open_type(GEOIP_COUNTRY_EDITION_V6, #ifdef HAVE_GEOIP_CITY_EDITION_REV0_V6
GEOIP_MEMORY_CACHE); geoip_v6 = open_geoip_db(GEOIP_CITY_EDITION_REV0_V6);
if ( geoip_v6 )
have_cityv6_db = true;
#endif
#ifdef HAVE_GEOIP_COUNTRY_EDITION_V6
if ( ! geoip_v6 ) if ( ! geoip_v6 )
builtin_error("can't initialize the GeoIPv6 Country database"); geoip_v6 = open_geoip_db(GEOIP_COUNTRY_EDITION_V6);
#endif #endif
if ( ! geoip_v6 )
builtin_error("Can't initialize GeoIPv6 City/Country database");
#endif #endif
} }
#ifdef BROv6 #ifdef BROv6
#ifdef GEOIP_COUNTRY_EDITION_V6
#ifdef HAVE_GEOIP_COUNTRY_EDITION_V6
if ( geoip_v6 && ! is_v4_addr(a) ) if ( geoip_v6 && ! is_v4_addr(a) )
gir = GeoIP_record_by_ipnum_v6(geoip_v6, geoipv6_t(a)); {
geoipv6_t ga;
memcpy(&ga, a, 16);
if ( have_cityv6_db )
gir = GeoIP_record_by_ipnum_v6(geoip_v6, ga);
else
cc = GeoIP_country_code_by_ipnum_v6(geoip_v6, ga);
}
else else
#endif #endif
if ( geoip && is_v4_addr(a) ) if ( geoip && is_v4_addr(a) )
{ {
uint32 addr = to_v4_addr(a); uint32 addr = to_v4_addr(a);
if ( have_city_db )
gir = GeoIP_record_by_ipnum(geoip, ntohl(addr)); gir = GeoIP_record_by_ipnum(geoip, ntohl(addr));
else
cc = GeoIP_country_code_by_ipnum(geoip, ntohl(addr));
} }
#else
#else // not BROv6
if ( geoip ) if ( geoip )
{
if ( have_city_db )
gir = GeoIP_record_by_ipnum(geoip, ntohl(a)); gir = GeoIP_record_by_ipnum(geoip, ntohl(a));
else
cc = GeoIP_country_code_by_ipnum(geoip, ntohl(a));
}
#endif #endif
if ( gir ) if ( gir )
{ {
if ( gir->country_code ) if ( gir->country_code )
location->Assign(0, new StringVal(gir->country_code)); location->Assign(0, new StringVal(gir->country_code));
else
location->Assign(0, new StringVal(""));
if ( gir->region ) if ( gir->region )
location->Assign(1, new StringVal(gir->region)); location->Assign(1, new StringVal(gir->region));
else
location->Assign(1, new StringVal(""));
if ( gir->city ) if ( gir->city )
location->Assign(2, new StringVal(gir->city)); location->Assign(2, new StringVal(gir->city));
else
location->Assign(2, new StringVal(""));
if ( gir->latitude ) if ( gir->latitude )
location->Assign(3, new Val(gir->latitude, location->Assign(3, new Val(gir->latitude,
TYPE_DOUBLE)); TYPE_DOUBLE));
else
location->Assign(3, new Val(0.0, TYPE_DOUBLE));
if ( gir->longitude ) if ( gir->longitude )
location->Assign(4, new Val(gir->longitude, location->Assign(4, new Val(gir->longitude,
TYPE_DOUBLE)); TYPE_DOUBLE));
else
location->Assign(4, new Val(0.0, TYPE_DOUBLE));
GeoIPRecord_delete(gir); GeoIPRecord_delete(gir);
return location; return location;
} }
#else else if ( cc )
{
location->Assign(0, new StringVal(cc));
return location;
}
#else // not USE_GEOIP
static int missing_geoip_reported = 0; static int missing_geoip_reported = 0;
if ( ! missing_geoip_reported ) if ( ! missing_geoip_reported )
@ -3120,11 +3170,6 @@ function lookup_location%(a: addr%) : geo_location
// We can get here even if we have GeoIP support if we weren't // We can get here even if we have GeoIP support if we weren't
// able to initialize it or it didn't return any information for // able to initialize it or it didn't return any information for
// the address. // the address.
location->Assign(0, new StringVal(""));
location->Assign(1, new StringVal(""));
location->Assign(2, new StringVal(""));
location->Assign(3, new Val(0.0, TYPE_DOUBLE));
location->Assign(4, new Val(0.0, TYPE_DOUBLE));
return location; return location;
%} %}
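
Because lookup_location() no longer pads unknown fields with empty strings and zeros, script code should test the optional fields with ?$ before using them, as the SMTP and SSH scripts above now do; a minimal sketch:

    event connection_established(c: connection)
        {
        local loc = lookup_location(c$id$resp_h);
        if ( loc?$country_code )
            print fmt("%s is located in %s", c$id$resp_h, loc$country_code);
        }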
@ -3139,10 +3184,9 @@ function lookup_asn%(a: addr%) : count
if ( ! geoip_asn_initialized ) if ( ! geoip_asn_initialized )
{ {
geoip_asn_initialized = true; geoip_asn_initialized = true;
geoip_asn = GeoIP_open_type(GEOIP_ASNUM_EDITION, geoip_asn = open_geoip_db(GEOIP_ASNUM_EDITION);
GEOIP_MEMORY_CACHE);
if ( ! geoip_asn ) if ( ! geoip_asn )
builtin_error("can't initialize GeoIP ASNUM database"); builtin_error("Can't initialize GeoIP ASNUM database");
} }
if ( geoip_asn ) if ( geoip_asn )
@ -3150,17 +3194,23 @@ function lookup_asn%(a: addr%) : count
#ifdef BROv6 #ifdef BROv6
// IPv6 support showed up in 1.4.5. // IPv6 support showed up in 1.4.5.
#ifdef GEOIP_COUNTRY_EDITION_V6 #ifdef HAVE_GEOIP_COUNTRY_EDITION_V6
if ( ! is_v4_addr(a) ) if ( ! is_v4_addr(a) )
gir = GeoIP_name_by_ipnum_v6(geoip_asn, geoipv6_t(a)); {
geoipv6_t ga;
memcpy(&ga, a, 16);
gir = GeoIP_name_by_ipnum_v6(geoip_asn, ga);
}
else else
#endif #endif
if ( is_v4_addr(a) ) if ( is_v4_addr(a) )
{ {
uint32 addr = to_v4_addr(a); uint32 addr = to_v4_addr(a);
gir = GeoIP_name_by_ipnum(geoip_asn, ntohl(addr)); gir = GeoIP_name_by_ipnum(geoip_asn, ntohl(addr));
} }
#else
#else // not BROv6
gir = GeoIP_name_by_ipnum(geoip_asn, ntohl(a)); gir = GeoIP_name_by_ipnum(geoip_asn, ntohl(a));
#endif #endif
} }
@ -3171,7 +3221,8 @@ function lookup_asn%(a: addr%) : count
// the first two characters: "AS". // the first two characters: "AS".
return new Val(atoi(gir+2), TYPE_COUNT); return new Val(atoi(gir+2), TYPE_COUNT);
} }
#else
#else // not USE_GEOIP
static int missing_geoip_reported = 0; static int missing_geoip_reported = 0;
if ( ! missing_geoip_reported ) if ( ! missing_geoip_reported )
@ -3607,3 +3658,9 @@ function enable_communication%(%): any
remote_serializer->Init(); remote_serializer->Init();
return 0; return 0;
%} %}
## Returns the Bro version string.
function bro_version%(%): string
%{
return new StringVal(bro_version());
%}
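
The new bro_version() BIF is callable from script land, e.g. to stamp diagnostic output (sketch):

    event bro_init()
        {
        print fmt("running Bro %s", bro_version());
        }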


@ -98,6 +98,7 @@ extern char version[];
char* command_line_policy = 0; char* command_line_policy = 0;
vector<string> params; vector<string> params;
char* proc_status_file = 0; char* proc_status_file = 0;
int snaplen = 65535; // really want "capture entire packet"
int FLAGS_use_binpac = false; int FLAGS_use_binpac = false;
@ -145,7 +146,7 @@ void usage()
fprintf(stderr, " -g|--dump-config | dump current config into .state dir\n"); fprintf(stderr, " -g|--dump-config | dump current config into .state dir\n");
fprintf(stderr, " -h|--help|-? | command line help\n"); fprintf(stderr, " -h|--help|-? | command line help\n");
fprintf(stderr, " -i|--iface <interface> | read from given interface\n"); fprintf(stderr, " -i|--iface <interface> | read from given interface\n");
fprintf(stderr, " -Z|--doc-scripts | generate documentation for all loaded scripts\n"); fprintf(stderr, " -l|--snaplen <snaplen> | number of bytes per packet to capture from interfaces (default 65535)\n");
fprintf(stderr, " -p|--prefix <prefix> | add given prefix to policy file resolution\n"); fprintf(stderr, " -p|--prefix <prefix> | add given prefix to policy file resolution\n");
fprintf(stderr, " -r|--readfile <readfile> | read from given tcpdump file\n"); fprintf(stderr, " -r|--readfile <readfile> | read from given tcpdump file\n");
fprintf(stderr, " -y|--flowfile <file>[=<ident>] | read from given flow file\n"); fprintf(stderr, " -y|--flowfile <file>[=<ident>] | read from given flow file\n");
@ -172,6 +173,7 @@ void usage()
fprintf(stderr, " -T|--re-level <level> | set 'RE_level' for rules\n"); fprintf(stderr, " -T|--re-level <level> | set 'RE_level' for rules\n");
fprintf(stderr, " -U|--status-file <file> | Record process status in file\n"); fprintf(stderr, " -U|--status-file <file> | Record process status in file\n");
fprintf(stderr, " -W|--watchdog | activate watchdog timer\n"); fprintf(stderr, " -W|--watchdog | activate watchdog timer\n");
fprintf(stderr, " -Z|--doc-scripts | generate documentation for all loaded scripts\n");
#ifdef USE_PERFTOOLS #ifdef USE_PERFTOOLS
fprintf(stderr, " -m|--mem-leaks | show leaks [perftools]\n"); fprintf(stderr, " -m|--mem-leaks | show leaks [perftools]\n");
@ -367,7 +369,7 @@ int main(int argc, char** argv)
{"filter", required_argument, 0, 'f'}, {"filter", required_argument, 0, 'f'},
{"help", no_argument, 0, 'h'}, {"help", no_argument, 0, 'h'},
{"iface", required_argument, 0, 'i'}, {"iface", required_argument, 0, 'i'},
{"print-scripts", no_argument, 0, 'l'}, {"snaplen", required_argument, 0, 'l'},
{"doc-scripts", no_argument, 0, 'Z'}, {"doc-scripts", no_argument, 0, 'Z'},
{"prefix", required_argument, 0, 'p'}, {"prefix", required_argument, 0, 'p'},
{"readfile", required_argument, 0, 'r'}, {"readfile", required_argument, 0, 'r'},
@ -441,7 +443,7 @@ int main(int argc, char** argv)
opterr = 0; opterr = 0;
char opts[256]; char opts[256];
safe_strncpy(opts, "B:D:e:f:I:i:K:n:p:R:r:s:T:t:U:w:x:X:y:Y:z:CFGLOPSWbdghvZ", safe_strncpy(opts, "B:D:e:f:I:i:K:l:n:p:R:r:s:T:t:U:w:x:X:y:Y:z:CFGLOPSWbdghvZ",
sizeof(opts)); sizeof(opts));
#ifdef USE_PERFTOOLS #ifdef USE_PERFTOOLS
@ -476,6 +478,10 @@ int main(int argc, char** argv)
interfaces.append(optarg); interfaces.append(optarg);
break; break;
case 'l':
snaplen = atoi(optarg);
break;
case 'p': case 'p':
prefixes.append(optarg); prefixes.append(optarg);
break; break;


@ -28,3 +28,11 @@ function Reporter::error%(msg: string%): bool
reporter->PopLocation(); reporter->PopLocation();
return new Val(1, TYPE_BOOL); return new Val(1, TYPE_BOOL);
%} %}
function Reporter::fatal%(msg: string%): bool
%{
reporter->PushLocation(frame->GetCall()->GetLocationInfo());
reporter->FatalError("%s", msg->CheckString());
reporter->PopLocation();
return new Val(1, TYPE_BOOL);
%}
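
Reporter::fatal() aborts Bro with the given message, as the PF_RING script above already does; another small sketch (the environment variable is hypothetical):

    event bro_init()
        {
        if ( getenv("BRO_REQUIRED_VAR") == "" )
            Reporter::fatal("BRO_REQUIRED_VAR must be set before starting Bro");
        }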


@ -1,2 +1,5 @@
# ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state local_orig missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes #separator \x09
1128727435.450898 UWkUyAuUGXf 141.42.64.125 56730 125.190.109.199 80 tcp http 1.73330307006836 98 9417 SF - 0 ShADdFaf 12 710 10 9945 #path conn
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state local_orig missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes
#types time string addr port addr port enum string interval count count string bool count string count count count count
1128727435.450898 UWkUyAuUGXf 141.42.64.125 56730 125.190.109.199 80 tcp http 1.733303 98 9417 SF - 0 ShADdFaf 12 710 10 9945


@ -1,8 +1,20 @@
# ts node filter init success #separator \x09
1312570784.336354 - not ip6 F T #path packet_filter
# ts node filter init success #fields ts node filter init success
1312570784.550594 - (((((((((((((((((((((((port 53) or (tcp port 989)) or (tcp port 443)) or (udp and port 5353)) or (udp and port 5355)) or (tcp port 22)) or (tcp port 995)) or (port 21)) or (tcp port smtp or tcp port 587)) or (port 6667)) or (tcp port 614)) or (tcp port 990)) or (udp port 137)) or (tcp port 993)) or (tcp port 5223)) or (port 514)) or (tcp port 585)) or (tcp port 992)) or (tcp port 563)) or (tcp port 994)) or (tcp port 636)) or (tcp and port (80 or 81 or 631 or 1080 or 3138 or 8000 or 8080 or 8888))) or (port 6666)) and (not ip6) F T #types time string string bool bool
# ts node filter init success 1315167051.418730 - not ip6 F T
1312570784.765990 - port 42 F T #separator \x09
# ts node filter init success #path packet_filter
1312570784.992999 - port 56730 T T #fields ts node filter init success
#types time string string bool bool
1315167051.652097 - (((((((((((((((((((((((port 53) or (tcp port 989)) or (tcp port 443)) or (udp and port 5353)) or (udp and port 5355)) or (tcp port 22)) or (tcp port 995)) or (port 21)) or (tcp port smtp or tcp port 587)) or (port 6667)) or (tcp port 614)) or (tcp port 990)) or (udp port 137)) or (tcp port 993)) or (tcp port 5223)) or (port 514)) or (tcp port 585)) or (tcp port 992)) or (tcp port 563)) or (tcp port 994)) or (tcp port 636)) or (tcp and port (80 or 81 or 631 or 1080 or 3138 or 8000 or 8080 or 8888))) or (port 6666)) and (not ip6) F T
#separator \x09
#path packet_filter
#fields ts node filter init success
#types time string string bool bool
1315167051.885416 - port 42 F T
#separator \x09
#path packet_filter
#fields ts node filter init success
#types time string string bool bool
1315167052.120658 - port 56730 T T


@ -1,4 +1,7 @@
# ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state local_orig missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes #separator \x09
952109346.874907 UWkUyAuUGXf 10.1.2.1 11001 10.34.0.1 23 tcp - 2.10255992412567 25 0 SH - 0 - 11 280 0 0 #path conn
1128727435.450898 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 tcp http 1.73330307006836 98 9417 SF - 0 ShADdFaf 12 710 10 9945 #fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state local_orig missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes
1278600802.069419 50da4BEzauh 10.20.80.1 50343 10.0.0.15 80 tcp - 0.00415205955505371 9 3429 SF - 0 ShADadfF 7 361 7 3801 #types time string addr port addr port enum string interval count count string bool count string count count count count
952109346.874907 UWkUyAuUGXf 10.1.2.1 11001 10.34.0.1 23 tcp - 2.102560 25 0 SH - 0 - 11 280 0 0
1128727435.450898 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 tcp http 1.733303 98 9417 SF - 0 ShADdFaf 12 710 10 9945
1278600802.069419 50da4BEzauh 10.20.80.1 50343 10.0.0.15 80 tcp - 0.004152 9 3429 SF - 0 ShADadfF 7 361 7 3801


@ -263,7 +263,7 @@ Redefinitions
:Type: :bro:type:`enum` :Type: :bro:type:`enum`
.. bro:enum:: Example::EXAMPLE Log::ID .. bro:enum:: Example::LOG Log::ID
:bro:type:`Example::SimpleEnum` :bro:type:`Example::SimpleEnum`


@ -1,2 +1,5 @@
# ts uid id.orig_h id.orig_p id.resp_h id.resp_p method host uri referrer user_agent request_content_length response_content_length status_code status_msg filename tags username password proxied mime_type md5 extraction_file #separator \x09
1310750785.32134 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 GET www.icir.org / - Wget/1.10 - 9130 200 OK - - - - - text/html - - #path http
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p method host uri referrer user_agent request_content_length response_content_length status_code status_msg filename tags username password proxied mime_type md5 extraction_file
#types time string addr port addr port string string string string string count count count string string table string string table string string file
1315167107.671488 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 GET www.icir.org / - Wget/1.10 - 9130 200 OK - - - - - text/html - -


@ -1,2 +1,5 @@
# ts uid id.orig_h id.orig_p id.resp_h id.resp_p method host uri referrer user_agent request_content_length response_content_length status_code status_msg filename tags username password proxied mime_type md5 extraction_file #separator \x09
1310750785.32134 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 GET www.icir.org / - Wget/1.10 - 9130 200 OK - - - - - text/html - - #path http
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p method host uri referrer user_agent request_content_length response_content_length status_code status_msg filename tags username password proxied mime_type md5 extraction_file
#types time string addr port addr port string string string string string count count count string string table string string table string string file
1315167107.671488 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 GET www.icir.org / - Wget/1.10 - 9130 200 OK - - - - - text/html - -


@ -1,2 +1,5 @@
# ts uid id.orig_h id.orig_p id.resp_h id.resp_p method host uri referrer user_agent request_content_length response_content_length status_code status_msg filename tags username password proxied mime_type md5 extraction_file #separator \x09
1310750770.8185 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 GET www.icir.org / - Wget/1.10 - 9130 200 OK - - - - - text/html - - #path http
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p method host uri referrer user_agent request_content_length response_content_length status_code status_msg filename tags username password proxied mime_type md5 extraction_file
#types time string addr port addr port string string string string string count count count string string table string string table string string file
1315167116.842377 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 GET www.icir.org / - Wget/1.10 - 9130 200 OK - - - - - text/html - -


@ -1,2 +1,5 @@
# ts uid id.orig_h id.orig_p id.resp_h id.resp_p method host uri referrer user_agent request_content_length response_content_length status_code status_msg filename tags username password proxied mime_type md5 extraction_file #separator \x09
1310750770.8185 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 GET www.icir.org / - Wget/1.10 - 9130 200 OK - - - - - text/html - - #path http
#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p method host uri referrer user_agent request_content_length response_content_length status_code status_msg filename tags username password proxied mime_type md5 extraction_file
#types time string addr port addr port string string string string string count count count string string table string string table string string file
1315167116.842377 56gKBmhBBB6 141.42.64.125 56730 125.190.109.199 80 GET www.icir.org / - Wget/1.10 - 9130 200 OK - - - - - text/html - -


@ -0,0 +1,4 @@
ONE
ONE
TEST::TWO
TEST::TWO


@ -1 +1 @@
c test::c


@ -1 +1,2 @@
[major=4, minor=4, minor2=<uninitialized>, addl=<uninitialized>] [major=4, minor=4, minor2=<uninitialized>, addl=<uninitialized>]
[c=1, f=[i=2.0 hrs, s=<uninitialized>]]
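
The added baseline line above exercises the nested record coercion fix from Expr.cc; a hypothetical script along these lines now coerces the inner record automatically:

    type Inner: record {
        i: interval;
        s: string &optional;
    };

    type Outer: record {
        c: count;
        f: Inner;
    };

    event bro_init()
        {
        local x: Outer = [$c=1, $f=[$i=2 hrs]];
        print x;    # [c=1, f=[i=2.0 hrs, s=<uninitialized>]]
        }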


@ -1,3 +1,6 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #separator \x09
1313212563.234939 1.2.3.4 1234 2.3.4.5 80 success unknown #path ssh-new-default
1313212563.234939 1.2.3.4 1234 2.3.4.5 80 failure US #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167052.603186 1.2.3.4 1234 2.3.4.5 80 success unknown
1315167052.603186 1.2.3.4 1234 2.3.4.5 80 failure US


@ -1,6 +1,9 @@
PREFIX<>t|id.orig_h|id.orig_p|id.resp_h|id.resp_p|status|country|b PREFIX<>separator \x7c
1299718506.56593|1.2.3.4|1234|2.3.4.5|80|success|unknown|NOT-SET PREFIX<>path|ssh
1299718506.56593|1.2.3.4|1234|2.3.4.5|80|NOT-SET|US|NOT-SET PREFIX<>fields|t|id.orig_h|id.orig_p|id.resp_h|id.resp_p|status|country|b
1299718506.56593|1.2.3.4|1234|2.3.4.5|80|failure|UK|NOT-SET PREFIX<>types|time|addr|port|addr|port|string|string|bool
1299718506.56593|1.2.3.4|1234|2.3.4.5|80|NOT-SET|BR|NOT-SET 1315167052.828457|1.2.3.4|1234|2.3.4.5|80|success|unknown|NOT-SET
1299718506.56593|1.2.3.4|1234|2.3.4.5|80|failure|EMPTY|T 1315167052.828457|1.2.3.4|1234|2.3.4.5|80|NOT-SET|US|NOT-SET
1315167052.828457|1.2.3.4|1234|2.3.4.5|80|failure|UK|NOT-SET
1315167052.828457|1.2.3.4|1234|2.3.4.5|80|NOT-SET|BR|NOT-SET
1315167052.828457|1.2.3.4|1234|2.3.4.5|80|failure|EMPTY|T


@ -1,4 +1,7 @@
# data #separator \x09
#path test
#fields data
#types time
1234567890.000000 1234567890.000000
1234567890.000000 1234567890.000000
1234567890.010000 1234567890.010000


@ -1,2 +1,5 @@
# status country a1 b1 b2 #separator \x09
#path ssh
#fields status country a1 b1 b2
#types string string count count count
success unknown 1 3 4 success unknown 1 3 4


@ -1,4 +1,7 @@
# status country #separator \x09
#path ssh
#fields status country
#types string string
success unknown success unknown
failure US failure US
failure UK failure UK


@ -1,6 +1,9 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #separator \x09
1299809561.67372 1.2.3.4 1234 2.3.4.5 80 success unknown #path ssh
1299809561.67372 1.2.3.4 1234 2.3.4.5 80 failure US #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
1299809561.67372 1.2.3.4 1234 2.3.4.5 80 failure UK #types time addr port addr port string string
1299809561.67372 1.2.3.4 1234 2.3.4.5 80 success BR 1315167053.369918 1.2.3.4 1234 2.3.4.5 80 success unknown
1299809561.67372 1.2.3.4 1234 2.3.4.5 80 failure MX 1315167053.369918 1.2.3.4 1234 2.3.4.5 80 failure US
1315167053.369918 1.2.3.4 1234 2.3.4.5 80 failure UK
1315167053.369918 1.2.3.4 1234 2.3.4.5 80 success BR
1315167053.369918 1.2.3.4 1234 2.3.4.5 80 failure MX


@ -1,4 +1,7 @@
# id.orig_p id.resp_h id.resp_p status country #separator \x09
#path ssh
#fields id.orig_p id.resp_h id.resp_p status country
#types port addr port string string
1234 2.3.4.5 80 success unknown 1234 2.3.4.5 80 success unknown
1234 2.3.4.5 80 failure US 1234 2.3.4.5 80 failure US
1234 2.3.4.5 80 failure UK 1234 2.3.4.5 80 failure UK


@ -1,2 +1,5 @@
# t f #separator \x09
1303098703.62603 Foo.log #path ssh
#fields t f
#types time file
1315167053.585834 Foo.log


@ -1,6 +1,9 @@
# t id.orig_h #separator \x09
1303064007.48299 1.2.3.4 #path ssh
1303064007.48299 1.2.3.4 #fields t id.orig_h
1303064007.48299 1.2.3.4 #types time addr
1303064007.48299 1.2.3.4 1315167053.694473 1.2.3.4
1303064007.48299 1.2.3.4 1315167053.694473 1.2.3.4
1315167053.694473 1.2.3.4
1315167053.694473 1.2.3.4
1315167053.694473 1.2.3.4


@ -5,17 +5,38 @@ static-prefix-1-MX.log
static-prefix-1-US.log static-prefix-1-US.log
static-prefix-2-MX2.log static-prefix-2-MX2.log
static-prefix-2-UK.log static-prefix-2-UK.log
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #separator \x09
1313212701.542245 1.2.3.4 1234 2.3.4.5 80 success BR #path static-prefix-0-BR
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
1313212701.542245 1.2.3.4 1234 2.3.4.5 80 failure MX3 #types time addr port addr port string string
# t id.orig_h id.orig_p id.resp_h id.resp_p status country 1315167053.803346 1.2.3.4 1234 2.3.4.5 80 success BR
1313212701.542245 1.2.3.4 1234 2.3.4.5 80 success unknown #separator \x09
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #path static-prefix-0-MX3
1313212701.542245 1.2.3.4 1234 2.3.4.5 80 failure MX #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #types time addr port addr port string string
1313212701.542245 1.2.3.4 1234 2.3.4.5 80 failure US 1315167053.803346 1.2.3.4 1234 2.3.4.5 80 failure MX3
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #separator \x09
1313212701.542245 1.2.3.4 1234 2.3.4.5 80 failure MX2 #path static-prefix-0-unknown
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
1313212701.542245 1.2.3.4 1234 2.3.4.5 80 failure UK #types time addr port addr port string string
1315167053.803346 1.2.3.4 1234 2.3.4.5 80 success unknown
#separator \x09
#path static-prefix-1-MX
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167053.803346 1.2.3.4 1234 2.3.4.5 80 failure MX
#separator \x09
#path static-prefix-1-US
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167053.803346 1.2.3.4 1234 2.3.4.5 80 failure US
#separator \x09
#path static-prefix-2-MX2
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167053.803346 1.2.3.4 1234 2.3.4.5 80 failure MX2
#separator \x09
#path static-prefix-2-UK
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167053.803346 1.2.3.4 1234 2.3.4.5 80 failure UK


@ -1,2 +0,0 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p status country
1299718503.16177 1.2.3.4 1234 2.3.4.5 80 failure US


@ -1,2 +0,0 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p status country
1299718503.16177 1.2.3.4 1234 2.3.4.5 80 success -


@ -0,0 +1,5 @@
#separator \x09
#path test.failure
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167053.923545 1.2.3.4 1234 2.3.4.5 80 failure US


@ -0,0 +1,5 @@
#separator \x09
#path test.success
#fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167053.923545 1.2.3.4 1234 2.3.4.5 80 success -

View file

@ -1,2 +1,5 @@
# b i e c p sn a d t iv s sc ss se vc ve #separator \x09
T -42 Test::TEST 21 123 10.0.0.0/24 1.2.3.4 3.14 1313623666.027768 100.0 hurz 2,4,1,3 CC,AA,BB EMPTY 10,20,30 EMPTY #path test
#fields b i e c p sn a d t iv s sc ss se vc ve
#types bool int enum count port subnet addr double time interval string table table table vector vector
T -42 Test::LOG 21 123 10.0.0.0/24 1.2.3.4 3.14 1315167054.320958 100.000000 hurz 2,4,1,3 CC,AA,BB EMPTY 10,20,30 EMPTY


@ -1,4 +1,7 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #separator \x09
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 failure US #path test.failure
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 failure UK #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 failure MX #types time addr port addr port string string
1315167059.502670 1.2.3.4 1234 2.3.4.5 80 failure US
1315167059.502670 1.2.3.4 1234 2.3.4.5 80 failure UK
1315167059.502670 1.2.3.4 1234 2.3.4.5 80 failure MX


@ -1,6 +1,9 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #separator \x09
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 success - #path test
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 failure US #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 failure UK #types time addr port addr port string string
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 success BR 1315167059.502670 1.2.3.4 1234 2.3.4.5 80 success -
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 failure MX 1315167059.502670 1.2.3.4 1234 2.3.4.5 80 failure US
1315167059.502670 1.2.3.4 1234 2.3.4.5 80 failure UK
1315167059.502670 1.2.3.4 1234 2.3.4.5 80 success BR
1315167059.502670 1.2.3.4 1234 2.3.4.5 80 failure MX


@ -1,3 +1,6 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #separator \x09
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 success - #path test.success
1312565744.470171 1.2.3.4 1234 2.3.4.5 80 success BR #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167059.502670 1.2.3.4 1234 2.3.4.5 80 success -
1315167059.502670 1.2.3.4 1234 2.3.4.5 80 success BR


@ -1,3 +1,6 @@
# t id.orig_h id.orig_p id.resp_h id.resp_p status country #separator \x09
1299718503.28253 1.2.3.4 1234 2.3.4.5 80 failure US #path ssh.failure
1299718503.28253 1.2.3.4 1234 2.3.4.5 80 failure UK #fields t id.orig_h id.orig_p id.resp_h id.resp_p status country
#types time addr port addr port string string
1315167066.575996 1.2.3.4 1234 2.3.4.5 80 failure US
1315167066.575996 1.2.3.4 1234 2.3.4.5 80 failure UK

Some files were not shown because too many files have changed in this diff.