Merge remote-tracking branch 'origin/master' into topic/johanna/openflow

Johanna Amann 2015-07-08 12:15:09 -07:00
commit eb9fbd1258
93 changed files with 1289 additions and 544 deletions

View file

@ -0,0 +1 @@
Support for Portable Executable (PE) file analysis.

View file

@ -0,0 +1,2 @@
The Broker communication framework facilitates connecting to remote Bro
instances to share state and transfer events.

View file

@ -78,6 +78,12 @@ signature file-coldfusion {
file-magic /^([\x0d\x0a[:blank:]]*(<!--.*-->)?)*<(CFPARAM|CFSET|CFIF)/
}
# Adobe Flash Media Manifest
signature file-f4m {
file-mime "application/f4m", 49
file-magic /^(\xef\xbb\xbf)?([\x0d\x0a[:blank:]]*(<!--.*-->)?[\x0d\x0a[:blank:]]*)*(<\?xml .*\?>)?([\x0d\x0a[:blank:]]*(<!--.*-->)?[\x0d\x0a[:blank:]]*)*<[mM][aA][nN][iI][fF][eE][sS][tT][\x0d\x0a[:blank:]]{1,}xmlns=\"http:\/\/ns\.adobe\.com\/f4m\//
}
# Microsoft LNK files
signature file-lnk {
file-mime "application/x-ms-shortcut", 49

View file

@ -6,9 +6,10 @@
module Log;
export {
## Type that defines an ID unique to each log stream. Scripts creating new log
## streams need to redef this enum to add their own specific log ID. The log ID
## implicitly determines the default name of the generated log file.
## Type that defines an ID unique to each log stream. Scripts creating new
## log streams need to redef this enum to add their own specific log ID.
## The log ID implicitly determines the default name of the generated log
## file.
type Log::ID: enum {
## Dummy place-holder.
UNKNOWN
@ -20,25 +21,24 @@ export {
## If true, remote logging is by default enabled for all filters.
const enable_remote_logging = T &redef;
## Default writer to use if a filter does not specify
## anything else.
## Default writer to use if a filter does not specify anything else.
const default_writer = WRITER_ASCII &redef;
## Default separator between fields for logwriters.
## Can be overwritten by individual writers.
## Default separator to use between fields.
## Individual writers can use a different value.
const separator = "\t" &redef;
## Separator between set elements.
## Can be overwritten by individual writers.
## Default separator to use between elements of a set.
## Individual writers can use a different value.
const set_separator = "," &redef;
## String to use for empty fields. This should be different from
## *unset_field* to make the output unambiguous.
## Can be overwritten by individual writers.
## Default string to use for empty fields. This should be different
## from *unset_field* to make the output unambiguous.
## Individual writers can use a different value.
const empty_field = "(empty)" &redef;
## String to use for an unset &optional field.
## Can be overwritten by individual writers.
## Default string to use for an unset &optional field.
## Individual writers can use a different value.
const unset_field = "-" &redef;
## Type defining the content of a logging stream.
@ -69,7 +69,7 @@ export {
## If no ``path`` is defined for the filter, then the first call
## to the function will contain an empty string.
##
## rec: An instance of the streams's ``columns`` type with its
## rec: An instance of the stream's ``columns`` type with its
## fields set to the values to be logged.
##
## Returns: The path to be used for the filter.
@ -87,7 +87,8 @@ export {
terminating: bool; ##< True if rotation occurred due to Bro shutting down.
};
## Default rotation interval. Zero disables rotation.
## Default rotation interval to use for filters that do not specify
## an interval. Zero disables rotation.
##
## Note that this is overridden by the BroControl LogRotationInterval
## option.
@ -122,8 +123,8 @@ export {
## Indicates whether a log entry should be recorded.
## If not given, all entries are recorded.
##
## rec: An instance of the streams's ``columns`` type with its
## fields set to the values to logged.
## rec: An instance of the stream's ``columns`` type with its
## fields set to the values to be logged.
##
## Returns: True if the entry is to be recorded.
pred: function(rec: any): bool &optional;
@ -131,10 +132,10 @@ export {
## Output path for recording entries matching this
## filter.
##
## The specific interpretation of the string is up to
## the used writer, and may for example be the destination
## The specific interpretation of the string is up to the
## logging writer, and may for example be the destination
## file name. Generally, filenames are expected to be given
## without any extensions; writers will add appropiate
## without any extensions; writers will add appropriate
## extensions automatically.
##
## If this path is found to conflict with another filter's
@ -151,7 +152,7 @@ export {
## easy to flood the disk by returning a new string for each
## connection. Upon adding a filter to a stream, if neither
## ``path`` nor ``path_func`` is explicitly set by them, then
## :bro:see:`default_path_func` is used.
## :bro:see:`Log::default_path_func` is used.
##
## id: The ID associated with the log stream.
##
@ -161,7 +162,7 @@ export {
## then the first call to the function will contain an
## empty string.
##
## rec: An instance of the streams's ``columns`` type with its
## rec: An instance of the stream's ``columns`` type with its
## fields set to the values to be logged.
##
## Returns: The path to be used for the filter, which will be
@ -185,7 +186,7 @@ export {
## If true, entries are passed on to remote peers.
log_remote: bool &default=enable_remote_logging;
## Rotation interval.
## Rotation interval. Zero disables rotation.
interv: interval &default=default_rotation_interval;
## Callback function to trigger for rotated files. If not set, the
@ -215,9 +216,9 @@ export {
## Removes a logging stream completely, stopping all the threads.
##
## id: The ID enum to be associated with the new logging stream.
## id: The ID associated with the logging stream.
##
## Returns: True if a new stream was successfully removed.
## Returns: True if the stream was successfully removed.
##
## .. bro:see:: Log::create_stream
global remove_stream: function(id: ID) : bool;
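Taken together, the options documented in this hunk fit the usual pattern of declaring a stream and attaching a filter. A minimal sketch, assuming a hypothetical ``Example`` module (the module, record, and filter names are illustrative and not part of this change)::

    module Example;

    export {
        ## Hypothetical log stream; the ID implicitly names the output
        ## file ("example.log").
        redef enum Log::ID += { LOG };

        type Info: record {
            ts:  time   &log;
            msg: string &log;
        };
    }

    function important_only(rec: Info): bool
        {
        # Record only entries that carry a non-empty message.
        return rec$msg != "";
        }

    event bro_init()
        {
        Log::create_stream(Example::LOG, [$columns=Info]);

        # Filter that writes selected entries under an alternate path and
        # rotates hourly instead of Log::default_rotation_interval.
        Log::add_filter(Example::LOG, [$name = "important-only",
                                       $path = "example-important",
                                       $pred = important_only,
                                       $interv = 1 hr]);

        Log::write(Example::LOG, [$ts=network_time(), $msg="stream created"]);
        }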

View file

@ -1,15 +1,15 @@
##! Interface for the ASCII log writer. Redefinable options are available
##! to tweak the output format of ASCII logs.
##!
##! The ASCII writer supports currently one writer-specific filter option via
##! ``config``: setting ``tsv`` to the string ``T`` turns the output into
##! The ASCII writer currently supports one writer-specific per-filter config
##! option: setting ``tsv`` to the string ``T`` turns the output into
##! "tab-separated-value" mode where only a single header row with the column
##! names is printed out as meta information, with no "# fields" prepended; no
##! other meta data gets included in that mode.
##! other meta data gets included in that mode. Example filter using this::
##!
##! Example filter using this::
##!
##! local my_filter: Log::Filter = [$name = "my-filter", $writer = Log::WRITER_ASCII, $config = table(["tsv"] = "T")];
##! local f: Log::Filter = [$name = "my-filter",
##! $writer = Log::WRITER_ASCII,
##! $config = table(["tsv"] = "T")];
##!
module LogAscii;
@ -29,6 +29,8 @@ export {
## Format of timestamps when writing out JSON. By default, the JSON
## formatter will use double values for timestamps which represent the
## number of seconds from the UNIX epoch.
##
## This option is also available as a per-filter ``$config`` option.
const json_timestamps: JSON::TimestampFormat = JSON::TS_EPOCH &redef;
## If true, include lines with log meta information such as column names
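For completeness, switching the writer into JSON mode and choosing the timestamp format is a two-line redef. The per-filter ``$config`` route mentioned in the new comment works the same way as the ``tsv`` example above, but since this hunk does not show the exact string value expected there, the global form is used here::

    # Emit JSON instead of tab-separated ASCII, with ISO 8601 timestamps
    # rather than the default epoch doubles.
    redef LogAscii::use_json = T;
    redef LogAscii::json_timestamps = JSON::TS_ISO8601;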

View file

@ -19,7 +19,7 @@ export {
const unset_field = Log::unset_field &redef;
## String to use for empty fields. This should be different from
## *unset_field* to make the output unambiguous.
## *unset_field* to make the output unambiguous.
const empty_field = Log::empty_field &redef;
}

View file

@ -966,6 +966,11 @@ const tcp_max_above_hole_without_any_acks = 16384 &redef;
## .. bro:see:: tcp_max_initial_window tcp_max_above_hole_without_any_acks
const tcp_excessive_data_without_further_acks = 10 * 1024 * 1024 &redef;
## Number of TCP segments to buffer beyond what's been acknowledged already
## to detect retransmission inconsistencies. Zero disables any additional
## buffering.
const tcp_max_old_segments = 0 &redef;
## For services without a handler, these sets define originator-side ports
## that still trigger reassembly.
##
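The new ``tcp_max_old_segments`` tunable defaults to off; enabling the retransmission-consistency buffering is a one-line redef (the value below is an arbitrary illustration, not a recommendation)::

    # Keep a bounded amount of already-acknowledged data around so
    # retransmitted segments can be compared against the original payload.
    redef tcp_max_old_segments = 1000;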

View file

@ -86,7 +86,7 @@ event gridftp_possibility_timeout(c: connection)
{
# only remove if we did not already detect it and the connection
# is not yet at its end.
if ( "gridftp-data" !in c$service && ! c$conn?$service )
if ( "gridftp-data" !in c$service && ! (c?$conn && c$conn?$service) )
{
ConnThreshold::delete_bytes_threshold(c, size_threshold, T);
ConnThreshold::delete_bytes_threshold(c, size_threshold, F);
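The fix illustrates the general guard pattern for nested ``&optional`` record fields: ``c$conn?$service`` still dereferences ``c$conn``, so the outer field has to be checked first. A standalone sketch of the same pattern::

    event connection_state_remove(c: connection)
        {
        # Unsafe: if c$conn is unset, evaluating c$conn?$service is a
        # runtime error, because c$conn itself is dereferenced.
        # Safe: short-circuit on the outer &optional field first.
        if ( c?$conn && c$conn?$service )
            print c$conn$service;
        }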

View file

@ -0,0 +1 @@
Support for Kerberos protocol analysis.

View file

@ -1,4 +1,5 @@
##! Implements base functionality for KRB analysis. Generates the krb.log file.
##! Implements base functionality for KRB analysis. Generates the kerberos.log
##! file.
module KRB;

View file

@ -0,0 +1 @@
Support for MySQL protocol analysis.

View file

@ -0,0 +1 @@
Support for RADIUS protocol analysis.

View file

@ -0,0 +1 @@
Support for Remote Desktop Protocol (RDP) analysis.

View file

@ -0,0 +1 @@
Support for Session Initiation Protocol (SIP) analysis.

View file

@ -0,0 +1 @@
Support for SSH protocol analysis.

File diff suppressed because one or more lines are too long

View file

@ -6,23 +6,23 @@ const url_regex = /^([a-zA-Z\-]{3,5})(:\/\/[^\/?#"'\r\n><]*)([^?#"'\r\n><]*)([^[
## A URI, as parsed by :bro:id:`decompose_uri`.
type URI: record {
## The URL's scheme.
scheme: string &optional;
scheme: string &optional;
## The location, which could be a domain name or an IP address. Left empty if not
## specified.
netlocation: string;
netlocation: string;
## Port number, if included in URI.
portnum: count &optional;
portnum: count &optional;
## Full path, including the file name. Will be '/' if there's no path given.
path: string;
path: string;
## Full file name, including extension, if there is a file name.
file_name: string &optional;
file_name: string &optional;
## The base filename, without extension, if there is a file name.
file_base: string &optional;
file_base: string &optional;
## The filename's extension, if there is a file name.
file_ext: string &optional;
file_ext: string &optional;
## A table of all query parameters, mapping their keys to values, if there's a
## query.
params: table[string] of string &optional;
params: table[string] of string &optional;
};
## Extracts URLs discovered in arbitrary text.
@ -46,19 +46,19 @@ function find_all_urls_without_scheme(s: string): string_set
return return_urls;
}
function decompose_uri(s: string): URI
function decompose_uri(uri: string): URI
{
local parts: string_vec;
local u: URI = [$netlocation="", $path="/"];
local u = URI($netlocation="", $path="/");
local s = uri;
if ( /\?/ in s)
if ( /\?/ in s )
{
# Parse query.
u$params = table();
parts = split_string1(s, /\?/);
s = parts[0];
local query: string = parts[1];
local query = parts[1];
if ( /&/ in query )
{
@ -73,7 +73,7 @@ function decompose_uri(s: string): URI
}
}
}
else
else if ( /=/ in query )
{
parts = split_string1(query, /=/);
u$params[parts[0]] = parts[1];
@ -97,14 +97,14 @@ function decompose_uri(s: string): URI
if ( |u$path| > 1 && u$path[|u$path| - 1] != "/" )
{
local last_token: string = find_last(u$path, /\/.+/);
local last_token = find_last(u$path, /\/.+/);
local full_filename = split_string1(last_token, /\//)[1];
if ( /\./ in full_filename )
{
u$file_name = full_filename;
u$file_base = split_string1(full_filename, /\./)[0];
u$file_ext = split_string1(full_filename, /\./)[1];
u$file_ext = split_string1(full_filename, /\./)[1];
}
else
{
@ -122,7 +122,9 @@ function decompose_uri(s: string): URI
u$portnum = to_count(parts[1]);
}
else
{
u$netlocation = s;
}
return u;
}
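A brief usage sketch of the refactored function; the URL is made up, and the expected values follow from the parsing logic above (assuming ``find_last`` yields the final slash-anchored match, as the code intends)::

    @load base/utils/urls

    event bro_init()
        {
        local u = decompose_uri("http://example.com:8080/docs/report.pdf?lang=en&draft=T");

        print u$scheme;          # "http"
        print u$netlocation;     # "example.com"
        print u$portnum;         # 8080
        print u$path;            # "/docs/report.pdf"
        print u$file_name;       # "report.pdf"
        print u$file_ext;        # "pdf"
        print u$params["lang"];  # "en"
        }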