Merge remote-tracking branch 'origin/master' into topic/bernhard/sqlite

Conflicts:
	scripts/base/frameworks/logging/__load__.bro
	src/CMakeLists.txt
	src/logging.bif
	src/types.bif
Bernhard Amann 2012-07-25 15:04:23 -07:00
commit da157c8ded
296 changed files with 4703 additions and 2175 deletions

@@ -8,12 +8,13 @@ export {
 ## into files. This is primarily for debugging purposes.
 const output_to_stdout = F &redef;
-## If true, include a header line with column names and description
-## of the other ASCII logging options that were used.
-const include_header = T &redef;
+## If true, include lines with log meta information such as column names with
+## types, the values of ASCII logging options that are in use, and the time when the
+## file was opened and closed (the latter at the end).
+const include_meta = T &redef;
-## Prefix for the header line if included.
-const header_prefix = "#" &redef;
+## Prefix for lines with meta information.
+const meta_prefix = "#" &redef;
 ## Separator between fields.
 const separator = "\t" &redef;
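
The new meta options are intended to be tuned via redef from a site's own scripts rather than edited in place. A minimal sketch, assuming the writer's module name is LogAscii (the module line is not shown in this hunk); the option names come from the export block above:

# Hypothetical local.bro snippet (LogAscii module name assumed):
# drop the meta lines entirely and switch to a pipe-separated layout.
redef LogAscii::include_meta = F;
redef LogAscii::separator = "|";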

@@ -0,0 +1,46 @@
+##! Log writer for sending logs to an ElasticSearch server.
+##!
+##! Note: This module is in testing and is not yet considered stable!
+##!
+##! There is one known memory issue. If your elasticsearch server is
+##! running slowly and taking too long to return from bulk insert
+##! requests, the message queue to the writer thread will continue
+##! growing larger and larger, giving the appearance of a memory leak.
+module LogElasticSearch;
+export {
+## Name of the ES cluster
+const cluster_name = "elasticsearch" &redef;
+## ES Server
+const server_host = "127.0.0.1" &redef;
+## ES Port
+const server_port = 9200 &redef;
+## Name of the ES index
+const index_prefix = "bro" &redef;
+## The ES type prefix comes before the name of the related log.
+## e.g. prefix = "bro_" would create types of bro_dns, bro_software, etc.
+const type_prefix = "" &redef;
+## The time before an ElasticSearch transfer will time out.
+## This is not working!
+const transfer_timeout = 2secs;
+## The batch size is the number of messages that will be queued up before
+## they are sent to be bulk indexed.
+const max_batch_size = 1000 &redef;
+## The maximum amount of wall-clock time that is allowed to pass without
+## finishing a bulk log send. This represents the maximum delay you
+## would like to have with your logs before they are sent to ElasticSearch.
+const max_batch_interval = 1min &redef;
+## The maximum byte size for a buffered JSON string to send to the bulk
+## insert API.
+const max_byte_size = 1024 * 1024 &redef;
+}
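
These options are likewise meant to be adjusted via redef. A minimal sketch of pointing the writer at a remote node and batching more aggressively; the option names come from the export block above, while the host and batch values are placeholders and the Log::WRITER_ELASTICSEARCH enum name for routing a stream through the writer is an assumption, not shown in this diff:

# Hypothetical local.bro snippet (values and writer enum name assumed):
redef LogElasticSearch::server_host = "10.0.0.5";
redef LogElasticSearch::max_batch_size = 5000;
redef LogElasticSearch::max_batch_interval = 30secs;

event bro_init()
	{
	# Also send conn.log entries through the ElasticSearch writer.
	Log::add_filter(Conn::LOG, [$name="es", $writer=Log::WRITER_ELASTICSEARCH]);
	}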

@@ -3,8 +3,8 @@
 module LogNone;
 export {
-## If true, output some debugging output that can be useful for unit
-##testing the logging framework.
+## If true, output debugging output that can be useful for unit
+## testing the logging framework.
 const debug = F &redef;
 }
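
For unit testing, the debug switch is typically combined with making the None writer the default output. A minimal sketch, assuming Log::default_writer is redef-able as in the stock logging framework; only LogNone::debug appears in the diff above:

# Hypothetical test setup: route all streams to the None writer and
# enable its debugging output (Log::default_writer hook is assumed).
redef Log::default_writer = Log::WRITER_NONE;
redef LogNone::debug = T;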