mirror of
https://github.com/zeek/zeek.git
synced 2025-10-03 07:08:19 +00:00

- Fixed a bug in how data is sent to the ElasticSearch server.
- Added a feature to limit how much data may be buffered before it is sent to the ElasticSearch server; configured with the LogElasticSearch::max_byte_size variable.
34 lines
1 KiB
Text
##! Configuration options for the ElasticSearch log writer.

module LogElasticSearch;

export {
	## Name of the ES cluster.
	const cluster_name = "elasticsearch" &redef;

	## ES server host.
	const server_host = "127.0.0.1" &redef;

	## ES server port.
	const server_port = 9200 &redef;

	## Name of the ES index.
	const index_name = "bro" &redef;

	## The ES type prefix comes before the name of the related log.
	## e.g. prefix = "bro_" would create types of bro_dns, bro_software, etc.
	const type_prefix = "" &redef;

	## The batch size is the number of messages that will be queued up before
	## they are sent to be bulk indexed.
	## Note: this is mainly a memory usage parameter.
	const max_batch_size = 1000 &redef;

	## The maximum amount of wall-clock time that is allowed to pass without
	## finishing a bulk log send.  This represents the maximum delay you
	## would like to have with your logs before they show up in ElasticSearch.
	const max_batch_interval = 1min &redef;

	## The maximum byte size for a buffered JSON string to send to the bulk
	## insert API.
	const max_byte_size = 1024 * 1024 &redef;
}