mirror of https://github.com/zeek/zeek.git
Merge branch 'master' into topic/jsiwek/broxygen
commit 15b1904ca8
155 changed files with 1502 additions and 721 deletions
src/bro.bif
@@ -1,10 +1,11 @@
##! A collection of built-in functions that implement a variety of things
##! such as general programming algorithms, string processing, math functions,
##! introspection, type conversion, file/directory manipulation, packet filtering,
##! inter-process communication and controlling protocol analyzer behavior.
##! introspection, type conversion, file/directory manipulation, packet
##! filtering, interprocess communication and controlling protocol analyzer
##! behavior.
##!
##! You'll find most of Bro's built-in functions that aren't protocol-specific in
##! this file.
##! You'll find most of Bro's built-in functions that aren't protocol-specific
##! in this file.

%%{ // C segment
#include <math.h>
@@ -615,7 +616,7 @@ function md5_hmac%(...%): string
## Constructs an MD5 handle to enable incremental hash computation. You can
## feed data to the returned opaque value with :bro:id:`md5_hash_update` and
## eventually need to call :bro:id:`md5_hash_finish` to finish the computation
## and get the hash digest as result.
## and get the hash digest.
##
## For example, when computing incremental MD5 values of transferred files in
## multiple concurrent HTTP connections, one keeps an optional handle in the
@@ -640,12 +641,12 @@ function md5_hash_init%(%): opaque of md5
## Constructs an SHA1 handle to enable incremental hash computation. You can
## feed data to the returned opaque value with :bro:id:`sha1_hash_update` and
## finally need to call :bro:id:`sha1_hash_finish` to finish the computation
## and get the hash digest as result.
## and get the hash digest.
##
## For example, when computing incremental SHA1 values of transferred files in
## multiple concurrent HTTP connections, one keeps an optional handle in the
## HTTP session record. Then, one would call
## ``c$http$sha1_handle = sha1_hash_init()`` ## once before invoking
## ``c$http$sha1_handle = sha1_hash_init()`` once before invoking
## ``sha1_hash_update(c$http$sha1_handle, some_more_data)`` in the
## :bro:id:`http_entity_data` event handler. When all data has arrived, a call
## to :bro:id:`sha1_hash_finish` returns the final hash value.
@@ -665,12 +666,12 @@ function sha1_hash_init%(%): opaque of sha1
## Constructs an SHA256 handle to enable incremental hash computation. You can
## feed data to the returned opaque value with :bro:id:`sha256_hash_update` and
## finally need to call :bro:id:`sha256_hash_finish` to finish the computation
## and get the hash digest as result.
## and get the hash digest.
##
## For example, when computing incremental SHA256 values of transferred files in
## multiple concurrent HTTP connections, one keeps an optional handle in the
## HTTP session record. Then, one would call
## ``c$http$sha256_handle = sha256_hash_init()`` ## once before invoking
## ``c$http$sha256_handle = sha256_hash_init()`` once before invoking
## ``sha256_hash_update(c$http$sha256_handle, some_more_data)`` in the
## :bro:id:`http_entity_data` event handler. When all data has arrived, a call
## to :bro:id:`sha256_hash_finish` returns the final hash value.
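A minimal sketch of the incremental-hashing workflow these comments describe, using only the BiFs named above (the bro_init handler is just a convenient place to run it; the SHA1/SHA256 BiFs follow the same init/update/finish pattern):

    event bro_init()
        {
        local h = md5_hash_init();        # opaque of md5
        md5_hash_update(h, "some ");      # feed data incrementally
        md5_hash_update(h, "more data");
        print md5_hash_finish(h);         # hash digest of "some more data"
        }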
@@ -1532,7 +1533,7 @@ function log10%(d: double%): double
#
# ===========================================================================

## Determines whether *c* has been received externally. For example,
## Determines whether a connection has been received externally. For example,
## Broccoli or the Time Machine can send packets to Bro via a mechanism that is
## one step lower than sending events. This function checks whether the packets
## of a connection stem from one of these external *packet sources*.
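The description above documents the connection-introspection BiF at that spot in bro.bif; its declaration is outside the displayed lines, but it is presumably is_external_connection. Assuming that name, a script could use it roughly like this:

    event connection_established(c: connection)
        {
        if ( is_external_connection(c) )
            print "connection from an external packet source", c$id;
        }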
@@ -1,6 +1,6 @@
## This event is generated when a file extraction analyzer is about
## to exceed the maximum permitted file size allowed by
## *extract_size_limit* field of :bro:see:`Files::AnalyzerArgs`.
## to exceed the maximum permitted file size allowed by the
## *extract_limit* field of :bro:see:`Files::AnalyzerArgs`.
## The analyzer is automatically removed from file *f*.
##
## f: The file.
@@ -13,7 +13,7 @@
##
## offset: The offset at which a file chunk is about to be written.
##
## len:: The length of the file chunk about to be written.
## len: The length of the file chunk about to be written.
##
## .. bro:see:: Files::add_analyzer Files::ANALYZER_EXTRACT
event file_extraction_limit%(f: fa_file, args: any, limit: count, offset: count, len: count%);
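A handler for the event declared above might look like the following sketch; the body is only illustrative:

    event file_extraction_limit(f: fa_file, args: any, limit: count, offset: count, len: count)
        {
        # The extraction analyzer is about to exceed extract_limit and will be removed.
        print fmt("extraction limit %d reached (offset %d, chunk length %d)", limit, offset, len);
        }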
@@ -54,7 +54,7 @@ void SQLite::DoClose()
}
}

bool SQLite::checkError( int code )
bool SQLite::checkError(int code)
{
if ( code != SQLITE_OK && code != SQLITE_DONE )
{
@@ -73,23 +73,19 @@ bool SQLite::DoInit(const ReaderInfo& info, int arg_num_fields, const threading:
return false;
}

if ( Info().mode != MODE_MANUAL )
{
Error("SQLite only supports manual reading mode.");
return false;
}

started = false;

string fullpath(info.source);
fullpath.append(".sqlite");

string dbname;
map<const char*, const char*>::const_iterator it = info.config.find("dbname");
if ( it == info.config.end() )
{
MsgThread::Info(Fmt("dbname configuration option not found. Defaulting to source %s", info.source));
dbname = info.source;
}
else
dbname = it->second;

string query;
it = info.config.find("query");
map<const char*, const char*>::const_iterator it = info.config.find("query");
if ( it == info.config.end() )
{
Error(Fmt("No query specified when setting up SQLite data source. Aborting.", info.source));
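Judging from the info.config.find("query") lookup above, the reader takes its SQL statement from the Input framework's per-stream config table. A usage sketch under that assumption (the type names, table name, and path are made up for illustration):

    type Idx: record { c: count; };
    type Val: record { name: string; };

    global results: table[count] of Val = table();

    event bro_init()
        {
        Input::add_table([$source="/var/db/example",   # the reader appends ".sqlite"
                          $name="sqlite-example",
                          $idx=Idx, $val=Val,
                          $destination=results,
                          $reader=Input::READER_SQLITE,
                          $mode=Input::MANUAL,          # the reader only supports manual mode
                          $config=table(["query"] = "select c, name from example_table;")]);
        }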
@@ -124,16 +124,16 @@ bool SQLite::DoInit(const WriterInfo& info, int arg_num_fields,

string fullpath(info.path);
fullpath.append(".sqlite");
string dbname;
string tablename;

map<const char*, const char*>::const_iterator it = info.config.find("dbname");
map<const char*, const char*>::const_iterator it = info.config.find("tablename");
if ( it == info.config.end() )
{
MsgThread::Info(Fmt("dbname configuration option not found. Defaulting to path %s", info.path));
dbname = info.path;
MsgThread::Info(Fmt("tablename configuration option not found. Defaulting to path %s", info.path));
tablename = info.path;
}
else
dbname = it->second;
tablename = it->second;

if ( checkError(sqlite3_open_v2(
fullpath.c_str(),
@@ -145,7 +145,7 @@ bool SQLite::DoInit(const WriterInfo& info, int arg_num_fields,
NULL)) )
return false;

string create = "CREATE TABLE IF NOT EXISTS " + dbname + " (\n";
string create = "CREATE TABLE IF NOT EXISTS " + tablename + " (\n";
//"id SERIAL UNIQUE NOT NULL"; // SQLite has rowids, we do not need a counter here.

for ( unsigned int i = 0; i < num_fields; ++i )
@@ -193,7 +193,7 @@ bool SQLite::DoInit(const WriterInfo& info, int arg_num_fields,

// create the prepared statement that will be re-used forever...
string insert = "VALUES (";
string names = "INSERT INTO " + dbname + " ( ";
string names = "INSERT INTO " + tablename + " ( ";

for ( unsigned int i = 0; i < num_fields; i++ )
{
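On the logging side, the renamed tablename option would be passed through a log filter's config table. A sketch along those lines (the Conn::LOG stream and the path are just examples):

    event bro_init()
        {
        local f: Log::Filter = [$name="sqlite-conn",
                                $path="/var/db/conn",         # the writer appends ".sqlite"
                                $writer=Log::WRITER_SQLITE,
                                $config=table(["tablename"] = "conn")];
        Log::add_filter(Conn::LOG, f);
        }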
@@ -24,8 +24,8 @@ module GLOBAL;
## name: A name that uniquely identifies and seeds the Bloom filter. If empty,
## the filter will use :bro:id:`global_hash_seed` if that's set, and
## otherwise use a local seed tied to the current Bro process. Only
## filters with the same seed can be merged with
## :bro:id:`bloomfilter_merge` .
## filters with the same seed can be merged with
## :bro:id:`bloomfilter_merge`.
##
## Returns: A Bloom filter handle.
##
@@ -50,8 +50,9 @@ function bloomfilter_basic_init%(fp: double, capacity: count,
%}

## Creates a basic Bloom filter. This function serves as a low-level
## alternative to bloomfilter_basic_init where the user has full control over
## the number of hash functions and cells in the underlying bit vector.
## alternative to :bro:id:`bloomfilter_basic_init` where the user has full
## control over the number of hash functions and cells in the underlying bit
## vector.
##
## k: The number of hash functions to use.
##
@@ -61,7 +62,7 @@ function bloomfilter_basic_init%(fp: double, capacity: count,
## the filter will use :bro:id:`global_hash_seed` if that's set, and
## otherwise use a local seed tied to the current Bro process. Only
## filters with the same seed can be merged with
## :bro:id:`bloomfilter_merge` .
## :bro:id:`bloomfilter_merge`.
##
## Returns: A Bloom filter handle.
##
@@ -97,7 +98,7 @@ function bloomfilter_basic_init2%(k: count, cells: count,
## counting Bloom filter, we refer to the Bloom filter literature
## here for choosing an appropiate value.
##
## max: The maximum counter value associated with each each element
## max: The maximum counter value associated with each element
## described by *w = ceil(log_2(max))* bits. Each bit in the underlying
## counter vector becomes a cell of size *w* bits.
##
@@ -105,7 +106,7 @@ function bloomfilter_basic_init2%(k: count, cells: count,
## the filter will use :bro:id:`global_hash_seed` if that's set, and
## otherwise use a local seed tied to the current Bro process. Only
## filters with the same seed can be merged with
## :bro:id:`bloomfilter_merge` .
## :bro:id:`bloomfilter_merge`.
##
## Returns: A Bloom filter handle.
##
@@ -187,9 +188,9 @@ function bloomfilter_lookup%(bf: opaque of bloomfilter, x: any%): count
return new Val(0, TYPE_COUNT);
%}

## Removes all elements from a Bloom filter. This function resets all bits in the
## underlying bitvector back to 0 but does not change the parameterization of the
## Bloom filter, such as the element type and the hasher seed.
## Removes all elements from a Bloom filter. This function resets all bits in
## the underlying bitvector back to 0 but does not change the parameterization
## of the Bloom filter, such as the element type and the hasher seed.
##
## bf: The Bloom filter handle.
##
@@ -242,6 +243,8 @@ function bloomfilter_merge%(bf1: opaque of bloomfilter,
## state. This is for debugging/testing purposes only.
##
## bf: The Bloom filter handle.
##
## Returns: a string with a representation of a Bloom filter's internal state.
function bloomfilter_internal_state%(bf: opaque of bloomfilter%): string
%{
BloomFilterVal* bfv = static_cast<BloomFilterVal*>(bf);
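A minimal sketch of how these Bloom filter BiFs combine; bloomfilter_add and bloomfilter_clear are assumed to be the insertion and reset BiFs from the same file (their declarations fall outside the displayed hunks):

    event bro_init()
        {
        # 1% false-positive rate, sized for roughly 10000 elements, named seed.
        local bf = bloomfilter_basic_init(0.01, 10000, "example-filter");
        bloomfilter_add(bf, "foo");
        print bloomfilter_lookup(bf, "foo");   # 1
        bloomfilter_clear(bf);                 # reset the bits, keep the parameterization
        print bloomfilter_lookup(bf, "foo");   # 0
        }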
@@ -8,11 +8,12 @@ using namespace probabilistic;

module GLOBAL;

## Initializes a probabilistic cardinality counter that uses the HyperLogLog algorithm.
## Initializes a probabilistic cardinality counter that uses the HyperLogLog
## algorithm.
##
## err: the desired error rate (e.g. 0.01).
##
## confidence: the desirec confidence for the error rate (e.g., 0.95).
## confidence: the desired confidence for the error rate (e.g., 0.95).
##
## Returns: a HLL cardinality handle.
##
@@ -30,9 +31,9 @@ function hll_cardinality_init%(err: double, confidence: double%): opaque of card
##
## handle: the HLL handle.
##
## elem: the element to add
## elem: the element to add.
##
## Returns: true on success
## Returns: true on success.
##
## .. bro:see:: hll_cardinality_estimate hll_cardinality_merge_into
## hll_cardinality_init hll_cardinality_copy
@@ -61,11 +62,11 @@ function hll_cardinality_add%(handle: opaque of cardinality, elem: any%): bool
## .. note:: The same restrictions as for Bloom filter merging apply,
## see :bro:id:`bloomfilter_merge`.
##
## handle1: the first HLL handle, which will contain the merged result
## handle1: the first HLL handle, which will contain the merged result.
##
## handle2: the second HLL handle, which will be merged into the first
## handle2: the second HLL handle, which will be merged into the first.
##
## Returns: true on success
## Returns: true on success.
##
## .. bro:see:: hll_cardinality_estimate hll_cardinality_add
## hll_cardinality_init hll_cardinality_copy
@@ -98,7 +99,7 @@ function hll_cardinality_merge_into%(handle1: opaque of cardinality, handle2: op

## Estimate the current cardinality of an HLL cardinality counter.
##
## handle: the HLL handle
## handle: the HLL handle.
##
## Returns: the cardinality estimate. Returns -1.0 if the counter is empty.
##
@@ -116,9 +117,9 @@ function hll_cardinality_estimate%(handle: opaque of cardinality%): double

## Copy a HLL cardinality counter.
##
## handle: cardinality counter to copy
## handle: cardinality counter to copy.
##
## Returns: copy of handle
## Returns: copy of handle.
##
## .. bro:see:: hll_cardinality_estimate hll_cardinality_merge_into hll_cardinality_add
## hll_cardinality_init
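The HLL cardinality BiFs documented above combine roughly as follows (the estimate is approximate by design; both counters use the same parameters so that merging is allowed):

    event bro_init()
        {
        local c1 = hll_cardinality_init(0.01, 0.95);
        local c2 = hll_cardinality_init(0.01, 0.95);
        hll_cardinality_add(c1, 1.2.3.4);
        hll_cardinality_add(c2, 5.6.7.8);
        hll_cardinality_add(c2, 1.2.3.4);       # duplicates only count once
        hll_cardinality_merge_into(c1, c2);     # merge c2 into c1
        print hll_cardinality_estimate(c1);     # roughly 2.0
        }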
@@ -6,7 +6,7 @@

## Creates a top-k data structure which tracks *size* elements.
##
## size: number of elements to track
## size: number of elements to track.
##
## Returns: Opaque pointer to the data structure.
##
@@ -24,9 +24,9 @@ function topk_init%(size: count%): opaque of topk
## the top-k data structure. All following values have to be of the same
## type.
##
## handle: the TopK handle
## handle: the TopK handle.
##
## value: observed value
## value: observed value.
##
## .. bro:see:: topk_init topk_get_top topk_count topk_epsilon
## topk_size topk_sum topk_merge topk_merge_prune
@@ -41,11 +41,11 @@ function topk_add%(handle: opaque of topk, value: any%): any

## Get the first *k* elements of the top-k data structure.
##
## handle: the TopK handle
## handle: the TopK handle.
##
## k: number of elements to return
## k: number of elements to return.
##
## Returns: vector of the first k elements
## Returns: vector of the first k elements.
##
## .. bro:see:: topk_init topk_add topk_count topk_epsilon
## topk_size topk_sum topk_merge topk_merge_prune
@@ -56,16 +56,17 @@ function topk_get_top%(handle: opaque of topk, k: count%): any
return h->GetTopK(k);
%}

## Get an overestimated count of how often value has been encountered.
## Get an overestimated count of how often a value has been encountered.
##
## .. note:: value has to be part of the currently tracked elements, otherwise
## 0 will be returned and an error message will be added to reporter.
## .. note:: The value has to be part of the currently tracked elements,
## otherwise 0 will be returned and an error message will be added to
## reporter.
##
## handle: the TopK handle
## handle: the TopK handle.
##
## value: Value to look up count for.
##
## Returns: Overestimated number for how often the element has been encountered
## Returns: Overestimated number for how often the element has been encountered.
##
## .. bro:see:: topk_init topk_add topk_get_top topk_epsilon
## topk_size topk_sum topk_merge topk_merge_prune
@@ -78,13 +79,14 @@ function topk_count%(handle: opaque of topk, value: any%): count

## Get the maximal overestimation for count.
##
## .. note:: Same restrictions as for :bro:id:`topk_count` apply.
## .. note:: Same restrictions as for :bro:id:`topk_count` apply.
##
## handle: the TopK handle
## handle: the TopK handle.
##
## value: Value to look up epsilon for.
##
## Returns: Number which represents the maximal overesimation for the count of this element.
## Returns: Number which represents the maximal overestimation for the count of
## this element.
##
## .. bro:see:: topk_init topk_add topk_get_top topk_count
## topk_size topk_sum topk_merge topk_merge_prune
@@ -95,14 +97,15 @@ function topk_epsilon%(handle: opaque of topk, value: any%): count
return new Val(h->GetEpsilon(value), TYPE_COUNT);
%}

## Get the number of elements this data structure is supposed to track (given on init).
## Get the number of elements this data structure is supposed to track (given
## on init).
##
## .. note ::Note that the actual number of elements in the data structure can be lower
## or higher (due to non-pruned merges) than this.
## .. note:: Note that the actual number of elements in the data structure can
## be lower or higher (due to non-pruned merges) than this.
##
## handle: the TopK handle
## handle: the TopK handle.
##
## Returns: size given during initialization
## Returns: size given during initialization.
##
## .. bro:see:: topk_init topk_add topk_get_top topk_count topk_epsilon
## topk_sum topk_merge topk_merge_prune
@@ -115,13 +118,14 @@ function topk_size%(handle: opaque of topk%): count

## Get the sum of all counts of all elements in the data structure.
##
## .. note:: This is equal to the number of all inserted objects if the data structure
## never has been pruned. Do not use after calling topk_merge_prune (will throw a
## warning message if used afterwards)
## .. note:: This is equal to the number of all inserted objects if the data
## structure never has been pruned. Do not use after
## calling :bro:id:`topk_merge_prune` (will throw a warning message if used
## afterwards).
##
## handle: the TopK handle
## handle: the TopK handle.
##
## Returns: sum of all counts
## Returns: sum of all counts.
##
## .. bro:see:: topk_init topk_add topk_get_top topk_count topk_epsilon
## topk_size topk_merge topk_merge_prune
@@ -132,10 +136,10 @@ function topk_sum%(handle: opaque of topk%): count
return new Val(h->GetSum(), TYPE_COUNT);
%}

## Merge the second topk data structure into the first.
## Merge the second top-k data structure into the first.
##
## .. note:: This does not remove any elements, the resulting data structure can be
## bigger than the maximum size given on initialization.
## .. note:: This does not remove any elements, the resulting data structure
## can be bigger than the maximum size given on initialization.
##
## .. bro:see:: topk_init topk_add topk_get_top topk_count topk_epsilon
## topk_size topk_sum topk_merge_prune
@@ -152,16 +156,16 @@ function topk_merge%(handle1: opaque of topk, handle2: opaque of topk%): any
return 0;
%}

## Merge the second topk data structure into the first and prunes the final data
## structure back to the size given on initialization.
## Merge the second top-k data structure into the first and prunes the final
## data structure back to the size given on initialization.
##
## .. note:: Use with care and only when being aware of the restrictions this
## entails. Do not call :bro:id:`topk_size` or :bro:id:`topk_add` afterwards,
## entails. Do not call :bro:id:`topk_size` or :bro:id:`topk_add` afterwards,
## results will probably not be what you expect.
##
## handle1: the TopK handle in which the second TopK structure is merged
## handle1: the TopK handle in which the second TopK structure is merged.
##
## handle2: the TopK handle in which is merged into the first TopK structure
## handle2: the TopK handle in which is merged into the first TopK structure.
##
## .. bro:see:: topk_init topk_add topk_get_top topk_count topk_epsilon
## topk_size topk_sum topk_merge
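The top-k BiFs in these hunks fit together roughly like this (values are illustrative):

    event bro_init()
        {
        local t = topk_init(100);      # track the top 100 elements
        topk_add(t, "a");
        topk_add(t, "b");
        topk_add(t, "a");
        print topk_get_top(t, 2);      # the two most frequently seen elements
        print topk_count(t, "a");      # overestimated count, here 2
        print topk_epsilon(t, "a");    # maximal overestimation of that count
        print topk_sum(t);             # 3, since nothing has been pruned
        }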
@@ -1052,6 +1052,8 @@ function find_last%(str: string, re: pattern%) : string
##
## data_str: The string to dump in hex format.
##
## Returns: The hex dump of the given string.
##
## .. bro:see:: string_to_ascii_hex bytestring_to_hexstr
##
## .. note:: Based on Netdude's hex editor code.
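This last hunk documents the hex-dump BiF in strings.bif; the declaration itself is outside the displayed lines, but it is presumably hexdump. Assuming that name, usage is simply:

    event bro_init()
        {
        print hexdump("GET / HTTP/1.1\x0d\x0a");    # prints an offset/hex/ASCII dump
        }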