mirror of https://github.com/zeek/zeek.git
synced 2025-10-06 16:48:19 +00:00

commit 0d462c37fa

Merge remote-tracking branch 'origin/master' into topic/johanna/spicy-tls

* origin/master: (200 commits)
  Update link to slack in README.md
  Update bifcl, binpac, and broker repos for cmake changes
  bison_target argument changes from HEADER to DEFINES_FILE
  Update cmake submodule [nomail]
  Start of 6.1.0 development
  Switch broker submodule back to master
  Update broker submodule to 2.6.0 release tag [nomail] [skip ci]
  Update docs submodule [nomail] [skip ci]
  Updating CHANGES and VERSION.
  Update baseline for coverage.test-all-policy-cluster test
  catch-and-release: Mark cr_check_rule as is_used
  Add test-all-policy-cluster
  Revert "Merge remote-tracking branch 'origin/topic/vern/at-if-analyze'"
  Simplify code generated for Spicy analyzer port ranges.
  Register test analyzer only for required ports.
  Update doc submodule [nomail] [skip ci]
  Update broker submodule [nomail]
  Update ZeekJS submodule to 0.9.1
  Fix disappearing unit fields in Spicy type export.
  Bump cluster testsuite to latest main
  ...

684 changed files with 18065 additions and 3963 deletions
.cirrus.yml (37)

@@ -160,22 +160,25 @@ env:
   # a solution for the mtime pruning above.
   ZEEK_CCACHE_EPOCH: 2
 
+  # Cache Spicy JIT results.
+  HILTI_CXX_COMPILER_LAUNCHER: ccache
+
 # Linux EOL timelines: https://linuxlifecycle.com/
 # Fedora (~13 months): https://fedoraproject.org/wiki/Fedora_Release_Life_Cycle
 
+fedora38_task:
+  container:
+    # Fedora 38 EOL: Around May 2024
+    dockerfile: ci/fedora-38/Dockerfile
+    << : *RESOURCES_TEMPLATE
+  << : *CI_TEMPLATE
+
 fedora37_task:
   container:
     # Fedora 37 EOL: Around Dec 2024
     dockerfile: ci/fedora-37/Dockerfile
     << : *RESOURCES_TEMPLATE
   << : *CI_TEMPLATE
-
-fedora36_task:
-  container:
-    # Fedora 36 EOL: Around May 2023
-    dockerfile: ci/fedora-36/Dockerfile
-    << : *RESOURCES_TEMPLATE
-  << : *CI_TEMPLATE
   << : *SKIP_TASK_ON_PR
 
 centosstream9_task:

@@ -253,8 +256,9 @@ opensuse_tumbleweed_task:
     # Opensuse Tumbleweed has no EOL
     dockerfile: ci/opensuse-tumbleweed/Dockerfile
     << : *RESOURCES_TEMPLATE
+  prepare_script: ./ci/opensuse-tumbleweed/prepare.sh
   << : *CI_TEMPLATE
-  << : *SKIP_TASK_ON_PR
+  # << : *SKIP_TASK_ON_PR
 
 ubuntu2210_task:
   container:

@@ -611,6 +615,21 @@ container_image_manifest_docker_builder:
     - arm64_container_image
     - amd64_container_image
 
+# Once we've published new images in container_image_manifest, remove any untagged
+# images from the public ECR repository to stay within free-tier bounds.
+public_ecr_cleanup_docker_builder:
+  cpu: 1
+  only_if: >
+    $CIRRUS_CRON == '' && $CIRRUS_REPO_FULL_NAME == 'zeek/zeek' && $CIRRUS_BRANCH == 'master'
+  env:
+    AWS_ACCESS_KEY_ID: ENCRYPTED[!eff52f6442e1bc78bce5b15a23546344df41bf519f6201924cb70c7af12db23f442c0e5f2b3687c2d856ceb11fcb8c49!]
+    AWS_SECRET_ACCESS_KEY: ENCRYPTED[!748bc302dd196140a5fa8e89c9efd148882dc846d4e723787d2de152eb136fa98e8dea7e6d2d6779d94f72dd3c088228!]
+    AWS_REGION: us-east-1
+  cleanup_script:
+    - ./ci/public-ecr-cleanup.sh
+  depends_on:
+    - container_image_manifest
+
 cluster_testing_docker_builder:
   cpu: *CPUS
   memory: *MEMORY

@@ -695,7 +714,7 @@ include_plugins_debian11_task:
     folder: /tmp/ccache
     fingerprint_script: echo builtin-plugins-ccache-$ZEEK_CCACHE_EPOCH-$CIRRUS_TASK_NAME-$CIRRUS_OS
     reupload_on_changes: true
-  build_script: ZEEK_CI_CONFIGURE_FLAGS="${ZEEK_CI_CONFIGURE_FLAGS} --include-plugins='/zeek/testing/builtin-plugins/Files/protocol-plugin;/zeek/testing/builtin-plugins/Files/py-lib-plugin'" ./ci/build.sh
+  build_script: ZEEK_CI_CONFIGURE_FLAGS="${ZEEK_CI_CONFIGURE_FLAGS} --include-plugins='/zeek/testing/builtin-plugins/Files/protocol-plugin;/zeek/testing/builtin-plugins/Files/py-lib-plugin;/zeek/testing/builtin-plugins/Files/zeek-version-plugin'" ./ci/build.sh
   test_script:
     - cd testing/builtin-plugins && ../../auxil/btest/btest -d -b -j ${ZEEK_CI_BTEST_JOBS}
   on_failure:
.cmake-format.json (80, new file)

@@ -0,0 +1,80 @@
{
    "parse": {
        "additional_commands": {
            "CheckIPProto": {
                "kwargs": {
                    "_proto": "*"
                }
            },
            "CheckType": {
                "kwargs": {
                    "_type": "*",
                    "_alt_type": "*",
                    "_var": "*"
                }
            },
            "SetPackageVersion": {
                "kwargs": {
                    "_version": "*"
                }
            },
            "SetPackageFileName": {
                "kwargs": {
                    "_version": "*"
                }
            },
            "SetPackageInstallScripts": {
                "kwargs": {
                    "VERSION": "*"
                }
            },
            "ConfigurePackaging": {
                "kwargs": {
                    "_version": "*"
                }
            },
            "SetPackageGenerators": {},
            "SetPackageMetadata": {},
            "FindRequiredPackage": {
                "kwargs": {
                    "packageName": "*"
                }
            },
            "InstallClobberImmune": {
                "kwargs": {
                    "_srcfile": "*",
                    "_dstfile": "*"
                }
            },
            "InstallPackageConfigFile": {
                "kwargs": {
                    "_srcfile": "*",
                    "_dstdir": "*",
                    "_dstfilename": "*"
                }
            },
            "InstallShellScript": {
                "kwargs": {
                    "_srcfile": "*",
                    "_dstfile": "*"
                }
            },
            "InstallSymLink": {
                "kwargs": {
                    "_filepath": "*",
                    "_sympath": "*"
                }
            }
        }
    },
    "format": {
        "line_width": 100,
        "tab_size": 4,
        "separate_ctrl_name_with_space": true,
        "max_subgroups_hwrap": 3,
        "line_ending": "unix"
    },
    "markup": {
        "enable_markup": false
    }
}
.github/workflows/generate-docs.yml (6, vendored)

@@ -34,7 +34,13 @@ jobs:
         with:
           submodules: "recursive"
 
+      # Only reset the submodule pointer for scheduled builds. The reason to do
+      # this is to pick up any merge commits or anything that may have been
+      # missed in a merge, but not have any actual content. We don't want to do
+      # it otherwise because PRs should just use the submodule they're pointing
+      # at.
       - name: Switch doc submodule to master
+        if: github.event_name == 'schedule'
         run: cd doc && git checkout master
 
       - name: Fetch Dependencies
.gitmodules (5, vendored)

@@ -58,11 +58,8 @@
 [submodule "auxil/out_ptr"]
 	path = auxil/out_ptr
 	url = https://github.com/soasis/out_ptr.git
-[submodule "auxil/spicy-plugin"]
-	path = auxil/spicy-plugin
-	url = https://github.com/zeek/spicy-plugin
 [submodule "auxil/spicy"]
-	path = auxil/spicy/spicy
+	path = auxil/spicy
 	url = https://github.com/zeek/spicy
 [submodule "auxil/filesystem"]
 	path = auxil/filesystem
@@ -17,3 +17,9 @@ repos:
   rev: v0.31.0
   hooks:
   - id: yapf
+
+- repo: https://github.com/cheshirekow/cmake-format-precommit
+  rev: v0.6.13
+  hooks:
+  - id: cmake-format
+    exclude: '^auxil/.*$'
CHANGES (766)

@@ -1,3 +1,769 @@

6.1.0-dev.4 | 2023-05-31 13:48:49 -0700

  * Update bifcl, binpac, and broker repos for cmake changes (Tim Wojtulewicz)

  * bison_target argument changes from HEADER to DEFINES_FILE (Tim Wojtulewicz, Corelight)

  * Update cmake submodule [nomail] (Tim Wojtulewicz, Corelight)

  * Start of 6.1.0 development (Tim Wojtulewicz, Corelight)

6.0.0-dev.683 | 2023-05-31 09:50:46 +0200

  * Simplify code generated for Spicy analyzer port ranges. (Benjamin Bannier, Corelight)

    We previously would represent port ranges from EVT files element-wise.
    This can potentially generate a lot of code (all on a single line
    though) which some versions of GCC seem to have trouble with, and which
    also causes JIT overhead.

    With this patch we switch to directly representing ranges. Single ports
    are represented as ranges `[start, start]`.

    Closes #3094.

  * catch-and-release: Mark cr_check_rule as is_used (Arne Welzel, Corelight)

  * Add test-all-policy-cluster (Arne Welzel, Corelight)

    After the introduction of @if ... analyze, a lot of warnings were
    triggered due to nested @if and @if .. analyze usage.

    Add a test for coverage of all policy scripts in cluster mode
    for the usual node types so this does not happen again.

6.0.0-dev.662 | 2023-05-26 20:51:43 +0200

  * Update ZeekJS submodule to 0.9.1 (Arne Welzel, Corelight)

6.0.0-dev.660 | 2023-05-26 10:03:29 +0200

  * CMakeLists: Skip zeek-version.h include for zeek_objs, too (Arne Welzel, Corelight)

    I've continued to see somewhat slower builds after Zeek version bumps. It
    appears files covered by zeek_objs didn't have -DZEEK_CONFIG_SKIP_VERSION_H
    set, causing ccache invalidation after a version bump.

  * Bump cluster testsuite to latest main (Arne Welzel, Corelight)

6.0.0-dev.657 | 2023-05-25 19:49:20 -0700

  * Fix minor type-clash warning on Windows (Tim Wojtulewicz)

  * Fix Coverity warning involving object copy in TLS binpac code (Tim Wojtulewicz, Corelight)

6.0.0-dev.654 | 2023-05-25 20:01:37 +0200

  * Address wire/capture length feedback (Arne Welzel, Corelight)

  * packet_analysis/TCP: Do not use untrusted len for DeliverPacket() (Arne Welzel, Corelight)

    We should not be passing the untrusted TCP header length into
    DeliverPacket(). Also, DeliverPacket()'s cap len parameter should
    be the capture length of the packet, not remaining data.

  * GH-2683: Add regression test using pcap from GH-2683 (Arne Welzel, Corelight)

  * Add btest to test Geneve->VXLAN->Truncated inner packet (Tim Wojtulewicz, Corelight)

  * IP: Update packet->len with accumulated fragment size (Arne Welzel, Corelight)

    With packet->len representing the wire length and other places
    relying on it, ensure it's updated for fragments as well. This
    assumes non-truncated fragments right now. Otherwise we'd need
    to teach the FragmentReassembler to somehow track this independently
    but it would be a mess.

  * UDP: Forward any remaining data (also empty) to session-analysis (Arne Welzel, Corelight)

    The protocol analyzers are prepared to receive truncated data and
    this way we give analyzers a chance to look at data. We previously
    allowed empty data being passed: When len ended up 0 and remaining
    was 0 too.

  * IPTunnel: Compute inner wire length based on cap_len differences. (Arne Welzel, Corelight)

  * IP: fix weird name to not be ipv6 specific (Tim Wojtulewicz, Corelight)

  * UDP: don't validate checksum if caplen < len (Tim Wojtulewicz, Corelight)

    This may happen with truncated packets and will cause asan builds to bail out
    before the packet can be forwarded along. The TCP analyzer already has this
    check, but it's missing for UDP.

  * PIA: Modernize how struct initialization is done (Tim Wojtulewicz, Corelight)

6.0.0-dev.643 | 2023-05-25 09:03:40 -0700

  * btest.cfg: Set HILTI_CXX_COMPILER_LAUNCHER based on build/CMakeCache.txt (Arne Welzel, Corelight)

    If CMakeCache.txt indicates the Zeek build was done with ccache,
    take the CCACHE_PROGRAM:FILEPATH= line and populate the
    HILTI_CXX_COMPILER_LAUNCHER environment.

    For local development with ccache, this speeds up the spicy tests
    significantly after the initial run, without knowing or needing to know
    that setting HILTI_CXX_COMPILER_LAUNCHER would've helped.

6.0.0-dev.641 | 2023-05-25 09:03:08 -0700

  * Speed up Spicy-related tests. (Benjamin Bannier, Corelight)

    This patch changes invocations of `spicyz` and similar Spicy tools in
    tests which perform compilation to use debug mode via passing `-d`. This
    in turn leads to Spicy compiling generated C++ code in debug as opposed
    to release mode which typically seems to require less CPU time and RAM.
    For a local test running with `btest -j 16` and no caching via
    `HILTI_CXX_COMPILER_LAUNCHER` this sped up running of BTests under
    `spicy/` by about 40s on my machine (120s vs 160s).

6.0.0-dev.636 | 2023-05-24 09:41:01 +0200

  * stats: Add zeek-net-packet-lag-seconds metric (Arne Welzel, Corelight)

  * GH-3060: Support negative index lookups for vectors (Tim Wojtulewicz, Corelight)
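
    A hedged illustration of the new behavior (the exact semantics are assumed
    here): a negative index counts back from the end of the vector.

        local v = vector(1, 2, 3);
        print v[-1];    # assumed to yield the last element, 3
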
6.0.0-dev.628 | 2023-05-23 19:32:05 +0200

  * ci: Disable openh264 repository on tumbleweed (Arne Welzel, Corelight)

6.0.0-dev.626 | 2023-05-23 17:34:43 +0200

  * Bump cmake to fix build of multiple Spicy analyzers in a package. (Robin Sommer, Corelight)

6.0.0-dev.624 | 2023-05-23 15:45:34 +0200

  * GH-3071: Fix crashing when disable_analyzer() called on root analyzers (Arne Welzel, Corelight)

6.0.0-dev.622 | 2023-05-22 16:38:56 -0700

  * Revert part of previous CMake debug type fix (Tim Wojtulewicz)

6.0.0-dev.620 | 2023-05-22 16:34:06 -0700

  * btest/input/raw: Fix reread test (Arne Welzel, Corelight)

    This seems to have relied on the reading-the-file-twice behavior, simply
    testing that 16 lines are observed. Switch to using two separate
    files and doing a system("mv ...") to trigger the REREAD logic; there's
    no force_update() needed and it wouldn't do anything if the file
    hadn't changed anyway.

  * input/Raw: Avoid reading file twice in MODE_REREAD (Arne Welzel, Corelight)

    Found while writing documentation and being confused why
    all lines and end_of_data() arrive twice during startup.

    The test is a bit fuzzy, but does fail reliably without
    the changes to Raw.cc

    Also fix not checking dev in the MODE_REREAD path.

    Closes #3053

6.0.0-dev.617 | 2023-05-22 10:14:27 +0200

  * Fix CMAKE_BUILD_TYPE_LOWER definition-vs-use ordering problem (Christian Kreibich, Corelight)

    It needs to be defined by the time we create zeek-config, which happens before
    its current definition. To avoid a redundant TOLOWER when we check for presence
    of --enable-debug at the beginning, this also switches this to a case-unadjusted
    comparison to "Debug", which we use elsewhere in the file too.

6.0.0-dev.614 | 2023-05-19 10:51:35 -0700

  * Allow plugins to call find_package(Zeek) (Dominik Charousset)

6.0.0-dev.611 | 2023-05-19 09:37:39 -0700

  * files: Warn once for missing get_file_handle() (Arne Welzel, Corelight)

    Repeating the message for every new call to get_file_handle() is not
    very useful. It's pretty much an analyzer configuration issue so logging
    it once should be enough.

  * MIME: Re-use cur_entity_id for EndOfFile() (Arne Welzel, Corelight)

    If DataIn() was called and a cur_entity_id (file_id) has been produced
    previously, re-use it for calls to EndOfFile(). This avoids a costly
    event_mgr.Drain() when we already have that information. It should be safer,
    too, as `get_file_handle()` in script may generate a different ID and
    thereby de-synchronize.

  * zeek-fuzzer-setup: Configure fake DNS (Arne Welzel, Corelight)

    I'm not sure if we somehow set this for oss-fuzz through the environment,
    but didn't find anything obvious.

    Running oss-fuzz reproducers locally can trigger lookups to malware.hash.cymru.com
    and potentially other domains due to loading local.zeek.

  * SupportAnalyzer: Stop delivering to disabled parent analyzer (Arne Welzel, Corelight)

    When the parent of a support analyzer has been disabled, short-circuit
    delivering stream or packet data to it.

    The specific scenario this avoids is the Content-Line analyzer continuing
    to feed data lines into a disabled SMTP analyzer, in turn creating more
    events.

    This is primarily useful for our fuzzing setup where data chunks up to 1MB
    are generated and fed into the analyzer pipeline. In the real world, chunk
    sizes are usually bounded to packet size. Certain TCP reassembly constellations
    may trigger these scenarios, however.

    Closes #168

  * Add length checking to ToRawPktHdrVal for truncated packets (Tim Wojtulewicz, Corelight)

  * ftp: No unbounded directory command re-use (Arne Welzel, Corelight)

    OSS-Fuzz generated traffic containing a CWD command with a single very large
    path argument (427kb) starting with ".___/` \x00\x00...". This is followed
    by a large number of ftp replies with code 250. The directory logic in
    ftp_reply() would match every incoming reply with the one pending CWD command,
    triggering path buildup ending with something 120MB in size.

    Protect from re-using a directory command by setting a flag in the
    CmdArg record when it was consumed for the path traversal logic.

    This doesn't prevent unbounded path build-up generally, but does prevent the
    amplification of a single large command with very many small ftp_replies.
    Re-using a pending path command seems like a bug as well.

6.0.0-dev.605 | 2023-05-18 08:54:41 -0700

  * Fix CMake ordering issue leaving configuration paths unset. (Robin Sommer, Corelight)

6.0.0-dev.602 | 2023-05-17 16:10:57 +0200

  * Add license header to zeek-config*.h and zeek-version.h (Arne Welzel, Corelight)

  * Rename util-config.h to zeek-config-paths.h and install it (Arne Welzel, Corelight)

    The util-config.h has never been installed previously. Skimming the history,
    it was only meant for inclusion from util.cc, hence the name. Now that it's
    included from some other headers, rename it to align with what it
    contains and install it, too.

  * Ensure spicyz/config.h is installed (Arne Welzel, Corelight)

6.0.0-dev.598 | 2023-05-17 12:46:23 +0200

  * Introduce environment variables to override more paths configured
    into `spicyz` (`ZEEK_SPICY_DATA_PATH`, `ZEEK_SPICY_MODULE_PATH`).
    (Robin Sommer, Corelight).

  * Fix an ordering issue in Spicy support's CMake config. This led to
    variables left unset inside the subdirectory. (Robin Sommer,
    Corelight)

6.0.0-dev.595 | 2023-05-17 10:36:22 +0200

  * Fix get_active_node_count for node types not present. (Jan Grashoefer, Corelight)

6.0.0-dev.592 | 2023-05-17 09:02:09 +0200

  * zeekygen: Render function parameters as :param x: instead of :x: (Arne Welzel, Corelight)

    We're currently rendering parameter descriptions from .bif files into
    the .rst as follows:

      :cid: The connection identifier.

      :aid: The analyzer ID.

    Switch this to :param cid: instead so that we can have Sphinx deal with
    this as a param docfield and group all parameters into a single section.

    Currently, having the bare :cid: style causes sphinx to treat it as an
    unknown field type, capitalize it and render it.

6.0.0-dev.589 | 2023-05-16 12:15:07 +0200

  * Move Spicy submodule from `auxil/spicy/spicy` to `auxil/spicy`.
    (Robin Sommer, Corelight)

6.0.0-dev.587 | 2023-05-16 11:40:40 +0200

  * Integrate Spicy plugin into Zeek proper. (Robin Sommer, Corelight)

    The plugin code is now a part of Zeek, located inside the standard
    `src/` tree. Going forward, we will maintain it here and phase out
    the external plugin.

    The integration reflects the `spicy-plugin` code as of
    `d8c296b81cc2a11`.

    In addition to moving the code into Zeek's source tree, this comes
    with a couple of small functional changes:

    - `spicyz` no longer tries to infer if it's running from the build
      directory. Instead `ZEEK_SPICY_LIBRARY` can be set to a custom
      location. `zeek-set-path.sh` does that now.

    - ZEEK_CONFIG can be set to change what `spicyz -z` prints out. This is
      primarily for backwards compatibility.

    The minimum Spicy version is now 1.8 (i.e., current `main` branch
    at the time of merge).

    For now, this all remains backwards compatible with the current
    `zkg` analyzer templates so that they work with both external and
    integrated Spicy support. Later, once we don't need to support any
    external Spicy plugin versions anymore, we can clean up the
    templates as well.

  * Add `zkg_provides` to `--build-info`. (Robin Sommer, Corelight)

    This makes explicit which dependencies Zeek provides built-in for
    `zkg`. It's in support of
    https://github.com/zeek/package-manager/pull/157.

    For now, `zkg_provides` contains the same data as `included_plugins`
    plus an entry for `spicy-plugin`.

  * Fix Spicy tuple type conversion. (Robin Sommer, Corelight)

    With an anonymous Spicy-side tuple type, we'd be missing an ID to
    create a Zeek-side record, leading to undefined behavior. To still
    support this case, we now make up an ID. In addition, we also could
    end up not correctly tracking type ID during conversion; using a stack
    now to handle recursion correctly.

  * Fix generation of Spicy's file IDs. (Robin Sommer, Corelight)

    They weren't stable, and potentially repetitive.

  * Modernize plugin test. (Robin Sommer, Corelight)

    Not using the `zeek/` include style could lead to path problems.

6.0.0-dev.570 | 2023-05-12 23:29:09 +0200

  * Introduce ZEEK_SEED_VALUES environment variable (Arne Welzel, Corelight)

    For "individually different but deterministic" runs, specifying Zeek's
    seed as an environment variable eases setups as one can avoid creating
    extra seed files for each of the individual processes.

    It is an error to specify the new ZEEK_SEED_VALUES variable together
    with the existing ZEEK_SEED_FILE and -G. ZEEK_SEED takes precedence over
    deterministic mode (-D) like ZEEK_SEED_FILE does today already.

6.0.0-dev.568 | 2023-05-12 13:41:55 -0700

  * enhancements for event-tracing: (Vern Paxson, Corelight)

    - reporting of potentially sensitive constants
    - tracking of unsupported types enabling hand-editing to fix them
    - fixed generation of "unspecified" aggregates
    - fixed generation of IPv6 constants
    - fixed generation when running without a packet source

6.0.0-dev.563 | 2023-05-12 10:01:42 -0700

  * Fix memory leak caused by pattern compilation failure (mAsk°)

6.0.0-dev.559 | 2023-05-11 15:01:44 +0100

  * Add DTLSv1.3 support (Johanna Amann, Corelight)

    DTLSv1.3 changes the DTLS record format, introducing a completely new
    unified header - which is a first for DTLS.

    In case connection IDs are used, parsing of the unified header
    is skipped. This is due to the fact that the header then contains a
    variable length element, with the length of the element not given in the
    header. Instead, the length is given in the client/server hello message
    of the opposite side of the connection (which we might have missed).

    Furthermore, parsing is not of high importance, since we are not
    passing the connection ID, or any of the other parsed values of the
    unified header, into scriptland.

  * SSL: Fix logging of Hello Retry Requests (Johanna Amann, Corelight)

    It turns out that we never logged hello retry requests correctly in the
    ssl_history field.

    Hello retry requests are (in their final version) signaled by a specific
    random value in the server random.

    This commit fixes this oversight, and hello retry requests are now
    correctly logged as such.

  * remote_event_ts_compat: Fix NETWORK_TIMESTAMP to NetworkTimestamp (Arne Welzel, Corelight)

6.0.0-dev.551 | 2023-05-11 14:00:31 +0200

  * Add compatibility tests for timestamped events. (Jan Grashoefer, Corelight)

    This adds compatibility tests for receiving non-timestamped events as
    well as providing timestamps via broker websockets.

  * Add timestamps to auto published broker events. (Jan Grashoefer, Corelight)

  * Add timestamps to manually published broker events. (Jan Grashoefer, Corelight)

  * Annotate scheduled events with intended timestamp. (Jan Grashoefer, Corelight)

  * Add timestamp to events. (Jan Grashoefer, Corelight)

6.0.0-dev.544 | 2023-05-11 00:01:20 +0200

  * GH-3028: policy/community-id: Do not use new_connection() (Arne Welzel, Corelight)

    Issue #3028 tracks how a flipped connection resets a connection's value
    including any state set during new_connection(). For the time being,
    update community-id functionality back to the original connection_state_remove()
    approach to avoid missing community_ids on flipped connections.

6.0.0-dev.541 | 2023-05-10 23:16:24 +0200

  * testing/zeek-version-link: Assume nm is there (Arne Welzel, Corelight)

  * Drop dependency for zeek_dynamic_plugin_base (Dominik Charousset, Corelight)

  * Fixup ifdef check in Plugin.h (Arne Welzel, Corelight)

  * Update cmake_minimum_required() in test plugins (Arne Welzel, Corelight)

  * testing: Add zeek-version-link tests (Arne Welzel, Corelight)

  * Clean up ZEEK_CONFIG_SKIP_VERSION_H issues (Dominik Charousset, Corelight)

6.0.0-dev.533 | 2023-05-09 13:38:37 -0700

  * core.network_time.broker: Test reliability improvement (Arne Welzel, Corelight)

    I wasn't able to reproduce this locally, but after looking at
    -B main-loop,tm for a bit it dawned that if the manager is sending
    ticks too fast, the Broker IO source may consume two ticks in one go
    before expiring timers and that would explain the observed baseline
    differences.

    Solve this by removing the reliance on realtime delays and switching to
    a request-reply pattern instead.

6.0.0-dev.531 | 2023-05-09 13:38:15 -0700

  * ci: Add public-ecr-vacuum.sh (Arne Welzel, Corelight)

6.0.0-dev.529 | 2023-05-09 12:54:28 -0700

  * Updating submodule(s) [nomail] (Tim Wojtulewicz, Corelight)

6.0.0-dev.527 | 2023-05-09 19:11:00 +0200

  * GH-2930: zeek.bif: Add log2() and ceil() (Arne Welzel, Corelight)

6.0.0-dev.525 | 2023-05-09 09:05:51 -0700

  * Use the same rules as cmake submodule to reformat Zeek (Tim Wojtulewicz, Corelight)

  * Update cmake submodule after reformat (Tim Wojtulewicz, Corelight)

6.0.0-dev.522 | 2023-05-09 15:19:43 +0200

  * cluster/supervisor: Multi-logger awareness (Arne Welzel, Corelight)

    When multiple loggers are configured in a Supervisor controlled cluster
    configuration, encode extra information into the rotated filename to
    identify which logger produced the log.

    This is similar to the approach taken for ZeekControl, re-using the
    log_suffix terminology, but as there's only a single zeek-archiver
    process and no postprocessors and no other side-channel for additional
    information, we encode extra metadata into the filename. zeek-archiver
    is extended to recognize the special metadata part of the filename.

    This also solves the issue that multiple loggers in a supervisor setup
    overwrite each other's log files within a single log-queue directory.

  * Bump zeek-archiver submodule (Arne Welzel, Corelight)

6.0.0-dev.519 | 2023-05-09 11:03:32 +0200

  * Fixup Val.h/Val.cc: Actually move ValFromJSON into zeek::detail (Arne Welzel, Corelight)

6.0.0-dev.518 | 2023-05-09 10:19:46 +0200

  * Implement from_json bif (Fupeng Zhao)

6.0.0-dev.516 | 2023-05-05 14:08:15 -0700

  * BTest baseline updates for -O gen-C++ (Vern Paxson, Corelight)

  * updates to C++ maintenance scripts to better handle uncompilable BTests (Vern Paxson, Corelight)

  * added ZEEK_REPORT_UNCOMPILABLE environment variable for "-O report-uncompilable" (Vern Paxson, Corelight)

  * BTest baseline updates for ZAM (Vern Paxson, Corelight)

6.0.0-dev.511 | 2023-05-05 21:04:11 +0200

  * Revert "Skip version.h by default for Zeek sources" (Arne Welzel, Corelight)

    This reverts commit 8246baf25c692b658e7a39527f6652e37f5b5095.

    Actually fails the include_plugins CI test and I had just
    opened another related issue.

6.0.0-dev.510 | 2023-05-05 20:14:21 +0200

  * Skip version.h by default for Zeek sources (Dominik Charousset, Corelight)

6.0.0-dev.508 | 2023-05-05 08:48:10 -0700

  * generate-docs: Only update submodule pointer during scheduled builds (Tim Wojtulewicz, Corelight)

6.0.0-dev.505 | 2023-05-04 20:08:33 +0200

  * GH-2998: NTP: Detect out-of-order packets (Arne Welzel, Corelight)

    The NTP mode provides us with the identity of the endpoints. For the
    simple CLIENT / SERVER modes, flip the connection if we detect
    orig/resp disagreeing with what the message says. This mainly
    results in the history getting a ^ and the ntp.log / conn.log
    showing the corrected endpoints.

6.0.0-dev.503 | 2023-05-04 10:56:33 -0700

  * Include compiler in --show-config output (Dominik Charousset, Corelight)

  * Fix CMake option defaults on Windows (Dominik Charousset, Corelight)

  * Move build defaults from configure to CMake (Dominik Charousset, Corelight)

    Moving the defaults for build variables from the `configure` script to
    `CMakeLists.txt` gives the same default behavior on platforms where the
    `configure` script is not available (Windows) and also allows a pure
    CMake-based work flow (e.g., the standard `cmake -S . -B build`) without
    having to manually adjust the defaults.

    The `configure` script also becomes much simpler as a result.

6.0.0-dev.498 | 2023-05-04 09:30:18 +0200

  * scripts/smb2-main: Reset script-level state upon smb2_discarded_messages_state() (Arne Welzel, Corelight)

    This is similar to what the external corelight/zeek-smb-clear-state script
    does, but leverages the smb2_discarded_messages_state() event instead of
    regularly checking on the state of SMB connections.

    The pcap was created using the dperson/samba container image and mounting
    a share with Linux's CIFS filesystem, then copying the content of a
    directory with 100 files. The test uses a BPF filter to imitate mostly
    "half-duplex" traffic.

  * smb2: Limit per-connection read/ioctl/tree state (Arne Welzel, Corelight)

    Users on Slack observed memory growth in an environment with a lot of
    SMB traffic. jeprof memory profiling pointed at the offset and fid maps
    kept per-connection for smb2 read requests.

    These maps can grow unbounded if responses are seen before requests, there's
    packet drops, just one side of the connection is visible, or we fail to parse
    responses properly.

    Forcefully wipe out these maps when they grow too large and raise
    smb2_discarded_messages_state() to notify script land about this.

6.0.0-dev.493 | 2023-05-03 11:13:15 -0700

  * Update Mozilla CA and Google CT lists (Johanna Amann, Corelight)

6.0.0-dev.491 | 2023-05-03 09:31:36 -0700

  * Surround string assignments in zeek-config in quotes (Tim Wojtulewicz, Corelight)

6.0.0-dev.489 | 2023-05-03 09:31:19 -0700

  * Remove parser error message for bro_init, et al (Tim Wojtulewicz, Corelight)

6.0.0-dev.487 | 2023-05-03 09:30:55 -0700

  * Add additional length check to IEEE 802.11 analyzer (Tim Wojtulewicz, Corelight)

6.0.0-dev.484 | 2023-05-03 14:18:03 +0100

  * SSL: do not try to disable failed analyzer (Johanna Amann, Corelight)

    Currently, if a TLS/DTLS analyzer fails with a protocol violation, we
    will still try to remove the analyzer later, which results in the
    following error message:

      error: connection does not have analyzer specified to disable

    Now, instead, we don't try removing the analyzer anymore after a
    violation occurred.

6.0.0-dev.480 | 2023-05-02 20:28:55 +0200

  * ip4_hdr: Add DF, MF, offset and sum fields (Arne Welzel, Corelight)

    For low-level packet analysis use-cases, these fields are currently
    not script-land accessible via raw_packet() or so. They are accessible
    on the icmp_context record, but not on the actual ip4_hdr record, so
    add them.

  * GH-2991: unload: Fix unloading of packages (Arne Welzel, Corelight)

    @ynadji found that unloading packages doesn't work due to @unload not
    resolving the __load__.zeek file within a directory like @load does.

    Fixes #2991

6.0.0-dev.476 | 2023-05-02 11:12:44 -0700

  * Use workaround for setvbuf on Windows in DebugLogger/Extract file analyzer (Tim Wojtulewicz)

  * Add ifdef'd implementation of setvbuf to zeek::util (Tim Wojtulewicz, Corelight)

6.0.0-dev.473 | 2023-05-02 09:42:19 -0700

  * Fix smith-waterman sorting to follow correct Compare semantics (Tim Wojtulewicz)

6.0.0-dev.470 | 2023-04-28 16:33:46 -0700

  * CI: Automatically update opensuse-tumbleweed (Tim Wojtulewicz, Corelight)

  * CI: Remove Fedora 36, add Fedora 38 (Tim Wojtulewicz, Corelight)

6.0.0-dev.467 | 2023-04-28 10:01:13 +0200

  * GH-2791: pcap/Source: Allow more than 32bit for link and dropped stats (Arne Welzel, Corelight)

    The PktSrc::Stats object works with 64bit unsigned integers. Unfortunately,
    libpcap's struct pcap_stat is using 32bit values and users have reported
    the wrapping of these values being visible in their stats.log roughly every
    7.5 hours (~160kpps).

    This change moves tracking of link and drop counters into the PktSrc::Stats
    object (like is done for received and bytes_received) and updates them
    on a call to PcapSource::Statistics() with the difference to the
    previous stats values to prevent the wrap from becoming visible to
    script land.

    This doesn't cover the case of the stats counters wrapping around multiple
    times between two invocations of PktSrc::Statistics(). With the default
    interval of 5 minutes for the stats script, this seems acceptable.

    Closes #2791.

  * record_fields: Include information about optionality of fields (Arne Welzel, Corelight)

    This was reported as a wish for log schema generation, so add it...

  * Fix a few warnings from recent changes (Tim Wojtulewicz)

6.0.0-dev.461 | 2023-04-28 09:37:08 +0200

  * Simplify btests using cluster_started event. (Jan Grashoefer, Corelight)

6.0.0-dev.457 | 2023-04-27 11:25:45 -0700

  * Modify Windows test cmd file to actually run tests (Tim Wojtulewicz)

  * Add template file and cmake call for zeek-path setup on Windows (Tim Wojtulewicz)

6.0.0-dev.453 | 2023-04-27 13:22:43 +0200

  * Revert putting plugins into a fresh scope for now (Dominik Charousset, Corelight)

  * Propagate zeek-version.h skip via CMake properties (Dominik Charousset, Corelight)

    Add a define for `ZEEK_CONFIG_SKIP_VERSION_H` to static plugins as well
    as to dynamic plugins that we build alongside Zeek. When including
    `zeek-config.h` with this macro defined, the header skips including
    `zeek-version.h`.

6.0.0-dev.449 | 2023-04-27 12:16:46 +0200

  * Bump zeekctl to multi-logger version (Arne Welzel, Corelight)

  * logging: Support rotation_postprocessor_command_env (Arne Welzel, Corelight)

    This new table provides a mechanism to add environment variables to the
    postprocessor execution. The use case is for ZeekControl to inject a suffix
    to be used when running with multiple loggers.

6.0.0-dev.445 | 2023-04-27 09:08:45 +0200

  * Update ZeekJS submodule to 0.9.0 (Arne Welzel, Corelight)

    * Performance improvements for the Redis Log::log_stream_policy example
      around PortVal wrapping and caching field offsets for property lookups.
    * Debug output is now channeled through PLUGIN_DBG_LOG() and available via
      zeek -B plugin-Zeek-JavaScript instead of unconditionally on stderr.
    * Reduced CMake output when Node.js isn't found

6.0.0-dev.443 | 2023-04-26 12:42:48 -0700

  * extend ZEEK_PROFILER_FILE profiling to include summaries for functions/hooks/event handlers (Vern Paxson, Corelight)

6.0.0-dev.441 | 2023-04-26 15:37:59 +0200

  * Update AF-Packet submodule (Arne Welzel, Corelight)

    Include Tim's cleanup and modernization fixes, too.

  * Update AF-Packet submodule (Arne Welzel, Corelight)

    * Mask VLAN ID from tp_vlan_tci field to fix vlan > 4095 reported by Zeek
      when PCP and/or DEI bits are set.
    * Descriptive error message when interface is down. Instead of
      "Invalid argument", Zeek now reports "interface is down".

6.0.0-dev.437 | 2023-04-25 13:07:57 -0700

  * Update docs and NEWS to include LLC, SNAP, and Novell packet analyzers (Tim Wojtulewicz)

  * Fix length checks in VLAN/Ethernet analyzers for non-ethertype protocols (Tim Wojtulewicz, Corelight)

  * Add forwarding from VLAN analyzer into LLC, SNAP, and Novell 802.3 analyzers (Tim Wojtulewicz, Corelight)

  * Remove non-standard way of forwarding out of the Ethernet analyzer (Tim Wojtulewicz, Corelight)

  * Add basic LLC, SNAP, and Novell 802.3 packet analyzers (Tim Wojtulewicz, Corelight)

  * ARP: add support for IEEE802 hardware type (Tim Wojtulewicz, Corelight)

6.0.0-dev.430 | 2023-04-25 11:37:44 -0700

  * Merge branch 'topic/timw/2167-aruba-expansion' (Tim Wojtulewicz)

    * topic/timw/2167-aruba-expansion:
      Add NEWS entry about 802.11 and ARUBA changes
      Remove workaround for tunnels from IEEE 802.11 analyzer
      PPP in the GRE analyzer is actually PP*T*P
      Add support for 802.11 A-MSDU aggregates
      Fix IEEE 802.11 analyzer to properly forward tunneled packets
      Fix IEEE 802.11 analyzer to skip packets with the Protected bit enabled
      Expand support for Aruba protocol types in GRE analyzer
      Default tunnel_type in iosource::Packet to NONE

  * Add NEWS entry about 802.11 and ARUBA changes (Tim Wojtulewicz)

  * Remove workaround for tunnels from IEEE 802.11 analyzer (Tim Wojtulewicz, Corelight)

  * PPP in the GRE analyzer is actually PP*T*P (Tim Wojtulewicz, Corelight)

  * Add support for 802.11 A-MSDU aggregates (Tim Wojtulewicz, Corelight)

  * Fix IEEE 802.11 analyzer to properly forward tunneled packets (Tim Wojtulewicz, Corelight)

    This mostly happens with Aruba, but could possibly happen with other tunnels too.

  * Fix IEEE 802.11 analyzer to skip packets with the Protected bit enabled (Tim Wojtulewicz, Corelight)

  * Expand support for Aruba protocol types in GRE analyzer (Tim Wojtulewicz, Corelight)

    This also fixes the GRE analyzer to forward into the IEEE 802.11 analyzer
    if it encounters Aruba packets with the proper protocol types. This way
    the QoS header can be handled correctly.

  * Default tunnel_type in iosource::Packet to NONE (Tim Wojtulewicz, Corelight)

6.0.0-dev.421 | 2023-04-25 12:39:01 +0200

  * Given the -C flag, set script-layer ignore_checksums to true. (Christian Kreibich, Corelight)
CMakeLists.txt (955)

  (file diff suppressed because it is too large)
NEWS (164)

@@ -3,6 +3,24 @@ This document summarizes the most important changes in the current Zeek
 release. For an exhaustive list of changes, see the ``CHANGES`` file
 (note that submodules, such as Broker, come with their own ``CHANGES``.)
 
+Zeek 6.1.0
+==========
+
+Breaking Changes
+----------------
+
+New Functionality
+-----------------
+
+Changed Functionality
+---------------------
+
+Removed Functionality
+---------------------
+
+Deprecated Functionality
+------------------------
+
 Zeek 6.0.0
 ==========
 
@@ -28,22 +46,6 @@ Breaking Changes
   come in handy for example when working with tests that compare results against
   log baselines that have not yet been updated.
 
-- The zeek/zeek-config.h header does not provide the macros ZEEK_VERSION and
-  ZEEK_VERSION_NUMBER anymore when compiling builtin plugins. This may affect
-  external plugins included via the configure flag ``--include-plugins``
-  referencing these macros. A suggested update for these plugins is adding
-  the following snippet:
-
-      #if __has_include("zeek/zeek-version.h")
-      #include "zeek/zeek-version.h"
-      #endif
-
-  External plugins that are built out-of-tree, e.g. via ``zkg`` are not
-  affected by this change.
-
-  The main motivation is improved ccache effectiveness for speedier CI and
-  development builds whenever a VERSION bump happens.
-
 - Custom source tarballs require a ``repo-info.json`` file.
 
   Note, should you be using official Zeek release tarballs only, or build
@@ -75,6 +77,11 @@ Breaking Changes
 
 - The IRC_Data analyzer declaration has been moved to protocols/irc/IRC.h.
 
+- The error message returned when using ``bro_init``, ``bro_done``, and
+  ``bro_script_loaded`` events is now removed. Usage of these events
+  has returned that error during script parsing for a few years, and time has
+  come to finally remove it.
+
 New Functionality
 -----------------
 
@@ -102,6 +109,32 @@ New Functionality
 
   To disable this functionality, pass ``--disable-javascript`` to configure.
 
+- Zeek events now hold network timestamps. For scheduled events, the timestamp
+  represents the network time for which the event was scheduled; otherwise
+  it is the network time at event creation. A new bif ``current_event_time()``
+  allows retrieving the current event's network timestamp within the script-layer.
+
+  When Zeek sends events via Broker to other nodes in a cluster, an event's network
+  timestamp is attached to the Broker messages. On a receiving Zeek node executing a
+  handler for a remote event, ``current_event_time()`` returns the network time of
+  the sending node at the time the event was created.
+
+  The Broker level implementation allows exchanging arbitrary event metadata, but
+  Zeek's script and C++ APIs currently only expose network timestamp functionality.
+
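
  As a rough, hedged illustration of these semantics (the event name below is
  invented and not part of the release notes), a handler for a scheduled event
  could compare the two clocks:

      global my_timer: event();

      event my_timer()
          {
          # For a scheduled event, current_event_time() is expected to return
          # the time the event was scheduled for, which may trail network_time().
          print current_event_time(), network_time();
          }

      event zeek_init()
          {
          schedule 5 sec { my_timer() };
          }
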
+- A new bif ``from_json()`` can be used to parse JSON strings into records.
+
+      type A: record { a: addr; };
+      local p = from_json("{\"a\": \"192.168.0.1\"}", A);
+      if ( p$valid )
+          print (p$v as A);
+
+  Implicit conversion from JSON to Zeek types is implemented for bool, int, count,
+  real, interval (number as seconds) and time (number as unix timestamp), port
+  (strings in "80/tcp" notation), patterns, addr, subnet, enum, sets, vectors
+  and records similar to the rules of the input framework. Optional or default
+  record fields are allowed to be missing or null in the input.
+
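
  As an additional hedged sketch (the record type and values below are invented
  for illustration), ports and optional fields could be exercised like this:

      type Endpoint: record {
          host: addr;
          p: port;
          tags: vector of string &optional;
      };

      event zeek_init()
          {
          local r = from_json("{\"host\": \"10.0.0.1\", \"p\": \"443/tcp\"}", Endpoint);
          if ( r$valid )
              {
              local e = r$v as Endpoint;
              # "tags" stays unset because it is optional and missing in the input.
              print e$host, e$p;
              }
          }
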
 - Zeek now provides native "Community ID" support with a new bif called
   ``community_id_v1()``. Two policy scripts ``protocols/conn/community-id-logging``
   and ``frameworks/notice/community-id`` extend the respective logs with a
@@ -112,6 +145,25 @@ New Functionality
   Loading the new policy scripts and using the external zeek-community-id
   plugin at the same time is unsupported.
 
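
  A minimal hedged sketch of calling the bif directly (assuming it accepts a
  ``conn_id`` and returns the Community ID string; optional seed/base64
  parameters are not shown):

      event connection_established(c: connection)
          {
          # Compute the Community ID hash over this connection's 5-tuple.
          print community_id_v1(c$id);
          }
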
+- ZeekControl is now multi-logger aware. When multiple logger nodes are configured
+  in ZeekControl's node.cfg, by default the log archival logic adds a logger's name
+  as suffix to the rotated file name:
+
+      stats.11:18:57-11:19:00-logger-1.log.gz
+      stats.11:18:57-11:19:00-logger-2.log.gz
+
+  Previously, in a multi-logger setup, individual logger processes would overwrite
+  each other's log files during rotation, causing data loss.
+
+  For setups with a single logger, there's no change in behavior. The naming
+  of the final logs can be customized by providing an alternative
+  ``make-archive-name`` script and using the new ``ZEEK_ARG_LOG_SUFFIX``
+  environment variable.
+
+- A supervisor controlled Zeek cluster is now multi-logger aware. This avoids
+  loggers overwriting each other's log files within a single log-queue directory.
+  By default, a logger's name is appended to the rotated logs by zeek-archiver.
+
 - Introduce a new command-line option ``-V`` / ``--build-info``. It produces
   verbose output in JSON format about the repository state and any included
   plugins.
@@ -127,6 +179,18 @@ New Functionality
 - Add logging metrics for streams (``zeek-log-stream-writes``) and writers
   (``zeek-log-writer-writes-total``).
 
+- Add networking metrics via the telemetry framework. These are enabled
+  when the ``misc/stats`` script is loaded.
+
+      zeek-net-dropped-packets
+      zeek-net-link-packets
+      zeek-net-received-bytes
+      zeek-net-packet-lag-seconds
+      zeek-net-received-packets-total
+
+  Except for lag, metrics originate from the ``get_net_stats()`` bif and are
+  updated through the ``Telemetry::sync()`` hook every 15 seconds by default.
+
 - The DNS analyzer now parses RFC 2535's AD ("authentic data") and CD ("checking
   disabled") flags from DNS requests and responses, making them available in
   the ``dns_msg`` record provided by many of the ``dns_*`` events. The existing
@@ -191,6 +255,38 @@ New Functionality
   Note: There is no tracking of cluster node connectivity. Thus, there is no guarantee
   that all peerings still exist at the time of these events being raised.
 
+- The IEEE 802.11 packet analyzer gains the ability to parse encapsulated A-MSDU
+  packets, instead of just dropping them. It also gains the ability to properly
+  recognize CCMP-encrypted packets. These encrypted packets are currently
+  dropped due to Zeek's inability to do anything with them.
+
+- Add packet analyzers for LLC, SNAP, and Novell 802.3, called from the Ethernet
+  and VLAN analyzers by default.
+
+- Environment variables for the execution of log rotation postprocessors can
+  be set via ``Log::default_rotation_postprocessor_cmd_env``.
+
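
  A hedged sketch of how this might be used (assuming the option is a
  redef-able ``table[string] of string`` whose entries end up in the
  postprocessor's environment; the variable name here is just an example):

      redef Log::default_rotation_postprocessor_cmd_env += {
          ["LOG_SUFFIX"] = "logger-1",
      };
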
|
- The ``record_field`` record was extended by ``optional``, and ``record_fields()``
  can now be used to determine the optionality of record fields (see the sketch
  following this entry).
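  A small, hypothetical sketch of querying optionality; the record type and
  field names are made up for illustration:

    type Example: record {
        a: count;
        b: string &optional;
    };

    event zeek_init()
        {
        local fields = record_fields(Example($a=1));
        print fields["a"]$optional;  # F
        print fields["b"]$optional;  # T
        }
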
- The ``ip4_hdr`` record was extended by ``DF``, ``MF``, ``offset`` and ``sum``
  to aid packet-level analysis use cases (see the sketch following this entry).
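  A hypothetical sketch using the new fields from within the ``new_packet``
  event (note that this event fires per packet and is therefore expensive):

    event new_packet(c: connection, p: pkt_hdr)
        {
        # Report IPv4 fragments based on the new MF flag and fragment offset.
        if ( p?$ip && p$ip$MF )
            print fmt("fragment from %s, offset %d", p$ip$src, p$ip$offset);
        }
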
- Zeek now supports parsing the recently standardized DTLS 1.3. Besides the protocol
  messages being correctly parsed and raising the typical SSL/TLS events, the biggest
  visible change is the newly added ``ssl_extension_connection_id`` event.

- The NTP analyzer now recognizes when client and server mode messages disagree
  with the notion of "originator" and "responder" and flips the connection. This
  can happen in packet loss or packet re-ordering scenarios. Such connections will
  have a ``^`` added to their history.

- New bifs for ``ceil()`` and ``log2()`` have been added.

- Seeds for deterministic processing can now also be set through a new environment
  variable called ``ZEEK_SEED_VALUES``. The format is expected to contain 21
  positive numbers separated by spaces.

Changed Functionality
---------------------

@@ -280,6 +376,42 @@ Changed Functionality

- The ``ignore_checksums`` script variable now reflects the correct value
  when using the ``-C`` command-line flag.

- Support for ARUBA GRE tunnels now covers all of the known protocol type values
  for those tunnels.

- The vlan field reported by the AF_PACKET packet source is now properly
  masked to exclude the PCP and DEI bits. Previously, these bits were included
  and could cause invalid vlan values > 4095 to be reported.

- The libpcap-based packet source now avoids the 32-bit wraparound of link and
  dropped packet counters that users had reported.

- The `ssl_history` field in ssl.log indicates that the letter `j` is reserved
  for hello retry requests. However, this logging was never fully implemented;
  instead, hello retry requests were logged as a server hello (with the letter
  `s`). This oversight was fixed, and hello retry requests are now correctly logged.

- When per-connection SMB parser state (read offsets, tree ids, ...) exceeds
  ``SMB::max_pending_messages`` (default 1000), Zeek discards such per-connection
  state and raises a new ``smb2_discarded_messages_state()`` event. This event is
  used to reset script-layer SMB state. This change provides protection against
  unbounded state growth due to partial or one-sided SMB connections.

  Setting ``SMB::max_pending_messages`` to 0 can be used to switch back to the
  previous behavior of not discarding state. Setting ``SMB::enable_clear_script_state``
  to ``F`` skips the script-layer state clearing logic (see the sketch following
  this entry).
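  A minimal sketch of restoring the previous behavior, assuming these settings
  are applied from a site script such as ``local.zeek``:

    # Never discard per-connection SMB parser state.
    redef SMB::max_pending_messages = 0;

    # Keep script-layer SMB state even when discard events are raised.
    redef SMB::enable_clear_script_state = F;
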
- Fix the ``disable_analyzer()`` builtin function crashing when attempting to
  disable a connection's root analyzers.

- Zeek script vectors now support negative indices:

    local v = vector(1, 2, 3);
    print v[-1];  # prints 3

- Function parameters are rendered by Zeekygen as ``:param x`` rather than just
  ``:x:``. This allows grouping parameters in Zeek's documentation.

Removed Functionality
---------------------

@@ -20,7 +20,7 @@ Follow us on Twitter at [@zeekurity](https://twitter.com/zeekurity).
[](https://coveralls.io/github/zeek/zeek?branch=master)
[](https://cirrus-ci.com/github/zeek/zeek)

-[](https://join.slack.com/t/zeekorg/shared_invite/zt-1ev1nr7z4-rEVSsaIzYzFWpdgh2I6ZOg)
+[](https://zeek.org/slack)
[](https://community.zeek.org)

</h4>

2
VERSION
@@ -1 +1 @@
-6.0.0-dev.421
+6.1.0-dev.4

@@ -1 +1 @@
-Subproject commit b6f138be79f7d4408302b1297b0c63092b019773
+Subproject commit 183a0c7fb0e04b843e4a2bafbee44117001228e6

@@ -1 +1 @@
-Subproject commit e820c29116a50a18079e783f47b96111eb7b5b0b
+Subproject commit fad1f7322209d93cfa67be3420aecb441a90468b

@@ -1 +1 @@
-Subproject commit 4fc4c31592c4823d675314bc981931de9e246057
+Subproject commit 3df48de38ef75a5d274c2fa59ad3f798a62c6bfc

@@ -1 +1 @@
-Subproject commit f1183514cd12468d34cb01b147c8d1859a657c31
+Subproject commit 0c3cafb2ed638f88a446732fa03d90af9bcf796c

@@ -1 +1 @@
-Subproject commit 3e5b930d9690ef6b87dd034916598e0771f8688d
+Subproject commit 72a76d774e4c7c605141fd6d11c33cc211209ed9

@@ -1 +1 @@
-Subproject commit 8534f719a0a384769383bbd4ad71c9eb2084823d
+Subproject commit 16841c95849d4d82f239bfb0c46bc217af368da2

@@ -1 +1 @@
-Subproject commit 8011410a8a2ba9b57a544d20efd077f83ecb7eda
+Subproject commit 2766a0c45a6dbcdcf26cd1209a73a13323854961

@@ -1 +1 @@
-Subproject commit 5ade27d716fd65342be592abf277eea114fefaa7
+Subproject commit e1d8271af1f499b3d072c99ae717d2593f551645

1
auxil/spicy
Submodule
@@ -0,0 +1 @@
+Subproject commit ec87b43037dba50648cb93be8940a4db23658905

@@ -1 +0,0 @@
-Subproject commit a618f2ce0831c311f9bcff5d020b85fc44345221

@@ -1,58 +0,0 @@
# Spicy needs the full prefix for Flex and Bison while Zeek captures only the
# paths to the executables. Derive the prefixes from the binary paths under the
# assumption that their bindir is under their prefix (which also implies that
# one such prefix even exists).
if ( NOT FLEX_EXECUTABLE )
    find_package(FLEX REQUIRED)
endif ()
get_filename_component(dir ${FLEX_EXECUTABLE} DIRECTORY ABSOLUTE)
set(FLEX_ROOT ${dir}/..)

if ( NOT BISON_EXECUTABLE )
    find_package(BISON REQUIRED)
endif ()
get_filename_component(dir ${BISON_EXECUTABLE} DIRECTORY ABSOLUTE)
set(BISON_ROOT ${dir}/..)

if ( NOT BINARY_PACKAGING_MODE )
    # TODO: Broker seems to always turn on static libraries. We don't want that for Spicy by default.
    set(BUILD_SHARED_LIBS yes)
endif ()

# Spicy uses slightly less strict warnings than Zeek proper. Mute a few warnings for Spicy.
# NOTE: Compiler flags are inherited down the directory tree, so in order to
# set these flags we do need a customizable subdirectory above the Spicy
# sources.
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-missing-braces")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-vla")

# GCC 13 adds a new flag to check whether a symbol changes meaning. Due to an issue in one
# of the dependencies used by Spicy, this causes Zeek to fail to build on that compiler.
# Until this is fixed, ignore that warning, but check to make sure the flag exists first.
include(CheckCXXCompilerFlag)
check_cxx_compiler_flag("-Wno-changes-meaning" _has_no_changes_meaning_flag)
if ( _has_no_changes_meaning_flag )
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-changes-meaning")
endif ()

# The script generating precompiled headers for Spicy expects a different build
# system layout than provided for a bundled Spicy, disable it.
set(HILTI_DEV_PRECOMPILE_HEADERS OFF)

add_subdirectory(spicy)

# Disable Spicy unit test targets.
#
# Spicy builds its unit tests as part of `ALL`. They are usually not only
# uninteresting for us but might cause problems. Since any configuration
# we do for our unit tests happens through global C++ compiler flags, they
# would get inherited directly by Spicy which can cause issues, e.g., we set
# `-DDOCTEST_CONFIG_DISABLE` if `ENABLE_ZEEK_UNIT_TESTS` is false, but Spicy
# unit tests do not anticipate this define being set.
set_target_properties(
    hilti-rt-tests
    hilti-rt-configuration-tests
    spicy-rt-tests
    hilti-toolchain-tests
    spicy-toolchain-tests
    PROPERTIES EXCLUDE_FROM_ALL TRUE)

@@ -1 +0,0 @@
-Subproject commit 64de5d0ef323428452827469f07b0a1da8e65e16

@@ -1 +1 @@
-Subproject commit 08935a1b93a2d7710d748737e5e653934977a9cf
+Subproject commit acd8e365c652ea6113b70fbbb1339d42e496819d

@@ -1 +1 @@
-Subproject commit 2b40a6bafedbbf3fba16d292fd9fe8ba6e9de1bf
+Subproject commit e36862b3a6e70bf8557885e12e74cbc91507a693

@@ -1 +1 @@
-Subproject commit 444a6fbdac555f08fbed43d0786355551641c805
+Subproject commit 98e5bd2a4c6c23f6bc3a70bdfe26d17c084c8ab6

@@ -1 +1 @@
-Subproject commit 0ed55db83db06fbc0c74be5f7db2922256e95c14
+Subproject commit 4b4d4242e6ebebe41bee0e3ba262cb453b02f88f

@@ -1 +1 @@
-Subproject commit 633b4b3aafebde91cc1ded20b2841113681aa60a
+Subproject commit 01c54f8b385c42ac82553fc8e18b28b22f7be62a

@@ -1 +1 @@
-Subproject commit e4ae24051f31620e8bd7a93e8516797d6734b6d9
+Subproject commit e77634d5f92db96e66de0c36ddc4d44893306fa7

@@ -9,6 +9,7 @@ Example usage:
"""

import argparse
import copy
import json
import logging
import pathlib

@@ -207,6 +208,11 @@ def main():

    info["included_plugins"] = included_plugins_info

    zkg_provides_info = copy.deepcopy(included_plugins_info)
    # Hardcode the former spicy-plugin so that zkg knows Spicy is available.
    zkg_provides_info.append({"name": "spicy-plugin", "version": info["version"].split("-")[0]})
    info["zkg"] = {"provides": zkg_provides_info}

    json_str = json.dumps(info, indent=2, sort_keys=True)
    print(json_str)

32
ci/fedora-38/Dockerfile
Normal file
@@ -0,0 +1,32 @@
FROM fedora:38

# A version field to invalidate Cirrus's build cache when needed, as suggested in
# https://github.com/cirruslabs/cirrus-ci-docs/issues/544#issuecomment-566066822
ENV DOCKERFILE_VERSION 20230428

RUN dnf -y install \
    bison \
    ccache \
    cmake \
    diffutils \
    findutils \
    flex \
    gcc \
    gcc-c++ \
    git \
    libpcap-devel \
    make \
    nodejs-devel \
    openssl \
    openssl-devel \
    procps-ng \
    python3 \
    python3-devel \
    python3-pip \
    sqlite \
    swig \
    which \
    zlib-devel \
    && dnf clean all && rm -rf /var/cache/dnf

RUN pip3 install websockets junit2html

@@ -2,7 +2,11 @@ FROM opensuse/tumbleweed

# A version field to invalidate Cirrus's build cache when needed, as suggested in
# https://github.com/cirruslabs/cirrus-ci-docs/issues/544#issuecomment-566066822
-ENV DOCKERFILE_VERSION 20230330
+ENV DOCKERFILE_VERSION 20230523

# Remove the repo-openh264 repository, it caused intermittent issues
# and we should not be needing any packages from it.
RUN zypper modifyrepo --disable repo-openh264 || true

RUN zypper refresh \
    && zypper in -y \

4
ci/opensuse-tumbleweed/prepare.sh
Executable file
@@ -0,0 +1,4 @@
#!/bin/sh

zypper refresh
zypper patch -y --with-update --with-optional

63
ci/public-ecr-cleanup.sh
Executable file
@@ -0,0 +1,63 @@
#!/bin/bash
#
# Script to batch-delete all untagged images from ECR public repositories,
# defaulting to the zeek/zeek-dev repository.
# First scans for manifest list images that are referencing other images and
# deletes them, then deletes all remaining untagged images.
set -eu

if ! command -v aws >/dev/null; then
    echo "missing aws command" >&2
    exit 1
fi

REGISTRY_ID=${REGISTRY_ID:-103243056077}
REPOSITORY_NAME=${REPOSITORY_NAME:-zeek-dev}
BATCH_DELETE_SIZE=${BATCH_DELETE_SIZE:-50}

# Chunk up "$1" into BATCH_DELETE_SIZE entries and batch-delete them at once
# via aws batch-delete.
#
# Expected input looks as follows to keep things simple:
#
#   imageDigest=sha256:db6...366
#   imageDigest=sha256:2ad...9b0
#
function batch_delete {
    while read -r batch; do
        if [ -z "${batch}" ]; then
            break
        fi

        echo "Deleting ${batch}"
        aws ecr-public batch-delete-image \
            --registry-id "${REGISTRY_ID}" \
            --repository-name "${REPOSITORY_NAME}" \
            --image-ids ${batch}

    done < <(xargs -L ${BATCH_DELETE_SIZE} <<<"$1")
}

# Find all untagged manifest lists with the following media types:
#
#   application/vnd.docker.distribution.manifest.list.v2+json
#   application/vnd.oci.image.index.v1+json
#
# These reference other images, so we need to delete them first as
# otherwise the referenced images can not be deleted.
IMAGE_DIGESTS=$(aws ecr-public describe-images \
    --registry-id "${REGISTRY_ID}" \
    --repository-name "${REPOSITORY_NAME}" \
    --query 'imageDetails[?!imageTags && (contains(imageManifestMediaType, `manifest.list.v2`) || contains(imageManifestMediaType, `image.index.v1`))].{imageDigest: join(`=`, [`imageDigest`, imageDigest])}' \
    --output text)

batch_delete "${IMAGE_DIGESTS}"

# Now find all untagged manifests that are left.
IMAGE_DIGESTS=$(aws ecr-public describe-images \
    --registry-id "${REGISTRY_ID}" \
    --repository-name "${REPOSITORY_NAME}" \
    --query 'imageDetails[?!imageTags].{imageDigest: join(`=`, [`imageDigest`, imageDigest])}' \
    --output text)

batch_delete "${IMAGE_DIGESTS}"

@@ -1,7 +1,11 @@
:: See build.cmd for documentation on this call.
call "c:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools\VC\Auxiliary\Build\vcvarsall.bat" x86_amd64

-:: We currently don't have any tests to run on Windows, so this is just commented out.
-:: We'll expand on this later.
-:: cd build
-:: ctest -C release || exit \b 1
+cd build
+
+:: This sets up ZEEKPATH and ZEEK_PLUGIN_PATH
+call zeek-path-dev.bat
+
+:: Only run the unit tests for now. Btest is supported on Windows but a ton
+:: of tests are still failing so it's not worth trying to run it.
+src\zeek --test

2
cmake
@@ -1 +1 @@
-Subproject commit 6f096c3f6dcb9f35664cf25a58989bc5d123b995
+Subproject commit 4e41cdd77f0aa617c23f37b4776a1ba5c4ea4ea3

61
configure
vendored
@@ -12,6 +12,9 @@ command="$0 $*"
usage="\
Usage: $0 [OPTION]... [VAR=VALUE]...

+  -h, --help                       display this help and exit
+  --show-config                    display the most relevant config parameters of an existing build

Build Options:
  --cmake=PATH                     custom path to a CMake binary
  --builddir=DIR                   place build files in directory [build]

@@ -101,7 +104,6 @@ Usage: $0 [OPTION]... [VAR=VALUE]...
  --with-python-inc=PATH           path to Python headers
  --with-python-lib=PATH           path to libpython
  --with-spicy=PATH                path to Spicy install root
-  --with-spicy-plugin=PATH         path to Spicy plugin source tree
  --with-swig=PATH                 path to SWIG executable

Packaging Options (for developers):

@@ -149,42 +151,10 @@ append_cache_entry() {
    CMakeCacheEntries="$CMakeCacheEntries -D $1:$2=$3"
}

-# Function to remove a CMake cache entry definition from the
-# CMakeCacheEntries variable
-# $1 is the cache entry variable name
-remove_cache_entry() {
-    CMakeCacheEntries="$CMakeCacheEntries -U $1"
-
-    # Even with -U, cmake still warns by default if
-    # added previously with -D.
-    CMakeCacheEntries="$CMakeCacheEntries --no-warn-unused-cli"
-}
-
# set defaults
builddir=build
-prefix=/usr/local/zeek
CMakeCacheEntries=""
display_cmake=0
-append_cache_entry CMAKE_INSTALL_PREFIX PATH $prefix
-append_cache_entry ZEEK_ROOT_DIR PATH $prefix
-append_cache_entry ZEEK_SCRIPT_INSTALL_PATH STRING $prefix/share/zeek
-append_cache_entry ZEEK_ETC_INSTALL_DIR PATH $prefix/etc
-append_cache_entry ENABLE_DEBUG BOOL false
-append_cache_entry ENABLE_PERFTOOLS BOOL false
-append_cache_entry ENABLE_JEMALLOC BOOL false
-append_cache_entry ENABLE_ZEEK_UNIT_TESTS BOOL true
-append_cache_entry BUILD_SHARED_LIBS BOOL true
-append_cache_entry INSTALL_AUX_TOOLS BOOL true
-append_cache_entry INSTALL_BTEST BOOL true
-append_cache_entry INSTALL_BTEST_PCAPS BOOL true
-append_cache_entry INSTALL_ZEEK_ARCHIVER BOOL true
-append_cache_entry INSTALL_ZEEK_CLIENT BOOL true
-append_cache_entry INSTALL_ZEEKCTL BOOL true
-append_cache_entry INSTALL_ZKG BOOL true
-append_cache_entry CPACK_SOURCE_IGNORE_FILES STRING
-append_cache_entry ZEEK_SANITIZERS STRING ""
-append_cache_entry ZEEK_INCLUDE_PLUGINS STRING ""
-append_cache_entry PREALLOCATE_PORT_ARRAY BOOL true

# parse arguments
while [ $# -ne 0 ]; do

@@ -198,6 +168,14 @@ while [ $# -ne 0 ]; do
        echo "${usage}" 1>&2
        exit 1
        ;;
+    --show-config)
+        if [ ! -f "$builddir/CMakeCache.txt" ]; then
+            echo "Error: no CMake build found under '$builddir'." 1>&2
+            exit 1
+        fi
+        grep -E "^ENABLE_|^ZEEK_|^INSTALL_|^CMAKE_INSTALL_PRE|^CMAKE_C.*_FLAGS|^CMAKE_C.*_COMPILER|^CMAKE_.*_LINKER_FLAGS|^CMAKE_BUILD" "$builddir/CMakeCache.txt" | grep -v ':INTERNAL'
+        exit 0
+        ;;
    -D)
        shift
        if [ $# -eq 0 ]; then

@@ -217,10 +195,6 @@ while [ $# -ne 0 ]; do
        ;;
    --build-type=*)
        append_cache_entry CMAKE_BUILD_TYPE STRING $optarg
-
-        if [ $(echo "$optarg" | tr [:upper:] [:lower:]) = "debug" ]; then
-            append_cache_entry ENABLE_DEBUG BOOL true
-        fi
        ;;
    --generator=*)
        CMakeGenerator="$optarg"

@@ -235,9 +209,7 @@ while [ $# -ne 0 ]; do
        append_cache_entry ZEEK_INCLUDE_PLUGINS STRING \"$optarg\"
        ;;
    --prefix=*)
-        prefix=$optarg
        append_cache_entry CMAKE_INSTALL_PREFIX PATH $optarg
-        append_cache_entry ZEEK_ROOT_DIR PATH $optarg
        ;;
    --libdir=*)
        append_cache_entry CMAKE_INSTALL_LIBDIR PATH $optarg

@@ -405,9 +377,6 @@ while [ $# -ne 0 ]; do
    --with-spicy=*)
        append_cache_entry SPICY_ROOT_DIR PATH $optarg
        ;;
-    --with-spicy-plugin=*)
-        append_cache_entry SPICY_PLUGIN_PATH PATH $optarg
-        ;;
    --with-swig=*)
        append_cache_entry SWIG_EXECUTABLE PATH $optarg
        ;;

@@ -454,14 +423,6 @@ if [ -z "$CMakeCommand" ]; then
    fi
fi

-if [ "$user_set_scriptdir" != "true" ]; then
-    append_cache_entry ZEEK_SCRIPT_INSTALL_PATH STRING $prefix/share/zeek
-fi
-
-if [ "$user_set_conffilesdir" != "true" ]; then
-    append_cache_entry ZEEK_ETC_INSTALL_DIR PATH $prefix/etc
-fi
-
if [ -d $builddir ]; then
    # If build directory exists, check if it has a CMake cache
    if [ -f $builddir/CMakeCache.txt ]; then

2
doc
@@ -1 +1 @@
-Subproject commit 6ccf06f0f6b0c24f120160aeb05307e4c4a44975
+Subproject commit 8a0873c71095136ef1f611a01bf936f7a2805aed

@@ -5,9 +5,9 @@
# variable `HILTI_CXX_INCLUDE_DIRS`.

# Paths to support compiling Spicy parsers in the build tree.
-PATHS=@CMAKE_SOURCE_DIR@/auxil/spicy/spicy/hilti/runtime/include
+PATHS=@CMAKE_SOURCE_DIR@/auxil/spicy/hilti/runtime/include
-PATHS=$PATHS:@CMAKE_SOURCE_DIR@/auxil/spicy/spicy/spicy/runtime/include
+PATHS=$PATHS:@CMAKE_SOURCE_DIR@/auxil/spicy/spicy/runtime/include
-PATHS=$PATHS:@CMAKE_BINARY_DIR@/auxil/spicy/spicy/include
+PATHS=$PATHS:@CMAKE_BINARY_DIR@/auxil/spicy/include

# Paths to support compiling against a build tree Zeek.
PATHS=$PATHS:@CMAKE_BINARY_DIR@

@@ -1,5 +1,5 @@
-install(DIRECTORY . DESTINATION ${ZEEK_MAN_INSTALL_PATH}/man8 FILES_MATCHING
-    PATTERN "*.8"
-)
+install(
+    DIRECTORY .
+    DESTINATION ${ZEEK_MAN_INSTALL_PATH}/man8
+    FILES_MATCHING
+    PATTERN "*.8")

@@ -1,21 +1,21 @@
include(InstallPackageConfigFile)

-install(DIRECTORY ./ DESTINATION ${ZEEK_SCRIPT_INSTALL_PATH} FILES_MATCHING
+install(
+    DIRECTORY ./
+    DESTINATION ${ZEEK_SCRIPT_INSTALL_PATH}
+    FILES_MATCHING
    PATTERN "site/local*" EXCLUDE
    PATTERN "*.zeek"
    PATTERN "*.sig"
-    PATTERN "*.fp"
-)
+    PATTERN "*.fp")

-# Install local script as a config file since it's meant to be modified directly.
-InstallPackageConfigFile(
-    ${CMAKE_CURRENT_SOURCE_DIR}/site/local.zeek
-    ${ZEEK_SCRIPT_INSTALL_PATH}/site
-    local.zeek)
+# Install local script as a config file since it's meant to be modified
+# directly.
+InstallPackageConfigFile(${CMAKE_CURRENT_SOURCE_DIR}/site/local.zeek
+                         ${ZEEK_SCRIPT_INSTALL_PATH}/site local.zeek)

# Substitute values in templated script files, and install them.
-configure_file(
-    ${CMAKE_CURRENT_SOURCE_DIR}/base/misc/installation.zeek.in
+configure_file(${CMAKE_CURRENT_SOURCE_DIR}/base/misc/installation.zeek.in
               ${CMAKE_CURRENT_BINARY_DIR}/base/misc/installation.zeek @ONLY)

install(FILES ${CMAKE_CURRENT_BINARY_DIR}/base/misc/installation.zeek

@@ -318,7 +318,7 @@ function Cluster::get_node_count(node_type: NodeType): count

function Cluster::get_active_node_count(node_type: NodeType): count
    {
-    return |active_node_ids[node_type]|;
+    return node_type in active_node_ids ? |active_node_ids[node_type]| : 0;
    }

function is_enabled(): bool

@@ -377,7 +377,7 @@ event Cluster::hello(name: string, id: string) &priority=10

@pragma push ignore-deprecations
    if ( n$node_type == WORKER )
-        worker_count = |active_node_ids[WORKER]|;
+        worker_count = get_active_node_count(WORKER);
@pragma pop ignore-deprecations
    }

@@ -402,7 +402,7 @@ event Broker::peer_lost(endpoint: Broker::EndpointInfo, msg: string) &priority=1

@pragma push ignore-deprecations
    if ( n$node_type == WORKER )
-        worker_count = |active_node_ids[WORKER]|;
+        worker_count = get_active_node_count(WORKER);
@pragma pop ignore-deprecations

    event Cluster::node_down(node_name, endpoint$id);
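For illustration (not part of this change), the helper can now be called safely
for any node type and returns 0 when no such nodes are active:

    event zeek_init()
        {
        print fmt("active workers: %d", Cluster::get_active_node_count(Cluster::WORKER));
        }
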
@@ -22,6 +22,32 @@ redef Log::default_rotation_interval = 1 hrs;
## Alarm summary mail interval.
redef Log::default_mail_alarms_interval = 24 hrs;

## Generic log metadata rendered into the filename that zeek-archiver may interpret.
## This is populated with a log_suffix entry within zeek_init() when multiple
## logger nodes are defined in cluster-layout.zeek.
global log_metadata: table[string] of string;

## Encode the given table as a metadata part understood by zeek-archiver.
function encode_log_metadata(tbl: table[string] of string): string
    {
    local metadata_vec: vector of string;
    for ( k, v in log_metadata )
        {
        if ( |v| == 0 ) # Assume conscious decision to skip this entry.
            next;

        if ( /[,=]/ in k || /[,=]/ in v )
            {
            Reporter::warning(fmt("Invalid log_metadata: k='%s' v='%s'", k, v));
            next;
            }

        metadata_vec += fmt("%s=%s", strip(k), strip(v));
        }

    return join_string_vec(metadata_vec, ",");
    }

## This function will rotate logs in a format compatible with zeek-archiver.
## If you're using the Supervisor framework, this function will be used,
## if not, you can set :zeek:see:`Log::rotation_format_func` to this function.

@@ -30,6 +56,10 @@ function archiver_rotation_format_func(ri: Log::RotationFmtInfo): Log::RotationP
    local open_str = strftime(Log::default_rotation_date_format, ri$open);
    local close_str = strftime(Log::default_rotation_date_format, ri$close);
    local base = fmt("%s__%s__%s__", ri$path, open_str, close_str);

    if ( |log_metadata| > 0 )
        base = fmt("%s%s__", base, encode_log_metadata(log_metadata));

    local rval = Log::RotationPath($file_basename=base);
    return rval;
    }

@@ -42,6 +72,14 @@ redef Log::rotation_format_func = archiver_rotation_format_func;

redef LogAscii::enable_leftover_log_rotation = T;

event zeek_init()
    {
    if ( "log_suffix" in log_metadata )
        return;

    if ( Cluster::get_node_count(Cluster::LOGGER) > 1 )
        log_metadata["log_suffix"] = Cluster::node;
    }
@else

## Use the cluster's archive logging script.

@@ -510,11 +510,19 @@ function describe(f: fa_file): string
    return handler$describe(f);
    }

# Only warn once about un-registered get_file_handle()
global missing_get_file_handle_warned: table[Files::Tag] of bool &default=F;

event get_file_handle(tag: Files::Tag, c: connection, is_orig: bool) &priority=5
    {
    if ( tag !in registered_protocols )
        {
-        Reporter::warning(fmt("get_file_handle() invoked for %s", tag));
+        if ( ! missing_get_file_handle_warned[tag] )
+            {
+            missing_get_file_handle_warned[tag] = T;
+            Reporter::warning(fmt("get_file_handle() handler missing for %s", tag));
+            }

        set_file_handle(fmt("%s-fallback-%s-%s-%s", tag, c$uid, is_orig, network_time()));
        return;
        }

@@ -172,6 +172,14 @@ export {
    ## Default shell command to run on rotated files. Empty for none.
    const default_rotation_postprocessor_cmd = "" &redef;

    ## This table contains environment variables to be used for the
    ## :zeek:see:`Log::default_rotation_postprocessor_cmd` command
    ## when executed via :zeek:see:`Log::run_rotation_postprocessor_cmd`.
    ##
    ## The entries in this table will be prepended with ``ZEEK_ARG_``
    ## as done by :zeek:see:`system_env`.
    option default_rotation_postprocessor_cmd_env: table[string] of string = {};

    ## Specifies the default postprocessor function per writer type.
    ## Entries in this table are initialized by each writer type.
    const default_rotation_postprocessors: table[Writer] of function(info: RotationInfo) : bool &redef;

@@ -578,6 +586,7 @@ export {
    ## to postprocess a rotated log file.
    ##
    ## .. zeek:see:: Log::default_rotation_date_format
    ##    Log::default_rotation_postprocessor_cmd_env
    ##    Log::default_rotation_postprocessor_cmd
    ##    Log::default_rotation_postprocessors
    global run_rotation_postprocessor_cmd: function(info: RotationInfo, npath: string) : bool;

@@ -654,8 +663,7 @@ function default_path_func(id: ID, path: string, rec: any) : string
    return to_lower(id_str);
    }

-# Run post-processor on file. If there isn't any postprocessor defined,
-# we move the file to a nicer name.
+# Run post-processor on file.
function run_rotation_postprocessor_cmd(info: RotationInfo, npath: string) : bool
    {
    local pp_cmd = default_rotation_postprocessor_cmd;

@@ -668,11 +676,15 @@ function run_rotation_postprocessor_cmd(info: RotationInfo, npath: string) : boo

    # The date format is hard-coded here to provide a standardized
    # script interface.
-    system(fmt("%s %s %s %s %s %d %s",
+    #
+    # Note that system_env() does not clear the environment, it only
+    # adds entries from the given table. Unusual, but useful here.
+    system_env(fmt("%s %s %s %s %s %d %s",
        pp_cmd, safe_shell_quote(npath), safe_shell_quote(info$path),
        strftime("%y-%m-%d_%H.%M.%S", info$open),
        strftime("%y-%m-%d_%H.%M.%S", info$close),
-        info$terminating, writer));
+        info$terminating, writer),
+        Log::default_rotation_postprocessor_cmd_env);

    return T;
    }

@@ -210,6 +210,7 @@ export {
    ["spontaneous_FIN"] = ACTION_IGNORE,
    ["spontaneous_RST"] = ACTION_IGNORE,
    ["SMB_parsing_error"] = ACTION_LOG,
    ["SMB_discarded_messages_state"] = ACTION_LOG,
    ["no_smb_session_using_parsesambamsg"] = ACTION_LOG,
    ["smb_andx_command_failed_to_parse"] = ACTION_LOG,
    ["smb_tree_connect_andx_response_without_tree"] = ACTION_LOG_PER_CONN,

1
scripts/base/frameworks/spicy/__load__.zeek
Normal file
@@ -0,0 +1 @@
@load ./main.zeek

38
scripts/base/frameworks/spicy/init-bare.zeek
Normal file
@@ -0,0 +1,38 @@
module Spicy;

export {
    # doc-options-start
    ## Constant for testing if Spicy is available.
    const available = T;

    ## Show output of Spicy print statements.
    const enable_print = F &redef;

    ## Record and display profiling information, if compiled into the analyzer.
    const enable_profiling = F &redef;

    ## abort() instead of throwing HILTI exceptions.
    const abort_on_exceptions = F &redef;

    ## Include backtraces when reporting unhandled exceptions.
    const show_backtraces = F &redef;

    ## Maximum depth of recursive file analysis (Spicy analyzers only).
    const max_file_depth: count = 5 &redef;
    # doc-options-end

    # doc-types-start
    ## Result type for `Spicy::resource_usage()`. The values reflect resource
    ## usage as reported by the Spicy runtime system.
    type ResourceUsage: record {
        user_time : interval;           ##< user CPU time of the Zeek process
        system_time : interval;         ##< system CPU time of the Zeek process
        memory_heap : count;            ##< memory allocated on the heap by the Zeek process
        num_fibers : count;             ##< number of fibers currently in use
        max_fibers: count;              ##< maximum number of fibers ever in use
        max_fiber_stack_size: count;    ##< maximum fiber stack size ever in use
        cached_fibers: count;           ##< number of fibers currently cached
    };
    # doc-types-end
}
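A quick illustration (not part of this change) of toggling one of the options
above from a site script such as local.zeek:

    # Make Spicy-side print statements visible in Zeek's output.
    redef Spicy::enable_print = T;
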
81
scripts/base/frameworks/spicy/init-framework.zeek
Normal file
@@ -0,0 +1,81 @@
@load base/misc/version

# doc-common-start
module Spicy;

export {
    # doc-functions-start
    ## Enable a specific Spicy protocol analyzer if not already active. If this
    ## analyzer replaces a standard analyzer, that one will automatically be
    ## disabled.
    ##
    ## tag: analyzer to toggle
    ##
    ## Returns: true if the operation succeeded
    global enable_protocol_analyzer: function(tag: Analyzer::Tag) : bool;

    ## Disable a specific Spicy protocol analyzer if not already inactive. If
    ## this analyzer replaces a standard analyzer, that one will automatically
    ## be re-enabled.
    ##
    ## tag: analyzer to toggle
    ##
    ## Returns: true if the operation succeeded
    global disable_protocol_analyzer: function(tag: Analyzer::Tag) : bool;

    ## Enable a specific Spicy file analyzer if not already active. If this
    ## analyzer replaces a standard analyzer, that one will automatically be
    ## disabled.
    ##
    ## tag: analyzer to toggle
    ##
    ## Returns: true if the operation succeeded
    global enable_file_analyzer: function(tag: Files::Tag) : bool;

    ## Disable a specific Spicy file analyzer if not already inactive. If
    ## this analyzer replaces a standard analyzer, that one will automatically
    ## be re-enabled.
    ##
    ## tag: analyzer to toggle
    ##
    ## Returns: true if the operation succeeded
    global disable_file_analyzer: function(tag: Files::Tag) : bool;

    ## Returns current resource usage as reported by the Spicy runtime system.
    global resource_usage: function() : ResourceUsage;
    # doc-functions-end
}

# Marked with &is_used to suppress complaints when there aren't any
# Spicy file analyzers loaded, and hence this event can't be generated.
# The attribute is only supported for Zeek 5.0 and higher.
event spicy_analyzer_for_mime_type(a: Files::Tag, mt: string) &is_used
    {
    Files::register_for_mime_type(a, mt);
    }

function enable_protocol_analyzer(tag: Analyzer::Tag) : bool
    {
    return Spicy::__toggle_analyzer(tag, T);
    }

function disable_protocol_analyzer(tag: Analyzer::Tag) : bool
    {
    return Spicy::__toggle_analyzer(tag, F);
    }

function enable_file_analyzer(tag: Files::Tag) : bool
    {
    return Spicy::__toggle_analyzer(tag, T);
    }

function disable_file_analyzer(tag: Files::Tag) : bool
    {
    return Spicy::__toggle_analyzer(tag, F);
    }

function resource_usage() : ResourceUsage
    {
    return Spicy::__resource_usage();
    }
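A small usage sketch (not part of this change), reading the runtime statistics
exposed above at shutdown:

    event zeek_done()
        {
        local ru = Spicy::resource_usage();
        print fmt("Spicy fibers: %d in use (max %d), heap: %d bytes",
                  ru$num_fibers, ru$max_fibers, ru$memory_heap);
        }
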
15
scripts/base/frameworks/spicy/main.zeek
Normal file
@@ -0,0 +1,15 @@
@load base/frameworks/notice

module Spicy;

export {
    redef enum Notice::Type += { Spicy_Max_File_Depth_Exceeded };
}

event max_file_depth_exceeded(f: fa_file, args: Files::AnalyzerArgs, limit: count)
    {
    NOTICE([
        $note=Spicy::Spicy_Max_File_Depth_Exceeded,
        $msg=fmt("Maximum file depth exceeded for file %s", f$id)
    ]);
    }

@@ -891,6 +891,7 @@ type record_field: record {
    ## :zeek:see:`record_fields` (if it has one).
    value: any &optional;
    default_val: any &optional;  ##< The value of the :zeek:attr:`&default` attribute if defined.
    optional: bool;  ##< True if the field is :zeek:attr:`&optional`, else false.
};

## Table type used to map record field declarations to meta-information

@@ -1093,6 +1094,14 @@ type entropy_test_result: record {
    serial_correlation: double;  ##< Serial correlation coefficient.
};

## Return type for from_json BIF.
##
## .. zeek:see:: from_json
type from_json_result: record {
    v: any &optional;  ##< Parsed value.
    valid: bool;  ##< True if parsing was successful.
};

# TCP values for :zeek:see:`endpoint` *state* field.
# todo:: these should go into an enum to make them autodoc'able.
const TCP_INACTIVE = 0;  ##< Endpoint is still inactive.
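A hypothetical sketch of consuming this record, assuming ``from_json()`` takes
the JSON string and the target type; the ``Point`` type is made up for
illustration:

    type Point: record {
        x: count;
        y: count;
    };

    event zeek_init()
        {
        local res = from_json("{\"x\": 1, \"y\": 2}", Point);
        if ( res$valid )
            print (res$v as Point)$x;
        }
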
@@ -1731,8 +1740,12 @@ type ip4_hdr: record {
    tos: count;      ##< Type of service.
    len: count;      ##< Total length.
    id: count;       ##< Identification.
    DF: bool;        ##< True if the packet's *don't fragment* flag is set.
    MF: bool;        ##< True if the packet's *more fragments* flag is set.
    offset: count;   ##< Fragment offset.
    ttl: count;      ##< Time to live.
    p: count;        ##< Protocol.
    sum: count;      ##< Checksum.
    src: addr;       ##< Source address.
    dst: addr;       ##< Destination address.
};

@@ -2983,6 +2996,16 @@ export {
    ##
    ## .. zeek:see:: smb_pipe_connect_heuristic
    const SMB::pipe_filenames: set[string] &redef;

    ## The maximum number of messages for which to retain state
    ## about offsets, fids, or tree ids within the parser. When
    ## the limit is reached, internal parser state is discarded
    ## and :zeek:see:`smb2_discarded_messages_state` is raised.
    ##
    ## Setting this to zero will disable the functionality.
    ##
    ## .. zeek:see:: smb2_discarded_messages_state
    const SMB::max_pending_messages = 1000 &redef;
}

module SMB1;

@@ -5679,3 +5702,7 @@ event net_done(t: time)
@endif

@load base/packet-protocols

@if ( have_spicy() )
@load base/frameworks/spicy/init-bare
@endif

@@ -44,6 +44,10 @@
@load base/frameworks/netcontrol
@load base/frameworks/telemetry

@if ( have_spicy() )
@load base/frameworks/spicy
@endif

@load base/protocols/conn
@load base/protocols/dce-rpc
@load base/protocols/dhcp

@@ -17,6 +17,10 @@
# Load BiFs defined by plugins.
@load base/bif/plugins

@if ( have_spicy() )
@load base/frameworks/spicy/init-framework
@endif

# This sets up secondary/subdir BIFs such that they can be used by any
# further scripts within their global initializations and is intended to be
# the last thing done within this script. It's called within @if simply so

@@ -20,6 +20,9 @@
@load base/packet-protocols/udp
@load base/packet-protocols/tcp
@load base/packet-protocols/icmp
@load base/packet-protocols/llc
@load base/packet-protocols/novell_802_3
@load base/packet-protocols/snap

@load base/packet-protocols/gre
@load base/packet-protocols/iptunnel

@@ -1,13 +1,13 @@
module PacketAnalyzer::ETHERNET;

-export {
-    ## IEEE 802.2 SNAP analyzer
-    global snap_analyzer: PacketAnalyzer::Tag &redef;
-    ## Novell raw IEEE 802.3 analyzer
-    global novell_raw_analyzer: PacketAnalyzer::Tag &redef;
-    ## IEEE 802.2 LLC analyzer
-    global llc_analyzer: PacketAnalyzer::Tag &redef;
+export
+    {
+    # We use some magic numbers here to denote these. The values here are outside the range of the
+    # standard ethertypes, which should always be above 1536.
+    const SNAP_FORWARDING_KEY : count = 0x0001;
+    const NOVELL_FORWARDING_KEY : count = 0x0002;
+    const LLC_FORWARDING_KEY : count = 0x0003;
    }

event zeek_init() &priority=20
    {

@@ -22,4 +22,11 @@ event zeek_init() &priority=20
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_ETHERNET, 0x9100, PacketAnalyzer::ANALYZER_VLAN);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_ETHERNET, 0x8864, PacketAnalyzer::ANALYZER_PPPOE);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_ETHERNET, 0x8926, PacketAnalyzer::ANALYZER_VNTAG);

    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_ETHERNET, SNAP_FORWARDING_KEY,
                                             PacketAnalyzer::ANALYZER_SNAP);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_ETHERNET, NOVELL_FORWARDING_KEY,
                                             PacketAnalyzer::ANALYZER_NOVELL_802_3);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_ETHERNET, LLC_FORWARDING_KEY,
                                             PacketAnalyzer::ANALYZER_LLC);
    }
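For illustration only: the same registration call can forward any other
ethertype to one of the new analyzers. The ethertype below (an IEEE local
experimental value) is an arbitrary example, not something this change
registers:

    event zeek_init() &priority=10
        {
        PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_ETHERNET,
                                                 0x88B5, PacketAnalyzer::ANALYZER_LLC);
        }
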
@@ -3,3 +3,34 @@ module PacketAnalyzer::IPTUNNEL;
export {
    const default_analyzer: PacketAnalyzer::Tag = PacketAnalyzer::ANALYZER_IP &redef;
}

event zeek_init() &priority=20
    {
    # ARUBA is dispatched to 802.11. This currently relies on GRE to set
    # gre_link_type = DLT_IEEE_802_11 as otherwise DLT_RAW is used.
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8200, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8210, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8220, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8230, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8240, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8250, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8260, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8270, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8280, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8290, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x82A0, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x82B0, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x82C0, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x82D0, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x82E0, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x82F0, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8300, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8310, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8320, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8330, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8340, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8350, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8360, PacketAnalyzer::ANALYZER_IEEE802_11);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_IPTUNNEL, 0x8370, PacketAnalyzer::ANALYZER_IEEE802_11);
    # TODO: how to handle 0x9000 here, which should just be dropped?
    }

1
scripts/base/packet-protocols/llc/__load__.zeek
Normal file
@@ -0,0 +1 @@
@load ./main

1
scripts/base/packet-protocols/llc/main.zeek
Normal file
@@ -0,0 +1 @@
module PacketAnalyzer::LLC;

1
scripts/base/packet-protocols/novell_802_3/__load__.zeek
Normal file
@@ -0,0 +1 @@
@load ./main

6
scripts/base/packet-protocols/novell_802_3/main.zeek
Normal file
@@ -0,0 +1,6 @@
module PacketAnalyzer::NOVELL_802_3;

export {
    # The Novell 802.3 protocol should expect an IPX analyzer here. Since
    # one doesn't exist yet, the default analyzer is left undefined.
}

1
scripts/base/packet-protocols/snap/__load__.zeek
Normal file
@@ -0,0 +1 @@
@load ./main

9
scripts/base/packet-protocols/snap/main.zeek
Normal file
@@ -0,0 +1,9 @@
module PacketAnalyzer::SNAP;

event zeek_init() &priority=20
    {
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_SNAP, 0x0800, PacketAnalyzer::ANALYZER_IP);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_SNAP, 0x86DD, PacketAnalyzer::ANALYZER_IP);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_SNAP, 0x0806, PacketAnalyzer::ANALYZER_ARP);
    PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_SNAP, 0x8035, PacketAnalyzer::ANALYZER_ARP);
    }

@@ -1,5 +1,14 @@
 module PacketAnalyzer::VLAN;
 
+export
+	{
+	# We use some magic numbers here to denote these. The values here are outside the range of the
+	# standard ethertypes, which should always be above 1536.
+	const SNAP_FORWARDING_KEY : count = 0x0001;
+	const NOVELL_FORWARDING_KEY : count = 0x0002;
+	const LLC_FORWARDING_KEY : count = 0x0003;
+	}
+
 event zeek_init() &priority=20
 	{
 	PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_VLAN, 0x8847, PacketAnalyzer::ANALYZER_MPLS);
@@ -10,4 +19,11 @@ event zeek_init() &priority=20
 	PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_VLAN, 0x8035, PacketAnalyzer::ANALYZER_ARP);
 	PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_VLAN, 0x8100, PacketAnalyzer::ANALYZER_VLAN);
 	PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_VLAN, 0x8864, PacketAnalyzer::ANALYZER_PPPOE);
+
+	PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_VLAN, SNAP_FORWARDING_KEY,
+	                                         PacketAnalyzer::ANALYZER_SNAP);
+	PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_VLAN, NOVELL_FORWARDING_KEY,
+	                                         PacketAnalyzer::ANALYZER_NOVELL_802_3);
+	PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_VLAN, LLC_FORWARDING_KEY,
+	                                         PacketAnalyzer::ANALYZER_LLC);
 	}
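
The registrations above can be extended from site policy without touching the shipped scripts. A minimal sketch, assuming the analyzer tags introduced in this change; the extra ethertype value is purely hypothetical:

# Sketch only (not part of the diff): route one additional, made-up ethertype
# seen on VLAN frames into the new LLC analyzer, mirroring the calls above.
event zeek_init() &priority=10
	{
	PacketAnalyzer::register_packet_analyzer(PacketAnalyzer::ANALYZER_VLAN, 0x88B5, PacketAnalyzer::ANALYZER_LLC);
	}
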
@@ -407,8 +407,10 @@ event ftp_reply(c: connection, code: count, msg: string, cont_resp: bool) &prior
 			}
 		}
 
-	if ( [c$ftp$cmdarg$cmd, code] in directory_cmds )
+	if ( [c$ftp$cmdarg$cmd, code] in directory_cmds && ! c$ftp$cmdarg$cwd_consumed )
 		{
+		c$ftp$cmdarg$cwd_consumed = T;
+
 		if ( c$ftp$cmdarg$cmd == "CWD" )
 			c$ftp$cwd = build_path_compressed(c$ftp$cwd, c$ftp$cmdarg$arg);
 
@@ -10,6 +10,9 @@ export {
 		arg: string &default="";
 		## Counter to track how many commands have been executed.
 		seq: count &default=0;
+		## Flag indicating if the arg of this CmdArg has been used
+		## to update cwd of c$ftp.
+		cwd_consumed: bool &default=F;
 	};
 
 	## Structure for tracking pending commands in the event that the client
@@ -44,6 +44,13 @@ export {
 		PRINT_CLOSE,
 	};
 
+	## Whether to reset a connection's SMB script state whenever a
+	## :zeek:see:`smb2_discarded_messages_state` event is raised.
+	##
+	## This setting protects from unbounded script state growth in
+	## environments with high capture loss or traffic anomalies.
+	option enable_clear_script_state = T;
+
 	## This record is for the smb_files.log
 	type FileInfo: record {
 		## Time when the file was first discovered.
@@ -1,3 +1,5 @@
+@load base/frameworks/notice/weird
+
 @load ./main
 
 module SMB2;
@@ -344,3 +346,25 @@ event smb2_close_request(c: connection, hdr: SMB2::Header, file_id: SMB2::GUID)
 		#Reporter::warning("attempting to close an unknown file!");
 		}
 	}
+
+event smb2_discarded_messages_state(c: connection, state: string)
+	{
+	if ( ! c?$smb_state )
+		return;
+
+	local addl = fmt("state=%s fid_map=%s tid_map=%s pending_cmds=%s pipe_map=%s",
+	                 state, |c$smb_state$fid_map|, |c$smb_state$tid_map|,
+	                 |c$smb_state$pending_cmds|, |c$smb_state$pipe_map|);
+	Reporter::conn_weird("SMB_discarded_messages_state", c, addl, "SMB2");
+
+	if ( ! SMB::enable_clear_script_state )
+		return;
+
+	# Wipe out script-level state for this connection.
+	c$smb_state$fid_map = table();
+	c$smb_state$pending_cmds = table();
+	# Not expected to grow overly large and the original
+	# zeek-smb-clear-state package didn't reset these either.
+	# c$smb_state$tid_map = table();
+	# c$smb_state$pipe_map = table();
+	}
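
Deployments that prefer the old behavior can flip the new option from their site policy. A minimal sketch, assuming the default of T shown above and the usual local.zeek redef mechanism:

# Sketch only (not part of the diff): keep SMB script state even when
# smb2_discarded_messages_state fires.
@load base/protocols/smb

redef SMB::enable_clear_script_state = F;
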
@@ -11,6 +11,7 @@ export {
 	const DTLSv10 = 0xFEFF;
 	# DTLSv11 does not exist
 	const DTLSv12 = 0xFEFD;
+	const DTLSv13 = 0xFEFC;
 
 	## Mapping between the constants and string values for SSL/TLS versions.
 	const version_strings: table[count] of string = {
@@ -21,7 +22,8 @@ export {
 		[TLSv12] = "TLSv12",
 		[TLSv13] = "TLSv13",
 		[DTLSv10] = "DTLSv10",
-		[DTLSv12] = "DTLSv12"
+		[DTLSv12] = "DTLSv12",
+		[DTLSv13] = "DTLSv13"
 	} &default=function(i: count):string
 		{
 		if ( i/0xFF == 0x7F ) # TLS 1.3 draft
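
With the DTLSv13 constant and its string mapping in place, DTLS 1.3 sessions become visible to script land. A minimal sketch of reacting to them, assuming SSL::Info$version carries the mapped string as it does for the other entries of version_strings:

# Sketch only (not part of the diff): report DTLS 1.3 sessions once the
# handshake completes.
@load base/protocols/ssl

event ssl_established(c: connection)
	{
	if ( c?$ssl && c$ssl?$version && c$ssl$version == "DTLSv13" )
		print fmt("DTLS 1.3 session %s between %s and %s", c$uid, c$id$orig_h, c$id$resp_h);
	}
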
@@ -1,44 +1,35 @@
 #
 # Do not edit this file. This file is automatically generated by gen-ct-list.pl
-# File generated at Thu Oct 6 13:17:02 2022
+# File generated at Wed May 3 10:30:21 2023
 # File generated from https://www.gstatic.com/ct/log_list/v3/log_list.json
-# Source file generated at: 2022-10-05T12:55:24Z
-# Source file version: 13.6
+# Source file generated at: 2023-05-02T12:55:51Z
+# Source file version: 20.60
 #
 
 @load base/protocols/ssl
 module SSL;
 redef ct_logs += {
["\x29\x79\xbe\xf0\x9e\x39\x39\x21\xf0\x56\x73\x9f\x63\xa5\x77\xe5\xbe\x57\x7d\x9c\x60\x0a\xf8\xf9\x4d\x5d\x26\x5c\x25\x5d\xc7\x84"] = CTInfo($description="Google 'Argon2022' log", $operator="Google", $url="https://ct.googleapis.com/logs/argon2022/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x78\x83\xdc\xe9\xf1\xa6\xb8\x18\x3a\x00\x99\x2f\xff\x3e\xcd\x15\xc9\x26\x1e\xf7\xff\x3a\xa9\xa3\x72\x16\x49\xeb\x09\xb6\xa8\xdd\xb4\xd2\x47\x91\x0e\x0d\xf9\xd9\xd5\xa9\x8b\xb0\x87\x9d\x25\x79\xd4\x1a\x50\x60\x08\xf5\x09\x06\x39\x26\xe4\x40\xc2\xba\xc3\xc2"),
["\xe8\x3e\xd0\xda\x3e\xf5\x06\x35\x32\xe7\x57\x28\xbc\x89\x6b\xc9\x03\xd3\xcb\xd1\x11\x6b\xec\xeb\x69\xe1\x77\x7d\x6d\x06\xbd\x6e"] = CTInfo($description="Google 'Argon2023' log", $operator="Google", $url="https://ct.googleapis.com/logs/argon2023/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xd0\x90\x8f\x64\x52\x4e\x42\xac\x84\xb6\x2e\x4c\xf2\x3d\x77\x00\xb3\x77\x08\x05\x47\xaa\x45\x4c\xe3\x2c\x8e\x70\xa5\x82\xbb\x6c\xb2\x7b\x9c\x98\x7a\xa0\xe9\x11\x76\x28\x00\xb2\x20\xb4\xcd\xd3\x98\x7b\x4d\x96\x27\xe6\xb7\xee\x22\x6a\xd1\xb0\x2e\x91\x77\x78"),
["\xee\xcd\xd0\x64\xd5\xdb\x1a\xce\xc5\x5c\xb7\x9d\xb4\xcd\x13\xa2\x32\x87\x46\x7c\xbc\xec\xde\xc3\x51\x48\x59\x46\x71\x1f\xb5\x9b"] = CTInfo($description="Google 'Argon2024' log", $operator="Google", $url="https://ct.googleapis.com/logs/us1/argon2024/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x1d\xb9\x6c\xa9\xcb\x69\x94\xc5\x5c\xe6\xb6\xa6\x03\xbb\xd2\xb8\xdc\x54\x43\x17\x28\x99\x0c\x06\x01\x50\x1d\x9d\x64\xc0\x59\x46\x2b\xdc\xc8\x03\x1d\x05\xb4\x2d\xa8\x09\xf7\x99\x41\xed\x04\xfb\xe5\x57\xba\x26\x04\xf6\x11\x52\xce\x14\x65\x3b\x2f\x76\x2b\xc0"),
["\x46\xa5\x55\xeb\x75\xfa\x91\x20\x30\xb5\xa2\x89\x69\xf4\xf3\x7d\x11\x2c\x41\x74\xbe\xfd\x49\xb8\x85\xab\xf2\xfc\x70\xfe\x6d\x47"] = CTInfo($description="Google 'Xenon2022' log", $operator="Google", $url="https://ct.googleapis.com/logs/xenon2022/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xf9\x64\xbd\x15\x2c\x40\x62\x50\x95\x13\x38\x3c\xc7\x21\xb0\x3a\xb9\x8f\xa2\x7a\x15\xd7\x89\xd6\x8e\x31\x13\x00\x87\x59\xbc\xbb\xee\x90\xfc\xc8\x58\x13\x0a\xbf\xab\x43\x36\x54\x23\xa4\x81\xcd\xad\x47\x14\xb7\x58\xa0\x44\xfa\x6a\xa0\xa0\xd7\xc3\x63\x1e\x2b"),
["\xad\xf7\xbe\xfa\x7c\xff\x10\xc8\x8b\x9d\x3d\x9c\x1e\x3e\x18\x6a\xb4\x67\x29\x5d\xcf\xb1\x0c\x24\xca\x85\x86\x34\xeb\xdc\x82\x8a"] = CTInfo($description="Google 'Xenon2023' log", $operator="Google", $url="https://ct.googleapis.com/logs/xenon2023/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x72\x16\x3e\x0b\xef\xef\xce\x3e\x60\xdd\x95\xcb\x63\x7a\xb9\xa9\x8d\x4a\x6f\x6c\xdc\x61\x80\xa6\x45\x5e\x2f\x83\xac\x94\xf3\x85\x88\xd0\xa5\x74\xd0\x7b\x8e\xff\xc5\xee\x42\xa2\xf0\x2d\x93\xe3\xc2\xd0\xb2\x99\xe2\xe1\x42\xe9\xd2\xc6\x00\x27\x69\x74\xae\xce"),
["\x76\xff\x88\x3f\x0a\xb6\xfb\x95\x51\xc2\x61\xcc\xf5\x87\xba\x34\xb4\xa4\xcd\xbb\x29\xdc\x68\x42\x0a\x9f\xe6\x67\x4c\x5a\x3a\x74"] = CTInfo($description="Google 'Xenon2024' log", $operator="Google", $url="https://ct.googleapis.com/logs/eu1/xenon2024/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xb9\x60\xe0\x34\x1e\x35\xe4\x65\x00\x93\x4f\x90\x09\xbd\x5a\xec\x44\xdd\x8c\x0f\xce\xed\x11\x3e\x2a\x59\x46\x9a\x31\xb6\xc7\x99\xf7\xdc\xef\x3d\xcd\x8f\x86\xc2\x35\xa5\x3e\xdc\x29\xba\xbb\xf2\x54\xe2\xa8\x0c\x83\x08\x51\x06\xde\x21\x6d\x36\x50\x8e\x38\x4d"),
["\x29\x3c\x51\x96\x54\xc8\x39\x65\xba\xaa\x50\xfc\x58\x07\xd4\xb7\x6f\xbf\x58\x7a\x29\x72\xdc\xa4\xc3\x0c\xf4\xe5\x45\x47\xf4\x78"] = CTInfo($description="Google 'Icarus' log", $operator="Google", $url="https://ct.googleapis.com/icarus/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x4e\xd2\xbc\xbf\xb3\x08\x0a\xf7\xb9\xea\xa4\xc7\x1c\x38\x61\x04\xeb\x95\xe0\x89\x54\x68\x44\xb1\x66\xbc\x82\x7e\x4f\x50\x6c\x6f\x5c\xa3\xf0\xaa\x3e\xf4\xec\x80\xf0\xdb\x0a\x9a\x7a\xa0\x5b\x72\x00\x7c\x25\x0e\x19\xef\xaf\xb2\x62\x8d\x74\x43\xf4\x26\xf6\x14"),
["\xa4\xb9\x09\x90\xb4\x18\x58\x14\x87\xbb\x13\xa2\xcc\x67\x70\x0a\x3c\x35\x98\x04\xf9\x1b\xdf\xb8\xe3\x77\xcd\x0e\xc8\x0d\xdc\x10"] = CTInfo($description="Google 'Pilot' log", $operator="Google", $url="https://ct.googleapis.com/pilot/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x7d\xa8\x4b\x12\x29\x80\xa3\x3d\xad\xd3\x5a\x77\xb8\xcc\xe2\x88\xb3\xa5\xfd\xf1\xd3\x0c\xcd\x18\x0c\xe8\x41\x46\xe8\x81\x01\x1b\x15\xe1\x4b\xf1\x1b\x62\xdd\x36\x0a\x08\x18\xba\xed\x0b\x35\x84\xd0\x9e\x40\x3c\x2d\x9e\x9b\x82\x65\xbd\x1f\x04\x10\x41\x4c\xa0"),
["\xee\x4b\xbd\xb7\x75\xce\x60\xba\xe1\x42\x69\x1f\xab\xe1\x9e\x66\xa3\x0f\x7e\x5f\xb0\x72\xd8\x83\x00\xc4\x7b\x89\x7a\xa8\xfd\xcb"] = CTInfo($description="Google 'Rocketeer' log", $operator="Google", $url="https://ct.googleapis.com/rocketeer/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x20\x5b\x18\xc8\x3c\xc1\x8b\xb3\x31\x08\x00\xbf\xa0\x90\x57\x2b\xb7\x47\x8c\x6f\xb5\x68\xb0\x8e\x90\x78\xe9\xa0\x73\xea\x4f\x28\x21\x2e\x9c\xc0\xf4\x16\x1b\xaa\xf9\xd5\xd7\xa9\x80\xc3\x4e\x2f\x52\x3c\x98\x01\x25\x46\x24\x25\x28\x23\x77\x2d\x05\xc2\x40\x7a"),
["\xbb\xd9\xdf\xbc\x1f\x8a\x71\xb5\x93\x94\x23\x97\xaa\x92\x7b\x47\x38\x57\x95\x0a\xab\x52\xe8\x1a\x90\x96\x64\x36\x8e\x1e\xd1\x85"] = CTInfo($description="Google 'Skydiver' log", $operator="Google", $url="https://ct.googleapis.com/skydiver/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x12\x6c\x86\x0e\xf6\x17\xb1\x12\x6c\x37\x25\xd2\xad\x87\x3d\x0e\x31\xec\x21\xad\xb1\xcd\xbe\x14\x47\xb6\x71\x56\x85\x7a\x9a\xb7\x3d\x89\x90\x7b\xc6\x32\x3a\xf8\xda\xce\x8b\x01\xfe\x3f\xfc\x71\x91\x19\x8e\x14\x6e\x89\x7a\x5d\xb4\xab\x7e\xe1\x4e\x1e\x7c\xac"),
["\x41\xc8\xca\xb1\xdf\x22\x46\x4a\x10\xc6\xa1\x3a\x09\x42\x87\x5e\x4e\x31\x8b\x1b\x03\xeb\xeb\x4b\xc7\x68\xf0\x90\x62\x96\x06\xf6"] = CTInfo($description="Cloudflare 'Nimbus2022' Log", $operator="Cloudflare", $url="https://ct.cloudflare.com/logs/nimbus2022/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x48\xb2\x47\x4e\x50\x32\x72\x62\x4a\x0d\x0c\x48\xbf\xad\x29\x64\x6f\x20\xdf\x79\x52\x63\x16\x29\x0a\x2e\x60\xb5\xe2\x3a\x1c\xb7\xaf\x59\xb1\x55\x09\xdb\x59\xc7\xe9\xbd\x6f\xed\x0b\xaf\x05\x96\x97\xff\x3b\x9a\x43\x4d\xeb\x11\x34\x33\x8a\xe7\xac\x83\xc0\xff"),
["\x7a\x32\x8c\x54\xd8\xb7\x2d\xb6\x20\xea\x38\xe0\x52\x1e\xe9\x84\x16\x70\x32\x13\x85\x4d\x3b\xd2\x2b\xc1\x3a\x57\xa3\x52\xeb\x52"] = CTInfo($description="Cloudflare 'Nimbus2023' Log", $operator="Cloudflare", $url="https://ct.cloudflare.com/logs/nimbus2023/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x8b\xff\x2d\x92\x18\xcb\x46\x9d\x12\x5e\xb9\x59\x75\x3c\xcd\x91\x37\x7a\x1e\xa9\x9c\x99\x78\x83\x27\x3d\xdf\x01\xd5\x8b\x80\xe8\x63\x9a\xfe\x26\xa2\x1b\xd1\x87\x05\xee\x97\xd6\xe0\x5b\x43\x83\x81\x1c\x02\xf5\x41\x80\x80\x7f\xef\xa4\x61\xcf\xbc\x84\xb5\xa8"),
["\xda\xb6\xbf\x6b\x3f\xb5\xb6\x22\x9f\x9b\xc2\xbb\x5c\x6b\xe8\x70\x91\x71\x6c\xbb\x51\x84\x85\x34\xbd\xa4\x3d\x30\x48\xd7\xfb\xab"] = CTInfo($description="Cloudflare 'Nimbus2024' Log", $operator="Cloudflare", $url="https://ct.cloudflare.com/logs/nimbus2024/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x77\xb1\x9b\x7b\x8f\xe6\x8b\x35\xfe\x3a\x92\x29\x2d\xac\x8a\x8d\x51\x8a\x25\xfc\x93\xb6\xd7\xa0\x8b\x29\x37\x71\x1d\x33\xca\xcc\x33\xea\x28\xb9\x1f\xe2\xac\xc3\xa9\x5d\xdd\x97\xbe\xf6\x9e\x94\x25\xdd\x36\x81\xd1\xeb\x5d\x29\xc3\x2b\x44\xf1\x5b\xca\x15\x48"),
["\x56\x14\x06\x9a\x2f\xd7\xc2\xec\xd3\xf5\xe1\xbd\x44\xb2\x3e\xc7\x46\x76\xb9\xbc\x99\x11\x5c\xc0\xef\x94\x98\x55\xd6\x89\xd0\xdd"] = CTInfo($description="DigiCert Log Server", $operator="DigiCert", $url="https://ct1.digicert-ct.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x02\x46\xc5\xbe\x1b\xbb\x82\x40\x16\xe8\xc1\xd2\xac\x19\x69\x13\x59\xf8\xf8\x70\x85\x46\x40\xb9\x38\xb0\x23\x82\xa8\x64\x4c\x7f\xbf\xbb\x34\x9f\x4a\x5f\x28\x8a\xcf\x19\xc4\x00\xf6\x36\x06\x93\x65\xed\x4c\xf5\xa9\x21\x62\x5a\xd8\x91\xeb\x38\x24\x40\xac\xe8"),
["\x87\x75\xbf\xe7\x59\x7c\xf8\x8c\x43\x99\x5f\xbd\xf3\x6e\xff\x56\x8d\x47\x56\x36\xff\x4a\xb5\x60\xc1\xb4\xea\xff\x5e\xa0\x83\x0f"] = CTInfo($description="DigiCert Log Server 2", $operator="DigiCert", $url="https://ct2.digicert-ct.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xcc\x5d\x39\x2f\x66\xb8\x4c\x7f\xc1\x2e\x03\xa1\x34\xa3\xe8\x8a\x86\x02\xae\x4a\x11\xc6\xf7\x26\x6a\x37\x9b\xf0\x38\xf8\x5d\x09\x8d\x63\xe8\x31\x6b\x86\x66\xcf\x79\xb3\x25\x3c\x1e\xdf\x78\xb4\xa8\xc5\x69\xfa\xb7\xf0\x82\x79\x62\x43\xf6\xcc\xfe\x81\x66\x84"),
["\x22\x45\x45\x07\x59\x55\x24\x56\x96\x3f\xa1\x2f\xf1\xf7\x6d\x86\xe0\x23\x26\x63\xad\xc0\x4b\x7f\x5d\xc6\x83\x5c\x6e\xe2\x0f\x02"] = CTInfo($description="DigiCert Yeti2022 Log", $operator="DigiCert", $url="https://yeti2022.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x9f\xf8\xd8\x1d\xde\xfb\x5b\x51\xb5\xfb\x5d\xf5\xb5\xde\x66\x11\xb0\x9d\x5f\xfd\x6f\xfc\xa8\x98\x5b\x98\x4f\x2d\xc3\x91\x3a\xfb\xfe\xc4\x0f\x0d\xc3\x60\x43\x8c\x1e\xf2\xf9\x11\xb2\xba\xd0\xf6\xbc\xa5\xd2\xb6\x9f\xf9\x5c\x87\xa2\x7d\xfc\xd4\x7d\xd6\x13\x26"),
["\x35\xcf\x19\x1b\xbf\xb1\x6c\x57\xbf\x0f\xad\x4c\x6d\x42\xcb\xbb\xb6\x27\x20\x26\x51\xea\x3f\xe1\x2a\xef\xa8\x03\xc3\x3b\xd6\x4c"] = CTInfo($description="DigiCert Yeti2023 Log", $operator="DigiCert", $url="https://yeti2023.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x7d\x0d\x03\xb1\xd5\x98\x8a\xdc\xf0\x15\x3b\xc6\xdc\x5e\x0d\x6e\x3f\x0d\xbf\x95\xc8\x55\x8c\xd0\xa6\x4c\x96\xb1\x4e\x27\xb9\x26\x25\x99\xcc\x2b\x02\x9e\xa6\xd3\xdd\x9f\xb1\xd5\xc4\xc3\xac\x35\x04\x07\x87\x97\x36\xaa\xad\x28\x0d\x7f\x2b\xd9\x68\x9f\x72\xd1"),
["\x48\xb0\xe3\x6b\xda\xa6\x47\x34\x0f\xe5\x6a\x02\xfa\x9d\x30\xeb\x1c\x52\x01\xcb\x56\xdd\x2c\x81\xd9\xbb\xbf\xab\x39\xd8\x84\x73"] = CTInfo($description="DigiCert Yeti2024 Log", $operator="DigiCert", $url="https://yeti2024.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x57\xb8\xc1\x6f\x30\xa4\x7f\x2e\xe4\xf0\xd0\xd9\x60\x62\x13\x95\xe3\x7a\xe3\x4e\x53\xc3\xb3\xb8\x73\x85\xc1\x18\x0d\x23\x0e\x58\x84\xd2\x78\xef\x9b\xb3\x1e\x2c\x1a\xde\xc1\x8f\x81\x1b\x19\x44\x58\xb7\x00\x77\x60\x20\x1a\x72\xd8\x82\xde\xae\x9e\xb1\xc6\x4b"),
["\x7d\x59\x1e\x12\xe1\x78\x2a\x7b\x1c\x61\x67\x7c\x5e\xfd\xf8\xd0\x87\x5c\x14\xa0\x4e\x95\x9e\xb9\x03\x2f\xd9\x0e\x8c\x2e\x79\xb8"] = CTInfo($description="DigiCert Yeti2025 Log", $operator="DigiCert", $url="https://yeti2025.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xdf\x95\x00\x5e\x10\xc1\x01\xf7\x37\xe3\x10\x74\xd1\xff\xb2\xca\x90\xed\x32\x99\x5f\x0c\x39\xfe\xa1\xd1\x13\x11\xac\xd1\xb3\x73\x93\x20\xc2\x13\x3c\x4c\xb5\x7a\x52\x86\x86\x3d\xe3\x95\x24\x7c\xd8\x91\x98\x48\x3b\xf0\xf0\xdf\x21\xf1\xb0\x81\x5a\x59\x25\x43"),
["\x51\xa3\xb0\xf5\xfd\x01\x79\x9c\x56\x6d\xb8\x37\x78\x8f\x0c\xa4\x7a\xcc\x1b\x27\xcb\xf7\x9e\x88\x42\x9a\x0d\xfe\xd4\x8b\x05\xe5"] = CTInfo($description="DigiCert Nessie2022 Log", $operator="DigiCert", $url="https://nessie2022.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x27\x24\xdd\x68\x03\x28\xcb\xfe\x63\xbe\x0e\x11\x47\x4d\x7d\x17\x68\xa1\x11\x5d\x4c\x71\xc9\x41\x28\xc7\xb6\xa2\x4b\x97\xec\xc0\xaf\xfc\x2f\x3b\xbf\xe9\xf1\xb1\xfc\xf5\x01\xff\xa9\xfb\x49\x40\x0c\x63\x24\x98\xd7\x79\x2e\xa6\x55\xab\x16\xc6\xbe\x51\xd8\x71"),
["\xb3\x73\x77\x07\xe1\x84\x50\xf8\x63\x86\xd6\x05\xa9\xdc\x11\x09\x4a\x79\x2d\xb1\x67\x0c\x0b\x87\xdc\xf0\x03\x0e\x79\x36\xa5\x9a"] = CTInfo($description="DigiCert Nessie2023 Log", $operator="DigiCert", $url="https://nessie2023.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x11\x7b\xbc\x89\x0c\x12\x09\x14\x9f\xd8\x26\xc8\x4c\x6a\x54\xa4\x1b\x45\x56\xdf\x3e\x23\x42\x14\xd1\xdd\x42\xdf\xa2\xdf\x7b\x5f\x9f\x6f\x07\x5a\x23\x46\x79\x16\x4b\x5f\x33\x67\xc1\xa0\x8d\x5b\x5c\x17\x75\xf2\x4d\xa0\x80\xa1\x98\x1a\x07\x59\x06\x02\xca\x4e"),
["\x73\xd9\x9e\x89\x1b\x4c\x96\x78\xa0\x20\x7d\x47\x9d\xe6\xb2\xc6\x1c\xd0\x51\x5e\x71\x19\x2a\x8c\x6b\x80\x10\x7a\xc1\x77\x72\xb5"] = CTInfo($description="DigiCert Nessie2024 Log", $operator="DigiCert", $url="https://nessie2024.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x2d\xfc\xa2\x7b\x36\xbf\x56\x91\xe9\xfe\x3f\xe8\x3d\xfc\xc3\xa7\xe0\x61\x52\xea\x2c\xe9\x05\xa3\x9f\x27\x17\x81\x05\x70\x6b\x81\x61\x44\x8a\xf8\x3b\x10\x80\x42\xed\x03\x2f\x00\x50\x21\xfc\x41\x54\x84\xa3\x54\xd5\x2e\xb2\x7a\x16\x4b\x2a\x1f\x2b\x66\x04\x2b"),
["\xe6\xd2\x31\x63\x40\x77\x8c\xc1\x10\x41\x06\xd7\x71\xb9\xce\xc1\xd2\x40\xf6\x96\x84\x86\xfb\xba\x87\x32\x1d\xfd\x1e\x37\x8e\x50"] = CTInfo($description="DigiCert Nessie2025 Log", $operator="DigiCert", $url="https://nessie2025.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xf2\xf0\xf0\xa7\x8b\x81\x2e\x09\x39\x3b\x9f\x42\xda\x38\x44\x5f\xb4\xcc\xed\x36\xbb\xd8\x43\x7f\x16\x49\x57\x87\x04\x7f\xa5\x01\x34\xf7\xe8\x68\x3f\xb7\x78\x1f\x60\x66\x2d\x67\x9a\x75\x80\xb7\x53\xa7\x85\xd5\xbc\xab\x47\x06\x55\xdb\xb5\xdf\x88\xa1\x6f\x38"),
["\x05\x9c\x01\xd3\x20\xe0\x07\x84\x13\x95\x80\x49\x8d\x11\x7c\x90\x32\x66\xaf\xaf\x72\x50\xb5\xaf\x3b\x46\xa4\x3e\x11\x84\x0d\x4a"] = CTInfo($description="DigiCert Yeti2022-2 Log", $operator="DigiCert", $url="https://yeti2022-2.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x1d\x69\x5e\x3f\x0a\xf2\x71\x77\xcd\x9d\x5d\xc3\x34\x49\x00\xee\x60\x77\xe1\x72\x76\x74\xa8\x7c\x5c\x7d\x09\xf2\x32\x1d\x05\x7e\x2e\xfe\x5b\x31\xd5\x3a\xfd\x73\x34\x6e\x49\x14\x4e\x50\x58\x96\x5a\xc4\xc7\xf4\xbe\x05\x64\xa4\xf7\xd7\xe5\xb1\x6d\x33\x0c\xc3"),
["\x55\x81\xd4\xc2\x16\x90\x36\x01\x4a\xea\x0b\x9b\x57\x3c\x53\xf0\xc0\xe4\x38\x78\x70\x25\x08\x17\x2f\xa3\xaa\x1d\x07\x13\xd3\x0c"] = CTInfo($description="Sectigo 'Sabre' CT log", $operator="Sectigo", $url="https://sabre.ct.comodo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xf2\x6f\xd2\x89\x0f\x3f\xc5\xf8\x87\x1e\xab\x65\xb3\xd9\xbb\x17\x23\x8c\x06\x0e\x09\x55\x96\x3d\x0a\x08\xa2\xc5\x71\xb3\xd1\xa9\x2f\x28\x3e\x83\x10\xbf\x12\xd0\x44\x66\x15\xef\x54\xe1\x98\x80\xd0\xce\x24\x6d\x3e\x67\x9a\xe9\x37\x23\xce\x52\x93\x86\xda\x80"),
["\x6f\x53\x76\xac\x31\xf0\x31\x19\xd8\x99\x00\xa4\x51\x15\xff\x77\x15\x1c\x11\xd9\x02\xc1\x00\x29\x06\x8d\xb2\x08\x9a\x37\xd9\x13"] = CTInfo($description="Sectigo 'Mammoth' CT log", $operator="Sectigo", $url="https://mammoth.ct.comodo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xef\xe4\x7d\x74\x2e\x15\x15\xb6\xe9\xbb\x23\x8b\xfb\x2c\xb5\xe1\xc7\x80\x98\x47\xfb\x40\x69\x68\xfc\x49\xad\x61\x4e\x83\x47\x3c\x1a\xb7\x8d\xdf\xff\x7b\x30\xb4\xba\xff\x2f\xcb\xa0\x14\xe3\xad\xd5\x85\x3f\x44\x59\x8c\x8c\x60\x8b\xd7\xb8\xb1\xbf\xae\x8c\x67"),
["\xdf\xa5\x5e\xab\x68\x82\x4f\x1f\x6c\xad\xee\xb8\x5f\x4e\x3e\x5a\xea\xcd\xa2\x12\xa4\x6a\x5e\x8e\x3b\x12\xc0\x20\x44\x5c\x2a\x73"] = CTInfo($description="Let's Encrypt 'Oak2022' log", $operator="Let's Encrypt", $url="https://oak.ct.letsencrypt.org/2022/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x86\x3c\xb1\x0d\x52\x23\x5a\xde\x6e\xf6\xc0\x7f\xa3\x64\xbc\xad\xc1\x89\xda\x97\x59\x4c\x60\x3c\xf8\x8a\x57\x84\x8f\xed\xbc\xa0\x63\x12\x51\x84\xe6\xbd\xdd\x7b\x8c\x80\x7d\xe3\x8f\x86\xa4\xea\xa9\xcf\xa8\xee\xf3\xe0\x5e\x70\xe0\xbb\xf6\xbd\xfc\x1f\x91\x2e"),
["\xb7\x3e\xfb\x24\xdf\x9c\x4d\xba\x75\xf2\x39\xc5\xba\x58\xf4\x6c\x5d\xfc\x42\xcf\x7a\x9f\x35\xc4\x9e\x1d\x09\x81\x25\xed\xb4\x99"] = CTInfo($description="Let's Encrypt 'Oak2023' log", $operator="Let's Encrypt", $url="https://oak.ct.letsencrypt.org/2023/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xb3\x3d\x0e\x78\xbe\xe3\xad\x5c\x44\x5c\x9b\xbe\xa3\x84\x16\x41\x82\xca\xca\x89\x17\x1e\x23\xce\x38\xa5\x54\x2f\x7f\xd3\x34\x51\x6a\xb9\x5c\xc3\x49\xea\xfb\x91\x9d\xe0\x8a\x3c\x73\x06\x9f\x7c\x65\x38\x11\x80\xc4\x9a\x5a\x00\xa6\x67\xc3\x83\xef\x89\x85\x51"),
["\x3b\x53\x77\x75\x3e\x2d\xb9\x80\x4e\x8b\x30\x5b\x06\xfe\x40\x3b\x67\xd8\x4f\xc3\xf4\xc7\xbd\x00\x0d\x2d\x72\x6f\xe1\xfa\xd4\x17"] = CTInfo($description="Let's Encrypt 'Oak2024H1' log", $operator="Let's Encrypt", $url="https://oak.ct.letsencrypt.org/2024h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x56\x43\xd7\x7e\x7b\xd4\x72\xb7\xba\xa9\x51\xbd\x36\x93\xb7\xe9\xb5\x92\x0f\xea\x5e\xb7\x45\xa3\x92\xfd\xc9\xa5\x3c\x80\xac\x1a\x20\xef\x25\x2f\xb8\xe1\x20\xf7\xa8\x3a\x2e\x07\x8d\xe6\xeb\xa4\xe2\x7d\x24\x63\x9f\x46\xbf\x94\x73\x52\x8d\x96\xae\xa9\x26\xfd"),
["\x3f\x17\x4b\x4f\xd7\x22\x47\x58\x94\x1d\x65\x1c\x84\xbe\x0d\x12\xed\x90\x37\x7f\x1f\x85\x6a\xeb\xc1\xbf\x28\x85\xec\xf8\x64\x6e"] = CTInfo($description="Let's Encrypt 'Oak2024H2' log", $operator="Let's Encrypt", $url="https://oak.ct.letsencrypt.org/2024h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xd7\x73\xd6\x53\x47\xe9\xf3\xc9\xd5\x7c\x16\xc2\xd6\x8f\x70\x65\xfa\xf2\x51\x36\xa9\x13\x80\x2f\xed\xf9\x94\xd3\x5a\x8b\xe8\x4f\x33\xcf\xc3\xd3\x89\xd4\x5f\x5a\x66\x89\xba\x20\x1f\x71\xcb\xca\xbb\x9f\x9f\xf3\x5c\x2d\x1e\xa3\x81\x59\xaf\x92\xb3\x6d\x30\x68"),
["\xc3\x65\xf9\xb3\x65\x4f\x32\x83\xc7\x9d\xa9\x8e\x93\xd7\x41\x8f\x5b\xab\x7b\xe3\x25\x2c\x98\xe1\xd2\xf0\x4b\xb9\xeb\x42\x7d\x23"] = CTInfo($description="Trust Asia Log2022", $operator="TrustAsia", $url="https://ct.trustasia.com/log2022/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xbb\x52\xf2\x16\xcf\x92\x0b\xce\x79\xe6\x54\x6d\xc2\x37\x53\xa4\xf5\xf9\x3a\xa9\xb3\x05\xec\x1d\xbd\x16\xec\x30\xac\x2e\xf8\x79\x62\x35\x15\x8e\x1a\xd8\x16\x2e\xe4\x48\x6b\xf6\xc6\x13\xf1\x96\x2e\x5c\x10\xfe\x19\x1e\xa8\x18\xb8\x0f\x2d\xc3\xa4\x86\x51\x97"),
["\xe8\x7e\xa7\x66\x0b\xc2\x6c\xf6\x00\x2e\xf5\x72\x5d\x3f\xe0\xe3\x31\xb9\x39\x3b\xb9\x2f\xbf\x58\xeb\x3b\x90\x49\xda\xf5\x43\x5a"] = CTInfo($description="Trust Asia Log2023", $operator="TrustAsia", $url="https://ct.trustasia.com/log2023/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xa4\x11\x52\xdb\x17\x41\x4e\x90\xd4\x56\x51\x12\x30\x52\xf8\x9b\x03\xcf\x4c\x9f\xf8\x2e\x38\xb5\xf1\x5a\xba\xfa\x38\xb9\xd2\x8f\x1a\x81\xda\x95\xcc\x33\xec\x21\x28\x66\xc6\x56\x3e\x60\x36\x21\x20\xd9\xd4\xac\x5d\xfa\x5c\x19\xa1\x05\x7d\xfe\x20\x23\xfc\xf5"),
["\x87\x4f\xb5\x0d\xc0\x29\xd9\x93\x1d\xe5\x73\xe9\xf2\x89\x9e\x8e\x45\x33\xb3\x92\xd3\x8b\x0a\x46\x25\x74\xbf\x0f\xee\xb2\xfc\x1e"] = CTInfo($description="Trust Asia Log2024-2", $operator="TrustAsia", $url="https://ct2024.trustasia.com/log2024/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xa7\x64\xe2\x79\x81\x3f\x61\xd7\xec\xc6\xf8\x65\x28\x1d\xa0\xb4\x66\x33\xc3\x25\xd5\x0a\x95\x78\x9c\x8f\xfe\xa4\x2a\xd8\x8f\x7e\x72\xe0\xfe\xa8\x7f\xf8\xb1\x2d\x85\xc0\x8e\x12\x74\x0d\x2f\x8c\xab\xd7\x7f\x7a\x1e\xd9\x84\x33\x39\xe8\xfd\x89\x5f\x96\x48\x08"),
};
@@ -68,6 +68,9 @@ export {
 		## Flag to indicate if this record already has been logged, to
 		## prevent duplicates.
 		logged: bool &default=F;
 
+		## Flag to indicate that we have seen a Hello Retry request message.
+		## Used internally for ssl_history logging
+		hrr_seen: bool &default=F;
+
 		## SSL history showing which types of packets we received in which order.
 		## Letters have the following meaning with client-sent letters being capitalized:
@@ -283,6 +286,10 @@ event ssl_server_hello(c: connection, version: count, record_version: count, pos
 		}
 	c$ssl$cipher = cipher_desc[cipher];
 
+	# Check if this is a hello retry request. A magic value in the random is used to signal this
+	if ( server_random == "\xCF\x21\xAD\x74\xE5\x9A\x61\x11\xBE\x1D\x8C\x02\x1E\x65\xB8\x91\xC2\xA2\x11\x16\x7A\xBB\x8C\x5E\x07\x9E\x09\xE2\xC8\xA8\x33\x9C" )
+		c$ssl$hrr_seen = T;
+
 	if ( c$ssl?$session_id && c$ssl$session_id == bytestring_to_hexstr(session_id) && c$ssl$version_num/0xFF != 0x7F && c$ssl$version_num != TLSv13 )
 		c$ssl$resumed = T;
 	}
@@ -360,6 +367,13 @@ event ssl_handshake_message(c: connection, is_client: bool, msg_type: count, len
 			add_to_history(c, is_client, "c");
 			break;
 		case SSL::SERVER_HELLO:
+			if ( c$ssl$hrr_seen )
+				{
+				# the server_hello event is raised first, and sets the flag
+				add_to_history(c, is_client, "j");
+				c$ssl$hrr_seen = F;
+				}
+			else
 			add_to_history(c, is_client, "s");
 			break;
 		case SSL::HELLO_VERIFY_REQUEST:
@@ -512,7 +526,11 @@ event ssl_plaintext_data(c: connection, is_client: bool, record_version: count,
 
 event analyzer_violation_info(atype: AllAnalyzers::Tag, info: AnalyzerViolationInfo) &priority=5
 	{
 	# if ( atype == Analyzer::ANALYZER_SSL || atype == Analyzer::ANALYZER_DTLS )
 	# if ( info$c?$ssl )
-	# finish(info$c, T);
+	# {
+	# # analyzer errored out; prevent us from trying to remove it later
+	# delete info$c$ssl$analyzer_id;
+	# finish(info$c, F);
+	# }
 	}
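
The new "j" history letter makes Hello Retry Requests visible downstream of these handlers. A minimal sketch of consuming it, assuming ssl_history is populated as in the code above:

# Sketch only (not part of the diff): flag connections whose SSL history
# recorded a Hello Retry Request.
@load base/protocols/ssl

event connection_state_remove(c: connection)
	{
	if ( c?$ssl && c$ssl?$ssl_history && "j" in c$ssl$ssl_history )
		print fmt("HRR observed on %s (history: %s)", c$uid, c$ssl$ssl_history);
	}
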
File diff suppressed because one or more lines are too long
@@ -244,7 +244,7 @@ event zeek_init()
 
 @endif
 
-function cr_check_rule(r: Rule): bool
+function cr_check_rule(r: Rule): bool &is_used
 	{
 	if ( r$ty == DROP && r$entity$ty == ADDRESS )
 		{
@@ -22,14 +22,7 @@ export {
 
 hook Notice::notice(n: Notice::Info)
 	{
-	if ( CommunityID::Notice::enabled && n?$conn && n$conn?$conn )
-		{
-		local info = n$conn$conn;
-		# This is set during new_connection(), so it should
-		# always be there, but better safe than sorry.
-		if ( info?$community_id )
-			n$community_id = info$community_id;
-		}
+	if ( CommunityID::Notice::enabled && n?$conn )
+		n$community_id = community_id_v1(n$conn$id);
 	}
 
 @endif
scripts/policy/frameworks/spicy/record-spicy-batch.zeek (new file, 90 lines)
@@ -0,0 +1,90 @@
+##! Saves all input traffic in Spicy's batch format.
+
+module SpicyBatch;
+
+export {
+	const filename = "batch.dat" &redef;
+}
+
+redef tcp_content_deliver_all_orig=T;
+redef tcp_content_deliver_all_resp=T;
+redef udp_content_deliver_all_orig=T;
+redef udp_content_deliver_all_resp=T;
+
+global output: file;
+global conns: set[conn_id];
+global num_conns = 0;
+
+function id(c: connection) : string
+	{
+	local cid = c$id;
+	local proto = "???";
+
+	if ( is_tcp_port(cid$orig_p) )
+		proto = "tcp";
+	else if ( is_udp_port(cid$orig_p) )
+		proto = "udp";
+	else if ( is_icmp_port(cid$orig_p) )
+		proto = "icmp";
+
+	return fmt("%s-%d-%s-%d-%s", cid$orig_h, cid$orig_p, cid$resp_h, cid$resp_p, proto);
+	}
+
+function begin(c: connection, type_: string)
+	{
+	add conns[c$id];
+	++num_conns;
+	print fmt("tracking %s", c$id);
+
+	local id_ = id(c);
+	print output, fmt("@begin-conn %s %s %s-orig %s%%orig %s-resp %s%%resp\n", id_, type_, id_, c$id$resp_p, id_, c$id$resp_p);
+	}
+
+event zeek_init()
+	{
+	output = open(filename);
+	enable_raw_output(output);
+	print output, "!spicy-batch v2\n";
+	}
+
+event new_connection_contents(c: connection)
+	{
+	begin(c, "stream");
+	}
+
+event tcp_contents(c: connection, is_orig: bool, seq: count, contents: string)
+	{
+	print output, fmt("@data %s-%s %d\n", id(c), (is_orig ? "orig" : "resp"), |contents|);
+	print output, contents;
+	print output, "\n";
+	}
+
+event content_gap(c: connection, is_orig: bool, seq: count, length: count)
+	{
+	print output, fmt("@gap %s-%s %d\n", id(c), (is_orig ? "orig" : "resp"), length);
+	}
+
+event udp_contents(c: connection, is_orig: bool, contents: string)
+	{
+	if ( c$id !in conns )
+		begin(c, "block");
+
+	print output, fmt("@data %s-%s %d\n", id(c), (is_orig ? "orig" : "resp"), |contents|);
+	print output, contents;
+	print output, "\n";
+	}
+
+event connection_state_remove(c: connection)
+	{
+	if ( c$id !in conns )
+		return;
+
+	print output, fmt("@end-conn %s\n", id(c));
+	}
+
+event zeek_done()
+	{
+	close(output);
+	print fmt("recorded %d session%s total", num_conns, (num_conns > 1 ? "s" : ""));
+	print fmt("output in %s", filename);
+	}
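
Since SpicyBatch::filename is declared &redef, the recorder can be pointed at a different output file when it is loaded. A minimal sketch; the load path assumes the default ZEEKPATH layout and the output path is just an example:

# Sketch only (not part of the diff): record traffic into a custom batch file
# for later replay with Spicy tooling.
@load policy/frameworks/spicy/record-spicy-batch

redef SpicyBatch::filename = "/tmp/spicy-batch.dat";
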
scripts/policy/frameworks/spicy/resource-usage.zeek (new file, 20 lines)
@@ -0,0 +1,20 @@
+##! Logs Spicy-related resource usage continously for debugging purposes.
+
+module Spicy;
+
+event print_usage()
+	{
+	local r = Spicy::resource_usage();
+
+	print fmt("%.6f Spicy user=%f sys=%f heap=%d current_fibers=%d cached_fibers=%d max_fibers=%d max_stack=%d",
+	          network_time(), r$user_time, r$system_time, r$memory_heap,
+	          r$num_fibers, r$cached_fibers, r$max_fibers,
+	          r$max_fiber_stack_size);
+
+	schedule 1 min { print_usage() };
+	}
+
+event zeek_init()
+	{
+	schedule 1 min { print_usage() };
+	}
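
Enabling the report only requires loading the script; it reschedules itself every minute. A minimal sketch, again assuming the default ZEEKPATH layout:

# Sketch only (not part of the diff): turn on the one-minute Spicy resource report.
@load policy/frameworks/spicy/resource-usage
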
@@ -123,22 +123,33 @@ global packets_filtered_cf = Telemetry::register_counter_family([
 	$help_text="Total number of packets filtered",
 ]);
 
+global packet_lag_gf = Telemetry::register_gauge_family([
+	$prefix="zeek",
+	$name="net-packet-lag",
+	$unit="seconds",
+	$help_text="Difference of network time and wallclock time in seconds.",
+]);
+
+global no_labels: vector of string;
+
 hook Telemetry::sync() {
 	local net_stats = get_net_stats();
-	Telemetry::counter_family_set(bytes_received_cf, vector(), net_stats$bytes_recvd);
-	Telemetry::counter_family_set(packets_received_cf, vector(), net_stats$pkts_recvd);
+	Telemetry::counter_family_set(bytes_received_cf, no_labels, net_stats$bytes_recvd);
+	Telemetry::counter_family_set(packets_received_cf, no_labels, net_stats$pkts_recvd);
 
 	if ( reading_live_traffic() )
 		{
-		Telemetry::counter_family_set(packets_dropped_cf, vector(), net_stats$pkts_dropped);
-		Telemetry::counter_family_set(link_packets_cf, vector(), net_stats$pkts_link);
+		Telemetry::counter_family_set(packets_dropped_cf, no_labels, net_stats$pkts_dropped);
+		Telemetry::counter_family_set(link_packets_cf, no_labels, net_stats$pkts_link);
 
 		if ( net_stats?$pkts_filtered )
-			Telemetry::counter_family_set(packets_filtered_cf, vector(), net_stats$pkts_filtered);
+			Telemetry::counter_family_set(packets_filtered_cf, no_labels, net_stats$pkts_filtered);
+
+		Telemetry::gauge_family_set(packet_lag_gf, no_labels,
+		                            interval_to_double(current_time() - network_time()));
 		}
 	}
 
 
 event zeek_init() &priority=5
 	{
 	Log::create_stream(Stats::LOG, [$columns=Info, $ev=log_stats, $path="stats", $policy=log_policy]);
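
The same Telemetry calls used above work for site-defined metrics as well. A minimal sketch of an additional gauge; every name in it is illustrative, and the exported value (wallclock uptime) is chosen only because it needs nothing beyond functions already used in this hunk:

# Sketch only (not part of the diff): a site-defined gauge following the same
# register/sync pattern as packet_lag_gf above.
module SiteTelemetry;

global uptime_gf = Telemetry::register_gauge_family([
	$prefix="zeek",
	$name="site-uptime",
	$unit="seconds",
	$help_text="Wallclock seconds since this Zeek process started.",
]);

global site_no_labels: vector of string;
global started_at = current_time();

hook Telemetry::sync()
	{
	Telemetry::gauge_family_set(uptime_gf, site_no_labels, interval_to_double(current_time() - started_at));
	}
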
@@ -17,10 +17,7 @@ export {
 	};
 }
 
-module Conn;
-
-event new_connection(c: connection)
+event connection_state_remove(c: connection)
 	{
-	Conn::set_conn(c, F); # likely first to access :-/
 	c$conn$community_id = community_id_v1(c$id, CommunityID::seed, CommunityID::do_base64);
 	}
scripts/spicy/zeek.spicy (new file, 157 lines)
@@ -0,0 +1,157 @@
+# Copyright (c) 2020-2021 by the Zeek Project. See LICENSE for details.
+
+module zeek;
+
+# Note: Retain the formatting here, doc/scripts/autogen-spicy-lib is picking up on that.
+
+%cxx-include = "zeek/spicy/runtime-support.h";
+
+## [Deprecated] Triggers a DPD protocol confirmation for the current connection.
+##
+## This function has been deprecated and will be removed. Use ``spicy::accept_input``
+## instead, which will have the same effect with Zeek.
+public function confirm_protocol() : void &cxxname="zeek::spicy::rt::confirm_protocol";
+
+## [Deprecated] Triggers a DPD protocol violation for the current connection.
+##
+## This function has been deprecated and will be removed. Use ``spicy::decline_input``
+## instead, which will have the same effect with Zeek.
+public function reject_protocol(reason: string) : void &cxxname="zeek::spicy::rt::reject_protocol";
+
+## Reports a "weird" to Zeek. This should be used with similar semantics as in
+## Zeek: something quite unexpected happening at the protocol level, which however
+## does not prevent us from continuing to process the connection.
+##
+## id: the name of the weird, which (just like in Zeek) should be a *static*
+## string identifying the situation reported (e.g., ``unexpected_command``).
+##
+## addl: additional information to record along with the weird
+public function weird(id: string, addl: string = "") &cxxname="zeek::spicy::rt::weird";
+
+## Returns true if we're currently parsing the originator side of a connection.
+public function is_orig() : bool &cxxname="zeek::spicy::rt::is_orig";
+
+## Returns the current connection's UID.
+public function uid() : string &cxxname="zeek::spicy::rt::uid";
+
+## Returns the current connection's 4-tuple ID.
+public function conn_id() : tuple<orig_h: addr, orig_p: port, resp_h: addr, resp_p: port> &cxxname="zeek::spicy::rt::conn_id";
+
+## Instructs Zeek to flip the directionality of the current connection.
+public function flip_roles() : void &cxxname="zeek::spicy::rt::flip_roles";
+
+## Returns the number of packets seen so far on the current side of the current connection.
+public function number_packets() : uint64 &cxxname="zeek::spicy::rt::number_packets";
+
+## Opaque handle to a protocol analyzer.
+public type ProtocolHandle = __library_type("zeek::spicy::rt::ProtocolHandle");
+
+## Adds a Zeek-side child protocol analyzer to the current connection.
+##
+## If the same analyzer was added previously with protocol_handle_get_or_create or
+## protocol_begin with same argument, and not closed with protocol_handle_close
+## or protocol_end, no new analyzer will be added.
+##
+## See `protocol_handle_get_or_create` for the error semantics of this function.
+##
+## analyzer: type of analyzer to instantiate, specified through its Zeek-side
+## name (similar to what Zeek's signature action `enable` takes); if not
+## specified, Zeek will perform its usual dynamic protocol detection to figure
+## out how to parse the data (the latter will work only for TCP protocols, though.)
+public function protocol_begin(analyzer: optional<string> = Null) : void &cxxname="zeek::spicy::rt::protocol_begin";
+
+## Gets a handle to a Zeek-side child protocol analyzer for the current connection.
+##
+## If no such child exists it will be added; otherwise a handle to the
+## existing child protocol analyzer will be returned.
+##
+## This function will return an error
+##
+## - if not called from a protocol analyzer, or
+## - the requested child protocol analyzer is unknown, or
+## - creation of a child analyzer of the requested type was prevented by a
+##   previous call of `disable_analyzer` with `prevent=T`
+##
+## analyzer: type of analyzer to instantiate, specified through its Zeek-side
+## name (similar to what Zeek's signature action `enable` takes).
+public function protocol_handle_get_or_create(analyzer: string) : ProtocolHandle &cxxname="zeek::spicy::rt::protocol_handle_get_or_create";
+
+## Forwards protocol data to all previously instantiated Zeek-side child protocol analyzers.
+##
+## is_orig: true to feed the data to the child's originator side, false for the responder
+## data: chunk of data to forward to child analyzer
+## h: optional handle to the child analyzer to forward data into, else forward to all child analyzers
+public function protocol_data_in(is_orig: bool, data: bytes, h: optional<ProtocolHandle> = Null) : void &cxxname="zeek::spicy::rt::protocol_data_in";
+
+## Signals a gap in input data to all previously instantiated Zeek-side child protocol analyzers.
+##
+## is_orig: true to signal gap to the child's originator side, false for the responder
+## offset: start offset of gap in input stream
+## len: size of gap
+## h: optional handle to the child analyzer signal a gap to, else signal to all child analyzers
+public function protocol_gap(is_orig: bool, offset: uint64, len: uint64, h: optional<ProtocolHandle> = Null) : void &cxxname="zeek::spicy::rt::protocol_gap";
+
+## Signals end-of-data to all previously instantiated Zeek-side child protocol
+## analyzers and removes them.
+public function protocol_end() : void &cxxname="zeek::spicy::rt::protocol_end";
+
+## Signals end-of-data to the given child analyzer and removes it.
+##
+## The given handle must be live, i.e., it must not have been used in a
+## previous protocol_handle_close call, and must not have been live when
+## protocol_end was called. If the handle is not live a runtime error will
+## be triggered.
+##
+## handle: handle to the child analyzer to remove
+public function protocol_handle_close(handle: ProtocolHandle): void &cxxname="zeek::spicy::rt::protocol_handle_close";
+
+## Signals the beginning of a file to Zeek's file analysis, associating it with the current connection.
+## Optionally, a mime type can be provided. It will be passed on to Zeek's file analysis framework.
+## Returns the Zeek-side file ID of the new file.
+public function file_begin(mime_type: optional<string> = Null) : string &cxxname="zeek::spicy::rt::file_begin";
+
+## Returns the current file's FUID.
+public function fuid() : string &cxxname="zeek::spicy::rt::fuid";
+
+## Terminates the currently active Zeek-side session, flushing all state. Any
+## subsequent activity will start a new session from scratch. This can only be
+## called from inside a protocol analyzer.
+public function terminate_session() : void &cxxname="zeek::spicy::rt::terminate_session";
|
||||||
|
|
||||||
|
## Signals the expected size of a file to Zeek's file analysis.
|
||||||
|
##
|
||||||
|
## size: expected size of file
|
||||||
|
## fid: Zeek-side ID of the file to operate on; if not given, the file started by the most recent file_begin() will be used
|
||||||
|
public function file_set_size(size: uint64, fid: optional<string> = Null) : void &cxxname="zeek::spicy::rt::file_set_size";
|
||||||
|
|
||||||
|
## Passes file content on to Zeek's file analysis.
|
||||||
|
##
|
||||||
|
## data: chunk of raw data to pass into analysis
|
||||||
|
## fid: Zeek-side ID of the file to operate on; if not given, the file started by the most recent file_begin() will be used
|
||||||
|
public function file_data_in(data: bytes, fid: optional<string> = Null) : void &cxxname="zeek::spicy::rt::file_data_in";
|
||||||
|
|
||||||
|
## Passes file content at a specific offset on to Zeek's file analysis.
|
||||||
|
##
|
||||||
|
## data: chunk of raw data to pass into analysis
|
||||||
|
## offset: position in file where data starts
|
||||||
|
## fid: Zeek-side ID of the file to operate on; if not given, the file started by the most recent file_begin() will be used
|
||||||
|
public function file_data_in_at_offset(data: bytes, offset: uint64, fid: optional<string> = Null) : void &cxxname="zeek::spicy::rt::file_data_in_at_offset";
|
||||||
|
|
||||||
|
## Signals a gap in a file to Zeek's file analysis.
|
||||||
|
##
|
||||||
|
## offset: position in file where gap starts
|
||||||
|
## len: size of gap
|
||||||
|
## fid: Zeek-side ID of the file to operate on; if not given, the file started by the most recent file_begin() will be used
|
||||||
|
public function file_gap(offset: uint64, len: uint64, fid: optional<string> = Null) : void &cxxname="zeek::spicy::rt::file_gap";
|
||||||
|
|
||||||
|
## Signals the end of a file to Zeek's file analysis.
|
||||||
|
##
|
||||||
|
## fid: Zeek-side ID of the file to operate on; if not given, the file started by the most recent file_begin() will be used
|
||||||
|
public function file_end(fid: optional<string> = Null) : void &cxxname="zeek::spicy::rt::file_end";
|
||||||
|
|
||||||
|
## Inside a packet analyzer, forwards what data remains after parsing the top-level unit
|
||||||
|
## on to another analyzer. The index specifies the target, per the current dispatcher table.
|
||||||
|
public function forward_packet(identifier: uint32) : void &cxxname="zeek::spicy::rt::forward_packet";
|
||||||
|
|
||||||
|
## Gets the network time from Zeek.
|
||||||
|
public function network_time() : time &cxxname="zeek::spicy::rt::network_time";
|
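To make the runtime API above concrete, here is a minimal, hypothetical Spicy sketch of a tunnel-style analyzer that hands its payload to a Zeek-side child analyzer. It is not part of this change; the module, unit, and field names as well as the "HTTP" label are illustrative assumptions only.

```spicy
module MyTunnel;

import zeek;

# One framed message: a 2-byte length prefix followed by that many payload bytes.
public type Message = unit {
    len: uint16;
    payload: bytes &size=self.len;

    on %done {
        # Hand the tunneled payload to a Zeek-side child analyzer. Passing
        # "HTTP" is just an example; with no argument, Zeek would fall back
        # to its dynamic protocol detection (TCP only).
        zeek::protocol_begin("HTTP");
        zeek::protocol_data_in(zeek::is_orig(), self.payload);
        zeek::protocol_end();
    }
};
```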
scripts/spicy/zeek_file.spicy (new file, 31 lines)
@@ -0,0 +1,31 @@
# Copyright (c) 2020-2021 by the Zeek Project. See LICENSE for details.
#
# TODO: This code would ideally just be a part of zeek.spicy, but that would
# come with compilation overhead currently even if not used, see
# https://github.com/zeek/spicy/issues/301.

module zeek_file;

import zeek;

## Convenience wrapper for passing content into Zeek's file analysis.
## After connecting an instance of this unit type to a sink, all data
## sent to the sink will be passed on to Zeek as a file.
##
## mime_type: MIME type of the file's content, if known; will be passed on to Zeek
## size: Total number of bytes the file contains, if known; will be passed on to Zeek
public type File = unit(mime_type: optional<string> = Null, size: optional<uint64> = Null) {
    on %init {
        self.fid = zeek::file_begin(mime_type);

        if ( size )
            zeek::file_set_size(*size, self.fid);
    }

    : bytes &chunked &eod { zeek::file_data_in($$, self.fid); }

    on %finally { zeek::file_end(self.fid); }

    ## Zeek-side file ID
    var fid: string;
};
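For reference, a hedged usage sketch of this wrapper (not part of the change): a parser connects a sink to a `zeek_file::File` instance and then simply writes extracted bytes into the sink. The module, unit, and field names and the MIME type are hypothetical.

```spicy
module MyProto;

import zeek_file;

public type Download = unit {
    sink content;

    on %init {
        # Everything written to the sink is forwarded into Zeek's file
        # analysis, tagged with the given (illustrative) MIME type.
        self.content.connect(new zeek_file::File("application/octet-stream"));
    }

    # Stream all remaining input into the sink as it arrives.
    data: bytes &chunked &eod {
        self.content.write($$);
    }
};
```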
scripts/spicy/zeek_rt.hlt (new file, 46 lines)
@@ -0,0 +1,46 @@
module zeek_rt {

import hilti;

%cxx-include = "zeek/spicy/runtime-support.h";

public type Val = __library_type("::zeek::ValPtr");
public type BroType = __library_type("::zeek::TypePtr");
public type EventHandlerPtr = __library_type("::zeek::EventHandlerPtr");
public type PortRange = __library_type("::zeek::spicy::rt::PortRange");

declare public PortRange make_port_range(port begin_, port end_) &cxxname="zeek::spicy::rt::make_port_range" &have_prototype;

type ZeekTypeTag = enum {
    Addr, Any, Bool, Count, Double, Enum, Error, File, Func, Int, Interval, List, Opaque, Pattern, Port, Record, String, Subnet, Table, Time, Type, Vector, Void
} &cxxname="::zeek::spicy::rt::ZeekTypeTag";

declare public void register_protocol_analyzer(string name, hilti::Protocol protocol, vector<PortRange> ports, string parser_orig, string parser_resp, string replaces, string linker_scope) &cxxname="zeek::spicy::rt::register_protocol_analyzer" &have_prototype;
declare public void register_file_analyzer(string name, vector<string> mime_types, string parser, string replaces, string linker_scope) &cxxname="zeek::spicy::rt::register_file_analyzer" &have_prototype;
declare public void register_packet_analyzer(string name, string parser, string replaces, string linker_scope) &cxxname="zeek::spicy::rt::register_packet_analyzer" &have_prototype;
declare public void register_type(string ns, string id, BroType t) &cxxname="zeek::spicy::rt::register_type" &have_prototype;

declare public bool have_handler(EventHandlerPtr handler) &cxxname="zeek::spicy::rt::have_handler" &have_prototype;
declare public EventHandlerPtr internal_handler(string event) &cxxname="zeek::spicy::rt::internal_handler" &have_prototype;
declare public void install_handler(string event) &cxxname="zeek::spicy::rt::install_handler" &have_prototype;

declare public void raise_event(EventHandlerPtr handler, vector<Val> args) &cxxname="zeek::spicy::rt::raise_event" &have_prototype;
declare public BroType event_arg_type(EventHandlerPtr handler, uint<64> idx) &cxxname="zeek::spicy::rt::event_arg_type" &have_prototype;
declare public Val to_val(any x, BroType target) &cxxname="zeek::spicy::rt::to_val" &have_prototype;

type RecordField = tuple<string, BroType, bool>; # (ID, type, optional)
declare public BroType create_base_type(ZeekTypeTag tag) &cxxname="zeek::spicy::rt::create_base_type" &have_prototype;
declare public BroType create_enum_type(string ns, string id, vector<tuple<string, int<64>>> labels) &cxxname="zeek::spicy::rt::create_enum_type" &have_prototype;
declare public BroType create_record_type(string ns, string id, vector<RecordField> fields) &cxxname="zeek::spicy::rt::create_record_type" &have_prototype;
declare public BroType create_table_type(BroType key, optional<BroType> value = Null) &cxxname="zeek::spicy::rt::create_table_type" &have_prototype;
declare public BroType create_vector_type(BroType elem) &cxxname="zeek::spicy::rt::create_vector_type" &have_prototype;

declare public Val current_conn() &cxxname="zeek::spicy::rt::current_conn" &have_prototype;
declare public Val current_file() &cxxname="zeek::spicy::rt::current_file" &have_prototype;
declare public Val current_packet() &cxxname="zeek::spicy::rt::current_packet" &have_prototype;
declare public Val current_is_orig() &cxxname="zeek::spicy::rt::current_is_orig" &have_prototype;

declare public void debug(string msg) &cxxname="zeek::spicy::rt::debug" &have_prototype;

}
@@ -72,6 +72,8 @@
 @load frameworks/packet-filter/shunt.zeek
 @load frameworks/software/version-changes.zeek
 @load frameworks/software/vulnerable.zeek
+# @load frameworks/spicy/record-spicy-batch.zeek
+# @load frameworks/spicy/resource-usage.zeek
 @load frameworks/software/windows-version-detection.zeek
 @load frameworks/telemetry/log.zeek
 @load integration/collective-intel/__load__.zeek
@@ -13,6 +13,11 @@
 @load policy/misc/dump-events.zeek
 @load policy/protocols/conn/speculative-service.zeek
+
+@if ( have_spicy() )
+@load frameworks/spicy/record-spicy-batch.zeek
+@load frameworks/spicy/resource-usage.zeek
+@endif
 
 @load ./example.zeek
 
 event zeek_init()
@@ -4,7 +4,7 @@
 # Spicy tooling in the build tree.
 
 PATHS=.
-PATHS=$PATHS:@CMAKE_SOURCE_DIR@/auxil/spicy/spicy/spicy/lib
-PATHS=$PATHS:@CMAKE_SOURCE_DIR@/auxil/spicy/spicy/hilti/lib
+PATHS=$PATHS:@CMAKE_SOURCE_DIR@/auxil/spicy/spicy/lib
+PATHS=$PATHS:@CMAKE_SOURCE_DIR@/auxil/spicy/hilti/lib
 
 echo $PATHS
@@ -1 +1 @@
-Subproject commit 58e63b262e542895024c821e791214008604cd55
+Subproject commit 410ada8bbe5839807a459a99c1b77221f790f1be
@@ -1,121 +1,102 @@
-include_directories(BEFORE
-${CMAKE_CURRENT_SOURCE_DIR}
-${CMAKE_CURRENT_SOURCE_DIR}/include
-${CMAKE_CURRENT_BINARY_DIR}
-${CMAKE_CURRENT_BINARY_DIR}/include
-)
+include_directories(BEFORE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/include
+${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR}/include)
 
 # Force creation of includes symlink. This can't just be in the src directory
 # because MSVC will end up with an include loop.
-execute_process(COMMAND "${CMAKE_COMMAND}" -E make_directory
-"${CMAKE_CURRENT_SOURCE_DIR}/include")
-execute_process(COMMAND "${CMAKE_COMMAND}" -E create_symlink
-".."
+execute_process(COMMAND "${CMAKE_COMMAND}" -E make_directory "${CMAKE_CURRENT_SOURCE_DIR}/include")
+execute_process(COMMAND "${CMAKE_COMMAND}" -E create_symlink ".."
 "${CMAKE_CURRENT_SOURCE_DIR}/include/zeek")
 
 # Allows header file inclusion via zeek/ within the build tree
-execute_process(COMMAND "${CMAKE_COMMAND}" -E make_directory
-"${CMAKE_CURRENT_BINARY_DIR}/include")
-execute_process(COMMAND "${CMAKE_COMMAND}" -E create_symlink
-".."
+execute_process(COMMAND "${CMAKE_COMMAND}" -E make_directory "${CMAKE_CURRENT_BINARY_DIR}/include")
+execute_process(COMMAND "${CMAKE_COMMAND}" -E create_symlink ".."
 "${CMAKE_CURRENT_BINARY_DIR}/include/zeek")
 
 # Poor man's JSON escaping as this is rendered into a C string.
 string(REPLACE "\"" "\\\"" ZEEK_BUILD_INFO_ESCAPED "${ZEEK_BUILD_INFO}")
 string(REPLACE "\n" "\\n" ZEEK_BUILD_INFO_ESCAPED "${ZEEK_BUILD_INFO_ESCAPED}")
 configure_file(version.c.in ${CMAKE_CURRENT_BINARY_DIR}/version.c)
-configure_file(util-config.h.in ${CMAKE_CURRENT_BINARY_DIR}/util-config.h)
 
-# This creates a custom command to transform a bison output file (inFile)
-# into outFile in order to avoid symbol conflicts:
-# - replaces instances of 'yylex' in inFile with yylexPrefix
-# - replaces instances of 'yy' in inFile with yyPrefix
-# - deletes instances of 'extern char.*getenv' in inFile
-# - writes results to outFile and adds it to list TRANSFORMED_BISON_OUTPUTS
-macro(REPLACE_YY_PREFIX_TARGET inFile outFile yylexPrefix yyPrefix)
+# This creates a custom command to transform a bison output file (inFile) into
+# outFile in order to avoid symbol conflicts: - replaces instances of 'yylex' in
+# inFile with yylexPrefix - replaces instances of 'yy' in inFile with yyPrefix -
+# deletes instances of 'extern char.*getenv' in inFile - writes results to
+# outFile and adds it to list TRANSFORMED_BISON_OUTPUTS
+macro (REPLACE_YY_PREFIX_TARGET inFile outFile yylexPrefix yyPrefix)
 set(args "\"/extern char.*getenv/d")
 set(args "${args}\;s/yylex/${yylexPrefix}lex/")
 set(args "${args}\;s/yy/${yyPrefix}/g\"" < ${inFile} > ${outFile})
-add_custom_command(OUTPUT ${outFile}
-COMMAND ${SED_EXE}
-ARGS ${args}
+add_custom_command(
+OUTPUT ${outFile}
+COMMAND ${SED_EXE} ARGS ${args}
 DEPENDS ${inFile}
-COMMENT "[sed] replacing stuff in ${inFile}"
-)
+COMMENT "[sed] replacing stuff in ${inFile}")
 list(APPEND TRANSFORMED_BISON_OUTPUTS ${outFile})
-endmacro(REPLACE_YY_PREFIX_TARGET)
+endmacro (REPLACE_YY_PREFIX_TARGET)
 
-########################################################################
-## Create targets to generate parser and scanner code
+# ##############################################################################
+# Create targets to generate parser and scanner code
 
 set(BISON_FLAGS "--debug")
 
-if ( MSVC )
+if (MSVC)
 set(SIGN_COMPARE_FLAG "/wd4018")
-else()
+else ()
 set(SIGN_COMPARE_FLAG "-Wno-sign-compare")
-endif()
+endif ()
 
 # Rule parser/scanner
-bison_target(RuleParser rule-parse.y
-${CMAKE_CURRENT_BINARY_DIR}/rup.cc
-HEADER ${CMAKE_CURRENT_BINARY_DIR}/rup.h
-#VERBOSE ${CMAKE_CURRENT_BINARY_DIR}/rule_parse.output
+bison_target(
+RuleParser rule-parse.y ${CMAKE_CURRENT_BINARY_DIR}/rup.cc
+DEFINES_FILE ${CMAKE_CURRENT_BINARY_DIR}/rup.h
+# VERBOSE ${CMAKE_CURRENT_BINARY_DIR}/rule_parse.output
 COMPILE_FLAGS "${BISON_FLAGS}")
 replace_yy_prefix_target(${CMAKE_CURRENT_BINARY_DIR}/rup.cc
-${CMAKE_CURRENT_BINARY_DIR}/rule-parse.cc
-rules_ rules_)
-replace_yy_prefix_target(${CMAKE_CURRENT_BINARY_DIR}/rup.h
-${CMAKE_CURRENT_BINARY_DIR}/rule-parse.h
+${CMAKE_CURRENT_BINARY_DIR}/rule-parse.cc rules_ rules_)
+replace_yy_prefix_target(${CMAKE_CURRENT_BINARY_DIR}/rup.h ${CMAKE_CURRENT_BINARY_DIR}/rule-parse.h
 rules_ rules_)
 flex_target(RuleScanner rule-scan.l ${CMAKE_CURRENT_BINARY_DIR}/rule-scan.cc
 COMPILE_FLAGS "-Prules_")
 set_property(SOURCE rule-scan.cc APPEND_STRING PROPERTY COMPILE_FLAGS "${SIGN_COMPARE_FLAG}")
 
 # RE parser/scanner
-bison_target(REParser re-parse.y
-${CMAKE_CURRENT_BINARY_DIR}/rep.cc
-HEADER ${CMAKE_CURRENT_BINARY_DIR}/re-parse.h
-#VERBOSE ${CMAKE_CURRENT_BINARY_DIR}/re_parse.output
+bison_target(
+REParser re-parse.y ${CMAKE_CURRENT_BINARY_DIR}/rep.cc
+DEFINES_FILE ${CMAKE_CURRENT_BINARY_DIR}/re-parse.h
+# VERBOSE ${CMAKE_CURRENT_BINARY_DIR}/re_parse.output
 COMPILE_FLAGS "${BISON_FLAGS}")
-replace_yy_prefix_target(${CMAKE_CURRENT_BINARY_DIR}/rep.cc
-${CMAKE_CURRENT_BINARY_DIR}/re-parse.cc
+replace_yy_prefix_target(${CMAKE_CURRENT_BINARY_DIR}/rep.cc ${CMAKE_CURRENT_BINARY_DIR}/re-parse.cc
 re_ RE_)
-flex_target(REScanner re-scan.l ${CMAKE_CURRENT_BINARY_DIR}/re-scan.cc
-COMPILE_FLAGS "-Pre_")
+flex_target(REScanner re-scan.l ${CMAKE_CURRENT_BINARY_DIR}/re-scan.cc COMPILE_FLAGS "-Pre_")
 add_flex_bison_dependency(REScanner REParser)
 set_property(SOURCE re-scan.cc APPEND_STRING PROPERTY COMPILE_FLAGS "${SIGN_COMPARE_FLAG}")
 
 # Parser/Scanner
-bison_target(Parser parse.y
-${CMAKE_CURRENT_BINARY_DIR}/p.cc
-HEADER ${CMAKE_CURRENT_BINARY_DIR}/zeekparse.h
-#VERBOSE ${CMAKE_CURRENT_BINARY_DIR}/parse.output
+bison_target(
+Parser parse.y ${CMAKE_CURRENT_BINARY_DIR}/p.cc
+DEFINES_FILE ${CMAKE_CURRENT_BINARY_DIR}/zeekparse.h
+# VERBOSE ${CMAKE_CURRENT_BINARY_DIR}/parse.output
 COMPILE_FLAGS "${BISON_FLAGS}")
-replace_yy_prefix_target(${CMAKE_CURRENT_BINARY_DIR}/p.cc
-${CMAKE_CURRENT_BINARY_DIR}/parse.cc
-zeek yy)
-flex_target(Scanner scan.l ${CMAKE_CURRENT_BINARY_DIR}/scan.cc
-COMPILE_FLAGS "-Pzeek")
+replace_yy_prefix_target(${CMAKE_CURRENT_BINARY_DIR}/p.cc ${CMAKE_CURRENT_BINARY_DIR}/parse.cc zeek
+yy)
+flex_target(Scanner scan.l ${CMAKE_CURRENT_BINARY_DIR}/scan.cc COMPILE_FLAGS "-Pzeek")
 set_property(SOURCE scan.cc APPEND_STRING PROPERTY COMPILE_FLAGS "${SIGN_COMPARE_FLAG}")
 
 # Add a dependency for the generated files to zeek_autogen_files.
 add_custom_target(
 zeek_bison_outputs
-DEPENDS
-${CMAKE_CURRENT_BINARY_DIR}/parse.cc
+DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/parse.cc
 ${CMAKE_CURRENT_BINARY_DIR}/re-parse.cc
 ${CMAKE_CURRENT_BINARY_DIR}/re-parse.h
 ${CMAKE_CURRENT_BINARY_DIR}/re-scan.cc
 ${CMAKE_CURRENT_BINARY_DIR}/rule-parse.cc
 ${CMAKE_CURRENT_BINARY_DIR}/rule-parse.h
 ${CMAKE_CURRENT_BINARY_DIR}/rule-scan.cc
-${CMAKE_CURRENT_BINARY_DIR}/scan.cc
-)
+${CMAKE_CURRENT_BINARY_DIR}/scan.cc)
 add_dependencies(zeek_autogen_files zeek_bison_outputs)
 
-########################################################################
-## bifcl-dependent targets
+# ##############################################################################
+# bifcl-dependent targets
 
 include(BifCl)
 
@@ -131,34 +112,30 @@ set(BIF_SRCS
 strings.bif
 reporter.bif
 option.bif
-# Note: the supervisor BIF file is treated like other top-level BIFs
-# instead of contained in its own subdirectory CMake logic because
-# subdirectory BIFs are treated differently and don't support being called
-# *during* parsing (e.g. within an @if directive).
+# Note: the supervisor BIF file is treated like other top-level BIFs instead
+# of contained in its own subdirectory CMake logic because subdirectory BIFs
+# are treated differently and don't support being called *during* parsing
+# (e.g. within an @if directive).
 supervisor/supervisor.bif
-# The packet analysis BIF is treated like other top-level BIFs because
-# it's needed before parsing the packet protocol scripts, which happen
-# very near to the start of parsing.
+# The packet analysis BIF is treated like other top-level BIFs because it's
+# needed before parsing the packet protocol scripts, which happen very near
+# to the start of parsing.
 packet_analysis/packet_analysis.bif
-# The C++ loading BIF is treated like other top-level BIFs to give
-# us flexibility regarding when it's called.
-script_opt/CPP/CPP-load.bif
-)
+# The C++ loading BIF is treated like other top-level BIFs to give us
+# flexibility regarding when it's called.
+script_opt/CPP/CPP-load.bif)
 
 foreach (bift ${BIF_SRCS})
 bif_target(${bift} "standard")
 endforeach ()
 
-########################################################################
-## BinPAC-dependent targets
+# ##############################################################################
+# BinPAC-dependent targets
 
 include(BinPAC)
 
-set(BINPAC_AUXSRC
-${CMAKE_CURRENT_SOURCE_DIR}/binpac.pac
-${CMAKE_CURRENT_SOURCE_DIR}/zeek.pac
-${CMAKE_CURRENT_SOURCE_DIR}/binpac_zeek.h
-)
+set(BINPAC_AUXSRC ${CMAKE_CURRENT_SOURCE_DIR}/binpac.pac ${CMAKE_CURRENT_SOURCE_DIR}/zeek.pac
+${CMAKE_CURRENT_SOURCE_DIR}/binpac_zeek.h)
 
 set(BINPAC_OUTPUTS "")
 
@@ -168,8 +145,8 @@ list(APPEND BINPAC_OUTPUTS "${BINPAC_OUTPUT_CC}")
 binpac_target(binpac_zeek-lib.pac)
 list(APPEND BINPAC_OUTPUTS "${BINPAC_OUTPUT_CC}")
 
-########################################################################
-## Gen-ZAM setup
+# ##############################################################################
+# Gen-ZAM setup
 
 include(Gen-ZAM)
 
@@ -177,9 +154,9 @@ set(GEN_ZAM_SRC ${CMAKE_CURRENT_SOURCE_DIR}/script_opt/ZAM/Ops.in)
 
 gen_zam_target(${GEN_ZAM_SRC})
 
-########################################################################
-## Including subdirectories.
-########################################################################
+# ##############################################################################
+# Including subdirectories.
+# ##############################################################################
 
 option(USE_SQLITE "Should Zeek use SQLite?" ON)
 
@@ -195,48 +172,67 @@ add_subdirectory(logging)
 add_subdirectory(probabilistic)
 add_subdirectory(session)
 
-########################################################################
-## Build in the discovered external plugins and create the autogenerated scripts.
+if (HAVE_SPICY)
+add_subdirectory(spicy)
+endif ()
 
+# ##############################################################################
+# Build in the discovered external plugins and create the autogenerated scripts.
 
 set(PRELOAD_SCRIPT ${PROJECT_BINARY_DIR}/scripts/builtin-plugins/__preload__.zeek)
 file(WRITE ${PRELOAD_SCRIPT} "# Warning, this is an autogenerated file!\n")
 set(LOAD_SCRIPT ${PROJECT_BINARY_DIR}/scripts/builtin-plugins/__load__.zeek)
 file(WRITE ${LOAD_SCRIPT} "# Warning, this is an autogenerated file!\n")
 
-foreach (plugin_dir ${BUILTIN_PLUGIN_LIST})
-get_filename_component(plugin_name ${plugin_dir} NAME)
+# TODO: this really should be a function to make sure we have an isolated scope.
+# However, for historic reasons, we're not doing that yet. Some plugin modify
+# global state such as `zeekdeps`.
+macro (add_extra_builtin_plugin plugin_dir)
+get_filename_component(plugin_name "${plugin_dir}" NAME)
 
-if(IS_DIRECTORY "${plugin_dir}/cmake")
-set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${plugin_dir}/cmake")
-endif()
+if (IS_DIRECTORY "${plugin_dir}/cmake")
+list(APPEND CMAKE_MODULE_PATH "${plugin_dir}/cmake")
+endif ()
 
+# Set this flag so that ZeekPluginStatic.cmake knows that this plugin is not
+# from our source tree but from an external source (or git submodule). This
+# will tell CMake to *not* define ZEEK_CONFIG_SKIP_VERSION_H for the plugin.
+set(ZEEK_BUILDING_EXTRA_PLUGINS ON)
+
 add_subdirectory(${plugin_dir} ${CMAKE_CURRENT_BINARY_DIR}/builtin-plugins/${plugin_name})
-endforeach()
+
+# TODO: drop once we turn this into a function.
+set(ZEEK_BUILDING_EXTRA_PLUGINS OFF)
+endmacro ()
+
+foreach (plugin_dir ${BUILTIN_PLUGIN_LIST})
+add_extra_builtin_plugin("${plugin_dir}")
+endforeach ()
 
 install(FILES ${PRELOAD_SCRIPT} DESTINATION ${ZEEK_SCRIPT_INSTALL_PATH}/builtin-plugins/)
 install(FILES ${LOAD_SCRIPT} DESTINATION ${ZEEK_SCRIPT_INSTALL_PATH}/builtin-plugins/)
 
-########################################################################
-## This has to happen after the parts for builtin plugins, or else
-## symbols are missing when it goes to link the fuzzer binaries.
+# ##############################################################################
+# This has to happen after the parts for builtin plugins, or else symbols are
+# missing when it goes to link the fuzzer binaries.
 add_subdirectory(fuzzers)
 
-########################################################################
-## zeek target
+# ##############################################################################
+# zeek target
 
-find_package (Threads)
+find_package(Threads)
 
 # Avoid CMake warning about "3rdparty" looking like a number.
 
 cmake_policy(PUSH)
 
 if (POLICY CMP0012)
 cmake_policy(SET CMP0012 NEW)
 endif ()
 
-# This macro stores associated headers for any C/C++ source files given
-# as arguments (past _var) as a list in the CMake variable named "_var".
-macro(COLLECT_HEADERS _var)
+# This macro stores associated headers for any C/C++ source files given as
+# arguments (past _var) as a list in the CMake variable named "_var".
+macro (COLLECT_HEADERS _var)
 foreach (src ${ARGN})
 get_filename_component(ext ${src} EXT)
 if ("${ext}" STREQUAL ".cc" OR "${ext}" STREQUAL ".c")
@@ -251,39 +247,34 @@ macro(COLLECT_HEADERS _var)
 endif ()
 endif ()
 endforeach ()
-endmacro(COLLECT_HEADERS _var)
+endmacro (COLLECT_HEADERS _var)
 
 cmake_policy(POP)
 
-# define a command that's used to run the make_dbg_constants.py script
-# building the zeek binary depends on the outputs of this script
-add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
+# define a command that's used to run the make_dbg_constants.py script building
+# the zeek binary depends on the outputs of this script
+add_custom_command(
+OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
 ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdInfoConstants.cc
-COMMAND ${PYTHON_EXECUTABLE}
-ARGS ${CMAKE_CURRENT_SOURCE_DIR}/make_dbg_constants.py
+COMMAND ${PYTHON_EXECUTABLE} ARGS ${CMAKE_CURRENT_SOURCE_DIR}/make_dbg_constants.py
 ${CMAKE_CURRENT_SOURCE_DIR}/DebugCmdInfoConstants.in
 DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/make_dbg_constants.py
 ${CMAKE_CURRENT_SOURCE_DIR}/DebugCmdInfoConstants.in
 COMMENT "[Python] Processing debug commands"
-WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
-)
+WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
 
-add_custom_target(
-zeek_debugcmd_gen
-DEPENDS
-${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
-${CMAKE_CURRENT_BINARY_DIR}/DebugCmdInfoConstants.cc
-)
+add_custom_target(zeek_debugcmd_gen DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/DebugCmdConstants.h
+${CMAKE_CURRENT_BINARY_DIR}/DebugCmdInfoConstants.cc)
 add_dependencies(zeek_autogen_files zeek_debugcmd_gen)
 
 set(_gen_zeek_script_cpp ${CMAKE_CURRENT_BINARY_DIR}/../CPP-gen.cc)
-add_custom_command(OUTPUT ${_gen_zeek_script_cpp}
-COMMAND ${CMAKE_COMMAND} -E touch ${_gen_zeek_script_cpp})
+add_custom_command(OUTPUT ${_gen_zeek_script_cpp} COMMAND ${CMAKE_COMMAND} -E touch
+${_gen_zeek_script_cpp})
 
 if (!MSVC)
 set_source_files_properties(legacy-netvar-init.cc PROPERTIES COMPILE_FLAGS
 -Wno-deprecated-declarations)
-endif()
+endif ()
 
 set(MAIN_SRCS
 digest.cc
@@ -371,9 +362,7 @@ set(MAIN_SRCS
 ZeekArgs.cc
 ZeekString.cc
 ZVal.cc
-
 ${SUPERVISOR_SRCS}
-
 threading/BasicThread.cc
 threading/Formatter.cc
 threading/Manager.cc
@@ -381,12 +370,10 @@ set(MAIN_SRCS
 threading/SerialTypes.cc
 threading/formatters/Ascii.cc
 threading/formatters/JSON.cc
-
 plugin/Component.cc
 plugin/ComponentManager.h
 plugin/Manager.cc
 plugin/Plugin.cc
-
 script_opt/CPP/Attrs.cc
 script_opt/CPP/Consts.cc
 script_opt/CPP/DeclFunc.cc
@@ -406,9 +393,7 @@ set(MAIN_SRCS
 script_opt/CPP/Types.cc
 script_opt/CPP/Util.cc
 script_opt/CPP/Vars.cc
-
 ${_gen_zeek_script_cpp}
-
 script_opt/Expr.cc
 script_opt/GenIDDefs.cc
 script_opt/IDOptInfo.cc
@@ -420,7 +405,6 @@ set(MAIN_SRCS
 script_opt/TempVar.cc
 script_opt/UsageAnalyzer.cc
 script_opt/UseDefs.cc
-
 script_opt/ZAM/AM-Opt.cc
 script_opt/ZAM/Branches.cc
 script_opt/ZAM/BuiltIn.cc
@@ -435,9 +419,7 @@ set(MAIN_SRCS
 script_opt/ZAM/ZBody.cc
 script_opt/ZAM/ZInst.cc
 script_opt/ZAM/ZOp.cc
-
-digest.h
-)
+digest.h)
 
 set(THIRD_PARTY_SRCS
 3rdparty/bro_inet_ntop.c # Remove in v6.1.
@@ -449,10 +431,10 @@ set(THIRD_PARTY_SRCS
 3rdparty/patricia.c
 3rdparty/setsignal.c
 $<$<BOOL:USE_SQLITE>:3rdparty/sqlite3.c>
-3rdparty/strsep.c
-)
+3rdparty/strsep.c)
 
-# Highwayhash. Highwayhash is a bit special since it has architecture dependent code...
+# Highwayhash. Highwayhash is a bit special since it has architecture dependent
+# code...
 set(hhash_dir ${PROJECT_SOURCE_DIR}/auxil/highwayhash/highwayhash)
 zeek_add_subdir_library(
 hhash
@@ -464,54 +446,43 @@ zeek_add_subdir_library(
 ${hhash_dir}/instruction_sets.cc
 ${hhash_dir}/nanobenchmark.cc
 ${hhash_dir}/os_specific.cc
-${hhash_dir}/hh_portable.cc
-)
+${hhash_dir}/hh_portable.cc)
 
 if (${COMPILER_ARCHITECTURE} STREQUAL "arm")
-check_c_source_compiles("
+check_c_source_compiles(
+"
 #if defined(__ARM_NEON__) || defined(__ARM_NEON)
 int main() { return 0; }
 #else
 #error
 #endif
-" test_arm_neon)
+"
+test_arm_neon)
 
 if (test_arm_neon)
 target_sources(zeek_hhash_obj PRIVATE ${hhash_dir}/hh_neon.cc)
 endif ()
 
-target_compile_options(
-zeek_hhash_obj
-PRIVATE
--mfloat-abi=hard
--march=armv7-a
--mfpu=neon
-)
+target_compile_options(zeek_hhash_obj PRIVATE -mfloat-abi=hard -march=armv7-a -mfpu=neon)
 elseif (${COMPILER_ARCHITECTURE} STREQUAL "aarch64")
 target_sources(zeek_hhash_obj PRIVATE ${hhash_dir}/hh_neon.cc)
 elseif (${COMPILER_ARCHITECTURE} STREQUAL "power")
 target_sources(zeek_hhash_obj PRIVATE ${hhash_dir}/hh_vsx.cc)
-set_source_files_properties(${hhash_dir}/hh_vsx.cc PROPERTIES COMPILE_FLAGS
--mvsx)
-elseif(${COMPILER_ARCHITECTURE} STREQUAL "x86_64")
-target_sources(
-zeek_hhash_obj
-PRIVATE
-${hhash_dir}/hh_avx2.cc
-${hhash_dir}/hh_sse41.cc)
+set_source_files_properties(${hhash_dir}/hh_vsx.cc PROPERTIES COMPILE_FLAGS -mvsx)
+elseif (${COMPILER_ARCHITECTURE} STREQUAL "x86_64")
+target_sources(zeek_hhash_obj PRIVATE ${hhash_dir}/hh_avx2.cc ${hhash_dir}/hh_sse41.cc)
 if (MSVC)
 set(_avx_flag /arch:AVX2)
-# Using an undocumentd compiler flag: https://stackoverflow.com/questions/64053597/how-do-i-enable-sse4-1-and-sse3-but-not-avx-in-msvc/69328426#69328426
+# Using an undocumentd compiler flag:
+# https://stackoverflow.com/questions/64053597/how-do-i-enable-sse4-1-and-sse3-but-not-avx-in-msvc/69328426#69328426
 set(_sse_flag /d2archSSE42)
-else()
+else ()
 set(_avx_flag -mavx2)
 set(_sse_flag -msse4.1)
-endif()
+endif ()
 
-set_source_files_properties(${hhash_dir}/hh_avx2.cc PROPERTIES COMPILE_FLAGS
-${_avx_flag})
-set_source_files_properties(${hhash_dir}/hh_sse41.cc PROPERTIES COMPILE_FLAGS
-${_sse_flag})
+set_source_files_properties(${hhash_dir}/hh_avx2.cc PROPERTIES COMPILE_FLAGS ${_avx_flag})
+set_source_files_properties(${hhash_dir}/hh_sse41.cc PROPERTIES COMPILE_FLAGS ${_sse_flag})
 endif ()
 
 set(zeek_SRCS
@@ -535,49 +506,49 @@ set(zeek_SRCS
 ${CMAKE_CURRENT_BINARY_DIR}/ZAM-MethodDecls.h
 ${THIRD_PARTY_SRCS}
 ${HH_SRCS}
-${MAIN_SRCS}
-)
+${MAIN_SRCS})
 
 collect_headers(zeek_HEADERS ${zeek_SRCS})
 
 add_library(zeek_objs OBJECT ${zeek_SRCS})
 target_link_libraries(zeek_objs PRIVATE $<BUILD_INTERFACE:zeek_internal>)
+target_compile_definitions(zeek_objs PRIVATE ZEEK_CONFIG_SKIP_VERSION_H)
 add_dependencies(zeek_objs zeek_autogen_files)
 add_clang_tidy_files(${zeek_SRCS})
 zeek_target_link_libraries(zeek_objs)
 
+if (HAVE_SPICY)
+target_link_libraries(zeek_objs PRIVATE hilti spicy)
+endif ()
+
 if (TARGET zeek_exe)
 target_sources(zeek_exe PRIVATE main.cc ${zeek_HEADERS})
 
-# npcap/winpcap need to be loaded in delayed mode so that we can set the load path
-# correctly at runtime. See https://npcap.com/guide/npcap-devguide.html#npcap-feature-native
-# for why this is necessary.
-if ( MSVC AND HAVE_WPCAP )
+# npcap/winpcap need to be loaded in delayed mode so that we can set the load
+# path correctly at runtime. See
+# https://npcap.com/guide/npcap-devguide.html#npcap-feature-native for why
+# this is necessary.
+if (MSVC AND HAVE_WPCAP)
 set(zeekdeps ${zeekdeps} delayimp.lib)
 set_target_properties(zeek_exe PROPERTIES LINK_FLAGS "/DELAYLOAD:wpcap.dll")
-endif()
+endif ()
 
 target_link_libraries(zeek_exe PRIVATE ${zeekdeps} ${CMAKE_THREAD_LIBS_INIT} ${CMAKE_DL_LIBS})
 
 # Export symbols from zeek executable for use by plugins
 set_target_properties(zeek_exe PROPERTIES ENABLE_EXPORTS TRUE)
 
-if ( MSVC )
+if (MSVC)
 set(WINDOWS_EXPORT_ALL_SYMBOLS ON)
 endif ()
 
-endif()
+endif ()
 
 if (TARGET zeek_lib)
 target_sources(zeek_lib PRIVATE ${zeek_HEADERS})
 
-target_link_libraries(
-zeek_lib
-PUBLIC
-${zeekdeps}
-${CMAKE_THREAD_LIBS_INIT}
-${CMAKE_DL_LIBS})
-endif()
+target_link_libraries(zeek_lib PUBLIC ${zeekdeps} ${CMAKE_THREAD_LIBS_INIT} ${CMAKE_DL_LIBS})
+endif ()
 
 zeek_include_directories(
 ${CMAKE_BINARY_DIR}
@@ -586,21 +557,23 @@ zeek_include_directories(
 ${CMAKE_CURRENT_BINARY_DIR}
 ${CMAKE_CURRENT_SOURCE_DIR}/include
 ${CMAKE_SOURCE_DIR}/zeek/src
-${CMAKE_SOURCE_DIR}/zeek/src/include
-)
+${CMAKE_SOURCE_DIR}/zeek/src/include)
 
 # Install *.bif.zeek.
-install(DIRECTORY ${PROJECT_BINARY_DIR}/scripts/base/bif DESTINATION ${ZEEK_SCRIPT_INSTALL_PATH}/base)
+install(DIRECTORY ${PROJECT_BINARY_DIR}/scripts/base/bif
+DESTINATION ${ZEEK_SCRIPT_INSTALL_PATH}/base)
 
 # Create plugin directory at install time.
 install(DIRECTORY DESTINATION ${ZEEK_PLUGIN_DIR})
 
 # Make clean removes the bif directory.
-set_directory_properties(PROPERTIES ADDITIONAL_MAKE_CLEAN_FILES ${PROJECT_BINARY_DIR}/scripts/base/bif)
+set_directory_properties(PROPERTIES ADDITIONAL_MAKE_CLEAN_FILES
+${PROJECT_BINARY_DIR}/scripts/base/bif)
 
-# Remove some stale files and scripts that previous Zeek versions put in
-# place, yet make confuse us now. This makes upgrading easier.
-install(CODE "
+# Remove some stale files and scripts that previous Zeek versions put in place,
+# yet make confuse us now. This makes upgrading easier.
+install(
+CODE "
 file(REMOVE_RECURSE
 ${ZEEK_SCRIPT_INSTALL_PATH}/base/frameworks/logging/writers/dataseries.bro
 ${ZEEK_SCRIPT_INSTALL_PATH}/base/frameworks/logging/writers/elasticsearch.bro
@@ -608,19 +581,21 @@ install(CODE "
 )
 ")
 
-# Make sure to escape a bunch of special characters in the path before trying to use it as a
-# regular expression below.
-string(REGEX REPLACE "([][+.*()^])" "\\\\\\1" escaped_include_path "${CMAKE_CURRENT_SOURCE_DIR}/include/*")
+# Make sure to escape a bunch of special characters in the path before trying to
+# use it as a regular expression below.
+string(REGEX REPLACE "([][+.*()^])" "\\\\\\1" escaped_include_path
+"${CMAKE_CURRENT_SOURCE_DIR}/include/*")
 
 if (WIN32)
-install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/windows/usr.include/
+install(
+DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/windows/usr.include/
 DESTINATION include/
 FILES_MATCHING
-PATTERN "*.h"
-)
-endif()
+PATTERN "*.h")
+endif ()
 
-install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/
+install(
+DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/
 DESTINATION include/zeek
 FILES_MATCHING
 PATTERN "*.h"
@@ -628,15 +603,14 @@ install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/
 PATTERN "3rdparty/*" EXCLUDE
 # The "zeek -> ." symlink isn't needed in the install-tree
 REGEX "${escaped_include_path}$" EXCLUDE
 
 # FILES_MATCHING creates empty directories:
-# https://gitlab.kitware.com/cmake/cmake/-/issues/17122
-# Exclude the ones that this affects explicitly.
+# https://gitlab.kitware.com/cmake/cmake/-/issues/17122 Exclude the ones that
+# this affects explicitly.
 PATTERN "script_opt/CPP/maint" EXCLUDE
-PATTERN "fuzzers/corpora" EXCLUDE
-)
+PATTERN "fuzzers/corpora" EXCLUDE)
 
-install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/
+install(
+DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/
 DESTINATION include/zeek
 FILES_MATCHING
 PATTERN "*.bif.func_h"
@@ -644,11 +618,10 @@ install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/
 PATTERN "*.bif.h"
 PATTERN "CMakeFiles" EXCLUDE
 # The "include/zeek -> .." symlink isn't needed in the install-tree
-REGEX "${escaped_include_path}$" EXCLUDE
-)
+REGEX "${escaped_include_path}$" EXCLUDE)
 
-install(FILES
-${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/ConvertUTF.h
+install(
+FILES ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/ConvertUTF.h
 ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/bro_inet_ntop.h # Remove in v6.1
 ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/zeek_inet_ntop.h
 ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/bsd-getopt-long.h
@@ -657,11 +630,10 @@ install(FILES
 ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/setsignal.h
 $<$<BOOL:USE_SQLITE>:${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/sqlite3.h>
 ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/doctest.h
-DESTINATION include/zeek/3rdparty
-)
+DESTINATION include/zeek/3rdparty)
 
-########################################################################
-## Clang-tidy target now that we have all of the sources
+# ##############################################################################
+# Clang-tidy target now that we have all of the sources
 
 add_clang_tidy_files(${MAIN_SRCS})
 
@@ -670,23 +642,22 @@ add_clang_tidy_files(${MAIN_SRCS})
 # *.bif.register.cc)
 create_clang_tidy_target()
 
-########################################################################
-## CTest setup.
+# ##############################################################################
+# CTest setup.
 
 # Scan all .cc files for TEST_CASE macros and generate CTest targets.
 if (ENABLE_ZEEK_UNIT_TESTS)
 set(test_cases "")
 foreach (cc_file ${TIDY_SRCS})
-file (STRINGS ${cc_file} test_case_lines REGEX "TEST_CASE")
+file(STRINGS ${cc_file} test_case_lines REGEX "TEST_CASE")
 foreach (line ${test_case_lines})
 string(REGEX REPLACE "TEST_CASE\\(\"(.+)\"\\)" "\\1" test_case "${line}")
 list(APPEND test_cases "${test_case}")
 endforeach ()
 endforeach ()
 list(LENGTH test_cases num_test_cases)
-MESSAGE(STATUS "-- Found ${num_test_cases} test cases for CTest")
+message(STATUS "-- Found ${num_test_cases} test cases for CTest")
 foreach (test_case ${test_cases})
-add_test(NAME "\"${test_case}\""
-COMMAND zeek --test "--test-case=${test_case}")
+add_test(NAME "\"${test_case}\"" COMMAND zeek --test "--test-case=${test_case}")
 endforeach ()
 endif ()
@@ -22,8 +22,7 @@ DebugLogger::Stream DebugLogger::streams[NUM_DBGS] = {
 {"logging", 0, false}, {"input", 0, false}, {"threading", 0, false},
 {"plugins", 0, false}, {"zeekygen", 0, false}, {"pktio", 0, false},
 {"broker", 0, false}, {"scripts", 0, false}, {"supervisor", 0, false},
-{"hashkey", 0, false},
-};
+{"hashkey", 0, false}, {"spicy", 0, false}};
 
 DebugLogger::DebugLogger()
 {
@@ -56,7 +55,7 @@ void DebugLogger::OpenDebugLog(const char* filename)
 }
 }
 
-setvbuf(file, NULL, _IOLBF, 0);
+util::detail::setvbuf(file, NULL, _IOLBF, 0);
 }
 else
 file = stderr;
@@ -57,6 +57,7 @@ enum DebugStream
 DBG_SCRIPTS, // Script initialization
 DBG_SUPERVISOR, // Process supervisor
 DBG_HASHKEY, // HashKey buffers
+DBG_SPICY, // Spicy functionality
 
 NUM_DBGS // Has to be last
 };
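As a rough illustration of what the new debug stream enables (not part of the diff): in debug builds, runtime code can write to it through Zeek's existing DBG_LOG macro, and the stream can then be selected at startup, typically with `zeek -B spicy`. The helper function below is a hypothetical sketch; its name and arguments are made up for illustration.

```cpp
#include <cinttypes>

#include "zeek/DebugLogger.h"

// Hypothetical helper: log Spicy-related activity to the new "spicy" debug
// stream. DBG_LOG is only effective when Zeek was built with debugging enabled.
static void log_spicy_activity(const char* analyzer, uint64_t bytes)
	{
#ifdef DEBUG
	DBG_LOG(zeek::DBG_SPICY, "%s processed %" PRIu64 " bytes", analyzer, bytes);
#endif
	}
```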
src/Event.cc
@@ -7,7 +7,6 @@
 #include "zeek/Desc.h"
 #include "zeek/Func.h"
 #include "zeek/NetVar.h"
-#include "zeek/RunState.h"
 #include "zeek/Trigger.h"
 #include "zeek/Val.h"
 #include "zeek/iosource/Manager.h"
@@ -19,10 +18,10 @@ zeek::EventMgr zeek::event_mgr;
 namespace zeek
 {
 
-Event::Event(EventHandlerPtr arg_handler, zeek::Args arg_args, util::detail::SourceID arg_src,
-analyzer::ID arg_aid, Obj* arg_obj)
-: handler(arg_handler), args(std::move(arg_args)), src(arg_src), aid(arg_aid), obj(arg_obj),
-next_event(nullptr)
+Event::Event(const EventHandlerPtr& arg_handler, zeek::Args arg_args,
+util::detail::SourceID arg_src, analyzer::ID arg_aid, Obj* arg_obj, double arg_ts)
+: handler(arg_handler), args(std::move(arg_args)), src(arg_src), aid(arg_aid), ts(arg_ts),
+obj(arg_obj), next_event(nullptr)
 {
 if ( obj )
 Ref(obj);
@@ -53,7 +52,7 @@ void Event::Dispatch(bool no_remote)
 
 try
 {
-handler->Call(&args, no_remote);
+handler->Call(&args, no_remote, ts);
 }
 
 catch ( InterpreterException& e )
@@ -74,6 +73,7 @@ EventMgr::EventMgr()
 head = tail = nullptr;
 current_src = util::detail::SOURCE_LOCAL;
 current_aid = 0;
+current_ts = 0;
 src_val = nullptr;
 draining = false;
 }
@@ -91,9 +91,9 @@ EventMgr::~EventMgr()
 }
 
 void EventMgr::Enqueue(const EventHandlerPtr& h, Args vl, util::detail::SourceID src,
-analyzer::ID aid, Obj* obj)
+analyzer::ID aid, Obj* obj, double ts)
 {
-QueueEvent(new Event(h, std::move(vl), src, aid, obj));
+QueueEvent(new Event(h, std::move(vl), src, aid, obj, ts));
 }
 
 void EventMgr::QueueEvent(Event* event)
@@ -120,6 +120,8 @@ void EventMgr::QueueEvent(Event* event)
 void EventMgr::Dispatch(Event* event, bool no_remote)
 {
 current_src = event->Source();
+current_aid = event->Analyzer();
+current_ts = event->Time();
 event->Dispatch(no_remote);
 Unref(event);
 }
@@ -154,6 +156,7 @@ void EventMgr::Drain()
 
 current_src = current->Source();
 current_aid = current->Analyzer();
+current_ts = current->Time();
 current->Dispatch();
 Unref(current);
 
21 src/Event.h

@@ -15,14 +15,19 @@
 namespace zeek
 {

+namespace run_state
+{
+extern double network_time;
+} // namespace run_state
+
 class EventMgr;

 class Event final : public Obj
     {
 public:
-    Event(EventHandlerPtr handler, zeek::Args args,
+    Event(const EventHandlerPtr& handler, zeek::Args args,
           util::detail::SourceID src = util::detail::SOURCE_LOCAL, analyzer::ID aid = 0,
-          Obj* obj = nullptr);
+          Obj* obj = nullptr, double ts = run_state::network_time);

     void SetNext(Event* n) { next_event = n; }
     Event* NextEvent() const { return next_event; }

@@ -31,6 +36,7 @@ public:
     analyzer::ID Analyzer() const { return aid; }
     EventHandlerPtr Handler() const { return handler; }
     const zeek::Args& Args() const { return args; }
+    double Time() const { return ts; }

     void Describe(ODesc* d) const override;

@@ -45,6 +51,7 @@ protected:
     zeek::Args args;
     util::detail::SourceID src;
     analyzer::ID aid;
+    double ts;
     Obj* obj;
     Event* next_event;
     };

@@ -66,10 +73,12 @@ public:
      * @param aid identifies the protocol analyzer generating the event.
      * @param obj an arbitrary object to use as a "cookie" or just hold a
      * reference to until dispatching the event.
+     * @param ts timestamp at which the event is intended to be executed
+     * (defaults to current network time).
      */
     void Enqueue(const EventHandlerPtr& h, zeek::Args vl,
                  util::detail::SourceID src = util::detail::SOURCE_LOCAL, analyzer::ID aid = 0,
-                 Obj* obj = nullptr);
+                 Obj* obj = nullptr, double ts = run_state::network_time);

     /**
      * A version of Enqueue() taking a variable number of arguments.

@@ -95,6 +104,11 @@ public:
     // non-analyzer event.
     analyzer::ID CurrentAnalyzer() const { return current_aid; }

+    // Returns the timestamp of the last raised event. The timestamp reflects the network time
+    // the event was intended to be executed. For scheduled events, this is the time the event
+    // was scheduled to. For any other event, this is the time when the event was created.
+    double CurrentEventTime() const { return current_ts; }
+
     int Size() const { return num_events_queued - num_events_dispatched; }

     void Describe(ODesc* d) const override;

@@ -114,6 +128,7 @@ protected:
     Event* tail;
     util::detail::SourceID current_src;
     analyzer::ID current_aid;
+    double current_ts;
     RecordVal* src_val;
     bool draining;
     detail::Flare queue_flare;

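Taken together, the Event.cc and Event.h changes let callers attach an explicit timestamp to an event, and EventMgr exposes it through CurrentEventTime() while the handler runs. A minimal sketch of enqueuing an event for a specific network time, based on the Enqueue() signature shown above (the handler is assumed to have been looked up elsewhere; all names are placeholders):

    #include "zeek/Event.h"
    #include "zeek/EventHandler.h"
    #include "zeek/Val.h"

    // Enqueue an already-resolved event handler so that it carries an explicit
    // timestamp instead of the default run_state::network_time.
    void enqueue_at(const zeek::EventHandlerPtr& h, double when)
        {
        zeek::Args args;
        args.emplace_back(zeek::make_intrusive<zeek::DoubleVal>(when));

        // The trailing argument overrides the default; while this event is
        // dispatched, EventMgr::CurrentEventTime() reports "when".
        zeek::event_mgr.Enqueue(h, std::move(args), zeek::util::detail::SOURCE_LOCAL,
                                /*aid=*/0, /*obj=*/nullptr, when);
        }
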
@@ -52,7 +52,7 @@ void EventHandler::SetFunc(FuncPtr f)
     local = std::move(f);
     }

-void EventHandler::Call(Args* vl, bool no_remote)
+void EventHandler::Call(Args* vl, bool no_remote, double ts)
     {
     if ( ! call_count )
         {

@@ -100,10 +100,10 @@ void EventHandler::Call(Args* vl, bool no_remote)
     ++it;

     if ( it != auto_publish.end() )
-        broker_mgr->PublishEvent(topic, Name(), xs);
+        broker_mgr->PublishEvent(topic, Name(), xs, ts);
     else
         {
-        broker_mgr->PublishEvent(topic, Name(), std::move(xs));
+        broker_mgr->PublishEvent(topic, Name(), std::move(xs), ts);
         break;
         }
     }

@@ -14,6 +14,11 @@
 namespace zeek
 {

+namespace run_state
+{
+extern double network_time;
+} // namespace run_state
+
 class Func;
 using FuncPtr = IntrusivePtr<Func>;

@@ -34,7 +39,7 @@ public:

     void AutoUnpublish(const std::string& topic) { auto_publish.erase(topic); }

-    void Call(zeek::Args* vl, bool no_remote = false);
+    void Call(zeek::Args* vl, bool no_remote = false, double ts = run_state::network_time);

     // Returns true if there is at least one local or remote handler.
     explicit operator bool() const;

@@ -213,12 +213,16 @@ void ValTrace::ComputeDelta(const ValTrace* prev, DeltaVector& deltas) const
         // use the constant representation instead.
         break;

+    case TYPE_ANY:
     case TYPE_FILE:
     case TYPE_OPAQUE:
-    case TYPE_ANY:
-        // These we have no way of creating as constants.
-        reporter->Error("cannot generate an event trace for an event of type %s",
-                        type_name(tag));
+        // If we have a previous instance, we can ignore this
+        // one, because we know it's equivalent (due to the
+        // test at the beginning of this method), and it's
+        // not meaningful to recurse inside it looking for
+        // interior changes.
+        if ( ! prev )
+            deltas.emplace_back(std::make_unique<DeltaUnsupportedCreate>(this));
         break;

     case TYPE_LIST:

@@ -241,6 +245,13 @@ void ValTrace::ComputeDelta(const ValTrace* prev, DeltaVector& deltas) const
         if ( prev )
             ComputeTableDelta(prev, deltas);

+        else if ( GetType()->AsTableType()->IsUnspecifiedTable() )
+            // For unspecified values, we generate them
+            // as empty constructors, because we don't
+            // know their yield type and thus can't
+            // create variables corresponding to them.
+            break;
+
         else if ( t->Yield() )
             deltas.emplace_back(std::make_unique<DeltaTableCreate>(this));
         else

@@ -250,6 +261,11 @@ void ValTrace::ComputeDelta(const ValTrace* prev, DeltaVector& deltas) const
     case TYPE_VECTOR:
         if ( prev )
             ComputeVectorDelta(prev, deltas);
+
+        else if ( GetType()->AsVectorType()->IsUnspecifiedVector() )
+            // See above for empty tables/sets.
+            break;
+
         else
             deltas.emplace_back(std::make_unique<DeltaVectorCreate>(this));
         break;

@@ -722,6 +738,11 @@ std::string DeltaVectorCreate::Generate(ValTraceMgr* vtm) const
     return std::string(" = vector(") + vec + ")";
     }

+std::string DeltaUnsupportedCreate::Generate(ValTraceMgr* vtm) const
+    {
+    return " = UNSUPPORTED " + obj_desc_short(vt->GetVal()->GetType().get());
+    }
+
 EventTrace::EventTrace(const ScriptFunc* _ev, double _nt, size_t event_num) : ev(_ev), nt(_nt)
     {
     auto ev_name = std::regex_replace(ev->Name(), std::regex(":"), "_");

@@ -770,13 +791,19 @@ void EventTrace::Generate(FILE* f, ValTraceMgr& vtm, const DeltaGenVec& dvec, st
         fprintf(f, "\t");

         auto& val = d.GetVal();
+        bool define_local = d.IsFirstDef() && ! vtm.IsGlobal(val);

-        if ( d.IsFirstDef() && ! vtm.IsGlobal(val) )
+        if ( define_local )
             fprintf(f, "local ");

         if ( d.NeedsLHS() )
+            {
             fprintf(f, "%s", vtm.ValName(val).c_str());
+
+            if ( define_local )
+                fprintf(f, ": %s", obj_desc_short(val->GetType().get()).c_str());
+            }

         auto anno = offset < num_pre ? " # from script" : "";

         fprintf(f, "%s;%s\n", d.RHS().c_str(), anno);

@@ -798,7 +825,8 @@ void EventTrace::Generate(FILE* f, ValTraceMgr& vtm, const DeltaGenVec& dvec, st
     }
 else
     {
-    fprintf(f, "\tset_network_time(double_to_time(%.06f));\n", nt);
+    auto tm = vtm.TimeConstant(nt);
+    fprintf(f, "\tset_network_time(%s);\n", tm.c_str());
     fprintf(f, "\tevent __EventTrace::%s();\n", successor.c_str());
     }

@@ -870,66 +898,28 @@ const std::string& ValTraceMgr::ValName(const ValPtr& v)
     {
     auto find = val_names.find(v.get());
     if ( find == val_names.end() )
-        {
-        if ( IsAggr(v->GetType()) )
-            { // Aggregate shouldn't exist; create it
-            ASSERT(val_map.count(v.get()) == 0);
-            NewVal(v);
-            find = val_names.find(v.get());
-            }
-
-        else
-            { // Non-aggregate can be expressed using a constant
-            auto tag = v->GetType()->Tag();
-            std::string rep;
-
-            if ( tag == TYPE_STRING )
-                {
-                auto s = v->AsStringVal();
-                rep = escape_string(s->Bytes(), s->Len());
-                }
-
-            else if ( tag == TYPE_LIST )
-                {
-                auto lv = cast_intrusive<ListVal>(v);
-                for ( auto& v_i : lv->Vals() )
-                    {
-                    if ( ! rep.empty() )
-                        rep += ", ";
-
-                    rep += ValName(v_i);
-                    }
-                }
-
-            else if ( tag == TYPE_FUNC )
-                rep = v->AsFunc()->Name();
-
-            else if ( tag == TYPE_TIME )
-                rep = std::string("double_to_time(") + std::to_string(v->AsDouble()) + ")";
-
-            else if ( tag == TYPE_INTERVAL )
-                rep = std::string("double_to_interval(") + std::to_string(v->AsDouble()) + ")";
-
-            else
-                {
-                ODesc d;
-                v->Describe(&d);
-                rep = d.Description();
-                }
-
-            val_names[v.get()] = rep;
-            vals.push_back(v);
-            find = val_names.find(v.get());
-            }
-
-        ASSERT(find != val_names.end());
-        }
+        find = val_names.insert({v.get(), GenValName(v)}).first;

     ValUsed(v);

     return find->second;
     }

+std::string ValTraceMgr::TimeConstant(double t)
+    {
+    if ( t < std::max(base_time, 1e6) )
+        return "double_to_time(" + std::to_string(t) + ")";
+
+    if ( ! base_time )
+        base_time = t;
+
+    if ( t == base_time )
+        return "double_to_time(__base_time)";
+
+    t -= base_time;
+    return "double_to_time(__base_time + " + std::to_string(t) + ")";
+    }
+
 void ValTraceMgr::AddVal(ValPtr v)
     {
     auto mapping = val_map.find(v.get());

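TimeConstant(), added above, keeps small or pre-base times as absolute double_to_time() constants and expresses everything after the first large timestamp as an offset from a single __base_time value. A standalone sketch of that policy, mirroring the function above outside of the ValTraceMgr class, with example outputs noted in comments:

    #include <algorithm>
    #include <iostream>
    #include <string>

    static double base_time = 0.0; // 0 means "not established yet"

    // Same policy as the TimeConstant() added in this hunk.
    std::string time_constant(double t)
        {
        if ( t < std::max(base_time, 1e6) )
            return "double_to_time(" + std::to_string(t) + ")"; // absolute: small or pre-base times

        if ( ! base_time )
            base_time = t; // the first sufficiently large time becomes the base

        if ( t == base_time )
            return "double_to_time(__base_time)";

        return "double_to_time(__base_time + " + std::to_string(t - base_time) + ")";
        }

    int main()
        {
        std::cout << time_constant(0.5) << "\n";          // stays absolute (below the 1e6 cutoff)
        std::cout << time_constant(1699999000.0) << "\n"; // establishes __base_time
        std::cout << time_constant(1699999002.5) << "\n"; // reported as __base_time + 2.5
        }
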
@@ -1003,16 +993,143 @@ void ValTraceMgr::AssessChange(const ValTrace* vt, const ValTrace* prev_vt)
     }

     auto& v = vt->GetVal();
-    if ( IsAggr(v->GetType()) )
+    if ( IsAggr(v->GetType()) && (prev_vt || ! IsUnspecifiedAggregate(v)) )
         ValUsed(vt->GetVal());
     }

 void ValTraceMgr::TrackVar(const Val* v)
     {
-    auto val_name = std::string("__val") + std::to_string(num_vars++);
+    std::string base_name = IsUnsupported(v) ? "UNSUPPORTED" : "val";
+    auto val_name = "__" + base_name + std::to_string(num_vars++);
     val_names[v] = val_name;
     }

+std::string ValTraceMgr::GenValName(const ValPtr& v)
+    {
+    if ( IsAggr(v->GetType()) && ! IsUnspecifiedAggregate(v) )
+        { // Aggregate shouldn't exist; create it
+        ASSERT(val_map.count(v.get()) == 0);
+        NewVal(v);
+        return val_names[v.get()];
+        }
+
+    // Non-aggregate (or unspecified aggregate) can be expressed using
+    // a constant.
+    auto t = v->GetType();
+    auto tag = t->Tag();
+    std::string rep;
+    bool track_constant = false;
+
+    switch ( tag )
+        {
+        case TYPE_STRING:
+            {
+            auto s = v->AsStringVal();
+            rep = escape_string(s->Bytes(), s->Len());
+            track_constant = s->Len() > 0;
+            break;
+            }
+
+        case TYPE_LIST:
+            {
+            auto lv = cast_intrusive<ListVal>(v);
+            for ( auto& v_i : lv->Vals() )
+                {
+                if ( ! rep.empty() )
+                    rep += ", ";
+
+                rep += ValName(v_i);
+                }
+            break;
+            }
+
+        case TYPE_FUNC:
+            rep = v->AsFunc()->Name();
+            break;
+
+        case TYPE_TIME:
+            {
+            auto tm = v->AsDouble();
+            rep = TimeConstant(tm);
+
+            if ( tm > 0.0 && rep.find("__base_time") == std::string::npos )
+                // We're not representing it using base_time.
+                track_constant = true;
+
+            break;
+            }
+
+        case TYPE_INTERVAL:
+            rep = "double_to_interval(" + std::to_string(v->AsDouble()) + ")";
+            break;
+
+        case TYPE_TABLE:
+            rep = t->Yield() ? "table()" : "set()";
+            break;
+
+        case TYPE_VECTOR:
+            rep = "vector()";
+            break;
+
+        case TYPE_PATTERN:
+        case TYPE_PORT:
+        case TYPE_ADDR:
+        case TYPE_SUBNET:
+            {
+            ODesc d;
+            v->Describe(&d);
+            rep = d.Description();
+            track_constant = true;
+
+            if ( tag == TYPE_ADDR || tag == TYPE_SUBNET )
+                {
+                // Fix up deficiency that IPv6 addresses are
+                // described without surrounding []'s.
+                const auto& addr = tag == TYPE_ADDR ? v->AsAddr() : v->AsSubNet().Prefix();
+                if ( addr.GetFamily() == IPv6 )
+                    rep = "[" + rep + "]";
+                }
+            }
+            break;
+
+        default:
+            {
+            ODesc d;
+            v->Describe(&d);
+            rep = d.Description();
+            }
+        }
+
+    val_names[v.get()] = rep;
+    vals.push_back(v);
+
+    if ( track_constant )
+        constants[tag].insert(rep);
+
+    return rep;
+    }
+
+bool ValTraceMgr::IsUnspecifiedAggregate(const ValPtr& v) const
+    {
+    auto t = v->GetType()->Tag();
+
+    if ( t == TYPE_TABLE && v->GetType<TableType>()->IsUnspecifiedTable() )
+        return true;
+
+    if ( t == TYPE_VECTOR && v->GetType<VectorType>()->IsUnspecifiedVector() )
+        return true;
+
+    return false;
+    }
+
+bool ValTraceMgr::IsUnsupported(const Val* v) const
+    {
+    auto t = v->GetType()->Tag();
+    return t == TYPE_ANY || t == TYPE_FILE || t == TYPE_OPAQUE;
+    }
+
 EventTraceMgr::EventTraceMgr(const std::string& trace_file)
     {
     f = fopen(trace_file.c_str(), "w");

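GenValName() records every constant representation it hands out in a per-type-tag set (the constants member introduced in the EventTrace.h hunk further below), so the generated trace can later summarize the distinct constants per type. A tiny standalone sketch of that bookkeeping pattern, using made-up tag values rather than Zeek's TypeTag:

    #include <array>
    #include <cstddef>
    #include <iostream>
    #include <set>
    #include <string>

    // Illustrative stand-in for Zeek's TypeTag enumeration.
    enum Tag : std::size_t { TAG_ADDR, TAG_PORT, TAG_STRING, NUM_TAGS };

    // One ordered set of constant representations per tag, mirroring
    // std::array<std::set<std::string>, NUM_TYPES> constants.
    std::array<std::set<std::string>, NUM_TAGS> constants;

    void track(Tag tag, const std::string& rep)
        {
        constants[tag].insert(rep); // duplicates collapse automatically
        }

    int main()
        {
        track(TAG_ADDR, "1.2.3.4");
        track(TAG_ADDR, "1.2.3.4"); // recorded only once
        track(TAG_PORT, "80/tcp");

        for ( std::size_t t = 0; t < NUM_TAGS; ++t )
            for ( const auto& c : constants[t] )
                std::cout << "tag " << t << ": " << c << "\n";
        }
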
@@ -1027,6 +1144,11 @@ EventTraceMgr::~EventTraceMgr()

     fprintf(f, "module __EventTrace;\n\n");

+    auto bt = vtm.GetBaseTime();
+
+    if ( bt )
+        fprintf(f, "global __base_time = %.06f;\n\n", bt);
+
     for ( auto& e : events )
         fprintf(f, "global %s: event();\n", e->GetName());

@@ -1044,6 +1166,22 @@ EventTraceMgr::~EventTraceMgr()
         events[i]->Generate(f, vtm, predecessor.get(), successor);
         }

+    const auto& constants = vtm.GetConstants();
+
+    for ( auto tag = 0; tag < NUM_TYPES; ++tag )
+        {
+        auto& c_t = constants[tag];
+        if ( c_t.empty() && (tag != TYPE_TIME || ! bt) )
+            continue;
+
+        fprintf(f, "\n# constants of type %s:\n", type_name(TypeTag(tag)));
+        if ( tag == TYPE_TIME && bt )
+            fprintf(f, "#\t__base_time = %.06f\n", bt);
+
+        for ( auto& c : c_t )
+            fprintf(f, "#\t%s\n", c.c_str());
+        }
+
     fclose(f);
     }

@@ -1053,9 +1191,12 @@ void EventTraceMgr::StartEvent(const ScriptFunc* ev, const zeek::Args* args)
         return;

     auto nt = run_state::network_time;
-    if ( nt == 0.0 )
+    if ( nt == 0.0 || util::streq(ev->Name(), "zeek_init") )
         return;

+    if ( ! vtm.GetBaseTime() )
+        vtm.SetBaseTime(nt);
+
     auto et = std::make_shared<EventTrace>(ev, nt, events.size());
     events.emplace_back(et);

@@ -1067,7 +1208,7 @@ void EventTraceMgr::EndEvent(const ScriptFunc* ev, const zeek::Args* args)
     if ( script_events.count(ev->Name()) > 0 )
         return;

-    if ( run_state::network_time > 0.0 )
+    if ( run_state::network_time > 0.0 && ! util::streq(ev->Name(), "zeek_init") )
         vtm.FinishCurrentEvent(args);
     }

@@ -264,8 +264,16 @@ public:
     DeltaVectorCreate(const ValTrace* _vt) : ValDelta(_vt) { }

     std::string Generate(ValTraceMgr* vtm) const override;
+    };

-private:
+// Captures the notion of creating a value with an unsupported type
+// (like "opaque").
+class DeltaUnsupportedCreate : public ValDelta
+    {
+public:
+    DeltaUnsupportedCreate(const ValTrace* _vt) : ValDelta(_vt) { }
+
+    std::string Generate(ValTraceMgr* vtm) const override;
     };

 // Manages the changes to (or creation of) a variable used to represent

@@ -385,6 +393,19 @@ public:
     // needs to be global (because it's used across multiple events).
     bool IsGlobal(const ValPtr& v) const { return globals.count(v.get()) > 0; }

+    // Returns or sets the "base time" from which eligible times are
+    // transformed into offsets rather than maintained as absolute
+    // values.
+    double GetBaseTime() const { return base_time; }
+    void SetBaseTime(double bt) { base_time = bt; }
+
+    // Returns a Zeek script representation of the given "time" value.
+    // This might be relative to base_time or might be absolute.
+    std::string TimeConstant(double t);
+
+    // Returns the array of per-type-tag constants.
+    const auto& GetConstants() const { return constants; }
+
 private:
     // Traces the given value, which we may-or-may-not have seen before.
     void AddVal(ValPtr v);

@@ -404,6 +425,17 @@ private:
     // Create and track a script variable associated with the given value.
     void TrackVar(const Val* vt);

+    // Generates a name for a value.
+    std::string GenValName(const ValPtr& v);
+
+    // True if the given value is an unspecified (and empty set,
+    // table, or vector appearing as a constant rather than an
+    // already-typed value).
+    bool IsUnspecifiedAggregate(const ValPtr& v) const;
+
+    // True if the given value has an unsupported type.
+    bool IsUnsupported(const Val* v) const;
+
     // Maps values to their associated traces.
     std::unordered_map<const Val*, std::shared_ptr<ValTrace>> val_map;

@@ -423,6 +455,15 @@ private:
     // to be global.
     std::unordered_set<const Val*> globals;

+    // Indexed by type tag, stores an ordered set of all of the distinct
+    // representations of constants of that type.
+    std::array<std::set<std::string>, NUM_TYPES> constants;
+
+    // If non-zero, then we've established a "base time" and will report
+    // time constants as offsets from it (when reasonable, i.e., no
+    // negative offsets, and base_time can't be too close to 0.0).
+    double base_time = 0.0;
+
     // The event we're currently tracing.
     std::shared_ptr<EventTrace> curr_ev;

11 src/Expr.cc

@@ -2932,7 +2932,13 @@ ValPtr IndexExpr::Fold(Val* v1, Val* v2) const
     const ListVal* lv = v2->AsListVal();

     if ( lv->Length() == 1 )
-        v = vect->ValAt(lv->Idx(0)->CoerceToUnsigned());
+        {
+        auto index = lv->Idx(0)->CoerceToInt();
+        if ( index < 0 )
+            index = vect->Size() + index;
+
+        v = vect->ValAt(index);
+        }
     else
         return index_slice(vect, lv);
     }

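The rewritten IndexExpr::Fold() branch coerces the index to a signed value and maps negative indices to positions counted from the end of the vector before looking the element up. The same normalization in isolation, as a small self-contained sketch (not the Expr machinery itself):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Map a possibly-negative index onto [0, size); negative values count
    // from the end, and anything still out of range yields -1.
    std::int64_t normalize_index(std::int64_t index, std::int64_t size)
        {
        if ( index < 0 )
            index = size + index; // e.g. -1 becomes size - 1, the last element

        return (index >= 0 && index < size) ? index : -1;
        }

    int main()
        {
        std::vector<int> v = {10, 20, 30};
        auto n = static_cast<std::int64_t>(v.size());

        assert(normalize_index(-1, n) == 2);  // last element
        assert(normalize_index(1, n) == 1);   // non-negative indices are unchanged
        assert(normalize_index(-4, n) == -1); // still out of range
        }
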
@@ -4279,7 +4285,8 @@ ScheduleTimer::~ScheduleTimer() { }
 void ScheduleTimer::Dispatch(double /* t */, bool /* is_expire */)
     {
     if ( event )
-        event_mgr.Enqueue(event, std::move(args));
+        event_mgr.Enqueue(event, std::move(args), util::detail::SOURCE_LOCAL, 0, nullptr,
+                          this->Time());
     }

 ScheduleExpr::ScheduleExpr(ExprPtr arg_when, EventExprPtr arg_event)

10 src/File.cc

@@ -202,16 +202,8 @@ void File::SetBuf(bool arg_buffered)
     if ( ! f )
         return;

-#ifndef _MSC_VER
-    if ( setvbuf(f, NULL, arg_buffered ? _IOFBF : _IOLBF, 0) != 0 )
+    if ( util::detail::setvbuf(f, NULL, arg_buffered ? _IOFBF : _IOLBF, 0) != 0 )
         reporter->Error("setvbuf failed");
-#else
-    // TODO: this turns off buffering altogether because Windows wants us to pass a valid
-    // buffer and length if we're going to pass one of the other modes. We need to
-    // investigate the performance ramifications of this.
-    if ( setvbuf(f, NULL, _IONBF, 0) != 0 )
-        reporter->Error("setvbuf failed");
-#endif

     buffered = arg_buffered;
     }

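Both this hunk and the DebugLogger.cc change above now call util::detail::setvbuf rather than the libc function directly, moving the Windows special case that previously sat behind #ifndef _MSC_VER into one shared wrapper. The wrapper's body is not part of this diff; purely as an assumption, a portability shim along those lines could look like:

    #include <cstddef>
    #include <cstdio>

    namespace util_detail_sketch
        {
    // Hypothetical stand-in for util::detail::setvbuf (the real one may differ):
    // the MSVC runtime rejects a null buffer with _IOFBF/_IOLBF, so fall back
    // to unbuffered mode there, as the removed TODO comment described.
    inline int setvbuf(std::FILE* stream, char* buf, int mode, std::size_t size)
        {
    #ifdef _MSC_VER
        if ( ! buf && (mode == _IOFBF || mode == _IOLBF) )
            return std::setvbuf(stream, nullptr, _IONBF, 0);
    #endif
        return std::setvbuf(stream, buf, mode, size);
        }
        } // namespace util_detail_sketch
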
12 src/IP.cc

@@ -390,10 +390,14 @@ RecordValPtr IP_Hdr::ToIPHdrVal() const
     rval->Assign(1, ip4->ip_tos);
     rval->Assign(2, ntohs(ip4->ip_len));
     rval->Assign(3, ntohs(ip4->ip_id));
-    rval->Assign(4, ip4->ip_ttl);
-    rval->Assign(5, ip4->ip_p);
-    rval->Assign(6, make_intrusive<AddrVal>(ip4->ip_src.s_addr));
-    rval->Assign(7, make_intrusive<AddrVal>(ip4->ip_dst.s_addr));
+    rval->Assign(4, DF());
+    rval->Assign(5, MF());
+    rval->Assign(6, FragOffset()); // 13 bit offset as multiple of 8
+    rval->Assign(7, ip4->ip_ttl);
+    rval->Assign(8, ip4->ip_p);
+    rval->Assign(9, ntohs(ip4->ip_sum));
+    rval->Assign(10, make_intrusive<AddrVal>(ip4->ip_src.s_addr));
+    rval->Assign(11, make_intrusive<AddrVal>(ip4->ip_dst.s_addr));
     }
 else
     {

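The record produced by ToIPHdrVal() now also exposes the DF and MF flags, the fragment offset, and the header checksum; DF(), MF() and FragOffset() are existing IP_Hdr accessors. For reference, this is how those values are laid out in the 16-bit flags/fragment-offset word of an IPv4 header (a standalone sketch, not Zeek's implementation):

    #include <cstdint>

    struct FragInfo
        {
        bool df;                    // "don't fragment" flag
        bool mf;                    // "more fragments" flag
        uint32_t frag_offset_bytes; // 13-bit offset, stored in units of 8 bytes
        };

    // ip_off_host is the IPv4 flags + fragment offset word in host byte order.
    FragInfo decode_ip_off(uint16_t ip_off_host)
        {
        FragInfo fi;
        fi.df = (ip_off_host & 0x4000) != 0;
        fi.mf = (ip_off_host & 0x2000) != 0;
        fi.frag_offset_bytes = static_cast<uint32_t>(ip_off_host & 0x1FFF) * 8;
        return fi;
        }
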
@@ -166,6 +166,8 @@ void usage(const char* prog, int code)
             util::zeek_prefixes().c_str());
     fprintf(stderr, " $ZEEK_DNS_FAKE | disable DNS lookups (%s)\n",
             fake_dns() ? "on" : "off");
+    fprintf(stderr, " $ZEEK_SEED_VALUES | list of space separated seeds (%s)\n",
+            getenv("ZEEK_SEED_VALUES") ? "set" : "not set");
     fprintf(stderr, " $ZEEK_SEED_FILE | file to load seeds from (not set)\n");
     fprintf(stderr, " $ZEEK_LOG_SUFFIX | ASCII log file extension (.%s)\n",
             logging::writer::detail::Ascii::LogExt().c_str());

@@ -10,8 +10,7 @@

 #include "zeek/Desc.h"
 #include "zeek/Reporter.h"
-#include "zeek/Stmt.h"
-#include "zeek/util.h"
+#include "zeek/Type.h"

 using namespace std;

@@ -20,19 +19,17 @@ namespace zeek::detail

 ScriptCoverageManager::ScriptCoverageManager() : ignoring(0), delim('\t') { }

-ScriptCoverageManager::~ScriptCoverageManager()
-    {
-    for ( auto& s : stmts )
-        Unref(s);
-    }
-
 void ScriptCoverageManager::AddStmt(Stmt* s)
     {
     if ( ignoring != 0 )
         return;

-    Ref(s);
-    stmts.push_back(s);
+    stmts.push_back({NewRef{}, s});
+    }
+
+void ScriptCoverageManager::AddFunction(IDPtr func_id, StmtPtr body)
+    {
+    func_instances.push_back({func_id, body});
     }

 bool ScriptCoverageManager::ReadStats()

@@ -127,31 +124,47 @@ bool ScriptCoverageManager::WriteStats()
         return false;
         }

-    for ( auto s : stmts )
+    for ( auto& s : stmts )
         {
-        ODesc location_info;
-        s->GetLocationInfo()->Describe(&location_info);
         ODesc desc_info;
         s->Describe(&desc_info);
-        string desc(desc_info.Description());
-        canonicalize_desc cd{delim};
-        for_each(desc.begin(), desc.end(), cd);
-        pair<string, string> location_desc(location_info.Description(), desc);
-        if ( usage_map.find(location_desc) != usage_map.end() )
-            usage_map[location_desc] += s->GetAccessCount();
-        else
-            usage_map[location_desc] = s->GetAccessCount();
+        TrackUsage(s, desc_info.Description(), s->GetAccessCount());
         }

-    map<pair<string, string>, uint64_t>::const_iterator it;
-    for ( auto& um : usage_map )
+    for ( auto& [func, body] : func_instances )
         {
-        fprintf(f, "%" PRIu64 "%c%s%c%s\n", um.second, delim, um.first.first.c_str(), delim,
-                um.first.second.c_str());
+        auto ft = func->GetType<FuncType>();
+        auto desc = ft->FlavorString() + " " + func->Name() + " BODY";
+
+        TrackUsage(body, desc, body->GetAccessCount());
         }

+    for ( auto& [location_info, cnt] : usage_map )
+        Report(f, cnt, location_info.first, location_info.second);
+
     fclose(f);
     return true;
     }

+void ScriptCoverageManager::TrackUsage(const ObjPtr& obj, std::string desc, uint64_t cnt)
+    {
+    ODesc location_info;
+    obj->GetLocationInfo()->Describe(&location_info);
+
+    static canonicalize_desc cd{delim};
+    for_each(desc.begin(), desc.end(), cd);
+
+    pair<string, string> location_desc(location_info.Description(), desc);
+
+    if ( usage_map.find(location_desc) != usage_map.end() )
+        usage_map[location_desc] += cnt;
+    else
+        usage_map[location_desc] = cnt;
+    }
+
+void ScriptCoverageManager::Report(FILE* f, uint64_t cnt, std::string loc, std::string desc)
+    {
+    fprintf(f, "%" PRIu64 "%c%s%c%s\n", cnt, delim, loc.c_str(), delim, desc.c_str());
+    }
+
 } // namespace zeek::detail

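After this change, WriteStats() funnels both per-statement counts and whole-function-body counts through TrackUsage(), which keys usage_map on a (location, canonicalized description) pair and sums the access counts, while Report() prints one delimiter-separated line per entry. A standalone sketch of that aggregation and of the resulting output format (tab-delimited, matching the default delim; the locations and descriptions below are made up):

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>
    #include <map>
    #include <string>
    #include <utility>

    using Key = std::pair<std::string, std::string>; // (location, description)
    std::map<Key, uint64_t> usage_map;

    void track_usage(const std::string& loc, const std::string& desc, uint64_t cnt)
        {
        usage_map[{loc, desc}] += cnt; // missing keys start at 0, so += covers both branches
        }

    void report(std::FILE* f, char delim = '\t')
        {
        for ( const auto& [key, cnt] : usage_map )
            std::fprintf(f, "%" PRIu64 "%c%s%c%s\n", cnt, delim, key.first.c_str(), delim,
                         key.second.c_str());
        }

    int main()
        {
        track_usage("./local.zeek, line 12", "print hello", 3);
        track_usage("./local.zeek, line 12", "print hello", 2); // same key: counts accumulate
        track_usage("./local.zeek, lines 5-20", "event my_handler BODY", 1);
        report(stdout); // e.g. "5<TAB>./local.zeek, line 12<TAB>print hello"
        }
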
@@ -5,12 +5,14 @@
 #include <string>
 #include <utility>

+#include "zeek/ID.h"
+#include "zeek/StmtBase.h"
 #include "zeek/util.h"

 namespace zeek::detail
 {

-class Stmt;
+using ObjPtr = IntrusivePtr<Obj>;

 /**
  * A simple class for managing stats of Zeek script coverage across Zeek runs.

@@ -19,7 +21,7 @@ class ScriptCoverageManager
     {
 public:
     ScriptCoverageManager();
-    virtual ~ScriptCoverageManager();
+    virtual ~ScriptCoverageManager() = default;

     /**
      * Imports Zeek script Stmt usage information from file pointed to by

@@ -46,12 +48,18 @@ public:
     void DecIgnoreDepth() { ignoring--; }

     void AddStmt(Stmt* s);
+    void AddFunction(IDPtr func_id, StmtPtr body);

 private:
     /**
      * The current, global ScriptCoverageManager instance creates this list at parse-time.
      */
-    std::list<Stmt*> stmts;
+    std::list<StmtPtr> stmts;

+    /**
+     * A similar list for tracking functions and their bodies.
+     */
+    std::list<std::pair<IDPtr, StmtPtr>> func_instances;
+
     /**
      * Indicates whether new statements will not be considered as part of

@@ -88,6 +96,17 @@ private:
         c = ' ';
         }
     };

+    /**
+     * Tracks the usage of a given object with a given description
+     * and a given coverage count.
+     */
+    void TrackUsage(const ObjPtr& obj, std::string desc, uint64_t cnt);
+
+    /**
+     * Reports a single coverage instance.
+     */
+    void Report(FILE* f, uint64_t cnt, std::string loc, std::string desc);
     };

 extern ScriptCoverageManager script_coverage_mgr;

@@ -57,7 +57,7 @@ void ScriptProfile::Report(FILE* f, bool with_traces) const
     calls += util::fmt("%s|", s.c_str());
     counts += util::fmt("%d|", stats.call_count);
     cpu += util::fmt("%f|", stats.cpu_time);
-    memory += util::fmt("%llu|", stats.memory);
+    memory += util::fmt("%" PRIu64 "|", stats.memory);
     }

     calls.pop_back();

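The %llu specifier assumes an unsigned long long argument, while the memory counter here is, judging from the switch to PRIu64, a fixed-width 64-bit value; uint64_t is not guaranteed to be unsigned long long on every platform, and the PRIu64 macro from <cinttypes> always expands to the matching format. A minimal illustration of the portable form:

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    int main()
        {
        uint64_t memory = 123456789ULL;

        // PRIu64 expands to "llu", "lu", etc., whichever matches uint64_t here.
        std::printf("memory: %" PRIu64 " bytes\n", memory);
        }
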
Some files were not shown because too many files have changed in this diff.