Mirror of https://github.com/zeek/zeek.git
synced 2025-10-05 16:18:19 +00:00
Compare commits
44 commits
| SHA1 |
|---|
| 3bf8bfaac6 |
| 89b9f9a456 |
| 8de8fb8fae |
| 595cdf8b55 |
| 74b832fa39 |
| 15be682f63 |
| 8f9c5f79c6 |
| 382b4b5473 |
| 6f65b88f1b |
| cfe47f40a4 |
| 0fd6672dde |
| e7ab18b343 |
| 8a92b150a5 |
| dd4597865a |
| 056bbe04ea |
| f6b8864584 |
| d1f6e91988 |
| 6bbaef3e09 |
| 55d36fc2cd |
| f8fbeca504 |
| 72ff343f17 |
| b76096a9ee |
| b9e4669632 |
| 5974613cae |
| 3a44bda957 |
| 51262d02c7 |
| b46aeefbab |
| a4b746e5e8 |
| 746ae4d2cc |
| a65a339aa8 |
| 8014c4b8c3 |
| d9dc121e9a |
| 5a56ff92d2 |
| b13dfa3b16 |
| d17a1f9822 |
| 5cdddd92d5 |
| b8d11f4688 |
| 91b23a6e2e |
| a8c56c1f25 |
| 5f6df68463 |
| ac95484382 |
| 962b03a431 |
| 92a685df50 |
| 1bf439cd58 |
104 changed files with 1466 additions and 241 deletions
@@ -10,7 +10,7 @@ btest_jobs: &BTEST_JOBS 4
 btest_retries: &BTEST_RETRIES 2
 memory: &MEMORY 16GB
 
-config: &CONFIG --build-type=release --disable-broker-tests --prefix=$CIRRUS_WORKING_DIR/install --ccache --enable-werror
+config: &CONFIG --build-type=release --disable-broker-tests --prefix=$CIRRUS_WORKING_DIR/install --ccache --enable-werror -D FETCHCONTENT_FULLY_DISCONNECTED:BOOL=ON
 no_spicy_config: &NO_SPICY_CONFIG --build-type=release --disable-broker-tests --disable-spicy --prefix=$CIRRUS_WORKING_DIR/install --ccache --enable-werror
 static_config: &STATIC_CONFIG --build-type=release --disable-broker-tests --enable-static-broker --enable-static-binpac --prefix=$CIRRUS_WORKING_DIR/install --ccache --enable-werror
 binary_config: &BINARY_CONFIG --prefix=$CIRRUS_WORKING_DIR/install --libdir=$CIRRUS_WORKING_DIR/install/lib --binary-package --enable-static-broker --enable-static-binpac --disable-broker-tests --build-type=Release --ccache --enable-werror
@@ -35,8 +35,7 @@ macos_environment: &MACOS_ENVIRONMENT
 
 freebsd_resources_template: &FREEBSD_RESOURCES_TEMPLATE
 cpu: 8
-# Not allowed to request less than 8GB for an 8 CPU FreeBSD VM.
-memory: 8GB
+memory: *MEMORY
 # For greediness, see https://medium.com/cirruslabs/introducing-greedy-container-instances-29aad06dc2b4
 greedy: true
 
@@ -30,6 +30,7 @@ extend-ignore-re = [
 "\"BaR\"",
 "\"xFoObar\"",
 "\"FoO\"",
+"Steve Smoot",
 ]
 
 extend-ignore-identifiers-re = [

CHANGES (244 lines changed)
@@ -1,3 +1,247 @@
+7.0.1 | 2024-09-03 13:04:23 -0700
+
+* Update CHANGES, VERSION, and NEWS for 7.0.1 release (Tim Wojtulewicz, Corelight)
+
+* Update zeek-aux submodule to pick up zeek-archiver permissions fix (Tim Wojtulewicz, Corelight)
+
+7.0.0-14 | 2024-09-03 09:02:19 -0700
+
+* Bump auxil/spicy to latest release (Benjamin Bannier, Corelight)
+
+7.0.0-11 | 2024-08-30 12:38:59 -0700
+
+* Spicy: Register well-known ports through an event handler. (Robin Sommer, Corelight)
+
+This avoids the earlier problem of not tracking ports correctly in
+scriptland, while still supporting `port` in EVT files and `%port` in
+Spicy files.
+
+As it turns out we are already following the same approach for file
+analyzers' MIME types, so I'm applying the same pattern: it's one
+event per port, without further customization points. That leaves the
+patch pretty small after all while fixing the original issue.
+
+(cherry picked from commit a2079bcda6e40180b888240a281c12cc0ca735be)
+
+* Revert "Remove deprecated port/ports fields for spicy analyzers" (Robin Sommer, Corelight)
+
+This reverts commit 15d404dd191a723960e4efd956eec22739d3f1c2.
+
+(cherry picked from commit a2079bcda6e40180b888240a281c12cc0ca735be)
+
+7.0.0-9 | 2024-08-30 11:47:39 -0700
+
+* ldap: Promote uint8 to uint64 before shifting (Arne Welzel, Corelight)
+
+(cherry picked from commit 97fa7cdc0a49869ee6605fac9cfc15f11d8c855b)
+
+* ldap: Add heuristic for wrap tokens (Arne Welzel, Corelight)
+
+Instead of dissecting the GSSAPI handshake, add another heuristic
+into MaybeEncrypted to check for the WRAP token identifier.
+
+After this change, the pcap on the following ticket is processed
+nicely: https://gitlab.com/wireshark/migration-test/-/issues/9398
+
+(cherry picked from commit 6a6a5c3d0d60a1d4d32ba2173c035023c29fbf1d)
+
+* ldap: Ignore ec/rrc for sealed wrap tokens (Arne Welzel, Corelight)
+
+It shouldn't matter for the encrypted payload that we'll
+just consume and ignore.
+
+(cherry picked from commit 6a6a5c3d0d60a1d4d32ba2173c035023c29fbf1d)
+
+* ldap: Add LDAP sample with SASL-SRP mechanism (Arne Welzel, Corelight)
+
+This is what @dopheide-esnet actually saw. Produced with a custom
+cyrus-sasl and openldap build :-(
+
+(cherry picked from commit 6a6a5c3d0d60a1d4d32ba2173c035023c29fbf1d)
+
+* ldap: Reintroduce encryption after SASL heuristic (Arne Welzel, Corelight)
+
+@dopheide-esnet provided sample captures where SASL SRP is used as
+a SASL mechanism and the follow-up LDAP messages are encrypted. It's
+not clear how to determine whether encryption will or will not happen,
+so re-add a heuristic to determine this based on the first byte of
+the first message *after* the successful bindResponse handshake. If
+that byte is 0x30, assume cleartext.
+
+I haven't been able to produce such pcaps, unfortunately, but the
+cleartext path is tested via the existing sasl-ntlm.pcap.
+
+(cherry picked from commit 6a6a5c3d0d60a1d4d32ba2173c035023c29fbf1d)
+
+* ldap: Fix assuming GSS-SPNEGO for all bindResponses (Arne Welzel, Corelight)
+
+In retrospect that's an obvious bug.
+
+(cherry picked from commit 6a6a5c3d0d60a1d4d32ba2173c035023c29fbf1d)
+
+* ldap: Implement extended request/response and StartTLS support (Arne Welzel, Corelight)
+
+PCAP was produced with a local OpenLDAP server configured to support StartTLS.
+
+This puts the Zeek calls into a separate ldap_zeek.spicy file/module
+to separate it from LDAP.
+
+(cherry picked from commit 6a6a5c3d0d60a1d4d32ba2173c035023c29fbf1d)
+
+* ldap: Remove MessageWrapper with magic 0x30 searching (Arne Welzel, Corelight)
+
+This unit implements a heuristic to search for the 0x30 sequence
+byte if Message couldn't readily be parsed. Remove it with the
+idea of explicit and predictable support for SASL mechanisms.
+
+(cherry picked from commit 2ea3a651bd83b0dfa15924417e4667241531b57b)
+
+* ldap: Harden parsing a bit (Arne Welzel, Corelight)
+
+ASN1Message(True) may go off parsing arbitrary input data as
+"something ASN.1" This could be GBs of octet strings or just very
+long sequences. Avoid this by open-coding some top-level types expected.
+
+This also tries to avoid some of the &parse-from usages that result
+in unnecessary copies of data.
+
+Adds a locally generated PCAP with addRequest/addResponse that we
+don't currently handle.
+
+(cherry picked from commit 2ea3a651bd83b0dfa15924417e4667241531b57b)
+
+* ldap: Handle integrity-only KRB wrap tokens (Arne Welzel, Corelight)
+
+Mostly staring at the PCAPs and opened a few RFCs. For now, only if the
+MS_KRB5 OID is used and accepted in a bind response, start stripping
+KRB5 wrap tokens for both, client and server traffic.
+
+Would probably be nice to forward the GSS-API data to the analyzer...
+
+(cherry picked from commit 2ea3a651bd83b0dfa15924417e4667241531b57b)
+
+* http: fix password capture when enabled (Pierre Lalet)
+
+The current implementation would only log, if the password contains a
+colon, the part before the first colon (e.g., the password
+`password:password` would be logged as `password`).
+
+(cherry picked from commit c27e18631c5d9c6f04c230bd421c9750a1f02342)
+
+* Analyzer: Do not add child analyzers when finished (Arne Welzel, Corelight)
+
+Depending on an analyzer's implementation, its Done() method may
+attempt to access analyzer or connection state when executing.
+When this happens in the destructor of the parent analyzer during
+the process of destructing a connection, this state may have been
+deleted, resulting in use-after-free crashes or worse memory
+corruption.
+
+The following cases have been observed in the wild for when this happens.
+
+* PIA matching during Done() for undelivered TCP data enables a Spicy
+based analyzer which in turn attempts to raise an analyzer violation
+during Done()->EndOfData().
+
+* Spicy analyzers attaching new analyzers during their Done() processing
+which in turn attempt to use TCP() (to call FindChild()) during Done()
+while the analyzer tree / connection is being destructed.
+
+The second scenario was previously found to happen in the HTTP analyzer
+and fixed with 6ef9423f3cff13e6c73f97eb6a3a27d6f64cc320.
+
+Plug these scenarios by short-circuiting AddChildAnalyzer() if the analyzer
+or connection have finished or are being finished.
+
+(cherry picked from commit 45b33bf5c17d5e8cf6c777a9bd57e4a803dfad19)
+
+* TCP_Reassembler: Fix IsOrig() position in Match() call (Arne Welzel, Corelight)
+
+Found during a debug session with @rsmmr. Undelivered TCP data
+would only be matched for the responder and eol set to IsOrig().
+
+(cherry picked from commit 4a4cbf25765f387f0aa20277afd133918292b9c4)
+
+* Process metric callbacks from the main-loop thread (Tim Wojtulewicz, Corelight)
+
+This avoids the callbacks from being processed on the worker thread
+spawned by Civetweb. It fixes data race issues with lookups involving
+global variables, amongst other threading issues.
+
+(cherry picked from commit 3c3853dc7da9aad94a9b2d5a143cc7bd9476ea7a)
+
+* CI: Use 16GB of memory for FreeBSD builds (Tim Wojtulewicz, Corelight)
+
+(cherry picked from commit 9d9cc51e9dd93668cd332aa1aef283c9dc23a677)
+
+7.0.0 | 2024-07-31 09:37:03 -0700
+
+* Release 7.0.0.
+
+7.0.0-rc4.4 | 2024-07-31 09:36:51 -0700
+
+* Allowlist a name for typos check (Benjamin Bannier, Corelight)
+
+* Bump Spicy to latest release (Benjamin Bannier, Corelight)
+
+7.0.0-rc4 | 2024-07-26 10:12:34 -0700
+
+* Bump auxil/spicy to latest development snapshot (Benjamin Bannier, Corelight)
+
+This in particular pulls in a fix for zeek/spicy#1808.
+
+(cherry picked from commit 4c0c7581c835b4dcd5339a4b34c2b82fcfc40dc3)
+
+7.0.0-rc3 | 2024-07-25 10:52:29 -0700
+
+* Generate docs for 7.0.0-rc3 (Tim Wojtulewicz)
+
+* Bump zeek-testing-cluster to reflect deprecation of prometheus.zeek (Christian Kreibich, Corelight)
+
+(cherry picked from commit 146cf99ff62d729705c155b44199a674911ade09)
+
+* Update 7.0 NEWS with blurb about multi-PDU parsing causing increased load [nomail] [skip ci] (Tim Wojtulewicz, Corelight)
+
+(cherry picked from commit bd208f4c54f66074315479071c810d792e69f96b)
+
+* Fix handling of zero-length SMB2 error responses (Tim Wojtulewicz, Corelight)
+
+(cherry picked from commit bd208f4c54f66074315479071c810d792e69f96b)
+
+* Update Mozilla CA list and CT list (Johanna Amann, Corelight)
+
+(cherry picked from commit cb88f6316c7341da7a2af397932a145be3a0cc29)
+
+* Bump auxil/spicy to latest development snapshot (Benjamin Bannier, Corelight)
+
+(cherry picked from commit da7c3d91385195a7a4ba957e46743bc52a9d4ecb)
+
+7.0.0-rc2.7 | 2024-07-24 17:00:51 -0700
+
+* Add contributors to 7.0.0 NEWS entry (Christian Kreibich, Corelight)
+
+* telemetry: Deprecate prometheus.zeek policy script (Arne Welzel, Corelight)
+
+* Update broker submodule [nomail] (Tim Wojtulewicz, Corelight)
+
+7.0.0-rc2 | 2024-07-18 14:31:49 -0700
+
+* Bump zeek-testing-cluster to pull in tee SIGPIPE fix (Christian Kreibich, Corelight)
+
+(cherry picked from commit b51a46f94d4012119fd27d5e46328c70af7270a2)
+
+* CI: Set FETCH_CONTENT_FULLY_DISCONNECTED flag for configure (Tim Wojtulewicz, Corelight)
+
+* Update broker and cmake submodules [nomail] (Tim Wojtulewicz, Corelight)
+
+* Fix warning about grealpath when running 'make dist' on Linux (Tim Wojtulewicz, Corelight)
+
+(cherry picked from commit e4716b6c912f86cf6b2afd6979c38667c45add95)
+
+7.0.0-rc1 | 2024-07-11 12:21:02 -0700
+
+* Updating submodule(s) [nomail] (Tim Wojtulewicz, Corelight)
+
 7.0.0-dev.467 | 2024-07-11 12:14:52 -0700
 
 * Update the scripts.base.frameworks.telemetry.internal-metrics test (Christian Kreibich, Corelight)

Makefile (2 lines changed)
@@ -9,7 +9,7 @@ BUILD=build
 REPO=$$(cd $(CURDIR) && basename $$(git config --get remote.origin.url | sed 's/^[^:]*://g'))
 VERSION_FULL=$(REPO)-$$(cd $(CURDIR) && cat VERSION)
 GITDIR=$$(test -f .git && echo $$(cut -d" " -f2 .git) || echo .git)
-REALPATH=$$($$(realpath --relative-to=$(pwd) . >/dev/null 2>&1) && echo 'realpath' || echo 'grealpath')
+REALPATH=$$($$(realpath --relative-to=$(shell pwd) . >/dev/null 2>&1) && echo 'realpath' || echo 'grealpath')
 
 all: configured
 	$(MAKE) -C $(BUILD) $@

NEWS (37 lines changed)
@@ -3,9 +3,36 @@ This document summarizes the most important changes in the current Zeek
 release. For an exhaustive list of changes, see the ``CHANGES`` file
 (note that submodules, such as Broker, come with their own ``CHANGES``.)
 
+Zeek 7.0.1
+==========
+
+This release fixes the following bugs:
+
+- HTTP passwords with colon characters in them are now correctly logged.
+
+- The LDAP analyzer now supports handling of non-sealed GSS-API WRAP tokens.
+
+- Heuristics for parsing SASL encrypted and signed LDAP traffic have been made
+more strict and predictable. Please provide input if this results in less
+visibility in your environment.
+
+- StartTLS support was added to the LDAP analyzer. The SSL analyzer is enabled
+for connections where client and server negotiate to TLS through the extended
+request/response mechanism.
+
+- Specify less-strict permissions for directories and files created by
+zeek-archiver to play more nicely with user's umask setting.
+
 Zeek 7.0.0
 ==========
 
+We would like to thank the following people for their contributions to this
+release: Christopher Knill (cknill), Jan Grashöfer (J-Gras), Martin van
+Hensbergen (mvhensbergen), Matti Bispham (mbispham), Mike Dopheide
+(dopheide-esnet), Oleksandr Pastushkov (opastushkov), Peter Cullen (pbcullen),
+Steve Smoot (stevesmoot), Tanner Kvarfordt (Kardbord), Victor Dvornikov
+(lydiym).
+
 Breaking Changes
 ----------------
 
@@ -20,7 +47,7 @@ Breaking Changes
 
 All of the metrics-related script-level options, type, and methods have been
 moved to the Telemetry framework:
-* Option ``Broker::metrics_port` is now ``Telemetry::metrics_port``
+* Option ``Broker::metrics_port`` is now ``Telemetry::metrics_port``
 * Option ``Broker::metrics_export_endpoint_name`` is now ``Telemetry::metrics_endpoint_name``
 
 The following options have been removed:
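For scripts that still set the old Broker-side options, the rename is mechanical; a minimal migration sketch in Zeek script (the port and endpoint name below are illustrative, not defaults):

```zeek
# Zeek 6.x style (now renamed):
# redef Broker::metrics_port = 9090/tcp;
# redef Broker::metrics_export_endpoint_name = "zeek";

# Zeek 7.0 style:
redef Telemetry::metrics_port = 9090/tcp;
redef Telemetry::metrics_endpoint_name = "zeek";
```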
@@ -85,7 +112,8 @@ New Functionality
 environment variable configures the addition.
 
 - SMB2 packets containing multiple PDUs now correctly parse all of the headers,
-instead of just the first one and ignoring the rest.
+instead of just the first one and ignoring the rest. This may cause increased
+CPU load on SMB2-heavy networks.
 
 - The new built-in function ``lookup_connection_analyzer_id()`` retrieves the
 numeric identifier of an analyzer associated with a connection. This enables
|
||||||
- The ``--disable-archiver`` configure flag no longer does anything and will be
|
- The ``--disable-archiver`` configure flag no longer does anything and will be
|
||||||
removed in 7.1. zeek-archiver has moved into the zeek-aux repository.
|
removed in 7.1. zeek-archiver has moved into the zeek-aux repository.
|
||||||
|
|
||||||
|
- The policy/frameworks/telemetry/prometheus.zeek script has been deprecated
|
||||||
|
and will be removed with Zeek 7.1. Setting the ``metrics_port`` field on a
|
||||||
|
``Cluster::Node`` implies listening on that port and exposing telemetry
|
||||||
|
in Prometheus format.
|
||||||
|
|
||||||
Zeek 6.2.0
|
Zeek 6.2.0
|
||||||
==========
|
==========
|
||||||
|
|
||||||
|
|
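As a replacement for loading the deprecated script, a hedged cluster-layout sketch: setting ``metrics_port`` on a ``Cluster::Node`` is what now enables the Prometheus endpoint (node names, addresses and ports below are made up):

```zeek
# cluster-layout.zeek (illustrative values only)
redef Cluster::nodes = {
    ["manager"] = [$node_type=Cluster::MANAGER, $ip=127.0.0.1, $p=27760/tcp,
                   $metrics_port=9911/tcp],
    ["worker-1"] = [$node_type=Cluster::WORKER, $ip=127.0.0.1, $p=27761/tcp,
                    $manager="manager", $metrics_port=9912/tcp],
};
```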

VERSION (2 lines changed)
@@ -1 +1 @@
-7.0.0-dev.467
+7.0.1
@@ -1 +1 @@
-Subproject commit fada26ae504981f7f5524bf2a5c82ae49acd556d
+Subproject commit 5b3ed87a93b2ded1f3c95ff1a3b99e2c6ab84ef4
@@ -1 +1 @@
-Subproject commit 2fec7205d1a9cb4829b86c943d599696d53de85c
+Subproject commit 4649065e2a1dd21c81e41cd6007dce5486b77fc0
@@ -1 +1 @@
-Subproject commit 6581b1855a5ea8cc102c66b4ac6a431fc67484a0
+Subproject commit a1b7c78287ecb29cf17a3ef8a94125d87eadb152
@@ -1 +1 @@
-Subproject commit 8a66cd60fb29a1237b5070854cb194f43a3f7a30
+Subproject commit e850412ab5dea10ee2ebb98e42527d80fcf9a7ed

cmake (2 lines changed)
@@ -1 +1 @@
-Subproject commit 690483f76c149ffa8e035b612b406b0964f9886f
+Subproject commit 2d42baf8e63a7494224aa9d02afa2cb43ddb96b8

doc (2 lines changed)
@@ -1 +1 @@
-Subproject commit f65820ff0faf2887799fe691a443b5db39eeed54
+Subproject commit 8039548924d13b991a7329691fef4c64b03d13fc
@@ -47,12 +47,18 @@ export {
 
 # Marked with &is_used to suppress complaints when there aren't any
 # Spicy file analyzers loaded, and hence this event can't be generated.
-# The attribute is only supported for Zeek 5.0 and higher.
 event spicy_analyzer_for_mime_type(a: Files::Tag, mt: string) &is_used
 {
 Files::register_for_mime_type(a, mt);
 }
 
+# Marked with &is_used to suppress complaints when there aren't any
+# Spicy protocol analyzers loaded, and hence this event can't be generated.
+event spicy_analyzer_for_port(a: Analyzer::Tag, p: port) &is_used
+{
+Analyzer::register_for_port(a, p);
+}
+
 function enable_protocol_analyzer(tag: Analyzer::Tag) : bool
 {
 return Spicy::__toggle_analyzer(tag, T);
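The new handler mirrors what was already done for MIME types: ports declared by Spicy analyzers now flow through ``Analyzer::register_for_port()`` and are therefore tracked in script-land. Extra ports can still be registered the usual way; a small sketch using a stock analyzer tag and a made-up port:

```zeek
event zeek_init()
    {
    # Illustrative only: attach Zeek's HTTP analyzer to an additional port.
    Analyzer::register_for_port(Analyzer::ANALYZER_HTTP, 8081/tcp);
    }
```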
@@ -1,3 +1 @@
 @load ./main
-
-@load base/frameworks/cluster
@@ -5,10 +5,28 @@
 ##! enabled by setting :zeek:see:`Telemetry::metrics_port`.
 
 @load base/misc/version
+@load base/frameworks/cluster
+
 @load base/frameworks/telemetry/options
 
 module Telemetry;
 
+# In a cluster configuration, open the port number for metrics
+# from the cluster node configuration for exporting data to
+# Prometheus.
+#
+# The manager node will also provide a ``/services.json`` endpoint
+# for the HTTP Service Discovery system in Prometheus to use for
+# configuration. This endpoint will include information for all of
+# the other nodes in the cluster.
+@if ( Cluster::is_enabled() )
+redef Telemetry::metrics_endpoint_name = Cluster::node;
+
+@if ( Cluster::local_node_metrics_port() != 0/unknown )
+redef Telemetry::metrics_port = Cluster::local_node_metrics_port();
+@endif
+@endif
+
 export {
 ## Alias for a vector of label values.
 type labels_vector: vector of string;
@@ -5883,6 +5883,13 @@ export {
 
 type MetricVector : vector of Metric;
 type HistogramMetricVector : vector of HistogramMetric;
+
+## Maximum amount of time for CivetWeb HTTP threads to
+## wait for metric callbacks to complete on the IO loop.
+const callback_timeout: interval = 5sec &redef;
+
+## Number of CivetWeb threads to use.
+const civetweb_threads: count = 2 &redef;
 }
 
 module GLOBAL;
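Both new constants are ``&redef``, so they can be tuned if scrapes time out or the endpoint needs more service threads. A hedged sketch, assuming the export block above belongs to the ``Telemetry`` module as the surrounding ``module GLOBAL;`` context suggests (values are illustrative, not recommendations):

```zeek
# Assumed namespace; adjust if these constants live elsewhere.
redef Telemetry::callback_timeout = 10sec;
redef Telemetry::civetweb_threads = 4;
```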
@@ -338,8 +338,8 @@ event http_header(c: connection, is_orig: bool, name: string, value: string) &pr
 if ( /^[bB][aA][sS][iI][cC] / in value )
 {
 local userpass = decode_base64_conn(c$id, sub(value, /[bB][aA][sS][iI][cC][[:blank:]]+/, ""));
-local up = split_string(userpass, /:/);
-if ( |up| >= 2 )
+local up = split_string1(userpass, /:/);
+if ( |up| == 2 )
 {
 c$http$username = up[0];
 if ( c$http$capture_password )
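The behavioral difference driving this fix: ``split_string`` splits at every colon, so only the first password component survived, while ``split_string1`` splits only at the first match and keeps the rest of the credential intact. A minimal sketch with made-up credentials:

```zeek
event zeek_init()
    {
    local userpass = "alice:s3cret:with:colons";
    # Old behavior: [alice, s3cret, with, colons] - password pieces separated.
    print split_string(userpass, /:/);
    # New behavior: [alice, s3cret:with:colons] - full password kept in up[1].
    print split_string1(userpass, /:/);
    }
```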
@@ -120,4 +120,11 @@ export {
 "searching", [ LDAP::SearchDerefAlias_DEREF_FINDING_BASE ] =
 "finding", [ LDAP::SearchDerefAlias_DEREF_ALWAYS ] = "always", }
 &default="unknown";
+
+const EXTENDED_REQUESTS = {
+# StartTLS, https://datatracker.ietf.org/doc/html/rfc4511#section-4.14.1
+[ "1.3.6.1.4.1.1466.20037" ] = "StartTLS",
+# whoami, https://datatracker.ietf.org/doc/html/rfc4532#section-2
+[ "1.3.6.1.4.1.4203.1.11.3" ] = "whoami",
+} &default="unknown" &redef;
 }
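Because the new table carries ``&redef`` and a ``&default`` of "unknown", additional extended operations can be labeled locally; a small sketch (the OID below is the Password Modify extended operation from RFC 3062, added purely as an illustration):

```zeek
redef LDAP::EXTENDED_REQUESTS += {
    ["1.3.6.1.4.1.4203.1.11.1"] = "password modify",
};
```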
@@ -258,6 +258,9 @@ event LDAP::message(c: connection,
 }
 
 m$object = object;
+
+if ( opcode == LDAP::ProtocolOpcode_EXTENDED_REQUEST )
+m$object += fmt(" (%s)", EXTENDED_REQUESTS[object]);
 }
 
 if ( argument != "" ) {
@@ -98,3 +98,44 @@ global LDAP::search_result_entry: event (
 message_id: int,
 object_name: string
 );
+
+## Event generated for each ExtendedRequest in LDAP messages.
+##
+## c: The connection.
+##
+## message_id: The messageID element.
+##
+## request_name: The name of the extended request.
+##
+## request_value: The value of the extended request (empty if missing).
+global LDAP::extended_request: event (
+c: connection,
+message_id: int,
+request_name: string,
+request_value: string
+);
+
+## Event generated for each ExtendedResponse in LDAP messages.
+##
+## c: The connection.
+##
+## message_id: The messageID element.
+##
+## result: The result code of the response.
+##
+## response_name: The name of the extended response (empty if missing).
+##
+## response_value: The value of the extended response (empty if missing).
+global LDAP::extended_response: event (
+c: connection,
+message_id: int,
+result: LDAP::ResultCode,
+response_name: string,
+response_value: string
+);
+
+## Event generated when a plaintext LDAP connection switched to TLS.
+##
+## c: The connection.
+##
+global LDAP::starttls: event(c: connection);
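A hedged sketch of consuming the new events from a site script, for example to log or notice StartTLS negotiation (the print output is illustrative):

```zeek
@load base/protocols/ldap

event LDAP::extended_request(c: connection, message_id: int, request_name: string, request_value: string)
    {
    print fmt("LDAP extended request %s (msg %d) on %s", request_name, message_id, c$uid);
    }

event LDAP::starttls(c: connection)
    {
    print fmt("LDAP connection %s upgraded to TLS", c$uid);
    }
```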
@@ -1,9 +1,9 @@
 #
 # Do not edit this file. This file is automatically generated by gen-ct-list.pl
-# File generated at Fri Feb 23 11:37:01 2024
+# File generated at Tue Jul 23 16:04:45 2024
 # File generated from https://www.gstatic.com/ct/log_list/v3/log_list.json
-# Source file generated at: 2024-02-22T12:56:21Z
-# Source file version: 32.9
+# Source file generated at: 2024-07-23T13:06:08Z
+# Source file version: 39.1
 #
 
 @load base/protocols/ssl
@@ -12,21 +12,32 @@ redef ct_logs += {
["\xee\xcd\xd0\x64\xd5\xdb\x1a\xce\xc5\x5c\xb7\x9d\xb4\xcd\x13\xa2\x32\x87\x46\x7c\xbc\xec\xde\xc3\x51\x48\x59\x46\x71\x1f\xb5\x9b"] = CTInfo($description="Google 'Argon2024' log", $operator="Google", $url="https://ct.googleapis.com/logs/us1/argon2024/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x1d\xb9\x6c\xa9\xcb\x69\x94\xc5\x5c\xe6\xb6\xa6\x03\xbb\xd2\xb8\xdc\x54\x43\x17\x28\x99\x0c\x06\x01\x50\x1d\x9d\x64\xc0\x59\x46\x2b\xdc\xc8\x03\x1d\x05\xb4\x2d\xa8\x09\xf7\x99\x41\xed\x04\xfb\xe5\x57\xba\x26\x04\xf6\x11\x52\xce\x14\x65\x3b\x2f\x76\x2b\xc0"),
["\x4e\x75\xa3\x27\x5c\x9a\x10\xc3\x38\x5b\x6c\xd4\xdf\x3f\x52\xeb\x1d\xf0\xe0\x8e\x1b\x8d\x69\xc0\xb1\xfa\x64\xb1\x62\x9a\x39\xdf"] = CTInfo($description="Google 'Argon2025h1' log", $operator="Google", $url="https://ct.googleapis.com/logs/us1/argon2025h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x20\x82\xa1\xf9\x67\x68\xa8\xe4\xdb\x94\x98\xe2\xe1\x68\x87\xe4\x09\x6d\x20\x35\x33\x38\x3c\xaf\x14\xaa\xd7\x08\x18\xf0\xfd\x16\x9b\xd3\xff\x7c\x27\x82\xd4\x87\xb7\x4e\x24\x46\x3b\xfb\xae\xbe\xc8\x23\x52\x20\x2b\xaa\x44\x05\xfe\x54\xf9\xd5\xf1\x1d\x45\x9a"),
["\x12\xf1\x4e\x34\xbd\x53\x72\x4c\x84\x06\x19\xc3\x8f\x3f\x7a\x13\xf8\xe7\xb5\x62\x87\x88\x9c\x6d\x30\x05\x84\xeb\xe5\x86\x26\x3a"] = CTInfo($description="Google 'Argon2025h2' log", $operator="Google", $url="https://ct.googleapis.com/logs/us1/argon2025h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xaf\xe4\xf3\x94\x2c\xdf\xa6\x27\xb5\xfe\xb2\x61\x83\x19\xc8\x21\x3a\x23\xa8\xa9\x3d\x54\xaf\xbc\x31\x9a\x1c\xd3\xc1\xe3\xb6\xc2\xf3\x0f\xc7\xb9\xca\x3b\x1d\x79\x65\x61\x22\x25\x82\x56\x4e\x98\xe8\xaa\x26\x29\x36\x1e\x28\x60\x6f\xeb\x15\x6e\xf7\x7c\xd0\xba"),
["\x0e\x57\x94\xbc\xf3\xae\xa9\x3e\x33\x1b\x2c\x99\x07\xb3\xf7\x90\xdf\x9b\xc2\x3d\x71\x32\x25\xdd\x21\xa9\x25\xac\x61\xc5\x4e\x21"] = CTInfo($description="Google 'Argon2026h1' log", $operator="Google", $url="https://ct.googleapis.com/logs/us1/argon2026h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x07\xfc\x1e\xe8\x63\x8e\xff\x1c\x31\x8a\xfc\xb8\x1e\x19\x2b\x60\x50\x00\x3e\x8e\x9e\xda\x77\x37\xe3\xa5\xa8\xda\x8d\x94\xf8\x6b\xe8\x3d\x64\x8f\x27\x3f\x75\xb3\xfc\x6b\x12\xf0\x37\x06\x4f\x64\x58\x75\x14\x5d\x56\x52\xe6\x6a\x2b\x14\x4c\xec\x81\xd1\xea\x3e"),
["\xd7\x6d\x7d\x10\xd1\xa7\xf5\x77\xc2\xc7\xe9\x5f\xd7\x00\xbf\xf9\x82\xc9\x33\x5a\x65\xe1\xd0\xb3\x01\x73\x17\xc0\xc8\xc5\x69\x77"] = CTInfo($description="Google 'Argon2026h2' log", $operator="Google", $url="https://ct.googleapis.com/logs/us1/argon2026h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x2a\x3a\x67\x8b\xfe\xba\x0c\x86\x2b\x4a\x51\x8a\xe9\x17\xfe\x7b\xa1\x76\x73\xfd\xbc\x65\x4b\xc3\x27\xbf\x4d\xf3\x5f\xa0\xca\x29\x80\x11\x20\x32\x78\xd6\x7e\xf9\x34\x60\x8c\x75\xa0\xf5\x35\x50\x9c\xa1\xd3\x49\x4d\x13\xd5\x3b\x6a\x0e\xea\x45\x9d\x24\x13\x22"),
["\x76\xff\x88\x3f\x0a\xb6\xfb\x95\x51\xc2\x61\xcc\xf5\x87\xba\x34\xb4\xa4\xcd\xbb\x29\xdc\x68\x42\x0a\x9f\xe6\x67\x4c\x5a\x3a\x74"] = CTInfo($description="Google 'Xenon2024' log", $operator="Google", $url="https://ct.googleapis.com/logs/eu1/xenon2024/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xb9\x60\xe0\x34\x1e\x35\xe4\x65\x00\x93\x4f\x90\x09\xbd\x5a\xec\x44\xdd\x8c\x0f\xce\xed\x11\x3e\x2a\x59\x46\x9a\x31\xb6\xc7\x99\xf7\xdc\xef\x3d\xcd\x8f\x86\xc2\x35\xa5\x3e\xdc\x29\xba\xbb\xf2\x54\xe2\xa8\x0c\x83\x08\x51\x06\xde\x21\x6d\x36\x50\x8e\x38\x4d"),
["\xcf\x11\x56\xee\xd5\x2e\x7c\xaf\xf3\x87\x5b\xd9\x69\x2e\x9b\xe9\x1a\x71\x67\x4a\xb0\x17\xec\xac\x01\xd2\x5b\x77\xce\xcc\x3b\x08"] = CTInfo($description="Google 'Xenon2025h1' log", $operator="Google", $url="https://ct.googleapis.com/logs/eu1/xenon2025h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x82\xe2\xce\x90\x40\x3f\x81\x0e\xdf\xea\xe1\x20\x2b\x5e\x2e\x30\x54\x46\x81\xb9\x58\xed\xaf\xbd\xff\x36\xa7\x9e\x0b\x5f\x6a\x6b\x91\xa5\xc1\x98\xe1\xf2\xcd\xeb\x17\x20\x70\xca\x2a\x12\xe6\x54\x78\x50\xdc\xff\x6d\xfd\x1c\xa7\xb6\x3a\x1f\xf9\x26\xa9\x1b\xbd"),
["\xdd\xdc\xca\x34\x95\xd7\xe1\x16\x05\xe7\x95\x32\xfa\xc7\x9f\xf8\x3d\x1c\x50\xdf\xdb\x00\x3a\x14\x12\x76\x0a\x2c\xac\xbb\xc8\x2a"] = CTInfo($description="Google 'Xenon2025h2' log", $operator="Google", $url="https://ct.googleapis.com/logs/eu1/xenon2025h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x6b\xe0\xaf\xed\x06\x7c\x3d\xef\xd9\x0e\xe4\x58\x4b\x04\xd8\x2a\x47\x99\x90\x89\x7a\xb9\x36\xa5\x75\xc8\x04\xb8\xcb\xe2\xaa\x2b\xb5\x68\x9d\x88\x29\xa2\xa5\xcf\xce\x2b\x9a\x15\x9b\xa0\x3e\x9d\x94\x1c\xb2\xb7\x4a\xf2\x51\xec\x40\xed\x62\x47\xa4\x03\x49\x86"),
["\x96\x97\x64\xbf\x55\x58\x97\xad\xf7\x43\x87\x68\x37\x08\x42\x77\xe9\xf0\x3a\xd5\xf6\xa4\xf3\x36\x6e\x46\xa4\x3f\x0f\xca\xa9\xc6"] = CTInfo($description="Google 'Xenon2026h1' log", $operator="Google", $url="https://ct.googleapis.com/logs/eu1/xenon2026h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x3a\x1f\xc8\xbb\xce\xd5\x90\x47\x34\xca\xca\x01\x04\x27\x21\x1c\xe2\x29\x3d\x92\xbb\x91\x45\xc7\x5a\x3e\xa5\xd4\xf2\x12\xe6\xe8\xe6\x43\xba\xf3\x7b\xc2\x38\xaf\xfc\x23\x8a\x05\x56\xeb\x03\x0a\x30\xcc\x63\x6c\xd9\x3c\xbe\xf5\x7b\x94\xba\x94\xd3\xbf\x88\x4c"),
["\xd8\x09\x55\x3b\x94\x4f\x7a\xff\xc8\x16\x19\x6f\x94\x4f\x85\xab\xb0\xf8\xfc\x5e\x87\x55\x26\x0f\x15\xd1\x2e\x72\xbb\x45\x4b\x14"] = CTInfo($description="Google 'Xenon2026h2' log", $operator="Google", $url="https://ct.googleapis.com/logs/eu1/xenon2026h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xe5\x77\x78\x95\x71\x28\xb3\x95\xc9\xa5\xcc\x7a\x4c\xe8\x32\x03\x96\x7b\xfc\x2e\x1d\xb9\xa4\xdb\x43\xa0\xbd\x69\x72\xf9\x45\xba\x9a\xc3\xe9\x96\xd5\x70\xe7\x0d\x7e\xc9\x95\x15\x27\x8a\x72\x30\x65\x86\x43\x53\xdc\x11\x44\x18\x49\x98\x25\x68\xa7\x3c\x05\xbf"),
["\xda\xb6\xbf\x6b\x3f\xb5\xb6\x22\x9f\x9b\xc2\xbb\x5c\x6b\xe8\x70\x91\x71\x6c\xbb\x51\x84\x85\x34\xbd\xa4\x3d\x30\x48\xd7\xfb\xab"] = CTInfo($description="Cloudflare 'Nimbus2024' Log", $operator="Cloudflare", $url="https://ct.cloudflare.com/logs/nimbus2024/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x77\xb1\x9b\x7b\x8f\xe6\x8b\x35\xfe\x3a\x92\x29\x2d\xac\x8a\x8d\x51\x8a\x25\xfc\x93\xb6\xd7\xa0\x8b\x29\x37\x71\x1d\x33\xca\xcc\x33\xea\x28\xb9\x1f\xe2\xac\xc3\xa9\x5d\xdd\x97\xbe\xf6\x9e\x94\x25\xdd\x36\x81\xd1\xeb\x5d\x29\xc3\x2b\x44\xf1\x5b\xca\x15\x48"),
["\xcc\xfb\x0f\x6a\x85\x71\x09\x65\xfe\x95\x9b\x53\xce\xe9\xb2\x7c\x22\xe9\x85\x5c\x0d\x97\x8d\xb6\xa9\x7e\x54\xc0\xfe\x4c\x0d\xb0"] = CTInfo($description="Cloudflare 'Nimbus2025'", $operator="Cloudflare", $url="https://ct.cloudflare.com/logs/nimbus2025/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x1a\x80\x1a\x15\x19\x19\x23\x79\xb4\xfa\xa0\x79\x8e\x8d\xd5\xc1\xdc\xc2\xb5\x96\x92\x7e\x94\xe0\xc3\x7e\x14\x7c\x0a\x0d\x2d\x46\xa8\x9d\x1b\xb1\x41\x65\x0c\x5f\x98\xc4\x5a\x17\x79\x81\x5b\x4a\x14\x41\xec\xaf\xa9\x5d\x0e\xab\x12\x19\x71\xcd\x43\xef\xbb\x97"),
["\x48\xb0\xe3\x6b\xda\xa6\x47\x34\x0f\xe5\x6a\x02\xfa\x9d\x30\xeb\x1c\x52\x01\xcb\x56\xdd\x2c\x81\xd9\xbb\xbf\xab\x39\xd8\x84\x73"] = CTInfo($description="DigiCert Yeti2024 Log", $operator="DigiCert", $url="https://yeti2024.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x57\xb8\xc1\x6f\x30\xa4\x7f\x2e\xe4\xf0\xd0\xd9\x60\x62\x13\x95\xe3\x7a\xe3\x4e\x53\xc3\xb3\xb8\x73\x85\xc1\x18\x0d\x23\x0e\x58\x84\xd2\x78\xef\x9b\xb3\x1e\x2c\x1a\xde\xc1\x8f\x81\x1b\x19\x44\x58\xb7\x00\x77\x60\x20\x1a\x72\xd8\x82\xde\xae\x9e\xb1\xc6\x4b"),
["\x7d\x59\x1e\x12\xe1\x78\x2a\x7b\x1c\x61\x67\x7c\x5e\xfd\xf8\xd0\x87\x5c\x14\xa0\x4e\x95\x9e\xb9\x03\x2f\xd9\x0e\x8c\x2e\x79\xb8"] = CTInfo($description="DigiCert Yeti2025 Log", $operator="DigiCert", $url="https://yeti2025.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xdf\x95\x00\x5e\x10\xc1\x01\xf7\x37\xe3\x10\x74\xd1\xff\xb2\xca\x90\xed\x32\x99\x5f\x0c\x39\xfe\xa1\xd1\x13\x11\xac\xd1\xb3\x73\x93\x20\xc2\x13\x3c\x4c\xb5\x7a\x52\x86\x86\x3d\xe3\x95\x24\x7c\xd8\x91\x98\x48\x3b\xf0\xf0\xdf\x21\xf1\xb0\x81\x5a\x59\x25\x43"),
["\x73\xd9\x9e\x89\x1b\x4c\x96\x78\xa0\x20\x7d\x47\x9d\xe6\xb2\xc6\x1c\xd0\x51\x5e\x71\x19\x2a\x8c\x6b\x80\x10\x7a\xc1\x77\x72\xb5"] = CTInfo($description="DigiCert Nessie2024 Log", $operator="DigiCert", $url="https://nessie2024.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x2d\xfc\xa2\x7b\x36\xbf\x56\x91\xe9\xfe\x3f\xe8\x3d\xfc\xc3\xa7\xe0\x61\x52\xea\x2c\xe9\x05\xa3\x9f\x27\x17\x81\x05\x70\x6b\x81\x61\x44\x8a\xf8\x3b\x10\x80\x42\xed\x03\x2f\x00\x50\x21\xfc\x41\x54\x84\xa3\x54\xd5\x2e\xb2\x7a\x16\x4b\x2a\x1f\x2b\x66\x04\x2b"),
["\xe6\xd2\x31\x63\x40\x77\x8c\xc1\x10\x41\x06\xd7\x71\xb9\xce\xc1\xd2\x40\xf6\x96\x84\x86\xfb\xba\x87\x32\x1d\xfd\x1e\x37\x8e\x50"] = CTInfo($description="DigiCert Nessie2025 Log", $operator="DigiCert", $url="https://nessie2025.ct.digicert.com/log/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xf2\xf0\xf0\xa7\x8b\x81\x2e\x09\x39\x3b\x9f\x42\xda\x38\x44\x5f\xb4\xcc\xed\x36\xbb\xd8\x43\x7f\x16\x49\x57\x87\x04\x7f\xa5\x01\x34\xf7\xe8\x68\x3f\xb7\x78\x1f\x60\x66\x2d\x67\x9a\x75\x80\xb7\x53\xa7\x85\xd5\xbc\xab\x47\x06\x55\xdb\xb5\xdf\x88\xa1\x6f\x38"),
["\xb6\x9d\xdc\xbc\x3c\x1a\xbd\xef\x6f\x9f\xd6\x0c\x88\xb1\x06\x7b\x77\xf0\x82\x68\x8b\x2d\x78\x65\xd0\x4b\x39\xab\xe9\x27\xa5\x75"] = CTInfo($description="DigiCert 'Wyvern2024h1' Log", $operator="DigiCert", $url="https://wyvern.ct.digicert.com/2024h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x68\xa6\x79\x14\xd1\x58\xe7\xab\xaa\x29\x69\x7f\x60\xed\x68\xe8\x10\xf6\x07\x84\xc0\xfb\x59\x04\x5a\x09\xc9\x1d\xe1\x4b\xfb\xcd\xdc\x03\xf3\xa8\x2a\x46\xb9\x84\x4d\x69\x30\xec\x23\x35\xc1\x8e\xfc\x9f\xb4\x20\x24\xd7\x15\xac\x87\xf7\x1e\xc1\x0b\x3c\x76\x1a"),
["\x0c\x2a\xef\x2c\x4a\x5b\x98\x83\xd4\xdd\xa3\x82\xfe\x50\xfb\x51\x88\xb3\xe9\x73\x33\xa1\xec\x53\xa0\x9d\xc9\xa7\x9d\x0d\x08\x20"] = CTInfo($description="DigiCert 'Wyvern2024h2' Log", $operator="DigiCert", $url="https://wyvern.ct.digicert.com/2024h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xa8\x73\x12\x9c\x54\xd0\x7a\x7d\xc5\xb5\x17\x2b\x71\x52\x89\x04\x90\xbb\x42\xf1\x9d\xf8\x1c\xde\x4c\xcf\x82\x3c\xbd\x37\x1b\x74\x4c\x3c\xc7\xa3\x13\x87\x01\x51\x13\x14\xda\xa2\x12\x98\x84\xce\x1c\xbe\xcf\x4f\x7a\xef\x15\xfa\xd0\xee\xed\xed\x07\xad\x71\x6d"),
["\x73\x20\x22\x0f\x08\x16\x8a\xf9\xf3\xc4\xa6\x8b\x0a\xb2\x6a\x9a\x4a\x00\xee\xf5\x77\x85\x8a\x08\x4d\x05\x00\xd4\xa5\x42\x44\x59"] = CTInfo($description="DigiCert 'Wyvern2025h1' Log", $operator="DigiCert", $url="https://wyvern.ct.digicert.com/2025h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xa7\xcb\x80\x61\x86\x1b\x1f\xb5\xab\x2b\x20\x76\x59\x83\x66\x0e\xce\xae\xb8\x6f\x3b\x88\x02\xeb\x43\xf4\x87\x90\xcb\x8b\xda\xac\x0e\x19\x50\xe0\xf9\x24\x0e\xab\x26\x93\x8c\x3f\x9e\x0d\x96\x58\x44\x9d\x3b\x8a\x80\xc5\xc8\xbe\xe1\x89\x46\x6b\x48\x4c\xd6\x09"),
["\xed\x3c\x4b\xd6\xe8\x06\xc2\xa4\xa2\x00\x57\xdb\xcb\x24\xe2\x38\x01\xdf\x51\x2f\xed\xc4\x86\xc5\x70\x0f\x20\xdd\xb7\x3e\x3f\xe0"] = CTInfo($description="DigiCert 'Wyvern2025h2' Log", $operator="DigiCert", $url="https://wyvern.ct.digicert.com/2025h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xe0\xdb\x41\xef\xe4\x04\xbd\xcb\x6b\x2e\x4c\xcc\xf1\x6c\xde\x41\x58\x7f\xfe\x94\xf6\x7a\xf6\x60\xed\x8b\x76\x72\xa3\xa2\x1c\x31\x13\x32\x35\xa1\xf2\x08\xd2\x68\xc5\x34\xa7\x56\x08\x1c\x63\xde\x95\xe2\x81\x69\x97\x8d\x1e\xa8\xb7\x66\x51\x25\x75\x4d\x78\x2e"),
["\xdb\x07\x6c\xde\x6a\x8b\x78\xec\x58\xd6\x05\x64\x96\xeb\x6a\x26\xa8\xc5\x9e\x72\x12\x93\xe8\xac\x03\x27\xdd\xde\x89\xdb\x5a\x2a"] = CTInfo($description="DigiCert 'Sphinx2024h1' Log", $operator="DigiCert", $url="https://sphinx.ct.digicert.com/2024h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xc6\xe4\x29\x69\x98\xfe\x28\x92\x57\x12\x4d\x9e\xed\x0e\xe7\x32\xa2\xe6\x9c\x27\x78\xa4\x29\x7c\x99\xd5\xdb\xfa\x22\xc1\xdd\x5e\xa7\xf4\xd8\xea\xc8\xd7\x44\x8d\xe0\xf1\x8c\x0a\x01\x1d\xd8\x22\xa8\xd3\xeb\xc9\x22\x8e\x36\xfb\x4a\xb1\x70\x9c\x5d\xc1\xe8\x33"),
["\xdc\xc9\x5e\x6f\xa2\x99\xb9\xb0\xfd\xbd\x6c\xa6\xa3\x6e\x1d\x72\xc4\x21\x2f\xdd\x1e\x0f\x47\x55\x3a\x36\xd6\xcf\x1a\xd1\x1d\x8d"] = CTInfo($description="DigiCert 'Sphinx2024h2' Log", $operator="DigiCert", $url="https://sphinx.ct.digicert.com/2024h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xdb\x09\x41\x84\xe7\xd1\xf1\x5b\x25\x09\x7b\xe8\xc6\x98\x51\x5e\x29\x85\xfd\x81\xde\x89\xd7\xd0\x86\xa4\xb0\xe5\x15\xec\x5d\x7b\x17\x55\x5f\xc9\x79\x8d\xe4\x22\x36\xe7\xe9\xbf\x38\x3f\xd1\xe9\xd4\x09\x84\x81\xbe\xb6\xc1\xed\x1b\x17\xea\x26\x97\xba\xe9\x9a"),
["\xde\x85\x81\xd7\x50\x24\x7c\x6b\xcd\xcb\xaf\x56\x37\xc5\xe7\x81\xc6\x4c\xe4\x6e\xd6\x17\x63\x9f\x8f\x34\xa7\x26\xc9\xe2\xbd\x37"] = CTInfo($description="DigiCert 'Sphinx2025h1' Log", $operator="DigiCert", $url="https://sphinx.ct.digicert.com/2025h1/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xe3\x2f\x1f\x4d\x89\x05\x75\x29\x78\xbb\x22\x3d\x07\x62\x51\x14\x70\x94\xe7\x3c\xea\xf5\xee\xae\xa6\x48\x9a\x86\x52\x4e\x9e\x5c\xe3\x95\x97\x28\xbb\x52\x4b\x2a\xfd\xc8\xc9\x89\x4e\x45\x31\x17\xd3\x8d\xf2\xe7\xce\x18\x11\x58\x98\x2c\x60\x6f\x58\x20\x36\x6e"),
["\xa4\x42\xc5\x06\x49\x60\x61\x54\x8f\x0f\xd4\xea\x9c\xfb\x7a\x2d\x26\x45\x4d\x87\xa9\x7f\x2f\xdf\x45\x59\xf6\x27\x4f\x3a\x84\x54"] = CTInfo($description="DigiCert 'Sphinx2025h2' Log", $operator="DigiCert", $url="https://sphinx.ct.digicert.com/2025h2/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x41\x8c\x50\x13\x54\xb1\x19\x05\xb7\x7f\x4a\x20\x6e\xa3\x75\x63\xca\x34\xf4\xcc\x74\xea\x32\x3b\xb6\x8b\x03\x14\xa8\x52\x7f\x32\x87\x5e\x59\x9e\x0f\xab\x18\x9e\x29\x6c\xb5\x72\x77\x1a\x27\x54\x85\x5d\xc1\x7b\x24\xa8\x34\xe3\xcd\x88\xce\xd4\x50\x1b\xbe\x69"),
["\x55\x81\xd4\xc2\x16\x90\x36\x01\x4a\xea\x0b\x9b\x57\x3c\x53\xf0\xc0\xe4\x38\x78\x70\x25\x08\x17\x2f\xa3\xaa\x1d\x07\x13\xd3\x0c"] = CTInfo($description="Sectigo 'Sabre' CT log", $operator="Sectigo", $url="https://sabre.ct.comodo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xf2\x6f\xd2\x89\x0f\x3f\xc5\xf8\x87\x1e\xab\x65\xb3\xd9\xbb\x17\x23\x8c\x06\x0e\x09\x55\x96\x3d\x0a\x08\xa2\xc5\x71\xb3\xd1\xa9\x2f\x28\x3e\x83\x10\xbf\x12\xd0\x44\x66\x15\xef\x54\xe1\x98\x80\xd0\xce\x24\x6d\x3e\x67\x9a\xe9\x37\x23\xce\x52\x93\x86\xda\x80"),
["\xa2\xe2\xbf\xd6\x1e\xde\x2f\x2f\x07\xa0\xd6\x4e\x6d\x37\xa7\xdc\x65\x43\xb0\xc6\xb5\x2e\xa2\xda\xb7\x8a\xf8\x9a\x6d\xf5\x17\xd8"] = CTInfo($description="Sectigo 'Sabre2024h1'", $operator="Sectigo", $url="https://sabre2024h1.ct.sectigo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x2c\x01\xf6\xce\x31\xbc\xaa\x14\x61\x51\xfe\x6b\x7a\x87\xae\xa6\xd3\x9b\xc7\x87\x2d\x0a\x5a\xc8\x4f\xb5\x54\xdc\xc9\x93\xa0\x00\xee\xca\x1c\xb9\xa7\xb6\x7b\x47\x3b\xe5\x4f\xaa\x6c\x16\x1c\x70\x2e\xc8\xec\x53\x5a\x4c\x21\x4c\x7e\x27\x0b\x13\x14\x5e\xfc\x85"),
["\x19\x98\x10\x71\x09\xf0\xd6\x52\x2e\x30\x80\xd2\x9e\x3f\x64\xbb\x83\x6e\x28\xcc\xf9\x0f\x52\x8e\xee\xdf\xce\x4a\x3f\x16\xb4\xca"] = CTInfo($description="Sectigo 'Sabre2024h2'", $operator="Sectigo", $url="https://sabre2024h2.ct.sectigo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x7a\x10\x4c\x8a\xe7\x22\x7b\x6d\x2a\xba\x8e\xfa\x6b\x4a\x81\xd5\x85\xae\x03\xef\xff\x4b\xfc\x4d\x53\x3d\xb7\x8c\xbb\x75\x09\xc9\xea\x16\x7e\xc1\x77\x16\xd2\xc2\x45\x74\x6d\x8d\xc4\xe1\x88\x37\xdf\xd4\xf3\x60\x65\xfc\xa0\x75\xf0\x20\x66\x8e\x4a\xcc\x19\xda"),
["\xe0\x92\xb3\xfc\x0c\x1d\xc8\xe7\x68\x36\x1f\xde\x61\xb9\x96\x4d\x0a\x52\x78\x19\x8a\x72\xd6\x72\xc4\xb0\x4d\xa5\x6d\x6f\x54\x04"] = CTInfo($description="Sectigo 'Sabre2025h1'", $operator="Sectigo", $url="https://sabre2025h1.ct.sectigo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x7e\x2f\x39\xf1\xe8\x23\x8e\xb3\x32\x04\xaf\x4d\x57\xf6\xdb\xc5\x74\xa4\x7a\x6d\x3b\x07\x51\x0c\x5a\xfb\x80\x30\x05\xc6\x5a\x0c\xc4\x76\xd6\x06\xa8\x57\x4d\xfb\xdf\xe4\x82\x90\xc2\x41\xae\x70\xb3\x31\xa2\xe3\xfa\x3d\x5f\x2c\x5d\x04\xcd\xb4\x9d\x55\xab\x41"),
["\x1a\x04\xff\x49\xd0\x54\x1d\x40\xaf\xf6\xa0\xc3\xbf\xf1\xd8\xc4\x67\x2f\x4e\xec\xee\x23\x40\x68\x98\x6b\x17\x40\x2e\xdc\x89\x7d"] = CTInfo($description="Sectigo 'Sabre2025h2'", $operator="Sectigo", $url="https://sabre2025h2.ct.sectigo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x85\x13\x11\x2d\x7b\xf3\x93\x81\xe4\xb9\x7c\xd9\x64\x3b\xe7\xb5\x83\x99\x66\x79\x59\x47\x6a\x42\x5e\xd6\xbd\x63\x2e\xb7\x91\x4b\xae\xbc\x56\xc4\xc5\x6e\x09\xa0\xd7\x64\x1a\xc8\xc1\xaf\x89\x8b\xf5\x58\xd8\xba\xeb\x7b\x83\x52\xe9\xf4\xe0\xa5\xcd\xcd\x92\xcc"),
["\x6f\x53\x76\xac\x31\xf0\x31\x19\xd8\x99\x00\xa4\x51\x15\xff\x77\x15\x1c\x11\xd9\x02\xc1\x00\x29\x06\x8d\xb2\x08\x9a\x37\xd9\x13"] = CTInfo($description="Sectigo 'Mammoth' CT log", $operator="Sectigo", $url="https://mammoth.ct.comodo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xef\xe4\x7d\x74\x2e\x15\x15\xb6\xe9\xbb\x23\x8b\xfb\x2c\xb5\xe1\xc7\x80\x98\x47\xfb\x40\x69\x68\xfc\x49\xad\x61\x4e\x83\x47\x3c\x1a\xb7\x8d\xdf\xff\x7b\x30\xb4\xba\xff\x2f\xcb\xa0\x14\xe3\xad\xd5\x85\x3f\x44\x59\x8c\x8c\x60\x8b\xd7\xb8\xb1\xbf\xae\x8c\x67"),
["\x29\xd0\x3a\x1b\xb6\x74\xaa\x71\x1c\xd3\x03\x5b\x65\x57\xc1\x4f\x8a\xa7\x8b\x4f\xe8\x38\x94\x49\xec\xa4\x53\xf9\x44\xbd\x24\x68"] = CTInfo($description="Sectigo 'Mammoth2024h1'", $operator="Sectigo", $url="https://mammoth2024h1.ct.sectigo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xa4\x59\x90\xf3\x71\x24\x24\xf7\xc3\x55\x27\x56\x9c\xa3\x59\x1e\xf7\xb7\x9f\xce\xab\x4e\x19\x66\x4d\xd0\x8a\xfa\x9d\x62\xa4\x24\xf0\x3b\x20\xe4\x1d\x14\x67\xc8\xfc\xe4\x37\xf2\x4b\x38\x54\x5a\xcf\x9f\x6b\x07\x90\xd0\x0e\x7e\x3d\x4c\x87\xb2\xe8\x3f\x07\xcc"),
["\x50\x85\x01\x58\xdc\xb6\x05\x95\xc0\x0e\x92\xa8\x11\x02\xec\xcd\xfe\x3f\x6b\x78\x58\x42\x9f\x57\x98\x35\x38\xc9\xda\x52\x50\x63"] = CTInfo($description="Sectigo 'Mammoth2024h1b'", $operator="Sectigo", $url="https://mammoth2024h1b.ct.sectigo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xa3\xd5\x07\x28\x7a\x04\x34\xae\xca\xbe\x80\x79\x4f\x3e\xf6\x41\xf4\x24\x04\xe1\xd6\x36\x5a\x1a\x09\xf2\xd1\xba\x84\x17\xae\x1e\xa1\x7c\x00\x1d\x54\x73\x90\x75\x21\xa8\xd1\xda\x5e\x10\xe1\x8c\xec\xb2\x8a\x8c\xc8\xe7\xdd\xcd\xe2\x07\xf0\x4e\x16\x02\x57\x37"),
["\xdf\xe1\x56\xeb\xaa\x05\xaf\xb5\x9c\x0f\x86\x71\x8d\xa8\xc0\x32\x4e\xae\x56\xd9\x6e\xa7\xf5\xa5\x6a\x01\xd1\xc1\x3b\xbe\x52\x5c"] = CTInfo($description="Sectigo 'Mammoth2024h2'", $operator="Sectigo", $url="https://mammoth2024h2.ct.sectigo.com/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x85\x66\x22\x24\x6e\xbe\x52\x62\x0a\xa0\xaf\xc3\x25\x1a\x36\x2e\xa7\x60\x89\xa2\x65\xbf\xa4\x5f\xbd\x85\x6a\x94\x05\x81\x35\x90\x54\x31\x95\xe7\x11\x9e\xa3\x2e\x0f\x85\xef\xa7\x88\x57\x8b\x63\x1a\x81\xc1\x41\x9d\x7d\xec\x01\x3a\xdb\xb9\xc1\x27\xf4\x65\x1e"),
@@ -39,4 +50,6 @@ redef ct_logs += {
["\x87\x4f\xb5\x0d\xc0\x29\xd9\x93\x1d\xe5\x73\xe9\xf2\x89\x9e\x8e\x45\x33\xb3\x92\xd3\x8b\x0a\x46\x25\x74\xbf\x0f\xee\xb2\xfc\x1e"] = CTInfo($description="Trust Asia Log2024-2", $operator="TrustAsia", $url="https://ct2024.trustasia.com/log2024/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xa7\x64\xe2\x79\x81\x3f\x61\xd7\xec\xc6\xf8\x65\x28\x1d\xa0\xb4\x66\x33\xc3\x25\xd5\x0a\x95\x78\x9c\x8f\xfe\xa4\x2a\xd8\x8f\x7e\x72\xe0\xfe\xa8\x7f\xf8\xb1\x2d\x85\xc0\x8e\x12\x74\x0d\x2f\x8c\xab\xd7\x7f\x7a\x1e\xd9\x84\x33\x39\xe8\xfd\x89\x5f\x96\x48\x08"),
["\x28\xe2\x81\x38\xfd\x83\x21\x45\xe9\xa9\xd6\xaa\x75\x37\x6d\x83\x77\xa8\x85\x12\xb3\xc0\x7f\x72\x41\x48\x21\xdc\xbd\xe9\x8c\x66"] = CTInfo($description="TrustAsia Log2025a", $operator="TrustAsia", $url="https://ct2025-a.trustasia.com/log2025a/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x70\xe5\xb1\xa4\x09\x79\x2b\x9d\xf8\xa3\xa0\xdf\x18\xef\x95\x5d\x03\x6c\x7b\xa1\x91\xa9\xb8\x80\x7d\xec\x5c\x02\x08\xe2\x6e\x2f\x7c\x32\x70\xbd\x96\x84\x5f\xa6\x62\xe9\x65\xb5\x7c\x90\x58\xba\x22\xd5\xf9\xf5\x69\x54\xb7\xa8\x94\x4e\x32\x09\xae\x26\x11\x4d"),
["\x28\x2c\x8b\xdd\x81\x0f\xf9\x09\x12\x0a\xce\x16\xd6\xe0\xec\x20\x1b\xea\x82\xa3\xa4\xaf\x19\xd9\xef\xfb\x59\xe8\x3f\xdc\x42\x68"] = CTInfo($description="TrustAsia Log2025b", $operator="TrustAsia", $url="https://ct2025-b.trustasia.com/log2025b/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xaa\xa0\x8b\xdb\x67\x14\x5d\x97\x89\x1d\x08\x8d\x06\xd7\xc1\x94\x8e\xb0\xfa\x4c\x46\xd5\x53\x08\x78\x2b\x04\x53\x6c\xf3\xde\xb1\xd1\x53\x40\xda\x90\x57\xe6\x1a\x9e\x3c\xc7\x03\xb8\xbd\x2f\xa9\xcf\xe8\x7b\x5e\xe1\x4b\x60\xe5\x38\x43\x60\x97\xc1\x5b\x2f\x65"),
["\x74\xdb\x9d\x58\xf7\xd4\x7e\x9d\xfd\x78\x7a\x16\x2a\x99\x1c\x18\xcf\x69\x8d\xa7\xc7\x29\x91\x8c\x9a\x18\xb0\x45\x0d\xba\x44\xbc"] = CTInfo($description="TrustAsia 'log2026a'", $operator="TrustAsia", $url="https://ct2026-a.trustasia.com/log2026a/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xa7\x4e\x7a\xc9\xa6\x07\xf9\xff\x74\xec\x98\xcb\x49\xe1\x00\x24\xb3\x59\x2e\x83\xfd\xc0\x70\x35\x33\x4c\x63\xca\x74\x83\xc0\x3c\x5b\x53\x40\x7c\x31\x1f\x35\xa4\x5f\x0f\xe4\xee\x4f\x89\x17\xe8\x5b\x2e\xc5\xac\x00\x05\xc9\x76\x37\x45\x97\x03\x15\xff\x60\x59"),
["\x25\xb7\xef\xde\xa1\x13\x01\x93\xed\x93\x07\x97\x70\xaa\x32\x2a\x26\x62\x0d\xe3\x5a\xc8\xaa\x7c\x75\x19\x7d\xe0\xb1\xa9\xe0\x65"] = CTInfo($description="TrustAsia 'log2026b'", $operator="TrustAsia", $url="https://ct2026-b.trustasia.com/log2026b/", $maximum_merge_delay=86400, $key="\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x0f\x12\x8c\xa9\xe6\xe3\xec\x62\xee\xdf\x58\xc8\x50\xe6\x26\x70\x76\x10\xb7\x04\x39\xb3\xa7\xf8\x4c\x73\x3b\xc3\x38\x5a\x12\x00\x4c\xe0\xda\x0e\x16\x8a\x45\x32\x0a\x31\xaa\x22\xc7\x9d\x7d\x05\x53\xc7\x9e\x94\xea\x9b\x57\x46\xbf\x4f\xa4\x7e\xfb\xdf\xfa\x85"),
};
File diff suppressed because one or more lines are too long
@@ -1,19 +1,2 @@
-##! In a cluster configuration, open the port number for metrics
-##! from the cluster node configuration for exporting data to
-##! Prometheus.
-##!
-##! The manager node will also provide a ``/services.json`` endpoint
-##! for the HTTP Service Discovery system in Prometheus to use for
-##! configuration. This endpoint will include information for all of
-##! the other nodes in the cluster.
-@load base/frameworks/cluster
-
-@if ( Cluster::is_enabled() )
-
-redef Telemetry::metrics_endpoint_name = Cluster::node;
-
-@if ( Cluster::local_node_metrics_port() != 0/unknown )
-redef Telemetry::metrics_port = Cluster::local_node_metrics_port();
-@endif
-
-@endif
+@deprecated "Remove in v7.1: Cluster nodes now implicitly listen on metrics port if set in cluster-layout."
+@load base/frameworks/telemetry
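The policy script deprecated above relied on each node exposing an HTTP endpoint that Prometheus can scrape; with this change, nodes open that port implicitly from the cluster layout. For readers unfamiliar with what "serving a metrics port" involves, here is a minimal standalone C++ sketch using prometheus-cpp (the library Zeek vendors together with civetweb, see the includes later in this diff). All names, the port, and the counter are illustrative assumptions, not Zeek's actual telemetry code.

// Minimal sketch, assuming prometheus-cpp is available: an HTTP endpoint
// serving /metrics from a registry, roughly what a Zeek node does per its
// configured metrics port.
#include <chrono>
#include <memory>
#include <thread>

#include <prometheus/counter.h>
#include <prometheus/exposer.h>
#include <prometheus/registry.h>

int main() {
    prometheus::Exposer exposer{"127.0.0.1:9911"}; // hypothetical per-node metrics port

    auto registry = std::make_shared<prometheus::Registry>();

    auto& packets = prometheus::BuildCounter()
                        .Name("example_packets_total")
                        .Help("Illustrative counter, not a real Zeek metric")
                        .Register(*registry)
                        .Add({{"node", "worker-1"}});

    exposer.RegisterCollectable(registry);

    for ( int i = 0; i < 10; ++i ) {
        packets.Increment();
        std::this_thread::sleep_for(std::chrono::seconds(1));
    }
}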
@@ -94,10 +94,6 @@ redef digest_salt = "Please change this value.";
 # telemetry_histogram.log.
 @load frameworks/telemetry/log
 
-# Enable Prometheus metrics scraping in the cluster: each Zeek node will listen
-# on the metrics port defined in its Cluster::nodes entry.
-# @load frameworks/telemetry/prometheus
-
 # Uncomment the following line to enable detection of the heartbleed attack. Enabling
 # this might impact performance a bit.
 # @load policy/protocols/ssl/heartbleed
@@ -201,6 +201,9 @@ public:
 
     bool PermitWeird(const char* name, uint64_t threshold, uint64_t rate, double duration);
 
+    // Returns true once Done() is called.
+    bool IsFinished() { return finished; }
+
 private:
     friend class session::detail::Timer;
 
@@ -113,19 +113,7 @@ void Analyzer::CtorInit(const zeek::Tag& arg_tag, Connection* arg_conn) {
 
 Analyzer::~Analyzer() {
     assert(finished);
+    assert(new_children.empty());
-    // Make sure any late entries into the analyzer tree are handled (e.g.
-    // from some Done() implementation).
-    LOOP_OVER_GIVEN_CHILDREN(i, new_children) {
-        if ( ! (*i)->finished )
-            (*i)->Done();
-    }
-
-    // Deletion of new_children done in separate loop in case a Done()
-    // implementation tries to inspect analyzer tree w/ assumption that
-    // all analyzers are still valid.
-    LOOP_OVER_GIVEN_CHILDREN(i, new_children)
-        delete *i;
 
     LOOP_OVER_CHILDREN(i)
         delete *i;
@@ -330,6 +318,30 @@ void Analyzer::ForwardEndOfData(bool orig) {
 bool Analyzer::AddChildAnalyzer(Analyzer* analyzer, bool init) {
     auto t = analyzer->GetAnalyzerTag();
 
+    // Prevent attaching child analyzers to analyzer subtrees where
+    // either the parent has finished or is being removed. Further,
+    // don't attach analyzers when the connection has finished or is
+    // currently being finished (executing Done()).
+    //
+    // Scenarios in which analyzers have been observed that late in
+    // analyzer / connection lifetime are:
+    //
+    // * A DPD signature match on undelivered TCP data that is flushed
+    //   during Connection::Done(). The PIA analyzer activates a new
+    //   analyzer adding it to the TCP analyzer.
+    //
+    // * Analyzers flushing buffered state during Done(), resulting
+    //   in new analyzers being created.
+    //
+    // Analyzers added during Done() are problematic as calling Done()
+    // within the parent's destructor isn't safe, so we prevent these
+    // situations.
+    if ( Removing() || IsFinished() || Conn()->IsFinished() ) {
+        analyzer->Done();
+        delete analyzer;
+        return false;
+    }
+
     if ( HasChildAnalyzer(t) || IsPreventedChildAnalyzer(t) ) {
         analyzer->Done();
         delete analyzer;
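The guard added above refuses to attach a child once the parent analyzer, or the whole connection, is finishing; note that the rejected analyzer is still finalized and freed so no invariants or memory are leaked. Below is a self-contained C++ sketch of the same ownership rule with hypothetical Parent/Child classes; it is an illustration of the pattern, not Zeek's actual Analyzer API.

// A minimal sketch, assuming simplified stand-in types: once a parent is
// finished or being removed, offered children are finalized and dropped
// instead of attached, and the caller is told the attach did not happen.
#include <cassert>
#include <iostream>
#include <memory>
#include <vector>

struct Child {
    bool finished = false;
    void Done() { finished = true; }
    ~Child() { assert(finished); } // never destroy an un-finalized child
};

class Parent {
public:
    bool AddChild(std::unique_ptr<Child> c) {
        if ( removing || finished ) {
            // Too late in the lifetime: keep the child's invariants intact,
            // then let the unique_ptr free it. Caller must not use it again.
            c->Done();
            return false;
        }

        children.push_back(std::move(c));
        return true;
    }

    void Done() {
        for ( auto& c : children )
            c->Done();

        finished = true;
    }

    bool removing = false;
    bool finished = false;

private:
    std::vector<std::unique_ptr<Child>> children;
};

int main() {
    Parent p;
    std::cout << p.AddChild(std::make_unique<Child>()) << "\n"; // 1: attached
    p.Done();
    std::cout << p.AddChild(std::make_unique<Child>()) << "\n"; // 0: rejected late
}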
@@ -15,7 +15,7 @@ public type Request = unit {
 
     switch {
         -> : /\/W/ { self.whois = True; }
-        -> void;
+        -> : void;
     };
 
     : OptionalWhiteSpace;
@@ -1,5 +1,5 @@
 spicy_add_analyzer(
     NAME LDAP
     PACKAGE_NAME spicy-ldap
-    SOURCES ldap.spicy ldap.evt asn1.spicy
-    MODULES LDAP ASN1)
+    SOURCES ldap.spicy ldap.evt asn1.spicy ldap_zeek.spicy
+    MODULES LDAP ASN1 LDAP_Zeek)
@@ -41,3 +41,18 @@ on LDAP::SearchRequest -> event LDAP::search_request($conn,
 on LDAP::SearchResultEntry -> event LDAP::search_result_entry($conn,
                                                               message.messageID,
                                                               self.objectName);
+
+on LDAP::ExtendedRequest -> event LDAP::extended_request($conn,
+                                                         message.messageID,
+                                                         self.requestName,
+                                                         self.requestValue);
+
+on LDAP::ExtendedResponse -> event LDAP::extended_response($conn,
+                                                           message.messageID,
+                                                           message.result_.code,
+                                                           self.responseName,
+                                                           self.responseValue);
+
+# Once switched into MessageMode::TLS, we won't parse messages anymore,
+# so this is raised just once.
+on LDAP::Message if (ctx.messageMode == LDAP::MessageMode::TLS) -> event LDAP::starttls($conn);
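The new extended_request/extended_response/starttls events above let script-land observe LDAP extended operations; per RFC 4511 section 4.14.1 the StartTLS request is identified by OID 1.3.6.1.4.1.1466.20037, and only a successful response actually switches the session to TLS. The following C++ sketch mirrors that two-step state machine with hypothetical names; it is an illustration, not the analyzer's real code.

// A small sketch, assuming pre-decoded fields: remember that StartTLS was
// requested, and only flip to TLS once the server answers with success.
#include <cstdint>
#include <iostream>
#include <string>

enum class MessageMode : uint8_t { Cleartext, Tls };

struct LdapSession {
    bool start_tls_requested = false;
    MessageMode mode = MessageMode::Cleartext;

    void OnExtendedRequest(const std::string& oid) {
        if ( oid == "1.3.6.1.4.1.1466.20037" ) // StartTLS, RFC 4511 sec. 4.14.1
            start_tls_requested = true;
    }

    void OnExtendedResponse(int result_code) {
        if ( start_tls_requested && result_code == 0 ) // 0 == success
            mode = MessageMode::Tls; // hand the remaining bytes to a TLS parser
    }
};

int main() {
    LdapSession s;
    s.OnExtendedRequest("1.3.6.1.4.1.1466.20037");
    s.OnExtendedResponse(0);
    std::cout << (s.mode == MessageMode::Tls) << "\n"; // 1
}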
@@ -126,125 +126,206 @@ public type Result = unit {
# https://tools.ietf.org/html/rfc4511#section-4.1.10
|
# https://tools.ietf.org/html/rfc4511#section-4.1.10
|
||||||
};
|
};
|
||||||
|
|
||||||
|
# 1.2.840.48018.1.2.2 (MS KRB5 - Microsoft Kerberos 5)
|
||||||
|
const GSSAPI_MECH_MS_KRB5 = "1.2.840.48018.1.2.2";
|
||||||
|
|
||||||
|
# Supported SASL stripping modes.
|
||||||
|
type MessageMode = enum {
|
||||||
|
MS_KRB5 = 1, # Payload starts with a 4 byte length followed by a wrap token that may or may not be sealed.
|
||||||
|
TLS = 2, # Client/server used StartTLS, forward to SSL analyzer.
|
||||||
|
MAYBE_ENCRYPTED = 3, # Use a heuristic to determine encrypted traffic.
|
||||||
|
CLEARTEXT = 4, # Assume cleartext.
|
||||||
|
ENCRYPTED = 5, # Assume encrypted.
|
||||||
|
};
|
||||||
|
|
||||||
|
type Ctx = struct {
|
||||||
|
messageMode: MessageMode; # Message dispatching mode
|
||||||
|
saslMechanism: string; # The SASL mechanism selected by the client.
|
||||||
|
startTlsRequested: bool; # Did the client use the StartTLS extended request?
|
||||||
|
};
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
#-----------------------------------------------------------------------------
|
||||||
public type Messages = unit {
|
public type Messages = unit {
|
||||||
: MessageWrapper[];
|
%context = Ctx;
|
||||||
|
: MessageDispatch(self.context())[];
|
||||||
};
|
};
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
#-----------------------------------------------------------------------------
|
||||||
type SASLLayer = unit {
|
public type MessageDispatch = unit(ctx: Ctx&) {
|
||||||
# For the time being (before we support parsing the SASL layer) this unit
|
switch( ctx.messageMode ) {
|
||||||
# is used by MessageWrapper below to strip it (SASL) so that the parser
|
MessageMode::Undef -> : Message(ctx);
|
||||||
# can attempt to resume parsing afterward. It also sets the success flag
|
MessageMode::MS_KRB5 -> : SaslMsKrb5Stripper(ctx);
|
||||||
# if '\x30' is found, otherwise backtracks so that we can deal with encrypted
|
MessageMode::TLS -> : TlsForward; # never returns
|
||||||
# SASL payloads without raising a parse error.
|
MessageMode::MAYBE_ENCRYPTED -> : MaybeEncrypted(ctx);
|
||||||
var success: bool = False;
|
MessageMode::CLEARTEXT -> : Message(ctx);
|
||||||
: bytes &until=b"\x30" {
|
MessageMode::ENCRYPTED -> : EncryptedMessage;
|
||||||
self.success = True;
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
type MaybeEncrypted = unit(ctx: Ctx&) {
|
||||||
|
# A plaintext LDAP message always starts with at least 3 bytes and the first
|
||||||
|
# byte is 0x30 for the sequence. A SASL encrypted message starts with a 4 byte
|
||||||
|
# length field. The heuristic here is that if the first byte is a 0x30,
|
||||||
|
# assume it's unencrypted LDAP. This should be pretty good, if it was an
|
||||||
|
# encrypted/SASL wrapped message, it would have a size between 0x30000000 and
|
||||||
|
# 0x30FFFFFF, meaning at least a size of ~768MB, which seems unlikely.
|
||||||
|
var start: iterator<stream>;
|
||||||
|
var saslLen: uint64;
|
||||||
|
var mech: bytes;
|
||||||
|
|
||||||
|
on %init {
|
||||||
|
self.start = self.input();
|
||||||
|
# Don't have starts_with() on string, work around that.
|
||||||
|
# https://github.com/zeek/spicy/issues/1807
|
||||||
|
self.mech = ctx.saslMechanism.encode(spicy::Charset::UTF8);
|
||||||
}
|
}
|
||||||
|
|
||||||
on %error {
|
first: uint8 {
|
||||||
self.backtrack();
|
if ( $$ == 0x30 ) {
|
||||||
|
ctx.messageMode = MessageMode::CLEARTEXT;
|
||||||
|
} else {
|
||||||
|
ctx.messageMode = MessageMode::ENCRYPTED;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# As a further heuristic, if encrypted mode was decided and the client
|
||||||
|
# requested GSSAPI or GSS-SPNEGO (or we just didn't see it) peak a bit
|
||||||
|
# into the SASL payload and check if it starts with a 0504 (WRAP_TOKEN).
|
||||||
|
# If so, switch into KRB mode assuming that's what is being used and
|
||||||
|
# have a chance seeing some more plaintext LDAP in non-sealed tokens.
|
||||||
|
rem: uint8[3] if ( ctx.messageMode == MessageMode::ENCRYPTED && (|self.mech| == 0 || self.mech.starts_with(b"GSS")) ) {
|
||||||
|
self.saslLen = (uint64(self.first) << 24) + (uint64($$[0]) << 16) + (uint64($$[1]) << 8) + uint64($$[2]);
|
||||||
|
}
|
||||||
|
|
||||||
|
: uint16 if ( self.saslLen >= 2 ) {
|
||||||
|
if ( $$ == 0x0504 ) {
|
||||||
|
ctx.messageMode = MessageMode::MS_KRB5;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Rewind the input.
|
||||||
|
: void {
|
||||||
|
# Prevent MessageDispatch from recursing endlessly.
|
||||||
|
assert ctx.messageMode != MessageMode::MAYBE_ENCRYPTED;
|
||||||
|
self.set_input(self.start);
|
||||||
|
}
|
||||||
|
|
||||||
|
# One recursion to parse with the new ctx.messageMode setting.
|
||||||
|
: MessageDispatch(ctx);
|
||||||
};
|
};
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
#-----------------------------------------------------------------------------
|
||||||
public type MessageWrapper = unit {
|
type EncryptedMessage = unit {
|
||||||
# A wrapper around 'Message'. First, we try to parse a Message unit.
|
len: uint32;
|
||||||
# There are two possible outcomes:
|
: skip bytes &size=self.len;
|
||||||
# (1) Success -> We consumed all bytes and successfully parsed a Message unit
|
};
|
||||||
# (2) No success -> self.backtrack() is called in the Message unit,
|
|
||||||
# so effectively we didn't consume any bytes yet.
|
|
||||||
# The outcome can be determined by checking the `success` variable of the Message unit
|
|
||||||
|
|
||||||
# This success variable is different, because this keeps track of the status for the MessageWrapper object
|
|
||||||
var success: bool = False;
|
|
||||||
var message: Message;
|
|
||||||
|
|
||||||
# Here, we try to parse the message...
|
|
||||||
: Message &try {
|
|
||||||
|
|
||||||
# ... and only if the Message unit successfully parsed, we can set
|
|
||||||
# the status of this MessageWrapper's success to 'True'
|
|
||||||
if ( $$.success == True ) {
|
|
||||||
self.success = True;
|
|
||||||
self.message = $$;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# If we failed to parse the message, then we're going to scan the remaining bytes for the '\x30'
|
|
||||||
# start byte and try to parse a Message starting from that byte. This effectively
|
|
||||||
# strips the SASL layer if SASL Signing was enabled. Until now, I haven't found A
|
|
||||||
# better way to scan / determine the exact SASL header length yet, so we'll stick with this
|
|
||||||
# for the time being. If the entire LDAP packet was encrypted with SASL, then we skip parsing for
|
|
||||||
# now (in the long run we need to be parsing SASL/GSSAPI instead, in which case encrypted payloads
|
|
||||||
# are just another message type).
|
|
||||||
|
|
||||||
# SASLLayer (see unit above) just consumes bytes &until=b"\x30" or backtracks if it isn't found
|
|
||||||
# and sets a success flag we can use later to decide if those bytes contain a parsable message.
|
|
||||||
var sasl_success: bool = False;
|
|
||||||
: SASLLayer &try if ( self.success == False ) {
|
|
||||||
if ( $$.success == True ) {
|
|
||||||
self.sasl_success = True;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
var remainder: bytes;
|
|
||||||
|
|
||||||
# SASLLayer consumes the delimiter ('\x30'), and because this is the first byte of a valid LDAP message
|
|
||||||
# we should re-add it to the remainder if the delimiter was found. If the delimiter was not found, we
|
|
||||||
# leave the remainder empty, but note that the bytes must be consumed either way to avoid stalling the
|
|
||||||
# parser and causing an infinite loop error.
|
|
||||||
: bytes &eod if ( self.success == False ) {
|
|
||||||
if ( self.sasl_success == True ) {
|
|
||||||
self.remainder = b"\x30" + $$;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Again, try to parse a Message unit. Be aware that in this will sometimes fail if the '\x30' byte is
|
|
||||||
# also present in the SASL header.
|
|
||||||
|
|
||||||
# Also, we could try to do this recursively or try a few iterations, but for now I would suggest
|
|
||||||
# to try this extra parsing once to get the best cost/benefit tradeoff.
|
|
||||||
: Message &try &parse-from=self.remainder if ( self.success == False && self.sasl_success == True ) {
|
|
||||||
if ( $$.success == True ) {
|
|
||||||
self.success = True;
|
|
||||||
self.message = $$;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# If we still didn't manage to parse a message (so the &try resulted in another backtrack()) then
|
|
||||||
# this is probably an encrypted LDAP message, so skip it
|
|
||||||
|
|
||||||
} &convert=self.message;
|
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
#-----------------------------------------------------------------------------
|
||||||
public type Message = unit {
|
type TlsForward = unit {
|
||||||
|
# Just consume everything. This is hooked in ldap_zeek.spicy
|
||||||
|
chunk: bytes &chunked &eod;
|
||||||
|
};
|
||||||
|
|
||||||
|
type KrbWrapToken = unit {
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc4121#section-4.2.6.2
|
||||||
|
|
||||||
|
# Number of bytes to expect *after* the payload.
|
||||||
|
var trailer_ec: uint64;
|
||||||
|
var header_ec: uint64;
|
||||||
|
|
||||||
|
ctx_flags: bitfield(8) {
|
||||||
|
send_by_acceptor: 0;
|
||||||
|
sealed: 1;
|
||||||
|
acceptor_subkey: 2;
|
||||||
|
};
|
||||||
|
filler: skip b"\xff";
|
||||||
|
ec: uint16; # extra count
|
||||||
|
rrc: uint16 { # right rotation count
|
||||||
|
# Handle rrc == ec or rrc == 0.
|
||||||
|
if ( self.rrc == self.ec ) {
|
||||||
|
self.header_ec = self.ec;
|
||||||
|
} else if ( self.rrc == 0 ) {
|
||||||
|
self.trailer_ec = self.ec;
|
||||||
|
} else {
|
||||||
|
if ( ! self.ctx_flags.sealed )
|
||||||
|
# If it's sealed, we'll consume until &eod anyhow
|
||||||
|
# and ec/rrc shouldn't apply, otherwise, bail.
|
||||||
|
throw "Unhandled rc %s and ec %s" % (self.ec, self.rrc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
snd_seq: uint64;
|
||||||
|
header_e: skip bytes &size=self.header_ec;
|
||||||
|
};
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
type SaslMsKrb5Stripper = unit(ctx: Ctx&) {
|
||||||
|
# This is based on Wireshark output and example traffic we have. There's always
|
||||||
|
# a 4 byte length field followed by the krb5_tok_id field in messages after
|
||||||
|
# MS_KRB5 was selected. I haven't read enough specs to understand if it's
|
||||||
|
# just this one case that works, or others could use the same stripping.
|
||||||
|
var switch_size: uint64;
|
||||||
|
|
||||||
|
len: uint32;
|
||||||
|
krb5_tok_id: uint16;
|
||||||
|
|
||||||
|
switch ( self.krb5_tok_id ) {
|
||||||
|
0x0504 -> krb_wrap_token: KrbWrapToken;
|
||||||
|
* -> : void;
|
||||||
|
};
|
||||||
|
|
||||||
|
: skip bytes &size=0 {
|
||||||
|
self.switch_size = self.len - (self.offset() - 4);
|
||||||
|
if ( self?.krb_wrap_token )
|
||||||
|
self.switch_size -= self.krb_wrap_token.trailer_ec;
|
||||||
|
}
|
||||||
|
|
||||||
|
switch ( self?.krb_wrap_token && ! self.krb_wrap_token.ctx_flags.sealed ) {
|
||||||
|
True -> : Message(ctx)[] &eod;
|
||||||
|
* -> : skip bytes &eod;
|
||||||
|
} &size=self.switch_size;
|
||||||
|
|
||||||
|
# Consume the wrap token trailer, if any.
|
||||||
|
trailer_e: skip bytes &size=self.krb_wrap_token.trailer_ec if (self?.krb_wrap_token);
|
||||||
|
};
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
public type Message = unit(ctx: Ctx&) {
|
||||||
var messageID: int64;
|
var messageID: int64;
|
||||||
var opcode: ProtocolOpcode = ProtocolOpcode::Undef;
|
var opcode: ProtocolOpcode = ProtocolOpcode::Undef;
|
||||||
var applicationBytes: bytes;
|
|
||||||
var unsetResultDefault: Result;
|
var unsetResultDefault: Result;
|
||||||
var result_: Result& = self.unsetResultDefault;
|
var result_: Result& = self.unsetResultDefault;
|
||||||
var obj: string = "";
|
var obj: string = "";
|
||||||
var arg: string = "";
|
var arg: string = "";
|
||||||
var success: bool = False;
|
var seqHeaderLen: uint64;
|
||||||
|
var msgLen: uint64;
|
||||||
|
var opLen: uint64;
|
||||||
|
|
||||||
: ASN1::ASN1Message(True) {
|
seqHeader: ASN1::ASN1Header &requires=($$.tag.class == ASN1::ASN1Class::Universal && $$.tag.type_ == ASN1::ASN1Type::Sequence) {
|
||||||
if (($$.head.tag.type_ == ASN1::ASN1Type::Sequence) &&
|
self.msgLen = $$.len.len;
|
||||||
($$.body?.seq) &&
|
}
|
||||||
(|$$.body.seq.submessages| >= 2)) {
|
|
||||||
if ($$.body.seq.submessages[0].body?.num_value) {
|
# Use offset() to determine how many bytes the seqHeader took. This
|
||||||
self.messageID = $$.body.seq.submessages[0].body.num_value;
|
# needs to be done after the seqHeader field hook.
|
||||||
}
|
: void {
|
||||||
if ($$.body.seq.submessages[1]?.application_id) {
|
self.seqHeaderLen = self.offset();
|
||||||
self.opcode = cast<ProtocolOpcode>(cast<uint8>($$.body.seq.submessages[1].application_id));
|
}
|
||||||
self.applicationBytes = $$.body.seq.submessages[1].application_data;
|
|
||||||
}
|
messageID_header: ASN1::ASN1Header &requires=($$.tag.class == ASN1::ASN1Class::Universal && $$.tag.type_ == ASN1::ASN1Type::Integer);
|
||||||
}
|
: ASN1::ASN1Body(self.messageID_header, False) {
|
||||||
|
self.messageID = $$.num_value;
|
||||||
|
}
|
||||||
|
|
||||||
|
protocolOp: ASN1::ASN1Header &requires=($$.tag.class == ASN1::ASN1Class::Application) {
|
||||||
|
self.opcode = cast<ProtocolOpcode>(cast<uint8>($$.tag.type_));
|
||||||
|
self.opLen = $$.len.len;
|
||||||
}
|
}
|
||||||
|
|
||||||
switch ( self.opcode ) {
|
switch ( self.opcode ) {
|
||||||
ProtocolOpcode::BIND_REQUEST -> BIND_REQUEST: BindRequest(self);
|
ProtocolOpcode::BIND_REQUEST -> BIND_REQUEST: BindRequest(self, ctx);
|
||||||
ProtocolOpcode::BIND_RESPONSE -> BIND_RESPONSE: BindResponse(self);
|
ProtocolOpcode::BIND_RESPONSE -> BIND_RESPONSE: BindResponse(self, ctx);
|
||||||
ProtocolOpcode::UNBIND_REQUEST -> UNBIND_REQUEST: UnbindRequest(self);
|
ProtocolOpcode::UNBIND_REQUEST -> UNBIND_REQUEST: UnbindRequest(self);
|
||||||
ProtocolOpcode::SEARCH_REQUEST -> SEARCH_REQUEST: SearchRequest(self);
|
ProtocolOpcode::SEARCH_REQUEST -> SEARCH_REQUEST: SearchRequest(self);
|
||||||
ProtocolOpcode::SEARCH_RESULT_ENTRY -> SEARCH_RESULT_ENTRY: SearchResultEntry(self);
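The MaybeEncrypted and SaslMsKrb5Stripper units above decide, after a SASL bind, whether the remaining stream is cleartext LDAP, an opaque encrypted SASL blob, or a stream of Kerberos wrap tokens, using the 0x30 first-byte heuristic and the 0x0504 token id. The following standalone C++ sketch restates that classification on a plain buffer; it is a simplification under stated assumptions, not the streaming Spicy logic.

// Standalone sketch of the post-SASL-bind heuristic described above: a
// cleartext LDAP PDU starts with an ASN.1 SEQUENCE (0x30); otherwise the
// first four bytes are a SASL buffer length and a following 0x0504 token id
// marks a GSS-API/Kerberos wrap token (RFC 4121).
#include <cstdint>
#include <iostream>
#include <vector>

enum class Classification { Cleartext, Krb5WrapToken, OpaqueEncrypted, NeedMoreData };

Classification Classify(const std::vector<uint8_t>& buf) {
    if ( buf.empty() )
        return Classification::NeedMoreData;

    if ( buf[0] == 0x30 ) // ASN.1 SEQUENCE: assume plain LDAP
        return Classification::Cleartext;

    if ( buf.size() < 6 ) // need 4-byte SASL length plus 2-byte token id
        return Classification::NeedMoreData;

    uint16_t tok_id = static_cast<uint16_t>(buf[4]) << 8 | buf[5];
    if ( tok_id == 0x0504 ) // WRAP token; may still be sealed (encrypted)
        return Classification::Krb5WrapToken;

    return Classification::OpaqueEncrypted;
}

int main() {
    std::cout << int(Classify({0x30, 0x0c, 0x02})) << "\n";                   // 0: cleartext
    std::cout << int(Classify({0x00, 0x00, 0x00, 0x20, 0x05, 0x04})) << "\n"; // 1: wrap token
    std::cout << int(Classify({0x00, 0x00, 0x00, 0x20, 0x17, 0x03})) << "\n"; // 2: opaque
}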
@@ -262,22 +343,20 @@ public type Message = unit {
     # just commenting this out, it will stop processing LDAP Messages in this connection
     ProtocolOpcode::ADD_REQUEST -> ADD_REQUEST: NotImplemented(self);
     ProtocolOpcode::COMPARE_REQUEST -> COMPARE_REQUEST: NotImplemented(self);
-    ProtocolOpcode::EXTENDED_REQUEST -> EXTENDED_REQUEST: NotImplemented(self);
-    ProtocolOpcode::EXTENDED_RESPONSE -> EXTENDED_RESPONSE: NotImplemented(self);
+    ProtocolOpcode::EXTENDED_REQUEST -> EXTENDED_REQUEST: ExtendedRequest(self, ctx);
+    ProtocolOpcode::EXTENDED_RESPONSE -> EXTENDED_RESPONSE: ExtendedResponse(self, ctx);
     ProtocolOpcode::INTERMEDIATE_RESPONSE -> INTERMEDIATE_RESPONSE: NotImplemented(self);
     ProtocolOpcode::MOD_DN_REQUEST -> MOD_DN_REQUEST: NotImplemented(self);
     ProtocolOpcode::SEARCH_RESULT_REFERENCE -> SEARCH_RESULT_REFERENCE: NotImplemented(self);
-    } &parse-from=self.applicationBytes if ( self.opcode );
+    } &size=self.opLen;
 
-    on %error {
-        self.backtrack();
-    }
+    # Ensure some invariants hold after parsing the command.
+    : void &requires=(self.offset() >= self.seqHeaderLen);
+    : void &requires=(self.msgLen >= (self.offset() - self.seqHeaderLen));
 
-    on %done {
-        self.success = True;
-    }
-
-} &requires=((self?.messageID) && (self?.opcode) && (self.opcode != ProtocolOpcode::Undef));
+    # Eat the controls field if it exists.
+    : skip bytes &size=self.msgLen - (self.offset() - self.seqHeaderLen);
+};
|
|
||||||
#-----------------------------------------------------------------------------
|
#-----------------------------------------------------------------------------
|
||||||
# Bind Operation
|
# Bind Operation
|
||||||
|
@ -288,18 +367,88 @@ public type BindAuthType = enum {
|
||||||
BIND_AUTH_SASL = 3,
|
BIND_AUTH_SASL = 3,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
type GSS_SPNEGO_negTokenInit = unit {
|
||||||
|
oidHeader: ASN1::ASN1Header &requires=($$.tag.class == ASN1::ASN1Class::Universal && $$.tag.type_ == ASN1::ASN1Type::ObjectIdentifier);
|
||||||
|
oid: ASN1::ASN1ObjectIdentifier(self.oidHeader.len.len) &requires=(self.oid.oidstring == "1.3.6.1.5.5.2");
|
||||||
|
|
||||||
|
# TODO: Parse the rest of negTokenInit.
|
||||||
|
: skip bytes &eod;
|
||||||
|
};
|
||||||
|
|
||||||
|
# Peak into GSS-SPNEGO payload and ensure it is indeed GSS-SPNEGO.
|
||||||
|
type GSS_SPNEGO_Init = unit {
|
||||||
|
# This is the optional octet string in SaslCredentials.
|
||||||
|
credentialsHeader: ASN1::ASN1Header &requires=($$.tag.type_ == ASN1::ASN1Type::OctetString);
|
||||||
|
|
||||||
|
# Now we either have the initial message as specified in RFC2743 or
|
||||||
|
# a continuation from RFC4178
|
||||||
|
#
|
||||||
|
# 60 -> APPLICATION [0] https://datatracker.ietf.org/doc/html/rfc2743#page-81)
|
||||||
|
# a1 -> CHOICE [1] https://www.rfc-editor.org/rfc/rfc4178#section-4.2
|
||||||
|
#
|
||||||
|
gssapiHeader: ASN1::ASN1Header &requires=(
|
||||||
|
$$.tag.class == ASN1::ASN1Class::Application && $$.tag.type_ == ASN1::ASN1Type(0)
|
||||||
|
|| $$.tag.class == ASN1::ASN1Class::ContextSpecific && $$.tag.type_ == ASN1::ASN1Type(1)
|
||||||
|
);
|
||||||
|
|
||||||
|
switch ( self.gssapiHeader.tag.type_ ) {
|
||||||
|
ASN1::ASN1Type(0) -> initial: GSS_SPNEGO_negTokenInit;
|
||||||
|
* -> : skip bytes &eod;
|
||||||
|
} &size=self.gssapiHeader.len.len;
|
||||||
|
};
|
||||||
|
|
||||||
type SaslCredentials = unit() {
|
type SaslCredentials = unit() {
|
||||||
mechanism: ASN1::ASN1Message(True) &convert=$$.body.str_value;
|
mechanism: ASN1::ASN1Message(False) &convert=$$.body.str_value;
|
||||||
# TODO: if we want to parse the (optional) credentials string
|
|
||||||
|
# Peak into GSS-SPNEGO payload if we have any.
|
||||||
|
switch ( self.mechanism ) {
|
||||||
|
"GSS-SPNEGO" -> gss_spnego: GSS_SPNEGO_Init;
|
||||||
|
* -> : skip bytes &eod;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
type GSS_SPNEGO_Subsequent = unit {
|
||||||
|
token: ASN1::ASN1Header &requires=($$.tag.class == ASN1::ASN1Class::ContextSpecific);
|
||||||
|
switch ( self.token.tag.type_ ) {
|
||||||
|
ASN1::ASN1Type(1) -> negTokenResp: GSS_SPNEGO_negTokenResp;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
type GSS_SPNEGO_negTokenResp = unit {
|
||||||
|
var accepted: bool;
|
||||||
|
var supportedMech: ASN1::ASN1Message;
|
||||||
|
|
||||||
|
# Parse the contained Sequence.
|
||||||
|
seq: ASN1::ASN1Message(True) {
|
||||||
|
for ( msg in $$.body.seq.submessages ) {
|
||||||
|
# https://www.rfc-editor.org/rfc/rfc4178#section-4.2.2
|
||||||
|
if ( msg.application_id == 0 ) {
|
||||||
|
self.accepted = msg.application_data == b"\x0a\x01\x00";
|
||||||
|
} else if ( msg.application_id == 1 ) {
|
||||||
|
self.supportedMech = msg;
|
||||||
|
} else if ( msg.application_id == 2 ) {
|
||||||
|
# ignore responseToken
|
||||||
|
} else if ( msg.application_id == 3 ) {
|
||||||
|
# ignore mechListMec
|
||||||
|
} else {
|
||||||
|
throw "unhandled NegTokenResp id %s" % msg.application_id;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch ( self?.supportedMech ) {
|
||||||
|
True -> supportedMechOid: ASN1::ASN1Message(False) &convert=$$.body.str_value;
|
||||||
|
* -> : void;
|
||||||
|
} &parse-from=self.supportedMech.application_data;
|
||||||
};
|
};
|
||||||
|
|
||||||
# TODO(fox-ds): A helper unit for requests for which no handling has been implemented.
|
# TODO(fox-ds): A helper unit for requests for which no handling has been implemented.
|
||||||
# Eventually all uses of this unit should be replaced with actual parsers so this unit can be removed.
|
# Eventually all uses of this unit should be replaced with actual parsers so this unit can be removed.
|
||||||
type NotImplemented = unit(inout message: Message) {
|
type NotImplemented = unit(inout message: Message) {
|
||||||
# Do nothing
|
: skip bytes &eod;
|
||||||
};
|
};
|
||||||
|
|
||||||
type BindRequest = unit(inout message: Message) {
|
type BindRequest = unit(inout message: Message, ctx: Ctx&) {
|
||||||
version: ASN1::ASN1Message(True) &convert=$$.body.num_value;
|
version: ASN1::ASN1Message(True) &convert=$$.body.num_value;
|
||||||
name: ASN1::ASN1Message(True) &convert=$$.body.str_value {
|
name: ASN1::ASN1Message(True) &convert=$$.body.str_value {
|
||||||
message.obj = self.name;
|
message.obj = self.name;
|
||||||
|
@ -323,15 +472,57 @@ type BindRequest = unit(inout message: Message) {
|
||||||
saslCreds: SaslCredentials() &parse-from=self.authData if ((self.authType == BindAuthType::BIND_AUTH_SASL) &&
|
saslCreds: SaslCredentials() &parse-from=self.authData if ((self.authType == BindAuthType::BIND_AUTH_SASL) &&
|
||||||
(|self.authData| > 0)) {
|
(|self.authData| > 0)) {
|
||||||
message.arg = self.saslCreds.mechanism;
|
message.arg = self.saslCreds.mechanism;
|
||||||
|
ctx.saslMechanism = self.saslCreds.mechanism;
|
||||||
}
|
}
|
||||||
} &requires=((self?.authType) && (self.authType != BindAuthType::Undef));
|
} &requires=(self?.authType && (self.authType != BindAuthType::Undef));
|
||||||
|
|
||||||
type BindResponse = unit(inout message: Message) {
|
type ServerSaslCreds = unit {
|
||||||
|
serverSaslCreds: ASN1::ASN1Header &requires=($$.tag.class == ASN1::ASN1Class::ContextSpecific && $$.tag.type_ == ASN1::ASN1Type(7));
|
||||||
|
payload: bytes &size=self.serverSaslCreds.len.len;
|
||||||
|
};
|
||||||
|
|
||||||
|
type BindResponse = unit(inout message: Message, ctx: Ctx&) {
|
||||||
: Result {
|
: Result {
|
||||||
message.result_ = $$;
|
message.result_ = $$;
|
||||||
|
|
||||||
|
# The SASL authentication was successful. We do not actually
|
||||||
|
# know if the following messages are encrypted or not. This may be
|
||||||
|
# mechanism and parameter specific. For example SCRAM-SHA512 or NTLM
|
||||||
|
# will continue to be cleartext, while SRP or GSS-API would be encrypted.
|
||||||
|
#
|
||||||
|
# Switch messageMode into trial mode which is explored via MessageDispatch
|
||||||
|
# and the MaybeEncrypted unit.
|
||||||
|
#
|
||||||
|
# Note, messageMode may be changed to something more specific like
|
||||||
|
# MS_KRB5 below.
|
||||||
|
if ( |ctx.saslMechanism| > 0 && $$.code == ResultCode::SUCCESS ) {
|
||||||
|
ctx.messageMode = MessageMode::MAYBE_ENCRYPTED;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
# TODO: if we want to parse SASL credentials returned
|
# Try to parse serverSaslCreds if there's any input remaining. This
|
||||||
|
# unit is parsed with &size, so &eod here works.
|
||||||
|
#
|
||||||
|
# Technically we should be able to tell from the ASN.1 structure
|
||||||
|
# if the serverSaslCreds field exists or not. But, not sure we can
|
||||||
|
# check if there's any bytes left at this point outside of passing
|
||||||
|
# in the length and playing with offset().
|
||||||
|
serverSaslCreds: ServerSaslCreds[] &eod;
|
||||||
|
|
||||||
|
# If the client requested GSS-SPNEGO, try to parse the server's response
|
||||||
|
# to switch message mode.
|
||||||
|
gss_spnego: GSS_SPNEGO_Subsequent &parse-from=self.serverSaslCreds[0].payload
|
||||||
|
if (ctx.saslMechanism == "GSS-SPNEGO" && |self.serverSaslCreds| > 0) {
|
||||||
|
|
||||||
|
if ( $$?.negTokenResp ) {
|
||||||
|
local token = $$.negTokenResp;
|
||||||
|
if ( token.accepted && token?.supportedMechOid ) {
|
||||||
|
if ( token.supportedMechOid == GSSAPI_MECH_MS_KRB5 ) {
|
||||||
|
ctx.messageMode = MessageMode::MS_KRB5;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
#-----------------------------------------------------------------------------
|
||||||
|
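The BindResponse handling above peeks into the server's GSS-SPNEGO negTokenResp to see whether the negotiation was accepted and which mechanism was selected, switching the message mode to the Kerberos wrap-token stripper when MS KRB5 is in play. The C++ sketch below restates just that decision over hypothetical, already-decoded fields; the real code walks the ASN.1 structure.

// A minimal sketch, assuming the negTokenResp has already been decoded into
// an accept flag and an optional supportedMech OID string.
#include <iostream>
#include <optional>
#include <string>

constexpr const char* GSSAPI_MECH_MS_KRB5 = "1.2.840.48018.1.2.2";

enum class MessageMode { MaybeEncrypted, MsKrb5 };

MessageMode NextMode(bool accepted, const std::optional<std::string>& supported_mech) {
    if ( accepted && supported_mech && *supported_mech == GSSAPI_MECH_MS_KRB5 )
        return MessageMode::MsKrb5;

    // Otherwise stay in the trial mode and let the 0x30 heuristic decide later.
    return MessageMode::MaybeEncrypted;
}

int main() {
    std::cout << int(NextMode(true, std::string(GSSAPI_MECH_MS_KRB5))) << "\n"; // 1
    std::cout << int(NextMode(true, std::nullopt)) << "\n";                     // 0
}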
@ -879,16 +1070,61 @@ type AbandonRequest = unit(inout message: Message) {
|
||||||
#-----------------------------------------------------------------------------
|
#-----------------------------------------------------------------------------
|
||||||
# Extended Operation
|
# Extended Operation
|
||||||
# https://tools.ietf.org/html/rfc4511#section-4.12
|
# https://tools.ietf.org/html/rfc4511#section-4.12
|
||||||
|
type ExtendedRequest = unit(inout message: Message, ctx: Ctx&) {
|
||||||
|
var requestValue: bytes;
|
||||||
|
header: ASN1::ASN1Header &requires=($$.tag.class == ASN1::ASN1Class::ContextSpecific);
|
||||||
|
requestName: bytes &size=self.header.len.len &convert=$$.decode(spicy::Charset::ASCII) {
|
||||||
|
message.obj = $$;
|
||||||
|
}
|
||||||
|
|
||||||
# TODO: implement ExtendedRequest
|
# If there's more byte to parse, it's the requestValue.
|
||||||
# type ExtendedRequest = unit(inout message: Message) {
|
: ASN1::ASN1Message(False)
|
||||||
#
|
&requires=($$.head.tag.class == ASN1::ASN1Class::ContextSpecific)
|
||||||
# };
|
if ( message.opLen > self.offset() ) {
|
||||||
|
|
||||||
# TODO: implement ExtendedResponse
|
self.requestValue = $$.application_data;
|
||||||
# type ExtendedResponse = unit(inout message: Message) {
|
}
|
||||||
#
|
|
||||||
# };
|
on %done {
|
||||||
|
# Did the client request StartTLS?
|
||||||
|
#
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc4511#section-4.14.1
|
||||||
|
if ( self.requestName == "1.3.6.1.4.1.1466.20037" )
|
||||||
|
ctx.startTlsRequested = True;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
type ExtendedResponseEntry = unit(inout r: ExtendedResponse) {
|
||||||
|
: ASN1::ASN1Message(False) &requires=($$.head.tag.class == ASN1::ASN1Class::ContextSpecific) {
|
||||||
|
if ( $$.head.tag.type_ == ASN1::ASN1Type(10) )
|
||||||
|
r.responseName = $$.application_data;
|
||||||
|
else if ( $$.head.tag.type_ == ASN1::ASN1Type(11) )
|
||||||
|
r.responseValue = $$.application_data;
|
||||||
|
else
|
||||||
|
throw "Unhandled extended response tag %s" % $$.head.tag;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
#-----------------------------------------------------------------------------
|
||||||
|
type ExtendedResponse = unit(inout message: Message, ctx: Ctx&) {
|
||||||
|
var responseName: bytes;
|
||||||
|
var responseValue: bytes;
|
||||||
|
: Result {
|
||||||
|
message.result_ = $$;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Try to parse two ASN1 entries if there are bytes left in the unit.
|
||||||
|
# Both are optional and identified by context specific tagging.
|
||||||
|
: ExtendedResponseEntry(self) if ( message.opLen > self.offset() );
|
||||||
|
: ExtendedResponseEntry(self) if ( message.opLen > self.offset() );
|
||||||
|
|
||||||
|
on %done {
|
||||||
|
# Client had requested StartTLS and it was successful? Switch to SSL.
|
||||||
|
if ( ctx.startTlsRequested && message.result_.code == ResultCode::SUCCESS )
|
||||||
|
ctx.messageMode = MessageMode::TLS;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
#-----------------------------------------------------------------------------
|
#-----------------------------------------------------------------------------
|
||||||
# IntermediateResponse Message
|
# IntermediateResponse Message
|
||||||
|
@ -899,6 +1135,6 @@ type AbandonRequest = unit(inout message: Message) {
|
||||||
#
|
#
|
||||||
# };
|
# };
|
||||||
|
|
||||||
-on LDAP::MessageWrapper::%done {
+on LDAP::Message::%done {
     spicy::accept_input();
 }
src/analyzer/protocol/ldap/ldap_zeek.spicy (new file, 12 lines)

@@ -0,0 +1,12 @@
+module LDAP_Zeek;
+
+import LDAP;
+import zeek;
+
+on LDAP::TlsForward::%init {
+    zeek::protocol_begin("SSL");
+}
+
+on LDAP::TlsForward::chunk {
+    zeek::protocol_data_in(zeek::is_orig(), self.chunk);
+}
@@ -413,7 +413,7 @@ type SMB2_error_response(header: SMB2_Header) = record {
     byte_count : uint32;
     # This is implemented incorrectly and is disabled for now.
     #error_data : SMB2_error_data(header, byte_count);
-    stuff      : bytestring &restofdata &transient;
+    stuff      : bytestring &length=byte_count &transient;
 } &byteorder = littleendian;
 
 type SMB2_logoff_request(header: SMB2_Header) = record {
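The SMB2 change above bounds the error-response blob by the byte count announced in the header instead of consuming the rest of the data. The short C++ sketch below illustrates the difference with a generic buffer; it is not BinPAC and the helper name is an assumption.

// Generic sketch: consume exactly the number of bytes the header promised,
// failing loudly on truncation, rather than swallowing whatever remains.
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <vector>

std::vector<uint8_t> TakeExactly(const std::vector<uint8_t>& buf, size_t offset, size_t byte_count) {
    if ( offset > buf.size() || byte_count > buf.size() - offset )
        throw std::runtime_error("truncated record"); // the &length analogue

    // A "rest of data" read would instead return buf.begin()+offset .. buf.end(),
    // silently including bytes that belong to the next PDU in the same segment.
    return {buf.begin() + offset, buf.begin() + offset + byte_count};
}

int main() {
    std::vector<uint8_t> segment = {1, 2, 3, 4, 5, 6, 7, 8};
    std::cout << TakeExactly(segment, 2, 3).size() << "\n"; // 3
}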
@@ -7,7 +7,7 @@ import spicy;
 public type Message = unit {
     switch {
         -> prio: Priority;
-        -> void;
+        -> : void;
     };
 
     msg: bytes &eod;
@@ -273,7 +273,12 @@ void TCP_Reassembler::MatchUndelivered(uint64_t up_to_seq, bool use_last_upper)
         if ( b.upper > last_reassem_seq )
             break;
 
-        tcp_analyzer->Conn()->Match(zeek::detail::Rule::PAYLOAD, b.block, b.Size(), false, false, IsOrig(), false);
+        // Note: Even though this passes bol=false, at the point where
+        // this code runs, the matcher is re-initialized resulting in
+        // undelivered data implicitly being bol-anchored. It's unclear
+        // if that was intended, but there's hardly a right way here,
+        // so that seems ok.
+        tcp_analyzer->Conn()->Match(zeek::detail::Rule::PAYLOAD, b.block, b.Size(), IsOrig(), false, false, false);
     }
 }
|
|
||||||
|
|
|
@ -6,6 +6,7 @@
|
||||||
#include <glob.h>
|
#include <glob.h>
|
||||||
|
|
||||||
#include <exception>
|
#include <exception>
|
||||||
|
#include <iterator>
|
||||||
#include <limits>
|
#include <limits>
|
||||||
#include <utility>
|
#include <utility>
|
||||||
|
|
||||||
|
@ -32,6 +33,7 @@
|
||||||
#include "zeek/spicy/file-analyzer.h"
|
#include "zeek/spicy/file-analyzer.h"
|
||||||
#include "zeek/spicy/packet-analyzer.h"
|
#include "zeek/spicy/packet-analyzer.h"
|
||||||
#include "zeek/spicy/protocol-analyzer.h"
|
#include "zeek/spicy/protocol-analyzer.h"
|
||||||
|
#include "zeek/spicy/runtime-support.h"
|
||||||
#include "zeek/zeek-config-paths.h"
|
#include "zeek/zeek-config-paths.h"
|
||||||
|
|
||||||
using namespace zeek;
|
using namespace zeek;
|
||||||
|
@ -74,9 +76,13 @@ void Manager::registerProtocolAnalyzer(const std::string& name, hilti::rt::Proto
|
||||||
info.name_zeek = hilti::rt::replace(name, "::", "_");
|
info.name_zeek = hilti::rt::replace(name, "::", "_");
|
||||||
info.name_zeekygen = hilti::rt::fmt("<Spicy-%s>", name);
|
info.name_zeekygen = hilti::rt::fmt("<Spicy-%s>", name);
|
||||||
info.protocol = proto;
|
info.protocol = proto;
|
||||||
info.ports = ports;
|
|
||||||
info.linker_scope = linker_scope;
|
info.linker_scope = linker_scope;
|
||||||
|
|
||||||
|
// Store ports in a deterministic order. We can't (easily) sort the
|
||||||
|
// `hilti::rt::Vector` unfortunately.
|
||||||
|
std::copy(ports.begin(), ports.end(), std::back_inserter(info.ports));
|
||||||
|
std::sort(info.ports.begin(), info.ports.end());
|
||||||
|
|
||||||
// We may have that analyzer already iff it was previously pre-registered
|
// We may have that analyzer already iff it was previously pre-registered
|
||||||
// without a linker scope. We'll then only set the scope now.
|
// without a linker scope. We'll then only set the scope now.
|
||||||
if ( auto t = _analyzer_name_to_tag_type.find(info.name_zeek); t != _analyzer_name_to_tag_type.end() ) {
|
if ( auto t = _analyzer_name_to_tag_type.find(info.name_zeek); t != _analyzer_name_to_tag_type.end() ) {
|
||||||
|
@ -701,14 +707,25 @@ void Manager::InitPostScript() {
|
||||||
if ( ! tag )
|
if ( ! tag )
|
||||||
reporter->InternalError("cannot get analyzer tag for '%s'", p.name_analyzer.c_str());
|
reporter->InternalError("cannot get analyzer tag for '%s'", p.name_analyzer.c_str());
|
||||||
|
|
||||||
|
auto register_analyzer_for_port = [&](auto tag, const hilti::rt::Port& port_) {
|
||||||
|
SPICY_DEBUG(hilti::rt::fmt(" Scheduling analyzer for port %s", port_));
|
||||||
|
|
||||||
|
// Well-known ports are registered in scriptland, so we'll raise an
|
||||||
|
// event that will do it for us through a predefined handler.
|
||||||
|
zeek::Args vals = Args();
|
||||||
|
vals.emplace_back(tag.AsVal());
|
||||||
|
vals.emplace_back(zeek::spicy::rt::to_val(port_, base_type(TYPE_PORT)));
|
||||||
|
EventHandlerPtr handler = event_registry->Register("spicy_analyzer_for_port");
|
||||||
|
event_mgr.Enqueue(handler, vals);
|
||||||
|
};
|
||||||
|
|
||||||
for ( const auto& ports : p.ports ) {
|
for ( const auto& ports : p.ports ) {
|
||||||
const auto proto = ports.begin.protocol();
|
const auto proto = ports.begin.protocol();
|
||||||
|
|
||||||
// Port ranges are closed intervals.
|
// Port ranges are closed intervals.
|
||||||
for ( auto port = ports.begin.port(); port <= ports.end.port(); ++port ) {
|
for ( auto port = ports.begin.port(); port <= ports.end.port(); ++port ) {
|
||||||
const auto port_ = hilti::rt::Port(port, proto);
|
const auto port_ = hilti::rt::Port(port, proto);
|
||||||
SPICY_DEBUG(hilti::rt::fmt(" Scheduling analyzer for port %s", port_));
|
register_analyzer_for_port(tag, port_);
|
||||||
analyzer_mgr->RegisterAnalyzerForPort(tag, transport_protocol(port_), port);
|
|
||||||
|
|
||||||
// Don't double register in case of single-port ranges.
|
// Don't double register in case of single-port ranges.
|
||||||
if ( ports.begin.port() == ports.end.port() )
|
if ( ports.begin.port() == ports.end.port() )
|
||||||
|
@ -727,7 +744,7 @@ void Manager::InitPostScript() {
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
SPICY_DEBUG(hilti::rt::fmt(" Scheduling analyzer for port %s", port.port));
|
SPICY_DEBUG(hilti::rt::fmt(" Scheduling analyzer for port %s", port.port));
|
||||||
analyzer_mgr->RegisterAnalyzerForPort(tag, transport_protocol(port.port), port.port.port());
|
register_analyzer_for_port(tag, port.port);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -85,7 +85,7 @@ public:
      *
      * @param name name of the analyzer as defined in its EVT file
      * @param proto analyzer's transport-layer protocol
-     * @param prts well-known ports for the analyzer; it'll be activated automatically for these
+     * @param ports well-known ports for the analyzer; it'll be activated automatically for these
      * @param parser_orig name of the Spicy parser for the originator side; must match the name that
      * Spicy registers the unit's parser with
      * @param parser_resp name of the Spicy parser for the originator side; must match the name that
@@ -343,7 +343,7 @@ private:
     std::string name_parser_resp;
     std::string name_replaces;
     hilti::rt::Protocol protocol = hilti::rt::Protocol::Undef;
-    hilti::rt::Vector<::zeek::spicy::rt::PortRange> ports;
+    std::vector<::zeek::spicy::rt::PortRange> ports; // we keep this sorted
     std::string linker_scope;

     // Computed and available once the analyzer has been registered.
@@ -19,6 +19,11 @@ struct PortRange {

     hilti::rt::Port begin; /**< first port in the range */
     hilti::rt::Port end;   /**< last port in the range */
+
+    bool operator<(const PortRange& other) const {
+        // Just get us a deterministic order.
+        return std::tie(begin, end) < std::tie(other.begin, other.end);
+    }
 };

 inline bool operator==(const PortRange& a, const PortRange& b) {
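Editor's note: the new operator< only provides a deterministic ordering so that the plugin can keep its std::vector<PortRange> sorted (see the "we keep this sorted" comment above). A minimal sketch with stand-in types, assuming nothing beyond the standard library:

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <tuple>
#include <vector>

struct PortRange {
    uint16_t begin = 0;
    uint16_t end = 0;

    bool operator<(const PortRange& other) const {
        // Lexicographic order on (begin, end) -- deterministic, nothing more.
        return std::tie(begin, end) < std::tie(other.begin, other.end);
    }
};

int main() {
    std::vector<PortRange> ports = {{31337, 31340}, {11337, 11340}, {31337, 31340}};

    std::sort(ports.begin(), ports.end());
    ports.erase(std::unique(ports.begin(), ports.end(),
                            [](const PortRange& a, const PortRange& b) {
                                return a.begin == b.begin && a.end == b.end;
                            }),
                ports.end());

    for ( const auto& r : ports )
        std::cout << r.begin << "-" << r.end << "\n"; // 11337-11340, 31337-31340
}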
@@ -4,10 +4,8 @@
 #include <getopt.h>

-#include <algorithm>
 #include <memory>
 #include <string>
-#include <type_traits>
 #include <utility>
 #include <vector>

@@ -42,11 +40,10 @@ struct VisitorTypes : public spicy::visitor::PreOrder {
             module = {};
             return;
         }

         module = n->scopeID();
         path = n->uid().path;

-        if ( is_resolved )
+        if ( is_resolved && ! n->skipImplementation() )
             glue->addSpicyModule(module, path);
     }

@@ -1375,7 +1375,7 @@ bool GlueCompiler::CreateSpicyHook(glue::Event* ev) {
     auto attrs = builder()->attributeSet({builder()->attribute("&priority", builder()->integer(ev->priority))});
     auto parameters = hilti::util::transform(ev->parameters, [](const auto& p) { return p.get(); });
-    auto unit_hook = builder()->declarationHook(parameters, body.block(), ::spicy::Engine::All, attrs, meta);
+    auto unit_hook = builder()->declarationHook(parameters, body.block(), attrs, meta);
     auto hook_decl = builder()->declarationUnitHook(ev->hook, unit_hook, meta);
     ev->spicy_module->spicy_module->add(context(), hook_decl);

@@ -9,6 +9,7 @@ zeek_add_subdir_library(
     ProcessStats.cc
     Utils.cc
     BIFS
+    consts.bif
     telemetry.bif)

 # We don't need to include the civetweb headers across the whole project, only
@@ -6,6 +6,7 @@

 // CivetServer is from the civetweb submodule in prometheus-cpp
 #include <CivetServer.h>
+#include <prometheus/collectable.h>
 #include <prometheus/exposer.h>
 #include <prometheus/registry.h>
 #include <rapidjson/document.h>
@@ -16,19 +17,32 @@

 #include "zeek/3rdparty/doctest.h"
 #include "zeek/ID.h"
+#include "zeek/RunState.h"
 #include "zeek/ZeekString.h"
 #include "zeek/broker/Manager.h"
+#include "zeek/iosource/Manager.h"
 #include "zeek/telemetry/ProcessStats.h"
 #include "zeek/telemetry/Timer.h"
+#include "zeek/telemetry/consts.bif.h"
 #include "zeek/telemetry/telemetry.bif.h"
 #include "zeek/threading/formatters/detail/json.h"

 namespace zeek::telemetry {

-Manager::Manager() { prometheus_registry = std::make_shared<prometheus::Registry>(); }
+/**
+ * Prometheus Collectable interface used to insert Zeek callback processing
+ * before the Prometheus registry's collection of metric data.
+ */
+class ZeekCollectable : public prometheus::Collectable {
+public:
+    std::vector<prometheus::MetricFamily> Collect() const override {
+        telemetry_mgr->WaitForPrometheusCallbacks();
+        return {};
+    }
+};
+
+Manager::Manager() : IOSource(true) { prometheus_registry = std::make_shared<prometheus::Registry>(); }

+// This can't be defined as =default because of the use of unique_ptr with a forward-declared type
+// in Manager.h
 Manager::~Manager() {}

 void Manager::InitPostScript() {
@@ -75,7 +89,9 @@ void Manager::InitPostScript() {

     if ( ! getenv("ZEEKCTL_CHECK_CONFIG") ) {
         try {
-            prometheus_exposer = std::make_unique<prometheus::Exposer>(prometheus_url, 2, callbacks);
+            prometheus_exposer =
+                std::make_unique<prometheus::Exposer>(prometheus_url, BifConst::Telemetry::civetweb_threads,
+                                                      callbacks);

             // CivetWeb stores a copy of the callbacks, so we're safe to delete the pointer here
             delete callbacks;
@@ -84,6 +100,13 @@ void Manager::InitPostScript() {
                          prometheus_url.c_str());
         }

+        // This has to be inserted before the registry below. The exposer
+        // processes the collectors in order of insertion. We want to make
+        // sure that the callbacks get called and the values in the metrics
+        // are updated before prometheus-cpp scrapes them.
+        zeek_collectable = std::make_shared<ZeekCollectable>();
+        prometheus_exposer->RegisterCollectable(zeek_collectable);
+
         prometheus_exposer->RegisterCollectable(prometheus_registry);
     }
 }
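Editor's note: the comment about insertion order is the key to this hunk -- the Zeek collectable is registered before the registry so that its Collect() runs first on each scrape. A hedged sketch of the same pattern in plain prometheus-cpp, with an illustrative RefreshHook class and bind address that are not Zeek's code:

#include <memory>
#include <vector>

#include <prometheus/collectable.h>
#include <prometheus/exposer.h>
#include <prometheus/metric_family.h>
#include <prometheus/registry.h>

class RefreshHook : public prometheus::Collectable {
public:
    // Returns no metrics of its own; it only gives the application a chance
    // to update the values stored in the registry before they are scraped.
    std::vector<prometheus::MetricFamily> Collect() const override {
        // ... e.g. signal the main loop and wait for it to refresh gauges ...
        return {};
    }
};

int main() {
    auto registry = std::make_shared<prometheus::Registry>();
    auto hook = std::make_shared<RefreshHook>();

    prometheus::Exposer exposer{"127.0.0.1:9091", /*num_threads=*/2};

    // Order matters if the exposer walks its collectables in insertion order,
    // which is what the comment in the patch relies on.
    exposer.RegisterCollectable(hook);
    exposer.RegisterCollectable(registry);

    // ... run the application's event loop ...
}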
@@ -130,6 +153,21 @@ void Manager::InitPostScript() {
         return metric;
     });
 #endif
+
+    iosource_mgr->RegisterFd(collector_flare.FD(), this);
+}
+
+void Manager::Terminate() {
+    // Notify the collector condition so that it doesn't hang waiting for
+    // a collector request to complete.
+    collector_cv.notify_all();
+
+    // Shut down the exposer first of all so we stop getting requests for
+    // data. This keeps us from getting a request on another thread while
+    // we're shutting down.
+    prometheus_exposer.reset();
+
+    iosource_mgr->UnregisterFd(collector_flare.FD(), this);
 }

 // -- collect metric stuff -----------------------------------------------------
@@ -545,6 +583,39 @@ HistogramPtr Manager::HistogramInstance(std::string_view prefix, std::string_vie
     return HistogramInstance(prefix, name, lbls, bounds_span, helptext, unit);
 }

+void Manager::ProcessFd(int fd, int flags) {
+    std::unique_lock<std::mutex> lk(collector_cv_mtx);
+
+    collector_flare.Extinguish();
+
+    prometheus_registry->UpdateViaCallbacks();
+    collector_response_idx = collector_request_idx;
+
+    lk.unlock();
+    collector_cv.notify_all();
+}
+
+void Manager::WaitForPrometheusCallbacks() {
+    std::unique_lock<std::mutex> lk(collector_cv_mtx);
+
+    ++collector_request_idx;
+    uint64_t expected_idx = collector_request_idx;
+    collector_flare.Fire();
+
+    // It should *not* take 5 seconds to go through all of the callbacks, but
+    // set this to have a timeout anyways just to avoid a deadlock.
+    bool res = collector_cv.wait_for(lk,
+                                     std::chrono::microseconds(
+                                         static_cast<long>(BifConst::Telemetry::callback_timeout * 1000000)),
+                                     [expected_idx]() {
+                                         return telemetry_mgr->collector_response_idx >= expected_idx ||
+                                                zeek::run_state::terminating;
+                                     });
+
+    if ( ! res )
+        fprintf(stderr, "Timeout waiting for prometheus callbacks\n");
+}
+
 } // namespace zeek::telemetry

 // -- unit tests ---------------------------------------------------------------
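Editor's note: ProcessFd() and WaitForPrometheusCallbacks() form a cross-thread handshake -- the scraper thread fires the flare and waits, the main loop does the callback processing and bumps the response index. A self-contained sketch of that handshake using only the standard library (the Flare/IOSource plumbing is replaced by a condition variable here; names and the 5-second timeout are illustrative):

#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <cstdio>
#include <mutex>
#include <thread>

std::mutex mtx;
std::condition_variable cv;  // stands in for both the flare and collector_cv
uint64_t request_idx = 0;    // only modified under mtx
uint64_t response_idx = 0;
bool work_pending = false;

// Called on a scraper thread: ask the main loop to refresh metrics and wait.
void wait_for_refresh() {
    std::unique_lock<std::mutex> lk(mtx);
    uint64_t expected = ++request_idx;
    work_pending = true;
    cv.notify_all(); // "fire the flare"

    bool ok = cv.wait_for(lk, std::chrono::seconds(5),
                          [&] { return response_idx >= expected; });
    if ( ! ok )
        std::fprintf(stderr, "timeout waiting for refresh\n");
}

// Called on the main-loop thread whenever it wakes up.
void process_request() {
    std::unique_lock<std::mutex> lk(mtx);
    cv.wait(lk, [] { return work_pending; });
    // ... update metric values here ...
    work_pending = false;
    response_idx = request_idx;
    lk.unlock();
    cv.notify_all();
}

int main() {
    std::thread main_loop(process_request);
    std::thread scraper(wait_for_refresh);
    scraper.join();
    main_loop.join();
}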
@@ -9,8 +9,10 @@
 #include <string_view>
 #include <vector>

+#include "zeek/Flare.h"
 #include "zeek/IntrusivePtr.h"
 #include "zeek/Span.h"
+#include "zeek/iosource/IOSource.h"
 #include "zeek/telemetry/Counter.h"
 #include "zeek/telemetry/Gauge.h"
 #include "zeek/telemetry/Histogram.h"
@@ -29,15 +31,16 @@ class Registry;

 namespace zeek::telemetry {

+class ZeekCollectable;
+
 /**
  * Manages a collection of metric families.
  */
-class Manager final {
+class Manager final : public iosource::IOSource {
 public:
     Manager();

     Manager(const Manager&) = delete;

     Manager& operator=(const Manager&) = delete;

     ~Manager();
@@ -50,6 +53,8 @@ public:
      */
     void InitPostScript();

+    void Terminate();
+
     /**
      * @return A VectorVal containing all counter and gauge metrics and their values matching prefix and name.
      * @param prefix The prefix pattern to use for filtering. Supports globbing.
@@ -88,8 +93,8 @@ public:
      * @param labels Values for all label dimensions of the metric.
      * @param helptext Short explanation of the metric.
      * @param unit Unit of measurement.
-     * @param callback Passing a callback method will enable asynchronous mode. The callback method will be called by
-     * the metrics subsystem whenever data is requested.
+     * @param callback Passing a callback method will enable asynchronous mode. The callback method will be called
+     * by the metrics subsystem whenever data is requested.
      */
     CounterPtr CounterInstance(std::string_view prefix, std::string_view name, Span<const LabelView> labels,
                                std::string_view helptext, std::string_view unit = "",
@@ -124,8 +129,8 @@ public:
      * @param labels Values for all label dimensions of the metric.
      * @param helptext Short explanation of the metric.
      * @param unit Unit of measurement.
-     * @param callback Passing a callback method will enable asynchronous mode. The callback method will be called by
-     * the metrics subsystem whenever data is requested.
+     * @param callback Passing a callback method will enable asynchronous mode. The callback method will be called
+     * by the metrics subsystem whenever data is requested.
      */
     GaugePtr GaugeInstance(std::string_view prefix, std::string_view name, Span<const LabelView> labels,
                            std::string_view helptext, std::string_view unit = "",
@@ -212,6 +217,12 @@ public:
      */
     std::shared_ptr<prometheus::Registry> GetRegistry() const { return prometheus_registry; }

+    // IOSource interface
+    double GetNextTimeout() override { return -1.0; }
+    void Process() override {}
+    const char* Tag() override { return "Telemetry::Manager"; }
+    void ProcessFd(int fd, int flags) override;
+
 protected:
     template<class F>
     static auto WithLabelNames(Span<const LabelView> xs, F continuation) {
@@ -231,6 +242,15 @@ protected:
         }
     }

+    friend class ZeekCollectable;
+
+    /**
+     * Fires the flare for prometheus-cpp callback handling and waits for it to complete.
+     * This can be called from other threads to ensure the callback handling happens on
+     * the main thread.
+     */
+    void WaitForPrometheusCallbacks();
+
 private:
     RecordValPtr GetMetricOptsRecord(const prometheus::MetricFamily& metric_family);
     void BuildClusterJson();
@@ -250,6 +270,14 @@ private:
     std::unique_ptr<prometheus::Exposer> prometheus_exposer;

     std::string cluster_json;
+
+    std::shared_ptr<ZeekCollectable> zeek_collectable;
+    zeek::detail::Flare collector_flare;
+    std::condition_variable collector_cv;
+    std::mutex collector_cv_mtx;
+    // Only modified under collector_cv_mtx!
+    uint64_t collector_request_idx = 0;
+    uint64_t collector_response_idx = 0;
 };

 } // namespace zeek::telemetry
src/telemetry/consts.bif (new file, 2 lines)
@@ -0,0 +1,2 @@
+const Telemetry::callback_timeout: interval;
+const Telemetry::civetweb_threads: count;
@@ -376,6 +376,7 @@ static void terminate_zeek() {
     input_mgr->Terminate();
     thread_mgr->Terminate();
     broker_mgr->Terminate();
+    telemetry_mgr->Terminate();

     event_mgr.Drain();

@@ -716,6 +717,7 @@ SetupResult setup(int argc, char** argv, Options* zopts) {
     // when that variable is defined.
     auto early_shutdown = [] {
         broker_mgr->Terminate();
+        telemetry_mgr->Terminate();
         delete iosource_mgr;
         delete telemetry_mgr;
     };
@@ -22,5 +22,6 @@ warning in <params>, line 1: event handler never invoked: SupervisorControl::res
 warning in <params>, line 1: event handler never invoked: SupervisorControl::status_request
 warning in <params>, line 1: event handler never invoked: SupervisorControl::stop_request
 warning in <params>, line 1: event handler never invoked: spicy_analyzer_for_mime_type
+warning in <params>, line 1: event handler never invoked: spicy_analyzer_for_port
 warning in <params>, line 1: event handler never invoked: terminate_event
 warning in <params>, line 1: event handler never invoked: this_is_never_used
@@ -146,6 +146,7 @@ scripts/base/init-frameworks-and-bifs.zeek
 scripts/base/frameworks/files/magic/__load__.zeek
 scripts/base/frameworks/telemetry/options.zeek
 build/scripts/base/bif/__load__.zeek
+build/scripts/base/bif/consts.bif.zeek
 build/scripts/base/bif/telemetry.bif.zeek
 build/scripts/base/bif/zeekygen.bif.zeek
 build/scripts/base/bif/pcap.bif.zeek

@@ -146,6 +146,7 @@ scripts/base/init-frameworks-and-bifs.zeek
 scripts/base/frameworks/files/magic/__load__.zeek
 scripts/base/frameworks/telemetry/options.zeek
 build/scripts/base/bif/__load__.zeek
+build/scripts/base/bif/consts.bif.zeek
 build/scripts/base/bif/telemetry.bif.zeek
 build/scripts/base/bif/zeekygen.bif.zeek
 build/scripts/base/bif/pcap.bif.zeek
@@ -464,6 +464,7 @@
 0.000000 MetaHookPost LoadFile(0, ./comm.bif.zeek, <...>/comm.bif.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, ./communityid.bif.zeek, <...>/communityid.bif.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, ./const.bif.zeek, <...>/const.bif.zeek) -> -1
+0.000000 MetaHookPost LoadFile(0, ./consts.bif.zeek, <...>/consts.bif.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, ./contents, <...>/contents.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, ./control, <...>/control.zeek) -> -1
 0.000000 MetaHookPost LoadFile(0, ./data.bif.zeek, <...>/data.bif.zeek) -> -1
@@ -758,6 +759,7 @@
 0.000000 MetaHookPost LoadFileExtended(0, ./comm.bif.zeek, <...>/comm.bif.zeek) -> (-1, <no content>)
 0.000000 MetaHookPost LoadFileExtended(0, ./communityid.bif.zeek, <...>/communityid.bif.zeek) -> (-1, <no content>)
 0.000000 MetaHookPost LoadFileExtended(0, ./const.bif.zeek, <...>/const.bif.zeek) -> (-1, <no content>)
+0.000000 MetaHookPost LoadFileExtended(0, ./consts.bif.zeek, <...>/consts.bif.zeek) -> (-1, <no content>)
 0.000000 MetaHookPost LoadFileExtended(0, ./contents, <...>/contents.zeek) -> (-1, <no content>)
 0.000000 MetaHookPost LoadFileExtended(0, ./control, <...>/control.zeek) -> (-1, <no content>)
 0.000000 MetaHookPost LoadFileExtended(0, ./data.bif.zeek, <...>/data.bif.zeek) -> (-1, <no content>)
@@ -1384,6 +1386,7 @@
 0.000000 MetaHookPre LoadFile(0, ./comm.bif.zeek, <...>/comm.bif.zeek)
 0.000000 MetaHookPre LoadFile(0, ./communityid.bif.zeek, <...>/communityid.bif.zeek)
 0.000000 MetaHookPre LoadFile(0, ./const.bif.zeek, <...>/const.bif.zeek)
+0.000000 MetaHookPre LoadFile(0, ./consts.bif.zeek, <...>/consts.bif.zeek)
 0.000000 MetaHookPre LoadFile(0, ./contents, <...>/contents.zeek)
 0.000000 MetaHookPre LoadFile(0, ./control, <...>/control.zeek)
 0.000000 MetaHookPre LoadFile(0, ./data.bif.zeek, <...>/data.bif.zeek)
@@ -1678,6 +1681,7 @@
 0.000000 MetaHookPre LoadFileExtended(0, ./comm.bif.zeek, <...>/comm.bif.zeek)
 0.000000 MetaHookPre LoadFileExtended(0, ./communityid.bif.zeek, <...>/communityid.bif.zeek)
 0.000000 MetaHookPre LoadFileExtended(0, ./const.bif.zeek, <...>/const.bif.zeek)
+0.000000 MetaHookPre LoadFileExtended(0, ./consts.bif.zeek, <...>/consts.bif.zeek)
 0.000000 MetaHookPre LoadFileExtended(0, ./contents, <...>/contents.zeek)
 0.000000 MetaHookPre LoadFileExtended(0, ./control, <...>/control.zeek)
 0.000000 MetaHookPre LoadFileExtended(0, ./data.bif.zeek, <...>/data.bif.zeek)
@@ -2305,6 +2309,7 @@
 0.000000 | HookLoadFile ./comm.bif.zeek <...>/comm.bif.zeek
 0.000000 | HookLoadFile ./communityid.bif.zeek <...>/communityid.bif.zeek
 0.000000 | HookLoadFile ./const.bif.zeek <...>/const.bif.zeek
+0.000000 | HookLoadFile ./consts.bif.zeek <...>/consts.bif.zeek
 0.000000 | HookLoadFile ./contents <...>/contents.zeek
 0.000000 | HookLoadFile ./control <...>/control.zeek
 0.000000 | HookLoadFile ./data.bif.zeek <...>/data.bif.zeek
@@ -2599,6 +2604,7 @@
 0.000000 | HookLoadFileExtended ./comm.bif.zeek <...>/comm.bif.zeek
 0.000000 | HookLoadFileExtended ./communityid.bif.zeek <...>/communityid.bif.zeek
 0.000000 | HookLoadFileExtended ./const.bif.zeek <...>/const.bif.zeek
+0.000000 | HookLoadFileExtended ./consts.bif.zeek <...>/consts.bif.zeek
 0.000000 | HookLoadFileExtended ./contents <...>/contents.zeek
 0.000000 | HookLoadFileExtended ./control <...>/control.zeek
 0.000000 | HookLoadFileExtended ./data.bif.zeek <...>/data.bif.zeek
@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path http
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p trans_depth method host uri referrer version user_agent origin request_body_len response_body_len status_code status_msg info_code info_msg tags username password proxied orig_fuids orig_filenames orig_mime_types resp_fuids resp_filenames resp_mime_types
+#types time string addr port addr port count string string string string string string string count count count string count string set[enum] string string set[string] vector[string] vector[string] vector[string] vector[string] vector[string] vector[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 172.24.133.205 43090 172.24.133.205 8000 1 GET 172.24.133.205:8000 / - 1.0 python-requests/2.31.0 - 0 643 200 OK - - (empty) test 1:34 - - - - FM4Ls72L4REzbA61lg - text/html
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
+#types time string addr port addr port enum string interval count count string count string count count count count set[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 46160 127.0.1.1 389 tcp ldap_tcp 3.537413 536 42 SF 0 ShADadFf 11 1116 6 362 -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,14 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id version opcode result diagnostic_message object argument
+#types time string addr port addr port int int string string string string string
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 46160 127.0.1.1 389 1 3 bind simple success - cn=admin,dc=example,dc=com REDACTED
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 46160 127.0.1.1 389 2 - add success - - -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 46160 127.0.1.1 389 3 - add success - - -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 46160 127.0.1.1 389 4 - unbind - - - -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
+#types time string addr port addr port enum string interval count count string count string count count count count set[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 60126 127.0.1.1 389 tcp ldap_tcp 2.290081 289 1509 SF 0 ShADadFf 12 921 15 2297 -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,13 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id version opcode result diagnostic_message object argument
+#types time string addr port addr port int int string string string string string
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 60126 127.0.1.1 389 1 3 bind SASL SASL bind in progress SASL(0): successful result: - NTLM
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 60126 127.0.1.1 389 2 3 bind SASL success - - NTLM
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 60126 127.0.1.1 389 4 - unbind - - - -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap_search
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id scope deref_aliases base_object result_count result diagnostic_message filter attributes
+#types time string addr port addr port int string string string count string string string vector[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 60126 127.0.1.1 389 3 tree never dc=example,dc=com 9 success - (objectclass=*) -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
+#types time string addr port addr port enum string interval count count string count string count count count count set[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 59552 127.0.1.1 389 tcp ldap_tcp 2.231680 353 1772 SF 0 ShADadFf 11 933 15 2560 -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,13 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id version opcode result diagnostic_message object argument
+#types time string addr port addr port int int string string string string string
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 59552 127.0.1.1 389 1 3 bind SASL SASL bind in progress SASL(0): successful result: user: sasladmin@slapd.ldap property: slapAuthzDN not found in sasldb - SCRAM-SHA-512
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 59552 127.0.1.1 389 2 3 bind SASL success - - SCRAM-SHA-512
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 59552 127.0.1.1 389 4 - unbind - - - -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap_search
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id scope deref_aliases base_object result_count result diagnostic_message filter attributes
+#types time string addr port addr port int string string string count string string string vector[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 59552 127.0.1.1 389 3 tree never dc=example,dc=com 9 success - (objectclass=*) -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
+#types time string addr port addr port enum string interval count count string count string count count count count set[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 192.168.10.138 63815 192.168.10.186 389 tcp ldap_tcp 0.033404 3046 90400 RSTR 0 ShADdar 14 1733 68 93132 -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,12 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id version opcode result diagnostic_message object argument
+#types time string addr port addr port int int string string string string string
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 192.168.10.138 63815 192.168.10.186 389 3 3 bind SASL success - - GSS-SPNEGO
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 192.168.10.138 63815 192.168.10.186 389 9 - unbind - - - -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,14 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap_search
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id scope deref_aliases base_object result_count result diagnostic_message filter attributes
+#types time string addr port addr port int string string string count string string string vector[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 192.168.10.138 63815 192.168.10.186 389 1 base never - 1 success - (objectclass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 192.168.10.138 63815 192.168.10.186 389 4 base never - 1 success - (objectClass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 192.168.10.138 63815 192.168.10.186 389 6 single never CN=Schema,CN=Configuration,DC=matrix,DC=local 424 success - (&(!(isdefunct=TRUE))(|(|(|(|(|(attributeSyntax=2.5.5.17)(attributeSyntax=2.5.5.10))(attributeSyntax=2.5.5.15))(attributeSyntax=2.5.5.1))(attributeSyntax=2.5.5.7))(attributeSyntax=2.5.5.14))) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 192.168.10.138 63815 192.168.10.186 389 8 tree never DC=matrix,DC=local 1 success - (samaccountname=krbtgt) -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,13 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
+#types time string addr port addr port enum string interval count count string count string count count count count set[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 tcp ldap_tcp 63.273503 3963 400107 OTH 0 Dd 12 2595 282 411387 -
+XXXXXXXXXX.XXXXXX ClEkJM2Vm5giqnMf4h 10.199.2.121 59355 10.199.2.111 389 tcp ldap_tcp 0.007979 2630 3327 OTH 0 Dd 6 990 6 3567 -
+XXXXXXXXXX.XXXXXX C4J4Th3PJpwUYZZ6gc 10.199.2.121 59356 10.199.2.111 389 tcp ldap_tcp 0.001925 2183 3436 OTH 0 Dd 4 463 5 3636 -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,15 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id version opcode result diagnostic_message object argument
+#types time string addr port addr port int int string string string string string
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 3 3 bind SASL success - - GSS-SPNEGO
+XXXXXXXXXX.XXXXXX ClEkJM2Vm5giqnMf4h 10.199.2.121 59355 10.199.2.111 389 3 3 bind SASL success - - GSS-SPNEGO
+XXXXXXXXXX.XXXXXX C4J4Th3PJpwUYZZ6gc 10.199.2.121 59356 10.199.2.111 389 9 3 bind SASL success - - GSS-SPNEGO
+XXXXXXXXXX.XXXXXX C4J4Th3PJpwUYZZ6gc 10.199.2.121 59356 10.199.2.111 389 12 - unbind - - - -
+XXXXXXXXXX.XXXXXX ClEkJM2Vm5giqnMf4h 10.199.2.121 59355 10.199.2.111 389 13 - unbind - - - -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,27 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap_search
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id scope deref_aliases base_object result_count result diagnostic_message filter attributes
+#types time string addr port addr port int string string string count string string string vector[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 1 base never - 1 success - (objectclass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 4 base never - 1 success - (objectClass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 5 base never CN=Enrollment Services,CN=Public Key Services,CN=Services,CN=Configuration,DC=DMC,DC=local 1 success - (objectClass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 6 base never - 1 success - (objectClass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 7 tree never CN=Enrollment Services,CN=Public Key Services,CN=Services,CN=Configuration,DC=DMC,DC=local 2 success - (objectCategory=pKIEnrollmentService) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 8 base never - 1 success - (objectClass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 9 base never CN=Schema,CN=Configuration,DC=DMC,DC=local 1 success - (objectClass=dMD) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 10 base never CN=Schema,CN=Configuration,DC=DMC,DC=local 1 success - (objectClass=dMD) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 11 base never CN=Aggregate,CN=Schema,CN=Configuration,DC=DMC,DC=local 1 success - (objectClass=*) -
+XXXXXXXXXX.XXXXXX ClEkJM2Vm5giqnMf4h 10.199.2.121 59355 10.199.2.111 389 1 base never - 1 success - (objectclass=*) -
+XXXXXXXXXX.XXXXXX ClEkJM2Vm5giqnMf4h 10.199.2.121 59355 10.199.2.111 389 4 base never CN=WS01,CN=Computers,DC=DMC,DC=local 1 success - (objectclass=*) -
+XXXXXXXXXX.XXXXXX ClEkJM2Vm5giqnMf4h 10.199.2.121 59355 10.199.2.111 389 5 base never CN=WS01,CN=Computers,DC=DMC,DC=local 1 success - (objectclass=*) -
+XXXXXXXXXX.XXXXXX ClEkJM2Vm5giqnMf4h 10.199.2.121 59355 10.199.2.111 389 6 base never CN=WS01,CN=Computers,DC=DMC,DC=local 1 success - (objectclass=*) -
+XXXXXXXXXX.XXXXXX C4J4Th3PJpwUYZZ6gc 10.199.2.121 59356 10.199.2.111 389 10 base never - 1 success - (ObjectClass=*) -
+XXXXXXXXXX.XXXXXX C4J4Th3PJpwUYZZ6gc 10.199.2.121 59356 10.199.2.111 389 11 base never CN=62a0ff2e-97b9-4513-943f-0d221bd30080,CN=Device Registration Configuration,CN=services,CN=Configuration,DC=DMC,DC=local 0 no such object 0000208D: NameErr: DSID-0310028B, problem 2001 (NO_OBJECT), data 0, best match of:??'CN=Services,CN=Configuration,DC=DMC,DC=local'?? (ObjectClass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 12 base never CN=Certificate Templates,CN=Public Key Services,CN=Services,CN=Configuration,DC=DMC,DC=local 1 success - (objectClass=*) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 10.199.2.121 59327 10.199.2.111 389 13 tree never CN=Certificate Templates,CN=Public Key Services,CN=Services,CN=Configuration,DC=DMC,DC=local 38 success - (objectclass=pKICertificateTemplate) -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
+#types time string addr port addr port enum string interval count count string count string count count count count set[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 60648 127.0.1.1 389 tcp ldap_tcp 2.114467 548 1020 SF 0 ShADadFf 9 1024 6 1340 -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,12 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id version opcode result diagnostic_message object argument
+#types time string addr port addr port int int string string string string string
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 60648 127.0.1.1 389 1 3 bind SASL SASL bind in progress SASL(0): successful result: user: zeek@ubuntu-01.example.com property: slapAuthzDN not found in sasldb - SRP
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 60648 127.0.1.1 389 2 3 bind SASL success - - SRP
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
+#types time string addr port addr port enum string interval count count string count string count count count count set[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 45936 127.0.1.1 389 tcp ldap_tcp,ssl 0.016922 683 3002 RSTO 0 ShADadFR 14 1407 14 3738 -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id version opcode result diagnostic_message object argument
+#types time string addr port addr port int int string string string string string
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 45936 127.0.1.1 389 1 - extended success - 1.3.6.1.4.1.1466.20037 (StartTLS) -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,4 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+CHhAvVGS1DHFjwGM9, extended_request, 1.3.6.1.4.1.1466.20037 (StartTLS),
+CHhAvVGS1DHFjwGM9, extended_response, LDAP::ResultCode_SUCCESS, ,
+CHhAvVGS1DHFjwGM9, LDAP::starttls

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ssl
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p version cipher curve server_name resumed last_alert next_protocol established ssl_history cert_chain_fps client_cert_chain_fps sni_matches_cert
+#types time string addr port addr port string string string string bool string string bool string vector[string] vector[string] bool
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 45936 127.0.1.1 389 TLSv13 TLS_AES_256_GCM_SHA384 secp256r1 ubuntu-01.example.com F - - T CsiI - - -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,11 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents
+#types time string addr port addr port enum string interval count count string count string count count count count set[string]
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 48122 127.0.1.1 389 tcp ldap_tcp 0.001192 83 59 SF 0 ShADadFf 8 507 5 327 -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,13 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ldap
+#open XXXX-XX-XX-XX-XX-XX
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p message_id version opcode result diagnostic_message object argument
+#types time string addr port addr port int int string string string string string
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 48122 127.0.1.1 389 1 3 bind simple success - cn=admin,dc=example,dc=com REDACTED
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 48122 127.0.1.1 389 2 - extended success - 1.3.6.1.4.1.4203.1.11.3 (whoami) -
+XXXXXXXXXX.XXXXXX CHhAvVGS1DHFjwGM9 127.0.0.1 48122 127.0.1.1 389 3 - unbind - - - -
+#close XXXX-XX-XX-XX-XX-XX

@@ -0,0 +1,3 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+CHhAvVGS1DHFjwGM9, extended_request, 1.3.6.1.4.1.4203.1.11.3 (whoami),
+CHhAvVGS1DHFjwGM9, extended_response, LDAP::ResultCode_SUCCESS, , dn:cn=admin,dc=example,dc=com

@@ -0,0 +1,13 @@
+### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
+smb2_close_request, [credit_charge=1, status=0, command=6, credits=256, flags=0, message_id=8, process_id=65279, tree_id=3905704575, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]
+smb2_close_response, [credit_charge=1, status=0, command=6, credits=256, flags=1, message_id=8, process_id=65279, tree_id=3905704575, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00], [alloc_size=0, eof=0, times=[modified=0.0, modified_raw=116444736000000000, accessed=0.0, accessed_raw=116444736000000000, created=0.0, created_raw=116444736000000000, changed=0.0, changed_raw=116444736000000000], attrs=[read_only=F, hidden=F, system=F, directory=F, archive=F, normal=F, temporary=F, sparse_file=F, reparse_point=F, compressed=F, offline=F, not_content_indexed=F, encrypted=F, integrity_stream=F, no_scrub_data=F]]
+smb2_close_request, [credit_charge=1, status=0, command=6, credits=256, flags=0, message_id=21, process_id=65279, tree_id=900627714, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]
+smb2_close_response, [credit_charge=1, status=0, command=6, credits=256, flags=1, message_id=21, process_id=65279, tree_id=900627714, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00], [alloc_size=0, eof=0, times=[modified=0.0, modified_raw=116444736000000000, accessed=0.0, accessed_raw=116444736000000000, created=0.0, created_raw=116444736000000000, changed=0.0, changed_raw=116444736000000000], attrs=[read_only=F, hidden=F, system=F, directory=F, archive=F, normal=F, temporary=F, sparse_file=F, reparse_point=F, compressed=F, offline=F, not_content_indexed=F, encrypted=F, integrity_stream=F, no_scrub_data=F]]
+smb2_close_request, [credit_charge=1, status=0, command=6, credits=256, flags=4, message_id=25, process_id=65279, tree_id=1248644238, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]
+smb2_close_response, [credit_charge=1, status=0, command=6, credits=768, flags=5, message_id=25, process_id=65279, tree_id=1248644238, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00], [alloc_size=0, eof=0, times=[modified=0.0, modified_raw=116444736000000000, accessed=0.0, accessed_raw=116444736000000000, created=0.0, created_raw=116444736000000000, changed=0.0, changed_raw=116444736000000000], attrs=[read_only=F, hidden=F, system=F, directory=F, archive=F, normal=F, temporary=F, sparse_file=F, reparse_point=F, compressed=F, offline=F, not_content_indexed=F, encrypted=F, integrity_stream=F, no_scrub_data=F]]
+smb2_close_request, [credit_charge=1, status=0, command=6, credits=256, flags=4, message_id=28, process_id=65279, tree_id=1248644238, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]
+smb2_close_response, [credit_charge=1, status=0, command=6, credits=768, flags=5, message_id=28, process_id=65279, tree_id=1248644238, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00], [alloc_size=0, eof=0, times=[modified=0.0, modified_raw=116444736000000000, accessed=0.0, accessed_raw=116444736000000000, created=0.0, created_raw=116444736000000000, changed=0.0, changed_raw=116444736000000000], attrs=[read_only=F, hidden=F, system=F, directory=F, archive=F, normal=F, temporary=F, sparse_file=F, reparse_point=F, compressed=F, offline=F, not_content_indexed=F, encrypted=F, integrity_stream=F, no_scrub_data=F]]
+smb2_close_request, [credit_charge=1, status=0, command=6, credits=256, flags=4, message_id=31, process_id=65279, tree_id=1248644238, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]
+smb2_close_response, [credit_charge=1, status=0, command=6, credits=768, flags=5, message_id=31, process_id=65279, tree_id=1248644238, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00], [alloc_size=0, eof=0, times=[modified=0.0, modified_raw=116444736000000000, accessed=0.0, accessed_raw=116444736000000000, created=0.0, created_raw=116444736000000000, changed=0.0, changed_raw=116444736000000000], attrs=[read_only=F, hidden=F, system=F, directory=F, archive=F, normal=F, temporary=F, sparse_file=F, reparse_point=F, compressed=F, offline=F, not_content_indexed=F, encrypted=F, integrity_stream=F, no_scrub_data=F]]
+smb2_close_request, [credit_charge=1, status=0, command=6, credits=256, flags=4, message_id=34, process_id=65279, tree_id=1248644238, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]
+smb2_close_response, [credit_charge=1, status=0, command=6, credits=768, flags=5, message_id=34, process_id=65279, tree_id=1248644238, session_id=66137014, signature=\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00], [alloc_size=0, eof=0, times=[modified=0.0, modified_raw=116444736000000000, accessed=0.0, accessed_raw=116444736000000000, created=0.0, created_raw=116444736000000000, changed=0.0, changed_raw=116444736000000000], attrs=[read_only=F, hidden=F, system=F, directory=F, archive=F, normal=F, temporary=F, sparse_file=F, reparse_point=F, compressed=F, offline=F, not_content_indexed=F, encrypted=F, integrity_stream=F, no_scrub_data=F]]
@@ -1,2 +0,0 @@
-### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
-[warning] <...>/udp-test.evt:4: Remove in v7.1: Analyzer spicy::TEST is using the deprecated 'port' or 'ports' keyword to register well-known ports. Use Analyzer::register_for_ports() in the accompanying Zeek script instead.

@@ -1,3 +1,3 @@
 ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
-[error] <...>/port-fail.evt:9: port outside of valid range
+[error] <...>/port-fail.evt:7: port outside of valid range
 [error] error loading EVT file "<...>/port-fail.evt"
19
testing/btest/Baseline/spicy.port/output
Normal file
19
testing/btest/Baseline/spicy.port/output
Normal file
|
@ -0,0 +1,19 @@
### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63.
Analyzer::ANALYZER_SPICY_TEST, 11337/udp
Analyzer::ANALYZER_SPICY_TEST, 11338/udp
Analyzer::ANALYZER_SPICY_TEST, 11339/udp
Analyzer::ANALYZER_SPICY_TEST, 11340/udp
Analyzer::ANALYZER_SPICY_TEST, 31337/udp
Analyzer::ANALYZER_SPICY_TEST, 31338/udp
Analyzer::ANALYZER_SPICY_TEST, 31339/udp
Analyzer::ANALYZER_SPICY_TEST, 31340/udp
{
31339/udp,
31337/udp,
31338/udp,
11339/udp,
11338/udp,
11340/udp,
31340/udp,
11337/udp
}
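The per-port lines followed by the unordered set suggest the test script walks the registered ports and then prints the whole set. A hedged sketch of a script that would produce output of this shape, assuming Analyzer::registered_ports() is available in this Zeek version; the real spicy.port test script is not part of this excerpt, and the sorted per-port lines in the baseline indicate it also sorts, which this sketch does not:

# Hypothetical sketch only, not the actual spicy.port test script.
event zeek_init() &priority=-10    # run after port registration has happened
    {
    local ports = Analyzer::registered_ports(Analyzer::ANALYZER_SPICY_TEST);

    # One "Analyzer::ANALYZER_SPICY_TEST, <port>" line per registered port.
    for ( p in ports )
        print Analyzer::ANALYZER_SPICY_TEST, p;

    # Printing the set itself yields the unordered "{ ... }" block above.
    print ports;
    }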
testing/btest/Traces/http/basic-auth-with-colon.trace (new binary file, not shown)
testing/btest/Traces/ldap/ldap-add.pcap (new binary file, not shown)
testing/btest/Traces/ldap/ldap-starttls.pcap (new binary file, not shown)
testing/btest/Traces/ldap/ldap-who-am-i.pcap (new binary file, not shown)
testing/btest/Traces/ldap/missing_krbtgt_ldap_request.pcapng (new binary file, not shown)
testing/btest/Traces/ldap/missing_ldap_logs.pcapng (new binary file, not shown)
testing/btest/Traces/ldap/sasl-ntlm.pcap (new binary file, not shown)
testing/btest/Traces/ldap/sasl-scram-sha-512.pcap (new binary file, not shown)
testing/btest/Traces/ldap/sasl-srp-who-am-i.pcap (new binary file, not shown)
testing/btest/Traces/smb/smb2-zero-byte-error-ioctl.pcap (new binary file, not shown)
@ -9,4 +9,4 @@
#
# @TEST-EXEC: test -d $DIST/scripts
# @TEST-EXEC: for script in `find $DIST/scripts/ -name \*\.zeek`; do zeek -b --parse-only $script >>errors 2>&1; done
-# @TEST-EXEC: TEST_DIFF_CANONIFIER="grep -v -e 'load-balancing.zeek.*deprecated script loaded' | $SCRIPTS/diff-remove-abspath | $SCRIPTS/diff-sort" btest-diff errors
+# @TEST-EXEC: TEST_DIFF_CANONIFIER="grep -v -e 'load-balancing.zeek.*deprecated script loaded' | grep -v -e 'prometheus.zeek.*deprecated script loaded' | $SCRIPTS/diff-remove-abspath | $SCRIPTS/diff-sort" btest-diff errors
@ -9,7 +9,7 @@
# @TEST-EXEC: CLUSTER_NODE=logger-1 zeek %INPUT
# @TEST-EXEC: CLUSTER_NODE=proxy-1 zeek %INPUT
# @TEST-EXEC: CLUSTER_NODE=worker-1 zeek %INPUT
-# @TEST-EXEC: TEST_DIFF_CANONIFIER='grep -v "load-balancing.zeek.*deprecated script" | $SCRIPTS/diff-remove-abspath' btest-diff .stderr
+# @TEST-EXEC: TEST_DIFF_CANONIFIER='grep -v "load-balancing.zeek.*deprecated script" | grep -v "prometheus.zeek.*deprecated script" | $SCRIPTS/diff-remove-abspath' btest-diff .stderr

@load base/frameworks/cluster
@load misc/loaded-scripts
@ -0,0 +1,8 @@
# Authorization: Basic password has a colon in its value
#
# @TEST-EXEC: zeek -b -r $TRACES/http/basic-auth-with-colon.trace %INPUT
# @TEST-EXEC: btest-diff http.log

@load base/protocols/http

redef HTTP::default_capture_password = T;
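The point of this trace is that HTTP Basic credentials are base64 of "user:password", and only the first colon separates the two; the password itself may contain further colons. A small illustrative sketch of that split (not the HTTP analyzer's actual code; the literal is a hypothetical credential):

# Illustrative only: demonstrates why the split must stop at the first colon.
event zeek_init()
    {
    local decoded = decode_base64("dXNlcjpwYXNzOndvcmQ=");    # "user:pass:word"
    local parts = split_string1(decoded, /:/);                 # split on the first colon only
    print fmt("user=%s password=%s", parts[0], parts[1]);      # password keeps its colon
    }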
testing/btest/scripts/base/protocols/ldap/add.zeek (new file, 11 lines)
@ -0,0 +1,11 @@
# Copyright (c) 2024 by the Zeek Project. See LICENSE for details.

# @TEST-REQUIRES: have-spicy
# @TEST-EXEC: zeek -C -r ${TRACES}/ldap/ldap-add.pcap %INPUT
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: ! test -f dpd.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: The addRequest/addResponse operation is not implemented, yet we process it.
testing/btest/scripts/base/protocols/ldap/sasl-ntlm.zeek (new file, 12 lines)
@ -0,0 +1,12 @@
# Copyright (c) 2024 by the Zeek Project. See LICENSE for details.

# @TEST-REQUIRES: have-spicy
# @TEST-EXEC: zeek -C -r ${TRACES}/ldap/sasl-ntlm.pcap %INPUT
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f dpd.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: This broke after #3826 got merged
@ -0,0 +1,12 @@
# Copyright (c) 2024 by the Zeek Project. See LICENSE for details.

# @TEST-REQUIRES: have-spicy
# @TEST-EXEC: zeek -C -r ${TRACES}/ldap/sasl-scram-sha-512.pcap %INPUT
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f dpd.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: This broke after #3826 got merged
@ -0,0 +1,11 @@
# Copyright (c) 2024 by the Zeek Project. See LICENSE for details.

# @TEST-REQUIRES: have-spicy
# @TEST-EXEC: zeek -C -r ${TRACES}/ldap/missing_krbtgt_ldap_request.pcapng %INPUT
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f dpd.log
#
# @TEST-DOC: Test LDAP analyzer with GSS-API integrity traffic where we can still peek into LDAP wrapped into WRAP tokens.
@ -0,0 +1,11 @@
# Copyright (c) 2024 by the Zeek Project. See LICENSE for details.

# @TEST-REQUIRES: have-spicy
# @TEST-EXEC: zeek -C -r ${TRACES}/ldap/missing_ldap_logs.pcapng %INPUT
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ldap_search.log
# @TEST-EXEC: ! test -f dpd.log
#
# @TEST-DOC: Test LDAP analyzer with GSS-API integrity traffic where we can still peek into LDAP wrapped into WRAP tokens.
@ -0,0 +1,11 @@
# Copyright (c) 2024 by the Zeek Project. See LICENSE for details.

# @TEST-REQUIRES: have-spicy
# @TEST-EXEC: zeek -C -r ${TRACES}/ldap/sasl-srp-who-am-i.pcap %INPUT
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: ! test -f dpd.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: SASL authentication using SRP (Secure Remote Password)
testing/btest/scripts/base/protocols/ldap/starttls.zeek (new file, 25 lines)
@ -0,0 +1,25 @@
# Copyright (c) 2024 by the Zeek Project. See LICENSE for details.

# @TEST-REQUIRES: have-spicy
# @TEST-EXEC: zeek -C -r ${TRACES}/ldap/ldap-starttls.pcap %INPUT >out
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff out
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: btest-diff ssl.log
# @TEST-EXEC: ! test -f dpd.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: LDAP supports StartTLS through extendedRequest 1.3.6.1.4.1.1466.20037

event LDAP::extended_request(c: connection, message_id: int, request_name: string, request_value: string) {
    print c$uid, "extended_request", fmt("%s (%s)", request_name, LDAP::EXTENDED_REQUESTS[request_name]), request_value;
}

event LDAP::extended_response(c: connection, message_id: int, result: LDAP::ResultCode, response_name: string, response_value: string) {
    print c$uid, "extended_response", result, response_name, response_value;
}

event LDAP::starttls(c: connection) {
    print c$uid, "LDAP::starttls";
}
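Building on the events exercised above, here is a hedged sketch of a handler that matches the StartTLS OID named in the @TEST-DOC line explicitly. It is illustrative only; the test itself just prints the raw events, and the constant name is made up for this sketch:

# Illustrative sketch; the event signatures are the ones used in the test above.
const ldap_starttls_oid: string = "1.3.6.1.4.1.1466.20037";

event LDAP::extended_request(c: connection, message_id: int, request_name: string, request_value: string)
    {
    if ( request_name == ldap_starttls_oid )
        print fmt("%s requested LDAP StartTLS (message_id=%d)", c$uid, message_id);
    }

event LDAP::starttls(c: connection)
    {
    print fmt("%s: LDAP session switching to TLS", c$uid);
    }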
testing/btest/scripts/base/protocols/ldap/who-am-i.zeek (new file, 20 lines)
@ -0,0 +1,20 @@
# Copyright (c) 2024 by the Zeek Project. See LICENSE for details.

# @TEST-REQUIRES: have-spicy
# @TEST-EXEC: zeek -C -r ${TRACES}/ldap/ldap-who-am-i.pcap %INPUT >out
# @TEST-EXEC: cat conn.log | zeek-cut -Cn local_orig local_resp > conn.log2 && mv conn.log2 conn.log
# @TEST-EXEC: btest-diff out
# @TEST-EXEC: btest-diff conn.log
# @TEST-EXEC: btest-diff ldap.log
# @TEST-EXEC: ! test -f dpd.log
# @TEST-EXEC: ! test -f analyzer.log
#
# @TEST-DOC: Testing OpenLDAP's ldapwhoami utility with simple authentication.

event LDAP::extended_request(c: connection, message_id: int, request_name: string, request_value: string) {
    print c$uid, "extended_request", fmt("%s (%s)", request_name, LDAP::EXTENDED_REQUESTS[request_name]), request_value;
}

event LDAP::extended_response(c: connection, message_id: int, result: LDAP::ResultCode, response_name: string, response_value: string) {
    print c$uid, "extended_response", result, response_name, response_value;
}
@ -0,0 +1,16 @@
# @TEST-DOC: Tests handling of PDUs containing error ioctls with byte lengths of zero
# @TEST-EXEC: zeek -b -r $TRACES/smb/smb2-zero-byte-error-ioctl.pcap %INPUT 2>&1 >out
# @TEST-EXEC: ! test -f analyzer.log
# @TEST-EXEC: btest-diff out

@load base/protocols/smb

event smb2_close_request(c: connection, hdr: SMB2::Header, file_id: SMB2::GUID)
    {
    print "smb2_close_request", hdr;
    }

event smb2_close_response(c: connection, hdr: SMB2::Header, response: SMB2::CloseResponse)
    {
    print "smb2_close_response", hdr, response;
    }
@ -55,7 +55,6 @@ done
@TEST-END-FILE

@load policy/frameworks/cluster/experimental
-@load policy/frameworks/telemetry/prometheus
@load base/frameworks/telemetry

# So the cluster nodes don't terminate right away.
@ -1,21 +0,0 @@
# @TEST-REQUIRES: have-spicy
#
# @TEST-EXEC: spicyz -d -o test.hlto ./udp-test.evt 2>out.stderr
# @TEST-EXEC: TEST_DIFF_CANONIFIER=$SCRIPTS/diff-remove-abspath btest-diff out.stderr
#
# @TEST-DOC: Remove with v7.1: Specifying ports is deprecated.

module Test;

import zeek;

public type Message = unit {
    data: bytes &eod {}
};

# @TEST-START-FILE udp-test.evt
protocol analyzer spicy::TEST over UDP:
    parse with Test::Message,
    port 11337/udp-11340/udp,
    ports {31337/udp-31340/udp};
# @TEST-END-FILE
Some files were not shown because too many files have changed in this diff.