Mirror of https://github.com/zeek/zeek.git
Added an option to the JSON formatter to use ISO 8601 for timestamps.
- It's not *exactly* ISO 8601, which doesn't seem to support subseconds, but subseconds are very important to us, and most things that support ISO 8601 also seem to accept subseconds in the way I've implemented them here.
This commit is contained in:
parent a56c343715
commit c9aaf9e753
11 changed files with 77 additions and 13 deletions
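For context, the change converts Zeek's double-valued epoch timestamps into an ISO 8601 string with a microsecond fraction: strftime for the date/time part, modf for the fraction. Below is a minimal standalone C++ sketch of that conversion; the function name and the main() harness are illustrative and not part of the commit, and it writes the strftime result into a separate buffer rather than reusing one buffer for both the snprintf destination and its %s argument (see the note after the formatter hunk further down).

#include <cmath>
#include <cstddef>
#include <cstdio>
#include <ctime>

// Format a UNIX-epoch timestamp (seconds as a double) roughly the way the
// patch does, e.g. 1215620010.005432 -> "2008-07-09T16:13:30.005432Z".
// Returns false if the time cannot be formatted.
static bool format_iso_timestamp(double ts, char* out, size_t out_len)
	{
	time_t secs = static_cast<time_t>(ts);
	struct tm* tm_utc = gmtime(&secs);     // UTC, hence the trailing "Z"
	if ( ! tm_utc )
		return false;

	char date[32];
	if ( strftime(date, sizeof(date), "%Y-%m-%dT%H:%M:%S", tm_utc) == 0 )
		return false;

	double integ;
	double frac = modf(ts, &integ);        // fractional seconds in [0, 1)
	// Note: rounding of fractions very close to 1.0s is not handled,
	// matching the behavior of the patch.
	snprintf(out, out_len, "%s.%06.0fZ", date, frac * 1000000);
	return true;
	}

int main()
	{
	char buf[64];
	if ( format_iso_timestamp(1215620010.005432, buf, sizeof(buf)) )
		printf("%s\n", buf);               // 2008-07-09T16:13:30.005432Z
	return 0;
	}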
@@ -24,6 +24,12 @@ export {
 	## This is also available as a per-filter $config option.
 	const use_json = F &redef;
 
+	## By default, the JSON formatter will use double values for timestamps
+	## which represent the number of seconds from the UNIX epoch. By setting
+	## this to 'T', it will use the 8601 format. This is also available as
+	## a per-filter $config option.
+	const json_iso_timestamps = F &redef;
+
 	## If true, include lines with log meta information such as column names
 	## with types, the values of ASCII logging options that are in use, and
 	## the time when the file was opened and closed (the latter at the end).
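Enabling the new option from a script is a one-liner, as the btest added near the end of this commit shows: redef LogAscii::use_json = T; followed by redef LogAscii::json_iso_timestamps = T;. It can also be set per filter through the $config option, as the comment in the hunk above notes.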
@@ -78,6 +78,7 @@ const set_separator: string;
 const empty_field: string;
 const unset_field: string;
 const use_json: bool;
+const json_iso_timestamps: bool;
 
 # Options for the DataSeries writer.
 
@@ -59,6 +59,7 @@ bool Ascii::DoInit(const WriterInfo& info, int num_fields, const Field* const *
 	output_to_stdout = BifConst::LogAscii::output_to_stdout;
 	include_meta = BifConst::LogAscii::include_meta;
 	use_json = BifConst::LogAscii::use_json;
+	json_iso_timestamps = BifConst::LogAscii::json_iso_timestamps;
 
 	separator.assign(
 		(const char*) BifConst::LogAscii::separator->Bytes(),
@@ -147,7 +148,7 @@ bool Ascii::DoInit(const WriterInfo& info, int num_fields, const Field* const *
 	if ( use_json )
 		{
 		// Write out JSON formatted logs.
-		formatter = new threading::formatter::JSON(this);
+		formatter = new threading::formatter::JSON(this, json_iso_timestamps);
 		// Using JSON implicitly turns off the header meta fields.
 		include_meta = false;
 		}
@@ -48,6 +48,7 @@ private:
 	bool include_meta;
 	bool tsv;
 	bool use_json;
+	bool json_iso_timestamps;
 
 	string separator;
 	string set_separator;
@@ -52,7 +52,7 @@ ElasticSearch::ElasticSearch(WriterFrontend* frontend) : WriterBackend(frontend)
 
 	curl_handle = HTTPSetup();
 
-	json = new threading::formatter::JSON(this);
+	json = new threading::formatter::JSON(this, false);
 	}
 
 ElasticSearch::~ElasticSearch()
@@ -4,13 +4,15 @@
 
 #include <sstream>
 #include <errno.h>
+#include <math.h>
 
 #include "./JSON.h"
 
 using namespace threading::formatter;
 
-JSON::JSON(MsgThread* t) : Formatter(t)
+JSON::JSON(MsgThread* t, bool json_iso_timestamps) : Formatter(t)
 	{
+	iso_timestamps = json_iso_timestamps;
 	}
 
 JSON::~JSON()
@@ -100,16 +102,35 @@ bool JSON::Describe(ODesc* desc, Value* val) const
 
 		case TYPE_TIME:
 			{
-			// ElasticSearch uses milliseconds for timestamps and json only
-			// supports signed ints (uints can be too large).
-			uint64_t ts = (uint64_t) (val->val.double_val * 1000);
-			if ( ts >= INT64_MAX )
+			if ( iso_timestamps )
 				{
-				thread->Error(thread->Fmt("time value too large for JSON: %" PRIu64, ts));
-				desc->AddRaw("null", 4);
+				char buffer[40];
+				time_t t = time_t(val->val.double_val);
+				if ( strftime(buffer, sizeof(buffer), "%Y-%m-%dT%H:%M:%S", gmtime(&t)) == 0 )
+					thread->Error(thread->Fmt("strftime error for JSON: %" PRIu64));
+				else
+					{
+					double integ;
+					double frac = modf(val->val.double_val, &integ);
+					snprintf(buffer, sizeof(buffer), "%s.%06.0fZ", buffer, frac*1000000);
+					desc->AddRaw("\"", 1);
+					desc->Add(buffer);
+					desc->AddRaw("\"", 1);
+					}
 				}
 			else
-				desc->Add(ts);
+				{
+				// ElasticSearch uses milliseconds for timestamps and json only
+				// supports signed ints (uints can be too large).
+				uint64_t ts = (uint64_t) (val->val.double_val * 1000);
+				if ( ts >= INT64_MAX )
+					{
+					thread->Error(thread->Fmt("time value too large for JSON: %" PRIu64, ts));
+					desc->AddRaw("null", 4);
+					}
+				else
+					desc->Add(ts);
+				}
 			break;
 			}
 
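Two observations on the TYPE_TIME hunk above (notes on the patch as committed, not changes to it): the snprintf call passes buffer both as the destination and as the %s source, and overlapping source and destination in snprintf is undefined behavior, which is why the standalone sketch near the top of this page formats the strftime result into a separate buffer; and the Fmt("strftime error for JSON: %" PRIu64) call supplies no argument for its PRIu64 conversion.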
@@ -13,7 +13,7 @@ namespace threading { namespace formatter {
  */
 class JSON : public Formatter {
 public:
-	JSON(threading::MsgThread* t);
+	JSON(threading::MsgThread* t, bool json_iso_timestamps);
 	virtual ~JSON();
 
 	virtual bool Describe(ODesc* desc, threading::Value* val) const;
@@ -23,6 +23,7 @@ public:
 	virtual threading::Value* ParseValue(string s, string name, TypeTag type, TypeTag subtype = TYPE_ERROR) const;
 
 private:
+	bool iso_timestamps;
 };
 
 }}
@@ -0,0 +1,2 @@
+{"t":"2008-07-09T16:13:30.005432Z"}
+{"t":"1986-12-01T01:01:01.900000Z"}
@@ -1 +1 @@
-{"b":true,"i":-42,"e":"SSH::LOG","c":21,"p":123,"sn":"10.0.0.0/24","a":"1.2.3.4","d":3.14,"t":1394462315468,"iv":100.0,"s":"hurz","sc":[2,4,1,3],"ss":["CC","AA","BB"],"se":[],"vc":[10,20,30],"ve":[],"f":"SSH::foo\u000a{ \u000aif (0 < SSH::i) \u000a\u0009return (Foo);\u000aelse\u000a\u0009return (Bar);\u000a\u000a}"}
+{"b":true,"i":-42,"e":"SSH::LOG","c":21,"p":123,"sn":"10.0.0.0/24","a":"1.2.3.4","d":3.14,"t":1215620010543,"iv":100.0,"s":"hurz","sc":[2,4,1,3],"ss":["CC","AA","BB"],"se":[],"vc":[10,20,30],"ve":[],"f":"SSH::foo\u000a{ \u000aif (0 < SSH::i) \u000a\u0009return (Foo);\u000aelse\u000a\u0009return (Bar);\u000a\u000a}"}
@@ -0,0 +1,31 @@
+#
+# @TEST-EXEC: bro -b %INPUT
+# @TEST-EXEC: btest-diff ssh.log
+#
+# Testing all possible types.
+
+redef LogAscii::use_json = T;
+redef LogAscii::json_iso_timestamps = T;
+
+module SSH;
+
+export {
+	redef enum Log::ID += { LOG };
+
+	type Log: record {
+		t: time;
+	} &log;
+}
+
+event bro_init()
+	{
+	Log::create_stream(SSH::LOG, [$columns=Log]);
+	Log::write(SSH::LOG, [
+		$t=(strptime("%Y-%m-%dT%H:%M:%SZ", "2008-07-09T16:13:30Z") + 0.00543210 secs)
+		]);
+	Log::write(SSH::LOG, [
+		$t=(strptime("%Y-%m-%dT%H:%M:%SZ", "1986-12-01T01:01:01Z") + 0.90 secs)
+		]);
+
+	}
+
@@ -56,7 +56,7 @@ event bro_init()
 		$sn=10.0.0.1/24,
 		$a=1.2.3.4,
 		$d=3.14,
-		$t=network_time(),
+		$t=(strptime("%Y-%m-%dT%H:%M:%SZ", "2008-07-09T16:13:30Z") + 0.543210 secs),
 		$iv=100secs,
 		$s="hurz",
 		$sc=set(1,2,3,4),