Modify JSON log writer to use the external JSON library

Tim Wojtulewicz 2019-06-28 13:39:53 -07:00
parent d27c846ec5
commit 9b76e8faf4
4 changed files with 95 additions and 96 deletions
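Note (editorial, not part of the commit): the rewritten writer follows one simple pattern with the external library: build an insertion-ordered JSON object per log record, assign each present field, and hand the serialized string to the output buffer in a single desc->Add(j.dump()) call. A minimal standalone sketch of that pattern, assuming nlohmann's json.hpp and fifo_map.hpp single headers are on the include path (field names and values below are made up for illustration):

// Sketch only: mirrors the ZeekJson alias this commit adds to JSON.h.
#include <iostream>
#include "json.hpp"      // nlohmann/json single header (assumed include path)
#include "fifo_map.hpp"  // nlohmann/fifo_map single header (assumed include path)

// Keep object keys in insertion order instead of the library's default sorted order.
template<class K, class V, class compare, class A>
using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
using ZeekJson = nlohmann::basic_json<json_fifo_map>;

int main()
	{
	// One object per log record, analogous to the new JSON::Describe().
	ZeekJson record = ZeekJson::object();
	record["ts"] = 1561754393.0;        // hypothetical field values
	record["id.orig_h"] = "10.0.0.1";
	record["proto"] = "tcp";

	// The formatter emits this string via desc->Add(j.dump()).
	std::cout << record.dump() << "\n";
	// {"ts":1561754393.0,"id.orig_h":"10.0.0.1","proto":"tcp"}
	}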

src/threading/Formatter.cc

@@ -111,3 +111,14 @@ string Formatter::Render(double d)
 	return buf;
 	}
 
+string Formatter::Render(TransportProto proto)
+	{
+	if ( proto == TRANSPORT_UDP )
+		return "udp";
+	else if ( proto == TRANSPORT_TCP )
+		return "tcp";
+	else if ( proto == TRANSPORT_ICMP )
+		return "icmp";
+	else
+		return "unknown";
+	}

src/threading/Formatter.h

@@ -112,6 +112,17 @@ public:
 	 */
 	static string Render(double d);
 
+	/**
+	 * Convert a transport protocol into a string.
+	 *
+	 * This is a helper function that formatter implementations may use.
+	 *
+	 * @param proto The transport protocol.
+	 *
+	 * @return An ASCII representation of the protocol.
+	 */
+	static string Render(TransportProto proto);
+
 	/**
 	 * Convert a string into a TransportProto. The string must be one of
 	 * \c tcp, \c udp, \c icmp, or \c unknown.

src/threading/formatters/JSON.cc

@@ -11,7 +11,7 @@
 #include <math.h>
 #include <stdint.h>
 
-#include "./JSON.h"
+#include "JSON.h"
 
 using namespace threading::formatter;
@@ -25,80 +25,85 @@ JSON::~JSON()
 	}
 
 bool JSON::Describe(ODesc* desc, int num_fields, const Field* const * fields,
                     Value** vals) const
 	{
-	if ( surrounding_braces )
-		desc->AddRaw("{");
+	ZeekJson j = ZeekJson::object();
 
 	for ( int i = 0; i < num_fields; i++ )
 		{
-		const u_char* bytes = desc->Bytes();
-		int len = desc->Len();
-
-		if ( i > 0 &&
-		     len > 0 &&
-		     bytes[len-1] != ',' &&
-		     bytes[len-1] != '{' &&
-		     bytes[len-1] != '[' &&
-		     vals[i]->present )
-			desc->AddRaw(",");
-
-		if ( ! Describe(desc, vals[i], fields[i]->name) )
-			return false;
+		if ( vals[i]->present )
+			{
+			ZeekJson new_entry = BuildJSON(vals[i]);
+			if ( new_entry.is_null() )
+				return false;
+
+			j[fields[i]->name] = new_entry;
+			}
 		}
 
-	if ( surrounding_braces )
-		desc->AddRaw("}");
+	desc->Add(j.dump());
 
 	return true;
 	}
 
 bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 	{
-	if ( desc->IsBinary() )
-		{
-		GetThread()->Error("json formatter: binary format not supported");
-		return false;
-		}
-
 	if ( ! val->present )
 		return true;
 
-	if ( name.size() )
-		{
-		desc->AddRaw("\"", 1);
-		desc->Add(name);
-		desc->AddRaw("\":", 2);
-		}
+	ZeekJson j = BuildJSON(val, name);
+	if ( j.is_null() )
+		return false;
+
+	desc->Add(j.dump());
+	return true;
+	}
+
+threading::Value* JSON::ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype) const
+	{
+	GetThread()->Error("JSON formatter does not support parsing yet.");
+	return NULL;
+	}
+
+ZeekJson JSON::BuildJSON(Value* val, const string& name) const
+	{
+	ZeekJson j;
 
 	switch ( val->type )
 		{
 		case TYPE_BOOL:
-			desc->AddRaw(val->val.int_val == 0 ? "false" : "true");
+			j = val->val.int_val != 0;
 			break;
 
 		case TYPE_INT:
-			desc->Add(val->val.int_val);
+			j = val->val.int_val;
 			break;
 
 		case TYPE_COUNT:
 		case TYPE_COUNTER:
-			desc->Add(val->val.uint_val);
+			j = val->val.uint_val;
 			break;
 
 		case TYPE_PORT:
-			desc->Add(val->val.port_val.port);
+			j = val->val.port_val.port;
			break;
 
 		case TYPE_SUBNET:
-			desc->AddRaw("\"", 1);
-			desc->Add(Render(val->val.subnet_val));
-			desc->AddRaw("\"", 1);
+			j = Formatter::Render(val->val.subnet_val);
 			break;
 
 		case TYPE_ADDR:
-			desc->AddRaw("\"", 1);
-			desc->Add(Render(val->val.addr_val));
-			desc->AddRaw("\"", 1);
+			j = Formatter::Render(val->val.addr_val);
 			break;
 
 		case TYPE_DOUBLE:
 		case TYPE_INTERVAL:
-			desc->Add(val->val.double_val);
+			j = val->val.double_val;
 			break;
 
 		case TYPE_TIME:
@@ -110,15 +115,13 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 				time_t the_time = time_t(floor(val->val.double_val));
 				struct tm t;
 
-				desc->AddRaw("\"", 1);
-
 				if ( ! gmtime_r(&the_time, &t) ||
 				     ! strftime(buffer, sizeof(buffer), "%Y-%m-%dT%H:%M:%S", &t) )
 					{
 					GetThread()->Error(GetThread()->Fmt("json formatter: failure getting time: (%lf)", val->val.double_val));
 					// This was a failure, doesn't really matter what gets put here
 					// but it should probably stand out...
-					desc->Add("2000-01-01T00:00:00.000000");
+					j = "2000-01-01T00:00:00.000000";
 					}
 				else
 					{
@@ -129,20 +132,17 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 						frac += 1;
 
 					snprintf(buffer2, sizeof(buffer2), "%s.%06.0fZ", buffer, fabs(frac) * 1000000);
-					desc->Add(buffer2);
+					j = buffer2;
 					}
-
-				desc->AddRaw("\"", 1);
 				}
 
 			else if ( timestamps == TS_EPOCH )
-				desc->Add(val->val.double_val);
+				j = val->val.double_val;
 
 			else if ( timestamps == TS_MILLIS )
 				{
 				// ElasticSearch uses milliseconds for timestamps
-				uint64_t ts = (uint64_t) (val->val.double_val * 1000);
-				desc->Add(ts);
+				j = (uint64_t) (val->val.double_val * 1000);
 				}
 
 			break;
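Worked example for the timestamp handling above (illustration, not from the commit): with val->val.double_val = 1561754393.5, TS_ISO8601 renders "2019-06-28T20:39:53.500000Z", TS_EPOCH renders the raw 1561754393.5, and TS_MILLIS renders 1561754393500.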
@@ -153,74 +153,40 @@ bool JSON::Describe(ODesc* desc, Value* val, const string& name) const
 		case TYPE_FILE:
 		case TYPE_FUNC:
 			{
-			desc->AddRaw("\"", 1);
-
-			for ( int i = 0; i < val->val.string_val.length; ++i )
-				{
-				char c = val->val.string_val.data[i];
-
-				// 2byte Unicode escape special characters.
-				if ( c < 32 || c > 126 || c == '\n' || c == '"' || c == '\'' || c == '\\' || c == '&' )
-					{
-					desc->AddRaw("\\u00", 4);
-					char hex[2] = {'0', '0'};
-					bytetohex(c, hex);
-					desc->AddRaw(hex, 1);
-					desc->AddRaw(hex + 1, 1);
-					}
-				else
-					desc->AddRaw(&c, 1);
-				}
-
-			desc->AddRaw("\"", 1);
+			j = json_escape_utf8(string(val->val.string_val.data, val->val.string_val.length));
 			break;
 			}
 
 		case TYPE_TABLE:
 			{
-			desc->AddRaw("[", 1);
-
-			for ( int j = 0; j < val->val.set_val.size; j++ )
-				{
-				if ( j > 0 )
-					desc->AddRaw(",", 1);
-
-				Describe(desc, val->val.set_val.vals[j]);
-				}
-
-			desc->AddRaw("]", 1);
+			j = ZeekJson::array();
+			for ( int idx = 0; idx < val->val.set_val.size; idx++ )
+				j.push_back(BuildJSON(val->val.set_val.vals[idx]));
 
 			break;
 			}
 
 		case TYPE_VECTOR:
 			{
-			desc->AddRaw("[", 1);
-
-			for ( int j = 0; j < val->val.vector_val.size; j++ )
-				{
-				if ( j > 0 )
-					desc->AddRaw(",", 1);
-
-				Describe(desc, val->val.vector_val.vals[j]);
-				}
-
-			desc->AddRaw("]", 1);
+			j = ZeekJson::array();
+			for ( int idx = 0; idx < val->val.vector_val.size; idx++ )
+				j.push_back(BuildJSON(val->val.vector_val.vals[idx]));
 
 			break;
 			}
 
 		default:
-			return false;
+			break;
 		}
 
-	return true;
-	}
-
-threading::Value* JSON::ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype) const
-	{
-	GetThread()->Error("JSON formatter does not support parsing yet.");
-	return NULL;
-	}
-
-void JSON::SurroundingBraces(bool use_braces)
-	{
-	surrounding_braces = use_braces;
+	if ( ! name.empty() && ! j.is_null() )
+		{
+		ZeekJson j2 = ZeekJson::object();
+		j2[name] = j;
+		return j2;
+		}
+
+	return j;
 	}
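Two conventions in the new BuildJSON() above are worth spelling out: a default-constructed ZeekJson is null, which is what lets the Describe() overloads treat is_null() as "this value could not be converted", and a non-empty name wraps the converted value in a one-key object. A rough standalone sketch of both behaviors, reusing the ZeekJson alias from JSON.h below (field name and include paths are assumptions):

#include <iostream>
#include "json.hpp"
#include "fifo_map.hpp"

template<class K, class V, class compare, class A>
using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
using ZeekJson = nlohmann::basic_json<json_fifo_map>;

int main()
	{
	// 1) A default-constructed json is null; Describe() checks is_null() and returns false.
	ZeekJson unconverted;
	std::cout << std::boolalpha << unconverted.is_null() << "\n";  // true

	// 2) A non-empty name wraps the value, as the tail of BuildJSON() does with j2[name] = j.
	ZeekJson wrapped = ZeekJson::object();
	wrapped["id.resp_p"] = 443;            // hypothetical port field
	std::cout << wrapped.dump() << "\n";   // {"id.resp_p":443}
	}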

src/threading/formatters/JSON.h

@@ -4,9 +4,19 @@
 #define THREADING_FORMATTERS_JSON_H
 
 #include "../Formatter.h"
+#include "3rdparty/json.hpp"
+#include "3rdparty/fifo_map.hpp"
 
 namespace threading { namespace formatter {
 
+// Define a class for use with the json library that orders the keys in the same order that
+// they were inserted. By default, the json library orders them alphabetically and we don't
+// want it like that.
+template<class K, class V, class compare, class A>
+using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
+using ZeekJson = nlohmann::basic_json<json_fifo_map>;
+
 /**
  * A thread-safe class for converting values into a JSON representation
  * and vice versa.
@@ -27,9 +37,10 @@ public:
 	                      threading::Value** vals) const override;
 
 	threading::Value* ParseValue(const string& s, const string& name, TypeTag type, TypeTag subtype = TYPE_ERROR) const override;
 
-	void SurroundingBraces(bool use_braces);
 private:
+	ZeekJson BuildJSON(Value* val, const string& name = "") const;
+
 	TimeFormat timestamps;
 	bool surrounding_braces;
 	};
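For context on the fifo_map comment in this header: the library's default basic_json stores object members in a sorted std::map, so dumped keys come out alphabetically, while the basic_json<fifo_map> alias keeps insertion order, which keeps JSON log fields in the same order as the log record definition. A small comparison sketch (assuming both single-header libraries are on the include path):

#include <iostream>
#include "json.hpp"
#include "fifo_map.hpp"

template<class K, class V, class compare, class A>
using json_fifo_map = nlohmann::fifo_map<K, V, nlohmann::fifo_map_compare<K>, A>;
using ZeekJson = nlohmann::basic_json<json_fifo_map>;

int main()
	{
	nlohmann::json sorted;    // default ordering: keys are stored sorted
	sorted["ts"] = 1;
	sorted["uid"] = 2;
	sorted["id.orig_h"] = 3;
	std::cout << sorted.dump() << "\n";   // {"id.orig_h":3,"ts":1,"uid":2}

	ZeekJson ordered;         // fifo_map ordering: keys keep insertion order
	ordered["ts"] = 1;
	ordered["uid"] = 2;
	ordered["id.orig_h"] = 3;
	std::cout << ordered.dump() << "\n";  // {"ts":1,"uid":2,"id.orig_h":3}
	}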