Deprecate the internal int/uint types in favor of the cstdint types they were based on

Tim Wojtulewicz 2019-07-30 11:38:42 -07:00
parent 18e4976c6c
commit 54752ef9a1
218 changed files with 1331 additions and 1323 deletions

@@ -28,7 +28,7 @@ public:
 	 * Construct a UID of a given bit-length, optionally from given values.
 	 * @see UID::Set
 	 */
-	explicit UID(bro_uint_t bits, const uint64* v = 0, size_t n = 0)
+	explicit UID(bro_uint_t bits, const uint64_t* v = 0, size_t n = 0)
 		{ Set(bits, v, n); }
 
 	/**
@@ -47,7 +47,7 @@ public:
 	 * 64, then a value is truncated to bit in desired bit-length.
 	 * @param n number of 64-bit elements in array pointed to by \a v.
 	 */
-	void Set(bro_uint_t bits, const uint64* v = 0, size_t n = 0);
+	void Set(bro_uint_t bits, const uint64_t* v = 0, size_t n = 0);
 
 	/**
 	 * Returns a base62 (characters 0-9, A-Z, a-z) representation of the UID.
@@ -81,7 +81,7 @@ public:
 		{ return ! ( u1 == u2 ); }
 
 private:
-	uint64 uid[BRO_UID_LEN];
+	uint64_t uid[BRO_UID_LEN];
 	bool initialized; // Since technically uid == 0 is a legit UID
 };
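
The diff above shows the mechanical side of the change: members and signatures such as uid[BRO_UID_LEN] switch from the internal uint64 alias to the standard uint64_t. For illustration, the sketch below shows one common way a deprecation like this can be staged in C++: the legacy alias names are kept as deprecated aliases of the <cstdint> types, so existing code still compiles but warns at every use. This is a hedged sketch, not the commit's actual code; the name uint64 appears in the diff, while the [[deprecated]] messages, the int64/uint32 aliases, and the surrounding header are assumptions for the example.

	// Illustrative transition shim (assumed, not taken from this commit):
	// keep the legacy internal aliases compiling, but warn at every use
	// and steer callers toward the <cstdint> fixed-width types.
	#include <cstdint>

	using int64  [[deprecated("use int64_t from <cstdint>")]]  = std::int64_t;
	using uint64 [[deprecated("use uint64_t from <cstdint>")]] = std::uint64_t;
	using uint32 [[deprecated("use uint32_t from <cstdint>")]] = std::uint32_t;

	// Migrated code uses the standard types directly, as the UID member
	// above now does:
	// uint64_t uid[BRO_UID_LEN];

Once every use of the legacy names is migrated, which is the bulk of the 218 files touched here, deprecated aliases of this kind can be dropped entirely in a later release.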