Merge remote-tracking branch 'origin/topic/vern/footprint'

* origin/topic/vern/footprint:
  new environment variable to enable BTests to skip ASAN checks
  skip new BiF test for ASAN CI runs
  btest update to include recursive value that doesn't require a record
  to avoid recursion, track all aggregates, not just records
  isolate the internal methods
  simpler public calling interface for computing footprint
  use stack-based set to prevent infinite recursion rather than a static one
  change value_footprint() to val_footprint() to be more similar to val_size()
  make including count of container elements non-optional
  btest for mutually-recursive case
  fix for tracking footprints of mutually-recursive records
  added value_footprint() and global_container_footprints() BiFs
Tim Wojtulewicz 2022-05-06 11:32:13 -07:00
commit cf51931615
8 changed files with 324 additions and 1 deletion
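
The ComputeFootprint() overrides below are internal helpers; per the "simpler public calling interface" commit above, callers presumably go through a parameterless wrapper that owns the visited-set. A minimal sketch of such a wrapper, assuming it's declared in Val.h (the declaration isn't part of this diff):

// Sketch only; the real declaration lives outside this diff.
// The public entry point hides the recursion-tracking set from callers.
unsigned int Val::Footprint() const
	{
	std::unordered_set<const Val*> analyzed_vals;
	return Footprint(&analyzed_vals);
	}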


@@ -1322,6 +1322,16 @@ ValPtr ListVal::DoClone(CloneState* state)
	return lv;
	}

unsigned int ListVal::ComputeFootprint(std::unordered_set<const Val*>* analyzed_vals) const
	{
	// One unit per list element, plus each element's own footprint.
	unsigned int fp = vals.size();

	for ( const auto& val : vals )
		fp += val->Footprint(analyzed_vals);

	return fp;
	}

unsigned int ListVal::MemoryAllocation() const
	{
#pragma GCC diagnostic push
@@ -2684,6 +2694,24 @@ ValPtr TableVal::DoClone(CloneState* state)
	return tv;
	}

unsigned int TableVal::ComputeFootprint(std::unordered_set<const Val*>* analyzed_vals) const
	{
	// One unit per table entry, plus the footprints of each
	// entry's index and yield.
	unsigned int fp = table_val->Length();

	for ( const auto& iter : *table_val )
		{
		auto k = iter.GetHashKey();
		auto vl = table_hash->RecoverVals(*k);
		auto v = iter.GetValue<TableEntryVal*>()->GetVal();

		fp += vl->Footprint(analyzed_vals);

		if ( v ) // sets have no yield values
			fp += v->Footprint(analyzed_vals);
		}

	return fp;
	}

unsigned int TableVal::MemoryAllocation() const
	{
	unsigned int size = 0;
@@ -3049,6 +3077,24 @@ ValPtr RecordVal::DoClone(CloneState* state)
	return rv;
	}

unsigned int RecordVal::ComputeFootprint(std::unordered_set<const Val*>* analyzed_vals) const
	{
	int n = NumFields();

	// One unit per field, plus the footprint of each field
	// that's actually set.
	unsigned int fp = n;

	for ( auto i = 0; i < n; ++i )
		{
		if ( ! HasField(i) )
			continue;

		auto f_i = GetField(i);
		if ( f_i )
			fp += f_i->Footprint(analyzed_vals);
		}

	return fp;
	}

unsigned int RecordVal::MemoryAllocation() const
	{
	unsigned int size = 0;
@@ -3572,6 +3618,21 @@ bool VectorVal::Concretize(const TypePtr& t)
	return true;
	}

unsigned int VectorVal::ComputeFootprint(std::unordered_set<const Val*>* analyzed_vals) const
	{
	auto n = vector_val->size();

	// One unit per element; unassigned elements (holes)
	// contribute nothing further.
	unsigned int fp = n;

	for ( auto i = 0U; i < n; ++i )
		{
		auto v = At(i);
		if ( v )
			fp += v->Footprint(analyzed_vals);
		}

	return fp;
	}

unsigned int VectorVal::Resize(unsigned int new_num_elements)
	{
	unsigned int oldsize = vector_val->size();
@@ -3953,6 +4014,31 @@ ValPtr Val::MakeCount(bro_uint_t u)
	return make_intrusive<CountVal>(u);
	}

unsigned int Val::Footprint(std::unordered_set<const Val*>* analyzed_vals) const
	{
	auto is_aggr = IsAggr(type);

	// We only need to check containers for possible recursion, as
	// there's no way to construct a cycle using only non-aggregates.
	if ( is_aggr )
		{
		if ( analyzed_vals->count(this) > 0 )
			// This value closes a cycle; count it as 1.
			return 1;

		analyzed_vals->insert(this);
		}

	auto fp = ComputeFootprint(analyzed_vals);

	if ( is_aggr )
		// Allow the aggregate to be revisited, provided it's not
		// in the context of a cycle.
		analyzed_vals->erase(this);

	return fp;
	}

ValManager::ValManager()
	{
	empty_string = make_intrusive<StringVal>("");
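
To see why Val::Footprint() erases each aggregate from the set after recursing, rather than tracking visited values in a persistent ("static") set, consider the same pattern on a toy graph. This is a self-contained illustration of the technique, not Zeek code; Node and its footprint function are hypothetical:

#include <unordered_set>
#include <vector>

struct Node
	{
	std::vector<Node*> children;
	};

// Track only the nodes on the current traversal path: a node shared
// by two branches (a DAG) is counted once per path, while a true
// cycle terminates with a footprint of 1.
unsigned int Footprint(const Node* n, std::unordered_set<const Node*>* on_path)
	{
	if ( on_path->count(n) > 0 )
		return 1; // closing a cycle

	on_path->insert(n);

	unsigned int fp = n->children.size();
	for ( auto* c : n->children )
		fp += Footprint(c, on_path);

	on_path->erase(n); // allow revisits outside of a cycle

	return fp;
	}

int main()
	{
	Node a, b;
	a.children.push_back(&b);
	b.children.push_back(&a); // mutually recursive, like the btest case

	std::unordered_set<const Node*> on_path;
	return Footprint(&a, &on_path); // 1 (a's child) + 1 (b's child) + 1 (cycle) = 3
	}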