Clean up and virtualize the on-entry cache interface.

Clean up and virtualize the ssa_block_range class, and implement the
current vector approach as a derived class.  Allow memory allocation
from the irange allocator obstack for easy freeing.

	* gimple-range-cache.cc (ssa_block_ranges): Virtualize.
	(sbr_vector): Renamed from ssa_block_cache.
	(sbr_vector::sbr_vector): Allocate from obstack and initialize.
	(ssa_block_ranges::~ssa_block_ranges): Remove.
	(sbr_vector::set_bb_range): Use varying and undefined cached values.
	(ssa_block_ranges::set_bb_varying): Remove.
	(sbr_vector::get_bb_range): Adjust assert.
	(sbr_vector::bb_range_p): Adjust assert.
	(~block_range_cache): No freeing loop required.
	(block_range_cache::get_block_ranges): Remove.
	(block_range_cache::set_bb_range): Inline get_block_ranges.
	(block_range_cache::set_bb_varying): Remove.
	* gimple-range-cache.h (set_bb_varying): Remove prototype.
	* value-range.h (irange_allocator::get_memory): New.
parent a7943ea96d
commit 14b0f37a64
3 changed files with 104 additions and 119 deletions
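The shape of the change, before the diff: the concrete ssa_block_ranges class becomes a pure interface, the vector implementation moves into the derived sbr_vector, and both the cache objects and their tables are carved out of the allocator's obstack so that teardown is a single bulk free. The following is a minimal self-contained C++ sketch of that pattern only; arena, block_ranges, and br_vector are invented stand-ins for illustration, not GCC APIs.

#include <cstddef>
#include <cstdio>
#include <cstring>
#include <new>
#include <vector>

// Stand-in for an obstack: bump-style allocation, freed all at once.
struct arena
{
  std::vector<char *> m_chunks;
  void *get_memory (size_t n)
  {
    char *p = new char[n];
    m_chunks.push_back (p);
    return p;
  }
  ~arena ()
  {
    // One-shot free, the analogue of obstack_free (&m_obstack, NULL).
    for (char *p : m_chunks)
      delete[] p;
  }
};

// Abstract interface: routines to set, get, and query if a value is set.
class block_ranges
{
public:
  virtual void set_bb_range (int bb, int lo, int hi) = 0;
  virtual bool get_bb_range (int &lo, int &hi, int bb) = 0;
  virtual bool bb_range_p (int bb) = 0;
};

// Vector-backed implementation, analogous to sbr_vector.  Its table and
// entries live in the arena, so no destructor ever needs to run.
class br_vector : public block_ranges
{
public:
  br_vector (int nblocks, arena *a) : m_tab_size (nblocks), m_arena (a)
  {
    m_tab = (int **) a->get_memory (nblocks * sizeof (int *));
    memset (m_tab, 0, nblocks * sizeof (int *));
  }
  virtual void set_bb_range (int bb, int lo, int hi) override
  {
    int *p = (int *) m_arena->get_memory (2 * sizeof (int));
    p[0] = lo;
    p[1] = hi;
    m_tab[bb] = p;
  }
  virtual bool get_bb_range (int &lo, int &hi, int bb) override
  {
    if (!m_tab[bb])
      return false;
    lo = m_tab[bb][0];
    hi = m_tab[bb][1];
    return true;
  }
  virtual bool bb_range_p (int bb) override { return m_tab[bb] != nullptr; }
protected:
  int **m_tab;		// Non-growing table, one slot per block.
  int m_tab_size;
  arena *m_arena;
};

int main ()
{
  arena a;
  // Placement-new the cache into arena memory: no delete anywhere,
  // everything vanishes when the arena does.
  void *mem = a.get_memory (sizeof (br_vector));
  block_ranges *cache = new (mem) br_vector (8, &a);
  cache->set_bb_range (3, 0, 41);
  int lo, hi;
  if (cache->get_bb_range (lo, hi, 3))
    printf ("bb 3 : [%d, %d]\n", lo, hi);
  return 0;
}

Note the design choice the sketch mirrors: because the derived object is placement-new'd into arena storage and holds only arena pointers, its destructor is never invoked, which is exactly why the patch can drop ~ssa_block_ranges and the freeing loop in ~block_range_cache.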
gimple-range-cache.cc

@@ -125,104 +125,19 @@ non_null_ref::process_name (tree name)
 
 // -------------------------------------------------------------------------
 
-// This class implements a cache of ranges indexed by basic block.  It
-// represents all that is known about an SSA_NAME on entry to each
-// block.  It caches a range-for-type varying range so it doesn't need
-// to be reformed all the time.  If a range is ever always associated
-// with a type, we can use that instead.  Whenever varying is being
-// set for a block, the cache simply points to this cached one rather
-// than create a new one each time.
+// This class represents the API into a cache of ranges for an SSA_NAME.
+// Routines must be implemented to set, get, and query if a value is set.
 
 class ssa_block_ranges
 {
 public:
-  ssa_block_ranges (tree t, irange_allocator *allocator);
-  ~ssa_block_ranges ();
-
-  void set_bb_range (const basic_block bb, const irange &r);
-  void set_bb_varying (const basic_block bb);
-  bool get_bb_range (irange &r, const basic_block bb);
-  bool bb_range_p (const basic_block bb);
+  virtual void set_bb_range (const basic_block bb, const irange &r) = 0;
+  virtual bool get_bb_range (irange &r, const basic_block bb) = 0;
+  virtual bool bb_range_p (const basic_block bb) = 0;
 
   void dump(FILE *f);
-private:
-  vec<irange *> m_tab;
-  irange *m_type_range;
-  tree m_type;
-  irange_allocator *m_irange_allocator;
 };
-
-
-// Initialize a block cache for an ssa_name of type T.
-
-ssa_block_ranges::ssa_block_ranges (tree t, irange_allocator *allocator)
-{
-  gcc_checking_assert (TYPE_P (t));
-  m_type = t;
-  m_irange_allocator = allocator;
-
-  m_tab.create (0);
-  m_tab.safe_grow_cleared (last_basic_block_for_fn (cfun));
-
-  // Create the cached type range.
-  m_type_range = m_irange_allocator->allocate (2);
-  m_type_range->set_varying (t);
-
-  m_tab[ENTRY_BLOCK_PTR_FOR_FN (cfun)->index] = m_type_range;
-}
-
-// Destruct block range.
-
-ssa_block_ranges::~ssa_block_ranges ()
-{
-  m_tab.release ();
-}
-
-// Set the range for block BB to be R.
-
-void
-ssa_block_ranges::set_bb_range (const basic_block bb, const irange &r)
-{
-  gcc_checking_assert ((unsigned) bb->index < m_tab.length ());
-  irange *m = m_irange_allocator->allocate (r);
-  m_tab[bb->index] = m;
-}
-
-// Set the range for block BB to the range for the type.
-
-void
-ssa_block_ranges::set_bb_varying (const basic_block bb)
-{
-  gcc_checking_assert ((unsigned) bb->index < m_tab.length ());
-  m_tab[bb->index] = m_type_range;
-}
-
-// Return the range associated with block BB in R.  Return false if
-// there is no range.
-
-bool
-ssa_block_ranges::get_bb_range (irange &r, const basic_block bb)
-{
-  gcc_checking_assert ((unsigned) bb->index < m_tab.length ());
-  irange *m = m_tab[bb->index];
-  if (m)
-    {
-      r = *m;
-      return true;
-    }
-  return false;
-}
-
-// Return true if a range is present.
-
-bool
-ssa_block_ranges::bb_range_p (const basic_block bb)
-{
-  gcc_checking_assert ((unsigned) bb->index < m_tab.length ());
-  return m_tab[bb->index] != NULL;
-}
-
 
 // Print the list of known ranges for file F in a nice format.
 
 void
@@ -240,6 +155,85 @@ ssa_block_ranges::dump (FILE *f)
     }
 }
 
+// This class implements the range cache as a linear vector, indexed by BB.
+// It caches a varying and undefined range which are used instead of
+// allocating new ones each time.
+
+class sbr_vector : public ssa_block_ranges
+{
+public:
+  sbr_vector (tree t, irange_allocator *allocator);
+
+  virtual void set_bb_range (const basic_block bb, const irange &r) OVERRIDE;
+  virtual bool get_bb_range (irange &r, const basic_block bb) OVERRIDE;
+  virtual bool bb_range_p (const basic_block bb) OVERRIDE;
+protected:
+  irange **m_tab;	// Non growing vector.
+  int m_tab_size;
+  int_range<2> m_varying;
+  int_range<2> m_undefined;
+  tree m_type;
+  irange_allocator *m_irange_allocator;
+};
+
+
+// Initialize a block cache for an ssa_name of type T.
+
+sbr_vector::sbr_vector (tree t, irange_allocator *allocator)
+{
+  gcc_checking_assert (TYPE_P (t));
+  m_type = t;
+  m_irange_allocator = allocator;
+  m_tab_size = last_basic_block_for_fn (cfun) + 1;
+  m_tab = (irange **)allocator->get_memory (m_tab_size * sizeof (irange *));
+  memset (m_tab, 0, m_tab_size * sizeof (irange *));
+
+  // Create the cached type range.
+  m_varying.set_varying (t);
+  m_undefined.set_undefined ();
+}
+
+// Set the range for block BB to be R.
+
+void
+sbr_vector::set_bb_range (const basic_block bb, const irange &r)
+{
+  irange *m;
+  gcc_checking_assert (bb->index < m_tab_size);
+  if (r.varying_p ())
+    m = &m_varying;
+  else if (r.undefined_p ())
+    m = &m_undefined;
+  else
+    m = m_irange_allocator->allocate (r);
+  m_tab[bb->index] = m;
+}
+
+// Return the range associated with block BB in R.  Return false if
+// there is no range.
+
+bool
+sbr_vector::get_bb_range (irange &r, const basic_block bb)
+{
+  gcc_checking_assert (bb->index < m_tab_size);
+  irange *m = m_tab[bb->index];
+  if (m)
+    {
+      r = *m;
+      return true;
+    }
+  return false;
+}
+
+// Return true if a range is present.
+
+bool
+sbr_vector::bb_range_p (const basic_block bb)
+{
+  gcc_checking_assert (bb->index < m_tab_size);
+  return m_tab[bb->index] != NULL;
+}
+
 // -------------------------------------------------------------------------
 
 // Initialize the block cache.
@@ -255,38 +249,36 @@ block_range_cache::block_range_cache ()
 
 block_range_cache::~block_range_cache ()
 {
-  unsigned x;
-  for (x = 0; x < m_ssa_ranges.length (); ++x)
-    {
-      if (m_ssa_ranges[x])
-	delete m_ssa_ranges[x];
-    }
   delete m_irange_allocator;
   // Release the vector itself.
   m_ssa_ranges.release ();
 }
 
-// Return a reference to the ssa_block_cache for NAME.  If it has not been
-// accessed yet, allocate it first.
+// Set the range for NAME on entry to block BB to R.
+// If it has not been accessed yet, allocate it first.
 
-ssa_block_ranges &
-block_range_cache::get_block_ranges (tree name)
+void
+block_range_cache::set_bb_range (tree name, const basic_block bb,
+				 const irange &r)
 {
   unsigned v = SSA_NAME_VERSION (name);
   if (v >= m_ssa_ranges.length ())
     m_ssa_ranges.safe_grow_cleared (num_ssa_names + 1);
 
   if (!m_ssa_ranges[v])
-    m_ssa_ranges[v] = new ssa_block_ranges (TREE_TYPE (name),
-					    m_irange_allocator);
-  return *(m_ssa_ranges[v]);
+    {
+      void *r = m_irange_allocator->get_memory (sizeof (sbr_vector));
+      m_ssa_ranges[v] = new (r) sbr_vector (TREE_TYPE (name),
+					    m_irange_allocator);
+    }
+  m_ssa_ranges[v]->set_bb_range (bb, r);
 }
 
 // Return a pointer to the ssa_block_cache for NAME.  If it has not been
 // accessed yet, return NULL.
 
-ssa_block_ranges *
+inline ssa_block_ranges *
 block_range_cache::query_block_ranges (tree name)
 {
   unsigned v = SSA_NAME_VERSION (name);
@@ -295,22 +287,7 @@ block_range_cache::query_block_ranges (tree name)
   return m_ssa_ranges[v];
 }
 
-// Set the range for NAME on entry to block BB to R.
-
-void
-block_range_cache::set_bb_range (tree name, const basic_block bb,
-				 const irange &r)
-{
-  return get_block_ranges (name).set_bb_range (bb, r);
-}
-
-// Set the range for NAME on entry to block BB to varying.
-
-void
-block_range_cache::set_bb_varying (tree name, const basic_block bb)
-{
-  return get_block_ranges (name).set_bb_varying (bb);
-}
-
 // Return the range for NAME on entry to BB in R.  Return true if there
 // is one.
 
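A detail worth noting from sbr_vector::set_bb_range above: varying and undefined are by far the most common values stored, so the table simply points at the per-instance m_varying and m_undefined objects and only pays for an allocation when a range is genuinely narrower. This is also why set_bb_varying could be deleted: callers just pass a varying range to set_bb_range. A toy self-contained sketch of that share-the-singletons idea follows; all names are invented, and where the toy leaks a heap allocation the real code hands the object to the obstack instead.

#include <cstdio>

// Toy range with the two distinguished states irange exposes via
// varying_p () and undefined_p ().  All names here are illustrative.
struct range
{
  int lo, hi;
  bool varying, undefined;
};

struct cache
{
  // Preallocated singletons shared by every varying/undefined entry.
  range m_varying = { 0, 0, true, false };
  range m_undefined = { 0, 0, false, true };
  range *m_tab[8] = {};

  void set_bb_range (int bb, const range &r)
  {
    if (r.varying)
      m_tab[bb] = &m_varying;		// Share, don't allocate.
    else if (r.undefined)
      m_tab[bb] = &m_undefined;		// Share, don't allocate.
    else
      m_tab[bb] = new range (r);	// Only real ranges cost memory.
  }
};

int main ()
{
  cache c;
  c.set_bb_range (0, { 0, 0, true, false });	// Shares m_varying.
  c.set_bb_range (1, { 5, 10, false, false });	// Allocates.
  printf ("%d\n", c.m_tab[0] == &c.m_varying);	// Prints 1.
  return 0;
}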
gimple-range-cache.h

@@ -51,7 +51,6 @@ public:
   ~block_range_cache ();
 
   void set_bb_range (tree name, const basic_block bb, const irange &r);
-  void set_bb_varying (tree name, const basic_block bb);
   bool get_bb_range (irange &r, tree name, const basic_block bb);
   bool bb_range_p (tree name, const basic_block bb);
 
value-range.h

@@ -639,6 +639,7 @@ public:
   // Return a copy of SRC with the minimum amount of sub-ranges needed
   // to represent it.
   irange *allocate (const irange &src);
+  void *get_memory (unsigned num_bytes);
 private:
   DISABLE_COPY_AND_ASSIGN (irange_allocator);
   struct obstack m_obstack;
@@ -656,6 +657,14 @@ irange_allocator::~irange_allocator ()
   obstack_free (&m_obstack, NULL);
 }
 
+// Provide a hunk of memory from the obstack.
+inline void *
+irange_allocator::get_memory (unsigned num_bytes)
+{
+  void *r = obstack_alloc (&m_obstack, num_bytes);
+  return r;
+}
+
 // Return a new range with NUM_PAIRS.
 
 inline irange *
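The value-range.h hunk is small, but it is what enables the rest of the patch: get_memory is a thin wrapper around obstack_alloc, and since the destructor already calls obstack_free (&m_obstack, NULL), any raw memory handed out this way (the sbr_vector objects and their tables) disappears with the allocator, with no per-object delete. Below is a minimal sketch of that one-shot lifetime using the GNU obstack API directly; it assumes a glibc-style <obstack.h>, and the chunk-allocator setup shown is the conventional boilerplate, not anything from this patch.

#include <obstack.h>
#include <cstdio>
#include <cstdlib>

// obstack.h requires the user to supply the underlying chunk allocator.
#define obstack_chunk_alloc malloc
#define obstack_chunk_free free

int main ()
{
  struct obstack ob;
  obstack_init (&ob);

  // Carve out many small objects with no individual bookkeeping.
  for (int i = 0; i < 1000; i++)
    {
      int *slot = (int *) obstack_alloc (&ob, sizeof (int));
      *slot = i;
    }

  // One call releases everything at once, the same pattern as
  // irange_allocator::~irange_allocator () above.
  obstack_free (&ob, NULL);
  puts ("obstack released");
  return 0;
}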