hash-table.h (hash_table): Add Lazy template parameter defaulted to false...

	* hash-table.h (hash_table): Add Lazy template parameter defaulted
	to false; if true, don't alloc_entries during construction, but defer
	it to the first method that needs m_entries allocated.
	(hash_table::hash_table, hash_table::~hash_table,
	hash_table::alloc_entries, hash_table::find_empty_slot_for_expand,
	hash_table::too_empty_p, hash_table::expand, hash_table::empty_slow,
	hash_table::clear_slot, hash_table::traverse_noresize,
	hash_table::traverse, hash_table::iterator::slide): Adjust all methods.
	* hash-set.h (hash_set): Add Lazy template parameter defaulted to
	false.
	(hash_set::contains): If Lazy is true, use find_slot_with_hash with
	NO_INSERT instead of find_with_hash.
	(hash_set::traverse, hash_set::iterator, hash_set::iterator::m_iter,
	hash_set::m_table): Add Lazy to template params of hash_table.
	(gt_ggc_mx, gt_pch_nx): Use false as Lazy in hash_set template param.
	* attribs.c (test_attribute_exclusions): Likewise.
	* hash-set-tests.c (test_set_of_strings): Add iterator tests for
	hash_set.  Add tests for hash_set with Lazy = true.

c-family/
	* c-common.c (per_file_includes_t): Use false as Lazy in hash_set
	template param.

jit/
	* jit-recording.c (reproducer::m_set_identifiers): Use false as Lazy
	in hash_set template param.

From-SVN: r269859
commit 36a3a7a372 (parent 2e4182ae07)
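In short: with the new Lazy parameter a hash_set or hash_table no longer allocates its entries array in the constructor; allocation is deferred to the first operation that needs it, and read-only queries on a still-unallocated container simply report "not found".  The sketch below is illustrative rather than part of the patch; it mirrors the API exercised by the new selftests, the include list is an assumption, and it only builds inside the GCC source tree.

/* Minimal usage sketch of a lazily constructed hash_set (assumes GCC's
   internal headers; not from the patch itself).  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"

static void
lazy_set_example ()
{
  const char *red = "red";

  /* Eager (default, Lazy = false): the entries array is allocated by the
     constructor, exactly as before this change.  */
  hash_set<const char *> eager;
  eager.add (red);

  /* Lazy = true: nothing is allocated here; m_entries stays NULL until the
     first method that actually needs it.  */
  hash_set<const char *, true> lazy;

  /* contains () on an unallocated lazy set must not allocate, which is why
     it now goes through find_slot_with_hash (..., NO_INSERT).  */
  gcc_assert (!lazy.contains (red));

  /* The first add () performs the deferred allocation.  */
  lazy.add (red);
  gcc_assert (lazy.contains (red));
}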
gcc/ChangeLog
@@ -1,3 +1,24 @@
+2019-03-21  Jakub Jelinek  <jakub@redhat.com>
+
+	* hash-table.h (hash_table): Add Lazy template parameter defaulted
+	to false, if true, don't alloc_entries during construction, but defer
+	it to the first method that needs m_entries allocated.
+	(hash_table::hash_table, hash_table::~hash_table,
+	hash_table::alloc_entries, hash_table::find_empty_slot_for_expand,
+	hash_table::too_empty_p, hash_table::expand, hash_table::empty_slow,
+	hash_table::clear_slot, hash_table::traverse_noresize,
+	hash_table::traverse, hash_table::iterator::slide): Adjust all methods.
+	* hash-set.h (hash_set): Add Lazy template parameter defaulted to
+	false.
+	(hash_set::contains): If Lazy is true, use find_slot_with_hash with
+	NO_INSERT instead of find_with_hash.
+	(hash_set::traverse, hash_set::iterator, hash_set::iterator::m_iter,
+	hash_set::m_table): Add Lazy to template params of hash_table.
+	(gt_ggc_mx, gt_pch_nx): Use false as Lazy in hash_set template param.
+	* attribs.c (test_attribute_exclusions): Likewise.
+	* hash-set-tests.c (test_set_of_strings): Add iterator tests for
+	hash_set.  Add tests for hash_set with Lazy = true.
+
 2019-03-21  Richard Biener  <rguenther@suse.de>
 
 	PR tree-optimization/89779
gcc/attribs.c
@@ -2075,7 +2075,7 @@ test_attribute_exclusions ()
   const size_t ntables = ARRAY_SIZE (attribute_tables);
 
   /* Set of pairs of mutually exclusive attributes.  */
-  typedef hash_set<excl_pair, excl_hash_traits> exclusion_set;
+  typedef hash_set<excl_pair, false, excl_hash_traits> exclusion_set;
   exclusion_set excl_set;
 
   for (size_t ti0 = 0; ti0 != ntables; ++ti0)
gcc/c-family/ChangeLog
@@ -1,3 +1,8 @@
+2019-03-21  Jakub Jelinek  <jakub@redhat.com>
+
+	* c-common.c (per_file_includes_t): Use false as Lazy in hash_set
+	template param.
+
 2019-03-19  Martin Sebor  <msebor@redhat.com>
 
 	PR tree-optimization/89688
gcc/c-family/c-common.c
@@ -8731,7 +8731,7 @@ try_to_locate_new_include_insertion_point (const char *file, location_t loc)
    no guarantee that two different diagnostics that are recommending
    adding e.g. "<stdio.h>" are using the same buffer.  */
 
-typedef hash_set <const char *, nofree_string_hash> per_file_includes_t;
+typedef hash_set <const char *, false, nofree_string_hash> per_file_includes_t;
 
 /* The map itself.  We don't need string comparison for the filename keys,
    as they come from libcpp.  */
gcc/hash-set-tests.c
@@ -44,6 +44,9 @@ test_set_of_strings ()
 
   ASSERT_EQ (false, s.contains (red));
 
+  for (hash_set<const char *>::iterator it = s.begin (); it != s.end (); ++it)
+    ASSERT_EQ (true, false);
+
   /* Populate the hash_set.  */
   ASSERT_EQ (false, s.add (red));
   ASSERT_EQ (false, s.add (green));
@@ -68,6 +71,67 @@ test_set_of_strings ()
   ASSERT_EQ (true, s.contains (green));
   ASSERT_EQ (true, s.contains (blue));
   ASSERT_EQ (2, s.elements ());
+
+  int seen = 0;
+  for (hash_set<const char *>::iterator it = s.begin (); it != s.end (); ++it)
+    {
+      int n = *it == green;
+      if (n == 0)
+	ASSERT_EQ (*it, blue);
+      ASSERT_EQ (seen & (1 << n), 0);
+      seen |= 1 << n;
+    }
+  ASSERT_EQ (seen, 3);
+
+  hash_set <const char *, true> t;
+  ASSERT_EQ (0, t.elements ());
+
+  ASSERT_EQ (false, t.contains (red));
+
+  for (hash_set<const char *, true>::iterator it = t.begin ();
+       it != t.end (); ++it)
+    ASSERT_EQ (true, false);
+
+  /* Populate the hash_set.  */
+  ASSERT_EQ (false, t.add (red));
+  ASSERT_EQ (false, t.add (green));
+  ASSERT_EQ (false, t.add (blue));
+  ASSERT_EQ (true, t.add (green));
+
+  /* Verify that the values are now within the set.  */
+  ASSERT_EQ (true, t.contains (red));
+  ASSERT_EQ (true, t.contains (green));
+  ASSERT_EQ (true, t.contains (blue));
+  ASSERT_EQ (3, t.elements ());
+
+  seen = 0;
+  for (hash_set<const char *, true>::iterator it = t.begin ();
+       it != t.end (); ++it)
+    {
+      int n = 2;
+      if (*it == green)
+	n = 0;
+      else if (*it == blue)
+	n = 1;
+      else
+	ASSERT_EQ (*it, red);
+      ASSERT_EQ (seen & (1 << n), 0);
+      seen |= 1 << n;
+    }
+  ASSERT_EQ (seen, 7);
+
+  /* Test removal.  */
+  t.remove (red);
+  ASSERT_EQ (false, t.contains (red));
+  ASSERT_EQ (true, t.contains (green));
+  ASSERT_EQ (true, t.contains (blue));
+  ASSERT_EQ (2, t.elements ());
+
+  t.remove (red);
+  ASSERT_EQ (false, t.contains (red));
+  ASSERT_EQ (true, t.contains (green));
+  ASSERT_EQ (true, t.contains (blue));
+  ASSERT_EQ (2, t.elements ());
 }
 
 /* Run all of the selftests within this file.  */
gcc/hash-set.h
@@ -21,7 +21,8 @@ along with GCC; see the file COPYING3.  If not see
 #ifndef hash_set_h
 #define hash_set_h
 
-template<typename KeyId, typename Traits = default_hash_traits<KeyId> >
+template<typename KeyId, bool Lazy = false,
+	 typename Traits = default_hash_traits<KeyId> >
 class hash_set
 {
 public:
@@ -32,12 +33,12 @@ public:
   /* Create a hash_set in gc memory with space for at least n elements.  */
 
   static hash_set *
-    create_ggc (size_t n)
-      {
-	hash_set *set = ggc_alloc<hash_set> ();
-	new (set) hash_set (n, true);
-	return set;
-      }
+  create_ggc (size_t n)
+  {
+    hash_set *set = ggc_alloc<hash_set> ();
+    new (set) hash_set (n, true);
+    return set;
+  }
 
   /* If key k isn't already in the map add it to the map, and
      return false.  Otherwise return true.  */
@@ -56,6 +57,9 @@ public:
 
   bool contains (const Key &k)
   {
+    if (Lazy)
+      return (m_table.find_slot_with_hash (k, Traits::hash (k), NO_INSERT)
+	      != NULL);
     Key &e = m_table.find_with_hash (k, Traits::hash (k));
     return !Traits::is_empty (e);
   }
@@ -71,7 +75,7 @@ public:
   template<typename Arg, bool (*f)(const typename Traits::value_type &, Arg)>
   void traverse (Arg a) const
   {
-    for (typename hash_table<Traits>::iterator iter = m_table.begin ();
+    for (typename hash_table<Traits, Lazy>::iterator iter = m_table.begin ();
 	 iter != m_table.end (); ++iter)
       f (*iter, a);
   }
@@ -87,7 +91,8 @@ public:
   class iterator
   {
   public:
-    explicit iterator (const typename hash_table<Traits>::iterator &iter) :
+    explicit iterator (const typename hash_table<Traits,
+						  Lazy>::iterator &iter) :
       m_iter (iter) {}
 
     iterator &operator++ ()
@@ -109,7 +114,7 @@ public:
     }
 
   private:
-    typename hash_table<Traits>::iterator m_iter;
+    typename hash_table<Traits, Lazy>::iterator m_iter;
   };
 
   /* Standard iterator retrieval methods.  */
@@ -120,11 +125,14 @@ public:
 
 private:
 
-  template<typename T, typename U> friend void gt_ggc_mx (hash_set<T, U> *);
-  template<typename T, typename U> friend void gt_pch_nx (hash_set<T, U> *);
-  template<typename T, typename U> friend void gt_pch_nx (hash_set<T, U> *, gt_pointer_operator, void *);
+  template<typename T, typename U>
+  friend void gt_ggc_mx (hash_set<T, false, U> *);
+  template<typename T, typename U>
+  friend void gt_pch_nx (hash_set<T, false, U> *);
+  template<typename T, typename U>
+  friend void gt_pch_nx (hash_set<T, false, U> *, gt_pointer_operator, void *);
 
-  hash_table<Traits> m_table;
+  hash_table<Traits, Lazy> m_table;
 };
 
 /* Generic hash_set<TYPE> debug helper.
@@ -169,21 +177,21 @@ debug_helper (hash_set<T> &ref)
 
 template<typename K, typename H>
 static inline void
-gt_ggc_mx (hash_set<K, H> *h)
+gt_ggc_mx (hash_set<K, false, H> *h)
 {
   gt_ggc_mx (&h->m_table);
 }
 
 template<typename K, typename H>
 static inline void
-gt_pch_nx (hash_set<K, H> *h)
+gt_pch_nx (hash_set<K, false, H> *h)
 {
   gt_pch_nx (&h->m_table);
 }
 
 template<typename K, typename H>
 static inline void
-gt_pch_nx (hash_set<K, H> *h, gt_pointer_operator op, void *cookie)
+gt_pch_nx (hash_set<K, false, H> *h, gt_pointer_operator op, void *cookie)
 {
   op (&h->m_table.m_entries, cookie);
 }
gcc/hash-table.h
@@ -167,6 +167,15 @@ along with GCC; see the file COPYING3.  If not see
    See hash_table for details.  The interface is very similar to libiberty's
    htab_t.
 
+   If a hash table is used only in some rare cases, it is possible
+   to construct the hash_table lazily before first use.  This is done
+   through:
+
+      hash_table <some_type_hasher, true> some_type_hash_table;
+
+   which will cause whatever methods actually need the allocated entries
+   array to allocate it later.
+
 
    EASY DESCRIPTORS FOR POINTERS
 
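The declaration style described in the comment above can be sketched as follows.  This is not from the patch: nofree_string_hash is an existing GCC string hasher used purely for illustration, the table and function names are invented, and the usual GCC-internal includes are assumed.

/* Sketch: a rarely used table declared lazily.  With Lazy = true the
   constructor records the requested size but never calls alloc_entries.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"

static hash_table<nofree_string_hash, true> some_type_hash_table (31);

/* Callback for traverse_noresize; returns nonzero to keep iterating.  */
int
count_cb (const char **slot, int *n)
{
  (void) slot;
  ++*n;
  return 1;
}

static int
count_entries ()
{
  int n = 0;
  /* On a still-unallocated lazy table this returns immediately, because
     traverse_noresize bails out when m_entries is NULL (see the hunk for
     traverse_noresize further down).  */
  some_type_hash_table.traverse_noresize <int *, count_cb> (&n);
  return n;
}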
@@ -241,7 +250,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "hash-map-traits.h"
 
 template<typename, typename, typename> class hash_map;
-template<typename, typename> class hash_set;
+template<typename, bool, typename> class hash_set;
 
 /* The ordinary memory allocator.  */
 /* FIXME (crowl): This allocator may be extracted for wider sharing later.  */
@@ -353,8 +362,8 @@ class mem_usage;
    hash table code.
 
 */
-template <typename Descriptor,
-	  template<typename Type> class Allocator = xcallocator>
+template <typename Descriptor, bool Lazy = false,
+	  template<typename Type> class Allocator = xcallocator>
 class hash_table
 {
   typedef typename Descriptor::value_type value_type;
@@ -422,7 +431,7 @@ public:
      write the value you want into the returned slot.  When inserting an
      entry, NULL may be returned if memory allocation fails.  */
   value_type *find_slot_with_hash (const compare_type &comparable,
-				    hashval_t hash, enum insert_option insert);
+				   hashval_t hash, enum insert_option insert);
 
   /* This function deletes an element with the given COMPARABLE value
      from hash table starting with the given HASH.  If there is no
@@ -472,6 +481,8 @@ public:
 
   iterator begin () const
     {
+      if (Lazy && m_entries == NULL)
+	return iterator ();
       iterator iter (m_entries, m_entries + m_size);
       iter.slide ();
       return iter;
@@ -491,9 +502,8 @@ private:
   hashtab_entry_note_pointers (void *, void *, gt_pointer_operator, void *);
   template<typename T, typename U, typename V> friend void
   gt_pch_nx (hash_map<T, U, V> *, gt_pointer_operator, void *);
-  template<typename T, typename U> friend void gt_pch_nx (hash_set<T, U> *,
-							   gt_pointer_operator,
-							   void *);
+  template<typename T, typename U>
+  friend void gt_pch_nx (hash_set<T, false, U> *, gt_pointer_operator, void *);
   template<typename T> friend void gt_pch_nx (hash_table<T> *,
 					      gt_pointer_operator, void *);
 
@@ -566,11 +576,12 @@ extern mem_alloc_description<mem_usage>& hash_table_usage (void);
 /* Support function for statistics.  */
 extern void dump_hash_table_loc_statistics (void);
 
-template<typename Descriptor, template<typename Type> class Allocator>
-hash_table<Descriptor, Allocator>::hash_table (size_t size, bool ggc, bool
-					       gather_mem_stats,
-					       mem_alloc_origin origin
-					       MEM_STAT_DECL) :
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
+hash_table<Descriptor, Lazy, Allocator>::hash_table (size_t size, bool ggc,
+						     bool gather_mem_stats,
+						     mem_alloc_origin origin
+						     MEM_STAT_DECL) :
   m_n_elements (0), m_n_deleted (0), m_searches (0), m_collisions (0),
   m_ggc (ggc), m_gather_mem_stats (gather_mem_stats)
 {
@@ -581,18 +592,23 @@ hash_table<Descriptor, Allocator>::hash_table (size_t size, bool ggc, bool
 
   if (m_gather_mem_stats)
     hash_table_usage ().register_descriptor (this, origin, ggc
-					      FINAL_PASS_MEM_STAT);
+					     FINAL_PASS_MEM_STAT);
 
-  m_entries = alloc_entries (size PASS_MEM_STAT);
+  if (Lazy)
+    m_entries = NULL;
+  else
+    m_entries = alloc_entries (size PASS_MEM_STAT);
   m_size = size;
   m_size_prime_index = size_prime_index;
 }
 
-template<typename Descriptor, template<typename Type> class Allocator>
-hash_table<Descriptor, Allocator>::hash_table (const hash_table &h, bool ggc,
-					       bool gather_mem_stats,
-					       mem_alloc_origin origin
-					       MEM_STAT_DECL) :
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
+hash_table<Descriptor, Lazy, Allocator>::hash_table (const hash_table &h,
+						     bool ggc,
+						     bool gather_mem_stats,
+						     mem_alloc_origin origin
+						     MEM_STAT_DECL) :
   m_n_elements (h.m_n_elements), m_n_deleted (h.m_n_deleted),
   m_searches (0), m_collisions (0), m_ggc (ggc),
   m_gather_mem_stats (gather_mem_stats)
@@ -603,43 +619,54 @@ hash_table<Descriptor, Allocator>::hash_table (const hash_table &h, bool ggc,
     hash_table_usage ().register_descriptor (this, origin, ggc
 					     FINAL_PASS_MEM_STAT);
 
-  value_type *nentries = alloc_entries (size PASS_MEM_STAT);
-  for (size_t i = 0; i < size; ++i)
+  if (Lazy && h.m_entries == NULL)
+    m_entries = NULL;
+  else
     {
-      value_type &entry = h.m_entries[i];
-      if (is_deleted (entry))
-	mark_deleted (nentries[i]);
-      else if (!is_empty (entry))
-	nentries[i] = entry;
+      value_type *nentries = alloc_entries (size PASS_MEM_STAT);
+      for (size_t i = 0; i < size; ++i)
+	{
+	  value_type &entry = h.m_entries[i];
+	  if (is_deleted (entry))
+	    mark_deleted (nentries[i]);
+	  else if (!is_empty (entry))
+	    nentries[i] = entry;
+	}
+      m_entries = nentries;
     }
-  m_entries = nentries;
   m_size = size;
   m_size_prime_index = h.m_size_prime_index;
 }
 
-template<typename Descriptor, template<typename Type> class Allocator>
-hash_table<Descriptor, Allocator>::~hash_table ()
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
+hash_table<Descriptor, Lazy, Allocator>::~hash_table ()
 {
-  for (size_t i = m_size - 1; i < m_size; i--)
-    if (!is_empty (m_entries[i]) && !is_deleted (m_entries[i]))
-      Descriptor::remove (m_entries[i]);
+  if (!Lazy || m_entries)
+    {
+      for (size_t i = m_size - 1; i < m_size; i--)
+	if (!is_empty (m_entries[i]) && !is_deleted (m_entries[i]))
+	  Descriptor::remove (m_entries[i]);
 
-  if (!m_ggc)
-    Allocator <value_type> ::data_free (m_entries);
-  else
-    ggc_free (m_entries);
+      if (!m_ggc)
+	Allocator <value_type> ::data_free (m_entries);
+      else
+	ggc_free (m_entries);
+    }
 
   if (m_gather_mem_stats)
     hash_table_usage ().release_instance_overhead (this,
-						   sizeof (value_type) * m_size,
-						   true);
+						   sizeof (value_type)
+						   * m_size, true);
 }
 
 /* This function returns an array of empty hash table elements.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-inline typename hash_table<Descriptor, Allocator>::value_type *
-hash_table<Descriptor, Allocator>::alloc_entries (size_t n MEM_STAT_DECL) const
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
+inline typename hash_table<Descriptor, Lazy, Allocator>::value_type *
+hash_table<Descriptor, Lazy,
+	   Allocator>::alloc_entries (size_t n MEM_STAT_DECL) const
 {
   value_type *nentries;
 
@@ -665,9 +692,11 @@ hash_table<Descriptor, Allocator>::alloc_entries (size_t n MEM_STAT_DECL) const
    This function also assumes there are no deleted entries in the table.
    HASH is the hash value for the element to be inserted.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-typename hash_table<Descriptor, Allocator>::value_type *
-hash_table<Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
+typename hash_table<Descriptor, Lazy, Allocator>::value_type *
+hash_table<Descriptor, Lazy,
+	   Allocator>::find_empty_slot_for_expand (hashval_t hash)
 {
   hashval_t index = hash_table_mod1 (hash, m_size_prime_index);
   size_t size = m_size;
@@ -694,9 +723,10 @@ hash_table<Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
 
 /* Return true if the current table is excessively big for ELTS elements.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
 inline bool
-hash_table<Descriptor, Allocator>::too_empty_p (unsigned int elts)
+hash_table<Descriptor, Lazy, Allocator>::too_empty_p (unsigned int elts)
 {
   return elts * 8 < m_size && m_size > 32;
 }
@@ -708,9 +738,10 @@ hash_table<Descriptor, Allocator>::too_empty_p (unsigned int elts)
    table entries is changed.  If memory allocation fails, this function
    will abort.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>::expand ()
+hash_table<Descriptor, Lazy, Allocator>::expand ()
 {
   value_type *oentries = m_entries;
   unsigned int oindex = m_size_prime_index;
@@ -769,9 +800,10 @@ hash_table<Descriptor, Allocator>::expand ()
 
 /* Implements empty() in cases where it isn't a no-op.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>::empty_slow ()
+hash_table<Descriptor, Lazy, Allocator>::empty_slow ()
 {
   size_t size = m_size;
   size_t nsize = size;
@@ -819,9 +851,10 @@ hash_table<Descriptor, Allocator>::empty_slow ()
    useful when you've already done the lookup and don't want to do it
    again.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>::clear_slot (value_type *slot)
+hash_table<Descriptor, Lazy, Allocator>::clear_slot (value_type *slot)
 {
   gcc_checking_assert (!(slot < m_entries || slot >= m_entries + size ()
 			 || is_empty (*slot) || is_deleted (*slot)));
@@ -836,15 +869,18 @@ hash_table<Descriptor, Allocator>::clear_slot (value_type *slot)
    COMPARABLE element starting with the given HASH value.  It cannot
    be used to insert or delete an element.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-typename hash_table<Descriptor, Allocator>::value_type &
-hash_table<Descriptor, Allocator>
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
+typename hash_table<Descriptor, Lazy, Allocator>::value_type &
+hash_table<Descriptor, Lazy, Allocator>
 ::find_with_hash (const compare_type &comparable, hashval_t hash)
 {
   m_searches++;
   size_t size = m_size;
   hashval_t index = hash_table_mod1 (hash, m_size_prime_index);
 
+  if (Lazy && m_entries == NULL)
+    m_entries = alloc_entries (size);
   value_type *entry = &m_entries[index];
   if (is_empty (*entry)
       || (!is_deleted (*entry) && Descriptor::equal (*entry, comparable)))
@@ -873,12 +909,20 @@ hash_table<Descriptor, Allocator>
    write the value you want into the returned slot.  When inserting an
    entry, NULL may be returned if memory allocation fails.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-typename hash_table<Descriptor, Allocator>::value_type *
-hash_table<Descriptor, Allocator>
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
+typename hash_table<Descriptor, Lazy, Allocator>::value_type *
+hash_table<Descriptor, Lazy, Allocator>
 ::find_slot_with_hash (const compare_type &comparable, hashval_t hash,
 		       enum insert_option insert)
 {
+  if (Lazy && m_entries == NULL)
+    {
+      if (insert == INSERT)
+	m_entries = alloc_entries (m_size);
+      else
+	return NULL;
+    }
   if (insert == INSERT && m_size * 3 <= m_n_elements * 4)
     expand ();
 
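A caller-side sketch of the behaviour added in the hunk above (illustrative only, not from the patch): on a Lazy table a NO_INSERT lookup returns NULL without ever allocating the entries array, while the first INSERT lookup performs the deferred allocation.  nofree_string_hash is an existing GCC hasher whose exact traits are assumed here; intern_name is an invented helper.

static const char *
intern_name (hash_table<nofree_string_hash, true> &tab, const char *name)
{
  hashval_t h = nofree_string_hash::hash (name);

  /* Pure query: never forces the deferred allocation.  */
  if (const char **slot = tab.find_slot_with_hash (name, h, NO_INSERT))
    return *slot;

  /* Insertion: this is the point where alloc_entries finally runs.  */
  const char **slot = tab.find_slot_with_hash (name, h, INSERT);
  *slot = name;
  return name;
}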
@@ -934,9 +978,10 @@ hash_table<Descriptor, Allocator>
    from hash table starting with the given HASH.  If there is no
    matching element in the hash table, this function does nothing.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>
+hash_table<Descriptor, Lazy, Allocator>
 ::remove_elt_with_hash (const compare_type &comparable, hashval_t hash)
 {
   value_type *slot = find_slot_with_hash (comparable, hash, NO_INSERT);
@@ -953,15 +998,18 @@ hash_table<Descriptor, Allocator>
    each live entry.  If CALLBACK returns false, the iteration stops.
    ARGUMENT is passed as CALLBACK's second argument.  */
 
-template<typename Descriptor,
+template<typename Descriptor, bool Lazy,
 	 template<typename Type> class Allocator>
 template<typename Argument,
-	 int (*Callback)
-	 (typename hash_table<Descriptor, Allocator>::value_type *slot,
-	  Argument argument)>
+	 int (*Callback)
+	 (typename hash_table<Descriptor, Lazy, Allocator>::value_type *slot,
+	  Argument argument)>
 void
-hash_table<Descriptor, Allocator>::traverse_noresize (Argument argument)
+hash_table<Descriptor, Lazy, Allocator>::traverse_noresize (Argument argument)
 {
+  if (Lazy && m_entries == NULL)
+    return;
+
   value_type *slot = m_entries;
   value_type *limit = slot + size ();
 
@@ -979,16 +1027,16 @@ hash_table<Descriptor, Allocator>::traverse_noresize (Argument argument)
 /* Like traverse_noresize, but does resize the table when it is too empty
    to improve effectivity of subsequent calls.  */
 
-template <typename Descriptor,
+template <typename Descriptor, bool Lazy,
 	  template <typename Type> class Allocator>
 template <typename Argument,
 	  int (*Callback)
-	  (typename hash_table<Descriptor, Allocator>::value_type *slot,
-	   Argument argument)>
+	  (typename hash_table<Descriptor, Lazy, Allocator>::value_type *slot,
+	   Argument argument)>
 void
-hash_table<Descriptor, Allocator>::traverse (Argument argument)
+hash_table<Descriptor, Lazy, Allocator>::traverse (Argument argument)
 {
-  if (too_empty_p (elements ()))
+  if (too_empty_p (elements ()) && (!Lazy || m_entries))
     expand ();
 
   traverse_noresize <Argument, Callback> (argument);
@@ -996,9 +1044,10 @@ hash_table<Descriptor, Allocator>::traverse (Argument argument)
 
 /* Slide down the iterator slots until an active entry is found.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>::iterator::slide ()
+hash_table<Descriptor, Lazy, Allocator>::iterator::slide ()
 {
   for ( ; m_slot < m_limit; ++m_slot )
     {
@@ -1012,9 +1061,10 @@ hash_table<Descriptor, Allocator>::iterator::slide ()
 
 /* Bump the iterator.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-inline typename hash_table<Descriptor, Allocator>::iterator &
-hash_table<Descriptor, Allocator>::iterator::operator ++ ()
+template<typename Descriptor, bool Lazy,
+	 template<typename Type> class Allocator>
+inline typename hash_table<Descriptor, Lazy, Allocator>::iterator &
+hash_table<Descriptor, Lazy, Allocator>::iterator::operator ++ ()
 {
   ++m_slot;
   slide ();
gcc/jit/ChangeLog
@@ -1,3 +1,8 @@
+2019-03-21  Jakub Jelinek  <jakub@redhat.com>
+
+	* jit-recording.c (reproducer::m_set_identifiers): Use false as Lazy
+	in hash_set template param.
+
 2019-02-05  Andrea Corallo  <andrea.corallo@arm.com>
 
 	* docs/topics/compatibility.rst (LIBGCCJIT_ABI_11): New ABI tag.
gcc/jit/jit-recording.c
@@ -245,7 +245,7 @@ class reproducer : public dump
   {
     static void remove (const char *) {}
   };
-  hash_set<const char *, hash_traits> m_set_identifiers;
+  hash_set<const char *, false, hash_traits> m_set_identifiers;
   allocator m_allocator;
 };
 