Turn 'bool force_collect' parameter to 'ggc_collect' into an 'enum ggc_collect mode'
... to make the meaning more explicit to the reader of the code.
Follow-up to recent commit 0edf2e81bb
"Turn global 'ggc_force_collect' variable into 'force_collect' parameter to
'ggc_collect'".
gcc/
* ggc.h (enum ggc_collect): New.
(ggc_collect): Use it.
* ggc-page.c: Adjust.
* ggc-common.c: Likewise.
* ggc-tests.c: Likewise.
* read-rtl-function.c: Likewise.
* selftest-run-tests.c: Likewise.
* doc/gty.texi (Invoking the garbage collector): Likewise.
Suggested-by: David Malcolm <dmalcolm@redhat.com>
This commit is contained in:
parent
76bb3c50dd
commit
602fca427d
|
@@ -655,9 +655,9 @@ with many other garbage collectors, it is not implicitly invoked by
|
||||||
allocation routines when a lot of memory has been consumed. So the
|
allocation routines when a lot of memory has been consumed. So the
|
||||||
only way to have GGC reclaim storage is to call the @code{ggc_collect}
|
only way to have GGC reclaim storage is to call the @code{ggc_collect}
|
||||||
function explicitly.
|
function explicitly.
|
||||||
When the @var{force_collect} parameter is set or otherwise an internal
|
With @var{mode} @code{GGC_COLLECT_FORCE} or otherwise (default
|
||||||
heuristic decides whether to actually collect, this call is
|
@code{GGC_COLLECT_HEURISTIC}) when the internal heuristic decides to
|
||||||
potentially an expensive operation, as it may
|
collect, this call is potentially an expensive operation, as it may
|
||||||
have to scan the entire heap. Beware that local variables (on the GCC
|
have to scan the entire heap. Beware that local variables (on the GCC
|
||||||
call stack) are not followed by such an invocation (as many other
|
call stack) are not followed by such an invocation (as many other
|
||||||
garbage collectors do): you should reference all your data from static
|
garbage collectors do): you should reference all your data from static
|
||||||
|
|
|
@@ -962,7 +962,7 @@ dump_ggc_loc_statistics ()
|
||||||
if (! GATHER_STATISTICS)
|
if (! GATHER_STATISTICS)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
ggc_mem_desc.dump (GGC_ORIGIN);
|
ggc_mem_desc.dump (GGC_ORIGIN);
|
||||||
}
|
}
|
||||||
|
|
|
@@ -2184,7 +2184,7 @@ validate_free_objects (void)
|
||||||
/* Top level mark-and-sweep routine. */
|
/* Top level mark-and-sweep routine. */
|
||||||
|
|
||||||
void
|
void
|
||||||
ggc_collect (bool force_collect)
|
ggc_collect (enum ggc_collect mode)
|
||||||
{
|
{
|
||||||
/* Avoid frequent unnecessary work by skipping collection if the
|
/* Avoid frequent unnecessary work by skipping collection if the
|
||||||
total allocations haven't expanded much since the last
|
total allocations haven't expanded much since the last
|
||||||
|
@@ -2196,7 +2196,8 @@ ggc_collect (bool force_collect)
|
||||||
memory_block_pool::trim ();
|
memory_block_pool::trim ();
|
||||||
|
|
||||||
float min_expand = allocated_last_gc * param_ggc_min_expand / 100;
|
float min_expand = allocated_last_gc * param_ggc_min_expand / 100;
|
||||||
if (G.allocated < allocated_last_gc + min_expand && !force_collect)
|
if (mode == GGC_COLLECT_HEURISTIC
|
||||||
|
&& G.allocated < allocated_last_gc + min_expand)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
timevar_push (TV_GC);
|
timevar_push (TV_GC);
|
||||||
|
|
|
@@ -47,7 +47,7 @@ test_basic_struct ()
|
||||||
root_test_struct = ggc_cleared_alloc <test_struct> ();
|
root_test_struct = ggc_cleared_alloc <test_struct> ();
|
||||||
root_test_struct->other = ggc_cleared_alloc <test_struct> ();
|
root_test_struct->other = ggc_cleared_alloc <test_struct> ();
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
ASSERT_TRUE (ggc_marked_p (root_test_struct));
|
ASSERT_TRUE (ggc_marked_p (root_test_struct));
|
||||||
ASSERT_TRUE (ggc_marked_p (root_test_struct->other));
|
ASSERT_TRUE (ggc_marked_p (root_test_struct->other));
|
||||||
|
@@ -77,7 +77,7 @@ test_length ()
|
||||||
for (int i = 0; i < count; i++)
|
for (int i = 0; i < count; i++)
|
||||||
root_test_of_length->elem[i] = ggc_cleared_alloc <test_of_length> ();
|
root_test_of_length->elem[i] = ggc_cleared_alloc <test_of_length> ();
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
ASSERT_TRUE (ggc_marked_p (root_test_of_length));
|
ASSERT_TRUE (ggc_marked_p (root_test_of_length));
|
||||||
for (int i = 0; i < count; i++)
|
for (int i = 0; i < count; i++)
|
||||||
|
@@ -151,7 +151,7 @@ test_union ()
|
||||||
test_struct *referenced_by_other = ggc_cleared_alloc <test_struct> ();
|
test_struct *referenced_by_other = ggc_cleared_alloc <test_struct> ();
|
||||||
other->m_ptr = referenced_by_other;
|
other->m_ptr = referenced_by_other;
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
ASSERT_TRUE (ggc_marked_p (root_test_of_union_1));
|
ASSERT_TRUE (ggc_marked_p (root_test_of_union_1));
|
||||||
ASSERT_TRUE (ggc_marked_p (ts));
|
ASSERT_TRUE (ggc_marked_p (ts));
|
||||||
|
@@ -192,7 +192,7 @@ test_finalization ()
|
||||||
|
|
||||||
test_struct_with_dtor::dtor_call_count = 0;
|
test_struct_with_dtor::dtor_call_count = 0;
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
/* Verify that the destructor was run for each instance. */
|
/* Verify that the destructor was run for each instance. */
|
||||||
ASSERT_EQ (count, test_struct_with_dtor::dtor_call_count);
|
ASSERT_EQ (count, test_struct_with_dtor::dtor_call_count);
|
||||||
|
@@ -210,7 +210,7 @@ test_deletable_global ()
|
||||||
test_of_deletable = ggc_cleared_alloc <test_struct> ();
|
test_of_deletable = ggc_cleared_alloc <test_struct> ();
|
||||||
ASSERT_TRUE (test_of_deletable != NULL);
|
ASSERT_TRUE (test_of_deletable != NULL);
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
ASSERT_EQ (NULL, test_of_deletable);
|
ASSERT_EQ (NULL, test_of_deletable);
|
||||||
}
|
}
|
||||||
|
@@ -283,7 +283,7 @@ test_inheritance ()
|
||||||
test_some_subclass_as_base_ptr = new some_subclass ();
|
test_some_subclass_as_base_ptr = new some_subclass ();
|
||||||
test_some_other_subclass_as_base_ptr = new some_other_subclass ();
|
test_some_other_subclass_as_base_ptr = new some_other_subclass ();
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
/* Verify that the roots and everything referenced by them got marked
|
/* Verify that the roots and everything referenced by them got marked
|
||||||
(both for fields in the base class and those in subclasses). */
|
(both for fields in the base class and those in subclasses). */
|
||||||
|
@@ -362,7 +362,7 @@ test_chain_next ()
|
||||||
tail_node = new_node;
|
tail_node = new_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
/* If we got here, we survived. */
|
/* If we got here, we survived. */
|
||||||
|
|
||||||
|
@@ -429,7 +429,7 @@ test_user_struct ()
|
||||||
|
|
||||||
num_calls_to_user_gt_ggc_mx = 0;
|
num_calls_to_user_gt_ggc_mx = 0;
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
ASSERT_TRUE (ggc_marked_p (root_user_struct_ptr));
|
ASSERT_TRUE (ggc_marked_p (root_user_struct_ptr));
|
||||||
ASSERT_TRUE (ggc_marked_p (referenced));
|
ASSERT_TRUE (ggc_marked_p (referenced));
|
||||||
|
@@ -447,7 +447,7 @@ test_tree_marking ()
|
||||||
{
|
{
|
||||||
dummy_unittesting_tree = build_int_cst (integer_type_node, 1066);
|
dummy_unittesting_tree = build_int_cst (integer_type_node, 1066);
|
||||||
|
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
ASSERT_TRUE (ggc_marked_p (dummy_unittesting_tree));
|
ASSERT_TRUE (ggc_marked_p (dummy_unittesting_tree));
|
||||||
}
|
}
|
||||||
|
|
10
gcc/ggc.h
10
gcc/ggc.h
|
@@ -262,10 +262,12 @@ extern const char *ggc_alloc_string (const char *contents, int length
|
||||||
#define ggc_strdup(S) ggc_alloc_string ((S), -1 MEM_STAT_INFO)
|
#define ggc_strdup(S) ggc_alloc_string ((S), -1 MEM_STAT_INFO)
|
||||||
|
|
||||||
/* Invoke the collector. Garbage collection occurs only when this
|
/* Invoke the collector. Garbage collection occurs only when this
|
||||||
function is called, not during allocations.
|
function is called, not during allocations. */
|
||||||
Unless FORCE_COLLECT, an internal heuristic decides whether to actually
|
enum ggc_collect {
|
||||||
collect. */
|
GGC_COLLECT_HEURISTIC,
|
||||||
extern void ggc_collect (bool force_collect = false);
|
GGC_COLLECT_FORCE
|
||||||
|
};
|
||||||
|
extern void ggc_collect (enum ggc_collect mode = GGC_COLLECT_HEURISTIC);
|
||||||
|
|
||||||
/* Return unused memory pages to the system. */
|
/* Return unused memory pages to the system. */
|
||||||
extern void ggc_trim (void);
|
extern void ggc_trim (void);
|
||||||
|
|
|
@@ -1861,7 +1861,7 @@ test_loading_labels ()
|
||||||
|
|
||||||
/* Ensure that label names read from a dump are GC-managed
|
/* Ensure that label names read from a dump are GC-managed
|
||||||
and are found through the insn. */
|
and are found through the insn. */
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
ASSERT_TRUE (ggc_marked_p (insn_200));
|
ASSERT_TRUE (ggc_marked_p (insn_200));
|
||||||
ASSERT_TRUE (ggc_marked_p (LABEL_NAME (insn_200)));
|
ASSERT_TRUE (ggc_marked_p (LABEL_NAME (insn_200)));
|
||||||
}
|
}
|
||||||
|
|
|
@@ -128,7 +128,7 @@ selftest::run_tests ()
|
||||||
issues. For example, if any GC-managed items have buggy (or missing)
|
issues. For example, if any GC-managed items have buggy (or missing)
|
||||||
finalizers, this last collection will ensure that things that were
|
finalizers, this last collection will ensure that things that were
|
||||||
failed to be finalized can be detected by valgrind. */
|
failed to be finalized can be detected by valgrind. */
|
||||||
ggc_collect (true);
|
ggc_collect (GGC_COLLECT_FORCE);
|
||||||
|
|
||||||
/* Finished running tests; the test_runner dtor will print a summary. */
|
/* Finished running tests; the test_runner dtor will print a summary. */
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in New Issue