@@ -1560,8 +1560,7 @@ contributes_to_priority_p (dep_t dep)
}
/* Compute the number of nondebug deps in list LIST for INSN.  */
-
-static int
+int
dep_list_size (rtx_insn *insn, sd_list_types_def list)
{
sd_iterator_def sd_it;
@@ -1571,6 +1570,11 @@ dep_list_size (rtx_insn *insn, sd_list_types_def list)
if (!MAY_HAVE_DEBUG_INSNS)
return sd_lists_size (insn, list);
+ /* TODO: We should split normal and debug insns into separate SD_LIST_*
+ sub-lists, and then we'll be able to use something like
+ sd_lists_size (insn, list & SD_LIST_NON_DEBUG)
+ instead of walking dependencies below.  */
+
FOR_EACH_DEP (insn, list, sd_it, dep)
{
if (DEBUG_INSN_P (DEP_CON (dep)))
@@ -4791,7 +4791,7 @@ find_inc (struct mem_inc_info *mii, bool backwards)
sd_iterator_def sd_it;
dep_t dep;
sd_list_types_def mem_deps = backwards ? SD_LIST_HARD_BACK : SD_LIST_FORW;
- int n_mem_deps = sd_lists_size (mii->mem_insn, mem_deps);
+ int n_mem_deps = dep_list_size (mii->mem_insn, mem_deps);
sd_it = sd_iterator_start (mii->mem_insn, mem_deps);
while (sd_iterator_cond (&sd_it, &dep))
@@ -4808,12 +4808,12 @@ find_inc (struct mem_inc_info *mii, bool backwards)
if (backwards)
{
inc_cand = pro;
- n_inc_deps = sd_lists_size (inc_cand, SD_LIST_BACK);
+ n_inc_deps = dep_list_size (inc_cand, SD_LIST_BACK);
}
else
{
inc_cand = con;
- n_inc_deps = sd_lists_size (inc_cand, SD_LIST_FORW);
+ n_inc_deps = dep_list_size (inc_cand, SD_LIST_FORW);
}
/* In the FOR_EACH_DEP loop below we will create additional n_inc_deps
@@ -1677,6 +1677,8 @@ extern void sd_copy_back_deps (rtx_insn *, rtx_insn *, bool);
extern void sd_delete_dep (sd_iterator_def);
extern void sd_debug_lists (rtx, sd_list_types_def);
+extern int dep_list_size (rtx_insn *, sd_list_types_def);
+
/* Macros and declarations for scheduling fusion. */
#define FUSION_MAX_PRIORITY (INT_MAX)
extern bool sched_fusion;
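
For readers less familiar with these lists: sd_lists_size () returns the raw
length of a dependence list, while dep_list_size () (now exported above) walks
the list and skips dependences whose consumer is a debug insn, so the counts
used in find_inc no longer vary with the presence of debug insns.  The fragment
below is a minimal standalone sketch of that filtered-count idea only; the
fake_insn / fake_dep types and the is_debug flag are invented stand-ins, not
the real rtx_insn, dep_t, or DEBUG_INSN_P machinery.

/* Standalone illustration (not GCC code): counting only non-debug
   dependences by walking the list, analogous to dep_list_size ()
   versus the raw sd_lists_size () length.  */

#include <stdio.h>

struct fake_insn
{
  int uid;
  int is_debug;	/* Stand-in for DEBUG_INSN_P.  */
};

struct fake_dep
{
  struct fake_insn *con;	/* Consumer of the dependence.  */
  struct fake_dep *next;
};

/* Raw list length, akin to sd_lists_size: counts every dependence,
   including those whose consumer is a debug insn.  */
static int
raw_list_size (struct fake_dep *list)
{
  int n = 0;
  for (struct fake_dep *d = list; d; d = d->next)
    n++;
  return n;
}

/* Filtered count, akin to dep_list_size: skip dependences whose
   consumer is a debug insn so debug insns do not affect the result.  */
static int
nondebug_list_size (struct fake_dep *list)
{
  int n = 0;
  for (struct fake_dep *d = list; d; d = d->next)
    if (!d->con->is_debug)
      n++;
  return n;
}

int
main (void)
{
  struct fake_insn i1 = { 1, 0 }, i2 = { 2, 1 }, i3 = { 3, 0 };
  struct fake_dep d3 = { &i3, NULL };
  struct fake_dep d2 = { &i2, &d3 };
  struct fake_dep d1 = { &i1, &d2 };

  /* Prints "raw: 3, nondebug: 2": the debug consumer is ignored.  */
  printf ("raw: %d, nondebug: %d\n",
	  raw_list_size (&d1), nondebug_list_size (&d1));
  return 0;
}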