@@ -339,6 +339,11 @@ int isa207_get_constraint(u64 event, unsigned long *maskp, unsigned long *valp)
 		value |= CNST_L1_QUAL_VAL(cache);
 	}
 
+	if (cpu_has_feature(CPU_FTR_ARCH_31)) {
+		mask |= CNST_RADIX_SCOPE_GROUP_MASK;
+		value |= CNST_RADIX_SCOPE_GROUP_VAL(event >> p10_EVENT_RADIX_SCOPE_QUAL_SHIFT);
+	}
+
 	if (is_event_marked(event)) {
 		mask |= CNST_SAMPLE_MASK;
 		value |= CNST_SAMPLE_VAL(event >> EVENT_SAMPLE_SHIFT);
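
The constraint side follows the same mask/value pattern used just above for the L1 qualifier: bit 9 of the event code is shifted down and re-encoded as constraint bit 21. A minimal user-space sketch of that packing, using copies of the macros added later in this patch (the main() harness is only illustrative):

	#include <stdint.h>
	#include <stdio.h>

	/* Copies of the definitions added by this patch, for illustration. */
	#define p10_EVENT_RADIX_SCOPE_QUAL_SHIFT	9
	#define CNST_RADIX_SCOPE_GROUP_VAL(v)		(((v) & 0x1ull) << 21)
	#define CNST_RADIX_SCOPE_GROUP_MASK		CNST_RADIX_SCOPE_GROUP_VAL(1)

	int main(void)
	{
		uint64_t event = 1ull << p10_EVENT_RADIX_SCOPE_QUAL_SHIFT;	/* radix_scope_qual = 1 */
		uint64_t mask = 0, value = 0;

		mask |= CNST_RADIX_SCOPE_GROUP_MASK;
		value |= CNST_RADIX_SCOPE_GROUP_VAL(event >> p10_EVENT_RADIX_SCOPE_QUAL_SHIFT);

		/* Prints mask=0x200000 value=0x200000, i.e. constraint bit 21. */
		printf("mask=%#llx value=%#llx\n",
		       (unsigned long long)mask, (unsigned long long)value);
		return 0;
	}
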
@@ -456,6 +461,13 @@ int isa207_compute_mmcr(u64 event[], int n_ev,
 			}
 		}
 
+		/* Set RADIX_SCOPE_QUAL bit */
+		if (cpu_has_feature(CPU_FTR_ARCH_31)) {
+			val = (event[i] >> p10_EVENT_RADIX_SCOPE_QUAL_SHIFT) &
+				p10_EVENT_RADIX_SCOPE_QUAL_MASK;
+			mmcr1 |= val << p10_MMCR1_RADIX_SCOPE_QUAL_SHIFT;
+		}
+
 		if (is_event_marked(event[i])) {
 			mmcra |= MMCRA_SAMPLE_ENABLE;
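
Reduced to its arithmetic, the MMCR1 update above takes bit 9 of the raw event and re-places it at bit 45 of the 64-bit MMCR1 image, which is MMCR1[18] in the ISA's MSB-0 numbering (see the comment hunk further down). A standalone sketch, with the three defines copied from this patch:

	#include <stdint.h>
	#include <assert.h>

	/* Copies of the definitions added by this patch, for illustration. */
	#define p10_EVENT_RADIX_SCOPE_QUAL_SHIFT	9
	#define p10_EVENT_RADIX_SCOPE_QUAL_MASK		0x1
	#define p10_MMCR1_RADIX_SCOPE_QUAL_SHIFT	45

	int main(void)
	{
		uint64_t event = 1ull << p10_EVENT_RADIX_SCOPE_QUAL_SHIFT;	/* radix_scope_qual requested */
		uint64_t mmcr1 = 0, val;

		val = (event >> p10_EVENT_RADIX_SCOPE_QUAL_SHIFT) &
			p10_EVENT_RADIX_SCOPE_QUAL_MASK;
		mmcr1 |= val << p10_MMCR1_RADIX_SCOPE_QUAL_SHIFT;

		assert(mmcr1 == (1ull << 45));	/* bit 45 == IBM bit 63 - 45 = 18 of MMCR1 */
		return 0;
	}
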
@@ -101,6 +101,9 @@
#define p10_EVENT_CACHE_SEL_MASK 0x3ull
#define p10_EVENT_MMCR3_MASK 0x7fffull
#define p10_EVENT_MMCR3_SHIFT 45
+#define p10_EVENT_RADIX_SCOPE_QUAL_SHIFT 9
+#define p10_EVENT_RADIX_SCOPE_QUAL_MASK 0x1
+#define p10_MMCR1_RADIX_SCOPE_QUAL_SHIFT 45
#define p10_EVENT_VALID_MASK \
((p10_SDAR_MODE_MASK << p10_SDAR_MODE_SHIFT | \
@@ -112,6 +115,7 @@
(p9_EVENT_COMBINE_MASK << p9_EVENT_COMBINE_SHIFT) | \
(p10_EVENT_MMCR3_MASK << p10_EVENT_MMCR3_SHIFT) | \
(EVENT_MARKED_MASK << EVENT_MARKED_SHIFT) | \
+ (p10_EVENT_RADIX_SCOPE_QUAL_MASK << p10_EVENT_RADIX_SCOPE_QUAL_SHIFT) | \
EVENT_LINUX_MASK | \
EVENT_PSEL_MASK))
/*
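
p10_EVENT_VALID_MASK is the set of config bits a Power10 raw event is allowed to use; folding the radix_scope term in keeps events that set bit 9 from being rejected as touching a reserved bit. The check that consumes the mask is outside this diff, so the sketch below only reproduces the screening logic with hypothetical toy masks:

	#include <stdint.h>
	#include <stdio.h>

	/* Stand-in for the reserved-bit screening that a valid mask feeds; the
	 * real call site is not part of this diff, and the masks here are toy
	 * values chosen only to show the effect of adding bit 9.
	 */
	static int uses_reserved_bits(uint64_t event, uint64_t valid_mask)
	{
		return (event & ~valid_mask) != 0;
	}

	int main(void)
	{
		uint64_t old_mask = 0xffull;			/* toy mask without bit 9 */
		uint64_t new_mask = old_mask | (0x1ull << 9);	/* toy mask with bit 9 */
		uint64_t event = 0xfe | (1ull << 9);		/* pmcxsel plus radix_scope_qual */

		printf("without bit 9: rejected=%d\n", uses_reserved_bits(event, old_mask));	/* 1 */
		printf("with bit 9:    rejected=%d\n", uses_reserved_bits(event, new_mask));	/* 0 */
		return 0;
	}
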
@@ -125,9 +129,9 @@
  *
  *        28        24        20        16        12         8         4         0
  * | - - - - | - - - - | - - - - | - - - - | - - - - | - - - - | - - - - | - - - - |
- *               [ ] |   [ ]     [ sample ]  [     ]   [6] [5]   [4] [3]   [2] [1]
- *                |  |    |                     |
- *      BHRB IFM -*  |    |                     |     Count of events for each PMC.
+ *               [ ] |   [ ] |   [ sample ]  [     ]   [6] [5]   [4] [3]   [2] [1]
+ *                |  |    |  |                  |
+ *      BHRB IFM -*  |    |  |*radix_scope      |     Count of events for each PMC.
  *              EBB -*    |                     |     p1, p2, p3, p4, p5, p6.
  *      L1 I/D qualifier -*                     |
  *                     nc - number of counters -*
@@ -165,6 +169,9 @@
#define CNST_L2L3_GROUP_VAL(v) (((v) & 0x1full) << 55)
#define CNST_L2L3_GROUP_MASK CNST_L2L3_GROUP_VAL(0x1f)
+#define CNST_RADIX_SCOPE_GROUP_VAL(v) (((v) & 0x1ull) << 21)
+#define CNST_RADIX_SCOPE_GROUP_MASK CNST_RADIX_SCOPE_GROUP_VAL(1)
+
/*
* For NC we are counting up to 4 events. This requires three bits, and we need
* the fifth event to overflow and set the 4th bit. To achieve that we bias the
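
Because MMCR1 carries a single radix_scope_qual bit for the whole PMU (MMCR1[18], per the comment hunk below), events programmed together presumably have to agree on it, and the one-bit GROUP constraint above is what enforces that. A simplified sketch of the idea; the in-kernel constraint accumulation is more involved, and values_compatible() is invented for this illustration:

	#include <stdint.h>
	#include <stdio.h>

	/* Copies of the constraint macros above, for illustration. */
	#define CNST_RADIX_SCOPE_GROUP_VAL(v)	(((v) & 0x1ull) << 21)
	#define CNST_RADIX_SCOPE_GROUP_MASK	CNST_RADIX_SCOPE_GROUP_VAL(1)

	/* Invented helper: a "group" style constraint means every event asserts
	 * a value under the same mask, so two events can only be scheduled
	 * together if they agree on the masked bits.
	 */
	static int values_compatible(uint64_t a, uint64_t b, uint64_t mask)
	{
		return (a & mask) == (b & mask);
	}

	int main(void)
	{
		uint64_t scoped   = CNST_RADIX_SCOPE_GROUP_VAL(1);	/* radix_scope_qual = 1 */
		uint64_t unscoped = CNST_RADIX_SCOPE_GROUP_VAL(0);	/* radix_scope_qual = 0 */

		/* Prints 0: the two events conflict on MMCR1[18] and cannot share a group. */
		printf("%d\n", values_compatible(scoped, unscoped, CNST_RADIX_SCOPE_GROUP_MASK));
		return 0;
	}
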
@@ -23,10 +23,10 @@
  *
  *        28        24        20        16        12         8         4         0
  * | - - - - | - - - - | - - - - | - - - - | - - - - | - - - - | - - - - | - - - - |
- *    [ ]   [ sample ]   [ ] [ ]   [ pmc ]   [unit ]   [ ]   m         [ pmcxsel ]
- *     |        |         |   |                         |    |
- *     |        |         |   |                         |    *- mark
- *     |        |         |   *- L1/L2/L3 cache_sel     |
+ *    [ ]   [ sample ]   [ ] [ ]   [ pmc ]   [unit ]   [ ] | m         [ pmcxsel ]
+ *     |        |         |   |                         |  | |
+ *     |        |         |   |                         |  | *- mark
+ *     |        |         |   *- L1/L2/L3 cache_sel     |  |*-radix_scope_qual
  *     |        |         sdar_mode                     |
  *     |        *- sampling mode for marked events      *- combine
  *     |
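
To make the field boundaries in the diagram concrete, here is a small decoder for the low word of a Power10 raw event. The struct and function names are invented for this sketch, and the unit/pmc widths are read off the diagram rather than taken from the headers:

	#include <stdint.h>
	#include <stdio.h>

	/* Invented names, illustration only: pull a few fields out of the low
	 * word of a raw Power10 event, following the diagram above.
	 */
	struct p10_event_fields {
		unsigned int pmcxsel;			/* bits 0-7   */
		unsigned int mark;			/* bit  8     */
		unsigned int radix_scope_qual;		/* bit  9     */
		unsigned int unit;			/* bits 12-15 */
		unsigned int pmc;			/* bits 16-19 */
	};

	static struct p10_event_fields p10_decode(uint64_t config)
	{
		struct p10_event_fields f = {
			.pmcxsel          = config & 0xff,
			.mark             = (config >> 8) & 0x1,
			.radix_scope_qual = (config >> 9) & 0x1,
			.unit             = (config >> 12) & 0xf,
			.pmc              = (config >> 16) & 0xf,
		};
		return f;
	}

	int main(void)
	{
		/* 0x2fe: pmcxsel = 0xfe with radix_scope_qual set (made-up code). */
		struct p10_event_fields f = p10_decode(0x2fe);

		printf("pmcxsel=%#x mark=%u radix_scope_qual=%u\n",
		       f.pmcxsel, f.mark, f.radix_scope_qual);
		return 0;
	}
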
@@ -59,6 +59,7 @@
*
* MMCR1[16] = cache_sel[0]
* MMCR1[17] = cache_sel[1]
+ * MMCR1[18] = radix_scope_qual
*
* if mark:
* MMCRA[63] = 1 (SAMPLE_ENABLE)
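
The comment uses the ISA's MSB-0 register diagrams while the code uses ordinary left shifts; for a 64-bit SPR the two are related by shift = 63 - bit, so MMCR1[18] is exactly the new p10_MMCR1_RADIX_SCOPE_QUAL_SHIFT of 45. A hypothetical helper spelling that out:

	#include <assert.h>

	/* Hypothetical helper, illustration only: the ISA numbers SPR bits from
	 * the most significant end, so bit n of a 64-bit register is a left
	 * shift of 63 - n in C.
	 */
	static unsigned int ibm_bit_to_shift(unsigned int ibm_bit)
	{
		return 63 - ibm_bit;
	}

	int main(void)
	{
		assert(ibm_bit_to_shift(18) == 45);	/* MMCR1[18] <-> p10_MMCR1_RADIX_SCOPE_QUAL_SHIFT */
		assert(ibm_bit_to_shift(16) == 47);	/* MMCR1[16] <-> cache_sel[0], shift 47 */
		return 0;
	}
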
@@ -175,6 +176,7 @@ PMU_FORMAT_ATTR(src_sel, "config:45-46");
PMU_FORMAT_ATTR(invert_bit, "config:47");
PMU_FORMAT_ATTR(src_mask, "config:48-53");
PMU_FORMAT_ATTR(src_match, "config:54-59");
+PMU_FORMAT_ATTR(radix_scope, "config:9");
static struct attribute *power10_pmu_format_attr[] = {
&format_attr_event.attr,
@@ -194,6 +196,7 @@ static struct attribute *power10_pmu_format_attr[] = {
&format_attr_invert_bit.attr,
&format_attr_src_mask.attr,
&format_attr_src_match.attr,
+ &format_attr_radix_scope.attr,
NULL,
};
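
PMU_FORMAT_ATTR(radix_scope, "config:9") publishes the new bit as a named field, typically under /sys/bus/event_source/devices/cpu/format/ on Power10, so perf's event parser can set it from the command line (e.g. perf stat -e 'cpu/event=0x...,radix_scope=1/'). The helper below is only an illustration of the resulting config encoding, not a perf API:

	#include <stdint.h>
	#include <stdio.h>

	/* Illustrative only: set the bit that the "radix_scope" format field
	 * (config:9) names. The perf tool does this for the user when the
	 * field is given in the event string.
	 */
	static uint64_t set_radix_scope(uint64_t config, unsigned int enable)
	{
		return (config & ~(1ull << 9)) | ((uint64_t)(enable & 1) << 9);
	}

	int main(void)
	{
		uint64_t config = 0xfe;		/* arbitrary example event code */

		/* Prints 0x2fe: bit 9 is now set and reaches MMCR1[18] through
		 * isa207_compute_mmcr() as shown above.
		 */
		printf("%#llx\n", (unsigned long long)set_radix_scope(config, 1));
		return 0;
	}
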