perf_events, x86: Fix event constraint masks
author Peter Zijlstra <a.p.zijlstra@chello.nl>
Wed, 27 Jan 2010 22:07:46 +0000 (23:07 +0100)
committer Ingo Molnar <mingo@elte.hu>
Fri, 29 Jan 2010 08:01:46 +0000 (09:01 +0100)
Since constraints are specified on the event number, not on the event
number and unit mask combined, shorten the constraint masks so that
we'll actually match something.
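
For illustration, a minimal standalone sketch (the mask values follow the
<asm/perf_event.h> definitions of this era; the event number and unit mask
are picked arbitrarily). A constraint carries only an event number, so
masking the configured event with the combined event+umask mask can never
reproduce that number once a non-zero unit mask is programmed:

#include <stdio.h>
#include <stdint.h>

/* Mask layout as in <asm/perf_event.h>; reproduced here for illustration. */
#define INTEL_ARCH_EVTSEL_MASK  0x000000FFULL   /* event select, bits 0-7  */
#define INTEL_ARCH_UNIT_MASK    0x0000FF00ULL   /* unit mask,    bits 8-15 */
#define INTEL_ARCH_EVENT_MASK   (INTEL_ARCH_UNIT_MASK | INTEL_ARCH_EVTSEL_MASK)

int main(void)
{
        /* Constraints are specified on the event number alone, e.g. 0x48. */
        uint64_t constraint_code = 0x48;

        /* The same event may be counted with any unit mask, e.g. 0x02. */
        uint64_t config = 0x48 | (0x02 << 8);

        /* Old cmask (event + umask): the umask bits survive the AND, so
         * 0x0248 is compared against 0x48 and the constraint never hits. */
        printf("wide mask matches:   %d\n",
               (config & INTEL_ARCH_EVENT_MASK) == constraint_code);

        /* New cmask (event number only): the comparison now succeeds. */
        printf("evtsel mask matches: %d\n",
               (config & INTEL_ARCH_EVTSEL_MASK) == constraint_code);

        return 0;
}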

Signed-off-by: Peter Zijlstra <a.p.zijlstra@chello.nl>
Cc: Stephane Eranian <eranian@google.com>
LKML-Reference: <20100127221121.967610372@chello.nl>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
arch/x86/include/asm/perf_event.h
arch/x86/kernel/cpu/perf_event.c

arch/x86/include/asm/perf_event.h
index dbc0826..ff5ede1 100644
@@ -49,7 +49,7 @@
         INTEL_ARCH_INV_MASK| \
         INTEL_ARCH_EDGE_MASK|\
         INTEL_ARCH_UNIT_MASK|\
-        INTEL_ARCH_EVENT_MASK)
+        INTEL_ARCH_EVTSEL_MASK)
 
 #define ARCH_PERFMON_UNHALTED_CORE_CYCLES_SEL                0x3c
 #define ARCH_PERFMON_UNHALTED_CORE_CYCLES_UMASK                (0x00 << 8)
arch/x86/kernel/cpu/perf_event.c
index 07fa0c2..951213a 100644
@@ -100,12 +100,17 @@ struct cpu_hw_events {
        .weight = HWEIGHT64((u64)(n)),  \
 }
 
-#define INTEL_EVENT_CONSTRAINT(c, n)           EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVENT_MASK)
-#define FIXED_EVENT_CONSTRAINT(c, n)           EVENT_CONSTRAINT(c, n, INTEL_ARCH_FIXED_MASK)
+#define INTEL_EVENT_CONSTRAINT(c, n)   \
+       EVENT_CONSTRAINT(c, n, INTEL_ARCH_EVTSEL_MASK)
 
-#define EVENT_CONSTRAINT_END                   EVENT_CONSTRAINT(0, 0, 0)
+#define FIXED_EVENT_CONSTRAINT(c, n)   \
+       EVENT_CONSTRAINT(c, n, INTEL_ARCH_FIXED_MASK)
 
-#define for_each_event_constraint(e, c)                for ((e) = (c); (e)->cmask; (e)++)
+#define EVENT_CONSTRAINT_END           \
+       EVENT_CONSTRAINT(0, 0, 0)
+
+#define for_each_event_constraint(e, c)        \
+       for ((e) = (c); (e)->cmask; (e)++)
 
 /*
  * struct x86_pmu - generic x86 pmu
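
For context, here is a standalone sketch of how ->cmask ends up being used
when a constraint table is walked with for_each_event_constraint(); the
struct layout and the example table below are simplified and hypothetical,
not the kernel's own definitions:

#include <stdio.h>
#include <stdint.h>

/* Simplified stand-in for the kernel's struct event_constraint. */
struct event_constraint {
        uint64_t idxmsk;        /* counters the event may be scheduled on */
        uint64_t code;          /* event number the constraint applies to */
        uint64_t cmask;         /* config bits compared against ->code    */
};

#define INTEL_ARCH_EVTSEL_MASK  0x000000FFULL

#define INTEL_EVENT_CONSTRAINT(c, n) \
        { .idxmsk = (n), .code = (c), .cmask = INTEL_ARCH_EVTSEL_MASK }
#define EVENT_CONSTRAINT_END \
        { .idxmsk = 0, .code = 0, .cmask = 0 }

#define for_each_event_constraint(e, c) \
        for ((e) = (c); (e)->cmask; (e)++)

/* Hypothetical table: event 0x48 may only run on counters 0 and 1. */
static struct event_constraint constraints[] = {
        INTEL_EVENT_CONSTRAINT(0x48, 0x3),
        EVENT_CONSTRAINT_END
};

int main(void)
{
        uint64_t config = 0x48 | (0x02 << 8);   /* event 0x48, umask 0x02 */
        struct event_constraint *c;

        for_each_event_constraint(c, constraints) {
                /* With the old, wider cmask the umask bits would make this
                 * comparison fail; with INTEL_ARCH_EVTSEL_MASK it matches. */
                if ((config & c->cmask) == c->code) {
                        printf("constrained to counters 0x%llx\n",
                               (unsigned long long)c->idxmsk);
                        return 0;
                }
        }
        printf("unconstrained\n");
        return 0;
}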