author	Will Deacon <will.deacon@arm.com>	2011-07-19 17:25:55 -0400
committer	Will Deacon <will.deacon@arm.com>	2011-08-31 05:18:00 -0400
commit	c691bb6249b25104fcb6dad31bd772c139ce4a50 (patch)
tree	0bcff4d0b191d91ab6738aa3a3120a3cc5425b17 /arch/arm
parent	25e29c7c0f4fcbe911b77a69f015bd6424cedcd0 (diff)
ARM: perf: index ARMv7 event counters starting from zero
The current ARMv7 PMU backend indexes event counters from two, with
index zero being reserved and index one being used to represent the
cycle counter. This patch tidies up the code by indexing from one
instead (with zero for the cycle counter). This allows us to remove
many of the accessor macros along with the counter enumeration and
makes the code much more readable.

Acked-by: Jamie Iles <jamie@jamieiles.com>
Reviewed-by: Jean Pihet <j-pihet@ti.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
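As an illustration (a standalone userspace sketch, not part of the patch):
the program below mimics the new index scheme, with a hypothetical
NUM_EVENTS constant standing in for armpmu->num_events. Index 0 is the
cycle counter and indices 1..num_events-1 are the event counters;
ARMV7_IDX_TO_COUNTER() recovers the hardware counter number. Note how the
cycle counter wraps around to bit 31, the position the old ARMV7_CCNT
macros hard-coded, which is what lets the enable/disable paths in the diff
use a single BIT(counter) expression for every counter, cycle counter
included.

        /* Sketch of the new perf-index -> hardware-counter mapping. */
        #include <stdio.h>

        #define NUM_EVENTS              5  /* stand-in for armpmu->num_events */
        #define ARMV7_IDX_CYCLE_COUNTER 0
        #define ARMV7_IDX_COUNTER0      1
        #define ARMV7_IDX_COUNTER_LAST  (ARMV7_IDX_CYCLE_COUNTER + NUM_EVENTS - 1)

        #define ARMV7_MAX_COUNTERS      32
        #define ARMV7_COUNTER_MASK      (ARMV7_MAX_COUNTERS - 1)

        #define ARMV7_IDX_TO_COUNTER(x) \
                (((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)

        int main(void)
        {
                int idx;

                /* (0 - 1) & 0x1f == 31: the cycle counter lands on the CCNT bit. */
                printf("idx %d -> counter %d (cycle counter)\n",
                       ARMV7_IDX_CYCLE_COUNTER,
                       ARMV7_IDX_TO_COUNTER(ARMV7_IDX_CYCLE_COUNTER));

                /* Event counters map to 0..N-1, as the select/evtsel regs expect. */
                for (idx = ARMV7_IDX_COUNTER0; idx <= ARMV7_IDX_COUNTER_LAST; idx++)
                        printf("idx %d -> counter %d\n",
                               idx, ARMV7_IDX_TO_COUNTER(idx));

                return 0;
        }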
Diffstat (limited to 'arch/arm')
-rw-r--r--	arch/arm/kernel/perf_event_v7.c	239
1 file changed, 88 insertions(+), 151 deletions(-)
diff --git a/arch/arm/kernel/perf_event_v7.c b/arch/arm/kernel/perf_event_v7.c
index e39bc8935cbe..0934c8214304 100644
--- a/arch/arm/kernel/perf_event_v7.c
+++ b/arch/arm/kernel/perf_event_v7.c
@@ -676,23 +676,24 @@ static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
 };
 
 /*
- * Perf Events counters
+ * Perf Events' indices
  */
-enum armv7_counters {
-        ARMV7_CYCLE_COUNTER     = 1,    /* Cycle counter */
-        ARMV7_COUNTER0          = 2,    /* First event counter */
-};
+#define ARMV7_IDX_CYCLE_COUNTER 0
+#define ARMV7_IDX_COUNTER0      1
+#define ARMV7_IDX_COUNTER_LAST  (ARMV7_IDX_CYCLE_COUNTER + armpmu->num_events - 1)
+
+#define ARMV7_MAX_COUNTERS      32
+#define ARMV7_COUNTER_MASK      (ARMV7_MAX_COUNTERS - 1)
 
 /*
- * The cycle counter is ARMV7_CYCLE_COUNTER.
- * The first event counter is ARMV7_COUNTER0.
- * The last event counter is (ARMV7_COUNTER0 + armpmu->num_events - 1).
+ * ARMv7 low level PMNC access
  */
-#define ARMV7_COUNTER_LAST      (ARMV7_COUNTER0 + armpmu->num_events - 1)
 
 /*
- * ARMv7 low level PMNC access
+ * Perf Event to low level counters mapping
  */
+#define ARMV7_IDX_TO_COUNTER(x) \
+        (((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)
 
 /*
  * Per-CPU PMNC: config reg
@@ -708,53 +709,13 @@ enum armv7_counters {
 #define ARMV7_PMNC_MASK         0x3f    /* Mask for writable bits */
 
 /*
- * Available counters
- */
-#define ARMV7_CNT0              0       /* First event counter */
-#define ARMV7_CCNT              31      /* Cycle counter */
-
-/* Perf Event to low level counters mapping */
-#define ARMV7_EVENT_CNT_TO_CNTx (ARMV7_COUNTER0 - ARMV7_CNT0)
-
-/*
- * CNTENS: counters enable reg
- */
-#define ARMV7_CNTENS_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_CNTENS_C          (1 << ARMV7_CCNT)
-
-/*
- * CNTENC: counters disable reg
- */
-#define ARMV7_CNTENC_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_CNTENC_C          (1 << ARMV7_CCNT)
-
-/*
- * INTENS: counters overflow interrupt enable reg
- */
-#define ARMV7_INTENS_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_INTENS_C          (1 << ARMV7_CCNT)
-
-/*
- * INTENC: counters overflow interrupt disable reg
- */
-#define ARMV7_INTENC_P(idx)     (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_INTENC_C          (1 << ARMV7_CCNT)
-
-/*
  * EVTSEL: Event selection reg
  */
 #define ARMV7_EVTSEL_MASK       0xff    /* Mask for writable bits */
 
 /*
- * SELECT: Counter selection reg
- */
-#define ARMV7_SELECT_MASK       0x1f    /* Mask for writable bits */
-
-/*
  * FLAG: counters overflow flag status reg
  */
-#define ARMV7_FLAG_P(idx)       (1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_FLAG_C            (1 << ARMV7_CCNT)
 #define ARMV7_FLAG_MASK         0xffffffff      /* Mask for writable bits */
 #define ARMV7_OVERFLOWED_MASK   ARMV7_FLAG_MASK
 
@@ -777,34 +738,39 @@ static inline int armv7_pmnc_has_overflowed(u32 pmnc)
         return pmnc & ARMV7_OVERFLOWED_MASK;
 }
 
-static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc,
-                                        enum armv7_counters counter)
+static inline int armv7_pmnc_counter_valid(int idx)
+{
+        return idx >= ARMV7_IDX_CYCLE_COUNTER && idx <= ARMV7_IDX_COUNTER_LAST;
+}
+
+static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
 {
         int ret = 0;
+        u32 counter;
 
-        if (counter == ARMV7_CYCLE_COUNTER)
-                ret = pmnc & ARMV7_FLAG_C;
-        else if ((counter >= ARMV7_COUNTER0) && (counter <= ARMV7_COUNTER_LAST))
-                ret = pmnc & ARMV7_FLAG_P(counter);
-        else
+        if (!armv7_pmnc_counter_valid(idx)) {
                 pr_err("CPU%u checking wrong counter %d overflow status\n",
-                        smp_processor_id(), counter);
+                        smp_processor_id(), idx);
+        } else {
+                counter = ARMV7_IDX_TO_COUNTER(idx);
+                ret = pmnc & BIT(counter);
+        }
 
         return ret;
 }
 
 static inline int armv7_pmnc_select_counter(int idx)
 {
-        u32 val;
+        u32 counter;
 
-        if ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST)) {
-                pr_err("CPU%u selecting wrong PMNC counter"
-                        " %d\n", smp_processor_id(), idx);
-                return -1;
+        if (!armv7_pmnc_counter_valid(idx)) {
+                pr_err("CPU%u selecting wrong PMNC counter %d\n",
+                        smp_processor_id(), idx);
+                return -EINVAL;
         }
 
-        val = (idx - ARMV7_EVENT_CNT_TO_CNTx) & ARMV7_SELECT_MASK;
-        asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (val));
+        counter = ARMV7_IDX_TO_COUNTER(idx);
+        asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
         isb();
 
         return idx;
@@ -814,30 +780,26 @@ static inline u32 armv7pmu_read_counter(int idx)
 {
         u32 value = 0;
 
-        if (idx == ARMV7_CYCLE_COUNTER)
-                asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
-        else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
-                if (armv7_pmnc_select_counter(idx) == idx)
-                        asm volatile("mrc p15, 0, %0, c9, c13, 2"
-                                : "=r" (value));
-        } else
+        if (!armv7_pmnc_counter_valid(idx))
                 pr_err("CPU%u reading wrong counter %d\n",
                         smp_processor_id(), idx);
+        else if (idx == ARMV7_IDX_CYCLE_COUNTER)
+                asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
+        else if (armv7_pmnc_select_counter(idx) == idx)
+                asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));
 
         return value;
 }
 
 static inline void armv7pmu_write_counter(int idx, u32 value)
 {
-        if (idx == ARMV7_CYCLE_COUNTER)
-                asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
-        else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
-                if (armv7_pmnc_select_counter(idx) == idx)
-                        asm volatile("mcr p15, 0, %0, c9, c13, 2"
-                                : : "r" (value));
-        } else
+        if (!armv7_pmnc_counter_valid(idx))
                 pr_err("CPU%u writing wrong counter %d\n",
                         smp_processor_id(), idx);
+        else if (idx == ARMV7_IDX_CYCLE_COUNTER)
+                asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
+        else if (armv7_pmnc_select_counter(idx) == idx)
+                asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (value));
 }
 
 static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
@@ -850,86 +812,61 @@ static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
 
 static inline int armv7_pmnc_enable_counter(int idx)
 {
-        u32 val;
+        u32 counter;
 
-        if ((idx != ARMV7_CYCLE_COUNTER) &&
-            ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
-                pr_err("CPU%u enabling wrong PMNC counter"
-                        " %d\n", smp_processor_id(), idx);
-                return -1;
+        if (!armv7_pmnc_counter_valid(idx)) {
+                pr_err("CPU%u enabling wrong PMNC counter %d\n",
+                        smp_processor_id(), idx);
+                return -EINVAL;
         }
 
-        if (idx == ARMV7_CYCLE_COUNTER)
-                val = ARMV7_CNTENS_C;
-        else
-                val = ARMV7_CNTENS_P(idx);
-
-        asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (val));
-
+        counter = ARMV7_IDX_TO_COUNTER(idx);
+        asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
         return idx;
 }
 
 static inline int armv7_pmnc_disable_counter(int idx)
 {
-        u32 val;
+        u32 counter;
 
-
-        if ((idx != ARMV7_CYCLE_COUNTER) &&
-            ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
-                pr_err("CPU%u disabling wrong PMNC counter"
-                        " %d\n", smp_processor_id(), idx);
-                return -1;
+        if (!armv7_pmnc_counter_valid(idx)) {
+                pr_err("CPU%u disabling wrong PMNC counter %d\n",
+                        smp_processor_id(), idx);
+                return -EINVAL;
         }
 
-        if (idx == ARMV7_CYCLE_COUNTER)
-                val = ARMV7_CNTENC_C;
-        else
-                val = ARMV7_CNTENC_P(idx);
-
-        asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (val));
-
+        counter = ARMV7_IDX_TO_COUNTER(idx);
+        asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
         return idx;
 }
 
 static inline int armv7_pmnc_enable_intens(int idx)
 {
-        u32 val;
+        u32 counter;
 
-        if ((idx != ARMV7_CYCLE_COUNTER) &&
-            ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
-                pr_err("CPU%u enabling wrong PMNC counter"
-                        " interrupt enable %d\n", smp_processor_id(), idx);
-                return -1;
+        if (!armv7_pmnc_counter_valid(idx)) {
+                pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
+                        smp_processor_id(), idx);
+                return -EINVAL;
         }
 
-        if (idx == ARMV7_CYCLE_COUNTER)
-                val = ARMV7_INTENS_C;
-        else
-                val = ARMV7_INTENS_P(idx);
-
-        asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (val));
-
+        counter = ARMV7_IDX_TO_COUNTER(idx);
+        asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
         return idx;
 }
 
 static inline int armv7_pmnc_disable_intens(int idx)
 {
-        u32 val;
+        u32 counter;
 
-        if ((idx != ARMV7_CYCLE_COUNTER) &&
-            ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
-                pr_err("CPU%u disabling wrong PMNC counter"
-                        " interrupt enable %d\n", smp_processor_id(), idx);
-                return -1;
+        if (!armv7_pmnc_counter_valid(idx)) {
+                pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
+                        smp_processor_id(), idx);
+                return -EINVAL;
         }
 
-        if (idx == ARMV7_CYCLE_COUNTER)
-                val = ARMV7_INTENC_C;
-        else
-                val = ARMV7_INTENC_P(idx);
-
-        asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (val));
-
+        counter = ARMV7_IDX_TO_COUNTER(idx);
+        asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
         return idx;
 }
 
@@ -973,14 +910,14 @@ static void armv7_pmnc_dump_regs(void)
         asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
         printk(KERN_INFO "CCNT  =0x%08x\n", val);
 
-        for (cnt = ARMV7_COUNTER0; cnt < ARMV7_COUNTER_LAST; cnt++) {
+        for (cnt = ARMV7_IDX_COUNTER0; cnt <= ARMV7_IDX_COUNTER_LAST; cnt++) {
                 armv7_pmnc_select_counter(cnt);
                 asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
                 printk(KERN_INFO "CNT[%d] count =0x%08x\n",
-                        cnt-ARMV7_EVENT_CNT_TO_CNTx, val);
+                        ARMV7_IDX_TO_COUNTER(cnt), val);
                 asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
                 printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
-                        cnt-ARMV7_EVENT_CNT_TO_CNTx, val);
+                        ARMV7_IDX_TO_COUNTER(cnt), val);
         }
 }
 #endif
@@ -1004,7 +941,7 @@ static void armv7pmu_enable_event(struct hw_perf_event *hwc, int idx)
          * Set event (if destined for PMNx counters)
          * We don't need to set the event if it's a cycle count
          */
-        if (idx != ARMV7_CYCLE_COUNTER)
+        if (idx != ARMV7_IDX_CYCLE_COUNTER)
                 armv7_pmnc_write_evtsel(idx, hwc->config_base);
 
         /*
@@ -1069,7 +1006,7 @@ static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
         perf_sample_data_init(&data, 0);
 
         cpuc = &__get_cpu_var(cpu_hw_events);
-        for (idx = 0; idx <= armpmu->num_events; ++idx) {
+        for (idx = 0; idx < armpmu->num_events; ++idx) {
                 struct perf_event *event = cpuc->events[idx];
                 struct hw_perf_event *hwc;
 
@@ -1132,23 +1069,23 @@ static int armv7pmu_get_event_idx(struct cpu_hw_events *cpuc,
 
         /* Always place a cycle counter into the cycle counter. */
         if (event->config_base == ARMV7_PERFCTR_CPU_CYCLES) {
-                if (test_and_set_bit(ARMV7_CYCLE_COUNTER, cpuc->used_mask))
+                if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
                         return -EAGAIN;
 
-                return ARMV7_CYCLE_COUNTER;
-        } else {
-                /*
-                 * For anything other than a cycle counter, try and use
-                 * the events counters
-                 */
-                for (idx = ARMV7_COUNTER0; idx <= armpmu->num_events; ++idx) {
-                        if (!test_and_set_bit(idx, cpuc->used_mask))
-                                return idx;
-                }
+                return ARMV7_IDX_CYCLE_COUNTER;
+        }
 
-                /* The counters are all in use. */
-                return -EAGAIN;
+        /*
+         * For anything other than a cycle counter, try and use
+         * the events counters
+         */
+        for (idx = ARMV7_IDX_COUNTER0; idx < armpmu->num_events; ++idx) {
+                if (!test_and_set_bit(idx, cpuc->used_mask))
+                        return idx;
         }
+
+        /* The counters are all in use. */
+        return -EAGAIN;
 }
 
 static void armv7pmu_reset(void *info)
@@ -1156,7 +1093,7 @@ static void armv7pmu_reset(void *info)
         u32 idx, nb_cnt = armpmu->num_events;
 
         /* The counter and interrupt enable registers are unknown at reset. */
-        for (idx = 1; idx < nb_cnt; ++idx)
+        for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx)
                 armv7pmu_disable_event(NULL, idx);
 
         /* Initialize & Reset PMNC: C and P bits */