about summary refs log tree commit diff stats
path: root/tools/testing
diff options
context:
space:
mode:
authorCyril Bur <cyrilbur@gmail.com>2016-09-23 02:18:15 -0400
committerMichael Ellerman <mpe@ellerman.id.au>2016-10-04 05:10:12 -0400
commit2b4093790abdf0219f8ca192ecc4ecc63cdb5124 (patch)
treea3e438d0ae5b58047447032d0231af26044b466a /tools/testing
parentbe4a9f56666af94eccf7993661c1a62db033bff9 (diff)
selftests/powerpc: Move VMX stack frame macros to header file
Signed-off-by: Cyril Bur <cyrilbur@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Diffstat (limited to 'tools/testing')
-rw-r--r-- tools/testing/selftests/powerpc/math/vmx_asm.S | 85
-rw-r--r-- tools/testing/selftests/powerpc/vmx_asm.h      | 96
2 files changed, 97 insertions(+), 84 deletions(-)
diff --git a/tools/testing/selftests/powerpc/math/vmx_asm.S b/tools/testing/selftests/powerpc/math/vmx_asm.S
index 1b8c248b3ac1..fd74da488625 100644
--- a/tools/testing/selftests/powerpc/math/vmx_asm.S
+++ b/tools/testing/selftests/powerpc/math/vmx_asm.S
@@ -8,90 +8,7 @@
8 */ 8 */
9 9
10#include "../basic_asm.h" 10#include "../basic_asm.h"
11 11#include "../vmx_asm.h"
12# POS MUST BE 16 ALIGNED!
13#define PUSH_VMX(pos,reg) \
14 li reg,pos; \
15 stvx v20,reg,sp; \
16 addi reg,reg,16; \
17 stvx v21,reg,sp; \
18 addi reg,reg,16; \
19 stvx v22,reg,sp; \
20 addi reg,reg,16; \
21 stvx v23,reg,sp; \
22 addi reg,reg,16; \
23 stvx v24,reg,sp; \
24 addi reg,reg,16; \
25 stvx v25,reg,sp; \
26 addi reg,reg,16; \
27 stvx v26,reg,sp; \
28 addi reg,reg,16; \
29 stvx v27,reg,sp; \
30 addi reg,reg,16; \
31 stvx v28,reg,sp; \
32 addi reg,reg,16; \
33 stvx v29,reg,sp; \
34 addi reg,reg,16; \
35 stvx v30,reg,sp; \
36 addi reg,reg,16; \
37 stvx v31,reg,sp;
38
39# POS MUST BE 16 ALIGNED!
40#define POP_VMX(pos,reg) \
41 li reg,pos; \
42 lvx v20,reg,sp; \
43 addi reg,reg,16; \
44 lvx v21,reg,sp; \
45 addi reg,reg,16; \
46 lvx v22,reg,sp; \
47 addi reg,reg,16; \
48 lvx v23,reg,sp; \
49 addi reg,reg,16; \
50 lvx v24,reg,sp; \
51 addi reg,reg,16; \
52 lvx v25,reg,sp; \
53 addi reg,reg,16; \
54 lvx v26,reg,sp; \
55 addi reg,reg,16; \
56 lvx v27,reg,sp; \
57 addi reg,reg,16; \
58 lvx v28,reg,sp; \
59 addi reg,reg,16; \
60 lvx v29,reg,sp; \
61 addi reg,reg,16; \
62 lvx v30,reg,sp; \
63 addi reg,reg,16; \
64 lvx v31,reg,sp;
65
66# Carefull this will 'clobber' vmx (by design)
67# Don't call this from C
68FUNC_START(load_vmx)
69 li r5,0
70 lvx v20,r5,r3
71 addi r5,r5,16
72 lvx v21,r5,r3
73 addi r5,r5,16
74 lvx v22,r5,r3
75 addi r5,r5,16
76 lvx v23,r5,r3
77 addi r5,r5,16
78 lvx v24,r5,r3
79 addi r5,r5,16
80 lvx v25,r5,r3
81 addi r5,r5,16
82 lvx v26,r5,r3
83 addi r5,r5,16
84 lvx v27,r5,r3
85 addi r5,r5,16
86 lvx v28,r5,r3
87 addi r5,r5,16
88 lvx v29,r5,r3
89 addi r5,r5,16
90 lvx v30,r5,r3
91 addi r5,r5,16
92 lvx v31,r5,r3
93 blr
94FUNC_END(load_vmx)
95 12
96# Should be safe from C, only touches r4, r5 and v0,v1,v2 13# Should be safe from C, only touches r4, r5 and v0,v1,v2
97FUNC_START(check_vmx) 14FUNC_START(check_vmx)
diff --git a/tools/testing/selftests/powerpc/vmx_asm.h b/tools/testing/selftests/powerpc/vmx_asm.h
new file mode 100644
index 000000000000..2eaaeca9cf1d
--- /dev/null
+++ b/tools/testing/selftests/powerpc/vmx_asm.h
@@ -0,0 +1,96 @@
1/*
2 * Copyright 2015, Cyril Bur, IBM Corp.
3 *
4 * This program is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU General Public License
6 * as published by the Free Software Foundation; either version
7 * 2 of the License, or (at your option) any later version.
8 */
9
10#include "basic_asm.h"
11
12/* POS MUST BE 16 ALIGNED! */
13#define PUSH_VMX(pos,reg) \
14 li reg,pos; \
15 stvx v20,reg,%r1; \
16 addi reg,reg,16; \
17 stvx v21,reg,%r1; \
18 addi reg,reg,16; \
19 stvx v22,reg,%r1; \
20 addi reg,reg,16; \
21 stvx v23,reg,%r1; \
22 addi reg,reg,16; \
23 stvx v24,reg,%r1; \
24 addi reg,reg,16; \
25 stvx v25,reg,%r1; \
26 addi reg,reg,16; \
27 stvx v26,reg,%r1; \
28 addi reg,reg,16; \
29 stvx v27,reg,%r1; \
30 addi reg,reg,16; \
31 stvx v28,reg,%r1; \
32 addi reg,reg,16; \
33 stvx v29,reg,%r1; \
34 addi reg,reg,16; \
35 stvx v30,reg,%r1; \
36 addi reg,reg,16; \
37 stvx v31,reg,%r1;
38
39/* POS MUST BE 16 ALIGNED! */
40#define POP_VMX(pos,reg) \
41 li reg,pos; \
42 lvx v20,reg,%r1; \
43 addi reg,reg,16; \
44 lvx v21,reg,%r1; \
45 addi reg,reg,16; \
46 lvx v22,reg,%r1; \
47 addi reg,reg,16; \
48 lvx v23,reg,%r1; \
49 addi reg,reg,16; \
50 lvx v24,reg,%r1; \
51 addi reg,reg,16; \
52 lvx v25,reg,%r1; \
53 addi reg,reg,16; \
54 lvx v26,reg,%r1; \
55 addi reg,reg,16; \
56 lvx v27,reg,%r1; \
57 addi reg,reg,16; \
58 lvx v28,reg,%r1; \
59 addi reg,reg,16; \
60 lvx v29,reg,%r1; \
61 addi reg,reg,16; \
62 lvx v30,reg,%r1; \
63 addi reg,reg,16; \
64 lvx v31,reg,%r1;
65
66/*
67 * Careful this will 'clobber' vmx (by design)
68 * Don't call this from C
69 */
70FUNC_START(load_vmx)
71 li r5,0
72 lvx v20,r5,r3
73 addi r5,r5,16
74 lvx v21,r5,r3
75 addi r5,r5,16
76 lvx v22,r5,r3
77 addi r5,r5,16
78 lvx v23,r5,r3
79 addi r5,r5,16
80 lvx v24,r5,r3
81 addi r5,r5,16
82 lvx v25,r5,r3
83 addi r5,r5,16
84 lvx v26,r5,r3
85 addi r5,r5,16
86 lvx v27,r5,r3
87 addi r5,r5,16
88 lvx v28,r5,r3
89 addi r5,r5,16
90 lvx v29,r5,r3
91 addi r5,r5,16
92 lvx v30,r5,r3
93 addi r5,r5,16
94 lvx v31,r5,r3
95 blr
96FUNC_END(load_vmx)