Diffstat (limited to 'arch')
30 files changed, 4909 insertions, 883 deletions
diff --git a/arch/arm/Makefile b/arch/arm/Makefile
index 770da51242c4..b86e57ef146b 100644
--- a/arch/arm/Makefile
+++ b/arch/arm/Makefile
@@ -254,6 +254,7 @@ core-$(CONFIG_VFP) += arch/arm/vfp/
 # If we have a machine-specific directory, then include it in the build.
 core-y += arch/arm/kernel/ arch/arm/mm/ arch/arm/common/
 core-y += arch/arm/net/
+core-y += arch/arm/crypto/
 core-y += $(machdirs) $(platdirs)
 
 drivers-$(CONFIG_OPROFILE) += arch/arm/oprofile/
diff --git a/arch/arm/crypto/Makefile b/arch/arm/crypto/Makefile
new file mode 100644
index 000000000000..a2c83851bc90
--- /dev/null
+++ b/arch/arm/crypto/Makefile
@@ -0,0 +1,9 @@
1 | # | ||
2 | # Arch-specific CryptoAPI modules. | ||
3 | # | ||
4 | |||
5 | obj-$(CONFIG_CRYPTO_AES_ARM) += aes-arm.o | ||
6 | obj-$(CONFIG_CRYPTO_SHA1_ARM) += sha1-arm.o | ||
7 | |||
8 | aes-arm-y := aes-armv4.o aes_glue.o | ||
9 | sha1-arm-y := sha1-armv4-large.o sha1_glue.o | ||
diff --git a/arch/arm/crypto/aes-armv4.S b/arch/arm/crypto/aes-armv4.S
new file mode 100644
index 000000000000..e59b1d505d6c
--- /dev/null
+++ b/arch/arm/crypto/aes-armv4.S
@@ -0,0 +1,1112 @@
1 | #define __ARM_ARCH__ __LINUX_ARM_ARCH__ | ||
2 | @ ==================================================================== | ||
3 | @ Written by Andy Polyakov <appro@fy.chalmers.se> for the OpenSSL | ||
4 | @ project. The module is, however, dual licensed under OpenSSL and | ||
5 | @ CRYPTOGAMS licenses depending on where you obtain it. For further | ||
6 | @ details see http://www.openssl.org/~appro/cryptogams/. | ||
7 | @ ==================================================================== | ||
8 | |||
9 | @ AES for ARMv4 | ||
10 | |||
11 | @ January 2007. | ||
12 | @ | ||
13 | @ Code uses single 1K S-box and is >2 times faster than code generated | ||
14 | @ by gcc-3.4.1. This is thanks to unique feature of ARMv4 ISA, which | ||
15 | @ allows to merge logical or arithmetic operation with shift or rotate | ||
16 | @ in one instruction and emit combined result every cycle. The module | ||
17 | @ is endian-neutral. The performance is ~42 cycles/byte for 128-bit | ||
18 | @ key [on single-issue Xscale PXA250 core]. | ||
19 | |||
20 | @ May 2007. | ||
21 | @ | ||
22 | @ AES_set_[en|de]crypt_key is added. | ||
23 | |||
24 | @ July 2010. | ||
25 | @ | ||
26 | @ Rescheduling for dual-issue pipeline resulted in 12% improvement on | ||
27 | @ Cortex A8 core and ~25 cycles per byte processed with 128-bit key. | ||
28 | |||
29 | @ February 2011. | ||
30 | @ | ||
31 | @ Profiler-assisted and platform-specific optimization resulted in 16% | ||
32 | @ improvement on Cortex A8 core and ~21.5 cycles per byte. | ||
33 | |||
34 | @ A little glue here to select the correct code below for the ARM CPU | ||
35 | @ that is being targeted. | ||
36 | |||
37 | .text | ||
38 | .code 32 | ||
39 | |||
40 | .type AES_Te,%object | ||
41 | .align 5 | ||
42 | AES_Te: | ||
43 | .word 0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d | ||
44 | .word 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554 | ||
45 | .word 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d | ||
46 | .word 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a | ||
47 | .word 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87 | ||
48 | .word 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b | ||
49 | .word 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea | ||
50 | .word 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b | ||
51 | .word 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a | ||
52 | .word 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f | ||
53 | .word 0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108 | ||
54 | .word 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f | ||
55 | .word 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e | ||
56 | .word 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5 | ||
57 | .word 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d | ||
58 | .word 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f | ||
59 | .word 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e | ||
60 | .word 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb | ||
61 | .word 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce | ||
62 | .word 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497 | ||
63 | .word 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c | ||
64 | .word 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed | ||
65 | .word 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b | ||
66 | .word 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a | ||
67 | .word 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16 | ||
68 | .word 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594 | ||
69 | .word 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81 | ||
70 | .word 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3 | ||
71 | .word 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a | ||
72 | .word 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504 | ||
73 | .word 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163 | ||
74 | .word 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d | ||
75 | .word 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f | ||
76 | .word 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739 | ||
77 | .word 0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47 | ||
78 | .word 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395 | ||
79 | .word 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f | ||
80 | .word 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883 | ||
81 | .word 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c | ||
82 | .word 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76 | ||
83 | .word 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e | ||
84 | .word 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4 | ||
85 | .word 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6 | ||
86 | .word 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b | ||
87 | .word 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7 | ||
88 | .word 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0 | ||
89 | .word 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25 | ||
90 | .word 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818 | ||
91 | .word 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72 | ||
92 | .word 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651 | ||
93 | .word 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21 | ||
94 | .word 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85 | ||
95 | .word 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa | ||
96 | .word 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12 | ||
97 | .word 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0 | ||
98 | .word 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9 | ||
99 | .word 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133 | ||
100 | .word 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7 | ||
101 | .word 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920 | ||
102 | .word 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a | ||
103 | .word 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17 | ||
104 | .word 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8 | ||
105 | .word 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11 | ||
106 | .word 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a | ||
107 | @ Te4[256] | ||
108 | .byte 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5 | ||
109 | .byte 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76 | ||
110 | .byte 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0 | ||
111 | .byte 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0 | ||
112 | .byte 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc | ||
113 | .byte 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15 | ||
114 | .byte 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a | ||
115 | .byte 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75 | ||
116 | .byte 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0 | ||
117 | .byte 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84 | ||
118 | .byte 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b | ||
119 | .byte 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf | ||
120 | .byte 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85 | ||
121 | .byte 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8 | ||
122 | .byte 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5 | ||
123 | .byte 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2 | ||
124 | .byte 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17 | ||
125 | .byte 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73 | ||
126 | .byte 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88 | ||
127 | .byte 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb | ||
128 | .byte 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c | ||
129 | .byte 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79 | ||
130 | .byte 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9 | ||
131 | .byte 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08 | ||
132 | .byte 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6 | ||
133 | .byte 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a | ||
134 | .byte 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e | ||
135 | .byte 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e | ||
136 | .byte 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94 | ||
137 | .byte 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf | ||
138 | .byte 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68 | ||
139 | .byte 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16 | ||
140 | @ rcon[] | ||
141 | .word 0x01000000, 0x02000000, 0x04000000, 0x08000000 | ||
142 | .word 0x10000000, 0x20000000, 0x40000000, 0x80000000 | ||
143 | .word 0x1B000000, 0x36000000, 0, 0, 0, 0, 0, 0 | ||
144 | .size AES_Te,.-AES_Te | ||
145 | |||
146 | @ void AES_encrypt(const unsigned char *in, unsigned char *out, | ||
147 | @ const AES_KEY *key) { | ||
148 | .global AES_encrypt | ||
149 | .type AES_encrypt,%function | ||
150 | .align 5 | ||
151 | AES_encrypt: | ||
152 | sub r3,pc,#8 @ AES_encrypt | ||
153 | stmdb sp!,{r1,r4-r12,lr} | ||
154 | mov r12,r0 @ inp | ||
155 | mov r11,r2 | ||
156 | sub r10,r3,#AES_encrypt-AES_Te @ Te | ||
157 | #if __ARM_ARCH__<7 | ||
158 | ldrb r0,[r12,#3] @ load input data in endian-neutral | ||
159 | ldrb r4,[r12,#2] @ manner... | ||
160 | ldrb r5,[r12,#1] | ||
161 | ldrb r6,[r12,#0] | ||
162 | orr r0,r0,r4,lsl#8 | ||
163 | ldrb r1,[r12,#7] | ||
164 | orr r0,r0,r5,lsl#16 | ||
165 | ldrb r4,[r12,#6] | ||
166 | orr r0,r0,r6,lsl#24 | ||
167 | ldrb r5,[r12,#5] | ||
168 | ldrb r6,[r12,#4] | ||
169 | orr r1,r1,r4,lsl#8 | ||
170 | ldrb r2,[r12,#11] | ||
171 | orr r1,r1,r5,lsl#16 | ||
172 | ldrb r4,[r12,#10] | ||
173 | orr r1,r1,r6,lsl#24 | ||
174 | ldrb r5,[r12,#9] | ||
175 | ldrb r6,[r12,#8] | ||
176 | orr r2,r2,r4,lsl#8 | ||
177 | ldrb r3,[r12,#15] | ||
178 | orr r2,r2,r5,lsl#16 | ||
179 | ldrb r4,[r12,#14] | ||
180 | orr r2,r2,r6,lsl#24 | ||
181 | ldrb r5,[r12,#13] | ||
182 | ldrb r6,[r12,#12] | ||
183 | orr r3,r3,r4,lsl#8 | ||
184 | orr r3,r3,r5,lsl#16 | ||
185 | orr r3,r3,r6,lsl#24 | ||
186 | #else | ||
187 | ldr r0,[r12,#0] | ||
188 | ldr r1,[r12,#4] | ||
189 | ldr r2,[r12,#8] | ||
190 | ldr r3,[r12,#12] | ||
191 | #ifdef __ARMEL__ | ||
192 | rev r0,r0 | ||
193 | rev r1,r1 | ||
194 | rev r2,r2 | ||
195 | rev r3,r3 | ||
196 | #endif | ||
197 | #endif | ||
198 | bl _armv4_AES_encrypt | ||
199 | |||
200 | ldr r12,[sp],#4 @ pop out | ||
201 | #if __ARM_ARCH__>=7 | ||
202 | #ifdef __ARMEL__ | ||
203 | rev r0,r0 | ||
204 | rev r1,r1 | ||
205 | rev r2,r2 | ||
206 | rev r3,r3 | ||
207 | #endif | ||
208 | str r0,[r12,#0] | ||
209 | str r1,[r12,#4] | ||
210 | str r2,[r12,#8] | ||
211 | str r3,[r12,#12] | ||
212 | #else | ||
213 | mov r4,r0,lsr#24 @ write output in endian-neutral | ||
214 | mov r5,r0,lsr#16 @ manner... | ||
215 | mov r6,r0,lsr#8 | ||
216 | strb r4,[r12,#0] | ||
217 | strb r5,[r12,#1] | ||
218 | mov r4,r1,lsr#24 | ||
219 | strb r6,[r12,#2] | ||
220 | mov r5,r1,lsr#16 | ||
221 | strb r0,[r12,#3] | ||
222 | mov r6,r1,lsr#8 | ||
223 | strb r4,[r12,#4] | ||
224 | strb r5,[r12,#5] | ||
225 | mov r4,r2,lsr#24 | ||
226 | strb r6,[r12,#6] | ||
227 | mov r5,r2,lsr#16 | ||
228 | strb r1,[r12,#7] | ||
229 | mov r6,r2,lsr#8 | ||
230 | strb r4,[r12,#8] | ||
231 | strb r5,[r12,#9] | ||
232 | mov r4,r3,lsr#24 | ||
233 | strb r6,[r12,#10] | ||
234 | mov r5,r3,lsr#16 | ||
235 | strb r2,[r12,#11] | ||
236 | mov r6,r3,lsr#8 | ||
237 | strb r4,[r12,#12] | ||
238 | strb r5,[r12,#13] | ||
239 | strb r6,[r12,#14] | ||
240 | strb r3,[r12,#15] | ||
241 | #endif | ||
242 | #if __ARM_ARCH__>=5 | ||
243 | ldmia sp!,{r4-r12,pc} | ||
244 | #else | ||
245 | ldmia sp!,{r4-r12,lr} | ||
246 | tst lr,#1 | ||
247 | moveq pc,lr @ be binary compatible with V4, yet | ||
248 | .word 0xe12fff1e @ interoperable with Thumb ISA:-) | ||
249 | #endif | ||
250 | .size AES_encrypt,.-AES_encrypt | ||
251 | |||
252 | .type _armv4_AES_encrypt,%function | ||
253 | .align 2 | ||
254 | _armv4_AES_encrypt: | ||
255 | str lr,[sp,#-4]! @ push lr | ||
256 | ldmia r11!,{r4-r7} | ||
257 | eor r0,r0,r4 | ||
258 | ldr r12,[r11,#240-16] | ||
259 | eor r1,r1,r5 | ||
260 | eor r2,r2,r6 | ||
261 | eor r3,r3,r7 | ||
262 | sub r12,r12,#1 | ||
263 | mov lr,#255 | ||
264 | |||
265 | and r7,lr,r0 | ||
266 | and r8,lr,r0,lsr#8 | ||
267 | and r9,lr,r0,lsr#16 | ||
268 | mov r0,r0,lsr#24 | ||
269 | .Lenc_loop: | ||
270 | ldr r4,[r10,r7,lsl#2] @ Te3[s0>>0] | ||
271 | and r7,lr,r1,lsr#16 @ i0 | ||
272 | ldr r5,[r10,r8,lsl#2] @ Te2[s0>>8] | ||
273 | and r8,lr,r1 | ||
274 | ldr r6,[r10,r9,lsl#2] @ Te1[s0>>16] | ||
275 | and r9,lr,r1,lsr#8 | ||
276 | ldr r0,[r10,r0,lsl#2] @ Te0[s0>>24] | ||
277 | mov r1,r1,lsr#24 | ||
278 | |||
279 | ldr r7,[r10,r7,lsl#2] @ Te1[s1>>16] | ||
280 | ldr r8,[r10,r8,lsl#2] @ Te3[s1>>0] | ||
281 | ldr r9,[r10,r9,lsl#2] @ Te2[s1>>8] | ||
282 | eor r0,r0,r7,ror#8 | ||
283 | ldr r1,[r10,r1,lsl#2] @ Te0[s1>>24] | ||
284 | and r7,lr,r2,lsr#8 @ i0 | ||
285 | eor r5,r5,r8,ror#8 | ||
286 | and r8,lr,r2,lsr#16 @ i1 | ||
287 | eor r6,r6,r9,ror#8 | ||
288 | and r9,lr,r2 | ||
289 | ldr r7,[r10,r7,lsl#2] @ Te2[s2>>8] | ||
290 | eor r1,r1,r4,ror#24 | ||
291 | ldr r8,[r10,r8,lsl#2] @ Te1[s2>>16] | ||
292 | mov r2,r2,lsr#24 | ||
293 | |||
294 | ldr r9,[r10,r9,lsl#2] @ Te3[s2>>0] | ||
295 | eor r0,r0,r7,ror#16 | ||
296 | ldr r2,[r10,r2,lsl#2] @ Te0[s2>>24] | ||
297 | and r7,lr,r3 @ i0 | ||
298 | eor r1,r1,r8,ror#8 | ||
299 | and r8,lr,r3,lsr#8 @ i1 | ||
300 | eor r6,r6,r9,ror#16 | ||
301 | and r9,lr,r3,lsr#16 @ i2 | ||
302 | ldr r7,[r10,r7,lsl#2] @ Te3[s3>>0] | ||
303 | eor r2,r2,r5,ror#16 | ||
304 | ldr r8,[r10,r8,lsl#2] @ Te2[s3>>8] | ||
305 | mov r3,r3,lsr#24 | ||
306 | |||
307 | ldr r9,[r10,r9,lsl#2] @ Te1[s3>>16] | ||
308 | eor r0,r0,r7,ror#24 | ||
309 | ldr r7,[r11],#16 | ||
310 | eor r1,r1,r8,ror#16 | ||
311 | ldr r3,[r10,r3,lsl#2] @ Te0[s3>>24] | ||
312 | eor r2,r2,r9,ror#8 | ||
313 | ldr r4,[r11,#-12] | ||
314 | eor r3,r3,r6,ror#8 | ||
315 | |||
316 | ldr r5,[r11,#-8] | ||
317 | eor r0,r0,r7 | ||
318 | ldr r6,[r11,#-4] | ||
319 | and r7,lr,r0 | ||
320 | eor r1,r1,r4 | ||
321 | and r8,lr,r0,lsr#8 | ||
322 | eor r2,r2,r5 | ||
323 | and r9,lr,r0,lsr#16 | ||
324 | eor r3,r3,r6 | ||
325 | mov r0,r0,lsr#24 | ||
326 | |||
327 | subs r12,r12,#1 | ||
328 | bne .Lenc_loop | ||
329 | |||
330 | add r10,r10,#2 | ||
331 | |||
332 | ldrb r4,[r10,r7,lsl#2] @ Te4[s0>>0] | ||
333 | and r7,lr,r1,lsr#16 @ i0 | ||
334 | ldrb r5,[r10,r8,lsl#2] @ Te4[s0>>8] | ||
335 | and r8,lr,r1 | ||
336 | ldrb r6,[r10,r9,lsl#2] @ Te4[s0>>16] | ||
337 | and r9,lr,r1,lsr#8 | ||
338 | ldrb r0,[r10,r0,lsl#2] @ Te4[s0>>24] | ||
339 | mov r1,r1,lsr#24 | ||
340 | |||
341 | ldrb r7,[r10,r7,lsl#2] @ Te4[s1>>16] | ||
342 | ldrb r8,[r10,r8,lsl#2] @ Te4[s1>>0] | ||
343 | ldrb r9,[r10,r9,lsl#2] @ Te4[s1>>8] | ||
344 | eor r0,r7,r0,lsl#8 | ||
345 | ldrb r1,[r10,r1,lsl#2] @ Te4[s1>>24] | ||
346 | and r7,lr,r2,lsr#8 @ i0 | ||
347 | eor r5,r8,r5,lsl#8 | ||
348 | and r8,lr,r2,lsr#16 @ i1 | ||
349 | eor r6,r9,r6,lsl#8 | ||
350 | and r9,lr,r2 | ||
351 | ldrb r7,[r10,r7,lsl#2] @ Te4[s2>>8] | ||
352 | eor r1,r4,r1,lsl#24 | ||
353 | ldrb r8,[r10,r8,lsl#2] @ Te4[s2>>16] | ||
354 | mov r2,r2,lsr#24 | ||
355 | |||
356 | ldrb r9,[r10,r9,lsl#2] @ Te4[s2>>0] | ||
357 | eor r0,r7,r0,lsl#8 | ||
358 | ldrb r2,[r10,r2,lsl#2] @ Te4[s2>>24] | ||
359 | and r7,lr,r3 @ i0 | ||
360 | eor r1,r1,r8,lsl#16 | ||
361 | and r8,lr,r3,lsr#8 @ i1 | ||
362 | eor r6,r9,r6,lsl#8 | ||
363 | and r9,lr,r3,lsr#16 @ i2 | ||
364 | ldrb r7,[r10,r7,lsl#2] @ Te4[s3>>0] | ||
365 | eor r2,r5,r2,lsl#24 | ||
366 | ldrb r8,[r10,r8,lsl#2] @ Te4[s3>>8] | ||
367 | mov r3,r3,lsr#24 | ||
368 | |||
369 | ldrb r9,[r10,r9,lsl#2] @ Te4[s3>>16] | ||
370 | eor r0,r7,r0,lsl#8 | ||
371 | ldr r7,[r11,#0] | ||
372 | ldrb r3,[r10,r3,lsl#2] @ Te4[s3>>24] | ||
373 | eor r1,r1,r8,lsl#8 | ||
374 | ldr r4,[r11,#4] | ||
375 | eor r2,r2,r9,lsl#16 | ||
376 | ldr r5,[r11,#8] | ||
377 | eor r3,r6,r3,lsl#24 | ||
378 | ldr r6,[r11,#12] | ||
379 | |||
380 | eor r0,r0,r7 | ||
381 | eor r1,r1,r4 | ||
382 | eor r2,r2,r5 | ||
383 | eor r3,r3,r6 | ||
384 | |||
385 | sub r10,r10,#2 | ||
386 | ldr pc,[sp],#4 @ pop and return | ||
387 | .size _armv4_AES_encrypt,.-_armv4_AES_encrypt | ||
388 | |||
389 | .global private_AES_set_encrypt_key | ||
390 | .type private_AES_set_encrypt_key,%function | ||
391 | .align 5 | ||
392 | private_AES_set_encrypt_key: | ||
393 | _armv4_AES_set_encrypt_key: | ||
394 | sub r3,pc,#8 @ AES_set_encrypt_key | ||
395 | teq r0,#0 | ||
396 | moveq r0,#-1 | ||
397 | beq .Labrt | ||
398 | teq r2,#0 | ||
399 | moveq r0,#-1 | ||
400 | beq .Labrt | ||
401 | |||
402 | teq r1,#128 | ||
403 | beq .Lok | ||
404 | teq r1,#192 | ||
405 | beq .Lok | ||
406 | teq r1,#256 | ||
407 | movne r0,#-1 | ||
408 | bne .Labrt | ||
409 | |||
410 | .Lok: stmdb sp!,{r4-r12,lr} | ||
411 | sub r10,r3,#_armv4_AES_set_encrypt_key-AES_Te-1024 @ Te4 | ||
412 | |||
413 | mov r12,r0 @ inp | ||
414 | mov lr,r1 @ bits | ||
415 | mov r11,r2 @ key | ||
416 | |||
417 | #if __ARM_ARCH__<7 | ||
418 | ldrb r0,[r12,#3] @ load input data in endian-neutral | ||
419 | ldrb r4,[r12,#2] @ manner... | ||
420 | ldrb r5,[r12,#1] | ||
421 | ldrb r6,[r12,#0] | ||
422 | orr r0,r0,r4,lsl#8 | ||
423 | ldrb r1,[r12,#7] | ||
424 | orr r0,r0,r5,lsl#16 | ||
425 | ldrb r4,[r12,#6] | ||
426 | orr r0,r0,r6,lsl#24 | ||
427 | ldrb r5,[r12,#5] | ||
428 | ldrb r6,[r12,#4] | ||
429 | orr r1,r1,r4,lsl#8 | ||
430 | ldrb r2,[r12,#11] | ||
431 | orr r1,r1,r5,lsl#16 | ||
432 | ldrb r4,[r12,#10] | ||
433 | orr r1,r1,r6,lsl#24 | ||
434 | ldrb r5,[r12,#9] | ||
435 | ldrb r6,[r12,#8] | ||
436 | orr r2,r2,r4,lsl#8 | ||
437 | ldrb r3,[r12,#15] | ||
438 | orr r2,r2,r5,lsl#16 | ||
439 | ldrb r4,[r12,#14] | ||
440 | orr r2,r2,r6,lsl#24 | ||
441 | ldrb r5,[r12,#13] | ||
442 | ldrb r6,[r12,#12] | ||
443 | orr r3,r3,r4,lsl#8 | ||
444 | str r0,[r11],#16 | ||
445 | orr r3,r3,r5,lsl#16 | ||
446 | str r1,[r11,#-12] | ||
447 | orr r3,r3,r6,lsl#24 | ||
448 | str r2,[r11,#-8] | ||
449 | str r3,[r11,#-4] | ||
450 | #else | ||
451 | ldr r0,[r12,#0] | ||
452 | ldr r1,[r12,#4] | ||
453 | ldr r2,[r12,#8] | ||
454 | ldr r3,[r12,#12] | ||
455 | #ifdef __ARMEL__ | ||
456 | rev r0,r0 | ||
457 | rev r1,r1 | ||
458 | rev r2,r2 | ||
459 | rev r3,r3 | ||
460 | #endif | ||
461 | str r0,[r11],#16 | ||
462 | str r1,[r11,#-12] | ||
463 | str r2,[r11,#-8] | ||
464 | str r3,[r11,#-4] | ||
465 | #endif | ||
466 | |||
467 | teq lr,#128 | ||
468 | bne .Lnot128 | ||
469 | mov r12,#10 | ||
470 | str r12,[r11,#240-16] | ||
471 | add r6,r10,#256 @ rcon | ||
472 | mov lr,#255 | ||
473 | |||
474 | .L128_loop: | ||
475 | and r5,lr,r3,lsr#24 | ||
476 | and r7,lr,r3,lsr#16 | ||
477 | ldrb r5,[r10,r5] | ||
478 | and r8,lr,r3,lsr#8 | ||
479 | ldrb r7,[r10,r7] | ||
480 | and r9,lr,r3 | ||
481 | ldrb r8,[r10,r8] | ||
482 | orr r5,r5,r7,lsl#24 | ||
483 | ldrb r9,[r10,r9] | ||
484 | orr r5,r5,r8,lsl#16 | ||
485 | ldr r4,[r6],#4 @ rcon[i++] | ||
486 | orr r5,r5,r9,lsl#8 | ||
487 | eor r5,r5,r4 | ||
488 | eor r0,r0,r5 @ rk[4]=rk[0]^... | ||
489 | eor r1,r1,r0 @ rk[5]=rk[1]^rk[4] | ||
490 | str r0,[r11],#16 | ||
491 | eor r2,r2,r1 @ rk[6]=rk[2]^rk[5] | ||
492 | str r1,[r11,#-12] | ||
493 | eor r3,r3,r2 @ rk[7]=rk[3]^rk[6] | ||
494 | str r2,[r11,#-8] | ||
495 | subs r12,r12,#1 | ||
496 | str r3,[r11,#-4] | ||
497 | bne .L128_loop | ||
498 | sub r2,r11,#176 | ||
499 | b .Ldone | ||
500 | |||
501 | .Lnot128: | ||
502 | #if __ARM_ARCH__<7 | ||
503 | ldrb r8,[r12,#19] | ||
504 | ldrb r4,[r12,#18] | ||
505 | ldrb r5,[r12,#17] | ||
506 | ldrb r6,[r12,#16] | ||
507 | orr r8,r8,r4,lsl#8 | ||
508 | ldrb r9,[r12,#23] | ||
509 | orr r8,r8,r5,lsl#16 | ||
510 | ldrb r4,[r12,#22] | ||
511 | orr r8,r8,r6,lsl#24 | ||
512 | ldrb r5,[r12,#21] | ||
513 | ldrb r6,[r12,#20] | ||
514 | orr r9,r9,r4,lsl#8 | ||
515 | orr r9,r9,r5,lsl#16 | ||
516 | str r8,[r11],#8 | ||
517 | orr r9,r9,r6,lsl#24 | ||
518 | str r9,[r11,#-4] | ||
519 | #else | ||
520 | ldr r8,[r12,#16] | ||
521 | ldr r9,[r12,#20] | ||
522 | #ifdef __ARMEL__ | ||
523 | rev r8,r8 | ||
524 | rev r9,r9 | ||
525 | #endif | ||
526 | str r8,[r11],#8 | ||
527 | str r9,[r11,#-4] | ||
528 | #endif | ||
529 | |||
530 | teq lr,#192 | ||
531 | bne .Lnot192 | ||
532 | mov r12,#12 | ||
533 | str r12,[r11,#240-24] | ||
534 | add r6,r10,#256 @ rcon | ||
535 | mov lr,#255 | ||
536 | mov r12,#8 | ||
537 | |||
538 | .L192_loop: | ||
539 | and r5,lr,r9,lsr#24 | ||
540 | and r7,lr,r9,lsr#16 | ||
541 | ldrb r5,[r10,r5] | ||
542 | and r8,lr,r9,lsr#8 | ||
543 | ldrb r7,[r10,r7] | ||
544 | and r9,lr,r9 | ||
545 | ldrb r8,[r10,r8] | ||
546 | orr r5,r5,r7,lsl#24 | ||
547 | ldrb r9,[r10,r9] | ||
548 | orr r5,r5,r8,lsl#16 | ||
549 | ldr r4,[r6],#4 @ rcon[i++] | ||
550 | orr r5,r5,r9,lsl#8 | ||
551 | eor r9,r5,r4 | ||
552 | eor r0,r0,r9 @ rk[6]=rk[0]^... | ||
553 | eor r1,r1,r0 @ rk[7]=rk[1]^rk[6] | ||
554 | str r0,[r11],#24 | ||
555 | eor r2,r2,r1 @ rk[8]=rk[2]^rk[7] | ||
556 | str r1,[r11,#-20] | ||
557 | eor r3,r3,r2 @ rk[9]=rk[3]^rk[8] | ||
558 | str r2,[r11,#-16] | ||
559 | subs r12,r12,#1 | ||
560 | str r3,[r11,#-12] | ||
561 | subeq r2,r11,#216 | ||
562 | beq .Ldone | ||
563 | |||
564 | ldr r7,[r11,#-32] | ||
565 | ldr r8,[r11,#-28] | ||
566 | eor r7,r7,r3 @ rk[10]=rk[4]^rk[9] | ||
567 | eor r9,r8,r7 @ rk[11]=rk[5]^rk[10] | ||
568 | str r7,[r11,#-8] | ||
569 | str r9,[r11,#-4] | ||
570 | b .L192_loop | ||
571 | |||
572 | .Lnot192: | ||
573 | #if __ARM_ARCH__<7 | ||
574 | ldrb r8,[r12,#27] | ||
575 | ldrb r4,[r12,#26] | ||
576 | ldrb r5,[r12,#25] | ||
577 | ldrb r6,[r12,#24] | ||
578 | orr r8,r8,r4,lsl#8 | ||
579 | ldrb r9,[r12,#31] | ||
580 | orr r8,r8,r5,lsl#16 | ||
581 | ldrb r4,[r12,#30] | ||
582 | orr r8,r8,r6,lsl#24 | ||
583 | ldrb r5,[r12,#29] | ||
584 | ldrb r6,[r12,#28] | ||
585 | orr r9,r9,r4,lsl#8 | ||
586 | orr r9,r9,r5,lsl#16 | ||
587 | str r8,[r11],#8 | ||
588 | orr r9,r9,r6,lsl#24 | ||
589 | str r9,[r11,#-4] | ||
590 | #else | ||
591 | ldr r8,[r12,#24] | ||
592 | ldr r9,[r12,#28] | ||
593 | #ifdef __ARMEL__ | ||
594 | rev r8,r8 | ||
595 | rev r9,r9 | ||
596 | #endif | ||
597 | str r8,[r11],#8 | ||
598 | str r9,[r11,#-4] | ||
599 | #endif | ||
600 | |||
601 | mov r12,#14 | ||
602 | str r12,[r11,#240-32] | ||
603 | add r6,r10,#256 @ rcon | ||
604 | mov lr,#255 | ||
605 | mov r12,#7 | ||
606 | |||
607 | .L256_loop: | ||
608 | and r5,lr,r9,lsr#24 | ||
609 | and r7,lr,r9,lsr#16 | ||
610 | ldrb r5,[r10,r5] | ||
611 | and r8,lr,r9,lsr#8 | ||
612 | ldrb r7,[r10,r7] | ||
613 | and r9,lr,r9 | ||
614 | ldrb r8,[r10,r8] | ||
615 | orr r5,r5,r7,lsl#24 | ||
616 | ldrb r9,[r10,r9] | ||
617 | orr r5,r5,r8,lsl#16 | ||
618 | ldr r4,[r6],#4 @ rcon[i++] | ||
619 | orr r5,r5,r9,lsl#8 | ||
620 | eor r9,r5,r4 | ||
621 | eor r0,r0,r9 @ rk[8]=rk[0]^... | ||
622 | eor r1,r1,r0 @ rk[9]=rk[1]^rk[8] | ||
623 | str r0,[r11],#32 | ||
624 | eor r2,r2,r1 @ rk[10]=rk[2]^rk[9] | ||
625 | str r1,[r11,#-28] | ||
626 | eor r3,r3,r2 @ rk[11]=rk[3]^rk[10] | ||
627 | str r2,[r11,#-24] | ||
628 | subs r12,r12,#1 | ||
629 | str r3,[r11,#-20] | ||
630 | subeq r2,r11,#256 | ||
631 | beq .Ldone | ||
632 | |||
633 | and r5,lr,r3 | ||
634 | and r7,lr,r3,lsr#8 | ||
635 | ldrb r5,[r10,r5] | ||
636 | and r8,lr,r3,lsr#16 | ||
637 | ldrb r7,[r10,r7] | ||
638 | and r9,lr,r3,lsr#24 | ||
639 | ldrb r8,[r10,r8] | ||
640 | orr r5,r5,r7,lsl#8 | ||
641 | ldrb r9,[r10,r9] | ||
642 | orr r5,r5,r8,lsl#16 | ||
643 | ldr r4,[r11,#-48] | ||
644 | orr r5,r5,r9,lsl#24 | ||
645 | |||
646 | ldr r7,[r11,#-44] | ||
647 | ldr r8,[r11,#-40] | ||
648 | eor r4,r4,r5 @ rk[12]=rk[4]^... | ||
649 | ldr r9,[r11,#-36] | ||
650 | eor r7,r7,r4 @ rk[13]=rk[5]^rk[12] | ||
651 | str r4,[r11,#-16] | ||
652 | eor r8,r8,r7 @ rk[14]=rk[6]^rk[13] | ||
653 | str r7,[r11,#-12] | ||
654 | eor r9,r9,r8 @ rk[15]=rk[7]^rk[14] | ||
655 | str r8,[r11,#-8] | ||
656 | str r9,[r11,#-4] | ||
657 | b .L256_loop | ||
658 | |||
659 | .Ldone: mov r0,#0 | ||
660 | ldmia sp!,{r4-r12,lr} | ||
661 | .Labrt: tst lr,#1 | ||
662 | moveq pc,lr @ be binary compatible with V4, yet | ||
663 | .word 0xe12fff1e @ interoperable with Thumb ISA:-) | ||
664 | .size private_AES_set_encrypt_key,.-private_AES_set_encrypt_key | ||
665 | |||
666 | .global private_AES_set_decrypt_key | ||
667 | .type private_AES_set_decrypt_key,%function | ||
668 | .align 5 | ||
669 | private_AES_set_decrypt_key: | ||
670 | str lr,[sp,#-4]! @ push lr | ||
671 | #if 0 | ||
672 | @ kernel does both of these in setkey so optimise this bit out by | ||
673 | @ expecting the key to already have the enc_key work done (see aes_glue.c) | ||
674 | bl _armv4_AES_set_encrypt_key | ||
675 | #else | ||
676 | mov r0,#0 | ||
677 | #endif | ||
678 | teq r0,#0 | ||
679 | ldrne lr,[sp],#4 @ pop lr | ||
680 | bne .Labrt | ||
681 | |||
682 | stmdb sp!,{r4-r12} | ||
683 | |||
684 | ldr r12,[r2,#240] @ AES_set_encrypt_key preserves r2, | ||
685 | mov r11,r2 @ which is AES_KEY *key | ||
686 | mov r7,r2 | ||
687 | add r8,r2,r12,lsl#4 | ||
688 | |||
689 | .Linv: ldr r0,[r7] | ||
690 | ldr r1,[r7,#4] | ||
691 | ldr r2,[r7,#8] | ||
692 | ldr r3,[r7,#12] | ||
693 | ldr r4,[r8] | ||
694 | ldr r5,[r8,#4] | ||
695 | ldr r6,[r8,#8] | ||
696 | ldr r9,[r8,#12] | ||
697 | str r0,[r8],#-16 | ||
698 | str r1,[r8,#16+4] | ||
699 | str r2,[r8,#16+8] | ||
700 | str r3,[r8,#16+12] | ||
701 | str r4,[r7],#16 | ||
702 | str r5,[r7,#-12] | ||
703 | str r6,[r7,#-8] | ||
704 | str r9,[r7,#-4] | ||
705 | teq r7,r8 | ||
706 | bne .Linv | ||
707 | ldr r0,[r11,#16]! @ prefetch tp1 | ||
708 | mov r7,#0x80 | ||
709 | mov r8,#0x1b | ||
710 | orr r7,r7,#0x8000 | ||
711 | orr r8,r8,#0x1b00 | ||
712 | orr r7,r7,r7,lsl#16 | ||
713 | orr r8,r8,r8,lsl#16 | ||
714 | sub r12,r12,#1 | ||
715 | mvn r9,r7 | ||
716 | mov r12,r12,lsl#2 @ (rounds-1)*4 | ||
717 | |||
718 | .Lmix: and r4,r0,r7 | ||
719 | and r1,r0,r9 | ||
720 | sub r4,r4,r4,lsr#7 | ||
721 | and r4,r4,r8 | ||
722 | eor r1,r4,r1,lsl#1 @ tp2 | ||
723 | |||
724 | and r4,r1,r7 | ||
725 | and r2,r1,r9 | ||
726 | sub r4,r4,r4,lsr#7 | ||
727 | and r4,r4,r8 | ||
728 | eor r2,r4,r2,lsl#1 @ tp4 | ||
729 | |||
730 | and r4,r2,r7 | ||
731 | and r3,r2,r9 | ||
732 | sub r4,r4,r4,lsr#7 | ||
733 | and r4,r4,r8 | ||
734 | eor r3,r4,r3,lsl#1 @ tp8 | ||
735 | |||
736 | eor r4,r1,r2 | ||
737 | eor r5,r0,r3 @ tp9 | ||
738 | eor r4,r4,r3 @ tpe | ||
739 | eor r4,r4,r1,ror#24 | ||
740 | eor r4,r4,r5,ror#24 @ ^= ROTATE(tpb=tp9^tp2,8) | ||
741 | eor r4,r4,r2,ror#16 | ||
742 | eor r4,r4,r5,ror#16 @ ^= ROTATE(tpd=tp9^tp4,16) | ||
743 | eor r4,r4,r5,ror#8 @ ^= ROTATE(tp9,24) | ||
744 | |||
745 | ldr r0,[r11,#4] @ prefetch tp1 | ||
746 | str r4,[r11],#4 | ||
747 | subs r12,r12,#1 | ||
748 | bne .Lmix | ||
749 | |||
750 | mov r0,#0 | ||
751 | #if __ARM_ARCH__>=5 | ||
752 | ldmia sp!,{r4-r12,pc} | ||
753 | #else | ||
754 | ldmia sp!,{r4-r12,lr} | ||
755 | tst lr,#1 | ||
756 | moveq pc,lr @ be binary compatible with V4, yet | ||
757 | .word 0xe12fff1e @ interoperable with Thumb ISA:-) | ||
758 | #endif | ||
759 | .size private_AES_set_decrypt_key,.-private_AES_set_decrypt_key | ||
760 | |||
761 | .type AES_Td,%object | ||
762 | .align 5 | ||
763 | AES_Td: | ||
764 | .word 0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96 | ||
765 | .word 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393 | ||
766 | .word 0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25 | ||
767 | .word 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f | ||
768 | .word 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1 | ||
769 | .word 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6 | ||
770 | .word 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da | ||
771 | .word 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844 | ||
772 | .word 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd | ||
773 | .word 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4 | ||
774 | .word 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45 | ||
775 | .word 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94 | ||
776 | .word 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7 | ||
777 | .word 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a | ||
778 | .word 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5 | ||
779 | .word 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c | ||
780 | .word 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1 | ||
781 | .word 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a | ||
782 | .word 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75 | ||
783 | .word 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051 | ||
784 | .word 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46 | ||
785 | .word 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff | ||
786 | .word 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77 | ||
787 | .word 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb | ||
788 | .word 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000 | ||
789 | .word 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e | ||
790 | .word 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927 | ||
791 | .word 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a | ||
792 | .word 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e | ||
793 | .word 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16 | ||
794 | .word 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d | ||
795 | .word 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8 | ||
796 | .word 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd | ||
797 | .word 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34 | ||
798 | .word 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163 | ||
799 | .word 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120 | ||
800 | .word 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d | ||
801 | .word 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0 | ||
802 | .word 0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422 | ||
803 | .word 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef | ||
804 | .word 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36 | ||
805 | .word 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4 | ||
806 | .word 0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662 | ||
807 | .word 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5 | ||
808 | .word 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3 | ||
809 | .word 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b | ||
810 | .word 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8 | ||
811 | .word 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6 | ||
812 | .word 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6 | ||
813 | .word 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0 | ||
814 | .word 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815 | ||
815 | .word 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f | ||
816 | .word 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df | ||
817 | .word 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f | ||
818 | .word 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e | ||
819 | .word 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713 | ||
820 | .word 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89 | ||
821 | .word 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c | ||
822 | .word 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf | ||
823 | .word 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86 | ||
824 | .word 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f | ||
825 | .word 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541 | ||
826 | .word 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190 | ||
827 | .word 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742 | ||
828 | @ Td4[256] | ||
829 | .byte 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38 | ||
830 | .byte 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb | ||
831 | .byte 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87 | ||
832 | .byte 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb | ||
833 | .byte 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d | ||
834 | .byte 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e | ||
835 | .byte 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2 | ||
836 | .byte 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25 | ||
837 | .byte 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16 | ||
838 | .byte 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92 | ||
839 | .byte 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda | ||
840 | .byte 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84 | ||
841 | .byte 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a | ||
842 | .byte 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06 | ||
843 | .byte 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02 | ||
844 | .byte 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b | ||
845 | .byte 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea | ||
846 | .byte 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73 | ||
847 | .byte 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85 | ||
848 | .byte 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e | ||
849 | .byte 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89 | ||
850 | .byte 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b | ||
851 | .byte 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20 | ||
852 | .byte 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4 | ||
853 | .byte 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31 | ||
854 | .byte 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f | ||
855 | .byte 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d | ||
856 | .byte 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef | ||
857 | .byte 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0 | ||
858 | .byte 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61 | ||
859 | .byte 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26 | ||
860 | .byte 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d | ||
861 | .size AES_Td,.-AES_Td | ||
862 | |||
863 | @ void AES_decrypt(const unsigned char *in, unsigned char *out, | ||
864 | @ const AES_KEY *key) { | ||
865 | .global AES_decrypt | ||
866 | .type AES_decrypt,%function | ||
867 | .align 5 | ||
868 | AES_decrypt: | ||
869 | sub r3,pc,#8 @ AES_decrypt | ||
870 | stmdb sp!,{r1,r4-r12,lr} | ||
871 | mov r12,r0 @ inp | ||
872 | mov r11,r2 | ||
873 | sub r10,r3,#AES_decrypt-AES_Td @ Td | ||
874 | #if __ARM_ARCH__<7 | ||
875 | ldrb r0,[r12,#3] @ load input data in endian-neutral | ||
876 | ldrb r4,[r12,#2] @ manner... | ||
877 | ldrb r5,[r12,#1] | ||
878 | ldrb r6,[r12,#0] | ||
879 | orr r0,r0,r4,lsl#8 | ||
880 | ldrb r1,[r12,#7] | ||
881 | orr r0,r0,r5,lsl#16 | ||
882 | ldrb r4,[r12,#6] | ||
883 | orr r0,r0,r6,lsl#24 | ||
884 | ldrb r5,[r12,#5] | ||
885 | ldrb r6,[r12,#4] | ||
886 | orr r1,r1,r4,lsl#8 | ||
887 | ldrb r2,[r12,#11] | ||
888 | orr r1,r1,r5,lsl#16 | ||
889 | ldrb r4,[r12,#10] | ||
890 | orr r1,r1,r6,lsl#24 | ||
891 | ldrb r5,[r12,#9] | ||
892 | ldrb r6,[r12,#8] | ||
893 | orr r2,r2,r4,lsl#8 | ||
894 | ldrb r3,[r12,#15] | ||
895 | orr r2,r2,r5,lsl#16 | ||
896 | ldrb r4,[r12,#14] | ||
897 | orr r2,r2,r6,lsl#24 | ||
898 | ldrb r5,[r12,#13] | ||
899 | ldrb r6,[r12,#12] | ||
900 | orr r3,r3,r4,lsl#8 | ||
901 | orr r3,r3,r5,lsl#16 | ||
902 | orr r3,r3,r6,lsl#24 | ||
903 | #else | ||
904 | ldr r0,[r12,#0] | ||
905 | ldr r1,[r12,#4] | ||
906 | ldr r2,[r12,#8] | ||
907 | ldr r3,[r12,#12] | ||
908 | #ifdef __ARMEL__ | ||
909 | rev r0,r0 | ||
910 | rev r1,r1 | ||
911 | rev r2,r2 | ||
912 | rev r3,r3 | ||
913 | #endif | ||
914 | #endif | ||
915 | bl _armv4_AES_decrypt | ||
916 | |||
917 | ldr r12,[sp],#4 @ pop out | ||
918 | #if __ARM_ARCH__>=7 | ||
919 | #ifdef __ARMEL__ | ||
920 | rev r0,r0 | ||
921 | rev r1,r1 | ||
922 | rev r2,r2 | ||
923 | rev r3,r3 | ||
924 | #endif | ||
925 | str r0,[r12,#0] | ||
926 | str r1,[r12,#4] | ||
927 | str r2,[r12,#8] | ||
928 | str r3,[r12,#12] | ||
929 | #else | ||
930 | mov r4,r0,lsr#24 @ write output in endian-neutral | ||
931 | mov r5,r0,lsr#16 @ manner... | ||
932 | mov r6,r0,lsr#8 | ||
933 | strb r4,[r12,#0] | ||
934 | strb r5,[r12,#1] | ||
935 | mov r4,r1,lsr#24 | ||
936 | strb r6,[r12,#2] | ||
937 | mov r5,r1,lsr#16 | ||
938 | strb r0,[r12,#3] | ||
939 | mov r6,r1,lsr#8 | ||
940 | strb r4,[r12,#4] | ||
941 | strb r5,[r12,#5] | ||
942 | mov r4,r2,lsr#24 | ||
943 | strb r6,[r12,#6] | ||
944 | mov r5,r2,lsr#16 | ||
945 | strb r1,[r12,#7] | ||
946 | mov r6,r2,lsr#8 | ||
947 | strb r4,[r12,#8] | ||
948 | strb r5,[r12,#9] | ||
949 | mov r4,r3,lsr#24 | ||
950 | strb r6,[r12,#10] | ||
951 | mov r5,r3,lsr#16 | ||
952 | strb r2,[r12,#11] | ||
953 | mov r6,r3,lsr#8 | ||
954 | strb r4,[r12,#12] | ||
955 | strb r5,[r12,#13] | ||
956 | strb r6,[r12,#14] | ||
957 | strb r3,[r12,#15] | ||
958 | #endif | ||
959 | #if __ARM_ARCH__>=5 | ||
960 | ldmia sp!,{r4-r12,pc} | ||
961 | #else | ||
962 | ldmia sp!,{r4-r12,lr} | ||
963 | tst lr,#1 | ||
964 | moveq pc,lr @ be binary compatible with V4, yet | ||
965 | .word 0xe12fff1e @ interoperable with Thumb ISA:-) | ||
966 | #endif | ||
967 | .size AES_decrypt,.-AES_decrypt | ||
968 | |||
969 | .type _armv4_AES_decrypt,%function | ||
970 | .align 2 | ||
971 | _armv4_AES_decrypt: | ||
972 | str lr,[sp,#-4]! @ push lr | ||
973 | ldmia r11!,{r4-r7} | ||
974 | eor r0,r0,r4 | ||
975 | ldr r12,[r11,#240-16] | ||
976 | eor r1,r1,r5 | ||
977 | eor r2,r2,r6 | ||
978 | eor r3,r3,r7 | ||
979 | sub r12,r12,#1 | ||
980 | mov lr,#255 | ||
981 | |||
982 | and r7,lr,r0,lsr#16 | ||
983 | and r8,lr,r0,lsr#8 | ||
984 | and r9,lr,r0 | ||
985 | mov r0,r0,lsr#24 | ||
986 | .Ldec_loop: | ||
987 | ldr r4,[r10,r7,lsl#2] @ Td1[s0>>16] | ||
988 | and r7,lr,r1 @ i0 | ||
989 | ldr r5,[r10,r8,lsl#2] @ Td2[s0>>8] | ||
990 | and r8,lr,r1,lsr#16 | ||
991 | ldr r6,[r10,r9,lsl#2] @ Td3[s0>>0] | ||
992 | and r9,lr,r1,lsr#8 | ||
993 | ldr r0,[r10,r0,lsl#2] @ Td0[s0>>24] | ||
994 | mov r1,r1,lsr#24 | ||
995 | |||
996 | ldr r7,[r10,r7,lsl#2] @ Td3[s1>>0] | ||
997 | ldr r8,[r10,r8,lsl#2] @ Td1[s1>>16] | ||
998 | ldr r9,[r10,r9,lsl#2] @ Td2[s1>>8] | ||
999 | eor r0,r0,r7,ror#24 | ||
1000 | ldr r1,[r10,r1,lsl#2] @ Td0[s1>>24] | ||
1001 | and r7,lr,r2,lsr#8 @ i0 | ||
1002 | eor r5,r8,r5,ror#8 | ||
1003 | and r8,lr,r2 @ i1 | ||
1004 | eor r6,r9,r6,ror#8 | ||
1005 | and r9,lr,r2,lsr#16 | ||
1006 | ldr r7,[r10,r7,lsl#2] @ Td2[s2>>8] | ||
1007 | eor r1,r1,r4,ror#8 | ||
1008 | ldr r8,[r10,r8,lsl#2] @ Td3[s2>>0] | ||
1009 | mov r2,r2,lsr#24 | ||
1010 | |||
1011 | ldr r9,[r10,r9,lsl#2] @ Td1[s2>>16] | ||
1012 | eor r0,r0,r7,ror#16 | ||
1013 | ldr r2,[r10,r2,lsl#2] @ Td0[s2>>24] | ||
1014 | and r7,lr,r3,lsr#16 @ i0 | ||
1015 | eor r1,r1,r8,ror#24 | ||
1016 | and r8,lr,r3,lsr#8 @ i1 | ||
1017 | eor r6,r9,r6,ror#8 | ||
1018 | and r9,lr,r3 @ i2 | ||
1019 | ldr r7,[r10,r7,lsl#2] @ Td1[s3>>16] | ||
1020 | eor r2,r2,r5,ror#8 | ||
1021 | ldr r8,[r10,r8,lsl#2] @ Td2[s3>>8] | ||
1022 | mov r3,r3,lsr#24 | ||
1023 | |||
1024 | ldr r9,[r10,r9,lsl#2] @ Td3[s3>>0] | ||
1025 | eor r0,r0,r7,ror#8 | ||
1026 | ldr r7,[r11],#16 | ||
1027 | eor r1,r1,r8,ror#16 | ||
1028 | ldr r3,[r10,r3,lsl#2] @ Td0[s3>>24] | ||
1029 | eor r2,r2,r9,ror#24 | ||
1030 | |||
1031 | ldr r4,[r11,#-12] | ||
1032 | eor r0,r0,r7 | ||
1033 | ldr r5,[r11,#-8] | ||
1034 | eor r3,r3,r6,ror#8 | ||
1035 | ldr r6,[r11,#-4] | ||
1036 | and r7,lr,r0,lsr#16 | ||
1037 | eor r1,r1,r4 | ||
1038 | and r8,lr,r0,lsr#8 | ||
1039 | eor r2,r2,r5 | ||
1040 | and r9,lr,r0 | ||
1041 | eor r3,r3,r6 | ||
1042 | mov r0,r0,lsr#24 | ||
1043 | |||
1044 | subs r12,r12,#1 | ||
1045 | bne .Ldec_loop | ||
1046 | |||
1047 | add r10,r10,#1024 | ||
1048 | |||
1049 | ldr r5,[r10,#0] @ prefetch Td4 | ||
1050 | ldr r6,[r10,#32] | ||
1051 | ldr r4,[r10,#64] | ||
1052 | ldr r5,[r10,#96] | ||
1053 | ldr r6,[r10,#128] | ||
1054 | ldr r4,[r10,#160] | ||
1055 | ldr r5,[r10,#192] | ||
1056 | ldr r6,[r10,#224] | ||
1057 | |||
1058 | ldrb r0,[r10,r0] @ Td4[s0>>24] | ||
1059 | ldrb r4,[r10,r7] @ Td4[s0>>16] | ||
1060 | and r7,lr,r1 @ i0 | ||
1061 | ldrb r5,[r10,r8] @ Td4[s0>>8] | ||
1062 | and r8,lr,r1,lsr#16 | ||
1063 | ldrb r6,[r10,r9] @ Td4[s0>>0] | ||
1064 | and r9,lr,r1,lsr#8 | ||
1065 | |||
1066 | ldrb r7,[r10,r7] @ Td4[s1>>0] | ||
1067 | ldrb r1,[r10,r1,lsr#24] @ Td4[s1>>24] | ||
1068 | ldrb r8,[r10,r8] @ Td4[s1>>16] | ||
1069 | eor r0,r7,r0,lsl#24 | ||
1070 | ldrb r9,[r10,r9] @ Td4[s1>>8] | ||
1071 | eor r1,r4,r1,lsl#8 | ||
1072 | and r7,lr,r2,lsr#8 @ i0 | ||
1073 | eor r5,r5,r8,lsl#8 | ||
1074 | and r8,lr,r2 @ i1 | ||
1075 | ldrb r7,[r10,r7] @ Td4[s2>>8] | ||
1076 | eor r6,r6,r9,lsl#8 | ||
1077 | ldrb r8,[r10,r8] @ Td4[s2>>0] | ||
1078 | and r9,lr,r2,lsr#16 | ||
1079 | |||
1080 | ldrb r2,[r10,r2,lsr#24] @ Td4[s2>>24] | ||
1081 | eor r0,r0,r7,lsl#8 | ||
1082 | ldrb r9,[r10,r9] @ Td4[s2>>16] | ||
1083 | eor r1,r8,r1,lsl#16 | ||
1084 | and r7,lr,r3,lsr#16 @ i0 | ||
1085 | eor r2,r5,r2,lsl#16 | ||
1086 | and r8,lr,r3,lsr#8 @ i1 | ||
1087 | ldrb r7,[r10,r7] @ Td4[s3>>16] | ||
1088 | eor r6,r6,r9,lsl#16 | ||
1089 | ldrb r8,[r10,r8] @ Td4[s3>>8] | ||
1090 | and r9,lr,r3 @ i2 | ||
1091 | |||
1092 | ldrb r9,[r10,r9] @ Td4[s3>>0] | ||
1093 | ldrb r3,[r10,r3,lsr#24] @ Td4[s3>>24] | ||
1094 | eor r0,r0,r7,lsl#16 | ||
1095 | ldr r7,[r11,#0] | ||
1096 | eor r1,r1,r8,lsl#8 | ||
1097 | ldr r4,[r11,#4] | ||
1098 | eor r2,r9,r2,lsl#8 | ||
1099 | ldr r5,[r11,#8] | ||
1100 | eor r3,r6,r3,lsl#24 | ||
1101 | ldr r6,[r11,#12] | ||
1102 | |||
1103 | eor r0,r0,r7 | ||
1104 | eor r1,r1,r4 | ||
1105 | eor r2,r2,r5 | ||
1106 | eor r3,r3,r6 | ||
1107 | |||
1108 | sub r10,r10,#1024 | ||
1109 | ldr pc,[sp],#4 @ pop and return | ||
1110 | .size _armv4_AES_decrypt,.-_armv4_AES_decrypt | ||
1111 | .asciz "AES for ARMv4, CRYPTOGAMS by <appro@openssl.org>" | ||
1112 | .align 2 | ||
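
For readers less fluent in ARM assembly: the `.Lenc_loop` above is a textbook T-table AES round. Each state word is split into four bytes, every byte indexes the single 1KB `AES_Te` table, and the rotations that would otherwise require three more tables (Te1..Te3) are folded into the `ror` operands of the `eor` instructions. Below is a minimal C sketch of the same round structure; the names `Te0`, `rk`, `s` and the helper itself are illustrative, not taken from the patch.

```c
#include <stdint.h>

/* Rotate right, as the asm's "ror" barrel-shifter operand does. */
#define ROR32(x, n) (((x) >> (n)) | ((x) << (32 - (n))))

/* Te0[] corresponds to the word table AES_Te; rk[] is one round key. */
extern const uint32_t Te0[256];

/* One T-table encryption round: Te1/Te2/Te3 lookups are expressed as
 * rotated Te0 lookups, exactly the trick the assembly uses to stay
 * within a single 1KB table. */
static void aes_ttable_round(uint32_t s[4], const uint32_t rk[4])
{
	uint32_t t[4];
	int i;

	for (i = 0; i < 4; i++) {
		t[i] = Te0[(s[i] >> 24) & 0xff] ^
		       ROR32(Te0[(s[(i + 1) & 3] >> 16) & 0xff], 8) ^
		       ROR32(Te0[(s[(i + 2) & 3] >> 8) & 0xff], 16) ^
		       ROR32(Te0[s[(i + 3) & 3] & 0xff], 24) ^
		       rk[i];
	}
	s[0] = t[0]; s[1] = t[1]; s[2] = t[2]; s[3] = t[3];
}
```

Merging the rotations into the lookups is what keeps the table footprint at 1KB, as the "single 1K S-box" note in the file header points out.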
diff --git a/arch/arm/crypto/aes_glue.c b/arch/arm/crypto/aes_glue.c
new file mode 100644
index 000000000000..59f7877ead6a
--- /dev/null
+++ b/arch/arm/crypto/aes_glue.c
@@ -0,0 +1,108 @@
1 | /* | ||
2 | * Glue Code for the asm optimized version of the AES Cipher Algorithm | ||
3 | */ | ||
4 | |||
5 | #include <linux/module.h> | ||
6 | #include <linux/crypto.h> | ||
7 | #include <crypto/aes.h> | ||
8 | |||
9 | #define AES_MAXNR 14 | ||
10 | |||
11 | typedef struct { | ||
12 | unsigned int rd_key[4 *(AES_MAXNR + 1)]; | ||
13 | int rounds; | ||
14 | } AES_KEY; | ||
15 | |||
16 | struct AES_CTX { | ||
17 | AES_KEY enc_key; | ||
18 | AES_KEY dec_key; | ||
19 | }; | ||
20 | |||
21 | asmlinkage void AES_encrypt(const u8 *in, u8 *out, AES_KEY *ctx); | ||
22 | asmlinkage void AES_decrypt(const u8 *in, u8 *out, AES_KEY *ctx); | ||
23 | asmlinkage int private_AES_set_decrypt_key(const unsigned char *userKey, const int bits, AES_KEY *key); | ||
24 | asmlinkage int private_AES_set_encrypt_key(const unsigned char *userKey, const int bits, AES_KEY *key); | ||
25 | |||
26 | static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) | ||
27 | { | ||
28 | struct AES_CTX *ctx = crypto_tfm_ctx(tfm); | ||
29 | AES_encrypt(src, dst, &ctx->enc_key); | ||
30 | } | ||
31 | |||
32 | static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) | ||
33 | { | ||
34 | struct AES_CTX *ctx = crypto_tfm_ctx(tfm); | ||
35 | AES_decrypt(src, dst, &ctx->dec_key); | ||
36 | } | ||
37 | |||
38 | static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, | ||
39 | unsigned int key_len) | ||
40 | { | ||
41 | struct AES_CTX *ctx = crypto_tfm_ctx(tfm); | ||
42 | |||
43 | switch (key_len) { | ||
44 | case AES_KEYSIZE_128: | ||
45 | key_len = 128; | ||
46 | break; | ||
47 | case AES_KEYSIZE_192: | ||
48 | key_len = 192; | ||
49 | break; | ||
50 | case AES_KEYSIZE_256: | ||
51 | key_len = 256; | ||
52 | break; | ||
53 | default: | ||
54 | tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; | ||
55 | return -EINVAL; | ||
56 | } | ||
57 | |||
58 | if (private_AES_set_encrypt_key(in_key, key_len, &ctx->enc_key) == -1) { | ||
59 | tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; | ||
60 | return -EINVAL; | ||
61 | } | ||
62 | /* private_AES_set_decrypt_key expects an encryption key as input */ | ||
63 | ctx->dec_key = ctx->enc_key; | ||
64 | if (private_AES_set_decrypt_key(in_key, key_len, &ctx->dec_key) == -1) { | ||
65 | tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; | ||
66 | return -EINVAL; | ||
67 | } | ||
68 | return 0; | ||
69 | } | ||
70 | |||
71 | static struct crypto_alg aes_alg = { | ||
72 | .cra_name = "aes", | ||
73 | .cra_driver_name = "aes-asm", | ||
74 | .cra_priority = 200, | ||
75 | .cra_flags = CRYPTO_ALG_TYPE_CIPHER, | ||
76 | .cra_blocksize = AES_BLOCK_SIZE, | ||
77 | .cra_ctxsize = sizeof(struct AES_CTX), | ||
78 | .cra_module = THIS_MODULE, | ||
79 | .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), | ||
80 | .cra_u = { | ||
81 | .cipher = { | ||
82 | .cia_min_keysize = AES_MIN_KEY_SIZE, | ||
83 | .cia_max_keysize = AES_MAX_KEY_SIZE, | ||
84 | .cia_setkey = aes_set_key, | ||
85 | .cia_encrypt = aes_encrypt, | ||
86 | .cia_decrypt = aes_decrypt | ||
87 | } | ||
88 | } | ||
89 | }; | ||
90 | |||
91 | static int __init aes_init(void) | ||
92 | { | ||
93 | return crypto_register_alg(&aes_alg); | ||
94 | } | ||
95 | |||
96 | static void __exit aes_fini(void) | ||
97 | { | ||
98 | crypto_unregister_alg(&aes_alg); | ||
99 | } | ||
100 | |||
101 | module_init(aes_init); | ||
102 | module_exit(aes_fini); | ||
103 | |||
104 | MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm (ASM)"); | ||
105 | MODULE_LICENSE("GPL"); | ||
106 | MODULE_ALIAS("aes"); | ||
107 | MODULE_ALIAS("aes-asm"); | ||
108 | MODULE_AUTHOR("David McCullough <ucdevel@gmail.com>"); | ||
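
The glue file registers a plain single-block cipher (cra_name "aes", driver "aes-asm", priority 200), so existing in-kernel users pick it up through the usual crypto API without any changes. A hedged sketch of such a caller follows; the function name and the all-zero key/plaintext are illustrative only, and the helpers used are the standard `crypto_cipher` API.

```c
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/aes.h>

/* Sketch of a consumer: whoever allocates the single-block "aes"
 * cipher is handed "aes-asm" when it is the highest-priority
 * implementation registered. */
static int example_aes_one_block(void)
{
	struct crypto_cipher *tfm;
	u8 key[AES_KEYSIZE_128] = { 0 };	/* illustrative key */
	u8 in[AES_BLOCK_SIZE] = { 0 };		/* illustrative plaintext */
	u8 out[AES_BLOCK_SIZE];
	int err;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!err)
		crypto_cipher_encrypt_one(tfm, out, in);

	crypto_free_cipher(tfm);
	return err;
}
```

Chaining modes (ecb, cbc, ...) are layered on top of this single-block cipher by the generic templates, which is why the glue only has to provide setkey and one-block encrypt/decrypt.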
diff --git a/arch/arm/crypto/sha1-armv4-large.S b/arch/arm/crypto/sha1-armv4-large.S
new file mode 100644
index 000000000000..7050ab133b9d
--- /dev/null
+++ b/arch/arm/crypto/sha1-armv4-large.S
@@ -0,0 +1,503 @@
1 | #define __ARM_ARCH__ __LINUX_ARM_ARCH__ | ||
2 | @ ==================================================================== | ||
3 | @ Written by Andy Polyakov <appro@fy.chalmers.se> for the OpenSSL | ||
4 | @ project. The module is, however, dual licensed under OpenSSL and | ||
5 | @ CRYPTOGAMS licenses depending on where you obtain it. For further | ||
6 | @ details see http://www.openssl.org/~appro/cryptogams/. | ||
7 | @ ==================================================================== | ||
8 | |||
9 | @ sha1_block procedure for ARMv4. | ||
10 | @ | ||
11 | @ January 2007. | ||
12 | |||
13 | @ Size/performance trade-off | ||
14 | @ ==================================================================== | ||
15 | @ impl size in bytes comp cycles[*] measured performance | ||
16 | @ ==================================================================== | ||
17 | @ thumb 304 3212 4420 | ||
18 | @ armv4-small 392/+29% 1958/+64% 2250/+96% | ||
19 | @ armv4-compact 740/+89% 1552/+26% 1840/+22% | ||
20 | @ armv4-large 1420/+92% 1307/+19% 1370/+34%[***] | ||
21 | @ full unroll ~5100/+260% ~1260/+4% ~1300/+5% | ||
22 | @ ==================================================================== | ||
23 | @ thumb = same as 'small' but in Thumb instructions[**] and | ||
24 | @ with recurring code in two private functions; | ||
25 | @ small = detached Xload/update, loops are folded; | ||
26 | @ compact = detached Xload/update, 5x unroll; | ||
27 | @ large = interleaved Xload/update, 5x unroll; | ||
28 | @ full unroll = interleaved Xload/update, full unroll, estimated[!]; | ||
29 | @ | ||
30 | @ [*] Manually counted instructions in "grand" loop body. Measured | ||
31 | @ performance is affected by prologue and epilogue overhead, | ||
32 | @ i-cache availability, branch penalties, etc. | ||
33 | @ [**] While each Thumb instruction is twice smaller, they are not as | ||
34 | @ diverse as ARM ones: e.g., there are only two arithmetic | ||
35 | @ instructions with 3 arguments, no [fixed] rotate, addressing | ||
36 | @ modes are limited. As result it takes more instructions to do | ||
37 | @ the same job in Thumb, therefore the code is never twice as | ||
38 | @ small and always slower. | ||
39 | @ [***] which is also ~35% better than compiler generated code. Dual- | ||
40 | @ issue Cortex A8 core was measured to process input block in | ||
41 | @ ~990 cycles. | ||
42 | |||
43 | @ August 2010. | ||
44 | @ | ||
45 | @ Rescheduling for dual-issue pipeline resulted in 13% improvement on | ||
46 | @ Cortex A8 core and in absolute terms ~870 cycles per input block | ||
47 | @ [or 13.6 cycles per byte]. | ||
48 | |||
49 | @ February 2011. | ||
50 | @ | ||
51 | @ Profiler-assisted and platform-specific optimization resulted in 10% | ||
52 | @ improvement on Cortex A8 core and 12.2 cycles per byte. | ||
53 | |||
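
Before the unrolled assembly, it may help to state what one call computes. Judging from the entry code below (`r0` = state, `r1` = data, `r2` scaled by 64 to find the end of the input), `sha1_block_data_order(state, data, nblocks)` runs the standard SHA-1 compression over `nblocks` 64-byte blocks. The following is a reference sketch in C with illustrative names; it mirrors the round structure (K_00_19..K_60_79, F_00_19..F_60_79), not the register scheduling.

```c
#include <stdint.h>
#include <stddef.h>

#define ROL32(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

static void sha1_blocks_ref(uint32_t state[5], const uint8_t *data, size_t nblocks)
{
	static const uint32_t K[4] = {
		0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6
	};

	while (nblocks--) {
		uint32_t w[80], a, b, c, d, e, f, t;
		int i;

		/* Big-endian message load; the asm uses byte loads or
		 * ldr+rev depending on __ARM_ARCH__. */
		for (i = 0; i < 16; i++)
			w[i] = (uint32_t)data[4 * i] << 24 | data[4 * i + 1] << 16 |
			       data[4 * i + 2] << 8 | data[4 * i + 3];
		for (; i < 80; i++)	/* message schedule */
			w[i] = ROL32(w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16], 1);

		a = state[0]; b = state[1]; c = state[2]; d = state[3]; e = state[4];

		for (i = 0; i < 80; i++) {
			if (i < 20)      f = (b & c) | (~b & d);          /* F_00_19 */
			else if (i < 40) f = b ^ c ^ d;                   /* F_20_39 */
			else if (i < 60) f = (b & c) | (b & d) | (c & d); /* F_40_59 */
			else             f = b ^ c ^ d;                   /* F_60_79 */

			t = ROL32(a, 5) + f + e + K[i / 20] + w[i];
			e = d; d = c; c = ROL32(b, 30); b = a; a = t;
		}

		state[0] += a; state[1] += b; state[2] += c;
		state[3] += d; state[4] += e;
		data += 64;
	}
}
```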
54 | .text | ||
55 | |||
56 | .global sha1_block_data_order | ||
57 | .type sha1_block_data_order,%function | ||
58 | |||
59 | .align 2 | ||
60 | sha1_block_data_order: | ||
61 | stmdb sp!,{r4-r12,lr} | ||
62 | add r2,r1,r2,lsl#6 @ r2 to point at the end of r1 | ||
63 | ldmia r0,{r3,r4,r5,r6,r7} | ||
64 | .Lloop: | ||
65 | ldr r8,.LK_00_19 | ||
66 | mov r14,sp | ||
67 | sub sp,sp,#15*4 | ||
68 | mov r5,r5,ror#30 | ||
69 | mov r6,r6,ror#30 | ||
70 | mov r7,r7,ror#30 @ [6] | ||
71 | .L_00_15: | ||
72 | #if __ARM_ARCH__<7 | ||
73 | ldrb r10,[r1,#2] | ||
74 | ldrb r9,[r1,#3] | ||
75 | ldrb r11,[r1,#1] | ||
76 | add r7,r8,r7,ror#2 @ E+=K_00_19 | ||
77 | ldrb r12,[r1],#4 | ||
78 | orr r9,r9,r10,lsl#8 | ||
79 | eor r10,r5,r6 @ F_xx_xx | ||
80 | orr r9,r9,r11,lsl#16 | ||
81 | add r7,r7,r3,ror#27 @ E+=ROR(A,27) | ||
82 | orr r9,r9,r12,lsl#24 | ||
83 | #else | ||
84 | ldr r9,[r1],#4 @ handles unaligned | ||
85 | add r7,r8,r7,ror#2 @ E+=K_00_19 | ||
86 | eor r10,r5,r6 @ F_xx_xx | ||
87 | add r7,r7,r3,ror#27 @ E+=ROR(A,27) | ||
88 | #ifdef __ARMEL__ | ||
89 | rev r9,r9 @ byte swap | ||
90 | #endif | ||
91 | #endif | ||
92 | and r10,r4,r10,ror#2 | ||
93 | add r7,r7,r9 @ E+=X[i] | ||
94 | eor r10,r10,r6,ror#2 @ F_00_19(B,C,D) | ||
95 | str r9,[r14,#-4]! | ||
96 | add r7,r7,r10 @ E+=F_00_19(B,C,D) | ||
97 | #if __ARM_ARCH__<7 | ||
98 | ldrb r10,[r1,#2] | ||
99 | ldrb r9,[r1,#3] | ||
100 | ldrb r11,[r1,#1] | ||
101 | add r6,r8,r6,ror#2 @ E+=K_00_19 | ||
102 | ldrb r12,[r1],#4 | ||
103 | orr r9,r9,r10,lsl#8 | ||
104 | eor r10,r4,r5 @ F_xx_xx | ||
105 | orr r9,r9,r11,lsl#16 | ||
106 | add r6,r6,r7,ror#27 @ E+=ROR(A,27) | ||
107 | orr r9,r9,r12,lsl#24 | ||
108 | #else | ||
109 | ldr r9,[r1],#4 @ handles unaligned | ||
110 | add r6,r8,r6,ror#2 @ E+=K_00_19 | ||
111 | eor r10,r4,r5 @ F_xx_xx | ||
112 | add r6,r6,r7,ror#27 @ E+=ROR(A,27) | ||
113 | #ifdef __ARMEL__ | ||
114 | rev r9,r9 @ byte swap | ||
115 | #endif | ||
116 | #endif | ||
117 | and r10,r3,r10,ror#2 | ||
118 | add r6,r6,r9 @ E+=X[i] | ||
119 | eor r10,r10,r5,ror#2 @ F_00_19(B,C,D) | ||
120 | str r9,[r14,#-4]! | ||
121 | add r6,r6,r10 @ E+=F_00_19(B,C,D) | ||
122 | #if __ARM_ARCH__<7 | ||
123 | ldrb r10,[r1,#2] | ||
124 | ldrb r9,[r1,#3] | ||
125 | ldrb r11,[r1,#1] | ||
126 | add r5,r8,r5,ror#2 @ E+=K_00_19 | ||
127 | ldrb r12,[r1],#4 | ||
128 | orr r9,r9,r10,lsl#8 | ||
129 | eor r10,r3,r4 @ F_xx_xx | ||
130 | orr r9,r9,r11,lsl#16 | ||
131 | add r5,r5,r6,ror#27 @ E+=ROR(A,27) | ||
132 | orr r9,r9,r12,lsl#24 | ||
133 | #else | ||
134 | ldr r9,[r1],#4 @ handles unaligned | ||
135 | add r5,r8,r5,ror#2 @ E+=K_00_19 | ||
136 | eor r10,r3,r4 @ F_xx_xx | ||
137 | add r5,r5,r6,ror#27 @ E+=ROR(A,27) | ||
138 | #ifdef __ARMEL__ | ||
139 | rev r9,r9 @ byte swap | ||
140 | #endif | ||
141 | #endif | ||
142 | and r10,r7,r10,ror#2 | ||
143 | add r5,r5,r9 @ E+=X[i] | ||
144 | eor r10,r10,r4,ror#2 @ F_00_19(B,C,D) | ||
145 | str r9,[r14,#-4]! | ||
146 | add r5,r5,r10 @ E+=F_00_19(B,C,D) | ||
147 | #if __ARM_ARCH__<7 | ||
148 | ldrb r10,[r1,#2] | ||
149 | ldrb r9,[r1,#3] | ||
150 | ldrb r11,[r1,#1] | ||
151 | add r4,r8,r4,ror#2 @ E+=K_00_19 | ||
152 | ldrb r12,[r1],#4 | ||
153 | orr r9,r9,r10,lsl#8 | ||
154 | eor r10,r7,r3 @ F_xx_xx | ||
155 | orr r9,r9,r11,lsl#16 | ||
156 | add r4,r4,r5,ror#27 @ E+=ROR(A,27) | ||
157 | orr r9,r9,r12,lsl#24 | ||
158 | #else | ||
159 | ldr r9,[r1],#4 @ handles unaligned | ||
160 | add r4,r8,r4,ror#2 @ E+=K_00_19 | ||
161 | eor r10,r7,r3 @ F_xx_xx | ||
162 | add r4,r4,r5,ror#27 @ E+=ROR(A,27) | ||
163 | #ifdef __ARMEL__ | ||
164 | rev r9,r9 @ byte swap | ||
165 | #endif | ||
166 | #endif | ||
167 | and r10,r6,r10,ror#2 | ||
168 | add r4,r4,r9 @ E+=X[i] | ||
169 | eor r10,r10,r3,ror#2 @ F_00_19(B,C,D) | ||
170 | str r9,[r14,#-4]! | ||
171 | add r4,r4,r10 @ E+=F_00_19(B,C,D) | ||
172 | #if __ARM_ARCH__<7 | ||
173 | ldrb r10,[r1,#2] | ||
174 | ldrb r9,[r1,#3] | ||
175 | ldrb r11,[r1,#1] | ||
176 | add r3,r8,r3,ror#2 @ E+=K_00_19 | ||
177 | ldrb r12,[r1],#4 | ||
178 | orr r9,r9,r10,lsl#8 | ||
179 | eor r10,r6,r7 @ F_xx_xx | ||
180 | orr r9,r9,r11,lsl#16 | ||
181 | add r3,r3,r4,ror#27 @ E+=ROR(A,27) | ||
182 | orr r9,r9,r12,lsl#24 | ||
183 | #else | ||
184 | ldr r9,[r1],#4 @ handles unaligned | ||
185 | add r3,r8,r3,ror#2 @ E+=K_00_19 | ||
186 | eor r10,r6,r7 @ F_xx_xx | ||
187 | add r3,r3,r4,ror#27 @ E+=ROR(A,27) | ||
188 | #ifdef __ARMEL__ | ||
189 | rev r9,r9 @ byte swap | ||
190 | #endif | ||
191 | #endif | ||
192 | and r10,r5,r10,ror#2 | ||
193 | add r3,r3,r9 @ E+=X[i] | ||
194 | eor r10,r10,r7,ror#2 @ F_00_19(B,C,D) | ||
195 | str r9,[r14,#-4]! | ||
196 | add r3,r3,r10 @ E+=F_00_19(B,C,D) | ||
197 | teq r14,sp | ||
198 | bne .L_00_15 @ [((11+4)*5+2)*3] | ||
199 | #if __ARM_ARCH__<7 | ||
200 | ldrb r10,[r1,#2] | ||
201 | ldrb r9,[r1,#3] | ||
202 | ldrb r11,[r1,#1] | ||
203 | add r7,r8,r7,ror#2 @ E+=K_00_19 | ||
204 | ldrb r12,[r1],#4 | ||
205 | orr r9,r9,r10,lsl#8 | ||
206 | eor r10,r5,r6 @ F_xx_xx | ||
207 | orr r9,r9,r11,lsl#16 | ||
208 | add r7,r7,r3,ror#27 @ E+=ROR(A,27) | ||
209 | orr r9,r9,r12,lsl#24 | ||
210 | #else | ||
211 | ldr r9,[r1],#4 @ handles unaligned | ||
212 | add r7,r8,r7,ror#2 @ E+=K_00_19 | ||
213 | eor r10,r5,r6 @ F_xx_xx | ||
214 | add r7,r7,r3,ror#27 @ E+=ROR(A,27) | ||
215 | #ifdef __ARMEL__ | ||
216 | rev r9,r9 @ byte swap | ||
217 | #endif | ||
218 | #endif | ||
219 | and r10,r4,r10,ror#2 | ||
220 | add r7,r7,r9 @ E+=X[i] | ||
221 | eor r10,r10,r6,ror#2 @ F_00_19(B,C,D) | ||
222 | str r9,[r14,#-4]! | ||
223 | add r7,r7,r10 @ E+=F_00_19(B,C,D) | ||
224 | ldr r9,[r14,#15*4] | ||
225 | ldr r10,[r14,#13*4] | ||
226 | ldr r11,[r14,#7*4] | ||
227 | add r6,r8,r6,ror#2 @ E+=K_xx_xx | ||
228 | ldr r12,[r14,#2*4] | ||
229 | eor r9,r9,r10 | ||
230 | eor r11,r11,r12 @ 1 cycle stall | ||
231 | eor r10,r4,r5 @ F_xx_xx | ||
232 | mov r9,r9,ror#31 | ||
233 | add r6,r6,r7,ror#27 @ E+=ROR(A,27) | ||
234 | eor r9,r9,r11,ror#31 | ||
235 | str r9,[r14,#-4]! | ||
236 | and r10,r3,r10,ror#2 @ F_xx_xx | ||
237 | @ F_xx_xx | ||
238 | add r6,r6,r9 @ E+=X[i] | ||
239 | eor r10,r10,r5,ror#2 @ F_00_19(B,C,D) | ||
240 | add r6,r6,r10 @ E+=F_00_19(B,C,D) | ||
241 | ldr r9,[r14,#15*4] | ||
242 | ldr r10,[r14,#13*4] | ||
243 | ldr r11,[r14,#7*4] | ||
244 | add r5,r8,r5,ror#2 @ E+=K_xx_xx | ||
245 | ldr r12,[r14,#2*4] | ||
246 | eor r9,r9,r10 | ||
247 | eor r11,r11,r12 @ 1 cycle stall | ||
248 | eor r10,r3,r4 @ F_xx_xx | ||
249 | mov r9,r9,ror#31 | ||
250 | add r5,r5,r6,ror#27 @ E+=ROR(A,27) | ||
251 | eor r9,r9,r11,ror#31 | ||
252 | str r9,[r14,#-4]! | ||
253 | and r10,r7,r10,ror#2 @ F_xx_xx | ||
254 | @ F_xx_xx | ||
255 | add r5,r5,r9 @ E+=X[i] | ||
256 | eor r10,r10,r4,ror#2 @ F_00_19(B,C,D) | ||
257 | add r5,r5,r10 @ E+=F_00_19(B,C,D) | ||
258 | ldr r9,[r14,#15*4] | ||
259 | ldr r10,[r14,#13*4] | ||
260 | ldr r11,[r14,#7*4] | ||
261 | add r4,r8,r4,ror#2 @ E+=K_xx_xx | ||
262 | ldr r12,[r14,#2*4] | ||
263 | eor r9,r9,r10 | ||
264 | eor r11,r11,r12 @ 1 cycle stall | ||
265 | eor r10,r7,r3 @ F_xx_xx | ||
266 | mov r9,r9,ror#31 | ||
267 | add r4,r4,r5,ror#27 @ E+=ROR(A,27) | ||
268 | eor r9,r9,r11,ror#31 | ||
269 | str r9,[r14,#-4]! | ||
270 | and r10,r6,r10,ror#2 @ F_xx_xx | ||
271 | @ F_xx_xx | ||
272 | add r4,r4,r9 @ E+=X[i] | ||
273 | eor r10,r10,r3,ror#2 @ F_00_19(B,C,D) | ||
274 | add r4,r4,r10 @ E+=F_00_19(B,C,D) | ||
275 | ldr r9,[r14,#15*4] | ||
276 | ldr r10,[r14,#13*4] | ||
277 | ldr r11,[r14,#7*4] | ||
278 | add r3,r8,r3,ror#2 @ E+=K_xx_xx | ||
279 | ldr r12,[r14,#2*4] | ||
280 | eor r9,r9,r10 | ||
281 | eor r11,r11,r12 @ 1 cycle stall | ||
282 | eor r10,r6,r7 @ F_xx_xx | ||
283 | mov r9,r9,ror#31 | ||
284 | add r3,r3,r4,ror#27 @ E+=ROR(A,27) | ||
285 | eor r9,r9,r11,ror#31 | ||
286 | str r9,[r14,#-4]! | ||
287 | and r10,r5,r10,ror#2 @ F_xx_xx | ||
288 | @ F_xx_xx | ||
289 | add r3,r3,r9 @ E+=X[i] | ||
290 | eor r10,r10,r7,ror#2 @ F_00_19(B,C,D) | ||
291 | add r3,r3,r10 @ E+=F_00_19(B,C,D) | ||
292 | |||
293 | ldr r8,.LK_20_39 @ [+15+16*4] | ||
294 | sub sp,sp,#25*4 | ||
295 | cmn sp,#0 @ [+3], clear carry to denote 20_39 | ||
296 | .L_20_39_or_60_79: | ||
297 | ldr r9,[r14,#15*4] | ||
298 | ldr r10,[r14,#13*4] | ||
299 | ldr r11,[r14,#7*4] | ||
300 | add r7,r8,r7,ror#2 @ E+=K_xx_xx | ||
301 | ldr r12,[r14,#2*4] | ||
302 | eor r9,r9,r10 | ||
303 | eor r11,r11,r12 @ 1 cycle stall | ||
304 | eor r10,r5,r6 @ F_xx_xx | ||
305 | mov r9,r9,ror#31 | ||
306 | add r7,r7,r3,ror#27 @ E+=ROR(A,27) | ||
307 | eor r9,r9,r11,ror#31 | ||
308 | str r9,[r14,#-4]! | ||
309 | eor r10,r4,r10,ror#2 @ F_xx_xx | ||
310 | @ F_xx_xx | ||
311 | add r7,r7,r9 @ E+=X[i] | ||
312 | add r7,r7,r10 @ E+=F_20_39(B,C,D) | ||
313 | ldr r9,[r14,#15*4] | ||
314 | ldr r10,[r14,#13*4] | ||
315 | ldr r11,[r14,#7*4] | ||
316 | add r6,r8,r6,ror#2 @ E+=K_xx_xx | ||
317 | ldr r12,[r14,#2*4] | ||
318 | eor r9,r9,r10 | ||
319 | eor r11,r11,r12 @ 1 cycle stall | ||
320 | eor r10,r4,r5 @ F_xx_xx | ||
321 | mov r9,r9,ror#31 | ||
322 | add r6,r6,r7,ror#27 @ E+=ROR(A,27) | ||
323 | eor r9,r9,r11,ror#31 | ||
324 | str r9,[r14,#-4]! | ||
325 | eor r10,r3,r10,ror#2 @ F_xx_xx | ||
326 | @ F_xx_xx | ||
327 | add r6,r6,r9 @ E+=X[i] | ||
328 | add r6,r6,r10 @ E+=F_20_39(B,C,D) | ||
329 | ldr r9,[r14,#15*4] | ||
330 | ldr r10,[r14,#13*4] | ||
331 | ldr r11,[r14,#7*4] | ||
332 | add r5,r8,r5,ror#2 @ E+=K_xx_xx | ||
333 | ldr r12,[r14,#2*4] | ||
334 | eor r9,r9,r10 | ||
335 | eor r11,r11,r12 @ 1 cycle stall | ||
336 | eor r10,r3,r4 @ F_xx_xx | ||
337 | mov r9,r9,ror#31 | ||
338 | add r5,r5,r6,ror#27 @ E+=ROR(A,27) | ||
339 | eor r9,r9,r11,ror#31 | ||
340 | str r9,[r14,#-4]! | ||
341 | eor r10,r7,r10,ror#2 @ F_xx_xx | ||
342 | @ F_xx_xx | ||
343 | add r5,r5,r9 @ E+=X[i] | ||
344 | add r5,r5,r10 @ E+=F_20_39(B,C,D) | ||
345 | ldr r9,[r14,#15*4] | ||
346 | ldr r10,[r14,#13*4] | ||
347 | ldr r11,[r14,#7*4] | ||
348 | add r4,r8,r4,ror#2 @ E+=K_xx_xx | ||
349 | ldr r12,[r14,#2*4] | ||
350 | eor r9,r9,r10 | ||
351 | eor r11,r11,r12 @ 1 cycle stall | ||
352 | eor r10,r7,r3 @ F_xx_xx | ||
353 | mov r9,r9,ror#31 | ||
354 | add r4,r4,r5,ror#27 @ E+=ROR(A,27) | ||
355 | eor r9,r9,r11,ror#31 | ||
356 | str r9,[r14,#-4]! | ||
357 | eor r10,r6,r10,ror#2 @ F_xx_xx | ||
358 | @ F_xx_xx | ||
359 | add r4,r4,r9 @ E+=X[i] | ||
360 | add r4,r4,r10 @ E+=F_20_39(B,C,D) | ||
361 | ldr r9,[r14,#15*4] | ||
362 | ldr r10,[r14,#13*4] | ||
363 | ldr r11,[r14,#7*4] | ||
364 | add r3,r8,r3,ror#2 @ E+=K_xx_xx | ||
365 | ldr r12,[r14,#2*4] | ||
366 | eor r9,r9,r10 | ||
367 | eor r11,r11,r12 @ 1 cycle stall | ||
368 | eor r10,r6,r7 @ F_xx_xx | ||
369 | mov r9,r9,ror#31 | ||
370 | add r3,r3,r4,ror#27 @ E+=ROR(A,27) | ||
371 | eor r9,r9,r11,ror#31 | ||
372 | str r9,[r14,#-4]! | ||
373 | eor r10,r5,r10,ror#2 @ F_xx_xx | ||
374 | @ F_xx_xx | ||
375 | add r3,r3,r9 @ E+=X[i] | ||
376 | add r3,r3,r10 @ E+=F_20_39(B,C,D) | ||
377 | teq r14,sp @ preserve carry | ||
378 | bne .L_20_39_or_60_79 @ [+((12+3)*5+2)*4] | ||
379 | bcs .L_done @ [+((12+3)*5+2)*4], spare 300 bytes | ||
380 | |||
381 | ldr r8,.LK_40_59 | ||
382 | sub sp,sp,#20*4 @ [+2] | ||
383 | .L_40_59: | ||
384 | ldr r9,[r14,#15*4] | ||
385 | ldr r10,[r14,#13*4] | ||
386 | ldr r11,[r14,#7*4] | ||
387 | add r7,r8,r7,ror#2 @ E+=K_xx_xx | ||
388 | ldr r12,[r14,#2*4] | ||
389 | eor r9,r9,r10 | ||
390 | eor r11,r11,r12 @ 1 cycle stall | ||
391 | eor r10,r5,r6 @ F_xx_xx | ||
392 | mov r9,r9,ror#31 | ||
393 | add r7,r7,r3,ror#27 @ E+=ROR(A,27) | ||
394 | eor r9,r9,r11,ror#31 | ||
395 | str r9,[r14,#-4]! | ||
396 | and r10,r4,r10,ror#2 @ F_xx_xx | ||
397 | and r11,r5,r6 @ F_xx_xx | ||
398 | add r7,r7,r9 @ E+=X[i] | ||
399 | add r7,r7,r10 @ E+=F_40_59(B,C,D) | ||
400 | add r7,r7,r11,ror#2 | ||
401 | ldr r9,[r14,#15*4] | ||
402 | ldr r10,[r14,#13*4] | ||
403 | ldr r11,[r14,#7*4] | ||
404 | add r6,r8,r6,ror#2 @ E+=K_xx_xx | ||
405 | ldr r12,[r14,#2*4] | ||
406 | eor r9,r9,r10 | ||
407 | eor r11,r11,r12 @ 1 cycle stall | ||
408 | eor r10,r4,r5 @ F_xx_xx | ||
409 | mov r9,r9,ror#31 | ||
410 | add r6,r6,r7,ror#27 @ E+=ROR(A,27) | ||
411 | eor r9,r9,r11,ror#31 | ||
412 | str r9,[r14,#-4]! | ||
413 | and r10,r3,r10,ror#2 @ F_xx_xx | ||
414 | and r11,r4,r5 @ F_xx_xx | ||
415 | add r6,r6,r9 @ E+=X[i] | ||
416 | add r6,r6,r10 @ E+=F_40_59(B,C,D) | ||
417 | add r6,r6,r11,ror#2 | ||
418 | ldr r9,[r14,#15*4] | ||
419 | ldr r10,[r14,#13*4] | ||
420 | ldr r11,[r14,#7*4] | ||
421 | add r5,r8,r5,ror#2 @ E+=K_xx_xx | ||
422 | ldr r12,[r14,#2*4] | ||
423 | eor r9,r9,r10 | ||
424 | eor r11,r11,r12 @ 1 cycle stall | ||
425 | eor r10,r3,r4 @ F_xx_xx | ||
426 | mov r9,r9,ror#31 | ||
427 | add r5,r5,r6,ror#27 @ E+=ROR(A,27) | ||
428 | eor r9,r9,r11,ror#31 | ||
429 | str r9,[r14,#-4]! | ||
430 | and r10,r7,r10,ror#2 @ F_xx_xx | ||
431 | and r11,r3,r4 @ F_xx_xx | ||
432 | add r5,r5,r9 @ E+=X[i] | ||
433 | add r5,r5,r10 @ E+=F_40_59(B,C,D) | ||
434 | add r5,r5,r11,ror#2 | ||
435 | ldr r9,[r14,#15*4] | ||
436 | ldr r10,[r14,#13*4] | ||
437 | ldr r11,[r14,#7*4] | ||
438 | add r4,r8,r4,ror#2 @ E+=K_xx_xx | ||
439 | ldr r12,[r14,#2*4] | ||
440 | eor r9,r9,r10 | ||
441 | eor r11,r11,r12 @ 1 cycle stall | ||
442 | eor r10,r7,r3 @ F_xx_xx | ||
443 | mov r9,r9,ror#31 | ||
444 | add r4,r4,r5,ror#27 @ E+=ROR(A,27) | ||
445 | eor r9,r9,r11,ror#31 | ||
446 | str r9,[r14,#-4]! | ||
447 | and r10,r6,r10,ror#2 @ F_xx_xx | ||
448 | and r11,r7,r3 @ F_xx_xx | ||
449 | add r4,r4,r9 @ E+=X[i] | ||
450 | add r4,r4,r10 @ E+=F_40_59(B,C,D) | ||
451 | add r4,r4,r11,ror#2 | ||
452 | ldr r9,[r14,#15*4] | ||
453 | ldr r10,[r14,#13*4] | ||
454 | ldr r11,[r14,#7*4] | ||
455 | add r3,r8,r3,ror#2 @ E+=K_xx_xx | ||
456 | ldr r12,[r14,#2*4] | ||
457 | eor r9,r9,r10 | ||
458 | eor r11,r11,r12 @ 1 cycle stall | ||
459 | eor r10,r6,r7 @ F_xx_xx | ||
460 | mov r9,r9,ror#31 | ||
461 | add r3,r3,r4,ror#27 @ E+=ROR(A,27) | ||
462 | eor r9,r9,r11,ror#31 | ||
463 | str r9,[r14,#-4]! | ||
464 | and r10,r5,r10,ror#2 @ F_xx_xx | ||
465 | and r11,r6,r7 @ F_xx_xx | ||
466 | add r3,r3,r9 @ E+=X[i] | ||
467 | add r3,r3,r10 @ E+=F_40_59(B,C,D) | ||
468 | add r3,r3,r11,ror#2 | ||
469 | teq r14,sp | ||
470 | bne .L_40_59 @ [+((12+5)*5+2)*4] | ||
471 | |||
472 | ldr r8,.LK_60_79 | ||
473 | sub sp,sp,#20*4 | ||
474 | cmp sp,#0 @ set carry to denote 60_79 | ||
475 | b .L_20_39_or_60_79 @ [+4], spare 300 bytes | ||
476 | .L_done: | ||
477 | add sp,sp,#80*4 @ "deallocate" stack frame | ||
478 | ldmia r0,{r8,r9,r10,r11,r12} | ||
479 | add r3,r8,r3 | ||
480 | add r4,r9,r4 | ||
481 | add r5,r10,r5,ror#2 | ||
482 | add r6,r11,r6,ror#2 | ||
483 | add r7,r12,r7,ror#2 | ||
484 | stmia r0,{r3,r4,r5,r6,r7} | ||
485 | teq r1,r2 | ||
486 | bne .Lloop @ [+18], total 1307 | ||
487 | |||
488 | #if __ARM_ARCH__>=5 | ||
489 | ldmia sp!,{r4-r12,pc} | ||
490 | #else | ||
491 | ldmia sp!,{r4-r12,lr} | ||
492 | tst lr,#1 | ||
493 | moveq pc,lr @ be binary compatible with V4, yet | ||
494 | .word 0xe12fff1e @ interoperable with Thumb ISA:-) | ||
495 | #endif | ||
496 | .align 2 | ||
497 | .LK_00_19: .word 0x5a827999 | ||
498 | .LK_20_39: .word 0x6ed9eba1 | ||
499 | .LK_40_59: .word 0x8f1bbcdc | ||
500 | .LK_60_79: .word 0xca62c1d6 | ||
501 | .size sha1_block_data_order,.-sha1_block_data_order | ||
502 | .asciz "SHA1 block transform for ARMv4, CRYPTOGAMS by <appro@openssl.org>" | ||
503 | .align 2 | ||
diff --git a/arch/arm/crypto/sha1_glue.c b/arch/arm/crypto/sha1_glue.c new file mode 100644 index 000000000000..76cd976230bc --- /dev/null +++ b/arch/arm/crypto/sha1_glue.c | |||
@@ -0,0 +1,179 @@ | |||
1 | /* | ||
2 | * Cryptographic API. | ||
3 | * Glue code for the SHA1 Secure Hash Algorithm assembler implementation | ||
4 | * | ||
5 | * This file is based on sha1_generic.c and sha1_ssse3_glue.c | ||
6 | * | ||
7 | * Copyright (c) Alan Smithee. | ||
8 | * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk> | ||
9 | * Copyright (c) Jean-Francois Dive <jef@linuxbe.org> | ||
10 | * Copyright (c) Mathias Krause <minipli@googlemail.com> | ||
11 | * | ||
12 | * This program is free software; you can redistribute it and/or modify it | ||
13 | * under the terms of the GNU General Public License as published by the Free | ||
14 | * Software Foundation; either version 2 of the License, or (at your option) | ||
15 | * any later version. | ||
16 | * | ||
17 | */ | ||
18 | |||
19 | #include <crypto/internal/hash.h> | ||
20 | #include <linux/init.h> | ||
21 | #include <linux/module.h> | ||
22 | #include <linux/cryptohash.h> | ||
23 | #include <linux/types.h> | ||
24 | #include <crypto/sha.h> | ||
25 | #include <asm/byteorder.h> | ||
26 | |||
27 | struct SHA1_CTX { | ||
28 | uint32_t h0,h1,h2,h3,h4; | ||
29 | u64 count; | ||
30 | u8 data[SHA1_BLOCK_SIZE]; | ||
31 | }; | ||
32 | |||
33 | asmlinkage void sha1_block_data_order(struct SHA1_CTX *digest, | ||
34 | const unsigned char *data, unsigned int rounds); | ||
35 | |||
36 | |||
37 | static int sha1_init(struct shash_desc *desc) | ||
38 | { | ||
39 | struct SHA1_CTX *sctx = shash_desc_ctx(desc); | ||
40 | memset(sctx, 0, sizeof(*sctx)); | ||
41 | sctx->h0 = SHA1_H0; | ||
42 | sctx->h1 = SHA1_H1; | ||
43 | sctx->h2 = SHA1_H2; | ||
44 | sctx->h3 = SHA1_H3; | ||
45 | sctx->h4 = SHA1_H4; | ||
46 | return 0; | ||
47 | } | ||
48 | |||
49 | |||
50 | static int __sha1_update(struct SHA1_CTX *sctx, const u8 *data, | ||
51 | unsigned int len, unsigned int partial) | ||
52 | { | ||
53 | unsigned int done = 0; | ||
54 | |||
55 | sctx->count += len; | ||
56 | |||
57 | if (partial) { | ||
58 | done = SHA1_BLOCK_SIZE - partial; | ||
59 | memcpy(sctx->data + partial, data, done); | ||
60 | sha1_block_data_order(sctx, sctx->data, 1); | ||
61 | } | ||
62 | |||
63 | if (len - done >= SHA1_BLOCK_SIZE) { | ||
64 | const unsigned int rounds = (len - done) / SHA1_BLOCK_SIZE; | ||
65 | sha1_block_data_order(sctx, data + done, rounds); | ||
66 | done += rounds * SHA1_BLOCK_SIZE; | ||
67 | } | ||
68 | |||
69 | memcpy(sctx->data, data + done, len - done); | ||
70 | return 0; | ||
71 | } | ||
72 | |||
73 | |||
74 | static int sha1_update(struct shash_desc *desc, const u8 *data, | ||
75 | unsigned int len) | ||
76 | { | ||
77 | struct SHA1_CTX *sctx = shash_desc_ctx(desc); | ||
78 | unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; | ||
79 | int res; | ||
80 | |||
81 | /* Handle the fast case right here */ | ||
82 | if (partial + len < SHA1_BLOCK_SIZE) { | ||
83 | sctx->count += len; | ||
84 | memcpy(sctx->data + partial, data, len); | ||
85 | return 0; | ||
86 | } | ||
87 | res = __sha1_update(sctx, data, len, partial); | ||
88 | return res; | ||
89 | } | ||
90 | |||
91 | |||
92 | /* Add padding and return the message digest. */ | ||
93 | static int sha1_final(struct shash_desc *desc, u8 *out) | ||
94 | { | ||
95 | struct SHA1_CTX *sctx = shash_desc_ctx(desc); | ||
96 | unsigned int i, index, padlen; | ||
97 | __be32 *dst = (__be32 *)out; | ||
98 | __be64 bits; | ||
99 | static const u8 padding[SHA1_BLOCK_SIZE] = { 0x80, }; | ||
100 | |||
101 | bits = cpu_to_be64(sctx->count << 3); | ||
102 | |||
103 | /* Pad out to 56 mod 64 and append length */ | ||
104 | index = sctx->count % SHA1_BLOCK_SIZE; | ||
105 | padlen = (index < 56) ? (56 - index) : ((SHA1_BLOCK_SIZE+56) - index); | ||
106 | /* We need to fill a whole block for __sha1_update() */ | ||
107 | if (padlen <= 56) { | ||
108 | sctx->count += padlen; | ||
109 | memcpy(sctx->data + index, padding, padlen); | ||
110 | } else { | ||
111 | __sha1_update(sctx, padding, padlen, index); | ||
112 | } | ||
113 | __sha1_update(sctx, (const u8 *)&bits, sizeof(bits), 56); | ||
114 | |||
115 | /* Store state in digest */ | ||
116 | for (i = 0; i < 5; i++) | ||
117 | dst[i] = cpu_to_be32(((u32 *)sctx)[i]); | ||
118 | |||
119 | /* Wipe context */ | ||
120 | memset(sctx, 0, sizeof(*sctx)); | ||
121 | return 0; | ||
122 | } | ||
123 | |||
124 | |||
125 | static int sha1_export(struct shash_desc *desc, void *out) | ||
126 | { | ||
127 | struct SHA1_CTX *sctx = shash_desc_ctx(desc); | ||
128 | memcpy(out, sctx, sizeof(*sctx)); | ||
129 | return 0; | ||
130 | } | ||
131 | |||
132 | |||
133 | static int sha1_import(struct shash_desc *desc, const void *in) | ||
134 | { | ||
135 | struct SHA1_CTX *sctx = shash_desc_ctx(desc); | ||
136 | memcpy(sctx, in, sizeof(*sctx)); | ||
137 | return 0; | ||
138 | } | ||
139 | |||
140 | |||
141 | static struct shash_alg alg = { | ||
142 | .digestsize = SHA1_DIGEST_SIZE, | ||
143 | .init = sha1_init, | ||
144 | .update = sha1_update, | ||
145 | .final = sha1_final, | ||
146 | .export = sha1_export, | ||
147 | .import = sha1_import, | ||
148 | .descsize = sizeof(struct SHA1_CTX), | ||
149 | .statesize = sizeof(struct SHA1_CTX), | ||
150 | .base = { | ||
151 | .cra_name = "sha1", | ||
152 | .cra_driver_name= "sha1-asm", | ||
153 | .cra_priority = 150, | ||
154 | .cra_flags = CRYPTO_ALG_TYPE_SHASH, | ||
155 | .cra_blocksize = SHA1_BLOCK_SIZE, | ||
156 | .cra_module = THIS_MODULE, | ||
157 | } | ||
158 | }; | ||
159 | |||
160 | |||
161 | static int __init sha1_mod_init(void) | ||
162 | { | ||
163 | return crypto_register_shash(&alg); | ||
164 | } | ||
165 | |||
166 | |||
167 | static void __exit sha1_mod_fini(void) | ||
168 | { | ||
169 | crypto_unregister_shash(&alg); | ||
170 | } | ||
171 | |||
172 | |||
173 | module_init(sha1_mod_init); | ||
174 | module_exit(sha1_mod_fini); | ||
175 | |||
176 | MODULE_LICENSE("GPL"); | ||
177 | MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm (ARM)"); | ||
178 | MODULE_ALIAS("sha1"); | ||
179 | MODULE_AUTHOR("David McCullough <ucdevel@gmail.com>"); | ||
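Aside: the shash registered above is not called directly by users; it is reached through the kernel's generic hash API. Below is a minimal usage sketch, assuming the shash interface of this kernel generation — the helper name is made up and is not part of the patch.

#include <crypto/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/types.h>

static int sha1_asm_digest_example(const u8 *data, unsigned int len,
				   u8 out[SHA1_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	/* "sha1" resolves to the highest-priority provider; the assembler
	 * implementation above registers with cra_priority 150, so it is
	 * preferred over the generic C version once loaded. */
	tfm = crypto_alloc_shash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* descriptor carries the per-request state (struct SHA1_CTX here) */
	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}
	desc->tfm = tfm;
	desc->flags = 0;

	/* init + update + final in a single call */
	err = crypto_shash_digest(desc, data, len, out);

	kfree(desc);
	crypto_free_shash(tfm);
	return err;
}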
diff --git a/arch/powerpc/configs/ppc64_defconfig b/arch/powerpc/configs/ppc64_defconfig index 06b56245d78c..de7c4c53f5cf 100644 --- a/arch/powerpc/configs/ppc64_defconfig +++ b/arch/powerpc/configs/ppc64_defconfig | |||
@@ -486,7 +486,8 @@ CONFIG_CRYPTO_TWOFISH=m | |||
486 | CONFIG_CRYPTO_LZO=m | 486 | CONFIG_CRYPTO_LZO=m |
487 | # CONFIG_CRYPTO_ANSI_CPRNG is not set | 487 | # CONFIG_CRYPTO_ANSI_CPRNG is not set |
488 | CONFIG_CRYPTO_HW=y | 488 | CONFIG_CRYPTO_HW=y |
489 | CONFIG_CRYPTO_DEV_NX=m | 489 | CONFIG_CRYPTO_DEV_NX=y |
490 | CONFIG_CRYPTO_DEV_NX_ENCRYPT=m | ||
490 | CONFIG_VIRTUALIZATION=y | 491 | CONFIG_VIRTUALIZATION=y |
491 | CONFIG_KVM_BOOK3S_64=m | 492 | CONFIG_KVM_BOOK3S_64=m |
492 | CONFIG_KVM_BOOK3S_64_HV=y | 493 | CONFIG_KVM_BOOK3S_64_HV=y |
diff --git a/arch/powerpc/configs/pseries_defconfig b/arch/powerpc/configs/pseries_defconfig index 1f65b3c9b59a..9f4a9368f51b 100644 --- a/arch/powerpc/configs/pseries_defconfig +++ b/arch/powerpc/configs/pseries_defconfig | |||
@@ -369,7 +369,8 @@ CONFIG_CRYPTO_TWOFISH=m | |||
369 | CONFIG_CRYPTO_LZO=m | 369 | CONFIG_CRYPTO_LZO=m |
370 | # CONFIG_CRYPTO_ANSI_CPRNG is not set | 370 | # CONFIG_CRYPTO_ANSI_CPRNG is not set |
371 | CONFIG_CRYPTO_HW=y | 371 | CONFIG_CRYPTO_HW=y |
372 | CONFIG_CRYPTO_DEV_NX=m | 372 | CONFIG_CRYPTO_DEV_NX=y |
373 | CONFIG_CRYPTO_DEV_NX_ENCRYPT=m | ||
373 | CONFIG_VIRTUALIZATION=y | 374 | CONFIG_VIRTUALIZATION=y |
374 | CONFIG_KVM_BOOK3S_64=m | 375 | CONFIG_KVM_BOOK3S_64=m |
375 | CONFIG_KVM_BOOK3S_64_HV=y | 376 | CONFIG_KVM_BOOK3S_64_HV=y |
diff --git a/arch/powerpc/kernel/prom_init.c b/arch/powerpc/kernel/prom_init.c index e144498bcddd..47834a3f4938 100644 --- a/arch/powerpc/kernel/prom_init.c +++ b/arch/powerpc/kernel/prom_init.c | |||
@@ -705,6 +705,7 @@ static void __init early_cmdline_parse(void) | |||
705 | #endif | 705 | #endif |
706 | #define OV5_TYPE1_AFFINITY 0x80 /* Type 1 NUMA affinity */ | 706 | #define OV5_TYPE1_AFFINITY 0x80 /* Type 1 NUMA affinity */ |
707 | #define OV5_PFO_HW_RNG 0x80 /* PFO Random Number Generator */ | 707 | #define OV5_PFO_HW_RNG 0x80 /* PFO Random Number Generator */ |
708 | #define OV5_PFO_HW_842 0x40 /* PFO Compression Accelerator */ | ||
708 | #define OV5_PFO_HW_ENCR 0x20 /* PFO Encryption Accelerator */ | 709 | #define OV5_PFO_HW_ENCR 0x20 /* PFO Encryption Accelerator */ |
709 | 710 | ||
710 | /* Option Vector 6: IBM PAPR hints */ | 711 | /* Option Vector 6: IBM PAPR hints */ |
@@ -774,8 +775,7 @@ static unsigned char ibm_architecture_vec[] = { | |||
774 | 0, | 775 | 0, |
775 | 0, | 776 | 0, |
776 | 0, | 777 | 0, |
777 | OV5_PFO_HW_RNG | OV5_PFO_HW_ENCR, | 778 | OV5_PFO_HW_RNG | OV5_PFO_HW_ENCR | OV5_PFO_HW_842, |
778 | |||
779 | /* option vector 6: IBM PAPR hints */ | 779 | /* option vector 6: IBM PAPR hints */ |
780 | 4 - 2, /* length */ | 780 | 4 - 2, /* length */ |
781 | 0, | 781 | 0, |
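For reference, the byte actually advertised in option vector 5 by the changed line above is simply the OR of the three PFO feature bits defined earlier. A throwaway userspace check of the arithmetic (not kernel code, included only to make the resulting value explicit):

#include <stdio.h>

int main(void)
{
	/* values mirror the OV5_PFO_* defines in prom_init.c */
	unsigned char ov5_pfo = 0x80 /* HW_RNG  */ |
				0x40 /* HW_842  */ |
				0x20 /* HW_ENCR */;

	printf("0x%02x\n", ov5_pfo);	/* prints 0xe0 */
	return 0;
}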
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c index e402a9dd4eda..da3c1a7dcd8e 100644 --- a/arch/s390/crypto/aes_s390.c +++ b/arch/s390/crypto/aes_s390.c | |||
@@ -216,7 +216,6 @@ static struct crypto_alg aes_alg = { | |||
216 | .cra_blocksize = AES_BLOCK_SIZE, | 216 | .cra_blocksize = AES_BLOCK_SIZE, |
217 | .cra_ctxsize = sizeof(struct s390_aes_ctx), | 217 | .cra_ctxsize = sizeof(struct s390_aes_ctx), |
218 | .cra_module = THIS_MODULE, | 218 | .cra_module = THIS_MODULE, |
219 | .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), | ||
220 | .cra_init = fallback_init_cip, | 219 | .cra_init = fallback_init_cip, |
221 | .cra_exit = fallback_exit_cip, | 220 | .cra_exit = fallback_exit_cip, |
222 | .cra_u = { | 221 | .cra_u = { |
@@ -398,7 +397,6 @@ static struct crypto_alg ecb_aes_alg = { | |||
398 | .cra_ctxsize = sizeof(struct s390_aes_ctx), | 397 | .cra_ctxsize = sizeof(struct s390_aes_ctx), |
399 | .cra_type = &crypto_blkcipher_type, | 398 | .cra_type = &crypto_blkcipher_type, |
400 | .cra_module = THIS_MODULE, | 399 | .cra_module = THIS_MODULE, |
401 | .cra_list = LIST_HEAD_INIT(ecb_aes_alg.cra_list), | ||
402 | .cra_init = fallback_init_blk, | 400 | .cra_init = fallback_init_blk, |
403 | .cra_exit = fallback_exit_blk, | 401 | .cra_exit = fallback_exit_blk, |
404 | .cra_u = { | 402 | .cra_u = { |
@@ -508,7 +506,6 @@ static struct crypto_alg cbc_aes_alg = { | |||
508 | .cra_ctxsize = sizeof(struct s390_aes_ctx), | 506 | .cra_ctxsize = sizeof(struct s390_aes_ctx), |
509 | .cra_type = &crypto_blkcipher_type, | 507 | .cra_type = &crypto_blkcipher_type, |
510 | .cra_module = THIS_MODULE, | 508 | .cra_module = THIS_MODULE, |
511 | .cra_list = LIST_HEAD_INIT(cbc_aes_alg.cra_list), | ||
512 | .cra_init = fallback_init_blk, | 509 | .cra_init = fallback_init_blk, |
513 | .cra_exit = fallback_exit_blk, | 510 | .cra_exit = fallback_exit_blk, |
514 | .cra_u = { | 511 | .cra_u = { |
@@ -710,7 +707,6 @@ static struct crypto_alg xts_aes_alg = { | |||
710 | .cra_ctxsize = sizeof(struct s390_xts_ctx), | 707 | .cra_ctxsize = sizeof(struct s390_xts_ctx), |
711 | .cra_type = &crypto_blkcipher_type, | 708 | .cra_type = &crypto_blkcipher_type, |
712 | .cra_module = THIS_MODULE, | 709 | .cra_module = THIS_MODULE, |
713 | .cra_list = LIST_HEAD_INIT(xts_aes_alg.cra_list), | ||
714 | .cra_init = xts_fallback_init, | 710 | .cra_init = xts_fallback_init, |
715 | .cra_exit = xts_fallback_exit, | 711 | .cra_exit = xts_fallback_exit, |
716 | .cra_u = { | 712 | .cra_u = { |
@@ -832,7 +828,6 @@ static struct crypto_alg ctr_aes_alg = { | |||
832 | .cra_ctxsize = sizeof(struct s390_aes_ctx), | 828 | .cra_ctxsize = sizeof(struct s390_aes_ctx), |
833 | .cra_type = &crypto_blkcipher_type, | 829 | .cra_type = &crypto_blkcipher_type, |
834 | .cra_module = THIS_MODULE, | 830 | .cra_module = THIS_MODULE, |
835 | .cra_list = LIST_HEAD_INIT(ctr_aes_alg.cra_list), | ||
836 | .cra_u = { | 831 | .cra_u = { |
837 | .blkcipher = { | 832 | .blkcipher = { |
838 | .min_keysize = AES_MIN_KEY_SIZE, | 833 | .min_keysize = AES_MIN_KEY_SIZE, |
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c index 1eaa371ca3ee..b49fb96f4207 100644 --- a/arch/s390/crypto/des_s390.c +++ b/arch/s390/crypto/des_s390.c | |||
@@ -70,7 +70,6 @@ static struct crypto_alg des_alg = { | |||
70 | .cra_blocksize = DES_BLOCK_SIZE, | 70 | .cra_blocksize = DES_BLOCK_SIZE, |
71 | .cra_ctxsize = sizeof(struct s390_des_ctx), | 71 | .cra_ctxsize = sizeof(struct s390_des_ctx), |
72 | .cra_module = THIS_MODULE, | 72 | .cra_module = THIS_MODULE, |
73 | .cra_list = LIST_HEAD_INIT(des_alg.cra_list), | ||
74 | .cra_u = { | 73 | .cra_u = { |
75 | .cipher = { | 74 | .cipher = { |
76 | .cia_min_keysize = DES_KEY_SIZE, | 75 | .cia_min_keysize = DES_KEY_SIZE, |
@@ -163,7 +162,6 @@ static struct crypto_alg ecb_des_alg = { | |||
163 | .cra_ctxsize = sizeof(struct s390_des_ctx), | 162 | .cra_ctxsize = sizeof(struct s390_des_ctx), |
164 | .cra_type = &crypto_blkcipher_type, | 163 | .cra_type = &crypto_blkcipher_type, |
165 | .cra_module = THIS_MODULE, | 164 | .cra_module = THIS_MODULE, |
166 | .cra_list = LIST_HEAD_INIT(ecb_des_alg.cra_list), | ||
167 | .cra_u = { | 165 | .cra_u = { |
168 | .blkcipher = { | 166 | .blkcipher = { |
169 | .min_keysize = DES_KEY_SIZE, | 167 | .min_keysize = DES_KEY_SIZE, |
@@ -206,7 +204,6 @@ static struct crypto_alg cbc_des_alg = { | |||
206 | .cra_ctxsize = sizeof(struct s390_des_ctx), | 204 | .cra_ctxsize = sizeof(struct s390_des_ctx), |
207 | .cra_type = &crypto_blkcipher_type, | 205 | .cra_type = &crypto_blkcipher_type, |
208 | .cra_module = THIS_MODULE, | 206 | .cra_module = THIS_MODULE, |
209 | .cra_list = LIST_HEAD_INIT(cbc_des_alg.cra_list), | ||
210 | .cra_u = { | 207 | .cra_u = { |
211 | .blkcipher = { | 208 | .blkcipher = { |
212 | .min_keysize = DES_KEY_SIZE, | 209 | .min_keysize = DES_KEY_SIZE, |
@@ -271,7 +268,6 @@ static struct crypto_alg des3_alg = { | |||
271 | .cra_blocksize = DES_BLOCK_SIZE, | 268 | .cra_blocksize = DES_BLOCK_SIZE, |
272 | .cra_ctxsize = sizeof(struct s390_des_ctx), | 269 | .cra_ctxsize = sizeof(struct s390_des_ctx), |
273 | .cra_module = THIS_MODULE, | 270 | .cra_module = THIS_MODULE, |
274 | .cra_list = LIST_HEAD_INIT(des3_alg.cra_list), | ||
275 | .cra_u = { | 271 | .cra_u = { |
276 | .cipher = { | 272 | .cipher = { |
277 | .cia_min_keysize = DES3_KEY_SIZE, | 273 | .cia_min_keysize = DES3_KEY_SIZE, |
@@ -314,8 +310,6 @@ static struct crypto_alg ecb_des3_alg = { | |||
314 | .cra_ctxsize = sizeof(struct s390_des_ctx), | 310 | .cra_ctxsize = sizeof(struct s390_des_ctx), |
315 | .cra_type = &crypto_blkcipher_type, | 311 | .cra_type = &crypto_blkcipher_type, |
316 | .cra_module = THIS_MODULE, | 312 | .cra_module = THIS_MODULE, |
317 | .cra_list = LIST_HEAD_INIT( | ||
318 | ecb_des3_alg.cra_list), | ||
319 | .cra_u = { | 313 | .cra_u = { |
320 | .blkcipher = { | 314 | .blkcipher = { |
321 | .min_keysize = DES3_KEY_SIZE, | 315 | .min_keysize = DES3_KEY_SIZE, |
@@ -358,8 +352,6 @@ static struct crypto_alg cbc_des3_alg = { | |||
358 | .cra_ctxsize = sizeof(struct s390_des_ctx), | 352 | .cra_ctxsize = sizeof(struct s390_des_ctx), |
359 | .cra_type = &crypto_blkcipher_type, | 353 | .cra_type = &crypto_blkcipher_type, |
360 | .cra_module = THIS_MODULE, | 354 | .cra_module = THIS_MODULE, |
361 | .cra_list = LIST_HEAD_INIT( | ||
362 | cbc_des3_alg.cra_list), | ||
363 | .cra_u = { | 355 | .cra_u = { |
364 | .blkcipher = { | 356 | .blkcipher = { |
365 | .min_keysize = DES3_KEY_SIZE, | 357 | .min_keysize = DES3_KEY_SIZE, |
@@ -452,7 +444,6 @@ static struct crypto_alg ctr_des_alg = { | |||
452 | .cra_ctxsize = sizeof(struct s390_des_ctx), | 444 | .cra_ctxsize = sizeof(struct s390_des_ctx), |
453 | .cra_type = &crypto_blkcipher_type, | 445 | .cra_type = &crypto_blkcipher_type, |
454 | .cra_module = THIS_MODULE, | 446 | .cra_module = THIS_MODULE, |
455 | .cra_list = LIST_HEAD_INIT(ctr_des_alg.cra_list), | ||
456 | .cra_u = { | 447 | .cra_u = { |
457 | .blkcipher = { | 448 | .blkcipher = { |
458 | .min_keysize = DES_KEY_SIZE, | 449 | .min_keysize = DES_KEY_SIZE, |
@@ -496,7 +487,6 @@ static struct crypto_alg ctr_des3_alg = { | |||
496 | .cra_ctxsize = sizeof(struct s390_des_ctx), | 487 | .cra_ctxsize = sizeof(struct s390_des_ctx), |
497 | .cra_type = &crypto_blkcipher_type, | 488 | .cra_type = &crypto_blkcipher_type, |
498 | .cra_module = THIS_MODULE, | 489 | .cra_module = THIS_MODULE, |
499 | .cra_list = LIST_HEAD_INIT(ctr_des3_alg.cra_list), | ||
500 | .cra_u = { | 490 | .cra_u = { |
501 | .blkcipher = { | 491 | .blkcipher = { |
502 | .min_keysize = DES3_KEY_SIZE, | 492 | .min_keysize = DES3_KEY_SIZE, |
diff --git a/arch/s390/crypto/ghash_s390.c b/arch/s390/crypto/ghash_s390.c index b1bd170f24b1..1ebd3a15cca4 100644 --- a/arch/s390/crypto/ghash_s390.c +++ b/arch/s390/crypto/ghash_s390.c | |||
@@ -135,7 +135,6 @@ static struct shash_alg ghash_alg = { | |||
135 | .cra_blocksize = GHASH_BLOCK_SIZE, | 135 | .cra_blocksize = GHASH_BLOCK_SIZE, |
136 | .cra_ctxsize = sizeof(struct ghash_ctx), | 136 | .cra_ctxsize = sizeof(struct ghash_ctx), |
137 | .cra_module = THIS_MODULE, | 137 | .cra_module = THIS_MODULE, |
138 | .cra_list = LIST_HEAD_INIT(ghash_alg.base.cra_list), | ||
139 | }, | 138 | }, |
140 | }; | 139 | }; |
141 | 140 | ||
diff --git a/arch/x86/crypto/Makefile b/arch/x86/crypto/Makefile index e908e5de82d3..5bacb4a226ac 100644 --- a/arch/x86/crypto/Makefile +++ b/arch/x86/crypto/Makefile | |||
@@ -12,6 +12,8 @@ obj-$(CONFIG_CRYPTO_SERPENT_SSE2_586) += serpent-sse2-i586.o | |||
12 | 12 | ||
13 | obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o | 13 | obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o |
14 | obj-$(CONFIG_CRYPTO_CAMELLIA_X86_64) += camellia-x86_64.o | 14 | obj-$(CONFIG_CRYPTO_CAMELLIA_X86_64) += camellia-x86_64.o |
15 | obj-$(CONFIG_CRYPTO_CAST5_AVX_X86_64) += cast5-avx-x86_64.o | ||
16 | obj-$(CONFIG_CRYPTO_CAST6_AVX_X86_64) += cast6-avx-x86_64.o | ||
15 | obj-$(CONFIG_CRYPTO_BLOWFISH_X86_64) += blowfish-x86_64.o | 17 | obj-$(CONFIG_CRYPTO_BLOWFISH_X86_64) += blowfish-x86_64.o |
16 | obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o | 18 | obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o |
17 | obj-$(CONFIG_CRYPTO_TWOFISH_X86_64_3WAY) += twofish-x86_64-3way.o | 19 | obj-$(CONFIG_CRYPTO_TWOFISH_X86_64_3WAY) += twofish-x86_64-3way.o |
@@ -32,6 +34,8 @@ serpent-sse2-i586-y := serpent-sse2-i586-asm_32.o serpent_sse2_glue.o | |||
32 | 34 | ||
33 | aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o | 35 | aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o |
34 | camellia-x86_64-y := camellia-x86_64-asm_64.o camellia_glue.o | 36 | camellia-x86_64-y := camellia-x86_64-asm_64.o camellia_glue.o |
37 | cast5-avx-x86_64-y := cast5-avx-x86_64-asm_64.o cast5_avx_glue.o | ||
38 | cast6-avx-x86_64-y := cast6-avx-x86_64-asm_64.o cast6_avx_glue.o | ||
35 | blowfish-x86_64-y := blowfish-x86_64-asm_64.o blowfish_glue.o | 39 | blowfish-x86_64-y := blowfish-x86_64-asm_64.o blowfish_glue.o |
36 | twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o | 40 | twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o |
37 | twofish-x86_64-3way-y := twofish-x86_64-asm_64-3way.o twofish_glue_3way.o | 41 | twofish-x86_64-3way-y := twofish-x86_64-asm_64-3way.o twofish_glue_3way.o |
diff --git a/arch/x86/crypto/aes_glue.c b/arch/x86/crypto/aes_glue.c index 59b37deb8c8d..aafe8ce0d65d 100644 --- a/arch/x86/crypto/aes_glue.c +++ b/arch/x86/crypto/aes_glue.c | |||
@@ -40,7 +40,6 @@ static struct crypto_alg aes_alg = { | |||
40 | .cra_blocksize = AES_BLOCK_SIZE, | 40 | .cra_blocksize = AES_BLOCK_SIZE, |
41 | .cra_ctxsize = sizeof(struct crypto_aes_ctx), | 41 | .cra_ctxsize = sizeof(struct crypto_aes_ctx), |
42 | .cra_module = THIS_MODULE, | 42 | .cra_module = THIS_MODULE, |
43 | .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), | ||
44 | .cra_u = { | 43 | .cra_u = { |
45 | .cipher = { | 44 | .cipher = { |
46 | .cia_min_keysize = AES_MIN_KEY_SIZE, | 45 | .cia_min_keysize = AES_MIN_KEY_SIZE, |
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c index 34fdcff4d2c8..7c04d0da709b 100644 --- a/arch/x86/crypto/aesni-intel_glue.c +++ b/arch/x86/crypto/aesni-intel_glue.c | |||
@@ -28,6 +28,9 @@ | |||
28 | #include <crypto/aes.h> | 28 | #include <crypto/aes.h> |
29 | #include <crypto/cryptd.h> | 29 | #include <crypto/cryptd.h> |
30 | #include <crypto/ctr.h> | 30 | #include <crypto/ctr.h> |
31 | #include <crypto/b128ops.h> | ||
32 | #include <crypto/lrw.h> | ||
33 | #include <crypto/xts.h> | ||
31 | #include <asm/cpu_device_id.h> | 34 | #include <asm/cpu_device_id.h> |
32 | #include <asm/i387.h> | 35 | #include <asm/i387.h> |
33 | #include <asm/crypto/aes.h> | 36 | #include <asm/crypto/aes.h> |
@@ -41,18 +44,10 @@ | |||
41 | #define HAS_CTR | 44 | #define HAS_CTR |
42 | #endif | 45 | #endif |
43 | 46 | ||
44 | #if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE) | ||
45 | #define HAS_LRW | ||
46 | #endif | ||
47 | |||
48 | #if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE) | 47 | #if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE) |
49 | #define HAS_PCBC | 48 | #define HAS_PCBC |
50 | #endif | 49 | #endif |
51 | 50 | ||
52 | #if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE) | ||
53 | #define HAS_XTS | ||
54 | #endif | ||
55 | |||
56 | /* This data is stored at the end of the crypto_tfm struct. | 51 | /* This data is stored at the end of the crypto_tfm struct. |
57 | * It's a type of per "session" data storage location. | 52 | * It's a type of per "session" data storage location. |
58 | * This needs to be 16 byte aligned. | 53 | * This needs to be 16 byte aligned. |
@@ -79,6 +74,16 @@ struct aesni_hash_subkey_req_data { | |||
79 | #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1)) | 74 | #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1)) |
80 | #define RFC4106_HASH_SUBKEY_SIZE 16 | 75 | #define RFC4106_HASH_SUBKEY_SIZE 16 |
81 | 76 | ||
77 | struct aesni_lrw_ctx { | ||
78 | struct lrw_table_ctx lrw_table; | ||
79 | u8 raw_aes_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1]; | ||
80 | }; | ||
81 | |||
82 | struct aesni_xts_ctx { | ||
83 | u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1]; | ||
84 | u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1]; | ||
85 | }; | ||
86 | |||
82 | asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key, | 87 | asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key, |
83 | unsigned int key_len); | 88 | unsigned int key_len); |
84 | asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out, | 89 | asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out, |
@@ -398,13 +403,6 @@ static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm) | |||
398 | #endif | 403 | #endif |
399 | #endif | 404 | #endif |
400 | 405 | ||
401 | #ifdef HAS_LRW | ||
402 | static int ablk_lrw_init(struct crypto_tfm *tfm) | ||
403 | { | ||
404 | return ablk_init_common(tfm, "fpu(lrw(__driver-aes-aesni))"); | ||
405 | } | ||
406 | #endif | ||
407 | |||
408 | #ifdef HAS_PCBC | 406 | #ifdef HAS_PCBC |
409 | static int ablk_pcbc_init(struct crypto_tfm *tfm) | 407 | static int ablk_pcbc_init(struct crypto_tfm *tfm) |
410 | { | 408 | { |
@@ -412,12 +410,160 @@ static int ablk_pcbc_init(struct crypto_tfm *tfm) | |||
412 | } | 410 | } |
413 | #endif | 411 | #endif |
414 | 412 | ||
415 | #ifdef HAS_XTS | 413 | static void lrw_xts_encrypt_callback(void *ctx, u8 *blks, unsigned int nbytes) |
416 | static int ablk_xts_init(struct crypto_tfm *tfm) | ||
417 | { | 414 | { |
418 | return ablk_init_common(tfm, "fpu(xts(__driver-aes-aesni))"); | 415 | aesni_ecb_enc(ctx, blks, blks, nbytes); |
416 | } | ||
417 | |||
418 | static void lrw_xts_decrypt_callback(void *ctx, u8 *blks, unsigned int nbytes) | ||
419 | { | ||
420 | aesni_ecb_dec(ctx, blks, blks, nbytes); | ||
421 | } | ||
422 | |||
423 | static int lrw_aesni_setkey(struct crypto_tfm *tfm, const u8 *key, | ||
424 | unsigned int keylen) | ||
425 | { | ||
426 | struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm); | ||
427 | int err; | ||
428 | |||
429 | err = aes_set_key_common(tfm, ctx->raw_aes_ctx, key, | ||
430 | keylen - AES_BLOCK_SIZE); | ||
431 | if (err) | ||
432 | return err; | ||
433 | |||
434 | return lrw_init_table(&ctx->lrw_table, key + keylen - AES_BLOCK_SIZE); | ||
435 | } | ||
436 | |||
437 | static void lrw_aesni_exit_tfm(struct crypto_tfm *tfm) | ||
438 | { | ||
439 | struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm); | ||
440 | |||
441 | lrw_free_table(&ctx->lrw_table); | ||
442 | } | ||
443 | |||
444 | static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
445 | struct scatterlist *src, unsigned int nbytes) | ||
446 | { | ||
447 | struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
448 | be128 buf[8]; | ||
449 | struct lrw_crypt_req req = { | ||
450 | .tbuf = buf, | ||
451 | .tbuflen = sizeof(buf), | ||
452 | |||
453 | .table_ctx = &ctx->lrw_table, | ||
454 | .crypt_ctx = aes_ctx(ctx->raw_aes_ctx), | ||
455 | .crypt_fn = lrw_xts_encrypt_callback, | ||
456 | }; | ||
457 | int ret; | ||
458 | |||
459 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
460 | |||
461 | kernel_fpu_begin(); | ||
462 | ret = lrw_crypt(desc, dst, src, nbytes, &req); | ||
463 | kernel_fpu_end(); | ||
464 | |||
465 | return ret; | ||
466 | } | ||
467 | |||
468 | static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
469 | struct scatterlist *src, unsigned int nbytes) | ||
470 | { | ||
471 | struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
472 | be128 buf[8]; | ||
473 | struct lrw_crypt_req req = { | ||
474 | .tbuf = buf, | ||
475 | .tbuflen = sizeof(buf), | ||
476 | |||
477 | .table_ctx = &ctx->lrw_table, | ||
478 | .crypt_ctx = aes_ctx(ctx->raw_aes_ctx), | ||
479 | .crypt_fn = lrw_xts_decrypt_callback, | ||
480 | }; | ||
481 | int ret; | ||
482 | |||
483 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
484 | |||
485 | kernel_fpu_begin(); | ||
486 | ret = lrw_crypt(desc, dst, src, nbytes, &req); | ||
487 | kernel_fpu_end(); | ||
488 | |||
489 | return ret; | ||
490 | } | ||
491 | |||
492 | static int xts_aesni_setkey(struct crypto_tfm *tfm, const u8 *key, | ||
493 | unsigned int keylen) | ||
494 | { | ||
495 | struct aesni_xts_ctx *ctx = crypto_tfm_ctx(tfm); | ||
496 | u32 *flags = &tfm->crt_flags; | ||
497 | int err; | ||
498 | |||
499 | /* key consists of keys of equal size concatenated, therefore | ||
500 | * the length must be even | ||
501 | */ | ||
502 | if (keylen % 2) { | ||
503 | *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; | ||
504 | return -EINVAL; | ||
505 | } | ||
506 | |||
507 | /* first half of xts-key is for crypt */ | ||
508 | err = aes_set_key_common(tfm, ctx->raw_crypt_ctx, key, keylen / 2); | ||
509 | if (err) | ||
510 | return err; | ||
511 | |||
512 | /* second half of xts-key is for tweak */ | ||
513 | return aes_set_key_common(tfm, ctx->raw_tweak_ctx, key + keylen / 2, | ||
514 | keylen / 2); | ||
515 | } | ||
516 | |||
517 | |||
518 | static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
519 | struct scatterlist *src, unsigned int nbytes) | ||
520 | { | ||
521 | struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
522 | be128 buf[8]; | ||
523 | struct xts_crypt_req req = { | ||
524 | .tbuf = buf, | ||
525 | .tbuflen = sizeof(buf), | ||
526 | |||
527 | .tweak_ctx = aes_ctx(ctx->raw_tweak_ctx), | ||
528 | .tweak_fn = XTS_TWEAK_CAST(aesni_enc), | ||
529 | .crypt_ctx = aes_ctx(ctx->raw_crypt_ctx), | ||
530 | .crypt_fn = lrw_xts_encrypt_callback, | ||
531 | }; | ||
532 | int ret; | ||
533 | |||
534 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
535 | |||
536 | kernel_fpu_begin(); | ||
537 | ret = xts_crypt(desc, dst, src, nbytes, &req); | ||
538 | kernel_fpu_end(); | ||
539 | |||
540 | return ret; | ||
541 | } | ||
542 | |||
543 | static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
544 | struct scatterlist *src, unsigned int nbytes) | ||
545 | { | ||
546 | struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
547 | be128 buf[8]; | ||
548 | struct xts_crypt_req req = { | ||
549 | .tbuf = buf, | ||
550 | .tbuflen = sizeof(buf), | ||
551 | |||
552 | .tweak_ctx = aes_ctx(ctx->raw_tweak_ctx), | ||
553 | .tweak_fn = XTS_TWEAK_CAST(aesni_enc), | ||
554 | .crypt_ctx = aes_ctx(ctx->raw_crypt_ctx), | ||
555 | .crypt_fn = lrw_xts_decrypt_callback, | ||
556 | }; | ||
557 | int ret; | ||
558 | |||
559 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
560 | |||
561 | kernel_fpu_begin(); | ||
562 | ret = xts_crypt(desc, dst, src, nbytes, &req); | ||
563 | kernel_fpu_end(); | ||
564 | |||
565 | return ret; | ||
419 | } | 566 | } |
420 | #endif | ||
421 | 567 | ||
422 | #ifdef CONFIG_X86_64 | 568 | #ifdef CONFIG_X86_64 |
423 | static int rfc4106_init(struct crypto_tfm *tfm) | 569 | static int rfc4106_init(struct crypto_tfm *tfm) |
@@ -1035,10 +1181,10 @@ static struct crypto_alg aesni_algs[] = { { | |||
1035 | }, | 1181 | }, |
1036 | #endif | 1182 | #endif |
1037 | #endif | 1183 | #endif |
1038 | #ifdef HAS_LRW | 1184 | #ifdef HAS_PCBC |
1039 | }, { | 1185 | }, { |
1040 | .cra_name = "lrw(aes)", | 1186 | .cra_name = "pcbc(aes)", |
1041 | .cra_driver_name = "lrw-aes-aesni", | 1187 | .cra_driver_name = "pcbc-aes-aesni", |
1042 | .cra_priority = 400, | 1188 | .cra_priority = 400, |
1043 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | 1189 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, |
1044 | .cra_blocksize = AES_BLOCK_SIZE, | 1190 | .cra_blocksize = AES_BLOCK_SIZE, |
@@ -1046,12 +1192,12 @@ static struct crypto_alg aesni_algs[] = { { | |||
1046 | .cra_alignmask = 0, | 1192 | .cra_alignmask = 0, |
1047 | .cra_type = &crypto_ablkcipher_type, | 1193 | .cra_type = &crypto_ablkcipher_type, |
1048 | .cra_module = THIS_MODULE, | 1194 | .cra_module = THIS_MODULE, |
1049 | .cra_init = ablk_lrw_init, | 1195 | .cra_init = ablk_pcbc_init, |
1050 | .cra_exit = ablk_exit, | 1196 | .cra_exit = ablk_exit, |
1051 | .cra_u = { | 1197 | .cra_u = { |
1052 | .ablkcipher = { | 1198 | .ablkcipher = { |
1053 | .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE, | 1199 | .min_keysize = AES_MIN_KEY_SIZE, |
1054 | .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE, | 1200 | .max_keysize = AES_MAX_KEY_SIZE, |
1055 | .ivsize = AES_BLOCK_SIZE, | 1201 | .ivsize = AES_BLOCK_SIZE, |
1056 | .setkey = ablk_set_key, | 1202 | .setkey = ablk_set_key, |
1057 | .encrypt = ablk_encrypt, | 1203 | .encrypt = ablk_encrypt, |
@@ -1059,10 +1205,50 @@ static struct crypto_alg aesni_algs[] = { { | |||
1059 | }, | 1205 | }, |
1060 | }, | 1206 | }, |
1061 | #endif | 1207 | #endif |
1062 | #ifdef HAS_PCBC | ||
1063 | }, { | 1208 | }, { |
1064 | .cra_name = "pcbc(aes)", | 1209 | .cra_name = "__lrw-aes-aesni", |
1065 | .cra_driver_name = "pcbc-aes-aesni", | 1210 | .cra_driver_name = "__driver-lrw-aes-aesni", |
1211 | .cra_priority = 0, | ||
1212 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
1213 | .cra_blocksize = AES_BLOCK_SIZE, | ||
1214 | .cra_ctxsize = sizeof(struct aesni_lrw_ctx), | ||
1215 | .cra_alignmask = 0, | ||
1216 | .cra_type = &crypto_blkcipher_type, | ||
1217 | .cra_module = THIS_MODULE, | ||
1218 | .cra_exit = lrw_aesni_exit_tfm, | ||
1219 | .cra_u = { | ||
1220 | .blkcipher = { | ||
1221 | .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE, | ||
1222 | .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE, | ||
1223 | .ivsize = AES_BLOCK_SIZE, | ||
1224 | .setkey = lrw_aesni_setkey, | ||
1225 | .encrypt = lrw_encrypt, | ||
1226 | .decrypt = lrw_decrypt, | ||
1227 | }, | ||
1228 | }, | ||
1229 | }, { | ||
1230 | .cra_name = "__xts-aes-aesni", | ||
1231 | .cra_driver_name = "__driver-xts-aes-aesni", | ||
1232 | .cra_priority = 0, | ||
1233 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
1234 | .cra_blocksize = AES_BLOCK_SIZE, | ||
1235 | .cra_ctxsize = sizeof(struct aesni_xts_ctx), | ||
1236 | .cra_alignmask = 0, | ||
1237 | .cra_type = &crypto_blkcipher_type, | ||
1238 | .cra_module = THIS_MODULE, | ||
1239 | .cra_u = { | ||
1240 | .blkcipher = { | ||
1241 | .min_keysize = 2 * AES_MIN_KEY_SIZE, | ||
1242 | .max_keysize = 2 * AES_MAX_KEY_SIZE, | ||
1243 | .ivsize = AES_BLOCK_SIZE, | ||
1244 | .setkey = xts_aesni_setkey, | ||
1245 | .encrypt = xts_encrypt, | ||
1246 | .decrypt = xts_decrypt, | ||
1247 | }, | ||
1248 | }, | ||
1249 | }, { | ||
1250 | .cra_name = "lrw(aes)", | ||
1251 | .cra_driver_name = "lrw-aes-aesni", | ||
1066 | .cra_priority = 400, | 1252 | .cra_priority = 400, |
1067 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | 1253 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, |
1068 | .cra_blocksize = AES_BLOCK_SIZE, | 1254 | .cra_blocksize = AES_BLOCK_SIZE, |
@@ -1070,20 +1256,18 @@ static struct crypto_alg aesni_algs[] = { { | |||
1070 | .cra_alignmask = 0, | 1256 | .cra_alignmask = 0, |
1071 | .cra_type = &crypto_ablkcipher_type, | 1257 | .cra_type = &crypto_ablkcipher_type, |
1072 | .cra_module = THIS_MODULE, | 1258 | .cra_module = THIS_MODULE, |
1073 | .cra_init = ablk_pcbc_init, | 1259 | .cra_init = ablk_init, |
1074 | .cra_exit = ablk_exit, | 1260 | .cra_exit = ablk_exit, |
1075 | .cra_u = { | 1261 | .cra_u = { |
1076 | .ablkcipher = { | 1262 | .ablkcipher = { |
1077 | .min_keysize = AES_MIN_KEY_SIZE, | 1263 | .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE, |
1078 | .max_keysize = AES_MAX_KEY_SIZE, | 1264 | .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE, |
1079 | .ivsize = AES_BLOCK_SIZE, | 1265 | .ivsize = AES_BLOCK_SIZE, |
1080 | .setkey = ablk_set_key, | 1266 | .setkey = ablk_set_key, |
1081 | .encrypt = ablk_encrypt, | 1267 | .encrypt = ablk_encrypt, |
1082 | .decrypt = ablk_decrypt, | 1268 | .decrypt = ablk_decrypt, |
1083 | }, | 1269 | }, |
1084 | }, | 1270 | }, |
1085 | #endif | ||
1086 | #ifdef HAS_XTS | ||
1087 | }, { | 1271 | }, { |
1088 | .cra_name = "xts(aes)", | 1272 | .cra_name = "xts(aes)", |
1089 | .cra_driver_name = "xts-aes-aesni", | 1273 | .cra_driver_name = "xts-aes-aesni", |
@@ -1094,7 +1278,7 @@ static struct crypto_alg aesni_algs[] = { { | |||
1094 | .cra_alignmask = 0, | 1278 | .cra_alignmask = 0, |
1095 | .cra_type = &crypto_ablkcipher_type, | 1279 | .cra_type = &crypto_ablkcipher_type, |
1096 | .cra_module = THIS_MODULE, | 1280 | .cra_module = THIS_MODULE, |
1097 | .cra_init = ablk_xts_init, | 1281 | .cra_init = ablk_init, |
1098 | .cra_exit = ablk_exit, | 1282 | .cra_exit = ablk_exit, |
1099 | .cra_u = { | 1283 | .cra_u = { |
1100 | .ablkcipher = { | 1284 | .ablkcipher = { |
@@ -1106,7 +1290,6 @@ static struct crypto_alg aesni_algs[] = { { | |||
1106 | .decrypt = ablk_decrypt, | 1290 | .decrypt = ablk_decrypt, |
1107 | }, | 1291 | }, |
1108 | }, | 1292 | }, |
1109 | #endif | ||
1110 | } }; | 1293 | } }; |
1111 | 1294 | ||
1112 | 1295 | ||
@@ -1118,7 +1301,7 @@ MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id); | |||
1118 | 1301 | ||
1119 | static int __init aesni_init(void) | 1302 | static int __init aesni_init(void) |
1120 | { | 1303 | { |
1121 | int err, i; | 1304 | int err; |
1122 | 1305 | ||
1123 | if (!x86_match_cpu(aesni_cpu_id)) | 1306 | if (!x86_match_cpu(aesni_cpu_id)) |
1124 | return -ENODEV; | 1307 | return -ENODEV; |
@@ -1127,9 +1310,6 @@ static int __init aesni_init(void) | |||
1127 | if (err) | 1310 | if (err) |
1128 | return err; | 1311 | return err; |
1129 | 1312 | ||
1130 | for (i = 0; i < ARRAY_SIZE(aesni_algs); i++) | ||
1131 | INIT_LIST_HEAD(&aesni_algs[i].cra_list); | ||
1132 | |||
1133 | return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs)); | 1313 | return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs)); |
1134 | } | 1314 | } |
1135 | 1315 | ||
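One point worth spelling out from the lrw/xts additions above: the user-visible key is a concatenation. For xts(aes) it is the data key followed by the tweak key in two equal halves; for lrw(aes) it is the AES key followed by one 16-byte tweak-table key, which is why the ablkcipher keysizes gain "+ AES_BLOCK_SIZE". A minimal sketch of the corresponding length checks — the helper names are made up and not part of the patch, they only mirror what lrw_aesni_setkey()/xts_aesni_setkey() accept:

#include <crypto/aes.h>
#include <linux/errno.h>

static int xts_aes_keylen_ok(unsigned int keylen)
{
	/* xts(aes): two equal halves (data key || tweak key); each half
	 * must itself be a valid AES key size. */
	if (keylen % 2)
		return -EINVAL;

	switch (keylen / 2) {
	case AES_KEYSIZE_128:
	case AES_KEYSIZE_192:
	case AES_KEYSIZE_256:
		return 0;
	}
	return -EINVAL;
}

static int lrw_aes_keylen_ok(unsigned int keylen)
{
	/* lrw(aes): an AES key plus one extra block of tweak-table key,
	 * hence min/max keysize of AES_*_KEY_SIZE + AES_BLOCK_SIZE. */
	switch (keylen - AES_BLOCK_SIZE) {
	case AES_KEYSIZE_128:
	case AES_KEYSIZE_192:
	case AES_KEYSIZE_256:
		return 0;
	}
	return -EINVAL;
}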
diff --git a/arch/x86/crypto/blowfish_glue.c b/arch/x86/crypto/blowfish_glue.c index 7967474de8f7..50ec333b70e6 100644 --- a/arch/x86/crypto/blowfish_glue.c +++ b/arch/x86/crypto/blowfish_glue.c | |||
@@ -367,7 +367,6 @@ static struct crypto_alg bf_algs[4] = { { | |||
367 | .cra_ctxsize = sizeof(struct bf_ctx), | 367 | .cra_ctxsize = sizeof(struct bf_ctx), |
368 | .cra_alignmask = 0, | 368 | .cra_alignmask = 0, |
369 | .cra_module = THIS_MODULE, | 369 | .cra_module = THIS_MODULE, |
370 | .cra_list = LIST_HEAD_INIT(bf_algs[0].cra_list), | ||
371 | .cra_u = { | 370 | .cra_u = { |
372 | .cipher = { | 371 | .cipher = { |
373 | .cia_min_keysize = BF_MIN_KEY_SIZE, | 372 | .cia_min_keysize = BF_MIN_KEY_SIZE, |
@@ -387,7 +386,6 @@ static struct crypto_alg bf_algs[4] = { { | |||
387 | .cra_alignmask = 0, | 386 | .cra_alignmask = 0, |
388 | .cra_type = &crypto_blkcipher_type, | 387 | .cra_type = &crypto_blkcipher_type, |
389 | .cra_module = THIS_MODULE, | 388 | .cra_module = THIS_MODULE, |
390 | .cra_list = LIST_HEAD_INIT(bf_algs[1].cra_list), | ||
391 | .cra_u = { | 389 | .cra_u = { |
392 | .blkcipher = { | 390 | .blkcipher = { |
393 | .min_keysize = BF_MIN_KEY_SIZE, | 391 | .min_keysize = BF_MIN_KEY_SIZE, |
@@ -407,7 +405,6 @@ static struct crypto_alg bf_algs[4] = { { | |||
407 | .cra_alignmask = 0, | 405 | .cra_alignmask = 0, |
408 | .cra_type = &crypto_blkcipher_type, | 406 | .cra_type = &crypto_blkcipher_type, |
409 | .cra_module = THIS_MODULE, | 407 | .cra_module = THIS_MODULE, |
410 | .cra_list = LIST_HEAD_INIT(bf_algs[2].cra_list), | ||
411 | .cra_u = { | 408 | .cra_u = { |
412 | .blkcipher = { | 409 | .blkcipher = { |
413 | .min_keysize = BF_MIN_KEY_SIZE, | 410 | .min_keysize = BF_MIN_KEY_SIZE, |
@@ -428,7 +425,6 @@ static struct crypto_alg bf_algs[4] = { { | |||
428 | .cra_alignmask = 0, | 425 | .cra_alignmask = 0, |
429 | .cra_type = &crypto_blkcipher_type, | 426 | .cra_type = &crypto_blkcipher_type, |
430 | .cra_module = THIS_MODULE, | 427 | .cra_module = THIS_MODULE, |
431 | .cra_list = LIST_HEAD_INIT(bf_algs[3].cra_list), | ||
432 | .cra_u = { | 428 | .cra_u = { |
433 | .blkcipher = { | 429 | .blkcipher = { |
434 | .min_keysize = BF_MIN_KEY_SIZE, | 430 | .min_keysize = BF_MIN_KEY_SIZE, |
diff --git a/arch/x86/crypto/camellia_glue.c b/arch/x86/crypto/camellia_glue.c index eeb2b3b743e9..42ffd2bbab5b 100644 --- a/arch/x86/crypto/camellia_glue.c +++ b/arch/x86/crypto/camellia_glue.c | |||
@@ -92,715 +92,715 @@ static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) | |||
92 | 92 | ||
93 | /* camellia sboxes */ | 93 | /* camellia sboxes */ |
94 | const u64 camellia_sp10011110[256] = { | 94 | const u64 camellia_sp10011110[256] = { |
95 | 0x7000007070707000, 0x8200008282828200, 0x2c00002c2c2c2c00, | 95 | 0x7000007070707000ULL, 0x8200008282828200ULL, 0x2c00002c2c2c2c00ULL, |
96 | 0xec0000ecececec00, 0xb30000b3b3b3b300, 0x2700002727272700, | 96 | 0xec0000ecececec00ULL, 0xb30000b3b3b3b300ULL, 0x2700002727272700ULL, |
97 | 0xc00000c0c0c0c000, 0xe50000e5e5e5e500, 0xe40000e4e4e4e400, | 97 | 0xc00000c0c0c0c000ULL, 0xe50000e5e5e5e500ULL, 0xe40000e4e4e4e400ULL, |
98 | 0x8500008585858500, 0x5700005757575700, 0x3500003535353500, | 98 | 0x8500008585858500ULL, 0x5700005757575700ULL, 0x3500003535353500ULL, |
99 | 0xea0000eaeaeaea00, 0x0c00000c0c0c0c00, 0xae0000aeaeaeae00, | 99 | 0xea0000eaeaeaea00ULL, 0x0c00000c0c0c0c00ULL, 0xae0000aeaeaeae00ULL, |
100 | 0x4100004141414100, 0x2300002323232300, 0xef0000efefefef00, | 100 | 0x4100004141414100ULL, 0x2300002323232300ULL, 0xef0000efefefef00ULL, |
101 | 0x6b00006b6b6b6b00, 0x9300009393939300, 0x4500004545454500, | 101 | 0x6b00006b6b6b6b00ULL, 0x9300009393939300ULL, 0x4500004545454500ULL, |
102 | 0x1900001919191900, 0xa50000a5a5a5a500, 0x2100002121212100, | 102 | 0x1900001919191900ULL, 0xa50000a5a5a5a500ULL, 0x2100002121212100ULL, |
103 | 0xed0000edededed00, 0x0e00000e0e0e0e00, 0x4f00004f4f4f4f00, | 103 | 0xed0000edededed00ULL, 0x0e00000e0e0e0e00ULL, 0x4f00004f4f4f4f00ULL, |
104 | 0x4e00004e4e4e4e00, 0x1d00001d1d1d1d00, 0x6500006565656500, | 104 | 0x4e00004e4e4e4e00ULL, 0x1d00001d1d1d1d00ULL, 0x6500006565656500ULL, |
105 | 0x9200009292929200, 0xbd0000bdbdbdbd00, 0x8600008686868600, | 105 | 0x9200009292929200ULL, 0xbd0000bdbdbdbd00ULL, 0x8600008686868600ULL, |
106 | 0xb80000b8b8b8b800, 0xaf0000afafafaf00, 0x8f00008f8f8f8f00, | 106 | 0xb80000b8b8b8b800ULL, 0xaf0000afafafaf00ULL, 0x8f00008f8f8f8f00ULL, |
107 | 0x7c00007c7c7c7c00, 0xeb0000ebebebeb00, 0x1f00001f1f1f1f00, | 107 | 0x7c00007c7c7c7c00ULL, 0xeb0000ebebebeb00ULL, 0x1f00001f1f1f1f00ULL, |
108 | 0xce0000cececece00, 0x3e00003e3e3e3e00, 0x3000003030303000, | 108 | 0xce0000cececece00ULL, 0x3e00003e3e3e3e00ULL, 0x3000003030303000ULL, |
109 | 0xdc0000dcdcdcdc00, 0x5f00005f5f5f5f00, 0x5e00005e5e5e5e00, | 109 | 0xdc0000dcdcdcdc00ULL, 0x5f00005f5f5f5f00ULL, 0x5e00005e5e5e5e00ULL, |
110 | 0xc50000c5c5c5c500, 0x0b00000b0b0b0b00, 0x1a00001a1a1a1a00, | 110 | 0xc50000c5c5c5c500ULL, 0x0b00000b0b0b0b00ULL, 0x1a00001a1a1a1a00ULL, |
111 | 0xa60000a6a6a6a600, 0xe10000e1e1e1e100, 0x3900003939393900, | 111 | 0xa60000a6a6a6a600ULL, 0xe10000e1e1e1e100ULL, 0x3900003939393900ULL, |
112 | 0xca0000cacacaca00, 0xd50000d5d5d5d500, 0x4700004747474700, | 112 | 0xca0000cacacaca00ULL, 0xd50000d5d5d5d500ULL, 0x4700004747474700ULL, |
113 | 0x5d00005d5d5d5d00, 0x3d00003d3d3d3d00, 0xd90000d9d9d9d900, | 113 | 0x5d00005d5d5d5d00ULL, 0x3d00003d3d3d3d00ULL, 0xd90000d9d9d9d900ULL, |
114 | 0x0100000101010100, 0x5a00005a5a5a5a00, 0xd60000d6d6d6d600, | 114 | 0x0100000101010100ULL, 0x5a00005a5a5a5a00ULL, 0xd60000d6d6d6d600ULL, |
115 | 0x5100005151515100, 0x5600005656565600, 0x6c00006c6c6c6c00, | 115 | 0x5100005151515100ULL, 0x5600005656565600ULL, 0x6c00006c6c6c6c00ULL, |
116 | 0x4d00004d4d4d4d00, 0x8b00008b8b8b8b00, 0x0d00000d0d0d0d00, | 116 | 0x4d00004d4d4d4d00ULL, 0x8b00008b8b8b8b00ULL, 0x0d00000d0d0d0d00ULL, |
117 | 0x9a00009a9a9a9a00, 0x6600006666666600, 0xfb0000fbfbfbfb00, | 117 | 0x9a00009a9a9a9a00ULL, 0x6600006666666600ULL, 0xfb0000fbfbfbfb00ULL, |
118 | 0xcc0000cccccccc00, 0xb00000b0b0b0b000, 0x2d00002d2d2d2d00, | 118 | 0xcc0000cccccccc00ULL, 0xb00000b0b0b0b000ULL, 0x2d00002d2d2d2d00ULL, |
119 | 0x7400007474747400, 0x1200001212121200, 0x2b00002b2b2b2b00, | 119 | 0x7400007474747400ULL, 0x1200001212121200ULL, 0x2b00002b2b2b2b00ULL, |
120 | 0x2000002020202000, 0xf00000f0f0f0f000, 0xb10000b1b1b1b100, | 120 | 0x2000002020202000ULL, 0xf00000f0f0f0f000ULL, 0xb10000b1b1b1b100ULL, |
121 | 0x8400008484848400, 0x9900009999999900, 0xdf0000dfdfdfdf00, | 121 | 0x8400008484848400ULL, 0x9900009999999900ULL, 0xdf0000dfdfdfdf00ULL, |
122 | 0x4c00004c4c4c4c00, 0xcb0000cbcbcbcb00, 0xc20000c2c2c2c200, | 122 | 0x4c00004c4c4c4c00ULL, 0xcb0000cbcbcbcb00ULL, 0xc20000c2c2c2c200ULL, |
123 | 0x3400003434343400, 0x7e00007e7e7e7e00, 0x7600007676767600, | 123 | 0x3400003434343400ULL, 0x7e00007e7e7e7e00ULL, 0x7600007676767600ULL, |
124 | 0x0500000505050500, 0x6d00006d6d6d6d00, 0xb70000b7b7b7b700, | 124 | 0x0500000505050500ULL, 0x6d00006d6d6d6d00ULL, 0xb70000b7b7b7b700ULL, |
125 | 0xa90000a9a9a9a900, 0x3100003131313100, 0xd10000d1d1d1d100, | 125 | 0xa90000a9a9a9a900ULL, 0x3100003131313100ULL, 0xd10000d1d1d1d100ULL, |
126 | 0x1700001717171700, 0x0400000404040400, 0xd70000d7d7d7d700, | 126 | 0x1700001717171700ULL, 0x0400000404040400ULL, 0xd70000d7d7d7d700ULL, |
127 | 0x1400001414141400, 0x5800005858585800, 0x3a00003a3a3a3a00, | 127 | 0x1400001414141400ULL, 0x5800005858585800ULL, 0x3a00003a3a3a3a00ULL, |
128 | 0x6100006161616100, 0xde0000dededede00, 0x1b00001b1b1b1b00, | 128 | 0x6100006161616100ULL, 0xde0000dededede00ULL, 0x1b00001b1b1b1b00ULL, |
129 | 0x1100001111111100, 0x1c00001c1c1c1c00, 0x3200003232323200, | 129 | 0x1100001111111100ULL, 0x1c00001c1c1c1c00ULL, 0x3200003232323200ULL, |
130 | 0x0f00000f0f0f0f00, 0x9c00009c9c9c9c00, 0x1600001616161600, | 130 | 0x0f00000f0f0f0f00ULL, 0x9c00009c9c9c9c00ULL, 0x1600001616161600ULL, |
131 | 0x5300005353535300, 0x1800001818181800, 0xf20000f2f2f2f200, | 131 | 0x5300005353535300ULL, 0x1800001818181800ULL, 0xf20000f2f2f2f200ULL, |
132 | 0x2200002222222200, 0xfe0000fefefefe00, 0x4400004444444400, | 132 | 0x2200002222222200ULL, 0xfe0000fefefefe00ULL, 0x4400004444444400ULL, |
133 | 0xcf0000cfcfcfcf00, 0xb20000b2b2b2b200, 0xc30000c3c3c3c300, | 133 | 0xcf0000cfcfcfcf00ULL, 0xb20000b2b2b2b200ULL, 0xc30000c3c3c3c300ULL, |
134 | 0xb50000b5b5b5b500, 0x7a00007a7a7a7a00, 0x9100009191919100, | 134 | 0xb50000b5b5b5b500ULL, 0x7a00007a7a7a7a00ULL, 0x9100009191919100ULL, |
135 | 0x2400002424242400, 0x0800000808080800, 0xe80000e8e8e8e800, | 135 | 0x2400002424242400ULL, 0x0800000808080800ULL, 0xe80000e8e8e8e800ULL, |
136 | 0xa80000a8a8a8a800, 0x6000006060606000, 0xfc0000fcfcfcfc00, | 136 | 0xa80000a8a8a8a800ULL, 0x6000006060606000ULL, 0xfc0000fcfcfcfc00ULL, |
137 | 0x6900006969696900, 0x5000005050505000, 0xaa0000aaaaaaaa00, | 137 | 0x6900006969696900ULL, 0x5000005050505000ULL, 0xaa0000aaaaaaaa00ULL, |
138 | 0xd00000d0d0d0d000, 0xa00000a0a0a0a000, 0x7d00007d7d7d7d00, | 138 | 0xd00000d0d0d0d000ULL, 0xa00000a0a0a0a000ULL, 0x7d00007d7d7d7d00ULL, |
139 | 0xa10000a1a1a1a100, 0x8900008989898900, 0x6200006262626200, | 139 | 0xa10000a1a1a1a100ULL, 0x8900008989898900ULL, 0x6200006262626200ULL, |
140 | 0x9700009797979700, 0x5400005454545400, 0x5b00005b5b5b5b00, | 140 | 0x9700009797979700ULL, 0x5400005454545400ULL, 0x5b00005b5b5b5b00ULL, |
141 | 0x1e00001e1e1e1e00, 0x9500009595959500, 0xe00000e0e0e0e000, | 141 | 0x1e00001e1e1e1e00ULL, 0x9500009595959500ULL, 0xe00000e0e0e0e000ULL, |
142 | 0xff0000ffffffff00, 0x6400006464646400, 0xd20000d2d2d2d200, | 142 | 0xff0000ffffffff00ULL, 0x6400006464646400ULL, 0xd20000d2d2d2d200ULL, |
143 | 0x1000001010101000, 0xc40000c4c4c4c400, 0x0000000000000000, | 143 | 0x1000001010101000ULL, 0xc40000c4c4c4c400ULL, 0x0000000000000000ULL, |
144 | 0x4800004848484800, 0xa30000a3a3a3a300, 0xf70000f7f7f7f700, | 144 | 0x4800004848484800ULL, 0xa30000a3a3a3a300ULL, 0xf70000f7f7f7f700ULL, |
145 | 0x7500007575757500, 0xdb0000dbdbdbdb00, 0x8a00008a8a8a8a00, | 145 | 0x7500007575757500ULL, 0xdb0000dbdbdbdb00ULL, 0x8a00008a8a8a8a00ULL, |
146 | 0x0300000303030300, 0xe60000e6e6e6e600, 0xda0000dadadada00, | 146 | 0x0300000303030300ULL, 0xe60000e6e6e6e600ULL, 0xda0000dadadada00ULL, |
147 | 0x0900000909090900, 0x3f00003f3f3f3f00, 0xdd0000dddddddd00, | 147 | 0x0900000909090900ULL, 0x3f00003f3f3f3f00ULL, 0xdd0000dddddddd00ULL, |
148 | 0x9400009494949400, 0x8700008787878700, 0x5c00005c5c5c5c00, | 148 | 0x9400009494949400ULL, 0x8700008787878700ULL, 0x5c00005c5c5c5c00ULL, |
149 | 0x8300008383838300, 0x0200000202020200, 0xcd0000cdcdcdcd00, | 149 | 0x8300008383838300ULL, 0x0200000202020200ULL, 0xcd0000cdcdcdcd00ULL, |
150 | 0x4a00004a4a4a4a00, 0x9000009090909000, 0x3300003333333300, | 150 | 0x4a00004a4a4a4a00ULL, 0x9000009090909000ULL, 0x3300003333333300ULL, |
151 | 0x7300007373737300, 0x6700006767676700, 0xf60000f6f6f6f600, | 151 | 0x7300007373737300ULL, 0x6700006767676700ULL, 0xf60000f6f6f6f600ULL, |
152 | 0xf30000f3f3f3f300, 0x9d00009d9d9d9d00, 0x7f00007f7f7f7f00, | 152 | 0xf30000f3f3f3f300ULL, 0x9d00009d9d9d9d00ULL, 0x7f00007f7f7f7f00ULL, |
153 | 0xbf0000bfbfbfbf00, 0xe20000e2e2e2e200, 0x5200005252525200, | 153 | 0xbf0000bfbfbfbf00ULL, 0xe20000e2e2e2e200ULL, 0x5200005252525200ULL, |
154 | 0x9b00009b9b9b9b00, 0xd80000d8d8d8d800, 0x2600002626262600, | 154 | 0x9b00009b9b9b9b00ULL, 0xd80000d8d8d8d800ULL, 0x2600002626262600ULL, |
155 | 0xc80000c8c8c8c800, 0x3700003737373700, 0xc60000c6c6c6c600, | 155 | 0xc80000c8c8c8c800ULL, 0x3700003737373700ULL, 0xc60000c6c6c6c600ULL, |
156 | 0x3b00003b3b3b3b00, 0x8100008181818100, 0x9600009696969600, | 156 | 0x3b00003b3b3b3b00ULL, 0x8100008181818100ULL, 0x9600009696969600ULL, |
157 | 0x6f00006f6f6f6f00, 0x4b00004b4b4b4b00, 0x1300001313131300, | 157 | 0x6f00006f6f6f6f00ULL, 0x4b00004b4b4b4b00ULL, 0x1300001313131300ULL, |
158 | 0xbe0000bebebebe00, 0x6300006363636300, 0x2e00002e2e2e2e00, | 158 | 0xbe0000bebebebe00ULL, 0x6300006363636300ULL, 0x2e00002e2e2e2e00ULL, |
159 | 0xe90000e9e9e9e900, 0x7900007979797900, 0xa70000a7a7a7a700, | 159 | 0xe90000e9e9e9e900ULL, 0x7900007979797900ULL, 0xa70000a7a7a7a700ULL, |
160 | 0x8c00008c8c8c8c00, 0x9f00009f9f9f9f00, 0x6e00006e6e6e6e00, | 160 | 0x8c00008c8c8c8c00ULL, 0x9f00009f9f9f9f00ULL, 0x6e00006e6e6e6e00ULL, |
161 | 0xbc0000bcbcbcbc00, 0x8e00008e8e8e8e00, 0x2900002929292900, | 161 | 0xbc0000bcbcbcbc00ULL, 0x8e00008e8e8e8e00ULL, 0x2900002929292900ULL, |
162 | 0xf50000f5f5f5f500, 0xf90000f9f9f9f900, 0xb60000b6b6b6b600, | 162 | 0xf50000f5f5f5f500ULL, 0xf90000f9f9f9f900ULL, 0xb60000b6b6b6b600ULL, |
163 | 0x2f00002f2f2f2f00, 0xfd0000fdfdfdfd00, 0xb40000b4b4b4b400, | 163 | 0x2f00002f2f2f2f00ULL, 0xfd0000fdfdfdfd00ULL, 0xb40000b4b4b4b400ULL, |
164 | 0x5900005959595900, 0x7800007878787800, 0x9800009898989800, | 164 | 0x5900005959595900ULL, 0x7800007878787800ULL, 0x9800009898989800ULL, |
165 | 0x0600000606060600, 0x6a00006a6a6a6a00, 0xe70000e7e7e7e700, | 165 | 0x0600000606060600ULL, 0x6a00006a6a6a6a00ULL, 0xe70000e7e7e7e700ULL, |
166 | 0x4600004646464600, 0x7100007171717100, 0xba0000babababa00, | 166 | 0x4600004646464600ULL, 0x7100007171717100ULL, 0xba0000babababa00ULL, |
167 | 0xd40000d4d4d4d400, 0x2500002525252500, 0xab0000abababab00, | 167 | 0xd40000d4d4d4d400ULL, 0x2500002525252500ULL, 0xab0000abababab00ULL, |
168 | 0x4200004242424200, 0x8800008888888800, 0xa20000a2a2a2a200, | 168 | 0x4200004242424200ULL, 0x8800008888888800ULL, 0xa20000a2a2a2a200ULL, |
169 | 0x8d00008d8d8d8d00, 0xfa0000fafafafa00, 0x7200007272727200, | 169 | 0x8d00008d8d8d8d00ULL, 0xfa0000fafafafa00ULL, 0x7200007272727200ULL, |
170 | 0x0700000707070700, 0xb90000b9b9b9b900, 0x5500005555555500, | 170 | 0x0700000707070700ULL, 0xb90000b9b9b9b900ULL, 0x5500005555555500ULL, |
171 | 0xf80000f8f8f8f800, 0xee0000eeeeeeee00, 0xac0000acacacac00, | 171 | 0xf80000f8f8f8f800ULL, 0xee0000eeeeeeee00ULL, 0xac0000acacacac00ULL, |
172 | 0x0a00000a0a0a0a00, 0x3600003636363600, 0x4900004949494900, | 172 | 0x0a00000a0a0a0a00ULL, 0x3600003636363600ULL, 0x4900004949494900ULL, |
173 | 0x2a00002a2a2a2a00, 0x6800006868686800, 0x3c00003c3c3c3c00, | 173 | 0x2a00002a2a2a2a00ULL, 0x6800006868686800ULL, 0x3c00003c3c3c3c00ULL, |
174 | 0x3800003838383800, 0xf10000f1f1f1f100, 0xa40000a4a4a4a400, | 174 | 0x3800003838383800ULL, 0xf10000f1f1f1f100ULL, 0xa40000a4a4a4a400ULL, |
175 | 0x4000004040404000, 0x2800002828282800, 0xd30000d3d3d3d300, | 175 | 0x4000004040404000ULL, 0x2800002828282800ULL, 0xd30000d3d3d3d300ULL, |
176 | 0x7b00007b7b7b7b00, 0xbb0000bbbbbbbb00, 0xc90000c9c9c9c900, | 176 | 0x7b00007b7b7b7b00ULL, 0xbb0000bbbbbbbb00ULL, 0xc90000c9c9c9c900ULL, |
177 | 0x4300004343434300, 0xc10000c1c1c1c100, 0x1500001515151500, | 177 | 0x4300004343434300ULL, 0xc10000c1c1c1c100ULL, 0x1500001515151500ULL, |
178 | 0xe30000e3e3e3e300, 0xad0000adadadad00, 0xf40000f4f4f4f400, | 178 | 0xe30000e3e3e3e300ULL, 0xad0000adadadad00ULL, 0xf40000f4f4f4f400ULL, |
179 | 0x7700007777777700, 0xc70000c7c7c7c700, 0x8000008080808000, | 179 | 0x7700007777777700ULL, 0xc70000c7c7c7c700ULL, 0x8000008080808000ULL, |
180 | 0x9e00009e9e9e9e00, | 180 | 0x9e00009e9e9e9e00ULL, |
181 | }; | 181 | }; |
182 | 182 | ||
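The only change running through this hunk is the ULL suffix added to every 64-bit constant. A minimal standalone sketch of why the suffix matters, assuming a 32-bit target where long is 32 bits (the variable names are illustrative, not from the kernel source):

#include <stdint.h>

/*
 * Illustration only, not part of the patch.  With the suffix the
 * constant is unsigned long long on any target.  Without it, the type
 * depends on the dialect: C99 promotes an oversized hexadecimal
 * constant to a 64-bit type automatically, but under C90 rules a hex
 * constant wider than unsigned long has no standard type, so 32-bit
 * builds accept it only as an extension and may warn that the
 * constant is too large for its type.
 */
static const uint64_t with_suffix    = 0x8300008383838300ULL;
static const uint64_t without_suffix = 0x8300008383838300;
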
183 | const u64 camellia_sp22000222[256] = { | 183 | const u64 camellia_sp22000222[256] = { |
184 | 0xe0e0000000e0e0e0, 0x0505000000050505, 0x5858000000585858, | 184 | 0xe0e0000000e0e0e0ULL, 0x0505000000050505ULL, 0x5858000000585858ULL, |
185 | 0xd9d9000000d9d9d9, 0x6767000000676767, 0x4e4e0000004e4e4e, | 185 | 0xd9d9000000d9d9d9ULL, 0x6767000000676767ULL, 0x4e4e0000004e4e4eULL, |
186 | 0x8181000000818181, 0xcbcb000000cbcbcb, 0xc9c9000000c9c9c9, | 186 | 0x8181000000818181ULL, 0xcbcb000000cbcbcbULL, 0xc9c9000000c9c9c9ULL, |
187 | 0x0b0b0000000b0b0b, 0xaeae000000aeaeae, 0x6a6a0000006a6a6a, | 187 | 0x0b0b0000000b0b0bULL, 0xaeae000000aeaeaeULL, 0x6a6a0000006a6a6aULL, |
188 | 0xd5d5000000d5d5d5, 0x1818000000181818, 0x5d5d0000005d5d5d, | 188 | 0xd5d5000000d5d5d5ULL, 0x1818000000181818ULL, 0x5d5d0000005d5d5dULL, |
189 | 0x8282000000828282, 0x4646000000464646, 0xdfdf000000dfdfdf, | 189 | 0x8282000000828282ULL, 0x4646000000464646ULL, 0xdfdf000000dfdfdfULL, |
190 | 0xd6d6000000d6d6d6, 0x2727000000272727, 0x8a8a0000008a8a8a, | 190 | 0xd6d6000000d6d6d6ULL, 0x2727000000272727ULL, 0x8a8a0000008a8a8aULL, |
191 | 0x3232000000323232, 0x4b4b0000004b4b4b, 0x4242000000424242, | 191 | 0x3232000000323232ULL, 0x4b4b0000004b4b4bULL, 0x4242000000424242ULL, |
192 | 0xdbdb000000dbdbdb, 0x1c1c0000001c1c1c, 0x9e9e0000009e9e9e, | 192 | 0xdbdb000000dbdbdbULL, 0x1c1c0000001c1c1cULL, 0x9e9e0000009e9e9eULL, |
193 | 0x9c9c0000009c9c9c, 0x3a3a0000003a3a3a, 0xcaca000000cacaca, | 193 | 0x9c9c0000009c9c9cULL, 0x3a3a0000003a3a3aULL, 0xcaca000000cacacaULL, |
194 | 0x2525000000252525, 0x7b7b0000007b7b7b, 0x0d0d0000000d0d0d, | 194 | 0x2525000000252525ULL, 0x7b7b0000007b7b7bULL, 0x0d0d0000000d0d0dULL, |
195 | 0x7171000000717171, 0x5f5f0000005f5f5f, 0x1f1f0000001f1f1f, | 195 | 0x7171000000717171ULL, 0x5f5f0000005f5f5fULL, 0x1f1f0000001f1f1fULL, |
196 | 0xf8f8000000f8f8f8, 0xd7d7000000d7d7d7, 0x3e3e0000003e3e3e, | 196 | 0xf8f8000000f8f8f8ULL, 0xd7d7000000d7d7d7ULL, 0x3e3e0000003e3e3eULL, |
197 | 0x9d9d0000009d9d9d, 0x7c7c0000007c7c7c, 0x6060000000606060, | 197 | 0x9d9d0000009d9d9dULL, 0x7c7c0000007c7c7cULL, 0x6060000000606060ULL, |
198 | 0xb9b9000000b9b9b9, 0xbebe000000bebebe, 0xbcbc000000bcbcbc, | 198 | 0xb9b9000000b9b9b9ULL, 0xbebe000000bebebeULL, 0xbcbc000000bcbcbcULL, |
199 | 0x8b8b0000008b8b8b, 0x1616000000161616, 0x3434000000343434, | 199 | 0x8b8b0000008b8b8bULL, 0x1616000000161616ULL, 0x3434000000343434ULL, |
200 | 0x4d4d0000004d4d4d, 0xc3c3000000c3c3c3, 0x7272000000727272, | 200 | 0x4d4d0000004d4d4dULL, 0xc3c3000000c3c3c3ULL, 0x7272000000727272ULL, |
201 | 0x9595000000959595, 0xabab000000ababab, 0x8e8e0000008e8e8e, | 201 | 0x9595000000959595ULL, 0xabab000000abababULL, 0x8e8e0000008e8e8eULL, |
202 | 0xbaba000000bababa, 0x7a7a0000007a7a7a, 0xb3b3000000b3b3b3, | 202 | 0xbaba000000bababaULL, 0x7a7a0000007a7a7aULL, 0xb3b3000000b3b3b3ULL, |
203 | 0x0202000000020202, 0xb4b4000000b4b4b4, 0xadad000000adadad, | 203 | 0x0202000000020202ULL, 0xb4b4000000b4b4b4ULL, 0xadad000000adadadULL, |
204 | 0xa2a2000000a2a2a2, 0xacac000000acacac, 0xd8d8000000d8d8d8, | 204 | 0xa2a2000000a2a2a2ULL, 0xacac000000acacacULL, 0xd8d8000000d8d8d8ULL, |
205 | 0x9a9a0000009a9a9a, 0x1717000000171717, 0x1a1a0000001a1a1a, | 205 | 0x9a9a0000009a9a9aULL, 0x1717000000171717ULL, 0x1a1a0000001a1a1aULL, |
206 | 0x3535000000353535, 0xcccc000000cccccc, 0xf7f7000000f7f7f7, | 206 | 0x3535000000353535ULL, 0xcccc000000ccccccULL, 0xf7f7000000f7f7f7ULL, |
207 | 0x9999000000999999, 0x6161000000616161, 0x5a5a0000005a5a5a, | 207 | 0x9999000000999999ULL, 0x6161000000616161ULL, 0x5a5a0000005a5a5aULL, |
208 | 0xe8e8000000e8e8e8, 0x2424000000242424, 0x5656000000565656, | 208 | 0xe8e8000000e8e8e8ULL, 0x2424000000242424ULL, 0x5656000000565656ULL, |
209 | 0x4040000000404040, 0xe1e1000000e1e1e1, 0x6363000000636363, | 209 | 0x4040000000404040ULL, 0xe1e1000000e1e1e1ULL, 0x6363000000636363ULL, |
210 | 0x0909000000090909, 0x3333000000333333, 0xbfbf000000bfbfbf, | 210 | 0x0909000000090909ULL, 0x3333000000333333ULL, 0xbfbf000000bfbfbfULL, |
211 | 0x9898000000989898, 0x9797000000979797, 0x8585000000858585, | 211 | 0x9898000000989898ULL, 0x9797000000979797ULL, 0x8585000000858585ULL, |
212 | 0x6868000000686868, 0xfcfc000000fcfcfc, 0xecec000000ececec, | 212 | 0x6868000000686868ULL, 0xfcfc000000fcfcfcULL, 0xecec000000ecececULL, |
213 | 0x0a0a0000000a0a0a, 0xdada000000dadada, 0x6f6f0000006f6f6f, | 213 | 0x0a0a0000000a0a0aULL, 0xdada000000dadadaULL, 0x6f6f0000006f6f6fULL, |
214 | 0x5353000000535353, 0x6262000000626262, 0xa3a3000000a3a3a3, | 214 | 0x5353000000535353ULL, 0x6262000000626262ULL, 0xa3a3000000a3a3a3ULL, |
215 | 0x2e2e0000002e2e2e, 0x0808000000080808, 0xafaf000000afafaf, | 215 | 0x2e2e0000002e2e2eULL, 0x0808000000080808ULL, 0xafaf000000afafafULL, |
216 | 0x2828000000282828, 0xb0b0000000b0b0b0, 0x7474000000747474, | 216 | 0x2828000000282828ULL, 0xb0b0000000b0b0b0ULL, 0x7474000000747474ULL, |
217 | 0xc2c2000000c2c2c2, 0xbdbd000000bdbdbd, 0x3636000000363636, | 217 | 0xc2c2000000c2c2c2ULL, 0xbdbd000000bdbdbdULL, 0x3636000000363636ULL, |
218 | 0x2222000000222222, 0x3838000000383838, 0x6464000000646464, | 218 | 0x2222000000222222ULL, 0x3838000000383838ULL, 0x6464000000646464ULL, |
219 | 0x1e1e0000001e1e1e, 0x3939000000393939, 0x2c2c0000002c2c2c, | 219 | 0x1e1e0000001e1e1eULL, 0x3939000000393939ULL, 0x2c2c0000002c2c2cULL, |
220 | 0xa6a6000000a6a6a6, 0x3030000000303030, 0xe5e5000000e5e5e5, | 220 | 0xa6a6000000a6a6a6ULL, 0x3030000000303030ULL, 0xe5e5000000e5e5e5ULL, |
221 | 0x4444000000444444, 0xfdfd000000fdfdfd, 0x8888000000888888, | 221 | 0x4444000000444444ULL, 0xfdfd000000fdfdfdULL, 0x8888000000888888ULL, |
222 | 0x9f9f0000009f9f9f, 0x6565000000656565, 0x8787000000878787, | 222 | 0x9f9f0000009f9f9fULL, 0x6565000000656565ULL, 0x8787000000878787ULL, |
223 | 0x6b6b0000006b6b6b, 0xf4f4000000f4f4f4, 0x2323000000232323, | 223 | 0x6b6b0000006b6b6bULL, 0xf4f4000000f4f4f4ULL, 0x2323000000232323ULL, |
224 | 0x4848000000484848, 0x1010000000101010, 0xd1d1000000d1d1d1, | 224 | 0x4848000000484848ULL, 0x1010000000101010ULL, 0xd1d1000000d1d1d1ULL, |
225 | 0x5151000000515151, 0xc0c0000000c0c0c0, 0xf9f9000000f9f9f9, | 225 | 0x5151000000515151ULL, 0xc0c0000000c0c0c0ULL, 0xf9f9000000f9f9f9ULL, |
226 | 0xd2d2000000d2d2d2, 0xa0a0000000a0a0a0, 0x5555000000555555, | 226 | 0xd2d2000000d2d2d2ULL, 0xa0a0000000a0a0a0ULL, 0x5555000000555555ULL, |
227 | 0xa1a1000000a1a1a1, 0x4141000000414141, 0xfafa000000fafafa, | 227 | 0xa1a1000000a1a1a1ULL, 0x4141000000414141ULL, 0xfafa000000fafafaULL, |
228 | 0x4343000000434343, 0x1313000000131313, 0xc4c4000000c4c4c4, | 228 | 0x4343000000434343ULL, 0x1313000000131313ULL, 0xc4c4000000c4c4c4ULL, |
229 | 0x2f2f0000002f2f2f, 0xa8a8000000a8a8a8, 0xb6b6000000b6b6b6, | 229 | 0x2f2f0000002f2f2fULL, 0xa8a8000000a8a8a8ULL, 0xb6b6000000b6b6b6ULL, |
230 | 0x3c3c0000003c3c3c, 0x2b2b0000002b2b2b, 0xc1c1000000c1c1c1, | 230 | 0x3c3c0000003c3c3cULL, 0x2b2b0000002b2b2bULL, 0xc1c1000000c1c1c1ULL, |
231 | 0xffff000000ffffff, 0xc8c8000000c8c8c8, 0xa5a5000000a5a5a5, | 231 | 0xffff000000ffffffULL, 0xc8c8000000c8c8c8ULL, 0xa5a5000000a5a5a5ULL, |
232 | 0x2020000000202020, 0x8989000000898989, 0x0000000000000000, | 232 | 0x2020000000202020ULL, 0x8989000000898989ULL, 0x0000000000000000ULL, |
233 | 0x9090000000909090, 0x4747000000474747, 0xefef000000efefef, | 233 | 0x9090000000909090ULL, 0x4747000000474747ULL, 0xefef000000efefefULL, |
234 | 0xeaea000000eaeaea, 0xb7b7000000b7b7b7, 0x1515000000151515, | 234 | 0xeaea000000eaeaeaULL, 0xb7b7000000b7b7b7ULL, 0x1515000000151515ULL, |
235 | 0x0606000000060606, 0xcdcd000000cdcdcd, 0xb5b5000000b5b5b5, | 235 | 0x0606000000060606ULL, 0xcdcd000000cdcdcdULL, 0xb5b5000000b5b5b5ULL, |
236 | 0x1212000000121212, 0x7e7e0000007e7e7e, 0xbbbb000000bbbbbb, | 236 | 0x1212000000121212ULL, 0x7e7e0000007e7e7eULL, 0xbbbb000000bbbbbbULL, |
237 | 0x2929000000292929, 0x0f0f0000000f0f0f, 0xb8b8000000b8b8b8, | 237 | 0x2929000000292929ULL, 0x0f0f0000000f0f0fULL, 0xb8b8000000b8b8b8ULL, |
238 | 0x0707000000070707, 0x0404000000040404, 0x9b9b0000009b9b9b, | 238 | 0x0707000000070707ULL, 0x0404000000040404ULL, 0x9b9b0000009b9b9bULL, |
239 | 0x9494000000949494, 0x2121000000212121, 0x6666000000666666, | 239 | 0x9494000000949494ULL, 0x2121000000212121ULL, 0x6666000000666666ULL, |
240 | 0xe6e6000000e6e6e6, 0xcece000000cecece, 0xeded000000ededed, | 240 | 0xe6e6000000e6e6e6ULL, 0xcece000000cececeULL, 0xeded000000edededULL, |
241 | 0xe7e7000000e7e7e7, 0x3b3b0000003b3b3b, 0xfefe000000fefefe, | 241 | 0xe7e7000000e7e7e7ULL, 0x3b3b0000003b3b3bULL, 0xfefe000000fefefeULL, |
242 | 0x7f7f0000007f7f7f, 0xc5c5000000c5c5c5, 0xa4a4000000a4a4a4, | 242 | 0x7f7f0000007f7f7fULL, 0xc5c5000000c5c5c5ULL, 0xa4a4000000a4a4a4ULL, |
243 | 0x3737000000373737, 0xb1b1000000b1b1b1, 0x4c4c0000004c4c4c, | 243 | 0x3737000000373737ULL, 0xb1b1000000b1b1b1ULL, 0x4c4c0000004c4c4cULL, |
244 | 0x9191000000919191, 0x6e6e0000006e6e6e, 0x8d8d0000008d8d8d, | 244 | 0x9191000000919191ULL, 0x6e6e0000006e6e6eULL, 0x8d8d0000008d8d8dULL, |
245 | 0x7676000000767676, 0x0303000000030303, 0x2d2d0000002d2d2d, | 245 | 0x7676000000767676ULL, 0x0303000000030303ULL, 0x2d2d0000002d2d2dULL, |
246 | 0xdede000000dedede, 0x9696000000969696, 0x2626000000262626, | 246 | 0xdede000000dededeULL, 0x9696000000969696ULL, 0x2626000000262626ULL, |
247 | 0x7d7d0000007d7d7d, 0xc6c6000000c6c6c6, 0x5c5c0000005c5c5c, | 247 | 0x7d7d0000007d7d7dULL, 0xc6c6000000c6c6c6ULL, 0x5c5c0000005c5c5cULL, |
248 | 0xd3d3000000d3d3d3, 0xf2f2000000f2f2f2, 0x4f4f0000004f4f4f, | 248 | 0xd3d3000000d3d3d3ULL, 0xf2f2000000f2f2f2ULL, 0x4f4f0000004f4f4fULL, |
249 | 0x1919000000191919, 0x3f3f0000003f3f3f, 0xdcdc000000dcdcdc, | 249 | 0x1919000000191919ULL, 0x3f3f0000003f3f3fULL, 0xdcdc000000dcdcdcULL, |
250 | 0x7979000000797979, 0x1d1d0000001d1d1d, 0x5252000000525252, | 250 | 0x7979000000797979ULL, 0x1d1d0000001d1d1dULL, 0x5252000000525252ULL, |
251 | 0xebeb000000ebebeb, 0xf3f3000000f3f3f3, 0x6d6d0000006d6d6d, | 251 | 0xebeb000000ebebebULL, 0xf3f3000000f3f3f3ULL, 0x6d6d0000006d6d6dULL, |
252 | 0x5e5e0000005e5e5e, 0xfbfb000000fbfbfb, 0x6969000000696969, | 252 | 0x5e5e0000005e5e5eULL, 0xfbfb000000fbfbfbULL, 0x6969000000696969ULL, |
253 | 0xb2b2000000b2b2b2, 0xf0f0000000f0f0f0, 0x3131000000313131, | 253 | 0xb2b2000000b2b2b2ULL, 0xf0f0000000f0f0f0ULL, 0x3131000000313131ULL, |
254 | 0x0c0c0000000c0c0c, 0xd4d4000000d4d4d4, 0xcfcf000000cfcfcf, | 254 | 0x0c0c0000000c0c0cULL, 0xd4d4000000d4d4d4ULL, 0xcfcf000000cfcfcfULL, |
255 | 0x8c8c0000008c8c8c, 0xe2e2000000e2e2e2, 0x7575000000757575, | 255 | 0x8c8c0000008c8c8cULL, 0xe2e2000000e2e2e2ULL, 0x7575000000757575ULL, |
256 | 0xa9a9000000a9a9a9, 0x4a4a0000004a4a4a, 0x5757000000575757, | 256 | 0xa9a9000000a9a9a9ULL, 0x4a4a0000004a4a4aULL, 0x5757000000575757ULL, |
257 | 0x8484000000848484, 0x1111000000111111, 0x4545000000454545, | 257 | 0x8484000000848484ULL, 0x1111000000111111ULL, 0x4545000000454545ULL, |
258 | 0x1b1b0000001b1b1b, 0xf5f5000000f5f5f5, 0xe4e4000000e4e4e4, | 258 | 0x1b1b0000001b1b1bULL, 0xf5f5000000f5f5f5ULL, 0xe4e4000000e4e4e4ULL, |
259 | 0x0e0e0000000e0e0e, 0x7373000000737373, 0xaaaa000000aaaaaa, | 259 | 0x0e0e0000000e0e0eULL, 0x7373000000737373ULL, 0xaaaa000000aaaaaaULL, |
260 | 0xf1f1000000f1f1f1, 0xdddd000000dddddd, 0x5959000000595959, | 260 | 0xf1f1000000f1f1f1ULL, 0xdddd000000ddddddULL, 0x5959000000595959ULL, |
261 | 0x1414000000141414, 0x6c6c0000006c6c6c, 0x9292000000929292, | 261 | 0x1414000000141414ULL, 0x6c6c0000006c6c6cULL, 0x9292000000929292ULL, |
262 | 0x5454000000545454, 0xd0d0000000d0d0d0, 0x7878000000787878, | 262 | 0x5454000000545454ULL, 0xd0d0000000d0d0d0ULL, 0x7878000000787878ULL, |
263 | 0x7070000000707070, 0xe3e3000000e3e3e3, 0x4949000000494949, | 263 | 0x7070000000707070ULL, 0xe3e3000000e3e3e3ULL, 0x4949000000494949ULL, |
264 | 0x8080000000808080, 0x5050000000505050, 0xa7a7000000a7a7a7, | 264 | 0x8080000000808080ULL, 0x5050000000505050ULL, 0xa7a7000000a7a7a7ULL, |
265 | 0xf6f6000000f6f6f6, 0x7777000000777777, 0x9393000000939393, | 265 | 0xf6f6000000f6f6f6ULL, 0x7777000000777777ULL, 0x9393000000939393ULL, |
266 | 0x8686000000868686, 0x8383000000838383, 0x2a2a0000002a2a2a, | 266 | 0x8686000000868686ULL, 0x8383000000838383ULL, 0x2a2a0000002a2a2aULL, |
267 | 0xc7c7000000c7c7c7, 0x5b5b0000005b5b5b, 0xe9e9000000e9e9e9, | 267 | 0xc7c7000000c7c7c7ULL, 0x5b5b0000005b5b5bULL, 0xe9e9000000e9e9e9ULL, |
268 | 0xeeee000000eeeeee, 0x8f8f0000008f8f8f, 0x0101000000010101, | 268 | 0xeeee000000eeeeeeULL, 0x8f8f0000008f8f8fULL, 0x0101000000010101ULL, |
269 | 0x3d3d0000003d3d3d, | 269 | 0x3d3d0000003d3d3dULL, |
270 | }; | 270 | }; |
271 | 271 | ||
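The digit string in each table name appears to record, most significant byte first, which Camellia s-box output occupies that byte of the 64-bit entry, with '0' meaning the byte stays zero; camellia_sp22000222 above repeats s-box 2 of the index in bytes 7, 6, 2, 1 and 0. A small sketch of that byte spreading, with spread_sp22000222 as a hypothetical helper (for index 0, s-box 2 yields 0xe0 and the result is 0xe0e0000000e0e0e0ULL, matching the first entry of the table):

#include <stdint.h>

/*
 * Hypothetical helper, not from the patch: replicate one s-box output
 * byte into the positions named by "22000222" (MSB first).
 */
static uint64_t spread_sp22000222(uint8_t s2)
{
        uint64_t b = s2;

        return (b << 56) | (b << 48) | (b << 16) | (b << 8) | b;
}
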
272 | const u64 camellia_sp03303033[256] = { | 272 | const u64 camellia_sp03303033[256] = { |
273 | 0x0038380038003838, 0x0041410041004141, 0x0016160016001616, | 273 | 0x0038380038003838ULL, 0x0041410041004141ULL, 0x0016160016001616ULL, |
274 | 0x0076760076007676, 0x00d9d900d900d9d9, 0x0093930093009393, | 274 | 0x0076760076007676ULL, 0x00d9d900d900d9d9ULL, 0x0093930093009393ULL, |
275 | 0x0060600060006060, 0x00f2f200f200f2f2, 0x0072720072007272, | 275 | 0x0060600060006060ULL, 0x00f2f200f200f2f2ULL, 0x0072720072007272ULL, |
276 | 0x00c2c200c200c2c2, 0x00abab00ab00abab, 0x009a9a009a009a9a, | 276 | 0x00c2c200c200c2c2ULL, 0x00abab00ab00ababULL, 0x009a9a009a009a9aULL, |
277 | 0x0075750075007575, 0x0006060006000606, 0x0057570057005757, | 277 | 0x0075750075007575ULL, 0x0006060006000606ULL, 0x0057570057005757ULL, |
278 | 0x00a0a000a000a0a0, 0x0091910091009191, 0x00f7f700f700f7f7, | 278 | 0x00a0a000a000a0a0ULL, 0x0091910091009191ULL, 0x00f7f700f700f7f7ULL, |
279 | 0x00b5b500b500b5b5, 0x00c9c900c900c9c9, 0x00a2a200a200a2a2, | 279 | 0x00b5b500b500b5b5ULL, 0x00c9c900c900c9c9ULL, 0x00a2a200a200a2a2ULL, |
280 | 0x008c8c008c008c8c, 0x00d2d200d200d2d2, 0x0090900090009090, | 280 | 0x008c8c008c008c8cULL, 0x00d2d200d200d2d2ULL, 0x0090900090009090ULL, |
281 | 0x00f6f600f600f6f6, 0x0007070007000707, 0x00a7a700a700a7a7, | 281 | 0x00f6f600f600f6f6ULL, 0x0007070007000707ULL, 0x00a7a700a700a7a7ULL, |
282 | 0x0027270027002727, 0x008e8e008e008e8e, 0x00b2b200b200b2b2, | 282 | 0x0027270027002727ULL, 0x008e8e008e008e8eULL, 0x00b2b200b200b2b2ULL, |
283 | 0x0049490049004949, 0x00dede00de00dede, 0x0043430043004343, | 283 | 0x0049490049004949ULL, 0x00dede00de00dedeULL, 0x0043430043004343ULL, |
284 | 0x005c5c005c005c5c, 0x00d7d700d700d7d7, 0x00c7c700c700c7c7, | 284 | 0x005c5c005c005c5cULL, 0x00d7d700d700d7d7ULL, 0x00c7c700c700c7c7ULL, |
285 | 0x003e3e003e003e3e, 0x00f5f500f500f5f5, 0x008f8f008f008f8f, | 285 | 0x003e3e003e003e3eULL, 0x00f5f500f500f5f5ULL, 0x008f8f008f008f8fULL, |
286 | 0x0067670067006767, 0x001f1f001f001f1f, 0x0018180018001818, | 286 | 0x0067670067006767ULL, 0x001f1f001f001f1fULL, 0x0018180018001818ULL, |
287 | 0x006e6e006e006e6e, 0x00afaf00af00afaf, 0x002f2f002f002f2f, | 287 | 0x006e6e006e006e6eULL, 0x00afaf00af00afafULL, 0x002f2f002f002f2fULL, |
288 | 0x00e2e200e200e2e2, 0x0085850085008585, 0x000d0d000d000d0d, | 288 | 0x00e2e200e200e2e2ULL, 0x0085850085008585ULL, 0x000d0d000d000d0dULL, |
289 | 0x0053530053005353, 0x00f0f000f000f0f0, 0x009c9c009c009c9c, | 289 | 0x0053530053005353ULL, 0x00f0f000f000f0f0ULL, 0x009c9c009c009c9cULL, |
290 | 0x0065650065006565, 0x00eaea00ea00eaea, 0x00a3a300a300a3a3, | 290 | 0x0065650065006565ULL, 0x00eaea00ea00eaeaULL, 0x00a3a300a300a3a3ULL, |
291 | 0x00aeae00ae00aeae, 0x009e9e009e009e9e, 0x00ecec00ec00ecec, | 291 | 0x00aeae00ae00aeaeULL, 0x009e9e009e009e9eULL, 0x00ecec00ec00ececULL, |
292 | 0x0080800080008080, 0x002d2d002d002d2d, 0x006b6b006b006b6b, | 292 | 0x0080800080008080ULL, 0x002d2d002d002d2dULL, 0x006b6b006b006b6bULL, |
293 | 0x00a8a800a800a8a8, 0x002b2b002b002b2b, 0x0036360036003636, | 293 | 0x00a8a800a800a8a8ULL, 0x002b2b002b002b2bULL, 0x0036360036003636ULL, |
294 | 0x00a6a600a600a6a6, 0x00c5c500c500c5c5, 0x0086860086008686, | 294 | 0x00a6a600a600a6a6ULL, 0x00c5c500c500c5c5ULL, 0x0086860086008686ULL, |
295 | 0x004d4d004d004d4d, 0x0033330033003333, 0x00fdfd00fd00fdfd, | 295 | 0x004d4d004d004d4dULL, 0x0033330033003333ULL, 0x00fdfd00fd00fdfdULL, |
296 | 0x0066660066006666, 0x0058580058005858, 0x0096960096009696, | 296 | 0x0066660066006666ULL, 0x0058580058005858ULL, 0x0096960096009696ULL, |
297 | 0x003a3a003a003a3a, 0x0009090009000909, 0x0095950095009595, | 297 | 0x003a3a003a003a3aULL, 0x0009090009000909ULL, 0x0095950095009595ULL, |
298 | 0x0010100010001010, 0x0078780078007878, 0x00d8d800d800d8d8, | 298 | 0x0010100010001010ULL, 0x0078780078007878ULL, 0x00d8d800d800d8d8ULL, |
299 | 0x0042420042004242, 0x00cccc00cc00cccc, 0x00efef00ef00efef, | 299 | 0x0042420042004242ULL, 0x00cccc00cc00ccccULL, 0x00efef00ef00efefULL, |
300 | 0x0026260026002626, 0x00e5e500e500e5e5, 0x0061610061006161, | 300 | 0x0026260026002626ULL, 0x00e5e500e500e5e5ULL, 0x0061610061006161ULL, |
301 | 0x001a1a001a001a1a, 0x003f3f003f003f3f, 0x003b3b003b003b3b, | 301 | 0x001a1a001a001a1aULL, 0x003f3f003f003f3fULL, 0x003b3b003b003b3bULL, |
302 | 0x0082820082008282, 0x00b6b600b600b6b6, 0x00dbdb00db00dbdb, | 302 | 0x0082820082008282ULL, 0x00b6b600b600b6b6ULL, 0x00dbdb00db00dbdbULL, |
303 | 0x00d4d400d400d4d4, 0x0098980098009898, 0x00e8e800e800e8e8, | 303 | 0x00d4d400d400d4d4ULL, 0x0098980098009898ULL, 0x00e8e800e800e8e8ULL, |
304 | 0x008b8b008b008b8b, 0x0002020002000202, 0x00ebeb00eb00ebeb, | 304 | 0x008b8b008b008b8bULL, 0x0002020002000202ULL, 0x00ebeb00eb00ebebULL, |
305 | 0x000a0a000a000a0a, 0x002c2c002c002c2c, 0x001d1d001d001d1d, | 305 | 0x000a0a000a000a0aULL, 0x002c2c002c002c2cULL, 0x001d1d001d001d1dULL, |
306 | 0x00b0b000b000b0b0, 0x006f6f006f006f6f, 0x008d8d008d008d8d, | 306 | 0x00b0b000b000b0b0ULL, 0x006f6f006f006f6fULL, 0x008d8d008d008d8dULL, |
307 | 0x0088880088008888, 0x000e0e000e000e0e, 0x0019190019001919, | 307 | 0x0088880088008888ULL, 0x000e0e000e000e0eULL, 0x0019190019001919ULL, |
308 | 0x0087870087008787, 0x004e4e004e004e4e, 0x000b0b000b000b0b, | 308 | 0x0087870087008787ULL, 0x004e4e004e004e4eULL, 0x000b0b000b000b0bULL, |
309 | 0x00a9a900a900a9a9, 0x000c0c000c000c0c, 0x0079790079007979, | 309 | 0x00a9a900a900a9a9ULL, 0x000c0c000c000c0cULL, 0x0079790079007979ULL, |
310 | 0x0011110011001111, 0x007f7f007f007f7f, 0x0022220022002222, | 310 | 0x0011110011001111ULL, 0x007f7f007f007f7fULL, 0x0022220022002222ULL, |
311 | 0x00e7e700e700e7e7, 0x0059590059005959, 0x00e1e100e100e1e1, | 311 | 0x00e7e700e700e7e7ULL, 0x0059590059005959ULL, 0x00e1e100e100e1e1ULL, |
312 | 0x00dada00da00dada, 0x003d3d003d003d3d, 0x00c8c800c800c8c8, | 312 | 0x00dada00da00dadaULL, 0x003d3d003d003d3dULL, 0x00c8c800c800c8c8ULL, |
313 | 0x0012120012001212, 0x0004040004000404, 0x0074740074007474, | 313 | 0x0012120012001212ULL, 0x0004040004000404ULL, 0x0074740074007474ULL, |
314 | 0x0054540054005454, 0x0030300030003030, 0x007e7e007e007e7e, | 314 | 0x0054540054005454ULL, 0x0030300030003030ULL, 0x007e7e007e007e7eULL, |
315 | 0x00b4b400b400b4b4, 0x0028280028002828, 0x0055550055005555, | 315 | 0x00b4b400b400b4b4ULL, 0x0028280028002828ULL, 0x0055550055005555ULL, |
316 | 0x0068680068006868, 0x0050500050005050, 0x00bebe00be00bebe, | 316 | 0x0068680068006868ULL, 0x0050500050005050ULL, 0x00bebe00be00bebeULL, |
317 | 0x00d0d000d000d0d0, 0x00c4c400c400c4c4, 0x0031310031003131, | 317 | 0x00d0d000d000d0d0ULL, 0x00c4c400c400c4c4ULL, 0x0031310031003131ULL, |
318 | 0x00cbcb00cb00cbcb, 0x002a2a002a002a2a, 0x00adad00ad00adad, | 318 | 0x00cbcb00cb00cbcbULL, 0x002a2a002a002a2aULL, 0x00adad00ad00adadULL, |
319 | 0x000f0f000f000f0f, 0x00caca00ca00caca, 0x0070700070007070, | 319 | 0x000f0f000f000f0fULL, 0x00caca00ca00cacaULL, 0x0070700070007070ULL, |
320 | 0x00ffff00ff00ffff, 0x0032320032003232, 0x0069690069006969, | 320 | 0x00ffff00ff00ffffULL, 0x0032320032003232ULL, 0x0069690069006969ULL, |
321 | 0x0008080008000808, 0x0062620062006262, 0x0000000000000000, | 321 | 0x0008080008000808ULL, 0x0062620062006262ULL, 0x0000000000000000ULL, |
322 | 0x0024240024002424, 0x00d1d100d100d1d1, 0x00fbfb00fb00fbfb, | 322 | 0x0024240024002424ULL, 0x00d1d100d100d1d1ULL, 0x00fbfb00fb00fbfbULL, |
323 | 0x00baba00ba00baba, 0x00eded00ed00eded, 0x0045450045004545, | 323 | 0x00baba00ba00babaULL, 0x00eded00ed00ededULL, 0x0045450045004545ULL, |
324 | 0x0081810081008181, 0x0073730073007373, 0x006d6d006d006d6d, | 324 | 0x0081810081008181ULL, 0x0073730073007373ULL, 0x006d6d006d006d6dULL, |
325 | 0x0084840084008484, 0x009f9f009f009f9f, 0x00eeee00ee00eeee, | 325 | 0x0084840084008484ULL, 0x009f9f009f009f9fULL, 0x00eeee00ee00eeeeULL, |
326 | 0x004a4a004a004a4a, 0x00c3c300c300c3c3, 0x002e2e002e002e2e, | 326 | 0x004a4a004a004a4aULL, 0x00c3c300c300c3c3ULL, 0x002e2e002e002e2eULL, |
327 | 0x00c1c100c100c1c1, 0x0001010001000101, 0x00e6e600e600e6e6, | 327 | 0x00c1c100c100c1c1ULL, 0x0001010001000101ULL, 0x00e6e600e600e6e6ULL, |
328 | 0x0025250025002525, 0x0048480048004848, 0x0099990099009999, | 328 | 0x0025250025002525ULL, 0x0048480048004848ULL, 0x0099990099009999ULL, |
329 | 0x00b9b900b900b9b9, 0x00b3b300b300b3b3, 0x007b7b007b007b7b, | 329 | 0x00b9b900b900b9b9ULL, 0x00b3b300b300b3b3ULL, 0x007b7b007b007b7bULL, |
330 | 0x00f9f900f900f9f9, 0x00cece00ce00cece, 0x00bfbf00bf00bfbf, | 330 | 0x00f9f900f900f9f9ULL, 0x00cece00ce00ceceULL, 0x00bfbf00bf00bfbfULL, |
331 | 0x00dfdf00df00dfdf, 0x0071710071007171, 0x0029290029002929, | 331 | 0x00dfdf00df00dfdfULL, 0x0071710071007171ULL, 0x0029290029002929ULL, |
332 | 0x00cdcd00cd00cdcd, 0x006c6c006c006c6c, 0x0013130013001313, | 332 | 0x00cdcd00cd00cdcdULL, 0x006c6c006c006c6cULL, 0x0013130013001313ULL, |
333 | 0x0064640064006464, 0x009b9b009b009b9b, 0x0063630063006363, | 333 | 0x0064640064006464ULL, 0x009b9b009b009b9bULL, 0x0063630063006363ULL, |
334 | 0x009d9d009d009d9d, 0x00c0c000c000c0c0, 0x004b4b004b004b4b, | 334 | 0x009d9d009d009d9dULL, 0x00c0c000c000c0c0ULL, 0x004b4b004b004b4bULL, |
335 | 0x00b7b700b700b7b7, 0x00a5a500a500a5a5, 0x0089890089008989, | 335 | 0x00b7b700b700b7b7ULL, 0x00a5a500a500a5a5ULL, 0x0089890089008989ULL, |
336 | 0x005f5f005f005f5f, 0x00b1b100b100b1b1, 0x0017170017001717, | 336 | 0x005f5f005f005f5fULL, 0x00b1b100b100b1b1ULL, 0x0017170017001717ULL, |
337 | 0x00f4f400f400f4f4, 0x00bcbc00bc00bcbc, 0x00d3d300d300d3d3, | 337 | 0x00f4f400f400f4f4ULL, 0x00bcbc00bc00bcbcULL, 0x00d3d300d300d3d3ULL, |
338 | 0x0046460046004646, 0x00cfcf00cf00cfcf, 0x0037370037003737, | 338 | 0x0046460046004646ULL, 0x00cfcf00cf00cfcfULL, 0x0037370037003737ULL, |
339 | 0x005e5e005e005e5e, 0x0047470047004747, 0x0094940094009494, | 339 | 0x005e5e005e005e5eULL, 0x0047470047004747ULL, 0x0094940094009494ULL, |
340 | 0x00fafa00fa00fafa, 0x00fcfc00fc00fcfc, 0x005b5b005b005b5b, | 340 | 0x00fafa00fa00fafaULL, 0x00fcfc00fc00fcfcULL, 0x005b5b005b005b5bULL, |
341 | 0x0097970097009797, 0x00fefe00fe00fefe, 0x005a5a005a005a5a, | 341 | 0x0097970097009797ULL, 0x00fefe00fe00fefeULL, 0x005a5a005a005a5aULL, |
342 | 0x00acac00ac00acac, 0x003c3c003c003c3c, 0x004c4c004c004c4c, | 342 | 0x00acac00ac00acacULL, 0x003c3c003c003c3cULL, 0x004c4c004c004c4cULL, |
343 | 0x0003030003000303, 0x0035350035003535, 0x00f3f300f300f3f3, | 343 | 0x0003030003000303ULL, 0x0035350035003535ULL, 0x00f3f300f300f3f3ULL, |
344 | 0x0023230023002323, 0x00b8b800b800b8b8, 0x005d5d005d005d5d, | 344 | 0x0023230023002323ULL, 0x00b8b800b800b8b8ULL, 0x005d5d005d005d5dULL, |
345 | 0x006a6a006a006a6a, 0x0092920092009292, 0x00d5d500d500d5d5, | 345 | 0x006a6a006a006a6aULL, 0x0092920092009292ULL, 0x00d5d500d500d5d5ULL, |
346 | 0x0021210021002121, 0x0044440044004444, 0x0051510051005151, | 346 | 0x0021210021002121ULL, 0x0044440044004444ULL, 0x0051510051005151ULL, |
347 | 0x00c6c600c600c6c6, 0x007d7d007d007d7d, 0x0039390039003939, | 347 | 0x00c6c600c600c6c6ULL, 0x007d7d007d007d7dULL, 0x0039390039003939ULL, |
348 | 0x0083830083008383, 0x00dcdc00dc00dcdc, 0x00aaaa00aa00aaaa, | 348 | 0x0083830083008383ULL, 0x00dcdc00dc00dcdcULL, 0x00aaaa00aa00aaaaULL, |
349 | 0x007c7c007c007c7c, 0x0077770077007777, 0x0056560056005656, | 349 | 0x007c7c007c007c7cULL, 0x0077770077007777ULL, 0x0056560056005656ULL, |
350 | 0x0005050005000505, 0x001b1b001b001b1b, 0x00a4a400a400a4a4, | 350 | 0x0005050005000505ULL, 0x001b1b001b001b1bULL, 0x00a4a400a400a4a4ULL, |
351 | 0x0015150015001515, 0x0034340034003434, 0x001e1e001e001e1e, | 351 | 0x0015150015001515ULL, 0x0034340034003434ULL, 0x001e1e001e001e1eULL, |
352 | 0x001c1c001c001c1c, 0x00f8f800f800f8f8, 0x0052520052005252, | 352 | 0x001c1c001c001c1cULL, 0x00f8f800f800f8f8ULL, 0x0052520052005252ULL, |
353 | 0x0020200020002020, 0x0014140014001414, 0x00e9e900e900e9e9, | 353 | 0x0020200020002020ULL, 0x0014140014001414ULL, 0x00e9e900e900e9e9ULL, |
354 | 0x00bdbd00bd00bdbd, 0x00dddd00dd00dddd, 0x00e4e400e400e4e4, | 354 | 0x00bdbd00bd00bdbdULL, 0x00dddd00dd00ddddULL, 0x00e4e400e400e4e4ULL, |
355 | 0x00a1a100a100a1a1, 0x00e0e000e000e0e0, 0x008a8a008a008a8a, | 355 | 0x00a1a100a100a1a1ULL, 0x00e0e000e000e0e0ULL, 0x008a8a008a008a8aULL, |
356 | 0x00f1f100f100f1f1, 0x00d6d600d600d6d6, 0x007a7a007a007a7a, | 356 | 0x00f1f100f100f1f1ULL, 0x00d6d600d600d6d6ULL, 0x007a7a007a007a7aULL, |
357 | 0x00bbbb00bb00bbbb, 0x00e3e300e300e3e3, 0x0040400040004040, | 357 | 0x00bbbb00bb00bbbbULL, 0x00e3e300e300e3e3ULL, 0x0040400040004040ULL, |
358 | 0x004f4f004f004f4f, | 358 | 0x004f4f004f004f4fULL, |
359 | }; | 359 | }; |
360 | 360 | ||
361 | const u64 camellia_sp00444404[256] = { | 361 | const u64 camellia_sp00444404[256] = { |
362 | 0x0000707070700070, 0x00002c2c2c2c002c, 0x0000b3b3b3b300b3, | 362 | 0x0000707070700070ULL, 0x00002c2c2c2c002cULL, 0x0000b3b3b3b300b3ULL, |
363 | 0x0000c0c0c0c000c0, 0x0000e4e4e4e400e4, 0x0000575757570057, | 363 | 0x0000c0c0c0c000c0ULL, 0x0000e4e4e4e400e4ULL, 0x0000575757570057ULL, |
364 | 0x0000eaeaeaea00ea, 0x0000aeaeaeae00ae, 0x0000232323230023, | 364 | 0x0000eaeaeaea00eaULL, 0x0000aeaeaeae00aeULL, 0x0000232323230023ULL, |
365 | 0x00006b6b6b6b006b, 0x0000454545450045, 0x0000a5a5a5a500a5, | 365 | 0x00006b6b6b6b006bULL, 0x0000454545450045ULL, 0x0000a5a5a5a500a5ULL, |
366 | 0x0000edededed00ed, 0x00004f4f4f4f004f, 0x00001d1d1d1d001d, | 366 | 0x0000edededed00edULL, 0x00004f4f4f4f004fULL, 0x00001d1d1d1d001dULL, |
367 | 0x0000929292920092, 0x0000868686860086, 0x0000afafafaf00af, | 367 | 0x0000929292920092ULL, 0x0000868686860086ULL, 0x0000afafafaf00afULL, |
368 | 0x00007c7c7c7c007c, 0x00001f1f1f1f001f, 0x00003e3e3e3e003e, | 368 | 0x00007c7c7c7c007cULL, 0x00001f1f1f1f001fULL, 0x00003e3e3e3e003eULL, |
369 | 0x0000dcdcdcdc00dc, 0x00005e5e5e5e005e, 0x00000b0b0b0b000b, | 369 | 0x0000dcdcdcdc00dcULL, 0x00005e5e5e5e005eULL, 0x00000b0b0b0b000bULL, |
370 | 0x0000a6a6a6a600a6, 0x0000393939390039, 0x0000d5d5d5d500d5, | 370 | 0x0000a6a6a6a600a6ULL, 0x0000393939390039ULL, 0x0000d5d5d5d500d5ULL, |
371 | 0x00005d5d5d5d005d, 0x0000d9d9d9d900d9, 0x00005a5a5a5a005a, | 371 | 0x00005d5d5d5d005dULL, 0x0000d9d9d9d900d9ULL, 0x00005a5a5a5a005aULL, |
372 | 0x0000515151510051, 0x00006c6c6c6c006c, 0x00008b8b8b8b008b, | 372 | 0x0000515151510051ULL, 0x00006c6c6c6c006cULL, 0x00008b8b8b8b008bULL, |
373 | 0x00009a9a9a9a009a, 0x0000fbfbfbfb00fb, 0x0000b0b0b0b000b0, | 373 | 0x00009a9a9a9a009aULL, 0x0000fbfbfbfb00fbULL, 0x0000b0b0b0b000b0ULL, |
374 | 0x0000747474740074, 0x00002b2b2b2b002b, 0x0000f0f0f0f000f0, | 374 | 0x0000747474740074ULL, 0x00002b2b2b2b002bULL, 0x0000f0f0f0f000f0ULL, |
375 | 0x0000848484840084, 0x0000dfdfdfdf00df, 0x0000cbcbcbcb00cb, | 375 | 0x0000848484840084ULL, 0x0000dfdfdfdf00dfULL, 0x0000cbcbcbcb00cbULL, |
376 | 0x0000343434340034, 0x0000767676760076, 0x00006d6d6d6d006d, | 376 | 0x0000343434340034ULL, 0x0000767676760076ULL, 0x00006d6d6d6d006dULL, |
377 | 0x0000a9a9a9a900a9, 0x0000d1d1d1d100d1, 0x0000040404040004, | 377 | 0x0000a9a9a9a900a9ULL, 0x0000d1d1d1d100d1ULL, 0x0000040404040004ULL, |
378 | 0x0000141414140014, 0x00003a3a3a3a003a, 0x0000dededede00de, | 378 | 0x0000141414140014ULL, 0x00003a3a3a3a003aULL, 0x0000dededede00deULL, |
379 | 0x0000111111110011, 0x0000323232320032, 0x00009c9c9c9c009c, | 379 | 0x0000111111110011ULL, 0x0000323232320032ULL, 0x00009c9c9c9c009cULL, |
380 | 0x0000535353530053, 0x0000f2f2f2f200f2, 0x0000fefefefe00fe, | 380 | 0x0000535353530053ULL, 0x0000f2f2f2f200f2ULL, 0x0000fefefefe00feULL, |
381 | 0x0000cfcfcfcf00cf, 0x0000c3c3c3c300c3, 0x00007a7a7a7a007a, | 381 | 0x0000cfcfcfcf00cfULL, 0x0000c3c3c3c300c3ULL, 0x00007a7a7a7a007aULL, |
382 | 0x0000242424240024, 0x0000e8e8e8e800e8, 0x0000606060600060, | 382 | 0x0000242424240024ULL, 0x0000e8e8e8e800e8ULL, 0x0000606060600060ULL, |
383 | 0x0000696969690069, 0x0000aaaaaaaa00aa, 0x0000a0a0a0a000a0, | 383 | 0x0000696969690069ULL, 0x0000aaaaaaaa00aaULL, 0x0000a0a0a0a000a0ULL, |
384 | 0x0000a1a1a1a100a1, 0x0000626262620062, 0x0000545454540054, | 384 | 0x0000a1a1a1a100a1ULL, 0x0000626262620062ULL, 0x0000545454540054ULL, |
385 | 0x00001e1e1e1e001e, 0x0000e0e0e0e000e0, 0x0000646464640064, | 385 | 0x00001e1e1e1e001eULL, 0x0000e0e0e0e000e0ULL, 0x0000646464640064ULL, |
386 | 0x0000101010100010, 0x0000000000000000, 0x0000a3a3a3a300a3, | 386 | 0x0000101010100010ULL, 0x0000000000000000ULL, 0x0000a3a3a3a300a3ULL, |
387 | 0x0000757575750075, 0x00008a8a8a8a008a, 0x0000e6e6e6e600e6, | 387 | 0x0000757575750075ULL, 0x00008a8a8a8a008aULL, 0x0000e6e6e6e600e6ULL, |
388 | 0x0000090909090009, 0x0000dddddddd00dd, 0x0000878787870087, | 388 | 0x0000090909090009ULL, 0x0000dddddddd00ddULL, 0x0000878787870087ULL, |
389 | 0x0000838383830083, 0x0000cdcdcdcd00cd, 0x0000909090900090, | 389 | 0x0000838383830083ULL, 0x0000cdcdcdcd00cdULL, 0x0000909090900090ULL, |
390 | 0x0000737373730073, 0x0000f6f6f6f600f6, 0x00009d9d9d9d009d, | 390 | 0x0000737373730073ULL, 0x0000f6f6f6f600f6ULL, 0x00009d9d9d9d009dULL, |
391 | 0x0000bfbfbfbf00bf, 0x0000525252520052, 0x0000d8d8d8d800d8, | 391 | 0x0000bfbfbfbf00bfULL, 0x0000525252520052ULL, 0x0000d8d8d8d800d8ULL, |
392 | 0x0000c8c8c8c800c8, 0x0000c6c6c6c600c6, 0x0000818181810081, | 392 | 0x0000c8c8c8c800c8ULL, 0x0000c6c6c6c600c6ULL, 0x0000818181810081ULL, |
393 | 0x00006f6f6f6f006f, 0x0000131313130013, 0x0000636363630063, | 393 | 0x00006f6f6f6f006fULL, 0x0000131313130013ULL, 0x0000636363630063ULL, |
394 | 0x0000e9e9e9e900e9, 0x0000a7a7a7a700a7, 0x00009f9f9f9f009f, | 394 | 0x0000e9e9e9e900e9ULL, 0x0000a7a7a7a700a7ULL, 0x00009f9f9f9f009fULL, |
395 | 0x0000bcbcbcbc00bc, 0x0000292929290029, 0x0000f9f9f9f900f9, | 395 | 0x0000bcbcbcbc00bcULL, 0x0000292929290029ULL, 0x0000f9f9f9f900f9ULL, |
396 | 0x00002f2f2f2f002f, 0x0000b4b4b4b400b4, 0x0000787878780078, | 396 | 0x00002f2f2f2f002fULL, 0x0000b4b4b4b400b4ULL, 0x0000787878780078ULL, |
397 | 0x0000060606060006, 0x0000e7e7e7e700e7, 0x0000717171710071, | 397 | 0x0000060606060006ULL, 0x0000e7e7e7e700e7ULL, 0x0000717171710071ULL, |
398 | 0x0000d4d4d4d400d4, 0x0000abababab00ab, 0x0000888888880088, | 398 | 0x0000d4d4d4d400d4ULL, 0x0000abababab00abULL, 0x0000888888880088ULL, |
399 | 0x00008d8d8d8d008d, 0x0000727272720072, 0x0000b9b9b9b900b9, | 399 | 0x00008d8d8d8d008dULL, 0x0000727272720072ULL, 0x0000b9b9b9b900b9ULL, |
400 | 0x0000f8f8f8f800f8, 0x0000acacacac00ac, 0x0000363636360036, | 400 | 0x0000f8f8f8f800f8ULL, 0x0000acacacac00acULL, 0x0000363636360036ULL, |
401 | 0x00002a2a2a2a002a, 0x00003c3c3c3c003c, 0x0000f1f1f1f100f1, | 401 | 0x00002a2a2a2a002aULL, 0x00003c3c3c3c003cULL, 0x0000f1f1f1f100f1ULL, |
402 | 0x0000404040400040, 0x0000d3d3d3d300d3, 0x0000bbbbbbbb00bb, | 402 | 0x0000404040400040ULL, 0x0000d3d3d3d300d3ULL, 0x0000bbbbbbbb00bbULL, |
403 | 0x0000434343430043, 0x0000151515150015, 0x0000adadadad00ad, | 403 | 0x0000434343430043ULL, 0x0000151515150015ULL, 0x0000adadadad00adULL, |
404 | 0x0000777777770077, 0x0000808080800080, 0x0000828282820082, | 404 | 0x0000777777770077ULL, 0x0000808080800080ULL, 0x0000828282820082ULL, |
405 | 0x0000ecececec00ec, 0x0000272727270027, 0x0000e5e5e5e500e5, | 405 | 0x0000ecececec00ecULL, 0x0000272727270027ULL, 0x0000e5e5e5e500e5ULL, |
406 | 0x0000858585850085, 0x0000353535350035, 0x00000c0c0c0c000c, | 406 | 0x0000858585850085ULL, 0x0000353535350035ULL, 0x00000c0c0c0c000cULL, |
407 | 0x0000414141410041, 0x0000efefefef00ef, 0x0000939393930093, | 407 | 0x0000414141410041ULL, 0x0000efefefef00efULL, 0x0000939393930093ULL, |
408 | 0x0000191919190019, 0x0000212121210021, 0x00000e0e0e0e000e, | 408 | 0x0000191919190019ULL, 0x0000212121210021ULL, 0x00000e0e0e0e000eULL, |
409 | 0x00004e4e4e4e004e, 0x0000656565650065, 0x0000bdbdbdbd00bd, | 409 | 0x00004e4e4e4e004eULL, 0x0000656565650065ULL, 0x0000bdbdbdbd00bdULL, |
410 | 0x0000b8b8b8b800b8, 0x00008f8f8f8f008f, 0x0000ebebebeb00eb, | 410 | 0x0000b8b8b8b800b8ULL, 0x00008f8f8f8f008fULL, 0x0000ebebebeb00ebULL, |
411 | 0x0000cececece00ce, 0x0000303030300030, 0x00005f5f5f5f005f, | 411 | 0x0000cececece00ceULL, 0x0000303030300030ULL, 0x00005f5f5f5f005fULL, |
412 | 0x0000c5c5c5c500c5, 0x00001a1a1a1a001a, 0x0000e1e1e1e100e1, | 412 | 0x0000c5c5c5c500c5ULL, 0x00001a1a1a1a001aULL, 0x0000e1e1e1e100e1ULL, |
413 | 0x0000cacacaca00ca, 0x0000474747470047, 0x00003d3d3d3d003d, | 413 | 0x0000cacacaca00caULL, 0x0000474747470047ULL, 0x00003d3d3d3d003dULL, |
414 | 0x0000010101010001, 0x0000d6d6d6d600d6, 0x0000565656560056, | 414 | 0x0000010101010001ULL, 0x0000d6d6d6d600d6ULL, 0x0000565656560056ULL, |
415 | 0x00004d4d4d4d004d, 0x00000d0d0d0d000d, 0x0000666666660066, | 415 | 0x00004d4d4d4d004dULL, 0x00000d0d0d0d000dULL, 0x0000666666660066ULL, |
416 | 0x0000cccccccc00cc, 0x00002d2d2d2d002d, 0x0000121212120012, | 416 | 0x0000cccccccc00ccULL, 0x00002d2d2d2d002dULL, 0x0000121212120012ULL, |
417 | 0x0000202020200020, 0x0000b1b1b1b100b1, 0x0000999999990099, | 417 | 0x0000202020200020ULL, 0x0000b1b1b1b100b1ULL, 0x0000999999990099ULL, |
418 | 0x00004c4c4c4c004c, 0x0000c2c2c2c200c2, 0x00007e7e7e7e007e, | 418 | 0x00004c4c4c4c004cULL, 0x0000c2c2c2c200c2ULL, 0x00007e7e7e7e007eULL, |
419 | 0x0000050505050005, 0x0000b7b7b7b700b7, 0x0000313131310031, | 419 | 0x0000050505050005ULL, 0x0000b7b7b7b700b7ULL, 0x0000313131310031ULL, |
420 | 0x0000171717170017, 0x0000d7d7d7d700d7, 0x0000585858580058, | 420 | 0x0000171717170017ULL, 0x0000d7d7d7d700d7ULL, 0x0000585858580058ULL, |
421 | 0x0000616161610061, 0x00001b1b1b1b001b, 0x00001c1c1c1c001c, | 421 | 0x0000616161610061ULL, 0x00001b1b1b1b001bULL, 0x00001c1c1c1c001cULL, |
422 | 0x00000f0f0f0f000f, 0x0000161616160016, 0x0000181818180018, | 422 | 0x00000f0f0f0f000fULL, 0x0000161616160016ULL, 0x0000181818180018ULL, |
423 | 0x0000222222220022, 0x0000444444440044, 0x0000b2b2b2b200b2, | 423 | 0x0000222222220022ULL, 0x0000444444440044ULL, 0x0000b2b2b2b200b2ULL, |
424 | 0x0000b5b5b5b500b5, 0x0000919191910091, 0x0000080808080008, | 424 | 0x0000b5b5b5b500b5ULL, 0x0000919191910091ULL, 0x0000080808080008ULL, |
425 | 0x0000a8a8a8a800a8, 0x0000fcfcfcfc00fc, 0x0000505050500050, | 425 | 0x0000a8a8a8a800a8ULL, 0x0000fcfcfcfc00fcULL, 0x0000505050500050ULL, |
426 | 0x0000d0d0d0d000d0, 0x00007d7d7d7d007d, 0x0000898989890089, | 426 | 0x0000d0d0d0d000d0ULL, 0x00007d7d7d7d007dULL, 0x0000898989890089ULL, |
427 | 0x0000979797970097, 0x00005b5b5b5b005b, 0x0000959595950095, | 427 | 0x0000979797970097ULL, 0x00005b5b5b5b005bULL, 0x0000959595950095ULL, |
428 | 0x0000ffffffff00ff, 0x0000d2d2d2d200d2, 0x0000c4c4c4c400c4, | 428 | 0x0000ffffffff00ffULL, 0x0000d2d2d2d200d2ULL, 0x0000c4c4c4c400c4ULL, |
429 | 0x0000484848480048, 0x0000f7f7f7f700f7, 0x0000dbdbdbdb00db, | 429 | 0x0000484848480048ULL, 0x0000f7f7f7f700f7ULL, 0x0000dbdbdbdb00dbULL, |
430 | 0x0000030303030003, 0x0000dadadada00da, 0x00003f3f3f3f003f, | 430 | 0x0000030303030003ULL, 0x0000dadadada00daULL, 0x00003f3f3f3f003fULL, |
431 | 0x0000949494940094, 0x00005c5c5c5c005c, 0x0000020202020002, | 431 | 0x0000949494940094ULL, 0x00005c5c5c5c005cULL, 0x0000020202020002ULL, |
432 | 0x00004a4a4a4a004a, 0x0000333333330033, 0x0000676767670067, | 432 | 0x00004a4a4a4a004aULL, 0x0000333333330033ULL, 0x0000676767670067ULL, |
433 | 0x0000f3f3f3f300f3, 0x00007f7f7f7f007f, 0x0000e2e2e2e200e2, | 433 | 0x0000f3f3f3f300f3ULL, 0x00007f7f7f7f007fULL, 0x0000e2e2e2e200e2ULL, |
434 | 0x00009b9b9b9b009b, 0x0000262626260026, 0x0000373737370037, | 434 | 0x00009b9b9b9b009bULL, 0x0000262626260026ULL, 0x0000373737370037ULL, |
435 | 0x00003b3b3b3b003b, 0x0000969696960096, 0x00004b4b4b4b004b, | 435 | 0x00003b3b3b3b003bULL, 0x0000969696960096ULL, 0x00004b4b4b4b004bULL, |
436 | 0x0000bebebebe00be, 0x00002e2e2e2e002e, 0x0000797979790079, | 436 | 0x0000bebebebe00beULL, 0x00002e2e2e2e002eULL, 0x0000797979790079ULL, |
437 | 0x00008c8c8c8c008c, 0x00006e6e6e6e006e, 0x00008e8e8e8e008e, | 437 | 0x00008c8c8c8c008cULL, 0x00006e6e6e6e006eULL, 0x00008e8e8e8e008eULL, |
438 | 0x0000f5f5f5f500f5, 0x0000b6b6b6b600b6, 0x0000fdfdfdfd00fd, | 438 | 0x0000f5f5f5f500f5ULL, 0x0000b6b6b6b600b6ULL, 0x0000fdfdfdfd00fdULL, |
439 | 0x0000595959590059, 0x0000989898980098, 0x00006a6a6a6a006a, | 439 | 0x0000595959590059ULL, 0x0000989898980098ULL, 0x00006a6a6a6a006aULL, |
440 | 0x0000464646460046, 0x0000babababa00ba, 0x0000252525250025, | 440 | 0x0000464646460046ULL, 0x0000babababa00baULL, 0x0000252525250025ULL, |
441 | 0x0000424242420042, 0x0000a2a2a2a200a2, 0x0000fafafafa00fa, | 441 | 0x0000424242420042ULL, 0x0000a2a2a2a200a2ULL, 0x0000fafafafa00faULL, |
442 | 0x0000070707070007, 0x0000555555550055, 0x0000eeeeeeee00ee, | 442 | 0x0000070707070007ULL, 0x0000555555550055ULL, 0x0000eeeeeeee00eeULL, |
443 | 0x00000a0a0a0a000a, 0x0000494949490049, 0x0000686868680068, | 443 | 0x00000a0a0a0a000aULL, 0x0000494949490049ULL, 0x0000686868680068ULL, |
444 | 0x0000383838380038, 0x0000a4a4a4a400a4, 0x0000282828280028, | 444 | 0x0000383838380038ULL, 0x0000a4a4a4a400a4ULL, 0x0000282828280028ULL, |
445 | 0x00007b7b7b7b007b, 0x0000c9c9c9c900c9, 0x0000c1c1c1c100c1, | 445 | 0x00007b7b7b7b007bULL, 0x0000c9c9c9c900c9ULL, 0x0000c1c1c1c100c1ULL, |
446 | 0x0000e3e3e3e300e3, 0x0000f4f4f4f400f4, 0x0000c7c7c7c700c7, | 446 | 0x0000e3e3e3e300e3ULL, 0x0000f4f4f4f400f4ULL, 0x0000c7c7c7c700c7ULL, |
447 | 0x00009e9e9e9e009e, | 447 | 0x00009e9e9e9e009eULL, |
448 | }; | 448 | }; |
449 | 449 | ||
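Across the tables in this hunk the per-byte values follow the relations the Camellia specification uses to derive its s-boxes from SBOX1: s2 rotates the s1 output left by one bit, s3 rotates it right by one bit, and s4 applies s1 to the index rotated left by one bit. Taking s1(0) = 0x70, s1(1) = 0x82 and s1(2) = 0x2c (the first three SBOX1 entries, quoted here only for the check), the bytes visible above line up: 0xe0 in camellia_sp22000222[0], 0x38 in camellia_sp03303033[0], and 0x2c in camellia_sp00444404[1]. A sketch of the rotations, with rol8/ror8 as local helpers rather than kernel API:

#include <stdint.h>

/* Byte rotations used to derive Camellia's s-boxes 2 and 3 from s-box 1. */
static uint8_t rol8(uint8_t v)
{
        return (uint8_t)((v << 1) | (v >> 7));
}

static uint8_t ror8(uint8_t v)
{
        return (uint8_t)((v >> 1) | (v << 7));
}

/*
 * Checks against the tables above:
 *   s2(0) = rol8(0x70) = 0xe0          -> byte of camellia_sp22000222[0]
 *   s3(0) = ror8(0x70) = 0x38          -> byte of camellia_sp03303033[0]
 *   s4(1) = s1(rol8(1)) = s1(2) = 0x2c -> byte of camellia_sp00444404[1]
 */
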
450 | const u64 camellia_sp02220222[256] = { | 450 | const u64 camellia_sp02220222[256] = { |
451 | 0x00e0e0e000e0e0e0, 0x0005050500050505, 0x0058585800585858, | 451 | 0x00e0e0e000e0e0e0ULL, 0x0005050500050505ULL, 0x0058585800585858ULL, |
452 | 0x00d9d9d900d9d9d9, 0x0067676700676767, 0x004e4e4e004e4e4e, | 452 | 0x00d9d9d900d9d9d9ULL, 0x0067676700676767ULL, 0x004e4e4e004e4e4eULL, |
453 | 0x0081818100818181, 0x00cbcbcb00cbcbcb, 0x00c9c9c900c9c9c9, | 453 | 0x0081818100818181ULL, 0x00cbcbcb00cbcbcbULL, 0x00c9c9c900c9c9c9ULL, |
454 | 0x000b0b0b000b0b0b, 0x00aeaeae00aeaeae, 0x006a6a6a006a6a6a, | 454 | 0x000b0b0b000b0b0bULL, 0x00aeaeae00aeaeaeULL, 0x006a6a6a006a6a6aULL, |
455 | 0x00d5d5d500d5d5d5, 0x0018181800181818, 0x005d5d5d005d5d5d, | 455 | 0x00d5d5d500d5d5d5ULL, 0x0018181800181818ULL, 0x005d5d5d005d5d5dULL, |
456 | 0x0082828200828282, 0x0046464600464646, 0x00dfdfdf00dfdfdf, | 456 | 0x0082828200828282ULL, 0x0046464600464646ULL, 0x00dfdfdf00dfdfdfULL, |
457 | 0x00d6d6d600d6d6d6, 0x0027272700272727, 0x008a8a8a008a8a8a, | 457 | 0x00d6d6d600d6d6d6ULL, 0x0027272700272727ULL, 0x008a8a8a008a8a8aULL, |
458 | 0x0032323200323232, 0x004b4b4b004b4b4b, 0x0042424200424242, | 458 | 0x0032323200323232ULL, 0x004b4b4b004b4b4bULL, 0x0042424200424242ULL, |
459 | 0x00dbdbdb00dbdbdb, 0x001c1c1c001c1c1c, 0x009e9e9e009e9e9e, | 459 | 0x00dbdbdb00dbdbdbULL, 0x001c1c1c001c1c1cULL, 0x009e9e9e009e9e9eULL, |
460 | 0x009c9c9c009c9c9c, 0x003a3a3a003a3a3a, 0x00cacaca00cacaca, | 460 | 0x009c9c9c009c9c9cULL, 0x003a3a3a003a3a3aULL, 0x00cacaca00cacacaULL, |
461 | 0x0025252500252525, 0x007b7b7b007b7b7b, 0x000d0d0d000d0d0d, | 461 | 0x0025252500252525ULL, 0x007b7b7b007b7b7bULL, 0x000d0d0d000d0d0dULL, |
462 | 0x0071717100717171, 0x005f5f5f005f5f5f, 0x001f1f1f001f1f1f, | 462 | 0x0071717100717171ULL, 0x005f5f5f005f5f5fULL, 0x001f1f1f001f1f1fULL, |
463 | 0x00f8f8f800f8f8f8, 0x00d7d7d700d7d7d7, 0x003e3e3e003e3e3e, | 463 | 0x00f8f8f800f8f8f8ULL, 0x00d7d7d700d7d7d7ULL, 0x003e3e3e003e3e3eULL, |
464 | 0x009d9d9d009d9d9d, 0x007c7c7c007c7c7c, 0x0060606000606060, | 464 | 0x009d9d9d009d9d9dULL, 0x007c7c7c007c7c7cULL, 0x0060606000606060ULL, |
465 | 0x00b9b9b900b9b9b9, 0x00bebebe00bebebe, 0x00bcbcbc00bcbcbc, | 465 | 0x00b9b9b900b9b9b9ULL, 0x00bebebe00bebebeULL, 0x00bcbcbc00bcbcbcULL, |
466 | 0x008b8b8b008b8b8b, 0x0016161600161616, 0x0034343400343434, | 466 | 0x008b8b8b008b8b8bULL, 0x0016161600161616ULL, 0x0034343400343434ULL, |
467 | 0x004d4d4d004d4d4d, 0x00c3c3c300c3c3c3, 0x0072727200727272, | 467 | 0x004d4d4d004d4d4dULL, 0x00c3c3c300c3c3c3ULL, 0x0072727200727272ULL, |
468 | 0x0095959500959595, 0x00ababab00ababab, 0x008e8e8e008e8e8e, | 468 | 0x0095959500959595ULL, 0x00ababab00abababULL, 0x008e8e8e008e8e8eULL, |
469 | 0x00bababa00bababa, 0x007a7a7a007a7a7a, 0x00b3b3b300b3b3b3, | 469 | 0x00bababa00bababaULL, 0x007a7a7a007a7a7aULL, 0x00b3b3b300b3b3b3ULL, |
470 | 0x0002020200020202, 0x00b4b4b400b4b4b4, 0x00adadad00adadad, | 470 | 0x0002020200020202ULL, 0x00b4b4b400b4b4b4ULL, 0x00adadad00adadadULL, |
471 | 0x00a2a2a200a2a2a2, 0x00acacac00acacac, 0x00d8d8d800d8d8d8, | 471 | 0x00a2a2a200a2a2a2ULL, 0x00acacac00acacacULL, 0x00d8d8d800d8d8d8ULL, |
472 | 0x009a9a9a009a9a9a, 0x0017171700171717, 0x001a1a1a001a1a1a, | 472 | 0x009a9a9a009a9a9aULL, 0x0017171700171717ULL, 0x001a1a1a001a1a1aULL, |
473 | 0x0035353500353535, 0x00cccccc00cccccc, 0x00f7f7f700f7f7f7, | 473 | 0x0035353500353535ULL, 0x00cccccc00ccccccULL, 0x00f7f7f700f7f7f7ULL, |
474 | 0x0099999900999999, 0x0061616100616161, 0x005a5a5a005a5a5a, | 474 | 0x0099999900999999ULL, 0x0061616100616161ULL, 0x005a5a5a005a5a5aULL, |
475 | 0x00e8e8e800e8e8e8, 0x0024242400242424, 0x0056565600565656, | 475 | 0x00e8e8e800e8e8e8ULL, 0x0024242400242424ULL, 0x0056565600565656ULL, |
476 | 0x0040404000404040, 0x00e1e1e100e1e1e1, 0x0063636300636363, | 476 | 0x0040404000404040ULL, 0x00e1e1e100e1e1e1ULL, 0x0063636300636363ULL, |
477 | 0x0009090900090909, 0x0033333300333333, 0x00bfbfbf00bfbfbf, | 477 | 0x0009090900090909ULL, 0x0033333300333333ULL, 0x00bfbfbf00bfbfbfULL, |
478 | 0x0098989800989898, 0x0097979700979797, 0x0085858500858585, | 478 | 0x0098989800989898ULL, 0x0097979700979797ULL, 0x0085858500858585ULL, |
479 | 0x0068686800686868, 0x00fcfcfc00fcfcfc, 0x00ececec00ececec, | 479 | 0x0068686800686868ULL, 0x00fcfcfc00fcfcfcULL, 0x00ececec00ecececULL, |
480 | 0x000a0a0a000a0a0a, 0x00dadada00dadada, 0x006f6f6f006f6f6f, | 480 | 0x000a0a0a000a0a0aULL, 0x00dadada00dadadaULL, 0x006f6f6f006f6f6fULL, |
481 | 0x0053535300535353, 0x0062626200626262, 0x00a3a3a300a3a3a3, | 481 | 0x0053535300535353ULL, 0x0062626200626262ULL, 0x00a3a3a300a3a3a3ULL, |
482 | 0x002e2e2e002e2e2e, 0x0008080800080808, 0x00afafaf00afafaf, | 482 | 0x002e2e2e002e2e2eULL, 0x0008080800080808ULL, 0x00afafaf00afafafULL, |
483 | 0x0028282800282828, 0x00b0b0b000b0b0b0, 0x0074747400747474, | 483 | 0x0028282800282828ULL, 0x00b0b0b000b0b0b0ULL, 0x0074747400747474ULL, |
484 | 0x00c2c2c200c2c2c2, 0x00bdbdbd00bdbdbd, 0x0036363600363636, | 484 | 0x00c2c2c200c2c2c2ULL, 0x00bdbdbd00bdbdbdULL, 0x0036363600363636ULL, |
485 | 0x0022222200222222, 0x0038383800383838, 0x0064646400646464, | 485 | 0x0022222200222222ULL, 0x0038383800383838ULL, 0x0064646400646464ULL, |
486 | 0x001e1e1e001e1e1e, 0x0039393900393939, 0x002c2c2c002c2c2c, | 486 | 0x001e1e1e001e1e1eULL, 0x0039393900393939ULL, 0x002c2c2c002c2c2cULL, |
487 | 0x00a6a6a600a6a6a6, 0x0030303000303030, 0x00e5e5e500e5e5e5, | 487 | 0x00a6a6a600a6a6a6ULL, 0x0030303000303030ULL, 0x00e5e5e500e5e5e5ULL, |
488 | 0x0044444400444444, 0x00fdfdfd00fdfdfd, 0x0088888800888888, | 488 | 0x0044444400444444ULL, 0x00fdfdfd00fdfdfdULL, 0x0088888800888888ULL, |
489 | 0x009f9f9f009f9f9f, 0x0065656500656565, 0x0087878700878787, | 489 | 0x009f9f9f009f9f9fULL, 0x0065656500656565ULL, 0x0087878700878787ULL, |
490 | 0x006b6b6b006b6b6b, 0x00f4f4f400f4f4f4, 0x0023232300232323, | 490 | 0x006b6b6b006b6b6bULL, 0x00f4f4f400f4f4f4ULL, 0x0023232300232323ULL, |
491 | 0x0048484800484848, 0x0010101000101010, 0x00d1d1d100d1d1d1, | 491 | 0x0048484800484848ULL, 0x0010101000101010ULL, 0x00d1d1d100d1d1d1ULL, |
492 | 0x0051515100515151, 0x00c0c0c000c0c0c0, 0x00f9f9f900f9f9f9, | 492 | 0x0051515100515151ULL, 0x00c0c0c000c0c0c0ULL, 0x00f9f9f900f9f9f9ULL, |
493 | 0x00d2d2d200d2d2d2, 0x00a0a0a000a0a0a0, 0x0055555500555555, | 493 | 0x00d2d2d200d2d2d2ULL, 0x00a0a0a000a0a0a0ULL, 0x0055555500555555ULL, |
494 | 0x00a1a1a100a1a1a1, 0x0041414100414141, 0x00fafafa00fafafa, | 494 | 0x00a1a1a100a1a1a1ULL, 0x0041414100414141ULL, 0x00fafafa00fafafaULL, |
495 | 0x0043434300434343, 0x0013131300131313, 0x00c4c4c400c4c4c4, | 495 | 0x0043434300434343ULL, 0x0013131300131313ULL, 0x00c4c4c400c4c4c4ULL, |
496 | 0x002f2f2f002f2f2f, 0x00a8a8a800a8a8a8, 0x00b6b6b600b6b6b6, | 496 | 0x002f2f2f002f2f2fULL, 0x00a8a8a800a8a8a8ULL, 0x00b6b6b600b6b6b6ULL, |
497 | 0x003c3c3c003c3c3c, 0x002b2b2b002b2b2b, 0x00c1c1c100c1c1c1, | 497 | 0x003c3c3c003c3c3cULL, 0x002b2b2b002b2b2bULL, 0x00c1c1c100c1c1c1ULL, |
498 | 0x00ffffff00ffffff, 0x00c8c8c800c8c8c8, 0x00a5a5a500a5a5a5, | 498 | 0x00ffffff00ffffffULL, 0x00c8c8c800c8c8c8ULL, 0x00a5a5a500a5a5a5ULL, |
499 | 0x0020202000202020, 0x0089898900898989, 0x0000000000000000, | 499 | 0x0020202000202020ULL, 0x0089898900898989ULL, 0x0000000000000000ULL, |
500 | 0x0090909000909090, 0x0047474700474747, 0x00efefef00efefef, | 500 | 0x0090909000909090ULL, 0x0047474700474747ULL, 0x00efefef00efefefULL, |
501 | 0x00eaeaea00eaeaea, 0x00b7b7b700b7b7b7, 0x0015151500151515, | 501 | 0x00eaeaea00eaeaeaULL, 0x00b7b7b700b7b7b7ULL, 0x0015151500151515ULL, |
502 | 0x0006060600060606, 0x00cdcdcd00cdcdcd, 0x00b5b5b500b5b5b5, | 502 | 0x0006060600060606ULL, 0x00cdcdcd00cdcdcdULL, 0x00b5b5b500b5b5b5ULL, |
503 | 0x0012121200121212, 0x007e7e7e007e7e7e, 0x00bbbbbb00bbbbbb, | 503 | 0x0012121200121212ULL, 0x007e7e7e007e7e7eULL, 0x00bbbbbb00bbbbbbULL, |
504 | 0x0029292900292929, 0x000f0f0f000f0f0f, 0x00b8b8b800b8b8b8, | 504 | 0x0029292900292929ULL, 0x000f0f0f000f0f0fULL, 0x00b8b8b800b8b8b8ULL, |
505 | 0x0007070700070707, 0x0004040400040404, 0x009b9b9b009b9b9b, | 505 | 0x0007070700070707ULL, 0x0004040400040404ULL, 0x009b9b9b009b9b9bULL, |
506 | 0x0094949400949494, 0x0021212100212121, 0x0066666600666666, | 506 | 0x0094949400949494ULL, 0x0021212100212121ULL, 0x0066666600666666ULL, |
507 | 0x00e6e6e600e6e6e6, 0x00cecece00cecece, 0x00ededed00ededed, | 507 | 0x00e6e6e600e6e6e6ULL, 0x00cecece00cececeULL, 0x00ededed00edededULL, |
508 | 0x00e7e7e700e7e7e7, 0x003b3b3b003b3b3b, 0x00fefefe00fefefe, | 508 | 0x00e7e7e700e7e7e7ULL, 0x003b3b3b003b3b3bULL, 0x00fefefe00fefefeULL, |
509 | 0x007f7f7f007f7f7f, 0x00c5c5c500c5c5c5, 0x00a4a4a400a4a4a4, | 509 | 0x007f7f7f007f7f7fULL, 0x00c5c5c500c5c5c5ULL, 0x00a4a4a400a4a4a4ULL, |
510 | 0x0037373700373737, 0x00b1b1b100b1b1b1, 0x004c4c4c004c4c4c, | 510 | 0x0037373700373737ULL, 0x00b1b1b100b1b1b1ULL, 0x004c4c4c004c4c4cULL, |
511 | 0x0091919100919191, 0x006e6e6e006e6e6e, 0x008d8d8d008d8d8d, | 511 | 0x0091919100919191ULL, 0x006e6e6e006e6e6eULL, 0x008d8d8d008d8d8dULL, |
512 | 0x0076767600767676, 0x0003030300030303, 0x002d2d2d002d2d2d, | 512 | 0x0076767600767676ULL, 0x0003030300030303ULL, 0x002d2d2d002d2d2dULL, |
513 | 0x00dedede00dedede, 0x0096969600969696, 0x0026262600262626, | 513 | 0x00dedede00dededeULL, 0x0096969600969696ULL, 0x0026262600262626ULL, |
514 | 0x007d7d7d007d7d7d, 0x00c6c6c600c6c6c6, 0x005c5c5c005c5c5c, | 514 | 0x007d7d7d007d7d7dULL, 0x00c6c6c600c6c6c6ULL, 0x005c5c5c005c5c5cULL, |
515 | 0x00d3d3d300d3d3d3, 0x00f2f2f200f2f2f2, 0x004f4f4f004f4f4f, | 515 | 0x00d3d3d300d3d3d3ULL, 0x00f2f2f200f2f2f2ULL, 0x004f4f4f004f4f4fULL, |
516 | 0x0019191900191919, 0x003f3f3f003f3f3f, 0x00dcdcdc00dcdcdc, | 516 | 0x0019191900191919ULL, 0x003f3f3f003f3f3fULL, 0x00dcdcdc00dcdcdcULL, |
517 | 0x0079797900797979, 0x001d1d1d001d1d1d, 0x0052525200525252, | 517 | 0x0079797900797979ULL, 0x001d1d1d001d1d1dULL, 0x0052525200525252ULL, |
518 | 0x00ebebeb00ebebeb, 0x00f3f3f300f3f3f3, 0x006d6d6d006d6d6d, | 518 | 0x00ebebeb00ebebebULL, 0x00f3f3f300f3f3f3ULL, 0x006d6d6d006d6d6dULL, |
519 | 0x005e5e5e005e5e5e, 0x00fbfbfb00fbfbfb, 0x0069696900696969, | 519 | 0x005e5e5e005e5e5eULL, 0x00fbfbfb00fbfbfbULL, 0x0069696900696969ULL, |
520 | 0x00b2b2b200b2b2b2, 0x00f0f0f000f0f0f0, 0x0031313100313131, | 520 | 0x00b2b2b200b2b2b2ULL, 0x00f0f0f000f0f0f0ULL, 0x0031313100313131ULL, |
521 | 0x000c0c0c000c0c0c, 0x00d4d4d400d4d4d4, 0x00cfcfcf00cfcfcf, | 521 | 0x000c0c0c000c0c0cULL, 0x00d4d4d400d4d4d4ULL, 0x00cfcfcf00cfcfcfULL, |
522 | 0x008c8c8c008c8c8c, 0x00e2e2e200e2e2e2, 0x0075757500757575, | 522 | 0x008c8c8c008c8c8cULL, 0x00e2e2e200e2e2e2ULL, 0x0075757500757575ULL, |
523 | 0x00a9a9a900a9a9a9, 0x004a4a4a004a4a4a, 0x0057575700575757, | 523 | 0x00a9a9a900a9a9a9ULL, 0x004a4a4a004a4a4aULL, 0x0057575700575757ULL, |
524 | 0x0084848400848484, 0x0011111100111111, 0x0045454500454545, | 524 | 0x0084848400848484ULL, 0x0011111100111111ULL, 0x0045454500454545ULL, |
525 | 0x001b1b1b001b1b1b, 0x00f5f5f500f5f5f5, 0x00e4e4e400e4e4e4, | 525 | 0x001b1b1b001b1b1bULL, 0x00f5f5f500f5f5f5ULL, 0x00e4e4e400e4e4e4ULL, |
526 | 0x000e0e0e000e0e0e, 0x0073737300737373, 0x00aaaaaa00aaaaaa, | 526 | 0x000e0e0e000e0e0eULL, 0x0073737300737373ULL, 0x00aaaaaa00aaaaaaULL, |
527 | 0x00f1f1f100f1f1f1, 0x00dddddd00dddddd, 0x0059595900595959, | 527 | 0x00f1f1f100f1f1f1ULL, 0x00dddddd00ddddddULL, 0x0059595900595959ULL, |
528 | 0x0014141400141414, 0x006c6c6c006c6c6c, 0x0092929200929292, | 528 | 0x0014141400141414ULL, 0x006c6c6c006c6c6cULL, 0x0092929200929292ULL, |
529 | 0x0054545400545454, 0x00d0d0d000d0d0d0, 0x0078787800787878, | 529 | 0x0054545400545454ULL, 0x00d0d0d000d0d0d0ULL, 0x0078787800787878ULL, |
530 | 0x0070707000707070, 0x00e3e3e300e3e3e3, 0x0049494900494949, | 530 | 0x0070707000707070ULL, 0x00e3e3e300e3e3e3ULL, 0x0049494900494949ULL, |
531 | 0x0080808000808080, 0x0050505000505050, 0x00a7a7a700a7a7a7, | 531 | 0x0080808000808080ULL, 0x0050505000505050ULL, 0x00a7a7a700a7a7a7ULL, |
532 | 0x00f6f6f600f6f6f6, 0x0077777700777777, 0x0093939300939393, | 532 | 0x00f6f6f600f6f6f6ULL, 0x0077777700777777ULL, 0x0093939300939393ULL, |
533 | 0x0086868600868686, 0x0083838300838383, 0x002a2a2a002a2a2a, | 533 | 0x0086868600868686ULL, 0x0083838300838383ULL, 0x002a2a2a002a2a2aULL, |
534 | 0x00c7c7c700c7c7c7, 0x005b5b5b005b5b5b, 0x00e9e9e900e9e9e9, | 534 | 0x00c7c7c700c7c7c7ULL, 0x005b5b5b005b5b5bULL, 0x00e9e9e900e9e9e9ULL, |
535 | 0x00eeeeee00eeeeee, 0x008f8f8f008f8f8f, 0x0001010100010101, | 535 | 0x00eeeeee00eeeeeeULL, 0x008f8f8f008f8f8fULL, 0x0001010100010101ULL, |
536 | 0x003d3d3d003d3d3d, | 536 | 0x003d3d3d003d3d3dULL, |
537 | }; | 537 | }; |
538 | 538 | ||
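Because each table already folds an s-box together with the byte positions that s-box feeds, a Camellia round body reduces to one lookup per input byte with the 64-bit results XORed together. The fragment below only illustrates that lookup-and-XOR shape with three tables from this hunk; the byte-to-table pairing shown is an assumption, and the authoritative pairing is the round macro defined elsewhere in this file:

#include <stdint.h>

extern const uint64_t camellia_sp22000222[256];
extern const uint64_t camellia_sp02220222[256];
extern const uint64_t camellia_sp30333033[256];

/* Assumed pairing, shown only for the shape of the access pattern. */
static uint64_t partial_subst(uint64_t x)
{
        return camellia_sp22000222[(x >> 48) & 0xff] ^
               camellia_sp02220222[(x >> 16) & 0xff] ^
               camellia_sp30333033[x & 0xff];
}
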
539 | const u64 camellia_sp30333033[256] = { | 539 | const u64 camellia_sp30333033[256] = { |
540 | 0x3800383838003838, 0x4100414141004141, 0x1600161616001616, | 540 | 0x3800383838003838ULL, 0x4100414141004141ULL, 0x1600161616001616ULL, |
541 | 0x7600767676007676, 0xd900d9d9d900d9d9, 0x9300939393009393, | 541 | 0x7600767676007676ULL, 0xd900d9d9d900d9d9ULL, 0x9300939393009393ULL, |
542 | 0x6000606060006060, 0xf200f2f2f200f2f2, 0x7200727272007272, | 542 | 0x6000606060006060ULL, 0xf200f2f2f200f2f2ULL, 0x7200727272007272ULL, |
543 | 0xc200c2c2c200c2c2, 0xab00ababab00abab, 0x9a009a9a9a009a9a, | 543 | 0xc200c2c2c200c2c2ULL, 0xab00ababab00ababULL, 0x9a009a9a9a009a9aULL, |
544 | 0x7500757575007575, 0x0600060606000606, 0x5700575757005757, | 544 | 0x7500757575007575ULL, 0x0600060606000606ULL, 0x5700575757005757ULL, |
545 | 0xa000a0a0a000a0a0, 0x9100919191009191, 0xf700f7f7f700f7f7, | 545 | 0xa000a0a0a000a0a0ULL, 0x9100919191009191ULL, 0xf700f7f7f700f7f7ULL, |
546 | 0xb500b5b5b500b5b5, 0xc900c9c9c900c9c9, 0xa200a2a2a200a2a2, | 546 | 0xb500b5b5b500b5b5ULL, 0xc900c9c9c900c9c9ULL, 0xa200a2a2a200a2a2ULL, |
547 | 0x8c008c8c8c008c8c, 0xd200d2d2d200d2d2, 0x9000909090009090, | 547 | 0x8c008c8c8c008c8cULL, 0xd200d2d2d200d2d2ULL, 0x9000909090009090ULL, |
548 | 0xf600f6f6f600f6f6, 0x0700070707000707, 0xa700a7a7a700a7a7, | 548 | 0xf600f6f6f600f6f6ULL, 0x0700070707000707ULL, 0xa700a7a7a700a7a7ULL, |
549 | 0x2700272727002727, 0x8e008e8e8e008e8e, 0xb200b2b2b200b2b2, | 549 | 0x2700272727002727ULL, 0x8e008e8e8e008e8eULL, 0xb200b2b2b200b2b2ULL, |
550 | 0x4900494949004949, 0xde00dedede00dede, 0x4300434343004343, | 550 | 0x4900494949004949ULL, 0xde00dedede00dedeULL, 0x4300434343004343ULL, |
551 | 0x5c005c5c5c005c5c, 0xd700d7d7d700d7d7, 0xc700c7c7c700c7c7, | 551 | 0x5c005c5c5c005c5cULL, 0xd700d7d7d700d7d7ULL, 0xc700c7c7c700c7c7ULL, |
552 | 0x3e003e3e3e003e3e, 0xf500f5f5f500f5f5, 0x8f008f8f8f008f8f, | 552 | 0x3e003e3e3e003e3eULL, 0xf500f5f5f500f5f5ULL, 0x8f008f8f8f008f8fULL, |
553 | 0x6700676767006767, 0x1f001f1f1f001f1f, 0x1800181818001818, | 553 | 0x6700676767006767ULL, 0x1f001f1f1f001f1fULL, 0x1800181818001818ULL, |
554 | 0x6e006e6e6e006e6e, 0xaf00afafaf00afaf, 0x2f002f2f2f002f2f, | 554 | 0x6e006e6e6e006e6eULL, 0xaf00afafaf00afafULL, 0x2f002f2f2f002f2fULL, |
555 | 0xe200e2e2e200e2e2, 0x8500858585008585, 0x0d000d0d0d000d0d, | 555 | 0xe200e2e2e200e2e2ULL, 0x8500858585008585ULL, 0x0d000d0d0d000d0dULL, |
556 | 0x5300535353005353, 0xf000f0f0f000f0f0, 0x9c009c9c9c009c9c, | 556 | 0x5300535353005353ULL, 0xf000f0f0f000f0f0ULL, 0x9c009c9c9c009c9cULL, |
557 | 0x6500656565006565, 0xea00eaeaea00eaea, 0xa300a3a3a300a3a3, | 557 | 0x6500656565006565ULL, 0xea00eaeaea00eaeaULL, 0xa300a3a3a300a3a3ULL, |
558 | 0xae00aeaeae00aeae, 0x9e009e9e9e009e9e, 0xec00ececec00ecec, | 558 | 0xae00aeaeae00aeaeULL, 0x9e009e9e9e009e9eULL, 0xec00ececec00ececULL, |
559 | 0x8000808080008080, 0x2d002d2d2d002d2d, 0x6b006b6b6b006b6b, | 559 | 0x8000808080008080ULL, 0x2d002d2d2d002d2dULL, 0x6b006b6b6b006b6bULL, |
560 | 0xa800a8a8a800a8a8, 0x2b002b2b2b002b2b, 0x3600363636003636, | 560 | 0xa800a8a8a800a8a8ULL, 0x2b002b2b2b002b2bULL, 0x3600363636003636ULL, |
561 | 0xa600a6a6a600a6a6, 0xc500c5c5c500c5c5, 0x8600868686008686, | 561 | 0xa600a6a6a600a6a6ULL, 0xc500c5c5c500c5c5ULL, 0x8600868686008686ULL, |
562 | 0x4d004d4d4d004d4d, 0x3300333333003333, 0xfd00fdfdfd00fdfd, | 562 | 0x4d004d4d4d004d4dULL, 0x3300333333003333ULL, 0xfd00fdfdfd00fdfdULL, |
563 | 0x6600666666006666, 0x5800585858005858, 0x9600969696009696, | 563 | 0x6600666666006666ULL, 0x5800585858005858ULL, 0x9600969696009696ULL, |
564 | 0x3a003a3a3a003a3a, 0x0900090909000909, 0x9500959595009595, | 564 | 0x3a003a3a3a003a3aULL, 0x0900090909000909ULL, 0x9500959595009595ULL, |
565 | 0x1000101010001010, 0x7800787878007878, 0xd800d8d8d800d8d8, | 565 | 0x1000101010001010ULL, 0x7800787878007878ULL, 0xd800d8d8d800d8d8ULL, |
566 | 0x4200424242004242, 0xcc00cccccc00cccc, 0xef00efefef00efef, | 566 | 0x4200424242004242ULL, 0xcc00cccccc00ccccULL, 0xef00efefef00efefULL, |
567 | 0x2600262626002626, 0xe500e5e5e500e5e5, 0x6100616161006161, | 567 | 0x2600262626002626ULL, 0xe500e5e5e500e5e5ULL, 0x6100616161006161ULL, |
568 | 0x1a001a1a1a001a1a, 0x3f003f3f3f003f3f, 0x3b003b3b3b003b3b, | 568 | 0x1a001a1a1a001a1aULL, 0x3f003f3f3f003f3fULL, 0x3b003b3b3b003b3bULL, |
569 | 0x8200828282008282, 0xb600b6b6b600b6b6, 0xdb00dbdbdb00dbdb, | 569 | 0x8200828282008282ULL, 0xb600b6b6b600b6b6ULL, 0xdb00dbdbdb00dbdbULL, |
570 | 0xd400d4d4d400d4d4, 0x9800989898009898, 0xe800e8e8e800e8e8, | 570 | 0xd400d4d4d400d4d4ULL, 0x9800989898009898ULL, 0xe800e8e8e800e8e8ULL, |
571 | 0x8b008b8b8b008b8b, 0x0200020202000202, 0xeb00ebebeb00ebeb, | 571 | 0x8b008b8b8b008b8bULL, 0x0200020202000202ULL, 0xeb00ebebeb00ebebULL, |
572 | 0x0a000a0a0a000a0a, 0x2c002c2c2c002c2c, 0x1d001d1d1d001d1d, | 572 | 0x0a000a0a0a000a0aULL, 0x2c002c2c2c002c2cULL, 0x1d001d1d1d001d1dULL, |
573 | 0xb000b0b0b000b0b0, 0x6f006f6f6f006f6f, 0x8d008d8d8d008d8d, | 573 | 0xb000b0b0b000b0b0ULL, 0x6f006f6f6f006f6fULL, 0x8d008d8d8d008d8dULL, |
574 | 0x8800888888008888, 0x0e000e0e0e000e0e, 0x1900191919001919, | 574 | 0x8800888888008888ULL, 0x0e000e0e0e000e0eULL, 0x1900191919001919ULL, |
575 | 0x8700878787008787, 0x4e004e4e4e004e4e, 0x0b000b0b0b000b0b, | 575 | 0x8700878787008787ULL, 0x4e004e4e4e004e4eULL, 0x0b000b0b0b000b0bULL, |
576 | 0xa900a9a9a900a9a9, 0x0c000c0c0c000c0c, 0x7900797979007979, | 576 | 0xa900a9a9a900a9a9ULL, 0x0c000c0c0c000c0cULL, 0x7900797979007979ULL, |
577 | 0x1100111111001111, 0x7f007f7f7f007f7f, 0x2200222222002222, | 577 | 0x1100111111001111ULL, 0x7f007f7f7f007f7fULL, 0x2200222222002222ULL, |
578 | 0xe700e7e7e700e7e7, 0x5900595959005959, 0xe100e1e1e100e1e1, | 578 | 0xe700e7e7e700e7e7ULL, 0x5900595959005959ULL, 0xe100e1e1e100e1e1ULL, |
579 | 0xda00dadada00dada, 0x3d003d3d3d003d3d, 0xc800c8c8c800c8c8, | 579 | 0xda00dadada00dadaULL, 0x3d003d3d3d003d3dULL, 0xc800c8c8c800c8c8ULL, |
580 | 0x1200121212001212, 0x0400040404000404, 0x7400747474007474, | 580 | 0x1200121212001212ULL, 0x0400040404000404ULL, 0x7400747474007474ULL, |
581 | 0x5400545454005454, 0x3000303030003030, 0x7e007e7e7e007e7e, | 581 | 0x5400545454005454ULL, 0x3000303030003030ULL, 0x7e007e7e7e007e7eULL, |
582 | 0xb400b4b4b400b4b4, 0x2800282828002828, 0x5500555555005555, | 582 | 0xb400b4b4b400b4b4ULL, 0x2800282828002828ULL, 0x5500555555005555ULL, |
583 | 0x6800686868006868, 0x5000505050005050, 0xbe00bebebe00bebe, | 583 | 0x6800686868006868ULL, 0x5000505050005050ULL, 0xbe00bebebe00bebeULL, |
584 | 0xd000d0d0d000d0d0, 0xc400c4c4c400c4c4, 0x3100313131003131, | 584 | 0xd000d0d0d000d0d0ULL, 0xc400c4c4c400c4c4ULL, 0x3100313131003131ULL, |
585 | 0xcb00cbcbcb00cbcb, 0x2a002a2a2a002a2a, 0xad00adadad00adad, | 585 | 0xcb00cbcbcb00cbcbULL, 0x2a002a2a2a002a2aULL, 0xad00adadad00adadULL, |
586 | 0x0f000f0f0f000f0f, 0xca00cacaca00caca, 0x7000707070007070, | 586 | 0x0f000f0f0f000f0fULL, 0xca00cacaca00cacaULL, 0x7000707070007070ULL, |
587 | 0xff00ffffff00ffff, 0x3200323232003232, 0x6900696969006969, | 587 | 0xff00ffffff00ffffULL, 0x3200323232003232ULL, 0x6900696969006969ULL, |
588 | 0x0800080808000808, 0x6200626262006262, 0x0000000000000000, | 588 | 0x0800080808000808ULL, 0x6200626262006262ULL, 0x0000000000000000ULL, |
589 | 0x2400242424002424, 0xd100d1d1d100d1d1, 0xfb00fbfbfb00fbfb, | 589 | 0x2400242424002424ULL, 0xd100d1d1d100d1d1ULL, 0xfb00fbfbfb00fbfbULL, |
590 | 0xba00bababa00baba, 0xed00ededed00eded, 0x4500454545004545, | 590 | 0xba00bababa00babaULL, 0xed00ededed00ededULL, 0x4500454545004545ULL, |
591 | 0x8100818181008181, 0x7300737373007373, 0x6d006d6d6d006d6d, | 591 | 0x8100818181008181ULL, 0x7300737373007373ULL, 0x6d006d6d6d006d6dULL, |
592 | 0x8400848484008484, 0x9f009f9f9f009f9f, 0xee00eeeeee00eeee, | 592 | 0x8400848484008484ULL, 0x9f009f9f9f009f9fULL, 0xee00eeeeee00eeeeULL, |
593 | 0x4a004a4a4a004a4a, 0xc300c3c3c300c3c3, 0x2e002e2e2e002e2e, | 593 | 0x4a004a4a4a004a4aULL, 0xc300c3c3c300c3c3ULL, 0x2e002e2e2e002e2eULL, |
594 | 0xc100c1c1c100c1c1, 0x0100010101000101, 0xe600e6e6e600e6e6, | 594 | 0xc100c1c1c100c1c1ULL, 0x0100010101000101ULL, 0xe600e6e6e600e6e6ULL, |
595 | 0x2500252525002525, 0x4800484848004848, 0x9900999999009999, | 595 | 0x2500252525002525ULL, 0x4800484848004848ULL, 0x9900999999009999ULL, |
596 | 0xb900b9b9b900b9b9, 0xb300b3b3b300b3b3, 0x7b007b7b7b007b7b, | 596 | 0xb900b9b9b900b9b9ULL, 0xb300b3b3b300b3b3ULL, 0x7b007b7b7b007b7bULL, |
597 | 0xf900f9f9f900f9f9, 0xce00cecece00cece, 0xbf00bfbfbf00bfbf, | 597 | 0xf900f9f9f900f9f9ULL, 0xce00cecece00ceceULL, 0xbf00bfbfbf00bfbfULL, |
598 | 0xdf00dfdfdf00dfdf, 0x7100717171007171, 0x2900292929002929, | 598 | 0xdf00dfdfdf00dfdfULL, 0x7100717171007171ULL, 0x2900292929002929ULL, |
599 | 0xcd00cdcdcd00cdcd, 0x6c006c6c6c006c6c, 0x1300131313001313, | 599 | 0xcd00cdcdcd00cdcdULL, 0x6c006c6c6c006c6cULL, 0x1300131313001313ULL, |
600 | 0x6400646464006464, 0x9b009b9b9b009b9b, 0x6300636363006363, | 600 | 0x6400646464006464ULL, 0x9b009b9b9b009b9bULL, 0x6300636363006363ULL, |
601 | 0x9d009d9d9d009d9d, 0xc000c0c0c000c0c0, 0x4b004b4b4b004b4b, | 601 | 0x9d009d9d9d009d9dULL, 0xc000c0c0c000c0c0ULL, 0x4b004b4b4b004b4bULL, |
602 | 0xb700b7b7b700b7b7, 0xa500a5a5a500a5a5, 0x8900898989008989, | 602 | 0xb700b7b7b700b7b7ULL, 0xa500a5a5a500a5a5ULL, 0x8900898989008989ULL, |
603 | 0x5f005f5f5f005f5f, 0xb100b1b1b100b1b1, 0x1700171717001717, | 603 | 0x5f005f5f5f005f5fULL, 0xb100b1b1b100b1b1ULL, 0x1700171717001717ULL, |
604 | 0xf400f4f4f400f4f4, 0xbc00bcbcbc00bcbc, 0xd300d3d3d300d3d3, | 604 | 0xf400f4f4f400f4f4ULL, 0xbc00bcbcbc00bcbcULL, 0xd300d3d3d300d3d3ULL, |
605 | 0x4600464646004646, 0xcf00cfcfcf00cfcf, 0x3700373737003737, | 605 | 0x4600464646004646ULL, 0xcf00cfcfcf00cfcfULL, 0x3700373737003737ULL, |
606 | 0x5e005e5e5e005e5e, 0x4700474747004747, 0x9400949494009494, | 606 | 0x5e005e5e5e005e5eULL, 0x4700474747004747ULL, 0x9400949494009494ULL, |
607 | 0xfa00fafafa00fafa, 0xfc00fcfcfc00fcfc, 0x5b005b5b5b005b5b, | 607 | 0xfa00fafafa00fafaULL, 0xfc00fcfcfc00fcfcULL, 0x5b005b5b5b005b5bULL, |
608 | 0x9700979797009797, 0xfe00fefefe00fefe, 0x5a005a5a5a005a5a, | 608 | 0x9700979797009797ULL, 0xfe00fefefe00fefeULL, 0x5a005a5a5a005a5aULL, |
609 | 0xac00acacac00acac, 0x3c003c3c3c003c3c, 0x4c004c4c4c004c4c, | 609 | 0xac00acacac00acacULL, 0x3c003c3c3c003c3cULL, 0x4c004c4c4c004c4cULL, |
610 | 0x0300030303000303, 0x3500353535003535, 0xf300f3f3f300f3f3, | 610 | 0x0300030303000303ULL, 0x3500353535003535ULL, 0xf300f3f3f300f3f3ULL, |
611 | 0x2300232323002323, 0xb800b8b8b800b8b8, 0x5d005d5d5d005d5d, | 611 | 0x2300232323002323ULL, 0xb800b8b8b800b8b8ULL, 0x5d005d5d5d005d5dULL, |
612 | 0x6a006a6a6a006a6a, 0x9200929292009292, 0xd500d5d5d500d5d5, | 612 | 0x6a006a6a6a006a6aULL, 0x9200929292009292ULL, 0xd500d5d5d500d5d5ULL, |
613 | 0x2100212121002121, 0x4400444444004444, 0x5100515151005151, | 613 | 0x2100212121002121ULL, 0x4400444444004444ULL, 0x5100515151005151ULL, |
614 | 0xc600c6c6c600c6c6, 0x7d007d7d7d007d7d, 0x3900393939003939, | 614 | 0xc600c6c6c600c6c6ULL, 0x7d007d7d7d007d7dULL, 0x3900393939003939ULL, |
615 | 0x8300838383008383, 0xdc00dcdcdc00dcdc, 0xaa00aaaaaa00aaaa, | 615 | 0x8300838383008383ULL, 0xdc00dcdcdc00dcdcULL, 0xaa00aaaaaa00aaaaULL, |
616 | 0x7c007c7c7c007c7c, 0x7700777777007777, 0x5600565656005656, | 616 | 0x7c007c7c7c007c7cULL, 0x7700777777007777ULL, 0x5600565656005656ULL, |
617 | 0x0500050505000505, 0x1b001b1b1b001b1b, 0xa400a4a4a400a4a4, | 617 | 0x0500050505000505ULL, 0x1b001b1b1b001b1bULL, 0xa400a4a4a400a4a4ULL, |
618 | 0x1500151515001515, 0x3400343434003434, 0x1e001e1e1e001e1e, | 618 | 0x1500151515001515ULL, 0x3400343434003434ULL, 0x1e001e1e1e001e1eULL, |
619 | 0x1c001c1c1c001c1c, 0xf800f8f8f800f8f8, 0x5200525252005252, | 619 | 0x1c001c1c1c001c1cULL, 0xf800f8f8f800f8f8ULL, 0x5200525252005252ULL, |
620 | 0x2000202020002020, 0x1400141414001414, 0xe900e9e9e900e9e9, | 620 | 0x2000202020002020ULL, 0x1400141414001414ULL, 0xe900e9e9e900e9e9ULL, |
621 | 0xbd00bdbdbd00bdbd, 0xdd00dddddd00dddd, 0xe400e4e4e400e4e4, | 621 | 0xbd00bdbdbd00bdbdULL, 0xdd00dddddd00ddddULL, 0xe400e4e4e400e4e4ULL, |
622 | 0xa100a1a1a100a1a1, 0xe000e0e0e000e0e0, 0x8a008a8a8a008a8a, | 622 | 0xa100a1a1a100a1a1ULL, 0xe000e0e0e000e0e0ULL, 0x8a008a8a8a008a8aULL, |
623 | 0xf100f1f1f100f1f1, 0xd600d6d6d600d6d6, 0x7a007a7a7a007a7a, | 623 | 0xf100f1f1f100f1f1ULL, 0xd600d6d6d600d6d6ULL, 0x7a007a7a7a007a7aULL, |
624 | 0xbb00bbbbbb00bbbb, 0xe300e3e3e300e3e3, 0x4000404040004040, | 624 | 0xbb00bbbbbb00bbbbULL, 0xe300e3e3e300e3e3ULL, 0x4000404040004040ULL, |
625 | 0x4f004f4f4f004f4f, | 625 | 0x4f004f4f4f004f4fULL, |
626 | }; | 626 | }; |
627 | 627 | ||
628 | const u64 camellia_sp44044404[256] = { | 628 | const u64 camellia_sp44044404[256] = { |
629 | 0x7070007070700070, 0x2c2c002c2c2c002c, 0xb3b300b3b3b300b3, | 629 | 0x7070007070700070ULL, 0x2c2c002c2c2c002cULL, 0xb3b300b3b3b300b3ULL, |
630 | 0xc0c000c0c0c000c0, 0xe4e400e4e4e400e4, 0x5757005757570057, | 630 | 0xc0c000c0c0c000c0ULL, 0xe4e400e4e4e400e4ULL, 0x5757005757570057ULL, |
631 | 0xeaea00eaeaea00ea, 0xaeae00aeaeae00ae, 0x2323002323230023, | 631 | 0xeaea00eaeaea00eaULL, 0xaeae00aeaeae00aeULL, 0x2323002323230023ULL, |
632 | 0x6b6b006b6b6b006b, 0x4545004545450045, 0xa5a500a5a5a500a5, | 632 | 0x6b6b006b6b6b006bULL, 0x4545004545450045ULL, 0xa5a500a5a5a500a5ULL, |
633 | 0xeded00ededed00ed, 0x4f4f004f4f4f004f, 0x1d1d001d1d1d001d, | 633 | 0xeded00ededed00edULL, 0x4f4f004f4f4f004fULL, 0x1d1d001d1d1d001dULL, |
634 | 0x9292009292920092, 0x8686008686860086, 0xafaf00afafaf00af, | 634 | 0x9292009292920092ULL, 0x8686008686860086ULL, 0xafaf00afafaf00afULL, |
635 | 0x7c7c007c7c7c007c, 0x1f1f001f1f1f001f, 0x3e3e003e3e3e003e, | 635 | 0x7c7c007c7c7c007cULL, 0x1f1f001f1f1f001fULL, 0x3e3e003e3e3e003eULL, |
636 | 0xdcdc00dcdcdc00dc, 0x5e5e005e5e5e005e, 0x0b0b000b0b0b000b, | 636 | 0xdcdc00dcdcdc00dcULL, 0x5e5e005e5e5e005eULL, 0x0b0b000b0b0b000bULL, |
637 | 0xa6a600a6a6a600a6, 0x3939003939390039, 0xd5d500d5d5d500d5, | 637 | 0xa6a600a6a6a600a6ULL, 0x3939003939390039ULL, 0xd5d500d5d5d500d5ULL, |
638 | 0x5d5d005d5d5d005d, 0xd9d900d9d9d900d9, 0x5a5a005a5a5a005a, | 638 | 0x5d5d005d5d5d005dULL, 0xd9d900d9d9d900d9ULL, 0x5a5a005a5a5a005aULL, |
639 | 0x5151005151510051, 0x6c6c006c6c6c006c, 0x8b8b008b8b8b008b, | 639 | 0x5151005151510051ULL, 0x6c6c006c6c6c006cULL, 0x8b8b008b8b8b008bULL, |
640 | 0x9a9a009a9a9a009a, 0xfbfb00fbfbfb00fb, 0xb0b000b0b0b000b0, | 640 | 0x9a9a009a9a9a009aULL, 0xfbfb00fbfbfb00fbULL, 0xb0b000b0b0b000b0ULL, |
641 | 0x7474007474740074, 0x2b2b002b2b2b002b, 0xf0f000f0f0f000f0, | 641 | 0x7474007474740074ULL, 0x2b2b002b2b2b002bULL, 0xf0f000f0f0f000f0ULL, |
642 | 0x8484008484840084, 0xdfdf00dfdfdf00df, 0xcbcb00cbcbcb00cb, | 642 | 0x8484008484840084ULL, 0xdfdf00dfdfdf00dfULL, 0xcbcb00cbcbcb00cbULL, |
643 | 0x3434003434340034, 0x7676007676760076, 0x6d6d006d6d6d006d, | 643 | 0x3434003434340034ULL, 0x7676007676760076ULL, 0x6d6d006d6d6d006dULL, |
644 | 0xa9a900a9a9a900a9, 0xd1d100d1d1d100d1, 0x0404000404040004, | 644 | 0xa9a900a9a9a900a9ULL, 0xd1d100d1d1d100d1ULL, 0x0404000404040004ULL, |
645 | 0x1414001414140014, 0x3a3a003a3a3a003a, 0xdede00dedede00de, | 645 | 0x1414001414140014ULL, 0x3a3a003a3a3a003aULL, 0xdede00dedede00deULL, |
646 | 0x1111001111110011, 0x3232003232320032, 0x9c9c009c9c9c009c, | 646 | 0x1111001111110011ULL, 0x3232003232320032ULL, 0x9c9c009c9c9c009cULL, |
647 | 0x5353005353530053, 0xf2f200f2f2f200f2, 0xfefe00fefefe00fe, | 647 | 0x5353005353530053ULL, 0xf2f200f2f2f200f2ULL, 0xfefe00fefefe00feULL, |
648 | 0xcfcf00cfcfcf00cf, 0xc3c300c3c3c300c3, 0x7a7a007a7a7a007a, | 648 | 0xcfcf00cfcfcf00cfULL, 0xc3c300c3c3c300c3ULL, 0x7a7a007a7a7a007aULL, |
649 | 0x2424002424240024, 0xe8e800e8e8e800e8, 0x6060006060600060, | 649 | 0x2424002424240024ULL, 0xe8e800e8e8e800e8ULL, 0x6060006060600060ULL, |
650 | 0x6969006969690069, 0xaaaa00aaaaaa00aa, 0xa0a000a0a0a000a0, | 650 | 0x6969006969690069ULL, 0xaaaa00aaaaaa00aaULL, 0xa0a000a0a0a000a0ULL, |
651 | 0xa1a100a1a1a100a1, 0x6262006262620062, 0x5454005454540054, | 651 | 0xa1a100a1a1a100a1ULL, 0x6262006262620062ULL, 0x5454005454540054ULL, |
652 | 0x1e1e001e1e1e001e, 0xe0e000e0e0e000e0, 0x6464006464640064, | 652 | 0x1e1e001e1e1e001eULL, 0xe0e000e0e0e000e0ULL, 0x6464006464640064ULL, |
653 | 0x1010001010100010, 0x0000000000000000, 0xa3a300a3a3a300a3, | 653 | 0x1010001010100010ULL, 0x0000000000000000ULL, 0xa3a300a3a3a300a3ULL, |
654 | 0x7575007575750075, 0x8a8a008a8a8a008a, 0xe6e600e6e6e600e6, | 654 | 0x7575007575750075ULL, 0x8a8a008a8a8a008aULL, 0xe6e600e6e6e600e6ULL, |
655 | 0x0909000909090009, 0xdddd00dddddd00dd, 0x8787008787870087, | 655 | 0x0909000909090009ULL, 0xdddd00dddddd00ddULL, 0x8787008787870087ULL, |
656 | 0x8383008383830083, 0xcdcd00cdcdcd00cd, 0x9090009090900090, | 656 | 0x8383008383830083ULL, 0xcdcd00cdcdcd00cdULL, 0x9090009090900090ULL, |
657 | 0x7373007373730073, 0xf6f600f6f6f600f6, 0x9d9d009d9d9d009d, | 657 | 0x7373007373730073ULL, 0xf6f600f6f6f600f6ULL, 0x9d9d009d9d9d009dULL, |
658 | 0xbfbf00bfbfbf00bf, 0x5252005252520052, 0xd8d800d8d8d800d8, | 658 | 0xbfbf00bfbfbf00bfULL, 0x5252005252520052ULL, 0xd8d800d8d8d800d8ULL, |
659 | 0xc8c800c8c8c800c8, 0xc6c600c6c6c600c6, 0x8181008181810081, | 659 | 0xc8c800c8c8c800c8ULL, 0xc6c600c6c6c600c6ULL, 0x8181008181810081ULL, |
660 | 0x6f6f006f6f6f006f, 0x1313001313130013, 0x6363006363630063, | 660 | 0x6f6f006f6f6f006fULL, 0x1313001313130013ULL, 0x6363006363630063ULL, |
661 | 0xe9e900e9e9e900e9, 0xa7a700a7a7a700a7, 0x9f9f009f9f9f009f, | 661 | 0xe9e900e9e9e900e9ULL, 0xa7a700a7a7a700a7ULL, 0x9f9f009f9f9f009fULL, |
662 | 0xbcbc00bcbcbc00bc, 0x2929002929290029, 0xf9f900f9f9f900f9, | 662 | 0xbcbc00bcbcbc00bcULL, 0x2929002929290029ULL, 0xf9f900f9f9f900f9ULL, |
663 | 0x2f2f002f2f2f002f, 0xb4b400b4b4b400b4, 0x7878007878780078, | 663 | 0x2f2f002f2f2f002fULL, 0xb4b400b4b4b400b4ULL, 0x7878007878780078ULL, |
664 | 0x0606000606060006, 0xe7e700e7e7e700e7, 0x7171007171710071, | 664 | 0x0606000606060006ULL, 0xe7e700e7e7e700e7ULL, 0x7171007171710071ULL, |
665 | 0xd4d400d4d4d400d4, 0xabab00ababab00ab, 0x8888008888880088, | 665 | 0xd4d400d4d4d400d4ULL, 0xabab00ababab00abULL, 0x8888008888880088ULL, |
666 | 0x8d8d008d8d8d008d, 0x7272007272720072, 0xb9b900b9b9b900b9, | 666 | 0x8d8d008d8d8d008dULL, 0x7272007272720072ULL, 0xb9b900b9b9b900b9ULL, |
667 | 0xf8f800f8f8f800f8, 0xacac00acacac00ac, 0x3636003636360036, | 667 | 0xf8f800f8f8f800f8ULL, 0xacac00acacac00acULL, 0x3636003636360036ULL, |
668 | 0x2a2a002a2a2a002a, 0x3c3c003c3c3c003c, 0xf1f100f1f1f100f1, | 668 | 0x2a2a002a2a2a002aULL, 0x3c3c003c3c3c003cULL, 0xf1f100f1f1f100f1ULL, |
669 | 0x4040004040400040, 0xd3d300d3d3d300d3, 0xbbbb00bbbbbb00bb, | 669 | 0x4040004040400040ULL, 0xd3d300d3d3d300d3ULL, 0xbbbb00bbbbbb00bbULL, |
670 | 0x4343004343430043, 0x1515001515150015, 0xadad00adadad00ad, | 670 | 0x4343004343430043ULL, 0x1515001515150015ULL, 0xadad00adadad00adULL, |
671 | 0x7777007777770077, 0x8080008080800080, 0x8282008282820082, | 671 | 0x7777007777770077ULL, 0x8080008080800080ULL, 0x8282008282820082ULL, |
672 | 0xecec00ececec00ec, 0x2727002727270027, 0xe5e500e5e5e500e5, | 672 | 0xecec00ececec00ecULL, 0x2727002727270027ULL, 0xe5e500e5e5e500e5ULL, |
673 | 0x8585008585850085, 0x3535003535350035, 0x0c0c000c0c0c000c, | 673 | 0x8585008585850085ULL, 0x3535003535350035ULL, 0x0c0c000c0c0c000cULL, |
674 | 0x4141004141410041, 0xefef00efefef00ef, 0x9393009393930093, | 674 | 0x4141004141410041ULL, 0xefef00efefef00efULL, 0x9393009393930093ULL, |
675 | 0x1919001919190019, 0x2121002121210021, 0x0e0e000e0e0e000e, | 675 | 0x1919001919190019ULL, 0x2121002121210021ULL, 0x0e0e000e0e0e000eULL, |
676 | 0x4e4e004e4e4e004e, 0x6565006565650065, 0xbdbd00bdbdbd00bd, | 676 | 0x4e4e004e4e4e004eULL, 0x6565006565650065ULL, 0xbdbd00bdbdbd00bdULL, |
677 | 0xb8b800b8b8b800b8, 0x8f8f008f8f8f008f, 0xebeb00ebebeb00eb, | 677 | 0xb8b800b8b8b800b8ULL, 0x8f8f008f8f8f008fULL, 0xebeb00ebebeb00ebULL, |
678 | 0xcece00cecece00ce, 0x3030003030300030, 0x5f5f005f5f5f005f, | 678 | 0xcece00cecece00ceULL, 0x3030003030300030ULL, 0x5f5f005f5f5f005fULL, |
679 | 0xc5c500c5c5c500c5, 0x1a1a001a1a1a001a, 0xe1e100e1e1e100e1, | 679 | 0xc5c500c5c5c500c5ULL, 0x1a1a001a1a1a001aULL, 0xe1e100e1e1e100e1ULL, |
680 | 0xcaca00cacaca00ca, 0x4747004747470047, 0x3d3d003d3d3d003d, | 680 | 0xcaca00cacaca00caULL, 0x4747004747470047ULL, 0x3d3d003d3d3d003dULL, |
681 | 0x0101000101010001, 0xd6d600d6d6d600d6, 0x5656005656560056, | 681 | 0x0101000101010001ULL, 0xd6d600d6d6d600d6ULL, 0x5656005656560056ULL, |
682 | 0x4d4d004d4d4d004d, 0x0d0d000d0d0d000d, 0x6666006666660066, | 682 | 0x4d4d004d4d4d004dULL, 0x0d0d000d0d0d000dULL, 0x6666006666660066ULL, |
683 | 0xcccc00cccccc00cc, 0x2d2d002d2d2d002d, 0x1212001212120012, | 683 | 0xcccc00cccccc00ccULL, 0x2d2d002d2d2d002dULL, 0x1212001212120012ULL, |
684 | 0x2020002020200020, 0xb1b100b1b1b100b1, 0x9999009999990099, | 684 | 0x2020002020200020ULL, 0xb1b100b1b1b100b1ULL, 0x9999009999990099ULL, |
685 | 0x4c4c004c4c4c004c, 0xc2c200c2c2c200c2, 0x7e7e007e7e7e007e, | 685 | 0x4c4c004c4c4c004cULL, 0xc2c200c2c2c200c2ULL, 0x7e7e007e7e7e007eULL, |
686 | 0x0505000505050005, 0xb7b700b7b7b700b7, 0x3131003131310031, | 686 | 0x0505000505050005ULL, 0xb7b700b7b7b700b7ULL, 0x3131003131310031ULL, |
687 | 0x1717001717170017, 0xd7d700d7d7d700d7, 0x5858005858580058, | 687 | 0x1717001717170017ULL, 0xd7d700d7d7d700d7ULL, 0x5858005858580058ULL, |
688 | 0x6161006161610061, 0x1b1b001b1b1b001b, 0x1c1c001c1c1c001c, | 688 | 0x6161006161610061ULL, 0x1b1b001b1b1b001bULL, 0x1c1c001c1c1c001cULL, |
689 | 0x0f0f000f0f0f000f, 0x1616001616160016, 0x1818001818180018, | 689 | 0x0f0f000f0f0f000fULL, 0x1616001616160016ULL, 0x1818001818180018ULL, |
690 | 0x2222002222220022, 0x4444004444440044, 0xb2b200b2b2b200b2, | 690 | 0x2222002222220022ULL, 0x4444004444440044ULL, 0xb2b200b2b2b200b2ULL, |
691 | 0xb5b500b5b5b500b5, 0x9191009191910091, 0x0808000808080008, | 691 | 0xb5b500b5b5b500b5ULL, 0x9191009191910091ULL, 0x0808000808080008ULL, |
692 | 0xa8a800a8a8a800a8, 0xfcfc00fcfcfc00fc, 0x5050005050500050, | 692 | 0xa8a800a8a8a800a8ULL, 0xfcfc00fcfcfc00fcULL, 0x5050005050500050ULL, |
693 | 0xd0d000d0d0d000d0, 0x7d7d007d7d7d007d, 0x8989008989890089, | 693 | 0xd0d000d0d0d000d0ULL, 0x7d7d007d7d7d007dULL, 0x8989008989890089ULL, |
694 | 0x9797009797970097, 0x5b5b005b5b5b005b, 0x9595009595950095, | 694 | 0x9797009797970097ULL, 0x5b5b005b5b5b005bULL, 0x9595009595950095ULL, |
695 | 0xffff00ffffff00ff, 0xd2d200d2d2d200d2, 0xc4c400c4c4c400c4, | 695 | 0xffff00ffffff00ffULL, 0xd2d200d2d2d200d2ULL, 0xc4c400c4c4c400c4ULL, |
696 | 0x4848004848480048, 0xf7f700f7f7f700f7, 0xdbdb00dbdbdb00db, | 696 | 0x4848004848480048ULL, 0xf7f700f7f7f700f7ULL, 0xdbdb00dbdbdb00dbULL, |
697 | 0x0303000303030003, 0xdada00dadada00da, 0x3f3f003f3f3f003f, | 697 | 0x0303000303030003ULL, 0xdada00dadada00daULL, 0x3f3f003f3f3f003fULL, |
698 | 0x9494009494940094, 0x5c5c005c5c5c005c, 0x0202000202020002, | 698 | 0x9494009494940094ULL, 0x5c5c005c5c5c005cULL, 0x0202000202020002ULL, |
699 | 0x4a4a004a4a4a004a, 0x3333003333330033, 0x6767006767670067, | 699 | 0x4a4a004a4a4a004aULL, 0x3333003333330033ULL, 0x6767006767670067ULL, |
700 | 0xf3f300f3f3f300f3, 0x7f7f007f7f7f007f, 0xe2e200e2e2e200e2, | 700 | 0xf3f300f3f3f300f3ULL, 0x7f7f007f7f7f007fULL, 0xe2e200e2e2e200e2ULL, |
701 | 0x9b9b009b9b9b009b, 0x2626002626260026, 0x3737003737370037, | 701 | 0x9b9b009b9b9b009bULL, 0x2626002626260026ULL, 0x3737003737370037ULL, |
702 | 0x3b3b003b3b3b003b, 0x9696009696960096, 0x4b4b004b4b4b004b, | 702 | 0x3b3b003b3b3b003bULL, 0x9696009696960096ULL, 0x4b4b004b4b4b004bULL, |
703 | 0xbebe00bebebe00be, 0x2e2e002e2e2e002e, 0x7979007979790079, | 703 | 0xbebe00bebebe00beULL, 0x2e2e002e2e2e002eULL, 0x7979007979790079ULL, |
704 | 0x8c8c008c8c8c008c, 0x6e6e006e6e6e006e, 0x8e8e008e8e8e008e, | 704 | 0x8c8c008c8c8c008cULL, 0x6e6e006e6e6e006eULL, 0x8e8e008e8e8e008eULL, |
705 | 0xf5f500f5f5f500f5, 0xb6b600b6b6b600b6, 0xfdfd00fdfdfd00fd, | 705 | 0xf5f500f5f5f500f5ULL, 0xb6b600b6b6b600b6ULL, 0xfdfd00fdfdfd00fdULL, |
706 | 0x5959005959590059, 0x9898009898980098, 0x6a6a006a6a6a006a, | 706 | 0x5959005959590059ULL, 0x9898009898980098ULL, 0x6a6a006a6a6a006aULL, |
707 | 0x4646004646460046, 0xbaba00bababa00ba, 0x2525002525250025, | 707 | 0x4646004646460046ULL, 0xbaba00bababa00baULL, 0x2525002525250025ULL, |
708 | 0x4242004242420042, 0xa2a200a2a2a200a2, 0xfafa00fafafa00fa, | 708 | 0x4242004242420042ULL, 0xa2a200a2a2a200a2ULL, 0xfafa00fafafa00faULL, |
709 | 0x0707000707070007, 0x5555005555550055, 0xeeee00eeeeee00ee, | 709 | 0x0707000707070007ULL, 0x5555005555550055ULL, 0xeeee00eeeeee00eeULL, |
710 | 0x0a0a000a0a0a000a, 0x4949004949490049, 0x6868006868680068, | 710 | 0x0a0a000a0a0a000aULL, 0x4949004949490049ULL, 0x6868006868680068ULL, |
711 | 0x3838003838380038, 0xa4a400a4a4a400a4, 0x2828002828280028, | 711 | 0x3838003838380038ULL, 0xa4a400a4a4a400a4ULL, 0x2828002828280028ULL, |
712 | 0x7b7b007b7b7b007b, 0xc9c900c9c9c900c9, 0xc1c100c1c1c100c1, | 712 | 0x7b7b007b7b7b007bULL, 0xc9c900c9c9c900c9ULL, 0xc1c100c1c1c100c1ULL, |
713 | 0xe3e300e3e3e300e3, 0xf4f400f4f4f400f4, 0xc7c700c7c7c700c7, | 713 | 0xe3e300e3e3e300e3ULL, 0xf4f400f4f4f400f4ULL, 0xc7c700c7c7c700c7ULL, |
714 | 0x9e9e009e9e9e009e, | 714 | 0x9e9e009e9e9e009eULL, |
715 | }; | 715 | }; |
716 | 716 | ||
717 | const u64 camellia_sp11101110[256] = { | 717 | const u64 camellia_sp11101110[256] = { |
718 | 0x7070700070707000, 0x8282820082828200, 0x2c2c2c002c2c2c00, | 718 | 0x7070700070707000ULL, 0x8282820082828200ULL, 0x2c2c2c002c2c2c00ULL, |
719 | 0xececec00ececec00, 0xb3b3b300b3b3b300, 0x2727270027272700, | 719 | 0xececec00ececec00ULL, 0xb3b3b300b3b3b300ULL, 0x2727270027272700ULL, |
720 | 0xc0c0c000c0c0c000, 0xe5e5e500e5e5e500, 0xe4e4e400e4e4e400, | 720 | 0xc0c0c000c0c0c000ULL, 0xe5e5e500e5e5e500ULL, 0xe4e4e400e4e4e400ULL, |
721 | 0x8585850085858500, 0x5757570057575700, 0x3535350035353500, | 721 | 0x8585850085858500ULL, 0x5757570057575700ULL, 0x3535350035353500ULL, |
722 | 0xeaeaea00eaeaea00, 0x0c0c0c000c0c0c00, 0xaeaeae00aeaeae00, | 722 | 0xeaeaea00eaeaea00ULL, 0x0c0c0c000c0c0c00ULL, 0xaeaeae00aeaeae00ULL, |
723 | 0x4141410041414100, 0x2323230023232300, 0xefefef00efefef00, | 723 | 0x4141410041414100ULL, 0x2323230023232300ULL, 0xefefef00efefef00ULL, |
724 | 0x6b6b6b006b6b6b00, 0x9393930093939300, 0x4545450045454500, | 724 | 0x6b6b6b006b6b6b00ULL, 0x9393930093939300ULL, 0x4545450045454500ULL, |
725 | 0x1919190019191900, 0xa5a5a500a5a5a500, 0x2121210021212100, | 725 | 0x1919190019191900ULL, 0xa5a5a500a5a5a500ULL, 0x2121210021212100ULL, |
726 | 0xededed00ededed00, 0x0e0e0e000e0e0e00, 0x4f4f4f004f4f4f00, | 726 | 0xededed00ededed00ULL, 0x0e0e0e000e0e0e00ULL, 0x4f4f4f004f4f4f00ULL, |
727 | 0x4e4e4e004e4e4e00, 0x1d1d1d001d1d1d00, 0x6565650065656500, | 727 | 0x4e4e4e004e4e4e00ULL, 0x1d1d1d001d1d1d00ULL, 0x6565650065656500ULL, |
728 | 0x9292920092929200, 0xbdbdbd00bdbdbd00, 0x8686860086868600, | 728 | 0x9292920092929200ULL, 0xbdbdbd00bdbdbd00ULL, 0x8686860086868600ULL, |
729 | 0xb8b8b800b8b8b800, 0xafafaf00afafaf00, 0x8f8f8f008f8f8f00, | 729 | 0xb8b8b800b8b8b800ULL, 0xafafaf00afafaf00ULL, 0x8f8f8f008f8f8f00ULL, |
730 | 0x7c7c7c007c7c7c00, 0xebebeb00ebebeb00, 0x1f1f1f001f1f1f00, | 730 | 0x7c7c7c007c7c7c00ULL, 0xebebeb00ebebeb00ULL, 0x1f1f1f001f1f1f00ULL, |
731 | 0xcecece00cecece00, 0x3e3e3e003e3e3e00, 0x3030300030303000, | 731 | 0xcecece00cecece00ULL, 0x3e3e3e003e3e3e00ULL, 0x3030300030303000ULL, |
732 | 0xdcdcdc00dcdcdc00, 0x5f5f5f005f5f5f00, 0x5e5e5e005e5e5e00, | 732 | 0xdcdcdc00dcdcdc00ULL, 0x5f5f5f005f5f5f00ULL, 0x5e5e5e005e5e5e00ULL, |
733 | 0xc5c5c500c5c5c500, 0x0b0b0b000b0b0b00, 0x1a1a1a001a1a1a00, | 733 | 0xc5c5c500c5c5c500ULL, 0x0b0b0b000b0b0b00ULL, 0x1a1a1a001a1a1a00ULL, |
734 | 0xa6a6a600a6a6a600, 0xe1e1e100e1e1e100, 0x3939390039393900, | 734 | 0xa6a6a600a6a6a600ULL, 0xe1e1e100e1e1e100ULL, 0x3939390039393900ULL, |
735 | 0xcacaca00cacaca00, 0xd5d5d500d5d5d500, 0x4747470047474700, | 735 | 0xcacaca00cacaca00ULL, 0xd5d5d500d5d5d500ULL, 0x4747470047474700ULL, |
736 | 0x5d5d5d005d5d5d00, 0x3d3d3d003d3d3d00, 0xd9d9d900d9d9d900, | 736 | 0x5d5d5d005d5d5d00ULL, 0x3d3d3d003d3d3d00ULL, 0xd9d9d900d9d9d900ULL, |
737 | 0x0101010001010100, 0x5a5a5a005a5a5a00, 0xd6d6d600d6d6d600, | 737 | 0x0101010001010100ULL, 0x5a5a5a005a5a5a00ULL, 0xd6d6d600d6d6d600ULL, |
738 | 0x5151510051515100, 0x5656560056565600, 0x6c6c6c006c6c6c00, | 738 | 0x5151510051515100ULL, 0x5656560056565600ULL, 0x6c6c6c006c6c6c00ULL, |
739 | 0x4d4d4d004d4d4d00, 0x8b8b8b008b8b8b00, 0x0d0d0d000d0d0d00, | 739 | 0x4d4d4d004d4d4d00ULL, 0x8b8b8b008b8b8b00ULL, 0x0d0d0d000d0d0d00ULL, |
740 | 0x9a9a9a009a9a9a00, 0x6666660066666600, 0xfbfbfb00fbfbfb00, | 740 | 0x9a9a9a009a9a9a00ULL, 0x6666660066666600ULL, 0xfbfbfb00fbfbfb00ULL, |
741 | 0xcccccc00cccccc00, 0xb0b0b000b0b0b000, 0x2d2d2d002d2d2d00, | 741 | 0xcccccc00cccccc00ULL, 0xb0b0b000b0b0b000ULL, 0x2d2d2d002d2d2d00ULL, |
742 | 0x7474740074747400, 0x1212120012121200, 0x2b2b2b002b2b2b00, | 742 | 0x7474740074747400ULL, 0x1212120012121200ULL, 0x2b2b2b002b2b2b00ULL, |
743 | 0x2020200020202000, 0xf0f0f000f0f0f000, 0xb1b1b100b1b1b100, | 743 | 0x2020200020202000ULL, 0xf0f0f000f0f0f000ULL, 0xb1b1b100b1b1b100ULL, |
744 | 0x8484840084848400, 0x9999990099999900, 0xdfdfdf00dfdfdf00, | 744 | 0x8484840084848400ULL, 0x9999990099999900ULL, 0xdfdfdf00dfdfdf00ULL, |
745 | 0x4c4c4c004c4c4c00, 0xcbcbcb00cbcbcb00, 0xc2c2c200c2c2c200, | 745 | 0x4c4c4c004c4c4c00ULL, 0xcbcbcb00cbcbcb00ULL, 0xc2c2c200c2c2c200ULL, |
746 | 0x3434340034343400, 0x7e7e7e007e7e7e00, 0x7676760076767600, | 746 | 0x3434340034343400ULL, 0x7e7e7e007e7e7e00ULL, 0x7676760076767600ULL, |
747 | 0x0505050005050500, 0x6d6d6d006d6d6d00, 0xb7b7b700b7b7b700, | 747 | 0x0505050005050500ULL, 0x6d6d6d006d6d6d00ULL, 0xb7b7b700b7b7b700ULL, |
748 | 0xa9a9a900a9a9a900, 0x3131310031313100, 0xd1d1d100d1d1d100, | 748 | 0xa9a9a900a9a9a900ULL, 0x3131310031313100ULL, 0xd1d1d100d1d1d100ULL, |
749 | 0x1717170017171700, 0x0404040004040400, 0xd7d7d700d7d7d700, | 749 | 0x1717170017171700ULL, 0x0404040004040400ULL, 0xd7d7d700d7d7d700ULL, |
750 | 0x1414140014141400, 0x5858580058585800, 0x3a3a3a003a3a3a00, | 750 | 0x1414140014141400ULL, 0x5858580058585800ULL, 0x3a3a3a003a3a3a00ULL, |
751 | 0x6161610061616100, 0xdedede00dedede00, 0x1b1b1b001b1b1b00, | 751 | 0x6161610061616100ULL, 0xdedede00dedede00ULL, 0x1b1b1b001b1b1b00ULL, |
752 | 0x1111110011111100, 0x1c1c1c001c1c1c00, 0x3232320032323200, | 752 | 0x1111110011111100ULL, 0x1c1c1c001c1c1c00ULL, 0x3232320032323200ULL, |
753 | 0x0f0f0f000f0f0f00, 0x9c9c9c009c9c9c00, 0x1616160016161600, | 753 | 0x0f0f0f000f0f0f00ULL, 0x9c9c9c009c9c9c00ULL, 0x1616160016161600ULL, |
754 | 0x5353530053535300, 0x1818180018181800, 0xf2f2f200f2f2f200, | 754 | 0x5353530053535300ULL, 0x1818180018181800ULL, 0xf2f2f200f2f2f200ULL, |
755 | 0x2222220022222200, 0xfefefe00fefefe00, 0x4444440044444400, | 755 | 0x2222220022222200ULL, 0xfefefe00fefefe00ULL, 0x4444440044444400ULL, |
756 | 0xcfcfcf00cfcfcf00, 0xb2b2b200b2b2b200, 0xc3c3c300c3c3c300, | 756 | 0xcfcfcf00cfcfcf00ULL, 0xb2b2b200b2b2b200ULL, 0xc3c3c300c3c3c300ULL, |
757 | 0xb5b5b500b5b5b500, 0x7a7a7a007a7a7a00, 0x9191910091919100, | 757 | 0xb5b5b500b5b5b500ULL, 0x7a7a7a007a7a7a00ULL, 0x9191910091919100ULL, |
758 | 0x2424240024242400, 0x0808080008080800, 0xe8e8e800e8e8e800, | 758 | 0x2424240024242400ULL, 0x0808080008080800ULL, 0xe8e8e800e8e8e800ULL, |
759 | 0xa8a8a800a8a8a800, 0x6060600060606000, 0xfcfcfc00fcfcfc00, | 759 | 0xa8a8a800a8a8a800ULL, 0x6060600060606000ULL, 0xfcfcfc00fcfcfc00ULL, |
760 | 0x6969690069696900, 0x5050500050505000, 0xaaaaaa00aaaaaa00, | 760 | 0x6969690069696900ULL, 0x5050500050505000ULL, 0xaaaaaa00aaaaaa00ULL, |
761 | 0xd0d0d000d0d0d000, 0xa0a0a000a0a0a000, 0x7d7d7d007d7d7d00, | 761 | 0xd0d0d000d0d0d000ULL, 0xa0a0a000a0a0a000ULL, 0x7d7d7d007d7d7d00ULL, |
762 | 0xa1a1a100a1a1a100, 0x8989890089898900, 0x6262620062626200, | 762 | 0xa1a1a100a1a1a100ULL, 0x8989890089898900ULL, 0x6262620062626200ULL, |
763 | 0x9797970097979700, 0x5454540054545400, 0x5b5b5b005b5b5b00, | 763 | 0x9797970097979700ULL, 0x5454540054545400ULL, 0x5b5b5b005b5b5b00ULL, |
764 | 0x1e1e1e001e1e1e00, 0x9595950095959500, 0xe0e0e000e0e0e000, | 764 | 0x1e1e1e001e1e1e00ULL, 0x9595950095959500ULL, 0xe0e0e000e0e0e000ULL, |
765 | 0xffffff00ffffff00, 0x6464640064646400, 0xd2d2d200d2d2d200, | 765 | 0xffffff00ffffff00ULL, 0x6464640064646400ULL, 0xd2d2d200d2d2d200ULL, |
766 | 0x1010100010101000, 0xc4c4c400c4c4c400, 0x0000000000000000, | 766 | 0x1010100010101000ULL, 0xc4c4c400c4c4c400ULL, 0x0000000000000000ULL, |
767 | 0x4848480048484800, 0xa3a3a300a3a3a300, 0xf7f7f700f7f7f700, | 767 | 0x4848480048484800ULL, 0xa3a3a300a3a3a300ULL, 0xf7f7f700f7f7f700ULL, |
768 | 0x7575750075757500, 0xdbdbdb00dbdbdb00, 0x8a8a8a008a8a8a00, | 768 | 0x7575750075757500ULL, 0xdbdbdb00dbdbdb00ULL, 0x8a8a8a008a8a8a00ULL, |
769 | 0x0303030003030300, 0xe6e6e600e6e6e600, 0xdadada00dadada00, | 769 | 0x0303030003030300ULL, 0xe6e6e600e6e6e600ULL, 0xdadada00dadada00ULL, |
770 | 0x0909090009090900, 0x3f3f3f003f3f3f00, 0xdddddd00dddddd00, | 770 | 0x0909090009090900ULL, 0x3f3f3f003f3f3f00ULL, 0xdddddd00dddddd00ULL, |
771 | 0x9494940094949400, 0x8787870087878700, 0x5c5c5c005c5c5c00, | 771 | 0x9494940094949400ULL, 0x8787870087878700ULL, 0x5c5c5c005c5c5c00ULL, |
772 | 0x8383830083838300, 0x0202020002020200, 0xcdcdcd00cdcdcd00, | 772 | 0x8383830083838300ULL, 0x0202020002020200ULL, 0xcdcdcd00cdcdcd00ULL, |
773 | 0x4a4a4a004a4a4a00, 0x9090900090909000, 0x3333330033333300, | 773 | 0x4a4a4a004a4a4a00ULL, 0x9090900090909000ULL, 0x3333330033333300ULL, |
774 | 0x7373730073737300, 0x6767670067676700, 0xf6f6f600f6f6f600, | 774 | 0x7373730073737300ULL, 0x6767670067676700ULL, 0xf6f6f600f6f6f600ULL, |
775 | 0xf3f3f300f3f3f300, 0x9d9d9d009d9d9d00, 0x7f7f7f007f7f7f00, | 775 | 0xf3f3f300f3f3f300ULL, 0x9d9d9d009d9d9d00ULL, 0x7f7f7f007f7f7f00ULL, |
776 | 0xbfbfbf00bfbfbf00, 0xe2e2e200e2e2e200, 0x5252520052525200, | 776 | 0xbfbfbf00bfbfbf00ULL, 0xe2e2e200e2e2e200ULL, 0x5252520052525200ULL, |
777 | 0x9b9b9b009b9b9b00, 0xd8d8d800d8d8d800, 0x2626260026262600, | 777 | 0x9b9b9b009b9b9b00ULL, 0xd8d8d800d8d8d800ULL, 0x2626260026262600ULL, |
778 | 0xc8c8c800c8c8c800, 0x3737370037373700, 0xc6c6c600c6c6c600, | 778 | 0xc8c8c800c8c8c800ULL, 0x3737370037373700ULL, 0xc6c6c600c6c6c600ULL, |
779 | 0x3b3b3b003b3b3b00, 0x8181810081818100, 0x9696960096969600, | 779 | 0x3b3b3b003b3b3b00ULL, 0x8181810081818100ULL, 0x9696960096969600ULL, |
780 | 0x6f6f6f006f6f6f00, 0x4b4b4b004b4b4b00, 0x1313130013131300, | 780 | 0x6f6f6f006f6f6f00ULL, 0x4b4b4b004b4b4b00ULL, 0x1313130013131300ULL, |
781 | 0xbebebe00bebebe00, 0x6363630063636300, 0x2e2e2e002e2e2e00, | 781 | 0xbebebe00bebebe00ULL, 0x6363630063636300ULL, 0x2e2e2e002e2e2e00ULL, |
782 | 0xe9e9e900e9e9e900, 0x7979790079797900, 0xa7a7a700a7a7a700, | 782 | 0xe9e9e900e9e9e900ULL, 0x7979790079797900ULL, 0xa7a7a700a7a7a700ULL, |
783 | 0x8c8c8c008c8c8c00, 0x9f9f9f009f9f9f00, 0x6e6e6e006e6e6e00, | 783 | 0x8c8c8c008c8c8c00ULL, 0x9f9f9f009f9f9f00ULL, 0x6e6e6e006e6e6e00ULL, |
784 | 0xbcbcbc00bcbcbc00, 0x8e8e8e008e8e8e00, 0x2929290029292900, | 784 | 0xbcbcbc00bcbcbc00ULL, 0x8e8e8e008e8e8e00ULL, 0x2929290029292900ULL, |
785 | 0xf5f5f500f5f5f500, 0xf9f9f900f9f9f900, 0xb6b6b600b6b6b600, | 785 | 0xf5f5f500f5f5f500ULL, 0xf9f9f900f9f9f900ULL, 0xb6b6b600b6b6b600ULL, |
786 | 0x2f2f2f002f2f2f00, 0xfdfdfd00fdfdfd00, 0xb4b4b400b4b4b400, | 786 | 0x2f2f2f002f2f2f00ULL, 0xfdfdfd00fdfdfd00ULL, 0xb4b4b400b4b4b400ULL, |
787 | 0x5959590059595900, 0x7878780078787800, 0x9898980098989800, | 787 | 0x5959590059595900ULL, 0x7878780078787800ULL, 0x9898980098989800ULL, |
788 | 0x0606060006060600, 0x6a6a6a006a6a6a00, 0xe7e7e700e7e7e700, | 788 | 0x0606060006060600ULL, 0x6a6a6a006a6a6a00ULL, 0xe7e7e700e7e7e700ULL, |
789 | 0x4646460046464600, 0x7171710071717100, 0xbababa00bababa00, | 789 | 0x4646460046464600ULL, 0x7171710071717100ULL, 0xbababa00bababa00ULL, |
790 | 0xd4d4d400d4d4d400, 0x2525250025252500, 0xababab00ababab00, | 790 | 0xd4d4d400d4d4d400ULL, 0x2525250025252500ULL, 0xababab00ababab00ULL, |
791 | 0x4242420042424200, 0x8888880088888800, 0xa2a2a200a2a2a200, | 791 | 0x4242420042424200ULL, 0x8888880088888800ULL, 0xa2a2a200a2a2a200ULL, |
792 | 0x8d8d8d008d8d8d00, 0xfafafa00fafafa00, 0x7272720072727200, | 792 | 0x8d8d8d008d8d8d00ULL, 0xfafafa00fafafa00ULL, 0x7272720072727200ULL, |
793 | 0x0707070007070700, 0xb9b9b900b9b9b900, 0x5555550055555500, | 793 | 0x0707070007070700ULL, 0xb9b9b900b9b9b900ULL, 0x5555550055555500ULL, |
794 | 0xf8f8f800f8f8f800, 0xeeeeee00eeeeee00, 0xacacac00acacac00, | 794 | 0xf8f8f800f8f8f800ULL, 0xeeeeee00eeeeee00ULL, 0xacacac00acacac00ULL, |
795 | 0x0a0a0a000a0a0a00, 0x3636360036363600, 0x4949490049494900, | 795 | 0x0a0a0a000a0a0a00ULL, 0x3636360036363600ULL, 0x4949490049494900ULL, |
796 | 0x2a2a2a002a2a2a00, 0x6868680068686800, 0x3c3c3c003c3c3c00, | 796 | 0x2a2a2a002a2a2a00ULL, 0x6868680068686800ULL, 0x3c3c3c003c3c3c00ULL, |
797 | 0x3838380038383800, 0xf1f1f100f1f1f100, 0xa4a4a400a4a4a400, | 797 | 0x3838380038383800ULL, 0xf1f1f100f1f1f100ULL, 0xa4a4a400a4a4a400ULL, |
798 | 0x4040400040404000, 0x2828280028282800, 0xd3d3d300d3d3d300, | 798 | 0x4040400040404000ULL, 0x2828280028282800ULL, 0xd3d3d300d3d3d300ULL, |
799 | 0x7b7b7b007b7b7b00, 0xbbbbbb00bbbbbb00, 0xc9c9c900c9c9c900, | 799 | 0x7b7b7b007b7b7b00ULL, 0xbbbbbb00bbbbbb00ULL, 0xc9c9c900c9c9c900ULL, |
800 | 0x4343430043434300, 0xc1c1c100c1c1c100, 0x1515150015151500, | 800 | 0x4343430043434300ULL, 0xc1c1c100c1c1c100ULL, 0x1515150015151500ULL, |
801 | 0xe3e3e300e3e3e300, 0xadadad00adadad00, 0xf4f4f400f4f4f400, | 801 | 0xe3e3e300e3e3e300ULL, 0xadadad00adadad00ULL, 0xf4f4f400f4f4f400ULL, |
802 | 0x7777770077777700, 0xc7c7c700c7c7c700, 0x8080800080808000, | 802 | 0x7777770077777700ULL, 0xc7c7c700c7c7c700ULL, 0x8080800080808000ULL, |
803 | 0x9e9e9e009e9e9e00, | 803 | 0x9e9e9e009e9e9e00ULL, |
804 | }; | 804 | }; |
805 | 805 | ||
806 | /* key constants */ | 806 | /* key constants */ |
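
The camellia_sp* hunks above change no table values; every 64-bit constant merely gains a ULL suffix. The likely intent is to make the constants' 64-bit type explicit so that strict-C90 builds and checkers such as sparse neither warn about nor mis-type hex literals wider than 32 bits. A minimal illustration, not taken from the patch:

/*
 * Illustrative only: the value is identical, the suffix just pins the
 * constant to unsigned long long instead of leaving the type to the
 * compiler's promotion rules.
 */
#include <stdint.h>

static const uint64_t sp_entry_plain = 0x7070007070700070;    /* may warn under -std=c90 -pedantic */
static const uint64_t sp_entry_fixed = 0x7070007070700070ULL; /* explicitly unsigned long long */
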
@@ -1601,7 +1601,6 @@ static struct crypto_alg camellia_algs[6] = { { | |||
1601 | .cra_ctxsize = sizeof(struct camellia_ctx), | 1601 | .cra_ctxsize = sizeof(struct camellia_ctx), |
1602 | .cra_alignmask = 0, | 1602 | .cra_alignmask = 0, |
1603 | .cra_module = THIS_MODULE, | 1603 | .cra_module = THIS_MODULE, |
1604 | .cra_list = LIST_HEAD_INIT(camellia_algs[0].cra_list), | ||
1605 | .cra_u = { | 1604 | .cra_u = { |
1606 | .cipher = { | 1605 | .cipher = { |
1607 | .cia_min_keysize = CAMELLIA_MIN_KEY_SIZE, | 1606 | .cia_min_keysize = CAMELLIA_MIN_KEY_SIZE, |
@@ -1621,7 +1620,6 @@ static struct crypto_alg camellia_algs[6] = { { | |||
1621 | .cra_alignmask = 0, | 1620 | .cra_alignmask = 0, |
1622 | .cra_type = &crypto_blkcipher_type, | 1621 | .cra_type = &crypto_blkcipher_type, |
1623 | .cra_module = THIS_MODULE, | 1622 | .cra_module = THIS_MODULE, |
1624 | .cra_list = LIST_HEAD_INIT(camellia_algs[1].cra_list), | ||
1625 | .cra_u = { | 1623 | .cra_u = { |
1626 | .blkcipher = { | 1624 | .blkcipher = { |
1627 | .min_keysize = CAMELLIA_MIN_KEY_SIZE, | 1625 | .min_keysize = CAMELLIA_MIN_KEY_SIZE, |
@@ -1641,7 +1639,6 @@ static struct crypto_alg camellia_algs[6] = { { | |||
1641 | .cra_alignmask = 0, | 1639 | .cra_alignmask = 0, |
1642 | .cra_type = &crypto_blkcipher_type, | 1640 | .cra_type = &crypto_blkcipher_type, |
1643 | .cra_module = THIS_MODULE, | 1641 | .cra_module = THIS_MODULE, |
1644 | .cra_list = LIST_HEAD_INIT(camellia_algs[2].cra_list), | ||
1645 | .cra_u = { | 1642 | .cra_u = { |
1646 | .blkcipher = { | 1643 | .blkcipher = { |
1647 | .min_keysize = CAMELLIA_MIN_KEY_SIZE, | 1644 | .min_keysize = CAMELLIA_MIN_KEY_SIZE, |
@@ -1662,7 +1659,6 @@ static struct crypto_alg camellia_algs[6] = { { | |||
1662 | .cra_alignmask = 0, | 1659 | .cra_alignmask = 0, |
1663 | .cra_type = &crypto_blkcipher_type, | 1660 | .cra_type = &crypto_blkcipher_type, |
1664 | .cra_module = THIS_MODULE, | 1661 | .cra_module = THIS_MODULE, |
1665 | .cra_list = LIST_HEAD_INIT(camellia_algs[3].cra_list), | ||
1666 | .cra_u = { | 1662 | .cra_u = { |
1667 | .blkcipher = { | 1663 | .blkcipher = { |
1668 | .min_keysize = CAMELLIA_MIN_KEY_SIZE, | 1664 | .min_keysize = CAMELLIA_MIN_KEY_SIZE, |
@@ -1683,7 +1679,6 @@ static struct crypto_alg camellia_algs[6] = { { | |||
1683 | .cra_alignmask = 0, | 1679 | .cra_alignmask = 0, |
1684 | .cra_type = &crypto_blkcipher_type, | 1680 | .cra_type = &crypto_blkcipher_type, |
1685 | .cra_module = THIS_MODULE, | 1681 | .cra_module = THIS_MODULE, |
1686 | .cra_list = LIST_HEAD_INIT(camellia_algs[4].cra_list), | ||
1687 | .cra_exit = lrw_exit_tfm, | 1682 | .cra_exit = lrw_exit_tfm, |
1688 | .cra_u = { | 1683 | .cra_u = { |
1689 | .blkcipher = { | 1684 | .blkcipher = { |
@@ -1707,7 +1702,6 @@ static struct crypto_alg camellia_algs[6] = { { | |||
1707 | .cra_alignmask = 0, | 1702 | .cra_alignmask = 0, |
1708 | .cra_type = &crypto_blkcipher_type, | 1703 | .cra_type = &crypto_blkcipher_type, |
1709 | .cra_module = THIS_MODULE, | 1704 | .cra_module = THIS_MODULE, |
1710 | .cra_list = LIST_HEAD_INIT(camellia_algs[5].cra_list), | ||
1711 | .cra_u = { | 1705 | .cra_u = { |
1712 | .blkcipher = { | 1706 | .blkcipher = { |
1713 | .min_keysize = CAMELLIA_MIN_KEY_SIZE * 2, | 1707 | .min_keysize = CAMELLIA_MIN_KEY_SIZE * 2, |
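
The camellia_glue.c hunks above drop the static `.cra_list = LIST_HEAD_INIT(...)` initializers from camellia_algs[]. The crypto core links each algorithm onto its global list at registration time, and list_add() rewrites both list pointers of the entry, so the static initializer never had any effect. A rough sketch of that registration step, assuming the core's list handling has approximately this shape (the relevant code lives in crypto/algapi.c and is not part of this diff; the names below are illustrative):

/*
 * Hedged sketch: why a static .cra_list initializer is redundant.
 * Registration overwrites the list pointers unconditionally.
 */
#include <linux/list.h>
#include <linux/crypto.h>

static LIST_HEAD(example_alg_list);

static void example_register_alg(struct crypto_alg *alg)
{
	/* list_add() sets alg->cra_list.next/.prev itself, so whatever
	 * LIST_HEAD_INIT() put there is simply discarded. */
	list_add(&alg->cra_list, &example_alg_list);
}
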
diff --git a/arch/x86/crypto/cast5-avx-x86_64-asm_64.S b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S new file mode 100644 index 000000000000..a41a3aaba220 --- /dev/null +++ b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S | |||
@@ -0,0 +1,376 @@ | |||
1 | /* | ||
2 | * Cast5 Cipher 16-way parallel algorithm (AVX/x86_64) | ||
3 | * | ||
4 | * Copyright (C) 2012 Johannes Goetzfried | ||
5 | * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de> | ||
6 | * | ||
7 | * Copyright © 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi> | ||
8 | * | ||
9 | * This program is free software; you can redistribute it and/or modify | ||
10 | * it under the terms of the GNU General Public License as published by | ||
11 | * the Free Software Foundation; either version 2 of the License, or | ||
12 | * (at your option) any later version. | ||
13 | * | ||
14 | * This program is distributed in the hope that it will be useful, | ||
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
17 | * GNU General Public License for more details. | ||
18 | * | ||
19 | * You should have received a copy of the GNU General Public License | ||
20 | * along with this program; if not, write to the Free Software | ||
21 | * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 | ||
22 | * USA | ||
23 | * | ||
24 | */ | ||
25 | |||
26 | .file "cast5-avx-x86_64-asm_64.S" | ||
27 | |||
28 | .extern cast5_s1 | ||
29 | .extern cast5_s2 | ||
30 | .extern cast5_s3 | ||
31 | .extern cast5_s4 | ||
32 | |||
33 | /* structure of crypto context */ | ||
34 | #define km 0 | ||
35 | #define kr (16*4) | ||
36 | #define rr ((16*4)+16) | ||
37 | |||
38 | /* s-boxes */ | ||
39 | #define s1 cast5_s1 | ||
40 | #define s2 cast5_s2 | ||
41 | #define s3 cast5_s3 | ||
42 | #define s4 cast5_s4 | ||
43 | |||
44 | /********************************************************************** | ||
45 | 16-way AVX cast5 | ||
46 | **********************************************************************/ | ||
47 | #define CTX %rdi | ||
48 | |||
49 | #define RL1 %xmm0 | ||
50 | #define RR1 %xmm1 | ||
51 | #define RL2 %xmm2 | ||
52 | #define RR2 %xmm3 | ||
53 | #define RL3 %xmm4 | ||
54 | #define RR3 %xmm5 | ||
55 | #define RL4 %xmm6 | ||
56 | #define RR4 %xmm7 | ||
57 | |||
58 | #define RX %xmm8 | ||
59 | |||
60 | #define RKM %xmm9 | ||
61 | #define RKR %xmm10 | ||
62 | #define RKRF %xmm11 | ||
63 | #define RKRR %xmm12 | ||
64 | |||
65 | #define R32 %xmm13 | ||
66 | #define R1ST %xmm14 | ||
67 | |||
68 | #define RTMP %xmm15 | ||
69 | |||
70 | #define RID1 %rbp | ||
71 | #define RID1d %ebp | ||
72 | #define RID2 %rsi | ||
73 | #define RID2d %esi | ||
74 | |||
75 | #define RGI1 %rdx | ||
76 | #define RGI1bl %dl | ||
77 | #define RGI1bh %dh | ||
78 | #define RGI2 %rcx | ||
79 | #define RGI2bl %cl | ||
80 | #define RGI2bh %ch | ||
81 | |||
82 | #define RGI3 %rax | ||
83 | #define RGI3bl %al | ||
84 | #define RGI3bh %ah | ||
85 | #define RGI4 %rbx | ||
86 | #define RGI4bl %bl | ||
87 | #define RGI4bh %bh | ||
88 | |||
89 | #define RFS1 %r8 | ||
90 | #define RFS1d %r8d | ||
91 | #define RFS2 %r9 | ||
92 | #define RFS2d %r9d | ||
93 | #define RFS3 %r10 | ||
94 | #define RFS3d %r10d | ||
95 | |||
96 | |||
97 | #define lookup_32bit(src, dst, op1, op2, op3, interleave_op, il_reg) \ | ||
98 | movzbl src ## bh, RID1d; \ | ||
99 | movzbl src ## bl, RID2d; \ | ||
100 | shrq $16, src; \ | ||
101 | movl s1(, RID1, 4), dst ## d; \ | ||
102 | op1 s2(, RID2, 4), dst ## d; \ | ||
103 | movzbl src ## bh, RID1d; \ | ||
104 | movzbl src ## bl, RID2d; \ | ||
105 | interleave_op(il_reg); \ | ||
106 | op2 s3(, RID1, 4), dst ## d; \ | ||
107 | op3 s4(, RID2, 4), dst ## d; | ||
108 | |||
109 | #define dummy(d) /* do nothing */ | ||
110 | |||
111 | #define shr_next(reg) \ | ||
112 | shrq $16, reg; | ||
113 | |||
114 | #define F_head(a, x, gi1, gi2, op0) \ | ||
115 | op0 a, RKM, x; \ | ||
116 | vpslld RKRF, x, RTMP; \ | ||
117 | vpsrld RKRR, x, x; \ | ||
118 | vpor RTMP, x, x; \ | ||
119 | \ | ||
120 | vmovq x, gi1; \ | ||
121 | vpextrq $1, x, gi2; | ||
122 | |||
123 | #define F_tail(a, x, gi1, gi2, op1, op2, op3) \ | ||
124 | lookup_32bit(##gi1, RFS1, op1, op2, op3, shr_next, ##gi1); \ | ||
125 | lookup_32bit(##gi2, RFS3, op1, op2, op3, shr_next, ##gi2); \ | ||
126 | \ | ||
127 | lookup_32bit(##gi1, RFS2, op1, op2, op3, dummy, none); \ | ||
128 | shlq $32, RFS2; \ | ||
129 | orq RFS1, RFS2; \ | ||
130 | lookup_32bit(##gi2, RFS1, op1, op2, op3, dummy, none); \ | ||
131 | shlq $32, RFS1; \ | ||
132 | orq RFS1, RFS3; \ | ||
133 | \ | ||
134 | vmovq RFS2, x; \ | ||
135 | vpinsrq $1, RFS3, x, x; | ||
136 | |||
137 | #define F_2(a1, b1, a2, b2, op0, op1, op2, op3) \ | ||
138 | F_head(b1, RX, RGI1, RGI2, op0); \ | ||
139 | F_head(b2, RX, RGI3, RGI4, op0); \ | ||
140 | \ | ||
141 | F_tail(b1, RX, RGI1, RGI2, op1, op2, op3); \ | ||
142 | F_tail(b2, RTMP, RGI3, RGI4, op1, op2, op3); \ | ||
143 | \ | ||
144 | vpxor a1, RX, a1; \ | ||
145 | vpxor a2, RTMP, a2; | ||
146 | |||
147 | #define F1_2(a1, b1, a2, b2) \ | ||
148 | F_2(a1, b1, a2, b2, vpaddd, xorl, subl, addl) | ||
149 | #define F2_2(a1, b1, a2, b2) \ | ||
150 | F_2(a1, b1, a2, b2, vpxor, subl, addl, xorl) | ||
151 | #define F3_2(a1, b1, a2, b2) \ | ||
152 | F_2(a1, b1, a2, b2, vpsubd, addl, xorl, subl) | ||
153 | |||
154 | #define subround(a1, b1, a2, b2, f) \ | ||
155 | F ## f ## _2(a1, b1, a2, b2); | ||
156 | |||
157 | #define round(l, r, n, f) \ | ||
158 | vbroadcastss (km+(4*n))(CTX), RKM; \ | ||
159 | vpand R1ST, RKR, RKRF; \ | ||
160 | vpsubq RKRF, R32, RKRR; \ | ||
161 | vpsrldq $1, RKR, RKR; \ | ||
162 | subround(l ## 1, r ## 1, l ## 2, r ## 2, f); \ | ||
163 | subround(l ## 3, r ## 3, l ## 4, r ## 4, f); | ||
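
The F_head()/F_tail()/F_2() macros above evaluate the CAST5 round function for eight blocks at a time: op0 (vpaddd/vpxor/vpsubd in F1_2/F2_2/F3_2) mixes the masking key RKM into the data, the result is rotated left by the per-round count (shift-left by RKRF OR-ed with shift-right by RKRR), and op1..op3 select how the four byte-indexed S-box lookups done by lookup_32bit() are folded together; round() then XORs the result into the opposite half. For reference, a scalar C version of the same three round functions as defined in RFC 2144, section 2.2 (S1..S4 are the 256-entry CAST5 S-boxes, only declared here and supplied elsewhere, e.g. from RFC 2144 appendix A; none of this code is part of the patch):

/* Scalar reference for the round functions that F1_2/F2_2/F3_2
 * compute 8 blocks at a time.  Illustrative only. */
#include <stdint.h>

extern const uint32_t S1[256], S2[256], S3[256], S4[256];

static inline uint32_t rol32(uint32_t v, unsigned int n)
{
	n &= 31;
	return n ? (v << n) | (v >> (32 - n)) : v;
}

static uint32_t f1(uint32_t d, uint32_t km, uint8_t kr)	/* op0 = add */
{
	uint32_t i = rol32(km + d, kr);
	return ((S1[i >> 24] ^ S2[(i >> 16) & 0xff]) - S3[(i >> 8) & 0xff]) + S4[i & 0xff];
}

static uint32_t f2(uint32_t d, uint32_t km, uint8_t kr)	/* op0 = xor */
{
	uint32_t i = rol32(km ^ d, kr);
	return ((S1[i >> 24] - S2[(i >> 16) & 0xff]) + S3[(i >> 8) & 0xff]) ^ S4[i & 0xff];
}

static uint32_t f3(uint32_t d, uint32_t km, uint8_t kr)	/* op0 = sub */
{
	uint32_t i = rol32(km - d, kr);
	return ((S1[i >> 24] + S2[(i >> 16) & 0xff]) ^ S3[(i >> 8) & 0xff]) - S4[i & 0xff];
}

Each cipher round computes L ^= fN(R, Km[n], Kr[n]) and swaps the halves; the 16-way code keeps the halves in place and expresses the swap by alternating round(RL, RR, ...) with round(RR, RL, ...).
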
164 | |||
165 | #define enc_preload_rkr() \ | ||
166 | vbroadcastss .L16_mask, RKR; \ | ||
167 | /* add 16-bit rotation to key rotations (mod 32) */ \ | ||
168 | vpxor kr(CTX), RKR, RKR; | ||
169 | |||
170 | #define dec_preload_rkr() \ | ||
171 | vbroadcastss .L16_mask, RKR; \ | ||
172 | /* add 16-bit rotation to key rotations (mod 32) */ \ | ||
173 | vpxor kr(CTX), RKR, RKR; \ | ||
174 | vpshufb .Lbswap128_mask, RKR, RKR; | ||
175 | |||
176 | #define transpose_2x4(x0, x1, t0, t1) \ | ||
177 | vpunpckldq x1, x0, t0; \ | ||
178 | vpunpckhdq x1, x0, t1; \ | ||
179 | \ | ||
180 | vpunpcklqdq t1, t0, x0; \ | ||
181 | vpunpckhqdq t1, t0, x1; | ||
182 | |||
183 | #define inpack_blocks(in, x0, x1, t0, t1, rmask) \ | ||
184 | vmovdqu (0*4*4)(in), x0; \ | ||
185 | vmovdqu (1*4*4)(in), x1; \ | ||
186 | vpshufb rmask, x0, x0; \ | ||
187 | vpshufb rmask, x1, x1; \ | ||
188 | \ | ||
189 | transpose_2x4(x0, x1, t0, t1) | ||
190 | |||
191 | #define outunpack_blocks(out, x0, x1, t0, t1, rmask) \ | ||
192 | transpose_2x4(x0, x1, t0, t1) \ | ||
193 | \ | ||
194 | vpshufb rmask, x0, x0; \ | ||
195 | vpshufb rmask, x1, x1; \ | ||
196 | vmovdqu x0, (0*4*4)(out); \ | ||
197 | vmovdqu x1, (1*4*4)(out); | ||
198 | |||
199 | #define outunpack_xor_blocks(out, x0, x1, t0, t1, rmask) \ | ||
200 | transpose_2x4(x0, x1, t0, t1) \ | ||
201 | \ | ||
202 | vpshufb rmask, x0, x0; \ | ||
203 | vpshufb rmask, x1, x1; \ | ||
204 | vpxor (0*4*4)(out), x0, x0; \ | ||
205 | vmovdqu x0, (0*4*4)(out); \ | ||
206 | vpxor (1*4*4)(out), x1, x1; \ | ||
207 | vmovdqu x1, (1*4*4)(out); | ||
208 | |||
209 | .data | ||
210 | |||
211 | .align 16 | ||
212 | .Lbswap_mask: | ||
213 | .byte 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12 | ||
214 | .Lbswap128_mask: | ||
215 | .byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 | ||
216 | .L16_mask: | ||
217 | .byte 16, 16, 16, 16 | ||
218 | .L32_mask: | ||
219 | .byte 32, 0, 0, 0 | ||
220 | .Lfirst_mask: | ||
221 | .byte 0x1f, 0, 0, 0 | ||
222 | |||
223 | .text | ||
224 | |||
225 | .align 16 | ||
226 | .global __cast5_enc_blk_16way | ||
227 | .type __cast5_enc_blk_16way,@function; | ||
228 | |||
229 | __cast5_enc_blk_16way: | ||
230 | /* input: | ||
231 | * %rdi: ctx, CTX | ||
232 | * %rsi: dst | ||
233 | * %rdx: src | ||
234 | * %rcx: bool, if true: xor output | ||
235 | */ | ||
236 | |||
237 | pushq %rbp; | ||
238 | pushq %rbx; | ||
239 | pushq %rcx; | ||
240 | |||
241 | vmovdqa .Lbswap_mask, RKM; | ||
242 | vmovd .Lfirst_mask, R1ST; | ||
243 | vmovd .L32_mask, R32; | ||
244 | enc_preload_rkr(); | ||
245 | |||
246 | leaq 1*(2*4*4)(%rdx), %rax; | ||
247 | inpack_blocks(%rdx, RL1, RR1, RTMP, RX, RKM); | ||
248 | inpack_blocks(%rax, RL2, RR2, RTMP, RX, RKM); | ||
249 | leaq 2*(2*4*4)(%rdx), %rax; | ||
250 | inpack_blocks(%rax, RL3, RR3, RTMP, RX, RKM); | ||
251 | leaq 3*(2*4*4)(%rdx), %rax; | ||
252 | inpack_blocks(%rax, RL4, RR4, RTMP, RX, RKM); | ||
253 | |||
254 | movq %rsi, %r11; | ||
255 | |||
256 | round(RL, RR, 0, 1); | ||
257 | round(RR, RL, 1, 2); | ||
258 | round(RL, RR, 2, 3); | ||
259 | round(RR, RL, 3, 1); | ||
260 | round(RL, RR, 4, 2); | ||
261 | round(RR, RL, 5, 3); | ||
262 | round(RL, RR, 6, 1); | ||
263 | round(RR, RL, 7, 2); | ||
264 | round(RL, RR, 8, 3); | ||
265 | round(RR, RL, 9, 1); | ||
266 | round(RL, RR, 10, 2); | ||
267 | round(RR, RL, 11, 3); | ||
268 | |||
269 | movzbl rr(CTX), %eax; | ||
270 | testl %eax, %eax; | ||
271 | jnz __skip_enc; | ||
272 | |||
273 | round(RL, RR, 12, 1); | ||
274 | round(RR, RL, 13, 2); | ||
275 | round(RL, RR, 14, 3); | ||
276 | round(RR, RL, 15, 1); | ||
277 | |||
278 | __skip_enc: | ||
279 | popq %rcx; | ||
280 | popq %rbx; | ||
281 | popq %rbp; | ||
282 | |||
283 | vmovdqa .Lbswap_mask, RKM; | ||
284 | leaq 1*(2*4*4)(%r11), %rax; | ||
285 | |||
286 | testb %cl, %cl; | ||
287 | jnz __enc_xor16; | ||
288 | |||
289 | outunpack_blocks(%r11, RR1, RL1, RTMP, RX, RKM); | ||
290 | outunpack_blocks(%rax, RR2, RL2, RTMP, RX, RKM); | ||
291 | leaq 2*(2*4*4)(%r11), %rax; | ||
292 | outunpack_blocks(%rax, RR3, RL3, RTMP, RX, RKM); | ||
293 | leaq 3*(2*4*4)(%r11), %rax; | ||
294 | outunpack_blocks(%rax, RR4, RL4, RTMP, RX, RKM); | ||
295 | |||
296 | ret; | ||
297 | |||
298 | __enc_xor16: | ||
299 | outunpack_xor_blocks(%r11, RR1, RL1, RTMP, RX, RKM); | ||
300 | outunpack_xor_blocks(%rax, RR2, RL2, RTMP, RX, RKM); | ||
301 | leaq 2*(2*4*4)(%r11), %rax; | ||
302 | outunpack_xor_blocks(%rax, RR3, RL3, RTMP, RX, RKM); | ||
303 | leaq 3*(2*4*4)(%r11), %rax; | ||
304 | outunpack_xor_blocks(%rax, RR4, RL4, RTMP, RX, RKM); | ||
305 | |||
306 | ret; | ||
307 | |||
308 | .align 16 | ||
309 | .global cast5_dec_blk_16way | ||
310 | .type cast5_dec_blk_16way,@function; | ||
311 | |||
312 | cast5_dec_blk_16way: | ||
313 | /* input: | ||
314 | * %rdi: ctx, CTX | ||
315 | * %rsi: dst | ||
316 | * %rdx: src | ||
317 | */ | ||
318 | |||
319 | pushq %rbp; | ||
320 | pushq %rbx; | ||
321 | |||
322 | vmovdqa .Lbswap_mask, RKM; | ||
323 | vmovd .Lfirst_mask, R1ST; | ||
324 | vmovd .L32_mask, R32; | ||
325 | dec_preload_rkr(); | ||
326 | |||
327 | leaq 1*(2*4*4)(%rdx), %rax; | ||
328 | inpack_blocks(%rdx, RL1, RR1, RTMP, RX, RKM); | ||
329 | inpack_blocks(%rax, RL2, RR2, RTMP, RX, RKM); | ||
330 | leaq 2*(2*4*4)(%rdx), %rax; | ||
331 | inpack_blocks(%rax, RL3, RR3, RTMP, RX, RKM); | ||
332 | leaq 3*(2*4*4)(%rdx), %rax; | ||
333 | inpack_blocks(%rax, RL4, RR4, RTMP, RX, RKM); | ||
334 | |||
335 | movq %rsi, %r11; | ||
336 | |||
337 | movzbl rr(CTX), %eax; | ||
338 | testl %eax, %eax; | ||
339 | jnz __skip_dec; | ||
340 | |||
341 | round(RL, RR, 15, 1); | ||
342 | round(RR, RL, 14, 3); | ||
343 | round(RL, RR, 13, 2); | ||
344 | round(RR, RL, 12, 1); | ||
345 | |||
346 | __dec_tail: | ||
347 | round(RL, RR, 11, 3); | ||
348 | round(RR, RL, 10, 2); | ||
349 | round(RL, RR, 9, 1); | ||
350 | round(RR, RL, 8, 3); | ||
351 | round(RL, RR, 7, 2); | ||
352 | round(RR, RL, 6, 1); | ||
353 | round(RL, RR, 5, 3); | ||
354 | round(RR, RL, 4, 2); | ||
355 | round(RL, RR, 3, 1); | ||
356 | round(RR, RL, 2, 3); | ||
357 | round(RL, RR, 1, 2); | ||
358 | round(RR, RL, 0, 1); | ||
359 | |||
360 | vmovdqa .Lbswap_mask, RKM; | ||
361 | popq %rbx; | ||
362 | popq %rbp; | ||
363 | |||
364 | leaq 1*(2*4*4)(%r11), %rax; | ||
365 | outunpack_blocks(%r11, RR1, RL1, RTMP, RX, RKM); | ||
366 | outunpack_blocks(%rax, RR2, RL2, RTMP, RX, RKM); | ||
367 | leaq 2*(2*4*4)(%r11), %rax; | ||
368 | outunpack_blocks(%rax, RR3, RL3, RTMP, RX, RKM); | ||
369 | leaq 3*(2*4*4)(%r11), %rax; | ||
370 | outunpack_blocks(%rax, RR4, RL4, RTMP, RX, RKM); | ||
371 | |||
372 | ret; | ||
373 | |||
374 | __skip_dec: | ||
375 | vpsrldq $4, RKR, RKR; | ||
376 | jmp __dec_tail; | ||
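
Both entry points above read the rr byte of the context (offset (16*4)+16, defined at the top of the file) and branch around rounds 12..15 when it is non-zero: RFC 2144 specifies 12 rounds for key sizes of 80 bits or less and 16 rounds otherwise, so the key schedule records that decision once and the 16-way paths honour it (__skip_enc for encryption; __skip_dec additionally discards the four unused rotate bytes from RKR with vpsrldq before jumping into __dec_tail). A scalar sketch of the same decision, with the struct layout assumed to follow the generic cast5 context in <crypto/cast5.h> rather than taken from this diff:

/* Round-count selection per RFC 2144 section 2.5; the 'rr' flag tested
 * by __cast5_enc_blk_16way/cast5_dec_blk_16way caches this decision.
 * Field names are assumptions; illustrative only. */
#include <stdint.h>

struct cast5_ctx_sketch {
	uint32_t Km[16];
	uint8_t  Kr[16];
	int      rr;		/* rr != 0: reduced to 12 rounds */
};

static void cast5_pick_rounds(struct cast5_ctx_sketch *ctx,
			      unsigned int key_len_bytes)
{
	ctx->rr = (key_len_bytes * 8 <= 80);
}
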
diff --git a/arch/x86/crypto/cast5_avx_glue.c b/arch/x86/crypto/cast5_avx_glue.c new file mode 100644 index 000000000000..e0ea14f9547f --- /dev/null +++ b/arch/x86/crypto/cast5_avx_glue.c | |||
@@ -0,0 +1,530 @@ | |||
1 | /* | ||
2 | * Glue Code for the AVX assembler implementation of the Cast5 Cipher | ||
3 | * | ||
4 | * Copyright (C) 2012 Johannes Goetzfried | ||
5 | * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de> | ||
6 | * | ||
7 | * This program is free software; you can redistribute it and/or modify | ||
8 | * it under the terms of the GNU General Public License as published by | ||
9 | * the Free Software Foundation; either version 2 of the License, or | ||
10 | * (at your option) any later version. | ||
11 | * | ||
12 | * This program is distributed in the hope that it will be useful, | ||
13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
15 | * GNU General Public License for more details. | ||
16 | * | ||
17 | * You should have received a copy of the GNU General Public License | ||
18 | * along with this program; if not, write to the Free Software | ||
19 | * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 | ||
20 | * USA | ||
21 | * | ||
22 | */ | ||
23 | |||
24 | #include <linux/module.h> | ||
25 | #include <linux/hardirq.h> | ||
26 | #include <linux/types.h> | ||
27 | #include <linux/crypto.h> | ||
28 | #include <linux/err.h> | ||
29 | #include <crypto/algapi.h> | ||
30 | #include <crypto/cast5.h> | ||
31 | #include <crypto/cryptd.h> | ||
32 | #include <crypto/ctr.h> | ||
33 | #include <asm/xcr.h> | ||
34 | #include <asm/xsave.h> | ||
35 | #include <asm/crypto/ablk_helper.h> | ||
36 | #include <asm/crypto/glue_helper.h> | ||
37 | |||
38 | #define CAST5_PARALLEL_BLOCKS 16 | ||
39 | |||
40 | asmlinkage void __cast5_enc_blk_16way(struct cast5_ctx *ctx, u8 *dst, | ||
41 | const u8 *src, bool xor); | ||
42 | asmlinkage void cast5_dec_blk_16way(struct cast5_ctx *ctx, u8 *dst, | ||
43 | const u8 *src); | ||
44 | |||
45 | static inline void cast5_enc_blk_xway(struct cast5_ctx *ctx, u8 *dst, | ||
46 | const u8 *src) | ||
47 | { | ||
48 | __cast5_enc_blk_16way(ctx, dst, src, false); | ||
49 | } | ||
50 | |||
51 | static inline void cast5_enc_blk_xway_xor(struct cast5_ctx *ctx, u8 *dst, | ||
52 | const u8 *src) | ||
53 | { | ||
54 | __cast5_enc_blk_16way(ctx, dst, src, true); | ||
55 | } | ||
56 | |||
57 | static inline void cast5_dec_blk_xway(struct cast5_ctx *ctx, u8 *dst, | ||
58 | const u8 *src) | ||
59 | { | ||
60 | cast5_dec_blk_16way(ctx, dst, src); | ||
61 | } | ||
62 | |||
63 | |||
64 | static inline bool cast5_fpu_begin(bool fpu_enabled, unsigned int nbytes) | ||
65 | { | ||
66 | return glue_fpu_begin(CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS, | ||
67 | NULL, fpu_enabled, nbytes); | ||
68 | } | ||
69 | |||
70 | static inline void cast5_fpu_end(bool fpu_enabled) | ||
71 | { | ||
72 | return glue_fpu_end(fpu_enabled); | ||
73 | } | ||
74 | |||
75 | static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, | ||
76 | bool enc) | ||
77 | { | ||
78 | bool fpu_enabled = false; | ||
79 | struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
80 | const unsigned int bsize = CAST5_BLOCK_SIZE; | ||
81 | unsigned int nbytes; | ||
82 | int err; | ||
83 | |||
84 | err = blkcipher_walk_virt(desc, walk); | ||
85 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
86 | |||
87 | while ((nbytes = walk->nbytes)) { | ||
88 | u8 *wsrc = walk->src.virt.addr; | ||
89 | u8 *wdst = walk->dst.virt.addr; | ||
90 | |||
91 | fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes); | ||
92 | |||
93 | /* Process multi-block batch */ | ||
94 | if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { | ||
95 | do { | ||
96 | if (enc) | ||
97 | cast5_enc_blk_xway(ctx, wdst, wsrc); | ||
98 | else | ||
99 | cast5_dec_blk_xway(ctx, wdst, wsrc); | ||
100 | |||
101 | wsrc += bsize * CAST5_PARALLEL_BLOCKS; | ||
102 | wdst += bsize * CAST5_PARALLEL_BLOCKS; | ||
103 | nbytes -= bsize * CAST5_PARALLEL_BLOCKS; | ||
104 | } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); | ||
105 | |||
106 | if (nbytes < bsize) | ||
107 | goto done; | ||
108 | } | ||
109 | |||
110 | /* Handle leftovers */ | ||
111 | do { | ||
112 | if (enc) | ||
113 | __cast5_encrypt(ctx, wdst, wsrc); | ||
114 | else | ||
115 | __cast5_decrypt(ctx, wdst, wsrc); | ||
116 | |||
117 | wsrc += bsize; | ||
118 | wdst += bsize; | ||
119 | nbytes -= bsize; | ||
120 | } while (nbytes >= bsize); | ||
121 | |||
122 | done: | ||
123 | err = blkcipher_walk_done(desc, walk, nbytes); | ||
124 | } | ||
125 | |||
126 | cast5_fpu_end(fpu_enabled); | ||
127 | return err; | ||
128 | } | ||
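
ecb_crypt() keeps the costly kernel FPU save/restore out of the per-block path: cast5_fpu_begin() (glue_fpu_begin() with a CAST5_PARALLEL_BLOCKS threshold) only enables the FPU once a walk segment is large enough for at least one 16-block AVX batch, the enabled state is carried across loop iterations, and cast5_fpu_end() runs a single time after the walk; anything smaller than a full batch falls back to the scalar __cast5_encrypt()/__cast5_decrypt(). A condensed sketch of that shape, reusing the helpers declared earlier in this file (illustrative only, encryption only, no blkcipher_walk bookkeeping):

/* Condensed shape of the lazy FPU-enable pattern used by ecb_crypt(). */
static void ecb_encrypt_segment_sketch(struct cast5_ctx *ctx, u8 *dst,
				       u8 *src, unsigned int nbytes,
				       bool *fpu_enabled)
{
	const unsigned int batch = CAST5_BLOCK_SIZE * CAST5_PARALLEL_BLOCKS;

	*fpu_enabled = cast5_fpu_begin(*fpu_enabled, nbytes);

	while (nbytes >= batch) {		/* 16 blocks per AVX call */
		cast5_enc_blk_xway(ctx, dst, src);
		src += batch;
		dst += batch;
		nbytes -= batch;
	}
	while (nbytes >= CAST5_BLOCK_SIZE) {	/* scalar leftovers */
		__cast5_encrypt(ctx, dst, src);
		src += CAST5_BLOCK_SIZE;
		dst += CAST5_BLOCK_SIZE;
		nbytes -= CAST5_BLOCK_SIZE;
	}
	/* the caller runs cast5_fpu_end(*fpu_enabled) once, after all segments */
}
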
129 | |||
130 | static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
131 | struct scatterlist *src, unsigned int nbytes) | ||
132 | { | ||
133 | struct blkcipher_walk walk; | ||
134 | |||
135 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
136 | return ecb_crypt(desc, &walk, true); | ||
137 | } | ||
138 | |||
139 | static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
140 | struct scatterlist *src, unsigned int nbytes) | ||
141 | { | ||
142 | struct blkcipher_walk walk; | ||
143 | |||
144 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
145 | return ecb_crypt(desc, &walk, false); | ||
146 | } | ||
147 | |||
148 | static unsigned int __cbc_encrypt(struct blkcipher_desc *desc, | ||
149 | struct blkcipher_walk *walk) | ||
150 | { | ||
151 | struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
152 | const unsigned int bsize = CAST5_BLOCK_SIZE; | ||
153 | unsigned int nbytes = walk->nbytes; | ||
154 | u64 *src = (u64 *)walk->src.virt.addr; | ||
155 | u64 *dst = (u64 *)walk->dst.virt.addr; | ||
156 | u64 *iv = (u64 *)walk->iv; | ||
157 | |||
158 | do { | ||
159 | *dst = *src ^ *iv; | ||
160 | __cast5_encrypt(ctx, (u8 *)dst, (u8 *)dst); | ||
161 | iv = dst; | ||
162 | |||
163 | src += 1; | ||
164 | dst += 1; | ||
165 | nbytes -= bsize; | ||
166 | } while (nbytes >= bsize); | ||
167 | |||
168 | *(u64 *)walk->iv = *iv; | ||
169 | return nbytes; | ||
170 | } | ||
171 | |||
172 | static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
173 | struct scatterlist *src, unsigned int nbytes) | ||
174 | { | ||
175 | struct blkcipher_walk walk; | ||
176 | int err; | ||
177 | |||
178 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
179 | err = blkcipher_walk_virt(desc, &walk); | ||
180 | |||
181 | while ((nbytes = walk.nbytes)) { | ||
182 | nbytes = __cbc_encrypt(desc, &walk); | ||
183 | err = blkcipher_walk_done(desc, &walk, nbytes); | ||
184 | } | ||
185 | |||
186 | return err; | ||
187 | } | ||
188 | |||
189 | static unsigned int __cbc_decrypt(struct blkcipher_desc *desc, | ||
190 | struct blkcipher_walk *walk) | ||
191 | { | ||
192 | struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
193 | const unsigned int bsize = CAST5_BLOCK_SIZE; | ||
194 | unsigned int nbytes = walk->nbytes; | ||
195 | u64 *src = (u64 *)walk->src.virt.addr; | ||
196 | u64 *dst = (u64 *)walk->dst.virt.addr; | ||
197 | u64 ivs[CAST5_PARALLEL_BLOCKS - 1]; | ||
198 | u64 last_iv; | ||
199 | int i; | ||
200 | |||
201 | /* Start of the last block. */ | ||
202 | src += nbytes / bsize - 1; | ||
203 | dst += nbytes / bsize - 1; | ||
204 | |||
205 | last_iv = *src; | ||
206 | |||
207 | /* Process multi-block batch */ | ||
208 | if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { | ||
209 | do { | ||
210 | nbytes -= bsize * (CAST5_PARALLEL_BLOCKS - 1); | ||
211 | src -= CAST5_PARALLEL_BLOCKS - 1; | ||
212 | dst -= CAST5_PARALLEL_BLOCKS - 1; | ||
213 | |||
214 | for (i = 0; i < CAST5_PARALLEL_BLOCKS - 1; i++) | ||
215 | ivs[i] = src[i]; | ||
216 | |||
217 | cast5_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src); | ||
218 | |||
219 | for (i = 0; i < CAST5_PARALLEL_BLOCKS - 1; i++) | ||
220 | *(dst + (i + 1)) ^= *(ivs + i); | ||
221 | |||
222 | nbytes -= bsize; | ||
223 | if (nbytes < bsize) | ||
224 | goto done; | ||
225 | |||
226 | *dst ^= *(src - 1); | ||
227 | src -= 1; | ||
228 | dst -= 1; | ||
229 | } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); | ||
230 | |||
231 | if (nbytes < bsize) | ||
232 | goto done; | ||
233 | } | ||
234 | |||
235 | /* Handle leftovers */ | ||
236 | for (;;) { | ||
237 | __cast5_decrypt(ctx, (u8 *)dst, (u8 *)src); | ||
238 | |||
239 | nbytes -= bsize; | ||
240 | if (nbytes < bsize) | ||
241 | break; | ||
242 | |||
243 | *dst ^= *(src - 1); | ||
244 | src -= 1; | ||
245 | dst -= 1; | ||
246 | } | ||
247 | |||
248 | done: | ||
249 | *dst ^= *(u64 *)walk->iv; | ||
250 | *(u64 *)walk->iv = last_iv; | ||
251 | |||
252 | return nbytes; | ||
253 | } | ||
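
__cbc_decrypt() above starts at the last block and works backwards so the 16-way batches can be decrypted in place: the ciphertext blocks a batch would overwrite are saved in ivs[] and XORed back in as chaining values after cast5_dec_blk_xway(), only the very first block needs walk->iv, and last_iv becomes the IV for the next segment. For contrast, a minimal forward, one-block-at-a-time CBC decryption sketch using the same helpers and headers as this file (not part of the patch; whole blocks only, no scatterlist walking):

/* Minimal forward CBC decryption, for contrast with the in-place,
 * last-block-first variant above.  Illustrative only. */
static void cbc_decrypt_forward_sketch(struct cast5_ctx *ctx, u8 *dst,
				       u8 *src, unsigned int nbytes, u8 *iv)
{
	const unsigned int bsize = CAST5_BLOCK_SIZE;
	u64 prev, cur;

	memcpy(&prev, iv, bsize);
	while (nbytes >= bsize) {
		memcpy(&cur, src, bsize);	/* keep C_i: it is the next IV */
		__cast5_decrypt(ctx, dst, src);
		*(u64 *)dst ^= prev;		/* P_i = D(C_i) ^ C_{i-1} */
		prev = cur;
		src += bsize;
		dst += bsize;
		nbytes -= bsize;
	}
	memcpy(iv, &prev, bsize);		/* chaining value for the next call */
}
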
254 | |||
255 | static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
256 | struct scatterlist *src, unsigned int nbytes) | ||
257 | { | ||
258 | bool fpu_enabled = false; | ||
259 | struct blkcipher_walk walk; | ||
260 | int err; | ||
261 | |||
262 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
263 | err = blkcipher_walk_virt(desc, &walk); | ||
264 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
265 | |||
266 | while ((nbytes = walk.nbytes)) { | ||
267 | fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes); | ||
268 | nbytes = __cbc_decrypt(desc, &walk); | ||
269 | err = blkcipher_walk_done(desc, &walk, nbytes); | ||
270 | } | ||
271 | |||
272 | cast5_fpu_end(fpu_enabled); | ||
273 | return err; | ||
274 | } | ||
275 | |||
276 | static void ctr_crypt_final(struct blkcipher_desc *desc, | ||
277 | struct blkcipher_walk *walk) | ||
278 | { | ||
279 | struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
280 | u8 *ctrblk = walk->iv; | ||
281 | u8 keystream[CAST5_BLOCK_SIZE]; | ||
282 | u8 *src = walk->src.virt.addr; | ||
283 | u8 *dst = walk->dst.virt.addr; | ||
284 | unsigned int nbytes = walk->nbytes; | ||
285 | |||
286 | __cast5_encrypt(ctx, keystream, ctrblk); | ||
287 | crypto_xor(keystream, src, nbytes); | ||
288 | memcpy(dst, keystream, nbytes); | ||
289 | |||
290 | crypto_inc(ctrblk, CAST5_BLOCK_SIZE); | ||
291 | } | ||
292 | |||
293 | static unsigned int __ctr_crypt(struct blkcipher_desc *desc, | ||
294 | struct blkcipher_walk *walk) | ||
295 | { | ||
296 | struct cast5_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
297 | const unsigned int bsize = CAST5_BLOCK_SIZE; | ||
298 | unsigned int nbytes = walk->nbytes; | ||
299 | u64 *src = (u64 *)walk->src.virt.addr; | ||
300 | u64 *dst = (u64 *)walk->dst.virt.addr; | ||
301 | u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv); | ||
302 | __be64 ctrblocks[CAST5_PARALLEL_BLOCKS]; | ||
303 | int i; | ||
304 | |||
305 | /* Process multi-block batch */ | ||
306 | if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { | ||
307 | do { | ||
308 | /* create ctrblks for parallel encrypt */ | ||
309 | for (i = 0; i < CAST5_PARALLEL_BLOCKS; i++) { | ||
310 | if (dst != src) | ||
311 | dst[i] = src[i]; | ||
312 | |||
313 | ctrblocks[i] = cpu_to_be64(ctrblk++); | ||
314 | } | ||
315 | |||
316 | cast5_enc_blk_xway_xor(ctx, (u8 *)dst, | ||
317 | (u8 *)ctrblocks); | ||
318 | |||
319 | src += CAST5_PARALLEL_BLOCKS; | ||
320 | dst += CAST5_PARALLEL_BLOCKS; | ||
321 | nbytes -= bsize * CAST5_PARALLEL_BLOCKS; | ||
322 | } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); | ||
323 | |||
324 | if (nbytes < bsize) | ||
325 | goto done; | ||
326 | } | ||
327 | |||
328 | /* Handle leftovers */ | ||
329 | do { | ||
330 | if (dst != src) | ||
331 | *dst = *src; | ||
332 | |||
333 | ctrblocks[0] = cpu_to_be64(ctrblk++); | ||
334 | |||
335 | __cast5_encrypt(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks); | ||
336 | *dst ^= ctrblocks[0]; | ||
337 | |||
338 | src += 1; | ||
339 | dst += 1; | ||
340 | nbytes -= bsize; | ||
341 | } while (nbytes >= bsize); | ||
342 | |||
343 | done: | ||
344 | *(__be64 *)walk->iv = cpu_to_be64(ctrblk); | ||
345 | return nbytes; | ||
346 | } | ||
347 | |||
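In CTR mode the 64-bit IV is treated as a big-endian counter: each block's keystream is the encryption of the current counter value, which is then XORed into the data, so only the encrypt primitive is ever needed (hence .decrypt = ctr_crypt in the templates below). A one-block scalar sketch of what the batched loop above computes (ctr_block_ref is a hypothetical name, shown purely as an illustration):

    static void ctr_block_ref(struct cast5_ctx *ctx, u64 *dst,
                              const u64 *src, u64 *ctrblk)
    {
            __be64 ks = cpu_to_be64((*ctrblk)++);   /* big-endian counter block */

            __cast5_encrypt(ctx, (u8 *)&ks, (u8 *)&ks);     /* keystream = E(ctr) */
            *dst = *src ^ (__force u64)ks;
    }

A trailing piece shorter than one block never reaches this path; ctr_crypt() below hands it to ctr_crypt_final(), which XORs only the remaining bytes of keystream into place via crypto_xor().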
348 | static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
349 | struct scatterlist *src, unsigned int nbytes) | ||
350 | { | ||
351 | bool fpu_enabled = false; | ||
352 | struct blkcipher_walk walk; | ||
353 | int err; | ||
354 | |||
355 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
356 | err = blkcipher_walk_virt_block(desc, &walk, CAST5_BLOCK_SIZE); | ||
357 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
358 | |||
359 | while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) { | ||
360 | fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes); | ||
361 | nbytes = __ctr_crypt(desc, &walk); | ||
362 | err = blkcipher_walk_done(desc, &walk, nbytes); | ||
363 | } | ||
364 | |||
365 | cast5_fpu_end(fpu_enabled); | ||
366 | |||
367 | if (walk.nbytes) { | ||
368 | ctr_crypt_final(desc, &walk); | ||
369 | err = blkcipher_walk_done(desc, &walk, 0); | ||
370 | } | ||
371 | |||
372 | return err; | ||
373 | } | ||
374 | |||
375 | |||
376 | static struct crypto_alg cast5_algs[6] = { { | ||
377 | .cra_name = "__ecb-cast5-avx", | ||
378 | .cra_driver_name = "__driver-ecb-cast5-avx", | ||
379 | .cra_priority = 0, | ||
380 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
381 | .cra_blocksize = CAST5_BLOCK_SIZE, | ||
382 | .cra_ctxsize = sizeof(struct cast5_ctx), | ||
383 | .cra_alignmask = 0, | ||
384 | .cra_type = &crypto_blkcipher_type, | ||
385 | .cra_module = THIS_MODULE, | ||
386 | .cra_u = { | ||
387 | .blkcipher = { | ||
388 | .min_keysize = CAST5_MIN_KEY_SIZE, | ||
389 | .max_keysize = CAST5_MAX_KEY_SIZE, | ||
390 | .setkey = cast5_setkey, | ||
391 | .encrypt = ecb_encrypt, | ||
392 | .decrypt = ecb_decrypt, | ||
393 | }, | ||
394 | }, | ||
395 | }, { | ||
396 | .cra_name = "__cbc-cast5-avx", | ||
397 | .cra_driver_name = "__driver-cbc-cast5-avx", | ||
398 | .cra_priority = 0, | ||
399 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
400 | .cra_blocksize = CAST5_BLOCK_SIZE, | ||
401 | .cra_ctxsize = sizeof(struct cast5_ctx), | ||
402 | .cra_alignmask = 0, | ||
403 | .cra_type = &crypto_blkcipher_type, | ||
404 | .cra_module = THIS_MODULE, | ||
405 | .cra_u = { | ||
406 | .blkcipher = { | ||
407 | .min_keysize = CAST5_MIN_KEY_SIZE, | ||
408 | .max_keysize = CAST5_MAX_KEY_SIZE, | ||
409 | .setkey = cast5_setkey, | ||
410 | .encrypt = cbc_encrypt, | ||
411 | .decrypt = cbc_decrypt, | ||
412 | }, | ||
413 | }, | ||
414 | }, { | ||
415 | .cra_name = "__ctr-cast5-avx", | ||
416 | .cra_driver_name = "__driver-ctr-cast5-avx", | ||
417 | .cra_priority = 0, | ||
418 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
419 | .cra_blocksize = 1, | ||
420 | .cra_ctxsize = sizeof(struct cast5_ctx), | ||
421 | .cra_alignmask = 0, | ||
422 | .cra_type = &crypto_blkcipher_type, | ||
423 | .cra_module = THIS_MODULE, | ||
424 | .cra_u = { | ||
425 | .blkcipher = { | ||
426 | .min_keysize = CAST5_MIN_KEY_SIZE, | ||
427 | .max_keysize = CAST5_MAX_KEY_SIZE, | ||
428 | .ivsize = CAST5_BLOCK_SIZE, | ||
429 | .setkey = cast5_setkey, | ||
430 | .encrypt = ctr_crypt, | ||
431 | .decrypt = ctr_crypt, | ||
432 | }, | ||
433 | }, | ||
434 | }, { | ||
435 | .cra_name = "ecb(cast5)", | ||
436 | .cra_driver_name = "ecb-cast5-avx", | ||
437 | .cra_priority = 200, | ||
438 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | ||
439 | .cra_blocksize = CAST5_BLOCK_SIZE, | ||
440 | .cra_ctxsize = sizeof(struct async_helper_ctx), | ||
441 | .cra_alignmask = 0, | ||
442 | .cra_type = &crypto_ablkcipher_type, | ||
443 | .cra_module = THIS_MODULE, | ||
444 | .cra_init = ablk_init, | ||
445 | .cra_exit = ablk_exit, | ||
446 | .cra_u = { | ||
447 | .ablkcipher = { | ||
448 | .min_keysize = CAST5_MIN_KEY_SIZE, | ||
449 | .max_keysize = CAST5_MAX_KEY_SIZE, | ||
450 | .setkey = ablk_set_key, | ||
451 | .encrypt = ablk_encrypt, | ||
452 | .decrypt = ablk_decrypt, | ||
453 | }, | ||
454 | }, | ||
455 | }, { | ||
456 | .cra_name = "cbc(cast5)", | ||
457 | .cra_driver_name = "cbc-cast5-avx", | ||
458 | .cra_priority = 200, | ||
459 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | ||
460 | .cra_blocksize = CAST5_BLOCK_SIZE, | ||
461 | .cra_ctxsize = sizeof(struct async_helper_ctx), | ||
462 | .cra_alignmask = 0, | ||
463 | .cra_type = &crypto_ablkcipher_type, | ||
464 | .cra_module = THIS_MODULE, | ||
465 | .cra_init = ablk_init, | ||
466 | .cra_exit = ablk_exit, | ||
467 | .cra_u = { | ||
468 | .ablkcipher = { | ||
469 | .min_keysize = CAST5_MIN_KEY_SIZE, | ||
470 | .max_keysize = CAST5_MAX_KEY_SIZE, | ||
471 | .ivsize = CAST5_BLOCK_SIZE, | ||
472 | .setkey = ablk_set_key, | ||
473 | .encrypt = __ablk_encrypt, | ||
474 | .decrypt = ablk_decrypt, | ||
475 | }, | ||
476 | }, | ||
477 | }, { | ||
478 | .cra_name = "ctr(cast5)", | ||
479 | .cra_driver_name = "ctr-cast5-avx", | ||
480 | .cra_priority = 200, | ||
481 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | ||
482 | .cra_blocksize = 1, | ||
483 | .cra_ctxsize = sizeof(struct async_helper_ctx), | ||
484 | .cra_alignmask = 0, | ||
485 | .cra_type = &crypto_ablkcipher_type, | ||
486 | .cra_module = THIS_MODULE, | ||
487 | .cra_init = ablk_init, | ||
488 | .cra_exit = ablk_exit, | ||
489 | .cra_u = { | ||
490 | .ablkcipher = { | ||
491 | .min_keysize = CAST5_MIN_KEY_SIZE, | ||
492 | .max_keysize = CAST5_MAX_KEY_SIZE, | ||
493 | .ivsize = CAST5_BLOCK_SIZE, | ||
494 | .setkey = ablk_set_key, | ||
495 | .encrypt = ablk_encrypt, | ||
496 | .decrypt = ablk_encrypt, | ||
497 | .geniv = "chainiv", | ||
498 | }, | ||
499 | }, | ||
500 | } }; | ||
501 | |||
502 | static int __init cast5_init(void) | ||
503 | { | ||
504 | u64 xcr0; | ||
505 | |||
506 | if (!cpu_has_avx || !cpu_has_osxsave) { | ||
507 | pr_info("AVX instructions are not detected.\n"); | ||
508 | return -ENODEV; | ||
509 | } | ||
510 | |||
511 | xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK); | ||
512 | if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) { | ||
513 | pr_info("AVX detected but unusable.\n"); | ||
514 | return -ENODEV; | ||
515 | } | ||
516 | |||
517 | return crypto_register_algs(cast5_algs, ARRAY_SIZE(cast5_algs)); | ||
518 | } | ||
519 | |||
520 | static void __exit cast5_exit(void) | ||
521 | { | ||
522 | crypto_unregister_algs(cast5_algs, ARRAY_SIZE(cast5_algs)); | ||
523 | } | ||
524 | |||
525 | module_init(cast5_init); | ||
526 | module_exit(cast5_exit); | ||
527 | |||
528 | MODULE_DESCRIPTION("Cast5 Cipher Algorithm, AVX optimized"); | ||
529 | MODULE_LICENSE("GPL"); | ||
530 | MODULE_ALIAS("cast5"); | ||
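For context, a hedged example of how another kernel module of this generation could drive the accelerated "cbc(cast5)" instance through the asynchronous ablkcipher API. This is a sketch only: the helper and constant names are taken from the crypto API as it stood at the time, the function name cast5_cbc_encrypt_once is hypothetical, error paths are trimmed, and the request is assumed to complete synchronously instead of via a completion callback.

    #include <linux/crypto.h>
    #include <linux/scatterlist.h>
    #include <linux/err.h>
    #include <crypto/cast5.h>

    static int cast5_cbc_encrypt_once(u8 *buf, unsigned int len,
                                      const u8 *key, u8 iv[CAST5_BLOCK_SIZE])
    {
            struct crypto_ablkcipher *tfm;
            struct ablkcipher_request *req;
            struct scatterlist sg;
            int ret;

            /* Resolves to cbc-cast5-avx (priority 200) when this module is loaded. */
            tfm = crypto_alloc_ablkcipher("cbc(cast5)", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);

            ret = crypto_ablkcipher_setkey(tfm, key, CAST5_MAX_KEY_SIZE);
            if (ret)
                    goto out_free_tfm;

            req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
            if (!req) {
                    ret = -ENOMEM;
                    goto out_free_tfm;
            }

            sg_init_one(&sg, buf, len);     /* len must be a multiple of 8 bytes */
            ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

            /* Real users set a callback and wait for -EINPROGRESS/-EBUSY
             * completion; assumed synchronous here for brevity. */
            ret = crypto_ablkcipher_encrypt(req);

            ablkcipher_request_free(req);
    out_free_tfm:
            crypto_free_ablkcipher(tfm);
            return ret;
    }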
diff --git a/arch/x86/crypto/cast6-avx-x86_64-asm_64.S b/arch/x86/crypto/cast6-avx-x86_64-asm_64.S new file mode 100644 index 000000000000..218d283772f4 --- /dev/null +++ b/arch/x86/crypto/cast6-avx-x86_64-asm_64.S | |||
@@ -0,0 +1,383 @@ | |||
1 | /* | ||
2 | * Cast6 Cipher 8-way parallel algorithm (AVX/x86_64) | ||
3 | * | ||
4 | * Copyright (C) 2012 Johannes Goetzfried | ||
5 | * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de> | ||
6 | * | ||
7 | * Copyright © 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi> | ||
8 | * | ||
9 | * This program is free software; you can redistribute it and/or modify | ||
10 | * it under the terms of the GNU General Public License as published by | ||
11 | * the Free Software Foundation; either version 2 of the License, or | ||
12 | * (at your option) any later version. | ||
13 | * | ||
14 | * This program is distributed in the hope that it will be useful, | ||
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
17 | * GNU General Public License for more details. | ||
18 | * | ||
19 | * You should have received a copy of the GNU General Public License | ||
20 | * along with this program; if not, write to the Free Software | ||
21 | * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 | ||
22 | * USA | ||
23 | * | ||
24 | */ | ||
25 | |||
26 | .file "cast6-avx-x86_64-asm_64.S" | ||
27 | |||
28 | .extern cast6_s1 | ||
29 | .extern cast6_s2 | ||
30 | .extern cast6_s3 | ||
31 | .extern cast6_s4 | ||
32 | |||
33 | /* structure of crypto context */ | ||
34 | #define km 0 | ||
35 | #define kr (12*4*4) | ||
36 | |||
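The km/kr offsets assume the generic kernel CAST6 context layout, roughly the sketch below; the authoritative definition is struct cast6_ctx in <crypto/cast6.h>, so treat the exact field declarations here as an assumption for orientation.

    /* km = offsetof(struct cast6_ctx, Km) = 0
     * kr = offsetof(struct cast6_ctx, Kr) = 12 * 4 * sizeof(u32) = 192
     */
    struct cast6_ctx {
            u32 Km[12][4];  /* 12 quad-rounds x 4 masking subkeys  */
            u8 Kr[12][4];   /* 12 quad-rounds x 4 rotation subkeys */
    };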
37 | /* s-boxes */ | ||
38 | #define s1 cast6_s1 | ||
39 | #define s2 cast6_s2 | ||
40 | #define s3 cast6_s3 | ||
41 | #define s4 cast6_s4 | ||
42 | |||
43 | /********************************************************************** | ||
44 | 8-way AVX cast6 | ||
45 | **********************************************************************/ | ||
46 | #define CTX %rdi | ||
47 | |||
48 | #define RA1 %xmm0 | ||
49 | #define RB1 %xmm1 | ||
50 | #define RC1 %xmm2 | ||
51 | #define RD1 %xmm3 | ||
52 | |||
53 | #define RA2 %xmm4 | ||
54 | #define RB2 %xmm5 | ||
55 | #define RC2 %xmm6 | ||
56 | #define RD2 %xmm7 | ||
57 | |||
58 | #define RX %xmm8 | ||
59 | |||
60 | #define RKM %xmm9 | ||
61 | #define RKR %xmm10 | ||
62 | #define RKRF %xmm11 | ||
63 | #define RKRR %xmm12 | ||
64 | #define R32 %xmm13 | ||
65 | #define R1ST %xmm14 | ||
66 | |||
67 | #define RTMP %xmm15 | ||
68 | |||
69 | #define RID1 %rbp | ||
70 | #define RID1d %ebp | ||
71 | #define RID2 %rsi | ||
72 | #define RID2d %esi | ||
73 | |||
74 | #define RGI1 %rdx | ||
75 | #define RGI1bl %dl | ||
76 | #define RGI1bh %dh | ||
77 | #define RGI2 %rcx | ||
78 | #define RGI2bl %cl | ||
79 | #define RGI2bh %ch | ||
80 | |||
81 | #define RGI3 %rax | ||
82 | #define RGI3bl %al | ||
83 | #define RGI3bh %ah | ||
84 | #define RGI4 %rbx | ||
85 | #define RGI4bl %bl | ||
86 | #define RGI4bh %bh | ||
87 | |||
88 | #define RFS1 %r8 | ||
89 | #define RFS1d %r8d | ||
90 | #define RFS2 %r9 | ||
91 | #define RFS2d %r9d | ||
92 | #define RFS3 %r10 | ||
93 | #define RFS3d %r10d | ||
94 | |||
95 | |||
96 | #define lookup_32bit(src, dst, op1, op2, op3, interleave_op, il_reg) \ | ||
97 | movzbl src ## bh, RID1d; \ | ||
98 | movzbl src ## bl, RID2d; \ | ||
99 | shrq $16, src; \ | ||
100 | movl s1(, RID1, 4), dst ## d; \ | ||
101 | op1 s2(, RID2, 4), dst ## d; \ | ||
102 | movzbl src ## bh, RID1d; \ | ||
103 | movzbl src ## bl, RID2d; \ | ||
104 | interleave_op(il_reg); \ | ||
105 | op2 s3(, RID1, 4), dst ## d; \ | ||
106 | op3 s4(, RID2, 4), dst ## d; | ||
107 | |||
108 | #define dummy(d) /* do nothing */ | ||
109 | |||
110 | #define shr_next(reg) \ | ||
111 | shrq $16, reg; | ||
112 | |||
113 | #define F_head(a, x, gi1, gi2, op0) \ | ||
114 | op0 a, RKM, x; \ | ||
115 | vpslld RKRF, x, RTMP; \ | ||
116 | vpsrld RKRR, x, x; \ | ||
117 | vpor RTMP, x, x; \ | ||
118 | \ | ||
119 | vmovq x, gi1; \ | ||
120 | vpextrq $1, x, gi2; | ||
121 | |||
122 | #define F_tail(a, x, gi1, gi2, op1, op2, op3) \ | ||
123 | lookup_32bit(##gi1, RFS1, op1, op2, op3, shr_next, ##gi1); \ | ||
124 | lookup_32bit(##gi2, RFS3, op1, op2, op3, shr_next, ##gi2); \ | ||
125 | \ | ||
126 | lookup_32bit(##gi1, RFS2, op1, op2, op3, dummy, none); \ | ||
127 | shlq $32, RFS2; \ | ||
128 | orq RFS1, RFS2; \ | ||
129 | lookup_32bit(##gi2, RFS1, op1, op2, op3, dummy, none); \ | ||
130 | shlq $32, RFS1; \ | ||
131 | orq RFS1, RFS3; \ | ||
132 | \ | ||
133 | vmovq RFS2, x; \ | ||
134 | vpinsrq $1, RFS3, x, x; | ||
135 | |||
136 | #define F_2(a1, b1, a2, b2, op0, op1, op2, op3) \ | ||
137 | F_head(b1, RX, RGI1, RGI2, op0); \ | ||
138 | F_head(b2, RX, RGI3, RGI4, op0); \ | ||
139 | \ | ||
140 | F_tail(b1, RX, RGI1, RGI2, op1, op2, op3); \ | ||
141 | F_tail(b2, RTMP, RGI3, RGI4, op1, op2, op3); \ | ||
142 | \ | ||
143 | vpxor a1, RX, a1; \ | ||
144 | vpxor a2, RTMP, a2; | ||
145 | |||
146 | #define F1_2(a1, b1, a2, b2) \ | ||
147 | F_2(a1, b1, a2, b2, vpaddd, xorl, subl, addl) | ||
148 | #define F2_2(a1, b1, a2, b2) \ | ||
149 | F_2(a1, b1, a2, b2, vpxor, subl, addl, xorl) | ||
150 | #define F3_2(a1, b1, a2, b2) \ | ||
151 | F_2(a1, b1, a2, b2, vpsubd, addl, xorl, subl) | ||
152 | |||
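F1_2/F2_2/F3_2 evaluate two copies of the three CAST-256 round functions in parallel: op0 selects how the masking key is combined with the data before the key-dependent rotation, and op1..op3 select how the four s-box outputs are combined. For reference, a scalar version of f1 as specified in RFC 2612 (a sketch; cast6_f1_ref is a hypothetical name, cast6_s1..cast6_s4 are assumed to be the 256-entry u32 tables declared .extern above, and rol32() is the kernel's 32-bit rotate):

    static u32 cast6_f1_ref(u32 d, u32 km, u8 kr)
    {
            u32 i = rol32(km + d, kr);

            return ((cast6_s1[i >> 24] ^ cast6_s2[(i >> 16) & 0xff]) -
                    cast6_s3[(i >> 8) & 0xff]) + cast6_s4[i & 0xff];
    }
    /* f2: i = rol32(km ^ d, kr); result = ((s1 - s2) + s3) ^ s4
     * f3: i = rol32(km - d, kr); result = ((s1 + s2) ^ s3) - s4 */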
153 | #define qop(in, out, f) \ | ||
154 | F ## f ## _2(out ## 1, in ## 1, out ## 2, in ## 2); | ||
155 | |||
156 | #define get_round_keys(nn) \ | ||
157 | vbroadcastss (km+(4*(nn)))(CTX), RKM; \ | ||
158 | vpand R1ST, RKR, RKRF; \ | ||
159 | vpsubq RKRF, R32, RKRR; \ | ||
160 | vpsrldq $1, RKR, RKR; | ||
161 | |||
162 | #define Q(n) \ | ||
163 | get_round_keys(4*n+0); \ | ||
164 | qop(RD, RC, 1); \ | ||
165 | \ | ||
166 | get_round_keys(4*n+1); \ | ||
167 | qop(RC, RB, 2); \ | ||
168 | \ | ||
169 | get_round_keys(4*n+2); \ | ||
170 | qop(RB, RA, 3); \ | ||
171 | \ | ||
172 | get_round_keys(4*n+3); \ | ||
173 | qop(RA, RD, 1); | ||
174 | |||
175 | #define QBAR(n) \ | ||
176 | get_round_keys(4*n+3); \ | ||
177 | qop(RA, RD, 1); \ | ||
178 | \ | ||
179 | get_round_keys(4*n+2); \ | ||
180 | qop(RB, RA, 3); \ | ||
181 | \ | ||
182 | get_round_keys(4*n+1); \ | ||
183 | qop(RC, RB, 2); \ | ||
184 | \ | ||
185 | get_round_keys(4*n+0); \ | ||
186 | qop(RD, RC, 1); | ||
187 | |||
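Q(n) and QBAR(n) correspond to CAST-256's forward and inverse quad-rounds. In scalar terms (RFC 2612), with the block split into the 32-bit words A, B, C, D and the n-th round keys Km[n][0..3], Kr[n][0..3], they look roughly like this:

    /* Q(n):                                QBAR(n):
     *   C ^= f1(D, Km[n][0], Kr[n][0]);      D ^= f1(A, Km[n][3], Kr[n][3]);
     *   B ^= f2(C, Km[n][1], Kr[n][1]);      A ^= f3(B, Km[n][2], Kr[n][2]);
     *   A ^= f3(B, Km[n][2], Kr[n][2]);      B ^= f2(C, Km[n][1], Kr[n][1]);
     *   D ^= f1(A, Km[n][3], Kr[n][3]);      C ^= f1(D, Km[n][0], Kr[n][0]);
     */

Encryption runs quad-rounds 0..5 forward and 6..11 inverted, which matches the Q(0)..Q(5), QBAR(6)..QBAR(11) sequence in __cast6_enc_blk_8way below; decryption replays the same rounds in reverse.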
188 | #define shuffle(mask) \ | ||
189 | vpshufb mask, RKR, RKR; | ||
190 | |||
191 | #define preload_rkr(n, do_mask, mask) \ | ||
192 | vbroadcastss .L16_mask, RKR; \ | ||
193 | /* add 16-bit rotation to key rotations (mod 32) */ \ | ||
194 | vpxor (kr+n*16)(CTX), RKR, RKR; \ | ||
195 | do_mask(mask); | ||
196 | |||
197 | #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ | ||
198 | vpunpckldq x1, x0, t0; \ | ||
199 | vpunpckhdq x1, x0, t2; \ | ||
200 | vpunpckldq x3, x2, t1; \ | ||
201 | vpunpckhdq x3, x2, x3; \ | ||
202 | \ | ||
203 | vpunpcklqdq t1, t0, x0; \ | ||
204 | vpunpckhqdq t1, t0, x1; \ | ||
205 | vpunpcklqdq x3, t2, x2; \ | ||
206 | vpunpckhqdq x3, t2, x3; | ||
207 | |||
208 | #define inpack_blocks(in, x0, x1, x2, x3, t0, t1, t2, rmask) \ | ||
209 | vmovdqu (0*4*4)(in), x0; \ | ||
210 | vmovdqu (1*4*4)(in), x1; \ | ||
211 | vmovdqu (2*4*4)(in), x2; \ | ||
212 | vmovdqu (3*4*4)(in), x3; \ | ||
213 | vpshufb rmask, x0, x0; \ | ||
214 | vpshufb rmask, x1, x1; \ | ||
215 | vpshufb rmask, x2, x2; \ | ||
216 | vpshufb rmask, x3, x3; \ | ||
217 | \ | ||
218 | transpose_4x4(x0, x1, x2, x3, t0, t1, t2) | ||
219 | |||
220 | #define outunpack_blocks(out, x0, x1, x2, x3, t0, t1, t2, rmask) \ | ||
221 | transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ | ||
222 | \ | ||
223 | vpshufb rmask, x0, x0; \ | ||
224 | vpshufb rmask, x1, x1; \ | ||
225 | vpshufb rmask, x2, x2; \ | ||
226 | vpshufb rmask, x3, x3; \ | ||
227 | vmovdqu x0, (0*4*4)(out); \ | ||
228 | vmovdqu x1, (1*4*4)(out); \ | ||
229 | vmovdqu x2, (2*4*4)(out); \ | ||
230 | vmovdqu x3, (3*4*4)(out); | ||
231 | |||
232 | #define outunpack_xor_blocks(out, x0, x1, x2, x3, t0, t1, t2, rmask) \ | ||
233 | transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ | ||
234 | \ | ||
235 | vpshufb rmask, x0, x0; \ | ||
236 | vpshufb rmask, x1, x1; \ | ||
237 | vpshufb rmask, x2, x2; \ | ||
238 | vpshufb rmask, x3, x3; \ | ||
239 | vpxor (0*4*4)(out), x0, x0; \ | ||
240 | vmovdqu x0, (0*4*4)(out); \ | ||
241 | vpxor (1*4*4)(out), x1, x1; \ | ||
242 | vmovdqu x1, (1*4*4)(out); \ | ||
243 | vpxor (2*4*4)(out), x2, x2; \ | ||
244 | vmovdqu x2, (2*4*4)(out); \ | ||
245 | vpxor (3*4*4)(out), x3, x3; \ | ||
246 | vmovdqu x3, (3*4*4)(out); | ||
247 | |||
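inpack_blocks() byte-swaps each 32-bit word (CAST-256 works on big-endian words) and then transposes four 16-byte blocks so that a single xmm register holds the same word of four different blocks; outunpack_blocks() undoes this on the way out. Schematically, after inpack_blocks the register set is expected to look like this (an illustration):

    /* RA1 = { A(blk0), A(blk1), A(blk2), A(blk3) }
     * RB1 = { B(blk0), B(blk1), B(blk2), B(blk3) }
     * RC1 = { C(blk0), C(blk1), C(blk2), C(blk3) }
     * RD1 = { D(blk0), D(blk1), D(blk2), D(blk3) }
     * RA2..RD2 hold blocks 4..7, giving the 8-way parallelism.
     */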
248 | .data | ||
249 | |||
250 | .align 16 | ||
251 | .Lbswap_mask: | ||
252 | .byte 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12 | ||
253 | .Lrkr_enc_Q_Q_QBAR_QBAR: | ||
254 | .byte 0, 1, 2, 3, 4, 5, 6, 7, 11, 10, 9, 8, 15, 14, 13, 12 | ||
255 | .Lrkr_enc_QBAR_QBAR_QBAR_QBAR: | ||
256 | .byte 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12 | ||
257 | .Lrkr_dec_Q_Q_Q_Q: | ||
258 | .byte 12, 13, 14, 15, 8, 9, 10, 11, 4, 5, 6, 7, 0, 1, 2, 3 | ||
259 | .Lrkr_dec_Q_Q_QBAR_QBAR: | ||
260 | .byte 12, 13, 14, 15, 8, 9, 10, 11, 7, 6, 5, 4, 3, 2, 1, 0 | ||
261 | .Lrkr_dec_QBAR_QBAR_QBAR_QBAR: | ||
262 | .byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 | ||
263 | .L16_mask: | ||
264 | .byte 16, 16, 16, 16 | ||
265 | .L32_mask: | ||
266 | .byte 32, 0, 0, 0 | ||
267 | .Lfirst_mask: | ||
268 | .byte 0x1f, 0, 0, 0 | ||
269 | |||
270 | .text | ||
271 | |||
272 | .align 16 | ||
273 | .global __cast6_enc_blk_8way | ||
274 | .type __cast6_enc_blk_8way,@function; | ||
275 | |||
276 | __cast6_enc_blk_8way: | ||
277 | /* input: | ||
278 | * %rdi: ctx, CTX | ||
279 | * %rsi: dst | ||
280 | * %rdx: src | ||
281 | * %rcx: bool, if true: xor output | ||
282 | */ | ||
283 | |||
284 | pushq %rbp; | ||
285 | pushq %rbx; | ||
286 | pushq %rcx; | ||
287 | |||
288 | vmovdqa .Lbswap_mask, RKM; | ||
289 | vmovd .Lfirst_mask, R1ST; | ||
290 | vmovd .L32_mask, R32; | ||
291 | |||
292 | leaq (4*4*4)(%rdx), %rax; | ||
293 | inpack_blocks(%rdx, RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM); | ||
294 | inpack_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM); | ||
295 | |||
296 | movq %rsi, %r11; | ||
297 | |||
298 | preload_rkr(0, dummy, none); | ||
299 | Q(0); | ||
300 | Q(1); | ||
301 | Q(2); | ||
302 | Q(3); | ||
303 | preload_rkr(1, shuffle, .Lrkr_enc_Q_Q_QBAR_QBAR); | ||
304 | Q(4); | ||
305 | Q(5); | ||
306 | QBAR(6); | ||
307 | QBAR(7); | ||
308 | preload_rkr(2, shuffle, .Lrkr_enc_QBAR_QBAR_QBAR_QBAR); | ||
309 | QBAR(8); | ||
310 | QBAR(9); | ||
311 | QBAR(10); | ||
312 | QBAR(11); | ||
313 | |||
314 | popq %rcx; | ||
315 | popq %rbx; | ||
316 | popq %rbp; | ||
317 | |||
318 | vmovdqa .Lbswap_mask, RKM; | ||
319 | leaq (4*4*4)(%r11), %rax; | ||
320 | |||
321 | testb %cl, %cl; | ||
322 | jnz __enc_xor8; | ||
323 | |||
324 | outunpack_blocks(%r11, RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM); | ||
325 | outunpack_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM); | ||
326 | |||
327 | ret; | ||
328 | |||
329 | __enc_xor8: | ||
330 | outunpack_xor_blocks(%r11, RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM); | ||
331 | outunpack_xor_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM); | ||
332 | |||
333 | ret; | ||
334 | |||
335 | .align 16 | ||
336 | .global cast6_dec_blk_8way | ||
337 | .type cast6_dec_blk_8way,@function; | ||
338 | |||
339 | cast6_dec_blk_8way: | ||
340 | /* input: | ||
341 | * %rdi: ctx, CTX | ||
342 | * %rsi: dst | ||
343 | * %rdx: src | ||
344 | */ | ||
345 | |||
346 | pushq %rbp; | ||
347 | pushq %rbx; | ||
348 | |||
349 | vmovdqa .Lbswap_mask, RKM; | ||
350 | vmovd .Lfirst_mask, R1ST; | ||
351 | vmovd .L32_mask, R32; | ||
352 | |||
353 | leaq (4*4*4)(%rdx), %rax; | ||
354 | inpack_blocks(%rdx, RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM); | ||
355 | inpack_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM); | ||
356 | |||
357 | movq %rsi, %r11; | ||
358 | |||
359 | preload_rkr(2, shuffle, .Lrkr_dec_Q_Q_Q_Q); | ||
360 | Q(11); | ||
361 | Q(10); | ||
362 | Q(9); | ||
363 | Q(8); | ||
364 | preload_rkr(1, shuffle, .Lrkr_dec_Q_Q_QBAR_QBAR); | ||
365 | Q(7); | ||
366 | Q(6); | ||
367 | QBAR(5); | ||
368 | QBAR(4); | ||
369 | preload_rkr(0, shuffle, .Lrkr_dec_QBAR_QBAR_QBAR_QBAR); | ||
370 | QBAR(3); | ||
371 | QBAR(2); | ||
372 | QBAR(1); | ||
373 | QBAR(0); | ||
374 | |||
375 | popq %rbx; | ||
376 | popq %rbp; | ||
377 | |||
378 | vmovdqa .Lbswap_mask, RKM; | ||
379 | leaq (4*4*4)(%r11), %rax; | ||
380 | outunpack_blocks(%r11, RA1, RB1, RC1, RD1, RTMP, RX, RKRF, RKM); | ||
381 | outunpack_blocks(%rax, RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM); | ||
382 | |||
383 | ret; | ||
diff --git a/arch/x86/crypto/cast6_avx_glue.c b/arch/x86/crypto/cast6_avx_glue.c new file mode 100644 index 000000000000..15e5f85a5011 --- /dev/null +++ b/arch/x86/crypto/cast6_avx_glue.c | |||
@@ -0,0 +1,648 @@ | |||
1 | /* | ||
2 | * Glue Code for the AVX assembly implementation of the Cast6 Cipher | ||
3 | * | ||
4 | * Copyright (C) 2012 Johannes Goetzfried | ||
5 | * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de> | ||
6 | * | ||
7 | * This program is free software; you can redistribute it and/or modify | ||
8 | * it under the terms of the GNU General Public License as published by | ||
9 | * the Free Software Foundation; either version 2 of the License, or | ||
10 | * (at your option) any later version. | ||
11 | * | ||
12 | * This program is distributed in the hope that it will be useful, | ||
13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
15 | * GNU General Public License for more details. | ||
16 | * | ||
17 | * You should have received a copy of the GNU General Public License | ||
18 | * along with this program; if not, write to the Free Software | ||
19 | * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 | ||
20 | * USA | ||
21 | * | ||
22 | */ | ||
23 | |||
24 | #include <linux/module.h> | ||
25 | #include <linux/hardirq.h> | ||
26 | #include <linux/types.h> | ||
27 | #include <linux/crypto.h> | ||
28 | #include <linux/err.h> | ||
29 | #include <crypto/algapi.h> | ||
30 | #include <crypto/cast6.h> | ||
31 | #include <crypto/cryptd.h> | ||
32 | #include <crypto/b128ops.h> | ||
33 | #include <crypto/ctr.h> | ||
34 | #include <crypto/lrw.h> | ||
35 | #include <crypto/xts.h> | ||
36 | #include <asm/xcr.h> | ||
37 | #include <asm/xsave.h> | ||
38 | #include <asm/crypto/ablk_helper.h> | ||
39 | #include <asm/crypto/glue_helper.h> | ||
40 | |||
41 | #define CAST6_PARALLEL_BLOCKS 8 | ||
42 | |||
43 | asmlinkage void __cast6_enc_blk_8way(struct cast6_ctx *ctx, u8 *dst, | ||
44 | const u8 *src, bool xor); | ||
45 | asmlinkage void cast6_dec_blk_8way(struct cast6_ctx *ctx, u8 *dst, | ||
46 | const u8 *src); | ||
47 | |||
48 | static inline void cast6_enc_blk_xway(struct cast6_ctx *ctx, u8 *dst, | ||
49 | const u8 *src) | ||
50 | { | ||
51 | __cast6_enc_blk_8way(ctx, dst, src, false); | ||
52 | } | ||
53 | |||
54 | static inline void cast6_enc_blk_xway_xor(struct cast6_ctx *ctx, u8 *dst, | ||
55 | const u8 *src) | ||
56 | { | ||
57 | __cast6_enc_blk_8way(ctx, dst, src, true); | ||
58 | } | ||
59 | |||
60 | static inline void cast6_dec_blk_xway(struct cast6_ctx *ctx, u8 *dst, | ||
61 | const u8 *src) | ||
62 | { | ||
63 | cast6_dec_blk_8way(ctx, dst, src); | ||
64 | } | ||
65 | |||
66 | |||
67 | static void cast6_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src) | ||
68 | { | ||
69 | u128 ivs[CAST6_PARALLEL_BLOCKS - 1]; | ||
70 | unsigned int j; | ||
71 | |||
72 | for (j = 0; j < CAST6_PARALLEL_BLOCKS - 1; j++) | ||
73 | ivs[j] = src[j]; | ||
74 | |||
75 | cast6_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src); | ||
76 | |||
77 | for (j = 0; j < CAST6_PARALLEL_BLOCKS - 1; j++) | ||
78 | u128_xor(dst + (j + 1), dst + (j + 1), ivs + j); | ||
79 | } | ||
80 | |||
81 | static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, u128 *iv) | ||
82 | { | ||
83 | be128 ctrblk; | ||
84 | |||
85 | u128_to_be128(&ctrblk, iv); | ||
86 | u128_inc(iv); | ||
87 | |||
88 | __cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); | ||
89 | u128_xor(dst, src, (u128 *)&ctrblk); | ||
90 | } | ||
91 | |||
92 | static void cast6_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src, | ||
93 | u128 *iv) | ||
94 | { | ||
95 | be128 ctrblks[CAST6_PARALLEL_BLOCKS]; | ||
96 | unsigned int i; | ||
97 | |||
98 | for (i = 0; i < CAST6_PARALLEL_BLOCKS; i++) { | ||
99 | if (dst != src) | ||
100 | dst[i] = src[i]; | ||
101 | |||
102 | u128_to_be128(&ctrblks[i], iv); | ||
103 | u128_inc(iv); | ||
104 | } | ||
105 | |||
106 | cast6_enc_blk_xway_xor(ctx, (u8 *)dst, (u8 *)ctrblks); | ||
107 | } | ||
108 | |||
109 | static const struct common_glue_ctx cast6_enc = { | ||
110 | .num_funcs = 2, | ||
111 | .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS, | ||
112 | |||
113 | .funcs = { { | ||
114 | .num_blocks = CAST6_PARALLEL_BLOCKS, | ||
115 | .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_enc_blk_xway) } | ||
116 | }, { | ||
117 | .num_blocks = 1, | ||
118 | .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) } | ||
119 | } } | ||
120 | }; | ||
121 | |||
122 | static const struct common_glue_ctx cast6_ctr = { | ||
123 | .num_funcs = 2, | ||
124 | .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS, | ||
125 | |||
126 | .funcs = { { | ||
127 | .num_blocks = CAST6_PARALLEL_BLOCKS, | ||
128 | .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr_xway) } | ||
129 | }, { | ||
130 | .num_blocks = 1, | ||
131 | .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) } | ||
132 | } } | ||
133 | }; | ||
134 | |||
135 | static const struct common_glue_ctx cast6_dec = { | ||
136 | .num_funcs = 2, | ||
137 | .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS, | ||
138 | |||
139 | .funcs = { { | ||
140 | .num_blocks = CAST6_PARALLEL_BLOCKS, | ||
141 | .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_dec_blk_xway) } | ||
142 | }, { | ||
143 | .num_blocks = 1, | ||
144 | .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) } | ||
145 | } } | ||
146 | }; | ||
147 | |||
148 | static const struct common_glue_ctx cast6_dec_cbc = { | ||
149 | .num_funcs = 2, | ||
150 | .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS, | ||
151 | |||
152 | .funcs = { { | ||
153 | .num_blocks = CAST6_PARALLEL_BLOCKS, | ||
154 | .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_decrypt_cbc_xway) } | ||
155 | }, { | ||
156 | .num_blocks = 1, | ||
157 | .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) } | ||
158 | } } | ||
159 | }; | ||
160 | |||
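These tables feed the shared glue_helper dispatcher. As I understand the helper, it walks funcs[] in order and uses the first entry whose num_blocks still fits the data that remains, so the 8-way AVX routine runs while at least CAST6_PARALLEL_BLOCKS blocks are left and the scalar routine mops up the tail, while fpu_blocks_limit decides whether claiming the FPU is worthwhile for a chunk. A hedged sketch of that selection (glue_ecb_dispatch_sketch is a hypothetical name, not the actual glue_helper.c code):

    static void glue_ecb_dispatch_sketch(const struct common_glue_ctx *gctx,
                                         void *ctx, u8 *dst, const u8 *src,
                                         unsigned int nblocks)
    {
            unsigned int i;

            /* The table is expected to end with a 1-block entry,
             * so the inner loop always makes progress. */
            while (nblocks) {
                    for (i = 0; i < gctx->num_funcs; i++) {
                            unsigned int n = gctx->funcs[i].num_blocks;

                            if (nblocks < n)
                                    continue;

                            gctx->funcs[i].fn_u.ecb(ctx, dst, src);
                            src += n * CAST6_BLOCK_SIZE;
                            dst += n * CAST6_BLOCK_SIZE;
                            nblocks -= n;
                            break;
                    }
            }
    }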
161 | static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
162 | struct scatterlist *src, unsigned int nbytes) | ||
163 | { | ||
164 | return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes); | ||
165 | } | ||
166 | |||
167 | static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
168 | struct scatterlist *src, unsigned int nbytes) | ||
169 | { | ||
170 | return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes); | ||
171 | } | ||
172 | |||
173 | static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
174 | struct scatterlist *src, unsigned int nbytes) | ||
175 | { | ||
176 | return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__cast6_encrypt), desc, | ||
177 | dst, src, nbytes); | ||
178 | } | ||
179 | |||
180 | static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
181 | struct scatterlist *src, unsigned int nbytes) | ||
182 | { | ||
183 | return glue_cbc_decrypt_128bit(&cast6_dec_cbc, desc, dst, src, | ||
184 | nbytes); | ||
185 | } | ||
186 | |||
187 | static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
188 | struct scatterlist *src, unsigned int nbytes) | ||
189 | { | ||
190 | return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes); | ||
191 | } | ||
192 | |||
193 | static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes) | ||
194 | { | ||
195 | return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS, | ||
196 | NULL, fpu_enabled, nbytes); | ||
197 | } | ||
198 | |||
199 | static inline void cast6_fpu_end(bool fpu_enabled) | ||
200 | { | ||
201 | glue_fpu_end(fpu_enabled); | ||
202 | } | ||
203 | |||
204 | struct crypt_priv { | ||
205 | struct cast6_ctx *ctx; | ||
206 | bool fpu_enabled; | ||
207 | }; | ||
208 | |||
209 | static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes) | ||
210 | { | ||
211 | const unsigned int bsize = CAST6_BLOCK_SIZE; | ||
212 | struct crypt_priv *ctx = priv; | ||
213 | int i; | ||
214 | |||
215 | ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes); | ||
216 | |||
217 | if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) { | ||
218 | cast6_enc_blk_xway(ctx->ctx, srcdst, srcdst); | ||
219 | return; | ||
220 | } | ||
221 | |||
222 | for (i = 0; i < nbytes / bsize; i++, srcdst += bsize) | ||
223 | __cast6_encrypt(ctx->ctx, srcdst, srcdst); | ||
224 | } | ||
225 | |||
226 | static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes) | ||
227 | { | ||
228 | const unsigned int bsize = CAST6_BLOCK_SIZE; | ||
229 | struct crypt_priv *ctx = priv; | ||
230 | int i; | ||
231 | |||
232 | ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes); | ||
233 | |||
234 | if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) { | ||
235 | cast6_dec_blk_xway(ctx->ctx, srcdst, srcdst); | ||
236 | return; | ||
237 | } | ||
238 | |||
239 | for (i = 0; i < nbytes / bsize; i++, srcdst += bsize) | ||
240 | __cast6_decrypt(ctx->ctx, srcdst, srcdst); | ||
241 | } | ||
242 | |||
243 | struct cast6_lrw_ctx { | ||
244 | struct lrw_table_ctx lrw_table; | ||
245 | struct cast6_ctx cast6_ctx; | ||
246 | }; | ||
247 | |||
248 | static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key, | ||
249 | unsigned int keylen) | ||
250 | { | ||
251 | struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm); | ||
252 | int err; | ||
253 | |||
254 | err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE, | ||
255 | &tfm->crt_flags); | ||
256 | if (err) | ||
257 | return err; | ||
258 | |||
259 | return lrw_init_table(&ctx->lrw_table, key + keylen - CAST6_BLOCK_SIZE); | ||
260 | } | ||
261 | |||
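The key passed to lrw_cast6_setkey() is the CAST6 key with the 16-byte LRW tweak key appended, which is also why the lrw(cast6) templates below advertise min/max_keysize = CAST6_{MIN,MAX}_KEY_SIZE + CAST6_BLOCK_SIZE. Illustrative layout:

    /* |<---- keylen - CAST6_BLOCK_SIZE ---->|<--- CAST6_BLOCK_SIZE --->|
     * |          CAST6 cipher key           | LRW tweak key (GF(2^128))|
     */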
262 | static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
263 | struct scatterlist *src, unsigned int nbytes) | ||
264 | { | ||
265 | struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
266 | be128 buf[CAST6_PARALLEL_BLOCKS]; | ||
267 | struct crypt_priv crypt_ctx = { | ||
268 | .ctx = &ctx->cast6_ctx, | ||
269 | .fpu_enabled = false, | ||
270 | }; | ||
271 | struct lrw_crypt_req req = { | ||
272 | .tbuf = buf, | ||
273 | .tbuflen = sizeof(buf), | ||
274 | |||
275 | .table_ctx = &ctx->lrw_table, | ||
276 | .crypt_ctx = &crypt_ctx, | ||
277 | .crypt_fn = encrypt_callback, | ||
278 | }; | ||
279 | int ret; | ||
280 | |||
281 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
282 | ret = lrw_crypt(desc, dst, src, nbytes, &req); | ||
283 | cast6_fpu_end(crypt_ctx.fpu_enabled); | ||
284 | |||
285 | return ret; | ||
286 | } | ||
287 | |||
288 | static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
289 | struct scatterlist *src, unsigned int nbytes) | ||
290 | { | ||
291 | struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
292 | be128 buf[CAST6_PARALLEL_BLOCKS]; | ||
293 | struct crypt_priv crypt_ctx = { | ||
294 | .ctx = &ctx->cast6_ctx, | ||
295 | .fpu_enabled = false, | ||
296 | }; | ||
297 | struct lrw_crypt_req req = { | ||
298 | .tbuf = buf, | ||
299 | .tbuflen = sizeof(buf), | ||
300 | |||
301 | .table_ctx = &ctx->lrw_table, | ||
302 | .crypt_ctx = &crypt_ctx, | ||
303 | .crypt_fn = decrypt_callback, | ||
304 | }; | ||
305 | int ret; | ||
306 | |||
307 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
308 | ret = lrw_crypt(desc, dst, src, nbytes, &req); | ||
309 | cast6_fpu_end(crypt_ctx.fpu_enabled); | ||
310 | |||
311 | return ret; | ||
312 | } | ||
313 | |||
314 | static void lrw_exit_tfm(struct crypto_tfm *tfm) | ||
315 | { | ||
316 | struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm); | ||
317 | |||
318 | lrw_free_table(&ctx->lrw_table); | ||
319 | } | ||
320 | |||
321 | struct cast6_xts_ctx { | ||
322 | struct cast6_ctx tweak_ctx; | ||
323 | struct cast6_ctx crypt_ctx; | ||
324 | }; | ||
325 | |||
326 | static int xts_cast6_setkey(struct crypto_tfm *tfm, const u8 *key, | ||
327 | unsigned int keylen) | ||
328 | { | ||
329 | struct cast6_xts_ctx *ctx = crypto_tfm_ctx(tfm); | ||
330 | u32 *flags = &tfm->crt_flags; | ||
331 | int err; | ||
332 | |||
333 | /* The key consists of two keys of equal size concatenated, | ||
334 | * therefore its length must be even. | ||
335 | */ | ||
336 | if (keylen % 2) { | ||
337 | *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; | ||
338 | return -EINVAL; | ||
339 | } | ||
340 | |||
341 | /* first half of xts-key is for crypt */ | ||
342 | err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags); | ||
343 | if (err) | ||
344 | return err; | ||
345 | |||
346 | /* second half of xts-key is for tweak */ | ||
347 | return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2, | ||
348 | flags); | ||
349 | } | ||
350 | |||
351 | static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
352 | struct scatterlist *src, unsigned int nbytes) | ||
353 | { | ||
354 | struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
355 | be128 buf[CAST6_PARALLEL_BLOCKS]; | ||
356 | struct crypt_priv crypt_ctx = { | ||
357 | .ctx = &ctx->crypt_ctx, | ||
358 | .fpu_enabled = false, | ||
359 | }; | ||
360 | struct xts_crypt_req req = { | ||
361 | .tbuf = buf, | ||
362 | .tbuflen = sizeof(buf), | ||
363 | |||
364 | .tweak_ctx = &ctx->tweak_ctx, | ||
365 | .tweak_fn = XTS_TWEAK_CAST(__cast6_encrypt), | ||
366 | .crypt_ctx = &crypt_ctx, | ||
367 | .crypt_fn = encrypt_callback, | ||
368 | }; | ||
369 | int ret; | ||
370 | |||
371 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
372 | ret = xts_crypt(desc, dst, src, nbytes, &req); | ||
373 | cast6_fpu_end(crypt_ctx.fpu_enabled); | ||
374 | |||
375 | return ret; | ||
376 | } | ||
377 | |||
378 | static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, | ||
379 | struct scatterlist *src, unsigned int nbytes) | ||
380 | { | ||
381 | struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm); | ||
382 | be128 buf[CAST6_PARALLEL_BLOCKS]; | ||
383 | struct crypt_priv crypt_ctx = { | ||
384 | .ctx = &ctx->crypt_ctx, | ||
385 | .fpu_enabled = false, | ||
386 | }; | ||
387 | struct xts_crypt_req req = { | ||
388 | .tbuf = buf, | ||
389 | .tbuflen = sizeof(buf), | ||
390 | |||
391 | .tweak_ctx = &ctx->tweak_ctx, | ||
392 | .tweak_fn = XTS_TWEAK_CAST(__cast6_encrypt), | ||
393 | .crypt_ctx = &crypt_ctx, | ||
394 | .crypt_fn = decrypt_callback, | ||
395 | }; | ||
396 | int ret; | ||
397 | |||
398 | desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; | ||
399 | ret = xts_crypt(desc, dst, src, nbytes, &req); | ||
400 | cast6_fpu_end(crypt_ctx.fpu_enabled); | ||
401 | |||
402 | return ret; | ||
403 | } | ||
404 | |||
405 | static struct crypto_alg cast6_algs[10] = { { | ||
406 | .cra_name = "__ecb-cast6-avx", | ||
407 | .cra_driver_name = "__driver-ecb-cast6-avx", | ||
408 | .cra_priority = 0, | ||
409 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
410 | .cra_blocksize = CAST6_BLOCK_SIZE, | ||
411 | .cra_ctxsize = sizeof(struct cast6_ctx), | ||
412 | .cra_alignmask = 0, | ||
413 | .cra_type = &crypto_blkcipher_type, | ||
414 | .cra_module = THIS_MODULE, | ||
415 | .cra_u = { | ||
416 | .blkcipher = { | ||
417 | .min_keysize = CAST6_MIN_KEY_SIZE, | ||
418 | .max_keysize = CAST6_MAX_KEY_SIZE, | ||
419 | .setkey = cast6_setkey, | ||
420 | .encrypt = ecb_encrypt, | ||
421 | .decrypt = ecb_decrypt, | ||
422 | }, | ||
423 | }, | ||
424 | }, { | ||
425 | .cra_name = "__cbc-cast6-avx", | ||
426 | .cra_driver_name = "__driver-cbc-cast6-avx", | ||
427 | .cra_priority = 0, | ||
428 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
429 | .cra_blocksize = CAST6_BLOCK_SIZE, | ||
430 | .cra_ctxsize = sizeof(struct cast6_ctx), | ||
431 | .cra_alignmask = 0, | ||
432 | .cra_type = &crypto_blkcipher_type, | ||
433 | .cra_module = THIS_MODULE, | ||
434 | .cra_u = { | ||
435 | .blkcipher = { | ||
436 | .min_keysize = CAST6_MIN_KEY_SIZE, | ||
437 | .max_keysize = CAST6_MAX_KEY_SIZE, | ||
438 | .setkey = cast6_setkey, | ||
439 | .encrypt = cbc_encrypt, | ||
440 | .decrypt = cbc_decrypt, | ||
441 | }, | ||
442 | }, | ||
443 | }, { | ||
444 | .cra_name = "__ctr-cast6-avx", | ||
445 | .cra_driver_name = "__driver-ctr-cast6-avx", | ||
446 | .cra_priority = 0, | ||
447 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
448 | .cra_blocksize = 1, | ||
449 | .cra_ctxsize = sizeof(struct cast6_ctx), | ||
450 | .cra_alignmask = 0, | ||
451 | .cra_type = &crypto_blkcipher_type, | ||
452 | .cra_module = THIS_MODULE, | ||
453 | .cra_u = { | ||
454 | .blkcipher = { | ||
455 | .min_keysize = CAST6_MIN_KEY_SIZE, | ||
456 | .max_keysize = CAST6_MAX_KEY_SIZE, | ||
457 | .ivsize = CAST6_BLOCK_SIZE, | ||
458 | .setkey = cast6_setkey, | ||
459 | .encrypt = ctr_crypt, | ||
460 | .decrypt = ctr_crypt, | ||
461 | }, | ||
462 | }, | ||
463 | }, { | ||
464 | .cra_name = "__lrw-cast6-avx", | ||
465 | .cra_driver_name = "__driver-lrw-cast6-avx", | ||
466 | .cra_priority = 0, | ||
467 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
468 | .cra_blocksize = CAST6_BLOCK_SIZE, | ||
469 | .cra_ctxsize = sizeof(struct cast6_lrw_ctx), | ||
470 | .cra_alignmask = 0, | ||
471 | .cra_type = &crypto_blkcipher_type, | ||
472 | .cra_module = THIS_MODULE, | ||
473 | .cra_exit = lrw_exit_tfm, | ||
474 | .cra_u = { | ||
475 | .blkcipher = { | ||
476 | .min_keysize = CAST6_MIN_KEY_SIZE + | ||
477 | CAST6_BLOCK_SIZE, | ||
478 | .max_keysize = CAST6_MAX_KEY_SIZE + | ||
479 | CAST6_BLOCK_SIZE, | ||
480 | .ivsize = CAST6_BLOCK_SIZE, | ||
481 | .setkey = lrw_cast6_setkey, | ||
482 | .encrypt = lrw_encrypt, | ||
483 | .decrypt = lrw_decrypt, | ||
484 | }, | ||
485 | }, | ||
486 | }, { | ||
487 | .cra_name = "__xts-cast6-avx", | ||
488 | .cra_driver_name = "__driver-xts-cast6-avx", | ||
489 | .cra_priority = 0, | ||
490 | .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, | ||
491 | .cra_blocksize = CAST6_BLOCK_SIZE, | ||
492 | .cra_ctxsize = sizeof(struct cast6_xts_ctx), | ||
493 | .cra_alignmask = 0, | ||
494 | .cra_type = &crypto_blkcipher_type, | ||
495 | .cra_module = THIS_MODULE, | ||
496 | .cra_u = { | ||
497 | .blkcipher = { | ||
498 | .min_keysize = CAST6_MIN_KEY_SIZE * 2, | ||
499 | .max_keysize = CAST6_MAX_KEY_SIZE * 2, | ||
500 | .ivsize = CAST6_BLOCK_SIZE, | ||
501 | .setkey = xts_cast6_setkey, | ||
502 | .encrypt = xts_encrypt, | ||
503 | .decrypt = xts_decrypt, | ||
504 | }, | ||
505 | }, | ||
506 | }, { | ||
507 | .cra_name = "ecb(cast6)", | ||
508 | .cra_driver_name = "ecb-cast6-avx", | ||
509 | .cra_priority = 200, | ||
510 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | ||
511 | .cra_blocksize = CAST6_BLOCK_SIZE, | ||
512 | .cra_ctxsize = sizeof(struct async_helper_ctx), | ||
513 | .cra_alignmask = 0, | ||
514 | .cra_type = &crypto_ablkcipher_type, | ||
515 | .cra_module = THIS_MODULE, | ||
516 | .cra_init = ablk_init, | ||
517 | .cra_exit = ablk_exit, | ||
518 | .cra_u = { | ||
519 | .ablkcipher = { | ||
520 | .min_keysize = CAST6_MIN_KEY_SIZE, | ||
521 | .max_keysize = CAST6_MAX_KEY_SIZE, | ||
522 | .setkey = ablk_set_key, | ||
523 | .encrypt = ablk_encrypt, | ||
524 | .decrypt = ablk_decrypt, | ||
525 | }, | ||
526 | }, | ||
527 | }, { | ||
528 | .cra_name = "cbc(cast6)", | ||
529 | .cra_driver_name = "cbc-cast6-avx", | ||
530 | .cra_priority = 200, | ||
531 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | ||
532 | .cra_blocksize = CAST6_BLOCK_SIZE, | ||
533 | .cra_ctxsize = sizeof(struct async_helper_ctx), | ||
534 | .cra_alignmask = 0, | ||
535 | .cra_type = &crypto_ablkcipher_type, | ||
536 | .cra_module = THIS_MODULE, | ||
537 | .cra_init = ablk_init, | ||
538 | .cra_exit = ablk_exit, | ||
539 | .cra_u = { | ||
540 | .ablkcipher = { | ||
541 | .min_keysize = CAST6_MIN_KEY_SIZE, | ||
542 | .max_keysize = CAST6_MAX_KEY_SIZE, | ||
543 | .ivsize = CAST6_BLOCK_SIZE, | ||
544 | .setkey = ablk_set_key, | ||
545 | .encrypt = __ablk_encrypt, | ||
546 | .decrypt = ablk_decrypt, | ||
547 | }, | ||
548 | }, | ||
549 | }, { | ||
550 | .cra_name = "ctr(cast6)", | ||
551 | .cra_driver_name = "ctr-cast6-avx", | ||
552 | .cra_priority = 200, | ||
553 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | ||
554 | .cra_blocksize = 1, | ||
555 | .cra_ctxsize = sizeof(struct async_helper_ctx), | ||
556 | .cra_alignmask = 0, | ||
557 | .cra_type = &crypto_ablkcipher_type, | ||
558 | .cra_module = THIS_MODULE, | ||
559 | .cra_init = ablk_init, | ||
560 | .cra_exit = ablk_exit, | ||
561 | .cra_u = { | ||
562 | .ablkcipher = { | ||
563 | .min_keysize = CAST6_MIN_KEY_SIZE, | ||
564 | .max_keysize = CAST6_MAX_KEY_SIZE, | ||
565 | .ivsize = CAST6_BLOCK_SIZE, | ||
566 | .setkey = ablk_set_key, | ||
567 | .encrypt = ablk_encrypt, | ||
568 | .decrypt = ablk_encrypt, | ||
569 | .geniv = "chainiv", | ||
570 | }, | ||
571 | }, | ||
572 | }, { | ||
573 | .cra_name = "lrw(cast6)", | ||
574 | .cra_driver_name = "lrw-cast6-avx", | ||
575 | .cra_priority = 200, | ||
576 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | ||
577 | .cra_blocksize = CAST6_BLOCK_SIZE, | ||
578 | .cra_ctxsize = sizeof(struct async_helper_ctx), | ||
579 | .cra_alignmask = 0, | ||
580 | .cra_type = &crypto_ablkcipher_type, | ||
581 | .cra_module = THIS_MODULE, | ||
582 | .cra_init = ablk_init, | ||
583 | .cra_exit = ablk_exit, | ||
584 | .cra_u = { | ||
585 | .ablkcipher = { | ||
586 | .min_keysize = CAST6_MIN_KEY_SIZE + | ||
587 | CAST6_BLOCK_SIZE, | ||
588 | .max_keysize = CAST6_MAX_KEY_SIZE + | ||
589 | CAST6_BLOCK_SIZE, | ||
590 | .ivsize = CAST6_BLOCK_SIZE, | ||
591 | .setkey = ablk_set_key, | ||
592 | .encrypt = ablk_encrypt, | ||
593 | .decrypt = ablk_decrypt, | ||
594 | }, | ||
595 | }, | ||
596 | }, { | ||
597 | .cra_name = "xts(cast6)", | ||
598 | .cra_driver_name = "xts-cast6-avx", | ||
599 | .cra_priority = 200, | ||
600 | .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, | ||
601 | .cra_blocksize = CAST6_BLOCK_SIZE, | ||
602 | .cra_ctxsize = sizeof(struct async_helper_ctx), | ||
603 | .cra_alignmask = 0, | ||
604 | .cra_type = &crypto_ablkcipher_type, | ||
605 | .cra_module = THIS_MODULE, | ||
606 | .cra_init = ablk_init, | ||
607 | .cra_exit = ablk_exit, | ||
608 | .cra_u = { | ||
609 | .ablkcipher = { | ||
610 | .min_keysize = CAST6_MIN_KEY_SIZE * 2, | ||
611 | .max_keysize = CAST6_MAX_KEY_SIZE * 2, | ||
612 | .ivsize = CAST6_BLOCK_SIZE, | ||
613 | .setkey = ablk_set_key, | ||
614 | .encrypt = ablk_encrypt, | ||
615 | .decrypt = ablk_decrypt, | ||
616 | }, | ||
617 | }, | ||
618 | } }; | ||
619 | |||
620 | static int __init cast6_init(void) | ||
621 | { | ||
622 | u64 xcr0; | ||
623 | |||
624 | if (!cpu_has_avx || !cpu_has_osxsave) { | ||
625 | pr_info("AVX instructions are not detected.\n"); | ||
626 | return -ENODEV; | ||
627 | } | ||
628 | |||
629 | xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK); | ||
630 | if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) { | ||
631 | pr_info("AVX detected but unusable.\n"); | ||
632 | return -ENODEV; | ||
633 | } | ||
634 | |||
635 | return crypto_register_algs(cast6_algs, ARRAY_SIZE(cast6_algs)); | ||
636 | } | ||
637 | |||
638 | static void __exit cast6_exit(void) | ||
639 | { | ||
640 | crypto_unregister_algs(cast6_algs, ARRAY_SIZE(cast6_algs)); | ||
641 | } | ||
642 | |||
643 | module_init(cast6_init); | ||
644 | module_exit(cast6_exit); | ||
645 | |||
646 | MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized"); | ||
647 | MODULE_LICENSE("GPL"); | ||
648 | MODULE_ALIAS("cast6"); | ||
diff --git a/arch/x86/crypto/ghash-clmulni-intel_glue.c b/arch/x86/crypto/ghash-clmulni-intel_glue.c index b4bf0a63b520..6759dd1135be 100644 --- a/arch/x86/crypto/ghash-clmulni-intel_glue.c +++ b/arch/x86/crypto/ghash-clmulni-intel_glue.c | |||
@@ -150,7 +150,6 @@ static struct shash_alg ghash_alg = { | |||
150 | .cra_blocksize = GHASH_BLOCK_SIZE, | 150 | .cra_blocksize = GHASH_BLOCK_SIZE, |
151 | .cra_ctxsize = sizeof(struct ghash_ctx), | 151 | .cra_ctxsize = sizeof(struct ghash_ctx), |
152 | .cra_module = THIS_MODULE, | 152 | .cra_module = THIS_MODULE, |
153 | .cra_list = LIST_HEAD_INIT(ghash_alg.base.cra_list), | ||
154 | }, | 153 | }, |
155 | }; | 154 | }; |
156 | 155 | ||
@@ -288,7 +287,6 @@ static struct ahash_alg ghash_async_alg = { | |||
288 | .cra_blocksize = GHASH_BLOCK_SIZE, | 287 | .cra_blocksize = GHASH_BLOCK_SIZE, |
289 | .cra_type = &crypto_ahash_type, | 288 | .cra_type = &crypto_ahash_type, |
290 | .cra_module = THIS_MODULE, | 289 | .cra_module = THIS_MODULE, |
291 | .cra_list = LIST_HEAD_INIT(ghash_async_alg.halg.base.cra_list), | ||
292 | .cra_init = ghash_async_init_tfm, | 290 | .cra_init = ghash_async_init_tfm, |
293 | .cra_exit = ghash_async_exit_tfm, | 291 | .cra_exit = ghash_async_exit_tfm, |
294 | }, | 292 | }, |
diff --git a/arch/x86/crypto/glue_helper.c b/arch/x86/crypto/glue_helper.c index 4854f0f31e4f..30b3927bd733 100644 --- a/arch/x86/crypto/glue_helper.c +++ b/arch/x86/crypto/glue_helper.c | |||
@@ -110,7 +110,7 @@ static unsigned int __glue_cbc_encrypt_128bit(const common_glue_func_t fn, | |||
110 | nbytes -= bsize; | 110 | nbytes -= bsize; |
111 | } while (nbytes >= bsize); | 111 | } while (nbytes >= bsize); |
112 | 112 | ||
113 | u128_xor((u128 *)walk->iv, (u128 *)walk->iv, iv); | 113 | *(u128 *)walk->iv = *iv; |
114 | return nbytes; | 114 | return nbytes; |
115 | } | 115 | } |
116 | 116 | ||
diff --git a/arch/x86/crypto/salsa20_glue.c b/arch/x86/crypto/salsa20_glue.c index bccb76d80987..a3a3c0205c16 100644 --- a/arch/x86/crypto/salsa20_glue.c +++ b/arch/x86/crypto/salsa20_glue.c | |||
@@ -97,7 +97,6 @@ static struct crypto_alg alg = { | |||
97 | .cra_ctxsize = sizeof(struct salsa20_ctx), | 97 | .cra_ctxsize = sizeof(struct salsa20_ctx), |
98 | .cra_alignmask = 3, | 98 | .cra_alignmask = 3, |
99 | .cra_module = THIS_MODULE, | 99 | .cra_module = THIS_MODULE, |
100 | .cra_list = LIST_HEAD_INIT(alg.cra_list), | ||
101 | .cra_u = { | 100 | .cra_u = { |
102 | .blkcipher = { | 101 | .blkcipher = { |
103 | .setkey = setkey, | 102 | .setkey = setkey, |
diff --git a/arch/x86/crypto/serpent_avx_glue.c b/arch/x86/crypto/serpent_avx_glue.c index b36bdac237eb..3f543a04cf1e 100644 --- a/arch/x86/crypto/serpent_avx_glue.c +++ b/arch/x86/crypto/serpent_avx_glue.c | |||
@@ -390,7 +390,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
390 | .cra_alignmask = 0, | 390 | .cra_alignmask = 0, |
391 | .cra_type = &crypto_blkcipher_type, | 391 | .cra_type = &crypto_blkcipher_type, |
392 | .cra_module = THIS_MODULE, | 392 | .cra_module = THIS_MODULE, |
393 | .cra_list = LIST_HEAD_INIT(serpent_algs[0].cra_list), | ||
394 | .cra_u = { | 393 | .cra_u = { |
395 | .blkcipher = { | 394 | .blkcipher = { |
396 | .min_keysize = SERPENT_MIN_KEY_SIZE, | 395 | .min_keysize = SERPENT_MIN_KEY_SIZE, |
@@ -410,7 +409,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
410 | .cra_alignmask = 0, | 409 | .cra_alignmask = 0, |
411 | .cra_type = &crypto_blkcipher_type, | 410 | .cra_type = &crypto_blkcipher_type, |
412 | .cra_module = THIS_MODULE, | 411 | .cra_module = THIS_MODULE, |
413 | .cra_list = LIST_HEAD_INIT(serpent_algs[1].cra_list), | ||
414 | .cra_u = { | 412 | .cra_u = { |
415 | .blkcipher = { | 413 | .blkcipher = { |
416 | .min_keysize = SERPENT_MIN_KEY_SIZE, | 414 | .min_keysize = SERPENT_MIN_KEY_SIZE, |
@@ -430,7 +428,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
430 | .cra_alignmask = 0, | 428 | .cra_alignmask = 0, |
431 | .cra_type = &crypto_blkcipher_type, | 429 | .cra_type = &crypto_blkcipher_type, |
432 | .cra_module = THIS_MODULE, | 430 | .cra_module = THIS_MODULE, |
433 | .cra_list = LIST_HEAD_INIT(serpent_algs[2].cra_list), | ||
434 | .cra_u = { | 431 | .cra_u = { |
435 | .blkcipher = { | 432 | .blkcipher = { |
436 | .min_keysize = SERPENT_MIN_KEY_SIZE, | 433 | .min_keysize = SERPENT_MIN_KEY_SIZE, |
@@ -451,7 +448,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
451 | .cra_alignmask = 0, | 448 | .cra_alignmask = 0, |
452 | .cra_type = &crypto_blkcipher_type, | 449 | .cra_type = &crypto_blkcipher_type, |
453 | .cra_module = THIS_MODULE, | 450 | .cra_module = THIS_MODULE, |
454 | .cra_list = LIST_HEAD_INIT(serpent_algs[3].cra_list), | ||
455 | .cra_exit = lrw_exit_tfm, | 451 | .cra_exit = lrw_exit_tfm, |
456 | .cra_u = { | 452 | .cra_u = { |
457 | .blkcipher = { | 453 | .blkcipher = { |
@@ -475,7 +471,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
475 | .cra_alignmask = 0, | 471 | .cra_alignmask = 0, |
476 | .cra_type = &crypto_blkcipher_type, | 472 | .cra_type = &crypto_blkcipher_type, |
477 | .cra_module = THIS_MODULE, | 473 | .cra_module = THIS_MODULE, |
478 | .cra_list = LIST_HEAD_INIT(serpent_algs[4].cra_list), | ||
479 | .cra_u = { | 474 | .cra_u = { |
480 | .blkcipher = { | 475 | .blkcipher = { |
481 | .min_keysize = SERPENT_MIN_KEY_SIZE * 2, | 476 | .min_keysize = SERPENT_MIN_KEY_SIZE * 2, |
@@ -496,7 +491,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
496 | .cra_alignmask = 0, | 491 | .cra_alignmask = 0, |
497 | .cra_type = &crypto_ablkcipher_type, | 492 | .cra_type = &crypto_ablkcipher_type, |
498 | .cra_module = THIS_MODULE, | 493 | .cra_module = THIS_MODULE, |
499 | .cra_list = LIST_HEAD_INIT(serpent_algs[5].cra_list), | ||
500 | .cra_init = ablk_init, | 494 | .cra_init = ablk_init, |
501 | .cra_exit = ablk_exit, | 495 | .cra_exit = ablk_exit, |
502 | .cra_u = { | 496 | .cra_u = { |
@@ -518,7 +512,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
518 | .cra_alignmask = 0, | 512 | .cra_alignmask = 0, |
519 | .cra_type = &crypto_ablkcipher_type, | 513 | .cra_type = &crypto_ablkcipher_type, |
520 | .cra_module = THIS_MODULE, | 514 | .cra_module = THIS_MODULE, |
521 | .cra_list = LIST_HEAD_INIT(serpent_algs[6].cra_list), | ||
522 | .cra_init = ablk_init, | 515 | .cra_init = ablk_init, |
523 | .cra_exit = ablk_exit, | 516 | .cra_exit = ablk_exit, |
524 | .cra_u = { | 517 | .cra_u = { |
@@ -541,7 +534,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
541 | .cra_alignmask = 0, | 534 | .cra_alignmask = 0, |
542 | .cra_type = &crypto_ablkcipher_type, | 535 | .cra_type = &crypto_ablkcipher_type, |
543 | .cra_module = THIS_MODULE, | 536 | .cra_module = THIS_MODULE, |
544 | .cra_list = LIST_HEAD_INIT(serpent_algs[7].cra_list), | ||
545 | .cra_init = ablk_init, | 537 | .cra_init = ablk_init, |
546 | .cra_exit = ablk_exit, | 538 | .cra_exit = ablk_exit, |
547 | .cra_u = { | 539 | .cra_u = { |
@@ -565,7 +557,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
565 | .cra_alignmask = 0, | 557 | .cra_alignmask = 0, |
566 | .cra_type = &crypto_ablkcipher_type, | 558 | .cra_type = &crypto_ablkcipher_type, |
567 | .cra_module = THIS_MODULE, | 559 | .cra_module = THIS_MODULE, |
568 | .cra_list = LIST_HEAD_INIT(serpent_algs[8].cra_list), | ||
569 | .cra_init = ablk_init, | 560 | .cra_init = ablk_init, |
570 | .cra_exit = ablk_exit, | 561 | .cra_exit = ablk_exit, |
571 | .cra_u = { | 562 | .cra_u = { |
@@ -590,7 +581,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
590 | .cra_alignmask = 0, | 581 | .cra_alignmask = 0, |
591 | .cra_type = &crypto_ablkcipher_type, | 582 | .cra_type = &crypto_ablkcipher_type, |
592 | .cra_module = THIS_MODULE, | 583 | .cra_module = THIS_MODULE, |
593 | .cra_list = LIST_HEAD_INIT(serpent_algs[9].cra_list), | ||
594 | .cra_init = ablk_init, | 584 | .cra_init = ablk_init, |
595 | .cra_exit = ablk_exit, | 585 | .cra_exit = ablk_exit, |
596 | .cra_u = { | 586 | .cra_u = { |
diff --git a/arch/x86/crypto/serpent_sse2_glue.c b/arch/x86/crypto/serpent_sse2_glue.c index d679c8675f4a..9107a9908c41 100644 --- a/arch/x86/crypto/serpent_sse2_glue.c +++ b/arch/x86/crypto/serpent_sse2_glue.c | |||
@@ -393,7 +393,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
393 | .cra_alignmask = 0, | 393 | .cra_alignmask = 0, |
394 | .cra_type = &crypto_blkcipher_type, | 394 | .cra_type = &crypto_blkcipher_type, |
395 | .cra_module = THIS_MODULE, | 395 | .cra_module = THIS_MODULE, |
396 | .cra_list = LIST_HEAD_INIT(serpent_algs[0].cra_list), | ||
397 | .cra_u = { | 396 | .cra_u = { |
398 | .blkcipher = { | 397 | .blkcipher = { |
399 | .min_keysize = SERPENT_MIN_KEY_SIZE, | 398 | .min_keysize = SERPENT_MIN_KEY_SIZE, |
@@ -413,7 +412,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
413 | .cra_alignmask = 0, | 412 | .cra_alignmask = 0, |
414 | .cra_type = &crypto_blkcipher_type, | 413 | .cra_type = &crypto_blkcipher_type, |
415 | .cra_module = THIS_MODULE, | 414 | .cra_module = THIS_MODULE, |
416 | .cra_list = LIST_HEAD_INIT(serpent_algs[1].cra_list), | ||
417 | .cra_u = { | 415 | .cra_u = { |
418 | .blkcipher = { | 416 | .blkcipher = { |
419 | .min_keysize = SERPENT_MIN_KEY_SIZE, | 417 | .min_keysize = SERPENT_MIN_KEY_SIZE, |
@@ -433,7 +431,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
433 | .cra_alignmask = 0, | 431 | .cra_alignmask = 0, |
434 | .cra_type = &crypto_blkcipher_type, | 432 | .cra_type = &crypto_blkcipher_type, |
435 | .cra_module = THIS_MODULE, | 433 | .cra_module = THIS_MODULE, |
436 | .cra_list = LIST_HEAD_INIT(serpent_algs[2].cra_list), | ||
437 | .cra_u = { | 434 | .cra_u = { |
438 | .blkcipher = { | 435 | .blkcipher = { |
439 | .min_keysize = SERPENT_MIN_KEY_SIZE, | 436 | .min_keysize = SERPENT_MIN_KEY_SIZE, |
@@ -454,7 +451,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
454 | .cra_alignmask = 0, | 451 | .cra_alignmask = 0, |
455 | .cra_type = &crypto_blkcipher_type, | 452 | .cra_type = &crypto_blkcipher_type, |
456 | .cra_module = THIS_MODULE, | 453 | .cra_module = THIS_MODULE, |
457 | .cra_list = LIST_HEAD_INIT(serpent_algs[3].cra_list), | ||
458 | .cra_exit = lrw_exit_tfm, | 454 | .cra_exit = lrw_exit_tfm, |
459 | .cra_u = { | 455 | .cra_u = { |
460 | .blkcipher = { | 456 | .blkcipher = { |
@@ -478,7 +474,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
478 | .cra_alignmask = 0, | 474 | .cra_alignmask = 0, |
479 | .cra_type = &crypto_blkcipher_type, | 475 | .cra_type = &crypto_blkcipher_type, |
480 | .cra_module = THIS_MODULE, | 476 | .cra_module = THIS_MODULE, |
481 | .cra_list = LIST_HEAD_INIT(serpent_algs[4].cra_list), | ||
482 | .cra_u = { | 477 | .cra_u = { |
483 | .blkcipher = { | 478 | .blkcipher = { |
484 | .min_keysize = SERPENT_MIN_KEY_SIZE * 2, | 479 | .min_keysize = SERPENT_MIN_KEY_SIZE * 2, |
@@ -499,7 +494,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
499 | .cra_alignmask = 0, | 494 | .cra_alignmask = 0, |
500 | .cra_type = &crypto_ablkcipher_type, | 495 | .cra_type = &crypto_ablkcipher_type, |
501 | .cra_module = THIS_MODULE, | 496 | .cra_module = THIS_MODULE, |
502 | .cra_list = LIST_HEAD_INIT(serpent_algs[5].cra_list), | ||
503 | .cra_init = ablk_init, | 497 | .cra_init = ablk_init, |
504 | .cra_exit = ablk_exit, | 498 | .cra_exit = ablk_exit, |
505 | .cra_u = { | 499 | .cra_u = { |
@@ -521,7 +515,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
521 | .cra_alignmask = 0, | 515 | .cra_alignmask = 0, |
522 | .cra_type = &crypto_ablkcipher_type, | 516 | .cra_type = &crypto_ablkcipher_type, |
523 | .cra_module = THIS_MODULE, | 517 | .cra_module = THIS_MODULE, |
524 | .cra_list = LIST_HEAD_INIT(serpent_algs[6].cra_list), | ||
525 | .cra_init = ablk_init, | 518 | .cra_init = ablk_init, |
526 | .cra_exit = ablk_exit, | 519 | .cra_exit = ablk_exit, |
527 | .cra_u = { | 520 | .cra_u = { |
@@ -544,7 +537,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
544 | .cra_alignmask = 0, | 537 | .cra_alignmask = 0, |
545 | .cra_type = &crypto_ablkcipher_type, | 538 | .cra_type = &crypto_ablkcipher_type, |
546 | .cra_module = THIS_MODULE, | 539 | .cra_module = THIS_MODULE, |
547 | .cra_list = LIST_HEAD_INIT(serpent_algs[7].cra_list), | ||
548 | .cra_init = ablk_init, | 540 | .cra_init = ablk_init, |
549 | .cra_exit = ablk_exit, | 541 | .cra_exit = ablk_exit, |
550 | .cra_u = { | 542 | .cra_u = { |
@@ -568,7 +560,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
568 | .cra_alignmask = 0, | 560 | .cra_alignmask = 0, |
569 | .cra_type = &crypto_ablkcipher_type, | 561 | .cra_type = &crypto_ablkcipher_type, |
570 | .cra_module = THIS_MODULE, | 562 | .cra_module = THIS_MODULE, |
571 | .cra_list = LIST_HEAD_INIT(serpent_algs[8].cra_list), | ||
572 | .cra_init = ablk_init, | 563 | .cra_init = ablk_init, |
573 | .cra_exit = ablk_exit, | 564 | .cra_exit = ablk_exit, |
574 | .cra_u = { | 565 | .cra_u = { |
@@ -593,7 +584,6 @@ static struct crypto_alg serpent_algs[10] = { { | |||
593 | .cra_alignmask = 0, | 584 | .cra_alignmask = 0, |
594 | .cra_type = &crypto_ablkcipher_type, | 585 | .cra_type = &crypto_ablkcipher_type, |
595 | .cra_module = THIS_MODULE, | 586 | .cra_module = THIS_MODULE, |
596 | .cra_list = LIST_HEAD_INIT(serpent_algs[9].cra_list), | ||
597 | .cra_init = ablk_init, | 587 | .cra_init = ablk_init, |
598 | .cra_exit = ablk_exit, | 588 | .cra_exit = ablk_exit, |
599 | .cra_u = { | 589 | .cra_u = { |
diff --git a/arch/x86/crypto/twofish-avx-x86_64-asm_64.S b/arch/x86/crypto/twofish-avx-x86_64-asm_64.S index 35f45574390d..1585abb13dde 100644 --- a/arch/x86/crypto/twofish-avx-x86_64-asm_64.S +++ b/arch/x86/crypto/twofish-avx-x86_64-asm_64.S | |||
@@ -4,6 +4,8 @@ | |||
4 | * Copyright (C) 2012 Johannes Goetzfried | 4 | * Copyright (C) 2012 Johannes Goetzfried |
5 | * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de> | 5 | * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de> |
6 | * | 6 | * |
7 | * Copyright © 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi> | ||
8 | * | ||
7 | * This program is free software; you can redistribute it and/or modify | 9 | * This program is free software; you can redistribute it and/or modify |
8 | * it under the terms of the GNU General Public License as published by | 10 | * it under the terms of the GNU General Public License as published by |
9 | * the Free Software Foundation; either version 2 of the License, or | 11 | * the Free Software Foundation; either version 2 of the License, or |
@@ -47,16 +49,22 @@ | |||
47 | #define RC2 %xmm6 | 49 | #define RC2 %xmm6 |
48 | #define RD2 %xmm7 | 50 | #define RD2 %xmm7 |
49 | 51 | ||
50 | #define RX %xmm8 | 52 | #define RX0 %xmm8 |
51 | #define RY %xmm9 | 53 | #define RY0 %xmm9 |
54 | |||
55 | #define RX1 %xmm10 | ||
56 | #define RY1 %xmm11 | ||
52 | 57 | ||
53 | #define RK1 %xmm10 | 58 | #define RK1 %xmm12 |
54 | #define RK2 %xmm11 | 59 | #define RK2 %xmm13 |
55 | 60 | ||
56 | #define RID1 %rax | 61 | #define RT %xmm14 |
57 | #define RID1b %al | 62 | #define RR %xmm15 |
58 | #define RID2 %rbx | 63 | |
59 | #define RID2b %bl | 64 | #define RID1 %rbp |
65 | #define RID1d %ebp | ||
66 | #define RID2 %rsi | ||
67 | #define RID2d %esi | ||
60 | 68 | ||
61 | #define RGI1 %rdx | 69 | #define RGI1 %rdx |
62 | #define RGI1bl %dl | 70 | #define RGI1bl %dl |
@@ -65,6 +73,13 @@ | |||
65 | #define RGI2bl %cl | 73 | #define RGI2bl %cl |
66 | #define RGI2bh %ch | 74 | #define RGI2bh %ch |
67 | 75 | ||
76 | #define RGI3 %rax | ||
77 | #define RGI3bl %al | ||
78 | #define RGI3bh %ah | ||
79 | #define RGI4 %rbx | ||
80 | #define RGI4bl %bl | ||
81 | #define RGI4bh %bh | ||
82 | |||
68 | #define RGS1 %r8 | 83 | #define RGS1 %r8 |
69 | #define RGS1d %r8d | 84 | #define RGS1d %r8d |
70 | #define RGS2 %r9 | 85 | #define RGS2 %r9 |
@@ -73,89 +88,123 @@ | |||
73 | #define RGS3d %r10d | 88 | #define RGS3d %r10d |
74 | 89 | ||
75 | 90 | ||
76 | #define lookup_32bit(t0, t1, t2, t3, src, dst) \ | 91 | #define lookup_32bit(t0, t1, t2, t3, src, dst, interleave_op, il_reg) \ |
77 | movb src ## bl, RID1b; \ | 92 | movzbl src ## bl, RID1d; \ |
78 | movb src ## bh, RID2b; \ | 93 | movzbl src ## bh, RID2d; \ |
79 | movl t0(CTX, RID1, 4), dst ## d; \ | ||
80 | xorl t1(CTX, RID2, 4), dst ## d; \ | ||
81 | shrq $16, src; \ | 94 | shrq $16, src; \ |
82 | movb src ## bl, RID1b; \ | 95 | movl t0(CTX, RID1, 4), dst ## d; \ |
83 | movb src ## bh, RID2b; \ | 96 | movl t1(CTX, RID2, 4), RID2d; \ |
97 | movzbl src ## bl, RID1d; \ | ||
98 | xorl RID2d, dst ## d; \ | ||
99 | movzbl src ## bh, RID2d; \ | ||
100 | interleave_op(il_reg); \ | ||
84 | xorl t2(CTX, RID1, 4), dst ## d; \ | 101 | xorl t2(CTX, RID1, 4), dst ## d; \ |
85 | xorl t3(CTX, RID2, 4), dst ## d; | 102 | xorl t3(CTX, RID2, 4), dst ## d; |
86 | 103 | ||
87 | #define G(a, x, t0, t1, t2, t3) \ | 104 | #define dummy(d) /* do nothing */ |
88 | vmovq a, RGI1; \ | 105 | |
89 | vpsrldq $8, a, x; \ | 106 | #define shr_next(reg) \ |
90 | vmovq x, RGI2; \ | 107 | shrq $16, reg; |
108 | |||
109 | #define G(gi1, gi2, x, t0, t1, t2, t3) \ | ||
110 | lookup_32bit(t0, t1, t2, t3, ##gi1, RGS1, shr_next, ##gi1); \ | ||
111 | lookup_32bit(t0, t1, t2, t3, ##gi2, RGS3, shr_next, ##gi2); \ | ||
112 | \ | ||
113 | lookup_32bit(t0, t1, t2, t3, ##gi1, RGS2, dummy, none); \ | ||
114 | shlq $32, RGS2; \ | ||
115 | orq RGS1, RGS2; \ | ||
116 | lookup_32bit(t0, t1, t2, t3, ##gi2, RGS1, dummy, none); \ | ||
117 | shlq $32, RGS1; \ | ||
118 | orq RGS1, RGS3; | ||
119 | |||
120 | #define round_head_2(a, b, x1, y1, x2, y2) \ | ||
121 | vmovq b ## 1, RGI3; \ | ||
122 | vpextrq $1, b ## 1, RGI4; \ | ||
91 | \ | 123 | \ |
92 | lookup_32bit(t0, t1, t2, t3, RGI1, RGS1); \ | 124 | G(RGI1, RGI2, x1, s0, s1, s2, s3); \ |
93 | shrq $16, RGI1; \ | 125 | vmovq a ## 2, RGI1; \ |
94 | lookup_32bit(t0, t1, t2, t3, RGI1, RGS2); \ | 126 | vpextrq $1, a ## 2, RGI2; \ |
95 | shlq $32, RGS2; \ | 127 | vmovq RGS2, x1; \ |
96 | orq RGS1, RGS2; \ | 128 | vpinsrq $1, RGS3, x1, x1; \ |
97 | \ | 129 | \ |
98 | lookup_32bit(t0, t1, t2, t3, RGI2, RGS1); \ | 130 | G(RGI3, RGI4, y1, s1, s2, s3, s0); \ |
99 | shrq $16, RGI2; \ | 131 | vmovq b ## 2, RGI3; \ |
100 | lookup_32bit(t0, t1, t2, t3, RGI2, RGS3); \ | 132 | vpextrq $1, b ## 2, RGI4; \ |
101 | shlq $32, RGS3; \ | 133 | vmovq RGS2, y1; \ |
102 | orq RGS1, RGS3; \ | 134 | vpinsrq $1, RGS3, y1, y1; \ |
103 | \ | 135 | \ |
104 | vmovq RGS2, x; \ | 136 | G(RGI1, RGI2, x2, s0, s1, s2, s3); \ |
105 | vpinsrq $1, RGS3, x, x; | 137 | vmovq RGS2, x2; \ |
138 | vpinsrq $1, RGS3, x2, x2; \ | ||
139 | \ | ||
140 | G(RGI3, RGI4, y2, s1, s2, s3, s0); \ | ||
141 | vmovq RGS2, y2; \ | ||
142 | vpinsrq $1, RGS3, y2, y2; | ||
106 | 143 | ||
107 | #define encround(a, b, c, d, x, y) \ | 144 | #define encround_tail(a, b, c, d, x, y, prerotate) \ |
108 | G(a, x, s0, s1, s2, s3); \ | ||
109 | G(b, y, s1, s2, s3, s0); \ | ||
110 | vpaddd x, y, x; \ | 145 | vpaddd x, y, x; \ |
146 | vpaddd x, RK1, RT;\ | ||
147 | prerotate(b); \ | ||
148 | vpxor RT, c, c; \ | ||
111 | vpaddd y, x, y; \ | 149 | vpaddd y, x, y; \ |
112 | vpaddd x, RK1, x; \ | ||
113 | vpaddd y, RK2, y; \ | 150 | vpaddd y, RK2, y; \ |
114 | vpxor x, c, c; \ | 151 | vpsrld $1, c, RT; \ |
115 | vpsrld $1, c, x; \ | ||
116 | vpslld $(32 - 1), c, c; \ | 152 | vpslld $(32 - 1), c, c; \ |
117 | vpor c, x, c; \ | 153 | vpor c, RT, c; \ |
118 | vpslld $1, d, x; \ | 154 | vpxor d, y, d; \ |
119 | vpsrld $(32 - 1), d, d; \ | 155 | |
120 | vpor d, x, d; \ | 156 | #define decround_tail(a, b, c, d, x, y, prerotate) \ |
121 | vpxor d, y, d; | ||
122 | |||
123 | #define decround(a, b, c, d, x, y) \ | ||
124 | G(a, x, s0, s1, s2, s3); \ | ||
125 | G(b, y, s1, s2, s3, s0); \ | ||
126 | vpaddd x, y, x; \ | 157 | vpaddd x, y, x; \ |
158 | vpaddd x, RK1, RT;\ | ||
159 | prerotate(a); \ | ||
160 | vpxor RT, c, c; \ | ||
127 | vpaddd y, x, y; \ | 161 | vpaddd y, x, y; \ |
128 | vpaddd y, RK2, y; \ | 162 | vpaddd y, RK2, y; \ |
129 | vpxor d, y, d; \ | 163 | vpxor d, y, d; \ |
130 | vpsrld $1, d, y; \ | 164 | vpsrld $1, d, y; \ |
131 | vpslld $(32 - 1), d, d; \ | 165 | vpslld $(32 - 1), d, d; \ |
132 | vpor d, y, d; \ | 166 | vpor d, y, d; \ |
133 | vpslld $1, c, y; \ | 167 | |
134 | vpsrld $(32 - 1), c, c; \ | 168 | #define rotate_1l(x) \ |
135 | vpor c, y, c; \ | 169 | vpslld $1, x, RR; \ |
136 | vpaddd x, RK1, x; \ | 170 | vpsrld $(32 - 1), x, x; \ |
137 | vpxor x, c, c; | 171 | vpor x, RR, x; |
138 | 172 | ||
139 | #define encrypt_round(n, a, b, c, d) \ | 173 | #define preload_rgi(c) \ |
140 | vbroadcastss (k+4*(2*(n)))(CTX), RK1; \ | 174 | vmovq c, RGI1; \ |
141 | vbroadcastss (k+4*(2*(n)+1))(CTX), RK2; \ | 175 | vpextrq $1, c, RGI2; |
142 | encround(a ## 1, b ## 1, c ## 1, d ## 1, RX, RY); \ | 176 | |
143 | encround(a ## 2, b ## 2, c ## 2, d ## 2, RX, RY); | 177 | #define encrypt_round(n, a, b, c, d, preload, prerotate) \ |
144 | 178 | vbroadcastss (k+4*(2*(n)))(CTX), RK1; \ | |
145 | #define decrypt_round(n, a, b, c, d) \ | 179 | vbroadcastss (k+4*(2*(n)+1))(CTX), RK2; \ |
146 | vbroadcastss (k+4*(2*(n)))(CTX), RK1; \ | 180 | round_head_2(a, b, RX0, RY0, RX1, RY1); \ |
147 | vbroadcastss (k+4*(2*(n)+1))(CTX), RK2; \ | 181 | encround_tail(a ## 1, b ## 1, c ## 1, d ## 1, RX0, RY0, prerotate); \ |
148 | decround(a ## 1, b ## 1, c ## 1, d ## 1, RX, RY); \ | 182 | preload(c ## 1); \ |
149 | decround(a ## 2, b ## 2, c ## 2, d ## 2, RX, RY); | 183 | encround_tail(a ## 2, b ## 2, c ## 2, d ## 2, RX1, RY1, prerotate); |
184 | |||
185 | #define decrypt_round(n, a, b, c, d, preload, prerotate) \ | ||
186 | vbroadcastss (k+4*(2*(n)))(CTX), RK1; \ | ||
187 | vbroadcastss (k+4*(2*(n)+1))(CTX), RK2; \ | ||
188 | round_head_2(a, b, RX0, RY0, RX1, RY1); \ | ||
189 | decround_tail(a ## 1, b ## 1, c ## 1, d ## 1, RX0, RY0, prerotate); \ | ||
190 | preload(c ## 1); \ | ||
191 | decround_tail(a ## 2, b ## 2, c ## 2, d ## 2, RX1, RY1, prerotate); | ||
150 | 192 | ||
151 | #define encrypt_cycle(n) \ | 193 | #define encrypt_cycle(n) \ |
152 | encrypt_round((2*n), RA, RB, RC, RD); \ | 194 | encrypt_round((2*n), RA, RB, RC, RD, preload_rgi, rotate_1l); \ |
153 | encrypt_round(((2*n) + 1), RC, RD, RA, RB); | 195 | encrypt_round(((2*n) + 1), RC, RD, RA, RB, preload_rgi, rotate_1l); |
196 | |||
197 | #define encrypt_cycle_last(n) \ | ||
198 | encrypt_round((2*n), RA, RB, RC, RD, preload_rgi, rotate_1l); \ | ||
199 | encrypt_round(((2*n) + 1), RC, RD, RA, RB, dummy, dummy); | ||
154 | 200 | ||
155 | #define decrypt_cycle(n) \ | 201 | #define decrypt_cycle(n) \ |
156 | decrypt_round(((2*n) + 1), RC, RD, RA, RB); \ | 202 | decrypt_round(((2*n) + 1), RC, RD, RA, RB, preload_rgi, rotate_1l); \ |
157 | decrypt_round((2*n), RA, RB, RC, RD); | 203 | decrypt_round((2*n), RA, RB, RC, RD, preload_rgi, rotate_1l); |
158 | 204 | ||
205 | #define decrypt_cycle_last(n) \ | ||
206 | decrypt_round(((2*n) + 1), RC, RD, RA, RB, preload_rgi, rotate_1l); \ | ||
207 | decrypt_round((2*n), RA, RB, RC, RD, dummy, dummy); | ||
159 | 208 | ||
160 | #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ | 209 | #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ |
161 | vpunpckldq x1, x0, t0; \ | 210 | vpunpckldq x1, x0, t0; \ |
@@ -216,17 +265,20 @@ __twofish_enc_blk_8way: | |||
216 | * %rcx: bool, if true: xor output | 265 | * %rcx: bool, if true: xor output |
217 | */ | 266 | */ |
218 | 267 | ||
268 | pushq %rbp; | ||
219 | pushq %rbx; | 269 | pushq %rbx; |
220 | pushq %rcx; | 270 | pushq %rcx; |
221 | 271 | ||
222 | vmovdqu w(CTX), RK1; | 272 | vmovdqu w(CTX), RK1; |
223 | 273 | ||
224 | leaq (4*4*4)(%rdx), %rax; | 274 | leaq (4*4*4)(%rdx), %rax; |
225 | inpack_blocks(%rdx, RA1, RB1, RC1, RD1, RK1, RX, RY, RK2); | 275 | inpack_blocks(%rdx, RA1, RB1, RC1, RD1, RK1, RX0, RY0, RK2); |
226 | inpack_blocks(%rax, RA2, RB2, RC2, RD2, RK1, RX, RY, RK2); | 276 | preload_rgi(RA1); |
277 | rotate_1l(RD1); | ||
278 | inpack_blocks(%rax, RA2, RB2, RC2, RD2, RK1, RX0, RY0, RK2); | ||
279 | rotate_1l(RD2); | ||
227 | 280 | ||
228 | xorq RID1, RID1; | 281 | movq %rsi, %r11; |
229 | xorq RID2, RID2; | ||
230 | 282 | ||
231 | encrypt_cycle(0); | 283 | encrypt_cycle(0); |
232 | encrypt_cycle(1); | 284 | encrypt_cycle(1); |
@@ -235,26 +287,27 @@ __twofish_enc_blk_8way: | |||
235 | encrypt_cycle(4); | 287 | encrypt_cycle(4); |
236 | encrypt_cycle(5); | 288 | encrypt_cycle(5); |
237 | encrypt_cycle(6); | 289 | encrypt_cycle(6); |
238 | encrypt_cycle(7); | 290 | encrypt_cycle_last(7); |
239 | 291 | ||
240 | vmovdqu (w+4*4)(CTX), RK1; | 292 | vmovdqu (w+4*4)(CTX), RK1; |
241 | 293 | ||
242 | popq %rcx; | 294 | popq %rcx; |
243 | popq %rbx; | 295 | popq %rbx; |
296 | popq %rbp; | ||
244 | 297 | ||
245 | leaq (4*4*4)(%rsi), %rax; | 298 | leaq (4*4*4)(%r11), %rax; |
246 | 299 | ||
247 | testb %cl, %cl; | 300 | testb %cl, %cl; |
248 | jnz __enc_xor8; | 301 | jnz __enc_xor8; |
249 | 302 | ||
250 | outunpack_blocks(%rsi, RC1, RD1, RA1, RB1, RK1, RX, RY, RK2); | 303 | outunpack_blocks(%r11, RC1, RD1, RA1, RB1, RK1, RX0, RY0, RK2); |
251 | outunpack_blocks(%rax, RC2, RD2, RA2, RB2, RK1, RX, RY, RK2); | 304 | outunpack_blocks(%rax, RC2, RD2, RA2, RB2, RK1, RX0, RY0, RK2); |
252 | 305 | ||
253 | ret; | 306 | ret; |
254 | 307 | ||
255 | __enc_xor8: | 308 | __enc_xor8: |
256 | outunpack_xor_blocks(%rsi, RC1, RD1, RA1, RB1, RK1, RX, RY, RK2); | 309 | outunpack_xor_blocks(%r11, RC1, RD1, RA1, RB1, RK1, RX0, RY0, RK2); |
257 | outunpack_xor_blocks(%rax, RC2, RD2, RA2, RB2, RK1, RX, RY, RK2); | 310 | outunpack_xor_blocks(%rax, RC2, RD2, RA2, RB2, RK1, RX0, RY0, RK2); |
258 | 311 | ||
259 | ret; | 312 | ret; |
260 | 313 | ||
@@ -269,16 +322,19 @@ twofish_dec_blk_8way: | |||
269 | * %rdx: src | 322 | * %rdx: src |
270 | */ | 323 | */ |
271 | 324 | ||
325 | pushq %rbp; | ||
272 | pushq %rbx; | 326 | pushq %rbx; |
273 | 327 | ||
274 | vmovdqu (w+4*4)(CTX), RK1; | 328 | vmovdqu (w+4*4)(CTX), RK1; |
275 | 329 | ||
276 | leaq (4*4*4)(%rdx), %rax; | 330 | leaq (4*4*4)(%rdx), %rax; |
277 | inpack_blocks(%rdx, RC1, RD1, RA1, RB1, RK1, RX, RY, RK2); | 331 | inpack_blocks(%rdx, RC1, RD1, RA1, RB1, RK1, RX0, RY0, RK2); |
278 | inpack_blocks(%rax, RC2, RD2, RA2, RB2, RK1, RX, RY, RK2); | 332 | preload_rgi(RC1); |
333 | rotate_1l(RA1); | ||
334 | inpack_blocks(%rax, RC2, RD2, RA2, RB2, RK1, RX0, RY0, RK2); | ||
335 | rotate_1l(RA2); | ||
279 | 336 | ||
280 | xorq RID1, RID1; | 337 | movq %rsi, %r11; |
281 | xorq RID2, RID2; | ||
282 | 338 | ||
283 | decrypt_cycle(7); | 339 | decrypt_cycle(7); |
284 | decrypt_cycle(6); | 340 | decrypt_cycle(6); |
@@ -287,14 +343,15 @@ twofish_dec_blk_8way: | |||
287 | decrypt_cycle(3); | 343 | decrypt_cycle(3); |
288 | decrypt_cycle(2); | 344 | decrypt_cycle(2); |
289 | decrypt_cycle(1); | 345 | decrypt_cycle(1); |
290 | decrypt_cycle(0); | 346 | decrypt_cycle_last(0); |
291 | 347 | ||
292 | vmovdqu (w)(CTX), RK1; | 348 | vmovdqu (w)(CTX), RK1; |
293 | 349 | ||
294 | popq %rbx; | 350 | popq %rbx; |
351 | popq %rbp; | ||
295 | 352 | ||
296 | leaq (4*4*4)(%rsi), %rax; | 353 | leaq (4*4*4)(%r11), %rax; |
297 | outunpack_blocks(%rsi, RA1, RB1, RC1, RD1, RK1, RX, RY, RK2); | 354 | outunpack_blocks(%r11, RA1, RB1, RC1, RD1, RK1, RX0, RY0, RK2); |
298 | outunpack_blocks(%rax, RA2, RB2, RC2, RD2, RK1, RX, RY, RK2); | 355 | outunpack_blocks(%rax, RA2, RB2, RC2, RD2, RK1, RX0, RY0, RK2); |
299 | 356 | ||
300 | ret; | 357 | ret; |
diff --git a/arch/x86/crypto/twofish_avx_glue.c b/arch/x86/crypto/twofish_avx_glue.c index 782b67ddaf6a..e7708b5442e0 100644 --- a/arch/x86/crypto/twofish_avx_glue.c +++ b/arch/x86/crypto/twofish_avx_glue.c | |||
@@ -378,7 +378,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
378 | .cra_alignmask = 0, | 378 | .cra_alignmask = 0, |
379 | .cra_type = &crypto_blkcipher_type, | 379 | .cra_type = &crypto_blkcipher_type, |
380 | .cra_module = THIS_MODULE, | 380 | .cra_module = THIS_MODULE, |
381 | .cra_list = LIST_HEAD_INIT(twofish_algs[0].cra_list), | ||
382 | .cra_u = { | 381 | .cra_u = { |
383 | .blkcipher = { | 382 | .blkcipher = { |
384 | .min_keysize = TF_MIN_KEY_SIZE, | 383 | .min_keysize = TF_MIN_KEY_SIZE, |
@@ -398,7 +397,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
398 | .cra_alignmask = 0, | 397 | .cra_alignmask = 0, |
399 | .cra_type = &crypto_blkcipher_type, | 398 | .cra_type = &crypto_blkcipher_type, |
400 | .cra_module = THIS_MODULE, | 399 | .cra_module = THIS_MODULE, |
401 | .cra_list = LIST_HEAD_INIT(twofish_algs[1].cra_list), | ||
402 | .cra_u = { | 400 | .cra_u = { |
403 | .blkcipher = { | 401 | .blkcipher = { |
404 | .min_keysize = TF_MIN_KEY_SIZE, | 402 | .min_keysize = TF_MIN_KEY_SIZE, |
@@ -418,7 +416,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
418 | .cra_alignmask = 0, | 416 | .cra_alignmask = 0, |
419 | .cra_type = &crypto_blkcipher_type, | 417 | .cra_type = &crypto_blkcipher_type, |
420 | .cra_module = THIS_MODULE, | 418 | .cra_module = THIS_MODULE, |
421 | .cra_list = LIST_HEAD_INIT(twofish_algs[2].cra_list), | ||
422 | .cra_u = { | 419 | .cra_u = { |
423 | .blkcipher = { | 420 | .blkcipher = { |
424 | .min_keysize = TF_MIN_KEY_SIZE, | 421 | .min_keysize = TF_MIN_KEY_SIZE, |
@@ -439,7 +436,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
439 | .cra_alignmask = 0, | 436 | .cra_alignmask = 0, |
440 | .cra_type = &crypto_blkcipher_type, | 437 | .cra_type = &crypto_blkcipher_type, |
441 | .cra_module = THIS_MODULE, | 438 | .cra_module = THIS_MODULE, |
442 | .cra_list = LIST_HEAD_INIT(twofish_algs[3].cra_list), | ||
443 | .cra_exit = lrw_twofish_exit_tfm, | 439 | .cra_exit = lrw_twofish_exit_tfm, |
444 | .cra_u = { | 440 | .cra_u = { |
445 | .blkcipher = { | 441 | .blkcipher = { |
@@ -463,7 +459,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
463 | .cra_alignmask = 0, | 459 | .cra_alignmask = 0, |
464 | .cra_type = &crypto_blkcipher_type, | 460 | .cra_type = &crypto_blkcipher_type, |
465 | .cra_module = THIS_MODULE, | 461 | .cra_module = THIS_MODULE, |
466 | .cra_list = LIST_HEAD_INIT(twofish_algs[4].cra_list), | ||
467 | .cra_u = { | 462 | .cra_u = { |
468 | .blkcipher = { | 463 | .blkcipher = { |
469 | .min_keysize = TF_MIN_KEY_SIZE * 2, | 464 | .min_keysize = TF_MIN_KEY_SIZE * 2, |
@@ -484,7 +479,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
484 | .cra_alignmask = 0, | 479 | .cra_alignmask = 0, |
485 | .cra_type = &crypto_ablkcipher_type, | 480 | .cra_type = &crypto_ablkcipher_type, |
486 | .cra_module = THIS_MODULE, | 481 | .cra_module = THIS_MODULE, |
487 | .cra_list = LIST_HEAD_INIT(twofish_algs[5].cra_list), | ||
488 | .cra_init = ablk_init, | 482 | .cra_init = ablk_init, |
489 | .cra_exit = ablk_exit, | 483 | .cra_exit = ablk_exit, |
490 | .cra_u = { | 484 | .cra_u = { |
@@ -506,7 +500,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
506 | .cra_alignmask = 0, | 500 | .cra_alignmask = 0, |
507 | .cra_type = &crypto_ablkcipher_type, | 501 | .cra_type = &crypto_ablkcipher_type, |
508 | .cra_module = THIS_MODULE, | 502 | .cra_module = THIS_MODULE, |
509 | .cra_list = LIST_HEAD_INIT(twofish_algs[6].cra_list), | ||
510 | .cra_init = ablk_init, | 503 | .cra_init = ablk_init, |
511 | .cra_exit = ablk_exit, | 504 | .cra_exit = ablk_exit, |
512 | .cra_u = { | 505 | .cra_u = { |
@@ -529,7 +522,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
529 | .cra_alignmask = 0, | 522 | .cra_alignmask = 0, |
530 | .cra_type = &crypto_ablkcipher_type, | 523 | .cra_type = &crypto_ablkcipher_type, |
531 | .cra_module = THIS_MODULE, | 524 | .cra_module = THIS_MODULE, |
532 | .cra_list = LIST_HEAD_INIT(twofish_algs[7].cra_list), | ||
533 | .cra_init = ablk_init, | 525 | .cra_init = ablk_init, |
534 | .cra_exit = ablk_exit, | 526 | .cra_exit = ablk_exit, |
535 | .cra_u = { | 527 | .cra_u = { |
@@ -553,7 +545,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
553 | .cra_alignmask = 0, | 545 | .cra_alignmask = 0, |
554 | .cra_type = &crypto_ablkcipher_type, | 546 | .cra_type = &crypto_ablkcipher_type, |
555 | .cra_module = THIS_MODULE, | 547 | .cra_module = THIS_MODULE, |
556 | .cra_list = LIST_HEAD_INIT(twofish_algs[8].cra_list), | ||
557 | .cra_init = ablk_init, | 548 | .cra_init = ablk_init, |
558 | .cra_exit = ablk_exit, | 549 | .cra_exit = ablk_exit, |
559 | .cra_u = { | 550 | .cra_u = { |
@@ -578,7 +569,6 @@ static struct crypto_alg twofish_algs[10] = { { | |||
578 | .cra_alignmask = 0, | 569 | .cra_alignmask = 0, |
579 | .cra_type = &crypto_ablkcipher_type, | 570 | .cra_type = &crypto_ablkcipher_type, |
580 | .cra_module = THIS_MODULE, | 571 | .cra_module = THIS_MODULE, |
581 | .cra_list = LIST_HEAD_INIT(twofish_algs[9].cra_list), | ||
582 | .cra_init = ablk_init, | 572 | .cra_init = ablk_init, |
583 | .cra_exit = ablk_exit, | 573 | .cra_exit = ablk_exit, |
584 | .cra_u = { | 574 | .cra_u = { |
diff --git a/arch/x86/crypto/twofish_glue.c b/arch/x86/crypto/twofish_glue.c index 359ae084275c..0a5202303501 100644 --- a/arch/x86/crypto/twofish_glue.c +++ b/arch/x86/crypto/twofish_glue.c | |||
@@ -70,7 +70,6 @@ static struct crypto_alg alg = { | |||
70 | .cra_ctxsize = sizeof(struct twofish_ctx), | 70 | .cra_ctxsize = sizeof(struct twofish_ctx), |
71 | .cra_alignmask = 0, | 71 | .cra_alignmask = 0, |
72 | .cra_module = THIS_MODULE, | 72 | .cra_module = THIS_MODULE, |
73 | .cra_list = LIST_HEAD_INIT(alg.cra_list), | ||
74 | .cra_u = { | 73 | .cra_u = { |
75 | .cipher = { | 74 | .cipher = { |
76 | .cia_min_keysize = TF_MIN_KEY_SIZE, | 75 | .cia_min_keysize = TF_MIN_KEY_SIZE, |
diff --git a/arch/x86/crypto/twofish_glue_3way.c b/arch/x86/crypto/twofish_glue_3way.c index 15f9347316c8..aa3eb358b7e8 100644 --- a/arch/x86/crypto/twofish_glue_3way.c +++ b/arch/x86/crypto/twofish_glue_3way.c | |||
@@ -342,7 +342,6 @@ static struct crypto_alg tf_algs[5] = { { | |||
342 | .cra_alignmask = 0, | 342 | .cra_alignmask = 0, |
343 | .cra_type = &crypto_blkcipher_type, | 343 | .cra_type = &crypto_blkcipher_type, |
344 | .cra_module = THIS_MODULE, | 344 | .cra_module = THIS_MODULE, |
345 | .cra_list = LIST_HEAD_INIT(tf_algs[0].cra_list), | ||
346 | .cra_u = { | 345 | .cra_u = { |
347 | .blkcipher = { | 346 | .blkcipher = { |
348 | .min_keysize = TF_MIN_KEY_SIZE, | 347 | .min_keysize = TF_MIN_KEY_SIZE, |
@@ -362,7 +361,6 @@ static struct crypto_alg tf_algs[5] = { { | |||
362 | .cra_alignmask = 0, | 361 | .cra_alignmask = 0, |
363 | .cra_type = &crypto_blkcipher_type, | 362 | .cra_type = &crypto_blkcipher_type, |
364 | .cra_module = THIS_MODULE, | 363 | .cra_module = THIS_MODULE, |
365 | .cra_list = LIST_HEAD_INIT(tf_algs[1].cra_list), | ||
366 | .cra_u = { | 364 | .cra_u = { |
367 | .blkcipher = { | 365 | .blkcipher = { |
368 | .min_keysize = TF_MIN_KEY_SIZE, | 366 | .min_keysize = TF_MIN_KEY_SIZE, |
@@ -383,7 +381,6 @@ static struct crypto_alg tf_algs[5] = { { | |||
383 | .cra_alignmask = 0, | 381 | .cra_alignmask = 0, |
384 | .cra_type = &crypto_blkcipher_type, | 382 | .cra_type = &crypto_blkcipher_type, |
385 | .cra_module = THIS_MODULE, | 383 | .cra_module = THIS_MODULE, |
386 | .cra_list = LIST_HEAD_INIT(tf_algs[2].cra_list), | ||
387 | .cra_u = { | 384 | .cra_u = { |
388 | .blkcipher = { | 385 | .blkcipher = { |
389 | .min_keysize = TF_MIN_KEY_SIZE, | 386 | .min_keysize = TF_MIN_KEY_SIZE, |
@@ -404,7 +401,6 @@ static struct crypto_alg tf_algs[5] = { { | |||
404 | .cra_alignmask = 0, | 401 | .cra_alignmask = 0, |
405 | .cra_type = &crypto_blkcipher_type, | 402 | .cra_type = &crypto_blkcipher_type, |
406 | .cra_module = THIS_MODULE, | 403 | .cra_module = THIS_MODULE, |
407 | .cra_list = LIST_HEAD_INIT(tf_algs[3].cra_list), | ||
408 | .cra_exit = lrw_twofish_exit_tfm, | 404 | .cra_exit = lrw_twofish_exit_tfm, |
409 | .cra_u = { | 405 | .cra_u = { |
410 | .blkcipher = { | 406 | .blkcipher = { |
@@ -426,7 +422,6 @@ static struct crypto_alg tf_algs[5] = { { | |||
426 | .cra_alignmask = 0, | 422 | .cra_alignmask = 0, |
427 | .cra_type = &crypto_blkcipher_type, | 423 | .cra_type = &crypto_blkcipher_type, |
428 | .cra_module = THIS_MODULE, | 424 | .cra_module = THIS_MODULE, |
429 | .cra_list = LIST_HEAD_INIT(tf_algs[4].cra_list), | ||
430 | .cra_u = { | 425 | .cra_u = { |
431 | .blkcipher = { | 426 | .blkcipher = { |
432 | .min_keysize = TF_MIN_KEY_SIZE * 2, | 427 | .min_keysize = TF_MIN_KEY_SIZE * 2, |