| author | Jussi Kivilinna <jussi.kivilinna@mbnet.fi> | 2013-01-19 06:39:11 -0500 |
|---|---|---|
| committer | Herbert Xu <herbert@gondor.apana.org.au> | 2013-01-19 18:16:48 -0500 |
| commit | e17e209ea44ae69bcfdcfacd6974cf48d04e6f71 | |
| tree | 084d12696769f6bb2e0e2a04a72435be195abce0 | |
| parent | 59990684b0d2b5ab57e37141412bc41cb6c9a2e9 | |
crypto: cast5-avx: use ENTRY()/ENDPROC() for assembler functions and localize jump targets
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
Acked-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
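
For context on the conversion: ENTRY() and ENDPROC() come from <linux/linkage.h>, which is why the patch adds that include. The sketch below shows roughly what the macros expand to, simplified from the include/linux/linkage.h definitions of that era (the exact ALIGN directive is architecture specific), and why the open-coded .align/.global/.type boilerplate can be dropped. The other half of the change prefixes the internal jump targets with .L; the GNU assembler treats such labels as local, so they are no longer emitted into the object's symbol table and cannot be mistaken for function entry points.

```c
/*
 * Rough expansion of the linkage macros used in this patch, simplified
 * from include/linux/linkage.h at the time of this commit.  ALIGN is an
 * arch-specific alignment directive, so treat this as a sketch rather
 * than the authoritative definitions.
 */
#define ENTRY(name)  \
	.globl name; \
	ALIGN;       \
	name:

#define END(name) \
	.size name, .-name

#define ENDPROC(name)          \
	.type name, @function; \
	END(name)

/*
 * So ENTRY(cast5_ecb_enc_16way) ... ENDPROC(cast5_ecb_enc_16way)
 * replaces the hand-written
 *
 *	.align 16
 *	.global cast5_ecb_enc_16way
 *	.type cast5_ecb_enc_16way,@function;
 *	cast5_ecb_enc_16way:
 *
 * and additionally records the symbol's size via .size, which helps
 * tools like objdump and perf attribute code to the right function.
 */
```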
-rw-r--r--  arch/x86/crypto/cast5-avx-x86_64-asm_64.S | 48
1 file changed, 18 insertions, 30 deletions
diff --git a/arch/x86/crypto/cast5-avx-x86_64-asm_64.S b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
index 15b00ac7cbd3..c35fd5d6ecd2 100644
--- a/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
+++ b/arch/x86/crypto/cast5-avx-x86_64-asm_64.S
@@ -23,6 +23,8 @@
  *
  */
 
+#include <linux/linkage.h>
+
 .file "cast5-avx-x86_64-asm_64.S"
 
 .extern cast_s1
@@ -211,8 +213,6 @@
 .text
 
 .align 16
-.type __cast5_enc_blk16,@function;
-
 __cast5_enc_blk16:
 	/* input:
 	 *	%rdi: ctx, CTX
@@ -263,14 +263,14 @@ __cast5_enc_blk16:
 
 	movzbl rr(CTX), %eax;
 	testl %eax, %eax;
-	jnz __skip_enc;
+	jnz .L__skip_enc;
 
 	round(RL, RR, 12, 1);
 	round(RR, RL, 13, 2);
 	round(RL, RR, 14, 3);
 	round(RR, RL, 15, 1);
 
-__skip_enc:
+.L__skip_enc:
 	popq %rbx;
 	popq %rbp;
 
@@ -282,10 +282,9 @@ __skip_enc:
 	outunpack_blocks(RR4, RL4, RTMP, RX, RKM);
 
 	ret;
+ENDPROC(__cast5_enc_blk16)
 
 .align 16
-.type __cast5_dec_blk16,@function;
-
 __cast5_dec_blk16:
 	/* input:
 	 *	%rdi: ctx, CTX
@@ -323,14 +322,14 @@ __cast5_dec_blk16:
 
 	movzbl rr(CTX), %eax;
 	testl %eax, %eax;
-	jnz __skip_dec;
+	jnz .L__skip_dec;
 
 	round(RL, RR, 15, 1);
 	round(RR, RL, 14, 3);
 	round(RL, RR, 13, 2);
 	round(RR, RL, 12, 1);
 
-__dec_tail:
+.L__dec_tail:
 	round(RL, RR, 11, 3);
 	round(RR, RL, 10, 2);
 	round(RL, RR, 9, 1);
@@ -355,15 +354,12 @@ __dec_tail:
 
 	ret;
 
-__skip_dec:
+.L__skip_dec:
 	vpsrldq $4, RKR, RKR;
-	jmp __dec_tail;
+	jmp .L__dec_tail;
+ENDPROC(__cast5_dec_blk16)
 
-.align 16
-.global cast5_ecb_enc_16way
-.type cast5_ecb_enc_16way,@function;
-
-cast5_ecb_enc_16way:
+ENTRY(cast5_ecb_enc_16way)
 	/* input:
 	 *	%rdi: ctx, CTX
 	 *	%rsi: dst
@@ -393,12 +389,9 @@ cast5_ecb_enc_16way:
 	vmovdqu RL4, (7*4*4)(%r11);
 
 	ret;
+ENDPROC(cast5_ecb_enc_16way)
 
-.align 16
-.global cast5_ecb_dec_16way
-.type cast5_ecb_dec_16way,@function;
-
-cast5_ecb_dec_16way:
+ENTRY(cast5_ecb_dec_16way)
 	/* input:
 	 *	%rdi: ctx, CTX
 	 *	%rsi: dst
@@ -428,12 +421,9 @@ cast5_ecb_dec_16way:
 	vmovdqu RL4, (7*4*4)(%r11);
 
 	ret;
+ENDPROC(cast5_ecb_dec_16way)
 
-.align 16
-.global cast5_cbc_dec_16way
-.type cast5_cbc_dec_16way,@function;
-
-cast5_cbc_dec_16way:
+ENTRY(cast5_cbc_dec_16way)
 	/* input:
 	 *	%rdi: ctx, CTX
 	 *	%rsi: dst
@@ -480,12 +470,9 @@ cast5_cbc_dec_16way:
 	popq %r12;
 
 	ret;
+ENDPROC(cast5_cbc_dec_16way)
 
-.align 16
-.global cast5_ctr_16way
-.type cast5_ctr_16way,@function;
-
-cast5_ctr_16way:
+ENTRY(cast5_ctr_16way)
 	/* input:
 	 *	%rdi: ctx, CTX
 	 *	%rsi: dst
@@ -556,3 +543,4 @@ cast5_ctr_16way:
 	popq %r12;
 
 	ret;
+ENDPROC(cast5_ctr_16way)
