diff options
author | M. Vefa Bicakci <m.v.b@runbox.com> | 2015-03-28 21:07:45 -0400 |
---|---|---|
committer | Greg Kroah-Hartman <gregkh@linuxfoundation.org> | 2015-04-03 09:25:45 -0400 |
commit | 9c0cfd4b6f51af9a935dbec680a60a4521b5f2b8 (patch) | |
tree | 1513f94f65bb5690b4e52eed93e01036730cdcf9 /drivers/staging | |
parent | f5d8bde742a5dbea11eed35a026bc4d968001119 (diff) |
staging: rtl8723au: Fix "before/around/after" whitespace issues
Correct a number of "space(s) required before/around/after" checkpatch.pl
issues in a number of functions in rtl8723au's rtw_security.c.
Signed-off-by: M. Vefa Bicakci <m.v.b@runbox.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Diffstat (limited to 'drivers/staging')
-rw-r--r-- | drivers/staging/rtl8723au/core/rtw_security.c | 219 |
1 file changed, 113 insertions(+), 106 deletions(-)
diff --git a/drivers/staging/rtl8723au/core/rtw_security.c b/drivers/staging/rtl8723au/core/rtw_security.c index 92882f60bc0e..572fcf3f936d 100644 --- a/drivers/staging/rtl8723au/core/rtw_security.c +++ b/drivers/staging/rtl8723au/core/rtw_security.c | |||
@@ -129,14 +129,14 @@ static void crc32_init(void) | |||
129 | static u32 getcrc32(u8 *buf, int len) | 129 | static u32 getcrc32(u8 *buf, int len) |
130 | { | 130 | { |
131 | u8 *p; | 131 | u8 *p; |
132 | u32 crc; | 132 | u32 crc; |
133 | 133 | ||
134 | if (bcrc32initialized == 0) crc32_init(); | 134 | if (bcrc32initialized == 0) crc32_init(); |
135 | 135 | ||
136 | crc = 0xffffffff; /* preload shift register, per CRC-32 spec */ | 136 | crc = 0xffffffff; /* preload shift register, per CRC-32 spec */ |
137 | 137 | ||
138 | for (p = buf; len > 0; ++p, --len) | 138 | for (p = buf; len > 0; ++p, --len) |
139 | crc = crc32_table[ (crc ^ *p) & 0xff] ^ (crc >> 8); | 139 | crc = crc32_table[(crc ^ *p) & 0xff] ^ (crc >> 8); |
140 | 140 | ||
141 | return ~crc; /* transmit complement, per CRC-32 spec */ | 141 | return ~crc; /* transmit complement, per CRC-32 spec */ |
142 | } | 142 | } |
@@ -182,7 +182,7 @@ void rtw_wep_encrypt23a(struct rtw_adapter *padapter, | |||
182 | if ((curfragnum + 1) == pattrib->nr_frags) { | 182 | if ((curfragnum + 1) == pattrib->nr_frags) { |
183 | /* the last fragment */ | 183 | /* the last fragment */ |
184 | length = pattrib->last_txcmdsz - pattrib->hdrlen - | 184 | length = pattrib->last_txcmdsz - pattrib->hdrlen - |
185 | pattrib->iv_len- pattrib->icv_len; | 185 | pattrib->iv_len - pattrib->icv_len; |
186 | 186 | ||
187 | *((u32 *)crc) = cpu_to_le32(getcrc32(payload, length)); | 187 | *((u32 *)crc) = cpu_to_le32(getcrc32(payload, length)); |
188 | 188 | ||
@@ -262,8 +262,8 @@ static u32 secmicgetuint32(u8 *p) | |||
262 | s32 i; | 262 | s32 i; |
263 | u32 res = 0; | 263 | u32 res = 0; |
264 | 264 | ||
265 | for (i = 0; i<4; i++) { | 265 | for (i = 0; i < 4; i++) { |
266 | res |= ((u32)(*p++)) << (8*i); | 266 | res |= ((u32)(*p++)) << (8 * i); |
267 | } | 267 | } |
268 | 268 | ||
269 | return res; | 269 | return res; |
@@ -274,7 +274,7 @@ static void secmicputuint32(u8 *p, u32 val) | |||
274 | { | 274 | { |
275 | long i; | 275 | long i; |
276 | 276 | ||
277 | for (i = 0; i<4; i++) { | 277 | for (i = 0; i < 4; i++) { |
278 | *p++ = (u8) (val & 0xff); | 278 | *p++ = (u8) (val & 0xff); |
279 | val >>= 8; | 279 | val >>= 8; |
280 | } | 280 | } |
@@ -307,7 +307,7 @@ void rtw_secmicappend23abyte23a(struct mic_data *pmicdata, u8 b) | |||
307 | { | 307 | { |
308 | 308 | ||
309 | /* Append the byte to our word-sized buffer */ | 309 | /* Append the byte to our word-sized buffer */ |
310 | pmicdata->M |= ((unsigned long)b) << (8*pmicdata->nBytesInM); | 310 | pmicdata->M |= ((unsigned long)b) << (8 * pmicdata->nBytesInM); |
311 | pmicdata->nBytesInM++; | 311 | pmicdata->nBytesInM++; |
312 | /* Process the word if it is full. */ | 312 | /* Process the word if it is full. */ |
313 | if (pmicdata->nBytesInM >= 4) { | 313 | if (pmicdata->nBytesInM >= 4) { |
@@ -331,7 +331,7 @@ void rtw_secmicappend23a(struct mic_data *pmicdata, u8 *src, u32 nbytes) | |||
331 | { | 331 | { |
332 | 332 | ||
333 | /* This is simple */ | 333 | /* This is simple */ |
334 | while(nbytes > 0) { | 334 | while (nbytes > 0) { |
335 | rtw_secmicappend23abyte23a(pmicdata, *src++); | 335 | rtw_secmicappend23abyte23a(pmicdata, *src++); |
336 | nbytes--; | 336 | nbytes--; |
337 | } | 337 | } |
@@ -348,12 +348,12 @@ void rtw_secgetmic23a(struct mic_data *pmicdata, u8 *dst) | |||
348 | rtw_secmicappend23abyte23a(pmicdata, 0); | 348 | rtw_secmicappend23abyte23a(pmicdata, 0); |
349 | rtw_secmicappend23abyte23a(pmicdata, 0); | 349 | rtw_secmicappend23abyte23a(pmicdata, 0); |
350 | /* and then zeroes until the length is a multiple of 4 */ | 350 | /* and then zeroes until the length is a multiple of 4 */ |
351 | while(pmicdata->nBytesInM != 0) { | 351 | while (pmicdata->nBytesInM != 0) { |
352 | rtw_secmicappend23abyte23a(pmicdata, 0); | 352 | rtw_secmicappend23abyte23a(pmicdata, 0); |
353 | } | 353 | } |
354 | /* The appendByte function has already computed the result. */ | 354 | /* The appendByte function has already computed the result. */ |
355 | secmicputuint32(dst, pmicdata->L); | 355 | secmicputuint32(dst, pmicdata->L); |
356 | secmicputuint32(dst+4, pmicdata->R); | 356 | secmicputuint32(dst + 4, pmicdata->R); |
357 | /* Reset to the empty message. */ | 357 | /* Reset to the empty message. */ |
358 | secmicclear(pmicdata); | 358 | secmicclear(pmicdata); |
359 | 359 | ||
@@ -364,10 +364,10 @@ void rtw_seccalctkipmic23a(u8 *key, u8 *header, u8 *data, u32 data_len, | |||
364 | { | 364 | { |
365 | 365 | ||
366 | struct mic_data micdata; | 366 | struct mic_data micdata; |
367 | u8 priority[4]={0x0, 0x0, 0x0, 0x0}; | 367 | u8 priority[4] = {0x0, 0x0, 0x0, 0x0}; |
368 | 368 | ||
369 | rtw_secmicsetkey23a(&micdata, key); | 369 | rtw_secmicsetkey23a(&micdata, key); |
370 | priority[0]= pri; | 370 | priority[0] = pri; |
371 | 371 | ||
372 | /* Michael MIC pseudo header: DA, SA, 3 x 0, Priority */ | 372 | /* Michael MIC pseudo header: DA, SA, 3 x 0, Priority */ |
373 | if (header[1]&1) { /* ToDS == 1 */ | 373 | if (header[1]&1) { /* ToDS == 1 */ |
@@ -398,11 +398,11 @@ void rtw_seccalctkipmic23a(u8 *key, u8 *header, u8 *data, u32 data_len, | |||
398 | #define Lo8(v16) ((u8)((v16) & 0x00FF)) | 398 | #define Lo8(v16) ((u8)((v16) & 0x00FF)) |
399 | #define Hi8(v16) ((u8)(((v16) >> 8) & 0x00FF)) | 399 | #define Hi8(v16) ((u8)(((v16) >> 8) & 0x00FF)) |
400 | #define Lo16(v32) ((u16)((v32) & 0xFFFF)) | 400 | #define Lo16(v32) ((u16)((v32) & 0xFFFF)) |
401 | #define Hi16(v32) ((u16)(((v32) >>16) & 0xFFFF)) | 401 | #define Hi16(v32) ((u16)(((v32) >> 16) & 0xFFFF)) |
402 | #define Mk16(hi, lo) ((lo) ^ (((u16)(hi)) << 8)) | 402 | #define Mk16(hi, lo) ((lo) ^ (((u16)(hi)) << 8)) |
403 | 403 | ||
404 | /* select the Nth 16-bit word of the temporal key unsigned char array TK[] */ | 404 | /* select the Nth 16-bit word of the temporal key unsigned char array TK[] */ |
405 | #define TK16(N) Mk16(tk[2*(N)+1], tk[2*(N)]) | 405 | #define TK16(N) Mk16(tk[2 * (N) + 1], tk[2 * (N)]) |
406 | 406 | ||
407 | /* S-box lookup: 16 bits --> 16 bits */ | 407 | /* S-box lookup: 16 bits --> 16 bits */ |
408 | #define _S_(v16) (Sbox1[0][Lo8(v16)] ^ Sbox1[1][Hi8(v16)]) | 408 | #define _S_(v16) (Sbox1[0][Lo8(v16)] ^ Sbox1[1][Hi8(v16)]) |
@@ -415,7 +415,7 @@ void rtw_seccalctkipmic23a(u8 *key, u8 *header, u8 *data, u32 data_len, | |||
415 | #define RC4_KEY_SIZE 16 /* 128-bit RC4KEY (104 bits unknown) */ | 415 | #define RC4_KEY_SIZE 16 /* 128-bit RC4KEY (104 bits unknown) */ |
416 | 416 | ||
417 | /* 2-unsigned char by 2-unsigned char subset of the full AES S-box table */ | 417 | /* 2-unsigned char by 2-unsigned char subset of the full AES S-box table */ |
418 | static const unsigned short Sbox1[2][256]= /* Sbox for hash (can be in ROM) */ | 418 | static const unsigned short Sbox1[2][256] = /* Sbox for hash (can be in ROM) */ |
419 | { { | 419 | { { |
420 | 0xC6A5, 0xF884, 0xEE99, 0xF68D, 0xFF0D, 0xD6BD, 0xDEB1, 0x9154, | 420 | 0xC6A5, 0xF884, 0xEE99, 0xF68D, 0xFF0D, 0xD6BD, 0xDEB1, 0x9154, |
421 | 0x6050, 0x0203, 0xCEA9, 0x567D, 0xE719, 0xB562, 0x4DE6, 0xEC9A, | 421 | 0x6050, 0x0203, 0xCEA9, 0x567D, 0xE719, 0xB562, 0x4DE6, 0xEC9A, |
@@ -517,13 +517,13 @@ static void phase1(u16 *p1k, const u8 *tk, const u8 *ta, u32 iv32) | |||
517 | 517 | ||
518 | /* Now compute an unbalanced Feistel cipher with 80-bit block */ | 518 | /* Now compute an unbalanced Feistel cipher with 80-bit block */ |
519 | /* size on the 80-bit block P1K[], using the 128-bit key TK[] */ | 519 | /* size on the 80-bit block P1K[], using the 128-bit key TK[] */ |
520 | for (i = 0; i < PHASE1_LOOP_CNT ;i++) { | 520 | for (i = 0; i < PHASE1_LOOP_CNT; i++) { |
521 | /* Each add operation here is mod 2**16 */ | 521 | /* Each add operation here is mod 2**16 */ |
522 | p1k[0] += _S_(p1k[4] ^ TK16((i&1)+0)); | 522 | p1k[0] += _S_(p1k[4] ^ TK16((i & 1) + 0)); |
523 | p1k[1] += _S_(p1k[0] ^ TK16((i&1)+2)); | 523 | p1k[1] += _S_(p1k[0] ^ TK16((i & 1) + 2)); |
524 | p1k[2] += _S_(p1k[1] ^ TK16((i&1)+4)); | 524 | p1k[2] += _S_(p1k[1] ^ TK16((i & 1) + 4)); |
525 | p1k[3] += _S_(p1k[2] ^ TK16((i&1)+6)); | 525 | p1k[3] += _S_(p1k[2] ^ TK16((i & 1) + 6)); |
526 | p1k[4] += _S_(p1k[3] ^ TK16((i&1)+0)); | 526 | p1k[4] += _S_(p1k[3] ^ TK16((i & 1) + 0)); |
527 | p1k[4] += (unsigned short)i; /* avoid "slide attacks" */ | 527 | p1k[4] += (unsigned short)i; /* avoid "slide attacks" */ |
528 | } | 528 | } |
529 | 529 | ||
@@ -558,8 +558,8 @@ static void phase2(u8 *rc4key, const u8 *tk, const u16 *p1k, u16 iv16) | |||
558 | u16 PPK[6]; /* temporary key for mixing */ | 558 | u16 PPK[6]; /* temporary key for mixing */ |
559 | 559 | ||
560 | /* Note: all adds in the PPK[] equations below are mod 2**16 */ | 560 | /* Note: all adds in the PPK[] equations below are mod 2**16 */ |
561 | for (i = 0;i<5;i++) PPK[i]= p1k[i]; /* first, copy P1K to PPK */ | 561 | for (i = 0; i < 5; i++) PPK[i] = p1k[i]; /* first, copy P1K to PPK */ |
562 | PPK[5] = p1k[4] +iv16; /* next, add in IV16 */ | 562 | PPK[5] = p1k[4] + iv16; /* next, add in IV16 */ |
563 | 563 | ||
564 | /* Bijective non-linear mixing of the 96 bits of PPK[0..5] */ | 564 | /* Bijective non-linear mixing of the 96 bits of PPK[0..5] */ |
565 | PPK[0] += _S_(PPK[5] ^ TK16(0)); /* Mix key in each "round" */ | 565 | PPK[0] += _S_(PPK[5] ^ TK16(0)); /* Mix key in each "round" */ |
@@ -588,9 +588,9 @@ static void phase2(u8 *rc4key, const u8 *tk, const u16 *p1k, u16 iv16) | |||
588 | rc4key[3] = Lo8((PPK[5] ^ TK16(0)) >> 1); | 588 | rc4key[3] = Lo8((PPK[5] ^ TK16(0)) >> 1); |
589 | 589 | ||
590 | /* Copy 96 bits of PPK[0..5] to RC4KEY[4..15] (little-endian) */ | 590 | /* Copy 96 bits of PPK[0..5] to RC4KEY[4..15] (little-endian) */ |
591 | for (i = 0;i<6;i++) { | 591 | for (i = 0; i < 6; i++) { |
592 | rc4key[4+2*i] = Lo8(PPK[i]); | 592 | rc4key[4 + 2 * i] = Lo8(PPK[i]); |
593 | rc4key[5+2*i] = Hi8(PPK[i]); | 593 | rc4key[5 + 2 * i] = Hi8(PPK[i]); |
594 | } | 594 | } |
595 | 595 | ||
596 | } | 596 | } |
@@ -632,9 +632,9 @@ int rtw_tkip_encrypt23a(struct rtw_adapter *padapter, | |||
632 | &pattrib->ra[0]); | 632 | &pattrib->ra[0]); |
633 | } | 633 | } |
634 | 634 | ||
635 | if (stainfo!= NULL) { | 635 | if (stainfo != NULL) { |
636 | 636 | ||
637 | if (!(stainfo->state &_FW_LINKED)) { | 637 | if (!(stainfo->state & _FW_LINKED)) { |
638 | DBG_8723A("%s, psta->state(0x%x) != _FW_LINKED\n", __func__, stainfo->state); | 638 | DBG_8723A("%s, psta->state(0x%x) != _FW_LINKED\n", __func__, stainfo->state); |
639 | return _FAIL; | 639 | return _FAIL; |
640 | } | 640 | } |
@@ -649,21 +649,25 @@ int rtw_tkip_encrypt23a(struct rtw_adapter *padapter, | |||
649 | 649 | ||
650 | prwskeylen = 16; | 650 | prwskeylen = 16; |
651 | 651 | ||
652 | for (curfragnum = 0;curfragnum<pattrib->nr_frags;curfragnum++) { | 652 | for (curfragnum = 0; curfragnum < pattrib->nr_frags; curfragnum++) { |
653 | iv = pframe+pattrib->hdrlen; | 653 | iv = pframe + pattrib->hdrlen; |
654 | payload = pframe+pattrib->iv_len+pattrib->hdrlen; | 654 | payload = pframe + pattrib->iv_len + pattrib->hdrlen; |
655 | 655 | ||
656 | GET_TKIP_PN(iv, dot11txpn); | 656 | GET_TKIP_PN(iv, dot11txpn); |
657 | 657 | ||
658 | pnl = (u16)(dot11txpn.val); | 658 | pnl = (u16)(dot11txpn.val); |
659 | pnh = (u32)(dot11txpn.val>>16); | 659 | pnh = (u32)(dot11txpn.val>>16); |
660 | 660 | ||
661 | phase1((u16 *)&ttkey[0], prwskey,&pattrib->ta[0], pnh); | 661 | phase1((u16 *)&ttkey[0], prwskey, &pattrib->ta[0], pnh); |
662 | 662 | ||
663 | phase2(&rc4key[0], prwskey, (u16 *)&ttkey[0], pnl); | 663 | phase2(&rc4key[0], prwskey, (u16 *)&ttkey[0], pnl); |
664 | 664 | ||
665 | if ((curfragnum+1) == pattrib->nr_frags) { /* 4 the last fragment */ | 665 | if ((curfragnum + 1) == pattrib->nr_frags) { /* 4 the last fragment */ |
666 | length = pattrib->last_txcmdsz-pattrib->hdrlen-pattrib->iv_len- pattrib->icv_len; | 666 | length = (pattrib->last_txcmdsz - |
667 | pattrib->hdrlen - | ||
668 | pattrib->iv_len - | ||
669 | pattrib->icv_len); | ||
670 | |||
667 | RT_TRACE(_module_rtl871x_security_c_, _drv_info_, | 671 | RT_TRACE(_module_rtl871x_security_c_, _drv_info_, |
668 | "pattrib->iv_len =%x, pattrib->icv_len =%x\n", | 672 | "pattrib->iv_len =%x, pattrib->icv_len =%x\n", |
669 | pattrib->iv_len, | 673 | pattrib->iv_len, |
@@ -672,23 +676,27 @@ int rtw_tkip_encrypt23a(struct rtw_adapter *padapter, | |||
672 | 676 | ||
673 | arcfour_init(&mycontext, rc4key, 16); | 677 | arcfour_init(&mycontext, rc4key, 16); |
674 | arcfour_encrypt(&mycontext, payload, payload, length); | 678 | arcfour_encrypt(&mycontext, payload, payload, length); |
675 | arcfour_encrypt(&mycontext, payload+length, crc, 4); | 679 | arcfour_encrypt(&mycontext, payload + length, crc, 4); |
676 | 680 | ||
677 | } | 681 | } |
678 | else{ | 682 | else { |
679 | length = pxmitpriv->frag_len-pattrib->hdrlen-pattrib->iv_len-pattrib->icv_len ; | 683 | length = (pxmitpriv->frag_len - |
684 | pattrib->hdrlen - | ||
685 | pattrib->iv_len - | ||
686 | pattrib->icv_len); | ||
687 | |||
680 | *((u32 *)crc) = cpu_to_le32(getcrc32(payload, length));/* modified by Amy*/ | 688 | *((u32 *)crc) = cpu_to_le32(getcrc32(payload, length));/* modified by Amy*/ |
681 | arcfour_init(&mycontext, rc4key, 16); | 689 | arcfour_init(&mycontext, rc4key, 16); |
682 | arcfour_encrypt(&mycontext, payload, payload, length); | 690 | arcfour_encrypt(&mycontext, payload, payload, length); |
683 | arcfour_encrypt(&mycontext, payload+length, crc, 4); | 691 | arcfour_encrypt(&mycontext, payload + length, crc, 4); |
684 | 692 | ||
685 | pframe+= pxmitpriv->frag_len; | 693 | pframe += pxmitpriv->frag_len; |
686 | pframe = PTR_ALIGN(pframe, 4); | 694 | pframe = PTR_ALIGN(pframe, 4); |
687 | } | 695 | } |
688 | } | 696 | } |
689 | 697 | ||
690 | } | 698 | } |
691 | else{ | 699 | else { |
692 | RT_TRACE(_module_rtl871x_security_c_, _drv_err_, | 700 | RT_TRACE(_module_rtl871x_security_c_, _drv_err_, |
693 | "rtw_tkip_encrypt23a: stainfo == NULL!!!\n"); | 701 | "rtw_tkip_encrypt23a: stainfo == NULL!!!\n"); |
694 | DBG_8723A("%s, psta == NUL\n", __func__); | 702 | DBG_8723A("%s, psta == NUL\n", __func__); |
@@ -727,7 +735,7 @@ int rtw_tkip_decrypt23a(struct rtw_adapter *padapter, | |||
727 | 735 | ||
728 | stainfo = rtw_get_stainfo23a(&padapter->stapriv, | 736 | stainfo = rtw_get_stainfo23a(&padapter->stapriv, |
729 | &prxattrib->ta[0]); | 737 | &prxattrib->ta[0]); |
730 | if (stainfo!= NULL) { | 738 | if (stainfo != NULL) { |
731 | 739 | ||
732 | if (is_multicast_ether_addr(prxattrib->ra)) { | 740 | if (is_multicast_ether_addr(prxattrib->ra)) { |
733 | if (psecuritypriv->binstallGrpkey == 0) { | 741 | if (psecuritypriv->binstallGrpkey == 0) { |
@@ -744,32 +752,32 @@ int rtw_tkip_decrypt23a(struct rtw_adapter *padapter, | |||
744 | prwskeylen = 16; | 752 | prwskeylen = 16; |
745 | } | 753 | } |
746 | 754 | ||
747 | iv = pframe+prxattrib->hdrlen; | 755 | iv = pframe + prxattrib->hdrlen; |
748 | payload = pframe+prxattrib->iv_len+prxattrib->hdrlen; | 756 | payload = pframe + prxattrib->iv_len + prxattrib->hdrlen; |
749 | length = skb->len - prxattrib->hdrlen-prxattrib->iv_len; | 757 | length = skb->len - prxattrib->hdrlen - prxattrib->iv_len; |
750 | 758 | ||
751 | GET_TKIP_PN(iv, dot11txpn); | 759 | GET_TKIP_PN(iv, dot11txpn); |
752 | 760 | ||
753 | pnl = (u16)(dot11txpn.val); | 761 | pnl = (u16)(dot11txpn.val); |
754 | pnh = (u32)(dot11txpn.val>>16); | 762 | pnh = (u32)(dot11txpn.val>>16); |
755 | 763 | ||
756 | phase1((u16 *)&ttkey[0], prwskey,&prxattrib->ta[0], pnh); | 764 | phase1((u16 *)&ttkey[0], prwskey, &prxattrib->ta[0], pnh); |
757 | phase2(&rc4key[0], prwskey, (unsigned short *)&ttkey[0], pnl); | 765 | phase2(&rc4key[0], prwskey, (unsigned short *)&ttkey[0], pnl); |
758 | 766 | ||
759 | /* 4 decrypt payload include icv */ | 767 | /* 4 decrypt payload include icv */ |
760 | arcfour_init(&mycontext, rc4key, 16); | 768 | arcfour_init(&mycontext, rc4key, 16); |
761 | arcfour_encrypt(&mycontext, payload, payload, length); | 769 | arcfour_encrypt(&mycontext, payload, payload, length); |
762 | 770 | ||
763 | *((u32 *)crc) = le32_to_cpu(getcrc32(payload, length-4)); | 771 | *((u32 *)crc) = le32_to_cpu(getcrc32(payload, length - 4)); |
764 | 772 | ||
765 | if (crc[3]!= payload[length-1] || crc[2]!= payload[length-2] || crc[1]!= payload[length-3] || crc[0]!= payload[length-4]) | 773 | if (crc[3] != payload[length - 1] || crc[2] != payload[length - 2] || crc[1] != payload[length - 3] || crc[0] != payload[length - 4]) |
766 | { | 774 | { |
767 | RT_TRACE(_module_rtl871x_security_c_, _drv_err_, | 775 | RT_TRACE(_module_rtl871x_security_c_, _drv_err_, |
768 | "rtw_wep_decrypt23a:icv error crc[3](%x)!= payload[length-1](%x) || crc[2](%x)!= payload[length-2](%x) || crc[1](%x)!= payload[length-3](%x) || crc[0](%x)!= payload[length-4](%x)\n", | 776 | "rtw_wep_decrypt23a:icv error crc[3](%x)!= payload[length-1](%x) || crc[2](%x)!= payload[length-2](%x) || crc[1](%x)!= payload[length-3](%x) || crc[0](%x)!= payload[length-4](%x)\n", |
769 | crc[3], payload[length-1], | 777 | crc[3], payload[length - 1], |
770 | crc[2], payload[length-2], | 778 | crc[2], payload[length - 2], |
771 | crc[1], payload[length-3], | 779 | crc[1], payload[length - 3], |
772 | crc[0], payload[length-4]); | 780 | crc[0], payload[length - 4]); |
773 | res = _FAIL; | 781 | res = _FAIL; |
774 | } | 782 | } |
775 | } else { | 783 | } else { |
@@ -835,7 +843,7 @@ static void xor_128(u8 *a, u8 *b, u8 *out) | |||
835 | { | 843 | { |
836 | int i; | 844 | int i; |
837 | 845 | ||
838 | for (i = 0;i<16; i++) | 846 | for (i = 0; i < 16; i++) |
839 | out[i] = a[i] ^ b[i]; | 847 | out[i] = a[i] ^ b[i]; |
840 | } | 848 | } |
841 | 849 | ||
@@ -882,7 +890,7 @@ static void byte_sub(u8 *in, u8 *out) | |||
882 | { | 890 | { |
883 | int i; | 891 | int i; |
884 | 892 | ||
885 | for (i = 0; i< 16; i++) { | 893 | for (i = 0; i < 16; i++) { |
886 | out[i] = sbox(in[i]); | 894 | out[i] = sbox(in[i]); |
887 | } | 895 | } |
888 | 896 | ||
@@ -922,7 +930,7 @@ static void mix_column(u8 *in, u8 *out) | |||
922 | u8 temp[4]; | 930 | u8 temp[4]; |
923 | u8 tempb[4]; | 931 | u8 tempb[4]; |
924 | 932 | ||
925 | for (i = 0 ; i<4; i++) { | 933 | for (i = 0; i < 4; i++) { |
926 | if ((in[i] & 0x80) == 0x80) | 934 | if ((in[i] & 0x80) == 0x80) |
927 | add1b[i] = 0x1b; | 935 | add1b[i] = 0x1b; |
928 | else | 936 | else |
@@ -944,10 +952,10 @@ static void mix_column(u8 *in, u8 *out) | |||
944 | andf7[2] = in[2] & 0x7f; | 952 | andf7[2] = in[2] & 0x7f; |
945 | andf7[3] = in[3] & 0x7f; | 953 | andf7[3] = in[3] & 0x7f; |
946 | 954 | ||
947 | for (i = 3; i>0; i--) { /* logical shift left 1 bit */ | 955 | for (i = 3; i > 0; i--) { /* logical shift left 1 bit */ |
948 | andf7[i] = andf7[i] << 1; | 956 | andf7[i] = andf7[i] << 1; |
949 | if ((andf7[i-1] & 0x80) == 0x80) { | 957 | if ((andf7[i - 1] & 0x80) == 0x80) { |
950 | andf7[i] = (andf7[i] | 0x01); | 958 | andf7[i] = (andf7[i] | 0x01); |
951 | } | 959 | } |
952 | } | 960 | } |
953 | andf7[0] = andf7[0] << 1; | 961 | andf7[0] = andf7[0] << 1; |
@@ -977,7 +985,7 @@ static void aes128k128d(u8 *key, u8 *data, u8 *ciphertext) | |||
977 | u8 intermediateb[16]; | 985 | u8 intermediateb[16]; |
978 | u8 round_key[16]; | 986 | u8 round_key[16]; |
979 | 987 | ||
980 | for (i = 0; i<16; i++) round_key[i] = key[i]; | 988 | for (i = 0; i < 16; i++) round_key[i] = key[i]; |
981 | 989 | ||
982 | for (round = 0; round < 11; round++) { | 990 | for (round = 0; round < 11; round++) { |
983 | if (round == 0) { | 991 | if (round == 0) { |
@@ -1061,7 +1069,7 @@ static void construct_mic_header2(u8 *mic_header2, u8 *mpdu, int a4_exists, | |||
1061 | { | 1069 | { |
1062 | int i; | 1070 | int i; |
1063 | 1071 | ||
1064 | for (i = 0; i<16; i++) mic_header2[i]= 0x00; | 1072 | for (i = 0; i < 16; i++) mic_header2[i] = 0x00; |
1065 | 1073 | ||
1066 | mic_header2[0] = mpdu[16]; /* A3 */ | 1074 | mic_header2[0] = mpdu[16]; /* A3 */ |
1067 | mic_header2[1] = mpdu[17]; | 1075 | mic_header2[1] = mpdu[17]; |
@@ -1074,8 +1082,7 @@ static void construct_mic_header2(u8 *mic_header2, u8 *mpdu, int a4_exists, | |||
1074 | mic_header2[7] = 0x00; /* mpdu[23]; */ | 1082 | mic_header2[7] = 0x00; /* mpdu[23]; */ |
1075 | 1083 | ||
1076 | if (!qc_exists && a4_exists) { | 1084 | if (!qc_exists && a4_exists) { |
1077 | for (i = 0;i<6;i++) mic_header2[8+i] = mpdu[24+i]; /* A4 */ | 1085 | for (i = 0; i < 6; i++) mic_header2[8+i] = mpdu[24+i]; /* A4 */ |
1078 | |||
1079 | } | 1086 | } |
1080 | 1087 | ||
1081 | if (qc_exists && !a4_exists) { | 1088 | if (qc_exists && !a4_exists) { |
@@ -1084,7 +1091,7 @@ static void construct_mic_header2(u8 *mic_header2, u8 *mpdu, int a4_exists, | |||
1084 | } | 1091 | } |
1085 | 1092 | ||
1086 | if (qc_exists && a4_exists) { | 1093 | if (qc_exists && a4_exists) { |
1087 | for (i = 0;i<6;i++) mic_header2[8+i] = mpdu[24+i]; /* A4 */ | 1094 | for (i = 0; i < 6; i++) mic_header2[8+i] = mpdu[24+i]; /* A4 */ |
1088 | 1095 | ||
1089 | mic_header2[14] = mpdu[30] & 0x0f; | 1096 | mic_header2[14] = mpdu[30] & 0x0f; |
1090 | mic_header2[15] = mpdu[31] & 0x00; | 1097 | mic_header2[15] = mpdu[31] & 0x00; |
@@ -1102,7 +1109,7 @@ static void construct_ctr_preload(u8 *ctr_preload, int a4_exists, int qc_exists, | |||
1102 | { | 1109 | { |
1103 | int i = 0; | 1110 | int i = 0; |
1104 | 1111 | ||
1105 | for (i = 0; i<16; i++) ctr_preload[i] = 0x00; | 1112 | for (i = 0; i < 16; i++) ctr_preload[i] = 0x00; |
1106 | i = 0; | 1113 | i = 0; |
1107 | 1114 | ||
1108 | ctr_preload[0] = 0x01; /* flag */ | 1115 | ctr_preload[0] = 0x01; /* flag */ |
@@ -1183,12 +1190,12 @@ static int aes_cipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1183 | } else { | 1190 | } else { |
1184 | qc_exists = 0; | 1191 | qc_exists = 0; |
1185 | } | 1192 | } |
1186 | pn_vector[0]= pframe[hdrlen]; | 1193 | pn_vector[0] = pframe[hdrlen]; |
1187 | pn_vector[1]= pframe[hdrlen+1]; | 1194 | pn_vector[1] = pframe[hdrlen + 1]; |
1188 | pn_vector[2]= pframe[hdrlen+4]; | 1195 | pn_vector[2] = pframe[hdrlen + 4]; |
1189 | pn_vector[3]= pframe[hdrlen+5]; | 1196 | pn_vector[3] = pframe[hdrlen + 5]; |
1190 | pn_vector[4]= pframe[hdrlen+6]; | 1197 | pn_vector[4] = pframe[hdrlen + 6]; |
1191 | pn_vector[5]= pframe[hdrlen+7]; | 1198 | pn_vector[5] = pframe[hdrlen + 7]; |
1192 | 1199 | ||
1193 | construct_mic_iv(mic_iv, qc_exists, a4_exists, pframe, plen, pn_vector); | 1200 | construct_mic_iv(mic_iv, qc_exists, a4_exists, pframe, plen, pn_vector); |
1194 | 1201 | ||
@@ -1230,12 +1237,12 @@ static int aes_cipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1230 | 1237 | ||
1231 | /* Insert MIC into payload */ | 1238 | /* Insert MIC into payload */ |
1232 | for (j = 0; j < 8; j++) | 1239 | for (j = 0; j < 8; j++) |
1233 | pframe[payload_index+j] = mic[j]; | 1240 | pframe[payload_index + j] = mic[j]; |
1234 | 1241 | ||
1235 | payload_index = hdrlen + 8; | 1242 | payload_index = hdrlen + 8; |
1236 | for (i = 0; i < num_blocks; i++) { | 1243 | for (i = 0; i < num_blocks; i++) { |
1237 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, | 1244 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, |
1238 | pframe, pn_vector, i+1); | 1245 | pframe, pn_vector, i + 1); |
1239 | aes128k128d(key, ctr_preload, aes_out); | 1246 | aes128k128d(key, ctr_preload, aes_out); |
1240 | bitwise_xor(aes_out, &pframe[payload_index], chain_buffer); | 1247 | bitwise_xor(aes_out, &pframe[payload_index], chain_buffer); |
1241 | for (j = 0; j < 16; j++) | 1248 | for (j = 0; j < 16; j++) |
@@ -1247,15 +1254,15 @@ static int aes_cipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1247 | * encrypt it and copy the unpadded part back | 1254 | * encrypt it and copy the unpadded part back |
1248 | */ | 1255 | */ |
1249 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, pframe, | 1256 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, pframe, |
1250 | pn_vector, num_blocks+1); | 1257 | pn_vector, num_blocks + 1); |
1251 | 1258 | ||
1252 | for (j = 0; j < 16; j++) | 1259 | for (j = 0; j < 16; j++) |
1253 | padded_buffer[j] = 0x00; | 1260 | padded_buffer[j] = 0x00; |
1254 | for (j = 0; j < payload_remainder; j++) | 1261 | for (j = 0; j < payload_remainder; j++) |
1255 | padded_buffer[j] = pframe[payload_index+j]; | 1262 | padded_buffer[j] = pframe[payload_index + j]; |
1256 | aes128k128d(key, ctr_preload, aes_out); | 1263 | aes128k128d(key, ctr_preload, aes_out); |
1257 | bitwise_xor(aes_out, padded_buffer, chain_buffer); | 1264 | bitwise_xor(aes_out, padded_buffer, chain_buffer); |
1258 | for (j = 0; j < payload_remainder;j++) | 1265 | for (j = 0; j < payload_remainder; j++) |
1259 | pframe[payload_index++] = chain_buffer[j]; | 1266 | pframe[payload_index++] = chain_buffer[j]; |
1260 | } | 1267 | } |
1261 | 1268 | ||
@@ -1266,11 +1273,11 @@ static int aes_cipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1266 | for (j = 0; j < 16; j++) | 1273 | for (j = 0; j < 16; j++) |
1267 | padded_buffer[j] = 0x00; | 1274 | padded_buffer[j] = 0x00; |
1268 | for (j = 0; j < 8; j++) | 1275 | for (j = 0; j < 8; j++) |
1269 | padded_buffer[j] = pframe[j+hdrlen+8+plen]; | 1276 | padded_buffer[j] = pframe[j + hdrlen + 8 + plen]; |
1270 | 1277 | ||
1271 | aes128k128d(key, ctr_preload, aes_out); | 1278 | aes128k128d(key, ctr_preload, aes_out); |
1272 | bitwise_xor(aes_out, padded_buffer, chain_buffer); | 1279 | bitwise_xor(aes_out, padded_buffer, chain_buffer); |
1273 | for (j = 0; j < 8;j++) | 1280 | for (j = 0; j < 8; j++) |
1274 | pframe[payload_index++] = chain_buffer[j]; | 1281 | pframe[payload_index++] = chain_buffer[j]; |
1275 | 1282 | ||
1276 | return _SUCCESS; | 1283 | return _SUCCESS; |
@@ -1315,7 +1322,7 @@ int rtw_aes_encrypt23a(struct rtw_adapter *padapter, | |||
1315 | res = _FAIL; | 1322 | res = _FAIL; |
1316 | goto out; | 1323 | goto out; |
1317 | } | 1324 | } |
1318 | if (!(stainfo->state &_FW_LINKED)) { | 1325 | if (!(stainfo->state & _FW_LINKED)) { |
1319 | DBG_8723A("%s, psta->state(0x%x) != _FW_LINKED\n", | 1326 | DBG_8723A("%s, psta->state(0x%x) != _FW_LINKED\n", |
1320 | __func__, stainfo->state); | 1327 | __func__, stainfo->state); |
1321 | return _FAIL; | 1328 | return _FAIL; |
@@ -1380,16 +1387,16 @@ static int aes_decipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1380 | 1387 | ||
1381 | /* start to decrypt the payload */ | 1388 | /* start to decrypt the payload */ |
1382 | 1389 | ||
1383 | num_blocks = (plen-8) / 16; /* plen including llc, payload_length and mic) */ | 1390 | num_blocks = (plen - 8) / 16; /* plen including llc, payload_length and mic) */ |
1384 | 1391 | ||
1385 | payload_remainder = (plen-8) % 16; | 1392 | payload_remainder = (plen - 8) % 16; |
1386 | 1393 | ||
1387 | pn_vector[0] = pframe[hdrlen]; | 1394 | pn_vector[0] = pframe[hdrlen]; |
1388 | pn_vector[1] = pframe[hdrlen+1]; | 1395 | pn_vector[1] = pframe[hdrlen + 1]; |
1389 | pn_vector[2] = pframe[hdrlen+4]; | 1396 | pn_vector[2] = pframe[hdrlen + 4]; |
1390 | pn_vector[3] = pframe[hdrlen+5]; | 1397 | pn_vector[3] = pframe[hdrlen + 5]; |
1391 | pn_vector[4] = pframe[hdrlen+6]; | 1398 | pn_vector[4] = pframe[hdrlen + 6]; |
1392 | pn_vector[5] = pframe[hdrlen+7]; | 1399 | pn_vector[5] = pframe[hdrlen + 7]; |
1393 | 1400 | ||
1394 | if ((hdrlen == sizeof(struct ieee80211_hdr_3addr) || | 1401 | if ((hdrlen == sizeof(struct ieee80211_hdr_3addr) || |
1395 | (hdrlen == sizeof(struct ieee80211_qos_hdr)))) | 1402 | (hdrlen == sizeof(struct ieee80211_qos_hdr)))) |
@@ -1424,7 +1431,7 @@ static int aes_decipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1424 | 1431 | ||
1425 | for (i = 0; i < num_blocks; i++) { | 1432 | for (i = 0; i < num_blocks; i++) { |
1426 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, | 1433 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, |
1427 | pframe, pn_vector, i+1); | 1434 | pframe, pn_vector, i + 1); |
1428 | 1435 | ||
1429 | aes128k128d(key, ctr_preload, aes_out); | 1436 | aes128k128d(key, ctr_preload, aes_out); |
1430 | bitwise_xor(aes_out, &pframe[payload_index], chain_buffer); | 1437 | bitwise_xor(aes_out, &pframe[payload_index], chain_buffer); |
@@ -1438,12 +1445,12 @@ static int aes_decipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1438 | * encrypt it and copy the unpadded part back | 1445 | * encrypt it and copy the unpadded part back |
1439 | */ | 1446 | */ |
1440 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, pframe, | 1447 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, pframe, |
1441 | pn_vector, num_blocks+1); | 1448 | pn_vector, num_blocks + 1); |
1442 | 1449 | ||
1443 | for (j = 0; j < 16; j++) | 1450 | for (j = 0; j < 16; j++) |
1444 | padded_buffer[j] = 0x00; | 1451 | padded_buffer[j] = 0x00; |
1445 | for (j = 0; j < payload_remainder; j++) | 1452 | for (j = 0; j < payload_remainder; j++) |
1446 | padded_buffer[j] = pframe[payload_index+j]; | 1453 | padded_buffer[j] = pframe[payload_index + j]; |
1447 | aes128k128d(key, ctr_preload, aes_out); | 1454 | aes128k128d(key, ctr_preload, aes_out); |
1448 | bitwise_xor(aes_out, padded_buffer, chain_buffer); | 1455 | bitwise_xor(aes_out, padded_buffer, chain_buffer); |
1449 | for (j = 0; j < payload_remainder; j++) | 1456 | for (j = 0; j < payload_remainder; j++) |
@@ -1451,24 +1458,24 @@ static int aes_decipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1451 | } | 1458 | } |
1452 | 1459 | ||
1453 | /* start to calculate the mic */ | 1460 | /* start to calculate the mic */ |
1454 | if ((hdrlen +plen+8) <= MAX_MSG_SIZE) | 1461 | if ((hdrlen + plen + 8) <= MAX_MSG_SIZE) |
1455 | memcpy(message, pframe, (hdrlen+plen+8)); /* 8 is for ext iv len */ | 1462 | memcpy(message, pframe, (hdrlen + plen + 8)); /* 8 is for ext iv len */ |
1456 | 1463 | ||
1457 | pn_vector[0] = pframe[hdrlen]; | 1464 | pn_vector[0] = pframe[hdrlen]; |
1458 | pn_vector[1] = pframe[hdrlen+1]; | 1465 | pn_vector[1] = pframe[hdrlen + 1]; |
1459 | pn_vector[2] = pframe[hdrlen+4]; | 1466 | pn_vector[2] = pframe[hdrlen + 4]; |
1460 | pn_vector[3] = pframe[hdrlen+5]; | 1467 | pn_vector[3] = pframe[hdrlen + 5]; |
1461 | pn_vector[4] = pframe[hdrlen+6]; | 1468 | pn_vector[4] = pframe[hdrlen + 6]; |
1462 | pn_vector[5] = pframe[hdrlen+7]; | 1469 | pn_vector[5] = pframe[hdrlen + 7]; |
1463 | 1470 | ||
1464 | construct_mic_iv(mic_iv, qc_exists, a4_exists, message, | 1471 | construct_mic_iv(mic_iv, qc_exists, a4_exists, message, |
1465 | plen-8, pn_vector); | 1472 | plen - 8, pn_vector); |
1466 | 1473 | ||
1467 | construct_mic_header1(mic_header1, hdrlen, message); | 1474 | construct_mic_header1(mic_header1, hdrlen, message); |
1468 | construct_mic_header2(mic_header2, message, a4_exists, qc_exists); | 1475 | construct_mic_header2(mic_header2, message, a4_exists, qc_exists); |
1469 | 1476 | ||
1470 | payload_remainder = (plen-8) % 16; | 1477 | payload_remainder = (plen - 8) % 16; |
1471 | num_blocks = (plen-8) / 16; | 1478 | num_blocks = (plen - 8) / 16; |
1472 | 1479 | ||
1473 | /* Find start of payload */ | 1480 | /* Find start of payload */ |
1474 | payload_index = hdrlen + 8; | 1481 | payload_index = hdrlen + 8; |
@@ -1502,12 +1509,12 @@ static int aes_decipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1502 | 1509 | ||
1503 | /* Insert MIC into payload */ | 1510 | /* Insert MIC into payload */ |
1504 | for (j = 0; j < 8; j++) | 1511 | for (j = 0; j < 8; j++) |
1505 | message[payload_index+j] = mic[j]; | 1512 | message[payload_index + j] = mic[j]; |
1506 | 1513 | ||
1507 | payload_index = hdrlen + 8; | 1514 | payload_index = hdrlen + 8; |
1508 | for (i = 0; i< num_blocks; i++) { | 1515 | for (i = 0; i < num_blocks; i++) { |
1509 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, | 1516 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, |
1510 | message, pn_vector, i+1); | 1517 | message, pn_vector, i + 1); |
1511 | aes128k128d(key, ctr_preload, aes_out); | 1518 | aes128k128d(key, ctr_preload, aes_out); |
1512 | bitwise_xor(aes_out, &message[payload_index], chain_buffer); | 1519 | bitwise_xor(aes_out, &message[payload_index], chain_buffer); |
1513 | for (j = 0; j < 16; j++) | 1520 | for (j = 0; j < 16; j++) |
@@ -1519,12 +1526,12 @@ static int aes_decipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1519 | * encrypt it and copy the unpadded part back | 1526 | * encrypt it and copy the unpadded part back |
1520 | */ | 1527 | */ |
1521 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, | 1528 | construct_ctr_preload(ctr_preload, a4_exists, qc_exists, |
1522 | message, pn_vector, num_blocks+1); | 1529 | message, pn_vector, num_blocks + 1); |
1523 | 1530 | ||
1524 | for (j = 0; j < 16; j++) | 1531 | for (j = 0; j < 16; j++) |
1525 | padded_buffer[j] = 0x00; | 1532 | padded_buffer[j] = 0x00; |
1526 | for (j = 0; j < payload_remainder; j++) | 1533 | for (j = 0; j < payload_remainder; j++) |
1527 | padded_buffer[j] = message[payload_index+j]; | 1534 | padded_buffer[j] = message[payload_index + j]; |
1528 | aes128k128d(key, ctr_preload, aes_out); | 1535 | aes128k128d(key, ctr_preload, aes_out); |
1529 | bitwise_xor(aes_out, padded_buffer, chain_buffer); | 1536 | bitwise_xor(aes_out, padded_buffer, chain_buffer); |
1530 | for (j = 0; j < payload_remainder; j++) | 1537 | for (j = 0; j < payload_remainder; j++) |
@@ -1538,7 +1545,7 @@ static int aes_decipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1538 | for (j = 0; j < 16; j++) | 1545 | for (j = 0; j < 16; j++) |
1539 | padded_buffer[j] = 0x00; | 1546 | padded_buffer[j] = 0x00; |
1540 | for (j = 0; j < 8; j++) | 1547 | for (j = 0; j < 8; j++) |
1541 | padded_buffer[j] = message[j+hdrlen+8+plen-8]; | 1548 | padded_buffer[j] = message[j + hdrlen + 8 + plen - 8]; |
1542 | 1549 | ||
1543 | aes128k128d(key, ctr_preload, aes_out); | 1550 | aes128k128d(key, ctr_preload, aes_out); |
1544 | bitwise_xor(aes_out, padded_buffer, chain_buffer); | 1551 | bitwise_xor(aes_out, padded_buffer, chain_buffer); |
@@ -1547,13 +1554,13 @@ static int aes_decipher(u8 *key, uint hdrlen, u8 *pframe, uint plen) | |||
1547 | 1554 | ||
1548 | /* compare the mic */ | 1555 | /* compare the mic */ |
1549 | for (i = 0; i < 8; i++) { | 1556 | for (i = 0; i < 8; i++) { |
1550 | if (pframe[hdrlen+8+plen-8+i] != message[hdrlen+8+plen-8+i]) { | 1557 | if (pframe[hdrlen + 8 + plen - 8 + i] != message[hdrlen + 8 + plen - 8 + i]) { |
1551 | RT_TRACE(_module_rtl871x_security_c_, _drv_err_, | 1558 | RT_TRACE(_module_rtl871x_security_c_, _drv_err_, |
1552 | "aes_decipher:mic check error mic[%d]: pframe(%x) != message(%x)\n", | 1559 | "aes_decipher:mic check error mic[%d]: pframe(%x) != message(%x)\n", |
1553 | i, pframe[hdrlen + 8 + plen - 8 + i], | 1560 | i, pframe[hdrlen + 8 + plen - 8 + i], |
1554 | message[hdrlen + 8 + plen - 8 + i]); | 1561 | message[hdrlen + 8 + plen - 8 + i]); |
1555 | DBG_8723A("aes_decipher:mic check error mic[%d]: pframe(%x) != message(%x)\n", | 1562 | DBG_8723A("aes_decipher:mic check error mic[%d]: pframe(%x) != message(%x)\n", |
1556 | i, pframe[hdrlen+8+plen-8+i], message[hdrlen+8+plen-8+i]); | 1563 | i, pframe[hdrlen + 8 + plen - 8 + i], message[hdrlen + 8 + plen - 8 + i]); |
1557 | res = _FAIL; | 1564 | res = _FAIL; |
1558 | } | 1565 | } |
1559 | } | 1566 | } |