author    Al Viro <viro@zeniv.linux.org.uk>    2006-11-15 00:15:19 -0500
committer David S. Miller <davem@sunset.davemloft.net>    2006-12-03 00:23:02 -0500
commit    3532010bcf7699f2ce9a2baab58b4b9a5426d97e (patch)
tree      47d1c423fe2345bea93ff3a576363971b9b0a572 /include/asm-cris/checksum.h
parent    9be259aae5264511fe0a8b5e3d6711e0fd1d55df (diff)
[NET]: Cris checksum annotations and cleanups.
* sanitize prototypes and annotate
* kill cast-as-lvalue abuses in csum_partial()
* usual ntohs-equals-shift for checksum purposes

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'include/asm-cris/checksum.h')
-rw-r--r--  include/asm-cris/checksum.h | 34
1 file changed, 15 insertions(+), 19 deletions(-)
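
A note on the "annotate" part of this change: __wsum and __sum16 are sparse-checked bitwise types, so mixing them with plain integers without an explicit __force cast is reported when the tree is run through sparse (make C=2, with -D__CHECK_ENDIAN__ enabling the bitwise/endian checks in kernels of this era). The following is a minimal standalone sketch of how the pattern fits together; the typedef shapes follow linux/types.h, but the attribute fallbacks and the demo_fold() helper are illustrative stand-ins, not code from this commit.

/*
 * Minimal sketch of the sparse annotations this series relies on.
 * Under sparse (__CHECKER__ defined) __bitwise makes each typedef a
 * distinct restricted type, so silent mixing with plain integers is
 * reported; under a normal compiler the attributes expand to nothing.
 * Typedef shapes follow linux/types.h of this era; everything else is
 * an illustrative stand-in.
 */
#ifdef __CHECKER__
#define __bitwise	__attribute__((bitwise))
#define __force		__attribute__((force))
#else
#define __bitwise
#define __force
#endif

typedef unsigned int   __bitwise __wsum;	/* 32-bit running checksum */
typedef unsigned short __bitwise __sum16;	/* final 16-bit checksum */

/* Crossing the type boundary takes an explicit __force cast, the same
 * pattern the new csum_fold() below uses. */
static inline __sum16 demo_fold(__wsum csum)
{
	unsigned int sum = (__force unsigned int)csum;

	sum = (sum & 0xffff) + (sum >> 16);	/* add in end-around carry */
	sum = (sum & 0xffff) + (sum >> 16);	/* add in end-around carry */
	return (__force __sum16)~sum;
}
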
diff --git a/include/asm-cris/checksum.h b/include/asm-cris/checksum.h
index 26a7719bbb84..180dbf2757b0 100644
--- a/include/asm-cris/checksum.h
+++ b/include/asm-cris/checksum.h
@@ -17,7 +17,7 @@
  *
  * it's best to have buff aligned on a 32-bit boundary
  */
-unsigned int csum_partial(const unsigned char * buff, int len, unsigned int sum);
+__wsum csum_partial(const void *buff, int len, __wsum sum);
 
 /*
  * the same as csum_partial, but copies from src while it
@@ -27,26 +27,23 @@ unsigned int csum_partial(const unsigned char * buff, int len, unsigned int sum)
  * better 64-bit) boundary
  */
 
-unsigned int csum_partial_copy_nocheck(const char *src, char *dst,
-				       int len, unsigned int sum);
+__wsum csum_partial_copy_nocheck(const void *src, void *dst,
+				       int len, __wsum sum);
 
 /*
  * Fold a partial checksum into a word
  */
 
-static inline unsigned int csum_fold(unsigned int sum)
+static inline __sum16 csum_fold(__wsum csum)
 {
-	/* the while loop is unnecessary really, it's always enough with two
-	   iterations */
-
-	while(sum >> 16)
-		sum = (sum & 0xffff) + (sum >> 16); /* add in end-around carry */
-
-	return ~sum;
+	u32 sum = (__force u32)csum;
+	sum = (sum & 0xffff) + (sum >> 16); /* add in end-around carry */
+	sum = (sum & 0xffff) + (sum >> 16); /* add in end-around carry */
+	return (__force __sum16)~sum;
 }
 
-extern unsigned int csum_partial_copy_from_user(const char *src, char *dst,
-						int len, unsigned int sum,
+extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst,
+						int len, __wsum sum,
 						int *errptr);
 
 /*
@@ -55,8 +52,7 @@ extern unsigned int csum_partial_copy_from_user(const char *src, char *dst,
  *
  */
 
-static inline unsigned short ip_fast_csum(unsigned char * iph,
-					  unsigned int ihl)
+static inline __sum16 ip_fast_csum(const void *iph, unsigned int ihl)
 {
 	return csum_fold(csum_partial(iph, ihl * 4, 0));
 }
@@ -66,11 +62,10 @@ static inline unsigned short ip_fast_csum(unsigned char * iph,
  * returns a 16-bit checksum, already complemented
  */
 
-static inline unsigned short int csum_tcpudp_magic(unsigned long saddr,
-						   unsigned long daddr,
+static inline __sum16 csum_tcpudp_magic(__be32 saddr, __be32 daddr,
 						   unsigned short len,
 						   unsigned short proto,
-						   unsigned int sum)
+						   __wsum sum)
 {
 	return csum_fold(csum_tcpudp_nofold(saddr,daddr,len,proto,sum));
 }
@@ -80,7 +75,8 @@ static inline unsigned short int csum_tcpudp_magic(unsigned long saddr,
  * in icmp.c
  */
 
-static inline unsigned short ip_compute_csum(unsigned char * buff, int len) {
+static inline __sum16 ip_compute_csum(const void *buff, int len)
+{
 	return csum_fold (csum_partial(buff, len, 0));
 }
 
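
A note on the csum_fold() rewrite above: the old body looped on (sum & 0xffff) + (sum >> 16) until the high half cleared, and its own comment already observed that two iterations always suffice; the new body simply unrolls the fold. Below is a small standalone check of that bound, in plain C with illustrative names (fold_twice() is not a kernel function):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Two end-around-carry passes always reduce a 32-bit accumulator to 16
 * bits: after the first pass sum <= 0xffff + 0xffff = 0x1fffe, so the
 * second pass adds a carry of at most 1 to a low half of at most 0xfffe.
 */
static uint16_t fold_twice(uint32_t sum)
{
	sum = (sum & 0xffff) + (sum >> 16);	/* add in end-around carry */
	sum = (sum & 0xffff) + (sum >> 16);	/* add in end-around carry */
	return (uint16_t)~sum;
}

int main(void)
{
	/* Worst cases for the first pass plus a few boundary values. */
	const uint32_t samples[] = {
		0, 1, 0xffff, 0x10000, 0x1fffe, 0xffff0001u, 0xffffffffu
	};
	size_t i;

	for (i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
		uint32_t s = samples[i];
		uint32_t folded = (s & 0xffff) + (s >> 16);

		folded = (folded & 0xffff) + (folded >> 16);
		assert(folded <= 0xffff);	/* fits in 16 bits after two passes */
		printf("0x%08x -> 0x%04x\n", (unsigned)s, (unsigned)fold_twice(s));
	}
	return 0;
}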