author    Al Viro <viro@zeniv.linux.org.uk>              2006-11-15 00:21:58 -0500
committer David S. Miller <davem@sunset.davemloft.net>   2006-12-03 00:23:20 -0500
commit    879178cfbe56baa42ee73b9668816872c97d8ccd (patch)
tree      33ba9c52c3ca978863bab0897cb5bff6a0ac4dfa /include/asm-powerpc/checksum.h
parent    72685fcd286e94fef0b692f634d304b7240cef04 (diff)
[NET]: POWERPC checksum annotations and cleanups.

* sanitize prototypes, annotate
* kill useless shifts

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'include/asm-powerpc/checksum.h')
-rw-r--r--  include/asm-powerpc/checksum.h  59
1 file changed, 21 insertions, 38 deletions
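
For context (commentary on the patch, not part of it): the new prototypes below use the kernel's sparse-checked checksum types, __wsum for a 32-bit running sum and __sum16 for the folded, complemented 16-bit result, both defined as __bitwise types in include/linux/types.h. A minimal sketch of a hypothetical caller, under those assumptions, shows what the annotations buy: running sparse ("make C=1") can now warn when a plain integer variable or a host-order address is passed where an annotated type is expected.

/* Sketch only: example_tcp_check() is a hypothetical caller, not kernel code. */
#include <linux/types.h>
#include <linux/in.h>		/* IPPROTO_TCP */
#include <net/checksum.h>	/* pulls in asm/checksum.h */

static __sum16 example_tcp_check(const void *buf, unsigned short len,
				 __be32 saddr, __be32 daddr)
{
	/* 32-bit running sum; the __wsum annotation keeps it distinct
	 * from plain integers under sparse. */
	__wsum sum = csum_partial(buf, len, 0);

	/* Fold the pseudo-header plus payload sum into a complemented
	 * 16-bit checksum; saddr/daddr must really be __be32 here. */
	return csum_tcpudp_magic(saddr, daddr, len, IPPROTO_TCP, sum);
}
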
diff --git a/include/asm-powerpc/checksum.h b/include/asm-powerpc/checksum.h
index 609ecbbd7210..7cdf358337cf 100644
--- a/include/asm-powerpc/checksum.h
+++ b/include/asm-powerpc/checksum.h
@@ -14,17 +14,16 @@
  * which always checksum on 4 octet boundaries. ihl is the number
  * of 32-bit words and is always >= 5.
  */
-extern unsigned short ip_fast_csum(unsigned char * iph, unsigned int ihl);
+extern __sum16 ip_fast_csum(const void *iph, unsigned int ihl);
 
 /*
  * computes the checksum of the TCP/UDP pseudo-header
  * returns a 16-bit checksum, already complemented
  */
-extern unsigned short csum_tcpudp_magic(unsigned long saddr,
-					unsigned long daddr,
+extern __sum16 csum_tcpudp_magic(__be32 saddr, __be32 daddr,
 					unsigned short len,
 					unsigned short proto,
-					unsigned int sum);
+					__wsum sum);
 
 /*
  * computes the checksum of a memory block at buff, length len,
@@ -38,8 +37,7 @@ extern unsigned short csum_tcpudp_magic(unsigned long saddr,
  *
  * it's best to have buff aligned on a 32-bit boundary
  */
-extern unsigned int csum_partial(const unsigned char * buff, int len,
-				 unsigned int sum);
+extern __wsum csum_partial(const void *buff, int len, __wsum sum);
 
 /*
  * Computes the checksum of a memory block at src, length len,
@@ -51,20 +49,15 @@ extern unsigned int csum_partial(const unsigned char * buff, int len,
  * Like csum_partial, this must be called with even lengths,
  * except for the last fragment.
  */
-extern unsigned int csum_partial_copy_generic(const char *src, char *dst,
-					      int len, unsigned int sum,
+extern __wsum csum_partial_copy_generic(const void *src, void *dst,
+					      int len, __wsum sum,
 					      int *src_err, int *dst_err);
 /*
  * the same as csum_partial, but copies from src to dst while it
  * checksums.
  */
-unsigned int csum_partial_copy_nocheck(const char *src,
-				       char *dst,
-				       int len,
-				       unsigned int sum);
-
 #define csum_partial_copy_from_user(src, dst, len, sum, errp) \
-	csum_partial_copy_generic((src), (dst), (len), (sum), (errp), NULL)
+	csum_partial_copy_generic((__force const void *)(src), (dst), (len), (sum), (errp), NULL)
 
 #define csum_partial_copy_nocheck(src, dst, len, sum) \
 	csum_partial_copy_generic((src), (dst), (len), (sum), NULL, NULL)
@@ -74,7 +67,7 @@ unsigned int csum_partial_copy_nocheck(const char *src,
  * turns a 32-bit partial checksum (e.g. from csum_partial) into a
  * 1's complement 16-bit checksum.
  */
-static inline unsigned int csum_fold(unsigned int sum)
+static inline __sum16 csum_fold(__wsum sum)
 {
 	unsigned int tmp;
 
@@ -83,41 +76,32 @@ static inline unsigned int csum_fold(unsigned int sum)
 	/* if there is a carry from adding the two 16-bit halves,
 	   it will carry from the lower half into the upper half,
 	   giving us the correct sum in the upper half. */
-	sum = ~(sum + tmp) >> 16;
-	return sum;
+	return (__force __sum16)(~((__force u32)sum + tmp) >> 16);
 }
 
 /*
  * this routine is used for miscellaneous IP-like checksums, mainly
  * in icmp.c
  */
-static inline unsigned short ip_compute_csum(unsigned char * buff, int len)
+static inline __sum16 ip_compute_csum(const void *buff, int len)
 {
 	return csum_fold(csum_partial(buff, len, 0));
 }
 
-#ifdef __powerpc64__
-static inline u32 csum_tcpudp_nofold(u32 saddr,
-				     u32 daddr,
+static inline __wsum csum_tcpudp_nofold(__be32 saddr, __be32 daddr,
 				     unsigned short len,
 				     unsigned short proto,
-				     unsigned int sum)
+				     __wsum sum)
 {
-	unsigned long s = sum;
+#ifdef __powerpc64__
+	unsigned long s = (__force u32)sum;
 
-	s += saddr;
-	s += daddr;
-	s += (proto << 16) + len;
+	s += (__force u32)saddr;
+	s += (__force u32)daddr;
+	s += proto + len;
 	s += (s >> 32);
-	return (u32) s;
-}
+	return (__force __wsum) s;
 #else
-static inline unsigned long csum_tcpudp_nofold(unsigned long saddr,
-					       unsigned long daddr,
-					       unsigned short len,
-					       unsigned short proto,
-					       unsigned int sum)
-{
 	__asm__("\n\
 	addc %0,%0,%1 \n\
 	adde %0,%0,%2 \n\
@@ -125,10 +109,9 @@ static inline unsigned long csum_tcpudp_nofold(unsigned long saddr,
 	addze %0,%0 \n\
 	"
 	: "=r" (sum)
-	: "r" (daddr), "r"(saddr), "r"((proto<<16)+len), "0"(sum));
+	: "r" (daddr), "r"(saddr), "r"(proto + len), "0"(sum));
 	return sum;
-}
-
 #endif
+}
 #endif /* __KERNEL__ */
 #endif
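
A note on the "kill useless shifts" part of the change (the (proto << 16) + len to proto + len rewrites in csum_tcpudp_nofold above): the Internet checksum is one's-complement arithmetic, i.e. addition modulo 0xffff with end-around carry, and 0x10000 is congruent to 1 mod 0xffff, so adding proto shifted into the upper halfword contributes exactly the same amount to the folded result as adding proto itself. The standalone user-space sketch below (all input values are arbitrary, chosen for illustration only, not taken from the patch) demonstrates the equivalence:

/* demo.c: compile with "cc -o demo demo.c" and run; both lines print the
 * same folded checksum, with or without the proto << 16 shift. */
#include <stdio.h>
#include <stdint.h>

/* One's-complement fold with end-around carry, then complement; mathematically
 * equivalent to what csum_fold() returns for a 32-bit partial sum. */
static uint16_t fold(uint64_t sum)
{
	while (sum >> 16)
		sum = (sum & 0xffff) + (sum >> 16);
	return (uint16_t)~sum;
}

int main(void)
{
	/* Arbitrary test inputs. */
	uint32_t saddr = 0xc0a80001, daddr = 0xc0a80002;
	uint16_t len = 1500, proto = 6;		/* 6 == IPPROTO_TCP */
	uint32_t payload_sum = 0x12345678;	/* stand-in for a csum_partial() result */

	uint64_t with_shift = (uint64_t)payload_sum + saddr + daddr
			      + ((uint32_t)proto << 16) + len;
	uint64_t no_shift   = (uint64_t)payload_sum + saddr + daddr + proto + len;

	/* The two totals differ by proto * 0xffff, a multiple of 0xffff, so
	 * their one's-complement folds are identical. */
	printf("folded with shift:    0x%04x\n", (unsigned)fold(with_shift));
	printf("folded without shift: 0x%04x\n", (unsigned)fold(no_shift));
	return 0;
}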