Diffstat (limited to 'include/asm-sparc64/checksum.h')
-rw-r--r--   include/asm-sparc64/checksum.h   77
1 file changed, 36 insertions(+), 41 deletions(-)
diff --git a/include/asm-sparc64/checksum.h b/include/asm-sparc64/checksum.h
index dc8bed246fc9..70a006da7634 100644
--- a/include/asm-sparc64/checksum.h
+++ b/include/asm-sparc64/checksum.h
@@ -30,7 +30,7 @@
  *
  * it's best to have buff aligned on a 32-bit boundary
  */
-extern unsigned int csum_partial(const unsigned char * buff, int len, unsigned int sum);
+extern __wsum csum_partial(const void * buff, int len, __wsum sum);
 
 /* the same as csum_partial, but copies from user space while it
  * checksums
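Aside: the point of this hunk, and of the rest of the patch, is to replace bare integers with the sparse-annotated checksum types. For reference, those types come from include/linux/types.h, not from this diff; roughly:

/* Roughly as defined in include/linux/types.h (not part of this diff).
 * Under sparse (__CHECKER__), __bitwise makes each a distinct type that
 * cannot be silently mixed with plain integers; a normal build just
 * sees an ordinary __u32 or __u16. */
typedef __u32 __bitwise __wsum;         /* 32-bit running partial sum      */
typedef __u16 __bitwise __sum16;        /* folded, complemented checksum   */

This is why the wrappers further down cast their raw long return value with (__force __wsum): the conversion is intentional, and __force tells sparse so.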
@@ -38,52 +38,50 @@ extern unsigned int csum_partial(const unsigned char * buff, int len, unsigned i
  * here even more important to align src and dst on a 32-bit (or even
  * better 64-bit) boundary
  */
-extern unsigned int csum_partial_copy_nocheck(const unsigned char *src,
-                                              unsigned char *dst,
-                                              int len, unsigned int sum);
+extern __wsum csum_partial_copy_nocheck(const void *src, void *dst,
+                                        int len, __wsum sum);
 
-extern long __csum_partial_copy_from_user(const unsigned char __user *src,
-                                          unsigned char *dst, int len,
-                                          unsigned int sum);
+extern long __csum_partial_copy_from_user(const void __user *src,
+                                          void *dst, int len,
+                                          __wsum sum);
 
-static inline unsigned int
-csum_partial_copy_from_user(const unsigned char __user *src,
-                            unsigned char *dst, int len,
-                            unsigned int sum, int *err)
+static inline __wsum
+csum_partial_copy_from_user(const void __user *src,
+                            void *dst, int len,
+                            __wsum sum, int *err)
 {
         long ret = __csum_partial_copy_from_user(src, dst, len, sum);
         if (ret < 0)
                 *err = -EFAULT;
-        return (unsigned int) ret;
+        return (__force __wsum) ret;
 }
 
 /*
  *      Copy and checksum to user
  */
 #define HAVE_CSUM_COPY_USER
-extern long __csum_partial_copy_to_user(const unsigned char *src,
-                                        unsigned char __user *dst, int len,
-                                        unsigned int sum);
+extern long __csum_partial_copy_to_user(const void *src,
+                                        void __user *dst, int len,
+                                        __wsum sum);
 
-static inline unsigned int
-csum_and_copy_to_user(const unsigned char *src,
-                      unsigned char __user *dst, int len,
-                      unsigned int sum, int *err)
+static inline __wsum
+csum_and_copy_to_user(const void *src,
+                      void __user *dst, int len,
+                      __wsum sum, int *err)
 {
         long ret = __csum_partial_copy_to_user(src, dst, len, sum);
         if (ret < 0)
                 *err = -EFAULT;
-        return (unsigned int) ret;
+        return (__force __wsum) ret;
 }
 
 /* ihl is always 5 or greater, almost always is 5, and iph is word aligned
  * the majority of the time.
  */
-extern unsigned short ip_fast_csum(__const__ unsigned char *iph,
-                                   unsigned int ihl);
+extern __sum16 ip_fast_csum(const void *iph, unsigned int ihl);
 
 /* Fold a partial checksum without adding pseudo headers. */
-static inline unsigned short csum_fold(unsigned int sum)
+static inline __sum16 csum_fold(__wsum sum)
 {
         unsigned int tmp;
 
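The copy-and-checksum wrappers above report faults through *err rather than through the return value. A hypothetical caller, with names invented here purely for illustration, would use the convention roughly like this:

/* Hypothetical illustration of the *err convention; not code from this
 * patch.  On a fault the wrapper stores -EFAULT in 'err' and the
 * returned partial sum must be discarded. */
static __wsum copy_to_user_and_csum_sketch(void __user *udst,
                                           const void *ksrc, int len,
                                           int *err)
{
        __wsum csum;

        *err = 0;
        csum = csum_and_copy_to_user(ksrc, udst, len, 0, err);
        if (*err)               /* -EFAULT: user page was not writable */
                return 0;
        return csum;            /* feed into csum_fold() or a pseudo-header sum */
}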
@@ -93,16 +91,15 @@ static inline unsigned short csum_fold(unsigned int sum)
 "       addc            %1, %%g0, %1\n"
 "       xnor            %%g0, %1, %0\n"
         : "=&r" (sum), "=r" (tmp)
-        : "0" (sum), "1" (sum<<16)
+        : "0" (sum), "1" ((__force u32)sum<<16)
         : "cc");
-        return (sum & 0xffff);
+        return (__force __sum16)sum;
 }
 
-static inline unsigned long csum_tcpudp_nofold(unsigned long saddr,
-                                               unsigned long daddr,
+static inline __wsum csum_tcpudp_nofold(__be32 saddr, __be32 daddr,
                                                unsigned int len,
                                                unsigned short proto,
-                                               unsigned int sum)
+                                               __wsum sum)
 {
         __asm__ __volatile__(
 "       addcc           %1, %0, %0\n"
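csum_fold() collapses the 32-bit running sum into the final 16-bit one's-complement value and inverts it; the inline assembly does this with an add / shift / add-carry / xnor sequence. A portable C sketch of the same fold, for illustration only and not a replacement for the sparc64 asm, is:

#include <stdint.h>

/* Portable sketch of the fold: add the two 16-bit halves with
 * end-around carry, then complement.  Equivalent in effect to the
 * addcc/srl/addc/xnor sequence above. */
static uint16_t csum_fold_sketch(uint32_t sum)
{
        sum = (sum & 0xffff) + (sum >> 16);     /* first fold, may carry  */
        sum = (sum & 0xffff) + (sum >> 16);     /* fold the carry back in */
        return (uint16_t)~sum;                  /* one's-complement result */
}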
@@ -110,7 +107,7 @@ static inline unsigned long csum_tcpudp_nofold(unsigned long saddr,
 "       addccc          %3, %0, %0\n"
 "       addc            %0, %%g0, %0\n"
         : "=r" (sum), "=r" (saddr)
-        : "r" (daddr), "r" ((proto<<16)+len), "0" (sum), "1" (saddr)
+        : "r" (daddr), "r" (proto + len), "0" (sum), "1" (saddr)
         : "cc");
         return sum;
 }
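csum_tcpudp_nofold() accumulates the TCP/UDP pseudo-header (addresses, protocol and length) into the 32-bit running sum with end-around carry. Because that sum is later folded to 16 bits, adding proto + len in the low half, as the new constraint does, contributes the same amount to the folded result as the old (proto<<16) + len. A portable sketch of the accumulation, illustrative only and assuming big-endian (network-order) words as on sparc64, is:

#include <stdint.h>

/* Illustrative only: add the pseudo-header fields into a 32-bit
 * one's-complement sum.  The real function does this in four
 * carry-propagating adds in sparc assembly. */
static uint32_t csum_tcpudp_nofold_sketch(uint32_t saddr, uint32_t daddr,
                                          uint32_t len, uint8_t proto,
                                          uint32_t sum)
{
        uint64_t s = sum;

        s += saddr;
        s += daddr;
        s += proto + len;       /* folds to the same 16-bit sum as (proto<<16)+len */
        while (s >> 32)         /* fold carries back in (end-around carry) */
                s = (uint32_t)s + (s >> 32);
        return (uint32_t)s;
}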
@@ -119,22 +116,20 @@ static inline unsigned long csum_tcpudp_nofold(unsigned long saddr,
  * computes the checksum of the TCP/UDP pseudo-header
  * returns a 16-bit checksum, already complemented
  */
-static inline unsigned short int csum_tcpudp_magic(unsigned long saddr,
-                                                   unsigned long daddr,
+static inline __sum16 csum_tcpudp_magic(__be32 saddr, __be32 daddr,
                                                    unsigned short len,
                                                    unsigned short proto,
-                                                   unsigned int sum)
+                                                   __wsum sum)
 {
         return csum_fold(csum_tcpudp_nofold(saddr,daddr,len,proto,sum));
 }
 
 #define _HAVE_ARCH_IPV6_CSUM
 
-static inline unsigned short int csum_ipv6_magic(struct in6_addr *saddr,
-                                                 struct in6_addr *daddr,
-                                                 __u32 len,
-                                                 unsigned short proto,
-                                                 unsigned int sum)
+static inline __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
+                                      const struct in6_addr *daddr,
+                                      __u32 len, unsigned short proto,
+                                      __wsum sum)
 {
         __asm__ __volatile__ (
 "       addcc           %3, %4, %%g7\n"
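As the comment says, csum_tcpudp_magic() returns the final, complemented 16-bit checksum. A hypothetical use for an outgoing UDP datagram, where the function name and the assumption of a flat, linear buffer are mine rather than the patch's, might read:

/* Hypothetical: checksum a linear UDP datagram of 'ulen' bytes starting
 * at its UDP header.  Assumes <net/checksum.h>, <linux/udp.h> and
 * <linux/in.h> are included. */
static __sum16 udp_datagram_csum_sketch(__be32 saddr, __be32 daddr,
                                        const struct udphdr *uh,
                                        unsigned int ulen)
{
        /* 32-bit partial sum over UDP header + payload ... */
        __wsum csum = csum_partial(uh, ulen, 0);

        /* ... then add the pseudo-header, fold and complement */
        return csum_tcpudp_magic(saddr, daddr, ulen, IPPROTO_UDP, csum);
}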
@@ -165,7 +160,7 @@ static inline unsigned short int csum_ipv6_magic(struct in6_addr *saddr,
 }
 
 /* this routine is used for miscellaneous IP-like checksums, mainly in icmp.c */
-static inline unsigned short ip_compute_csum(unsigned char * buff, int len)
+static inline __sum16 ip_compute_csum(const void *buff, int len)
 {
         return csum_fold(csum_partial(buff, len, 0));
 }
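ip_compute_csum() is the convenience form used for ICMP-style messages. Verification follows the usual one's-complement property: summing the whole message, embedded checksum field included, then folding and complementing yields zero. A hypothetical check, not taken from this patch, is:

/* Hypothetical: 'buf'/'len' cover the whole ICMP message including its
 * checksum field; a valid message makes ip_compute_csum() return 0
 * (comparing a __sum16 against literal 0 is accepted by sparse). */
static bool icmp_csum_ok_sketch(const void *buf, int len)
{
        return ip_compute_csum(buf, len) == 0;
}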