| field | value | date |
|---|---|---|
| author | Jan Glauber <jan.glauber@de.ibm.com> | 2006-01-14 16:20:55 -0500 |
| committer | Linus Torvalds <torvalds@g5.osdl.org> | 2006-01-14 21:27:08 -0500 |
| commit | fda5e142598341d30006e3715e53b2c983a9fca7 | |
| tree | 5db77e1175acc1529d62a54b0dc7d8d4b1f7f34f | |
| parent | b8dc6038ff894d0eb0b5d61c9fafdf323ec10251 | |
[PATCH] s390: aes crypto code fixes
Call KM[C] only with a multiple of the block size. Check the return value of the
KM[C] instructions and complain about errors.
Signed-off-by: Jan Glauber <jan.glauber@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
-rw-r--r--  arch/s390/crypto/aes_s390.c | 60
1 file changed, 44 insertions(+), 16 deletions(-)
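In short, every ECB and CBC helper gains the same guard: the byte count is first rounded down to a whole number of AES blocks, and the value returned by the crypt_s390_km/crypt_s390_kmc wrapper is checked against the number of bytes requested (the patch treats a negative value or a short count as a bug). A minimal sketch of that pattern, distilled from the diff below, is shown here; `aes_encrypt_ecb_sketch` is a hypothetical 128-bit-only simplification, whereas the real helpers switch on `sctx->key_len`:

```c
/*
 * Sketch only: the real aes_s390.c helpers handle 128/192/256-bit keys
 * via a switch on sctx->key_len; this hypothetical single-key variant
 * just illustrates the pattern the patch introduces.
 */
static unsigned int aes_encrypt_ecb_sketch(const struct cipher_desc *desc,
					   u8 *out, const u8 *in,
					   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
	int ret;

	/* only use complete blocks: never hand KM a partial block */
	nbytes &= ~(AES_BLOCK_SIZE - 1);

	ret = crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, nbytes);
	/* complain if the instruction failed or processed fewer bytes */
	BUG_ON((ret < 0) || (ret != nbytes));

	return nbytes;
}
```

The CBC variants apply the same check around crypt_s390_kmc, with the IV copied in and out around the call exactly as before.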
```diff
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 7a1033d8e00f..c5ca2dc5d428 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -114,80 +114,108 @@ static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out,
 				    const u8 *in, unsigned int nbytes)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+	int ret;
+
+	/* only use complete blocks */
+	nbytes &= ~(AES_BLOCK_SIZE - 1);
 
 	switch (sctx->key_len) {
 	case 16:
-		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, nbytes);
+		ret = crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	case 24:
-		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in, nbytes);
+		ret = crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	case 32:
-		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in, nbytes);
+		ret = crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	}
-	return nbytes & ~(AES_BLOCK_SIZE - 1);
+	return nbytes;
 }
 
 static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out,
 				    const u8 *in, unsigned int nbytes)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+	int ret;
+
+	/* only use complete blocks */
+	nbytes &= ~(AES_BLOCK_SIZE - 1);
 
 	switch (sctx->key_len) {
 	case 16:
-		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, nbytes);
+		ret = crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	case 24:
-		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in, nbytes);
+		ret = crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	case 32:
-		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in, nbytes);
+		ret = crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	}
-	return nbytes & ~(AES_BLOCK_SIZE - 1);
+	return nbytes;
 }
 
 static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out,
 				    const u8 *in, unsigned int nbytes)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+	int ret;
+
+	/* only use complete blocks */
+	nbytes &= ~(AES_BLOCK_SIZE - 1);
 
 	memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
 	switch (sctx->key_len) {
 	case 16:
-		crypt_s390_kmc(KMC_AES_128_ENCRYPT, &sctx->iv, out, in, nbytes);
+		ret = crypt_s390_kmc(KMC_AES_128_ENCRYPT, &sctx->iv, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	case 24:
-		crypt_s390_kmc(KMC_AES_192_ENCRYPT, &sctx->iv, out, in, nbytes);
+		ret = crypt_s390_kmc(KMC_AES_192_ENCRYPT, &sctx->iv, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	case 32:
-		crypt_s390_kmc(KMC_AES_256_ENCRYPT, &sctx->iv, out, in, nbytes);
+		ret = crypt_s390_kmc(KMC_AES_256_ENCRYPT, &sctx->iv, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	}
 	memcpy(desc->info, &sctx->iv, AES_BLOCK_SIZE);
 
-	return nbytes & ~(AES_BLOCK_SIZE - 1);
+	return nbytes;
 }
 
 static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out,
 				    const u8 *in, unsigned int nbytes)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
+	int ret;
+
+	/* only use complete blocks */
+	nbytes &= ~(AES_BLOCK_SIZE - 1);
 
 	memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
 	switch (sctx->key_len) {
 	case 16:
-		crypt_s390_kmc(KMC_AES_128_DECRYPT, &sctx->iv, out, in, nbytes);
+		ret = crypt_s390_kmc(KMC_AES_128_DECRYPT, &sctx->iv, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	case 24:
-		crypt_s390_kmc(KMC_AES_192_DECRYPT, &sctx->iv, out, in, nbytes);
+		ret = crypt_s390_kmc(KMC_AES_192_DECRYPT, &sctx->iv, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	case 32:
-		crypt_s390_kmc(KMC_AES_256_DECRYPT, &sctx->iv, out, in, nbytes);
+		ret = crypt_s390_kmc(KMC_AES_256_DECRYPT, &sctx->iv, out, in, nbytes);
+		BUG_ON((ret < 0) || (ret != nbytes));
 		break;
 	}
-	return nbytes & ~(AES_BLOCK_SIZE - 1);
+	return nbytes;
 }
 
 
```
