 include/linux/sunrpc/gss_krb5.h        |   9
 net/sunrpc/auth_gss/gss_krb5_crypto.c  | 255
 net/sunrpc/auth_gss/gss_krb5_mech.c    |  96
 net/sunrpc/auth_gss/gss_krb5_seal.c    |   1
 net/sunrpc/auth_gss/gss_krb5_seqnum.c  |  77
 net/sunrpc/auth_gss/gss_krb5_unseal.c  |   1
 net/sunrpc/auth_gss/gss_krb5_wrap.c    |  66
 7 files changed, 492 insertions(+), 13 deletions(-)
diff --git a/include/linux/sunrpc/gss_krb5.h b/include/linux/sunrpc/gss_krb5.h
index 79f6ac2492f5..5e774a5abf2c 100644
--- a/include/linux/sunrpc/gss_krb5.h
+++ b/include/linux/sunrpc/gss_krb5.h
@@ -317,5 +317,14 @@ gss_krb5_aes_decrypt(struct krb5_ctx *kctx, u32 offset,
 		     struct xdr_buf *buf, u32 *plainoffset,
 		     u32 *plainlen);
 
+int
+krb5_rc4_setup_seq_key(struct krb5_ctx *kctx,
+		       struct crypto_blkcipher *cipher,
+		       unsigned char *cksum);
+
+int
+krb5_rc4_setup_enc_key(struct krb5_ctx *kctx,
+		       struct crypto_blkcipher *cipher,
+		       s32 seqnum);
 void
 gss_krb5_make_confounder(char *p, u32 conflen);
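
The two helpers declared above implement the per-token key derivations that the rc4-hmac enctype (RFC 4757) requires: Kseq is derived from the session key and the token checksum, Kcrypt from the session key and the plaintext sequence number. As a rough userspace sketch of the math that krb5_rc4_setup_seq_key() and krb5_rc4_setup_enc_key() feed into crypto_blkcipher_setkey() -- illustration only, using OpenSSL's one-shot HMAC(); the function names below are local to this sketch, not part of the patch:

/* Userspace illustration of the rc4-hmac per-token key derivations. */
#include <openssl/evp.h>
#include <openssl/hmac.h>

#define RC4_KEYLEN 16

/* Kseq = HMAC-MD5(HMAC-MD5(Ksess, 00 00 00 00), cksum[0..7]) */
static void rc4_derive_kseq(const unsigned char Ksess[RC4_KEYLEN],
			    const unsigned char cksum[8],
			    unsigned char Kseq[RC4_KEYLEN])
{
	static const unsigned char zero[4];	/* 4-byte zero constant */
	unsigned char tmp[RC4_KEYLEN];

	HMAC(EVP_md5(), Ksess, RC4_KEYLEN, zero, 4, tmp, NULL);
	HMAC(EVP_md5(), tmp, RC4_KEYLEN, cksum, 8, Kseq, NULL);
}

/* Kcrypt = HMAC-MD5(HMAC-MD5(Ksess ^ 0xf0, 00 00 00 00), seqnum_be) */
static void rc4_derive_kcrypt(const unsigned char Ksess[RC4_KEYLEN],
			      unsigned int seqnum,
			      unsigned char Kcrypt[RC4_KEYLEN])
{
	static const unsigned char zero[4];
	unsigned char xored[RC4_KEYLEN], tmp[RC4_KEYLEN], seq[4];
	int i;

	for (i = 0; i < RC4_KEYLEN; i++)
		xored[i] = Ksess[i] ^ 0xf0;
	seq[0] = (seqnum >> 24) & 0xff;		/* big-endian sequence number */
	seq[1] = (seqnum >> 16) & 0xff;
	seq[2] = (seqnum >> 8) & 0xff;
	seq[3] = seqnum & 0xff;

	HMAC(EVP_md5(), xored, RC4_KEYLEN, zero, 4, tmp, NULL);
	HMAC(EVP_md5(), tmp, RC4_KEYLEN, seq, 4, Kcrypt, NULL);
}

Both derivations are two chained HMAC-MD5 operations: first keyed with the (possibly XOR-masked) session key over a 4-byte zero constant, then keyed with that intermediate value over the per-token checksum or sequence number.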
diff --git a/net/sunrpc/auth_gss/gss_krb5_crypto.c b/net/sunrpc/auth_gss/gss_krb5_crypto.c
index ed4106a3daf2..75ee993ea057 100644
--- a/net/sunrpc/auth_gss/gss_krb5_crypto.c
+++ b/net/sunrpc/auth_gss/gss_krb5_crypto.c
@@ -124,6 +124,114 @@ checksummer(struct scatterlist *sg, void *data)
 	return crypto_hash_update(desc, sg, sg->length);
 }
 
+static int
+arcfour_hmac_md5_usage_to_salt(unsigned int usage, u8 salt[4])
+{
+	unsigned int ms_usage;
+
+	switch (usage) {
+	case KG_USAGE_SIGN:
+		ms_usage = 15;
+		break;
+	case KG_USAGE_SEAL:
+		ms_usage = 13;
+		break;
+	default:
+		return -EINVAL;
+	}
+	salt[0] = (ms_usage >> 0) & 0xff;
+	salt[1] = (ms_usage >> 8) & 0xff;
+	salt[2] = (ms_usage >> 16) & 0xff;
+	salt[3] = (ms_usage >> 24) & 0xff;
+
+	return 0;
+}
+
+static u32
+make_checksum_hmac_md5(struct krb5_ctx *kctx, char *header, int hdrlen,
+		       struct xdr_buf *body, int body_offset, u8 *cksumkey,
+		       unsigned int usage, struct xdr_netobj *cksumout)
+{
+	struct hash_desc desc;
+	struct scatterlist sg[1];
+	int err;
+	u8 checksumdata[GSS_KRB5_MAX_CKSUM_LEN];
+	u8 rc4salt[4];
+	struct crypto_hash *md5;
+	struct crypto_hash *hmac_md5;
+
+	if (cksumkey == NULL)
+		return GSS_S_FAILURE;
+
+	if (cksumout->len < kctx->gk5e->cksumlength) {
+		dprintk("%s: checksum buffer length, %u, too small for %s\n",
+			__func__, cksumout->len, kctx->gk5e->name);
+		return GSS_S_FAILURE;
+	}
+
+	if (arcfour_hmac_md5_usage_to_salt(usage, rc4salt)) {
+		dprintk("%s: invalid usage value %u\n", __func__, usage);
+		return GSS_S_FAILURE;
+	}
+
+	md5 = crypto_alloc_hash("md5", 0, CRYPTO_ALG_ASYNC);
+	if (IS_ERR(md5))
+		return GSS_S_FAILURE;
+
+	hmac_md5 = crypto_alloc_hash(kctx->gk5e->cksum_name, 0,
+				     CRYPTO_ALG_ASYNC);
+	if (IS_ERR(hmac_md5)) {
+		crypto_free_hash(md5);
+		return GSS_S_FAILURE;
+	}
+
+	desc.tfm = md5;
+	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+
+	err = crypto_hash_init(&desc);
+	if (err)
+		goto out;
+	sg_init_one(sg, rc4salt, 4);
+	err = crypto_hash_update(&desc, sg, 4);
+	if (err)
+		goto out;
+
+	sg_init_one(sg, header, hdrlen);
+	err = crypto_hash_update(&desc, sg, hdrlen);
+	if (err)
+		goto out;
+	err = xdr_process_buf(body, body_offset, body->len - body_offset,
+			      checksummer, &desc);
+	if (err)
+		goto out;
+	err = crypto_hash_final(&desc, checksumdata);
+	if (err)
+		goto out;
+
+	desc.tfm = hmac_md5;
+	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+
+	err = crypto_hash_init(&desc);
+	if (err)
+		goto out;
+	err = crypto_hash_setkey(hmac_md5, cksumkey, kctx->gk5e->keylength);
+	if (err)
+		goto out;
+
+	sg_init_one(sg, checksumdata, crypto_hash_digestsize(md5));
+	err = crypto_hash_digest(&desc, sg, crypto_hash_digestsize(md5),
+				 checksumdata);
+	if (err)
+		goto out;
+
+	memcpy(cksumout->data, checksumdata, kctx->gk5e->cksumlength);
+	cksumout->len = kctx->gk5e->cksumlength;
+out:
+	crypto_free_hash(md5);
+	crypto_free_hash(hmac_md5);
+	return err ? GSS_S_FAILURE : 0;
+}
+
 /*
  * checksum the plaintext data and hdrlen bytes of the token header
  * The checksum is performed over the first 8 bytes of the
@@ -140,6 +248,11 @@ make_checksum(struct krb5_ctx *kctx, char *header, int hdrlen,
 	u8 checksumdata[GSS_KRB5_MAX_CKSUM_LEN];
 	unsigned int checksumlen;
 
+	if (kctx->gk5e->ctype == CKSUMTYPE_HMAC_MD5_ARCFOUR)
+		return make_checksum_hmac_md5(kctx, header, hdrlen,
+					      body, body_offset,
+					      cksumkey, usage, cksumout);
+
 	if (cksumout->len < kctx->gk5e->cksumlength) {
 		dprintk("%s: checksum buffer length, %u, too small for %s\n",
 			__func__, cksumout->len, kctx->gk5e->name);
@@ -733,3 +846,145 @@ out_err:
 	ret = GSS_S_FAILURE;
 	return ret;
 }
+
+/*
+ * Compute Kseq given the initial session key and the checksum.
+ * Set the key of the given cipher.
+ */
+int
+krb5_rc4_setup_seq_key(struct krb5_ctx *kctx, struct crypto_blkcipher *cipher,
+		       unsigned char *cksum)
+{
+	struct crypto_hash *hmac;
+	struct hash_desc desc;
+	struct scatterlist sg[1];
+	u8 Kseq[GSS_KRB5_MAX_KEYLEN];
+	u32 zeroconstant = 0;
+	int err;
+
+	dprintk("%s: entered\n", __func__);
+
+	hmac = crypto_alloc_hash(kctx->gk5e->cksum_name, 0, CRYPTO_ALG_ASYNC);
+	if (IS_ERR(hmac)) {
+		dprintk("%s: error %ld, allocating hash '%s'\n",
+			__func__, PTR_ERR(hmac), kctx->gk5e->cksum_name);
+		return PTR_ERR(hmac);
+	}
+
+	desc.tfm = hmac;
+	desc.flags = 0;
+
+	err = crypto_hash_init(&desc);
+	if (err)
+		goto out_err;
+
+	/* Compute intermediate Kseq from session key */
+	err = crypto_hash_setkey(hmac, kctx->Ksess, kctx->gk5e->keylength);
+	if (err)
+		goto out_err;
+
+	sg_init_table(sg, 1);
+	sg_set_buf(sg, &zeroconstant, 4);
+
+	err = crypto_hash_digest(&desc, sg, 4, Kseq);
+	if (err)
+		goto out_err;
+
+	/* Compute final Kseq from the checksum and intermediate Kseq */
+	err = crypto_hash_setkey(hmac, Kseq, kctx->gk5e->keylength);
+	if (err)
+		goto out_err;
+
+	sg_set_buf(sg, cksum, 8);
+
+	err = crypto_hash_digest(&desc, sg, 8, Kseq);
+	if (err)
+		goto out_err;
+
+	err = crypto_blkcipher_setkey(cipher, Kseq, kctx->gk5e->keylength);
+	if (err)
+		goto out_err;
+
+	err = 0;
+
+out_err:
+	crypto_free_hash(hmac);
+	dprintk("%s: returning %d\n", __func__, err);
+	return err;
+}
+
+/*
+ * Compute Kcrypt given the initial session key and the plaintext seqnum.
+ * Set the key of cipher kctx->enc.
+ */
+int
+krb5_rc4_setup_enc_key(struct krb5_ctx *kctx, struct crypto_blkcipher *cipher,
+		       s32 seqnum)
+{
+	struct crypto_hash *hmac;
+	struct hash_desc desc;
+	struct scatterlist sg[1];
+	u8 Kcrypt[GSS_KRB5_MAX_KEYLEN];
+	u8 zeroconstant[4] = {0};
+	u8 seqnumarray[4];
+	int err, i;
+
+	dprintk("%s: entered, seqnum %u\n", __func__, seqnum);
+
+	hmac = crypto_alloc_hash(kctx->gk5e->cksum_name, 0, CRYPTO_ALG_ASYNC);
+	if (IS_ERR(hmac)) {
+		dprintk("%s: error %ld, allocating hash '%s'\n",
+			__func__, PTR_ERR(hmac), kctx->gk5e->cksum_name);
+		return PTR_ERR(hmac);
+	}
+
+	desc.tfm = hmac;
+	desc.flags = 0;
+
+	err = crypto_hash_init(&desc);
+	if (err)
+		goto out_err;
+
+	/* Compute intermediate Kcrypt from session key */
+	for (i = 0; i < kctx->gk5e->keylength; i++)
+		Kcrypt[i] = kctx->Ksess[i] ^ 0xf0;
+
+	err = crypto_hash_setkey(hmac, Kcrypt, kctx->gk5e->keylength);
+	if (err)
+		goto out_err;
+
+	sg_init_table(sg, 1);
+	sg_set_buf(sg, zeroconstant, 4);
+
+	err = crypto_hash_digest(&desc, sg, 4, Kcrypt);
+	if (err)
+		goto out_err;
+
+	/* Compute final Kcrypt from the seqnum and intermediate Kcrypt */
+	err = crypto_hash_setkey(hmac, Kcrypt, kctx->gk5e->keylength);
+	if (err)
+		goto out_err;
+
+	seqnumarray[0] = (unsigned char) ((seqnum >> 24) & 0xff);
+	seqnumarray[1] = (unsigned char) ((seqnum >> 16) & 0xff);
+	seqnumarray[2] = (unsigned char) ((seqnum >> 8) & 0xff);
+	seqnumarray[3] = (unsigned char) ((seqnum >> 0) & 0xff);
+
+	sg_set_buf(sg, seqnumarray, 4);
+
+	err = crypto_hash_digest(&desc, sg, 4, Kcrypt);
+	if (err)
+		goto out_err;
+
+	err = crypto_blkcipher_setkey(cipher, Kcrypt, kctx->gk5e->keylength);
+	if (err)
+		goto out_err;
+
+	err = 0;
+
+out_err:
+	crypto_free_hash(hmac);
+	dprintk("%s: returning %d\n", __func__, err);
+	return err;
+}
+
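
For reference, the checksum that make_checksum_hmac_md5() produces is the rc4-hmac MIC from RFC 4757: an unkeyed MD5 over a 4-byte little-endian usage salt (15 for KG_USAGE_SIGN, 13 for KG_USAGE_SEAL, as mapped by arcfour_hmac_md5_usage_to_salt()), then the token header and the message body, with the MD5 digest finally run through HMAC-MD5 under the checksum key and truncated to cksumlength (8) bytes. A minimal userspace sketch, assuming OpenSSL's MD5 and HMAC one-shot helpers (the function name below is local to this sketch, not the kernel API):

/* Sketch only: the rc4-hmac MIC computed by make_checksum_hmac_md5(). */
#include <string.h>
#include <openssl/evp.h>
#include <openssl/hmac.h>
#include <openssl/md5.h>

/* cksum = first 8 bytes of HMAC-MD5(Ksign, MD5(salt || header || body)) */
static void rc4_mic(const unsigned char Ksign[16],
		    const unsigned char salt[4],	/* 15 or 13, little-endian */
		    const unsigned char *hdr, size_t hdrlen,
		    const unsigned char *body, size_t bodylen,
		    unsigned char out[8])
{
	MD5_CTX md5;
	unsigned char inner[MD5_DIGEST_LENGTH], full[MD5_DIGEST_LENGTH];

	MD5_Init(&md5);
	MD5_Update(&md5, salt, 4);
	MD5_Update(&md5, hdr, hdrlen);
	MD5_Update(&md5, body, bodylen);
	MD5_Final(inner, &md5);

	HMAC(EVP_md5(), Ksign, 16, inner, sizeof(inner), full, NULL);
	memcpy(out, full, 8);	/* gk5e->cksumlength == 8 */
}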
diff --git a/net/sunrpc/auth_gss/gss_krb5_mech.c b/net/sunrpc/auth_gss/gss_krb5_mech.c
index ef6b31349046..54eda5f0c58b 100644
--- a/net/sunrpc/auth_gss/gss_krb5_mech.c
+++ b/net/sunrpc/auth_gss/gss_krb5_mech.c
@@ -73,6 +73,27 @@ static const struct gss_krb5_enctype supported_gss_krb5_enctypes[] = {
 	  .keyed_cksum = 0,
 	},
 	/*
+	 * RC4-HMAC
+	 */
+	{
+	  .etype = ENCTYPE_ARCFOUR_HMAC,
+	  .ctype = CKSUMTYPE_HMAC_MD5_ARCFOUR,
+	  .name = "rc4-hmac",
+	  .encrypt_name = "ecb(arc4)",
+	  .cksum_name = "hmac(md5)",
+	  .encrypt = krb5_encrypt,
+	  .decrypt = krb5_decrypt,
+	  .mk_key = NULL,
+	  .signalg = SGN_ALG_HMAC_MD5,
+	  .sealalg = SEAL_ALG_MICROSOFT_RC4,
+	  .keybytes = 16,
+	  .keylength = 16,
+	  .blocksize = 1,
+	  .conflen = 8,
+	  .cksumlength = 8,
+	  .keyed_cksum = 1,
+	},
+	/*
 	 * 3DES
 	 */
 	{
@@ -392,6 +413,79 @@ out_err:
 	return -EINVAL;
 }
 
+/*
+ * Note that RC4 depends on deriving keys using the sequence
+ * number or the checksum of a token.  Therefore, the final keys
+ * cannot be calculated until the token is being constructed!
+ */
+static int
+context_derive_keys_rc4(struct krb5_ctx *ctx)
+{
+	struct crypto_hash *hmac;
+	char sigkeyconstant[] = "signaturekey";
+	int slen = strlen(sigkeyconstant) + 1;	/* include null terminator */
+	struct hash_desc desc;
+	struct scatterlist sg[1];
+	int err;
+
+	dprintk("RPC:       %s: entered\n", __func__);
+	/*
+	 * derive cksum (aka Ksign) key
+	 */
+	hmac = crypto_alloc_hash(ctx->gk5e->cksum_name, 0, CRYPTO_ALG_ASYNC);
+	if (IS_ERR(hmac)) {
+		dprintk("%s: error %ld allocating hash '%s'\n",
+			__func__, PTR_ERR(hmac), ctx->gk5e->cksum_name);
+		err = PTR_ERR(hmac);
+		goto out_err;
+	}
+
+	err = crypto_hash_setkey(hmac, ctx->Ksess, ctx->gk5e->keylength);
+	if (err)
+		goto out_err_free_hmac;
+
+	sg_init_table(sg, 1);
+	sg_set_buf(sg, sigkeyconstant, slen);
+
+	desc.tfm = hmac;
+	desc.flags = 0;
+
+	err = crypto_hash_init(&desc);
+	if (err)
+		goto out_err_free_hmac;
+
+	err = crypto_hash_digest(&desc, sg, slen, ctx->cksum);
+	if (err)
+		goto out_err_free_hmac;
+	/*
+	 * allocate hash, and blkciphers for data and seqnum encryption
+	 */
+	ctx->enc = crypto_alloc_blkcipher(ctx->gk5e->encrypt_name, 0,
+					  CRYPTO_ALG_ASYNC);
+	if (IS_ERR(ctx->enc)) {
+		err = PTR_ERR(ctx->enc);
+		goto out_err_free_hmac;
+	}
+
+	ctx->seq = crypto_alloc_blkcipher(ctx->gk5e->encrypt_name, 0,
+					  CRYPTO_ALG_ASYNC);
+	if (IS_ERR(ctx->seq)) {
+		crypto_free_blkcipher(ctx->enc);
+		err = PTR_ERR(ctx->seq);
+		goto out_err_free_hmac;
+	}
+
+	dprintk("RPC:       %s: returning success\n", __func__);
+
+	err = 0;
+
+out_err_free_hmac:
+	crypto_free_hash(hmac);
+out_err:
+	dprintk("RPC:       %s: returning %d\n", __func__, err);
+	return err;
+}
+
 static int
 context_derive_keys_new(struct krb5_ctx *ctx)
 {
@@ -561,6 +655,8 @@ gss_import_v2_context(const void *p, const void *end, struct krb5_ctx *ctx)
 	switch (ctx->enctype) {
 	case ENCTYPE_DES3_CBC_RAW:
 		return context_derive_keys_des3(ctx);
+	case ENCTYPE_ARCFOUR_HMAC:
+		return context_derive_keys_rc4(ctx);
 	case ENCTYPE_AES128_CTS_HMAC_SHA1_96:
 	case ENCTYPE_AES256_CTS_HMAC_SHA1_96:
 		return context_derive_keys_new(ctx);
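
As the comment above context_derive_keys_rc4() notes, only the checksum key (Ksign, stored in ctx->cksum) can be derived at context-import time; Kseq and Kcrypt depend on per-token values and are computed later by the helpers in gss_krb5_crypto.c. The Ksign derivation is a single HMAC-MD5 of the constant string "signaturekey", including its NUL terminator, under the session key. A small userspace sketch, assuming OpenSSL (the function name is local to this sketch):

#include <openssl/evp.h>
#include <openssl/hmac.h>

/* Ksign = HMAC-MD5(Ksess, "signaturekey" || 0x00), as in
 * context_derive_keys_rc4(). */
static void rc4_derive_ksign(const unsigned char Ksess[16],
			     unsigned char Ksign[16])
{
	static const char constant[] = "signaturekey";	/* NUL included */

	HMAC(EVP_md5(), Ksess, 16,
	     (const unsigned char *)constant, sizeof(constant), Ksign, NULL);
}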
diff --git a/net/sunrpc/auth_gss/gss_krb5_seal.c b/net/sunrpc/auth_gss/gss_krb5_seal.c
index 36fe487d93d2..d7941eab7796 100644
--- a/net/sunrpc/auth_gss/gss_krb5_seal.c
+++ b/net/sunrpc/auth_gss/gss_krb5_seal.c
@@ -213,6 +213,7 @@ gss_get_mic_kerberos(struct gss_ctx *gss_ctx, struct xdr_buf *text,
 		BUG();
 	case ENCTYPE_DES_CBC_RAW:
 	case ENCTYPE_DES3_CBC_RAW:
+	case ENCTYPE_ARCFOUR_HMAC:
 		return gss_get_mic_v1(ctx, text, token);
 	case ENCTYPE_AES128_CTS_HMAC_SHA1_96:
 	case ENCTYPE_AES256_CTS_HMAC_SHA1_96:
diff --git a/net/sunrpc/auth_gss/gss_krb5_seqnum.c b/net/sunrpc/auth_gss/gss_krb5_seqnum.c
index 83b593084976..415c013ba382 100644
--- a/net/sunrpc/auth_gss/gss_krb5_seqnum.c
+++ b/net/sunrpc/auth_gss/gss_krb5_seqnum.c
@@ -39,6 +39,38 @@
 # define RPCDBG_FACILITY	RPCDBG_AUTH
 #endif
 
+static s32
+krb5_make_rc4_seq_num(struct krb5_ctx *kctx, int direction, s32 seqnum,
+		      unsigned char *cksum, unsigned char *buf)
+{
+	struct crypto_blkcipher *cipher;
+	unsigned char plain[8];
+	s32 code;
+
+	dprintk("RPC:       %s:\n", __func__);
+	cipher = crypto_alloc_blkcipher(kctx->gk5e->encrypt_name, 0,
+					CRYPTO_ALG_ASYNC);
+	if (IS_ERR(cipher))
+		return PTR_ERR(cipher);
+
+	plain[0] = (unsigned char) ((seqnum >> 24) & 0xff);
+	plain[1] = (unsigned char) ((seqnum >> 16) & 0xff);
+	plain[2] = (unsigned char) ((seqnum >> 8) & 0xff);
+	plain[3] = (unsigned char) ((seqnum >> 0) & 0xff);
+	plain[4] = direction;
+	plain[5] = direction;
+	plain[6] = direction;
+	plain[7] = direction;
+
+	code = krb5_rc4_setup_seq_key(kctx, cipher, cksum);
+	if (code)
+		goto out;
+
+	code = krb5_encrypt(cipher, cksum, plain, buf, 8);
+out:
+	crypto_free_blkcipher(cipher);
+	return code;
+}
 s32
 krb5_make_seq_num(struct krb5_ctx *kctx,
 		  struct crypto_blkcipher *key,
@@ -48,6 +80,10 @@ krb5_make_seq_num(struct krb5_ctx *kctx,
 {
 	unsigned char plain[8];
 
+	if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC)
+		return krb5_make_rc4_seq_num(kctx, direction, seqnum,
+					     cksum, buf);
+
 	plain[0] = (unsigned char) (seqnum & 0xff);
 	plain[1] = (unsigned char) ((seqnum >> 8) & 0xff);
 	plain[2] = (unsigned char) ((seqnum >> 16) & 0xff);
@@ -61,6 +97,43 @@ krb5_make_seq_num(struct krb5_ctx *kctx,
 	return krb5_encrypt(key, cksum, plain, buf, 8);
 }
 
+static s32
+krb5_get_rc4_seq_num(struct krb5_ctx *kctx, unsigned char *cksum,
+		     unsigned char *buf, int *direction, s32 *seqnum)
+{
+	struct crypto_blkcipher *cipher;
+	unsigned char plain[8];
+	s32 code;
+
+	dprintk("RPC:       %s:\n", __func__);
+	cipher = crypto_alloc_blkcipher(kctx->gk5e->encrypt_name, 0,
+					CRYPTO_ALG_ASYNC);
+	if (IS_ERR(cipher))
+		return PTR_ERR(cipher);
+
+	code = krb5_rc4_setup_seq_key(kctx, cipher, cksum);
+	if (code)
+		goto out;
+
+	code = krb5_decrypt(cipher, cksum, buf, plain, 8);
+	if (code)
+		goto out;
+
+	if ((plain[4] != plain[5]) || (plain[4] != plain[6])
+				   || (plain[4] != plain[7])) {
+		code = (s32)KG_BAD_SEQ;
+		goto out;
+	}
+
+	*direction = plain[4];
+
+	*seqnum = ((plain[0] << 24) | (plain[1] << 16) |
+			(plain[2] << 8) | (plain[3]));
+out:
+	crypto_free_blkcipher(cipher);
+	return code;
+}
+
 s32
 krb5_get_seq_num(struct krb5_ctx *kctx,
 		 unsigned char *cksum,
@@ -73,6 +146,10 @@ krb5_get_seq_num(struct krb5_ctx *kctx,
 
 	dprintk("RPC:       krb5_get_seq_num:\n");
 
+	if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC)
+		return krb5_get_rc4_seq_num(kctx, cksum, buf,
+					    direction, seqnum);
+
 	if ((code = krb5_decrypt(key, cksum, buf, plain, 8)))
 		return code;
 
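
One detail worth calling out in krb5_make_rc4_seq_num()/krb5_get_rc4_seq_num(): the 8-byte sequence field carries the sequence number big-endian in bytes 0-3 and the direction octet repeated in bytes 4-7, whereas the generic DES/3DES path just above stores the sequence number little-endian. A tiny sketch of the plaintext layout (name local to this sketch):

#include <stdint.h>

/* 8-byte SND_SEQ plaintext for rc4-hmac: big-endian seqnum followed by
 * four copies of the direction octet (0x00 from the initiator, 0xff
 * from the acceptor, as passed by the v1 wrap/MIC callers). */
static void rc4_seq_plaintext(uint32_t seqnum, unsigned char direction,
			      unsigned char plain[8])
{
	plain[0] = (seqnum >> 24) & 0xff;
	plain[1] = (seqnum >> 16) & 0xff;
	plain[2] = (seqnum >> 8) & 0xff;
	plain[3] = seqnum & 0xff;
	plain[4] = plain[5] = plain[6] = plain[7] = direction;
}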
diff --git a/net/sunrpc/auth_gss/gss_krb5_unseal.c b/net/sunrpc/auth_gss/gss_krb5_unseal.c
index 97eb91b8c70c..6cd930f3678f 100644
--- a/net/sunrpc/auth_gss/gss_krb5_unseal.c
+++ b/net/sunrpc/auth_gss/gss_krb5_unseal.c
@@ -216,6 +216,7 @@ gss_verify_mic_kerberos(struct gss_ctx *gss_ctx,
 		BUG();
 	case ENCTYPE_DES_CBC_RAW:
 	case ENCTYPE_DES3_CBC_RAW:
+	case ENCTYPE_ARCFOUR_HMAC:
 		return gss_verify_mic_v1(ctx, message_buffer, read_token);
 	case ENCTYPE_AES128_CTS_HMAC_SHA1_96:
 	case ENCTYPE_AES256_CTS_HMAC_SHA1_96:
diff --git a/net/sunrpc/auth_gss/gss_krb5_wrap.c b/net/sunrpc/auth_gss/gss_krb5_wrap.c
index 383db891c835..2763e3e48db4 100644
--- a/net/sunrpc/auth_gss/gss_krb5_wrap.c
+++ b/net/sunrpc/auth_gss/gss_krb5_wrap.c
@@ -232,9 +232,26 @@ gss_wrap_kerberos_v1(struct krb5_ctx *kctx, int offset,
 		seq_send, ptr + GSS_KRB5_TOK_HDR_LEN, ptr + 8)))
 		return GSS_S_FAILURE;
 
-	if (gss_encrypt_xdr_buf(kctx->enc, buf, offset + headlen - conflen,
-								pages))
-		return GSS_S_FAILURE;
+	if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC) {
+		struct crypto_blkcipher *cipher;
+		int err;
+		cipher = crypto_alloc_blkcipher(kctx->gk5e->encrypt_name, 0,
+						CRYPTO_ALG_ASYNC);
+		if (IS_ERR(cipher))
+			return GSS_S_FAILURE;
+
+		krb5_rc4_setup_enc_key(kctx, cipher, seq_send);
+
+		err = gss_encrypt_xdr_buf(cipher, buf,
+					  offset + headlen - conflen, pages);
+		crypto_free_blkcipher(cipher);
+		if (err)
+			return GSS_S_FAILURE;
+	} else {
+		if (gss_encrypt_xdr_buf(kctx->enc, buf,
+					offset + headlen - conflen, pages))
+			return GSS_S_FAILURE;
+	}
 
 	return (kctx->endtime < now) ? GSS_S_CONTEXT_EXPIRED : GSS_S_COMPLETE;
 }
@@ -291,8 +308,37 @@ gss_unwrap_kerberos_v1(struct krb5_ctx *kctx, int offset, struct xdr_buf *buf)
 	 */
 	crypt_offset = ptr + (GSS_KRB5_TOK_HDR_LEN + kctx->gk5e->cksumlength) -
 					(unsigned char *)buf->head[0].iov_base;
-	if (gss_decrypt_xdr_buf(kctx->enc, buf, crypt_offset))
-		return GSS_S_DEFECTIVE_TOKEN;
+
+	/*
+	 * Need plaintext seqnum to derive encryption key for arcfour-hmac
+	 */
+	if (krb5_get_seq_num(kctx, ptr + GSS_KRB5_TOK_HDR_LEN,
+			     ptr + 8, &direction, &seqnum))
+		return GSS_S_BAD_SIG;
+
+	if ((kctx->initiate && direction != 0xff) ||
+	    (!kctx->initiate && direction != 0))
+		return GSS_S_BAD_SIG;
+
+	if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC) {
+		struct crypto_blkcipher *cipher;
+		int err;
+
+		cipher = crypto_alloc_blkcipher(kctx->gk5e->encrypt_name, 0,
+						CRYPTO_ALG_ASYNC);
+		if (IS_ERR(cipher))
+			return GSS_S_FAILURE;
+
+		krb5_rc4_setup_enc_key(kctx, cipher, seqnum);
+
+		err = gss_decrypt_xdr_buf(cipher, buf, crypt_offset);
+		crypto_free_blkcipher(cipher);
+		if (err)
+			return GSS_S_DEFECTIVE_TOKEN;
+	} else {
+		if (gss_decrypt_xdr_buf(kctx->enc, buf, crypt_offset))
+			return GSS_S_DEFECTIVE_TOKEN;
+	}
 
 	if (kctx->gk5e->keyed_cksum)
 		cksumkey = kctx->cksum;
@@ -316,14 +362,6 @@ gss_unwrap_kerberos_v1(struct krb5_ctx *kctx, int offset, struct xdr_buf *buf)
 
 	/* do sequencing checks */
 
-	if (krb5_get_seq_num(kctx, ptr + GSS_KRB5_TOK_HDR_LEN,
-			     ptr + 8, &direction, &seqnum))
-		return GSS_S_BAD_SIG;
-
-	if ((kctx->initiate && direction != 0xff) ||
-	    (!kctx->initiate && direction != 0))
-		return GSS_S_BAD_SIG;
-
 	/* Copy the data back to the right position. XXX: Would probably be
 	 * better to copy and encrypt at the same time. */
 
@@ -521,6 +559,7 @@ gss_wrap_kerberos(struct gss_ctx *gctx, int offset,
 		BUG();
 	case ENCTYPE_DES_CBC_RAW:
 	case ENCTYPE_DES3_CBC_RAW:
+	case ENCTYPE_ARCFOUR_HMAC:
 		return gss_wrap_kerberos_v1(kctx, offset, buf, pages);
 	case ENCTYPE_AES128_CTS_HMAC_SHA1_96:
 	case ENCTYPE_AES256_CTS_HMAC_SHA1_96:
@@ -538,6 +577,7 @@ gss_unwrap_kerberos(struct gss_ctx *gctx, int offset, struct xdr_buf *buf)
 		BUG();
 	case ENCTYPE_DES_CBC_RAW:
 	case ENCTYPE_DES3_CBC_RAW:
+	case ENCTYPE_ARCFOUR_HMAC:
 		return gss_unwrap_kerberos_v1(kctx, offset, buf);
 	case ENCTYPE_AES128_CTS_HMAC_SHA1_96:
 	case ENCTYPE_AES256_CTS_HMAC_SHA1_96:
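
The reordering in gss_unwrap_kerberos_v1() above is forced by the key-derivation scheme: the payload key Kcrypt is a function of the plaintext sequence number, while Kseq depends only on the checksum already present in the token header, so the sequence-number field must be decrypted and validated before the body can be decrypted. A rough userspace outline of that ordering, reusing the rc4_derive_kseq()/rc4_derive_kcrypt() sketches shown after the gss_krb5.h hunk and OpenSSL's RC4 (illustration only, not the kernel code path; names local to this sketch):

#include <stddef.h>
#include <stdint.h>
#include <openssl/rc4.h>

/* Order of operations when unwrapping an rc4-hmac v1 token. */
static void rc4_unwrap_order(const unsigned char Ksess[16],
			     const unsigned char cksum[8],
			     const unsigned char seq_field[8],
			     unsigned char *payload, size_t paylen)
{
	unsigned char Kseq[16], Kcrypt[16], plain[8];
	uint32_t seqnum;
	RC4_KEY rc4;

	rc4_derive_kseq(Ksess, cksum, Kseq);	/* from the earlier sketch */
	RC4_set_key(&rc4, 16, Kseq);
	RC4(&rc4, 8, seq_field, plain);		/* recover seqnum + direction */

	seqnum = ((uint32_t)plain[0] << 24) | (plain[1] << 16) |
		 (plain[2] << 8) | plain[3];

	rc4_derive_kcrypt(Ksess, seqnum, Kcrypt);
	RC4_set_key(&rc4, 16, Kcrypt);
	RC4(&rc4, paylen, payload, payload);	/* decrypt confounder + data in place */
}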