Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

gss_krb5: Add support for rc4-hmac encryption

Add necessary changes to add kernel support for the rc4-hmac Kerberos
encryption type used by Microsoft and described in rfc4757.

Signed-off-by: Kevin Coffman <kwc@citi.umich.edu>
Signed-off-by: Steve Dickson <steved@redhat.com>
Signed-off-by: Trond Myklebust <Trond.Myklebust@netapp.com>

authored by

Kevin Coffman and committed by
Trond Myklebust
fffdaef2 5af46547

+492 -13
+9
include/linux/sunrpc/gss_krb5.h
··· 317 317 struct xdr_buf *buf, u32 *plainoffset, 318 318 u32 *plainlen); 319 319 320 + int 321 + krb5_rc4_setup_seq_key(struct krb5_ctx *kctx, 322 + struct crypto_blkcipher *cipher, 323 + unsigned char *cksum); 324 + 325 + int 326 + krb5_rc4_setup_enc_key(struct krb5_ctx *kctx, 327 + struct crypto_blkcipher *cipher, 328 + s32 seqnum); 320 329 void 321 330 gss_krb5_make_confounder(char *p, u32 conflen);
+255
net/sunrpc/auth_gss/gss_krb5_crypto.c
··· 124 124 return crypto_hash_update(desc, sg, sg->length); 125 125 } 126 126 127 + static int 128 + arcfour_hmac_md5_usage_to_salt(unsigned int usage, u8 salt[4]) 129 + { 130 + unsigned int ms_usage; 131 + 132 + switch (usage) { 133 + case KG_USAGE_SIGN: 134 + ms_usage = 15; 135 + break; 136 + case KG_USAGE_SEAL: 137 + ms_usage = 13; 138 + break; 139 + default: 140 + return -EINVAL; 141 + } 142 + salt[0] = (ms_usage >> 0) & 0xff; 143 + salt[1] = (ms_usage >> 8) & 0xff; 144 + salt[2] = (ms_usage >> 16) & 0xff; 145 + salt[3] = (ms_usage >> 24) & 0xff; 146 + 147 + return 0; 148 + } 149 + 150 + static u32 151 + make_checksum_hmac_md5(struct krb5_ctx *kctx, char *header, int hdrlen, 152 + struct xdr_buf *body, int body_offset, u8 *cksumkey, 153 + unsigned int usage, struct xdr_netobj *cksumout) 154 + { 155 + struct hash_desc desc; 156 + struct scatterlist sg[1]; 157 + int err; 158 + u8 checksumdata[GSS_KRB5_MAX_CKSUM_LEN]; 159 + u8 rc4salt[4]; 160 + struct crypto_hash *md5; 161 + struct crypto_hash *hmac_md5; 162 + 163 + if (cksumkey == NULL) 164 + return GSS_S_FAILURE; 165 + 166 + if (cksumout->len < kctx->gk5e->cksumlength) { 167 + dprintk("%s: checksum buffer length, %u, too small for %s\n", 168 + __func__, cksumout->len, kctx->gk5e->name); 169 + return GSS_S_FAILURE; 170 + } 171 + 172 + if (arcfour_hmac_md5_usage_to_salt(usage, rc4salt)) { 173 + dprintk("%s: invalid usage value %u\n", __func__, usage); 174 + return GSS_S_FAILURE; 175 + } 176 + 177 + md5 = crypto_alloc_hash("md5", 0, CRYPTO_ALG_ASYNC); 178 + if (IS_ERR(md5)) 179 + return GSS_S_FAILURE; 180 + 181 + hmac_md5 = crypto_alloc_hash(kctx->gk5e->cksum_name, 0, 182 + CRYPTO_ALG_ASYNC); 183 + if (IS_ERR(hmac_md5)) { 184 + crypto_free_hash(md5); 185 + return GSS_S_FAILURE; 186 + } 187 + 188 + desc.tfm = md5; 189 + desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP; 190 + 191 + err = crypto_hash_init(&desc); 192 + if (err) 193 + goto out; 194 + sg_init_one(sg, rc4salt, 4); 195 + err = crypto_hash_update(&desc, sg, 4); 196 
+ if (err) 197 + goto out; 198 + 199 + sg_init_one(sg, header, hdrlen); 200 + err = crypto_hash_update(&desc, sg, hdrlen); 201 + if (err) 202 + goto out; 203 + err = xdr_process_buf(body, body_offset, body->len - body_offset, 204 + checksummer, &desc); 205 + if (err) 206 + goto out; 207 + err = crypto_hash_final(&desc, checksumdata); 208 + if (err) 209 + goto out; 210 + 211 + desc.tfm = hmac_md5; 212 + desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP; 213 + 214 + err = crypto_hash_init(&desc); 215 + if (err) 216 + goto out; 217 + err = crypto_hash_setkey(hmac_md5, cksumkey, kctx->gk5e->keylength); 218 + if (err) 219 + goto out; 220 + 221 + sg_init_one(sg, checksumdata, crypto_hash_digestsize(md5)); 222 + err = crypto_hash_digest(&desc, sg, crypto_hash_digestsize(md5), 223 + checksumdata); 224 + if (err) 225 + goto out; 226 + 227 + memcpy(cksumout->data, checksumdata, kctx->gk5e->cksumlength); 228 + cksumout->len = kctx->gk5e->cksumlength; 229 + out: 230 + crypto_free_hash(md5); 231 + crypto_free_hash(hmac_md5); 232 + return err ? GSS_S_FAILURE : 0; 233 + } 234 + 127 235 /* 128 236 * checksum the plaintext data and hdrlen bytes of the token header 129 237 * The checksum is performed over the first 8 bytes of the ··· 247 139 int err; 248 140 u8 checksumdata[GSS_KRB5_MAX_CKSUM_LEN]; 249 141 unsigned int checksumlen; 142 + 143 + if (kctx->gk5e->ctype == CKSUMTYPE_HMAC_MD5_ARCFOUR) 144 + return make_checksum_hmac_md5(kctx, header, hdrlen, 145 + body, body_offset, 146 + cksumkey, usage, cksumout); 250 147 251 148 if (cksumout->len < kctx->gk5e->cksumlength) { 252 149 dprintk("%s: checksum buffer length, %u, too small for %s\n", ··· 846 733 ret = GSS_S_FAILURE; 847 734 return ret; 848 735 } 736 + 737 + /* 738 + * Compute Kseq given the initial session key and the checksum. 739 + * Set the key of the given cipher. 
740 + */ 741 + int 742 + krb5_rc4_setup_seq_key(struct krb5_ctx *kctx, struct crypto_blkcipher *cipher, 743 + unsigned char *cksum) 744 + { 745 + struct crypto_hash *hmac; 746 + struct hash_desc desc; 747 + struct scatterlist sg[1]; 748 + u8 Kseq[GSS_KRB5_MAX_KEYLEN]; 749 + u32 zeroconstant = 0; 750 + int err; 751 + 752 + dprintk("%s: entered\n", __func__); 753 + 754 + hmac = crypto_alloc_hash(kctx->gk5e->cksum_name, 0, CRYPTO_ALG_ASYNC); 755 + if (IS_ERR(hmac)) { 756 + dprintk("%s: error %ld, allocating hash '%s'\n", 757 + __func__, PTR_ERR(hmac), kctx->gk5e->cksum_name); 758 + return PTR_ERR(hmac); 759 + } 760 + 761 + desc.tfm = hmac; 762 + desc.flags = 0; 763 + 764 + err = crypto_hash_init(&desc); 765 + if (err) 766 + goto out_err; 767 + 768 + /* Compute intermediate Kseq from session key */ 769 + err = crypto_hash_setkey(hmac, kctx->Ksess, kctx->gk5e->keylength); 770 + if (err) 771 + goto out_err; 772 + 773 + sg_init_table(sg, 1); 774 + sg_set_buf(sg, &zeroconstant, 4); 775 + 776 + err = crypto_hash_digest(&desc, sg, 4, Kseq); 777 + if (err) 778 + goto out_err; 779 + 780 + /* Compute final Kseq from the checksum and intermediate Kseq */ 781 + err = crypto_hash_setkey(hmac, Kseq, kctx->gk5e->keylength); 782 + if (err) 783 + goto out_err; 784 + 785 + sg_set_buf(sg, cksum, 8); 786 + 787 + err = crypto_hash_digest(&desc, sg, 8, Kseq); 788 + if (err) 789 + goto out_err; 790 + 791 + err = crypto_blkcipher_setkey(cipher, Kseq, kctx->gk5e->keylength); 792 + if (err) 793 + goto out_err; 794 + 795 + err = 0; 796 + 797 + out_err: 798 + crypto_free_hash(hmac); 799 + dprintk("%s: returning %d\n", __func__, err); 800 + return err; 801 + } 802 + 803 + /* 804 + * Compute Kcrypt given the initial session key and the plaintext seqnum. 805 + * Set the key of cipher kctx->enc. 
806 + */ 807 + int 808 + krb5_rc4_setup_enc_key(struct krb5_ctx *kctx, struct crypto_blkcipher *cipher, 809 + s32 seqnum) 810 + { 811 + struct crypto_hash *hmac; 812 + struct hash_desc desc; 813 + struct scatterlist sg[1]; 814 + u8 Kcrypt[GSS_KRB5_MAX_KEYLEN]; 815 + u8 zeroconstant[4] = {0}; 816 + u8 seqnumarray[4]; 817 + int err, i; 818 + 819 + dprintk("%s: entered, seqnum %u\n", __func__, seqnum); 820 + 821 + hmac = crypto_alloc_hash(kctx->gk5e->cksum_name, 0, CRYPTO_ALG_ASYNC); 822 + if (IS_ERR(hmac)) { 823 + dprintk("%s: error %ld, allocating hash '%s'\n", 824 + __func__, PTR_ERR(hmac), kctx->gk5e->cksum_name); 825 + return PTR_ERR(hmac); 826 + } 827 + 828 + desc.tfm = hmac; 829 + desc.flags = 0; 830 + 831 + err = crypto_hash_init(&desc); 832 + if (err) 833 + goto out_err; 834 + 835 + /* Compute intermediate Kcrypt from session key */ 836 + for (i = 0; i < kctx->gk5e->keylength; i++) 837 + Kcrypt[i] = kctx->Ksess[i] ^ 0xf0; 838 + 839 + err = crypto_hash_setkey(hmac, Kcrypt, kctx->gk5e->keylength); 840 + if (err) 841 + goto out_err; 842 + 843 + sg_init_table(sg, 1); 844 + sg_set_buf(sg, zeroconstant, 4); 845 + 846 + err = crypto_hash_digest(&desc, sg, 4, Kcrypt); 847 + if (err) 848 + goto out_err; 849 + 850 + /* Compute final Kcrypt from the seqnum and intermediate Kcrypt */ 851 + err = crypto_hash_setkey(hmac, Kcrypt, kctx->gk5e->keylength); 852 + if (err) 853 + goto out_err; 854 + 855 + seqnumarray[0] = (unsigned char) ((seqnum >> 24) & 0xff); 856 + seqnumarray[1] = (unsigned char) ((seqnum >> 16) & 0xff); 857 + seqnumarray[2] = (unsigned char) ((seqnum >> 8) & 0xff); 858 + seqnumarray[3] = (unsigned char) ((seqnum >> 0) & 0xff); 859 + 860 + sg_set_buf(sg, seqnumarray, 4); 861 + 862 + err = crypto_hash_digest(&desc, sg, 4, Kcrypt); 863 + if (err) 864 + goto out_err; 865 + 866 + err = crypto_blkcipher_setkey(cipher, Kcrypt, kctx->gk5e->keylength); 867 + if (err) 868 + goto out_err; 869 + 870 + err = 0; 871 + 872 + out_err: 873 + crypto_free_hash(hmac); 874 + 
dprintk("%s: returning %d\n", __func__, err); 875 + return err; 876 + } 877 +
+96
net/sunrpc/auth_gss/gss_krb5_mech.c
··· 73 73 .keyed_cksum = 0, 74 74 }, 75 75 /* 76 + * RC4-HMAC 77 + */ 78 + { 79 + .etype = ENCTYPE_ARCFOUR_HMAC, 80 + .ctype = CKSUMTYPE_HMAC_MD5_ARCFOUR, 81 + .name = "rc4-hmac", 82 + .encrypt_name = "ecb(arc4)", 83 + .cksum_name = "hmac(md5)", 84 + .encrypt = krb5_encrypt, 85 + .decrypt = krb5_decrypt, 86 + .mk_key = NULL, 87 + .signalg = SGN_ALG_HMAC_MD5, 88 + .sealalg = SEAL_ALG_MICROSOFT_RC4, 89 + .keybytes = 16, 90 + .keylength = 16, 91 + .blocksize = 1, 92 + .conflen = 8, 93 + .cksumlength = 8, 94 + .keyed_cksum = 1, 95 + }, 96 + /* 76 97 * 3DES 77 98 */ 78 99 { ··· 413 392 return -EINVAL; 414 393 } 415 394 395 + /* 396 + * Note that RC4 depends on deriving keys using the sequence 397 + * number or the checksum of a token. Therefore, the final keys 398 + * cannot be calculated until the token is being constructed! 399 + */ 400 + static int 401 + context_derive_keys_rc4(struct krb5_ctx *ctx) 402 + { 403 + struct crypto_hash *hmac; 404 + char sigkeyconstant[] = "signaturekey"; 405 + int slen = strlen(sigkeyconstant) + 1; /* include null terminator */ 406 + struct hash_desc desc; 407 + struct scatterlist sg[1]; 408 + int err; 409 + 410 + dprintk("RPC: %s: entered\n", __func__); 411 + /* 412 + * derive cksum (aka Ksign) key 413 + */ 414 + hmac = crypto_alloc_hash(ctx->gk5e->cksum_name, 0, CRYPTO_ALG_ASYNC); 415 + if (IS_ERR(hmac)) { 416 + dprintk("%s: error %ld allocating hash '%s'\n", 417 + __func__, PTR_ERR(hmac), ctx->gk5e->cksum_name); 418 + err = PTR_ERR(hmac); 419 + goto out_err; 420 + } 421 + 422 + err = crypto_hash_setkey(hmac, ctx->Ksess, ctx->gk5e->keylength); 423 + if (err) 424 + goto out_err_free_hmac; 425 + 426 + sg_init_table(sg, 1); 427 + sg_set_buf(sg, sigkeyconstant, slen); 428 + 429 + desc.tfm = hmac; 430 + desc.flags = 0; 431 + 432 + err = crypto_hash_init(&desc); 433 + if (err) 434 + goto out_err_free_hmac; 435 + 436 + err = crypto_hash_digest(&desc, sg, slen, ctx->cksum); 437 + if (err) 438 + goto out_err_free_hmac; 439 + /* 440 + * allocate 
hash, and blkciphers for data and seqnum encryption 441 + */ 442 + ctx->enc = crypto_alloc_blkcipher(ctx->gk5e->encrypt_name, 0, 443 + CRYPTO_ALG_ASYNC); 444 + if (IS_ERR(ctx->enc)) { 445 + err = PTR_ERR(ctx->enc); 446 + goto out_err_free_hmac; 447 + } 448 + 449 + ctx->seq = crypto_alloc_blkcipher(ctx->gk5e->encrypt_name, 0, 450 + CRYPTO_ALG_ASYNC); 451 + if (IS_ERR(ctx->seq)) { 452 + crypto_free_blkcipher(ctx->enc); 453 + err = PTR_ERR(ctx->seq); 454 + goto out_err_free_hmac; 455 + } 456 + 457 + dprintk("RPC: %s: returning success\n", __func__); 458 + 459 + err = 0; 460 + 461 + out_err_free_hmac: 462 + crypto_free_hash(hmac); 463 + out_err: 464 + dprintk("RPC: %s: returning %d\n", __func__, err); 465 + return err; 466 + } 467 + 416 468 static int 417 469 context_derive_keys_new(struct krb5_ctx *ctx) 418 470 { ··· 655 561 switch (ctx->enctype) { 656 562 case ENCTYPE_DES3_CBC_RAW: 657 563 return context_derive_keys_des3(ctx); 564 + case ENCTYPE_ARCFOUR_HMAC: 565 + return context_derive_keys_rc4(ctx); 658 566 case ENCTYPE_AES128_CTS_HMAC_SHA1_96: 659 567 case ENCTYPE_AES256_CTS_HMAC_SHA1_96: 660 568 return context_derive_keys_new(ctx);
+1
net/sunrpc/auth_gss/gss_krb5_seal.c
··· 213 213 BUG(); 214 214 case ENCTYPE_DES_CBC_RAW: 215 215 case ENCTYPE_DES3_CBC_RAW: 216 + case ENCTYPE_ARCFOUR_HMAC: 216 217 return gss_get_mic_v1(ctx, text, token); 217 218 case ENCTYPE_AES128_CTS_HMAC_SHA1_96: 218 219 case ENCTYPE_AES256_CTS_HMAC_SHA1_96:
+77
net/sunrpc/auth_gss/gss_krb5_seqnum.c
··· 39 39 # define RPCDBG_FACILITY RPCDBG_AUTH 40 40 #endif 41 41 42 + static s32 43 + krb5_make_rc4_seq_num(struct krb5_ctx *kctx, int direction, s32 seqnum, 44 + unsigned char *cksum, unsigned char *buf) 45 + { 46 + struct crypto_blkcipher *cipher; 47 + unsigned char plain[8]; 48 + s32 code; 49 + 50 + dprintk("RPC: %s:\n", __func__); 51 + cipher = crypto_alloc_blkcipher(kctx->gk5e->encrypt_name, 0, 52 + CRYPTO_ALG_ASYNC); 53 + if (IS_ERR(cipher)) 54 + return PTR_ERR(cipher); 55 + 56 + plain[0] = (unsigned char) ((seqnum >> 24) & 0xff); 57 + plain[1] = (unsigned char) ((seqnum >> 16) & 0xff); 58 + plain[2] = (unsigned char) ((seqnum >> 8) & 0xff); 59 + plain[3] = (unsigned char) ((seqnum >> 0) & 0xff); 60 + plain[4] = direction; 61 + plain[5] = direction; 62 + plain[6] = direction; 63 + plain[7] = direction; 64 + 65 + code = krb5_rc4_setup_seq_key(kctx, cipher, cksum); 66 + if (code) 67 + goto out; 68 + 69 + code = krb5_encrypt(cipher, cksum, plain, buf, 8); 70 + out: 71 + crypto_free_blkcipher(cipher); 72 + return code; 73 + } 42 74 s32 43 75 krb5_make_seq_num(struct krb5_ctx *kctx, 44 76 struct crypto_blkcipher *key, ··· 79 47 unsigned char *cksum, unsigned char *buf) 80 48 { 81 49 unsigned char plain[8]; 50 + 51 + if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC) 52 + return krb5_make_rc4_seq_num(kctx, direction, seqnum, 53 + cksum, buf); 82 54 83 55 plain[0] = (unsigned char) (seqnum & 0xff); 84 56 plain[1] = (unsigned char) ((seqnum >> 8) & 0xff); ··· 97 61 return krb5_encrypt(key, cksum, plain, buf, 8); 98 62 } 99 63 64 + static s32 65 + krb5_get_rc4_seq_num(struct krb5_ctx *kctx, unsigned char *cksum, 66 + unsigned char *buf, int *direction, s32 *seqnum) 67 + { 68 + struct crypto_blkcipher *cipher; 69 + unsigned char plain[8]; 70 + s32 code; 71 + 72 + dprintk("RPC: %s:\n", __func__); 73 + cipher = crypto_alloc_blkcipher(kctx->gk5e->encrypt_name, 0, 74 + CRYPTO_ALG_ASYNC); 75 + if (IS_ERR(cipher)) 76 + return PTR_ERR(cipher); 77 + 78 + code = 
krb5_rc4_setup_seq_key(kctx, cipher, cksum); 79 + if (code) 80 + goto out; 81 + 82 + code = krb5_decrypt(cipher, cksum, buf, plain, 8); 83 + if (code) 84 + goto out; 85 + 86 + if ((plain[4] != plain[5]) || (plain[4] != plain[6]) 87 + || (plain[4] != plain[7])) { 88 + code = (s32)KG_BAD_SEQ; 89 + goto out; 90 + } 91 + 92 + *direction = plain[4]; 93 + 94 + *seqnum = ((plain[0] << 24) | (plain[1] << 16) | 95 + (plain[2] << 8) | (plain[3])); 96 + out: 97 + crypto_free_blkcipher(cipher); 98 + return code; 99 + } 100 + 100 101 s32 101 102 krb5_get_seq_num(struct krb5_ctx *kctx, 102 103 unsigned char *cksum, ··· 145 72 struct crypto_blkcipher *key = kctx->seq; 146 73 147 74 dprintk("RPC: krb5_get_seq_num:\n"); 75 + 76 + if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC) 77 + return krb5_get_rc4_seq_num(kctx, cksum, buf, 78 + direction, seqnum); 148 79 149 80 if ((code = krb5_decrypt(key, cksum, buf, plain, 8))) 150 81 return code;
+1
net/sunrpc/auth_gss/gss_krb5_unseal.c
··· 216 216 BUG(); 217 217 case ENCTYPE_DES_CBC_RAW: 218 218 case ENCTYPE_DES3_CBC_RAW: 219 + case ENCTYPE_ARCFOUR_HMAC: 219 220 return gss_verify_mic_v1(ctx, message_buffer, read_token); 220 221 case ENCTYPE_AES128_CTS_HMAC_SHA1_96: 221 222 case ENCTYPE_AES256_CTS_HMAC_SHA1_96:
+53 -13
net/sunrpc/auth_gss/gss_krb5_wrap.c
··· 232 232 seq_send, ptr + GSS_KRB5_TOK_HDR_LEN, ptr + 8))) 233 233 return GSS_S_FAILURE; 234 234 235 - if (gss_encrypt_xdr_buf(kctx->enc, buf, offset + headlen - conflen, 236 - pages)) 237 - return GSS_S_FAILURE; 235 + if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC) { 236 + struct crypto_blkcipher *cipher; 237 + int err; 238 + cipher = crypto_alloc_blkcipher(kctx->gk5e->encrypt_name, 0, 239 + CRYPTO_ALG_ASYNC); 240 + if (IS_ERR(cipher)) 241 + return GSS_S_FAILURE; 242 + 243 + krb5_rc4_setup_enc_key(kctx, cipher, seq_send); 244 + 245 + err = gss_encrypt_xdr_buf(cipher, buf, 246 + offset + headlen - conflen, pages); 247 + crypto_free_blkcipher(cipher); 248 + if (err) 249 + return GSS_S_FAILURE; 250 + } else { 251 + if (gss_encrypt_xdr_buf(kctx->enc, buf, 252 + offset + headlen - conflen, pages)) 253 + return GSS_S_FAILURE; 254 + } 238 255 239 256 return (kctx->endtime < now) ? GSS_S_CONTEXT_EXPIRED : GSS_S_COMPLETE; 240 257 } ··· 308 291 */ 309 292 crypt_offset = ptr + (GSS_KRB5_TOK_HDR_LEN + kctx->gk5e->cksumlength) - 310 293 (unsigned char *)buf->head[0].iov_base; 311 - if (gss_decrypt_xdr_buf(kctx->enc, buf, crypt_offset)) 312 - return GSS_S_DEFECTIVE_TOKEN; 294 + 295 + /* 296 + * Need plaintext seqnum to derive encryption key for arcfour-hmac 297 + */ 298 + if (krb5_get_seq_num(kctx, ptr + GSS_KRB5_TOK_HDR_LEN, 299 + ptr + 8, &direction, &seqnum)) 300 + return GSS_S_BAD_SIG; 301 + 302 + if ((kctx->initiate && direction != 0xff) || 303 + (!kctx->initiate && direction != 0)) 304 + return GSS_S_BAD_SIG; 305 + 306 + if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC) { 307 + struct crypto_blkcipher *cipher; 308 + int err; 309 + 310 + cipher = crypto_alloc_blkcipher(kctx->gk5e->encrypt_name, 0, 311 + CRYPTO_ALG_ASYNC); 312 + if (IS_ERR(cipher)) 313 + return GSS_S_FAILURE; 314 + 315 + krb5_rc4_setup_enc_key(kctx, cipher, seqnum); 316 + 317 + err = gss_decrypt_xdr_buf(cipher, buf, crypt_offset); 318 + crypto_free_blkcipher(cipher); 319 + if (err) 320 + return GSS_S_DEFECTIVE_TOKEN; 
321 + } else { 322 + if (gss_decrypt_xdr_buf(kctx->enc, buf, crypt_offset)) 323 + return GSS_S_DEFECTIVE_TOKEN; 324 + } 313 325 314 326 if (kctx->gk5e->keyed_cksum) 315 327 cksumkey = kctx->cksum; ··· 361 315 return GSS_S_CONTEXT_EXPIRED; 362 316 363 317 /* do sequencing checks */ 364 - 365 - if (krb5_get_seq_num(kctx, ptr + GSS_KRB5_TOK_HDR_LEN, 366 - ptr + 8, &direction, &seqnum)) 367 - return GSS_S_BAD_SIG; 368 - 369 - if ((kctx->initiate && direction != 0xff) || 370 - (!kctx->initiate && direction != 0)) 371 - return GSS_S_BAD_SIG; 372 318 373 319 /* Copy the data back to the right position. XXX: Would probably be 374 320 * better to copy and encrypt at the same time. */ ··· 559 521 BUG(); 560 522 case ENCTYPE_DES_CBC_RAW: 561 523 case ENCTYPE_DES3_CBC_RAW: 524 + case ENCTYPE_ARCFOUR_HMAC: 562 525 return gss_wrap_kerberos_v1(kctx, offset, buf, pages); 563 526 case ENCTYPE_AES128_CTS_HMAC_SHA1_96: 564 527 case ENCTYPE_AES256_CTS_HMAC_SHA1_96: ··· 577 538 BUG(); 578 539 case ENCTYPE_DES_CBC_RAW: 579 540 case ENCTYPE_DES3_CBC_RAW: 541 + case ENCTYPE_ARCFOUR_HMAC: 580 542 return gss_unwrap_kerberos_v1(kctx, offset, buf); 581 543 case ENCTYPE_AES128_CTS_HMAC_SHA1_96: 582 544 case ENCTYPE_AES256_CTS_HMAC_SHA1_96: