X-Git-Url: https://git.whamcloud.com/?a=blobdiff_plain;f=lustre%2Fptlrpc%2Fgss%2Fgss_crypto.h;h=7653e2139dbef419b18d6b2a5db90d2fe540455b;hb=da1d93513fdff0a70257b13aa5649e478d4f70b6;hp=c0770e8d0cfe3c0501c614c13605bcaea47c7cd8;hpb=a21c13d4df4bea1bec0f5804136740ed53d5a57f;p=fs%2Flustre-release.git

diff --git a/lustre/ptlrpc/gss/gss_crypto.h b/lustre/ptlrpc/gss/gss_crypto.h
index c0770e8..7653e21 100644
--- a/lustre/ptlrpc/gss/gss_crypto.h
+++ b/lustre/ptlrpc/gss/gss_crypto.h
@@ -5,9 +5,103 @@
 #include "gss_internal.h"
 
+#include <crypto/skcipher.h>
+
+/*
+ * linux v4.19-rc2-66-gb350bee5ea0f
+ * crypto: skcipher - Introduce crypto_sync_skcipher
+ *
+ * crypto_sync_skcipher will replace crypto_blkcipher so start using
+ * crypto_sync_skcipher and provide wrappers for older kernels
+ */
+#ifdef SYNC_SKCIPHER_REQUEST_ON_STACK
+
+#define crypto_skcipher_encrypt_iv(desc, dst, src, blocksize) \
+	crypto_skcipher_encrypt((desc))
+
+#define crypto_skcipher_decrypt_iv(desc, dst, src, blocksize) \
+	crypto_skcipher_decrypt((desc))
+
+#define skcipher_request_set_crypt_iv(d)
+
+#else /* ! SYNC_SKCIPHER_REQUEST_ON_STACK */
+
+#ifdef HAVE_CRYPTO_ALLOC_SKCIPHER
+
+#define crypto_sync_skcipher crypto_skcipher
+
+#define SYNC_SKCIPHER_REQUEST_ON_STACK SKCIPHER_REQUEST_ON_STACK
+
+#define skcipher_request_set_sync_tfm skcipher_request_set_tfm
+
+#define skcipher_request_set_crypt_iv(d)
+
+#define crypto_sync_skcipher_blocksize crypto_skcipher_blocksize
+
+#define crypto_sync_skcipher_setkey crypto_skcipher_setkey
+
+#define crypto_alloc_sync_skcipher crypto_alloc_skcipher
+
+#define crypto_free_sync_skcipher crypto_free_skcipher
+
+#define crypto_sync_skcipher_ivsize crypto_skcipher_ivsize
+
+#define crypto_skcipher_encrypt_iv(desc, dst, src, blocksize) \
+	crypto_skcipher_encrypt((desc))
+
+#define crypto_skcipher_decrypt_iv(desc, dst, src, blocksize) \
+	crypto_skcipher_decrypt((desc))
+
+#define skcipher_request_zero(req) /* nop */
+
+#else /* ! HAVE_CRYPTO_ALLOC_SKCIPHER */
+
+#define crypto_sync_skcipher crypto_blkcipher
+
+#define SYNC_SKCIPHER_REQUEST_ON_STACK(name, tfm) \
+	struct blkcipher_desc __##name##_obj, *name = (void *)&__##name##_obj
+
+#define skcipher_request_set_sync_tfm(d, _tfm) \
+	do { (d)->tfm = _tfm; } while (0)
+
+#define skcipher_request_set_callback(d, f, c, data) \
+	do { (d)->flags = f; } while (0)
+
+#define skcipher_request_set_crypt(d, src, dst, cryptlen, iv) \
+	do { (d)->info = iv; } while (0)
+
+#define skcipher_request_set_crypt_iv(d) \
+	do { (d)->info = crypto_blkcipher_crt((d)->tfm)->iv; } while (0)
+
+#define crypto_sync_skcipher_blocksize(tfm) \
+	crypto_blkcipher_blocksize((tfm))
+
+#define crypto_sync_skcipher_setkey(tfm, key, keylen) \
+	crypto_blkcipher_setkey((tfm), (key), (keylen))
+
+#define crypto_alloc_sync_skcipher(name, type, mask) \
+	crypto_alloc_blkcipher((name), (type), (mask))
+
+#define crypto_free_sync_skcipher(tfm) \
+	crypto_free_blkcipher((tfm))
+
+#define crypto_sync_skcipher_ivsize(tfm) \
+	crypto_blkcipher_ivsize((tfm))
+
+#define crypto_skcipher_encrypt_iv(desc, dst, src, len) \
+	crypto_blkcipher_encrypt_iv((desc), (dst), (src), (len))
+
+#define crypto_skcipher_decrypt_iv(desc, dst, src, len) \
+	crypto_blkcipher_decrypt_iv((desc), (dst), (src), (len))
+
+#define skcipher_request_zero(req) /* nop */
+
+#endif /* HAVE_CRYPTO_ALLOC_SKCIPHER */
+#endif /* SYNC_SKCIPHER_REQUEST_ON_STACK */
+
 struct gss_keyblock {
-	rawobj_t		kb_key;
-	struct crypto_blkcipher	*kb_tfm;
+	rawobj_t kb_key;
+	struct crypto_sync_skcipher *kb_tfm;
 };
 
 int gss_keyblock_init(struct gss_keyblock *kb, const char *alg_name,
@@ -21,13 +115,16 @@ int gss_get_keyblock(char **ptr, const char *end, struct gss_keyblock *kb,
 int gss_setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg,
 		      const void *buf, unsigned int buf_len);
 void gss_teardown_sgtable(struct sg_table *sgt);
-int gss_crypt_generic(struct crypto_blkcipher *tfm, int decrypt, const void *iv,
-		      const void *in, void *out, size_t length);
-int gss_digest_hash(struct cfs_crypto_hash_desc *desc, rawobj_t *hdr,
-		    int msgcnt, rawobj_t *msgs, int iovcnt, lnet_kiov_t *iovs,
-		    rawobj_t *cksum);
+int gss_crypt_generic(struct crypto_sync_skcipher *tfm, int decrypt,
+		      const void *iv, const void *in, void *out, size_t length);
+int gss_digest_hash(struct ahash_request *req, rawobj_t *hdr,
+		    int msgcnt, rawobj_t *msgs, int iovcnt,
+		    struct bio_vec *iovs);
+int gss_digest_hash_compat(struct ahash_request *req,
+			   rawobj_t *hdr, int msgcnt, rawobj_t *msgs,
+			   int iovcnt, struct bio_vec *iovs);
 int gss_add_padding(rawobj_t *msg, int msg_buflen, int blocksize);
-int gss_crypt_rawobjs(struct crypto_blkcipher *tfm, __u8 *iv,
+int gss_crypt_rawobjs(struct crypto_sync_skcipher *tfm, __u8 *iv,
 		      int inobj_cnt, rawobj_t *inobjs, rawobj_t *outobj,
 		      int enc);
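
For reference, a minimal sketch of how a caller would drive the compat wrappers above once kb_tfm is a struct crypto_sync_skcipher *. The function name example_encrypt_one and the buf/len/iv parameters are illustrative, not part of this patch (the real call sites are in gss_crypto.c and the mech files). The point it shows: the IV goes through skcipher_request_set_crypt(), while the scatterlist and length are repeated in crypto_skcipher_encrypt_iv(), so the same source compiles against the skcipher API and the old blkcipher fallback.

#include <linux/scatterlist.h>
#include "gss_crypto.h"

static int example_encrypt_one(struct gss_keyblock *kb, void *buf,
			       unsigned int len, void *iv)
{
	struct scatterlist sg;
	int rc;
	/* on-stack request; expands to a blkcipher_desc on old kernels */
	SYNC_SKCIPHER_REQUEST_ON_STACK(req, kb->kb_tfm);

	sg_init_one(&sg, buf, len);

	skcipher_request_set_sync_tfm(req, kb->kb_tfm);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	/* on the blkcipher fallback this only records the IV ... */
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* ... so data and length are handed over again here, where the
	 * wrapper becomes either crypto_skcipher_encrypt(req) or
	 * crypto_blkcipher_encrypt_iv(desc, dst, src, len) */
	rc = crypto_skcipher_encrypt_iv(req, &sg, &sg, len);

	skcipher_request_zero(req);
	return rc;
}

Decryption is symmetric through crypto_skcipher_decrypt_iv(), and skcipher_request_set_crypt_iv() covers the case where no explicit IV is supplied and the transform's internal IV must be used on blkcipher-only kernels.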
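The hashing prototypes change in the same spirit: gss_digest_hash() and gss_digest_hash_compat() now take the caller's struct ahash_request and no longer return the checksum through a rawobj_t argument. A rough sketch of the intended calling pattern follows; the libcfs helpers cfs_crypto_hash_init()/cfs_crypto_hash_final(), the CFS_HASH_ALG_SHA256 choice and example_checksum() itself are assumptions for illustration only, not something this diff defines.

#include <linux/err.h>
#include <libcfs/libcfs_crypto.h>
#include "gss_crypto.h"

static int example_checksum(rawobj_t *hdr, int msgcnt, rawobj_t *msgs,
			    rawobj_t *cksum)
{
	struct ahash_request *req;
	int rc;

	/* assumed libcfs wrapper: allocates the hash tfm and the request */
	req = cfs_crypto_hash_init(CFS_HASH_ALG_SHA256, NULL, 0);
	if (IS_ERR(req))
		return PTR_ERR(req);

	/* feeds hdr and msgs into the caller-owned request */
	rc = gss_digest_hash(req, hdr, msgcnt, msgs, 0, NULL);

	/* the digest now comes back via the final step rather than
	 * through a rawobj_t *cksum parameter as in the old prototype */
	if (rc == 0)
		rc = cfs_crypto_hash_final(req, cksum->data, &cksum->len);
	else
		cfs_crypto_hash_final(req, NULL, NULL); /* assumed: releases req */

	return rc;
}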