/*
 * Shared glue code for 128bit block ciphers
 */

#ifndef _CRYPTO_GLUE_HELPER_H
#define _CRYPTO_GLUE_HELPER_H

#include <linux/kernel.h>
#include <linux/crypto.h>
#include <asm/i387.h>
#include <crypto/b128ops.h>

typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src);
typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src);
typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src,
				       le128 *iv);
typedef void (*common_glue_xts_func_t)(void *ctx, u128 *dst, const u128 *src,
				       le128 *iv);

#define GLUE_FUNC_CAST(fn) ((common_glue_func_t)(fn))
#define GLUE_CBC_FUNC_CAST(fn) ((common_glue_cbc_func_t)(fn))
#define GLUE_CTR_FUNC_CAST(fn) ((common_glue_ctr_func_t)(fn))
#define GLUE_XTS_FUNC_CAST(fn) ((common_glue_xts_func_t)(fn))

struct common_glue_func_entry {
	unsigned int num_blocks; /* number of blocks that @fn will process */
	union {
		common_glue_func_t ecb;
		common_glue_cbc_func_t cbc;
		common_glue_ctr_func_t ctr;
		common_glue_xts_func_t xts;
	} fn_u;
};

struct common_glue_ctx {
	unsigned int num_funcs;
	int fpu_blocks_limit; /* -1 means fpu not needed at all */

	/*
	 * First funcs entry must have largest num_blocks and last funcs entry
	 * must have num_blocks == 1!
	 */
	struct common_glue_func_entry funcs[];
};

/* Returns the new fpu_enabled state; pass the result to glue_fpu_end(). */
static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
				  struct blkcipher_desc *desc,
				  bool fpu_enabled, unsigned int nbytes)
{
	if (likely(fpu_blocks_limit < 0))
		return false;

	if (fpu_enabled)
		return true;

	/*
	 * Vector registers are only used when the chunk to be processed is
	 * large enough, so do not enable the FPU until it is necessary.
	 */
	if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
		return false;

	if (desc) {
		/* prevent sleeping if FPU is in use */
		desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	}

	kernel_fpu_begin();
	return true;
}

static inline void glue_fpu_end(bool fpu_enabled)
{
	if (fpu_enabled)
		kernel_fpu_end();
}

static inline void le128_to_be128(be128 *dst, const le128 *src)
{
	dst->a = cpu_to_be64(le64_to_cpu(src->a));
	dst->b = cpu_to_be64(le64_to_cpu(src->b));
}

static inline void be128_to_le128(le128 *dst, const be128 *src)
{
	dst->a = cpu_to_le64(be64_to_cpu(src->a));
	dst->b = cpu_to_le64(be64_to_cpu(src->b));
}

/* 128-bit increment: @b is the low qword, carry propagates into @a */
static inline void le128_inc(le128 *i)
{
	u64 a = le64_to_cpu(i->a);
	u64 b = le64_to_cpu(i->b);

	b++;
	if (!b)
		a++;

	i->a = cpu_to_le64(a);
	i->b = cpu_to_le64(b);
}

/*
 * Multiply by x in GF(2^128) (XTS tweak update); 0x87 encodes the field
 * polynomial x^128 + x^7 + x^2 + x + 1.
 */
static inline void le128_gf128mul_x_ble(le128 *dst, const le128 *src)
{
	u64 a = le64_to_cpu(src->a);
	u64 b = le64_to_cpu(src->b);
	u64 _tt = ((s64)a >> 63) & 0x87;

	dst->a = cpu_to_le64((a << 1) ^ (b >> 63));
	dst->b = cpu_to_le64((b << 1) ^ _tt);
}
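/*
 * Usage sketch (illustrative only -- the "example_*" symbols and the block
 * counts below are hypothetical, not part of this header): a cipher's glue
 * module typically builds a common_glue_ctx table with its widest SIMD
 * routine first and a one-block fallback last, then hands the table to the
 * glue_*_crypt_128bit() helpers declared below, e.g. for ECB:
 *
 *	static const struct common_glue_ctx example_enc = {
 *		.num_funcs = 2,
 *		.fpu_blocks_limit = 8,	// use the FPU only for >= 8 blocks
 *
 *		.funcs = { {
 *			.num_blocks = 8,
 *			.fn_u = { .ecb = GLUE_FUNC_CAST(example_ecb_enc_8way) }
 *		}, {
 *			.num_blocks = 1,
 *			.fn_u = { .ecb = GLUE_FUNC_CAST(example_encrypt) }
 *		} }
 *	};
 *
 *	static int ecb_encrypt(struct blkcipher_desc *desc,
 *			       struct scatterlist *dst,
 *			       struct scatterlist *src, unsigned int nbytes)
 *	{
 *		return glue_ecb_crypt_128bit(&example_enc, desc, dst, src,
 *					     nbytes);
 *	}
 */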
extern int glue_ecb_crypt_128bit(const struct common_glue_ctx *gctx,
				 struct blkcipher_desc *desc,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes);

extern int glue_cbc_encrypt_128bit(const common_glue_func_t fn,
				   struct blkcipher_desc *desc,
				   struct scatterlist *dst,
				   struct scatterlist *src,
				   unsigned int nbytes);

extern int glue_cbc_decrypt_128bit(const struct common_glue_ctx *gctx,
				   struct blkcipher_desc *desc,
				   struct scatterlist *dst,
				   struct scatterlist *src,
				   unsigned int nbytes);

extern int glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
				 struct blkcipher_desc *desc,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes);

extern int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
				 struct blkcipher_desc *desc,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 common_glue_func_t tweak_fn, void *tweak_ctx,
				 void *crypt_ctx);

extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src,
				      le128 *iv, common_glue_func_t fn);

#endif /* _CRYPTO_GLUE_HELPER_H */
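/*
 * XTS usage sketch (again with hypothetical "example_*" names): the initial
 * tweak is computed with @tweak_fn/@tweak_ctx, while the data blocks are
 * processed with @crypt_ctx through a common_glue_ctx table whose entries
 * set .fn_u.xts via GLUE_XTS_FUNC_CAST(), e.g.:
 *
 *	static int xts_encrypt(struct blkcipher_desc *desc,
 *			       struct scatterlist *dst,
 *			       struct scatterlist *src, unsigned int nbytes)
 *	{
 *		struct example_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 *
 *		return glue_xts_crypt_128bit(&example_enc_xts, desc, dst, src,
 *					     nbytes,
 *					     GLUE_FUNC_CAST(example_encrypt),
 *					     &ctx->tweak_ctx, &ctx->crypt_ctx);
 *	}
 */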