/*
 * HCTR2 is a length-preserving encryption mode that is efficient on
 * processors with instructions to accelerate AES and carryless
 * multiplication, e.g. x86 processors with AES-NI and CLMUL, and ARM
 * processors with the ARMv8 crypto extensions.
 *
 * For more details, see the paper: "Length-preserving encryption with HCTR2"
 * (https://eprint.iacr.org/2021/1441.pdf)
 */
/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length.  The "natural"
 * tweak length for HCTR2 is 16, since that fits into one POLYVAL block for
 * the best performance.  But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys.  So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE 32
/* Per-transform (key-dependent) context for HCTR2. */
struct hctr2_tfm_ctx {
	struct crypto_cipher *blockcipher;	/* single-block cipher E_K */
	struct crypto_skcipher *xctr;		/* XCTR mode for the bulk data */
	struct crypto_shash *polyval;		/* POLYVAL hash for M/U blocks */
	u8 L[BLOCKCIPHER_BLOCK_SIZE];		/* derived subkey L */
	/* byte offset of the exported hashed-tweak state in the extra space */
	int hashed_tweak_offset;
	/*
	 * This struct is allocated with extra space for two exported hash
	 * states.  Since the hash state size is not known at compile-time, we
	 * can't add these to the struct directly.
	 *
	 * hashed_tweaklen_divisible;
	 * hashed_tweaklen_remainder;
	 */
};
/* Per-request context for HCTR2. */
struct hctr2_request_ctx {
	u8 first_block[BLOCKCIPHER_BLOCK_SIZE];	/* M (encrypt) or U (decrypt) */
	u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];	/* S, the XCTR nonce */
	struct scatterlist *bulk_part_dst;	/* destination for V (or N) */
	struct scatterlist *bulk_part_src;	/* source for N (or V) */
	struct scatterlist sg_src[2];
	struct scatterlist sg_dst[2];
	/*
	 * Sub-request sizes are unknown at compile-time, so they need to go
	 * after the members with known sizes.
	 */
	union {
		struct shash_desc hash_desc;
		struct skcipher_request xctr_req;
	} u;
	/*
	 * This struct is allocated with extra space for one exported hash
	 * state.  Since the hash state size is not known at compile-time, we
	 * can't add it to the struct directly.
	 *
	 * hashed_tweak;
	 */
};
/*
 * The input data for each HCTR2 hash step begins with a 16-byte block that
 * contains the tweak length and a flag that indicates whether the input is
 * evenly divisible into blocks.  Since this implementation only supports one
 * tweak length, we precompute the two hash states resulting from hashing the
 * two possible values of this initial block.  This reduces by one block the
 * amount of data that needs to be hashed for each encryption/decryption.
 *
 * These precomputed hashes are stored in hctr2_tfm_ctx.
 */
static int hctr2_hash_tweaklen(struct hctr2_tfm_ctx *tctx, bool has_remainder)
{
	SHASH_DESC_ON_STACK(shash, tctx->polyval);
	__le64 tweak_length_block[2];
	int err;

	shash->tfm = tctx->polyval;
	memset(tweak_length_block, 0, sizeof(tweak_length_block));

	/*
	 * Encode the tweak-length block per the HCTR2 spec:
	 * 8 * TWEAK_SIZE bits, doubled, plus 2, plus the "has remainder" flag.
	 */
	tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 +
					    has_remainder);
	err = crypto_shash_init(shash);
	if (err)
		return err;
	err = crypto_shash_update(shash, (u8 *)tweak_length_block,
				  POLYVAL_BLOCK_SIZE);
	if (err)
		return err;
	/* Export the partial hash state for this value of has_remainder. */
	return crypto_shash_export(shash,
				   hctr2_hashed_tweaklen(tctx, has_remainder));
}
// Requests must be at least one block if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE) return -EINVAL;
// Copy M (or U) into a temporary buffer
scatterwalk_map_and_copy(rctx->first_block, req->src,
0, BLOCKCIPHER_BLOCK_SIZE, 0);
// Create scatterlists for N and V
rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,
BLOCKCIPHER_BLOCK_SIZE);
rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
BLOCKCIPHER_BLOCK_SIZE);
// MM = M ^ H(T || N) // or UU = U ^ H(T || V)
err = hctr2_hash_tweak(req); if (err) return err;
err = hctr2_hash_message(req, rctx->bulk_part_src, digest); if (err) return err;
crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
// UU = E(MM) // or MM = D(UU) if (enc)
crypto_cipher_encrypt_one(tctx->blockcipher, rctx->first_block,
digest); else
crypto_cipher_decrypt_one(tctx->blockcipher, rctx->first_block,
digest);
// S = MM ^ UU ^ L
crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
crypto_xor_cpy(rctx->xctr_iv, digest, tctx->L, BLOCKCIPHER_BLOCK_SIZE);
// V = XCTR(S, N) // or N = XCTR(S, V)
skcipher_request_set_tfm(&rctx->u.xctr_req, tctx->xctr);
skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,
rctx->bulk_part_dst, bulk_len,
rctx->xctr_iv);
skcipher_request_set_callback(&rctx->u.xctr_req,
req->base.flags,
hctr2_xctr_done, req); return crypto_skcipher_encrypt(&rctx->u.xctr_req) ?:
hctr2_finish(req);
}
/*
 * NOTE(review): fragment of the instance-creation function (presumably
 * hctr2_create); the enclosing definition is not visible in this chunk.
 * Fills in the composed algorithm's cra_* properties from its components.
 */
inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
/* Tfm context plus room for the two exported tweak-length hash states. */
inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx) +
			     polyval_alg->statesize * 2;
inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask;
/*
 * The hash function is called twice, so it is weighted higher than the
 * xctr and blockcipher.
 */
inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority +
			       4 * polyval_alg->base.cra_priority +
			       blockcipher_alg->cra_priority) / 7;
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder die Vollständigkeit noch die Richtigkeit
noch die Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung ist noch experimentell.