mirror of
https://github.com/torvalds/linux.git
synced 2026-03-08 02:44:41 +01:00
crypto: arm/aes-neonbs - Use AES library for single blocks
aes-neonbs-glue.c calls __aes_arm_encrypt() and __aes_arm_decrypt() to en/decrypt single blocks for CBC encryption, XTS tweak encryption, and XTS ciphertext stealing. In preparation for making the AES library use this same ARM-optimized single-block AES en/decryption code and making it an internal implementation detail of the AES library, replace the calls to these functions with calls to the AES library. Note that this reduces the size of the aesbs_cbc_ctx and aesbs_xts_ctx structs, since unnecessary decryption round keys are no longer included. Acked-by: Ard Biesheuvel <ardb@kernel.org> Link: https://lore.kernel.org/r/20260112192035.10427-4-ebiggers@kernel.org Signed-off-by: Eric Biggers <ebiggers@kernel.org>
This commit is contained in:
parent
a22fd0e3c4
commit
a4e4e44649
2 changed files with 16 additions and 14 deletions
|
|
@@ -44,7 +44,6 @@ config CRYPTO_AES_ARM
|
|||
config CRYPTO_AES_ARM_BS
|
||||
tristate "Ciphers: AES, modes: ECB/CBC/CTR/XTS (bit-sliced NEON)"
|
||||
depends on KERNEL_MODE_NEON
|
||||
select CRYPTO_AES_ARM
|
||||
select CRYPTO_SKCIPHER
|
||||
select CRYPTO_LIB_AES
|
||||
help
|
||||
|
|
|
|||
|
|
@@ -12,7 +12,6 @@
|
|||
#include <crypto/scatterwalk.h>
|
||||
#include <crypto/xts.h>
|
||||
#include <linux/module.h>
|
||||
#include "aes-cipher.h"
|
||||
|
||||
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
|
||||
MODULE_DESCRIPTION("Bit sliced AES using NEON instructions");
|
||||
|
|
@@ -48,13 +47,13 @@ struct aesbs_ctx {
|
|||
|
||||
struct aesbs_cbc_ctx {
|
||||
struct aesbs_ctx key;
|
||||
struct crypto_aes_ctx fallback;
|
||||
struct aes_enckey fallback;
|
||||
};
|
||||
|
||||
struct aesbs_xts_ctx {
|
||||
struct aesbs_ctx key;
|
||||
struct crypto_aes_ctx fallback;
|
||||
struct crypto_aes_ctx tweak_key;
|
||||
struct aes_key fallback;
|
||||
struct aes_enckey tweak_key;
|
||||
};
|
||||
|
||||
static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
||||
|
|
@@ -122,14 +121,19 @@ static int aesbs_cbc_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
int err;
|
||||
|
||||
err = aes_expandkey(&ctx->fallback, in_key, key_len);
|
||||
err = aes_prepareenckey(&ctx->fallback, in_key, key_len);
|
||||
if (err)
|
||||
return err;
|
||||
|
||||
ctx->key.rounds = 6 + key_len / 4;
|
||||
|
||||
/*
|
||||
* Note: this assumes that the arm implementation of the AES library
|
||||
* stores the standard round keys in k.rndkeys.
|
||||
*/
|
||||
kernel_neon_begin();
|
||||
aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
|
||||
aesbs_convert_key(ctx->key.rk, ctx->fallback.k.rndkeys,
|
||||
ctx->key.rounds);
|
||||
kernel_neon_end();
|
||||
|
||||
return 0;
|
||||
|
|
@@ -152,8 +156,7 @@ static int cbc_encrypt(struct skcipher_request *req)
|
|||
|
||||
do {
|
||||
crypto_xor_cpy(dst, src, prev, AES_BLOCK_SIZE);
|
||||
__aes_arm_encrypt(ctx->fallback.key_enc,
|
||||
ctx->key.rounds, dst, dst);
|
||||
aes_encrypt(&ctx->fallback, dst, dst);
|
||||
prev = dst;
|
||||
src += AES_BLOCK_SIZE;
|
||||
dst += AES_BLOCK_SIZE;
|
||||
|
|
@@ -239,10 +242,10 @@ static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
return err;
|
||||
|
||||
key_len /= 2;
|
||||
err = aes_expandkey(&ctx->fallback, in_key, key_len);
|
||||
err = aes_preparekey(&ctx->fallback, in_key, key_len);
|
||||
if (err)
|
||||
return err;
|
||||
err = aes_expandkey(&ctx->tweak_key, in_key + key_len, key_len);
|
||||
err = aes_prepareenckey(&ctx->tweak_key, in_key + key_len, key_len);
|
||||
if (err)
|
||||
return err;
|
||||
|
||||
|
|
@@ -279,7 +282,7 @@ static int __xts_crypt(struct skcipher_request *req, bool encrypt,
|
|||
if (err)
|
||||
return err;
|
||||
|
||||
__aes_arm_encrypt(ctx->tweak_key.key_enc, rounds, walk.iv, walk.iv);
|
||||
aes_encrypt(&ctx->tweak_key, walk.iv, walk.iv);
|
||||
|
||||
while (walk.nbytes >= AES_BLOCK_SIZE) {
|
||||
unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
|
||||
|
|
@@ -311,9 +314,9 @@ static int __xts_crypt(struct skcipher_request *req, bool encrypt,
|
|||
crypto_xor(buf, req->iv, AES_BLOCK_SIZE);
|
||||
|
||||
if (encrypt)
|
||||
__aes_arm_encrypt(ctx->fallback.key_enc, rounds, buf, buf);
|
||||
aes_encrypt(&ctx->fallback, buf, buf);
|
||||
else
|
||||
__aes_arm_decrypt(ctx->fallback.key_dec, rounds, buf, buf);
|
||||
aes_decrypt(&ctx->fallback, buf, buf);
|
||||
|
||||
crypto_xor(buf, req->iv, AES_BLOCK_SIZE);
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue