mirror of
https://github.com/torvalds/linux.git
synced 2026-03-08 03:24:45 +01:00
crypto: s390/paes - Refuse clear key material by default
This patch exploits the new xflag PKEY_XFLAG_NOCLEARKEY from the pkey
layer. So now by default all the paes algorithms refuse the use of
clear key material ("clear key tokens") in the setkey function with
-EINVAL.
With a new kernel module parameter "clrkey" this behavior can be
controlled. By default clrkey is 'N', but for testing purposes on module
load a true value (1, 'Y') may be given to accept clear key tokens.
Note that during selftest clear keys are always used and thus the
xflag PKEY_XFLAG_NOCLEARKEY is NOT set as long as the algorithm is in
a larval state indicated by crypto_skcipher_tested() returning false.
Signed-off-by: Harald Freudenberger <freude@linux.ibm.com>
Reviewed-by: Holger Dengler <dengler@linux.ibm.com>
Reviewed-by: Ingo Franzki <ifranzki@linux.ibm.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
This commit is contained in:
parent
452770a4fa
commit
9d58d22f36
1 changed file with 57 additions and 36 deletions
|
|
@ -40,6 +40,10 @@
|
|||
#define PAES_256_PROTKEY_SIZE (32 + 32) /* key + verification pattern */
|
||||
#define PXTS_256_PROTKEY_SIZE (32 + 32 + 32) /* k1 + k2 + verification pattern */
|
||||
|
||||
static bool pkey_clrkey_allowed;
|
||||
module_param_named(clrkey, pkey_clrkey_allowed, bool, 0444);
|
||||
MODULE_PARM_DESC(clrkey, "Allow clear key material (default N)");
|
||||
|
||||
static u8 *ctrblk;
|
||||
static DEFINE_MUTEX(ctrblk_lock);
|
||||
|
||||
|
|
@ -192,10 +196,14 @@ static inline int pxts_ctx_setkey(struct s390_pxts_ctx *ctx,
|
|||
* This function may sleep - don't call in non-sleeping context.
|
||||
*/
|
||||
static inline int convert_key(const u8 *key, unsigned int keylen,
|
||||
struct paes_protkey *pk)
|
||||
struct paes_protkey *pk, bool tested)
|
||||
{
|
||||
u32 xflags = PKEY_XFLAG_NOMEMALLOC;
|
||||
int rc, i;
|
||||
|
||||
if (tested && !pkey_clrkey_allowed)
|
||||
xflags |= PKEY_XFLAG_NOCLEARKEY;
|
||||
|
||||
pk->len = sizeof(pk->protkey);
|
||||
|
||||
/*
|
||||
|
|
@ -209,7 +217,7 @@ static inline int convert_key(const u8 *key, unsigned int keylen,
|
|||
}
|
||||
rc = pkey_key2protkey(key, keylen,
|
||||
pk->protkey, &pk->len, &pk->type,
|
||||
PKEY_XFLAG_NOMEMALLOC);
|
||||
xflags);
|
||||
}
|
||||
|
||||
out:
|
||||
|
|
@ -231,7 +239,7 @@ out:
|
|||
* unnecessary additional conversion but never to invalid data on en-
|
||||
* or decrypt operations.
|
||||
*/
|
||||
static int paes_convert_key(struct s390_paes_ctx *ctx)
|
||||
static int paes_convert_key(struct s390_paes_ctx *ctx, bool tested)
|
||||
{
|
||||
struct paes_protkey pk;
|
||||
int rc;
|
||||
|
|
@ -240,7 +248,7 @@ static int paes_convert_key(struct s390_paes_ctx *ctx)
|
|||
ctx->pk_state = PK_STATE_CONVERT_IN_PROGRESS;
|
||||
spin_unlock_bh(&ctx->pk_lock);
|
||||
|
||||
rc = convert_key(ctx->keybuf, ctx->keylen, &pk);
|
||||
rc = convert_key(ctx->keybuf, ctx->keylen, &pk, tested);
|
||||
|
||||
/* update context */
|
||||
spin_lock_bh(&ctx->pk_lock);
|
||||
|
|
@ -263,7 +271,7 @@ static int paes_convert_key(struct s390_paes_ctx *ctx)
|
|||
* pk_type, pk_len and the protected key in the tfm context.
|
||||
* See also comments on function paes_convert_key.
|
||||
*/
|
||||
static int pxts_convert_key(struct s390_pxts_ctx *ctx)
|
||||
static int pxts_convert_key(struct s390_pxts_ctx *ctx, bool tested)
|
||||
{
|
||||
struct paes_protkey pk0, pk1;
|
||||
size_t split_keylen;
|
||||
|
|
@ -273,7 +281,7 @@ static int pxts_convert_key(struct s390_pxts_ctx *ctx)
|
|||
ctx->pk_state = PK_STATE_CONVERT_IN_PROGRESS;
|
||||
spin_unlock_bh(&ctx->pk_lock);
|
||||
|
||||
rc = convert_key(ctx->keybuf, ctx->keylen, &pk0);
|
||||
rc = convert_key(ctx->keybuf, ctx->keylen, &pk0, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
|
||||
|
|
@ -287,7 +295,7 @@ static int pxts_convert_key(struct s390_pxts_ctx *ctx)
|
|||
}
|
||||
split_keylen = ctx->keylen / 2;
|
||||
rc = convert_key(ctx->keybuf + split_keylen,
|
||||
split_keylen, &pk1);
|
||||
split_keylen, &pk1, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
if (pk0.type != pk1.type) {
|
||||
|
|
@ -343,6 +351,7 @@ static int ecb_paes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
unsigned int key_len)
|
||||
{
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
long fc;
|
||||
int rc;
|
||||
|
||||
|
|
@ -352,7 +361,7 @@ static int ecb_paes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
goto out;
|
||||
|
||||
/* convert key into protected key */
|
||||
rc = paes_convert_key(ctx);
|
||||
rc = paes_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
|
||||
|
|
@ -382,7 +391,7 @@ out:
|
|||
|
||||
static int ecb_paes_do_crypt(struct s390_paes_ctx *ctx,
|
||||
struct s390_pecb_req_ctx *req_ctx,
|
||||
bool maysleep)
|
||||
bool tested, bool maysleep)
|
||||
{
|
||||
struct ecb_param *param = &req_ctx->param;
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
|
|
@ -430,7 +439,7 @@ static int ecb_paes_do_crypt(struct s390_paes_ctx *ctx,
|
|||
rc = -EKEYEXPIRED;
|
||||
goto out;
|
||||
}
|
||||
rc = paes_convert_key(ctx);
|
||||
rc = paes_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
spin_lock_bh(&ctx->pk_lock);
|
||||
|
|
@ -450,6 +459,7 @@ static int ecb_paes_crypt(struct skcipher_request *req, unsigned long modifier)
|
|||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
int rc;
|
||||
|
||||
/*
|
||||
|
|
@ -468,7 +478,7 @@ static int ecb_paes_crypt(struct skcipher_request *req, unsigned long modifier)
|
|||
|
||||
/* Try synchronous operation if no active engine usage */
|
||||
if (!atomic_read(&ctx->via_engine_ctr)) {
|
||||
rc = ecb_paes_do_crypt(ctx, req_ctx, false);
|
||||
rc = ecb_paes_do_crypt(ctx, req_ctx, tested, false);
|
||||
if (rc == 0)
|
||||
goto out;
|
||||
}
|
||||
|
|
@ -531,11 +541,12 @@ static int ecb_paes_do_one_request(struct crypto_engine *engine, void *areq)
|
|||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
int rc;
|
||||
|
||||
/* walk has already been prepared */
|
||||
|
||||
rc = ecb_paes_do_crypt(ctx, req_ctx, true);
|
||||
rc = ecb_paes_do_crypt(ctx, req_ctx, tested, true);
|
||||
if (rc == -EKEYEXPIRED) {
|
||||
/*
|
||||
* Protected key expired, conversion is in process.
|
||||
|
|
@ -602,6 +613,7 @@ static int cbc_paes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
unsigned int key_len)
|
||||
{
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
long fc;
|
||||
int rc;
|
||||
|
||||
|
|
@ -611,7 +623,7 @@ static int cbc_paes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
goto out;
|
||||
|
||||
/* convert raw key into protected key */
|
||||
rc = paes_convert_key(ctx);
|
||||
rc = paes_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
|
||||
|
|
@ -641,7 +653,7 @@ out:
|
|||
|
||||
static int cbc_paes_do_crypt(struct s390_paes_ctx *ctx,
|
||||
struct s390_pcbc_req_ctx *req_ctx,
|
||||
bool maysleep)
|
||||
bool tested, bool maysleep)
|
||||
{
|
||||
struct cbc_param *param = &req_ctx->param;
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
|
|
@ -693,7 +705,7 @@ static int cbc_paes_do_crypt(struct s390_paes_ctx *ctx,
|
|||
rc = -EKEYEXPIRED;
|
||||
goto out;
|
||||
}
|
||||
rc = paes_convert_key(ctx);
|
||||
rc = paes_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
spin_lock_bh(&ctx->pk_lock);
|
||||
|
|
@ -713,6 +725,7 @@ static int cbc_paes_crypt(struct skcipher_request *req, unsigned long modifier)
|
|||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
int rc;
|
||||
|
||||
/*
|
||||
|
|
@ -731,7 +744,7 @@ static int cbc_paes_crypt(struct skcipher_request *req, unsigned long modifier)
|
|||
|
||||
/* Try synchronous operation if no active engine usage */
|
||||
if (!atomic_read(&ctx->via_engine_ctr)) {
|
||||
rc = cbc_paes_do_crypt(ctx, req_ctx, false);
|
||||
rc = cbc_paes_do_crypt(ctx, req_ctx, tested, false);
|
||||
if (rc == 0)
|
||||
goto out;
|
||||
}
|
||||
|
|
@ -794,11 +807,12 @@ static int cbc_paes_do_one_request(struct crypto_engine *engine, void *areq)
|
|||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
int rc;
|
||||
|
||||
/* walk has already been prepared */
|
||||
|
||||
rc = cbc_paes_do_crypt(ctx, req_ctx, true);
|
||||
rc = cbc_paes_do_crypt(ctx, req_ctx, tested, true);
|
||||
if (rc == -EKEYEXPIRED) {
|
||||
/*
|
||||
* Protected key expired, conversion is in process.
|
||||
|
|
@ -865,6 +879,7 @@ static int ctr_paes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
unsigned int key_len)
|
||||
{
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
long fc;
|
||||
int rc;
|
||||
|
||||
|
|
@ -874,7 +889,7 @@ static int ctr_paes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
goto out;
|
||||
|
||||
/* convert raw key into protected key */
|
||||
rc = paes_convert_key(ctx);
|
||||
rc = paes_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
|
||||
|
|
@ -919,7 +934,7 @@ static inline unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes
|
|||
|
||||
static int ctr_paes_do_crypt(struct s390_paes_ctx *ctx,
|
||||
struct s390_pctr_req_ctx *req_ctx,
|
||||
bool maysleep)
|
||||
bool tested, bool maysleep)
|
||||
{
|
||||
struct ctr_param *param = &req_ctx->param;
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
|
|
@ -979,7 +994,7 @@ static int ctr_paes_do_crypt(struct s390_paes_ctx *ctx,
|
|||
rc = -EKEYEXPIRED;
|
||||
goto out;
|
||||
}
|
||||
rc = paes_convert_key(ctx);
|
||||
rc = paes_convert_key(ctx, tested);
|
||||
if (rc) {
|
||||
if (locked)
|
||||
mutex_unlock(&ctrblk_lock);
|
||||
|
|
@ -1006,7 +1021,7 @@ static int ctr_paes_do_crypt(struct s390_paes_ctx *ctx,
|
|||
rc = -EKEYEXPIRED;
|
||||
goto out;
|
||||
}
|
||||
rc = paes_convert_key(ctx);
|
||||
rc = paes_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
spin_lock_bh(&ctx->pk_lock);
|
||||
|
|
@ -1029,6 +1044,7 @@ static int ctr_paes_crypt(struct skcipher_request *req)
|
|||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
int rc;
|
||||
|
||||
/*
|
||||
|
|
@ -1046,7 +1062,7 @@ static int ctr_paes_crypt(struct skcipher_request *req)
|
|||
|
||||
/* Try synchronous operation if no active engine usage */
|
||||
if (!atomic_read(&ctx->via_engine_ctr)) {
|
||||
rc = ctr_paes_do_crypt(ctx, req_ctx, false);
|
||||
rc = ctr_paes_do_crypt(ctx, req_ctx, tested, false);
|
||||
if (rc == 0)
|
||||
goto out;
|
||||
}
|
||||
|
|
@ -1099,11 +1115,12 @@ static int ctr_paes_do_one_request(struct crypto_engine *engine, void *areq)
|
|||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
int rc;
|
||||
|
||||
/* walk has already been prepared */
|
||||
|
||||
rc = ctr_paes_do_crypt(ctx, req_ctx, true);
|
||||
rc = ctr_paes_do_crypt(ctx, req_ctx, tested, true);
|
||||
if (rc == -EKEYEXPIRED) {
|
||||
/*
|
||||
* Protected key expired, conversion is in process.
|
||||
|
|
@ -1190,6 +1207,7 @@ static int xts_paes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
unsigned int in_keylen)
|
||||
{
|
||||
struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
u8 ckey[2 * AES_MAX_KEY_SIZE];
|
||||
unsigned int ckey_len;
|
||||
long fc;
|
||||
|
|
@ -1205,7 +1223,7 @@ static int xts_paes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
|
|||
goto out;
|
||||
|
||||
/* convert raw key(s) into protected key(s) */
|
||||
rc = pxts_convert_key(ctx);
|
||||
rc = pxts_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
|
||||
|
|
@ -1255,7 +1273,7 @@ out:
|
|||
|
||||
static int xts_paes_do_crypt_fullkey(struct s390_pxts_ctx *ctx,
|
||||
struct s390_pxts_req_ctx *req_ctx,
|
||||
bool maysleep)
|
||||
bool tested, bool maysleep)
|
||||
{
|
||||
struct xts_full_km_param *param = &req_ctx->param.full_km_param;
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
|
|
@ -1299,7 +1317,7 @@ static int xts_paes_do_crypt_fullkey(struct s390_pxts_ctx *ctx,
|
|||
rc = -EKEYEXPIRED;
|
||||
goto out;
|
||||
}
|
||||
rc = pxts_convert_key(ctx);
|
||||
rc = pxts_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
spin_lock_bh(&ctx->pk_lock);
|
||||
|
|
@ -1318,7 +1336,8 @@ static inline int __xts_2keys_prep_param(struct s390_pxts_ctx *ctx,
|
|||
struct xts_km_param *param,
|
||||
struct skcipher_walk *walk,
|
||||
unsigned int keylen,
|
||||
unsigned int offset, bool maysleep)
|
||||
unsigned int offset,
|
||||
bool tested, bool maysleep)
|
||||
{
|
||||
struct xts_pcc_param pcc_param;
|
||||
unsigned long cc = 1;
|
||||
|
|
@ -1337,7 +1356,7 @@ static inline int __xts_2keys_prep_param(struct s390_pxts_ctx *ctx,
|
|||
rc = -EKEYEXPIRED;
|
||||
break;
|
||||
}
|
||||
rc = pxts_convert_key(ctx);
|
||||
rc = pxts_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
break;
|
||||
continue;
|
||||
|
|
@ -1351,7 +1370,7 @@ static inline int __xts_2keys_prep_param(struct s390_pxts_ctx *ctx,
|
|||
|
||||
static int xts_paes_do_crypt_2keys(struct s390_pxts_ctx *ctx,
|
||||
struct s390_pxts_req_ctx *req_ctx,
|
||||
bool maysleep)
|
||||
bool tested, bool maysleep)
|
||||
{
|
||||
struct xts_km_param *param = &req_ctx->param.km_param;
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
|
|
@ -1369,7 +1388,7 @@ static int xts_paes_do_crypt_2keys(struct s390_pxts_ctx *ctx,
|
|||
|
||||
if (!req_ctx->param_init_done) {
|
||||
rc = __xts_2keys_prep_param(ctx, param, walk,
|
||||
keylen, offset, maysleep);
|
||||
keylen, offset, tested, maysleep);
|
||||
if (rc)
|
||||
goto out;
|
||||
req_ctx->param_init_done = true;
|
||||
|
|
@ -1392,7 +1411,7 @@ static int xts_paes_do_crypt_2keys(struct s390_pxts_ctx *ctx,
|
|||
rc = -EKEYEXPIRED;
|
||||
goto out;
|
||||
}
|
||||
rc = pxts_convert_key(ctx);
|
||||
rc = pxts_convert_key(ctx, tested);
|
||||
if (rc)
|
||||
goto out;
|
||||
spin_lock_bh(&ctx->pk_lock);
|
||||
|
|
@ -1408,7 +1427,7 @@ out:
|
|||
|
||||
static int xts_paes_do_crypt(struct s390_pxts_ctx *ctx,
|
||||
struct s390_pxts_req_ctx *req_ctx,
|
||||
bool maysleep)
|
||||
bool tested, bool maysleep)
|
||||
{
|
||||
int pk_state, rc = 0;
|
||||
|
||||
|
|
@ -1436,11 +1455,11 @@ static int xts_paes_do_crypt(struct s390_pxts_ctx *ctx,
|
|||
switch (ctx->fc) {
|
||||
case CPACF_KM_PXTS_128:
|
||||
case CPACF_KM_PXTS_256:
|
||||
rc = xts_paes_do_crypt_2keys(ctx, req_ctx, maysleep);
|
||||
rc = xts_paes_do_crypt_2keys(ctx, req_ctx, tested, maysleep);
|
||||
break;
|
||||
case CPACF_KM_PXTS_128_FULL:
|
||||
case CPACF_KM_PXTS_256_FULL:
|
||||
rc = xts_paes_do_crypt_fullkey(ctx, req_ctx, maysleep);
|
||||
rc = xts_paes_do_crypt_fullkey(ctx, req_ctx, tested, maysleep);
|
||||
break;
|
||||
default:
|
||||
rc = -EINVAL;
|
||||
|
|
@ -1457,6 +1476,7 @@ static inline int xts_paes_crypt(struct skcipher_request *req, unsigned long mod
|
|||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
int rc;
|
||||
|
||||
/*
|
||||
|
|
@ -1475,7 +1495,7 @@ static inline int xts_paes_crypt(struct skcipher_request *req, unsigned long mod
|
|||
|
||||
/* Try synchronous operation if no active engine usage */
|
||||
if (!atomic_read(&ctx->via_engine_ctr)) {
|
||||
rc = xts_paes_do_crypt(ctx, req_ctx, false);
|
||||
rc = xts_paes_do_crypt(ctx, req_ctx, tested, false);
|
||||
if (rc == 0)
|
||||
goto out;
|
||||
}
|
||||
|
|
@ -1538,11 +1558,12 @@ static int xts_paes_do_one_request(struct crypto_engine *engine, void *areq)
|
|||
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
|
||||
struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
|
||||
struct skcipher_walk *walk = &req_ctx->walk;
|
||||
bool tested = crypto_skcipher_tested(tfm);
|
||||
int rc;
|
||||
|
||||
/* walk has already been prepared */
|
||||
|
||||
rc = xts_paes_do_crypt(ctx, req_ctx, true);
|
||||
rc = xts_paes_do_crypt(ctx, req_ctx, tested, true);
|
||||
if (rc == -EKEYEXPIRED) {
|
||||
/*
|
||||
* Protected key expired, conversion is in process.
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue