struct ccp_crypto_ahash_alg *alg =
	ccp_crypto_ahash_alg(crypto_ahash_tfm(tfm));
u64 k0_hi, k0_lo, k1_hi, k1_lo, k2_hi, k2_lo;
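+ /* 0x87 is the CMAC Rb constant for a 128-bit block size (NIST SP 800-38B) */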
u64 rb_hi = 0x00, rb_lo = 0x87;
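+ /* On-stack key schedule for the AES library, used only to derive the CMAC subkeys */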
+ struct crypto_aes_ctx aes;
__be64 *gk;
int ret;
ctx->u.aes.key_len = 0;
- /* Set the key for the AES cipher used to generate the keys */
- ret = crypto_cipher_setkey(ctx->u.aes.tfm_cipher, key, key_len);
+ /* Expand the key for the AES cipher used to generate the subkeys */
+ ret = aes_expandkey(&aes, key, key_len);
if (ret)
return ret;
/* Encrypt a block of zeroes - use key area in context */
memset(ctx->u.aes.key, 0, sizeof(ctx->u.aes.key));
- crypto_cipher_encrypt_one(ctx->u.aes.tfm_cipher, ctx->u.aes.key,
- ctx->u.aes.key);
+ aes_encrypt(&aes, ctx->u.aes.key, ctx->u.aes.key);
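+ /* The expanded key schedule is no longer needed; wipe it from the stack */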
+ memzero_explicit(&aes, sizeof(aes));
/* Generate K1 and K2 */
k0_hi = be64_to_cpu(*((__be64 *)ctx->u.aes.key));
static int ccp_aes_cmac_cra_init(struct crypto_tfm *tfm)
{
struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
- struct crypto_cipher *cipher_tfm;
ctx->complete = ccp_aes_cmac_complete;
ctx->u.aes.key_len = 0;
crypto_ahash_set_reqsize(ahash, sizeof(struct ccp_aes_cmac_req_ctx));
- cipher_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_NEED_FALLBACK);
- if (IS_ERR(cipher_tfm)) {
- pr_warn("could not load aes cipher driver\n");
- return PTR_ERR(cipher_tfm);
- }
- ctx->u.aes.tfm_cipher = cipher_tfm;
-
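+ /* Subkey derivation in setkey uses the AES library directly, so no cipher TFM is needed here */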
return 0;
}
-static void ccp_aes_cmac_cra_exit(struct crypto_tfm *tfm)
-{
- struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
-
- if (ctx->u.aes.tfm_cipher)
- crypto_free_cipher(ctx->u.aes.tfm_cipher);
- ctx->u.aes.tfm_cipher = NULL;
-}
-
int ccp_register_aes_cmac_algs(struct list_head *head)
{
struct ccp_crypto_ahash_alg *ccp_alg;
base->cra_ctxsize = sizeof(struct ccp_ctx);
base->cra_priority = CCP_CRA_PRIORITY;
base->cra_init = ccp_aes_cmac_cra_init;
- base->cra_exit = ccp_aes_cmac_cra_exit;
base->cra_module = THIS_MODULE;
ret = crypto_register_ahash(alg);