in axis/artpec6_crypto.c [1308:1531]
/* Build the DMA descriptor chain for one hash (or HMAC) operation.
 *
 * Translates the request state (HASH_FLAG_INIT_CTX / HASH_FLAG_UPDATE /
 * HASH_FLAG_FINALIZE, plus any buffered partial block) into a sequence of
 * out-descriptors (metadata, key, data, padding) and in-descriptors
 * (result digest or saved context) for the crypto accelerator.
 *
 * Returns:
 *   ARTPEC6_CRYPTO_PREPARE_HASH_START    - descriptors mapped, start the HW
 *   ARTPEC6_CRYPTO_PREPARE_HASH_NO_START - nothing to do yet (no full block
 *                                          ready and not finalizing)
 *   negative errno on descriptor setup / DMA mapping failure
 */
static int artpec6_crypto_prepare_hash(struct ahash_request *areq)
{
struct artpec6_hashalg_context *ctx = crypto_tfm_ctx(areq->base.tfm);
struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(areq);
size_t digestsize = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
/* The saved/restored hardware context is digest-sized. */
size_t contextsize = digestsize;
size_t blocksize = crypto_tfm_alg_blocksize(
crypto_ahash_tfm(crypto_ahash_reqtfm(areq)));
struct artpec6_crypto_req_common *common = &req_ctx->common;
struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
enum artpec6_crypto_variant variant = ac->variant;
u32 sel_ctx;
bool ext_ctx = false;
bool run_hw = false;
int error = 0;
artpec6_crypto_init_dma_operation(common);
/* Upload HMAC key; it must be in the first packet sent to the HW. */
if (req_ctx->hash_flags & HASH_FLAG_HMAC) {
/* Metadata opcode: download key (register layout differs per variant). */
if (variant == ARTPEC6_CRYPTO) {
req_ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER,
a6_regk_crypto_dlkey);
} else {
req_ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER,
a7_regk_crypto_dlkey);
}
/* Copy the key and zero-pad it up to a full hash block. */
memcpy(req_ctx->key_buffer, ctx->hmac_key,
ctx->hmac_key_length);
memset(req_ctx->key_buffer + ctx->hmac_key_length, 0,
blocksize - ctx->hmac_key_length);
error = artpec6_crypto_setup_out_descr(common,
(void *)&req_ctx->key_md,
sizeof(req_ctx->key_md), false, false);
if (error)
return error;
error = artpec6_crypto_setup_out_descr(common,
req_ctx->key_buffer, blocksize,
true, false);
if (error)
return error;
}
if (!(req_ctx->hash_flags & HASH_FLAG_INIT_CTX)) {
/* Not the first round: restore the externally saved context. */
sel_ctx = regk_crypto_ext;
ext_ctx = true;
} else {
/* First round: let the HW start from its initial hash state. */
sel_ctx = regk_crypto_init;
}
/* Encode context selection (and final-round flag) into the hash
 * metadata word; field positions differ between the A6 and A7 variants.
 */
if (variant == ARTPEC6_CRYPTO) {
req_ctx->hash_md &= ~A6_CRY_MD_HASH_SEL_CTX;
req_ctx->hash_md |= FIELD_PREP(A6_CRY_MD_HASH_SEL_CTX, sel_ctx);
/* If this is the final round, set the final flag */
if (req_ctx->hash_flags & HASH_FLAG_FINALIZE)
req_ctx->hash_md |= A6_CRY_MD_HASH_HMAC_FIN;
} else {
req_ctx->hash_md &= ~A7_CRY_MD_HASH_SEL_CTX;
req_ctx->hash_md |= FIELD_PREP(A7_CRY_MD_HASH_SEL_CTX, sel_ctx);
/* If this is the final round, set the final flag */
if (req_ctx->hash_flags & HASH_FLAG_FINALIZE)
req_ctx->hash_md |= A7_CRY_MD_HASH_HMAC_FIN;
}
/* Setup up metadata descriptors */
error = artpec6_crypto_setup_out_descr(common,
(void *)&req_ctx->hash_md,
sizeof(req_ctx->hash_md), false, false);
if (error)
return error;
/* In-descriptor to discard 4 bytes the HW emits after the metadata
 * (NOTE(review): presumably a status/echo word — confirm against HW docs).
 */
error = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false);
if (error)
return error;
if (ext_ctx) {
/* Feed the previously saved digest state back to the HW. */
error = artpec6_crypto_setup_out_descr(common,
req_ctx->digeststate,
contextsize, false, false);
if (error)
return error;
}
if (req_ctx->hash_flags & HASH_FLAG_UPDATE) {
size_t done_bytes = 0;
size_t total_bytes = areq->nbytes + req_ctx->partial_bytes;
/* The HW only consumes whole blocks; the remainder is buffered. */
size_t ready_bytes = round_down(total_bytes, blocksize);
struct artpec6_crypto_walk walk;
run_hw = ready_bytes > 0;
if (req_ctx->partial_bytes && ready_bytes) {
/* We have a partial buffer and will send at least some
 * bytes to the HW. Empty this partial buffer before
 * tackling the SG lists
 */
memcpy(req_ctx->partial_buffer_out,
req_ctx->partial_buffer,
req_ctx->partial_bytes);
error = artpec6_crypto_setup_out_descr(common,
req_ctx->partial_buffer_out,
req_ctx->partial_bytes,
false, true);
if (error)
return error;
/* Reset partial buffer */
done_bytes += req_ctx->partial_bytes;
req_ctx->partial_bytes = 0;
}
artpec6_crypto_walk_init(&walk, areq->src);
/* Emit descriptors for the whole blocks taken from the SG list. */
error = artpec6_crypto_setup_sg_descrs_out(common, &walk,
ready_bytes -
done_bytes);
if (error)
return error;
if (walk.sg) {
/* Leftover sub-block bytes: stash them in the partial
 * buffer for the next round.
 */
size_t sg_skip = ready_bytes - done_bytes;
size_t sg_rem = areq->nbytes - sg_skip;
sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
req_ctx->partial_buffer +
req_ctx->partial_bytes,
sg_rem, sg_skip);
req_ctx->partial_bytes += sg_rem;
}
req_ctx->digcnt += ready_bytes;
req_ctx->hash_flags &= ~(HASH_FLAG_UPDATE);
}
/* Finalize */
if (req_ctx->hash_flags & HASH_FLAG_FINALIZE) {
size_t hash_pad_len;
u64 digest_bits;
u32 oper;
/* Recover the hash operation code to pick the right padding. */
if (variant == ARTPEC6_CRYPTO)
oper = FIELD_GET(A6_CRY_MD_OPER, req_ctx->hash_md);
else
oper = FIELD_GET(A7_CRY_MD_OPER, req_ctx->hash_md);
/* Write out the partial buffer if present */
if (req_ctx->partial_bytes) {
memcpy(req_ctx->partial_buffer_out,
req_ctx->partial_buffer,
req_ctx->partial_bytes);
error = artpec6_crypto_setup_out_descr(common,
req_ctx->partial_buffer_out,
req_ctx->partial_bytes,
false, true);
if (error)
return error;
req_ctx->digcnt += req_ctx->partial_bytes;
req_ctx->partial_bytes = 0;
}
/* For HMAC the length field covers the key block prepended by
 * the HW in addition to the message bytes.
 */
if (req_ctx->hash_flags & HASH_FLAG_HMAC)
digest_bits = 8 * (req_ctx->digcnt + blocksize);
else
digest_bits = 8 * req_ctx->digcnt;
/* Add the hash pad */
hash_pad_len = create_hash_pad(oper, req_ctx->pad_buffer,
req_ctx->digcnt, digest_bits);
error = artpec6_crypto_setup_out_descr(common,
req_ctx->pad_buffer,
hash_pad_len, false,
true);
req_ctx->digcnt = 0;
if (error)
return error;
/* Descriptor for the final result */
error = artpec6_crypto_setup_in_descr(common, areq->result,
digestsize,
true);
if (error)
return error;
} else { /* This is not the final operation for this request */
/* Without a full block queued there is nothing for the HW to
 * do; the data stays buffered until the next update/final.
 */
if (!run_hw)
return ARTPEC6_CRYPTO_PREPARE_HASH_NO_START;
/* Save the result to the context */
error = artpec6_crypto_setup_in_descr(common,
req_ctx->digeststate,
contextsize, false);
if (error)
return error;
/* fall through */
}
req_ctx->hash_flags &= ~(HASH_FLAG_INIT_CTX | HASH_FLAG_UPDATE |
HASH_FLAG_FINALIZE);
/* Close both descriptor chains and map them for DMA. */
error = artpec6_crypto_terminate_in_descrs(common);
if (error)
return error;
error = artpec6_crypto_terminate_out_descrs(common);
if (error)
return error;
error = artpec6_crypto_dma_map_descs(common);
if (error)
return error;
return ARTPEC6_CRYPTO_PREPARE_HASH_START;
}