Lines matching references to areq_ctx

55 	struct aead_req_ctx *areq_ctx = aead_request_ctx(req);  in cc_copy_mac()  local
58 cc_copy_sg_portion(dev, areq_ctx->backup_mac, req->src, in cc_copy_mac()
59 (skip - areq_ctx->req_authsize), skip, dir); in cc_copy_mac()
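
The cc_copy_mac() hits above back up the authentication tag from the source scatterlist into areq_ctx->backup_mac before it can be overwritten. A minimal sketch of that kind of copy, using the generic scatterlist helpers rather than the driver's own cc_copy_sg_portion(); the function and parameter names here are illustrative, not the driver's:

#include <linux/scatterlist.h>
#include <linux/types.h>

/*
 * Copy the 'authsize' bytes that end at offset 'skip' in the scatterlist
 * out to a linear backup buffer (e.g. before an in-place decrypt).
 */
static void backup_mac_example(struct scatterlist *src, unsigned int nents,
			       u8 *backup, unsigned int authsize,
			       unsigned int skip)
{
	sg_pcopy_to_buffer(src, nents, backup, authsize, skip - authsize);
}
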
282 cc_set_aead_conf_buf(struct device *dev, struct aead_req_ctx *areq_ctx, in cc_set_aead_conf_buf() argument
288 sg_init_one(&areq_ctx->ccm_adata_sg, config_data, in cc_set_aead_conf_buf()
289 AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size); in cc_set_aead_conf_buf()
290 if (dma_map_sg(dev, &areq_ctx->ccm_adata_sg, 1, DMA_TO_DEVICE) != 1) { in cc_set_aead_conf_buf()
295 &sg_dma_address(&areq_ctx->ccm_adata_sg), in cc_set_aead_conf_buf()
296 sg_page(&areq_ctx->ccm_adata_sg), in cc_set_aead_conf_buf()
297 sg_virt(&areq_ctx->ccm_adata_sg), in cc_set_aead_conf_buf()
298 areq_ctx->ccm_adata_sg.offset, areq_ctx->ccm_adata_sg.length); in cc_set_aead_conf_buf()
301 cc_add_sg_entry(dev, sg_data, 1, &areq_ctx->ccm_adata_sg, in cc_set_aead_conf_buf()
302 (AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size), in cc_set_aead_conf_buf()
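
cc_set_aead_conf_buf() wraps the CCM configuration block in a one-entry scatterlist and DMA-maps it toward the device before adding it to the MLLI build list. A minimal sketch of that map-one-buffer pattern, assuming only the generic scatterlist and DMA-mapping APIs (names and the error message are illustrative):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int map_config_block_example(struct device *dev,
				    struct scatterlist *sg,
				    void *config_data, unsigned int len)
{
	sg_init_one(sg, config_data, len);
	if (dma_map_sg(dev, sg, 1, DMA_TO_DEVICE) != 1) {
		dev_err(dev, "dma_map_sg() of config buffer failed\n");
		return -ENOMEM;
	}
	dev_dbg(dev, "mapped config buffer: dma=%pad len=%u\n",
		&sg_dma_address(sg), sg_dma_len(sg));
	return 0;
}
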
308 static int cc_set_hash_buf(struct device *dev, struct ahash_req_ctx *areq_ctx, in cc_set_hash_buf() argument
314 sg_init_one(areq_ctx->buff_sg, curr_buff, curr_buff_cnt); in cc_set_hash_buf()
315 if (dma_map_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE) != 1) { in cc_set_hash_buf()
320 &sg_dma_address(areq_ctx->buff_sg), sg_page(areq_ctx->buff_sg), in cc_set_hash_buf()
321 sg_virt(areq_ctx->buff_sg), areq_ctx->buff_sg->offset, in cc_set_hash_buf()
322 areq_ctx->buff_sg->length); in cc_set_hash_buf()
323 areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI; in cc_set_hash_buf()
324 areq_ctx->curr_sg = areq_ctx->buff_sg; in cc_set_hash_buf()
325 areq_ctx->in_nents = 0; in cc_set_hash_buf()
327 cc_add_sg_entry(dev, sg_data, 1, areq_ctx->buff_sg, curr_buff_cnt, 0, in cc_set_hash_buf()
449 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_unmap_aead_request() local
450 unsigned int hw_iv_size = areq_ctx->hw_iv_size; in cc_unmap_aead_request()
453 if (areq_ctx->mac_buf_dma_addr) { in cc_unmap_aead_request()
454 dma_unmap_single(dev, areq_ctx->mac_buf_dma_addr, in cc_unmap_aead_request()
458 if (areq_ctx->cipher_mode == DRV_CIPHER_GCTR) { in cc_unmap_aead_request()
459 if (areq_ctx->hkey_dma_addr) { in cc_unmap_aead_request()
460 dma_unmap_single(dev, areq_ctx->hkey_dma_addr, in cc_unmap_aead_request()
464 if (areq_ctx->gcm_block_len_dma_addr) { in cc_unmap_aead_request()
465 dma_unmap_single(dev, areq_ctx->gcm_block_len_dma_addr, in cc_unmap_aead_request()
469 if (areq_ctx->gcm_iv_inc1_dma_addr) { in cc_unmap_aead_request()
470 dma_unmap_single(dev, areq_ctx->gcm_iv_inc1_dma_addr, in cc_unmap_aead_request()
474 if (areq_ctx->gcm_iv_inc2_dma_addr) { in cc_unmap_aead_request()
475 dma_unmap_single(dev, areq_ctx->gcm_iv_inc2_dma_addr, in cc_unmap_aead_request()
480 if (areq_ctx->ccm_hdr_size != ccm_header_size_null) { in cc_unmap_aead_request()
481 if (areq_ctx->ccm_iv0_dma_addr) { in cc_unmap_aead_request()
482 dma_unmap_single(dev, areq_ctx->ccm_iv0_dma_addr, in cc_unmap_aead_request()
486 dma_unmap_sg(dev, &areq_ctx->ccm_adata_sg, 1, DMA_TO_DEVICE); in cc_unmap_aead_request()
488 if (areq_ctx->gen_ctx.iv_dma_addr) { in cc_unmap_aead_request()
489 dma_unmap_single(dev, areq_ctx->gen_ctx.iv_dma_addr, in cc_unmap_aead_request()
491 kfree_sensitive(areq_ctx->gen_ctx.iv); in cc_unmap_aead_request()
495 if ((areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI || in cc_unmap_aead_request()
496 areq_ctx->data_buff_type == CC_DMA_BUF_MLLI) && in cc_unmap_aead_request()
497 (areq_ctx->mlli_params.mlli_virt_addr)) { in cc_unmap_aead_request()
499 &areq_ctx->mlli_params.mlli_dma_addr, in cc_unmap_aead_request()
500 areq_ctx->mlli_params.mlli_virt_addr); in cc_unmap_aead_request()
501 dma_pool_free(areq_ctx->mlli_params.curr_pool, in cc_unmap_aead_request()
502 areq_ctx->mlli_params.mlli_virt_addr, in cc_unmap_aead_request()
503 areq_ctx->mlli_params.mlli_dma_addr); in cc_unmap_aead_request()
507 sg_virt(req->src), areq_ctx->src.nents, areq_ctx->assoc.nents, in cc_unmap_aead_request()
508 areq_ctx->assoclen, req->cryptlen); in cc_unmap_aead_request()
510 dma_unmap_sg(dev, req->src, areq_ctx->src.mapped_nents, in cc_unmap_aead_request()
515 dma_unmap_sg(dev, req->dst, areq_ctx->dst.mapped_nents, in cc_unmap_aead_request()
519 areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT && in cc_unmap_aead_request()
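
The cc_unmap_aead_request() hits show the teardown side: each optionally mapped buffer is unmapped only if its DMA address is non-zero, and an MLLI table allocated from a DMA pool is returned with dma_pool_free(). A reduced sketch of that cleanup shape, with illustrative struct and field names:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

struct example_unmap_state {
	dma_addr_t mac_buf_dma_addr;
	struct dma_pool *mlli_pool;
	void *mlli_virt_addr;
	dma_addr_t mlli_dma_addr;
};

static void unmap_request_example(struct device *dev,
				  struct example_unmap_state *st,
				  unsigned int mac_size)
{
	/* Only unmap what was actually mapped on the way in. */
	if (st->mac_buf_dma_addr)
		dma_unmap_single(dev, st->mac_buf_dma_addr, mac_size,
				 DMA_BIDIRECTIONAL);

	/* Return the MLLI table to its DMA pool, if one was built. */
	if (st->mlli_virt_addr)
		dma_pool_free(st->mlli_pool, st->mlli_virt_addr,
			      st->mlli_dma_addr);
}
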
540 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_aead_chain_iv() local
541 unsigned int hw_iv_size = areq_ctx->hw_iv_size; in cc_aead_chain_iv()
547 areq_ctx->gen_ctx.iv_dma_addr = 0; in cc_aead_chain_iv()
548 areq_ctx->gen_ctx.iv = NULL; in cc_aead_chain_iv()
552 areq_ctx->gen_ctx.iv = kmemdup(req->iv, hw_iv_size, flags); in cc_aead_chain_iv()
553 if (!areq_ctx->gen_ctx.iv) in cc_aead_chain_iv()
556 areq_ctx->gen_ctx.iv_dma_addr = in cc_aead_chain_iv()
557 dma_map_single(dev, areq_ctx->gen_ctx.iv, hw_iv_size, in cc_aead_chain_iv()
559 if (dma_mapping_error(dev, areq_ctx->gen_ctx.iv_dma_addr)) { in cc_aead_chain_iv()
562 kfree_sensitive(areq_ctx->gen_ctx.iv); in cc_aead_chain_iv()
563 areq_ctx->gen_ctx.iv = NULL; in cc_aead_chain_iv()
569 hw_iv_size, req->iv, &areq_ctx->gen_ctx.iv_dma_addr); in cc_aead_chain_iv()
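
cc_aead_chain_iv() duplicates the request IV into a DMA-safe buffer, maps it, and frees the copy with kfree_sensitive() if the mapping fails, since the IV may carry secret-derived material. A sketch of that duplicate-then-map pattern, with illustrative names (the DMA direction here is also just illustrative):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/string.h>

static int map_iv_example(struct device *dev, const u8 *req_iv,
			  unsigned int hw_iv_size, gfp_t flags,
			  u8 **iv_out, dma_addr_t *iv_dma_out)
{
	u8 *iv = kmemdup(req_iv, hw_iv_size, flags);
	dma_addr_t dma;

	if (!iv)
		return -ENOMEM;

	dma = dma_map_single(dev, iv, hw_iv_size, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, dma)) {
		dev_err(dev, "Mapping IV %u B failed\n", hw_iv_size);
		kfree_sensitive(iv);	/* scrub the copy before freeing */
		return -ENOMEM;
	}

	*iv_out = iv;
	*iv_dma_out = dma;
	return 0;
}
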
580 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_aead_chain_assoc() local
590 if (areq_ctx->assoclen == 0) { in cc_aead_chain_assoc()
591 areq_ctx->assoc_buff_type = CC_DMA_BUF_NULL; in cc_aead_chain_assoc()
592 areq_ctx->assoc.nents = 0; in cc_aead_chain_assoc()
593 areq_ctx->assoc.mlli_nents = 0; in cc_aead_chain_assoc()
595 cc_dma_buf_type(areq_ctx->assoc_buff_type), in cc_aead_chain_assoc()
596 areq_ctx->assoc.nents); in cc_aead_chain_assoc()
600 mapped_nents = sg_nents_for_len(req->src, areq_ctx->assoclen); in cc_aead_chain_assoc()
609 areq_ctx->assoc.nents = mapped_nents; in cc_aead_chain_assoc()
614 if (areq_ctx->ccm_hdr_size != ccm_header_size_null) { in cc_aead_chain_assoc()
617 (areq_ctx->assoc.nents + 1), in cc_aead_chain_assoc()
624 if (mapped_nents == 1 && areq_ctx->ccm_hdr_size == ccm_header_size_null) in cc_aead_chain_assoc()
625 areq_ctx->assoc_buff_type = CC_DMA_BUF_DLLI; in cc_aead_chain_assoc()
627 areq_ctx->assoc_buff_type = CC_DMA_BUF_MLLI; in cc_aead_chain_assoc()
629 if (do_chain || areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI) { in cc_aead_chain_assoc()
631 cc_dma_buf_type(areq_ctx->assoc_buff_type), in cc_aead_chain_assoc()
632 areq_ctx->assoc.nents); in cc_aead_chain_assoc()
633 cc_add_sg_entry(dev, sg_data, areq_ctx->assoc.nents, req->src, in cc_aead_chain_assoc()
634 areq_ctx->assoclen, 0, is_last, in cc_aead_chain_assoc()
635 &areq_ctx->assoc.mlli_nents); in cc_aead_chain_assoc()
636 areq_ctx->assoc_buff_type = CC_DMA_BUF_MLLI; in cc_aead_chain_assoc()
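
cc_aead_chain_assoc() counts how many scatterlist entries cover the associated data with sg_nents_for_len() and then picks the descriptor format: a single direct entry (DLLI) when one entry suffices and no CCM header block is prepended, otherwise a linked-list table (MLLI). A sketch of that decision, assuming an illustrative buffer-type enum:

#include <linux/scatterlist.h>
#include <linux/types.h>

enum example_buf_type { BUF_NULL, BUF_DLLI, BUF_MLLI };

static int choose_assoc_buf_type_example(struct scatterlist *src,
					 unsigned int assoclen,
					 bool has_ccm_header,
					 enum example_buf_type *type)
{
	int nents;

	if (!assoclen) {
		*type = BUF_NULL;
		return 0;
	}

	nents = sg_nents_for_len(src, assoclen);
	if (nents < 0)
		return nents;	/* scatterlist shorter than assoclen */

	/* One entry and no prepended CCM config block: direct (DLLI). */
	*type = (nents == 1 && !has_ccm_header) ? BUF_DLLI : BUF_MLLI;
	return 0;
}
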
646 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_prepare_aead_data_dlli() local
647 enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type; in cc_prepare_aead_data_dlli()
648 unsigned int authsize = areq_ctx->req_authsize; in cc_prepare_aead_data_dlli()
652 areq_ctx->is_icv_fragmented = false; in cc_prepare_aead_data_dlli()
655 sg = areq_ctx->src_sgl; in cc_prepare_aead_data_dlli()
658 sg = areq_ctx->dst_sgl; in cc_prepare_aead_data_dlli()
662 areq_ctx->icv_dma_addr = sg_dma_address(sg) + offset; in cc_prepare_aead_data_dlli()
663 areq_ctx->icv_virt_addr = sg_virt(sg) + offset; in cc_prepare_aead_data_dlli()
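
In the DLLI case the ICV (authentication tag) is contiguous with the data, so cc_prepare_aead_data_dlli() records its position as an offset from the single mapped entry. A small sketch of that address arithmetic; the offset computation is illustrative:

#include <linux/scatterlist.h>
#include <linux/types.h>

/*
 * With one mapped entry holding 'cryptlen' bytes of data followed by the
 * tag, the tag starts 'cryptlen' bytes past the entry's start.
 */
static void locate_icv_dlli_example(struct scatterlist *sg,
				    unsigned int cryptlen,
				    dma_addr_t *icv_dma, u8 **icv_virt)
{
	unsigned int offset = cryptlen;

	*icv_dma = sg_dma_address(sg) + offset;
	*icv_virt = sg_virt(sg) + offset;
}
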
672 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_prepare_aead_data_mlli() local
673 enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type; in cc_prepare_aead_data_mlli()
674 unsigned int authsize = areq_ctx->req_authsize; in cc_prepare_aead_data_mlli()
680 cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, in cc_prepare_aead_data_mlli()
681 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
682 areq_ctx->src_offset, is_last_table, in cc_prepare_aead_data_mlli()
683 &areq_ctx->src.mlli_nents); in cc_prepare_aead_data_mlli()
685 areq_ctx->is_icv_fragmented = in cc_prepare_aead_data_mlli()
686 cc_is_icv_frag(areq_ctx->src.nents, authsize, in cc_prepare_aead_data_mlli()
689 if (areq_ctx->is_icv_fragmented) { in cc_prepare_aead_data_mlli()
703 areq_ctx->icv_virt_addr = areq_ctx->backup_mac; in cc_prepare_aead_data_mlli()
705 areq_ctx->icv_virt_addr = areq_ctx->mac_buf; in cc_prepare_aead_data_mlli()
706 areq_ctx->icv_dma_addr = in cc_prepare_aead_data_mlli()
707 areq_ctx->mac_buf_dma_addr; in cc_prepare_aead_data_mlli()
710 sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1]; in cc_prepare_aead_data_mlli()
712 areq_ctx->icv_dma_addr = sg_dma_address(sg) + in cc_prepare_aead_data_mlli()
714 areq_ctx->icv_virt_addr = sg_virt(sg) + in cc_prepare_aead_data_mlli()
720 cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, in cc_prepare_aead_data_mlli()
721 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
722 areq_ctx->src_offset, is_last_table, in cc_prepare_aead_data_mlli()
723 &areq_ctx->src.mlli_nents); in cc_prepare_aead_data_mlli()
724 cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents, in cc_prepare_aead_data_mlli()
725 areq_ctx->dst_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
726 areq_ctx->dst_offset, is_last_table, in cc_prepare_aead_data_mlli()
727 &areq_ctx->dst.mlli_nents); in cc_prepare_aead_data_mlli()
729 areq_ctx->is_icv_fragmented = in cc_prepare_aead_data_mlli()
730 cc_is_icv_frag(areq_ctx->src.nents, authsize, in cc_prepare_aead_data_mlli()
737 if (areq_ctx->is_icv_fragmented) { in cc_prepare_aead_data_mlli()
739 areq_ctx->icv_virt_addr = areq_ctx->backup_mac; in cc_prepare_aead_data_mlli()
742 sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1]; in cc_prepare_aead_data_mlli()
744 areq_ctx->icv_dma_addr = sg_dma_address(sg) + in cc_prepare_aead_data_mlli()
746 areq_ctx->icv_virt_addr = sg_virt(sg) + in cc_prepare_aead_data_mlli()
752 cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents, in cc_prepare_aead_data_mlli()
753 areq_ctx->dst_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
754 areq_ctx->dst_offset, is_last_table, in cc_prepare_aead_data_mlli()
755 &areq_ctx->dst.mlli_nents); in cc_prepare_aead_data_mlli()
756 cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, in cc_prepare_aead_data_mlli()
757 areq_ctx->src_sgl, areq_ctx->cryptlen, in cc_prepare_aead_data_mlli()
758 areq_ctx->src_offset, is_last_table, in cc_prepare_aead_data_mlli()
759 &areq_ctx->src.mlli_nents); in cc_prepare_aead_data_mlli()
761 areq_ctx->is_icv_fragmented = in cc_prepare_aead_data_mlli()
762 cc_is_icv_frag(areq_ctx->dst.nents, authsize, in cc_prepare_aead_data_mlli()
765 if (!areq_ctx->is_icv_fragmented) { in cc_prepare_aead_data_mlli()
766 sg = &areq_ctx->dst_sgl[areq_ctx->dst.nents - 1]; in cc_prepare_aead_data_mlli()
768 areq_ctx->icv_dma_addr = sg_dma_address(sg) + in cc_prepare_aead_data_mlli()
770 areq_ctx->icv_virt_addr = sg_virt(sg) + in cc_prepare_aead_data_mlli()
773 areq_ctx->icv_dma_addr = areq_ctx->mac_buf_dma_addr; in cc_prepare_aead_data_mlli()
774 areq_ctx->icv_virt_addr = areq_ctx->mac_buf; in cc_prepare_aead_data_mlli()
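
The MLLI paths above hinge on cc_is_icv_frag(): when the tag does not fit entirely inside the last mapped scatterlist entry it is treated as fragmented and handled through the linear mac_buf/backup_mac buffers instead of being addressed in place. A hedged sketch of that check; the real helper's signature may differ:

#include <linux/types.h>

/*
 * The ICV is fragmented when the list has more than one entry and fewer
 * than 'authsize' bytes remain in the last entry of the data.
 */
static bool icv_is_fragmented_example(unsigned int sg_nents,
				      unsigned int authsize,
				      unsigned int last_entry_bytes)
{
	return sg_nents > 1 && last_entry_bytes < authsize;
}
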
784 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_aead_chain_data() local
786 enum drv_crypto_direction direct = areq_ctx->gen_ctx.op_type; in cc_aead_chain_data()
787 unsigned int authsize = areq_ctx->req_authsize; in cc_aead_chain_data()
803 areq_ctx->src_sgl = req->src; in cc_aead_chain_data()
804 areq_ctx->dst_sgl = req->dst; in cc_aead_chain_data()
810 sg_index = areq_ctx->src_sgl->length; in cc_aead_chain_data()
814 offset -= areq_ctx->src_sgl->length; in cc_aead_chain_data()
815 sgl = sg_next(areq_ctx->src_sgl); in cc_aead_chain_data()
818 areq_ctx->src_sgl = sgl; in cc_aead_chain_data()
819 sg_index += areq_ctx->src_sgl->length; in cc_aead_chain_data()
827 areq_ctx->src.nents = src_mapped_nents; in cc_aead_chain_data()
829 areq_ctx->src_offset = offset; in cc_aead_chain_data()
840 &areq_ctx->dst.mapped_nents, in cc_aead_chain_data()
849 sg_index = areq_ctx->dst_sgl->length; in cc_aead_chain_data()
855 offset -= areq_ctx->dst_sgl->length; in cc_aead_chain_data()
856 sgl = sg_next(areq_ctx->dst_sgl); in cc_aead_chain_data()
859 areq_ctx->dst_sgl = sgl; in cc_aead_chain_data()
860 sg_index += areq_ctx->dst_sgl->length; in cc_aead_chain_data()
867 areq_ctx->dst.nents = dst_mapped_nents; in cc_aead_chain_data()
868 areq_ctx->dst_offset = offset; in cc_aead_chain_data()
872 areq_ctx->data_buff_type = CC_DMA_BUF_MLLI; in cc_aead_chain_data()
877 areq_ctx->data_buff_type = CC_DMA_BUF_DLLI; in cc_aead_chain_data()
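
cc_aead_chain_data() has to skip the associated data before chaining the payload, so it walks the source (and destination) scatterlist with sg_next() until the running offset falls inside an entry. A sketch of that walk, with illustrative names:

#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Advance past 'offset' bytes; on return *sg_out points at the entry that
 * holds the first payload byte and *off_out is the offset inside it.
 */
static int skip_to_offset_example(struct scatterlist *sgl,
				  unsigned int offset,
				  struct scatterlist **sg_out,
				  unsigned int *off_out)
{
	while (sgl && offset >= sgl->length) {
		offset -= sgl->length;
		sgl = sg_next(sgl);
	}
	if (!sgl)
		return -EINVAL;	/* offset beyond the end of the list */

	*sg_out = sgl;
	*off_out = offset;
	return 0;
}
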
889 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_update_aead_mlli_nents() local
892 if (areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI) { in cc_update_aead_mlli_nents()
893 areq_ctx->assoc.sram_addr = drvdata->mlli_sram_addr; in cc_update_aead_mlli_nents()
894 curr_mlli_size = areq_ctx->assoc.mlli_nents * in cc_update_aead_mlli_nents()
898 if (areq_ctx->data_buff_type == CC_DMA_BUF_MLLI) { in cc_update_aead_mlli_nents()
901 areq_ctx->dst.mlli_nents = areq_ctx->src.mlli_nents; in cc_update_aead_mlli_nents()
902 areq_ctx->src.sram_addr = drvdata->mlli_sram_addr + in cc_update_aead_mlli_nents()
904 areq_ctx->dst.sram_addr = areq_ctx->src.sram_addr; in cc_update_aead_mlli_nents()
905 if (!areq_ctx->is_single_pass) in cc_update_aead_mlli_nents()
906 areq_ctx->assoc.mlli_nents += in cc_update_aead_mlli_nents()
907 areq_ctx->src.mlli_nents; in cc_update_aead_mlli_nents()
909 if (areq_ctx->gen_ctx.op_type == in cc_update_aead_mlli_nents()
911 areq_ctx->src.sram_addr = in cc_update_aead_mlli_nents()
914 areq_ctx->dst.sram_addr = in cc_update_aead_mlli_nents()
915 areq_ctx->src.sram_addr + in cc_update_aead_mlli_nents()
916 areq_ctx->src.mlli_nents * in cc_update_aead_mlli_nents()
918 if (!areq_ctx->is_single_pass) in cc_update_aead_mlli_nents()
919 areq_ctx->assoc.mlli_nents += in cc_update_aead_mlli_nents()
920 areq_ctx->src.mlli_nents; in cc_update_aead_mlli_nents()
922 areq_ctx->dst.sram_addr = in cc_update_aead_mlli_nents()
925 areq_ctx->src.sram_addr = in cc_update_aead_mlli_nents()
926 areq_ctx->dst.sram_addr + in cc_update_aead_mlli_nents()
927 areq_ctx->dst.mlli_nents * in cc_update_aead_mlli_nents()
929 if (!areq_ctx->is_single_pass) in cc_update_aead_mlli_nents()
930 areq_ctx->assoc.mlli_nents += in cc_update_aead_mlli_nents()
931 areq_ctx->dst.mlli_nents; in cc_update_aead_mlli_nents()
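
cc_update_aead_mlli_nents() lays the MLLI tables out back to back in device SRAM: the associated-data table at the base, then the src and dst tables (their order flips with the crypto direction when src and dst differ, and dst aliases src for in-place requests). A sketch of the running-offset arithmetic, assuming an illustrative per-entry size:

#include <linux/types.h>

/* Each MLLI entry occupies a fixed number of bytes in SRAM (illustrative). */
#define EXAMPLE_LLI_ENTRY_SIZE	8

static void layout_mlli_sram_example(u32 sram_base,
				     u32 assoc_nents, u32 src_nents,
				     u32 *assoc_addr, u32 *src_addr,
				     u32 *dst_addr)
{
	*assoc_addr = sram_base;
	*src_addr = *assoc_addr + assoc_nents * EXAMPLE_LLI_ENTRY_SIZE;
	*dst_addr = *src_addr + src_nents * EXAMPLE_LLI_ENTRY_SIZE;
}
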
939 struct aead_req_ctx *areq_ctx = aead_request_ctx(req); in cc_map_aead_request() local
940 struct mlli_params *mlli_params = &areq_ctx->mlli_params; in cc_map_aead_request()
943 unsigned int authsize = areq_ctx->req_authsize; in cc_map_aead_request()
958 areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT && in cc_map_aead_request()
963 areq_ctx->cryptlen = (areq_ctx->gen_ctx.op_type == in cc_map_aead_request()
968 dma_addr = dma_map_single(dev, areq_ctx->mac_buf, MAX_MAC_SIZE, in cc_map_aead_request()
972 MAX_MAC_SIZE, areq_ctx->mac_buf); in cc_map_aead_request()
976 areq_ctx->mac_buf_dma_addr = dma_addr; in cc_map_aead_request()
978 if (areq_ctx->ccm_hdr_size != ccm_header_size_null) { in cc_map_aead_request()
979 void *addr = areq_ctx->ccm_config + CCM_CTR_COUNT_0_OFFSET; in cc_map_aead_request()
987 areq_ctx->ccm_iv0_dma_addr = 0; in cc_map_aead_request()
991 areq_ctx->ccm_iv0_dma_addr = dma_addr; in cc_map_aead_request()
993 rc = cc_set_aead_conf_buf(dev, areq_ctx, areq_ctx->ccm_config, in cc_map_aead_request()
994 &sg_data, areq_ctx->assoclen); in cc_map_aead_request()
999 if (areq_ctx->cipher_mode == DRV_CIPHER_GCTR) { in cc_map_aead_request()
1000 dma_addr = dma_map_single(dev, areq_ctx->hkey, AES_BLOCK_SIZE, in cc_map_aead_request()
1004 AES_BLOCK_SIZE, areq_ctx->hkey); in cc_map_aead_request()
1008 areq_ctx->hkey_dma_addr = dma_addr; in cc_map_aead_request()
1010 dma_addr = dma_map_single(dev, &areq_ctx->gcm_len_block, in cc_map_aead_request()
1014 AES_BLOCK_SIZE, &areq_ctx->gcm_len_block); in cc_map_aead_request()
1018 areq_ctx->gcm_block_len_dma_addr = dma_addr; in cc_map_aead_request()
1020 dma_addr = dma_map_single(dev, areq_ctx->gcm_iv_inc1, in cc_map_aead_request()
1025 AES_BLOCK_SIZE, (areq_ctx->gcm_iv_inc1)); in cc_map_aead_request()
1026 areq_ctx->gcm_iv_inc1_dma_addr = 0; in cc_map_aead_request()
1030 areq_ctx->gcm_iv_inc1_dma_addr = dma_addr; in cc_map_aead_request()
1032 dma_addr = dma_map_single(dev, areq_ctx->gcm_iv_inc2, in cc_map_aead_request()
1037 AES_BLOCK_SIZE, (areq_ctx->gcm_iv_inc2)); in cc_map_aead_request()
1038 areq_ctx->gcm_iv_inc2_dma_addr = 0; in cc_map_aead_request()
1042 areq_ctx->gcm_iv_inc2_dma_addr = dma_addr; in cc_map_aead_request()
1047 if ((areq_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_ENCRYPT) && in cc_map_aead_request()
1053 &areq_ctx->src.mapped_nents, in cc_map_aead_request()
1060 if (areq_ctx->is_single_pass) { in cc_map_aead_request()
1111 if (areq_ctx->assoc_buff_type == CC_DMA_BUF_MLLI || in cc_map_aead_request()
1112 areq_ctx->data_buff_type == CC_DMA_BUF_MLLI) { in cc_map_aead_request()
1120 areq_ctx->assoc.mlli_nents); in cc_map_aead_request()
1121 dev_dbg(dev, "src params mn %d\n", areq_ctx->src.mlli_nents); in cc_map_aead_request()
1122 dev_dbg(dev, "dst params mn %d\n", areq_ctx->dst.mlli_nents); in cc_map_aead_request()
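
cc_map_aead_request() maps a series of small per-request buffers (mac_buf, the CCM counter-0 block, the GCM length block and the two incremented IVs) and bails out through a common error path when a mapping fails; the unmap path then relies on still-zero DMA addresses to know what to undo. A reduced sketch of that map-several-and-unwind shape, with illustrative field names and sizes, assuming the state struct starts out zero-initialized:

#include <crypto/aes.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

struct example_map_state {
	u8 mac_buf[AES_BLOCK_SIZE];
	u8 gcm_iv_inc1[AES_BLOCK_SIZE];
	dma_addr_t mac_buf_dma_addr;
	dma_addr_t gcm_iv_inc1_dma_addr;
};

static int map_request_bufs_example(struct device *dev,
				    struct example_map_state *st)
{
	dma_addr_t dma;

	dma = dma_map_single(dev, st->mac_buf, AES_BLOCK_SIZE,
			     DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, dma))
		goto err;
	st->mac_buf_dma_addr = dma;

	dma = dma_map_single(dev, st->gcm_iv_inc1, AES_BLOCK_SIZE,
			     DMA_TO_DEVICE);
	if (dma_mapping_error(dev, dma))
		goto err;
	st->gcm_iv_inc1_dma_addr = dma;

	return 0;
err:
	/* Addresses never set stay 0, so the unmap path can skip them. */
	dev_err(dev, "Mapping request buffer failed\n");
	return -ENOMEM;
}
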
1135 struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx; in cc_map_hash_request_final() local
1137 u8 *curr_buff = cc_hash_buf(areq_ctx); in cc_map_hash_request_final()
1138 u32 *curr_buff_cnt = cc_hash_buf_cnt(areq_ctx); in cc_map_hash_request_final()
1139 struct mlli_params *mlli_params = &areq_ctx->mlli_params; in cc_map_hash_request_final()
1146 curr_buff, *curr_buff_cnt, nbytes, src, areq_ctx->buff_index); in cc_map_hash_request_final()
1148 areq_ctx->data_dma_buf_type = CC_DMA_BUF_NULL; in cc_map_hash_request_final()
1151 areq_ctx->in_nents = 0; in cc_map_hash_request_final()
1160 rc = cc_set_hash_buf(dev, areq_ctx, curr_buff, *curr_buff_cnt, in cc_map_hash_request_final()
1168 &areq_ctx->in_nents, LLI_MAX_NUM_OF_DATA_ENTRIES, in cc_map_hash_request_final()
1173 areq_ctx->data_dma_buf_type == CC_DMA_BUF_NULL) { in cc_map_hash_request_final()
1174 memcpy(areq_ctx->buff_sg, src, in cc_map_hash_request_final()
1176 areq_ctx->buff_sg->length = nbytes; in cc_map_hash_request_final()
1177 areq_ctx->curr_sg = areq_ctx->buff_sg; in cc_map_hash_request_final()
1178 areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI; in cc_map_hash_request_final()
1180 areq_ctx->data_dma_buf_type = CC_DMA_BUF_MLLI; in cc_map_hash_request_final()
1185 if (areq_ctx->data_dma_buf_type == CC_DMA_BUF_MLLI) { in cc_map_hash_request_final()
1188 cc_add_sg_entry(dev, &sg_data, areq_ctx->in_nents, src, nbytes, in cc_map_hash_request_final()
1189 0, true, &areq_ctx->mlli_nents); in cc_map_hash_request_final()
1195 areq_ctx->buff_index = (areq_ctx->buff_index ^ 1); in cc_map_hash_request_final()
1197 cc_dma_buf_type(areq_ctx->data_dma_buf_type)); in cc_map_hash_request_final()
1201 dma_unmap_sg(dev, src, areq_ctx->in_nents, DMA_TO_DEVICE); in cc_map_hash_request_final()
1205 dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE); in cc_map_hash_request_final()
1214 struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx; in cc_map_hash_request_update() local
1216 u8 *curr_buff = cc_hash_buf(areq_ctx); in cc_map_hash_request_update()
1217 u32 *curr_buff_cnt = cc_hash_buf_cnt(areq_ctx); in cc_map_hash_request_update()
1218 u8 *next_buff = cc_next_buf(areq_ctx); in cc_map_hash_request_update()
1219 u32 *next_buff_cnt = cc_next_buf_cnt(areq_ctx); in cc_map_hash_request_update()
1220 struct mlli_params *mlli_params = &areq_ctx->mlli_params; in cc_map_hash_request_update()
1230 curr_buff, *curr_buff_cnt, nbytes, src, areq_ctx->buff_index); in cc_map_hash_request_update()
1232 areq_ctx->data_dma_buf_type = CC_DMA_BUF_NULL; in cc_map_hash_request_update()
1234 areq_ctx->curr_sg = NULL; in cc_map_hash_request_update()
1236 areq_ctx->in_nents = 0; in cc_map_hash_request_update()
1241 areq_ctx->in_nents = sg_nents_for_len(src, nbytes); in cc_map_hash_request_update()
1242 sg_copy_to_buffer(src, areq_ctx->in_nents, in cc_map_hash_request_update()
1269 rc = cc_set_hash_buf(dev, areq_ctx, curr_buff, *curr_buff_cnt, in cc_map_hash_request_update()
1279 DMA_TO_DEVICE, &areq_ctx->in_nents, in cc_map_hash_request_update()
1285 areq_ctx->data_dma_buf_type == CC_DMA_BUF_NULL) { in cc_map_hash_request_update()
1287 memcpy(areq_ctx->buff_sg, src, in cc_map_hash_request_update()
1289 areq_ctx->buff_sg->length = update_data_len; in cc_map_hash_request_update()
1290 areq_ctx->data_dma_buf_type = CC_DMA_BUF_DLLI; in cc_map_hash_request_update()
1291 areq_ctx->curr_sg = areq_ctx->buff_sg; in cc_map_hash_request_update()
1293 areq_ctx->data_dma_buf_type = CC_DMA_BUF_MLLI; in cc_map_hash_request_update()
1297 if (areq_ctx->data_dma_buf_type == CC_DMA_BUF_MLLI) { in cc_map_hash_request_update()
1300 cc_add_sg_entry(dev, &sg_data, areq_ctx->in_nents, src, in cc_map_hash_request_update()
1302 &areq_ctx->mlli_nents); in cc_map_hash_request_update()
1307 areq_ctx->buff_index = (areq_ctx->buff_index ^ swap_index); in cc_map_hash_request_update()
1312 dma_unmap_sg(dev, src, areq_ctx->in_nents, DMA_TO_DEVICE); in cc_map_hash_request_update()
1316 dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE); in cc_map_hash_request_update()
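
In the update path only a block-multiple of the input can be hashed now; the remainder is copied out of the source scatterlist into the "next" staging buffer so it can be prepended on the following update. A sketch of caching that tail with the generic scatterlist copy helpers, with illustrative names:

#include <linux/scatterlist.h>
#include <linux/types.h>

/*
 * Stash the last 'residue' bytes of an 'nbytes'-long source scatterlist in
 * a linear staging buffer for the next update call.
 */
static void cache_hash_tail_example(struct scatterlist *src,
				    unsigned int nbytes,
				    u8 *next_buff, unsigned int residue)
{
	sg_pcopy_to_buffer(src, sg_nents(src), next_buff, residue,
			   nbytes - residue);
}
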
1324 struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx; in cc_unmap_hash_request() local
1325 u32 *prev_len = cc_next_buf_cnt(areq_ctx); in cc_unmap_hash_request()
1330 if (areq_ctx->mlli_params.curr_pool) { in cc_unmap_hash_request()
1332 &areq_ctx->mlli_params.mlli_dma_addr, in cc_unmap_hash_request()
1333 areq_ctx->mlli_params.mlli_virt_addr); in cc_unmap_hash_request()
1334 dma_pool_free(areq_ctx->mlli_params.curr_pool, in cc_unmap_hash_request()
1335 areq_ctx->mlli_params.mlli_virt_addr, in cc_unmap_hash_request()
1336 areq_ctx->mlli_params.mlli_dma_addr); in cc_unmap_hash_request()
1339 if (src && areq_ctx->in_nents) { in cc_unmap_hash_request()
1343 areq_ctx->in_nents, DMA_TO_DEVICE); in cc_unmap_hash_request()
1348 sg_virt(areq_ctx->buff_sg), in cc_unmap_hash_request()
1349 &sg_dma_address(areq_ctx->buff_sg), in cc_unmap_hash_request()
1350 sg_dma_len(areq_ctx->buff_sg)); in cc_unmap_hash_request()
1351 dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE); in cc_unmap_hash_request()
1358 areq_ctx->buff_index ^= 1; in cc_unmap_hash_request()