1 // SPDX-License-Identifier: BSD-2-Clause
2 /*
3  * Copyright 2018-2021 NXP
4  *
5  * Implementation of Cipher functions
6  */
7 #include <caam_cipher.h>
8 #include <caam_common.h>
9 #include <caam_io.h>
10 #include <caam_jr.h>
11 #include <caam_utils_mem.h>
12 #include <caam_utils_status.h>
13 #include <mm/core_memprot.h>
14 #include <string.h>
15 #include <tee/cache.h>
16 #include <tee/tee_cryp_utl.h>
17 #include <utee_defines.h>
18 
19 #include "local.h"
20 
21 /* Local Function declaration */
22 static TEE_Result do_update_streaming(struct drvcrypt_cipher_update *dupdate);
23 static TEE_Result do_update_cipher(struct drvcrypt_cipher_update *dupdate);
24 
25 /*
26  * Constants definition of the AES algorithm
27  */
static const struct cipheralg aes_alg[] = {
	[TEE_CHAIN_MODE_ECB_NOPAD] = {
		/* AES ECB - stateless, so no context register is used */
		.type = OP_ALGO(AES) | ALGO_AAI(AES_ECB),
		.size_block = TEE_AES_BLOCK_SIZE,
		.size_ctx = 0,
		.ctx_offset = 0,
		.require_key = NEED_KEY1,
		/* AES-128/192/256 key sizes */
		.def_key = { .min = 16, .max = 32, .mod = 8 },
		.update = do_update_cipher,
	},
	[TEE_CHAIN_MODE_CBC_NOPAD] = {
		/* AES CBC - 16-byte chaining value kept in context register */
		.type = OP_ALGO(AES) | ALGO_AAI(AES_CBC),
		.size_block = TEE_AES_BLOCK_SIZE,
		.size_ctx = 2 * sizeof(uint64_t),
		.ctx_offset = 0,
		.require_key = NEED_KEY1 | NEED_IV,
		.def_key = { .min = 16, .max = 32, .mod = 8 },
		.update = do_update_cipher,
	},
	[TEE_CHAIN_MODE_CTR] = {
		/*
		 * AES CTR modulo 2^128 - counter lives in the context
		 * register at byte offset 16. Partial blocks allowed,
		 * hence the streaming update handler.
		 */
		.type = OP_ALGO(AES) | ALGO_AAI(AES_CTR_MOD128),
		.size_block = TEE_AES_BLOCK_SIZE,
		.size_ctx = 2 * sizeof(uint64_t),
		.ctx_offset = 16,
		.require_key = NEED_KEY1 | NEED_IV,
		.def_key = { .min = 16, .max = 32, .mod = 8 },
		.update = do_update_streaming,
	},
	[TEE_CHAIN_MODE_CTS] = {
		/* CTS not supported: type 0 is rejected by get_cipheralgo() */
		.type = 0,
	},
	[TEE_CHAIN_MODE_XTS] = {
		/*
		 * AES XTS - built on the ECB primitive; the tweak is
		 * managed by caam_cipher_update_xts() (see local.h)
		 */
		.type = OP_ALGO(AES) | ALGO_AAI(AES_ECB),
		.size_block = TEE_AES_BLOCK_SIZE,
		.size_ctx = 0,
		.ctx_offset = 0,
		.require_key = NEED_KEY1 | NEED_KEY2 | NEED_TWEAK,
		.def_key = { .min = 16, .max = 32, .mod = 8 },
		.update = caam_cipher_update_xts,
	},
};
69 
70 /*
71  * Constants definition of the DES algorithm
72  */
static const struct cipheralg des_alg[] = {
	[TEE_CHAIN_MODE_ECB_NOPAD] = {
		/* DES ECB - stateless, so no context register is used */
		.type = OP_ALGO(DES) | ALGO_AAI(DES_ECB),
		.size_block = TEE_DES_BLOCK_SIZE,
		.size_ctx = 0,
		.ctx_offset = 0,
		.require_key = NEED_KEY1,
		/* Single-DES: exactly 8-byte key */
		.def_key = { .min = 8, .max = 8, .mod = 8 },
		.update = do_update_cipher,
	},
	[TEE_CHAIN_MODE_CBC_NOPAD] = {
		/* DES CBC - 8-byte chaining value kept in context register */
		.type = OP_ALGO(DES) | ALGO_AAI(DES_CBC),
		.size_block = TEE_DES_BLOCK_SIZE,
		.size_ctx = sizeof(uint64_t),
		.ctx_offset = 0,
		.require_key = NEED_KEY1 | NEED_IV,
		.def_key = { .min = 8, .max = 8, .mod = 8 },
		.update = do_update_cipher,
	},
};
93 
94 /*
95  * Constants definition of the DES3 algorithm
96  */
static const struct cipheralg des3_alg[] = {
	[TEE_CHAIN_MODE_ECB_NOPAD] = {
		/* Triple-DES ECB - stateless, no context register */
		.type = OP_ALGO(3DES) | ALGO_AAI(DES_ECB),
		.size_block = TEE_DES_BLOCK_SIZE,
		.size_ctx = 0,
		.ctx_offset = 0,
		.require_key = NEED_KEY1,
		/* 2-key (16 bytes) or 3-key (24 bytes) Triple-DES */
		.def_key = { .min = 16, .max = 24, .mod = 8 },
		.update = do_update_cipher,
	},
	[TEE_CHAIN_MODE_CBC_NOPAD] = {
		/* Triple-DES CBC No Pad - 8-byte chaining value in context */
		.type = OP_ALGO(3DES) | ALGO_AAI(DES_CBC),
		.size_block = TEE_DES_BLOCK_SIZE,
		.size_ctx = sizeof(uint64_t),
		.ctx_offset = 0,
		.require_key = NEED_KEY1 | NEED_IV,
		.def_key = { .min = 16, .max = 24, .mod = 8 },
		.update = do_update_cipher,
	},
};
118 
119 /*
120  * Allocate context data and copy input data into
121  *
122  * @dst  [out] Destination data to allocate and fill
123  * @src  Source of data to copy
124  */
copy_ctx_data(struct caambuf * dst,struct drvcrypt_buf * src)125 static enum caam_status copy_ctx_data(struct caambuf *dst,
126 				      struct drvcrypt_buf *src)
127 {
128 	enum caam_status ret = CAAM_OUT_MEMORY;
129 
130 	if (!dst->data) {
131 		/* Allocate the destination buffer */
132 		ret = caam_alloc_align_buf(dst, src->length);
133 		if (ret != CAAM_NO_ERROR)
134 			return CAAM_OUT_MEMORY;
135 	}
136 
137 	/* Do the copy */
138 	memcpy(dst->data, src->data, dst->length);
139 
140 	/* Push data to physical memory */
141 	cache_operation(TEE_CACHEFLUSH, dst->data, dst->length);
142 
143 	return CAAM_NO_ERROR;
144 }
145 
146 /*
147  * Verify the input key size with the requirements
148  *
149  * @def  Key requirements
150  * @size Key size to verify
151  */
do_check_keysize(const struct caamdefkey * def,size_t size)152 static enum caam_status do_check_keysize(const struct caamdefkey *def,
153 					 size_t size)
154 {
155 	if (size >= def->min && size <= def->max && !(size % def->mod))
156 		return CAAM_NO_ERROR;
157 
158 	return CAAM_BAD_PARAM;
159 }
160 
/*
 * Build and execute one CAAM cipher job descriptor.
 *
 * @ctx      Cipher context (algorithm, keys, context register, descriptor)
 * @savectx  True to store the context register back after the operation
 * @keyid    Which key to load into the CLASS 1 key register
 *           (NEED_KEY1 or NEED_KEY2); any other value loads no key
 * @encrypt  True for encryption, false for decryption
 * @src      Source DMA object, or NULL if no input data
 * @dst      Destination DMA object, or NULL if no output data
 *
 * Returns CAAM_NO_ERROR on success, CAAM_FAILURE otherwise.
 */
enum caam_status caam_cipher_block(struct cipherdata *ctx, bool savectx,
				   uint8_t keyid, bool encrypt,
				   struct caamdmaobj *src,
				   struct caamdmaobj *dst)
{
	enum caam_status retstatus = CAAM_FAILURE;
	struct caam_jobctx jobctx = { };
	uint32_t *desc = ctx->descriptor;

	caam_desc_init(desc);
	caam_desc_add_word(desc, DESC_HEADER(0));

	if (keyid == NEED_KEY1) {
		/* Build the descriptor */
		caam_desc_add_word(desc, LD_KEY_PLAIN(CLASS_1, REG,
						      ctx->key1.length));
		caam_desc_add_ptr(desc, ctx->key1.paddr);
	} else if (keyid == NEED_KEY2) {
		/* Build the descriptor */
		caam_desc_add_word(desc, LD_KEY_PLAIN(CLASS_1, REG,
						      ctx->key2.length));
		caam_desc_add_ptr(desc, ctx->key2.paddr);
	}

	/* If there is a context register load it */
	if (ctx->ctx.length && ctx->alg->size_ctx) {
		caam_desc_add_word(desc, LD_NOIMM_OFF(CLASS_1, REG_CTX,
						      ctx->ctx.length,
						      ctx->alg->ctx_offset));
		caam_desc_add_ptr(desc, ctx->ctx.paddr);
		/*
		 * Operation with the direction - INIT (not FINAL) because
		 * the chaining state continues across calls
		 */
		caam_desc_add_word(desc, CIPHER_INIT(ctx->alg->type, encrypt));
	} else {
		/* Stateless operation with the direction */
		caam_desc_add_word(desc,
				   CIPHER_INITFINAL(ctx->alg->type, encrypt));
	}

	/* Load the source data if any */
	if (src) {
		caam_desc_fifo_load(desc, src, CLASS_1, MSG, LAST_C1);
		caam_dmaobj_cache_push(src);
	}

	/* Store the output data if any */
	if (dst) {
		caam_desc_fifo_store(desc, dst, MSG_DATA);
		caam_dmaobj_cache_push(dst);
	}

	if (ctx->ctx.length && ctx->alg->size_ctx) {
		if (savectx) {
			/* Store the context back for the next operation */
			caam_desc_add_word(desc,
					   ST_NOIMM_OFF(CLASS_1, REG_CTX,
							ctx->ctx.length,
							ctx->alg->ctx_offset));
			caam_desc_add_ptr(desc, ctx->ctx.paddr);
		}

		/* Ensure Context register data are not in cache */
		cache_operation(TEE_CACHEINVALIDATE, ctx->ctx.data,
				ctx->ctx.length);
	}

	CIPHER_DUMPDESC(desc);

	/* Run the job synchronously on the CAAM Job Ring */
	jobctx.desc = desc;
	retstatus = caam_jr_enqueue(&jobctx, NULL);

	if (retstatus != CAAM_NO_ERROR) {
		CIPHER_TRACE("CAAM return 0x%08x Status 0x%08" PRIx32,
			     retstatus, jobctx.status);
		retstatus = CAAM_FAILURE;
	}

	return retstatus;
}
239 
240 /*
241  * Checks if the algorithm @algo is supported and returns the
242  * local algorithm entry in the corresponding cipher array
243  */
get_cipheralgo(uint32_t algo)244 static const struct cipheralg *get_cipheralgo(uint32_t algo)
245 {
246 	unsigned int algo_id = TEE_ALG_GET_MAIN_ALG(algo);
247 	unsigned int algo_md = TEE_ALG_GET_CHAIN_MODE(algo);
248 	const struct cipheralg *ca = NULL;
249 
250 	CIPHER_TRACE("Algo id:%" PRId32 " md:%" PRId32, algo_id, algo_md);
251 
252 	switch (algo_id) {
253 	case TEE_MAIN_ALGO_AES:
254 		if (algo_md < ARRAY_SIZE(aes_alg))
255 			ca = &aes_alg[algo_md];
256 		break;
257 
258 	case TEE_MAIN_ALGO_DES:
259 		if (algo_md < ARRAY_SIZE(des_alg))
260 			ca = &des_alg[algo_md];
261 		break;
262 
263 	case TEE_MAIN_ALGO_DES3:
264 		if (algo_md < ARRAY_SIZE(des3_alg))
265 			ca = &des3_alg[algo_md];
266 		break;
267 
268 	default:
269 		break;
270 	}
271 
272 	if (ca && ca->type)
273 		return ca;
274 
275 	return NULL;
276 }
277 
278 /*
279  * Allocate the SW cipher data context
280  *
281  * @ctx   [out] Caller context variable
282  * @algo  Algorithm ID of the context
283  */
do_allocate(void ** ctx,uint32_t algo)284 static TEE_Result do_allocate(void **ctx, uint32_t algo)
285 {
286 	TEE_Result ret = TEE_ERROR_NOT_IMPLEMENTED;
287 	struct cipherdata *cipherdata = NULL;
288 	const struct cipheralg *alg = NULL;
289 
290 	CIPHER_TRACE("Allocate Algo 0x%" PRIX32 " Context (%p)", algo, ctx);
291 
292 	alg = get_cipheralgo(algo);
293 	if (!alg) {
294 		CIPHER_TRACE("Algorithm not supported");
295 		return TEE_ERROR_NOT_IMPLEMENTED;
296 	}
297 
298 	cipherdata = caam_calloc(sizeof(*cipherdata));
299 	if (!cipherdata) {
300 		CIPHER_TRACE("Allocation Cipher data error");
301 		return TEE_ERROR_OUT_OF_MEMORY;
302 	}
303 
304 	/* Allocate the descriptor */
305 	cipherdata->descriptor = caam_calloc_desc(MAX_DESC_ENTRIES);
306 	if (!cipherdata->descriptor) {
307 		CIPHER_TRACE("Allocation descriptor error");
308 		ret = TEE_ERROR_OUT_OF_MEMORY;
309 		goto out;
310 	}
311 
312 	/* Setup the Algorithm pointer */
313 	cipherdata->alg = alg;
314 
315 	/* Initialize the block buffer */
316 	cipherdata->blockbuf.max = cipherdata->alg->size_block;
317 
318 	*ctx = cipherdata;
319 
320 	return TEE_SUCCESS;
321 
322 out:
323 	caam_free_desc(&cipherdata->descriptor);
324 	caam_free(cipherdata);
325 
326 	return ret;
327 }
328 
329 /*
330  * Free the internal cipher data context
331  *
332  * @ctx    Caller context variable or NULL
333  */
do_free_intern(struct cipherdata * ctx)334 static void do_free_intern(struct cipherdata *ctx)
335 {
336 	CIPHER_TRACE("Free Context (%p)", ctx);
337 
338 	if (ctx) {
339 		/* Free the descriptor */
340 		caam_free_desc(&ctx->descriptor);
341 
342 		/* Free the Key 1  */
343 		caam_free_buf(&ctx->key1);
344 
345 		/* Free the Key 2  */
346 		caam_free_buf(&ctx->key2);
347 
348 		/* Free the Tweak */
349 		caam_free_buf(&ctx->tweak);
350 
351 		/* Free the Context Register */
352 		caam_free_buf(&ctx->ctx);
353 
354 		/* Free Temporary buffer */
355 		caam_free_buf(&ctx->blockbuf.buf);
356 	}
357 }
358 
/*
 * Free the whole cipher context, internal buffers included.
 *
 * @ctx    Caller context variable or NULL
 */
void caam_cipher_free(void *ctx)
{
	CIPHER_TRACE("Free Context (%p)", ctx);

	if (!ctx)
		return;

	do_free_intern(ctx);
	caam_free(ctx);
}
368 
/*
 * Copy the running state of a cipher operation from one context to
 * another (used to implement operation cloning). Each buffer of the
 * source is duplicated into the destination only if it holds data.
 *
 * @dst_ctx  [out] Destination cipher context
 * @src_ctx  Source cipher context
 */
void caam_cipher_copy_state(void *dst_ctx, void *src_ctx)
{
	struct cipherdata *dst = dst_ctx;
	struct cipherdata *src = src_ctx;

	CIPHER_TRACE("Copy State context (%p) to (%p)", src_ctx, dst_ctx);

	dst->alg = src->alg;
	dst->encrypt = src->encrypt;

	/* Duplicate any pending partial-block data */
	if (src->blockbuf.filled) {
		struct caambuf srcdata = {
			.data = src->blockbuf.buf.data,
			.length = src->blockbuf.filled
		};
		caam_cpy_block_src(&dst->blockbuf, &srcdata, 0);
	}

	if (src->key1.length) {
		struct drvcrypt_buf key1 = {
			.data = src->key1.data,
			.length = src->key1.length
		};
		copy_ctx_data(&dst->key1, &key1);
	}

	if (src->key2.length) {
		struct drvcrypt_buf key2 = {
			.data = src->key2.data,
			.length = src->key2.length
		};
		copy_ctx_data(&dst->key2, &key2);
	}

	if (src->ctx.length) {
		struct drvcrypt_buf ctx = {
			.data = src->ctx.data,
			.length = src->ctx.length
		};
		/*
		 * The context register may have been updated by CAAM
		 * (DMA); invalidate the cache before reading it
		 */
		cache_operation(TEE_CACHEINVALIDATE, ctx.data, ctx.length);
		copy_ctx_data(&dst->ctx, &ctx);
	}

	if (src->tweak.length) {
		struct drvcrypt_buf tweak = {
			.data = src->tweak.data,
			.length = src->tweak.length
		};
		copy_ctx_data(&dst->tweak, &tweak);
	}
}
420 
caam_cipher_initialize(struct drvcrypt_cipher_init * dinit)421 TEE_Result caam_cipher_initialize(struct drvcrypt_cipher_init *dinit)
422 {
423 	TEE_Result ret = TEE_ERROR_BAD_PARAMETERS;
424 	enum caam_status retstatus = CAAM_FAILURE;
425 	struct cipherdata *cipherdata = dinit->ctx;
426 	const struct cipheralg *alg = NULL;
427 
428 	CIPHER_TRACE("Action %s", dinit->encrypt ? "Encrypt" : "Decrypt");
429 
430 	if (!cipherdata)
431 		return ret;
432 
433 	alg = cipherdata->alg;
434 
435 	/* Check if all required keys are defined */
436 	if (alg->require_key & NEED_KEY1) {
437 		if (!dinit->key1.data || !dinit->key1.length)
438 			goto out;
439 
440 		retstatus = do_check_keysize(&alg->def_key, dinit->key1.length);
441 		if (retstatus != CAAM_NO_ERROR) {
442 			CIPHER_TRACE("Bad Key 1 size");
443 			goto out;
444 		}
445 
446 		/* Copy the key 1 */
447 		retstatus = copy_ctx_data(&cipherdata->key1, &dinit->key1);
448 		CIPHER_TRACE("Copy Key 1 returned 0x%" PRIx32, retstatus);
449 
450 		if (retstatus != CAAM_NO_ERROR) {
451 			ret = TEE_ERROR_OUT_OF_MEMORY;
452 			goto out;
453 		}
454 	}
455 
456 	if (alg->require_key & NEED_KEY2) {
457 		if (!dinit->key2.data || !dinit->key2.length)
458 			goto out;
459 
460 		retstatus = do_check_keysize(&alg->def_key, dinit->key2.length);
461 		if (retstatus != CAAM_NO_ERROR) {
462 			CIPHER_TRACE("Bad Key 2 size");
463 			goto out;
464 		}
465 
466 		/* Copy the key 2 */
467 		retstatus = copy_ctx_data(&cipherdata->key2, &dinit->key2);
468 		CIPHER_TRACE("Copy Key 2 returned 0x%" PRIx32, retstatus);
469 
470 		if (retstatus != CAAM_NO_ERROR) {
471 			ret = TEE_ERROR_OUT_OF_MEMORY;
472 			goto out;
473 		}
474 	}
475 
476 	if (alg->require_key & NEED_IV) {
477 		if (!dinit->iv.data || !dinit->iv.length)
478 			goto out;
479 
480 		if (dinit->iv.length != alg->size_ctx) {
481 			CIPHER_TRACE("Bad IV size %zu (expected %" PRId32 ")",
482 				     dinit->iv.length, alg->size_ctx);
483 			goto out;
484 		}
485 
486 		CIPHER_TRACE("Allocate CAAM Context Register (%" PRId32
487 			     " bytes)",
488 			     alg->size_ctx);
489 
490 		/* Copy the IV into the context register */
491 		retstatus = copy_ctx_data(&cipherdata->ctx, &dinit->iv);
492 		CIPHER_TRACE("Copy IV returned 0x%" PRIx32, retstatus);
493 
494 		if (retstatus != CAAM_NO_ERROR) {
495 			ret = TEE_ERROR_OUT_OF_MEMORY;
496 			goto out;
497 		}
498 	}
499 
500 	if (alg->require_key & NEED_TWEAK) {
501 		/* This is accepted to start with a NULL Tweak */
502 		if (dinit->iv.length) {
503 			if (dinit->iv.length != alg->size_block) {
504 				CIPHER_TRACE("Bad tweak 2 size");
505 				goto out;
506 			}
507 
508 			/* Copy the tweak */
509 			retstatus = copy_ctx_data(&cipherdata->tweak,
510 						  &dinit->iv);
511 			CIPHER_TRACE("Copy Tweak returned 0x%" PRIx32,
512 				     retstatus);
513 
514 			if (retstatus != CAAM_NO_ERROR) {
515 				ret = TEE_ERROR_OUT_OF_MEMORY;
516 				goto out;
517 			}
518 		} else {
519 			/* Create tweak 0's */
520 			if (!cipherdata->tweak.data) {
521 				/*
522 				 * Allocate the destination buffer and
523 				 * fill it with 0's
524 				 */
525 				ret = caam_calloc_align_buf(&cipherdata->tweak,
526 							    alg->size_block);
527 				if (ret != CAAM_NO_ERROR)
528 					goto out;
529 			} else {
530 				/* Fill it with 0's */
531 				memset(cipherdata->tweak.data, 0,
532 				       cipherdata->tweak.length);
533 			}
534 
535 			/* Push data to physical memory */
536 			cache_operation(TEE_CACHEFLUSH, cipherdata->tweak.data,
537 					cipherdata->tweak.length);
538 		}
539 	}
540 
541 	/* Save the operation direction */
542 	cipherdata->encrypt = dinit->encrypt;
543 	cipherdata->blockbuf.filled = 0;
544 
545 	ret = TEE_SUCCESS;
546 
547 out:
548 	/* Free the internal context in case of error */
549 	if (ret != TEE_SUCCESS)
550 		do_free_intern(cipherdata);
551 
552 	return ret;
553 }
554 
555 /*
556  * Update of the cipher operation in streaming mode, meaning
557  * doing partial intermediate block.
558  * If there is a context, the context is saved only when a
559  * full block is done.
560  * The partial block (if not the last block) is encrypted or
 * decrypted to return the result and it is saved to be concatenated
562  * to next data to rebuild a full block.
563  *
564  * @dupdate  Data update object
565  */
do_update_streaming(struct drvcrypt_cipher_update * dupdate)566 static TEE_Result do_update_streaming(struct drvcrypt_cipher_update *dupdate)
567 {
568 	TEE_Result ret = TEE_ERROR_GENERIC;
569 	enum caam_status retstatus = CAAM_FAILURE;
570 	struct cipherdata *ctx = dupdate->ctx;
571 	struct caamdmaobj src = { };
572 	struct caamdmaobj dst = { };
573 	struct caamblock trash_bck = { };
574 	size_t fullsize = 0;
575 	size_t size_topost = 0;
576 	size_t size_todo = 0;
577 	size_t size_inmade = 0;
578 	size_t size_done = 0;
579 	size_t offset = 0;
580 
581 	CIPHER_TRACE("Length=%zu - %s", dupdate->src.length,
582 		     ctx->encrypt ? "Encrypt" : "Decrypt");
583 
584 	/* Calculate the total data to be handled */
585 	fullsize = ctx->blockbuf.filled + dupdate->src.length;
586 	CIPHER_TRACE("Fullsize %zu", fullsize);
587 	if (fullsize < ctx->alg->size_block) {
588 		size_topost = dupdate->src.length;
589 		goto end_streaming_post;
590 	} else {
591 		size_topost = fullsize % ctx->alg->size_block;
592 		/* Total size that is a cipher block multiple */
593 		size_todo = fullsize - size_topost;
594 		size_inmade = size_todo - ctx->blockbuf.filled;
595 	}
596 
597 	CIPHER_TRACE("FullSize %zu - posted %zu - todo %zu", fullsize,
598 		     size_topost, size_todo);
599 
600 	if (size_todo) {
601 		ret = caam_dmaobj_init_input(&src, dupdate->src.data,
602 					     dupdate->src.length);
603 		if (ret)
604 			goto end_streaming;
605 
606 		ret = caam_dmaobj_init_output(&dst, dupdate->dst.data,
607 					      dupdate->dst.length,
608 					      dupdate->dst.length);
609 		if (ret)
610 			goto end_streaming;
611 
612 		ret = caam_dmaobj_prepare(&src, &dst, ctx->alg->size_block);
613 		if (ret)
614 			goto end_streaming;
615 	}
616 
617 	/*
618 	 * Check first if there is some data saved to complete the
619 	 * buffer.
620 	 */
621 	if (ctx->blockbuf.filled) {
622 		ret = caam_dmaobj_add_first_block(&src, &ctx->blockbuf);
623 		if (ret)
624 			goto end_streaming;
625 
626 		ret = caam_dmaobj_add_first_block(&dst, &ctx->blockbuf);
627 		if (ret)
628 			goto end_streaming;
629 
630 		ctx->blockbuf.filled = 0;
631 	}
632 
633 	size_done = size_todo;
634 	dupdate->dst.length = 0;
635 	for (offset = 0; size_todo;
636 	     offset += size_done, size_todo -= size_done) {
637 		CIPHER_TRACE("Do input %zu bytes (%zu), offset %zu", size_done,
638 			     size_todo, offset);
639 
640 		size_done = size_todo;
641 		ret = caam_dmaobj_sgtbuf_inout_build(&src, &dst, &size_done,
642 						     offset,
643 						     ctx->alg->size_block);
644 		if (ret)
645 			goto end_streaming;
646 
647 		retstatus = caam_cipher_block(ctx, true, NEED_KEY1,
648 					      ctx->encrypt, &src, &dst);
649 
650 		if (retstatus != CAAM_NO_ERROR) {
651 			ret = caam_status_to_tee_result(retstatus);
652 			goto end_streaming;
653 		}
654 
655 		dupdate->dst.length += caam_dmaobj_copy_to_orig(&dst);
656 	}
657 
658 	CIPHER_DUMPBUF("Source", dupdate->src.data, dupdate->src.length);
659 	CIPHER_DUMPBUF("Result", dupdate->dst.data, dupdate->dst.length);
660 
661 end_streaming_post:
662 	if (size_topost) {
663 		/*
664 		 * Save the input data in the block buffer for next operation
665 		 * and prepare the source DMA Object with the overall saved
666 		 * data to generate destination bytes.
667 		 */
668 		struct caambuf cpysrc = {
669 			.data = dupdate->src.data,
670 			.length = dupdate->src.length
671 		};
672 
673 		caam_dmaobj_free(&src);
674 		caam_dmaobj_free(&dst);
675 		CIPHER_TRACE("Save input data %zu bytes (done %zu) - off %zu",
676 			     size_topost, size_inmade, offset);
677 
678 		size_todo = size_topost + ctx->blockbuf.filled;
679 
680 		/*
681 		 * Prepare the destination DMA Object:
682 		 *  - Use given destination parameter bytes to return
683 		 *  - If the previous operation saved data, use a trash
684 		 *    buffer to do the operation but not use unneeded data.
685 		 */
686 		ret = caam_dmaobj_init_output(&dst,
687 					      dupdate->dst.data + size_inmade,
688 					      size_topost, size_topost);
689 		if (ret)
690 			goto end_streaming;
691 
692 		ret = caam_dmaobj_prepare(NULL, &dst, ctx->alg->size_block);
693 		if (ret)
694 			goto end_streaming;
695 
696 		if (ctx->blockbuf.filled) {
697 			/*
698 			 * Because there are some bytes to trash, use
699 			 * a block buffer that will be added to the
700 			 * destination SGT/Buffer structure to do the
701 			 * cipher operation.
702 			 */
703 			ret = caam_alloc_align_buf(&trash_bck.buf,
704 						   ctx->blockbuf.filled);
705 			if (ret != CAAM_NO_ERROR) {
706 				CIPHER_TRACE("Allocation Trash Block error");
707 				goto end_streaming;
708 			}
709 			trash_bck.filled = ctx->blockbuf.filled;
710 
711 			ret = caam_dmaobj_add_first_block(&dst, &trash_bck);
712 			if (ret)
713 				goto end_streaming;
714 		}
715 
716 		retstatus = caam_cpy_block_src(&ctx->blockbuf, &cpysrc,
717 					       size_inmade);
718 		if (retstatus != CAAM_NO_ERROR) {
719 			ret = caam_status_to_tee_result(retstatus);
720 			goto end_streaming;
721 		}
722 
723 		ret = caam_dmaobj_init_input(&src, ctx->blockbuf.buf.data,
724 					     ctx->blockbuf.filled);
725 		if (ret)
726 			goto end_streaming;
727 
728 		ret = caam_dmaobj_prepare(&src, NULL, ctx->alg->size_block);
729 		if (ret)
730 			goto end_streaming;
731 
732 		/*
733 		 * Build input and output DMA Object with the same size.
734 		 */
735 		size_done = size_todo;
736 		ret = caam_dmaobj_sgtbuf_inout_build(&src, &dst, &size_done, 0,
737 						     size_todo);
738 		if (ret)
739 			goto end_streaming;
740 
741 		if (size_todo != size_done) {
742 			CIPHER_TRACE("Invalid end streaming size %zu vs %zu",
743 				     size_done, size_todo);
744 			ret = TEE_ERROR_GENERIC;
745 			goto end_streaming;
746 		}
747 
748 		retstatus = caam_cipher_block(ctx, false, NEED_KEY1,
749 					      ctx->encrypt, &src, &dst);
750 
751 		if (retstatus != CAAM_NO_ERROR) {
752 			ret = caam_status_to_tee_result(retstatus);
753 			goto end_streaming;
754 		}
755 
756 		dupdate->dst.length += caam_dmaobj_copy_to_orig(&dst);
757 
758 		CIPHER_DUMPBUF("Source", ctx->blockbuf.buf.data,
759 			       ctx->blockbuf.filled);
760 		CIPHER_DUMPBUF("Result", dupdate->dst.data + size_inmade,
761 			       size_topost);
762 	}
763 
764 	ret = TEE_SUCCESS;
765 
766 end_streaming:
767 	caam_dmaobj_free(&src);
768 	caam_dmaobj_free(&dst);
769 
770 	/* Free Trash block buffer */
771 	caam_free_buf(&trash_bck.buf);
772 
773 	return ret;
774 }
775 
776 /*
777  * Update of the cipher operation with complete block except
778  * if last block. Last block can be partial block.
779  *
780  * @dupdate  Data update object
781  */
static TEE_Result do_update_cipher(struct drvcrypt_cipher_update *dupdate)
{
	TEE_Result ret = TEE_ERROR_GENERIC;
	enum caam_status retstatus = CAAM_FAILURE;
	struct cipherdata *ctx = dupdate->ctx;
	struct caamdmaobj src = { };
	struct caamdmaobj dst = { };
	size_t offset = 0;
	size_t size_todo = 0;
	size_t size_done = 0;

	CIPHER_TRACE("Length=%zu - %s", dupdate->src.length,
		     (ctx->encrypt ? "Encrypt" : "Decrypt"));

	/*
	 * Check the length of the payload/cipher to be at least
	 * one or n cipher block.
	 */
	if (dupdate->src.length < ctx->alg->size_block ||
	    dupdate->src.length % ctx->alg->size_block) {
		CIPHER_TRACE("Bad payload/cipher size %zu bytes",
			     dupdate->src.length);
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Map source and destination into DMA objects */
	ret = caam_dmaobj_init_input(&src, dupdate->src.data,
				     dupdate->src.length);
	if (ret)
		goto end_cipher;

	ret = caam_dmaobj_init_output(&dst, dupdate->dst.data,
				      dupdate->dst.length, dupdate->dst.length);
	if (ret)
		goto end_cipher;

	ret = caam_dmaobj_prepare(&src, &dst, ctx->alg->size_block);
	if (ret)
		goto end_cipher;

	/*
	 * Process the input chunk by chunk; each iteration handles as
	 * many whole blocks as the SGT/buffer builder accepts and
	 * saves the chaining context for the next one.
	 */
	size_todo = dupdate->src.length;
	dupdate->dst.length = 0;
	for (offset = 0; size_todo;
	     offset += size_done, size_todo -= size_done) {
		size_done = size_todo;
		CIPHER_TRACE("Do input %zu bytes, offset %zu", size_done,
			     offset);
		ret = caam_dmaobj_sgtbuf_inout_build(&src, &dst, &size_done,
						     offset,
						     ctx->alg->size_block);
		if (ret)
			goto end_cipher;

		retstatus = caam_cipher_block(ctx, true, NEED_KEY1,
					      ctx->encrypt, &src, &dst);

		if (retstatus != CAAM_NO_ERROR) {
			ret = caam_status_to_tee_result(retstatus);
			goto end_cipher;
		}

		/* Copy CAAM output back to the caller's buffer */
		dupdate->dst.length += caam_dmaobj_copy_to_orig(&dst);
	}

	ret = TEE_SUCCESS;

end_cipher:
	caam_dmaobj_free(&src);
	caam_dmaobj_free(&dst);

	return ret;
}
853 
854 /*
855  * Update of the cipher operation. Call the algorithm update
856  * function associated.
857  *
858  * @dupdate  Data update object
859  */
do_update(struct drvcrypt_cipher_update * dupdate)860 static TEE_Result do_update(struct drvcrypt_cipher_update *dupdate)
861 {
862 	struct cipherdata *cipherdata = dupdate->ctx;
863 
864 	return cipherdata->alg->update(dupdate);
865 }
866 
867 /*
868  * Finalize of the cipher operation
869  *
870  * @ctx    Caller context variable or NULL
871  */
static void do_final(void *ctx __unused)
{
	/* Nothing to finalize for cipher operations */
}
875 
876 /*
877  * Registration of the Cipher Driver
878  */
/* Operation handlers registered with the drvcrypt framework */
static struct drvcrypt_cipher driver_cipher = {
	.alloc_ctx = do_allocate,
	.free_ctx = caam_cipher_free,
	.init = caam_cipher_initialize,
	.update = do_update,
	.final = do_final,
	.copy_state = caam_cipher_copy_state,
};
887 
888 /*
889  * Initialize the Cipher module
890  *
891  * @ctrl_addr   Controller base address
892  */
/*
 * Initialize the Cipher module: register the driver with the
 * drvcrypt framework.
 *
 * @ctrl_addr   Controller base address (unused)
 */
enum caam_status caam_cipher_init(vaddr_t ctrl_addr __unused)
{
	if (drvcrypt_register_cipher(&driver_cipher) != TEE_SUCCESS)
		return CAAM_FAILURE;

	return CAAM_NO_ERROR;
}
902