// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <initcall.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/mutex.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <string.h>
#include <utee_defines.h>
#include <util.h>

#include "stm32_cryp.h"
#include "common.h"

#define INT8_BIT			8U
#define AES_BLOCK_SIZE_BIT		128U
#define AES_BLOCK_SIZE			(AES_BLOCK_SIZE_BIT / INT8_BIT)
#define AES_BLOCK_NB_U32		(AES_BLOCK_SIZE / sizeof(uint32_t))
#define DES_BLOCK_SIZE_BIT		64U
#define DES_BLOCK_SIZE			(DES_BLOCK_SIZE_BIT / INT8_BIT)
#define DES_BLOCK_NB_U32		(DES_BLOCK_SIZE / sizeof(uint32_t))
#define MAX_BLOCK_SIZE_BIT		AES_BLOCK_SIZE_BIT
#define MAX_BLOCK_SIZE			AES_BLOCK_SIZE
#define MAX_BLOCK_NB_U32		AES_BLOCK_NB_U32
#define AES_KEYSIZE_128			16U
#define AES_KEYSIZE_192			24U
#define AES_KEYSIZE_256			32U

/* CRYP control register */
#define _CRYP_CR			0x0U
/* CRYP status register */
#define _CRYP_SR			0x04U
/* CRYP data input register */
#define _CRYP_DIN			0x08U
/* CRYP data output register */
#define _CRYP_DOUT			0x0CU
/* CRYP DMA control register */
#define _CRYP_DMACR			0x10U
/* CRYP interrupt mask set/clear register */
#define _CRYP_IMSCR			0x14U
/* CRYP raw interrupt status register */
#define _CRYP_RISR			0x18U
/* CRYP masked interrupt status register */
#define _CRYP_MISR			0x1CU
/* CRYP key registers */
#define _CRYP_K0LR			0x20U
#define _CRYP_K0RR			0x24U
#define _CRYP_K1LR			0x28U
#define _CRYP_K1RR			0x2CU
#define _CRYP_K2LR			0x30U
#define _CRYP_K2RR			0x34U
#define _CRYP_K3LR			0x38U
#define _CRYP_K3RR			0x3CU
/* CRYP initialization vector registers */
#define _CRYP_IV0LR			0x40U
#define _CRYP_IV0RR			0x44U
#define _CRYP_IV1LR			0x48U
#define _CRYP_IV1RR			0x4CU
/* CRYP context swap GCM-CCM registers */
#define _CRYP_CSGCMCCM0R		0x50U
#define _CRYP_CSGCMCCM1R		0x54U
#define _CRYP_CSGCMCCM2R		0x58U
#define _CRYP_CSGCMCCM3R		0x5CU
#define _CRYP_CSGCMCCM4R		0x60U
#define _CRYP_CSGCMCCM5R		0x64U
#define _CRYP_CSGCMCCM6R		0x68U
#define _CRYP_CSGCMCCM7R		0x6CU
/* CRYP context swap GCM registers */
#define _CRYP_CSGCM0R			0x70U
#define _CRYP_CSGCM1R			0x74U
#define _CRYP_CSGCM2R			0x78U
#define _CRYP_CSGCM3R			0x7CU
#define _CRYP_CSGCM4R			0x80U
#define _CRYP_CSGCM5R			0x84U
#define _CRYP_CSGCM6R			0x88U
#define _CRYP_CSGCM7R			0x8CU
/* CRYP hardware configuration register */
#define _CRYP_HWCFGR			0x3F0U
/* CRYP HW version register */
#define _CRYP_VERR			0x3F4U
/* CRYP identification */
#define _CRYP_IPIDR			0x3F8U
/* CRYP HW magic ID */
#define _CRYP_MID			0x3FCU

#define CRYP_TIMEOUT_US			1000000U
#define TIMEOUT_US_1MS			1000U

/* CRYP control register fields */
#define _CRYP_CR_RESET_VALUE		0x0U
#define _CRYP_CR_NPBLB_MSK		GENMASK_32(23, 20)
#define _CRYP_CR_NPBLB_OFF		20U
#define _CRYP_CR_GCM_CCMPH_MSK		GENMASK_32(17, 16)
#define _CRYP_CR_GCM_CCMPH_OFF		16U
#define _CRYP_CR_GCM_CCMPH_INIT		0U
#define _CRYP_CR_GCM_CCMPH_HEADER	1U
#define _CRYP_CR_GCM_CCMPH_PAYLOAD	2U
#define _CRYP_CR_GCM_CCMPH_FINAL	3U
#define _CRYP_CR_CRYPEN			BIT(15)
#define _CRYP_CR_FFLUSH			BIT(14)
#define _CRYP_CR_KEYSIZE_MSK		GENMASK_32(9, 8)
#define _CRYP_CR_KEYSIZE_OFF		8U
#define _CRYP_CR_KSIZE_128		0U
#define _CRYP_CR_KSIZE_192		1U
#define _CRYP_CR_KSIZE_256		2U
#define _CRYP_CR_DATATYPE_MSK		GENMASK_32(7, 6)
#define _CRYP_CR_DATATYPE_OFF		6U
#define _CRYP_CR_DATATYPE_NONE		0U
#define _CRYP_CR_DATATYPE_HALF_WORD	1U
#define _CRYP_CR_DATATYPE_BYTE		2U
#define _CRYP_CR_DATATYPE_BIT		3U
#define _CRYP_CR_ALGOMODE_MSK		(BIT(19) | GENMASK_32(5, 3))
#define _CRYP_CR_ALGOMODE_OFF		3U
#define _CRYP_CR_ALGOMODE_TDES_ECB	0x0U
#define _CRYP_CR_ALGOMODE_TDES_CBC	0x1U
#define _CRYP_CR_ALGOMODE_DES_ECB	0x2U
#define _CRYP_CR_ALGOMODE_DES_CBC	0x3U
#define _CRYP_CR_ALGOMODE_AES_ECB	0x4U
#define _CRYP_CR_ALGOMODE_AES_CBC	0x5U
#define _CRYP_CR_ALGOMODE_AES_CTR	0x6U
#define _CRYP_CR_ALGOMODE_AES		0x7U
#define _CRYP_CR_ALGOMODE_AES_GCM	BIT(16)
#define _CRYP_CR_ALGOMODE_AES_CCM	(BIT(16) | BIT(0))
#define _CRYP_CR_ALGODIR		BIT(2)
#define _CRYP_CR_ALGODIR_ENC		0U
#define _CRYP_CR_ALGODIR_DEC		BIT(2)

/* CRYP status register fields */
#define _CRYP_SR_BUSY			BIT(4)
#define _CRYP_SR_OFFU			BIT(3)
#define _CRYP_SR_OFNE			BIT(2)
#define _CRYP_SR_IFNF			BIT(1)
#define _CRYP_SR_IFEM			BIT(0)

/* CRYP DMA control register fields */
#define _CRYP_DMACR_DOEN		BIT(1)
#define _CRYP_DMACR_DIEN		BIT(0)

/* CRYP interrupt fields */
#define _CRYP_I_OUT			BIT(1)
#define _CRYP_I_IN			BIT(0)

/* CRYP hardware configuration register fields */
#define _CRYP_HWCFGR_CFG1_MSK		GENMASK_32(3, 0)
#define _CRYP_HWCFGR_CFG1_OFF		0U
#define _CRYP_HWCFGR_CFG2_MSK		GENMASK_32(7, 4)
#define _CRYP_HWCFGR_CFG2_OFF		4U
#define _CRYP_HWCFGR_CFG3_MSK		GENMASK_32(11, 8)
#define _CRYP_HWCFGR_CFG3_OFF		8U
#define _CRYP_HWCFGR_CFG4_MSK		GENMASK_32(15, 12)
#define _CRYP_HWCFGR_CFG4_OFF		12U

/* CRYP HW version register */
#define _CRYP_VERR_MSK			GENMASK_32(7, 0)
#define _CRYP_VERR_OFF			0U
/*
 * Macros to manage bit manipulation when working on a local variable
 * before writing it only once to the real register.
 */
#define CLRBITS(v, bits)		((v) &= ~(bits))
#define SETBITS(v, bits)		((v) |= (bits))

#define IS_ALGOMODE(cr, mod) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) == (_CRYP_CR_ALGOMODE_##mod << \
					  _CRYP_CR_ALGOMODE_OFF))

#define SET_ALGOMODE(mod, cr) \
	clrsetbits(&(cr), _CRYP_CR_ALGOMODE_MSK, (_CRYP_CR_ALGOMODE_##mod << \
						  _CRYP_CR_ALGOMODE_OFF))

#define GET_ALGOMODE(cr) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF)

#define TOBE32(x)			TEE_U32_BSWAP(x)
#define FROMBE32(x)			TEE_U32_BSWAP(x)

static struct stm32_cryp_platdata cryp_pdata;
static struct mutex cryp_lock = MUTEX_INITIALIZER;

static void clrsetbits(uint32_t *v, uint32_t mask, uint32_t bits)
{
	*v = (*v & ~mask) | bits;
}

static bool algo_mode_needs_iv(uint32_t cr)
{
	return !IS_ALGOMODE(cr, TDES_ECB) && !IS_ALGOMODE(cr, DES_ECB) &&
		!IS_ALGOMODE(cr, AES_ECB);
}

static bool algo_mode_is_ecb_cbc(uint32_t cr)
{
	return GET_ALGOMODE(cr) < _CRYP_CR_ALGOMODE_AES_CTR;
}

static bool algo_mode_is_aes(uint32_t cr)
{
	return ((cr & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF) >=
		_CRYP_CR_ALGOMODE_AES_ECB;
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _CRYP_CR_ALGODIR) == _CRYP_CR_ALGODIR_DEC;
}

static bool is_encrypt(uint32_t cr)
{
	return !is_decrypt(cr);
}

static bool does_need_npblb(uint32_t cr)
{
	return (IS_ALGOMODE(cr, AES_GCM) && is_encrypt(cr)) ||
	       (IS_ALGOMODE(cr, AES_CCM) && is_decrypt(cr));
}

static TEE_Result wait_sr_bits(vaddr_t base, uint32_t bits)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while ((io_read32(base + _CRYP_SR) & bits) != bits)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _CRYP_SR) & bits) != bits)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_busy(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_enable(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_align_block(struct stm32_cryp_context *ctx,
						 uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_IFNF);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, data[i]);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_block(struct stm32_cryp_context *ctx,
					   uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(data_u32, data, ctx->block_u32 * sizeof(uint32_t));
		return write_align_block(ctx, data_u32);
	}

	return write_align_block(ctx, (void *)data);
}

static TEE_Result __must_check read_align_block(struct stm32_cryp_context *ctx,
						uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_OFNE);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		data[i] = io_read32(ctx->base + _CRYP_DOUT);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check read_block(struct stm32_cryp_context *ctx,
					  uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		TEE_Result res = TEE_SUCCESS;
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		res = read_align_block(ctx, data_u32);
		if (res)
			return res;

		memcpy(data, data_u32, ctx->block_u32 * sizeof(uint32_t));

		return TEE_SUCCESS;
	}

	return read_align_block(ctx, (void *)data);
}

static void cryp_end(struct stm32_cryp_context *ctx, TEE_Result prev_error)
{
	if (prev_error) {
		stm32_reset_assert(cryp_pdata.reset_id, TIMEOUT_US_1MS);
		stm32_reset_deassert(cryp_pdata.reset_id, TIMEOUT_US_1MS);
	}

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
}

static void cryp_write_iv(struct stm32_cryp_context *ctx)
{
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Restore the _CRYP_IVRx */
		for (i = 0; i < ctx->block_u32; i++)
			io_write32(ctx->base + _CRYP_IV0LR + i *
				   sizeof(uint32_t), ctx->iv[i]);
	}
}

static void cryp_save_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			ctx->pm_gcmccm[i] = io_read32(ctx->base +
						      _CRYP_CSGCMCCM0R +
						      i * sizeof(uint32_t));

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			ctx->pm_gcm[i] = io_read32(ctx->base + _CRYP_CSGCM0R +
						   i * sizeof(uint32_t));
}

static void cryp_restore_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			io_write32(ctx->base + _CRYP_CSGCMCCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcmccm[i]);

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			io_write32(ctx->base + _CRYP_CSGCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcm[i]);
}

static void cryp_write_key(struct stm32_cryp_context *ctx)
{
	vaddr_t reg = 0;
	int i = 0;
	uint32_t algo = GET_ALGOMODE(ctx->cr);

	if (algo == _CRYP_CR_ALGOMODE_DES_ECB ||
	    algo == _CRYP_CR_ALGOMODE_DES_CBC)
		reg = ctx->base + _CRYP_K1RR;
	else
		reg = ctx->base + _CRYP_K3RR;

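	/*
	 * Illustrative note (derived from the loop below, not from the
	 * reference manual): the key is written back to front from the
	 * highest key register. E.g. for AES-128 (4 words), key[3] lands
	 * in K3RR, key[2] in K3LR, key[1] in K2RR and key[0] in K2LR;
	 * a single-DES key (2 words) ends up in K1RR/K1LR instead.
	 */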
	for (i = ctx->key_size / sizeof(uint32_t) - 1;
	     i >= 0;
	     i--, reg -= sizeof(uint32_t))
		io_write32(reg, ctx->key[i]);
}

static TEE_Result cryp_prepare_key(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * For AES ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if (is_decrypt(ctx->cr) && (IS_ALGOMODE(ctx->cr, AES_ECB) ||
				    IS_ALGOMODE(ctx->cr, AES_CBC))) {
		/* Select Algomode "prepare key" */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				_CRYP_CR_ALGOMODE_AES << _CRYP_CR_ALGOMODE_OFF);

		cryp_write_key(ctx);

		/* Enable CRYP */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		res = wait_end_busy(ctx->base);
		if (res)
			return res;

		/* Reset 'real' algomode */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				ctx->cr & _CRYP_CR_ALGOMODE_MSK);
	} else {
		cryp_write_key(ctx);
	}

	return TEE_SUCCESS;
}

static TEE_Result save_context(struct stm32_cryp_context *ctx)
{
	/* Device should not be in a processing phase */
	if (io_read32(ctx->base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BAD_STATE;

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _CRYP_CR);

	cryp_save_suspend(ctx);

	/* If algo mode needs to save current IV */
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Save IV */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = io_read32(ctx->base + _CRYP_IV0LR + i *
					       sizeof(uint32_t));
	}

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Restore the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write key and, in case of AES_CBC or AES_ECB decrypt, prepare it */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	cryp_restore_suspend(ctx);

	cryp_write_iv(ctx);

	/* Flush internal fifo */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_FFLUSH);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

/*
 * Translate a byte index in an array of BE uint32_t into the index of the
 * same byte in the corresponding LE uint32_t array. For example, byte
 * index 0 maps to 3, and byte index 4 maps to 7.
 */
static size_t be_index(size_t index)
{
	return (index & ~0x3) + 3 - (index & 0x3);
}

static TEE_Result ccm_first_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t b0[AES_BLOCK_NB_U32] = { 0 };
	uint8_t *iv = (uint8_t *)ctx->iv;
	size_t l = 0;
	size_t i = 15;

	/* IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BAD_STATE;

	/* Write the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write key */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	/* Save full IV that will be b0 */
	memcpy(b0, iv, sizeof(b0));

	/*
	 * Update IV to become CTR0/1 before setting it.
	 * IV is saved as LE uint32_t[4] as expected by hardware,
	 * but CCM RFC defines bytes to update in a BE array.
	 */
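	/*
	 * Worked example (illustrative, not from the code): with a 13-byte
	 * nonce, the [q-1] flag bits equal 1, so l below evaluates to 2 and
	 * the loop clears the last two bytes of the block to form CTR0;
	 * setting the lowest byte to 1 then yields CTR1.
	 */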
	/* Set flag bits to 0 (the 5 upper bits), keep the 3 low bits */
	iv[be_index(0)] &= 0x7;
	/* Get size of the length field (can be from 2 to 8) */
	l = iv[be_index(0)] + 1;
	/* Set Q to 0 */
	for (i = 15; i >= 15 - l + 1; i--)
		iv[be_index(i)] = 0;
	/* Save CTR0 */
	memcpy(ctx->ctr0_ccm, iv, sizeof(b0));
	/* Increment Q */
	iv[be_index(15)] |= 0x1;

	cryp_write_iv(ctx);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	res = write_align_block(ctx, b0);

	return res;
}

static TEE_Result do_from_init_to_phase(struct stm32_cryp_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * We didn't run the init phase yet.
	 * CCM needs a specific context-restore procedure for the init phase.
	 */
	if (IS_ALGOMODE(ctx->cr, AES_CCM))
		res = ccm_first_context(ctx);
	else
		res = restore_context(ctx);

	if (res)
		return res;

	res = wait_end_enable(ctx->base);
	if (res)
		return res;

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	/* Enable the CRYP peripheral (init disabled it) */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_cryp_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	res = restore_context(ctx);
	if (res)
		return res;

	if (ctx->extra_size) {
		/* Manage unaligned header data before moving to next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			return res;

		ctx->assoc_len += (ctx->extra_size) * INT8_BIT;
		ctx->extra_size = 0;
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	return TEE_SUCCESS;
}

/**
 * @brief Start an AES computation.
 * @param ctx: CRYP process context
 * @param is_dec: true if decryption, false if encryption
 * @param algo: define the algo mode
 * @param key: pointer to key
 * @param key_size: key size
 * @param iv: pointer to initialization vector (unused if algo is ECB)
 * @param iv_size: iv size
 * @note this function doesn't access the hardware, it only stores the
 *       configuration in ctx
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_init(struct stm32_cryp_context *ctx, bool is_dec,
			   enum stm32_cryp_algo_mode algo,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	unsigned int i = 0;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_iv[4] = { 0 };
	const uint32_t *key_u32 = NULL;
	uint32_t local_key[8] = { 0 };

	ctx->assoc_len = 0;
	ctx->load_len = 0;
	ctx->extra_size = 0;
	ctx->lock = &cryp_lock;

	ctx->base = io_pa_or_va(&cryp_pdata.base, 1);
	ctx->cr = _CRYP_CR_RESET_VALUE;

	/* We want the key and IV buffers to be u32 aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		SETBITS(ctx->cr, _CRYP_CR_ALGODIR);
	else
		CLRBITS(ctx->cr, _CRYP_CR_ALGODIR);

	/* Save algo mode */
	switch (algo) {
	case STM32_CRYP_MODE_TDES_ECB:
		SET_ALGOMODE(TDES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_TDES_CBC:
		SET_ALGOMODE(TDES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_ECB:
		SET_ALGOMODE(DES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_CBC:
		SET_ALGOMODE(DES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_ECB:
		SET_ALGOMODE(AES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CBC:
		SET_ALGOMODE(AES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CTR:
		SET_ALGOMODE(AES_CTR, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_GCM:
		SET_ALGOMODE(AES_GCM, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CCM:
		SET_ALGOMODE(AES_CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * We will use HW byte swap (_CRYP_CR_DATATYPE_BYTE) for data,
	 * so we need neither TOBE32(data) before writing to DIN
	 * nor FROMBE32 after reading from DOUT.
	 */
	clrsetbits(&ctx->cr, _CRYP_CR_DATATYPE_MSK,
		   _CRYP_CR_DATATYPE_BYTE << _CRYP_CR_DATATYPE_OFF);

	/*
	 * Configure the key size for AES algorithms
	 * and save the block size.
	 */
	if (algo_mode_is_aes(ctx->cr)) {
		switch (key_size) {
		case AES_KEYSIZE_128:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_128 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_192:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_192 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_256:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_256 << _CRYP_CR_KEYSIZE_OFF);
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}

		/* And set the AES block size */
		ctx->block_u32 = AES_BLOCK_NB_U32;
	} else {
		/* And set the DES/TDES block size */
		ctx->block_u32 = DES_BLOCK_NB_U32;
	}

	/* Save key in HW order */
	ctx->key_size = key_size;
	for (i = 0; i < key_size / sizeof(uint32_t); i++)
		ctx->key[i] = TOBE32(key_u32[i]);

	/* Save IV */
	if (algo_mode_needs_iv(ctx->cr)) {
		if (!iv || iv_size != ctx->block_u32 * sizeof(uint32_t))
			return TEE_ERROR_BAD_PARAMETERS;

		/*
		 * We save the IV in the byte order expected by the
		 * IV registers.
		 */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = TOBE32(iv_u32[i]);
	}

	/* Reset suspend registers */
	memset(ctx->pm_gcmccm, 0, sizeof(ctx->pm_gcmccm));
	memset(ctx->pm_gcm, 0, sizeof(ctx->pm_gcm));

	return TEE_SUCCESS;
}

/**
 * @brief Update (or start) an AES authentication process of
 *        associated data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data: pointer to associated data
 * @param data_size: data size
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_assodata(struct stm32_cryp_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	/* If no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_HEADER);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		/*
		 * Function update_assodata was already called.
		 * We only need to restore the context.
		 */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage remaining data from a previous update_assodata() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, block);
		if (res)
			goto out;

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	/*
	 * Manage the last block if it is not a block size multiple:
	 * save remaining data to manage it later (potentially with new
	 * associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) an AES authenticated de/encryption of
 *        payload data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_load(struct stm32_cryp_context *ctx,
				  uint8_t *data_in, uint8_t *data_out,
				  size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		/* New update_load call, we only need to restore the context */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage an incomplete block from a previous update_load() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + ctx->extra_size,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;

		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	res = save_context(ctx);
	if (res)
		goto out;

	/*
	 * Manage the last block if it is not a block size multiple:
	 * we saved the context above, so complete the block with zeros
	 * and send it to CRYP to get the {en,de}crypted data, then store
	 * the data to resend it as the last block in final() or to
	 * complete the next update_load() so the tag is correct.
	 */
	if (i < data_size) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
		size_t prev_extra_size = ctx->extra_size;

		/* Re-enable the CRYP peripheral */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + prev_extra_size,
		       data_size - i);

		/* Disable the CRYP peripheral */
		io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
	}

out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get the authentication tag for AES authenticated algorithms
 *        (CCM or GCM).
 * @param ctx: CRYP process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_final(struct stm32_cryp_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { 0 };
	uint32_t previous_phase = 0;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		res = restore_context(ctx);
		if (res)
			break;

		/* Manage an incomplete block from a previous update_load() */
		if (ctx->extra_size) {
			uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
			size_t sz = ctx->block_u32 * sizeof(uint32_t) -
				    ctx->extra_size;

			if (does_need_npblb(ctx->cr)) {
				io_clrsetbits32(ctx->base + _CRYP_CR,
						_CRYP_CR_NPBLB_MSK,
						sz << _CRYP_CR_NPBLB_OFF);
			}

			memset((uint8_t *)ctx->extra + ctx->extra_size, 0, sz);

			res = write_align_block(ctx, ctx->extra);
			if (res)
				break;

			/* Discard the {en,de}crypted data, already saved */
			res = read_align_block(ctx, block_out);
			if (res)
				break;

			ctx->load_len += (ctx->extra_size * INT8_BIT);
			ctx->extra_size = 0;
		}

		/* Move to final phase */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
				_CRYP_CR_GCM_CCMPH_FINAL <<
				_CRYP_CR_GCM_CCMPH_OFF);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	if (IS_ALGOMODE(ctx->cr, AES_GCM)) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->assoc_len);
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->load_len);
	} else if (IS_ALGOMODE(ctx->cr, AES_CCM)) {
		/* No need to htobe() in this phase */
		res = write_align_block(ctx, ctx->ctr0_ccm);
		if (res)
			goto out;
	}

	res = read_align_block(ctx, tag_u32);
	if (res)
		goto out;

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	cryp_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}
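
/*
 * Illustrative AES-GCM usage sketch of the API above (not part of the
 * driver, compiled out): authenticate 'aad', encrypt 'in' to 'out' and
 * produce a 16-byte tag. Error handling is shortened, and the caller is
 * assumed to have expanded the GCM nonce into the 16-byte IV block that
 * stm32_cryp_init() expects.
 */
#ifdef CRYP_USAGE_EXAMPLE
static TEE_Result example_aes_gcm_encrypt(const uint8_t key[AES_KEYSIZE_128],
					  const uint8_t iv[AES_BLOCK_SIZE],
					  uint8_t *aad, size_t aad_size,
					  uint8_t *in, uint8_t *out,
					  size_t size, uint8_t tag[16])
{
	struct stm32_cryp_context ctx = { };
	TEE_Result res = TEE_SUCCESS;

	/* Only stores the configuration, no hardware access yet */
	res = stm32_cryp_init(&ctx, false /* encrypt */,
			      STM32_CRYP_MODE_AES_GCM, key, AES_KEYSIZE_128,
			      iv, AES_BLOCK_SIZE);
	if (res)
		return res;

	/* Header phase: feed the additional authenticated data */
	res = stm32_cryp_update_assodata(&ctx, aad, aad_size);
	if (res)
		return res;

	/* Payload phase: encrypt and authenticate the payload */
	res = stm32_cryp_update_load(&ctx, in, out, size);
	if (res)
		return res;

	/* Final phase: compute the authentication tag */
	return stm32_cryp_final(&ctx, tag, 16);
}
#endif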

/**
 * @brief Update (or start) a de/encryption process.
 * @param ctx: CRYP process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update(struct stm32_cryp_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	mutex_lock(ctx->lock);

	/*
	 * In CBC and ECB encryption we would need to specifically manage
	 * the last 2 blocks if the total size is not aligned to a block
	 * size. Currently return TEE_ERROR_NOT_IMPLEMENTED. Moreover, as
	 * we need to know the last 2 blocks, if unaligned and called with
	 * less than two blocks, return TEE_ERROR_BAD_STATE.
	 */
	if (last_block && algo_mode_is_ecb_cbc(ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN(data_size, ctx->block_u32 * sizeof(uint32_t)) !=
	     data_size)) {
		if (data_size < ctx->block_u32 * sizeof(uint32_t) * 2) {
			/*
			 * If CBC, the size of the last part should be at
			 * least 2*BLOCK_SIZE
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Moreover the ECB/CBC specific padding for encryption is
		 * not yet implemented, and not used in OP-TEE.
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Manage a remaining CTR mask from a previous update call */
	if (IS_ALGOMODE(ctx->cr, AES_CTR) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (j = 0; j < ctx->extra_size && i < data_size; j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * We didn't consume all of the saved mask,
			 * but there is no more data.
			 */

			/* Save the remaining mask and its new size */
			memmove(ctx->extra, ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * We don't need to save the HW context as we
			 * didn't modify the HW state.
			 */
			res = TEE_SUCCESS;
			goto out;
		}

		/* All extra mask consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		/*
		 * We only write/read one block at a time,
		 * but CRYP uses input (and output) FIFOs of 8 * uint32_t
		 */
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
	}

	/* Manage the last block if it is not a block size multiple */
	if (i < data_size) {
		uint32_t block_in[MAX_BLOCK_NB_U32] = { 0 };
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		if (!IS_ALGOMODE(ctx->cr, AES_CTR)) {
			/*
			 * Algorithms other than CTR can only handle
			 * multiples of the block size.
			 */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}

		/*
		 * For CTR we save the generated mask to use it at the
		 * next update call.
		 */
		memcpy(block_in, data_in + i, data_size - i);

		res = write_align_block(ctx, block_in);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, block_out, data_size - i);

		/* Save the mask for a possible next call */
		ctx->extra_size = ctx->block_u32 * sizeof(uint32_t) -
			(data_size - i);
		memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
		       ctx->extra_size);
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end the CRYP process */
	if (last_block || res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}
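
/*
 * Illustrative AES-CTR usage sketch of stm32_cryp_init() and
 * stm32_cryp_update() (not part of the driver, compiled out). A single
 * call with last_block set processes the whole buffer; as implemented
 * above, CTR also accepts sizes that are not block aligned.
 */
#ifdef CRYP_USAGE_EXAMPLE
static TEE_Result example_aes_ctr(bool decrypt,
				  const uint8_t key[AES_KEYSIZE_128],
				  const uint8_t iv[AES_BLOCK_SIZE],
				  uint8_t *in, uint8_t *out, size_t size)
{
	struct stm32_cryp_context ctx = { };
	TEE_Result res = TEE_SUCCESS;

	/* Store key, IV and mode in the context */
	res = stm32_cryp_init(&ctx, decrypt, STM32_CRYP_MODE_AES_CTR,
			      key, AES_KEYSIZE_128, iv, AES_BLOCK_SIZE);
	if (res)
		return res;

	/* Process the whole payload and release the peripheral */
	return stm32_cryp_update(&ctx, true /* last block */, in, out, size);
}
#endif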

static int fdt_stm32_cryp(struct stm32_cryp_platdata *pdata)
{
	int node = -1;
	struct dt_node_info dt_cryp = { };
	void *fdt = NULL;

	fdt = get_embedded_dt();
	if (!fdt)
		return -FDT_ERR_NOTFOUND;

	node = fdt_node_offset_by_compatible(fdt, node, "st,stm32mp1-cryp");
	if (node < 0) {
		EMSG("No CRYP entry in DT");
		return -FDT_ERR_NOTFOUND;
	}

	_fdt_fill_device_info(fdt, &dt_cryp, node);

	if (dt_cryp.status == DT_STATUS_DISABLED)
		return -FDT_ERR_NOTFOUND;

	if (dt_cryp.clock == DT_INFO_INVALID_CLOCK ||
	    dt_cryp.reset == DT_INFO_INVALID_RESET ||
	    dt_cryp.reg == DT_INFO_INVALID_REG)
		return -FDT_ERR_BADVALUE;

	pdata->base.pa = dt_cryp.reg;
	io_pa_or_va_secure(&pdata->base, 1);

	pdata->clock_id = (unsigned long)dt_cryp.clock;
	pdata->reset_id = (unsigned int)dt_cryp.reset;

	return 0;
}

static TEE_Result stm32_cryp_driver_init(void)
{
	TEE_Result res = TEE_SUCCESS;

	switch (fdt_stm32_cryp(&cryp_pdata)) {
	case 0:
		break;
	case -FDT_ERR_NOTFOUND:
		return TEE_SUCCESS;
	default:
		panic();
	}

	stm32mp_register_secure_periph_iomem(cryp_pdata.base.pa);

	stm32_clock_enable(cryp_pdata.clock_id);

	if (stm32_reset_assert(cryp_pdata.reset_id, TIMEOUT_US_1MS))
		panic();

	if (stm32_reset_deassert(cryp_pdata.reset_id, TIMEOUT_US_1MS))
		panic();

	if (IS_ENABLED(CFG_CRYPTO_DRV_AUTHENC)) {
		res = stm32_register_authenc();
		if (res) {
			EMSG("Failed to register to authenc: %#"PRIx32, res);
			panic();
		}
	}

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher();
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	return TEE_SUCCESS;
}

driver_init(stm32_cryp_driver_init);