Searched refs:CFG_NUM_THREADS (Results 1 – 25 of 34) sorted by relevance
45 #if CFG_NUM_THREADS < 2
47 #elif (CFG_NUM_THREADS == 2 && !defined(CFG_WITH_LPAE))
50 #define PGT_CACHE_SIZE ROUNDUP(CFG_NUM_THREADS * 2, PGT_NUM_PGT_PER_PAGE)
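The pgt_cache.h hits above special-case one and two threads and otherwise size the page-table cache as ROUNDUP(CFG_NUM_THREADS * 2, PGT_NUM_PGT_PER_PAGE). A minimal standalone sketch of that arithmetic, assuming ROUNDUP() rounds its first argument up to a multiple of the second; the concrete values below are illustrative, not taken from any platform:

#include <stdio.h>

/* Assumed semantics: round v up to the next multiple of size. */
#define ROUNDUP(v, size) (((v) + (size) - 1) / (size) * (size))

/* Illustrative values only; the real ones come from the platform
 * config and from pgt_cache.h. */
#define CFG_NUM_THREADS      8
#define PGT_NUM_PGT_PER_PAGE 4

/* Mirrors the third pgt_cache.h hit: two page tables per thread,
 * rounded up so the cache covers whole pages of page tables. */
#define PGT_CACHE_SIZE ROUNDUP(CFG_NUM_THREADS * 2, PGT_NUM_PGT_PER_PAGE)

int main(void)
{
	printf("PGT_CACHE_SIZE = %d\n", PGT_CACHE_SIZE); /* prints 16 */
	return 0;
}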
25 static struct lockdep_lock_head owned[CFG_NUM_THREADS];
31 for (n = 0; n < CFG_NUM_THREADS; n++) in mutex_lockdep_init()
47 CFG_NUM_THREADS ?= CFG_TEE_CORE_NB_CORE
66 CFG_NUM_THREADS ?= CFG_TEE_CORE_NB_CORE
85 CFG_NUM_THREADS ?= CFG_TEE_CORE_NB_CORE
40 struct thread_ctx threads[CFG_NUM_THREADS];
134 DECLARE_STACK(stack_thread, CFG_NUM_THREADS,
355 for (n = 0; n < CFG_NUM_THREADS; n++) { in print_stack_limits()
550 for (n = 0; n < CFG_NUM_THREADS; n++) { in __thread_alloc_and_run()
788 if (ct < 0 || ct >= CFG_NUM_THREADS) in get_stack_limits()
968 if (thread_id >= CFG_NUM_THREADS) in thread_init_stack()
992 COMPILE_TIME_ASSERT(CFG_NUM_THREADS <= SHRT_MAX); in thread_get_id()
993 assert(ct >= 0 && ct < CFG_NUM_THREADS); in thread_get_id()
1005 for (n = 0; n < CFG_NUM_THREADS; n++) { in init_thread_stacks()
1041 for (n = 0; n < CFG_NUM_THREADS; n++) { in init_thread_stacks()
[all …]
236 for (n = 0; n < CFG_NUM_THREADS; n++) { in thread_disable_prealloc_rpc_cache()
246 for (n = 0; n < CFG_NUM_THREADS; n++) { in thread_disable_prealloc_rpc_cache()
277 for (n = 0; n < CFG_NUM_THREADS; n++) { in thread_enable_prealloc_rpc_cache()
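Taken together, the two groups of thread-core hits above show the recurring pattern: one statically sized slot per thread, loops over CFG_NUM_THREADS to initialise or scan the slots, and an index bounds check before use. A compilable sketch of that pattern with hypothetical demo_* names; OP-TEE's real struct thread_ctx carries far more state (stack limits, saved registers, RPC cache, ...):

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define CFG_NUM_THREADS 4 /* illustrative value */

/* Hypothetical per-thread slot, one entry per configured thread. */
struct demo_thread_ctx {
	uint32_t state;
	uintptr_t stack_va_end;
};

static struct demo_thread_ctx demo_threads[CFG_NUM_THREADS];

/* Same shape as the bounds checks seen in get_stack_limits() and
 * thread_init_stack(): reject out-of-range thread IDs. */
static struct demo_thread_ctx *demo_get_ctx(int ct)
{
	assert(ct >= 0 && ct < CFG_NUM_THREADS);
	return &demo_threads[ct];
}

static void demo_init_threads(void)
{
	size_t n;

	for (n = 0; n < CFG_NUM_THREADS; n++)
		demo_threads[n].state = 0; /* e.g. "free" */
}

int main(void)
{
	demo_init_threads();
	demo_get_ctx(0)->state = 1; /* mark thread 0 busy */
	return 0;
}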
12 CFG_NUM_THREADS ?= $(CFG_TEE_CORE_NB_CORE)
17 CFG_NUM_THREADS ?= 4
9 CFG_NUM_THREADS ?= 8
4 CFG_NUM_THREADS ?= 16
18 CFG_NUM_THREADS ?= 4
25 CFG_NUM_THREADS ?= 4
59 #if CFG_NUM_THREADS > 100 in print_thread_id()
61 #elif CFG_NUM_THREADS > 10 in print_thread_id()
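The print_thread_id() hits pick a print format at preprocessor time from the configured thread count, so thread IDs come out with a consistent width. A hedged sketch of the same idea; the width macro and helper below are mine, not copied from the OP-TEE function:

#include <stdio.h>

#define CFG_NUM_THREADS 16 /* illustrative value */

/* Choose how many digits a thread ID needs, based on the configured
 * maximum, so log lines stay aligned. */
#if CFG_NUM_THREADS > 100
#define THREAD_ID_WIDTH 3
#elif CFG_NUM_THREADS > 10
#define THREAD_ID_WIDTH 2
#else
#define THREAD_ID_WIDTH 1
#endif

static void print_thread_id_demo(int id)
{
	printf("thread %0*d\n", THREAD_ID_WIDTH, id);
}

int main(void)
{
	print_thread_id_demo(7); /* prints "thread 07" with 16 threads */
	return 0;
}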
21 CFG_NUM_THREADS ?= 4
46 CFG_NUM_THREADS ?= 4
37 CFG_NUM_THREADS)
15 CFG_NUM_THREADS ?= 4
16 CFG_NUM_THREADS ?= 8
9 CFG_NUM_THREADS ?= 4
116 #define CFG_NUM_THREADS 2 macro
119 CFG_NUM_THREADS=2
186 static uint32_t threaded_payload[CFG_NUM_THREADS][SCMI_PLAYLOAD_U32_MAX];
Completed in 51 milliseconds