Lines Matching refs:umax_value

676 if (reg->smax_value != reg->umax_value &&  in print_verifier_state()
683 if (reg->umax_value != U64_MAX) in print_verifier_state()
685 (unsigned long long)reg->umax_value); in print_verifier_state()
704 if (reg->u32_max_value != reg->umax_value && in print_verifier_state()
1087 reg->umax_value = imm; in ___mark_reg_known()
1223 reg->umax_value = U64_MAX; in __mark_reg_unbounded()
1236 reg->umax_value = U64_MAX; in __mark_reg64_unbounded()
1271 reg->umax_value = min(reg->umax_value, in __update_reg64_bounds()
1326 reg->smax_value = reg->umax_value = min_t(u64, reg->smax_value, in __reg64_deduce_bounds()
1327 reg->umax_value); in __reg64_deduce_bounds()
1333 if ((s64)reg->umax_value >= 0) { in __reg64_deduce_bounds()
1338 reg->smax_value = reg->umax_value = min_t(u64, reg->smax_value, in __reg64_deduce_bounds()
1339 reg->umax_value); in __reg64_deduce_bounds()
1346 reg->smax_value = reg->umax_value; in __reg64_deduce_bounds()
1361 reg->umax_value)); in __reg_bound_offset()
1377 reg->umax_value = reg->u32_max_value; in __reg_assign_32_into_64()
1440 if (__reg64_bound_u32(reg->umin_value) && __reg64_bound_u32(reg->umax_value)) { in __reg_combine_64_into_32()
1442 reg->u32_max_value = (u32)reg->umax_value; in __reg_combine_64_into_32()
2758 reg->umin_value == 0 && reg->umax_value == U64_MAX && in __is_scalar_unbounded()
3406 if (reg->umax_value >= BPF_MAX_VAR_OFF) { in check_mem_region_access()
3411 err = __check_mem_access(env, regno, reg->umax_value + off, size, in check_mem_region_access()
3446 lock < reg->umax_value + off + size) { in check_map_access()
3455 t < reg->umax_value + off + size) { in check_map_access()
3549 off + reg->umax_value + size - 1); in check_packet_access()
4040 if ((reg->umin_value & ~mask) == (reg->umax_value & ~mask)) { in coerce_reg_to_size()
4042 reg->umax_value &= mask; in coerce_reg_to_size()
4045 reg->umax_value = mask; in coerce_reg_to_size()
4048 reg->smax_value = reg->umax_value; in coerce_reg_to_size()
5356 meta->msize_max_value = reg->umax_value; in check_func_arg()
5383 if (reg->umax_value >= BPF_MAX_VAR_SIZ) { in check_func_arg()
5389 reg->umax_value, in check_func_arg()
6882 ptr_reg->umax_value) + ptr_reg->off; in retrieve_ptr_limit()
7189 u64 umin_val = off_reg->umin_value, umax_val = off_reg->umax_value, in adjust_ptr_min_max_vals()
7190 umin_ptr = ptr_reg->umin_value, umax_ptr = ptr_reg->umax_value; in adjust_ptr_min_max_vals()
7276 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
7302 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
7305 dst_reg->umax_value = umax_ptr + umax_val; in adjust_ptr_min_max_vals()
7338 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
7360 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
7364 dst_reg->umax_value = umax_ptr - umin_val; in adjust_ptr_min_max_vals()
7441 u64 umax_val = src_reg->umax_value; in scalar_min_max_add()
7452 dst_reg->umax_value + umax_val < umax_val) { in scalar_min_max_add()
7454 dst_reg->umax_value = U64_MAX; in scalar_min_max_add()
7457 dst_reg->umax_value += umax_val; in scalar_min_max_add()
7495 u64 umax_val = src_reg->umax_value; in scalar_min_max_sub()
7509 dst_reg->umax_value = U64_MAX; in scalar_min_max_sub()
7513 dst_reg->umax_value -= umin_val; in scalar_min_max_sub()
7554 u64 umax_val = src_reg->umax_value; in scalar_min_max_mul()
7564 if (umax_val > U32_MAX || dst_reg->umax_value > U32_MAX) { in scalar_min_max_mul()
7570 dst_reg->umax_value *= umax_val; in scalar_min_max_mul()
7571 if (dst_reg->umax_value > S64_MAX) { in scalar_min_max_mul()
7577 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_mul()
7621 u64 umax_val = src_reg->umax_value; in scalar_min_max_and()
7632 dst_reg->umax_value = min(dst_reg->umax_value, umax_val); in scalar_min_max_and()
7644 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_and()
7701 dst_reg->umax_value = dst_reg->var_off.value | dst_reg->var_off.mask; in scalar_min_max_or()
7713 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_or()
7763 dst_reg->umax_value = dst_reg->var_off.value | dst_reg->var_off.mask; in scalar_min_max_xor()
7770 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_xor()
7836 if (dst_reg->umax_value > 1ULL << (63 - umax_val)) { in __scalar64_min_max_lsh()
7838 dst_reg->umax_value = U64_MAX; in __scalar64_min_max_lsh()
7841 dst_reg->umax_value <<= umax_val; in __scalar64_min_max_lsh()
7848 u64 umax_val = src_reg->umax_value; in scalar_min_max_lsh()
7895 u64 umax_val = src_reg->umax_value; in scalar_min_max_rsh()
7916 dst_reg->umax_value >>= umin_val; in scalar_min_max_rsh()
7966 dst_reg->umax_value = U64_MAX; in scalar_min_max_arsh()
7999 umax_val = src_reg.umax_value; in adjust_scalar_min_max_vals()
8435 if (dst_reg->umax_value > MAX_PACKET_OFF || in find_good_pkt_pointers()
8436 dst_reg->umax_value + dst_reg->off > MAX_PACKET_OFF) in find_good_pkt_pointers()
8594 else if (reg->umax_value <= val) in is_branch64_taken()
8604 if (reg->umax_value < val) in is_branch64_taken()
8618 else if (reg->umax_value < val) in is_branch64_taken()
8628 if (reg->umax_value <= val) in is_branch64_taken()
8815 false_reg->umax_value = min(false_reg->umax_value, false_umax); in reg_set_min_max()
8854 true_reg->umax_value = min(true_reg->umax_value, true_umax); in reg_set_min_max()
8917 src_reg->umax_value = dst_reg->umax_value = min(src_reg->umax_value, in __reg_combine_min_max()
8918 dst_reg->umax_value); in __reg_combine_min_max()
10293 old->umax_value >= cur->umax_value && in range_within()
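
The scalar_min_max_add() hits above (7441-7457) show the overflow-guarded pattern used when propagating unsigned bounds through an addition: if adding the source's maximum (or minimum) could wrap, the destination is widened to the full [0, U64_MAX] range; otherwise the bounds are simply summed. Below is a minimal standalone sketch of that pattern, not kernel code; the struct and function names (struct bounds, bounds_add) are hypothetical and the layout is simplified to the two unsigned fields.

    /*
     * Illustrative sketch of unsigned-bound propagation through addition,
     * mirroring the wrap-around check visible in the scalar_min_max_add()
     * lines above. Hypothetical names, not kernel identifiers.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct bounds {
            uint64_t umin_value;
            uint64_t umax_value;
    };

    /* Add src's bounds into dst; if either edge may wrap, give up and
     * widen dst to the full unsigned range. */
    static void bounds_add(struct bounds *dst, const struct bounds *src)
    {
            if (dst->umin_value + src->umin_value < src->umin_value ||
                dst->umax_value + src->umax_value < src->umax_value) {
                    dst->umin_value = 0;
                    dst->umax_value = UINT64_MAX;
            } else {
                    dst->umin_value += src->umin_value;
                    dst->umax_value += src->umax_value;
            }
    }

    int main(void)
    {
            struct bounds a = { .umin_value = 16, .umax_value = 4096 };
            struct bounds b = { .umin_value = 0,  .umax_value = UINT64_MAX };

            bounds_add(&a, &b);     /* upper edge may wrap -> widens to [0, U64_MAX] */
            printf("[%llu, %llu]\n",
                   (unsigned long long)a.umin_value,
                   (unsigned long long)a.umax_value);
            return 0;
    }

The same saturate-on-possible-overflow idea recurs in the other arithmetic hits (scalar_min_max_mul(), __scalar64_min_max_lsh()): whenever the verifier cannot prove the operation stays within 64 bits, it falls back to U64_MAX rather than track a wrong bound.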