treewide: use get_random_{u8,u16}() when possible, part 2

Rather than truncate a 32-bit value to a 16-bit value or an 8-bit value,
simply use the get_random_{u8,u16}() functions, which are faster than
wasting the additional bytes from a 32-bit value. This was done by hand,
identifying all of the places where one of the random integer functions
was used in a non-32-bit context.

Reviewed-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Reviewed-by: Yury Norov <yury.norov@gmail.com>
Acked-by: Jakub Kicinski <kuba@kernel.org>
Acked-by: Heiko Carstens <hca@linux.ibm.com> # for s390
Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Author: Jason A. Donenfeld <Jason@zx2c4.com>
Date:   2022-10-05 17:23:53 +02:00
commit f743f16c54
parent 7e3cf0843f

6 changed files with 6 additions and 6 deletions
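
For context, a minimal sketch of the pattern being converted (illustrative only: the example_* wrappers below are not from the patch, though get_random_u8() and get_random_u32() are the real <linux/random.h> helpers). A caller that needs fewer than 32 bits previously truncated a 32-bit draw and discarded the rest, while the sized helper consumes only as many bytes of batched entropy as requested.

#include <linux/random.h>

/* Old pattern: draw 32 bits, keep 8, throw the other 24 away. */
static u8 example_random_byte_old(void)
{
	return get_random_u32() & 0xff;
}

/* New pattern: draw exactly the 8 bits that are needed. */
static u8 example_random_byte_new(void)
{
	return get_random_u8();
}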

@@ -230,7 +230,7 @@ unsigned long arch_align_stack(unsigned long sp)
 
 static inline unsigned long brk_rnd(void)
 {
-	return (get_random_int() & BRK_RND_MASK) << PAGE_SHIFT;
+	return (get_random_u16() & BRK_RND_MASK) << PAGE_SHIFT;
 }
 
 unsigned long arch_randomize_brk(struct mm_struct *mm)

@@ -1402,7 +1402,7 @@ static int ns_do_read_error(struct nandsim *ns, int num)
 
 static void ns_do_bit_flips(struct nandsim *ns, int num)
 {
-	if (bitflips && prandom_u32() < (1 << 22)) {
+	if (bitflips && get_random_u16() < (1 << 6)) {
 		int flips = 1;
 		if (bitflips > 1)
 			flips = prandom_u32_max(bitflips) + 1;
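
The nandsim hunk is the only one that also rescales a constant: the comparison threshold shrinks along with the return type, so the bit-flip probability is unchanged. A quick check of the arithmetic, assuming uniformly distributed return values:

/*
 * old: prandom_u32()    < (1 << 22)  ->  2^22 / 2^32 = 1/1024
 * new: get_random_u16() < (1 << 6)   ->  2^6  / 2^16 = 1/1024
 */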

@@ -177,7 +177,7 @@ static int brcmf_pno_set_random(struct brcmf_if *ifp, struct brcmf_pno_info *pi)
 	memcpy(pfn_mac.mac, mac_addr, ETH_ALEN);
 	for (i = 0; i < ETH_ALEN; i++) {
 		pfn_mac.mac[i] &= mac_mask[i];
-		pfn_mac.mac[i] |= get_random_int() & ~(mac_mask[i]);
+		pfn_mac.mac[i] |= get_random_u8() & ~(mac_mask[i]);
 	}
 	/* Clear multi bit */
 	pfn_mac.mac[0] &= 0xFE;
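
Each MAC octet here keeps the template bits selected by mac_mask and fills the remaining bits with fresh randomness, so one random byte per loop iteration is exactly enough. A minimal sketch of that per-octet combination (illustrative only; example_randomize_octet() is not part of the patch):

#include <linux/random.h>

static u8 example_randomize_octet(u8 tmpl, u8 mask)
{
	/* Keep template bits where the mask is set, random bits elsewhere. */
	return (tmpl & mask) | (get_random_u8() & ~mask);
}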

@@ -80,7 +80,7 @@ static int random_size_align_alloc_test(void)
 	int i;
 
 	for (i = 0; i < test_loop_count; i++) {
-		rnd = prandom_u32();
+		rnd = get_random_u8();
 
 		/*
 		 * Maximum 1024 pages, if PAGE_SIZE is 4096.

@@ -104,7 +104,7 @@ static int rds_add_bound(struct rds_sock *rs, const struct in6_addr *addr,
 			return -EINVAL;
 		last = rover;
 	} else {
-		rover = max_t(u16, prandom_u32(), 2);
+		rover = max_t(u16, get_random_u16(), 2);
 		last = rover - 1;
 	}
 
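
For the RDS hunk, note that max_t(u16, ...) casts both arguments to u16 before comparing, so the old code was already truncating prandom_u32() to its low 16 bits; get_random_u16() produces the same uniform 16-bit distribution while drawing only two bytes of batched entropy.

/*
 * Equivalent before/after (illustrative only):
 *	rover = max_t(u16, prandom_u32(), 2);     - low 16 bits kept by the u16 cast
 *	rover = max_t(u16, get_random_u16(), 2);  - same distribution, fewer bytes drawn
 */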

@@ -379,7 +379,7 @@ static int sfb_enqueue(struct sk_buff *skb, struct Qdisc *sch,
 		goto enqueue;
 	}
 
-	r = prandom_u32() & SFB_MAX_PROB;
+	r = get_random_u16() & SFB_MAX_PROB;
 
 	if (unlikely(r < p_min)) {
 		if (unlikely(p_min > SFB_MAX_PROB / 2)) {
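
Finally, the SFB hunk relies on SFB_MAX_PROB fitting in 16 bits, so masking a 16-bit draw yields the same uniform value in [0, SFB_MAX_PROB] that masking the low bits of a 32-bit draw did.

/*
 * r = prandom_u32()    & SFB_MAX_PROB;   - only the low 16 bits survive the mask
 * r = get_random_u16() & SFB_MAX_PROB;   - identical distribution, half the bytes drawn
 */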