Commit c440408c authored by Jason A. Donenfeld, committed by Theodore Ts'o

random: convert get_random_int/long into get_random_u32/u64

Many times, when a user wants a random number, he wants a random number
of a guaranteed size. So, thinking of get_random_int and get_random_long
in terms of get_random_u32 and get_random_u64 makes it much easier to
achieve this. It also makes the code simpler.

On 32-bit platforms, get_random_int and get_random_long are both aliased
to get_random_u32. On 64-bit platforms, int->u32 and long->u64.
Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Cc: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Cc: Theodore Ts'o <tytso@mit.edu>
Signed-off-by: Theodore Ts'o <tytso@mit.edu>
parent f5b98461
...@@ -2018,8 +2018,8 @@ struct ctl_table random_table[] = { ...@@ -2018,8 +2018,8 @@ struct ctl_table random_table[] = {
/*
 * Per-CPU batch of ChaCha20 output, consumed a word at a time by
 * get_random_u32()/get_random_u64().  The union lets one refill
 * (extract_crng() writes a whole CHACHA20_BLOCK_SIZE block) serve
 * either word width; a given per-CPU instance is only ever used
 * through one of the two views.
 */
struct batched_entropy {
	union {
		u64 entropy_u64[CHACHA20_BLOCK_SIZE / sizeof(u64)];
		u32 entropy_u32[CHACHA20_BLOCK_SIZE / sizeof(u32)];
	};
	/* Index of the next unconsumed word; 0 (mod array size) means refill. */
	unsigned int position;
};
...@@ -2029,52 +2029,51 @@ struct batched_entropy { ...@@ -2029,52 +2029,51 @@ struct batched_entropy {
* number is either as good as RDRAND or as good as /dev/urandom, with the * number is either as good as RDRAND or as good as /dev/urandom, with the
* goal of being quite fast and not depleting entropy. * goal of being quite fast and not depleting entropy.
*/ */
static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_long); static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_u64);
unsigned long get_random_long(void) u64 get_random_u64(void)
{ {
unsigned long ret; u64 ret;
struct batched_entropy *batch; struct batched_entropy *batch;
if (arch_get_random_long(&ret)) #if BITS_PER_LONG == 64
if (arch_get_random_long((unsigned long *)&ret))
return ret; return ret;
#else
if (arch_get_random_long((unsigned long *)&ret) &&
arch_get_random_long((unsigned long *)&ret + 1))
return ret;
#endif
batch = &get_cpu_var(batched_entropy_long); batch = &get_cpu_var(batched_entropy_u64);
if (batch->position % ARRAY_SIZE(batch->entropy_long) == 0) { if (batch->position % ARRAY_SIZE(batch->entropy_u64) == 0) {
extract_crng((u8 *)batch->entropy_long); extract_crng((u8 *)batch->entropy_u64);
batch->position = 0; batch->position = 0;
} }
ret = batch->entropy_long[batch->position++]; ret = batch->entropy_u64[batch->position++];
put_cpu_var(batched_entropy_long); put_cpu_var(batched_entropy_u64);
return ret; return ret;
} }
EXPORT_SYMBOL(get_random_long); EXPORT_SYMBOL(get_random_u64);
#if BITS_PER_LONG == 32 static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_u32);
unsigned int get_random_int(void) u32 get_random_u32(void)
{ {
return get_random_long(); u32 ret;
}
#else
static DEFINE_PER_CPU(struct batched_entropy, batched_entropy_int);
unsigned int get_random_int(void)
{
unsigned int ret;
struct batched_entropy *batch; struct batched_entropy *batch;
if (arch_get_random_int(&ret)) if (arch_get_random_int(&ret))
return ret; return ret;
batch = &get_cpu_var(batched_entropy_int); batch = &get_cpu_var(batched_entropy_u32);
if (batch->position % ARRAY_SIZE(batch->entropy_int) == 0) { if (batch->position % ARRAY_SIZE(batch->entropy_u32) == 0) {
extract_crng((u8 *)batch->entropy_int); extract_crng((u8 *)batch->entropy_u32);
batch->position = 0; batch->position = 0;
} }
ret = batch->entropy_int[batch->position++]; ret = batch->entropy_u32[batch->position++];
put_cpu_var(batched_entropy_int); put_cpu_var(batched_entropy_u32);
return ret; return ret;
} }
#endif EXPORT_SYMBOL(get_random_u32);
EXPORT_SYMBOL(get_random_int);
/** /**
* randomize_page - Generate a random, page aligned address * randomize_page - Generate a random, page aligned address
......
...@@ -42,8 +42,21 @@ extern void get_random_bytes_arch(void *buf, int nbytes); ...@@ -42,8 +42,21 @@ extern void get_random_bytes_arch(void *buf, int nbytes);
extern const struct file_operations random_fops, urandom_fops; extern const struct file_operations random_fops, urandom_fops;
#endif #endif
u32 get_random_u32(void);
u64 get_random_u64(void);

/*
 * Legacy wrappers: get_random_int()/get_random_long() are kept for
 * existing callers and simply alias the fixed-width primitives.
 * int is 32 bits on all supported platforms; long is 64 bits when
 * BITS_PER_LONG == 64 and 32 bits otherwise.
 */
static inline unsigned int get_random_int(void)
{
	return get_random_u32();
}

static inline unsigned long get_random_long(void)
{
#if BITS_PER_LONG == 64
	return get_random_u64();
#else
	return get_random_u32();
#endif
}
unsigned long randomize_page(unsigned long start, unsigned long range); unsigned long randomize_page(unsigned long start, unsigned long range);
u32 prandom_u32(void); u32 prandom_u32(void);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment