use crate::distr::{Distribution, StandardUniform};
use crate::Rng;
#[cfg(all(target_arch = "x86", feature = "simd_support"))]
use core::arch::x86::__m512i;
#[cfg(target_arch = "x86")]
use core::arch::x86::{__m128i, __m256i};
#[cfg(all(target_arch = "x86_64", feature = "simd_support"))]
use core::arch::x86_64::__m512i;
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::{__m128i, __m256i};
use core::num::{
    NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroU128, NonZeroU16,
    NonZeroU32, NonZeroU64, NonZeroU8,
};
#[cfg(feature = "simd_support")]
use core::simd::*;

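// `StandardUniform` samples integers uniformly over the full value range by
// taking bits directly from the RNG: types up to 32 bits truncate `next_u32`,
// 64-bit types use `next_u64`, and wider types are composed from multiple
// outputs.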
impl Distribution<u8> for StandardUniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> u8 {
        rng.next_u32() as u8
    }
}

impl Distribution<u16> for StandardUniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> u16 {
        rng.next_u32() as u16
    }
}

impl Distribution<u32> for StandardUniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> u32 {
        rng.next_u32()
    }
}

impl Distribution<u64> for StandardUniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> u64 {
        rng.next_u64()
    }
}

impl Distribution<u128> for StandardUniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> u128 {
        // Compose the result from two 64-bit outputs, low word first.
        let x = u128::from(rng.next_u64());
        let y = u128::from(rng.next_u64());
        (y << 64) | x
    }
}

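// Signed integers reuse the unsigned implementations: the `as` cast is a
// bijection on bit patterns, so a uniform unsigned sample maps to a uniform
// signed sample.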
macro_rules! impl_int_from_uint {
    ($ty:ty, $uty:ty) => {
        impl Distribution<$ty> for StandardUniform {
            #[inline]
            fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $ty {
                rng.random::<$uty>() as $ty
            }
        }
    };
}

impl_int_from_uint! { i8, u8 }
impl_int_from_uint! { i16, u16 }
impl_int_from_uint! { i32, u32 }
impl_int_from_uint! { i64, u64 }
impl_int_from_uint! { i128, u128 }

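// `NonZero*` types are sampled by rejection: draw the underlying integer and
// retry on zero. A zero is drawn with probability 2^-bits, so the loop almost
// always exits on the first iteration.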
macro_rules! impl_nzint {
    ($ty:ty, $new:path) => {
        impl Distribution<$ty> for StandardUniform {
            fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $ty {
                loop {
                    if let Some(nz) = $new(rng.random()) {
                        break nz;
                    }
                }
            }
        }
    };
}

impl_nzint!(NonZeroU8, NonZeroU8::new);
impl_nzint!(NonZeroU16, NonZeroU16::new);
impl_nzint!(NonZeroU32, NonZeroU32::new);
impl_nzint!(NonZeroU64, NonZeroU64::new);
impl_nzint!(NonZeroU128, NonZeroU128::new);

impl_nzint!(NonZeroI8, NonZeroI8::new);
impl_nzint!(NonZeroI16, NonZeroI16::new);
impl_nzint!(NonZeroI32, NonZeroI32::new);
impl_nzint!(NonZeroI64, NonZeroI64::new);
impl_nzint!(NonZeroI128, NonZeroI128::new);

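// The x86/x86_64 vector types are filled by generating a byte buffer of the
// matching size and transmuting it. Every bit pattern is a valid value for
// these types, so the conversion is sound.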
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
impl Distribution<__m128i> for StandardUniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> __m128i {
        let mut buf = [0_u8; core::mem::size_of::<__m128i>()];
        rng.fill_bytes(&mut buf);
        // SAFETY: any byte pattern is a valid `__m128i`.
        unsafe { core::mem::transmute(buf) }
    }
}

#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
impl Distribution<__m256i> for StandardUniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> __m256i {
        let mut buf = [0_u8; core::mem::size_of::<__m256i>()];
        rng.fill_bytes(&mut buf);
        // SAFETY: any byte pattern is a valid `__m256i`.
        unsafe { core::mem::transmute(buf) }
    }
}

#[cfg(all(
    any(target_arch = "x86", target_arch = "x86_64"),
    feature = "simd_support"
))]
impl Distribution<__m512i> for StandardUniform {
    #[inline]
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> __m512i {
        let mut buf = [0_u8; core::mem::size_of::<__m512i>()];
        rng.fill_bytes(&mut buf);
        // SAFETY: any byte pattern is a valid `__m512i`.
        unsafe { core::mem::transmute(buf) }
    }
}

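// Portable SIMD vectors are filled element-wise: `rng.fill` writes random
// values into the vector's backing array.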
#[cfg(feature = "simd_support")]
macro_rules! simd_impl {
    ($($ty:ty),+) => {$(
        #[cfg(feature = "simd_support")]
        impl<const LANES: usize> Distribution<Simd<$ty, LANES>> for StandardUniform
        where
            LaneCount<LANES>: SupportedLaneCount,
        {
            #[inline]
            fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Simd<$ty, LANES> {
                let mut vec = Simd::default();
                rng.fill(vec.as_mut_array().as_mut_slice());
                vec
            }
        }
    )+};
}

#[cfg(feature = "simd_support")]
simd_impl!(u8, i8, u16, i16, u32, i32, u64, i64);

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_integers() {
        let mut rng = crate::test::rng(806);

        rng.sample::<i8, _>(StandardUniform);
        rng.sample::<i16, _>(StandardUniform);
        rng.sample::<i32, _>(StandardUniform);
        rng.sample::<i64, _>(StandardUniform);
        rng.sample::<i128, _>(StandardUniform);

        rng.sample::<u8, _>(StandardUniform);
        rng.sample::<u16, _>(StandardUniform);
        rng.sample::<u32, _>(StandardUniform);
        rng.sample::<u64, _>(StandardUniform);
        rng.sample::<u128, _>(StandardUniform);
    }

    #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
    #[test]
    fn x86_integers() {
        let mut rng = crate::test::rng(807);

        rng.sample::<__m128i, _>(StandardUniform);
        rng.sample::<__m256i, _>(StandardUniform);
        #[cfg(feature = "simd_support")]
        rng.sample::<__m512i, _>(StandardUniform);
    }

    #[test]
    fn value_stability() {
        fn test_samples<T: Copy + core::fmt::Debug + PartialEq>(zero: T, expected: &[T])
        where
            StandardUniform: Distribution<T>,
        {
            let mut rng = crate::test::rng(807);
            let mut buf = [zero; 3];
            for x in &mut buf {
                *x = rng.sample(StandardUniform);
            }
            assert_eq!(&buf, expected);
        }

        test_samples(0u8, &[9, 247, 111]);
        test_samples(0u16, &[32265, 42999, 38255]);
        test_samples(0u32, &[2220326409, 2575017975, 2018088303]);
        test_samples(
            0u64,
            &[
                11059617991457472009,
                16096616328739788143,
                1487364411147516184,
            ],
        );
        test_samples(
            0u128,
            &[
                296930161868957086625409848350820761097,
                145644820879247630242265036535529306392,
                111087889832015897993126088499035356354,
            ],
        );

        test_samples(0i8, &[9, -9, 111]);
        #[cfg(feature = "simd_support")]
        {
            test_samples(
                u8x4::default(),
                &[
                    u8x4::from([9, 126, 87, 132]),
                    u8x4::from([247, 167, 123, 153]),
                    u8x4::from([111, 149, 73, 120]),
                ],
            );
            test_samples(
                u8x8::default(),
                &[
                    u8x8::from([9, 126, 87, 132, 247, 167, 123, 153]),
                    u8x8::from([111, 149, 73, 120, 68, 171, 98, 223]),
                    u8x8::from([24, 121, 1, 50, 13, 46, 164, 20]),
                ],
            );

            test_samples(
                i64x8::default(),
                &[
                    i64x8::from([
                        -7387126082252079607,
                        -2350127744969763473,
                        1487364411147516184,
                        7895421560427121838,
                        602190064936008898,
                        6022086574635100741,
                        -5080089175222015595,
                        -4066367846667249123,
                    ]),
                    i64x8::from([
                        9180885022207963908,
                        3095981199532211089,
                        6586075293021332726,
                        419343203796414657,
                        3186951873057035255,
                        5287129228749947252,
                        444726432079249540,
                        -1587028029513790706,
                    ]),
                    i64x8::from([
                        6075236523189346388,
                        1351763722368165432,
                        -6192309979959753740,
                        -7697775502176768592,
                        -4482022114172078123,
                        7522501477800909500,
                        -1837258847956201231,
                        -586926753024886735,
                    ]),
                ],
            );
        }
    }
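
    // Minimal usage sketch: `Rng::random` dispatches to the `StandardUniform`
    // impls above. The seed and sampled types here are chosen arbitrarily for
    // illustration.
    #[test]
    fn sample_via_random() {
        let mut rng = crate::test::rng(808);
        let _: u32 = rng.random();
        let _: i128 = rng.random();
        let _: NonZeroU64 = rng.random();
    }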
}