
Commit 0600265

Merge pull request #49 from github/dependabot/cargo/rand-0.9
Update rand requirement from 0.8 to 0.9
2 parents 36e987b + e14e5f2 commit 0600265

File tree

13 files changed: +31 -31 lines changed


crates/bpe/Cargo.toml (+1 -1)

@@ -21,7 +21,7 @@ aneubeck-daachorse = "1.1.1"
 base64 = { version = "0.22", optional = true }
 fnv = "1.0"
 itertools = "0.14"
-rand = { version = "0.8", optional = true }
+rand = { version = "0.9", optional = true }
 serde = { version = "1", features = ["derive"] }

 [dev-dependencies]

crates/bpe/benchmarks/Cargo.toml (+1 -1)

@@ -21,6 +21,6 @@ test = true
 bpe = { path = "../../bpe" }
 bpe-openai = { path = "../../bpe-openai" }
 criterion = "0.5"
-rand = "0.8"
+rand = "0.9"
 tiktoken-rs = "0.6"
 tokenizers = { version = "0.21", features = ["http"] }

crates/bpe/src/byte_pair_encoding.rs (+8 -8)

@@ -171,9 +171,9 @@ pub fn find_hash_factor_for_dictionary(tokens: impl IntoIterator<Item = Vec<u8>>
     use rand::Rng;

     let all_tokens = tokens.into_iter().collect_vec();
-    let mut rnd = rand::thread_rng();
+    let mut rnd = rand::rng();
     loop {
-        let factor: u64 = rnd.gen();
+        let factor: u64 = rnd.random();
         let mut seen = HashSet::new();
         if all_tokens
             .iter()

@@ -568,7 +568,7 @@ pub fn create_test_string_with_predicate(
     min_bytes: usize,
     predicate: impl Fn(&str) -> bool,
 ) -> String {
-    use rand::{thread_rng, Rng};
+    use rand::{rng, Rng};
     // the string we accumulated thus far
     let mut result = String::new();
     // the tokens we added so we can backtrack

@@ -577,7 +577,7 @@ pub fn create_test_string_with_predicate(
         // try a few times to find a suitable token
         'next: for _ in 0..8 {
             // pick a random token and provisionally add it
-            let i = thread_rng().gen_range(0..bpe.num_tokens()) as u32;
+            let i = rng().random_range(0..bpe.num_tokens()) as u32;
             // We only use tokens that are valid UTF-8. This is true for ~99% of tokens in OpenAI's
             // token set. The chance of constructing a valid UTF-8 character across a token boundary
             // by picking random tokens is so small that it is unlikely to happen anyway.

@@ -603,8 +603,8 @@ pub fn create_test_string_with_predicate(

 #[cfg(feature = "rand")]
 pub fn select_test_string(text: &str, min_bytes: usize) -> &str {
-    use rand::{thread_rng, Rng};
-    let mut start = thread_rng().gen_range(0..text.len() - min_bytes);
+    use rand::{rng, Rng};
+    let mut start = rng().random_range(0..text.len() - min_bytes);
     while !text.is_char_boundary(start) {
         start -= 1;
     }

@@ -618,10 +618,10 @@ pub fn select_test_string(text: &str, min_bytes: usize) -> &str {
 /// Generate test bytes by concatenating random tokens.
 #[cfg(feature = "rand")]
 pub fn create_test_bytes(bpe: &BytePairEncoding, min_bytes: usize) -> Vec<u8> {
-    use rand::{thread_rng, Rng};
+    use rand::{rng, Rng};
     let mut result = Vec::new();
     while result.len() < min_bytes {
-        let i = thread_rng().gen_range(0..bpe.num_tokens());
+        let i = rng().random_range(0..bpe.num_tokens());
         result.extend(bpe.token_bytes(i as u32));
     }
     result
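For orientation, the renames above all follow the rand 0.9 API: `thread_rng()` became `rng()`, `Rng::gen` became `Rng::random` (`gen` is a reserved keyword in Rust 2024), and `Rng::gen_range` became `Rng::random_range`. A minimal, hypothetical sketch of the new calls (not code from this repository; assumes `rand = "0.9"`):

    use rand::Rng;

    fn main() {
        // rand 0.8: rand::thread_rng(); rand 0.9: rand::rng()
        let mut rng = rand::rng();
        // rand 0.8: rng.gen::<u64>(); rand 0.9: rng.random::<u64>()
        let factor: u64 = rng.random();
        // rand 0.8: rng.gen_range(0..10); rand 0.9: rng.random_range(0..10)
        let index = rng.random_range(0..10);
        println!("factor = {factor}, index = {index}");
    }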

crates/bpe/tests/Cargo.toml (+1 -1)

@@ -6,5 +6,5 @@ edition = "2021"
 bpe = { path = "../../bpe", features = ["rand"] }
 bpe-openai = { path = "../../bpe-openai" }
 itertools = "0.14"
-rand = "0.8"
+rand = "0.9"
 tiktoken-rs = "0.6"

crates/bpe/tests/src/lib.rs (+3 -3)

@@ -1,7 +1,7 @@
 #[cfg(test)]
 mod tests {
     use itertools::Itertools;
-    use rand::{thread_rng, Rng};
+    use rand::{rng, Rng};
     use tiktoken_rs::cl100k_base_singleton;

     use bpe::appendable_encoder::AppendableEncoder;

@@ -122,8 +122,8 @@ mod tests {
         let input = create_test_bytes(bpe, 10000);
         let intervals = IntervalEncoding::new(bpe, &input);
         for _ in 0..1000 {
-            let start = thread_rng().gen_range(0..input.len());
-            let end = thread_rng().gen_range(0..input.len());
+            let start = rng().random_range(0..input.len());
+            let end = rng().random_range(0..input.len());
             let range = start.min(end)..start.max(end);
             assert_eq!(
                 intervals.count(range.clone()),

crates/geo_filters/Cargo.toml (+3 -3)

@@ -28,15 +28,15 @@ fnv = "1.0"
 hyperloglogplus = { version = "0.4", optional = true }
 itertools = "0.14"
 once_cell = "1.18"
-rand = { version = "0.8", optional = true }
+rand = { version = "0.9", optional = true }
 rayon = { version = "1.7", optional = true }
 regex = { version = "1", optional = true }

 [dev-dependencies]
 criterion = "0.5"
 geo_filters = { path = ".", features = ["evaluation"] }
-rand = "0.8"
-rand_chacha = "0.3"
+rand = "0.9"
+rand_chacha = "0.9"
 rayon = "1.7"

 [[bench]]

crates/geo_filters/src/config.rs (+1 -1)

@@ -312,7 +312,7 @@ pub(crate) mod tests {

     /// Runs estimation trials and returns the average precision and variance.
     pub(crate) fn test_estimate<M: Method, C: Count<M>>(f: impl Fn() -> C) -> (f32, f32) {
-        let mut rnd = rand::rngs::StdRng::from_entropy();
+        let mut rnd = rand::rngs::StdRng::from_os_rng();
         let cnt = 10000usize;
         let mut avg_precision = 0.0;
         let mut avg_var = 0.0;
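Every geo_filters source change is the same rename: rand 0.9 replaced `SeedableRng::from_entropy` with `SeedableRng::from_os_rng`, which seeds the generator from the operating system. A small, hypothetical sketch (not code from this repository; assumes rand 0.9 with its default features):

    use rand::rngs::StdRng;
    use rand::{Rng, SeedableRng};

    fn main() {
        // rand 0.8: StdRng::from_entropy(); rand 0.9: StdRng::from_os_rng()
        let mut rnd = StdRng::from_os_rng();
        let sample: f64 = rnd.random();
        println!("sample = {sample}");
    }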

crates/geo_filters/src/config/lookup.rs (+1 -1)

@@ -67,7 +67,7 @@ mod tests {
         let phi = phi_f64(B);
         let buckets = HashToBucketLookup::new(B);
         let mut var = 0.0;
-        let mut rnd = rand::rngs::StdRng::from_entropy();
+        let mut rnd = rand::rngs::StdRng::from_os_rng();
         for _ in 0..n {
             let hash = rnd.next_u64();
             let estimate = buckets.lookup(hash) as f64;

crates/geo_filters/src/diff_count.rs (+3 -3)

@@ -446,7 +446,7 @@ mod tests {

     #[test]
     fn test_estimate_diff_size_fast() {
-        let mut rnd = rand::rngs::StdRng::from_entropy();
+        let mut rnd = rand::rngs::StdRng::from_os_rng();
         let mut a_p = GeoDiffCount7_50::default();
         let mut a_hp = GeoDiffCount7::default();
         let mut b_p = GeoDiffCount7_50::default();

@@ -500,7 +500,7 @@ mod tests {

     #[test]
     fn test_xor_plus_mask() {
-        let mut rnd = rand::rngs::StdRng::from_entropy();
+        let mut rnd = rand::rngs::StdRng::from_os_rng();
         let mask_size = 12;
         let mask = 0b100001100000;
         let mut a = GeoDiffCount7::default();

@@ -527,7 +527,7 @@ mod tests {

     #[test]
     fn test_bit_chunks() {
-        let mut rnd = rand::rngs::StdRng::from_entropy();
+        let mut rnd = rand::rngs::StdRng::from_os_rng();
         for _ in 0..100 {
             let mut expected = GeoDiffCount7::default();
             for _ in 0..1000 {

crates/geo_filters/src/distinct_count.rs (+2 -2)

@@ -307,7 +307,7 @@ mod tests {

     #[test]
     fn test_estimate_union_size_fast() {
-        let mut rnd = rand::rngs::StdRng::from_entropy();
+        let mut rnd = rand::rngs::StdRng::from_os_rng();
         let mut a = GeoDistinctCount7::default();
         let mut b = GeoDistinctCount7::default();
         for _ in 0..10000 {

@@ -374,7 +374,7 @@ mod tests {

     #[test]
     fn test_bit_chunks() {
-        let mut rnd = rand::rngs::StdRng::from_entropy();
+        let mut rnd = rand::rngs::StdRng::from_os_rng();
         for _ in 0..100 {
             let mut expected = GeoDistinctCount7::default();
             for _ in 0..1000 {

crates/geo_filters/src/evaluation/simulation.rs (+1 -1)

@@ -181,7 +181,7 @@ pub fn simulate<F: Fn() -> Box<dyn SimulationCount> + Send + Sync>(
         .map(|_| {
             let mut t = f();
             let mut last_set_size = 0;
-            let mut rnd = rand::rngs::StdRng::from_entropy();
+            let mut rnd = rand::rngs::StdRng::from_os_rng();
             set_sizes
                 .iter()
                 .map(move |set_size| {

crates/string-offsets/Cargo.toml (+2 -2)

@@ -10,5 +10,5 @@ keywords = ["unicode", "positions", "utf16", "characters", "lines"]
 categories = ["algorithms", "data-structures", "text-processing", "development-tools::ffi"]

 [dev-dependencies]
-rand = "0.8"
-rand_chacha = "0.3"
+rand = "0.9"
+rand_chacha = "0.9"

crates/string-offsets/src/bitrank.rs (+4 -4)

@@ -214,12 +214,12 @@ impl BitRank {

 #[cfg(test)]
 mod tests {
-    use rand::distributions::Uniform;
+    use super::*;
+    use rand::distr::Uniform;
     use rand::prelude::*;
+    use rand_chacha::rand_core::SeedableRng;
     use rand_chacha::ChaCha8Rng;

-    use super::*;
-
     /// Creates a `BitRank` containing the integers in `iter` (which should be strictly
     /// increasing).
     pub fn bitrank<I: IntoIterator<Item = usize>>(iter: I) -> BitRank {

@@ -304,7 +304,7 @@ mod tests {
     #[test]
     fn test_rank_large_random() {
         let mut rng = ChaCha8Rng::seed_from_u64(2);
-        let uniform = Uniform::<usize>::from(0..1_000_000);
+        let uniform = Uniform::new(0, 1_000_000).unwrap();
         let mut random_bits = Vec::with_capacity(100_000);
         for _ in 0..100_000 {
             random_bits.push(uniform.sample(&mut rng));
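The bitrank.rs tests also pick up two distribution changes in rand 0.9: the `rand::distributions` module was renamed to `rand::distr`, and `Uniform::new` is now fallible, returning a `Result` instead of panicking on an invalid range (hence the `.unwrap()` in the diff). A hypothetical sketch combining both (not code from this repository; assumes `rand = "0.9"` and `rand_chacha = "0.9"`):

    use rand::distr::{Distribution, Uniform};
    use rand::SeedableRng;
    use rand_chacha::ChaCha8Rng;

    fn main() {
        // A fixed seed keeps the run deterministic, as in the original test.
        let mut rng = ChaCha8Rng::seed_from_u64(2);
        // rand 0.8: Uniform::<usize>::from(0..1_000_000); rand 0.9: fallible constructor
        let uniform = Uniform::new(0usize, 1_000_000).expect("low < high");
        let value = uniform.sample(&mut rng);
        println!("value = {value}");
    }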
