Commit 861de6a

Apply clippy suggestions
1 parent 893f28d commit 861de6a

13 files changed: +44 −50 lines

benches/array.rs (+6 −6)

@@ -25,18 +25,18 @@ fn allround_iter() -> impl Iterator<Item = String> + Clone {
     corpus.into_iter()
 }
 
-fn known_iter<'a>(
-    embeds: &'a Embeddings<VocabWrap, StorageWrap>,
-) -> impl 'a + Iterator<Item = String> + Clone {
+fn known_iter(
+    embeds: &Embeddings<VocabWrap, StorageWrap>,
+) -> impl '_ + Iterator<Item = String> + Clone {
     allround_iter().filter_map(move |w| match embeds.vocab().idx(&w) {
         Some(WordIndex::Word(_)) => Some(w),
         _ => None,
     })
 }
 
-fn unknown_iter<'a>(
-    embeds: &'a Embeddings<VocabWrap, StorageWrap>,
-) -> impl 'a + Iterator<Item = String> + Clone {
+fn unknown_iter(
+    embeds: &Embeddings<VocabWrap, StorageWrap>,
+) -> impl '_ + Iterator<Item = String> + Clone {
     allround_iter().filter_map(move |w| match embeds.vocab().idx(&w) {
         Some(WordIndex::Subword(_)) => Some(w),
         _ => None,
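
Both benchmark helpers take a single reference argument, so clippy's needless_lifetimes lint applies: the named 'a can be dropped and the anonymous '_ lifetime used in the impl Trait return type instead. A minimal sketch of the pattern, with a hypothetical Lookup type standing in for Embeddings:

    struct Lookup {
        words: Vec<String>,
    }

    // With a single reference parameter, the lifetime can be elided: the
    // anonymous '_ in the return type ties the iterator's borrow to `lookup`
    // without naming a lifetime parameter.
    fn members(lookup: &Lookup) -> impl '_ + Iterator<Item = String> {
        lookup.words.iter().cloned()
    }

    fn main() {
        let lookup = Lookup {
            words: vec!["berlin".to_string(), "potsdam".to_string()],
        };
        assert_eq!(members(&lookup).count(), 2);
    }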

benches/quantized.rs (+6 −6)

@@ -25,18 +25,18 @@ fn allround_iter() -> impl Iterator<Item = String> + Clone {
     corpus.into_iter()
 }
 
-fn known_iter<'a>(
-    embeds: &'a Embeddings<VocabWrap, StorageWrap>,
-) -> impl 'a + Iterator<Item = String> + Clone {
+fn known_iter(
+    embeds: &Embeddings<VocabWrap, StorageWrap>,
+) -> impl '_ + Iterator<Item = String> + Clone {
     allround_iter().filter_map(move |w| match embeds.vocab().idx(&w) {
         Some(WordIndex::Word(_)) => Some(w),
         _ => None,
     })
 }
 
-fn unknown_iter<'a>(
-    embeds: &'a Embeddings<VocabWrap, StorageWrap>,
-) -> impl 'a + Iterator<Item = String> + Clone {
+fn unknown_iter(
+    embeds: &Embeddings<VocabWrap, StorageWrap>,
+) -> impl '_ + Iterator<Item = String> + Clone {
     allround_iter().filter_map(move |w| match embeds.vocab().idx(&w) {
         Some(WordIndex::Subword(_)) => Some(w),
         _ => None,

benches/subword.rs (−1)

@@ -13,7 +13,6 @@ fn subwords(string: &str, min_n: usize, max_n: usize, indexer: &impl Indexer) ->
     // evaluates them.
     string
         .subword_indices(min_n, max_n, indexer)
-        .into_iter()
        .fold(0, |sum, v| sum.wrapping_add(v))
 }
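
Assuming subword_indices already returns an iterator, the extra .into_iter() call is an identity conversion: IntoIterator is implemented for every Iterator, so the call compiles but does nothing, and clippy's useless_conversion lint flags it. A standalone sketch of the same pattern:

    fn main() {
        let v = vec![1u64, 2, 3];
        // `v.iter().map(...)` is already an iterator; chaining a further
        // `.into_iter()` before `.fold(...)` would be an identity conversion
        // that clippy::useless_conversion reports.
        let sum = v
            .iter()
            .map(|x| x * 2)
            .fold(0u64, |sum, v| sum.wrapping_add(v));
        assert_eq!(sum, 12);
    }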

src/chunks/norms.rs (+3 −3)

@@ -71,7 +71,7 @@ impl ReadChunk for NdNorms {
         f32::ensure_data_type(read)?;
 
         let n_padding =
-            padding::<f32>(read.seek(SeekFrom::Current(0)).map_err(|e| {
+            padding::<f32>(read.stream_position().map_err(|e| {
                 Error::read_error("Cannot get file position for computing padding", e)
             })?);
         read.seek(SeekFrom::Current(n_padding as i64))
@@ -109,12 +109,12 @@ impl WriteChunk for NdNorms {
         write
             .write_u32::<LittleEndian>(ChunkIdentifier::NdNorms as u32)
             .map_err(|e| Error::write_error("Cannot write norms chunk identifier", e))?;
-        let n_padding = padding::<f32>(write.seek(SeekFrom::Current(0)).map_err(|e| {
+        let n_padding = padding::<f32>(write.stream_position().map_err(|e| {
             Error::write_error("Cannot get file position for computing padding", e)
         })?);
 
         let remaining_chunk_len =
-            self.chunk_len(write.seek(SeekFrom::Current(0)).map_err(|e| {
+            self.chunk_len(write.stream_position().map_err(|e| {
                 Error::read_error("Cannot get file position for computing padding", e)
             })?) - (size_of::<u32>() + size_of::<u64>()) as u64;
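
seek(SeekFrom::Current(0)) returns the current offset as a side effect of a zero-byte relative seek; Seek::stream_position (stable since Rust 1.51) queries the position directly, which is what clippy suggests here. A small self-contained comparison:

    use std::io::{Cursor, Result, Seek, SeekFrom};

    fn main() -> Result<()> {
        let mut read = Cursor::new(vec![0u8; 16]);
        read.seek(SeekFrom::Start(5))?;

        // Old form: a zero-byte relative seek, used only for its return value.
        let via_seek = read.seek(SeekFrom::Current(0))?;
        // New form: ask for the current position directly.
        let via_position = read.stream_position()?;

        assert_eq!(via_seek, via_position);
        assert_eq!(via_position, 5);
        Ok(())
    }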

src/chunks/storage/array.rs (+6 −6)

@@ -134,15 +134,15 @@ mod mmap {
         // The components of the embedding matrix should be of type f32.
         f32::ensure_data_type(read)?;
 
-        let n_padding = padding::<f32>(read.seek(SeekFrom::Current(0)).map_err(|e| {
+        let n_padding = padding::<f32>(read.stream_position().map_err(|e| {
             Error::read_error("Cannot get file position for computing padding", e)
         })?);
         read.seek(SeekFrom::Current(n_padding as i64))
             .map_err(|e| Error::read_error("Cannot skip padding", e))?;
 
         // Set up memory mapping.
         let matrix_len = shape.size() * size_of::<f32>();
-        let offset = read.seek(SeekFrom::Current(0)).map_err(|e| {
+        let offset = read.stream_position().map_err(|e| {
             Error::read_error(
                 "Cannot get file position for memory mapping embedding matrix",
                 e,
@@ -153,7 +153,7 @@ mod mmap {
         mmap_opts
             .offset(offset)
             .len(matrix_len)
-            .map(&*read.get_ref())
+            .map(read.get_ref())
             .map_err(|e| Error::read_error("Cannot memory map embedding matrix", e))?
     };
 
@@ -218,13 +218,13 @@ impl NdArray {
         write
             .write_u32::<LittleEndian>(ChunkIdentifier::NdArray as u32)
             .map_err(|e| Error::write_error("Cannot write embedding matrix chunk identifier", e))?;
-        let n_padding = padding::<f32>(write.seek(SeekFrom::Current(0)).map_err(|e| {
+        let n_padding = padding::<f32>(write.stream_position().map_err(|e| {
             Error::write_error("Cannot get file position for computing padding", e)
         })?);
 
         let remaining_chunk_len = Self::chunk_len(
             data.view(),
-            write.seek(SeekFrom::Current(0)).map_err(|e| {
+            write.stream_position().map_err(|e| {
                 Error::read_error("Cannot get file position for computing padding", e)
             })?,
         ) - (size_of::<u32>() + size_of::<u64>()) as u64;
@@ -346,7 +346,7 @@ impl ReadChunk for NdArray {
         f32::ensure_data_type(read)?;
 
         let n_padding =
-            padding::<f32>(read.seek(SeekFrom::Current(0)).map_err(|e| {
+            padding::<f32>(read.stream_position().map_err(|e| {
                 Error::read_error("Cannot get file position for computing padding", e)
             })?);
         read.seek(SeekFrom::Current(n_padding as i64))
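
Besides the stream_position changes, this file drops a redundant reborrow: BufReader::get_ref already returns &File, so &*read.get_ref() dereferences and immediately re-borrows to the same type. A minimal sketch of that pattern, with a Cursor standing in for the file and a plain function standing in for MmapOptions::map:

    use std::io::{BufReader, Cursor};

    // Stand-in for any API that takes a shared reference.
    fn takes_ref(_r: &Cursor<Vec<u8>>) {}

    fn main() {
        let read = BufReader::new(Cursor::new(vec![1u8, 2, 3]));
        // `get_ref()` already yields `&Cursor<_>`; `&*` is a no-op round trip
        // that clippy flags.
        takes_ref(&*read.get_ref()); // flagged
        takes_ref(read.get_ref()); // idiomatic
    }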

src/chunks/storage/quantized.rs (+5 −5)

@@ -117,7 +117,7 @@ impl QuantizedArray {
         f32::ensure_data_type(read)?;
 
         let n_padding =
-            padding::<f32>(read.seek(SeekFrom::Current(0)).map_err(|e| {
+            padding::<f32>(read.stream_position().map_err(|e| {
                 Error::read_error("Cannot get file position for computing padding", e)
             })?);
         read.seek(SeekFrom::Current(n_padding as i64))
@@ -171,12 +171,12 @@ impl QuantizedArray {
             quantizer,
             quantized.view(),
             norms,
-            write.seek(SeekFrom::Current(0)).map_err(|e| {
+            write.stream_position().map_err(|e| {
                 Error::read_error("Cannot get file position for computing padding", e)
             })?,
         ) - (size_of::<u32>() + size_of::<u64>()) as u64;
 
-        let n_padding = padding::<f32>(write.seek(SeekFrom::Current(0)).map_err(|e| {
+        let n_padding = padding::<f32>(write.stream_position().map_err(|e| {
             Error::write_error("Cannot get file position for computing padding", e)
         })?);
 
@@ -562,7 +562,7 @@ mod mmap {
         n_embeddings: usize,
         quantized_len: usize,
     ) -> Result<Mmap> {
-        let offset = read.seek(SeekFrom::Current(0)).map_err(|e| {
+        let offset = read.stream_position().map_err(|e| {
             Error::read_error(
                 "Cannot get file position for memory mapping embedding matrix",
                 e,
@@ -574,7 +574,7 @@ mod mmap {
         mmap_opts
             .offset(offset)
             .len(matrix_len)
-            .map(&*read.get_ref())
+            .map(read.get_ref())
             .map_err(|e| {
                 Error::read_error("Cannot memory map quantized embedding matrix", e)
             })?

src/chunks/storage/wrappers.rs (+4 −4)

@@ -126,7 +126,7 @@ impl ReadChunk for StorageWrap {
         R: Read + Seek,
     {
         let chunk_start_pos = read
-            .seek(SeekFrom::Current(0))
+            .stream_position()
             .map_err(|e| Error::read_error("Cannot get storage chunk start position", e))?;
 
         let chunk_id = read
@@ -156,7 +156,7 @@ impl ReadChunk for StorageWrap {
 impl MmapChunk for StorageWrap {
     fn mmap_chunk(read: &mut BufReader<File>) -> Result<Self> {
         let chunk_start_pos = read
-            .seek(SeekFrom::Current(0))
+            .stream_position()
             .map_err(|e| Error::read_error("Cannot get storage chunk start position", e))?;
 
         let chunk_id = read
@@ -306,7 +306,7 @@ impl ReadChunk for StorageViewWrap {
         R: Read + Seek,
     {
         let chunk_start_pos = read
-            .seek(SeekFrom::Current(0))
+            .stream_position()
             .map_err(|e| Error::read_error("Cannot get storage chunk start position", e))?;
 
         let chunk_id = read
@@ -361,7 +361,7 @@ impl WriteChunk for StorageViewWrap {
 impl MmapChunk for StorageViewWrap {
     fn mmap_chunk(read: &mut BufReader<File>) -> Result<Self> {
         let chunk_start_pos = read
-            .seek(SeekFrom::Current(0))
+            .stream_position()
             .map_err(|e| Error::read_error("Cannot get storage chunk start position", e))?;
 
         let chunk_id = read

src/chunks/vocab/simple.rs (+2 −2)

@@ -1,6 +1,6 @@
 use std::collections::HashMap;
 use std::convert::TryInto;
-use std::io::{Read, Seek, SeekFrom, Write};
+use std::io::{Read, Seek, Write};
 use std::mem::size_of;
 
 use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
@@ -103,7 +103,7 @@ impl WriteChunk for SimpleVocab {
             .map_err(|e| Error::write_error("Cannot write vocabulary chunk identifier", e))?;
 
         let remaining_chunk_len =
-            self.chunk_len(write.seek(SeekFrom::Current(0)).map_err(|e| {
+            self.chunk_len(write.stream_position().map_err(|e| {
                 Error::read_error("Cannot get file position for computing padding", e)
             })?) - (size_of::<u32>() + size_of::<u64>()) as u64;

src/chunks/vocab/subword.rs (+1 −1)

@@ -400,7 +400,7 @@ where
         .read_u32::<LittleEndian>()
         .map_err(|e| Error::read_error("Cannot read number of buckets", e))?;
 
-    let words = read_vocab_items(read, vocab_len as usize)?;
+    let words = read_vocab_items(read, vocab_len)?;
 
     Ok(SubwordVocab::new(
         words,
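
Dropping the cast is only valid because vocab_len is evidently already a usize at this point; casting a value to its own type is what clippy's unnecessary_cast lint reports. A trivial sketch:

    fn takes_usize(n: usize) -> usize {
        n
    }

    fn main() {
        let len: usize = 42;
        // `len` is already a usize, so `len as usize` is a same-type cast
        // flagged by clippy::unnecessary_cast.
        assert_eq!(takes_usize(len), takes_usize(len as usize));
    }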

src/chunks/vocab/wrappers.rs (+1 −1)

@@ -110,7 +110,7 @@ impl ReadChunk for VocabWrap {
         R: Read + Seek,
     {
         let chunk_start_pos = read
-            .seek(SeekFrom::Current(0))
+            .stream_position()
             .map_err(|e| Error::read_error("Cannot get vocabulary chunk start position", e))?;
         let chunk_id = read
             .read_u32::<LittleEndian>()

src/similarity.rs (+5 −5)

@@ -362,7 +362,7 @@ where
 
 #[cfg(test)]
 mod tests {
-
+    use std::f32;
     use std::fs::File;
     use std::io::BufReader;
 
@@ -482,7 +482,7 @@ mod tests {
         .abs_diff_eq(&1f32, 1e-5));
         assert!((WordSimilarityResult {
             word: "test",
-            similarity: NotNan::new(0.70710678).unwrap()
+            similarity: NotNan::new(f32::consts::FRAC_1_SQRT_2).unwrap()
         })
         .angular_similarity()
         .abs_diff_eq(&0.75, 1e-5));
@@ -510,7 +510,7 @@ mod tests {
         .abs_diff_eq(&0f32, 1e-5));
         assert!((WordSimilarityResult {
             word: "test",
-            similarity: NotNan::new(0.70710678).unwrap()
+            similarity: NotNan::new(f32::consts::FRAC_1_SQRT_2).unwrap()
         })
         .euclidean_distance()
         .abs_diff_eq(&0.76537, 1e-5));
@@ -538,7 +538,7 @@ mod tests {
         .abs_diff_eq(&1f32, 1e-5));
         assert!((WordSimilarityResult {
             word: "test",
-            similarity: NotNan::new(0.70710678).unwrap()
+            similarity: NotNan::new(f32::consts::FRAC_1_SQRT_2).unwrap()
        })
        .euclidean_similarity()
        .abs_diff_eq(&0.61732, 1e-5));
@@ -602,7 +602,7 @@ mod tests {
         assert_eq!(10, result.len());
         assert_eq!(result.next().unwrap().word, "Berlin");
 
-        for (idx, word_similarity) in result.into_iter().enumerate() {
+        for (idx, word_similarity) in result.enumerate() {
             assert_eq!(SIMILARITY_ORDER[idx], word_similarity.word)
         }
     }
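
0.70710678 is a truncated 1/√2; clippy's approx_constant lint points to the exact std constant, which also documents what the number means. A sketch:

    fn main() {
        // A hand-written approximation of 1/sqrt(2); clippy::approx_constant
        // suggests the std constant, which is exact to f32 precision.
        let approx = 0.707_106_78_f32;
        let exact = std::f32::consts::FRAC_1_SQRT_2;
        assert!((approx - exact).abs() < 1e-7);
    }

The final hunk removes another identity .into_iter(): result is already an iterator, so .enumerate() can be called on it directly.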

src/subword.rs (+3 −8)

@@ -86,17 +86,13 @@ where
     }
 
     fn buckets(&self) -> usize {
-        self.buckets_exp as usize
+        self.buckets_exp
     }
 }
 
 impl<H> Clone for HashIndexer<H> {
     fn clone(&self) -> Self {
-        HashIndexer {
-            buckets_exp: self.buckets_exp,
-            mask: self.mask,
-            _phantom: PhantomData,
-        }
+        *self
     }
 }
 
@@ -699,8 +695,7 @@ mod tests {
             .subword_indices_with_ngrams(3, 6, &indexer)
             .collect::<Vec<_>>();
         ngrams_indices_test.sort_by_key(|ngrams_indices_pairs| ngrams_indices_pairs.1.clone());
-        for (iter_check, iter_test) in ngrams_indices_check.into_iter().zip(ngrams_indices_test)
-        {
+        for (iter_check, iter_test) in ngrams_indices_check.iter().zip(ngrams_indices_test) {
             assert_eq!(iter_check.0, iter_test.0);
         }
     }
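
Rewriting clone as *self relies on HashIndexer being Copy, which its fields (an integer, a mask, and a PhantomData) all are; the manual impls presumably exist so that H itself need not be Clone or Copy, a bound that derive(Clone, Copy) would impose. A sketch of that pattern with a hypothetical Indexer type:

    use std::marker::PhantomData;

    struct Indexer<H> {
        buckets_exp: usize,
        mask: u64,
        _phantom: PhantomData<H>,
    }

    // Manual impls avoid requiring H: Copy/Clone, unlike a derive.
    impl<H> Copy for Indexer<H> {}

    impl<H> Clone for Indexer<H> {
        fn clone(&self) -> Self {
            // For a Copy type, cloning is just a bitwise copy.
            *self
        }
    }

    fn main() {
        let a: Indexer<()> = Indexer {
            buckets_exp: 21,
            mask: (1 << 21) - 1,
            _phantom: PhantomData,
        };
        let b = a.clone();
        assert_eq!(a.buckets_exp, b.buckets_exp);
    }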

src/util.rs (+2 −2)

@@ -20,7 +20,7 @@ impl<T> FromIteratorWithCapacity<T> for Vec<T> {
         I: IntoIterator<Item = T>,
     {
         let mut v = Vec::with_capacity(capacity);
-        v.extend(iter.into_iter());
+        v.extend(iter);
         v
     }
 }
@@ -31,7 +31,7 @@ impl<T> FromIteratorWithCapacity<T> for VecDeque<T> {
         I: IntoIterator<Item = T>,
     {
         let mut v = VecDeque::with_capacity(capacity);
-        v.extend(iter.into_iter());
+        v.extend(iter);
         v
     }
 }
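
Extend::extend is generic over IntoIterator, so wrapping the argument in .into_iter() first is another identity conversion that useless_conversion catches. A sketch:

    use std::collections::VecDeque;

    fn main() {
        let items = vec![1, 2, 3];
        let mut v: VecDeque<i32> = VecDeque::with_capacity(items.len());
        // `extend` accepts any IntoIterator, so `v.extend(items.into_iter())`
        // would be flagged by clippy::useless_conversion.
        v.extend(items);
        assert!(v.iter().eq([1, 2, 3].iter()));
    }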
