Skip to content

Commit 1639ae2

Browse files
committed
remove prints
1 parent 7e83218 commit 1639ae2

File tree

3 files changed

+0
-3
lines changed

3 files changed

+0
-3
lines changed

tokenizers/src/tokenizer/added_vocabulary.rs

-1
Original file line numberDiff line numberDiff line change
@@ -487,7 +487,6 @@ impl AddedVocabulary {
487487
Ok(self.split_with_indices(sequence, &self.split_normalized_trie))
488488
})
489489
.expect("AddedVocabulary bad split");
490-
println!("after pretokenized.split {:?}", pretokenized);
491490
// ["I read a book", " <s>", "Hey"] -> ["▁I read a book", "▁ <s>", "▁Hey"]
492491
// ["▁I read a book", "▁ <s>", "▁Hey"] -> [.., "▁ ", "<s>", "▁Hey"]
493492

tokenizers/src/tokenizer/mod.rs

-1
Original file line numberDiff line numberDiff line change
@@ -943,7 +943,6 @@ where
943943
pretokenized: P,
944944
) -> Result<PreTokenizedString> {
945945
let mut pretokenized: PreTokenizedString = pretokenized.into();
946-
println!("do pretokenized converted: {:?}", pretokenized);
947946
if let Some(ref pretok) = self.pre_tokenizer {
948947
pretok.pre_tokenize(&mut pretokenized)?;
949948
}

tokenizers/src/tokenizer/pre_tokenizer.rs

-1
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,6 @@ impl PreTokenizedString {
166166
if let Some(converter) = offset_converter {
167167
offsets = converter.convert(offsets).unwrap_or(offsets);
168168
}
169-
println!("into encode, {:?}, {:?}", normalized.clone(), token.value);
170169
(
171170
token.id,
172171
token.value,

0 commit comments

Comments (0)