
Commit

remove one of the trainers
ArthurZucker committed Feb 12, 2025
1 parent a387594 commit 9707851
Showing 1 changed file with 0 additions and 14 deletions.
14 changes: 0 additions & 14 deletions tokenizers/src/models/mod.rs
```diff
@@ -9,7 +9,6 @@ pub mod wordpiece;
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
 
-use backtracking_bpe::Vocab;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 
 use crate::models::backtracking_bpe::{BacktrackingBpe, BacktrackingBpeTrainer};
```

Check warning on line 14 in tokenizers/src/models/mod.rs, reported identically by all five GitHub Actions jobs "Check it builds for Windows 32-bit" (3.9, 3.10, 3.11, 3.12, 3.13):

unused import: `BacktrackingBpeTrainer`
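These warnings follow directly from the change: the commit deletes the `BacktrackingBpeTrainer` variant and its match arms but leaves the name in the `use` statement on line 14, so every CI job flags it as unused. A presumable follow-up fix, not part of this commit, would be to shrink the import:

```rust
// Presumable follow-up (assumption, not in this commit): import only the
// model type, dropping the now-unused trainer name.
use crate::models::backtracking_bpe::BacktrackingBpe;
```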
```diff
@@ -250,7 +249,6 @@ pub enum TrainerWrapper {
     WordPieceTrainer(WordPieceTrainer),
     WordLevelTrainer(WordLevelTrainer),
     UnigramTrainer(UnigramTrainer),
-    BacktrackingBpeTrainer(BacktrackingBpeTrainer),
 }
```
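For context, `TrainerWrapper` is the crate's wrapper enum over every concrete trainer: the `impl Trainer for TrainerWrapper` below forwards each trait method through an exhaustive `match`. Exhaustiveness is why this one removal touches four places at once; once the variant is gone, any arm still naming it fails to compile. A minimal sketch of the pattern (hypothetical, simplified types, not the crate's real trait):

```rust
// Minimal sketch of the wrapper-enum dispatch pattern (hypothetical,
// simplified: the real Trainer trait has more methods plus serde derives).
trait Trainer {
    fn should_show_progress(&self) -> bool;
}

struct BpeTrainer { verbose: bool }
struct UnigramTrainer { verbose: bool }

enum TrainerWrapper {
    Bpe(BpeTrainer),
    Unigram(UnigramTrainer),
}

impl Trainer for TrainerWrapper {
    fn should_show_progress(&self) -> bool {
        // Exhaustive match: removing a variant (as this commit does) turns
        // every remaining arm that names it into a compile error, so each
        // method of the impl loses one arm in the same commit.
        match self {
            Self::Bpe(t) => t.verbose,
            Self::Unigram(t) => t.verbose,
        }
    }
}

fn main() {
    let trainer = TrainerWrapper::Bpe(BpeTrainer { verbose: true });
    assert!(trainer.should_show_progress());
}
```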

```diff
@@ -262,7 +260,6 @@ impl Trainer for TrainerWrapper {
             Self::WordPieceTrainer(wpt) => wpt.should_show_progress(),
             Self::WordLevelTrainer(wpt) => wpt.should_show_progress(),
             Self::UnigramTrainer(wpt) => wpt.should_show_progress(),
-            Self::BacktrackingBpeTrainer(wpt) => wpt.should_show_progress(),
         }
     }
```

```diff
@@ -284,10 +281,6 @@ impl Trainer for TrainerWrapper {
                 ModelWrapper::Unigram(u) => t.train(u),
                 _ => Err("UnigramTrainer can only train a Unigram".into()),
             },
-            Self::BacktrackingBpeTrainer(t) => match model {
-                ModelWrapper::BacktrackingBpe(bpe) => t.train(bpe),
-                _ => Err("BpeTrainer can only train a BPE".into()),
-            },
         }
     }
```

```diff
@@ -302,7 +295,6 @@ impl Trainer for TrainerWrapper {
             Self::WordPieceTrainer(wpt) => wpt.feed(iterator, process),
             Self::WordLevelTrainer(wpt) => wpt.feed(iterator, process),
             Self::UnigramTrainer(wpt) => wpt.feed(iterator, process),
-            Self::BacktrackingBpeTrainer(wpt) => wpt.feed(iterator, process),
         }
     }
 }
```
```diff
@@ -311,12 +303,6 @@
 impl_enum_from!(BpeTrainer, TrainerWrapper, BpeTrainer);
 impl_enum_from!(WordPieceTrainer, TrainerWrapper, WordPieceTrainer);
 impl_enum_from!(UnigramTrainer, TrainerWrapper, UnigramTrainer);
 impl_enum_from!(WordLevelTrainer, TrainerWrapper, WordLevelTrainer);
-impl_enum_from!(
-    BacktrackingBpeTrainer,
-    TrainerWrapper,
-    BacktrackingBpeTrainer
-);
 
 #[cfg(test)]
 mod tests {
     use super::*;
```
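Finally, the deleted `impl_enum_from!` invocation removed the conversion glue for the trainer. The macro's definition isn't shown in this diff; presumably it generates a `From` impl along these lines (an assumed, simplified expansion with stand-in types):

```rust
// Assumed shape of what impl_enum_from! generates: a From impl so the
// concrete trainer converts into the wrapper enum (stand-in types).
struct BacktrackingBpeTrainer;

enum TrainerWrapper {
    BacktrackingBpeTrainer(BacktrackingBpeTrainer),
}

impl From<BacktrackingBpeTrainer> for TrainerWrapper {
    fn from(t: BacktrackingBpeTrainer) -> Self {
        TrainerWrapper::BacktrackingBpeTrainer(t)
    }
}

fn main() {
    // The conversion this commit removes along with the variant:
    let _wrapped: TrainerWrapper = BacktrackingBpeTrainer.into();
}
```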
