From ef80a5f339ee50ebc859542dc60e547bb14ec27a Mon Sep 17 00:00:00 2001 From: armin Date: Fri, 14 Feb 2020 16:26:09 +0100 Subject: [PATCH] Update tokenization.py Log "using sentence piece tokenzier." only when initializing the FullTokenizer, not on every conversion call --- tokenization.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tokenization.py b/tokenization.py index 3c1389b0..b3131ba4 100644 --- a/tokenization.py +++ b/tokenization.py @@ -281,7 +281,6 @@ def tokenize(self, text): def convert_tokens_to_ids(self, tokens): if self.sp_model: - tf.logging.info("using sentence piece tokenzier.") return [self.sp_model.PieceToId( printable_text(token)) for token in tokens] else: @@ -289,7 +288,6 @@ def convert_tokens_to_ids(self, tokens): def convert_ids_to_tokens(self, ids): if self.sp_model: - tf.logging.info("using sentence piece tokenzier.") return [self.sp_model.IdToPiece(id_) for id_ in ids] else: return convert_by_vocab(self.inv_vocab, ids)